commit f343f405f78cb8080614efa943a3e3a29892d4af Author: unknown Date: Mon Dec 8 21:35:55 2025 +0900 first commit diff --git a/.env b/.env new file mode 100644 index 0000000..587d927 --- /dev/null +++ b/.env @@ -0,0 +1,43 @@ +# Application +APP_NAME=VConnect API +APP_VERSION=1.0.0 +DEBUG=True +API_V1_PREFIX=/api + +# Database +DATABASE_URL=sqlite:///./vconnect.db +# SQLite (개발용): sqlite:///./vconnect.db + +# JWT Authentication +JWT_SECRET_KEY=your-super-secret-key-change-this-in-production +JWT_ALGORITHM=HS256 +JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 +JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 + +# Proxmox +PROXMOX_HOST=https://192.168.0.115:8006 +PROXMOX_API_TOKEN=PVEAPIToken=root@pam!vconnect=d35fb41c-34fa-4fae-9d97-8d18af7b5525 +PROXMOX_VERIFY_SSL=False + +# SSH Gateway (터널링 서버) +SSH_HOST=192.168.0.97 +SSH_PORT=22 +SSH_USERNAME=kdesk84 +SSH_KEY_PATH=/data/vconnect-api/.ssh/id_rsa +# 또는 SSH_PASSWORD=your-password + +# Port Range for Tunneling +TUNNEL_PORT_MIN=50000 +TUNNEL_PORT_MAX=60000 + +# CORS (클라이언트 도메인) +CORS_ORIGINS=["http://localhost:8080", "http://localhost:3000"] + +# Logging +LOG_LEVEL=INFO +LOG_FILE=logs/vconnect.log + +# Admin (첫 관리자 계정) +ADMIN_USERNAME=admin +ADMIN_PASSWORD=admin1234 +ADMIN_EMAIL=admin@example.com diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..962c857 --- /dev/null +++ b/.env.example @@ -0,0 +1,43 @@ +# Application +APP_NAME=VConnect API +APP_VERSION=1.0.0 +DEBUG=True +API_V1_PREFIX=/api + +# Database +DATABASE_URL=postgresql://vconnect:vconnect123@localhost:5432/vconnect +# SQLite (개발용): sqlite:///./vconnect.db + +# JWT Authentication +JWT_SECRET_KEY=your-super-secret-key-change-this-in-production +JWT_ALGORITHM=HS256 +JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30 +JWT_REFRESH_TOKEN_EXPIRE_DAYS=7 + +# Proxmox +PROXMOX_HOST=https://pve.mouse84.com:8006 +PROXMOX_API_TOKEN=PVEAPIToken=root@pam!vconnect=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +PROXMOX_VERIFY_SSL=False + +# SSH Gateway (터널링 서버) +SSH_HOST=ssh.example.com +SSH_PORT=22 +SSH_USERNAME=tunneluser +SSH_KEY_PATH=/path/to/ssh/private_key +# 또는 SSH_PASSWORD=your-password + +# Port Range for Tunneling +TUNNEL_PORT_MIN=50000 +TUNNEL_PORT_MAX=60000 + +# CORS (클라이언트 도메인) +CORS_ORIGINS=["http://localhost:8080", "http://localhost:3000"] + +# Logging +LOG_LEVEL=INFO +LOG_FILE=logs/vconnect.log + +# Admin (첫 관리자 계정) +ADMIN_USERNAME=admin +ADMIN_PASSWORD=admin123 +ADMIN_EMAIL=admin@example.com diff --git a/.ssh/id_rsa b/.ssh/id_rsa new file mode 100644 index 0000000..c4b882f --- /dev/null +++ b/.ssh/id_rsa @@ -0,0 +1,49 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAACFwAAAAdzc2gtcn +NhAAAAAwEAAQAAAgEAxujXqcl/QrtdHd1fGSJQmRR1/McRkLJSE9nYwne27XAjW27xXArc +giqSSgU+kh4MdjFJJ57iFg2L3ekZ86xdjvsuzzk1rSVIq6f3TA3Wg+apSrx4ny1Z53K3+u +YAwiMHcsIgv3C1zW8s+gY48Rtf7fnMYwZqrE4pr+M6beccv9LfnCi01oPomTCQagtXlTkU +rIm9fsu1VNHn3TkUAmKS8WRAMthzYLthmekl2OkfTpepSIT0xWuEv9Ny6eY+KzZeiF5NM3 +slTnImD6RpJSjt34sdn3Ng06C0LD9nHDkeUBAZ0TMoiu5arA0jjsf9hOFumCUX3hyaHtm8 +O19G3ZVZHnEa87Wjx+rHOyeTAjmC5IGGivDhyiy1ehWQw/wR9go54m95IT9rDKObpl5bQW +BeSn2Nid586kMd0qILEOz5Ziqpnl5XulbumFvZkG/SR1ef7cyaykYyfJp0XmjFd8YKlBS9 +fF10DcOQjhN389X4pblY1CavpvCE2oN+8ijDHUGR40PJBrCMDCnradGf3lB/5m6eKTJfhO +UtEz5iQYf8CoCVvpUU85j8648f2f7gxz9L9yhckwpMlUvuxI1yuSAehaD9u1pklECS1Sbt +vcp7a6O6WCHn/wolOPzOcleB9dXVziQUpCf8L/p+K7cFvM5tqawLuzOKFbk6FXNTMMVRcb +8AAAdIdfeBDHX3gQwAAAAHc3NoLXJzYQAAAgEAxujXqcl/QrtdHd1fGSJQmRR1/McRkLJS +E9nYwne27XAjW27xXArcgiqSSgU+kh4MdjFJJ57iFg2L3ekZ86xdjvsuzzk1rSVIq6f3TA 
+3Wg+apSrx4ny1Z53K3+uYAwiMHcsIgv3C1zW8s+gY48Rtf7fnMYwZqrE4pr+M6beccv9Lf +nCi01oPomTCQagtXlTkUrIm9fsu1VNHn3TkUAmKS8WRAMthzYLthmekl2OkfTpepSIT0xW +uEv9Ny6eY+KzZeiF5NM3slTnImD6RpJSjt34sdn3Ng06C0LD9nHDkeUBAZ0TMoiu5arA0j +jsf9hOFumCUX3hyaHtm8O19G3ZVZHnEa87Wjx+rHOyeTAjmC5IGGivDhyiy1ehWQw/wR9g +o54m95IT9rDKObpl5bQWBeSn2Nid586kMd0qILEOz5Ziqpnl5XulbumFvZkG/SR1ef7cya +ykYyfJp0XmjFd8YKlBS9fF10DcOQjhN389X4pblY1CavpvCE2oN+8ijDHUGR40PJBrCMDC +nradGf3lB/5m6eKTJfhOUtEz5iQYf8CoCVvpUU85j8648f2f7gxz9L9yhckwpMlUvuxI1y +uSAehaD9u1pklECS1Sbtvcp7a6O6WCHn/wolOPzOcleB9dXVziQUpCf8L/p+K7cFvM5tqa +wLuzOKFbk6FXNTMMVRcb8AAAADAQABAAACABoGuG/2YLObbs0sEeKThgaUUfXfxEbHFmHo +5L8D9AhbCRKl0Gici+AaRZGa4GY1DqizvTXfyQcIAbo+Yw9qXm7eze/+X9gbzfJBTuSBgQ +U5kVgpPgKHPWwWgMflqslm1PV+kii1IjdS/zMTIls7RjuOA7Yco6jlAfkKMIiAhNdSoRXp +O57qm+zYw7n5vvLUXltPGzTeibJN/tqIOpEvUw5zxsoAXrm++uqCbQSAnas7wUIvEhSiLg +39fbE+LPnxf2q00BbW26RJNP1h+VdhOh1rB1a9kuNN8+qVkd50WNOIBSDFjV3/CcKDu5iA +nDBJYPOrvX+UsCkuOz4XiDRmc3EC8QyKrTEwUODwQCJNU3QMyEisCzUUpgYxj/X83ZSe5w +zpazGXm/YBvijTCWxR/tmy/t1226ORzR7wNVQTw9JcPQ+HjKYlaWu2hkVlVnnRgyKwje8v +p2VXI2Qq2QOIjeJtneI8rUMZgv0H4N503PNnPuu+xTdy0Bzvuscesuf9NDcZfIwD95Jtdt +//n1so54ebVWqUxRHvDxFuCLp5OlCD3lJG1HHz4WT6MUiA2SL9znJt65myOhPyupILwSnH +33+9fyZhn9eYUapzJOSizJvbwQbnx1AnkvTKnz+sFfnJTiD/er5ChZdwlzUQd36oBn8qBh +DmUp59t/uzcDTMrtGdAAABAG7cy3jp5qnEU249IktkD3uPU0N/6JETMSUyRu2DdwI3RTol +KdscxICvK3E08ysOxjX/8kU6UqGirWjCIR7IHvjbLNOrKj/EXkeDyVA0HqmJ+tr0FIe4gR +qGQ016uS6uUANh2Bb8zicDu9e2VDuWkkq3h2XR+UGC8lUQn/OeoiDVb7hVxCbdFQwzmQHo +74+VVYCdYp2B9qJ6MyMdc92rc2dfamYj/eVitJz6Rhs3COOPJOOrAvrykkst4BtS4YZuzM +vuSYmjSii0TA9HIZXcdxZ+VjEbutrNkd467x44KBC6bzo+xVgn9vnyEDxQEBCKvNdDb5y9 +94QNndRGzYumSXoAAAEBAO8gcxXRUdlHtSWHml7+6XJiCmUSIgzCEmOxoG7C+hNWbc6pDc +WYvj1w3QNJYb+DQg+aHsUI5dAMSuLgZvIHogDoHSKy5mkoIb9uIS1vqt9UiY9XajuPPb7u +uJaxI6K8h7Xyc6z9f3gtajov4QDM6w5pyvZJjrzwmPqO6IQhcd7EWlpl+FOGVc0OFupn3M +0dl9GVxtfZq0GAD3W5mum1PkUjgBYt22hzXmQU1uqCqcoivPS7v3fHPWj/Uh+4ZzM5O7xk +WyGkt6RVosUfB1lNBuYxncy+SVVGlDLzwg5VVhwfX16jdA8ntVL8C7RmbO3jcGiEbVsSKp +cue33/5j75Ui0AAAEBANTx6j/uIS3kVOZc8whQOSC3Z0Fa02RbeDnmb8S/68Y3iz5EijCW +w8Cu85zZlCz5O+lnWel4Zy6oe6k/AXVm8CaRoafeMhw7AhvZth0bV0ZEIY98g0k/jzUkCX +6sQKBjnjw0DGtRLkboFkY4cfRUsgJe0C2E4c1iHoqLGNbcCGB0CNLUAJm1Cn6FPeQrB/Lo +F0TRy1u2ZWPwMNhITtR3nb1ZbnDt96Jp5BMRVxLn1bDx3yRd/Zi1EvXNYr7p3aoLhJDCKN +rZHIRl6gR8FFDulirMANZ+CxntDBCZ6Y9dozHrvgT+TIZRVUwIRxkwnbHsgtt93NfH/xrd +xLxm9mkUQxsAAAAPcm9vdEBhcHQtc2VydmVyAQIDBA== +-----END OPENSSH PRIVATE KEY----- diff --git a/.ssh/id_rsa.pub b/.ssh/id_rsa.pub new file mode 100644 index 0000000..a723d44 --- /dev/null +++ b/.ssh/id_rsa.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDG6NepyX9Cu10d3V8ZIlCZFHX8xxGQslIT2djCd7btcCNbbvFcCtyCKpJKBT6SHgx2MUknnuIWDYvd6RnzrF2O+y7POTWtJUirp/dMDdaD5qlKvHifLVnncrf65gDCIwdywiC/cLXNbyz6BjjxG1/t+cxjBmqsTimv4zpt5xy/0t+cKLTWg+iZMJBqC1eVORSsib1+y7VU0efdORQCYpLxZEAy2HNgu2GZ6SXY6R9Ol6lIhPTFa4S/03Lp5j4rNl6IXk0zeyVOciYPpGklKO3fix2fc2DToLQsP2ccOR5QEBnRMyiK7lqsDSOOx/2E4W6YJRfeHJoe2bw7X0bdlVkecRrztaPH6sc7J5MCOYLkgYaK8OHKLLV6FZDD/BH2Cjnib3khP2sMo5umXltBYF5KfY2J3nzqQx3SogsQ7PlmKqmeXle6Vu6YW9mQb9JHV5/tzJrKRjJ8mnReaMV3xgqUFL18XXQNw5COE3fz1filuVjUJq+m8ITag37yKMMdQZHjQ8kGsIwMKetp0Z/eUH/mbp4pMl+E5S0TPmJBh/wKgJW+lRTzmPzrjx/Z/uDHP0v3KFyTCkyVS+7EjXK5IB6FoP27WmSUQJLVJu29yntro7pYIef/CiU4/M5yV4H11dXOJBSkJ/wv+n4rtwW8zm2prAu7M4oVuToVc1MwxVFxvw== root@apt-server diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c802ec8 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.11-slim + +WORKDIR /app + +# 의존성 복사 및 설치 +COPY requirements.txt . 
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY app ./app
+
+# Expose port
+EXPOSE 8000
+
+# Run server
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/QUICKSTART.md b/QUICKSTART.md
new file mode 100644
index 0000000..e597c8d
--- /dev/null
+++ b/QUICKSTART.md
@@ -0,0 +1,151 @@
+# VConnect API Server Quick Start Guide
+
+## 1️⃣ Installation
+
+```bash
+# Move into the project directory
+cd vconnect-api
+
+# Create a virtual environment (optional but recommended)
+python -m venv venv
+
+# Activate the virtual environment
+# Linux/Mac:
+source venv/bin/activate
+# Windows:
+venv\Scripts\activate
+
+# Install dependencies
+pip install -r requirements.txt
+```
+
+## 2️⃣ Configuration
+
+```bash
+# Create the .env file
+cp .env.example .env
+
+# Edit the .env file
+nano .env  # or use your preferred editor
+```
+
+### Required settings:
+
+```env
+# Proxmox settings
+PROXMOX_HOST=https://pve.mouse84.com:8006
+PROXMOX_API_TOKEN=PVEAPIToken=root@pam!vconnect=YOUR-TOKEN-HERE
+
+# SSH Gateway settings (tunneling server)
+SSH_HOST=your-ssh-server.com
+SSH_PORT=22
+SSH_USERNAME=tunneluser
+SSH_KEY_PATH=/path/to/ssh/key
+# or
+SSH_PASSWORD=your-password
+
+# JWT secret (must be changed!)
+JWT_SECRET_KEY=your-super-secret-key-change-this
+```
+
+## 3️⃣ Running the Server
+
+```bash
+# Development mode (auto reload)
+uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
+
+# Production mode
+uvicorn app.main:app --host 0.0.0.0 --port 8000 --workers 4
+```
+
+## 4️⃣ Verifying Access
+
+In a browser:
+- API Docs: http://localhost:8000/docs
+- Health Check: http://localhost:8000/health
+
+## 5️⃣ Testing
+
+### Login test
+```bash
+curl -X POST http://localhost:8000/api/auth/login \
+  -H "Content-Type: application/json" \
+  -d '{"username":"admin","password":"admin123"}'
+```
+
+### Listing VMs
+```bash
+# Log in first and grab the token
+TOKEN="your-jwt-token-here"
+
+curl -X GET http://localhost:8000/api/vms/my \
+  -H "Authorization: Bearer $TOKEN"
+```
+
+## 6️⃣ Running with Docker (optional)
+
+```bash
+# Build the Docker image
+docker build -t vconnect-api .
+
+# Run the container
+docker run -d \
+  --name vconnect-api \
+  -p 8000:8000 \
+  --env-file .env \
+  vconnect-api
+```
+
+## 📋 Default Admin Account
+
+- Username: `admin`
+- Password: `admin123`
+
+⚠️ **For security, change the password immediately after the first login!**
+
+## 🔧 Troubleshooting
+
+### Proxmox connection errors
+- Verify the API token is correct
+- Check the Proxmox server URL and port (default 8006)
+- Check firewall settings
+
+### SSH tunnel errors
+- Verify the SSH server connection details
+- Verify the SSH key file path
+- Confirm the SSH server allows port forwarding
+
+### Port conflicts
+- If port 8000 is already in use, switch to a different port:
+  ```bash
+  uvicorn app.main:app --reload --port 8001
+  ```
+
+## 📚 API Documentation
+
+Once the server is running, the auto-generated API docs are available at http://localhost:8000/docs.
+
+### Main endpoints:
+
+**Authentication**
+- POST /api/auth/register - Sign up
+- POST /api/auth/login - Log in
+- GET /api/auth/me - Current user info
+
+**VM Management**
+- GET /api/vms/my - My VM list
+- GET /api/vms/{vm_id} - VM details
+- POST /api/vms/{vm_id}/start - Start VM
+- POST /api/vms/{vm_id}/stop - Stop VM
+
+**Tunnel Management**
+- POST /api/tunnel/create - Create SSH tunnel
+- GET /api/tunnel/{session_id}/status - Tunnel status
+- DELETE /api/tunnel/{session_id} - Close tunnel
+
+## 🎯 Next Steps
+
+1. Integrate with the WPF client
+2. Configure per-user VM access permissions
+3. Review audit logs
+4. Deploy to production
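The curl flow above (log in, then send the JWT as a Bearer header) also scripts easily. A minimal Python sketch of the same two calls, assuming the `requests` package, the default development address, and the default admin credentials from this guide:

```python
import requests

BASE = "http://localhost:8000/api"  # assumed dev address

# Log in and obtain a JWT access token (default dev credentials assumed)
resp = requests.post(f"{BASE}/auth/login",
                     json={"username": "admin", "password": "admin123"})
resp.raise_for_status()
token = resp.json()["access_token"]

# Call an authenticated endpoint with the Bearer token
vms = requests.get(f"{BASE}/vms/my",
                   headers={"Authorization": f"Bearer {token}"})
vms.raise_for_status()
for vm in vms.json()["vms"]:
    print(vm["vm_id"], vm["name"], vm["status"])
```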
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..848fc2e
--- /dev/null
+++ b/README.md
@@ -0,0 +1,156 @@
+# VConnect API Server
+
+Zero-Port, Zero-VPN SSH-based Proxmox VM remote access platform
+
+## Architecture
+
+```
+[WPF Client] ←JWT→ [FastAPI Server] ←SSH→ [Proxmox VE]
+                          ↓
+                   [PostgreSQL DB]
+```
+
+## Core Features
+
+### 1. Authentication
+- JWT-based token authentication
+- User management (sign-up/login)
+- Role-based access control (RBAC)
+
+### 2. Proxmox Integration
+- Proxmox integration via API token
+- VM listing and status monitoring
+- VM control (start/stop/reboot)
+- IP discovery via the QEMU Guest Agent
+
+### 3. SSH Tunneling and Credentials
+- **Zero-Trust Access**: clients connect with one-time credentials, without holding an SSH private key
+- **Temporary credential issuance**: after JWT authentication, a temporary SSH password valid for 1 hour is issued
+- **PAM integration**: the SSH gateway validates the temporary password through the API
+- **Dynamic port allocation**: independent port-forwarding management per session
+
+### 4. Audit Logging
+- Connection records (who, when, which VM)
+- VM control history
+- Security event tracking
+
+### 5. Auto Update
+- Client version management and update distribution support
+
+## Tech Stack
+
+- **FastAPI**: high-performance Python web framework
+- **PostgreSQL**: relational database
+- **SQLAlchemy**: ORM (async support)
+- **Alembic**: DB migration management
+- **JWT (Jose)**: token-based authentication
+- **Paramiko / AsyncSSH**: SSH tunnel and connection management
+- **Uvicorn**: ASGI web server
+
+## Project Structure
+
+```
+vconnect-api/
+├── app/
+│   ├── main.py              # FastAPI app entry point
+│   ├── config.py            # Settings management (.env loading)
+│   ├── database.py          # DB session and engine
+│   ├── models/              # SQLAlchemy models (User, VM, AuditLog)
+│   ├── schemas/             # Pydantic schemas (request/response)
+│   ├── api/                 # API routers
+│   │   ├── auth.py          # Authentication (login/sign-up)
+│   │   ├── vms.py           # VM control and queries
+│   │   ├── tunnel.py        # SSH tunnel management
+│   │   ├── ssh_credentials.py # Temporary SSH credential issuance/verification
+│   │   └── admin.py         # Admin features (user management, logs)
+│   ├── services/            # Business logic
+│   └── utils/               # Utilities (JWT, security)
+├── alembic/                 # DB migration scripts
+├── requirements.txt         # Dependency list
+├── docker-compose.yml       # Docker deployment config
+├── Dockerfile               # Docker build config
+└── README.md                # Documentation
+```
+
+## Environment Variables (.env)
+
+```env
+# Database
+DATABASE_URL=postgresql://user:password@localhost/vconnect
+
+# JWT
+JWT_SECRET_KEY=your-secret-key-change-it
+JWT_ALGORITHM=HS256
+JWT_ACCESS_TOKEN_EXPIRE_MINUTES=30
+
+# Proxmox API
+PROXMOX_HOST=https://pve.example.com:8006
+PROXMOX_API_TOKEN=PVEAPIToken=root@pam!vconnect=xxx...
+
+# SSH Gateway (Zero-Port)
+SSH_HOST=ssh.example.com        # external address clients connect to
+SSH_PORT=22                     # external port
+SSH_INTERNAL_HOST=127.0.0.1     # internal tunnel bind address
+```
+
+## Installation and Running
+
+```bash
+# Create a virtual environment
+python -m venv venv
+# Windows
+venv\Scripts\activate
+# Linux/Mac
+source venv/bin/activate
+
+# Install dependencies
+pip install -r requirements.txt
+
+# Configure environment variables
+cp .env.example .env
+# Edit the .env file...
+
+# DB migration (create tables)
+alembic upgrade head
+
+# Run the server (development mode)
+uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
+```
+
+## API Endpoint Summary
+
+### Authentication (`/api/v1/auth`)
+- `POST /register` - Sign up
+- `POST /login` - Log in (issues an access token)
+- `GET /me` - Current user info
+
+### VM Management (`/api/v1/vms`)
+- `GET /my` - VMs assigned to the user
+- `POST /{vm_id}/start` - Power on a VM
+- `POST /{vm_id}/stop` - Power off a VM
+
+### SSH Credentials (`/api/v1/ssh`)
+- `POST /credentials` - [for clients] Issue a temporary SSH password valid for 1 hour
+- `POST /verify` - [for the gateway] Validate a temporary password
+
+### Tunnel Management (`/api/v1/tunnel`)
+- `POST /create` - Create an SSH tunnel session
+- `DELETE /{tunnel_id}` - Close a tunnel session
+
+### Admin (`/api/v1/admin`)
+- `GET /users` - Manage all users
+- `GET /audit-logs` - Query all audit logs
+- `GET /stats` - System statistics
+
+## Roadmap Status
+
+- [x] Project structure and basic FastAPI setup
+- [x] PostgreSQL integration and Alembic migration setup
+- [x] JWT authentication (login/refresh)
+- [x] Proxmox API integration (VM status/control)
+- [x] SSH tunnel manager implementation
+- [x] **Zero-Trust temporary credential system** (SSH password manager)
+- [x] Audit log
+- [x] Docker-based deployment (Dockerfile, Compose)
+- [ ] Refine the client auto-update distribution system
+- [ ] Unit tests (pytest)
diff --git a/app/__pycache__/config.cpython-312.pyc b/app/__pycache__/config.cpython-312.pyc
new file mode 100644
index 0000000..52e469f
Binary files /dev/null and b/app/__pycache__/config.cpython-312.pyc differ
diff --git a/app/__pycache__/database.cpython-312.pyc b/app/__pycache__/database.cpython-312.pyc
new file mode 100644
index 0000000..87e46cb
Binary files /dev/null and b/app/__pycache__/database.cpython-312.pyc differ
diff --git a/app/__pycache__/main.cpython-312.pyc b/app/__pycache__/main.cpython-312.pyc
new file mode 100644
index 0000000..653fdaa
Binary files /dev/null and b/app/__pycache__/main.cpython-312.pyc differ
diff --git a/app/api/__pycache__/admin.cpython-312.pyc b/app/api/__pycache__/admin.cpython-312.pyc
new file mode 100644
index 0000000..d1e1b4a
Binary files /dev/null and b/app/api/__pycache__/admin.cpython-312.pyc differ
diff --git a/app/api/__pycache__/auth.cpython-312.pyc b/app/api/__pycache__/auth.cpython-312.pyc
new file mode 100644
index 0000000..14729b6
Binary files /dev/null and b/app/api/__pycache__/auth.cpython-312.pyc differ
diff --git a/app/api/__pycache__/tunnel.cpython-312.pyc b/app/api/__pycache__/tunnel.cpython-312.pyc
new file mode 100644
index 0000000..478db7a
Binary files /dev/null and b/app/api/__pycache__/tunnel.cpython-312.pyc differ
diff --git a/app/api/__pycache__/vms.cpython-312.pyc b/app/api/__pycache__/vms.cpython-312.pyc
new file mode 100644
index 0000000..9b30d23
Binary files /dev/null and b/app/api/__pycache__/vms.cpython-312.pyc differ
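The Zero-Trust credential flow described in the README is a two-step handshake: the client trades its JWT for a short-lived SSH password, and the SSH gateway later asks the API to verify that password. A hedged client-side sketch of the first step, matching the `/credentials` endpoint defined in `app/api/ssh_credentials.py` below (note that with the default `API_V1_PREFIX=/api` in `config.py`, the live route is `/api/ssh/credentials`, not the `/api/v1/ssh` shown in the summary above):

```python
import requests

BASE = "http://localhost:8000/api"  # assumed dev address

def fetch_ssh_credentials(jwt_token: str) -> dict:
    """Exchange a valid JWT for a one-hour temporary SSH credential."""
    resp = requests.post(f"{BASE}/ssh/credentials",
                         headers={"Authorization": f"Bearer {jwt_token}"})
    resp.raise_for_status()
    # Contains ssh_host, ssh_port, ssh_username, ssh_password, expires_at
    return resp.json()
```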
diff --git a/app/api/admin.py b/app/api/admin.py
new file mode 100644
index 0000000..9124d3a
--- /dev/null
+++ b/app/api/admin.py
@@ -0,0 +1,275 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy.orm import Session
+from typing import List
+from app.database import get_db
+from app.schemas.admin import (
+    UserCreate, UserUpdate, UserInfo, UserListResponse,
+    VMAccessCreate, VMAccessUpdate, VMAccessInfo, VMAccessListResponse,
+    AdminResponse
+)
+from app.schemas.auth import CurrentUser
+from app.api.auth import get_current_user
+from app.models.user import User, UserRole
+from app.models.vm import VMAccess
+from app.utils.security import hash_password
+from app.utils.exceptions import PermissionDeniedError, NotFoundError, BadRequestError
+
+router = APIRouter()
+
+def require_admin(current_user: CurrentUser = Depends(get_current_user)):
+    """Verify admin privileges"""
+    if current_user.role != "admin":
+        raise PermissionDeniedError("Admin privileges are required")
+    return current_user
+
+# ==================== User Management ====================
+
+@router.get("/users", response_model=UserListResponse)
+async def get_users(
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """List users"""
+    users = db.query(User).all()
+
+    user_list = [
+        UserInfo(
+            id=user.id,
+            username=user.username,
+            email=user.email,
+            full_name=user.full_name,
+            role=user.role.value,
+            is_active=user.is_active,
+            created_at=user.created_at,
+            last_login=user.last_login
+        )
+        for user in users
+    ]
+
+    return UserListResponse(total=len(user_list), users=user_list)
+
+@router.post("/users", response_model=AdminResponse)
+async def create_user(
+    user_data: UserCreate,
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """Create a user"""
+    # Duplicate checks
+    if db.query(User).filter(User.username == user_data.username).first():
+        raise BadRequestError("Username already exists")
+
+    if db.query(User).filter(User.email == user_data.email).first():
+        raise BadRequestError("Email already exists")
+
+    # Validate the role
+    if user_data.role not in ["admin", "user"]:
+        raise BadRequestError("Role must be either 'admin' or 'user'")
+
+    # Create the user
+    new_user = User(
+        username=user_data.username,
+        email=user_data.email,
+        hashed_password=hash_password(user_data.password),
+        full_name=user_data.full_name,
+        role=UserRole.ADMIN if user_data.role == "admin" else UserRole.USER,
+        is_active=True
+    )
+
+    db.add(new_user)
+    db.commit()
+
+    return AdminResponse(
+        success=True,
+        message=f"User '{user_data.username}' has been created"
+    )
+
+@router.put("/users/{user_id}", response_model=AdminResponse)
+async def update_user(
+    user_id: int,
+    user_data: UserUpdate,
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """Update user information"""
+    user = db.query(User).filter(User.id == user_id).first()
+    if not user:
+        raise NotFoundError("User not found")
+
+    # Check for a duplicate email
+    if user_data.email and user_data.email != user.email:
+        if db.query(User).filter(User.email == user_data.email).first():
+            raise BadRequestError("Email already exists")
+        user.email = user_data.email
+
+    # Password change
+    if user_data.password:
+        user.hashed_password = hash_password(user_data.password)
+
+    # Other fields
+    if user_data.full_name is not None:
+        user.full_name = user_data.full_name
+
+    if user_data.role:
+        if user_data.role not in ["admin", "user"]:
+            raise BadRequestError("Role must be either 'admin' or 'user'")
+        user.role = UserRole.ADMIN if user_data.role == "admin" else UserRole.USER
+
+    if user_data.is_active is not None:
+        user.is_active = user_data.is_active
+
+    db.commit()
+
+    return AdminResponse(
+        success=True,
+        message=f"User '{user.username}' has been updated"
+    )
+
+@router.delete("/users/{user_id}", response_model=AdminResponse)
+async def delete_user(
+    user_id: int,
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """Delete a user"""
+    # Users cannot delete themselves
+    if user_id == current_user.id:
+        raise BadRequestError("You cannot delete your own account")
+
+    user = db.query(User).filter(User.id == user_id).first()
+    if not user:
+        raise NotFoundError("User not found")
+
+    username = user.username
+    db.delete(user)
+    db.commit()
+
+    return AdminResponse(
+        success=True,
+        message=f"User '{username}' has been deleted"
+    )
+
+# ==================== VM Access Management ====================
+
+@router.get("/vm-access", response_model=VMAccessListResponse)
+async def get_vm_access_list(
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """List VM access grants"""
+    accesses = db.query(VMAccess).join(User).all()
+
+    access_list = [
+        VMAccessInfo(
+            id=access.id,
+            user_id=access.user_id,
+            username=access.user.username,
+            vm_id=access.vm_id,
+            node=access.node,
+            vm_name=access.vm_name,
+            static_ip=access.static_ip,
+            rdp_username=access.rdp_username,
+            rdp_port=access.rdp_port,
+            is_active=access.is_active,
+            created_at=access.created_at
+        )
+        for access in accesses
+    ]
+
+    return VMAccessListResponse(total=len(access_list), accesses=access_list)
+
+@router.post("/vm-access", response_model=AdminResponse)
+async def create_vm_access(
+    access_data: VMAccessCreate,
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """Grant VM access"""
+    # Make sure the user exists
+    user = db.query(User).filter(User.id == access_data.user_id).first()
+    if not user:
+        raise NotFoundError("User not found")
+
+    # Duplicate check
+    existing = db.query(VMAccess).filter(
+        VMAccess.user_id == access_data.user_id,
+        VMAccess.vm_id == access_data.vm_id
+    ).first()
+
+    if existing:
+        raise BadRequestError("This user already has access to that VM")
+
+    # Create the grant
+    access = VMAccess(
+        user_id=access_data.user_id,
+        vm_id=access_data.vm_id,
+        node=access_data.node,
+        static_ip=access_data.static_ip,
+        rdp_username=access_data.rdp_username,
+        rdp_password=access_data.rdp_password,
+        rdp_port=access_data.rdp_port,
+        is_active=True
+    )
+
+    db.add(access)
+    db.commit()
+
+    return AdminResponse(
+        success=True,
+        message=f"User '{user.username}' has been granted access to VM {access_data.vm_id}"
+    )
+
+@router.put("/vm-access/{access_id}", response_model=AdminResponse)
+async def update_vm_access(
+    access_id: int,
+    access_data: VMAccessUpdate,
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """Update a VM access grant"""
+    access = db.query(VMAccess).filter(VMAccess.id == access_id).first()
+    if not access:
+        raise NotFoundError("Access grant not found")
+
+    # Update fields
+    if access_data.static_ip is not None:
+        access.static_ip = access_data.static_ip
+
+    if access_data.rdp_username is not None:
+        access.rdp_username = access_data.rdp_username
+
+    if access_data.rdp_password is not None:
+        access.rdp_password = access_data.rdp_password
+
+    if access_data.rdp_port is not None:
+        access.rdp_port = access_data.rdp_port
+
+    if access_data.is_active is not None:
+        access.is_active = access_data.is_active
+
+    db.commit()
+
+    return AdminResponse(
+        success=True,
+        message=f"Access grant for VM {access.vm_id} has been updated"
+    )
+
+@router.delete("/vm-access/{access_id}", response_model=AdminResponse)
+async def delete_vm_access(
+    access_id: int,
+    current_user: CurrentUser = Depends(require_admin),
+    db: Session = Depends(get_db)
+):
+    """Revoke a VM access grant"""
+    access = db.query(VMAccess).filter(VMAccess.id == access_id).first()
+    if not access:
+        raise NotFoundError("Access grant not found")
+
+    vm_id = access.vm_id
+    db.delete(access)
+    db.commit()
+
+    return AdminResponse(
+        success=True,
+        message=f"Access grant for VM {vm_id} has been deleted"
+    )
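The README's roadmap lists unit tests as still open; the `require_admin` guard above is a natural first target. As a sketch of how it could be exercised with FastAPI's TestClient and a dependency override (the test module and fixture names are assumptions, not project code):

```python
# tests/test_admin_guard.py (hypothetical): a minimal sketch
from fastapi.testclient import TestClient
from app.main import app
from app.api.auth import get_current_user
from app.schemas.auth import CurrentUser
from app.models.user import UserRole

def fake_regular_user() -> CurrentUser:
    # Canned non-admin identity, bypassing real JWT decoding
    return CurrentUser(id=2, username="bob", email="bob@example.com",
                       role=UserRole.USER, is_active=True)

def test_non_admin_is_rejected():
    app.dependency_overrides[get_current_user] = fake_regular_user
    client = TestClient(app)
    resp = client.get("/api/admin/users")
    assert resp.status_code == 403  # PermissionDeniedError
    app.dependency_overrides.clear()
```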
diff --git a/app/api/auth.py b/app/api/auth.py
new file mode 100644
index 0000000..8abdfc5
--- /dev/null
+++ b/app/api/auth.py
@@ -0,0 +1,101 @@
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+from sqlalchemy.orm import Session
+from app.database import get_db
+from app.schemas.auth import UserRegister, UserLogin, Token, UserResponse, CurrentUser
+from app.services.auth_service import auth_service
+from app.utils.jwt_handler import decode_token
+from app.utils.exceptions import AuthenticationError
+
+router = APIRouter()
+security = HTTPBearer()
+
+# Dependency that resolves the current user
+async def get_current_user(
+    credentials: HTTPAuthorizationCredentials = Depends(security),
+    db: Session = Depends(get_db)
+) -> CurrentUser:
+    """Extract the current user from the JWT token"""
+    token = credentials.credentials
+    payload = decode_token(token)
+
+    if not payload:
+        raise AuthenticationError("Invalid token")
+
+    user_id = payload.get("sub")
+    if not user_id:
+        raise AuthenticationError("Token payload is malformed")
+
+    user = auth_service.get_user_by_id(db, int(user_id))
+    if not user:
+        raise AuthenticationError("User not found")
+
+    if not user.is_active:
+        raise AuthenticationError("This account is disabled")
+
+    return CurrentUser(
+        id=user.id,
+        username=user.username,
+        email=user.email,
+        role=user.role,
+        is_active=user.is_active
+    )
+
+@router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
+async def register(user_data: UserRegister, db: Session = Depends(get_db)):
+    """
+    Sign up
+
+    Creates a new user account.
+    """
+    user = auth_service.register_user(db, user_data)
+    return user
+
+@router.post("/login", response_model=Token)
+async def login(login_data: UserLogin, db: Session = Depends(get_db)):
+    """
+    Log in
+
+    Authenticates the user and issues JWT tokens.
+    """
+    user = auth_service.authenticate_user(db, login_data.username, login_data.password)
+    tokens = auth_service.create_tokens(user)
+    return tokens
+
+@router.get("/me", response_model=UserResponse)
+async def get_me(
+    current_user: CurrentUser = Depends(get_current_user),
+    db: Session = Depends(get_db)
+):
+    """
+    Current user info
+
+    Returns the currently authenticated user based on the JWT token.
+    """
+    user = auth_service.get_user_by_id(db, current_user.id)
+    return user
+
+@router.post("/refresh", response_model=Token)
+async def refresh_token(
+    credentials: HTTPAuthorizationCredentials = Depends(security),
+    db: Session = Depends(get_db)
+):
+    """
+    Token refresh
+
+    Issues a new access token from a valid refresh token.
+    """
+    token = credentials.credentials
+    payload = decode_token(token)
+
+    if not payload or payload.get("type") != "refresh":
+        raise AuthenticationError("Invalid refresh token")
+
+    user_id = payload.get("sub")
+    user = auth_service.get_user_by_id(db, int(user_id))
+
+    if not user or not user.is_active:
+        raise AuthenticationError("User not found")
+
+    tokens = auth_service.create_tokens(user)
+    return tokens
diff --git a/app/api/ssh_credentials.py b/app/api/ssh_credentials.py
new file mode 100644
index 0000000..89ba80a
--- /dev/null
+++ b/app/api/ssh_credentials.py
@@ -0,0 +1,66 @@
+from fastapi import APIRouter, Depends, HTTPException
+from pydantic import BaseModel
+from datetime import datetime, timedelta
+from app.api.auth import get_current_user
+from app.schemas.auth import CurrentUser
+from app.services.temp_ssh_password_service import temp_ssh_password_manager
+import os
+
+router = APIRouter()
+
+class SshVerifyRequest(BaseModel):
+    """SSH password verification request"""
+    username: str
+    password: str
+
+@router.post("/credentials")
+async def get_ssh_credentials(current_user: CurrentUser = Depends(get_current_user)):
+    """
+    Issue temporary SSH credentials against a JWT token
+
+    Returns:
+        SSH connection info and a temporary password
+    """
+    # Generate a temporary password (valid for 1 hour)
+    temp_password = temp_ssh_password_manager.generate_password(
+        username=current_user.username,
+        validity_hours=1
+    )
+
+    # SSH server info (for external access)
+    ssh_host = os.getenv("SSH_HOST", "api.mouse84.com")  # external DDNS
+    ssh_port = int(os.getenv("SSH_PORT", "54054"))  # external port (forwarded to internal 22)
+
+    # Compute the expiry time
+    expires_at = datetime.utcnow() + timedelta(hours=1)
+
+    return {
+        "ssh_host": ssh_host,
+        "ssh_port": ssh_port,
+        "ssh_username": current_user.username,
+        "ssh_password": temp_password,
+        "expires_at": expires_at.isoformat(),
+        "expires_in_seconds": 3600
+    }
+
+@router.post("/verify")
+async def verify_ssh_password(request: SshVerifyRequest):
+    """
+    Verify a temporary SSH password (for PAM authentication)
+
+    Args:
+        request: username and password
+
+    Returns:
+        200: password valid
+        401: password invalid
+    """
+    is_valid = temp_ssh_password_manager.verify_password(
+        username=request.username,
+        password=request.password
+    )
+
+    if is_valid:
+        return {"valid": True, "message": "Password verified"}
+    else:
+        raise HTTPException(status_code=401, detail="Invalid password")
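The `/verify` endpoint exists for the PAM integration mentioned in the README: the SSH gateway delegates password checks to the API. How that hook is wired on the gateway is not part of this commit; the sketch below shows one plausible shape, a `pam_exec`-style helper that reads the candidate password from stdin and the user from the `PAM_USER` environment variable (every name and path here is an assumption, not code from this repo):

```python
#!/usr/bin/env python3
# Hypothetical pam_exec helper for the SSH gateway (not part of this commit).
# PAM supplies the user via PAM_USER and, with expose_authtok, the password
# on stdin; exit status 0 accepts the login, anything else rejects it.
import os
import sys

import requests

API_VERIFY_URL = "http://127.0.0.1:8000/api/ssh/verify"  # assumed internal address

def main() -> int:
    username = os.environ.get("PAM_USER", "")
    password = sys.stdin.readline().rstrip("\0\n")
    try:
        resp = requests.post(API_VERIFY_URL,
                             json={"username": username, "password": password},
                             timeout=5)
    except requests.RequestException:
        return 1  # fail closed if the API is unreachable
    return 0 if resp.status_code == 200 else 1

if __name__ == "__main__":
    sys.exit(main())
```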
diff --git a/app/api/tunnel.py b/app/api/tunnel.py
new file mode 100644
index 0000000..bee1c2c
--- /dev/null
+++ b/app/api/tunnel.py
@@ -0,0 +1,127 @@
+from fastapi import APIRouter, Depends
+from sqlalchemy.orm import Session
+from app.database import get_db
+from app.schemas.tunnel import (
+    TunnelCreateRequest,
+    TunnelCreateResponse,
+    TunnelStatusResponse,
+    TunnelCloseResponse,
+    TunnelInfo
+)
+from app.schemas.auth import CurrentUser
+from app.api.auth import get_current_user
+from app.services.ssh_tunnel_service import ssh_tunnel_manager
+from app.services.proxmox_service import proxmox_service
+from app.models.vm import VMAccess
+from app.utils.exceptions import NotFoundError, BadRequestError
+
+router = APIRouter()
+
+@router.post("/create", response_model=TunnelCreateResponse)
+async def create_tunnel(
+    request: TunnelCreateRequest,
+    current_user: CurrentUser = Depends(get_current_user),
+    db: Session = Depends(get_db)
+):
+    """
+    Create an SSH tunnel
+
+    Creates an SSH local port forwarding tunnel for reaching a VM.
+    The client should make its RDP connection to the returned local_port.
+    """
+    # Resolve the VM's IP address
+    # 1. If the request includes an IP, use it (works without the Guest Agent)
+    ip_address = request.vm_ip
+
+    # 2. Otherwise, look it up via the Guest Agent
+    if not ip_address:
+        try:
+            ip_address = await proxmox_service.get_vm_ip(request.node, request.vm_id)
+            if ip_address:
+                print(f"✅ IP resolved via Guest Agent: {ip_address}")
+        except Exception as e:
+            print(f"⚠️ Guest Agent IP lookup failed: {str(e)}")
+            ip_address = None
+
+    # 3. If the Guest Agent fails, fall back to static_ip from the VMAccess table
+    if not ip_address:
+        vm_access = db.query(VMAccess).filter(
+            VMAccess.user_id == current_user.id,
+            VMAccess.vm_id == request.vm_id,
+            VMAccess.node == request.node
+        ).first()
+
+        if vm_access and vm_access.static_ip:
+            ip_address = vm_access.static_ip
+            print(f"✅ Using static IP: {ip_address}")
+        else:
+            raise BadRequestError(
+                "Could not determine the VM's IP address. "
+                "Set a static IP in the admin panel or install the QEMU Guest Agent."
+            )
+
+    try:
+        # Create the SSH tunnel
+        tunnel_info = await ssh_tunnel_manager.create_tunnel(
+            user_id=current_user.id,
+            vm_id=request.vm_id,
+            remote_host=ip_address,
+            remote_port=3389  # RDP port
+        )
+
+        return TunnelCreateResponse(
+            success=True,
+            message="SSH tunnel created",
+            session_id=tunnel_info["session_id"],
+            tunnel_info=TunnelInfo(
+                session_id=tunnel_info["session_id"],
+                local_port=tunnel_info["local_port"],
+                remote_host=tunnel_info["remote_host"],
+                remote_port=tunnel_info["remote_port"],
+                vm_id=request.vm_id,
+                is_active=True,
+                created_at=None  # set automatically
+            )
+        )
+    except Exception as e:
+        return TunnelCreateResponse(
+            success=False,
+            message=f"Tunnel creation failed: {str(e)}",
+            session_id="",
+            tunnel_info=None
+        )
+
+@router.get("/{session_id}/status", response_model=TunnelStatusResponse)
+async def get_tunnel_status(
+    session_id: str,
+    current_user: CurrentUser = Depends(get_current_user)
+):
+    """
+    Tunnel status
+
+    Checks the state of an active tunnel.
+    """
+    status = await ssh_tunnel_manager.get_tunnel_status(session_id)
+
+    if not status:
+        raise NotFoundError("Tunnel not found")
+
+    return TunnelStatusResponse(**status)
+
+@router.delete("/{session_id}", response_model=TunnelCloseResponse)
+async def close_tunnel(
+    session_id: str,
+    current_user: CurrentUser = Depends(get_current_user)
+):
+    """
+    Close a tunnel
+
+    Closes the SSH tunnel and cleans up its resources.
+    """
+    success = await ssh_tunnel_manager.close_tunnel(session_id)
+
+    return TunnelCloseResponse(
+        success=success,
+        message="Tunnel closed" if success else "Failed to close the tunnel",
+        session_id=session_id
+    )
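End to end, a client session against this router is: create the tunnel, point an RDP client at the returned `local_port`, then delete the session. Note that `forward_local_port` (see `ssh_tunnel_service.py` below) binds the listener on the machine running this API, so that host is the tunnel entry point. A hedged sketch (host names and the `mstsc` invocation are illustrative assumptions):

```python
import subprocess

import requests

BASE = "http://localhost:8000/api"  # assumed dev address
API_HOST = "localhost"              # host running the FastAPI server (assumed)

def rdp_via_tunnel(token: str, vm_id: int, node: str) -> None:
    headers = {"Authorization": f"Bearer {token}"}

    # 1. Ask the server to open an SSH local-forwarding tunnel to the VM
    created = requests.post(f"{BASE}/tunnel/create", headers=headers,
                            json={"vm_id": vm_id, "node": node}).json()
    if not created["success"]:
        raise RuntimeError(created["message"])
    session_id = created["session_id"]
    port = created["tunnel_info"]["local_port"]

    try:
        # 2. Connect RDP through the forwarded port (Windows client example)
        subprocess.run(["mstsc", f"/v:{API_HOST}:{port}"])
    finally:
        # 3. Always close the session so the port is released
        requests.delete(f"{BASE}/tunnel/{session_id}", headers=headers)
```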
diff --git a/app/api/vms.py b/app/api/vms.py
new file mode 100644
index 0000000..50f75b7
--- /dev/null
+++ b/app/api/vms.py
@@ -0,0 +1,141 @@
+from fastapi import APIRouter, Depends
+from sqlalchemy.orm import Session
+from typing import List
+from app.database import get_db
+from app.schemas.vm import VMInfo, VMListResponse, VMDetail, VMControlRequest, VMControlResponse
+from app.schemas.auth import CurrentUser
+from app.api.auth import get_current_user
+from app.services.proxmox_service import proxmox_service
+from app.models.vm import VMAccess
+from app.utils.exceptions import NotFoundError, PermissionDeniedError
+
+router = APIRouter()
+
+@router.get("/my", response_model=VMListResponse)
+async def get_my_vms(
+    current_user: CurrentUser = Depends(get_current_user),
+    db: Session = Depends(get_db)
+):
+    """
+    List my VMs
+
+    Returns the VMs the current user can access.
+    """
+    # Fetch every VM from Proxmox
+    all_vms = await proxmox_service.get_all_vms()
+
+    # Load the current user's VMAccess records
+    vm_accesses = db.query(VMAccess).filter(
+        VMAccess.user_id == current_user.id,
+        VMAccess.is_active == True
+    ).all()
+
+    # Build a lookup dict keyed by vm_id
+    access_map = {access.vm_id: access for access in vm_accesses}
+
+    vm_list = []
+    for vm in all_vms:
+        vm_id = vm["vmid"]
+        access = access_map.get(vm_id)
+
+        # Use the VMAccess record when present, defaults otherwise
+        vm_info = VMInfo(
+            vm_id=vm_id,
+            node=vm["node"],
+            name=vm.get("name", "Unknown"),
+            status=vm.get("status", "unknown"),
+            ip_address=access.static_ip if access else None,  # static IP filled in automatically
+            cpus=vm.get("cpus", 0),
+            memory=vm.get("maxmem", 0) // (1024 * 1024),  # bytes to MB
+            memory_usage=vm.get("mem", 0) // (1024 * 1024) if vm.get("mem") else None,
+            cpu_usage=vm.get("cpu", 0),
+            can_start=True,
+            can_stop=True,
+            can_reboot=True,
+            can_connect=True,
+            rdp_username=access.rdp_username if access else None,  # RDP username
+            rdp_password=access.rdp_password if access else None,  # RDP password
+            rdp_port=access.rdp_port if access else 3389  # RDP port
+        )
+        vm_list.append(vm_info)
+
+    return VMListResponse(total=len(vm_list), vms=vm_list)
+
+@router.get("/{vm_id}", response_model=VMDetail)
+async def get_vm_detail(
+    vm_id: int,
+    node: str,
+    current_user: CurrentUser = Depends(get_current_user)
+):
+    """
+    Get VM details
+    """
+    # Query the VM status
+    status = await proxmox_service.get_vm_status(node, vm_id)
+
+    if not status:
+        raise NotFoundError(f"VM {vm_id} not found")
+
+    # IP lookup removed: not needed for connecting
+    return VMDetail(
+        vm_id=vm_id,
+        node=node,
+        name=status.get("name", "Unknown"),
+        status=status.get("status", "unknown"),
+        ip_address=None,  # no IP lookup
+        cpus=status.get("cpus", 0),
+        memory=status.get("maxmem", 0) // (1024 * 1024),
+        memory_usage=status.get("mem", 0) // (1024 * 1024) if status.get("mem") else None,
+        cpu_usage=status.get("cpu", 0),
+        uptime=status.get("uptime"),
+        rdp_port=3389,
+        has_guest_agent=False  # Guest Agent not required
+    )
+
+@router.post("/{vm_id}/start", response_model=VMControlResponse)
+async def start_vm(
+    vm_id: int,
+    node: str,
+    current_user: CurrentUser = Depends(get_current_user)
+):
+    """Start a VM"""
+    success = await proxmox_service.start_vm(node, vm_id)
+
+    return VMControlResponse(
+        success=success,
+        message="VM started" if success else "Failed to start the VM",
+        vm_id=vm_id,
+        action="start"
+    )
+
+@router.post("/{vm_id}/stop", response_model=VMControlResponse)
+async def stop_vm(
+    vm_id: int,
+    node: str,
+    current_user: CurrentUser = Depends(get_current_user)
+):
+    """Stop a VM"""
+    success = await proxmox_service.stop_vm(node, vm_id)
+
+    return VMControlResponse(
+        success=success,
+        message="VM stopped" if success else "Failed to stop the VM",
+        vm_id=vm_id,
+        action="stop"
+    )
+
+@router.post("/{vm_id}/reboot", response_model=VMControlResponse)
+async def reboot_vm(
+    vm_id: int,
+    node: str,
+    current_user: CurrentUser = Depends(get_current_user)
+):
+    """Reboot a VM"""
+    success = await proxmox_service.reboot_vm(node, vm_id)
+
+    return VMControlResponse(
+        success=success,
+        message="VM rebooted" if success else "Failed to reboot the VM",
+        vm_id=vm_id,
+        action="reboot"
+    )
diff --git a/app/config.py b/app/config.py
new file mode 100644
index 0000000..8c8564c
--- /dev/null
+++ b/app/config.py
@@ -0,0 +1,53 @@
+from pydantic_settings import BaseSettings
+from typing import List
+import os
+
+class Settings(BaseSettings):
+    # Application
+    APP_NAME: str = "VConnect API"
+    APP_VERSION: str = "1.0.0"
+    DEBUG: bool = True
+    API_V1_PREFIX: str = "/api"
+
+    # Database
+    DATABASE_URL: str = "sqlite:///./vconnect.db"
+
+    # JWT
+    JWT_SECRET_KEY: str = "your-secret-key-change-in-production"
+    JWT_ALGORITHM: str = "HS256"
+    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
+    JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = 7
+
+    # Proxmox
+    PROXMOX_HOST: str = "https://pve.mouse84.com:8006"
+    PROXMOX_API_TOKEN: str = ""
+    PROXMOX_VERIFY_SSL: bool = False
+
+    # SSH Gateway
+    SSH_HOST: str = ""
+    SSH_PORT: int = 22
+    SSH_USERNAME: str = ""
+    SSH_KEY_PATH: str = ""
+    SSH_PASSWORD: str = ""
+
+    # Tunnel Port Range
+    TUNNEL_PORT_MIN: int = 50000
+    TUNNEL_PORT_MAX: int = 60000
+
+    # CORS
+    CORS_ORIGINS: List[str] = ["http://localhost:8080", "http://localhost:3000"]
+
+    # Logging
+    LOG_LEVEL: str = "INFO"
+    LOG_FILE: str = "logs/vconnect.log"
+
+    # Admin
+    ADMIN_USERNAME: str = "admin"
+    ADMIN_PASSWORD: str = "admin123"
+    ADMIN_EMAIL: str = "admin@example.com"
+
+    class Config:
+        env_file = ".env"
+        case_sensitive = True
+
+settings = Settings()
diff --git a/app/database.py b/app/database.py
new file mode 100644
index 0000000..e68a180
--- /dev/null
+++ b/app/database.py
@@ -0,0 +1,25 @@
+from sqlalchemy import create_engine
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker
+from app.config import settings
+
+# Create the SQLAlchemy engine
+engine = create_engine(
+    settings.DATABASE_URL,
+    connect_args={"check_same_thread": False} if "sqlite" in settings.DATABASE_URL else {},
+    echo=settings.DEBUG
+)
+
+# Session factory
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+# Declarative base class
+Base = declarative_base()
+
+# DB session dependency
+def get_db():
+    db = SessionLocal()
+    try:
+        yield db
+    finally:
+        db.close()
diff --git a/app/main.py b/app/main.py
new file mode 100644
index 0000000..f465dff
--- /dev/null
+++ b/app/main.py
@@ -0,0 +1,94 @@
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from contextlib import asynccontextmanager
+from app.config import settings
+from app.database import engine, Base
+from app.api import auth, vms, tunnel, admin, ssh_credentials
+import logging
+
+# Logging setup
+logging.basicConfig(
+    level=settings.LOG_LEVEL,
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+)
+logger = logging.getLogger(__name__)
+
+# Lifecycle events
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # On startup
+    logger.info("🚀 VConnect API server starting")
+
+    # Create DB tables
+    Base.metadata.create_all(bind=engine)
+
+    # Create the admin account (if missing)
+    from app.services.auth_service import auth_service
+    from app.database import SessionLocal
+    from app.schemas.auth import UserRegister
+    from app.models.user import UserRole
+
+    db = SessionLocal()
+    try:
+        existing_admin = auth_service.get_user_by_username(db, settings.ADMIN_USERNAME)
+        if not existing_admin:
+            admin_data = UserRegister(
+                username=settings.ADMIN_USERNAME,
+                email=settings.ADMIN_EMAIL,
+                password=settings.ADMIN_PASSWORD,
+                full_name="Administrator"
+            )
+            admin_user = auth_service.register_user(db, admin_data)
+            admin_user.role = UserRole.ADMIN
+            db.commit()
+            logger.info(f"✅ Admin account created: {settings.ADMIN_USERNAME}")
+    except Exception as e:
+        logger.error(f"Failed to create the admin account: {e}")
+    finally:
+        db.close()
+
+    yield
+
+    # On shutdown
+    logger.info("🛑 VConnect API server shutting down")
+
+# Create the FastAPI app
+app = FastAPI(
+    title=settings.APP_NAME,
+    version=settings.APP_VERSION,
+    description="Zero-Port, Zero-VPN SSH-based Proxmox VM remote access platform",
+    lifespan=lifespan
+)
+
+# CORS settings
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=settings.CORS_ORIGINS,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# Register the API routers
+app.include_router(auth.router, prefix=f"{settings.API_V1_PREFIX}/auth", tags=["Authentication"])
+app.include_router(vms.router, prefix=f"{settings.API_V1_PREFIX}/vms", tags=["VM Management"])
+app.include_router(tunnel.router, prefix=f"{settings.API_V1_PREFIX}/tunnel", tags=["Tunnel Management"])
+app.include_router(admin.router, prefix=f"{settings.API_V1_PREFIX}/admin", tags=["Admin"])
+app.include_router(ssh_credentials.router, prefix=f"{settings.API_V1_PREFIX}/ssh", tags=["SSH Credentials"])
+
+# Health Check
+@app.get("/health")
+async def health_check():
+    return {
+        "status": "healthy",
+        "app_name": settings.APP_NAME,
+        "version": settings.APP_VERSION
+    }
+
+@app.get("/")
+async def root():
+    return {
+        "message": "VConnect API Server",
+        "version": settings.APP_VERSION,
+        "docs": "/docs"
+    }
diff --git a/app/models/__pycache__/user.cpython-312.pyc b/app/models/__pycache__/user.cpython-312.pyc
new file mode 100644
index 0000000..0f16b74
Binary files /dev/null and b/app/models/__pycache__/user.cpython-312.pyc differ
diff --git a/app/models/__pycache__/vm.cpython-312.pyc b/app/models/__pycache__/vm.cpython-312.pyc
new file mode 100644
index 0000000..1c21011
Binary files /dev/null and b/app/models/__pycache__/vm.cpython-312.pyc differ
diff --git a/app/models/audit_log.py b/app/models/audit_log.py
new file mode 100644
index 0000000..df0aa0e
--- /dev/null
+++ b/app/models/audit_log.py
@@ -0,0 +1,43 @@
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, Text, Enum
+from sqlalchemy.sql import func
+from app.database import Base
+import enum
+
+class AuditAction(str, enum.Enum):
+    LOGIN = "login"
+    LOGOUT = "logout"
+    VM_CONNECT = "vm_connect"
+    VM_DISCONNECT = "vm_disconnect"
+    VM_START = "vm_start"
+    VM_STOP = "vm_stop"
+    VM_REBOOT = "vm_reboot"
+    TUNNEL_CREATE = "tunnel_create"
+    TUNNEL_CLOSE = "tunnel_close"
+    USER_CREATE = "user_create"
+    USER_UPDATE = "user_update"
+    USER_DELETE = "user_delete"
+    ACCESS_DENIED = "access_denied"
+
+class AuditLog(Base):
+    """Audit log: records every significant action"""
+    __tablename__ = "audit_logs"
+
+    id = Column(Integer, primary_key=True, index=True)
+    user_id = Column(Integer, ForeignKey("users.id"))
+    username = Column(String(50))  # denormalized (tracks deleted users)
+
+    action = Column(Enum(AuditAction), nullable=False, index=True)
+    resource_type = Column(String(50))  # "vm", "user", "tunnel"
+    resource_id = Column(String(100))  # VM ID, User ID, etc.
+
+    ip_address = Column(String(50))
+    user_agent = Column(String(255))
+
+    details = Column(Text)  # extra details stored as JSON
+    success = Column(Boolean, default=True)
+    error_message = Column(Text)
+
+    created_at = Column(DateTime(timezone=True), server_default=func.now(), index=True)
+
+    def __repr__(self):
+        return f"<AuditLog(id={self.id}, action={self.action}, user='{self.username}')>"
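Nothing in this commit writes `AuditLog` rows yet; the model exists, but the routers never insert into it. A small helper along these lines could close that gap (the helper and its call site are assumptions, not code from this commit):

```python
# Hypothetical helper (not in this commit): record one audit event.
import json
from typing import Optional

from sqlalchemy.orm import Session

from app.models.audit_log import AuditLog, AuditAction

def record_audit(db: Session, *, user_id: int, username: str,
                 action: AuditAction, resource_type: str = "",
                 resource_id: str = "", success: bool = True,
                 details: Optional[dict] = None) -> None:
    db.add(AuditLog(
        user_id=user_id,
        username=username,          # denormalized so deleted users stay traceable
        action=action,
        resource_type=resource_type,
        resource_id=resource_id,
        success=success,
        details=json.dumps(details or {}),
    ))
    db.commit()

# Example call site (start_vm would need a db session added to its signature):
#   record_audit(db, user_id=current_user.id, username=current_user.username,
#                action=AuditAction.VM_START, resource_type="vm",
#                resource_id=str(vm_id), success=success)
```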
diff --git a/app/models/user.py b/app/models/user.py
new file mode 100644
index 0000000..c2eef34
--- /dev/null
+++ b/app/models/user.py
@@ -0,0 +1,28 @@
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, Enum
+from sqlalchemy.sql import func
+from sqlalchemy.orm import relationship
+from app.database import Base
+import enum
+
+class UserRole(str, enum.Enum):
+    ADMIN = "admin"
+    USER = "user"
+
+class User(Base):
+    __tablename__ = "users"
+
+    id = Column(Integer, primary_key=True, index=True)
+    username = Column(String(50), unique=True, index=True, nullable=False)
+    email = Column(String(100), unique=True, index=True, nullable=False)
+    hashed_password = Column(String(255), nullable=False)
+    full_name = Column(String(100))
+    role = Column(Enum(UserRole), default=UserRole.USER, nullable=False)
+    is_active = Column(Boolean, default=True, nullable=False)
+    created_at = Column(DateTime(timezone=True), server_default=func.now())
+    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
+    last_login = Column(DateTime(timezone=True))
+
+    vm_accesses = relationship("VMAccess", back_populates="user")
+
+    def __repr__(self):
+        return f"<User(id={self.id}, username='{self.username}', role={self.role})>"
diff --git a/app/models/vm.py b/app/models/vm.py
new file mode 100644
index 0000000..fbbd28e
--- /dev/null
+++ b/app/models/vm.py
@@ -0,0 +1,62 @@
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey
+from sqlalchemy.sql import func
+from sqlalchemy.orm import relationship
+from app.database import Base
+
+class VMAccess(Base):
+    """Per-user VM access grants"""
+    __tablename__ = "vm_access"
+
+    id = Column(Integer, primary_key=True, index=True)
+    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
+    vm_id = Column(Integer, nullable=False)  # Proxmox VM ID
+    node = Column(String(50), nullable=False)  # Proxmox node name
+    vm_name = Column(String(100))
+
+    # RDP connection info
+    rdp_username = Column(String(50))
+    rdp_password = Column(String(255))  # stored encrypted
+    rdp_port = Column(Integer, default=3389)
+
+    # Permissions
+    can_start = Column(Boolean, default=True)
+    can_stop = Column(Boolean, default=True)
+    can_reboot = Column(Boolean, default=True)
+    can_connect = Column(Boolean, default=True)
+
+    is_active = Column(Boolean, default=True)
+    created_at = Column(DateTime(timezone=True), server_default=func.now())
+    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
+
+    # Static IP used when the Guest Agent is unavailable
+    static_ip = Column(String(50), nullable=True)
+
+    # Relationships
+    user = relationship("User", back_populates="vm_accesses")
+
+    def __repr__(self):
+        return f"<VMAccess(user_id={self.user_id}, vm_id={self.vm_id})>"
+
+
+class SSHTunnel(Base):
+    """Active SSH tunnel sessions"""
+    __tablename__ = "ssh_tunnels"
+
+    id = Column(Integer, primary_key=True, index=True)
+    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
+    vm_id = Column(Integer, nullable=False)
+
+    # Tunnel info
+    local_port = Column(Integer, nullable=False)  # port the client connects to
+    remote_host = Column(String(50), nullable=False)  # VM IP
+    remote_port = Column(Integer, nullable=False)  # VM RDP port
+
+    # Session info
+    session_id = Column(String(100), unique=True, index=True)
+    is_active = Column(Boolean, default=True)
+
+    created_at = Column(DateTime(timezone=True), server_default=func.now())
+    closed_at = Column(DateTime(timezone=True))
+
+    def __repr__(self):
+        return f"<SSHTunnel(session_id='{self.session_id}', local_port={self.local_port})>"
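The `rdp_password` column above is annotated "stored encrypted", but nothing in this commit performs that encryption; `admin.py` writes the value through unchanged. A minimal sketch of what the annotation implies, using the `cryptography` package's Fernet (the key-handling convention and function names are assumptions, not project code):

```python
# Hypothetical at-rest encryption for VMAccess.rdp_password (not in this commit).
import os

from cryptography.fernet import Fernet

# The key must live outside the repo, e.g. in an env var; generate it once
# with Fernet.generate_key() and keep it in a secret store.
_fernet = Fernet(os.environ["RDP_SECRET_KEY"].encode())

def encrypt_rdp_password(plaintext: str) -> str:
    return _fernet.encrypt(plaintext.encode()).decode()

def decrypt_rdp_password(token: str) -> str:
    return _fernet.decrypt(token.encode()).decode()
```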
diff --git a/app/schemas/__pycache__/admin.cpython-312.pyc b/app/schemas/__pycache__/admin.cpython-312.pyc
new file mode 100644
index 0000000..e10d4c5
Binary files /dev/null and b/app/schemas/__pycache__/admin.cpython-312.pyc differ
diff --git a/app/schemas/__pycache__/auth.cpython-312.pyc b/app/schemas/__pycache__/auth.cpython-312.pyc
new file mode 100644
index 0000000..c79f8f5
Binary files /dev/null and b/app/schemas/__pycache__/auth.cpython-312.pyc differ
diff --git a/app/schemas/__pycache__/tunnel.cpython-312.pyc b/app/schemas/__pycache__/tunnel.cpython-312.pyc
new file mode 100644
index 0000000..a64c089
Binary files /dev/null and b/app/schemas/__pycache__/tunnel.cpython-312.pyc differ
diff --git a/app/schemas/__pycache__/vm.cpython-312.pyc b/app/schemas/__pycache__/vm.cpython-312.pyc
new file mode 100644
index 0000000..ad43cde
Binary files /dev/null and b/app/schemas/__pycache__/vm.cpython-312.pyc differ
diff --git a/app/schemas/admin.py b/app/schemas/admin.py
new file mode 100644
index 0000000..b45c606
--- /dev/null
+++ b/app/schemas/admin.py
@@ -0,0 +1,71 @@
+from pydantic import BaseModel, EmailStr
+from typing import Optional, List
+from datetime import datetime
+
+# User management
+class UserCreate(BaseModel):
+    username: str
+    email: EmailStr
+    password: str
+    full_name: Optional[str] = None
+    role: str = "user"  # admin or user
+
+class UserUpdate(BaseModel):
+    email: Optional[EmailStr] = None
+    password: Optional[str] = None
+    full_name: Optional[str] = None
+    role: Optional[str] = None
+    is_active: Optional[bool] = None
+
+class UserInfo(BaseModel):
+    id: int
+    username: str
+    email: str
+    full_name: Optional[str]
+    role: str
+    is_active: bool
+    created_at: datetime
+    last_login: Optional[datetime]
+
+class UserListResponse(BaseModel):
+    total: int
+    users: List[UserInfo]
+
+# VM access management
+class VMAccessCreate(BaseModel):
+    user_id: int
+    vm_id: int
+    node: str
+    static_ip: Optional[str] = None
+    rdp_username: Optional[str] = None
+    rdp_password: Optional[str] = None
+    rdp_port: int = 3389
+
+class VMAccessUpdate(BaseModel):
+    static_ip: Optional[str] = None
+    rdp_username: Optional[str] = None
+    rdp_password: Optional[str] = None
+    rdp_port: Optional[int] = None
+    is_active: Optional[bool] = None
+
+class VMAccessInfo(BaseModel):
+    id: int
+    user_id: int
+    username: str  # populated via join
+    vm_id: int
+    node: str
+    vm_name: Optional[str]
+    static_ip: Optional[str]
+    rdp_username: Optional[str]
+    rdp_port: int
+    is_active: bool
+    created_at: datetime
+
+class VMAccessListResponse(BaseModel):
+    total: int
+    accesses: List[VMAccessInfo]
+
+# Common response
+class AdminResponse(BaseModel):
+    success: bool
+    message: str
diff --git a/app/schemas/auth.py b/app/schemas/auth.py
new file mode 100644
index 0000000..9d9ed54
--- /dev/null
+++ b/app/schemas/auth.py
@@ -0,0 +1,51 @@
+from pydantic import BaseModel, EmailStr, Field
+from typing import Optional
+from datetime import datetime
+from app.models.user import UserRole
+
+# Sign-up request
+class UserRegister(BaseModel):
+    username: str = Field(..., min_length=3, max_length=50)
+    email: EmailStr
+    password: str = Field(..., min_length=6)
+    full_name: Optional[str] = None
+
+# Login request
+class UserLogin(BaseModel):
+    username: str
+    password: str
+
+# Token response
+class Token(BaseModel):
+    access_token: str
+    refresh_token: str
+    token_type: str = "bearer"
+    expires_in: int
+
+# Token payload
+class TokenPayload(BaseModel):
+    sub: str  # user_id
+    exp: datetime
+    role: UserRole
+
+# User response
+class UserResponse(BaseModel):
+    id: int
+    username: str
+    email: str
+    full_name: Optional[str]
+    role: UserRole
+    is_active: bool
+    created_at: datetime
+    last_login: Optional[datetime]
+
+    class Config:
+        from_attributes = True
+
+# Current user info
+class CurrentUser(BaseModel):
+    id: int
+    username: str
+    email: str
+    role: UserRole
+    is_active: bool
diff --git a/app/schemas/tunnel.py b/app/schemas/tunnel.py
new file mode 100644
index 0000000..f441159
--- /dev/null
+++ b/app/schemas/tunnel.py
@@ -0,0 +1,44 @@
+from pydantic import BaseModel
+from typing import Optional
+from datetime import datetime
+
+# Tunnel creation request
+class TunnelCreateRequest(BaseModel):
+    vm_id: int
+    node: str
+    vm_ip: Optional[str] = None  # IP can be supplied manually, without the Guest Agent
+
+# Tunnel creation response
+class TunnelCreateResponse(BaseModel):
+    success: bool
+    message: str
+    session_id: str
+    tunnel_info: Optional['TunnelInfo'] = None
+
+# Tunnel info
+class TunnelInfo(BaseModel):
+    session_id: str
+    local_port: int
+    remote_host: str
+    remote_port: int
+    vm_id: int
+    vm_name: Optional[str] = None
+    rdp_username: Optional[str] = None
+    is_active: bool
+    created_at: Optional[datetime] = None
+
+    class Config:
+        from_attributes = True
+
+# Tunnel status response
+class TunnelStatusResponse(BaseModel):
+    session_id: str
+    is_active: bool
+    uptime_seconds: Optional[int] = None
+    created_at: datetime
+
+# Tunnel close response
+class TunnelCloseResponse(BaseModel):
+    success: bool
+    message: str
+    session_id: str
diff --git a/app/schemas/vm.py b/app/schemas/vm.py
new file mode 100644
index 0000000..d06ebb4
--- /dev/null
+++ b/app/schemas/vm.py
@@ -0,0 +1,52 @@
+from pydantic import BaseModel
+from typing import Optional
+from datetime import datetime
+
+# VM info response
+class VMInfo(BaseModel):
+    vm_id: int
+    node: str
+    name: str
+    status: str
+    ip_address: Optional[str] = None
+    cpus: int
+    memory: int  # MB
+    memory_usage: Optional[int] = None
+    cpu_usage: Optional[float] = None
+    uptime: Optional[int] = None
+
+    # Access permission info
+    can_start: bool = True
+    can_stop: bool = True
+    can_reboot: bool = True
+    can_connect: bool = True
+
+    # RDP connection info (from VMAccess)
+    rdp_username: Optional[str] = None
+    rdp_password: Optional[str] = None
+    rdp_port: int = 3389
+
+# VM list response
+class VMListResponse(BaseModel):
+    total: int
+    vms: list[VMInfo]
+
+# VM details
+class VMDetail(VMInfo):
+    rdp_port: int = 3389
+    rdp_username: Optional[str] = None
+    has_guest_agent: bool = False
+
+    class Config:
+        from_attributes = True
+
+# VM control request
+class VMControlRequest(BaseModel):
+    action: str  # "start", "stop", "reboot"
+
+# VM control response
+class VMControlResponse(BaseModel):
+    success: bool
+    message: str
+    vm_id: int
+    action: str
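Two utility modules are imported throughout this commit but their bodies are not included: `app/utils/security.py` (`hash_password`, `verify_password`) and `app/utils/jwt_handler.py` (`create_access_token`, `create_refresh_token`, `decode_token`). The sketches below are inferences from their call sites, not the actual contents of those files. For the password helpers, a conventional passlib/bcrypt shape fits the `hashed_password` column and signatures:

```python
# Plausible app/utils/security.py, inferred from its call sites.
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

def hash_password(password: str) -> str:
    """Hash a plaintext password for storage in User.hashed_password."""
    return pwd_context.hash(password)

def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a login attempt against the stored hash."""
    return pwd_context.verify(plain_password, hashed_password)
```

For the JWT helpers, the refresh endpoint's `payload.get("type") != "refresh"` check and the settings in `config.py` suggest a python-jose implementation roughly like this:

```python
# Plausible app/utils/jwt_handler.py, inferred from its call sites.
from datetime import datetime, timedelta
from typing import Optional

from jose import JWTError, jwt

from app.config import settings

def _create_token(data: dict, expires_delta: timedelta, token_type: str) -> str:
    payload = data.copy()
    payload.update({"exp": datetime.utcnow() + expires_delta, "type": token_type})
    return jwt.encode(payload, settings.JWT_SECRET_KEY,
                      algorithm=settings.JWT_ALGORITHM)

def create_access_token(data: dict) -> str:
    return _create_token(
        data, timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES), "access")

def create_refresh_token(data: dict) -> str:
    return _create_token(
        data, timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS), "refresh")

def decode_token(token: str) -> Optional[dict]:
    """Return the claims dict, or None when the token is invalid or expired."""
    try:
        return jwt.decode(token, settings.JWT_SECRET_KEY,
                          algorithms=[settings.JWT_ALGORITHM])
    except JWTError:
        return None
```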
diff --git a/app/services/__pycache__/auth_service.cpython-312.pyc b/app/services/__pycache__/auth_service.cpython-312.pyc
new file mode 100644
index 0000000..2123ccd
Binary files /dev/null and b/app/services/__pycache__/auth_service.cpython-312.pyc differ
diff --git a/app/services/__pycache__/proxmox_service.cpython-312.pyc b/app/services/__pycache__/proxmox_service.cpython-312.pyc
new file mode 100644
index 0000000..024d8b3
Binary files /dev/null and b/app/services/__pycache__/proxmox_service.cpython-312.pyc differ
diff --git a/app/services/__pycache__/ssh_tunnel_service.cpython-312.pyc b/app/services/__pycache__/ssh_tunnel_service.cpython-312.pyc
new file mode 100644
index 0000000..49c101f
Binary files /dev/null and b/app/services/__pycache__/ssh_tunnel_service.cpython-312.pyc differ
diff --git a/app/services/auth_service.py b/app/services/auth_service.py
new file mode 100644
index 0000000..5af2e51
--- /dev/null
+++ b/app/services/auth_service.py
@@ -0,0 +1,87 @@
+from sqlalchemy.orm import Session
+from datetime import datetime
+from typing import Optional
+from app.models.user import User, UserRole
+from app.schemas.auth import UserRegister, Token
+from app.utils.security import hash_password, verify_password
+from app.utils.jwt_handler import create_access_token, create_refresh_token
+from app.utils.exceptions import AuthenticationError, ConflictError
+
+class AuthService:
+    """Authentication business logic"""
+
+    @staticmethod
+    def register_user(db: Session, user_data: UserRegister) -> User:
+        """Register a user"""
+        # Duplicate checks
+        if db.query(User).filter(User.username == user_data.username).first():
+            raise ConflictError("Username already exists")
+
+        if db.query(User).filter(User.email == user_data.email).first():
+            raise ConflictError("Email already exists")
+
+        # Create the new user
+        new_user = User(
+            username=user_data.username,
+            email=user_data.email,
+            hashed_password=hash_password(user_data.password),
+            full_name=user_data.full_name,
+            role=UserRole.USER
+        )
+
+        db.add(new_user)
+        db.commit()
+        db.refresh(new_user)
+
+        return new_user
+
+    @staticmethod
+    def authenticate_user(db: Session, username: str, password: str) -> User:
+        """Authenticate a user"""
+        user = db.query(User).filter(User.username == username).first()
+
+        if not user:
+            raise AuthenticationError("User not found")
+
+        if not user.is_active:
+            raise AuthenticationError("This account is disabled")
+
+        if not verify_password(password, user.hashed_password):
+            raise AuthenticationError("Incorrect password")
+
+        # Update the last-login timestamp
+        user.last_login = datetime.utcnow()
+        db.commit()
+
+        return user
+
+    @staticmethod
+    def create_tokens(user: User) -> Token:
+        """Create JWT tokens"""
+        token_data = {
+            "sub": str(user.id),
+            "username": user.username,
+            "role": user.role.value
+        }
+
+        access_token = create_access_token(token_data)
+        refresh_token = create_refresh_token(token_data)
+
+        return Token(
+            access_token=access_token,
+            refresh_token=refresh_token,
+            token_type="bearer",
+            expires_in=1800  # 30 minutes
+        )
+
+    @staticmethod
+    def get_user_by_id(db: Session, user_id: int) -> Optional[User]:
+        """Look up a user by ID"""
+        return db.query(User).filter(User.id == user_id).first()
+
+    @staticmethod
+    def get_user_by_username(db: Session, username: str) -> Optional[User]:
+        """Look up a user by username"""
+        return db.query(User).filter(User.username == username).first()
+
+auth_service = AuthService()
diff --git a/app/services/proxmox_service.py b/app/services/proxmox_service.py
new file mode 100644
index 0000000..098726f
--- /dev/null
+++ b/app/services/proxmox_service.py
@@ -0,0 +1,140 @@
+import httpx
+from typing import List, Optional, Dict
+from app.config import settings
+import json
+
+class ProxmoxService:
+    """Proxmox VE API client"""
+
+    def __init__(self):
+        self.base_url = settings.PROXMOX_HOST
+        self.api_token = settings.PROXMOX_API_TOKEN
+        self.verify_ssl = settings.PROXMOX_VERIFY_SSL
+
+    async def _make_request(self, method: str, endpoint: str, **kwargs) -> Dict:
+        """Issue a Proxmox API request"""
+        url = f"{self.base_url}/api2/json{endpoint}"
+        headers = {"Authorization": self.api_token}
+
+        async with httpx.AsyncClient(verify=self.verify_ssl, timeout=30.0) as client:
+            response = await client.request(method, url, headers=headers, **kwargs)
+            response.raise_for_status()
+            return response.json()
+
+    async def get_nodes(self) -> List[Dict]:
+        """List nodes"""
+        result = await self._make_request("GET", "/nodes")
+        return result.get("data", [])
+
+    async def get_vms(self, node: str) -> List[Dict]:
+        """List the VMs on a specific node"""
+        result = await self._make_request("GET", f"/nodes/{node}/qemu")
+        return result.get("data", [])
+
+    async def get_all_vms(self) -> List[Dict]:
+        """List the VMs across all nodes"""
+        nodes = await self.get_nodes()
+        print(f"DEBUG: node list: {nodes}")
+
+        all_vms = []
+
+        for node in nodes:
+            node_name = node.get("node")
+            if node_name:
+                vms = await self.get_vms(node_name)
+                print(f"DEBUG: VMs on node {node_name}: {vms}")
+                for vm in vms:
+                    vm["node"] = node_name
+                    all_vms.append(vm)
+
+        print(f"DEBUG: total VM count: {len(all_vms)}")
+        print(f"DEBUG: all VMs: {all_vms}")
+        return all_vms
+
+    async def get_vm_status(self, node: str, vm_id: int) -> Dict:
+        """Get a VM's status"""
+        result = await self._make_request("GET", f"/nodes/{node}/qemu/{vm_id}/status/current")
+        return result.get("data", {})
+
+    async def get_vm_ip(self, node: str, vm_id: int) -> Optional[str]:
+        """Look up a VM's IP address via the QEMU Guest Agent"""
+        try:
+            result = await self._make_request(
+                "GET",
+                f"/nodes/{node}/qemu/{vm_id}/agent/network-get-interfaces"
+            )
+
+            interfaces = result.get("data", {}).get("result", [])
+
+            for iface in interfaces:
+                # skip loopback
+                if "loopback" in iface.get("name", "").lower():
+                    continue
+
+                ip_addresses = iface.get("ip-addresses", [])
+                for ip in ip_addresses:
+                    if ip.get("ip-address-type") == "ipv4":
+                        address = ip.get("ip-address")
+                        # only return private IPs
+                        if self._is_private_ip(address):
+                            return address
+
+            return None
+        except Exception:
+            return None
+
+    async def start_vm(self, node: str, vm_id: int) -> bool:
+        """Start a VM"""
+        try:
+            await self._make_request("POST", f"/nodes/{node}/qemu/{vm_id}/status/start")
+            return True
+        except Exception:
+            return False
+
+    async def stop_vm(self, node: str, vm_id: int) -> bool:
+        """Stop a VM"""
+        try:
+            await self._make_request("POST", f"/nodes/{node}/qemu/{vm_id}/status/stop")
+            return True
+        except Exception:
+            return False
+
+    async def reboot_vm(self, node: str, vm_id: int) -> bool:
+        """Reboot a VM"""
+        try:
+            await self._make_request("POST", f"/nodes/{node}/qemu/{vm_id}/status/reboot")
+            return True
+        except Exception:
+            return False
+
+    def _is_private_ip(self, ip: str) -> bool:
+        """Check whether an IP address is private"""
+        if not ip:
+            return False
+
+        parts = ip.split(".")
+        if len(parts) != 4:
+            return False
+
+        try:
+            first = int(parts[0])
+            second = int(parts[1])
+
+            # 10.0.0.0/8
+            if first == 10:
+                return True
+
+            # 172.16.0.0/12
+            if first == 172 and 16 <= second <= 31:
+                return True
+
+            # 192.168.0.0/16
+            if first == 192 and second == 168:
+                return True
+
+            return False
+        except ValueError:
+            return False
+
+# Singleton instance
+proxmox_service = ProxmoxService()
diff --git a/app/services/ssh_tunnel_service.py b/app/services/ssh_tunnel_service.py
new file mode 100644
index 0000000..3291f7e
--- /dev/null
+++ b/app/services/ssh_tunnel_service.py
@@ -0,0 +1,220 @@
+import asyncio
+import asyncssh
+import secrets
+from typing import Dict, Optional
+from datetime import datetime
+from app.config import settings
+from app.utils.exceptions import InternalServerError
+
+
+class SSHTunnelManager:
+    """Manages SSH tunnels."""
+
+    def __init__(self):
+        self.active_tunnels: Dict[str, 'TunnelSession'] = {}
+        self.used_ports = set()
+
+    # ---------------------------------------------------------
+    # Port management
+    # ---------------------------------------------------------
+    def _get_available_port(self) -> int:
+        """Find an available port."""
+        for port in range(settings.TUNNEL_PORT_MIN, settings.TUNNEL_PORT_MAX):
+            if port not in self.used_ports:
+                self.used_ports.add(port)
+                return port
+        raise InternalServerError("No available ports")
+
+    def _release_port(self, port: int):
+        """Release a port."""
+        self.used_ports.discard(port)
+
+    # ---------------------------------------------------------
+    # Tunnel creation
+    # ---------------------------------------------------------
+    async def create_tunnel(
+        self,
+        user_id: int,
+        vm_id: int,
+        remote_host: str,
+        remote_port: int = 3389,
+        vm_name: str = "",
+        rdp_username: str = ""
+    ) -> Dict:
+        """
+        Create an SSH tunnel.
+        """
+
+        session_id = secrets.token_urlsafe(32)
+        local_port = self._get_available_port()
+
+        try:
+            ssh_config = {
+                "host": settings.SSH_HOST,
+                "port": settings.SSH_PORT,
+                "username": settings.SSH_USERNAME,
+                "known_hosts": None
+            }
+
+            if settings.SSH_KEY_PATH:
+                ssh_config["client_keys"] = [settings.SSH_KEY_PATH]
+            elif settings.SSH_PASSWORD:
+                ssh_config["password"] = settings.SSH_PASSWORD
+            else:
+                raise InternalServerError("SSH credentials are not configured")
+
+            # Establish the SSH connection
+            conn = await asyncssh.connect(**ssh_config)
+
+            # Local port forwarding (external access allowed)
+            listener = await conn.forward_local_port(
+                '0.0.0.0',  # listen on all interfaces (allows external access)
+                local_port,
+                remote_host,
+                remote_port
+            )
+
+            # Store the session
+            tunnel_session = TunnelSession(
+                session_id=session_id,
+                user_id=user_id,
+                vm_id=vm_id,
+                local_port=local_port,
+                remote_host=remote_host,
+                remote_port=remote_port,
+                connection=conn,
+                listener=listener,
+                created_at=datetime.utcnow(),
+                vm_name=vm_name or "",
+                rdp_username=rdp_username or ""
+            )
+
+            self.active_tunnels[session_id] = tunnel_session
+
+            return {
+                "session_id": session_id,
+                "local_port": local_port,
+                "remote_host": remote_host,
+                "remote_port": remote_port,
+                "ssh_host": settings.SSH_HOST,
+                "vm_name": vm_name or "",
+                "rdp_username": rdp_username or "",
+                "created_at": tunnel_session.created_at.isoformat()
+            }
+
+        except Exception as e:
+            self._release_port(local_port)
+            raise InternalServerError(f"Failed to create SSH tunnel: {str(e)}")
+
+    # ---------------------------------------------------------
+    # Tunnel teardown
+    # ---------------------------------------------------------
+    async def close_tunnel(self, session_id: str) -> bool:
+        tunnel = self.active_tunnels.get(session_id)
+        if not tunnel:
+            return False
+
+        try:
+            if tunnel.listener:
+                tunnel.listener.close()
+                await tunnel.listener.wait_closed()
+
+            if tunnel.connection:
+                tunnel.connection.close()
+                await tunnel.connection.wait_closed()
+
+            self._release_port(tunnel.local_port)
+
+            del self.active_tunnels[session_id]
+            return True
+
+        except Exception as e:
+            print(f"Error while closing tunnel: {e}")
+            return False
+
+    # ---------------------------------------------------------
+    # Tunnel status
+    # ---------------------------------------------------------
+    async def get_tunnel_status(self, session_id: str) -> Optional[Dict]:
+        tunnel = self.active_tunnels.get(session_id)
+        if not tunnel:
+            return None
+
+        is_active = tunnel.connection is not None and tunnel.listener is not None
+        uptime = (datetime.utcnow() - tunnel.created_at).total_seconds()
+
+        # Use empty strings so these fields are never null
+        vm_name = tunnel.vm_name or ""
+        rdp_username = tunnel.rdp_username or ""
+
+        return {
+            "session_id": session_id,
+            "is_active": is_active,
+            "uptime_seconds": int(uptime),
+            "created_at": tunnel.created_at.isoformat(),
+            "vm_id": tunnel.vm_id,
+            "vm_name": vm_name,
+            "rdp_username": rdp_username,
+            "tunnel_info": {
+                "session_id": session_id,
+                "local_port": tunnel.local_port,
+                "remote_host": tunnel.remote_host,
+                "remote_port": tunnel.remote_port,
+                "vm_id": tunnel.vm_id,
+                "vm_name": vm_name,
+                "rdp_username": rdp_username,
+                "is_active": is_active
+            }
+        }
+
+    # ---------------------------------------------------------
+    # Cleanup of inactive tunnels
+    # ---------------------------------------------------------
+    async def cleanup_inactive_tunnels(self):
+        to_remove = []
+
+        for sid, tunnel in self.active_tunnels.items():
+            if tunnel.connection is None or tunnel.listener is None:
+                to_remove.append(sid)
+
+        for sid in to_remove:
+            await self.close_tunnel(sid)
+
+
+# ---------------------------------------------------------
+# Tunnel session class
+# ---------------------------------------------------------
+class TunnelSession:
+    """Holds the state of one tunnel session."""
+
+    def __init__(
+        self,
+        session_id: str,
+        user_id: int,
+        vm_id: int,
+        local_port: int,
+        remote_host: str,
+        remote_port: int,
+        connection,
+        listener,
+        created_at: datetime,
+        vm_name: str = "",
+        rdp_username: str = ""
+    ):
+        self.session_id = session_id
+        self.user_id = user_id
+        self.vm_id = vm_id
+        self.local_port = local_port
+        self.remote_host = remote_host
+        self.remote_port = remote_port
+        self.connection = connection
+        self.listener = listener
+        self.created_at = created_at
+
+        # Guard against ever returning null for these fields
+        self.vm_name = vm_name or ""
+        self.rdp_username = rdp_username or ""
+
+
+# Singleton instance
+ssh_tunnel_manager = SSHTunnelManager()
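End to end, a tunnel maps one free gateway port (50000-60000 per .env) onto a VM's RDP port through the SSH host. A sketch of the life cycle, with placeholder IDs and a placeholder VM address; in the real flow the address would come from proxmox_service.get_vm_ip():

import asyncio
from app.services.ssh_tunnel_service import ssh_tunnel_manager

async def demo():
    # user_id, vm_id, and 192.168.0.50 are placeholders for this sketch.
    info = await ssh_tunnel_manager.create_tunnel(
        user_id=1, vm_id=100, remote_host="192.168.0.50", remote_port=3389
    )
    print(f"RDP endpoint: {info['ssh_host']}:{info['local_port']}")

    status = await ssh_tunnel_manager.get_tunnel_status(info["session_id"])
    print(f"active={status['is_active']} uptime={status['uptime_seconds']}s")

    # Tear the tunnel down and release the port when the client is done.
    await ssh_tunnel_manager.close_tunnel(info["session_id"])

asyncio.run(demo())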
diff --git a/app/services/temp_ssh_password_service.py b/app/services/temp_ssh_password_service.py
new file mode 100644
index 0000000..489701b
--- /dev/null
+++ b/app/services/temp_ssh_password_service.py
@@ -0,0 +1,78 @@
+import secrets
+import hashlib
+from typing import Dict, Optional
+from datetime import datetime, timedelta
+
+class TempSshPasswordManager:
+    """Manages temporary SSH passwords."""
+
+    def __init__(self):
+        # In-memory store (Redis is recommended in production)
+        self._passwords: Dict[str, dict] = {}
+
+    def generate_password(self, username: str, validity_hours: int = 1) -> str:
+        """
+        Generate a temporary SSH password.
+
+        Args:
+            username: the user name
+            validity_hours: validity window (default: 1 hour)
+
+        Returns:
+            The temporary password.
+        """
+        # Generate a secure random password (32 bytes of entropy,
+        # rendered by token_urlsafe as a ~43-character string)
+        temp_password = secrets.token_urlsafe(32)
+
+        # Store only the hash (the plaintext password is never kept)
+        password_hash = hashlib.sha256(temp_password.encode()).hexdigest()
+
+        # Compute the expiry time
+        expires_at = datetime.utcnow() + timedelta(hours=validity_hours)
+
+        # Save
+        self._passwords[username] = {
+            "password_hash": password_hash,
+            "expires_at": expires_at,
+            "created_at": datetime.utcnow()
+        }
+
+        return temp_password
+
+    def verify_password(self, username: str, password: str) -> bool:
+        """
+        Verify a password.
+
+        Args:
+            username: the user name
+            password: the password to check
+
+        Returns:
+            Whether the password is valid.
+        """
+        if username not in self._passwords:
+            return False
+
+        stored = self._passwords[username]
+
+        # Check expiry
+        if datetime.utcnow() > stored["expires_at"]:
+            del self._passwords[username]
+            return False
+
+        # Compare hashes in constant time to avoid timing leaks
+        password_hash = hashlib.sha256(password.encode()).hexdigest()
+        return secrets.compare_digest(password_hash, stored["password_hash"])
+
+    def cleanup_expired(self):
+        """Purge expired passwords."""
+        now = datetime.utcnow()
+        expired = [
+            username for username, data in self._passwords.items()
+            if now > data["expires_at"]
+        ]
+        for username in expired:
+            del self._passwords[username]
+
+# Singleton instance
+temp_ssh_password_manager = TempSshPasswordManager()
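Together with the PAM helper further down in this commit, the intended flow appears to be: the API mints a short-lived password, the client presents it over SSH, and the gateway's PAM stack calls back into the API to verify it. A quick local illustration, with a placeholder username:

from app.services.temp_ssh_password_service import temp_ssh_password_manager

pw = temp_ssh_password_manager.generate_password("tunneluser", validity_hours=1)
assert temp_ssh_password_manager.verify_password("tunneluser", pw) is True
assert temp_ssh_password_manager.verify_password("tunneluser", "wrong") is False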
diff --git a/app/utils/__pycache__/exceptions.cpython-312.pyc b/app/utils/__pycache__/exceptions.cpython-312.pyc
new file mode 100644
index 0000000..cfb8758
Binary files /dev/null and b/app/utils/__pycache__/exceptions.cpython-312.pyc differ
diff --git a/app/utils/__pycache__/jwt_handler.cpython-312.pyc b/app/utils/__pycache__/jwt_handler.cpython-312.pyc
new file mode 100644
index 0000000..240bfea
Binary files /dev/null and b/app/utils/__pycache__/jwt_handler.cpython-312.pyc differ
diff --git a/app/utils/__pycache__/security.cpython-312.pyc b/app/utils/__pycache__/security.cpython-312.pyc
new file mode 100644
index 0000000..4f3f7bb
Binary files /dev/null and b/app/utils/__pycache__/security.cpython-312.pyc differ
diff --git a/app/utils/exceptions.py b/app/utils/exceptions.py
new file mode 100644
index 0000000..77aaa2c
--- /dev/null
+++ b/app/utils/exceptions.py
@@ -0,0 +1,31 @@
+from fastapi import HTTPException, status
+
+class AuthenticationError(HTTPException):
+    """Authentication failure."""
+    def __init__(self, detail: str = "Authentication failed"):
+        super().__init__(status_code=status.HTTP_401_UNAUTHORIZED, detail=detail)
+
+class PermissionDeniedError(HTTPException):
+    """Insufficient permissions."""
+    def __init__(self, detail: str = "Permission denied"):
+        super().__init__(status_code=status.HTTP_403_FORBIDDEN, detail=detail)
+
+class NotFoundError(HTTPException):
+    """Resource not found."""
+    def __init__(self, detail: str = "Resource not found"):
+        super().__init__(status_code=status.HTTP_404_NOT_FOUND, detail=detail)
+
+class BadRequestError(HTTPException):
+    """Malformed request."""
+    def __init__(self, detail: str = "Bad request"):
+        super().__init__(status_code=status.HTTP_400_BAD_REQUEST, detail=detail)
+
+class ConflictError(HTTPException):
+    """Conflict (e.g. a duplicate resource)."""
+    def __init__(self, detail: str = "Resource already exists"):
+        super().__init__(status_code=status.HTTP_409_CONFLICT, detail=detail)
+
+class InternalServerError(HTTPException):
+    """Internal server error."""
+    def __init__(self, detail: str = "A server error occurred"):
+        super().__init__(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=detail)
diff --git a/app/utils/jwt_handler.py b/app/utils/jwt_handler.py
new file mode 100644
index 0000000..872d832
--- /dev/null
+++ b/app/utils/jwt_handler.py
@@ -0,0 +1,38 @@
+from datetime import datetime, timedelta
+from typing import Optional
+from jose import JWTError, jwt
+from app.config import settings
+
+def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
+    """Create a JWT access token."""
+    to_encode = data.copy()
+
+    if expires_delta:
+        expire = datetime.utcnow() + expires_delta
+    else:
+        expire = datetime.utcnow() + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
+
+    to_encode.update({"exp": expire})
+    encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
+    return encoded_jwt
+
+def create_refresh_token(data: dict) -> str:
+    """Create a JWT refresh token."""
+    to_encode = data.copy()
+    expire = datetime.utcnow() + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
+    to_encode.update({"exp": expire, "type": "refresh"})
+    encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
+    return encoded_jwt
+
+def decode_token(token: str) -> Optional[dict]:
+    """Decode a JWT token."""
+    try:
+        payload = jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM])
+        return payload
+    except JWTError:
+        return None
+
+def verify_token(token: str) -> bool:
+    """Check whether a token is valid."""
+    payload = decode_token(token)
+    return payload is not None
diff --git a/app/utils/security.py b/app/utils/security.py
new file mode 100644
index 0000000..3c76615
--- /dev/null
+++ b/app/utils/security.py
@@ -0,0 +1,12 @@
+from passlib.context import CryptContext
+
+# bcrypt context
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+def hash_password(password: str) -> str:
+    """Hash a password."""
+    return pwd_context.hash(password)
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+    """Verify a password against its hash."""
+    return pwd_context.verify(plain_password, hashed_password)
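A round trip through the JWT helpers doubles as a sanity check; the payload values below are arbitrary:

from app.utils.jwt_handler import create_access_token, decode_token, verify_token

token = create_access_token({"sub": "1", "username": "admin", "role": "admin"})
payload = decode_token(token)       # dict with sub/username/role/exp, or None
assert payload is not None and payload["sub"] == "1"
assert verify_token(token) is True  # thin convenience wrapper over decode_token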
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..63fc0cf
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,19 @@
+version: '3.8'
+
+services:
+  api:
+    build: .
+    container_name: vconnect-api
+    ports:
+      - "8000:8000"
+    env_file:
+      - .env
+    restart: unless-stopped
+    volumes:
+      - ./logs:/app/logs
+    networks:
+      - vconnect-network
+
+networks:
+  vconnect-network:
+    driver: bridge
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..19c69ce
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,27 @@
+# FastAPI & Web Server
+fastapi==0.109.0
+uvicorn[standard]==0.27.0
+python-multipart==0.0.6
+
+# Database
+sqlalchemy==2.0.25
+psycopg2-binary==2.9.9
+alembic==1.13.1
+
+# Authentication & Security
+python-jose[cryptography]==3.3.0
+passlib[bcrypt]==1.7.4
+python-dotenv==1.0.0
+pydantic-settings==2.1.0
+
+# SSH & Networking
+paramiko==3.4.0
+asyncssh==2.14.2
+
+# HTTP Client
+httpx==0.26.0
+aiohttp==3.9.1
+
+# Utilities
+python-dateutil==2.8.2
+pytz==2023.3
diff --git a/scripts/pam_vconnect_auth.py b/scripts/pam_vconnect_auth.py
new file mode 100644
index 0000000..c609674
--- /dev/null
+++ b/scripts/pam_vconnect_auth.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+"""
+PAM authentication script
+for verifying temporary SSH passwords.
+"""
+import sys
+import requests
+import os
+
+# VConnect API server address
+API_URL = os.getenv("VCONNECT_API_URL", "http://localhost:8000")
+
+def verify_password(username, password):
+    """
+    Verify a temporary SSH password.
+
+    Args:
+        username: the user name
+        password: the password
+
+    Returns:
+        True if valid, False otherwise
+    """
+    try:
+        # Ask the API server to verify the password
+        response = requests.post(
+            f"{API_URL}/api/ssh/verify",
+            json={"username": username, "password": password},
+            timeout=5
+        )
+        return response.status_code == 200
+    except Exception as e:
+        print(f"Error: {e}", file=sys.stderr)
+        return False
+
+if __name__ == "__main__":
+    # PAM passes the username via an environment variable and the password on stdin
+    username = os.getenv("PAM_USER")
+    if not username and len(sys.argv) > 1:
+        username = sys.argv[1]
+
+    # Read the password from stdin (supplied by PAM)
+    try:
+        password = sys.stdin.readline().strip()
+    except Exception:
+        password = ""
+
+    if not username or not password:
+        sys.exit(1)
+
+    if verify_password(username, password):
+        sys.exit(0)  # success
+    else:
+        sys.exit(1)  # failure
diff --git a/vconnect.db b/vconnect.db
new file mode 100644
index 0000000..c4e9feb
Binary files /dev/null and b/vconnect.db differ
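The helper can be exercised outside PAM by reproducing its calling convention: PAM_USER in the environment, the candidate password on stdin, exit status 0 meaning accept. A sketch, assuming it is run from the repository root against a locally running API; the username and password values are placeholders:

import os
import subprocess

# Simulate the PAM invocation of the script.
result = subprocess.run(
    ["python3", "scripts/pam_vconnect_auth.py"],
    input=b"the-temporary-password\n",      # what PAM would feed on stdin
    env={**os.environ, "PAM_USER": "tunneluser"},
    capture_output=True,
)
print("accepted" if result.returncode == 0 else "rejected")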
diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 new file mode 100644 index 0000000..eeea358 --- /dev/null +++ b/venv/bin/Activate.ps1 @@ -0,0 +1,247 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes.
+ +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. + +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+ $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. + if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate new file mode 100644 index 0000000..19381d7 --- /dev/null +++ b/venv/bin/activate @@ -0,0 +1,70 @@ +# This file must be used with "source bin/activate" *from bash* +# You cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # Call hash to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + hash -r 2> /dev/null + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +# on Windows, a path can contain colons and backslashes and has to be converted: +if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then + # transform D:\path\to\venv to /d/path/to/venv on MSYS + # and to /cygdrive/d/path/to/venv on Cygwin + export VIRTUAL_ENV=$(cygpath /data/vconnect-api/venv) +else + # use the path as-is + export VIRTUAL_ENV=/data/vconnect-api/venv +fi + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/"bin":$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1='(venv) '"${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT='(venv) ' + export VIRTUAL_ENV_PROMPT +fi + +# Call hash to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +hash -r 2> /dev/null diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh new file mode 100644 index 0000000..ce6e1ac --- /dev/null +++ b/venv/bin/activate.csh @@ -0,0 +1,27 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. + +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV /data/vconnect-api/venv + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/"bin":$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = '(venv) '"$prompt" + setenv VIRTUAL_ENV_PROMPT '(venv) ' +endif + +alias pydoc python -m pydoc + +rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish new file mode 100644 index 0000000..a27c34b --- /dev/null +++ b/venv/bin/activate.fish @@ -0,0 +1,69 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/). You cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + set -e _OLD_FISH_PROMPT_OVERRIDE + # prevents error when using nested fish instances (Issue #93858) + if functions -q _old_fish_prompt + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV /data/vconnect-api/venv + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/"bin $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT '(venv) ' +end diff --git a/venv/bin/alembic b/venv/bin/alembic new file mode 100644 index 0000000..01dba21 --- /dev/null +++ b/venv/bin/alembic @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from alembic.config import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/venv/bin/dotenv b/venv/bin/dotenv new file mode 100644 index 0000000..8c46d80 --- /dev/null +++ b/venv/bin/dotenv @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from dotenv.__main__ import cli +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(cli()) diff --git a/venv/bin/email_validator b/venv/bin/email_validator new file mode 100644 index 0000000..0946f4c --- /dev/null +++ b/venv/bin/email_validator @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from email_validator.__main__ import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/venv/bin/httpx b/venv/bin/httpx new file mode 100644 index 0000000..ddab18f --- /dev/null +++ b/venv/bin/httpx @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from httpx import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/venv/bin/mako-render b/venv/bin/mako-render new file mode 100644 index 0000000..d57298a --- /dev/null +++ b/venv/bin/mako-render @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from mako.cmd import cmdline +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(cmdline()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100644 index 0000000..1c4cdea --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,8 @@ +#!/data/vconnect-api/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 new file mode 100644 index 0000000..1c4cdea --- /dev/null +++ b/venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/data/vconnect-api/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3.12 b/venv/bin/pip3.12 new file mode 100644 index 0000000..1c4cdea --- /dev/null +++ b/venv/bin/pip3.12 @@ -0,0 +1,8 @@ +#!/data/vconnect-api/venv/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pyrsa-decrypt b/venv/bin/pyrsa-decrypt new file mode 100644 index 0000000..8aa5e63 --- /dev/null +++ b/venv/bin/pyrsa-decrypt @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from rsa.cli import decrypt +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(decrypt()) diff --git a/venv/bin/pyrsa-encrypt b/venv/bin/pyrsa-encrypt new file mode 100644 index 0000000..e4a78cd --- /dev/null +++ b/venv/bin/pyrsa-encrypt @@ -0,0 +1,7 @@ 
+#!/data/vconnect-api/venv/bin/python3 +import sys +from rsa.cli import encrypt +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(encrypt()) diff --git a/venv/bin/pyrsa-keygen b/venv/bin/pyrsa-keygen new file mode 100644 index 0000000..6bfbc61 --- /dev/null +++ b/venv/bin/pyrsa-keygen @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from rsa.cli import keygen +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(keygen()) diff --git a/venv/bin/pyrsa-priv2pub b/venv/bin/pyrsa-priv2pub new file mode 100644 index 0000000..2382143 --- /dev/null +++ b/venv/bin/pyrsa-priv2pub @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from rsa.util import private_to_public +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(private_to_public()) diff --git a/venv/bin/pyrsa-sign b/venv/bin/pyrsa-sign new file mode 100644 index 0000000..653b9f6 --- /dev/null +++ b/venv/bin/pyrsa-sign @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from rsa.cli import sign +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(sign()) diff --git a/venv/bin/pyrsa-verify b/venv/bin/pyrsa-verify new file mode 100644 index 0000000..168413c --- /dev/null +++ b/venv/bin/pyrsa-verify @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from rsa.cli import verify +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(verify()) diff --git a/venv/bin/uvicorn b/venv/bin/uvicorn new file mode 100644 index 0000000..8437b4f --- /dev/null +++ b/venv/bin/uvicorn @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from uvicorn.main import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/venv/bin/watchfiles b/venv/bin/watchfiles new file mode 100644 index 0000000..02a1308 --- /dev/null +++ b/venv/bin/watchfiles @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from watchfiles.cli import cli +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(cli()) diff --git a/venv/bin/websockets b/venv/bin/websockets new file mode 100644 index 0000000..5babb03 --- /dev/null +++ b/venv/bin/websockets @@ -0,0 +1,7 @@ +#!/data/vconnect-api/venv/bin/python3 +import sys +from websockets.cli import main +if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/venv/include/site/python3.12/greenlet/greenlet.h b/venv/include/site/python3.12/greenlet/greenlet.h new file mode 100644 index 0000000..d02a16e --- /dev/null +++ b/venv/include/site/python3.12/greenlet/greenlet.h @@ -0,0 +1,164 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. 
*/ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/LICENSE b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/LICENSE new file mode 100644 index 0000000..967cdc5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright 2005-2024 SQLAlchemy authors and contributors . + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/METADATA b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/METADATA new file mode 100644 index 0000000..ac36a34 --- /dev/null +++ b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/METADATA @@ -0,0 +1,242 @@ +Metadata-Version: 2.1 +Name: SQLAlchemy +Version: 2.0.25 +Summary: Database Abstraction Library +Home-page: https://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.sqlalchemy.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: typing-extensions >=4.6.0 +Requires-Dist: greenlet !=0.4.17 ; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))) +Requires-Dist: importlib-metadata ; python_version < "3.8" +Provides-Extra: aiomysql +Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql' +Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql' +Provides-Extra: aioodbc +Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc' +Requires-Dist: aioodbc ; extra == 'aioodbc' +Provides-Extra: aiosqlite +Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite' +Requires-Dist: aiosqlite ; extra == 'aiosqlite' +Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite' +Provides-Extra: asyncio +Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio' +Provides-Extra: asyncmy +Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy' +Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy' +Provides-Extra: mariadb_connector +Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector' +Provides-Extra: mssql +Requires-Dist: pyodbc ; extra == 'mssql' +Provides-Extra: mssql_pymssql +Requires-Dist: pymssql ; extra == 'mssql_pymssql' +Provides-Extra: mssql_pyodbc +Requires-Dist: pyodbc ; extra == 'mssql_pyodbc' +Provides-Extra: mypy +Requires-Dist: mypy >=0.910 ; extra == 'mypy' +Provides-Extra: mysql +Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql' +Provides-Extra: mysql_connector +Requires-Dist: mysql-connector-python ; extra == 'mysql_connector' +Provides-Extra: oracle +Requires-Dist: cx-oracle >=8 ; extra == 'oracle' +Provides-Extra: oracle_oracledb +Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb' +Provides-Extra: postgresql +Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql' +Provides-Extra: postgresql_asyncpg +Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg' +Requires-Dist: asyncpg ; 
extra == 'postgresql_asyncpg' +Provides-Extra: postgresql_pg8000 +Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000' +Provides-Extra: postgresql_psycopg +Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg' +Provides-Extra: postgresql_psycopg2binary +Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary' +Provides-Extra: postgresql_psycopg2cffi +Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi' +Provides-Extra: postgresql_psycopgbinary +Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary' +Provides-Extra: pymysql +Requires-Dist: pymysql ; extra == 'pymysql' +Provides-Extra: sqlcipher +Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher' + +SQLAlchemy +========== + +|PyPI| |Python| |Downloads| + +.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI + +.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI - Python Version + +.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month + :target: https://pepy.tech/project/sqlalchemy + :alt: PyPI - Downloads + + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. 
+ +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer-initiated + decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +https://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +Code of Conduct +--------------- + +Above all, SQLAlchemy places great emphasis on polite, thoughtful, and +constructive communication between users and developers. +Please see our current Code of Conduct at +`Code of Conduct `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. 
+ diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/RECORD b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/RECORD new file mode 100644 index 0000000..b0484d8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/RECORD @@ -0,0 +1,530 @@ +SQLAlchemy-2.0.25.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +SQLAlchemy-2.0.25.dist-info/LICENSE,sha256=PA9Zq4h9BB3mpOUv_j6e212VIt6Qn66abNettue-MpM,1100 +SQLAlchemy-2.0.25.dist-info/METADATA,sha256=e57J_l66lNZ5LyXmLMbAGiL02rqx7HxQu_Ci5m_5Y8U,9602 +SQLAlchemy-2.0.25.dist-info/RECORD,, +SQLAlchemy-2.0.25.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +SQLAlchemy-2.0.25.dist-info/WHEEL,sha256=vJMp7mUkE-fMIYyE5xJ9Q2cYPnWVgHf20clVdwMSXAg,152 +SQLAlchemy-2.0.25.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 +sqlalchemy/__init__.py,sha256=Tu8hhzZF610d9j59ruCV2IROKp-u1Y9i-Plhc6Nf50c,13033 +sqlalchemy/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/__pycache__/events.cpython-312.pyc,, +sqlalchemy/__pycache__/exc.cpython-312.pyc,, +sqlalchemy/__pycache__/inspection.cpython-312.pyc,, +sqlalchemy/__pycache__/log.cpython-312.pyc,, +sqlalchemy/__pycache__/schema.cpython-312.pyc,, +sqlalchemy/__pycache__/types.cpython-312.pyc,, +sqlalchemy/connectors/__init__.py,sha256=PzXPqZqi3BzEnrs1eW0DcsR4lyknAzhhN9rWcQ97hb4,476 +sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,, +sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,, +sqlalchemy/connectors/aioodbc.py,sha256=GSTiNMO9h0qjPxgqaxDwWZ8HvhWMFNVR6MJQnN1oc40,5288 +sqlalchemy/connectors/asyncio.py,sha256=6s4hDYfuMjJ9KbJ4s7bF1fp5DmcgV77ozgZ5-bwZ0wc,5955 +sqlalchemy/connectors/pyodbc.py,sha256=PZC86t3poFmhgW9_tjDJH8o1Ua0OyiCdfrP7GRX5Gxc,8453 +sqlalchemy/cyextension/__init__.py,sha256=GzhhN8cjMnDTE0qerlUlpbrNmFPHQWCZ4Gk74OAxl04,244 +sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=w52mPyvsvh9eI8jjcKdy6CkeoaAQqQgyA1fsoIxbyNw,1945776 +sqlalchemy/cyextension/collections.pyx,sha256=L7DZ3DGKpgw2MT2ZZRRxCnrcyE5pU1NAFowWgAzQPEc,12571 +sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=DdLUYRGwSoHPvjZVH4IVg4juCsUDopZS8uaUFB6sgy8,811416 +sqlalchemy/cyextension/immutabledict.pxd,sha256=3x3-rXG5eRQ7bBnktZ-OJ9-6ft8zToPmTDOd92iXpB0,291 +sqlalchemy/cyextension/immutabledict.pyx,sha256=KfDTYbTfebstE8xuqAtuXsHNAK0_b5q_ymUiinUe_xs,3535 +sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=e4g_ijRARjeUcb6O4mHKD6Si6o5syeTmmgxeYbheilY,534296 +sqlalchemy/cyextension/processors.pyx,sha256=R1rHsGLEaGeBq5VeCydjClzYlivERIJ9B-XLOJlf2MQ,1792 +sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=hyEJquVScOr748nm3DgxYuOl2-hia-4K0i6VjvWhAzw,626328 +sqlalchemy/cyextension/resultproxy.pyx,sha256=eWLdyBXiBy_CLQrF5ScfWJm7X0NeelscSXedtj1zv9Q,2725 +sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=wlYybCnldGKuyWaAF2i2-vcvCIfcq0NSleB1oeiuXlw,958968 +sqlalchemy/cyextension/util.pyx,sha256=B85orxa9LddLuQEaDoVSq1XmAXIbLKxrxpvuB8ogV_o,2530 +sqlalchemy/dialects/__init__.py,sha256=Kos9Gf5JZg1Vg6GWaCqEbD6e0r1jCwCmcnJIfcxDdcY,1770 +sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,, 
+sqlalchemy/dialects/_typing.py,sha256=hyv0nKucX2gI8ispB1IsvaUgrEPn9zEcq9hS7kfstEw,888 +sqlalchemy/dialects/mssql/__init__.py,sha256=r5t8wFRNtBQoiUWh0WfIEWzXZW6f3D0uDt6NZTW_7Cc,1880 +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,, +sqlalchemy/dialects/mssql/aioodbc.py,sha256=UQd9ecSMIML713TDnLAviuBVJle7P7i1FtqGZZePk2Y,2022 +sqlalchemy/dialects/mssql/base.py,sha256=lkOGhA8Kg3aximjmbAOZcShXlSWLZWEe94-KdfzxkMo,133650 +sqlalchemy/dialects/mssql/information_schema.py,sha256=ZmFLZ7d4qlguBTm5pIAe3XfnCOr8qZfXPDFK5DE7in8,8083 +sqlalchemy/dialects/mssql/json.py,sha256=evUACW2O62TAPq8B7QIPagz7jfc664ql9ms68JqiYzg,4816 +sqlalchemy/dialects/mssql/provision.py,sha256=RTVbgYLFAHzEnpVQDJroU8ji_10MqBTiZfyP9_-QNT4,5362 +sqlalchemy/dialects/mssql/pymssql.py,sha256=eZRLz7HGt3SdoZUjFBmA9BS43N7AhIASw7VPBPEJuG0,4038 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=GqWKptZfVMKZJ7RXQyuXL4pRJlfnOrZtRYchEeeHl3o,27057 +sqlalchemy/dialects/mysql/__init__.py,sha256=bxbi4hkysUK2OOVvr1F49akUj1cky27kKb07tgFzI9U,2153 +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,, +sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,, +sqlalchemy/dialects/mysql/aiomysql.py,sha256=tGe7R8lfaRhdRNMJHQg-AocWu5UTkstLg1C_yIKFBl8,9759 +sqlalchemy/dialects/mysql/asyncmy.py,sha256=sAz0ctNETtEc_8vl03xroLTsEWS6CLNKcqjesm4Ne6Q,9828 +sqlalchemy/dialects/mysql/base.py,sha256=Zy_ZCzuMUeeFmaOPSjmVDP5oxBEw9898QTuxppQCoq8,120698 +sqlalchemy/dialects/mysql/cymysql.py,sha256=eXT1ry0w_qRxjiO24M980c-8PZ9qSsbhqBHntjEiKB0,2300 +sqlalchemy/dialects/mysql/dml.py,sha256=HXJMAvimJsqvhj3UZO4vW_6LkF5RqaKbHvklAjor7yU,7645 +sqlalchemy/dialects/mysql/enumerated.py,sha256=ipEPPQqoXfFwcywNdcLlZCEzHBtnitHRah1Gn6nItcg,8448 +sqlalchemy/dialects/mysql/expression.py,sha256=lsmQCHKwfPezUnt27d2kR6ohk4IRFCA64KBS16kx5dc,4097 +sqlalchemy/dialects/mysql/json.py,sha256=l6MEZ0qp8FgiRrIQvOMhyEJq0q6OqiEnvDTx5Cbt9uQ,2269 
+sqlalchemy/dialects/mysql/mariadb.py,sha256=kTfBLioLKk4JFFst4TY_iWqPtnvvQXFHknLfm89H2N8,853 +sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=VVRwKLb6GzDmitOM4wLNvmZw6RdhnIwkLl7IZfAmUy8,8734 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=qiQdfLPze3QHuASAZ9iqRzD0hDW8FbKoQnfAEQCF7tM,5675 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=9x_JiY4hj4tykG1ckuEGPyH4jCtsh4fgBhNukVnjUos,9658 +sqlalchemy/dialects/mysql/provision.py,sha256=4oGkClQ8jC3YLPF54sB4kCjFc8HRTwf5zl5zftAAXGo,3474 +sqlalchemy/dialects/mysql/pymysql.py,sha256=GUnSHd2M2uKjmN46Hheymtm26g7phEgwYOXrX0zLY8M,4083 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=072crI4qVyPhajYvHnsfFeSrNjLFVPIjBQKo5uyz5yk,4297 +sqlalchemy/dialects/mysql/reflection.py,sha256=TsRocAsRbAisEgu5NWSND7DZg9OS9ZqwHyaxLzlEgnU,22565 +sqlalchemy/dialects/mysql/reserved_words.py,sha256=Dm7FINIAkrKLoXmdu26SpE6V8LDCGyp734nmHV2tMd0,9154 +sqlalchemy/dialects/mysql/types.py,sha256=aPzx7hqqZ21aGwByEC-yWZUl6OpMvkbxwTqdN3OUGGI,24267 +sqlalchemy/dialects/oracle/__init__.py,sha256=p4-2gw7TT0bX_MoJXTGD4i8WHctYsK9kCRbkpzykBrc,1493 +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,, +sqlalchemy/dialects/oracle/base.py,sha256=Ng_T-Xl1rOHJO7mcnTbINnTVEQUjeLIKBl1DutaBRdY,118045 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=21xgnOZ8kCKrIfC9XlZM7R2zKZa86nsBHx4cv0Iw3x8,55290 +sqlalchemy/dialects/oracle/dictionary.py,sha256=7WMrbPkqo8ZdGjaEZyQr-5f2pajSOF1OTGb8P97z8-g,19519 +sqlalchemy/dialects/oracle/oracledb.py,sha256=vDKUuy4DExEMnXeidZL0wjuggJAqTkhXtSvxkvaEjbs,9485 +sqlalchemy/dialects/oracle/provision.py,sha256=O9ZpF4OG6Cx4mMzLRfZwhs8dZjrJETWR402n9c7726A,8304 +sqlalchemy/dialects/oracle/types.py,sha256=QK3hJvWzKnnCe3oD3rItwEEIwcoBze8qGg7VFOvVlIk,8231 +sqlalchemy/dialects/postgresql/__init__.py,sha256=rlGgUbemHlQWvsDjqWufCU-CioMPtSHRDgCd8G0v10E,3743 +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,, 
+sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=7TudtgsPiSB8O5kX8W8KxcNYR8t5h_UHb86b_ChL0P8,5696 +sqlalchemy/dialects/postgresql/array.py,sha256=3EWWhFJbw2xJfie1RAqtscecCIXSGZM4qmOipLYc1T0,13691 +sqlalchemy/dialects/postgresql/asyncpg.py,sha256=KGBdzxbHnqFpWOe6usS5yxuw2KK_KBqSpp6RA7F0Ua8,40232 +sqlalchemy/dialects/postgresql/base.py,sha256=SVALrrOKYJfrlelcsUM50neHQzVAWcwA6RxQG84YM24,175627 +sqlalchemy/dialects/postgresql/dml.py,sha256=L3G3bVL41DXirham5XRBXYOM4eefhqyGCzMyn8zCdI0,11212 +sqlalchemy/dialects/postgresql/ext.py,sha256=1bZ--iNh2O9ym7l2gXZX48yP3yMO4dqb9RpYro2Mj2Q,16262 +sqlalchemy/dialects/postgresql/hstore.py,sha256=otAx-RTDfpi_tcXkMuQV0JOIXtYgevgnsikLKKOkI6U,11541 +sqlalchemy/dialects/postgresql/json.py,sha256=-ffnp85fQBOyt0Bjb7XAupmOxloUdzFZZgixUG3Wj5w,11212 +sqlalchemy/dialects/postgresql/named_types.py,sha256=i2GwHI8V83AA2Gr87yyVCNBsm9mF99as54UlyOBs7IY,17101 +sqlalchemy/dialects/postgresql/operators.py,sha256=NsAaWun_tL3d_be0fs9YL6T4LPKK6crnmFxxIJHgyeY,2808 +sqlalchemy/dialects/postgresql/pg8000.py,sha256=qnMSG3brW6XiygF-vXr1JguSSNpd-xlHJRkHHCDWs-k,18637 +sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=nAKavWTE_4cqxiDKDTdo-ivkCxxRIlzD5GO9Wl1yrG4,8884 +sqlalchemy/dialects/postgresql/provision.py,sha256=yqyx-aDFO9l2YcL9f4T5HBP_Lnt5dHsMjpuXUG8mi7A,5762 +sqlalchemy/dialects/postgresql/psycopg.py,sha256=zOM1PfiGU1I-XnkF5N6pBEDvTLP0qZZg6YSWWNgd-Xw,22320 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=f6vuQ4BStwcdLRV2iVYwyqjksYSV-c_uJvCWawU6cfg,31601 +sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=M7wAYSL6Pvt-4nbfacAHGyyw4XMKJ_bQZ1tc1pBtIdg,1756 +sqlalchemy/dialects/postgresql/ranges.py,sha256=mPsXfEz3Ot0QebOqp5dt9mmy_SATwdDQ7wVe-Q5Bqqc,30252 +sqlalchemy/dialects/postgresql/types.py,sha256=yoBV_hSq6m93mtXmWPX8LoOORNqWyLC1zyckgu656BI,7323 +sqlalchemy/dialects/sqlite/__init__.py,sha256=lp9DIggNn349M-7IYhUA8et8--e8FRExWD2V_r1LJk4,1182 +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,, +sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=OMvxP2eWyqk5beF-sHhzxRmjzO4VCQp55q7NH2XPVTE,12305 +sqlalchemy/dialects/sqlite/base.py,sha256=irgfivk--Pf7nj2tIZHALGOWjeK4CDnTtvRg9vN0QjY,96794 +sqlalchemy/dialects/sqlite/dml.py,sha256=ZZ6RiyflrhtPwrgNQSYUCdUWobDnuXPN9yop0gJTm9c,8443 +sqlalchemy/dialects/sqlite/json.py,sha256=Eoplbb_4dYlfrtmQaI8Xddd2suAIHA-IdbDQYM-LIhs,2777 +sqlalchemy/dialects/sqlite/provision.py,sha256=UCpmwxf4IWlrpb2eLHGbPTpCFVbdI_KAh2mKtjiLYao,5632 +sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=OL2S_05DK9kllZj6DOz7QtEl7jI7syxjW6woS725ii4,5356 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=TAOqsHIjhbUZOF_Qk7UooiekkVZNhYJNduxlGQjokeA,27900 +sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239 +sqlalchemy/engine/__init__.py,sha256=Stb2oV6l8w65JvqEo6J4qtKoApcmOpXy3AAxQud4C1o,2818 +sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,, 
+sqlalchemy/engine/__pycache__/base.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/create.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/events.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/row.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-312.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-312.pyc,, +sqlalchemy/engine/_py_processors.py,sha256=j9i_lcYYQOYJMcsDerPxI0sVFBIlX5sqoYMdMJlgWPI,3744 +sqlalchemy/engine/_py_row.py,sha256=wSqoUFzLOJ1f89kgDb6sJm9LUrF5LMFpXPcK1vUsKcs,3787 +sqlalchemy/engine/_py_util.py,sha256=f2DI3AN1kv6EplelowesCVpwS8hSXNufRkZoQmJtSH8,2484 +sqlalchemy/engine/base.py,sha256=5n8SHmQh5Cr9DxGeRis12-_VQO4Cl7sM_FOnCBy2zD0,122207 +sqlalchemy/engine/characteristics.py,sha256=Qbvt4CPrggJ3GfxHl0hOAxopjnCQy-W_pjtwLIe-Q1g,2590 +sqlalchemy/engine/create.py,sha256=Lua52hd3e5H0a68rCVzrbwWZCnNKCef2Ew7FRchJK-c,32888 +sqlalchemy/engine/cursor.py,sha256=ErLMvqRMT8HJm-5RQeniC8-429cvXf8Sq3JSReLeVfI,74442 +sqlalchemy/engine/default.py,sha256=rZdc8JvEZCM4LtaWeJY3VsRrWMo_GDvfayK96nmGkvc,84065 +sqlalchemy/engine/events.py,sha256=c0unNFFiHzTAvkUtXoJaxzMFMDwurBkHiiUhuN8qluc,37381 +sqlalchemy/engine/interfaces.py,sha256=WE50MbuYGYhhMgF71GbfLVJAn2DDYEXiOOt6M3r-5z0,112814 +sqlalchemy/engine/mock.py,sha256=yvpxgFmRw5G4QsHeF-ZwQGHKES-HqQOucTxFtN1uzdk,4179 +sqlalchemy/engine/processors.py,sha256=XyfINKbo-2fjN-mW55YybvFyQMOil50_kVqsunahkNs,2379 +sqlalchemy/engine/reflection.py,sha256=FlT5kPpKm7Lah50GNt5XcnlJWojTL3LD_x0SoCF9kfY,75127 +sqlalchemy/engine/result.py,sha256=tGQX_zP5wlalOXXAV5BfljHd6bpSPePRXlju099TEe4,77756 +sqlalchemy/engine/row.py,sha256=S4d_WWD292B6_AnucKGG7E_6KFjEqP-BZAInrxGgicw,12080 +sqlalchemy/engine/strategies.py,sha256=DqFSWaXJPL-29Omot9O0aOcuGL8KmCGyOvnPGDkAJoE,442 +sqlalchemy/engine/url.py,sha256=dlbRISW9lMJ-Co_p1-TCXCBLMXGyrv7ROgll26FTe74,30558 +sqlalchemy/engine/util.py,sha256=hkEql1t19WHl6uzR55-F-Fs_VMCJ7p02KKQVNUDSXTk,5667 +sqlalchemy/event/__init__.py,sha256=KBrp622xojnC3FFquxa2JsMamwAbfkvzfv6Op0NKiYc,997 +sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/event/__pycache__/api.cpython-312.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-312.pyc,, +sqlalchemy/event/__pycache__/base.cpython-312.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-312.pyc,, +sqlalchemy/event/api.py,sha256=BUTAZjSlzvq4Hn2v2pihP_P1yo3lvCVDczK8lV_XJ80,8227 +sqlalchemy/event/attr.py,sha256=h8pFjHgyvLTeai6_LRQYk6ii-6M3PGgXFeZlPe-OJgo,20767 +sqlalchemy/event/base.py,sha256=LFcvzFaop51Im1IP1vz3BxFqQWXA90F2b4CMEV3JyKI,14980 +sqlalchemy/event/legacy.py,sha256=I5e9JLpRybIVlRi_ArEwymwbfx8vNns52v9QWaqVtA4,8211 +sqlalchemy/event/registry.py,sha256=LKbNsF5quf0DAGCw8jaGLmSf7p9ejWjT2y_Oz7FoDNo,10833 +sqlalchemy/events.py,sha256=k-ZD38aSPD29LYhED7CBqttp5MDVVx_YSaWC2-cu9ec,525 +sqlalchemy/exc.py,sha256=GRMcfOg64pRXX7nGsDt5iXfwqUdiMILstYse87vzmLI,24000 +sqlalchemy/ext/__init__.py,sha256=S1fGKAbycnQDV01gs-JWGaFQ9GCD4QHwKcU2wnugg_o,322 +sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,, 
+sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,, +sqlalchemy/ext/associationproxy.py,sha256=BPYsvBlh8KVxKPLhHc31_JTatk7bhrSJTqO50b3OLcA,65960 +sqlalchemy/ext/asyncio/__init__.py,sha256=1OqSxEyIUn7RWLGyO12F-jAUIvk1I6DXlVy80-Gvkds,1317 +sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,, +sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,, +sqlalchemy/ext/asyncio/base.py,sha256=HVpevdn2vcCrhWyeSDdP0JFm2SdEIitmBlXv82Yywbo,8937 +sqlalchemy/ext/asyncio/engine.py,sha256=vy8_HiMFP4HWXvqLSES0AMQIWWNUmrIxPIeVHa6QbEw,48058 +sqlalchemy/ext/asyncio/exc.py,sha256=8sII7VMXzs2TrhizhFQMzSfcroRtiesq8o3UwLfXSgQ,639 +sqlalchemy/ext/asyncio/result.py,sha256=pVBeJym7zjT4eMcduU3X2_g5qtnaKuy2kIJWVJGTv_o,30554 +sqlalchemy/ext/asyncio/scoping.py,sha256=xLdjNJ1VnlTmi5YAsZKQo3XfBzKpviKpLG_HeSSbv2g,52685 +sqlalchemy/ext/asyncio/session.py,sha256=sc9SKwqEgPiTIjlIyVa679F-Q2CWp6_8ucJOis_hYaQ,62998 +sqlalchemy/ext/automap.py,sha256=MTvMs97xALDugdgyY3JqiVeusrcKruN0BR7iSFhNZOg,61431 +sqlalchemy/ext/baked.py,sha256=H6T1il7GY84BhzPFj49UECSpZh_eBuiHomA-QIsYOYQ,17807 +sqlalchemy/ext/compiler.py,sha256=ONPoxoKD2yUS9R2-oOhmPsA7efm-Bs0BXo7HE1dGlsU,20391 +sqlalchemy/ext/declarative/__init__.py,sha256=20psLdFQbbOWfpdXHZ0CTY6I1k4UqXvKemNVu1LvPOI,1818 +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,, +sqlalchemy/ext/declarative/extensions.py,sha256=uCjN1GisQt54AjqYnKYzJdUjnGd2pZBW47WWdPlS7FE,19547 +sqlalchemy/ext/horizontal_shard.py,sha256=ITc2MU4pVc6t_HLR-T6tTMvYbwrl8cM9Bi02jI4e7FM,16766 +sqlalchemy/ext/hybrid.py,sha256=iU2GCE-PDHWa6hp3-nv9fNjuk46hzmRPJKxHEs7smxY,52514 +sqlalchemy/ext/indexable.py,sha256=UkTelbydKCdKelzbv3HWFFavoET9WocKaGRPGEOVfN8,11032 +sqlalchemy/ext/instrumentation.py,sha256=7908PLZGlD9tiq0nWS0A7iNNax0iGNsIchhJ_OoxOCg,15723 +sqlalchemy/ext/mutable.py,sha256=IX_H7vCBG834gFtCQzopuwgpmEDspBQBdb_nK-Dfrfo,37427 +sqlalchemy/ext/mypy/__init__.py,sha256=0WebDIZmqBD0OTq5JLtd_PmfF9JGxe4d4Qv3Ml3PKUg,241 +sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,, +sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,, +sqlalchemy/ext/mypy/apply.py,sha256=KZP0RAsQd65hyoS9bOBZ0UtgA1sZSd1wH9S0jyc_v08,10508 +sqlalchemy/ext/mypy/decl_class.py,sha256=gQOIMZOpKKdFW1TQjnEVM2RxGJbZzz5dFeMWRaIoFHc,17382 
+sqlalchemy/ext/mypy/infer.py,sha256=KVnmLFEVS33Al8pUKI7MJbJQu3KeveBUMl78EluBORw,19369 +sqlalchemy/ext/mypy/names.py,sha256=IQ16GLZFqKxfYxIZxkbTurBqOUYbUV-64V_DSRns1tc,10630 +sqlalchemy/ext/mypy/plugin.py,sha256=74ML8LI9xar0V86oCxnPFv5FQGEEfUzK64vOay4BKFs,9750 +sqlalchemy/ext/mypy/util.py,sha256=DLvKhM38mZk_1vvxbpenq-krTwdqdrZbg-o072TodCs,9408 +sqlalchemy/ext/orderinglist.py,sha256=TGYbsGH72wEZcFNQDYDsZg9OSPuzf__P8YX8_2HtYUo,14384 +sqlalchemy/ext/serializer.py,sha256=YemanWdeMVUDweHCnQc-iMO6mVVXNo2qQ5NK0Eb2_Es,6178 +sqlalchemy/future/__init__.py,sha256=q2mw-gxk_xoxJLEvRoyMha3vO1xSRHrslcExOHZwmPA,512 +sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/future/__pycache__/engine.cpython-312.pyc,, +sqlalchemy/future/engine.py,sha256=AgIw6vMsef8W6tynOTkxsjd6o_OQDwGjLdbpoMD8ue8,495 +sqlalchemy/inspection.py,sha256=fLQyZnIS7irH7dRX3AaUHCkCArjIxMboaJj0rjM3cnA,5137 +sqlalchemy/log.py,sha256=KPxnFg6D2kj2_-3k6njiwxqbqyj51lid-O8ZWZN3HIg,8623 +sqlalchemy/orm/__init__.py,sha256=ZYys5nL3RFUDCMOLFDBrRI52F6er3S1U1OY9TeORuKs,8463 +sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/context.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-312.pyc,, +sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,, +sqlalchemy/orm/_orm_constructors.py,sha256=hhvHmDbY2cbMedLbLRYVAh14XO1U1Co513qzIke0THY,99370 +sqlalchemy/orm/_typing.py,sha256=RhN8pyEBjpYzGaxezyZCPRNV82U--BMRTIDdIiogsyo,5024 +sqlalchemy/orm/attributes.py,sha256=2R9p508nfGDwj_qr4Hxbv_g2QDDQSp0-n_GCHcjqb2U,92578 +sqlalchemy/orm/base.py,sha256=4-AzRGLdOTCOpgg8larSWb6s-YOqbMLrgQPhw__51ys,27700 
+sqlalchemy/orm/bulk_persistence.py,sha256=vjx8DSvbeyTzDiuwaBonZ8TDc6kfUtBIEaXJZEuY6ac,69878 +sqlalchemy/orm/clsregistry.py,sha256=W9Su2JwNCvJuJiAyOT49ALUj59G36z4ugJimkKId530,17962 +sqlalchemy/orm/collections.py,sha256=t184F_YbXxZdwLOyv85l4M32gbJ-EQtbdMewnsmfdHg,52159 +sqlalchemy/orm/context.py,sha256=esaaJ7pYE4-SzXqeEE-Pv7BZLtnghwMY2dasW9N1vnY,111893 +sqlalchemy/orm/decl_api.py,sha256=YrN1zaL7yFAXFtzJYkzC6DUMaqfEenCdQnBGSK1Xn-E,63882 +sqlalchemy/orm/decl_base.py,sha256=WfKe0wyxrYr1-mmeiQgfIhRD_PHlFK-xDNsvYJ3R0T0,81621 +sqlalchemy/orm/dependency.py,sha256=lrTu8yfqLbz7U0iOHULR_Yk4C0z-2VDtFpuH7TaeynA,47583 +sqlalchemy/orm/descriptor_props.py,sha256=RennfXQ7bdnANgNF2SlB_-W_GlaZ26SsdhI4Yb_orYE,37176 +sqlalchemy/orm/dynamic.py,sha256=toSmHi9AF9nnbZmvLSMOSGR0NaG0YpszlLQx8KnxMbk,9798 +sqlalchemy/orm/evaluator.py,sha256=q292K5vdpP69G7Z9y1RqI5GFAk2diUPwnsXE8De_Wgw,11925 +sqlalchemy/orm/events.py,sha256=USrIP-2JlcIbmssvCkea1veL3eIIWC7WH7KDmTzqa-Q,127601 +sqlalchemy/orm/exc.py,sha256=w7MZkJMGGlu5J6jOFSmi9XXzc02ctnTv34jrEWpI-eM,7356 +sqlalchemy/orm/identity.py,sha256=jHdCxCpCyda_8mFOfGmN_Pr0XZdKiU-2hFZshlNxbHs,9249 +sqlalchemy/orm/instrumentation.py,sha256=wHdGTYpzND7nhgbpmiryLOXuWLIzCVii6jpfVWAi2RQ,24337 +sqlalchemy/orm/interfaces.py,sha256=0cK5udFIGusQ3cW697zXEElIPVafxW5UD5KL_6oNi8w,48404 +sqlalchemy/orm/loading.py,sha256=GKvLzmFklYgS89enTt_b2fvyDnu9rUAlYLL-Oa9eAeM,57417 +sqlalchemy/orm/mapped_collection.py,sha256=IsSSxYWuR396Qep4MYCe2VDmFr5sU_3AuDrhXDf_mVA,19704 +sqlalchemy/orm/mapper.py,sha256=j5r6ezZKW_WOcp_SIz0neMHMXiAMriVMdXbgvIhusfg,170969 +sqlalchemy/orm/path_registry.py,sha256=uZULekFBpy292nYuE0ON6vGcgM0Szs6a0iN7Wyya0_A,25938 +sqlalchemy/orm/persistence.py,sha256=2nQZpi9Mi-uKlJ-cwLodOMu-9gs4ZpYcUseIk4T210M,60989 +sqlalchemy/orm/properties.py,sha256=1gaf8QaGunBN2K2nEvMcucD4U1cOntJgsqJafLtHi7w,29095 +sqlalchemy/orm/query.py,sha256=QwUV1vm-6gyriYiZ4GIoNV3rdwdaCFhfdChTvVq00Oo,117714 +sqlalchemy/orm/relationships.py,sha256=NRQBABfdAWWqtRzhyE8Xq_uDeFPLxruSBNK646yv-vo,127619 +sqlalchemy/orm/scoping.py,sha256=Aeu34zEhcxcbS8NCzfgHzDBhlSXnlay5Ln8SPczdh9k,78821 +sqlalchemy/orm/session.py,sha256=bJTTXE7yB4qD6JSe0lvJv8QASf2OoUOoHQplt1ODHJk,193265 +sqlalchemy/orm/state.py,sha256=M60-bI0R0dzGHc-fBNGXX1V8osNTSC_WwtmlhcErSBM,37536 +sqlalchemy/orm/state_changes.py,sha256=qKYg7NxwrDkuUY3EPygAztym6oAVUFcP2wXn7QD3Mz4,6815 +sqlalchemy/orm/strategies.py,sha256=u6d7F5cAi2TC4NNYzFecoMYEUownpQcght5hIXVPJ7M,114052 +sqlalchemy/orm/strategy_options.py,sha256=267SGNfWJGlWjrdBffX8cQYrN7Ilk3Xk4kL4J150P7U,84161 +sqlalchemy/orm/sync.py,sha256=g7iZfSge1HgxMk9SKRgUgtHEbpbZ1kP_CBqOIdTOXqc,5779 +sqlalchemy/orm/unitofwork.py,sha256=fiVaqcymbDDHRa1NjS90N9Z466nd5pkJOEi1dHO6QLY,27033 +sqlalchemy/orm/util.py,sha256=j6BLkPtzZs88tINO21h-Dv3w2N1mlRZNabB_Q27U9ac,80340 +sqlalchemy/orm/writeonly.py,sha256=xh-KN8CiykLNQ_L9HE8QM1A822CJfiYopG9snSbWsz0,22329 +sqlalchemy/pool/__init__.py,sha256=qiDdq4r4FFAoDrK6ncugF_i6usi_X1LeJt-CuBHey0s,1804 +sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/pool/__pycache__/base.cpython-312.pyc,, +sqlalchemy/pool/__pycache__/events.cpython-312.pyc,, +sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,, +sqlalchemy/pool/base.py,sha256=2c614izCvoBw0xxu0LOhtziVB_G_MGns0jZu6ts9Bn8,52243 +sqlalchemy/pool/events.py,sha256=12gjivfkZkROTFYoIUS0kkvt3Ftj5ogBGavP9oSFPl4,13137 +sqlalchemy/pool/impl.py,sha256=2gdX23oZPLEqf3phI8yJod4ElB1BwFWkZCAtEXjXqbM,17718 +sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/schema.py,sha256=dKiWmgHYjcKQ4TiiD6vD0UMmIsD8u0Fsor1M9AAeGUs,3194 
+sqlalchemy/sql/__init__.py,sha256=UNa9EUiYWoPayf-FzNcwVgQvpsBdInPZfpJesAStN9o,5820 +sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/events.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-312.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,, +sqlalchemy/sql/_dml_constructors.py,sha256=YdBJex0MCVACv4q2nl_ii3uhxzwU6aDB8zAsratX5UQ,3867 +sqlalchemy/sql/_elements_constructors.py,sha256=oCro-h7QqOIbq4abduZP8EIw88HbHhPWPdyN8fzMuas,62558 +sqlalchemy/sql/_orm_types.py,sha256=T-vjcry4C1y0GToFKVxQCnmly_-Zsq4IO4SHN6bvUF4,625 +sqlalchemy/sql/_py_util.py,sha256=hiM9ePbRSGs60bAMxPFuJCIC_p9SQ1VzqXGiPchiYwE,2173 +sqlalchemy/sql/_selectable_constructors.py,sha256=kHm45Q6t2QCHIj2CS0SEZvD0MwKmK59lmg55NQMMKK8,18812 +sqlalchemy/sql/_typing.py,sha256=XDr2i-6GxPLBvtJC3ybzffQubUv_h3I2xRDUBlQLg30,12613 +sqlalchemy/sql/annotation.py,sha256=fEeyIJYId-xVHxp4VuHUzOnosa3iKLAHExnb8swq8v0,18271 +sqlalchemy/sql/base.py,sha256=vnkJFb9yJwTFX9Fd0CsISiP9m3PBfNQiBfMCmfUt69k,73928 +sqlalchemy/sql/cache_key.py,sha256=mTIa7UfTiUpeXDcVqn5vDBf-soj81fXOIHLPNjoQzhg,33124 +sqlalchemy/sql/coercions.py,sha256=BKj_pkSS4zBmc3tiS8wWmNhGWeyr66sK4QHJudqy1Lg,40489 +sqlalchemy/sql/compiler.py,sha256=WgnzXhjGvHxEC4VhIXgoeCoyKDFjRGfIwv0rH4Ug6vw,269842 +sqlalchemy/sql/crud.py,sha256=Xp3rX7N-YuecL14StUEc959a6oj61EG_fiyoiVYGAqY,56457 +sqlalchemy/sql/ddl.py,sha256=GJfH800KdwMhBbhp1f6FFyDVx-RjtG2WRGa8UTVaZI4,45542 +sqlalchemy/sql/default_comparator.py,sha256=1OiYbEojh6Vq8gy_jqY3b19dYZ0DvjFbyV5QeFiAs_o,16646 +sqlalchemy/sql/dml.py,sha256=BXUkqWPhnELKrhRKhfWZH4YZXKCCDCoHemVnfaew8Us,65728 +sqlalchemy/sql/elements.py,sha256=sv1D2nLZO0rmnmcrPuQcMAp2YCkTWv_r4GBG_xDlSbY,172784 +sqlalchemy/sql/events.py,sha256=iC_Q1Htm1Aobt5tOYxWfHHqNpoytrULORmUKcusH_-E,18290 +sqlalchemy/sql/expression.py,sha256=VMX-dLpsZYnVRJpYNDozDUgaj7iQ0HuewUKVefD57PE,7586 +sqlalchemy/sql/functions.py,sha256=e_29NAfsMBltrVVlgVUDt2CizHXrtS8dVBzvqnXgMJg,64248 +sqlalchemy/sql/lambdas.py,sha256=bG3D175kcQ3-9OWMLABSa6-diJyOfMf0ajr2w4mFdfM,49281 
+sqlalchemy/sql/naming.py,sha256=ZHs1qSV3ou8TYmZ92uvU3sfdklUQlIz4uhe330n05SU,6858 +sqlalchemy/sql/operators.py,sha256=eCADkzisFq3PPWUSN28HYe8sgpxFQe6kJUATPn-XsuM,76193 +sqlalchemy/sql/roles.py,sha256=VwwJacCiopxIHABAeNgLApDxo-xHPhJl7UfdLebMJGw,7686 +sqlalchemy/sql/schema.py,sha256=XO0fxfaBbnyJjlunZln_Xi8ecPM_l7WZowgyi-btAek,228211 +sqlalchemy/sql/selectable.py,sha256=_Bu-nSxEC3kSUosAsDsZZn7S7BC_HtbFainREIQK6D8,233041 +sqlalchemy/sql/sqltypes.py,sha256=Bv_xlg27fsYN0xPQ81LaQ-snVm2JqsX_Rz1IpkUBqpw,126509 +sqlalchemy/sql/traversals.py,sha256=Tvt6DFCGIqsQHSCfKzk2kAbi_bnEX3jh_8ZRdtOFoYE,33521 +sqlalchemy/sql/type_api.py,sha256=PhMBSDza_dT5RXSPNkpTJUqGj2ojI_2vSkOSWl5OuRQ,83883 +sqlalchemy/sql/util.py,sha256=LVLjLqpZuJ_DT6oUeE7WJbuCkIMEA8ZUEbO204Gp06U,48187 +sqlalchemy/sql/visitors.py,sha256=St-h4A5ZMVMGu9hFhbGZJUxa9KaEg7e2KaCMihVTU64,36427 +sqlalchemy/testing/__init__.py,sha256=VsrEHrORpAF5n7Vfl43YQgABh6EP1xBx_gHxs7pSXeE,3126 +sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-312.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,, +sqlalchemy/testing/assertions.py,sha256=gL0rA7CCZJbcVgvWOPV91tTZTRwQc1_Ta0-ykBn83Ew,31439 +sqlalchemy/testing/assertsql.py,sha256=xwo0ZuCN69Y0ElCZys3lnmPxHdmJ34E2Cns4V3g8MA0,16817 +sqlalchemy/testing/asyncio.py,sha256=fkdRz-E37d5OrQKw5hdjmglOTJyXGnJzaJpvNXOBLxg,3728 +sqlalchemy/testing/config.py,sha256=9HWOgvPLSRJIjRWa0wauo3klYafV9oEhp8qhjptvlVw,12030 +sqlalchemy/testing/engines.py,sha256=iB3dLHhSBLPbTB5lSYnnppnfB66IP2DmfW56mhH4sjI,13355 +sqlalchemy/testing/entities.py,sha256=IphFegPKbff3Un47jY6bi7_MQXy6qkx_50jX2tHZJR4,3354 +sqlalchemy/testing/exclusions.py,sha256=vZlqF8Jy_PpXc9e-yPAI8-UHFVM-UiRfMwI-WrWS-nU,12444 +sqlalchemy/testing/fixtures/__init__.py,sha256=dMClrIoxqlYIFpk2ia4RZpkbfxsS_3EBigr9QsPJ66g,1198 +sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,, +sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,, +sqlalchemy/testing/fixtures/base.py,sha256=9r_J2ksiTzClpUxW0TczICHrWR7Ny8PV8IsBz6TsGFI,12256 +sqlalchemy/testing/fixtures/mypy.py,sha256=nrfgQnzIZoRFJ47F-7IZpouvAq6mSQHb8A-TbiDxv5I,11845 +sqlalchemy/testing/fixtures/orm.py,sha256=8EFbnaBbXX_Bf4FcCzBUaAHgyVpsLGBHX16SGLqE3Fg,6095 +sqlalchemy/testing/fixtures/sql.py,sha256=MFOuYBUyPIpHJzjRCHL9vU-IT4bD6LXGGMvsp0v1FY8,15704 +sqlalchemy/testing/pickleable.py,sha256=U9mIqk-zaxq9Xfy7HErP7UrKgTov-A3QFnhZh-NiOjI,2833 +sqlalchemy/testing/plugin/__init__.py,sha256=79F--BIY_NTBzVRIlJGgAY5LNJJ3cD19XvrAo4X0W9A,247 +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,, 
+sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,, +sqlalchemy/testing/plugin/bootstrap.py,sha256=oYScMbEW4pCnWlPEAq1insFruCXFQeEVBwo__i4McpU,1685 +sqlalchemy/testing/plugin/plugin_base.py,sha256=IR2tLVvW7dbAqagFYwUjsc2X1oVIRVp1GM_b6tZTQkw,21581 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=xasjXXEMsT4RtKR7WzmEqzYXPxz83KSRaM2CwJQEQK8,27546 +sqlalchemy/testing/profiling.py,sha256=PbuPhRFbauFilUONeY3tV_Y_5lBkD7iCa8VVyH2Sk9Y,10148 +sqlalchemy/testing/provision.py,sha256=zXsw2D2Xpmw_chmYLsE1GXQqKQ-so3V8xU_joTcKan0,14619 +sqlalchemy/testing/requirements.py,sha256=N9pSj7z2wVMkBif-DQfPVa_cl9k6p9g_J5FY1OsWtrY,51817 +sqlalchemy/testing/schema.py,sha256=lr4GkGrGwagaHMuSGzWdzkMaj3HnS7dgfLLWfxt__-U,6513 +sqlalchemy/testing/suite/__init__.py,sha256=Y5DRNG0Yl1u3ypt9zVF0Z9suPZeuO_UQGLl-wRgvTjU,722 +sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,, +sqlalchemy/testing/suite/test_cte.py,sha256=6zBC3W2OwX1Xs-HedzchcKN2S7EaLNkgkvV_JSZ_Pq0,6451 +sqlalchemy/testing/suite/test_ddl.py,sha256=1Npkf0C_4UNxphthAGjG078n0vPEgnSIHpDu5MfokxQ,12031 +sqlalchemy/testing/suite/test_deprecations.py,sha256=BcJxZTcjYqeOAENVElCg3hVvU6fkGEW3KGBMfnW8bng,5337 +sqlalchemy/testing/suite/test_dialect.py,sha256=EH4ZQWbnGdtjmx5amZtTyhYmrkXJCvW1SQoLahoE7uk,22923 +sqlalchemy/testing/suite/test_insert.py,sha256=8ASo87s2pvaFkYZbmx5zYDGsRloyGpI9Zo5ut4ttM9g,18557 +sqlalchemy/testing/suite/test_reflection.py,sha256=_44jrB9iQ0PbQ3Aj2nz45eJIkDmCGJ5I8r4Q2-Km9Ww,106458 +sqlalchemy/testing/suite/test_results.py,sha256=NQ23m8FDVd0ub751jN4PswGoAhk5nrqvjHvpYULZXnc,15937 +sqlalchemy/testing/suite/test_rowcount.py,sha256=Ozu9NmGrsMWROGNdtE-KKOaa_WMHnvSy-qcnAYpG20Y,7903 +sqlalchemy/testing/suite/test_select.py,sha256=FvMFYQW9IJpDWGYZiJk46is6YrtmdSghBdTjZCG8T0Y,58574 +sqlalchemy/testing/suite/test_sequence.py,sha256=66bCoy4xo99GBSaX6Hxb88foANAykLGRz1YEKbvpfuA,9923 +sqlalchemy/testing/suite/test_types.py,sha256=rFmTOg6XuMch9L2-XthfLJRCTTwpZbMfrNss2g09gmc,65677 +sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=c3_eIxLyORuSOhNDP0jWKxPyUf3SwMFpdalxtquwqlM,6141 +sqlalchemy/testing/suite/test_update_delete.py,sha256=3aaM9LtV_qC8NzMcOn2BZHxnQ5eWJnFKOx6iQpGArSw,3914 +sqlalchemy/testing/util.py,sha256=BFiSp3CEX95Dr-vv4l_7ZRu5vjZi9hjjnp-JKNfuS5E,14080 +sqlalchemy/testing/warnings.py,sha256=fJ-QJUY2zY2PPxZJKv9medW-BKKbCNbA4Ns_V3YwFXM,1546 +sqlalchemy/types.py,sha256=cQFM-hFRmaf1GErun1qqgEs6QxufvzMuwKqj9tuMPpE,3168 +sqlalchemy/util/__init__.py,sha256=B3bedg-LSQEscwqgmYYU-VENUX8_zAE3q9vb7tkfJNY,8277 +sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,, 
+sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,, +sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,, +sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,, +sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-312.pyc,, +sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,, +sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-312.pyc,, +sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-312.pyc,, +sqlalchemy/util/__pycache__/typing.cpython-312.pyc,, +sqlalchemy/util/_collections.py,sha256=py6nwBAgPC_pmbghrobaCVZZxmiVuLgqrbjdgqP_3jU,20111 +sqlalchemy/util/_concurrency_py3k.py,sha256=3jieKUZN8sS-RS6L0a2BUCUm_pqQMldbuFCxZ7xr1-Q,8602 +sqlalchemy/util/_has_cy.py,sha256=wCQmeSjT3jaH_oxfCEtGk-1g0gbSpt5MCK5UcWdMWqk,1247 +sqlalchemy/util/_py_collections.py,sha256=AZEigbk8jfiKjH3Rqy6BBmRwe8csZuRZ5lz2DYS-Cls,16738 +sqlalchemy/util/compat.py,sha256=R6bpBydldtbr6h7oJePihQxFb7jKiI-YDsK465MSOzk,8714 +sqlalchemy/util/concurrency.py,sha256=mhwHm0utriD14DRqxTBWgIW7QuwdSEiLgLiJdUjiR3w,2427 +sqlalchemy/util/deprecations.py,sha256=YBwvvYhSB8LhasIZRKvg_-WNoVhPUcaYI1ZrnjDn868,11971 +sqlalchemy/util/langhelpers.py,sha256=HEdM_PcZxoSXH9qlcvf5Vp1k4xKU2XJBwn0Aq9RVmeU,64980 +sqlalchemy/util/preloaded.py,sha256=az7NmLJLsqs0mtM9uBkIu10-841RYDq8wOyqJ7xXvqE,5904 +sqlalchemy/util/queue.py,sha256=_5qHVIvWluQQt0jiDHnxuT2FhUxnxH2cKDxCmwmTFe0,10205 +sqlalchemy/util/tool_support.py,sha256=9braZyidaiNrZVsWtGmkSmus50-byhuYrlAqvhjcmnA,6135 +sqlalchemy/util/topological.py,sha256=N3M3Le7KzGHCmqPGg0ZBqixTDGwmFLhOZvBtc4rHL_g,3458 +sqlalchemy/util/typing.py,sha256=-YKRlPyicpM94qhFAUj9eYnv2hnKHkhv0Gxq7jNPAvE,16255 diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/WHEEL b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/WHEEL new file mode 100644 index 0000000..bd099b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.25.dist-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc b/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc new file mode 100644 index 0000000..b840d6f Binary files /dev/null and b/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc new file mode 100644 index 0000000..ea9b0f9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..156ee43 Binary files /dev/null and b/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/_yaml/__init__.py b/venv/lib/python3.12/site-packages/_yaml/__init__.py new file mode 100644 index 0000000..7baa8c4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/_yaml/__init__.py @@ -0,0 +1,33 @@ +# This is a stub package designed to roughly emulate the _yaml +# extension module, which previously existed as a standalone module +# and has been moved into the `yaml` package namespace. +# It does not perfectly mimic its old counterpart, but should get +# close enough for anyone who's relying on it even when they shouldn't. +import yaml + +# in some circumstances, the yaml module we imported may be from a different version, so we need +# to tread carefully when poking at it here (it may not have the attributes we expect) +if not getattr(yaml, '__with_libyaml__', False): + from sys import version_info + + exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError + raise exc("No module named '_yaml'") +else: + from yaml._yaml import * + import warnings + warnings.warn( + 'The _yaml extension module is now located at yaml._yaml' + ' and its location is subject to change. To use the' + ' LibYAML-based parser and emitter, import from `yaml`:' + ' `from yaml import CLoader as Loader, CDumper as Dumper`.', + DeprecationWarning + ) + del warnings + # Don't `del yaml` here because yaml is actually an existing + # namespace member of _yaml. + +__name__ = '_yaml' +# If the module is top-level (i.e. not a part of any specific package) +# then the attribute should be set to ''. +# https://docs.python.org/3.8/library/types.html +__package__ = '' diff --git a/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a151671 Binary files /dev/null and b/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/LICENSE.txt b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/LICENSE.txt new file mode 100644 index 0000000..e497a32 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/LICENSE.txt @@ -0,0 +1,13 @@ + Copyright aio-libs contributors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License.
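The _yaml stub above deprecates importing from `_yaml` directly and points users at the top-level `yaml` package instead. A minimal usage sketch of the import pattern its DeprecationWarning recommends, with a pure-Python fallback for PyYAML builds that lack LibYAML (`CLoader`/`CDumper` exist only when `yaml.__with_libyaml__` is true; the sample document is arbitrary):

    import yaml

    try:
        # Fast LibYAML-based parser/emitter; present only when PyYAML
        # was built against the libyaml C library
        from yaml import CLoader as Loader, CDumper as Dumper
    except ImportError:
        # Pure-Python fallback; same API, slower
        from yaml import Loader, Dumper

    data = yaml.load("a: 1\nb: [2, 3]", Loader=Loader)
    print(yaml.dump(data, Dumper=Dumper))

Either branch yields a working Loader/Dumper pair, which is why the stub's warning steers code away from probing `_yaml` itself.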
diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/METADATA new file mode 100644 index 0000000..e6d31c1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/METADATA @@ -0,0 +1,243 @@ +Metadata-Version: 2.1 +Name: aiohttp +Version: 3.9.1 +Summary: Async http client/server framework (asyncio) +Home-page: https://github.com/aio-libs/aiohttp +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2 +Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org +Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp +Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html +Project-URL: Docs: RTD, https://docs.aiohttp.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: AsyncIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Requires-Dist: attrs >=17.3.0 +Requires-Dist: multidict <7.0,>=4.5 +Requires-Dist: yarl <2.0,>=1.0 +Requires-Dist: frozenlist >=1.1.1 +Requires-Dist: aiosignal >=1.1.2 +Requires-Dist: async-timeout <5.0,>=4.0 ; python_version < "3.11" +Provides-Extra: speedups +Requires-Dist: brotlicffi ; (platform_python_implementation != "CPython") and extra == 'speedups' +Requires-Dist: Brotli ; (platform_python_implementation == "CPython") and extra == 'speedups' +Requires-Dist: aiodns ; (sys_platform == "linux" or sys_platform == "darwin") and extra == 'speedups' + +================================== +Async http client/server framework +================================== + +.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg + :height: 64px + :width: 64px + :alt: aiohttp logo + +| + +.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI + :alt: GitHub Actions status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg + :target: https://codecov.io/gh/aio-libs/aiohttp + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiohttp.svg + :target: https://pypi.org/project/aiohttp + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest + :target: https://docs.aiohttp.org/ + :alt: Latest Read The Docs + +.. 
image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat + :target: https://matrix.to/#/%23aio-libs:matrix.org + :alt: Matrix Room — #aio-libs:matrix.org + +.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat + :target: https://matrix.to/#/%23aio-libs-space:matrix.org + :alt: Matrix Space — #aio-libs-space:matrix.org + + +Key Features +============ + +- Supports both client and server side of HTTP protocol. +- Supports both client and server Web-Sockets out-of-the-box and avoids + Callback Hell. +- Provides Web-server with middleware and pluggable routing. + + +Getting started +=============== + +Client +------ + +To get something from the web: + +.. code-block:: python + + import aiohttp + import asyncio + + async def main(): + + async with aiohttp.ClientSession() as session: + async with session.get('http://python.org') as response: + + print("Status:", response.status) + print("Content-type:", response.headers['content-type']) + + html = await response.text() + print("Body:", html[:15], "...") + + asyncio.run(main()) + +This prints: + +.. code-block:: + + Status: 200 + Content-type: text/html; charset=utf-8 + Body: ... + +Coming from `requests `_? Read `why we need so many lines `_. + +Server +------ + +An example using a simple server: + +.. code-block:: python + + # examples/server_simple.py + from aiohttp import web + + async def handle(request): + name = request.match_info.get('name', "Anonymous") + text = "Hello, " + name + return web.Response(text=text) + + async def wshandle(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + async for msg in ws: + if msg.type == web.WSMsgType.text: + await ws.send_str("Hello, {}".format(msg.data)) + elif msg.type == web.WSMsgType.binary: + await ws.send_bytes(msg.data) + elif msg.type == web.WSMsgType.close: + break + + return ws + + + app = web.Application() + app.add_routes([web.get('/', handle), + web.get('/echo', wshandle), + web.get('/{name}', handle)]) + + if __name__ == '__main__': + web.run_app(app) + + +Documentation +============= + +https://aiohttp.readthedocs.io/ + + +Demos +===== + +https://github.com/aio-libs/aiohttp-demos + + +External links +============== + +* `Third party libraries + `_ +* `Built with aiohttp + `_ +* `Powered by aiohttp + `_ + +Feel free to make a Pull Request for adding your link to these pages! + + +Communication channels +====================== + +*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions + +*gitter chat* https://gitter.im/aio-libs/Lobby + +We support `Stack Overflow +`_. +Please add the *aiohttp* tag to your question there. + +Requirements +============ + +- async-timeout_ +- attrs_ +- multidict_ +- yarl_ +- frozenlist_ + +Optionally you may install the aiodns_ library (highly recommended for the sake of speed). + +.. _aiodns: https://pypi.python.org/pypi/aiodns +.. _attrs: https://github.com/python-attrs/attrs +.. _multidict: https://pypi.python.org/pypi/multidict +.. _frozenlist: https://pypi.org/project/frozenlist/ +.. _yarl: https://pypi.python.org/pypi/yarl +.. _async-timeout: https://pypi.python.org/pypi/async_timeout + +License +======= + +``aiohttp`` is offered under the Apache 2 license.
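The `Requires-Dist` lines in the METADATA above use PEP 508 environment markers (`python_version < "3.11"`, `platform_python_implementation == "CPython"`) plus an `extra == 'speedups'` clause, so installers pull `async-timeout` only on older interpreters and the Brotli speedups only when `aiohttp[speedups]` is requested. A short sketch of how such markers evaluate, assuming the third-party `packaging` library is installed (the library pip itself vendors for this purpose):

    from packaging.requirements import Requirement

    # Conditional dependency: only needed before Python 3.11
    req = Requirement('async-timeout<5.0,>=4.0; python_version < "3.11"')
    print(req.name, req.specifier)   # async-timeout <5.0,>=4.0
    print(req.marker.evaluate())     # True on 3.10, False on 3.12

    # Extras are supplied explicitly when evaluating the marker;
    # the rest of the environment defaults to the running interpreter
    extra_req = Requirement(
        'Brotli; platform_python_implementation == "CPython" and extra == "speedups"'
    )
    print(extra_req.marker.evaluate({"extra": "speedups"}))

During `pip install aiohttp[speedups]`, the resolver sets `extra` to `"speedups"` while evaluating these markers, which is what activates the optional dependencies.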
+ + +Keepsafe +======== + +The aiohttp community would like to thank Keepsafe +(https://www.getkeepsafe.com) for its support in the early days of +the project. + + +Source code +=========== + +The latest developer version is available in a GitHub repository: +https://github.com/aio-libs/aiohttp + +Benchmarks +========== + +If you are interested in efficiency, the AsyncIO community maintains a +list of benchmarks on the official wiki: +https://github.com/python/asyncio/wiki/Benchmarks diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/RECORD new file mode 100644 index 0000000..0206aa8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/RECORD @@ -0,0 +1,120 @@ +aiohttp-3.9.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aiohttp-3.9.1.dist-info/LICENSE.txt,sha256=n4DQ2311WpQdtFchcsJw7L2PCCuiFd3QlZhZQu2Uqes,588 +aiohttp-3.9.1.dist-info/METADATA,sha256=62Q_RgoSLj5AlXdi63xBFYgkT11mgnluERhDIRpWHjY,7357 +aiohttp-3.9.1.dist-info/RECORD,, +aiohttp-3.9.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +aiohttp-3.9.1.dist-info/WHEEL,sha256=vJMp7mUkE-fMIYyE5xJ9Q2cYPnWVgHf20clVdwMSXAg,152 +aiohttp-3.9.1.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 +aiohttp/.hash/_cparser.pxd.hash,sha256=hYa9Vje-oMs2eh_7MfCPOh2QW_1x1yCjcZuc7AmwLd0,121 +aiohttp/.hash/_find_header.pxd.hash,sha256=_mbpD6vM-CVCKq3ulUvsOAz5Wdo88wrDzfpOsMQaMNA,125 +aiohttp/.hash/_helpers.pyi.hash,sha256=Ew4BZDc2LqFwszgZZUHHrJvw5P8HBhJ700n1Ntg52hE,121 +aiohttp/.hash/_helpers.pyx.hash,sha256=5JQ6BlMBE4HnRaCGdkK9_wpL3ZSWpU1gyLYva0Wwx2c,121 +aiohttp/.hash/_http_parser.pyx.hash,sha256=IRBIywLdT4-0kqWhb0g0WPjh6Gu10TreFmLI8JQF-L8,125 +aiohttp/.hash/_http_writer.pyx.hash,sha256=3Qg3T3D-Ud73elzPHBufK0yEu9tP5jsu6g-aPKQY9gE,125 +aiohttp/.hash/_websocket.pyx.hash,sha256=M97f-Yti-4vnE4GNTD1s_DzKs-fG_ww3jle6EUvixnE,123 +aiohttp/.hash/hdrs.py.hash,sha256=2oEszMWjYFTHoF2w4OcFCoM7osv4vY9KLLJCu9HP0xI,116 +aiohttp/__init__.py,sha256=EnBN-3iIseCzm7llWOVNSbPpNTarRN1dF2SSgRKtB-g,7782 +aiohttp/__pycache__/__init__.cpython-312.pyc,, +aiohttp/__pycache__/abc.cpython-312.pyc,, +aiohttp/__pycache__/base_protocol.cpython-312.pyc,, +aiohttp/__pycache__/client.cpython-312.pyc,, +aiohttp/__pycache__/client_exceptions.cpython-312.pyc,, +aiohttp/__pycache__/client_proto.cpython-312.pyc,, +aiohttp/__pycache__/client_reqrep.cpython-312.pyc,, +aiohttp/__pycache__/client_ws.cpython-312.pyc,, +aiohttp/__pycache__/compression_utils.cpython-312.pyc,, +aiohttp/__pycache__/connector.cpython-312.pyc,, +aiohttp/__pycache__/cookiejar.cpython-312.pyc,, +aiohttp/__pycache__/formdata.cpython-312.pyc,, +aiohttp/__pycache__/hdrs.cpython-312.pyc,, +aiohttp/__pycache__/helpers.cpython-312.pyc,, +aiohttp/__pycache__/http.cpython-312.pyc,, +aiohttp/__pycache__/http_exceptions.cpython-312.pyc,, +aiohttp/__pycache__/http_parser.cpython-312.pyc,, +aiohttp/__pycache__/http_websocket.cpython-312.pyc,, +aiohttp/__pycache__/http_writer.cpython-312.pyc,, +aiohttp/__pycache__/locks.cpython-312.pyc,, +aiohttp/__pycache__/log.cpython-312.pyc,, +aiohttp/__pycache__/multipart.cpython-312.pyc,, +aiohttp/__pycache__/payload.cpython-312.pyc,, +aiohttp/__pycache__/payload_streamer.cpython-312.pyc,, +aiohttp/__pycache__/pytest_plugin.cpython-312.pyc,, +aiohttp/__pycache__/resolver.cpython-312.pyc,, +aiohttp/__pycache__/streams.cpython-312.pyc,, +aiohttp/__pycache__/tcp_helpers.cpython-312.pyc,, 
+aiohttp/__pycache__/test_utils.cpython-312.pyc,, +aiohttp/__pycache__/tracing.cpython-312.pyc,, +aiohttp/__pycache__/typedefs.cpython-312.pyc,, +aiohttp/__pycache__/web.cpython-312.pyc,, +aiohttp/__pycache__/web_app.cpython-312.pyc,, +aiohttp/__pycache__/web_exceptions.cpython-312.pyc,, +aiohttp/__pycache__/web_fileresponse.cpython-312.pyc,, +aiohttp/__pycache__/web_log.cpython-312.pyc,, +aiohttp/__pycache__/web_middlewares.cpython-312.pyc,, +aiohttp/__pycache__/web_protocol.cpython-312.pyc,, +aiohttp/__pycache__/web_request.cpython-312.pyc,, +aiohttp/__pycache__/web_response.cpython-312.pyc,, +aiohttp/__pycache__/web_routedef.cpython-312.pyc,, +aiohttp/__pycache__/web_runner.cpython-312.pyc,, +aiohttp/__pycache__/web_server.cpython-312.pyc,, +aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc,, +aiohttp/__pycache__/web_ws.cpython-312.pyc,, +aiohttp/__pycache__/worker.cpython-312.pyc,, +aiohttp/_cparser.pxd,sha256=8jGIg-VJ9p3llwCakUYDsPGxA4HiZe9dmK9Jmtlz-5g,4318 +aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68 +aiohttp/_headers.pxi,sha256=n701k28dVPjwRnx5j6LpJhLTfj7dqu2vJt7f0O60Oyg,2007 +aiohttp/_helpers.cpython-312-x86_64-linux-gnu.so,sha256=xQukDyoc-AzBs0yYHDXWrw8ICGrg8fQVexTfVBsICt0,613288 +aiohttp/_helpers.pyi,sha256=ZoKiJSS51PxELhI2cmIr5737YjjZcJt7FbIRO3ym1Ss,202 +aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049 +aiohttp/_http_parser.cpython-312-x86_64-linux-gnu.so,sha256=ZvlhyVbDbz3UHTWTPeqmGsn1-cW0syQ4rgr3vD3ftrI,2788720 +aiohttp/_http_parser.pyx,sha256=fzKwwVlcGnGVeiGOzo05d-2Rccqtl9-PzYKqgK3fxdI,28058 +aiohttp/_http_writer.cpython-312-x86_64-linux-gnu.so,sha256=NgI7lQoig5xSvNGFFdHI3ZJpS2emu9zj78yOe1LgYKE,503128 +aiohttp/_http_writer.pyx,sha256=aIHAp8g4ZV5kbGRdmZce-vXjELw2M6fGKyJuOdgYQqw,4575 +aiohttp/_websocket.cpython-312-x86_64-linux-gnu.so,sha256=c3irDrlStRTO9xfB94T2KQkO_17dMYNZ-9-JkufCH6Q,278168 +aiohttp/_websocket.pyx,sha256=1XuOSNDCbyDrzF5uMA2isqausSs8l2jWTLDlNDLM9Io,1561 +aiohttp/abc.py,sha256=nAyCo7BadpvvExxO1khNYqDpYt40Qp1zFmepDfZqq28,5540 +aiohttp/base_protocol.py,sha256=5JUyuIGwKf7sFhf0YLAnk36_hkSIxBnP4hN09RdMGGk,2741 +aiohttp/client.py,sha256=6s0n3HM4CRk4Gl7f-umGBTGp2MiVA7yjMnGLhSkxZXs,46918 +aiohttp/client_exceptions.py,sha256=4NmjMG2-P__buR9xfuz8_w0pvbXzr2oyuo62eQxsea8,9445 +aiohttp/client_proto.py,sha256=eHQjoiZVvm1m31Vcj1H2huV-zwHZwEl71km5l-p22aE,8624 +aiohttp/client_reqrep.py,sha256=xfJsqTzkfmRfAzChxp1za6NcfU2GDo1XXkEhTnJDik4,39756 +aiohttp/client_ws.py,sha256=nNrwu1wA0U3B0cNsVr61QfV2S60bbKfaZXHfW7klFl4,11010 +aiohttp/compression_utils.py,sha256=GCkBNJqrybMhiTQGwqqhORnaTLpRFZD_-UvRtnZ5lEQ,5015 +aiohttp/connector.py,sha256=YnUCuZQSgseKsUbo2jJH7mi0nhZD_8A6UQ5Ll3GS37k,52834 +aiohttp/cookiejar.py,sha256=PdvsOiDasDYYUOPaaAfuuFJzR4CJyHHjut02YiZ_N8M,14015 +aiohttp/formdata.py,sha256=q2gpeiM9NFsl_eSFVxHZ7Qez6RbM8_BujERMkooQkx0,6106 +aiohttp/hdrs.py,sha256=uzn5agn_jXid2h-ky6Y0ZAQ8BrPeTGLDGr-weiMctso,4613 +aiohttp/helpers.py,sha256=hYm60xCxbJCdtdtLhTt7uspQ_9HPT27gTBx2q9Fu1Zk,30255 +aiohttp/http.py,sha256=8o8j8xH70OWjnfTWA9V44NR785QPxEPrUtzMXiAVpwc,1842 +aiohttp/http_exceptions.py,sha256=7LOFFUwq04fZsnZA-NP5nukd6c2i8daM8-ejj3ndbSQ,2716 +aiohttp/http_parser.py,sha256=99kVO47hO22HQV0B4Y-1XhtOK4LTISOOs2d8c9yOGqQ,35166 +aiohttp/http_websocket.py,sha256=5qBvfvbt6f24AptTHud-T99LEnpwbGQNLMB8wGcfs9c,26704 +aiohttp/http_writer.py,sha256=fxpyRj_S3WcBl9fxxF05t8YYAUA-0jW5b_PjVSluT3Y,5933 +aiohttp/locks.py,sha256=wRYFo1U82LwBBdqwU24JEPaoTAlKaaJd2FtfDKhkTb4,1136 
+aiohttp/log.py,sha256=BbNKx9e3VMIm0xYjZI0IcBBoS7wjdeIeSaiJE7-qK2g,325 +aiohttp/multipart.py,sha256=rDZYg-I-530nIEq3-U4DF2Q-fl0E7mk5YEmejzT1bak,32492 +aiohttp/payload.py,sha256=IV5HwxYqgUVY_SiyPjzUQ_YUIYvQaGkrmLmZlvSzDeM,13582 +aiohttp/payload_streamer.py,sha256=eAS8S-UWfLkEMavRjP2Uu9amC3PnbV79wHTNDoRmYn8,2087 +aiohttp/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +aiohttp/pytest_plugin.py,sha256=3IwpuxtFiUVFGS_ZitWuqvECSGgXQWvCW312B2TaVLY,11605 +aiohttp/resolver.py,sha256=8peXjB482v0hg1ESn87op6f-UeLXk_fAMxQo_23Ek6M,5070 +aiohttp/streams.py,sha256=vNCy0k5R7XlnSfsyTEQAkBD4Q9tNZgNm76Gqlw8Tjok,20836 +aiohttp/tcp_helpers.py,sha256=BSadqVWaBpMFDRWnhaaR941N9MiDZ7bdTrxgCb0CW-M,961 +aiohttp/test_utils.py,sha256=t5ibahuV6klkUMLdWuJhlSvzwtIRprWZqnkdHt5XJYU,20205 +aiohttp/tracing.py,sha256=Kz9u3YGTegGebYM2EMhG9RKT6-ABcsHtM7J-Qvwyus8,15152 +aiohttp/typedefs.py,sha256=8pDFTXt-5sYdzE4JsRH6UjAQyURnfZ0ueeEtgQccQZU,1491 +aiohttp/web.py,sha256=HFTQaoYVK5pM3YmxNJtZl9fGrRIdFs_Nhloxe7_lJj0,19263 +aiohttp/web_app.py,sha256=sUqIpip4BUL_pwc-Qs7IWDrFpeBNpaD8ocKFGfs9B0U,18351 +aiohttp/web_exceptions.py,sha256=7nIuiwhZ39vJJ9KrWqArA5QcWbUdqkz2CLwEpJapeN8,10360 +aiohttp/web_fileresponse.py,sha256=VRmCEr-qz7hzBy1TPa1wzid2DkgtQdIDjA18amkmRO4,10705 +aiohttp/web_log.py,sha256=DOfOxGyh2U7K5K_w6O7ILdfGcs4qOdzHxOwj2-k3c6c,7801 +aiohttp/web_middlewares.py,sha256=imxf1cfCKvfkv_jQLfTNNs_hA95oLnapRcPl-2aDsdA,4052 +aiohttp/web_protocol.py,sha256=REP-s4onglMYW2XZ5nqPCWkK0vt_2f8FiP4MFQXdd3A,23064 +aiohttp/web_request.py,sha256=TGTsWNNpGSL49Q-uV101ecX9hLZMm5PFs4mLxGkbkHc,28776 +aiohttp/web_response.py,sha256=t8mNgT5nddHLpsgZhUkqbq0NDsipmdGzfZUB5pCP_Yo,27749 +aiohttp/web_routedef.py,sha256=EFk3v1dcFnLimTT5z0JSBO3PShF0w9sIzfK9iJd-LNs,6152 +aiohttp/web_runner.py,sha256=AM3klOcr72AZVGG9-LjYo8vJdHBWgNzDLsmi_aoKfAU,11736 +aiohttp/web_server.py,sha256=5P-9uPCoPEDkK9ILbvEXmkkJWPhnTxBzdwAXwveyyDk,2587 +aiohttp/web_urldispatcher.py,sha256=sblBVycAhKVFPRbtfbZGsrPivLL0sskeE3LRPK2Deec,39557 +aiohttp/web_ws.py,sha256=eGjrE3_lUbv9kpYZluZFvdCfvahi5O4-fF7hWgyEHQk,18039 +aiohttp/worker.py,sha256=bkozEd2rAzQS0qs4knnnplOmaZ4TNdYtqWXSXx9djEc,7965 diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/WHEEL new file mode 100644 index 0000000..bd099b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 + diff --git a/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/top_level.txt new file mode 100644 index 0000000..ee4ba4f --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp-3.9.1.dist-info/top_level.txt @@ -0,0 +1 @@ +aiohttp diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/_cparser.pxd.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_cparser.pxd.hash new file mode 100644 index 0000000..65e3d4b --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_cparser.pxd.hash @@ -0,0 +1 @@ +f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd diff --git 
a/venv/lib/python3.12/site-packages/aiohttp/.hash/_find_header.pxd.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_find_header.pxd.hash new file mode 100644 index 0000000..f006c2d --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_find_header.pxd.hash @@ -0,0 +1 @@ +d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/_helpers.pyi.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_helpers.pyi.hash new file mode 100644 index 0000000..6a30d63 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_helpers.pyi.hash @@ -0,0 +1 @@ +6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/_helpers.pyx.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_helpers.pyx.hash new file mode 100644 index 0000000..8f38727 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_helpers.pyx.hash @@ -0,0 +1 @@ +5de2db35fb795ffe227e2f1007c8ba4f2ad1b9aca28cc48edc80c779203cf6e3 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/_http_parser.pyx.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_http_parser.pyx.hash new file mode 100644 index 0000000..8e5a141 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_http_parser.pyx.hash @@ -0,0 +1 @@ +7f32b0c1595c1a71957a218ece8d3977ed9171caad97df8fcd82aa80addfc5d2 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/_http_writer.pyx.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_http_writer.pyx.hash new file mode 100644 index 0000000..8e1aaab --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_http_writer.pyx.hash @@ -0,0 +1 @@ +6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/_websocket.pyx.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/_websocket.pyx.hash new file mode 100644 index 0000000..ddbb4c7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/_websocket.pyx.hash @@ -0,0 +1 @@ +d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx diff --git a/venv/lib/python3.12/site-packages/aiohttp/.hash/hdrs.py.hash b/venv/lib/python3.12/site-packages/aiohttp/.hash/hdrs.py.hash new file mode 100644 index 0000000..e0b81d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/.hash/hdrs.py.hash @@ -0,0 +1 @@ +bb39f96a09ff8d789dda1fa4cba63464043c06b3de4c62c31abfb07a231cb6ca /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py diff --git a/venv/lib/python3.12/site-packages/aiohttp/__init__.py b/venv/lib/python3.12/site-packages/aiohttp/__init__.py new file mode 100644 index 0000000..32f85ac --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/__init__.py @@ -0,0 +1,240 @@ +__version__ = "3.9.1" + +from typing import TYPE_CHECKING, Tuple + +from . 
import hdrs as hdrs +from .client import ( + BaseConnector as BaseConnector, + ClientConnectionError as ClientConnectionError, + ClientConnectorCertificateError as ClientConnectorCertificateError, + ClientConnectorError as ClientConnectorError, + ClientConnectorSSLError as ClientConnectorSSLError, + ClientError as ClientError, + ClientHttpProxyError as ClientHttpProxyError, + ClientOSError as ClientOSError, + ClientPayloadError as ClientPayloadError, + ClientProxyConnectionError as ClientProxyConnectionError, + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + ClientResponseError as ClientResponseError, + ClientSession as ClientSession, + ClientSSLError as ClientSSLError, + ClientTimeout as ClientTimeout, + ClientWebSocketResponse as ClientWebSocketResponse, + ContentTypeError as ContentTypeError, + Fingerprint as Fingerprint, + InvalidURL as InvalidURL, + NamedPipeConnector as NamedPipeConnector, + RequestInfo as RequestInfo, + ServerConnectionError as ServerConnectionError, + ServerDisconnectedError as ServerDisconnectedError, + ServerFingerprintMismatch as ServerFingerprintMismatch, + ServerTimeoutError as ServerTimeoutError, + TCPConnector as TCPConnector, + TooManyRedirects as TooManyRedirects, + UnixConnector as UnixConnector, + WSServerHandshakeError as WSServerHandshakeError, + request as request, +) +from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar +from .formdata import FormData as FormData +from .helpers import BasicAuth, ChainMapProxy, ETag +from .http import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + WebSocketError as WebSocketError, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, +) +from .multipart import ( + BadContentDispositionHeader as BadContentDispositionHeader, + BadContentDispositionParam as BadContentDispositionParam, + BodyPartReader as BodyPartReader, + MultipartReader as MultipartReader, + MultipartWriter as MultipartWriter, + content_disposition_filename as content_disposition_filename, + parse_content_disposition as parse_content_disposition, +) +from .payload import ( + PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, + AsyncIterablePayload as AsyncIterablePayload, + BufferedReaderPayload as BufferedReaderPayload, + BytesIOPayload as BytesIOPayload, + BytesPayload as BytesPayload, + IOBasePayload as IOBasePayload, + JsonPayload as JsonPayload, + Payload as Payload, + StringIOPayload as StringIOPayload, + StringPayload as StringPayload, + TextIOPayload as TextIOPayload, + get_payload as get_payload, + payload_type as payload_type, +) +from .payload_streamer import streamer as streamer +from .resolver import ( + AsyncResolver as AsyncResolver, + DefaultResolver as DefaultResolver, + ThreadedResolver as ThreadedResolver, +) +from .streams import ( + EMPTY_PAYLOAD as EMPTY_PAYLOAD, + DataQueue as DataQueue, + EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, + StreamReader as StreamReader, +) +from .tracing import ( + TraceConfig as TraceConfig, + TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, + TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, + TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, + TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, + TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, + TraceDnsCacheHitParams as TraceDnsCacheHitParams, + TraceDnsCacheMissParams as TraceDnsCacheMissParams, + 
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, + TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, + TraceRequestChunkSentParams as TraceRequestChunkSentParams, + TraceRequestEndParams as TraceRequestEndParams, + TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestRedirectParams as TraceRequestRedirectParams, + TraceRequestStartParams as TraceRequestStartParams, + TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, +) + +if TYPE_CHECKING: # pragma: no cover + # At runtime these are lazy-loaded at the bottom of the file. + from .worker import ( + GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, + GunicornWebWorker as GunicornWebWorker, + ) + +__all__: Tuple[str, ...] = ( + "hdrs", + # client + "BaseConnector", + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponse", + "ClientRequest", + "ClientResponseError", + "ClientSSLError", + "ClientSession", + "ClientTimeout", + "ClientWebSocketResponse", + "ContentTypeError", + "Fingerprint", + "InvalidURL", + "RequestInfo", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "TCPConnector", + "TooManyRedirects", + "UnixConnector", + "NamedPipeConnector", + "WSServerHandshakeError", + "request", + # cookiejar + "CookieJar", + "DummyCookieJar", + # formdata + "FormData", + # helpers + "BasicAuth", + "ChainMapProxy", + "ETag", + # http + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + "WSMsgType", + "WSCloseCode", + "WSMessage", + "WebSocketError", + # multipart + "BadContentDispositionHeader", + "BadContentDispositionParam", + "BodyPartReader", + "MultipartReader", + "MultipartWriter", + "content_disposition_filename", + "parse_content_disposition", + # payload + "AsyncIterablePayload", + "BufferedReaderPayload", + "BytesIOPayload", + "BytesPayload", + "IOBasePayload", + "JsonPayload", + "PAYLOAD_REGISTRY", + "Payload", + "StringIOPayload", + "StringPayload", + "TextIOPayload", + "get_payload", + "payload_type", + # payload_streamer + "streamer", + # resolver + "AsyncResolver", + "DefaultResolver", + "ThreadedResolver", + # streams + "DataQueue", + "EMPTY_PAYLOAD", + "EofStream", + "FlowControlDataQueue", + "StreamReader", + # tracing + "TraceConfig", + "TraceConnectionCreateEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionReuseconnParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceDnsResolveHostEndParams", + "TraceDnsResolveHostStartParams", + "TraceRequestChunkSentParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceRequestRedirectParams", + "TraceRequestStartParams", + "TraceResponseChunkReceivedParams", + # workers (imported lazily with __getattr__) + "GunicornUVLoopWebWorker", + "GunicornWebWorker", +) + + +def __dir__() -> Tuple[str, ...]: + return __all__ + ("__author__", "__doc__") + + +def __getattr__(name: str) -> object: + global GunicornUVLoopWebWorker, GunicornWebWorker + + # Importing gunicorn takes a long time (>100ms), so only import if actually needed. 
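    # (PEP 562: this module-level __getattr__ runs only when normal attribute
    # lookup on the module fails, so the slow import below is paid on first
    # access and the result is cached back into module globals. Illustrative
    # usage, with hypothetical timings:
    #
    #     import aiohttp                 # fast -- gunicorn not imported yet
    #     aiohttp.GunicornWebWorker      # first access: triggers .worker import
    #     aiohttp.GunicornWebWorker      # later accesses: plain module-dict lookup)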
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"): + try: + from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw + except ImportError: + return None + + GunicornUVLoopWebWorker = guv # type: ignore[misc] + GunicornWebWorker = gw # type: ignore[misc] + return guv if name == "GunicornUVLoopWebWorker" else gw + + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..dbd26ce Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/abc.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/abc.cpython-312.pyc new file mode 100644 index 0000000..89b2523 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/abc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/base_protocol.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/base_protocol.cpython-312.pyc new file mode 100644 index 0000000..fd88d35 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/base_protocol.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..7829e10 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_exceptions.cpython-312.pyc new file mode 100644 index 0000000..b3a5401 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_proto.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_proto.cpython-312.pyc new file mode 100644 index 0000000..dd9cff8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_proto.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_reqrep.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_reqrep.cpython-312.pyc new file mode 100644 index 0000000..e8a370e Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_reqrep.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_ws.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_ws.cpython-312.pyc new file mode 100644 index 0000000..bc220cf Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/client_ws.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/compression_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/compression_utils.cpython-312.pyc new file mode 100644 index 0000000..b0c93bf Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/compression_utils.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/connector.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/connector.cpython-312.pyc new file mode 100644 index 0000000..fb7da69 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/connector.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/cookiejar.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/cookiejar.cpython-312.pyc new file mode 100644 index 0000000..6df9b70 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/cookiejar.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/formdata.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/formdata.cpython-312.pyc new file mode 100644 index 0000000..8e83cb6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/formdata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/hdrs.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/hdrs.cpython-312.pyc new file mode 100644 index 0000000..2871770 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/hdrs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/helpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/helpers.cpython-312.pyc new file mode 100644 index 0000000..6b85f81 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/helpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http.cpython-312.pyc new file mode 100644 index 0000000..c13cdd5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_exceptions.cpython-312.pyc new file mode 100644 index 0000000..59c35ea Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_parser.cpython-312.pyc new file mode 100644 index 0000000..c793c33 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_websocket.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_websocket.cpython-312.pyc new file mode 100644 index 0000000..a59de94 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_websocket.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_writer.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_writer.cpython-312.pyc new file mode 100644 index 0000000..e28559f Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/http_writer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/locks.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/locks.cpython-312.pyc new file mode 100644 index 
0000000..eab5461 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/locks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/log.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/log.cpython-312.pyc new file mode 100644 index 0000000..5413c5c Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/log.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/multipart.cpython-312.pyc new file mode 100644 index 0000000..f878e39 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/payload.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/payload.cpython-312.pyc new file mode 100644 index 0000000..60aaf33 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/payload.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/payload_streamer.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/payload_streamer.cpython-312.pyc new file mode 100644 index 0000000..d5bf071 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/payload_streamer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-312.pyc new file mode 100644 index 0000000..6b2179f Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/resolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/resolver.cpython-312.pyc new file mode 100644 index 0000000..e00e765 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/resolver.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/streams.cpython-312.pyc new file mode 100644 index 0000000..be88183 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/streams.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-312.pyc new file mode 100644 index 0000000..337ba6a Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/test_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/test_utils.cpython-312.pyc new file mode 100644 index 0000000..de9b321 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/test_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/tracing.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/tracing.cpython-312.pyc new file mode 100644 index 0000000..3b3b2fe Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/tracing.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/typedefs.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/typedefs.cpython-312.pyc new file mode 100644 index 0000000..4c35dca Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/typedefs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web.cpython-312.pyc new file mode 100644 index 0000000..fe764d8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_app.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_app.cpython-312.pyc new file mode 100644 index 0000000..9440f36 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_app.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_exceptions.cpython-312.pyc new file mode 100644 index 0000000..1e38185 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-312.pyc new file mode 100644 index 0000000..7733343 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_log.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_log.cpython-312.pyc new file mode 100644 index 0000000..a8227f3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_log.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_middlewares.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_middlewares.cpython-312.pyc new file mode 100644 index 0000000..95923c6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_middlewares.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_protocol.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_protocol.cpython-312.pyc new file mode 100644 index 0000000..1e3ab40 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_protocol.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_request.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_request.cpython-312.pyc new file mode 100644 index 0000000..6367eca Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_request.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_response.cpython-312.pyc new file mode 100644 index 0000000..97f7cb1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_routedef.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_routedef.cpython-312.pyc new file mode 100644 index 0000000..46e7a64 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_routedef.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_runner.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_runner.cpython-312.pyc new file mode 100644 index 0000000..d857076 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_runner.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_server.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_server.cpython-312.pyc new file mode 100644 index 0000000..39e0edd Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_server.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc new file mode 100644 index 0000000..199c418 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_ws.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_ws.cpython-312.pyc new file mode 100644 index 0000000..5e1d256 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/web_ws.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/__pycache__/worker.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/worker.cpython-312.pyc new file mode 100644 index 0000000..24092b0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/__pycache__/worker.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/_cparser.pxd b/venv/lib/python3.12/site-packages/aiohttp/_cparser.pxd new file mode 100644 index 0000000..c2cd5a9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_cparser.pxd @@ -0,0 +1,158 @@ +from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t + + +cdef extern from "../vendor/llhttp/build/llhttp.h": + + struct llhttp__internal_s: + int32_t _index + void* _span_pos0 + void* _span_cb0 + int32_t error + const char* reason + const char* error_pos + void* data + void* _current + uint64_t content_length + uint8_t type + uint8_t method + uint8_t http_major + uint8_t http_minor + uint8_t header_state + uint8_t lenient_flags + uint8_t upgrade + uint8_t finish + uint16_t flags + uint16_t status_code + void* settings + + ctypedef llhttp__internal_s llhttp__internal_t + ctypedef llhttp__internal_t llhttp_t + + ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1 + ctypedef int (*llhttp_cb)(llhttp_t*) except -1 + + struct llhttp_settings_s: + llhttp_cb on_message_begin + llhttp_data_cb on_url + llhttp_data_cb on_status + llhttp_data_cb on_header_field + llhttp_data_cb on_header_value + llhttp_cb on_headers_complete + llhttp_data_cb on_body + llhttp_cb on_message_complete + llhttp_cb on_chunk_header + llhttp_cb on_chunk_complete + + llhttp_cb on_url_complete + llhttp_cb on_status_complete + llhttp_cb on_header_field_complete + llhttp_cb on_header_value_complete + + ctypedef llhttp_settings_s llhttp_settings_t + + enum llhttp_errno: + HPE_OK, + HPE_INTERNAL, + HPE_STRICT, + HPE_LF_EXPECTED, + 
HPE_UNEXPECTED_CONTENT_LENGTH, + HPE_CLOSED_CONNECTION, + HPE_INVALID_METHOD, + HPE_INVALID_URL, + HPE_INVALID_CONSTANT, + HPE_INVALID_VERSION, + HPE_INVALID_HEADER_TOKEN, + HPE_INVALID_CONTENT_LENGTH, + HPE_INVALID_CHUNK_SIZE, + HPE_INVALID_STATUS, + HPE_INVALID_EOF_STATE, + HPE_INVALID_TRANSFER_ENCODING, + HPE_CB_MESSAGE_BEGIN, + HPE_CB_HEADERS_COMPLETE, + HPE_CB_MESSAGE_COMPLETE, + HPE_CB_CHUNK_HEADER, + HPE_CB_CHUNK_COMPLETE, + HPE_PAUSED, + HPE_PAUSED_UPGRADE, + HPE_USER + + ctypedef llhttp_errno llhttp_errno_t + + enum llhttp_flags: + F_CHUNKED, + F_CONTENT_LENGTH + + enum llhttp_type: + HTTP_REQUEST, + HTTP_RESPONSE, + HTTP_BOTH + + enum llhttp_method: + HTTP_DELETE, + HTTP_GET, + HTTP_HEAD, + HTTP_POST, + HTTP_PUT, + HTTP_CONNECT, + HTTP_OPTIONS, + HTTP_TRACE, + HTTP_COPY, + HTTP_LOCK, + HTTP_MKCOL, + HTTP_MOVE, + HTTP_PROPFIND, + HTTP_PROPPATCH, + HTTP_SEARCH, + HTTP_UNLOCK, + HTTP_BIND, + HTTP_REBIND, + HTTP_UNBIND, + HTTP_ACL, + HTTP_REPORT, + HTTP_MKACTIVITY, + HTTP_CHECKOUT, + HTTP_MERGE, + HTTP_MSEARCH, + HTTP_NOTIFY, + HTTP_SUBSCRIBE, + HTTP_UNSUBSCRIBE, + HTTP_PATCH, + HTTP_PURGE, + HTTP_MKCALENDAR, + HTTP_LINK, + HTTP_UNLINK, + HTTP_SOURCE, + HTTP_PRI, + HTTP_DESCRIBE, + HTTP_ANNOUNCE, + HTTP_SETUP, + HTTP_PLAY, + HTTP_PAUSE, + HTTP_TEARDOWN, + HTTP_GET_PARAMETER, + HTTP_SET_PARAMETER, + HTTP_REDIRECT, + HTTP_RECORD, + HTTP_FLUSH + + ctypedef llhttp_method llhttp_method_t; + + void llhttp_settings_init(llhttp_settings_t* settings) + void llhttp_init(llhttp_t* parser, llhttp_type type, + const llhttp_settings_t* settings) + + llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) + + int llhttp_should_keep_alive(const llhttp_t* parser) + + void llhttp_resume_after_upgrade(llhttp_t* parser) + + llhttp_errno_t llhttp_get_errno(const llhttp_t* parser) + const char* llhttp_get_error_reason(const llhttp_t* parser) + const char* llhttp_get_error_pos(const llhttp_t* parser) + + const char* llhttp_method_name(llhttp_method_t method) + + void llhttp_set_lenient_headers(llhttp_t* parser, int enabled) + void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled) + void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled) diff --git a/venv/lib/python3.12/site-packages/aiohttp/_find_header.pxd b/venv/lib/python3.12/site-packages/aiohttp/_find_header.pxd new file mode 100644 index 0000000..37a6c37 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_find_header.pxd @@ -0,0 +1,2 @@ +cdef extern from "_find_header.h": + int find_header(char *, int) diff --git a/venv/lib/python3.12/site-packages/aiohttp/_headers.pxi b/venv/lib/python3.12/site-packages/aiohttp/_headers.pxi new file mode 100644 index 0000000..3744721 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_headers.pxi @@ -0,0 +1,83 @@ +# The file is autogenerated from aiohttp/hdrs.py +# Run ./tools/gen.py to update it after the origin changing. + +from . 
import hdrs +cdef tuple headers = ( + hdrs.ACCEPT, + hdrs.ACCEPT_CHARSET, + hdrs.ACCEPT_ENCODING, + hdrs.ACCEPT_LANGUAGE, + hdrs.ACCEPT_RANGES, + hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, + hdrs.ACCESS_CONTROL_ALLOW_HEADERS, + hdrs.ACCESS_CONTROL_ALLOW_METHODS, + hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, + hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, + hdrs.ACCESS_CONTROL_MAX_AGE, + hdrs.ACCESS_CONTROL_REQUEST_HEADERS, + hdrs.ACCESS_CONTROL_REQUEST_METHOD, + hdrs.AGE, + hdrs.ALLOW, + hdrs.AUTHORIZATION, + hdrs.CACHE_CONTROL, + hdrs.CONNECTION, + hdrs.CONTENT_DISPOSITION, + hdrs.CONTENT_ENCODING, + hdrs.CONTENT_LANGUAGE, + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_MD5, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TRANSFER_ENCODING, + hdrs.CONTENT_TYPE, + hdrs.COOKIE, + hdrs.DATE, + hdrs.DESTINATION, + hdrs.DIGEST, + hdrs.ETAG, + hdrs.EXPECT, + hdrs.EXPIRES, + hdrs.FORWARDED, + hdrs.FROM, + hdrs.HOST, + hdrs.IF_MATCH, + hdrs.IF_MODIFIED_SINCE, + hdrs.IF_NONE_MATCH, + hdrs.IF_RANGE, + hdrs.IF_UNMODIFIED_SINCE, + hdrs.KEEP_ALIVE, + hdrs.LAST_EVENT_ID, + hdrs.LAST_MODIFIED, + hdrs.LINK, + hdrs.LOCATION, + hdrs.MAX_FORWARDS, + hdrs.ORIGIN, + hdrs.PRAGMA, + hdrs.PROXY_AUTHENTICATE, + hdrs.PROXY_AUTHORIZATION, + hdrs.RANGE, + hdrs.REFERER, + hdrs.RETRY_AFTER, + hdrs.SEC_WEBSOCKET_ACCEPT, + hdrs.SEC_WEBSOCKET_EXTENSIONS, + hdrs.SEC_WEBSOCKET_KEY, + hdrs.SEC_WEBSOCKET_KEY1, + hdrs.SEC_WEBSOCKET_PROTOCOL, + hdrs.SEC_WEBSOCKET_VERSION, + hdrs.SERVER, + hdrs.SET_COOKIE, + hdrs.TE, + hdrs.TRAILER, + hdrs.TRANSFER_ENCODING, + hdrs.URI, + hdrs.UPGRADE, + hdrs.USER_AGENT, + hdrs.VARY, + hdrs.VIA, + hdrs.WWW_AUTHENTICATE, + hdrs.WANT_DIGEST, + hdrs.WARNING, + hdrs.X_FORWARDED_FOR, + hdrs.X_FORWARDED_HOST, + hdrs.X_FORWARDED_PROTO, +) diff --git a/venv/lib/python3.12/site-packages/aiohttp/_helpers.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/aiohttp/_helpers.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..28b78a1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/_helpers.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/_helpers.pyi b/venv/lib/python3.12/site-packages/aiohttp/_helpers.pyi new file mode 100644 index 0000000..1e35893 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_helpers.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class reify: + def __init__(self, wrapped: Any) -> None: ... + def __get__(self, inst: Any, owner: Any) -> Any: ... + def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/venv/lib/python3.12/site-packages/aiohttp/_helpers.pyx b/venv/lib/python3.12/site-packages/aiohttp/_helpers.pyx new file mode 100644 index 0000000..665f367 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_helpers.pyx @@ -0,0 +1,35 @@ +cdef class reify: + """Use as a class method decorator. It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. 
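# A pure-Python equivalent of this Cython descriptor, to make the caching
# behaviour concrete. A sketch only: like the original, it assumes the host
# instance provides a `_cache` dict, which aiohttp's request objects do.
class _reify_sketch:
    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    def __get__(self, inst, owner):
        if inst is None:
            return self  # accessed on the class: return the descriptor itself
        try:
            return inst._cache[self.name]  # hit: reuse the first result
        except KeyError:
            val = self.wrapped(inst)       # miss: compute exactly once
            inst._cache[self.name] = val
            return val

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")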
+ + """ + + cdef object wrapped + cdef object name + + def __init__(self, wrapped): + self.wrapped = wrapped + self.name = wrapped.__name__ + + @property + def __doc__(self): + return self.wrapped.__doc__ + + def __get__(self, inst, owner): + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst, value): + raise AttributeError("reified property is read-only") diff --git a/venv/lib/python3.12/site-packages/aiohttp/_http_parser.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/aiohttp/_http_parser.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..94e350f Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/_http_parser.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/_http_parser.pyx b/venv/lib/python3.12/site-packages/aiohttp/_http_parser.pyx new file mode 100644 index 0000000..3f28fbd --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_http_parser.pyx @@ -0,0 +1,836 @@ +#cython: language_level=3 +# +# Based on https://github.com/MagicStack/httptools +# + +from cpython cimport ( + Py_buffer, + PyBUF_SIMPLE, + PyBuffer_Release, + PyBytes_AsString, + PyBytes_AsStringAndSize, + PyObject_GetBuffer, +) +from cpython.mem cimport PyMem_Free, PyMem_Malloc +from libc.limits cimport ULLONG_MAX +from libc.string cimport memcpy + +from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy +from yarl import URL as _URL + +from aiohttp import hdrs +from aiohttp.helpers import DEBUG + +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + PayloadEncodingError, + TransferEncodingError, +) +from .http_parser import DeflateBuffer as _DeflateBuffer +from .http_writer import ( + HttpVersion as _HttpVersion, + HttpVersion10 as _HttpVersion10, + HttpVersion11 as _HttpVersion11, +) +from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader + +cimport cython + +from aiohttp cimport _cparser as cparser + +include "_headers.pxi" + +from aiohttp cimport _find_header + +DEF DEFAULT_FREELIST_SIZE = 250 + +cdef extern from "Python.h": + int PyByteArray_Resize(object, Py_ssize_t) except -1 + Py_ssize_t PyByteArray_Size(object) except -1 + char* PyByteArray_AsString(object) + +__all__ = ('HttpRequestParser', 'HttpResponseParser', + 'RawRequestMessage', 'RawResponseMessage') + +cdef object URL = _URL +cdef object URL_build = URL.build +cdef object CIMultiDict = _CIMultiDict +cdef object CIMultiDictProxy = _CIMultiDictProxy +cdef object HttpVersion = _HttpVersion +cdef object HttpVersion10 = _HttpVersion10 +cdef object HttpVersion11 = _HttpVersion11 +cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 +cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING +cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD +cdef object StreamReader = _StreamReader +cdef object DeflateBuffer = _DeflateBuffer + + +cdef inline object extend(object buf, const char* at, size_t length): + cdef Py_ssize_t s + cdef char* ptr + s = PyByteArray_Size(buf) + PyByteArray_Resize(buf, s + length) + ptr = PyByteArray_AsString(buf) + memcpy(ptr + s, at, length) + + +DEF METHODS_COUNT = 46; + +cdef list _http_method = [] + +for i in range(METHODS_COUNT): + _http_method.append( + cparser.llhttp_method_name( i).decode('ascii')) + + +cdef inline 
str http_method_str(int i):
+    if i < METHODS_COUNT:
+        return _http_method[i]
+    else:
+        return "<unknown>"
+
+
+cdef inline object find_header(bytes raw_header):
+    cdef Py_ssize_t size
+    cdef char *buf
+    cdef int idx
+    PyBytes_AsStringAndSize(raw_header, &buf, &size)
+    idx = _find_header.find_header(buf, size)
+    if idx == -1:
+        return raw_header.decode('utf-8', 'surrogateescape')
+    return headers[idx]
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawRequestMessage:
+    cdef readonly str method
+    cdef readonly str path
+    cdef readonly object version  # HttpVersion
+    cdef readonly object headers  # CIMultiDict
+    cdef readonly object raw_headers  # tuple
+    cdef readonly object should_close
+    cdef readonly object compression
+    cdef readonly object upgrade
+    cdef readonly object chunked
+    cdef readonly object url  # yarl.URL
+
+    def __init__(self, method, path, version, headers, raw_headers,
+                 should_close, compression, upgrade, chunked, url):
+        self.method = method
+        self.path = path
+        self.version = version
+        self.headers = headers
+        self.raw_headers = raw_headers
+        self.should_close = should_close
+        self.compression = compression
+        self.upgrade = upgrade
+        self.chunked = chunked
+        self.url = url
+
+    def __repr__(self):
+        info = []
+        info.append(("method", self.method))
+        info.append(("path", self.path))
+        info.append(("version", self.version))
+        info.append(("headers", self.headers))
+        info.append(("raw_headers", self.raw_headers))
+        info.append(("should_close", self.should_close))
+        info.append(("compression", self.compression))
+        info.append(("upgrade", self.upgrade))
+        info.append(("chunked", self.chunked))
+        info.append(("url", self.url))
+        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+        return '<RawRequestMessage(' + sinfo + ')>'
+
+    def _replace(self, **dct):
+        cdef RawRequestMessage ret
+        ret = _new_request_message(self.method,
+                                   self.path,
+                                   self.version,
+                                   self.headers,
+                                   self.raw_headers,
+                                   self.should_close,
+                                   self.compression,
+                                   self.upgrade,
+                                   self.chunked,
+                                   self.url)
+        if "method" in dct:
+            ret.method = dct["method"]
+        if "path" in dct:
+            ret.path = dct["path"]
+        if "version" in dct:
+            ret.version = dct["version"]
+        if "headers" in dct:
+            ret.headers = dct["headers"]
+        if "raw_headers" in dct:
+            ret.raw_headers = dct["raw_headers"]
+        if "should_close" in dct:
+            ret.should_close = dct["should_close"]
+        if "compression" in dct:
+            ret.compression = dct["compression"]
+        if "upgrade" in dct:
+            ret.upgrade = dct["upgrade"]
+        if "chunked" in dct:
+            ret.chunked = dct["chunked"]
+        if "url" in dct:
+            ret.url = dct["url"]
+        return ret
+
+cdef _new_request_message(str method,
+                          str path,
+                          object version,
+                          object headers,
+                          object raw_headers,
+                          bint should_close,
+                          object compression,
+                          bint upgrade,
+                          bint chunked,
+                          object url):
+    cdef RawRequestMessage ret
+    ret = RawRequestMessage.__new__(RawRequestMessage)
+    ret.method = method
+    ret.path = path
+    ret.version = version
+    ret.headers = headers
+    ret.raw_headers = raw_headers
+    ret.should_close = should_close
+    ret.compression = compression
+    ret.upgrade = upgrade
+    ret.chunked = chunked
+    ret.url = url
+    return ret
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawResponseMessage:
+    cdef readonly object version  # HttpVersion
+    cdef readonly int code
+    cdef readonly str reason
+    cdef readonly object headers  # CIMultiDict
+    cdef readonly object raw_headers  # tuple
+    cdef readonly object should_close
+    cdef readonly object compression
+    cdef readonly object upgrade
+    cdef readonly object chunked
+
+    def __init__(self, version, code, reason, headers, raw_headers,
+                 should_close, compression, upgrade, chunked):
+        self.version = version
+        self.code = code
+        self.reason = reason
+        self.headers = headers
+        self.raw_headers = raw_headers
+        self.should_close = should_close
+        self.compression = compression
+        self.upgrade = upgrade
+        self.chunked = chunked
+
+    def __repr__(self):
+        info = []
+        info.append(("version", self.version))
+        info.append(("code", self.code))
+        info.append(("reason", self.reason))
+        info.append(("headers", self.headers))
+        info.append(("raw_headers", self.raw_headers))
+        info.append(("should_close", self.should_close))
+        info.append(("compression", self.compression))
+        info.append(("upgrade", self.upgrade))
+        info.append(("chunked", self.chunked))
+        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+        return '<RawResponseMessage(' + sinfo + ')>'
+
+
+cdef _new_response_message(object version,
+                           int code,
+                           str reason,
+                           object headers,
+                           object raw_headers,
+                           bint should_close,
+                           object compression,
+                           bint upgrade,
+                           bint chunked):
+    cdef RawResponseMessage ret
+    ret = RawResponseMessage.__new__(RawResponseMessage)
+    ret.version = version
+    ret.code = code
+    ret.reason = reason
+    ret.headers = headers
+    ret.raw_headers = raw_headers
+    ret.should_close = should_close
+    ret.compression = compression
+    ret.upgrade = upgrade
+    ret.chunked = chunked
+    return ret
+
+
+@cython.internal
+cdef class HttpParser:
+
+    cdef:
+        cparser.llhttp_t* _cparser
+        cparser.llhttp_settings_t* _csettings
+
+        bytearray _raw_name
+        bytearray _raw_value
+        bint _has_value
+
+        object _protocol
+        object _loop
+        object _timer
+
+        size_t _max_line_size
+        size_t _max_field_size
+        size_t _max_headers
+        bint _response_with_body
+        bint _read_until_eof
+
+        bint _started
+        object _url
+        bytearray _buf
+        str _path
+        str _reason
+        object _headers
+        list _raw_headers
+        bint _upgraded
+        list _messages
+        object _payload
+        bint _payload_error
+        object _payload_exception
+        object _last_error
+        bint _auto_decompress
+        int _limit
+
+        str _content_encoding
+
+        Py_buffer py_buf
+
+    def __cinit__(self):
+        self._cparser = <cparser.llhttp_t*> \
+            PyMem_Malloc(sizeof(cparser.llhttp_t))
+        if self._cparser is NULL:
+            raise MemoryError()
+
+        self._csettings = <cparser.llhttp_settings_t*> \
+            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
+        if self._csettings is NULL:
+            raise MemoryError()
+
+    def __dealloc__(self):
+        PyMem_Free(self._cparser)
+        PyMem_Free(self._csettings)
+
+    cdef _init(
+        self, cparser.llhttp_type mode,
+        object protocol, object loop, int limit,
+        object timer=None,
+        size_t max_line_size=8190, size_t max_headers=32768,
+        size_t max_field_size=8190, payload_exception=None,
+        bint response_with_body=True, bint read_until_eof=False,
+        bint auto_decompress=True,
+    ):
+        cparser.llhttp_settings_init(self._csettings)
+        cparser.llhttp_init(self._cparser, mode, self._csettings)
+        self._cparser.data = <void*>self
+        self._cparser.content_length = 0
+
+        self._protocol = protocol
+        self._loop = loop
+        self._timer = timer
+
+        self._buf = bytearray()
+        self._payload = None
+        self._payload_error = 0
+        self._payload_exception = payload_exception
+        self._messages = []
+
+        self._raw_name = bytearray()
+        self._raw_value = bytearray()
+        self._has_value = False
+
+        self._max_line_size = max_line_size
+        self._max_headers = max_headers
+        self._max_field_size = max_field_size
+        self._response_with_body = response_with_body
+        self._read_until_eof = read_until_eof
+        self._upgraded = False
+        self._auto_decompress = auto_decompress
+        self._content_encoding = None
+
+        self._csettings.on_url = cb_on_url
+        self._csettings.on_status = cb_on_status
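        # The cb_on_* functions registered in this block are the module-level
        # C callbacks defined near the bottom of this file. llhttp invokes
        # them as it recognises each syntactic element (URL, status, header
        # field/value, body data, chunk boundaries); each callback recovers
        # the owning HttpParser from parser.data and forwards the byte span.
        # Returning -1 from a callback aborts llhttp_execute(), which is how
        # Python exceptions raised inside a handler surface in feed_data().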
self._csettings.on_header_field = cb_on_header_field + self._csettings.on_header_value = cb_on_header_value + self._csettings.on_headers_complete = cb_on_headers_complete + self._csettings.on_body = cb_on_body + self._csettings.on_message_begin = cb_on_message_begin + self._csettings.on_message_complete = cb_on_message_complete + self._csettings.on_chunk_header = cb_on_chunk_header + self._csettings.on_chunk_complete = cb_on_chunk_complete + + self._last_error = None + self._limit = limit + + cdef _process_header(self): + if self._raw_name: + raw_name = bytes(self._raw_name) + raw_value = bytes(self._raw_value) + + name = find_header(raw_name) + value = raw_value.decode('utf-8', 'surrogateescape') + + self._headers.add(name, value) + + if name is CONTENT_ENCODING: + self._content_encoding = value + + PyByteArray_Resize(self._raw_name, 0) + PyByteArray_Resize(self._raw_value, 0) + self._has_value = False + self._raw_headers.append((raw_name, raw_value)) + + cdef _on_header_field(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + if self._has_value: + self._process_header() + + size = PyByteArray_Size(self._raw_name) + PyByteArray_Resize(self._raw_name, size + length) + buf = PyByteArray_AsString(self._raw_name) + memcpy(buf + size, at, length) + + cdef _on_header_value(self, char* at, size_t length): + cdef Py_ssize_t size + cdef char *buf + + size = PyByteArray_Size(self._raw_value) + PyByteArray_Resize(self._raw_value, size + length) + buf = PyByteArray_AsString(self._raw_value) + memcpy(buf + size, at, length) + self._has_value = True + + cdef _on_headers_complete(self): + self._process_header() + + method = http_method_str(self._cparser.method) + should_close = not cparser.llhttp_should_keep_alive(self._cparser) + upgrade = self._cparser.upgrade + chunked = self._cparser.flags & cparser.F_CHUNKED + + raw_headers = tuple(self._raw_headers) + headers = CIMultiDictProxy(self._headers) + + if upgrade or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + encoding = None + enc = self._content_encoding + if enc is not None: + self._content_encoding = None + enc = enc.lower() + if enc in ('gzip', 'deflate', 'br'): + encoding = enc + + if self._cparser.type == cparser.HTTP_REQUEST: + msg = _new_request_message( + method, self._path, + self.http_version(), headers, raw_headers, + should_close, encoding, upgrade, chunked, self._url) + else: + msg = _new_response_message( + self.http_version(), self._cparser.status_code, self._reason, + headers, raw_headers, should_close, encoding, + upgrade, chunked) + + if ( + ULLONG_MAX > self._cparser.content_length > 0 or chunked or + self._cparser.method == cparser.HTTP_CONNECT or + (self._cparser.status_code >= 199 and + self._cparser.content_length == 0 and + self._read_until_eof) + ): + payload = StreamReader( + self._protocol, timer=self._timer, loop=self._loop, + limit=self._limit) + else: + payload = EMPTY_PAYLOAD + + self._payload = payload + if encoding is not None and self._auto_decompress: + self._payload = DeflateBuffer(payload, encoding) + + if not self._response_with_body: + payload = EMPTY_PAYLOAD + + self._messages.append((msg, payload)) + + cdef _on_message_complete(self): + self._payload.feed_eof() + self._payload = None + + cdef _on_chunk_header(self): + self._payload.begin_http_chunk_receiving() + + cdef _on_chunk_complete(self): + self._payload.end_http_chunk_receiving() + + 
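# The payload selection in _on_headers_complete above reduces to a small
# predicate. A plain-Python restatement of the rule as written in this file
# (a sketch: the C struct uses ULLONG_MAX to mean "no Content-Length"; here
# content_length == 0 simply means "none declared"):
def _message_expects_body(content_length: int, chunked: bool,
                          is_connect: bool, status_code: int,
                          read_until_eof: bool) -> bool:
    """True when the parsed message should be given a StreamReader payload."""
    # A declared positive Content-Length, chunked framing, or a CONNECT
    # tunnel always carries a body stream...
    if content_length > 0 or chunked or is_connect:
        return True
    # ...otherwise only length-less messages read until EOF qualify
    # (the >= 199 bound is kept exactly as it appears upstream).
    return status_code >= 199 and content_length == 0 and read_until_eof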
cdef object _on_status_complete(self): + pass + + cdef inline http_version(self): + cdef cparser.llhttp_t* parser = self._cparser + + if parser.http_major == 1: + if parser.http_minor == 0: + return HttpVersion10 + elif parser.http_minor == 1: + return HttpVersion11 + + return HttpVersion(parser.http_major, parser.http_minor) + + ### Public API ### + + def feed_eof(self): + cdef bytes desc + + if self._payload is not None: + if self._cparser.flags & cparser.F_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header.") + elif self._cparser.flags & cparser.F_CONTENT_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header.") + elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: + desc = cparser.llhttp_get_error_reason(self._cparser) + raise PayloadEncodingError(desc.decode('latin-1')) + else: + self._payload.feed_eof() + elif self._started: + self._on_headers_complete() + if self._messages: + return self._messages[-1][0] + + def feed_data(self, data): + cdef: + size_t data_len + size_t nb + cdef cparser.llhttp_errno_t errno + + PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) + data_len = self.py_buf.len + + errno = cparser.llhttp_execute( + self._cparser, + self.py_buf.buf, + data_len) + + if errno is cparser.HPE_PAUSED_UPGRADE: + cparser.llhttp_resume_after_upgrade(self._cparser) + + nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf + + PyBuffer_Release(&self.py_buf) + + if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): + if self._payload_error == 0: + if self._last_error is not None: + ex = self._last_error + self._last_error = None + else: + after = cparser.llhttp_get_error_pos(self._cparser) + before = data[:after - self.py_buf.buf] + after_b = after.split(b"\r\n", 1)[0] + before = before.rsplit(b"\r\n", 1)[-1] + data = before + after_b + pointer = " " * (len(repr(before))-1) + "^" + ex = parser_error_from_errno(self._cparser, data, pointer) + self._payload = None + raise ex + + if self._messages: + messages = self._messages + self._messages = [] + else: + messages = () + + if self._upgraded: + return messages, True, data[nb:] + else: + return messages, False, b'' + + def set_upgraded(self, val): + self._upgraded = val + + +cdef class HttpRequestParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True, + ): + self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + + cdef object _on_status_complete(self): + cdef int idx1, idx2 + if not self._buf: + return + self._path = self._buf.decode('utf-8', 'surrogateescape') + try: + idx3 = len(self._path) + if self._cparser.method == cparser.HTTP_CONNECT: + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + self._url = URL.build(authority=self._path, encoded=True) + elif idx3 > 1 and self._path[0] == '/': + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + idx1 = self._path.find("?") + if idx1 == -1: + query = "" + idx2 = self._path.find("#") + if idx2 == -1: + path = self._path + fragment = "" + else: + path = self._path[0: idx2] + fragment = self._path[idx2+1:] + + else: + path = self._path[0:idx1] + idx1 += 1 + idx2 = 
self._path.find("#", idx1+1) + if idx2 == -1: + query = self._path[idx1:] + fragment = "" + else: + query = self._path[idx1: idx2] + fragment = self._path[idx2+1:] + + self._url = URL.build( + path=path, + query_string=query, + fragment=fragment, + encoded=True, + ) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + self._url = URL(self._path, encoded=True) + finally: + PyByteArray_Resize(self._buf, 0) + + +cdef class HttpResponseParser(HttpParser): + + def __init__( + self, protocol, loop, int limit, timer=None, + size_t max_line_size=8190, size_t max_headers=32768, + size_t max_field_size=8190, payload_exception=None, + bint response_with_body=True, bint read_until_eof=False, + bint auto_decompress=True + ): + self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, + max_line_size, max_headers, max_field_size, + payload_exception, response_with_body, read_until_eof, + auto_decompress) + # Use strict parsing on dev mode, so users are warned about broken servers. + if not DEBUG: + cparser.llhttp_set_lenient_headers(self._cparser, 1) + cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1) + cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1) + + cdef object _on_status_complete(self): + if self._buf: + self._reason = self._buf.decode('utf-8', 'surrogateescape') + PyByteArray_Resize(self._buf, 0) + else: + self._reason = self._reason or '' + +cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + + pyparser._started = True + pyparser._headers = CIMultiDict() + pyparser._raw_headers = [] + PyByteArray_Resize(pyparser._buf, 0) + pyparser._path = None + pyparser._reason = None + return 0 + + +cdef int cb_on_url(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_status(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef str reason + try: + if length > pyparser._max_line_size: + raise LineTooLong( + 'Status line is too long', pyparser._max_line_size, length) + extend(pyparser._buf, at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_field(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + pyparser._on_status_complete() + size = len(pyparser._raw_name) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header name is too long', pyparser._max_field_size, size) + pyparser._on_header_field(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int cb_on_header_value(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef Py_ssize_t size + try: + size = len(pyparser._raw_value) + length + if size > pyparser._max_field_size: + raise LineTooLong( + 'Header value is too long', pyparser._max_field_size, size) + pyparser._on_header_value(at, length) + except BaseException as ex: + pyparser._last_error = ex + return -1 + else: + return 0 + + +cdef int 
cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_status_complete() + pyparser._on_headers_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + if ( + pyparser._cparser.upgrade or + pyparser._cparser.method == cparser.HTTP_CONNECT + ): + return 2 + else: + return 0 + + +cdef int cb_on_body(cparser.llhttp_t* parser, + const char *at, size_t length) except -1: + cdef HttpParser pyparser = parser.data + cdef bytes body = at[:length] + try: + pyparser._payload.feed_data(body, length) + except BaseException as exc: + if pyparser._payload_exception is not None: + pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) + else: + pyparser._payload.set_exception(exc) + pyparser._payload_error = 1 + return -1 + else: + return 0 + + +cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._started = False + pyparser._on_message_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_header() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + cdef HttpParser pyparser = parser.data + try: + pyparser._on_chunk_complete() + except BaseException as exc: + pyparser._last_error = exc + return -1 + else: + return 0 + + +cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): + cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) + cdef bytes desc = cparser.llhttp_get_error_reason(parser) + + err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) + + if errno in {cparser.HPE_CB_MESSAGE_BEGIN, + cparser.HPE_CB_HEADERS_COMPLETE, + cparser.HPE_CB_MESSAGE_COMPLETE, + cparser.HPE_CB_CHUNK_HEADER, + cparser.HPE_CB_CHUNK_COMPLETE, + cparser.HPE_INVALID_CONSTANT, + cparser.HPE_INVALID_HEADER_TOKEN, + cparser.HPE_INVALID_CONTENT_LENGTH, + cparser.HPE_INVALID_CHUNK_SIZE, + cparser.HPE_INVALID_EOF_STATE, + cparser.HPE_INVALID_TRANSFER_ENCODING}: + return BadHttpMessage(err_msg) + elif errno in {cparser.HPE_INVALID_STATUS, + cparser.HPE_INVALID_METHOD, + cparser.HPE_INVALID_VERSION}: + return BadStatusLine(error=err_msg) + elif errno == cparser.HPE_INVALID_URL: + return InvalidURLError(err_msg) + + return BadHttpMessage(err_msg) diff --git a/venv/lib/python3.12/site-packages/aiohttp/_http_writer.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/aiohttp/_http_writer.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..ab7a6b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/_http_writer.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/_http_writer.pyx b/venv/lib/python3.12/site-packages/aiohttp/_http_writer.pyx new file mode 100644 index 0000000..eff8521 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_http_writer.pyx @@ -0,0 +1,163 @@ +from cpython.bytes cimport PyBytes_FromStringAndSize +from cpython.exc cimport PyErr_NoMemory +from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc +from cpython.object cimport PyObject_Str +from libc.stdint cimport uint8_t, uint64_t +from libc.string cimport memcpy + +from multidict import istr + +DEF BUF_SIZE = 16 * 1024 # 
16KiB +cdef char BUFFER[BUF_SIZE] + +cdef object _istr = istr + + +# ----------------- writer --------------------------- + +cdef struct Writer: + char *buf + Py_ssize_t size + Py_ssize_t pos + + +cdef inline void _init_writer(Writer* writer): + writer.buf = &BUFFER[0] + writer.size = BUF_SIZE + writer.pos = 0 + + +cdef inline void _release_writer(Writer* writer): + if writer.buf != BUFFER: + PyMem_Free(writer.buf) + + +cdef inline int _write_byte(Writer* writer, uint8_t ch): + cdef char * buf + cdef Py_ssize_t size + + if writer.pos == writer.size: + # reallocate + size = writer.size + BUF_SIZE + if writer.buf == BUFFER: + buf = PyMem_Malloc(size) + if buf == NULL: + PyErr_NoMemory() + return -1 + memcpy(buf, writer.buf, writer.size) + else: + buf = PyMem_Realloc(writer.buf, size) + if buf == NULL: + PyErr_NoMemory() + return -1 + writer.buf = buf + writer.size = size + writer.buf[writer.pos] = ch + writer.pos += 1 + return 0 + + +cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): + cdef uint64_t utf = symbol + + if utf < 0x80: + return _write_byte(writer, utf) + elif utf < 0x800: + if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + elif 0xD800 <= utf <= 0xDFFF: + # surogate pair, ignored + return 0 + elif utf < 0x10000: + if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + return -1 + if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + elif utf > 0x10FFFF: + # symbol is too large + return 0 + else: + if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: + return -1 + if _write_byte(writer, + (0x80 | ((utf >> 12) & 0x3f))) < 0: + return -1 + if _write_byte(writer, + (0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + + +cdef inline int _write_str(Writer* writer, str s): + cdef Py_UCS4 ch + for ch in s: + if _write_utf8(writer, ch) < 0: + return -1 + + +# --------------- _serialize_headers ---------------------- + +cdef str to_str(object s): + typ = type(s) + if typ is str: + return s + elif typ is _istr: + return PyObject_Str(s) + elif not isinstance(s, str): + raise TypeError("Cannot serialize non-str key {!r}".format(s)) + else: + return str(s) + + +cdef void _safe_header(str string) except *: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue." 
+ ) + + +def _serialize_headers(str status_line, headers): + cdef Writer writer + cdef object key + cdef object val + cdef bytes ret + + _init_writer(&writer) + + for key, val in headers.items(): + _safe_header(to_str(key)) + _safe_header(to_str(val)) + + try: + if _write_str(&writer, status_line) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + for key, val in headers.items(): + if _write_str(&writer, to_str(key)) < 0: + raise + if _write_byte(&writer, b':') < 0: + raise + if _write_byte(&writer, b' ') < 0: + raise + if _write_str(&writer, to_str(val)) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + return PyBytes_FromStringAndSize(writer.buf, writer.pos) + finally: + _release_writer(&writer) diff --git a/venv/lib/python3.12/site-packages/aiohttp/_websocket.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/aiohttp/_websocket.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..34c156a Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiohttp/_websocket.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/aiohttp/_websocket.pyx b/venv/lib/python3.12/site-packages/aiohttp/_websocket.pyx new file mode 100644 index 0000000..94318d2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/_websocket.pyx @@ -0,0 +1,56 @@ +from cpython cimport PyBytes_AsString + + +#from cpython cimport PyByteArray_AsString # cython still not exports that +cdef extern from "Python.h": + char* PyByteArray_AsString(bytearray ba) except NULL + +from libc.stdint cimport uint32_t, uint64_t, uintmax_t + + +def _websocket_mask_cython(object mask, object data): + """Note, this function mutates its `data` argument + """ + cdef: + Py_ssize_t data_len, i + # bit operations on signed integers are implementation-specific + unsigned char * in_buf + const unsigned char * mask_buf + uint32_t uint32_msk + uint64_t uint64_msk + + assert len(mask) == 4 + + if not isinstance(mask, bytes): + mask = bytes(mask) + + if isinstance(data, bytearray): + data = data + else: + data = bytearray(data) + + data_len = len(data) + in_buf = PyByteArray_AsString(data) + mask_buf = PyBytes_AsString(mask) + uint32_msk = (mask_buf)[0] + + # TODO: align in_data ptr to achieve even faster speeds + # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes + + if sizeof(size_t) >= 8: + uint64_msk = uint32_msk + uint64_msk = (uint64_msk << 32) | uint32_msk + + while data_len >= 8: + (in_buf)[0] ^= uint64_msk + in_buf += 8 + data_len -= 8 + + + while data_len >= 4: + (in_buf)[0] ^= uint32_msk + in_buf += 4 + data_len -= 4 + + for i in range(0, data_len): + in_buf[i] ^= mask_buf[i] diff --git a/venv/lib/python3.12/site-packages/aiohttp/abc.py b/venv/lib/python3.12/site-packages/aiohttp/abc.py new file mode 100644 index 0000000..ceb4490 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/abc.py @@ -0,0 +1,209 @@ +import asyncio +import logging +from abc import ABC, abstractmethod +from collections.abc import Sized +from http.cookies import BaseCookie, Morsel +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, +) + +from multidict import CIMultiDict +from yarl import URL + +from .helpers import get_running_loop +from .typedefs import LooseCookies + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + from .web_exceptions import HTTPException + from .web_request import BaseRequest, Request + from .web_response import StreamResponse +else: + BaseRequest = Request = Application = StreamResponse = None + HTTPException = None + + +class AbstractRouter(ABC): + def __init__(self) -> None: + self._frozen = False + + def post_init(self, app: Application) -> None: + """Post init stage. + + Not an abstract method for sake of backward compatibility, + but if the router wants to be aware of the application + it can override this. + """ + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + """Freeze router.""" + self._frozen = True + + @abstractmethod + async def resolve(self, request: Request) -> "AbstractMatchInfo": + """Return MATCH_INFO for given request""" + + +class AbstractMatchInfo(ABC): + @property # pragma: no branch + @abstractmethod + def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: + """Execute matched request handler""" + + @property + @abstractmethod + def expect_handler( + self, + ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]: + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self) -> Optional[HTTPException]: + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property # pragma: no branch + @abstractmethod + def apps(self) -> Tuple[Application, ...]: + """Stack of nested applications. + + Top level application is left-most element. + + """ + + @abstractmethod + def add_app(self, app: Application) -> None: + """Add application to the nested apps stack.""" + + @abstractmethod + def freeze(self) -> None: + """Freeze the match info. + + The method is called after route resolution. + + After the call .add_app() is forbidden. 
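# Reading back over _websocket.pyx above: the effect of its unrolled loops is
# plain cyclic XOR of the payload against the 4-byte mask; the C version only
# differs by applying the mask 8 (or 4) bytes at a time for speed. A direct
# pure-Python restatement (sketch, not the shipped implementation):
def _websocket_mask_sketch(mask: bytes, data: bytearray) -> None:
    """XOR `data` in place with the 4-byte `mask` (mutates `data`)."""
    assert len(mask) == 4
    for i in range(len(data)):
        data[i] ^= mask[i % 4]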
+ + """ + + +class AbstractView(ABC): + """Abstract class based view.""" + + def __init__(self, request: Request) -> None: + self._request = request + + @property + def request(self) -> Request: + """Request instance.""" + return self._request + + @abstractmethod + def __await__(self) -> Generator[Any, None, StreamResponse]: + """Execute the view handler.""" + + +class AbstractResolver(ABC): + """Abstract DNS resolver.""" + + @abstractmethod + async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: + """Return IP address for given hostname""" + + @abstractmethod + async def close(self) -> None: + """Release resolver""" + + +if TYPE_CHECKING: # pragma: no cover + IterableBase = Iterable[Morsel[str]] +else: + IterableBase = Iterable + + +ClearCookiePredicate = Callable[["Morsel[str]"], bool] + + +class AbstractCookieJar(Sized, IterableBase): + """Abstract Cookie Jar.""" + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = get_running_loop(loop) + + @abstractmethod + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + """Clear all cookies if no predicate is passed.""" + + @abstractmethod + def clear_domain(self, domain: str) -> None: + """Clear all cookies for domain and all subdomains.""" + + @abstractmethod + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + """Return the jar's cookies filtered by their attributes.""" + + +class AbstractStreamWriter(ABC): + """Abstract stream writer.""" + + buffer_size = 0 + output_size = 0 + length: Optional[int] = 0 + + @abstractmethod + async def write(self, chunk: bytes) -> None: + """Write chunk into stream.""" + + @abstractmethod + async def write_eof(self, chunk: bytes = b"") -> None: + """Write last chunk.""" + + @abstractmethod + async def drain(self) -> None: + """Flush the write buffer.""" + + @abstractmethod + def enable_compression(self, encoding: str = "deflate") -> None: + """Enable HTTP body compression""" + + @abstractmethod + def enable_chunking(self) -> None: + """Enable HTTP chunked mode""" + + @abstractmethod + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write HTTP headers""" + + +class AbstractAccessLogger(ABC): + """Abstract writer to access log.""" + + def __init__(self, logger: logging.Logger, log_format: str) -> None: + self.logger = logger + self.log_format = log_format + + @abstractmethod + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + """Emit log to logger.""" diff --git a/venv/lib/python3.12/site-packages/aiohttp/base_protocol.py b/venv/lib/python3.12/site-packages/aiohttp/base_protocol.py new file mode 100644 index 0000000..4c9f0a7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/base_protocol.py @@ -0,0 +1,90 @@ +import asyncio +from typing import Optional, cast + +from .tcp_helpers import tcp_nodelay + + +class BaseProtocol(asyncio.Protocol): + __slots__ = ( + "_loop", + "_paused", + "_drain_waiter", + "_connection_lost", + "_reading_paused", + "transport", + ) + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop: asyncio.AbstractEventLoop = loop + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + self._reading_paused = False + + self.transport: Optional[asyncio.Transport] = None + + @property + def connected(self) -> 
bool: + """Return True if the connection is open.""" + return self.transport is not None + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def pause_reading(self) -> None: + if not self._reading_paused and self.transport is not None: + try: + self.transport.pause_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = True + + def resume_reading(self) -> None: + if self._reading_paused and self.transport is not None: + try: + self.transport.resume_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + tr = cast(asyncio.Transport, transport) + tcp_nodelay(tr, True) + self.transport = tr + + def connection_lost(self, exc: Optional[BaseException]) -> None: + # Wake up the writer if currently paused. + self.transport = None + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + async def _drain_helper(self) -> None: + if not self.connected: + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + waiter = self._loop.create_future() + self._drain_waiter = waiter + await asyncio.shield(waiter) diff --git a/venv/lib/python3.12/site-packages/aiohttp/client.py b/venv/lib/python3.12/site-packages/aiohttp/client.py new file mode 100644 index 0000000..83ef1ba --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/client.py @@ -0,0 +1,1356 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import json +import os +import sys +import traceback +import warnings +from contextlib import suppress +from types import SimpleNamespace, TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Coroutine, + Final, + FrozenSet, + Generator, + Generic, + Iterable, + List, + Literal, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +import attr +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr +from yarl import URL + +from . 
import hdrs, http, payload +from .abc import AbstractCookieJar +from .client_exceptions import ( + ClientConnectionError as ClientConnectionError, + ClientConnectorCertificateError as ClientConnectorCertificateError, + ClientConnectorError as ClientConnectorError, + ClientConnectorSSLError as ClientConnectorSSLError, + ClientError as ClientError, + ClientHttpProxyError as ClientHttpProxyError, + ClientOSError as ClientOSError, + ClientPayloadError as ClientPayloadError, + ClientProxyConnectionError as ClientProxyConnectionError, + ClientResponseError as ClientResponseError, + ClientSSLError as ClientSSLError, + ContentTypeError as ContentTypeError, + InvalidURL as InvalidURL, + ServerConnectionError as ServerConnectionError, + ServerDisconnectedError as ServerDisconnectedError, + ServerFingerprintMismatch as ServerFingerprintMismatch, + ServerTimeoutError as ServerTimeoutError, + TooManyRedirects as TooManyRedirects, + WSServerHandshakeError as WSServerHandshakeError, +) +from .client_reqrep import ( + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + Fingerprint as Fingerprint, + RequestInfo as RequestInfo, + _merge_ssl_params, +) +from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse +from .connector import ( + BaseConnector as BaseConnector, + NamedPipeConnector as NamedPipeConnector, + TCPConnector as TCPConnector, + UnixConnector as UnixConnector, +) +from .cookiejar import CookieJar +from .helpers import ( + _SENTINEL, + DEBUG, + BasicAuth, + TimeoutHandle, + ceil_timeout, + get_env_proxy_for_url, + get_running_loop, + method_must_be_empty_body, + sentinel, + strip_auth_from_url, +) +from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter +from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse +from .streams import FlowControlDataQueue +from .tracing import Trace, TraceConfig +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL + +__all__ = ( + # client_exceptions + "ClientConnectionError", + "ClientConnectorCertificateError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponseError", + "ClientSSLError", + "ContentTypeError", + "InvalidURL", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "TooManyRedirects", + "WSServerHandshakeError", + # client_reqrep + "ClientRequest", + "ClientResponse", + "Fingerprint", + "RequestInfo", + # connector + "BaseConnector", + "TCPConnector", + "UnixConnector", + "NamedPipeConnector", + # client_ws + "ClientWebSocketResponse", + # client + "ClientSession", + "ClientTimeout", + "request", +) + + +if TYPE_CHECKING: + from ssl import SSLContext +else: + SSLContext = None + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ClientTimeout: + total: Optional[float] = None + connect: Optional[float] = None + sock_read: Optional[float] = None + sock_connect: Optional[float] = None + ceil_threshold: float = 5 + + # pool_queue_timeout: Optional[float] = None + # dns_resolution_timeout: Optional[float] = None + # socket_connect_timeout: Optional[float] = None + # connection_acquiring_timeout: Optional[float] = None + # new_connection_timeout: Optional[float] = None + # http_header_timeout: Optional[float] = None + # response_body_timeout: Optional[float] = None + + # to create a timeout specific for a single request, either + # - create a completely 
new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + +# 5 Minute default read timeout +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) + +_RetType = TypeVar("_RetType") +_CharsetResolver = Callable[[ClientResponse, bytes], str] + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + ATTRS = frozenset( + [ + "_base_url", + "_source_traceback", + "_connector", + "requote_redirect_url", + "_loop", + "_cookie_jar", + "_connector_owner", + "_default_auth", + "_version", + "_json_serialize", + "_requote_redirect_url", + "_timeout", + "_raise_for_status", + "_auto_decompress", + "_trust_env", + "_default_headers", + "_skip_auto_headers", + "_request_class", + "_response_class", + "_ws_response_class", + "_trace_configs", + "_read_bufsize", + "_max_line_size", + "_max_field_size", + "_resolve_charset", + ] + ) + + _source_traceback: Optional[traceback.StackSummary] = None + _connector: Optional[BaseConnector] = None + + def __init__( + self, + base_url: Optional[StrOrURL] = None, + *, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + json_serialize: JSONEncoder = json.dumps, + request_class: Type[ClientRequest] = ClientRequest, + response_class: Type[ClientResponse] = ClientResponse, + ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, + version: HttpVersion = http.HttpVersion11, + cookie_jar: Optional[AbstractCookieJar] = None, + connector_owner: bool = True, + raise_for_status: Union[ + bool, Callable[[ClientResponse], Awaitable[None]] + ] = False, + read_timeout: Union[float, _SENTINEL] = sentinel, + conn_timeout: Optional[float] = None, + timeout: Union[object, ClientTimeout] = sentinel, + auto_decompress: bool = True, + trust_env: bool = False, + requote_redirect_url: bool = True, + trace_configs: Optional[List[TraceConfig]] = None, + read_bufsize: int = 2**16, + max_line_size: int = 8190, + max_field_size: int = 8190, + fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", + ) -> None: + if loop is None: + if connector is not None: + loop = connector._loop + + loop = get_running_loop(loop) + + if base_url is None or isinstance(base_url, URL): + self._base_url: Optional[URL] = base_url + else: + self._base_url = URL(base_url) + assert ( + self._base_url.origin() == self._base_url + ), "Only absolute URLs without path part are supported" + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize + if timeout is sentinel: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn( + "read_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + self._timeout = attr.evolve(self._timeout, 
total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, connect=conn_timeout) + warnings.warn( + "conn_timeout is deprecated, " "use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + else: + self._timeout = timeout # type: ignore[assignment] + if read_timeout is not sentinel: + raise ValueError( + "read_timeout and timeout parameters " + "conflict, please setup " + "timeout.read" + ) + if conn_timeout is not None: + raise ValueError( + "conn_timeout and timeout parameters " + "conflict, please setup " + "timeout.connect" + ) + self._raise_for_status = raise_for_status + self._auto_decompress = auto_decompress + self._trust_env = trust_env + self._requote_redirect_url = requote_redirect_url + self._read_bufsize = read_bufsize + self._max_line_size = max_line_size + self._max_field_size = max_field_size + + # Convert to list of tuples + if headers: + real_headers: CIMultiDict[str] = CIMultiDict(headers) + else: + real_headers = CIMultiDict() + self._default_headers: CIMultiDict[str] = real_headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + self._trace_configs = trace_configs or [] + for trace_config in self._trace_configs: + trace_config.freeze() + + self._resolve_charset = fallback_charset_resolver + + def __init_subclass__(cls: Type["ClientSession"]) -> None: + warnings.warn( + "Inheritance class {} from ClientSession " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom ClientSession.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def __del__(self, _warnings: Any = warnings) -> None: + if not self.closed: + kwargs = {"source": self} + _warnings.warn( + f"Unclosed client session {self!r}", ResourceWarning, **kwargs + ) + context = {"client_session": self, "message": "Unclosed client session"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) + + def _build_url(self, str_or_url: StrOrURL) -> URL: + url = URL(str_or_url) + if self._base_url is None: + return url + else: + assert not url.is_absolute() and url.path.startswith("/") + return self._base_url.join(url) + + async def _request( + self, + method: str, + str_or_url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Union[ + None, bool, Callable[[ClientResponse], Awaitable[None]] + ] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: 
Union[ClientTimeout, _SENTINEL] = sentinel, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + ssl: Optional[Union[SSLContext, Literal[False], Fingerprint]] = None, + server_hostname: Optional[str] = None, + proxy_headers: Optional[LooseHeaders] = None, + trace_request_ctx: Optional[SimpleNamespace] = None, + read_bufsize: Optional[int] = None, + auto_decompress: Optional[bool] = None, + max_line_size: Optional[int] = None, + max_field_size: Optional[int] = None, + ) -> ClientResponse: + + # NOTE: timeout clamps existing connect and read timeouts. We cannot + # set the default to None because we need to detect if the user wants + # to use the existing timeouts by setting timeout to None. + + if self.closed: + raise RuntimeError("Session is closed") + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + if data is not None and json is not None: + raise ValueError( + "data and json parameters can not be used at the same time" + ) + elif json is not None: + data = payload.JsonPayload(json, dumps=self._json_serialize) + + if not isinstance(chunked, bool) and chunked is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + redirects = 0 + history = [] + version = self._version + params = params or {} + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + proxy_headers = self._prepare_headers(proxy_headers) + + try: + url = self._build_url(str_or_url) + except ValueError as e: + raise InvalidURL(str_or_url) from e + + skip_headers = set(self._skip_auto_headers) + if skip_auto_headers is not None: + for i in skip_auto_headers: + skip_headers.add(istr(i)) + + if proxy is not None: + try: + proxy = URL(proxy) + except ValueError as e: + raise InvalidURL(proxy) from e + + if timeout is sentinel: + real_timeout: ClientTimeout = self._timeout + else: + if not isinstance(timeout, ClientTimeout): + real_timeout = ClientTimeout(total=timeout) + else: + real_timeout = timeout + # timeout is cumulative for all request operations + # (request, redirects, responses, data consuming) + tm = TimeoutHandle( + self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold + ) + handle = tm.start() + + if read_bufsize is None: + read_bufsize = self._read_bufsize + + if auto_decompress is None: + auto_decompress = self._auto_decompress + + if max_line_size is None: + max_line_size = self._max_line_size + + if max_field_size is None: + max_field_size = self._max_field_size + + traces = [ + Trace( + self, + trace_config, + trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), + ) + for trace_config in self._trace_configs + ] + + for trace in traces: + await trace.send_request_start(method, url.update_query(params), headers) + + timer = tm.timer() + try: + with timer: + while True: + url, auth_from_url = strip_auth_from_url(url) + if auth and auth_from_url: + raise ValueError( + "Cannot combine AUTH argument with " + "credentials encoded in URL" + ) + + if auth is None: + auth = auth_from_url + if auth is None: + auth = self._default_auth + # It would be confusing if we support explicit + # Authorization header with auth argument + if ( + headers is not None + and auth is not None + and hdrs.AUTHORIZATION in headers + ): + raise ValueError( + "Cannot combine AUTHORIZATION header " + "with AUTH argument or credentials " + "encoded in URL" + ) + + all_cookies = self._cookie_jar.filter_cookies(url) + + if cookies is not None: 
+ tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + if req_cookies: + all_cookies.load(req_cookies) + + if proxy is not None: + proxy = URL(proxy) + elif self._trust_env: + with suppress(LookupError): + proxy, proxy_auth = get_env_proxy_for_url(url) + + req = self._request_class( + method, + url, + params=params, + headers=headers, + skip_auto_headers=skip_headers, + data=data, + cookies=all_cookies, + auth=auth, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + loop=self._loop, + response_class=self._response_class, + proxy=proxy, + proxy_auth=proxy_auth, + timer=timer, + session=self, + ssl=ssl, + server_hostname=server_hostname, + proxy_headers=proxy_headers, + traces=traces, + trust_env=self.trust_env, + ) + + # connection timeout + try: + async with ceil_timeout( + real_timeout.connect, + ceil_threshold=real_timeout.ceil_threshold, + ): + assert self._connector is not None + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ServerTimeoutError( + "Connection timeout " "to host {}".format(url) + ) from exc + + assert conn.transport is not None + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=method_must_be_empty_body(method), + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + read_timeout=real_timeout.sock_read, + read_bufsize=read_bufsize, + timeout_ceil_threshold=self._connector._timeout_ceil_threshold, + max_line_size=max_line_size, + max_field_size=max_field_size, + ) + + try: + try: + resp = await req.send(conn) + try: + await resp.start(conn) + except BaseException: + resp.close() + raise + except BaseException: + conn.close() + raise + except ClientError: + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientOSError(*exc.args) from exc + + self._cookie_jar.update_cookies(resp.cookies, resp.url) + + # redirects + if resp.status in (301, 302, 303, 307, 308) and allow_redirects: + + for trace in traces: + await trace.send_request_redirect( + method, url.update_query(params), headers, resp + ) + + redirects += 1 + history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + raise TooManyRedirects( + history[0].request_info, tuple(history) + ) + + # For 301 and 302, mimic IE, now changed in RFC + # https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( + resp.status in (301, 302) and resp.method == hdrs.METH_POST + ): + method = hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( + hdrs.URI + ) + if r_url is None: + # see github.com/aio-libs/aiohttp/issues/2022 + break + else: + # reading from correct redirection + # response is forbidden + resp.release() + + try: + parsed_url = URL( + r_url, encoded=not self._requote_redirect_url + ) + + except ValueError as e: + raise InvalidURL(r_url) from e + + scheme = parsed_url.scheme + if scheme not in ("http", "https", ""): + resp.close() + raise ValueError("Can redirect only to http or https") + elif not scheme: + parsed_url = url.join(parsed_url) + + if url.origin() != parsed_url.origin(): + auth = None + headers.pop(hdrs.AUTHORIZATION, None) + + url = parsed_url + params = {} + resp.release() + continue + + break + + 
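The redirect branch above encodes the browser-compatible method rewrite that its inline comment ("mimic IE, now changed in RFC") refers to: a 303 turns every method except HEAD into GET, 301/302 downgrade only POST, and a cross-origin Location additionally drops auth and the Authorization header. Distilled into a standalone sketch (the function name is illustrative):

    def redirected_method(status: int, method: str) -> str:
        # Mirrors the branch in ClientSession._request above.
        if status == 303 and method != "HEAD":
            return "GET"
        if status in (301, 302) and method == "POST":
            return "GET"
        return method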
# check response status + if raise_for_status is None: + raise_for_status = self._raise_for_status + + if raise_for_status is None: + pass + elif callable(raise_for_status): + await raise_for_status(resp) + elif raise_for_status: + resp.raise_for_status() + + # register connection + if handle is not None: + if resp.connection is not None: + resp.connection.add_callback(handle.cancel) + else: + handle.cancel() + + resp._history = tuple(history) + + for trace in traces: + await trace.send_request_end( + method, url.update_query(params), headers, resp + ) + return resp + + except BaseException as e: + # cleanup timer + tm.close() + if handle: + handle.cancel() + handle = None + + for trace in traces: + await trace.send_request_exception( + method, url.update_query(params), headers, e + ) + raise + + def ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, Literal[False], None, Fingerprint] = None, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> "_WSRequestContextManager": + """Initiate websocket connection.""" + return _WSRequestContextManager( + self._ws_connect( + url, + method=method, + protocols=protocols, + timeout=timeout, + receive_timeout=receive_timeout, + autoclose=autoclose, + autoping=autoping, + heartbeat=heartbeat, + auth=auth, + origin=origin, + params=params, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + verify_ssl=verify_ssl, + fingerprint=fingerprint, + ssl_context=ssl_context, + proxy_headers=proxy_headers, + compress=compress, + max_msg_size=max_msg_size, + ) + ) + + async def _ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, Literal[False], None, Fingerprint] = None, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> ClientWebSocketResponse: + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + else: + real_headers = CIMultiDict(headers) + + default_headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "Upgrade", + hdrs.SEC_WEBSOCKET_VERSION: "13", + } + + for key, value in default_headers.items(): + real_headers.setdefault(key, value) + + sec_key = base64.b64encode(os.urandom(16)) + real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() + + if protocols: + real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) + if origin 
is not None: + real_headers[hdrs.ORIGIN] = origin + if compress: + extstr = ws_ext_gen(compress=compress) + real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + # send request + resp = await self.request( + method, + url, + params=params, + headers=real_headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + proxy_headers=proxy_headers, + ) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid response status", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid upgrade header", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid connection header", + status=resp.status, + headers=resp.headers, + ) + + # key calculation + r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") + match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() + if r_key != match: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid challenge response", + status=resp.status, + headers=resp.headers, + ) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() + for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + # websocket compress + notakeover = False + if compress: + compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + if compress_hdrs: + try: + compress, notakeover = ws_ext_parse(compress_hdrs) + except WSHandshakeError as exc: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message=exc.args[0], + status=resp.status, + headers=resp.headers, + ) from exc + else: + compress = 0 + notakeover = False + + conn = resp.connection + assert conn is not None + conn_proto = conn.protocol + assert conn_proto is not None + transport = conn.transport + assert transport is not None + reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( + conn_proto, 2**16, loop=self._loop + ) + conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + writer = WebSocketWriter( + conn_proto, + transport, + use_mask=True, + compress=compress, + notakeover=notakeover, + ) + except BaseException: + resp.close() + raise + else: + return self._ws_response_class( + reader, + writer, + protocol, + resp, + timeout, + autoclose, + autoping, + self._loop, + receive_timeout=receive_timeout, + heartbeat=heartbeat, + compress=compress, + client_notakeover=notakeover, + ) + + def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": + """Add default headers and transform it to CIMultiDict""" + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names: Set[str] = set() + for key, value in headers.items(): + if key in added_names: + result.add(key, value) + else: + result[key] = value + added_names.add(key) + return result + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, 
**kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) + ) + + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) + + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) + + def patch( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) + + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) + + async def close(self) -> None: + """Close underlying connector. + + Release all acquired resources. + """ + if not self.closed: + if self._connector is not None and self._connector_owner: + await self._connector.close() + self._connector = None + + @property + def closed(self) -> bool: + """Is client session closed. + + A readonly property. 
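Each verb helper above wraps `_request()` in a `_RequestContextManager`, so everyday use is async context managers all the way down (a minimal sketch; the URL is a placeholder):

    import asyncio
    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # get() returns a context manager; exiting it releases
            # the connection back to the connector's pool.
            async with session.get("https://example.com/") as resp:
                print(resp.status, await resp.text())

    asyncio.run(main())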
+ """ + return self._connector is None or self._connector.closed + + @property + def connector(self) -> Optional[BaseConnector]: + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self) -> AbstractCookieJar: + """The session cookies.""" + return self._cookie_jar + + @property + def version(self) -> Tuple[int, int]: + """The session HTTP protocol version.""" + return self._version + + @property + def requote_redirect_url(self) -> bool: + """Do URL requoting on redirection handling.""" + return self._requote_redirect_url + + @requote_redirect_url.setter + def requote_redirect_url(self, val: bool) -> None: + """Do URL requoting on redirection handling.""" + warnings.warn( + "session.requote_redirect_url modification " "is deprecated #2778", + DeprecationWarning, + stacklevel=2, + ) + self._requote_redirect_url = val + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Session's loop.""" + warnings.warn( + "client.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def timeout(self) -> ClientTimeout: + """Timeout for the session.""" + return self._timeout + + @property + def headers(self) -> "CIMultiDict[str]": + """The default headers of the client session.""" + return self._default_headers + + @property + def skip_auto_headers(self) -> FrozenSet[istr]: + """Headers for which autogeneration should be skipped""" + return self._skip_auto_headers + + @property + def auth(self) -> Optional[BasicAuth]: + """An object that represents HTTP Basic Authorization""" + return self._default_auth + + @property + def json_serialize(self) -> JSONEncoder: + """Json serializer callable""" + return self._json_serialize + + @property + def connector_owner(self) -> bool: + """Should connector be closed on session closing""" + return self._connector_owner + + @property + def raise_for_status( + self, + ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: + """Should `ClientResponse.raise_for_status()` be called for each response.""" + return self._raise_for_status + + @property + def auto_decompress(self) -> bool: + """Should the body response be automatically decompressed.""" + return self._auto_decompress + + @property + def trust_env(self) -> bool: + """ + Should proxies information from environment or netrc be trusted. + + Information is from HTTP_PROXY / HTTPS_PROXY environment variables + or ~/.netrc file if present. + """ + return self._trust_env + + @property + def trace_configs(self) -> List[TraceConfig]: + """A list of TraceConfig instances used for client tracing""" + return self._trace_configs + + def detach(self) -> None: + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. 
+ """ + self._connector = None + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "ClientSession": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): + + __slots__ = ("_coro", "_resp") + + def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: + self._coro = coro + + def send(self, arg: None) -> "asyncio.Future[Any]": + return self._coro.send(arg) + + def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]": + return self._coro.throw(*args, **kwargs) + + def close(self) -> None: + return self._coro.close() + + def __await__(self) -> Generator[Any, None, _RetType]: + ret = self._coro.__await__() + return ret + + def __iter__(self) -> Generator[Any, None, _RetType]: + return self.__await__() + + async def __aenter__(self) -> _RetType: + self._resp = await self._coro + return self._resp + + +class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # We're basing behavior on the exception as it can be caused by + # user code unrelated to the status of the connection. If you + # would like to close a connection you must do that + # explicitly. Otherwise connection error handling should kick in + # and close/recycle the connection as required. 
+ self._resp.release() + await self._resp.wait_for_close() + + +class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): + __slots__ = () + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self._resp.close() + + +class _SessionRequestContextManager: + + __slots__ = ("_coro", "_resp", "_session") + + def __init__( + self, + coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], + session: ClientSession, + ) -> None: + self._coro = coro + self._resp: Optional[ClientResponse] = None + self._session = session + + async def __aenter__(self) -> ClientResponse: + try: + self._resp = await self._coro + except BaseException: + await self._session.close() + raise + else: + return self._resp + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + assert self._resp is not None + self._resp.close() + await self._session.close() + + +def request( + method: str, + url: StrOrURL, + *, + params: Optional[Mapping[str, str]] = None, + data: Any = None, + json: Any = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + cookies: Optional[LooseCookies] = None, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + read_bufsize: Optional[int] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + max_line_size: int = 8190, + max_field_size: int = 8190, +) -> _SessionRequestContextManager: + """Constructs and sends a request. + + Returns response object. + method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) BasicAuth named tuple represent HTTP Basic Auth + auth - aiohttp.helpers.BasicAuth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. 
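`_WSRequestContextManager` above closes the websocket in `__aexit__`, so the usual client pattern pairs it with a session context (sketch; the wss:// URL is a placeholder):

    import asyncio
    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # ws_connect drives the Upgrade handshake implemented in
            # _ws_connect above and yields a ClientWebSocketResponse.
            async with session.ws_connect("wss://echo.example.org/ws") as ws:
                await ws.send_str("ping")
                msg = await ws.receive()
                if msg.type == aiohttp.WSMsgType.TEXT:
                    print(msg.data)

    asyncio.run(main())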
+ Usage:: + >>> import aiohttp + >>> resp = await aiohttp.request('GET', 'http://python.org/') + >>> resp + + >>> data = await resp.read() + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, + cookies=cookies, + version=version, + timeout=timeout, + connector=connector, + connector_owner=connector_owner, + ) + + return _SessionRequestContextManager( + session._request( + method, + url, + params=params, + data=data, + json=json, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + compress=compress, + chunked=chunked, + expect100=expect100, + raise_for_status=raise_for_status, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth, + read_bufsize=read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + ), + session, + ) diff --git a/venv/lib/python3.12/site-packages/aiohttp/client_exceptions.py b/venv/lib/python3.12/site-packages/aiohttp/client_exceptions.py new file mode 100644 index 0000000..588ffbf --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/client_exceptions.py @@ -0,0 +1,346 @@ +"""HTTP related errors.""" + +import asyncio +import warnings +from typing import TYPE_CHECKING, Any, Optional, Tuple, Union + +from .http_parser import RawResponseMessage +from .typedefs import LooseHeaders + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore[assignment] + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo +else: + RequestInfo = ClientResponse = ConnectionKey = None + +__all__ = ( + "ClientError", + "ClientConnectionError", + "ClientOSError", + "ClientConnectorError", + "ClientProxyConnectionError", + "ClientSSLError", + "ClientConnectorSSLError", + "ClientConnectorCertificateError", + "ServerConnectionError", + "ServerTimeoutError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ClientResponseError", + "ClientHttpProxyError", + "WSServerHandshakeError", + "ContentTypeError", + "ClientPayloadError", + "InvalidURL", +) + + +class ClientError(Exception): + """Base class for client connection errors.""" + + +class ClientResponseError(ClientError): + """Base class for exceptions that occur after getting a response. + + request_info: An instance of RequestInfo. + history: A sequence of responses, if redirects occurred. + status: HTTP status code. + message: Error message. + headers: Response headers. 
+ """ + + def __init__( + self, + request_info: RequestInfo, + history: Tuple[ClientResponse, ...], + *, + code: Optional[int] = None, + status: Optional[int] = None, + message: str = "", + headers: Optional[LooseHeaders] = None, + ) -> None: + self.request_info = request_info + if code is not None: + if status is not None: + raise ValueError( + "Both code and status arguments are provided; " + "code is deprecated, use status instead" + ) + warnings.warn( + "code argument is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + if status is not None: + self.status = status + elif code is not None: + self.status = code + else: + self.status = 0 + self.message = message + self.headers = headers + self.history = history + self.args = (request_info, history) + + def __str__(self) -> str: + return "{}, message={!r}, url={!r}".format( + self.status, + self.message, + self.request_info.real_url, + ) + + def __repr__(self) -> str: + args = f"{self.request_info!r}, {self.history!r}" + if self.status != 0: + args += f", status={self.status!r}" + if self.message != "": + args += f", message={self.message!r}" + if self.headers is not None: + args += f", headers={self.headers!r}" + return f"{type(self).__name__}({args})" + + @property + def code(self) -> int: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + return self.status + + @code.setter + def code(self, value: int) -> None: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + self.status = value + + +class ContentTypeError(ClientResponseError): + """ContentType found is not valid.""" + + +class WSServerHandshakeError(ClientResponseError): + """websocket server handshake error.""" + + +class ClientHttpProxyError(ClientResponseError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class TooManyRedirects(ClientResponseError): + """Client was redirected too many times.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientConnectorError(ClientOSError): + """Client connector error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + a connection can not be established. + """ + + def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: + self._conn_key = connection_key + self._os_error = os_error + super().__init__(os_error.errno, os_error.strerror) + self.args = (connection_key, os_error) + + @property + def os_error(self) -> OSError: + return self._os_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]: + return self._conn_key.ssl + + def __str__(self) -> str: + return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( + self, self.ssl if self.ssl is not None else "default", self.strerror + ) + + # OSError.__reduce__ does too much black magick + __reduce__ = BaseException.__reduce__ + + +class ClientProxyConnectionError(ClientConnectorError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. 
+ """ + + +class UnixClientConnectorError(ClientConnectorError): + """Unix connector error. + + Raised in :py:class:`aiohttp.connector.UnixConnector` + if connection to unix socket can not be established. + """ + + def __init__( + self, path: str, connection_key: ConnectionKey, os_error: OSError + ) -> None: + self._path = path + super().__init__(connection_key, os_error) + + @property + def path(self) -> str: + return self._path + + def __str__(self) -> str: + return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( + self, self.ssl if self.ssl is not None else "default", self.strerror + ) + + +class ServerConnectionError(ClientConnectionError): + """Server connection errors.""" + + +class ServerDisconnectedError(ServerConnectionError): + """Server disconnected.""" + + def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: + if message is None: + message = "Server disconnected" + + self.args = (message,) + self.message = message + + +class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): + """Server timeout error.""" + + +class ServerFingerprintMismatch(ServerConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: + self.expected = expected + self.got = got + self.host = host + self.port = port + self.args = (expected, got, host, port) + + def __repr__(self) -> str: + return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( + self.__class__.__name__, self.expected, self.got, self.host, self.port + ) + + +class ClientPayloadError(ClientError): + """Response payload error.""" + + +class InvalidURL(ClientError, ValueError): + """Invalid URL. + + URL used for fetching is malformed, e.g. it doesn't contains host + part. 
+ """ + + # Derive from ValueError for backward compatibility + + def __init__(self, url: Any) -> None: + # The type of url is not yarl.URL because the exception can be raised + # on URL(url) call + super().__init__(url) + + @property + def url(self) -> Any: + return self.args[0] + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.url}>" + + +class ClientSSLError(ClientConnectorError): + """Base error for ssl.*Errors.""" + + +if ssl is not None: + cert_errors = (ssl.CertificateError,) + cert_errors_bases = ( + ClientSSLError, + ssl.CertificateError, + ) + + ssl_errors = (ssl.SSLError,) + ssl_error_bases = (ClientSSLError, ssl.SSLError) +else: # pragma: no cover + cert_errors = tuple() + cert_errors_bases = ( + ClientSSLError, + ValueError, + ) + + ssl_errors = tuple() + ssl_error_bases = (ClientSSLError,) + + +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] + """Response ssl error.""" + + +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] + """Response certificate error.""" + + def __init__( + self, connection_key: ConnectionKey, certificate_error: Exception + ) -> None: + self._conn_key = connection_key + self._certificate_error = certificate_error + self.args = (connection_key, certificate_error) + + @property + def certificate_error(self) -> Exception: + return self._certificate_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> bool: + return self._conn_key.is_ssl + + def __str__(self) -> str: + return ( + "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " + "[{0.certificate_error.__class__.__name__}: " + "{0.certificate_error.args}]".format(self) + ) diff --git a/venv/lib/python3.12/site-packages/aiohttp/client_proto.py b/venv/lib/python3.12/site-packages/aiohttp/client_proto.py new file mode 100644 index 0000000..6225b33 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/client_proto.py @@ -0,0 +1,264 @@ +import asyncio +from contextlib import suppress +from typing import Any, Optional, Tuple + +from .base_protocol import BaseProtocol +from .client_exceptions import ( + ClientOSError, + ClientPayloadError, + ServerDisconnectedError, + ServerTimeoutError, +) +from .helpers import BaseTimerContext, status_code_must_be_empty_body +from .http import HttpResponseParser, RawResponseMessage +from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader + + +class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): + """Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + BaseProtocol.__init__(self, loop=loop) + DataQueue.__init__(self, loop) + + self._should_close = False + + self._payload: Optional[StreamReader] = None + self._skip_payload = False + self._payload_parser = None + + self._timer = None + + self._tail = b"" + self._upgraded = False + self._parser: Optional[HttpResponseParser] = None + + self._read_timeout: Optional[float] = None + self._read_timeout_handle: Optional[asyncio.TimerHandle] = None + + self._timeout_ceil_threshold: Optional[float] = 5 + + @property + def upgraded(self) -> bool: + return self._upgraded + + @property + def should_close(self) -> bool: + if self._payload is not None and not self._payload.is_eof() or self._upgraded: + return True + + return ( + self._should_close + or self._upgraded + or self.exception() is not None + or 
self._payload_parser is not None + or len(self) > 0 + or bool(self._tail) + ) + + def force_close(self) -> None: + self._should_close = True + + def close(self) -> None: + transport = self.transport + if transport is not None: + transport.close() + self.transport = None + self._payload = None + self._drop_timeout() + + def is_connected(self) -> bool: + return self.transport is not None and not self.transport.is_closing() + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._drop_timeout() + + if self._payload_parser is not None: + with suppress(Exception): + self._payload_parser.feed_eof() + + uncompleted = None + if self._parser is not None: + try: + uncompleted = self._parser.feed_eof() + except Exception: + if self._payload is not None: + self._payload.set_exception( + ClientPayloadError("Response payload is not completed") + ) + + if not self.is_eof(): + if isinstance(exc, OSError): + exc = ClientOSError(*exc.args) + if exc is None: + exc = ServerDisconnectedError(uncompleted) + # assigns self._should_close to True as side effect, + # we do it anyway below + self.set_exception(exc) + + self._should_close = True + self._parser = None + self._payload = None + self._payload_parser = None + self._reading_paused = False + + super().connection_lost(exc) + + def eof_received(self) -> None: + # should call parser.feed_eof() most likely + self._drop_timeout() + + def pause_reading(self) -> None: + super().pause_reading() + self._drop_timeout() + + def resume_reading(self) -> None: + super().resume_reading() + self._reschedule_timeout() + + def set_exception(self, exc: BaseException) -> None: + self._should_close = True + self._drop_timeout() + super().set_exception(exc) + + def set_parser(self, parser: Any, payload: Any) -> None: + # TODO: actual types are: + # parser: WebSocketReader + # payload: FlowControlDataQueue + # but they are not generi enough + # Need an ABC for both types + self._payload = payload + self._payload_parser = parser + + self._drop_timeout() + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def set_response_params( + self, + *, + timer: Optional[BaseTimerContext] = None, + skip_payload: bool = False, + read_until_eof: bool = False, + auto_decompress: bool = True, + read_timeout: Optional[float] = None, + read_bufsize: int = 2**16, + timeout_ceil_threshold: float = 5, + max_line_size: int = 8190, + max_field_size: int = 8190, + ) -> None: + self._skip_payload = skip_payload + + self._read_timeout = read_timeout + + self._timeout_ceil_threshold = timeout_ceil_threshold + + self._parser = HttpResponseParser( + self, + self._loop, + read_bufsize, + timer=timer, + payload_exception=ClientPayloadError, + response_with_body=not skip_payload, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + max_line_size=max_line_size, + max_field_size=max_field_size, + ) + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def _drop_timeout(self) -> None: + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self) -> None: + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout + ) + else: + self._read_timeout_handle = None + + def start_timeout(self) -> None: + self._reschedule_timeout() + + def _on_read_timeout(self) -> None: + exc = 
ServerTimeoutError("Timeout on reading data from socket") + self.set_exception(exc) + if self._payload is not None: + self._payload.set_exception(exc) + + def data_received(self, data: bytes) -> None: + self._reschedule_timeout() + + if not data: + return + + # custom payload parser + if self._payload_parser is not None: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self._payload = None + self._payload_parser = None + + if tail: + self.data_received(tail) + return + else: + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + else: + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + self.set_exception(exc) + return + + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or status_code_must_be_empty_body( + message.code + ): + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if tail: + if upgraded: + self.data_received(tail) + else: + self._tail = tail diff --git a/venv/lib/python3.12/site-packages/aiohttp/client_reqrep.py b/venv/lib/python3.12/site-packages/aiohttp/client_reqrep.py new file mode 100644 index 0000000..1d946ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/client_reqrep.py @@ -0,0 +1,1196 @@ +import asyncio +import codecs +import contextlib +import functools +import io +import re +import sys +import traceback +import warnings +from hashlib import md5, sha1, sha256 +from http.cookies import CookieError, Morsel, SimpleCookie +from types import MappingProxyType, TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, + cast, +) + +import attr +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy +from yarl import URL + +from . 
import hdrs, helpers, http, multipart, payload +from .abc import AbstractStreamWriter +from .client_exceptions import ( + ClientConnectionError, + ClientOSError, + ClientResponseError, + ContentTypeError, + InvalidURL, + ServerFingerprintMismatch, +) +from .compression_utils import HAS_BROTLI +from .formdata import FormData +from .helpers import ( + BaseTimerContext, + BasicAuth, + HeadersMixin, + TimerNoop, + basicauth_from_netrc, + netrc_from_env, + noop, + reify, + set_result, +) +from .http import ( + SERVER_SOFTWARE, + HttpVersion, + HttpVersion10, + HttpVersion11, + StreamWriter, +) +from .log import client_logger +from .streams import StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseCookies, + LooseHeaders, + RawHeaders, +) + +try: + import ssl + from ssl import SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession + from .connector import Connection + from .tracing import Trace + + +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") +json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") + + +def _gen_default_accept_encoding() -> str: + return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ContentDisposition: + type: Optional[str] + parameters: "MappingProxyType[str, str]" + filename: Optional[str] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class RequestInfo: + url: URL + method: str + headers: "CIMultiDictProxy[str]" + real_url: URL = attr.ib() + + @real_url.default + def real_url_default(self) -> URL: + return self.url + + +class Fingerprint: + HASHFUNC_BY_DIGESTLEN = { + 16: md5, + 20: sha1, + 32: sha256, + } + + def __init__(self, fingerprint: bytes) -> None: + digestlen = len(fingerprint) + hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) + if not hashfunc: + raise ValueError("fingerprint has invalid length") + elif hashfunc is md5 or hashfunc is sha1: + raise ValueError( + "md5 and sha1 are insecure and " "not supported. Use sha256." 
+ ) + self._hashfunc = hashfunc + self._fingerprint = fingerprint + + @property + def fingerprint(self) -> bytes: + return self._fingerprint + + def check(self, transport: asyncio.Transport) -> None: + if not transport.get_extra_info("sslcontext"): + return + sslobj = transport.get_extra_info("ssl_object") + cert = sslobj.getpeercert(binary_form=True) + got = self._hashfunc(cert).digest() + if got != self._fingerprint: + host, port, *_ = transport.get_extra_info("peername") + raise ServerFingerprintMismatch(self._fingerprint, got, host, port) + + +if ssl is not None: + SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) +else: # pragma: no cover + SSL_ALLOWED_TYPES = type(None) + + +def _merge_ssl_params( + ssl: Union["SSLContext", Literal[False], Fingerprint, None], + verify_ssl: Optional[bool], + ssl_context: Optional["SSLContext"], + fingerprint: Optional[bytes], +) -> Union["SSLContext", Literal[False], Fingerprint, None]: + if verify_ssl is not None and not verify_ssl: + warnings.warn( + "verify_ssl is deprecated, use ssl=False instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = False + if ssl_context is not None: + warnings.warn( + "ssl_context is deprecated, use ssl=context instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = ssl_context + if fingerprint is not None: + warnings.warn( + "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", + DeprecationWarning, + stacklevel=3, + ) + if ssl is not None: + raise ValueError( + "verify_ssl, ssl_context, fingerprint and ssl " + "parameters are mutually exclusive" + ) + else: + ssl = Fingerprint(fingerprint) + if not isinstance(ssl, SSL_ALLOWED_TYPES): + raise TypeError( + "ssl should be SSLContext, bool, Fingerprint or None, " + "got {!r} instead.".format(ssl) + ) + return ssl + + +@attr.s(auto_attribs=True, slots=True, frozen=True) +class ConnectionKey: + # the key should contain information about the proxy / TLS in use + # to prevent reusing wrong connections from a pool + host: str + port: Optional[int] + is_ssl: bool + ssl: Union[SSLContext, None, Literal[False], Fingerprint] + proxy: Optional[URL] + proxy_auth: Optional[BasicAuth] + proxy_headers_hash: Optional[int] # hash(CIMultiDict) + + +def _is_expected_content_type( + response_content_type: str, expected_content_type: str +) -> bool: + if expected_content_type == "application/json": + return json_re.match(response_content_type) is not None + return expected_content_type in response_content_type + + +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) + + DEFAULT_HEADERS = { + hdrs.ACCEPT: "*/*", + hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), + } + + body = b"" + auth = None + response = None + + __writer = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response + + # N.B. + # Adding a __del__ method that closes self._writer doesn't make sense + # because _writer is an instance method, thus it keeps a reference to self. + # Until the writer has finished, the finalizer will not be called.
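+ # Editor's note (hedged illustration, not upstream aiohttp code): ClientSession + # builds this object internally; a minimal standalone sketch, using + # http://example.com as a placeholder URL, might look like + # req = ClientRequest("GET", URL("http://example.com/")) + # resp = await req.send(conn) # conn: a Connection obtained from a connector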
+ + def __init__( + self, + method: str, + url: URL, + *, + params: Optional[Mapping[str, str]] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Iterable[str] = frozenset(), + data: Any = None, + cookies: Optional[LooseCookies] = None, + auth: Optional[BasicAuth] = None, + version: http.HttpVersion = http.HttpVersion11, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + response_class: Optional[Type["ClientResponse"]] = None, + proxy: Optional[URL] = None, + proxy_auth: Optional[BasicAuth] = None, + timer: Optional[BaseTimerContext] = None, + session: Optional["ClientSession"] = None, + ssl: Union[SSLContext, Literal[False], Fingerprint, None] = None, + proxy_headers: Optional[LooseHeaders] = None, + traces: Optional[List["Trace"]] = None, + trust_env: bool = False, + server_hostname: Optional[str] = None, + ): + if loop is None: + loop = asyncio.get_event_loop() + + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + f"Method cannot contain non-token characters {method!r} " + f"(found at least {match.group()!r})" + ) + + assert isinstance(url, URL), url + assert isinstance(proxy, (URL, type(None))), proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + self._session = cast("ClientSession", session) + if params: + q = MultiDict(url.query) + url2 = url.with_query(params) + q.extend(url2.query) + url = url.with_query(q) + self.original_url = url + self.url = url.with_fragment(None) + self.method = method.upper() + self.chunked = chunked + self.compress = compress + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse + else: + real_response_class = response_class + self.response_class: Type[ClientResponse] = real_response_class + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl + self.server_hostname = server_hostname + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + self.update_cookies(cookies) + self.update_content_encoding(data) + self.update_auth(auth, trust_env) + self.update_proxy(proxy, proxy_auth, proxy_headers) + + self.update_body_from_data(data) + if data is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + if traces is None: + traces = [] + self._traces = traces + + def __reset_writer(self, _: object = None) -> None: + self.__writer = None + + @property + def _writer(self) -> Optional["asyncio.Task[None]"]: + return self.__writer + + @_writer.setter + def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + if self.__writer is not None: + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = writer + if writer is not None: + writer.add_done_callback(self.__reset_writer) + + def is_ssl(self) -> bool: + return self.url.scheme in ("https", "wss") + + @property + def ssl(self) -> Union["SSLContext", None, Literal[False], Fingerprint]: + return self._ssl + + @property + def connection_key(self) -> ConnectionKey: + proxy_headers = self.proxy_headers + if proxy_headers: + h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) + else: + h = None + return ConnectionKey( + self.host, + self.port, + self.is_ssl(), +
self.ssl, + self.proxy, + self.proxy_auth, + h, + ) + + @property + def host(self) -> str: + ret = self.url.raw_host + assert ret is not None + return ret + + @property + def port(self) -> Optional[int]: + return self.url.port + + @property + def request_info(self) -> RequestInfo: + headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) + return RequestInfo(self.url, self.method, headers, self.original_url) + + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.raw_host: + raise InvalidURL(url) + + # basic auth info + username, password = url.user, url.password + if username: + self.auth = helpers.BasicAuth(username, password or "") + + def update_version(self, version: Union[http.HttpVersion, str]) -> None: + """Convert request version to a two-element tuple. + + Parses an HTTP version string: '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [part.strip() for part in version.split(".", 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + f"Can not parse http version number: {version}" + ) from None + self.version = version + + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers: CIMultiDict[str] = CIMultiDict() + + # add host + netloc = cast(str, self.url.raw_host) + if helpers.is_ipv6_address(netloc): + netloc = f"[{netloc}]" + # See https://github.com/aio-libs/aiohttp/issues/3636. + netloc = netloc.rstrip(".") + if self.url.port is not None and not self.url.is_default_port(): + netloc += ":" + str(self.url.port) + self.headers[hdrs.HOST] = netloc + + if headers: + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() # type: ignore[assignment] + + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key.lower() == "host": + self.headers[key] = value + else: + self.headers.add(key, value) + + def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers.add(hdr, val) + + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return + + c = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, "")) + del self.headers[hdrs.COOKIE] + + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore[assignment] + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + mrsl_val.set(value.key, value.value, value.coded_value) + c[name] = mrsl_val + else: + c[name] = value # type: ignore[assignment] + + self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() + + def update_content_encoding(self, data: Any) -> None: + """Set request content encoding.""" + if data is None: + return + + enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() + if enc: + if self.compress: + raise ValueError( + "compress can not be set " "if Content-Encoding header is set" + ) + elif self.compress: + if not
isinstance(self.compress, str): + self.compress = "deflate" + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length + + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + if "chunked" in te: + if self.chunked: + raise ValueError( + "chunked can not be set " + 'if "Transfer-Encoding: chunked" header is set' + ) + + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + "chunked can not be set " "if Content-Length header is set" + ) + + self.headers[hdrs.TRANSFER_ENCODING] = "chunked" + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) + + def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None and trust_env and self.url.host is not None: + netrc_obj = netrc_from_env() + with contextlib.suppress(LookupError): + auth = basicauth_from_netrc(netrc_obj, self.url.host) + if auth is None: + return + + if not isinstance(auth, helpers.BasicAuth): + raise TypeError("BasicAuth() tuple is required instead") + + self.headers[hdrs.AUTHORIZATION] = auth.encode() + + def update_body_from_data(self, body: Any) -> None: + if body is None: + return + + # FormData + if isinstance(body, FormData): + body = body() + + try: + body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) + except payload.LookupError: + body = FormData(body)() + + self.body = body + + # enable chunked encoding if needed + if not self.chunked: + if hdrs.CONTENT_LENGTH not in self.headers: + size = body.size + if size is None: + self.chunked = True + else: + if hdrs.CONTENT_LENGTH not in self.headers: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + + # copy payload headers + assert body.headers + for (key, value) in body.headers.items(): + if key in self.headers: + continue + if key in self.skip_auto_headers: + continue + self.headers[key] = value + + def update_expect_continue(self, expect: bool = False) -> None: + if expect: + self.headers[hdrs.EXPECT] = "100-continue" + elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": + expect = True + + if expect: + self._continue = self.loop.create_future() + + def update_proxy( + self, + proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders], + ) -> None: + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy = proxy + self.proxy_auth = proxy_auth + self.proxy_headers = proxy_headers + + def keep_alive(self) -> bool: + if self.version < HttpVersion10: + # keep alive not supported at all + return False + if self.version == HttpVersion10: + if self.headers.get(hdrs.CONNECTION) == "keep-alive": + return True + else: # no headers means we close for Http 1.0 + return False + elif self.headers.get(hdrs.CONNECTION) == "close": + return False + + return True + + async def write_bytes( + self, writer: AbstractStreamWriter, conn: "Connection" + ) -> None: + """Support coroutines that yield bytes objects.""" + # 100 response + if self._continue is not None: + try: + await writer.drain() + await self._continue + except asyncio.CancelledError: + return + + protocol = conn.protocol + assert protocol is not None + try: + if isinstance(self.body, payload.Payload): + await self.body.write(writer) + else: + if
isinstance(self.body, (bytes, bytearray)): + self.body = (self.body,) # type: ignore[assignment] + + for chunk in self.body: + await writer.write(chunk) # type: ignore[arg-type] + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + protocol.set_exception(exc) + else: + new_exc = ClientOSError( + exc.errno, "Can not write request body for %s" % self.url + ) + new_exc.__context__ = exc + new_exc.__cause__ = exc + protocol.set_exception(new_exc) + except asyncio.CancelledError: + await writer.write_eof() + except Exception as exc: + protocol.set_exception(exc) + else: + await writer.write_eof() + protocol.start_timeout() + + async def send(self, conn: "Connection") -> "ClientResponse": + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + connect_host = self.url.raw_host + assert connect_host is not None + if helpers.is_ipv6_address(connect_host): + connect_host = f"[{connect_host}]" + path = f"{connect_host}:{self.url.port}" + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path + if self.url.raw_query_string: + path += "?" + self.url.raw_query_string + + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, + self.loop, + on_chunk_sent=functools.partial( + self._on_chunk_request_sent, self.method, self.url + ), + on_headers_sent=functools.partial( + self._on_headers_request_sent, self.method, self.url + ), + ) + + if self.compress: + writer.enable_compression(self.compress) + + if self.chunked is not None: + writer.enable_chunking() + + # set default content-type + if ( + self.method in self.POST_METHODS + and hdrs.CONTENT_TYPE not in self.skip_auto_headers + and hdrs.CONTENT_TYPE not in self.headers + ): + self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" + + # set the connection header + connection = self.headers.get(hdrs.CONNECTION) + if not connection: + if self.keep_alive(): + if self.version == HttpVersion10: + connection = "keep-alive" + else: + if self.version == HttpVersion11: + connection = "close" + + if connection is not None: + self.headers[hdrs.CONNECTION] = connection + + # status + headers + status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( + self.method, path, v=self.version + ) + await writer.write_headers(status_line, self.headers) + + self._writer = self.loop.create_task(self.write_bytes(writer, conn)) + + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, + self.original_url, + writer=self._writer, + continue100=self._continue, + timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session, + ) + return self.response + + async def close(self) -> None: + if self._writer is not None: + with contextlib.suppress(asyncio.CancelledError): + await self._writer + + def terminate(self) -> None: + if self._writer is not None: + if not self.loop.is_closed(): + self._writer.cancel() + self._writer.remove_done_callback(self.__reset_writer) + self._writer = None + + async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(method, url, chunk) + + async def _on_headers_request_sent( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + for trace in 
self._traces: + await trace.send_request_headers(method, url, headers) + + +class ClientResponse(HeadersMixin): + + # Some of these attributes are None when created, + # but will be set by the start() method. + # As the end user will likely never see the None values, we cheat the types below. + # from the Status-Line of the response + version: Optional[HttpVersion] = None # HTTP-Version + status: int = None # type: ignore[assignment] # Status-Code + reason: Optional[str] = None # Reason-Phrase + + content: StreamReader = None # type: ignore[assignment] # Payload stream + _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] + _raw_headers: RawHeaders = None # type: ignore[assignment] + + _connection = None # current connection + _source_traceback: Optional[traceback.StackSummary] = None + # set up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + __writer = None + + def __init__( + self, + method: str, + url: URL, + *, + writer: "asyncio.Task[None]", + continue100: Optional["asyncio.Future[bool]"], + timer: BaseTimerContext, + request_info: RequestInfo, + traces: List["Trace"], + loop: asyncio.AbstractEventLoop, + session: "ClientSession", + ) -> None: + assert isinstance(url, URL) + + self.method = method + self.cookies = SimpleCookie() + + self._real_url = url + self._url = url.with_fragment(None) + self._body: Any = None + self._writer: Optional[asyncio.Task[None]] = writer + self._continue = continue100 # None by default + self._closed = True + self._history: Tuple[ClientResponse, ...] = () + self._request_info = request_info + self._timer = timer if timer is not None else TimerNoop() + self._cache: Dict[str, Any] = {} + self._traces = traces + self._loop = loop + # store a reference to session #1985 + self._session: Optional[ClientSession] = session + # Save reference to _resolve_charset, so that get_encoding() will still + # work after the response has finished reading the body. + if session is None: + # TODO: Fix session=None in tests (see ClientRequest.__init__). 
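+ # Editor's note (illustrative, not upstream code): with session=None the + # lambda below makes get_encoding() fall back to "utf-8" for any body, + # e.g. self._resolve_charset(self, b"\xff") == "utf-8".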
+ self._resolve_charset: Callable[ + ["ClientResponse", bytes], str + ] = lambda *_: "utf-8" + else: + self._resolve_charset = session._resolve_charset + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __reset_writer(self, _: object = None) -> None: + self.__writer = None + + @property + def _writer(self) -> Optional["asyncio.Task[None]"]: + return self.__writer + + @_writer.setter + def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + if self.__writer is not None: + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = writer + if writer is not None: + writer.add_done_callback(self.__reset_writer) + + @reify + def url(self) -> URL: + return self._url + + @reify + def url_obj(self) -> URL: + warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) + return self._url + + @reify + def real_url(self) -> URL: + return self._real_url + + @reify + def host(self) -> str: + assert self._url.host is not None + return self._url.host + + @reify + def headers(self) -> "CIMultiDictProxy[str]": + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + return self._raw_headers + + @reify + def request_info(self) -> RequestInfo: + return self._request_info + + @reify + def content_disposition(self) -> Optional[ContentDisposition]: + raw = self._headers.get(hdrs.CONTENT_DISPOSITION) + if raw is None: + return None + disposition_type, params_dct = multipart.parse_content_disposition(raw) + params = MappingProxyType(params_dct) + filename = multipart.content_disposition_filename(params) + return ContentDisposition(disposition_type, params, filename) + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + + if self._connection is not None: + self._connection.release() + self._cleanup_writer() + + if self._loop.get_debug(): + kwargs = {"source": self} + _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) + context = {"client_response": self, "message": "Unclosed response"} + if self._source_traceback: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __repr__(self) -> str: + out = io.StringIO() + ascii_encodable_url = str(self.url) + if self.reason: + ascii_encodable_reason = self.reason.encode( + "ascii", "backslashreplace" + ).decode("ascii") + else: + ascii_encodable_reason = "None" + print( + "<ClientResponse({}) [{} {}]>".format( + ascii_encodable_url, self.status, ascii_encodable_reason + ), + file=out, + ) + print(self.headers, file=out) + return out.getvalue() + + @property + def connection(self) -> Optional["Connection"]: + return self._connection + + @reify + def history(self) -> Tuple["ClientResponse", ...]: + """A sequence of responses, if redirects occurred.""" + return self._history + + @reify + def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": + links_str = ", ".join(self.headers.getall("link", [])) + + if not links_str: + return MultiDictProxy(MultiDict()) + + links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() + + for val in re.split(r",(?=\s*<)", links_str): + match = re.match(r"\s*<(.*)>(.*)", val) + if match is None: # pragma: no cover + # the check exists to suppress mypy error + continue + url, params_str = match.groups() + params = params_str.split(";")[1:] + + link: MultiDict[Union[str, URL]] = MultiDict() + + for param in params: + match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) + if match is None: # pragma: no cover + # the
check exists to suppress mypy error + continue + key, _, value, _ = match.groups() + + link.add(key, value) + + key = link.get("rel", url) + + link.add("url", self.url.join(URL(url))) + + links.add(str(key), MultiDictProxy(link)) + + return MultiDictProxy(links) + + async def start(self, connection: "Connection") -> "ClientResponse": + """Start response processing.""" + self._closed = False + self._protocol = connection.protocol + self._connection = connection + + with self._timer: + while True: + # read response + try: + protocol = self._protocol + message, payload = await protocol.read() # type: ignore[union-attr] + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, + self.history, + status=exc.code, + message=exc.message, + headers=exc.headers, + ) from exc + + if message.code < 100 or message.code > 199 or message.code == 101: + break + + if self._continue is not None: + set_result(self._continue, True) + self._continue = None + + # payload eof handler + payload.on_eof(self._response_eof) + + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason + + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] + + # payload + self.content = payload + + # cookies + for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): + try: + self.cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + return self + + def _response_eof(self) -> None: + if self._closed: + return + + # protocol could be None because connection could be detached + protocol = self._connection and self._connection.protocol + if protocol is not None and protocol.upgraded: + return + + self._closed = True + self._cleanup_writer() + self._release_connection() + + @property + def closed(self) -> bool: + return self._closed + + def close(self) -> None: + if not self._released: + self._notify_content() + + self._closed = True + if self._loop is None or self._loop.is_closed(): + return + + self._cleanup_writer() + if self._connection is not None: + self._connection.close() + self._connection = None + + def release(self) -> Any: + if not self._released: + self._notify_content() + + self._closed = True + + self._cleanup_writer() + self._release_connection() + return noop() + + @property + def ok(self) -> bool: + """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + + This is **not** a check for ``200 OK`` but a check that the response + status is under 400. 
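+ + Illustrative example (editor's addition): a 404 response is returned + normally with ``resp.ok == False``; call ``raise_for_status()`` to + convert it into a ``ClientResponseError``.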
+ """ + return 400 > self.status + + def raise_for_status(self) -> None: + if not self.ok: + # reason should always be not None for a started response + assert self.reason is not None + self.release() + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers, + ) + + def _release_connection(self) -> None: + if self._connection is not None: + if self._writer is None: + self._connection.release() + self._connection = None + else: + self._writer.add_done_callback(lambda f: self._release_connection()) + + async def _wait_released(self) -> None: + if self._writer is not None: + await self._writer + self._release_connection() + + def _cleanup_writer(self) -> None: + if self._writer is not None: + self._writer.cancel() + self._session = None + + def _notify_content(self) -> None: + content = self.content + if content and content.exception() is None: + content.set_exception(ClientConnectionError("Connection closed")) + self._released = True + + async def wait_for_close(self) -> None: + if self._writer is not None: + await self._writer + self.release() + + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received( + self.method, self.url, self._body + ) + except BaseException: + self.close() + raise + elif self._released: # Response explicitly released + raise ClientConnectionError("Connection closed") + + protocol = self._connection and self._connection.protocol + if protocol is None or not protocol.upgraded: + await self._wait_released() # Underlying connection released + return self._body # type: ignore[no-any-return] + + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + mimetype = helpers.parse_mimetype(ctype) + + encoding = mimetype.parameters.get("charset") + if encoding: + with contextlib.suppress(LookupError): + return codecs.lookup(encoding).name + + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" + ): + # RFC 7159 states that the default encoding is UTF-8. 
# RFC 7483 defines application/rdap+json + return "utf-8" + + if self._body is None: + raise RuntimeError( + "Cannot compute fallback encoding of a not yet read body" + ) + + return self._resolve_charset(self, self._body) + + async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + """Read response payload and decode.""" + if self._body is None: + await self.read() + + if encoding is None: + encoding = self.get_encoding() + + return self._body.decode( # type: ignore[no-any-return,union-attr] + encoding, errors=errors + ) + + async def json( + self, + *, + encoding: Optional[str] = None, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + content_type: Optional[str] = "application/json", + ) -> Any: + """Read and decode the JSON response.""" + if self._body is None: + await self.read() + + if content_type: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + if not _is_expected_content_type(ctype, content_type): + raise ContentTypeError( + self.request_info, + self.history, + message=( + "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype + ), + headers=self.headers, + ) + + stripped = self._body.strip() # type: ignore[union-attr] + if not stripped: + return None + + if encoding is None: + encoding = self.get_encoding() + + return loads(stripped.decode(encoding)) + + async def __aenter__(self) -> "ClientResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # similar to _RequestContextManager, we do not need to check + # for exceptions: the response object can close the connection + # if its state is broken + self.release() + await self.wait_for_close() diff --git a/venv/lib/python3.12/site-packages/aiohttp/client_ws.py b/venv/lib/python3.12/site-packages/aiohttp/client_ws.py new file mode 100644 index 0000000..d9c74a3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/client_ws.py @@ -0,0 +1,315 @@ +"""WebSocket client for asyncio.""" + +import asyncio +import sys +from typing import Any, Optional, cast + +from .client_exceptions import ClientError +from .client_reqrep import ClientResponse +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSCloseCode, + WSMessage, + WSMsgType, +) +from .http_websocket import WebSocketWriter # WSMessage +from .streams import EofStream, FlowControlDataQueue +from .typedefs import ( + DEFAULT_JSON_DECODER, + DEFAULT_JSON_ENCODER, + JSONDecoder, + JSONEncoder, +) + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + + +class ClientWebSocketResponse: + def __init__( + self, + reader: "FlowControlDataQueue[WSMessage]", + writer: WebSocketWriter, + protocol: Optional[str], + response: ClientResponse, + timeout: float, + autoclose: bool, + autoping: bool, + loop: asyncio.AbstractEventLoop, + *, + receive_timeout: Optional[float] = None, + heartbeat: Optional[float] = None, + compress: int = 0, + client_notakeover: bool = False, + ) -> None: + self._response = response + self._conn = response.connection + + self._writer = writer + self._reader = reader + self._protocol = protocol + self._closed = False + self._closing = False + self._close_code: Optional[int] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb:
Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._loop = loop + self._waiting: Optional[asyncio.Future[bool]] = None + self._exception: Optional[BaseException] = None + self._compress = compress + self._client_notakeover = client_notakeover + + self._reset_heartbeat() + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + self._heartbeat_cb = call_later( + self._send_heartbeat, + self._heartbeat, + self._loop, + timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold + if self._conn is not None + else 5, + ) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. + self._loop.create_task(self._writer.ping()) + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, + self._pong_heartbeat, + self._loop, + timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold + if self._conn is not None + else 5, + ) + + def _pong_not_received(self) -> None: + if not self._closed: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + self._response.close() + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def protocol(self) -> Optional[str]: + return self._protocol + + @property + def compress(self) -> int: + return self._compress + + @property + def client_notakeover(self) -> bool: + return self._client_notakeover + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """extra info from connection transport""" + conn = self._response.connection + if conn is None: + return default + transport = conn.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[int] = None) -> None: + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[int] = None, + *, + dumps: JSONEncoder = DEFAULT_JSON_ENCODER, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + # we need to break `receive()` cycle first, + # `close()` may be called from 
different task + if self._waiting is not None and not self._closing: + self._closing = True + self._reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._cancel_heartbeat() + self._closed = True + try: + await self._writer.close(code, message) + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if self._close_code: + self._response.close() + return True + + while True: + try: + async with async_timeout.timeout(self._timeout): + msg = await self._reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._response.close() + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + self._response.close() + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + self._response.close() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + while True: + if self._waiting is not None: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + return WS_CLOSED_MESSAGE + elif self._closing: + await self.close() + return WS_CLOSED_MESSAGE + + try: + self._waiting = self._loop.create_future() + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + self._waiting = None + set_result(waiter, True) + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except ClientError: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + return WS_CLOSED_MESSAGE + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") + return cast(str, msg.data) + + async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, + *, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + timeout: Optional[float] = None, + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + def __aiter__(self) -> "ClientWebSocketResponse": + return self + + async 
def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg diff --git a/venv/lib/python3.12/site-packages/aiohttp/compression_utils.py b/venv/lib/python3.12/site-packages/aiohttp/compression_utils.py new file mode 100644 index 0000000..9631d37 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/compression_utils.py @@ -0,0 +1,157 @@ +import asyncio +import zlib +from concurrent.futures import Executor +from typing import Optional, cast + +try: + try: + import brotlicffi as brotli + except ImportError: + import brotli + + HAS_BROTLI = True +except ImportError: # pragma: no cover + HAS_BROTLI = False + +MAX_SYNC_CHUNK_SIZE = 1024 + + +def encoding_to_mode( + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, +) -> int: + if encoding == "gzip": + return 16 + zlib.MAX_WBITS + + return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS + + +class ZlibBaseHandler: + def __init__( + self, + mode: int, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + self._mode = mode + self._executor = executor + self._max_sync_chunk_size = max_sync_chunk_size + + +class ZLibCompressor(ZlibBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, + level: Optional[int] = None, + wbits: Optional[int] = None, + strategy: int = zlib.Z_DEFAULT_STRATEGY, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + super().__init__( + mode=encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits, + executor=executor, + max_sync_chunk_size=max_sync_chunk_size, + ) + if level is None: + self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy) + else: + self._compressor = zlib.compressobj( + wbits=self._mode, strategy=strategy, level=level + ) + self._compress_lock = asyncio.Lock() + + def compress_sync(self, data: bytes) -> bytes: + return self._compressor.compress(data) + + async def compress(self, data: bytes) -> bytes: + async with self._compress_lock: + # To ensure the stream is consistent in the event + # there are multiple writers, we need to lock + # the compressor so that only one writer can + # compress at a time. 
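+ # Editor's note (illustrative): with the default MAX_SYNC_CHUNK_SIZE of + # 1024 bytes, a 4 KiB chunk is handed to run_in_executor() below, while + # a 512-byte chunk is compressed inline on the event loop.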
+ if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.compress_sync, data + ) + return self.compress_sync(data) + + def flush(self, mode: int = zlib.Z_FINISH) -> bytes: + return self._compressor.flush(mode) + + +class ZLibDecompressor(ZlibBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + super().__init__( + mode=encoding_to_mode(encoding, suppress_deflate_header), + executor=executor, + max_sync_chunk_size=max_sync_chunk_size, + ) + self._decompressor = zlib.decompressobj(wbits=self._mode) + + def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: + return self._decompressor.decompress(data, max_length) + + async def decompress(self, data: bytes, max_length: int = 0) -> bytes: + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.decompress_sync, data, max_length + ) + return self.decompress_sync(data, max_length) + + def flush(self, length: int = 0) -> bytes: + return ( + self._decompressor.flush(length) + if length > 0 + else self._decompressor.flush() + ) + + @property + def eof(self) -> bool: + return self._decompressor.eof + + @property + def unconsumed_tail(self) -> bytes: + return self._decompressor.unconsumed_tail + + @property + def unused_data(self) -> bytes: + return self._decompressor.unused_data + + +class BrotliDecompressor: + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + if not HAS_BROTLI: + raise RuntimeError( + "The brotli decompression is not available. " + "Please install `Brotli` module" + ) + self._obj = brotli.Decompressor() + + def decompress_sync(self, data: bytes) -> bytes: + if hasattr(self._obj, "decompress"): + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return cast(bytes, self._obj.flush()) + return b"" diff --git a/venv/lib/python3.12/site-packages/aiohttp/connector.py b/venv/lib/python3.12/site-packages/aiohttp/connector.py new file mode 100644 index 0000000..61c2643 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/connector.py @@ -0,0 +1,1489 @@ +import asyncio +import functools +import random +import sys +import traceback +import warnings +from collections import defaultdict, deque +from contextlib import suppress +from http import HTTPStatus +from http.cookies import SimpleCookie +from itertools import cycle, islice +from time import monotonic +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + DefaultDict, + Dict, + Iterator, + List, + Literal, + Optional, + Set, + Tuple, + Type, + Union, + cast, +) + +import attr + +from . 
import hdrs, helpers +from .abc import AbstractResolver +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorError, + ClientConnectorSSLError, + ClientHttpProxyError, + ClientProxyConnectionError, + ServerFingerprintMismatch, + UnixClientConnectorError, + cert_errors, + ssl_errors, +) +from .client_proto import ResponseHandler +from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params +from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel +from .locks import EventResultOrError +from .resolver import DefaultResolver + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") + + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientTimeout + from .client_reqrep import ConnectionKey + from .tracing import Trace + + +class _DeprecationWaiter: + __slots__ = ("_awaitable", "_awaited") + + def __init__(self, awaitable: Awaitable[Any]) -> None: + self._awaitable = awaitable + self._awaited = False + + def __await__(self) -> Any: + self._awaited = True + return self._awaitable.__await__() + + def __del__(self) -> None: + if not self._awaited: + warnings.warn( + "Connector.close() is a coroutine, " + "please use await connector.close()", + DeprecationWarning, + ) + + +class Connection: + + _source_traceback = None + _transport = None + + def __init__( + self, + connector: "BaseConnector", + key: "ConnectionKey", + protocol: ResponseHandler, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._key = key + self._connector = connector + self._loop = loop + self._protocol: Optional[ResponseHandler] = protocol + self._callbacks: List[Callable[[], None]] = [] + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self) -> str: + return f"Connection<{self._key}>" + + def __del__(self, _warnings: Any = warnings) -> None: + if self._protocol is not None: + kwargs = {"source": self} + _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) + if self._loop.is_closed(): + return + + self._connector._release(self._key, self._protocol, should_close=True) + + context = {"client_connection": self, "message": "Unclosed connection"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __bool__(self) -> Literal[True]: + """Force subclasses to not be falsy, to make checks simpler.""" + return True + + @property + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def protocol(self) -> Optional[ResponseHandler]: + return self._protocol + + def add_callback(self, callback: Callable[[], None]) -> None: + if callback is not None: + self._callbacks.append(callback) + + def _notify_release(self) -> None: + callbacks, self._callbacks = self._callbacks[:], [] + + for cb in callbacks: + with suppress(Exception): + cb() + + def close(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release(self._key, self._protocol, 
should_close=True) + self._protocol = None + + def release(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release( + self._key, self._protocol, should_close=self._protocol.should_close + ) + self._protocol = None + + @property + def closed(self) -> bool: + return self._protocol is None or not self._protocol.is_connected() + + +class _TransportPlaceholder: + """placeholder for BaseConnector.connect function""" + + def close(self) -> None: + pass + + +class BaseConnector: + """Base connector class. + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + timeout_ceil_threshold - Trigger ceiling of timeout values when + it's above timeout_ceil_threshold. + loop - Optional event loop. + """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + # abort transport after 2 seconds (cleanup broken connections) + _cleanup_closed_period = 2.0 + + def __init__( + self, + *, + keepalive_timeout: Union[object, None, float] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout_ceil_threshold: float = 5, + ) -> None: + + if force_close: + if keepalive_timeout is not None and keepalive_timeout is not sentinel: + raise ValueError( + "keepalive_timeout cannot " "be set if force_close is True" + ) + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 15.0 + + loop = get_running_loop(loop) + self._timeout_ceil_threshold = timeout_ceil_threshold + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {} + self._limit = limit + self._limit_per_host = limit_per_host + self._acquired: Set[ResponseHandler] = set() + self._acquired_per_host: DefaultDict[ + ConnectionKey, Set[ResponseHandler] + ] = defaultdict(set) + self._keepalive_timeout = cast(float, keepalive_timeout) + self._force_close = force_close + + # {host_key: FIFO list of waiters} + self._waiters = defaultdict(deque) # type: ignore[var-annotated] + + self._loop = loop + self._factory = functools.partial(ResponseHandler, loop=loop) + + self.cookies = SimpleCookie() + + # start keep-alive connection cleanup task + self._cleanup_handle: Optional[asyncio.TimerHandle] = None + + # start cleanup closed transports task + self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + self._cleanup_closed_disabled = not enable_cleanup_closed + self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] + self._cleanup_closed() + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self._close() + + kwargs = {"source": self} + _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) + context = { + "connector": self, + "connections": conns, + "message": "Unclosed connector", + } + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def 
__enter__(self) -> "BaseConnector": + warnings.warn( + '"with Connector():" is deprecated, ' + 'use "async with Connector():" instead', + DeprecationWarning, + ) + return self + + def __exit__(self, *exc: Any) -> None: + self._close() + + async def __aenter__(self) -> "BaseConnector": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + exc_traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + @property + def force_close(self) -> bool: + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self) -> int: + """The total number of simultaneous connections. + + If limit is 0 the connector has no limit. + The default limit size is 100. + """ + return self._limit + + @property + def limit_per_host(self) -> int: + """The limit for simultaneous connections to the same endpoint. + + Endpoints are the same if they have an equal + (host, port, is_ssl) triple. + """ + return self._limit_per_host + + def _cleanup(self) -> None: + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + # _cleanup_handle should be unset, otherwise _release() will not + # recreate it ever! + self._cleanup_handle = None + + now = self._loop.time() + timeout = self._keepalive_timeout + + if self._conns: + connections = {} + deadline = now - timeout + for key, conns in self._conns.items(): + alive = [] + for proto, use_time in conns: + if proto.is_connected(): + if use_time - deadline < 0: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + alive.append((proto, use_time)) + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + if alive: + connections[key] = alive + + self._conns = connections + + if self._conns: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + def _drop_acquired_per_host( + self, key: "ConnectionKey", val: ResponseHandler + ) -> None: + acquired_per_host = self._acquired_per_host + if key not in acquired_per_host: + return + conns = acquired_per_host[key] + conns.remove(val) + if not conns: + del self._acquired_per_host[key] + + def _cleanup_closed(self) -> None: + """Double confirmation for transport close. + + Some broken ssl servers may leave the socket open without a proper close.
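+ + Editor's note (derived from the surrounding code): transports queued + in _cleanup_closed_transports are aborted here roughly every + _cleanup_closed_period (2.0) seconds unless cleanup is disabled.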
+ """ + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + self._cleanup_closed_transports = [] + + if not self._cleanup_closed_disabled: + self._cleanup_closed_handle = helpers.weakref_handle( + self, + "_cleanup_closed", + self._cleanup_closed_period, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + def close(self) -> Awaitable[None]: + """Close all opened transports.""" + self._close() + return _DeprecationWaiter(noop()) + + def _close(self) -> None: + if self._closed: + return + + self._closed = True + + try: + if self._loop.is_closed(): + return + + # cancel cleanup task + if self._cleanup_handle: + self._cleanup_handle.cancel() + + # cancel cleanup close task + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for data in self._conns.values(): + for proto, t0 in data: + proto.close() + + for proto in self._acquired: + proto.close() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + finally: + self._conns.clear() + self._acquired.clear() + self._waiters.clear() + self._cleanup_handle = None + self._cleanup_closed_transports.clear() + self._cleanup_closed_handle = None + + @property + def closed(self) -> bool: + """Is connector closed. + + A readonly property. + """ + return self._closed + + def _available_connections(self, key: "ConnectionKey") -> int: + """ + Return number of available connections. + + The limit, limit_per_host and the connection key are taken into account. + + If it returns less than 1 means that there are no connections + available. + """ + if self._limit: + # total calc available connections + available = self._limit - len(self._acquired) + + # check limit per host + if ( + self._limit_per_host + and available > 0 + and key in self._acquired_per_host + ): + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + + elif self._limit_per_host and key in self._acquired_per_host: + # check limit per host + acquired = self._acquired_per_host.get(key) + assert acquired is not None + available = self._limit_per_host - len(acquired) + else: + available = 1 + + return available + + async def connect( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> Connection: + """Get from pool or create new connection.""" + key = req.connection_key + available = self._available_connections(key) + + # Wait if there are no available connections or if there are/were + # waiters (i.e. don't steal connection from a waiter about to wake up) + if available <= 0 or key in self._waiters: + fut = self._loop.create_future() + + # This connection will now count towards the limit. 
+ self._waiters[key].append(fut) + + if traces: + for trace in traces: + await trace.send_connection_queued_start() + + try: + await fut + except BaseException as e: + if key in self._waiters: + # remove a waiter even if it was cancelled, normally it's + # removed when it's notified + try: + self._waiters[key].remove(fut) + except ValueError: # fut may no longer be in list + pass + + raise e + finally: + if key in self._waiters and not self._waiters[key]: + del self._waiters[key] + + if traces: + for trace in traces: + await trace.send_connection_queued_end() + + proto = self._get(key) + if proto is None: + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_start() + + try: + proto = await self._create_connection(req, traces, timeout) + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") + except BaseException: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + self._release_waiter() + raise + else: + if not self._closed: + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + if traces: + for trace in traces: + await trace.send_connection_create_end() + else: + if traces: + # Acquire the connection to prevent race conditions with limits + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + self._acquired_per_host[key].add(placeholder) + for trace in traces: + await trace.send_connection_reuseconn() + self._acquired.remove(placeholder) + self._drop_acquired_per_host(key, placeholder) + + self._acquired.add(proto) + self._acquired_per_host[key].add(proto) + return Connection(self, key, proto, self._loop) + + def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: + try: + conns = self._conns[key] + except KeyError: + return None + + t1 = self._loop.time() + while conns: + proto, t0 = conns.pop() + if proto.is_connected(): + if t1 - t0 > self._keepalive_timeout: + transport = proto.transport + proto.close() + # only for SSL transports + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + return proto + else: + transport = proto.transport + proto.close() + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + + # No more connections: drop the key + del self._conns[key] + return None + + def _release_waiter(self) -> None: + """ + Iterates over all waiters until one to be released is found. + + The one to be released is not finished and + belongs to a host that has available connections. + """ + if not self._waiters: + return + + # Having the dict keys ordered this avoids to iterate + # at the same order at each call. 
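+        # Shuffling below keeps one busy host from always being serviced
+        # first when several hosts have pending waiters.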
+ queues = list(self._waiters.keys()) + random.shuffle(queues) + + for key in queues: + if self._available_connections(key) < 1: + continue + + waiters = self._waiters[key] + while waiters: + waiter = waiters.popleft() + if not waiter.done(): + waiter.set_result(None) + return + + def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + try: + self._acquired.remove(proto) + self._drop_acquired_per_host(key, proto) + except KeyError: # pragma: no cover + # this may be result of undetermenistic order of objects + # finalization due garbage collection. + pass + else: + self._release_waiter() + + def _release( + self, + key: "ConnectionKey", + protocol: ResponseHandler, + *, + should_close: bool = False, + ) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + self._release_acquired(key, protocol) + + if self._force_close: + should_close = True + + if should_close or protocol.should_close: + transport = protocol.transport + protocol.close() + + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + else: + conns = self._conns.get(key) + if conns is None: + conns = self._conns[key] = [] + conns.append((protocol, self._loop.time())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + self._keepalive_timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + raise NotImplementedError() + + +class _DNSCacheTable: + def __init__(self, ttl: Optional[float] = None) -> None: + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} + self._timestamps: Dict[Tuple[str, int], float] = {} + self._ttl = ttl + + def __contains__(self, host: object) -> bool: + return host in self._addrs_rr + + def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + self._addrs_rr[key] = (cycle(addrs), len(addrs)) + + if self._ttl is not None: + self._timestamps[key] = monotonic() + + def remove(self, key: Tuple[str, int]) -> None: + self._addrs_rr.pop(key, None) + + if self._ttl is not None: + self._timestamps.pop(key, None) + + def clear(self) -> None: + self._addrs_rr.clear() + self._timestamps.clear() + + def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + loop, length = self._addrs_rr[key] + addrs = list(islice(loop, length)) + # Consume one more element to shift internal state of `cycle` + next(loop) + return addrs + + def expired(self, key: Tuple[str, int]) -> bool: + if self._ttl is None: + return False + + return self._timestamps[key] + self._ttl < monotonic() + + +class TCPConnector(BaseConnector): + """TCP connector. + + verify_ssl - Set to True to check ssl certifications. + fingerprint - Pass the binary sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + ttl_dns_cache - Max seconds having cached a DNS entry, None forever. 
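+    ssl_context - optional ssl.SSLContext instance to use
+        for SSL connections
+    ssl - SSLContext, Fingerprint, or False to disable
+        certificate validation (optional)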
+ family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + loop - Optional event loop. + """ + + def __init__( + self, + *, + verify_ssl: bool = True, + fingerprint: Optional[bytes] = None, + use_dns_cache: bool = True, + ttl_dns_cache: Optional[int] = 10, + family: int = 0, + ssl_context: Optional[SSLContext] = None, + ssl: Union[None, Literal[False], Fingerprint, SSLContext] = None, + local_addr: Optional[Tuple[str, int]] = None, + resolver: Optional[AbstractResolver] = None, + keepalive_timeout: Union[None, float, object] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout_ceil_threshold: float = 5, + ): + super().__init__( + keepalive_timeout=keepalive_timeout, + force_close=force_close, + limit=limit, + limit_per_host=limit_per_host, + enable_cleanup_closed=enable_cleanup_closed, + loop=loop, + timeout_ceil_threshold=timeout_ceil_threshold, + ) + + self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = use_dns_cache + self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) + self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} + self._family = family + self._local_addr = local_addr + + def close(self) -> Awaitable[None]: + """Close all ongoing DNS calls.""" + for ev in self._throttle_dns_events.values(): + ev.cancel() + + return super().close() + + @property + def family(self) -> int: + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self) -> bool: + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + def clear_dns_cache( + self, host: Optional[str] = None, port: Optional[int] = None + ) -> None: + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.remove((host, port)) + elif host is not None or port is not None: + raise ValueError("either both host and port " "or none of them are allowed") + else: + self._cached_hosts.clear() + + async def _resolve_host( + self, host: str, port: int, traces: Optional[List["Trace"]] = None + ) -> List[Dict[str, Any]]: + if is_ip_address(host): + return [ + { + "hostname": host, + "host": host, + "port": port, + "family": self._family, + "proto": 0, + "flags": 0, + } + ] + + if not self._use_dns_cache: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + res = await self._resolver.resolve(host, port, family=self._family) + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + return res + + key = (host, port) + + if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + # get result early, before any await (#4014) + result = self._cached_hosts.next_addrs(key) + + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + return result + + if key in self._throttle_dns_events: + # get event early, before any await 
(#4014) + event = self._throttle_dns_events[key] + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + await event.wait() + else: + # update dict early, before any await (#4014) + self._throttle_dns_events[key] = EventResultOrError(self._loop) + if traces: + for trace in traces: + await trace.send_dns_cache_miss(host) + try: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + addrs = await self._resolver.resolve(host, port, family=self._family) + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + self._cached_hosts.add(key, addrs) + self._throttle_dns_events[key].set() + except BaseException as e: + # any DNS exception, independently of the implementation + # is set for the waiters to raise the same exception. + self._throttle_dns_events[key].set(exc=e) + raise + finally: + self._throttle_dns_events.pop(key) + + return self._cached_hosts.next_addrs(key) + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + """Create connection. + + Has same keyword arguments as BaseEventLoop.create_connection. + """ + if req.proxy: + _, proto = await self._create_proxy_connection(req, traces, timeout) + else: + _, proto = await self._create_direct_connection(req, traces, timeout) + + return proto + + @staticmethod + @functools.lru_cache(None) + def _make_ssl_context(verified: bool) -> SSLContext: + if verified: + return ssl.create_default_context() + else: + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + try: + sslcontext.options |= ssl.OP_NO_COMPRESSION + except AttributeError as attr_err: + warnings.warn( + "{!s}: The Python interpreter is compiled " + "against OpenSSL < 1.0.0. Ref: " + "https://docs.python.org/3/library/ssl.html" + "#ssl.OP_NO_COMPRESSION".format(attr_err), + ) + sslcontext.set_default_verify_paths() + return sslcontext + + def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + """Logic to get the correct SSL context + + 0. if req.ssl is false, return None + + 1. if ssl_context is specified in req, use it + 2. if _ssl_context is specified in self, use it + 3. otherwise: + 1. if verify_ssl is not specified in req, use self.ssl_context + (will generate a default context according to self.verify_ssl) + 2. if verify_ssl is True in req, generate a default SSL context + 3. 
if verify_ssl is False in req, generate a SSL context that + won't verify + """ + if req.is_ssl(): + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not None: + # not verified or fingerprinted + return self._make_ssl_context(False) + return self._make_ssl_context(True) + else: + return None + + def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: + ret = req.ssl + if isinstance(ret, Fingerprint): + return ret + ret = self._ssl + if isinstance(ret, Fingerprint): + return ret + return None + + async def _wrap_create_connection( + self, + *args: Any, + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + return await self._loop.create_connection(*args, **kwargs) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + def _fail_on_no_start_tls(self, req: "ClientRequest") -> None: + """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``. + + It is necessary for TLS-in-TLS so that it is possible to + send HTTPS queries through HTTPS proxies. + + This doesn't affect regular HTTP requests, though. + """ + if not req.is_ssl(): + return + + proxy_url = req.proxy + assert proxy_url is not None + if proxy_url.scheme != "https": + return + + self._check_loop_for_start_tls() + + def _check_loop_for_start_tls(self) -> None: + try: + self._loop.start_tls + except AttributeError as attr_exc: + raise RuntimeError( + "An HTTPS request is being sent through an HTTPS proxy. " + "This needs support for TLS in TLS but it is not implemented " + "in your runtime for the stdlib asyncio.\n\n" + "Please upgrade to Python 3.11 or higher. For more details, " + "please see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n" + "* https://docs.aiohttp.org/en/stable/" + "client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + ) from attr_exc + + def _loop_supports_start_tls(self) -> bool: + try: + self._check_loop_for_start_tls() + except RuntimeError: + return False + else: + return True + + def _warn_about_tls_in_tls( + self, + underlying_transport: asyncio.Transport, + req: ClientRequest, + ) -> None: + """Issue a warning if the requested URL has HTTPS scheme.""" + if req.request_info.url.scheme != "https": + return + + asyncio_supports_tls_in_tls = getattr( + underlying_transport, + "_start_tls_compatible", + False, + ) + + if asyncio_supports_tls_in_tls: + return + + warnings.warn( + "An HTTPS request is being sent through an HTTPS proxy. " + "This support for TLS in TLS is known to be disabled " + "in the stdlib asyncio (Python <3.11). This is why you'll probably see " + "an error in the log below.\n\n" + "It is possible to enable it via monkeypatching. 
" + "For more details, see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n\n" + "You can temporarily patch this as follows:\n" + "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + RuntimeWarning, + source=self, + # Why `4`? At least 3 of the calls in the stack originate + # from the methods in this class. + stacklevel=3, + ) + + async def _start_tls_connection( + self, + underlying_transport: asyncio.Transport, + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + """Wrap the raw TCP transport with TLS.""" + tls_proto = self._factory() # Create a brand new proto for TLS + + # Safety of the `cast()` call here is based on the fact that + # internally `_get_ssl_context()` only returns `None` when + # `req.is_ssl()` evaluates to `False` which is never gonna happen + # in this code path. Of course, it's rather fragile + # maintainability-wise but this is to be solved separately. + sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) + + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + try: + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + server_hostname=req.server_hostname or req.host, + ssl_handshake_timeout=timeout.total, + ) + except BaseException: + # We need to close the underlying transport since + # `start_tls()` probably failed before it had a + # chance to do this: + underlying_transport.close() + raise + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + except TypeError as type_err: + # Example cause looks like this: + # TypeError: transport is not supported by start_tls() + + raise ClientConnectionError( + "Cannot initialize a TLS-in-TLS connection to host " + f"{req.host!s}:{req.port:d} through an underlying connection " + f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} " + f"[{type_err!s}]" + ) from type_err + else: + if tls_transport is None: + msg = "Failed to start TLS (possibly caused by closing transport)" + raise client_error(req.connection_key, OSError(msg)) + tls_proto.connection_made( + tls_transport + ) # Kick the state machine of the new TLS protocol + + return tls_transport, tls_proto + + async def _create_direct_connection( + self, + req: ClientRequest, + traces: List["Trace"], + timeout: "ClientTimeout", + *, + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + sslcontext = self._get_ssl_context(req) + fingerprint = self._get_fingerprint(req) + + host = req.url.raw_host + assert host is not None + # Replace multiple trailing dots with a single one. + # A trailing dot is only present for fully-qualified domain names. + # See https://github.com/aio-libs/aiohttp/pull/7364. + if host.endswith(".."): + host = host.rstrip(".") + "." 
+ port = req.port + assert port is not None + host_resolved = asyncio.ensure_future( + self._resolve_host(host, port, traces=traces), loop=self._loop + ) + try: + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. + hosts = await asyncio.shield(host_resolved) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + host_resolved.add_done_callback(drop_exception) + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + # in case of proxy it is not ClientProxyConnectionError + # it is problem of resolving proxy ip itself + raise ClientConnectorError(req.connection_key, exc) from exc + + last_exc: Optional[Exception] = None + + for hinfo in hosts: + host = hinfo["host"] + port = hinfo["port"] + + # Strip trailing dots, certificates contain FQDN without dots. + # See https://github.com/aio-libs/aiohttp/issues/3636 + server_hostname = ( + (req.server_hostname or hinfo["hostname"]).rstrip(".") + if sslcontext + else None + ) + + try: + transp, proto = await self._wrap_create_connection( + self._factory, + host, + port, + timeout=timeout, + ssl=sslcontext, + family=hinfo["family"], + proto=hinfo["proto"], + flags=hinfo["flags"], + server_hostname=server_hostname, + local_addr=self._local_addr, + req=req, + client_error=client_error, + ) + except ClientConnectorError as exc: + last_exc = exc + continue + + if req.is_ssl() and fingerprint: + try: + fingerprint.check(transp) + except ServerFingerprintMismatch as exc: + transp.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transp) + last_exc = exc + continue + + return transp, proto + else: + assert last_exc is not None + raise last_exc + + async def _create_proxy_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + self._fail_on_no_start_tls(req) + runtime_has_start_tls = self._loop_supports_start_tls() + + headers: Dict[str, str] = {} + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore[assignment] + headers[hdrs.HOST] = req.headers[hdrs.HOST] + + url = req.proxy + assert url is not None + proxy_req = ClientRequest( + hdrs.METH_GET, + url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl, + ) + + # create connection to proxy server + transport, proto = await self._create_direct_connection( + proxy_req, [], timeout, client_error=ClientProxyConnectionError + ) + + # Many HTTP proxies has buggy keepalive support. Let's not + # reuse connection but close it after processing every + # response. 
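+        # force_close() flags the protocol so _release() always closes it
+        # instead of returning it to the pool.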
+ proto.force_close() + + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + + if req.is_ssl(): + if runtime_has_start_tls: + self._warn_about_tls_in_tls(transport, req) + + # For HTTPS requests over HTTP proxy + # we must notify proxy to tunnel connection + # so we send CONNECT command: + # CONNECT www.python.org:443 HTTP/1.1 + # Host: www.python.org + # + # next we must do TLS handshake and so on + # to do this we must wrap raw socket into secure one + # asyncio handles this perfectly + proxy_req.method = hdrs.METH_CONNECT + proxy_req.url = req.url + key = attr.evolve( + req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None + ) + conn = Connection(self, key, proto, self._loop) + proxy_resp = await proxy_req.send(conn) + try: + protocol = conn._protocol + assert protocol is not None + + # read_until_eof=True will ensure the connection isn't closed + # once the response is received and processed allowing + # START_TLS to work on the connection below. + protocol.set_response_params( + read_until_eof=runtime_has_start_tls, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + resp = await proxy_resp.start(conn) + except BaseException: + proxy_resp.close() + conn.close() + raise + else: + conn._protocol = None + conn._transport = None + try: + if resp.status != 200: + message = resp.reason + if message is None: + message = HTTPStatus(resp.status).phrase + raise ClientHttpProxyError( + proxy_resp.request_info, + resp.history, + status=resp.status, + message=message, + headers=resp.headers, + ) + if not runtime_has_start_tls: + rawsock = transport.get_extra_info("socket", default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance" + ) + # Duplicate the socket, so now we can close proxy transport + rawsock = rawsock.dup() + except BaseException: + # It shouldn't be closed in `finally` because it's fed to + # `loop.start_tls()` and the docs say not to touch it after + # passing there. + transport.close() + raise + finally: + if not runtime_has_start_tls: + transport.close() + + if not runtime_has_start_tls: + # HTTP proxy with support for upgrade to HTTPS + sslcontext = self._get_ssl_context(req) + return await self._wrap_create_connection( + self._factory, + timeout=timeout, + ssl=sslcontext, + sock=rawsock, + server_hostname=req.host, + req=req, + ) + + return await self._start_tls_connection( + # Access the old transport for the last time before it's + # closed and forgotten forever: + transport, + req=req, + timeout=timeout, + ) + finally: + proxy_resp.close() + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. 
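+
+    Typically used as ClientSession(connector=UnixConnector(path=...)).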
+ """ + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + self._path = path + + @property + def path(self) -> str: + """Path to unix socket.""" + return self._path + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path + ) + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc + + return proto + + +class NamedPipeConnector(BaseConnector): + """Named pipe connector. + + Only supported by the proactor event loop. + See also: https://docs.python.org/3/library/asyncio-eventloop.html + + path - Windows named pipe path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. + """ + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + if not isinstance( + self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] + ): + raise RuntimeError( + "Named Pipes only available in proactor " "loop under windows" + ) + self._path = path + + @property + def path(self) -> str: + """Path to the named pipe.""" + return self._path + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] + self._factory, self._path + ) + # the drain is required so that the connection_made is called + # and transport is set otherwise it is not set before the + # `assert conn.transport is not None` + # in client.py's _request method + await asyncio.sleep(0) + # other option is to manually set transport like + # `proto.transport = trans` + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientConnectorError(req.connection_key, exc) from exc + + return cast(ResponseHandler, proto) diff --git a/venv/lib/python3.12/site-packages/aiohttp/cookiejar.py b/venv/lib/python3.12/site-packages/aiohttp/cookiejar.py new file mode 100644 index 0000000..a348f11 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/cookiejar.py @@ -0,0 +1,419 @@ +import asyncio +import calendar +import contextlib +import datetime +import os # noqa +import pathlib +import pickle +import re +import time +from collections import defaultdict +from 
http.cookies import BaseCookie, Morsel, SimpleCookie +from math import ceil +from typing import ( # noqa + DefaultDict, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Set, + Tuple, + Union, + cast, +) + +from yarl import URL + +from .abc import AbstractCookieJar, ClearCookiePredicate +from .helpers import is_ip_address +from .typedefs import LooseCookies, PathLike, StrOrURL + +__all__ = ("CookieJar", "DummyCookieJar") + + +CookieItem = Union[str, "Morsel[str]"] + + +class CookieJar(AbstractCookieJar): + """Implements cookie storage adhering to RFC 6265.""" + + DATE_TOKENS_RE = re.compile( + r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" + r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)" + ) + + DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") + + DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") + + DATE_MONTH_RE = re.compile( + "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", + re.I, + ) + + DATE_YEAR_RE = re.compile(r"(\d{2,4})") + + # calendar.timegm() fails for timestamps after datetime.datetime.max + # Minus one as a loss of precision occurs when timestamp() is called. + MAX_TIME = ( + int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1 + ) + try: + calendar.timegm(time.gmtime(MAX_TIME)) + except (OSError, ValueError): + # Hit the maximum representable time on Windows + # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 + # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere + MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) + except OverflowError: + # #4515: datetime.max may not be representable on 32-bit platforms + MAX_TIME = 2**31 - 1 + # Avoid minuses in the future, 3x faster + SUB_MAX_TIME = MAX_TIME - 1 + + def __init__( + self, + *, + unsafe: bool = False, + quote_cookie: bool = True, + treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__(loop=loop) + self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( + SimpleCookie + ) + self._host_only_cookies: Set[Tuple[str, str]] = set() + self._unsafe = unsafe + self._quote_cookie = quote_cookie + if treat_as_secure_origin is None: + treat_as_secure_origin = [] + elif isinstance(treat_as_secure_origin, URL): + treat_as_secure_origin = [treat_as_secure_origin.origin()] + elif isinstance(treat_as_secure_origin, str): + treat_as_secure_origin = [URL(treat_as_secure_origin).origin()] + else: + treat_as_secure_origin = [ + URL(url).origin() if isinstance(url, str) else url.origin() + for url in treat_as_secure_origin + ] + self._treat_as_secure_origin = treat_as_secure_origin + self._next_expiration: float = ceil(time.time()) + self._expirations: Dict[Tuple[str, str, str], float] = {} + + def save(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="wb") as f: + pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) + + def load(self, file_path: PathLike) -> None: + file_path = pathlib.Path(file_path) + with file_path.open(mode="rb") as f: + self._cookies = pickle.load(f) + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + if predicate is None: + self._next_expiration = ceil(time.time()) + self._cookies.clear() + self._host_only_cookies.clear() + self._expirations.clear() + return + + to_del = [] + now = time.time() + for (domain, path), cookie in self._cookies.items(): + for name, morsel 
in cookie.items(): + key = (domain, path, name) + if ( + key in self._expirations and self._expirations[key] <= now + ) or predicate(morsel): + to_del.append(key) + + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + key = (domain, path, name) + if key in self._expirations: + del self._expirations[(domain, path, name)] + self._cookies[(domain, path)].pop(name, None) + + self._next_expiration = ( + min(*self._expirations.values(), self.SUB_MAX_TIME) + 1 + if self._expirations + else self.MAX_TIME + ) + + def clear_domain(self, domain: str) -> None: + self.clear(lambda x: self._is_domain_match(domain, x["domain"])) + + def __iter__(self) -> "Iterator[Morsel[str]]": + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self) -> int: + return sum(1 for i in self) + + def _do_expiration(self) -> None: + self.clear(lambda x: False) + + def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: + self._next_expiration = min(self._next_expiration, when) + self._expirations[(domain, path, name)] = when + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + hostname = response_url.raw_host + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp = SimpleCookie() + tmp[name] = cookie # type: ignore[assignment] + cookie = tmp[name] + + domain = cookie["domain"] + + # ignore domains with trailing dots + if domain.endswith("."): + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain.startswith("."): + # Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path = cookie["path"] + if not path or not path.startswith("/"): + # Set the cookie's path to the response path + path = response_url.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1 : path.rfind("/")] + cookie["path"] = path + + max_age = cookie["max-age"] + if max_age: + try: + delta_seconds = int(max_age) + max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) + self._expire_cookie(max_age_expiration, domain, path, name) + except ValueError: + cookie["max-age"] = "" + + else: + expires = cookie["expires"] + if expires: + expire_time = self._parse_date(expires) + if expire_time: + self._expire_cookie(expire_time, domain, path, name) + else: + cookie["expires"] = "" + + self._cookies[(domain, path)][name] = cookie + + self._do_expiration() + + def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": + """Returns this jar's cookies filtered by their attributes.""" + filtered: Union[SimpleCookie, "BaseCookie[str]"] = ( + SimpleCookie() if self._quote_cookie else BaseCookie() + ) + if not self._cookies: + # Skip do_expiration() if there are no cookies. + return filtered + self._do_expiration() + if not self._cookies: + # Skip rest of function if no non-expired cookies. 
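+            # Returning early also skips the URL parsing and the
+            # path-length sort below.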
+ return filtered + request_url = URL(request_url) + hostname = request_url.raw_host or "" + + is_not_secure = request_url.scheme not in ("https", "wss") + if is_not_secure and self._treat_as_secure_origin: + request_origin = URL() + with contextlib.suppress(ValueError): + request_origin = request_url.origin() + is_not_secure = request_origin not in self._treat_as_secure_origin + + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 + for cookie in sorted(self, key=lambda c: len(c["path"])): + name = cookie.key + domain = cookie["domain"] + + # Send shared cookies + if not domain: + filtered[name] = cookie.value + continue + + if not self._unsafe and is_ip_address(hostname): + continue + + if (domain, name) in self._host_only_cookies: + if domain != hostname: + continue + elif not self._is_domain_match(domain, hostname): + continue + + if not self._is_path_match(request_url.path, cookie["path"]): + continue + + if is_not_secure and cookie["secure"]: + continue + + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + filtered[name] = mrsl_val + + return filtered + + @staticmethod + def _is_domain_match(domain: str, hostname: str) -> bool: + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[: -len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @staticmethod + def _is_path_match(req_path: str, cookie_path: str) -> bool: + """Implements path matching adhering to RFC 6265.""" + if not req_path.startswith("/"): + req_path = "/" + + if req_path == cookie_path: + return True + + if not req_path.startswith(cookie_path): + return False + + if cookie_path.endswith("/"): + return True + + non_matching = req_path[len(cookie_path) :] + + return non_matching.startswith("/") + + @classmethod + def _parse_date(cls, date_str: str) -> Optional[int]: + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return None + + found_time = False + found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + hour, minute, second = (int(s) for s in time_match.groups()) + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + assert month_match.lastindex is not None + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return None + + if not 1 <= day <= 31: + return None + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return None + + return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1)) + + +class DummyCookieJar(AbstractCookieJar): + 
"""Implements a dummy cookie storage. + + It can be used with the ClientSession when no cookie processing is needed. + + """ + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + super().__init__(loop=loop) + + def __iter__(self) -> "Iterator[Morsel[str]]": + while False: + yield None + + def __len__(self) -> int: + return 0 + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + pass + + def clear_domain(self, domain: str) -> None: + pass + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + pass + + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + return SimpleCookie() diff --git a/venv/lib/python3.12/site-packages/aiohttp/formdata.py b/venv/lib/python3.12/site-packages/aiohttp/formdata.py new file mode 100644 index 0000000..e7cd24c --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/formdata.py @@ -0,0 +1,172 @@ +import io +from typing import Any, Iterable, List, Optional +from urllib.parse import urlencode + +from multidict import MultiDict, MultiDictProxy + +from . import hdrs, multipart, payload +from .helpers import guess_filename +from .payload import Payload + +__all__ = ("FormData",) + + +class FormData: + """Helper class for form body generation. + + Supports multipart/form-data and application/x-www-form-urlencoded. + """ + + def __init__( + self, + fields: Iterable[Any] = (), + quote_fields: bool = True, + charset: Optional[str] = None, + ) -> None: + self._writer = multipart.MultipartWriter("form-data") + self._fields: List[Any] = [] + self._is_multipart = False + self._is_processed = False + self._quote_fields = quote_fields + self._charset = charset + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self) -> bool: + return self._is_multipart + + def add_field( + self, + name: str, + value: Any, + *, + content_type: Optional[str] = None, + filename: Optional[str] = None, + content_transfer_encoding: Optional[str] = None, + ) -> None: + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + if filename is None and content_transfer_encoding is None: + filename = name + + type_options: MultiDict[str] = MultiDict({"name": name}) + if filename is not None and not isinstance(filename, str): + raise TypeError( + "filename must be an instance of str. " "Got: %s" % filename + ) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options["filename"] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError( + "content_type must be an instance of str. " "Got: %s" % content_type + ) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError( + "content_transfer_encoding must be an instance" + " of str. 
Got: %s" % content_transfer_encoding + ) + headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + self._is_multipart = True + + self._fields.append((type_options, headers, value)) + + def add_fields(self, *fields: Any) -> None: + to_add = list(fields) + + while to_add: + rec = to_add.pop(0) + + if isinstance(rec, io.IOBase): + k = guess_filename(rec, "unknown") + self.add_field(k, rec) # type: ignore[arg-type] + + elif isinstance(rec, (MultiDictProxy, MultiDict)): + to_add.extend(rec.items()) + + elif isinstance(rec, (list, tuple)) and len(rec) == 2: + k, fp = rec + self.add_field(k, fp) # type: ignore[arg-type] + + else: + raise TypeError( + "Only io.IOBase, multidict and (name, file) " + "pairs allowed, use .add_field() for passing " + "more complex parameters, got {!r}".format(rec) + ) + + def _gen_form_urlencoded(self) -> payload.BytesPayload: + # form data (x-www-form-urlencoded) + data = [] + for type_options, _, value in self._fields: + data.append((type_options["name"], value)) + + charset = self._charset if self._charset is not None else "utf-8" + + if charset == "utf-8": + content_type = "application/x-www-form-urlencoded" + else: + content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset + + return payload.BytesPayload( + urlencode(data, doseq=True, encoding=charset).encode(), + content_type=content_type, + ) + + def _gen_form_data(self) -> multipart.MultipartWriter: + """Encode a list of fields using the multipart/form-data MIME format""" + if self._is_processed: + raise RuntimeError("Form data has been processed already") + for dispparams, headers, value in self._fields: + try: + if hdrs.CONTENT_TYPE in headers: + part = payload.get_payload( + value, + content_type=headers[hdrs.CONTENT_TYPE], + headers=headers, + encoding=self._charset, + ) + else: + part = payload.get_payload( + value, headers=headers, encoding=self._charset + ) + except Exception as exc: + raise TypeError( + "Can not serialize value type: %r\n " + "headers: %r\n value: %r" % (type(value), headers, value) + ) from exc + + if dispparams: + part.set_content_disposition( + "form-data", quote_fields=self._quote_fields, **dispparams + ) + # FIXME cgi.FieldStorage doesn't likes body parts with + # Content-Length which were sent via chunked transfer encoding + assert part.headers is not None + part.headers.popall(hdrs.CONTENT_LENGTH, None) + + self._writer.append_payload(part) + + self._is_processed = True + return self._writer + + def __call__(self) -> Payload: + if self._is_multipart: + return self._gen_form_data() + else: + return self._gen_form_urlencoded() diff --git a/venv/lib/python3.12/site-packages/aiohttp/hdrs.py b/venv/lib/python3.12/site-packages/aiohttp/hdrs.py new file mode 100644 index 0000000..2f1f5e0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/hdrs.py @@ -0,0 +1,108 @@ +"""HTTP Headers constants.""" + +# After changing the file content call ./tools/gen.py +# to regenerate the headers parser +from typing import Final, Set + +from multidict import istr + +METH_ANY: Final[str] = "*" +METH_CONNECT: Final[str] = "CONNECT" +METH_HEAD: Final[str] = "HEAD" +METH_GET: Final[str] = "GET" +METH_DELETE: Final[str] = "DELETE" +METH_OPTIONS: Final[str] = "OPTIONS" +METH_PATCH: Final[str] = "PATCH" +METH_POST: Final[str] = "POST" +METH_PUT: Final[str] = "PUT" +METH_TRACE: Final[str] = "TRACE" + +METH_ALL: Final[Set[str]] = { + METH_CONNECT, + METH_HEAD, + METH_GET, + METH_DELETE, + METH_OPTIONS, + METH_PATCH, + METH_POST, + METH_PUT, + METH_TRACE, +} + +ACCEPT: 
Final[istr] = istr("Accept") +ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset") +ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding") +ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language") +ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges") +ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age") +ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials") +ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers") +ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods") +ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin") +ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers") +ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers") +ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method") +AGE: Final[istr] = istr("Age") +ALLOW: Final[istr] = istr("Allow") +AUTHORIZATION: Final[istr] = istr("Authorization") +CACHE_CONTROL: Final[istr] = istr("Cache-Control") +CONNECTION: Final[istr] = istr("Connection") +CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition") +CONTENT_ENCODING: Final[istr] = istr("Content-Encoding") +CONTENT_LANGUAGE: Final[istr] = istr("Content-Language") +CONTENT_LENGTH: Final[istr] = istr("Content-Length") +CONTENT_LOCATION: Final[istr] = istr("Content-Location") +CONTENT_MD5: Final[istr] = istr("Content-MD5") +CONTENT_RANGE: Final[istr] = istr("Content-Range") +CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding") +CONTENT_TYPE: Final[istr] = istr("Content-Type") +COOKIE: Final[istr] = istr("Cookie") +DATE: Final[istr] = istr("Date") +DESTINATION: Final[istr] = istr("Destination") +DIGEST: Final[istr] = istr("Digest") +ETAG: Final[istr] = istr("Etag") +EXPECT: Final[istr] = istr("Expect") +EXPIRES: Final[istr] = istr("Expires") +FORWARDED: Final[istr] = istr("Forwarded") +FROM: Final[istr] = istr("From") +HOST: Final[istr] = istr("Host") +IF_MATCH: Final[istr] = istr("If-Match") +IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since") +IF_NONE_MATCH: Final[istr] = istr("If-None-Match") +IF_RANGE: Final[istr] = istr("If-Range") +IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since") +KEEP_ALIVE: Final[istr] = istr("Keep-Alive") +LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID") +LAST_MODIFIED: Final[istr] = istr("Last-Modified") +LINK: Final[istr] = istr("Link") +LOCATION: Final[istr] = istr("Location") +MAX_FORWARDS: Final[istr] = istr("Max-Forwards") +ORIGIN: Final[istr] = istr("Origin") +PRAGMA: Final[istr] = istr("Pragma") +PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate") +PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization") +RANGE: Final[istr] = istr("Range") +REFERER: Final[istr] = istr("Referer") +RETRY_AFTER: Final[istr] = istr("Retry-After") +SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept") +SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version") +SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol") +SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions") +SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key") +SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1") +SERVER: Final[istr] = istr("Server") +SET_COOKIE: Final[istr] = istr("Set-Cookie") +TE: Final[istr] = istr("TE") +TRAILER: Final[istr] = istr("Trailer") +TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding") +UPGRADE: Final[istr] = istr("Upgrade") +URI: 
Final[istr] = istr("URI") +USER_AGENT: Final[istr] = istr("User-Agent") +VARY: Final[istr] = istr("Vary") +VIA: Final[istr] = istr("Via") +WANT_DIGEST: Final[istr] = istr("Want-Digest") +WARNING: Final[istr] = istr("Warning") +WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate") +X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") +X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") +X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") diff --git a/venv/lib/python3.12/site-packages/aiohttp/helpers.py b/venv/lib/python3.12/site-packages/aiohttp/helpers.py new file mode 100644 index 0000000..a5c762e --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/helpers.py @@ -0,0 +1,999 @@ +"""Various helper functions""" + +import asyncio +import base64 +import binascii +import contextlib +import datetime +import enum +import functools +import inspect +import netrc +import os +import platform +import re +import sys +import time +import warnings +import weakref +from collections import namedtuple +from contextlib import suppress +from email.parser import HeaderParser +from email.utils import parsedate +from math import ceil +from pathlib import Path +from types import TracebackType +from typing import ( + Any, + Callable, + ContextManager, + Dict, + Generator, + Generic, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Protocol, + Tuple, + Type, + TypeVar, + Union, + get_args, + overload, +) +from urllib.parse import quote +from urllib.request import getproxies, proxy_bypass + +import attr +from multidict import MultiDict, MultiDictProxy, MultiMapping +from yarl import URL + +from . import hdrs +from .log import client_logger, internal_logger + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + +__all__ = ("BasicAuth", "ChainMapProxy", "ETag") + +IS_MACOS = platform.system() == "Darwin" +IS_WINDOWS = platform.system() == "Windows" + +PY_310 = sys.version_info >= (3, 10) +PY_311 = sys.version_info >= (3, 11) + + +_T = TypeVar("_T") +_S = TypeVar("_S") + +_SENTINEL = enum.Enum("_SENTINEL", "sentinel") +sentinel = _SENTINEL.sentinel + +NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) + +DEBUG = sys.flags.dev_mode or ( + not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) +) + + +CHAR = {chr(i) for i in range(0, 128)} +CTL = {chr(i) for i in range(0, 32)} | { + chr(127), +} +SEPARATORS = { + "(", + ")", + "<", + ">", + "@", + ",", + ";", + ":", + "\\", + '"', + "/", + "[", + "]", + "?", + "=", + "{", + "}", + " ", + chr(9), +} +TOKEN = CHAR ^ CTL ^ SEPARATORS + + +class noop: + def __await__(self) -> Generator[None, None, None]: + yield + + +class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): + """Http basic authentication helper.""" + + def __new__( + cls, login: str, password: str = "", encoding: str = "latin1" + ) -> "BasicAuth": + if login is None: + raise ValueError("None is not allowed as login value") + + if password is None: + raise ValueError("None is not allowed as password value") + + if ":" in login: + raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": + """Create a BasicAuth object from an Authorization HTTP header.""" + try: + auth_type, encoded_credentials = auth_header.split(" ", 1) + except ValueError: + raise ValueError("Could not parse authorization 
header.") + + if auth_type.lower() != "basic": + raise ValueError("Unknown authorization method %s" % auth_type) + + try: + decoded = base64.b64decode( + encoded_credentials.encode("ascii"), validate=True + ).decode(encoding) + except binascii.Error: + raise ValueError("Invalid base64 encoding.") + + try: + # RFC 2617 HTTP Authentication + # https://www.ietf.org/rfc/rfc2617.txt + # the colon must be present, but the username and password may be + # otherwise blank. + username, password = decoded.split(":", 1) + except ValueError: + raise ValueError("Invalid credentials.") + + return cls(username, password, encoding=encoding) + + @classmethod + def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: + """Create BasicAuth from url.""" + if not isinstance(url, URL): + raise TypeError("url should be yarl.URL instance") + if url.user is None: + return None + return cls(url.user, url.password or "", encoding=encoding) + + def encode(self) -> str: + """Encode credentials.""" + creds = (f"{self.login}:{self.password}").encode(self.encoding) + return "Basic %s" % base64.b64encode(creds).decode(self.encoding) + + +def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + auth = BasicAuth.from_url(url) + if auth is None: + return url, None + else: + return url.with_user(None), auth + + +def netrc_from_env() -> Optional[netrc.netrc]: + """Load netrc from file. + + Attempt to load it from the path specified by the env-var + NETRC or in the default location in the user's home directory. + + Returns None if it couldn't be found or fails to parse. + """ + netrc_env = os.environ.get("NETRC") + + if netrc_env is not None: + netrc_path = Path(netrc_env) + else: + try: + home_dir = Path.home() + except RuntimeError as e: # pragma: no cover + # if pathlib can't resolve home, it may raise a RuntimeError + client_logger.debug( + "Could not resolve home directory when " + "trying to look for .netrc file: %s", + e, + ) + return None + + netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") + + try: + return netrc.netrc(str(netrc_path)) + except netrc.NetrcParseError as e: + client_logger.warning("Could not parse .netrc file: %s", e) + except OSError as e: + netrc_exists = False + with contextlib.suppress(OSError): + netrc_exists = netrc_path.is_file() + # we couldn't read the file (doesn't exist, permissions, etc.) + if netrc_env or netrc_exists: + # only warn if the environment wanted us to load it, + # or it appears like the default file does actually exist + client_logger.warning("Could not read .netrc file: %s", e) + + return None + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ProxyInfo: + proxy: URL + proxy_auth: Optional[BasicAuth] + + +def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth: + """ + Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``. + + :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no + entry is found for the ``host``. + """ + if netrc_obj is None: + raise LookupError("No .netrc file found") + auth_from_netrc = netrc_obj.authenticators(host) + + if auth_from_netrc is None: + raise LookupError(f"No entry for {host!s} found in the `.netrc` file.") + login, account, password = auth_from_netrc + + # TODO(PY311): username = login or account + # Up to python 3.10, account could be None if not specified, + # and login will be empty string if not specified. From 3.11, + # login and account will be empty string if not specified. 
+ username = login if (login or account is None) else account + + # TODO(PY311): Remove this, as password will be empty string + # if not specified + if password is None: + password = "" + + return BasicAuth(username, password) + + +def proxies_from_env() -> Dict[str, ProxyInfo]: + proxy_urls = { + k: URL(v) + for k, v in getproxies().items() + if k in ("http", "https", "ws", "wss") + } + netrc_obj = netrc_from_env() + stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} + ret = {} + for proto, val in stripped.items(): + proxy, auth = val + if proxy.scheme in ("https", "wss"): + client_logger.warning( + "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy + ) + continue + if netrc_obj and auth is None: + if proxy.host is not None: + try: + auth = basicauth_from_netrc(netrc_obj, proxy.host) + except LookupError: + auth = None + ret[proto] = ProxyInfo(proxy, auth) + return ret + + +def current_task( + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> "Optional[asyncio.Task[Any]]": + return asyncio.current_task(loop=loop) + + +def get_running_loop( + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> asyncio.AbstractEventLoop: + if loop is None: + loop = asyncio.get_event_loop() + if not loop.is_running(): + warnings.warn( + "The object should be created within an async function", + DeprecationWarning, + stacklevel=3, + ) + if loop.get_debug(): + internal_logger.warning( + "The object should be created within an async function", stack_info=True + ) + return loop + + +def isasyncgenfunction(obj: Any) -> bool: + func = getattr(inspect, "isasyncgenfunction", None) + if func is not None: + return func(obj) # type: ignore[no-any-return] + else: + return False + + +def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Get a permitted proxy for the given URL from the env.""" + if url.host is not None and proxy_bypass(url.host): + raise LookupError(f"Proxying is disallowed for `{url.host!r}`") + + proxies_in_env = proxies_from_env() + try: + proxy_info = proxies_in_env[url.scheme] + except KeyError: + raise LookupError(f"No proxies found for `{url!s}` in the env") + else: + return proxy_info.proxy, proxy_info.proxy_auth + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class MimeType: + type: str + subtype: str + suffix: str + parameters: "MultiDictProxy[str]" + + +@functools.lru_cache(maxsize=56) +def parse_mimetype(mimetype: str) -> MimeType: + """Parses a MIME type into its components. + + mimetype is a MIME type string. + + Returns a MimeType object. 
+ + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + MimeType(type='text', subtype='html', suffix='', + parameters={'charset': 'utf-8'}) + + """ + if not mimetype: + return MimeType( + type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) + ) + + parts = mimetype.split(";") + params: MultiDict[str] = MultiDict() + for item in parts[1:]: + if not item: + continue + key, _, value = item.partition("=") + params.add(key.lower().strip(), value.strip(' "')) + + fulltype = parts[0].strip().lower() + if fulltype == "*": + fulltype = "*/*" + + mtype, _, stype = fulltype.partition("/") + stype, _, suffix = stype.partition("+") + + return MimeType( + type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) + ) + + +def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: + name = getattr(obj, "name", None) + if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": + return Path(name).name + return default + + +not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") +QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} + + +def quoted_string(content: str) -> str: + """Return 7-bit content as quoted-string. + + Format content into a quoted-string as defined in RFC5322 for + Internet Message Format. Notice that this is not the 8-bit HTTP + format, but the 7-bit email format. Content must be in usascii or + a ValueError is raised. + """ + if not (QCONTENT > set(content)): + raise ValueError(f"bad content for quoted-string {content!r}") + return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) + + +def content_disposition_header( + disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str +) -> str: + """Sets ``Content-Disposition`` header for MIME. + + This is the MIME payload Content-Disposition header from RFC 2183 + and RFC 7579 section 4.2, not the HTTP Content-Disposition from + RFC 6266. + + disptype is a disposition type: inline, attachment, form-data. + Should be valid extension token (see RFC 2183) + + quote_fields performs value quoting to 7-bit MIME headers + according to RFC 7578. Set to quote_fields to False if recipient + can take 8-bit file names and field values. + + _charset specifies the charset to use when quote_fields is True. + + params is a dict with disposition params. + """ + if not disptype or not (TOKEN > set(disptype)): + raise ValueError("bad content disposition type {!r}" "".format(disptype)) + + value = disptype + if params: + lparams = [] + for key, val in params.items(): + if not key or not (TOKEN > set(key)): + raise ValueError( + "bad content disposition parameter" " {!r}={!r}".format(key, val) + ) + if quote_fields: + if key.lower() == "filename": + qval = quote(val, "", encoding=_charset) + lparams.append((key, '"%s"' % qval)) + else: + try: + qval = quoted_string(val) + except ValueError: + qval = "".join( + (_charset, "''", quote(val, "", encoding=_charset)) + ) + lparams.append((key + "*", qval)) + else: + lparams.append((key, '"%s"' % qval)) + else: + qval = val.replace("\\", "\\\\").replace('"', '\\"') + lparams.append((key, '"%s"' % qval)) + sparams = "; ".join("=".join(pair) for pair in lparams) + value = "; ".join((value, sparams)) + return value + + +class _TSelf(Protocol, Generic[_T]): + _cache: Dict[str, _T] + + +class reify(Generic[_T]): + """Use as a class method decorator. 
+ + It operates almost exactly like + the Python `@property` decorator, but it puts the result of the + method it decorates into the instance dict after the first call, + effectively replacing the function it decorates with an instance + variable. It is, in Python parlance, a data descriptor. + """ + + def __init__(self, wrapped: Callable[..., _T]) -> None: + self.wrapped = wrapped + self.__doc__ = wrapped.__doc__ + self.name = wrapped.__name__ + + def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: + try: + try: + return inst._cache[self.name] + except KeyError: + val = self.wrapped(inst) + inst._cache[self.name] = val + return val + except AttributeError: + if inst is None: + return self + raise + + def __set__(self, inst: _TSelf[_T], value: _T) -> None: + raise AttributeError("reified property is read-only") + + +reify_py = reify + +try: + from ._helpers import reify as reify_c + + if not NO_EXTENSIONS: + reify = reify_c # type: ignore[misc,assignment] +except ImportError: + pass + +_ipv4_pattern = ( + r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" +) +_ipv6_pattern = ( + r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" + r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" + r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" + r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" + r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" + r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" + r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" + r":|:(:[A-F0-9]{1,4}){7})$" +) +_ipv4_regex = re.compile(_ipv4_pattern) +_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) +_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) +_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) + + +def _is_ip_address( + regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] +) -> bool: + if host is None: + return False + if isinstance(host, str): + return bool(regex.match(host)) + elif isinstance(host, (bytes, bytearray, memoryview)): + return bool(regexb.match(host)) + else: + raise TypeError(f"{host} [{type(host)}] is not a str or bytes") + + +is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) +is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) + + +def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: + return is_ipv4_address(host) or is_ipv6_address(host) + + +_cached_current_datetime: Optional[int] = None +_cached_formatted_datetime = "" + + +def rfc822_formatted_time() -> str: + global _cached_current_datetime + global _cached_formatted_datetime + + now = int(time.time()) + if now != _cached_current_datetime: + # Weekday and month names for HTTP date/time formatting; + # always English! + # Tuples are constants stored in codeobject! 
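+ # e.g. yields strings like "Sun, 06 Nov 1994 08:49:37 GMT" (the IMF-fixdate form standardized for HTTP by RFC 9110).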
+ _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") + _monthname = ( + "", # Dummy so we can use 1-based month numbers + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ) + + year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) + _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + _weekdayname[wd], + day, + _monthname[month], + year, + hh, + mm, + ss, + ) + _cached_current_datetime = now + return _cached_formatted_datetime + + +def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: + ref, name = info + ob = ref() + if ob is not None: + with suppress(Exception): + getattr(ob, name)() + + +def weakref_handle( + ob: object, + name: str, + timeout: float, + loop: asyncio.AbstractEventLoop, + timeout_ceil_threshold: float = 5, +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout >= timeout_ceil_threshold: + when = ceil(when) + + return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + return None + + +def call_later( + cb: Callable[[], Any], + timeout: float, + loop: asyncio.AbstractEventLoop, + timeout_ceil_threshold: float = 5, +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout > timeout_ceil_threshold: + when = ceil(when) + return loop.call_at(when, cb) + return None + + +class TimeoutHandle: + """Timeout handle""" + + def __init__( + self, + loop: asyncio.AbstractEventLoop, + timeout: Optional[float], + ceil_threshold: float = 5, + ) -> None: + self._timeout = timeout + self._loop = loop + self._ceil_threshold = ceil_threshold + self._callbacks: List[ + Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] + ] = [] + + def register( + self, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> None: + self._callbacks.append((callback, args, kwargs)) + + def close(self) -> None: + self._callbacks.clear() + + def start(self) -> Optional[asyncio.Handle]: + timeout = self._timeout + if timeout is not None and timeout > 0: + when = self._loop.time() + timeout + if timeout >= self._ceil_threshold: + when = ceil(when) + return self._loop.call_at(when, self.__call__) + else: + return None + + def timer(self) -> "BaseTimerContext": + if self._timeout is not None and self._timeout > 0: + timer = TimerContext(self._loop) + self.register(timer.timeout) + return timer + else: + return TimerNoop() + + def __call__(self) -> None: + for cb, args, kwargs in self._callbacks: + with suppress(Exception): + cb(*args, **kwargs) + + self._callbacks.clear() + + +class BaseTimerContext(ContextManager["BaseTimerContext"]): + def assert_timeout(self) -> None: + """Raise TimeoutError if timeout has been exceeded.""" + + +class TimerNoop(BaseTimerContext): + def __enter__(self) -> BaseTimerContext: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + return + + +class TimerContext(BaseTimerContext): + """Low resolution timeout context manager""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._tasks: List[asyncio.Task[Any]] = [] + self._cancelled = False + + def assert_timeout(self) -> None: + """Raise TimeoutError if timer has already been cancelled.""" + if self._cancelled: + raise asyncio.TimeoutError from None + + def __enter__(self) -> BaseTimerContext: + task = 
current_task(loop=self._loop) + + if task is None: + raise RuntimeError( + "Timeout context manager should be used " "inside a task" + ) + + if self._cancelled: + raise asyncio.TimeoutError from None + + self._tasks.append(task) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + if self._tasks: + self._tasks.pop() + + if exc_type is asyncio.CancelledError and self._cancelled: + raise asyncio.TimeoutError from None + return None + + def timeout(self) -> None: + if not self._cancelled: + for task in set(self._tasks): + task.cancel() + + self._cancelled = True + + +def ceil_timeout( + delay: Optional[float], ceil_threshold: float = 5 +) -> async_timeout.Timeout: + if delay is None or delay <= 0: + return async_timeout.timeout(None) + + loop = get_running_loop() + now = loop.time() + when = now + delay + if delay > ceil_threshold: + when = ceil(when) + return async_timeout.timeout_at(when) + + +class HeadersMixin: + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) + + _headers: MultiMapping[str] + + _content_type: Optional[str] = None + _content_dict: Optional[Dict[str, str]] = None + _stored_content_type: Union[str, None, _SENTINEL] = sentinel + + def _parse_content_type(self, raw: Optional[str]) -> None: + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = "application/octet-stream" + self._content_dict = {} + else: + msg = HeaderParser().parsestr("Content-Type: " + raw) + self._content_type = msg.get_content_type() + params = msg.get_params(()) + self._content_dict = dict(params[1:]) # First element is content type again + + @property + def content_type(self) -> str: + """The value of content part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_type # type: ignore[return-value] + + @property + def charset(self) -> Optional[str]: + """The value of charset part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + return self._content_dict.get("charset") # type: ignore[union-attr] + + @property + def content_length(self) -> Optional[int]: + """The value of Content-Length HTTP header.""" + content_length = self._headers.get(hdrs.CONTENT_LENGTH) + + if content_length is not None: + return int(content_length) + else: + return None + + +def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: + if not fut.done(): + fut.set_result(result) + + +def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: + if not fut.done(): + fut.set_exception(exc) + + +@functools.total_ordering +class AppKey(Generic[_T]): + """Keys for static typing support in Application.""" + + __slots__ = ("_name", "_t", "__orig_class__") + + # This may be set by Python when instantiating with a generic type. We need to + # support this, in order to support types that are not concrete classes, + # like Iterable, which can't be passed as the second parameter to __init__. + __orig_class__: Type[object] + + def __init__(self, name: str, t: Optional[Type[_T]] = None): + # Prefix with module name to help deduplicate key names. 
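+ # Walk up the stack to the enclosing module-level frame; e.g. an AppKey("db") created at the top of myapp/main.py becomes "myapp.main.db" (illustrative names).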
+ frame = inspect.currentframe() + while frame: + if frame.f_code.co_name == "<module>": + module: str = frame.f_globals["__name__"] + break + frame = frame.f_back + + self._name = module + "." + name + self._t = t + + def __lt__(self, other: object) -> bool: + if isinstance(other, AppKey): + return self._name < other._name + return True # Order AppKey above other types. + + def __repr__(self) -> str: + t = self._t + if t is None: + with suppress(AttributeError): + # Set to type arg. + t = get_args(self.__orig_class__)[0] + + if t is None: + t_repr = "<<unknown>>" + elif isinstance(t, type): + if t.__module__ == "builtins": + t_repr = t.__qualname__ + else: + t_repr = f"{t.__module__}.{t.__qualname__}" + else: + t_repr = repr(t) + return f"<AppKey({self._name}, type={t_repr})>" + + +class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]): + __slots__ = ("_maps",) + + def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None: + self._maps = tuple(maps) + + def __init_subclass__(cls) -> None: + raise TypeError( + "Inheritance class {} from ChainMapProxy " + "is forbidden".format(cls.__name__) + ) + + @overload # type: ignore[override] + def __getitem__(self, key: AppKey[_T]) -> _T: + ... + + @overload + def __getitem__(self, key: str) -> Any: + ... + + def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: + for mapping in self._maps: + try: + return mapping[key] + except KeyError: + pass + raise KeyError(key) + + @overload # type: ignore[override] + def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: + ... + + @overload + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: + ... + + @overload + def get(self, key: str, default: Any = ...) -> Any: + ... + + def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + try: + return self[key] + except KeyError: + return default + + def __len__(self) -> int: + # reuses stored hash values if possible + return len(set().union(*self._maps)) + + def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: + d: Dict[Union[str, AppKey[Any]], Any] = {} + for mapping in reversed(self._maps): + # reuses stored hash values if possible + d.update(mapping) + return iter(d) + + def __contains__(self, key: object) -> bool: + return any(key in m for m in self._maps) + + def __bool__(self) -> bool: + return any(self._maps) + + def __repr__(self) -> str: + content = ", ".join(map(repr, self._maps)) + return f"ChainMapProxy({content})" + + +# https://tools.ietf.org/html/rfc7232#section-2.3 +_ETAGC = r"[!\x23-\x7E\x80-\xff]+" +_ETAGC_RE = re.compile(_ETAGC) +_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' +QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) +LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") + +ETAG_ANY = "*" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ETag: + value: str + is_weak: bool = False + + +def validate_etag_value(value: str) -> None: + if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): + raise ValueError( + f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" 
+ ) + + +def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: + """Process a date string, return a datetime object""" + if date_str is not None: + timetuple = parsedate(date_str) + if timetuple is not None: + with suppress(ValueError): + return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) + return None + + +def must_be_empty_body(method: str, code: int) -> bool: + """Check if a request must return an empty body.""" + return ( + status_code_must_be_empty_body(code) + or method_must_be_empty_body(method) + or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) + ) + + +def method_must_be_empty_body(method: str) -> bool: + """Check if a method must return an empty body.""" + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 + return method.upper() == hdrs.METH_HEAD + + +def status_code_must_be_empty_body(code: int) -> bool: + """Check if a status code must return an empty body.""" + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 + return code in {204, 304} or 100 <= code < 200 + + +def should_remove_content_length(method: str, code: int) -> bool: + """Check if a Content-Length header should be removed. + + This should always be a subset of must_be_empty_body + """ + # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8 + # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4 + return ( + code in {204, 304} + or 100 <= code < 200 + or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) + ) diff --git a/venv/lib/python3.12/site-packages/aiohttp/http.py b/venv/lib/python3.12/site-packages/aiohttp/http.py new file mode 100644 index 0000000..a1feae2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/http.py @@ -0,0 +1,72 @@ +import sys +from http import HTTPStatus +from typing import Mapping, Tuple + +from . 
import __version__ +from .http_exceptions import HttpProcessingError as HttpProcessingError +from .http_parser import ( + HeadersParser as HeadersParser, + HttpParser as HttpParser, + HttpRequestParser as HttpRequestParser, + HttpResponseParser as HttpResponseParser, + RawRequestMessage as RawRequestMessage, + RawResponseMessage as RawResponseMessage, +) +from .http_websocket import ( + WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, + WS_KEY as WS_KEY, + WebSocketError as WebSocketError, + WebSocketReader as WebSocketReader, + WebSocketWriter as WebSocketWriter, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen as ws_ext_gen, + ws_ext_parse as ws_ext_parse, +) +from .http_writer import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + StreamWriter as StreamWriter, +) + +__all__ = ( + "HttpProcessingError", + "RESPONSES", + "SERVER_SOFTWARE", + # .http_writer + "StreamWriter", + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + # .http_parser + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", + # .http_websocket + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "ws_ext_gen", + "ws_ext_parse", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( + sys.version_info, __version__ +) + +RESPONSES: Mapping[int, Tuple[str, str]] = { + v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() +} diff --git a/venv/lib/python3.12/site-packages/aiohttp/http_exceptions.py b/venv/lib/python3.12/site-packages/aiohttp/http_exceptions.py new file mode 100644 index 0000000..72eac3a --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/http_exceptions.py @@ -0,0 +1,106 @@ +"""Low-level http related exceptions.""" + + +from textwrap import indent +from typing import Optional, Union + +from .typedefs import _CIMultiDict + +__all__ = ("HttpProcessingError",) + + +class HttpProcessingError(Exception): + """HTTP error. + + Shortcut for raising HTTP errors with custom code, message and headers. + + code: HTTP Error code. + message: (optional) Error message. 
+ headers: (optional) Headers to be sent in response, a list of pairs + """ + + code = 0 + message = "" + headers = None + + def __init__( + self, + *, + code: Optional[int] = None, + message: str = "", + headers: Optional[_CIMultiDict] = None, + ) -> None: + if code is not None: + self.code = code + self.headers = headers + self.message = message + + def __str__(self) -> str: + msg = indent(self.message, " ") + return f"{self.code}, message:\n{msg}" + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>" + + +class BadHttpMessage(HttpProcessingError): + + code = 400 + message = "Bad Request" + + def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: + super().__init__(message=message, headers=headers) + self.args = (message,) + + +class HttpBadRequest(BadHttpMessage): + + code = 400 + message = "Bad Request" + + +class PayloadEncodingError(BadHttpMessage): + """Base class for payload errors""" + + +class ContentEncodingError(PayloadEncodingError): + """Content encoding error.""" + + +class TransferEncodingError(PayloadEncodingError): + """transfer encoding error.""" + + +class ContentLengthError(PayloadEncodingError): + """Not enough data for satisfy content length header.""" + + +class LineTooLong(BadHttpMessage): + def __init__( + self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" + ) -> None: + super().__init__( + f"Got more than {limit} bytes ({actual_size}) when reading {line}." + ) + self.args = (line, limit, actual_size) + + +class InvalidHeader(BadHttpMessage): + def __init__(self, hdr: Union[bytes, str]) -> None: + hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr + super().__init__(f"Invalid HTTP header: {hdr!r}") + self.hdr = hdr_s + self.args = (hdr,) + + +class BadStatusLine(BadHttpMessage): + def __init__(self, line: str = "", error: Optional[str] = None) -> None: + if not isinstance(line, str): + line = repr(line) + super().__init__(error or f"Bad status line {line!r}") + self.args = (line,) + self.line = line + + +class InvalidURLError(BadHttpMessage): + pass diff --git a/venv/lib/python3.12/site-packages/aiohttp/http_parser.py b/venv/lib/python3.12/site-packages/aiohttp/http_parser.py new file mode 100644 index 0000000..8549917 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/http_parser.py @@ -0,0 +1,1009 @@ +import abc +import asyncio +import re +import string +from contextlib import suppress +from enum import IntEnum +from typing import ( + Any, + ClassVar, + Final, + Generic, + List, + Literal, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, istr +from yarl import URL + +from . 
import hdrs +from .base_protocol import BaseProtocol +from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor +from .helpers import ( + DEBUG, + NO_EXTENSIONS, + BaseTimerContext, + method_must_be_empty_body, + status_code_must_be_empty_body, +) +from .http_exceptions import ( + BadHttpMessage, + BadStatusLine, + ContentEncodingError, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + TransferEncodingError, +) +from .http_writer import HttpVersion, HttpVersion10 +from .log import internal_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import RawHeaders + +__all__ = ( + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", +) + +_SEP = Literal[b"\r\n", b"\n"] + +ASCIISET: Final[Set[str]] = set(string.printable) + +# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview +# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens +# +# method = token +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / +# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +# token = 1*tchar +METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d).(\d)") +HDRRE: Final[Pattern[bytes]] = re.compile( + rb"[\x00-\x1F\x7F-\xFF()<>@,;:\[\]={} \t\"\\]" +) +HEXDIGIT = re.compile(rb"[0-9a-fA-F]+") + + +class RawRequestMessage(NamedTuple): + method: str + path: str + version: HttpVersion + headers: "CIMultiDictProxy[str]" + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + url: URL + + +class RawResponseMessage(NamedTuple): + version: HttpVersion + code: int + reason: str + headers: CIMultiDictProxy[str] + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + + +_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) + + +class ParseState(IntEnum): + + PARSE_NONE = 0 + PARSE_LENGTH = 1 + PARSE_CHUNKED = 2 + PARSE_UNTIL_EOF = 3 + + +class ChunkState(IntEnum): + PARSE_CHUNKED_SIZE = 0 + PARSE_CHUNKED_CHUNK = 1 + PARSE_CHUNKED_CHUNK_EOF = 2 + PARSE_MAYBE_TRAILERS = 3 + PARSE_TRAILERS = 4 + + +class HeadersParser: + def __init__( + self, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + ) -> None: + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: + headers: CIMultiDict[str] = CIMultiDict() + raw_headers = [] + + lines_idx = 1 + line = lines[1] + line_count = len(lines) + + while line: + # Parse initial header name : value pair. 
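+ # e.g. b"Host: example.com" splits into bname=b"Host" and bvalue=b" example.com"; leading whitespace is stripped from the value just below.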
+ try: + bname, bvalue = line.split(b":", 1) + except ValueError: + raise InvalidHeader(line) from None + + # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 + if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} + raise InvalidHeader(line) + + bvalue = bvalue.lstrip(b" \t") + if HDRRE.search(bname): + raise InvalidHeader(bname) + if len(bname) > self.max_field_size: + raise LineTooLong( + "request header name {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(len(bname)), + ) + + header_length = len(bvalue) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = line and line[0] in (32, 9) # (' ', '\t') + + # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding + if continuation: + bvalue_lst = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(header_length), + ) + bvalue_lst.append(line) + + # next line + lines_idx += 1 + if lines_idx < line_count: + line = lines[lines_idx] + if line: + continuation = line[0] in (32, 9) # (' ', '\t') + else: + line = b"" + break + bvalue = b"".join(bvalue_lst) + else: + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(header_length), + ) + + bvalue = bvalue.strip(b" \t") + name = bname.decode("utf-8", "surrogateescape") + value = bvalue.decode("utf-8", "surrogateescape") + + # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 + if "\n" in value or "\r" in value or "\x00" in value: + raise InvalidHeader(bvalue) + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return (CIMultiDictProxy(headers), tuple(raw_headers)) + + +class HttpParser(abc.ABC, Generic[_MsgT]): + lax: ClassVar[bool] = False + + def __init__( + self, + protocol: Optional[BaseProtocol] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + limit: int = 2**16, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + timer: Optional[BaseTimerContext] = None, + code: Optional[int] = None, + method: Optional[str] = None, + readall: bool = False, + payload_exception: Optional[Type[BaseException]] = None, + response_with_body: bool = True, + read_until_eof: bool = False, + auto_decompress: bool = True, + ) -> None: + self.protocol = protocol + self.loop = loop + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self.timer = timer + self.code = code + self.method = method + self.readall = readall + self.payload_exception = payload_exception + self.response_with_body = response_with_body + self.read_until_eof = read_until_eof + + self._lines: List[bytes] = [] + self._tail = b"" + self._upgraded = False + self._payload = None + self._payload_parser: Optional[HttpPayloadParser] = None + self._auto_decompress = auto_decompress + self._limit = limit + self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + + @abc.abstractmethod + def parse_message(self, lines: List[bytes]) -> _MsgT: + pass + + def feed_eof(self) -> Optional[_MsgT]: + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + else: + # try to extract partial message + if self._tail: + 
self._lines.append(self._tail) + + if self._lines: + if self._lines[-1] != "\r\n": + self._lines.append(b"") + with suppress(Exception): + return self.parse_message(self._lines) + return None + + def feed_data( + self, + data: bytes, + SEP: _SEP = b"\r\n", + EMPTY: bytes = b"", + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + METH_CONNECT: str = hdrs.METH_CONNECT, + SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, + ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: + + messages = [] + + if self._tail: + data, self._tail = self._tail + data, b"" + + data_len = len(data) + start_pos = 0 + loop = self.loop + + while start_pos < data_len: + + # read HTTP message (request/response line + headers), \r\n\r\n + # and split by lines + if self._payload_parser is None and not self._upgraded: + pos = data.find(SEP, start_pos) + # consume \r\n + if pos == start_pos and not self._lines: + start_pos = pos + len(SEP) + continue + + if pos >= start_pos: + # line found + line = data[start_pos:pos] + if SEP == b"\n": # For lax response parsing + line = line.rstrip(b"\r") + self._lines.append(line) + start_pos = pos + len(SEP) + + # \r\n\r\n found + if self._lines[-1] == EMPTY: + try: + msg: _MsgT = self.parse_message(self._lines) + finally: + self._lines.clear() + + def get_content_length() -> Optional[int]: + # payload length + length_hdr = msg.headers.get(CONTENT_LENGTH) + if length_hdr is None: + return None + + # Shouldn't allow +/- or other number formats. + # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 + if not length_hdr.strip(" \t").isdecimal(): + raise InvalidHeader(CONTENT_LENGTH) + + return int(length_hdr) + + length = get_content_length() + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in msg.headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + self._upgraded = msg.upgrade + + method = getattr(msg, "method", self.method) + # code is only present on responses + code = getattr(msg, "code", 0) + + assert self.protocol is not None + # calculate payload + empty_body = status_code_must_be_empty_body(code) or bool( + method and method_must_be_empty_body(method) + ) + if not empty_body and ( + (length is not None and length > 0) + or msg.chunked + and not msg.upgrade + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + readall=self.readall, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + elif method == METH_CONNECT: + assert isinstance(msg, RawRequestMessage) + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + self._upgraded = True + self._payload_parser = HttpPayloadParser( + payload, + method=msg.method, + compression=msg.compression, + readall=True, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + elif not empty_body and length is None and self.read_until_eof: + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + readall=True, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + if not payload_parser.done: + 
self._payload_parser = payload_parser + else: + payload = EMPTY_PAYLOAD + + messages.append((msg, payload)) + else: + self._tail = data[start_pos:] + data = EMPTY + break + + # no parser, just store + elif self._payload_parser is None and self._upgraded: + assert not self._lines + break + + # feed payload + elif data and start_pos < data_len: + assert not self._lines + assert self._payload_parser is not None + try: + eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) + except BaseException as exc: + if self.payload_exception is not None: + self._payload_parser.payload.set_exception( + self.payload_exception(str(exc)) + ) + else: + self._payload_parser.payload.set_exception(exc) + + eof = True + data = b"" + + if eof: + start_pos = 0 + data_len = len(data) + self._payload_parser = None + continue + else: + break + + if data and start_pos < data_len: + data = data[start_pos:] + else: + data = EMPTY + + return messages, self._upgraded, data + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple[ + "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool + ]: + """Parses RFC 5322 headers from a stream. + + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. + """ + headers, raw_headers = self._headers_parser.parse_headers(lines) + close_conn = None + encoding = None + upgrade = False + chunked = False + + # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6 + # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf + singletons = ( + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TYPE, + hdrs.ETAG, + hdrs.HOST, + hdrs.MAX_FORWARDS, + hdrs.SERVER, + hdrs.TRANSFER_ENCODING, + hdrs.USER_AGENT, + ) + bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None) + if bad_hdr is not None: + raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.") + + # keep-alive + conn = headers.get(hdrs.CONNECTION) + if conn: + v = conn.lower() + if v == "close": + close_conn = True + elif v == "keep-alive": + close_conn = False + # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols + elif v == "upgrade" and headers.get(hdrs.UPGRADE): + upgrade = True + + # encoding + enc = headers.get(hdrs.CONTENT_ENCODING) + if enc: + enc = enc.lower() + if enc in ("gzip", "deflate", "br"): + encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) + if te is not None: + if "chunked" == te.lower(): + chunked = True + else: + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + + if hdrs.CONTENT_LENGTH in headers: + raise BadHttpMessage( + "Transfer-Encoding can't be present with Content-Length", + ) + + return (headers, raw_headers, close_conn, encoding, upgrade, chunked) + + def set_upgraded(self, val: bool) -> None: + """Set connection upgraded (to websocket) mode. + + :param bool val: new state. + """ + self._upgraded = val + + +class HttpRequestParser(HttpParser[RawRequestMessage]): + """Read request status line. + + Exception .http_exceptions.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. 
+ """ + + def parse_message(self, lines: List[bytes]) -> RawRequestMessage: + # request line + line = lines[0].decode("utf-8", "surrogateescape") + try: + method, path, version = line.split(" ", maxsplit=2) + except ValueError: + raise BadStatusLine(line) from None + + if len(path) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(path)) + ) + + # method + if not METHRE.fullmatch(method): + raise BadStatusLine(method) + + # version + match = VERSRE.fullmatch(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + if method == "CONNECT": + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + url = URL.build(authority=path, encoded=True) + elif path.startswith("/"): + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + path_part, _hash_separator, url_fragment = path.partition("#") + path_part, _question_mark_separator, qs_part = path_part.partition("?") + + # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based + # NOTE: parser does, otherwise it results into the same + # NOTE: HTTP Request-Line input producing different + # NOTE: `yarl.URL()` objects + url = URL.build( + path=path_part, + query_string=qs_part, + fragment=url_fragment, + encoded=True, + ) + elif path == "*" and method == "OPTIONS": + # asterisk-form, + url = URL(path, encoded=True) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + url = URL(path, encoded=True) + if url.scheme == "": + # not absolute-form + raise InvalidURLError( + path.encode(errors="surrogateescape").decode("latin1") + ) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: # then the headers weren't set in the request + if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + return RawRequestMessage( + method, + path, + version_o, + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + url, + ) + + +class HttpResponseParser(HttpParser[RawResponseMessage]): + """Read response status line and headers. + + BadStatusLine could be raised in case of any errors in status line. + Returns RawResponseMessage. + """ + + # Lax mode should only be enabled on response parser. 
+ lax = not DEBUG + + def feed_data( + self, + data: bytes, + SEP: Optional[_SEP] = None, + *args: Any, + **kwargs: Any, + ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]: + if SEP is None: + SEP = b"\r\n" if DEBUG else b"\n" + return super().feed_data(data, SEP, *args, **kwargs) + + def parse_message(self, lines: List[bytes]) -> RawResponseMessage: + line = lines[0].decode("utf-8", "surrogateescape") + try: + version, status = line.split(maxsplit=1) + except ValueError: + raise BadStatusLine(line) from None + + try: + status, reason = status.split(maxsplit=1) + except ValueError: + status = status.strip() + reason = "" + + if len(reason) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(reason)) + ) + + # version + match = VERSRE.fullmatch(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit number + if len(status) != 3 or not status.isdecimal(): + raise BadStatusLine(line) + status_i = int(status) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: + close = version_o <= HttpVersion10 + + return RawResponseMessage( + version_o, + status_i, + reason.strip(), + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) + + +class HttpPayloadParser: + def __init__( + self, + payload: StreamReader, + length: Optional[int] = None, + chunked: bool = False, + compression: Optional[str] = None, + code: Optional[int] = None, + method: Optional[str] = None, + readall: bool = False, + response_with_body: bool = True, + auto_decompress: bool = True, + lax: bool = False, + ) -> None: + self._length = 0 + self._type = ParseState.PARSE_NONE + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + self._chunk_size = 0 + self._chunk_tail = b"" + self._auto_decompress = auto_decompress + self._lax = lax + self.done = False + + # payload decompression wrapper + if response_with_body and compression and self._auto_decompress: + real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( + payload, compression + ) + else: + real_payload = payload + + # payload parser + if not response_with_body: + # don't parse payload if it's not expected to be received + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + elif chunked: + self._type = ParseState.PARSE_CHUNKED + elif length is not None: + self._type = ParseState.PARSE_LENGTH + self._length = length + if self._length == 0: + real_payload.feed_eof() + self.done = True + else: + if readall and code != 204: + self._type = ParseState.PARSE_UNTIL_EOF + elif method in ("PUT", "POST"): + internal_logger.warning( # pragma: no cover + "Content-Length or Transfer-Encoding header is required" + ) + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + + self.payload = real_payload + + def feed_eof(self) -> None: + if self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_eof() + elif self._type == ParseState.PARSE_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header." + ) + elif self._type == ParseState.PARSE_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header." 
+ ) + + def feed_data( + self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";" + ) -> Tuple[bool, bytes]: + # Read specified amount of bytes + if self._type == ParseState.PARSE_LENGTH: + required = self._length + chunk_len = len(chunk) + + if required >= chunk_len: + self._length = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + if self._length == 0: + self.payload.feed_eof() + return True, b"" + else: + self._length = 0 + self.payload.feed_data(chunk[:required], required) + self.payload.feed_eof() + return True, chunk[required:] + + # Chunked transfer encoding parser + elif self._type == ParseState.PARSE_CHUNKED: + if self._chunk_tail: + chunk = self._chunk_tail + chunk + self._chunk_tail = b"" + + while chunk: + + # read next chunk size + if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: + pos = chunk.find(SEP) + if pos >= 0: + i = chunk.find(CHUNK_EXT, 0, pos) + if i >= 0: + size_b = chunk[:i] # strip chunk-extensions + else: + size_b = chunk[:pos] + + if self._lax: # Allow whitespace in lax mode. + size_b = size_b.strip() + + if not re.fullmatch(HEXDIGIT, size_b): + exc = TransferEncodingError( + chunk[:pos].decode("ascii", "surrogateescape") + ) + self.payload.set_exception(exc) + raise exc + size = int(bytes(size_b), 16) + + chunk = chunk[pos + len(SEP) :] + if size == 0: # eof marker + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] + else: + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK + self._chunk_size = size + self.payload.begin_http_chunk_receiving() + else: + self._chunk_tail = chunk + return False, b"" + + # read chunk and feed buffer + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: + required = self._chunk_size + chunk_len = len(chunk) + + if required > chunk_len: + self._chunk_size = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + return False, b"" + else: + self._chunk_size = 0 + self.payload.feed_data(chunk[:required], required) + chunk = chunk[required:] + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF + self.payload.end_http_chunk_receiving() + + # toss the CRLF at the end of the chunk + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if chunk[: len(SEP)] == SEP: + chunk = chunk[len(SEP) :] + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + else: + self._chunk_tail = chunk + return False, b"" + + # if stream does not contain trailer, after 0\r\n + # we should get another \r\n otherwise + # trailers needs to be skipped until \r\n\r\n + if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: + head = chunk[: len(SEP)] + if head == SEP: + # end of stream + self.payload.feed_eof() + return True, chunk[len(SEP) :] + # Both CR and LF, or only LF may not be received yet. It is + # expected that CRLF or LF will be shown at the very first + # byte next time, otherwise trailers should come. The last + # CRLF which marks the end of response might not be + # contained in the same TCP segment which delivered the + # size indicator. 
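+ # Cases below: head empty -> wait for more data; head == the first byte of SEP (b"\r") -> buffer it for the next pass; anything else -> a trailer section follows.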
+ if not head: + return False, b"" + if head == SEP[:1]: + self._chunk_tail = head + return False, b"" + self._chunk = ChunkState.PARSE_TRAILERS + + # read and discard trailer up to the CRLF terminator + if self._chunk == ChunkState.PARSE_TRAILERS: + pos = chunk.find(SEP) + if pos >= 0: + chunk = chunk[pos + len(SEP) :] + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk_tail = chunk + return False, b"" + + # Read all bytes until eof + elif self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_data(chunk, len(chunk)) + + return False, b"" + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + decompressor: Any + + def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + self.out = out + self.size = 0 + self.encoding = encoding + self._started_decoding = False + + self.decompressor: Union[BrotliDecompressor, ZLibDecompressor] + if encoding == "br": + if not HAS_BROTLI: # pragma: no cover + raise ContentEncodingError( + "Can not decode content-encoding: brotli (br). " + "Please install `Brotli`" + ) + self.decompressor = BrotliDecompressor() + else: + self.decompressor = ZLibDecompressor(encoding=encoding) + + def set_exception(self, exc: BaseException) -> None: + self.out.set_exception(exc) + + def feed_data(self, chunk: bytes, size: int) -> None: + if not size: + return + + self.size += size + + # RFC1950 + # bits 0..3 = CM = 0b1000 = 8 = "deflate" + # bits 4..7 = CINFO = 1..7 = windows size. + if ( + not self._started_decoding + and self.encoding == "deflate" + and chunk[0] & 0xF != 8 + ): + # Change the decoder to decompress incorrectly compressed data + # Actually we should issue a warning about non-RFC-compliant data. + self.decompressor = ZLibDecompressor( + encoding=self.encoding, suppress_deflate_header=True + ) + + try: + chunk = self.decompressor.decompress_sync(chunk) + except Exception: + raise ContentEncodingError( + "Can not decode content-encoding: %s" % self.encoding + ) + + self._started_decoding = True + + if chunk: + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self) -> None: + chunk = self.decompressor.flush() + + if chunk or self.size > 0: + self.out.feed_data(chunk, len(chunk)) + if self.encoding == "deflate" and not self.decompressor.eof: + raise ContentEncodingError("deflate") + + self.out.feed_eof() + + def begin_http_chunk_receiving(self) -> None: + self.out.begin_http_chunk_receiving() + + def end_http_chunk_receiving(self) -> None: + self.out.end_http_chunk_receiving() + + +HttpRequestParserPy = HttpRequestParser +HttpResponseParserPy = HttpResponseParser +RawRequestMessagePy = RawRequestMessage +RawResponseMessagePy = RawResponseMessage + +try: + if not NO_EXTENSIONS: + from ._http_parser import ( # type: ignore[import-not-found,no-redef] + HttpRequestParser, + HttpResponseParser, + RawRequestMessage, + RawResponseMessage, + ) + + HttpRequestParserC = HttpRequestParser + HttpResponseParserC = HttpResponseParser + RawRequestMessageC = RawRequestMessage + RawResponseMessageC = RawResponseMessage +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.12/site-packages/aiohttp/http_websocket.py b/venv/lib/python3.12/site-packages/aiohttp/http_websocket.py new file mode 100644 index 0000000..f395a27 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/http_websocket.py @@ -0,0 +1,740 @@ +"""WebSocket protocol versions 13 and 8.""" + +import asyncio +import functools +import json +import random +import re +import sys +import zlib +from 
enum import IntEnum +from struct import Struct +from typing import ( + Any, + Callable, + Final, + List, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Union, + cast, +) + +from .base_protocol import BaseProtocol +from .compression_utils import ZLibCompressor, ZLibDecompressor +from .helpers import NO_EXTENSIONS +from .streams import DataQueue + +__all__ = ( + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +class WSCloseCode(IntEnum): + OK = 1000 + GOING_AWAY = 1001 + PROTOCOL_ERROR = 1002 + UNSUPPORTED_DATA = 1003 + ABNORMAL_CLOSURE = 1006 + INVALID_TEXT = 1007 + POLICY_VIOLATION = 1008 + MESSAGE_TOO_BIG = 1009 + MANDATORY_EXTENSION = 1010 + INTERNAL_ERROR = 1011 + SERVICE_RESTART = 1012 + TRY_AGAIN_LATER = 1013 + BAD_GATEWAY = 1014 + + +ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} + +# For websockets, keeping latency low is extremely important as implementations +# generally expect to be able to send and receive messages quickly. We use a +# larger chunk size than the default to reduce the number of executor calls +# since the executor is a significant source of latency and overhead when +# the chunks are small. A size of 5KiB was chosen because it is also the +# same value python-zlib-ng choose to use as the threshold to release the GIL. + +WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 + + +class WSMsgType(IntEnum): + # websocket spec types + CONTINUATION = 0x0 + TEXT = 0x1 + BINARY = 0x2 + PING = 0x9 + PONG = 0xA + CLOSE = 0x8 + + # aiohttp specific types + CLOSING = 0x100 + CLOSED = 0x101 + ERROR = 0x102 + + text = TEXT + binary = BINARY + ping = PING + pong = PONG + close = CLOSE + closing = CLOSING + closed = CLOSED + error = ERROR + + +WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + +UNPACK_LEN2 = Struct("!H").unpack_from +UNPACK_LEN3 = Struct("!Q").unpack_from +UNPACK_CLOSE_CODE = Struct("!H").unpack +PACK_LEN1 = Struct("!BB").pack +PACK_LEN2 = Struct("!BBH").pack +PACK_LEN3 = Struct("!BBQ").pack +PACK_CLOSE_CODE = Struct("!H").pack +MSG_SIZE: Final[int] = 2**14 +DEFAULT_LIMIT: Final[int] = 2**16 + + +class WSMessage(NamedTuple): + type: WSMsgType + # To type correctly, this would need some kind of tagged union for each type. + data: Any + extra: Optional[str] + + def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: + """Return parsed JSON data. + + .. versionadded:: 0.22 + """ + return loads(self.data) + + +WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) +WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) + + +class WebSocketError(Exception): + """WebSocket protocol parser error.""" + + def __init__(self, code: int, message: str) -> None: + self.code = code + super().__init__(code, message) + + def __str__(self) -> str: + return cast(str, self.args[1]) + + +class WSHandshakeError(Exception): + """WebSocket protocol handshake error.""" + + +native_byteorder: Final[str] = sys.byteorder + + +# Used by _websocket_mask_python +@functools.lru_cache +def _xor_table() -> List[bytes]: + return [bytes(a ^ b for a in range(256)) for b in range(256)] + + +def _websocket_mask_python(mask: bytes, data: bytearray) -> None: + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytearray` + object of any length. The contents of `data` are masked with `mask`, + as specified in section 5.3 of RFC 6455. 
+ + Note that this function mutates the `data` argument. + + This pure-python implementation may be replaced by an optimized + version when available. + + """ + assert isinstance(data, bytearray), data + assert len(mask) == 4, mask + + if data: + _XOR_TABLE = _xor_table() + a, b, c, d = (_XOR_TABLE[n] for n in mask) + data[::4] = data[::4].translate(a) + data[1::4] = data[1::4].translate(b) + data[2::4] = data[2::4].translate(c) + data[3::4] = data[3::4].translate(d) + + +if NO_EXTENSIONS: # pragma: no cover + _websocket_mask = _websocket_mask_python +else: + try: + from ._websocket import _websocket_mask_cython # type: ignore[import-not-found] + + _websocket_mask = _websocket_mask_cython + except ImportError: # pragma: no cover + _websocket_mask = _websocket_mask_python + +_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) + + +_WS_EXT_RE: Final[Pattern[str]] = re.compile( + r"^(?:;\s*(?:" + r"(server_no_context_takeover)|" + r"(client_no_context_takeover)|" + r"(server_max_window_bits(?:=(\d+))?)|" + r"(client_max_window_bits(?:=(\d+))?)))*$" +) + +_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") + + +def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: + if not extstr: + return 0, False + + compress = 0 + notakeover = False + for ext in _WS_EXT_RE_SPLIT.finditer(extstr): + defext = ext.group(1) + # Return compress = 15 when get `permessage-deflate` + if not defext: + compress = 15 + break + match = _WS_EXT_RE.match(defext) + if match: + compress = 15 + if isserver: + # Server never fail to detect compress handshake. + # Server does not need to send max wbit to client + if match.group(4): + compress = int(match.group(4)) + # Group3 must match if group4 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # CONTINUE to next extension + if compress > 15 or compress < 9: + compress = 0 + continue + if match.group(1): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + else: + if match.group(6): + compress = int(match.group(6)) + # Group5 must match if group6 matches + # Compress wbit 8 does not support in zlib + # If compress level not support, + # FAIL the parse progress + if compress > 15 or compress < 9: + raise WSHandshakeError("Invalid window size") + if match.group(2): + notakeover = True + # Ignore regex group 5 & 6 for client_max_window_bits + break + # Return Fail if client side and not match + elif not isserver: + raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) + + return compress, notakeover + + +def ws_ext_gen( + compress: int = 15, isserver: bool = False, server_notakeover: bool = False +) -> str: + # client_notakeover=False not used for server + # compress wbit 8 does not support in zlib + if compress < 9 or compress > 15: + raise ValueError( + "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" + ) + enabledext = ["permessage-deflate"] + if not isserver: + enabledext.append("client_max_window_bits") + + if compress < 15: + enabledext.append("server_max_window_bits=" + str(compress)) + if server_notakeover: + enabledext.append("server_no_context_takeover") + # if client_notakeover: + # enabledext.append('client_no_context_takeover') + return "; ".join(enabledext) + + +class WSParserState(IntEnum): + READ_HEADER = 1 + READ_PAYLOAD_LENGTH = 2 + READ_PAYLOAD_MASK = 3 + READ_PAYLOAD = 4 + + +class WebSocketReader: + def __init__( + self, queue: DataQueue[WSMessage], max_msg_size: 
+        self,
+        queue: DataQueue[WSMessage],
+        max_msg_size: int,
+        compress: bool = True,
+    ) -> None:
+        self.queue = queue
+        self._max_msg_size = max_msg_size
+
+        self._exc: Optional[BaseException] = None
+        self._partial = bytearray()
+        self._state = WSParserState.READ_HEADER
+
+        self._opcode: Optional[int] = None
+        self._frame_fin = False
+        self._frame_opcode: Optional[int] = None
+        self._frame_payload = bytearray()
+
+        self._tail = b""
+        self._has_mask = False
+        self._frame_mask: Optional[bytes] = None
+        self._payload_length = 0
+        self._payload_length_flag = 0
+        self._compressed: Optional[bool] = None
+        self._decompressobj: Optional[ZLibDecompressor] = None
+        self._compress = compress
+
+    def feed_eof(self) -> None:
+        self.queue.feed_eof()
+
+    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+        if self._exc:
+            return True, data
+
+        try:
+            return self._feed_data(data)
+        except Exception as exc:
+            self._exc = exc
+            self.queue.set_exception(exc)
+            return True, b""
+
+    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+        for fin, opcode, payload, compressed in self.parse_frame(data):
+            if compressed and not self._decompressobj:
+                self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
+            if opcode == WSMsgType.CLOSE:
+                if len(payload) >= 2:
+                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
+                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
+                        raise WebSocketError(
+                            WSCloseCode.PROTOCOL_ERROR,
+                            f"Invalid close code: {close_code}",
+                        )
+                    try:
+                        close_message = payload[2:].decode("utf-8")
+                    except UnicodeDecodeError as exc:
+                        raise WebSocketError(
+                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+                        ) from exc
+                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
+                elif payload:
+                    raise WebSocketError(
+                        WSCloseCode.PROTOCOL_ERROR,
+                        f"Invalid close frame: {fin} {opcode} {payload!r}",
+                    )
+                else:
+                    msg = WSMessage(WSMsgType.CLOSE, 0, "")
+
+                self.queue.feed_data(msg, 0)
+
+            elif opcode == WSMsgType.PING:
+                self.queue.feed_data(
+                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
+                )
+
+            elif opcode == WSMsgType.PONG:
+                self.queue.feed_data(
+                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
+                )
+
+            elif (
+                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
+                and self._opcode is None
+            ):
+                raise WebSocketError(
+                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
+                )
+            else:
+                # load text/binary
+                if not fin:
+                    # got a partial frame payload
+                    if opcode != WSMsgType.CONTINUATION:
+                        self._opcode = opcode
+                    self._partial.extend(payload)
+                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+                        raise WebSocketError(
+                            WSCloseCode.MESSAGE_TOO_BIG,
+                            "Message size {} exceeds limit {}".format(
+                                len(self._partial), self._max_msg_size
+                            ),
+                        )
+                else:
+                    # the previous frame was not finished,
+                    # so we expect a continuation opcode
+                    if self._partial:
+                        if opcode != WSMsgType.CONTINUATION:
+                            raise WebSocketError(
+                                WSCloseCode.PROTOCOL_ERROR,
+                                "The opcode in non-fin frame is expected "
+                                "to be zero, got {!r}".format(opcode),
+                            )
+
+                    if opcode == WSMsgType.CONTINUATION:
+                        assert self._opcode is not None
+                        opcode = self._opcode
+                        self._opcode = None
+
+                    self._partial.extend(payload)
+                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+                        raise WebSocketError(
+                            WSCloseCode.MESSAGE_TOO_BIG,
+                            "Message size {} exceeds limit {}".format(
+                                len(self._partial), self._max_msg_size
+                            ),
+                        )
+
+                    # Decompression must be performed only after all frames
+                    # of the message have been received.
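+                    # A sketch of what the decompression step below
+                    # effectively does (illustration only, not part of the
+                    # upstream code): per RFC 7692 the sender strips the
+                    # final b"\x00\x00\xff\xff" flush marker, so the
+                    # receiver re-appends it before inflating, e.g.:
+                    #
+                    #     import zlib
+                    #     d = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+                    #     plain = d.decompress(bytes(partial) + b"\x00\x00\xff\xff")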
+ if compressed: + assert self._decompressobj is not None + self._partial.extend(_WS_DEFLATE_TRAILING) + payload_merged = self._decompressobj.decompress_sync( + self._partial, self._max_msg_size + ) + if self._decompressobj.unconsumed_tail: + left = len(self._decompressobj.unconsumed_tail) + raise WebSocketError( + WSCloseCode.MESSAGE_TOO_BIG, + "Decompressed message size {} exceeds limit {}".format( + self._max_msg_size + left, self._max_msg_size + ), + ) + else: + payload_merged = bytes(self._partial) + + self._partial.clear() + + if opcode == WSMsgType.TEXT: + try: + text = payload_merged.decode("utf-8") + self.queue.feed_data( + WSMessage(WSMsgType.TEXT, text, ""), len(text) + ) + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + else: + self.queue.feed_data( + WSMessage(WSMsgType.BINARY, payload_merged, ""), + len(payload_merged), + ) + + return False, b"" + + def parse_frame( + self, buf: bytes + ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: + """Return the next frame from the socket.""" + frames = [] + if self._tail: + buf, self._tail = self._tail + buf, b"" + + start_pos = 0 + buf_length = len(buf) + + while True: + # read header + if self._state == WSParserState.READ_HEADER: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + first_byte, second_byte = data + + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xF + + # frame-fin = %x0 ; more frames of this message follow + # / %x1 ; final frame of this message + # frame-rsv1 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv2 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # frame-rsv3 = %x0 ; + # 1 bit, MUST be 0 unless negotiated otherwise + # + # Remove rsv1 from this test for deflate development + if rsv2 or rsv3 or (rsv1 and not self._compress): + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + if opcode > 0x7 and fin == 0: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received fragmented control frame", + ) + + has_mask = (second_byte >> 7) & 1 + length = second_byte & 0x7F + + # Control frames MUST have a payload + # length of 125 bytes or less + if opcode > 0x7 and length > 125: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Control frame payload cannot be " "larger than 125 bytes", + ) + + # Set compress status if last package is FIN + # OR set compress status if this is first fragment + # Raise error if not first fragment with rsv1 = 0x1 + if self._frame_fin or self._compressed is None: + self._compressed = True if rsv1 else False + elif rsv1: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Received frame with non-zero reserved bits", + ) + + self._frame_fin = bool(fin) + self._frame_opcode = opcode + self._has_mask = bool(has_mask) + self._payload_length_flag = length + self._state = WSParserState.READ_PAYLOAD_LENGTH + else: + break + + # read payload length + if self._state == WSParserState.READ_PAYLOAD_LENGTH: + length = self._payload_length_flag + if length == 126: + if buf_length - start_pos >= 2: + data = buf[start_pos : start_pos + 2] + start_pos += 2 + length = UNPACK_LEN2(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + elif length > 126: + if 
buf_length - start_pos >= 8: + data = buf[start_pos : start_pos + 8] + start_pos += 8 + length = UNPACK_LEN3(data)[0] + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + else: + break + else: + self._payload_length = length + self._state = ( + WSParserState.READ_PAYLOAD_MASK + if self._has_mask + else WSParserState.READ_PAYLOAD + ) + + # read payload mask + if self._state == WSParserState.READ_PAYLOAD_MASK: + if buf_length - start_pos >= 4: + self._frame_mask = buf[start_pos : start_pos + 4] + start_pos += 4 + self._state = WSParserState.READ_PAYLOAD + else: + break + + if self._state == WSParserState.READ_PAYLOAD: + length = self._payload_length + payload = self._frame_payload + + chunk_len = buf_length - start_pos + if length >= chunk_len: + self._payload_length = length - chunk_len + payload.extend(buf[start_pos:]) + start_pos = buf_length + else: + self._payload_length = 0 + payload.extend(buf[start_pos : start_pos + length]) + start_pos = start_pos + length + + if self._payload_length == 0: + if self._has_mask: + assert self._frame_mask is not None + _websocket_mask(self._frame_mask, payload) + + frames.append( + (self._frame_fin, self._frame_opcode, payload, self._compressed) + ) + + self._frame_payload = bytearray() + self._state = WSParserState.READ_HEADER + else: + break + + self._tail = buf[start_pos:] + + return frames + + +class WebSocketWriter: + def __init__( + self, + protocol: BaseProtocol, + transport: asyncio.Transport, + *, + use_mask: bool = False, + limit: int = DEFAULT_LIMIT, + random: Any = random.Random(), + compress: int = 0, + notakeover: bool = False, + ) -> None: + self.protocol = protocol + self.transport = transport + self.use_mask = use_mask + self.randrange = random.randrange + self.compress = compress + self.notakeover = notakeover + self._closing = False + self._limit = limit + self._output_size = 0 + self._compressobj: Any = None # actually compressobj + + async def _send_frame( + self, message: bytes, opcode: int, compress: Optional[int] = None + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if self._closing and not (opcode & WSMsgType.CLOSE): + raise ConnectionResetError("Cannot write to closing transport") + + rsv = 0 + + # Only compress larger packets (disabled) + # Does small packet needs to be compressed? + # if self.compress and opcode < 8 and len(message) > 124: + if (compress or self.compress) and opcode < 8: + if compress: + # Do not set self._compress if compressing is for this frame + compressobj = self._make_compress_obj(compress) + else: # self.compress + if not self._compressobj: + self._compressobj = self._make_compress_obj(self.compress) + compressobj = self._compressobj + + message = await compressobj.compress(message) + # Its critical that we do not return control to the event + # loop until we have finished sending all the compressed + # data. Otherwise we could end up mixing compressed frames + # if there are multiple coroutines compressing data. 
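+            # Z_SYNC_FLUSH and Z_FULL_FLUSH both terminate the deflate
+            # output with an empty stored block, i.e. the four bytes
+            # b"\x00\x00\xff\xff" (_WS_DEFLATE_TRAILING); RFC 7692 requires
+            # that tail to be stripped before framing, which is why the code
+            # below drops the last 4 bytes and advertises compression by
+            # setting RSV1 (0x40) in the frame header.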
+ message += compressobj.flush( + zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH + ) + if message.endswith(_WS_DEFLATE_TRAILING): + message = message[:-4] + rsv = rsv | 0x40 + + msg_length = len(message) + + use_mask = self.use_mask + if use_mask: + mask_bit = 0x80 + else: + mask_bit = 0 + + if msg_length < 126: + header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + elif msg_length < (1 << 16): + header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + else: + header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + if use_mask: + mask = self.randrange(0, 0xFFFFFFFF) + mask = mask.to_bytes(4, "big") + message = bytearray(message) + _websocket_mask(mask, message) + self._write(header + mask + message) + self._output_size += len(header) + len(mask) + len(message) + else: + if len(message) > MSG_SIZE: + self._write(header) + self._write(message) + else: + self._write(header + message) + + self._output_size += len(header) + len(message) + + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. + + if self._output_size > self._limit: + self._output_size = 0 + await self.protocol._drain_helper() + + def _make_compress_obj(self, compress: int) -> ZLibCompressor: + return ZLibCompressor( + level=zlib.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + + def _write(self, data: bytes) -> None: + if self.transport is None or self.transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + self.transport.write(data) + + async def pong(self, message: Union[bytes, str] = b"") -> None: + """Send pong message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PONG) + + async def ping(self, message: Union[bytes, str] = b"") -> None: + """Send ping message.""" + if isinstance(message, str): + message = message.encode("utf-8") + await self._send_frame(message, WSMsgType.PING) + + async def send( + self, + message: Union[str, bytes], + binary: bool = False, + compress: Optional[int] = None, + ) -> None: + """Send a frame over the websocket with message as its payload.""" + if isinstance(message, str): + message = message.encode("utf-8") + if binary: + await self._send_frame(message, WSMsgType.BINARY, compress) + else: + await self._send_frame(message, WSMsgType.TEXT, compress) + + async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: + """Close the websocket, sending the specified code and message.""" + if isinstance(message, str): + message = message.encode("utf-8") + try: + await self._send_frame( + PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE + ) + finally: + self._closing = True diff --git a/venv/lib/python3.12/site-packages/aiohttp/http_writer.py b/venv/lib/python3.12/site-packages/aiohttp/http_writer.py new file mode 100644 index 0000000..d6b02e6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/http_writer.py @@ -0,0 +1,198 @@ +"""Http related parsers and protocol.""" + +import asyncio +import zlib +from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa + +from multidict import CIMultiDict + +from .abc import AbstractStreamWriter +from .base_protocol import BaseProtocol +from .compression_utils import ZLibCompressor +from .helpers import NO_EXTENSIONS + +__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11") + + +class 
HttpVersion(NamedTuple): + major: int + minor: int + + +HttpVersion10 = HttpVersion(1, 0) +HttpVersion11 = HttpVersion(1, 1) + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] +_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]] + + +class StreamWriter(AbstractStreamWriter): + def __init__( + self, + protocol: BaseProtocol, + loop: asyncio.AbstractEventLoop, + on_chunk_sent: _T_OnChunkSent = None, + on_headers_sent: _T_OnHeadersSent = None, + ) -> None: + self._protocol = protocol + + self.loop = loop + self.length = None + self.chunked = False + self.buffer_size = 0 + self.output_size = 0 + + self._eof = False + self._compress: Optional[ZLibCompressor] = None + self._drain_waiter = None + + self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent + self._on_headers_sent: _T_OnHeadersSent = on_headers_sent + + @property + def transport(self) -> Optional[asyncio.Transport]: + return self._protocol.transport + + @property + def protocol(self) -> BaseProtocol: + return self._protocol + + def enable_chunking(self) -> None: + self.chunked = True + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) + + def _write(self, chunk: bytes) -> None: + size = len(chunk) + self.buffer_size += size + self.output_size += size + transport = self.transport + if not self._protocol.connected or transport is None or transport.is_closing(): + raise ConnectionResetError("Cannot write to closing transport") + transport.write(chunk) + + async def write( + self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 + ) -> None: + """Writes chunk of data to a stream. + + write_eof() indicates end of stream. + writer can't be used after write_eof() method being called. + write() return drain future. 
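+
+        A sketch of the chunked framing this writer produces once
+        enable_chunking() has been called and no compression is active
+        (chunk sizes are hexadecimal, per the HTTP/1.1 chunked coding):
+
+            await w.write(b"data")   # wire bytes: b"4\r\ndata\r\n"
+            await w.write_eof()      # wire bytes: b"0\r\n\r\n"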
+ """ + if self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if isinstance(chunk, memoryview): + if chunk.nbytes != len(chunk): + # just reshape it + chunk = chunk.cast("c") + + if self._compress is not None: + chunk = await self._compress.compress(chunk) + if not chunk: + return + + if self.length is not None: + chunk_len = len(chunk) + if self.length >= chunk_len: + self.length = self.length - chunk_len + else: + chunk = chunk[: self.length] + self.length = 0 + if not chunk: + return + + if chunk: + if self.chunked: + chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len_pre + chunk + b"\r\n" + + self._write(chunk) + + if self.buffer_size > LIMIT and drain: + self.buffer_size = 0 + await self.drain() + + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write request/response status and headers.""" + if self._on_headers_sent is not None: + await self._on_headers_sent(headers) + + # status + headers + buf = _serialize_headers(status_line, headers) + self._write(buf) + + async def write_eof(self, chunk: bytes = b"") -> None: + if self._eof: + return + + if chunk and self._on_chunk_sent is not None: + await self._on_chunk_sent(chunk) + + if self._compress: + if chunk: + chunk = await self._compress.compress(chunk) + + chunk += self._compress.flush() + if chunk and self.chunked: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + if self.chunked: + if chunk: + chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") + chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" + else: + chunk = b"0\r\n\r\n" + + if chunk: + self._write(chunk) + + await self.drain() + + self._eof = True + + async def drain(self) -> None: + """Flush the write buffer. + + The intended use is to write + + await w.write(data) + await w.drain() + """ + if self._protocol.transport is not None: + await self._protocol._drain_helper() + + +def _safe_header(string: str) -> str: + if "\r" in string or "\n" in string: + raise ValueError( + "Newline or carriage return detected in headers. " + "Potential header injection attack." + ) + return string + + +def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: + headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) + line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" + return line.encode("utf-8") + + +_serialize_headers = _py_serialize_headers + +try: + import aiohttp._http_writer as _http_writer # type: ignore[import-not-found] + + _c_serialize_headers = _http_writer._serialize_headers + if not NO_EXTENSIONS: + _serialize_headers = _c_serialize_headers +except ImportError: + pass diff --git a/venv/lib/python3.12/site-packages/aiohttp/locks.py b/venv/lib/python3.12/site-packages/aiohttp/locks.py new file mode 100644 index 0000000..de2dc83 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/locks.py @@ -0,0 +1,41 @@ +import asyncio +import collections +from typing import Any, Deque, Optional + + +class EventResultOrError: + """Event asyncio lock helper class. + + Wraps the Event asyncio lock allowing either to awake the + locked Tasks without any error or raising an exception. + + thanks to @vorpalsmith for the simple design. 
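+
+    A minimal usage sketch (the task names are illustrative only):
+
+        ev = EventResultOrError(loop)
+        # waiting task:          await ev.wait()
+        # waking normally:       ev.set()
+        # waking with an error:  ev.set(ConnectionResetError())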
+ """ + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._exc: Optional[BaseException] = None + self._event = asyncio.Event() + self._waiters: Deque[asyncio.Future[Any]] = collections.deque() + + def set(self, exc: Optional[BaseException] = None) -> None: + self._exc = exc + self._event.set() + + async def wait(self) -> Any: + waiter = self._loop.create_task(self._event.wait()) + self._waiters.append(waiter) + try: + val = await waiter + finally: + self._waiters.remove(waiter) + + if self._exc is not None: + raise self._exc + + return val + + def cancel(self) -> None: + """Cancel all waiters""" + for waiter in self._waiters: + waiter.cancel() diff --git a/venv/lib/python3.12/site-packages/aiohttp/log.py b/venv/lib/python3.12/site-packages/aiohttp/log.py new file mode 100644 index 0000000..3cecea2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/log.py @@ -0,0 +1,8 @@ +import logging + +access_logger = logging.getLogger("aiohttp.access") +client_logger = logging.getLogger("aiohttp.client") +internal_logger = logging.getLogger("aiohttp.internal") +server_logger = logging.getLogger("aiohttp.server") +web_logger = logging.getLogger("aiohttp.web") +ws_logger = logging.getLogger("aiohttp.websocket") diff --git a/venv/lib/python3.12/site-packages/aiohttp/multipart.py b/venv/lib/python3.12/site-packages/aiohttp/multipart.py new file mode 100644 index 0000000..3a87933 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/multipart.py @@ -0,0 +1,969 @@ +import base64 +import binascii +import json +import re +import uuid +import warnings +import zlib +from collections import deque +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Deque, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import parse_qsl, unquote, urlencode + +from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping + +from .compression_utils import ZLibCompressor, ZLibDecompressor +from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, + CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, + CONTENT_TYPE, +) +from .helpers import CHAR, TOKEN, parse_mimetype, reify +from .http import HeadersParser +from .payload import ( + JsonPayload, + LookupError, + Order, + Payload, + StringPayload, + get_payload, + payload_type, +) +from .streams import StreamReader + +__all__ = ( + "MultipartReader", + "MultipartWriter", + "BodyPartReader", + "BadContentDispositionHeader", + "BadContentDispositionParam", + "parse_content_disposition", + "content_disposition_filename", +) + + +if TYPE_CHECKING: # pragma: no cover + from .client_reqrep import ClientResponse + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition( + header: Optional[str], +) -> Tuple[Optional[str], Dict[str, str]]: + def is_token(string: str) -> bool: + return bool(string) and TOKEN >= set(string) + + def is_quoted(string: str) -> bool: + return string[0] == string[-1] == '"' + + def is_rfc5987(string: str) -> bool: + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string: str) -> bool: + return string.endswith("*") + + def is_continuous_param(string: str) -> bool: + pos = string.find("*") + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith("*") else string[pos:] + return substring.isdigit() + + def unescape(text: str, *, chars: str = 
"".join(map(re.escape, CHAR))) -> str: + return re.sub(f"\\\\([{chars}])", "\\1", text) + + if not header: + return None, {} + + disptype, *parts = header.split(";") + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params: Dict[str, str] = {} + while parts: + item = parts.pop(0) + + if "=" not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split("=", 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, "strict") + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + failed = True + if is_quoted(value): + failed = False + value = unescape(value[1:-1].lstrip("\\/")) + elif is_token(value): + failed = False + elif parts: + # maybe just ; in filename, in any case this is just + # one case fix, for proper fix we need to redesign parser + _value = f"{value};{parts[0]}" + if is_quoted(_value): + parts.pop(0) + value = unescape(_value[1:-1].lstrip("\\/")) + failed = False + + if failed: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename( + params: Mapping[str, str], name: str = "filename" +) -> Optional[str]: + name_suf = "%s*" % name + if not params: + return None + elif name_suf in params: + return params[name_suf] + elif name in params: + return params[name] + else: + parts = [] + fnparams = sorted( + (key, value) for key, value in params.items() if key.startswith(name_suf) + ) + for num, (key, value) in enumerate(fnparams): + _, tail = key.split("*", 1) + if tail.endswith("*"): + tail = tail[:-1] + if tail == str(num): + parts.append(value) + else: + break + if not parts: + return None + value = "".join(parts) + if "'" in value: + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + return unquote(value, encoding, "strict") + return value + + +class MultipartResponseWrapper: + """Wrapper around the MultipartReader. + + It takes care about + underlying connection and close it when it needs in. 
+ """ + + def __init__( + self, + resp: "ClientResponse", + stream: "MultipartReader", + ) -> None: + self.resp = resp + self.stream = stream + + def __aiter__(self) -> "MultipartResponseWrapper": + return self + + async def __anext__( + self, + ) -> Union["MultipartReader", "BodyPartReader"]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + def at_eof(self) -> bool: + """Returns True when all response data had been read.""" + return self.resp.content.at_eof() + + async def next( + self, + ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: + """Emits next multipart reader object.""" + item = await self.stream.next() + if self.stream.at_eof(): + await self.release() + return item + + async def release(self) -> None: + """Release the connection gracefully. + + All remaining content is read to the void. + """ + await self.resp.release() + + +class BodyPartReader: + """Multipart reader for single body part.""" + + chunk_size = 8192 + + def __init__( + self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader + ) -> None: + self.headers = headers + self._boundary = boundary + self._content = content + self._at_eof = False + length = self.headers.get(CONTENT_LENGTH, None) + self._length = int(length) if length is not None else None + self._read_bytes = 0 + self._unread: Deque[bytes] = deque() + self._prev_chunk: Optional[bytes] = None + self._content_eof = 0 + self._cache: Dict[str, Any] = {} + + def __aiter__(self) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__(self) -> bytes: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + async def next(self) -> Optional[bytes]: + item = await self.read() + if not item: + return None + return item + + async def read(self, *, decode: bool = False) -> bytes: + """Reads body part data. + + decode: Decodes data following by encoding + method from Content-Encoding header. If it missed + data remains untouched + """ + if self._at_eof: + return b"" + data = bytearray() + while not self._at_eof: + data.extend(await self.read_chunk(self.chunk_size)) + if decode: + return self.decode(data) + return data + + async def read_chunk(self, size: int = chunk_size) -> bytes: + """Reads body part content chunk of the specified size. + + size: chunk size + """ + if self._at_eof: + return b"" + if self._length: + chunk = await self._read_chunk_from_length(size) + else: + chunk = await self._read_chunk_from_stream(size) + + self._read_bytes += len(chunk) + if self._read_bytes == self._length: + self._at_eof = True + if self._at_eof: + clrf = await self._content.readline() + assert ( + b"\r\n" == clrf + ), "reader did not read all the data or it is malformed" + return chunk + + async def _read_chunk_from_length(self, size: int) -> bytes: + # Reads body part content chunk of the specified size. + # The body part must has Content-Length header with proper value. + assert self._length is not None, "Content-Length required for chunked read" + chunk_size = min(size, self._length - self._read_bytes) + chunk = await self._content.read(chunk_size) + return chunk + + async def _read_chunk_from_stream(self, size: int) -> bytes: + # Reads content chunk of body part with unknown length. + # The Content-Length header for body part is not necessary. 
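+        # The scan below keeps a two-chunk sliding window so that a
+        # b"\r\n" + boundary delimiter straddling a chunk edge is still
+        # found: e.g. the delimiter b"\r\n--xyz" may arrive split as
+        # b"...\r" + b"\n--xyz...", and only the concatenated window
+        # contains it in one piece.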
+ assert ( + size >= len(self._boundary) + 2 + ), "Chunk size must be greater or equal than boundary length + 2" + first_chunk = self._prev_chunk is None + if first_chunk: + self._prev_chunk = await self._content.read(size) + + chunk = await self._content.read(size) + self._content_eof += int(self._content.at_eof()) + assert self._content_eof < 3, "Reading after EOF" + assert self._prev_chunk is not None + window = self._prev_chunk + chunk + sub = b"\r\n" + self._boundary + if first_chunk: + idx = window.find(sub) + else: + idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) + if idx >= 0: + # pushing boundary back to content + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + self._content.unread_data(window[idx:]) + if size > idx: + self._prev_chunk = self._prev_chunk[:idx] + chunk = window[len(self._prev_chunk) : idx] + if not chunk: + self._at_eof = True + result = self._prev_chunk + self._prev_chunk = chunk + return result + + async def readline(self) -> bytes: + """Reads body part by line by line.""" + if self._at_eof: + return b"" + + if self._unread: + line = self._unread.popleft() + else: + line = await self._content.readline() + + if line.startswith(self._boundary): + # the very last boundary may not come with \r\n, + # so set single rules for everyone + sline = line.rstrip(b"\r\n") + boundary = self._boundary + last_boundary = self._boundary + b"--" + # ensure that we read exactly the boundary, not something alike + if sline == boundary or sline == last_boundary: + self._at_eof = True + self._unread.append(line) + return b"" + else: + next_line = await self._content.readline() + if next_line.startswith(self._boundary): + line = line[:-2] # strip CRLF but only once + self._unread.append(next_line) + + return line + + async def release(self) -> None: + """Like read(), but reads all the data to the void.""" + if self._at_eof: + return + while not self._at_eof: + await self.read_chunk(self.chunk_size) + + async def text(self, *, encoding: Optional[str] = None) -> str: + """Like read(), but assumes that body part contains text data.""" + data = await self.read(decode=True) + # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm + # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send + encoding = encoding or self.get_charset(default="utf-8") + return data.decode(encoding) + + async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: + """Like read(), but assumes that body parts contains JSON data.""" + data = await self.read(decode=True) + if not data: + return None + encoding = encoding or self.get_charset(default="utf-8") + return cast(Dict[str, Any], json.loads(data.decode(encoding))) + + async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: + """Like read(), but assumes that body parts contain form urlencoded data.""" + data = await self.read(decode=True) + if not data: + return [] + if encoding is not None: + real_encoding = encoding + else: + real_encoding = self.get_charset(default="utf-8") + try: + decoded_data = data.rstrip().decode(real_encoding) + except UnicodeDecodeError: + raise ValueError("data cannot be decoded with %s encoding" % real_encoding) + + return parse_qsl( + decoded_data, + keep_blank_values=True, + encoding=real_encoding, + ) + + def at_eof(self) -> bool: + """Returns True if the boundary was reached or False otherwise.""" + return self._at_eof + + def decode(self, data: bytes) -> 
bytes: + """Decodes data. + + Decoding is done according the specified Content-Encoding + or Content-Transfer-Encoding headers value. + """ + if CONTENT_TRANSFER_ENCODING in self.headers: + data = self._decode_content_transfer(data) + if CONTENT_ENCODING in self.headers: + return self._decode_content(data) + return data + + def _decode_content(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_ENCODING, "").lower() + if encoding == "identity": + return data + if encoding in {"deflate", "gzip"}: + return ZLibDecompressor( + encoding=encoding, + suppress_deflate_header=True, + ).decompress_sync(data) + + raise RuntimeError(f"unknown content encoding: {encoding}") + + def _decode_content_transfer(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + + if encoding == "base64": + return base64.b64decode(data) + elif encoding == "quoted-printable": + return binascii.a2b_qp(data) + elif encoding in ("binary", "8bit", "7bit"): + return data + else: + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(encoding) + ) + + def get_charset(self, default: str) -> str: + """Returns charset parameter from Content-Type header or default.""" + ctype = self.headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + return mimetype.parameters.get("charset", default) + + @reify + def name(self) -> Optional[str]: + """Returns name specified in Content-Disposition header. + + If the header is missing or malformed, returns None. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "name") + + @reify + def filename(self) -> Optional[str]: + """Returns filename specified in Content-Disposition header. + + Returns None if the header is missing or malformed. + """ + _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) + return content_disposition_filename(params, "filename") + + +@payload_type(BodyPartReader, order=Order.try_first) +class BodyPartReaderPayload(Payload): + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value, *args, **kwargs) + + params: Dict[str, str] = {} + if value.name is not None: + params["name"] = value.name + if value.filename is not None: + params["filename"] = value.filename + + if params: + self.set_content_disposition("attachment", True, **params) + + async def write(self, writer: Any) -> None: + field = self._value + chunk = await field.read_chunk(size=2**16) + while chunk: + await writer.write(field.decode(chunk)) + chunk = await field.read_chunk(size=2**16) + + +class MultipartReader: + """Multipart body reader.""" + + #: Response wrapper, used when multipart readers constructs from response. + response_wrapper_cls = MultipartResponseWrapper + #: Multipart reader class, used to handle multipart/* body parts. + #: None points to type(self) + multipart_reader_cls = None + #: Body part reader class for non multipart/* content types. 
+ part_reader_cls = BodyPartReader + + def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self.headers = headers + self._boundary = ("--" + self._get_boundary()).encode() + self._content = content + self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None + self._at_eof = False + self._at_bof = True + self._unread: List[bytes] = [] + + def __aiter__( + self, + ) -> AsyncIterator["BodyPartReader"]: + return self # type: ignore[return-value] + + async def __anext__( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + @classmethod + def from_response( + cls, + response: "ClientResponse", + ) -> MultipartResponseWrapper: + """Constructs reader instance from HTTP response. + + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls( + response, cls(response.headers, response.content) + ) + return obj + + def at_eof(self) -> bool: + """Returns True if the final boundary was reached, false otherwise.""" + return self._at_eof + + async def next( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return None + await self._maybe_release_last_part() + if self._at_bof: + await self._read_until_first_boundary() + self._at_bof = False + else: + await self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return None + self._last_part = await self.fetch_next_part() + return self._last_part + + async def release(self) -> None: + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = await self.next() + if item is None: + break + await item.release() + + async def fetch_next_part( + self, + ) -> Union["MultipartReader", BodyPartReader]: + """Returns the next body part reader.""" + headers = await self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader( + self, + headers: "CIMultiDictProxy[str]", + ) -> Union["MultipartReader", BodyPartReader]: + """Dispatches the response by the `Content-Type` header. + + Returns a suitable reader instance. 
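+        Nested ``multipart/*`` parts are handed to another multipart
+        reader (recursively), while any other content type is wrapped
+        in a body-part reader bound to the current boundary.
+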
+ + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + + if mimetype.type == "multipart": + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls(self._boundary, headers, self._content) + + def _get_boundary(self) -> str: + mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) + + assert mimetype.type == "multipart", "multipart/* content type expected" + + if "boundary" not in mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + ) + + boundary = mimetype.parameters["boundary"] + if len(boundary) > 70: + raise ValueError("boundary %r is too long (70 chars max)" % boundary) + + return boundary + + async def _readline(self) -> bytes: + if self._unread: + return self._unread.pop() + return await self._content.readline() + + async def _read_until_first_boundary(self) -> None: + while True: + chunk = await self._readline() + if chunk == b"": + raise ValueError( + "Could not find starting boundary %r" % (self._boundary) + ) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b"--": + self._at_eof = True + return + + async def _read_boundary(self) -> None: + chunk = (await self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b"--": + self._at_eof = True + epilogue = await self._readline() + next_line = await self._readline() + + # the epilogue is expected and then either the end of input or the + # parent multipart boundary, if the parent boundary is found then + # it should be marked as unread and handed to the parent for + # processing + if next_line[:2] == b"--": + self._unread.append(next_line) + # otherwise the request is likely missing an epilogue and both + # lines should be passed to the parent for processing + # (this handles the old behavior gracefully) + else: + self._unread.extend([next_line, epilogue]) + else: + raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") + + async def _read_headers(self) -> "CIMultiDictProxy[str]": + lines = [b""] + while True: + chunk = await self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HeadersParser() + headers, raw_headers = parser.parse_headers(lines) + return headers + + async def _maybe_release_last_part(self) -> None: + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + await self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None + + +_Part = Tuple[Payload, str, str] + + +class MultipartWriter(Payload): + """Multipart body writer.""" + + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: + boundary = boundary if boundary is not None else uuid.uuid4().hex + # The underlying Payload API demands a str (utf-8), not bytes, + # so we need to ensure we don't lose anything during conversion. + # As a result, require the boundary to be ASCII only. + # In both situations. 
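+        #
+        # For example (illustrative values): the default uuid4().hex
+        # boundary is a plain RFC 7230 token and is emitted as-is, while
+        #
+        #     MultipartWriter("form-data", boundary="my boundary")
+        #
+        # is emitted by _boundary_value in the quoted form
+        #
+        #     multipart/form-data; boundary="my boundary"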
+ + try: + self._boundary = boundary.encode("ascii") + except UnicodeEncodeError: + raise ValueError("boundary should contain ASCII only chars") from None + ctype = f"multipart/{subtype}; boundary={self._boundary_value}" + + super().__init__(None, content_type=ctype) + + self._parts: List[_Part] = [] + + def __enter__(self) -> "MultipartWriter": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def __iter__(self) -> Iterator[_Part]: + return iter(self._parts) + + def __len__(self) -> int: + return len(self._parts) + + def __bool__(self) -> bool: + return True + + _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") + _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") + + @property + def _boundary_value(self) -> str: + """Wrap boundary parameter value in quotes, if necessary. + + Reads self.boundary and returns a unicode string. + """ + # Refer to RFCs 7231, 7230, 5234. + # + # parameter = token "=" ( token / quoted-string ) + # token = 1*tchar + # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + # obs-text = %x80-FF + # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + # / DIGIT / ALPHA + # ; any VCHAR, except delimiters + # VCHAR = %x21-7E + value = self._boundary + if re.match(self._valid_tchar_regex, value): + return value.decode("ascii") # cannot fail + + if re.search(self._invalid_qdtext_char_regex, value): + raise ValueError("boundary value contains invalid characters") + + # escape %x5C and %x22 + quoted_value_content = value.replace(b"\\", b"\\\\") + quoted_value_content = quoted_value_content.replace(b'"', b'\\"') + + return '"' + quoted_value_content.decode("ascii") + '"' + + @property + def boundary(self) -> str: + return self._boundary.decode("ascii") + + def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Payload): + obj.headers.update(headers) + return self.append_payload(obj) + else: + try: + payload = get_payload(obj, headers=headers) + except LookupError: + raise TypeError("Cannot create payload from %r" % obj) + else: + return self.append_payload(payload) + + def append_payload(self, payload: Payload) -> Payload: + """Adds a new body part to multipart writer.""" + # compression + encoding: Optional[str] = payload.headers.get( + CONTENT_ENCODING, + "", + ).lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding: Optional[str] = payload.headers.get( + CONTENT_TRANSFER_ENCODING, + "", + ).lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError( + "unknown content transfer encoding: {}" "".format(te_encoding) + ) + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) + + self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] + return payload + + def append_json( + self, obj: Any, headers: Optional[MultiMapping[str]] = None + ) -> Payload: + """Helper to append JSON part.""" + if headers is None: 
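+            # Defaulting to a fresh CIMultiDict lets callers simply write,
+            # e.g., writer.append_json({"ok": True}) with no headers at all.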
+ headers = CIMultiDict() + + return self.append_payload(JsonPayload(obj, headers=headers)) + + def append_form( + self, + obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], + headers: Optional[MultiMapping[str]] = None, + ) -> Payload: + """Helper to append form urlencoded part.""" + assert isinstance(obj, (Sequence, Mapping)) + + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Mapping): + obj = list(obj.items()) + data = urlencode(obj, doseq=True) + + return self.append_payload( + StringPayload( + data, headers=headers, content_type="application/x-www-form-urlencoded" + ) + ) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + total = 0 + for part, encoding, te_encoding in self._parts: + if encoding or te_encoding or part.size is None: + return None + + total += int( + 2 + + len(self._boundary) + + 2 + + part.size # b'--'+self._boundary+b'\r\n' + + len(part._binary_headers) + + 2 # b'\r\n' + ) + + total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' + return total + + async def write(self, writer: Any, close_boundary: bool = True) -> None: + """Write body.""" + for part, encoding, te_encoding in self._parts: + await writer.write(b"--" + self._boundary + b"\r\n") + await writer.write(part._binary_headers) + + if encoding or te_encoding: + w = MultipartPayloadWriter(writer) + if encoding: + w.enable_compression(encoding) + if te_encoding: + w.enable_encoding(te_encoding) + await part.write(w) # type: ignore[arg-type] + await w.write_eof() + else: + await part.write(writer) + + await writer.write(b"\r\n") + + if close_boundary: + await writer.write(b"--" + self._boundary + b"--\r\n") + + +class MultipartPayloadWriter: + def __init__(self, writer: Any) -> None: + self._writer = writer + self._encoding: Optional[str] = None + self._compress: Optional[ZLibCompressor] = None + self._encoding_buffer: Optional[bytearray] = None + + def enable_encoding(self, encoding: str) -> None: + if encoding == "base64": + self._encoding = encoding + self._encoding_buffer = bytearray() + elif encoding == "quoted-printable": + self._encoding = "quoted-printable" + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + self._compress = ZLibCompressor( + encoding=encoding, + suppress_deflate_header=True, + strategy=strategy, + ) + + async def write_eof(self) -> None: + if self._compress is not None: + chunk = self._compress.flush() + if chunk: + self._compress = None + await self.write(chunk) + + if self._encoding == "base64": + if self._encoding_buffer: + await self._writer.write(base64.b64encode(self._encoding_buffer)) + + async def write(self, chunk: bytes) -> None: + if self._compress is not None: + if chunk: + chunk = await self._compress.compress(chunk) + if not chunk: + return + + if self._encoding == "base64": + buf = self._encoding_buffer + assert buf is not None + buf.extend(chunk) + + if buf: + div, mod = divmod(len(buf), 3) + enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) + if enc_chunk: + b64chunk = base64.b64encode(enc_chunk) + await self._writer.write(b64chunk) + elif self._encoding == "quoted-printable": + await self._writer.write(binascii.b2a_qp(chunk)) + else: + await self._writer.write(chunk) diff --git a/venv/lib/python3.12/site-packages/aiohttp/payload.py b/venv/lib/python3.12/site-packages/aiohttp/payload.py new file mode 100644 index 0000000..ba85669 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/payload.py @@ -0,0 +1,463 
@@ +import asyncio +import enum +import io +import json +import mimetypes +import os +import warnings +from abc import ABC, abstractmethod +from itertools import chain +from typing import ( + IO, + TYPE_CHECKING, + Any, + ByteString, + Dict, + Final, + Iterable, + Optional, + TextIO, + Tuple, + Type, + Union, +) + +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + _SENTINEL, + content_disposition_header, + guess_filename, + parse_mimetype, + sentinel, +) +from .streams import StreamReader +from .typedefs import JSONEncoder, _CIMultiDict + +__all__ = ( + "PAYLOAD_REGISTRY", + "get_payload", + "payload_type", + "Payload", + "BytesPayload", + "StringPayload", + "IOBasePayload", + "BytesIOPayload", + "BufferedReaderPayload", + "TextIOPayload", + "StringIOPayload", + "JsonPayload", + "AsyncIterablePayload", +) + +TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB + +if TYPE_CHECKING: # pragma: no cover + from typing import List + + +class LookupError(Exception): + pass + + +class Order(str, enum.Enum): + normal = "normal" + try_first = "try_first" + try_last = "try_last" + + +def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": + return PAYLOAD_REGISTRY.get(data, *args, **kwargs) + + +def register_payload( + factory: Type["Payload"], type: Any, *, order: Order = Order.normal +) -> None: + PAYLOAD_REGISTRY.register(factory, type, order=order) + + +class payload_type: + def __init__(self, type: Any, *, order: Order = Order.normal) -> None: + self.type = type + self.order = order + + def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: + register_payload(factory, self.type, order=self.order) + return factory + + +PayloadType = Type["Payload"] +_PayloadRegistryItem = Tuple[PayloadType, Any] + + +class PayloadRegistry: + """Payload registry. 
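+
+    A lookup sketch mirroring the default registrations at the bottom of
+    this module::
+
+        registry = PayloadRegistry()
+        registry.register(BytesPayload, (bytes, bytearray, memoryview))
+        registry.register(StringPayload, str)
+        payload = registry.get("hello")   # -> StringPayload
+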
+ + note: we need zope.interface for more efficient adapter search + """ + + def __init__(self) -> None: + self._first: List[_PayloadRegistryItem] = [] + self._normal: List[_PayloadRegistryItem] = [] + self._last: List[_PayloadRegistryItem] = [] + + def get( + self, + data: Any, + *args: Any, + _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + **kwargs: Any, + ) -> "Payload": + if isinstance(data, Payload): + return data + for factory, type in _CHAIN(self._first, self._normal, self._last): + if isinstance(data, type): + return factory(data, *args, **kwargs) + + raise LookupError() + + def register( + self, factory: PayloadType, type: Any, *, order: Order = Order.normal + ) -> None: + if order is Order.try_first: + self._first.append((factory, type)) + elif order is Order.normal: + self._normal.append((factory, type)) + elif order is Order.try_last: + self._last.append((factory, type)) + else: + raise ValueError(f"Unsupported order {order!r}") + + +class Payload(ABC): + + _default_content_type: str = "application/octet-stream" + _size: Optional[int] = None + + def __init__( + self, + value: Any, + headers: Optional[ + Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] + ] = None, + content_type: Union[str, None, _SENTINEL] = sentinel, + filename: Optional[str] = None, + encoding: Optional[str] = None, + **kwargs: Any, + ) -> None: + self._encoding = encoding + self._filename = filename + self._headers: _CIMultiDict = CIMultiDict() + self._value = value + if content_type is not sentinel and content_type is not None: + self._headers[hdrs.CONTENT_TYPE] = content_type + elif self._filename is not None: + content_type = mimetypes.guess_type(self._filename)[0] + if content_type is None: + content_type = self._default_content_type + self._headers[hdrs.CONTENT_TYPE] = content_type + else: + self._headers[hdrs.CONTENT_TYPE] = self._default_content_type + self._headers.update(headers or {}) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + return self._size + + @property + def filename(self) -> Optional[str]: + """Filename of the payload.""" + return self._filename + + @property + def headers(self) -> _CIMultiDict: + """Custom item headers""" + return self._headers + + @property + def _binary_headers(self) -> bytes: + return ( + "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( + "utf-8" + ) + + b"\r\n" + ) + + @property + def encoding(self) -> Optional[str]: + """Payload encoding""" + return self._encoding + + @property + def content_type(self) -> str: + """Content type""" + return self._headers[hdrs.CONTENT_TYPE] + + def set_content_disposition( + self, + disptype: str, + quote_fields: bool = True, + _charset: str = "utf-8", + **params: Any, + ) -> None: + """Sets ``Content-Disposition`` header.""" + self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( + disptype, quote_fields=quote_fields, _charset=_charset, **params + ) + + @abstractmethod + async def write(self, writer: AbstractStreamWriter) -> None: + """Write payload. 
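+
+        A minimal subclass sketch (illustrative only)::
+
+            class EchoPayload(Payload):
+                async def write(self, writer: AbstractStreamWriter) -> None:
+                    await writer.write(self._value)
+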
+ + writer is an AbstractStreamWriter instance: + """ + + +class BytesPayload(Payload): + def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, (bytes, bytearray, memoryview)): + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + if isinstance(value, memoryview): + self._size = value.nbytes + else: + self._size = len(value) + + if self._size > TOO_LARGE_BYTES_BODY: + kwargs = {"source": self} + warnings.warn( + "Sending a large body directly with raw bytes might" + " lock the event loop. You should probably pass an " + "io.BytesIO object instead", + ResourceWarning, + **kwargs, + ) + + async def write(self, writer: AbstractStreamWriter) -> None: + await writer.write(self._value) + + +class StringPayload(BytesPayload): + def __init__( + self, + value: str, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + real_encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + real_encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + real_encoding = encoding + + super().__init__( + value.encode(real_encoding), + encoding=real_encoding, + content_type=content_type, + *args, + **kwargs, + ) + + +class StringIOPayload(StringPayload): + def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: + super().__init__(value.read(), *args, **kwargs) + + +class IOBasePayload(Payload): + _value: IO[Any] + + def __init__( + self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any + ) -> None: + if "filename" not in kwargs: + kwargs["filename"] = guess_filename(value) + + super().__init__(value, *args, **kwargs) + + if self._filename is not None and disposition is not None: + if hdrs.CONTENT_DISPOSITION not in self.headers: + self.set_content_disposition(disposition, filename=self._filename) + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + await writer.write(chunk) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class TextIOPayload(IOBasePayload): + _value: TextIO + + def __init__( + self, + value: TextIO, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + + super().__init__( + value, + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + return None + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + data = ( 
+ chunk.encode(encoding=self._encoding) + if self._encoding + else chunk.encode() + ) + await writer.write(data) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class BytesIOPayload(IOBasePayload): + @property + def size(self) -> int: + position = self._value.tell() + end = self._value.seek(0, os.SEEK_END) + self._value.seek(position) + return end - position + + +class BufferedReaderPayload(IOBasePayload): + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + # data.fileno() is not supported, e.g. + # io.BufferedReader(io.BytesIO(b'data')) + return None + + +class JsonPayload(BytesPayload): + def __init__( + self, + value: Any, + encoding: str = "utf-8", + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, + *args: Any, + **kwargs: Any, + ) -> None: + + super().__init__( + dumps(value).encode(encoding), + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + +if TYPE_CHECKING: # pragma: no cover + from typing import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator[bytes] + _AsyncIterable = AsyncIterable[bytes] +else: + from collections.abc import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator + _AsyncIterable = AsyncIterable + + +class AsyncIterablePayload(Payload): + + _iter: Optional[_AsyncIterator] = None + + def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, AsyncIterable): + raise TypeError( + "value argument must support " + "collections.abc.AsyncIterable interface, " + "got {!r}".format(type(value)) + ) + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + self._iter = value.__aiter__() + + async def write(self, writer: AbstractStreamWriter) -> None: + if self._iter: + try: + # iter is not None check prevents rare cases + # when the case iterable is used twice + while True: + chunk = await self._iter.__anext__() + await writer.write(chunk) + except StopAsyncIteration: + self._iter = None + + +class StreamReaderPayload(AsyncIterablePayload): + def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value.iter_any(), *args, **kwargs) + + +PAYLOAD_REGISTRY = PayloadRegistry() +PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) +PAYLOAD_REGISTRY.register(StringPayload, str) +PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) +PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) +PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) +PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) +PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) +PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) +# try_last for giving a chance to more specialized async interables like +# multidict.BodyPartReaderPayload override the default +PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/venv/lib/python3.12/site-packages/aiohttp/payload_streamer.py b/venv/lib/python3.12/site-packages/aiohttp/payload_streamer.py new file mode 100644 index 0000000..364f763 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/payload_streamer.py @@ -0,0 +1,75 @@ +""" +Payload implementation for coroutines as data provider. 
+ +As a simple case, you can upload data from file:: + + @aiohttp.streamer + async def file_sender(writer, file_name=None): + with open(file_name, 'rb') as f: + chunk = f.read(2**16) + while chunk: + await writer.write(chunk) + + chunk = f.read(2**16) + +Then you can use `file_sender` like this: + + async with session.post('http://httpbin.org/post', + data=file_sender(file_name='huge_file')) as resp: + print(await resp.text()) + +..note:: Coroutine must accept `writer` as first argument + +""" + +import types +import warnings +from typing import Any, Awaitable, Callable, Dict, Tuple + +from .abc import AbstractStreamWriter +from .payload import Payload, payload_type + +__all__ = ("streamer",) + + +class _stream_wrapper: + def __init__( + self, + coro: Callable[..., Awaitable[None]], + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> None: + self.coro = types.coroutine(coro) + self.args = args + self.kwargs = kwargs + + async def __call__(self, writer: AbstractStreamWriter) -> None: + await self.coro(writer, *self.args, **self.kwargs) + + +class streamer: + def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: + warnings.warn( + "@streamer is deprecated, use async generators instead", + DeprecationWarning, + stacklevel=2, + ) + self.coro = coro + + def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: + return _stream_wrapper(self.coro, args, kwargs) + + +@payload_type(_stream_wrapper) +class StreamWrapperPayload(Payload): + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) + + +@payload_type(streamer) +class StreamPayload(StreamWrapperPayload): + def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: + super().__init__(value(), *args, **kwargs) + + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) diff --git a/venv/lib/python3.12/site-packages/aiohttp/py.typed b/venv/lib/python3.12/site-packages/aiohttp/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.12/site-packages/aiohttp/pytest_plugin.py b/venv/lib/python3.12/site-packages/aiohttp/pytest_plugin.py new file mode 100644 index 0000000..5754747 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/pytest_plugin.py @@ -0,0 +1,381 @@ +import asyncio +import contextlib +import warnings +from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union + +import pytest + +from aiohttp.helpers import isasyncgenfunction +from aiohttp.web import Application + +from .test_utils import ( + BaseTestServer, + RawTestServer, + TestClient, + TestServer, + loop_context, + setup_test_loop, + teardown_test_loop, + unused_port as _unused_port, +) + +try: + import uvloop +except ImportError: # pragma: no cover + uvloop = None # type: ignore[assignment] + +AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] +AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] +AiohttpServer = Callable[[Application], Awaitable[TestServer]] + + +def pytest_addoption(parser): # type: ignore[no-untyped-def] + parser.addoption( + "--aiohttp-fast", + action="store_true", + default=False, + help="run tests faster by disabling extra checks", + ) + parser.addoption( + "--aiohttp-loop", + action="store", + default="pyloop", + help="run tests with specific loop: pyloop, uvloop or all", + ) + parser.addoption( + "--aiohttp-enable-loop-debug", + action="store_true", + 
+        default=False,
+        help="enable event loop debug mode",
+    )
+
+
+def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
+    """Set up pytest fixture.
+
+    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
+    """
+    func = fixturedef.func
+
+    if isasyncgenfunction(func):
+        # async generator fixture
+        is_async_gen = True
+    elif asyncio.iscoroutinefunction(func):
+        # regular async fixture
+        is_async_gen = False
+    else:
+        # not an async fixture, nothing to do
+        return
+
+    strip_request = False
+    if "request" not in fixturedef.argnames:
+        fixturedef.argnames += ("request",)
+        strip_request = True
+
+    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
+        request = kwargs["request"]
+        if strip_request:
+            del kwargs["request"]
+
+        # if neither the fixture nor the test uses the 'loop' fixture,
+        # 'getfixturevalue' will fail because the test is not parameterized
+        # (this can be removed someday if 'loop' is no longer parameterized)
+        if "loop" not in request.fixturenames:
+            raise Exception(
+                "Asynchronous fixtures must depend on the 'loop' fixture or "
+                "be used in tests that depend on it."
+            )
+
+        _loop = request.getfixturevalue("loop")
+
+        if is_async_gen:
+            # for async generators, we need to advance the generator once,
+            # then advance it again in a finalizer
+            gen = func(*args, **kwargs)
+
+            def finalizer():  # type: ignore[no-untyped-def]
+                try:
+                    return _loop.run_until_complete(gen.__anext__())
+                except StopAsyncIteration:
+                    pass
+
+            request.addfinalizer(finalizer)
+            return _loop.run_until_complete(gen.__anext__())
+        else:
+            return _loop.run_until_complete(func(*args, **kwargs))
+
+    fixturedef.func = wrapper
+
+
+@pytest.fixture
+def fast(request):  # type: ignore[no-untyped-def]
+    """--fast config option"""
+    return request.config.getoption("--aiohttp-fast")
+
+
+@pytest.fixture
+def loop_debug(request):  # type: ignore[no-untyped-def]
+    """--enable-loop-debug config option"""
+    return request.config.getoption("--aiohttp-enable-loop-debug")
+
+
+@contextlib.contextmanager
+def _runtime_warning_context():  # type: ignore[no-untyped-def]
+    """Context manager which checks for RuntimeWarnings.
+
+    This exists specifically to avoid missing
+    "coroutine 'X' was never awaited" warnings.
+
+    If RuntimeWarnings occur in the context, a RuntimeError is raised.
+    """
+    with warnings.catch_warnings(record=True) as _warnings:
+        yield
+        rw = [
+            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
+            for w in _warnings
+            if w.category == RuntimeWarning
+        ]
+        if rw:
+            raise RuntimeError(
+                "{} Runtime Warning{},\n{}".format(
+                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
+                )
+            )
+
+
+@contextlib.contextmanager
+def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
+    """Passthrough loop context.
+
+    Sets up and tears down a loop unless one is passed in via the loop
+    argument, in which case it is passed straight through.
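+
+    A sketch of both paths (``existing_loop`` stands in for any loop already
+    created by the caller)::
+
+        with _passthrough_loop_context(existing_loop) as loop:
+            ...  # existing_loop is yielded unchanged
+
+        with _passthrough_loop_context(None, fast=True) as loop:
+            ...  # a fresh test loop is created here and torn down on exit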
+ """ + if loop: + # loop already exists, pass it straight through + yield loop + else: + # this shadows loop_context's standard behavior + loop = setup_test_loop() + yield loop + teardown_test_loop(loop, fast=fast) + + +def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] + """Fix pytest collecting for coroutines.""" + if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + return list(collector._genfunctions(name, obj)) + + +def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] + """Run coroutines in an event loop instead of a normal function call.""" + fast = pyfuncitem.config.getoption("--aiohttp-fast") + if asyncio.iscoroutinefunction(pyfuncitem.function): + existing_loop = pyfuncitem.funcargs.get( + "proactor_loop" + ) or pyfuncitem.funcargs.get("loop", None) + with _runtime_warning_context(): + with _passthrough_loop_context(existing_loop, fast=fast) as _loop: + testargs = { + arg: pyfuncitem.funcargs[arg] + for arg in pyfuncitem._fixtureinfo.argnames + } + _loop.run_until_complete(pyfuncitem.obj(**testargs)) + + return True + + +def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] + if "loop_factory" not in metafunc.fixturenames: + return + + loops = metafunc.config.option.aiohttp_loop + avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]] + avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} + + if uvloop is not None: # pragma: no cover + avail_factories["uvloop"] = uvloop.EventLoopPolicy + + if loops == "all": + loops = "pyloop,uvloop?" + + factories = {} # type: ignore[var-annotated] + for name in loops.split(","): + required = not name.endswith("?") + name = name.strip(" ?") + if name not in avail_factories: # pragma: no cover + if required: + raise ValueError( + "Unknown loop '%s', available loops: %s" + % (name, list(factories.keys())) + ) + else: + continue + factories[name] = avail_factories[name] + metafunc.parametrize( + "loop_factory", list(factories.values()), ids=list(factories.keys()) + ) + + +@pytest.fixture +def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] + """Return an instance of the event loop.""" + policy = loop_factory() + asyncio.set_event_loop_policy(policy) + with loop_context(fast=fast) as _loop: + if loop_debug: + _loop.set_debug(True) # pragma: no cover + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def proactor_loop(): # type: ignore[no-untyped-def] + policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] + asyncio.set_event_loop_policy(policy) + + with loop_context(policy.new_event_loop) as _loop: + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]: + warnings.warn( + "Deprecated, use aiohttp_unused_port fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_unused_port + + +@pytest.fixture +def aiohttp_unused_port() -> Callable[[], int]: + """Return a port that is unused on the current host.""" + return _unused_port + + +@pytest.fixture +def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: + """Factory to create a TestServer instance, given an app. 
+ + aiohttp_server(app, **kwargs) + """ + servers = [] + + async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] + server = TestServer(app, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_server + + +@pytest.fixture +def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]: + """Factory to create a RawTestServer instance, given a web handler. + + aiohttp_raw_server(handler, **kwargs) + """ + servers = [] + + async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] + server = RawTestServer(handler, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover + aiohttp_raw_server, +): + warnings.warn( + "Deprecated, use aiohttp_raw_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_raw_server + + +@pytest.fixture +def aiohttp_client( + loop: asyncio.AbstractEventLoop, +) -> Iterator[AiohttpClient]: + """Factory to create a TestClient instance. + + aiohttp_client(app, **kwargs) + aiohttp_client(server, **kwargs) + aiohttp_client(raw_server, **kwargs) + """ + clients = [] + + async def go( + __param: Union[Application, BaseTestServer], + *args: Any, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any + ) -> TestClient: + + if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] + __param, (Application, BaseTestServer) + ): + __param = __param(loop, *args, **kwargs) + kwargs = {} + else: + assert not args, "args should be empty" + + if isinstance(__param, Application): + server_kwargs = server_kwargs or {} + server = TestServer(__param, loop=loop, **server_kwargs) + client = TestClient(server, loop=loop, **kwargs) + elif isinstance(__param, BaseTestServer): + client = TestClient(__param, loop=loop, **kwargs) + else: + raise ValueError("Unknown argument type: %r" % type(__param)) + + await client.start_server() + clients.append(client) + return client + + yield go + + async def finalize() -> None: + while clients: + await clients.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_client fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_client diff --git a/venv/lib/python3.12/site-packages/aiohttp/resolver.py b/venv/lib/python3.12/site-packages/aiohttp/resolver.py new file mode 100644 index 0000000..6c17b1e --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/resolver.py @@ -0,0 +1,160 @@ +import asyncio +import socket +from typing import Any, Dict, List, Optional, Type, Union + +from .abc import AbstractResolver +from .helpers import get_running_loop + +__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") + +try: + import aiodns + + # aiodns_default = hasattr(aiodns.DNSResolver, 
'gethostbyname') +except ImportError: # pragma: no cover + aiodns = None + +aiodns_default = False + + +class ThreadedResolver(AbstractResolver): + """Threaded resolver. + + Uses an Executor for synchronous getaddrinfo() calls. + concurrent.futures.ThreadPoolExecutor is used by default. + """ + + def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = get_running_loop(loop) + + async def resolve( + self, hostname: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + infos = await self._loop.getaddrinfo( + hostname, + port, + type=socket.SOCK_STREAM, + family=family, + flags=socket.AI_ADDRCONFIG, + ) + + hosts = [] + for family, _, proto, _, address in infos: + if family == socket.AF_INET6: + if len(address) < 3: + # IPv6 is not supported by Python build, + # or IPv6 is not enabled in the host + continue + if address[3]: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + host, _port = socket.getnameinfo( + address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV + ) + port = int(_port) + else: + host, port = address[:2] + else: # IPv4 + assert family == socket.AF_INET + host, port = address # type: ignore[misc] + hosts.append( + { + "hostname": hostname, + "host": host, + "port": port, + "family": family, + "proto": proto, + "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, + } + ) + + return hosts + + async def close(self) -> None: + pass + + +class AsyncResolver(AbstractResolver): + """Use the `aiodns` package to make asynchronous DNS lookups""" + + def __init__( + self, + loop: Optional[asyncio.AbstractEventLoop] = None, + *args: Any, + **kwargs: Any + ) -> None: + if aiodns is None: + raise RuntimeError("Resolver requires aiodns library") + + self._loop = get_running_loop(loop) + self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) + + if not hasattr(self._resolver, "gethostbyname"): + # aiodns 1.1 is not available, fallback to DNSResolver.query + self.resolve = self._resolve_with_query # type: ignore + + async def resolve( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + try: + resp = await self._resolver.gethostbyname(host, family) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + hosts = [] + for address in resp.addresses: + hosts.append( + { + "hostname": host, + "host": address, + "port": port, + "family": family, + "proto": 0, + "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, + } + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def _resolve_with_query( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[Dict[str, Any]]: + if family == socket.AF_INET6: + qtype = "AAAA" + else: + qtype = "A" + + try: + resp = await self._resolver.query(host, qtype) + except aiodns.error.DNSError as exc: + msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" + raise OSError(msg) from exc + + hosts = [] + for rr in resp: + hosts.append( + { + "hostname": host, + "host": rr.host, + "port": port, + "family": family, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ) + + if not hosts: + raise OSError("DNS lookup failed") + + return hosts + + async def close(self) -> None: + self._resolver.cancel() + + +_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] +DefaultResolver: 
_DefaultType = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/venv/lib/python3.12/site-packages/aiohttp/streams.py b/venv/lib/python3.12/site-packages/aiohttp/streams.py new file mode 100644 index 0000000..3e4c355 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/streams.py @@ -0,0 +1,666 @@ +import asyncio +import collections +import warnings +from typing import ( + Awaitable, + Callable, + Deque, + Final, + Generic, + List, + Optional, + Tuple, + TypeVar, +) + +from .base_protocol import BaseProtocol +from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result +from .log import internal_logger + +__all__ = ( + "EMPTY_PAYLOAD", + "EofStream", + "StreamReader", + "DataQueue", + "FlowControlDataQueue", +) + +_T = TypeVar("_T") + + +class EofStream(Exception): + """eof stream indication.""" + + +class AsyncStreamIterator(Generic[_T]): + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: + self.read_func = read_func + + def __aiter__(self) -> "AsyncStreamIterator[_T]": + return self + + async def __anext__(self) -> _T: + try: + rv = await self.read_func() + except EofStream: + raise StopAsyncIteration + if rv == b"": + raise StopAsyncIteration + return rv + + +class ChunkTupleAsyncStreamIterator: + def __init__(self, stream: "StreamReader") -> None: + self._stream = stream + + def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": + return self + + async def __anext__(self) -> Tuple[bytes, bool]: + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv + + +class AsyncStreamReaderMixin: + def __aiter__(self) -> AsyncStreamIterator[bytes]: + return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] + + def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields chunks of size n.""" + return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined] + + def iter_any(self) -> AsyncStreamIterator[bytes]: + """Yield all available data as soon as it is received.""" + return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] + + def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: + """Yield chunks of data as they are received by the server. + + The yielded objects are tuples + of (bytes, bool) as returned by the StreamReader.readchunk method. + """ + return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. + + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... 
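+        async for chunk, end_of_http_chunk in reader.iter_chunks():
+            ...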
+ + """ + + total_bytes = 0 + + def __init__( + self, + protocol: BaseProtocol, + limit: int, + *, + timer: Optional[BaseTimerContext] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + self._protocol = protocol + self._low_water = limit + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._size = 0 + self._cursor = 0 + self._http_chunk_splits: Optional[List[int]] = None + self._buffer: Deque[bytes] = collections.deque() + self._buffer_offset = 0 + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._eof_waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._timer = TimerNoop() if timer is None else timer + self._eof_callbacks: List[Callable[[], None]] = [] + + def __repr__(self) -> str: + info = [self.__class__.__name__] + if self._size: + info.append("%d bytes" % self._size) + if self._eof: + info.append("eof") + if self._low_water != 2**16: # default limit + info.append("low=%d high=%d" % (self._low_water, self._high_water)) + if self._waiter: + info.append("w=%r" % self._waiter) + if self._exception: + info.append("e=%r" % self._exception) + return "<%s>" % " ".join(info) + + def get_read_buffer_limits(self) -> Tuple[int, int]: + return (self._low_water, self._high_water) + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._exception = exc + self._eof_callbacks.clear() + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_exception(waiter, exc) + + def on_eof(self, callback: Callable[[], None]) -> None: + if self._eof: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + else: + self._eof_callbacks.append(callback) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_result(waiter, None) + + for cb in self._eof_callbacks: + try: + cb() + except Exception: + internal_logger.exception("Exception in eof callback") + + self._eof_callbacks.clear() + + def is_eof(self) -> bool: + """Return True if 'feed_eof' was called.""" + return self._eof + + def at_eof(self) -> bool: + """Return True if the buffer is empty and 'feed_eof' was called.""" + return self._eof and not self._buffer + + async def wait_eof(self) -> None: + if self._eof: + return + + assert self._eof_waiter is None + self._eof_waiter = self._loop.create_future() + try: + await self._eof_waiter + finally: + self._eof_waiter = None + + def unread_data(self, data: bytes) -> None: + """rollback reading some data from stream, inserting it to buffer head.""" + warnings.warn( + "unread_data() is deprecated " + "and will be removed in future releases (#3260)", + DeprecationWarning, + stacklevel=2, + ) + if not data: + return + + if self._buffer_offset: + self._buffer[0] = self._buffer[0][self._buffer_offset :] + self._buffer_offset = 0 + self._size += len(data) + self._cursor -= len(data) + self._buffer.appendleft(data) + self._eof_counter = 0 + + # TODO: size is ignored, remove the param later + def feed_data(self, data: bytes, size: int = 0) -> None: + assert not self._eof, "feed_data after feed_eof" + + if not data: + return + + 
+        self._size += len(data)
+        self._buffer.append(data)
+        self.total_bytes += len(data)
+
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, None)
+
+        if self._size > self._high_water and not self._protocol._reading_paused:
+            self._protocol.pause_reading()
+
+    def begin_http_chunk_receiving(self) -> None:
+        if self._http_chunk_splits is None:
+            if self.total_bytes:
+                raise RuntimeError(
+                    "Called begin_http_chunk_receiving when "
+                    "some data was already fed"
+                )
+            self._http_chunk_splits = []
+
+    def end_http_chunk_receiving(self) -> None:
+        if self._http_chunk_splits is None:
+            raise RuntimeError(
+                "Called end_http_chunk_receiving without calling "
+                "begin_http_chunk_receiving first"
+            )
+
+        # self._http_chunk_splits contains logical byte offsets from the start
+        # of the body transfer. Each offset marks the end of a chunk.
+        # "Logical" means bytes accessible to the user.
+        # If no chunks containing logical data were received, the current
+        # position is definitely zero.
+        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
+
+        if self.total_bytes == pos:
+            # We should not add empty chunks here, so we check for that.
+            # Note: when chunked + gzip is used, we can receive a chunk
+            # of compressed data, but that data may not be enough for the gzip
+            # FSM to yield any uncompressed data. That's why the current
+            # position may not change after receiving a chunk.
+            return
+
+        self._http_chunk_splits.append(self.total_bytes)
+
+        # wake up readchunk() when the end of an HTTP chunk is received
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, None)
+
+    async def _wait(self, func_name: str) -> None:
+        # StreamReader uses a future to link the protocol feed_data() method
+        # to a read coroutine. Running two read coroutines at the same time
+        # would have unexpected behaviour: it would not be possible to know
+        # which coroutine would get the next data.
+        if self._waiter is not None:
+            raise RuntimeError(
+                "%s() called while another coroutine is "
+                "already waiting for incoming data" % func_name
+            )
+
+        waiter = self._waiter = self._loop.create_future()
+        try:
+            with self._timer:
+                await waiter
+        finally:
+            self._waiter = None
+
+    async def readline(self) -> bytes:
+        return await self.readuntil()
+
+    async def readuntil(self, separator: bytes = b"\n") -> bytes:
+        seplen = len(separator)
+        if seplen == 0:
+            raise ValueError("Separator should be at least a one-byte string")
+
+        if self._exception is not None:
+            raise self._exception
+
+        chunk = b""
+        chunk_size = 0
+        not_enough = True
+
+        while not_enough:
+            while self._buffer and not_enough:
+                offset = self._buffer_offset
+                ichar = self._buffer[0].find(separator, offset) + 1
+                # Read from the current offset to the found separator or to the end.
+                data = self._read_nowait_chunk(
+                    ichar - offset + seplen - 1 if ichar else -1
+                )
+                chunk += data
+                chunk_size += len(data)
+                if ichar:
+                    not_enough = False
+
+                if chunk_size > self._high_water:
+                    raise ValueError("Chunk too big")
+
+            if self._eof:
+                break
+
+            if not_enough:
+                await self._wait("readuntil")
+
+        return chunk
+
+    async def read(self, n: int = -1) -> bytes:
+        if self._exception is not None:
+            raise self._exception
+
+        # Migration note: with DataQueue you have to catch the EofStream
+        # exception, so the common pattern was to run payload.read() inside an
+        # infinite loop. With StreamReader that pattern can turn into a real
+        # infinite loop, so keep the guard below for one more major release.
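+        # In debug mode, warn when read() keeps being called at EOF with an
+        # empty buffer -- the classic symptom of that migration bug.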
+ if __debug__: + if self._eof and not self._buffer: + self._eof_counter = getattr(self, "_eof_counter", 0) + 1 + if self._eof_counter > 5: + internal_logger.warning( + "Multiple access to StreamReader in eof state, " + "might be infinite loop.", + stack_info=True, + ) + + if not n: + return b"" + + if n < 0: + # This used to just loop creating a new waiter hoping to + # collect everything in self._buffer, but that would + # deadlock if the subprocess sends more than self.limit + # bytes. So just call self.readany() until EOF. + blocks = [] + while True: + block = await self.readany() + if not block: + break + blocks.append(block) + return b"".join(blocks) + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("read") + + return self._read_nowait(n) + + async def readany(self) -> bytes: + if self._exception is not None: + raise self._exception + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("readany") + + return self._read_nowait(-1) + + async def readchunk(self) -> Tuple[bytes, bool]: + """Returns a tuple of (data, end_of_http_chunk). + + When chunked transfer + encoding is used, end_of_http_chunk is a boolean indicating if the end + of the data corresponds to the end of a HTTP chunk , otherwise it is + always False. + """ + while True: + if self._exception is not None: + raise self._exception + + while self._http_chunk_splits: + pos = self._http_chunk_splits.pop(0) + if pos == self._cursor: + return (b"", True) + if pos > self._cursor: + return (self._read_nowait(pos - self._cursor), True) + internal_logger.warning( + "Skipping HTTP chunk end due to data " + "consumption beyond chunk boundary" + ) + + if self._buffer: + return (self._read_nowait_chunk(-1), False) + # return (self._read_nowait(-1), False) + + if self._eof: + # Special case for signifying EOF. + # (b'', True) is not a final return value actually. + return (b"", False) + + await self._wait("readchunk") + + async def readexactly(self, n: int) -> bytes: + if self._exception is not None: + raise self._exception + + blocks: List[bytes] = [] + while n > 0: + block = await self.read(n) + if not block: + partial = b"".join(blocks) + raise asyncio.IncompleteReadError(partial, len(partial) + n) + blocks.append(block) + n -= len(block) + + return b"".join(blocks) + + def read_nowait(self, n: int = -1) -> bytes: + # default was changed to be consistent with .read(-1) + # + # I believe the most users don't know about the method and + # they are not affected. + if self._exception is not None: + raise self._exception + + if self._waiter and not self._waiter.done(): + raise RuntimeError( + "Called while some coroutine is waiting for incoming data." 
+ ) + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n: int) -> bytes: + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset : offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + self._size -= len(data) + self._cursor += len(data) + + chunk_splits = self._http_chunk_splits + # Prevent memory leak: drop useless chunk splits + while chunk_splits and chunk_splits[0] < self._cursor: + chunk_splits.pop(0) + + if self._size < self._low_water and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + + def _read_nowait(self, n: int) -> bytes: + """Read not more than n bytes, or whole buffer if n == -1""" + self._timer.assert_timeout() + + chunks = [] + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b"".join(chunks) if chunks else b"" + + +class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] + def __init__(self) -> None: + self._read_eof_chunk = False + + def __repr__(self) -> str: + return "<%s>" % self.__class__.__name__ + + def exception(self) -> Optional[BaseException]: + return None + + def set_exception(self, exc: BaseException) -> None: + pass + + def on_eof(self, callback: Callable[[], None]) -> None: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + + def feed_eof(self) -> None: + pass + + def is_eof(self) -> bool: + return True + + def at_eof(self) -> bool: + return True + + async def wait_eof(self) -> None: + return + + def feed_data(self, data: bytes, n: int = 0) -> None: + pass + + async def readline(self) -> bytes: + return b"" + + async def read(self, n: int = -1) -> bytes: + return b"" + + # TODO add async def readuntil + + async def readany(self) -> bytes: + return b"" + + async def readchunk(self) -> Tuple[bytes, bool]: + if not self._read_eof_chunk: + self._read_eof_chunk = True + return (b"", False) + + return (b"", True) + + async def readexactly(self, n: int) -> bytes: + raise asyncio.IncompleteReadError(b"", n) + + def read_nowait(self, n: int = -1) -> bytes: + return b"" + + +EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() + + +class DataQueue(Generic[_T]): + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._size = 0 + self._buffer: Deque[Tuple[_T, int]] = collections.deque() + + def __len__(self) -> int: + return len(self._buffer) + + def is_eof(self) -> bool: + return self._eof + + def at_eof(self) -> bool: + return self._eof and not self._buffer + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception(self, exc: BaseException) -> None: + self._eof = True + self._exception = exc + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc) + + def feed_data(self, data: _T, size: int = 0) -> None: + self._size += size + self._buffer.append((data, size)) + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if 
waiter is not None: + self._waiter = None + set_result(waiter, None) + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + return data + else: + if self._exception is not None: + raise self._exception + else: + raise EofStream + + def __aiter__(self) -> AsyncStreamIterator[_T]: + return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. + """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + try: + return await super().read() + finally: + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() diff --git a/venv/lib/python3.12/site-packages/aiohttp/tcp_helpers.py b/venv/lib/python3.12/site-packages/aiohttp/tcp_helpers.py new file mode 100644 index 0000000..88b2442 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/tcp_helpers.py @@ -0,0 +1,37 @@ +"""Helper methods to tune a TCP connection""" + +import asyncio +import socket +from contextlib import suppress +from typing import Optional # noqa + +__all__ = ("tcp_keepalive", "tcp_nodelay") + + +if hasattr(socket, "SO_KEEPALIVE"): + + def tcp_keepalive(transport: asyncio.Transport) -> None: + sock = transport.get_extra_info("socket") + if sock is not None: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + +else: + + def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover + pass + + +def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: + sock = transport.get_extra_info("socket") + + if sock is None: + return + + if sock.family not in (socket.AF_INET, socket.AF_INET6): + return + + value = bool(value) + + # socket may be closed already, on windows OSError get raised + with suppress(OSError): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/venv/lib/python3.12/site-packages/aiohttp/test_utils.py b/venv/lib/python3.12/site-packages/aiohttp/test_utils.py new file mode 100644 index 0000000..2a026fe --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/test_utils.py @@ -0,0 +1,675 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import gc +import inspect +import ipaddress +import os +import socket +import sys +import warnings +from abc import ABC, abstractmethod +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterator, + List, + Optional, + Type, + Union, + cast, +) +from unittest import IsolatedAsyncioTestCase, mock + +from aiosignal import Signal +from multidict import CIMultiDict, CIMultiDictProxy +from yarl import URL + +import aiohttp +from aiohttp.client import _RequestContextManager, _WSRequestContextManager + +from . 
import ClientSession, hdrs +from .abc import AbstractCookieJar +from .client_reqrep import ClientResponse +from .client_ws import ClientWebSocketResponse +from .helpers import sentinel +from .http import HttpVersion, RawRequestMessage +from .typedefs import StrOrURL +from .web import ( + Application, + AppRunner, + BaseRunner, + Request, + Server, + ServerRunner, + SockSite, + UrlMappingMatchInfo, +) +from .web_protocol import _RequestHandler + +if TYPE_CHECKING: # pragma: no cover + from ssl import SSLContext +else: + SSLContext = None + +REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" + + +def get_unused_port_socket( + host: str, family: socket.AddressFamily = socket.AF_INET +) -> socket.socket: + return get_port_socket(host, 0, family) + + +def get_port_socket( + host: str, port: int, family: socket.AddressFamily +) -> socket.socket: + s = socket.socket(family, socket.SOCK_STREAM) + if REUSE_ADDRESS: + # Windows has different semantics for SO_REUSEADDR, + # so don't set it. Ref: + # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((host, port)) + return s + + +def unused_port() -> int: + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return cast(int, s.getsockname()[1]) + + +class BaseTestServer(ABC): + __test__ = False + + def __init__( + self, + *, + scheme: Union[str, object] = sentinel, + loop: Optional[asyncio.AbstractEventLoop] = None, + host: str = "127.0.0.1", + port: Optional[int] = None, + skip_url_asserts: bool = False, + socket_factory: Callable[ + [str, int, socket.AddressFamily], socket.socket + ] = get_port_socket, + **kwargs: Any, + ) -> None: + self._loop = loop + self.runner: Optional[BaseRunner] = None + self._root: Optional[URL] = None + self.host = host + self.port = port + self._closed = False + self.scheme = scheme + self.skip_url_asserts = skip_url_asserts + self.socket_factory = socket_factory + + async def start_server( + self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any + ) -> None: + if self.runner: + return + self._loop = loop + self._ssl = kwargs.pop("ssl", None) + self.runner = await self._make_runner(handler_cancellation=True, **kwargs) + await self.runner.setup() + if not self.port: + self.port = 0 + try: + version = ipaddress.ip_address(self.host).version + except ValueError: + version = 4 + family = socket.AF_INET6 if version == 6 else socket.AF_INET + _sock = self.socket_factory(self.host, self.port, family) + self.host, self.port = _sock.getsockname()[:2] + site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) + await site.start() + server = site._server + assert server is not None + sockets = server.sockets # type: ignore[attr-defined] + assert sockets is not None + self.port = sockets[0].getsockname()[1] + if self.scheme is sentinel: + if self._ssl: + scheme = "https" + else: + scheme = "http" + self.scheme = scheme + self._root = URL(f"{self.scheme}://{self.host}:{self.port}") + + @abstractmethod # pragma: no cover + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + pass + + def make_url(self, path: StrOrURL) -> URL: + assert self._root is not None + url = URL(path) + if not self.skip_url_asserts: + assert not url.is_absolute() + return self._root.join(url) + else: + return URL(str(self._root) + str(path)) + + @property + def started(self) -> bool: + return self.runner is not 
None + + @property + def closed(self) -> bool: + return self._closed + + @property + def handler(self) -> Server: + # for backward compatibility + # web.Server instance + runner = self.runner + assert runner is not None + assert runner.server is not None + return runner.server + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. + + """ + if self.started and not self.closed: + assert self.runner is not None + await self.runner.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "BaseTestServer": + await self.start_server(loop=self._loop) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + await self.close() + + +class TestServer(BaseTestServer): + def __init__( + self, + app: Application, + *, + scheme: Union[str, object] = sentinel, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ): + self.app = app + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + return AppRunner(self.app, **kwargs) + + +class RawTestServer(BaseTestServer): + def __init__( + self, + handler: _RequestHandler, + *, + scheme: Union[str, object] = sentinel, + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ) -> None: + self._handler = handler + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: + srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) + return ServerRunner(srv, debug=debug, **kwargs) + + +class TestClient: + """ + A test client implementation. + + To write functional tests for aiohttp based servers. 
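+
+    A minimal usage sketch (assumes ``app`` is an ``Application`` with a
+    route registered at ``/``)::
+
+        async with TestClient(TestServer(app)) as client:
+            resp = await client.get("/")
+            assert resp.status == 200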
+ + """ + + __test__ = False + + def __init__( + self, + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> None: + if not isinstance(server, BaseTestServer): + raise TypeError( + "server must be TestServer " "instance, found type: %r" % type(server) + ) + self._server = server + self._loop = loop + if cookie_jar is None: + cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) + self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) + self._closed = False + self._responses: List[ClientResponse] = [] + self._websockets: List[ClientWebSocketResponse] = [] + + async def start_server(self) -> None: + await self._server.start_server(loop=self._loop) + + @property + def host(self) -> str: + return self._server.host + + @property + def port(self) -> Optional[int]: + return self._server.port + + @property + def server(self) -> BaseTestServer: + return self._server + + @property + def app(self) -> Optional[Application]: + return cast(Optional[Application], getattr(self._server, "app", None)) + + @property + def session(self) -> ClientSession: + """An internal aiohttp.ClientSession. + + Unlike the methods on the TestClient, client session requests + do not automatically include the host in the url queried, and + will require an absolute path to the resource. + + """ + return self._session + + def make_url(self, path: StrOrURL) -> URL: + return self._server.make_url(path) + + async def _request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> ClientResponse: + resp = await self._session.request(method, self.make_url(path), **kwargs) + # save it to close later + self._responses.append(resp) + return resp + + def request( + self, method: str, path: StrOrURL, **kwargs: Any + ) -> _RequestContextManager: + """Routes a request to tested http server. + + The interface is identical to aiohttp.ClientSession.request, + except the loop kwarg is overridden by the instance used by the + test server. 
+ + """ + return _RequestContextManager(self._request(method, path, **kwargs)) + + def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP GET request.""" + return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) + + def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP POST request.""" + return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) + + def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP OPTIONS request.""" + return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) + + def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP HEAD request.""" + return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) + + def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PUT request.""" + return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) + + def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) + + def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: + """Perform an HTTP PATCH request.""" + return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) + + def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: + """Initiate websocket connection. + + The api corresponds to aiohttp.ClientSession.ws_connect. + + """ + return _WSRequestContextManager(self._ws_connect(path, **kwargs)) + + async def _ws_connect( + self, path: StrOrURL, **kwargs: Any + ) -> ClientWebSocketResponse: + ws = await self._session.ws_connect(self.make_url(path), **kwargs) + self._websockets.append(ws) + return ws + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + for ws in self._websockets: + await ws.close() + await self._session.close() + await self._server.close() + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "TestClient": + await self.start_server() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class AioHTTPTestCase(IsolatedAsyncioTestCase): + """A base class to allow for unittest web applications using aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. 
+    * self.app (aiohttp.web.Application): the application returned by
+      self.get_application()
+
+    Note that the TestClient's methods are asynchronous: you have to
+    execute functions on the test client using asynchronous methods.
+    """
+
+    async def get_application(self) -> Application:
+        """Get application.
+
+        This method should be overridden
+        to return the aiohttp.web.Application
+        object to test.
+        """
+        return self.get_app()
+
+    def get_app(self) -> Application:
+        """Obsolete method used to construct the web application.
+
+        Use the .get_application() coroutine instead.
+        """
+        raise RuntimeError("Did you forget to define get_application()?")
+
+    async def asyncSetUp(self) -> None:
+        self.loop = asyncio.get_running_loop()
+        return await self.setUpAsync()
+
+    async def setUpAsync(self) -> None:
+        self.app = await self.get_application()
+        self.server = await self.get_server(self.app)
+        self.client = await self.get_client(self.server)
+
+        await self.client.start_server()
+
+    async def asyncTearDown(self) -> None:
+        return await self.tearDownAsync()
+
+    async def tearDownAsync(self) -> None:
+        await self.client.close()
+
+    async def get_server(self, app: Application) -> TestServer:
+        """Return a TestServer instance."""
+        return TestServer(app, loop=self.loop)
+
+    async def get_client(self, server: TestServer) -> TestClient:
+        """Return a TestClient instance."""
+        return TestClient(server, loop=self.loop)
+
+
+def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
+    """
+    A decorator for use with asynchronous AioHTTPTestCase test methods.
+
+    In 3.8+, this does nothing.
+    """
+    warnings.warn(
+        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return func
+
+
+_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
+
+
+@contextlib.contextmanager
+def loop_context(
+    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
+) -> Iterator[asyncio.AbstractEventLoop]:
+    """A contextmanager that creates an event_loop, for test purposes.
+
+    Handles the creation and cleanup of a test loop.
+    """
+    loop = setup_test_loop(loop_factory)
+    yield loop
+    teardown_test_loop(loop, fast=fast)
+
+
+def setup_test_loop(
+    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
+) -> asyncio.AbstractEventLoop:
+    """Create and return an asyncio.BaseEventLoop instance.
+
+    The caller should also call teardown_test_loop,
+    once they are done with the loop.
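+
+    A sketch of the expected pairing (``main()`` stands in for any test
+    coroutine)::
+
+        loop = setup_test_loop()
+        try:
+            loop.run_until_complete(main())
+        finally:
+            teardown_test_loop(loop)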
+ """ + loop = loop_factory() + asyncio.set_event_loop(loop) + return loop + + +def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: + """Teardown and cleanup an event_loop created by setup_test_loop.""" + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + + if not fast: + gc.collect() + + asyncio.set_event_loop(None) + + +def _create_app_mock() -> mock.MagicMock: + def get_dict(app: Any, key: str) -> Any: + return app.__app_dict[key] + + def set_dict(app: Any, key: str, value: Any) -> None: + app.__app_dict[key] = value + + app = mock.MagicMock(spec=Application) + app.__app_dict = {} + app.__getitem__ = get_dict + app.__setitem__ = set_dict + + app._debug = False + app.on_response_prepare = Signal(app) + app.on_response_prepare.freeze() + return app + + +def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: + transport = mock.Mock() + + def get_extra_info(key: str) -> Optional[SSLContext]: + if key == "sslcontext": + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request( + method: str, + path: str, + headers: Any = None, + *, + match_info: Any = sentinel, + version: HttpVersion = HttpVersion(1, 1), + closing: bool = False, + app: Any = None, + writer: Any = sentinel, + protocol: Any = sentinel, + transport: Any = sentinel, + payload: Any = sentinel, + sslcontext: Optional[SSLContext] = None, + client_max_size: int = 1024**2, + loop: Any = ..., +) -> Request: + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. + """ + task = mock.Mock() + if loop is ...: + loop = mock.Mock() + loop.create_future.return_value = () + + if version < HttpVersion(1, 1): + closing = True + + if headers: + headers = CIMultiDictProxy(CIMultiDict(headers)) + raw_hdrs = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + else: + headers = CIMultiDictProxy(CIMultiDict()) + raw_hdrs = () + + chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + message = RawRequestMessage( + method, + path, + version, + headers, + raw_hdrs, + closing, + None, + False, + chunked, + URL(path), + ) + if app is None: + app = _create_app_mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if protocol is sentinel: + protocol = mock.Mock() + protocol.transport = transport + + if writer is sentinel: + writer = mock.Mock() + writer.write_headers = make_mocked_coro(None) + writer.write = make_mocked_coro(None) + writer.write_eof = make_mocked_coro(None) + writer.drain = make_mocked_coro(None) + writer.transport = transport + + protocol.transport = transport + protocol.writer = writer + + if payload is sentinel: + payload = mock.Mock() + + req = Request( + message, payload, protocol, writer, task, loop, client_max_size=client_max_size + ) + + match_info = UrlMappingMatchInfo( + {} if match_info is sentinel else match_info, mock.Mock() + ) + match_info.add_app(app) + req._match_info = match_info + + return req + + +def make_mocked_coro( + return_value: Any = sentinel, raise_exception: Any = sentinel +) -> Any: + """Creates a coroutine mock.""" + + async def mock_coro(*args: Any, **kwargs: Any) -> Any: + if raise_exception is not sentinel: + raise raise_exception + if not inspect.isawaitable(return_value): + return return_value + await 
return_value + + return mock.Mock(wraps=mock_coro) diff --git a/venv/lib/python3.12/site-packages/aiohttp/tracing.py b/venv/lib/python3.12/site-packages/aiohttp/tracing.py new file mode 100644 index 0000000..70e2a62 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/tracing.py @@ -0,0 +1,471 @@ +from types import SimpleNamespace +from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar + +import attr +from aiosignal import Signal +from multidict import CIMultiDict +from yarl import URL + +from .client_reqrep import ClientResponse + +if TYPE_CHECKING: # pragma: no cover + from .client import ClientSession + + _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) + + class _SignalCallback(Protocol[_ParamT_contra]): + def __call__( + self, + __client_session: ClientSession, + __trace_config_ctx: SimpleNamespace, + __params: _ParamT_contra, + ) -> Awaitable[None]: + ... + + +__all__ = ( + "TraceConfig", + "TraceRequestStartParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionCreateEndParams", + "TraceConnectionReuseconnParams", + "TraceDnsResolveHostStartParams", + "TraceDnsResolveHostEndParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + "TraceRequestRedirectParams", + "TraceRequestChunkSentParams", + "TraceResponseChunkReceivedParams", + "TraceRequestHeadersSentParams", +) + + +class TraceConfig: + """First-class used to trace requests launched via ClientSession objects.""" + + def __init__( + self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace + ) -> None: + self._on_request_start: Signal[ + _SignalCallback[TraceRequestStartParams] + ] = Signal(self) + self._on_request_chunk_sent: Signal[ + _SignalCallback[TraceRequestChunkSentParams] + ] = Signal(self) + self._on_response_chunk_received: Signal[ + _SignalCallback[TraceResponseChunkReceivedParams] + ] = Signal(self) + self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal( + self + ) + self._on_request_exception: Signal[ + _SignalCallback[TraceRequestExceptionParams] + ] = Signal(self) + self._on_request_redirect: Signal[ + _SignalCallback[TraceRequestRedirectParams] + ] = Signal(self) + self._on_connection_queued_start: Signal[ + _SignalCallback[TraceConnectionQueuedStartParams] + ] = Signal(self) + self._on_connection_queued_end: Signal[ + _SignalCallback[TraceConnectionQueuedEndParams] + ] = Signal(self) + self._on_connection_create_start: Signal[ + _SignalCallback[TraceConnectionCreateStartParams] + ] = Signal(self) + self._on_connection_create_end: Signal[ + _SignalCallback[TraceConnectionCreateEndParams] + ] = Signal(self) + self._on_connection_reuseconn: Signal[ + _SignalCallback[TraceConnectionReuseconnParams] + ] = Signal(self) + self._on_dns_resolvehost_start: Signal[ + _SignalCallback[TraceDnsResolveHostStartParams] + ] = Signal(self) + self._on_dns_resolvehost_end: Signal[ + _SignalCallback[TraceDnsResolveHostEndParams] + ] = Signal(self) + self._on_dns_cache_hit: Signal[ + _SignalCallback[TraceDnsCacheHitParams] + ] = Signal(self) + self._on_dns_cache_miss: Signal[ + _SignalCallback[TraceDnsCacheMissParams] + ] = Signal(self) + self._on_request_headers_sent: Signal[ + _SignalCallback[TraceRequestHeadersSentParams] + ] = Signal(self) + + self._trace_config_ctx_factory = trace_config_ctx_factory + + def trace_config_ctx( + self, trace_request_ctx: Optional[SimpleNamespace] = None + ) -> 
SimpleNamespace: + """Return a new trace_config_ctx instance""" + return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) + + def freeze(self) -> None: + self._on_request_start.freeze() + self._on_request_chunk_sent.freeze() + self._on_response_chunk_received.freeze() + self._on_request_end.freeze() + self._on_request_exception.freeze() + self._on_request_redirect.freeze() + self._on_connection_queued_start.freeze() + self._on_connection_queued_end.freeze() + self._on_connection_create_start.freeze() + self._on_connection_create_end.freeze() + self._on_connection_reuseconn.freeze() + self._on_dns_resolvehost_start.freeze() + self._on_dns_resolvehost_end.freeze() + self._on_dns_cache_hit.freeze() + self._on_dns_cache_miss.freeze() + self._on_request_headers_sent.freeze() + + @property + def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": + return self._on_request_start + + @property + def on_request_chunk_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]": + return self._on_request_chunk_sent + + @property + def on_response_chunk_received( + self, + ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": + return self._on_response_chunk_received + + @property + def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": + return self._on_request_end + + @property + def on_request_exception( + self, + ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": + return self._on_request_exception + + @property + def on_request_redirect( + self, + ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": + return self._on_request_redirect + + @property + def on_connection_queued_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": + return self._on_connection_queued_start + + @property + def on_connection_queued_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": + return self._on_connection_queued_end + + @property + def on_connection_create_start( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": + return self._on_connection_create_start + + @property + def on_connection_create_end( + self, + ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": + return self._on_connection_create_end + + @property + def on_connection_reuseconn( + self, + ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": + return self._on_connection_reuseconn + + @property + def on_dns_resolvehost_start( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": + return self._on_dns_resolvehost_start + + @property + def on_dns_resolvehost_end( + self, + ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": + return self._on_dns_resolvehost_end + + @property + def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": + return self._on_dns_cache_hit + + @property + def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": + return self._on_dns_cache_miss + + @property + def on_request_headers_sent( + self, + ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]": + return self._on_request_headers_sent + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestStartParams: + """Parameters sent by the `on_request_start` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestChunkSentParams: + """Parameters sent by the 
`on_request_chunk_sent` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceResponseChunkReceivedParams: + """Parameters sent by the `on_response_chunk_received` signal""" + + method: str + url: URL + chunk: bytes + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestEndParams: + """Parameters sent by the `on_request_end` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestExceptionParams: + """Parameters sent by the `on_request_exception` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + exception: BaseException + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestRedirectParams: + """Parameters sent by the `on_request_redirect` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + response: ClientResponse + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedStartParams: + """Parameters sent by the `on_connection_queued_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionQueuedEndParams: + """Parameters sent by the `on_connection_queued_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateStartParams: + """Parameters sent by the `on_connection_create_start` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionCreateEndParams: + """Parameters sent by the `on_connection_create_end` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceConnectionReuseconnParams: + """Parameters sent by the `on_connection_reuseconn` signal""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostStartParams: + """Parameters sent by the `on_dns_resolvehost_start` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsResolveHostEndParams: + """Parameters sent by the `on_dns_resolvehost_end` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheHitParams: + """Parameters sent by the `on_dns_cache_hit` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceDnsCacheMissParams: + """Parameters sent by the `on_dns_cache_miss` signal""" + + host: str + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class TraceRequestHeadersSentParams: + """Parameters sent by the `on_request_headers_sent` signal""" + + method: str + url: URL + headers: "CIMultiDict[str]" + + +class Trace: + """Internal dependency holder class. + + Used to keep together the main dependencies used + at the moment of send a signal. 
+ """ + + def __init__( + self, + session: "ClientSession", + trace_config: TraceConfig, + trace_config_ctx: SimpleNamespace, + ) -> None: + self._trace_config = trace_config + self._trace_config_ctx = trace_config_ctx + self._session = session + + async def send_request_start( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config.on_request_start.send( + self._session, + self._trace_config_ctx, + TraceRequestStartParams(method, url, headers), + ) + + async def send_request_chunk_sent( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_request_chunk_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestChunkSentParams(method, url, chunk), + ) + + async def send_response_chunk_received( + self, method: str, url: URL, chunk: bytes + ) -> None: + return await self._trace_config.on_response_chunk_received.send( + self._session, + self._trace_config_ctx, + TraceResponseChunkReceivedParams(method, url, chunk), + ) + + async def send_request_end( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config.on_request_end.send( + self._session, + self._trace_config_ctx, + TraceRequestEndParams(method, url, headers, response), + ) + + async def send_request_exception( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + exception: BaseException, + ) -> None: + return await self._trace_config.on_request_exception.send( + self._session, + self._trace_config_ctx, + TraceRequestExceptionParams(method, url, headers, exception), + ) + + async def send_request_redirect( + self, + method: str, + url: URL, + headers: "CIMultiDict[str]", + response: ClientResponse, + ) -> None: + return await self._trace_config._on_request_redirect.send( + self._session, + self._trace_config_ctx, + TraceRequestRedirectParams(method, url, headers, response), + ) + + async def send_connection_queued_start(self) -> None: + return await self._trace_config.on_connection_queued_start.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() + ) + + async def send_connection_queued_end(self) -> None: + return await self._trace_config.on_connection_queued_end.send( + self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() + ) + + async def send_connection_create_start(self) -> None: + return await self._trace_config.on_connection_create_start.send( + self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() + ) + + async def send_connection_create_end(self) -> None: + return await self._trace_config.on_connection_create_end.send( + self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() + ) + + async def send_connection_reuseconn(self) -> None: + return await self._trace_config.on_connection_reuseconn.send( + self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() + ) + + async def send_dns_resolvehost_start(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_start.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) + ) + + async def send_dns_resolvehost_end(self, host: str) -> None: + return await self._trace_config.on_dns_resolvehost_end.send( + self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) + ) + + async def send_dns_cache_hit(self, host: str) -> None: + return await self._trace_config.on_dns_cache_hit.send( + self._session, self._trace_config_ctx, 
TraceDnsCacheHitParams(host) + ) + + async def send_dns_cache_miss(self, host: str) -> None: + return await self._trace_config.on_dns_cache_miss.send( + self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) + ) + + async def send_request_headers( + self, method: str, url: URL, headers: "CIMultiDict[str]" + ) -> None: + return await self._trace_config._on_request_headers_sent.send( + self._session, + self._trace_config_ctx, + TraceRequestHeadersSentParams(method, url, headers), + ) diff --git a/venv/lib/python3.12/site-packages/aiohttp/typedefs.py b/venv/lib/python3.12/site-packages/aiohttp/typedefs.py new file mode 100644 index 0000000..57d95b3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/typedefs.py @@ -0,0 +1,54 @@ +import json +import os +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Iterable, + Mapping, + Tuple, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr +from yarl import URL + +DEFAULT_JSON_ENCODER = json.dumps +DEFAULT_JSON_DECODER = json.loads + +if TYPE_CHECKING: # pragma: no cover + _CIMultiDict = CIMultiDict[str] + _CIMultiDictProxy = CIMultiDictProxy[str] + _MultiDict = MultiDict[str] + _MultiDictProxy = MultiDictProxy[str] + from http.cookies import BaseCookie, Morsel + + from .web import Request, StreamResponse +else: + _CIMultiDict = CIMultiDict + _CIMultiDictProxy = CIMultiDictProxy + _MultiDict = MultiDict + _MultiDictProxy = MultiDictProxy + +Byteish = Union[bytes, bytearray, memoryview] +JSONEncoder = Callable[[Any], str] +JSONDecoder = Callable[[str], Any] +LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] +RawHeaders = Tuple[Tuple[bytes, bytes], ...] +StrOrURL = Union[str, URL] + +LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +LooseCookiesIterables = Iterable[ + Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +] +LooseCookies = Union[ + LooseCookiesMappings, + LooseCookiesIterables, + "BaseCookie[str]", +] + +Handler = Callable[["Request"], Awaitable["StreamResponse"]] +Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]] + +PathLike = Union[str, "os.PathLike[str]"] diff --git a/venv/lib/python3.12/site-packages/aiohttp/web.py b/venv/lib/python3.12/site-packages/aiohttp/web.py new file mode 100644 index 0000000..e911650 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web.py @@ -0,0 +1,616 @@ +import asyncio +import logging +import os +import socket +import sys +import warnings +from argparse import ArgumentParser +from collections.abc import Iterable +from contextlib import suppress +from functools import partial +from importlib import import_module +from typing import ( + Any, + Awaitable, + Callable, + Iterable as TypingIterable, + List, + Optional, + Set, + Type, + Union, + cast, +) +from weakref import WeakSet + +from .abc import AbstractAccessLogger +from .helpers import AppKey as AppKey +from .log import access_logger +from .typedefs import PathLike +from .web_app import Application as Application, CleanupError as CleanupError +from .web_exceptions import ( + HTTPAccepted as HTTPAccepted, + HTTPBadGateway as HTTPBadGateway, + HTTPBadRequest as HTTPBadRequest, + HTTPClientError as HTTPClientError, + HTTPConflict as HTTPConflict, + HTTPCreated as HTTPCreated, + HTTPError as HTTPError, + HTTPException as HTTPException, + HTTPExpectationFailed as HTTPExpectationFailed, + HTTPFailedDependency as HTTPFailedDependency, + HTTPForbidden as 
HTTPForbidden, + HTTPFound as HTTPFound, + HTTPGatewayTimeout as HTTPGatewayTimeout, + HTTPGone as HTTPGone, + HTTPInsufficientStorage as HTTPInsufficientStorage, + HTTPInternalServerError as HTTPInternalServerError, + HTTPLengthRequired as HTTPLengthRequired, + HTTPMethodNotAllowed as HTTPMethodNotAllowed, + HTTPMisdirectedRequest as HTTPMisdirectedRequest, + HTTPMove as HTTPMove, + HTTPMovedPermanently as HTTPMovedPermanently, + HTTPMultipleChoices as HTTPMultipleChoices, + HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, + HTTPNoContent as HTTPNoContent, + HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, + HTTPNotAcceptable as HTTPNotAcceptable, + HTTPNotExtended as HTTPNotExtended, + HTTPNotFound as HTTPNotFound, + HTTPNotImplemented as HTTPNotImplemented, + HTTPNotModified as HTTPNotModified, + HTTPOk as HTTPOk, + HTTPPartialContent as HTTPPartialContent, + HTTPPaymentRequired as HTTPPaymentRequired, + HTTPPermanentRedirect as HTTPPermanentRedirect, + HTTPPreconditionFailed as HTTPPreconditionFailed, + HTTPPreconditionRequired as HTTPPreconditionRequired, + HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, + HTTPRedirection as HTTPRedirection, + HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, + HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, + HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, + HTTPRequestTimeout as HTTPRequestTimeout, + HTTPRequestURITooLong as HTTPRequestURITooLong, + HTTPResetContent as HTTPResetContent, + HTTPSeeOther as HTTPSeeOther, + HTTPServerError as HTTPServerError, + HTTPServiceUnavailable as HTTPServiceUnavailable, + HTTPSuccessful as HTTPSuccessful, + HTTPTemporaryRedirect as HTTPTemporaryRedirect, + HTTPTooManyRequests as HTTPTooManyRequests, + HTTPUnauthorized as HTTPUnauthorized, + HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, + HTTPUnprocessableEntity as HTTPUnprocessableEntity, + HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, + HTTPUpgradeRequired as HTTPUpgradeRequired, + HTTPUseProxy as HTTPUseProxy, + HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, + HTTPVersionNotSupported as HTTPVersionNotSupported, + NotAppKeyWarning as NotAppKeyWarning, +) +from .web_fileresponse import FileResponse as FileResponse +from .web_log import AccessLogger +from .web_middlewares import ( + middleware as middleware, + normalize_path_middleware as normalize_path_middleware, +) +from .web_protocol import ( + PayloadAccessError as PayloadAccessError, + RequestHandler as RequestHandler, + RequestPayloadError as RequestPayloadError, +) +from .web_request import ( + BaseRequest as BaseRequest, + FileField as FileField, + Request as Request, +) +from .web_response import ( + ContentCoding as ContentCoding, + Response as Response, + StreamResponse as StreamResponse, + json_response as json_response, +) +from .web_routedef import ( + AbstractRouteDef as AbstractRouteDef, + RouteDef as RouteDef, + RouteTableDef as RouteTableDef, + StaticDef as StaticDef, + delete as delete, + get as get, + head as head, + options as options, + patch as patch, + post as post, + put as put, + route as route, + static as static, + view as view, +) +from .web_runner import ( + AppRunner as AppRunner, + BaseRunner as BaseRunner, + BaseSite as BaseSite, + GracefulExit as GracefulExit, + NamedPipeSite as NamedPipeSite, + ServerRunner as ServerRunner, + SockSite as SockSite, + TCPSite as TCPSite, + UnixSite as UnixSite, +) +from .web_server import Server as Server 
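For orientation, the tracing module vendored above is driven entirely through these signals; a minimal sketch of wiring a TraceConfig into a ClientSession follows (the handler names and target URL are illustrative, not part of this commit):

import asyncio

import aiohttp


async def on_request_start(session, trace_config_ctx, params):
    # params is a TraceRequestStartParams(method, url, headers) instance
    print(f"-> {params.method} {params.url}")


async def on_request_end(session, trace_config_ctx, params):
    # params.response is the finished ClientResponse
    print(f"<- {params.response.status} {params.url}")


async def main():
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    trace_config.on_request_end.append(on_request_end)
    # ClientSession freezes each TraceConfig, and Trace.send_* fans the
    # signals out with (session, trace_config_ctx, params)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com/") as resp:
            await resp.read()


asyncio.run(main())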
+from .web_urldispatcher import ( + AbstractResource as AbstractResource, + AbstractRoute as AbstractRoute, + DynamicResource as DynamicResource, + PlainResource as PlainResource, + PrefixedSubAppResource as PrefixedSubAppResource, + Resource as Resource, + ResourceRoute as ResourceRoute, + StaticResource as StaticResource, + UrlDispatcher as UrlDispatcher, + UrlMappingMatchInfo as UrlMappingMatchInfo, + View as View, +) +from .web_ws import ( + WebSocketReady as WebSocketReady, + WebSocketResponse as WebSocketResponse, + WSMsgType as WSMsgType, +) + +__all__ = ( + # web_app + "AppKey", + "Application", + "CleanupError", + # web_exceptions + "NotAppKeyWarning", + "HTTPAccepted", + "HTTPBadGateway", + "HTTPBadRequest", + "HTTPClientError", + "HTTPConflict", + "HTTPCreated", + "HTTPError", + "HTTPException", + "HTTPExpectationFailed", + "HTTPFailedDependency", + "HTTPForbidden", + "HTTPFound", + "HTTPGatewayTimeout", + "HTTPGone", + "HTTPInsufficientStorage", + "HTTPInternalServerError", + "HTTPLengthRequired", + "HTTPMethodNotAllowed", + "HTTPMisdirectedRequest", + "HTTPMove", + "HTTPMovedPermanently", + "HTTPMultipleChoices", + "HTTPNetworkAuthenticationRequired", + "HTTPNoContent", + "HTTPNonAuthoritativeInformation", + "HTTPNotAcceptable", + "HTTPNotExtended", + "HTTPNotFound", + "HTTPNotImplemented", + "HTTPNotModified", + "HTTPOk", + "HTTPPartialContent", + "HTTPPaymentRequired", + "HTTPPermanentRedirect", + "HTTPPreconditionFailed", + "HTTPPreconditionRequired", + "HTTPProxyAuthenticationRequired", + "HTTPRedirection", + "HTTPRequestEntityTooLarge", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPRequestRangeNotSatisfiable", + "HTTPRequestTimeout", + "HTTPRequestURITooLong", + "HTTPResetContent", + "HTTPSeeOther", + "HTTPServerError", + "HTTPServiceUnavailable", + "HTTPSuccessful", + "HTTPTemporaryRedirect", + "HTTPTooManyRequests", + "HTTPUnauthorized", + "HTTPUnavailableForLegalReasons", + "HTTPUnprocessableEntity", + "HTTPUnsupportedMediaType", + "HTTPUpgradeRequired", + "HTTPUseProxy", + "HTTPVariantAlsoNegotiates", + "HTTPVersionNotSupported", + # web_fileresponse + "FileResponse", + # web_middlewares + "middleware", + "normalize_path_middleware", + # web_protocol + "PayloadAccessError", + "RequestHandler", + "RequestPayloadError", + # web_request + "BaseRequest", + "FileField", + "Request", + # web_response + "ContentCoding", + "Response", + "StreamResponse", + "json_response", + # web_routedef + "AbstractRouteDef", + "RouteDef", + "RouteTableDef", + "StaticDef", + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "route", + "static", + "view", + # web_runner + "AppRunner", + "BaseRunner", + "BaseSite", + "GracefulExit", + "ServerRunner", + "SockSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + # web_server + "Server", + # web_urldispatcher + "AbstractResource", + "AbstractRoute", + "DynamicResource", + "PlainResource", + "PrefixedSubAppResource", + "Resource", + "ResourceRoute", + "StaticResource", + "UrlDispatcher", + "UrlMappingMatchInfo", + "View", + # web_ws + "WebSocketReady", + "WebSocketResponse", + "WSMsgType", + # web + "run_app", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = Any # type: ignore[misc,assignment] + +# Only display warning when using -Wdefault, -We, -X dev or similar. 
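Taken together, the re-exports above make aiohttp.web a single façade module; a minimal server sketch against that public surface (route path and handler name are illustrative):

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.json_response({"message": "hello"})


app = web.Application()
app.add_routes([web.get("/", hello)])

if __name__ == "__main__":
    # blocks until KeyboardInterrupt / GracefulExit, then tears the runner down
    web.run_app(app, host="127.0.0.1", port=8080)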
+warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) + +HostSequence = TypingIterable[str] + + +async def _run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Union[PathLike, TypingIterable[PathLike], None] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Optional[Callable[..., None]] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + handler_cancellation: bool = False, +) -> None: + async def wait( + starting_tasks: "WeakSet[asyncio.Task[object]]", shutdown_timeout: float + ) -> None: + # Wait for pending tasks for a given time limit. + t = asyncio.current_task() + assert t is not None + starting_tasks.add(t) + with suppress(asyncio.TimeoutError): + await asyncio.wait_for(_wait(starting_tasks), timeout=shutdown_timeout) + + async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None: + t = asyncio.current_task() + assert t is not None + exclude.add(t) + while tasks := asyncio.all_tasks().difference(exclude): + await asyncio.wait(tasks) + + # An internal function to actually do all dirty job for application running + if asyncio.iscoroutine(app): + app = await app + + app = cast(Application, app) + + runner = AppRunner( + app, + handle_signals=handle_signals, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + keepalive_timeout=keepalive_timeout, + shutdown_timeout=shutdown_timeout, + handler_cancellation=handler_cancellation, + ) + + await runner.setup() + # On shutdown we want to avoid waiting on tasks which run forever. + # It's very likely that all tasks which run forever will have been created by + # the time we have completed the application startup (in runner.setup()), + # so we just record all running tasks here and exclude them later. 
+ starting_tasks: "WeakSet[asyncio.Task[object]]" = WeakSet(asyncio.all_tasks()) + runner.shutdown_callback = partial(wait, starting_tasks, shutdown_timeout) + + sites: List[BaseSite] = [] + + try: + if host is not None: + if isinstance(host, (str, bytes, bytearray, memoryview)): + sites.append( + TCPSite( + runner, + host, + port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + else: + for h in host: + sites.append( + TCPSite( + runner, + h, + port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + elif path is None and sock is None or port is not None: + sites.append( + TCPSite( + runner, + port=port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + if path is not None: + if isinstance(path, (str, os.PathLike)): + sites.append( + UnixSite( + runner, + path, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for p in path: + sites.append( + UnixSite( + runner, + p, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + + if sock is not None: + if not isinstance(sock, Iterable): + sites.append( + SockSite( + runner, + sock, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for s in sock: + sites.append( + SockSite( + runner, + s, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + for site in sites: + await site.start() + + if print: # pragma: no branch + names = sorted(str(s.name) for s in runner.sites) + print( + "======== Running on {} ========\n" + "(Press CTRL+C to quit)".format(", ".join(names)) + ) + + # sleep forever by 1 hour intervals, + while True: + await asyncio.sleep(3600) + finally: + await runner.cleanup() + + +def _cancel_tasks( + to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop +) -> None: + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + +def run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Union[PathLike, TypingIterable[PathLike], None] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Optional[Callable[..., None]] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + handler_cancellation: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> None: + """Run an app locally""" + if loop is None: + loop = asyncio.new_event_loop() + + # Configure if and only if in debugging mode and using the default logger + if loop.get_debug() and access_log and access_log.name == "aiohttp.access": + if access_log.level == logging.NOTSET: + access_log.setLevel(logging.DEBUG) + if not access_log.hasHandlers(): + access_log.addHandler(logging.StreamHandler()) + + 
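Because _run_app awaits the app argument when asyncio.iscoroutine(app) is true, run_app also accepts an async factory; a small sketch under that assumption (make_app and ping are invented names):

from aiohttp import web


async def ping(request: web.Request) -> web.Response:
    return web.json_response({"ok": True})


async def make_app() -> web.Application:
    # the factory can await setup work (config loading, connection pools, ...)
    # before handing the Application to the runner
    app = web.Application()
    app.add_routes([web.get("/ping", ping)])
    return app


# the coroutine is awaited inside _run_app before AppRunner.setup()
web.run_app(make_app(), port=8080)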
main_task = loop.create_task( + _run_app( + app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + keepalive_timeout=keepalive_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port, + handler_cancellation=handler_cancellation, + ) + ) + + try: + asyncio.set_event_loop(loop) + loop.run_until_complete(main_task) + except (GracefulExit, KeyboardInterrupt): # pragma: no cover + pass + finally: + _cancel_tasks({main_task}, loop) + _cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() + + +def main(argv: List[str]) -> None: + arg_parser = ArgumentParser( + description="aiohttp.web Application server", prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=( + "Callable returning the `aiohttp.web.Application` instance to " + "run. Should be specified in the 'module:function' syntax." + ), + metavar="entry-func", + ) + arg_parser.add_argument( + "-H", + "--hostname", + help="TCP/IP hostname to serve on (default: %(default)r)", + default="localhost", + ) + arg_parser.add_argument( + "-P", + "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default="8080", + ) + arg_parser.add_argument( + "-U", + "--path", + help="Unix file system path to serve on. Specifying a path will cause " + "hostname and port arguments to be ignored.", + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error("'entry-func' not in 'module:function' syntax") + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError as ex: + arg_parser.error(f"unable to import {mod_str}: {ex}") + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") + + # Compatibility logic + if args.path is not None and not hasattr(socket, "AF_UNIX"): + arg_parser.error( + "file system paths not supported by your operating" " environment" + ) + + logging.basicConfig(level=logging.DEBUG) + + app = func(extra_argv) + run_app(app, host=args.hostname, port=args.port, path=args.path) + arg_parser.exit(message="Stopped\n") + + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_app.py b/venv/lib/python3.12/site-packages/aiohttp/web_app.py new file mode 100644 index 0000000..6e822b8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_app.py @@ -0,0 +1,596 @@ +import asyncio +import logging +import warnings +from functools import partial, update_wrapper +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Awaitable, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) + +from aiosignal import Signal +from frozenlist import FrozenList + +from . 
import hdrs +from .abc import ( + AbstractAccessLogger, + AbstractMatchInfo, + AbstractRouter, + AbstractStreamWriter, +) +from .helpers import DEBUG, AppKey +from .http_parser import RawRequestMessage +from .log import web_logger +from .streams import StreamReader +from .typedefs import Middleware +from .web_exceptions import NotAppKeyWarning +from .web_log import AccessLogger +from .web_middlewares import _fix_request_current_app +from .web_protocol import RequestHandler +from .web_request import Request +from .web_response import StreamResponse +from .web_routedef import AbstractRouteDef +from .web_server import Server +from .web_urldispatcher import ( + AbstractResource, + AbstractRoute, + Domain, + MaskDomain, + MatchedSubAppResource, + PrefixedSubAppResource, + UrlDispatcher, +) + +__all__ = ("Application", "CleanupError") + + +if TYPE_CHECKING: # pragma: no cover + _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] + _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] + _Middlewares = FrozenList[Middleware] + _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]] + _Subapps = List["Application"] +else: + # No type checker mode, skip types + _AppSignal = Signal + _RespPrepareSignal = Signal + _Middlewares = FrozenList + _MiddlewaresHandlers = Optional[Sequence] + _Subapps = List + +_T = TypeVar("_T") +_U = TypeVar("_U") + + +class Application(MutableMapping[Union[str, AppKey[Any]], Any]): + ATTRS = frozenset( + [ + "logger", + "_debug", + "_router", + "_loop", + "_handler_args", + "_middlewares", + "_middlewares_handlers", + "_run_middlewares", + "_state", + "_frozen", + "_pre_frozen", + "_subapps", + "_on_response_prepare", + "_on_startup", + "_on_shutdown", + "_on_cleanup", + "_client_max_size", + "_cleanup_ctx", + ] + ) + + def __init__( + self, + *, + logger: logging.Logger = web_logger, + router: Optional[UrlDispatcher] = None, + middlewares: Iterable[Middleware] = (), + handler_args: Optional[Mapping[str, Any]] = None, + client_max_size: int = 1024**2, + loop: Optional[asyncio.AbstractEventLoop] = None, + debug: Any = ..., # mypy doesn't support ellipsis + ) -> None: + if router is None: + router = UrlDispatcher() + else: + warnings.warn( + "router argument is deprecated", DeprecationWarning, stacklevel=2 + ) + assert isinstance(router, AbstractRouter), router + + if loop is not None: + warnings.warn( + "loop argument is deprecated", DeprecationWarning, stacklevel=2 + ) + + if debug is not ...: + warnings.warn( + "debug argument is deprecated", DeprecationWarning, stacklevel=2 + ) + self._debug = debug + self._router: UrlDispatcher = router + self._loop = loop + self._handler_args = handler_args + self.logger = logger + + self._middlewares: _Middlewares = FrozenList(middlewares) + + # initialized on freezing + self._middlewares_handlers: _MiddlewaresHandlers = None + # initialized on freezing + self._run_middlewares: Optional[bool] = None + + self._state: Dict[Union[AppKey[Any], str], object] = {} + self._frozen = False + self._pre_frozen = False + self._subapps: _Subapps = [] + + self._on_response_prepare: _RespPrepareSignal = Signal(self) + self._on_startup: _AppSignal = Signal(self) + self._on_shutdown: _AppSignal = Signal(self) + self._on_cleanup: _AppSignal = Signal(self) + self._cleanup_ctx = CleanupContext() + self._on_startup.append(self._cleanup_ctx._on_startup) + self._on_cleanup.append(self._cleanup_ctx._on_cleanup) + self._client_max_size = client_max_size + + def __init_subclass__(cls: Type["Application"]) -> 
None: + warnings.warn( + "Inheritance class {} from web.Application " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=3, + ) + + if DEBUG: # pragma: no cover + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom web.Application.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + # MutableMapping API + + def __eq__(self, other: object) -> bool: + return self is other + + @overload # type: ignore[override] + def __getitem__(self, key: AppKey[_T]) -> _T: + ... + + @overload + def __getitem__(self, key: str) -> Any: + ... + + def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: + return self._state[key] + + def _check_frozen(self) -> None: + if self._frozen: + warnings.warn( + "Changing state of started or joined " "application is deprecated", + DeprecationWarning, + stacklevel=3, + ) + + @overload # type: ignore[override] + def __setitem__(self, key: AppKey[_T], value: _T) -> None: + ... + + @overload + def __setitem__(self, key: str, value: Any) -> None: + ... + + def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: + self._check_frozen() + if not isinstance(key, AppKey): + warnings.warn( + "It is recommended to use web.AppKey instances for keys.\n" + + "https://docs.aiohttp.org/en/stable/web_advanced.html" + + "#application-s-config", + category=NotAppKeyWarning, + stacklevel=2, + ) + self._state[key] = value + + def __delitem__(self, key: Union[str, AppKey[_T]]) -> None: + self._check_frozen() + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: + return iter(self._state) + + @overload # type: ignore[override] + def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: + ... + + @overload + def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: + ... + + @overload + def get(self, key: str, default: Any = ...) -> Any: + ... 
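The __getitem__/get overloads above are what let AppKey-typed state round-trip with its value type; an illustrative sketch (dsn_key and the DSN string are made up for the example):

from aiohttp import web

# AppKey carries the value type, so checkers infer app[dsn_key] as str
dsn_key = web.AppKey("dsn_key", str)

app = web.Application()
app[dsn_key] = "postgresql://localhost/example"


async def show_dsn(request: web.Request) -> web.Response:
    return web.Response(text=request.app[dsn_key])


app.add_routes([web.get("/dsn", show_dsn)])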
+ + def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + return self._state.get(key, default) + + ######## + @property + def loop(self) -> asyncio.AbstractEventLoop: + # Technically the loop can be None + # but we mask it by explicit type cast + # to provide more convenient type annotation + warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) + return cast(asyncio.AbstractEventLoop, self._loop) + + def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: + if loop is None: + loop = asyncio.get_event_loop() + if self._loop is not None and self._loop is not loop: + raise RuntimeError( + "web.Application instance initialized with different loop" + ) + + self._loop = loop + + # set loop debug + if self._debug is ...: + self._debug = loop.get_debug() + + # set loop to sub applications + for subapp in self._subapps: + subapp._set_loop(loop) + + @property + def pre_frozen(self) -> bool: + return self._pre_frozen + + def pre_freeze(self) -> None: + if self._pre_frozen: + return + + self._pre_frozen = True + self._middlewares.freeze() + self._router.freeze() + self._on_response_prepare.freeze() + self._cleanup_ctx.freeze() + self._on_startup.freeze() + self._on_shutdown.freeze() + self._on_cleanup.freeze() + self._middlewares_handlers = tuple(self._prepare_middleware()) + + # If current app and any subapp do not have middlewares avoid run all + # of the code footprint that it implies, which have a middleware + # hardcoded per app that sets up the current_app attribute. If no + # middlewares are configured the handler will receive the proper + # current_app without needing all of this code. + self._run_middlewares = True if self.middlewares else False + + for subapp in self._subapps: + subapp.pre_freeze() + self._run_middlewares = self._run_middlewares or subapp._run_middlewares + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + if self._frozen: + return + + self.pre_freeze() + self._frozen = True + for subapp in self._subapps: + subapp.freeze() + + @property + def debug(self) -> bool: + warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) + return self._debug # type: ignore[no-any-return] + + def _reg_subapp_signals(self, subapp: "Application") -> None: + def reg_handler(signame: str) -> None: + subsig = getattr(subapp, signame) + + async def handler(app: "Application") -> None: + await subsig.send(subapp) + + appsig = getattr(self, signame) + appsig.append(handler) + + reg_handler("on_startup") + reg_handler("on_shutdown") + reg_handler("on_cleanup") + + def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: + if not isinstance(prefix, str): + raise TypeError("Prefix must be str") + prefix = prefix.rstrip("/") + if not prefix: + raise ValueError("Prefix cannot be empty") + factory = partial(PrefixedSubAppResource, prefix, subapp) + return self._add_subapp(factory, subapp) + + def _add_subapp( + self, resource_factory: Callable[[], AbstractResource], subapp: "Application" + ) -> AbstractResource: + if self.frozen: + raise RuntimeError("Cannot add sub application to frozen application") + if subapp.frozen: + raise RuntimeError("Cannot add frozen application") + resource = resource_factory() + self.router.register_resource(resource) + self._reg_subapp_signals(subapp) + self._subapps.append(subapp) + subapp.pre_freeze() + if self._loop is not None: + subapp._set_loop(self._loop) + return resource + + def add_domain(self, domain: str, subapp: 
"Application") -> AbstractResource: + if not isinstance(domain, str): + raise TypeError("Domain must be str") + elif "*" in domain: + rule: Domain = MaskDomain(domain) + else: + rule = Domain(domain) + factory = partial(MatchedSubAppResource, rule, subapp) + return self._add_subapp(factory, subapp) + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + return self.router.add_routes(routes) + + @property + def on_response_prepare(self) -> _RespPrepareSignal: + return self._on_response_prepare + + @property + def on_startup(self) -> _AppSignal: + return self._on_startup + + @property + def on_shutdown(self) -> _AppSignal: + return self._on_shutdown + + @property + def on_cleanup(self) -> _AppSignal: + return self._on_cleanup + + @property + def cleanup_ctx(self) -> "CleanupContext": + return self._cleanup_ctx + + @property + def router(self) -> UrlDispatcher: + return self._router + + @property + def middlewares(self) -> _Middlewares: + return self._middlewares + + def _make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + if not issubclass(access_log_class, AbstractAccessLogger): + raise TypeError( + "access_log_class must be subclass of " + "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) + ) + + self._set_loop(loop) + self.freeze() + + kwargs["debug"] = self._debug + kwargs["access_log_class"] = access_log_class + if self._handler_args: + for k, v in self._handler_args.items(): + kwargs[k] = v + + return Server( + self._handle, # type: ignore[arg-type] + request_factory=self._make_request, + loop=self._loop, + **kwargs, + ) + + def make_handler( + self, + *, + loop: Optional[asyncio.AbstractEventLoop] = None, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + **kwargs: Any, + ) -> Server: + + warnings.warn( + "Application.make_handler(...) is deprecated, " "use AppRunner API instead", + DeprecationWarning, + stacklevel=2, + ) + + return self._make_handler( + loop=loop, access_log_class=access_log_class, **kwargs + ) + + async def startup(self) -> None: + """Causes on_startup signal + + Should be called in the event loop along with the request handler. + """ + await self.on_startup.send(self) + + async def shutdown(self) -> None: + """Causes on_shutdown signal + + Should be called before cleanup() + """ + await self.on_shutdown.send(self) + + async def cleanup(self) -> None: + """Causes on_cleanup signal + + Should be called after shutdown() + """ + if self.on_cleanup.frozen: + await self.on_cleanup.send(self) + else: + # If an exception occurs in startup, ensure cleanup contexts are completed. 
+ await self._cleanup_ctx._on_cleanup(self) + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + _cls: Type[Request] = Request, + ) -> Request: + return _cls( + message, + payload, + protocol, + writer, + task, + self._loop, + client_max_size=self._client_max_size, + ) + + def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: + for m in reversed(self._middlewares): + if getattr(m, "__middleware_version__", None) == 1: + yield m, True + else: + warnings.warn( + 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), + DeprecationWarning, + stacklevel=2, + ) + yield m, False + + yield _fix_request_current_app(self), True + + async def _handle(self, request: Request) -> StreamResponse: + loop = asyncio.get_event_loop() + debug = loop.get_debug() + match_info = await self._router.resolve(request) + if debug: # pragma: no cover + if not isinstance(match_info, AbstractMatchInfo): + raise TypeError( + "match_info should be AbstractMatchInfo " + "instance, not {!r}".format(match_info) + ) + match_info.add_app(self) + + match_info.freeze() + + resp = None + request._match_info = match_info + expect = request.headers.get(hdrs.EXPECT) + if expect: + resp = await match_info.expect_handler(request) + await request.writer.drain() + + if resp is None: + handler = match_info.handler + + if self._run_middlewares: + for app in match_info.apps[::-1]: + for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] + if new_style: + handler = update_wrapper( + partial(m, handler=handler), handler + ) + else: + handler = await m(app, handler) # type: ignore[arg-type,assignment] + + resp = await handler(request) + + return resp + + def __call__(self) -> "Application": + """gunicorn compatibility""" + return self + + def __repr__(self) -> str: + return f"<Application 0x{id(self):x}>" + + def __bool__(self) -> bool: + return True + + +class CleanupError(RuntimeError): + @property + def exceptions(self) -> List[BaseException]: + return cast(List[BaseException], self.args[1]) + + +if TYPE_CHECKING: # pragma: no cover + _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] +else: + _CleanupContextBase = FrozenList + + +class CleanupContext(_CleanupContextBase): + def __init__(self) -> None: + super().__init__() + self._exits: List[AsyncIterator[None]] = [] + + async def _on_startup(self, app: Application) -> None: + for cb in self: + it = cb(app).__aiter__() + await it.__anext__() + self._exits.append(it) + + async def _on_cleanup(self, app: Application) -> None: + errors = [] + for it in reversed(self._exits): + try: + await it.__anext__() + except StopAsyncIteration: + pass + except Exception as exc: + errors.append(exc) + else: + errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) + if errors: + if len(errors) == 1: + raise errors[0] + else: + raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_exceptions.py b/venv/lib/python3.12/site-packages/aiohttp/web_exceptions.py new file mode 100644 index 0000000..ee2c1e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_exceptions.py @@ -0,0 +1,452 @@ +import warnings +from typing import Any, Dict, Iterable, List, Optional, Set # noqa + +from yarl import URL + +from .typedefs import LooseHeaders, StrOrURL +from .web_response import Response + +__all__ = ( + "HTTPException", + "HTTPError", + "HTTPRedirection", + 
"HTTPSuccessful", + "HTTPOk", + "HTTPCreated", + "HTTPAccepted", + "HTTPNonAuthoritativeInformation", + "HTTPNoContent", + "HTTPResetContent", + "HTTPPartialContent", + "HTTPMove", + "HTTPMultipleChoices", + "HTTPMovedPermanently", + "HTTPFound", + "HTTPSeeOther", + "HTTPNotModified", + "HTTPUseProxy", + "HTTPTemporaryRedirect", + "HTTPPermanentRedirect", + "HTTPClientError", + "HTTPBadRequest", + "HTTPUnauthorized", + "HTTPPaymentRequired", + "HTTPForbidden", + "HTTPNotFound", + "HTTPMethodNotAllowed", + "HTTPNotAcceptable", + "HTTPProxyAuthenticationRequired", + "HTTPRequestTimeout", + "HTTPConflict", + "HTTPGone", + "HTTPLengthRequired", + "HTTPPreconditionFailed", + "HTTPRequestEntityTooLarge", + "HTTPRequestURITooLong", + "HTTPUnsupportedMediaType", + "HTTPRequestRangeNotSatisfiable", + "HTTPExpectationFailed", + "HTTPMisdirectedRequest", + "HTTPUnprocessableEntity", + "HTTPFailedDependency", + "HTTPUpgradeRequired", + "HTTPPreconditionRequired", + "HTTPTooManyRequests", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPUnavailableForLegalReasons", + "HTTPServerError", + "HTTPInternalServerError", + "HTTPNotImplemented", + "HTTPBadGateway", + "HTTPServiceUnavailable", + "HTTPGatewayTimeout", + "HTTPVersionNotSupported", + "HTTPVariantAlsoNegotiates", + "HTTPInsufficientStorage", + "HTTPNotExtended", + "HTTPNetworkAuthenticationRequired", +) + + +class NotAppKeyWarning(UserWarning): + """Warning when not using AppKey in Application.""" + + +############################################################ +# HTTP Exceptions +############################################################ + + +class HTTPException(Response, Exception): + + # You should set in subclasses: + # status = 200 + + status_code = -1 + empty_body = False + + __http_exception__ = True + + def __init__( + self, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if body is not None: + warnings.warn( + "body argument is deprecated for http web exceptions", + DeprecationWarning, + ) + Response.__init__( + self, + status=self.status_code, + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + Exception.__init__(self, self.reason) + if self.body is None and not self.empty_body: + self.text = f"{self.status}: {self.reason}" + + def __bool__(self) -> bool: + return True + + +class HTTPError(HTTPException): + """Base class for exceptions with status codes in the 400s and 500s.""" + + +class HTTPRedirection(HTTPException): + """Base class for exceptions with status codes in the 300s.""" + + +class HTTPSuccessful(HTTPException): + """Base class for exceptions with status codes in the 200s.""" + + +class HTTPOk(HTTPSuccessful): + status_code = 200 + + +class HTTPCreated(HTTPSuccessful): + status_code = 201 + + +class HTTPAccepted(HTTPSuccessful): + status_code = 202 + + +class HTTPNonAuthoritativeInformation(HTTPSuccessful): + status_code = 203 + + +class HTTPNoContent(HTTPSuccessful): + status_code = 204 + empty_body = True + + +class HTTPResetContent(HTTPSuccessful): + status_code = 205 + empty_body = True + + +class HTTPPartialContent(HTTPSuccessful): + status_code = 206 + + +############################################################ +# 3xx redirection +############################################################ + + +class HTTPMove(HTTPRedirection): + def __init__( + self, + location: StrOrURL, + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] 
= None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + if not location: + raise ValueError("HTTP redirects need a location to redirect to.") + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Location"] = str(URL(location)) + self.location = location + + +class HTTPMultipleChoices(HTTPMove): + status_code = 300 + + +class HTTPMovedPermanently(HTTPMove): + status_code = 301 + + +class HTTPFound(HTTPMove): + status_code = 302 + + +# This one is safe after a POST (the redirected location will be +# retrieved with GET): +class HTTPSeeOther(HTTPMove): + status_code = 303 + + +class HTTPNotModified(HTTPRedirection): + # FIXME: this should include a date or etag header + status_code = 304 + empty_body = True + + +class HTTPUseProxy(HTTPMove): + # Not a move, but looks a little like one + status_code = 305 + + +class HTTPTemporaryRedirect(HTTPMove): + status_code = 307 + + +class HTTPPermanentRedirect(HTTPMove): + status_code = 308 + + +############################################################ +# 4xx client error +############################################################ + + +class HTTPClientError(HTTPError): + pass + + +class HTTPBadRequest(HTTPClientError): + status_code = 400 + + +class HTTPUnauthorized(HTTPClientError): + status_code = 401 + + +class HTTPPaymentRequired(HTTPClientError): + status_code = 402 + + +class HTTPForbidden(HTTPClientError): + status_code = 403 + + +class HTTPNotFound(HTTPClientError): + status_code = 404 + + +class HTTPMethodNotAllowed(HTTPClientError): + status_code = 405 + + def __init__( + self, + method: str, + allowed_methods: Iterable[str], + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + allow = ",".join(sorted(allowed_methods)) + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self.headers["Allow"] = allow + self.allowed_methods: Set[str] = set(allowed_methods) + self.method = method.upper() + + +class HTTPNotAcceptable(HTTPClientError): + status_code = 406 + + +class HTTPProxyAuthenticationRequired(HTTPClientError): + status_code = 407 + + +class HTTPRequestTimeout(HTTPClientError): + status_code = 408 + + +class HTTPConflict(HTTPClientError): + status_code = 409 + + +class HTTPGone(HTTPClientError): + status_code = 410 + + +class HTTPLengthRequired(HTTPClientError): + status_code = 411 + + +class HTTPPreconditionFailed(HTTPClientError): + status_code = 412 + + +class HTTPRequestEntityTooLarge(HTTPClientError): + status_code = 413 + + def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: + kwargs.setdefault( + "text", + "Maximum request body size {} exceeded, " + "actual body size {}".format(max_size, actual_size), + ) + super().__init__(**kwargs) + + +class HTTPRequestURITooLong(HTTPClientError): + status_code = 414 + + +class HTTPUnsupportedMediaType(HTTPClientError): + status_code = 415 + + +class HTTPRequestRangeNotSatisfiable(HTTPClientError): + status_code = 416 + + +class HTTPExpectationFailed(HTTPClientError): + status_code = 417 + + +class HTTPMisdirectedRequest(HTTPClientError): + status_code = 421 + + +class HTTPUnprocessableEntity(HTTPClientError): + status_code = 422 + + +class HTTPFailedDependency(HTTPClientError): + status_code = 424 + + +class HTTPUpgradeRequired(HTTPClientError): + 
status_code = 426 + + +class HTTPPreconditionRequired(HTTPClientError): + status_code = 428 + + +class HTTPTooManyRequests(HTTPClientError): + status_code = 429 + + +class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): + status_code = 431 + + +class HTTPUnavailableForLegalReasons(HTTPClientError): + status_code = 451 + + def __init__( + self, + link: Optional[StrOrURL], + *, + headers: Optional[LooseHeaders] = None, + reason: Optional[str] = None, + body: Any = None, + text: Optional[str] = None, + content_type: Optional[str] = None, + ) -> None: + super().__init__( + headers=headers, + reason=reason, + body=body, + text=text, + content_type=content_type, + ) + self._link = None + if link: + self._link = URL(link) + self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"' + + @property + def link(self) -> Optional[URL]: + return self._link + + +############################################################ +# 5xx Server Error +############################################################ +# Response status codes beginning with the digit "5" indicate cases in +# which the server is aware that it has erred or is incapable of +# performing the request. Except when responding to a HEAD request, the +# server SHOULD include an entity containing an explanation of the error +# situation, and whether it is a temporary or permanent condition. User +# agents SHOULD display any included entity to the user. These response +# codes are applicable to any request method. + + +class HTTPServerError(HTTPError): + pass + + +class HTTPInternalServerError(HTTPServerError): + status_code = 500 + + +class HTTPNotImplemented(HTTPServerError): + status_code = 501 + + +class HTTPBadGateway(HTTPServerError): + status_code = 502 + + +class HTTPServiceUnavailable(HTTPServerError): + status_code = 503 + + +class HTTPGatewayTimeout(HTTPServerError): + status_code = 504 + + +class HTTPVersionNotSupported(HTTPServerError): + status_code = 505 + + +class HTTPVariantAlsoNegotiates(HTTPServerError): + status_code = 506 + + +class HTTPInsufficientStorage(HTTPServerError): + status_code = 507 + + +class HTTPNotExtended(HTTPServerError): + status_code = 510 + + +class HTTPNetworkAuthenticationRequired(HTTPServerError): + status_code = 511 diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_fileresponse.py b/venv/lib/python3.12/site-packages/aiohttp/web_fileresponse.py new file mode 100644 index 0000000..0f39c70 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_fileresponse.py @@ -0,0 +1,285 @@ +import asyncio +import mimetypes +import os +import pathlib +from typing import ( # noqa + IO, + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Final, + Iterator, + List, + Optional, + Tuple, + Union, + cast, +) + +from . 
import hdrs +from .abc import AbstractStreamWriter +from .helpers import ETAG_ANY, ETag, must_be_empty_body +from .typedefs import LooseHeaders, PathLike +from .web_exceptions import ( + HTTPNotModified, + HTTPPartialContent, + HTTPPreconditionFailed, + HTTPRequestRangeNotSatisfiable, +) +from .web_response import StreamResponse + +__all__ = ("FileResponse",) + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] + + +NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) + + +class FileResponse(StreamResponse): + """A response object can be used to send files.""" + + def __init__( + self, + path: PathLike, + chunk_size: int = 256 * 1024, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + super().__init__(status=status, reason=reason, headers=headers) + + self._path = pathlib.Path(path) + self._chunk_size = chunk_size + + async def _sendfile_fallback( + self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + # To keep memory usage low,fobj is transferred in chunks + # controlled by the constructor's chunk_size argument. + + chunk_size = self._chunk_size + loop = asyncio.get_event_loop() + + await loop.run_in_executor(None, fobj.seek, offset) + + chunk = await loop.run_in_executor(None, fobj.read, chunk_size) + while chunk: + await writer.write(chunk) + count = count - chunk_size + if count <= 0: + break + chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) + + await writer.drain() + return writer + + async def _sendfile( + self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + writer = await super().prepare(request) + assert writer is not None + + if NOSENDFILE or self.compression: + return await self._sendfile_fallback(writer, fobj, offset, count) + + loop = request._loop + transport = request.transport + assert transport is not None + + try: + await loop.sendfile(transport, fobj, offset, count) + except NotImplementedError: + return await self._sendfile_fallback(writer, fobj, offset, count) + + await super().write_eof() + return writer + + @staticmethod + def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: + if len(etags) == 1 and etags[0].value == ETAG_ANY: + return True + return any(etag.value == etag_value for etag in etags if not etag.is_weak) + + async def _not_modified( + self, request: "BaseRequest", etag_value: str, last_modified: float + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPNotModified.status_code) + self._length_check = False + self.etag = etag_value # type: ignore[assignment] + self.last_modified = last_modified # type: ignore[assignment] + # Delete any Content-Length headers provided by user. 
HTTP 304 + # should always have empty response body + return await super().prepare(request) + + async def _precondition_failed( + self, request: "BaseRequest" + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPPreconditionFailed.status_code) + self.content_length = 0 + return await super().prepare(request) + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + filepath = self._path + + gzip = False + if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""): + gzip_path = filepath.with_name(filepath.name + ".gz") + + if gzip_path.is_file(): + filepath = gzip_path + gzip = True + + loop = asyncio.get_event_loop() + st: os.stat_result = await loop.run_in_executor(None, filepath.stat) + + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + + # https://tools.ietf.org/html/rfc7232#section-6 + ifmatch = request.if_match + if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): + return await self._precondition_failed(request) + + unmodsince = request.if_unmodified_since + if ( + unmodsince is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return await self._precondition_failed(request) + + ifnonematch = request.if_none_match + if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): + return await self._not_modified(request, etag_value, last_modified) + + modsince = request.if_modified_since + if ( + modsince is not None + and ifnonematch is None + and st.st_mtime <= modsince.timestamp() + ): + return await self._not_modified(request, etag_value, last_modified) + + if hdrs.CONTENT_TYPE not in self.headers: + ct, encoding = mimetypes.guess_type(str(filepath)) + if not ct: + ct = "application/octet-stream" + should_set_ct = True + else: + encoding = "gzip" if gzip else None + should_set_ct = False + + status = self._status + file_size = st.st_size + count = file_size + + start = None + + ifrange = request.if_range + if ifrange is None or st.st_mtime <= ifrange.timestamp(): + # If-Range header check: + # condition = cached date >= last modification date + # return 206 if True else 200. + # if False: + # Range header would not be processed, return 200 + # if True but Range header missing + # return 200 + try: + rng = request.http_range + start = rng.start + end = rng.stop + except ValueError: + # https://tools.ietf.org/html/rfc7233: + # A server generating a 416 (Range Not Satisfiable) response to + # a byte-range request SHOULD send a Content-Range header field + # with an unsatisfied-range value. + # The complete-length in a 416 response indicates the current + # length of the selected representation. + # + # Will do the same below. 
Many servers ignore this and do not + # send a Content-Range header with HTTP 416 + self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + # If a range request has been made, convert start, end slice + # notation into file pointer offset and count + if start is not None or end is not None: + if start < 0 and end is None: # return tail of file + start += file_size + if start < 0: + # if Range:bytes=-1000 in request header but file size + # is only 200, there would be trouble without this + start = 0 + count = file_size - start + else: + # rfc7233:If the last-byte-pos value is + # absent, or if the value is greater than or equal to + # the current length of the representation data, + # the byte range is interpreted as the remainder + # of the representation (i.e., the server replaces the + # value of last-byte-pos with a value that is one less than + # the current length of the selected representation). + count = ( + min(end if end is not None else file_size, file_size) - start + ) + + if start >= file_size: + # HTTP 416 should be returned in this case. + # + # According to https://tools.ietf.org/html/rfc7233: + # If a valid byte-range-set includes at least one + # byte-range-spec with a first-byte-pos that is less than + # the current length of the representation, or at least one + # suffix-byte-range-spec with a non-zero suffix-length, + # then the byte-range-set is satisfiable. Otherwise, the + # byte-range-set is unsatisfiable. + self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + status = HTTPPartialContent.status_code + # Even though you are sending the whole file, you should still + # return a HTTP 206 for a Range request. + self.set_status(status) + + if should_set_ct: + self.content_type = ct # type: ignore[assignment] + if encoding: + self.headers[hdrs.CONTENT_ENCODING] = encoding + if gzip: + self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + + self.etag = etag_value # type: ignore[assignment] + self.last_modified = st.st_mtime # type: ignore[assignment] + self.content_length = count + + self.headers[hdrs.ACCEPT_RANGES] = "bytes" + + real_start = cast(int, start) + + if status == HTTPPartialContent.status_code: + self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( + real_start, real_start + count - 1, file_size + ) + + # If we are sending 0 bytes calling sendfile() will throw a ValueError + if count == 0 or must_be_empty_body(request.method, self.status): + return await super().prepare(request) + + fobj = await loop.run_in_executor(None, filepath.open, "rb") + if start: # be aware that start could be None or int=0 here. 
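+            # (Illustrative note: a request of "Range: bytes=0-499" parses to
+            # start=0, which is falsy, so the else branch below also yields
+            # offset = 0; the truthiness test covers both the None and 0 cases.)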
+ offset = start + else: + offset = 0 + + try: + return await self._sendfile(request, fobj, offset, count) + finally: + await asyncio.shield(loop.run_in_executor(None, fobj.close)) diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_log.py b/venv/lib/python3.12/site-packages/aiohttp/web_log.py new file mode 100644 index 0000000..633e9e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_log.py @@ -0,0 +1,213 @@ +import datetime +import functools +import logging +import os +import re +import time as time_mod +from collections import namedtuple +from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa + +from .abc import AbstractAccessLogger +from .web_request import BaseRequest +from .web_response import StreamResponse + +KeyMethod = namedtuple("KeyMethod", "key method") + + +class AccessLogger(AbstractAccessLogger): + """Helper object to log access. + + Usage: + log = logging.getLogger("spam") + log_format = "%a %{User-Agent}i" + access_logger = AccessLogger(log, log_format) + access_logger.log(request, response, time) + + Format: + %% The percent sign + %a Remote IP-address (IP-address of proxy if using reverse proxy) + %t Time when the request was started to process + %P The process ID of the child that serviced the request + %r First line of request + %s Response status code + %b Size of response in bytes, including HTTP headers + %T Time taken to serve the request, in seconds + %Tf Time taken to serve the request, in seconds with floating fraction + in .06f format + %D Time taken to serve the request, in microseconds + %{FOO}i request.headers['FOO'] + %{FOO}o response.headers['FOO'] + %{FOO}e os.environ['FOO'] + + """ + + LOG_FORMAT_MAP = { + "a": "remote_address", + "t": "request_start_time", + "P": "process_id", + "r": "first_request_line", + "s": "response_status", + "b": "response_size", + "T": "request_time", + "Tf": "request_time_frac", + "D": "request_time_micro", + "i": "request_header", + "o": "response_header", + } + + LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' + FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") + CLEANUP_RE = re.compile(r"(%[^s])") + _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} + + def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: + """Initialise the logger. + + logger is a logger object to be used for logging. + log_format is a string with apache compatible log format description. + + """ + super().__init__(logger, log_format=log_format) + + _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) + if not _compiled_format: + _compiled_format = self.compile_format(log_format) + AccessLogger._FORMAT_CACHE[log_format] = _compiled_format + + self._log_format, self._methods = _compiled_format + + def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: + """Translate log_format into form usable by modulo formatting + + All known atoms will be replaced with %s + Also methods for formatting of those atoms will be added to + _methods in appropriate order + + For example we have log_format = "%a %t" + This format will be translated to "%s %s" + Also contents of _methods will be + [self._format_a, self._format_t] + These method will be called and results will be passed + to translated string format. 
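+
+        Likewise, a keyed atom such as "%{User-Agent}i" is replaced with "%s",
+        and functools.partial(AccessLogger._format_i, "User-Agent") is
+        appended to _methods.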
+ + Each _format_* method receive 'args' which is list of arguments + given to self.log + + Exceptions are _format_e, _format_i and _format_o methods which + also receive key name (by functools.partial) + + """ + # list of (key, method) tuples, we don't use an OrderedDict as users + # can repeat the same key more than once + methods = list() + + for atom in self.FORMAT_RE.findall(log_format): + if atom[1] == "": + format_key1 = self.LOG_FORMAT_MAP[atom[0]] + m = getattr(AccessLogger, "_format_%s" % atom[0]) + key_method = KeyMethod(format_key1, m) + else: + format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) + m = getattr(AccessLogger, "_format_%s" % atom[2]) + key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) + + methods.append(key_method) + + log_format = self.FORMAT_RE.sub(r"%s", log_format) + log_format = self.CLEANUP_RE.sub(r"%\1", log_format) + return log_format, methods + + @staticmethod + def _format_i( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + if request is None: + return "(no headers)" + + # suboptimal, make istr(key) once + return request.headers.get(key, "-") + + @staticmethod + def _format_o( + key: str, request: BaseRequest, response: StreamResponse, time: float + ) -> str: + # suboptimal, make istr(key) once + return response.headers.get(key, "-") + + @staticmethod + def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + ip = request.remote + return ip if ip is not None else "-" + + @staticmethod + def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: + tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone)) + now = datetime.datetime.now(tz) + start_time = now - datetime.timedelta(seconds=time) + return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]") + + @staticmethod + def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "<%s>" % os.getpid() + + @staticmethod + def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: + if request is None: + return "-" + return "{} {} HTTP/{}.{}".format( + request.method, + request.path_qs, + request.version.major, + request.version.minor, + ) + + @staticmethod + def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.status + + @staticmethod + def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: + return response.body_length + + @staticmethod + def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time)) + + @staticmethod + def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: + return "%06f" % time + + @staticmethod + def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: + return str(round(time * 1000000)) + + def _format_line( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: + return [(key, method(request, response, time)) for key, method in self._methods] + + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + if not self.logger.isEnabledFor(logging.INFO): + # Avoid formatting the log line if it will not be emitted. 
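+            # (Illustrative: if the access logger is configured above INFO,
+            # e.g. logging.getLogger("aiohttp.access").setLevel(logging.WARNING),
+            # the formatting work below is skipped for every request.)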
+ return + try: + fmt_info = self._format_line(request, response, time) + + values = list() + extra = dict() + for key, value in fmt_info: + values.append(value) + + if key.__class__ is str: + extra[key] = value + else: + k1, k2 = key # type: ignore[misc] + dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] + dct[k2] = value # type: ignore[index,has-type] + extra[k1] = dct # type: ignore[has-type,assignment] + + self.logger.info(self._log_format % tuple(values), extra=extra) + except Exception: + self.logger.exception("Error in logging") diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_middlewares.py b/venv/lib/python3.12/site-packages/aiohttp/web_middlewares.py new file mode 100644 index 0000000..cb24eec --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_middlewares.py @@ -0,0 +1,116 @@ +import re +from typing import TYPE_CHECKING, Tuple, Type, TypeVar + +from .typedefs import Handler, Middleware +from .web_exceptions import HTTPMove, HTTPPermanentRedirect +from .web_request import Request +from .web_response import StreamResponse +from .web_urldispatcher import SystemRoute + +__all__ = ( + "middleware", + "normalize_path_middleware", +) + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + +_Func = TypeVar("_Func") + + +async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: + alt_request = request.clone(rel_url=path) + + match_info = await request.app.router.resolve(alt_request) + alt_request._match_info = match_info + + if match_info.http_exception is None: + return True, alt_request + + return False, request + + +def middleware(f: _Func) -> _Func: + f.__middleware_version__ = 1 # type: ignore[attr-defined] + return f + + +def normalize_path_middleware( + *, + append_slash: bool = True, + remove_slash: bool = False, + merge_slashes: bool = True, + redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, +) -> Middleware: + """Factory for producing a middleware that normalizes the path of a request. + + Normalizing means: + - Add or remove a trailing slash to the path. + - Double slashes are replaced by one. + + The middleware returns as soon as it finds a path that resolves + correctly. The order if both merge and append/remove are enabled is + 1) merge slashes + 2) append/remove slash + 3) both merge slashes and append/remove slash. + If the path resolves with at least one of those conditions, it will + redirect to the new path. + + Only one of `append_slash` and `remove_slash` can be enabled. If both + are `True` the factory will raise an assertion error + + If `append_slash` is `True` the middleware will append a slash when + needed. If a resource is defined with trailing slash and the request + comes without it, it will append it automatically. + + If `remove_slash` is `True`, `append_slash` must be `False`. When enabled + the middleware will remove trailing slashes and redirect if the resource + is defined + + If merge_slashes is True, merge multiple consecutive slashes in the + path into one. + """ + correct_configuration = not (append_slash and remove_slash) + assert correct_configuration, "Cannot both remove and append slash" + + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + if isinstance(request.match_info.route, SystemRoute): + paths_to_check = [] + if "?" in request.raw_path: + path, query = request.raw_path.split("?", 1) + query = "?" 
+ query + else: + query = "" + path = request.raw_path + + if merge_slashes: + paths_to_check.append(re.sub("//+", "/", path)) + if append_slash and not request.path.endswith("/"): + paths_to_check.append(path + "/") + if remove_slash and request.path.endswith("/"): + paths_to_check.append(path[:-1]) + if merge_slashes and append_slash: + paths_to_check.append(re.sub("//+", "/", path + "/")) + if merge_slashes and remove_slash: + merged_slashes = re.sub("//+", "/", path) + paths_to_check.append(merged_slashes[:-1]) + + for path in paths_to_check: + path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg + resolves, request = await _check_request_resolves(request, path) + if resolves: + raise redirect_class(request.raw_path + query) + + return await handler(request) + + return impl + + +def _fix_request_current_app(app: "Application") -> Middleware: + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + with request.match_info.set_current_app(app): + return await handler(request) + + return impl diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_protocol.py b/venv/lib/python3.12/site-packages/aiohttp/web_protocol.py new file mode 100644 index 0000000..d0ed059 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_protocol.py @@ -0,0 +1,698 @@ +import asyncio +import asyncio.streams +import traceback +import warnings +from collections import deque +from contextlib import suppress +from html import escape as html_escape +from http import HTTPStatus +from logging import Logger +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Deque, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import attr +import yarl + +from .abc import AbstractAccessLogger, AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import ceil_timeout +from .http import ( + HttpProcessingError, + HttpRequestParser, + HttpVersion10, + RawRequestMessage, + StreamWriter, +) +from .log import access_logger, server_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .tcp_helpers import tcp_keepalive +from .web_exceptions import HTTPException +from .web_log import AccessLogger +from .web_request import BaseRequest +from .web_response import Response, StreamResponse + +__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") + +if TYPE_CHECKING: # pragma: no cover + from .web_server import Server + + +_RequestFactory = Callable[ + [ + RawRequestMessage, + StreamReader, + "RequestHandler", + AbstractStreamWriter, + "asyncio.Task[None]", + ], + BaseRequest, +] + +_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] + +ERROR = RawRequestMessage( + "UNKNOWN", + "/", + HttpVersion10, + {}, # type: ignore[arg-type] + {}, # type: ignore[arg-type] + True, + None, + False, + False, + yarl.URL("/"), +) + + +class RequestPayloadError(Exception): + """Payload parsing error.""" + + +class PayloadAccessError(Exception): + """Payload was accessed after response was sent.""" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class _ErrInfo: + status: int + exc: BaseException + message: str + + +_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] + + +class RequestHandler(BaseProtocol): + """HTTP protocol implementation. + + RequestHandler handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. 
+ + RequestHandler handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + keepalive_timeout -- number of seconds before closing + keep-alive connection + + tcp_keepalive -- TCP keep-alive is on, default is on + + debug -- enable debug mode + + logger -- custom logger object + + access_log_class -- custom class for access_logger + + access_log -- custom logging object + + access_log_format -- access log format string + + loop -- Optional event loop + + max_line_size -- Optional maximum header line size + + max_field_size -- Optional maximum header field size + + max_headers -- Optional maximum header size + + timeout_ceil_threshold -- Optional value to specify + threshold to ceil() timeout + values + + """ + + KEEPALIVE_RESCHEDULE_DELAY = 1 + + __slots__ = ( + "_request_count", + "_keepalive", + "_manager", + "_request_handler", + "_request_factory", + "_tcp_keepalive", + "_keepalive_time", + "_keepalive_handle", + "_keepalive_timeout", + "_lingering_time", + "_messages", + "_message_tail", + "_waiter", + "_task_handler", + "_upgrade", + "_payload_parser", + "_request_parser", + "_reading_paused", + "logger", + "debug", + "access_log", + "access_logger", + "_close", + "_force_close", + "_current_request", + "_timeout_ceil_threshold", + ) + + def __init__( + self, + manager: "Server", + *, + loop: asyncio.AbstractEventLoop, + keepalive_timeout: float = 75.0, # NGINX default is 75 secs + tcp_keepalive: bool = True, + logger: Logger = server_logger, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log: Logger = access_logger, + access_log_format: str = AccessLogger.LOG_FORMAT, + debug: bool = False, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lingering_time: float = 10.0, + read_bufsize: int = 2**16, + auto_decompress: bool = True, + timeout_ceil_threshold: float = 5, + ): + super().__init__(loop) + + self._request_count = 0 + self._keepalive = False + self._current_request: Optional[BaseRequest] = None + self._manager: Optional[Server] = manager + self._request_handler: Optional[_RequestHandler] = manager.request_handler + self._request_factory: Optional[_RequestFactory] = manager.request_factory + + self._tcp_keepalive = tcp_keepalive + # placeholder to be replaced on keepalive timeout setup + self._keepalive_time = 0.0 + self._keepalive_handle: Optional[asyncio.Handle] = None + self._keepalive_timeout = keepalive_timeout + self._lingering_time = float(lingering_time) + + self._messages: Deque[_MsgType] = deque() + self._message_tail = b"" + + self._waiter: Optional[asyncio.Future[None]] = None + self._task_handler: Optional[asyncio.Task[None]] = None + + self._upgrade = False + self._payload_parser: Any = None + self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( + self, + loop, + read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers, + payload_exception=RequestPayloadError, + auto_decompress=auto_decompress, + ) + + self._timeout_ceil_threshold: float = 5 + try: + self._timeout_ceil_threshold = float(timeout_ceil_threshold) + except (TypeError, ValueError): + pass + + self.logger = logger + self.debug = debug + self.access_log = access_log + if access_log: + self.access_logger: Optional[AbstractAccessLogger] = access_log_class( + access_log, access_log_format + ) + else: + self.access_logger = None + + self._close = False + self._force_close = False + + def 
__repr__(self) -> str: + return "<{} {}>".format( + self.__class__.__name__, + "connected" if self.transport is not None else "disconnected", + ) + + @property + def keepalive_timeout(self) -> float: + return self._keepalive_timeout + + async def shutdown(self, timeout: Optional[float] = 15.0) -> None: + """Do worker process exit preparations. + + We need to clean up everything and stop accepting requests. + It is especially important for keep-alive connections. + """ + self._force_close = True + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._waiter: + self._waiter.cancel() + + # wait for handlers + with suppress(asyncio.CancelledError, asyncio.TimeoutError): + async with ceil_timeout(timeout): + if self._current_request is not None: + self._current_request._cancel(asyncio.CancelledError()) + + if self._task_handler is not None and not self._task_handler.done(): + await self._task_handler + + # force-close non-idle handler + if self._task_handler is not None: + self._task_handler.cancel() + + if self.transport is not None: + self.transport.close() + self.transport = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + super().connection_made(transport) + + real_transport = cast(asyncio.Transport, transport) + if self._tcp_keepalive: + tcp_keepalive(real_transport) + + self._task_handler = self._loop.create_task(self.start()) + assert self._manager is not None + self._manager.connection_made(self, real_transport) + + def connection_lost(self, exc: Optional[BaseException]) -> None: + if self._manager is None: + return + self._manager.connection_lost(self, exc) + + super().connection_lost(exc) + + # Grab value before setting _manager to None. + handler_cancellation = self._manager.handler_cancellation + + self._manager = None + self._force_close = True + self._request_factory = None + self._request_handler = None + self._request_parser = None + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._current_request is not None: + if exc is None: + exc = ConnectionResetError("Connection lost") + self._current_request._cancel(exc) + + if self._waiter is not None: + self._waiter.cancel() + + if handler_cancellation and self._task_handler is not None: + self._task_handler.cancel() + + self._task_handler = None + + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + + def set_parser(self, parser: Any) -> None: + # Actual type is WebReader + assert self._payload_parser is None + + self._payload_parser = parser + + if self._message_tail: + self._payload_parser.feed_data(self._message_tail) + self._message_tail = b"" + + def eof_received(self) -> None: + pass + + def data_received(self, data: bytes) -> None: + if self._force_close or self._close: + return + # parse http messages + messages: Sequence[_MsgType] + if self._payload_parser is None and not self._upgrade: + assert self._request_parser is not None + try: + messages, upgraded, tail = self._request_parser.feed_data(data) + except HttpProcessingError as exc: + messages = [ + (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) + ] + upgraded = False + tail = b"" + + for msg, payload in messages or (): + self._request_count += 1 + self._messages.append((msg, payload)) + + waiter = self._waiter + if messages and waiter is not None and not waiter.done(): + # don't set result twice + waiter.set_result(None) + + self._upgrade = upgraded + if upgraded and tail: + self._message_tail = 
tail + + # no parser, just store + elif self._payload_parser is None and self._upgrade and data: + self._message_tail += data + + # feed payload + elif data: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self.close() + + def keep_alive(self, val: bool) -> None: + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + if self._keepalive_handle: + self._keepalive_handle.cancel() + self._keepalive_handle = None + + def close(self) -> None: + """Close connection. + + Stop accepting new pipelining messages and close + connection when handlers done processing messages. + """ + self._close = True + if self._waiter: + self._waiter.cancel() + + def force_close(self) -> None: + """Forcefully close connection.""" + self._force_close = True + if self._waiter: + self._waiter.cancel() + if self.transport is not None: + self.transport.close() + self.transport = None + + def log_access( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> None: + if self.access_logger is not None: + self.access_logger.log(request, response, self._loop.time() - time) + + def log_debug(self, *args: Any, **kw: Any) -> None: + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args: Any, **kw: Any) -> None: + self.logger.exception(*args, **kw) + + def _process_keepalive(self) -> None: + if self._force_close or not self._keepalive: + return + + next = self._keepalive_time + self._keepalive_timeout + + # handler in idle state + if self._waiter: + if self._loop.time() > next: + self.force_close() + return + + # not all request handlers are done, + # reschedule itself to next second + self._keepalive_handle = self._loop.call_later( + self.KEEPALIVE_RESCHEDULE_DELAY, + self._process_keepalive, + ) + + async def _handle_request( + self, + request: BaseRequest, + start_time: float, + request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], + ) -> Tuple[StreamResponse, bool]: + assert self._request_handler is not None + try: + try: + self._current_request = request + resp = await request_handler(request) + finally: + self._current_request = None + except HTTPException as exc: + resp = exc + reset = await self.finish_response(request, resp, start_time) + except asyncio.CancelledError: + raise + except asyncio.TimeoutError as exc: + self.log_debug("Request handler timed out.", exc_info=exc) + resp = self.handle_error(request, 504) + reset = await self.finish_response(request, resp, start_time) + except Exception as exc: + resp = self.handle_error(request, 500, exc) + reset = await self.finish_response(request, resp, start_time) + else: + # Deprecation warning (See #2415) + if getattr(resp, "__http_exception__", False): + warnings.warn( + "returning HTTPException object is deprecated " + "(#2415) and will be removed, " + "please raise the exception instead", + DeprecationWarning, + ) + + reset = await self.finish_response(request, resp, start_time) + + return resp, reset + + async def start(self) -> None: + """Process incoming request. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. 
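+
+        Each request is handled in a freshly created task so that context
+        variables set while handling one request do not leak into the next
+        (see the "#3406" comment in the implementation).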
+ """ + loop = self._loop + handler = self._task_handler + assert handler is not None + manager = self._manager + assert manager is not None + keepalive_timeout = self._keepalive_timeout + resp = None + assert self._request_factory is not None + assert self._request_handler is not None + + while not self._force_close: + if not self._messages: + try: + # wait for next request + self._waiter = loop.create_future() + await self._waiter + except asyncio.CancelledError: + break + finally: + self._waiter = None + + message, payload = self._messages.popleft() + + start = loop.time() + + manager.requests_count += 1 + writer = StreamWriter(self, loop) + if isinstance(message, _ErrInfo): + # make request_factory work + request_handler = self._make_error_handler(message) + message = ERROR + else: + request_handler = self._request_handler + + request = self._request_factory(message, payload, self, writer, handler) + try: + # a new task is used for copy context vars (#3406) + task = self._loop.create_task( + self._handle_request(request, start, request_handler) + ) + try: + resp, reset = await task + except (asyncio.CancelledError, ConnectionError): + self.log_debug("Ignored premature client disconnection") + break + + # Drop the processed task from asyncio.Task.all_tasks() early + del task + if reset: + self.log_debug("Ignored premature client disconnection 2") + break + + # notify server about keep-alive + self._keepalive = bool(resp.keep_alive) + + # check payload + if not payload.is_eof(): + lingering_time = self._lingering_time + if not self._force_close and lingering_time: + self.log_debug( + "Start lingering close timer for %s sec.", lingering_time + ) + + now = loop.time() + end_t = now + lingering_time + + with suppress(asyncio.TimeoutError, asyncio.CancelledError): + while not payload.is_eof() and now < end_t: + async with ceil_timeout(end_t - now): + # read and ignore + await payload.readany() + now = loop.time() + + # if payload still uncompleted + if not payload.is_eof() and not self._force_close: + self.log_debug("Uncompleted request.") + self.close() + + payload.set_exception(PayloadAccessError()) + + except asyncio.CancelledError: + self.log_debug("Ignored premature client disconnection ") + break + except RuntimeError as exc: + if self.debug: + self.log_exception("Unhandled runtime exception", exc_info=exc) + self.force_close() + except Exception as exc: + self.log_exception("Unhandled exception", exc_info=exc) + self.force_close() + finally: + if self.transport is None and resp is not None: + self.log_debug("Ignored premature client disconnection.") + elif not self._force_close: + if self._keepalive and not self._close: + # start keep-alive timer + if keepalive_timeout is not None: + now = self._loop.time() + self._keepalive_time = now + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + now + keepalive_timeout, self._process_keepalive + ) + else: + break + + # remove handler, close transport if no handlers left + if not self._force_close: + self._task_handler = None + if self.transport is not None: + self.transport.close() + + async def finish_response( + self, request: BaseRequest, resp: StreamResponse, start_time: float + ) -> bool: + """Prepare the response and write_eof, then log access. + + This has to + be called within the context of any exception so the access logger + can get exception information. Returns True if the client disconnects + prematurely. 
+ """ + if self._request_parser is not None: + self._request_parser.set_upgraded(False) + self._upgrade = False + if self._message_tail: + self._request_parser.feed_data(self._message_tail) + self._message_tail = b"" + try: + prepare_meth = resp.prepare + except AttributeError: + if resp is None: + raise RuntimeError("Missing return " "statement on request handler") + else: + raise RuntimeError( + "Web-handler should return " + "a response instance, " + "got {!r}".format(resp) + ) + try: + await prepare_meth(request) + await resp.write_eof() + except ConnectionError: + self.log_access(request, resp, start_time) + return True + else: + self.log_access(request, resp, start_time) + return False + + def handle_error( + self, + request: BaseRequest, + status: int = 500, + exc: Optional[BaseException] = None, + message: Optional[str] = None, + ) -> StreamResponse: + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection. + """ + self.log_exception("Error handling request", exc_info=exc) + + # some data already got sent, connection is broken + if request.writer.output_size > 0: + raise ConnectionError( + "Response is sent already, cannot send another response " + "with the error message" + ) + + ct = "text/plain" + if status == HTTPStatus.INTERNAL_SERVER_ERROR: + title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) + msg = HTTPStatus.INTERNAL_SERVER_ERROR.description + tb = None + if self.debug: + with suppress(Exception): + tb = traceback.format_exc() + + if "text/html" in request.headers.get("Accept", ""): + if tb: + tb = html_escape(tb) + msg = f"
<h2>Traceback:</h2>\n<pre>{tb}</pre>" + message = ( + "<html><head>" + "<title>{title}</title>" + "</head><body>\n<h1>{title}</h1>" + "\n{msg}\n</body></html>\n" + ).format(title=title, msg=msg) + ct = "text/html" + else: + if tb: + msg = tb + message = title + "\n\n" + msg + + resp = Response(status=status, text=message, content_type=ct) + resp.force_close() + + return resp + + def _make_error_handler( + self, err_info: _ErrInfo + ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: + async def handler(request: BaseRequest) -> StreamResponse: + return self.handle_error( + request, err_info.status, err_info.exc, err_info.message + ) + + return handler diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_request.py b/venv/lib/python3.12/site-packages/aiohttp/web_request.py new file mode 100644 index 0000000..a7e32ca --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_request.py @@ -0,0 +1,898 @@ +import asyncio +import datetime +import io +import re +import socket +import string +import tempfile +import types +import warnings +from http.cookies import SimpleCookie +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Final, + Iterator, + Mapping, + MutableMapping, + Optional, + Pattern, + Tuple, + Union, + cast, +) +from urllib.parse import parse_qsl + +import attr +from multidict import ( + CIMultiDict, + CIMultiDictProxy, + MultiDict, + MultiDictProxy, + MultiMapping, +) +from yarl import URL + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + _SENTINEL, + DEBUG, + ETAG_ANY, + LIST_QUOTED_ETAG_RE, + ChainMapProxy, + ETag, + HeadersMixin, + parse_http_date, + reify, + sentinel, +) +from .http_parser import RawRequestMessage +from .http_writer import HttpVersion +from .multipart import BodyPartReader, MultipartReader +from .streams import EmptyStreamReader, StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseHeaders, + RawHeaders, + StrOrURL, +) +from .web_exceptions import HTTPRequestEntityTooLarge +from .web_response import StreamResponse + +__all__ = ("BaseRequest", "FileField", "Request") + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + from .web_protocol import RequestHandler + from .web_urldispatcher import UrlMappingMatchInfo + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class FileField: + name: str + filename: str + file: io.BufferedReader + content_type: str + headers: "CIMultiDictProxy[str]" + + +_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +# '-' at the end to prevent interpretation as range in a char class + +_TOKEN: Final[str] = rf"[{_TCHAR}]+" + +_QDTEXT: Final[str] = r"[{}]".format( + r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) +) +# qdtext includes 0x5C to escape 0x5D ('\]') +# qdtext excludes obs-text (because obsoleted, and encoding not specified) + +_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" + +_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( + qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR +) + +_FORWARDED_PAIR: Final[ + str +] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING +) + +_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") +# same pattern as _QUOTED_PAIR but contains a capture group + +_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) + +############################################################ +# HTTP Request +############################################################ + + +class BaseRequest(MutableMapping[str, Any], HeadersMixin): + + POST_METHODS = { + 
hdrs.METH_PATCH, + hdrs.METH_POST, + hdrs.METH_PUT, + hdrs.METH_TRACE, + hdrs.METH_DELETE, + } + + ATTRS = HeadersMixin.ATTRS | frozenset( + [ + "_message", + "_protocol", + "_payload_writer", + "_payload", + "_headers", + "_method", + "_version", + "_rel_url", + "_post", + "_read_bytes", + "_state", + "_cache", + "_task", + "_client_max_size", + "_loop", + "_transport_sslcontext", + "_transport_peername", + ] + ) + + def __init__( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: "RequestHandler", + payload_writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + loop: asyncio.AbstractEventLoop, + *, + client_max_size: int = 1024**2, + state: Optional[Dict[str, Any]] = None, + scheme: Optional[str] = None, + host: Optional[str] = None, + remote: Optional[str] = None, + ) -> None: + if state is None: + state = {} + self._message = message + self._protocol = protocol + self._payload_writer = payload_writer + + self._payload = payload + self._headers = message.headers + self._method = message.method + self._version = message.version + self._cache: Dict[str, Any] = {} + url = message.url + if url.is_absolute(): + # absolute URL is given, + # override auto-calculating url, host, and scheme + # all other properties should be good + self._cache["url"] = url + self._cache["host"] = url.host + self._cache["scheme"] = url.scheme + self._rel_url = url.relative() + else: + self._rel_url = message.url + self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + self._read_bytes: Optional[bytes] = None + + self._state = state + self._task = task + self._client_max_size = client_max_size + self._loop = loop + + transport = self._protocol.transport + assert transport is not None + self._transport_sslcontext = transport.get_extra_info("sslcontext") + self._transport_peername = transport.get_extra_info("peername") + + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host + if remote is not None: + self._cache["remote"] = remote + + def clone( + self, + *, + method: Union[str, _SENTINEL] = sentinel, + rel_url: Union[StrOrURL, _SENTINEL] = sentinel, + headers: Union[LooseHeaders, _SENTINEL] = sentinel, + scheme: Union[str, _SENTINEL] = sentinel, + host: Union[str, _SENTINEL] = sentinel, + remote: Union[str, _SENTINEL] = sentinel, + client_max_size: Union[int, _SENTINEL] = sentinel, + ) -> "BaseRequest": + """Clone itself with replacement some attributes. + + Creates and returns a new instance of Request object. If no parameters + are given, an exact copy is returned. If a parameter is not passed, it + will reuse the one from the current request object. 
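+
+        A minimal illustrative sketch (the target path here is arbitrary)::
+
+            # same request, but treated as addressed to a different path
+            alt_request = request.clone(rel_url="/other/path")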
+ """ + if self._read_bytes: + raise RuntimeError("Cannot clone request " "after reading its content") + + dct: Dict[str, Any] = {} + if method is not sentinel: + dct["method"] = method + if rel_url is not sentinel: + new_url: URL = URL(rel_url) + dct["url"] = new_url + dct["path"] = str(new_url) + if headers is not sentinel: + # a copy semantic + dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) + dct["raw_headers"] = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + + message = self._message._replace(**dct) + + kwargs = {} + if scheme is not sentinel: + kwargs["scheme"] = scheme + if host is not sentinel: + kwargs["host"] = host + if remote is not sentinel: + kwargs["remote"] = remote + if client_max_size is sentinel: + client_max_size = self._client_max_size + + return self.__class__( + message, + self._payload, + self._protocol, + self._payload_writer, + self._task, + self._loop, + client_max_size=client_max_size, + state=self._state.copy(), + **kwargs, + ) + + @property + def task(self) -> "asyncio.Task[None]": + return self._task + + @property + def protocol(self) -> "RequestHandler": + return self._protocol + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def writer(self) -> AbstractStreamWriter: + return self._payload_writer + + @property + def client_max_size(self) -> int: + return self._client_max_size + + @reify + def message(self) -> RawRequestMessage: + warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) + return self._message + + @reify + def rel_url(self) -> URL: + return self._rel_url + + @reify + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "request.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + # MutableMapping API + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + + @reify + def secure(self) -> bool: + """A bool indicating if the request is handled with SSL.""" + return self.scheme == "https" + + @reify + def forwarded(self) -> Tuple[Mapping[str, str], ...]: + """A tuple containing all parsed Forwarded header(s). + + Makes an effort to parse Forwarded headers as specified by RFC 7239: + + - It adds one (immutable) dictionary per Forwarded 'field-value', ie + per proxy. The element corresponds to the data in the Forwarded + field-value added by the first proxy encountered by the client. Each + subsequent item corresponds to those added by later proxies. + - It checks that every value has valid syntax in general as specified + in section 4: either a 'token' or a 'quoted-string'. + - It un-escapes found escape sequences. + - It does NOT validate 'by' and 'for' contents as specified in section + 6. + - It does NOT validate 'host' contents (Host ABNF). + - It does NOT validate 'proto' contents for valid URI scheme names. 
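+
+        For example, a header adapted from the RFC 7239 examples,
+        "Forwarded: for=192.0.2.60;proto=http, for=198.51.100.17", parses
+        to two mappings: {'for': '192.0.2.60', 'proto': 'http'} and
+        {'for': '198.51.100.17'}.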
+ + Returns a tuple containing one or more immutable dicts + """ + elems = [] + for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): + length = len(field_value) + pos = 0 + need_separator = False + elem: Dict[str, str] = {} + elems.append(types.MappingProxyType(elem)) + while 0 <= pos < length: + match = _FORWARDED_PAIR_RE.match(field_value, pos) + if match is not None: # got a valid forwarded-pair + if need_separator: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + else: + name, value, port = match.groups() + if value[0] == '"': + # quoted string: remove quotes and unescape + value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) + if port: + value += port + elem[name.lower()] = value + pos += len(match.group(0)) + need_separator = True + elif field_value[pos] == ",": # next forwarded-element + need_separator = False + elem = {} + elems.append(types.MappingProxyType(elem)) + pos += 1 + elif field_value[pos] == ";": # next forwarded-pair + need_separator = False + pos += 1 + elif field_value[pos] in " \t": + # Allow whitespace even between forwarded-pairs, though + # RFC 7239 doesn't. This simplifies code and is in line + # with Postel's law. + pos += 1 + else: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + return tuple(elems) + + @reify + def scheme(self) -> str: + """A string representing the scheme of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(scheme=new_scheme) call. + - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. + + 'http' or 'https'. + """ + if self._transport_sslcontext: + return "https" + else: + return "http" + + @reify + def method(self) -> str: + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._method + + @reify + def version(self) -> HttpVersion: + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._version + + @reify + def host(self) -> str: + """Hostname of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(host=new_host) call. + - HOST HTTP header + - socket.getfqdn() value + """ + host = self._message.headers.get(hdrs.HOST) + if host is not None: + return host + return socket.getfqdn() + + @reify + def remote(self) -> Optional[str]: + """Remote IP of client initiated HTTP request. + + The IP is resolved in this order: + + - overridden value by .clone(remote=new_remote) call. + - peername of opened socket + """ + if self._transport_peername is None: + return None + if isinstance(self._transport_peername, (list, tuple)): + return str(self._transport_peername[0]) + return str(self._transport_peername) + + @reify + def url(self) -> URL: + url = URL.build(scheme=self.scheme, host=self.host) + return url.join(self._rel_url) + + @reify + def path(self) -> str: + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return self._rel_url.path + + @reify + def path_qs(self) -> str: + """The URL including PATH_INFO and the query string. + + E.g, /app/blog?id=10 + """ + return str(self._rel_url) + + @reify + def raw_path(self) -> str: + """The URL including raw *PATH INFO* without the host or scheme. 
+ + Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._message.path + + @reify + def query(self) -> "MultiMapping[str]": + """A multidict with all the variables in the query string.""" + return MultiDictProxy(self._rel_url.query) + + @reify + def query_string(self) -> str: + """The query string in the URL. + + E.g., id=10 + """ + return self._rel_url.query_string + + @reify + def headers(self) -> "MultiMapping[str]": + """A case-insensitive multidict proxy with all headers.""" + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + """A sequence of pairs for all headers.""" + return self._message.raw_headers + + @reify + def if_modified_since(self) -> Optional[datetime.datetime]: + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) + + @reify + def if_unmodified_since(self) -> Optional[datetime.datetime]: + """The value of If-Unmodified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + + @staticmethod + def _etag_values(etag_header: str) -> Iterator[ETag]: + """Extract `ETag` objects from raw header.""" + if etag_header == ETAG_ANY: + yield ETag( + is_weak=False, + value=ETAG_ANY, + ) + else: + for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): + is_weak, value, garbage = match.group(2, 3, 4) + # Any symbol captured by 4th group means + # that the following sequence is invalid. + if garbage: + break + + yield ETag( + is_weak=bool(is_weak), + value=value, + ) + + @classmethod + def _if_match_or_none_impl( + cls, header_value: Optional[str] + ) -> Optional[Tuple[ETag, ...]]: + if not header_value: + return None + + return tuple(cls._etag_values(header_value)) + + @reify + def if_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) + + @reify + def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-None-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) + + @reify + def if_range(self) -> Optional[datetime.datetime]: + """The value of If-Range HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_RANGE)) + + @reify + def keep_alive(self) -> bool: + """Is keepalive enabled by client?""" + return not self._message.should_close + + @reify + def cookies(self) -> Mapping[str, str]: + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, "") + parsed = SimpleCookie(raw) + return MappingProxyType({key: val.value for key, val in parsed.items()}) + + @reify + def http_range(self) -> slice: + """The content of Range HTTP header. + + Return a slice instance. 
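+
+        For example, "Range: bytes=0-499" yields slice(0, 500, 1) (the
+        header's last-byte-pos is inclusive while the slice stop is
+        exclusive), and "Range: bytes=-500" yields slice(-500, None, 1).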
+ + """ + rng = self._headers.get(hdrs.RANGE) + start, end = None, None + if rng is not None: + try: + pattern = r"^bytes=(\d*)-(\d*)$" + start, end = re.findall(pattern, rng)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + + end = int(end) if end else None + start = int(start) if start else None + + if start is None and end is not None: + # end with no start is to return tail of content + start = -end + end = None + + if start is not None and end is not None: + # end is inclusive in range header, exclusive for slice + end += 1 + + if start >= end: + raise ValueError("start cannot be after end") + + if start is end is None: # No valid range supplied + raise ValueError("No start or end of range specified") + + return slice(start, end, 1) + + @reify + def content(self) -> StreamReader: + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + warnings.warn( + "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 + ) + return not self._payload.at_eof() + + @property + def can_read_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + return not self._payload.at_eof() + + @reify + def body_exists(self) -> bool: + """Return True if request has HTTP BODY, False otherwise.""" + return type(self._payload) is not EmptyStreamReader + + async def release(self) -> None: + """Release request. + + Eat unread part of HTTP BODY if present. + """ + while not self._payload.at_eof(): + await self._payload.readany() + + async def read(self) -> bytes: + """Read request body if present. + + Returns bytes object with full request content. 
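+
+        May raise HTTPRequestEntityTooLarge if the accumulated body grows
+        beyond client_max_size while being read.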
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = await self._payload.readany() + body.extend(chunk) + if self._client_max_size: + body_size = len(body) + if body_size >= self._client_max_size: + raise HTTPRequestEntityTooLarge( + max_size=self._client_max_size, actual_size=body_size + ) + if not chunk: + break + self._read_bytes = bytes(body) + return self._read_bytes + + async def text(self) -> str: + """Return BODY as text using encoding from .charset.""" + bytes_body = await self.read() + encoding = self.charset or "utf-8" + return bytes_body.decode(encoding) + + async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: + """Return BODY as JSON.""" + body = await self.text() + return loads(body) + + async def multipart(self) -> MultipartReader: + """Return async iterator to process BODY as multipart.""" + return MultipartReader(self._headers, self._payload) + + async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": + """Return POST parameters.""" + if self._post is not None: + return self._post + if self._method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if content_type not in ( + "", + "application/x-www-form-urlencoded", + "multipart/form-data", + ): + self._post = MultiDictProxy(MultiDict()) + return self._post + + out: MultiDict[Union[str, bytes, FileField]] = MultiDict() + + if content_type == "multipart/form-data": + multipart = await self.multipart() + max_size = self._client_max_size + + field = await multipart.next() + while field is not None: + size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): + assert field.name is not None + + # Note that according to RFC 7578, the Content-Type header + # is optional, even for files, so we can't assume it's + # present. 
+ # https://tools.ietf.org/html/rfc7578#section-4.4 + if field.filename: + # store file in temp file + tmp = tempfile.TemporaryFile() + chunk = await field.read_chunk(size=2**16) + while chunk: + chunk = field.decode(chunk) + tmp.write(chunk) + size += len(chunk) + if 0 < max_size < size: + tmp.close() + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + chunk = await field.read_chunk(size=2**16) + tmp.seek(0) + + if field_ct is None: + field_ct = "application/octet-stream" + + ff = FileField( + field.name, + field.filename, + cast(io.BufferedReader, tmp), + field_ct, + field.headers, + ) + out.add(field.name, ff) + else: + # deal with ordinary data + value = await field.read(decode=True) + if field_ct is None or field_ct.startswith("text/"): + charset = field.get_charset(default="utf-8") + out.add(field.name, value.decode(charset)) + else: + out.add(field.name, value) + size += len(value) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + else: + raise ValueError( + "To decode nested multipart you need " "to use custom reader", + ) + + field = await multipart.next() + else: + data = await self.read() + if data: + charset = self.charset or "utf-8" + out.extend( + parse_qsl( + data.rstrip().decode(charset), + keep_blank_values=True, + encoding=charset, + ) + ) + + self._post = MultiDictProxy(out) + return self._post + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Extra info from protocol transport""" + protocol = self._protocol + if protocol is None: + return default + + transport = protocol.transport + if transport is None: + return default + + return transport.get_extra_info(name, default) + + def __repr__(self) -> str: + ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( + "ascii" + ) + return "<{} {} {} >".format( + self.__class__.__name__, self._method, ascii_encodable_path + ) + + def __eq__(self, other: object) -> bool: + return id(self) == id(other) + + def __bool__(self) -> bool: + return True + + async def _prepare_hook(self, response: StreamResponse) -> None: + return + + def _cancel(self, exc: BaseException) -> None: + self._payload.set_exception(exc) + + +class Request(BaseRequest): + + ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # matchdict, route_name, handler + # or information about traversal lookup + + # initialized after route resolving + self._match_info: Optional[UrlMappingMatchInfo] = None + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom {}.{} attribute " + "is discouraged".format(self.__class__.__name__, name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def clone( + self, + *, + method: Union[str, _SENTINEL] = sentinel, + rel_url: Union[StrOrURL, _SENTINEL] = sentinel, + headers: Union[LooseHeaders, _SENTINEL] = sentinel, + scheme: Union[str, _SENTINEL] = sentinel, + host: Union[str, _SENTINEL] = sentinel, + remote: Union[str, _SENTINEL] = sentinel, + client_max_size: Union[int, _SENTINEL] = sentinel, + ) -> "Request": + ret = super().clone( + method=method, + rel_url=rel_url, + headers=headers, + scheme=scheme, + host=host, + remote=remote, + client_max_size=client_max_size, + ) + new_ret = cast(Request, ret) + new_ret._match_info = self._match_info + return new_ret + + @reify + def match_info(self) -> 
"UrlMappingMatchInfo": + """Result of route resolving.""" + match_info = self._match_info + assert match_info is not None + return match_info + + @property + def app(self) -> "Application": + """Application instance.""" + match_info = self._match_info + assert match_info is not None + return match_info.current_app + + @property + def config_dict(self) -> ChainMapProxy: + match_info = self._match_info + assert match_info is not None + lst = match_info.apps + app = self.app + idx = lst.index(app) + sublist = list(reversed(lst[: idx + 1])) + return ChainMapProxy(sublist) + + async def _prepare_hook(self, response: StreamResponse) -> None: + match_info = self._match_info + if match_info is None: + return + for app in match_info._apps: + await app.on_response_prepare.send(self, response) diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_response.py b/venv/lib/python3.12/site-packages/aiohttp/web_response.py new file mode 100644 index 0000000..e089c60 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_response.py @@ -0,0 +1,817 @@ +import asyncio +import collections.abc +import datetime +import enum +import json +import math +import time +import warnings +from concurrent.futures import Executor +from http import HTTPStatus +from http.cookies import SimpleCookie +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + MutableMapping, + Optional, + Union, + cast, +) + +from multidict import CIMultiDict, istr + +from . import hdrs, payload +from .abc import AbstractStreamWriter +from .compression_utils import ZLibCompressor +from .helpers import ( + ETAG_ANY, + QUOTED_ETAG_RE, + ETag, + HeadersMixin, + must_be_empty_body, + parse_http_date, + rfc822_formatted_time, + sentinel, + should_remove_content_length, + validate_etag_value, +) +from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11 +from .payload import Payload +from .typedefs import JSONEncoder, LooseHeaders + +__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") + + +if TYPE_CHECKING: # pragma: no cover + from .web_request import BaseRequest + + BaseClass = MutableMapping[str, Any] +else: + BaseClass = collections.abc.MutableMapping + + +class ContentCoding(enum.Enum): + # The content codings that we have support for. 
+ # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = "deflate" + gzip = "gzip" + identity = "identity" + + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(BaseClass, HeadersMixin): + + _length_check = True + + def __init__( + self, + *, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + self._body = None + self._keep_alive: Optional[bool] = None + self._chunked = False + self._compression = False + self._compression_force: Optional[ContentCoding] = None + self._cookies = SimpleCookie() + + self._req: Optional[BaseRequest] = None + self._payload_writer: Optional[AbstractStreamWriter] = None + self._eof_sent = False + self._must_be_empty_body: Optional[bool] = None + self._body_length = 0 + self._state: Dict[str, Any] = {} + + if headers is not None: + self._headers: CIMultiDict[str] = CIMultiDict(headers) + else: + self._headers = CIMultiDict() + + self.set_status(status, reason) + + @property + def prepared(self) -> bool: + return self._payload_writer is not None + + @property + def task(self) -> "Optional[asyncio.Task[None]]": + if self._req: + return self._req.task + else: + return None + + @property + def status(self) -> int: + return self._status + + @property + def chunked(self) -> bool: + return self._chunked + + @property + def compression(self) -> bool: + return self._compression + + @property + def reason(self) -> str: + return self._reason + + def set_status( + self, + status: int, + reason: Optional[str] = None, + ) -> None: + assert not self.prepared, ( + "Cannot change the response status code after " "the headers have been sent" + ) + self._status = int(status) + if reason is None: + try: + reason = HTTPStatus(self._status).phrase + except ValueError: + reason = "" + self._reason = reason + + @property + def keep_alive(self) -> Optional[bool]: + return self._keep_alive + + def force_close(self) -> None: + self._keep_alive = False + + @property + def body_length(self) -> int: + return self._body_length + + @property + def output_length(self) -> int: + warnings.warn("output_length is deprecated", DeprecationWarning) + assert self._payload_writer + return self._payload_writer.buffer_size + + def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: + """Enables automatic chunked transfer encoding.""" + self._chunked = True + + if hdrs.CONTENT_LENGTH in self._headers: + raise RuntimeError( + "You can't enable chunked encoding when " "a content length is set" + ) + if chunk_size is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + def enable_compression( + self, force: Optional[Union[bool, ContentCoding]] = None + ) -> None: + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. 
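+        # Usage sketch (editorial, not aiohttp source; `resp` and
+        # `big_payload` are assumed names):
+        #     resp = web.Response(text=big_payload)
+        #     resp.enable_compression(ContentCoding.gzip)
+        # With force=None the coding is negotiated from Accept-Encoding.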
+        if type(force) == bool:
+            force = ContentCoding.deflate if force else ContentCoding.identity
+            warnings.warn(
+                "Using boolean for force is deprecated #3318", DeprecationWarning
+            )
+        elif force is not None:
+            assert isinstance(
+                force, ContentCoding
+            ), "force should be one of None, bool or ContentCoding"
+
+        self._compression = True
+        self._compression_force = force
+
+    @property
+    def headers(self) -> "CIMultiDict[str]":
+        return self._headers
+
+    @property
+    def cookies(self) -> SimpleCookie:
+        return self._cookies
+
+    def set_cookie(
+        self,
+        name: str,
+        value: str,
+        *,
+        expires: Optional[str] = None,
+        domain: Optional[str] = None,
+        max_age: Optional[Union[int, str]] = None,
+        path: str = "/",
+        secure: Optional[bool] = None,
+        httponly: Optional[bool] = None,
+        version: Optional[str] = None,
+        samesite: Optional[str] = None,
+    ) -> None:
+        """Set or update response cookie.
+
+        Sets a new cookie or updates an existing one with the new value.
+        Only parameters that are not None are updated.
+        """
+        old = self._cookies.get(name)
+        if old is not None and old.coded_value == "":
+            # deleted cookie
+            self._cookies.pop(name, None)
+
+        self._cookies[name] = value
+        c = self._cookies[name]
+
+        if expires is not None:
+            c["expires"] = expires
+        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
+            del c["expires"]
+
+        if domain is not None:
+            c["domain"] = domain
+
+        if max_age is not None:
+            c["max-age"] = str(max_age)
+        elif "max-age" in c:
+            del c["max-age"]
+
+        c["path"] = path
+
+        if secure is not None:
+            c["secure"] = secure
+        if httponly is not None:
+            c["httponly"] = httponly
+        if version is not None:
+            c["version"] = version
+        if samesite is not None:
+            c["samesite"] = samesite
+
+    def del_cookie(
+        self, name: str, *, domain: Optional[str] = None, path: str = "/"
+    ) -> None:
+        """Delete cookie.
+
+        Creates a new, empty, already-expired cookie.
+        """
+        # TODO: do we need domain/path here?
+        self._cookies.pop(name, None)
+        self.set_cookie(
+            name,
+            "",
+            max_age=0,
+            expires="Thu, 01 Jan 1970 00:00:00 GMT",
+            domain=domain,
+            path=path,
+        )
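+    # Usage sketch (editorial illustration, not aiohttp source; the handler
+    # name and cookie values are invented):
+    #
+    #     async def handler(request):
+    #         resp = web.Response(text="ok")
+    #         resp.set_cookie("session", "abc123", max_age=3600, httponly=True)
+    #         resp.del_cookie("legacy_session")  # expires it on the client
+    #         return resp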
+
+    @property
+    def content_length(self) -> Optional[int]:
+        # Just a placeholder for adding setter
+        return super().content_length
+
+    @content_length.setter
+    def content_length(self, value: Optional[int]) -> None:
+        if value is not None:
+            value = int(value)
+            if self._chunked:
+                raise RuntimeError(
+                    "You can't set content length when chunked encoding is enabled"
+                )
+            self._headers[hdrs.CONTENT_LENGTH] = str(value)
+        else:
+            self._headers.pop(hdrs.CONTENT_LENGTH, None)
+
+    @property
+    def content_type(self) -> str:
+        # Just a placeholder for adding setter
+        return super().content_type
+
+    @content_type.setter
+    def content_type(self, value: str) -> None:
+        self.content_type  # read header values if needed
+        self._content_type = str(value)
+        self._generate_content_type_header()
+
+    @property
+    def charset(self) -> Optional[str]:
+        # Just a placeholder for adding setter
+        return super().charset
+
+    @charset.setter
+    def charset(self, value: Optional[str]) -> None:
+        ctype = self.content_type  # read header values if needed
+        if ctype == "application/octet-stream":
+            raise RuntimeError(
+                "Setting charset for application/octet-stream "
+                "doesn't make sense; set content_type first"
+            )
+        assert self._content_dict is not None
+        if value is None:
+            self._content_dict.pop("charset", None)
+        else:
+            self._content_dict["charset"] = str(value).lower()
+        self._generate_content_type_header()
+
+    @property
+    def last_modified(self) -> Optional[datetime.datetime]:
+        """The value of Last-Modified HTTP header, or None.
+
+        This header is represented as a `datetime` object.
+        """
+        return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
+
+    @last_modified.setter
+    def last_modified(
+        self, value: Optional[Union[int, float, datetime.datetime, str]]
+    ) -> None:
+        if value is None:
+            self._headers.pop(hdrs.LAST_MODIFIED, None)
+        elif isinstance(value, (int, float)):
+            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
+            )
+        elif isinstance(value, datetime.datetime):
+            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
+            )
+        elif isinstance(value, str):
+            self._headers[hdrs.LAST_MODIFIED] = value
+
+    @property
+    def etag(self) -> Optional[ETag]:
+        quoted_value = self._headers.get(hdrs.ETAG)
+        if not quoted_value:
+            return None
+        elif quoted_value == ETAG_ANY:
+            return ETag(value=ETAG_ANY)
+        match = QUOTED_ETAG_RE.fullmatch(quoted_value)
+        if not match:
+            return None
+        is_weak, value = match.group(1, 2)
+        return ETag(
+            is_weak=bool(is_weak),
+            value=value,
+        )
+
+    @etag.setter
+    def etag(self, value: Optional[Union[ETag, str]]) -> None:
+        if value is None:
+            self._headers.pop(hdrs.ETAG, None)
+        elif (isinstance(value, str) and value == ETAG_ANY) or (
+            isinstance(value, ETag) and value.value == ETAG_ANY
+        ):
+            self._headers[hdrs.ETAG] = ETAG_ANY
+        elif isinstance(value, str):
+            validate_etag_value(value)
+            self._headers[hdrs.ETAG] = f'"{value}"'
+        elif isinstance(value, ETag) and isinstance(value.value, str):
+            validate_etag_value(value.value)
+            hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
+            self._headers[hdrs.ETAG] = hdr_value
+        else:
+            raise ValueError(
+                f"Unsupported etag type: {type(value)}. 
" + f"etag must be str, ETag or None" + ) + + def _generate_content_type_header( + self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE + ) -> None: + assert self._content_dict is not None + assert self._content_type is not None + params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) + if params: + ctype = self._content_type + "; " + params + else: + ctype = self._content_type + self._headers[CONTENT_TYPE] = ctype + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if coding != ContentCoding.identity: + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression(coding.value) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) + + async def _start_compression(self, request: "BaseRequest") -> None: + if self._compression_force: + await self._do_start_compression(self._compression_force) + else: + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for coding in ContentCoding: + if coding.value in accept_encoding: + await self._do_start_compression(coding) + return + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + if self._eof_sent: + return None + if self._payload_writer is not None: + return self._payload_writer + self._must_be_empty_body = must_be_empty_body(request.method, self.status) + return await self._start(request) + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + self._req = request + writer = self._payload_writer = request._payload_writer + + await self._prepare_headers() + await request._prepare_hook(self) + await self._write_headers() + + return writer + + async def _prepare_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + version = request.version + + headers = self._headers + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) + + if self._compression: + await self._start_compression(request) + + if self._chunked: + if version != HttpVersion11: + raise RuntimeError( + "Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format(request.version) + ) + if not self._must_be_empty_body: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + if hdrs.CONTENT_LENGTH in headers: + del headers[hdrs.CONTENT_LENGTH] + elif self._length_check: + writer.length = self.content_length + if writer.length is None: + if version >= HttpVersion11: + if not self._must_be_empty_body: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + elif not self._must_be_empty_body: + keep_alive = False + + # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 + # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 + if self._must_be_empty_body: + if hdrs.CONTENT_LENGTH in headers and should_remove_content_length( + request.method, self.status + ): + del headers[hdrs.CONTENT_LENGTH] + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10 + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 + if hdrs.TRANSFER_ENCODING in headers: + del headers[hdrs.TRANSFER_ENCODING] + else: + headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") + headers.setdefault(hdrs.DATE, 
rfc822_formatted_time()) + headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) + + # connection header + if hdrs.CONNECTION not in headers: + if keep_alive: + if version == HttpVersion10: + headers[hdrs.CONNECTION] = "keep-alive" + else: + if version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" + + async def _write_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + # status line + version = request.version + status_line = "HTTP/{}.{} {} {}".format( + version[0], version[1], self._status, self._reason + ) + await writer.write_headers(status_line, self._headers) + + async def write(self, data: bytes) -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._payload_writer is None: + raise RuntimeError("Cannot call write() before prepare()") + + await self._payload_writer.write(data) + + async def drain(self) -> None: + assert not self._eof_sent, "EOF has already been sent" + assert self._payload_writer is not None, "Response has not been started" + warnings.warn( + "drain method is deprecated, use await resp.write()", + DeprecationWarning, + stacklevel=2, + ) + await self._payload_writer.drain() + + async def write_eof(self, data: bytes = b"") -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + return + + assert self._payload_writer is not None, "Response has not been started" + + await self._payload_writer.write_eof(data) + self._eof_sent = True + self._req = None + self._body_length = self._payload_writer.output_size + self._payload_writer = None + + def __repr__(self) -> str: + if self._eof_sent: + info = "eof" + elif self.prepared: + assert self._req is not None + info = f"{self._req.method} {self._req.path} " + else: + info = "not prepared" + return f"<{self.__class__.__name__} {self.reason} {info}>" + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + def __hash__(self) -> int: + return hash(id(self)) + + def __eq__(self, other: object) -> bool: + return self is other + + +class Response(StreamResponse): + def __init__( + self, + *, + body: Any = None, + status: int = 200, + reason: Optional[str] = None, + text: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: Optional[str] = None, + charset: Optional[str] = None, + zlib_executor_size: Optional[int] = None, + zlib_executor: Optional[Executor] = None, + ) -> None: + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + elif not isinstance(headers, CIMultiDict): + real_headers = CIMultiDict(headers) + else: + real_headers = headers # = cast('CIMultiDict[str]', headers) + + if content_type is not None and "charset" in content_type: + raise ValueError("charset must not be in content_type " "argument") + + if text is not None: + if hdrs.CONTENT_TYPE in real_headers: + if content_type or charset: + raise ValueError( + "passing both Content-Type header 
and " + "content_type or charset params " + "is forbidden" + ) + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % type(text)) + if content_type is None: + content_type = "text/plain" + if charset is None: + charset = "utf-8" + real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset + body = text.encode(charset) + text = None + else: + if hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + if content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, headers=real_headers) + + if text is not None: + self.text = text + else: + self.body = body + + self._compressed_body: Optional[bytes] = None + self._zlib_executor_size = zlib_executor_size + self._zlib_executor = zlib_executor + + @property + def body(self) -> Optional[Union[bytes, Payload]]: + return self._body + + @body.setter + def body(self, body: bytes) -> None: + if body is None: + self._body: Optional[bytes] = None + self._body_payload: bool = False + elif isinstance(body, (bytes, bytearray)): + self._body = body + self._body_payload = False + else: + try: + self._body = body = payload.PAYLOAD_REGISTRY.get(body) + except payload.LookupError: + raise ValueError("Unsupported body type %r" % type(body)) + + self._body_payload = True + + headers = self._headers + + # set content-type + if hdrs.CONTENT_TYPE not in headers: + headers[hdrs.CONTENT_TYPE] = body.content_type + + # copy payload headers + if body.headers: + for (key, value) in body.headers.items(): + if key not in headers: + headers[key] = value + + self._compressed_body = None + + @property + def text(self) -> Optional[str]: + if self._body is None: + return None + return self._body.decode(self.charset or "utf-8") + + @text.setter + def text(self, text: str) -> None: + assert text is None or isinstance( + text, str + ), "text argument must be str (%r)" % type(text) + + if self.content_type == "application/octet-stream": + self.content_type = "text/plain" + if self.charset is None: + self.charset = "utf-8" + + self._body = text.encode(self.charset) + self._body_payload = False + self._compressed_body = None + + @property + def content_length(self) -> Optional[int]: + if self._chunked: + return None + + if hdrs.CONTENT_LENGTH in self._headers: + return super().content_length + + if self._compressed_body is not None: + # Return length of the compressed body + return len(self._compressed_body) + elif self._body_payload: + # A payload without content length, or a compressed payload + return None + elif self._body is not None: + return len(self._body) + else: + return 0 + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + raise RuntimeError("Content length is set automatically") + + async def write_eof(self, data: bytes = b"") -> None: + if self._eof_sent: + return + if self._compressed_body is None: + body: Optional[Union[bytes, Payload]] = self._body + else: + body = self._compressed_body + assert not data, f"data arg is not supported, got {data!r}" + assert self._req is not None + assert self._payload_writer is not None + if body is not None: + if self._must_be_empty_body: + await super().write_eof() + elif self._body_payload: + payload = cast(Payload, body) + 
await payload.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) + else: + await super().write_eof() + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + if should_remove_content_length(request.method, self.status): + if hdrs.CONTENT_LENGTH in self._headers: + del self._headers[hdrs.CONTENT_LENGTH] + elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: + if self._body_payload: + size = cast(Payload, self._body).size + if size is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(size) + else: + body_len = len(self._body) if self._body else "0" + # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 + if body_len != "0" or ( + self.status != 304 and request.method.upper() != hdrs.METH_HEAD + ): + self._headers[hdrs.CONTENT_LENGTH] = str(body_len) + + return await super()._start(request) + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if self._body_payload or self._chunked: + return await super()._do_start_compression(coding) + + if coding != ContentCoding.identity: + # Instead of using _payload_writer.enable_compression, + # compress the whole body + compressor = ZLibCompressor( + encoding=str(coding.value), + max_sync_chunk_size=self._zlib_executor_size, + executor=self._zlib_executor, + ) + assert self._body is not None + if self._zlib_executor_size is None and len(self._body) > 1024 * 1024: + warnings.warn( + "Synchronous compression of large response bodies " + f"({len(self._body)} bytes) might block the async event loop. " + "Consider providing a custom value to zlib_executor_size/" + "zlib_executor response properties or disabling compression on it." + ) + self._compressed_body = ( + await compressor.compress(self._body) + compressor.flush() + ) + assert self._compressed_body is not None + + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) + + +def json_response( + data: Any = sentinel, + *, + text: Optional[str] = None, + body: Optional[bytes] = None, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, +) -> Response: + if data is not sentinel: + if text or body: + raise ValueError("only one of data, text, or body should be specified") + else: + text = dumps(data) + return Response( + text=text, + body=body, + status=status, + reason=reason, + headers=headers, + content_type=content_type, + ) diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_routedef.py b/venv/lib/python3.12/site-packages/aiohttp/web_routedef.py new file mode 100644 index 0000000..a1eb0a7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_routedef.py @@ -0,0 +1,216 @@ +import abc +import os # noqa +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Type, + Union, + overload, +) + +import attr + +from . 
import hdrs
+from .abc import AbstractView
+from .typedefs import Handler, PathLike
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .web_request import Request
+    from .web_response import StreamResponse
+    from .web_urldispatcher import AbstractRoute, UrlDispatcher
+else:
+    Request = StreamResponse = UrlDispatcher = AbstractRoute = None
+
+
+__all__ = (
+    "AbstractRouteDef",
+    "RouteDef",
+    "StaticDef",
+    "RouteTableDef",
+    "head",
+    "options",
+    "get",
+    "post",
+    "patch",
+    "put",
+    "delete",
+    "route",
+    "view",
+    "static",
+)
+
+
+class AbstractRouteDef(abc.ABC):
+    @abc.abstractmethod
+    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+        pass  # pragma: no cover
+
+
+_HandlerType = Union[Type[AbstractView], Handler]
+
+
+@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
+class RouteDef(AbstractRouteDef):
+    method: str
+    path: str
+    handler: _HandlerType
+    kwargs: Dict[str, Any]
+
+    def __repr__(self) -> str:
+        info = []
+        for name, value in sorted(self.kwargs.items()):
+            info.append(f", {name}={value!r}")
+        return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
+            method=self.method, path=self.path, handler=self.handler, info="".join(info)
+        )
+
+    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+        if self.method in hdrs.METH_ALL:
+            reg = getattr(router, "add_" + self.method.lower())
+            return [reg(self.path, self.handler, **self.kwargs)]
+        else:
+            return [
+                router.add_route(self.method, self.path, self.handler, **self.kwargs)
+            ]
+
+
+@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
+class StaticDef(AbstractRouteDef):
+    prefix: str
+    path: PathLike
+    kwargs: Dict[str, Any]
+
+    def __repr__(self) -> str:
+        info = []
+        for name, value in sorted(self.kwargs.items()):
+            info.append(f", {name}={value!r}")
+        return "<StaticDef {prefix} -> {path}" "{info}>".format(
+            prefix=self.prefix, path=self.path, info="".join(info)
+        )
+
+    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+        resource = router.add_static(self.prefix, self.path, **self.kwargs)
+        routes = resource.get_info().get("routes", {})
+        return list(routes.values())
+
+
+def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return RouteDef(method, path, handler, kwargs)
+
+
+def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_HEAD, path, handler, **kwargs)
+
+
+def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
+
+
+def get(
+    path: str,
+    handler: _HandlerType,
+    *,
+    name: Optional[str] = None,
+    allow_head: bool = True,
+    **kwargs: Any,
+) -> RouteDef:
+    return route(
+        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
+    )
+
+
+def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_POST, path, handler, **kwargs)
+
+
+def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_PUT, path, handler, **kwargs)
+
+
+def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_PATCH, path, handler, **kwargs)
+
+
+def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_DELETE, path, handler, **kwargs)
+
+
+def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
+    return route(hdrs.METH_ANY, path, handler, **kwargs)
+
+
+def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
+    return StaticDef(prefix, path, kwargs)
+
+
+_Deco = Callable[[_HandlerType], _HandlerType]
+
+
+class RouteTableDef(Sequence[AbstractRouteDef]):
+    """Route definition table"""
+
+    def __init__(self) -> None:
+        self._items: List[AbstractRouteDef] = []
+
+    def __repr__(self) -> str:
+        return f"<RouteTableDef count={len(self._items)}>"
+
+    @overload
+    def __getitem__(self, index: int) -> AbstractRouteDef:
+        ...
+
+    @overload
+    def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
+        ...
+
+    def __getitem__(self, index):  # type: ignore[no-untyped-def]
+        return self._items[index]
+
+    def __iter__(self) -> Iterator[AbstractRouteDef]:
+        return iter(self._items)
+
+    def __len__(self) -> int:
+        return len(self._items)
+
+    def __contains__(self, item: object) -> bool:
+        return item in self._items
+
+    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
+        def inner(handler: _HandlerType) -> _HandlerType:
+            self._items.append(RouteDef(method, path, handler, kwargs))
+            return handler
+
+        return inner
+
+    def head(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_HEAD, path, **kwargs)
+
+    def get(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_GET, path, **kwargs)
+
+    def post(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_POST, path, **kwargs)
+
+    def put(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_PUT, path, **kwargs)
+
+    def patch(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_PATCH, path, **kwargs)
+
+    def delete(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_DELETE, path, **kwargs)
+
+    def options(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_OPTIONS, path, **kwargs)
+
+    def view(self, path: str, **kwargs: Any) -> _Deco:
+        return self.route(hdrs.METH_ANY, path, **kwargs)
+
+    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
+        self._items.append(StaticDef(prefix, path, kwargs))
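An illustrative use of the route table above (a minimal sketch assuming the public aiohttp `web` API; the handler and app names are invented):

    from aiohttp import web

    routes = web.RouteTableDef()

    @routes.get("/ping")
    async def ping(request: web.Request) -> web.Response:
        return web.Response(text="pong")

    app = web.Application()
    app.add_routes(routes)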
diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_runner.py b/venv/lib/python3.12/site-packages/aiohttp/web_runner.py
new file mode 100644
index 0000000..6999b5c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiohttp/web_runner.py
@@ -0,0 +1,406 @@
+import asyncio
+import signal
+import socket
+import warnings
+from abc import ABC, abstractmethod
+from typing import Any, Awaitable, Callable, List, Optional, Set
+
+from yarl import URL
+
+from .typedefs import PathLike
+from .web_app import Application
+from .web_server import Server
+
+try:
+    from ssl import SSLContext
+except ImportError:
+    SSLContext = object  # type: ignore[misc,assignment]
+
+
+__all__ = (
+    "BaseSite",
+    "TCPSite",
+    "UnixSite",
+    "NamedPipeSite",
+    "SockSite",
+    "BaseRunner",
+    "AppRunner",
+    "ServerRunner",
+    "GracefulExit",
+)
+
+
+class GracefulExit(SystemExit):
+    code = 1
+
+
+def _raise_graceful_exit() -> None:
+    raise GracefulExit()
+
+
+class BaseSite(ABC):
+    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")
+
+    def __init__(
+        self,
+        runner: "BaseRunner",
+        *,
+        shutdown_timeout: float = 60.0,
+        ssl_context: Optional[SSLContext] = None,
+        backlog: int = 128,
+    ) -> None:
+        if runner.server is None:
+            raise RuntimeError("Call runner.setup() before making a site")
+        if shutdown_timeout != 60.0:
+            msg = "shutdown_timeout should be set on BaseRunner"
+            warnings.warn(msg, DeprecationWarning, stacklevel=2)
+            runner._shutdown_timeout = shutdown_timeout
+        self._runner = runner
+        self._ssl_context = ssl_context
+        self._backlog = backlog
+        self._server: Optional[asyncio.AbstractServer] = None
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        pass  # pragma: no cover
+
+    @abstractmethod
+    async def start(self) -> None:
+        self._runner._reg_site(self)
+
+    async def stop(self) -> None:
+        self._runner._check_site(self)
+        if self._server is not None:  # Maybe not started yet
+            self._server.close()
+
+        self._runner._unreg_site(self)
+
+
+class TCPSite(BaseSite):
+    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")
+
+    def __init__(
+        self,
+        runner: "BaseRunner",
+        host: Optional[str] = None,
+        port: Optional[int] = None,
+        *,
+        shutdown_timeout: float = 60.0,
+        ssl_context: Optional[SSLContext] = None,
+        backlog: int = 128,
+        reuse_address: Optional[bool] = None,
+        reuse_port: Optional[bool] = None,
+    ) -> None:
+        super().__init__(
+            runner,
+            shutdown_timeout=shutdown_timeout,
+            ssl_context=ssl_context,
+            backlog=backlog,
+        )
+        self._host = host
+        if port is None:
+            port = 8443 if self._ssl_context else 8080
+        self._port = port
+        self._reuse_address = reuse_address
+        self._reuse_port = reuse_port
+
+    @property
+    def name(self) -> str:
+        scheme = "https" if self._ssl_context else "http"
+        host = "0.0.0.0" if self._host is None else self._host
+        return str(URL.build(scheme=scheme, host=host, port=self._port))
+
+    async def start(self) -> None:
+        await super().start()
+        loop = asyncio.get_event_loop()
+        server = self._runner.server
+        assert server is not None
+        self._server = await loop.create_server(
+            server,
+            self._host,
+            self._port,
+            ssl=self._ssl_context,
+            backlog=self._backlog,
+            reuse_address=self._reuse_address,
+            reuse_port=self._reuse_port,
+        )
+
+
+class UnixSite(BaseSite):
+    __slots__ = ("_path",)
+
+    def __init__(
+        self,
+        runner: "BaseRunner",
+        path: PathLike,
+        *,
+        shutdown_timeout: float = 60.0,
+        ssl_context: Optional[SSLContext] = None,
+        backlog: int = 128,
+    ) -> None:
+        super().__init__(
+            runner,
+            shutdown_timeout=shutdown_timeout,
+            ssl_context=ssl_context,
+            backlog=backlog,
+        )
+        self._path = path
+
+    @property
+    def name(self) -> str:
+        scheme = "https" if self._ssl_context else "http"
+        return f"{scheme}://unix:{self._path}:"
+
+    async def start(self) -> None:
+        await super().start()
+        loop = asyncio.get_event_loop()
+        server = self._runner.server
+        assert server is not None
+        self._server = await loop.create_unix_server(
+            server,
+            self._path,
+            ssl=self._ssl_context,
+            backlog=self._backlog,
+        )
+
+
+class NamedPipeSite(BaseSite):
+    __slots__ = ("_path",)
+
+    def __init__(
+        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
+    ) -> None:
+        loop = asyncio.get_event_loop()
+        if not isinstance(
+            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
+        ):
+            raise RuntimeError(
+                "Named Pipes are only available in a proactor loop under Windows"
+            )
+        super().__init__(runner, shutdown_timeout=shutdown_timeout)
+        self._path = path
+
+    @property
+    def name(self) -> str:
+        return self._path
+
+    async def start(self) -> None:
+        await super().start()
+        loop = asyncio.get_event_loop()
+        server = self._runner.server
+        assert server is not None
+        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
+            server, self._path
+        )
+        self._server = _server[0]
+
+
+class SockSite(BaseSite):
+    __slots__ = ("_sock", "_name")
+
+    def __init__(
+        self,
+        runner: "BaseRunner",
+        sock: socket.socket,
+        *,
+        shutdown_timeout: float = 60.0,
+        ssl_context: Optional[SSLContext] = None,
+        backlog: int = 128,
+    ) -> None:
+        super().__init__(
+            runner,
+            shutdown_timeout=shutdown_timeout,
+            ssl_context=ssl_context,
+            backlog=backlog,
+        )
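+        # Illustrative lifecycle (editorial sketch, not aiohttp source; `app`
+        # and `my_bound_socket` are assumed names). A site wraps a listening
+        # endpoint for a prepared runner:
+        #     runner = AppRunner(app)
+        #     await runner.setup()
+        #     site = SockSite(runner, sock=my_bound_socket)
+        #     await site.start()
+        #     ...
+        #     await runner.cleanup()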
+ self._sock = sock + scheme = "https" if self._ssl_context else "http" + if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: + name = f"{scheme}://unix:{sock.getsockname()}:" + else: + host, port = sock.getsockname()[:2] + name = str(URL.build(scheme=scheme, host=host, port=port)) + self._name = name + + @property + def name(self) -> str: + return self._name + + async def start(self) -> None: + await super().start() + loop = asyncio.get_event_loop() + server = self._runner.server + assert server is not None + self._server = await loop.create_server( + server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog + ) + + +class BaseRunner(ABC): + __slots__ = ( + "shutdown_callback", + "_handle_signals", + "_kwargs", + "_server", + "_sites", + "_shutdown_timeout", + ) + + def __init__( + self, + *, + handle_signals: bool = False, + shutdown_timeout: float = 60.0, + **kwargs: Any, + ) -> None: + self.shutdown_callback: Optional[Callable[[], Awaitable[None]]] = None + self._handle_signals = handle_signals + self._kwargs = kwargs + self._server: Optional[Server] = None + self._sites: List[BaseSite] = [] + self._shutdown_timeout = shutdown_timeout + + @property + def server(self) -> Optional[Server]: + return self._server + + @property + def addresses(self) -> List[Any]: + ret: List[Any] = [] + for site in self._sites: + server = site._server + if server is not None: + sockets = server.sockets # type: ignore[attr-defined] + if sockets is not None: + for sock in sockets: + ret.append(sock.getsockname()) + return ret + + @property + def sites(self) -> Set[BaseSite]: + return set(self._sites) + + async def setup(self) -> None: + loop = asyncio.get_event_loop() + + if self._handle_signals: + try: + loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) + loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) + except NotImplementedError: # pragma: no cover + # add_signal_handler is not implemented on Windows + pass + + self._server = await self._make_server() + + @abstractmethod + async def shutdown(self) -> None: + """Call any shutdown hooks to help server close gracefully.""" + + async def cleanup(self) -> None: + # The loop over sites is intentional, an exception on gather() + # leaves self._sites in unpredictable state. 
+        # The loop guarantees that a site is either deleted on success or
+        # still present on failure.
+        for site in list(self._sites):
+            await site.stop()
+
+        if self._server:  # If setup succeeded
+            self._server.pre_shutdown()
+            await self.shutdown()
+
+            if self.shutdown_callback:
+                await self.shutdown_callback()
+
+            await self._server.shutdown(self._shutdown_timeout)
+            await self._cleanup_server()
+
+        self._server = None
+        if self._handle_signals:
+            loop = asyncio.get_running_loop()
+            try:
+                loop.remove_signal_handler(signal.SIGINT)
+                loop.remove_signal_handler(signal.SIGTERM)
+            except NotImplementedError:  # pragma: no cover
+                # remove_signal_handler is not implemented on Windows
+                pass
+
+    @abstractmethod
+    async def _make_server(self) -> Server:
+        pass  # pragma: no cover
+
+    @abstractmethod
+    async def _cleanup_server(self) -> None:
+        pass  # pragma: no cover
+
+    def _reg_site(self, site: BaseSite) -> None:
+        if site in self._sites:
+            raise RuntimeError(f"Site {site} is already registered in runner {self}")
+        self._sites.append(site)
+
+    def _check_site(self, site: BaseSite) -> None:
+        if site not in self._sites:
+            raise RuntimeError(f"Site {site} is not registered in runner {self}")
+
+    def _unreg_site(self, site: BaseSite) -> None:
+        if site not in self._sites:
+            raise RuntimeError(f"Site {site} is not registered in runner {self}")
+        self._sites.remove(site)
+
+
+class ServerRunner(BaseRunner):
+    """Low-level web server runner"""
+
+    __slots__ = ("_web_server",)
+
+    def __init__(
+        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
+    ) -> None:
+        super().__init__(handle_signals=handle_signals, **kwargs)
+        self._web_server = web_server
+
+    async def shutdown(self) -> None:
+        pass
+
+    async def _make_server(self) -> Server:
+        return self._web_server
+
+    async def _cleanup_server(self) -> None:
+        pass
+
+
+class AppRunner(BaseRunner):
+    """Web Application runner"""
+
+    __slots__ = ("_app",)
+
+    def __init__(
+        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
+    ) -> None:
+        super().__init__(handle_signals=handle_signals, **kwargs)
+        if not isinstance(app, Application):
+            raise TypeError(
+                "The first argument should be a web.Application "
+                "instance, got {!r}".format(app)
+            )
+        self._app = app
+
+    @property
+    def app(self) -> Application:
+        return self._app
+
+    async def shutdown(self) -> None:
+        await self._app.shutdown()
+
+    async def _make_server(self) -> Server:
+        loop = asyncio.get_event_loop()
+        self._app._set_loop(loop)
+        self._app.on_startup.freeze()
+        await self._app.startup()
+        self._app.freeze()
+
+        return self._app._make_handler(loop=loop, **self._kwargs)
+
+    async def _cleanup_server(self) -> None:
+        await self._app.cleanup()
diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_server.py b/venv/lib/python3.12/site-packages/aiohttp/web_server.py
new file mode 100644
index 0000000..52faacb
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiohttp/web_server.py
@@ -0,0 +1,77 @@
+"""Low level HTTP server."""
+import asyncio
+from typing import Any, Awaitable, Callable, Dict, List, Optional  # noqa
+
+from .abc import AbstractStreamWriter
+from .helpers import get_running_loop
+from .http_parser import RawRequestMessage
+from .streams import StreamReader
+from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
+from .web_request import BaseRequest
+
+__all__ = ("Server",)
+
+
+class Server:
+    def __init__(
+        self,
+        handler: _RequestHandler,
+        *,
+        request_factory: Optional[_RequestFactory] = None,
+
handler_cancellation: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any + ) -> None: + self._loop = get_running_loop(loop) + self._connections: Dict[RequestHandler, asyncio.Transport] = {} + self._kwargs = kwargs + self.requests_count = 0 + self.request_handler = handler + self.request_factory = request_factory or self._make_request + self.handler_cancellation = handler_cancellation + + @property + def connections(self) -> List[RequestHandler]: + return list(self._connections.keys()) + + def connection_made( + self, handler: RequestHandler, transport: asyncio.Transport + ) -> None: + self._connections[handler] = transport + + def connection_lost( + self, handler: RequestHandler, exc: Optional[BaseException] = None + ) -> None: + if handler in self._connections: + del self._connections[handler] + + def _make_request( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: RequestHandler, + writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + ) -> BaseRequest: + return BaseRequest(message, payload, protocol, writer, task, self._loop) + + def pre_shutdown(self) -> None: + for conn in self._connections: + conn.close() + + async def shutdown(self, timeout: Optional[float] = None) -> None: + coros = (conn.shutdown(timeout) for conn in self._connections) + await asyncio.gather(*coros) + self._connections.clear() + + def __call__(self) -> RequestHandler: + try: + return RequestHandler(self, loop=self._loop, **self._kwargs) + except TypeError: + # Failsafe creation: remove all custom handler_args + kwargs = { + k: v + for k, v in self._kwargs.items() + if k in ["debug", "access_log_class"] + } + return RequestHandler(self, loop=self._loop, **kwargs) diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_urldispatcher.py b/venv/lib/python3.12/site-packages/aiohttp/web_urldispatcher.py new file mode 100644 index 0000000..ddb6ede --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_urldispatcher.py @@ -0,0 +1,1222 @@ +import abc +import asyncio +import base64 +import hashlib +import inspect +import keyword +import os +import re +import warnings +from contextlib import contextmanager +from functools import wraps +from pathlib import Path +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Container, + Dict, + Final, + Generator, + Iterable, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Pattern, + Set, + Sized, + Tuple, + Type, + TypedDict, + Union, + cast, +) + +from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] + +from . 
import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .helpers import DEBUG +from .http import HttpVersion11 +from .typedefs import Handler, PathLike +from .web_exceptions import ( + HTTPException, + HTTPExpectationFailed, + HTTPForbidden, + HTTPMethodNotAllowed, + HTTPNotFound, +) +from .web_fileresponse import FileResponse +from .web_request import Request +from .web_response import Response, StreamResponse +from .web_routedef import AbstractRouteDef + +__all__ = ( + "UrlDispatcher", + "UrlMappingMatchInfo", + "AbstractResource", + "Resource", + "PlainResource", + "DynamicResource", + "AbstractRoute", + "ResourceRoute", + "StaticResource", + "View", +) + + +if TYPE_CHECKING: # pragma: no cover + from .web_app import Application + + BaseDict = Dict[str, str] +else: + BaseDict = dict + +YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) + +HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( + r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" +) +ROUTE_RE: Final[Pattern[str]] = re.compile( + r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" +) +PATH_SEP: Final[str] = re.escape("/") + + +_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] +_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] + + +class _InfoDict(TypedDict, total=False): + path: str + + formatter: str + pattern: Pattern[str] + + directory: Path + prefix: str + routes: Mapping[str, "AbstractRoute"] + + app: "Application" + + domain: str + + rule: "AbstractRuleMatching" + + http_exception: HTTPException + + +class AbstractResource(Sized, Iterable["AbstractRoute"]): + def __init__(self, *, name: Optional[str] = None) -> None: + self._name = name + + @property + def name(self) -> Optional[str]: + return self._name + + @property + @abc.abstractmethod + def canonical(self) -> str: + """Exposes the resource's canonical path. + + For example '/foo/bar/{name}' + + """ + + @abc.abstractmethod # pragma: no branch + def url_for(self, **kwargs: str) -> URL: + """Construct url for resource with additional params.""" + + @abc.abstractmethod # pragma: no branch + async def resolve(self, request: Request) -> _Resolve: + """Resolve resource. + + Return (UrlMappingMatchInfo, allowed_methods) pair. + """ + + @abc.abstractmethod + def add_prefix(self, prefix: str) -> None: + """Add a prefix to processed URLs. + + Required for subapplications support. 
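+
+        For example, add_prefix("/admin") turns "/foo/bar/{name}" into
+        "/admin/foo/bar/{name}".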
+ """ + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + def freeze(self) -> None: + pass + + @abc.abstractmethod + def raw_match(self, path: str) -> bool: + """Perform a raw match against path""" + + +class AbstractRoute(abc.ABC): + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + *, + expect_handler: Optional[_ExpectHandler] = None, + resource: Optional[AbstractResource] = None, + ) -> None: + + if expect_handler is None: + expect_handler = _default_expect_handler + + assert asyncio.iscoroutinefunction( + expect_handler + ), f"Coroutine is expected, got {expect_handler!r}" + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError(f"{method} is not allowed HTTP method") + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn( + "Bare generators are deprecated, " "use @coroutine wrapper", + DeprecationWarning, + ) + elif isinstance(handler, type) and issubclass(handler, AbstractView): + pass + else: + warnings.warn( + "Bare functions are deprecated, " "use async ones", DeprecationWarning + ) + + @wraps(handler) + async def handler_wrapper(request: Request) -> StreamResponse: + result = old_handler(request) + if asyncio.iscoroutine(result): + result = await result + assert isinstance(result, StreamResponse) + return result + + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self) -> str: + return self._method + + @property + def handler(self) -> Handler: + return self._handler + + @property + @abc.abstractmethod + def name(self) -> Optional[str]: + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self) -> Optional[AbstractResource]: + return self._resource + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + + async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]: + return await self._expect_handler(request) + + +class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): + super().__init__(match_dict) + self._route = route + self._apps: List[Application] = [] + self._current_app: Optional[Application] = None + self._frozen = False + + @property + def handler(self) -> Handler: + return self._route.handler + + @property + def route(self) -> AbstractRoute: + return self._route + + @property + def expect_handler(self) -> _ExpectHandler: + return self._route.handle_expect_header + + @property + def http_exception(self) -> Optional[HTTPException]: + return None + + def get_info(self) -> _InfoDict: # type: ignore[override] + return self._route.get_info() + + @property + def apps(self) -> Tuple["Application", ...]: + return tuple(self._apps) + + def add_app(self, app: "Application") -> None: + if self._frozen: + raise RuntimeError("Cannot change apps stack after .freeze() call") + if self._current_app is None: + self._current_app = app + self._apps.insert(0, app) + + @property + def current_app(self) -> "Application": + app = self._current_app + assert 
app is not None
+        return app
+
+    @contextmanager
+    def set_current_app(self, app: "Application") -> Generator[None, None, None]:
+        if DEBUG:  # pragma: no cover
+            if app not in self._apps:
+                raise RuntimeError(
+                    "Expected one of the following apps {!r}, got {!r}".format(
+                        self._apps, app
+                    )
+                )
+        prev = self._current_app
+        self._current_app = app
+        try:
+            yield
+        finally:
+            self._current_app = prev
+
+    def freeze(self) -> None:
+        self._frozen = True
+
+    def __repr__(self) -> str:
+        return f"<MatchInfo {super().__repr__()}: {self._route}>"
+
+
+class MatchInfoError(UrlMappingMatchInfo):
+    def __init__(self, http_exception: HTTPException) -> None:
+        self._exception = http_exception
+        super().__init__({}, SystemRoute(self._exception))
+
+    @property
+    def http_exception(self) -> HTTPException:
+        return self._exception
+
+    def __repr__(self) -> str:
+        return "<MatchInfoError {}: {}>".format(
+            self._exception.status, self._exception.reason
+        )
+
+
+async def _default_expect_handler(request: Request) -> None:
+    """Default handler for Expect header.
+
+    Just send "100 Continue" to client.
+    raise HTTPExpectationFailed if value of header is not "100-continue"
+    """
+    expect = request.headers.get(hdrs.EXPECT, "")
+    if request.version == HttpVersion11:
+        if expect.lower() == "100-continue":
+            await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+        else:
+            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
+
+
+class Resource(AbstractResource):
+    def __init__(self, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        self._routes: List[ResourceRoute] = []
+
+    def add_route(
+        self,
+        method: str,
+        handler: Union[Type[AbstractView], Handler],
+        *,
+        expect_handler: Optional[_ExpectHandler] = None,
+    ) -> "ResourceRoute":
+
+        for route_obj in self._routes:
+            if route_obj.method == method or route_obj.method == hdrs.METH_ANY:
+                raise RuntimeError(
+                    "Added route will never be executed, "
+                    "method {route.method} is already "
+                    "registered".format(route=route_obj)
+                )
+
+        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
+        self.register_route(route_obj)
+        return route_obj
+
+    def register_route(self, route: "ResourceRoute") -> None:
+        assert isinstance(
+            route, ResourceRoute
+        ), f"Instance of Route class is required, got {route!r}"
+        self._routes.append(route)
+
+    async def resolve(self, request: Request) -> _Resolve:
+        allowed_methods: Set[str] = set()
+
+        match_dict = self._match(request.rel_url.raw_path)
+        if match_dict is None:
+            return None, allowed_methods
+
+        for route_obj in self._routes:
+            route_method = route_obj.method
+            allowed_methods.add(route_method)
+
+            if route_method == request.method or route_method == hdrs.METH_ANY:
+                return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods)
+        else:
+            return None, allowed_methods
+
+    @abc.abstractmethod
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        pass  # pragma: no cover
+
+    def __len__(self) -> int:
+        return len(self._routes)
+
+    def __iter__(self) -> Iterator["ResourceRoute"]:
+        return iter(self._routes)
+
+    # TODO: implement all abstract methods
+
+
+class PlainResource(Resource):
+    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        assert not path or path.startswith("/")
+        self._path = path
+
+    @property
+    def canonical(self) -> str:
+        return self._path
+
+    def freeze(self) -> None:
+        if not self._path:
+            self._path = "/"
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._path = prefix + self._path
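+    # For example (editorial note): add_prefix("/api") turns a PlainResource
+    # for "/users" into one matching "/api/users"; sub-applications rely on
+    # this when mounted via app.add_subapp("/api", subapp).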
+
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        # string comparison is about 10 times faster than regexp matching
+        if self._path == path:
+            return {}
+        else:
+            return None
+
+    def raw_match(self, path: str) -> bool:
+        return self._path == path
+
+    def get_info(self) -> _InfoDict:
+        return {"path": self._path}
+
+    def url_for(self) -> URL:  # type: ignore[override]
+        return URL.build(path=self._path, encoded=True)
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return f"<PlainResource {name} {self._path}>"
+
+
+class DynamicResource(Resource):
+
+    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
+    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
+    GOOD = r"[^{}/]+"
+
+    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        pattern = ""
+        formatter = ""
+        for part in ROUTE_RE.split(path):
+            match = self.DYN.fullmatch(part)
+            if match:
+                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
+                formatter += "{" + match.group("var") + "}"
+                continue
+
+            match = self.DYN_WITH_RE.fullmatch(part)
+            if match:
+                pattern += "(?P<{var}>{re})".format(**match.groupdict())
+                formatter += "{" + match.group("var") + "}"
+                continue
+
+            if "{" in part or "}" in part:
+                raise ValueError(f"Invalid path '{path}'['{part}']")
+
+            part = _requote_path(part)
+            formatter += part
+            pattern += re.escape(part)
+
+        try:
+            compiled = re.compile(pattern)
+        except re.error as exc:
+            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
+        assert compiled.pattern.startswith(PATH_SEP)
+        assert formatter.startswith("/")
+        self._pattern = compiled
+        self._formatter = formatter
+
+    @property
+    def canonical(self) -> str:
+        return self._formatter
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
+        self._formatter = prefix + self._formatter
+
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        match = self._pattern.fullmatch(path)
+        if match is None:
+            return None
+        else:
+            return {
+                key: _unquote_path(value) for key, value in match.groupdict().items()
+            }
+
+    def raw_match(self, path: str) -> bool:
+        return self._formatter == path
+
+    def get_info(self) -> _InfoDict:
+        return {"formatter": self._formatter, "pattern": self._pattern}
+
+    def url_for(self, **parts: str) -> URL:
+        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
+        return URL.build(path=url, encoded=True)
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return "<DynamicResource {name} {formatter}>".format(
+            name=name, formatter=self._formatter
+        )
+
+
+class PrefixResource(AbstractResource):
+    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
+        assert not prefix or prefix.startswith("/"), prefix
+        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
+        super().__init__(name=name)
+        self._prefix = _requote_path(prefix)
+        self._prefix2 = self._prefix + "/"
+
+    @property
+    def canonical(self) -> str:
+        return self._prefix
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._prefix = prefix + self._prefix
+        self._prefix2 = self._prefix + "/"
+
+    def raw_match(self, prefix: str) -> bool:
+        return False
+
+    # TODO: impl missing abstract methods
+
+
+class StaticResource(PrefixResource):
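+    # Typical registration (editorial sketch; the paths are invented, the
+    # add_static API is real):
+    #     app.router.add_static("/static", "./static",
+    #                           show_index=True, append_version=True)
+    # creates an instance of this resource.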
VERSION_KEY = "v" + + def __init__( + self, + prefix: str, + directory: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> None: + super().__init__(prefix, name=name) + try: + directory = Path(directory) + if str(directory).startswith("~"): + directory = Path(os.path.expanduser(str(directory))) + directory = directory.resolve() + if not directory.is_dir(): + raise ValueError("Not a directory") + except (FileNotFoundError, ValueError) as error: + raise ValueError(f"No directory exists at '{directory}'") from error + self._directory = directory + self._show_index = show_index + self._chunk_size = chunk_size + self._follow_symlinks = follow_symlinks + self._expect_handler = expect_handler + self._append_version = append_version + + self._routes = { + "GET": ResourceRoute( + "GET", self._handle, self, expect_handler=expect_handler + ), + "HEAD": ResourceRoute( + "HEAD", self._handle, self, expect_handler=expect_handler + ), + } + + def url_for( # type: ignore[override] + self, + *, + filename: PathLike, + append_version: Optional[bool] = None, + ) -> URL: + if append_version is None: + append_version = self._append_version + filename = str(filename).lstrip("/") + + url = URL.build(path=self._prefix, encoded=True) + # filename is not encoded + if YARL_VERSION < (1, 6): + url = url / filename.replace("%", "%25") + else: + url = url / filename + + if append_version: + try: + filepath = self._directory.joinpath(filename).resolve() + if not self._follow_symlinks: + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink + # with follow_symlinks is False + return url # relatively safe + if filepath.is_file(): + # TODO cache file content + # with file watcher for cache invalidation + with filepath.open("rb") as f: + file_bytes = f.read() + h = self._get_file_hash(file_bytes) + url = url.with_query({self.VERSION_KEY: h}) + return url + return url + + @staticmethod + def _get_file_hash(byte_array: bytes) -> str: + m = hashlib.sha256() # todo sha256 can be configurable param + m.update(byte_array) + b64 = base64.urlsafe_b64encode(m.digest()) + return b64.decode("ascii") + + def get_info(self) -> _InfoDict: + return { + "directory": self._directory, + "prefix": self._prefix, + "routes": self._routes, + } + + def set_options_route(self, handler: Handler) -> None: + if "OPTIONS" in self._routes: + raise RuntimeError("OPTIONS route was set already") + self._routes["OPTIONS"] = ResourceRoute( + "OPTIONS", handler, self, expect_handler=self._expect_handler + ) + + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.raw_path + method = request.method + allowed_methods = set(self._routes) + if not path.startswith(self._prefix2) and path != self._prefix: + return None, set() + + if method not in allowed_methods: + return None, allowed_methods + + match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} + return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: + rel_url = request.match_info["filename"] + try: + filename = Path(rel_url) + if filename.anchor: + # rel_url is an absolute name like 
+                # /static/\\machine_name\c$ or /static/D:\path
+                # where the static dir is totally different
+                raise HTTPForbidden()
+            filepath = self._directory.joinpath(filename).resolve()
+            if not self._follow_symlinks:
+                filepath.relative_to(self._directory)
+        except (ValueError, FileNotFoundError) as error:
+            # relatively safe
+            raise HTTPNotFound() from error
+        except HTTPForbidden:
+            raise
+        except Exception as error:
+            # perm error or other kind!
+            request.app.logger.exception(error)
+            raise HTTPNotFound() from error
+
+        # on opening a dir, load its contents if allowed
+        if filepath.is_dir():
+            if self._show_index:
+                try:
+                    return Response(
+                        text=self._directory_as_html(filepath), content_type="text/html"
+                    )
+                except PermissionError:
+                    raise HTTPForbidden()
+            else:
+                raise HTTPForbidden()
+        elif filepath.is_file():
+            return FileResponse(filepath, chunk_size=self._chunk_size)
+        else:
+            raise HTTPNotFound
+
+    def _directory_as_html(self, filepath: Path) -> str:
+        # returns directory's index as html
+
+        # sanity check
+        assert filepath.is_dir()
+
+        relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
+        index_of = f"Index of /{relative_path_to_dir}"
+        h1 = f"<h1>{index_of}</h1>"
+
+        index_list = []
+        dir_index = filepath.iterdir()
+        for _file in sorted(dir_index):
+            # show file url as relative to static path
+            rel_path = _file.relative_to(self._directory).as_posix()
+            file_url = self._prefix + "/" + rel_path
+
+            # if file is a directory, add '/' to the end of the name
+            if _file.is_dir():
+                file_name = f"{_file.name}/"
+            else:
+                file_name = _file.name
+
+            index_list.append(
+                '<li><a href="{url}">{name}</a></li>'.format(
+                    url=file_url, name=file_name
+                )
+            )
+        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
+        body = f"<body>\n{h1}\n{ul}\n</body>"
+
+        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
+        html = f"<html>\n{head_str}\n{body}\n</html>"
+
+        return html
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "'" if self.name is not None else ""
+        return "<StaticResource {name} {path} -> {directory!r}>".format(
+            name=name, path=self._prefix, directory=self._directory
+        )
+
+
+class PrefixedSubAppResource(PrefixResource):
+    def __init__(self, prefix: str, app: "Application") -> None:
+        super().__init__(prefix)
+        self._app = app
+        for resource in app.router.resources():
+            resource.add_prefix(prefix)
+
+    def add_prefix(self, prefix: str) -> None:
+        super().add_prefix(prefix)
+        for resource in self._app.router.resources():
+            resource.add_prefix(prefix)
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not supported " "by sub-application root")
+
+    def get_info(self) -> _InfoDict:
+        return {"app": self._app, "prefix": self._prefix}
+
+    async def resolve(self, request: Request) -> _Resolve:
+        if (
+            not request.url.raw_path.startswith(self._prefix2)
+            and request.url.raw_path != self._prefix
+        ):
+            return None, set()
+        match_info = await self._app.router.resolve(request)
+        match_info.add_app(self._app)
+        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+            methods = match_info.http_exception.allowed_methods
+        else:
+            methods = set()
+        return match_info, methods
+
+    def __len__(self) -> int:
+        return len(self._app.router.routes())
+
+    def __iter__(self) -> Iterator[AbstractRoute]:
+        return iter(self._app.router.routes())
+
+    def __repr__(self) -> str:
+        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
+            prefix=self._prefix, app=self._app
+        )
+
+
+class AbstractRuleMatching(abc.ABC):
+    @abc.abstractmethod  # pragma: no branch
+    async def match(self, request: Request) -> bool:
+        """Return bool if the request satisfies the criteria"""
+
+    @abc.abstractmethod  # pragma: no branch
+    def get_info(self) -> _InfoDict:
+        """Return a dict with additional info useful for introspection"""
+
+    @property
+    @abc.abstractmethod  # pragma: no branch
+    def canonical(self) -> str:
+        """Return a str"""
+
+
+class Domain(AbstractRuleMatching):
+    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")
+
+    def __init__(self, domain: str) -> None:
+        super().__init__()
+        self._domain = self.validation(domain)
+
+    @property
+    def canonical(self) -> str:
+        return self._domain
+
+    def validation(self, domain: str) -> str:
+        if not isinstance(domain, str):
+            raise TypeError("Domain must be str")
+        domain = domain.rstrip(".").lower()
+        if not domain:
+            raise ValueError("Domain cannot be empty")
+        elif "://" in domain:
+            raise ValueError("Scheme not supported")
+        url = URL("http://" + domain)
+        assert url.raw_host is not None
+        if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
+            raise ValueError("Domain not valid")
+        if url.port == 80:
+            return url.raw_host
+        return f"{url.raw_host}:{url.port}"
+
+    async def match(self, request: Request) -> bool:
+        host = request.headers.get(hdrs.HOST)
+        if not host:
+            return False
+        return self.match_domain(host)
+
+    def match_domain(self, host: str) -> bool:
+        return host.lower() == self._domain
+
+    def get_info(self) -> _InfoDict:
+        return {"domain": self._domain}
+
+
+class MaskDomain(Domain):
+    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")
+
+    def __init__(self, domain: str) -> None:
+        super().__init__(domain)
+        mask = self._domain.replace(".", r"\.").replace("*", ".*")
+        self._mask = re.compile(mask)
+
+    @property
+    def canonical(self) -> str:
+        return self._mask.pattern
+
+    def match_domain(self, host: str) -> bool:
+        return self._mask.fullmatch(host) is not None
+
+
+class MatchedSubAppResource(PrefixedSubAppResource):
+    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
+        AbstractResource.__init__(self)
+        self._prefix = ""
+        self._app = app
+        self._rule = rule
+
+    @property
+    def canonical(self) -> str:
+        return self._rule.canonical
+
+    def get_info(self) -> _InfoDict:
+        return {"app": self._app, "rule": self._rule}
+
+    async def resolve(self, request: Request) -> _Resolve:
+        if not await self._rule.match(request):
+            return None, set()
+        match_info = await self._app.router.resolve(request)
+        match_info.add_app(self._app)
+        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+            methods = match_info.http_exception.allowed_methods
+        else:
+            methods = set()
+        return match_info, methods
+
+    def __repr__(self) -> str:
+        return "<MatchedSubAppResource -> {app!r}>" "".format(app=self._app)
+
+
+class ResourceRoute(AbstractRoute):
+    """A route with resource"""
+
+    def __init__(
+        self,
+        method: str,
+        handler: Union[Handler, Type[AbstractView]],
+        resource: AbstractResource,
+        *,
+        expect_handler: Optional[_ExpectHandler] = None,
+    ) -> None:
+        super().__init__(
+            method, handler, expect_handler=expect_handler, resource=resource
+        )
+
+    def __repr__(self) -> str:
+        return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(
+            method=self.method, resource=self._resource, handler=self.handler
+        )
+
+    @property
+    def name(self) -> Optional[str]:
+        if self._resource is None:
+            return None
+        return self._resource.name
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        """Construct url for route with additional params."""
+        assert self._resource is not None
+        return self._resource.url_for(*args, **kwargs)
+
+    def get_info(self) -> _InfoDict:
+        assert self._resource is not None
+        return self._resource.get_info()
+
+
+class SystemRoute(AbstractRoute):
+    def __init__(self, http_exception: HTTPException) -> None:
+        super().__init__(hdrs.METH_ANY, self._handle)
+        self._http_exception = http_exception
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not allowed for SystemRoute")
+
+    @property
+    def name(self) -> Optional[str]:
+        return None
+
+    def get_info(self) -> _InfoDict:
+        return {"http_exception": self._http_exception}
+
+    async def _handle(self, request: Request) -> StreamResponse:
+        raise self._http_exception
+
+    @property
+    def status(self) -> int:
+        return self._http_exception.status
+
+    @property
+    def reason(self) -> str:
+        return self._http_exception.reason
+
+    def __repr__(self) -> str:
+        return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
+
+
+class View(AbstractView):
+    async def _iter(self) -> StreamResponse:
+        if self.request.method not in hdrs.METH_ALL:
+            self._raise_allowed_methods()
+        method: Optional[Callable[[], Awaitable[StreamResponse]]]
+        method = getattr(self, self.request.method.lower(), None)
+        if method is None:
+            self._raise_allowed_methods()
+        ret = await method()
+        assert isinstance(ret, StreamResponse)
+        return ret
+
+    def __await__(self) -> Generator[Any, None, StreamResponse]:
+        return self._iter().__await__()
+
+    def _raise_allowed_methods(self) -> NoReturn:
+        allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
+        raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
+
+
+class ResourcesView(Sized,
Iterable[AbstractResource], Container[AbstractResource]): + def __init__(self, resources: List[AbstractResource]) -> None: + self._resources = resources + + def __len__(self) -> int: + return len(self._resources) + + def __iter__(self) -> Iterator[AbstractResource]: + yield from self._resources + + def __contains__(self, resource: object) -> bool: + return resource in self._resources + + +class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): + def __init__(self, resources: List[AbstractResource]): + self._routes: List[AbstractRoute] = [] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + yield from self._routes + + def __contains__(self, route: object) -> bool: + return route in self._routes + + +class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): + + NAME_SPLIT_RE = re.compile(r"[.:-]") + + def __init__(self) -> None: + super().__init__() + self._resources: List[AbstractResource] = [] + self._named_resources: Dict[str, AbstractResource] = {} + + async def resolve(self, request: Request) -> UrlMappingMatchInfo: + method = request.method + allowed_methods: Set[str] = set() + + for resource in self._resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) + else: + return MatchInfoError(HTTPNotFound()) + + def __iter__(self) -> Iterator[str]: + return iter(self._named_resources) + + def __len__(self) -> int: + return len(self._named_resources) + + def __contains__(self, resource: object) -> bool: + return resource in self._named_resources + + def __getitem__(self, name: str) -> AbstractResource: + return self._named_resources[name] + + def resources(self) -> ResourcesView: + return ResourcesView(self._resources) + + def routes(self) -> RoutesView: + return RoutesView(self._resources) + + def named_resources(self) -> Mapping[str, AbstractResource]: + return MappingProxyType(self._named_resources) + + def register_resource(self, resource: AbstractResource) -> None: + assert isinstance( + resource, AbstractResource + ), f"Instance of AbstractResource class is required, got {resource!r}" + if self.frozen: + raise RuntimeError("Cannot register a resource into frozen router.") + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if keyword.iskeyword(part): + raise ValueError( + f"Incorrect route name {name!r}, " + "python keywords cannot be used " + "for route name" + ) + if not part.isidentifier(): + raise ValueError( + "Incorrect route name {!r}, " + "the name should be a sequence of " + "python identifiers separated " + "by dash, dot or column".format(name) + ) + if name in self._named_resources: + raise ValueError( + "Duplicate {!r}, " + "already handled by {!r}".format(name, self._named_resources[name]) + ) + self._named_resources[name] = resource + self._resources.append(resource) + + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: + if path and not path.startswith("/"): + raise ValueError("path should be started with / or be empty") + # Reuse last added resource if path and name are the same + if self._resources: + resource = self._resources[-1] + if resource.name == name and resource.raw_match(path): + return cast(Resource, resource) + 
if not ("{" in path or "}" in path or ROUTE_RE.search(path)): + resource = PlainResource(_requote_path(path), name=name) + self.register_resource(resource) + return resource + resource = DynamicResource(path, name=name) + self.register_resource(resource) + return resource + + def add_route( + self, + method: str, + path: str, + handler: Union[Handler, Type[AbstractView]], + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + ) -> AbstractRoute: + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, expect_handler=expect_handler) + + def add_static( + self, + prefix: str, + path: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> AbstractResource: + """Add static files view. + + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith("/") + if prefix.endswith("/"): + prefix = prefix[:-1] + resource = StaticResource( + prefix, + path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version, + ) + self.register_resource(resource) + return resource + + def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method HEAD.""" + return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) + + def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method OPTIONS.""" + return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + def add_get( + self, + path: str, + handler: Handler, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, + ) -> AbstractRoute: + """Shortcut for add_route with method GET. + + If allow_head is true, another + route is added allowing head requests to the same endpoint. + """ + resource = self.add_resource(path, name=name) + if allow_head: + resource.add_route(hdrs.METH_HEAD, handler, **kwargs) + return resource.add_route(hdrs.METH_GET, handler, **kwargs) + + def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method POST.""" + return self.add_route(hdrs.METH_POST, path, handler, **kwargs) + + def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PUT.""" + return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) + + def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PATCH.""" + return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) + + def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method DELETE.""" + return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) + + def add_view( + self, path: str, handler: Type[AbstractView], **kwargs: Any + ) -> AbstractRoute: + """Shortcut for add_route with ANY methods for a class-based view.""" + return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) + + def freeze(self) -> None: + super().freeze() + for resource in self._resources: + resource.freeze() + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + """Append routes to route table. 
+ + Parameter should be a sequence of RouteDef objects. + + Returns a list of registered AbstractRoute instances. + """ + registered_routes = [] + for route_def in routes: + registered_routes.extend(route_def.register(self)) + return registered_routes + + +def _quote_path(value: str) -> str: + if YARL_VERSION < (1, 6): + value = value.replace("%", "%25") + return URL.build(path=value, encoded=False).raw_path + + +def _unquote_path(value: str) -> str: + return URL.build(path=value, encoded=True).path + + +def _requote_path(value: str) -> str: + # Quote non-ascii characters and other characters which must be quoted, + # but preserve existing %-sequences. + result = _quote_path(value) + if "%" in value: + result = result.replace("%25", "%") + return result diff --git a/venv/lib/python3.12/site-packages/aiohttp/web_ws.py b/venv/lib/python3.12/site-packages/aiohttp/web_ws.py new file mode 100644 index 0000000..4e57bca --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/web_ws.py @@ -0,0 +1,515 @@ +import asyncio +import base64 +import binascii +import hashlib +import json +import sys +from typing import Any, Final, Iterable, Optional, Tuple, cast + +import attr +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import call_later, set_result +from .http import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WS_KEY, + WebSocketError, + WebSocketReader, + WebSocketWriter, + WSCloseCode, + WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen, + ws_ext_parse, +) +from .log import ws_logger +from .streams import EofStream, FlowControlDataQueue +from .typedefs import JSONDecoder, JSONEncoder +from .web_exceptions import HTTPBadRequest, HTTPException +from .web_request import BaseRequest +from .web_response import StreamResponse + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + +__all__ = ( + "WebSocketResponse", + "WebSocketReady", + "WSMsgType", +) + +THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class WebSocketReady: + ok: bool + protocol: Optional[str] + + def __bool__(self) -> bool: + return self.ok + + +class WebSocketResponse(StreamResponse): + + _length_check = False + + def __init__( + self, + *, + timeout: float = 10.0, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + protocols: Iterable[str] = (), + compress: bool = True, + max_msg_size: int = 4 * 1024 * 1024, + ) -> None: + super().__init__(status=101) + self._protocols = protocols + self._ws_protocol: Optional[str] = None + self._writer: Optional[WebSocketWriter] = None + self._reader: Optional[FlowControlDataQueue[WSMessage]] = None + self._closed = False + self._closing = False + self._conn_lost = 0 + self._close_code: Optional[int] = None + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._waiting: Optional[asyncio.Future[bool]] = None + self._exception: Optional[BaseException] = None + self._timeout = timeout + self._receive_timeout = receive_timeout + self._autoclose = autoclose + self._autoping = autoping + self._heartbeat = heartbeat + self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + if heartbeat is not None: + self._pong_heartbeat = heartbeat / 2.0 + self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._compress = compress + self._max_msg_size = max_msg_size + + def _cancel_heartbeat(self) -> None: + if self._pong_response_cb is not None: + 
self._pong_response_cb.cancel() + self._pong_response_cb = None + + if self._heartbeat_cb is not None: + self._heartbeat_cb.cancel() + self._heartbeat_cb = None + + def _reset_heartbeat(self) -> None: + self._cancel_heartbeat() + + if self._heartbeat is not None: + assert self._loop is not None + self._heartbeat_cb = call_later( + self._send_heartbeat, + self._heartbeat, + self._loop, + timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold + if self._req is not None + else 5, + ) + + def _send_heartbeat(self) -> None: + if self._heartbeat is not None and not self._closed: + assert self._loop is not None + # fire-and-forget a task is not perfect but maybe ok for + # sending ping. Otherwise we need a long-living heartbeat + # task in the class. + self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] + + if self._pong_response_cb is not None: + self._pong_response_cb.cancel() + self._pong_response_cb = call_later( + self._pong_not_received, + self._pong_heartbeat, + self._loop, + timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold + if self._req is not None + else 5, + ) + + def _pong_not_received(self) -> None: + if self._req is not None and self._req.transport is not None: + self._closed = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + self._req.transport.close() + + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: + # make pre-check to don't hide it by do_handshake() exceptions + if self._payload_writer is not None: + return self._payload_writer + + protocol, writer = self._pre_start(request) + payload_writer = await super().prepare(request) + assert payload_writer is not None + self._post_start(request, protocol, writer) + await payload_writer.drain() + return payload_writer + + def _handshake( + self, request: BaseRequest + ) -> Tuple["CIMultiDict[str]", str, bool, bool]: + headers = request.headers + if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): + raise HTTPBadRequest( + text=( + "No WebSocket UPGRADE hdr: {}\n Can " + '"Upgrade" only to "WebSocket".' 
+ ).format(headers.get(hdrs.UPGRADE)) + ) + + if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): + raise HTTPBadRequest( + text="No CONNECTION upgrade hdr: {}".format( + headers.get(hdrs.CONNECTION) + ) + ) + + # find common sub-protocol between client and server + protocol = None + if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: + req_protocols = [ + str(proto.strip()) + for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in req_protocols: + if proto in self._protocols: + protocol = proto + break + else: + # No overlap found: Return no protocol as per spec + ws_logger.warning( + "Client protocols %r don’t overlap server-known ones %r", + req_protocols, + self._protocols, + ) + + # check supported version + version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") + if version not in ("13", "8", "7"): + raise HTTPBadRequest(text=f"Unsupported version: {version}") + + # check client handshake for validity + key = headers.get(hdrs.SEC_WEBSOCKET_KEY) + try: + if not key or len(base64.b64decode(key)) != 16: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") + except binascii.Error: + raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None + + accept_val = base64.b64encode( + hashlib.sha1(key.encode() + WS_KEY).digest() + ).decode() + response_headers = CIMultiDict( + { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "upgrade", + hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, + } + ) + + notakeover = False + compress = 0 + if self._compress: + extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + # Server side always get return with no exception. + # If something happened, just drop compress extension + compress, notakeover = ws_ext_parse(extensions, isserver=True) + if compress: + enabledext = ws_ext_gen( + compress=compress, isserver=True, server_notakeover=notakeover + ) + response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext + + if protocol: + response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol + return ( + response_headers, + protocol, + compress, + notakeover, + ) # type: ignore[return-value] + + def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: + self._loop = request._loop + + headers, protocol, compress, notakeover = self._handshake(request) + + self.set_status(101) + self.headers.update(headers) + self.force_close() + self._compress = compress + transport = request._protocol.transport + assert transport is not None + writer = WebSocketWriter( + request._protocol, transport, compress=compress, notakeover=notakeover + ) + + return protocol, writer + + def _post_start( + self, request: BaseRequest, protocol: str, writer: WebSocketWriter + ) -> None: + self._ws_protocol = protocol + self._writer = writer + + self._reset_heartbeat() + + loop = self._loop + assert loop is not None + self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop) + request.protocol.set_parser( + WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) + ) + # disable HTTP keepalive for WebSocket + request.protocol.keep_alive(False) + + def can_prepare(self, request: BaseRequest) -> WebSocketReady: + if self._writer is not None: + raise RuntimeError("Already started") + try: + _, protocol, _, _ = self._handshake(request) + except HTTPException: + return WebSocketReady(False, None) + else: + return WebSocketReady(True, protocol) + + @property + def closed(self) -> bool: + return self._closed + + @property + def close_code(self) -> Optional[int]: + return self._close_code + + @property + def ws_protocol(self) 
-> Optional[str]: + return self._ws_protocol + + @property + def compress(self) -> bool: + return self._compress + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Get optional transport information. + + If no value associated with ``name`` is found, ``default`` is returned. + """ + writer = self._writer + if writer is None: + return default + transport = writer.transport + if transport is None: + return default + return transport.get_extra_info(name, default) + + def exception(self) -> Optional[BaseException]: + return self._exception + + async def ping(self, message: bytes = b"") -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.ping(message) + + async def pong(self, message: bytes = b"") -> None: + # unsolicited pong + if self._writer is None: + raise RuntimeError("Call .prepare() first") + await self._writer.pong(message) + + async def send_str(self, data: str, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, str): + raise TypeError("data argument must be str (%r)" % type(data)) + await self._writer.send(data, binary=False, compress=compress) + + async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data argument must be byte-ish (%r)" % type(data)) + await self._writer.send(data, binary=True, compress=compress) + + async def send_json( + self, + data: Any, + compress: Optional[bool] = None, + *, + dumps: JSONEncoder = json.dumps, + ) -> None: + await self.send_str(dumps(data), compress=compress) + + async def write_eof(self) -> None: # type: ignore[override] + if self._eof_sent: + return + if self._payload_writer is None: + raise RuntimeError("Response has not been started") + + await self.close() + self._eof_sent = True + + async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + if self._writer is None: + raise RuntimeError("Call .prepare() first") + + self._cancel_heartbeat() + reader = self._reader + assert reader is not None + + # we need to break `receive()` cycle first, + # `close()` may be called from different task + if self._waiting is not None and not self._closed: + reader.feed_data(WS_CLOSING_MESSAGE, 0) + await self._waiting + + if not self._closed: + self._closed = True + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + return True + + if self._closing: + return True + + reader = self._reader + assert reader is not None + try: + async with async_timeout.timeout(self._timeout): + msg = await reader.read() + except asyncio.CancelledError: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except Exception as exc: + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = exc + return True + + if msg.type == WSMsgType.CLOSE: + self._close_code = msg.data + return True + + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._exception = asyncio.TimeoutError() + return True + else: + return False + + async def receive(self, timeout: Optional[float] = None) -> WSMessage: + if self._reader is 
None: + raise RuntimeError("Call .prepare() first") + + loop = self._loop + assert loop is not None + while True: + if self._waiting is not None: + raise RuntimeError("Concurrent call to receive() is not allowed") + + if self._closed: + self._conn_lost += 1 + if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: + raise RuntimeError("WebSocket connection is closed.") + return WS_CLOSED_MESSAGE + elif self._closing: + return WS_CLOSING_MESSAGE + + try: + self._waiting = loop.create_future() + try: + async with async_timeout.timeout(timeout or self._receive_timeout): + msg = await self._reader.read() + self._reset_heartbeat() + finally: + waiter = self._waiting + set_result(waiter, True) + self._waiting = None + except (asyncio.CancelledError, asyncio.TimeoutError): + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + raise + except EofStream: + self._close_code = WSCloseCode.OK + await self.close() + return WSMessage(WSMsgType.CLOSED, None, None) + except WebSocketError as exc: + self._close_code = exc.code + await self.close(code=exc.code) + return WSMessage(WSMsgType.ERROR, exc, None) + except Exception as exc: + self._exception = exc + self._closing = True + self._close_code = WSCloseCode.ABNORMAL_CLOSURE + await self.close() + return WSMessage(WSMsgType.ERROR, exc, None) + + if msg.type == WSMsgType.CLOSE: + self._closing = True + self._close_code = msg.data + if not self._closed and self._autoclose: + await self.close() + elif msg.type == WSMsgType.CLOSING: + self._closing = True + elif msg.type == WSMsgType.PING and self._autoping: + await self.pong(msg.data) + continue + elif msg.type == WSMsgType.PONG and self._autoping: + continue + + return msg + + async def receive_str(self, *, timeout: Optional[float] = None) -> str: + msg = await self.receive(timeout) + if msg.type != WSMsgType.TEXT: + raise TypeError( + "Received message {}:{!r} is not WSMsgType.TEXT".format( + msg.type, msg.data + ) + ) + return cast(str, msg.data) + + async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + msg = await self.receive(timeout) + if msg.type != WSMsgType.BINARY: + raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") + return cast(bytes, msg.data) + + async def receive_json( + self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None + ) -> Any: + data = await self.receive_str(timeout=timeout) + return loads(data) + + async def write(self, data: bytes) -> None: + raise RuntimeError("Cannot call .write() for websocket") + + def __aiter__(self) -> "WebSocketResponse": + return self + + async def __anext__(self) -> WSMessage: + msg = await self.receive() + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): + raise StopAsyncIteration + return msg + + def _cancel(self, exc: BaseException) -> None: + if self._reader is not None: + self._reader.set_exception(exc) diff --git a/venv/lib/python3.12/site-packages/aiohttp/worker.py b/venv/lib/python3.12/site-packages/aiohttp/worker.py new file mode 100644 index 0000000..9b30769 --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiohttp/worker.py @@ -0,0 +1,247 @@ +"""Async gunicorn worker for aiohttp.web""" + +import asyncio +import os +import re +import signal +import sys +from types import FrameType +from typing import Any, Awaitable, Callable, Optional, Union # noqa + +from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat +from gunicorn.workers import base + +from aiohttp import web + +from .helpers import set_result +from .web_app import Application +from .web_log 
import AccessLogger + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker") + + +class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] + + DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT + DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default + + def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover + super().__init__(*args, **kw) + + self._task: Optional[asyncio.Task[None]] = None + self.exit_code = 0 + self._notify_waiter: Optional[asyncio.Future[bool]] = None + + def init_process(self) -> None: + # create new event_loop after fork + asyncio.get_event_loop().close() + + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + super().init_process() + + def run(self) -> None: + self._task = self.loop.create_task(self._run()) + + try: # ignore all finalization problems + self.loop.run_until_complete(self._task) + except Exception: + self.log.exception("Exception in gunicorn worker") + self.loop.run_until_complete(self.loop.shutdown_asyncgens()) + self.loop.close() + + sys.exit(self.exit_code) + + async def _run(self) -> None: + runner = None + if isinstance(self.wsgi, Application): + app = self.wsgi + elif asyncio.iscoroutinefunction(self.wsgi): + wsgi = await self.wsgi() + if isinstance(wsgi, web.AppRunner): + runner = wsgi + app = runner.app + else: + app = wsgi + else: + raise RuntimeError( + "wsgi app should be either Application or " + "async function returning Application, got {}".format(self.wsgi) + ) + + if runner is None: + access_log = self.log.access_log if self.cfg.accesslog else None + runner = web.AppRunner( + app, + logger=self.log, + keepalive_timeout=self.cfg.keepalive, + access_log=access_log, + access_log_format=self._get_valid_log_format( + self.cfg.access_log_format + ), + shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, + ) + await runner.setup() + + ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None + + runner = runner + assert runner is not None + server = runner.server + assert server is not None + for sock in self.sockets: + site = web.SockSite( + runner, + sock, + ssl_context=ctx, + ) + await site.start() + + # If our parent changed then we shut down. + pid = os.getpid() + try: + while self.alive: # type: ignore[has-type] + self.notify() + + cnt = server.requests_count + if self.max_requests and cnt > self.max_requests: + self.alive = False + self.log.info("Max requests, shutting down: %s", self) + + elif pid == os.getpid() and self.ppid != os.getppid(): + self.alive = False + self.log.info("Parent changed, shutting down: %s", self) + else: + await self._wait_next_notify() + except BaseException: + pass + + await runner.cleanup() + + def _wait_next_notify(self) -> "asyncio.Future[bool]": + self._notify_waiter_done() + + loop = self.loop + assert loop is not None + self._notify_waiter = waiter = loop.create_future() + self.loop.call_later(1.0, self._notify_waiter_done, waiter) + + return waiter + + def _notify_waiter_done( + self, waiter: Optional["asyncio.Future[bool]"] = None + ) -> None: + if waiter is None: + waiter = self._notify_waiter + if waiter is not None: + set_result(waiter, True) + + if waiter is self._notify_waiter: + self._notify_waiter = None + + def init_signals(self) -> None: + # Set up signals through the event loop API. 
+ + self.loop.add_signal_handler( + signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None + ) + + self.loop.add_signal_handler( + signal.SIGTERM, self.handle_exit, signal.SIGTERM, None + ) + + self.loop.add_signal_handler( + signal.SIGINT, self.handle_quit, signal.SIGINT, None + ) + + self.loop.add_signal_handler( + signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None + ) + + self.loop.add_signal_handler( + signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None + ) + + self.loop.add_signal_handler( + signal.SIGABRT, self.handle_abort, signal.SIGABRT, None + ) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + # Reset signals so Gunicorn doesn't swallow subprocess return codes + # See: https://github.com/aio-libs/aiohttp/issues/6130 + + def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None: + self.alive = False + + # worker_int callback + self.cfg.worker_int(self) + + # wakeup closing process + self._notify_waiter_done() + + def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None: + self.alive = False + self.exit_code = 1 + self.cfg.worker_abort(self) + sys.exit(1) + + @staticmethod + def _create_ssl_context(cfg: Any) -> "SSLContext": + """Creates SSLContext instance for usage in asyncio.create_server. + + See ssl.SSLSocket.__init__ for more details. + """ + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + + ctx = ssl.SSLContext(cfg.ssl_version) + ctx.load_cert_chain(cfg.certfile, cfg.keyfile) + ctx.verify_mode = cfg.cert_reqs + if cfg.ca_certs: + ctx.load_verify_locations(cfg.ca_certs) + if cfg.ciphers: + ctx.set_ciphers(cfg.ciphers) + return ctx + + def _get_valid_log_format(self, source_format: str) -> str: + if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: + return self.DEFAULT_AIOHTTP_LOG_FORMAT + elif re.search(r"%\([^\)]+\)", source_format): + raise ValueError( + "Gunicorn's style options in form of `%(name)s` are not " + "supported for the log formatting. Please use aiohttp's " + "format specification to configure access log formatting: " + "http://docs.aiohttp.org/en/stable/logging.html" + "#format-specification" + ) + else: + return source_format + + +class GunicornUVLoopWebWorker(GunicornWebWorker): + def init_process(self) -> None: + import uvloop + + # Close any existing event loop before setting a + # new policy. + asyncio.get_event_loop().close() + + # Setup uvloop policy, so that every + # asyncio.get_event_loop() will create an instance + # of uvloop event loop. 
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + + super().init_process() diff --git a/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/METADATA new file mode 100644 index 0000000..03a6f0f --- /dev/null +++ b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/METADATA @@ -0,0 +1,112 @@ +Metadata-Version: 2.4 +Name: aiosignal +Version: 1.4.0 +Summary: aiosignal: a list of registered asynchronous callbacks +Home-page: https://github.com/aio-libs/aiosignal +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache 2.0 +Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby +Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal +Project-URL: Docs: RTD, https://docs.aiosignal.org +Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Framework :: AsyncIO +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: frozenlist>=1.1.0 +Requires-Dist: typing-extensions>=4.2; python_version < "3.13" +Dynamic: license-file + +========= +aiosignal +========= + +.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg + :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI + :alt: GitHub status for master branch + +.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg?flag=pytest + :target: https://codecov.io/gh/aio-libs/aiosignal?flags[0]=pytest + :alt: codecov.io status for master branch + +.. image:: https://badge.fury.io/py/aiosignal.svg + :target: https://pypi.org/project/aiosignal + :alt: Latest PyPI package version + +.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest + :target: https://aiosignal.readthedocs.io/ + :alt: Latest Read The Docs + +.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F + :target: https://aio-libs.discourse.group/ + :alt: Discourse group for io-libs + +.. image:: https://badges.gitter.im/Join%20Chat.svg + :target: https://gitter.im/aio-libs/Lobby + :alt: Chat on Gitter + +Introduction +============ + +A project to manage callbacks in `asyncio` projects. + +``Signal`` is a list of registered asynchronous callbacks. + +The signal's life-cycle has two stages: after creation its content +could be filled by using standard list operations: ``sig.append()`` +etc. + +After you call ``sig.freeze()`` the signal is *frozen*: adding, removing +and dropping callbacks is forbidden. 
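To make that two-stage life-cycle concrete, here is a minimal sketch (the ``greet`` callback and the ``owner="app"`` value are illustrative only; ``append()``, ``freeze()`` and ``send()`` are the API implemented in the ``aiosignal/__init__.py`` source added later in this commit)::

    import asyncio

    from aiosignal import Signal

    async def greet(name: str) -> None:
        # any coroutine may be registered while the signal is unfrozen
        print(f"hello, {name}")

    sig = Signal(owner="app")  # owner is only reported back in repr()
    sig.append(greet)          # first stage: fill with list operations
    sig.freeze()               # second stage: mutation is now forbidden

    # every registered callback is awaited, in registration order
    asyncio.run(sig.send("world"))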
+
+The only available operation is calling the previously registered
+callbacks by using ``await sig.send(data)``.
+
+For concrete usage examples see the `Signals
+<https://docs.aiohttp.org/en/stable/web_advanced.html#signals>`_
+section of the `Web Server Advanced
+<https://docs.aiohttp.org/en/stable/web_advanced.html>`_ chapter of the `aiohttp
+documentation`_.
+
+
+Installation
+------------
+
+::
+
+   $ pip install aiosignal
+
+
+Documentation
+=============
+
+https://aiosignal.readthedocs.io/
+
+License
+=======
+
+``aiosignal`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/aiosignal
+.. _aiohttp documentation: https://docs.aiohttp.org/
diff --git a/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/RECORD
new file mode 100644
index 0000000..11bc848
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+aiosignal-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiosignal-1.4.0.dist-info/METADATA,sha256=CSR-8dqLxpZyjUcTDnAuQwf299EB1sSFv_nzpxznAI0,3662
+aiosignal-1.4.0.dist-info/RECORD,,
+aiosignal-1.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aiosignal-1.4.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
+aiosignal-1.4.0.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10
+aiosignal/__init__.py,sha256=TIkmUG9HTBt4dfq2nISYBiZiRB2xwvFtEZydLP0HPL4,1537
+aiosignal/__pycache__/__init__.cpython-312.pyc,,
+aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/WHEEL
new file mode 100644
index 0000000..e7fa31b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000..7082a2d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000..ac6df3a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiosignal-1.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+aiosignal
diff --git a/venv/lib/python3.12/site-packages/aiosignal/__init__.py b/venv/lib/python3.12/site-packages/aiosignal/__init__.py
new file mode 100644
index 0000000..5ede009
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/aiosignal/__init__.py
@@ -0,0 +1,59 @@
+import sys
+from typing import Any, Awaitable, Callable, TypeVar
+
+from frozenlist import FrozenList
+
+if sys.version_info >= (3, 11):
+    from typing import Unpack
+else:
+    from typing_extensions import Unpack
+
+if sys.version_info >= (3, 13):
+    from typing import TypeVarTuple
+else:
+    from typing_extensions import TypeVarTuple
+
+_T = TypeVar("_T")
+_Ts = TypeVarTuple("_Ts", default=Unpack[tuple[()]])
+
+__version__ = "1.4.0"
+
+__all__ = ("Signal",)
+
+
+class Signal(FrozenList[Callable[[Unpack[_Ts]], Awaitable[object]]]):
+    """Coroutine-based signal implementation.
+
+    To connect a callback to a signal, use any list method.
+
+    Signals are fired using the send() coroutine, which takes named
+    arguments.
+    """
+
+    __slots__ = ("_owner",)
+
+    def __init__(self, owner: object):
+        super().__init__()
+        self._owner = owner
+
+    def __repr__(self) -> str:
+        return "<Signal owner={}, frozen={}, {!r}>".format(
+            self._owner, self.frozen, list(self)
+        )
+
+    async def send(self, *args: Unpack[_Ts], **kwargs: Any) -> None:
+        """
+        Sends data to all registered receivers.
+        """
+        if not self.frozen:
+            raise RuntimeError("Cannot send non-frozen signal.")
+
+        for receiver in self:
+            await receiver(*args, **kwargs)
+
+    def __call__(
+        self, func: Callable[[Unpack[_Ts]], Awaitable[_T]]
+    ) -> Callable[[Unpack[_Ts]], Awaitable[_T]]:
+        """Decorator to add a function to this Signal."""
+        self.append(func)
+        return func
diff --git a/venv/lib/python3.12/site-packages/aiosignal/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/aiosignal/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..d9e4346
Binary files /dev/null and b/venv/lib/python3.12/site-packages/aiosignal/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/aiosignal/py.typed b/venv/lib/python3.12/site-packages/aiosignal/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/LICENSE b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/LICENSE
new file mode 100644
index 0000000..74b9ce3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright 2009-2023 Michael Bayer.
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/METADATA new file mode 100644 index 0000000..7a6884d --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/METADATA @@ -0,0 +1,142 @@ +Metadata-Version: 2.1 +Name: alembic +Version: 1.13.1 +Summary: A database migration tool for SQLAlchemy. +Home-page: https://alembic.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/ +Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html +Project-URL: Source, https://github.com/sqlalchemy/alembic/ +Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Environment :: Console +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: SQLAlchemy >=1.3.0 +Requires-Dist: Mako +Requires-Dist: typing-extensions >=4 +Requires-Dist: importlib-metadata ; python_version < "3.9" +Requires-Dist: importlib-resources ; python_version < "3.9" +Provides-Extra: tz +Requires-Dist: backports.zoneinfo ; (python_version < "3.9") and extra == 'tz' + +Alembic is a database migrations tool written by the author +of `SQLAlchemy <http://www.sqlalchemy.org>`_.
A migrations tool +offers the following functionality: + +* Can emit ALTER statements to a database in order to change + the structure of tables and other constructs +* Provides a system whereby "migration scripts" may be constructed; + each script indicates a particular series of steps that can "upgrade" a + target database to a new version, and optionally a series of steps that can + "downgrade" similarly, doing the same steps in reverse. +* Allows the scripts to execute in some sequential manner. + +The goals of Alembic are: + +* Very open ended and transparent configuration and operation. A new + Alembic environment is generated from a set of templates which is selected + among a set of options when setup first occurs. The templates then deposit a + series of scripts that define fully how database connectivity is established + and how migration scripts are invoked; the migration scripts themselves are + generated from a template within that series of scripts. The scripts can + then be further customized to define exactly how databases will be + interacted with and what structure new migration files should take. +* Full support for transactional DDL. The default scripts ensure that all + migrations occur within a transaction - for those databases which support + this (Postgresql, Microsoft SQL Server), migrations can be tested with no + need to manually undo changes upon failure. +* Minimalist script construction. Basic operations like renaming + tables/columns, adding/removing columns, changing column attributes can be + performed through one line commands like alter_column(), rename_table(), + add_constraint(). There is no need to recreate full SQLAlchemy Table + structures for simple operations like these - the functions themselves + generate minimalist schema structures behind the scenes to achieve the given + DDL sequence. +* "auto generation" of migrations. While real world migrations are far more + complex than what can be automatically determined, Alembic can still + eliminate the initial grunt work in generating new migration directives + from an altered schema. The ``--autogenerate`` feature will inspect the + current status of a database using SQLAlchemy's schema inspection + capabilities, compare it to the current state of the database model as + specified in Python, and generate a series of "candidate" migrations, + rendering them into a new migration script as Python directives. The + developer then edits the new file, adding additional directives and data + migrations as needed, to produce a finished migration. Table and column + level changes can be detected, with constraints and indexes to follow as + well. +* Full support for migrations generated as SQL scripts. Those of us who + work in corporate environments know that direct access to DDL commands on a + production database is a rare privilege, and DBAs want textual SQL scripts. + Alembic's usage model and commands are oriented towards being able to run a + series of migrations into a textual output file as easily as it runs them + directly to a database. Care must be taken in this mode to not invoke other + operations that rely upon in-memory SELECTs of rows - Alembic tries to + provide helper constructs like bulk_insert() to help with data-oriented + operations that are compatible with script-based DDL. +* Non-linear, dependency-graph versioning. Scripts are given UUID + identifiers similarly to a DVCS, and the linkage of one script to the next + is achieved via human-editable markers within the scripts themselves. 
+ The structure of a set of migration files is considered as a + directed-acyclic graph, meaning any migration file can be dependent + on any other arbitrary set of migration files, or none at + all. Through this open-ended system, migration files can be organized + into branches, multiple roots, and mergepoints, without restriction. + Commands are provided to produce new branches, roots, and merges of + branches automatically. +* Provide a library of ALTER constructs that can be used by any SQLAlchemy + application. The DDL constructs build upon SQLAlchemy's own DDLElement base + and can be used standalone by any application or script. +* At long last, bring SQLite and its inability to ALTER things into the fold, + but in such a way that SQLite's very special workflow needs are accommodated + in an explicit way that makes the most of a bad situation, through the + concept of a "batch" migration, where multiple changes to a table can + be batched together to form a series of instructions for a single, subsequent + "move-and-copy" workflow. You can even use "move-and-copy" workflow for + other databases, if you want to recreate a table in the background + on a busy system. + +Documentation and status of Alembic is at https://alembic.sqlalchemy.org/ + +The SQLAlchemy Project +====================== + +Alembic is part of the `SQLAlchemy Project <https://www.sqlalchemy.org>`_ and +adheres to the same standards and conventions as the core project. + +Development / Bug reporting / Pull requests +___________________________________________ + +Please refer to the +`SQLAlchemy Community Guide <https://www.sqlalchemy.org/develop.html>`_ for +guidelines on coding and participating in this project. + +Code of Conduct +_______________ + +Above all, SQLAlchemy places great emphasis on polite, thoughtful, and +constructive communication between users and developers. +Please see our current Code of Conduct at +`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_. + +License +======= + +Alembic is distributed under the `MIT license +<https://opensource.org/licenses/MIT>`_.
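The METADATA description above covers Alembic's core workflow: versioned scripts with paired upgrade()/downgrade() steps, composed from one-line operations such as add_column() and alter_column() rather than full Table definitions. A minimal sketch of the script shape that workflow produces; the revision identifiers and the account table are hypothetical, not taken from this repository:

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic (hypothetical values)
revision = "1975ea83b712"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # one-line directives; Alembic emits the corresponding ALTER statements
    op.add_column(
        "account", sa.Column("last_seen", sa.DateTime(), nullable=True)
    )
    op.alter_column(
        "account", "name", existing_type=sa.String(50), nullable=False
    )


def downgrade() -> None:
    # mirror of upgrade(), applied in reverse order
    op.alter_column(
        "account", "name", existing_type=sa.String(50), nullable=True
    )
    op.drop_column("account", "last_seen")

The same script can be rendered as a textual SQL file instead of executed directly (alembic upgrade head --sql), matching the "migrations generated as SQL scripts" mode described above.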
diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/RECORD new file mode 100644 index 0000000..8038424 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/RECORD @@ -0,0 +1,151 @@ +../../../bin/alembic,sha256=jS423RgCubdZtooh4kHWPxqZLqLe5Tyt1xvBzqAsGlU,205 +alembic-1.13.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +alembic-1.13.1.dist-info/LICENSE,sha256=soUmiob0QW6vTQWyrjiAwVb3xZqPk1pAK8BW6vszrwg,1058 +alembic-1.13.1.dist-info/METADATA,sha256=W1F2NBRkhqW55HiGmEIpdmiRt2skU5wwJd21UHFbSdQ,7390 +alembic-1.13.1.dist-info/RECORD,, +alembic-1.13.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic-1.13.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +alembic-1.13.1.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48 +alembic-1.13.1.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8 +alembic/__init__.py,sha256=PMiQT_1tHyC_Od3GDBArBk7r14v8F62_xkzliPq9tLU,63 +alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78 +alembic/__pycache__/__init__.cpython-312.pyc,, +alembic/__pycache__/__main__.cpython-312.pyc,, +alembic/__pycache__/command.cpython-312.pyc,, +alembic/__pycache__/config.cpython-312.pyc,, +alembic/__pycache__/context.cpython-312.pyc,, +alembic/__pycache__/environment.cpython-312.pyc,, +alembic/__pycache__/migration.cpython-312.pyc,, +alembic/__pycache__/op.cpython-312.pyc,, +alembic/autogenerate/__init__.py,sha256=ntmUTXhjLm4_zmqIwyVaECdpPDn6_u1yM9vYk6-553E,543 +alembic/autogenerate/__pycache__/__init__.cpython-312.pyc,, +alembic/autogenerate/__pycache__/api.cpython-312.pyc,, +alembic/autogenerate/__pycache__/compare.cpython-312.pyc,, +alembic/autogenerate/__pycache__/render.cpython-312.pyc,, +alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc,, +alembic/autogenerate/api.py,sha256=Oc7MRtDhkSICsQ82fYP9bBMYaAjzzW2X_izM3AQU-OY,22171 +alembic/autogenerate/compare.py,sha256=3QLK2yCDW37bXbAIXcHGz4YOFlOW8bSfLHJ8bmzgG1M,44938 +alembic/autogenerate/render.py,sha256=uSbCpkh72mo00xGZ8CJa3teM_gqulCoNtxH0Ey8Ny1k,34939 +alembic/autogenerate/rewriter.py,sha256=uZWRkTYJoncoEJ5WY1QBRiozjyChqZDJPy4LtcRibjM,7846 +alembic/command.py,sha256=jWFNS-wPWA-Klfm0GsPfWh_8sPj4n7rKROJ0zrwhoR0,21712 +alembic/config.py,sha256=I12lm4V-AXSt-7nvub-Vtx5Zfa68pYP5xSrFQQd45rQ,22256 +alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195 +alembic/context.pyi,sha256=hUHbSnbSeEEMVkk0gDSXOq4_9edSjYzsjmmf-mL9Iao,31737 +alembic/ddl/__init__.py,sha256=Df8fy4Vn_abP8B7q3x8gyFwEwnLw6hs2Ljt_bV3EZWE,152 +alembic/ddl/__pycache__/__init__.cpython-312.pyc,, +alembic/ddl/__pycache__/_autogen.cpython-312.pyc,, +alembic/ddl/__pycache__/base.cpython-312.pyc,, +alembic/ddl/__pycache__/impl.cpython-312.pyc,, +alembic/ddl/__pycache__/mssql.cpython-312.pyc,, +alembic/ddl/__pycache__/mysql.cpython-312.pyc,, +alembic/ddl/__pycache__/oracle.cpython-312.pyc,, +alembic/ddl/__pycache__/postgresql.cpython-312.pyc,, +alembic/ddl/__pycache__/sqlite.cpython-312.pyc,, +alembic/ddl/_autogen.py,sha256=0no9ywWP8gjvO57Ozc2naab4qNusVNn2fiJekjc275g,9179 +alembic/ddl/base.py,sha256=Jd7oPoAOGjOMcdMUIzSKnTjd8NKnTd7IjBXXyVpDCkU,9955 +alembic/ddl/impl.py,sha256=vkhkXFpLPJBG9jW2kv_sR5CC5czNd1ERLjLtfLuOFP0,28778 +alembic/ddl/mssql.py,sha256=ydvgBSaftKYjaBaMyqius66Ta4CICQSj79Og3Ed2atY,14219 +alembic/ddl/mysql.py,sha256=am221U_UK3wX33tNcXNiOObZV-Pa4CXuv7vN-epF9IU,16788 
+alembic/ddl/oracle.py,sha256=TmoCq_FlbfyWAAk3e_q6mMQU0YmlfIcgKHpRfNMmgr0,6211 +alembic/ddl/postgresql.py,sha256=dcWLdDSqivzizVCce_H6RnOVAayPXDFfns-NC4-UaA8,29842 +alembic/ddl/sqlite.py,sha256=wLXhb8bJWRspKQTb-iVfepR4LXYgOuEbUWKX5qwDhIQ,7570 +alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43 +alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41 +alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167 +alembic/op.pyi,sha256=8R6SJr1dQharU0blmMJJj23XFO_hk9ZmAQBJBQOeXRU,49816 +alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318 +alembic/operations/__pycache__/__init__.cpython-312.pyc,, +alembic/operations/__pycache__/base.cpython-312.pyc,, +alembic/operations/__pycache__/batch.cpython-312.pyc,, +alembic/operations/__pycache__/ops.cpython-312.pyc,, +alembic/operations/__pycache__/schemaobj.cpython-312.pyc,, +alembic/operations/__pycache__/toimpl.cpython-312.pyc,, +alembic/operations/base.py,sha256=LCx4NH5NA2NLWQFaZTqE_p2KgLtqJ76oVcp1Grj-zFM,74004 +alembic/operations/batch.py,sha256=YqtD4hJ3_RkFxvI7zbmBwxcLEyLHYyWQpsz4l5L85yI,26943 +alembic/operations/ops.py,sha256=2vtYFhYFDEVq158HwORfGTsobDM7c-t0lewuR7JKw7g,94353 +alembic/operations/schemaobj.py,sha256=vjoD57QvjbzzA-jyPIXulbOmb5_bGPtxoq58KsKI4Y0,9424 +alembic/operations/toimpl.py,sha256=SoNY2_gZX2baXTD-pNjpCWnON8D2QBSYQBIjo13-WKA,7115 +alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic/runtime/__pycache__/__init__.cpython-312.pyc,, +alembic/runtime/__pycache__/environment.cpython-312.pyc,, +alembic/runtime/__pycache__/migration.cpython-312.pyc,, +alembic/runtime/environment.py,sha256=9wSJaePNAXBXvirif_85ql7dSq4bXM1E6pSb2k-6uGI,41508 +alembic/runtime/migration.py,sha256=Yfv2fa11wiQ0WgoZaFldlWxCPq4dVDOCEOxST_-1VB0,50066 +alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100 +alembic/script/__pycache__/__init__.cpython-312.pyc,, +alembic/script/__pycache__/base.cpython-312.pyc,, +alembic/script/__pycache__/revision.cpython-312.pyc,, +alembic/script/__pycache__/write_hooks.cpython-312.pyc,, +alembic/script/base.py,sha256=4gkppn2FKCYDoBgzGkTslcwdyxSabmHvGq0uGKulwbI,37586 +alembic/script/revision.py,sha256=sfnXQw2UwiXs0E6gEPHBKWuSsB5KyuxZPTrFn__hIEk,62060 +alembic/script/write_hooks.py,sha256=NGB6NGgfdf7HK6XNNpSKqUCfzxazj-NRUePgFx7MJSM,5036 +alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58 +alembic/templates/async/__pycache__/env.cpython-312.pyc,, +alembic/templates/async/alembic.ini.mako,sha256=uuhJETLWQuiYcs_jAOXHEjshEJ7VslEc1q4RRj0HWbE,3525 +alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389 +alembic/templates/async/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635 +alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38 +alembic/templates/generic/__pycache__/env.cpython-312.pyc,, +alembic/templates/generic/alembic.ini.mako,sha256=sT7F852yN3c8X1-GKFlhuWExXxw9hY1eb1ZZ9flFSzc,3634 +alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103 +alembic/templates/generic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635 +alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606 +alembic/templates/multidb/__pycache__/env.cpython-312.pyc,, 
+alembic/templates/multidb/alembic.ini.mako,sha256=mPh8JFJfWiGs6tMtL8_HAQ-Dz1QOoJgE5Vm76nIMqgU,3728 +alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230 +alembic/templates/multidb/script.py.mako,sha256=N06nMtNSwHkgl0EBXDyMt8njp9tlOesR583gfq21nbY,1090 +alembic/testing/__init__.py,sha256=kOxOh5nwmui9d-_CCq9WA4Udwy7ITjm453w74CTLZDo,1159 +alembic/testing/__pycache__/__init__.cpython-312.pyc,, +alembic/testing/__pycache__/assertions.cpython-312.pyc,, +alembic/testing/__pycache__/env.cpython-312.pyc,, +alembic/testing/__pycache__/fixtures.cpython-312.pyc,, +alembic/testing/__pycache__/requirements.cpython-312.pyc,, +alembic/testing/__pycache__/schemacompare.cpython-312.pyc,, +alembic/testing/__pycache__/util.cpython-312.pyc,, +alembic/testing/__pycache__/warnings.cpython-312.pyc,, +alembic/testing/assertions.py,sha256=1CbJk8c8-WO9eJ0XJ0jJvMsNRLUrXV41NOeIJUAlOBk,5015 +alembic/testing/env.py,sha256=zJacVb_z6uLs2U1TtkmnFH9P3_F-3IfYbVv4UEPOvfo,10754 +alembic/testing/fixtures.py,sha256=NyP4wE_dFN9ZzSGiBagRu1cdzkka03nwJYJYHYrrkSY,9112 +alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc,, +alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,, +alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50 +alembic/testing/requirements.py,sha256=dKeAO1l5TwBqXarJN-IPORlCqCJv-41Dj6oXoEikxHQ,5133 +alembic/testing/schemacompare.py,sha256=N5UqSNCOJetIKC4vKhpYzQEpj08XkdgIoqBmEPQ3tlc,4838 +alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288 +alembic/testing/suite/__pycache__/__init__.cpython-312.pyc,, +alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc,, +alembic/testing/suite/__pycache__/test_op.cpython-312.pyc,, +alembic/testing/suite/_autogen_fixtures.py,sha256=cDq1pmzHe15S6dZPGNC6sqFaCQ3hLT_oPV2IDigUGQ0,9880 +alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283 +alembic/testing/suite/test_autogen_computed.py,sha256=qJeBpc8urnwTFvbwWrSTIbHVkRUuCXP-dKaNbUK2U2U,6077 +alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394 +alembic/testing/suite/test_autogen_fks.py,sha256=AqFmb26Buex167HYa9dZWOk8x-JlB1OK3bwcvvjDFaU,32927 +alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824 +alembic/testing/suite/test_environment.py,sha256=w9F0xnLEbALeR8k6_-Tz6JHvy91IqiTSypNasVzXfZQ,11877 +alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343 +alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350 +alembic/testing/warnings.py,sha256=RxA7x_8GseANgw07Us8JN_1iGbANxaw6_VitX2ZGQH4,1078 +alembic/util/__init__.py,sha256=KSZ7UT2YzH6CietgUtljVoE3QnGjoFKOi7RL5sgUxrk,1688 +alembic/util/__pycache__/__init__.cpython-312.pyc,, +alembic/util/__pycache__/compat.cpython-312.pyc,, +alembic/util/__pycache__/editor.cpython-312.pyc,, +alembic/util/__pycache__/exc.cpython-312.pyc,, 
+alembic/util/__pycache__/langhelpers.cpython-312.pyc,, +alembic/util/__pycache__/messaging.cpython-312.pyc,, +alembic/util/__pycache__/pyfiles.cpython-312.pyc,, +alembic/util/__pycache__/sqla_compat.cpython-312.pyc,, +alembic/util/compat.py,sha256=RjHdQa1NomU3Zlvgfvza0OMiSRQSLRL3xVl3OdUy2UE,2594 +alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546 +alembic/util/exc.py,sha256=KQTru4zcgAmN4IxLMwLFS56XToUewaXB7oOLcPNjPwg,98 +alembic/util/langhelpers.py,sha256=KYyOjFjJ26evPmrwhdTvLQNXN0bK7AIy5sRdKD91Fvg,10038 +alembic/util/messaging.py,sha256=BM5OCZ6qmLftFRw5yPSxj539_QmfVwNYoU8qYsDqoJY,3132 +alembic/util/pyfiles.py,sha256=zltVdcwEJJCPS2gHsQvkHkQakuF6wXiZ6zfwHbGNT0g,3489 +alembic/util/sqla_compat.py,sha256=toD1S63PgZ6iEteP9bwIf5E7DIUdQPo0UQ_Fn18qWnI,19536 diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/entry_points.txt new file mode 100644 index 0000000..5945268 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +alembic = alembic.config:main diff --git a/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/top_level.txt new file mode 100644 index 0000000..b5bd98d --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic-1.13.1.dist-info/top_level.txt @@ -0,0 +1 @@ +alembic diff --git a/venv/lib/python3.12/site-packages/alembic/__init__.py b/venv/lib/python3.12/site-packages/alembic/__init__.py new file mode 100644 index 0000000..c153c8a --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/__init__.py @@ -0,0 +1,4 @@ +from . import context +from . 
import op + +__version__ = "1.13.1" diff --git a/venv/lib/python3.12/site-packages/alembic/__main__.py b/venv/lib/python3.12/site-packages/alembic/__main__.py new file mode 100644 index 0000000..af1b8e8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/__main__.py @@ -0,0 +1,4 @@ +from .config import main + +if __name__ == "__main__": + main(prog="alembic") diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f816c90 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..924a263 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/command.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/command.cpython-312.pyc new file mode 100644 index 0000000..fcdb43c Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/command.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..2bad6be Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/context.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/context.cpython-312.pyc new file mode 100644 index 0000000..cdc61b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/context.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/environment.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/environment.cpython-312.pyc new file mode 100644 index 0000000..f9f3225 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/environment.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/migration.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/migration.cpython-312.pyc new file mode 100644 index 0000000..4a6c15b Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/migration.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/__pycache__/op.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/__pycache__/op.cpython-312.pyc new file mode 100644 index 0000000..4ecf628 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/__pycache__/op.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/__init__.py b/venv/lib/python3.12/site-packages/alembic/autogenerate/__init__.py new file mode 100644 index 0000000..445ddb2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/autogenerate/__init__.py @@ -0,0 +1,10 @@ +from .api import _render_migration_diffs as _render_migration_diffs +from .api import compare_metadata as compare_metadata +from .api import produce_migrations as produce_migrations +from .api import render_python_code as render_python_code +from 
.api import RevisionContext as RevisionContext +from .compare import _produce_net_changes as _produce_net_changes +from .compare import comparators as comparators +from .render import render_op_text as render_op_text +from .render import renderers as renderers +from .rewriter import Rewriter as Rewriter diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6865348 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/api.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/api.cpython-312.pyc new file mode 100644 index 0000000..b6799b1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/compare.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/compare.cpython-312.pyc new file mode 100644 index 0000000..6fde28b Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/compare.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/render.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/render.cpython-312.pyc new file mode 100644 index 0000000..a3a5563 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/render.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc new file mode 100644 index 0000000..0aefe63 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/api.py b/venv/lib/python3.12/site-packages/alembic/autogenerate/api.py new file mode 100644 index 0000000..aa8f32f --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/autogenerate/api.py @@ -0,0 +1,650 @@ +from __future__ import annotations + +import contextlib +from typing import Any +from typing import Dict +from typing import Iterator +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import inspect + +from . import compare +from . import render +from .. 
import util +from ..operations import ops +from ..util import sqla_compat + +"""Provide the 'autogenerate' feature which can produce migration operations +automatically.""" + +if TYPE_CHECKING: + from sqlalchemy.engine import Connection + from sqlalchemy.engine import Dialect + from sqlalchemy.engine import Inspector + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.sql.schema import Table + + from ..config import Config + from ..operations.ops import DowngradeOps + from ..operations.ops import MigrationScript + from ..operations.ops import UpgradeOps + from ..runtime.environment import NameFilterParentNames + from ..runtime.environment import NameFilterType + from ..runtime.environment import ProcessRevisionDirectiveFn + from ..runtime.environment import RenderItemFn + from ..runtime.migration import MigrationContext + from ..script.base import Script + from ..script.base import ScriptDirectory + from ..script.revision import _GetRevArg + + +def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any: + """Compare a database schema to that given in a + :class:`~sqlalchemy.schema.MetaData` instance. + + The database connection is presented in the context + of a :class:`.MigrationContext` object, which + provides database connectivity as well as optional + comparison functions to use for datatypes and + server defaults - see the "autogenerate" arguments + at :meth:`.EnvironmentContext.configure` + for details on these. + + The return format is a list of "diff" directives, + each representing individual differences:: + + from alembic.migration import MigrationContext + from alembic.autogenerate import compare_metadata + from sqlalchemy import ( + create_engine, + MetaData, + Column, + Integer, + String, + Table, + text, + ) + import pprint + + engine = create_engine("sqlite://") + + with engine.begin() as conn: + conn.execute( + text( + ''' + create table foo ( + id integer not null primary key, + old_data varchar, + x integer + ) + ''' + ) + ) + conn.execute(text("create table bar (data varchar)")) + + metadata = MetaData() + Table( + "foo", + metadata, + Column("id", Integer, primary_key=True), + Column("data", Integer), + Column("x", Integer, nullable=False), + ) + Table("bat", metadata, Column("info", String)) + + mc = MigrationContext.configure(engine.connect()) + + diff = compare_metadata(mc, metadata) + pprint.pprint(diff, indent=2, width=20) + + Output:: + + [ + ( + "add_table", + Table( + "bat", + MetaData(), + Column("info", String(), table=<bat>), + schema=None, + ), + ), + ( + "remove_table", + Table( + "bar", + MetaData(), + Column("data", VARCHAR(), table=<bar>), + schema=None, + ), + ), + ( + "add_column", + None, + "foo", + Column("data", Integer(), table=<foo>), + ), + [ + ( + "modify_nullable", + None, + "foo", + "x", + { + "existing_comment": None, + "existing_server_default": False, + "existing_type": INTEGER(), + }, + True, + False, + ) + ], + ( + "remove_column", + None, + "foo", + Column("old_data", VARCHAR(), table=<foo>), + ), + ] + + :param context: a :class:`.MigrationContext` + instance. + :param metadata: a :class:`~sqlalchemy.schema.MetaData` + instance. + + .. seealso:: + + :func:`.produce_migrations` - produces a :class:`.MigrationScript` + structure based on metadata comparison.
+ + """ + + migration_script = produce_migrations(context, metadata) + assert migration_script.upgrade_ops is not None + return migration_script.upgrade_ops.as_diffs() + + +def produce_migrations( + context: MigrationContext, metadata: MetaData +) -> MigrationScript: + """Produce a :class:`.MigrationScript` structure based on schema + comparison. + + This function does essentially what :func:`.compare_metadata` does, + but then runs the resulting list of diffs to produce the full + :class:`.MigrationScript` object. For an example of what this looks like, + see the example in :ref:`customizing_revision`. + + .. seealso:: + + :func:`.compare_metadata` - returns more fundamental "diff" + data from comparing a schema. + + """ + + autogen_context = AutogenContext(context, metadata=metadata) + + migration_script = ops.MigrationScript( + rev_id=None, + upgrade_ops=ops.UpgradeOps([]), + downgrade_ops=ops.DowngradeOps([]), + ) + + compare._populate_migration_script(autogen_context, migration_script) + + return migration_script + + +def render_python_code( + up_or_down_op: Union[UpgradeOps, DowngradeOps], + sqlalchemy_module_prefix: str = "sa.", + alembic_module_prefix: str = "op.", + render_as_batch: bool = False, + imports: Sequence[str] = (), + render_item: Optional[RenderItemFn] = None, + migration_context: Optional[MigrationContext] = None, + user_module_prefix: Optional[str] = None, +) -> str: + """Render Python code given an :class:`.UpgradeOps` or + :class:`.DowngradeOps` object. + + This is a convenience function that can be used to test the + autogenerate output of a user-defined :class:`.MigrationScript` structure. + + :param up_or_down_op: :class:`.UpgradeOps` or :class:`.DowngradeOps` object + :param sqlalchemy_module_prefix: module prefix for SQLAlchemy objects + :param alembic_module_prefix: module prefix for Alembic constructs + :param render_as_batch: use "batch operations" style for rendering + :param imports: sequence of import symbols to add + :param render_item: callable to render items + :param migration_context: optional :class:`.MigrationContext` + :param user_module_prefix: optional string prefix for user-defined types + + .. 
versionadded:: 1.11.0 + + """ + opts = { + "sqlalchemy_module_prefix": sqlalchemy_module_prefix, + "alembic_module_prefix": alembic_module_prefix, + "render_item": render_item, + "render_as_batch": render_as_batch, + "user_module_prefix": user_module_prefix, + } + + if migration_context is None: + from ..runtime.migration import MigrationContext + from sqlalchemy.engine.default import DefaultDialect + + migration_context = MigrationContext.configure( + dialect=DefaultDialect() + ) + + autogen_context = AutogenContext(migration_context, opts=opts) + autogen_context.imports = set(imports) + return render._indent( + render._render_cmd_body(up_or_down_op, autogen_context) + ) + + +def _render_migration_diffs( + context: MigrationContext, template_args: Dict[Any, Any] +) -> None: + """legacy, used by test_autogen_composition at the moment""" + + autogen_context = AutogenContext(context) + + upgrade_ops = ops.UpgradeOps([]) + compare._produce_net_changes(autogen_context, upgrade_ops) + + migration_script = ops.MigrationScript( + rev_id=None, + upgrade_ops=upgrade_ops, + downgrade_ops=upgrade_ops.reverse(), + ) + + render._render_python_into_templatevars( + autogen_context, migration_script, template_args + ) + + +class AutogenContext: + """Maintains configuration and state that's specific to an + autogenerate operation.""" + + metadata: Optional[MetaData] = None + """The :class:`~sqlalchemy.schema.MetaData` object + representing the destination. + + This object is the one that is passed within ``env.py`` + to the :paramref:`.EnvironmentContext.configure.target_metadata` + parameter. It represents the structure of :class:`.Table` and other + objects as stated in the current database model, and represents the + destination structure for the database being examined. + + While the :class:`~sqlalchemy.schema.MetaData` object is primarily + known as a collection of :class:`~sqlalchemy.schema.Table` objects, + it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary + that may be used by end-user schemes to store additional schema-level + objects that are to be compared in custom autogeneration schemes. + + """ + + connection: Optional[Connection] = None + """The :class:`~sqlalchemy.engine.base.Connection` object currently + connected to the database backend being compared. + + This is obtained from the :attr:`.MigrationContext.bind` and is + ultimately set up in the ``env.py`` script. + + """ + + dialect: Optional[Dialect] = None + """The :class:`~sqlalchemy.engine.Dialect` object currently in use. + + This is normally obtained from the + :attr:`~sqlalchemy.engine.base.Connection.dialect` attribute. + + """ + + imports: Set[str] = None # type: ignore[assignment] + """A ``set()`` which contains string Python import directives. + + The directives are to be rendered into the ``${imports}`` section + of a script template. The set is normally empty and can be modified + within hooks such as the + :paramref:`.EnvironmentContext.configure.render_item` hook. + + .. 
seealso:: + + :ref:`autogen_render_types` + + """ + + migration_context: MigrationContext = None # type: ignore[assignment] + """The :class:`.MigrationContext` established by the ``env.py`` script.""" + + def __init__( + self, + migration_context: MigrationContext, + metadata: Optional[MetaData] = None, + opts: Optional[Dict[str, Any]] = None, + autogenerate: bool = True, + ) -> None: + if ( + autogenerate + and migration_context is not None + and migration_context.as_sql + ): + raise util.CommandError( + "autogenerate can't use as_sql=True as it prevents querying " + "the database for schema information" + ) + + if opts is None: + opts = migration_context.opts + + self.metadata = metadata = ( + opts.get("target_metadata", None) if metadata is None else metadata + ) + + if ( + autogenerate + and metadata is None + and migration_context is not None + and migration_context.script is not None + ): + raise util.CommandError( + "Can't proceed with --autogenerate option; environment " + "script %s does not provide " + "a MetaData object or sequence of objects to the context." + % (migration_context.script.env_py_location) + ) + + include_object = opts.get("include_object", None) + include_name = opts.get("include_name", None) + + object_filters = [] + name_filters = [] + if include_object: + object_filters.append(include_object) + if include_name: + name_filters.append(include_name) + + self._object_filters = object_filters + self._name_filters = name_filters + + self.migration_context = migration_context + if self.migration_context is not None: + self.connection = self.migration_context.bind + self.dialect = self.migration_context.dialect + + self.imports = set() + self.opts: Dict[str, Any] = opts + self._has_batch: bool = False + + @util.memoized_property + def inspector(self) -> Inspector: + if self.connection is None: + raise TypeError( + "can't return inspector as this " + "AutogenContext has no database connection" + ) + return inspect(self.connection) + + @contextlib.contextmanager + def _within_batch(self) -> Iterator[None]: + self._has_batch = True + yield + self._has_batch = False + + def run_name_filters( + self, + name: Optional[str], + type_: NameFilterType, + parent_names: NameFilterParentNames, + ) -> bool: + """Run the context's name filters and return True if the targets + should be part of the autogenerate operation. + + This method should be run for every kind of name encountered within the + reflection side of an autogenerate operation, giving the environment + the chance to filter what names should be reflected as database + objects. The filters here are produced directly via the + :paramref:`.EnvironmentContext.configure.include_name` parameter. + + """ + if "schema_name" in parent_names: + if type_ == "table": + table_name = name + else: + table_name = parent_names.get("table_name", None) + if table_name: + schema_name = parent_names["schema_name"] + if schema_name: + parent_names["schema_qualified_table_name"] = "%s.%s" % ( + schema_name, + table_name, + ) + else: + parent_names["schema_qualified_table_name"] = table_name + + for fn in self._name_filters: + if not fn(name, type_, parent_names): + return False + else: + return True + + def run_object_filters( + self, + object_: SchemaItem, + name: sqla_compat._ConstraintName, + type_: NameFilterType, + reflected: bool, + compare_to: Optional[SchemaItem], + ) -> bool: + """Run the context's object filters and return True if the targets + should be part of the autogenerate operation. 
+ + This method should be run for every kind of object encountered within + an autogenerate operation, giving the environment the chance + to filter what objects should be included in the comparison. + The filters here are produced directly via the + :paramref:`.EnvironmentContext.configure.include_object` parameter. + + """ + for fn in self._object_filters: + if not fn(object_, name, type_, reflected, compare_to): + return False + else: + return True + + run_filters = run_object_filters + + @util.memoized_property + def sorted_tables(self) -> List[Table]: + """Return an aggregate of the :attr:`.MetaData.sorted_tables` + collection(s). + + For a sequence of :class:`.MetaData` objects, this + concatenates the :attr:`.MetaData.sorted_tables` collection + for each individual :class:`.MetaData` in the order of the + sequence. It does **not** collate the sorted tables collections. + + """ + result = [] + for m in util.to_list(self.metadata): + result.extend(m.sorted_tables) + return result + + @util.memoized_property + def table_key_to_table(self) -> Dict[str, Table]: + """Return an aggregate of the :attr:`.MetaData.tables` dictionaries. + + The :attr:`.MetaData.tables` collection is a dictionary of table key + to :class:`.Table`; this method aggregates the dictionary across + multiple :class:`.MetaData` objects into one dictionary. + + Duplicate table keys are **not** supported; if two :class:`.MetaData` + objects contain the same table key, an exception is raised. + + """ + result: Dict[str, Table] = {} + for m in util.to_list(self.metadata): + intersect = set(result).intersection(set(m.tables)) + if intersect: + raise ValueError( + "Duplicate table keys across multiple " + "MetaData objects: %s" + % (", ".join('"%s"' % key for key in sorted(intersect))) + ) + + result.update(m.tables) + return result + + +class RevisionContext: + """Maintains configuration and state that's specific to a revision + file generation operation.""" + + generated_revisions: List[MigrationScript] + process_revision_directives: Optional[ProcessRevisionDirectiveFn] + + def __init__( + self, + config: Config, + script_directory: ScriptDirectory, + command_args: Dict[str, Any], + process_revision_directives: Optional[ + ProcessRevisionDirectiveFn + ] = None, + ) -> None: + self.config = config + self.script_directory = script_directory + self.command_args = command_args + self.process_revision_directives = process_revision_directives + self.template_args = { + "config": config # Let templates use config for + # e.g. 
multiple databases + } + self.generated_revisions = [self._default_revision()] + + def _to_script( + self, migration_script: MigrationScript + ) -> Optional[Script]: + template_args: Dict[str, Any] = self.template_args.copy() + + if getattr(migration_script, "_needs_render", False): + autogen_context = self._last_autogen_context + + # clear out existing imports if we are doing multiple + # renders + autogen_context.imports = set() + if migration_script.imports: + autogen_context.imports.update(migration_script.imports) + render._render_python_into_templatevars( + autogen_context, migration_script, template_args + ) + + assert migration_script.rev_id is not None + return self.script_directory.generate_revision( + migration_script.rev_id, + migration_script.message, + refresh=True, + head=migration_script.head, + splice=migration_script.splice, + branch_labels=migration_script.branch_label, + version_path=migration_script.version_path, + depends_on=migration_script.depends_on, + **template_args, + ) + + def run_autogenerate( + self, rev: _GetRevArg, migration_context: MigrationContext + ) -> None: + self._run_environment(rev, migration_context, True) + + def run_no_autogenerate( + self, rev: _GetRevArg, migration_context: MigrationContext + ) -> None: + self._run_environment(rev, migration_context, False) + + def _run_environment( + self, + rev: _GetRevArg, + migration_context: MigrationContext, + autogenerate: bool, + ) -> None: + if autogenerate: + if self.command_args["sql"]: + raise util.CommandError( + "Using --sql with --autogenerate does not make any sense" + ) + if set(self.script_directory.get_revisions(rev)) != set( + self.script_directory.get_revisions("heads") + ): + raise util.CommandError("Target database is not up to date.") + + upgrade_token = migration_context.opts["upgrade_token"] + downgrade_token = migration_context.opts["downgrade_token"] + + migration_script = self.generated_revisions[-1] + if not getattr(migration_script, "_needs_render", False): + migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token + migration_script.downgrade_ops_list[ + -1 + ].downgrade_token = downgrade_token + migration_script._needs_render = True + else: + migration_script._upgrade_ops.append( + ops.UpgradeOps([], upgrade_token=upgrade_token) + ) + migration_script._downgrade_ops.append( + ops.DowngradeOps([], downgrade_token=downgrade_token) + ) + + autogen_context = AutogenContext( + migration_context, autogenerate=autogenerate + ) + self._last_autogen_context: AutogenContext = autogen_context + + if autogenerate: + compare._populate_migration_script( + autogen_context, migration_script + ) + + if self.process_revision_directives: + self.process_revision_directives( + migration_context, rev, self.generated_revisions + ) + + hook = migration_context.opts["process_revision_directives"] + if hook: + hook(migration_context, rev, self.generated_revisions) + + for migration_script in self.generated_revisions: + migration_script._needs_render = True + + def _default_revision(self) -> MigrationScript: + command_args: Dict[str, Any] = self.command_args + op = ops.MigrationScript( + rev_id=command_args["rev_id"] or util.rev_id(), + message=command_args["message"], + upgrade_ops=ops.UpgradeOps([]), + downgrade_ops=ops.DowngradeOps([]), + head=command_args["head"], + splice=command_args["splice"], + branch_label=command_args["branch_label"], + version_path=command_args["version_path"], + depends_on=command_args["depends_on"], + ) + return op + + def generate_scripts(self) -> 
Iterator[Optional[Script]]: + for generated_revision in self.generated_revisions: + yield self._to_script(generated_revision) diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/compare.py b/venv/lib/python3.12/site-packages/alembic/autogenerate/compare.py new file mode 100644 index 0000000..fcef531 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/autogenerate/compare.py @@ -0,0 +1,1329 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import contextlib +import logging +import re +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterator +from typing import Mapping +from typing import Optional +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy import event +from sqlalchemy import inspect +from sqlalchemy import schema as sa_schema +from sqlalchemy import text +from sqlalchemy import types as sqltypes +from sqlalchemy.sql import expression +from sqlalchemy.sql.schema import ForeignKeyConstraint +from sqlalchemy.sql.schema import Index +from sqlalchemy.sql.schema import UniqueConstraint +from sqlalchemy.util import OrderedSet + +from .. import util +from ..ddl._autogen import is_index_sig +from ..ddl._autogen import is_uq_sig +from ..operations import ops +from ..util import sqla_compat + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy.engine.reflection import Inspector + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Table + + from alembic.autogenerate.api import AutogenContext + from alembic.ddl.impl import DefaultImpl + from alembic.operations.ops import AlterColumnOp + from alembic.operations.ops import MigrationScript + from alembic.operations.ops import ModifyTableOps + from alembic.operations.ops import UpgradeOps + from ..ddl._autogen import _constraint_sig + + +log = logging.getLogger(__name__) + + +def _populate_migration_script( + autogen_context: AutogenContext, migration_script: MigrationScript +) -> None: + upgrade_ops = migration_script.upgrade_ops_list[-1] + downgrade_ops = migration_script.downgrade_ops_list[-1] + + _produce_net_changes(autogen_context, upgrade_ops) + upgrade_ops.reverse_into(downgrade_ops) + + +comparators = util.Dispatcher(uselist=True) + + +def _produce_net_changes( + autogen_context: AutogenContext, upgrade_ops: UpgradeOps +) -> None: + connection = autogen_context.connection + assert connection is not None + include_schemas = autogen_context.opts.get("include_schemas", False) + + inspector: Inspector = inspect(connection) + + default_schema = connection.dialect.default_schema_name + schemas: Set[Optional[str]] + if include_schemas: + schemas = set(inspector.get_schema_names()) + # replace default schema name with None + schemas.discard("information_schema") + # replace the "default" schema with None + schemas.discard(default_schema) + schemas.add(None) + else: + schemas = {None} + + schemas = { + s for s in schemas if autogen_context.run_name_filters(s, "schema", {}) + } + + assert autogen_context.dialect is not None + comparators.dispatch("schema", autogen_context.dialect.name)( + autogen_context, upgrade_ops, schemas + ) + + +@comparators.dispatch_for("schema") +def _autogen_for_tables( + autogen_context: 
AutogenContext, + upgrade_ops: UpgradeOps, + schemas: Union[Set[None], Set[Optional[str]]], +) -> None: + inspector = autogen_context.inspector + + conn_table_names: Set[Tuple[Optional[str], str]] = set() + + version_table_schema = ( + autogen_context.migration_context.version_table_schema + ) + version_table = autogen_context.migration_context.version_table + + for schema_name in schemas: + tables = set(inspector.get_table_names(schema=schema_name)) + if schema_name == version_table_schema: + tables = tables.difference( + [autogen_context.migration_context.version_table] + ) + + conn_table_names.update( + (schema_name, tname) + for tname in tables + if autogen_context.run_name_filters( + tname, "table", {"schema_name": schema_name} + ) + ) + + metadata_table_names = OrderedSet( + [(table.schema, table.name) for table in autogen_context.sorted_tables] + ).difference([(version_table_schema, version_table)]) + + _compare_tables( + conn_table_names, + metadata_table_names, + inspector, + upgrade_ops, + autogen_context, + ) + + +def _compare_tables( + conn_table_names: set, + metadata_table_names: set, + inspector: Inspector, + upgrade_ops: UpgradeOps, + autogen_context: AutogenContext, +) -> None: + default_schema = inspector.bind.dialect.default_schema_name + + # tables coming from the connection will not have "schema" + # set if it matches default_schema_name; so we need a list + # of table names from local metadata that also have "None" if schema + # == default_schema_name. Most setups will be like this anyway but + # some are not (see #170) + metadata_table_names_no_dflt_schema = OrderedSet( + [ + (schema if schema != default_schema else None, tname) + for schema, tname in metadata_table_names + ] + ) + + # to adjust for the MetaData collection storing the tables either + # as "schemaname.tablename" or just "tablename", create a new lookup + # which will match the "non-default-schema" keys to the Table object. + tname_to_table = { + no_dflt_schema: autogen_context.table_key_to_table[ + sa_schema._get_table_key(tname, schema) + ] + for no_dflt_schema, (schema, tname) in zip( + metadata_table_names_no_dflt_schema, metadata_table_names + ) + } + metadata_table_names = metadata_table_names_no_dflt_schema + + for s, tname in metadata_table_names.difference(conn_table_names): + name = "%s.%s" % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + if autogen_context.run_object_filters( + metadata_table, tname, "table", False, None + ): + upgrade_ops.ops.append( + ops.CreateTableOp.from_table(metadata_table) + ) + log.info("Detected added table %r", name) + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + + comparators.dispatch("table")( + autogen_context, + modify_table_ops, + s, + tname, + None, + metadata_table, + ) + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + removal_metadata = sa_schema.MetaData() + for s, tname in conn_table_names.difference(metadata_table_names): + name = sa_schema._get_table_key(tname, s) + exists = name in removal_metadata.tables + t = sa_schema.Table(tname, removal_metadata, schema=s) + + if not exists: + event.listen( + t, + "column_reflect", + # fmt: off + autogen_context.migration_context.impl. 
+ _compat_autogen_column_reflect + (inspector), + # fmt: on + ) + sqla_compat._reflect_table(inspector, t) + if autogen_context.run_object_filters(t, tname, "table", True, None): + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + + comparators.dispatch("table")( + autogen_context, modify_table_ops, s, tname, t, None + ) + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + upgrade_ops.ops.append(ops.DropTableOp.from_table(t)) + log.info("Detected removed table %r", name) + + existing_tables = conn_table_names.intersection(metadata_table_names) + + existing_metadata = sa_schema.MetaData() + conn_column_info = {} + for s, tname in existing_tables: + name = sa_schema._get_table_key(tname, s) + exists = name in existing_metadata.tables + t = sa_schema.Table(tname, existing_metadata, schema=s) + if not exists: + event.listen( + t, + "column_reflect", + # fmt: off + autogen_context.migration_context.impl. + _compat_autogen_column_reflect(inspector), + # fmt: on + ) + sqla_compat._reflect_table(inspector, t) + conn_column_info[(s, tname)] = t + + for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])): + s = s or None + name = "%s.%s" % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + conn_table = existing_metadata.tables[name] + + if autogen_context.run_object_filters( + metadata_table, tname, "table", False, conn_table + ): + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + with _compare_columns( + s, + tname, + conn_table, + metadata_table, + modify_table_ops, + autogen_context, + inspector, + ): + comparators.dispatch("table")( + autogen_context, + modify_table_ops, + s, + tname, + conn_table, + metadata_table, + ) + + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + +_IndexColumnSortingOps: Mapping[str, Any] = util.immutabledict( + { + "asc": expression.asc, + "desc": expression.desc, + "nulls_first": expression.nullsfirst, + "nulls_last": expression.nullslast, + "nullsfirst": expression.nullsfirst, # 1_3 name + "nullslast": expression.nullslast, # 1_3 name + } +) + + +def _make_index( + impl: DefaultImpl, params: Dict[str, Any], conn_table: Table +) -> Optional[Index]: + exprs: list[Union[Column[Any], TextClause]] = [] + sorting = params.get("column_sorting") + + for num, col_name in enumerate(params["column_names"]): + item: Union[Column[Any], TextClause] + if col_name is None: + assert "expressions" in params + name = params["expressions"][num] + item = text(name) + else: + name = col_name + item = conn_table.c[col_name] + if sorting and name in sorting: + for operator in sorting[name]: + if operator in _IndexColumnSortingOps: + item = _IndexColumnSortingOps[operator](item) + exprs.append(item) + ix = sa_schema.Index( + params["name"], + *exprs, + unique=params["unique"], + _table=conn_table, + **impl.adjust_reflected_dialect_options(params, "index"), + ) + if "duplicates_constraint" in params: + ix.info["duplicates_constraint"] = params["duplicates_constraint"] + return ix + + +def _make_unique_constraint( + impl: DefaultImpl, params: Dict[str, Any], conn_table: Table +) -> UniqueConstraint: + uq = sa_schema.UniqueConstraint( + *[conn_table.c[cname] for cname in params["column_names"]], + name=params["name"], + **impl.adjust_reflected_dialect_options(params, "unique_constraint"), + ) + if "duplicates_index" in params: + uq.info["duplicates_index"] = params["duplicates_index"] + + return uq + + +def _make_foreign_key( + params: Dict[str, Any], conn_table: 
Table +) -> ForeignKeyConstraint: + tname = params["referred_table"] + if params["referred_schema"]: + tname = "%s.%s" % (params["referred_schema"], tname) + + options = params.get("options", {}) + + const = sa_schema.ForeignKeyConstraint( + [conn_table.c[cname] for cname in params["constrained_columns"]], + ["%s.%s" % (tname, n) for n in params["referred_columns"]], + onupdate=options.get("onupdate"), + ondelete=options.get("ondelete"), + deferrable=options.get("deferrable"), + initially=options.get("initially"), + name=params["name"], + ) + # needed by 0.7 + conn_table.append_constraint(const) + return const + + +@contextlib.contextmanager +def _compare_columns( + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Table, + metadata_table: Table, + modify_table_ops: ModifyTableOps, + autogen_context: AutogenContext, + inspector: Inspector, +) -> Iterator[None]: + name = "%s.%s" % (schema, tname) if schema else tname + metadata_col_names = OrderedSet( + c.name for c in metadata_table.c if not c.system + ) + metadata_cols_by_name = { + c.name: c for c in metadata_table.c if not c.system + } + + conn_col_names = { + c.name: c + for c in conn_table.c + if autogen_context.run_name_filters( + c.name, "column", {"table_name": tname, "schema_name": schema} + ) + } + + for cname in metadata_col_names.difference(conn_col_names): + if autogen_context.run_object_filters( + metadata_cols_by_name[cname], cname, "column", False, None + ): + modify_table_ops.ops.append( + ops.AddColumnOp.from_column_and_tablename( + schema, tname, metadata_cols_by_name[cname] + ) + ) + log.info("Detected added column '%s.%s'", name, cname) + + for colname in metadata_col_names.intersection(conn_col_names): + metadata_col = metadata_cols_by_name[colname] + conn_col = conn_table.c[colname] + if not autogen_context.run_object_filters( + metadata_col, colname, "column", False, conn_col + ): + continue + alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema) + + comparators.dispatch("column")( + autogen_context, + alter_column_op, + schema, + tname, + colname, + conn_col, + metadata_col, + ) + + if alter_column_op.has_changes(): + modify_table_ops.ops.append(alter_column_op) + + yield + + for cname in set(conn_col_names).difference(metadata_col_names): + if autogen_context.run_object_filters( + conn_table.c[cname], cname, "column", True, None + ): + modify_table_ops.ops.append( + ops.DropColumnOp.from_column_and_tablename( + schema, tname, conn_table.c[cname] + ) + ) + log.info("Detected removed column '%s.%s'", name, cname) + + +_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index]) + + +@comparators.dispatch_for("table") +def _compare_indexes_and_uniques( + autogen_context: AutogenContext, + modify_ops: ModifyTableOps, + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Optional[Table], + metadata_table: Optional[Table], +) -> None: + inspector = autogen_context.inspector + is_create_table = conn_table is None + is_drop_table = metadata_table is None + impl = autogen_context.migration_context.impl + + # 1a. get raw indexes and unique constraints from metadata ... 
+ if metadata_table is not None: + metadata_unique_constraints = { + uq + for uq in metadata_table.constraints + if isinstance(uq, sa_schema.UniqueConstraint) + } + metadata_indexes = set(metadata_table.indexes) + else: + metadata_unique_constraints = set() + metadata_indexes = set() + + conn_uniques = conn_indexes = frozenset() # type:ignore[var-annotated] + + supports_unique_constraints = False + + unique_constraints_duplicate_unique_indexes = False + + if conn_table is not None: + # 1b. ... and from connection, if the table exists + try: + conn_uniques = inspector.get_unique_constraints( # type:ignore[assignment] # noqa + tname, schema=schema + ) + supports_unique_constraints = True + except NotImplementedError: + pass + except TypeError: + # number of arguments is off for the base + # method in SQLAlchemy due to the cache decorator + # not being present + pass + else: + conn_uniques = [ # type:ignore[assignment] + uq + for uq in conn_uniques + if autogen_context.run_name_filters( + uq["name"], + "unique_constraint", + {"table_name": tname, "schema_name": schema}, + ) + ] + for uq in conn_uniques: + if uq.get("duplicates_index"): + unique_constraints_duplicate_unique_indexes = True + try: + conn_indexes = inspector.get_indexes( # type:ignore[assignment] + tname, schema=schema + ) + except NotImplementedError: + pass + else: + conn_indexes = [ # type:ignore[assignment] + ix + for ix in conn_indexes + if autogen_context.run_name_filters( + ix["name"], + "index", + {"table_name": tname, "schema_name": schema}, + ) + ] + + # 2. convert conn-level objects from raw inspector records + # into schema objects + if is_drop_table: + # for DROP TABLE uniques are inline, don't need them + conn_uniques = set() # type:ignore[assignment] + else: + conn_uniques = { # type:ignore[assignment] + _make_unique_constraint(impl, uq_def, conn_table) + for uq_def in conn_uniques + } + + conn_indexes = { # type:ignore[assignment] + index + for index in ( + _make_index(impl, ix, conn_table) for ix in conn_indexes + ) + if index is not None + } + + # 2a. if the dialect dupes unique indexes as unique constraints + # (mysql and oracle), correct for that + + if unique_constraints_duplicate_unique_indexes: + _correct_for_uq_duplicates_uix( + conn_uniques, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + autogen_context.dialect, + impl, + ) + + # 3. give the dialect a chance to omit indexes and constraints that + # we know are either added implicitly by the DB or that the DB + # can't accurately report on + impl.correct_for_autogen_constraints( + conn_uniques, # type: ignore[arg-type] + conn_indexes, # type: ignore[arg-type] + metadata_unique_constraints, + metadata_indexes, + ) + + # 4. organize the constraints into "signature" collections, the + # _constraint_sig() objects provide a consistent facade over both + # Index and UniqueConstraint so we can easily work with them + # interchangeably + metadata_unique_constraints_sig = { + impl._create_metadata_constraint_sig(uq) + for uq in metadata_unique_constraints + } + + metadata_indexes_sig = { + impl._create_metadata_constraint_sig(ix) for ix in metadata_indexes + } + + conn_unique_constraints = { + impl._create_reflected_constraint_sig(uq) for uq in conn_uniques + } + + conn_indexes_sig = { + impl._create_reflected_constraint_sig(ix) for ix in conn_indexes + } + + # 5. 
index things by name, for those objects that have names + metadata_names = { + cast(str, c.md_name_to_sql_name(autogen_context)): c + for c in metadata_unique_constraints_sig.union(metadata_indexes_sig) + if c.is_named + } + + conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig] + conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig] + + conn_uniques_by_name = {c.name: c for c in conn_unique_constraints} + conn_indexes_by_name = {c.name: c for c in conn_indexes_sig} + conn_names = { + c.name: c + for c in conn_unique_constraints.union(conn_indexes_sig) + if sqla_compat.constraint_name_string(c.name) + } + + doubled_constraints = { + name: (conn_uniques_by_name[name], conn_indexes_by_name[name]) + for name in set(conn_uniques_by_name).intersection( + conn_indexes_by_name + ) + } + + # 6. index things by "column signature", to help with unnamed unique + # constraints. + conn_uniques_by_sig = {uq.unnamed: uq for uq in conn_unique_constraints} + metadata_uniques_by_sig = { + uq.unnamed: uq for uq in metadata_unique_constraints_sig + } + unnamed_metadata_uniques = { + uq.unnamed: uq + for uq in metadata_unique_constraints_sig + if not sqla_compat._constraint_is_named( + uq.const, autogen_context.dialect + ) + } + + # assumptions: + # 1. a unique constraint or an index from the connection *always* + # has a name. + # 2. an index on the metadata side *always* has a name. + # 3. a unique constraint on the metadata side *might* have a name. + # 4. The backend may double up indexes as unique constraints and + # vice versa (e.g. MySQL, Postgresql) + + def obj_added(obj: _constraint_sig): + if is_index_sig(obj): + if autogen_context.run_object_filters( + obj.const, obj.name, "index", False, None + ): + modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const)) + log.info( + "Detected added index '%r' on '%s'", + obj.name, + obj.column_names, + ) + elif is_uq_sig(obj): + if not supports_unique_constraints: + # can't report unique indexes as added if we don't + # detect them + return + if is_create_table or is_drop_table: + # unique constraints are created inline with table defs + return + if autogen_context.run_object_filters( + obj.const, obj.name, "unique_constraint", False, None + ): + modify_ops.ops.append( + ops.AddConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected added unique constraint %r on '%s'", + obj.name, + obj.column_names, + ) + else: + assert False + + def obj_removed(obj: _constraint_sig): + if is_index_sig(obj): + if obj.is_unique and not supports_unique_constraints: + # many databases double up unique constraints + # as unique indexes. 
without that list we can't + # be sure what we're doing here + return + + if autogen_context.run_object_filters( + obj.const, obj.name, "index", True, None + ): + modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const)) + log.info("Detected removed index %r on %r", obj.name, tname) + elif is_uq_sig(obj): + if is_create_table or is_drop_table: + # if the whole table is being dropped, we don't need to + # consider unique constraint separately + return + if autogen_context.run_object_filters( + obj.const, obj.name, "unique_constraint", True, None + ): + modify_ops.ops.append( + ops.DropConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected removed unique constraint %r on %r", + obj.name, + tname, + ) + else: + assert False + + def obj_changed( + old: _constraint_sig, + new: _constraint_sig, + msg: str, + ): + if is_index_sig(old): + assert is_index_sig(new) + + if autogen_context.run_object_filters( + new.const, new.name, "index", False, old.const + ): + log.info( + "Detected changed index %r on %r: %s", old.name, tname, msg + ) + modify_ops.ops.append(ops.DropIndexOp.from_index(old.const)) + modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const)) + elif is_uq_sig(old): + assert is_uq_sig(new) + + if autogen_context.run_object_filters( + new.const, new.name, "unique_constraint", False, old.const + ): + log.info( + "Detected changed unique constraint %r on %r: %s", + old.name, + tname, + msg, + ) + modify_ops.ops.append( + ops.DropConstraintOp.from_constraint(old.const) + ) + modify_ops.ops.append( + ops.AddConstraintOp.from_constraint(new.const) + ) + else: + assert False + + for removed_name in sorted(set(conn_names).difference(metadata_names)): + conn_obj = conn_names[removed_name] + if ( + is_uq_sig(conn_obj) + and conn_obj.unnamed in unnamed_metadata_uniques + ): + continue + elif removed_name in doubled_constraints: + conn_uq, conn_idx = doubled_constraints[removed_name] + if ( + all( + conn_idx.unnamed != meta_idx.unnamed + for meta_idx in metadata_indexes_sig + ) + and conn_uq.unnamed not in metadata_uniques_by_sig + ): + obj_removed(conn_uq) + obj_removed(conn_idx) + else: + obj_removed(conn_obj) + + for existing_name in sorted(set(metadata_names).intersection(conn_names)): + metadata_obj = metadata_names[existing_name] + + if existing_name in doubled_constraints: + conn_uq, conn_idx = doubled_constraints[existing_name] + if is_index_sig(metadata_obj): + conn_obj = conn_idx + else: + conn_obj = conn_uq + else: + conn_obj = conn_names[existing_name] + + if type(conn_obj) != type(metadata_obj): + obj_removed(conn_obj) + obj_added(metadata_obj) + else: + comparison = metadata_obj.compare_to_reflected(conn_obj) + + if comparison.is_different: + # constraint are different + obj_changed(conn_obj, metadata_obj, comparison.message) + elif comparison.is_skip: + # constraint cannot be compared, skip them + thing = ( + "index" if is_index_sig(conn_obj) else "unique constraint" + ) + log.info( + "Cannot compare %s %r, assuming equal and skipping. 
%s", + thing, + conn_obj.name, + comparison.message, + ) + else: + # constraint are equal + assert comparison.is_equal + + for added_name in sorted(set(metadata_names).difference(conn_names)): + obj = metadata_names[added_name] + obj_added(obj) + + for uq_sig in unnamed_metadata_uniques: + if uq_sig not in conn_uniques_by_sig: + obj_added(unnamed_metadata_uniques[uq_sig]) + + +def _correct_for_uq_duplicates_uix( + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + dialect, + impl, +): + # dedupe unique indexes vs. constraints, since MySQL / Oracle + # doesn't really have unique constraints as a separate construct. + # but look in the metadata and try to maintain constructs + # that already seem to be defined one way or the other + # on that side. This logic was formerly local to MySQL dialect, + # generalized to Oracle and others. See #276 + + # resolve final rendered name for unique constraints defined in the + # metadata. this includes truncation of long names. naming convention + # names currently should already be set as cons.name, however leave this + # to the sqla_compat to decide. + metadata_cons_names = [ + (sqla_compat._get_constraint_final_name(cons, dialect), cons) + for cons in metadata_unique_constraints + ] + + metadata_uq_names = { + name for name, cons in metadata_cons_names if name is not None + } + + unnamed_metadata_uqs = { + impl._create_metadata_constraint_sig(cons).unnamed + for name, cons in metadata_cons_names + if name is None + } + + metadata_ix_names = { + sqla_compat._get_constraint_final_name(cons, dialect) + for cons in metadata_indexes + if cons.unique + } + + # for reflection side, names are in their final database form + # already since they're from the database + conn_ix_names = {cons.name: cons for cons in conn_indexes if cons.unique} + + uqs_dupe_indexes = { + cons.name: cons + for cons in conn_unique_constraints + if cons.info["duplicates_index"] + } + + for overlap in uqs_dupe_indexes: + if overlap not in metadata_uq_names: + if ( + impl._create_reflected_constraint_sig( + uqs_dupe_indexes[overlap] + ).unnamed + not in unnamed_metadata_uqs + ): + conn_unique_constraints.discard(uqs_dupe_indexes[overlap]) + elif overlap not in metadata_ix_names: + conn_indexes.discard(conn_ix_names[overlap]) + + +@comparators.dispatch_for("column") +def _compare_nullable( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> None: + metadata_col_nullable = metadata_col.nullable + conn_col_nullable = conn_col.nullable + alter_column_op.existing_nullable = conn_col_nullable + + if conn_col_nullable is not metadata_col_nullable: + if ( + sqla_compat._server_default_is_computed( + metadata_col.server_default, conn_col.server_default + ) + and sqla_compat._nullability_might_be_unset(metadata_col) + or ( + sqla_compat._server_default_is_identity( + metadata_col.server_default, conn_col.server_default + ) + ) + ): + log.info( + "Ignoring nullable change on identity column '%s.%s'", + tname, + cname, + ) + else: + alter_column_op.modify_nullable = metadata_col_nullable + log.info( + "Detected %s on column '%s.%s'", + "NULL" if metadata_col_nullable else "NOT NULL", + tname, + cname, + ) + + +@comparators.dispatch_for("column") +def _setup_autoincrement( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: 
Union[quoted_name, str], + cname: quoted_name, + conn_col: Column[Any], + metadata_col: Column[Any], +) -> None: + if metadata_col.table._autoincrement_column is metadata_col: + alter_column_op.kw["autoincrement"] = True + elif metadata_col.autoincrement is True: + alter_column_op.kw["autoincrement"] = True + elif metadata_col.autoincrement is False: + alter_column_op.kw["autoincrement"] = False + + +@comparators.dispatch_for("column") +def _compare_type( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> None: + conn_type = conn_col.type + alter_column_op.existing_type = conn_type + metadata_type = metadata_col.type + if conn_type._type_affinity is sqltypes.NullType: + log.info( + "Couldn't determine database type " "for column '%s.%s'", + tname, + cname, + ) + return + if metadata_type._type_affinity is sqltypes.NullType: + log.info( + "Column '%s.%s' has no type within " "the model; can't compare", + tname, + cname, + ) + return + + isdiff = autogen_context.migration_context._compare_type( + conn_col, metadata_col + ) + + if isdiff: + alter_column_op.modify_type = metadata_type + log.info( + "Detected type change from %r to %r on '%s.%s'", + conn_type, + metadata_type, + tname, + cname, + ) + + +def _render_server_default_for_compare( + metadata_default: Optional[Any], autogen_context: AutogenContext +) -> Optional[str]: + if isinstance(metadata_default, sa_schema.DefaultClause): + if isinstance(metadata_default.arg, str): + metadata_default = metadata_default.arg + else: + metadata_default = str( + metadata_default.arg.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + if isinstance(metadata_default, str): + return metadata_default + else: + return None + + +def _normalize_computed_default(sqltext: str) -> str: + """we want to warn if a computed sql expression has changed. however + we don't want false positives and the warning is not that critical. + so filter out most forms of variability from the SQL text. + + """ + + return re.sub(r"[ \(\)'\"`\[\]]", "", sqltext).lower() + + +def _compare_computed_default( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: str, + cname: str, + conn_col: Column[Any], + metadata_col: Column[Any], +) -> None: + rendered_metadata_default = str( + cast(sa_schema.Computed, metadata_col.server_default).sqltext.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + + # since we cannot change computed columns, we do only a crude comparison + # here where we try to eliminate syntactical differences in order to + # get a minimal comparison just to emit a warning. 
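+ # (Illustration, hypothetical SQL: _normalize_computed_default() strips
+ # whitespace, quoting and brackets and lowercases, so e.g. "(`Price` * 2)"
+ # and "price * 2" both reduce to "price*2" and compare as equal below.)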
+ + rendered_metadata_default = _normalize_computed_default( + rendered_metadata_default + ) + + if isinstance(conn_col.server_default, sa_schema.Computed): + rendered_conn_default = str( + conn_col.server_default.sqltext.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + if rendered_conn_default is None: + rendered_conn_default = "" + else: + rendered_conn_default = _normalize_computed_default( + rendered_conn_default + ) + else: + rendered_conn_default = "" + + if rendered_metadata_default != rendered_conn_default: + _warn_computed_not_supported(tname, cname) + + +def _warn_computed_not_supported(tname: str, cname: str) -> None: + util.warn("Computed default on %s.%s cannot be modified" % (tname, cname)) + + +def _compare_identity_default( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, +): + impl = autogen_context.migration_context.impl + diff, ignored_attr, is_alter = impl._compare_identity_default( + metadata_col.server_default, conn_col.server_default + ) + + return diff, is_alter + + +@comparators.dispatch_for("column") +def _compare_server_default( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> Optional[bool]: + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if conn_col_default is None and metadata_default is None: + return False + + if sqla_compat._server_default_is_computed(metadata_default): + # return False in case of a computed column as the server + # default. Note that DDL for adding or removing "GENERATED AS" from + # an existing column is not currently known for any backend. + # Once SQLAlchemy can reflect "GENERATED" as the "computed" element, + # we would also want to ignore and/or warn for changes vs. the + # metadata (or support backend specific DDL if applicable). 
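+ # (Illustration, hypothetical column: a metadata
+ # Column("total", Integer, Computed("price * qty")) therefore never
+ # produces an ALTER from this branch; at most _compare_computed_default()
+ # emits a warning when the reflected expression no longer matches.)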
+ if not sqla_compat.has_computed_reflection: + return False + + else: + return ( + _compare_computed_default( # type:ignore[func-returns-value] + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, + ) + ) + if sqla_compat._server_default_is_computed(conn_col_default): + _warn_computed_not_supported(tname, cname) + return False + + if sqla_compat._server_default_is_identity( + metadata_default, conn_col_default + ): + alter_column_op.existing_server_default = conn_col_default + diff, is_alter = _compare_identity_default( + autogen_context, + alter_column_op, + schema, + tname, + cname, + conn_col, + metadata_col, + ) + if is_alter: + alter_column_op.modify_server_default = metadata_default + if diff: + log.info( + "Detected server default on column '%s.%s': " + "identity options attributes %s", + tname, + cname, + sorted(diff), + ) + else: + rendered_metadata_default = _render_server_default_for_compare( + metadata_default, autogen_context + ) + + rendered_conn_default = ( + cast(Any, conn_col_default).arg.text if conn_col_default else None + ) + + alter_column_op.existing_server_default = conn_col_default + + is_diff = autogen_context.migration_context._compare_server_default( + conn_col, + metadata_col, + rendered_metadata_default, + rendered_conn_default, + ) + if is_diff: + alter_column_op.modify_server_default = metadata_default + log.info("Detected server default on column '%s.%s'", tname, cname) + + return None + + +@comparators.dispatch_for("column") +def _compare_column_comment( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: quoted_name, + conn_col: Column[Any], + metadata_col: Column[Any], +) -> Optional[Literal[False]]: + assert autogen_context.dialect is not None + if not autogen_context.dialect.supports_comments: + return None + + metadata_comment = metadata_col.comment + conn_col_comment = conn_col.comment + if conn_col_comment is None and metadata_comment is None: + return False + + alter_column_op.existing_comment = conn_col_comment + + if conn_col_comment != metadata_comment: + alter_column_op.modify_comment = metadata_comment + log.info("Detected column comment '%s.%s'", tname, cname) + + return None + + +@comparators.dispatch_for("table") +def _compare_foreign_keys( + autogen_context: AutogenContext, + modify_table_ops: ModifyTableOps, + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Table, + metadata_table: Table, +) -> None: + # if we're doing CREATE TABLE, all FKs are created + # inline within the table def + if conn_table is None or metadata_table is None: + return + + inspector = autogen_context.inspector + metadata_fks = { + fk + for fk in metadata_table.constraints + if isinstance(fk, sa_schema.ForeignKeyConstraint) + } + + conn_fks_list = [ + fk + for fk in inspector.get_foreign_keys(tname, schema=schema) + if autogen_context.run_name_filters( + fk["name"], + "foreign_key_constraint", + {"table_name": tname, "schema_name": schema}, + ) + ] + + conn_fks = { + _make_foreign_key(const, conn_table) # type: ignore[arg-type] + for const in conn_fks_list + } + + impl = autogen_context.migration_context.impl + + # give the dialect a chance to correct the FKs to match more + # closely + autogen_context.migration_context.impl.correct_for_autogen_foreignkeys( + conn_fks, metadata_fks + ) + + metadata_fks_sig = { + impl._create_metadata_constraint_sig(fk) for fk in metadata_fks + } + + conn_fks_sig = { + 
impl._create_reflected_constraint_sig(fk) for fk in conn_fks + } + + # check if reflected FKs include options, indicating the backend + # can reflect FK options + if conn_fks_list and "options" in conn_fks_list[0]: + conn_fks_by_sig = {c.unnamed: c for c in conn_fks_sig} + metadata_fks_by_sig = {c.unnamed: c for c in metadata_fks_sig} + else: + # otherwise compare by sig without options added + conn_fks_by_sig = {c.unnamed_no_options: c for c in conn_fks_sig} + metadata_fks_by_sig = { + c.unnamed_no_options: c for c in metadata_fks_sig + } + + metadata_fks_by_name = { + c.name: c for c in metadata_fks_sig if c.name is not None + } + conn_fks_by_name = {c.name: c for c in conn_fks_sig if c.name is not None} + + def _add_fk(obj, compare_to): + if autogen_context.run_object_filters( + obj.const, obj.name, "foreign_key_constraint", False, compare_to + ): + modify_table_ops.ops.append( + ops.CreateForeignKeyOp.from_constraint(const.const) # type: ignore[has-type] # noqa: E501 + ) + + log.info( + "Detected added foreign key (%s)(%s) on table %s%s", + ", ".join(obj.source_columns), + ", ".join(obj.target_columns), + "%s." % obj.source_schema if obj.source_schema else "", + obj.source_table, + ) + + def _remove_fk(obj, compare_to): + if autogen_context.run_object_filters( + obj.const, obj.name, "foreign_key_constraint", True, compare_to + ): + modify_table_ops.ops.append( + ops.DropConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected removed foreign key (%s)(%s) on table %s%s", + ", ".join(obj.source_columns), + ", ".join(obj.target_columns), + "%s." % obj.source_schema if obj.source_schema else "", + obj.source_table, + ) + + # so far it appears we don't need to do this by name at all. + # SQLite doesn't preserve constraint names anyway + + for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig): + const = conn_fks_by_sig[removed_sig] + if removed_sig not in metadata_fks_by_sig: + compare_to = ( + metadata_fks_by_name[const.name].const + if const.name in metadata_fks_by_name + else None + ) + _remove_fk(const, compare_to) + + for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig): + const = metadata_fks_by_sig[added_sig] + if added_sig not in conn_fks_by_sig: + compare_to = ( + conn_fks_by_name[const.name].const + if const.name in conn_fks_by_name + else None + ) + _add_fk(const, compare_to) + + +@comparators.dispatch_for("table") +def _compare_table_comment( + autogen_context: AutogenContext, + modify_table_ops: ModifyTableOps, + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Optional[Table], + metadata_table: Optional[Table], +) -> None: + assert autogen_context.dialect is not None + if not autogen_context.dialect.supports_comments: + return + + # if we're doing CREATE TABLE, comments will be created inline + # with the create_table op. 
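+ # (Illustration, hypothetical tables: a metadata Table carrying
+ # comment="user accounts" against a reflected table with comment=None
+ # produces a CreateTableCommentOp below; the reverse case produces a
+ # DropTableCommentOp.)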
+ if conn_table is None or metadata_table is None: + return + + if conn_table.comment is None and metadata_table.comment is None: + return + + if metadata_table.comment is None and conn_table.comment is not None: + modify_table_ops.ops.append( + ops.DropTableCommentOp( + tname, existing_comment=conn_table.comment, schema=schema + ) + ) + elif metadata_table.comment != conn_table.comment: + modify_table_ops.ops.append( + ops.CreateTableCommentOp( + tname, + metadata_table.comment, + existing_comment=conn_table.comment, + schema=schema, + ) + ) diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/render.py b/venv/lib/python3.12/site-packages/alembic/autogenerate/render.py new file mode 100644 index 0000000..317a6db --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/autogenerate/render.py @@ -0,0 +1,1097 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +from io import StringIO +import re +from typing import Any +from typing import cast +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from mako.pygen import PythonPrinter +from sqlalchemy import schema as sa_schema +from sqlalchemy import sql +from sqlalchemy import types as sqltypes +from sqlalchemy.sql.elements import conv +from sqlalchemy.sql.elements import quoted_name + +from .. import util +from ..operations import ops +from ..util import sqla_compat + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy.sql.base import DialectKWArgs + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.schema import CheckConstraint + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.schema import FetchedValue + from sqlalchemy.sql.schema import ForeignKey + from sqlalchemy.sql.schema import ForeignKeyConstraint + from sqlalchemy.sql.schema import Index + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import PrimaryKeyConstraint + from sqlalchemy.sql.schema import UniqueConstraint + from sqlalchemy.sql.sqltypes import ARRAY + from sqlalchemy.sql.type_api import TypeEngine + + from alembic.autogenerate.api import AutogenContext + from alembic.config import Config + from alembic.operations.ops import MigrationScript + from alembic.operations.ops import ModifyTableOps + from alembic.util.sqla_compat import Computed + from alembic.util.sqla_compat import Identity + + +MAX_PYTHON_ARGS = 255 + + +def _render_gen_name( + autogen_context: AutogenContext, + name: sqla_compat._ConstraintName, +) -> Optional[Union[quoted_name, str, _f_name]]: + if isinstance(name, conv): + return _f_name(_alembic_autogenerate_prefix(autogen_context), name) + else: + return sqla_compat.constraint_name_or_none(name) + + +def _indent(text: str) -> str: + text = re.compile(r"^", re.M).sub(" ", text).strip() + text = re.compile(r" +$", re.M).sub("", text) + return text + + +def _render_python_into_templatevars( + autogen_context: AutogenContext, + migration_script: MigrationScript, + template_args: Dict[str, Union[str, Config]], +) -> None: + imports = autogen_context.imports + + for upgrade_ops, downgrade_ops in zip( + migration_script.upgrade_ops_list, migration_script.downgrade_ops_list + ): + template_args[upgrade_ops.upgrade_token] = _indent( + 
_render_cmd_body(upgrade_ops, autogen_context) + ) + template_args[downgrade_ops.downgrade_token] = _indent( + _render_cmd_body(downgrade_ops, autogen_context) + ) + template_args["imports"] = "\n".join(sorted(imports)) + + +default_renderers = renderers = util.Dispatcher() + + +def _render_cmd_body( + op_container: ops.OpContainer, + autogen_context: AutogenContext, +) -> str: + buf = StringIO() + printer = PythonPrinter(buf) + + printer.writeline( + "# ### commands auto generated by Alembic - please adjust! ###" + ) + + has_lines = False + for op in op_container.ops: + lines = render_op(autogen_context, op) + has_lines = has_lines or bool(lines) + + for line in lines: + printer.writeline(line) + + if not has_lines: + printer.writeline("pass") + + printer.writeline("# ### end Alembic commands ###") + + return buf.getvalue() + + +def render_op( + autogen_context: AutogenContext, op: ops.MigrateOperation +) -> List[str]: + renderer = renderers.dispatch(op) + lines = util.to_list(renderer(autogen_context, op)) + return lines + + +def render_op_text( + autogen_context: AutogenContext, op: ops.MigrateOperation +) -> str: + return "\n".join(render_op(autogen_context, op)) + + +@renderers.dispatch_for(ops.ModifyTableOps) +def _render_modify_table( + autogen_context: AutogenContext, op: ModifyTableOps +) -> List[str]: + opts = autogen_context.opts + render_as_batch = opts.get("render_as_batch", False) + + if op.ops: + lines = [] + if render_as_batch: + with autogen_context._within_batch(): + lines.append( + "with op.batch_alter_table(%r, schema=%r) as batch_op:" + % (op.table_name, op.schema) + ) + for t_op in op.ops: + t_lines = render_op(autogen_context, t_op) + lines.extend(t_lines) + lines.append("") + else: + for t_op in op.ops: + t_lines = render_op(autogen_context, t_op) + lines.extend(t_lines) + + return lines + else: + return [] + + +@renderers.dispatch_for(ops.CreateTableCommentOp) +def _render_create_table_comment( + autogen_context: AutogenContext, op: ops.CreateTableCommentOp +) -> str: + if autogen_context._has_batch: + templ = ( + "{prefix}create_table_comment(\n" + "{indent}{comment},\n" + "{indent}existing_comment={existing}\n" + ")" + ) + else: + templ = ( + "{prefix}create_table_comment(\n" + "{indent}'{tname}',\n" + "{indent}{comment},\n" + "{indent}existing_comment={existing},\n" + "{indent}schema={schema}\n" + ")" + ) + return templ.format( + prefix=_alembic_autogenerate_prefix(autogen_context), + tname=op.table_name, + comment="%r" % op.comment if op.comment is not None else None, + existing="%r" % op.existing_comment + if op.existing_comment is not None + else None, + schema="'%s'" % op.schema if op.schema is not None else None, + indent=" ", + ) + + +@renderers.dispatch_for(ops.DropTableCommentOp) +def _render_drop_table_comment( + autogen_context: AutogenContext, op: ops.DropTableCommentOp +) -> str: + if autogen_context._has_batch: + templ = ( + "{prefix}drop_table_comment(\n" + "{indent}existing_comment={existing}\n" + ")" + ) + else: + templ = ( + "{prefix}drop_table_comment(\n" + "{indent}'{tname}',\n" + "{indent}existing_comment={existing},\n" + "{indent}schema={schema}\n" + ")" + ) + return templ.format( + prefix=_alembic_autogenerate_prefix(autogen_context), + tname=op.table_name, + existing="%r" % op.existing_comment + if op.existing_comment is not None + else None, + schema="'%s'" % op.schema if op.schema is not None else None, + indent=" ", + ) + + +@renderers.dispatch_for(ops.CreateTableOp) +def _add_table(autogen_context: AutogenContext, op: 
ops.CreateTableOp) -> str: + table = op.to_table() + + args = [ + col + for col in [ + _render_column(col, autogen_context) for col in table.columns + ] + if col + ] + sorted( + [ + rcons + for rcons in [ + _render_constraint( + cons, autogen_context, op._namespace_metadata + ) + for cons in table.constraints + ] + if rcons is not None + ] + ) + + if len(args) > MAX_PYTHON_ARGS: + args_str = "*[" + ",\n".join(args) + "]" + else: + args_str = ",\n".join(args) + + text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % { + "tablename": _ident(op.table_name), + "prefix": _alembic_autogenerate_prefix(autogen_context), + "args": args_str, + } + if op.schema: + text += ",\nschema=%r" % _ident(op.schema) + + comment = table.comment + if comment: + text += ",\ncomment=%r" % _ident(comment) + + info = table.info + if info: + text += f",\ninfo={info!r}" + + for k in sorted(op.kw): + text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k]) + + if table._prefixes: + prefixes = ", ".join("'%s'" % p for p in table._prefixes) + text += ",\nprefixes=[%s]" % prefixes + + text += "\n)" + return text + + +@renderers.dispatch_for(ops.DropTableOp) +def _drop_table(autogen_context: AutogenContext, op: ops.DropTableOp) -> str: + text = "%(prefix)sdrop_table(%(tname)r" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": _ident(op.table_name), + } + if op.schema: + text += ", schema=%r" % _ident(op.schema) + text += ")" + return text + + +def _render_dialect_kwargs_items( + autogen_context: AutogenContext, item: DialectKWArgs +) -> list[str]: + return [ + f"{key}={_render_potential_expr(val, autogen_context)}" + for key, val in item.dialect_kwargs.items() + ] + + +@renderers.dispatch_for(ops.CreateIndexOp) +def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str: + index = op.to_index() + + has_batch = autogen_context._has_batch + + if has_batch: + tmpl = ( + "%(prefix)screate_index(%(name)r, [%(columns)s], " + "unique=%(unique)r%(kwargs)s)" + ) + else: + tmpl = ( + "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], " + "unique=%(unique)r%(schema)s%(kwargs)s)" + ) + + assert index.table is not None + + opts = _render_dialect_kwargs_items(autogen_context, index) + text = tmpl % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "name": _render_gen_name(autogen_context, index.name), + "table": _ident(index.table.name), + "columns": ", ".join( + _get_index_rendered_expressions(index, autogen_context) + ), + "unique": index.unique or False, + "schema": (", schema=%r" % _ident(index.table.schema)) + if index.table.schema + else "", + "kwargs": ", " + ", ".join(opts) if opts else "", + } + return text + + +@renderers.dispatch_for(ops.DropIndexOp) +def _drop_index(autogen_context: AutogenContext, op: ops.DropIndexOp) -> str: + index = op.to_index() + + has_batch = autogen_context._has_batch + + if has_batch: + tmpl = "%(prefix)sdrop_index(%(name)r%(kwargs)s)" + else: + tmpl = ( + "%(prefix)sdrop_index(%(name)r, " + "table_name=%(table_name)r%(schema)s%(kwargs)s)" + ) + opts = _render_dialect_kwargs_items(autogen_context, index) + text = tmpl % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "name": _render_gen_name(autogen_context, op.index_name), + "table_name": _ident(op.table_name), + "schema": ((", schema=%r" % _ident(op.schema)) if op.schema else ""), + "kwargs": ", " + ", ".join(opts) if opts else "", + } + return text + + +@renderers.dispatch_for(ops.CreateUniqueConstraintOp) +def _add_unique_constraint( + autogen_context: 
AutogenContext, op: ops.CreateUniqueConstraintOp +) -> List[str]: + return [_uq_constraint(op.to_constraint(), autogen_context, True)] + + +@renderers.dispatch_for(ops.CreateForeignKeyOp) +def _add_fk_constraint( + autogen_context: AutogenContext, op: ops.CreateForeignKeyOp +) -> str: + args = [repr(_render_gen_name(autogen_context, op.constraint_name))] + if not autogen_context._has_batch: + args.append(repr(_ident(op.source_table))) + + args.extend( + [ + repr(_ident(op.referent_table)), + repr([_ident(col) for col in op.local_cols]), + repr([_ident(col) for col in op.remote_cols]), + ] + ) + kwargs = [ + "referent_schema", + "onupdate", + "ondelete", + "initially", + "deferrable", + "use_alter", + "match", + ] + if not autogen_context._has_batch: + kwargs.insert(0, "source_schema") + + for k in kwargs: + if k in op.kw: + value = op.kw[k] + if value is not None: + args.append("%s=%r" % (k, value)) + + return "%(prefix)screate_foreign_key(%(args)s)" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + + +@renderers.dispatch_for(ops.CreatePrimaryKeyOp) +def _add_pk_constraint(constraint, autogen_context): + raise NotImplementedError() + + +@renderers.dispatch_for(ops.CreateCheckConstraintOp) +def _add_check_constraint(constraint, autogen_context): + raise NotImplementedError() + + +@renderers.dispatch_for(ops.DropConstraintOp) +def _drop_constraint( + autogen_context: AutogenContext, op: ops.DropConstraintOp +) -> str: + prefix = _alembic_autogenerate_prefix(autogen_context) + name = _render_gen_name(autogen_context, op.constraint_name) + schema = _ident(op.schema) if op.schema else None + type_ = _ident(op.constraint_type) if op.constraint_type else None + + params_strs = [] + params_strs.append(repr(name)) + if not autogen_context._has_batch: + params_strs.append(repr(_ident(op.table_name))) + if schema is not None: + params_strs.append(f"schema={schema!r}") + if type_ is not None: + params_strs.append(f"type_={type_!r}") + + return f"{prefix}drop_constraint({', '.join(params_strs)})" + + +@renderers.dispatch_for(ops.AddColumnOp) +def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str: + schema, tname, column = op.schema, op.table_name, op.column + if autogen_context._has_batch: + template = "%(prefix)sadd_column(%(column)s)" + else: + template = "%(prefix)sadd_column(%(tname)r, %(column)s" + if schema: + template += ", schema=%(schema)r" + template += ")" + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": tname, + "column": _render_column(column, autogen_context), + "schema": schema, + } + return text + + +@renderers.dispatch_for(ops.DropColumnOp) +def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str: + schema, tname, column_name = op.schema, op.table_name, op.column_name + + if autogen_context._has_batch: + template = "%(prefix)sdrop_column(%(cname)r)" + else: + template = "%(prefix)sdrop_column(%(tname)r, %(cname)r" + if schema: + template += ", schema=%(schema)r" + template += ")" + + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": _ident(tname), + "cname": _ident(column_name), + "schema": _ident(schema), + } + return text + + +@renderers.dispatch_for(ops.AlterColumnOp) +def _alter_column( + autogen_context: AutogenContext, op: ops.AlterColumnOp +) -> str: + tname = op.table_name + cname = op.column_name + server_default = op.modify_server_default + type_ = op.modify_type + nullable = op.modify_nullable + comment 
= op.modify_comment + autoincrement = op.kw.get("autoincrement", None) + existing_type = op.existing_type + existing_nullable = op.existing_nullable + existing_comment = op.existing_comment + existing_server_default = op.existing_server_default + schema = op.schema + + indent = " " * 11 + + if autogen_context._has_batch: + template = "%(prefix)salter_column(%(cname)r" + else: + template = "%(prefix)salter_column(%(tname)r, %(cname)r" + + text = template % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": tname, + "cname": cname, + } + if existing_type is not None: + text += ",\n%sexisting_type=%s" % ( + indent, + _repr_type(existing_type, autogen_context), + ) + if server_default is not False: + rendered = _render_server_default(server_default, autogen_context) + text += ",\n%sserver_default=%s" % (indent, rendered) + + if type_ is not None: + text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context)) + if nullable is not None: + text += ",\n%snullable=%r" % (indent, nullable) + if comment is not False: + text += ",\n%scomment=%r" % (indent, comment) + if existing_comment is not None: + text += ",\n%sexisting_comment=%r" % (indent, existing_comment) + if nullable is None and existing_nullable is not None: + text += ",\n%sexisting_nullable=%r" % (indent, existing_nullable) + if autoincrement is not None: + text += ",\n%sautoincrement=%r" % (indent, autoincrement) + if server_default is False and existing_server_default: + rendered = _render_server_default( + existing_server_default, autogen_context + ) + text += ",\n%sexisting_server_default=%s" % (indent, rendered) + if schema and not autogen_context._has_batch: + text += ",\n%sschema=%r" % (indent, schema) + text += ")" + return text + + +class _f_name: + def __init__(self, prefix: str, name: conv) -> None: + self.prefix = prefix + self.name = name + + def __repr__(self) -> str: + return "%sf(%r)" % (self.prefix, _ident(self.name)) + + +def _ident(name: Optional[Union[quoted_name, str]]) -> Optional[str]: + """produce a __repr__() object for a string identifier that may + use quoted_name() in SQLAlchemy 0.9 and greater. + + The issue worked around here is that quoted_name() doesn't have + very good repr() behavior by itself when unicode is involved. 
+ + """ + if name is None: + return name + elif isinstance(name, quoted_name): + return str(name) + elif isinstance(name, str): + return name + + +def _render_potential_expr( + value: Any, + autogen_context: AutogenContext, + *, + wrap_in_text: bool = True, + is_server_default: bool = False, + is_index: bool = False, +) -> str: + if isinstance(value, sql.ClauseElement): + if wrap_in_text: + template = "%(prefix)stext(%(sql)r)" + else: + template = "%(sql)r" + + return template % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "sql": autogen_context.migration_context.impl.render_ddl_sql_expr( + value, is_server_default=is_server_default, is_index=is_index + ), + } + + else: + return repr(value) + + +def _get_index_rendered_expressions( + idx: Index, autogen_context: AutogenContext +) -> List[str]: + return [ + repr(_ident(getattr(exp, "name", None))) + if isinstance(exp, sa_schema.Column) + else _render_potential_expr(exp, autogen_context, is_index=True) + for exp in idx.expressions + ] + + +def _uq_constraint( + constraint: UniqueConstraint, + autogen_context: AutogenContext, + alter: bool, +) -> str: + opts: List[Tuple[str, Any]] = [] + + has_batch = autogen_context._has_batch + + if constraint.deferrable: + opts.append(("deferrable", str(constraint.deferrable))) + if constraint.initially: + opts.append(("initially", str(constraint.initially))) + if not has_batch and alter and constraint.table.schema: + opts.append(("schema", _ident(constraint.table.schema))) + if not alter and constraint.name: + opts.append( + ("name", _render_gen_name(autogen_context, constraint.name)) + ) + dialect_options = _render_dialect_kwargs_items(autogen_context, constraint) + + if alter: + args = [repr(_render_gen_name(autogen_context, constraint.name))] + if not has_batch: + args += [repr(_ident(constraint.table.name))] + args.append(repr([_ident(col.name) for col in constraint.columns])) + args.extend(["%s=%r" % (k, v) for k, v in opts]) + args.extend(dialect_options) + return "%(prefix)screate_unique_constraint(%(args)s)" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + else: + args = [repr(_ident(col.name)) for col in constraint.columns] + args.extend(["%s=%r" % (k, v) for k, v in opts]) + args.extend(dialect_options) + return "%(prefix)sUniqueConstraint(%(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + + +def _user_autogenerate_prefix(autogen_context, target): + prefix = autogen_context.opts["user_module_prefix"] + if prefix is None: + return "%s." % target.__module__ + else: + return prefix + + +def _sqlalchemy_autogenerate_prefix(autogen_context: AutogenContext) -> str: + return autogen_context.opts["sqlalchemy_module_prefix"] or "" + + +def _alembic_autogenerate_prefix(autogen_context: AutogenContext) -> str: + if autogen_context._has_batch: + return "batch_op." 
+ else: + return autogen_context.opts["alembic_module_prefix"] or "" + + +def _user_defined_render( + type_: str, object_: Any, autogen_context: AutogenContext +) -> Union[str, Literal[False]]: + if "render_item" in autogen_context.opts: + render = autogen_context.opts["render_item"] + if render: + rendered = render(type_, object_, autogen_context) + if rendered is not False: + return rendered + return False + + +def _render_column( + column: Column[Any], autogen_context: AutogenContext +) -> str: + rendered = _user_defined_render("column", column, autogen_context) + if rendered is not False: + return rendered + + args: List[str] = [] + opts: List[Tuple[str, Any]] = [] + + if column.server_default: + rendered = _render_server_default( # type:ignore[assignment] + column.server_default, autogen_context + ) + if rendered: + if _should_render_server_default_positionally( + column.server_default + ): + args.append(rendered) + else: + opts.append(("server_default", rendered)) + + if ( + column.autoincrement is not None + and column.autoincrement != sqla_compat.AUTOINCREMENT_DEFAULT + ): + opts.append(("autoincrement", column.autoincrement)) + + if column.nullable is not None: + opts.append(("nullable", column.nullable)) + + if column.system: + opts.append(("system", column.system)) + + comment = column.comment + if comment: + opts.append(("comment", "%r" % comment)) + + # TODO: for non-ascii colname, assign a "key" + return "%(prefix)sColumn(%(name)r, %(type)s, %(args)s%(kwargs)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "name": _ident(column.name), + "type": _repr_type(column.type, autogen_context), + "args": ", ".join([str(arg) for arg in args]) + ", " if args else "", + "kwargs": ( + ", ".join( + ["%s=%s" % (kwname, val) for kwname, val in opts] + + [ + "%s=%s" + % (key, _render_potential_expr(val, autogen_context)) + for key, val in sqla_compat._column_kwargs(column).items() + ] + ) + ), + } + + +def _should_render_server_default_positionally(server_default: Any) -> bool: + return sqla_compat._server_default_is_computed( + server_default + ) or sqla_compat._server_default_is_identity(server_default) + + +def _render_server_default( + default: Optional[ + Union[FetchedValue, str, TextClause, ColumnElement[Any]] + ], + autogen_context: AutogenContext, + repr_: bool = True, +) -> Optional[str]: + rendered = _user_defined_render("server_default", default, autogen_context) + if rendered is not False: + return rendered + + if sqla_compat._server_default_is_computed(default): + return _render_computed(cast("Computed", default), autogen_context) + elif sqla_compat._server_default_is_identity(default): + return _render_identity(cast("Identity", default), autogen_context) + elif isinstance(default, sa_schema.DefaultClause): + if isinstance(default.arg, str): + default = default.arg + else: + return _render_potential_expr( + default.arg, autogen_context, is_server_default=True + ) + + if isinstance(default, str) and repr_: + default = repr(re.sub(r"^'|'$", "", default)) + + return cast(str, default) + + +def _render_computed( + computed: Computed, autogen_context: AutogenContext +) -> str: + text = _render_potential_expr( + computed.sqltext, autogen_context, wrap_in_text=False + ) + + kwargs = {} + if computed.persisted is not None: + kwargs["persisted"] = computed.persisted + return "%(prefix)sComputed(%(text)s, %(kwargs)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "text": text, + "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())), + 
} + + +def _render_identity( + identity: Identity, autogen_context: AutogenContext +) -> str: + kwargs = sqla_compat._get_identity_options_dict( + identity, dialect_kwargs=True + ) + + return "%(prefix)sIdentity(%(kwargs)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())), + } + + +def _repr_type( + type_: TypeEngine, + autogen_context: AutogenContext, + _skip_variants: bool = False, +) -> str: + rendered = _user_defined_render("type", type_, autogen_context) + if rendered is not False: + return rendered + + if hasattr(autogen_context.migration_context, "impl"): + impl_rt = autogen_context.migration_context.impl.render_type( + type_, autogen_context + ) + else: + impl_rt = None + + mod = type(type_).__module__ + imports = autogen_context.imports + if mod.startswith("sqlalchemy.dialects"): + match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) + assert match is not None + dname = match.group(1) + if imports is not None: + imports.add("from sqlalchemy.dialects import %s" % dname) + if impl_rt: + return impl_rt + else: + return "%s.%r" % (dname, type_) + elif impl_rt: + return impl_rt + elif not _skip_variants and sqla_compat._type_has_variants(type_): + return _render_Variant_type(type_, autogen_context) + elif mod.startswith("sqlalchemy."): + if "_render_%s_type" % type_.__visit_name__ in globals(): + fn = globals()["_render_%s_type" % type_.__visit_name__] + return fn(type_, autogen_context) + else: + prefix = _sqlalchemy_autogenerate_prefix(autogen_context) + return "%s%r" % (prefix, type_) + else: + prefix = _user_autogenerate_prefix(autogen_context, type_) + return "%s%r" % (prefix, type_) + + +def _render_ARRAY_type(type_: ARRAY, autogen_context: AutogenContext) -> str: + return cast( + str, + _render_type_w_subtype( + type_, autogen_context, "item_type", r"(.+?\()" + ), + ) + + +def _render_Variant_type( + type_: TypeEngine, autogen_context: AutogenContext +) -> str: + base_type, variant_mapping = sqla_compat._get_variant_mapping(type_) + base = _repr_type(base_type, autogen_context, _skip_variants=True) + assert base is not None and base is not False # type: ignore[comparison-overlap] # noqa:E501 + for dialect in sorted(variant_mapping): + typ = variant_mapping[dialect] + base += ".with_variant(%s, %r)" % ( + _repr_type(typ, autogen_context, _skip_variants=True), + dialect, + ) + return base + + +def _render_type_w_subtype( + type_: TypeEngine, + autogen_context: AutogenContext, + attrname: str, + regexp: str, + prefix: Optional[str] = None, +) -> Union[Optional[str], Literal[False]]: + outer_repr = repr(type_) + inner_type = getattr(type_, attrname, None) + if inner_type is None: + return False + + inner_repr = repr(inner_type) + + inner_repr = re.sub(r"([\(\)])", r"\\\1", inner_repr) + sub_type = _repr_type(getattr(type_, attrname), autogen_context) + outer_type = re.sub(regexp + inner_repr, r"\1%s" % sub_type, outer_repr) + + if prefix: + return "%s%s" % (prefix, outer_type) + + mod = type(type_).__module__ + if mod.startswith("sqlalchemy.dialects"): + match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) + assert match is not None + dname = match.group(1) + return "%s.%s" % (dname, outer_type) + elif mod.startswith("sqlalchemy"): + prefix = _sqlalchemy_autogenerate_prefix(autogen_context) + return "%s%s" % (prefix, outer_type) + else: + return None + + +_constraint_renderers = util.Dispatcher() + + +def _render_constraint( + constraint: Constraint, + autogen_context: AutogenContext, + 
namespace_metadata: Optional[MetaData], +) -> Optional[str]: + try: + renderer = _constraint_renderers.dispatch(constraint) + except ValueError: + util.warn("No renderer is established for object %r" % constraint) + return "[Unknown Python object %r]" % constraint + else: + return renderer(constraint, autogen_context, namespace_metadata) + + +@_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint) +def _render_primary_key( + constraint: PrimaryKeyConstraint, + autogen_context: AutogenContext, + namespace_metadata: Optional[MetaData], +) -> Optional[str]: + rendered = _user_defined_render("primary_key", constraint, autogen_context) + if rendered is not False: + return rendered + + if not constraint.columns: + return None + + opts = [] + if constraint.name: + opts.append( + ("name", repr(_render_gen_name(autogen_context, constraint.name))) + ) + return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "args": ", ".join( + [repr(c.name) for c in constraint.columns] + + ["%s=%s" % (kwname, val) for kwname, val in opts] + ), + } + + +def _fk_colspec( + fk: ForeignKey, + metadata_schema: Optional[str], + namespace_metadata: MetaData, +) -> str: + """Implement a 'safe' version of ForeignKey._get_colspec() that + won't fail if the remote table can't be resolved. + + """ + colspec = fk._get_colspec() + tokens = colspec.split(".") + tname, colname = tokens[-2:] + + if metadata_schema is not None and len(tokens) == 2: + table_fullname = "%s.%s" % (metadata_schema, tname) + else: + table_fullname = ".".join(tokens[0:-1]) + + if ( + not fk.link_to_name + and fk.parent is not None + and fk.parent.table is not None + ): + # try to resolve the remote table in order to adjust for column.key. + # the FK constraint needs to be rendered in terms of the column + # name. 
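+ # (Illustration, hypothetical column: a target Column("user_id", key="uid")
+ # referenced as ForeignKey("user.uid") renders against the database-side
+ # name "user_id" rather than the Python-side .key, via the lookup below.)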
+ + if table_fullname in namespace_metadata.tables: + col = namespace_metadata.tables[table_fullname].c.get(colname) + if col is not None: + colname = _ident(col.name) # type: ignore[assignment] + + colspec = "%s.%s" % (table_fullname, colname) + + return colspec + + +def _populate_render_fk_opts( + constraint: ForeignKeyConstraint, opts: List[Tuple[str, str]] +) -> None: + if constraint.onupdate: + opts.append(("onupdate", repr(constraint.onupdate))) + if constraint.ondelete: + opts.append(("ondelete", repr(constraint.ondelete))) + if constraint.initially: + opts.append(("initially", repr(constraint.initially))) + if constraint.deferrable: + opts.append(("deferrable", repr(constraint.deferrable))) + if constraint.use_alter: + opts.append(("use_alter", repr(constraint.use_alter))) + if constraint.match: + opts.append(("match", repr(constraint.match))) + + +@_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint) +def _render_foreign_key( + constraint: ForeignKeyConstraint, + autogen_context: AutogenContext, + namespace_metadata: MetaData, +) -> Optional[str]: + rendered = _user_defined_render("foreign_key", constraint, autogen_context) + if rendered is not False: + return rendered + + opts = [] + if constraint.name: + opts.append( + ("name", repr(_render_gen_name(autogen_context, constraint.name))) + ) + + _populate_render_fk_opts(constraint, opts) + + apply_metadata_schema = namespace_metadata.schema + return ( + "%(prefix)sForeignKeyConstraint([%(cols)s], " + "[%(refcols)s], %(args)s)" + % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "cols": ", ".join( + repr(_ident(f.parent.name)) for f in constraint.elements + ), + "refcols": ", ".join( + repr(_fk_colspec(f, apply_metadata_schema, namespace_metadata)) + for f in constraint.elements + ), + "args": ", ".join( + ["%s=%s" % (kwname, val) for kwname, val in opts] + ), + } + ) + + +@_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint) +def _render_unique_constraint( + constraint: UniqueConstraint, + autogen_context: AutogenContext, + namespace_metadata: Optional[MetaData], +) -> str: + rendered = _user_defined_render("unique", constraint, autogen_context) + if rendered is not False: + return rendered + + return _uq_constraint(constraint, autogen_context, False) + + +@_constraint_renderers.dispatch_for(sa_schema.CheckConstraint) +def _render_check_constraint( + constraint: CheckConstraint, + autogen_context: AutogenContext, + namespace_metadata: Optional[MetaData], +) -> Optional[str]: + rendered = _user_defined_render("check", constraint, autogen_context) + if rendered is not False: + return rendered + + # detect the constraint being part of + # a parent type which is probably in the Table already. + # ideally SQLAlchemy would give us more of a first class + # way to detect this. 
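+ # (Illustration: the CHECK constraint that SQLAlchemy's Boolean type
+ # generates on backends without a native boolean carries a _create_rule
+ # whose target is the type itself; such constraints are skipped here
+ # rather than rendered a second time.)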
+ if ( + constraint._create_rule + and hasattr(constraint._create_rule, "target") + and isinstance( + constraint._create_rule.target, + sqltypes.TypeEngine, + ) + ): + return None + opts = [] + if constraint.name: + opts.append( + ("name", repr(_render_gen_name(autogen_context, constraint.name))) + ) + return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "opts": ", " + (", ".join("%s=%s" % (k, v) for k, v in opts)) + if opts + else "", + "sqltext": _render_potential_expr( + constraint.sqltext, autogen_context, wrap_in_text=False + ), + } + + +@renderers.dispatch_for(ops.ExecuteSQLOp) +def _execute_sql(autogen_context: AutogenContext, op: ops.ExecuteSQLOp) -> str: + if not isinstance(op.sqltext, str): + raise NotImplementedError( + "Autogenerate rendering of SQL Expression language constructs " + "not supported here; please use a plain SQL string" + ) + return "op.execute(%r)" % op.sqltext + + +renderers = default_renderers.branch() diff --git a/venv/lib/python3.12/site-packages/alembic/autogenerate/rewriter.py b/venv/lib/python3.12/site-packages/alembic/autogenerate/rewriter.py new file mode 100644 index 0000000..8994dcf --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/autogenerate/rewriter.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Iterator +from typing import List +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from .. import util +from ..operations import ops + +if TYPE_CHECKING: + from ..operations.ops import AddColumnOp + from ..operations.ops import AlterColumnOp + from ..operations.ops import CreateTableOp + from ..operations.ops import DowngradeOps + from ..operations.ops import MigrateOperation + from ..operations.ops import MigrationScript + from ..operations.ops import ModifyTableOps + from ..operations.ops import OpContainer + from ..operations.ops import UpgradeOps + from ..runtime.migration import MigrationContext + from ..script.revision import _GetRevArg + +ProcessRevisionDirectiveFn = Callable[ + ["MigrationContext", "_GetRevArg", List["MigrationScript"]], None +] + + +class Rewriter: + """A helper object that allows easy 'rewriting' of ops streams. + + The :class:`.Rewriter` object is intended to be passed along + to the + :paramref:`.EnvironmentContext.configure.process_revision_directives` + parameter in an ``env.py`` script. Once constructed, any number + of "rewrites" functions can be associated with it, which will be given + the opportunity to modify the structure without having to have explicit + knowledge of the overall structure. + + The function is passed the :class:`.MigrationContext` object and + ``revision`` tuple that are passed to the :paramref:`.Environment + Context.configure.process_revision_directives` function normally, + and the third argument is an individual directive of the type + noted in the decorator. The function has the choice of returning + a single op directive, which normally can be the directive that + was actually passed, or a new directive to replace it, or a list + of zero or more directives to replace it. + + .. seealso:: + + :ref:`autogen_rewriter` - usage example + + """ + + _traverse = util.Dispatcher() + + _chained: Tuple[Union[ProcessRevisionDirectiveFn, Rewriter], ...] 
= () + + def __init__(self) -> None: + self.dispatch = util.Dispatcher() + + def chain( + self, + other: Union[ + ProcessRevisionDirectiveFn, + Rewriter, + ], + ) -> Rewriter: + """Produce a "chain" of this :class:`.Rewriter` to another. + + This allows two or more rewriters to operate serially on a stream, + e.g.:: + + writer1 = autogenerate.Rewriter() + writer2 = autogenerate.Rewriter() + + + @writer1.rewrites(ops.AddColumnOp) + def add_column_nullable(context, revision, op): + op.column.nullable = True + return op + + + @writer2.rewrites(ops.AddColumnOp) + def add_column_idx(context, revision, op): + idx_op = ops.CreateIndexOp( + "ixc", op.table_name, [op.column.name] + ) + return [op, idx_op] + + writer = writer1.chain(writer2) + + :param other: a :class:`.Rewriter` instance + :return: a new :class:`.Rewriter` that will run the operations + of this writer, then the "other" writer, in succession. + + """ + wr = self.__class__.__new__(self.__class__) + wr.__dict__.update(self.__dict__) + wr._chained += (other,) + return wr + + def rewrites( + self, + operator: Union[ + Type[AddColumnOp], + Type[MigrateOperation], + Type[AlterColumnOp], + Type[CreateTableOp], + Type[ModifyTableOps], + ], + ) -> Callable[..., Any]: + """Register a function as rewriter for a given type. + + The function should receive three arguments, which are + the :class:`.MigrationContext`, a ``revision`` tuple, and + an op directive of the type indicated. E.g.:: + + @writer1.rewrites(ops.AddColumnOp) + def add_column_nullable(context, revision, op): + op.column.nullable = True + return op + + """ + return self.dispatch.dispatch_for(operator) + + def _rewrite( + self, + context: MigrationContext, + revision: _GetRevArg, + directive: MigrateOperation, + ) -> Iterator[MigrateOperation]: + try: + _rewriter = self.dispatch.dispatch(directive) + except ValueError: + _rewriter = None + yield directive + else: + if self in directive._mutations: + yield directive + else: + for r_directive in util.to_list( + _rewriter(context, revision, directive), [] + ): + r_directive._mutations = r_directive._mutations.union( + [self] + ) + yield r_directive + + def __call__( + self, + context: MigrationContext, + revision: _GetRevArg, + directives: List[MigrationScript], + ) -> None: + self.process_revision_directives(context, revision, directives) + for process_revision_directives in self._chained: + process_revision_directives(context, revision, directives) + + @_traverse.dispatch_for(ops.MigrationScript) + def _traverse_script( + self, + context: MigrationContext, + revision: _GetRevArg, + directive: MigrationScript, + ) -> None: + upgrade_ops_list: List[UpgradeOps] = [] + for upgrade_ops in directive.upgrade_ops_list: + ret = self._traverse_for(context, revision, upgrade_ops) + if len(ret) != 1: + raise ValueError( + "Can only return single object for UpgradeOps traverse" + ) + upgrade_ops_list.append(ret[0]) + + directive.upgrade_ops = upgrade_ops_list # type: ignore + + downgrade_ops_list: List[DowngradeOps] = [] + for downgrade_ops in directive.downgrade_ops_list: + ret = self._traverse_for(context, revision, downgrade_ops) + if len(ret) != 1: + raise ValueError( + "Can only return single object for DowngradeOps traverse" + ) + downgrade_ops_list.append(ret[0]) + directive.downgrade_ops = downgrade_ops_list # type: ignore + + @_traverse.dispatch_for(ops.OpContainer) + def _traverse_op_container( + self, + context: MigrationContext, + revision: _GetRevArg, + directive: OpContainer, + ) -> None: + self._traverse_list(context, revision, 
directive.ops) + + @_traverse.dispatch_for(ops.MigrateOperation) + def _traverse_any_directive( + self, + context: MigrationContext, + revision: _GetRevArg, + directive: MigrateOperation, + ) -> None: + pass + + def _traverse_for( + self, + context: MigrationContext, + revision: _GetRevArg, + directive: MigrateOperation, + ) -> Any: + directives = list(self._rewrite(context, revision, directive)) + for directive in directives: + traverser = self._traverse.dispatch(directive) + traverser(self, context, revision, directive) + return directives + + def _traverse_list( + self, + context: MigrationContext, + revision: _GetRevArg, + directives: Any, + ) -> None: + dest = [] + for directive in directives: + dest.extend(self._traverse_for(context, revision, directive)) + + directives[:] = dest + + def process_revision_directives( + self, + context: MigrationContext, + revision: _GetRevArg, + directives: List[MigrationScript], + ) -> None: + self._traverse_list(context, revision, directives) diff --git a/venv/lib/python3.12/site-packages/alembic/command.py b/venv/lib/python3.12/site-packages/alembic/command.py new file mode 100644 index 0000000..37aa6e6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/command.py @@ -0,0 +1,749 @@ +# mypy: allow-untyped-defs, allow-untyped-calls + +from __future__ import annotations + +import os +from typing import List +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from . import autogenerate as autogen +from . import util +from .runtime.environment import EnvironmentContext +from .script import ScriptDirectory + +if TYPE_CHECKING: + from alembic.config import Config + from alembic.script.base import Script + from alembic.script.revision import _RevIdType + from .runtime.environment import ProcessRevisionDirectiveFn + + +def list_templates(config: Config) -> None: + """List available templates. + + :param config: a :class:`.Config` object. + + """ + + config.print_stdout("Available templates:\n") + for tempname in os.listdir(config.get_template_directory()): + with open( + os.path.join(config.get_template_directory(), tempname, "README") + ) as readme: + synopsis = next(readme).rstrip() + config.print_stdout("%s - %s", tempname, synopsis) + + config.print_stdout("\nTemplates are used via the 'init' command, e.g.:") + config.print_stdout("\n alembic init --template generic ./scripts") + + +def init( + config: Config, + directory: str, + template: str = "generic", + package: bool = False, +) -> None: + """Initialize a new scripts directory. + + :param config: a :class:`.Config` object. + + :param directory: string path of the target directory + + :param template: string name of the migration environment template to + use. + + :param package: when True, write ``__init__.py`` files into the + environment location as well as the versions/ location. 
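+
+    A minimal programmatic invocation, assuming an ``alembic.ini`` lives
+    in the current working directory, might look like::
+
+        from alembic import command
+        from alembic.config import Config
+
+        cfg = Config("alembic.ini")
+        command.init(cfg, "migrations", template="generic", package=True)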
+ + """ + + if os.access(directory, os.F_OK) and os.listdir(directory): + raise util.CommandError( + "Directory %s already exists and is not empty" % directory + ) + + template_dir = os.path.join(config.get_template_directory(), template) + if not os.access(template_dir, os.F_OK): + raise util.CommandError("No such template %r" % template) + + if not os.access(directory, os.F_OK): + with util.status( + f"Creating directory {os.path.abspath(directory)!r}", + **config.messaging_opts, + ): + os.makedirs(directory) + + versions = os.path.join(directory, "versions") + with util.status( + f"Creating directory {os.path.abspath(versions)!r}", + **config.messaging_opts, + ): + os.makedirs(versions) + + script = ScriptDirectory(directory) + + config_file: str | None = None + for file_ in os.listdir(template_dir): + file_path = os.path.join(template_dir, file_) + if file_ == "alembic.ini.mako": + assert config.config_file_name is not None + config_file = os.path.abspath(config.config_file_name) + if os.access(config_file, os.F_OK): + util.msg( + f"File {config_file!r} already exists, skipping", + **config.messaging_opts, + ) + else: + script._generate_template( + file_path, config_file, script_location=directory + ) + elif os.path.isfile(file_path): + output_file = os.path.join(directory, file_) + script._copy_file(file_path, output_file) + + if package: + for path in [ + os.path.join(os.path.abspath(directory), "__init__.py"), + os.path.join(os.path.abspath(versions), "__init__.py"), + ]: + with util.status(f"Adding {path!r}", **config.messaging_opts): + with open(path, "w"): + pass + + assert config_file is not None + util.msg( + "Please edit configuration/connection/logging " + f"settings in {config_file!r} before proceeding.", + **config.messaging_opts, + ) + + +def revision( + config: Config, + message: Optional[str] = None, + autogenerate: bool = False, + sql: bool = False, + head: str = "head", + splice: bool = False, + branch_label: Optional[_RevIdType] = None, + version_path: Optional[str] = None, + rev_id: Optional[str] = None, + depends_on: Optional[str] = None, + process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None, +) -> Union[Optional[Script], List[Optional[Script]]]: + """Create a new revision file. + + :param config: a :class:`.Config` object. + + :param message: string message to apply to the revision; this is the + ``-m`` option to ``alembic revision``. + + :param autogenerate: whether or not to autogenerate the script from + the database; this is the ``--autogenerate`` option to + ``alembic revision``. + + :param sql: whether to dump the script out as a SQL string; when specified, + the script is dumped to stdout. This is the ``--sql`` option to + ``alembic revision``. + + :param head: head revision to build the new revision upon as a parent; + this is the ``--head`` option to ``alembic revision``. + + :param splice: whether or not the new revision should be made into a + new head of its own; is required when the given ``head`` is not itself + a head. This is the ``--splice`` option to ``alembic revision``. + + :param branch_label: string label to apply to the branch; this is the + ``--branch-label`` option to ``alembic revision``. + + :param version_path: string symbol identifying a specific version path + from the configuration; this is the ``--version-path`` option to + ``alembic revision``. + + :param rev_id: optional revision identifier to use instead of having + one generated; this is the ``--rev-id`` option to ``alembic revision``. 
+ + :param depends_on: optional list of "depends on" identifiers; this is the + ``--depends-on`` option to ``alembic revision``. + + :param process_revision_directives: this is a callable that takes the + same form as the callable described at + :paramref:`.EnvironmentContext.configure.process_revision_directives`; + will be applied to the structure generated by the revision process + where it can be altered programmatically. Note that unlike all + the other parameters, this option is only available via programmatic + use of :func:`.command.revision` + + """ + + script_directory = ScriptDirectory.from_config(config) + + command_args = dict( + message=message, + autogenerate=autogenerate, + sql=sql, + head=head, + splice=splice, + branch_label=branch_label, + version_path=version_path, + rev_id=rev_id, + depends_on=depends_on, + ) + revision_context = autogen.RevisionContext( + config, + script_directory, + command_args, + process_revision_directives=process_revision_directives, + ) + + environment = util.asbool(config.get_main_option("revision_environment")) + + if autogenerate: + environment = True + + if sql: + raise util.CommandError( + "Using --sql with --autogenerate does not make any sense" + ) + + def retrieve_migrations(rev, context): + revision_context.run_autogenerate(rev, context) + return [] + + elif environment: + + def retrieve_migrations(rev, context): + revision_context.run_no_autogenerate(rev, context) + return [] + + elif sql: + raise util.CommandError( + "Using --sql with the revision command when " + "revision_environment is not configured does not make any sense" + ) + + if environment: + with EnvironmentContext( + config, + script_directory, + fn=retrieve_migrations, + as_sql=sql, + template_args=revision_context.template_args, + revision_context=revision_context, + ): + script_directory.run_env() + + # the revision_context now has MigrationScript structure(s) present. + # these could theoretically be further processed / rewritten *here*, + # in addition to the hooks present within each run_migrations() call, + # or at the end of env.py run_migrations_online(). + + scripts = [script for script in revision_context.generate_scripts()] + if len(scripts) == 1: + return scripts[0] + else: + return scripts + + +def check(config: "Config") -> None: + """Check if revision command with autogenerate has pending upgrade ops. + + :param config: a :class:`.Config` object. + + .. versionadded:: 1.9.0 + + """ + + script_directory = ScriptDirectory.from_config(config) + + command_args = dict( + message=None, + autogenerate=True, + sql=False, + head="head", + splice=False, + branch_label=None, + version_path=None, + rev_id=None, + depends_on=None, + ) + revision_context = autogen.RevisionContext( + config, + script_directory, + command_args, + ) + + def retrieve_migrations(rev, context): + revision_context.run_autogenerate(rev, context) + return [] + + with EnvironmentContext( + config, + script_directory, + fn=retrieve_migrations, + as_sql=False, + template_args=revision_context.template_args, + revision_context=revision_context, + ): + script_directory.run_env() + + # the revision_context now has MigrationScript structure(s) present. 
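+    # below, the in-memory revision produced by autogenerate is inspected;
+    # any rendered upgrade operations mean the models and the database
+    # schema have drifted apart, which is reported as an error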
+ + migration_script = revision_context.generated_revisions[-1] + diffs = [] + for upgrade_ops in migration_script.upgrade_ops_list: + diffs.extend(upgrade_ops.as_diffs()) + + if diffs: + raise util.AutogenerateDiffsDetected( + f"New upgrade operations detected: {diffs}" + ) + else: + config.print_stdout("No new upgrade operations detected.") + + +def merge( + config: Config, + revisions: _RevIdType, + message: Optional[str] = None, + branch_label: Optional[_RevIdType] = None, + rev_id: Optional[str] = None, +) -> Optional[Script]: + """Merge two revisions together. Creates a new migration file. + + :param config: a :class:`.Config` instance + + :param message: string message to apply to the revision + + :param branch_label: string label name to apply to the new revision + + :param rev_id: hardcoded revision identifier instead of generating a new + one. + + .. seealso:: + + :ref:`branches` + + """ + + script = ScriptDirectory.from_config(config) + template_args = { + "config": config # Let templates use config for + # e.g. multiple databases + } + + environment = util.asbool(config.get_main_option("revision_environment")) + + if environment: + + def nothing(rev, context): + return [] + + with EnvironmentContext( + config, + script, + fn=nothing, + as_sql=False, + template_args=template_args, + ): + script.run_env() + + return script.generate_revision( + rev_id or util.rev_id(), + message, + refresh=True, + head=revisions, + branch_labels=branch_label, + **template_args, # type:ignore[arg-type] + ) + + +def upgrade( + config: Config, + revision: str, + sql: bool = False, + tag: Optional[str] = None, +) -> None: + """Upgrade to a later version. + + :param config: a :class:`.Config` instance. + + :param revision: string revision target or range for --sql mode + + :param sql: if True, use ``--sql`` mode + + :param tag: an arbitrary "tag" that can be intercepted by custom + ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument` + method. + + """ + + script = ScriptDirectory.from_config(config) + + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(":", 2) + + def upgrade(rev, context): + return script._upgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=upgrade, + as_sql=sql, + starting_rev=starting_rev, + destination_rev=revision, + tag=tag, + ): + script.run_env() + + +def downgrade( + config: Config, + revision: str, + sql: bool = False, + tag: Optional[str] = None, +) -> None: + """Revert to a previous version. + + :param config: a :class:`.Config` instance. + + :param revision: string revision target or range for --sql mode + + :param sql: if True, use ``--sql`` mode + + :param tag: an arbitrary "tag" that can be intercepted by custom + ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument` + method. + + """ + + script = ScriptDirectory.from_config(config) + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(":", 2) + elif sql: + raise util.CommandError( + "downgrade with --sql requires :" + ) + + def downgrade(rev, context): + return script._downgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=downgrade, + as_sql=sql, + starting_rev=starting_rev, + destination_rev=revision, + tag=tag, + ): + script.run_env() + + +def show(config, rev): + """Show the revision(s) denoted by the given symbol. 
+ + :param config: a :class:`.Config` instance. + + :param revision: string revision target + + """ + + script = ScriptDirectory.from_config(config) + + if rev == "current": + + def show_current(rev, context): + for sc in script.get_revisions(rev): + config.print_stdout(sc.log_entry) + return [] + + with EnvironmentContext(config, script, fn=show_current): + script.run_env() + else: + for sc in script.get_revisions(rev): + config.print_stdout(sc.log_entry) + + +def history( + config: Config, + rev_range: Optional[str] = None, + verbose: bool = False, + indicate_current: bool = False, +) -> None: + """List changeset scripts in chronological order. + + :param config: a :class:`.Config` instance. + + :param rev_range: string revision range + + :param verbose: output in verbose mode. + + :param indicate_current: indicate current revision. + + """ + base: Optional[str] + head: Optional[str] + script = ScriptDirectory.from_config(config) + if rev_range is not None: + if ":" not in rev_range: + raise util.CommandError( + "History range requires [start]:[end], " "[start]:, or :[end]" + ) + base, head = rev_range.strip().split(":") + else: + base = head = None + + environment = ( + util.asbool(config.get_main_option("revision_environment")) + or indicate_current + ) + + def _display_history(config, script, base, head, currents=()): + for sc in script.walk_revisions( + base=base or "base", head=head or "heads" + ): + if indicate_current: + sc._db_current_indicator = sc.revision in currents + + config.print_stdout( + sc.cmd_format( + verbose=verbose, + include_branches=True, + include_doc=True, + include_parents=True, + ) + ) + + def _display_history_w_current(config, script, base, head): + def _display_current_history(rev, context): + if head == "current": + _display_history(config, script, base, rev, rev) + elif base == "current": + _display_history(config, script, rev, head, rev) + else: + _display_history(config, script, base, head, rev) + return [] + + with EnvironmentContext(config, script, fn=_display_current_history): + script.run_env() + + if base == "current" or head == "current" or environment: + _display_history_w_current(config, script, base, head) + else: + _display_history(config, script, base, head) + + +def heads(config, verbose=False, resolve_dependencies=False): + """Show current available heads in the script directory. + + :param config: a :class:`.Config` instance. + + :param verbose: output in verbose mode. + + :param resolve_dependencies: treat dependency version as down revisions. + + """ + + script = ScriptDirectory.from_config(config) + if resolve_dependencies: + heads = script.get_revisions("heads") + else: + heads = script.get_revisions(script.get_heads()) + + for rev in heads: + config.print_stdout( + rev.cmd_format( + verbose, include_branches=True, tree_indicators=False + ) + ) + + +def branches(config, verbose=False): + """Show current branch points. + + :param config: a :class:`.Config` instance. + + :param verbose: output in verbose mode. 
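+
+    For example, assuming a configured project, the branch points can be
+    printed programmatically with::
+
+        from alembic import command
+        from alembic.config import Config
+
+        command.branches(Config("alembic.ini"), verbose=True)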
+ + """ + script = ScriptDirectory.from_config(config) + for sc in script.walk_revisions(): + if sc.is_branch_point: + config.print_stdout( + "%s\n%s\n", + sc.cmd_format(verbose, include_branches=True), + "\n".join( + "%s -> %s" + % ( + " " * len(str(sc.revision)), + rev_obj.cmd_format( + False, include_branches=True, include_doc=verbose + ), + ) + for rev_obj in ( + script.get_revision(rev) for rev in sc.nextrev + ) + ), + ) + + +def current(config: Config, verbose: bool = False) -> None: + """Display the current revision for a database. + + :param config: a :class:`.Config` instance. + + :param verbose: output in verbose mode. + + """ + + script = ScriptDirectory.from_config(config) + + def display_version(rev, context): + if verbose: + config.print_stdout( + "Current revision(s) for %s:", + util.obfuscate_url_pw(context.connection.engine.url), + ) + for rev in script.get_all_current(rev): + config.print_stdout(rev.cmd_format(verbose)) + + return [] + + with EnvironmentContext( + config, script, fn=display_version, dont_mutate=True + ): + script.run_env() + + +def stamp( + config: Config, + revision: _RevIdType, + sql: bool = False, + tag: Optional[str] = None, + purge: bool = False, +) -> None: + """'stamp' the revision table with the given revision; don't + run any migrations. + + :param config: a :class:`.Config` instance. + + :param revision: target revision or list of revisions. May be a list + to indicate stamping of multiple branch heads. + + .. note:: this parameter is called "revisions" in the command line + interface. + + :param sql: use ``--sql`` mode + + :param tag: an arbitrary "tag" that can be intercepted by custom + ``env.py`` scripts via the :class:`.EnvironmentContext.get_tag_argument` + method. + + :param purge: delete all entries in the version table before stamping. + + """ + + script = ScriptDirectory.from_config(config) + + if sql: + destination_revs = [] + starting_rev = None + for _revision in util.to_list(revision): + if ":" in _revision: + srev, _revision = _revision.split(":", 2) + + if starting_rev != srev: + if starting_rev is None: + starting_rev = srev + else: + raise util.CommandError( + "Stamp operation with --sql only supports a " + "single starting revision at a time" + ) + destination_revs.append(_revision) + else: + destination_revs = util.to_list(revision) + + def do_stamp(rev, context): + return script._stamp_revs(util.to_tuple(destination_revs), rev) + + with EnvironmentContext( + config, + script, + fn=do_stamp, + as_sql=sql, + starting_rev=starting_rev if sql else None, + destination_rev=util.to_tuple(destination_revs), + tag=tag, + purge=purge, + ): + script.run_env() + + +def edit(config: Config, rev: str) -> None: + """Edit revision script(s) using $EDITOR. + + :param config: a :class:`.Config` instance. + + :param rev: target revision. + + """ + + script = ScriptDirectory.from_config(config) + + if rev == "current": + + def edit_current(rev, context): + if not rev: + raise util.CommandError("No current revisions") + for sc in script.get_revisions(rev): + util.open_in_editor(sc.path) + return [] + + with EnvironmentContext(config, script, fn=edit_current): + script.run_env() + else: + revs = script.get_revisions(rev) + if not revs: + raise util.CommandError( + "No revision files indicated by symbol '%s'" % rev + ) + for sc in revs: + assert sc + util.open_in_editor(sc.path) + + +def ensure_version(config: Config, sql: bool = False) -> None: + """Create the alembic version table if it doesn't exist already . 
+ + :param config: a :class:`.Config` instance. + + :param sql: use ``--sql`` mode + + .. versionadded:: 1.7.6 + + """ + + script = ScriptDirectory.from_config(config) + + def do_ensure_version(rev, context): + context._ensure_version_table() + return [] + + with EnvironmentContext( + config, + script, + fn=do_ensure_version, + as_sql=sql, + ): + script.run_env() diff --git a/venv/lib/python3.12/site-packages/alembic/config.py b/venv/lib/python3.12/site-packages/alembic/config.py new file mode 100644 index 0000000..4b2263f --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/config.py @@ -0,0 +1,645 @@ +from __future__ import annotations + +from argparse import ArgumentParser +from argparse import Namespace +from configparser import ConfigParser +import inspect +import os +import sys +from typing import Any +from typing import cast +from typing import Dict +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import TextIO +from typing import Union + +from typing_extensions import TypedDict + +from . import __version__ +from . import command +from . import util +from .util import compat + + +class Config: + r"""Represent an Alembic configuration. + + Within an ``env.py`` script, this is available + via the :attr:`.EnvironmentContext.config` attribute, + which in turn is available at ``alembic.context``:: + + from alembic import context + + some_param = context.config.get_main_option("my option") + + When invoking Alembic programmatically, a new + :class:`.Config` can be created by passing + the name of an .ini file to the constructor:: + + from alembic.config import Config + alembic_cfg = Config("/path/to/yourapp/alembic.ini") + + With a :class:`.Config` object, you can then + run Alembic commands programmatically using the directives + in :mod:`alembic.command`. + + The :class:`.Config` object can also be constructed without + a filename. Values can be set programmatically, and + new sections will be created as needed:: + + from alembic.config import Config + alembic_cfg = Config() + alembic_cfg.set_main_option("script_location", "myapp:migrations") + alembic_cfg.set_main_option("sqlalchemy.url", "postgresql://foo/bar") + alembic_cfg.set_section_option("mysection", "foo", "bar") + + .. warning:: + + When using programmatic configuration, make sure the + ``env.py`` file in use is compatible with the target configuration; + including that the call to Python ``logging.fileConfig()`` is + omitted if the programmatic configuration doesn't actually include + logging directives. + + For passing non-string values to environments, such as connections and + engines, use the :attr:`.Config.attributes` dictionary:: + + with engine.begin() as connection: + alembic_cfg.attributes['connection'] = connection + command.upgrade(alembic_cfg, "head") + + :param file\_: name of the .ini file to open. + :param ini_section: name of the main Alembic section within the + .ini file + :param output_buffer: optional file-like input buffer which + will be passed to the :class:`.MigrationContext` - used to redirect + the output of "offline generation" when using Alembic programmatically. + :param stdout: buffer where the "print" output of commands will be sent. + Defaults to ``sys.stdout``. + + :param config_args: A dictionary of keys and values that will be used + for substitution in the alembic config file. The dictionary as given + is **copied** to a new one, stored locally as the attribute + ``.config_args``. 
When the :attr:`.Config.file_config` attribute is + first invoked, the replacement variable ``here`` will be added to this + dictionary before the dictionary is passed to ``ConfigParser()`` + to parse the .ini file. + + :param attributes: optional dictionary of arbitrary Python keys/values, + which will be populated into the :attr:`.Config.attributes` dictionary. + + .. seealso:: + + :ref:`connection_sharing` + + """ + + def __init__( + self, + file_: Union[str, os.PathLike[str], None] = None, + ini_section: str = "alembic", + output_buffer: Optional[TextIO] = None, + stdout: TextIO = sys.stdout, + cmd_opts: Optional[Namespace] = None, + config_args: Mapping[str, Any] = util.immutabledict(), + attributes: Optional[Dict[str, Any]] = None, + ) -> None: + """Construct a new :class:`.Config`""" + self.config_file_name = file_ + self.config_ini_section = ini_section + self.output_buffer = output_buffer + self.stdout = stdout + self.cmd_opts = cmd_opts + self.config_args = dict(config_args) + if attributes: + self.attributes.update(attributes) + + cmd_opts: Optional[Namespace] = None + """The command-line options passed to the ``alembic`` script. + + Within an ``env.py`` script this can be accessed via the + :attr:`.EnvironmentContext.config` attribute. + + .. seealso:: + + :meth:`.EnvironmentContext.get_x_argument` + + """ + + config_file_name: Union[str, os.PathLike[str], None] = None + """Filesystem path to the .ini file in use.""" + + config_ini_section: str = None # type:ignore[assignment] + """Name of the config file section to read basic configuration + from. Defaults to ``alembic``, that is the ``[alembic]`` section + of the .ini file. This value is modified using the ``-n/--name`` + option to the Alembic runner. + + """ + + @util.memoized_property + def attributes(self) -> Dict[str, Any]: + """A Python dictionary for storage of additional state. + + + This is a utility dictionary which can include not just strings but + engines, connections, schema objects, or anything else. + Use this to pass objects into an env.py script, such as passing + a :class:`sqlalchemy.engine.base.Connection` when calling + commands from :mod:`alembic.command` programmatically. + + .. seealso:: + + :ref:`connection_sharing` + + :paramref:`.Config.attributes` + + """ + return {} + + def print_stdout(self, text: str, *arg: Any) -> None: + """Render a message to standard out. + + When :meth:`.Config.print_stdout` is called with additional args + those arguments will formatted against the provided text, + otherwise we simply output the provided text verbatim. + + This is a no-op when the``quiet`` messaging option is enabled. + + e.g.:: + + >>> config.print_stdout('Some text %s', 'arg') + Some Text arg + + """ + + if arg: + output = str(text) % arg + else: + output = str(text) + + util.write_outstream(self.stdout, output, "\n", **self.messaging_opts) + + @util.memoized_property + def file_config(self) -> ConfigParser: + """Return the underlying ``ConfigParser`` object. + + Direct access to the .ini file is available here, + though the :meth:`.Config.get_section` and + :meth:`.Config.get_main_option` + methods provide a possibly simpler interface. 
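+
+        As a sketch, given a :class:`.Config` instance ``config``, raw
+        ``ConfigParser`` access might look like (though
+        :meth:`.Config.get_main_option` would usually suffice)::
+
+            parser = config.file_config
+            if parser.has_option("alembic", "script_location"):
+                location = parser.get("alembic", "script_location")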
+ + """ + + if self.config_file_name: + here = os.path.abspath(os.path.dirname(self.config_file_name)) + else: + here = "" + self.config_args["here"] = here + file_config = ConfigParser(self.config_args) + if self.config_file_name: + compat.read_config_parser(file_config, [self.config_file_name]) + else: + file_config.add_section(self.config_ini_section) + return file_config + + def get_template_directory(self) -> str: + """Return the directory where Alembic setup templates are found. + + This method is used by the alembic ``init`` and ``list_templates`` + commands. + + """ + import alembic + + package_dir = os.path.abspath(os.path.dirname(alembic.__file__)) + return os.path.join(package_dir, "templates") + + @overload + def get_section( + self, name: str, default: None = ... + ) -> Optional[Dict[str, str]]: + ... + + # "default" here could also be a TypeVar + # _MT = TypeVar("_MT", bound=Mapping[str, str]), + # however mypy wasn't handling that correctly (pyright was) + @overload + def get_section( + self, name: str, default: Dict[str, str] + ) -> Dict[str, str]: + ... + + @overload + def get_section( + self, name: str, default: Mapping[str, str] + ) -> Union[Dict[str, str], Mapping[str, str]]: + ... + + def get_section( + self, name: str, default: Optional[Mapping[str, str]] = None + ) -> Optional[Mapping[str, str]]: + """Return all the configuration options from a given .ini file section + as a dictionary. + + If the given section does not exist, the value of ``default`` + is returned, which is expected to be a dictionary or other mapping. + + """ + if not self.file_config.has_section(name): + return default + + return dict(self.file_config.items(name)) + + def set_main_option(self, name: str, value: str) -> None: + """Set an option programmatically within the 'main' section. + + This overrides whatever was in the .ini file. + + :param name: name of the value + + :param value: the value. Note that this value is passed to + ``ConfigParser.set``, which supports variable interpolation using + pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of + an interpolation symbol must therefore be escaped, e.g. ``%%``. + The given value may refer to another value already in the file + using the interpolation format. + + """ + self.set_section_option(self.config_ini_section, name, value) + + def remove_main_option(self, name: str) -> None: + self.file_config.remove_option(self.config_ini_section, name) + + def set_section_option(self, section: str, name: str, value: str) -> None: + """Set an option programmatically within the given section. + + The section is created if it doesn't exist already. + The value here will override whatever was in the .ini + file. + + :param section: name of the section + + :param name: name of the value + + :param value: the value. Note that this value is passed to + ``ConfigParser.set``, which supports variable interpolation using + pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of + an interpolation symbol must therefore be escaped, e.g. ``%%``. + The given value may refer to another value already in the file + using the interpolation format. 
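+
+        For example, a literal percent sign must be doubled, while an
+        interpolation token may refer to another key in the same section
+        (assuming ``user`` has also been set there)::
+
+            config.set_section_option("mysection", "growth", "10%%")
+            config.set_section_option("mysection", "user", "scott")
+            config.set_section_option(
+                "mysection", "url", "postgresql://%(user)s@localhost/test"
+            )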
+ + """ + + if not self.file_config.has_section(section): + self.file_config.add_section(section) + self.file_config.set(section, name, value) + + def get_section_option( + self, section: str, name: str, default: Optional[str] = None + ) -> Optional[str]: + """Return an option from the given section of the .ini file.""" + if not self.file_config.has_section(section): + raise util.CommandError( + "No config file %r found, or file has no " + "'[%s]' section" % (self.config_file_name, section) + ) + if self.file_config.has_option(section, name): + return self.file_config.get(section, name) + else: + return default + + @overload + def get_main_option(self, name: str, default: str) -> str: + ... + + @overload + def get_main_option( + self, name: str, default: Optional[str] = None + ) -> Optional[str]: + ... + + def get_main_option( + self, name: str, default: Optional[str] = None + ) -> Optional[str]: + """Return an option from the 'main' section of the .ini file. + + This defaults to being a key from the ``[alembic]`` + section, unless the ``-n/--name`` flag were used to + indicate a different section. + + """ + return self.get_section_option(self.config_ini_section, name, default) + + @util.memoized_property + def messaging_opts(self) -> MessagingOptions: + """The messaging options.""" + return cast( + MessagingOptions, + util.immutabledict( + {"quiet": getattr(self.cmd_opts, "quiet", False)} + ), + ) + + +class MessagingOptions(TypedDict, total=False): + quiet: bool + + +class CommandLine: + def __init__(self, prog: Optional[str] = None) -> None: + self._generate_args(prog) + + def _generate_args(self, prog: Optional[str]) -> None: + def add_options( + fn: Any, parser: Any, positional: Any, kwargs: Any + ) -> None: + kwargs_opts = { + "template": ( + "-t", + "--template", + dict( + default="generic", + type=str, + help="Setup template for use with 'init'", + ), + ), + "message": ( + "-m", + "--message", + dict( + type=str, help="Message string to use with 'revision'" + ), + ), + "sql": ( + "--sql", + dict( + action="store_true", + help="Don't emit SQL to database - dump to " + "standard output/file instead. 
See docs on " + "offline mode.", + ), + ), + "tag": ( + "--tag", + dict( + type=str, + help="Arbitrary 'tag' name - can be used by " + "custom env.py scripts.", + ), + ), + "head": ( + "--head", + dict( + type=str, + help="Specify head revision or @head " + "to base new revision on.", + ), + ), + "splice": ( + "--splice", + dict( + action="store_true", + help="Allow a non-head revision as the " + "'head' to splice onto", + ), + ), + "depends_on": ( + "--depends-on", + dict( + action="append", + help="Specify one or more revision identifiers " + "which this revision should depend on.", + ), + ), + "rev_id": ( + "--rev-id", + dict( + type=str, + help="Specify a hardcoded revision id instead of " + "generating one", + ), + ), + "version_path": ( + "--version-path", + dict( + type=str, + help="Specify specific path from config for " + "version file", + ), + ), + "branch_label": ( + "--branch-label", + dict( + type=str, + help="Specify a branch label to apply to the " + "new revision", + ), + ), + "verbose": ( + "-v", + "--verbose", + dict(action="store_true", help="Use more verbose output"), + ), + "resolve_dependencies": ( + "--resolve-dependencies", + dict( + action="store_true", + help="Treat dependency versions as down revisions", + ), + ), + "autogenerate": ( + "--autogenerate", + dict( + action="store_true", + help="Populate revision script with candidate " + "migration operations, based on comparison " + "of database to model.", + ), + ), + "rev_range": ( + "-r", + "--rev-range", + dict( + action="store", + help="Specify a revision range; " + "format is [start]:[end]", + ), + ), + "indicate_current": ( + "-i", + "--indicate-current", + dict( + action="store_true", + help="Indicate the current revision", + ), + ), + "purge": ( + "--purge", + dict( + action="store_true", + help="Unconditionally erase the version table " + "before stamping", + ), + ), + "package": ( + "--package", + dict( + action="store_true", + help="Write empty __init__.py files to the " + "environment and version locations", + ), + ), + } + positional_help = { + "directory": "location of scripts directory", + "revision": "revision identifier", + "revisions": "one or more revisions, or 'heads' for all heads", + } + for arg in kwargs: + if arg in kwargs_opts: + args = kwargs_opts[arg] + args, kw = args[0:-1], args[-1] + parser.add_argument(*args, **kw) + + for arg in positional: + if ( + arg == "revisions" + or fn in positional_translations + and positional_translations[fn][arg] == "revisions" + ): + subparser.add_argument( + "revisions", + nargs="+", + help=positional_help.get("revisions"), + ) + else: + subparser.add_argument(arg, help=positional_help.get(arg)) + + parser = ArgumentParser(prog=prog) + + parser.add_argument( + "--version", action="version", version="%%(prog)s %s" % __version__ + ) + parser.add_argument( + "-c", + "--config", + type=str, + default=os.environ.get("ALEMBIC_CONFIG", "alembic.ini"), + help="Alternate config file; defaults to value of " + 'ALEMBIC_CONFIG environment variable, or "alembic.ini"', + ) + parser.add_argument( + "-n", + "--name", + type=str, + default="alembic", + help="Name of section in .ini file to " "use for Alembic config", + ) + parser.add_argument( + "-x", + action="append", + help="Additional arguments consumed by " + "custom env.py scripts, e.g. 
-x " + "setting1=somesetting -x setting2=somesetting", + ) + parser.add_argument( + "--raiseerr", + action="store_true", + help="Raise a full stack trace on error", + ) + parser.add_argument( + "-q", + "--quiet", + action="store_true", + help="Do not log to std output.", + ) + subparsers = parser.add_subparsers() + + positional_translations: Dict[Any, Any] = { + command.stamp: {"revision": "revisions"} + } + + for fn in [getattr(command, n) for n in dir(command)]: + if ( + inspect.isfunction(fn) + and fn.__name__[0] != "_" + and fn.__module__ == "alembic.command" + ): + spec = compat.inspect_getfullargspec(fn) + if spec[3] is not None: + positional = spec[0][1 : -len(spec[3])] + kwarg = spec[0][-len(spec[3]) :] + else: + positional = spec[0][1:] + kwarg = [] + + if fn in positional_translations: + positional = [ + positional_translations[fn].get(name, name) + for name in positional + ] + + # parse first line(s) of helptext without a line break + help_ = fn.__doc__ + if help_: + help_text = [] + for line in help_.split("\n"): + if not line.strip(): + break + else: + help_text.append(line.strip()) + else: + help_text = [] + subparser = subparsers.add_parser( + fn.__name__, help=" ".join(help_text) + ) + add_options(fn, subparser, positional, kwarg) + subparser.set_defaults(cmd=(fn, positional, kwarg)) + self.parser = parser + + def run_cmd(self, config: Config, options: Namespace) -> None: + fn, positional, kwarg = options.cmd + + try: + fn( + config, + *[getattr(options, k, None) for k in positional], + **{k: getattr(options, k, None) for k in kwarg}, + ) + except util.CommandError as e: + if options.raiseerr: + raise + else: + util.err(str(e), **config.messaging_opts) + + def main(self, argv: Optional[Sequence[str]] = None) -> None: + options = self.parser.parse_args(argv) + if not hasattr(options, "cmd"): + # see http://bugs.python.org/issue9253, argparse + # behavior changed incompatibly in py3.3 + self.parser.error("too few arguments") + else: + cfg = Config( + file_=options.config, + ini_section=options.name, + cmd_opts=options, + ) + self.run_cmd(cfg, options) + + +def main( + argv: Optional[Sequence[str]] = None, + prog: Optional[str] = None, + **kwargs: Any, +) -> None: + """The console runner function for Alembic.""" + + CommandLine(prog=prog).main(argv=argv) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/alembic/context.py b/venv/lib/python3.12/site-packages/alembic/context.py new file mode 100644 index 0000000..758fca8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/context.py @@ -0,0 +1,5 @@ +from .runtime.environment import EnvironmentContext + +# create proxy functions for +# each method on the EnvironmentContext class. 
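+# (so that ``from alembic import context; context.configure(...)`` in an
+# env.py script forwards to the currently active EnvironmentContext)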
+EnvironmentContext.create_module_class_proxy(globals(), locals()) diff --git a/venv/lib/python3.12/site-packages/alembic/context.pyi b/venv/lib/python3.12/site-packages/alembic/context.pyi new file mode 100644 index 0000000..80619fb --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/context.pyi @@ -0,0 +1,853 @@ +# ### this file stubs are generated by tools/write_pyi.py - do not edit ### +# ### imports are manually managed +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Collection +from typing import ContextManager +from typing import Dict +from typing import Iterable +from typing import List +from typing import Literal +from typing import Mapping +from typing import MutableMapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import TextIO +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +if TYPE_CHECKING: + from sqlalchemy.engine.base import Connection + from sqlalchemy.engine.url import URL + from sqlalchemy.sql import Executable + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import FetchedValue + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.sql.type_api import TypeEngine + + from .autogenerate.api import AutogenContext + from .config import Config + from .operations.ops import MigrationScript + from .runtime.migration import _ProxyTransaction + from .runtime.migration import MigrationContext + from .runtime.migration import MigrationInfo + from .script import ScriptDirectory + +### end imports ### + +def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]: + """Return a context manager that will + enclose an operation within a "transaction", + as defined by the environment's offline + and transactional DDL settings. + + e.g.:: + + with context.begin_transaction(): + context.run_migrations() + + :meth:`.begin_transaction` is intended to + "do the right thing" regardless of + calling context: + + * If :meth:`.is_transactional_ddl` is ``False``, + returns a "do nothing" context manager + which otherwise produces no transactional + state or directives. + * If :meth:`.is_offline_mode` is ``True``, + returns a context manager that will + invoke the :meth:`.DefaultImpl.emit_begin` + and :meth:`.DefaultImpl.emit_commit` + methods, which will produce the string + directives ``BEGIN`` and ``COMMIT`` on + the output stream, as rendered by the + target backend (e.g. SQL Server would + emit ``BEGIN TRANSACTION``). + * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` + on the current online connection, which + returns a :class:`sqlalchemy.engine.Transaction` + object. This object demarcates a real + transaction and is itself a context manager, + which will roll back if an exception + is raised. + + Note that a custom ``env.py`` script which + has more specific transactional needs can of course + manipulate the :class:`~sqlalchemy.engine.Connection` + directly to produce transactional state in "online" + mode. 
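+
+    As a sketch, the "offline" branch of a typical ``env.py`` relies on
+    this same pattern, assuming ``url`` was read from the config::
+
+        context.configure(url=url, literal_binds=True)
+
+        with context.begin_transaction():
+            context.run_migrations()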
+ + """ + +config: Config + +def configure( + connection: Optional[Connection] = None, + url: Union[str, URL, None] = None, + dialect_name: Optional[str] = None, + dialect_opts: Optional[Dict[str, Any]] = None, + transactional_ddl: Optional[bool] = None, + transaction_per_migration: bool = False, + output_buffer: Optional[TextIO] = None, + starting_rev: Optional[str] = None, + tag: Optional[str] = None, + template_args: Optional[Dict[str, Any]] = None, + render_as_batch: bool = False, + target_metadata: Union[MetaData, Sequence[MetaData], None] = None, + include_name: Optional[ + Callable[ + [ + Optional[str], + Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", + ], + MutableMapping[ + Literal[ + "schema_name", + "table_name", + "schema_qualified_table_name", + ], + Optional[str], + ], + ], + bool, + ] + ] = None, + include_object: Optional[ + Callable[ + [ + SchemaItem, + Optional[str], + Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", + ], + bool, + Optional[SchemaItem], + ], + bool, + ] + ] = None, + include_schemas: bool = False, + process_revision_directives: Optional[ + Callable[ + [ + MigrationContext, + Union[str, Iterable[Optional[str]], Iterable[str]], + List[MigrationScript], + ], + None, + ] + ] = None, + compare_type: Union[ + bool, + Callable[ + [ + MigrationContext, + Column[Any], + Column[Any], + TypeEngine[Any], + TypeEngine[Any], + ], + Optional[bool], + ], + ] = True, + compare_server_default: Union[ + bool, + Callable[ + [ + MigrationContext, + Column[Any], + Column[Any], + Optional[str], + Optional[FetchedValue], + Optional[str], + ], + Optional[bool], + ], + ] = False, + render_item: Optional[ + Callable[[str, Any, AutogenContext], Union[str, Literal[False]]] + ] = None, + literal_binds: bool = False, + upgrade_token: str = "upgrades", + downgrade_token: str = "downgrades", + alembic_module_prefix: str = "op.", + sqlalchemy_module_prefix: str = "sa.", + user_module_prefix: Optional[str] = None, + on_version_apply: Optional[ + Callable[ + [ + MigrationContext, + MigrationInfo, + Collection[Any], + Mapping[str, Any], + ], + None, + ] + ] = None, + **kw: Any, +) -> None: + """Configure a :class:`.MigrationContext` within this + :class:`.EnvironmentContext` which will provide database + connectivity and other configuration to a series of + migration scripts. + + Many methods on :class:`.EnvironmentContext` require that + this method has been called in order to function, as they + ultimately need to have database access or at least access + to the dialect in use. Those which do are documented as such. + + The important thing needed by :meth:`.configure` is a + means to determine what kind of database dialect is in use. + An actual connection to that database is needed only if + the :class:`.MigrationContext` is to be used in + "online" mode. + + If the :meth:`.is_offline_mode` function returns ``True``, + then no connection is needed here. Otherwise, the + ``connection`` parameter should be present as an + instance of :class:`sqlalchemy.engine.Connection`. + + This function is typically called from the ``env.py`` + script within a migration environment. It can be called + multiple times for an invocation. The most recent + :class:`~sqlalchemy.engine.Connection` + for which it was called is the one that will be operated upon + by the next call to :meth:`.run_migrations`. 
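+
+    As a sketch, a typical "online" ``env.py`` configuration resembles
+    the following, assuming ``target_metadata`` is defined earlier in the
+    script::
+
+        from sqlalchemy import engine_from_config, pool
+
+        connectable = engine_from_config(
+            config.get_section(config.config_ini_section, {}),
+            prefix="sqlalchemy.",
+            poolclass=pool.NullPool,
+        )
+
+        with connectable.connect() as connection:
+            context.configure(
+                connection=connection,
+                target_metadata=target_metadata,
+            )
+
+            with context.begin_transaction():
+                context.run_migrations()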
+ + General parameters: + + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use + for SQL execution in "online" mode. When present, is also + used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. + The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. + The type of dialect to be used will be derived from this if + ``connection`` and ``url`` are not passed. + :param dialect_opts: dictionary of options to be passed to dialect + constructor. + :param transactional_ddl: Force the usage of "transactional" + DDL on or off; + this otherwise defaults to whether or not the dialect in + use supports it. + :param transaction_per_migration: if True, nest each migration script + in a transaction rather than the full series of migrations to + run. + :param output_buffer: a file-like object that will be used + for textual output + when the ``--sql`` option is used to generate SQL scripts. + Defaults to + ``sys.stdout`` if not passed here and also not present on + the :class:`.Config` + object. The value here overrides that of the :class:`.Config` + object. + :param output_encoding: when using ``--sql`` to generate SQL + scripts, apply this encoding to the string output. + :param literal_binds: when using ``--sql`` to generate SQL + scripts, pass through the ``literal_binds`` flag to the compiler + so that any literal values that would ordinarily be bound + parameters are converted to plain strings. + + .. warning:: Dialects can typically only handle simple datatypes + like strings and numbers for auto-literal generation. Datatypes + like dates, intervals, and others may still require manual + formatting, typically using :meth:`.Operations.inline_literal`. + + .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy + versions prior to 0.8 where this feature is not supported. + + .. seealso:: + + :meth:`.Operations.inline_literal` + + :param starting_rev: Override the "starting revision" argument + when using ``--sql`` mode. + :param tag: a string tag for usage by custom ``env.py`` scripts. + Set via the ``--tag`` option, can be overridden here. + :param template_args: dictionary of template arguments which + will be added to the template argument environment when + running the "revision" command. Note that the script environment + is only run within the "revision" command if the --autogenerate + option is used, or if the option "revision_environment=true" + is present in the alembic.ini file. + + :param version_table: The name of the Alembic version table. + The default is ``'alembic_version'``. + :param version_table_schema: Optional schema to place version + table within. + :param version_table_pk: boolean, whether the Alembic version table + should use a primary key constraint for the "value" column; this + only takes effect when the table is first created. + Defaults to True; setting to False should not be necessary and is + here for backwards compatibility reasons. + :param on_version_apply: a callable or collection of callables to be + run for each migration step. + The callables will be run in the order they are given, once for + each migration step, after the respective operation has been + applied but before its transaction is finalized. 
+ Each callable accepts no positional arguments and the following + keyword arguments: + + * ``ctx``: the :class:`.MigrationContext` running the migration, + * ``step``: a :class:`.MigrationInfo` representing the + step currently being applied, + * ``heads``: a collection of version strings representing the + current heads, + * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`. + + Parameters specific to the autogenerate feature, when + ``alembic revision`` is run with the ``--autogenerate`` feature: + + :param target_metadata: a :class:`sqlalchemy.schema.MetaData` + object, or a sequence of :class:`~sqlalchemy.schema.MetaData` + objects, that will be consulted during autogeneration. + The tables present in each :class:`~sqlalchemy.schema.MetaData` + will be compared against + what is locally available on the target + :class:`~sqlalchemy.engine.Connection` + to produce candidate upgrade/downgrade operations. + :param compare_type: Indicates type comparison behavior during + an autogenerate + operation. Defaults to ``True`` turning on type comparison, which + has good accuracy on most backends. See :ref:`compare_types` + for an example as well as information on other type + comparison options. Set to ``False`` which disables type + comparison. A callable can also be passed to provide custom type + comparison, see :ref:`compare_types` for additional details. + + .. versionchanged:: 1.12.0 The default value of + :paramref:`.EnvironmentContext.configure.compare_type` has been + changed to ``True``. + + .. seealso:: + + :ref:`compare_types` + + :paramref:`.EnvironmentContext.configure.compare_server_default` + + :param compare_server_default: Indicates server default comparison + behavior during + an autogenerate operation. Defaults to ``False`` which disables + server default + comparison. Set to ``True`` to turn on server default comparison, + which has + varied accuracy depending on backend. + + To customize server default comparison behavior, a callable may + be specified + which can filter server default comparisons during an + autogenerate operation. + defaults during an autogenerate operation. The format of this + callable is:: + + def my_compare_server_default(context, inspected_column, + metadata_column, inspected_default, metadata_default, + rendered_metadata_default): + # return True if the defaults are different, + # False if not, or None to allow the default implementation + # to compare these defaults + return None + + context.configure( + # ... + compare_server_default = my_compare_server_default + ) + + ``inspected_column`` is a dictionary structure as returned by + :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas + ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from + the local model environment. + + A return value of ``None`` indicates to allow default server default + comparison + to proceed. Note that some backends such as Postgresql actually + execute + the two defaults on the database side to compare for equivalence. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.compare_type` + + :param include_name: A callable function which is given + the chance to return ``True`` or ``False`` for any database reflected + object based on its name, including database schema names when + the :paramref:`.EnvironmentContext.configure.include_schemas` flag + is set to ``True``. + + The function accepts the following positional arguments: + + * ``name``: the name of the object, such as schema name or table name. 
+ Will be ``None`` when indicating the default schema name of the + database connection. + * ``type``: a string describing the type of object; currently + ``"schema"``, ``"table"``, ``"column"``, ``"index"``, + ``"unique_constraint"``, or ``"foreign_key_constraint"`` + * ``parent_names``: a dictionary of "parent" object names, that are + relative to the name being given. Keys in this dictionary may + include: ``"schema_name"``, ``"table_name"`` or + ``"schema_qualified_table_name"``. + + E.g.:: + + def include_name(name, type_, parent_names): + if type_ == "schema": + return name in ["schema_one", "schema_two"] + else: + return True + + context.configure( + # ... + include_schemas = True, + include_name = include_name + ) + + .. seealso:: + + :ref:`autogenerate_include_hooks` + + :paramref:`.EnvironmentContext.configure.include_object` + + :paramref:`.EnvironmentContext.configure.include_schemas` + + + :param include_object: A callable function which is given + the chance to return ``True`` or ``False`` for any object, + indicating if the given object should be considered in the + autogenerate sweep. + + The function accepts the following positional arguments: + + * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such + as a :class:`~sqlalchemy.schema.Table`, + :class:`~sqlalchemy.schema.Column`, + :class:`~sqlalchemy.schema.Index` + :class:`~sqlalchemy.schema.UniqueConstraint`, + or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object + * ``name``: the name of the object. This is typically available + via ``object.name``. + * ``type``: a string describing the type of object; currently + ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``, + or ``"foreign_key_constraint"`` + * ``reflected``: ``True`` if the given object was produced based on + table reflection, ``False`` if it's from a local :class:`.MetaData` + object. + * ``compare_to``: the object being compared against, if available, + else ``None``. + + E.g.:: + + def include_object(object, name, type_, reflected, compare_to): + if (type_ == "column" and + not reflected and + object.info.get("skip_autogenerate", False)): + return False + else: + return True + + context.configure( + # ... + include_object = include_object + ) + + For the use case of omitting specific schemas from a target database + when :paramref:`.EnvironmentContext.configure.include_schemas` is + set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema` + attribute can be checked for each :class:`~sqlalchemy.schema.Table` + object passed to the hook, however it is much more efficient + to filter on schemas before reflection of objects takes place + using the :paramref:`.EnvironmentContext.configure.include_name` + hook. + + .. seealso:: + + :ref:`autogenerate_include_hooks` + + :paramref:`.EnvironmentContext.configure.include_name` + + :paramref:`.EnvironmentContext.configure.include_schemas` + + :param render_as_batch: if True, commands which alter elements + within a table will be placed under a ``with batch_alter_table():`` + directive, so that batch migrations will take place. + + .. seealso:: + + :ref:`batch_migrations` + + :param include_schemas: If True, autogenerate will scan across + all schemas located by the SQLAlchemy + :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names` + method, and include all differences in tables found across all + those schemas. 
When using this option, you may want to also + use the :paramref:`.EnvironmentContext.configure.include_name` + parameter to specify a callable which + can filter the tables/schemas that get included. + + .. seealso:: + + :ref:`autogenerate_include_hooks` + + :paramref:`.EnvironmentContext.configure.include_name` + + :paramref:`.EnvironmentContext.configure.include_object` + + :param render_item: Callable that can be used to override how + any schema item, i.e. column, constraint, type, + etc., is rendered for autogenerate. The callable receives a + string describing the type of object, the object, and + the autogen context. If it returns False, the + default rendering method will be used. If it returns None, + the item will not be rendered in the context of a Table + construct, that is, can be used to skip columns or constraints + within op.create_table():: + + def my_render_column(type_, col, autogen_context): + if type_ == "column" and isinstance(col, MySpecialCol): + return repr(col) + else: + return False + + context.configure( + # ... + render_item = my_render_column + ) + + Available values for the type string include: ``"column"``, + ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, + ``"type"``, ``"server_default"``. + + .. seealso:: + + :ref:`autogen_render_types` + + :param upgrade_token: When autogenerate completes, the text of the + candidate upgrade operations will be present in this template + variable when ``script.py.mako`` is rendered. Defaults to + ``upgrades``. + :param downgrade_token: When autogenerate completes, the text of the + candidate downgrade operations will be present in this + template variable when ``script.py.mako`` is rendered. Defaults to + ``downgrades``. + + :param alembic_module_prefix: When autogenerate refers to Alembic + :mod:`alembic.operations` constructs, this prefix will be used + (i.e. ``op.create_table``) Defaults to "``op.``". + Can be ``None`` to indicate no prefix. + + :param sqlalchemy_module_prefix: When autogenerate refers to + SQLAlchemy + :class:`~sqlalchemy.schema.Column` or type classes, this prefix + will be used + (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". + Can be ``None`` to indicate no prefix. + Note that when dialect-specific types are rendered, autogenerate + will render them using the dialect module name, i.e. ``mssql.BIT()``, + ``postgresql.UUID()``. + + :param user_module_prefix: When autogenerate refers to a SQLAlchemy + type (e.g. :class:`.TypeEngine`) where the module name is not + under the ``sqlalchemy`` namespace, this prefix will be used + within autogenerate. If left at its default of + ``None``, the ``__module__`` attribute of the type is used to + render the import module. It's a good practice to set this + and to have all custom types be available from a fixed module space, + in order to future-proof migration files against reorganizations + in modules. + + .. seealso:: + + :ref:`autogen_module_prefix` + + :param process_revision_directives: a callable function that will + be passed a structure representing the end result of an autogenerate + or plain "revision" operation, which can be manipulated to affect + how the ``alembic revision`` command ultimately outputs new + revision scripts. The structure of the callable is:: + + def process_revision_directives(context, revision, directives): + pass + + The ``directives`` parameter is a Python list containing + a single :class:`.MigrationScript` directive, which represents + the revision file to be generated. 
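+     As one possible sketch (following the "don't generate empty
+     migrations" pattern from the Alembic cookbook; note the docstring
+     below confirms ``context.config.cmd_opts.autogenerate`` and the
+     ``directives`` list), a hook that skips writing a revision file
+     when autogenerate detects no changes might look like::
+
+         def process_revision_directives(context, revision, directives):
+             if context.config.cmd_opts.autogenerate:
+                 script = directives[0]
+                 # emptying out the list suppresses the revision file
+                 if script.upgrade_ops.is_empty():
+                     directives[:] = []
+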
This list as well as its + contents may be freely modified to produce any set of commands. + The section :ref:`customizing_revision` shows an example of + doing this. The ``context`` parameter is the + :class:`.MigrationContext` in use, + and ``revision`` is a tuple of revision identifiers representing the + current revision of the database. + + The callable is invoked at all times when the ``--autogenerate`` + option is passed to ``alembic revision``. If ``--autogenerate`` + is not passed, the callable is invoked only if the + ``revision_environment`` variable is set to True in the Alembic + configuration, in which case the given ``directives`` collection + will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps` + collections for ``.upgrade_ops`` and ``.downgrade_ops``. The + ``--autogenerate`` option itself can be inferred by inspecting + ``context.config.cmd_opts.autogenerate``. + + The callable function may optionally be an instance of + a :class:`.Rewriter` object. This is a helper object that + assists in the production of autogenerate-stream rewriter functions. + + .. seealso:: + + :ref:`customizing_revision` + + :ref:`autogen_rewriter` + + :paramref:`.command.revision.process_revision_directives` + + Parameters specific to individual backends: + + :param mssql_batch_separator: The "batch separator" which will + be placed between each statement when generating offline SQL Server + migrations. Defaults to ``GO``. Note this is in addition to the + customary semicolon ``;`` at the end of each statement; SQL Server + considers the "batch separator" to denote the end of an + individual statement execution, and cannot group certain + dependent operations in one step. + :param oracle_batch_separator: The "batch separator" which will + be placed between each statement when generating offline + Oracle migrations. Defaults to ``/``. Oracle doesn't add a + semicolon between statements like most other backends. + + """ + +def execute( + sql: Union[Executable, str], + execution_options: Optional[Dict[str, Any]] = None, +) -> None: + """Execute the given SQL using the current change context. + + The behavior of :meth:`.execute` is the same + as that of :meth:`.Operations.execute`. Please see that + function's documentation for full detail including + caveats and limitations. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + +def get_bind() -> Connection: + """Return the current 'bind'. + + In "online" mode, this is the + :class:`sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + +def get_context() -> MigrationContext: + """Return the current :class:`.MigrationContext` object. + + If :meth:`.EnvironmentContext.configure` has not been + called yet, raises an exception. + + """ + +def get_head_revision() -> Union[str, Tuple[str, ...], None]: + """Return the hex identifier of the 'head' script revision. + + If the script directory has multiple heads, this + method raises a :class:`.CommandError`; + :meth:`.EnvironmentContext.get_head_revisions` should be preferred. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: :meth:`.EnvironmentContext.get_head_revisions` + + """ + +def get_head_revisions() -> Union[str, Tuple[str, ...], None]: + """Return the hex identifier of the 'heads' script revision(s). 
+ + This returns a tuple containing the version number of all + heads in the script directory. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + +def get_revision_argument() -> Union[str, Tuple[str, ...], None]: + """Get the 'destination' revision argument. + + This is typically the argument passed to the + ``upgrade`` or ``downgrade`` command. + + If it was specified as ``head``, the actual + version number is returned; if specified + as ``base``, ``None`` is returned. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + +def get_starting_revision_argument() -> Union[str, Tuple[str, ...], None]: + """Return the 'starting revision' argument, + if the revision was passed using ``start:end``. + + This is only meaningful in "offline" mode. + Returns ``None`` if no value is available + or was configured. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + +def get_tag_argument() -> Optional[str]: + """Return the value passed for the ``--tag`` argument, if any. + + The ``--tag`` argument is not used directly by Alembic, + but is available for custom ``env.py`` configurations that + wish to use it; particularly for offline generation scripts + that wish to generate tagged filenames. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: + + :meth:`.EnvironmentContext.get_x_argument` - a newer and more + open ended system of extending ``env.py`` scripts via the command + line. + + """ + +@overload +def get_x_argument(as_dictionary: Literal[False]) -> List[str]: ... +@overload +def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: ... +@overload +def get_x_argument( + as_dictionary: bool = ..., +) -> Union[List[str], Dict[str, str]]: + """Return the value(s) passed for the ``-x`` argument, if any. + + The ``-x`` argument is an open ended flag that allows any user-defined + value or values to be passed on the command line, then available + here for consumption by a custom ``env.py`` script. + + The return value is a list, returned directly from the ``argparse`` + structure. If ``as_dictionary=True`` is passed, the ``x`` arguments + are parsed using ``key=value`` format into a dictionary that is + then returned. If there is no ``=`` in the argument, value is an empty + string. + + .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when + arguments are passed without the ``=`` symbol. + + For example, to support passing a database URL on the command line, + the standard ``env.py`` script can be modified like this:: + + cmd_line_url = context.get_x_argument( + as_dictionary=True).get('dbname') + if cmd_line_url: + engine = create_engine(cmd_line_url) + else: + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + This then takes effect by running the ``alembic`` script as:: + + alembic -x dbname=postgresql://user:pass@host/dbname upgrade head + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: + + :meth:`.EnvironmentContext.get_tag_argument` + + :attr:`.Config.cmd_opts` + + """ + +def is_offline_mode() -> bool: + """Return True if the current migrations environment + is running in "offline mode". + + This is ``True`` or ``False`` depending + on the ``--sql`` flag passed. 
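+
+    E.g., the standard ``env.py`` template dispatches on this flag::
+
+        if context.is_offline_mode():
+            run_migrations_offline()
+        else:
+            run_migrations_online()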
+ + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + +def is_transactional_ddl() -> bool: + """Return True if the context is configured to expect a + transactional DDL capable backend. + + This defaults to the type of database in use, and + can be overridden by the ``transactional_ddl`` argument + to :meth:`.configure` + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + +def run_migrations(**kw: Any) -> None: + """Run migrations as determined by the current command line + configuration + as well as versioning information present (or not) in the current + database connection (if one is present). + + The function accepts optional ``**kw`` arguments. If these are + passed, they are sent directly to the ``upgrade()`` and + ``downgrade()`` + functions within each target revision file. By modifying the + ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` + functions accept arguments, parameters can be passed here so that + contextual information, usually information to identify a particular + database in use, can be passed from a custom ``env.py`` script + to the migration functions. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + +script: ScriptDirectory + +def static_output(text: str) -> None: + """Emit text directly to the "offline" SQL stream. + + Typically this is for emitting comments that + start with --. The statement is not treated + as a SQL execution, no ; or batch separator + is added, etc. + + """ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__init__.py b/venv/lib/python3.12/site-packages/alembic/ddl/__init__.py new file mode 100644 index 0000000..f2f72b3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/__init__.py @@ -0,0 +1,6 @@ +from . import mssql +from . import mysql +from . import oracle +from . import postgresql +from . 
import sqlite +from .impl import DefaultImpl as DefaultImpl diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..bcce16f Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/_autogen.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/_autogen.cpython-312.pyc new file mode 100644 index 0000000..28dc6e4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/_autogen.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..903501f Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/impl.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/impl.cpython-312.pyc new file mode 100644 index 0000000..301f3a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/impl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mssql.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mssql.cpython-312.pyc new file mode 100644 index 0000000..538c66c Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mssql.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mysql.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mysql.cpython-312.pyc new file mode 100644 index 0000000..b8c5d71 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mysql.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/oracle.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/oracle.cpython-312.pyc new file mode 100644 index 0000000..157579b Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/oracle.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/postgresql.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/postgresql.cpython-312.pyc new file mode 100644 index 0000000..5379a3d Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/postgresql.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/sqlite.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/sqlite.cpython-312.pyc new file mode 100644 index 0000000..c796c55 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/sqlite.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/_autogen.py b/venv/lib/python3.12/site-packages/alembic/ddl/_autogen.py new file mode 100644 index 0000000..e22153c --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/_autogen.py @@ -0,0 +1,325 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +from typing 
import Any +from typing import ClassVar +from typing import Dict +from typing import Generic +from typing import NamedTuple +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy.sql.schema import Constraint +from sqlalchemy.sql.schema import ForeignKeyConstraint +from sqlalchemy.sql.schema import Index +from sqlalchemy.sql.schema import UniqueConstraint +from typing_extensions import TypeGuard + +from .. import util +from ..util import sqla_compat + +if TYPE_CHECKING: + from typing import Literal + + from alembic.autogenerate.api import AutogenContext + from alembic.ddl.impl import DefaultImpl + +CompareConstraintType = Union[Constraint, Index] + +_C = TypeVar("_C", bound=CompareConstraintType) + +_clsreg: Dict[str, Type[_constraint_sig]] = {} + + +class ComparisonResult(NamedTuple): + status: Literal["equal", "different", "skip"] + message: str + + @property + def is_equal(self) -> bool: + return self.status == "equal" + + @property + def is_different(self) -> bool: + return self.status == "different" + + @property + def is_skip(self) -> bool: + return self.status == "skip" + + @classmethod + def Equal(cls) -> ComparisonResult: + """the constraints are equal.""" + return cls("equal", "The two constraints are equal") + + @classmethod + def Different(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: + """the constraints are different for the provided reason(s).""" + return cls("different", ", ".join(util.to_list(reason))) + + @classmethod + def Skip(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: + """the constraint cannot be compared for the provided reason(s). + + The message is logged, but the constraints will be otherwise + considered equal, meaning that no migration command will be + generated. + """ + return cls("skip", ", ".join(util.to_list(reason))) + + +class _constraint_sig(Generic[_C]): + const: _C + + _sig: Tuple[Any, ...] + name: Optional[sqla_compat._ConstraintNameDefined] + + impl: DefaultImpl + + _is_index: ClassVar[bool] = False + _is_fk: ClassVar[bool] = False + _is_uq: ClassVar[bool] = False + + _is_metadata: bool + + def __init_subclass__(cls) -> None: + cls._register() + + @classmethod + def _register(cls): + raise NotImplementedError() + + def __init__( + self, is_metadata: bool, impl: DefaultImpl, const: _C + ) -> None: + raise NotImplementedError() + + def compare_to_reflected( + self, other: _constraint_sig[Any] + ) -> ComparisonResult: + assert self.impl is other.impl + assert self._is_metadata + assert not other._is_metadata + + return self._compare_to_reflected(other) + + def _compare_to_reflected( + self, other: _constraint_sig[_C] + ) -> ComparisonResult: + raise NotImplementedError() + + @classmethod + def from_constraint( + cls, is_metadata: bool, impl: DefaultImpl, constraint: _C + ) -> _constraint_sig[_C]: + # these could be cached by constraint/impl, however, if the + # constraint is modified in place, then the sig is wrong. the mysql + # impl currently does this, and if we fixed that we can't be sure + # someone else might do it too, so play it safe. 
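+        # dispatch on ``__visit_name__``: "unique_constraint", "index"
+        # and "foreign_key_constraint" are registered in _clsreg by the
+        # subclasses' _register() hooks, invoked via __init_subclass__.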
+ sig = _clsreg[constraint.__visit_name__](is_metadata, impl, constraint) + return sig + + def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: + return sqla_compat._get_constraint_final_name( + self.const, context.dialect + ) + + @util.memoized_property + def is_named(self): + return sqla_compat._constraint_is_named(self.const, self.impl.dialect) + + @util.memoized_property + def unnamed(self) -> Tuple[Any, ...]: + return self._sig + + @util.memoized_property + def unnamed_no_options(self) -> Tuple[Any, ...]: + raise NotImplementedError() + + @util.memoized_property + def _full_sig(self) -> Tuple[Any, ...]: + return (self.name,) + self.unnamed + + def __eq__(self, other) -> bool: + return self._full_sig == other._full_sig + + def __ne__(self, other) -> bool: + return self._full_sig != other._full_sig + + def __hash__(self) -> int: + return hash(self._full_sig) + + +class _uq_constraint_sig(_constraint_sig[UniqueConstraint]): + _is_uq = True + + @classmethod + def _register(cls) -> None: + _clsreg["unique_constraint"] = cls + + is_unique = True + + def __init__( + self, + is_metadata: bool, + impl: DefaultImpl, + const: UniqueConstraint, + ) -> None: + self.impl = impl + self.const = const + self.name = sqla_compat.constraint_name_or_none(const.name) + self._sig = tuple(sorted([col.name for col in const.columns])) + self._is_metadata = is_metadata + + @property + def column_names(self) -> Tuple[str, ...]: + return tuple([col.name for col in self.const.columns]) + + def _compare_to_reflected( + self, other: _constraint_sig[_C] + ) -> ComparisonResult: + assert self._is_metadata + metadata_obj = self + conn_obj = other + + assert is_uq_sig(conn_obj) + return self.impl.compare_unique_constraint( + metadata_obj.const, conn_obj.const + ) + + +class _ix_constraint_sig(_constraint_sig[Index]): + _is_index = True + + name: sqla_compat._ConstraintName + + @classmethod + def _register(cls) -> None: + _clsreg["index"] = cls + + def __init__( + self, is_metadata: bool, impl: DefaultImpl, const: Index + ) -> None: + self.impl = impl + self.const = const + self.name = const.name + self.is_unique = bool(const.unique) + self._is_metadata = is_metadata + + def _compare_to_reflected( + self, other: _constraint_sig[_C] + ) -> ComparisonResult: + assert self._is_metadata + metadata_obj = self + conn_obj = other + + assert is_index_sig(conn_obj) + return self.impl.compare_indexes(metadata_obj.const, conn_obj.const) + + @util.memoized_property + def has_expressions(self): + return sqla_compat.is_expression_index(self.const) + + @util.memoized_property + def column_names(self) -> Tuple[str, ...]: + return tuple([col.name for col in self.const.columns]) + + @util.memoized_property + def column_names_optional(self) -> Tuple[Optional[str], ...]: + return tuple( + [getattr(col, "name", None) for col in self.const.expressions] + ) + + @util.memoized_property + def is_named(self): + return True + + @util.memoized_property + def unnamed(self): + return (self.is_unique,) + self.column_names_optional + + +class _fk_constraint_sig(_constraint_sig[ForeignKeyConstraint]): + _is_fk = True + + @classmethod + def _register(cls) -> None: + _clsreg["foreign_key_constraint"] = cls + + def __init__( + self, + is_metadata: bool, + impl: DefaultImpl, + const: ForeignKeyConstraint, + ) -> None: + self._is_metadata = is_metadata + + self.impl = impl + self.const = const + + self.name = sqla_compat.constraint_name_or_none(const.name) + + ( + self.source_schema, + self.source_table, + self.source_columns, + 
self.target_schema, + self.target_table, + self.target_columns, + onupdate, + ondelete, + deferrable, + initially, + ) = sqla_compat._fk_spec(const) + + self._sig: Tuple[Any, ...] = ( + self.source_schema, + self.source_table, + tuple(self.source_columns), + self.target_schema, + self.target_table, + tuple(self.target_columns), + ) + ( + (None if onupdate.lower() == "no action" else onupdate.lower()) + if onupdate + else None, + (None if ondelete.lower() == "no action" else ondelete.lower()) + if ondelete + else None, + # convert initially + deferrable into one three-state value + "initially_deferrable" + if initially and initially.lower() == "deferred" + else "deferrable" + if deferrable + else "not deferrable", + ) + + @util.memoized_property + def unnamed_no_options(self): + return ( + self.source_schema, + self.source_table, + tuple(self.source_columns), + self.target_schema, + self.target_table, + tuple(self.target_columns), + ) + + +def is_index_sig(sig: _constraint_sig) -> TypeGuard[_ix_constraint_sig]: + return sig._is_index + + +def is_uq_sig(sig: _constraint_sig) -> TypeGuard[_uq_constraint_sig]: + return sig._is_uq + + +def is_fk_sig(sig: _constraint_sig) -> TypeGuard[_fk_constraint_sig]: + return sig._is_fk diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/base.py b/venv/lib/python3.12/site-packages/alembic/ddl/base.py new file mode 100644 index 0000000..7a85a5c --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/base.py @@ -0,0 +1,335 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import functools +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import exc +from sqlalchemy import Integer +from sqlalchemy import types as sqltypes +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import Column +from sqlalchemy.schema import DDLElement +from sqlalchemy.sql.elements import quoted_name + +from ..util.sqla_compat import _columns_for_constraint # noqa +from ..util.sqla_compat import _find_columns # noqa +from ..util.sqla_compat import _fk_spec # noqa +from ..util.sqla_compat import _is_type_bound # noqa +from ..util.sqla_compat import _table_for_constraint # noqa + +if TYPE_CHECKING: + from typing import Any + + from sqlalchemy.sql.compiler import Compiled + from sqlalchemy.sql.compiler import DDLCompiler + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.functions import Function + from sqlalchemy.sql.schema import FetchedValue + from sqlalchemy.sql.type_api import TypeEngine + + from .impl import DefaultImpl + from ..util.sqla_compat import Computed + from ..util.sqla_compat import Identity + +_ServerDefault = Union["TextClause", "FetchedValue", "Function[Any]", str] + + +class AlterTable(DDLElement): + + """Represent an ALTER TABLE statement. + + Only the string name and optional schema name of the table + is required, not a full Table object. 
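+
+    Subclasses such as :class:`.RenameTable` and :class:`.AddColumn`
+    below are rendered to SQL via the ``@compiles`` hooks later in this
+    module (e.g. ``visit_rename_table``, ``visit_add_column``).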
+ + """ + + def __init__( + self, + table_name: str, + schema: Optional[Union[quoted_name, str]] = None, + ) -> None: + self.table_name = table_name + self.schema = schema + + +class RenameTable(AlterTable): + def __init__( + self, + old_table_name: str, + new_table_name: Union[quoted_name, str], + schema: Optional[Union[quoted_name, str]] = None, + ) -> None: + super().__init__(old_table_name, schema=schema) + self.new_table_name = new_table_name + + +class AlterColumn(AlterTable): + def __init__( + self, + name: str, + column_name: str, + schema: Optional[str] = None, + existing_type: Optional[TypeEngine] = None, + existing_nullable: Optional[bool] = None, + existing_server_default: Optional[_ServerDefault] = None, + existing_comment: Optional[str] = None, + ) -> None: + super().__init__(name, schema=schema) + self.column_name = column_name + self.existing_type = ( + sqltypes.to_instance(existing_type) + if existing_type is not None + else None + ) + self.existing_nullable = existing_nullable + self.existing_server_default = existing_server_default + self.existing_comment = existing_comment + + +class ColumnNullable(AlterColumn): + def __init__( + self, name: str, column_name: str, nullable: bool, **kw + ) -> None: + super().__init__(name, column_name, **kw) + self.nullable = nullable + + +class ColumnType(AlterColumn): + def __init__( + self, name: str, column_name: str, type_: TypeEngine, **kw + ) -> None: + super().__init__(name, column_name, **kw) + self.type_ = sqltypes.to_instance(type_) + + +class ColumnName(AlterColumn): + def __init__( + self, name: str, column_name: str, newname: str, **kw + ) -> None: + super().__init__(name, column_name, **kw) + self.newname = newname + + +class ColumnDefault(AlterColumn): + def __init__( + self, + name: str, + column_name: str, + default: Optional[_ServerDefault], + **kw, + ) -> None: + super().__init__(name, column_name, **kw) + self.default = default + + +class ComputedColumnDefault(AlterColumn): + def __init__( + self, name: str, column_name: str, default: Optional[Computed], **kw + ) -> None: + super().__init__(name, column_name, **kw) + self.default = default + + +class IdentityColumnDefault(AlterColumn): + def __init__( + self, + name: str, + column_name: str, + default: Optional[Identity], + impl: DefaultImpl, + **kw, + ) -> None: + super().__init__(name, column_name, **kw) + self.default = default + self.impl = impl + + +class AddColumn(AlterTable): + def __init__( + self, + name: str, + column: Column[Any], + schema: Optional[Union[quoted_name, str]] = None, + ) -> None: + super().__init__(name, schema=schema) + self.column = column + + +class DropColumn(AlterTable): + def __init__( + self, name: str, column: Column[Any], schema: Optional[str] = None + ) -> None: + super().__init__(name, schema=schema) + self.column = column + + +class ColumnComment(AlterColumn): + def __init__( + self, name: str, column_name: str, comment: Optional[str], **kw + ) -> None: + super().__init__(name, column_name, **kw) + self.comment = comment + + +@compiles(RenameTable) # type: ignore[misc] +def visit_rename_table( + element: RenameTable, compiler: DDLCompiler, **kw +) -> str: + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, element.schema), + ) + + +@compiles(AddColumn) # type: ignore[misc] +def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str: + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + 
add_column(compiler, element.column, **kw), + ) + + +@compiles(DropColumn) # type: ignore[misc] +def visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) -> str: + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + drop_column(compiler, element.column.name, **kw), + ) + + +@compiles(ColumnNullable) # type: ignore[misc] +def visit_column_nullable( + element: ColumnNullable, compiler: DDLCompiler, **kw +) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "DROP NOT NULL" if element.nullable else "SET NOT NULL", + ) + + +@compiles(ColumnType) # type: ignore[misc] +def visit_column_type(element: ColumnType, compiler: DDLCompiler, **kw) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "TYPE %s" % format_type(compiler, element.type_), + ) + + +@compiles(ColumnName) # type: ignore[misc] +def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str: + return "%s RENAME %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + +@compiles(ColumnDefault) # type: ignore[misc] +def visit_column_default( + element: ColumnDefault, compiler: DDLCompiler, **kw +) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "SET DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DROP DEFAULT", + ) + + +@compiles(ComputedColumnDefault) # type: ignore[misc] +def visit_computed_column( + element: ComputedColumnDefault, compiler: DDLCompiler, **kw +): + raise exc.CompileError( + 'Adding or removing a "computed" construct, e.g. GENERATED ' + "ALWAYS AS, to or from an existing column is not supported." + ) + + +@compiles(IdentityColumnDefault) # type: ignore[misc] +def visit_identity_column( + element: IdentityColumnDefault, compiler: DDLCompiler, **kw +): + raise exc.CompileError( + 'Adding, removing or modifying an "identity" construct, ' + "e.g. GENERATED AS IDENTITY, to or from an existing " + "column is not supported in this dialect." + ) + + +def quote_dotted( + name: Union[quoted_name, str], quote: functools.partial +) -> Union[quoted_name, str]: + """quote the elements of a dotted name""" + + if isinstance(name, quoted_name): + return quote(name) + result = ".".join([quote(x) for x in name.split(".")]) + return result + + +def format_table_name( + compiler: Compiled, + name: Union[quoted_name, str], + schema: Optional[Union[quoted_name, str]], +) -> Union[quoted_name, str]: + quote = functools.partial(compiler.preparer.quote) + if schema: + return quote_dotted(schema, quote) + "." 
+ quote(name) + else: + return quote(name) + + +def format_column_name( + compiler: DDLCompiler, name: Optional[Union[quoted_name, str]] +) -> Union[quoted_name, str]: + return compiler.preparer.quote(name) # type: ignore[arg-type] + + +def format_server_default( + compiler: DDLCompiler, + default: Optional[_ServerDefault], +) -> str: + return compiler.get_column_default_string( + Column("x", Integer, server_default=default) + ) + + +def format_type(compiler: DDLCompiler, type_: TypeEngine) -> str: + return compiler.dialect.type_compiler.process(type_) + + +def alter_table( + compiler: DDLCompiler, + name: str, + schema: Optional[str], +) -> str: + return "ALTER TABLE %s" % format_table_name(compiler, name, schema) + + +def drop_column(compiler: DDLCompiler, name: str, **kw) -> str: + return "DROP COLUMN %s" % format_column_name(compiler, name) + + +def alter_column(compiler: DDLCompiler, name: str) -> str: + return "ALTER COLUMN %s" % format_column_name(compiler, name) + + +def add_column(compiler: DDLCompiler, column: Column[Any], **kw) -> str: + text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) + + const = " ".join( + compiler.process(constraint) for constraint in column.constraints + ) + if const: + text += " " + const + + return text diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/impl.py b/venv/lib/python3.12/site-packages/alembic/ddl/impl.py new file mode 100644 index 0000000..2e4f1ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/impl.py @@ -0,0 +1,844 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import logging +import re +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import List +from typing import Mapping +from typing import NamedTuple +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import cast +from sqlalchemy import schema +from sqlalchemy import text + +from . import _autogen +from . import base +from ._autogen import _constraint_sig as _constraint_sig +from ._autogen import ComparisonResult as ComparisonResult +from .. 
import util +from ..util import sqla_compat + +if TYPE_CHECKING: + from typing import Literal + from typing import TextIO + + from sqlalchemy.engine import Connection + from sqlalchemy.engine import Dialect + from sqlalchemy.engine.cursor import CursorResult + from sqlalchemy.engine.reflection import Inspector + from sqlalchemy.sql import ClauseElement + from sqlalchemy.sql import Executable + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.schema import ForeignKeyConstraint + from sqlalchemy.sql.schema import Index + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.schema import UniqueConstraint + from sqlalchemy.sql.selectable import TableClause + from sqlalchemy.sql.type_api import TypeEngine + + from .base import _ServerDefault + from ..autogenerate.api import AutogenContext + from ..operations.batch import ApplyBatchImpl + from ..operations.batch import BatchOperationsImpl + +log = logging.getLogger(__name__) + + +class ImplMeta(type): + def __init__( + cls, + classname: str, + bases: Tuple[Type[DefaultImpl]], + dict_: Dict[str, Any], + ): + newtype = type.__init__(cls, classname, bases, dict_) + if "__dialect__" in dict_: + _impls[dict_["__dialect__"]] = cls # type: ignore[assignment] + return newtype + + +_impls: Dict[str, Type[DefaultImpl]] = {} + + +class DefaultImpl(metaclass=ImplMeta): + + """Provide the entrypoint for major migration operations, + including database-specific behavioral variances. + + While individual SQL/DDL constructs already provide + for database-specific implementations, variances here + allow for entirely different sequences of operations + to take place for a particular migration, such as + SQL Server's special 'IDENTITY INSERT' step for + bulk inserts. + + """ + + __dialect__ = "default" + + transactional_ddl = False + command_terminator = ";" + type_synonyms: Tuple[Set[str], ...] = ({"NUMERIC", "DECIMAL"},) + type_arg_extract: Sequence[str] = () + # These attributes are deprecated in SQLAlchemy via #10247. They need to + # be ignored to support older version that did not use dialect kwargs. + # They only apply to Oracle and are replaced by oracle_order, + # oracle_on_null + identity_attrs_ignore: Tuple[str, ...] = ("order", "on_null") + + def __init__( + self, + dialect: Dialect, + connection: Optional[Connection], + as_sql: bool, + transactional_ddl: Optional[bool], + output_buffer: Optional[TextIO], + context_opts: Dict[str, Any], + ) -> None: + self.dialect = dialect + self.connection = connection + self.as_sql = as_sql + self.literal_binds = context_opts.get("literal_binds", False) + + self.output_buffer = output_buffer + self.memo: dict = {} + self.context_opts = context_opts + if transactional_ddl is not None: + self.transactional_ddl = transactional_ddl + + if self.literal_binds: + if not self.as_sql: + raise util.CommandError( + "Can't use literal_binds setting without as_sql mode" + ) + + @classmethod + def get_by_dialect(cls, dialect: Dialect) -> Type[DefaultImpl]: + return _impls[dialect.name] + + def static_output(self, text: str) -> None: + assert self.output_buffer is not None + self.output_buffer.write(text + "\n\n") + self.output_buffer.flush() + + def requires_recreate_in_batch( + self, batch_op: BatchOperationsImpl + ) -> bool: + """Return True if the given :class:`.BatchOperationsImpl` + would need the table to be recreated and copied in order to + proceed. 
+ + Normally, only returns True on SQLite when operations other + than add_column are present. + + """ + return False + + def prep_table_for_batch( + self, batch_impl: ApplyBatchImpl, table: Table + ) -> None: + """perform any operations needed on a table before a new + one is created to replace it in batch mode. + + the PG dialect uses this to drop constraints on the table + before the new one uses those same names. + + """ + + @property + def bind(self) -> Optional[Connection]: + return self.connection + + def _exec( + self, + construct: Union[Executable, str], + execution_options: Optional[dict[str, Any]] = None, + multiparams: Sequence[dict] = (), + params: Dict[str, Any] = util.immutabledict(), + ) -> Optional[CursorResult]: + if isinstance(construct, str): + construct = text(construct) + if self.as_sql: + if multiparams or params: + # TODO: coverage + raise Exception("Execution arguments not allowed with as_sql") + + compile_kw: dict[str, Any] + if self.literal_binds and not isinstance( + construct, schema.DDLElement + ): + compile_kw = dict(compile_kwargs={"literal_binds": True}) + else: + compile_kw = {} + + if TYPE_CHECKING: + assert isinstance(construct, ClauseElement) + compiled = construct.compile(dialect=self.dialect, **compile_kw) + self.static_output( + str(compiled).replace("\t", " ").strip() + + self.command_terminator + ) + return None + else: + conn = self.connection + assert conn is not None + if execution_options: + conn = conn.execution_options(**execution_options) + if params: + assert isinstance(multiparams, tuple) + multiparams += (params,) + + return conn.execute(construct, multiparams) + + def execute( + self, + sql: Union[Executable, str], + execution_options: Optional[dict[str, Any]] = None, + ) -> None: + self._exec(sql, execution_options) + + def alter_column( + self, + table_name: str, + column_name: str, + nullable: Optional[bool] = None, + server_default: Union[_ServerDefault, Literal[False]] = False, + name: Optional[str] = None, + type_: Optional[TypeEngine] = None, + schema: Optional[str] = None, + autoincrement: Optional[bool] = None, + comment: Optional[Union[str, Literal[False]]] = False, + existing_comment: Optional[str] = None, + existing_type: Optional[TypeEngine] = None, + existing_server_default: Optional[_ServerDefault] = None, + existing_nullable: Optional[bool] = None, + existing_autoincrement: Optional[bool] = None, + **kw: Any, + ) -> None: + if autoincrement is not None or existing_autoincrement is not None: + util.warn( + "autoincrement and existing_autoincrement " + "only make sense for MySQL", + stacklevel=3, + ) + if nullable is not None: + self._exec( + base.ColumnNullable( + table_name, + column_name, + nullable, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + if server_default is not False: + kw = {} + cls_: Type[ + Union[ + base.ComputedColumnDefault, + base.IdentityColumnDefault, + base.ColumnDefault, + ] + ] + if sqla_compat._server_default_is_computed( + server_default, existing_server_default + ): + cls_ = base.ComputedColumnDefault + elif sqla_compat._server_default_is_identity( + server_default, existing_server_default + ): + cls_ = base.IdentityColumnDefault + kw["impl"] = self + else: + cls_ = base.ColumnDefault + self._exec( + cls_( + table_name, + column_name, + server_default, # type:ignore[arg-type] + schema=schema, + existing_type=existing_type, + 
existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + **kw, + ) + ) + if type_ is not None: + self._exec( + base.ColumnType( + table_name, + column_name, + type_, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + + if comment is not False: + self._exec( + base.ColumnComment( + table_name, + column_name, + comment, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + ) + ) + + # do the new name last ;) + if name is not None: + self._exec( + base.ColumnName( + table_name, + column_name, + name, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + ) + ) + + def add_column( + self, + table_name: str, + column: Column[Any], + schema: Optional[Union[str, quoted_name]] = None, + ) -> None: + self._exec(base.AddColumn(table_name, column, schema=schema)) + + def drop_column( + self, + table_name: str, + column: Column[Any], + schema: Optional[str] = None, + **kw, + ) -> None: + self._exec(base.DropColumn(table_name, column, schema=schema)) + + def add_constraint(self, const: Any) -> None: + if const._create_rule is None or const._create_rule(self): + self._exec(schema.AddConstraint(const)) + + def drop_constraint(self, const: Constraint) -> None: + self._exec(schema.DropConstraint(const)) + + def rename_table( + self, + old_table_name: str, + new_table_name: Union[str, quoted_name], + schema: Optional[Union[str, quoted_name]] = None, + ) -> None: + self._exec( + base.RenameTable(old_table_name, new_table_name, schema=schema) + ) + + def create_table(self, table: Table) -> None: + table.dispatch.before_create( + table, self.connection, checkfirst=False, _ddl_runner=self + ) + self._exec(schema.CreateTable(table)) + table.dispatch.after_create( + table, self.connection, checkfirst=False, _ddl_runner=self + ) + for index in table.indexes: + self._exec(schema.CreateIndex(index)) + + with_comment = ( + self.dialect.supports_comments and not self.dialect.inline_comments + ) + comment = table.comment + if comment and with_comment: + self.create_table_comment(table) + + for column in table.columns: + comment = column.comment + if comment and with_comment: + self.create_column_comment(column) + + def drop_table(self, table: Table) -> None: + table.dispatch.before_drop( + table, self.connection, checkfirst=False, _ddl_runner=self + ) + self._exec(schema.DropTable(table)) + table.dispatch.after_drop( + table, self.connection, checkfirst=False, _ddl_runner=self + ) + + def create_index(self, index: Index, **kw: Any) -> None: + self._exec(schema.CreateIndex(index, **kw)) + + def create_table_comment(self, table: Table) -> None: + self._exec(schema.SetTableComment(table)) + + def drop_table_comment(self, table: Table) -> None: + self._exec(schema.DropTableComment(table)) + + def create_column_comment(self, column: ColumnElement[Any]) -> None: + self._exec(schema.SetColumnComment(column)) + + def drop_index(self, index: Index, **kw: Any) -> None: + self._exec(schema.DropIndex(index, **kw)) + + def bulk_insert( + self, + table: Union[TableClause, Table], + rows: List[dict], + multiinsert: bool = True, + ) -> None: + if not isinstance(rows, list): + raise TypeError("List expected") + elif rows and not 
isinstance(rows[0], dict): + raise TypeError("List of dictionaries expected") + if self.as_sql: + for row in rows: + self._exec( + sqla_compat._insert_inline(table).values( + **{ + k: sqla_compat._literal_bindparam( + k, v, type_=table.c[k].type + ) + if not isinstance( + v, sqla_compat._literal_bindparam + ) + else v + for k, v in row.items() + } + ) + ) + else: + if rows: + if multiinsert: + self._exec( + sqla_compat._insert_inline(table), multiparams=rows + ) + else: + for row in rows: + self._exec( + sqla_compat._insert_inline(table).values(**row) + ) + + def _tokenize_column_type(self, column: Column) -> Params: + definition: str + definition = self.dialect.type_compiler.process(column.type).lower() + + # tokenize the SQLAlchemy-generated version of a type, so that + # the two can be compared. + # + # examples: + # NUMERIC(10, 5) + # TIMESTAMP WITH TIMEZONE + # INTEGER UNSIGNED + # INTEGER (10) UNSIGNED + # INTEGER(10) UNSIGNED + # varchar character set utf8 + # + + tokens: List[str] = re.findall(r"[\w\-_]+|\(.+?\)", definition) + + term_tokens: List[str] = [] + paren_term = None + + for token in tokens: + if re.match(r"^\(.*\)$", token): + paren_term = token + else: + term_tokens.append(token) + + params = Params(term_tokens[0], term_tokens[1:], [], {}) + + if paren_term: + term: str + for term in re.findall("[^(),]+", paren_term): + if "=" in term: + key, val = term.split("=") + params.kwargs[key.strip()] = val.strip() + else: + params.args.append(term.strip()) + + return params + + def _column_types_match( + self, inspector_params: Params, metadata_params: Params + ) -> bool: + if inspector_params.token0 == metadata_params.token0: + return True + + synonyms = [{t.lower() for t in batch} for batch in self.type_synonyms] + inspector_all_terms = " ".join( + [inspector_params.token0] + inspector_params.tokens + ) + metadata_all_terms = " ".join( + [metadata_params.token0] + metadata_params.tokens + ) + + for batch in synonyms: + if {inspector_all_terms, metadata_all_terms}.issubset(batch) or { + inspector_params.token0, + metadata_params.token0, + }.issubset(batch): + return True + return False + + def _column_args_match( + self, inspected_params: Params, meta_params: Params + ) -> bool: + """We want to compare column parameters. However, we only want + to compare parameters that are set. If they both have `collation`, + we want to make sure they are the same. However, if only one + specifies it, dont flag it for being less specific + """ + + if ( + len(meta_params.tokens) == len(inspected_params.tokens) + and meta_params.tokens != inspected_params.tokens + ): + return False + + if ( + len(meta_params.args) == len(inspected_params.args) + and meta_params.args != inspected_params.args + ): + return False + + insp = " ".join(inspected_params.tokens).lower() + meta = " ".join(meta_params.tokens).lower() + + for reg in self.type_arg_extract: + mi = re.search(reg, insp) + mm = re.search(reg, meta) + + if mi and mm and mi.group(1) != mm.group(1): + return False + + return True + + def compare_type( + self, inspector_column: Column[Any], metadata_column: Column + ) -> bool: + """Returns True if there ARE differences between the types of the two + columns. 
Takes impl.type_synonyms into account between retrospected + and metadata types + """ + inspector_params = self._tokenize_column_type(inspector_column) + metadata_params = self._tokenize_column_type(metadata_column) + + if not self._column_types_match(inspector_params, metadata_params): + return True + if not self._column_args_match(inspector_params, metadata_params): + return True + return False + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + return rendered_inspector_default != rendered_metadata_default + + def correct_for_autogen_constraints( + self, + conn_uniques: Set[UniqueConstraint], + conn_indexes: Set[Index], + metadata_unique_constraints: Set[UniqueConstraint], + metadata_indexes: Set[Index], + ) -> None: + pass + + def cast_for_batch_migrate(self, existing, existing_transfer, new_type): + if existing.type._type_affinity is not new_type._type_affinity: + existing_transfer["expr"] = cast( + existing_transfer["expr"], new_type + ) + + def render_ddl_sql_expr( + self, expr: ClauseElement, is_server_default: bool = False, **kw: Any + ) -> str: + """Render a SQL expression that is typically a server default, + index expression, etc. + + """ + + compile_kw = {"literal_binds": True, "include_table": False} + + return str( + expr.compile(dialect=self.dialect, compile_kwargs=compile_kw) + ) + + def _compat_autogen_column_reflect(self, inspector: Inspector) -> Callable: + return self.autogen_column_reflect + + def correct_for_autogen_foreignkeys( + self, + conn_fks: Set[ForeignKeyConstraint], + metadata_fks: Set[ForeignKeyConstraint], + ) -> None: + pass + + def autogen_column_reflect(self, inspector, table, column_info): + """A hook that is attached to the 'column_reflect' event for when + a Table is reflected from the database during the autogenerate + process. + + Dialects can elect to modify the information gathered here. + + """ + + def start_migrations(self) -> None: + """A hook called when :meth:`.EnvironmentContext.run_migrations` + is called. + + Implementations can set up per-migration-run state here. + + """ + + def emit_begin(self) -> None: + """Emit the string ``BEGIN``, or the backend-specific + equivalent, on the current connection context. + + This is used in offline mode and typically + via :meth:`.EnvironmentContext.begin_transaction`. + + """ + self.static_output("BEGIN" + self.command_terminator) + + def emit_commit(self) -> None: + """Emit the string ``COMMIT``, or the backend-specific + equivalent, on the current connection context. + + This is used in offline mode and typically + via :meth:`.EnvironmentContext.begin_transaction`. + + """ + self.static_output("COMMIT" + self.command_terminator) + + def render_type( + self, type_obj: TypeEngine, autogen_context: AutogenContext + ) -> Union[str, Literal[False]]: + return False + + def _compare_identity_default(self, metadata_identity, inspector_identity): + # ignored contains the attributes that were not considered + # because assumed to their default values in the db. 
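+        # "always" is excluded from the generic comparison (skip below)
+        # and handled explicitly afterwards, where None and False are
+        # treated as equivalent.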
+ diff, ignored = _compare_identity_options( + metadata_identity, + inspector_identity, + sqla_compat.Identity(), + skip={"always"}, + ) + + meta_always = getattr(metadata_identity, "always", None) + inspector_always = getattr(inspector_identity, "always", None) + # None and False are the same in this comparison + if bool(meta_always) != bool(inspector_always): + diff.add("always") + + diff.difference_update(self.identity_attrs_ignore) + + # returns 3 values: + return ( + # different identity attributes + diff, + # ignored identity attributes + ignored, + # if the two identity should be considered different + bool(diff) or bool(metadata_identity) != bool(inspector_identity), + ) + + def _compare_index_unique( + self, metadata_index: Index, reflected_index: Index + ) -> Optional[str]: + conn_unique = bool(reflected_index.unique) + meta_unique = bool(metadata_index.unique) + if conn_unique != meta_unique: + return f"unique={conn_unique} to unique={meta_unique}" + else: + return None + + def _create_metadata_constraint_sig( + self, constraint: _autogen._C, **opts: Any + ) -> _constraint_sig[_autogen._C]: + return _constraint_sig.from_constraint(True, self, constraint, **opts) + + def _create_reflected_constraint_sig( + self, constraint: _autogen._C, **opts: Any + ) -> _constraint_sig[_autogen._C]: + return _constraint_sig.from_constraint(False, self, constraint, **opts) + + def compare_indexes( + self, + metadata_index: Index, + reflected_index: Index, + ) -> ComparisonResult: + """Compare two indexes by comparing the signature generated by + ``create_index_sig``. + + This method returns a ``ComparisonResult``. + """ + msg: List[str] = [] + unique_msg = self._compare_index_unique( + metadata_index, reflected_index + ) + if unique_msg: + msg.append(unique_msg) + m_sig = self._create_metadata_constraint_sig(metadata_index) + r_sig = self._create_reflected_constraint_sig(reflected_index) + + assert _autogen.is_index_sig(m_sig) + assert _autogen.is_index_sig(r_sig) + + # The assumption is that the index have no expression + for sig in m_sig, r_sig: + if sig.has_expressions: + log.warning( + "Generating approximate signature for index %s. " + "The dialect " + "implementation should either skip expression indexes " + "or provide a custom implementation.", + sig.const, + ) + + if m_sig.column_names != r_sig.column_names: + msg.append( + f"expression {r_sig.column_names} to {m_sig.column_names}" + ) + + if msg: + return ComparisonResult.Different(msg) + else: + return ComparisonResult.Equal() + + def compare_unique_constraint( + self, + metadata_constraint: UniqueConstraint, + reflected_constraint: UniqueConstraint, + ) -> ComparisonResult: + """Compare two unique constraints by comparing the two signatures. + + The arguments are two tuples that contain the unique constraint and + the signatures generated by ``create_unique_constraint_sig``. + + This method returns a ``ComparisonResult``. 
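+
+        With the default signatures this reduces to comparing the
+        sorted column-name tuples of the two constraints (see
+        ``_uq_constraint_sig``).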
+ """ + metadata_tup = self._create_metadata_constraint_sig( + metadata_constraint + ) + reflected_tup = self._create_reflected_constraint_sig( + reflected_constraint + ) + + meta_sig = metadata_tup.unnamed + conn_sig = reflected_tup.unnamed + if conn_sig != meta_sig: + return ComparisonResult.Different( + f"expression {conn_sig} to {meta_sig}" + ) + else: + return ComparisonResult.Equal() + + def _skip_functional_indexes(self, metadata_indexes, conn_indexes): + conn_indexes_by_name = {c.name: c for c in conn_indexes} + + for idx in list(metadata_indexes): + if idx.name in conn_indexes_by_name: + continue + iex = sqla_compat.is_expression_index(idx) + if iex: + util.warn( + "autogenerate skipping metadata-specified " + "expression-based index " + f"{idx.name!r}; dialect {self.__dialect__!r} under " + f"SQLAlchemy {sqla_compat.sqlalchemy_version} can't " + "reflect these indexes so they can't be compared" + ) + metadata_indexes.discard(idx) + + def adjust_reflected_dialect_options( + self, reflected_object: Dict[str, Any], kind: str + ) -> Dict[str, Any]: + return reflected_object.get("dialect_options", {}) + + +class Params(NamedTuple): + token0: str + tokens: List[str] + args: List[str] + kwargs: Dict[str, str] + + +def _compare_identity_options( + metadata_io: Union[schema.Identity, schema.Sequence, None], + inspector_io: Union[schema.Identity, schema.Sequence, None], + default_io: Union[schema.Identity, schema.Sequence], + skip: Set[str], +): + # this can be used for identity or sequence compare. + # default_io is an instance of IdentityOption with all attributes to the + # default value. + meta_d = sqla_compat._get_identity_options_dict(metadata_io) + insp_d = sqla_compat._get_identity_options_dict(inspector_io) + + diff = set() + ignored_attr = set() + + def check_dicts( + meta_dict: Mapping[str, Any], + insp_dict: Mapping[str, Any], + default_dict: Mapping[str, Any], + attrs: Iterable[str], + ): + for attr in set(attrs).difference(skip): + meta_value = meta_dict.get(attr) + insp_value = insp_dict.get(attr) + if insp_value != meta_value: + default_value = default_dict.get(attr) + if meta_value == default_value: + ignored_attr.add(attr) + else: + diff.add(attr) + + check_dicts( + meta_d, + insp_d, + sqla_compat._get_identity_options_dict(default_io), + set(meta_d).union(insp_d), + ) + if sqla_compat.identity_has_dialect_kwargs: + # use only the dialect kwargs in inspector_io since metadata_io + # can have options for many backends + check_dicts( + getattr(metadata_io, "dialect_kwargs", {}), + getattr(inspector_io, "dialect_kwargs", {}), + default_io.dialect_kwargs, # type: ignore[union-attr] + getattr(inspector_io, "dialect_kwargs", {}), + ) + + return diff, ignored_attr diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/mssql.py b/venv/lib/python3.12/site-packages/alembic/ddl/mssql.py new file mode 100644 index 0000000..baa43d5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/mssql.py @@ -0,0 +1,419 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import re +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import types as sqltypes +from sqlalchemy.schema import Column +from sqlalchemy.schema import CreateIndex +from sqlalchemy.sql.base import Executable +from sqlalchemy.sql.elements import ClauseElement + +from .base 
import AddColumn +from .base import alter_column +from .base import alter_table +from .base import ColumnDefault +from .base import ColumnName +from .base import ColumnNullable +from .base import ColumnType +from .base import format_column_name +from .base import format_server_default +from .base import format_table_name +from .base import format_type +from .base import RenameTable +from .impl import DefaultImpl +from .. import util +from ..util import sqla_compat +from ..util.sqla_compat import compiles + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy.dialects.mssql.base import MSDDLCompiler + from sqlalchemy.dialects.mssql.base import MSSQLCompiler + from sqlalchemy.engine.cursor import CursorResult + from sqlalchemy.sql.schema import Index + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.selectable import TableClause + from sqlalchemy.sql.type_api import TypeEngine + + from .base import _ServerDefault + + +class MSSQLImpl(DefaultImpl): + __dialect__ = "mssql" + transactional_ddl = True + batch_separator = "GO" + + type_synonyms = DefaultImpl.type_synonyms + ({"VARCHAR", "NVARCHAR"},) + identity_attrs_ignore = DefaultImpl.identity_attrs_ignore + ( + "minvalue", + "maxvalue", + "nominvalue", + "nomaxvalue", + "cycle", + "cache", + ) + + def __init__(self, *arg, **kw) -> None: + super().__init__(*arg, **kw) + self.batch_separator = self.context_opts.get( + "mssql_batch_separator", self.batch_separator + ) + + def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]: + result = super()._exec(construct, *args, **kw) + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + return result + + def emit_begin(self) -> None: + self.static_output("BEGIN TRANSACTION" + self.command_terminator) + + def emit_commit(self) -> None: + super().emit_commit() + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + + def alter_column( # type:ignore[override] + self, + table_name: str, + column_name: str, + nullable: Optional[bool] = None, + server_default: Optional[ + Union[_ServerDefault, Literal[False]] + ] = False, + name: Optional[str] = None, + type_: Optional[TypeEngine] = None, + schema: Optional[str] = None, + existing_type: Optional[TypeEngine] = None, + existing_server_default: Optional[_ServerDefault] = None, + existing_nullable: Optional[bool] = None, + **kw: Any, + ) -> None: + if nullable is not None: + if type_ is not None: + # the NULL/NOT NULL alter will handle + # the type alteration + existing_type = type_ + type_ = None + elif existing_type is None: + raise util.CommandError( + "MS-SQL ALTER COLUMN operations " + "with NULL or NOT NULL require the " + "existing_type or a new type_ be passed." + ) + elif existing_nullable is not None and type_ is not None: + nullable = existing_nullable + + # the NULL/NOT NULL alter will handle + # the type alteration + existing_type = type_ + type_ = None + + elif type_ is not None: + util.warn( + "MS-SQL ALTER COLUMN operations that specify type_= " + "should also specify a nullable= or " + "existing_nullable= argument to avoid implicit conversion " + "of NOT NULL columns to NULL." 
+ ) + + used_default = False + if sqla_compat._server_default_is_identity( + server_default, existing_server_default + ) or sqla_compat._server_default_is_computed( + server_default, existing_server_default + ): + used_default = True + kw["server_default"] = server_default + kw["existing_server_default"] = existing_server_default + + super().alter_column( + table_name, + column_name, + nullable=nullable, + type_=type_, + schema=schema, + existing_type=existing_type, + existing_nullable=existing_nullable, + **kw, + ) + + if server_default is not False and used_default is False: + if existing_server_default is not False or server_default is None: + self._exec( + _ExecDropConstraint( + table_name, + column_name, + "sys.default_constraints", + schema, + ) + ) + if server_default is not None: + super().alter_column( + table_name, + column_name, + schema=schema, + server_default=server_default, + ) + + if name is not None: + super().alter_column( + table_name, column_name, schema=schema, name=name + ) + + def create_index(self, index: Index, **kw: Any) -> None: + # this likely defaults to None if not present, so get() + # should normally not return the default value. being + # defensive in any case + mssql_include = index.kwargs.get("mssql_include", None) or () + assert index.table is not None + for col in mssql_include: + if col not in index.table.c: + index.table.append_column(Column(col, sqltypes.NullType)) + self._exec(CreateIndex(index, **kw)) + + def bulk_insert( # type:ignore[override] + self, table: Union[TableClause, Table], rows: List[dict], **kw: Any + ) -> None: + if self.as_sql: + self._exec( + "SET IDENTITY_INSERT %s ON" + % self.dialect.identifier_preparer.format_table(table) + ) + super().bulk_insert(table, rows, **kw) + self._exec( + "SET IDENTITY_INSERT %s OFF" + % self.dialect.identifier_preparer.format_table(table) + ) + else: + super().bulk_insert(table, rows, **kw) + + def drop_column( + self, + table_name: str, + column: Column[Any], + schema: Optional[str] = None, + **kw, + ) -> None: + drop_default = kw.pop("mssql_drop_default", False) + if drop_default: + self._exec( + _ExecDropConstraint( + table_name, column, "sys.default_constraints", schema + ) + ) + drop_check = kw.pop("mssql_drop_check", False) + if drop_check: + self._exec( + _ExecDropConstraint( + table_name, column, "sys.check_constraints", schema + ) + ) + drop_fks = kw.pop("mssql_drop_foreign_key", False) + if drop_fks: + self._exec(_ExecDropFKConstraint(table_name, column, schema)) + super().drop_column(table_name, column, schema=schema, **kw) + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + if rendered_metadata_default is not None: + rendered_metadata_default = re.sub( + r"[\(\) \"\']", "", rendered_metadata_default + ) + + if rendered_inspector_default is not None: + # SQL Server collapses whitespace and adds arbitrary parenthesis + # within expressions. 
our only option is collapse all of it + + rendered_inspector_default = re.sub( + r"[\(\) \"\']", "", rendered_inspector_default + ) + + return rendered_inspector_default != rendered_metadata_default + + def _compare_identity_default(self, metadata_identity, inspector_identity): + diff, ignored, is_alter = super()._compare_identity_default( + metadata_identity, inspector_identity + ) + + if ( + metadata_identity is None + and inspector_identity is not None + and not diff + and inspector_identity.column is not None + and inspector_identity.column.primary_key + ): + # mssql reflect primary keys with autoincrement as identity + # columns. if no different attributes are present ignore them + is_alter = False + + return diff, ignored, is_alter + + def adjust_reflected_dialect_options( + self, reflected_object: Dict[str, Any], kind: str + ) -> Dict[str, Any]: + options: Dict[str, Any] + options = reflected_object.get("dialect_options", {}).copy() + if not options.get("mssql_include"): + options.pop("mssql_include", None) + if not options.get("mssql_clustered"): + options.pop("mssql_clustered", None) + return options + + +class _ExecDropConstraint(Executable, ClauseElement): + inherit_cache = False + + def __init__( + self, + tname: str, + colname: Union[Column[Any], str], + type_: str, + schema: Optional[str], + ) -> None: + self.tname = tname + self.colname = colname + self.type_ = type_ + self.schema = schema + + +class _ExecDropFKConstraint(Executable, ClauseElement): + inherit_cache = False + + def __init__( + self, tname: str, colname: Column[Any], schema: Optional[str] + ) -> None: + self.tname = tname + self.colname = colname + self.schema = schema + + +@compiles(_ExecDropConstraint, "mssql") +def _exec_drop_col_constraint( + element: _ExecDropConstraint, compiler: MSSQLCompiler, **kw +) -> str: + schema, tname, colname, type_ = ( + element.schema, + element.tname, + element.colname, + element.type_, + ) + # from http://www.mssqltips.com/sqlservertip/1425/\ + # working-with-default-constraints-in-sql-server/ + return """declare @const_name varchar(256) +select @const_name = QUOTENAME([name]) from %(type)s +where parent_object_id = object_id('%(schema_dot)s%(tname)s') +and col_name(parent_object_id, parent_column_id) = '%(colname)s' +exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { + "type": type_, + "tname": tname, + "colname": colname, + "tname_quoted": format_table_name(compiler, tname, schema), + "schema_dot": schema + "." if schema else "", + } + + +@compiles(_ExecDropFKConstraint, "mssql") +def _exec_drop_col_fk_constraint( + element: _ExecDropFKConstraint, compiler: MSSQLCompiler, **kw +) -> str: + schema, tname, colname = element.schema, element.tname, element.colname + + return """declare @const_name varchar(256) +select @const_name = QUOTENAME([name]) from +sys.foreign_keys fk join sys.foreign_key_columns fkc +on fk.object_id=fkc.constraint_object_id +where fkc.parent_object_id = object_id('%(schema_dot)s%(tname)s') +and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s' +exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { + "tname": tname, + "colname": colname, + "tname_quoted": format_table_name(compiler, tname, schema), + "schema_dot": schema + "." 
if schema else "", + } + + +@compiles(AddColumn, "mssql") +def visit_add_column(element: AddColumn, compiler: MSDDLCompiler, **kw) -> str: + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + mssql_add_column(compiler, element.column, **kw), + ) + + +def mssql_add_column( + compiler: MSDDLCompiler, column: Column[Any], **kw +) -> str: + return "ADD %s" % compiler.get_column_specification(column, **kw) + + +@compiles(ColumnNullable, "mssql") +def visit_column_nullable( + element: ColumnNullable, compiler: MSDDLCompiler, **kw +) -> str: + return "%s %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + format_type(compiler, element.existing_type), # type: ignore[arg-type] + "NULL" if element.nullable else "NOT NULL", + ) + + +@compiles(ColumnDefault, "mssql") +def visit_column_default( + element: ColumnDefault, compiler: MSDDLCompiler, **kw +) -> str: + # TODO: there can also be a named constraint + # with ADD CONSTRAINT here + return "%s ADD DEFAULT %s FOR %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_server_default(compiler, element.default), + format_column_name(compiler, element.column_name), + ) + + +@compiles(ColumnName, "mssql") +def visit_rename_column( + element: ColumnName, compiler: MSDDLCompiler, **kw +) -> str: + return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % ( + format_table_name(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + +@compiles(ColumnType, "mssql") +def visit_column_type( + element: ColumnType, compiler: MSDDLCompiler, **kw +) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + format_type(compiler, element.type_), + ) + + +@compiles(RenameTable, "mssql") +def visit_rename_table( + element: RenameTable, compiler: MSDDLCompiler, **kw +) -> str: + return "EXEC sp_rename '%s', %s" % ( + format_table_name(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None), + ) diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/mysql.py b/venv/lib/python3.12/site-packages/alembic/ddl/mysql.py new file mode 100644 index 0000000..f312173 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/mysql.py @@ -0,0 +1,474 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import re +from typing import Any +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import schema +from sqlalchemy import types as sqltypes + +from .base import alter_table +from .base import AlterColumn +from .base import ColumnDefault +from .base import ColumnName +from .base import ColumnNullable +from .base import ColumnType +from .base import format_column_name +from .base import format_server_default +from .impl import DefaultImpl +from .. 
import util +from ..util import sqla_compat +from ..util.sqla_compat import _is_mariadb +from ..util.sqla_compat import _is_type_bound +from ..util.sqla_compat import compiles + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy.dialects.mysql.base import MySQLDDLCompiler + from sqlalchemy.sql.ddl import DropConstraint + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.type_api import TypeEngine + + from .base import _ServerDefault + + +class MySQLImpl(DefaultImpl): + __dialect__ = "mysql" + + transactional_ddl = False + type_synonyms = DefaultImpl.type_synonyms + ( + {"BOOL", "TINYINT"}, + {"JSON", "LONGTEXT"}, + ) + type_arg_extract = [r"character set ([\w\-_]+)", r"collate ([\w\-_]+)"] + + def alter_column( # type:ignore[override] + self, + table_name: str, + column_name: str, + nullable: Optional[bool] = None, + server_default: Union[_ServerDefault, Literal[False]] = False, + name: Optional[str] = None, + type_: Optional[TypeEngine] = None, + schema: Optional[str] = None, + existing_type: Optional[TypeEngine] = None, + existing_server_default: Optional[_ServerDefault] = None, + existing_nullable: Optional[bool] = None, + autoincrement: Optional[bool] = None, + existing_autoincrement: Optional[bool] = None, + comment: Optional[Union[str, Literal[False]]] = False, + existing_comment: Optional[str] = None, + **kw: Any, + ) -> None: + if sqla_compat._server_default_is_identity( + server_default, existing_server_default + ) or sqla_compat._server_default_is_computed( + server_default, existing_server_default + ): + # modifying computed or identity columns is not supported + # the default will raise + super().alter_column( + table_name, + column_name, + nullable=nullable, + type_=type_, + schema=schema, + existing_type=existing_type, + existing_nullable=existing_nullable, + server_default=server_default, + existing_server_default=existing_server_default, + **kw, + ) + if name is not None or self._is_mysql_allowed_functional_default( + type_ if type_ is not None else existing_type, server_default + ): + self._exec( + MySQLChangeColumn( + table_name, + column_name, + schema=schema, + newname=name if name is not None else column_name, + nullable=nullable + if nullable is not None + else existing_nullable + if existing_nullable is not None + else True, + type_=type_ if type_ is not None else existing_type, + default=server_default + if server_default is not False + else existing_server_default, + autoincrement=autoincrement + if autoincrement is not None + else existing_autoincrement, + comment=comment + if comment is not False + else existing_comment, + ) + ) + elif ( + nullable is not None + or type_ is not None + or autoincrement is not None + or comment is not False + ): + self._exec( + MySQLModifyColumn( + table_name, + column_name, + schema=schema, + newname=name if name is not None else column_name, + nullable=nullable + if nullable is not None + else existing_nullable + if existing_nullable is not None + else True, + type_=type_ if type_ is not None else existing_type, + default=server_default + if server_default is not False + else existing_server_default, + autoincrement=autoincrement + if autoincrement is not None + else existing_autoincrement, + comment=comment + if comment is not False + else existing_comment, + ) + ) + elif server_default is not False: + self._exec( + MySQLAlterDefault( + table_name, column_name, server_default, schema=schema + ) + ) + + def drop_constraint( + self, + const: Constraint, + ) -> None: + if isinstance(const, 
schema.CheckConstraint) and _is_type_bound(const): + return + + super().drop_constraint(const) + + def _is_mysql_allowed_functional_default( + self, + type_: Optional[TypeEngine], + server_default: Union[_ServerDefault, Literal[False]], + ) -> bool: + return ( + type_ is not None + and type_._type_affinity is sqltypes.DateTime + and server_default is not None + ) + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + # partially a workaround for SQLAlchemy issue #3023; if the + # column were created without "NOT NULL", MySQL may have added + # an implicit default of '0' which we need to skip + # TODO: this is not really covered anymore ? + if ( + metadata_column.type._type_affinity is sqltypes.Integer + and inspector_column.primary_key + and not inspector_column.autoincrement + and not rendered_metadata_default + and rendered_inspector_default == "'0'" + ): + return False + elif ( + rendered_inspector_default + and inspector_column.type._type_affinity is sqltypes.Integer + ): + rendered_inspector_default = ( + re.sub(r"^'|'$", "", rendered_inspector_default) + if rendered_inspector_default is not None + else None + ) + return rendered_inspector_default != rendered_metadata_default + elif ( + rendered_metadata_default + and metadata_column.type._type_affinity is sqltypes.String + ): + metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default) + return rendered_inspector_default != f"'{metadata_default}'" + elif rendered_inspector_default and rendered_metadata_default: + # adjust for "function()" vs. "FUNCTION" as can occur particularly + # for the CURRENT_TIMESTAMP function on newer MariaDB versions + + # SQLAlchemy MySQL dialect bundles ON UPDATE into the server + # default; adjust for this possibly being present. + onupdate_ins = re.match( + r"(.*) (on update.*?)(?:\(\))?$", + rendered_inspector_default.lower(), + ) + onupdate_met = re.match( + r"(.*) (on update.*?)(?:\(\))?$", + rendered_metadata_default.lower(), + ) + + if onupdate_ins: + if not onupdate_met: + return True + elif onupdate_ins.group(2) != onupdate_met.group(2): + return True + + rendered_inspector_default = onupdate_ins.group(1) + rendered_metadata_default = onupdate_met.group(1) + + return re.sub( + r"(.*?)(?:\(\))?$", r"\1", rendered_inspector_default.lower() + ) != re.sub( + r"(.*?)(?:\(\))?$", r"\1", rendered_metadata_default.lower() + ) + else: + return rendered_inspector_default != rendered_metadata_default + + def correct_for_autogen_constraints( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + # TODO: if SQLA 1.0, make use of "duplicates_index" + # metadata + removed = set() + for idx in list(conn_indexes): + if idx.unique: + continue + # MySQL puts implicit indexes on FK columns, even if + # composite and even if MyISAM, so can't check this too easily. + # the name of the index may be the column name or it may + # be the name of the FK constraint. 
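# Illustrative aside (not from the upstream source): given a model like
#   Table("child", m, sa.Column("pid", sa.Integer, sa.ForeignKey("parent.id")))
# MySQL reflection can report an implicit index whose name is either the
# column name ("pid") or the FK constraint's own name; the loop below
# matches both spellings and discards such indexes so autogenerate does
# not emit spurious drop_index operations for them.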
+ for col in idx.columns: + if idx.name == col.name: + conn_indexes.remove(idx) + removed.add(idx.name) + break + for fk in col.foreign_keys: + if fk.name == idx.name: + conn_indexes.remove(idx) + removed.add(idx.name) + break + if idx.name in removed: + break + + # then remove indexes from the "metadata_indexes" + # that we've removed from reflected, otherwise they come out + # as adds (see #202) + for idx in list(metadata_indexes): + if idx.name in removed: + metadata_indexes.remove(idx) + + def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks): + conn_fk_by_sig = { + self._create_reflected_constraint_sig(fk).unnamed_no_options: fk + for fk in conn_fks + } + metadata_fk_by_sig = { + self._create_metadata_constraint_sig(fk).unnamed_no_options: fk + for fk in metadata_fks + } + + for sig in set(conn_fk_by_sig).intersection(metadata_fk_by_sig): + mdfk = metadata_fk_by_sig[sig] + cnfk = conn_fk_by_sig[sig] + # MySQL considers RESTRICT to be the default and doesn't + # report on it. if the model has explicit RESTRICT and + # the conn FK has None, set it to RESTRICT + if ( + mdfk.ondelete is not None + and mdfk.ondelete.lower() == "restrict" + and cnfk.ondelete is None + ): + cnfk.ondelete = "RESTRICT" + if ( + mdfk.onupdate is not None + and mdfk.onupdate.lower() == "restrict" + and cnfk.onupdate is None + ): + cnfk.onupdate = "RESTRICT" + + +class MariaDBImpl(MySQLImpl): + __dialect__ = "mariadb" + + +class MySQLAlterDefault(AlterColumn): + def __init__( + self, + name: str, + column_name: str, + default: _ServerDefault, + schema: Optional[str] = None, + ) -> None: + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.default = default + + +class MySQLChangeColumn(AlterColumn): + def __init__( + self, + name: str, + column_name: str, + schema: Optional[str] = None, + newname: Optional[str] = None, + type_: Optional[TypeEngine] = None, + nullable: Optional[bool] = None, + default: Optional[Union[_ServerDefault, Literal[False]]] = False, + autoincrement: Optional[bool] = None, + comment: Optional[Union[str, Literal[False]]] = False, + ) -> None: + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.nullable = nullable + self.newname = newname + self.default = default + self.autoincrement = autoincrement + self.comment = comment + if type_ is None: + raise util.CommandError( + "All MySQL CHANGE/MODIFY COLUMN operations " + "require the existing type." 
+ ) + + self.type_ = sqltypes.to_instance(type_) + + +class MySQLModifyColumn(MySQLChangeColumn): + pass + + +@compiles(ColumnNullable, "mysql", "mariadb") +@compiles(ColumnName, "mysql", "mariadb") +@compiles(ColumnDefault, "mysql", "mariadb") +@compiles(ColumnType, "mysql", "mariadb") +def _mysql_doesnt_support_individual(element, compiler, **kw): + raise NotImplementedError( + "Individual alter column constructs not supported by MySQL" + ) + + +@compiles(MySQLAlterDefault, "mysql", "mariadb") +def _mysql_alter_default( + element: MySQLAlterDefault, compiler: MySQLDDLCompiler, **kw +) -> str: + return "%s ALTER COLUMN %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + "SET DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DROP DEFAULT", + ) + + +@compiles(MySQLModifyColumn, "mysql", "mariadb") +def _mysql_modify_column( + element: MySQLModifyColumn, compiler: MySQLDDLCompiler, **kw +) -> str: + return "%s MODIFY %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + _mysql_colspec( + compiler, + nullable=element.nullable, + server_default=element.default, + type_=element.type_, + autoincrement=element.autoincrement, + comment=element.comment, + ), + ) + + +@compiles(MySQLChangeColumn, "mysql", "mariadb") +def _mysql_change_column( + element: MySQLChangeColumn, compiler: MySQLDDLCompiler, **kw +) -> str: + return "%s CHANGE %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + _mysql_colspec( + compiler, + nullable=element.nullable, + server_default=element.default, + type_=element.type_, + autoincrement=element.autoincrement, + comment=element.comment, + ), + ) + + +def _mysql_colspec( + compiler: MySQLDDLCompiler, + nullable: Optional[bool], + server_default: Optional[Union[_ServerDefault, Literal[False]]], + type_: TypeEngine, + autoincrement: Optional[bool], + comment: Optional[Union[str, Literal[False]]], +) -> str: + spec = "%s %s" % ( + compiler.dialect.type_compiler.process(type_), + "NULL" if nullable else "NOT NULL", + ) + if autoincrement: + spec += " AUTO_INCREMENT" + if server_default is not False and server_default is not None: + spec += " DEFAULT %s" % format_server_default(compiler, server_default) + if comment: + spec += " COMMENT %s" % compiler.sql_compiler.render_literal_value( + comment, sqltypes.String() + ) + + return spec + + +@compiles(schema.DropConstraint, "mysql", "mariadb") +def _mysql_drop_constraint( + element: DropConstraint, compiler: MySQLDDLCompiler, **kw +) -> str: + """Redefine SQLAlchemy's drop constraint to + raise errors for invalid constraint type.""" + + constraint = element.element + if isinstance( + constraint, + ( + schema.ForeignKeyConstraint, + schema.PrimaryKeyConstraint, + schema.UniqueConstraint, + ), + ): + assert not kw + return compiler.visit_drop_constraint(element) + elif isinstance(constraint, schema.CheckConstraint): + # note that SQLAlchemy as of 1.2 does not yet support + # DROP CONSTRAINT for MySQL/MariaDB, so we implement fully + # here. 
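# Illustrative aside (not from the upstream source): for a CHECK
# constraint named "ck_t_positive" on table "t", the two branches below
# render, respectively:
#   MariaDB: ALTER TABLE t DROP CONSTRAINT ck_t_positive
#   MySQL:   ALTER TABLE t DROP CHECK ck_t_positive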
+ if _is_mariadb(compiler.dialect): + return "ALTER TABLE %s DROP CONSTRAINT %s" % ( + compiler.preparer.format_table(constraint.table), + compiler.preparer.format_constraint(constraint), + ) + else: + return "ALTER TABLE %s DROP CHECK %s" % ( + compiler.preparer.format_table(constraint.table), + compiler.preparer.format_constraint(constraint), + ) + else: + raise NotImplementedError( + "No generic 'DROP CONSTRAINT' in MySQL - " + "please specify constraint type" + ) diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/oracle.py b/venv/lib/python3.12/site-packages/alembic/ddl/oracle.py new file mode 100644 index 0000000..5401174 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/oracle.py @@ -0,0 +1,200 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import re +from typing import Any +from typing import Optional +from typing import TYPE_CHECKING + +from sqlalchemy.sql import sqltypes + +from .base import AddColumn +from .base import alter_table +from .base import ColumnComment +from .base import ColumnDefault +from .base import ColumnName +from .base import ColumnNullable +from .base import ColumnType +from .base import format_column_name +from .base import format_server_default +from .base import format_table_name +from .base import format_type +from .base import IdentityColumnDefault +from .base import RenameTable +from .impl import DefaultImpl +from ..util.sqla_compat import compiles + +if TYPE_CHECKING: + from sqlalchemy.dialects.oracle.base import OracleDDLCompiler + from sqlalchemy.engine.cursor import CursorResult + from sqlalchemy.sql.schema import Column + + +class OracleImpl(DefaultImpl): + __dialect__ = "oracle" + transactional_ddl = False + batch_separator = "/" + command_terminator = "" + type_synonyms = DefaultImpl.type_synonyms + ( + {"VARCHAR", "VARCHAR2"}, + {"BIGINT", "INTEGER", "SMALLINT", "DECIMAL", "NUMERIC", "NUMBER"}, + {"DOUBLE", "FLOAT", "DOUBLE_PRECISION"}, + ) + identity_attrs_ignore = () + + def __init__(self, *arg, **kw) -> None: + super().__init__(*arg, **kw) + self.batch_separator = self.context_opts.get( + "oracle_batch_separator", self.batch_separator + ) + + def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]: + result = super()._exec(construct, *args, **kw) + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + return result + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + if rendered_metadata_default is not None: + rendered_metadata_default = re.sub( + r"^\((.+)\)$", r"\1", rendered_metadata_default + ) + + rendered_metadata_default = re.sub( + r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default + ) + + if rendered_inspector_default is not None: + rendered_inspector_default = re.sub( + r"^\((.+)\)$", r"\1", rendered_inspector_default + ) + + rendered_inspector_default = re.sub( + r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default + ) + + rendered_inspector_default = rendered_inspector_default.strip() + return rendered_inspector_default != rendered_metadata_default + + def emit_begin(self) -> None: + self._exec("SET TRANSACTION READ WRITE") + + def emit_commit(self) -> None: + self._exec("COMMIT") + + +@compiles(AddColumn, "oracle") +def visit_add_column( + element: AddColumn, compiler: OracleDDLCompiler, **kw +) -> str: + return "%s %s" % ( + alter_table(compiler, 
element.table_name, element.schema), + add_column(compiler, element.column, **kw), + ) + + +@compiles(ColumnNullable, "oracle") +def visit_column_nullable( + element: ColumnNullable, compiler: OracleDDLCompiler, **kw +) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "NULL" if element.nullable else "NOT NULL", + ) + + +@compiles(ColumnType, "oracle") +def visit_column_type( + element: ColumnType, compiler: OracleDDLCompiler, **kw +) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "%s" % format_type(compiler, element.type_), + ) + + +@compiles(ColumnName, "oracle") +def visit_column_name( + element: ColumnName, compiler: OracleDDLCompiler, **kw +) -> str: + return "%s RENAME COLUMN %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + +@compiles(ColumnDefault, "oracle") +def visit_column_default( + element: ColumnDefault, compiler: OracleDDLCompiler, **kw +) -> str: + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DEFAULT NULL", + ) + + +@compiles(ColumnComment, "oracle") +def visit_column_comment( + element: ColumnComment, compiler: OracleDDLCompiler, **kw +) -> str: + ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" + + comment = compiler.sql_compiler.render_literal_value( + (element.comment if element.comment is not None else ""), + sqltypes.String(), + ) + + return ddl.format( + table_name=element.table_name, + column_name=element.column_name, + comment=comment, + ) + + +@compiles(RenameTable, "oracle") +def visit_rename_table( + element: RenameTable, compiler: OracleDDLCompiler, **kw +) -> str: + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None), + ) + + +def alter_column(compiler: OracleDDLCompiler, name: str) -> str: + return "MODIFY %s" % format_column_name(compiler, name) + + +def add_column(compiler: OracleDDLCompiler, column: Column[Any], **kw) -> str: + return "ADD %s" % compiler.get_column_specification(column, **kw) + + +@compiles(IdentityColumnDefault, "oracle") +def visit_identity_column( + element: IdentityColumnDefault, compiler: OracleDDLCompiler, **kw +): + text = "%s %s " % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + ) + if element.default is None: + # drop identity + text += "DROP IDENTITY" + return text + else: + text += compiler.visit_identity_column(element.default) + return text diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/postgresql.py b/venv/lib/python3.12/site-packages/alembic/ddl/postgresql.py new file mode 100644 index 0000000..6507fcb --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/postgresql.py @@ -0,0 +1,848 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import logging +import re +from typing import Any +from typing import cast +from typing import Dict +from typing import List +from typing import Optional +from typing import 
Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import Column +from sqlalchemy import literal_column +from sqlalchemy import Numeric +from sqlalchemy import text +from sqlalchemy import types as sqltypes +from sqlalchemy.dialects.postgresql import BIGINT +from sqlalchemy.dialects.postgresql import ExcludeConstraint +from sqlalchemy.dialects.postgresql import INTEGER +from sqlalchemy.schema import CreateIndex +from sqlalchemy.sql.elements import ColumnClause +from sqlalchemy.sql.elements import TextClause +from sqlalchemy.sql.functions import FunctionElement +from sqlalchemy.types import NULLTYPE + +from .base import alter_column +from .base import alter_table +from .base import AlterColumn +from .base import ColumnComment +from .base import format_column_name +from .base import format_table_name +from .base import format_type +from .base import IdentityColumnDefault +from .base import RenameTable +from .impl import ComparisonResult +from .impl import DefaultImpl +from .. import util +from ..autogenerate import render +from ..operations import ops +from ..operations import schemaobj +from ..operations.base import BatchOperations +from ..operations.base import Operations +from ..util import sqla_compat +from ..util.sqla_compat import compiles + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy import Index + from sqlalchemy import UniqueConstraint + from sqlalchemy.dialects.postgresql.array import ARRAY + from sqlalchemy.dialects.postgresql.base import PGDDLCompiler + from sqlalchemy.dialects.postgresql.hstore import HSTORE + from sqlalchemy.dialects.postgresql.json import JSON + from sqlalchemy.dialects.postgresql.json import JSONB + from sqlalchemy.sql.elements import ClauseElement + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.type_api import TypeEngine + + from .base import _ServerDefault + from ..autogenerate.api import AutogenContext + from ..autogenerate.render import _f_name + from ..runtime.migration import MigrationContext + + +log = logging.getLogger(__name__) + + +class PostgresqlImpl(DefaultImpl): + __dialect__ = "postgresql" + transactional_ddl = True + type_synonyms = DefaultImpl.type_synonyms + ( + {"FLOAT", "DOUBLE PRECISION"}, + ) + + def create_index(self, index: Index, **kw: Any) -> None: + # this likely defaults to None if not present, so get() + # should normally not return the default value. 
being + # defensive in any case + postgresql_include = index.kwargs.get("postgresql_include", None) or () + for col in postgresql_include: + if col not in index.table.c: # type: ignore[union-attr] + index.table.append_column( # type: ignore[union-attr] + Column(col, sqltypes.NullType) + ) + self._exec(CreateIndex(index, **kw)) + + def prep_table_for_batch(self, batch_impl, table): + for constraint in table.constraints: + if ( + constraint.name is not None + and constraint.name in batch_impl.named_constraints + ): + self.drop_constraint(constraint) + + def compare_server_default( + self, + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default, + ): + # don't do defaults for SERIAL columns + if ( + metadata_column.primary_key + and metadata_column is metadata_column.table._autoincrement_column + ): + return False + + conn_col_default = rendered_inspector_default + + defaults_equal = conn_col_default == rendered_metadata_default + if defaults_equal: + return False + + if None in ( + conn_col_default, + rendered_metadata_default, + metadata_column.server_default, + ): + return not defaults_equal + + metadata_default = metadata_column.server_default.arg + + if isinstance(metadata_default, str): + if not isinstance(inspector_column.type, Numeric): + metadata_default = re.sub(r"^'|'$", "", metadata_default) + metadata_default = f"'{metadata_default}'" + + metadata_default = literal_column(metadata_default) + + # run a real compare against the server + conn = self.connection + assert conn is not None + return not conn.scalar( + sqla_compat._select( + literal_column(conn_col_default) == metadata_default + ) + ) + + def alter_column( # type:ignore[override] + self, + table_name: str, + column_name: str, + nullable: Optional[bool] = None, + server_default: Union[_ServerDefault, Literal[False]] = False, + name: Optional[str] = None, + type_: Optional[TypeEngine] = None, + schema: Optional[str] = None, + autoincrement: Optional[bool] = None, + existing_type: Optional[TypeEngine] = None, + existing_server_default: Optional[_ServerDefault] = None, + existing_nullable: Optional[bool] = None, + existing_autoincrement: Optional[bool] = None, + **kw: Any, + ) -> None: + using = kw.pop("postgresql_using", None) + + if using is not None and type_ is None: + raise util.CommandError( + "postgresql_using must be used with the type_ parameter" + ) + + if type_ is not None: + self._exec( + PostgresqlColumnType( + table_name, + column_name, + type_, + schema=schema, + using=using, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + ) + ) + + super().alter_column( + table_name, + column_name, + nullable=nullable, + server_default=server_default, + name=name, + schema=schema, + autoincrement=autoincrement, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_autoincrement=existing_autoincrement, + **kw, + ) + + def autogen_column_reflect(self, inspector, table, column_info): + if column_info.get("default") and isinstance( + column_info["type"], (INTEGER, BIGINT) + ): + seq_match = re.match( + r"nextval\('(.+?)'::regclass\)", column_info["default"] + ) + if seq_match: + info = sqla_compat._exec_on_inspector( + inspector, + text( + "select c.relname, a.attname " + "from pg_class as c join " + "pg_depend d on d.objid=c.oid and " + "d.classid='pg_class'::regclass and " + "d.refclassid='pg_class'::regclass " + "join pg_class t on 
t.oid=d.refobjid " + "join pg_attribute a on a.attrelid=t.oid and " + "a.attnum=d.refobjsubid " + "where c.relkind='S' and c.relname=:seqname" + ), + seqname=seq_match.group(1), + ).first() + if info: + seqname, colname = info + if colname == column_info["name"]: + log.info( + "Detected sequence named '%s' as " + "owned by integer column '%s(%s)', " + "assuming SERIAL and omitting", + seqname, + table.name, + colname, + ) + # sequence, and the owner is this column, + # it's a SERIAL - whack it! + del column_info["default"] + + def correct_for_autogen_constraints( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + doubled_constraints = { + index + for index in conn_indexes + if index.info.get("duplicates_constraint") + } + + for ix in doubled_constraints: + conn_indexes.remove(ix) + + if not sqla_compat.sqla_2: + self._skip_functional_indexes(metadata_indexes, conn_indexes) + + # pg behavior regarding modifiers + # | # | compiled sql | returned sql | regexp. group is removed | + # | - | ---------------- | -----------------| ------------------------ | + # | 1 | nulls first | nulls first | - | + # | 2 | nulls last | | (?<! desc)( nulls last)$ | + # | 3 | asc | | ( asc)$ | + # | 4 | asc nulls first | nulls first | ( asc) nulls first$ | + # | 5 | asc nulls last | | ( asc nulls last)$ | + # | 6 | desc | desc | - | + # | 7 | desc nulls first | desc | desc( nulls first)$ | + # | 8 | desc nulls last | desc nulls last | - | + _default_modifiers_re = ( + # order of case 2 and 5 matters + re.compile("( asc nulls last)$"), # case 5 + re.compile("(?<! desc)( nulls last)$"), # case 2 + re.compile("( asc)$"), # case 3 + re.compile("( asc) nulls first$"), # case 4 + re.compile(" desc( nulls first)$"), # case 7 + ) + + def _cleanup_index_expr(self, index: Index, expr: str) -> str: + expr = expr.lower().replace('"', "").replace("'", "") + if index.table is not None: + # should not be needed, since include_table=False is in compile + expr = expr.replace(f"{index.table.name.lower()}.", "") + + if "::" in expr: + # strip :: cast. types can have spaces in them + expr = re.sub(r"(::[\w ]+\w)", "", expr) + + while expr and expr[0] == "(" and expr[-1] == ")": + expr = expr[1:-1] + + # NOTE: when parsing the connection expression this cleanup could + # be skipped + for rs in self._default_modifiers_re: + if match := rs.search(expr): + start, end = match.span(1) + expr = expr[:start] + expr[end:] + break + + while expr and expr[0] == "(" and expr[-1] == ")": + expr = expr[1:-1] + + # strip casts + cast_re = re.compile(r"cast\s*\(") + if cast_re.match(expr): + expr = cast_re.sub("", expr) + # remove the as type + expr = re.sub(r"as\s+[^)]+\)", "", expr) + # remove spaces + expr = expr.replace(" ", "") + return expr + + def _dialect_options( + self, item: Union[Index, UniqueConstraint] + ) -> Tuple[Any, ...]: + # only the positive case is returned by sqlalchemy reflection so + # None and False are treated the same + if item.dialect_kwargs.get("postgresql_nulls_not_distinct"): + return ("nulls_not_distinct",) + return () + + def compare_indexes( + self, + metadata_index: Index, + reflected_index: Index, + ) -> ComparisonResult: + msg = [] + unique_msg = self._compare_index_unique( + metadata_index, reflected_index + ) + if unique_msg: + msg.append(unique_msg) + m_exprs = metadata_index.expressions + r_exprs = reflected_index.expressions + if len(m_exprs) != len(r_exprs): + msg.append(f"expression number {len(r_exprs)} to {len(m_exprs)}") + if msg: + # no point going further, return early + return ComparisonResult.Different(msg) + skip = [] + for pos, (m_e, r_e) in enumerate(zip(m_exprs, r_exprs), 1): + m_compile = self._compile_element(m_e) + m_text = self._cleanup_index_expr(metadata_index, m_compile) + # print(f"META ORIG: {m_compile!r} CLEANUP: {m_text!r}") + r_compile = self._compile_element(r_e) + r_text = self._cleanup_index_expr(metadata_index, r_compile) + # print(f"CONN ORIG: {r_compile!r} CLEANUP: {r_text!r}") + if m_text == r_text: + continue # these expressions are equal + elif m_compile.strip().endswith("_ops") and ( + " " in m_compile or ")" in m_compile # is an expression + ): +
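# Illustrative aside (not from the upstream source): a metadata
# expression compiled as "lower(name) varchar_pattern_ops" names a
# PostgreSQL operator class; reflection never returns the opclass, so a
# textual compare would flag a difference on every run. The expression
# is therefore skipped with a warning instead of being reported.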
skip.append( + f"expression #{pos} {m_compile!r} detected " + "as including operator clause." + ) + util.warn( + f"Expression #{pos} {m_compile!r} in index " + f"{reflected_index.name!r} detected to include " + "an operator clause. Expression compare cannot proceed. " + "Please move the operator clause to the " + "``postgresql_ops`` dict to enable proper compare " + "of the index expressions: " + "https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#operator-classes", # noqa: E501 + ) + else: + msg.append(f"expression #{pos} {r_compile!r} to {m_compile!r}") + + m_options = self._dialect_options(metadata_index) + r_options = self._dialect_options(reflected_index) + if m_options != r_options: + msg.extend(f"options {r_options} to {m_options}") + + if msg: + return ComparisonResult.Different(msg) + elif skip: + # if there are other changes detected don't skip the index + return ComparisonResult.Skip(skip) + else: + return ComparisonResult.Equal() + + def compare_unique_constraint( + self, + metadata_constraint: UniqueConstraint, + reflected_constraint: UniqueConstraint, + ) -> ComparisonResult: + metadata_tup = self._create_metadata_constraint_sig( + metadata_constraint + ) + reflected_tup = self._create_reflected_constraint_sig( + reflected_constraint + ) + + meta_sig = metadata_tup.unnamed + conn_sig = reflected_tup.unnamed + if conn_sig != meta_sig: + return ComparisonResult.Different( + f"expression {conn_sig} to {meta_sig}" + ) + + metadata_do = self._dialect_options(metadata_tup.const) + conn_do = self._dialect_options(reflected_tup.const) + if metadata_do != conn_do: + return ComparisonResult.Different( + f"expression {conn_do} to {metadata_do}" + ) + + return ComparisonResult.Equal() + + def adjust_reflected_dialect_options( + self, reflected_options: Dict[str, Any], kind: str + ) -> Dict[str, Any]: + options: Dict[str, Any] + options = reflected_options.get("dialect_options", {}).copy() + if not options.get("postgresql_include"): + options.pop("postgresql_include", None) + return options + + def _compile_element(self, element: Union[ClauseElement, str]) -> str: + if isinstance(element, str): + return element + return element.compile( + dialect=self.dialect, + compile_kwargs={"literal_binds": True, "include_table": False}, + ).string + + def render_ddl_sql_expr( + self, + expr: ClauseElement, + is_server_default: bool = False, + is_index: bool = False, + **kw: Any, + ) -> str: + """Render a SQL expression that is typically a server default, + index expression, etc. 
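For example (an illustrative note, not upstream wording): an
expression-based index element is wrapped in its own parenthesized
group via ``self_group()`` below, so the rendered DDL matches how the
PostgreSQL dialect itself compiles CREATE INDEX expressions.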
+ + """ + + # apply self_group to index expressions; + # see https://github.com/sqlalchemy/sqlalchemy/blob/ + # 82fa95cfce070fab401d020c6e6e4a6a96cc2578/ + # lib/sqlalchemy/dialects/postgresql/base.py#L2261 + if is_index and not isinstance(expr, ColumnClause): + expr = expr.self_group() + + return super().render_ddl_sql_expr( + expr, is_server_default=is_server_default, is_index=is_index, **kw + ) + + def render_type( + self, type_: TypeEngine, autogen_context: AutogenContext + ) -> Union[str, Literal[False]]: + mod = type(type_).__module__ + if not mod.startswith("sqlalchemy.dialects.postgresql"): + return False + + if hasattr(self, "_render_%s_type" % type_.__visit_name__): + meth = getattr(self, "_render_%s_type" % type_.__visit_name__) + return meth(type_, autogen_context) + + return False + + def _render_HSTORE_type( + self, type_: HSTORE, autogen_context: AutogenContext + ) -> str: + return cast( + str, + render._render_type_w_subtype( + type_, autogen_context, "text_type", r"(.+?\(.*text_type=)" + ), + ) + + def _render_ARRAY_type( + self, type_: ARRAY, autogen_context: AutogenContext + ) -> str: + return cast( + str, + render._render_type_w_subtype( + type_, autogen_context, "item_type", r"(.+?\()" + ), + ) + + def _render_JSON_type( + self, type_: JSON, autogen_context: AutogenContext + ) -> str: + return cast( + str, + render._render_type_w_subtype( + type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)" + ), + ) + + def _render_JSONB_type( + self, type_: JSONB, autogen_context: AutogenContext + ) -> str: + return cast( + str, + render._render_type_w_subtype( + type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)" + ), + ) + + +class PostgresqlColumnType(AlterColumn): + def __init__( + self, name: str, column_name: str, type_: TypeEngine, **kw + ) -> None: + using = kw.pop("using", None) + super().__init__(name, column_name, **kw) + self.type_ = sqltypes.to_instance(type_) + self.using = using + + +@compiles(RenameTable, "postgresql") +def visit_rename_table( + element: RenameTable, compiler: PGDDLCompiler, **kw +) -> str: + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None), + ) + + +@compiles(PostgresqlColumnType, "postgresql") +def visit_column_type( + element: PostgresqlColumnType, compiler: PGDDLCompiler, **kw +) -> str: + return "%s %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "TYPE %s" % format_type(compiler, element.type_), + "USING %s" % element.using if element.using else "", + ) + + +@compiles(ColumnComment, "postgresql") +def visit_column_comment( + element: ColumnComment, compiler: PGDDLCompiler, **kw +) -> str: + ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" + comment = ( + compiler.sql_compiler.render_literal_value( + element.comment, sqltypes.String() + ) + if element.comment is not None + else "NULL" + ) + + return ddl.format( + table_name=format_table_name( + compiler, element.table_name, element.schema + ), + column_name=format_column_name(compiler, element.column_name), + comment=comment, + ) + + +@compiles(IdentityColumnDefault, "postgresql") +def visit_identity_column( + element: IdentityColumnDefault, compiler: PGDDLCompiler, **kw +): + text = "%s %s " % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + ) + if element.default is None: + # drop identity + text += "DROP IDENTITY" + 
return text + elif element.existing_server_default is None: + # add identity options + text += "ADD " + text += compiler.visit_identity_column(element.default) + return text + else: + # alter identity + diff, _, _ = element.impl._compare_identity_default( + element.default, element.existing_server_default + ) + identity = element.default + for attr in sorted(diff): + if attr == "always": + text += "SET GENERATED %s " % ( + "ALWAYS" if identity.always else "BY DEFAULT" + ) + else: + text += "SET %s " % compiler.get_identity_options( + sqla_compat.Identity(**{attr: getattr(identity, attr)}) + ) + return text + + +@Operations.register_operation("create_exclude_constraint") +@BatchOperations.register_operation( + "create_exclude_constraint", "batch_create_exclude_constraint" +) +@ops.AddConstraintOp.register_add_constraint("exclude_constraint") +class CreateExcludeConstraintOp(ops.AddConstraintOp): + """Represent a create exclude constraint operation.""" + + constraint_type = "exclude" + + def __init__( + self, + constraint_name: sqla_compat._ConstraintName, + table_name: Union[str, quoted_name], + elements: Union[ + Sequence[Tuple[str, str]], + Sequence[Tuple[ColumnClause[Any], str]], + ], + where: Optional[Union[ColumnElement[bool], str]] = None, + schema: Optional[str] = None, + _orig_constraint: Optional[ExcludeConstraint] = None, + **kw, + ) -> None: + self.constraint_name = constraint_name + self.table_name = table_name + self.elements = elements + self.where = where + self.schema = schema + self._orig_constraint = _orig_constraint + self.kw = kw + + @classmethod + def from_constraint( # type:ignore[override] + cls, constraint: ExcludeConstraint + ) -> CreateExcludeConstraintOp: + constraint_table = sqla_compat._table_for_constraint(constraint) + return cls( + constraint.name, + constraint_table.name, + [ # type: ignore + (expr, op) for expr, name, op in constraint._render_exprs + ], + where=cast("ColumnElement[bool] | None", constraint.where), + schema=constraint_table.schema, + _orig_constraint=constraint, + deferrable=constraint.deferrable, + initially=constraint.initially, + using=constraint.using, + ) + + def to_constraint( + self, migration_context: Optional[MigrationContext] = None + ) -> ExcludeConstraint: + if self._orig_constraint is not None: + return self._orig_constraint + schema_obj = schemaobj.SchemaObjects(migration_context) + t = schema_obj.table(self.table_name, schema=self.schema) + excl = ExcludeConstraint( + *self.elements, + name=self.constraint_name, + where=self.where, + **self.kw, + ) + for ( + expr, + name, + oper, + ) in excl._render_exprs: + t.append_column(Column(name, NULLTYPE)) + t.append_constraint(excl) + return excl + + @classmethod + def create_exclude_constraint( + cls, + operations: Operations, + constraint_name: str, + table_name: str, + *elements: Any, + **kw: Any, + ) -> Optional[Table]: + """Issue an alter to create an EXCLUDE constraint using the + current migration context. + + .. note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + e.g.:: + + from alembic import op + + op.create_exclude_constraint( + "user_excl", + "user", + ("period", "&&"), + ("group", "="), + where=("group != 'some group'"), + ) + + Note that the expressions work the same way as that of + the ``ExcludeConstraint`` object itself; if plain strings are + passed, quoting rules must be applied manually. + + :param name: Name of the constraint. + :param table_name: String name of the source table. 
+ :param elements: exclude conditions. + :param where: SQL expression or SQL string with optional WHERE + clause. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. + + """ + op = cls(constraint_name, table_name, elements, **kw) + return operations.invoke(op) + + @classmethod + def batch_create_exclude_constraint( + cls, + operations: BatchOperations, + constraint_name: str, + *elements: Any, + **kw: Any, + ) -> Optional[Table]: + """Issue a "create exclude constraint" instruction using the + current batch migration context. + + .. note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + .. seealso:: + + :meth:`.Operations.create_exclude_constraint` + + """ + kw["schema"] = operations.impl.schema + op = cls(constraint_name, operations.impl.table_name, elements, **kw) + return operations.invoke(op) + + +@render.renderers.dispatch_for(CreateExcludeConstraintOp) +def _add_exclude_constraint( + autogen_context: AutogenContext, op: CreateExcludeConstraintOp +) -> str: + return _exclude_constraint(op.to_constraint(), autogen_context, alter=True) + + +@render._constraint_renderers.dispatch_for(ExcludeConstraint) +def _render_inline_exclude_constraint( + constraint: ExcludeConstraint, + autogen_context: AutogenContext, + namespace_metadata: MetaData, +) -> str: + rendered = render._user_defined_render( + "exclude", constraint, autogen_context + ) + if rendered is not False: + return rendered + + return _exclude_constraint(constraint, autogen_context, False) + + +def _postgresql_autogenerate_prefix(autogen_context: AutogenContext) -> str: + imports = autogen_context.imports + if imports is not None: + imports.add("from sqlalchemy.dialects import postgresql") + return "postgresql." 
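# Illustrative sketch (hypothetical names, not part of the upstream
# module): in rendered autogenerate output the prefix helper above makes
# PostgreSQL-only constructs resolve through the dialect module, e.g.
#
#   from sqlalchemy.dialects import postgresql  # added via autogen imports
#   ...
#   postgresql.ExcludeConstraint(
#       (sa.column("room"), "="),
#       (sa.column("during"), "&&"),
#       using="gist",
#       name="room_booking_excl",
#   )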
+ + +def _exclude_constraint( + constraint: ExcludeConstraint, + autogen_context: AutogenContext, + alter: bool, +) -> str: + opts: List[Tuple[str, Union[quoted_name, str, _f_name, None]]] = [] + + has_batch = autogen_context._has_batch + + if constraint.deferrable: + opts.append(("deferrable", str(constraint.deferrable))) + if constraint.initially: + opts.append(("initially", str(constraint.initially))) + if constraint.using: + opts.append(("using", str(constraint.using))) + if not has_batch and alter and constraint.table.schema: + opts.append(("schema", render._ident(constraint.table.schema))) + if not alter and constraint.name: + opts.append( + ("name", render._render_gen_name(autogen_context, constraint.name)) + ) + + def do_expr_where_opts(): + args = [ + "(%s, %r)" + % ( + _render_potential_column( + sqltext, # type:ignore[arg-type] + autogen_context, + ), + opstring, + ) + for sqltext, name, opstring in constraint._render_exprs + ] + if constraint.where is not None: + args.append( + "where=%s" + % render._render_potential_expr( + constraint.where, autogen_context + ) + ) + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return args + + if alter: + args = [ + repr(render._render_gen_name(autogen_context, constraint.name)) + ] + if not has_batch: + args += [repr(render._ident(constraint.table.name))] + args.extend(do_expr_where_opts()) + return "%(prefix)screate_exclude_constraint(%(args)s)" % { + "prefix": render._alembic_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + else: + args = do_expr_where_opts() + return "%(prefix)sExcludeConstraint(%(args)s)" % { + "prefix": _postgresql_autogenerate_prefix(autogen_context), + "args": ", ".join(args), + } + + +def _render_potential_column( + value: Union[ + ColumnClause[Any], Column[Any], TextClause, FunctionElement[Any] + ], + autogen_context: AutogenContext, +) -> str: + if isinstance(value, ColumnClause): + if value.is_literal: + # like literal_column("int8range(from, to)") in ExcludeConstraint + template = "%(prefix)sliteral_column(%(name)r)" + else: + template = "%(prefix)scolumn(%(name)r)" + + return template % { + "prefix": render._sqlalchemy_autogenerate_prefix(autogen_context), + "name": value.name, + } + else: + return render._render_potential_expr( + value, + autogen_context, + wrap_in_text=isinstance(value, (TextClause, FunctionElement)), + ) diff --git a/venv/lib/python3.12/site-packages/alembic/ddl/sqlite.py b/venv/lib/python3.12/site-packages/alembic/ddl/sqlite.py new file mode 100644 index 0000000..762e8ca --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/ddl/sqlite.py @@ -0,0 +1,225 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import re +from typing import Any +from typing import Dict +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import cast +from sqlalchemy import JSON +from sqlalchemy import schema +from sqlalchemy import sql + +from .base import alter_table +from .base import format_table_name +from .base import RenameTable +from .impl import DefaultImpl +from .. 
import util +from ..util.sqla_compat import compiles + +if TYPE_CHECKING: + from sqlalchemy.engine.reflection import Inspector + from sqlalchemy.sql.compiler import DDLCompiler + from sqlalchemy.sql.elements import Cast + from sqlalchemy.sql.elements import ClauseElement + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.type_api import TypeEngine + + from ..operations.batch import BatchOperationsImpl + + +class SQLiteImpl(DefaultImpl): + __dialect__ = "sqlite" + + transactional_ddl = False + """SQLite supports transactional DDL, but pysqlite does not: + see: http://bugs.python.org/issue10740 + """ + + def requires_recreate_in_batch( + self, batch_op: BatchOperationsImpl + ) -> bool: + """Return True if the given :class:`.BatchOperationsImpl` + would need the table to be recreated and copied in order to + proceed. + + Normally, only returns True on SQLite when operations other + than add_column are present. + + """ + for op in batch_op.batch: + if op[0] == "add_column": + col = op[1][1] + if isinstance( + col.server_default, schema.DefaultClause + ) and isinstance(col.server_default.arg, sql.ClauseElement): + return True + elif ( + isinstance(col.server_default, util.sqla_compat.Computed) + and col.server_default.persisted + ): + return True + elif op[0] not in ("create_index", "drop_index"): + return True + else: + return False + + def add_constraint(self, const: Constraint): + # attempt to distinguish between an + # auto-gen constraint and an explicit one + if const._create_rule is None: + raise NotImplementedError( + "No support for ALTER of constraints in SQLite dialect. " + "Please refer to the batch mode feature which allows for " + "SQLite migrations using a copy-and-move strategy." + ) + elif const._create_rule(self): + util.warn( + "Skipping unsupported ALTER for " + "creation of implicit constraint. " + "Please refer to the batch mode feature which allows for " + "SQLite migrations using a copy-and-move strategy." + ) + + def drop_constraint(self, const: Constraint): + if const._create_rule is None: + raise NotImplementedError( + "No support for ALTER of constraints in SQLite dialect. " + "Please refer to the batch mode feature which allows for " + "SQLite migrations using a copy-and-move strategy." + ) + + def compare_server_default( + self, + inspector_column: Column[Any], + metadata_column: Column[Any], + rendered_metadata_default: Optional[str], + rendered_inspector_default: Optional[str], + ) -> bool: + if rendered_metadata_default is not None: + rendered_metadata_default = re.sub( + r"^\((.+)\)$", r"\1", rendered_metadata_default + ) + + rendered_metadata_default = re.sub( + r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default + ) + + if rendered_inspector_default is not None: + rendered_inspector_default = re.sub( + r"^\((.+)\)$", r"\1", rendered_inspector_default + ) + + rendered_inspector_default = re.sub( + r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default + ) + + return rendered_inspector_default != rendered_metadata_default + + def _guess_if_default_is_unparenthesized_sql_expr( + self, expr: Optional[str] + ) -> bool: + """Determine if a server default is a SQL expression or a constant. + + There are too many assertions that expect server defaults to round-trip + identically without parenthesis added so we will add parens only in + very specific cases. 
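For example (an illustrative note): a reflected default of
``current_timestamp`` is classified as an unparenthesized SQL
expression (the caller then wraps it in parens), while ``0``,
``'abc'`` and ``(x + 1)`` already round-trip and are left unchanged.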
+ + """ + if not expr: + return False + elif re.match(r"^[0-9\.]$", expr): + return False + elif re.match(r"^'.+'$", expr): + return False + elif re.match(r"^\(.+\)$", expr): + return False + else: + return True + + def autogen_column_reflect( + self, + inspector: Inspector, + table: Table, + column_info: Dict[str, Any], + ) -> None: + # SQLite expression defaults require parenthesis when sent + # as DDL + if self._guess_if_default_is_unparenthesized_sql_expr( + column_info.get("default", None) + ): + column_info["default"] = "(%s)" % (column_info["default"],) + + def render_ddl_sql_expr( + self, expr: ClauseElement, is_server_default: bool = False, **kw + ) -> str: + # SQLite expression defaults require parenthesis when sent + # as DDL + str_expr = super().render_ddl_sql_expr( + expr, is_server_default=is_server_default, **kw + ) + + if ( + is_server_default + and self._guess_if_default_is_unparenthesized_sql_expr(str_expr) + ): + str_expr = "(%s)" % (str_expr,) + return str_expr + + def cast_for_batch_migrate( + self, + existing: Column[Any], + existing_transfer: Dict[str, Union[TypeEngine, Cast]], + new_type: TypeEngine, + ) -> None: + if ( + existing.type._type_affinity is not new_type._type_affinity + and not isinstance(new_type, JSON) + ): + existing_transfer["expr"] = cast( + existing_transfer["expr"], new_type + ) + + def correct_for_autogen_constraints( + self, + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + ): + self._skip_functional_indexes(metadata_indexes, conn_indexes) + + +@compiles(RenameTable, "sqlite") +def visit_rename_table( + element: RenameTable, compiler: DDLCompiler, **kw +) -> str: + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None), + ) + + +# @compiles(AddColumn, 'sqlite') +# def visit_add_column(element, compiler, **kw): +# return "%s %s" % ( +# alter_table(compiler, element.table_name, element.schema), +# add_column(compiler, element.column, **kw) +# ) + + +# def add_column(compiler, column, **kw): +# text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) +# need to modify SQLAlchemy so that the CHECK associated with a Boolean +# or Enum gets placed as part of the column constraints, not the Table +# see ticket 98 +# for const in column.constraints: +# text += compiler.process(AddConstraint(const)) +# return text diff --git a/venv/lib/python3.12/site-packages/alembic/environment.py b/venv/lib/python3.12/site-packages/alembic/environment.py new file mode 100644 index 0000000..adfc93e --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/environment.py @@ -0,0 +1 @@ +from .runtime.environment import * # noqa diff --git a/venv/lib/python3.12/site-packages/alembic/migration.py b/venv/lib/python3.12/site-packages/alembic/migration.py new file mode 100644 index 0000000..02626e2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/migration.py @@ -0,0 +1 @@ +from .runtime.migration import * # noqa diff --git a/venv/lib/python3.12/site-packages/alembic/op.py b/venv/lib/python3.12/site-packages/alembic/op.py new file mode 100644 index 0000000..f3f5fac --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/op.py @@ -0,0 +1,5 @@ +from .operations.base import Operations + +# create proxy functions for +# each method on the Operations class. 
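+# For example, after this call a migration script can invoke
+# ``op.add_column(...)`` at module level; the generated proxy function
+# forwards to the Operations instance installed by the migration context.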
+Operations.create_module_class_proxy(globals(), locals()) diff --git a/venv/lib/python3.12/site-packages/alembic/op.pyi b/venv/lib/python3.12/site-packages/alembic/op.pyi new file mode 100644 index 0000000..83deac1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/op.pyi @@ -0,0 +1,1321 @@ +# ### this file stubs are generated by tools/write_pyi.py - do not edit ### +# ### imports are manually managed +from __future__ import annotations + +from contextlib import contextmanager +from typing import Any +from typing import Awaitable +from typing import Callable +from typing import Dict +from typing import Iterator +from typing import List +from typing import Literal +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +if TYPE_CHECKING: + from sqlalchemy.engine import Connection + from sqlalchemy.sql import Executable + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.elements import conv + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.expression import TableClause + from sqlalchemy.sql.functions import Function + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Computed + from sqlalchemy.sql.schema import Identity + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.type_api import TypeEngine + from sqlalchemy.util import immutabledict + + from .operations.base import BatchOperations + from .operations.ops import AddColumnOp + from .operations.ops import AddConstraintOp + from .operations.ops import AlterColumnOp + from .operations.ops import AlterTableOp + from .operations.ops import BulkInsertOp + from .operations.ops import CreateIndexOp + from .operations.ops import CreateTableCommentOp + from .operations.ops import CreateTableOp + from .operations.ops import DropColumnOp + from .operations.ops import DropConstraintOp + from .operations.ops import DropIndexOp + from .operations.ops import DropTableCommentOp + from .operations.ops import DropTableOp + from .operations.ops import ExecuteSQLOp + from .operations.ops import MigrateOperation + from .runtime.migration import MigrationContext + from .util.sqla_compat import _literal_bindparam + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=Callable[..., Any]) + +### end imports ### + +def add_column( + table_name: str, column: Column[Any], *, schema: Optional[str] = None +) -> None: + """Issue an "add column" instruction using the current + migration context. + + e.g.:: + + from alembic import op + from sqlalchemy import Column, String + + op.add_column("organization", Column("name", String())) + + The :meth:`.Operations.add_column` method typically corresponds + to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope + of this command, the column's name, datatype, nullability, + and optional server-generated defaults may be indicated. + + .. note:: + + With the exception of NOT NULL constraints or single-column FOREIGN + KEY constraints, other kinds of constraints such as PRIMARY KEY, + UNIQUE or CHECK constraints **cannot** be generated using this + method; for these constraints, refer to operations such as + :meth:`.Operations.create_primary_key` and + :meth:`.Operations.create_check_constraint`. 
In particular, the + following :class:`~sqlalchemy.schema.Column` parameters are + **ignored**: + + * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases + typically do not support an ALTER operation that can add + individual columns one at a time to an existing primary key + constraint, therefore it's less ambiguous to use the + :meth:`.Operations.create_primary_key` method, which assumes no + existing primary key constraint is present. + * :paramref:`~sqlalchemy.schema.Column.unique` - use the + :meth:`.Operations.create_unique_constraint` method + * :paramref:`~sqlalchemy.schema.Column.index` - use the + :meth:`.Operations.create_index` method + + + The provided :class:`~sqlalchemy.schema.Column` object may include a + :class:`~sqlalchemy.schema.ForeignKey` constraint directive, + referencing a remote table name. For this specific type of constraint, + Alembic will automatically emit a second ALTER statement in order to + add the single-column FOREIGN KEY constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column( + "organization", + Column("account_id", INTEGER, ForeignKey("accounts.id")), + ) + + The column argument passed to :meth:`.Operations.add_column` is a + :class:`~sqlalchemy.schema.Column` construct, used in the same way it's + used in SQLAlchemy. In particular, values or functions to be indicated + as producing the column's default value on the database side are + specified using the ``server_default`` parameter, and not ``default`` + which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the column add + op.add_column( + "account", + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + :param table_name: String name of the parent table. + :param column: a :class:`sqlalchemy.schema.Column` object + representing the new column. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + +def alter_column( + table_name: str, + column_name: str, + *, + nullable: Optional[bool] = None, + comment: Union[str, Literal[False], None] = False, + server_default: Any = False, + new_column_name: Optional[str] = None, + type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, + existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, + existing_server_default: Union[ + str, bool, Identity, Computed, None + ] = False, + existing_nullable: Optional[bool] = None, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + **kw: Any, +) -> None: + r"""Issue an "alter column" instruction using the + current migration context. + + Generally, only that aspect of the column which + is being changed, i.e. name, type, nullability, + default, needs to be specified. Multiple changes + can also be specified at once and the backend should + "do the right thing", emitting each change either + separately or together as the backend allows. + + MySQL has special requirements here, since MySQL + cannot ALTER a column without a full specification. + When producing MySQL-compatible migration files, + it is recommended that the ``existing_type``, + ``existing_server_default``, and ``existing_nullable`` + parameters be present, if not being altered. 
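+
+    As a minimal sketch of such a MySQL-compatible call (the ``account``
+    table, ``name`` column, and types here are illustrative only)::
+
+        from alembic import op
+        import sqlalchemy as sa
+
+        op.alter_column(
+            "account",
+            "name",
+            nullable=False,
+            existing_type=sa.String(50),
+        )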
+ + Type changes which are against the SQLAlchemy + "schema" types :class:`~sqlalchemy.types.Boolean` + and :class:`~sqlalchemy.types.Enum` may also + add or drop constraints which accompany those + types on backends that don't support them natively. + The ``existing_type`` argument is + used in this case to identify and remove a previous + constraint that was bound to the type object. + + :param table_name: string name of the target table. + :param column_name: string name of the target column, + as it exists before the operation begins. + :param nullable: Optional; specify ``True`` or ``False`` + to alter the column's nullability. + :param server_default: Optional; specify a string + SQL expression, :func:`~sqlalchemy.sql.expression.text`, + or :class:`~sqlalchemy.schema.DefaultClause` to indicate + an alteration to the column's default value. + Set to ``None`` to have the default removed. + :param comment: optional string text of a new comment to add to the + column. + :param new_column_name: Optional; specify a string name here to + indicate the new name within a column rename operation. + :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` + type object to specify a change to the column's type. + For SQLAlchemy types that also indicate a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), + the constraint is also generated. + :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; + currently understood by the MySQL dialect. + :param existing_type: Optional; a + :class:`~sqlalchemy.types.TypeEngine` + type object to specify the previous type. This + is required for all MySQL column alter operations that + don't otherwise specify a new type, as well as for + when nullability is being changed on a SQL Server + column. It is also used if the type is a so-called + SQLAlchemy "schema" type which may define a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, + :class:`~sqlalchemy.types.Enum`), + so that the constraint can be dropped. + :param existing_server_default: Optional; The existing + default value of the column. Required on MySQL if + an existing default is not being changed; else MySQL + removes the default. + :param existing_nullable: Optional; the existing nullability + of the column. Required on MySQL if the existing nullability + is not being changed; else MySQL sets this to NULL. + :param existing_autoincrement: Optional; the existing autoincrement + of the column. Used for MySQL's system of altering a column + that specifies ``AUTO_INCREMENT``. + :param existing_comment: string text of the existing comment on the + column to be maintained. Required on MySQL if the existing comment + on the column is not being changed. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param postgresql_using: String argument which will indicate a + SQL expression to render within the Postgresql-specific USING clause + within ALTER COLUMN. This string is taken directly as raw SQL which + must explicitly include any necessary quoting or escaping of tokens + within the expression. + + """ + +@contextmanager +def batch_alter_table( + table_name: str, + schema: Optional[str] = None, + recreate: Literal["auto", "always", "never"] = "auto", + partial_reordering: Optional[Tuple[Any, ...]] = None, + copy_from: Optional[Table] = None, + table_args: Tuple[Any, ...] 
= (),
+    table_kwargs: Mapping[str, Any] = immutabledict({}),
+    reflect_args: Tuple[Any, ...] = (),
+    reflect_kwargs: Mapping[str, Any] = immutabledict({}),
+    naming_convention: Optional[Dict[str, str]] = None,
+) -> Iterator[BatchOperations]:
+    """Invoke a series of per-table migrations in batch.
+
+    Batch mode allows a series of operations specific to a table
+    to be syntactically grouped together, and allows for alternate
+    modes of table migration, in particular the "recreate" style of
+    migration required by SQLite.
+
+    "recreate" style is as follows:
+
+    1. A new table is created with the new specification, based on the
+       migration directives within the batch, using a temporary name.
+
+    2. the data is copied from the existing table to the new table.
+
+    3. the existing table is dropped.
+
+    4. the new table is renamed to the existing table name.
+
+    The directive by default will only use "recreate" style on the
+    SQLite backend, and only if directives are present which require
+    this form, e.g. anything other than ``add_column()``. The batch
+    operation on other backends will proceed using standard ALTER TABLE
+    operations.
+
+    The method is used as a context manager, which returns an instance
+    of :class:`.BatchOperations`; this object is the same as
+    :class:`.Operations` except that table names and schema names
+    are omitted. E.g.::
+
+        with op.batch_alter_table("some_table") as batch_op:
+            batch_op.add_column(Column("foo", Integer))
+            batch_op.drop_column("bar")
+
+    The operations within the context manager are invoked at once
+    when the context is ended. When run against SQLite, if the
+    migrations include operations not supported by SQLite's ALTER TABLE,
+    the entire table will be copied to a new one with the new
+    specification, moving all data across as well.
+
+    The copy operation by default uses reflection to retrieve the current
+    structure of the table, and therefore :meth:`.batch_alter_table`
+    in this mode requires that the migration is run in "online" mode.
+    The ``copy_from`` parameter may be passed which refers to an existing
+    :class:`.Table` object, which will bypass this reflection step.
+
+    .. note:: The table copy operation will currently not copy
+       CHECK constraints, and may not copy UNIQUE constraints that are
+       unnamed, as is possible on SQLite. See the section
+       :ref:`sqlite_batch_constraints` for workarounds.
+
+    :param table_name: name of table
+    :param schema: optional schema name.
+    :param recreate: under what circumstances the table should be
+     recreated. At its default of ``"auto"``, the SQLite dialect will
+     recreate the table if any operations other than ``add_column()``,
+     ``create_index()``, or ``drop_index()`` are
+     present. Other options include ``"always"`` and ``"never"``.
+    :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
+     that will act as the structure of the table being copied. If omitted,
+     table reflection is used to retrieve the structure of the table.
+
+     .. seealso::
+
+        :ref:`batch_offline_mode`
+
+        :paramref:`~.Operations.batch_alter_table.reflect_args`
+
+        :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
+
+    :param reflect_args: a sequence of additional positional arguments that
+     will be applied to the table structure being reflected / copied;
+     this may be used to pass column and constraint overrides to the
+     table that will be reflected, in lieu of passing the whole
+     :class:`~sqlalchemy.schema.Table` using
+     :paramref:`~.Operations.batch_alter_table.copy_from`.
+    :param reflect_kwargs: a dictionary of additional keyword arguments
+     that will be applied to the table structure being copied; this may be
+     used to pass additional table and reflection options to the table that
+     will be reflected, in lieu of passing the whole
+     :class:`~sqlalchemy.schema.Table` using
+     :paramref:`~.Operations.batch_alter_table.copy_from`.
+    :param table_args: a sequence of additional positional arguments that
+     will be applied to the new :class:`~sqlalchemy.schema.Table` when
+     created, in addition to those copied from the source table.
+     This may be used to provide additional constraints such as CHECK
+     constraints that may not be reflected.
+    :param table_kwargs: a dictionary of additional keyword arguments
+     that will be applied to the new :class:`~sqlalchemy.schema.Table`
+     when created, in addition to those copied from the source table.
+     This may be used to provide for additional table options that may
+     not be reflected.
+    :param naming_convention: a naming convention dictionary of the form
+     described at :ref:`autogen_naming_conventions` which will be applied
+     to the :class:`~sqlalchemy.schema.MetaData` during the reflection
+     process. This is typically required if one wants to drop SQLite
+     constraints, as these constraints will not have names when
+     reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
+
+     .. seealso::
+
+        :ref:`dropping_sqlite_foreign_keys`
+
+    :param partial_reordering: a list of tuples, each suggesting a desired
+     ordering of two or more columns in the newly created table. Requires
+     that :paramref:`.batch_alter_table.recreate` is set to ``"always"``.
+     Examples, given a table with columns "a", "b", "c", and "d":
+
+     Specify the order of all columns::
+
+        with op.batch_alter_table(
+            "some_table",
+            recreate="always",
+            partial_reordering=[("c", "d", "a", "b")],
+        ) as batch_op:
+            pass
+
+     Ensure "d" appears before "c", and "b" appears before "a"::
+
+        with op.batch_alter_table(
+            "some_table",
+            recreate="always",
+            partial_reordering=[("d", "c"), ("b", "a")],
+        ) as batch_op:
+            pass
+
+     The ordering of columns not included in the partial_reordering
+     set is undefined. Therefore it is best to specify the complete
+     ordering of all columns.
+
+    .. note:: batch mode requires SQLAlchemy 0.8 or above.
+
+    .. seealso::
+
+        :ref:`batch_migrations`
+
+    """
+
+def bulk_insert(
+    table: Union[Table, TableClause],
+    rows: List[Dict[str, Any]],
+    *,
+    multiinsert: bool = True,
+) -> None:
+    """Issue a "bulk insert" operation using the current
+    migration context.
+
+    This provides a means of representing an INSERT of multiple rows
+    which works equally well in the context of executing on a live
+    connection as well as that of generating a SQL script. In the
+    case of a SQL script, the values are rendered inline into the
+    statement.
+
+    e.g.::
+
+        from alembic import op
+        from datetime import date
+        from sqlalchemy.sql import table, column
+        from sqlalchemy import String, Integer, Date
+
+        # Create an ad-hoc table to use for the insert statement.
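+        # (A lightweight table()/column() construct suffices here; no
+        # MetaData or reflection is needed just to render the INSERT.)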
+ accounts_table = table( + "account", + column("id", Integer), + column("name", String), + column("create_date", Date), + ) + + op.bulk_insert( + accounts_table, + [ + { + "id": 1, + "name": "John Smith", + "create_date": date(2010, 10, 5), + }, + { + "id": 2, + "name": "Ed Williams", + "create_date": date(2007, 5, 27), + }, + { + "id": 3, + "name": "Wendy Jones", + "create_date": date(2008, 8, 15), + }, + ], + ) + + When using --sql mode, some datatypes may not render inline + automatically, such as dates and other special types. When this + issue is present, :meth:`.Operations.inline_literal` may be used:: + + op.bulk_insert( + accounts_table, + [ + { + "id": 1, + "name": "John Smith", + "create_date": op.inline_literal("2010-10-05"), + }, + { + "id": 2, + "name": "Ed Williams", + "create_date": op.inline_literal("2007-05-27"), + }, + { + "id": 3, + "name": "Wendy Jones", + "create_date": op.inline_literal("2008-08-15"), + }, + ], + multiinsert=False, + ) + + When using :meth:`.Operations.inline_literal` in conjunction with + :meth:`.Operations.bulk_insert`, in order for the statement to work + in "online" (e.g. non --sql) mode, the + :paramref:`~.Operations.bulk_insert.multiinsert` + flag should be set to ``False``, which will have the effect of + individual INSERT statements being emitted to the database, each + with a distinct VALUES clause, so that the "inline" values can + still be rendered, rather than attempting to pass the values + as bound parameters. + + :param table: a table object which represents the target of the INSERT. + + :param rows: a list of dictionaries indicating rows. + + :param multiinsert: when at its default of True and --sql mode is not + enabled, the INSERT statement will be executed using + "executemany()" style, where all elements in the list of + dictionaries are passed as bound parameters in a single + list. Setting this to False results in individual INSERT + statements being emitted per parameter set, and is needed + in those cases where non-literal values are present in the + parameter sets. + + """ + +def create_check_constraint( + constraint_name: Optional[str], + table_name: str, + condition: Union[str, ColumnElement[bool], TextClause], + *, + schema: Optional[str] = None, + **kw: Any, +) -> None: + """Issue a "create check constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + from sqlalchemy.sql import column, func + + op.create_check_constraint( + "ck_user_name_len", + "user", + func.len(column("name")) > 5, + ) + + CHECK constraints are usually against a SQL expression, so ad-hoc + table metadata is usually needed. The function will convert the given + arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound + to an anonymous table in order to emit the CREATE statement. + + :param name: Name of the check constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param condition: SQL expression that's the condition of the + constraint. Can be a string or SQLAlchemy expression language + structure. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. 
If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + +def create_exclude_constraint( + constraint_name: str, table_name: str, *elements: Any, **kw: Any +) -> Optional[Table]: + """Issue an alter to create an EXCLUDE constraint using the + current migration context. + + .. note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + e.g.:: + + from alembic import op + + op.create_exclude_constraint( + "user_excl", + "user", + ("period", "&&"), + ("group", "="), + where=("group != 'some group'"), + ) + + Note that the expressions work the same way as that of + the ``ExcludeConstraint`` object itself; if plain strings are + passed, quoting rules must be applied manually. + + :param name: Name of the constraint. + :param table_name: String name of the source table. + :param elements: exclude conditions. + :param where: SQL expression or SQL string with optional WHERE + clause. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. + + """ + +def create_foreign_key( + constraint_name: Optional[str], + source_table: str, + referent_table: str, + local_cols: List[str], + remote_cols: List[str], + *, + onupdate: Optional[str] = None, + ondelete: Optional[str] = None, + deferrable: Optional[bool] = None, + initially: Optional[str] = None, + match: Optional[str] = None, + source_schema: Optional[str] = None, + referent_schema: Optional[str] = None, + **dialect_kw: Any, +) -> None: + """Issue a "create foreign key" instruction using the + current migration context. + + e.g.:: + + from alembic import op + + op.create_foreign_key( + "fk_user_address", + "address", + "user", + ["user_id"], + ["id"], + ) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.ForeignKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param constraint_name: Name of the foreign key constraint. The name + is necessary so that an ALTER statement can be emitted. For setups + that use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source_table: String name of the source table. + :param referent_table: String name of the destination table. + :param local_cols: a list of string column names in the + source table. + :param remote_cols: a list of string column names in the + remote table. + :param onupdate: Optional string. If set, emit ON UPDATE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param ondelete: Optional string. If set, emit ON DELETE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. 
+    :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+     DEFERRABLE when issuing DDL for this constraint.
+    :param source_schema: Optional schema name of the source table.
+    :param referent_schema: Optional schema name of the destination table.
+
+    """
+
+def create_index(
+    index_name: Optional[str],
+    table_name: str,
+    columns: Sequence[Union[str, TextClause, Function[Any]]],
+    *,
+    schema: Optional[str] = None,
+    unique: bool = False,
+    if_not_exists: Optional[bool] = None,
+    **kw: Any,
+) -> None:
+    r"""Issue a "create index" instruction using the current
+    migration context.
+
+    e.g.::
+
+        from alembic import op
+
+        op.create_index("ik_test", "t1", ["foo", "bar"])
+
+    Functional indexes can be produced by using the
+    :func:`sqlalchemy.sql.expression.text` construct::
+
+        from alembic import op
+        from sqlalchemy import text
+
+        op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+    :param index_name: name of the index.
+    :param table_name: name of the owning table.
+    :param columns: a list consisting of string column names and/or
+     :func:`~sqlalchemy.sql.expression.text` constructs.
+    :param schema: Optional schema name to operate within. To control
+     quoting of the schema outside of the default behavior, use
+     the SQLAlchemy construct
+     :class:`~sqlalchemy.sql.elements.quoted_name`.
+    :param unique: If True, create a unique index.
+
+    :param quote: Force quoting of this column's name on or off,
+     corresponding to ``True`` or ``False``. When left at its default
+     of ``None``, the column identifier will be quoted according to
+     whether the name is case sensitive (identifiers with at least one
+     upper case character are treated as case sensitive), or if it's a
+     reserved word. This flag is only needed to force quoting of a
+     reserved word which is not known by the SQLAlchemy dialect.
+
+    :param if_not_exists: If True, adds IF NOT EXISTS operator when
+     creating the new index.
+
+     .. versionadded:: 1.12.0
+
+    :param \**kw: Additional keyword arguments not mentioned above are
+     dialect specific, and passed in the form
+     ``<dialectname>_<argname>``.
+     See the documentation regarding an individual dialect at
+     :ref:`dialect_toplevel` for detail on documented arguments.
+
+    """
+
+def create_primary_key(
+    constraint_name: Optional[str],
+    table_name: str,
+    columns: List[str],
+    *,
+    schema: Optional[str] = None,
+) -> None:
+    """Issue a "create primary key" instruction using the current
+    migration context.
+
+    e.g.::
+
+        from alembic import op
+
+        op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+    This internally generates a :class:`~sqlalchemy.schema.Table` object
+    containing the necessary columns, then generates a new
+    :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+    object which it then associates with the
+    :class:`~sqlalchemy.schema.Table`.
+    Any event listeners associated with this action will be fired
+    off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+    construct is ultimately used to generate the ALTER statement.
+
+    :param constraint_name: Name of the primary key constraint. The name
+     is necessary so that an ALTER statement can be emitted. For setups
+     that use an automated naming scheme such as that described at
+     :ref:`sqla:constraint_naming_conventions`,
+     ``name`` here can be ``None``, as the event listener will
+     apply the name to the constraint object when it is associated
+     with the table.
+    :param table_name: String name of the target table.
+    :param columns: a list of string column names to be applied to the
+     primary key constraint.
+ :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + +def create_table(table_name: str, *columns: SchemaItem, **kw: Any) -> Table: + r"""Issue a "create table" instruction using the current migration + context. + + This directive receives an argument list similar to that of the + traditional :class:`sqlalchemy.schema.Table` construct, but without the + metadata:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + op.create_table( + "account", + Column("id", INTEGER, primary_key=True), + Column("name", VARCHAR(50), nullable=False), + Column("description", NVARCHAR(200)), + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + Note that :meth:`.create_table` accepts + :class:`~sqlalchemy.schema.Column` + constructs directly from the SQLAlchemy library. In particular, + default values to be created on the database side are + specified using the ``server_default`` parameter, and not + ``default`` which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the "timestamp" column + op.create_table( + "account", + Column("id", INTEGER, primary_key=True), + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + The function also returns a newly created + :class:`~sqlalchemy.schema.Table` object, corresponding to the table + specification given, which is suitable for + immediate SQL operations, in particular + :meth:`.Operations.bulk_insert`:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + account_table = op.create_table( + "account", + Column("id", INTEGER, primary_key=True), + Column("name", VARCHAR(50), nullable=False), + Column("description", NVARCHAR(200)), + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + op.bulk_insert( + account_table, + [ + {"name": "A1", "description": "account 1"}, + {"name": "A2", "description": "account 2"}, + ], + ) + + :param table_name: Name of the table + :param \*columns: collection of :class:`~sqlalchemy.schema.Column` + objects within + the table, as well as optional :class:`~sqlalchemy.schema.Constraint` + objects + and :class:`~.sqlalchemy.schema.Index` objects. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + :return: the :class:`~sqlalchemy.schema.Table` object corresponding + to the parameters given. + + """ + +def create_table_comment( + table_name: str, + comment: Optional[str], + *, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, +) -> None: + """Emit a COMMENT ON operation to set the comment for a table. + + :param table_name: string name of the target table. + :param comment: string value of the comment being registered against + the specified table. + :param existing_comment: String value of a comment + already registered on the specified table, used within autogenerate + so that the operation is reversible, but not required for direct + use. + + .. 
seealso:: + + :meth:`.Operations.drop_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ + +def create_unique_constraint( + constraint_name: Optional[str], + table_name: str, + columns: Sequence[str], + *, + schema: Optional[str] = None, + **kw: Any, +) -> Any: + """Issue a "create unique constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + op.create_unique_constraint("uq_user_name", "user", ["name"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.UniqueConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the unique constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param columns: a list of string column names in the + source table. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + +def drop_column( + table_name: str, + column_name: str, + *, + schema: Optional[str] = None, + **kw: Any, +) -> None: + """Issue a "drop column" instruction using the current + migration context. + + e.g.:: + + drop_column("organization", "account_id") + + :param table_name: name of table + :param column_name: name of column + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param mssql_drop_check: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop the CHECK constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.check_constraints, + then exec's a separate DROP CONSTRAINT for that constraint. + :param mssql_drop_default: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop the DEFAULT constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.default_constraints, + then exec's a separate DROP CONSTRAINT for that default. + :param mssql_drop_foreign_key: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop a single FOREIGN KEY constraint on the column using a + SQL-script-compatible + block that selects into a @variable from + sys.foreign_keys/sys.foreign_key_columns, + then exec's a separate DROP CONSTRAINT for that default. Only + works if the column has exactly one FK constraint which refers to + it, at the moment. 
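+
+    For instance, a hedged sketch of dropping a SQL Server column along
+    with its DEFAULT constraint (table and column names are illustrative)::
+
+        op.drop_column("account", "timestamp", mssql_drop_default=True)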
+
+    """
+
+def drop_constraint(
+    constraint_name: str,
+    table_name: str,
+    type_: Optional[str] = None,
+    *,
+    schema: Optional[str] = None,
+) -> None:
+    r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
+    :param constraint_name: name of the constraint.
+    :param table_name: table name.
+    :param type\_: optional, required on MySQL. Can be
+     'foreignkey', 'primary', 'unique', or 'check'.
+    :param schema: Optional schema name to operate within. To control
+     quoting of the schema outside of the default behavior, use
+     the SQLAlchemy construct
+     :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+    """
+
+def drop_index(
+    index_name: str,
+    table_name: Optional[str] = None,
+    *,
+    schema: Optional[str] = None,
+    if_exists: Optional[bool] = None,
+    **kw: Any,
+) -> None:
+    r"""Issue a "drop index" instruction using the current
+    migration context.
+
+    e.g.::
+
+        drop_index("accounts")
+
+    :param index_name: name of the index.
+    :param table_name: name of the owning table. Some
+     backends such as Microsoft SQL Server require this.
+    :param schema: Optional schema name to operate within. To control
+     quoting of the schema outside of the default behavior, use
+     the SQLAlchemy construct
+     :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+    :param if_exists: If True, adds IF EXISTS operator when
+     dropping the index.
+
+     .. versionadded:: 1.12.0
+
+    :param \**kw: Additional keyword arguments not mentioned above are
+     dialect specific, and passed in the form
+     ``<dialectname>_<argname>``.
+     See the documentation regarding an individual dialect at
+     :ref:`dialect_toplevel` for detail on documented arguments.
+
+    """
+
+def drop_table(
+    table_name: str, *, schema: Optional[str] = None, **kw: Any
+) -> None:
+    r"""Issue a "drop table" instruction using the current
+    migration context.
+
+    e.g.::
+
+        drop_table("accounts")
+
+    :param table_name: Name of the table
+    :param schema: Optional schema name to operate within. To control
+     quoting of the schema outside of the default behavior, use
+     the SQLAlchemy construct
+     :class:`~sqlalchemy.sql.elements.quoted_name`.
+    :param \**kw: Other keyword arguments are passed to the underlying
+     :class:`sqlalchemy.schema.Table` object created for the command.
+
+    """
+
+def drop_table_comment(
+    table_name: str,
+    *,
+    existing_comment: Optional[str] = None,
+    schema: Optional[str] = None,
+) -> None:
+    """Issue a "drop table comment" operation to
+    remove an existing comment set on a table.
+
+    :param table_name: string name of the target table.
+    :param existing_comment: An optional string value of a comment already
+     registered on the specified table.
+
+    .. seealso::
+
+        :meth:`.Operations.create_table_comment`
+
+        :paramref:`.Operations.alter_column.comment`
+
+    """
+
+def execute(
+    sqltext: Union[Executable, str],
+    *,
+    execution_options: Optional[dict[str, Any]] = None,
+) -> None:
+    r"""Execute the given SQL using the current migration context.
+ + The given SQL can be a plain string, e.g.:: + + op.execute("INSERT INTO table (foo) VALUES ('some value')") + + Or it can be any kind of Core SQL Expression construct, such as + below where we use an update construct:: + + from sqlalchemy.sql import table, column + from sqlalchemy import String + from alembic import op + + account = table("account", column("name", String)) + op.execute( + account.update() + .where(account.c.name == op.inline_literal("account 1")) + .values({"name": op.inline_literal("account 2")}) + ) + + Above, we made use of the SQLAlchemy + :func:`sqlalchemy.sql.expression.table` and + :func:`sqlalchemy.sql.expression.column` constructs to make a brief, + ad-hoc table construct just for our UPDATE statement. A full + :class:`~sqlalchemy.schema.Table` construct of course works perfectly + fine as well, though note it's a recommended practice to at least + ensure the definition of a table is self-contained within the migration + script, rather than imported from a module that may break compatibility + with older migrations. + + In a SQL script context, the statement is emitted directly to the + output stream. There is *no* return result, however, as this + function is oriented towards generating a change script + that can run in "offline" mode. Additionally, parameterized + statements are discouraged here, as they *will not work* in offline + mode. Above, we use :meth:`.inline_literal` where parameters are + to be used. + + For full interaction with a connected database where parameters can + also be used normally, use the "bind" available from the context:: + + from alembic import op + + connection = op.get_bind() + + connection.execute( + account.update() + .where(account.c.name == "account 1") + .values({"name": "account 2"}) + ) + + Additionally, when passing the statement as a plain string, it is first + coerced into a :func:`sqlalchemy.sql.expression.text` construct + before being passed along. In the less likely case that the + literal SQL string contains a colon, it must be escaped with a + backslash, as:: + + op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')") + + + :param sqltext: Any legal SQLAlchemy expression, including: + + * a string + * a :func:`sqlalchemy.sql.expression.text` construct. + * a :func:`sqlalchemy.sql.expression.insert` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. + * Any "executable" described in SQLAlchemy Core documentation, + noting that no result set is returned. + + .. note:: when passing a plain string, the statement is coerced into + a :func:`sqlalchemy.sql.expression.text` construct. This construct + considers symbols with colons, e.g. ``:foo`` to be bound parameters. + To avoid this, ensure that colon symbols are escaped, e.g. + ``\:foo``. + + :param execution_options: Optional dictionary of + execution options, will be passed to + :meth:`sqlalchemy.engine.Connection.execution_options`. + """ + +def f(name: str) -> conv: + """Indicate a string name that has already had a naming convention + applied to it. + + This feature combines with the SQLAlchemy ``naming_convention`` feature + to disambiguate constraint names that have already had naming + conventions applied to them, versus those that have not. This is + necessary in the case that the ``"%(constraint_name)s"`` token + is used within a naming convention, so that it can be identified + that this particular name should remain fixed. 
+
+    If the :meth:`.Operations.f` is used on a constraint, the naming
+    convention will not take effect::
+
+        op.add_column("t", "x", Boolean(name=op.f("ck_bool_t_x")))
+
+    Above, the CHECK constraint generated will have the name
+    ``ck_bool_t_x`` regardless of whether or not a naming convention is
+    in use.
+
+    Alternatively, if a naming convention is in use, and 'f' is not used,
+    names will be converted along conventions. If the ``target_metadata``
+    contains the naming convention
+    ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
+    output of the following::
+
+        op.add_column("t", "x", Boolean(name="x"))
+
+    will be::
+
+        CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
+
+    The function is rendered in the output of autogenerate when
+    a particular constraint name is already converted.
+
+    """
+
+def get_bind() -> Connection:
+    """Return the current 'bind'.
+
+    Under normal circumstances, this is the
+    :class:`~sqlalchemy.engine.Connection` currently being used
+    to emit SQL to the database.
+
+    In a SQL script context, this value is present as well, as a
+    "mock" connection object.
+
+    """
+
+def get_context() -> MigrationContext:
+    """Return the :class:`.MigrationContext` object that's
+    currently in use.
+
+    """
+
+def implementation_for(op_cls: Any) -> Callable[[_C], _C]:
+    """Register an implementation for a given :class:`.MigrateOperation`.
+
+    This is part of the operation extensibility API.
+
+    .. seealso::
+
+        :ref:`operation_plugins` - example of use
+
+    """
+
+def inline_literal(
+    value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
+) -> _literal_bindparam:
+    r"""Produce an 'inline literal' expression, suitable for
+    using in an INSERT, UPDATE, or DELETE statement.
+
+    When using Alembic in "offline" mode, CRUD operations
+    aren't compatible with SQLAlchemy's default behavior surrounding
+    literal values,
+    which is that they are converted into bound values and passed
+    separately into the ``execute()`` method of the DBAPI cursor.
+    An offline SQL
+    script needs to have these rendered inline. While it should
+    always be noted that inline literal values are an **enormous**
+    security hole in an application that handles untrusted input,
+    a schema migration is not run in this context, so
+    literals are safe to render inline, with the caveat that
+    advanced types like dates may not be supported directly
+    by SQLAlchemy.
+
+    See :meth:`.Operations.execute` for an example usage of
+    :meth:`.Operations.inline_literal`.
+
+    The environment can also be configured to attempt to render
+    "literal" values inline automatically, for those simple types
+    that are supported by the dialect; see
+    :paramref:`.EnvironmentContext.configure.literal_binds` for this
+    more recently added feature.
+
+    :param value: The value to render. Strings, integers, and simple
+     numerics should be supported. Other types like boolean,
+     dates, etc. may or may not be supported yet by various
+     backends.
+    :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
+     subclass stating the type of this value. In SQLAlchemy
+     expressions, this is usually derived automatically
+     from the Python type of the value itself, as well as
+     based on the context in which the value is used.
+
+    .. seealso::
+
+        :paramref:`.EnvironmentContext.configure.literal_binds`
+
+    """
+
+@overload
+def invoke(operation: CreateTableOp) -> Table: ...
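+# (The overloads below narrow invoke()'s return type: a CreateTableOp
+# returns the newly created Table, the listed DDL operations return None,
+# and any other MigrateOperation falls back to Any.)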
+@overload
+def invoke(
+    operation: Union[
+        AddConstraintOp,
+        DropConstraintOp,
+        CreateIndexOp,
+        DropIndexOp,
+        AddColumnOp,
+        AlterColumnOp,
+        AlterTableOp,
+        CreateTableCommentOp,
+        DropTableCommentOp,
+        DropColumnOp,
+        BulkInsertOp,
+        DropTableOp,
+        ExecuteSQLOp,
+    ]
+) -> None: ...
+@overload
+def invoke(operation: MigrateOperation) -> Any:
+    """Given a :class:`.MigrateOperation`, invoke it in terms of
+    this :class:`.Operations` instance.
+
+    """
+
+def register_operation(
+    name: str, sourcename: Optional[str] = None
+) -> Callable[[Type[_T]], Type[_T]]:
+    """Register a new operation for this class.
+
+    This method is normally used to add new operations
+    to the :class:`.Operations` class, and possibly the
+    :class:`.BatchOperations` class as well. All Alembic migration
+    operations are implemented via this system, however the system
+    is also available as a public API to facilitate adding custom
+    operations.
+
+    .. seealso::
+
+        :ref:`operation_plugins`
+
+    """
+
+def rename_table(
+    old_table_name: str, new_table_name: str, *, schema: Optional[str] = None
+) -> None:
+    """Emit an ALTER TABLE to rename a table.
+
+    :param old_table_name: old name.
+    :param new_table_name: new name.
+    :param schema: Optional schema name to operate within. To control
+     quoting of the schema outside of the default behavior, use
+     the SQLAlchemy construct
+     :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+    """
+
+def run_async(
+    async_function: Callable[..., Awaitable[_T]], *args: Any, **kw_args: Any
+) -> _T:
+    """Invoke the given asynchronous callable, passing an asynchronous
+    :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
+    argument.
+
+    This method allows calling async functions from within the
+    synchronous ``upgrade()`` or ``downgrade()`` alembic migration
+    method.
+
+    The async connection passed to the callable shares the same
+    transaction as the connection running in the migration context.
+
+    Any additional args or kw_args passed to this function are passed
+    to the provided async function.
+
+    .. versionadded:: 1.11
+
+    .. note::
+
+        This method can be called only when alembic is called using
+        an async dialect.
+    """
diff --git a/venv/lib/python3.12/site-packages/alembic/operations/__init__.py b/venv/lib/python3.12/site-packages/alembic/operations/__init__.py
new file mode 100644
index 0000000..26197cb
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/operations/__init__.py
@@ -0,0 +1,15 @@
+from . 
import toimpl
+from .base import AbstractOperations
+from .base import BatchOperations
+from .base import Operations
+from .ops import MigrateOperation
+from .ops import MigrationScript
+
+
+__all__ = [
+    "AbstractOperations",
+    "Operations",
+    "BatchOperations",
+    "MigrateOperation",
+    "MigrationScript",
+]
diff --git a/venv/lib/python3.12/site-packages/alembic/operations/base.py b/venv/lib/python3.12/site-packages/alembic/operations/base.py
new file mode 100644
index 0000000..bafe441
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/operations/base.py
@@ -0,0 +1,1893 @@
+# mypy: allow-untyped-calls
+
+from __future__ import annotations
+
+from contextlib import contextmanager
+import re
+import textwrap
+from typing import Any
+from typing import Awaitable
+from typing import Callable
+from typing import Dict
+from typing import Iterator
+from typing import List  # noqa
+from typing import Mapping
+from typing import NoReturn
+from typing import Optional
+from typing import overload
+from typing import Sequence  # noqa
+from typing import Tuple
+from typing import Type  # noqa
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from sqlalchemy.sql.elements import conv
+
+from . import batch
+from . import schemaobj
+from .. 
import util +from ..util import sqla_compat +from ..util.compat import formatannotation_fwdref +from ..util.compat import inspect_formatargspec +from ..util.compat import inspect_getfullargspec +from ..util.sqla_compat import _literal_bindparam + + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy import Table + from sqlalchemy.engine import Connection + from sqlalchemy.sql import Executable + from sqlalchemy.sql.expression import ColumnElement + from sqlalchemy.sql.expression import TableClause + from sqlalchemy.sql.expression import TextClause + from sqlalchemy.sql.functions import Function + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Computed + from sqlalchemy.sql.schema import Identity + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.types import TypeEngine + + from .batch import BatchOperationsImpl + from .ops import AddColumnOp + from .ops import AddConstraintOp + from .ops import AlterColumnOp + from .ops import AlterTableOp + from .ops import BulkInsertOp + from .ops import CreateIndexOp + from .ops import CreateTableCommentOp + from .ops import CreateTableOp + from .ops import DropColumnOp + from .ops import DropConstraintOp + from .ops import DropIndexOp + from .ops import DropTableCommentOp + from .ops import DropTableOp + from .ops import ExecuteSQLOp + from .ops import MigrateOperation + from ..ddl import DefaultImpl + from ..runtime.migration import MigrationContext +__all__ = ("Operations", "BatchOperations") +_T = TypeVar("_T") + +_C = TypeVar("_C", bound=Callable[..., Any]) + + +class AbstractOperations(util.ModuleClsProxy): + """Base class for Operations and BatchOperations. + + .. versionadded:: 1.11.0 + + """ + + impl: Union[DefaultImpl, BatchOperationsImpl] + _to_impl = util.Dispatcher() + + def __init__( + self, + migration_context: MigrationContext, + impl: Optional[BatchOperationsImpl] = None, + ) -> None: + """Construct a new :class:`.Operations` + + :param migration_context: a :class:`.MigrationContext` + instance. + + """ + self.migration_context = migration_context + if impl is None: + self.impl = migration_context.impl + else: + self.impl = impl + + self.schema_obj = schemaobj.SchemaObjects(migration_context) + + @classmethod + def register_operation( + cls, name: str, sourcename: Optional[str] = None + ) -> Callable[[Type[_T]], Type[_T]]: + """Register a new operation for this class. + + This method is normally used to add new operations + to the :class:`.Operations` class, and possibly the + :class:`.BatchOperations` class as well. All Alembic migration + operations are implemented via this system, however the system + is also available as a public API to facilitate adding custom + operations. + + .. seealso:: + + :ref:`operation_plugins` + + + """ + + def register(op_cls: Type[_T]) -> Type[_T]: + if sourcename is None: + fn = getattr(op_cls, name) + source_name = fn.__name__ + else: + fn = getattr(op_cls, sourcename) + source_name = fn.__name__ + + spec = inspect_getfullargspec(fn) + + name_args = spec[0] + assert name_args[0:2] == ["cls", "operations"] + + name_args[0:2] = ["self"] + + args = inspect_formatargspec( + *spec, formatannotation=formatannotation_fwdref + ) + num_defaults = len(spec[3]) if spec[3] else 0 + + defaulted_vals: Tuple[Any, ...] 
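+            # note these are the *names* of the defaulted arguments, not
+            # their values; inspect_formatargspec below pairs them with
+            # ``formatvalue`` to render forwarding args like ``schema=schema``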
+ + if num_defaults: + defaulted_vals = tuple(name_args[0 - num_defaults :]) + else: + defaulted_vals = () + + defaulted_vals += tuple(spec[4]) + # here, we are using formatargspec in a different way in order + # to get a string that will re-apply incoming arguments to a new + # function call + + apply_kw = inspect_formatargspec( + name_args + spec[4], + spec[1], + spec[2], + defaulted_vals, + formatvalue=lambda x: "=" + x, + formatannotation=formatannotation_fwdref, + ) + + args = re.sub( + r'[_]?ForwardRef\(([\'"].+?[\'"])\)', + lambda m: m.group(1), + args, + ) + + func_text = textwrap.dedent( + """\ + def %(name)s%(args)s: + %(doc)r + return op_cls.%(source_name)s%(apply_kw)s + """ + % { + "name": name, + "source_name": source_name, + "args": args, + "apply_kw": apply_kw, + "doc": fn.__doc__, + } + ) + + globals_ = dict(globals()) + globals_.update({"op_cls": op_cls}) + lcl: Dict[str, Any] = {} + + exec(func_text, globals_, lcl) + setattr(cls, name, lcl[name]) + fn.__func__.__doc__ = ( + "This method is proxied on " + "the :class:`.%s` class, via the :meth:`.%s.%s` method." + % (cls.__name__, cls.__name__, name) + ) + if hasattr(fn, "_legacy_translations"): + lcl[name]._legacy_translations = fn._legacy_translations + return op_cls + + return register + + @classmethod + def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]: + """Register an implementation for a given :class:`.MigrateOperation`. + + This is part of the operation extensibility API. + + .. seealso:: + + :ref:`operation_plugins` - example of use + + """ + + def decorate(fn: _C) -> _C: + cls._to_impl.dispatch_for(op_cls)(fn) + return fn + + return decorate + + @classmethod + @contextmanager + def context( + cls, migration_context: MigrationContext + ) -> Iterator[Operations]: + op = Operations(migration_context) + op._install_proxy() + yield op + op._remove_proxy() + + @contextmanager + def batch_alter_table( + self, + table_name: str, + schema: Optional[str] = None, + recreate: Literal["auto", "always", "never"] = "auto", + partial_reordering: Optional[Tuple[Any, ...]] = None, + copy_from: Optional[Table] = None, + table_args: Tuple[Any, ...] = (), + table_kwargs: Mapping[str, Any] = util.immutabledict(), + reflect_args: Tuple[Any, ...] = (), + reflect_kwargs: Mapping[str, Any] = util.immutabledict(), + naming_convention: Optional[Dict[str, str]] = None, + ) -> Iterator[BatchOperations]: + """Invoke a series of per-table migrations in batch. + + Batch mode allows a series of operations specific to a table + to be syntactically grouped together, and allows for alternate + modes of table migration, in particular the "recreate" style of + migration required by SQLite. + + "recreate" style is as follows: + + 1. A new table is created with the new specification, based on the + migration directives within the batch, using a temporary name. + + 2. the data copied from the existing table to the new table. + + 3. the existing table is dropped. + + 4. the new table is renamed to the existing table name. + + The directive by default will only use "recreate" style on the + SQLite backend, and only if directives are present which require + this form, e.g. anything other than ``add_column()``. The batch + operation on other backends will proceed using standard ALTER TABLE + operations. + + The method is used as a context manager, which returns an instance + of :class:`.BatchOperations`; this object is the same as + :class:`.Operations` except that table names and schema names + are omitted. 
E.g.:: + + with op.batch_alter_table("some_table") as batch_op: + batch_op.add_column(Column("foo", Integer)) + batch_op.drop_column("bar") + + The operations within the context manager are invoked at once + when the context is ended. When run against SQLite, if the + migrations include operations not supported by SQLite's ALTER TABLE, + the entire table will be copied to a new one with the new + specification, moving all data across as well. + + The copy operation by default uses reflection to retrieve the current + structure of the table, and therefore :meth:`.batch_alter_table` + in this mode requires that the migration is run in "online" mode. + The ``copy_from`` parameter may be passed which refers to an existing + :class:`.Table` object, which will bypass this reflection step. + + .. note:: The table copy operation will currently not copy + CHECK constraints, and may not copy UNIQUE constraints that are + unnamed, as is possible on SQLite. See the section + :ref:`sqlite_batch_constraints` for workarounds. + + :param table_name: name of table + :param schema: optional schema name. + :param recreate: under what circumstances the table should be + recreated. At its default of ``"auto"``, the SQLite dialect will + recreate the table if any operations other than ``add_column()``, + ``create_index()``, or ``drop_index()`` are + present. Other options include ``"always"`` and ``"never"``. + :param copy_from: optional :class:`~sqlalchemy.schema.Table` object + that will act as the structure of the table being copied. If omitted, + table reflection is used to retrieve the structure of the table. + + .. seealso:: + + :ref:`batch_offline_mode` + + :paramref:`~.Operations.batch_alter_table.reflect_args` + + :paramref:`~.Operations.batch_alter_table.reflect_kwargs` + + :param reflect_args: a sequence of additional positional arguments that + will be applied to the table structure being reflected / copied; + this may be used to pass column and constraint overrides to the + table that will be reflected, in lieu of passing the whole + :class:`~sqlalchemy.schema.Table` using + :paramref:`~.Operations.batch_alter_table.copy_from`. + :param reflect_kwargs: a dictionary of additional keyword arguments + that will be applied to the table structure being copied; this may be + used to pass additional table and reflection options to the table that + will be reflected, in lieu of passing the whole + :class:`~sqlalchemy.schema.Table` using + :paramref:`~.Operations.batch_alter_table.copy_from`. + :param table_args: a sequence of additional positional arguments that + will be applied to the new :class:`~sqlalchemy.schema.Table` when + created, in addition to those copied from the source table. + This may be used to provide additional constraints such as CHECK + constraints that may not be reflected. + :param table_kwargs: a dictionary of additional keyword arguments + that will be applied to the new :class:`~sqlalchemy.schema.Table` + when created, in addition to those copied from the source table. + This may be used to provide for additional table options that may + not be reflected. + :param naming_convention: a naming convention dictionary of the form + described at :ref:`autogen_naming_conventions` which will be applied + to the :class:`~sqlalchemy.schema.MetaData` during the reflection + process. This is typically required if one wants to drop SQLite + constraints, as these constraints will not have names when + reflected on this backend. Requires SQLAlchemy **0.9.4** or greater. + + .. 
seealso:: + + :ref:`dropping_sqlite_foreign_keys` + + :param partial_reordering: a list of tuples, each suggesting a desired + ordering of two or more columns in the newly created table. Requires + that :paramref:`.batch_alter_table.recreate` is set to ``"always"``. + Examples, given a table with columns "a", "b", "c", and "d": + + Specify the order of all columns:: + + with op.batch_alter_table( + "some_table", + recreate="always", + partial_reordering=[("c", "d", "a", "b")], + ) as batch_op: + pass + + Ensure "d" appears before "c", and "b", appears before "a":: + + with op.batch_alter_table( + "some_table", + recreate="always", + partial_reordering=[("d", "c"), ("b", "a")], + ) as batch_op: + pass + + The ordering of columns not included in the partial_reordering + set is undefined. Therefore it is best to specify the complete + ordering of all columns for best results. + + .. note:: batch mode requires SQLAlchemy 0.8 or above. + + .. seealso:: + + :ref:`batch_migrations` + + """ + impl = batch.BatchOperationsImpl( + self, + table_name, + schema, + recreate, + copy_from, + table_args, + table_kwargs, + reflect_args, + reflect_kwargs, + naming_convention, + partial_reordering, + ) + batch_op = BatchOperations(self.migration_context, impl=impl) + yield batch_op + impl.flush() + + def get_context(self) -> MigrationContext: + """Return the :class:`.MigrationContext` object that's + currently in use. + + """ + + return self.migration_context + + @overload + def invoke(self, operation: CreateTableOp) -> Table: + ... + + @overload + def invoke( + self, + operation: Union[ + AddConstraintOp, + DropConstraintOp, + CreateIndexOp, + DropIndexOp, + AddColumnOp, + AlterColumnOp, + AlterTableOp, + CreateTableCommentOp, + DropTableCommentOp, + DropColumnOp, + BulkInsertOp, + DropTableOp, + ExecuteSQLOp, + ], + ) -> None: + ... + + @overload + def invoke(self, operation: MigrateOperation) -> Any: + ... + + def invoke(self, operation: MigrateOperation) -> Any: + """Given a :class:`.MigrateOperation`, invoke it in terms of + this :class:`.Operations` instance. + + """ + fn = self._to_impl.dispatch( + operation, self.migration_context.impl.__dialect__ + ) + return fn(self, operation) + + def f(self, name: str) -> conv: + """Indicate a string name that has already had a naming convention + applied to it. + + This feature combines with the SQLAlchemy ``naming_convention`` feature + to disambiguate constraint names that have already had naming + conventions applied to them, versus those that have not. This is + necessary in the case that the ``"%(constraint_name)s"`` token + is used within a naming convention, so that it can be identified + that this particular name should remain fixed. + + If the :meth:`.Operations.f` is used on a constraint, the naming + convention will not take effect:: + + op.add_column("t", "x", Boolean(name=op.f("ck_bool_t_x"))) + + Above, the CHECK constraint generated will have the name + ``ck_bool_t_x`` regardless of whether or not a naming convention is + in use. + + Alternatively, if a naming convention is in use, and 'f' is not used, + names will be converted along conventions. If the ``target_metadata`` + contains the naming convention + ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the + output of the following: + + op.add_column("t", "x", Boolean(name="x")) + + will be:: + + CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))) + + The function is rendered in the output of autogenerate when + a particular constraint name is already converted. 
+ + """ + return conv(name) + + def inline_literal( + self, value: Union[str, int], type_: Optional[TypeEngine[Any]] = None + ) -> _literal_bindparam: + r"""Produce an 'inline literal' expression, suitable for + using in an INSERT, UPDATE, or DELETE statement. + + When using Alembic in "offline" mode, CRUD operations + aren't compatible with SQLAlchemy's default behavior surrounding + literal values, + which is that they are converted into bound values and passed + separately into the ``execute()`` method of the DBAPI cursor. + An offline SQL + script needs to have these rendered inline. While it should + always be noted that inline literal values are an **enormous** + security hole in an application that handles untrusted input, + a schema migration is not run in this context, so + literals are safe to render inline, with the caveat that + advanced types like dates may not be supported directly + by SQLAlchemy. + + See :meth:`.Operations.execute` for an example usage of + :meth:`.Operations.inline_literal`. + + The environment can also be configured to attempt to render + "literal" values inline automatically, for those simple types + that are supported by the dialect; see + :paramref:`.EnvironmentContext.configure.literal_binds` for this + more recently added feature. + + :param value: The value to render. Strings, integers, and simple + numerics should be supported. Other types like boolean, + dates, etc. may or may not be supported yet by various + backends. + :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine` + subclass stating the type of this value. In SQLAlchemy + expressions, this is usually derived automatically + from the Python type of the value itself, as well as + based on the context in which the value is used. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.literal_binds` + + """ + return sqla_compat._literal_bindparam(None, value, type_=type_) + + def get_bind(self) -> Connection: + """Return the current 'bind'. + + Under normal circumstances, this is the + :class:`~sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. + + In a SQL script context, this value is ``None``. [TODO: verify this] + + """ + return self.migration_context.impl.bind # type: ignore[return-value] + + def run_async( + self, + async_function: Callable[..., Awaitable[_T]], + *args: Any, + **kw_args: Any, + ) -> _T: + """Invoke the given asynchronous callable, passing an asynchronous + :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first + argument. + + This method allows calling async functions from within the + synchronous ``upgrade()`` or ``downgrade()`` alembic migration + method. + + The async connection passed to the callable shares the same + transaction as the connection running in the migration context. + + Any additional arg or kw_arg passed to this function are passed + to the provided async function. + + .. versionadded: 1.11 + + .. note:: + + This method can be called only when alembic is called using + an async dialect. 
+ """ + if not sqla_compat.sqla_14_18: + raise NotImplementedError("SQLAlchemy 1.4.18+ required") + sync_conn = self.get_bind() + if sync_conn is None: + raise NotImplementedError("Cannot call run_async in SQL mode") + if not sync_conn.dialect.is_async: + raise ValueError("Cannot call run_async with a sync engine") + from sqlalchemy.ext.asyncio import AsyncConnection + from sqlalchemy.util import await_only + + async_conn = AsyncConnection._retrieve_proxy_for_target(sync_conn) + return await_only(async_function(async_conn, *args, **kw_args)) + + +class Operations(AbstractOperations): + """Define high level migration operations. + + Each operation corresponds to some schema migration operation, + executed against a particular :class:`.MigrationContext` + which in turn represents connectivity to a database, + or a file output stream. + + While :class:`.Operations` is normally configured as + part of the :meth:`.EnvironmentContext.run_migrations` + method called from an ``env.py`` script, a standalone + :class:`.Operations` instance can be + made for use cases external to regular Alembic + migrations by passing in a :class:`.MigrationContext`:: + + from alembic.migration import MigrationContext + from alembic.operations import Operations + + conn = myengine.connect() + ctx = MigrationContext.configure(conn) + op = Operations(ctx) + + op.alter_column("t", "c", nullable=True) + + Note that as of 0.8, most of the methods on this class are produced + dynamically using the :meth:`.Operations.register_operation` + method. + + """ + + if TYPE_CHECKING: + # START STUB FUNCTIONS: op_cls + # ### the following stubs are generated by tools/write_pyi.py ### + # ### do not edit ### + + def add_column( + self, + table_name: str, + column: Column[Any], + *, + schema: Optional[str] = None, + ) -> None: + """Issue an "add column" instruction using the current + migration context. + + e.g.:: + + from alembic import op + from sqlalchemy import Column, String + + op.add_column("organization", Column("name", String())) + + The :meth:`.Operations.add_column` method typically corresponds + to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope + of this command, the column's name, datatype, nullability, + and optional server-generated defaults may be indicated. + + .. note:: + + With the exception of NOT NULL constraints or single-column FOREIGN + KEY constraints, other kinds of constraints such as PRIMARY KEY, + UNIQUE or CHECK constraints **cannot** be generated using this + method; for these constraints, refer to operations such as + :meth:`.Operations.create_primary_key` and + :meth:`.Operations.create_check_constraint`. In particular, the + following :class:`~sqlalchemy.schema.Column` parameters are + **ignored**: + + * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases + typically do not support an ALTER operation that can add + individual columns one at a time to an existing primary key + constraint, therefore it's less ambiguous to use the + :meth:`.Operations.create_primary_key` method, which assumes no + existing primary key constraint is present. + * :paramref:`~sqlalchemy.schema.Column.unique` - use the + :meth:`.Operations.create_unique_constraint` method + * :paramref:`~sqlalchemy.schema.Column.index` - use the + :meth:`.Operations.create_index` method + + + The provided :class:`~sqlalchemy.schema.Column` object may include a + :class:`~sqlalchemy.schema.ForeignKey` constraint directive, + referencing a remote table name. 
For this specific type of constraint, + Alembic will automatically emit a second ALTER statement in order to + add the single-column FOREIGN KEY constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column( + "organization", + Column("account_id", INTEGER, ForeignKey("accounts.id")), + ) + + The column argument passed to :meth:`.Operations.add_column` is a + :class:`~sqlalchemy.schema.Column` construct, used in the same way it's + used in SQLAlchemy. In particular, values or functions to be indicated + as producing the column's default value on the database side are + specified using the ``server_default`` parameter, and not ``default`` + which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the column add + op.add_column( + "account", + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + :param table_name: String name of the parent table. + :param column: a :class:`sqlalchemy.schema.Column` object + representing the new column. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ # noqa: E501 + ... + + def alter_column( + self, + table_name: str, + column_name: str, + *, + nullable: Optional[bool] = None, + comment: Union[str, Literal[False], None] = False, + server_default: Any = False, + new_column_name: Optional[str] = None, + type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, + existing_type: Union[ + TypeEngine[Any], Type[TypeEngine[Any]], None + ] = None, + existing_server_default: Union[ + str, bool, Identity, Computed, None + ] = False, + existing_nullable: Optional[bool] = None, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + r"""Issue an "alter column" instruction using the + current migration context. + + Generally, only that aspect of the column which + is being changed, i.e. name, type, nullability, + default, needs to be specified. Multiple changes + can also be specified at once and the backend should + "do the right thing", emitting each change either + separately or together as the backend allows. + + MySQL has special requirements here, since MySQL + cannot ALTER a column without a full specification. + When producing MySQL-compatible migration files, + it is recommended that the ``existing_type``, + ``existing_server_default``, and ``existing_nullable`` + parameters be present, if not being altered. + + Type changes which are against the SQLAlchemy + "schema" types :class:`~sqlalchemy.types.Boolean` + and :class:`~sqlalchemy.types.Enum` may also + add or drop constraints which accompany those + types on backends that don't support them natively. + The ``existing_type`` argument is + used in this case to identify and remove a previous + constraint that was bound to the type object. + + :param table_name: string name of the target table. + :param column_name: string name of the target column, + as it exists before the operation begins. + :param nullable: Optional; specify ``True`` or ``False`` + to alter the column's nullability. + :param server_default: Optional; specify a string + SQL expression, :func:`~sqlalchemy.sql.expression.text`, + or :class:`~sqlalchemy.schema.DefaultClause` to indicate + an alteration to the column's default value. 
+ Set to ``None`` to have the default removed. + :param comment: optional string text of a new comment to add to the + column. + :param new_column_name: Optional; specify a string name here to + indicate the new name within a column rename operation. + :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` + type object to specify a change to the column's type. + For SQLAlchemy types that also indicate a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), + the constraint is also generated. + :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; + currently understood by the MySQL dialect. + :param existing_type: Optional; a + :class:`~sqlalchemy.types.TypeEngine` + type object to specify the previous type. This + is required for all MySQL column alter operations that + don't otherwise specify a new type, as well as for + when nullability is being changed on a SQL Server + column. It is also used if the type is a so-called + SQLAlchemy "schema" type which may define a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, + :class:`~sqlalchemy.types.Enum`), + so that the constraint can be dropped. + :param existing_server_default: Optional; The existing + default value of the column. Required on MySQL if + an existing default is not being changed; else MySQL + removes the default. + :param existing_nullable: Optional; the existing nullability + of the column. Required on MySQL if the existing nullability + is not being changed; else MySQL sets this to NULL. + :param existing_autoincrement: Optional; the existing autoincrement + of the column. Used for MySQL's system of altering a column + that specifies ``AUTO_INCREMENT``. + :param existing_comment: string text of the existing comment on the + column to be maintained. Required on MySQL if the existing comment + on the column is not being changed. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param postgresql_using: String argument which will indicate a + SQL expression to render within the Postgresql-specific USING clause + within ALTER COLUMN. This string is taken directly as raw SQL which + must explicitly include any necessary quoting or escaping of tokens + within the expression. + + """ # noqa: E501 + ... + + def bulk_insert( + self, + table: Union[Table, TableClause], + rows: List[Dict[str, Any]], + *, + multiinsert: bool = True, + ) -> None: + """Issue a "bulk insert" operation using the current + migration context. + + This provides a means of representing an INSERT of multiple rows + which works equally well in the context of executing on a live + connection as well as that of generating a SQL script. In the + case of a SQL script, the values are rendered inline into the + statement. + + e.g.:: + + from alembic import op + from datetime import date + from sqlalchemy.sql import table, column + from sqlalchemy import String, Integer, Date + + # Create an ad-hoc table to use for the insert statement. 
+ accounts_table = table( + "account", + column("id", Integer), + column("name", String), + column("create_date", Date), + ) + + op.bulk_insert( + accounts_table, + [ + { + "id": 1, + "name": "John Smith", + "create_date": date(2010, 10, 5), + }, + { + "id": 2, + "name": "Ed Williams", + "create_date": date(2007, 5, 27), + }, + { + "id": 3, + "name": "Wendy Jones", + "create_date": date(2008, 8, 15), + }, + ], + ) + + When using --sql mode, some datatypes may not render inline + automatically, such as dates and other special types. When this + issue is present, :meth:`.Operations.inline_literal` may be used:: + + op.bulk_insert( + accounts_table, + [ + { + "id": 1, + "name": "John Smith", + "create_date": op.inline_literal("2010-10-05"), + }, + { + "id": 2, + "name": "Ed Williams", + "create_date": op.inline_literal("2007-05-27"), + }, + { + "id": 3, + "name": "Wendy Jones", + "create_date": op.inline_literal("2008-08-15"), + }, + ], + multiinsert=False, + ) + + When using :meth:`.Operations.inline_literal` in conjunction with + :meth:`.Operations.bulk_insert`, in order for the statement to work + in "online" (e.g. non --sql) mode, the + :paramref:`~.Operations.bulk_insert.multiinsert` + flag should be set to ``False``, which will have the effect of + individual INSERT statements being emitted to the database, each + with a distinct VALUES clause, so that the "inline" values can + still be rendered, rather than attempting to pass the values + as bound parameters. + + :param table: a table object which represents the target of the INSERT. + + :param rows: a list of dictionaries indicating rows. + + :param multiinsert: when at its default of True and --sql mode is not + enabled, the INSERT statement will be executed using + "executemany()" style, where all elements in the list of + dictionaries are passed as bound parameters in a single + list. Setting this to False results in individual INSERT + statements being emitted per parameter set, and is needed + in those cases where non-literal values are present in the + parameter sets. + + """ # noqa: E501 + ... + + def create_check_constraint( + self, + constraint_name: Optional[str], + table_name: str, + condition: Union[str, ColumnElement[bool], TextClause], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + """Issue a "create check constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + from sqlalchemy.sql import column, func + + op.create_check_constraint( + "ck_user_name_len", + "user", + func.len(column("name")) > 5, + ) + + CHECK constraints are usually against a SQL expression, so ad-hoc + table metadata is usually needed. The function will convert the given + arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound + to an anonymous table in order to emit the CREATE statement. + + :param name: Name of the check constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param condition: SQL expression that's the condition of the + constraint. Can be a string or SQLAlchemy expression language + structure. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. 
+ :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ # noqa: E501 + ... + + def create_exclude_constraint( + self, + constraint_name: str, + table_name: str, + *elements: Any, + **kw: Any, + ) -> Optional[Table]: + """Issue an alter to create an EXCLUDE constraint using the + current migration context. + + .. note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + e.g.:: + + from alembic import op + + op.create_exclude_constraint( + "user_excl", + "user", + ("period", "&&"), + ("group", "="), + where=("group != 'some group'"), + ) + + Note that the expressions work the same way as that of + the ``ExcludeConstraint`` object itself; if plain strings are + passed, quoting rules must be applied manually. + + :param name: Name of the constraint. + :param table_name: String name of the source table. + :param elements: exclude conditions. + :param where: SQL expression or SQL string with optional WHERE + clause. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. + + """ # noqa: E501 + ... + + def create_foreign_key( + self, + constraint_name: Optional[str], + source_table: str, + referent_table: str, + local_cols: List[str], + remote_cols: List[str], + *, + onupdate: Optional[str] = None, + ondelete: Optional[str] = None, + deferrable: Optional[bool] = None, + initially: Optional[str] = None, + match: Optional[str] = None, + source_schema: Optional[str] = None, + referent_schema: Optional[str] = None, + **dialect_kw: Any, + ) -> None: + """Issue a "create foreign key" instruction using the + current migration context. + + e.g.:: + + from alembic import op + + op.create_foreign_key( + "fk_user_address", + "address", + "user", + ["user_id"], + ["id"], + ) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.ForeignKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param constraint_name: Name of the foreign key constraint. The name + is necessary so that an ALTER statement can be emitted. For setups + that use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source_table: String name of the source table. + :param referent_table: String name of the destination table. + :param local_cols: a list of string column names in the + source table. + :param remote_cols: a list of string column names in the + remote table. + :param onupdate: Optional string. If set, emit ON UPDATE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param ondelete: Optional string. 
If set, emit ON DELETE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param deferrable: optional bool. If set, emit DEFERRABLE or NOT + DEFERRABLE when issuing DDL for this constraint. + :param source_schema: Optional schema name of the source table. + :param referent_schema: Optional schema name of the destination table. + + """ # noqa: E501 + ... + + def create_index( + self, + index_name: Optional[str], + table_name: str, + columns: Sequence[Union[str, TextClause, Function[Any]]], + *, + schema: Optional[str] = None, + unique: bool = False, + if_not_exists: Optional[bool] = None, + **kw: Any, + ) -> None: + r"""Issue a "create index" instruction using the current + migration context. + + e.g.:: + + from alembic import op + + op.create_index("ik_test", "t1", ["foo", "bar"]) + + Functional indexes can be produced by using the + :func:`sqlalchemy.sql.expression.text` construct:: + + from alembic import op + from sqlalchemy import text + + op.create_index("ik_test", "t1", [text("lower(foo)")]) + + :param index_name: name of the index. + :param table_name: name of the owning table. + :param columns: a list consisting of string column names and/or + :func:`~sqlalchemy.sql.expression.text` constructs. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param unique: If True, create a unique index. + + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 + + :param \**kw: Additional keyword arguments not mentioned above are + dialect specific, and passed in the form + ``<dialectname>_<argname>``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. + + """ # noqa: E501 + ... + + def create_primary_key( + self, + constraint_name: Optional[str], + table_name: str, + columns: List[str], + *, + schema: Optional[str] = None, + ) -> None: + """Issue a "create primary key" instruction using the current + migration context. + + e.g.:: + + from alembic import op + + op.create_primary_key("pk_my_table", "my_table", ["id", "version"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.PrimaryKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param constraint_name: Name of the primary key constraint. The name + is necessary so that an ALTER statement can be emitted. 
For setups + that use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions` + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the target table. + :param columns: a list of string column names to be applied to the + primary key constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ # noqa: E501 + ... + + def create_table( + self, table_name: str, *columns: SchemaItem, **kw: Any + ) -> Table: + r"""Issue a "create table" instruction using the current migration + context. + + This directive receives an argument list similar to that of the + traditional :class:`sqlalchemy.schema.Table` construct, but without the + metadata:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + op.create_table( + "account", + Column("id", INTEGER, primary_key=True), + Column("name", VARCHAR(50), nullable=False), + Column("description", NVARCHAR(200)), + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + Note that :meth:`.create_table` accepts + :class:`~sqlalchemy.schema.Column` + constructs directly from the SQLAlchemy library. In particular, + default values to be created on the database side are + specified using the ``server_default`` parameter, and not + ``default`` which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the "timestamp" column + op.create_table( + "account", + Column("id", INTEGER, primary_key=True), + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + The function also returns a newly created + :class:`~sqlalchemy.schema.Table` object, corresponding to the table + specification given, which is suitable for + immediate SQL operations, in particular + :meth:`.Operations.bulk_insert`:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + account_table = op.create_table( + "account", + Column("id", INTEGER, primary_key=True), + Column("name", VARCHAR(50), nullable=False), + Column("description", NVARCHAR(200)), + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + op.bulk_insert( + account_table, + [ + {"name": "A1", "description": "account 1"}, + {"name": "A2", "description": "account 2"}, + ], + ) + + :param table_name: Name of the table + :param \*columns: collection of :class:`~sqlalchemy.schema.Column` + objects within + the table, as well as optional :class:`~sqlalchemy.schema.Constraint` + objects + and :class:`~.sqlalchemy.schema.Index` objects. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + :return: the :class:`~sqlalchemy.schema.Table` object corresponding + to the parameters given. + + """ # noqa: E501 + ... 
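+        # A minimal sketch of the extensibility hooks defined earlier in this
+        # module (``register_operation()`` / ``implementation_for()``), showing
+        # how stub methods like the ones in this section come to exist; the
+        # ``CreateSequenceOp`` operation, its ``create_sequence`` name, and the
+        # SQL it emits are illustrative assumptions, not part of this file:
+        #
+        #     from alembic.operations import MigrateOperation, Operations
+        #
+        #     @Operations.register_operation("create_sequence")
+        #     class CreateSequenceOp(MigrateOperation):
+        #         def __init__(self, sequence_name, schema=None):
+        #             self.sequence_name = sequence_name
+        #             self.schema = schema
+        #
+        #         @classmethod
+        #         def create_sequence(cls, operations, sequence_name, **kw):
+        #             # note the (cls, operations, ...) signature that
+        #             # register_operation() asserts on above
+        #             return operations.invoke(cls(sequence_name, **kw))
+        #
+        #     @Operations.implementation_for(CreateSequenceOp)
+        #     def create_sequence(operations, operation):
+        #         operations.execute(
+        #             "CREATE SEQUENCE %s" % operation.sequence_name
+        #         )
+        #
+        # Once registered, migration scripts could call
+        # ``op.create_sequence("my_seq")`` like any built-in directive.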
+ + def create_table_comment( + self, + table_name: str, + comment: Optional[str], + *, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + ) -> None: + """Emit a COMMENT ON operation to set the comment for a table. + + :param table_name: string name of the target table. + :param comment: string value of the comment being registered against + the specified table. + :param existing_comment: String value of a comment + already registered on the specified table, used within autogenerate + so that the operation is reversible, but not required for direct + use. + + .. seealso:: + + :meth:`.Operations.drop_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ # noqa: E501 + ... + + def create_unique_constraint( + self, + constraint_name: Optional[str], + table_name: str, + columns: Sequence[str], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> Any: + """Issue a "create unique constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + op.create_unique_constraint("uq_user_name", "user", ["name"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.UniqueConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the unique constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param columns: a list of string column names in the + source table. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ # noqa: E501 + ... + + def drop_column( + self, + table_name: str, + column_name: str, + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + """Issue a "drop column" instruction using the current + migration context. + + e.g.:: + + drop_column("organization", "account_id") + + :param table_name: name of table + :param column_name: name of column + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param mssql_drop_check: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop the CHECK constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.check_constraints, + then exec's a separate DROP CONSTRAINT for that constraint. + :param mssql_drop_default: Optional boolean. 
When ``True``, on + Microsoft SQL Server only, first + drop the DEFAULT constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.default_constraints, + then exec's a separate DROP CONSTRAINT for that default. + :param mssql_drop_foreign_key: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop a single FOREIGN KEY constraint on the column using a + SQL-script-compatible + block that selects into a @variable from + sys.foreign_keys/sys.foreign_key_columns, + then exec's a separate DROP CONSTRAINT for that default. Only + works if the column has exactly one FK constraint which refers to + it, at the moment. + + """ # noqa: E501 + ... + + def drop_constraint( + self, + constraint_name: str, + table_name: str, + type_: Optional[str] = None, + *, + schema: Optional[str] = None, + ) -> None: + r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. + + :param constraint_name: name of the constraint. + :param table_name: table name. + :param type\_: optional, required on MySQL. can be + 'foreignkey', 'primary', 'unique', or 'check'. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ # noqa: E501 + ... + + def drop_index( + self, + index_name: str, + table_name: Optional[str] = None, + *, + schema: Optional[str] = None, + if_exists: Optional[bool] = None, + **kw: Any, + ) -> None: + r"""Issue a "drop index" instruction using the current + migration context. + + e.g.:: + + drop_index("accounts") + + :param index_name: name of the index. + :param table_name: name of the owning table. Some + backends such as Microsoft SQL Server require this. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + + :param \**kw: Additional keyword arguments not mentioned above are + dialect specific, and passed in the form + ``<dialectname>_<argname>``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. + + """ # noqa: E501 + ... + + def drop_table( + self, table_name: str, *, schema: Optional[str] = None, **kw: Any + ) -> None: + r"""Issue a "drop table" instruction using the current + migration context. + + + e.g.:: + + drop_table("accounts") + + :param table_name: Name of the table + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + """ # noqa: E501 + ... + + def drop_table_comment( + self, + table_name: str, + *, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + ) -> None: + """Issue a "drop table comment" operation to + remove an existing comment set on a table. + + :param table_name: string name of the target table. + :param existing_comment: An optional string value of a comment already + registered on the specified table. + + .. 
seealso:: + + :meth:`.Operations.create_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ # noqa: E501 + ... + + def execute( + self, + sqltext: Union[Executable, str], + *, + execution_options: Optional[dict[str, Any]] = None, + ) -> None: + r"""Execute the given SQL using the current migration context. + + The given SQL can be a plain string, e.g.:: + + op.execute("INSERT INTO table (foo) VALUES ('some value')") + + Or it can be any kind of Core SQL Expression construct, such as + below where we use an update construct:: + + from sqlalchemy.sql import table, column + from sqlalchemy import String + from alembic import op + + account = table("account", column("name", String)) + op.execute( + account.update() + .where(account.c.name == op.inline_literal("account 1")) + .values({"name": op.inline_literal("account 2")}) + ) + + Above, we made use of the SQLAlchemy + :func:`sqlalchemy.sql.expression.table` and + :func:`sqlalchemy.sql.expression.column` constructs to make a brief, + ad-hoc table construct just for our UPDATE statement. A full + :class:`~sqlalchemy.schema.Table` construct of course works perfectly + fine as well, though note it's a recommended practice to at least + ensure the definition of a table is self-contained within the migration + script, rather than imported from a module that may break compatibility + with older migrations. + + In a SQL script context, the statement is emitted directly to the + output stream. There is *no* return result, however, as this + function is oriented towards generating a change script + that can run in "offline" mode. Additionally, parameterized + statements are discouraged here, as they *will not work* in offline + mode. Above, we use :meth:`.inline_literal` where parameters are + to be used. + + For full interaction with a connected database where parameters can + also be used normally, use the "bind" available from the context:: + + from alembic import op + + connection = op.get_bind() + + connection.execute( + account.update() + .where(account.c.name == "account 1") + .values({"name": "account 2"}) + ) + + Additionally, when passing the statement as a plain string, it is first + coerced into a :func:`sqlalchemy.sql.expression.text` construct + before being passed along. In the less likely case that the + literal SQL string contains a colon, it must be escaped with a + backslash, as:: + + op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')") + + + :param sqltext: Any legal SQLAlchemy expression, including: + + * a string + * a :func:`sqlalchemy.sql.expression.text` construct. + * a :func:`sqlalchemy.sql.expression.insert` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. + * Any "executable" described in SQLAlchemy Core documentation, + noting that no result set is returned. + + .. note:: when passing a plain string, the statement is coerced into + a :func:`sqlalchemy.sql.expression.text` construct. This construct + considers symbols with colons, e.g. ``:foo`` to be bound parameters. + To avoid this, ensure that colon symbols are escaped, e.g. + ``\:foo``. + + :param execution_options: Optional dictionary of + execution options, will be passed to + :meth:`sqlalchemy.engine.Connection.execution_options`. + """ # noqa: E501 + ... + + def rename_table( + self, + old_table_name: str, + new_table_name: str, + *, + schema: Optional[str] = None, + ) -> None: + """Emit an ALTER TABLE to rename a table. + + :param old_table_name: old name. 
+ :param new_table_name: new name. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ # noqa: E501 + ... + + # END STUB FUNCTIONS: op_cls + + +class BatchOperations(AbstractOperations): + """Modifies the interface :class:`.Operations` for batch mode. + + This basically omits the ``table_name`` and ``schema`` parameters + from associated methods, as these are a given when running under batch + mode. + + .. seealso:: + + :meth:`.Operations.batch_alter_table` + + Note that as of 0.8, most of the methods on this class are produced + dynamically using the :meth:`.Operations.register_operation` + method. + + """ + + impl: BatchOperationsImpl + + def _noop(self, operation: Any) -> NoReturn: + raise NotImplementedError( + "The %s method does not apply to a batch table alter operation." + % operation + ) + + if TYPE_CHECKING: + # START STUB FUNCTIONS: batch_op + # ### the following stubs are generated by tools/write_pyi.py ### + # ### do not edit ### + + def add_column( + self, + column: Column[Any], + *, + insert_before: Optional[str] = None, + insert_after: Optional[str] = None, + ) -> None: + """Issue an "add column" instruction using the current + batch migration context. + + .. seealso:: + + :meth:`.Operations.add_column` + + """ # noqa: E501 + ... + + def alter_column( + self, + column_name: str, + *, + nullable: Optional[bool] = None, + comment: Union[str, Literal[False], None] = False, + server_default: Any = False, + new_column_name: Optional[str] = None, + type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, + existing_type: Union[ + TypeEngine[Any], Type[TypeEngine[Any]], None + ] = None, + existing_server_default: Union[ + str, bool, Identity, Computed, None + ] = False, + existing_nullable: Optional[bool] = None, + existing_comment: Optional[str] = None, + insert_before: Optional[str] = None, + insert_after: Optional[str] = None, + **kw: Any, + ) -> None: + """Issue an "alter column" instruction using the current + batch migration context. + + Parameters are the same as that of :meth:`.Operations.alter_column`, + as well as the following option(s): + + :param insert_before: String name of an existing column which this + column should be placed before, when creating the new table. + + :param insert_after: String name of an existing column which this + column should be placed after, when creating the new table. If + both :paramref:`.BatchOperations.alter_column.insert_before` + and :paramref:`.BatchOperations.alter_column.insert_after` are + omitted, the column is inserted after the last existing column + in the table. + + .. seealso:: + + :meth:`.Operations.alter_column` + + + """ # noqa: E501 + ... + + def create_check_constraint( + self, + constraint_name: str, + condition: Union[str, ColumnElement[bool], TextClause], + **kw: Any, + ) -> None: + """Issue a "create check constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_check_constraint` + + """ # noqa: E501 + ... + + def create_exclude_constraint( + self, constraint_name: str, *elements: Any, **kw: Any + ) -> Optional[Table]: + """Issue a "create exclude constraint" instruction using the + current batch migration context. + + .. 
note:: This method is Postgresql specific, and additionally + requires at least SQLAlchemy 1.0. + + .. seealso:: + + :meth:`.Operations.create_exclude_constraint` + + """ # noqa: E501 + ... + + def create_foreign_key( + self, + constraint_name: str, + referent_table: str, + local_cols: List[str], + remote_cols: List[str], + *, + referent_schema: Optional[str] = None, + onupdate: Optional[str] = None, + ondelete: Optional[str] = None, + deferrable: Optional[bool] = None, + initially: Optional[str] = None, + match: Optional[str] = None, + **dialect_kw: Any, + ) -> None: + """Issue a "create foreign key" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``source_schema`` + arguments from the call. + + e.g.:: + + with batch_alter_table("address") as batch_op: + batch_op.create_foreign_key( + "fk_user_address", + "user", + ["user_id"], + ["id"], + ) + + .. seealso:: + + :meth:`.Operations.create_foreign_key` + + """ # noqa: E501 + ... + + def create_index( + self, index_name: str, columns: List[str], **kw: Any + ) -> None: + """Issue a "create index" instruction using the + current batch migration context. + + .. seealso:: + + :meth:`.Operations.create_index` + + """ # noqa: E501 + ... + + def create_primary_key( + self, constraint_name: str, columns: List[str] + ) -> None: + """Issue a "create primary key" instruction using the + current batch migration context. + + The batch form of this call omits the ``table_name`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_primary_key` + + """ # noqa: E501 + ... + + def create_table_comment( + self, + comment: Optional[str], + *, + existing_comment: Optional[str] = None, + ) -> None: + """Emit a COMMENT ON operation to set the comment for a table + using the current batch migration context. + + :param comment: string value of the comment being registered against + the specified table. + :param existing_comment: String value of a comment + already registered on the specified table, used within autogenerate + so that the operation is reversible, but not required for direct + use. + + """ # noqa: E501 + ... + + def create_unique_constraint( + self, constraint_name: str, columns: Sequence[str], **kw: Any + ) -> Any: + """Issue a "create unique constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_unique_constraint` + + """ # noqa: E501 + ... + + def drop_column(self, column_name: str, **kw: Any) -> None: + """Issue a "drop column" instruction using the current + batch migration context. + + .. seealso:: + + :meth:`.Operations.drop_column` + + """ # noqa: E501 + ... + + def drop_constraint( + self, constraint_name: str, type_: Optional[str] = None + ) -> None: + """Issue a "drop constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``table_name`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.drop_constraint` + + """ # noqa: E501 + ... + + def drop_index(self, index_name: str, **kw: Any) -> None: + """Issue a "drop index" instruction using the + current batch migration context. + + .. seealso:: + + :meth:`.Operations.drop_index` + + """ # noqa: E501 + ... 
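+        # Taken together, the batch stubs above are only reachable through the
+        # ``Operations.batch_alter_table()`` context manager defined earlier in
+        # this module; a short sketch exercising several of them (the table,
+        # constraint, and index names are illustrative assumptions, and
+        # ``import sqlalchemy as sa`` is assumed):
+        #
+        #     with op.batch_alter_table("account") as batch_op:
+        #         batch_op.alter_column(
+        #             "name", existing_type=sa.String(50), nullable=False
+        #         )
+        #         batch_op.drop_constraint("uq_account_email", type_="unique")
+        #         batch_op.drop_index("ix_account_name")
+        #         batch_op.drop_column("legacy_flag")
+        #
+        # On SQLite these directives are queued and replayed through the
+        # "move and copy" recreate path implemented in batch.py below.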
+ + def drop_table_comment( + self, *, existing_comment: Optional[str] = None + ) -> None: + """Issue a "drop table comment" operation to + remove an existing comment set on a table using the current + batch operations context. + + :param existing_comment: An optional string value of a comment already + registered on the specified table. + + """ # noqa: E501 + ... + + def execute( + self, + sqltext: Union[Executable, str], + *, + execution_options: Optional[dict[str, Any]] = None, + ) -> None: + """Execute the given SQL using the current migration context. + + .. seealso:: + + :meth:`.Operations.execute` + + """ # noqa: E501 + ... + + # END STUB FUNCTIONS: batch_op diff --git a/venv/lib/python3.12/site-packages/alembic/operations/batch.py b/venv/lib/python3.12/site-packages/alembic/operations/batch.py new file mode 100644 index 0000000..fd7ab99 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/operations/batch.py @@ -0,0 +1,717 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import CheckConstraint +from sqlalchemy import Column +from sqlalchemy import ForeignKeyConstraint +from sqlalchemy import Index +from sqlalchemy import MetaData +from sqlalchemy import PrimaryKeyConstraint +from sqlalchemy import schema as sql_schema +from sqlalchemy import Table +from sqlalchemy import types as sqltypes +from sqlalchemy.sql.schema import SchemaEventTarget +from sqlalchemy.util import OrderedDict +from sqlalchemy.util import topological + +from ..util import exc +from ..util.sqla_compat import _columns_for_constraint +from ..util.sqla_compat import _copy +from ..util.sqla_compat import _copy_expression +from ..util.sqla_compat import _ensure_scope_for_ddl +from ..util.sqla_compat import _fk_is_self_referential +from ..util.sqla_compat import _idx_table_bound_expressions +from ..util.sqla_compat import _insert_inline +from ..util.sqla_compat import _is_type_bound +from ..util.sqla_compat import _remove_column_from_collection +from ..util.sqla_compat import _resolve_for_variant +from ..util.sqla_compat import _select +from ..util.sqla_compat import constraint_name_defined +from ..util.sqla_compat import constraint_name_string + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy.engine import Dialect + from sqlalchemy.sql.elements import ColumnClause + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.functions import Function + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.type_api import TypeEngine + + from ..ddl.impl import DefaultImpl + + +class BatchOperationsImpl: + def __init__( + self, + operations, + table_name, + schema, + recreate, + copy_from, + table_args, + table_kwargs, + reflect_args, + reflect_kwargs, + naming_convention, + partial_reordering, + ): + self.operations = operations + self.table_name = table_name + self.schema = schema + if recreate not in ("auto", "always", "never"): + raise ValueError( + "recreate may be one of 'auto', 'always', or 'never'." 
+ ) + self.recreate = recreate + self.copy_from = copy_from + self.table_args = table_args + self.table_kwargs = dict(table_kwargs) + self.reflect_args = reflect_args + self.reflect_kwargs = dict(reflect_kwargs) + self.reflect_kwargs.setdefault( + "listeners", list(self.reflect_kwargs.get("listeners", ())) + ) + self.reflect_kwargs["listeners"].append( + ("column_reflect", operations.impl.autogen_column_reflect) + ) + self.naming_convention = naming_convention + self.partial_reordering = partial_reordering + self.batch = [] + + @property + def dialect(self) -> Dialect: + return self.operations.impl.dialect + + @property + def impl(self) -> DefaultImpl: + return self.operations.impl + + def _should_recreate(self) -> bool: + if self.recreate == "auto": + return self.operations.impl.requires_recreate_in_batch(self) + elif self.recreate == "always": + return True + else: + return False + + def flush(self) -> None: + should_recreate = self._should_recreate() + + with _ensure_scope_for_ddl(self.impl.connection): + if not should_recreate: + for opname, arg, kw in self.batch: + fn = getattr(self.operations.impl, opname) + fn(*arg, **kw) + else: + if self.naming_convention: + m1 = MetaData(naming_convention=self.naming_convention) + else: + m1 = MetaData() + + if self.copy_from is not None: + existing_table = self.copy_from + reflected = False + else: + if self.operations.migration_context.as_sql: + raise exc.CommandError( + f"This operation cannot proceed in --sql mode; " + f"batch mode with dialect " + f"{self.operations.migration_context.dialect.name} " # noqa: E501 + f"requires a live database connection with which " + f'to reflect the table "{self.table_name}". ' + f"To generate a batch SQL migration script using " + "table " + '"move and copy", a complete Table object ' + f'should be passed to the "copy_from" argument ' + "of the batch_alter_table() method so that table " + "reflection can be skipped." 
+ ) + + existing_table = Table( + self.table_name, + m1, + schema=self.schema, + autoload_with=self.operations.get_bind(), + *self.reflect_args, + **self.reflect_kwargs, + ) + reflected = True + + batch_impl = ApplyBatchImpl( + self.impl, + existing_table, + self.table_args, + self.table_kwargs, + reflected, + partial_reordering=self.partial_reordering, + ) + for opname, arg, kw in self.batch: + fn = getattr(batch_impl, opname) + fn(*arg, **kw) + + batch_impl._create(self.impl) + + def alter_column(self, *arg, **kw) -> None: + self.batch.append(("alter_column", arg, kw)) + + def add_column(self, *arg, **kw) -> None: + if ( + "insert_before" in kw or "insert_after" in kw + ) and not self._should_recreate(): + raise exc.CommandError( + "Can't specify insert_before or insert_after when using " + "ALTER; please specify recreate='always'" + ) + self.batch.append(("add_column", arg, kw)) + + def drop_column(self, *arg, **kw) -> None: + self.batch.append(("drop_column", arg, kw)) + + def add_constraint(self, const: Constraint) -> None: + self.batch.append(("add_constraint", (const,), {})) + + def drop_constraint(self, const: Constraint) -> None: + self.batch.append(("drop_constraint", (const,), {})) + + def rename_table(self, *arg, **kw): + self.batch.append(("rename_table", arg, kw)) + + def create_index(self, idx: Index, **kw: Any) -> None: + self.batch.append(("create_index", (idx,), kw)) + + def drop_index(self, idx: Index, **kw: Any) -> None: + self.batch.append(("drop_index", (idx,), kw)) + + def create_table_comment(self, table): + self.batch.append(("create_table_comment", (table,), {})) + + def drop_table_comment(self, table): + self.batch.append(("drop_table_comment", (table,), {})) + + def create_table(self, table): + raise NotImplementedError("Can't create table in batch mode") + + def drop_table(self, table): + raise NotImplementedError("Can't drop table in batch mode") + + def create_column_comment(self, column): + self.batch.append(("create_column_comment", (column,), {})) + + +class ApplyBatchImpl: + def __init__( + self, + impl: DefaultImpl, + table: Table, + table_args: tuple, + table_kwargs: Dict[str, Any], + reflected: bool, + partial_reordering: tuple = (), + ) -> None: + self.impl = impl + self.table = table # this is a Table object + self.table_args = table_args + self.table_kwargs = table_kwargs + self.temp_table_name = self._calc_temp_name(table.name) + self.new_table: Optional[Table] = None + + self.partial_reordering = partial_reordering # tuple of tuples + self.add_col_ordering: Tuple[ + Tuple[str, str], ... 
+ ] = () # tuple of tuples + + self.column_transfers = OrderedDict( + (c.name, {"expr": c}) for c in self.table.c + ) + self.existing_ordering = list(self.column_transfers) + + self.reflected = reflected + self._grab_table_elements() + + @classmethod + def _calc_temp_name(cls, tablename: Union[quoted_name, str]) -> str: + return ("_alembic_tmp_%s" % tablename)[0:50] + + def _grab_table_elements(self) -> None: + schema = self.table.schema + self.columns: Dict[str, Column[Any]] = OrderedDict() + for c in self.table.c: + c_copy = _copy(c, schema=schema) + c_copy.unique = c_copy.index = False + # ensure that the type object was copied, + # as we may need to modify it in-place + if isinstance(c.type, SchemaEventTarget): + assert c_copy.type is not c.type + self.columns[c.name] = c_copy + self.named_constraints: Dict[str, Constraint] = {} + self.unnamed_constraints = [] + self.col_named_constraints = {} + self.indexes: Dict[str, Index] = {} + self.new_indexes: Dict[str, Index] = {} + + for const in self.table.constraints: + if _is_type_bound(const): + continue + elif ( + self.reflected + and isinstance(const, CheckConstraint) + and not const.name + ): + # TODO: we are skipping unnamed reflected CheckConstraint + # because + # we have no way to determine _is_type_bound() for these. + pass + elif constraint_name_string(const.name): + self.named_constraints[const.name] = const + else: + self.unnamed_constraints.append(const) + + if not self.reflected: + for col in self.table.c: + for const in col.constraints: + if const.name: + self.col_named_constraints[const.name] = (col, const) + + for idx in self.table.indexes: + self.indexes[idx.name] = idx # type: ignore[index] + + for k in self.table.kwargs: + self.table_kwargs.setdefault(k, self.table.kwargs[k]) + + def _adjust_self_columns_for_partial_reordering(self) -> None: + pairs = set() + + col_by_idx = list(self.columns) + + if self.partial_reordering: + for tuple_ in self.partial_reordering: + for index, elem in enumerate(tuple_): + if index > 0: + pairs.add((tuple_[index - 1], elem)) + else: + for index, elem in enumerate(self.existing_ordering): + if index > 0: + pairs.add((col_by_idx[index - 1], elem)) + + pairs.update(self.add_col_ordering) + + # this can happen if some columns were dropped and not removed + # from existing_ordering. 
this should be prevented already, but + # conservatively making sure this didn't happen + pairs_list = [p for p in pairs if p[0] != p[1]] + + sorted_ = list( + topological.sort(pairs_list, col_by_idx, deterministic_order=True) + ) + self.columns = OrderedDict((k, self.columns[k]) for k in sorted_) + self.column_transfers = OrderedDict( + (k, self.column_transfers[k]) for k in sorted_ + ) + + def _transfer_elements_to_new_table(self) -> None: + assert self.new_table is None, "Can only create new table once" + + m = MetaData() + schema = self.table.schema + + if self.partial_reordering or self.add_col_ordering: + self._adjust_self_columns_for_partial_reordering() + + self.new_table = new_table = Table( + self.temp_table_name, + m, + *(list(self.columns.values()) + list(self.table_args)), + schema=schema, + **self.table_kwargs, + ) + + for const in ( + list(self.named_constraints.values()) + self.unnamed_constraints + ): + const_columns = {c.key for c in _columns_for_constraint(const)} + + if not const_columns.issubset(self.column_transfers): + continue + + const_copy: Constraint + if isinstance(const, ForeignKeyConstraint): + if _fk_is_self_referential(const): + # for self-referential constraint, refer to the + # *original* table name, and not _alembic_batch_temp. + # This is consistent with how we're handling + # FK constraints from other tables; we assume SQLite + # no foreign keys just keeps the names unchanged, so + # when we rename back, they match again. + const_copy = _copy( + const, schema=schema, target_table=self.table + ) + else: + # "target_table" for ForeignKeyConstraint.copy() is + # only used if the FK is detected as being + # self-referential, which we are handling above. + const_copy = _copy(const, schema=schema) + else: + const_copy = _copy( + const, schema=schema, target_table=new_table + ) + if isinstance(const, ForeignKeyConstraint): + self._setup_referent(m, const) + new_table.append_constraint(const_copy) + + def _gather_indexes_from_both_tables(self) -> List[Index]: + assert self.new_table is not None + idx: List[Index] = [] + + for idx_existing in self.indexes.values(): + # this is a lift-and-move from Table.to_metadata + + if idx_existing._column_flag: + continue + + idx_copy = Index( + idx_existing.name, + unique=idx_existing.unique, + *[ + _copy_expression(expr, self.new_table) + for expr in _idx_table_bound_expressions(idx_existing) + ], + _table=self.new_table, + **idx_existing.kwargs, + ) + idx.append(idx_copy) + + for index in self.new_indexes.values(): + idx.append( + Index( + index.name, + unique=index.unique, + *[self.new_table.c[col] for col in index.columns.keys()], + **index.kwargs, + ) + ) + return idx + + def _setup_referent( + self, metadata: MetaData, constraint: ForeignKeyConstraint + ) -> None: + spec = constraint.elements[0]._get_colspec() + parts = spec.split(".") + tname = parts[-2] + if len(parts) == 3: + referent_schema = parts[0] + else: + referent_schema = None + + if tname != self.temp_table_name: + key = sql_schema._get_table_key(tname, referent_schema) + + def colspec(elem: Any): + return elem._get_colspec() + + if key in metadata.tables: + t = metadata.tables[key] + for elem in constraint.elements: + colname = colspec(elem).split(".")[-1] + if colname not in t.c: + t.append_column(Column(colname, sqltypes.NULLTYPE)) + else: + Table( + tname, + metadata, + *[ + Column(n, sqltypes.NULLTYPE) + for n in [ + colspec(elem).split(".")[-1] + for elem in constraint.elements + ] + ], + schema=referent_schema, + ) + + def _create(self, op_impl: 
DefaultImpl) -> None: + self._transfer_elements_to_new_table() + + op_impl.prep_table_for_batch(self, self.table) + assert self.new_table is not None + op_impl.create_table(self.new_table) + + try: + op_impl._exec( + _insert_inline(self.new_table).from_select( + list( + k + for k, transfer in self.column_transfers.items() + if "expr" in transfer + ), + _select( + *[ + transfer["expr"] + for transfer in self.column_transfers.values() + if "expr" in transfer + ] + ), + ) + ) + op_impl.drop_table(self.table) + except: + op_impl.drop_table(self.new_table) + raise + else: + op_impl.rename_table( + self.temp_table_name, self.table.name, schema=self.table.schema + ) + self.new_table.name = self.table.name + try: + for idx in self._gather_indexes_from_both_tables(): + op_impl.create_index(idx) + finally: + self.new_table.name = self.temp_table_name + + def alter_column( + self, + table_name: str, + column_name: str, + nullable: Optional[bool] = None, + server_default: Optional[Union[Function[Any], str, bool]] = False, + name: Optional[str] = None, + type_: Optional[TypeEngine] = None, + autoincrement: Optional[Union[bool, Literal["auto"]]] = None, + comment: Union[str, Literal[False]] = False, + **kw, + ) -> None: + existing = self.columns[column_name] + existing_transfer: Dict[str, Any] = self.column_transfers[column_name] + if name is not None and name != column_name: + # note that we don't change '.key' - we keep referring + # to the renamed column by its old key in _create(). neat! + existing.name = name + existing_transfer["name"] = name + + existing_type = kw.get("existing_type", None) + if existing_type: + resolved_existing_type = _resolve_for_variant( + kw["existing_type"], self.impl.dialect + ) + + # pop named constraints for Boolean/Enum for rename + if ( + isinstance(resolved_existing_type, SchemaEventTarget) + and resolved_existing_type.name # type:ignore[attr-defined] # noqa E501 + ): + self.named_constraints.pop( + resolved_existing_type.name, # type:ignore[attr-defined] # noqa E501 + None, + ) + + if type_ is not None: + type_ = sqltypes.to_instance(type_) + # old type is being discarded so turn off eventing + # rules. Alternatively we can + # erase the events set up by this type, but this is simpler. 
+ # we also ignore the drop_constraint that will come here from + # Operations.implementation_for(alter_column) + + if isinstance(existing.type, SchemaEventTarget): + existing.type._create_events = ( # type:ignore[attr-defined] + existing.type.create_constraint # type:ignore[attr-defined] # noqa + ) = False + + self.impl.cast_for_batch_migrate( + existing, existing_transfer, type_ + ) + + existing.type = type_ + + # we *dont* however set events for the new type, because + # alter_column is invoked from + # Operations.implementation_for(alter_column) which already + # will emit an add_constraint() + + if nullable is not None: + existing.nullable = nullable + if server_default is not False: + if server_default is None: + existing.server_default = None + else: + sql_schema.DefaultClause( + server_default # type: ignore[arg-type] + )._set_parent(existing) + if autoincrement is not None: + existing.autoincrement = bool(autoincrement) + + if comment is not False: + existing.comment = comment + + def _setup_dependencies_for_add_column( + self, + colname: str, + insert_before: Optional[str], + insert_after: Optional[str], + ) -> None: + index_cols = self.existing_ordering + col_indexes = {name: i for i, name in enumerate(index_cols)} + + if not self.partial_reordering: + if insert_after: + if not insert_before: + if insert_after in col_indexes: + # insert after an existing column + idx = col_indexes[insert_after] + 1 + if idx < len(index_cols): + insert_before = index_cols[idx] + else: + # insert after a column that is also new + insert_before = dict(self.add_col_ordering)[ + insert_after + ] + if insert_before: + if not insert_after: + if insert_before in col_indexes: + # insert before an existing column + idx = col_indexes[insert_before] - 1 + if idx >= 0: + insert_after = index_cols[idx] + else: + # insert before a column that is also new + insert_after = { + b: a for a, b in self.add_col_ordering + }[insert_before] + + if insert_before: + self.add_col_ordering += ((colname, insert_before),) + if insert_after: + self.add_col_ordering += ((insert_after, colname),) + + if ( + not self.partial_reordering + and not insert_before + and not insert_after + and col_indexes + ): + self.add_col_ordering += ((index_cols[-1], colname),) + + def add_column( + self, + table_name: str, + column: Column[Any], + insert_before: Optional[str] = None, + insert_after: Optional[str] = None, + **kw, + ) -> None: + self._setup_dependencies_for_add_column( + column.name, insert_before, insert_after + ) + # we copy the column because operations.add_column() + # gives us a Column that is part of a Table already. 
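+        # (illustrative sketch: a hypothetical public-API call that lands
+        # in this method would be
+        #     with op.batch_alter_table("account") as batch_op:
+        #         batch_op.add_column(
+        #             sa.Column("age", sa.Integer()), insert_after="id"
+        #         )
+        # where "account", "age", and "id" are placeholder names)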
+ self.columns[column.name] = _copy(column, schema=self.table.schema) + self.column_transfers[column.name] = {} + + def drop_column( + self, + table_name: str, + column: Union[ColumnClause[Any], Column[Any]], + **kw, + ) -> None: + if column.name in self.table.primary_key.columns: + _remove_column_from_collection( + self.table.primary_key.columns, column + ) + del self.columns[column.name] + del self.column_transfers[column.name] + self.existing_ordering.remove(column.name) + + # pop named constraints for Boolean/Enum for rename + if ( + "existing_type" in kw + and isinstance(kw["existing_type"], SchemaEventTarget) + and kw["existing_type"].name # type:ignore[attr-defined] + ): + self.named_constraints.pop( + kw["existing_type"].name, None # type:ignore[attr-defined] + ) + + def create_column_comment(self, column): + """the batch table creation function will issue create_column_comment + on the real "impl" as part of the create table process. + + That is, the Column object will have the comment on it already, + so when it is received by add_column() it will be a normal part of + the CREATE TABLE and doesn't need an extra step here. + + """ + + def create_table_comment(self, table): + """the batch table creation function will issue create_table_comment + on the real "impl" as part of the create table process. + + """ + + def drop_table_comment(self, table): + """the batch table creation function will issue drop_table_comment + on the real "impl" as part of the create table process. + + """ + + def add_constraint(self, const: Constraint) -> None: + if not constraint_name_defined(const.name): + raise ValueError("Constraint must have a name") + if isinstance(const, sql_schema.PrimaryKeyConstraint): + if self.table.primary_key in self.unnamed_constraints: + self.unnamed_constraints.remove(self.table.primary_key) + + if constraint_name_string(const.name): + self.named_constraints[const.name] = const + else: + self.unnamed_constraints.append(const) + + def drop_constraint(self, const: Constraint) -> None: + if not const.name: + raise ValueError("Constraint must have a name") + try: + if const.name in self.col_named_constraints: + col, const = self.col_named_constraints.pop(const.name) + + for col_const in list(self.columns[col.name].constraints): + if col_const.name == const.name: + self.columns[col.name].constraints.remove(col_const) + elif constraint_name_string(const.name): + const = self.named_constraints.pop(const.name) + elif const in self.unnamed_constraints: + self.unnamed_constraints.remove(const) + + except KeyError: + if _is_type_bound(const): + # type-bound constraints are only included in the new + # table via their type object in any case, so ignore the + # drop_constraint() that comes here via the + # Operations.implementation_for(alter_column) + return + raise ValueError("No such constraint: '%s'" % const.name) + else: + if isinstance(const, PrimaryKeyConstraint): + for col in const.columns: + self.columns[col.name].primary_key = False + + def create_index(self, idx: Index) -> None: + self.new_indexes[idx.name] = idx # type: ignore[index] + + def drop_index(self, idx: Index) -> None: + try: + del self.indexes[idx.name] # type: ignore[arg-type] + except KeyError: + raise ValueError("No such index: '%s'" % idx.name) + + def rename_table(self, *arg, **kw): + raise NotImplementedError("TODO") diff --git a/venv/lib/python3.12/site-packages/alembic/operations/ops.py b/venv/lib/python3.12/site-packages/alembic/operations/ops.py new file mode 100644 index 0000000..7b65191 --- /dev/null 
+++ b/venv/lib/python3.12/site-packages/alembic/operations/ops.py @@ -0,0 +1,2786 @@ +from __future__ import annotations + +from abc import abstractmethod +import re +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import FrozenSet +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy.types import NULLTYPE + +from . import schemaobj +from .base import BatchOperations +from .base import Operations +from .. import util +from ..util import sqla_compat + +if TYPE_CHECKING: + from typing import Literal + + from sqlalchemy.sql import Executable + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.elements import conv + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.functions import Function + from sqlalchemy.sql.schema import CheckConstraint + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Computed + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.schema import ForeignKeyConstraint + from sqlalchemy.sql.schema import Identity + from sqlalchemy.sql.schema import Index + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import PrimaryKeyConstraint + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.schema import UniqueConstraint + from sqlalchemy.sql.selectable import TableClause + from sqlalchemy.sql.type_api import TypeEngine + + from ..autogenerate.rewriter import Rewriter + from ..runtime.migration import MigrationContext + from ..script.revision import _RevIdType + +_T = TypeVar("_T", bound=Any) +_AC = TypeVar("_AC", bound="AddConstraintOp") + + +class MigrateOperation: + """base class for migration command and organization objects. + + This system is part of the operation extensibility API. + + .. seealso:: + + :ref:`operation_objects` + + :ref:`operation_plugins` + + :ref:`customizing_revision` + + """ + + @util.memoized_property + def info(self) -> Dict[Any, Any]: + """A dictionary that may be used to store arbitrary information + along with this :class:`.MigrateOperation` object. 
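+
+        For example, a minimal sketch (the operation and the key stored
+        here are arbitrary illustrations)::
+
+            op = CreateTableOp("account", [])
+            op.info["reviewed"] = True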
+ + """ + return {} + + _mutations: FrozenSet[Rewriter] = frozenset() + + def reverse(self) -> MigrateOperation: + raise NotImplementedError + + def to_diff_tuple(self) -> Tuple[Any, ...]: + raise NotImplementedError + + +class AddConstraintOp(MigrateOperation): + """Represent an add constraint operation.""" + + add_constraint_ops = util.Dispatcher() + + @property + def constraint_type(self) -> str: + raise NotImplementedError() + + @classmethod + def register_add_constraint( + cls, type_: str + ) -> Callable[[Type[_AC]], Type[_AC]]: + def go(klass: Type[_AC]) -> Type[_AC]: + cls.add_constraint_ops.dispatch_for(type_)(klass.from_constraint) + return klass + + return go + + @classmethod + def from_constraint(cls, constraint: Constraint) -> AddConstraintOp: + return cls.add_constraint_ops.dispatch(constraint.__visit_name__)( # type: ignore[no-any-return] # noqa: E501 + constraint + ) + + @abstractmethod + def to_constraint( + self, migration_context: Optional[MigrationContext] = None + ) -> Constraint: + pass + + def reverse(self) -> DropConstraintOp: + return DropConstraintOp.from_constraint(self.to_constraint()) + + def to_diff_tuple(self) -> Tuple[str, Constraint]: + return ("add_constraint", self.to_constraint()) + + +@Operations.register_operation("drop_constraint") +@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint") +class DropConstraintOp(MigrateOperation): + """Represent a drop constraint operation.""" + + def __init__( + self, + constraint_name: Optional[sqla_compat._ConstraintNameDefined], + table_name: str, + type_: Optional[str] = None, + *, + schema: Optional[str] = None, + _reverse: Optional[AddConstraintOp] = None, + ) -> None: + self.constraint_name = constraint_name + self.table_name = table_name + self.constraint_type = type_ + self.schema = schema + self._reverse = _reverse + + def reverse(self) -> AddConstraintOp: + return AddConstraintOp.from_constraint(self.to_constraint()) + + def to_diff_tuple( + self, + ) -> Tuple[str, SchemaItem]: + if self.constraint_type == "foreignkey": + return ("remove_fk", self.to_constraint()) + else: + return ("remove_constraint", self.to_constraint()) + + @classmethod + def from_constraint(cls, constraint: Constraint) -> DropConstraintOp: + types = { + "unique_constraint": "unique", + "foreign_key_constraint": "foreignkey", + "primary_key_constraint": "primary", + "check_constraint": "check", + "column_check_constraint": "check", + "table_or_column_check_constraint": "check", + } + + constraint_table = sqla_compat._table_for_constraint(constraint) + return cls( + sqla_compat.constraint_name_or_none(constraint.name), + constraint_table.name, + schema=constraint_table.schema, + type_=types.get(constraint.__visit_name__), + _reverse=AddConstraintOp.from_constraint(constraint), + ) + + def to_constraint(self) -> Constraint: + if self._reverse is not None: + constraint = self._reverse.to_constraint() + constraint.name = self.constraint_name + constraint_table = sqla_compat._table_for_constraint(constraint) + constraint_table.name = self.table_name + constraint_table.schema = self.schema + + return constraint + else: + raise ValueError( + "constraint cannot be produced; " + "original constraint is not present" + ) + + @classmethod + def drop_constraint( + cls, + operations: Operations, + constraint_name: str, + table_name: str, + type_: Optional[str] = None, + *, + schema: Optional[str] = None, + ) -> None: + r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. 
+ + :param constraint_name: name of the constraint. + :param table_name: table name. + :param type\_: optional, required on MySQL. can be + 'foreignkey', 'primary', 'unique', or 'check'. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + + op = cls(constraint_name, table_name, type_=type_, schema=schema) + return operations.invoke(op) + + @classmethod + def batch_drop_constraint( + cls, + operations: BatchOperations, + constraint_name: str, + type_: Optional[str] = None, + ) -> None: + """Issue a "drop constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``table_name`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.drop_constraint` + + """ + op = cls( + constraint_name, + operations.impl.table_name, + type_=type_, + schema=operations.impl.schema, + ) + return operations.invoke(op) + + +@Operations.register_operation("create_primary_key") +@BatchOperations.register_operation( + "create_primary_key", "batch_create_primary_key" +) +@AddConstraintOp.register_add_constraint("primary_key_constraint") +class CreatePrimaryKeyOp(AddConstraintOp): + """Represent a create primary key operation.""" + + constraint_type = "primarykey" + + def __init__( + self, + constraint_name: Optional[sqla_compat._ConstraintNameDefined], + table_name: str, + columns: Sequence[str], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + self.constraint_name = constraint_name + self.table_name = table_name + self.columns = columns + self.schema = schema + self.kw = kw + + @classmethod + def from_constraint(cls, constraint: Constraint) -> CreatePrimaryKeyOp: + constraint_table = sqla_compat._table_for_constraint(constraint) + pk_constraint = cast("PrimaryKeyConstraint", constraint) + return cls( + sqla_compat.constraint_name_or_none(pk_constraint.name), + constraint_table.name, + pk_constraint.columns.keys(), + schema=constraint_table.schema, + **pk_constraint.dialect_kwargs, + ) + + def to_constraint( + self, migration_context: Optional[MigrationContext] = None + ) -> PrimaryKeyConstraint: + schema_obj = schemaobj.SchemaObjects(migration_context) + + return schema_obj.primary_key_constraint( + self.constraint_name, + self.table_name, + self.columns, + schema=self.schema, + **self.kw, + ) + + @classmethod + def create_primary_key( + cls, + operations: Operations, + constraint_name: Optional[str], + table_name: str, + columns: List[str], + *, + schema: Optional[str] = None, + ) -> None: + """Issue a "create primary key" instruction using the current + migration context. + + e.g.:: + + from alembic import op + + op.create_primary_key("pk_my_table", "my_table", ["id", "version"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.PrimaryKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param constraint_name: Name of the primary key constraint. The name + is necessary so that an ALTER statement can be emitted. 
For setups + that use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions` + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the target table. + :param columns: a list of string column names to be applied to the + primary key constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + op = cls(constraint_name, table_name, columns, schema=schema) + return operations.invoke(op) + + @classmethod + def batch_create_primary_key( + cls, + operations: BatchOperations, + constraint_name: str, + columns: List[str], + ) -> None: + """Issue a "create primary key" instruction using the + current batch migration context. + + The batch form of this call omits the ``table_name`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_primary_key` + + """ + op = cls( + constraint_name, + operations.impl.table_name, + columns, + schema=operations.impl.schema, + ) + return operations.invoke(op) + + +@Operations.register_operation("create_unique_constraint") +@BatchOperations.register_operation( + "create_unique_constraint", "batch_create_unique_constraint" +) +@AddConstraintOp.register_add_constraint("unique_constraint") +class CreateUniqueConstraintOp(AddConstraintOp): + """Represent a create unique constraint operation.""" + + constraint_type = "unique" + + def __init__( + self, + constraint_name: Optional[sqla_compat._ConstraintNameDefined], + table_name: str, + columns: Sequence[str], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + self.constraint_name = constraint_name + self.table_name = table_name + self.columns = columns + self.schema = schema + self.kw = kw + + @classmethod + def from_constraint( + cls, constraint: Constraint + ) -> CreateUniqueConstraintOp: + constraint_table = sqla_compat._table_for_constraint(constraint) + + uq_constraint = cast("UniqueConstraint", constraint) + + kw: Dict[str, Any] = {} + if uq_constraint.deferrable: + kw["deferrable"] = uq_constraint.deferrable + if uq_constraint.initially: + kw["initially"] = uq_constraint.initially + kw.update(uq_constraint.dialect_kwargs) + return cls( + sqla_compat.constraint_name_or_none(uq_constraint.name), + constraint_table.name, + [c.name for c in uq_constraint.columns], + schema=constraint_table.schema, + **kw, + ) + + def to_constraint( + self, migration_context: Optional[MigrationContext] = None + ) -> UniqueConstraint: + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.unique_constraint( + self.constraint_name, + self.table_name, + self.columns, + schema=self.schema, + **self.kw, + ) + + @classmethod + def create_unique_constraint( + cls, + operations: Operations, + constraint_name: Optional[str], + table_name: str, + columns: Sequence[str], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> Any: + """Issue a "create unique constraint" instruction using the + current migration context. 
+ + e.g.:: + + from alembic import op + op.create_unique_constraint("uq_user_name", "user", ["name"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.UniqueConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the unique constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param columns: a list of string column names in the + source table. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + + op = cls(constraint_name, table_name, columns, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + def batch_create_unique_constraint( + cls, + operations: BatchOperations, + constraint_name: str, + columns: Sequence[str], + **kw: Any, + ) -> Any: + """Issue a "create unique constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``schema`` + arguments from the call. + + .. 
seealso:: + + :meth:`.Operations.create_unique_constraint` + + """ + kw["schema"] = operations.impl.schema + op = cls(constraint_name, operations.impl.table_name, columns, **kw) + return operations.invoke(op) + + +@Operations.register_operation("create_foreign_key") +@BatchOperations.register_operation( + "create_foreign_key", "batch_create_foreign_key" +) +@AddConstraintOp.register_add_constraint("foreign_key_constraint") +class CreateForeignKeyOp(AddConstraintOp): + """Represent a create foreign key constraint operation.""" + + constraint_type = "foreignkey" + + def __init__( + self, + constraint_name: Optional[sqla_compat._ConstraintNameDefined], + source_table: str, + referent_table: str, + local_cols: List[str], + remote_cols: List[str], + **kw: Any, + ) -> None: + self.constraint_name = constraint_name + self.source_table = source_table + self.referent_table = referent_table + self.local_cols = local_cols + self.remote_cols = remote_cols + self.kw = kw + + def to_diff_tuple(self) -> Tuple[str, ForeignKeyConstraint]: + return ("add_fk", self.to_constraint()) + + @classmethod + def from_constraint(cls, constraint: Constraint) -> CreateForeignKeyOp: + fk_constraint = cast("ForeignKeyConstraint", constraint) + kw: Dict[str, Any] = {} + if fk_constraint.onupdate: + kw["onupdate"] = fk_constraint.onupdate + if fk_constraint.ondelete: + kw["ondelete"] = fk_constraint.ondelete + if fk_constraint.initially: + kw["initially"] = fk_constraint.initially + if fk_constraint.deferrable: + kw["deferrable"] = fk_constraint.deferrable + if fk_constraint.use_alter: + kw["use_alter"] = fk_constraint.use_alter + if fk_constraint.match: + kw["match"] = fk_constraint.match + + ( + source_schema, + source_table, + source_columns, + target_schema, + target_table, + target_columns, + onupdate, + ondelete, + deferrable, + initially, + ) = sqla_compat._fk_spec(fk_constraint) + + kw["source_schema"] = source_schema + kw["referent_schema"] = target_schema + kw.update(fk_constraint.dialect_kwargs) + return cls( + sqla_compat.constraint_name_or_none(fk_constraint.name), + source_table, + target_table, + source_columns, + target_columns, + **kw, + ) + + def to_constraint( + self, migration_context: Optional[MigrationContext] = None + ) -> ForeignKeyConstraint: + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.foreign_key_constraint( + self.constraint_name, + self.source_table, + self.referent_table, + self.local_cols, + self.remote_cols, + **self.kw, + ) + + @classmethod + def create_foreign_key( + cls, + operations: Operations, + constraint_name: Optional[str], + source_table: str, + referent_table: str, + local_cols: List[str], + remote_cols: List[str], + *, + onupdate: Optional[str] = None, + ondelete: Optional[str] = None, + deferrable: Optional[bool] = None, + initially: Optional[str] = None, + match: Optional[str] = None, + source_schema: Optional[str] = None, + referent_schema: Optional[str] = None, + **dialect_kw: Any, + ) -> None: + """Issue a "create foreign key" instruction using the + current migration context. + + e.g.:: + + from alembic import op + + op.create_foreign_key( + "fk_user_address", + "address", + "user", + ["user_id"], + ["id"], + ) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.ForeignKeyConstraint` + object which it then associates with the + :class:`~sqlalchemy.schema.Table`. 
+ Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param constraint_name: Name of the foreign key constraint. The name + is necessary so that an ALTER statement can be emitted. For setups + that use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source_table: String name of the source table. + :param referent_table: String name of the destination table. + :param local_cols: a list of string column names in the + source table. + :param remote_cols: a list of string column names in the + remote table. + :param onupdate: Optional string. If set, emit ON UPDATE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param ondelete: Optional string. If set, emit ON DELETE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param deferrable: optional bool. If set, emit DEFERRABLE or NOT + DEFERRABLE when issuing DDL for this constraint. + :param source_schema: Optional schema name of the source table. + :param referent_schema: Optional schema name of the destination table. + + """ + + op = cls( + constraint_name, + source_table, + referent_table, + local_cols, + remote_cols, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + source_schema=source_schema, + referent_schema=referent_schema, + initially=initially, + match=match, + **dialect_kw, + ) + return operations.invoke(op) + + @classmethod + def batch_create_foreign_key( + cls, + operations: BatchOperations, + constraint_name: str, + referent_table: str, + local_cols: List[str], + remote_cols: List[str], + *, + referent_schema: Optional[str] = None, + onupdate: Optional[str] = None, + ondelete: Optional[str] = None, + deferrable: Optional[bool] = None, + initially: Optional[str] = None, + match: Optional[str] = None, + **dialect_kw: Any, + ) -> None: + """Issue a "create foreign key" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``source_schema`` + arguments from the call. + + e.g.:: + + with batch_alter_table("address") as batch_op: + batch_op.create_foreign_key( + "fk_user_address", + "user", + ["user_id"], + ["id"], + ) + + .. 
seealso:: + + :meth:`.Operations.create_foreign_key` + + """ + op = cls( + constraint_name, + operations.impl.table_name, + referent_table, + local_cols, + remote_cols, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + source_schema=operations.impl.schema, + referent_schema=referent_schema, + initially=initially, + match=match, + **dialect_kw, + ) + return operations.invoke(op) + + +@Operations.register_operation("create_check_constraint") +@BatchOperations.register_operation( + "create_check_constraint", "batch_create_check_constraint" +) +@AddConstraintOp.register_add_constraint("check_constraint") +@AddConstraintOp.register_add_constraint("table_or_column_check_constraint") +@AddConstraintOp.register_add_constraint("column_check_constraint") +class CreateCheckConstraintOp(AddConstraintOp): + """Represent a create check constraint operation.""" + + constraint_type = "check" + + def __init__( + self, + constraint_name: Optional[sqla_compat._ConstraintNameDefined], + table_name: str, + condition: Union[str, TextClause, ColumnElement[Any]], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + self.constraint_name = constraint_name + self.table_name = table_name + self.condition = condition + self.schema = schema + self.kw = kw + + @classmethod + def from_constraint( + cls, constraint: Constraint + ) -> CreateCheckConstraintOp: + constraint_table = sqla_compat._table_for_constraint(constraint) + + ck_constraint = cast("CheckConstraint", constraint) + return cls( + sqla_compat.constraint_name_or_none(ck_constraint.name), + constraint_table.name, + cast("ColumnElement[Any]", ck_constraint.sqltext), + schema=constraint_table.schema, + **ck_constraint.dialect_kwargs, + ) + + def to_constraint( + self, migration_context: Optional[MigrationContext] = None + ) -> CheckConstraint: + schema_obj = schemaobj.SchemaObjects(migration_context) + return schema_obj.check_constraint( + self.constraint_name, + self.table_name, + self.condition, + schema=self.schema, + **self.kw, + ) + + @classmethod + def create_check_constraint( + cls, + operations: Operations, + constraint_name: Optional[str], + table_name: str, + condition: Union[str, ColumnElement[bool], TextClause], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + """Issue a "create check constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + from sqlalchemy.sql import column, func + + op.create_check_constraint( + "ck_user_name_len", + "user", + func.len(column("name")) > 5, + ) + + CHECK constraints are usually against a SQL expression, so ad-hoc + table metadata is usually needed. The function will convert the given + arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound + to an anonymous table in order to emit the CREATE statement. + + :param name: Name of the check constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + :ref:`sqla:constraint_naming_conventions`, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the source table. + :param condition: SQL expression that's the condition of the + constraint. Can be a string or SQLAlchemy expression language + structure. + :param deferrable: optional bool. If set, emit DEFERRABLE or + NOT DEFERRABLE when issuing DDL for this constraint. + :param initially: optional string. 
If set, emit INITIALLY + when issuing DDL for this constraint. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + op = cls(constraint_name, table_name, condition, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + def batch_create_check_constraint( + cls, + operations: BatchOperations, + constraint_name: str, + condition: Union[str, ColumnElement[bool], TextClause], + **kw: Any, + ) -> None: + """Issue a "create check constraint" instruction using the + current batch migration context. + + The batch form of this call omits the ``source`` and ``schema`` + arguments from the call. + + .. seealso:: + + :meth:`.Operations.create_check_constraint` + + """ + op = cls( + constraint_name, + operations.impl.table_name, + condition, + schema=operations.impl.schema, + **kw, + ) + return operations.invoke(op) + + +@Operations.register_operation("create_index") +@BatchOperations.register_operation("create_index", "batch_create_index") +class CreateIndexOp(MigrateOperation): + """Represent a create index operation.""" + + def __init__( + self, + index_name: Optional[str], + table_name: str, + columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], + *, + schema: Optional[str] = None, + unique: bool = False, + if_not_exists: Optional[bool] = None, + **kw: Any, + ) -> None: + self.index_name = index_name + self.table_name = table_name + self.columns = columns + self.schema = schema + self.unique = unique + self.if_not_exists = if_not_exists + self.kw = kw + + def reverse(self) -> DropIndexOp: + return DropIndexOp.from_index(self.to_index()) + + def to_diff_tuple(self) -> Tuple[str, Index]: + return ("add_index", self.to_index()) + + @classmethod + def from_index(cls, index: Index) -> CreateIndexOp: + assert index.table is not None + return cls( + index.name, + index.table.name, + index.expressions, + schema=index.table.schema, + unique=index.unique, + **index.kwargs, + ) + + def to_index( + self, migration_context: Optional[MigrationContext] = None + ) -> Index: + schema_obj = schemaobj.SchemaObjects(migration_context) + + idx = schema_obj.index( + self.index_name, + self.table_name, + self.columns, + schema=self.schema, + unique=self.unique, + **self.kw, + ) + return idx + + @classmethod + def create_index( + cls, + operations: Operations, + index_name: Optional[str], + table_name: str, + columns: Sequence[Union[str, TextClause, Function[Any]]], + *, + schema: Optional[str] = None, + unique: bool = False, + if_not_exists: Optional[bool] = None, + **kw: Any, + ) -> None: + r"""Issue a "create index" instruction using the current + migration context. + + e.g.:: + + from alembic import op + + op.create_index("ik_test", "t1", ["foo", "bar"]) + + Functional indexes can be produced by using the + :func:`sqlalchemy.sql.expression.text` construct:: + + from alembic import op + from sqlalchemy import text + + op.create_index("ik_test", "t1", [text("lower(foo)")]) + + :param index_name: name of the index. + :param table_name: name of the owning table. + :param columns: a list consisting of string column names and/or + :func:`~sqlalchemy.sql.expression.text` constructs. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param unique: If True, create a unique index. 
+
+        :param quote: Force quoting of this column's name on or off,
+         corresponding to ``True`` or ``False``. When left at its default
+         of ``None``, the column identifier will be quoted according to
+         whether the name is case sensitive (identifiers with at least one
+         upper case character are treated as case sensitive), or if it's a
+         reserved word. This flag is only needed to force quoting of a
+         reserved word which is not known by the SQLAlchemy dialect.
+
+        :param if_not_exists: If True, adds IF NOT EXISTS operator when
+         creating the new index.
+
+         .. versionadded:: 1.12.0
+
+        :param \**kw: Additional keyword arguments not mentioned above are
+         dialect specific, and passed in the form
+         ``<dialectname>_<argname>``.
+         See the documentation regarding an individual dialect at
+         :ref:`dialect_toplevel` for detail on documented arguments.
+
+        """
+        op = cls(
+            index_name,
+            table_name,
+            columns,
+            schema=schema,
+            unique=unique,
+            if_not_exists=if_not_exists,
+            **kw,
+        )
+        return operations.invoke(op)
+
+    @classmethod
+    def batch_create_index(
+        cls,
+        operations: BatchOperations,
+        index_name: str,
+        columns: List[str],
+        **kw: Any,
+    ) -> None:
+        """Issue a "create index" instruction using the
+        current batch migration context.
+
+        .. seealso::
+
+            :meth:`.Operations.create_index`
+
+        """
+
+        op = cls(
+            index_name,
+            operations.impl.table_name,
+            columns,
+            schema=operations.impl.schema,
+            **kw,
+        )
+        return operations.invoke(op)
+
+
+@Operations.register_operation("drop_index")
+@BatchOperations.register_operation("drop_index", "batch_drop_index")
+class DropIndexOp(MigrateOperation):
+    """Represent a drop index operation."""
+
+    def __init__(
+        self,
+        index_name: Union[quoted_name, str, conv],
+        table_name: Optional[str] = None,
+        *,
+        schema: Optional[str] = None,
+        if_exists: Optional[bool] = None,
+        _reverse: Optional[CreateIndexOp] = None,
+        **kw: Any,
+    ) -> None:
+        self.index_name = index_name
+        self.table_name = table_name
+        self.schema = schema
+        self.if_exists = if_exists
+        self._reverse = _reverse
+        self.kw = kw
+
+    def to_diff_tuple(self) -> Tuple[str, Index]:
+        return ("remove_index", self.to_index())
+
+    def reverse(self) -> CreateIndexOp:
+        return CreateIndexOp.from_index(self.to_index())
+
+    @classmethod
+    def from_index(cls, index: Index) -> DropIndexOp:
+        assert index.table is not None
+        return cls(
+            index.name,  # type: ignore[arg-type]
+            table_name=index.table.name,
+            schema=index.table.schema,
+            _reverse=CreateIndexOp.from_index(index),
+            unique=index.unique,
+            **index.kwargs,
+        )
+
+    def to_index(
+        self, migration_context: Optional[MigrationContext] = None
+    ) -> Index:
+        schema_obj = schemaobj.SchemaObjects(migration_context)
+
+        # need a dummy column name here since SQLAlchemy
+        # 0.7.6 and further raises on Index with no columns
+        return schema_obj.index(
+            self.index_name,
+            self.table_name,
+            self._reverse.columns if self._reverse else ["x"],
+            schema=self.schema,
+            **self.kw,
+        )
+
+    @classmethod
+    def drop_index(
+        cls,
+        operations: Operations,
+        index_name: str,
+        table_name: Optional[str] = None,
+        *,
+        schema: Optional[str] = None,
+        if_exists: Optional[bool] = None,
+        **kw: Any,
+    ) -> None:
+        r"""Issue a "drop index" instruction using the current
+        migration context.
+
+        e.g.::
+
+            op.drop_index("accounts")
+
+        :param index_name: name of the index.
+        :param table_name: name of the owning table. Some
+         backends such as Microsoft SQL Server require this.
+        :param schema: Optional schema name to operate within.
+         To control quoting of the schema outside of the default behavior,
+         use the SQLAlchemy construct
+         :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+        :param if_exists: If True, adds IF EXISTS operator when
+         dropping the index.
+
+         .. versionadded:: 1.12.0
+
+        :param \**kw: Additional keyword arguments not mentioned above are
+         dialect specific, and passed in the form
+         ``<dialectname>_<argname>``.
+         See the documentation regarding an individual dialect at
+         :ref:`dialect_toplevel` for detail on documented arguments.
+
+        """
+        op = cls(
+            index_name,
+            table_name=table_name,
+            schema=schema,
+            if_exists=if_exists,
+            **kw,
+        )
+        return operations.invoke(op)
+
+    @classmethod
+    def batch_drop_index(
+        cls, operations: BatchOperations, index_name: str, **kw: Any
+    ) -> None:
+        """Issue a "drop index" instruction using the
+        current batch migration context.
+
+        .. seealso::
+
+            :meth:`.Operations.drop_index`
+
+        """
+
+        op = cls(
+            index_name,
+            table_name=operations.impl.table_name,
+            schema=operations.impl.schema,
+            **kw,
+        )
+        return operations.invoke(op)
+
+
+@Operations.register_operation("create_table")
+class CreateTableOp(MigrateOperation):
+    """Represent a create table operation."""
+
+    def __init__(
+        self,
+        table_name: str,
+        columns: Sequence[SchemaItem],
+        *,
+        schema: Optional[str] = None,
+        _namespace_metadata: Optional[MetaData] = None,
+        _constraints_included: bool = False,
+        **kw: Any,
+    ) -> None:
+        self.table_name = table_name
+        self.columns = columns
+        self.schema = schema
+        self.info = kw.pop("info", {})
+        self.comment = kw.pop("comment", None)
+        self.prefixes = kw.pop("prefixes", None)
+        self.kw = kw
+        self._namespace_metadata = _namespace_metadata
+        self._constraints_included = _constraints_included
+
+    def reverse(self) -> DropTableOp:
+        return DropTableOp.from_table(
+            self.to_table(), _namespace_metadata=self._namespace_metadata
+        )
+
+    def to_diff_tuple(self) -> Tuple[str, Table]:
+        return ("add_table", self.to_table())
+
+    @classmethod
+    def from_table(
+        cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None
+    ) -> CreateTableOp:
+        if _namespace_metadata is None:
+            _namespace_metadata = table.metadata
+
+        return cls(
+            table.name,
+            list(table.c) + list(table.constraints),
+            schema=table.schema,
+            _namespace_metadata=_namespace_metadata,
+            # given a Table() object, this Table will contain full Index()
+            # and UniqueConstraint objects already constructed in response to
+            # each unique=True / index=True flag on a Column. Carry this
+            # state along so that when we re-convert back into a Table, we
+            # skip unique=True/index=True so that these constraints are
+            # not doubled up. see #844 #848
+            _constraints_included=True,
+            comment=table.comment,
+            info=dict(table.info),
+            prefixes=list(table._prefixes),
+            **table.kwargs,
+        )
+
+    def to_table(
+        self, migration_context: Optional[MigrationContext] = None
+    ) -> Table:
+        schema_obj = schemaobj.SchemaObjects(migration_context)
+
+        return schema_obj.table(
+            self.table_name,
+            *self.columns,
+            schema=self.schema,
+            prefixes=list(self.prefixes) if self.prefixes else [],
+            comment=self.comment,
+            info=self.info.copy() if self.info else {},
+            _constraints_included=self._constraints_included,
+            **self.kw,
+        )
+
+    @classmethod
+    def create_table(
+        cls,
+        operations: Operations,
+        table_name: str,
+        *columns: SchemaItem,
+        **kw: Any,
+    ) -> Table:
+        r"""Issue a "create table" instruction using the current migration
+        context.
+
+        This directive receives an argument list similar to that of the
+        traditional :class:`sqlalchemy.schema.Table` construct, but without
+        the metadata::
+
+            from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column, TIMESTAMP, func
+            from alembic import op
+
+            op.create_table(
+                "account",
+                Column("id", INTEGER, primary_key=True),
+                Column("name", VARCHAR(50), nullable=False),
+                Column("description", NVARCHAR(200)),
+                Column("timestamp", TIMESTAMP, server_default=func.now()),
+            )
+
+        Note that :meth:`.create_table` accepts
+        :class:`~sqlalchemy.schema.Column`
+        constructs directly from the SQLAlchemy library. In particular,
+        default values to be created on the database side are
+        specified using the ``server_default`` parameter, and not
+        ``default`` which only specifies Python-side defaults::
+
+            from alembic import op
+            from sqlalchemy import Column, INTEGER, TIMESTAMP, func
+
+            # specify "DEFAULT NOW" along with the "timestamp" column
+            op.create_table(
+                "account",
+                Column("id", INTEGER, primary_key=True),
+                Column("timestamp", TIMESTAMP, server_default=func.now()),
+            )
+
+        The function also returns a newly created
+        :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+        specification given, which is suitable for
+        immediate SQL operations, in particular
+        :meth:`.Operations.bulk_insert`::
+
+            from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column, TIMESTAMP, func
+            from alembic import op
+
+            account_table = op.create_table(
+                "account",
+                Column("id", INTEGER, primary_key=True),
+                Column("name", VARCHAR(50), nullable=False),
+                Column("description", NVARCHAR(200)),
+                Column("timestamp", TIMESTAMP, server_default=func.now()),
+            )
+
+            op.bulk_insert(
+                account_table,
+                [
+                    {"name": "A1", "description": "account 1"},
+                    {"name": "A2", "description": "account 2"},
+                ],
+            )
+
+        :param table_name: Name of the table
+        :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+         objects within
+         the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
+         objects
+         and :class:`~sqlalchemy.schema.Index` objects.
+        :param schema: Optional schema name to operate within. To control
+         quoting of the schema outside of the default behavior, use
+         the SQLAlchemy construct
+         :class:`~sqlalchemy.sql.elements.quoted_name`.
+        :param \**kw: Other keyword arguments are passed to the underlying
+         :class:`sqlalchemy.schema.Table` object created for the command.
+
+        :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+         to the parameters given.
+ + """ + op = cls(table_name, columns, **kw) + return operations.invoke(op) + + +@Operations.register_operation("drop_table") +class DropTableOp(MigrateOperation): + """Represent a drop table operation.""" + + def __init__( + self, + table_name: str, + *, + schema: Optional[str] = None, + table_kw: Optional[MutableMapping[Any, Any]] = None, + _reverse: Optional[CreateTableOp] = None, + ) -> None: + self.table_name = table_name + self.schema = schema + self.table_kw = table_kw or {} + self.comment = self.table_kw.pop("comment", None) + self.info = self.table_kw.pop("info", None) + self.prefixes = self.table_kw.pop("prefixes", None) + self._reverse = _reverse + + def to_diff_tuple(self) -> Tuple[str, Table]: + return ("remove_table", self.to_table()) + + def reverse(self) -> CreateTableOp: + return CreateTableOp.from_table(self.to_table()) + + @classmethod + def from_table( + cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None + ) -> DropTableOp: + return cls( + table.name, + schema=table.schema, + table_kw={ + "comment": table.comment, + "info": dict(table.info), + "prefixes": list(table._prefixes), + **table.kwargs, + }, + _reverse=CreateTableOp.from_table( + table, _namespace_metadata=_namespace_metadata + ), + ) + + def to_table( + self, migration_context: Optional[MigrationContext] = None + ) -> Table: + if self._reverse: + cols_and_constraints = self._reverse.columns + else: + cols_and_constraints = [] + + schema_obj = schemaobj.SchemaObjects(migration_context) + t = schema_obj.table( + self.table_name, + *cols_and_constraints, + comment=self.comment, + info=self.info.copy() if self.info else {}, + prefixes=list(self.prefixes) if self.prefixes else [], + schema=self.schema, + _constraints_included=self._reverse._constraints_included + if self._reverse + else False, + **self.table_kw, + ) + return t + + @classmethod + def drop_table( + cls, + operations: Operations, + table_name: str, + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + r"""Issue a "drop table" instruction using the current + migration context. + + + e.g.:: + + drop_table("accounts") + + :param table_name: Name of the table + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + """ + op = cls(table_name, schema=schema, table_kw=kw) + operations.invoke(op) + + +class AlterTableOp(MigrateOperation): + """Represent an alter table operation.""" + + def __init__( + self, + table_name: str, + *, + schema: Optional[str] = None, + ) -> None: + self.table_name = table_name + self.schema = schema + + +@Operations.register_operation("rename_table") +class RenameTableOp(AlterTableOp): + """Represent a rename table operation.""" + + def __init__( + self, + old_table_name: str, + new_table_name: str, + *, + schema: Optional[str] = None, + ) -> None: + super().__init__(old_table_name, schema=schema) + self.new_table_name = new_table_name + + @classmethod + def rename_table( + cls, + operations: Operations, + old_table_name: str, + new_table_name: str, + *, + schema: Optional[str] = None, + ) -> None: + """Emit an ALTER TABLE to rename a table. + + :param old_table_name: old name. + :param new_table_name: new name. + :param schema: Optional schema name to operate within. 
To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + op = cls(old_table_name, new_table_name, schema=schema) + return operations.invoke(op) + + +@Operations.register_operation("create_table_comment") +@BatchOperations.register_operation( + "create_table_comment", "batch_create_table_comment" +) +class CreateTableCommentOp(AlterTableOp): + """Represent a COMMENT ON `table` operation.""" + + def __init__( + self, + table_name: str, + comment: Optional[str], + *, + schema: Optional[str] = None, + existing_comment: Optional[str] = None, + ) -> None: + self.table_name = table_name + self.comment = comment + self.existing_comment = existing_comment + self.schema = schema + + @classmethod + def create_table_comment( + cls, + operations: Operations, + table_name: str, + comment: Optional[str], + *, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + ) -> None: + """Emit a COMMENT ON operation to set the comment for a table. + + :param table_name: string name of the target table. + :param comment: string value of the comment being registered against + the specified table. + :param existing_comment: String value of a comment + already registered on the specified table, used within autogenerate + so that the operation is reversible, but not required for direct + use. + + .. seealso:: + + :meth:`.Operations.drop_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ + + op = cls( + table_name, + comment, + existing_comment=existing_comment, + schema=schema, + ) + return operations.invoke(op) + + @classmethod + def batch_create_table_comment( + cls, + operations: BatchOperations, + comment: Optional[str], + *, + existing_comment: Optional[str] = None, + ) -> None: + """Emit a COMMENT ON operation to set the comment for a table + using the current batch migration context. + + :param comment: string value of the comment being registered against + the specified table. + :param existing_comment: String value of a comment + already registered on the specified table, used within autogenerate + so that the operation is reversible, but not required for direct + use. 
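+
+        A minimal usage sketch, assuming a batch context opened with
+        ``op.batch_alter_table`` (the table name and comment here are
+        hypothetical)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.create_table_comment("stores user accounts")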
+ + """ + + op = cls( + operations.impl.table_name, + comment, + existing_comment=existing_comment, + schema=operations.impl.schema, + ) + return operations.invoke(op) + + def reverse(self) -> Union[CreateTableCommentOp, DropTableCommentOp]: + """Reverses the COMMENT ON operation against a table.""" + if self.existing_comment is None: + return DropTableCommentOp( + self.table_name, + existing_comment=self.comment, + schema=self.schema, + ) + else: + return CreateTableCommentOp( + self.table_name, + self.existing_comment, + existing_comment=self.comment, + schema=self.schema, + ) + + def to_table( + self, migration_context: Optional[MigrationContext] = None + ) -> Table: + schema_obj = schemaobj.SchemaObjects(migration_context) + + return schema_obj.table( + self.table_name, schema=self.schema, comment=self.comment + ) + + def to_diff_tuple(self) -> Tuple[Any, ...]: + return ("add_table_comment", self.to_table(), self.existing_comment) + + +@Operations.register_operation("drop_table_comment") +@BatchOperations.register_operation( + "drop_table_comment", "batch_drop_table_comment" +) +class DropTableCommentOp(AlterTableOp): + """Represent an operation to remove the comment from a table.""" + + def __init__( + self, + table_name: str, + *, + schema: Optional[str] = None, + existing_comment: Optional[str] = None, + ) -> None: + self.table_name = table_name + self.existing_comment = existing_comment + self.schema = schema + + @classmethod + def drop_table_comment( + cls, + operations: Operations, + table_name: str, + *, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + ) -> None: + """Issue a "drop table comment" operation to + remove an existing comment set on a table. + + :param table_name: string name of the target table. + :param existing_comment: An optional string value of a comment already + registered on the specified table. + + .. seealso:: + + :meth:`.Operations.create_table_comment` + + :paramref:`.Operations.alter_column.comment` + + """ + + op = cls(table_name, existing_comment=existing_comment, schema=schema) + return operations.invoke(op) + + @classmethod + def batch_drop_table_comment( + cls, + operations: BatchOperations, + *, + existing_comment: Optional[str] = None, + ) -> None: + """Issue a "drop table comment" operation to + remove an existing comment set on a table using the current + batch operations context. + + :param existing_comment: An optional string value of a comment already + registered on the specified table. 
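+
+        A minimal usage sketch, assuming a batch context opened with
+        ``op.batch_alter_table`` (the table name and comment here are
+        hypothetical)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.drop_table_comment(
+                    existing_comment="stores user accounts"
+                )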
+ + """ + + op = cls( + operations.impl.table_name, + existing_comment=existing_comment, + schema=operations.impl.schema, + ) + return operations.invoke(op) + + def reverse(self) -> CreateTableCommentOp: + """Reverses the COMMENT ON operation against a table.""" + return CreateTableCommentOp( + self.table_name, self.existing_comment, schema=self.schema + ) + + def to_table( + self, migration_context: Optional[MigrationContext] = None + ) -> Table: + schema_obj = schemaobj.SchemaObjects(migration_context) + + return schema_obj.table(self.table_name, schema=self.schema) + + def to_diff_tuple(self) -> Tuple[Any, ...]: + return ("remove_table_comment", self.to_table()) + + +@Operations.register_operation("alter_column") +@BatchOperations.register_operation("alter_column", "batch_alter_column") +class AlterColumnOp(AlterTableOp): + """Represent an alter column operation.""" + + def __init__( + self, + table_name: str, + column_name: str, + *, + schema: Optional[str] = None, + existing_type: Optional[Any] = None, + existing_server_default: Any = False, + existing_nullable: Optional[bool] = None, + existing_comment: Optional[str] = None, + modify_nullable: Optional[bool] = None, + modify_comment: Optional[Union[str, Literal[False]]] = False, + modify_server_default: Any = False, + modify_name: Optional[str] = None, + modify_type: Optional[Any] = None, + **kw: Any, + ) -> None: + super().__init__(table_name, schema=schema) + self.column_name = column_name + self.existing_type = existing_type + self.existing_server_default = existing_server_default + self.existing_nullable = existing_nullable + self.existing_comment = existing_comment + self.modify_nullable = modify_nullable + self.modify_comment = modify_comment + self.modify_server_default = modify_server_default + self.modify_name = modify_name + self.modify_type = modify_type + self.kw = kw + + def to_diff_tuple(self) -> Any: + col_diff = [] + schema, tname, cname = self.schema, self.table_name, self.column_name + + if self.modify_type is not None: + col_diff.append( + ( + "modify_type", + schema, + tname, + cname, + { + "existing_nullable": self.existing_nullable, + "existing_server_default": ( + self.existing_server_default + ), + "existing_comment": self.existing_comment, + }, + self.existing_type, + self.modify_type, + ) + ) + + if self.modify_nullable is not None: + col_diff.append( + ( + "modify_nullable", + schema, + tname, + cname, + { + "existing_type": self.existing_type, + "existing_server_default": ( + self.existing_server_default + ), + "existing_comment": self.existing_comment, + }, + self.existing_nullable, + self.modify_nullable, + ) + ) + + if self.modify_server_default is not False: + col_diff.append( + ( + "modify_default", + schema, + tname, + cname, + { + "existing_nullable": self.existing_nullable, + "existing_type": self.existing_type, + "existing_comment": self.existing_comment, + }, + self.existing_server_default, + self.modify_server_default, + ) + ) + + if self.modify_comment is not False: + col_diff.append( + ( + "modify_comment", + schema, + tname, + cname, + { + "existing_nullable": self.existing_nullable, + "existing_type": self.existing_type, + "existing_server_default": ( + self.existing_server_default + ), + }, + self.existing_comment, + self.modify_comment, + ) + ) + + return col_diff + + def has_changes(self) -> bool: + hc1 = ( + self.modify_nullable is not None + or self.modify_server_default is not False + or self.modify_type is not None + or self.modify_comment is not False + ) + if hc1: + return True + 
for kw in self.kw: + if kw.startswith("modify_"): + return True + else: + return False + + def reverse(self) -> AlterColumnOp: + kw = self.kw.copy() + kw["existing_type"] = self.existing_type + kw["existing_nullable"] = self.existing_nullable + kw["existing_server_default"] = self.existing_server_default + kw["existing_comment"] = self.existing_comment + if self.modify_type is not None: + kw["modify_type"] = self.modify_type + if self.modify_nullable is not None: + kw["modify_nullable"] = self.modify_nullable + if self.modify_server_default is not False: + kw["modify_server_default"] = self.modify_server_default + if self.modify_comment is not False: + kw["modify_comment"] = self.modify_comment + + # TODO: make this a little simpler + all_keys = { + m.group(1) + for m in [re.match(r"^(?:existing_|modify_)(.+)$", k) for k in kw] + if m + } + + for k in all_keys: + if "modify_%s" % k in kw: + swap = kw["existing_%s" % k] + kw["existing_%s" % k] = kw["modify_%s" % k] + kw["modify_%s" % k] = swap + + return self.__class__( + self.table_name, self.column_name, schema=self.schema, **kw + ) + + @classmethod + def alter_column( + cls, + operations: Operations, + table_name: str, + column_name: str, + *, + nullable: Optional[bool] = None, + comment: Optional[Union[str, Literal[False]]] = False, + server_default: Any = False, + new_column_name: Optional[str] = None, + type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None, + existing_type: Optional[ + Union[TypeEngine[Any], Type[TypeEngine[Any]]] + ] = None, + existing_server_default: Optional[ + Union[str, bool, Identity, Computed] + ] = False, + existing_nullable: Optional[bool] = None, + existing_comment: Optional[str] = None, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + r"""Issue an "alter column" instruction using the + current migration context. + + Generally, only that aspect of the column which + is being changed, i.e. name, type, nullability, + default, needs to be specified. Multiple changes + can also be specified at once and the backend should + "do the right thing", emitting each change either + separately or together as the backend allows. + + MySQL has special requirements here, since MySQL + cannot ALTER a column without a full specification. + When producing MySQL-compatible migration files, + it is recommended that the ``existing_type``, + ``existing_server_default``, and ``existing_nullable`` + parameters be present, if not being altered. + + Type changes which are against the SQLAlchemy + "schema" types :class:`~sqlalchemy.types.Boolean` + and :class:`~sqlalchemy.types.Enum` may also + add or drop constraints which accompany those + types on backends that don't support them natively. + The ``existing_type`` argument is + used in this case to identify and remove a previous + constraint that was bound to the type object. + + :param table_name: string name of the target table. + :param column_name: string name of the target column, + as it exists before the operation begins. + :param nullable: Optional; specify ``True`` or ``False`` + to alter the column's nullability. + :param server_default: Optional; specify a string + SQL expression, :func:`~sqlalchemy.sql.expression.text`, + or :class:`~sqlalchemy.schema.DefaultClause` to indicate + an alteration to the column's default value. + Set to ``None`` to have the default removed. + :param comment: optional string text of a new comment to add to the + column. 
+ :param new_column_name: Optional; specify a string name here to + indicate the new name within a column rename operation. + :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` + type object to specify a change to the column's type. + For SQLAlchemy types that also indicate a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), + the constraint is also generated. + :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; + currently understood by the MySQL dialect. + :param existing_type: Optional; a + :class:`~sqlalchemy.types.TypeEngine` + type object to specify the previous type. This + is required for all MySQL column alter operations that + don't otherwise specify a new type, as well as for + when nullability is being changed on a SQL Server + column. It is also used if the type is a so-called + SQLAlchemy "schema" type which may define a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, + :class:`~sqlalchemy.types.Enum`), + so that the constraint can be dropped. + :param existing_server_default: Optional; The existing + default value of the column. Required on MySQL if + an existing default is not being changed; else MySQL + removes the default. + :param existing_nullable: Optional; the existing nullability + of the column. Required on MySQL if the existing nullability + is not being changed; else MySQL sets this to NULL. + :param existing_autoincrement: Optional; the existing autoincrement + of the column. Used for MySQL's system of altering a column + that specifies ``AUTO_INCREMENT``. + :param existing_comment: string text of the existing comment on the + column to be maintained. Required on MySQL if the existing comment + on the column is not being changed. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + :param postgresql_using: String argument which will indicate a + SQL expression to render within the Postgresql-specific USING clause + within ALTER COLUMN. This string is taken directly as raw SQL which + must explicitly include any necessary quoting or escaping of tokens + within the expression. + + """ + + alt = cls( + table_name, + column_name, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + modify_name=new_column_name, + modify_type=type_, + modify_server_default=server_default, + modify_nullable=nullable, + modify_comment=comment, + **kw, + ) + + return operations.invoke(alt) + + @classmethod + def batch_alter_column( + cls, + operations: BatchOperations, + column_name: str, + *, + nullable: Optional[bool] = None, + comment: Optional[Union[str, Literal[False]]] = False, + server_default: Any = False, + new_column_name: Optional[str] = None, + type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None, + existing_type: Optional[ + Union[TypeEngine[Any], Type[TypeEngine[Any]]] + ] = None, + existing_server_default: Optional[ + Union[str, bool, Identity, Computed] + ] = False, + existing_nullable: Optional[bool] = None, + existing_comment: Optional[str] = None, + insert_before: Optional[str] = None, + insert_after: Optional[str] = None, + **kw: Any, + ) -> None: + """Issue an "alter column" instruction using the current + batch migration context. 
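+
+        A minimal usage sketch, assuming a batch context opened with
+        ``op.batch_alter_table`` (table and column names are hypothetical)::
+
+            from sqlalchemy import VARCHAR
+            from alembic import op
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.alter_column(
+                    "name",
+                    existing_type=VARCHAR(50),
+                    nullable=True,
+                )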
+ + Parameters are the same as that of :meth:`.Operations.alter_column`, + as well as the following option(s): + + :param insert_before: String name of an existing column which this + column should be placed before, when creating the new table. + + :param insert_after: String name of an existing column which this + column should be placed after, when creating the new table. If + both :paramref:`.BatchOperations.alter_column.insert_before` + and :paramref:`.BatchOperations.alter_column.insert_after` are + omitted, the column is inserted after the last existing column + in the table. + + .. seealso:: + + :meth:`.Operations.alter_column` + + + """ + alt = cls( + operations.impl.table_name, + column_name, + schema=operations.impl.schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_comment=existing_comment, + modify_name=new_column_name, + modify_type=type_, + modify_server_default=server_default, + modify_nullable=nullable, + modify_comment=comment, + insert_before=insert_before, + insert_after=insert_after, + **kw, + ) + + return operations.invoke(alt) + + +@Operations.register_operation("add_column") +@BatchOperations.register_operation("add_column", "batch_add_column") +class AddColumnOp(AlterTableOp): + """Represent an add column operation.""" + + def __init__( + self, + table_name: str, + column: Column[Any], + *, + schema: Optional[str] = None, + **kw: Any, + ) -> None: + super().__init__(table_name, schema=schema) + self.column = column + self.kw = kw + + def reverse(self) -> DropColumnOp: + return DropColumnOp.from_column_and_tablename( + self.schema, self.table_name, self.column + ) + + def to_diff_tuple( + self, + ) -> Tuple[str, Optional[str], str, Column[Any]]: + return ("add_column", self.schema, self.table_name, self.column) + + def to_column(self) -> Column[Any]: + return self.column + + @classmethod + def from_column(cls, col: Column[Any]) -> AddColumnOp: + return cls(col.table.name, col, schema=col.table.schema) + + @classmethod + def from_column_and_tablename( + cls, + schema: Optional[str], + tname: str, + col: Column[Any], + ) -> AddColumnOp: + return cls(tname, col, schema=schema) + + @classmethod + def add_column( + cls, + operations: Operations, + table_name: str, + column: Column[Any], + *, + schema: Optional[str] = None, + ) -> None: + """Issue an "add column" instruction using the current + migration context. + + e.g.:: + + from alembic import op + from sqlalchemy import Column, String + + op.add_column("organization", Column("name", String())) + + The :meth:`.Operations.add_column` method typically corresponds + to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope + of this command, the column's name, datatype, nullability, + and optional server-generated defaults may be indicated. + + .. note:: + + With the exception of NOT NULL constraints or single-column FOREIGN + KEY constraints, other kinds of constraints such as PRIMARY KEY, + UNIQUE or CHECK constraints **cannot** be generated using this + method; for these constraints, refer to operations such as + :meth:`.Operations.create_primary_key` and + :meth:`.Operations.create_check_constraint`. 
In particular, the + following :class:`~sqlalchemy.schema.Column` parameters are + **ignored**: + + * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases + typically do not support an ALTER operation that can add + individual columns one at a time to an existing primary key + constraint, therefore it's less ambiguous to use the + :meth:`.Operations.create_primary_key` method, which assumes no + existing primary key constraint is present. + * :paramref:`~sqlalchemy.schema.Column.unique` - use the + :meth:`.Operations.create_unique_constraint` method + * :paramref:`~sqlalchemy.schema.Column.index` - use the + :meth:`.Operations.create_index` method + + + The provided :class:`~sqlalchemy.schema.Column` object may include a + :class:`~sqlalchemy.schema.ForeignKey` constraint directive, + referencing a remote table name. For this specific type of constraint, + Alembic will automatically emit a second ALTER statement in order to + add the single-column FOREIGN KEY constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column( + "organization", + Column("account_id", INTEGER, ForeignKey("accounts.id")), + ) + + The column argument passed to :meth:`.Operations.add_column` is a + :class:`~sqlalchemy.schema.Column` construct, used in the same way it's + used in SQLAlchemy. In particular, values or functions to be indicated + as producing the column's default value on the database side are + specified using the ``server_default`` parameter, and not ``default`` + which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the column add + op.add_column( + "account", + Column("timestamp", TIMESTAMP, server_default=func.now()), + ) + + :param table_name: String name of the parent table. + :param column: a :class:`sqlalchemy.schema.Column` object + representing the new column. + :param schema: Optional schema name to operate within. To control + quoting of the schema outside of the default behavior, use + the SQLAlchemy construct + :class:`~sqlalchemy.sql.elements.quoted_name`. + + """ + + op = cls(table_name, column, schema=schema) + return operations.invoke(op) + + @classmethod + def batch_add_column( + cls, + operations: BatchOperations, + column: Column[Any], + *, + insert_before: Optional[str] = None, + insert_after: Optional[str] = None, + ) -> None: + """Issue an "add column" instruction using the current + batch migration context. + + .. 
seealso::
+
+            :meth:`.Operations.add_column`
+
+        """
+
+        kw = {}
+        if insert_before:
+            kw["insert_before"] = insert_before
+        if insert_after:
+            kw["insert_after"] = insert_after
+
+        op = cls(
+            operations.impl.table_name,
+            column,
+            schema=operations.impl.schema,
+            **kw,
+        )
+        return operations.invoke(op)
+
+
+@Operations.register_operation("drop_column")
+@BatchOperations.register_operation("drop_column", "batch_drop_column")
+class DropColumnOp(AlterTableOp):
+    """Represent a drop column operation."""
+
+    def __init__(
+        self,
+        table_name: str,
+        column_name: str,
+        *,
+        schema: Optional[str] = None,
+        _reverse: Optional[AddColumnOp] = None,
+        **kw: Any,
+    ) -> None:
+        super().__init__(table_name, schema=schema)
+        self.column_name = column_name
+        self.kw = kw
+        self._reverse = _reverse
+
+    def to_diff_tuple(
+        self,
+    ) -> Tuple[str, Optional[str], str, Column[Any]]:
+        return (
+            "remove_column",
+            self.schema,
+            self.table_name,
+            self.to_column(),
+        )
+
+    def reverse(self) -> AddColumnOp:
+        if self._reverse is None:
+            raise ValueError(
+                "operation is not reversible; "
+                "original column is not present"
+            )
+
+        return AddColumnOp.from_column_and_tablename(
+            self.schema, self.table_name, self._reverse.column
+        )
+
+    @classmethod
+    def from_column_and_tablename(
+        cls,
+        schema: Optional[str],
+        tname: str,
+        col: Column[Any],
+    ) -> DropColumnOp:
+        return cls(
+            tname,
+            col.name,
+            schema=schema,
+            _reverse=AddColumnOp.from_column_and_tablename(schema, tname, col),
+        )
+
+    def to_column(
+        self, migration_context: Optional[MigrationContext] = None
+    ) -> Column[Any]:
+        if self._reverse is not None:
+            return self._reverse.column
+        schema_obj = schemaobj.SchemaObjects(migration_context)
+        return schema_obj.column(self.column_name, NULLTYPE)
+
+    @classmethod
+    def drop_column(
+        cls,
+        operations: Operations,
+        table_name: str,
+        column_name: str,
+        *,
+        schema: Optional[str] = None,
+        **kw: Any,
+    ) -> None:
+        """Issue a "drop column" instruction using the current
+        migration context.
+
+        e.g.::
+
+            drop_column("organization", "account_id")
+
+        :param table_name: name of table
+        :param column_name: name of column
+        :param schema: Optional schema name to operate within.  To control
+         quoting of the schema outside of the default behavior, use
+         the SQLAlchemy construct
+         :class:`~sqlalchemy.sql.elements.quoted_name`.
+        :param mssql_drop_check: Optional boolean.  When ``True``, on
+         Microsoft SQL Server only, first
+         drop the CHECK constraint on the column using a
+         SQL-script-compatible
+         block that selects into a @variable from sys.check_constraints,
+         then exec's a separate DROP CONSTRAINT for that constraint.
+        :param mssql_drop_default: Optional boolean.  When ``True``, on
+         Microsoft SQL Server only, first
+         drop the DEFAULT constraint on the column using a
+         SQL-script-compatible
+         block that selects into a @variable from sys.default_constraints,
+         then exec's a separate DROP CONSTRAINT for that default.
+        :param mssql_drop_foreign_key: Optional boolean.  When ``True``, on
+         Microsoft SQL Server only, first
+         drop a single FOREIGN KEY constraint on the column using a
+         SQL-script-compatible
+         block that selects into a @variable from
+         sys.foreign_keys/sys.foreign_key_columns,
+         then exec's a separate DROP CONSTRAINT for that foreign key.  Only
+         works if the column has exactly one FK constraint which refers to
+         it, at the moment.
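+
+        A sketch of the SQL Server-specific flags described above, reusing
+        the table and column from the example; the flag combination shown is
+        illustrative only::
+
+            op.drop_column(
+                "organization",
+                "account_id",
+                mssql_drop_default=True,
+                mssql_drop_foreign_key=True,
+            )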
+ + """ + + op = cls(table_name, column_name, schema=schema, **kw) + return operations.invoke(op) + + @classmethod + def batch_drop_column( + cls, operations: BatchOperations, column_name: str, **kw: Any + ) -> None: + """Issue a "drop column" instruction using the current + batch migration context. + + .. seealso:: + + :meth:`.Operations.drop_column` + + """ + op = cls( + operations.impl.table_name, + column_name, + schema=operations.impl.schema, + **kw, + ) + return operations.invoke(op) + + +@Operations.register_operation("bulk_insert") +class BulkInsertOp(MigrateOperation): + """Represent a bulk insert operation.""" + + def __init__( + self, + table: Union[Table, TableClause], + rows: List[Dict[str, Any]], + *, + multiinsert: bool = True, + ) -> None: + self.table = table + self.rows = rows + self.multiinsert = multiinsert + + @classmethod + def bulk_insert( + cls, + operations: Operations, + table: Union[Table, TableClause], + rows: List[Dict[str, Any]], + *, + multiinsert: bool = True, + ) -> None: + """Issue a "bulk insert" operation using the current + migration context. + + This provides a means of representing an INSERT of multiple rows + which works equally well in the context of executing on a live + connection as well as that of generating a SQL script. In the + case of a SQL script, the values are rendered inline into the + statement. + + e.g.:: + + from alembic import op + from datetime import date + from sqlalchemy.sql import table, column + from sqlalchemy import String, Integer, Date + + # Create an ad-hoc table to use for the insert statement. + accounts_table = table( + "account", + column("id", Integer), + column("name", String), + column("create_date", Date), + ) + + op.bulk_insert( + accounts_table, + [ + { + "id": 1, + "name": "John Smith", + "create_date": date(2010, 10, 5), + }, + { + "id": 2, + "name": "Ed Williams", + "create_date": date(2007, 5, 27), + }, + { + "id": 3, + "name": "Wendy Jones", + "create_date": date(2008, 8, 15), + }, + ], + ) + + When using --sql mode, some datatypes may not render inline + automatically, such as dates and other special types. When this + issue is present, :meth:`.Operations.inline_literal` may be used:: + + op.bulk_insert( + accounts_table, + [ + { + "id": 1, + "name": "John Smith", + "create_date": op.inline_literal("2010-10-05"), + }, + { + "id": 2, + "name": "Ed Williams", + "create_date": op.inline_literal("2007-05-27"), + }, + { + "id": 3, + "name": "Wendy Jones", + "create_date": op.inline_literal("2008-08-15"), + }, + ], + multiinsert=False, + ) + + When using :meth:`.Operations.inline_literal` in conjunction with + :meth:`.Operations.bulk_insert`, in order for the statement to work + in "online" (e.g. non --sql) mode, the + :paramref:`~.Operations.bulk_insert.multiinsert` + flag should be set to ``False``, which will have the effect of + individual INSERT statements being emitted to the database, each + with a distinct VALUES clause, so that the "inline" values can + still be rendered, rather than attempting to pass the values + as bound parameters. + + :param table: a table object which represents the target of the INSERT. + + :param rows: a list of dictionaries indicating rows. + + :param multiinsert: when at its default of True and --sql mode is not + enabled, the INSERT statement will be executed using + "executemany()" style, where all elements in the list of + dictionaries are passed as bound parameters in a single + list. 
Setting this to False results in individual INSERT + statements being emitted per parameter set, and is needed + in those cases where non-literal values are present in the + parameter sets. + + """ + + op = cls(table, rows, multiinsert=multiinsert) + operations.invoke(op) + + +@Operations.register_operation("execute") +@BatchOperations.register_operation("execute", "batch_execute") +class ExecuteSQLOp(MigrateOperation): + """Represent an execute SQL operation.""" + + def __init__( + self, + sqltext: Union[Executable, str], + *, + execution_options: Optional[dict[str, Any]] = None, + ) -> None: + self.sqltext = sqltext + self.execution_options = execution_options + + @classmethod + def execute( + cls, + operations: Operations, + sqltext: Union[Executable, str], + *, + execution_options: Optional[dict[str, Any]] = None, + ) -> None: + r"""Execute the given SQL using the current migration context. + + The given SQL can be a plain string, e.g.:: + + op.execute("INSERT INTO table (foo) VALUES ('some value')") + + Or it can be any kind of Core SQL Expression construct, such as + below where we use an update construct:: + + from sqlalchemy.sql import table, column + from sqlalchemy import String + from alembic import op + + account = table("account", column("name", String)) + op.execute( + account.update() + .where(account.c.name == op.inline_literal("account 1")) + .values({"name": op.inline_literal("account 2")}) + ) + + Above, we made use of the SQLAlchemy + :func:`sqlalchemy.sql.expression.table` and + :func:`sqlalchemy.sql.expression.column` constructs to make a brief, + ad-hoc table construct just for our UPDATE statement. A full + :class:`~sqlalchemy.schema.Table` construct of course works perfectly + fine as well, though note it's a recommended practice to at least + ensure the definition of a table is self-contained within the migration + script, rather than imported from a module that may break compatibility + with older migrations. + + In a SQL script context, the statement is emitted directly to the + output stream. There is *no* return result, however, as this + function is oriented towards generating a change script + that can run in "offline" mode. Additionally, parameterized + statements are discouraged here, as they *will not work* in offline + mode. Above, we use :meth:`.inline_literal` where parameters are + to be used. + + For full interaction with a connected database where parameters can + also be used normally, use the "bind" available from the context:: + + from alembic import op + + connection = op.get_bind() + + connection.execute( + account.update() + .where(account.c.name == "account 1") + .values({"name": "account 2"}) + ) + + Additionally, when passing the statement as a plain string, it is first + coerced into a :func:`sqlalchemy.sql.expression.text` construct + before being passed along. In the less likely case that the + literal SQL string contains a colon, it must be escaped with a + backslash, as:: + + op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')") + + + :param sqltext: Any legal SQLAlchemy expression, including: + + * a string + * a :func:`sqlalchemy.sql.expression.text` construct. + * a :func:`sqlalchemy.sql.expression.insert` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. + * Any "executable" described in SQLAlchemy Core documentation, + noting that no result set is returned. + + .. 
note:: when passing a plain string, the statement is coerced into
+           a :func:`sqlalchemy.sql.expression.text` construct. This construct
+           considers symbols with colons, e.g. ``:foo`` to be bound parameters.
+           To avoid this, ensure that colon symbols are escaped, e.g.
+           ``\:foo``.
+
+        :param execution_options: Optional dictionary of
+         execution options, will be passed to
+         :meth:`sqlalchemy.engine.Connection.execution_options`.
+        """
+        op = cls(sqltext, execution_options=execution_options)
+        return operations.invoke(op)
+
+    @classmethod
+    def batch_execute(
+        cls,
+        operations: Operations,
+        sqltext: Union[Executable, str],
+        *,
+        execution_options: Optional[dict[str, Any]] = None,
+    ) -> None:
+        """Execute the given SQL using the current migration context.
+
+        .. seealso::
+
+            :meth:`.Operations.execute`
+
+        """
+        return cls.execute(
+            operations, sqltext, execution_options=execution_options
+        )
+
+    def to_diff_tuple(self) -> Tuple[str, Union[Executable, str]]:
+        return ("execute", self.sqltext)
+
+
+class OpContainer(MigrateOperation):
+    """Represent a sequence of operations."""
+
+    def __init__(self, ops: Sequence[MigrateOperation] = ()) -> None:
+        self.ops = list(ops)
+
+    def is_empty(self) -> bool:
+        return not self.ops
+
+    def as_diffs(self) -> Any:
+        return list(OpContainer._ops_as_diffs(self))
+
+    @classmethod
+    def _ops_as_diffs(
+        cls, migrations: OpContainer
+    ) -> Iterator[Tuple[Any, ...]]:
+        for op in migrations.ops:
+            if hasattr(op, "ops"):
+                yield from cls._ops_as_diffs(cast("OpContainer", op))
+            else:
+                yield op.to_diff_tuple()
+
+
+class ModifyTableOps(OpContainer):
+    """Contains a sequence of operations that all apply to a single Table."""
+
+    def __init__(
+        self,
+        table_name: str,
+        ops: Sequence[MigrateOperation],
+        *,
+        schema: Optional[str] = None,
+    ) -> None:
+        super().__init__(ops)
+        self.table_name = table_name
+        self.schema = schema
+
+    def reverse(self) -> ModifyTableOps:
+        return ModifyTableOps(
+            self.table_name,
+            ops=list(reversed([op.reverse() for op in self.ops])),
+            schema=self.schema,
+        )
+
+
+class UpgradeOps(OpContainer):
+    """Contains a sequence of operations that would apply to the
+    'upgrade' stream of a script.
+
+    .. seealso::
+
+        :ref:`customizing_revision`
+
+    """
+
+    def __init__(
+        self,
+        ops: Sequence[MigrateOperation] = (),
+        upgrade_token: str = "upgrades",
+    ) -> None:
+        super().__init__(ops=ops)
+        self.upgrade_token = upgrade_token
+
+    def reverse_into(self, downgrade_ops: DowngradeOps) -> DowngradeOps:
+        downgrade_ops.ops[:] = list(
+            reversed([op.reverse() for op in self.ops])
+        )
+        return downgrade_ops
+
+    def reverse(self) -> DowngradeOps:
+        return self.reverse_into(DowngradeOps(ops=[]))
+
+
+class DowngradeOps(OpContainer):
+    """Contains a sequence of operations that would apply to the
+    'downgrade' stream of a script.
+
+    .. seealso::
+
+        :ref:`customizing_revision`
+
+    """
+
+    def __init__(
+        self,
+        ops: Sequence[MigrateOperation] = (),
+        downgrade_token: str = "downgrades",
+    ) -> None:
+        super().__init__(ops=ops)
+        self.downgrade_token = downgrade_token
+
+    def reverse(self) -> UpgradeOps:
+        return UpgradeOps(
+            ops=list(reversed([op.reverse() for op in self.ops]))
+        )
+
+
+class MigrationScript(MigrateOperation):
+    """Represents a migration script.
+
+    E.g. when autogenerate encounters this object, this corresponds to the
+    production of an actual script file.
+
+    A normal :class:`.MigrationScript` object would contain a single
+    :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive.
+ These are accessible via the ``.upgrade_ops`` and ``.downgrade_ops`` + attributes. + + In the case of an autogenerate operation that runs multiple times, + such as the multiple database example in the "multidb" template, + the ``.upgrade_ops`` and ``.downgrade_ops`` attributes are disabled, + and instead these objects should be accessed via the ``.upgrade_ops_list`` + and ``.downgrade_ops_list`` list-based attributes. These latter + attributes are always available at the very least as single-element lists. + + .. seealso:: + + :ref:`customizing_revision` + + """ + + _needs_render: Optional[bool] + _upgrade_ops: List[UpgradeOps] + _downgrade_ops: List[DowngradeOps] + + def __init__( + self, + rev_id: Optional[str], + upgrade_ops: UpgradeOps, + downgrade_ops: DowngradeOps, + *, + message: Optional[str] = None, + imports: Set[str] = set(), + head: Optional[str] = None, + splice: Optional[bool] = None, + branch_label: Optional[_RevIdType] = None, + version_path: Optional[str] = None, + depends_on: Optional[_RevIdType] = None, + ) -> None: + self.rev_id = rev_id + self.message = message + self.imports = imports + self.head = head + self.splice = splice + self.branch_label = branch_label + self.version_path = version_path + self.depends_on = depends_on + self.upgrade_ops = upgrade_ops + self.downgrade_ops = downgrade_ops + + @property + def upgrade_ops(self) -> Optional[UpgradeOps]: + """An instance of :class:`.UpgradeOps`. + + .. seealso:: + + :attr:`.MigrationScript.upgrade_ops_list` + """ + if len(self._upgrade_ops) > 1: + raise ValueError( + "This MigrationScript instance has a multiple-entry " + "list for UpgradeOps; please use the " + "upgrade_ops_list attribute." + ) + elif not self._upgrade_ops: + return None + else: + return self._upgrade_ops[0] + + @upgrade_ops.setter + def upgrade_ops( + self, upgrade_ops: Union[UpgradeOps, List[UpgradeOps]] + ) -> None: + self._upgrade_ops = util.to_list(upgrade_ops) + for elem in self._upgrade_ops: + assert isinstance(elem, UpgradeOps) + + @property + def downgrade_ops(self) -> Optional[DowngradeOps]: + """An instance of :class:`.DowngradeOps`. + + .. seealso:: + + :attr:`.MigrationScript.downgrade_ops_list` + """ + if len(self._downgrade_ops) > 1: + raise ValueError( + "This MigrationScript instance has a multiple-entry " + "list for DowngradeOps; please use the " + "downgrade_ops_list attribute." + ) + elif not self._downgrade_ops: + return None + else: + return self._downgrade_ops[0] + + @downgrade_ops.setter + def downgrade_ops( + self, downgrade_ops: Union[DowngradeOps, List[DowngradeOps]] + ) -> None: + self._downgrade_ops = util.to_list(downgrade_ops) + for elem in self._downgrade_ops: + assert isinstance(elem, DowngradeOps) + + @property + def upgrade_ops_list(self) -> List[UpgradeOps]: + """A list of :class:`.UpgradeOps` instances. + + This is used in place of the :attr:`.MigrationScript.upgrade_ops` + attribute when dealing with a revision operation that does + multiple autogenerate passes. + + """ + return self._upgrade_ops + + @property + def downgrade_ops_list(self) -> List[DowngradeOps]: + """A list of :class:`.DowngradeOps` instances. + + This is used in place of the :attr:`.MigrationScript.downgrade_ops` + attribute when dealing with a revision operation that does + multiple autogenerate passes. 
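+
+        A sketch of reading this list from a hypothetical
+        ``process_revision_directives`` hook (see
+        :ref:`customizing_revision`)::
+
+            def process_revision_directives(context, revision, directives):
+                script = directives[0]
+                for downgrade_ops in script.downgrade_ops_list:
+                    ...  # inspect or rewrite each downgrade stream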
+ + """ + return self._downgrade_ops diff --git a/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py b/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py new file mode 100644 index 0000000..32b26e9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py @@ -0,0 +1,288 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import schema as sa_schema +from sqlalchemy.sql.schema import Column +from sqlalchemy.sql.schema import Constraint +from sqlalchemy.sql.schema import Index +from sqlalchemy.types import Integer +from sqlalchemy.types import NULLTYPE + +from .. import util +from ..util import sqla_compat + +if TYPE_CHECKING: + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.schema import CheckConstraint + from sqlalchemy.sql.schema import ForeignKey + from sqlalchemy.sql.schema import ForeignKeyConstraint + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import PrimaryKeyConstraint + from sqlalchemy.sql.schema import Table + from sqlalchemy.sql.schema import UniqueConstraint + from sqlalchemy.sql.type_api import TypeEngine + + from ..runtime.migration import MigrationContext + + +class SchemaObjects: + def __init__( + self, migration_context: Optional[MigrationContext] = None + ) -> None: + self.migration_context = migration_context + + def primary_key_constraint( + self, + name: Optional[sqla_compat._ConstraintNameDefined], + table_name: str, + cols: Sequence[str], + schema: Optional[str] = None, + **dialect_kw, + ) -> PrimaryKeyConstraint: + m = self.metadata() + columns = [sa_schema.Column(n, NULLTYPE) for n in cols] + t = sa_schema.Table(table_name, m, *columns, schema=schema) + # SQLAlchemy primary key constraint name arg is wrongly typed on + # the SQLAlchemy side through 2.0.5 at least + p = sa_schema.PrimaryKeyConstraint( + *[t.c[n] for n in cols], name=name, **dialect_kw # type: ignore + ) + return p + + def foreign_key_constraint( + self, + name: Optional[sqla_compat._ConstraintNameDefined], + source: str, + referent: str, + local_cols: List[str], + remote_cols: List[str], + onupdate: Optional[str] = None, + ondelete: Optional[str] = None, + deferrable: Optional[bool] = None, + source_schema: Optional[str] = None, + referent_schema: Optional[str] = None, + initially: Optional[str] = None, + match: Optional[str] = None, + **dialect_kw, + ) -> ForeignKeyConstraint: + m = self.metadata() + if source == referent and source_schema == referent_schema: + t1_cols = local_cols + remote_cols + else: + t1_cols = local_cols + sa_schema.Table( + referent, + m, + *[sa_schema.Column(n, NULLTYPE) for n in remote_cols], + schema=referent_schema, + ) + + t1 = sa_schema.Table( + source, + m, + *[ + sa_schema.Column(n, NULLTYPE) + for n in util.unique_list(t1_cols) + ], + schema=source_schema, + ) + + tname = ( + "%s.%s" % (referent_schema, referent) + if referent_schema + else referent + ) + + dialect_kw["match"] = match + + f = sa_schema.ForeignKeyConstraint( + local_cols, + ["%s.%s" % (tname, n) for n in remote_cols], + name=name, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + 
initially=initially, + **dialect_kw, + ) + t1.append_constraint(f) + + return f + + def unique_constraint( + self, + name: Optional[sqla_compat._ConstraintNameDefined], + source: str, + local_cols: Sequence[str], + schema: Optional[str] = None, + **kw, + ) -> UniqueConstraint: + t = sa_schema.Table( + source, + self.metadata(), + *[sa_schema.Column(n, NULLTYPE) for n in local_cols], + schema=schema, + ) + kw["name"] = name + uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw) + # TODO: need event tests to ensure the event + # is fired off here + t.append_constraint(uq) + return uq + + def check_constraint( + self, + name: Optional[sqla_compat._ConstraintNameDefined], + source: str, + condition: Union[str, TextClause, ColumnElement[Any]], + schema: Optional[str] = None, + **kw, + ) -> Union[CheckConstraint]: + t = sa_schema.Table( + source, + self.metadata(), + sa_schema.Column("x", Integer), + schema=schema, + ) + ck = sa_schema.CheckConstraint(condition, name=name, **kw) + t.append_constraint(ck) + return ck + + def generic_constraint( + self, + name: Optional[sqla_compat._ConstraintNameDefined], + table_name: str, + type_: Optional[str], + schema: Optional[str] = None, + **kw, + ) -> Any: + t = self.table(table_name, schema=schema) + types: Dict[Optional[str], Any] = { + "foreignkey": lambda name: sa_schema.ForeignKeyConstraint( + [], [], name=name + ), + "primary": sa_schema.PrimaryKeyConstraint, + "unique": sa_schema.UniqueConstraint, + "check": lambda name: sa_schema.CheckConstraint("", name=name), + None: sa_schema.Constraint, + } + try: + const = types[type_] + except KeyError as ke: + raise TypeError( + "'type' can be one of %s" + % ", ".join(sorted(repr(x) for x in types)) + ) from ke + else: + const = const(name=name) + t.append_constraint(const) + return const + + def metadata(self) -> MetaData: + kw = {} + if ( + self.migration_context is not None + and "target_metadata" in self.migration_context.opts + ): + mt = self.migration_context.opts["target_metadata"] + if hasattr(mt, "naming_convention"): + kw["naming_convention"] = mt.naming_convention + return sa_schema.MetaData(**kw) + + def table(self, name: str, *columns, **kw) -> Table: + m = self.metadata() + + cols = [ + sqla_compat._copy(c) if c.table is not None else c + for c in columns + if isinstance(c, Column) + ] + # these flags have already added their UniqueConstraint / + # Index objects to the table, so flip them off here. + # SQLAlchemy tometadata() avoids this instead by preserving the + # flags and skipping the constraints that have _type_bound on them, + # but for a migration we'd rather list out the constraints + # explicitly. 
+ _constraints_included = kw.pop("_constraints_included", False) + if _constraints_included: + for c in cols: + c.unique = c.index = False + + t = sa_schema.Table(name, m, *cols, **kw) + + constraints = [ + sqla_compat._copy(elem, target_table=t) + if getattr(elem, "parent", None) is not t + and getattr(elem, "parent", None) is not None + else elem + for elem in columns + if isinstance(elem, (Constraint, Index)) + ] + + for const in constraints: + t.append_constraint(const) + + for f in t.foreign_keys: + self._ensure_table_for_fk(m, f) + return t + + def column(self, name: str, type_: TypeEngine, **kw) -> Column: + return sa_schema.Column(name, type_, **kw) + + def index( + self, + name: Optional[str], + tablename: Optional[str], + columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], + schema: Optional[str] = None, + **kw, + ) -> Index: + t = sa_schema.Table( + tablename or "no_table", + self.metadata(), + schema=schema, + ) + kw["_table"] = t + idx = sa_schema.Index( + name, + *[util.sqla_compat._textual_index_column(t, n) for n in columns], + **kw, + ) + return idx + + def _parse_table_key(self, table_key: str) -> Tuple[Optional[str], str]: + if "." in table_key: + tokens = table_key.split(".") + sname: Optional[str] = ".".join(tokens[0:-1]) + tname = tokens[-1] + else: + tname = table_key + sname = None + return (sname, tname) + + def _ensure_table_for_fk(self, metadata: MetaData, fk: ForeignKey) -> None: + """create a placeholder Table object for the referent of a + ForeignKey. + + """ + if isinstance(fk._colspec, str): + table_key, cname = fk._colspec.rsplit(".", 1) + sname, tname = self._parse_table_key(table_key) + if table_key not in metadata.tables: + rel_t = sa_schema.Table(tname, metadata, schema=sname) + else: + rel_t = metadata.tables[table_key] + if cname not in rel_t.c: + rel_t.append_column(sa_schema.Column(cname, NULLTYPE)) diff --git a/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py b/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py new file mode 100644 index 0000000..4759f7f --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py @@ -0,0 +1,226 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from typing import TYPE_CHECKING + +from sqlalchemy import schema as sa_schema + +from . 
import ops +from .base import Operations +from ..util.sqla_compat import _copy +from ..util.sqla_compat import sqla_14 + +if TYPE_CHECKING: + from sqlalchemy.sql.schema import Table + + +@Operations.implementation_for(ops.AlterColumnOp) +def alter_column( + operations: "Operations", operation: "ops.AlterColumnOp" +) -> None: + compiler = operations.impl.dialect.statement_compiler( + operations.impl.dialect, None + ) + + existing_type = operation.existing_type + existing_nullable = operation.existing_nullable + existing_server_default = operation.existing_server_default + type_ = operation.modify_type + column_name = operation.column_name + table_name = operation.table_name + schema = operation.schema + server_default = operation.modify_server_default + new_column_name = operation.modify_name + nullable = operation.modify_nullable + comment = operation.modify_comment + existing_comment = operation.existing_comment + + def _count_constraint(constraint): + return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and ( + not constraint._create_rule or constraint._create_rule(compiler) + ) + + if existing_type and type_: + t = operations.schema_obj.table( + table_name, + sa_schema.Column(column_name, existing_type), + schema=schema, + ) + for constraint in t.constraints: + if _count_constraint(constraint): + operations.impl.drop_constraint(constraint) + + operations.impl.alter_column( + table_name, + column_name, + nullable=nullable, + server_default=server_default, + name=new_column_name, + type_=type_, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + comment=comment, + existing_comment=existing_comment, + **operation.kw, + ) + + if type_: + t = operations.schema_obj.table( + table_name, + operations.schema_obj.column(column_name, type_), + schema=schema, + ) + for constraint in t.constraints: + if _count_constraint(constraint): + operations.impl.add_constraint(constraint) + + +@Operations.implementation_for(ops.DropTableOp) +def drop_table(operations: "Operations", operation: "ops.DropTableOp") -> None: + operations.impl.drop_table( + operation.to_table(operations.migration_context) + ) + + +@Operations.implementation_for(ops.DropColumnOp) +def drop_column( + operations: "Operations", operation: "ops.DropColumnOp" +) -> None: + column = operation.to_column(operations.migration_context) + operations.impl.drop_column( + operation.table_name, column, schema=operation.schema, **operation.kw + ) + + +@Operations.implementation_for(ops.CreateIndexOp) +def create_index( + operations: "Operations", operation: "ops.CreateIndexOp" +) -> None: + idx = operation.to_index(operations.migration_context) + kw = {} + if operation.if_not_exists is not None: + if not sqla_14: + raise NotImplementedError("SQLAlchemy 1.4+ required") + + kw["if_not_exists"] = operation.if_not_exists + operations.impl.create_index(idx, **kw) + + +@Operations.implementation_for(ops.DropIndexOp) +def drop_index(operations: "Operations", operation: "ops.DropIndexOp") -> None: + kw = {} + if operation.if_exists is not None: + if not sqla_14: + raise NotImplementedError("SQLAlchemy 1.4+ required") + + kw["if_exists"] = operation.if_exists + + operations.impl.drop_index( + operation.to_index(operations.migration_context), + **kw, + ) + + +@Operations.implementation_for(ops.CreateTableOp) +def create_table( + operations: "Operations", operation: "ops.CreateTableOp" +) -> "Table": + table = operation.to_table(operations.migration_context) + 
operations.impl.create_table(table) + return table + + +@Operations.implementation_for(ops.RenameTableOp) +def rename_table( + operations: "Operations", operation: "ops.RenameTableOp" +) -> None: + operations.impl.rename_table( + operation.table_name, operation.new_table_name, schema=operation.schema + ) + + +@Operations.implementation_for(ops.CreateTableCommentOp) +def create_table_comment( + operations: "Operations", operation: "ops.CreateTableCommentOp" +) -> None: + table = operation.to_table(operations.migration_context) + operations.impl.create_table_comment(table) + + +@Operations.implementation_for(ops.DropTableCommentOp) +def drop_table_comment( + operations: "Operations", operation: "ops.DropTableCommentOp" +) -> None: + table = operation.to_table(operations.migration_context) + operations.impl.drop_table_comment(table) + + +@Operations.implementation_for(ops.AddColumnOp) +def add_column(operations: "Operations", operation: "ops.AddColumnOp") -> None: + table_name = operation.table_name + column = operation.column + schema = operation.schema + kw = operation.kw + + if column.table is not None: + column = _copy(column) + + t = operations.schema_obj.table(table_name, column, schema=schema) + operations.impl.add_column(table_name, column, schema=schema, **kw) + + for constraint in t.constraints: + if not isinstance(constraint, sa_schema.PrimaryKeyConstraint): + operations.impl.add_constraint(constraint) + for index in t.indexes: + operations.impl.create_index(index) + + with_comment = ( + operations.impl.dialect.supports_comments + and not operations.impl.dialect.inline_comments + ) + comment = column.comment + if comment and with_comment: + operations.impl.create_column_comment(column) + + +@Operations.implementation_for(ops.AddConstraintOp) +def create_constraint( + operations: "Operations", operation: "ops.AddConstraintOp" +) -> None: + operations.impl.add_constraint( + operation.to_constraint(operations.migration_context) + ) + + +@Operations.implementation_for(ops.DropConstraintOp) +def drop_constraint( + operations: "Operations", operation: "ops.DropConstraintOp" +) -> None: + operations.impl.drop_constraint( + operations.schema_obj.generic_constraint( + operation.constraint_name, + operation.table_name, + operation.constraint_type, + schema=operation.schema, + ) + ) + + +@Operations.implementation_for(ops.BulkInsertOp) +def bulk_insert( + operations: "Operations", operation: "ops.BulkInsertOp" +) -> None: + operations.impl.bulk_insert( # type: ignore[union-attr] + operation.table, operation.rows, multiinsert=operation.multiinsert + ) + + +@Operations.implementation_for(ops.ExecuteSQLOp) +def execute_sql( + operations: "Operations", operation: "ops.ExecuteSQLOp" +) -> None: + operations.migration_context.impl.execute( + operation.sqltext, execution_options=operation.execution_options + ) diff --git a/venv/lib/python3.12/site-packages/alembic/py.typed b/venv/lib/python3.12/site-packages/alembic/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/alembic/runtime/__init__.py b/venv/lib/python3.12/site-packages/alembic/runtime/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..0ff2a0c Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/__init__.cpython-312.pyc differ 
diff --git a/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/environment.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/environment.cpython-312.pyc new file mode 100644 index 0000000..ecb6237 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/environment.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/migration.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/migration.cpython-312.pyc new file mode 100644 index 0000000..93f4e1e Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/migration.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/runtime/environment.py b/venv/lib/python3.12/site-packages/alembic/runtime/environment.py new file mode 100644 index 0000000..d64b2ad --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/runtime/environment.py @@ -0,0 +1,1053 @@ +from __future__ import annotations + +from typing import Any +from typing import Callable +from typing import Collection +from typing import ContextManager +from typing import Dict +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import Optional +from typing import overload +from typing import Sequence +from typing import TextIO +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy.sql.schema import Column +from sqlalchemy.sql.schema import FetchedValue +from typing_extensions import Literal + +from .migration import _ProxyTransaction +from .migration import MigrationContext +from .. import util +from ..operations import Operations +from ..script.revision import _GetRevArg + +if TYPE_CHECKING: + from sqlalchemy.engine import URL + from sqlalchemy.engine.base import Connection + from sqlalchemy.sql import Executable + from sqlalchemy.sql.schema import MetaData + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.sql.type_api import TypeEngine + + from .migration import MigrationInfo + from ..autogenerate.api import AutogenContext + from ..config import Config + from ..ddl import DefaultImpl + from ..operations.ops import MigrationScript + from ..script.base import ScriptDirectory + +_RevNumber = Optional[Union[str, Tuple[str, ...]]] + +ProcessRevisionDirectiveFn = Callable[ + [MigrationContext, _GetRevArg, List["MigrationScript"]], None +] + +RenderItemFn = Callable[ + [str, Any, "AutogenContext"], Union[str, Literal[False]] +] + +NameFilterType = Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", +] +NameFilterParentNames = MutableMapping[ + Literal["schema_name", "table_name", "schema_qualified_table_name"], + Optional[str], +] +IncludeNameFn = Callable[ + [Optional[str], NameFilterType, NameFilterParentNames], bool +] + +IncludeObjectFn = Callable[ + [ + "SchemaItem", + Optional[str], + NameFilterType, + bool, + Optional["SchemaItem"], + ], + bool, +] + +OnVersionApplyFn = Callable[ + [MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]], + None, +] + +CompareServerDefault = Callable[ + [ + MigrationContext, + "Column[Any]", + "Column[Any]", + Optional[str], + Optional[FetchedValue], + Optional[str], + ], + Optional[bool], +] + +CompareType = Callable[ + [ + MigrationContext, + "Column[Any]", + "Column[Any]", + "TypeEngine[Any]", + "TypeEngine[Any]", + ], + Optional[bool], +] + + +class 
EnvironmentContext(util.ModuleClsProxy): + + """A configurational facade made available in an ``env.py`` script. + + The :class:`.EnvironmentContext` acts as a *facade* to the more + nuts-and-bolts objects of :class:`.MigrationContext` as well as certain + aspects of :class:`.Config`, + within the context of the ``env.py`` script that is invoked by + most Alembic commands. + + :class:`.EnvironmentContext` is normally instantiated + when a command in :mod:`alembic.command` is run. It then makes + itself available in the ``alembic.context`` module for the scope + of the command. From within an ``env.py`` script, the current + :class:`.EnvironmentContext` is available by importing this module. + + :class:`.EnvironmentContext` also supports programmatic usage. + At this level, it acts as a Python context manager, that is, is + intended to be used using the + ``with:`` statement. A typical use of :class:`.EnvironmentContext`:: + + from alembic.config import Config + from alembic.script import ScriptDirectory + + config = Config() + config.set_main_option("script_location", "myapp:migrations") + script = ScriptDirectory.from_config(config) + + + def my_function(rev, context): + '''do something with revision "rev", which + will be the current database revision, + and "context", which is the MigrationContext + that the env.py will create''' + + + with EnvironmentContext( + config, + script, + fn=my_function, + as_sql=False, + starting_rev="base", + destination_rev="head", + tag="sometag", + ): + script.run_env() + + The above script will invoke the ``env.py`` script + within the migration environment. If and when ``env.py`` + calls :meth:`.MigrationContext.run_migrations`, the + ``my_function()`` function above will be called + by the :class:`.MigrationContext`, given the context + itself as well as the current revision in the database. + + .. note:: + + For most API usages other than full blown + invocation of migration scripts, the :class:`.MigrationContext` + and :class:`.ScriptDirectory` objects can be created and + used directly. The :class:`.EnvironmentContext` object + is *only* needed when you need to actually invoke the + ``env.py`` module present in the migration environment. + + """ + + _migration_context: Optional[MigrationContext] = None + + config: Config = None # type:ignore[assignment] + """An instance of :class:`.Config` representing the + configuration file contents as well as other variables + set programmatically within it.""" + + script: ScriptDirectory = None # type:ignore[assignment] + """An instance of :class:`.ScriptDirectory` which provides + programmatic access to version files within the ``versions/`` + directory. + + """ + + def __init__( + self, config: Config, script: ScriptDirectory, **kw: Any + ) -> None: + r"""Construct a new :class:`.EnvironmentContext`. + + :param config: a :class:`.Config` instance. + :param script: a :class:`.ScriptDirectory` instance. + :param \**kw: keyword options that will be ultimately + passed along to the :class:`.MigrationContext` when + :meth:`.EnvironmentContext.configure` is called. + + """ + self.config = config + self.script = script + self.context_opts = kw + + def __enter__(self) -> EnvironmentContext: + """Establish a context which provides a + :class:`.EnvironmentContext` object to + env.py scripts. + + The :class:`.EnvironmentContext` will + be made available as ``from alembic import context``. 
+ + """ + self._install_proxy() + return self + + def __exit__(self, *arg: Any, **kw: Any) -> None: + self._remove_proxy() + + def is_offline_mode(self) -> bool: + """Return True if the current migrations environment + is running in "offline mode". + + This is ``True`` or ``False`` depending + on the ``--sql`` flag passed. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.context_opts.get("as_sql", False) # type: ignore[no-any-return] # noqa: E501 + + def is_transactional_ddl(self) -> bool: + """Return True if the context is configured to expect a + transactional DDL capable backend. + + This defaults to the type of database in use, and + can be overridden by the ``transactional_ddl`` argument + to :meth:`.configure` + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + return self.get_context().impl.transactional_ddl + + def requires_connection(self) -> bool: + return not self.is_offline_mode() + + def get_head_revision(self) -> _RevNumber: + """Return the hex identifier of the 'head' script revision. + + If the script directory has multiple heads, this + method raises a :class:`.CommandError`; + :meth:`.EnvironmentContext.get_head_revisions` should be preferred. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: :meth:`.EnvironmentContext.get_head_revisions` + + """ + return self.script.as_revision_number("head") + + def get_head_revisions(self) -> _RevNumber: + """Return the hex identifier of the 'heads' script revision(s). + + This returns a tuple containing the version number of all + heads in the script directory. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.script.as_revision_number("heads") + + def get_starting_revision_argument(self) -> _RevNumber: + """Return the 'starting revision' argument, + if the revision was passed using ``start:end``. + + This is only meaningful in "offline" mode. + Returns ``None`` if no value is available + or was configured. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + if self._migration_context is not None: + return self.script.as_revision_number( + self.get_context()._start_from_rev + ) + elif "starting_rev" in self.context_opts: + return self.script.as_revision_number( + self.context_opts["starting_rev"] + ) + else: + # this should raise only in the case that a command + # is being run where the "starting rev" is never applicable; + # this is to catch scripts which rely upon this in + # non-sql mode or similar + raise util.CommandError( + "No starting revision argument is available." + ) + + def get_revision_argument(self) -> _RevNumber: + """Get the 'destination' revision argument. + + This is typically the argument passed to the + ``upgrade`` or ``downgrade`` command. + + If it was specified as ``head``, the actual + version number is returned; if specified + as ``base``, ``None`` is returned. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.script.as_revision_number( + self.context_opts["destination_rev"] + ) + + def get_tag_argument(self) -> Optional[str]: + """Return the value passed for the ``--tag`` argument, if any. 
+ + The ``--tag`` argument is not used directly by Alembic, + but is available for custom ``env.py`` configurations that + wish to use it; particularly for offline generation scripts + that wish to generate tagged filenames. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: + + :meth:`.EnvironmentContext.get_x_argument` - a newer and more + open ended system of extending ``env.py`` scripts via the command + line. + + """ + return self.context_opts.get("tag", None) # type: ignore[no-any-return] # noqa: E501 + + @overload + def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: + ... + + @overload + def get_x_argument(self, as_dictionary: Literal[True]) -> Dict[str, str]: + ... + + @overload + def get_x_argument( + self, as_dictionary: bool = ... + ) -> Union[List[str], Dict[str, str]]: + ... + + def get_x_argument( + self, as_dictionary: bool = False + ) -> Union[List[str], Dict[str, str]]: + """Return the value(s) passed for the ``-x`` argument, if any. + + The ``-x`` argument is an open ended flag that allows any user-defined + value or values to be passed on the command line, then available + here for consumption by a custom ``env.py`` script. + + The return value is a list, returned directly from the ``argparse`` + structure. If ``as_dictionary=True`` is passed, the ``x`` arguments + are parsed using ``key=value`` format into a dictionary that is + then returned. If there is no ``=`` in the argument, value is an empty + string. + + .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when + arguments are passed without the ``=`` symbol. + + For example, to support passing a database URL on the command line, + the standard ``env.py`` script can be modified like this:: + + cmd_line_url = context.get_x_argument( + as_dictionary=True).get('dbname') + if cmd_line_url: + engine = create_engine(cmd_line_url) + else: + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + This then takes effect by running the ``alembic`` script as:: + + alembic -x dbname=postgresql://user:pass@host/dbname upgrade head + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. 
seealso:: + + :meth:`.EnvironmentContext.get_tag_argument` + + :attr:`.Config.cmd_opts` + + """ + if self.config.cmd_opts is not None: + value = self.config.cmd_opts.x or [] + else: + value = [] + if as_dictionary: + dict_value = {} + for arg in value: + x_key, _, x_value = arg.partition("=") + dict_value[x_key] = x_value + value = dict_value + + return value + + def configure( + self, + connection: Optional[Connection] = None, + url: Optional[Union[str, URL]] = None, + dialect_name: Optional[str] = None, + dialect_opts: Optional[Dict[str, Any]] = None, + transactional_ddl: Optional[bool] = None, + transaction_per_migration: bool = False, + output_buffer: Optional[TextIO] = None, + starting_rev: Optional[str] = None, + tag: Optional[str] = None, + template_args: Optional[Dict[str, Any]] = None, + render_as_batch: bool = False, + target_metadata: Union[MetaData, Sequence[MetaData], None] = None, + include_name: Optional[IncludeNameFn] = None, + include_object: Optional[IncludeObjectFn] = None, + include_schemas: bool = False, + process_revision_directives: Optional[ + ProcessRevisionDirectiveFn + ] = None, + compare_type: Union[bool, CompareType] = True, + compare_server_default: Union[bool, CompareServerDefault] = False, + render_item: Optional[RenderItemFn] = None, + literal_binds: bool = False, + upgrade_token: str = "upgrades", + downgrade_token: str = "downgrades", + alembic_module_prefix: str = "op.", + sqlalchemy_module_prefix: str = "sa.", + user_module_prefix: Optional[str] = None, + on_version_apply: Optional[OnVersionApplyFn] = None, + **kw: Any, + ) -> None: + """Configure a :class:`.MigrationContext` within this + :class:`.EnvironmentContext` which will provide database + connectivity and other configuration to a series of + migration scripts. + + Many methods on :class:`.EnvironmentContext` require that + this method has been called in order to function, as they + ultimately need to have database access or at least access + to the dialect in use. Those which do are documented as such. + + The important thing needed by :meth:`.configure` is a + means to determine what kind of database dialect is in use. + An actual connection to that database is needed only if + the :class:`.MigrationContext` is to be used in + "online" mode. + + If the :meth:`.is_offline_mode` function returns ``True``, + then no connection is needed here. Otherwise, the + ``connection`` parameter should be present as an + instance of :class:`sqlalchemy.engine.Connection`. + + This function is typically called from the ``env.py`` + script within a migration environment. It can be called + multiple times for an invocation. The most recent + :class:`~sqlalchemy.engine.Connection` + for which it was called is the one that will be operated upon + by the next call to :meth:`.run_migrations`. + + General parameters: + + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use + for SQL execution in "online" mode. When present, is also + used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. + The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. + The type of dialect to be used will be derived from this if + ``connection`` and ``url`` are not passed. + :param dialect_opts: dictionary of options to be passed to dialect + constructor. 
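# Sketch of the three ways configure() can learn the dialect, in the order
# it consults them; ``engine`` is an assumed SQLAlchemy engine:
#
#     # online mode: dialect taken from the live connection
#     with engine.connect() as connection:
#         context.configure(connection=connection)
#
#     # offline mode: dialect derived from a URL, or from its name alone
#     context.configure(url="postgresql://scott:tiger@localhost/test")
#     context.configure(dialect_name="postgresql")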
+ :param transactional_ddl: Force the usage of "transactional" + DDL on or off; + this otherwise defaults to whether or not the dialect in + use supports it. + :param transaction_per_migration: if True, nest each migration script + in a transaction rather than the full series of migrations to + run. + :param output_buffer: a file-like object that will be used + for textual output + when the ``--sql`` option is used to generate SQL scripts. + Defaults to + ``sys.stdout`` if not passed here and also not present on + the :class:`.Config` + object. The value here overrides that of the :class:`.Config` + object. + :param output_encoding: when using ``--sql`` to generate SQL + scripts, apply this encoding to the string output. + :param literal_binds: when using ``--sql`` to generate SQL + scripts, pass through the ``literal_binds`` flag to the compiler + so that any literal values that would ordinarily be bound + parameters are converted to plain strings. + + .. warning:: Dialects can typically only handle simple datatypes + like strings and numbers for auto-literal generation. Datatypes + like dates, intervals, and others may still require manual + formatting, typically using :meth:`.Operations.inline_literal`. + + .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy + versions prior to 0.8 where this feature is not supported. + + .. seealso:: + + :meth:`.Operations.inline_literal` + + :param starting_rev: Override the "starting revision" argument + when using ``--sql`` mode. + :param tag: a string tag for usage by custom ``env.py`` scripts. + Set via the ``--tag`` option, can be overridden here. + :param template_args: dictionary of template arguments which + will be added to the template argument environment when + running the "revision" command. Note that the script environment + is only run within the "revision" command if the --autogenerate + option is used, or if the option "revision_environment=true" + is present in the alembic.ini file. + + :param version_table: The name of the Alembic version table. + The default is ``'alembic_version'``. + :param version_table_schema: Optional schema to place version + table within. + :param version_table_pk: boolean, whether the Alembic version table + should use a primary key constraint for the "value" column; this + only takes effect when the table is first created. + Defaults to True; setting to False should not be necessary and is + here for backwards compatibility reasons. + :param on_version_apply: a callable or collection of callables to be + run for each migration step. + The callables will be run in the order they are given, once for + each migration step, after the respective operation has been + applied but before its transaction is finalized. + Each callable accepts no positional arguments and the following + keyword arguments: + + * ``ctx``: the :class:`.MigrationContext` running the migration, + * ``step``: a :class:`.MigrationInfo` representing the + step currently being applied, + * ``heads``: a collection of version strings representing the + current heads, + * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`. + + Parameters specific to the autogenerate feature, when + ``alembic revision`` is run with the ``--autogenerate`` feature: + + :param target_metadata: a :class:`sqlalchemy.schema.MetaData` + object, or a sequence of :class:`~sqlalchemy.schema.MetaData` + objects, that will be consulted during autogeneration. 
+ The tables present in each :class:`~sqlalchemy.schema.MetaData` + will be compared against + what is locally available on the target + :class:`~sqlalchemy.engine.Connection` + to produce candidate upgrade/downgrade operations. + :param compare_type: Indicates type comparison behavior during + an autogenerate + operation. Defaults to ``True`` turning on type comparison, which + has good accuracy on most backends. See :ref:`compare_types` + for an example as well as information on other type + comparison options. Set to ``False`` which disables type + comparison. A callable can also be passed to provide custom type + comparison, see :ref:`compare_types` for additional details. + + .. versionchanged:: 1.12.0 The default value of + :paramref:`.EnvironmentContext.configure.compare_type` has been + changed to ``True``. + + .. seealso:: + + :ref:`compare_types` + + :paramref:`.EnvironmentContext.configure.compare_server_default` + + :param compare_server_default: Indicates server default comparison + behavior during + an autogenerate operation. Defaults to ``False`` which disables + server default + comparison. Set to ``True`` to turn on server default comparison, + which has + varied accuracy depending on backend. + + To customize server default comparison behavior, a callable may + be specified + which can filter server default comparisons during an + autogenerate operation. The format of this + callable is:: + + def my_compare_server_default(context, inspected_column, + metadata_column, inspected_default, metadata_default, + rendered_metadata_default): + # return True if the defaults are different, + # False if not, or None to allow the default implementation + # to compare these defaults + return None + + context.configure( + # ... + compare_server_default = my_compare_server_default + ) + + ``inspected_column`` is a dictionary structure as returned by + :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas + ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from + the local model environment. + + A return value of ``None`` indicates to allow default server default + comparison + to proceed. Note that some backends such as Postgresql actually + execute + the two defaults on the database side to compare for equivalence. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.compare_type` + + :param include_name: A callable function which is given + the chance to return ``True`` or ``False`` for any database reflected + object based on its name, including database schema names when + the :paramref:`.EnvironmentContext.configure.include_schemas` flag + is set to ``True``. + + The function accepts the following positional arguments: + + * ``name``: the name of the object, such as schema name or table name. + Will be ``None`` when indicating the default schema name of the + database connection. + * ``type``: a string describing the type of object; currently + ``"schema"``, ``"table"``, ``"column"``, ``"index"``, + ``"unique_constraint"``, or ``"foreign_key_constraint"`` + * ``parent_names``: a dictionary of "parent" object names, that are + relative to the name being given. Keys in this dictionary may + include: ``"schema_name"``, ``"table_name"`` or + ``"schema_qualified_table_name"``. + + E.g.:: + + def include_name(name, type_, parent_names): + if type_ == "schema": + return name in ["schema_one", "schema_two"] + else: + return True + + context.configure( + # ...
+ include_schemas = True, + include_name = include_name + ) + + .. seealso:: + + :ref:`autogenerate_include_hooks` + + :paramref:`.EnvironmentContext.configure.include_object` + + :paramref:`.EnvironmentContext.configure.include_schemas` + + + :param include_object: A callable function which is given + the chance to return ``True`` or ``False`` for any object, + indicating if the given object should be considered in the + autogenerate sweep. + + The function accepts the following positional arguments: + + * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such + as a :class:`~sqlalchemy.schema.Table`, + :class:`~sqlalchemy.schema.Column`, + :class:`~sqlalchemy.schema.Index` + :class:`~sqlalchemy.schema.UniqueConstraint`, + or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object + * ``name``: the name of the object. This is typically available + via ``object.name``. + * ``type``: a string describing the type of object; currently + ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``, + or ``"foreign_key_constraint"`` + * ``reflected``: ``True`` if the given object was produced based on + table reflection, ``False`` if it's from a local :class:`.MetaData` + object. + * ``compare_to``: the object being compared against, if available, + else ``None``. + + E.g.:: + + def include_object(object, name, type_, reflected, compare_to): + if (type_ == "column" and + not reflected and + object.info.get("skip_autogenerate", False)): + return False + else: + return True + + context.configure( + # ... + include_object = include_object + ) + + For the use case of omitting specific schemas from a target database + when :paramref:`.EnvironmentContext.configure.include_schemas` is + set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema` + attribute can be checked for each :class:`~sqlalchemy.schema.Table` + object passed to the hook, however it is much more efficient + to filter on schemas before reflection of objects takes place + using the :paramref:`.EnvironmentContext.configure.include_name` + hook. + + .. seealso:: + + :ref:`autogenerate_include_hooks` + + :paramref:`.EnvironmentContext.configure.include_name` + + :paramref:`.EnvironmentContext.configure.include_schemas` + + :param render_as_batch: if True, commands which alter elements + within a table will be placed under a ``with batch_alter_table():`` + directive, so that batch migrations will take place. + + .. seealso:: + + :ref:`batch_migrations` + + :param include_schemas: If True, autogenerate will scan across + all schemas located by the SQLAlchemy + :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names` + method, and include all differences in tables found across all + those schemas. When using this option, you may want to also + use the :paramref:`.EnvironmentContext.configure.include_name` + parameter to specify a callable which + can filter the tables/schemas that get included. + + .. seealso:: + + :ref:`autogenerate_include_hooks` + + :paramref:`.EnvironmentContext.configure.include_name` + + :paramref:`.EnvironmentContext.configure.include_object` + + :param render_item: Callable that can be used to override how + any schema item, i.e. column, constraint, type, + etc., is rendered for autogenerate. The callable receives a + string describing the type of object, the object, and + the autogen context. If it returns False, the + default rendering method will be used. 
If it returns None, + the item will not be rendered in the context of a Table + construct, that is, can be used to skip columns or constraints + within op.create_table():: + + def my_render_column(type_, col, autogen_context): + if type_ == "column" and isinstance(col, MySpecialCol): + return repr(col) + else: + return False + + context.configure( + # ... + render_item = my_render_column + ) + + Available values for the type string include: ``"column"``, + ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, + ``"type"``, ``"server_default"``. + + .. seealso:: + + :ref:`autogen_render_types` + + :param upgrade_token: When autogenerate completes, the text of the + candidate upgrade operations will be present in this template + variable when ``script.py.mako`` is rendered. Defaults to + ``upgrades``. + :param downgrade_token: When autogenerate completes, the text of the + candidate downgrade operations will be present in this + template variable when ``script.py.mako`` is rendered. Defaults to + ``downgrades``. + + :param alembic_module_prefix: When autogenerate refers to Alembic + :mod:`alembic.operations` constructs, this prefix will be used + (i.e. ``op.create_table``) Defaults to "``op.``". + Can be ``None`` to indicate no prefix. + + :param sqlalchemy_module_prefix: When autogenerate refers to + SQLAlchemy + :class:`~sqlalchemy.schema.Column` or type classes, this prefix + will be used + (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". + Can be ``None`` to indicate no prefix. + Note that when dialect-specific types are rendered, autogenerate + will render them using the dialect module name, i.e. ``mssql.BIT()``, + ``postgresql.UUID()``. + + :param user_module_prefix: When autogenerate refers to a SQLAlchemy + type (e.g. :class:`.TypeEngine`) where the module name is not + under the ``sqlalchemy`` namespace, this prefix will be used + within autogenerate. If left at its default of + ``None``, the ``__module__`` attribute of the type is used to + render the import module. It's a good practice to set this + and to have all custom types be available from a fixed module space, + in order to future-proof migration files against reorganizations + in modules. + + .. seealso:: + + :ref:`autogen_module_prefix` + + :param process_revision_directives: a callable function that will + be passed a structure representing the end result of an autogenerate + or plain "revision" operation, which can be manipulated to affect + how the ``alembic revision`` command ultimately outputs new + revision scripts. The structure of the callable is:: + + def process_revision_directives(context, revision, directives): + pass + + The ``directives`` parameter is a Python list containing + a single :class:`.MigrationScript` directive, which represents + the revision file to be generated. This list as well as its + contents may be freely modified to produce any set of commands. + The section :ref:`customizing_revision` shows an example of + doing this. The ``context`` parameter is the + :class:`.MigrationContext` in use, + and ``revision`` is a tuple of revision identifiers representing the + current revision of the database. + + The callable is invoked at all times when the ``--autogenerate`` + option is passed to ``alembic revision``. 
If ``--autogenerate`` + is not passed, the callable is invoked only if the + ``revision_environment`` variable is set to True in the Alembic + configuration, in which case the given ``directives`` collection + will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps` + collections for ``.upgrade_ops`` and ``.downgrade_ops``. The + ``--autogenerate`` option itself can be inferred by inspecting + ``context.config.cmd_opts.autogenerate``. + + The callable function may optionally be an instance of + a :class:`.Rewriter` object. This is a helper object that + assists in the production of autogenerate-stream rewriter functions. + + .. seealso:: + + :ref:`customizing_revision` + + :ref:`autogen_rewriter` + + :paramref:`.command.revision.process_revision_directives` + + Parameters specific to individual backends: + + :param mssql_batch_separator: The "batch separator" which will + be placed between each statement when generating offline SQL Server + migrations. Defaults to ``GO``. Note this is in addition to the + customary semicolon ``;`` at the end of each statement; SQL Server + considers the "batch separator" to denote the end of an + individual statement execution, and cannot group certain + dependent operations in one step. + :param oracle_batch_separator: The "batch separator" which will + be placed between each statement when generating offline + Oracle migrations. Defaults to ``/``. Oracle doesn't add a + semicolon between statements like most other backends. + + """ + opts = self.context_opts + if transactional_ddl is not None: + opts["transactional_ddl"] = transactional_ddl + if output_buffer is not None: + opts["output_buffer"] = output_buffer + elif self.config.output_buffer is not None: + opts["output_buffer"] = self.config.output_buffer + if starting_rev: + opts["starting_rev"] = starting_rev + if tag: + opts["tag"] = tag + if template_args and "template_args" in opts: + opts["template_args"].update(template_args) + opts["transaction_per_migration"] = transaction_per_migration + opts["target_metadata"] = target_metadata + opts["include_name"] = include_name + opts["include_object"] = include_object + opts["include_schemas"] = include_schemas + opts["render_as_batch"] = render_as_batch + opts["upgrade_token"] = upgrade_token + opts["downgrade_token"] = downgrade_token + opts["sqlalchemy_module_prefix"] = sqlalchemy_module_prefix + opts["alembic_module_prefix"] = alembic_module_prefix + opts["user_module_prefix"] = user_module_prefix + opts["literal_binds"] = literal_binds + opts["process_revision_directives"] = process_revision_directives + opts["on_version_apply"] = util.to_tuple(on_version_apply, default=()) + + if render_item is not None: + opts["render_item"] = render_item + opts["compare_type"] = compare_type + if compare_server_default is not None: + opts["compare_server_default"] = compare_server_default + opts["script"] = self.script + + opts.update(kw) + + self._migration_context = MigrationContext.configure( + connection=connection, + url=url, + dialect_name=dialect_name, + environment_context=self, + dialect_opts=dialect_opts, + opts=opts, + ) + + def run_migrations(self, **kw: Any) -> None: + """Run migrations as determined by the current command line + configuration + as well as versioning information present (or not) in the current + database connection (if one is present). + + The function accepts optional ``**kw`` arguments. If these are + passed, they are sent directly to the ``upgrade()`` and + ``downgrade()`` + functions within each target revision file. 
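# Sketch of the ``**kw`` pass-through described above; ``engine_name`` is
# an assumed example parameter and requires a customized script.py.mako
# that renders upgrade()/downgrade() with a matching signature:
#
#     # env.py
#     context.run_migrations(engine_name="reporting")
#
#     # within a generated revision script
#     def upgrade(engine_name):
#         if engine_name == "reporting":
#             op.add_column("reports", sa.Column("note", sa.Text()))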
By modifying the + ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` + functions accept arguments, parameters can be passed here so that + contextual information, usually information to identify a particular + database in use, can be passed from a custom ``env.py`` script + to the migration functions. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + assert self._migration_context is not None + with Operations.context(self._migration_context): + self.get_context().run_migrations(**kw) + + def execute( + self, + sql: Union[Executable, str], + execution_options: Optional[Dict[str, Any]] = None, + ) -> None: + """Execute the given SQL using the current change context. + + The behavior of :meth:`.execute` is the same + as that of :meth:`.Operations.execute`. Please see that + function's documentation for full detail including + caveats and limitations. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + self.get_context().execute(sql, execution_options=execution_options) + + def static_output(self, text: str) -> None: + """Emit text directly to the "offline" SQL stream. + + Typically this is for emitting comments that + start with --. The statement is not treated + as a SQL execution, no ; or batch separator + is added, etc. + + """ + self.get_context().impl.static_output(text) + + def begin_transaction( + self, + ) -> Union[_ProxyTransaction, ContextManager[None]]: + """Return a context manager that will + enclose an operation within a "transaction", + as defined by the environment's offline + and transactional DDL settings. + + e.g.:: + + with context.begin_transaction(): + context.run_migrations() + + :meth:`.begin_transaction` is intended to + "do the right thing" regardless of + calling context: + + * If :meth:`.is_transactional_ddl` is ``False``, + returns a "do nothing" context manager + which otherwise produces no transactional + state or directives. + * If :meth:`.is_offline_mode` is ``True``, + returns a context manager that will + invoke the :meth:`.DefaultImpl.emit_begin` + and :meth:`.DefaultImpl.emit_commit` + methods, which will produce the string + directives ``BEGIN`` and ``COMMIT`` on + the output stream, as rendered by the + target backend (e.g. SQL Server would + emit ``BEGIN TRANSACTION``). + * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` + on the current online connection, which + returns a :class:`sqlalchemy.engine.Transaction` + object. This object demarcates a real + transaction and is itself a context manager, + which will roll back if an exception + is raised. + + Note that a custom ``env.py`` script which + has more specific transactional needs can of course + manipulate the :class:`~sqlalchemy.engine.Connection` + directly to produce transactional state in "online" + mode. + + """ + + return self.get_context().begin_transaction() + + def get_context(self) -> MigrationContext: + """Return the current :class:`.MigrationContext` object. + + If :meth:`.EnvironmentContext.configure` has not been + called yet, raises an exception. + + """ + + if self._migration_context is None: + raise Exception("No context has been configured yet.") + return self._migration_context + + def get_bind(self) -> Connection: + """Return the current 'bind'. + + In "online" mode, this is the + :class:`sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. 
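# Sketch contrasting the two output paths just described when running in
# --sql mode: static_output() writes raw text to the SQL stream with no
# terminator added, while execute() is rendered as a statement:
#
#     context.static_output("-- backfill default flags")
#     context.execute("UPDATE accounts SET active = 1")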
+ + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + return self.get_context().bind # type: ignore[return-value] + + def get_impl(self) -> DefaultImpl: + return self.get_context().impl diff --git a/venv/lib/python3.12/site-packages/alembic/runtime/migration.py b/venv/lib/python3.12/site-packages/alembic/runtime/migration.py new file mode 100644 index 0000000..95c69bc --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/runtime/migration.py @@ -0,0 +1,1396 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +from contextlib import contextmanager +from contextlib import nullcontext +import logging +import sys +from typing import Any +from typing import Callable +from typing import cast +from typing import Collection +from typing import ContextManager +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import Column +from sqlalchemy import literal_column +from sqlalchemy import MetaData +from sqlalchemy import PrimaryKeyConstraint +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy.engine import Engine +from sqlalchemy.engine import url as sqla_url +from sqlalchemy.engine.strategies import MockEngineStrategy + +from .. import ddl +from .. import util +from ..util import sqla_compat +from ..util.compat import EncodedIO + +if TYPE_CHECKING: + from sqlalchemy.engine import Dialect + from sqlalchemy.engine import URL + from sqlalchemy.engine.base import Connection + from sqlalchemy.engine.base import Transaction + from sqlalchemy.engine.mock import MockConnection + from sqlalchemy.sql import Executable + + from .environment import EnvironmentContext + from ..config import Config + from ..script.base import Script + from ..script.base import ScriptDirectory + from ..script.revision import _RevisionOrBase + from ..script.revision import Revision + from ..script.revision import RevisionMap + +log = logging.getLogger(__name__) + + +class _ProxyTransaction: + def __init__(self, migration_context: MigrationContext) -> None: + self.migration_context = migration_context + + @property + def _proxied_transaction(self) -> Optional[Transaction]: + return self.migration_context._transaction + + def rollback(self) -> None: + t = self._proxied_transaction + assert t is not None + t.rollback() + self.migration_context._transaction = None + + def commit(self) -> None: + t = self._proxied_transaction + assert t is not None + t.commit() + self.migration_context._transaction = None + + def __enter__(self) -> _ProxyTransaction: + return self + + def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: + if self._proxied_transaction is not None: + self._proxied_transaction.__exit__(type_, value, traceback) + self.migration_context._transaction = None + + +class MigrationContext: + + """Represent the database state made available to a migration + script. + + :class:`.MigrationContext` is the front end to an actual + database connection, or alternatively a string output + stream given a particular database dialect, + from an Alembic perspective. 
+ + When inside the ``env.py`` script, the :class:`.MigrationContext` + is available via the + :meth:`.EnvironmentContext.get_context` method, + which is available at ``alembic.context``:: + + # from within env.py script + from alembic import context + + migration_context = context.get_context() + + For usage outside of an ``env.py`` script, such as for + utility routines that want to check the current version + in the database, the :meth:`.MigrationContext.configure` + method may be used to create new :class:`.MigrationContext` objects. + For example, to get at the current revision in the + database using :meth:`.MigrationContext.get_current_revision`:: + + # in any application, outside of an env.py script + from alembic.migration import MigrationContext + from sqlalchemy import create_engine + + engine = create_engine("postgresql://mydatabase") + conn = engine.connect() + + context = MigrationContext.configure(conn) + current_rev = context.get_current_revision() + + The above context can also be used to produce + Alembic migration operations with an :class:`.Operations` + instance:: + + # in any application, outside of the normal Alembic environment + from alembic.operations import Operations + + op = Operations(context) + op.alter_column("mytable", "somecolumn", nullable=True) + + """ + + def __init__( + self, + dialect: Dialect, + connection: Optional[Connection], + opts: Dict[str, Any], + environment_context: Optional[EnvironmentContext] = None, + ) -> None: + self.environment_context = environment_context + self.opts = opts + self.dialect = dialect + self.script: Optional[ScriptDirectory] = opts.get("script") + as_sql: bool = opts.get("as_sql", False) + transactional_ddl = opts.get("transactional_ddl") + self._transaction_per_migration = opts.get( + "transaction_per_migration", False + ) + self.on_version_apply_callbacks = opts.get("on_version_apply", ()) + self._transaction: Optional[Transaction] = None + + if as_sql: + self.connection = cast( + Optional["Connection"], self._stdout_connection(connection) + ) + assert self.connection is not None + self._in_external_transaction = False + else: + self.connection = connection + self._in_external_transaction = ( + sqla_compat._get_connection_in_transaction(connection) + ) + + self._migrations_fn: Optional[ + Callable[..., Iterable[RevisionStep]] + ] = opts.get("fn") + self.as_sql = as_sql + + self.purge = opts.get("purge", False) + + if "output_encoding" in opts: + self.output_buffer = EncodedIO( + opts.get("output_buffer") + or sys.stdout, # type:ignore[arg-type] + opts["output_encoding"], + ) + else: + self.output_buffer = opts.get("output_buffer", sys.stdout) + + self._user_compare_type = opts.get("compare_type", True) + self._user_compare_server_default = opts.get( + "compare_server_default", False + ) + self.version_table = version_table = opts.get( + "version_table", "alembic_version" + ) + self.version_table_schema = version_table_schema = opts.get( + "version_table_schema", None + ) + self._version = Table( + version_table, + MetaData(), + Column("version_num", String(32), nullable=False), + schema=version_table_schema, + ) + if opts.get("version_table_pk", True): + self._version.append_constraint( + PrimaryKeyConstraint( + "version_num", name="%s_pkc" % version_table + ) + ) + + self._start_from_rev: Optional[str] = opts.get("starting_rev") + self.impl = ddl.DefaultImpl.get_by_dialect(dialect)( + dialect, + self.connection, + self.as_sql, + transactional_ddl, + self.output_buffer, + opts, + ) + log.info("Context impl %s.",
self.impl.__class__.__name__) + if self.as_sql: + log.info("Generating static SQL") + log.info( + "Will assume %s DDL.", + "transactional" + if self.impl.transactional_ddl + else "non-transactional", + ) + + @classmethod + def configure( + cls, + connection: Optional[Connection] = None, + url: Optional[Union[str, URL]] = None, + dialect_name: Optional[str] = None, + dialect: Optional[Dialect] = None, + environment_context: Optional[EnvironmentContext] = None, + dialect_opts: Optional[Dict[str, str]] = None, + opts: Optional[Any] = None, + ) -> MigrationContext: + """Create a new :class:`.MigrationContext`. + + This is a factory method usually called + by :meth:`.EnvironmentContext.configure`. + + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use for SQL execution in "online" mode. When present, + is also used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. + The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. The type of dialect to be used will be + derived from this if ``connection`` and ``url`` are not passed. + :param opts: dictionary of options. Most other options + accepted by :meth:`.EnvironmentContext.configure` are passed via + this dictionary. + + """ + if opts is None: + opts = {} + if dialect_opts is None: + dialect_opts = {} + + if connection: + if isinstance(connection, Engine): + raise util.CommandError( + "'connection' argument to configure() is expected " + "to be a sqlalchemy.engine.Connection instance, " + "got %r" % connection, + ) + + dialect = connection.dialect + elif url: + url_obj = sqla_url.make_url(url) + dialect = url_obj.get_dialect()(**dialect_opts) + elif dialect_name: + url_obj = sqla_url.make_url("%s://" % dialect_name) + dialect = url_obj.get_dialect()(**dialect_opts) + elif not dialect: + raise Exception("Connection, url, or dialect_name is required.") + assert dialect is not None + return MigrationContext(dialect, connection, opts, environment_context) + + @contextmanager + def autocommit_block(self) -> Iterator[None]: + """Enter an "autocommit" block, for databases that support AUTOCOMMIT + isolation levels. + + This special directive is intended to support the occasional database + DDL or system operation that specifically has to be run outside of + any kind of transaction block. The PostgreSQL database platform + is the most common target for this style of operation, as many + of its DDL operations must be run outside of transaction blocks, even + though the database overall supports transactional DDL. + + The method is used as a context manager within a migration script, by + calling on :meth:`.Operations.get_context` to retrieve the + :class:`.MigrationContext`, then invoking + :meth:`.MigrationContext.autocommit_block` using the ``with:`` + statement:: + + def upgrade(): + with op.get_context().autocommit_block(): + op.execute("ALTER TYPE mood ADD VALUE 'soso'") + + Above, a PostgreSQL "ALTER TYPE..ADD VALUE" directive is emitted, + which must be run outside of a transaction block at the database level. + The :meth:`.MigrationContext.autocommit_block` method makes use of the + SQLAlchemy ``AUTOCOMMIT`` isolation level setting, which against the + psycopg2 DBAPI corresponds to the ``connection.autocommit`` setting, + to ensure that the database driver is not inside of a DBAPI level + transaction block. + + ..
warning:: + + As is necessary, **the database transaction preceding the block is + unconditionally committed**. This means that the run of migrations + preceding the operation will be committed, before the overall + migration operation is complete. + + It is recommended that when an application includes migrations with + "autocommit" blocks, that + :paramref:`.EnvironmentContext.transaction_per_migration` be used + so that the calling environment is tuned to expect short per-file + migrations whether or not one of them has an autocommit block. + + + """ + _in_connection_transaction = self._in_connection_transaction() + + if self.impl.transactional_ddl and self.as_sql: + self.impl.emit_commit() + + elif _in_connection_transaction: + assert self._transaction is not None + + self._transaction.commit() + self._transaction = None + + if not self.as_sql: + assert self.connection is not None + current_level = self.connection.get_isolation_level() + base_connection = self.connection + + # in 1.3 and 1.4 non-future mode, the connection gets switched + # out. we can use the base connection with the new mode + # except that it will not know it's in "autocommit" and will + # emit deprecation warnings when an autocommit action takes + # place. + self.connection = ( + self.impl.connection + ) = base_connection.execution_options(isolation_level="AUTOCOMMIT") + + # sqlalchemy future mode will "autobegin" in any case, so take + # control of that "transaction" here + fake_trans: Optional[Transaction] = self.connection.begin() + else: + fake_trans = None + try: + yield + finally: + if not self.as_sql: + assert self.connection is not None + if fake_trans is not None: + fake_trans.commit() + self.connection.execution_options( + isolation_level=current_level + ) + self.connection = self.impl.connection = base_connection + + if self.impl.transactional_ddl and self.as_sql: + self.impl.emit_begin() + + elif _in_connection_transaction: + assert self.connection is not None + self._transaction = self.connection.begin() + + def begin_transaction( + self, _per_migration: bool = False + ) -> Union[_ProxyTransaction, ContextManager[None]]: + """Begin a logical transaction for migration operations. + + This method is used within an ``env.py`` script to demarcate where + the outer "transaction" for a series of migrations begins. Example:: + + def run_migrations_online(): + connectable = create_engine(...) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + Above, :meth:`.MigrationContext.begin_transaction` is used to demarcate + where the outer logical transaction occurs around the + :meth:`.MigrationContext.run_migrations` operation. + + A "Logical" transaction means that the operation may or may not + correspond to a real database transaction. If the target database + supports transactional DDL (or + :paramref:`.EnvironmentContext.configure.transactional_ddl` is true), + the :paramref:`.EnvironmentContext.configure.transaction_per_migration` + flag is not set, and the migration is against a real database + connection (as opposed to using "offline" ``--sql`` mode), a real + transaction will be started. If ``--sql`` mode is in effect, the + operation would instead correspond to a string such as "BEGIN" being + emitted to the string output. + + The returned object is a Python context manager that should only be + used in the context of a ``with:`` statement as indicated above. 
+ The object has no other guaranteed API features present. + + .. seealso:: + + :meth:`.MigrationContext.autocommit_block` + + """ + + if self._in_external_transaction: + return nullcontext() + + if self.impl.transactional_ddl: + transaction_now = _per_migration == self._transaction_per_migration + else: + transaction_now = _per_migration is True + + if not transaction_now: + return nullcontext() + + elif not self.impl.transactional_ddl: + assert _per_migration + + if self.as_sql: + return nullcontext() + else: + # track our own notion of a "transaction block", which must be + # committed when complete. Don't rely upon whether or not the + # SQLAlchemy connection reports as "in transaction"; this + # because SQLAlchemy future connection features autobegin + # behavior, so it may already be in a transaction from our + # emitting of queries like "has_version_table", etc. While we + # could track these operations as well, that leaves open the + # possibility of new operations or other things happening in + # the user environment that still may be triggering + # "autobegin". + + in_transaction = self._transaction is not None + + if in_transaction: + return nullcontext() + else: + assert self.connection is not None + self._transaction = ( + sqla_compat._safe_begin_connection_transaction( + self.connection + ) + ) + return _ProxyTransaction(self) + elif self.as_sql: + + @contextmanager + def begin_commit(): + self.impl.emit_begin() + yield + self.impl.emit_commit() + + return begin_commit() + else: + assert self.connection is not None + self._transaction = sqla_compat._safe_begin_connection_transaction( + self.connection + ) + return _ProxyTransaction(self) + + def get_current_revision(self) -> Optional[str]: + """Return the current revision, usually that which is present + in the ``alembic_version`` table in the database. + + This method intends to be used only for a migration stream that + does not contain unmerged branches in the target database; + if there are multiple branches present, an exception is raised. + The :meth:`.MigrationContext.get_current_heads` should be preferred + over this method going forward in order to be compatible with + branch migration support. + + If this :class:`.MigrationContext` was configured in "offline" + mode, that is with ``as_sql=True``, the ``starting_rev`` + parameter is returned instead, if any. + + """ + heads = self.get_current_heads() + if len(heads) == 0: + return None + elif len(heads) > 1: + raise util.CommandError( + "Version table '%s' has more than one head present; " + "please use get_current_heads()" % self.version_table + ) + else: + return heads[0] + + def get_current_heads(self) -> Tuple[str, ...]: + """Return a tuple of the current 'head versions' that are represented + in the target database. + + For a migration stream without branches, this will be a single + value, synonymous with that of + :meth:`.MigrationContext.get_current_revision`. However when multiple + unmerged branches exist within the target database, the returned tuple + will contain a value for each head. + + If this :class:`.MigrationContext` was configured in "offline" + mode, that is with ``as_sql=True``, the ``starting_rev`` + parameter is returned in a one-length tuple. + + If no version table is present, or if there are no revisions + present, an empty tuple is returned. 
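# Sketch: reading the current head(s) from ordinary application code,
# outside of env.py; ``engine`` is an assumed SQLAlchemy engine:
#
#     from alembic.runtime.migration import MigrationContext
#
#     with engine.connect() as conn:
#         ctx = MigrationContext.configure(conn)
#         heads = ctx.get_current_heads()  # e.g. ('ae1027a6acf',), or ()
#                                          # if no version table exists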
+ + """ + if self.as_sql: + start_from_rev: Any = self._start_from_rev + if start_from_rev == "base": + start_from_rev = None + elif start_from_rev is not None and self.script: + start_from_rev = [ + self.script.get_revision(sfr).revision + for sfr in util.to_list(start_from_rev) + if sfr not in (None, "base") + ] + return util.to_tuple(start_from_rev, default=()) + else: + if self._start_from_rev: + raise util.CommandError( + "Can't specify current_rev to context " + "when using a database connection" + ) + if not self._has_version_table(): + return () + assert self.connection is not None + return tuple( + row[0] for row in self.connection.execute(self._version.select()) + ) + + def _ensure_version_table(self, purge: bool = False) -> None: + with sqla_compat._ensure_scope_for_ddl(self.connection): + assert self.connection is not None + self._version.create(self.connection, checkfirst=True) + if purge: + assert self.connection is not None + self.connection.execute(self._version.delete()) + + def _has_version_table(self) -> bool: + assert self.connection is not None + return sqla_compat._connectable_has_table( + self.connection, self.version_table, self.version_table_schema + ) + + def stamp(self, script_directory: ScriptDirectory, revision: str) -> None: + """Stamp the version table with a specific revision. + + This method calculates those branches to which the given revision + can apply, and updates those branches as though they were migrated + towards that revision (either up or down). If no current branches + include the revision, it is added as a new branch head. + + """ + heads = self.get_current_heads() + if not self.as_sql and not heads: + self._ensure_version_table() + head_maintainer = HeadMaintainer(self, heads) + for step in script_directory._stamp_revs(revision, heads): + head_maintainer.update_to_step(step) + + def run_migrations(self, **kw: Any) -> None: + r"""Run the migration scripts established for this + :class:`.MigrationContext`, if any. + + The commands in :mod:`alembic.command` will set up a function + that is ultimately passed to the :class:`.MigrationContext` + as the ``fn`` argument. This function represents the "work" + that will be done when :meth:`.MigrationContext.run_migrations` + is called, typically from within the ``env.py`` script of the + migration environment. The "work function" then provides an iterable + of version callables and other version information which + in the case of the ``upgrade`` or ``downgrade`` commands are the + list of version scripts to invoke. Other commands yield nothing, + in the case that a command wants to run some other operation + against the database such as the ``current`` or ``stamp`` commands. + + :param \**kw: keyword arguments here will be passed to each + migration callable, that is the ``upgrade()`` or ``downgrade()`` + method within revision scripts. + + """ + self.impl.start_migrations() + + heads: Tuple[str, ...] 
+ if self.purge: + if self.as_sql: + raise util.CommandError("Can't use --purge with --sql mode") + self._ensure_version_table(purge=True) + heads = () + else: + heads = self.get_current_heads() + + dont_mutate = self.opts.get("dont_mutate", False) + + if not self.as_sql and not heads and not dont_mutate: + self._ensure_version_table() + + head_maintainer = HeadMaintainer(self, heads) + + assert self._migrations_fn is not None + for step in self._migrations_fn(heads, self): + with self.begin_transaction(_per_migration=True): + if self.as_sql and not head_maintainer.heads: + # for offline mode, include a CREATE TABLE from + # the base + assert self.connection is not None + self._version.create(self.connection) + log.info("Running %s", step) + if self.as_sql: + self.impl.static_output( + "-- Running %s" % (step.short_log,) + ) + step.migration_fn(**kw) + + # previously, we wouldn't stamp per migration + # if we were in a transaction, however given the more + # complex model that involves any number of inserts + # and row-targeted updates and deletes, it's simpler for now + # just to run the operations on every version + head_maintainer.update_to_step(step) + for callback in self.on_version_apply_callbacks: + callback( + ctx=self, + step=step.info, + heads=set(head_maintainer.heads), + run_args=kw, + ) + + if self.as_sql and not head_maintainer.heads: + assert self.connection is not None + self._version.drop(self.connection) + + def _in_connection_transaction(self) -> bool: + try: + meth = self.connection.in_transaction # type:ignore[union-attr] + except AttributeError: + return False + else: + return meth() + + def execute( + self, + sql: Union[Executable, str], + execution_options: Optional[Dict[str, Any]] = None, + ) -> None: + """Execute a SQL construct or string statement. + + The underlying execution mechanics are used, that is + if this is "offline mode" the SQL is written to the + output buffer, otherwise the SQL is emitted on + the current SQLAlchemy connection. + + """ + self.impl._exec(sql, execution_options) + + def _stdout_connection( + self, connection: Optional[Connection] + ) -> MockConnection: + def dump(construct, *multiparams, **params): + self.impl._exec(construct) + + return MockEngineStrategy.MockConnection(self.dialect, dump) + + @property + def bind(self) -> Optional[Connection]: + """Return the current "bind". + + In online mode, this is an instance of + :class:`sqlalchemy.engine.Connection`, and is suitable + for ad-hoc execution of any kind of usage described + in SQLAlchemy Core documentation as well as + for usage with the :meth:`sqlalchemy.schema.Table.create` + and :meth:`sqlalchemy.schema.MetaData.create_all` methods + of :class:`~sqlalchemy.schema.Table`, + :class:`~sqlalchemy.schema.MetaData`. + + Note that when "standard output" mode is enabled, + this bind will be a "mock" connection handler that cannot + return results and is only appropriate for a very limited + subset of commands. 
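# Sketch: in online mode the bind is a plain Connection, so ad-hoc Core
# execution works from inside a migration script:
#
#     from sqlalchemy import text
#
#     conn = op.get_bind()  # same object as MigrationContext.bind
#     conn.execute(text("UPDATE accounts SET active = true"))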
+ + """ + return self.connection + + @property + def config(self) -> Optional[Config]: + """Return the :class:`.Config` used by the current environment, + if any.""" + + if self.environment_context: + return self.environment_context.config + else: + return None + + def _compare_type( + self, inspector_column: Column[Any], metadata_column: Column + ) -> bool: + if self._user_compare_type is False: + return False + + if callable(self._user_compare_type): + user_value = self._user_compare_type( + self, + inspector_column, + metadata_column, + inspector_column.type, + metadata_column.type, + ) + if user_value is not None: + return user_value + + return self.impl.compare_type(inspector_column, metadata_column) + + def _compare_server_default( + self, + inspector_column: Column[Any], + metadata_column: Column[Any], + rendered_metadata_default: Optional[str], + rendered_column_default: Optional[str], + ) -> bool: + if self._user_compare_server_default is False: + return False + + if callable(self._user_compare_server_default): + user_value = self._user_compare_server_default( + self, + inspector_column, + metadata_column, + rendered_column_default, + metadata_column.server_default, + rendered_metadata_default, + ) + if user_value is not None: + return user_value + + return self.impl.compare_server_default( + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_column_default, + ) + + +class HeadMaintainer: + def __init__(self, context: MigrationContext, heads: Any) -> None: + self.context = context + self.heads = set(heads) + + def _insert_version(self, version: str) -> None: + assert version not in self.heads + self.heads.add(version) + + self.context.impl._exec( + self.context._version.insert().values( + version_num=literal_column("'%s'" % version) + ) + ) + + def _delete_version(self, version: str) -> None: + self.heads.remove(version) + + ret = self.context.impl._exec( + self.context._version.delete().where( + self.context._version.c.version_num + == literal_column("'%s'" % version) + ) + ) + + if ( + not self.context.as_sql + and self.context.dialect.supports_sane_rowcount + and ret is not None + and ret.rowcount != 1 + ): + raise util.CommandError( + "Online migration expected to match one " + "row when deleting '%s' in '%s'; " + "%d found" + % (version, self.context.version_table, ret.rowcount) + ) + + def _update_version(self, from_: str, to_: str) -> None: + assert to_ not in self.heads + self.heads.remove(from_) + self.heads.add(to_) + + ret = self.context.impl._exec( + self.context._version.update() + .values(version_num=literal_column("'%s'" % to_)) + .where( + self.context._version.c.version_num + == literal_column("'%s'" % from_) + ) + ) + + if ( + not self.context.as_sql + and self.context.dialect.supports_sane_rowcount + and ret is not None + and ret.rowcount != 1 + ): + raise util.CommandError( + "Online migration expected to match one " + "row when updating '%s' to '%s' in '%s'; " + "%d found" + % (from_, to_, self.context.version_table, ret.rowcount) + ) + + def update_to_step(self, step: Union[RevisionStep, StampStep]) -> None: + if step.should_delete_branch(self.heads): + vers = step.delete_version_num + log.debug("branch delete %s", vers) + self._delete_version(vers) + elif step.should_create_branch(self.heads): + vers = step.insert_version_num + log.debug("new branch insert %s", vers) + self._insert_version(vers) + elif step.should_merge_branches(self.heads): + # delete revs, update from rev, update to rev + ( + delete_revs, + update_from_rev, + 
+                    update_to_rev,
+                ) = step.merge_branch_idents(self.heads)
+                log.debug(
+                    "merge, delete %s, update %s to %s",
+                    delete_revs,
+                    update_from_rev,
+                    update_to_rev,
+                )
+                for delrev in delete_revs:
+                    self._delete_version(delrev)
+                self._update_version(update_from_rev, update_to_rev)
+            elif step.should_unmerge_branches(self.heads):
+                (
+                    update_from_rev,
+                    update_to_rev,
+                    insert_revs,
+                ) = step.unmerge_branch_idents(self.heads)
+                log.debug(
+                    "unmerge, insert %s, update %s to %s",
+                    insert_revs,
+                    update_from_rev,
+                    update_to_rev,
+                )
+                for insrev in insert_revs:
+                    self._insert_version(insrev)
+                self._update_version(update_from_rev, update_to_rev)
+            else:
+                from_, to_ = step.update_version_num(self.heads)
+                log.debug("update %s to %s", from_, to_)
+                self._update_version(from_, to_)
+
+
+class MigrationInfo:
+    """Exposes information about a migration step to a callback listener.
+
+    The :class:`.MigrationInfo` object is available exclusively for the
+    benefit of the :paramref:`.EnvironmentContext.on_version_apply`
+    callback hook.
+
+    """
+
+    is_upgrade: bool
+    """True/False: indicates whether this operation ascends or descends the
+    version tree."""
+
+    is_stamp: bool
+    """True/False: indicates whether this operation is a stamp (i.e. whether
+    it results in any actual database operations)."""
+
+    up_revision_id: Optional[str]
+    """Version string corresponding to :attr:`.Revision.revision`.
+
+    In the case of a stamp operation, it is advised to use the
+    :attr:`.MigrationInfo.up_revision_ids` tuple as a stamp operation can
+    make a single movement from one or more branches down to a single
+    branchpoint, in which case there will be multiple "up" revisions.
+
+    .. seealso::
+
+        :attr:`.MigrationInfo.up_revision_ids`
+
+    """
+
+    up_revision_ids: Tuple[str, ...]
+    """Tuple of version strings corresponding to :attr:`.Revision.revision`.
+
+    In the majority of cases, this tuple will be a single value, synonymous
+    with the scalar value of :attr:`.MigrationInfo.up_revision_id`.
+    It can be multiple revision identifiers only in the case of an
+    ``alembic stamp`` operation which is moving downwards from multiple
+    branches down to their common branch point.
+
+    """
+
+    down_revision_ids: Tuple[str, ...]
+    """Tuple of strings representing the base revisions of this migration
+    step.
+
+    If empty, this represents a root revision; otherwise, the first item
+    corresponds to :attr:`.Revision.down_revision`, and the rest are
+    inferred from dependencies.
+    """
+
+    revision_map: RevisionMap
+    """The revision map inside of which this operation occurs."""
+
+    def __init__(
+        self,
+        revision_map: RevisionMap,
+        is_upgrade: bool,
+        is_stamp: bool,
+        up_revisions: Union[str, Tuple[str, ...]],
+        down_revisions: Union[str, Tuple[str, ...]],
+    ) -> None:
+        self.revision_map = revision_map
+        self.is_upgrade = is_upgrade
+        self.is_stamp = is_stamp
+        self.up_revision_ids = util.to_tuple(up_revisions, default=())
+        if self.up_revision_ids:
+            self.up_revision_id = self.up_revision_ids[0]
+        else:
+            # this should never be the case with
+            # "upgrade", "downgrade", or "stamp" as we are always
+            # measuring movement in terms of at least one upgrade version
+            self.up_revision_id = None
+        self.down_revision_ids = util.to_tuple(down_revisions, default=())
+
+    @property
+    def is_migration(self) -> bool:
+        """True/False: indicates whether this operation is a migration.
+
+        At present this is true if and only if the migration is not a stamp.
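+
+        For example, an :paramref:`.EnvironmentContext.on_version_apply`
+        callback (a hedged sketch; the function name is arbitrary) might
+        use this flag to skip reporting for stamp operations::
+
+            def report_applied(ctx, step, heads, run_args):
+                if step.is_migration:
+                    print("applied revision:", step.up_revision_id)
+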
+ If other operation types are added in the future, both this attribute + and :attr:`~.MigrationInfo.is_stamp` will be false. + """ + return not self.is_stamp + + @property + def source_revision_ids(self) -> Tuple[str, ...]: + """Active revisions before this migration step is applied.""" + return ( + self.down_revision_ids if self.is_upgrade else self.up_revision_ids + ) + + @property + def destination_revision_ids(self) -> Tuple[str, ...]: + """Active revisions after this migration step is applied.""" + return ( + self.up_revision_ids if self.is_upgrade else self.down_revision_ids + ) + + @property + def up_revision(self) -> Optional[Revision]: + """Get :attr:`~.MigrationInfo.up_revision_id` as + a :class:`.Revision`. + + """ + return self.revision_map.get_revision(self.up_revision_id) + + @property + def up_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: + """Get :attr:`~.MigrationInfo.up_revision_ids` as a + :class:`.Revision`.""" + return self.revision_map.get_revisions(self.up_revision_ids) + + @property + def down_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: + """Get :attr:`~.MigrationInfo.down_revision_ids` as a tuple of + :class:`Revisions <.Revision>`.""" + return self.revision_map.get_revisions(self.down_revision_ids) + + @property + def source_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: + """Get :attr:`~MigrationInfo.source_revision_ids` as a tuple of + :class:`Revisions <.Revision>`.""" + return self.revision_map.get_revisions(self.source_revision_ids) + + @property + def destination_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: + """Get :attr:`~MigrationInfo.destination_revision_ids` as a tuple of + :class:`Revisions <.Revision>`.""" + return self.revision_map.get_revisions(self.destination_revision_ids) + + +class MigrationStep: + from_revisions_no_deps: Tuple[str, ...] + to_revisions_no_deps: Tuple[str, ...] + is_upgrade: bool + migration_fn: Any + + if TYPE_CHECKING: + + @property + def doc(self) -> Optional[str]: + ... 
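+
+    # Illustrative note (revision ids hypothetical): for an upgrade step,
+    # short_log below renders as e.g. "upgrade ae1027a6acf -> 27c6a30d7c24",
+    # i.e. the migration function name, then the from-revisions and
+    # to-revisions as comma-separated lists.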
+ + @property + def name(self) -> str: + return self.migration_fn.__name__ + + @classmethod + def upgrade_from_script( + cls, revision_map: RevisionMap, script: Script + ) -> RevisionStep: + return RevisionStep(revision_map, script, True) + + @classmethod + def downgrade_from_script( + cls, revision_map: RevisionMap, script: Script + ) -> RevisionStep: + return RevisionStep(revision_map, script, False) + + @property + def is_downgrade(self) -> bool: + return not self.is_upgrade + + @property + def short_log(self) -> str: + return "%s %s -> %s" % ( + self.name, + util.format_as_comma(self.from_revisions_no_deps), + util.format_as_comma(self.to_revisions_no_deps), + ) + + def __str__(self): + if self.doc: + return "%s %s -> %s, %s" % ( + self.name, + util.format_as_comma(self.from_revisions_no_deps), + util.format_as_comma(self.to_revisions_no_deps), + self.doc, + ) + else: + return self.short_log + + +class RevisionStep(MigrationStep): + def __init__( + self, revision_map: RevisionMap, revision: Script, is_upgrade: bool + ) -> None: + self.revision_map = revision_map + self.revision = revision + self.is_upgrade = is_upgrade + if is_upgrade: + self.migration_fn = revision.module.upgrade + else: + self.migration_fn = revision.module.downgrade + + def __repr__(self): + return "RevisionStep(%r, is_upgrade=%r)" % ( + self.revision.revision, + self.is_upgrade, + ) + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, RevisionStep) + and other.revision == self.revision + and self.is_upgrade == other.is_upgrade + ) + + @property + def doc(self) -> Optional[str]: + return self.revision.doc + + @property + def from_revisions(self) -> Tuple[str, ...]: + if self.is_upgrade: + return self.revision._normalized_down_revisions + else: + return (self.revision.revision,) + + @property + def from_revisions_no_deps( # type:ignore[override] + self, + ) -> Tuple[str, ...]: + if self.is_upgrade: + return self.revision._versioned_down_revisions + else: + return (self.revision.revision,) + + @property + def to_revisions(self) -> Tuple[str, ...]: + if self.is_upgrade: + return (self.revision.revision,) + else: + return self.revision._normalized_down_revisions + + @property + def to_revisions_no_deps( # type:ignore[override] + self, + ) -> Tuple[str, ...]: + if self.is_upgrade: + return (self.revision.revision,) + else: + return self.revision._versioned_down_revisions + + @property + def _has_scalar_down_revision(self) -> bool: + return len(self.revision._normalized_down_revisions) == 1 + + def should_delete_branch(self, heads: Set[str]) -> bool: + """A delete is when we are a. in a downgrade and b. + we are going to the "base" or we are going to a version that + is implied as a dependency on another version that is remaining. + + """ + if not self.is_downgrade: + return False + + if self.revision.revision not in heads: + return False + + downrevs = self.revision._normalized_down_revisions + + if not downrevs: + # is a base + return True + else: + # determine what the ultimate "to_revisions" for an + # unmerge would be. If there are none, then we're a delete. 
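+            # hedged illustration: when downgrading a merge revision whose
+            # down revisions are all still ancestors of another remaining
+            # head, _unmerge_to_revisions() comes back empty and the row
+            # is simply deleted.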
+ to_revisions = self._unmerge_to_revisions(heads) + return not to_revisions + + def merge_branch_idents( + self, heads: Set[str] + ) -> Tuple[List[str], str, str]: + other_heads = set(heads).difference(self.from_revisions) + + if other_heads: + ancestors = { + r.revision + for r in self.revision_map._get_ancestor_nodes( + self.revision_map.get_revisions(other_heads), check=False + ) + } + from_revisions = list( + set(self.from_revisions).difference(ancestors) + ) + else: + from_revisions = list(self.from_revisions) + + return ( + # delete revs, update from rev, update to rev + list(from_revisions[0:-1]), + from_revisions[-1], + self.to_revisions[0], + ) + + def _unmerge_to_revisions(self, heads: Set[str]) -> Tuple[str, ...]: + other_heads = set(heads).difference([self.revision.revision]) + if other_heads: + ancestors = { + r.revision + for r in self.revision_map._get_ancestor_nodes( + self.revision_map.get_revisions(other_heads), check=False + ) + } + return tuple(set(self.to_revisions).difference(ancestors)) + else: + # for each revision we plan to return, compute its ancestors + # (excluding self), and remove those from the final output since + # they are already accounted for. + ancestors = { + r.revision + for to_revision in self.to_revisions + for r in self.revision_map._get_ancestor_nodes( + self.revision_map.get_revisions(to_revision), check=False + ) + if r.revision != to_revision + } + return tuple(set(self.to_revisions).difference(ancestors)) + + def unmerge_branch_idents( + self, heads: Set[str] + ) -> Tuple[str, str, Tuple[str, ...]]: + to_revisions = self._unmerge_to_revisions(heads) + + return ( + # update from rev, update to rev, insert revs + self.from_revisions[0], + to_revisions[-1], + to_revisions[0:-1], + ) + + def should_create_branch(self, heads: Set[str]) -> bool: + if not self.is_upgrade: + return False + + downrevs = self.revision._normalized_down_revisions + + if not downrevs: + # is a base + return True + else: + # none of our downrevs are present, so... + # we have to insert our version. This is true whether + # or not there is only one downrev, or multiple (in the latter + # case, we're a merge point.) 
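+            # e.g. (hypothetical): upgrading revision C with down revisions
+            # (A, B) when neither A nor B is currently a head -- C gets a
+            # newly INSERTed row rather than an UPDATE of an existing one.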
+ if not heads.intersection(downrevs): + return True + else: + return False + + def should_merge_branches(self, heads: Set[str]) -> bool: + if not self.is_upgrade: + return False + + downrevs = self.revision._normalized_down_revisions + + if len(downrevs) > 1 and len(heads.intersection(downrevs)) > 1: + return True + + return False + + def should_unmerge_branches(self, heads: Set[str]) -> bool: + if not self.is_downgrade: + return False + + downrevs = self.revision._normalized_down_revisions + + if self.revision.revision in heads and len(downrevs) > 1: + return True + + return False + + def update_version_num(self, heads: Set[str]) -> Tuple[str, str]: + if not self._has_scalar_down_revision: + downrev = heads.intersection( + self.revision._normalized_down_revisions + ) + assert ( + len(downrev) == 1 + ), "Can't do an UPDATE because downrevision is ambiguous" + down_revision = list(downrev)[0] + else: + down_revision = self.revision._normalized_down_revisions[0] + + if self.is_upgrade: + return down_revision, self.revision.revision + else: + return self.revision.revision, down_revision + + @property + def delete_version_num(self) -> str: + return self.revision.revision + + @property + def insert_version_num(self) -> str: + return self.revision.revision + + @property + def info(self) -> MigrationInfo: + return MigrationInfo( + revision_map=self.revision_map, + up_revisions=self.revision.revision, + down_revisions=self.revision._normalized_down_revisions, + is_upgrade=self.is_upgrade, + is_stamp=False, + ) + + +class StampStep(MigrationStep): + def __init__( + self, + from_: Optional[Union[str, Collection[str]]], + to_: Optional[Union[str, Collection[str]]], + is_upgrade: bool, + branch_move: bool, + revision_map: Optional[RevisionMap] = None, + ) -> None: + self.from_: Tuple[str, ...] = util.to_tuple(from_, default=()) + self.to_: Tuple[str, ...] 
= util.to_tuple(to_, default=()) + self.is_upgrade = is_upgrade + self.branch_move = branch_move + self.migration_fn = self.stamp_revision + self.revision_map = revision_map + + doc: Optional[str] = None + + def stamp_revision(self, **kw: Any) -> None: + return None + + def __eq__(self, other): + return ( + isinstance(other, StampStep) + and other.from_revisions == self.from_revisions + and other.to_revisions == self.to_revisions + and other.branch_move == self.branch_move + and self.is_upgrade == other.is_upgrade + ) + + @property + def from_revisions(self): + return self.from_ + + @property + def to_revisions(self) -> Tuple[str, ...]: + return self.to_ + + @property + def from_revisions_no_deps( # type:ignore[override] + self, + ) -> Tuple[str, ...]: + return self.from_ + + @property + def to_revisions_no_deps( # type:ignore[override] + self, + ) -> Tuple[str, ...]: + return self.to_ + + @property + def delete_version_num(self) -> str: + assert len(self.from_) == 1 + return self.from_[0] + + @property + def insert_version_num(self) -> str: + assert len(self.to_) == 1 + return self.to_[0] + + def update_version_num(self, heads: Set[str]) -> Tuple[str, str]: + assert len(self.from_) == 1 + assert len(self.to_) == 1 + return self.from_[0], self.to_[0] + + def merge_branch_idents( + self, heads: Union[Set[str], List[str]] + ) -> Union[Tuple[List[Any], str, str], Tuple[List[str], str, str]]: + return ( + # delete revs, update from rev, update to rev + list(self.from_[0:-1]), + self.from_[-1], + self.to_[0], + ) + + def unmerge_branch_idents( + self, heads: Set[str] + ) -> Tuple[str, str, List[str]]: + return ( + # update from rev, update to rev, insert revs + self.from_[0], + self.to_[-1], + list(self.to_[0:-1]), + ) + + def should_delete_branch(self, heads: Set[str]) -> bool: + # TODO: we probably need to look for self.to_ inside of heads, + # in a similar manner as should_create_branch, however we have + # no tests for this yet (stamp downgrades w/ branches) + return self.is_downgrade and self.branch_move + + def should_create_branch(self, heads: Set[str]) -> Union[Set[str], bool]: + return ( + self.is_upgrade + and (self.branch_move or set(self.from_).difference(heads)) + and set(self.to_).difference(heads) + ) + + def should_merge_branches(self, heads: Set[str]) -> bool: + return len(self.from_) > 1 + + def should_unmerge_branches(self, heads: Set[str]) -> bool: + return len(self.to_) > 1 + + @property + def info(self) -> MigrationInfo: + up, down = ( + (self.to_, self.from_) + if self.is_upgrade + else (self.from_, self.to_) + ) + assert self.revision_map is not None + return MigrationInfo( + revision_map=self.revision_map, + up_revisions=up, + down_revisions=down, + is_upgrade=self.is_upgrade, + is_stamp=True, + ) diff --git a/venv/lib/python3.12/site-packages/alembic/script/__init__.py b/venv/lib/python3.12/site-packages/alembic/script/__init__.py new file mode 100644 index 0000000..d78f3f1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/script/__init__.py @@ -0,0 +1,4 @@ +from .base import Script +from .base import ScriptDirectory + +__all__ = ["ScriptDirectory", "Script"] diff --git a/venv/lib/python3.12/site-packages/alembic/script/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d69a892 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/alembic/script/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..1489b73 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/script/__pycache__/revision.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/revision.cpython-312.pyc new file mode 100644 index 0000000..b8b577a Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/revision.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/script/__pycache__/write_hooks.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/write_hooks.cpython-312.pyc new file mode 100644 index 0000000..fe8e90c Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/script/__pycache__/write_hooks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/script/base.py b/venv/lib/python3.12/site-packages/alembic/script/base.py new file mode 100644 index 0000000..5945ca5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/script/base.py @@ -0,0 +1,1060 @@ +from __future__ import annotations + +from contextlib import contextmanager +import datetime +import os +import re +import shutil +import sys +from types import ModuleType +from typing import Any +from typing import cast +from typing import Iterator +from typing import List +from typing import Mapping +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from . import revision +from . import write_hooks +from .. import util +from ..runtime import migration +from ..util import compat +from ..util import not_none + +if TYPE_CHECKING: + from .revision import _GetRevArg + from .revision import _RevIdType + from .revision import Revision + from ..config import Config + from ..config import MessagingOptions + from ..runtime.migration import RevisionStep + from ..runtime.migration import StampStep + +try: + if compat.py39: + from zoneinfo import ZoneInfo + from zoneinfo import ZoneInfoNotFoundError + else: + from backports.zoneinfo import ZoneInfo # type: ignore[import-not-found,no-redef] # noqa: E501 + from backports.zoneinfo import ZoneInfoNotFoundError # type: ignore[no-redef] # noqa: E501 +except ImportError: + ZoneInfo = None # type: ignore[assignment, misc] + +_sourceless_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)(c|o)?$") +_only_source_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)$") +_legacy_rev = re.compile(r"([a-f0-9]+)\.py$") +_slug_re = re.compile(r"\w+") +_default_file_template = "%(rev)s_%(slug)s" +_split_on_space_comma = re.compile(r", *|(?: +)") + +_split_on_space_comma_colon = re.compile(r", *|(?: +)|\:") + + +class ScriptDirectory: + + """Provides operations upon an Alembic script directory. 
+ + This object is useful to get information as to current revisions, + most notably being able to get at the "head" revision, for schemes + that want to test if the current revision in the database is the most + recent:: + + from alembic.script import ScriptDirectory + from alembic.config import Config + config = Config() + config.set_main_option("script_location", "myapp:migrations") + script = ScriptDirectory.from_config(config) + + head_revision = script.get_current_head() + + + + """ + + def __init__( + self, + dir: str, # noqa + file_template: str = _default_file_template, + truncate_slug_length: Optional[int] = 40, + version_locations: Optional[List[str]] = None, + sourceless: bool = False, + output_encoding: str = "utf-8", + timezone: Optional[str] = None, + hook_config: Optional[Mapping[str, str]] = None, + recursive_version_locations: bool = False, + messaging_opts: MessagingOptions = cast( + "MessagingOptions", util.EMPTY_DICT + ), + ) -> None: + self.dir = dir + self.file_template = file_template + self.version_locations = version_locations + self.truncate_slug_length = truncate_slug_length or 40 + self.sourceless = sourceless + self.output_encoding = output_encoding + self.revision_map = revision.RevisionMap(self._load_revisions) + self.timezone = timezone + self.hook_config = hook_config + self.recursive_version_locations = recursive_version_locations + self.messaging_opts = messaging_opts + + if not os.access(dir, os.F_OK): + raise util.CommandError( + "Path doesn't exist: %r. Please use " + "the 'init' command to create a new " + "scripts folder." % os.path.abspath(dir) + ) + + @property + def versions(self) -> str: + loc = self._version_locations + if len(loc) > 1: + raise util.CommandError("Multiple version_locations present") + else: + return loc[0] + + @util.memoized_property + def _version_locations(self) -> Sequence[str]: + if self.version_locations: + return [ + os.path.abspath(util.coerce_resource_to_filename(location)) + for location in self.version_locations + ] + else: + return (os.path.abspath(os.path.join(self.dir, "versions")),) + + def _load_revisions(self) -> Iterator[Script]: + if self.version_locations: + paths = [ + vers + for vers in self._version_locations + if os.path.exists(vers) + ] + else: + paths = [self.versions] + + dupes = set() + for vers in paths: + for file_path in Script._list_py_dir(self, vers): + real_path = os.path.realpath(file_path) + if real_path in dupes: + util.warn( + "File %s loaded twice! ignoring. Please ensure " + "version_locations is unique." % real_path + ) + continue + dupes.add(real_path) + + filename = os.path.basename(real_path) + dir_name = os.path.dirname(real_path) + script = Script._from_filename(self, dir_name, filename) + if script is None: + continue + yield script + + @classmethod + def from_config(cls, config: Config) -> ScriptDirectory: + """Produce a new :class:`.ScriptDirectory` given a :class:`.Config` + instance. + + The :class:`.Config` need only have the ``script_location`` key + present. + + """ + script_location = config.get_main_option("script_location") + if script_location is None: + raise util.CommandError( + "No 'script_location' key " "found in configuration." 
+ ) + truncate_slug_length: Optional[int] + tsl = config.get_main_option("truncate_slug_length") + if tsl is not None: + truncate_slug_length = int(tsl) + else: + truncate_slug_length = None + + version_locations_str = config.get_main_option("version_locations") + version_locations: Optional[List[str]] + if version_locations_str: + version_path_separator = config.get_main_option( + "version_path_separator" + ) + + split_on_path = { + None: None, + "space": " ", + "os": os.pathsep, + ":": ":", + ";": ";", + } + + try: + split_char: Optional[str] = split_on_path[ + version_path_separator + ] + except KeyError as ke: + raise ValueError( + "'%s' is not a valid value for " + "version_path_separator; " + "expected 'space', 'os', ':', ';'" % version_path_separator + ) from ke + else: + if split_char is None: + # legacy behaviour for backwards compatibility + version_locations = _split_on_space_comma.split( + version_locations_str + ) + else: + version_locations = [ + x for x in version_locations_str.split(split_char) if x + ] + else: + version_locations = None + + prepend_sys_path = config.get_main_option("prepend_sys_path") + if prepend_sys_path: + sys.path[:0] = list( + _split_on_space_comma_colon.split(prepend_sys_path) + ) + + rvl = config.get_main_option("recursive_version_locations") == "true" + return ScriptDirectory( + util.coerce_resource_to_filename(script_location), + file_template=config.get_main_option( + "file_template", _default_file_template + ), + truncate_slug_length=truncate_slug_length, + sourceless=config.get_main_option("sourceless") == "true", + output_encoding=config.get_main_option("output_encoding", "utf-8"), + version_locations=version_locations, + timezone=config.get_main_option("timezone"), + hook_config=config.get_section("post_write_hooks", {}), + recursive_version_locations=rvl, + messaging_opts=config.messaging_opts, + ) + + @contextmanager + def _catch_revision_errors( + self, + ancestor: Optional[str] = None, + multiple_heads: Optional[str] = None, + start: Optional[str] = None, + end: Optional[str] = None, + resolution: Optional[str] = None, + ) -> Iterator[None]: + try: + yield + except revision.RangeNotAncestorError as rna: + if start is None: + start = cast(Any, rna.lower) + if end is None: + end = cast(Any, rna.upper) + if not ancestor: + ancestor = ( + "Requested range %(start)s:%(end)s does not refer to " + "ancestor/descendant revisions along the same branch" + ) + ancestor = ancestor % {"start": start, "end": end} + raise util.CommandError(ancestor) from rna + except revision.MultipleHeads as mh: + if not multiple_heads: + multiple_heads = ( + "Multiple head revisions are present for given " + "argument '%(head_arg)s'; please " + "specify a specific target revision, " + "'@%(head_arg)s' to " + "narrow to a specific head, or 'heads' for all heads" + ) + multiple_heads = multiple_heads % { + "head_arg": end or mh.argument, + "heads": util.format_as_comma(mh.heads), + } + raise util.CommandError(multiple_heads) from mh + except revision.ResolutionError as re: + if resolution is None: + resolution = "Can't locate revision identified by '%s'" % ( + re.argument + ) + raise util.CommandError(resolution) from re + except revision.RevisionError as err: + raise util.CommandError(err.args[0]) from err + + def walk_revisions( + self, base: str = "base", head: str = "heads" + ) -> Iterator[Script]: + """Iterate through all revisions. + + :param base: the base revision, or "base" to start from the + empty revision. 
+ + :param head: the head revision; defaults to "heads" to indicate + all head revisions. May also be "head" to indicate a single + head revision. + + """ + with self._catch_revision_errors(start=base, end=head): + for rev in self.revision_map.iterate_revisions( + head, base, inclusive=True, assert_relative_length=False + ): + yield cast(Script, rev) + + def get_revisions(self, id_: _GetRevArg) -> Tuple[Script, ...]: + """Return the :class:`.Script` instance with the given rev identifier, + symbolic name, or sequence of identifiers. + + """ + with self._catch_revision_errors(): + return cast( + Tuple[Script, ...], + self.revision_map.get_revisions(id_), + ) + + def get_all_current(self, id_: Tuple[str, ...]) -> Set[Script]: + with self._catch_revision_errors(): + return cast(Set[Script], self.revision_map._get_all_current(id_)) + + def get_revision(self, id_: str) -> Script: + """Return the :class:`.Script` instance with the given rev id. + + .. seealso:: + + :meth:`.ScriptDirectory.get_revisions` + + """ + + with self._catch_revision_errors(): + return cast(Script, self.revision_map.get_revision(id_)) + + def as_revision_number( + self, id_: Optional[str] + ) -> Optional[Union[str, Tuple[str, ...]]]: + """Convert a symbolic revision, i.e. 'head' or 'base', into + an actual revision number.""" + + with self._catch_revision_errors(): + rev, branch_name = self.revision_map._resolve_revision_number(id_) + + if not rev: + # convert () to None + return None + elif id_ == "heads": + return rev + else: + return rev[0] + + def iterate_revisions( + self, + upper: Union[str, Tuple[str, ...], None], + lower: Union[str, Tuple[str, ...], None], + **kw: Any, + ) -> Iterator[Script]: + """Iterate through script revisions, starting at the given + upper revision identifier and ending at the lower. + + The traversal uses strictly the `down_revision` + marker inside each migration script, so + it is a requirement that upper >= lower, + else you'll get nothing back. + + The iterator yields :class:`.Script` objects. + + .. seealso:: + + :meth:`.RevisionMap.iterate_revisions` + + """ + return cast( + Iterator[Script], + self.revision_map.iterate_revisions(upper, lower, **kw), + ) + + def get_current_head(self) -> Optional[str]: + """Return the current head revision. + + If the script directory has multiple heads + due to branching, an error is raised; + :meth:`.ScriptDirectory.get_heads` should be + preferred. + + :return: a string revision number. + + .. seealso:: + + :meth:`.ScriptDirectory.get_heads` + + """ + with self._catch_revision_errors( + multiple_heads=( + "The script directory has multiple heads (due to branching)." + "Please use get_heads(), or merge the branches using " + "alembic merge." + ) + ): + return self.revision_map.get_current_head() + + def get_heads(self) -> List[str]: + """Return all "versioned head" revisions as strings. + + This is normally a list of length one, + unless branches are present. The + :meth:`.ScriptDirectory.get_current_head()` method + can be used normally when a script directory + has only one head. + + :return: a tuple of string revision numbers. + """ + return list(self.revision_map.heads) + + def get_base(self) -> Optional[str]: + """Return the "base" revision as a string. + + This is the revision number of the script that + has a ``down_revision`` of None. + + If the script directory has multiple bases, an error is raised; + :meth:`.ScriptDirectory.get_bases` should be + preferred. 
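+
+        A minimal sketch (assuming ``config`` is a :class:`.Config`;
+        the revision id shown is hypothetical)::
+
+            script = ScriptDirectory.from_config(config)
+            base = script.get_base()  # e.g. "1975ea83b712", or None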
+ + """ + bases = self.get_bases() + if len(bases) > 1: + raise util.CommandError( + "The script directory has multiple bases. " + "Please use get_bases()." + ) + elif bases: + return bases[0] + else: + return None + + def get_bases(self) -> List[str]: + """return all "base" revisions as strings. + + This is the revision number of all scripts that + have a ``down_revision`` of None. + + """ + return list(self.revision_map.bases) + + def _upgrade_revs( + self, destination: str, current_rev: str + ) -> List[RevisionStep]: + with self._catch_revision_errors( + ancestor="Destination %(end)s is not a valid upgrade " + "target from current head(s)", + end=destination, + ): + revs = self.iterate_revisions( + destination, current_rev, implicit_base=True + ) + return [ + migration.MigrationStep.upgrade_from_script( + self.revision_map, script + ) + for script in reversed(list(revs)) + ] + + def _downgrade_revs( + self, destination: str, current_rev: Optional[str] + ) -> List[RevisionStep]: + with self._catch_revision_errors( + ancestor="Destination %(end)s is not a valid downgrade " + "target from current head(s)", + end=destination, + ): + revs = self.iterate_revisions( + current_rev, destination, select_for_downgrade=True + ) + return [ + migration.MigrationStep.downgrade_from_script( + self.revision_map, script + ) + for script in revs + ] + + def _stamp_revs( + self, revision: _RevIdType, heads: _RevIdType + ) -> List[StampStep]: + with self._catch_revision_errors( + multiple_heads="Multiple heads are present; please specify a " + "single target revision" + ): + heads_revs = self.get_revisions(heads) + + steps = [] + + if not revision: + revision = "base" + + filtered_heads: List[Script] = [] + for rev in util.to_tuple(revision): + if rev: + filtered_heads.extend( + self.revision_map.filter_for_lineage( + cast(Sequence[Script], heads_revs), + rev, + include_dependencies=True, + ) + ) + filtered_heads = util.unique_list(filtered_heads) + + dests = self.get_revisions(revision) or [None] + + for dest in dests: + if dest is None: + # dest is 'base'. Return a "delete branch" migration + # for all applicable heads. + steps.extend( + [ + migration.StampStep( + head.revision, + None, + False, + True, + self.revision_map, + ) + for head in filtered_heads + ] + ) + continue + elif dest in filtered_heads: + # the dest is already in the version table, do nothing. + continue + + # figure out if the dest is a descendant or an + # ancestor of the selected nodes + descendants = set( + self.revision_map._get_descendant_nodes([dest]) + ) + ancestors = set(self.revision_map._get_ancestor_nodes([dest])) + + if descendants.intersection(filtered_heads): + # heads are above the target, so this is a downgrade. + # we can treat them as a "merge", single step. + assert not ancestors.intersection(filtered_heads) + todo_heads = [head.revision for head in filtered_heads] + step = migration.StampStep( + todo_heads, + dest.revision, + False, + False, + self.revision_map, + ) + steps.append(step) + continue + elif ancestors.intersection(filtered_heads): + # heads are below the target, so this is an upgrade. + # we can treat them as a "merge", single step. 
+ todo_heads = [head.revision for head in filtered_heads] + step = migration.StampStep( + todo_heads, + dest.revision, + True, + False, + self.revision_map, + ) + steps.append(step) + continue + else: + # destination is in a branch not represented, + # treat it as new branch + step = migration.StampStep( + (), dest.revision, True, True, self.revision_map + ) + steps.append(step) + continue + + return steps + + def run_env(self) -> None: + """Run the script environment. + + This basically runs the ``env.py`` script present + in the migration environment. It is called exclusively + by the command functions in :mod:`alembic.command`. + + + """ + util.load_python_file(self.dir, "env.py") + + @property + def env_py_location(self) -> str: + return os.path.abspath(os.path.join(self.dir, "env.py")) + + def _generate_template(self, src: str, dest: str, **kw: Any) -> None: + with util.status( + f"Generating {os.path.abspath(dest)}", **self.messaging_opts + ): + util.template_to_file(src, dest, self.output_encoding, **kw) + + def _copy_file(self, src: str, dest: str) -> None: + with util.status( + f"Generating {os.path.abspath(dest)}", **self.messaging_opts + ): + shutil.copy(src, dest) + + def _ensure_directory(self, path: str) -> None: + path = os.path.abspath(path) + if not os.path.exists(path): + with util.status( + f"Creating directory {path}", **self.messaging_opts + ): + os.makedirs(path) + + def _generate_create_date(self) -> datetime.datetime: + if self.timezone is not None: + if ZoneInfo is None: + raise util.CommandError( + "Python >= 3.9 is required for timezone support or" + "the 'backports.zoneinfo' package must be installed." + ) + # First, assume correct capitalization + try: + tzinfo = ZoneInfo(self.timezone) + except ZoneInfoNotFoundError: + tzinfo = None + if tzinfo is None: + try: + tzinfo = ZoneInfo(self.timezone.upper()) + except ZoneInfoNotFoundError: + raise util.CommandError( + "Can't locate timezone: %s" % self.timezone + ) from None + create_date = ( + datetime.datetime.utcnow() + .replace(tzinfo=datetime.timezone.utc) + .astimezone(tzinfo) + ) + else: + create_date = datetime.datetime.now() + return create_date + + def generate_revision( + self, + revid: str, + message: Optional[str], + head: Optional[_RevIdType] = None, + splice: Optional[bool] = False, + branch_labels: Optional[_RevIdType] = None, + version_path: Optional[str] = None, + depends_on: Optional[_RevIdType] = None, + **kw: Any, + ) -> Optional[Script]: + """Generate a new revision file. + + This runs the ``script.py.mako`` template, given + template arguments, and creates a new file. + + :param revid: String revision id. Typically this + comes from ``alembic.util.rev_id()``. + :param message: the revision message, the one passed + by the -m argument to the ``revision`` command. + :param head: the head revision to generate against. Defaults + to the current "head" if no branches are present, else raises + an exception. + :param splice: if True, allow the "head" version to not be an + actual head; otherwise, the selected head must be a head + (e.g. endpoint) revision. + + """ + if head is None: + head = "head" + + try: + Script.verify_rev_id(revid) + except revision.RevisionError as err: + raise util.CommandError(err.args[0]) from err + + with self._catch_revision_errors( + multiple_heads=( + "Multiple heads are present; please specify the head " + "revision on which the new revision should be based, " + "or perform a merge." 
+ ) + ): + heads = cast( + Tuple[Optional["Revision"], ...], + self.revision_map.get_revisions(head), + ) + for h in heads: + assert h != "base" # type: ignore[comparison-overlap] + + if len(set(heads)) != len(heads): + raise util.CommandError("Duplicate head revisions specified") + + create_date = self._generate_create_date() + + if version_path is None: + if len(self._version_locations) > 1: + for head_ in heads: + if head_ is not None: + assert isinstance(head_, Script) + version_path = os.path.dirname(head_.path) + break + else: + raise util.CommandError( + "Multiple version locations present, " + "please specify --version-path" + ) + else: + version_path = self.versions + + norm_path = os.path.normpath(os.path.abspath(version_path)) + for vers_path in self._version_locations: + if os.path.normpath(vers_path) == norm_path: + break + else: + raise util.CommandError( + "Path %s is not represented in current " + "version locations" % version_path + ) + + if self.version_locations: + self._ensure_directory(version_path) + + path = self._rev_path(version_path, revid, message, create_date) + + if not splice: + for head_ in heads: + if head_ is not None and not head_.is_head: + raise util.CommandError( + "Revision %s is not a head revision; please specify " + "--splice to create a new branch from this revision" + % head_.revision + ) + + resolved_depends_on: Optional[List[str]] + if depends_on: + with self._catch_revision_errors(): + resolved_depends_on = [ + dep + if dep in rev.branch_labels # maintain branch labels + else rev.revision # resolve partial revision identifiers + for rev, dep in [ + (not_none(self.revision_map.get_revision(dep)), dep) + for dep in util.to_list(depends_on) + ] + ] + else: + resolved_depends_on = None + + self._generate_template( + os.path.join(self.dir, "script.py.mako"), + path, + up_revision=str(revid), + down_revision=revision.tuple_rev_as_scalar( + tuple(h.revision if h is not None else None for h in heads) + ), + branch_labels=util.to_tuple(branch_labels), + depends_on=revision.tuple_rev_as_scalar(resolved_depends_on), + create_date=create_date, + comma=util.format_as_comma, + message=message if message is not None else ("empty message"), + **kw, + ) + + post_write_hooks = self.hook_config + if post_write_hooks: + write_hooks._run_hooks(path, post_write_hooks) + + try: + script = Script._from_path(self, path) + except revision.RevisionError as err: + raise util.CommandError(err.args[0]) from err + if script is None: + return None + if branch_labels and not script.branch_labels: + raise util.CommandError( + "Version %s specified branch_labels %s, however the " + "migration file %s does not have them; have you upgraded " + "your script.py.mako to include the " + "'branch_labels' section?" 
+ % (script.revision, branch_labels, script.path) + ) + self.revision_map.add_revision(script) + return script + + def _rev_path( + self, + path: str, + rev_id: str, + message: Optional[str], + create_date: datetime.datetime, + ) -> str: + epoch = int(create_date.timestamp()) + slug = "_".join(_slug_re.findall(message or "")).lower() + if len(slug) > self.truncate_slug_length: + slug = slug[: self.truncate_slug_length].rsplit("_", 1)[0] + "_" + filename = "%s.py" % ( + self.file_template + % { + "rev": rev_id, + "slug": slug, + "epoch": epoch, + "year": create_date.year, + "month": create_date.month, + "day": create_date.day, + "hour": create_date.hour, + "minute": create_date.minute, + "second": create_date.second, + } + ) + return os.path.join(path, filename) + + +class Script(revision.Revision): + + """Represent a single revision file in a ``versions/`` directory. + + The :class:`.Script` instance is returned by methods + such as :meth:`.ScriptDirectory.iterate_revisions`. + + """ + + def __init__(self, module: ModuleType, rev_id: str, path: str): + self.module = module + self.path = path + super().__init__( + rev_id, + module.down_revision, + branch_labels=util.to_tuple( + getattr(module, "branch_labels", None), default=() + ), + dependencies=util.to_tuple( + getattr(module, "depends_on", None), default=() + ), + ) + + module: ModuleType + """The Python module representing the actual script itself.""" + + path: str + """Filesystem path of the script.""" + + _db_current_indicator: Optional[bool] = None + """Utility variable which when set will cause string output to indicate + this is a "current" version in some database""" + + @property + def doc(self) -> str: + """Return the docstring given in the script.""" + + return re.split("\n\n", self.longdoc)[0] + + @property + def longdoc(self) -> str: + """Return the docstring given in the script.""" + + doc = self.module.__doc__ + if doc: + if hasattr(self.module, "_alembic_source_encoding"): + doc = doc.decode( # type: ignore[attr-defined] + self.module._alembic_source_encoding + ) + return doc.strip() # type: ignore[union-attr] + else: + return "" + + @property + def log_entry(self) -> str: + entry = "Rev: %s%s%s%s%s\n" % ( + self.revision, + " (head)" if self.is_head else "", + " (branchpoint)" if self.is_branch_point else "", + " (mergepoint)" if self.is_merge_point else "", + " (current)" if self._db_current_indicator else "", + ) + if self.is_merge_point: + entry += "Merges: %s\n" % (self._format_down_revision(),) + else: + entry += "Parent: %s\n" % (self._format_down_revision(),) + + if self.dependencies: + entry += "Also depends on: %s\n" % ( + util.format_as_comma(self.dependencies) + ) + + if self.is_branch_point: + entry += "Branches into: %s\n" % ( + util.format_as_comma(self.nextrev) + ) + + if self.branch_labels: + entry += "Branch names: %s\n" % ( + util.format_as_comma(self.branch_labels), + ) + + entry += "Path: %s\n" % (self.path,) + + entry += "\n%s\n" % ( + "\n".join(" %s" % para for para in self.longdoc.splitlines()) + ) + return entry + + def __str__(self) -> str: + return "%s -> %s%s%s%s, %s" % ( + self._format_down_revision(), + self.revision, + " (head)" if self.is_head else "", + " (branchpoint)" if self.is_branch_point else "", + " (mergepoint)" if self.is_merge_point else "", + self.doc, + ) + + def _head_only( + self, + include_branches: bool = False, + include_doc: bool = False, + include_parents: bool = False, + tree_indicators: bool = True, + head_indicators: bool = True, + ) -> str: + text = self.revision + 
if include_parents: + if self.dependencies: + text = "%s (%s) -> %s" % ( + self._format_down_revision(), + util.format_as_comma(self.dependencies), + text, + ) + else: + text = "%s -> %s" % (self._format_down_revision(), text) + assert text is not None + if include_branches and self.branch_labels: + text += " (%s)" % util.format_as_comma(self.branch_labels) + if head_indicators or tree_indicators: + text += "%s%s%s" % ( + " (head)" if self._is_real_head else "", + " (effective head)" + if self.is_head and not self._is_real_head + else "", + " (current)" if self._db_current_indicator else "", + ) + if tree_indicators: + text += "%s%s" % ( + " (branchpoint)" if self.is_branch_point else "", + " (mergepoint)" if self.is_merge_point else "", + ) + if include_doc: + text += ", %s" % self.doc + return text + + def cmd_format( + self, + verbose: bool, + include_branches: bool = False, + include_doc: bool = False, + include_parents: bool = False, + tree_indicators: bool = True, + ) -> str: + if verbose: + return self.log_entry + else: + return self._head_only( + include_branches, include_doc, include_parents, tree_indicators + ) + + def _format_down_revision(self) -> str: + if not self.down_revision: + return "" + else: + return util.format_as_comma(self._versioned_down_revisions) + + @classmethod + def _from_path( + cls, scriptdir: ScriptDirectory, path: str + ) -> Optional[Script]: + dir_, filename = os.path.split(path) + return cls._from_filename(scriptdir, dir_, filename) + + @classmethod + def _list_py_dir(cls, scriptdir: ScriptDirectory, path: str) -> List[str]: + paths = [] + for root, dirs, files in os.walk(path, topdown=True): + if root.endswith("__pycache__"): + # a special case - we may include these files + # if a `sourceless` option is specified + continue + + for filename in sorted(files): + paths.append(os.path.join(root, filename)) + + if scriptdir.sourceless: + # look for __pycache__ + py_cache_path = os.path.join(root, "__pycache__") + if os.path.exists(py_cache_path): + # add all files from __pycache__ whose filename is not + # already in the names we got from the version directory. + # add as relative paths including __pycache__ token + names = {filename.split(".")[0] for filename in files} + paths.extend( + os.path.join(py_cache_path, pyc) + for pyc in os.listdir(py_cache_path) + if pyc.split(".")[0] not in names + ) + + if not scriptdir.recursive_version_locations: + break + + # the real script order is defined by revision, + # but it may be undefined if there are many files with a same + # `down_revision`, for a better user experience (ex. 
debugging), + # we use a deterministic order + dirs.sort() + + return paths + + @classmethod + def _from_filename( + cls, scriptdir: ScriptDirectory, dir_: str, filename: str + ) -> Optional[Script]: + if scriptdir.sourceless: + py_match = _sourceless_rev_file.match(filename) + else: + py_match = _only_source_rev_file.match(filename) + + if not py_match: + return None + + py_filename = py_match.group(1) + + if scriptdir.sourceless: + is_c = py_match.group(2) == "c" + is_o = py_match.group(2) == "o" + else: + is_c = is_o = False + + if is_o or is_c: + py_exists = os.path.exists(os.path.join(dir_, py_filename)) + pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c")) + + # prefer .py over .pyc because we'd like to get the + # source encoding; prefer .pyc over .pyo because we'd like to + # have the docstrings which a -OO file would not have + if py_exists or is_o and pyc_exists: + return None + + module = util.load_python_file(dir_, filename) + + if not hasattr(module, "revision"): + # attempt to get the revision id from the script name, + # this for legacy only + m = _legacy_rev.match(filename) + if not m: + raise util.CommandError( + "Could not determine revision id from filename %s. " + "Be sure the 'revision' variable is " + "declared inside the script (please see 'Upgrading " + "from Alembic 0.1 to 0.2' in the documentation)." + % filename + ) + else: + revision = m.group(1) + else: + revision = module.revision + return Script(module, revision, os.path.join(dir_, filename)) diff --git a/venv/lib/python3.12/site-packages/alembic/script/revision.py b/venv/lib/python3.12/site-packages/alembic/script/revision.py new file mode 100644 index 0000000..77a802c --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/script/revision.py @@ -0,0 +1,1721 @@ +from __future__ import annotations + +import collections +import re +from typing import Any +from typing import Callable +from typing import cast +from typing import Collection +from typing import Deque +from typing import Dict +from typing import FrozenSet +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import overload +from typing import Protocol +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy import util as sqlautil + +from .. import util +from ..util import not_none + +if TYPE_CHECKING: + from typing import Literal + +_RevIdType = Union[str, List[str], Tuple[str, ...]] +_GetRevArg = Union[ + str, + Iterable[Optional[str]], + Iterable[str], +] +_RevisionIdentifierType = Union[str, Tuple[str, ...], None] +_RevisionOrStr = Union["Revision", str] +_RevisionOrBase = Union["Revision", "Literal['base']"] +_InterimRevisionMapType = Dict[str, "Revision"] +_RevisionMapType = Dict[Union[None, str, Tuple[()]], Optional["Revision"]] +_T = TypeVar("_T") +_TR = TypeVar("_TR", bound=Optional[_RevisionOrStr]) + +_relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)") +_revision_illegal_chars = ["@", "-", "+"] + + +class _CollectRevisionsProtocol(Protocol): + def __call__( + self, + upper: _RevisionIdentifierType, + lower: _RevisionIdentifierType, + inclusive: bool, + implicit_base: bool, + assert_relative_length: bool, + ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: + ... 
+ + +class RevisionError(Exception): + pass + + +class RangeNotAncestorError(RevisionError): + def __init__( + self, lower: _RevisionIdentifierType, upper: _RevisionIdentifierType + ) -> None: + self.lower = lower + self.upper = upper + super().__init__( + "Revision %s is not an ancestor of revision %s" + % (lower or "base", upper or "base") + ) + + +class MultipleHeads(RevisionError): + def __init__(self, heads: Sequence[str], argument: Optional[str]) -> None: + self.heads = heads + self.argument = argument + super().__init__( + "Multiple heads are present for given argument '%s'; " + "%s" % (argument, ", ".join(heads)) + ) + + +class ResolutionError(RevisionError): + def __init__(self, message: str, argument: str) -> None: + super().__init__(message) + self.argument = argument + + +class CycleDetected(RevisionError): + kind = "Cycle" + + def __init__(self, revisions: Sequence[str]) -> None: + self.revisions = revisions + super().__init__( + "%s is detected in revisions (%s)" + % (self.kind, ", ".join(revisions)) + ) + + +class DependencyCycleDetected(CycleDetected): + kind = "Dependency cycle" + + def __init__(self, revisions: Sequence[str]) -> None: + super().__init__(revisions) + + +class LoopDetected(CycleDetected): + kind = "Self-loop" + + def __init__(self, revision: str) -> None: + super().__init__([revision]) + + +class DependencyLoopDetected(DependencyCycleDetected, LoopDetected): + kind = "Dependency self-loop" + + def __init__(self, revision: Sequence[str]) -> None: + super().__init__(revision) + + +class RevisionMap: + """Maintains a map of :class:`.Revision` objects. + + :class:`.RevisionMap` is used by :class:`.ScriptDirectory` to maintain + and traverse the collection of :class:`.Script` objects, which are + themselves instances of :class:`.Revision`. + + """ + + def __init__(self, generator: Callable[[], Iterable[Revision]]) -> None: + """Construct a new :class:`.RevisionMap`. + + :param generator: a zero-arg callable that will generate an iterable + of :class:`.Revision` instances to be used. These are typically + :class:`.Script` subclasses within regular Alembic use. + + """ + self._generator = generator + + @util.memoized_property + def heads(self) -> Tuple[str, ...]: + """All "head" revisions as strings. + + This is normally a tuple of length one, + unless unmerged branches are present. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self.heads + + @util.memoized_property + def bases(self) -> Tuple[str, ...]: + """All "base" revisions as strings. + + These are revisions that have a ``down_revision`` of None, + or empty tuple. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self.bases + + @util.memoized_property + def _real_heads(self) -> Tuple[str, ...]: + """All "real" head revisions as strings. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self._real_heads + + @util.memoized_property + def _real_bases(self) -> Tuple[str, ...]: + """All "real" base revisions as strings. + + :return: a tuple of string revision numbers. + + """ + self._revision_map + return self._real_bases + + @util.memoized_property + def _revision_map(self) -> _RevisionMapType: + """memoized attribute, initializes the revision map from the + initial collection. 
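+
+        Keys are revision id strings plus any branch label strings;
+        ``None`` and the empty tuple are additionally mapped to ``None``
+        as "base" sentinels.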
+ + """ + # Ordering required for some tests to pass (but not required in + # general) + map_: _InterimRevisionMapType = sqlautil.OrderedDict() + + heads: Set[Revision] = sqlautil.OrderedSet() + _real_heads: Set[Revision] = sqlautil.OrderedSet() + bases: Tuple[Revision, ...] = () + _real_bases: Tuple[Revision, ...] = () + + has_branch_labels = set() + all_revisions = set() + + for revision in self._generator(): + all_revisions.add(revision) + + if revision.revision in map_: + util.warn( + "Revision %s is present more than once" % revision.revision + ) + map_[revision.revision] = revision + if revision.branch_labels: + has_branch_labels.add(revision) + + heads.add(revision) + _real_heads.add(revision) + if revision.is_base: + bases += (revision,) + if revision._is_real_base: + _real_bases += (revision,) + + # add the branch_labels to the map_. We'll need these + # to resolve the dependencies. + rev_map = map_.copy() + self._map_branch_labels( + has_branch_labels, cast(_RevisionMapType, map_) + ) + + # resolve dependency names from branch labels and symbolic + # names + self._add_depends_on(all_revisions, cast(_RevisionMapType, map_)) + + for rev in map_.values(): + for downrev in rev._all_down_revisions: + if downrev not in map_: + util.warn( + "Revision %s referenced from %s is not present" + % (downrev, rev) + ) + down_revision = map_[downrev] + down_revision.add_nextrev(rev) + if downrev in rev._versioned_down_revisions: + heads.discard(down_revision) + _real_heads.discard(down_revision) + + # once the map has downrevisions populated, the dependencies + # can be further refined to include only those which are not + # already ancestors + self._normalize_depends_on(all_revisions, cast(_RevisionMapType, map_)) + self._detect_cycles(rev_map, heads, bases, _real_heads, _real_bases) + + revision_map: _RevisionMapType = dict(map_.items()) + revision_map[None] = revision_map[()] = None + self.heads = tuple(rev.revision for rev in heads) + self._real_heads = tuple(rev.revision for rev in _real_heads) + self.bases = tuple(rev.revision for rev in bases) + self._real_bases = tuple(rev.revision for rev in _real_bases) + + self._add_branches(has_branch_labels, revision_map) + return revision_map + + def _detect_cycles( + self, + rev_map: _InterimRevisionMapType, + heads: Set[Revision], + bases: Tuple[Revision, ...], + _real_heads: Set[Revision], + _real_bases: Tuple[Revision, ...], + ) -> None: + if not rev_map: + return + if not heads or not bases: + raise CycleDetected(list(rev_map)) + total_space = { + rev.revision + for rev in self._iterate_related_revisions( + lambda r: r._versioned_down_revisions, + heads, + map_=cast(_RevisionMapType, rev_map), + ) + }.intersection( + rev.revision + for rev in self._iterate_related_revisions( + lambda r: r.nextrev, + bases, + map_=cast(_RevisionMapType, rev_map), + ) + ) + deleted_revs = set(rev_map.keys()) - total_space + if deleted_revs: + raise CycleDetected(sorted(deleted_revs)) + + if not _real_heads or not _real_bases: + raise DependencyCycleDetected(list(rev_map)) + total_space = { + rev.revision + for rev in self._iterate_related_revisions( + lambda r: r._all_down_revisions, + _real_heads, + map_=cast(_RevisionMapType, rev_map), + ) + }.intersection( + rev.revision + for rev in self._iterate_related_revisions( + lambda r: r._all_nextrev, + _real_bases, + map_=cast(_RevisionMapType, rev_map), + ) + ) + deleted_revs = set(rev_map.keys()) - total_space + if deleted_revs: + raise DependencyCycleDetected(sorted(deleted_revs)) + + def _map_branch_labels( + 
self, revisions: Collection[Revision], map_: _RevisionMapType + ) -> None: + for revision in revisions: + if revision.branch_labels: + assert revision._orig_branch_labels is not None + for branch_label in revision._orig_branch_labels: + if branch_label in map_: + map_rev = map_[branch_label] + assert map_rev is not None + raise RevisionError( + "Branch name '%s' in revision %s already " + "used by revision %s" + % ( + branch_label, + revision.revision, + map_rev.revision, + ) + ) + map_[branch_label] = revision + + def _add_branches( + self, revisions: Collection[Revision], map_: _RevisionMapType + ) -> None: + for revision in revisions: + if revision.branch_labels: + revision.branch_labels.update(revision.branch_labels) + for node in self._get_descendant_nodes( + [revision], map_, include_dependencies=False + ): + node.branch_labels.update(revision.branch_labels) + + parent = node + while ( + parent + and not parent._is_real_branch_point + and not parent.is_merge_point + ): + parent.branch_labels.update(revision.branch_labels) + if parent.down_revision: + parent = map_[parent.down_revision] + else: + break + + def _add_depends_on( + self, revisions: Collection[Revision], map_: _RevisionMapType + ) -> None: + """Resolve the 'dependencies' for each revision in a collection + in terms of actual revision ids, as opposed to branch labels or other + symbolic names. + + The collection is then assigned to the _resolved_dependencies + attribute on each revision object. + + """ + + for revision in revisions: + if revision.dependencies: + deps = [ + map_[dep] for dep in util.to_tuple(revision.dependencies) + ] + revision._resolved_dependencies = tuple( + [d.revision for d in deps if d is not None] + ) + else: + revision._resolved_dependencies = () + + def _normalize_depends_on( + self, revisions: Collection[Revision], map_: _RevisionMapType + ) -> None: + """Create a collection of "dependencies" that omits dependencies + that are already ancestor nodes for each revision in a given + collection. + + This builds upon the _resolved_dependencies collection created in the + _add_depends_on() method, looking in the fully populated revision map + for ancestors, and omitting them as the _resolved_dependencies + collection as it is copied to a new collection. The new collection is + then assigned to the _normalized_resolved_dependencies attribute on + each revision object. + + The collection is then used to determine the immediate "down revision" + identifiers for this revision. + + """ + + for revision in revisions: + if revision._resolved_dependencies: + normalized_resolved = set(revision._resolved_dependencies) + for rev in self._get_ancestor_nodes( + [revision], + include_dependencies=False, + map_=map_, + ): + if rev is revision: + continue + elif rev._resolved_dependencies: + normalized_resolved.difference_update( + rev._resolved_dependencies + ) + + revision._normalized_resolved_dependencies = tuple( + normalized_resolved + ) + else: + revision._normalized_resolved_dependencies = () + + def add_revision(self, revision: Revision, _replace: bool = False) -> None: + """add a single revision to an existing map. + + This method is for single-revision use cases, it's not + appropriate for fully populating an entire revision map. 
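+
+        A minimal sketch (assuming ``revision_map`` is a populated
+        :class:`.RevisionMap` and ``script`` is a newly generated
+        :class:`.Script`)::
+
+            revision_map.add_revision(script)
+
+        :meth:`.ScriptDirectory.generate_revision` uses this path after a
+        new revision file has been written.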
+ + """ + map_ = self._revision_map + if not _replace and revision.revision in map_: + util.warn( + "Revision %s is present more than once" % revision.revision + ) + elif _replace and revision.revision not in map_: + raise Exception("revision %s not in map" % revision.revision) + + map_[revision.revision] = revision + + revisions = [revision] + self._add_branches(revisions, map_) + self._map_branch_labels(revisions, map_) + self._add_depends_on(revisions, map_) + + if revision.is_base: + self.bases += (revision.revision,) + if revision._is_real_base: + self._real_bases += (revision.revision,) + + for downrev in revision._all_down_revisions: + if downrev not in map_: + util.warn( + "Revision %s referenced from %s is not present" + % (downrev, revision) + ) + not_none(map_[downrev]).add_nextrev(revision) + + self._normalize_depends_on(revisions, map_) + + if revision._is_real_head: + self._real_heads = tuple( + head + for head in self._real_heads + if head + not in set(revision._all_down_revisions).union( + [revision.revision] + ) + ) + (revision.revision,) + if revision.is_head: + self.heads = tuple( + head + for head in self.heads + if head + not in set(revision._versioned_down_revisions).union( + [revision.revision] + ) + ) + (revision.revision,) + + def get_current_head( + self, branch_label: Optional[str] = None + ) -> Optional[str]: + """Return the current head revision. + + If the script directory has multiple heads + due to branching, an error is raised; + :meth:`.ScriptDirectory.get_heads` should be + preferred. + + :param branch_label: optional branch name which will limit the + heads considered to those which include that branch_label. + + :return: a string revision number. + + .. seealso:: + + :meth:`.ScriptDirectory.get_heads` + + """ + current_heads: Sequence[str] = self.heads + if branch_label: + current_heads = self.filter_for_lineage( + current_heads, branch_label + ) + if len(current_heads) > 1: + raise MultipleHeads( + current_heads, + "%s@head" % branch_label if branch_label else "head", + ) + + if current_heads: + return current_heads[0] + else: + return None + + def _get_base_revisions(self, identifier: str) -> Tuple[str, ...]: + return self.filter_for_lineage(self.bases, identifier) + + def get_revisions( + self, id_: Optional[_GetRevArg] + ) -> Tuple[Optional[_RevisionOrBase], ...]: + """Return the :class:`.Revision` instances with the given rev id + or identifiers. + + May be given a single identifier, a sequence of identifiers, or the + special symbols "head" or "base". The result is a tuple of one + or more identifiers, or an empty tuple in the case of "base". + + In the cases where 'head', 'heads' is requested and the + revision map is empty, returns an empty tuple. + + Supports partial identifiers, where the given identifier + is matched against all identifiers that start with the given + characters; if there is exactly one match, that determines the + full revision. 
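+
+        A few hypothetical calls (identifiers are illustrative)::
+
+            revision_map.get_revisions("heads")           # all current heads
+            revision_map.get_revisions(("a1b2", "c3d4"))  # several ids
+            revision_map.get_revisions("ae10f8")          # partial identifier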
+ + """ + + if isinstance(id_, (list, tuple, set, frozenset)): + return sum([self.get_revisions(id_elem) for id_elem in id_], ()) + else: + resolved_id, branch_label = self._resolve_revision_number(id_) + if len(resolved_id) == 1: + try: + rint = int(resolved_id[0]) + if rint < 0: + # branch@-n -> walk down from heads + select_heads = self.get_revisions("heads") + if branch_label is not None: + select_heads = tuple( + head + for head in select_heads + if branch_label + in is_revision(head).branch_labels + ) + return tuple( + self._walk(head, steps=rint) + for head in select_heads + ) + except ValueError: + # couldn't resolve as integer + pass + return tuple( + self._revision_for_ident(rev_id, branch_label) + for rev_id in resolved_id + ) + + def get_revision(self, id_: Optional[str]) -> Optional[Revision]: + """Return the :class:`.Revision` instance with the given rev id. + + If a symbolic name such as "head" or "base" is given, resolves + the identifier into the current head or base revision. If the symbolic + name refers to multiples, :class:`.MultipleHeads` is raised. + + Supports partial identifiers, where the given identifier + is matched against all identifiers that start with the given + characters; if there is exactly one match, that determines the + full revision. + + """ + + resolved_id, branch_label = self._resolve_revision_number(id_) + if len(resolved_id) > 1: + raise MultipleHeads(resolved_id, id_) + + resolved: Union[str, Tuple[()]] = resolved_id[0] if resolved_id else () + return self._revision_for_ident(resolved, branch_label) + + def _resolve_branch(self, branch_label: str) -> Optional[Revision]: + try: + branch_rev = self._revision_map[branch_label] + except KeyError: + try: + nonbranch_rev = self._revision_for_ident(branch_label) + except ResolutionError as re: + raise ResolutionError( + "No such branch: '%s'" % branch_label, branch_label + ) from re + + else: + return nonbranch_rev + else: + return branch_rev + + def _revision_for_ident( + self, + resolved_id: Union[str, Tuple[()], None], + check_branch: Optional[str] = None, + ) -> Optional[Revision]: + branch_rev: Optional[Revision] + if check_branch: + branch_rev = self._resolve_branch(check_branch) + else: + branch_rev = None + + revision: Union[Optional[Revision], Literal[False]] + try: + revision = self._revision_map[resolved_id] + except KeyError: + # break out to avoid misleading py3k stack traces + revision = False + revs: Sequence[str] + if revision is False: + assert resolved_id + # do a partial lookup + revs = [ + x + for x in self._revision_map + if x and len(x) > 3 and x.startswith(resolved_id) + ] + + if branch_rev: + revs = self.filter_for_lineage(revs, check_branch) + if not revs: + raise ResolutionError( + "No such revision or branch '%s'%s" + % ( + resolved_id, + ( + "; please ensure at least four characters are " + "present for partial revision identifier matches" + if len(resolved_id) < 4 + else "" + ), + ), + resolved_id, + ) + elif len(revs) > 1: + raise ResolutionError( + "Multiple revisions start " + "with '%s': %s..." 
+ % (resolved_id, ", ".join("'%s'" % r for r in revs[0:3])), + resolved_id, + ) + else: + revision = self._revision_map[revs[0]] + + if check_branch and revision is not None: + assert branch_rev is not None + assert resolved_id + if not self._shares_lineage( + revision.revision, branch_rev.revision + ): + raise ResolutionError( + "Revision %s is not a member of branch '%s'" + % (revision.revision, check_branch), + resolved_id, + ) + return revision + + def _filter_into_branch_heads( + self, targets: Iterable[Optional[_RevisionOrBase]] + ) -> Set[Optional[_RevisionOrBase]]: + targets = set(targets) + + for rev in list(targets): + assert rev + if targets.intersection( + self._get_descendant_nodes([rev], include_dependencies=False) + ).difference([rev]): + targets.discard(rev) + return targets + + def filter_for_lineage( + self, + targets: Iterable[_TR], + check_against: Optional[str], + include_dependencies: bool = False, + ) -> Tuple[_TR, ...]: + id_, branch_label = self._resolve_revision_number(check_against) + + shares = [] + if branch_label: + shares.append(branch_label) + if id_: + shares.extend(id_) + + return tuple( + tg + for tg in targets + if self._shares_lineage( + tg, shares, include_dependencies=include_dependencies + ) + ) + + def _shares_lineage( + self, + target: Optional[_RevisionOrStr], + test_against_revs: Sequence[_RevisionOrStr], + include_dependencies: bool = False, + ) -> bool: + if not test_against_revs: + return True + if not isinstance(target, Revision): + resolved_target = not_none(self._revision_for_ident(target)) + else: + resolved_target = target + + resolved_test_against_revs = [ + self._revision_for_ident(test_against_rev) + if not isinstance(test_against_rev, Revision) + else test_against_rev + for test_against_rev in util.to_tuple( + test_against_revs, default=() + ) + ] + + return bool( + set( + self._get_descendant_nodes( + [resolved_target], + include_dependencies=include_dependencies, + ) + ) + .union( + self._get_ancestor_nodes( + [resolved_target], + include_dependencies=include_dependencies, + ) + ) + .intersection(resolved_test_against_revs) + ) + + def _resolve_revision_number( + self, id_: Optional[_GetRevArg] + ) -> Tuple[Tuple[str, ...], Optional[str]]: + branch_label: Optional[str] + if isinstance(id_, str) and "@" in id_: + branch_label, id_ = id_.split("@", 1) + + elif id_ is not None and ( + (isinstance(id_, tuple) and id_ and not isinstance(id_[0], str)) + or not isinstance(id_, (str, tuple)) + ): + raise RevisionError( + "revision identifier %r is not a string; ensure database " + "driver settings are correct" % (id_,) + ) + + else: + branch_label = None + + # ensure map is loaded + self._revision_map + if id_ == "heads": + if branch_label: + return ( + self.filter_for_lineage(self.heads, branch_label), + branch_label, + ) + else: + return self._real_heads, branch_label + elif id_ == "head": + current_head = self.get_current_head(branch_label) + if current_head: + return (current_head,), branch_label + else: + return (), branch_label + elif id_ == "base" or id_ is None: + return (), branch_label + else: + return util.to_tuple(id_, default=None), branch_label + + def iterate_revisions( + self, + upper: _RevisionIdentifierType, + lower: _RevisionIdentifierType, + implicit_base: bool = False, + inclusive: bool = False, + assert_relative_length: bool = True, + select_for_downgrade: bool = False, + ) -> Iterator[Revision]: + """Iterate through script revisions, starting at the given + upper revision identifier and ending at the lower. 
+ + The traversal uses strictly the `down_revision` + marker inside each migration script, so + it is a requirement that upper >= lower, + else you'll get nothing back. + + The iterator yields :class:`.Revision` objects. + + """ + fn: _CollectRevisionsProtocol + if select_for_downgrade: + fn = self._collect_downgrade_revisions + else: + fn = self._collect_upgrade_revisions + + revisions, heads = fn( + upper, + lower, + inclusive=inclusive, + implicit_base=implicit_base, + assert_relative_length=assert_relative_length, + ) + + for node in self._topological_sort(revisions, heads): + yield not_none(self.get_revision(node)) + + def _get_descendant_nodes( + self, + targets: Collection[Optional[_RevisionOrBase]], + map_: Optional[_RevisionMapType] = None, + check: bool = False, + omit_immediate_dependencies: bool = False, + include_dependencies: bool = True, + ) -> Iterator[Any]: + if omit_immediate_dependencies: + + def fn(rev: Revision) -> Iterable[str]: + if rev not in targets: + return rev._all_nextrev + else: + return rev.nextrev + + elif include_dependencies: + + def fn(rev: Revision) -> Iterable[str]: + return rev._all_nextrev + + else: + + def fn(rev: Revision) -> Iterable[str]: + return rev.nextrev + + return self._iterate_related_revisions( + fn, targets, map_=map_, check=check + ) + + def _get_ancestor_nodes( + self, + targets: Collection[Optional[_RevisionOrBase]], + map_: Optional[_RevisionMapType] = None, + check: bool = False, + include_dependencies: bool = True, + ) -> Iterator[Revision]: + if include_dependencies: + + def fn(rev: Revision) -> Iterable[str]: + return rev._normalized_down_revisions + + else: + + def fn(rev: Revision) -> Iterable[str]: + return rev._versioned_down_revisions + + return self._iterate_related_revisions( + fn, targets, map_=map_, check=check + ) + + def _iterate_related_revisions( + self, + fn: Callable[[Revision], Iterable[str]], + targets: Collection[Optional[_RevisionOrBase]], + map_: Optional[_RevisionMapType], + check: bool = False, + ) -> Iterator[Revision]: + if map_ is None: + map_ = self._revision_map + + seen = set() + todo: Deque[Revision] = collections.deque() + for target_for in targets: + target = is_revision(target_for) + todo.append(target) + if check: + per_target = set() + + while todo: + rev = todo.pop() + if check: + per_target.add(rev) + + if rev in seen: + continue + seen.add(rev) + # Check for map errors before collecting. + for rev_id in fn(rev): + next_rev = map_[rev_id] + assert next_rev is not None + if next_rev.revision != rev_id: + raise RevisionError( + "Dependency resolution failed; broken map" + ) + todo.append(next_rev) + yield rev + if check: + overlaps = per_target.intersection(targets).difference( + [target] + ) + if overlaps: + raise RevisionError( + "Requested revision %s overlaps with " + "other requested revisions %s" + % ( + target.revision, + ", ".join(r.revision for r in overlaps), + ) + ) + + def _topological_sort( + self, + revisions: Collection[Revision], + heads: Any, + ) -> List[str]: + """Yield revision ids of a collection of Revision objects in + topological sorted order (i.e. revisions always come after their + down_revisions and dependencies). Uses the order of keys in + _revision_map to sort. + + """ + + id_to_rev = self._revision_map + + def get_ancestors(rev_id: str) -> Set[str]: + return { + r.revision + for r in self._get_ancestor_nodes([id_to_rev[rev_id]]) + } + + todo = {d.revision for d in revisions} + + # Use revision map (ordered dict) key order to pre-sort. 
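+        # (revisions inserted into the map earlier sort earlier)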
+ inserted_order = list(self._revision_map) + + current_heads = list( + sorted( + {d.revision for d in heads if d.revision in todo}, + key=inserted_order.index, + ) + ) + ancestors_by_idx = [get_ancestors(rev_id) for rev_id in current_heads] + + output = [] + + current_candidate_idx = 0 + while current_heads: + candidate = current_heads[current_candidate_idx] + + for check_head_index, ancestors in enumerate(ancestors_by_idx): + # scan all the heads. see if we can continue walking + # down the current branch indicated by current_candidate_idx. + if ( + check_head_index != current_candidate_idx + and candidate in ancestors + ): + current_candidate_idx = check_head_index + # nope, another head is dependent on us, they have + # to be traversed first + break + else: + # yup, we can emit + if candidate in todo: + output.append(candidate) + todo.remove(candidate) + + # now update the heads with our ancestors. + + candidate_rev = id_to_rev[candidate] + assert candidate_rev is not None + + heads_to_add = [ + r + for r in candidate_rev._normalized_down_revisions + if r in todo and r not in current_heads + ] + + if not heads_to_add: + # no ancestors, so remove this head from the list + del current_heads[current_candidate_idx] + del ancestors_by_idx[current_candidate_idx] + current_candidate_idx = max(current_candidate_idx - 1, 0) + else: + if ( + not candidate_rev._normalized_resolved_dependencies + and len(candidate_rev._versioned_down_revisions) == 1 + ): + current_heads[current_candidate_idx] = heads_to_add[0] + + # for plain movement down a revision line without + # any mergepoints, branchpoints, or deps, we + # can update the ancestors collection directly + # by popping out the candidate we just emitted + ancestors_by_idx[current_candidate_idx].discard( + candidate + ) + + else: + # otherwise recalculate it again, things get + # complicated otherwise. This can possibly be + # improved to not run the whole ancestor thing + # each time but it was getting complicated + current_heads[current_candidate_idx] = heads_to_add[0] + current_heads.extend(heads_to_add[1:]) + ancestors_by_idx[ + current_candidate_idx + ] = get_ancestors(heads_to_add[0]) + ancestors_by_idx.extend( + get_ancestors(head) for head in heads_to_add[1:] + ) + + assert not todo + return output + + def _walk( + self, + start: Optional[Union[str, Revision]], + steps: int, + branch_label: Optional[str] = None, + no_overwalk: bool = True, + ) -> Optional[_RevisionOrBase]: + """ + Walk the requested number of :steps up (steps > 0) or down (steps < 0) + the revision tree. + + :branch_label is used to select branches only when walking up. + + If the walk goes past the boundaries of the tree and :no_overwalk is + True, None is returned, otherwise the walk terminates early. + + A RevisionError is raised if there is no unambiguous revision to + walk to. 
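+
+        A hypothetical sketch (identifier is illustrative)::
+
+            revision_map._walk("a1b2", steps=-2)  # two steps below a1b2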
+ """ + initial: Optional[_RevisionOrBase] + if isinstance(start, str): + initial = self.get_revision(start) + else: + initial = start + + children: Sequence[Optional[_RevisionOrBase]] + for _ in range(abs(steps)): + if steps > 0: + assert initial != "base" # type: ignore[comparison-overlap] + # Walk up + walk_up = [ + is_revision(rev) + for rev in self.get_revisions( + self.bases if initial is None else initial.nextrev + ) + ] + if branch_label: + children = self.filter_for_lineage(walk_up, branch_label) + else: + children = walk_up + else: + # Walk down + if initial == "base": # type: ignore[comparison-overlap] + children = () + else: + children = self.get_revisions( + self.heads + if initial is None + else initial.down_revision + ) + if not children: + children = ("base",) + if not children: + # This will return an invalid result if no_overwalk, otherwise + # further steps will stay where we are. + ret = None if no_overwalk else initial + return ret + elif len(children) > 1: + raise RevisionError("Ambiguous walk") + initial = children[0] + + return initial + + def _parse_downgrade_target( + self, + current_revisions: _RevisionIdentifierType, + target: _RevisionIdentifierType, + assert_relative_length: bool, + ) -> Tuple[Optional[str], Optional[_RevisionOrBase]]: + """ + Parse downgrade command syntax :target to retrieve the target revision + and branch label (if any) given the :current_revisions stamp of the + database. + + Returns a tuple (branch_label, target_revision) where branch_label + is a string from the command specifying the branch to consider (or + None if no branch given), and target_revision is a Revision object + which the command refers to. target_revisions is None if the command + refers to 'base'. The target may be specified in absolute form, or + relative to :current_revisions. + """ + if target is None: + return None, None + assert isinstance( + target, str + ), "Expected downgrade target in string form" + match = _relative_destination.match(target) + if match: + branch_label, symbol, relative = match.groups() + rel_int = int(relative) + if rel_int >= 0: + if symbol is None: + # Downgrading to current + n is not valid. + raise RevisionError( + "Relative revision %s didn't " + "produce %d migrations" % (relative, abs(rel_int)) + ) + # Find target revision relative to given symbol. + rev = self._walk( + symbol, + rel_int, + branch_label, + no_overwalk=assert_relative_length, + ) + if rev is None: + raise RevisionError("Walked too far") + return branch_label, rev + else: + relative_revision = symbol is None + if relative_revision: + # Find target revision relative to current state. + if branch_label: + cr_tuple = util.to_tuple(current_revisions) + symbol_list: Sequence[str] + symbol_list = self.filter_for_lineage( + cr_tuple, branch_label + ) + if not symbol_list: + # check the case where there are multiple branches + # but there is currently a single heads, since all + # other branch heads are dependent of the current + # single heads. 
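+                            # (e.g. one revision that lists the other
+                            # branches' heads among its dependencies)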
+                            all_current = cast(
+                                Set[Revision], self._get_all_current(cr_tuple)
+                            )
+                            sl_all_current = self.filter_for_lineage(
+                                all_current, branch_label
+                            )
+                            symbol_list = [
+                                r.revision if r else r  # type: ignore[misc]
+                                for r in sl_all_current
+                            ]
+
+                        assert len(symbol_list) == 1
+                        symbol = symbol_list[0]
+                    else:
+                        current_revisions = util.to_tuple(current_revisions)
+                        if not current_revisions:
+                            raise RevisionError(
+                                "Relative revision %s didn't "
+                                "produce %d migrations"
+                                % (relative, abs(rel_int))
+                            )
+                        # Have to check uniques here for duplicate rows test.
+                        if len(set(current_revisions)) > 1:
+                            util.warn(
+                                "downgrade -1 from multiple heads is "
+                                "ambiguous; "
+                                "this usage will be disallowed in a future "
+                                "release."
+                            )
+                        symbol = current_revisions[0]
+                    # Restrict iteration to just the selected branch when
+                    # ambiguous branches are involved.
+                    branch_label = symbol
+                # Walk down the tree to find downgrade target.
+                rev = self._walk(
+                    start=self.get_revision(symbol)
+                    if branch_label is None
+                    else self.get_revision("%s@%s" % (branch_label, symbol)),
+                    steps=rel_int,
+                    no_overwalk=assert_relative_length,
+                )
+                if rev is None:
+                    if relative_revision:
+                        raise RevisionError(
+                            "Relative revision %s didn't "
+                            "produce %d migrations" % (relative, abs(rel_int))
+                        )
+                    else:
+                        raise RevisionError("Walked too far")
+                return branch_label, rev
+
+        # No relative destination given, revision specified is absolute.
+        branch_label, _, symbol = target.rpartition("@")
+        if not branch_label:
+            branch_label = None
+        return branch_label, self.get_revision(symbol)
+
+    def _parse_upgrade_target(
+        self,
+        current_revisions: _RevisionIdentifierType,
+        target: _RevisionIdentifierType,
+        assert_relative_length: bool,
+    ) -> Tuple[Optional[_RevisionOrBase], ...]:
+        """
+        Parse upgrade command syntax :target to retrieve the target
+        revision, given the :current_revisions stamp of the database.
+
+        Returns a tuple of Revision objects which should be iterated/upgraded
+        to. The target may be specified in absolute form, or relative to
+        :current_revisions.
+        """
+        if isinstance(target, str):
+            match = _relative_destination.match(target)
+        else:
+            match = None
+
+        if not match:
+            # No relative destination, target is absolute.
+            return self.get_revisions(target)
+
+        current_revisions_tup: Union[str, Tuple[Optional[str], ...], None]
+        current_revisions_tup = util.to_tuple(current_revisions)
+
+        branch_label, symbol, relative_str = match.groups()
+        relative = int(relative_str)
+        if relative > 0:
+            if symbol is None:
+                if not current_revisions_tup:
+                    current_revisions_tup = (None,)
+                # Try to filter to a single target (avoid ambiguous branches).
+                start_revs = current_revisions_tup
+                if branch_label:
+                    start_revs = self.filter_for_lineage(
+                        self.get_revisions(current_revisions_tup),  # type: ignore[arg-type] # noqa: E501
+                        branch_label,
+                    )
+                    if not start_revs:
+                        # The requested branch is not a head, so we need to
+                        # backtrack to find a branchpoint.
+                        active_on_branch = self.filter_for_lineage(
+                            self._get_ancestor_nodes(
+                                self.get_revisions(current_revisions_tup)
+                            ),
+                            branch_label,
+                        )
+                        # Find the tips of this set of revisions (revisions
+                        # without children within the set).
+                        start_revs = tuple(
+                            {rev.revision for rev in active_on_branch}
+                            - {
+                                down
+                                for rev in active_on_branch
+                                for down in rev._normalized_down_revisions
+                            }
+                        )
+                        if not start_revs:
+                            # We need to go right back to base to find
+                            # a starting point for this branch.
+ start_revs = (None,) + if len(start_revs) > 1: + raise RevisionError( + "Ambiguous upgrade from multiple current revisions" + ) + # Walk up from unique target revision. + rev = self._walk( + start=start_revs[0], + steps=relative, + branch_label=branch_label, + no_overwalk=assert_relative_length, + ) + if rev is None: + raise RevisionError( + "Relative revision %s didn't " + "produce %d migrations" % (relative_str, abs(relative)) + ) + return (rev,) + else: + # Walk is relative to a given revision, not the current state. + return ( + self._walk( + start=self.get_revision(symbol), + steps=relative, + branch_label=branch_label, + no_overwalk=assert_relative_length, + ), + ) + else: + if symbol is None: + # Upgrading to current - n is not valid. + raise RevisionError( + "Relative revision %s didn't " + "produce %d migrations" % (relative, abs(relative)) + ) + return ( + self._walk( + start=self.get_revision(symbol) + if branch_label is None + else self.get_revision("%s@%s" % (branch_label, symbol)), + steps=relative, + no_overwalk=assert_relative_length, + ), + ) + + def _collect_downgrade_revisions( + self, + upper: _RevisionIdentifierType, + lower: _RevisionIdentifierType, + inclusive: bool, + implicit_base: bool, + assert_relative_length: bool, + ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: + """ + Compute the set of current revisions specified by :upper, and the + downgrade target specified by :target. Return all dependents of target + which are currently active. + + :inclusive=True includes the target revision in the set + """ + + branch_label, target_revision = self._parse_downgrade_target( + current_revisions=upper, + target=lower, + assert_relative_length=assert_relative_length, + ) + if target_revision == "base": + target_revision = None + assert target_revision is None or isinstance(target_revision, Revision) + + roots: List[Revision] + # Find candidates to drop. + if target_revision is None: + # Downgrading back to base: find all tree roots. + roots = [ + rev + for rev in self._revision_map.values() + if rev is not None and rev.down_revision is None + ] + elif inclusive: + # inclusive implies target revision should also be dropped + roots = [target_revision] + else: + # Downgrading to fixed target: find all direct children. + roots = [ + is_revision(rev) + for rev in self.get_revisions(target_revision.nextrev) + ] + + if branch_label and len(roots) > 1: + # Need to filter roots. + ancestors = { + rev.revision + for rev in self._get_ancestor_nodes( + [self._resolve_branch(branch_label)], + include_dependencies=False, + ) + } + # Intersection gives the root revisions we are trying to + # rollback with the downgrade. + roots = [ + is_revision(rev) + for rev in self.get_revisions( + {rev.revision for rev in roots}.intersection(ancestors) + ) + ] + + # Ensure we didn't throw everything away when filtering branches. + if len(roots) == 0: + raise RevisionError( + "Not a valid downgrade target from current heads" + ) + + heads = self.get_revisions(upper) + + # Aim is to drop :branch_revision; to do so we also need to drop its + # descendents and anything dependent on it. + downgrade_revisions = set( + self._get_descendant_nodes( + roots, + include_dependencies=True, + omit_immediate_dependencies=False, + ) + ) + active_revisions = set( + self._get_ancestor_nodes(heads, include_dependencies=True) + ) + + # Emit revisions to drop in reverse topological sorted order. 
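+        # Only revisions that are also currently active get dropped.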
+        downgrade_revisions.intersection_update(active_revisions)
+
+        if implicit_base:
+            # Wind other branches back to base.
+            downgrade_revisions.update(
+                active_revisions.difference(self._get_ancestor_nodes(roots))
+            )
+
+        if (
+            target_revision is not None
+            and not downgrade_revisions
+            and target_revision not in heads
+        ):
+            # Empty intersection: target revs are not present.
+
+            raise RangeNotAncestorError("Nothing to drop", upper)
+
+        return downgrade_revisions, heads
+
+    def _collect_upgrade_revisions(
+        self,
+        upper: _RevisionIdentifierType,
+        lower: _RevisionIdentifierType,
+        inclusive: bool,
+        implicit_base: bool,
+        assert_relative_length: bool,
+    ) -> Tuple[Set[Revision], Tuple[Revision, ...]]:
+        """
+        Compute the set of required revisions specified by :upper, and the
+        current set of active revisions specified by :lower. Find the
+        difference between the two to compute the required upgrades.
+
+        :inclusive=True includes the current/lower revisions in the set
+
+        :implicit_base=False only returns revisions which are downstream
+        of the current/lower revisions. Dependencies from branches with
+        different bases will not be included.
+        """
+        targets: Collection[Revision] = [
+            is_revision(rev)
+            for rev in self._parse_upgrade_target(
+                current_revisions=lower,
+                target=upper,
+                assert_relative_length=assert_relative_length,
+            )
+        ]
+
+        # assert type(targets) is tuple, "targets should be a tuple"
+
+        # Handle named bases (e.g. branch@... -> heads should only produce
+        # targets on the given branch)
+        if isinstance(lower, str) and "@" in lower:
+            branch, _, _ = lower.partition("@")
+            branch_rev = self.get_revision(branch)
+            if branch_rev is not None and branch_rev.revision == branch:
+                # A revision was used as a label; get its branch instead
+                assert len(branch_rev.branch_labels) == 1
+                branch = next(iter(branch_rev.branch_labels))
+            targets = {
+                need for need in targets if branch in need.branch_labels
+            }
+
+        required_node_set = set(
+            self._get_ancestor_nodes(
+                targets, check=True, include_dependencies=True
+            )
+        ).union(targets)
+
+        current_revisions = self.get_revisions(lower)
+        if not implicit_base and any(
+            rev not in required_node_set
+            for rev in current_revisions
+            if rev is not None
+        ):
+            raise RangeNotAncestorError(lower, upper)
+        assert (
+            type(current_revisions) is tuple
+        ), "current_revisions should be a tuple"
+
+        # Special case where lower = a relative value (get_revisions can't
+        # find it)
+        if current_revisions and current_revisions[0] is None:
+            _, rev = self._parse_downgrade_target(
+                current_revisions=upper,
+                target=lower,
+                assert_relative_length=assert_relative_length,
+            )
+            assert rev
+            if rev == "base":
+                current_revisions = tuple()
+                lower = None
+            else:
+                current_revisions = (rev,)
+                lower = rev.revision
+
+        current_node_set = set(
+            self._get_ancestor_nodes(
+                current_revisions, check=True, include_dependencies=True
+            )
+        ).union(current_revisions)
+
+        needs = required_node_set.difference(current_node_set)
+
+        # Include the lower revision (=current_revisions?) in the iteration
+        if inclusive:
+            needs.update(
+                is_revision(rev) for rev in self.get_revisions(lower)
+            )
+        # By default, base is implicit as we want all dependencies returned.
+ # Base is also implicit if lower = base + # implicit_base=False -> only return direct downstreams of + # current_revisions + if current_revisions and not implicit_base: + lower_descendents = self._get_descendant_nodes( + [is_revision(rev) for rev in current_revisions], + check=True, + include_dependencies=False, + ) + needs.intersection_update(lower_descendents) + + return needs, tuple(targets) + + def _get_all_current( + self, id_: Tuple[str, ...] + ) -> Set[Optional[_RevisionOrBase]]: + top_revs: Set[Optional[_RevisionOrBase]] + top_revs = set(self.get_revisions(id_)) + top_revs.update( + self._get_ancestor_nodes(list(top_revs), include_dependencies=True) + ) + return self._filter_into_branch_heads(top_revs) + + +class Revision: + """Base class for revisioned objects. + + The :class:`.Revision` class is the base of the more public-facing + :class:`.Script` object, which represents a migration script. + The mechanics of revision management and traversal are encapsulated + within :class:`.Revision`, while :class:`.Script` applies this logic + to Python files in a version directory. + + """ + + nextrev: FrozenSet[str] = frozenset() + """following revisions, based on down_revision only.""" + + _all_nextrev: FrozenSet[str] = frozenset() + + revision: str = None # type: ignore[assignment] + """The string revision number.""" + + down_revision: Optional[_RevIdType] = None + """The ``down_revision`` identifier(s) within the migration script. + + Note that the total set of "down" revisions is + down_revision + dependencies. + + """ + + dependencies: Optional[_RevIdType] = None + """Additional revisions which this revision is dependent on. + + From a migration standpoint, these dependencies are added to the + down_revision to form the full iteration. However, the separation + of down_revision from "dependencies" is to assist in navigating + a history that contains many branches, typically a multi-root scenario. + + """ + + branch_labels: Set[str] = None # type: ignore[assignment] + """Optional string/tuple of symbolic names to apply to this + revision's branch""" + + _resolved_dependencies: Tuple[str, ...] + _normalized_resolved_dependencies: Tuple[str, ...] 
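+    # both tuples above are assigned by the owning RevisionMap during
+    # map construction; see _add_depends_on() and _normalize_depends_on()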
+ + @classmethod + def verify_rev_id(cls, revision: str) -> None: + illegal_chars = set(revision).intersection(_revision_illegal_chars) + if illegal_chars: + raise RevisionError( + "Character(s) '%s' not allowed in revision identifier '%s'" + % (", ".join(sorted(illegal_chars)), revision) + ) + + def __init__( + self, + revision: str, + down_revision: Optional[Union[str, Tuple[str, ...]]], + dependencies: Optional[Union[str, Tuple[str, ...]]] = None, + branch_labels: Optional[Union[str, Tuple[str, ...]]] = None, + ) -> None: + if down_revision and revision in util.to_tuple(down_revision): + raise LoopDetected(revision) + elif dependencies is not None and revision in util.to_tuple( + dependencies + ): + raise DependencyLoopDetected(revision) + + self.verify_rev_id(revision) + self.revision = revision + self.down_revision = tuple_rev_as_scalar(util.to_tuple(down_revision)) + self.dependencies = tuple_rev_as_scalar(util.to_tuple(dependencies)) + self._orig_branch_labels = util.to_tuple(branch_labels, default=()) + self.branch_labels = set(self._orig_branch_labels) + + def __repr__(self) -> str: + args = [repr(self.revision), repr(self.down_revision)] + if self.dependencies: + args.append("dependencies=%r" % (self.dependencies,)) + if self.branch_labels: + args.append("branch_labels=%r" % (self.branch_labels,)) + return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) + + def add_nextrev(self, revision: Revision) -> None: + self._all_nextrev = self._all_nextrev.union([revision.revision]) + if self.revision in revision._versioned_down_revisions: + self.nextrev = self.nextrev.union([revision.revision]) + + @property + def _all_down_revisions(self) -> Tuple[str, ...]: + return util.dedupe_tuple( + util.to_tuple(self.down_revision, default=()) + + self._resolved_dependencies + ) + + @property + def _normalized_down_revisions(self) -> Tuple[str, ...]: + """return immediate down revisions for a rev, omitting dependencies + that are still dependencies of ancestors. + + """ + return util.dedupe_tuple( + util.to_tuple(self.down_revision, default=()) + + self._normalized_resolved_dependencies + ) + + @property + def _versioned_down_revisions(self) -> Tuple[str, ...]: + return util.to_tuple(self.down_revision, default=()) + + @property + def is_head(self) -> bool: + """Return True if this :class:`.Revision` is a 'head' revision. + + This is determined based on whether any other :class:`.Script` + within the :class:`.ScriptDirectory` refers to this + :class:`.Script`. Multiple heads can be present. + + """ + return not bool(self.nextrev) + + @property + def _is_real_head(self) -> bool: + return not bool(self._all_nextrev) + + @property + def is_base(self) -> bool: + """Return True if this :class:`.Revision` is a 'base' revision.""" + + return self.down_revision is None + + @property + def _is_real_base(self) -> bool: + """Return True if this :class:`.Revision` is a "real" base revision, + e.g. that it has no dependencies either.""" + + # we use self.dependencies here because this is called up + # in initialization where _real_dependencies isn't set up + # yet + return self.down_revision is None and self.dependencies is None + + @property + def is_branch_point(self) -> bool: + """Return True if this :class:`.Script` is a branch point. + + A branchpoint is defined as a :class:`.Script` which is referred + to by more than one succeeding :class:`.Script`, that is more + than one :class:`.Script` has a `down_revision` identifier pointing + here. 
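+
+        For example (hypothetical identifiers), if revisions "b1" and "c1"
+        both name "a1" as their ``down_revision``, then "a1" is a branch
+        point.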
+ + """ + return len(self.nextrev) > 1 + + @property + def _is_real_branch_point(self) -> bool: + """Return True if this :class:`.Script` is a 'real' branch point, + taking into account dependencies as well. + + """ + return len(self._all_nextrev) > 1 + + @property + def is_merge_point(self) -> bool: + """Return True if this :class:`.Script` is a merge point.""" + + return len(self._versioned_down_revisions) > 1 + + +@overload +def tuple_rev_as_scalar(rev: None) -> None: + ... + + +@overload +def tuple_rev_as_scalar( + rev: Union[Tuple[_T, ...], List[_T]] +) -> Union[_T, Tuple[_T, ...], List[_T]]: + ... + + +def tuple_rev_as_scalar( + rev: Optional[Sequence[_T]], +) -> Union[_T, Sequence[_T], None]: + if not rev: + return None + elif len(rev) == 1: + return rev[0] + else: + return rev + + +def is_revision(rev: Any) -> Revision: + assert isinstance(rev, Revision) + return rev diff --git a/venv/lib/python3.12/site-packages/alembic/script/write_hooks.py b/venv/lib/python3.12/site-packages/alembic/script/write_hooks.py new file mode 100644 index 0000000..9977147 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/script/write_hooks.py @@ -0,0 +1,179 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import shlex +import subprocess +import sys +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Mapping +from typing import Optional +from typing import Union + +from .. import util +from ..util import compat + + +REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME" + +_registry: dict = {} + + +def register(name: str) -> Callable: + """A function decorator that will register that function as a write hook. + + See the documentation linked below for an example. + + .. seealso:: + + :ref:`post_write_hooks_custom` + + + """ + + def decorate(fn): + _registry[name] = fn + return fn + + return decorate + + +def _invoke( + name: str, revision: str, options: Mapping[str, Union[str, int]] +) -> Any: + """Invokes the formatter registered for the given name. + + :param name: The name of a formatter in the registry + :param revision: A :class:`.MigrationRevision` instance + :param options: A dict containing kwargs passed to the + specified formatter. + :raises: :class:`alembic.util.CommandError` + """ + try: + hook = _registry[name] + except KeyError as ke: + raise util.CommandError( + f"No formatter with name '{name}' registered" + ) from ke + else: + return hook(revision, options) + + +def _run_hooks(path: str, hook_config: Mapping[str, str]) -> None: + """Invoke hooks for a generated revision.""" + + from .base import _split_on_space_comma + + names = _split_on_space_comma.split(hook_config.get("hooks", "")) + + for name in names: + if not name: + continue + opts = { + key[len(name) + 1 :]: hook_config[key] + for key in hook_config + if key.startswith(name + ".") + } + opts["_hook_name"] = name + try: + type_ = opts["type"] + except KeyError as ke: + raise util.CommandError( + f"Key {name}.type is required for post write hook {name!r}" + ) from ke + else: + with util.status( + f"Running post write hook {name!r}", newline=True + ): + _invoke(type_, path, opts) + + +def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]: + """Parse options from a string into a list. + + Also substitutes the revision script token with the actual filename of + the revision script. 
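+
+    For example (hypothetical values), an options string of
+    ``--fix REVISION_SCRIPT_FILENAME`` for a revision script written to
+    ``/tmp/rev.py`` parses to ``["--fix", "/tmp/rev.py"]``.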
+ + If the revision script token doesn't occur in the options string, it is + automatically prepended. + """ + if REVISION_SCRIPT_TOKEN not in cmdline_options_str: + cmdline_options_str = REVISION_SCRIPT_TOKEN + " " + cmdline_options_str + cmdline_options_list = shlex.split( + cmdline_options_str, posix=compat.is_posix + ) + cmdline_options_list = [ + option.replace(REVISION_SCRIPT_TOKEN, path) + for option in cmdline_options_list + ] + return cmdline_options_list + + +@register("console_scripts") +def console_scripts( + path: str, options: dict, ignore_output: bool = False +) -> None: + try: + entrypoint_name = options["entrypoint"] + except KeyError as ke: + raise util.CommandError( + f"Key {options['_hook_name']}.entrypoint is required for post " + f"write hook {options['_hook_name']!r}" + ) from ke + for entry in compat.importlib_metadata_get("console_scripts"): + if entry.name == entrypoint_name: + impl: Any = entry + break + else: + raise util.CommandError( + f"Could not find entrypoint console_scripts.{entrypoint_name}" + ) + cwd: Optional[str] = options.get("cwd", None) + cmdline_options_str = options.get("options", "") + cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) + + kw: Dict[str, Any] = {} + if ignore_output: + kw["stdout"] = kw["stderr"] = subprocess.DEVNULL + + subprocess.run( + [ + sys.executable, + "-c", + f"import {impl.module}; {impl.module}.{impl.attr}()", + ] + + cmdline_options_list, + cwd=cwd, + **kw, + ) + + +@register("exec") +def exec_(path: str, options: dict, ignore_output: bool = False) -> None: + try: + executable = options["executable"] + except KeyError as ke: + raise util.CommandError( + f"Key {options['_hook_name']}.executable is required for post " + f"write hook {options['_hook_name']!r}" + ) from ke + cwd: Optional[str] = options.get("cwd", None) + cmdline_options_str = options.get("options", "") + cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) + + kw: Dict[str, Any] = {} + if ignore_output: + kw["stdout"] = kw["stderr"] = subprocess.DEVNULL + + subprocess.run( + [ + executable, + *cmdline_options_list, + ], + cwd=cwd, + **kw, + ) diff --git a/venv/lib/python3.12/site-packages/alembic/templates/async/README b/venv/lib/python3.12/site-packages/alembic/templates/async/README new file mode 100644 index 0000000..e0d0858 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/async/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/alembic/templates/async/__pycache__/env.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/templates/async/__pycache__/env.cpython-312.pyc new file mode 100644 index 0000000..723d3c8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/templates/async/__pycache__/env.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/templates/async/alembic.ini.mako b/venv/lib/python3.12/site-packages/alembic/templates/async/alembic.ini.mako new file mode 100644 index 0000000..0e5f43f --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/async/alembic.ini.mako @@ -0,0 +1,114 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to ${script_location}/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/venv/lib/python3.12/site-packages/alembic/templates/async/env.py b/venv/lib/python3.12/site-packages/alembic/templates/async/env.py new file mode 100644 index 0000000..9f2d519 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/async/env.py @@ -0,0 +1,89 @@ +import asyncio +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/venv/lib/python3.12/site-packages/alembic/templates/async/script.py.mako b/venv/lib/python3.12/site-packages/alembic/templates/async/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/async/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/venv/lib/python3.12/site-packages/alembic/templates/generic/README b/venv/lib/python3.12/site-packages/alembic/templates/generic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/generic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/alembic/templates/generic/__pycache__/env.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/templates/generic/__pycache__/env.cpython-312.pyc new file mode 100644 index 0000000..799085a Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/templates/generic/__pycache__/env.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/templates/generic/alembic.ini.mako b/venv/lib/python3.12/site-packages/alembic/templates/generic/alembic.ini.mako new file mode 100644 index 0000000..29245dd --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/generic/alembic.ini.mako @@ -0,0 +1,116 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
+# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to ${script_location}/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/venv/lib/python3.12/site-packages/alembic/templates/generic/env.py b/venv/lib/python3.12/site-packages/alembic/templates/generic/env.py
new file mode 100644
index 0000000..36112a3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/templates/generic/env.py
@@ -0,0 +1,78 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/venv/lib/python3.12/site-packages/alembic/templates/generic/script.py.mako b/venv/lib/python3.12/site-packages/alembic/templates/generic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/generic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/venv/lib/python3.12/site-packages/alembic/templates/multidb/README b/venv/lib/python3.12/site-packages/alembic/templates/multidb/README new file mode 100644 index 0000000..f046ec9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/multidb/README @@ -0,0 +1,12 @@ +Rudimentary multi-database configuration. + +Multi-DB isn't vastly different from generic. The primary difference is that it +will run the migrations N times (depending on how many databases you have +configured), providing one engine name and associated context for each run. + +That engine name will then allow the migration to restrict what runs within it to +just the appropriate migrations for that engine. You can see this behavior within +the mako template. 
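+
+For example (hypothetical engine names), with `databases = engine1, engine2`
+configured, each migration runs once for "engine1" and once for "engine2",
+each run receiving that engine's name and context.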
+
+In the provided configuration, you'll need to have `databases` provided in
+alembic's config, and an `sqlalchemy.url` provided for each engine name.
diff --git a/venv/lib/python3.12/site-packages/alembic/templates/multidb/__pycache__/env.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/templates/multidb/__pycache__/env.cpython-312.pyc
new file mode 100644
index 0000000..b208184
Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/templates/multidb/__pycache__/env.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/alembic/templates/multidb/alembic.ini.mako b/venv/lib/python3.12/site-packages/alembic/templates/multidb/alembic.ini.mako
new file mode 100644
index 0000000..c7fbe48
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/templates/multidb/alembic.ini.mako
@@ -0,0 +1,121 @@
+# a multi-database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = ${script_location}
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python>=3.9 or backports.zoneinfo library.
+# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to ${script_location}/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+ +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +databases = engine1, engine2 + +[engine1] +sqlalchemy.url = driver://user:pass@localhost/dbname + +[engine2] +sqlalchemy.url = driver://user:pass@localhost/dbname2 + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/venv/lib/python3.12/site-packages/alembic/templates/multidb/env.py b/venv/lib/python3.12/site-packages/alembic/templates/multidb/env.py new file mode 100644 index 0000000..e937b64 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/templates/multidb/env.py @@ -0,0 +1,140 @@ +import logging +from logging.config import fileConfig +import re + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +USE_TWOPHASE = False + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) +logger = logging.getLogger("alembic.env") + +# gather section names referring to different +# databases. These are named "engine1", "engine2" +# in the sample .ini file. +db_names = config.get_main_option("databases", "") + +# add your model's MetaData objects here +# for 'autogenerate' support. These must be set +# up to hold just those tables targeting a +# particular database. table.tometadata() may be +# helpful here in case a "copy" of +# a MetaData is needed. +# from myapp import mymodel +# target_metadata = { +# 'engine1':mymodel.metadata1, +# 'engine2':mymodel.metadata2 +# } +target_metadata = {} + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+
+    """
+    # for the --sql use case, run migrations for each URL into
+    # individual files.
+
+    engines = {}
+    for name in re.split(r",\s*", db_names):
+        engines[name] = rec = {}
+        rec["url"] = context.config.get_section_option(name, "sqlalchemy.url")
+
+    for name, rec in engines.items():
+        logger.info("Migrating database %s" % name)
+        file_ = "%s.sql" % name
+        logger.info("Writing output to %s" % file_)
+        with open(file_, "w") as buffer:
+            context.configure(
+                url=rec["url"],
+                output_buffer=buffer,
+                target_metadata=target_metadata.get(name),
+                literal_binds=True,
+                dialect_opts={"paramstyle": "named"},
+            )
+            with context.begin_transaction():
+                context.run_migrations(engine_name=name)
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # for the direct-to-DB use case, start a transaction on all
+    # engines, then run all migrations, then commit all transactions.
+
+    engines = {}
+    for name in re.split(r",\s*", db_names):
+        engines[name] = rec = {}
+        rec["engine"] = engine_from_config(
+            context.config.get_section(name, {}),
+            prefix="sqlalchemy.",
+            poolclass=pool.NullPool,
+        )
+
+    for name, rec in engines.items():
+        engine = rec["engine"]
+        rec["connection"] = conn = engine.connect()
+
+        if USE_TWOPHASE:
+            rec["transaction"] = conn.begin_twophase()
+        else:
+            rec["transaction"] = conn.begin()
+
+    try:
+        for name, rec in engines.items():
+            logger.info("Migrating database %s" % name)
+            context.configure(
+                connection=rec["connection"],
+                upgrade_token="%s_upgrades" % name,
+                downgrade_token="%s_downgrades" % name,
+                target_metadata=target_metadata.get(name),
+            )
+            context.run_migrations(engine_name=name)
+
+        if USE_TWOPHASE:
+            for rec in engines.values():
+                rec["transaction"].prepare()
+
+        for rec in engines.values():
+            rec["transaction"].commit()
+    except:
+        for rec in engines.values():
+            rec["transaction"].rollback()
+        raise
+    finally:
+        for rec in engines.values():
+            rec["connection"].close()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/venv/lib/python3.12/site-packages/alembic/templates/multidb/script.py.mako b/venv/lib/python3.12/site-packages/alembic/templates/multidb/script.py.mako
new file mode 100644
index 0000000..6108b8a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/templates/multidb/script.py.mako
@@ -0,0 +1,47 @@
+<%!
+import re
+
+%>"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade(engine_name: str) -> None:
+    globals()["upgrade_%s" % engine_name]()
+
+
+def downgrade(engine_name: str) -> None:
+    globals()["downgrade_%s" % engine_name]()
+
+<%
+    db_names = config.get_main_option("databases")
+%>
+
+## generate an "upgrade_<xyz>() / downgrade_<xyz>()" function
+## for each database name in the ini file.
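+##
+## For illustration (an assumption based on the sample
+## ``databases = engine1, engine2`` setting), the loop below
+## would render roughly:
+##
+##     def upgrade_engine1() -> None:
+##         pass
+##
+##     def downgrade_engine1() -> None:
+##         pass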
+ +% for db_name in re.split(r',\s*', db_names): + +def upgrade_${db_name}() -> None: + ${context.get("%s_upgrades" % db_name, "pass")} + + +def downgrade_${db_name}() -> None: + ${context.get("%s_downgrades" % db_name, "pass")} + +% endfor diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__init__.py b/venv/lib/python3.12/site-packages/alembic/testing/__init__.py new file mode 100644 index 0000000..0407adf --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/__init__.py @@ -0,0 +1,29 @@ +from sqlalchemy.testing import config +from sqlalchemy.testing import emits_warning +from sqlalchemy.testing import engines +from sqlalchemy.testing import exclusions +from sqlalchemy.testing import mock +from sqlalchemy.testing import provide_metadata +from sqlalchemy.testing import skip_if +from sqlalchemy.testing import uses_deprecated +from sqlalchemy.testing.config import combinations +from sqlalchemy.testing.config import fixture +from sqlalchemy.testing.config import requirements as requires + +from .assertions import assert_raises +from .assertions import assert_raises_message +from .assertions import emits_python_deprecation_warning +from .assertions import eq_ +from .assertions import eq_ignore_whitespace +from .assertions import expect_raises +from .assertions import expect_raises_message +from .assertions import expect_sqlalchemy_deprecated +from .assertions import expect_sqlalchemy_deprecated_20 +from .assertions import expect_warnings +from .assertions import is_ +from .assertions import is_false +from .assertions import is_not_ +from .assertions import is_true +from .assertions import ne_ +from .fixtures import TestBase +from .util import resolve_lambda diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..39fff63 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/assertions.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/assertions.cpython-312.pyc new file mode 100644 index 0000000..1829cad Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/assertions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/env.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/env.cpython-312.pyc new file mode 100644 index 0000000..0bcf15a Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/env.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/fixtures.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/fixtures.cpython-312.pyc new file mode 100644 index 0000000..1656532 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/fixtures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/requirements.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/requirements.cpython-312.pyc new file mode 100644 index 0000000..e99938c Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/requirements.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/schemacompare.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/schemacompare.cpython-312.pyc
new file mode 100644
index 0000000..fea8135
Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/schemacompare.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/util.cpython-312.pyc
new file mode 100644
index 0000000..c47da97
Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/util.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/warnings.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/warnings.cpython-312.pyc
new file mode 100644
index 0000000..af37658
Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/warnings.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/alembic/testing/assertions.py b/venv/lib/python3.12/site-packages/alembic/testing/assertions.py
new file mode 100644
index 0000000..ec9593b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/testing/assertions.py
@@ -0,0 +1,167 @@
+from __future__ import annotations
+
+import contextlib
+import re
+import sys
+from typing import Any
+from typing import Dict
+
+from sqlalchemy import exc as sa_exc
+from sqlalchemy.engine import default
+from sqlalchemy.testing.assertions import _expect_warnings
+from sqlalchemy.testing.assertions import eq_ # noqa
+from sqlalchemy.testing.assertions import is_ # noqa
+from sqlalchemy.testing.assertions import is_false # noqa
+from sqlalchemy.testing.assertions import is_not_ # noqa
+from sqlalchemy.testing.assertions import is_true # noqa
+from sqlalchemy.testing.assertions import ne_ # noqa
+from sqlalchemy.util import decorator
+
+from ..util import sqla_compat
+
+
+def _assert_proper_exception_context(exception):
+    """assert that any exception we're catching does not have a __context__
+    without a __cause__, and that __suppress_context__ is never set.
+
+    Python 3 will report nested exceptions as "during the handling of
+    error X, error Y occurred". That's not what we want to do. We want
+    these exceptions in a cause chain.
+
+    """
+
+    if (
+        exception.__context__ is not exception.__cause__
+        and not exception.__suppress_context__
+    ):
+        assert False, (
+            "Exception %r was correctly raised but did not set a cause, "
+            "within context %r as its cause."
+ % (exception, exception.__context__) + ) + + +def assert_raises(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw, check_context=True) + + +def assert_raises_context_ok(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw) + + +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, check_context=True + ) + + +def assert_raises_message_context_ok( + except_cls, msg, callable_, *args, **kwargs +): + return _assert_raises(except_cls, callable_, args, kwargs, msg=msg) + + +def _assert_raises( + except_cls, callable_, args, kwargs, msg=None, check_context=False +): + with _expect_raises(except_cls, msg, check_context) as ec: + callable_(*args, **kwargs) + return ec.error + + +class _ErrorContainer: + error: Any = None + + +@contextlib.contextmanager +def _expect_raises(except_cls, msg=None, check_context=False): + ec = _ErrorContainer() + if check_context: + are_we_already_in_a_traceback = sys.exc_info()[0] + try: + yield ec + success = False + except except_cls as err: + ec.error = err + success = True + if msg is not None: + assert re.search(msg, str(err), re.UNICODE), f"{msg} !~ {err}" + if check_context and not are_we_already_in_a_traceback: + _assert_proper_exception_context(err) + print(str(err).encode("utf-8")) + + # assert outside the block so it works for AssertionError too ! + assert success, "Callable did not raise an exception" + + +def expect_raises(except_cls, check_context=True): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message(except_cls, msg, check_context=True): + return _expect_raises(except_cls, msg=msg, check_context=check_context) + + +def eq_ignore_whitespace(a, b, msg=None): + a = re.sub(r"^\s+?|\n", "", a) + a = re.sub(r" {2,}", " ", a) + b = re.sub(r"^\s+?|\n", "", b) + b = re.sub(r" {2,}", " ", b) + + assert a == b, msg or "%r != %r" % (a, b) + + +_dialect_mods: Dict[Any, Any] = {} + + +def _get_dialect(name): + if name is None or name == "default": + return default.DefaultDialect() + else: + d = sqla_compat._create_url(name).get_dialect()() + + if name == "postgresql": + d.implicit_returning = True + elif name == "mssql": + d.legacy_schema_aliasing = False + return d + + +def expect_warnings(*messages, **kw): + """Context manager which expects one or more warnings. + + With no arguments, squelches all SAWarnings emitted via + sqlalchemy.util.warn and sqlalchemy.util.warn_limited. Otherwise + pass string expressions that will match selected warnings via regex; + all non-matching warnings are sent through. + + The expect version **asserts** that the warnings were in fact seen. + + Note that the test suite sets SAWarning warnings to raise exceptions. + + """ + return _expect_warnings(Warning, messages, **kw) + + +def emits_python_deprecation_warning(*messages): + """Decorator form of expect_warnings(). + + Note that emits_warning does **not** assert that the warnings + were in fact seen. 
+ + """ + + @decorator + def decorate(fn, *args, **kw): + with _expect_warnings(DeprecationWarning, assert_=False, *messages): + return fn(*args, **kw) + + return decorate + + +def expect_sqlalchemy_deprecated(*messages, **kw): + return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw) + + +def expect_sqlalchemy_deprecated_20(*messages, **kw): + return _expect_warnings(sa_exc.RemovedIn20Warning, messages, **kw) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/env.py b/venv/lib/python3.12/site-packages/alembic/testing/env.py new file mode 100644 index 0000000..5df7ef8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/env.py @@ -0,0 +1,518 @@ +import importlib.machinery +import os +import shutil +import textwrap + +from sqlalchemy.testing import config +from sqlalchemy.testing import provision + +from . import util as testing_util +from .. import command +from .. import script +from .. import util +from ..script import Script +from ..script import ScriptDirectory + + +def _get_staging_directory(): + if provision.FOLLOWER_IDENT: + return "scratch_%s" % provision.FOLLOWER_IDENT + else: + return "scratch" + + +def staging_env(create=True, template="generic", sourceless=False): + cfg = _testing_config() + if create: + path = os.path.join(_get_staging_directory(), "scripts") + assert not os.path.exists(path), ( + "staging directory %s already exists; poor cleanup?" % path + ) + + command.init(cfg, path, template=template) + if sourceless: + try: + # do an import so that a .pyc/.pyo is generated. + util.load_python_file(path, "env.py") + except AttributeError: + # we don't have the migration context set up yet + # so running the .env py throws this exception. + # theoretically we could be using py_compiler here to + # generate .pyc/.pyo without importing but not really + # worth it. 
+ pass + assert sourceless in ( + "pep3147_envonly", + "simple", + "pep3147_everything", + ), sourceless + make_sourceless( + os.path.join(path, "env.py"), + "pep3147" if "pep3147" in sourceless else "simple", + ) + + sc = script.ScriptDirectory.from_config(cfg) + return sc + + +def clear_staging_env(): + from sqlalchemy.testing import engines + + engines.testing_reaper.close_all() + shutil.rmtree(_get_staging_directory(), True) + + +def script_file_fixture(txt): + dir_ = os.path.join(_get_staging_directory(), "scripts") + path = os.path.join(dir_, "script.py.mako") + with open(path, "w") as f: + f.write(txt) + + +def env_file_fixture(txt): + dir_ = os.path.join(_get_staging_directory(), "scripts") + txt = ( + """ +from alembic import context + +config = context.config +""" + + txt + ) + + path = os.path.join(dir_, "env.py") + pyc_path = util.pyc_file_from_path(path) + if pyc_path: + os.unlink(pyc_path) + + with open(path, "w") as f: + f.write(txt) + + +def _sqlite_file_db(tempname="foo.db", future=False, scope=None, **options): + dir_ = os.path.join(_get_staging_directory(), "scripts") + url = "sqlite:///%s/%s" % (dir_, tempname) + if scope and util.sqla_14: + options["scope"] = scope + return testing_util.testing_engine(url=url, future=future, options=options) + + +def _sqlite_testing_config(sourceless=False, future=False): + dir_ = os.path.join(_get_staging_directory(), "scripts") + url = "sqlite:///%s/foo.db" % dir_ + + sqlalchemy_future = future or ("future" in config.db.__class__.__module__) + + return _write_config_file( + """ +[alembic] +script_location = %s +sqlalchemy.url = %s +sourceless = %s +%s + +[loggers] +keys = root,sqlalchemy + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = DEBUG +handlers = +qualname = sqlalchemy.engine + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + """ + % ( + dir_, + url, + "true" if sourceless else "false", + "sqlalchemy.future = true" if sqlalchemy_future else "", + ) + ) + + +def _multi_dir_testing_config(sourceless=False, extra_version_location=""): + dir_ = os.path.join(_get_staging_directory(), "scripts") + sqlalchemy_future = "future" in config.db.__class__.__module__ + + url = "sqlite:///%s/foo.db" % dir_ + + return _write_config_file( + """ +[alembic] +script_location = %s +sqlalchemy.url = %s +sqlalchemy.future = %s +sourceless = %s +version_locations = %%(here)s/model1/ %%(here)s/model2/ %%(here)s/model3/ %s + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + """ + % ( + dir_, + url, + "true" if sqlalchemy_future else "false", + "true" if sourceless else "false", + extra_version_location, + ) + ) + + +def _no_sql_testing_config(dialect="postgresql", directives=""): + """use a postgresql url with no host so that + connections guaranteed to fail""" + dir_ = os.path.join(_get_staging_directory(), "scripts") + return _write_config_file( + """ +[alembic] +script_location = %s +sqlalchemy.url = %s:// +%s + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = 
WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + +""" + % (dir_, dialect, directives) + ) + + +def _write_config_file(text): + cfg = _testing_config() + with open(cfg.config_file_name, "w") as f: + f.write(text) + return cfg + + +def _testing_config(): + from alembic.config import Config + + if not os.access(_get_staging_directory(), os.F_OK): + os.mkdir(_get_staging_directory()) + return Config(os.path.join(_get_staging_directory(), "test_alembic.ini")) + + +def write_script( + scriptdir, rev_id, content, encoding="ascii", sourceless=False +): + old = scriptdir.revision_map.get_revision(rev_id) + path = old.path + + content = textwrap.dedent(content) + if encoding: + content = content.encode(encoding) + with open(path, "wb") as fp: + fp.write(content) + pyc_path = util.pyc_file_from_path(path) + if pyc_path: + os.unlink(pyc_path) + script = Script._from_path(scriptdir, path) + old = scriptdir.revision_map.get_revision(script.revision) + if old.down_revision != script.down_revision: + raise Exception( + "Can't change down_revision " "on a refresh operation." + ) + scriptdir.revision_map.add_revision(script, _replace=True) + + if sourceless: + make_sourceless( + path, "pep3147" if sourceless == "pep3147_everything" else "simple" + ) + + +def make_sourceless(path, style): + import py_compile + + py_compile.compile(path) + + if style == "simple": + pyc_path = util.pyc_file_from_path(path) + suffix = importlib.machinery.BYTECODE_SUFFIXES[0] + filepath, ext = os.path.splitext(path) + simple_pyc_path = filepath + suffix + shutil.move(pyc_path, simple_pyc_path) + pyc_path = simple_pyc_path + else: + assert style in ("pep3147", "simple") + pyc_path = util.pyc_file_from_path(path) + + assert os.access(pyc_path, os.F_OK) + + os.unlink(path) + + +def three_rev_fixture(cfg): + a = util.rev_id() + b = util.rev_id() + c = util.rev_id() + + script = ScriptDirectory.from_config(cfg) + script.generate_revision(a, "revision a", refresh=True, head="base") + write_script( + script, + a, + """\ +"Rev A" +revision = '%s' +down_revision = None + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 1") + + +def downgrade(): + op.execute("DROP STEP 1") + +""" + % a, + ) + + script.generate_revision(b, "revision b", refresh=True, head=a) + write_script( + script, + b, + f"""# coding: utf-8 +"Rev B, méil, %3" +revision = '{b}' +down_revision = '{a}' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 2") + + +def downgrade(): + op.execute("DROP STEP 2") + +""", + encoding="utf-8", + ) + + script.generate_revision(c, "revision c", refresh=True, head=b) + write_script( + script, + c, + """\ +"Rev C" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 3") + + +def downgrade(): + op.execute("DROP STEP 3") + +""" + % (c, b), + ) + return a, b, c + + +def multi_heads_fixture(cfg, a, b, c): + """Create a multiple head fixture from the three-revs fixture""" + + # a->b->c + # -> d -> e + # -> f + d = util.rev_id() + e = util.rev_id() + f = util.rev_id() + + script = ScriptDirectory.from_config(cfg) + script.generate_revision( + d, "revision d from b", head=b, splice=True, refresh=True + ) + write_script( + script, + d, + """\ +"Rev D" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def 
upgrade(): + op.execute("CREATE STEP 4") + + +def downgrade(): + op.execute("DROP STEP 4") + +""" + % (d, b), + ) + + script.generate_revision( + e, "revision e from d", head=d, splice=True, refresh=True + ) + write_script( + script, + e, + """\ +"Rev E" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 5") + + +def downgrade(): + op.execute("DROP STEP 5") + +""" + % (e, d), + ) + + script.generate_revision( + f, "revision f from b", head=b, splice=True, refresh=True + ) + write_script( + script, + f, + """\ +"Rev F" +revision = '%s' +down_revision = '%s' + +from alembic import op + + +def upgrade(): + op.execute("CREATE STEP 6") + + +def downgrade(): + op.execute("DROP STEP 6") + +""" + % (f, b), + ) + + return d, e, f + + +def _multidb_testing_config(engines): + """alembic.ini fixture to work exactly with the 'multidb' template""" + + dir_ = os.path.join(_get_staging_directory(), "scripts") + + sqlalchemy_future = "future" in config.db.__class__.__module__ + + databases = ", ".join(engines.keys()) + engines = "\n\n".join( + "[%s]\n" "sqlalchemy.url = %s" % (key, value.url) + for key, value in engines.items() + ) + + return _write_config_file( + """ +[alembic] +script_location = %s +sourceless = false +sqlalchemy.future = %s +databases = %s + +%s +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARN +handlers = console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + """ + % (dir_, "true" if sqlalchemy_future else "false", databases, engines) + ) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/fixtures.py b/venv/lib/python3.12/site-packages/alembic/testing/fixtures.py new file mode 100644 index 0000000..4b83a74 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/fixtures.py @@ -0,0 +1,306 @@ +from __future__ import annotations + +import configparser +from contextlib import contextmanager +import io +import re +from typing import Any +from typing import Dict + +from sqlalchemy import Column +from sqlalchemy import inspect +from sqlalchemy import MetaData +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import testing +from sqlalchemy import text +from sqlalchemy.testing import config +from sqlalchemy.testing import mock +from sqlalchemy.testing.assertions import eq_ +from sqlalchemy.testing.fixtures import TablesTest as SQLAlchemyTablesTest +from sqlalchemy.testing.fixtures import TestBase as SQLAlchemyTestBase + +import alembic +from .assertions import _get_dialect +from ..environment import EnvironmentContext +from ..migration import MigrationContext +from ..operations import Operations +from ..util import sqla_compat +from ..util.sqla_compat import create_mock_engine +from ..util.sqla_compat import sqla_14 +from ..util.sqla_compat import sqla_2 + + +testing_config = configparser.ConfigParser() +testing_config.read(["test.cfg"]) + + +class TestBase(SQLAlchemyTestBase): + is_sqlalchemy_future = sqla_2 + + @testing.fixture() + def ops_context(self, migration_context): + with migration_context.begin_transaction(_per_migration=True): + yield Operations(migration_context) + + @testing.fixture + def migration_context(self, connection): + return MigrationContext.configure( + connection, opts=dict(transaction_per_migration=True) + ) + + 
@testing.fixture + def connection(self): + with config.db.connect() as conn: + yield conn + + +class TablesTest(TestBase, SQLAlchemyTablesTest): + pass + + +if sqla_14: + from sqlalchemy.testing.fixtures import FutureEngineMixin +else: + + class FutureEngineMixin: # type:ignore[no-redef] + __requires__ = ("sqlalchemy_14",) + + +FutureEngineMixin.is_sqlalchemy_future = True + + +def capture_db(dialect="postgresql://"): + buf = [] + + def dump(sql, *multiparams, **params): + buf.append(str(sql.compile(dialect=engine.dialect))) + + engine = create_mock_engine(dialect, dump) + return engine, buf + + +_engs: Dict[Any, Any] = {} + + +@contextmanager +def capture_context_buffer(**kw): + if kw.pop("bytes_io", False): + buf = io.BytesIO() + else: + buf = io.StringIO() + + kw.update({"dialect_name": "sqlite", "output_buffer": buf}) + conf = EnvironmentContext.configure + + def configure(*arg, **opt): + opt.update(**kw) + return conf(*arg, **opt) + + with mock.patch.object(EnvironmentContext, "configure", configure): + yield buf + + +@contextmanager +def capture_engine_context_buffer(**kw): + from .env import _sqlite_file_db + from sqlalchemy import event + + buf = io.StringIO() + + eng = _sqlite_file_db() + + conn = eng.connect() + + @event.listens_for(conn, "before_cursor_execute") + def bce(conn, cursor, statement, parameters, context, executemany): + buf.write(statement + "\n") + + kw.update({"connection": conn}) + conf = EnvironmentContext.configure + + def configure(*arg, **opt): + opt.update(**kw) + return conf(*arg, **opt) + + with mock.patch.object(EnvironmentContext, "configure", configure): + yield buf + + +def op_fixture( + dialect="default", + as_sql=False, + naming_convention=None, + literal_binds=False, + native_boolean=None, +): + opts = {} + if naming_convention: + opts["target_metadata"] = MetaData(naming_convention=naming_convention) + + class buffer_: + def __init__(self): + self.lines = [] + + def write(self, msg): + msg = msg.strip() + msg = re.sub(r"[\n\t]", "", msg) + if as_sql: + # the impl produces soft tabs, + # so search for blocks of 4 spaces + msg = re.sub(r" ", "", msg) + msg = re.sub(r"\;\n*$", "", msg) + + self.lines.append(msg) + + def flush(self): + pass + + buf = buffer_() + + class ctx(MigrationContext): + def get_buf(self): + return buf + + def clear_assertions(self): + buf.lines[:] = [] + + def assert_(self, *sql): + # TODO: make this more flexible about + # whitespace and such + eq_(buf.lines, [re.sub(r"[\n\t]", "", s) for s in sql]) + + def assert_contains(self, sql): + for stmt in buf.lines: + if re.sub(r"[\n\t]", "", sql) in stmt: + return + else: + assert False, "Could not locate fragment %r in %r" % ( + sql, + buf.lines, + ) + + if as_sql: + opts["as_sql"] = as_sql + if literal_binds: + opts["literal_binds"] = literal_binds + if not sqla_14 and dialect == "mariadb": + ctx_dialect = _get_dialect("mysql") + ctx_dialect.server_version_info = (10, 4, 0, "MariaDB") + + else: + ctx_dialect = _get_dialect(dialect) + if native_boolean is not None: + ctx_dialect.supports_native_boolean = native_boolean + # this is new as of SQLAlchemy 1.2.7 and is used by SQL Server, + # which breaks assumptions in the alembic test suite + ctx_dialect.non_native_boolean_check_constraint = True + if not as_sql: + + def execute(stmt, *multiparam, **param): + if isinstance(stmt, str): + stmt = text(stmt) + assert stmt.supports_execution + sql = str(stmt.compile(dialect=ctx_dialect)) + + buf.write(sql) + + connection = mock.Mock(dialect=ctx_dialect, execute=execute) + else: + 
opts["output_buffer"] = buf + connection = None + context = ctx(ctx_dialect, connection, opts) + + alembic.op._proxy = Operations(context) + return context + + +class AlterColRoundTripFixture: + # since these tests are about syntax, use more recent SQLAlchemy as some of + # the type / server default compare logic might not work on older + # SQLAlchemy versions as seems to be the case for SQLAlchemy 1.1 on Oracle + + __requires__ = ("alter_column",) + + def setUp(self): + self.conn = config.db.connect() + self.ctx = MigrationContext.configure(self.conn) + self.op = Operations(self.ctx) + self.metadata = MetaData() + + def _compare_type(self, t1, t2): + c1 = Column("q", t1) + c2 = Column("q", t2) + assert not self.ctx.impl.compare_type( + c1, c2 + ), "Type objects %r and %r didn't compare as equivalent" % (t1, t2) + + def _compare_server_default(self, t1, s1, t2, s2): + c1 = Column("q", t1, server_default=s1) + c2 = Column("q", t2, server_default=s2) + assert not self.ctx.impl.compare_server_default( + c1, c2, s2, s1 + ), "server defaults %r and %r didn't compare as equivalent" % (s1, s2) + + def tearDown(self): + sqla_compat._safe_rollback_connection_transaction(self.conn) + with self.conn.begin(): + self.metadata.drop_all(self.conn) + self.conn.close() + + def _run_alter_col(self, from_, to_, compare=None): + column = Column( + from_.get("name", "colname"), + from_.get("type", String(10)), + nullable=from_.get("nullable", True), + server_default=from_.get("server_default", None), + # comment=from_.get("comment", None) + ) + t = Table("x", self.metadata, column) + + with sqla_compat._ensure_scope_for_ddl(self.conn): + t.create(self.conn) + insp = inspect(self.conn) + old_col = insp.get_columns("x")[0] + + # TODO: conditional comment support + self.op.alter_column( + "x", + column.name, + existing_type=column.type, + existing_server_default=column.server_default + if column.server_default is not None + else False, + existing_nullable=True if column.nullable else False, + # existing_comment=column.comment, + nullable=to_.get("nullable", None), + # modify_comment=False, + server_default=to_.get("server_default", False), + new_column_name=to_.get("name", None), + type_=to_.get("type", None), + ) + + insp = inspect(self.conn) + new_col = insp.get_columns("x")[0] + + if compare is None: + compare = to_ + + eq_( + new_col["name"], + compare["name"] if "name" in compare else column.name, + ) + self._compare_type( + new_col["type"], compare.get("type", old_col["type"]) + ) + eq_(new_col["nullable"], compare.get("nullable", column.nullable)) + self._compare_server_default( + new_col["type"], + new_col.get("default", None), + compare.get("type", old_col["type"]), + compare["server_default"].text + if "server_default" in compare + else column.server_default.arg.text + if column.server_default is not None + else None, + ) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/plugin/__init__.py b/venv/lib/python3.12/site-packages/alembic/testing/plugin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..556fa74 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc new file mode 100644 index 0000000..66b7464 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/plugin/bootstrap.py b/venv/lib/python3.12/site-packages/alembic/testing/plugin/bootstrap.py new file mode 100644 index 0000000..d4a2c55 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/plugin/bootstrap.py @@ -0,0 +1,4 @@ +""" +Bootstrapper for test framework plugins. + +""" diff --git a/venv/lib/python3.12/site-packages/alembic/testing/requirements.py b/venv/lib/python3.12/site-packages/alembic/testing/requirements.py new file mode 100644 index 0000000..6e07e28 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/requirements.py @@ -0,0 +1,210 @@ +from sqlalchemy.testing.requirements import Requirements + +from alembic import util +from alembic.util import sqla_compat +from ..testing import exclusions + + +class SuiteRequirements(Requirements): + @property + def schemas(self): + """Target database must support external schemas, and have one + named 'test_schema'.""" + + return exclusions.open() + + @property + def autocommit_isolation(self): + """target database should support 'AUTOCOMMIT' isolation level""" + + return exclusions.closed() + + @property + def materialized_views(self): + """needed for sqlalchemy compat""" + return exclusions.closed() + + @property + def unique_constraint_reflection(self): + def doesnt_have_check_uq_constraints(config): + from sqlalchemy import inspect + + insp = inspect(config.db) + try: + insp.get_unique_constraints("x") + except NotImplementedError: + return True + except TypeError: + return True + except Exception: + pass + return False + + return exclusions.skip_if(doesnt_have_check_uq_constraints) + + @property + def sequences(self): + """Target database must support SEQUENCEs.""" + + return exclusions.only_if( + [lambda config: config.db.dialect.supports_sequences], + "no sequence support", + ) + + @property + def foreign_key_match(self): + return exclusions.open() + + @property + def foreign_key_constraint_reflection(self): + return exclusions.open() + + @property + def check_constraints_w_enforcement(self): + """Target database must support check constraints + and also enforce them.""" + + return exclusions.open() + + @property + def reflects_pk_names(self): + return exclusions.closed() + + @property + def reflects_fk_options(self): + return exclusions.closed() + + @property + def sqlalchemy_14(self): + return exclusions.skip_if( + lambda config: not util.sqla_14, + "SQLAlchemy 1.4 or greater required", + ) + + @property + def sqlalchemy_1x(self): + return exclusions.skip_if( + lambda config: util.sqla_2, + "SQLAlchemy 1.x test", + ) + + @property + def sqlalchemy_2(self): + return exclusions.skip_if( + lambda config: not util.sqla_2, + "SQLAlchemy 2.x test", + ) + + @property + def asyncio(self): + def go(config): + try: + import greenlet # noqa: F401 + except ImportError: + return False + else: + return True + + return self.sqlalchemy_14 + exclusions.only_if(go) + + @property + def comments(self): + return exclusions.only_if( + lambda config: config.db.dialect.supports_comments + ) + + @property + def alter_column(self): + return exclusions.open() + + @property + def computed_columns(self): + return exclusions.closed() + + @property + def computed_columns_api(self): + return 
exclusions.only_if(
+            exclusions.BooleanPredicate(sqla_compat.has_computed)
+        )
+
+    @property
+    def computed_reflects_normally(self):
+        return exclusions.only_if(
+            exclusions.BooleanPredicate(sqla_compat.has_computed_reflection)
+        )
+
+    @property
+    def computed_reflects_as_server_default(self):
+        return exclusions.closed()
+
+    @property
+    def computed_doesnt_reflect_as_server_default(self):
+        return exclusions.closed()
+
+    @property
+    def autoincrement_on_composite_pk(self):
+        return exclusions.closed()
+
+    @property
+    def fk_ondelete_is_reflected(self):
+        return exclusions.closed()
+
+    @property
+    def fk_onupdate_is_reflected(self):
+        return exclusions.closed()
+
+    @property
+    def fk_onupdate(self):
+        return exclusions.open()
+
+    @property
+    def fk_ondelete_restrict(self):
+        return exclusions.open()
+
+    @property
+    def fk_onupdate_restrict(self):
+        return exclusions.open()
+
+    @property
+    def fk_ondelete_noaction(self):
+        return exclusions.open()
+
+    @property
+    def fk_initially(self):
+        return exclusions.closed()
+
+    @property
+    def fk_deferrable(self):
+        return exclusions.closed()
+
+    @property
+    def fk_deferrable_is_reflected(self):
+        return exclusions.closed()
+
+    @property
+    def fk_names(self):
+        return exclusions.open()
+
+    @property
+    def integer_subtype_comparisons(self):
+        return exclusions.open()
+
+    @property
+    def no_name_normalize(self):
+        return exclusions.skip_if(
+            lambda config: config.db.dialect.requires_name_normalize
+        )
+
+    @property
+    def identity_columns(self):
+        return exclusions.closed()
+
+    @property
+    def identity_columns_alter(self):
+        return exclusions.closed()
+
+    @property
+    def identity_columns_api(self):
+        return exclusions.only_if(
+            exclusions.BooleanPredicate(sqla_compat.has_identity)
+        )
diff --git a/venv/lib/python3.12/site-packages/alembic/testing/schemacompare.py b/venv/lib/python3.12/site-packages/alembic/testing/schemacompare.py
new file mode 100644
index 0000000..204cc4d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/alembic/testing/schemacompare.py
@@ -0,0 +1,169 @@
+from itertools import zip_longest
+
+from sqlalchemy import schema
+from sqlalchemy.sql.elements import ClauseList
+
+
+class CompareTable:
+    def __init__(self, table):
+        self.table = table
+
+    def __eq__(self, other):
+        if self.table.name != other.name or self.table.schema != other.schema:
+            return False
+
+        for c1, c2 in zip_longest(self.table.c, other.c):
+            if (c1 is None and c2 is not None) or (
+                c2 is None and c1 is not None
+            ):
+                return False
+            if CompareColumn(c1) != c2:
+                return False
+
+        return True
+
+    # TODO: compare constraints, indexes
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+class CompareColumn:
+    def __init__(self, column):
+        self.column = column
+
+    def __eq__(self, other):
+        return (
+            self.column.name == other.name
+            and self.column.nullable == other.nullable
+        )
+        # TODO: datatypes etc
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+class CompareIndex:
+    def __init__(self, index, name_only=False):
+        self.index = index
+        self.name_only = name_only
+
+    def __eq__(self, other):
+        if self.name_only:
+            return self.index.name == other.name
+        else:
+            return (
+                str(schema.CreateIndex(self.index))
+                == str(schema.CreateIndex(other))
+                and self.index.dialect_kwargs == other.dialect_kwargs
+            )
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __repr__(self):
+        expr = ClauseList(*self.index.expressions)
+        try:
+            expr_str = expr.compile().string
+        except Exception:
+            expr_str = str(expr)
+        return f"<CompareIndex {self.index.name}({expr_str})>"
+
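+
+# A hypothetical usage sketch (not part of the alembic sources): the wrappers
+# in this module implement __eq__ so tests can compare reflected objects
+# against expected schema constructs with plain equality, for example:
+#
+#     from sqlalchemy import Column, Integer, MetaData, Table
+#     expected = Table("t", MetaData(), Column("id", Integer, nullable=False))
+#     assert CompareTable(expected) == some_reflected_table  # hypothetical name
+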
+class CompareCheckConstraint: + def __init__(self, constraint): + self.constraint = constraint + + def __eq__(self, other): + return ( + isinstance(other, schema.CheckConstraint) + and self.constraint.name == other.name + and (str(self.constraint.sqltext) == str(other.sqltext)) + and (other.table.name == self.constraint.table.name) + and other.table.schema == self.constraint.table.schema + ) + + def __ne__(self, other): + return not self.__eq__(other) + + +class CompareForeignKey: + def __init__(self, constraint): + self.constraint = constraint + + def __eq__(self, other): + r1 = ( + isinstance(other, schema.ForeignKeyConstraint) + and self.constraint.name == other.name + and (other.table.name == self.constraint.table.name) + and other.table.schema == self.constraint.table.schema + ) + if not r1: + return False + for c1, c2 in zip_longest(self.constraint.columns, other.columns): + if (c1 is None and c2 is not None) or ( + c2 is None and c1 is not None + ): + return False + if CompareColumn(c1) != c2: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + +class ComparePrimaryKey: + def __init__(self, constraint): + self.constraint = constraint + + def __eq__(self, other): + r1 = ( + isinstance(other, schema.PrimaryKeyConstraint) + and self.constraint.name == other.name + and (other.table.name == self.constraint.table.name) + and other.table.schema == self.constraint.table.schema + ) + if not r1: + return False + + for c1, c2 in zip_longest(self.constraint.columns, other.columns): + if (c1 is None and c2 is not None) or ( + c2 is None and c1 is not None + ): + return False + if CompareColumn(c1) != c2: + return False + + return True + + def __ne__(self, other): + return not self.__eq__(other) + + +class CompareUniqueConstraint: + def __init__(self, constraint): + self.constraint = constraint + + def __eq__(self, other): + r1 = ( + isinstance(other, schema.UniqueConstraint) + and self.constraint.name == other.name + and (other.table.name == self.constraint.table.name) + and other.table.schema == self.constraint.table.schema + ) + if not r1: + return False + + for c1, c2 in zip_longest(self.constraint.columns, other.columns): + if (c1 is None and c2 is not None) or ( + c2 is None and c1 is not None + ): + return False + if CompareColumn(c1) != c2: + return False + + return True + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__init__.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/__init__.py new file mode 100644 index 0000000..3da498d --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/__init__.py @@ -0,0 +1,7 @@ +from .test_autogen_comments import * # noqa +from .test_autogen_computed import * # noqa +from .test_autogen_diffs import * # noqa +from .test_autogen_fks import * # noqa +from .test_autogen_identity import * # noqa +from .test_environment import * # noqa +from .test_op import * # noqa diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..81d847c Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc new file mode 100644 index 0000000..761e38b Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc new file mode 100644 index 0000000..2ae0b38 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc new file mode 100644 index 0000000..29b2365 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc new file mode 100644 index 0000000..9250474 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc new file mode 100644 index 0000000..c568cac Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc new file mode 100644 index 0000000..717b62d Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc new file mode 100644 index 0000000..0d59f3e Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_op.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_op.cpython-312.pyc new file mode 100644 index 0000000..098e65a Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_op.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/_autogen_fixtures.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/_autogen_fixtures.py new file mode 100644 index 0000000..d838ebe --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/_autogen_fixtures.py @@ -0,0 +1,335 @@ +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import Set + +from sqlalchemy 
import CHAR +from sqlalchemy import CheckConstraint +from sqlalchemy import Column +from sqlalchemy import event +from sqlalchemy import ForeignKey +from sqlalchemy import Index +from sqlalchemy import inspect +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import Numeric +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy import Text +from sqlalchemy import text +from sqlalchemy import UniqueConstraint + +from ... import autogenerate +from ... import util +from ...autogenerate import api +from ...ddl.base import _fk_spec +from ...migration import MigrationContext +from ...operations import ops +from ...testing import config +from ...testing import eq_ +from ...testing.env import clear_staging_env +from ...testing.env import staging_env + +names_in_this_test: Set[Any] = set() + + +@event.listens_for(Table, "after_parent_attach") +def new_table(table, parent): + names_in_this_test.add(table.name) + + +def _default_include_object(obj, name, type_, reflected, compare_to): + if type_ == "table": + return name in names_in_this_test + else: + return True + + +_default_object_filters: Any = _default_include_object + +_default_name_filters: Any = None + + +class ModelOne: + __requires__ = ("unique_constraint_reflection",) + + schema: Any = None + + @classmethod + def _get_db_schema(cls): + schema = cls.schema + + m = MetaData(schema=schema) + + Table( + "user", + m, + Column("id", Integer, primary_key=True), + Column("name", String(50)), + Column("a1", Text), + Column("pw", String(50)), + Index("pw_idx", "pw"), + ) + + Table( + "address", + m, + Column("id", Integer, primary_key=True), + Column("email_address", String(100), nullable=False), + ) + + Table( + "order", + m, + Column("order_id", Integer, primary_key=True), + Column( + "amount", + Numeric(8, 2), + nullable=False, + server_default=text("0"), + ), + CheckConstraint("amount >= 0", name="ck_order_amount"), + ) + + Table( + "extra", + m, + Column("x", CHAR), + Column("uid", Integer, ForeignKey("user.id")), + ) + + return m + + @classmethod + def _get_model_schema(cls): + schema = cls.schema + + m = MetaData(schema=schema) + + Table( + "user", + m, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", Text, server_default="x"), + ) + + Table( + "address", + m, + Column("id", Integer, primary_key=True), + Column("email_address", String(100), nullable=False), + Column("street", String(50)), + UniqueConstraint("email_address", name="uq_email"), + ) + + Table( + "order", + m, + Column("order_id", Integer, primary_key=True), + Column( + "amount", + Numeric(10, 2), + nullable=True, + server_default=text("0"), + ), + Column("user_id", Integer, ForeignKey("user.id")), + CheckConstraint("amount > -1", name="ck_order_amount"), + ) + + Table( + "item", + m, + Column("id", Integer, primary_key=True), + Column("description", String(100)), + Column("order_id", Integer, ForeignKey("order.order_id")), + CheckConstraint("len(description) > 5"), + ) + return m + + +class _ComparesFKs: + def _assert_fk_diff( + self, + diff, + type_, + source_table, + source_columns, + target_table, + target_columns, + name=None, + conditional_name=None, + source_schema=None, + onupdate=None, + ondelete=None, + initially=None, + deferrable=None, + ): + # the public API for ForeignKeyConstraint was not very rich + # in 0.7, 0.8, so here we use the well-known but slightly + # private API to get at its elements + ( + fk_source_schema, + fk_source_table, + 
fk_source_columns, + fk_target_schema, + fk_target_table, + fk_target_columns, + fk_onupdate, + fk_ondelete, + fk_deferrable, + fk_initially, + ) = _fk_spec(diff[1]) + + eq_(diff[0], type_) + eq_(fk_source_table, source_table) + eq_(fk_source_columns, source_columns) + eq_(fk_target_table, target_table) + eq_(fk_source_schema, source_schema) + eq_(fk_onupdate, onupdate) + eq_(fk_ondelete, ondelete) + eq_(fk_initially, initially) + eq_(fk_deferrable, deferrable) + + eq_([elem.column.name for elem in diff[1].elements], target_columns) + if conditional_name is not None: + if conditional_name == "servergenerated": + fks = inspect(self.bind).get_foreign_keys(source_table) + server_fk_name = fks[0]["name"] + eq_(diff[1].name, server_fk_name) + else: + eq_(diff[1].name, conditional_name) + else: + eq_(diff[1].name, name) + + +class AutogenTest(_ComparesFKs): + def _flatten_diffs(self, diffs): + for d in diffs: + if isinstance(d, list): + yield from self._flatten_diffs(d) + else: + yield d + + @classmethod + def _get_bind(cls): + return config.db + + configure_opts: Dict[Any, Any] = {} + + @classmethod + def setup_class(cls): + staging_env() + cls.bind = cls._get_bind() + cls.m1 = cls._get_db_schema() + cls.m1.create_all(cls.bind) + cls.m2 = cls._get_model_schema() + + @classmethod + def teardown_class(cls): + cls.m1.drop_all(cls.bind) + clear_staging_env() + + def setUp(self): + self.conn = conn = self.bind.connect() + ctx_opts = { + "compare_type": True, + "compare_server_default": True, + "target_metadata": self.m2, + "upgrade_token": "upgrades", + "downgrade_token": "downgrades", + "alembic_module_prefix": "op.", + "sqlalchemy_module_prefix": "sa.", + "include_object": _default_object_filters, + "include_name": _default_name_filters, + } + if self.configure_opts: + ctx_opts.update(self.configure_opts) + self.context = context = MigrationContext.configure( + connection=conn, opts=ctx_opts + ) + + self.autogen_context = api.AutogenContext(context, self.m2) + + def tearDown(self): + self.conn.close() + + def _update_context( + self, object_filters=None, name_filters=None, include_schemas=None + ): + if include_schemas is not None: + self.autogen_context.opts["include_schemas"] = include_schemas + if object_filters is not None: + self.autogen_context._object_filters = [object_filters] + if name_filters is not None: + self.autogen_context._name_filters = [name_filters] + return self.autogen_context + + +class AutogenFixtureTest(_ComparesFKs): + def _fixture( + self, + m1, + m2, + include_schemas=False, + opts=None, + object_filters=_default_object_filters, + name_filters=_default_name_filters, + return_ops=False, + max_identifier_length=None, + ): + if max_identifier_length: + dialect = self.bind.dialect + existing_length = dialect.max_identifier_length + dialect.max_identifier_length = ( + dialect._user_defined_max_identifier_length + ) = max_identifier_length + try: + self._alembic_metadata, model_metadata = m1, m2 + for m in util.to_list(self._alembic_metadata): + m.create_all(self.bind) + + with self.bind.connect() as conn: + ctx_opts = { + "compare_type": True, + "compare_server_default": True, + "target_metadata": model_metadata, + "upgrade_token": "upgrades", + "downgrade_token": "downgrades", + "alembic_module_prefix": "op.", + "sqlalchemy_module_prefix": "sa.", + "include_object": object_filters, + "include_name": name_filters, + "include_schemas": include_schemas, + } + if opts: + ctx_opts.update(opts) + self.context = context = MigrationContext.configure( + connection=conn, opts=ctx_opts 
+ ) + + autogen_context = api.AutogenContext(context, model_metadata) + uo = ops.UpgradeOps(ops=[]) + autogenerate._produce_net_changes(autogen_context, uo) + + if return_ops: + return uo + else: + return uo.as_diffs() + finally: + if max_identifier_length: + dialect = self.bind.dialect + dialect.max_identifier_length = ( + dialect._user_defined_max_identifier_length + ) = existing_length + + def setUp(self): + staging_env() + self.bind = config.db + + def tearDown(self): + if hasattr(self, "_alembic_metadata"): + for m in util.to_list(self._alembic_metadata): + m.drop_all(self.bind) + clear_staging_env() diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_comments.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_comments.py new file mode 100644 index 0000000..7ef074f --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_comments.py @@ -0,0 +1,242 @@ +from sqlalchemy import Column +from sqlalchemy import Float +from sqlalchemy import MetaData +from sqlalchemy import String +from sqlalchemy import Table + +from ._autogen_fixtures import AutogenFixtureTest +from ...testing import eq_ +from ...testing import mock +from ...testing import TestBase + + +class AutogenerateCommentsTest(AutogenFixtureTest, TestBase): + __backend__ = True + + __requires__ = ("comments",) + + def test_existing_table_comment_no_change(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + comment="this is some table", + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + comment="this is some table", + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs, []) + + def test_add_table_comment(self): + m1 = MetaData() + m2 = MetaData() + + Table("some_table", m1, Column("test", String(10), primary_key=True)) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + comment="this is some table", + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs[0][0], "add_table_comment") + eq_(diffs[0][1].comment, "this is some table") + eq_(diffs[0][2], None) + + def test_remove_table_comment(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + comment="this is some table", + ) + + Table("some_table", m2, Column("test", String(10), primary_key=True)) + + diffs = self._fixture(m1, m2) + + eq_(diffs[0][0], "remove_table_comment") + eq_(diffs[0][1].comment, None) + + def test_alter_table_comment(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + comment="this is some table", + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + comment="this is also some table", + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs[0][0], "add_table_comment") + eq_(diffs[0][1].comment, "this is also some table") + eq_(diffs[0][2], "this is some table") + + def test_existing_column_comment_no_change(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + Column("amount", Float, comment="the amount"), + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + Column("amount", Float, comment="the amount"), + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs, []) + + def test_add_column_comment(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + 
Column("test", String(10), primary_key=True), + Column("amount", Float), + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + Column("amount", Float, comment="the amount"), + ) + + diffs = self._fixture(m1, m2) + eq_( + diffs, + [ + [ + ( + "modify_comment", + None, + "some_table", + "amount", + { + "existing_nullable": True, + "existing_type": mock.ANY, + "existing_server_default": False, + }, + None, + "the amount", + ) + ] + ], + ) + + def test_remove_column_comment(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + Column("amount", Float, comment="the amount"), + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + Column("amount", Float), + ) + + diffs = self._fixture(m1, m2) + eq_( + diffs, + [ + [ + ( + "modify_comment", + None, + "some_table", + "amount", + { + "existing_nullable": True, + "existing_type": mock.ANY, + "existing_server_default": False, + }, + "the amount", + None, + ) + ] + ], + ) + + def test_alter_column_comment(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + Column("amount", Float, comment="the amount"), + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + Column("amount", Float, comment="the adjusted amount"), + ) + + diffs = self._fixture(m1, m2) + + eq_( + diffs, + [ + [ + ( + "modify_comment", + None, + "some_table", + "amount", + { + "existing_nullable": True, + "existing_type": mock.ANY, + "existing_server_default": False, + }, + "the amount", + "the adjusted amount", + ) + ] + ], + ) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_computed.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_computed.py new file mode 100644 index 0000000..01a89a1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_computed.py @@ -0,0 +1,203 @@ +import sqlalchemy as sa +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import Table + +from ._autogen_fixtures import AutogenFixtureTest +from ... 
import testing +from ...testing import config +from ...testing import eq_ +from ...testing import exclusions +from ...testing import is_ +from ...testing import is_true +from ...testing import mock +from ...testing import TestBase + + +class AutogenerateComputedTest(AutogenFixtureTest, TestBase): + __requires__ = ("computed_columns",) + __backend__ = True + + def test_add_computed_column(self): + m1 = MetaData() + m2 = MetaData() + + Table("user", m1, Column("id", Integer, primary_key=True)) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("foo", Integer, sa.Computed("5")), + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs[0][0], "add_column") + eq_(diffs[0][2], "user") + eq_(diffs[0][3].name, "foo") + c = diffs[0][3].computed + + is_true(isinstance(c, sa.Computed)) + is_(c.persisted, None) + eq_(str(c.sqltext), "5") + + def test_remove_computed_column(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("foo", Integer, sa.Computed("5")), + ) + + Table("user", m2, Column("id", Integer, primary_key=True)) + + diffs = self._fixture(m1, m2) + + eq_(diffs[0][0], "remove_column") + eq_(diffs[0][2], "user") + c = diffs[0][3] + eq_(c.name, "foo") + + if config.requirements.computed_reflects_normally.enabled: + is_true(isinstance(c.computed, sa.Computed)) + else: + is_(c.computed, None) + + if config.requirements.computed_reflects_as_server_default.enabled: + is_true(isinstance(c.server_default, sa.DefaultClause)) + eq_(str(c.server_default.arg.text), "5") + elif config.requirements.computed_reflects_normally.enabled: + is_true(isinstance(c.computed, sa.Computed)) + else: + is_(c.computed, None) + + @testing.combinations( + lambda: (None, sa.Computed("bar*5")), + (lambda: (sa.Computed("bar*5"), None)), + lambda: ( + sa.Computed("bar*5"), + sa.Computed("bar * 42", persisted=True), + ), + lambda: (sa.Computed("bar*5"), sa.Computed("bar * 42")), + ) + @config.requirements.computed_reflects_normally + def test_cant_change_computed_warning(self, test_case): + arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) + m1 = MetaData() + m2 = MetaData() + + arg_before = [] if arg_before is None else [arg_before] + arg_after = [] if arg_after is None else [arg_after] + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("bar", Integer), + Column("foo", Integer, *arg_before), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("bar", Integer), + Column("foo", Integer, *arg_after), + ) + + with mock.patch("alembic.util.warn") as mock_warn: + diffs = self._fixture(m1, m2) + + eq_( + mock_warn.mock_calls, + [mock.call("Computed default on user.foo cannot be modified")], + ) + + eq_(list(diffs), []) + + @testing.combinations( + lambda: (None, None), + lambda: (sa.Computed("5"), sa.Computed("5")), + lambda: (sa.Computed("bar*5"), sa.Computed("bar*5")), + ( + lambda: (sa.Computed("bar*5"), None), + config.requirements.computed_doesnt_reflect_as_server_default, + ), + ) + def test_computed_unchanged(self, test_case): + arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) + m1 = MetaData() + m2 = MetaData() + + arg_before = [] if arg_before is None else [arg_before] + arg_after = [] if arg_after is None else [arg_after] + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("bar", Integer), + Column("foo", Integer, *arg_before), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + 
Column("bar", Integer), + Column("foo", Integer, *arg_after), + ) + + with mock.patch("alembic.util.warn") as mock_warn: + diffs = self._fixture(m1, m2) + eq_(mock_warn.mock_calls, []) + + eq_(list(diffs), []) + + @config.requirements.computed_reflects_as_server_default + def test_remove_computed_default_on_computed(self): + """Asserts the current behavior which is that on PG and Oracle, + the GENERATED ALWAYS AS is reflected as a server default which we can't + tell is actually "computed", so these come out as a modification to + the server default. + + """ + m1 = MetaData() + m2 = MetaData() + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("bar", Integer), + Column("foo", Integer, sa.Computed("bar + 42")), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("bar", Integer), + Column("foo", Integer), + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs[0][0][0], "modify_default") + eq_(diffs[0][0][2], "user") + eq_(diffs[0][0][3], "foo") + old = diffs[0][0][-2] + new = diffs[0][0][-1] + + is_(new, None) + is_true(isinstance(old, sa.DefaultClause)) + + if exclusions.against(config, "postgresql"): + eq_(str(old.arg.text), "(bar + 42)") + elif exclusions.against(config, "oracle"): + eq_(str(old.arg.text), '"BAR"+42') diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_diffs.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_diffs.py new file mode 100644 index 0000000..75bcd37 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_diffs.py @@ -0,0 +1,273 @@ +from sqlalchemy import BigInteger +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import Table +from sqlalchemy.testing import in_ + +from ._autogen_fixtures import AutogenFixtureTest +from ... 
import testing +from ...testing import config +from ...testing import eq_ +from ...testing import is_ +from ...testing import TestBase + + +class AlterColumnTest(AutogenFixtureTest, TestBase): + __backend__ = True + + @testing.combinations((True,), (False,)) + @config.requirements.comments + def test_all_existings_filled(self, pk): + m1 = MetaData() + m2 = MetaData() + + Table("a", m1, Column("x", Integer, primary_key=pk)) + Table("a", m2, Column("x", Integer, comment="x", primary_key=pk)) + + alter_col = self._assert_alter_col(m1, m2, pk) + eq_(alter_col.modify_comment, "x") + + @testing.combinations((True,), (False,)) + @config.requirements.comments + def test_all_existings_filled_in_notnull(self, pk): + m1 = MetaData() + m2 = MetaData() + + Table("a", m1, Column("x", Integer, nullable=False, primary_key=pk)) + Table( + "a", + m2, + Column("x", Integer, nullable=False, comment="x", primary_key=pk), + ) + + self._assert_alter_col(m1, m2, pk, nullable=False) + + @testing.combinations((True,), (False,)) + @config.requirements.comments + def test_all_existings_filled_in_comment(self, pk): + m1 = MetaData() + m2 = MetaData() + + Table("a", m1, Column("x", Integer, comment="old", primary_key=pk)) + Table("a", m2, Column("x", Integer, comment="new", primary_key=pk)) + + alter_col = self._assert_alter_col(m1, m2, pk) + eq_(alter_col.existing_comment, "old") + + @testing.combinations((True,), (False,)) + @config.requirements.comments + def test_all_existings_filled_in_server_default(self, pk): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", m1, Column("x", Integer, server_default="5", primary_key=pk) + ) + Table( + "a", + m2, + Column( + "x", Integer, server_default="5", comment="new", primary_key=pk + ), + ) + + alter_col = self._assert_alter_col(m1, m2, pk) + in_("5", alter_col.existing_server_default.arg.text) + + def _assert_alter_col(self, m1, m2, pk, nullable=None): + ops = self._fixture(m1, m2, return_ops=True) + modify_table = ops.ops[-1] + alter_col = modify_table.ops[0] + + if nullable is None: + eq_(alter_col.existing_nullable, not pk) + else: + eq_(alter_col.existing_nullable, nullable) + assert alter_col.existing_type._compare_type_affinity(Integer()) + return alter_col + + +class AutoincrementTest(AutogenFixtureTest, TestBase): + __backend__ = True + __requires__ = ("integer_subtype_comparisons",) + + def test_alter_column_autoincrement_none(self): + m1 = MetaData() + m2 = MetaData() + + Table("a", m1, Column("x", Integer, nullable=False)) + Table("a", m2, Column("x", Integer, nullable=True)) + + ops = self._fixture(m1, m2, return_ops=True) + assert "autoincrement" not in ops.ops[0].ops[0].kw + + def test_alter_column_autoincrement_pk_false(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("x", Integer, primary_key=True, autoincrement=False), + ) + Table( + "a", + m2, + Column("x", BigInteger, primary_key=True, autoincrement=False), + ) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], False) + + def test_alter_column_autoincrement_pk_implicit_true(self): + m1 = MetaData() + m2 = MetaData() + + Table("a", m1, Column("x", Integer, primary_key=True)) + Table("a", m2, Column("x", BigInteger, primary_key=True)) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], True) + + def test_alter_column_autoincrement_pk_explicit_true(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", m1, Column("x", Integer, primary_key=True, autoincrement=True) + ) + Table( + "a", + m2, 
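+            # the Integer -> BigInteger change is what produces the
+            # alter_column op; the explicit autoincrement=True is expected
+            # to carry through into its kw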
+ Column("x", BigInteger, primary_key=True, autoincrement=True), + ) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], True) + + def test_alter_column_autoincrement_nonpk_false(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("id", Integer, primary_key=True), + Column("x", Integer, autoincrement=False), + ) + Table( + "a", + m2, + Column("id", Integer, primary_key=True), + Column("x", BigInteger, autoincrement=False), + ) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], False) + + def test_alter_column_autoincrement_nonpk_implicit_false(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("id", Integer, primary_key=True), + Column("x", Integer), + ) + Table( + "a", + m2, + Column("id", Integer, primary_key=True), + Column("x", BigInteger), + ) + + ops = self._fixture(m1, m2, return_ops=True) + assert "autoincrement" not in ops.ops[0].ops[0].kw + + def test_alter_column_autoincrement_nonpk_explicit_true(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("id", Integer, primary_key=True, autoincrement=False), + Column("x", Integer, autoincrement=True), + ) + Table( + "a", + m2, + Column("id", Integer, primary_key=True, autoincrement=False), + Column("x", BigInteger, autoincrement=True), + ) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], True) + + def test_alter_column_autoincrement_compositepk_false(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("id", Integer, primary_key=True), + Column("x", Integer, primary_key=True, autoincrement=False), + ) + Table( + "a", + m2, + Column("id", Integer, primary_key=True), + Column("x", BigInteger, primary_key=True, autoincrement=False), + ) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], False) + + def test_alter_column_autoincrement_compositepk_implicit_false(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("id", Integer, primary_key=True), + Column("x", Integer, primary_key=True), + ) + Table( + "a", + m2, + Column("id", Integer, primary_key=True), + Column("x", BigInteger, primary_key=True), + ) + + ops = self._fixture(m1, m2, return_ops=True) + assert "autoincrement" not in ops.ops[0].ops[0].kw + + @config.requirements.autoincrement_on_composite_pk + def test_alter_column_autoincrement_compositepk_explicit_true(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "a", + m1, + Column("id", Integer, primary_key=True, autoincrement=False), + Column("x", Integer, primary_key=True, autoincrement=True), + # on SQLA 1.0 and earlier, this being present + # trips the "add KEY for the primary key" so that the + # AUTO_INCREMENT keyword is accepted by MySQL. SQLA 1.1 and + # greater the columns are just reorganized. 
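+            # (mysql_engine is a MySQL-only dialect keyword; other backends
+            # simply ignore it)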
+ mysql_engine="InnoDB", + ) + Table( + "a", + m2, + Column("id", Integer, primary_key=True, autoincrement=False), + Column("x", BigInteger, primary_key=True, autoincrement=True), + ) + + ops = self._fixture(m1, m2, return_ops=True) + is_(ops.ops[0].ops[0].kw["autoincrement"], True) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_fks.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_fks.py new file mode 100644 index 0000000..0240b98 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_fks.py @@ -0,0 +1,1190 @@ +from sqlalchemy import Column +from sqlalchemy import ForeignKeyConstraint +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import String +from sqlalchemy import Table + +from ._autogen_fixtures import AutogenFixtureTest +from ...testing import combinations +from ...testing import config +from ...testing import eq_ +from ...testing import mock +from ...testing import TestBase + + +class AutogenerateForeignKeysTest(AutogenFixtureTest, TestBase): + __backend__ = True + __requires__ = ("foreign_key_constraint_reflection",) + + def test_remove_fk(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", String(10)), + ForeignKeyConstraint(["test2"], ["some_table.test"]), + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", String(10)), + ) + + diffs = self._fixture(m1, m2) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["test2"], + "some_table", + ["test"], + conditional_name="servergenerated", + ) + + def test_add_fk(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id", Integer, primary_key=True), + Column("test", String(10)), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", String(10)), + ) + + Table( + "some_table", + m2, + Column("id", Integer, primary_key=True), + Column("test", String(10)), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", String(10)), + ForeignKeyConstraint(["test2"], ["some_table.test"]), + ) + + diffs = self._fixture(m1, m2) + + self._assert_fk_diff( + diffs[0], "add_fk", "user", ["test2"], "some_table", ["test"] + ) + + def test_no_change(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id", Integer, primary_key=True), + Column("test", String(10)), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", Integer), + ForeignKeyConstraint(["test2"], ["some_table.id"]), + ) + + Table( + "some_table", + m2, + Column("id", Integer, primary_key=True), + Column("test", String(10)), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), 
nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", Integer), + ForeignKeyConstraint(["test2"], ["some_table.id"]), + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs, []) + + def test_no_change_composite_fk(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ForeignKeyConstraint( + ["other_id_1", "other_id_2"], + ["some_table.id_1", "some_table.id_2"], + ), + ) + + Table( + "some_table", + m2, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ForeignKeyConstraint( + ["other_id_1", "other_id_2"], + ["some_table.id_1", "some_table.id_2"], + ), + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs, []) + + def test_casing_convention_changed_so_put_drops_first(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("test", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", String(10)), + ForeignKeyConstraint(["test2"], ["some_table.test"], name="MyFK"), + ) + + Table( + "some_table", + m2, + Column("test", String(10), primary_key=True), + ) + + # foreign key autogen currently does not take "name" into account, + # so change the def just for the purposes of testing the + # add/drop order for now. 
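+        # the replacement FK below differs in name ("myfk" vs. "MyFK") and in
+        # source column ("a1" vs. "test2"), so autogen is expected to emit
+        # the remove_fk before the add_fk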
+ Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("test2", String(10)), + ForeignKeyConstraint(["a1"], ["some_table.test"], name="myfk"), + ) + + diffs = self._fixture(m1, m2) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["test2"], + "some_table", + ["test"], + name="MyFK" if config.requirements.fk_names.enabled else None, + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["a1"], + "some_table", + ["test"], + name="myfk", + ) + + def test_add_composite_fk_with_name(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ) + + Table( + "some_table", + m2, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ForeignKeyConstraint( + ["other_id_1", "other_id_2"], + ["some_table.id_1", "some_table.id_2"], + name="fk_test_name", + ), + ) + + diffs = self._fixture(m1, m2) + self._assert_fk_diff( + diffs[0], + "add_fk", + "user", + ["other_id_1", "other_id_2"], + "some_table", + ["id_1", "id_2"], + name="fk_test_name", + ) + + @config.requirements.no_name_normalize + def test_remove_composite_fk(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ForeignKeyConstraint( + ["other_id_1", "other_id_2"], + ["some_table.id_1", "some_table.id_2"], + name="fk_test_name", + ), + ) + + Table( + "some_table", + m2, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("a1", String(10), server_default="x"), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ) + + diffs = self._fixture(m1, m2) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["other_id_1", "other_id_2"], + "some_table", + ["id_1", "id_2"], + conditional_name="fk_test_name", + ) + + def test_add_fk_colkeys(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ) + + Table( + "some_table", + m2, + Column("id_1", String(10), key="tid1", primary_key=True), + Column("id_2", String(10), key="tid2", primary_key=True), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("other_id_1", String(10), key="oid1"), + 
Column("other_id_2", String(10), key="oid2"), + ForeignKeyConstraint( + ["oid1", "oid2"], + ["some_table.tid1", "some_table.tid2"], + name="fk_test_name", + ), + ) + + diffs = self._fixture(m1, m2) + + self._assert_fk_diff( + diffs[0], + "add_fk", + "user", + ["other_id_1", "other_id_2"], + "some_table", + ["id_1", "id_2"], + name="fk_test_name", + ) + + def test_no_change_colkeys(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id_1", String(10), primary_key=True), + Column("id_2", String(10), primary_key=True), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("other_id_1", String(10)), + Column("other_id_2", String(10)), + ForeignKeyConstraint( + ["other_id_1", "other_id_2"], + ["some_table.id_1", "some_table.id_2"], + ), + ) + + Table( + "some_table", + m2, + Column("id_1", String(10), key="tid1", primary_key=True), + Column("id_2", String(10), key="tid2", primary_key=True), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("other_id_1", String(10), key="oid1"), + Column("other_id_2", String(10), key="oid2"), + ForeignKeyConstraint( + ["oid1", "oid2"], ["some_table.tid1", "some_table.tid2"] + ), + ) + + diffs = self._fixture(m1, m2) + + eq_(diffs, []) + + +class IncludeHooksTest(AutogenFixtureTest, TestBase): + __backend__ = True + __requires__ = ("fk_names",) + + @combinations(("object",), ("name",)) + @config.requirements.no_name_normalize + def test_remove_connection_fk(self, hook_type): + m1 = MetaData() + m2 = MetaData() + + ref = Table( + "ref", + m1, + Column("id", Integer, primary_key=True), + ) + t1 = Table( + "t", + m1, + Column("x", Integer), + Column("y", Integer), + ) + t1.append_constraint( + ForeignKeyConstraint([t1.c.x], [ref.c.id], name="fk1") + ) + t1.append_constraint( + ForeignKeyConstraint([t1.c.y], [ref.c.id], name="fk2") + ) + + ref = Table( + "ref", + m2, + Column("id", Integer, primary_key=True), + ) + Table( + "t", + m2, + Column("x", Integer), + Column("y", Integer), + ) + + if hook_type == "object": + + def include_object(object_, name, type_, reflected, compare_to): + return not ( + isinstance(object_, ForeignKeyConstraint) + and type_ == "foreign_key_constraint" + and reflected + and name == "fk1" + ) + + diffs = self._fixture(m1, m2, object_filters=include_object) + elif hook_type == "name": + + def include_name(name, type_, parent_names): + if name == "fk1": + if type_ == "index": # MariaDB thing + return True + eq_(type_, "foreign_key_constraint") + eq_( + parent_names, + { + "schema_name": None, + "table_name": "t", + "schema_qualified_table_name": "t", + }, + ) + return False + else: + return True + + diffs = self._fixture(m1, m2, name_filters=include_name) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "t", + ["y"], + "ref", + ["id"], + conditional_name="fk2", + ) + eq_(len(diffs), 1) + + def test_add_metadata_fk(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "ref", + m1, + Column("id", Integer, primary_key=True), + ) + Table( + "t", + m1, + Column("x", Integer), + Column("y", Integer), + ) + + ref = Table( + "ref", + m2, + Column("id", Integer, primary_key=True), + ) + t2 = Table( + "t", + m2, + Column("x", Integer), + Column("y", Integer), + ) + t2.append_constraint( + ForeignKeyConstraint([t2.c.x], [ref.c.id], name="fk1") + ) + t2.append_constraint( + ForeignKeyConstraint([t2.c.y], [ref.c.id], name="fk2") + ) + + def include_object(object_, name, type_, reflected, compare_to): + return not ( + isinstance(object_, 
ForeignKeyConstraint) + and type_ == "foreign_key_constraint" + and not reflected + and name == "fk1" + ) + + diffs = self._fixture(m1, m2, object_filters=include_object) + + self._assert_fk_diff( + diffs[0], "add_fk", "t", ["y"], "ref", ["id"], name="fk2" + ) + eq_(len(diffs), 1) + + @combinations(("object",), ("name",)) + @config.requirements.no_name_normalize + def test_change_fk(self, hook_type): + m1 = MetaData() + m2 = MetaData() + + r1a = Table( + "ref_a", + m1, + Column("a", Integer, primary_key=True), + ) + Table( + "ref_b", + m1, + Column("a", Integer, primary_key=True), + Column("b", Integer, primary_key=True), + ) + t1 = Table( + "t", + m1, + Column("x", Integer), + Column("y", Integer), + Column("z", Integer), + ) + t1.append_constraint( + ForeignKeyConstraint([t1.c.x], [r1a.c.a], name="fk1") + ) + t1.append_constraint( + ForeignKeyConstraint([t1.c.y], [r1a.c.a], name="fk2") + ) + + Table( + "ref_a", + m2, + Column("a", Integer, primary_key=True), + ) + r2b = Table( + "ref_b", + m2, + Column("a", Integer, primary_key=True), + Column("b", Integer, primary_key=True), + ) + t2 = Table( + "t", + m2, + Column("x", Integer), + Column("y", Integer), + Column("z", Integer), + ) + t2.append_constraint( + ForeignKeyConstraint( + [t2.c.x, t2.c.z], [r2b.c.a, r2b.c.b], name="fk1" + ) + ) + t2.append_constraint( + ForeignKeyConstraint( + [t2.c.y, t2.c.z], [r2b.c.a, r2b.c.b], name="fk2" + ) + ) + + if hook_type == "object": + + def include_object(object_, name, type_, reflected, compare_to): + return not ( + isinstance(object_, ForeignKeyConstraint) + and type_ == "foreign_key_constraint" + and name == "fk1" + ) + + diffs = self._fixture(m1, m2, object_filters=include_object) + elif hook_type == "name": + + def include_name(name, type_, parent_names): + if type_ == "index": + return True # MariaDB thing + + if name == "fk1": + eq_(type_, "foreign_key_constraint") + eq_( + parent_names, + { + "schema_name": None, + "table_name": "t", + "schema_qualified_table_name": "t", + }, + ) + return False + else: + return True + + diffs = self._fixture(m1, m2, name_filters=include_name) + + if hook_type == "object": + self._assert_fk_diff( + diffs[0], "remove_fk", "t", ["y"], "ref_a", ["a"], name="fk2" + ) + self._assert_fk_diff( + diffs[1], + "add_fk", + "t", + ["y", "z"], + "ref_b", + ["a", "b"], + name="fk2", + ) + eq_(len(diffs), 2) + elif hook_type == "name": + eq_( + {(d[0], d[1].name) for d in diffs}, + {("add_fk", "fk2"), ("add_fk", "fk1"), ("remove_fk", "fk2")}, + ) + + +class AutogenerateFKOptionsTest(AutogenFixtureTest, TestBase): + __backend__ = True + + def _fk_opts_fixture(self, old_opts, new_opts): + m1 = MetaData() + m2 = MetaData() + + Table( + "some_table", + m1, + Column("id", Integer, primary_key=True), + Column("test", String(10)), + ) + + Table( + "user", + m1, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("tid", Integer), + ForeignKeyConstraint(["tid"], ["some_table.id"], **old_opts), + ) + + Table( + "some_table", + m2, + Column("id", Integer, primary_key=True), + Column("test", String(10)), + ) + + Table( + "user", + m2, + Column("id", Integer, primary_key=True), + Column("name", String(50), nullable=False), + Column("tid", Integer), + ForeignKeyConstraint(["tid"], ["some_table.id"], **new_opts), + ) + + return self._fixture(m1, m2) + + @config.requirements.fk_ondelete_is_reflected + def test_add_ondelete(self): + diffs = self._fk_opts_fixture({}, {"ondelete": "cascade"}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + 
"user", + ["tid"], + "some_table", + ["id"], + ondelete=None, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + ondelete="cascade", + ) + + @config.requirements.fk_ondelete_is_reflected + def test_remove_ondelete(self): + diffs = self._fk_opts_fixture({"ondelete": "CASCADE"}, {}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + ondelete="CASCADE", + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + ondelete=None, + ) + + def test_nochange_ondelete(self): + """test case sensitivity""" + diffs = self._fk_opts_fixture( + {"ondelete": "caSCAde"}, {"ondelete": "CasCade"} + ) + eq_(diffs, []) + + @config.requirements.fk_onupdate_is_reflected + def test_add_onupdate(self): + diffs = self._fk_opts_fixture({}, {"onupdate": "cascade"}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate=None, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate="cascade", + ) + + @config.requirements.fk_onupdate_is_reflected + def test_remove_onupdate(self): + diffs = self._fk_opts_fixture({"onupdate": "CASCADE"}, {}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate="CASCADE", + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate=None, + ) + + @config.requirements.fk_onupdate + def test_nochange_onupdate(self): + """test case sensitivity""" + diffs = self._fk_opts_fixture( + {"onupdate": "caSCAde"}, {"onupdate": "CasCade"} + ) + eq_(diffs, []) + + @config.requirements.fk_ondelete_restrict + def test_nochange_ondelete_restrict(self): + """test the RESTRICT option which MySQL doesn't report on""" + + diffs = self._fk_opts_fixture( + {"ondelete": "restrict"}, {"ondelete": "restrict"} + ) + eq_(diffs, []) + + @config.requirements.fk_onupdate_restrict + def test_nochange_onupdate_restrict(self): + """test the RESTRICT option which MySQL doesn't report on""" + + diffs = self._fk_opts_fixture( + {"onupdate": "restrict"}, {"onupdate": "restrict"} + ) + eq_(diffs, []) + + @config.requirements.fk_ondelete_noaction + def test_nochange_ondelete_noaction(self): + """test the NO ACTION option which generally comes back as None""" + + diffs = self._fk_opts_fixture( + {"ondelete": "no action"}, {"ondelete": "no action"} + ) + eq_(diffs, []) + + @config.requirements.fk_onupdate + def test_nochange_onupdate_noaction(self): + """test the NO ACTION option which generally comes back as None""" + + diffs = self._fk_opts_fixture( + {"onupdate": "no action"}, {"onupdate": "no action"} + ) + eq_(diffs, []) + + @config.requirements.fk_ondelete_restrict + def test_change_ondelete_from_restrict(self): + """test the RESTRICT option which MySQL doesn't report on""" + + # note that this is impossible to detect if we change + # from RESTRICT to NO ACTION on MySQL. 
+ diffs = self._fk_opts_fixture( + {"ondelete": "restrict"}, {"ondelete": "cascade"} + ) + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate=None, + ondelete=mock.ANY, # MySQL reports None, PG reports RESTRICT + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate=None, + ondelete="cascade", + ) + + @config.requirements.fk_ondelete_restrict + def test_change_onupdate_from_restrict(self): + """test the RESTRICT option which MySQL doesn't report on""" + + # note that this is impossible to detect if we change + # from RESTRICT to NO ACTION on MySQL. + diffs = self._fk_opts_fixture( + {"onupdate": "restrict"}, {"onupdate": "cascade"} + ) + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate=mock.ANY, # MySQL reports None, PG reports RESTRICT + ondelete=None, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate="cascade", + ondelete=None, + ) + + @config.requirements.fk_ondelete_is_reflected + @config.requirements.fk_onupdate_is_reflected + def test_ondelete_onupdate_combo(self): + diffs = self._fk_opts_fixture( + {"onupdate": "CASCADE", "ondelete": "SET NULL"}, + {"onupdate": "RESTRICT", "ondelete": "RESTRICT"}, + ) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate="CASCADE", + ondelete="SET NULL", + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + onupdate="RESTRICT", + ondelete="RESTRICT", + ) + + @config.requirements.fk_initially + def test_add_initially_deferred(self): + diffs = self._fk_opts_fixture({}, {"initially": "deferred"}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially=None, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially="deferred", + ) + + @config.requirements.fk_initially + def test_remove_initially_deferred(self): + diffs = self._fk_opts_fixture({"initially": "deferred"}, {}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially="DEFERRED", + deferrable=True, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially=None, + ) + + @config.requirements.fk_deferrable + @config.requirements.fk_initially + def test_add_initially_immediate_plus_deferrable(self): + diffs = self._fk_opts_fixture( + {}, {"initially": "immediate", "deferrable": True} + ) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially=None, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially="immediate", + deferrable=True, + ) + + @config.requirements.fk_deferrable + @config.requirements.fk_initially + def test_remove_initially_immediate_plus_deferrable(self): + diffs = self._fk_opts_fixture( + {"initially": "immediate", "deferrable": True}, {} + ) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially=None, # immediate is the default + 
deferrable=True, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + initially=None, + deferrable=None, + ) + + @config.requirements.fk_initially + @config.requirements.fk_deferrable + def test_add_initially_deferrable_nochange_one(self): + diffs = self._fk_opts_fixture( + {"deferrable": True, "initially": "immediate"}, + {"deferrable": True, "initially": "immediate"}, + ) + + eq_(diffs, []) + + @config.requirements.fk_initially + @config.requirements.fk_deferrable + def test_add_initially_deferrable_nochange_two(self): + diffs = self._fk_opts_fixture( + {"deferrable": True, "initially": "deferred"}, + {"deferrable": True, "initially": "deferred"}, + ) + + eq_(diffs, []) + + @config.requirements.fk_initially + @config.requirements.fk_deferrable + def test_add_initially_deferrable_nochange_three(self): + diffs = self._fk_opts_fixture( + {"deferrable": None, "initially": "deferred"}, + {"deferrable": None, "initially": "deferred"}, + ) + + eq_(diffs, []) + + @config.requirements.fk_deferrable + def test_add_deferrable(self): + diffs = self._fk_opts_fixture({}, {"deferrable": True}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + deferrable=None, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + deferrable=True, + ) + + @config.requirements.fk_deferrable_is_reflected + def test_remove_deferrable(self): + diffs = self._fk_opts_fixture({"deferrable": True}, {}) + + self._assert_fk_diff( + diffs[0], + "remove_fk", + "user", + ["tid"], + "some_table", + ["id"], + deferrable=True, + conditional_name="servergenerated", + ) + + self._assert_fk_diff( + diffs[1], + "add_fk", + "user", + ["tid"], + "some_table", + ["id"], + deferrable=None, + ) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_identity.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_identity.py new file mode 100644 index 0000000..3dee9fc --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_identity.py @@ -0,0 +1,226 @@ +import sqlalchemy as sa +from sqlalchemy import Column +from sqlalchemy import Integer +from sqlalchemy import MetaData +from sqlalchemy import Table + +from alembic.util import sqla_compat +from ._autogen_fixtures import AutogenFixtureTest +from ... 
import testing
+from ...testing import config
+from ...testing import eq_
+from ...testing import is_true
+from ...testing import TestBase
+
+
+class AutogenerateIdentityTest(AutogenFixtureTest, TestBase):
+    __requires__ = ("identity_columns",)
+    __backend__ = True
+
+    def test_add_identity_column(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table("user", m1, Column("other", sa.Text))
+
+        Table(
+            "user",
+            m2,
+            Column("other", sa.Text),
+            Column(
+                "id",
+                Integer,
+                sa.Identity(start=5, increment=7),
+                primary_key=True,
+            ),
+        )
+
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs[0][0], "add_column")
+        eq_(diffs[0][2], "user")
+        eq_(diffs[0][3].name, "id")
+        i = diffs[0][3].identity
+
+        is_true(isinstance(i, sa.Identity))
+        eq_(i.start, 5)
+        eq_(i.increment, 7)
+
+    def test_remove_identity_column(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table(
+            "user",
+            m1,
+            Column(
+                "id",
+                Integer,
+                sa.Identity(start=2, increment=3),
+                primary_key=True,
+            ),
+        )
+
+        Table("user", m2)
+
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs[0][0], "remove_column")
+        eq_(diffs[0][2], "user")
+        c = diffs[0][3]
+        eq_(c.name, "id")
+
+        is_true(isinstance(c.identity, sa.Identity))
+        eq_(c.identity.start, 2)
+        eq_(c.identity.increment, 3)
+
+    def test_no_change_identity_column(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        for m in (m1, m2):
+            id_ = sa.Identity(start=2)
+            Table("user", m, Column("id", Integer, id_))
+
+        diffs = self._fixture(m1, m2)
+
+        eq_(diffs, [])
+
+    def test_dialect_kwargs_changes(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        if sqla_compat.identity_has_dialect_kwargs:
+            args = {"oracle_on_null": True, "oracle_order": True}
+        else:
+            args = {"on_null": True, "order": True}
+
+        Table("user", m1, Column("id", Integer, sa.Identity(start=2)))
+        id_ = sa.Identity(start=2, **args)
+        Table("user", m2, Column("id", Integer, id_))
+
+        diffs = self._fixture(m1, m2)
+        if config.db.name == "oracle":
+            eq_(len(diffs), 1)
+            eq_(diffs[0][0][0], "modify_default")
+        else:
+            eq_(diffs, [])
+
+    @testing.combinations(
+        (None, dict(start=2)),
+        (dict(start=2), None),
+        (dict(start=2), dict(start=2, increment=7)),
+        (dict(always=False), dict(always=True)),
+        (
+            dict(start=1, minvalue=0, maxvalue=100, cycle=True),
+            dict(start=1, minvalue=0, maxvalue=100, cycle=False),
+        ),
+        (
+            dict(start=10, increment=3, maxvalue=9999),
+            dict(start=10, increment=1, maxvalue=3333),
+        ),
+    )
+    @config.requirements.identity_columns_alter
+    def test_change_identity(self, before, after):
+        arg_before = (sa.Identity(**before),) if before else ()
+        arg_after = (sa.Identity(**after),) if after else ()
+
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table(
+            "user",
+            m1,
+            Column("id", Integer, *arg_before),
+            Column("other", sa.Text),
+        )
+
+        Table(
+            "user",
+            m2,
+            Column("id", Integer, *arg_after),
+            Column("other", sa.Text),
+        )
+
+        diffs = self._fixture(m1, m2)
+
+        eq_(len(diffs[0]), 1)
+        diffs = diffs[0][0]
+        eq_(diffs[0], "modify_default")
+        eq_(diffs[2], "user")
+        eq_(diffs[3], "id")
+        old = diffs[5]
+        new = diffs[6]
+
+        def check(kw, idt):
+            if kw:
+                is_true(isinstance(idt, sa.Identity))
+                for k, v in kw.items():
+                    eq_(getattr(idt, k), v)
+            else:
+                is_true(idt in (None, False))
+
+        check(before, old)
+        check(after, new)
+
+    def test_add_identity_to_column(self):
+        m1 = MetaData()
+        m2 = MetaData()
+
+        Table(
+            "user",
+            m1,
+            Column("id", Integer),
+            Column("other", sa.Text),
+        )
+
+        Table(
+            "user",
+            m2,
+            Column("id", Integer, sa.Identity(start=2, maxvalue=1000)),
+            Column("other", sa.Text),
+        )
+
+        diffs = 
self._fixture(m1, m2) + + eq_(len(diffs[0]), 1) + diffs = diffs[0][0] + eq_(diffs[0], "modify_default") + eq_(diffs[2], "user") + eq_(diffs[3], "id") + eq_(diffs[5], None) + added = diffs[6] + + is_true(isinstance(added, sa.Identity)) + eq_(added.start, 2) + eq_(added.maxvalue, 1000) + + def test_remove_identity_from_column(self): + m1 = MetaData() + m2 = MetaData() + + Table( + "user", + m1, + Column("id", Integer, sa.Identity(start=2, maxvalue=1000)), + Column("other", sa.Text), + ) + + Table( + "user", + m2, + Column("id", Integer), + Column("other", sa.Text), + ) + + diffs = self._fixture(m1, m2) + + eq_(len(diffs[0]), 1) + diffs = diffs[0][0] + eq_(diffs[0], "modify_default") + eq_(diffs[2], "user") + eq_(diffs[3], "id") + eq_(diffs[6], None) + removed = diffs[5] + + is_true(isinstance(removed, sa.Identity)) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_environment.py b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_environment.py new file mode 100644 index 0000000..8c86859 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_environment.py @@ -0,0 +1,364 @@ +import io + +from ...migration import MigrationContext +from ...testing import assert_raises +from ...testing import config +from ...testing import eq_ +from ...testing import is_ +from ...testing import is_false +from ...testing import is_not_ +from ...testing import is_true +from ...testing import ne_ +from ...testing.fixtures import TestBase + + +class MigrationTransactionTest(TestBase): + __backend__ = True + + conn = None + + def _fixture(self, opts): + self.conn = conn = config.db.connect() + + if opts.get("as_sql", False): + self.context = MigrationContext.configure( + dialect=conn.dialect, opts=opts + ) + self.context.output_buffer = ( + self.context.impl.output_buffer + ) = io.StringIO() + else: + self.context = MigrationContext.configure( + connection=conn, opts=opts + ) + return self.context + + def teardown_method(self): + if self.conn: + self.conn.close() + + def test_proxy_transaction_rollback(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + + is_false(self.conn.in_transaction()) + proxy = context.begin_transaction(_per_migration=True) + is_true(self.conn.in_transaction()) + proxy.rollback() + is_false(self.conn.in_transaction()) + + def test_proxy_transaction_commit(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + proxy = context.begin_transaction(_per_migration=True) + is_true(self.conn.in_transaction()) + proxy.commit() + is_false(self.conn.in_transaction()) + + def test_proxy_transaction_contextmanager_commit(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + proxy = context.begin_transaction(_per_migration=True) + is_true(self.conn.in_transaction()) + with proxy: + pass + is_false(self.conn.in_transaction()) + + def test_proxy_transaction_contextmanager_rollback(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + proxy = context.begin_transaction(_per_migration=True) + is_true(self.conn.in_transaction()) + + def go(): + with proxy: + raise Exception("hi") + + assert_raises(Exception, go) + is_false(self.conn.in_transaction()) + + def test_proxy_transaction_contextmanager_explicit_rollback(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + proxy = 
context.begin_transaction(_per_migration=True) + is_true(self.conn.in_transaction()) + + with proxy: + is_true(self.conn.in_transaction()) + proxy.rollback() + is_false(self.conn.in_transaction()) + + is_false(self.conn.in_transaction()) + + def test_proxy_transaction_contextmanager_explicit_commit(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + proxy = context.begin_transaction(_per_migration=True) + is_true(self.conn.in_transaction()) + + with proxy: + is_true(self.conn.in_transaction()) + proxy.commit() + is_false(self.conn.in_transaction()) + + is_false(self.conn.in_transaction()) + + def test_transaction_per_migration_transactional_ddl(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": True} + ) + + is_false(self.conn.in_transaction()) + + with context.begin_transaction(): + is_false(self.conn.in_transaction()) + with context.begin_transaction(_per_migration=True): + is_true(self.conn.in_transaction()) + + is_false(self.conn.in_transaction()) + is_false(self.conn.in_transaction()) + + def test_transaction_per_migration_non_transactional_ddl(self): + context = self._fixture( + {"transaction_per_migration": True, "transactional_ddl": False} + ) + + is_false(self.conn.in_transaction()) + + with context.begin_transaction(): + is_false(self.conn.in_transaction()) + with context.begin_transaction(_per_migration=True): + is_true(self.conn.in_transaction()) + + is_false(self.conn.in_transaction()) + is_false(self.conn.in_transaction()) + + def test_transaction_per_all_transactional_ddl(self): + context = self._fixture({"transactional_ddl": True}) + + is_false(self.conn.in_transaction()) + + with context.begin_transaction(): + is_true(self.conn.in_transaction()) + with context.begin_transaction(_per_migration=True): + is_true(self.conn.in_transaction()) + + is_true(self.conn.in_transaction()) + is_false(self.conn.in_transaction()) + + def test_transaction_per_all_non_transactional_ddl(self): + context = self._fixture({"transactional_ddl": False}) + + is_false(self.conn.in_transaction()) + + with context.begin_transaction(): + is_false(self.conn.in_transaction()) + with context.begin_transaction(_per_migration=True): + is_true(self.conn.in_transaction()) + + is_false(self.conn.in_transaction()) + is_false(self.conn.in_transaction()) + + def test_transaction_per_all_sqlmode(self): + context = self._fixture({"as_sql": True}) + + context.execute("step 1") + with context.begin_transaction(): + context.execute("step 2") + with context.begin_transaction(_per_migration=True): + context.execute("step 3") + + context.execute("step 4") + context.execute("step 5") + + if context.impl.transactional_ddl: + self._assert_impl_steps( + "step 1", + "BEGIN", + "step 2", + "step 3", + "step 4", + "COMMIT", + "step 5", + ) + else: + self._assert_impl_steps( + "step 1", "step 2", "step 3", "step 4", "step 5" + ) + + def test_transaction_per_migration_sqlmode(self): + context = self._fixture( + {"as_sql": True, "transaction_per_migration": True} + ) + + context.execute("step 1") + with context.begin_transaction(): + context.execute("step 2") + with context.begin_transaction(_per_migration=True): + context.execute("step 3") + + context.execute("step 4") + context.execute("step 5") + + if context.impl.transactional_ddl: + self._assert_impl_steps( + "step 1", + "step 2", + "BEGIN", + "step 3", + "COMMIT", + "step 4", + "step 5", + ) + else: + self._assert_impl_steps( + "step 1", "step 2", "step 3", "step 4", "step 5" + ) 
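+    # In --sql (offline) mode, BEGIN/COMMIT are emitted as literal statements
+    # into the script, so transaction_per_migration narrows the transactional
+    # wrapping from the whole run down to each individual migration, as the
+    # two tests above assert.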
+ + @config.requirements.autocommit_isolation + def test_autocommit_block(self): + context = self._fixture({"transaction_per_migration": True}) + + is_false(self.conn.in_transaction()) + + with context.begin_transaction(): + is_false(self.conn.in_transaction()) + with context.begin_transaction(_per_migration=True): + is_true(self.conn.in_transaction()) + + with context.autocommit_block(): + # in 1.x, self.conn is separate due to the + # execution_options call. however for future they are the + # same connection and there is a "transaction" block + # despite autocommit + if self.is_sqlalchemy_future: + is_(context.connection, self.conn) + else: + is_not_(context.connection, self.conn) + is_false(self.conn.in_transaction()) + + eq_( + context.connection._execution_options[ + "isolation_level" + ], + "AUTOCOMMIT", + ) + + ne_( + context.connection._execution_options.get( + "isolation_level", None + ), + "AUTOCOMMIT", + ) + is_true(self.conn.in_transaction()) + + is_false(self.conn.in_transaction()) + is_false(self.conn.in_transaction()) + + @config.requirements.autocommit_isolation + def test_autocommit_block_no_transaction(self): + context = self._fixture({"transaction_per_migration": True}) + + is_false(self.conn.in_transaction()) + + with context.autocommit_block(): + is_true(context.connection.in_transaction()) + + # in 1.x, self.conn is separate due to the execution_options + # call. however for future they are the same connection and there + # is a "transaction" block despite autocommit + if self.is_sqlalchemy_future: + is_(context.connection, self.conn) + else: + is_not_(context.connection, self.conn) + is_false(self.conn.in_transaction()) + + eq_( + context.connection._execution_options["isolation_level"], + "AUTOCOMMIT", + ) + + ne_( + context.connection._execution_options.get("isolation_level", None), + "AUTOCOMMIT", + ) + + is_false(self.conn.in_transaction()) + + def test_autocommit_block_transactional_ddl_sqlmode(self): + context = self._fixture( + { + "transaction_per_migration": True, + "transactional_ddl": True, + "as_sql": True, + } + ) + + with context.begin_transaction(): + context.execute("step 1") + with context.begin_transaction(_per_migration=True): + context.execute("step 2") + + with context.autocommit_block(): + context.execute("step 3") + + context.execute("step 4") + + context.execute("step 5") + + self._assert_impl_steps( + "step 1", + "BEGIN", + "step 2", + "COMMIT", + "step 3", + "BEGIN", + "step 4", + "COMMIT", + "step 5", + ) + + def test_autocommit_block_nontransactional_ddl_sqlmode(self): + context = self._fixture( + { + "transaction_per_migration": True, + "transactional_ddl": False, + "as_sql": True, + } + ) + + with context.begin_transaction(): + context.execute("step 1") + with context.begin_transaction(_per_migration=True): + context.execute("step 2") + + with context.autocommit_block(): + context.execute("step 3") + + context.execute("step 4") + + context.execute("step 5") + + self._assert_impl_steps( + "step 1", "step 2", "step 3", "step 4", "step 5" + ) + + def _assert_impl_steps(self, *steps): + to_check = self.context.output_buffer.getvalue() + + self.context.impl.output_buffer = buf = io.StringIO() + for step in steps: + if step == "BEGIN": + self.context.impl.emit_begin() + elif step == "COMMIT": + self.context.impl.emit_commit() + else: + self.context.impl._exec(step) + + eq_(to_check, buf.getvalue()) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/suite/test_op.py 
b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_op.py new file mode 100644 index 0000000..a63b3f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/suite/test_op.py @@ -0,0 +1,42 @@ +"""Test against the builders in the op.* module.""" + +from sqlalchemy import Column +from sqlalchemy import event +from sqlalchemy import Integer +from sqlalchemy import String +from sqlalchemy import Table +from sqlalchemy.sql import text + +from ...testing.fixtures import AlterColRoundTripFixture +from ...testing.fixtures import TestBase + + +@event.listens_for(Table, "after_parent_attach") +def _add_cols(table, metadata): + if table.name == "tbl_with_auto_appended_column": + table.append_column(Column("bat", Integer)) + + +class BackendAlterColumnTest(AlterColRoundTripFixture, TestBase): + __backend__ = True + + def test_rename_column(self): + self._run_alter_col({}, {"name": "newname"}) + + def test_modify_type_int_str(self): + self._run_alter_col({"type": Integer()}, {"type": String(50)}) + + def test_add_server_default_int(self): + self._run_alter_col({"type": Integer}, {"server_default": text("5")}) + + def test_modify_server_default_int(self): + self._run_alter_col( + {"type": Integer, "server_default": text("2")}, + {"server_default": text("5")}, + ) + + def test_modify_nullable_to_non(self): + self._run_alter_col({}, {"nullable": False}) + + def test_modify_non_nullable_to_nullable(self): + self._run_alter_col({"nullable": False}, {"nullable": True}) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/util.py b/venv/lib/python3.12/site-packages/alembic/testing/util.py new file mode 100644 index 0000000..4517a69 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/util.py @@ -0,0 +1,126 @@ +# testing/util.py +# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from __future__ import annotations + +import types +from typing import Union + +from sqlalchemy.util import inspect_getfullargspec + +from ..util import sqla_2 + + +def flag_combinations(*combinations): + """A facade around @testing.combinations() oriented towards boolean + keyword-based arguments. + + Basically generates a nice looking identifier based on the keywords + and also sets up the argument names. + + E.g.:: + + @testing.flag_combinations( + dict(lazy=False, passive=False), + dict(lazy=True, passive=False), + dict(lazy=False, passive=True), + dict(lazy=False, passive=True, raiseload=True), + ) + + + would result in:: + + @testing.combinations( + ('', False, False, False), + ('lazy', True, False, False), + ('passive', False, True, False), + ('passive_raiseload', False, True, True), + id_='iaaa', + argnames='lazy,passive,raiseload' + ) + + """ + from sqlalchemy.testing import config + + keys = set() + + for d in combinations: + keys.update(d) + + keys = sorted(keys) + + return config.combinations( + *[ + ("_".join(k for k in keys if d.get(k, False)),) + + tuple(d.get(k, False) for k in keys) + for d in combinations + ], + id_="i" + ("a" * len(keys)), + argnames=",".join(keys), + ) + + +def resolve_lambda(__fn, **kw): + """Given a no-arg lambda and a namespace, return a new lambda that + has all the values filled in. + + This is used so that we can have module-level fixtures that + refer to instance-level variables using lambdas.
+ + """ + + pos_args = inspect_getfullargspec(__fn)[0] + pass_pos_args = {arg: kw.pop(arg) for arg in pos_args} + glb = dict(__fn.__globals__) + glb.update(kw) + new_fn = types.FunctionType(__fn.__code__, glb) + return new_fn(**pass_pos_args) + + +def metadata_fixture(ddl="function"): + """Provide MetaData for a pytest fixture.""" + + from sqlalchemy.testing import config + from . import fixture_functions + + def decorate(fn): + def run_ddl(self): + from sqlalchemy import schema + + metadata = self.metadata = schema.MetaData() + try: + result = fn(self, metadata) + metadata.create_all(config.db) + # TODO: + # somehow get a per-function dml erase fixture here + yield result + finally: + metadata.drop_all(config.db) + + return fixture_functions.fixture(scope=ddl)(run_ddl) + + return decorate + + +def _safe_int(value: str) -> Union[int, str]: + try: + return int(value) + except: + return value + + +def testing_engine(url=None, options=None, future=False): + from sqlalchemy.testing import config + from sqlalchemy.testing.engines import testing_engine + + if not future: + future = getattr(config._current.options, "future_engine", False) + + if not sqla_2: + kw = {"future": future} if future else {} + else: + kw = {} + return testing_engine(url, options, **kw) diff --git a/venv/lib/python3.12/site-packages/alembic/testing/warnings.py b/venv/lib/python3.12/site-packages/alembic/testing/warnings.py new file mode 100644 index 0000000..e87136b --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/testing/warnings.py @@ -0,0 +1,40 @@ +# testing/warnings.py +# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors +# +# +# This module is part of SQLAlchemy and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +import warnings + +from sqlalchemy import exc as sa_exc + +from ..util import sqla_14 + + +def setup_filters(): + """Set global warning behavior for the test suite.""" + + warnings.resetwarnings() + + warnings.filterwarnings("error", category=sa_exc.SADeprecationWarning) + warnings.filterwarnings("error", category=sa_exc.SAWarning) + + # some selected deprecations... 
+ warnings.filterwarnings("error", category=DeprecationWarning) + if not sqla_14: + # 1.3 uses pkg_resources in PluginLoader + warnings.filterwarnings( + "ignore", + "pkg_resources is deprecated as an API", + DeprecationWarning, + ) + try: + import pytest + except ImportError: + pass + else: + warnings.filterwarnings( + "once", category=pytest.PytestDeprecationWarning + ) diff --git a/venv/lib/python3.12/site-packages/alembic/util/__init__.py b/venv/lib/python3.12/site-packages/alembic/util/__init__.py new file mode 100644 index 0000000..4724e1f --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/__init__.py @@ -0,0 +1,35 @@ +from .editor import open_in_editor as open_in_editor +from .exc import AutogenerateDiffsDetected as AutogenerateDiffsDetected +from .exc import CommandError as CommandError +from .langhelpers import _with_legacy_names as _with_legacy_names +from .langhelpers import asbool as asbool +from .langhelpers import dedupe_tuple as dedupe_tuple +from .langhelpers import Dispatcher as Dispatcher +from .langhelpers import EMPTY_DICT as EMPTY_DICT +from .langhelpers import immutabledict as immutabledict +from .langhelpers import memoized_property as memoized_property +from .langhelpers import ModuleClsProxy as ModuleClsProxy +from .langhelpers import not_none as not_none +from .langhelpers import rev_id as rev_id +from .langhelpers import to_list as to_list +from .langhelpers import to_tuple as to_tuple +from .langhelpers import unique_list as unique_list +from .messaging import err as err +from .messaging import format_as_comma as format_as_comma +from .messaging import msg as msg +from .messaging import obfuscate_url_pw as obfuscate_url_pw +from .messaging import status as status +from .messaging import warn as warn +from .messaging import write_outstream as write_outstream +from .pyfiles import coerce_resource_to_filename as coerce_resource_to_filename +from .pyfiles import load_python_file as load_python_file +from .pyfiles import pyc_file_from_path as pyc_file_from_path +from .pyfiles import template_to_file as template_to_file +from .sqla_compat import has_computed as has_computed +from .sqla_compat import sqla_13 as sqla_13 +from .sqla_compat import sqla_14 as sqla_14 +from .sqla_compat import sqla_2 as sqla_2 + + +if not sqla_13: + raise CommandError("SQLAlchemy 1.3.0 or greater is required.") diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2d3ec86 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..0c4a713 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/editor.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/editor.cpython-312.pyc new file mode 100644 index 0000000..56ccd10 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/editor.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/exc.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/exc.cpython-312.pyc new file mode 100644 index 0000000..23482fe Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/exc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/langhelpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/langhelpers.cpython-312.pyc new file mode 100644 index 0000000..44324ae Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/langhelpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/messaging.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/messaging.cpython-312.pyc new file mode 100644 index 0000000..302e4b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/messaging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/pyfiles.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/pyfiles.cpython-312.pyc new file mode 100644 index 0000000..04a99dd Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/pyfiles.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/__pycache__/sqla_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/sqla_compat.cpython-312.pyc new file mode 100644 index 0000000..741a3b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/alembic/util/__pycache__/sqla_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/alembic/util/compat.py b/venv/lib/python3.12/site-packages/alembic/util/compat.py new file mode 100644 index 0000000..e185cc4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/compat.py @@ -0,0 +1,89 @@ +# mypy: no-warn-unused-ignores + +from __future__ import annotations + +from configparser import ConfigParser +import io +import os +import sys +import typing +from typing import Any +from typing import List +from typing import Optional +from typing import Sequence +from typing import Union + +if True: + # zimports hack for too-long names + from sqlalchemy.util import ( # noqa: F401 + inspect_getfullargspec as inspect_getfullargspec, + ) + from sqlalchemy.util.compat import ( # noqa: F401 + inspect_formatargspec as inspect_formatargspec, + ) + +is_posix = os.name == "posix" + +py311 = sys.version_info >= (3, 11) +py310 = sys.version_info >= (3, 10) +py39 = sys.version_info >= (3, 9) + + +# produce a wrapper that allows encoded text to stream +# into a given buffer, but doesn't close it. +# not sure of a more idiomatic approach to this. 
+class EncodedIO(io.TextIOWrapper): + def close(self) -> None: + pass + + +if py39: + from importlib import resources as _resources + + importlib_resources = _resources + from importlib import metadata as _metadata + + importlib_metadata = _metadata + from importlib.metadata import EntryPoint as EntryPoint +else: + import importlib_resources # type:ignore # noqa + import importlib_metadata # type:ignore # noqa + from importlib_metadata import EntryPoint # type:ignore # noqa + + +def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: + ep = importlib_metadata.entry_points() + if hasattr(ep, "select"): + return ep.select(group=group) + else: + return ep.get(group, ()) # type: ignore + + +def formatannotation_fwdref( + annotation: Any, base_module: Optional[Any] = None +) -> str: + """vendored from python 3.7""" + # copied over _formatannotation from sqlalchemy 2.0 + + if isinstance(annotation, str): + return annotation + + if getattr(annotation, "__module__", None) == "typing": + return repr(annotation).replace("typing.", "").replace("~", "") + if isinstance(annotation, type): + if annotation.__module__ in ("builtins", base_module): + return repr(annotation.__qualname__) + return annotation.__module__ + "." + annotation.__qualname__ + elif isinstance(annotation, typing.TypeVar): + return repr(annotation).replace("~", "") + return repr(annotation).replace("~", "") + + +def read_config_parser( + file_config: ConfigParser, + file_argument: Sequence[Union[str, os.PathLike[str]]], +) -> List[str]: + if py310: + return file_config.read(file_argument, encoding="locale") + else: + return file_config.read(file_argument) diff --git a/venv/lib/python3.12/site-packages/alembic/util/editor.py b/venv/lib/python3.12/site-packages/alembic/util/editor.py new file mode 100644 index 0000000..f1d1557 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/editor.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import os +from os.path import exists +from os.path import join +from os.path import splitext +from subprocess import check_call +from typing import Dict +from typing import List +from typing import Mapping +from typing import Optional + +from .compat import is_posix +from .exc import CommandError + + +def open_in_editor( + filename: str, environ: Optional[Dict[str, str]] = None +) -> None: + """ + Opens the given file in a text editor. If the environment variable + ``EDITOR`` is set, this is taken as preference. + + Otherwise, a list of commonly installed editors is tried. + + If no editor matches, an :py:exc:`OSError` is raised. + + :param filename: The filename to open. Will be passed verbatim to the + editor command. + :param environ: An optional drop-in replacement for ``os.environ``. Used + mainly for testing. + """ + env = os.environ if environ is None else environ + try: + editor = _find_editor(env) + check_call([editor, filename]) + except Exception as exc: + raise CommandError("Error executing editor (%s)" % (exc,)) from exc + + +def _find_editor(environ: Mapping[str, str]) -> str: + candidates = _default_editors() + for i, var in enumerate(("EDITOR", "VISUAL")): + if var in environ: + user_choice = environ[var] + if exists(user_choice): + return user_choice + if os.sep not in user_choice: + candidates.insert(i, user_choice) + + for candidate in candidates: + path = _find_executable(candidate, environ) + if path is not None: + return path + raise OSError( + "No suitable editor found. 
Please set the " + '"EDITOR" or "VISUAL" environment variables' + ) + + +def _find_executable( + candidate: str, environ: Mapping[str, str] +) -> Optional[str]: + # Assuming this is on the PATH, we need to determine its absolute + # location. Otherwise, ``check_call`` will fail + if not is_posix and splitext(candidate)[1] != ".exe": + candidate += ".exe" + for path in environ.get("PATH", "").split(os.pathsep): + value = join(path, candidate) + if exists(value): + return value + return None + + +def _default_editors() -> List[str]: + # Look for an editor. Prefer the user's choice by env-var, fall back to + # the most commonly installed editors (nano/vim) + if is_posix: + return ["sensible-editor", "editor", "nano", "vim", "code"] + else: + return ["code.exe", "notepad++.exe", "notepad.exe"] diff --git a/venv/lib/python3.12/site-packages/alembic/util/exc.py b/venv/lib/python3.12/site-packages/alembic/util/exc.py new file mode 100644 index 0000000..0d0496b --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/exc.py @@ -0,0 +1,6 @@ +class CommandError(Exception): + pass + + +class AutogenerateDiffsDetected(CommandError): + pass diff --git a/venv/lib/python3.12/site-packages/alembic/util/langhelpers.py b/venv/lib/python3.12/site-packages/alembic/util/langhelpers.py new file mode 100644 index 0000000..4a5bf09 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/langhelpers.py @@ -0,0 +1,335 @@ +from __future__ import annotations + +import collections +from collections.abc import Iterable +import textwrap +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import uuid +import warnings + +from sqlalchemy.util import asbool as asbool # noqa: F401 +from sqlalchemy.util import immutabledict as immutabledict # noqa: F401 +from sqlalchemy.util import to_list as to_list # noqa: F401 +from sqlalchemy.util import unique_list as unique_list + +from .compat import inspect_getfullargspec + +if True: + # zimports workaround :( + from sqlalchemy.util import ( # noqa: F401 + memoized_property as memoized_property, + ) + + +EMPTY_DICT: Mapping[Any, Any] = immutabledict() +_T = TypeVar("_T", bound=Any) + +_C = TypeVar("_C", bound=Callable[..., Any]) + + +class _ModuleClsMeta(type): + def __setattr__(cls, key: str, value: Callable[..., Any]) -> None: + super().__setattr__(key, value) + cls._update_module_proxies(key) # type: ignore + + +class ModuleClsProxy(metaclass=_ModuleClsMeta): + """Create module level proxy functions for the + methods on a given class. + + The functions will have signatures + compatible with those of the methods.
+ + """ + + _setups: Dict[ + Type[Any], + Tuple[ + Set[str], + List[Tuple[MutableMapping[str, Any], MutableMapping[str, Any]]], + ], + ] = collections.defaultdict(lambda: (set(), [])) + + @classmethod + def _update_module_proxies(cls, name: str) -> None: + attr_names, modules = cls._setups[cls] + for globals_, locals_ in modules: + cls._add_proxied_attribute(name, globals_, locals_, attr_names) + + def _install_proxy(self) -> None: + attr_names, modules = self._setups[self.__class__] + for globals_, locals_ in modules: + globals_["_proxy"] = self + for attr_name in attr_names: + globals_[attr_name] = getattr(self, attr_name) + + def _remove_proxy(self) -> None: + attr_names, modules = self._setups[self.__class__] + for globals_, locals_ in modules: + globals_["_proxy"] = None + for attr_name in attr_names: + del globals_[attr_name] + + @classmethod + def create_module_class_proxy( + cls, + globals_: MutableMapping[str, Any], + locals_: MutableMapping[str, Any], + ) -> None: + attr_names, modules = cls._setups[cls] + modules.append((globals_, locals_)) + cls._setup_proxy(globals_, locals_, attr_names) + + @classmethod + def _setup_proxy( + cls, + globals_: MutableMapping[str, Any], + locals_: MutableMapping[str, Any], + attr_names: Set[str], + ) -> None: + for methname in dir(cls): + cls._add_proxied_attribute(methname, globals_, locals_, attr_names) + + @classmethod + def _add_proxied_attribute( + cls, + methname: str, + globals_: MutableMapping[str, Any], + locals_: MutableMapping[str, Any], + attr_names: Set[str], + ) -> None: + if not methname.startswith("_"): + meth = getattr(cls, methname) + if callable(meth): + locals_[methname] = cls._create_method_proxy( + methname, globals_, locals_ + ) + else: + attr_names.add(methname) + + @classmethod + def _create_method_proxy( + cls, + name: str, + globals_: MutableMapping[str, Any], + locals_: MutableMapping[str, Any], + ) -> Callable[..., Any]: + fn = getattr(cls, name) + + def _name_error(name: str, from_: Exception) -> NoReturn: + raise NameError( + "Can't invoke function '%s', as the proxy object has " + "not yet been " + "established for the Alembic '%s' class. " + "Try placing this code inside a callable." + % (name, cls.__name__) + ) from from_ + + globals_["_name_error"] = _name_error + + translations = getattr(fn, "_legacy_translations", []) + if translations: + spec = inspect_getfullargspec(fn) + if spec[0] and spec[0][0] == "self": + spec[0].pop(0) + + outer_args = inner_args = "*args, **kw" + translate_str = "args, kw = _translate(%r, %r, %r, args, kw)" % ( + fn.__name__, + tuple(spec), + translations, + ) + + def translate( + fn_name: str, spec: Any, translations: Any, args: Any, kw: Any + ) -> Any: + return_kw = {} + return_args = [] + + for oldname, newname in translations: + if oldname in kw: + warnings.warn( + "Argument %r is now named %r " + "for method %s()." 
% (oldname, newname, fn_name) + ) + return_kw[newname] = kw.pop(oldname) + return_kw.update(kw) + + args = list(args) + if spec[3]: + pos_only = spec[0][: -len(spec[3])] + else: + pos_only = spec[0] + for arg in pos_only: + if arg not in return_kw: + try: + return_args.append(args.pop(0)) + except IndexError: + raise TypeError( + "missing required positional argument: %s" + % arg + ) + return_args.extend(args) + + return return_args, return_kw + + globals_["_translate"] = translate + else: + outer_args = "*args, **kw" + inner_args = "*args, **kw" + translate_str = "" + + func_text = textwrap.dedent( + """\ + def %(name)s(%(args)s): + %(doc)r + %(translate)s + try: + p = _proxy + except NameError as ne: + _name_error('%(name)s', ne) + return _proxy.%(name)s(%(apply_kw)s) + """ + % { + "name": name, + "translate": translate_str, + "args": outer_args, + "apply_kw": inner_args, + "doc": fn.__doc__, + } + ) + lcl: MutableMapping[str, Any] = {} + + exec(func_text, cast("Dict[str, Any]", globals_), lcl) + return cast("Callable[..., Any]", lcl[name]) + + +def _with_legacy_names(translations: Any) -> Any: + def decorate(fn: _C) -> _C: + fn._legacy_translations = translations # type: ignore[attr-defined] + return fn + + return decorate + + +def rev_id() -> str: + return uuid.uuid4().hex[-12:] + + +@overload +def to_tuple(x: Any, default: Tuple[Any, ...]) -> Tuple[Any, ...]: + ... + + +@overload +def to_tuple(x: None, default: Optional[_T] = ...) -> _T: + ... + + +@overload +def to_tuple( + x: Any, default: Optional[Tuple[Any, ...]] = None +) -> Tuple[Any, ...]: + ... + + +def to_tuple( + x: Any, default: Optional[Tuple[Any, ...]] = None +) -> Optional[Tuple[Any, ...]]: + if x is None: + return default + elif isinstance(x, str): + return (x,) + elif isinstance(x, Iterable): + return tuple(x) + else: + return (x,) + + +def dedupe_tuple(tup: Tuple[str, ...]) -> Tuple[str, ...]: + return tuple(unique_list(tup)) + + +class Dispatcher: + def __init__(self, uselist: bool = False) -> None: + self._registry: Dict[Tuple[Any, ...], Any] = {} + self.uselist = uselist + + def dispatch_for( + self, target: Any, qualifier: str = "default" + ) -> Callable[[_C], _C]: + def decorate(fn: _C) -> _C: + if self.uselist: + self._registry.setdefault((target, qualifier), []).append(fn) + else: + assert (target, qualifier) not in self._registry + self._registry[(target, qualifier)] = fn + return fn + + return decorate + + def dispatch(self, obj: Any, qualifier: str = "default") -> Any: + if isinstance(obj, str): + targets: Sequence[Any] = [obj] + elif isinstance(obj, type): + targets = obj.__mro__ + else: + targets = type(obj).__mro__ + + for spcls in targets: + if qualifier != "default" and (spcls, qualifier) in self._registry: + return self._fn_or_list(self._registry[(spcls, qualifier)]) + elif (spcls, "default") in self._registry: + return self._fn_or_list(self._registry[(spcls, "default")]) + else: + raise ValueError("no dispatch function for object: %s" % obj) + + def _fn_or_list( + self, fn_or_list: Union[List[Callable[..., Any]], Callable[..., Any]] + ) -> Callable[..., Any]: + if self.uselist: + + def go(*arg: Any, **kw: Any) -> None: + if TYPE_CHECKING: + assert isinstance(fn_or_list, Sequence) + for fn in fn_or_list: + fn(*arg, **kw) + + return go + else: + return fn_or_list # type: ignore + + def branch(self) -> Dispatcher: + """Return a copy of this dispatcher that is independently + writable.""" + + d = Dispatcher() + if self.uselist: + d._registry.update( + (k, [fn for fn in self._registry[k]]) for k in
self._registry + ) + else: + d._registry.update(self._registry) + return d + + +def not_none(value: Optional[_T]) -> _T: + assert value is not None + return value diff --git a/venv/lib/python3.12/site-packages/alembic/util/messaging.py b/venv/lib/python3.12/site-packages/alembic/util/messaging.py new file mode 100644 index 0000000..5f14d59 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/messaging.py @@ -0,0 +1,115 @@ +from __future__ import annotations + +from collections.abc import Iterable +from contextlib import contextmanager +import logging +import sys +import textwrap +from typing import Iterator +from typing import Optional +from typing import TextIO +from typing import Union +import warnings + +from sqlalchemy.engine import url + +from . import sqla_compat + +log = logging.getLogger(__name__) + +# disable "no handler found" errors +logging.getLogger("alembic").addHandler(logging.NullHandler()) + + +try: + import fcntl + import termios + import struct + + ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)) + _h, TERMWIDTH, _hp, _wp = struct.unpack("HHHH", ioctl) + if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty + TERMWIDTH = None +except (ImportError, OSError): + TERMWIDTH = None + + +def write_outstream( + stream: TextIO, *text: Union[str, bytes], quiet: bool = False +) -> None: + if quiet: + return + encoding = getattr(stream, "encoding", "ascii") or "ascii" + for t in text: + if not isinstance(t, bytes): + t = t.encode(encoding, "replace") + t = t.decode(encoding) + try: + stream.write(t) + except OSError: + # suppress "broken pipe" errors. + # no known way to handle this on Python 3 however + # as the exception is "ignored" (noisily) in TextIOWrapper. + break + + +@contextmanager +def status( + status_msg: str, newline: bool = False, quiet: bool = False +) -> Iterator[None]: + msg(status_msg + " ...", newline, flush=True, quiet=quiet) + try: + yield + except: + if not quiet: + write_outstream(sys.stdout, " FAILED\n") + raise + else: + if not quiet: + write_outstream(sys.stdout, " done\n") + + +def err(message: str, quiet: bool = False) -> None: + log.error(message) + msg(f"FAILED: {message}", quiet=quiet) + sys.exit(-1) + + +def obfuscate_url_pw(input_url: str) -> str: + u = url.make_url(input_url) + return sqla_compat.url_render_as_string(u, hide_password=True) # type: ignore # noqa: E501 + + +def warn(msg: str, stacklevel: int = 2) -> None: + warnings.warn(msg, UserWarning, stacklevel=stacklevel) + + +def msg( + msg: str, newline: bool = True, flush: bool = False, quiet: bool = False +) -> None: + if quiet: + return + if TERMWIDTH is None: + write_outstream(sys.stdout, msg) + if newline: + write_outstream(sys.stdout, "\n") + else: + # left indent output lines + lines = textwrap.wrap(msg, TERMWIDTH) + if len(lines) > 1: + for line in lines[0:-1]: + write_outstream(sys.stdout, " ", line, "\n") + write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else "")) + if flush: + sys.stdout.flush() + + +def format_as_comma(value: Optional[Union[str, Iterable[str]]]) -> str: + if value is None: + return "" + elif isinstance(value, str): + return value + elif isinstance(value, Iterable): + return ", ".join(value) + else: + raise ValueError("Don't know how to comma-format %r" % value) diff --git a/venv/lib/python3.12/site-packages/alembic/util/pyfiles.py b/venv/lib/python3.12/site-packages/alembic/util/pyfiles.py new file mode 100644 index 0000000..973bd45 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/alembic/util/pyfiles.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +import atexit +from contextlib import ExitStack +import importlib +import importlib.machinery +import importlib.util +import os +import re +import tempfile +from types import ModuleType +from typing import Any +from typing import Optional + +from mako import exceptions +from mako.template import Template + +from . import compat +from .exc import CommandError + + +def template_to_file( + template_file: str, dest: str, output_encoding: str, **kw: Any +) -> None: + template = Template(filename=template_file) + try: + output = template.render_unicode(**kw).encode(output_encoding) + except: + with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as ntf: + ntf.write( + exceptions.text_error_template() + .render_unicode() + .encode(output_encoding) + ) + fname = ntf.name + raise CommandError( + "Template rendering failed; see %s for a " + "template-oriented traceback." % fname + ) + else: + with open(dest, "wb") as f: + f.write(output) + + +def coerce_resource_to_filename(fname: str) -> str: + """Interpret a filename as either a filesystem location or as a package + resource. + + Names that are non absolute paths and contain a colon + are interpreted as resources and coerced to a file location. + + """ + if not os.path.isabs(fname) and ":" in fname: + tokens = fname.split(":") + + # from https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename # noqa E501 + + file_manager = ExitStack() + atexit.register(file_manager.close) + + ref = compat.importlib_resources.files(tokens[0]) + for tok in tokens[1:]: + ref = ref / tok + fname = file_manager.enter_context( # type: ignore[assignment] + compat.importlib_resources.as_file(ref) + ) + return fname + + +def pyc_file_from_path(path: str) -> Optional[str]: + """Given a python source path, locate the .pyc.""" + + candidate = importlib.util.cache_from_source(path) + if os.path.exists(candidate): + return candidate + + # even for pep3147, fall back to the old way of finding .pyc files, + # to support sourceless operation + filepath, ext = os.path.splitext(path) + for ext in importlib.machinery.BYTECODE_SUFFIXES: + if os.path.exists(filepath + ext): + return filepath + ext + else: + return None + + +def load_python_file(dir_: str, filename: str) -> ModuleType: + """Load a file from the given path as a Python module.""" + + module_id = re.sub(r"\W", "_", filename) + path = os.path.join(dir_, filename) + _, ext = os.path.splitext(filename) + if ext == ".py": + if os.path.exists(path): + module = load_module_py(module_id, path) + else: + pyc_path = pyc_file_from_path(path) + if pyc_path is None: + raise ImportError("Can't find Python file %s" % path) + else: + module = load_module_py(module_id, pyc_path) + elif ext in (".pyc", ".pyo"): + module = load_module_py(module_id, path) + else: + assert False + return module + + +def load_module_py(module_id: str, path: str) -> ModuleType: + spec = importlib.util.spec_from_file_location(module_id, path) + assert spec + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) # type: ignore + return module diff --git a/venv/lib/python3.12/site-packages/alembic/util/sqla_compat.py b/venv/lib/python3.12/site-packages/alembic/util/sqla_compat.py new file mode 100644 index 0000000..8489c19 --- /dev/null +++ b/venv/lib/python3.12/site-packages/alembic/util/sqla_compat.py @@ -0,0 +1,665 @@ +# mypy: allow-untyped-defs, 
allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics + +from __future__ import annotations + +import contextlib +import re +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import Mapping +from typing import Optional +from typing import Protocol +from typing import Set +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy import __version__ +from sqlalchemy import inspect +from sqlalchemy import schema +from sqlalchemy import sql +from sqlalchemy import types as sqltypes +from sqlalchemy.engine import url +from sqlalchemy.schema import CheckConstraint +from sqlalchemy.schema import Column +from sqlalchemy.schema import ForeignKeyConstraint +from sqlalchemy.sql import visitors +from sqlalchemy.sql.base import DialectKWArgs +from sqlalchemy.sql.elements import BindParameter +from sqlalchemy.sql.elements import ColumnClause +from sqlalchemy.sql.elements import quoted_name +from sqlalchemy.sql.elements import TextClause +from sqlalchemy.sql.elements import UnaryExpression +from sqlalchemy.sql.visitors import traverse +from typing_extensions import TypeGuard + +if TYPE_CHECKING: + from sqlalchemy import ClauseElement + from sqlalchemy import Index + from sqlalchemy import Table + from sqlalchemy.engine import Connection + from sqlalchemy.engine import Dialect + from sqlalchemy.engine import Transaction + from sqlalchemy.engine.reflection import Inspector + from sqlalchemy.sql.base import ColumnCollection + from sqlalchemy.sql.compiler import SQLCompiler + from sqlalchemy.sql.dml import Insert + from sqlalchemy.sql.elements import ColumnElement + from sqlalchemy.sql.schema import Constraint + from sqlalchemy.sql.schema import SchemaItem + from sqlalchemy.sql.selectable import Select + from sqlalchemy.sql.selectable import TableClause + +_CE = TypeVar("_CE", bound=Union["ColumnElement[Any]", "SchemaItem"]) + + +class _CompilerProtocol(Protocol): + def __call__(self, element: Any, compiler: Any, **kw: Any) -> str: + ... + + +def _safe_int(value: str) -> Union[int, str]: + try: + return int(value) + except: + return value + + +_vers = tuple( + [_safe_int(x) for x in re.findall(r"(\d+|[abc]\d)", __version__)] +) +sqla_13 = _vers >= (1, 3) +sqla_14 = _vers >= (1, 4) +# https://docs.sqlalchemy.org/en/latest/changelog/changelog_14.html#change-0c6e0cc67dfe6fac5164720e57ef307d +sqla_14_18 = _vers >= (1, 4, 18) +sqla_14_26 = _vers >= (1, 4, 26) +sqla_2 = _vers >= (2,) +sqlalchemy_version = __version__ + +try: + from sqlalchemy.sql.naming import _NONE_NAME as _NONE_NAME # type: ignore[attr-defined] # noqa: E501 +except ImportError: + from sqlalchemy.sql.elements import _NONE_NAME as _NONE_NAME # type: ignore # noqa: E501 + + +class _Unsupported: + "Placeholder for unsupported SQLAlchemy classes" + + +if TYPE_CHECKING: + + def compiles( + element: Type[ClauseElement], *dialects: str + ) -> Callable[[_CompilerProtocol], _CompilerProtocol]: + ... 
+ +else: + from sqlalchemy.ext.compiler import compiles + +try: + from sqlalchemy import Computed as Computed +except ImportError: + if not TYPE_CHECKING: + + class Computed(_Unsupported): + pass + + has_computed = False + has_computed_reflection = False +else: + has_computed = True + has_computed_reflection = _vers >= (1, 3, 16) + +try: + from sqlalchemy import Identity as Identity +except ImportError: + if not TYPE_CHECKING: + + class Identity(_Unsupported): + pass + + has_identity = False +else: + identity_has_dialect_kwargs = issubclass(Identity, DialectKWArgs) + + def _get_identity_options_dict( + identity: Union[Identity, schema.Sequence, None], + dialect_kwargs: bool = False, + ) -> Dict[str, Any]: + if identity is None: + return {} + elif identity_has_dialect_kwargs: + as_dict = identity._as_dict() # type: ignore + if dialect_kwargs: + assert isinstance(identity, DialectKWArgs) + as_dict.update(identity.dialect_kwargs) + else: + as_dict = {} + if isinstance(identity, Identity): + # always=None means something different than always=False + as_dict["always"] = identity.always + if identity.on_null is not None: + as_dict["on_null"] = identity.on_null + # attributes common to Identity and Sequence + attrs = ( + "start", + "increment", + "minvalue", + "maxvalue", + "nominvalue", + "nomaxvalue", + "cycle", + "cache", + "order", + ) + as_dict.update( + { + key: getattr(identity, key, None) + for key in attrs + if getattr(identity, key, None) is not None + } + ) + return as_dict + + has_identity = True + +if sqla_2: + from sqlalchemy.sql.base import _NoneName +else: + from sqlalchemy.util import symbol as _NoneName # type: ignore[assignment] + + +_ConstraintName = Union[None, str, _NoneName] + +_ConstraintNameDefined = Union[str, _NoneName] + + +def constraint_name_defined( + name: _ConstraintName, +) -> TypeGuard[_ConstraintNameDefined]: + return name is _NONE_NAME or isinstance(name, (str, _NoneName)) + + +def constraint_name_string( + name: _ConstraintName, +) -> TypeGuard[str]: + return isinstance(name, str) + + +def constraint_name_or_none( + name: _ConstraintName, +) -> Optional[str]: + return name if constraint_name_string(name) else None + + +AUTOINCREMENT_DEFAULT = "auto" + + +@contextlib.contextmanager +def _ensure_scope_for_ddl( + connection: Optional[Connection], +) -> Iterator[None]: + try: + in_transaction = connection.in_transaction # type: ignore[union-attr] + except AttributeError: + # catch for MockConnection, None + in_transaction = None + pass + + # yield outside the catch + if in_transaction is None: + yield + else: + if not in_transaction(): + assert connection is not None + with connection.begin(): + yield + else: + yield + + +def url_render_as_string(url, hide_password=True): + if sqla_14: + return url.render_as_string(hide_password=hide_password) + else: + return url.__to_string__(hide_password=hide_password) + + +def _safe_begin_connection_transaction( + connection: Connection, +) -> Transaction: + transaction = _get_connection_transaction(connection) + if transaction: + return transaction + else: + return connection.begin() + + +def _safe_commit_connection_transaction( + connection: Connection, +) -> None: + transaction = _get_connection_transaction(connection) + if transaction: + transaction.commit() + + +def _safe_rollback_connection_transaction( + connection: Connection, +) -> None: + transaction = _get_connection_transaction(connection) + if transaction: + transaction.rollback() + + +def _get_connection_in_transaction(connection: Optional[Connection]) -> 
bool: + try: + in_transaction = connection.in_transaction # type: ignore + except AttributeError: + # catch for MockConnection + return False + else: + return in_transaction() + + +def _idx_table_bound_expressions(idx: Index) -> Iterable[ColumnElement[Any]]: + return idx.expressions # type: ignore + + +def _copy(schema_item: _CE, **kw) -> _CE: + if hasattr(schema_item, "_copy"): + return schema_item._copy(**kw) + else: + return schema_item.copy(**kw) # type: ignore[union-attr] + + +def _get_connection_transaction( + connection: Connection, +) -> Optional[Transaction]: + if sqla_14: + return connection.get_transaction() + else: + r = connection._root # type: ignore[attr-defined] + return r._Connection__transaction + + +def _create_url(*arg, **kw) -> url.URL: + if hasattr(url.URL, "create"): + return url.URL.create(*arg, **kw) + else: + return url.URL(*arg, **kw) + + +def _connectable_has_table( + connectable: Connection, tablename: str, schemaname: Union[str, None] +) -> bool: + if sqla_14: + return inspect(connectable).has_table(tablename, schemaname) + else: + return connectable.dialect.has_table( + connectable, tablename, schemaname + ) + + +def _exec_on_inspector(inspector, statement, **params): + if sqla_14: + with inspector._operation_context() as conn: + return conn.execute(statement, params) + else: + return inspector.bind.execute(statement, params) + + +def _nullability_might_be_unset(metadata_column): + if not sqla_14: + return metadata_column.nullable + else: + from sqlalchemy.sql import schema + + return ( + metadata_column._user_defined_nullable is schema.NULL_UNSPECIFIED + ) + + +def _server_default_is_computed(*server_default) -> bool: + if not has_computed: + return False + else: + return any(isinstance(sd, Computed) for sd in server_default) + + +def _server_default_is_identity(*server_default) -> bool: + if not sqla_14: + return False + else: + return any(isinstance(sd, Identity) for sd in server_default) + + +def _table_for_constraint(constraint: Constraint) -> Table: + if isinstance(constraint, ForeignKeyConstraint): + table = constraint.parent + assert table is not None + return table # type: ignore[return-value] + else: + return constraint.table + + +def _columns_for_constraint(constraint): + if isinstance(constraint, ForeignKeyConstraint): + return [fk.parent for fk in constraint.elements] + elif isinstance(constraint, CheckConstraint): + return _find_columns(constraint.sqltext) + else: + return list(constraint.columns) + + +def _reflect_table(inspector: Inspector, table: Table) -> None: + if sqla_14: + return inspector.reflect_table(table, None) + else: + return inspector.reflecttable( # type: ignore[attr-defined] + table, None + ) + + +def _resolve_for_variant(type_, dialect): + if _type_has_variants(type_): + base_type, mapping = _get_variant_mapping(type_) + return mapping.get(dialect.name, base_type) + else: + return type_ + + +if hasattr(sqltypes.TypeEngine, "_variant_mapping"): + + def _type_has_variants(type_): + return bool(type_._variant_mapping) + + def _get_variant_mapping(type_): + return type_, type_._variant_mapping + +else: + + def _type_has_variants(type_): + return type(type_) is sqltypes.Variant + + def _get_variant_mapping(type_): + return type_.impl, type_.mapping + + +def _fk_spec(constraint: ForeignKeyConstraint) -> Any: + if TYPE_CHECKING: + assert constraint.columns is not None + assert constraint.elements is not None + assert isinstance(constraint.parent, Table) + + source_columns = [ + constraint.columns[key].name for key in 
constraint.column_keys + ] + + source_table = constraint.parent.name + source_schema = constraint.parent.schema + target_schema = constraint.elements[0].column.table.schema + target_table = constraint.elements[0].column.table.name + target_columns = [element.column.name for element in constraint.elements] + ondelete = constraint.ondelete + onupdate = constraint.onupdate + deferrable = constraint.deferrable + initially = constraint.initially + return ( + source_schema, + source_table, + source_columns, + target_schema, + target_table, + target_columns, + onupdate, + ondelete, + deferrable, + initially, + ) + + +def _fk_is_self_referential(constraint: ForeignKeyConstraint) -> bool: + spec = constraint.elements[0]._get_colspec() + tokens = spec.split(".") + tokens.pop(-1) # colname + tablekey = ".".join(tokens) + assert constraint.parent is not None + return tablekey == constraint.parent.key + + +def _is_type_bound(constraint: Constraint) -> bool: + # this deals with SQLAlchemy #3260, don't copy CHECK constraints + # that will be generated by the type. + # new feature added for #3260 + return constraint._type_bound + + +def _find_columns(clause): + """locate Column objects within the given expression.""" + + cols: Set[ColumnElement[Any]] = set() + traverse(clause, {}, {"column": cols.add}) + return cols + + +def _remove_column_from_collection( + collection: ColumnCollection, column: Union[Column[Any], ColumnClause[Any]] +) -> None: + """remove a column from a ColumnCollection.""" + + # workaround for older SQLAlchemy, remove the + # same object that's present + assert column.key is not None + to_remove = collection[column.key] + + # SQLAlchemy 2.0 will use more ReadOnlyColumnCollection + # (renamed from ImmutableColumnCollection) + if hasattr(collection, "_immutable") or hasattr(collection, "_readonly"): + collection._parent.remove(to_remove) + else: + collection.remove(to_remove) + + +def _textual_index_column( + table: Table, text_: Union[str, TextClause, ColumnElement[Any]] +) -> Union[ColumnElement[Any], Column[Any]]: + """a workaround for the Index construct's severe lack of flexibility""" + if isinstance(text_, str): + c = Column(text_, sqltypes.NULLTYPE) + table.append_column(c) + return c + elif isinstance(text_, TextClause): + return _textual_index_element(table, text_) + elif isinstance(text_, _textual_index_element): + return _textual_index_column(table, text_.text) + elif isinstance(text_, sql.ColumnElement): + return _copy_expression(text_, table) + else: + raise ValueError("String or text() construct expected") + + +def _copy_expression(expression: _CE, target_table: Table) -> _CE: + def replace(col): + if ( + isinstance(col, Column) + and col.table is not None + and col.table is not target_table + ): + if col.name in target_table.c: + return target_table.c[col.name] + else: + c = _copy(col) + target_table.append_column(c) + return c + else: + return None + + return visitors.replacement_traverse( # type: ignore[call-overload] + expression, {}, replace + ) + + +class _textual_index_element(sql.ColumnElement): + """Wrap around a sqlalchemy text() construct in such a way that + we appear like a column-oriented SQL expression to an Index + construct. + + The issue here is that currently the Postgresql dialect, the biggest + recipient of functional indexes, keys all the index expressions to + the corresponding column expressions when rendering CREATE INDEX, + so the Index we create here needs to have a .columns collection that + is the same length as the .expressions collection. 
Ultimately + SQLAlchemy should support text() expressions in indexes. + + See SQLAlchemy issue 3174. + + """ + + __visit_name__ = "_textual_idx_element" + + def __init__(self, table: Table, text: TextClause) -> None: + self.table = table + self.text = text + self.key = text.text + self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE) + table.append_column(self.fake_column) + + def get_children(self): + return [self.fake_column] + + +@compiles(_textual_index_element) +def _render_textual_index_column( + element: _textual_index_element, compiler: SQLCompiler, **kw +) -> str: + return compiler.process(element.text, **kw) + + +class _literal_bindparam(BindParameter): + pass + + +@compiles(_literal_bindparam) +def _render_literal_bindparam( + element: _literal_bindparam, compiler: SQLCompiler, **kw +) -> str: + return compiler.render_literal_bindparam(element, **kw) + + +def _column_kwargs(col: Column) -> Mapping: + if sqla_13: + return col.kwargs + else: + return {} + + +def _get_constraint_final_name( + constraint: Union[Index, Constraint], dialect: Optional[Dialect] +) -> Optional[str]: + if constraint.name is None: + return None + assert dialect is not None + if sqla_14: + # for SQLAlchemy 1.4 we would like to have the option to expand + # the use of "deferred" names for constraints as well as to have + # some flexibility with "None" name and similar; make use of new + # SQLAlchemy API to return what would be the final compiled form of + # the name for this dialect. + return dialect.identifier_preparer.format_constraint( + constraint, _alembic_quote=False + ) + else: + # prior to SQLAlchemy 1.4, work around quoting logic to get at the + # final compiled name without quotes. + if hasattr(constraint.name, "quote"): + # might be quoted_name, might be truncated_name, keep it the + # same + quoted_name_cls: type = type(constraint.name) + else: + quoted_name_cls = quoted_name + + new_name = quoted_name_cls(str(constraint.name), quote=False) + constraint = constraint.__class__(name=new_name) + + if isinstance(constraint, schema.Index): + # name should not be quoted. + d = dialect.ddl_compiler(dialect, None) # type: ignore[arg-type] + return d._prepared_index_name(constraint) + else: + # name should not be quoted. 
+ return dialect.identifier_preparer.format_constraint(constraint) + + +def _constraint_is_named( + constraint: Union[Constraint, Index], dialect: Optional[Dialect] +) -> bool: + if sqla_14: + if constraint.name is None: + return False + assert dialect is not None + name = dialect.identifier_preparer.format_constraint( + constraint, _alembic_quote=False + ) + return name is not None + else: + return constraint.name is not None + + +def _is_mariadb(mysql_dialect: Dialect) -> bool: + if sqla_14: + return mysql_dialect.is_mariadb # type: ignore[attr-defined] + else: + return bool( + mysql_dialect.server_version_info + and mysql_dialect._is_mariadb # type: ignore[attr-defined] + ) + + +def _mariadb_normalized_version_info(mysql_dialect): + return mysql_dialect._mariadb_normalized_version_info + + +def _insert_inline(table: Union[TableClause, Table]) -> Insert: + if sqla_14: + return table.insert().inline() + else: + return table.insert(inline=True) # type: ignore[call-arg] + + +if sqla_14: + from sqlalchemy import create_mock_engine + + # weird mypy workaround + from sqlalchemy import select as _sa_select + + _select = _sa_select +else: + from sqlalchemy import create_engine + + def create_mock_engine(url, executor, **kw): # type: ignore[misc] + return create_engine( + "postgresql://", strategy="mock", executor=executor + ) + + def _select(*columns, **kw) -> Select: + return sql.select(list(columns), **kw) # type: ignore[call-overload] + + +def is_expression_index(index: Index) -> bool: + for expr in index.expressions: + if is_expression(expr): + return True + return False + + +def is_expression(expr: Any) -> bool: + while isinstance(expr, UnaryExpression): + expr = expr.element + if not isinstance(expr, ColumnClause) or expr.is_literal: + return True + return False diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA new file mode 100644 index 0000000..3ac05cf --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA @@ -0,0 +1,295 @@ +Metadata-Version: 2.3 +Name: annotated-types +Version: 0.7.0 +Summary: Reusable constraint types to use with typing.Annotated +Project-URL: Homepage, https://github.com/annotated-types/annotated-types +Project-URL: Source, https://github.com/annotated-types/annotated-types +Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases +Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds +License-File: LICENSE +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python 
:: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' +Description-Content-Type: text/markdown + +# annotated-types + +[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) +[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types) +[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types) +[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE) + +[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of +adding context-specific metadata to existing types, and specifies that +`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special +logic for `x`. + +This package provides metadata objects which can be used to represent common +constraints such as upper and lower bounds on scalar values and collection sizes, +a `Predicate` marker for runtime checks, and +descriptions of how we intend these metadata to be interpreted. In some cases, +we also note alternative representations which do not require this package. + +## Install + +```bash +pip install annotated-types +``` + +## Examples + +```python +from typing import Annotated +from annotated_types import Gt, Len, Predicate + +class MyClass: + age: Annotated[int, Gt(18)] # Valid: 19, 20, ... + # Invalid: 17, 18, "19", 19.0, ... + # (is_prime is assumed to be defined elsewhere) + factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ... + # Invalid: 4, 8, -2, 5.0, "prime", ... + + my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50] + # Invalid: (1, 2), ["abc"], [0] * 20 +``` + +## Documentation + +_While `annotated-types` avoids runtime checks for performance, users should not +construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`. +Downstream implementors may choose to raise an error, emit a warning, silently ignore +a metadata item, etc., if the metadata objects described below are used with an +incompatible type - or for any other reason!_ + +### Gt, Ge, Lt, Le + +Express inclusive and/or exclusive bounds on orderable values - which may be numbers, +dates, times, strings, sets, etc. Note that the boundary value need not be of the +same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]` +is fine, for example, and implies that the value is an integer x such that `x > 1.5`. + +We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)` +as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on +the `annotated-types` package.
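As a concrete illustration of the bounds described above (the `check_gt` helper is hypothetical, not part of `annotated-types`), a consuming library might enforce `Gt` roughly like this:

```python
from typing import Annotated, get_args, get_origin
from annotated_types import Gt

Age = Annotated[int, Gt(18)]

def check_gt(value, tp):
    # hypothetical consumer: enforce any Gt metadata found on the annotation
    if get_origin(tp) is Annotated:
        for meta in get_args(tp)[1:]:
            if isinstance(meta, Gt) and not value > meta.gt:
                raise ValueError(f"expected a value > {meta.gt}, got {value!r}")
    return value

check_gt(19, Age)    # ok: 19 > 18
# check_gt(18, Age)  # would raise ValueError - the bound is exclusive
```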
+ +To be explicit, these types have the following meanings: + +* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum +* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum +* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum +* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum + +### Interval + +`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single +metadata object. `None` attributes should be ignored, and non-`None` attributes +treated as per the single bounds above. + +### MultipleOf + +`MultipleOf(multiple_of=x)` might be interpreted in two ways: + +1. Python semantics, implying `value % multiple_of == 0`, or +2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1), + where `int(value / multiple_of) == value / multiple_of`. + +We encourage users to be aware of these two common interpretations and their +distinct behaviours, especially since very large or non-integer numbers make +it easy to cause silent data corruption due to floating-point imprecision. + +We encourage libraries to carefully document which interpretation they implement. + +### MinLen, MaxLen, Len + +`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive. + +As well as `Len()` which can optionally include upper and lower bounds, we also +provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)` +and `Len(max_length=y)` respectively. + +`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`. + +Examples of usage: + +* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less +* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less +* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more +* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6 +* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8 + +#### Changed in v0.4.0 + +* `min_inclusive` has been renamed to `min_length`, no change in meaning +* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive** +* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic + meaning of the upper bound in slices vs. `Len` + +See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion. + +### Timezone + +`Timezone` can be used with a `datetime` or a `time` to express which timezones +are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime. +`Timezone(...)` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis)) +expresses that any timezone-aware datetime is allowed. You may also pass a specific +timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) +object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only +allow a specific timezone, though we note that this is often a symptom of fragile design. + +#### Changed in v0.x.x + +* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of + `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
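A short sketch of the forms described above (the type aliases are illustrative only): `Len` unpacks into its single-bound parts, and `Timezone` accepts `None`, a literal ellipsis, or a concrete timezone:

```python
from datetime import datetime, timezone
from typing import Annotated
from annotated_types import Len, Timezone

print(list(Len(4, 6)))  # [MinLen(min_length=4), MaxLen(max_length=6)]

NaiveDt = Annotated[datetime, Timezone(None)]         # naive values only
AwareDt = Annotated[datetime, Timezone(...)]          # any aware value
UtcDt = Annotated[datetime, Timezone(timezone.utc)]   # one specific zone
```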
+
+### Unit
+
+`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
+a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
+would be a float representing a velocity in meters per second.
+
+Please note that `annotated_types` itself makes no attempt to parse or validate
+the unit string in any way. That is left entirely to downstream libraries,
+such as [`pint`](https://pint.readthedocs.io) or
+[`astropy.units`](https://docs.astropy.org/en/stable/units/).
+
+An example of how a library might use this metadata:
+
+```python
+from annotated_types import Unit
+from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
+
+# given a type annotated with a unit:
+Meters = Annotated[float, Unit("m")]
+
+
+# you can cast the annotation to a specific unit type with any
+# callable that accepts a string and returns the desired type
+T = TypeVar("T")
+def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
+    if get_origin(tp) is Annotated:
+        for arg in get_args(tp):
+            if isinstance(arg, Unit):
+                return unit_cls(arg.unit)
+    return None
+
+
+# using `pint`
+import pint
+pint_unit = cast_unit(Meters, pint.Unit)
+
+
+# using `astropy.units`
+import astropy.units as u
+astropy_unit = cast_unit(Meters, u.Unit)
+```
+
+### Predicate
+
+`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
+Users should prefer the statically inspectable metadata above, but if you need
+the full power and flexibility of arbitrary runtime predicates... here it is.
+
+For some common constraints, we provide generic types:
+
+* `IsLower = Annotated[T, Predicate(str.islower)]`
+* `IsUpper = Annotated[T, Predicate(str.isupper)]`
+* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
+* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
+* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
+* `IsNan = Annotated[T, Predicate(math.isnan)]`
+* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
+* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
+* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
+
+so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
+(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
+
+Some libraries might have special logic to handle known or understandable predicates,
+for example by checking for `str.isdigit` and using its presence to both call custom
+logic to enforce digit-only strings, and customise some generated external schema.
+Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
+favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
+
+To enable basic negation of commonly used predicates like `math.isnan` without
+introducing indirection that would make it impossible for implementers to introspect
+the predicate, we provide a `Not` wrapper that negates the wrapped predicate in an
+introspectable manner. Several of the predicates listed above are created in this way.
+
+We do not specify what behaviour should be expected for predicates that raise
+an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+skip invalid constraints, or statically raise an error; or it might try calling it
+and then propagate or discard the resulting
+`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
+exception. We encourage libraries to document the behaviour they choose.
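+
+As a minimal sketch of one such documented choice, here is an editorial example of a
+consumer that calls each predicate and lets any exception propagate (`check` is a
+hypothetical helper, not part of this package):
+
+```python
+import math
+from typing import Annotated, get_args, get_origin
+from annotated_types import Not, Predicate
+
+def check(tp: object, value: object) -> bool:
+    # Evaluate every Predicate found in the Annotated metadata;
+    # exceptions raised by a predicate simply propagate.
+    assert get_origin(tp) is Annotated
+    return all(
+        meta.func(value)
+        for meta in get_args(tp)[1:]
+        if isinstance(meta, Predicate)
+    )
+
+IsNotNan = Annotated[float, Predicate(Not(math.isnan))]
+assert check(IsNotNan, 1.23)
+assert not check(IsNotNan, math.nan)
+```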
+
+### Doc
+
+`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
+
+It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
+
+It returns a `DocInfo` instance with a single attribute `documentation` containing the value passed to `doc()`.
+
+This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
+
+### Integrating downstream types with `GroupedMetadata`
+
+Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
+This can help reduce verbosity and cognitive overhead for users.
+For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
+
+```python
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Iterator
+from annotated_types import GroupedMetadata, Ge, doc
+
+@dataclass
+class Field(GroupedMetadata):
+    ge: int | None = None
+    description: str | None = None
+
+    def __iter__(self) -> Iterator[object]:
+        # Iterating over a GroupedMetadata object should yield annotated-types
+        # constraint metadata objects which describe it as fully as possible,
+        # and may include other unknown objects too.
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.description is not None:
+            # doc() wraps the description in a DocInfo metadata object
+            yield doc(self.description)
+```
+
+Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
+
+Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
+
+Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Ge`, `Lt`, and `Le`, so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
+
+### Consuming metadata
+
+We do not intend to be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations, see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
+
+It is up to the implementer to determine how this metadata is used.
+You could use the metadata for runtime type checking, for generating schemas, or for generating example data, among other use cases.
+
+## Design & History
+
+This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
+and Hypothesis, with the goal of making it as easy as possible for end-users to
+provide more informative annotations for use by runtime libraries.
+
+It is deliberately minimal and, following PEP-593, leaves considerable discretion
+to downstream libraries in what (if anything!) they choose to support.
Nonetheless, we expect +that staying simple and covering _only_ the most common use-cases will give users +and maintainers the best experience we can. If you'd like more constraints for your +types - follow our lead, by defining them and documenting them downstream! diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD new file mode 100644 index 0000000..7045729 --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD @@ -0,0 +1,10 @@ +annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046 +annotated_types-0.7.0.dist-info/RECORD,, +annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 +annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 +annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819 +annotated_types/__pycache__/__init__.cpython-312.pyc,, +annotated_types/__pycache__/test_cases.cpython-312.pyc,, +annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421 diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL new file mode 100644 index 0000000..516596c --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.24.2 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..d99323a --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2022 the contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
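To make the `GroupedMetadata` unpacking contract described in the README above
concrete, here is a minimal consumer sketch (an editorial example; `flatten_metadata`
is a hypothetical helper, not part of the annotated-types API):

```python
from typing import Annotated, get_args, get_origin
from annotated_types import GroupedMetadata, Gt, Interval, Lt

def flatten_metadata(tp: object) -> list[object]:
    """Collect Annotated[...] metadata, unpacking any GroupedMetadata."""
    assert get_origin(tp) is Annotated
    flat: list[object] = []
    for meta in get_args(tp)[1:]:
        if isinstance(meta, GroupedMetadata):
            flat.extend(meta)  # e.g. Interval unpacks into Gt/Ge/Lt/Le
        else:
            flat.append(meta)
    return flat

Price = Annotated[float, Interval(gt=0, lt=100)]
assert flatten_metadata(Price) == [Gt(0), Lt(100)]
```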
diff --git a/venv/lib/python3.12/site-packages/annotated_types/__init__.py b/venv/lib/python3.12/site-packages/annotated_types/__init__.py new file mode 100644 index 0000000..74e0dee --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types/__init__.py @@ -0,0 +1,432 @@ +import math +import sys +import types +from dataclasses import dataclass +from datetime import tzinfo +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union + +if sys.version_info < (3, 8): + from typing_extensions import Protocol, runtime_checkable +else: + from typing import Protocol, runtime_checkable + +if sys.version_info < (3, 9): + from typing_extensions import Annotated, Literal +else: + from typing import Annotated, Literal + +if sys.version_info < (3, 10): + EllipsisType = type(Ellipsis) + KW_ONLY = {} + SLOTS = {} +else: + from types import EllipsisType + + KW_ONLY = {"kw_only": True} + SLOTS = {"slots": True} + + +__all__ = ( + 'BaseMetadata', + 'GroupedMetadata', + 'Gt', + 'Ge', + 'Lt', + 'Le', + 'Interval', + 'MultipleOf', + 'MinLen', + 'MaxLen', + 'Len', + 'Timezone', + 'Predicate', + 'LowerCase', + 'UpperCase', + 'IsDigits', + 'IsFinite', + 'IsNotFinite', + 'IsNan', + 'IsNotNan', + 'IsInfinite', + 'IsNotInfinite', + 'doc', + 'DocInfo', + '__version__', +) + +__version__ = '0.7.0' + + +T = TypeVar('T') + + +# arguments that start with __ are considered +# positional only +# see https://peps.python.org/pep-0484/#positional-only-arguments + + +class SupportsGt(Protocol): + def __gt__(self: T, __other: T) -> bool: + ... + + +class SupportsGe(Protocol): + def __ge__(self: T, __other: T) -> bool: + ... + + +class SupportsLt(Protocol): + def __lt__(self: T, __other: T) -> bool: + ... + + +class SupportsLe(Protocol): + def __le__(self: T, __other: T) -> bool: + ... + + +class SupportsMod(Protocol): + def __mod__(self: T, __other: T) -> T: + ... + + +class SupportsDiv(Protocol): + def __div__(self: T, __other: T) -> T: + ... + + +class BaseMetadata: + """Base class for all metadata. + + This exists mainly so that implementers + can do `isinstance(..., BaseMetadata)` while traversing field annotations. + """ + + __slots__ = () + + +@dataclass(frozen=True, **SLOTS) +class Gt(BaseMetadata): + """Gt(gt=x) implies that the value must be greater than x. + + It can be used with any type that supports the ``>`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + gt: SupportsGt + + +@dataclass(frozen=True, **SLOTS) +class Ge(BaseMetadata): + """Ge(ge=x) implies that the value must be greater than or equal to x. + + It can be used with any type that supports the ``>=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + ge: SupportsGe + + +@dataclass(frozen=True, **SLOTS) +class Lt(BaseMetadata): + """Lt(lt=x) implies that the value must be less than x. + + It can be used with any type that supports the ``<`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + lt: SupportsLt + + +@dataclass(frozen=True, **SLOTS) +class Le(BaseMetadata): + """Le(le=x) implies that the value must be less than or equal to x. + + It can be used with any type that supports the ``<=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + le: SupportsLe + + +@runtime_checkable +class GroupedMetadata(Protocol): + """A grouping of multiple objects, like typing.Unpack. + + `GroupedMetadata` on its own is not metadata and has no meaning. 
+
+    All of the constraints and metadata should be fully expressible
+    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
+
+    Concrete implementations should override `GroupedMetadata.__iter__()`
+    to add their own metadata.
+    For example:
+
+    >>> @dataclass
+    >>> class Field(GroupedMetadata):
+    >>>     gt: float | None = None
+    >>>     description: str | None = None
+    ...
+    >>>     def __iter__(self) -> Iterable[object]:
+    >>>         if self.gt is not None:
+    >>>             yield Gt(self.gt)
+    >>>         if self.description is not None:
+    >>>             yield Description(self.description)
+
+    Also see the implementation of `Interval` below for an example.
+
+    Parsers should recognize this and unpack it so that it can be used
+    both with and without unpacking:
+
+    - `Annotated[int, Field(...)]` (parser must unpack Field)
+    - `Annotated[int, *Field(...)]` (PEP-646)
+    """  # noqa: trailing-whitespace
+
+    @property
+    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
+        return True
+
+    def __iter__(self) -> Iterator[object]:
+        ...
+
+    if not TYPE_CHECKING:
+        __slots__ = ()  # allow subclasses to use slots
+
+        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
+            # Basic ABC like functionality without the complexity of an ABC
+            super().__init_subclass__(*args, **kwargs)
+            if cls.__iter__ is GroupedMetadata.__iter__:
+                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
+
+        def __iter__(self) -> Iterator[object]:  # noqa: F811
+            raise NotImplementedError  # more helpful than "None has no attribute..." type errors
+
+
+@dataclass(frozen=True, **KW_ONLY, **SLOTS)
+class Interval(GroupedMetadata):
+    """Interval can express inclusive or exclusive bounds with a single object.
+
+    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
+    are interpreted the same way as the single-bound constraints.
+    """
+
+    gt: Union[SupportsGt, None] = None
+    ge: Union[SupportsGe, None] = None
+    lt: Union[SupportsLt, None] = None
+    le: Union[SupportsLe, None] = None
+
+    def __iter__(self) -> Iterator[BaseMetadata]:
+        """Unpack an Interval into zero or more single-bounds."""
+        if self.gt is not None:
+            yield Gt(self.gt)
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.lt is not None:
+            yield Lt(self.lt)
+        if self.le is not None:
+            yield Le(self.le)
+
+
+@dataclass(frozen=True, **SLOTS)
+class MultipleOf(BaseMetadata):
+    """MultipleOf(multiple_of=x) might be interpreted in two ways:
+
+    1. Python semantics, implying ``value % multiple_of == 0``, or
+    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
+
+    We encourage users to be aware of these two common interpretations,
+    and libraries to carefully document which they implement.
+    """
+
+    multiple_of: Union[SupportsDiv, SupportsMod]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MinLen(BaseMetadata):
+    """
+    MinLen() implies minimum inclusive length,
+    e.g. ``len(value) >= min_length``.
+    """
+
+    min_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MaxLen(BaseMetadata):
+    """
+    MaxLen() implies maximum inclusive length,
+    e.g. ``len(value) <= max_length``.
+    """
+
+    max_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Len(GroupedMetadata):
+    """
+    Len() implies that ``min_length <= len(value) <= max_length``.
+
+    Upper bound may be omitted or ``None`` to indicate no upper length bound.
+    """
+
+    min_length: Annotated[int, Ge(0)] = 0
+    max_length: Optional[Annotated[int, Ge(0)]] = None
+
+    def __iter__(self) -> Iterator[BaseMetadata]:
+        """Unpack a Len into zero or more single-bounds."""
+        if self.min_length > 0:
+            yield MinLen(self.min_length)
+        if self.max_length is not None:
+            yield MaxLen(self.max_length)
+
+
+@dataclass(frozen=True, **SLOTS)
+class Timezone(BaseMetadata):
+    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
+
+    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
+    ``Timezone(...)`` (the ellipsis literal) expresses that the datetime must be
+    tz-aware but any timezone is allowed.
+
+    You may also pass a specific timezone string or tzinfo object such as
+    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
+    you only allow a specific timezone, though we note that this is often
+    a symptom of poor design.
+    """
+
+    tz: Union[str, tzinfo, EllipsisType, None]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Unit(BaseMetadata):
+    """Indicates that the value is a physical quantity with the specified unit.
+
+    It is intended for usage with numeric types, where the value represents the
+    magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
+    or ``speed: Annotated[float, Unit('m/s')]``.
+
+    Interpretation of the unit string is left to the discretion of the consumer.
+    It is suggested to follow conventions established by python libraries that work
+    with physical quantities, such as
+
+    - ``pint``: https://pint.readthedocs.io
+    - ``astropy.units``: https://docs.astropy.org/en/stable/units/
+
+    For indicating a quantity with a certain dimensionality but without a specific unit
+    it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
+    Note, however, ``annotated_types`` itself makes no use of the unit string.
+    """
+
+    unit: str
+
+
+@dataclass(frozen=True, **SLOTS)
+class Predicate(BaseMetadata):
+    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
+
+    Users should prefer statically inspectable metadata, but if you need the full
+    power and flexibility of arbitrary runtime predicates... here it is.
+
+    We provide a few predefined predicates for common string constraints:
+    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
+    ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
+    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
+
+    Some libraries might have special logic to handle certain predicates, e.g. by
+    checking for `str.isdigit` and using its presence to both call custom logic to
+    enforce digit-only strings, and customise some generated external schema.
+
+    We do not specify what behaviour should be expected for predicates that raise
+    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+    skip invalid constraints, or statically raise an error; or it might try calling it
+    and then propagate or discard the resulting exception.
+    """
+
+    func: Callable[[Any], bool]
+
+    def __repr__(self) -> str:
+        if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
+            return f"{self.__class__.__name__}({self.func!r})"
+        if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
+            namespace := getattr(self.func.__self__, "__name__", None)
+        ):
+            return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
+        if isinstance(self.func, type(str.isascii)):  # method descriptor
+            return f"{self.__class__.__name__}({self.func.__qualname__})"
+        return f"{self.__class__.__name__}({self.func.__name__})"
+
+
+@dataclass
+class Not:
+    func: Callable[[Any], bool]
+
+    def __call__(self, __v: Any) -> bool:
+        return not self.func(__v)
+
+
+_StrType = TypeVar("_StrType", bound=str)
+
+LowerCase = Annotated[_StrType, Predicate(str.islower)]
+"""
+Return True if the string is a lowercase string, False otherwise.
+
+A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
+"""  # noqa: E501
+UpperCase = Annotated[_StrType, Predicate(str.isupper)]
+"""
+Return True if the string is an uppercase string, False otherwise.
+
+A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
+"""  # noqa: E501
+IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
+IsDigits = IsDigit  # type: ignore  # plural for backwards compatibility, see #63
+"""
+Return True if the string is a digit string, False otherwise.
+
+A string is a digit string if all characters in the string are digits and there is at least one character in the string.
+"""  # noqa: E501
+IsAscii = Annotated[_StrType, Predicate(str.isascii)]
+"""
+Return True if all characters in the string are ASCII, False otherwise.
+
+ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
+"""
+
+_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
+IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
+"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
+IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
+"""Return True if x is an infinity or a NaN, and False otherwise."""
+IsNan = Annotated[_NumericType, Predicate(math.isnan)]
+"""Return True if x is a NaN (not a number), and False otherwise."""
+IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
+"""Return True if x is anything but NaN (not a number), and False otherwise."""
+IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
+"""Return True if x is a positive or negative infinity, and False otherwise."""
+IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
+"""Return True if x is neither positive nor negative infinity, and False otherwise."""
+
+try:
+    from typing_extensions import DocInfo, doc  # type: ignore [attr-defined]
+except ImportError:
+
+    @dataclass(frozen=True, **SLOTS)
+    class DocInfo:  # type: ignore [no-redef]
+        """
+        The return value of doc(), mainly to be used by tools that want to extract the
+        Annotated documentation at runtime.
+        """
+
+        documentation: str
+        """The documentation string passed to doc()."""
+
+    def doc(
+        documentation: str,
+    ) -> DocInfo:
+        """
+        Add documentation to a type annotation inside of Annotated.
+
+        For example:
+
+        >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
+ """ + return DocInfo(documentation) diff --git a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..acc7a23 Binary files /dev/null and b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc new file mode 100644 index 0000000..a2e9228 Binary files /dev/null and b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/annotated_types/py.typed b/venv/lib/python3.12/site-packages/annotated_types/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/annotated_types/test_cases.py b/venv/lib/python3.12/site-packages/annotated_types/test_cases.py new file mode 100644 index 0000000..d9164d6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/annotated_types/test_cases.py @@ -0,0 +1,151 @@ +import math +import sys +from datetime import date, datetime, timedelta, timezone +from decimal import Decimal +from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple + +if sys.version_info < (3, 9): + from typing_extensions import Annotated +else: + from typing import Annotated + +import annotated_types as at + + +class Case(NamedTuple): + """ + A test case for `annotated_types`. + """ + + annotation: Any + valid_cases: Iterable[Any] + invalid_cases: Iterable[Any] + + +def cases() -> Iterable[Case]: + # Gt, Ge, Lt, Le + yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Gt(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(date(2000, 1, 1))], + [date(2000, 1, 2), date(2000, 1, 3)], + [date(2000, 1, 1), date(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(Decimal('1.123'))], + [Decimal('1.1231'), Decimal('123')], + [Decimal('1.123'), Decimal('0')], + ) + + yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) + yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Ge(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(1998, 1, 1), datetime(1999, 12, 31)], + ) + + yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) + yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Lt(datetime(2000, 1, 1))], + [datetime(1999, 12, 31), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) + yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Le(datetime(2000, 1, 1))], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + # Interval + yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) + yield Case(Annotated[float, 
at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) + yield Case( + Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(2000, 1, 4)], + ) + + yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) + yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) + + # lengths + + yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) + yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) + yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) + yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) + + yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) + yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) + yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) + yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) + + yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) + yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) + + yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) + yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) + yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) + + # Timezone + + yield Case( + Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] + ) + yield Case( + Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] + ) + yield Case( + Annotated[datetime, at.Timezone(timezone.utc)], + [datetime(2000, 1, 1, tzinfo=timezone.utc)], + [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], + ) + yield Case( + Annotated[datetime, at.Timezone('Europe/London')], + [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], + [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], + ) + + # Quantity + + yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m')) + + # predicate types + + yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) + yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) + yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2']) + yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) + + yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) + + yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) + yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) + yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) + yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) + yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) + yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) + + # check stacked predicates + yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan]) + + # doc + yield Case(Annotated[int, at.doc("A number")], [1, 2], []) + + # custom GroupedMetadata + class 
MyCustomGroupedMetadata(at.GroupedMetadata): + def __iter__(self) -> Iterator[at.Predicate]: + yield at.Predicate(lambda x: float(x).is_integer()) + + yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/METADATA new file mode 100644 index 0000000..0552dea --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.4 +Name: anyio +Version: 4.12.0 +Summary: High-level concurrency and networking framework on top of asyncio or Trio +Author-email: Alex Grönholm +License-Expression: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11" +Requires-Dist: idna>=2.8 +Requires-Dist: typing_extensions>=4.5; python_version < "3.13" +Provides-Extra: trio +Requires-Dist: trio>=0.32.0; python_version >= "3.10" and extra == "trio" +Requires-Dist: trio>=0.31.0; python_version < "3.10" and extra == "trio" +Dynamic: license-file + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of Trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +Trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. 
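+
+As a minimal sketch (an editorial example, not from the upstream README), the same
+task-group code runs unchanged on either backend:
+
+.. code-block:: python
+
+    import anyio
+
+    async def worker(name: str, delay: float) -> None:
+        await anyio.sleep(delay)
+        print(name, "done")
+
+    async def main() -> None:
+        # every task started in this block finishes (or is cancelled)
+        # before the block exits (Trio-style structured concurrency)
+        async with anyio.create_task_group() as tg:
+            tg.start_soon(worker, "a", 0.1)
+            tg.start_soon(worker, "b", 0.2)
+
+    anyio.run(main)                  # asyncio backend (the default)
+    anyio.run(main, backend="trio")  # identical code on Trio (needs trio installed)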
+ +To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it +`here `_. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Subinterpreter support for code parallelization (on Python 3.13 and later) +* Asynchronous file I/O (using worker threads) +* Signal handling +* Asynchronous version of the functools_ module + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _Trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _functools: https://docs.python.org/3/library/functools.html +.. _Hypothesis: https://hypothesis.works/ diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/RECORD new file mode 100644 index 0000000..c203c32 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/RECORD @@ -0,0 +1,92 @@ +anyio-4.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.12.0.dist-info/METADATA,sha256=rte2_C2hYKP9_iVMFYogSzBxdHBzwY45S1TrLiBsxdk,4277 +anyio-4.12.0.dist-info/RECORD,, +anyio-4.12.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +anyio-4.12.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.12.0.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.12.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=7iDVqMUprUuKNY91FuoKqayAhR-OY136YDPI6P78HHk,6170 +anyio/__pycache__/__init__.cpython-312.pyc,, +anyio/__pycache__/from_thread.cpython-312.pyc,, +anyio/__pycache__/functools.cpython-312.pyc,, +anyio/__pycache__/lowlevel.cpython-312.pyc,, +anyio/__pycache__/pytest_plugin.cpython-312.pyc,, +anyio/__pycache__/to_interpreter.cpython-312.pyc,, +anyio/__pycache__/to_process.cpython-312.pyc,, +anyio/__pycache__/to_thread.cpython-312.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-312.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,, +anyio/_backends/__pycache__/_trio.cpython-312.pyc,, +anyio/_backends/_asyncio.py,sha256=w6gCSMs_2D1doKVtzi32bOloBl1df-IHubl8-Vks908,99656 +anyio/_backends/_trio.py,sha256=ScNVMQB0iiuJMAon1epQCVOVbIbf-Lxnfb5OxujzMok,42398 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-312.pyc,, 
+anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc,, +anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-312.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-312.pyc,, +anyio/_core/__pycache__/_fileio.cpython-312.pyc,, +anyio/_core/__pycache__/_resources.cpython-312.pyc,, +anyio/_core/__pycache__/_signals.cpython-312.pyc,, +anyio/_core/__pycache__/_sockets.cpython-312.pyc,, +anyio/_core/__pycache__/_streams.cpython-312.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-312.pyc,, +anyio/_core/__pycache__/_tasks.cpython-312.pyc,, +anyio/_core/__pycache__/_tempfile.cpython-312.pyc,, +anyio/_core/__pycache__/_testing.cpython-312.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-312.pyc,, +anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626 +anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215 +anyio/_core/_eventloop.py,sha256=xsoYgHIddNYusTqAFDVmcvpjHKJFmdgtDcAjpN3JEWQ,6261 +anyio/_core/_exceptions.py,sha256=fR2SvRUBYVHvolNKbzWSLt8FC_5NFB2OAzGD738fD8Q,4257 +anyio/_core/_fileio.py,sha256=uc7t10Vb-If7GbdWM_zFf-ajUe6uek63fSt7IBLlZW0,25731 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905 +anyio/_core/_sockets.py,sha256=aTbgMr0qPmBPfrapxLykyajsmS7IAerhW9_Qk5r5E18,34311 +anyio/_core/_streams.py,sha256=FczFwIgDpnkK0bODWJXMpsUJYdvAD04kaUaGzJU8DK0,1806 +anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047 +anyio/_core/_synchronization.py,sha256=SY3nsr1ZZyDrjamsOVoYcvj-x6d_AR13Cu5lZecG0gY,20894 +anyio/_core/_tasks.py,sha256=km6hVE1fsuIenya3MDud8KP6-J_bNzlgYC10wUxI7iA,4880 +anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697 +anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118 +anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 +anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869 +anyio/abc/__pycache__/__init__.cpython-312.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-312.pyc,, +anyio/abc/__pycache__/_resources.cpython-312.pyc,, +anyio/abc/__pycache__/_sockets.cpython-312.pyc,, +anyio/abc/__pycache__/_streams.cpython-312.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,, +anyio/abc/__pycache__/_tasks.cpython-312.pyc,, +anyio/abc/__pycache__/_testing.cpython-312.pyc,, +anyio/abc/_eventloop.py,sha256=GTZbdItBHcj_b-8K2XylET2-bBYLZ3XjW4snY7vK7LE,10900 +anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 +anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258 +anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721 +anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 +anyio/from_thread.py,sha256=-YZOTpu9WVHtAsMxQGIOaHMjaDRNeKQilx6Nn2qDU-o,19017 +anyio/functools.py,sha256=tIWQ90cuLMxfJIpdBfFY3W3CC1zqFCRAyR3DxKc0Xlo,10061 +anyio/lowlevel.py,sha256=NnPYQ6tWDzLRwpalX2CvsbkXkTeasbJcL52gPopWdYg,5048 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=3jAFQn0jv_pyoWE2GBBlHaj9sqXj4e8vob0_hgrsXE8,10244 
+anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-312.pyc,, +anyio/streams/__pycache__/buffered.cpython-312.pyc,, +anyio/streams/__pycache__/file.cpython-312.pyc,, +anyio/streams/__pycache__/memory.cpython-312.pyc,, +anyio/streams/__pycache__/stapled.cpython-312.pyc,, +anyio/streams/__pycache__/text.cpython-312.pyc,, +anyio/streams/__pycache__/tls.cpython-312.pyc,, +anyio/streams/buffered.py,sha256=2R3PeJhe4EXrdYqz44Y6-Eg9R6DrmlsYrP36Ir43-po,6263 +anyio/streams/file.py,sha256=4WZ7XGz5WNu39FQHvqbe__TQ0HDP9OOhgO1mk9iVpVU,4470 +anyio/streams/memory.py,sha256=F0zwzvFJKAhX_LRZGoKzzqDC2oMM-f-yyTBrEYEGOaU,10740 +anyio/streams/stapled.py,sha256=T8Xqwf8K6EgURPxbt1N4i7A8BAk-gScv-GRhjLXIf_o,4390 +anyio/streams/text.py,sha256=BcVAGJw1VRvtIqnv-o0Rb0pwH7p8vwlvl21xHq522ag,5765 +anyio/streams/tls.py,sha256=Jpxy0Mfbcp1BxHCwE-YjSSFaLnIBbnnwur-excYThs4,15368 +anyio/to_interpreter.py,sha256=_mLngrMy97TMR6VbW4Y6YzDUk9ZuPcQMPlkuyRh3C9k,7100 +anyio/to_process.py,sha256=cEyYUgb8LJVRJCfs6rK3aEM_T3k2gEmhl0nBjEvflOk,9687 +anyio/to_thread.py,sha256=tXQPvHohvQ2Vrw2pBtdzkRPNV7u3H2_UDbvwL2u_R7k,2465 diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/entry_points.txt new file mode 100644 index 0000000..44dd9bd --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..104eebf --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
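As a minimal sketch of the pytest plugin registered via ``entry_points.txt`` above
(an editorial example; it assumes ``pytest`` and ``anyio`` are installed, and the
file name is illustrative):

.. code-block:: python

    # test_example.py
    import anyio
    import pytest

    pytestmark = pytest.mark.anyio  # run async tests in this module through AnyIO

    async def test_sleep_completes() -> None:
        with anyio.fail_after(1):  # fail the test if it takes longer than 1 second
            await anyio.sleep(0.01)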
diff --git a/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio-4.12.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/venv/lib/python3.12/site-packages/anyio/__init__.py b/venv/lib/python3.12/site-packages/anyio/__init__.py new file mode 100644 index 0000000..d23c5a5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/__init__.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin +from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_available_backends as get_available_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as BrokenResourceError +from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import ConnectionFailed as ConnectionFailed +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead as IncompleteRead +from ._core._exceptions import NoEventLoopError as NoEventLoopError +from ._core._exceptions import RunFinishedError as RunFinishedError +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import TCPConnectable as TCPConnectable +from ._core._sockets import UNIXConnectable as UNIXConnectable +from ._core._sockets import as_connectable as as_connectable +from ._core._sockets import connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket +from ._core._sockets import ( + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, +) +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import notify_closing as notify_closing +from ._core._sockets import 
wait_readable as wait_readable +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._sockets import wait_writable as wait_writable +from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter +from ._core._synchronization import ( + CapacityLimiterStatistics as CapacityLimiterStatistics, +) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile +from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile +from ._core._tempfile import TemporaryDirectory as TemporaryDirectory +from ._core._tempfile import TemporaryFile as TemporaryFile +from ._core._tempfile import gettempdir as gettempdir +from ._core._tempfile import gettempdirb as gettempdirb +from ._core._tempfile import mkdtemp as mkdtemp +from ._core._tempfile import mkstemp as mkstemp +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio."): + __value.__module__ = __name__ + + +del __value + + +def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]: + """Support deprecated aliases.""" + if attr == "BrokenWorkerIntepreter": + import warnings + + warnings.warn( + "The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.", + DeprecationWarning, + stacklevel=2, + ) + return BrokenWorkerInterpreter + + raise AttributeError(f"module {__name__!r} has no attribute {attr!r}") diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..81b1423 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc new file mode 100644 index 0000000..14774ba Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/functools.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/functools.cpython-312.pyc new file mode 100644 index 0000000..806a1f0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/functools.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc new file mode 100644 index 0000000..cc5602c Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc new file mode 100644 index 0000000..8d33a10 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc new file mode 100644 index 0000000..3285747 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc new file mode 100644 index 0000000..ccb4035 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc new file mode 100644 index 0000000..6a45bc9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py b/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e7e40a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc new file mode 100644 index 0000000..d0f8e6c Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc new file mode 100644 index 0000000..fa4a171 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py b/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 0000000..2a4dacc --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,3007 @@ +from __future__ import annotations + +import array +import asyncio +import concurrent.futures +import contextvars +import math +import os +import socket +import sys +import threading +import weakref +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager, suppress +from contextvars import Context, copy_context +from dataclasses import dataclass, field +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + getcoroutinestate, + iscoroutine, +) +from io import IOBase +from os import PathLike +from queue import Queue +from signal import Signals +from socket import AddressFamily, SocketKind +from threading import Thread +from types import CodeType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Optional, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +from .. import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + abc, +) +from .._core._eventloop import ( + claim_worker_thread, + set_current_async_library, + threadlocals, +) +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + RunFinishedError, + WouldBlock, + iterate_exceptions, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..abc._eventloop import StrOrBytesPath +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + 
debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + loop = self._loop + if self._state is not _State.INITIALIZED or loop is None: + return + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. 
embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt # noqa: B904 + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = 
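The Runner above is a line-for-line copy of asyncio.Runner from CPython 3.11, used only on older interpreters; on 3.11+ the stdlib class is imported directly. A minimal sketch of the equivalent stdlib usage (the coroutine is illustrative):

import asyncio

async def greet() -> str:
    await asyncio.sleep(0.1)
    return "hello"

# Unlike asyncio.run(), a Runner keeps one event loop alive across
# several run() calls and closes it only when the context exits.
with asyncio.Runner(debug=False) as runner:
    print(runner.run(greet()))
    print(runner.run(greet()))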
_task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + # The task coro should never be None here, as we never add finished tasks to the + # task list + coro = task.get_coro() + assert coro is not None + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +def is_anyio_cancellation(exc: CancelledError) -> bool: + # Sometimes third party frameworks catch a CancelledError and raise a new one, so as + # a workaround we have to look at the previous ones in __context__ too for a + # matching cancel message + while True: + if ( + exc.args + and isinstance(exc.args[0], str) + and exc.args[0].startswith("Cancelled via cancel scope ") + ): + return True + + if isinstance(exc.__context__, CancelledError): + exc = exc.__context__ + continue + + return False + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancel_reason: str | None = None + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + if sys.version_info >= (3, 11): + self._pending_uncancellations: int | None = 0 + else: + self._pending_uncancellations = None + + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + # If using an eager task factory, the parent scope may not even contain + # the host task + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.discard(host_task) + + self._timeout() + self._active = True + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + del 
exc_tb + + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + try: + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest visible, cancelled parent + # scope if necessary + self._restart_cancellation_in_parent() + + # We only swallow the exception iff it was an AnyIO CancelledError, either + # directly as exc_val or inside an exception group and there are no cancelled + # parent cancel scopes visible to us here + if self._cancel_called and not self._parent_cancellation_is_visible_to_us: + # For each level-cancel() call made on the host task, call uncancel() + while self._pending_uncancellations: + self._host_task.uncancel() + self._pending_uncancellations -= 1 + + # Update cancelled_caught and check for exceptions we must not swallow + cannot_swallow_exc_val = False + if exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError) and is_anyio_cancellation( + exc + ): + self._cancelled_caught = True + else: + cannot_swallow_exc_val = True + + return self._cancelled_caught and not cannot_swallow_exc_val + else: + if self._pending_uncancellations: + assert self._parent_scope is not None + assert self._parent_scope._pending_uncancellations is not None + self._parent_scope._pending_uncancellations += ( + self._pending_uncancellations + ) + self._pending_uncancellations = 0 + + return False + finally: + self._host_task = None + del exc_val + + @property + def _effectively_cancelled(self) -> bool: + cancel_scope: CancelScope | None = self + while cancel_scope is not None: + if cancel_scope._cancel_called: + return True + + if cancel_scope.shield: + return False + + cancel_scope = cancel_scope._parent_scope + + return False + + @property + def _parent_cancellation_is_visible_to_us(self) -> bool: + return ( + self._parent_scope is not None + and not self.shield + and self._parent_scope._effectively_cancelled + ) + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel("deadline exceeded") + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. 
+ + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + should_retry = True + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + task.cancel(origin._cancel_reason) + if ( + task is origin._host_task + and origin._pending_uncancellations is not None + ): + origin._pending_uncancellations += 1 + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. + + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def cancel(self, reason: str | None = None) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._cancel_reason = f"Cancelled via cancel scope {id(self):x}" + if task := current_task(): + self._cancel_reason += f" by {task}" + + if reason: + self._cancel_reason += f"; reason: {reason}" + + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. 
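The CancelScope implementation above is the primitive behind anyio's timeout helpers such as move_on_after() and fail_after(). A minimal usage sketch (the sleep durations are arbitrary):

import anyio

async def main() -> None:
    # move_on_after() enters a CancelScope whose deadline is 0.5 s away
    with anyio.move_on_after(0.5) as scope:
        await anyio.sleep(10)  # cancelled when the deadline fires
    print("timed out:", scope.cancelled_caught)  # True

    # A shielded scope ignores cancellation from enclosing scopes, which
    # is how cleanup code is protected elsewhere in this module
    with anyio.CancelScope(shield=True):
        await anyio.sleep(0.1)

anyio.run(main)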
+ """ + + __slots__ = "parent_id", "cancel_scope", "__weakref__" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + if not self._future.cancelled(): + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +if sys.version_info >= (3, 12): + _eager_task_factory_code: CodeType | None = asyncio.eager_task_factory.__code__ +else: + _eager_task_factory_code = None + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + self._on_completed_fut: asyncio.Future[None] | None = None + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) + + loop = get_running_loop() + try: + if self._tasks: + with CancelScope() as wait_scope: + while self._tasks: + self._on_completed_fut = loop.create_future() + + try: + await self._on_completed_fut + except CancelledError as exc: + # Shield the scope against further cancellation attempts, + # as they're not productive (#695) + wait_scope.shield = True + self.cancel_scope.cancel() + + # Set exc_val from the cancellation exception if it was + # previously unset. However, we should not replace a native + # cancellation exception with one raise by a cancel scope. + if exc_val is None or ( + isinstance(exc_val, CancelledError) + and not is_anyio_cancellation(exc) + ): + exc_val = exc + + self._on_completed_fut = None + else: + # If there are no child tasks to wait on, run at least one checkpoint + # anyway + await AsyncIOBackend.cancel_shielded_checkpoint() + + self._active = False + if self._exceptions: + # The exception that got us here should already have been + # added to self._exceptions so it's ok to break exception + # chaining and avoid adding a "During handling of above..." + # for each nesting level. 
+ raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) from None + elif exc_val: + raise exc_val + except BaseException as exc: + if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): + return True + + raise + + return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + finally: + del exc_val, exc_tb, self._exceptions + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None: + asyncio.future_discard_from_awaited_by( + _task, self.cancel_scope._host_task + ) + + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + if self._on_completed_fut is not None and not self._tasks: + try: + self._on_completed_fut.set_result(None) + except asyncio.InvalidStateError: + pass + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + # The future can only be in the cancelled state if the host task was + # cancelled, so return immediately instead of adding one more + # CancelledError to the exceptions list + if task_status_future is not None and task_status_future.cancelled(): + return + + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._effectively_cancelled: + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + loop = asyncio.get_running_loop() + if ( + (factory := loop.get_task_factory()) + and getattr(factory, "__code__", None) is _eager_task_factory_code + and (closure := getattr(factory, "__closure__", None)) + ): + custom_task_constructor = closure[0].cell_contents + task = custom_task_constructor(coro, loop=loop, name=name) + else: + task = create_task(coro, name=name) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None: + asyncio.future_add_to_awaited_by(task, self.cancel_scope._host_task) + + task.add_done_callback(task_done) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. + try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not 
self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + del result, exception + + self.queue.task_done() + del item, context, func, args, future, cancel_scope + + def stop(self, f: asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + AsyncIOBackend.run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + (self._call_func, func, args, kwargs, future), + self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.set_exception(ClosedResourceError()) + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + _closed: bool = field(init=False, default=False) + + async def send(self, item: bytes) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + stream_paused = self._stream._protocol._paused # type: ignore[attr-defined] + try: + self._stream.write(item) + await self._stream.drain() + except (ConnectionResetError, BrokenPipeError, RuntimeError) as exc: + # If closed by us and/or the peer: + # * on stdlib, drain() raises ConnectionResetError or BrokenPipeError + # * on uvloop and Winloop, write() eventually starts raising RuntimeError + if self._closed: + raise ClosedResourceError from exc + elif self._stream.is_closing(): + raise BrokenResourceError from exc + + raise + + if not stream_paused: + await AsyncIOBackend.cancel_shielded_checkpoint() + + async def aclose(self) -> None: + self._closed = True + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True) as scope: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + scope.shield = False + try: + await self.wait() + except BaseException: + scope.shield = True + self.kill() + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return 
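The Process wrapper being defined here is what anyio's run_process() and open_process() hand back. A POSIX-flavoured sketch (echo and cat are assumed to be on PATH):

import anyio

async def main() -> None:
    # run_process() waits for completion and captures output
    result = await anyio.run_process(["echo", "hello"])
    print(result.stdout.decode().strip())

    # open_process() exposes stdin/stdout as the byte-stream wrappers above
    async with await anyio.open_process(["cat"]) as process:
        await process.stdin.send(b"ping\n")
        print(await process.stdout.receive())  # b'ping\n'
        await process.stdin.aclose()

anyio.run(main)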
self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None # type: ignore[name-defined] + if sys.version_info < (3, 12): + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers.copy(): + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). + + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + workers = workers.copy() + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + is_at_eof: bool = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + # ProactorEventloop sometimes sends bytearray instead of bytes + self.read_queue.append(bytes(data)) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.is_at_eof = True + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def 
pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + and not self._protocol.is_at_eof + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + else: + await AsyncIOBackend.checkpoint() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception from None + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # block until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + 
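SocketStream above is the object anyio.connect_tcp() returns; its receive()/send() map onto the transport/protocol pair. A sketch (host and request are illustrative):

import anyio

async def main() -> None:
    async with await anyio.connect_tcp("example.com", 80) as stream:
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        reply = await stream.receive()  # up to 65536 bytes per call
        print(reply.splitlines()[0].decode())

anyio.run(main)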
self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, 
get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + self._transport.close() + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if 
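TCPSocketListener.accept() above backs anyio.create_tcp_listener(). A sketch of a one-round-trip echo server (port 9999 and the 5-second lifetime are arbitrary):

import anyio
from anyio.abc import SocketStream

async def echo(stream: SocketStream) -> None:
    async with stream:
        await stream.send(await stream.receive())

async def main() -> None:
    listener = await anyio.create_tcp_listener(local_port=9999)
    with anyio.move_on_after(5):  # serve() only returns via cancellation
        await listener.serve(echo)

anyio.run(main)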
self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +_read_events: RunVar[dict[int, asyncio.Future[bool]]] = RunVar("read_events") +_write_events: RunVar[dict[int, asyncio.Future[bool]]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def 
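UDPSocket above is what anyio.create_udp_socket() returns. A loopback sketch that binds an ephemeral port and sends a datagram to itself:

import anyio
from anyio.abc import SocketAttribute

async def main() -> None:
    async with await anyio.create_udp_socket(
        local_host="127.0.0.1", local_port=0
    ) as udp:
        host, port = udp.extra(SocketAttribute.local_address)
        await udp.sendto(b"ping", host, port)  # datagram to ourselves
        data, addr = await udp.receive()
        print(data, "from", addr)

anyio.run(main)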
__new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self._owner_task: asyncio.Task | None = None + self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() + + async def acquire(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._owner_task = task + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + fut: asyncio.Future[None] = asyncio.Future() + item = task, fut + self._waiters.append(item) + try: + await fut + except CancelledError: + self._waiters.remove(item) + if self._owner_task is task: + self.release() + + raise + + self._waiters.remove(item) + + def acquire_nowait(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + self._owner_task = task + return + + if self._owner_task is task: + raise RuntimeError("Attempted to acquire an already held Lock") + + raise WouldBlock + + def locked(self) -> bool: + return self._owner_task is not None + + def release(self) -> None: + if self._owner_task != current_task(): + raise RuntimeError("The current task is not holding this lock") + + for task, fut in self._waiters: + if not fut.cancelled(): + self._owner_task = task + fut.set_result(None) + return + + self._owner_task = None + + def statistics(self) -> LockStatistics: + task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None + return LockStatistics(self.locked(), task_info, len(self._waiters)) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + super().__init__(initial_value, max_value=max_value) + self._value = initial_value + self._max_value = max_value + self._fast_acquire = fast_acquire + self._waiters: deque[asyncio.Future[None]] = deque() + + async def acquire(self) -> None: + if self._value > 0 and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._value -= 1 + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + fut: asyncio.Future[None] = asyncio.Future() + self._waiters.append(fut) + try: + await fut + except CancelledError: + try: + self._waiters.remove(fut) + except ValueError: + self.release() + + raise + 
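The Event and Lock classes above (and the Semaphore still being defined here) surface directly as anyio.Event, anyio.Lock, and anyio.Semaphore. A small coordination sketch:

import anyio

async def waiter(event: anyio.Event, lock: anyio.Lock) -> None:
    await event.wait()  # parked until set() is called
    async with lock:    # Lock hands ownership to waiters FIFO-style
        print("waiter acquired the lock")

async def main() -> None:
    event, lock = anyio.Event(), anyio.Lock()
    async with anyio.create_task_group() as tg:
        tg.start_soon(waiter, event, lock)
        async with lock:
            event.set()  # wake the waiter; it then queues on the lock

anyio.run(main)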
+ def acquire_nowait(self) -> None: + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + for fut in self._waiters: + if not fut.cancelled(): + fut.set_result(None) + self._waiters.remove(fut) + return + + self._value += 1 + + @property + def value(self) -> int: + return self._value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + + if value < 0: + raise ValueError("total_tokens must be >= 0") + + waiters_to_notify = max(value - self._total_tokens, 0) + self._total_tokens = value + + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def _notify_next_waiter(self) -> None: + """Notify the next task in line if this limiter has free capacity now.""" + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + if event.is_set(): + self._notify_next_waiter() + + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + 
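CapacityLimiter, being defined here, caps the number of concurrent borrowers rather than counting releases the way a semaphore does. A sketch with five tasks sharing two tokens (the counts and sleep are arbitrary):

import anyio

async def job(n: int, limiter: anyio.CapacityLimiter) -> None:
    async with limiter:  # blocks while two borrowers hold tokens
        print("start", n)
        await anyio.sleep(0.1)

async def main() -> None:
    limiter = anyio.CapacityLimiter(2)
    async with anyio.create_task_group() as tg:
        for n in range(5):
            tg.start_soon(job, n, limiter)

anyio.run(main)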
"this borrower isn't holding any of this CapacityLimiter's tokens" + ) from None + + self._notify_next_waiter() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +class AsyncIOTaskInfo(TaskInfo): + def __init__(self, task: asyncio.Task): + task_state = _task_states.get(task) + if task_state is None: + parent_id = None + else: + parent_id = task_state.parent_id + + coro = task.get_coro() + assert coro is not None, "created TaskInfo from a completed Task" + super().__init__(id(task), parent_id, task.get_name(), coro) + self._task = weakref.ref(task) + + def has_pending_cancellation(self) -> bool: + if not (task := self._task()): + # If the task isn't around anymore, it won't have a pending cancellation + return False + + if task._must_cancel: # type: ignore[attr-defined] + return True + elif ( + isinstance(task._fut_waiter, asyncio.Future) # type: ignore[attr-defined] + and task._fut_waiter.cancelled() # type: ignore[attr-defined] + ): + return True + + if task_state := _task_states.get(task): + if cancel_scope := task_state.cancel_scope: + return cancel_scope._effectively_cancelled + + return False + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + if sys.platform != "win32": + import uvloop + + loop_factory = uvloop.new_event_loop + else: + import winloop + + loop_factory = winloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return 
self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple exceptions occurred in asynchronous callbacks", exceptions + ) + + async def _run_tests_and_fixtures( + self, + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + from _pytest.outcomes import OutcomeException + + with receive_stream, self._send_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except CancelledError as exc: + if not future.cancelled(): + future.cancel(*exc.args) + + raise + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + if not isinstance(exc, (Exception, OutcomeException)): + raise + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del 
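AsyncIOBackend.run(), being assembled here, is what anyio.run() dispatches to; the backend_options dict maps onto the debug, use_uvloop, and loop_factory keys read below. A sketch (uvloop, or winloop on Windows, must be installed for use_uvloop to have any effect):

import anyio

async def main() -> None:
    await anyio.sleep(0)

anyio.run(
    main,
    backend="asyncio",
    backend_options={"debug": True, "use_uvloop": False},
)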
_task_states[task] + + debug = options.get("debug", None) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + if sys.platform != "win32": + import uvloop + + loop_factory = uvloop.new_event_loop + else: + import winloop + + loop_factory = winloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + if (task := current_task()) is None: + return math.inf + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( # type: ignore[return] + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future = asyncio.Future[T_Retval]() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + 
worker.start() + workers.add(worker) + root_task.add_done_callback( + worker.stop, context=contextvars.Context() + ) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + context.run(set_current_async_library, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper() -> T_Retval: + __tracebackhide__ = True + if scope is not None: + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + if scope is not None: + scope._tasks.discard(task) + + loop = cast( + "AbstractEventLoop", token or threadlocals.current_token.native_token + ) + if loop.is_closed(): + raise RunFinishedError + + context = copy_context() + context.run(set_current_async_library, "asyncio") + scope = getattr(threadlocals, "current_cancel_scope", None) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, task_wrapper(), loop=loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + set_current_async_library("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + loop = cast( + "AbstractEventLoop", token or threadlocals.current_token.native_token + ) + if loop.is_closed(): + raise RunFinishedError + + f: concurrent.futures.Future[T_Retval] = Future() + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + await cls.checkpoint() + if isinstance(command, PathLike): + command = os.fspath(command) + + if isinstance(command, (str, bytes)): + process = await asyncio.create_subprocess_shell( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin 
else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + 
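# ----------------------------------------------------------------------
# Editorial aside (not part of the upstream diff): the worker-thread pool
# implemented above in run_sync_in_worker_thread() reuses idle
# WorkerThreads and prunes any that have sat idle longer than
# WorkerThread.MAX_IDLE_TIME. Application code reaches this machinery
# through AnyIO's public to_thread API; a minimal, hedged sketch on the
# asyncio backend (the file path is hypothetical, for illustration only):
import anyio
import anyio.to_thread

def blocking_read(path: str) -> str:
    # Stands in for any blocking call worth offloading to a worker thread.
    with open(path) as f:
        return f.read()

async def main() -> None:
    # Dispatches to AsyncIOBackend.run_sync_in_worker_thread on asyncio;
    # an idle WorkerThread is reused when available, else one is spawned.
    text = await anyio.to_thread.run_sync(blocking_read, "/etc/hostname")
    print(len(text))

anyio.run(main)
# ----------------------------------------------------------------------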
@classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + fd = obj if isinstance(obj, int) else obj.fileno() + if read_events.get(fd): + raise BusyResourceError("reading from") + + loop = get_running_loop() + fut: asyncio.Future[bool] = loop.create_future() + + def cb() -> None: + try: + del read_events[fd] + except KeyError: + pass + else: + remove_reader(fd) + + try: + fut.set_result(True) + except asyncio.InvalidStateError: + pass + + try: + loop.add_reader(fd, cb) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_reader(fd, cb) + remove_reader = selector.remove_reader + else: + remove_reader = loop.remove_reader + + read_events[fd] = fut + try: + success = await fut + finally: + try: + del read_events[fd] + except KeyError: + pass + else: + remove_reader(fd) + + if not success: + raise ClosedResourceError + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + fd = obj if isinstance(obj, int) else obj.fileno() + if write_events.get(fd): + raise BusyResourceError("writing to") + + loop = get_running_loop() + fut: asyncio.Future[bool] = loop.create_future() + + def cb() -> None: + try: + del write_events[fd] + except KeyError: + pass + else: + remove_writer(fd) + + try: + fut.set_result(True) + except asyncio.InvalidStateError: + pass + + try: + loop.add_writer(fd, cb) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_writer(fd, cb) + remove_writer = selector.remove_writer + else: + remove_writer = loop.remove_writer + + write_events[fd] = fut + try: + success = await fut + finally: + try: + del write_events[fd] + except KeyError: + pass + else: + remove_writer(fd) + + if not success: + raise ClosedResourceError + + @classmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + fd = obj if isinstance(obj, int) else obj.fileno() + loop = get_running_loop() + + try: + write_events = _write_events.get() + except LookupError: + pass + else: + try: + fut = write_events.pop(fd) + except KeyError: + pass + else: + try: + fut.set_result(False) + except asyncio.InvalidStateError: + pass + + try: + loop.remove_writer(fd) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + get_selector().remove_writer(fd) + + try: + read_events = _read_events.get() + except LookupError: + pass + else: + try: + fut = read_events.pop(fd) + except KeyError: + pass + else: + try: + fut.set_result(False) + except asyncio.InvalidStateError: + pass + + try: + loop.remove_reader(fd) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + get_selector().remove_reader(fd) + + @classmethod + async def wrap_listener_socket(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream: + transport, protocol = await get_running_loop().create_connection( + StreamProtocol, sock=sock + ) + return 
SocketStream(transport, protocol) + + @classmethod + async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream: + return UNIXSocketStream(sock) + + @classmethod + async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, sock=sock + ) + return UDPSocket(transport, protocol) + + @classmethod + async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, sock=sock + ) + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket: + return UNIXDatagramSocket(sock) + + @classmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket.socket + ) -> ConnectedUNIXDatagramSocket: + return ConnectedUNIXDatagramSocket(sock) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py b/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000..36941f0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1384 @@ +from __future__ import annotations + +import array +import math +import os +import socket +import sys +import types +import weakref +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Generic, + NoReturn, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + notify_closing, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + RunFinishedError, + TaskInfo, + WouldBlock, + abc, +) +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend, StrOrBytesPath +from ..streams.memory import MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self, reason: str | None = None) -> None: + self.__original.cancel(reason) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type + return await 
self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: ignore[return-value] + except BaseExceptionGroup as exc: + if not exc.split(trio.Cancelled)[1]: + raise trio.Cancelled._create() from exc + + raise + finally: + del exc_val, exc_tb + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + trio.from_thread.run_sync( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return bytes(data) + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + 
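# ----------------------------------------------------------------------
# Editorial aside (not part of the upstream diff): the remaining
# properties of this trio-backend Process wrapper expose the child's
# pipes as AnyIO byte streams. User code normally obtains a Process via
# AnyIO's public anyio.open_process(), where the standard streams default
# to pipes; a minimal sketch (assumes a POSIX "echo" binary is on PATH):
import anyio

async def main() -> None:
    async with await anyio.open_process(["echo", "hello"]) as process:
        assert process.stdout is not None
        # One receive() is enough here; at EOF it would raise EndOfStream.
        print(await process.stdout.receive())

anyio.run(main)
# ----------------------------------------------------------------------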
@property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - 
(len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await 
self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self.__original = trio.Lock() + + @staticmethod + def _convert_runtime_error_msg(exc: RuntimeError) -> None: + if exc.args == ("attempt to re-acquire an already held Lock",): + exc.args = ("Attempted to acquire an already held Lock",) + + async def acquire(self) -> None: + if not self._fast_acquire: + try: + await self.__original.acquire() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def locked(self) -> bool: + return self.__original.locked() + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> LockStatistics: + orig_statistics = self.__original.statistics() + owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None + return LockStatistics( + orig_statistics.locked, owner, orig_statistics.tasks_waiting + ) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self.__original = trio.Semaphore(initial_value, max_value=max_value) + + async def 
acquire(self) -> None: + if not self._fast_acquire: + await self.__original.acquire() + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + + @property + def max_value(self) -> int | None: + return self.__original.max_value + + @property + def value(self) -> int: + return self.__original.value + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> SemaphoreStatistics: + orig_statistics = self.__original.statistics() + return SemaphoreStatistics(orig_statistics.tasks_waiting) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> _SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def 
__anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioTaskInfo(TaskInfo): + def __init__(self, task: trio.lowlevel.Task): + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + super().__init__(id(task), parent_id, task.name, task.coro) + self._task = weakref.proxy(task) + + def has_pending_cancellation(self) -> bool: + try: + return self._task._cancel_status.effectively_cancelled + except ReferenceError: + # If the task is no longer around, it surely doesn't have a cancellation + # pending + return False + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + 
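# ----------------------------------------------------------------------
# Editorial aside (not part of the upstream diff): the trio TestRunner
# above drives tests through trio's "guest mode" -- start_guest_run()
# hands scheduling callbacks to a host loop, which here is just a plain
# queue.Queue that the runner drains. A minimal, hedged sketch of that
# same pattern in isolation:
import queue

import trio

call_queue: queue.Queue = queue.Queue()
done = False

async def main() -> int:
    await trio.sleep(0)
    return 42

def on_done(outcome) -> None:
    # outcome is an outcome.Outcome; unwrap() returns the value or
    # re-raises the exception, exactly as the TestRunner does.
    global done
    done = True
    print("result:", outcome.unwrap())

trio.lowlevel.start_guest_run(
    main,
    run_sync_soon_threadsafe=call_queue.put,
    done_callback=on_done,
)
while not done:
    call_queue.get()()  # service trio's callbacks, as TestRunner does
# ----------------------------------------------------------------------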
@classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + trio_token = cast("trio.lowlevel.TrioToken | None", token) + try: + return trio.from_thread.run(func, *args, trio_token=trio_token) + except trio.RunFinishedError: + raise RunFinishedError from None + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + trio_token = cast("trio.lowlevel.TrioToken | None", token) + try: + return trio.from_thread.run_sync(func, *args, trio_token=trio_token) + except trio.RunFinishedError: + raise RunFinishedError from None + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + def convert_item(item: StrOrBytesPath) -> str: + str_or_bytes = os.fspath(item) + if isinstance(str_or_bytes, str): + return str_or_bytes + else: + return os.fsdecode(str_or_bytes) + + if isinstance(command, (str, bytes, PathLike)): + process = await trio.lowlevel.open_process( + convert_item(command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=True, + **kwargs, + ) + else: + process = await 
trio.lowlevel.open_process( + [convert_item(item) for item in command], + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=False, + **kwargs, + ) + + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: ... 
+ + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + try: + await wait_readable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + try: + await wait_writable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + notify_closing(obj) + + @classmethod + async def wrap_listener_socket(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream: + trio_sock = trio.socket.from_stdlib_socket(sock) + return SocketStream(trio_sock) + + @classmethod + async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UNIXSocketStream(trio_sock) + + @classmethod + async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UDPSocket(trio_sock) + + @classmethod + async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return ConnectedUDPSocket(trio_sock) + + @classmethod + async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UNIXDatagramSocket(trio_sock) + + @classmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket.socket + ) -> ConnectedUNIXDatagramSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return ConnectedUNIXDatagramSocket(trio_sock) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = 
current_task() + return TrioTaskInfo(task) + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TrioTaskInfo(root_task)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append(TrioTaskInfo(task)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__init__.py b/venv/lib/python3.12/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2e5e85e Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc new file mode 100644 index 0000000..6d32164 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc new file mode 100644 index 0000000..ba0e3bb Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc new file mode 100644 index 0000000..cd0c63b Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000..50c16b6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc new file mode 100644 index 0000000..d47e04d Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc new file mode 100644 index 0000000..2ef08d9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc new file mode 100644 index 0000000..89cbd9e Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc new file mode 100644 index 0000000..a6e1125 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc new file mode 100644 index 0000000..d2130ad Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc new file mode 100644 index 0000000..5837cd9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc new file mode 100644 index 0000000..27c91f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc new file mode 100644 index 0000000..f3a28c3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc new file mode 100644 index 0000000..11e76d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc new file mode 100644 index 0000000..40d3c33 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc new file mode 100644 index 0000000..39861a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py b/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py new file mode 100644 index 0000000..9f35bae --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import asyncio +import socket +import threading +from collections.abc 
import Callable +from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +_selector_lock = threading.Lock() +_selector: Selector | None = None + + +class Selector: + def __init__(self) -> None: + self._thread = threading.Thread(target=self.run, name="AnyIO socket selector") + self._selector = DefaultSelector() + self._send, self._receive = socket.socketpair() + self._send.setblocking(False) + self._receive.setblocking(False) + # This somewhat reduces the amount of memory wasted queueing up data + # for wakeups. With these settings, maximum number of 1-byte sends + # before getting BlockingIOError: + # Linux 4.8: 6 + # macOS (darwin 15.5): 1 + # Windows 10: 525347 + # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send + # blocking, even on non-blocking sockets, so don't do that.) + self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1) + self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1) + # On Windows this is a TCP socket so this might matter. On other + # platforms this fails b/c AF_UNIX sockets aren't actually TCP. + try: + self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + except OSError: + pass + + self._selector.register(self._receive, EVENT_READ) + self._closed = False + + def start(self) -> None: + self._thread.start() + threading._register_atexit(self._stop) # type: ignore[attr-defined] + + def _stop(self) -> None: + global _selector + self._closed = True + self._notify_self() + self._send.close() + self._thread.join() + self._selector.unregister(self._receive) + self._receive.close() + self._selector.close() + _selector = None + assert not self._selector.get_map(), ( + "selector still has registered file descriptors after shutdown" + ) + + def _notify_self(self) -> None: + try: + self._send.send(b"\x00") + except BlockingIOError: + pass + + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)}) + else: + if EVENT_READ in key.data: + raise ValueError( + "this file descriptor is already registered for reading" + ) + + key.data[EVENT_READ] = loop, callback + self._selector.modify(fd, key.events | EVENT_READ, key.data) + + self._notify_self() + + def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)}) + else: + if EVENT_WRITE in key.data: + raise ValueError( + "this file descriptor is already registered for writing" + ) + + key.data[EVENT_WRITE] = loop, callback + self._selector.modify(fd, key.events | EVENT_WRITE, key.data) + + self._notify_self() + + def remove_reader(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_READ: + del key.data[EVENT_READ] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def remove_writer(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_WRITE: + del key.data[EVENT_WRITE] + self._selector.modify(fd, new_events, key.data) + else: + 
self._selector.unregister(fd) + + return True + + def run(self) -> None: + while not self._closed: + for key, events in self._selector.select(): + if key.fileobj is self._receive: + try: + while self._receive.recv(4096): + pass + except BlockingIOError: + pass + + continue + + if events & EVENT_READ: + loop, callback = key.data[EVENT_READ] + self.remove_reader(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + if events & EVENT_WRITE: + loop, callback = key.data[EVENT_WRITE] + self.remove_writer(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + +def get_selector() -> Selector: + global _selector + + with _selector_lock: + if _selector is None: + _selector = Selector() + _selector.start() + + return _selector diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py b/venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py new file mode 100644 index 0000000..302f32b --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +from abc import abstractmethod +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from inspect import isasyncgen, iscoroutine, isgenerator +from types import TracebackType +from typing import Protocol, TypeVar, cast, final + +_T_co = TypeVar("_T_co", covariant=True) +_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None") + + +class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]): + def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ... + + +class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]): + def __asynccontextmanager__( + self, + ) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ... + + +class ContextManagerMixin: + """ + Mixin class providing context manager functionality via a generator-based + implementation. + + This class allows you to implement a context manager via :meth:`__contextmanager__` + which should return a generator. The mechanics are meant to mirror those of + :func:`@contextmanager <contextlib.contextmanager>`. + + .. note:: Classes using this mix-in are not reentrant as context managers, meaning + that once you enter it, you can't re-enter before first exiting it. + + .. seealso:: :doc:`contextmanagers` + """ + + __cm: AbstractContextManager[object, bool | None] | None = None + + @final + def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, ContextManagerMixin) + if self.__cm is not None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has already been entered" + ) + + cm = self.__contextmanager__() + if not isinstance(cm, AbstractContextManager): + if isgenerator(cm): + raise TypeError( + "__contextmanager__() returned a generator object instead of " + "a context manager. Did you forget to add the @contextmanager " + "decorator?" + ) + + raise TypeError( + f"__contextmanager__() did not return a context manager object, " + f"but {cm.__class__!r}" + ) + + if cm is self: + raise TypeError( + f"{self.__class__.__qualname__}.__contextmanager__() returned " + f"self. Did you forget to add the @contextmanager decorator and a " + f"'yield' statement?"
+ ) + + value = cm.__enter__() + self.__cm = cm + return value + + @final + def __exit__( + self: _SupportsCtxMgr[object, _ExitT_co], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> _ExitT_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, ContextManagerMixin) + if self.__cm is None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has not been entered yet" + ) + + # Prevent circular references + cm = self.__cm + del self.__cm + + return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb)) + + @abstractmethod + def __contextmanager__(self) -> AbstractContextManager[object, bool | None]: + """ + Implement your context manager logic here. + + This method **must** be decorated with + :func:`@contextmanager <contextlib.contextmanager>`. + + .. note:: Remember that the ``yield`` will raise any exception raised in the + enclosed context block, so use a ``finally:`` block to clean up resources! + + :return: a context manager object + """ + + +class AsyncContextManagerMixin: + """ + Mixin class providing async context manager functionality via a generator-based + implementation. + + This class allows you to implement an async context manager via + :meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of + :func:`@asynccontextmanager <contextlib.asynccontextmanager>`. + + .. note:: Classes using this mix-in are not reentrant as context managers, meaning + that once you enter it, you can't re-enter before first exiting it. + + .. seealso:: :doc:`contextmanagers` + """ + + __cm: AbstractAsyncContextManager[object, bool | None] | None = None + + @final + async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, AsyncContextManagerMixin) + if self.__cm is not None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has already been entered" + ) + + cm = self.__asynccontextmanager__() + if not isinstance(cm, AbstractAsyncContextManager): + if isasyncgen(cm): + raise TypeError( + "__asynccontextmanager__() returned an async generator instead of " + "an async context manager. Did you forget to add the " + "@asynccontextmanager decorator?" + ) + elif iscoroutine(cm): + cm.close() + raise TypeError( + "__asynccontextmanager__() returned a coroutine object instead of " + "an async context manager. Did you forget to add the " + "@asynccontextmanager decorator and a 'yield' statement?" + ) + + raise TypeError( + f"__asynccontextmanager__() did not return an async context manager, " + f"but {cm.__class__!r}" + ) + + if cm is self: + raise TypeError( + f"{self.__class__.__qualname__}.__asynccontextmanager__() returned " + f"self. Did you forget to add the @asynccontextmanager decorator and a " + f"'yield' statement?"
+ ) + + value = await cm.__aenter__() + self.__cm = cm + return value + + @final + async def __aexit__( + self: _SupportsAsyncCtxMgr[object, _ExitT_co], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> _ExitT_co: + assert isinstance(self, AsyncContextManagerMixin) + if self.__cm is None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has not been entered yet" + ) + + # Prevent circular references + cm = self.__cm + del self.__cm + + return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb)) + + @abstractmethod + def __asynccontextmanager__( + self, + ) -> AbstractAsyncContextManager[object, bool | None]: + """ + Implement your async context manager logic here. + + This method **must** be decorated with + :func:`@asynccontextmanager <contextlib.asynccontextmanager>`. + + .. note:: Remember that the ``yield`` will raise any exception raised in the + enclosed context block, so use a ``finally:`` block to clean up resources! + + :return: an async context manager object + """ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py b/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..46d9f46 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,229 @@ +from __future__ import annotations + +import math +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from contextvars import Token +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +sniffio: Any +try: + import sniffio +except ModuleNotFoundError: + sniffio = None + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() +loaded_backends: dict[str, type[AsyncBackend]] = {} + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop.
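# A minimal usage sketch for the AsyncContextManagerMixin defined in
# _contextmanagers.py above (editorial illustration, not vendored code). It
# assumes the mixin is re-exported from the anyio package root, as in recent
# anyio releases: __asynccontextmanager__() must be decorated with
# @asynccontextmanager, and the mixin then supplies __aenter__/__aexit__.
from __future__ import annotations

from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager

import anyio
from anyio import AsyncContextManagerMixin

class Connection(AsyncContextManagerMixin):
    @asynccontextmanager
    async def __asynccontextmanager__(self) -> AsyncGenerator[Connection, None]:
        print("opening")
        try:
            yield self
        finally:
            print("closing")  # runs even if the context block raised

async def main() -> None:
    async with Connection() as conn:  # prints "opening", then "closing"
        print("using", conn)

anyio.run(main)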
+ + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here <backend options>`) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + if asynclib_name := current_async_library(): + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if asynclib_name is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = set_current_async_library(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + reset_current_async_library(token) + + + async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + + async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + + async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + + def current_time() -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return get_async_backend().current_time() + + + def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + + def get_available_backends() -> tuple[str, ...]: + """ + Test for the availability of built-in backends. + + :return: a tuple of the built-in backend names that were successfully imported + + ..
versionadded:: 4.12 + + """ + available_backends: list[str] = [] + for backend_name in get_all_backends(): + try: + get_async_backend(backend_name) + except ImportError: + continue + + available_backends.append(backend_name) + + return tuple(available_backends) + + + def get_cancelled_exc_class() -> type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_async_backend().cancelled_exception_class() + + + # + # Private API + # + + + @contextmanager + def claim_worker_thread( + backend_class: type[AsyncBackend], token: object + ) -> Generator[Any, None, None]: + from ..lowlevel import EventLoopToken + + threadlocals.current_token = EventLoopToken(backend_class, token) + try: + yield + finally: + del threadlocals.current_token + + + class NoCurrentAsyncBackend(Exception): + def __init__(self) -> None: + super().__init__( + f"Not currently running on any asynchronous event loop. " + f"Available async backends: {', '.join(get_all_backends())}" + ) + + + def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: + if asynclib_name is None: + asynclib_name = current_async_library() + if not asynclib_name: + raise NoCurrentAsyncBackend + + # We use our own dict instead of sys.modules to get the already imported back-end + # class because the appropriate modules in sys.modules could potentially be only + # partially initialized + try: + return loaded_backends[asynclib_name] + except KeyError: + module = import_module(f"anyio._backends._{asynclib_name}") + loaded_backends[asynclib_name] = module.backend_class + return module.backend_class + + + def current_async_library() -> str | None: + if sniffio is None: + # If sniffio is not installed, we assume we're either running asyncio or nothing + import asyncio + + try: + asyncio.get_running_loop() + return "asyncio" + except RuntimeError: + pass + else: + try: + return sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + + return None + + + def set_current_async_library(asynclib_name: str | None) -> Token | None: + # no-op if sniffio is not installed + if sniffio is None: + return None + + return sniffio.current_async_library_cvar.set(asynclib_name) + + + def reset_current_async_library(token: Token | None) -> None: + if token is not None: + sniffio.current_async_library_cvar.reset(token) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py b/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..5f123d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,153 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator +from textwrap import dedent +from typing import Any + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or + otherwise misbehaves. + """ + + +class BrokenWorkerInterpreter(Exception): + """ + Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is + raised in the subinterpreter.
+ """ + + def __init__(self, excinfo: Any): + # This was adapted from concurrent.futures.interpreter.ExecutionFailed + msg = excinfo.formatted + if not msg: + if excinfo.type and excinfo.msg: + msg = f"{excinfo.type.__name__}: {excinfo.msg}" + else: + msg = excinfo.type.__name__ or excinfo.msg + + super().__init__(msg) + self.excinfo = excinfo + + def __str__(self) -> str: + try: + formatted = self.excinfo.errdisplay + except Exception: + return super().__str__() + else: + return dedent( + f""" + {super().__str__()} + + Uncaught in the interpreter: + + {formatted} + """.strip() + ) + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class ConnectionFailed(OSError): + """ + Raised when a connection attempt fails. + + .. note:: This class inherits from :exc:`OSError` for backwards compatibility. + """ + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" + + +class NoEventLoopError(RuntimeError): + """ + Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if + not calling from an AnyIO worker thread, and no ``token`` was passed. + """ + + +class RunFinishedError(RuntimeError): + """ + Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event + loop associated with the explicitly passed token has already finished. 
+ """ + + def __init__(self) -> None: + super().__init__( + "The event loop associated with the given token has already finished" + ) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py b/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..061f0d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,797 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import ( + AsyncIterator, + Callable, + Iterable, + Iterator, + Sequence, +) +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + ClassVar, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from types import ModuleType + + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: ... + + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... 
+ + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. + + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator[PathLike[str]] + + async def __anext__(self) -> Path: + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) + if nextval is None: + raise StopAsyncIteration from None + + return Path(nextval) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. 
+ + Some methods may be unavailable or have limited functionality, based on the Python + version: + + * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later) + * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later) + * :attr:`~pathlib.Path.info` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later) + * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only + available on Python 3.13 or later) + * :meth:`~pathlib.Path.move` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later) + * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available + on Python 3.12 or later) + * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later) + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_junction` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.is_socket` + * :meth:`~pathlib.Path.is_symlink` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.resolve` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.symlink_to` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.walk` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding + :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: str | PathLike[str]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: pathlib.PurePath | Path) -> 
bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + if sys.version_info >= (3, 13): + parser: ClassVar[ModuleType] = pathlib.Path.parser + + @classmethod + def from_uri(cls, uri: str) -> Path: + return Path(pathlib.Path.from_uri(uri)) + + def full_match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.full_match(path_pattern, case_sensitive=case_sensitive) + + def match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.match(path_pattern, case_sensitive=case_sensitive) + else: + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + if sys.version_info >= (3, 14): + + @property + def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it + return self._path.info + + async def copy( + self, + target: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy, + follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, pathlib.Path(target))) + + async def copy_into( + self, + target_dir: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy_into, + follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, pathlib.Path(target_dir))) + + async def move(self, target: str | os.PathLike[str]) -> Path: + # Upstream does not handle anyio.Path properly as a PathLike + target = pathlib.Path(target) + return Path(await to_thread.run_sync(self._path.move, target)) + + async def move_into( + self, + target_dir: str | os.PathLike[str], + ) -> Path: + return Path(await to_thread.run_sync(self._path.move_into, target_dir)) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await 
to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + if sys.version_info < (3, 12): + # Python 3.11 and earlier + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + elif (3, 12) <= sys.version_info < (3, 13): + # changed in Python 3.12: + # - The case_sensitive parameter was added. + def glob( + self, + pattern: str, + *, + case_sensitive: bool | None = None, + ) -> AsyncIterator[Path]: + gen = self._path.glob(pattern, case_sensitive=case_sensitive) + return _PathIterator(gen) + elif sys.version_info >= (3, 13): + # Changed in Python 3.13: + # - The recurse_symlinks parameter was added. + # - The pattern parameter accepts a path-like object. + def glob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block + self, + pattern: str | PathLike[str], + *, + case_sensitive: bool | None = None, + recurse_symlinks: bool = False, + ) -> AsyncIterator[Path]: + gen = self._path.glob( + pattern, # type: ignore[arg-type] + case_sensitive=case_sensitive, + recurse_symlinks=recurse_symlinks, + ) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + if sys.version_info >= (3, 12): + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def is_mount(self) -> bool: + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + async def iterdir(self) -> AsyncIterator[Path]: + gen = ( + self._path.iterdir() + if sys.version_info < (3, 13) + else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True) + ) + async for path in _PathIterator(gen): + yield path + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async 
def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[bytes]: ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + # relative_to() should work with any PathLike but it doesn't + others = [pathlib.Path(other) for other in other] + return Path(self._path.relative_to(*others, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + if sys.version_info < (3, 12): + # Pre Python 3.12 + def rglob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + elif (3, 12) <= sys.version_info < (3, 13): + # Changed in Python 3.12: + # - The case_sensitive parameter was added. + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None + ) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern, case_sensitive=case_sensitive) + return _PathIterator(gen) + elif sys.version_info >= (3, 13): + # Changed in Python 3.13: + # - The recurse_symlinks parameter was added. + # - The pattern parameter accepts a path-like object. 
+ def rglob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block + self, + pattern: str | PathLike[str], + *, + case_sensitive: bool | None = None, + recurse_symlinks: bool = False, + ) -> AsyncIterator[Path]: + gen = self._path.rglob( + pattern, # type: ignore[arg-type] + case_sensitive=case_sensitive, + recurse_symlinks=recurse_symlinks, + ) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_resources.py b/venv/lib/python3.12/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9a5344 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an 
asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_signals.py b/venv/lib/python3.12/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..f3451d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_signals.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from contextlib import AbstractContextManager +from signal import Signals + +from ._eventloop import get_async_backend + + +def open_signal_receiver( + *signals: Signals, +) -> AbstractContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + + .. warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py b/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..c65508f --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,991 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from dataclasses import dataclass +from ipaddress import IPv4Address, IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import TYPE_CHECKING, Any, Literal, cast, overload + +from .. 
import ConnectionFailed, to_thread +from ..abc import ( + ByteStreamConnectable, + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSConnectable, TLSStream +from ._eventloop import get_async_backend +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +if sys.version_info < (3, 13): + from typing_extensions import deprecated +else: + from warnings import deprecated + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[True], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[False], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = None, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_standard_compatible: bool = True, + tls_hostname: str | None = None, + happy_eyeballs_delay: float = 0.25, +) -> SocketStream | TLSStream: + """ + Connect to a host using the TCP protocol. + + This function implements the stateless version of the Happy Eyeballs algorithm (RFC + 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, + each one is tried until one connection attempt succeeds. If the first attempt does + not connect within 250 milliseconds, a second attempt is started using the next + address in the list, and so on.
On IPv6 enabled systems, an IPv6 address (if + available) is tried first. + + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. + + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before + connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is + created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake + before closing the stream and requires that the server does this as well. + Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to + the value of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection + attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises ConnectionFailed: if the connection fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: SocketStream | None = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_async_backend() + local_address: IPSockAddrType | None = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + addr_obj = None + + if addr_obj is not None: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + else: + # getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo( + target_host, remote_port, family=family, type=socket.SOCK_STREAM + ) + + # Organize the list so that the first address is an IPv6 address (if available) + # and the second one is an IPv4 address. The rest can be in whatever order.
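# (Editorial note, not vendored code: the block below is the address-sorting
# step of Happy Eyeballs per RFC 6555: at most one IPv6 result is promoted to
# the front, at most one IPv4 result to second place, and the remaining
# getaddrinfo() results keep their original order; try_connect() above then
# races the candidates with a happy_eyeballs_delay stagger.)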
+ v6_found = v4_found = False + target_addrs = [] + for af, *_, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + + oserrors: list[OSError] = [] + try: + async with create_task_group() as tg: + for _af, addr in target_addrs: + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) + raise OSError("All connection attempts failed") from cause + finally: + oserrors.clear() + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + :raises ConnectionFailed: if the connection fails + + """ + path = os.fspath(path) + return await get_async_backend().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a multi-listener object containing one or more socket listeners + :raises OSError: if there's an error creating a socket, or binding to one or more + interfaces failed + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + + def setup_raw_socket( + fam: AddressFamily, + bind_addr: tuple[str, int] | tuple[str, int, int, int], + *, + v6only: bool = True, + ) -> socket.socket: + sock = socket.socket(fam) + try: + sock.setblocking(False) + + if fam == AddressFamily.AF_INET6: + sock.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, v6only) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. 
+ if sys.platform == "win32": + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # Workaround for #554 + if fam == socket.AF_INET6 and "%" in bind_addr[0]: + addr, scope_id = bind_addr[0].split("%", 1) + bind_addr = (addr, bind_addr[1], 0, int(scope_id)) + + sock.bind(bind_addr) + sock.listen(backlog) + except BaseException: + sock.close() + raise + + return sock + + # We passing type=0 on non-Windows platforms as a workaround for a uvloop bug + # where we don't get the correct scope ID for IPv6 link-local addresses when passing + # type=socket.SOCK_STREAM to getaddrinfo(): + # https://github.com/MagicStack/uvloop/issues/539 + gai_res = await getaddrinfo( + local_host, + local_port, + family=family, + type=socket.SOCK_STREAM if sys.platform == "win32" else 0, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + + # The set comprehension is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + sockaddrs = sorted({res for res in gai_res if res[1] == SocketKind.SOCK_STREAM}) + + # Special case for dual-stack binding on the "any" interface + if ( + local_host is None + and family == AddressFamily.AF_UNSPEC + and socket.has_dualstack_ipv6() + and any(fam == AddressFamily.AF_INET6 for fam, *_ in gai_res) + ): + raw_socket = setup_raw_socket( + AddressFamily.AF_INET6, ("::", local_port), v6only=False + ) + listener = asynclib.create_tcp_listener(raw_socket) + return MultiListener([listener]) + + errors: list[OSError] = [] + try: + for _ in range(len(sockaddrs)): + listeners: list[SocketListener] = [] + bound_ephemeral_port = local_port + try: + for fam, *_, sockaddr in sockaddrs: + sockaddr = sockaddr[0], bound_ephemeral_port, *sockaddr[2:] + raw_socket = setup_raw_socket(fam, sockaddr) + + # Store the assigned port if an ephemeral port was requested, so + # we'll bind to the same port on all interfaces + if local_port == 0 and len(gai_res) > 1: + bound_ephemeral_port = raw_socket.getsockname()[1] + + listeners.append(asynclib.create_tcp_listener(raw_socket)) + except BaseException as exc: + for listener in listeners: + await listener.aclose() + + # If an ephemeral port was requested but binding the assigned port + # failed for another interface, rotate the address list and try again + if ( + isinstance(exc, OSError) + and exc.errno == errno.EADDRINUSE + and local_port == 0 + and bound_ephemeral_port + ): + errors.append(exc) + sockaddrs.append(sockaddrs.pop(0)) + continue + + raise + + return MultiListener(listeners) + + raise OSError( + f"Could not create {len(sockaddrs)} listeners with a consistent port" + ) from ExceptionGroup("Several bind attempts failed", errors) + finally: + del errors # Prevent reference cycles + + +async def create_unix_listener( + path: str | bytes | PathLike[Any], + *, + mode: int | None = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be + removed first. 
+
+    """
+    backlog = min(backlog, 65536)
+    raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM)
+    try:
+        raw_socket.listen(backlog)
+        return get_async_backend().create_unix_listener(raw_socket)
+    except BaseException:
+        raw_socket.close()
+        raise
+
+
+async def create_udp_socket(
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    *,
+    local_host: IPAddressType | None = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> UDPSocket:
+    """
+    Create a UDP socket.
+
+    If ``local_port`` has been given, the socket will be bound to this port on the
+    local machine, making this socket suitable for providing UDP based services.
+
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a UDP socket
+
+    """
+    if family is AddressFamily.AF_UNSPEC and not local_host:
+        raise ValueError('Either "family" or "local_host" must be given')
+
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+    elif family is AddressFamily.AF_INET6:
+        local_address = ("::", 0)
+    else:
+        local_address = ("0.0.0.0", 0)
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, None, reuse_port
+    )
+    return cast(UDPSocket, sock)
+
+
+async def create_connected_udp_socket(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    local_host: IPAddressType | None = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> ConnectedUDPSocket:
+    """
+    Create a connected UDP socket.
+
+    Connected UDP sockets can only communicate with the specified remote host/port,
+    and any packets sent from other sources are dropped.
+
+    :param remote_host: remote host to set as the default target
+    :param remote_port: port on the remote host to set as the default target
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` or ``remote_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a connected UDP socket
+
+    """
+    local_address = None
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+
+    gai_res = await getaddrinfo(
+        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+    )
+    family = cast(AnyIPAddressFamily, gai_res[0][0])
+    remote_address = gai_res[0][-1]
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, remote_address, reuse_port
+    )
+    return cast(ConnectedUDPSocket, sock)
+
+
+async def create_unix_datagram_socket(
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> UNIXDatagramSocket:
+    """
+    Create a UNIX datagram socket.
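A short usage sketch for the UDP constructors above (an echo responder; the port is illustrative):

```python
import socket
import anyio

async def main():
    # Bind an IPv4 UDP socket; iterating yields (payload, (host, port)) pairs
    async with await anyio.create_udp_socket(
        family=socket.AF_INET, local_port=9000
    ) as udp:
        async for packet, (host, port) in udp:
            await udp.sendto(packet, host, port)

anyio.run(main)
```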
+
+    Not available on Windows.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making
+    this socket suitable for receiving datagrams from other processes. Other
+    processes can send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
+
+    :param local_path: the path to bind the socket to
+    :param local_mode: permissions to set on the local socket
+    :return: a UNIX datagram socket
+
+    """
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(raw_socket, None)
+
+
+async def create_connected_unix_datagram_socket(
+    remote_path: str | bytes | PathLike[Any],
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> ConnectedUNIXDatagramSocket:
+    """
+    Create a connected UNIX datagram socket.
+
+    Connected datagram sockets can only communicate with the specified remote path.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making
+    this socket suitable for receiving datagrams from other processes. Other
+    processes can send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
+
+    :param remote_path: the path to set as the default target
+    :param local_path: the path to bind the socket to
+    :param local_mode: permissions to set on the local socket
+    :return: a connected UNIX datagram socket
+
+    """
+    remote_path = os.fspath(remote_path)
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(
+        raw_socket, remote_path
+    )
+
+
+async def getaddrinfo(
+    host: bytes | str | None,
+    port: str | int | None,
+    *,
+    family: int | AddressFamily = 0,
+    type: int | SocketKind = 0,
+    proto: int = 0,
+    flags: int = 0,
+) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
+    """
+    Look up a numeric IP address given a host name.
+
+    Internationalized domain names are translated according to the (non-transitional)
+    IDNA 2008 standard.
+
+    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+        (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+    :param host: host name
+    :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+    :param type: socket type (``SOCK_STREAM``, ...)
+    :param proto: protocol number
+    :param flags: flags to pass to upstream ``getaddrinfo()``
+    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+    .. seealso:: :func:`socket.getaddrinfo`
+
+    """
+    # Handle unicode hostnames
+    if isinstance(host, str):
+        try:
+            encoded_host: bytes | None = host.encode("ascii")
+        except UnicodeEncodeError:
+            import idna
+
+            encoded_host = idna.encode(host, uts46=True)
+    else:
+        encoded_host = host
+
+    gai_res = await get_async_backend().getaddrinfo(
+        encoded_host, port, family=family, type=type, proto=proto, flags=flags
+    )
+    return [
+        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+        for family, type, proto, canonname, sockaddr in gai_res
+        # filter out IPv6 results when IPv6 is disabled
+        if not isinstance(sockaddr[0], int)
+    ]
+
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
+    """
+    Look up the host name of an IP address.
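A quick sketch of calling the async ``getaddrinfo()`` defined above (the host name is illustrative):

```python
import socket
import anyio

async def main():
    # Each entry is (family, type, proto, canonname, (address, port));
    # IPv6 4-tuples have already been collapsed to 2-tuples here
    for family, *_, sockaddr in await anyio.getaddrinfo(
        "example.org", 443, type=socket.SOCK_STREAM
    ):
        print(family.name, sockaddr)

anyio.run(main)
```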
+ + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_async_backend().getnameinfo(sockaddr, flags) + + +@deprecated("This function is deprecated; use `wait_readable` instead") +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_readable` instead. + + Wait until the given socket has data to be read. + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_async_backend().wait_readable(sock.fileno()) + + +@deprecated("This function is deprecated; use `wait_writable` instead") +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_writable` instead. + + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_async_backend().wait_writable(sock.fileno()) + + +def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object has data to be read. + + On Unix systems, ``obj`` must either be an integer file descriptor, or else an + object with a ``.fileno()`` method which returns an integer file descriptor. Any + kind of file descriptor can be passed, though the exact semantics will depend on + your kernel. For example, this probably won't do anything useful for on-disk files. + + On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an + object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File + descriptors aren't supported, and neither are handles that refer to anything besides + a ``SOCKET``. + + On backends where this functionality is not natively provided (asyncio + ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread + which is set to shut down when the interpreter shuts down. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become readable + + """ + return get_async_backend().wait_readable(obj) + + +def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object can be written to. 
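A self-contained sketch of ``wait_readable()`` on a raw socket, using a local socket pair so no network peer is needed:

```python
import socket
import anyio

async def main():
    left, right = socket.socketpair()
    with left, right:
        left.setblocking(False)
        right.send(b"ping")
        await anyio.wait_readable(left)  # returns once data is buffered on `left`
        print(left.recv(4))

anyio.run(main)
```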
+ + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become writable + + .. seealso:: See the documentation of :func:`wait_readable` for the definition of + ``obj`` and notes on backend compatibility. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + """ + return get_async_backend().wait_writable(obj) + + +def notify_closing(obj: FileDescriptorLike) -> None: + """ + Call this before closing a file descriptor (on Unix) or socket (on + Windows). This will cause any `wait_readable` or `wait_writable` + calls on the given object to immediately wake up and raise + `~anyio.ClosedResourceError`. + + This doesn't actually close the object – you still have to do that + yourself afterwards. Also, you want to be careful to make sure no + new tasks start waiting on the object in between when you call this + and when it's actually closed. So to close something properly, you + usually want to do these steps in order: + + 1. Explicitly mark the object as closed, so that any new attempts + to use it will abort before they start. + 2. Call `notify_closing` to wake up any already-existing users. + 3. Actually close the object. + + It's also possible to do them in a different order if that's more + convenient, *but only if* you make sure not to have any checkpoints in + between the steps. This way they all happen in a single atomic + step, so other tasks won't be able to tell what order they happened + in anyway. + + :param obj: an object with a ``.fileno()`` method or an integer handle + + """ + get_async_backend().notify_closing(obj) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. + + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. 
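The three-step close protocol described above, sketched as a hypothetical helper (the class and attribute names are illustrative; this assumes ``notify_closing`` is re-exported at the anyio top level, as in recent releases):

```python
import socket
import anyio

class GuardedSocket:
    """Wraps a raw socket so waiters fail fast when it is closed (illustrative)."""

    def __init__(self, sock: socket.socket) -> None:
        self.sock = sock
        self.closed = False

    def close(self) -> None:
        self.closed = True               # 1. mark as closed for any new users
        anyio.notify_closing(self.sock)  # 2. wake tasks in wait_readable/wait_writable
        self.sock.close()                # 3. actually close the object
```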
+ + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | None + if path is not None: + path_str = os.fsdecode(path) + + # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call + if not path_str.startswith("\0"): + # Copied from pathlib... + try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in ( + errno.ENOENT, + errno.ENOTDIR, + errno.EBADF, + errno.ELOOP, + ): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket + + +@dataclass +class TCPConnectable(ByteStreamConnectable): + """ + Connects to a TCP server at the given host and port. + + :param host: host name or IP address of the server + :param port: TCP port number of the server + """ + + host: str | IPv4Address | IPv6Address + port: int + + def __post_init__(self) -> None: + if self.port < 1 or self.port > 65535: + raise ValueError("TCP port number out of range") + + @override + async def connect(self) -> SocketStream: + try: + return await connect_tcp(self.host, self.port) + except OSError as exc: + raise ConnectionFailed( + f"error connecting to {self.host}:{self.port}: {exc}" + ) from exc + + +@dataclass +class UNIXConnectable(ByteStreamConnectable): + """ + Connects to a UNIX domain socket at the given path. + + :param path: the file system path of the socket + """ + + path: str | bytes | PathLike[str] | PathLike[bytes] + + @override + async def connect(self) -> UNIXSocketStream: + try: + return await connect_unix(self.path) + except OSError as exc: + raise ConnectionFailed(f"error connecting to {self.path!r}: {exc}") from exc + + +def as_connectable( + remote: ByteStreamConnectable + | tuple[str | IPv4Address | IPv6Address, int] + | str + | bytes + | PathLike[str], + /, + *, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_hostname: str | None = None, + tls_standard_compatible: bool = True, +) -> ByteStreamConnectable: + """ + Return a byte stream connectable from the given object. + + If a bytestream connectable is given, it is returned unchanged. + If a tuple of (host, port) is given, a TCP connectable is returned. + If a string or bytes path is given, a UNIX connectable is returned. + + If ``tls=True``, the connectable will be wrapped in a + :class:`~.streams.tls.TLSConnectable`. 
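A hedged sketch of the connectable API being documented here; these classes are recent additions, so the import path below is an assumption and may differ between anyio versions:

```python
import anyio
# These names are defined in this module; depending on the anyio version they
# may also be re-exported elsewhere.
from anyio._core._sockets import as_connectable

async def main():
    # A (host, port) tuple becomes a TCPConnectable, wrapped in TLS as described
    connectable = as_connectable(("example.org", 443), tls=True)
    async with await connectable.connect() as stream:
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.org\r\n\r\n")
        print(await stream.receive())

anyio.run(main)
```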
+
+    :param remote: a connectable, a tuple of (host, port) or a path to a UNIX socket
+    :param tls: if ``True``, wrap the plaintext connectable in a
+        :class:`~.streams.tls.TLSConnectable`, using the provided TLS settings
+    :param ssl_context: if ``tls=True``, the SSLContext object to use (if not provided,
+        a secure default will be created)
+    :param tls_hostname: if ``tls=True``, host name of the server to use for checking
+        the server certificate (defaults to the host portion of the address for TCP
+        connectables)
+    :param tls_standard_compatible: if ``False`` and ``tls=True``, makes the TLS stream
+        skip the closing handshake when closing the connection, so it won't raise an
+        exception if the server does the same
+
+    """
+    connectable: TCPConnectable | UNIXConnectable | TLSConnectable
+    if isinstance(remote, ByteStreamConnectable):
+        return remote
+    elif isinstance(remote, tuple) and len(remote) == 2:
+        connectable = TCPConnectable(*remote)
+    elif isinstance(remote, (str, bytes, PathLike)):
+        connectable = UNIXConnectable(remote)
+    else:
+        raise TypeError(f"cannot convert {remote!r} to a connectable")
+
+    if tls:
+        if not tls_hostname and isinstance(connectable, TCPConnectable):
+            tls_hostname = str(connectable.host)
+
+        connectable = TLSConnectable(
+            connectable,
+            ssl_context=ssl_context,
+            hostname=tls_hostname,
+            standard_compatible=tls_standard_compatible,
+        )
+
+    return connectable
diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_streams.py b/venv/lib/python3.12/site-packages/anyio/_core/_streams.py
new file mode 100644
index 0000000..2b9c7df
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_core/_streams.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+import math
+from typing import TypeVar
+from warnings import warn
+
+from ..streams.memory import (
+    MemoryObjectReceiveStream,
+    MemoryObjectSendStream,
+    _MemoryObjectStreamState,
+)
+
+T_Item = TypeVar("T_Item")
+
+
+class create_memory_object_stream(
+    tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
+):
+    """
+    Create a memory object stream.
+
+    The stream's item type can be annotated like
+    :func:`create_memory_object_stream[T_Item]`.
+
+    :param max_buffer_size: number of items held in the buffer until ``send()`` starts
+        blocking
+    :param item_type: old way of marking the streams with the right generic type for
+        static typing (does nothing on AnyIO 4)
+
+        .. deprecated:: 4.0
+           Use ``create_memory_object_stream[YourItemType](...)`` instead.
+    :return: a tuple of (send stream, receive stream)
+
+    """
+
+    def __new__(  # type: ignore[misc]
+        cls, max_buffer_size: float = 0, item_type: object = None
+    ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
+        if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
+            raise ValueError("max_buffer_size must be either an integer or math.inf")
+        if max_buffer_size < 0:
+            raise ValueError("max_buffer_size cannot be negative")
+        if item_type is not None:
+            warn(
+                "The item_type argument has been deprecated in AnyIO 4.0. "
+                "Use create_memory_object_stream[YourItemType](...)
instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = _MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py b/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 0000000..36d9b30 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +import sys +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence +from io import BytesIO +from os import PathLike +from subprocess import PIPE, CalledProcessError, CompletedProcess +from typing import IO, Any, Union, cast + +from ..abc import Process +from ._eventloop import get_async_backend +from ._tasks import create_task_group + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] + + +async def run_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + input: bytes | None = None, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + check: bool = True, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> CompletedProcess[bytes]: + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None`; ``input`` overrides this + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used + to specify process startup parameters (Windows only) + :param creationflags: flags that can be used to control the creation of the + subprocess (see :class:`subprocess.Popen` for the specifics) + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) + :param pass_fds: sequence of file descriptors to keep open between the parent and + child processes. 
(POSIX only) + :param user: effective user to run the process as (Python >= 3.9, POSIX only) + :param group: effective group to run the process as (Python >= 3.9, POSIX only) + :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, + POSIX only) + :param umask: if not negative, this umask is applied in the child process before + running the given command (Python >= 3.9, POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + if stdin is not None and input is not None: + raise ValueError("only one of stdin and input is allowed") + + async with await open_process( + command, + stdin=PIPE if input else stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + startupinfo=startupinfo, + creationflags=creationflags, + start_new_session=start_new_session, + pass_fds=pass_fds, + user=user, + group=group, + extra_groups=extra_groups, + umask=umask, + ) as process: + stream_contents: list[bytes | None] = [None, None] + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None = PIPE, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> Process: + """ + Start an external command in a subprocess. + + .. 
seealso:: :class:`subprocess.Popen`
+
+    :param command: either a string to pass to the shell, or an iterable of strings
+        containing the executable name or path and its arguments
+    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
+        file-like object, or ``None``
+    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or ``None``
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        :data:`subprocess.STDOUT`, a file-like object, or ``None``
+    :param cwd: If not ``None``, the working directory is changed before executing
+    :param env: If env is not ``None``, it must be a mapping that defines the
+        environment variables for the new process
+    :param creationflags: flags that can be used to control the creation of the
+        subprocess (see :class:`subprocess.Popen` for the specifics)
+    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+        to specify process startup parameters (Windows only)
+    :param start_new_session: if ``true`` the setsid() system call will be made in the
+        child process prior to the execution of the subprocess. (POSIX only)
+    :param pass_fds: sequence of file descriptors to keep open between the parent and
+        child processes. (POSIX only)
+    :param user: effective user to run the process as (POSIX only)
+    :param group: effective group to run the process as (POSIX only)
+    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
+    :param umask: if not negative, this umask is applied in the child process before
+        running the given command (POSIX only)
+    :return: an asynchronous process object
+
+    """
+    kwargs: dict[str, Any] = {}
+    if user is not None:
+        kwargs["user"] = user
+
+    if group is not None:
+        kwargs["group"] = group
+
+    if extra_groups is not None:
+        kwargs["extra_groups"] = extra_groups
+
+    if umask >= 0:
+        kwargs["umask"] = umask
+
+    return await get_async_backend().open_process(
+        command,
+        stdin=stdin,
+        stdout=stdout,
+        stderr=stderr,
+        cwd=cwd,
+        env=env,
+        startupinfo=startupinfo,
+        creationflags=creationflags,
+        start_new_session=start_new_session,
+        pass_fds=pass_fds,
+        **kwargs,
+    )
diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py b/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
new file mode 100644
index 0000000..eab87d4
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
@@ -0,0 +1,753 @@
+from __future__ import annotations
+
+import math
+from collections import deque
+from collections.abc import Callable
+from dataclasses import dataclass
+from types import TracebackType
+from typing import TypeVar
+
+from ..lowlevel import checkpoint_if_cancelled
+from ._eventloop import NoCurrentAsyncBackend, get_async_backend
+from ._exceptions import BusyResourceError
+from ._tasks import CancelScope
+from ._testing import TaskInfo, get_current_task
+
+T = TypeVar("T")
+
+
+@dataclass(frozen=True)
+class EventStatistics:
+    """
+    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
+    """
+
+    tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class CapacityLimiterStatistics:
+    """
+    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
+    :ivar float total_tokens: total number of available tokens
+    :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from
+        this limiter
+    :ivar int tasks_waiting: number of tasks waiting on
+        :meth:`~.CapacityLimiter.acquire` or
:meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except NoCurrentAsyncBackend: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. + + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + _is_set: bool = False + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + if self._is_set: + self._internal_event.set() + + return self._internal_event + + def set(self) -> None: + if self._internal_event is None: + self._is_set = True + else: + self._event.set() + + def is_set(self) -> bool: + if self._internal_event is None: + return self._is_set + + return self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + try: + return get_async_backend().create_lock(fast_acquire=fast_acquire) + except NoCurrentAsyncBackend: + return LockAdapter(fast_acquire=fast_acquire) + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. 
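A minimal sketch of the ``Event`` primitive defined above (timings are illustrative):

```python
import anyio

async def waiter(event: anyio.Event) -> None:
    await event.wait()  # blocks until set() is called somewhere
    print("event was set")

async def main():
    event = anyio.Event()
    async with anyio.create_task_group() as tg:
        tg.start_soon(waiter, event)
        await anyio.sleep(0.1)  # give the waiter time to start waiting
        event.set()             # wakes every waiter; anyio events cannot be reset

anyio.run(main)
```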
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Release the lock.""" + raise NotImplementedError + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + raise NotImplementedError + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class LockAdapter(Lock): + _internal_lock: Lock | None = None + + def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False): + self._fast_acquire = fast_acquire + + @property + def _lock(self) -> Lock: + if self._internal_lock is None: + self._internal_lock = get_async_backend().create_lock( + fast_acquire=self._fast_acquire + ) + + return self._internal_lock + + async def __aenter__(self) -> None: + await self._lock.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self._internal_lock is not None: + self._internal_lock.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await self._lock.acquire() + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + + def release(self) -> None: + """Release the lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._lock.locked() + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + + """ + if self._internal_lock is None: + return LockStatistics(False, None, 0) + + return self._internal_lock.statistics() + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
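A classic producer/consumer sketch for the ``Condition`` class above (names are illustrative):

```python
import anyio

async def consumer(cond: anyio.Condition, items: list) -> None:
    async with cond:           # acquires the underlying lock
        while not items:
            await cond.wait()  # releases the lock while waiting, then reacquires it
        print("got", items.pop())

async def main():
    cond = anyio.Condition()
    items: list[str] = []
    async with anyio.create_task_group() as tg:
        tg.start_soon(consumer, cond, items)
        await anyio.sleep(0.1)
        async with cond:       # the lock must be held to notify
            items.append("work")
            cond.notify()

anyio.run(main)
```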
+
+        :raises ~anyio.WouldBlock: if the operation would block
+
+        """
+        self._lock.acquire_nowait()
+        self._owner_task = get_current_task()
+
+    def release(self) -> None:
+        """Release the underlying lock."""
+        self._lock.release()
+
+    def locked(self) -> bool:
+        """Return True if the underlying lock is currently held."""
+        return self._lock.locked()
+
+    def notify(self, n: int = 1) -> None:
+        """Notify exactly n listeners."""
+        self._check_acquired()
+        for _ in range(n):
+            try:
+                event = self._waiters.popleft()
+            except IndexError:
+                break
+
+            event.set()
+
+    def notify_all(self) -> None:
+        """Notify all the listeners."""
+        self._check_acquired()
+        for event in self._waiters:
+            event.set()
+
+        self._waiters.clear()
+
+    async def wait(self) -> None:
+        """Wait for a notification."""
+        await checkpoint_if_cancelled()
+        self._check_acquired()
+        event = Event()
+        self._waiters.append(event)
+        self.release()
+        try:
+            await event.wait()
+        except BaseException:
+            if not event.is_set():
+                self._waiters.remove(event)
+
+            raise
+        finally:
+            with CancelScope(shield=True):
+                await self.acquire()
+
+    async def wait_for(self, predicate: Callable[[], T]) -> T:
+        """
+        Wait until a predicate becomes true.
+
+        :param predicate: a callable that returns a truthy value when the condition is
+            met
+        :return: the result of the predicate
+
+        .. versionadded:: 4.11.0
+
+        """
+        while not (result := predicate()):
+            await self.wait()
+
+        return result
+
+    def statistics(self) -> ConditionStatistics:
+        """
+        Return statistics about the current state of this condition.
+
+        .. versionadded:: 3.0
+        """
+        return ConditionStatistics(len(self._waiters), self._lock.statistics())
+
+
+class Semaphore:
+    def __new__(
+        cls,
+        initial_value: int,
+        *,
+        max_value: int | None = None,
+        fast_acquire: bool = False,
+    ) -> Semaphore:
+        try:
+            return get_async_backend().create_semaphore(
+                initial_value, max_value=max_value, fast_acquire=fast_acquire
+            )
+        except NoCurrentAsyncBackend:
+            return SemaphoreAdapter(initial_value, max_value=max_value)
+
+    def __init__(
+        self,
+        initial_value: int,
+        *,
+        max_value: int | None = None,
+        fast_acquire: bool = False,
+    ):
+        if not isinstance(initial_value, int):
+            raise TypeError("initial_value must be an integer")
+        if initial_value < 0:
+            raise ValueError("initial_value must be >= 0")
+        if max_value is not None:
+            if not isinstance(max_value, int):
+                raise TypeError("max_value must be an integer or None")
+            if max_value < initial_value:
+                raise ValueError(
+                    "max_value must be equal to or higher than initial_value"
+                )
+
+        self._fast_acquire = fast_acquire
+
+    async def __aenter__(self) -> Semaphore:
+        await self.acquire()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
+        self.release()
+
+    async def acquire(self) -> None:
+        """Decrement the semaphore value, blocking if necessary."""
+        raise NotImplementedError
+
+    def acquire_nowait(self) -> None:
+        """
+        Decrement the semaphore value, without blocking.
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Increment the semaphore value.""" + raise NotImplementedError + + @property + def value(self) -> int: + """The current value of the semaphore.""" + raise NotImplementedError + + @property + def max_value(self) -> int | None: + """The maximum value of the semaphore.""" + raise NotImplementedError + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class SemaphoreAdapter(Semaphore): + _internal_semaphore: Semaphore | None = None + + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> SemaphoreAdapter: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self._initial_value = initial_value + self._max_value = max_value + + @property + def _semaphore(self) -> Semaphore: + if self._internal_semaphore is None: + self._internal_semaphore = get_async_backend().create_semaphore( + self._initial_value, max_value=self._max_value + ) + + return self._internal_semaphore + + async def acquire(self) -> None: + await self._semaphore.acquire() + + def acquire_nowait(self) -> None: + self._semaphore.acquire_nowait() + + def release(self) -> None: + self._semaphore.release() + + @property + def value(self) -> int: + if self._internal_semaphore is None: + return self._initial_value + + return self._semaphore.value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + if self._internal_semaphore is None: + return SemaphoreStatistics(tasks_waiting=0) + + return self._semaphore.statistics() + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except NoCurrentAsyncBackend: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. + .. versionchanged:: 4.12 + The value can now be set to 0. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. 
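A sketch of borrowing from the ``CapacityLimiter`` documented here; like ``Semaphore``, it is normally used as an async context manager (the token count and sleep are illustrative):

```python
import anyio

async def worker(limiter: anyio.CapacityLimiter, n: int) -> None:
    async with limiter:         # borrows one token; waits if none are free
        await anyio.sleep(0.1)  # stand-in for real work
        print("finished", n)

async def main():
    limiter = anyio.CapacityLimiter(2)  # at most two concurrent borrowers
    async with anyio.create_task_group() as tg:
        for n in range(5):
            tg.start_soon(worker, limiter, n)

anyio.run(main)
```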
+ + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def 
release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. "Another task is already {action} this resource") + + .. versionadded:: 4.1 + """ + + __slots__ = "action", "_guarded" + + def __init__(self, action: str = "using"): + self.action: str = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._guarded = False diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py b/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py new file mode 100644 index 0000000..d37efe3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import math +from collections.abc import Generator +from contextlib import contextmanager +from types import TracebackType + +from ..abc._tasks import TaskGroup, TaskStatus +from ._eventloop import get_async_backend + + +class _IgnoredTaskStatus(TaskStatus[object]): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope: + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) + + def cancel(self, reason: str | None = None) -> None: + """ + Cancel this scope immediately. + + :param reason: a message describing the reason for the cancellation + + """ + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. The value will, however, + only be ``True`` if the cancellation was triggered by the scope itself (and not + an outer scope). + + """ + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. 
+ + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> CancelScope: + raise NotImplementedError + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + raise NotImplementedError + + +@contextmanager +def fail_after( + delay: float | None, shield: bool = False +) -> Generator[CancelScope, None, None]: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in + time. + + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] + + """ + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError + + +def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. + + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = ( + (get_async_backend().current_time() + delay) if delay is not None else math.inf + ) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the current + task. + + :return: a clock value from the event loop's internal clock (or ``float('inf')`` if + there is no deadline in effect, or ``float('-inf')`` if the current scope has + been cancelled) + :rtype: float + + """ + return get_async_backend().current_effective_deadline() + + +def create_task_group() -> TaskGroup: + """ + Create a task group. + + :return: a task group + + """ + return get_async_backend().create_task_group() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py b/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py new file mode 100644 index 0000000..fbb6b14 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py @@ -0,0 +1,616 @@ +from __future__ import annotations + +import os +import sys +import tempfile +from collections.abc import Iterable +from io import BytesIO, TextIOWrapper +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AnyStr, + Generic, + overload, +) + +from .. import to_thread +from .._core._fileio import AsyncFile +from ..lowlevel import checkpoint_if_cancelled + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer + + +class TemporaryFile(Generic[AnyStr]): + """ + An asynchronous temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager interface to a temporary file. 
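The two timeout helpers above differ only in how they report expiry; a quick sketch (delays are illustrative):

```python
import anyio

async def main():
    # move_on_after: the block is simply abandoned when the deadline passes
    with anyio.move_on_after(0.5) as scope:
        await anyio.sleep(10)
    print("timed out:", scope.cancelled_caught)

    # fail_after: raises TimeoutError instead of returning silently
    try:
        with anyio.fail_after(0.5):
            await anyio.sleep(10)
    except TimeoutError:
        print("gave up")

anyio.run(main)
```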
+ The file is created using Python's standard `tempfile.TemporaryFile` function in a + background thread, and is wrapped as an asynchronous file using `AsyncFile`. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: TemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: TemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + + def __init__( + self, + mode: OpenTextMode | OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: + self.mode = mode + self.buffering = buffering + self.encoding = encoding + self.newline = newline + self.suffix: str | None = suffix + self.prefix: str | None = prefix + self.dir: str | None = dir + self.errors = errors + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.TemporaryFile( + self.mode, + self.buffering, + self.encoding, + self.newline, + self.suffix, + self.prefix, + self.dir, + errors=self.errors, + ) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class NamedTemporaryFile(Generic[AnyStr]): + """ + An asynchronous named temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager for a temporary file with a + visible name in the file system. It uses Python's standard + :func:`~tempfile.NamedTemporaryFile` function and wraps the file object with + :class:`AsyncFile` for asynchronous operations. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param delete: Whether to delete the file when it is closed. + :param errors: The error handling scheme used for encoding/decoding errors. 
+ :param delete_on_close: (Python 3.12+) Whether to delete the file on close. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: NamedTemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... + @overload + def __init__( + self: NamedTemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... + + def __init__( + self, + mode: OpenBinaryMode | OpenTextMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> None: + self._params: dict[str, Any] = { + "mode": mode, + "buffering": buffering, + "encoding": encoding, + "newline": newline, + "suffix": suffix, + "prefix": prefix, + "dir": dir, + "delete": delete, + "errors": errors, + } + if sys.version_info >= (3, 12): + self._params["delete_on_close"] = delete_on_close + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.NamedTemporaryFile(**self._params) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class SpooledTemporaryFile(AsyncFile[AnyStr]): + """ + An asynchronous spooled temporary file that starts in memory and is spooled to disk. + + This class provides an asynchronous interface to a spooled temporary file, much like + Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous + write operations and provides a method to force a rollover to disk. + + :param max_size: Maximum size in bytes before the file is rolled over to disk. + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file (text mode only). + :param newline: Controls how universal newlines mode works (text mode only). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _rolled: bool = False + + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = ..., + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... 
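A short sketch of the async temporary-file wrappers in this module; whether they are re-exported as ``anyio.NamedTemporaryFile`` depends on the anyio version, so the import below is an assumption:

```python
import anyio
from anyio import NamedTemporaryFile  # assumption: top-level re-export (anyio 4.9+)

async def main():
    # The default mode "w+b" yields an AsyncFile[bytes]
    async with NamedTemporaryFile() as f:
        await f.write(b"scratch data")
        await f.seek(0)
        print(await f.read())

anyio.run(main)
```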
+ @overload
+ def __init__(
+ self: SpooledTemporaryFile[str],
+ max_size: int = ...,
+ mode: OpenTextMode = ...,
+ buffering: int = ...,
+ encoding: str | None = ...,
+ newline: str | None = ...,
+ suffix: str | None = ...,
+ prefix: str | None = ...,
+ dir: str | None = ...,
+ *,
+ errors: str | None = ...,
+ ): ...
+
+ def __init__(
+ self,
+ max_size: int = 0,
+ mode: OpenBinaryMode | OpenTextMode = "w+b",
+ buffering: int = -1,
+ encoding: str | None = None,
+ newline: str | None = None,
+ suffix: str | None = None,
+ prefix: str | None = None,
+ dir: str | None = None,
+ *,
+ errors: str | None = None,
+ ) -> None:
+ self._tempfile_params: dict[str, Any] = {
+ "mode": mode,
+ "buffering": buffering,
+ "encoding": encoding,
+ "newline": newline,
+ "suffix": suffix,
+ "prefix": prefix,
+ "dir": dir,
+ "errors": errors,
+ }
+ self._max_size = max_size
+ if "b" in mode:
+ super().__init__(BytesIO()) # type: ignore[arg-type]
+ else:
+ super().__init__(
+ TextIOWrapper( # type: ignore[arg-type]
+ BytesIO(),
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
+ write_through=True,
+ )
+ )
+
+ async def aclose(self) -> None:
+ if not self._rolled:
+ self._fp.close()
+ return
+
+ await super().aclose()
+
+ async def _check(self) -> None:
+ if self._rolled or self._fp.tell() <= self._max_size:
+ return
+
+ await self.rollover()
+
+ async def rollover(self) -> None:
+ if self._rolled:
+ return
+
+ self._rolled = True
+ buffer = self._fp
+ buffer.seek(0)
+ self._fp = await to_thread.run_sync(
+ lambda: tempfile.TemporaryFile(**self._tempfile_params)
+ )
+ await self.write(buffer.read())
+ buffer.close()
+
+ @property
+ def closed(self) -> bool:
+ return self._fp.closed
+
+ async def read(self, size: int = -1) -> AnyStr:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.read(size)
+
+ return await super().read(size) # type: ignore[return-value]
+
+ async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.read1(size)
+
+ return await super().read1(size)
+
+ async def readline(self) -> AnyStr:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.readline()
+
+ return await super().readline() # type: ignore[return-value]
+
+ async def readlines(self) -> list[AnyStr]:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.readlines()
+
+ return await super().readlines() # type: ignore[return-value]
+
+ async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.readinto(b)
+
+ return await super().readinto(b)
+
+ async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.readinto1(b)
+
+ return await super().readinto1(b)
+
+ async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.seek(offset, whence)
+
+ return await super().seek(offset, whence)
+
+ async def tell(self) -> int:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.tell()
+
+ return await super().tell()
+
+ async def truncate(self, size: int | None = None) -> int:
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ return self._fp.truncate(size)
+
+ return await super().truncate(size)
+
+ @overload
+ async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ...
+ @overload
+ async def write(self: SpooledTemporaryFile[str], b: str) -> int: ...
+
+ async def write(self, b: ReadableBuffer | str) -> int:
+ """
+ Asynchronously write data to the spooled temporary file.
+
+ If the file has not yet been rolled over, the data is written synchronously,
+ and a rollover is triggered if the size exceeds the maximum size.
+
+ :param b: The data to write.
+ :return: The number of bytes written.
+ :raises RuntimeError: If the underlying file is not initialized.
+
+ """
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ result = self._fp.write(b)
+ await self._check()
+ return result
+
+ return await super().write(b) # type: ignore[misc]
+
+ @overload
+ async def writelines(
+ self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer]
+ ) -> None: ...
+ @overload
+ async def writelines(
+ self: SpooledTemporaryFile[str], lines: Iterable[str]
+ ) -> None: ...
+
+ async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None:
+ """
+ Asynchronously write a list of lines to the spooled temporary file.
+
+ If the file has not yet been rolled over, the lines are written synchronously,
+ and a rollover is triggered if the size exceeds the maximum size.
+
+ :param lines: An iterable of lines to write.
+ :raises RuntimeError: If the underlying file is not initialized.
+
+ """
+ if not self._rolled:
+ await checkpoint_if_cancelled()
+ result = self._fp.writelines(lines)
+ await self._check()
+ return result
+
+ return await super().writelines(lines) # type: ignore[misc]
+
+
+class TemporaryDirectory(Generic[AnyStr]):
+ """
+ An asynchronous temporary directory that is created and cleaned up automatically.
+
+ This class provides an asynchronous context manager for creating a temporary
+ directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to
+ perform directory creation and cleanup operations in a background thread.
+
+ :param suffix: Suffix to be added to the temporary directory name.
+ :param prefix: Prefix to be added to the temporary directory name.
+ :param dir: The parent directory where the temporary directory is created.
+ :param ignore_cleanup_errors: Whether to ignore errors during cleanup
+ (Python 3.10+).
+ :param delete: Whether to delete the directory upon closing (Python 3.12+).
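A short sketch of the spooling behaviour implemented above: writes stay in an in-memory buffer until max_size is exceeded, after which the data is rolled over to a real temporary file (illustrative; _rolled is a private flag, poked here only to make the transition visible):

import anyio

async def main() -> None:
    async with anyio.SpooledTemporaryFile(max_size=16) as f:
        await f.write(b"tiny")      # still buffered in memory
        assert not f._rolled        # private flag, for illustration only
        await f.write(b"x" * 32)    # exceeds max_size -> spooled to disk
        assert f._rolled
        await f.rollover()          # no-op once already rolled over

anyio.run(main)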
+ """ + + def __init__( + self, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + *, + ignore_cleanup_errors: bool = False, + delete: bool = True, + ) -> None: + self.suffix: AnyStr | None = suffix + self.prefix: AnyStr | None = prefix + self.dir: AnyStr | None = dir + self.ignore_cleanup_errors = ignore_cleanup_errors + self.delete = delete + + self._tempdir: tempfile.TemporaryDirectory | None = None + + async def __aenter__(self) -> str: + params: dict[str, Any] = { + "suffix": self.suffix, + "prefix": self.prefix, + "dir": self.dir, + } + if sys.version_info >= (3, 10): + params["ignore_cleanup_errors"] = self.ignore_cleanup_errors + + if sys.version_info >= (3, 12): + params["delete"] = self.delete + + self._tempdir = await to_thread.run_sync( + lambda: tempfile.TemporaryDirectory(**params) + ) + return await to_thread.run_sync(self._tempdir.__enter__) + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + if self._tempdir is not None: + await to_thread.run_sync( + self._tempdir.__exit__, exc_type, exc_value, traceback + ) + + async def cleanup(self) -> None: + if self._tempdir is not None: + await to_thread.run_sync(self._tempdir.cleanup) + + +@overload +async def mkstemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + text: bool = False, +) -> tuple[int, str]: ... + + +@overload +async def mkstemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, + text: bool = False, +) -> tuple[int, bytes]: ... + + +async def mkstemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + text: bool = False, +) -> tuple[int, str | bytes]: + """ + Asynchronously create a temporary file and return an OS-level handle and the file + name. + + This function wraps `tempfile.mkstemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the file name. + :param prefix: Prefix to be added to the file name. + :param dir: Directory in which the temporary file is created. + :param text: Whether the file is opened in text mode. + :return: A tuple containing the file descriptor and the file name. + + """ + return await to_thread.run_sync(tempfile.mkstemp, suffix, prefix, dir, text) + + +@overload +async def mkdtemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, +) -> str: ... + + +@overload +async def mkdtemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, +) -> bytes: ... + + +async def mkdtemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, +) -> str | bytes: + """ + Asynchronously create a temporary directory and return its path. + + This function wraps `tempfile.mkdtemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the directory name. + :param prefix: Prefix to be added to the directory name. + :param dir: Parent directory where the temporary directory is created. + :return: The path of the created temporary directory. + + """ + return await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir) + + +async def gettempdir() -> str: + """ + Asynchronously return the name of the directory used for temporary files. + + This function wraps `tempfile.gettempdir` and executes it in a background thread. + + :return: The path of the temporary directory as a string. 
+ + """ + return await to_thread.run_sync(tempfile.gettempdir) + + +async def gettempdirb() -> bytes: + """ + Asynchronously return the name of the directory used for temporary files in bytes. + + This function wraps `tempfile.gettempdirb` and executes it in a background thread. + + :return: The path of the temporary directory as bytes. + + """ + return await to_thread.run_sync(tempfile.gettempdirb) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_testing.py b/venv/lib/python3.12/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000..9e28b22 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_testing.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any, cast + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. + + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def has_pending_cancellation(self) -> bool: + """ + Return ``True`` if the task has a cancellation pending, ``False`` otherwise. + + """ + return False + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py b/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..f358a44 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. 
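For the task-introspection helpers defined in _testing.py above, a small sketch (get_current_task and get_running_tasks are public anyio API):

import anyio
from anyio import get_current_task, get_running_tasks

async def main() -> None:
    me = get_current_task()
    print(me.id, me.name)             # unique id; name may be None
    print(len(get_running_tasks()))   # TaskInfo objects for all live tasks

anyio.run(main)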
+ """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. + + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + getter = self.extra_attributes[attribute] + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default + + return getter() diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__init__.py b/venv/lib/python3.12/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..d560ce3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/__init__.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyByteStreamConnectable as AnyByteStreamConnectable +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import ByteStreamConnectable as ByteStreamConnectable 
+from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import ObjectStreamConnectable as ObjectStreamConnectable +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio.abc."): + __value.__module__ = __name__ + +del __value diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..49515c2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc new file mode 100644 index 0000000..582d45b Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc new file mode 100644 index 0000000..79af977 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc new file mode 100644 index 0000000..455cdba Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc new file mode 100644 index 0000000..03a1503 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc new file mode 100644 index 0000000..d71c863 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc new file mode 100644 index 
0000000..8c65b52
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc
new file mode 100644
index 0000000..8c2bf67
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py b/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py
new file mode 100644
index 0000000..a99a7ee
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py
@@ -0,0 +1,420 @@
+from __future__ import annotations
+
+import math
+import sys
+from abc import ABCMeta, abstractmethod
+from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
+from contextlib import AbstractContextManager
+from os import PathLike
+from signal import Signals
+from socket import AddressFamily, SocketKind, socket
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ TypeVar,
+ Union,
+ overload,
+)
+
+if sys.version_info >= (3, 11):
+ from typing import TypeVarTuple, Unpack
+else:
+ from typing_extensions import TypeVarTuple, Unpack
+
+if sys.version_info >= (3, 10):
+ from typing import TypeAlias
+else:
+ from typing_extensions import TypeAlias
+
+if TYPE_CHECKING:
+ from _typeshed import FileDescriptorLike
+
+ from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore
+ from .._core._tasks import CancelScope
+ from .._core._testing import TaskInfo
+ from ..from_thread import BlockingPortal
+ from ._sockets import (
+ ConnectedUDPSocket,
+ ConnectedUNIXDatagramSocket,
+ IPSockAddrType,
+ SocketListener,
+ SocketStream,
+ UDPSocket,
+ UNIXDatagramSocket,
+ UNIXSocketStream,
+ )
+ from ._subprocesses import Process
+ from ._tasks import TaskGroup
+ from ._testing import TestRunner
+
+T_Retval = TypeVar("T_Retval")
+PosArgsT = TypeVarTuple("PosArgsT")
+StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
+
+
+class AsyncBackend(metaclass=ABCMeta):
+ @classmethod
+ @abstractmethod
+ def run(
+ cls,
+ func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
+ args: tuple[Unpack[PosArgsT]],
+ kwargs: dict[str, Any],
+ options: dict[str, Any],
+ ) -> T_Retval:
+ """
+ Run the given coroutine function in an asynchronous event loop.
+
+ The current thread must not be already running an event loop.
+
+ :param func: a coroutine function
+ :param args: positional arguments to ``func``
+ :param kwargs: keyword arguments to ``func``
+ :param options: keyword arguments to call the backend ``run()`` implementation
+ with
+ :return: the return value of the coroutine function
+ """
+
+ @classmethod
+ @abstractmethod
+ def current_token(cls) -> object:
+ """
+ Return an object that allows other threads to run code inside the event loop.
+
+ :return: a token object, specific to the event loop running in the current
+ thread
+ """
+
+ @classmethod
+ @abstractmethod
+ def current_time(cls) -> float:
+ """
+ Return the current value of the event loop's internal clock.
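These backend hooks sit behind anyio's public clock helpers; a sketch of the public equivalents (assuming anyio.current_time and anyio.sleep, both part of the stable API):

import anyio

async def main() -> None:
    start = anyio.current_time()          # the event loop's internal clock
    await anyio.sleep(0.1)
    print(anyio.current_time() - start)   # roughly 0.1 seconds

anyio.run(main)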
+ + :return: the clock value (seconds) + """ + + @classmethod + @abstractmethod + def cancelled_exception_class(cls) -> type[BaseException]: + """Return the exception class that is raised in a task if it's cancelled.""" + + @classmethod + @abstractmethod + async def checkpoint(cls) -> None: + """ + Check if the task has been cancelled, and allow rescheduling of other tasks. + + This is effectively the same as running :meth:`checkpoint_if_cancelled` and then + :meth:`cancel_shielded_checkpoint`. + """ + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + """ + Check if the current task group has been cancelled. + + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task. + + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + pass + + @classmethod + @abstractmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def create_blocking_portal(cls) -> BlockingPortal: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + pass + + @classmethod + 
@abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: ... + + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + async def wrap_listener_socket(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def wrap_stream_socket(cls, sock: socket) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def wrap_unix_stream_socket(cls, sock: socket) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + async def wrap_udp_socket(cls, sock: socket) -> UDPSocket: + pass + + @classmethod + @abstractmethod + async def wrap_connected_udp_socket(cls, sock: socket) -> ConnectedUDPSocket: + pass + + @classmethod + @abstractmethod + async def wrap_unix_datagram_socket(cls, sock: socket) -> UNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket + ) -> ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + 
@abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_resources.py b/venv/lib/python3.12/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..10df115 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_resources.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. + """ + + __slots__ = () + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py b/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..3ff60d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,405 @@ +from __future__ import annotations + +import errno +import socket +import sys +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from typing import Any, TypeVar, Union + +from .._core._eventloop import get_async_backend +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +IPAddressType: TypeAlias = Union[str, IPv4Address, IPv6Address] +IPSockAddrType: TypeAlias = tuple[str, int] +SockAddrType: TypeAlias = Union[IPSockAddrType, str] +UDPPacketType: TypeAlias = tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType: TypeAlias = tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +def _validate_socket( + sock_or_fd: socket.socket | int, + sock_type: socket.SocketKind, + addr_family: socket.AddressFamily = socket.AF_UNSPEC, + *, + require_connected: bool = False, + require_bound: bool = False, +) -> socket.socket: + if isinstance(sock_or_fd, int): + try: + sock = socket.socket(fileno=sock_or_fd) + except OSError as exc: + if exc.errno == errno.ENOTSOCK: + raise ValueError( + "the file descriptor does not refer to a socket" + ) from exc + elif require_connected: + raise ValueError("the socket must be connected") from exc + elif require_bound: + raise ValueError("the socket must be bound to a local address") from exc + else: + raise + elif isinstance(sock_or_fd, socket.socket): + sock = sock_or_fd + else: + raise TypeError( + f"expected an int or socket, got {type(sock_or_fd).__qualname__} instead" + ) + + try: + if require_connected: + try: + sock.getpeername() + except OSError as exc: + raise ValueError("the socket must be connected") from exc + + if require_bound: + try: + if sock.family in (socket.AF_INET, socket.AF_INET6): + bound_addr = sock.getsockname()[1] + else: + bound_addr = sock.getsockname() + except 
OSError: + bound_addr = None + + if not bound_addr: + raise ValueError("the socket must be bound to a local address") + + if addr_family != socket.AF_UNSPEC and sock.family != addr_family: + raise ValueError( + f"address family mismatch: expected {addr_family.name}, got " + f"{sock.family.name}" + ) + + if sock.type != sock_type: + raise ValueError( + f"socket type mismatch: expected {sock_type.name}, got {sock.type.name}" + ) + except BaseException: + # Avoid ResourceWarning from the locally constructed socket object + if isinstance(sock_or_fd, int): + sock.detach() + + raise + + sock.setblocking(False) + return sock + + +class SocketAttribute(TypedAttributeSet): + """ + .. attribute:: family + :type: socket.AddressFamily + + the address family of the underlying socket + + .. attribute:: local_address + :type: tuple[str, int] | str + + the local address the underlying socket is connected to + + .. attribute:: local_port + :type: int + + for IP based sockets, the local port the underlying socket is bound to + + .. attribute:: raw_socket + :type: socket.socket + + the underlying stdlib socket object + + .. attribute:: remote_address + :type: tuple[str, int] | str + + the remote address the underlying socket is connected to + + .. attribute:: remote_port + :type: int + + for IP based sockets, the remote port the underlying socket is connected to + """ + + family: AddressFamily = typed_attribute() + local_address: SockAddrType = typed_attribute() + local_port: int = typed_attribute() + raw_socket: socket.socket = typed_attribute() + remote_address: SockAddrType = typed_attribute() + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> SocketStream: + """ + Wrap an existing socket object or file descriptor as a socket stream. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. 
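A hedged sketch of from_socket as documented above, wrapping an already-connected stdlib socket (the endpoint is illustrative only; the wrapper takes ownership of the socket):

import socket
import anyio
from anyio.abc import SocketStream

async def main() -> None:
    # Blocking connect belongs off the loop in real code; fine for a sketch.
    raw = socket.create_connection(("example.com", 80))
    async with await SocketStream.from_socket(raw) as stream:
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        print(await stream.receive())

anyio.run(main)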
+ + :param sock_or_fd: a socket object or file descriptor + :return: a socket stream + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_connected=True) + return await get_async_backend().wrap_stream_socket(sock) + + +class UNIXSocketStream(SocketStream): + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> UNIXSocketStream: + """ + Wrap an existing socket object or file descriptor as a UNIX socket stream. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a UNIX socket stream + + """ + sock = _validate_socket( + sock_or_fd, socket.SOCK_STREAM, socket.AF_UNIX, require_connected=True + ) + return await get_async_backend().wrap_unix_stream_socket(sock) + + @abstractmethod + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + """ + Send file descriptors along with a message to the peer. + + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file + or socket objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket( + cls, + sock_or_fd: socket.socket | int, + ) -> SocketListener: + """ + Wrap an existing socket object or file descriptor as a socket listener. + + The newly created listener takes ownership of the socket being passed in. + + :param sock_or_fd: a socket object or file descriptor + :return: a socket listener + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_bound=True) + return await get_async_backend().wrap_listener_socket(sock) + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve( + self, + handler: Callable[[SocketStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + from .. import create_task_group + + async with AsyncExitStack() as stack: + if task_group is None: + task_group = await stack.enter_async_context(create_task_group()) + + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> UDPSocket: + """ + Wrap an existing socket object or file descriptor as a UDP socket. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must be bound to a local address. 
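Datagram sockets follow the same pattern; this sketch binds a stdlib UDP socket, wraps it, and uses the sendto alias defined just below (target address is illustrative):

import socket
import anyio
from anyio.abc import UDPSocket

async def main() -> None:
    raw = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    raw.bind(("127.0.0.1", 0))     # must be bound before wrapping
    async with await UDPSocket.from_socket(raw) as udp:
        await udp.sendto(b"ping", "127.0.0.1", 9)   # 9 = discard port

anyio.run(main)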
+
+ :param sock_or_fd: a socket object or file descriptor
+ :return: a UDP socket
+
+ """
+ sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, require_bound=True)
+ return await get_async_backend().wrap_udp_socket(sock)
+
+ async def sendto(self, data: bytes, host: str, port: int) -> None:
+ """
+ Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).
+
+ """
+ return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+ """
+ Represents a connected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ @classmethod
+ async def from_socket(cls, sock_or_fd: socket.socket | int) -> ConnectedUDPSocket:
+ """
+ Wrap an existing socket object or file descriptor as a connected UDP socket.
+
+ The newly created socket wrapper takes ownership of the socket being passed in.
+ The existing socket must already be connected.
+
+ :param sock_or_fd: a socket object or file descriptor
+ :return: a connected UDP socket
+
+ """
+ sock = _validate_socket(
+ sock_or_fd,
+ socket.SOCK_DGRAM,
+ require_connected=True,
+ )
+ return await get_async_backend().wrap_connected_udp_socket(sock)
+
+
+class UNIXDatagramSocket(
+ UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider
+):
+ """
+ Represents an unconnected Unix datagram socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ @classmethod
+ async def from_socket(
+ cls,
+ sock_or_fd: socket.socket | int,
+ ) -> UNIXDatagramSocket:
+ """
+ Wrap an existing socket object or file descriptor as a UNIX datagram
+ socket.
+
+ The newly created socket wrapper takes ownership of the socket being passed in.
+
+ :param sock_or_fd: a socket object or file descriptor
+ :return: a UNIX datagram socket
+
+ """
+ sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX)
+ return await get_async_backend().wrap_unix_datagram_socket(sock)
+
+ async def sendto(self, data: bytes, path: str) -> None:
+ """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path))."""
+ return await self.send((data, path))
+
+
+class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider):
+ """
+ Represents a connected Unix datagram socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ @classmethod
+ async def from_socket(
+ cls,
+ sock_or_fd: socket.socket | int,
+ ) -> ConnectedUNIXDatagramSocket:
+ """
+ Wrap an existing socket object or file descriptor as a connected UNIX datagram
+ socket.
+
+ The newly created socket wrapper takes ownership of the socket being passed in.
+ The existing socket must already be connected.
+ + :param sock_or_fd: a socket object or file descriptor + :return: a connected UNIX datagram socket + + """ + sock = _validate_socket( + sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX, require_connected=True + ) + return await get_async_backend().wrap_connected_unix_datagram_socket(sock) diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_streams.py b/venv/lib/python3.12/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..369df3f --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_streams.py @@ -0,0 +1,239 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. + """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration from None + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. 
+ """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration from None + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementers of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream: TypeAlias = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream: TypeAlias = Union[ + UnreliableObjectSendStream[bytes], ByteSendStream +] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream: TypeAlias = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream: TypeAlias = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream: TypeAlias = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream: TypeAlias = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. 
+ + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ + + +class ObjectStreamConnectable(Generic[T_co], metaclass=ABCMeta): + @abstractmethod + async def connect(self) -> ObjectStream[T_co]: + """ + Connect to the remote endpoint. + + :return: an object stream connected to the remote end + :raises ConnectionFailed: if the connection fails + """ + + +class ByteStreamConnectable(metaclass=ABCMeta): + @abstractmethod + async def connect(self) -> ByteStream: + """ + Connect to the remote endpoint. + + :return: a bytestream connected to the remote end + :raises ConnectionFailed: if the connection fails + """ + + +#: Type alias for all connectables returning bytestreams or bytes-oriented object streams +AnyByteStreamConnectable: TypeAlias = Union[ + ObjectStreamConnectable[bytes], ByteStreamConnectable +] diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py b/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..ce0564c --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. 
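This Process interface backs anyio's high-level subprocess helpers; a minimal sketch using run_process (POSIX command, illustrative):

import anyio

async def main() -> None:
    # run_process waits for the process to exit and captures output as bytes.
    result = await anyio.run_process(["echo", "hi"])
    print(result.returncode, result.stdout)

anyio.run(main)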
+ """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py b/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..516b3ec --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, overload + +if sys.version_info >= (3, 13): + from typing import TypeVar +else: + from typing_extensions import TypeVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True, default=None) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: ... + + @overload + def started(self, value: T_contra) -> None: ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + + .. note:: On asyncio, support for eager task factories is considered to be + **experimental**. In particular, they don't follow the usual semantics of new + tasks being scheduled on the next iteration of the event loop, and may thus + cause unexpected behavior in code that wasn't written with such semantics in + mind. + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + The target callable must accept a keyword argument ``task_status`` (of type + :class:`TaskStatus`). Awaiting on this method will return whatever was passed to + ``task_status.started()`` (``None`` by default). + + .. note:: The :class:`TaskStatus` class is generic, and the type argument should + indicate the type of the value that will be passed to + ``task_status.started()``. 
+ + :param func: a coroutine function that accepts the ``task_status`` keyword + argument + :param args: positional arguments to call the function with + :param name: an optional name for the task, for introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. seealso:: :ref:`start_initialize` + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_testing.py b/venv/lib/python3.12/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..7c50ed7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_testing.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. 
+ + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/venv/lib/python3.12/site-packages/anyio/from_thread.py b/venv/lib/python3.12/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..f8fb99e --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/from_thread.py @@ -0,0 +1,570 @@ +from __future__ import annotations + +__all__ = ( + "BlockingPortal", + "BlockingPortalProvider", + "check_cancelled", + "run", + "run_sync", + "start_blocking_portal", +) + +import sys +from collections.abc import Awaitable, Callable, Generator +from concurrent.futures import Future +from contextlib import ( + AbstractAsyncContextManager, + AbstractContextManager, + contextmanager, +) +from dataclasses import dataclass, field +from inspect import isawaitable +from threading import Lock, Thread, current_thread, get_ident +from types import TracebackType +from typing import ( + Any, + Generic, + TypeVar, + cast, + overload, +) + +from ._core._eventloop import ( + get_async_backend, + get_cancelled_exc_class, + threadlocals, +) +from ._core._eventloop import run as run_eventloop +from ._core._exceptions import NoEventLoopError +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus +from .lowlevel import EventLoopToken + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +def _token_or_error(token: EventLoopToken | None) -> EventLoopToken: + if token is not None: + return token + + try: + return threadlocals.current_token + except AttributeError: + raise NoEventLoopError( + "Not running inside an AnyIO worker thread, and no event loop token was " + "provided" + ) from None + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + token: EventLoopToken | None = None, +) -> T_Retval: + """ + Call a coroutine function from a worker thread. + + :param func: a coroutine function + :param args: positional arguments for the callable + :param token: an event loop token to use to get back to the event loop thread + (required if calling this function from outside an AnyIO worker thread) + :return: the return value of the coroutine function + :raises MissingTokenError: if no token was provided and called from outside an + AnyIO worker thread + :raises RunFinishedError: if the event loop tied to ``token`` is no longer running + + .. versionchanged:: 4.11.0 + Added the ``token`` parameter. + + """ + explicit_token = token is not None + token = _token_or_error(token) + return token.backend_class.run_async_from_thread( + func, args, token=token.native_token if explicit_token else None + ) + + +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + token: EventLoopToken | None = None, +) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param token: an event loop token to use to get back to the event loop thread + (required if calling this function from outside an AnyIO worker thread) + :return: the return value of the callable + :raises MissingTokenError: if no token was provided and called from outside an + AnyIO worker thread + :raises RunFinishedError: if the event loop tied to ``token`` is no longer running + + .. versionchanged:: 4.11.0 + Added the ``token`` parameter. + + """ + explicit_token = token is not None + token = _token_or_error(token) + return token.backend_class.run_sync_from_thread( + func, args, token=token.native_token if explicit_token else None + ) + + +class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): + _enter_future: Future[T_co] + _exit_future: Future[bool | None] + _exit_event: Event + _exit_exc_info: tuple[ + type[BaseException] | None, BaseException | None, TracebackType | None + ] = (None, None, None) + + def __init__( + self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal + ): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> bool | None: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. 
+ result = await self._async_cm.__aexit__(*self._exit_exc_info) + + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + return self._enter_future.result() + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> BlockingPortal: + return get_async_backend().create_blocking_portal() + + def __init__(self) -> None: + self._event_loop_thread_id: int | None = get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> BlockingPortal: + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. 
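For context, a typical portal round-trip from a synchronous thread (a minimal sketch using only the public API)::

    import anyio
    from anyio.from_thread import start_blocking_portal

    async def double(x: int) -> int:
        await anyio.sleep(0)
        return 2 * x

    # The portal runs an event loop in a background thread; call() blocks the
    # calling thread until the coroutine completes there.
    with start_blocking_portal() as portal:
        assert portal.call(double, 21) == 42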
+ + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel("the blocking portal is shutting down") + + async def _call_func( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], + ) -> None: + def callback(f: Future[T_Retval]) -> None: + if f.cancelled(): + if self._event_loop_thread_id == get_ident(): + scope.cancel("the future was cancelled") + elif self._event_loop_thread_id is not None: + self.call(scope.cancel, "the future was cancelled") + + try: + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): + with CancelScope() as scope: + future.add_done_callback(callback) + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable + except self._cancelled_exc_class: + future.cancel() + future.set_running_or_notify_cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + """ + Spawn a new task using the given callable. + + Implementers must ensure that the future is resolved when the task finishes. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: ... + + @overload + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: ... + + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + """ + Start a task in the portal's task group. 
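A short sketch of the future-based variant (illustrative only; ``work`` is invented)::

    import anyio
    from anyio.from_thread import start_blocking_portal

    async def work(x: int) -> int:
        await anyio.sleep(0.1)
        return x * x

    with start_blocking_portal() as portal:
        future = portal.start_task_soon(work, 7)  # concurrent.futures.Future
        print(future.result())  # blocks this thread until the task finishes: 49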
+ + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the + task completes successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + :rtype: concurrent.futures.Future[T_Retval] + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future[T_Retval] = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`.abc.TaskGroup.start`. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` + is the value passed to ``task_status.started()`` from within the target + function + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] + + .. versionadded:: 3.0 + + """ + + def task_done(future: Future[T_Retval]) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AbstractAsyncContextManager[T_co] + ) -> AbstractContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +@dataclass +class BlockingPortalProvider: + """ + A manager for a blocking portal. Used as a context manager. The first thread to + enter this context manager causes a blocking portal to be started with the specific + parameters, and the last thread to exit causes the portal to be shut down. Thus, + there will be exactly one blocking portal running in this context as long as at + least one thread has entered this context manager. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + + .. 
versionadded:: 4.4 + """ + + backend: str = "asyncio" + backend_options: dict[str, Any] | None = None + _lock: Lock = field(init=False, default_factory=Lock) + _leases: int = field(init=False, default=0) + _portal: BlockingPortal = field(init=False) + _portal_cm: AbstractContextManager[BlockingPortal] | None = field( + init=False, default=None + ) + + def __enter__(self) -> BlockingPortal: + with self._lock: + if self._portal_cm is None: + self._portal_cm = start_blocking_portal( + self.backend, self.backend_options + ) + self._portal = self._portal_cm.__enter__() + + self._leases += 1 + return self._portal + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + portal_cm: AbstractContextManager[BlockingPortal] | None = None + with self._lock: + assert self._portal_cm + assert self._leases > 0 + self._leases -= 1 + if not self._leases: + portal_cm = self._portal_cm + self._portal_cm = None + del self._portal + + if portal_cm: + portal_cm.__exit__(None, None, None) + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, + *, + name: str | None = None, +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :param name: name of the thread + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. + + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if name is None: + current_thread().name = f"{backend}-portal-{id(portal_):x}" + + future.set_result(portal_) + await portal_.sleep_until_stopped() + + def run_blocking_portal() -> None: + if future.set_running_or_notify_cancel(): + try: + run_eventloop( + run_portal, backend=backend, backend_options=backend_options + ) + except BaseException as exc: + if not future.done(): + future.set_exception(exc) + + future: Future[BlockingPortal] = Future() + thread = Thread(target=run_blocking_portal, daemon=True, name=name) + thread.start() + try: + cancel_remaining_tasks = False + portal = future.result() + try: + yield portal + except BaseException: + cancel_remaining_tasks = True + raise + finally: + try: + portal.call(portal.stop, cancel_remaining_tasks) + except RuntimeError: + pass + finally: + thread.join() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task's running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. 
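A sketch of the intended cooperative-cancellation pattern (assumes the ``abandon_on_cancel`` option of ``to_thread.run_sync``; the loop body is a stand-in workload)::

    import time
    import anyio
    from anyio import from_thread, to_thread

    def blocking_loop() -> None:
        while True:
            time.sleep(0.01)  # stand-in for blocking work
            from_thread.check_cancelled()  # raises once the host task is cancelled

    async def main() -> None:
        with anyio.move_on_after(0.1):
            await to_thread.run_sync(blocking_loop, abandon_on_cancel=True)

    anyio.run(main)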
+ + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + token: EventLoopToken = threadlocals.current_token + except AttributeError: + raise NoEventLoopError( + "This function can only be called inside an AnyIO worker thread" + ) from None + + token.backend_class.check_cancelled() diff --git a/venv/lib/python3.12/site-packages/anyio/functools.py b/venv/lib/python3.12/site-packages/anyio/functools.py new file mode 100644 index 0000000..41cc723 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/functools.py @@ -0,0 +1,347 @@ +from __future__ import annotations + +__all__ = ( + "AsyncCacheInfo", + "AsyncCacheParameters", + "AsyncLRUCacheWrapper", + "cache", + "lru_cache", + "reduce", +) + +import functools +import sys +from collections import OrderedDict +from collections.abc import ( + AsyncIterable, + Awaitable, + Callable, + Coroutine, + Hashable, + Iterable, +) +from functools import update_wrapper +from inspect import iscoroutinefunction +from typing import ( + Any, + Generic, + NamedTuple, + TypedDict, + TypeVar, + cast, + final, + overload, +) +from weakref import WeakKeyDictionary + +from ._core._synchronization import Lock +from .lowlevel import RunVar, checkpoint + +if sys.version_info >= (3, 11): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +T = TypeVar("T") +S = TypeVar("S") +P = ParamSpec("P") +lru_cache_items: RunVar[ + WeakKeyDictionary[ + AsyncLRUCacheWrapper[Any, Any], + OrderedDict[Hashable, tuple[_InitialMissingType, Lock] | tuple[Any, None]], + ] +] = RunVar("lru_cache_items") + + +class _InitialMissingType: + pass + + +initial_missing: _InitialMissingType = _InitialMissingType() + + +class AsyncCacheInfo(NamedTuple): + hits: int + misses: int + maxsize: int | None + currsize: int + + +class AsyncCacheParameters(TypedDict): + maxsize: int | None + typed: bool + always_checkpoint: bool + + +@final +class AsyncLRUCacheWrapper(Generic[P, T]): + def __init__( + self, + func: Callable[..., Awaitable[T]], + maxsize: int | None, + typed: bool, + always_checkpoint: bool, + ): + self.__wrapped__ = func + self._hits: int = 0 + self._misses: int = 0 + self._maxsize = max(maxsize, 0) if maxsize is not None else None + self._currsize: int = 0 + self._typed = typed + self._always_checkpoint = always_checkpoint + update_wrapper(self, func) + + def cache_info(self) -> AsyncCacheInfo: + return AsyncCacheInfo(self._hits, self._misses, self._maxsize, self._currsize) + + def cache_parameters(self) -> AsyncCacheParameters: + return { + "maxsize": self._maxsize, + "typed": self._typed, + "always_checkpoint": self._always_checkpoint, + } + + def cache_clear(self) -> None: + if cache := lru_cache_items.get(None): + cache.pop(self, None) + self._hits = self._misses = self._currsize = 0 + + async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + # Easy case first: if maxsize == 0, no caching is done + if self._maxsize == 0: + value = await self.__wrapped__(*args, **kwargs) + self._misses += 1 + return value + + # The key is constructed as a flat tuple to avoid memory overhead + key: tuple[Any, ...] 
= args + if kwargs: + # initial_missing is used as a separator + key += (initial_missing,) + sum(kwargs.items(), ()) + + if self._typed: + key += tuple(type(arg) for arg in args) + if kwargs: + key += (initial_missing,) + tuple(type(val) for val in kwargs.values()) + + try: + cache = lru_cache_items.get() + except LookupError: + cache = WeakKeyDictionary() + lru_cache_items.set(cache) + + try: + cache_entry = cache[self] + except KeyError: + cache_entry = cache[self] = OrderedDict() + + cached_value: T | _InitialMissingType + try: + cached_value, lock = cache_entry[key] + except KeyError: + # We're the first task to call this function + cached_value, lock = ( + initial_missing, + Lock(fast_acquire=not self._always_checkpoint), + ) + cache_entry[key] = cached_value, lock + + if lock is None: + # The value was already cached + self._hits += 1 + cache_entry.move_to_end(key) + if self._always_checkpoint: + await checkpoint() + + return cast(T, cached_value) + + async with lock: + # Check if another task filled the cache while we acquired the lock + if (cached_value := cache_entry[key][0]) is initial_missing: + self._misses += 1 + if self._maxsize is not None and self._currsize >= self._maxsize: + cache_entry.popitem(last=False) + else: + self._currsize += 1 + + value = await self.__wrapped__(*args, **kwargs) + cache_entry[key] = value, None + else: + # Another task filled the cache while we were waiting for the lock + self._hits += 1 + cache_entry.move_to_end(key) + value = cast(T, cached_value) + + return value + + +class _LRUCacheWrapper(Generic[T]): + def __init__(self, maxsize: int | None, typed: bool, always_checkpoint: bool): + self._maxsize = maxsize + self._typed = typed + self._always_checkpoint = always_checkpoint + + @overload + def __call__( # type: ignore[overload-overlap] + self, func: Callable[P, Coroutine[Any, Any, T]], / + ) -> AsyncLRUCacheWrapper[P, T]: ... + + @overload + def __call__( + self, func: Callable[..., T], / + ) -> functools._lru_cache_wrapper[T]: ... + + def __call__( + self, f: Callable[P, Coroutine[Any, Any, T]] | Callable[..., T], / + ) -> AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T]: + if iscoroutinefunction(f): + return AsyncLRUCacheWrapper( + f, self._maxsize, self._typed, self._always_checkpoint + ) + + return functools.lru_cache(maxsize=self._maxsize, typed=self._typed)(f) # type: ignore[arg-type] + + +@overload +def cache( # type: ignore[overload-overlap] + func: Callable[P, Coroutine[Any, Any, T]], / +) -> AsyncLRUCacheWrapper[P, T]: ... + + +@overload +def cache(func: Callable[..., T], /) -> functools._lru_cache_wrapper[T]: ... + + +def cache( + func: Callable[..., T] | Callable[P, Coroutine[Any, Any, T]], / +) -> AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T]: + """ + A convenient shortcut for :func:`lru_cache` with ``maxsize=None``. + + This is the asynchronous equivalent to :func:`functools.cache`. + + """ + return lru_cache(maxsize=None)(func) + + +@overload +def lru_cache( + *, maxsize: int | None = ..., typed: bool = ..., always_checkpoint: bool = ... +) -> _LRUCacheWrapper[Any]: ... + + +@overload +def lru_cache( # type: ignore[overload-overlap] + func: Callable[P, Coroutine[Any, Any, T]], / +) -> AsyncLRUCacheWrapper[P, T]: ... + + +@overload +def lru_cache(func: Callable[..., T], /) -> functools._lru_cache_wrapper[T]: ... 
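Example usage of the async-aware decorator defined below (a sketch; ``fetch`` is invented for the example)::

    import anyio
    from anyio.functools import lru_cache

    @lru_cache(maxsize=128)
    async def fetch(key: str) -> str:
        await anyio.sleep(0.1)  # stand-in for an expensive lookup
        return key.upper()

    async def main() -> None:
        await fetch("a")           # miss: runs the wrapped coroutine
        await fetch("a")           # hit: served from the per-event-loop cache
        print(fetch.cache_info())  # AsyncCacheInfo(hits=1, misses=1, ...)

    anyio.run(main)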
+ + +def lru_cache( + func: Callable[P, Coroutine[Any, Any, T]] | Callable[..., T] | None = None, + /, + *, + maxsize: int | None = 128, + typed: bool = False, + always_checkpoint: bool = False, +) -> ( + AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T] | _LRUCacheWrapper[Any] +): + """ + An asynchronous version of :func:`functools.lru_cache`. + + If a synchronous function is passed, the standard library + :func:`functools.lru_cache` is applied instead. + + :param always_checkpoint: if ``True``, every call to the cached function will be + guaranteed to yield control to the event loop at least once + + .. note:: Caches and locks are managed on a per-event loop basis. + + """ + if func is None: + return _LRUCacheWrapper[Any](maxsize, typed, always_checkpoint) + + if not callable(func): + raise TypeError("the first argument must be callable") + + return _LRUCacheWrapper[T](maxsize, typed, always_checkpoint)(func) + + +@overload +async def reduce( + function: Callable[[T, S], Awaitable[T]], + iterable: Iterable[S] | AsyncIterable[S], + /, + initial: T, +) -> T: ... + + +@overload +async def reduce( + function: Callable[[T, T], Awaitable[T]], + iterable: Iterable[T] | AsyncIterable[T], + /, +) -> T: ... + + +async def reduce( # type: ignore[misc] + function: Callable[[T, T], Awaitable[T]] | Callable[[T, S], Awaitable[T]], + iterable: Iterable[T] | Iterable[S] | AsyncIterable[T] | AsyncIterable[S], + /, + initial: T | _InitialMissingType = initial_missing, +) -> T: + """ + Asynchronous version of :func:`functools.reduce`. + + :param function: a coroutine function that takes two arguments: the accumulated + value and the next element from the iterable + :param iterable: an iterable or async iterable + :param initial: the initial value (if missing, the first element of the iterable is + used as the initial value) + + """ + element: Any + function_called = False + if isinstance(iterable, AsyncIterable): + async_it = iterable.__aiter__() + if initial is initial_missing: + try: + value = cast(T, await async_it.__anext__()) + except StopAsyncIteration: + raise TypeError( + "reduce() of empty sequence with no initial value" + ) from None + else: + value = cast(T, initial) + + async for element in async_it: + value = await function(value, element) + function_called = True + elif isinstance(iterable, Iterable): + it = iter(iterable) + if initial is initial_missing: + try: + value = cast(T, next(it)) + except StopIteration: + raise TypeError( + "reduce() of empty sequence with no initial value" + ) from None + else: + value = cast(T, initial) + + for element in it: + value = await function(value, element) + function_called = True + else: + raise TypeError("reduce() argument 2 must be an iterable or async iterable") + + # Make sure there is at least one checkpoint, even if an empty iterable and an + # initial value were given + if not function_called: + await checkpoint() + + return value diff --git a/venv/lib/python3.12/site-packages/anyio/lowlevel.py b/venv/lib/python3.12/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..06d23c9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/lowlevel.py @@ -0,0 +1,195 @@ +from __future__ import annotations + +__all__ = ( + "EventLoopToken", + "RunvarToken", + "RunVar", + "checkpoint", + "checkpoint_if_cancelled", + "cancel_shielded_checkpoint", + "current_token", +) + +import enum +from dataclasses import dataclass +from types import TracebackType +from typing import Any, Generic, Literal, TypeVar, final, overload +from weakref 
import WeakKeyDictionary + +from ._core._eventloop import get_async_backend +from .abc import AsyncBackend + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().cancel_shielded_checkpoint() + + +@final +@dataclass(frozen=True, repr=False) +class EventLoopToken: + """ + An opaque object that holds a reference to an event loop. + + .. versionadded:: 4.11.0 + """ + + backend_class: type[AsyncBackend] + native_token: object + + +def current_token() -> EventLoopToken: + """ + Return a token object that can be used to call code in the current event loop from + another thread. + + .. versionadded:: 4.11.0 + + """ + backend_class = get_async_backend() + raw_token = backend_class.current_token() + return EventLoopToken(backend_class, raw_token) + + +_run_vars: WeakKeyDictionary[object, dict[RunVar[Any], Any]] = WeakKeyDictionary() + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): + self._var = var + self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value + self._redeemed = False + + def __enter__(self) -> RunvarToken[T]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._var.reset(self) + + +class RunVar(Generic[T]): + """ + Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. + + The token returned by :meth:`set` can be used as a context manager that resets + the variable to its previous value when the context block is exited. + """ + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + def __init__( + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> dict[RunVar[T], T]: + native_token = current_token().native_token + try: + return _run_vars[native_token] + except KeyError: + run_vars = _run_vars[native_token] = {} + return run_vars + + @overload + def get(self, default: D) -> T | D: ... + + @overload + def get(self) -> T: ... 
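A usage sketch of this class (``request_id`` is an invented variable name)::

    import anyio
    from anyio.lowlevel import RunVar

    request_id: RunVar[str] = RunVar("request_id", default="unset")

    async def main() -> None:
        print(request_id.get())        # "unset" (falls back to the default)
        token = request_id.set("abc")  # scoped to this event loop
        print(request_id.get())        # "abc"
        request_id.reset(token)        # restore the previous state

    anyio.run(main)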
+ + def get( + self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ) -> T | D: + try: + return self._current_vars[self] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError( + f'Run variable "{self._name}" has no value and no default set' + ) + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self, RunVar.NO_VALUE_SET)) + current_vars[self] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError("This token does not belong to this RunVar") + + if token._redeemed: + raise ValueError("This token has already been used") + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self] + except KeyError: + pass + else: + self._current_vars[self] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f"<RunVar name={self._name!r}>" diff --git a/venv/lib/python3.12/site-packages/anyio/py.typed b/venv/lib/python3.12/site-packages/anyio/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py b/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py new file mode 100644 index 0000000..4222816 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,302 @@ +from __future__ import annotations + +import socket +import sys +from collections.abc import Callable, Generator, Iterator +from contextlib import ExitStack, contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction, ismethod +from typing import Any, cast + +import pytest +from _pytest.fixtures import SubRequest +from _pytest.outcomes import Exit + +from . 
import get_available_backends +from ._core._eventloop import ( + current_async_library, + get_async_backend, + reset_current_async_library, + set_current_async_library, +) +from ._core._exceptions import iterate_exceptions +from .abc import TestRunner + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +_current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 + + +def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(tuple[str, dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: dict[str, Any] +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if current_async_library() is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = set_current_async_library(backend_name) + _runner_stack.callback(reset_current_async_library, token) + + backend_options = backend_options or {} + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner + finally: + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None + + +def pytest_addoption(parser: pytest.Parser) -> None: + parser.addini( + "anyio_mode", + default="strict", + help='AnyIO plugin mode (either "strict" or "auto")', + ) + + +def pytest_configure(config: pytest.Config) -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run asynchronously via anyio.", + ) + if ( + config.getini("anyio_mode") == "auto" + and config.pluginmanager.has_plugin("asyncio") + and config.getini("asyncio_mode") == "auto" + ): + config.issue_config_time_warning( + pytest.PytestConfigWarning( + "AnyIO auto mode has been enabled together with pytest-asyncio auto " + "mode. This may cause unexpected behavior." 
+ ), + 1, + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef: Any, request: Any) -> Generator[Any]: + def wrapper(anyio_backend: Any, request: SubRequest, **kwargs: Any) -> Any: + # Rebind any fixture methods to the request instance + if ( + request.instance + and ismethod(func) + and type(func.__self__) is type(request.instance) + ): + local_func = func.__func__.__get__(request.instance) + else: + local_func = func + + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + if has_request_arg: + kwargs["request"] = request + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(local_func): + yield from runner.run_asyncgen_fixture(local_func, kwargs) + else: + yield runner.run_fixture(local_func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests + # that involve the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + fixturedef.func = wrapper + original_argname = fixturedef.argnames + + if not (has_backend_arg := "anyio_backend" in fixturedef.argnames): + fixturedef.argnames += ("anyio_backend",) + + if not (has_request_arg := "request" in fixturedef.argnames): + fixturedef.argnames += ("request",) + + try: + return (yield) + finally: + fixturedef.func = func + fixturedef.argnames = original_argname + + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem( + collector: pytest.Module | pytest.Class, name: str, obj: object +) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + anyio_auto_mode = collector.config.getini("anyio_mode") == "auto" + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if ( + anyio_auto_mode + or marker + or any(marker.name == "anyio" for marker in own_markers) + ): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + try: + runner.run_test(pyfuncitem.obj, testargs) + except ExceptionGroup as excgrp: + for exc in iterate_exceptions(excgrp): + if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)): + raise exc from excgrp + + raise + + return True + + return None + + +@pytest.fixture(scope="module", params=get_available_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: 
Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] + + +class FreePortFactory: + """ + Manages port generation based on specified socket kind, ensuring no duplicate + ports are generated. + + This class provides functionality for generating available free ports on the + system. It is initialized with a specific socket kind and can generate ports + for given address families while avoiding reuse of previously generated ports. + + Users should not instantiate this class directly, but use the + ``free_tcp_port_factory`` and ``free_udp_port_factory`` fixtures instead. For simple + use cases, ``free_tcp_port`` and ``free_udp_port`` can be used instead. + """ + + def __init__(self, kind: socket.SocketKind) -> None: + self._kind = kind + self._generated = set[int]() + + @property + def kind(self) -> socket.SocketKind: + """ + The type of socket connection (e.g., :data:`~socket.SOCK_STREAM` or + :data:`~socket.SOCK_DGRAM`) used to bind for checking port availability + + """ + return self._kind + + def __call__(self, family: socket.AddressFamily | None = None) -> int: + """ + Return an unbound port for the given address family. + + :param family: if omitted, both IPv4 and IPv6 addresses will be tried + :return: a port number + + """ + if family is not None: + families = [family] + else: + families = [socket.AF_INET] + if socket.has_ipv6: + families.append(socket.AF_INET6) + + while True: + port = 0 + with ExitStack() as stack: + for family in families: + sock = stack.enter_context(socket.socket(family, self._kind)) + addr = "::1" if family == socket.AF_INET6 else "127.0.0.1" + try: + sock.bind((addr, port)) + except OSError: + break + + if not port: + port = sock.getsockname()[1] + else: + if port not in self._generated: + self._generated.add(port) + return port + + +@pytest.fixture(scope="session") +def free_tcp_port_factory() -> FreePortFactory: + return FreePortFactory(socket.SOCK_STREAM) + + +@pytest.fixture(scope="session") +def free_udp_port_factory() -> FreePortFactory: + return FreePortFactory(socket.SOCK_DGRAM) + + +@pytest.fixture +def free_tcp_port(free_tcp_port_factory: Callable[[], int]) -> int: + return free_tcp_port_factory() + + +@pytest.fixture +def free_udp_port(free_udp_port_factory: Callable[[], int]) -> int: + return free_udp_port_factory() diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__init__.py b/venv/lib/python3.12/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..99eec04 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc new file mode 100644 index 0000000..49f4afa Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc new file mode 100644 index 0000000..d692ea3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc new file mode 100644 index 0000000..57b86d1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc new file mode 100644 index 0000000..1d8ff11 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc new file mode 100644 index 0000000..bb671a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc new file mode 100644 index 0000000..d9b28e8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/buffered.py b/venv/lib/python3.12/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..57c7cd7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/buffered.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +__all__ = ( + "BufferedByteReceiveStream", + "BufferedByteStream", + "BufferedConnectable", +) + +import sys +from collections.abc import Callable, Iterable, Mapping +from dataclasses import dataclass, field +from typing import Any, SupportsIndex + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import ( + AnyByteReceiveStream, + AnyByteStream, + AnyByteStreamConnectable, + ByteReceiveStream, + ByteStream, + ByteStreamConnectable, +) + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. + """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + def feed_data(self, data: Iterable[SupportsIndex], /) -> None: + """ + Append data directly into the buffer. + + Any data in the buffer will be consumed by receive operations before receiving + anything from the wrapped stream. 
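A buffering sketch over a bytes-based memory stream (the payload is invented for the example)::

    import anyio
    from anyio.streams.buffered import BufferedByteReceiveStream

    async def main() -> None:
        send, receive = anyio.create_memory_object_stream[bytes](10)
        buffered = BufferedByteReceiveStream(receive)
        await send.send(b"HELLO\nWORLD")
        print(await buffered.receive_until(b"\n", max_bytes=64))  # b'HELLO'
        print(await buffered.receive_exactly(5))                  # b'WORLD'

    anyio.run(main)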
+ + :param data: the data to append to the buffer (can be bytes or anything else + that supports ``__index__()``) + + """ + self._buffer.extend(data) + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. + + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) + + +class BufferedByteStream(BufferedByteReceiveStream, ByteStream): + """ + A full-duplex variant of :class:`BufferedByteReceiveStream`. All writes are passed + through to the wrapped stream as-is. 
+ """ + + def __init__(self, stream: AnyByteStream): + """ + :param stream: the stream to be wrapped + + """ + super().__init__(stream) + self._stream = stream + + @override + async def send_eof(self) -> None: + await self._stream.send_eof() + + @override + async def send(self, item: bytes) -> None: + await self._stream.send(item) + + +class BufferedConnectable(ByteStreamConnectable): + def __init__(self, connectable: AnyByteStreamConnectable): + """ + :param connectable: the connectable to wrap + + """ + self.connectable = connectable + + @override + async def connect(self) -> BufferedByteStream: + stream = await self.connectable.connect() + return BufferedByteStream(stream) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/file.py b/venv/lib/python3.12/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..82d2da8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/file.py @@ -0,0 +1,154 @@ +from __future__ import annotations + +__all__ = ( + "FileReadStream", + "FileStreamAttribute", + "FileWriteStream", +) + +from collections.abc import Callable, Mapping +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, cast + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. + + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. 
+ + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. + + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/venv/lib/python3.12/site-packages/anyio/streams/memory.py b/venv/lib/python3.12/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..a3fa0c3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/memory.py @@ -0,0 +1,325 @@ +from __future__ import annotations + +__all__ = ( + "MemoryObjectReceiveStream", + "MemoryObjectSendStream", + "MemoryObjectStreamStatistics", +) + +import warnings +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._testing import TaskInfo, get_current_task +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class _MemoryObjectItemReceiver(Generic[T_Item]): + task_info: TaskInfo = field(init=False, default_factory=get_current_task) + item: T_Item = field(init=False) + + def __repr__(self) -> str: + # When item is not defined, we get following error with default __repr__: + # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' + item = getattr(self, "item", None) + return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" + + +@dataclass(eq=False) +class _MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, _MemoryObjectItemReceiver[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: _MemoryObjectStreamState[T_co] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_co: + """ + Receive the next item if it can be done without waiting. 
+ + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_co: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + receiver = _MemoryObjectItemReceiver[T_co]() + self._state.waiting_receivers[receive_event] = receiver + + try: + await receive_event.wait() + finally: + self._state.waiting_receivers.pop(receive_event, None) + + try: + return receiver.item + except AttributeError: + raise EndOfStream from None + + def clone(self) -> MemoryObjectReceiveStream[T_co]: + """ + Create a clone of this receive stream. + + Each clone can be closed separately. Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectReceiveStream[T_co]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + stacklevel=1, + source=self, + ) + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): + _state: _MemoryObjectStreamState[T_contra] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_contra) -> None: + """ + Send an item immediately if it can be done without waiting. 
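The non-blocking pair in action (a sketch; a buffer size of 1 is chosen to force ``WouldBlock``)::

    import anyio

    async def main() -> None:
        send, receive = anyio.create_memory_object_stream[str](1)
        send.send_nowait("fits in the buffer")
        try:
            send.send_nowait("buffer is full")
        except anyio.WouldBlock:
            print("second send would have blocked")
        print(receive.receive_nowait())

    anyio.run(main)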
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + while self._state.waiting_receivers: + receive_event, receiver = self._state.waiting_receivers.popitem(last=False) + if not receiver.task_info.has_pending_cancellation(): + receiver.item = item + receive_event.set() + return + + if len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if send_event in self._state.waiting_senders: + del self._state.waiting_senders[send_event] + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. 
versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + stacklevel=1, + source=self, + ) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/stapled.py b/venv/lib/python3.12/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000..9248b68 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/stapled.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +__all__ = ( + "MultiListener", + "StapledByteStream", + "StapledObjectStream", +) + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. 
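+    For example (a sketch to be run inside an async context; ``handler`` is a
+    hypothetical connection handler)::
+
+        listener_a = await anyio.create_tcp_listener(local_port=8080)
+        listener_b = await anyio.create_tcp_listener(local_port=8081)
+        await MultiListener([listener_a, listener_b]).serve(handler)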
+ + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. + + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/venv/lib/python3.12/site-packages/anyio/streams/text.py b/venv/lib/python3.12/site-packages/anyio/streams/text.py new file mode 100644 index 0000000..296cd25 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/text.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +__all__ = ( + "TextConnectable", + "TextReceiveStream", + "TextSendStream", + "TextStream", +) + +import codecs +import sys +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + AnyByteStreamConnectable, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + ObjectStreamConnectable, +) + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } + + +class TextConnectable(ObjectStreamConnectable[str]): + def __init__(self, connectable: AnyByteStreamConnectable): + """ + :param connectable: the bytestream endpoint to wrap + + """ + self.connectable = connectable + + @override + async def connect(self) -> TextStream: + stream = await self.connectable.connect() + return TextStream(stream) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/tls.py b/venv/lib/python3.12/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000..b507488 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/tls.py @@ -0,0 +1,424 @@ +from __future__ import annotations + +__all__ = ( + "TLSAttribute", + "TLSConnectable", + "TLSListener", + "TLSStream", +) + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from ssl import SSLContext +from typing import Any, TypeVar + +from .. import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, + to_thread, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import ( + AnyByteStream, + AnyByteStreamConnectable, + ByteStream, + ByteStreamConnectable, + Listener, + TaskGroup, +) + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] 
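+
+# A minimal client-side usage sketch (illustrative only; "example.com" is a
+# placeholder host): wrap an established TCP stream in TLS with TLSStream.wrap()
+# and exchange some data over the encrypted connection.
+#
+#     import anyio
+#     from anyio.streams.tls import TLSStream
+#
+#     async def main() -> None:
+#         tcp_stream = await anyio.connect_tcp("example.com", 443)
+#         tls_stream = await TLSStream.wrap(tcp_stream, hostname="example.com")
+#         await tls_stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
+#         print(await tls_stream.receive())
+#         await tls_stream.aclose()
+#
+#     anyio.run(main)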
+ + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: str | None = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: bytes | None = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared by the client during the TLS handshake (``None`` if this is the + #: client side) + shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the + #: stream is being closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. + + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: bool | None = None, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> TLSStream: + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, + ``False`` if this is the client side (if omitted, will be set to ``False`` + if ``hostname`` has been provided, ``True`` otherwise). Used only to create + a default context when an explicit context has not been provided.
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + + # External SSLContext implementations may do blocking I/O in wrap_bio(), + # but the standard library implementation won't + if type(ssl_context) is ssl.SSLContext: + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + else: + ssl_object = await to_thread.run_sync( + ssl_context.wrap_bio, + bio_in, + bio_out, + server_side, + hostname, + None, + ) + + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if isinstance(exc, ssl.SSLEOFError) or ( + exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
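+        For example (a sketch; ``tls_stream`` is assumed to be an established
+        :class:`TLSStream`)::
+
+            plain_stream, leftover_bytes = await tls_stream.unwrap()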
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None, + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } + + +class TLSConnectable(ByteStreamConnectable): + """ + Wraps another connectable and does TLS negotiation after a successful connection. + + :param connectable: the connectable to wrap + :param hostname: host name of the server (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default + will be created) + :param standard_compatible: if ``False``, skip the closing handshake when closing + the connection, and don't raise an exception if the server does the same + """ + + def __init__( + self, + connectable: AnyByteStreamConnectable, + *, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> None: + self.connectable = connectable + self.ssl_context: SSLContext = ssl_context or ssl.create_default_context( + ssl.Purpose.SERVER_AUTH + ) + if not isinstance(self.ssl_context, ssl.SSLContext): + raise TypeError( + "ssl_context must be an instance of ssl.SSLContext, not " + f"{type(self.ssl_context).__name__}" + ) + self.hostname = hostname + self.standard_compatible = standard_compatible + + @override + async def connect(self) -> TLSStream: + stream = await self.connectable.connect() + try: + return await TLSStream.wrap( + stream, + hostname=self.hostname, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException: + await aclose_forcefully(stream) + raise diff --git a/venv/lib/python3.12/site-packages/anyio/to_interpreter.py b/venv/lib/python3.12/site-packages/anyio/to_interpreter.py new file mode 100644 index 0000000..694dbe7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/to_interpreter.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +__all__ = ( + "run_sync", + "current_default_interpreter_limiter", +) + +import atexit +import os +import sys +from collections import deque +from collections.abc 
import Callable +from typing import Any, Final, TypeVar + +from . import current_time, to_thread +from ._core._exceptions import BrokenWorkerInterpreter +from ._core._synchronization import CapacityLimiter +from .lowlevel import RunVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 14): + from concurrent.interpreters import ExecutionFailed, create + + def _interp_call( + func: Callable[..., Any], args: tuple[Any, ...] + ) -> tuple[Any, bool]: + try: + retval = func(*args) + except BaseException as exc: + return exc, True + else: + return retval, False + + class _Worker: + last_used: float = 0 + + def __init__(self) -> None: + self._interpreter = create() + + def destroy(self) -> None: + self._interpreter.close() + + def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any, ...], + ) -> T_Retval: + try: + res, is_exception = self._interpreter.call(_interp_call, func, args) + except ExecutionFailed as exc: + raise BrokenWorkerInterpreter(exc.excinfo) from exc + + if is_exception: + raise res + + return res +elif sys.version_info >= (3, 13): + import _interpqueues + import _interpreters + + UNBOUND: Final = 2 # I have no clue how this works, but it was used in the stdlib + FMT_UNPICKLED: Final = 0 + FMT_PICKLED: Final = 1 + QUEUE_PICKLE_ARGS: Final = (FMT_PICKLED, UNBOUND) + QUEUE_UNPICKLE_ARGS: Final = (FMT_UNPICKLED, UNBOUND) + + _run_func = compile( + """ +import _interpqueues +from _interpreters import NotShareableError +from pickle import loads, dumps, HIGHEST_PROTOCOL + +QUEUE_PICKLE_ARGS = (1, 2) +QUEUE_UNPICKLE_ARGS = (0, 2) + +item = _interpqueues.get(queue_id)[0] +try: + func, args = loads(item) + retval = func(*args) +except BaseException as exc: + is_exception = True + retval = exc +else: + is_exception = False + +try: + _interpqueues.put(queue_id, (retval, is_exception), *QUEUE_UNPICKLE_ARGS) +except NotShareableError: + retval = dumps(retval, HIGHEST_PROTOCOL) + _interpqueues.put(queue_id, (retval, is_exception), *QUEUE_PICKLE_ARGS) + """, + "", + "exec", + ) + + class _Worker: + last_used: float = 0 + + def __init__(self) -> None: + self._interpreter_id = _interpreters.create() + self._queue_id = _interpqueues.create(1, *QUEUE_UNPICKLE_ARGS) + _interpreters.set___main___attrs( + self._interpreter_id, {"queue_id": self._queue_id} + ) + + def destroy(self) -> None: + _interpqueues.destroy(self._queue_id) + _interpreters.destroy(self._interpreter_id) + + def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any, ...], + ) -> T_Retval: + import pickle + + item = pickle.dumps((func, args), pickle.HIGHEST_PROTOCOL) + _interpqueues.put(self._queue_id, item, *QUEUE_PICKLE_ARGS) + exc_info = _interpreters.exec(self._interpreter_id, _run_func) + if exc_info: + raise BrokenWorkerInterpreter(exc_info) + + res = _interpqueues.get(self._queue_id) + (res, is_exception), fmt = res[:2] + if fmt == FMT_PICKLED: + res = pickle.loads(res) + + if is_exception: + raise res + + return res +else: + + class _Worker: + last_used: float = 0 + + def __init__(self) -> None: + raise RuntimeError("subinterpreters require at least Python 3.13") + + def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any, ...], + ) -> T_Retval: + raise NotImplementedError + + def destroy(self) -> None: + pass + + +DEFAULT_CPU_COUNT: Final = 8 # this is just an arbitrarily selected value +MAX_WORKER_IDLE_TIME = ( + 30 # seconds a subinterpreter can be idle before becoming 
eligible for pruning +) + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_idle_workers = RunVar[deque[_Worker]]("_available_workers") +_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter") + + +def _stop_workers(workers: deque[_Worker]) -> None: + for worker in workers: + worker.destroy() + + workers.clear() + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a subinterpreter. + + .. warning:: On Python 3.13, the :mod:`concurrent.interpreters` module was not yet + available, so the code path for that Python version relies on an undocumented, + private API. As such, it is recommended to not rely on this function for anything + mission-critical on Python 3.13. + + :param func: a callable + :param args: the positional arguments for the callable + :param limiter: capacity limiter to use to limit the total number of subinterpreters + running (if omitted, the default limiter is used) + :return: the result of the call + :raises BrokenWorkerInterpreter: if there's an internal error in a subinterpreter + + """ + if limiter is None: + limiter = current_default_interpreter_limiter() + + try: + idle_workers = _idle_workers.get() + except LookupError: + idle_workers = deque() + _idle_workers.set(idle_workers) + atexit.register(_stop_workers, idle_workers) + + async with limiter: + try: + worker = idle_workers.pop() + except IndexError: + worker = _Worker() + + try: + return await to_thread.run_sync( + worker.call, + func, + args, + limiter=limiter, + ) + finally: + # Prune workers that have been idle for too long + now = current_time() + while idle_workers: + if now - idle_workers[0].last_used <= MAX_WORKER_IDLE_TIME: + break + + await to_thread.run_sync(idle_workers.popleft().destroy, limiter=limiter) + + worker.last_used = current_time() + idle_workers.append(worker) + + +def current_default_interpreter_limiter() -> CapacityLimiter: + """ + Return the capacity limiter used by default to limit the number of concurrently + running subinterpreters. + + Defaults to the number of CPU cores. 
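+    For example (a sketch; the limiter is tied to the running event loop, so
+    this must be called from within one)::
+
+        from anyio import to_interpreter
+
+        to_interpreter.current_default_interpreter_limiter().total_tokens = 4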
+ + :return: a capacity limiter object + + """ + try: + return _default_interpreter_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or DEFAULT_CPU_COUNT) + _default_interpreter_limiter.set(limiter) + return limiter diff --git a/venv/lib/python3.12/site-packages/anyio/to_process.py b/venv/lib/python3.12/site-packages/anyio/to_process.py new file mode 100644 index 0000000..e6796cd --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/to_process.py @@ -0,0 +1,264 @@ +from __future__ import annotations + +__all__ = ( + "current_default_process_limiter", + "process_worker", + "run_sync", +) + +import os +import pickle +import subprocess +import sys +from collections import deque +from collections.abc import Callable +from importlib.util import module_from_spec, spec_from_file_location +from typing import TypeVar, cast + +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( + "_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( # type: ignore[return] + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total amount of processes + running (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. 
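+    For example (a minimal sketch to be run inside an async context;
+    :func:`os.getpid` merely demonstrates that the call runs in a separate
+    worker process)::
+
+        import os
+        from anyio import to_process
+
+        worker_pid = await to_process.run_sync(os.getpid)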
+ + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_async_backend().setup_process_pool_exit_at_shutdown(workers) + + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer + now = current_time() + killed_processes: list[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) + + with CancelScope(shield=True): + for killed_process in killed_processes: + await killed_process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def 
current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of worker + processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: str | None + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path and os.path.isfile(main_module_path): + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/venv/lib/python3.12/site-packages/anyio/to_thread.py b/venv/lib/python3.12/site-packages/anyio/to_thread.py new file mode 100644 index 0000000..1f05ed9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/to_thread.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +__all__ = ( + "run_sync", + "current_default_thread_limiter", +) + +import sys +from collections.abc import Callable +from typing import TypeVar +from warnings import warn + +from ._core._eventloop import get_async_backend +from .abc import CapacityLimiter + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. 
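+    For example (a minimal sketch to be run inside an async context)::
+
+        import time
+        from anyio import to_thread
+
+        await to_thread.run_sync(time.sleep, 1)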
+ + :param func: a callable + :param args: positional arguments for the callable + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) + + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter + ) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrent threads. + + :return: a capacity limiter object + + """ + return get_async_backend().current_default_thread_limiter() diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/LICENSE b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/LICENSE new file mode 100644 index 0000000..e23ece2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/LICENSE @@ -0,0 +1,277 @@ +Eclipse Public License - v 2.0 + + THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE + PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION + OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + + a) in the case of the initial Contributor, the initial content + Distributed under this Agreement, and + + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + where such changes and/or additions to the Program originate from + and are Distributed by that particular Contributor. A Contribution + "originates" from a Contributor if it was added to the Program by + such Contributor itself or anyone acting on such Contributor's behalf. + Contributions do not include changes or additions to the Program that + are not Modified Works. + +"Contributor" means any person or entity that Distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which +are necessarily infringed by the use or sale of its Contribution alone +or when combined with the Program. + +"Program" means the Contributions Distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement +or any Secondary License (as applicable), including Contributors. 
+ +"Derivative Works" shall mean any work, whether in Source Code or other +form, that is based on (or derived from) the Program and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. + +"Modified Works" shall mean any work in Source Code or other form that +results from an addition to, deletion from, or modification of the +contents of the Program, including, for purposes of clarity any new file +in Source Code form that contains any contents of the Program. Modified +Works shall not include works that contain only declarations, +interfaces, types, classes, structures, or files of the Program solely +in each case in order to link to, bind by name, or subclass the Program +or Modified Works thereof. + +"Distribute" means the acts of a) distributing or b) making available +in any manner that enables the transfer of a copy. + +"Source Code" means the form of a Program preferred for making +modifications, including but not limited to software source code, +documentation source, and configuration files. + +"Secondary License" means either the GNU General Public License, +Version 2.0, or any later versions of that license, including any +exceptions or additional permissions as identified by the initial +Contributor. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free copyright + license to reproduce, prepare Derivative Works of, publicly display, + publicly perform, Distribute and sublicense the Contribution of such + Contributor, if any, and such Derivative Works. + + b) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free patent + license under Licensed Patents to make, use, sell, offer to sell, + import and otherwise transfer the Contribution of such Contributor, + if any, in Source Code or other form. This patent license shall + apply to the combination of the Contribution and the Program if, at + the time the Contribution is added by the Contributor, such addition + of the Contribution causes such combination to be covered by the + Licensed Patents. The patent license shall not apply to any other + combinations which include the Contribution. No hardware per se is + licensed hereunder. + + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the + patent or other intellectual property rights of any other entity. + Each Contributor disclaims any liability to Recipient for claims + brought by any other entity based on infringement of intellectual + property rights or otherwise. As a condition to exercising the + rights and licenses granted hereunder, each Recipient hereby + assumes sole responsibility to secure any other intellectual + property rights needed, if any. For example, if a third party + patent license is required to allow Recipient to Distribute the + Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + + d) Each Contributor represents that to its knowledge it has + sufficient copyright rights in its Contribution, if any, to grant + the copyright license set forth in this Agreement. 
+ + e) Notwithstanding the terms of any Secondary License, no + Contributor makes additional grants to any Recipient (other than + those set forth in this Agreement) as a result of such Recipient's + receipt of the Program under the terms of a Secondary License + (if permitted under the terms of Section 3). + +3. REQUIREMENTS + +3.1 If a Contributor Distributes the Program in any form, then: + + a) the Program must also be made available as Source Code, in + accordance with section 3.2, and the Contributor must accompany + the Program with a statement that the Source Code for the Program + is available under this Agreement, and informs Recipients how to + obtain it in a reasonable manner on or through a medium customarily + used for software exchange; and + + b) the Contributor may Distribute the Program under a license + different than this Agreement, provided that such license: + i) effectively disclaims on behalf of all other Contributors all + warranties and conditions, express and implied, including + warranties or conditions of title and non-infringement, and + implied warranties or conditions of merchantability and fitness + for a particular purpose; + + ii) effectively excludes on behalf of all other Contributors all + liability for damages, including direct, indirect, special, + incidental and consequential damages, such as lost profits; + + iii) does not attempt to limit or alter the recipients' rights + in the Source Code under section 3.2; and + + iv) requires any subsequent distribution of the Program by any + party to be under a license that satisfies the requirements + of this section 3. + +3.2 When the Program is Distributed as Source Code: + + a) it must be made available under this Agreement, or if the + Program (i) is combined with other material in a separate file or + files made available under a Secondary License, and (ii) the initial + Contributor attached to the Source Code the notice described in + Exhibit A of this Agreement, then the Program may be made available + under the terms of such Secondary Licenses, and + + b) a copy of this Agreement must be included with each copy of + the Program. + +3.3 Contributors may not remove or alter any copyright, patent, +trademark, attribution notices, disclaimers of warranty, or limitations +of liability ("notices") contained within the Program from any copy of +the Program which they Distribute, provided that Contributors may add +their own appropriate notices. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities +with respect to end users, business partners and the like. While this +license is intended to facilitate the commercial use of the Program, +the Contributor who includes the Program in a commercial product +offering should do so in a manner which does not create potential +liability for other Contributors. Therefore, if a Contributor includes +the Program in a commercial product offering, such Contributor +("Commercial Contributor") hereby agrees to defend and indemnify every +other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits +and other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such +Commercial Contributor in connection with its distribution of the Program +in a commercial product offering. 
The obligations in this section do not +apply to any claims or Losses relating to any actual or alleged +intellectual property infringement. In order to qualify, an Indemnified +Contributor must: a) promptly notify the Commercial Contributor in +writing of such claim, and b) allow the Commercial Contributor to control, +and cooperate with the Commercial Contributor in, the defense and any +related settlement negotiations. The Indemnified Contributor may +participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial +product offering, Product X. That Contributor is then a Commercial +Contributor. If that Commercial Contributor then makes performance +claims, or offers warranties related to Product X, those performance +claims and warranties are such Commercial Contributor's responsibility +alone. Under this section, the Commercial Contributor would have to +defend claims against the other Contributors related to those performance +claims and warranties, and if a court requires any other Contributor to +pay any damages as a result, the Commercial Contributor must pay +those damages. + +5. NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF +TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR +PURPOSE. Each Recipient is solely responsible for determining the +appropriateness of using and distributing the Program and assumes all +risks associated with its exercise of rights under this Agreement, +including but not limited to the risks and costs of program errors, +compliance with applicable laws, damage to or loss of data, programs +or equipment, and unavailability or interruption of operations. + +6. DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS +SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST +PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of +the remainder of the terms of this Agreement, and without further +action by the parties hereto, such provision shall be reformed to the +minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity +(including a cross-claim or counterclaim in a lawsuit) alleging that the +Program itself (excluding combinations of the Program with other software +or hardware) infringes such Recipient's patent(s), then such Recipient's +rights granted under Section 2(b) shall terminate as of the date such +litigation is filed. 
+ +All Recipient's rights under this Agreement shall terminate if it +fails to comply with any of the material terms or conditions of this +Agreement and does not cure such failure in a reasonable period of +time after becoming aware of such noncompliance. If all Recipient's +rights under this Agreement terminate, Recipient agrees to cease use +and distribution of the Program as soon as reasonably practicable. +However, Recipient's obligations under this Agreement and any licenses +granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, +but in order to avoid inconsistency the Agreement is copyrighted and +may only be modified in the following manner. The Agreement Steward +reserves the right to publish new versions (including revisions) of +this Agreement from time to time. No one other than the Agreement +Steward has the right to modify this Agreement. The Eclipse Foundation +is the initial Agreement Steward. The Eclipse Foundation may assign the +responsibility to serve as the Agreement Steward to a suitable separate +entity. Each new version of the Agreement will be given a distinguishing +version number. The Program (including Contributions) may always be +Distributed subject to the version of the Agreement under which it was +received. In addition, after a new version of the Agreement is published, +Contributor may elect to Distribute the Program (including its +Contributions) under the new version. + +Except as expressly stated in Sections 2(a) and 2(b) above, Recipient +receives no rights or licenses to the intellectual property of any +Contributor under this Agreement, whether expressly, by implication, +estoppel or otherwise. All rights in the Program not expressly granted +under this Agreement are reserved. Nothing in this Agreement is intended +to be enforceable by any entity that is not a Contributor or Recipient. +No third-party beneficiary rights are created under this Agreement. + +Exhibit A - Form of Secondary Licenses Notice + +"This Source Code may also be made available under the following +Secondary Licenses when the conditions for such availability set forth +in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), +version(s), and exceptions or additional permissions here}." + + Simply including a copy of this Agreement, including this Exhibit A + is not sufficient to license the Source Code under Secondary Licenses. + + If it is not possible or desirable to put the notice in a particular + file, then You may include the notice in a location (such as a LICENSE + file in a relevant directory) where a recipient would be likely to + look for such a notice. + + You may add additional accurate notices of copyright ownership. 
\ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/METADATA b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/METADATA new file mode 100644 index 0000000..cab94fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/METADATA @@ -0,0 +1,279 @@ +Metadata-Version: 2.1 +Name: asyncssh +Version: 2.14.2 +Summary: AsyncSSH: Asynchronous SSHv2 client and server library +Home-page: http://asyncssh.timeheart.net +Author: Ron Frederick +Author-email: ronf@timeheart.net +License: Eclipse Public License v2.0 +Project-URL: Documentation, https://asyncssh.readthedocs.io +Project-URL: Source, https://github.com/ronf/asyncssh +Project-URL: Tracker, https://github.com/ronf/asyncssh/issues +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet +Classifier: Topic :: Security :: Cryptography +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Networking +Requires-Python: >= 3.6 +License-File: LICENSE +Requires-Dist: cryptography (>=39.0) +Requires-Dist: typing-extensions (>=3.6) +Provides-Extra: bcrypt +Requires-Dist: bcrypt (>=3.1.3) ; extra == 'bcrypt' +Provides-Extra: fido2 +Requires-Dist: fido2 (>=0.9.2) ; extra == 'fido2' +Provides-Extra: gssapi +Requires-Dist: gssapi (>=1.2.0) ; extra == 'gssapi' +Provides-Extra: libnacl +Requires-Dist: libnacl (>=1.4.2) ; extra == 'libnacl' +Provides-Extra: pkcs11 +Requires-Dist: python-pkcs11 (>=0.7.0) ; extra == 'pkcs11' +Provides-Extra: pyopenssl +Requires-Dist: pyOpenSSL (>=23.0.0) ; extra == 'pyopenssl' +Provides-Extra: pywin32 +Requires-Dist: pywin32 (>=227) ; extra == 'pywin32' + +.. image:: https://readthedocs.org/projects/asyncssh/badge/?version=latest + :target: https://asyncssh.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/pypi/v/asyncssh.svg + :target: https://pypi.python.org/pypi/asyncssh/ + :alt: AsyncSSH PyPI Project + + +AsyncSSH: Asynchronous SSH for Python +===================================== + +AsyncSSH is a Python package which provides an asynchronous client and +server implementation of the SSHv2 protocol on top of the Python 3.6+ +asyncio framework. + +.. code:: python + + import asyncio, asyncssh, sys + + async def run_client(): + async with asyncssh.connect('localhost') as conn: + result = await conn.run('echo "Hello!"', check=True) + print(result.stdout, end='') + + try: + asyncio.get_event_loop().run_until_complete(run_client()) + except (OSError, asyncssh.Error) as exc: + sys.exit('SSH connection failed: ' + str(exc)) + +Check out the `examples`__ to get started! 
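The example above covers only the client side. Since the package also implements the server side of the protocol, a minimal server counterpart, sketched from the same API surface (`asyncssh.listen` with a `process_factory`), might look like the following; the port 8022, the greeting text, and the `ssh_host_key` path are illustrative assumptions, and a real deployment would also configure authentication:

.. code:: python

    import asyncio, asyncssh, sys

    async def handle_client(process: asyncssh.SSHServerProcess) -> None:
        # Called once per inbound session: greet the client and exit.
        process.stdout.write('Hello from the sketch server!\n')
        process.exit(0)

    async def start_server() -> None:
        # 'ssh_host_key' is an assumed file; create it beforehand,
        # e.g. with `ssh-keygen -f ssh_host_key -N ""`.
        await asyncssh.listen('', 8022, server_host_keys=['ssh_host_key'],
                              process_factory=handle_client)
        await asyncio.Event().wait()  # keep the listener alive

    try:
        asyncio.run(start_server())
    except (OSError, asyncssh.Error) as exc:
        sys.exit('SSH server failed: ' + str(exc))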
+ +__ http://asyncssh.readthedocs.io/en/stable/#client-examples + +Features +-------- + +* Full support for SSHv2, SFTP, and SCP client and server functions + + * Shell, command, and subsystem channels + * Environment variables, terminal type, and window size + * Direct and forwarded TCP/IP channels + * OpenSSH-compatible direct and forwarded UNIX domain socket channels + * Local and remote TCP/IP port forwarding + * Local and remote UNIX domain socket forwarding + * Dynamic TCP/IP port forwarding via SOCKS + * X11 forwarding support on both the client and the server + * SFTP protocol version 3 with OpenSSH extensions + + * Experimental support for SFTP versions 4-6, when requested + + * SCP protocol support, including third-party remote to remote copies + +* Multiple simultaneous sessions on a single SSH connection +* Multiple SSH connections in a single event loop +* Byte and string based I/O with settable encoding +* A variety of `key exchange`__, `encryption`__, and `MAC`__ algorithms + + * Including OpenSSH post-quantum kex algorithm + sntrup761x25519-sha512\@openssh.com + +* Support for `gzip compression`__ + + * Including OpenSSH variant to delay compression until after auth + +* User and host-based public key, password, and keyboard-interactive + authentication methods + +* Many types and formats of `public keys and certificates`__ + + * Including OpenSSH-compatible support for U2F and FIDO2 security keys + * Including PKCS#11 support for accessing PIV security tokens + * Including support for X.509 certificates as defined in RFC 6187 + +* Support for accessing keys managed by `ssh-agent`__ on UNIX systems + + * Including agent forwarding support on both the client and the server + +* Support for accessing keys managed by PuTTY's Pageant agent on Windows +* Support for accessing host keys via OpenSSH's ssh-keysign +* OpenSSH-style `known_hosts file`__ support +* OpenSSH-style `authorized_keys file`__ support +* Partial support for `OpenSSH-style configuration files`__ +* Compatibility with OpenSSH "Encrypt then MAC" option for better security +* Time and byte-count based session key renegotiation +* Designed to be easy to extend to support new forms of key exchange, + authentication, encryption, and compression algorithms + +__ http://asyncssh.readthedocs.io/en/stable/api.html#key-exchange-algorithms +__ http://asyncssh.readthedocs.io/en/stable/api.html#encryption-algorithms +__ http://asyncssh.readthedocs.io/en/stable/api.html#mac-algorithms +__ http://asyncssh.readthedocs.io/en/stable/api.html#compression-algorithms +__ http://asyncssh.readthedocs.io/en/stable/api.html#public-key-support +__ http://asyncssh.readthedocs.io/en/stable/api.html#ssh-agent-support +__ http://asyncssh.readthedocs.io/en/stable/api.html#known-hosts +__ http://asyncssh.readthedocs.io/en/stable/api.html#authorized-keys +__ http://asyncssh.readthedocs.io/en/stable/api.html#config-file-support + +License +------- + +This package is released under the following terms: + + Copyright (c) 2013-2022 by Ron Frederick and others. 
+ + This program and the accompanying materials are made available under + the terms of the Eclipse Public License v2.0 which accompanies this + distribution and is available at: + + http://www.eclipse.org/legal/epl-2.0/ + + This program may also be made available under the following secondary + licenses when the conditions for such availability set forth in the + Eclipse Public License v2.0 are satisfied: + + GNU General Public License, Version 2.0, or any later versions of + that license + + SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later + +For more information about this license, please see the `Eclipse +Public License FAQ <https://www.eclipse.org/legal/epl-2.0/faq.php>`_. + +Prerequisites +------------- + +To use AsyncSSH 2.0 or later, you need the following: + +* Python 3.6 or later +* cryptography (PyCA) 3.1 or later + +Installation +------------ + +Install AsyncSSH by running: + + :: + + pip install asyncssh + +Optional Extras +^^^^^^^^^^^^^^^ + +There are some optional modules you can install to enable additional +functionality: + +* Install bcrypt from https://pypi.python.org/pypi/bcrypt + if you want support for OpenSSH private key encryption. + +* Install fido2 from https://pypi.org/project/fido2 if you want support + for key exchange and authentication with U2F/FIDO2 security keys. + +* Install python-pkcs11 from https://pypi.org/project/python-pkcs11 if + you want support for accessing PIV keys on PKCS#11 security tokens. + +* Install gssapi from https://pypi.python.org/pypi/gssapi if you + want support for GSSAPI key exchange and authentication on UNIX. + +* Install liboqs from https://github.com/open-quantum-safe/liboqs + if you want support for the OpenSSH post-quantum key exchange + algorithm sntrup761x25519-sha512\@openssh.com. + +* Install libsodium from https://github.com/jedisct1/libsodium + and libnacl from https://pypi.python.org/pypi/libnacl if you have + a version of OpenSSL older than 1.1.1b installed and you want + support for Curve25519 key exchange, Ed25519 keys and certificates, + or the Chacha20-Poly1305 cipher. + +* Install libnettle from http://www.lysator.liu.se/~nisse/nettle/ + if you want support for UMAC cryptographic hashes. + +* Install pyOpenSSL from https://pypi.python.org/pypi/pyOpenSSL + if you want support for X.509 certificate authentication. + +* Install pywin32 from https://pypi.python.org/pypi/pywin32 if you + want support for using the Pageant agent or support for GSSAPI + key exchange and authentication on Windows. + +AsyncSSH defines the following optional PyPI extra packages to make it +easy to install any or all of these dependencies: + + | bcrypt + | fido2 + | gssapi + | libnacl + | pkcs11 + | pyOpenSSL + | pywin32 + +For example, to install bcrypt, fido2, gssapi, libnacl, pkcs11, and +pyOpenSSL on UNIX, you can run: + + :: + + pip install 'asyncssh[bcrypt,fido2,gssapi,libnacl,pkcs11,pyOpenSSL]' + +To install bcrypt, fido2, libnacl, pkcs11, pyOpenSSL, and pywin32 on +Windows, you can run: + + :: + + pip install 'asyncssh[bcrypt,fido2,libnacl,pkcs11,pyOpenSSL,pywin32]' + +Note that you will still need to manually install the libsodium library +listed above for libnacl to work correctly and/or libnettle for UMAC +support. Unfortunately, since liboqs, libsodium, and libnettle are not +Python packages, they cannot be directly installed using pip.
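Because these extras are optional, a feature can be silently unavailable at runtime. A small generic probe (plain Python, not an AsyncSSH API) can confirm which optional backends are importable in the current environment; the import names below are taken from the extras list above:

.. code:: python

    import importlib.util

    # Import names behind the extras listed above: 'OpenSSL' is the
    # import name of pyOpenSSL, 'pkcs11' of python-pkcs11; pywin32 is
    # omitted here since it only applies on Windows.
    for name in ('bcrypt', 'fido2', 'gssapi', 'libnacl', 'pkcs11', 'OpenSSL'):
        status = 'available' if importlib.util.find_spec(name) else 'missing'
        print(f'{name}: {status}')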
+ +Installing the development branch +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you would like to install the development branch of asyncssh directly +from Github, you can use the following command to do this: + + :: + + pip install git+https://github.com/ronf/asyncssh@develop + +Mailing Lists +------------- + +Three mailing lists are available for AsyncSSH: + +* `asyncssh-announce@googlegroups.com`__: Project announcements +* `asyncssh-dev@googlegroups.com`__: Development discussions +* `asyncssh-users@googlegroups.com`__: End-user discussions + +__ http://groups.google.com/d/forum/asyncssh-announce +__ http://groups.google.com/d/forum/asyncssh-dev +__ http://groups.google.com/d/forum/asyncssh-users + + diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/RECORD b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/RECORD new file mode 100644 index 0000000..59f2317 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/RECORD @@ -0,0 +1,138 @@ +asyncssh-2.14.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +asyncssh-2.14.2.dist-info/LICENSE,sha256=C-zxZWe-t3-iUrdmRjHdF3yPmhiJ5ImVtFN5xxMOUwM,14198 +asyncssh-2.14.2.dist-info/METADATA,sha256=w0lt66_CpTPAygP5XLFGgbfCLO7nwnEoqSBaXBFZdZU,9897 +asyncssh-2.14.2.dist-info/RECORD,, +asyncssh-2.14.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asyncssh-2.14.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +asyncssh-2.14.2.dist-info/top_level.txt,sha256=n1yXy2-yvQmweFl2J0szU1qlKxQ4Az6zfu0O35JV-0c,9 +asyncssh/__init__.py,sha256=1g7PFegXNQjtgWqDo3gpBIAO-QWRr0GU4Kb3TB5_N3g,8054 +asyncssh/__pycache__/__init__.cpython-312.pyc,, +asyncssh/__pycache__/agent.cpython-312.pyc,, +asyncssh/__pycache__/agent_unix.cpython-312.pyc,, +asyncssh/__pycache__/agent_win32.cpython-312.pyc,, +asyncssh/__pycache__/asn1.cpython-312.pyc,, +asyncssh/__pycache__/auth.cpython-312.pyc,, +asyncssh/__pycache__/auth_keys.cpython-312.pyc,, +asyncssh/__pycache__/channel.cpython-312.pyc,, +asyncssh/__pycache__/client.cpython-312.pyc,, +asyncssh/__pycache__/compression.cpython-312.pyc,, +asyncssh/__pycache__/config.cpython-312.pyc,, +asyncssh/__pycache__/connection.cpython-312.pyc,, +asyncssh/__pycache__/constants.cpython-312.pyc,, +asyncssh/__pycache__/dsa.cpython-312.pyc,, +asyncssh/__pycache__/ecdsa.cpython-312.pyc,, +asyncssh/__pycache__/eddsa.cpython-312.pyc,, +asyncssh/__pycache__/editor.cpython-312.pyc,, +asyncssh/__pycache__/encryption.cpython-312.pyc,, +asyncssh/__pycache__/forward.cpython-312.pyc,, +asyncssh/__pycache__/gss.cpython-312.pyc,, +asyncssh/__pycache__/gss_unix.cpython-312.pyc,, +asyncssh/__pycache__/gss_win32.cpython-312.pyc,, +asyncssh/__pycache__/kex.cpython-312.pyc,, +asyncssh/__pycache__/kex_dh.cpython-312.pyc,, +asyncssh/__pycache__/kex_rsa.cpython-312.pyc,, +asyncssh/__pycache__/keysign.cpython-312.pyc,, +asyncssh/__pycache__/known_hosts.cpython-312.pyc,, +asyncssh/__pycache__/listener.cpython-312.pyc,, +asyncssh/__pycache__/logging.cpython-312.pyc,, +asyncssh/__pycache__/mac.cpython-312.pyc,, +asyncssh/__pycache__/misc.cpython-312.pyc,, +asyncssh/__pycache__/packet.cpython-312.pyc,, +asyncssh/__pycache__/pattern.cpython-312.pyc,, +asyncssh/__pycache__/pbe.cpython-312.pyc,, +asyncssh/__pycache__/pkcs11.cpython-312.pyc,, +asyncssh/__pycache__/process.cpython-312.pyc,, +asyncssh/__pycache__/public_key.cpython-312.pyc,, +asyncssh/__pycache__/rsa.cpython-312.pyc,, +asyncssh/__pycache__/saslprep.cpython-312.pyc,, 
+asyncssh/__pycache__/scp.cpython-312.pyc,, +asyncssh/__pycache__/server.cpython-312.pyc,, +asyncssh/__pycache__/session.cpython-312.pyc,, +asyncssh/__pycache__/sftp.cpython-312.pyc,, +asyncssh/__pycache__/sk.cpython-312.pyc,, +asyncssh/__pycache__/sk_ecdsa.cpython-312.pyc,, +asyncssh/__pycache__/sk_eddsa.cpython-312.pyc,, +asyncssh/__pycache__/socks.cpython-312.pyc,, +asyncssh/__pycache__/stream.cpython-312.pyc,, +asyncssh/__pycache__/subprocess.cpython-312.pyc,, +asyncssh/__pycache__/version.cpython-312.pyc,, +asyncssh/__pycache__/x11.cpython-312.pyc,, +asyncssh/agent.py,sha256=J3jHE73ELyVYu8vnqLhz0iGLpt-K8mTaAsZbZf4jEIk,23553 +asyncssh/agent_unix.py,sha256=3viUx_0ciIJffJr9rJSfXrmWeMyLD-zdyCjHfly8LPs,1196 +asyncssh/agent_win32.py,sha256=uekboRXxadekOu7Ahg1SqvNKYq-bk5kl-Sn0a8mY_iU,5145 +asyncssh/asn1.py,sha256=UAeDMR8GkmSiQ_sAeA9j1CwyuDWfaWCU4aIQfNPMkW4,24462 +asyncssh/auth.py,sha256=rJ_h2R5DbFCuShqSLo60JokBxbyzLwLReYqVIeJ-IVE,33091 +asyncssh/auth_keys.py,sha256=Fi_VcL2E87GuoiHcrAzyh44i7gXyRl-nWXX_eIgZXF0,11821 +asyncssh/channel.py,sha256=PcLyvuGbz6D5hY1IxPxGh9zaVsRsSs7jsdLffVvg3os,78127 +asyncssh/client.py,sha256=ER_tMmoZ7EY7chr5nfQFpDc4d_F0E4LXMDjoiLhNYEk,16867 +asyncssh/compression.py,sha256=vqxm0EHor9bC95QwUsNXSD9ET9dBFv-bj0sj2JEHV5o,4162 +asyncssh/config.py,sha256=SmOhawdu3Euuu1R30vohLgM8CvI_luHEXxHbt4g2Xl8,22688 +asyncssh/connection.py,sha256=oLOebJUCAeGkJYUpCslUQSZPj90Uk5tkS9Twl0RZIZY,374903 +asyncssh/constants.py,sha256=OcTWx5gIclNDhwHBs4TqYin5bZen4LllRunU-4ILHqE,13385 +asyncssh/crypto/__init__.py,sha256=PJUwBAqU6x2Yl7vSC2xO_OEfTcOy9BF_ylCKAZd28Vc,2051 +asyncssh/crypto/__pycache__/__init__.cpython-312.pyc,, +asyncssh/crypto/__pycache__/chacha.cpython-312.pyc,, +asyncssh/crypto/__pycache__/cipher.cpython-312.pyc,, +asyncssh/crypto/__pycache__/dh.cpython-312.pyc,, +asyncssh/crypto/__pycache__/dsa.cpython-312.pyc,, +asyncssh/crypto/__pycache__/ec.cpython-312.pyc,, +asyncssh/crypto/__pycache__/ec_params.cpython-312.pyc,, +asyncssh/crypto/__pycache__/ed.cpython-312.pyc,, +asyncssh/crypto/__pycache__/kdf.cpython-312.pyc,, +asyncssh/crypto/__pycache__/misc.cpython-312.pyc,, +asyncssh/crypto/__pycache__/rsa.cpython-312.pyc,, +asyncssh/crypto/__pycache__/sntrup.cpython-312.pyc,, +asyncssh/crypto/__pycache__/umac.cpython-312.pyc,, +asyncssh/crypto/__pycache__/x509.cpython-312.pyc,, +asyncssh/crypto/chacha.py,sha256=woFZYS_t28rNJdrJK24X0sKcL1oHhgmYptkhk85y5nU,5573 +asyncssh/crypto/cipher.py,sha256=jVaHjKAqTjJ0dbouXbTzPbfE8RnXAWpNikH9ili6hxo,5925 +asyncssh/crypto/dh.py,sha256=EVyvIUEI6D81WBB2P7gIiY8frOedim0J2sTG0u85G9I,1547 +asyncssh/crypto/dsa.py,sha256=mSKlYjbVmzuyYqTTuWHGHDe9vgVa9YAWjnE5jlvEUjI,3811 +asyncssh/crypto/ec.py,sha256=mt1QxWkCr0nBpfKN9lVR1OulTBTnBWI_oYSVndgwtd4,6472 +asyncssh/crypto/ec_params.py,sha256=EzkbP5LYhha2ZPLgnIiwhBylDWV5YHiP1gsDw8AFmCU,4804 +asyncssh/crypto/ed.py,sha256=n2D60BPFKkTjOsrrv-bh_QZaAw62idDXAZ-qdlu0NyU,11179 +asyncssh/crypto/kdf.py,sha256=7g0sBD9OdVmyGxOsvw30EoEqdJs64GpSU8TgPJ-NJvg,1159 +asyncssh/crypto/misc.py,sha256=TUHa8mESY7W1C7_fFz0kh6bwXcDl-gFB8Yc8hivdbQ8,2401 +asyncssh/crypto/rsa.py,sha256=uty84Um1jZfEg-rp0cnM9kOIAT7OnCprJSH5YHiFgfk,5136 +asyncssh/crypto/sntrup.py,sha256=uECX9ESweu1KK8VB-LRQ72KAMY4DkHj0-E5adqUpHf0,2629 +asyncssh/crypto/umac.py,sha256=fSUtrHmot2vfSjxpkANSGKZjHhwPqXlmVeNfkZ_1WL4,4395 +asyncssh/crypto/x509.py,sha256=9Vy4gOdF6dVWg-NBuwrNvL1ROneO4byIvJuHt4hvEDk,15356 +asyncssh/dsa.py,sha256=ydZknIX_zDy2lRzRakb7KRySE51QqHRnI2EvsrrdTLY,8419 +asyncssh/ecdsa.py,sha256=iIyl5gguVN35Sh3qN2XpL7NCjcesNAf3EQEK7Kh-9-s,12211 
+asyncssh/eddsa.py,sha256=-pz7FGVqSnVv5XqEZEUqfJjm7y0OEQZKyd9aJMeQpCc,7319 +asyncssh/editor.py,sha256=G1jdjEjUmhVfsEEWWmB5zjgcwwOZ_HxXYYL8eT0BG78,30401 +asyncssh/encryption.py,sha256=V2O3OvZdfKeGn2wFbAZCxL6jEvBWFNOC1k0mYyybBI4,11004 +asyncssh/forward.py,sha256=XypBezI05UEJfaBwVqmXcsO5Kjo-A0vkS9sbutuqfBQ,6101 +asyncssh/gss.py,sha256=FNCj36HjGNggMMcL8o52LW3AsHgZ9mq5Rem_Hhz3rYM,2007 +asyncssh/gss_unix.py,sha256=aCZq0bs-BDe3Rs-DxPlSHcN3jx2VDgvzRNm4WoTtBvY,4992 +asyncssh/gss_win32.py,sha256=PPruGO6nanw_gCbxORAeN5IrYQZIs8BuMxD1yzseXyM,5686 +asyncssh/kex.py,sha256=ypGs2c2ZlQtZXcrtBgpjxT3lfgWRakidfBxRae_j3g8,4456 +asyncssh/kex_dh.py,sha256=pxXHXftk45p2hKHh2ylN-nCEnLKmY15lHp6l9DGuQs8,32833 +asyncssh/kex_rsa.py,sha256=SvAmooth4v_qm9GoDZhxLySPy3mv272GYNOKw2AkVQs,5967 +asyncssh/keysign.py,sha256=0whrOHG1Y13y8-AMnxEfvOJBZaDWs3e_itbjTZU7GLo,3749 +asyncssh/known_hosts.py,sha256=T7S-4HgPHVNnm0cEex-2sqylMvwZYpjYZfm-UhxzRv4,14087 +asyncssh/listener.py,sha256=vRBXNBpnJ119V_vX1x6ttM9JWbTYc7_LSuRs3dUMn5o,12812 +asyncssh/logging.py,sha256=8Pga9jg1eaOupynKUAnJMBgui-CMAJOnFDJD164bsIM,7515 +asyncssh/mac.py,sha256=HbpEEWBruZiBkjOhI7vqKDNgo704l17W-58BlSowNko,7140 +asyncssh/misc.py,sha256=Dc3JqPOifmOSDJ4ual8t3Ohq3A7mVy9vKcKZkxPUKIQ,23665 +asyncssh/packet.py,sha256=JJcwDkgGeuvqZydroxHSw_a4UlAWaRRl27l6ElNrm0Y,6655 +asyncssh/pattern.py,sha256=DwJ7yyZhtIXqnS_wU1CZulAWtWXw7u4NBdNyOGzaGiU,4662 +asyncssh/pbe.py,sha256=FgVR7LnK0wnui08ovHGtWwPqPzwY9qx9zRv4BKJx_t4,20674 +asyncssh/pkcs11.py,sha256=_tY-YVRnnPDFdOP027KRxQuYJIuPwx1RTLaKApt4cLw,11479 +asyncssh/process.py,sha256=ZsEYg3uSkdSMzg6116Y9Hmx77ZwAr_SSxzhJSgF68do,62564 +asyncssh/public_key.py,sha256=iy46UfmNqodQzT2H-Trjq9gT_9FMOGry9xYPIzMi42c,138361 +asyncssh/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asyncssh/rsa.py,sha256=2AXD41d1Lowoyy3N-c-VfR6gYQcZFITae4qfbSxr8Ko,11474 +asyncssh/saslprep.py,sha256=PQPS-644PQInCq5JJ60l5838dnSac-lEuXYQ0AbbfY8,3599 +asyncssh/scp.py,sha256=fhdu6xN8kmjiTW9PYWoVXvmsthSXTf3XdYcafM08Mh8,37935 +asyncssh/server.py,sha256=JeWVJ_c_KPXc2rMJBkQ3OfW7UD7WdvTCiR0UpdPddgE,40824 +asyncssh/session.py,sha256=uVURjfqu4pk2MrWlre9vCS-fl8ZGKQx89vgo9PV0Y-Y,21523 +asyncssh/sftp.py,sha256=L9371j3WDlJg6Hxj2kkA3ifq9w5tG2xTAf8zrGSmmts,267754 +asyncssh/sk.py,sha256=qdam3dEkQ7i8Bg_U4Vp0Cll_QvXflt5Ut7N4rE1RsNs,9168 +asyncssh/sk_ecdsa.py,sha256=QQd5tAv2paRU3KSsub8QffTSg9KO_D663Kaig49esU8,7869 +asyncssh/sk_eddsa.py,sha256=_zjheLeL1TDtVb8VF-TnIsWyW753fEzbP0n9Pe_F15E,6892 +asyncssh/socks.py,sha256=QRvT0MqTNB6sG3pWjUcYN8Az-_m3Du9WvnHAJ_ICTpA,7581 +asyncssh/stream.py,sha256=ONnFrmt01egEWl9cOWHP2Sg7URv7HvGeoN5iY_Fb6-Q,29161 +asyncssh/subprocess.py,sha256=MUsSJgL-8H4Hke-0A7XJvE2mOc8Y1kgJafektzwct0w,10890 +asyncssh/version.py,sha256=QYo-PYvvDjui7igyJHv2CUGwK_VvUfeP6J4P7YZ18Wg,899 +asyncssh/x11.py,sha256=fS8-AZ7XXhxNueoBSDkwX68dveB2f-3H1CgxQORJR2c,17017 diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/WHEEL b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/top_level.txt 
b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/top_level.txt new file mode 100644 index 0000000..7bb99e8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh-2.14.2.dist-info/top_level.txt @@ -0,0 +1 @@ +asyncssh diff --git a/venv/lib/python3.12/site-packages/asyncssh/__init__.py b/venv/lib/python3.12/site-packages/asyncssh/__init__.py new file mode 100644 index 0000000..a8328c6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/__init__.py @@ -0,0 +1,171 @@ +# Copyright (c) 2013-2022 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""An asynchronous SSH2 library for Python""" + +from .version import __author__, __author_email__, __url__, __version__ + +# pylint: disable=wildcard-import + +from .constants import * + +# pylint: enable=wildcard-import + +from .agent import SSHAgentClient, SSHAgentKeyPair, connect_agent + +from .auth_keys import SSHAuthorizedKeys +from .auth_keys import import_authorized_keys, read_authorized_keys + +from .channel import SSHClientChannel, SSHServerChannel +from .channel import SSHTCPChannel, SSHUNIXChannel + +from .client import SSHClient + +from .config import ConfigParseError + +from .forward import SSHForwarder + +from .connection import SSHAcceptor, SSHClientConnection, SSHServerConnection +from .connection import SSHClientConnectionOptions, SSHServerConnectionOptions +from .connection import SSHAcceptHandler +from .connection import create_connection, create_server, connect, listen +from .connection import connect_reverse, listen_reverse, get_server_host_key +from .connection import get_server_auth_methods, run_client, run_server + +from .editor import SSHLineEditorChannel + +from .known_hosts import SSHKnownHosts +from .known_hosts import import_known_hosts, read_known_hosts +from .known_hosts import match_known_hosts + +from .listener import SSHListener + +from .logging import logger, set_log_level, set_sftp_log_level, set_debug_level + +from .misc import BytesOrStr +from .misc import Error, DisconnectError, ChannelOpenError, ChannelListenError +from .misc import ConnectionLost, CompressionError, HostKeyNotVerifiable +from .misc import KeyExchangeFailed, IllegalUserName, MACError +from .misc import PermissionDenied, ProtocolError, ProtocolNotSupported +from .misc import ServiceNotAvailable, PasswordChangeRequired +from .misc import BreakReceived, SignalReceived, TerminalSizeChanged + +from .pbe import KeyEncryptionError + +from .pkcs11 import load_pkcs11_keys + +from .process import SSHServerProcessFactory +from .process import SSHClientProcess, SSHServerProcess +from .process import SSHCompletedProcess, ProcessError +from .process import TimeoutError # pylint: disable=redefined-builtin +from .process import DEVNULL, PIPE, STDOUT + +from .public_key import SSHKey, SSHKeyPair, SSHCertificate +from .public_key import KeyGenerationError, KeyImportError, KeyExportError +from .public_key import 
generate_private_key, import_private_key +from .public_key import import_public_key, import_certificate +from .public_key import read_private_key, read_public_key, read_certificate +from .public_key import read_private_key_list, read_public_key_list +from .public_key import read_certificate_list +from .public_key import load_keypairs, load_public_keys, load_certificates +from .public_key import load_resident_keys + +from .rsa import set_default_skip_rsa_key_validation + +from .scp import scp + +from .session import DataType, SSHClientSession, SSHServerSession +from .session import SSHTCPSession, SSHUNIXSession + +from .server import SSHServer + +from .sftp import SFTPClient, SFTPClientFile, SFTPServer, SFTPError +from .sftp import SFTPEOFError, SFTPNoSuchFile, SFTPPermissionDenied +from .sftp import SFTPFailure, SFTPBadMessage, SFTPNoConnection +from .sftp import SFTPInvalidHandle, SFTPNoSuchPath, SFTPFileAlreadyExists +from .sftp import SFTPWriteProtect, SFTPNoMedia, SFTPNoSpaceOnFilesystem +from .sftp import SFTPQuotaExceeded, SFTPUnknownPrincipal, SFTPLockConflict +from .sftp import SFTPDirNotEmpty, SFTPNotADirectory, SFTPInvalidFilename +from .sftp import SFTPLinkLoop, SFTPCannotDelete, SFTPInvalidParameter +from .sftp import SFTPFileIsADirectory, SFTPByteRangeLockConflict +from .sftp import SFTPByteRangeLockRefused, SFTPDeletePending +from .sftp import SFTPFileCorrupt, SFTPOwnerInvalid, SFTPGroupInvalid +from .sftp import SFTPNoMatchingByteRangeLock +from .sftp import SFTPConnectionLost, SFTPOpUnsupported +from .sftp import SFTPAttrs, SFTPVFSAttrs, SFTPName +from .sftp import SEEK_SET, SEEK_CUR, SEEK_END + +from .stream import SSHSocketSessionFactory, SSHServerSessionFactory +from .stream import SFTPServerFactory, SSHReader, SSHWriter + +from .subprocess import SSHSubprocessReadPipe, SSHSubprocessWritePipe +from .subprocess import SSHSubprocessProtocol, SSHSubprocessTransport + +# Import these explicitly to trigger register calls in them +from . 
import sk_eddsa, sk_ecdsa, eddsa, ecdsa, rsa, dsa, kex_dh, kex_rsa + +__all__ = [ + 'BreakReceived', 'BytesOrStr', 'ChannelListenError', + 'ChannelOpenError', 'CompressionError', 'ConfigParseError', + 'ConnectionLost', 'DEVNULL', 'DataType', 'DisconnectError', 'Error', + 'HostKeyNotVerifiable', 'IllegalUserName', 'KeyEncryptionError', + 'KeyExchangeFailed', 'KeyExportError', 'KeyGenerationError', + 'KeyImportError', 'MACError', 'PIPE', 'PasswordChangeRequired', + 'PermissionDenied', 'ProcessError', 'ProtocolError', + 'ProtocolNotSupported', 'SEEK_CUR', 'SEEK_END', 'SEEK_SET', + 'SFTPAttrs', 'SFTPBadMessage', 'SFTPByteRangeLockConflict', + 'SFTPByteRangeLockRefused', 'SFTPCannotDelete', 'SFTPClient', + 'SFTPClientFile', 'SFTPConnectionLost', 'SFTPDeletePending', + 'SFTPDirNotEmpty', 'SFTPEOFError', 'SFTPError', 'SFTPFailure', + 'SFTPFileAlreadyExists', 'SFTPFileCorrupt', 'SFTPFileIsADirectory', + 'SFTPGroupInvalid', 'SFTPInvalidFilename', 'SFTPInvalidHandle', + 'SFTPInvalidParameter', 'SFTPLinkLoop', 'SFTPLockConflict', 'SFTPName', + 'SFTPNoConnection', 'SFTPNoMatchingByteRangeLock', 'SFTPNoMedia', + 'SFTPNoSpaceOnFilesystem', 'SFTPNoSuchFile', 'SFTPNoSuchPath', + 'SFTPNotADirectory', 'SFTPOpUnsupported', 'SFTPOwnerInvalid', + 'SFTPPermissionDenied', 'SFTPQuotaExceeded', 'SFTPServer', + 'SFTPServerFactory', 'SFTPUnknownPrincipal', 'SFTPVFSAttrs', + 'SFTPWriteProtect', 'SSHAcceptor', 'SSHAgentClient', 'SSHAgentKeyPair', + 'SSHAuthorizedKeys', 'SSHCertificate', 'SSHClient', 'SSHClientChannel', + 'SSHClientConnection', 'SSHClientConnectionOptions', + 'SSHClientProcess', 'SSHClientSession', 'SSHCompletedProcess', + 'SSHForwarder', 'SSHKey', 'SSHKeyPair', 'SSHKnownHosts', + 'SSHLineEditorChannel', 'SSHListener', 'SSHReader', 'SSHServer', + 'SSHServerChannel', 'SSHServerConnection', + 'SSHServerConnectionOptions', 'SSHServerProcess', + 'SSHServerProcessFactory', 'SSHServerSession', + 'SSHServerSessionFactory', 'SSHSocketSessionFactory', + 'SSHSubprocessProtocol', 'SSHSubprocessReadPipe', + 'SSHSubprocessTransport', 'SSHSubprocessWritePipe', 'SSHTCPChannel', + 'SSHTCPSession', 'SSHUNIXChannel', 'SSHUNIXSession', 'SSHWriter', + 'STDOUT', 'ServiceNotAvailable', 'SignalReceived', + 'TerminalSizeChanged', 'TimeoutError', 'connect', 'connect_agent', + 'connect_reverse', 'create_connection', 'create_server', + 'generate_private_key', 'get_server_auth_methods', + 'get_server_host_key', 'import_authorized_keys', 'import_certificate', + 'import_known_hosts', 'import_private_key', 'import_public_key', + 'listen', 'listen_reverse', 'load_certificates', 'load_keypairs', + 'load_pkcs11_keys', 'load_public_keys', 'load_resident_keys', 'logger', + 'match_known_hosts', 'read_authorized_keys', 'read_certificate', + 'read_certificate_list', 'read_known_hosts', 'read_private_key', + 'read_private_key_list', 'read_public_key', 'read_public_key_list', + 'run_client', 'run_server', 'scp', 'set_debug_level', 'set_log_level', + 'set_sftp_log_level', 'set_default_skip_rsa_key_validation', +] diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5c7639c Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent.cpython-312.pyc new file mode 100644 index 
0000000..58ae299 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent_unix.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent_unix.cpython-312.pyc new file mode 100644 index 0000000..a8e14b7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent_unix.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent_win32.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent_win32.cpython-312.pyc new file mode 100644 index 0000000..4e2eab4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/agent_win32.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/asn1.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/asn1.cpython-312.pyc new file mode 100644 index 0000000..4b13eab Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/asn1.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..e102117 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/auth.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/auth_keys.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/auth_keys.cpython-312.pyc new file mode 100644 index 0000000..72ab0bd Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/auth_keys.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/channel.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/channel.cpython-312.pyc new file mode 100644 index 0000000..0f9e87b Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/channel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/client.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..3faa3d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/compression.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/compression.cpython-312.pyc new file mode 100644 index 0000000..2e9f3e2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/compression.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..16d7739 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000..038f564 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/connection.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/constants.cpython-312.pyc new file mode 100644 index 0000000..3bb7aba Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/constants.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/dsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/dsa.cpython-312.pyc new file mode 100644 index 0000000..eeeb07e Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/dsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/ecdsa.cpython-312.pyc new file mode 100644 index 0000000..4971d66 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/ecdsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/eddsa.cpython-312.pyc new file mode 100644 index 0000000..d2cae81 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/eddsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/editor.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/editor.cpython-312.pyc new file mode 100644 index 0000000..8d766bf Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/editor.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/encryption.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/encryption.cpython-312.pyc new file mode 100644 index 0000000..2f8b407 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/encryption.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/forward.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/forward.cpython-312.pyc new file mode 100644 index 0000000..32a039c Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/forward.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss.cpython-312.pyc new file mode 100644 index 0000000..9722e76 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss_unix.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss_unix.cpython-312.pyc new file mode 100644 index 0000000..d4fdcfd Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss_unix.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss_win32.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss_win32.cpython-312.pyc new file mode 100644 index 0000000..769bbb2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/gss_win32.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex.cpython-312.pyc new file mode 100644 index 0000000..20c9eda Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex_dh.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex_dh.cpython-312.pyc new file mode 100644 index 0000000..fc3f796 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex_dh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex_rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex_rsa.cpython-312.pyc new file mode 100644 index 0000000..d1c8cba Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/kex_rsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/keysign.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/keysign.cpython-312.pyc new file mode 100644 index 0000000..8ea694e Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/keysign.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/known_hosts.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/known_hosts.cpython-312.pyc new file mode 100644 index 0000000..87a2f6c Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/known_hosts.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/listener.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/listener.cpython-312.pyc new file mode 100644 index 0000000..57568af Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/listener.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/logging.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/logging.cpython-312.pyc new file mode 100644 index 0000000..37148d1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/logging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/mac.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/mac.cpython-312.pyc new file mode 100644 index 0000000..8420391 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/mac.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/misc.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/misc.cpython-312.pyc new file mode 100644 index 0000000..e90cde6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/misc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/packet.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/packet.cpython-312.pyc new file mode 100644 index 0000000..2384304 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/packet.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pattern.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pattern.cpython-312.pyc new file mode 100644 index 0000000..7df4e8f Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pattern.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pbe.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pbe.cpython-312.pyc new file mode 100644 index 0000000..8992008 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pbe.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pkcs11.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pkcs11.cpython-312.pyc new file mode 100644 index 0000000..f676cc4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/pkcs11.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/process.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/process.cpython-312.pyc new file mode 100644 index 0000000..43c5cb2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/process.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/public_key.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/public_key.cpython-312.pyc new file mode 100644 index 0000000..707d24d Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/public_key.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 0000000..ed99ca4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/rsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/saslprep.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/saslprep.cpython-312.pyc new file mode 100644 index 0000000..332bd37 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/saslprep.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/scp.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/scp.cpython-312.pyc new file mode 100644 index 0000000..d17a6b7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/scp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/server.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/server.cpython-312.pyc new file mode 100644 index 0000000..66b7487 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/server.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/session.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/session.cpython-312.pyc new file mode 100644 index 0000000..a04b305 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/session.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sftp.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sftp.cpython-312.pyc new file mode 100644 index 0000000..0488827 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sftp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk.cpython-312.pyc new file mode 100644 index 0000000..dbd8cdc Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk_ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk_ecdsa.cpython-312.pyc new file mode 100644 index 0000000..f3b391e Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk_ecdsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk_eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk_eddsa.cpython-312.pyc new file mode 100644 index 0000000..16cb026 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/sk_eddsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/socks.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/socks.cpython-312.pyc new file mode 100644 index 0000000..77c7468 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/socks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/stream.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/stream.cpython-312.pyc new file mode 100644 index 0000000..4a11a2a Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/stream.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/subprocess.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/subprocess.cpython-312.pyc new file mode 100644 index 0000000..cd681a8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/subprocess.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..d18f3b6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/__pycache__/x11.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/x11.cpython-312.pyc new file mode 100644 index 0000000..0b05a50 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/__pycache__/x11.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/agent.py b/venv/lib/python3.12/site-packages/asyncssh/agent.py new file mode 100644 index 0000000..8c612e5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/agent.py @@ -0,0 +1,676 @@ +# Copyright (c) 2016-2023 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH agent client""" + +import asyncio +import errno +import os +import sys +from types import TracebackType +from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple, Type, Union +from typing_extensions import Protocol + +from .listener import SSHForwardListener +from .misc import async_context_manager, maybe_wait_closed +from .packet import Byte, String, UInt32, PacketDecodeError, SSHPacket +from .public_key import KeyPairListArg, SSHCertificate, SSHKeyPair +from .public_key import load_default_keypairs, load_keypairs + +if TYPE_CHECKING: + from tempfile import TemporaryDirectory + + +class AgentReader(Protocol): + """Protocol for reading from an SSH agent""" + + async def readexactly(self, n: int) -> bytes: + """Read exactly n bytes from the SSH agent""" + + +class AgentWriter(Protocol): + """Protocol for writing to an SSH agent""" + + def write(self, data: bytes) -> None: + """Write bytes to the SSH agent""" + + def close(self) -> None: + """Close connection to the SSH agent""" + + async def wait_closed(self) -> None: + """Wait for the connection to the SSH agent to close""" + + +try: + if sys.platform == 'win32': # pragma: no cover + from .agent_win32 import open_agent + else: + from .agent_unix import open_agent +except ImportError as _exc: # pragma: no cover + async def open_agent(agent_path: str) -> \ + Tuple[AgentReader, AgentWriter]: + """Dummy function if we're unable to import agent support""" + + raise OSError(errno.ENOENT, 'Agent support unavailable: %s' % str(_exc)) + + +class _SupportsOpenAgentConnection(Protocol): + """A class that supports open_agent_connection""" + + async def open_agent_connection(self) -> Tuple[AgentReader, AgentWriter]: + """Open a forwarded ssh-agent connection back to the client""" + + +_AgentPath = Union[str, _SupportsOpenAgentConnection] + + +# Client request message numbers +SSH_AGENTC_REQUEST_IDENTITIES = 11 +SSH_AGENTC_SIGN_REQUEST = 13 +SSH_AGENTC_ADD_IDENTITY = 17 +SSH_AGENTC_REMOVE_IDENTITY = 18 +SSH_AGENTC_REMOVE_ALL_IDENTITIES = 19 +SSH_AGENTC_ADD_SMARTCARD_KEY = 20 +SSH_AGENTC_REMOVE_SMARTCARD_KEY = 21 +SSH_AGENTC_LOCK = 22 +SSH_AGENTC_UNLOCK = 23 +SSH_AGENTC_ADD_ID_CONSTRAINED = 25 +SSH_AGENTC_ADD_SMARTCARD_KEY_CONSTRAINED = 26 +SSH_AGENTC_EXTENSION = 27 + +# Agent response message numbers +SSH_AGENT_FAILURE = 5 +SSH_AGENT_SUCCESS = 6 +SSH_AGENT_IDENTITIES_ANSWER = 12 +SSH_AGENT_SIGN_RESPONSE = 14 +SSH_AGENT_EXTENSION_FAILURE = 28 + +# SSH agent constraint numbers +SSH_AGENT_CONSTRAIN_LIFETIME = 1 +SSH_AGENT_CONSTRAIN_CONFIRM = 2 +SSH_AGENT_CONSTRAIN_EXTENSION = 255 + +# SSH agent signature flags +SSH_AGENT_RSA_SHA2_256 = 2 +SSH_AGENT_RSA_SHA2_512 = 4 + + +class SSHAgentKeyPair(SSHKeyPair): + """Surrogate for a key managed by the SSH agent""" + + _key_type = 'agent' + + def __init__(self, agent: 'SSHAgentClient', algorithm: bytes, + public_data: bytes, comment: bytes): + is_cert = 
algorithm.endswith(b'-cert-v01@openssh.com') + + if is_cert: + if algorithm.startswith(b'sk-'): + sig_algorithm = algorithm[:-21] + b'@openssh.com' + else: + sig_algorithm = algorithm[:-21] + else: + sig_algorithm = algorithm + + # Neither Pageant nor the Win10 OpenSSH agent seems to support the + # ssh-agent protocol flags used to request RSA SHA2 signatures yet + if sig_algorithm == b'ssh-rsa' and sys.platform != 'win32': + sig_algorithms: Sequence[bytes] = \ + (b'rsa-sha2-256', b'rsa-sha2-512', b'ssh-rsa') + else: + sig_algorithms = (sig_algorithm,) + + if is_cert: + host_key_algorithms: Sequence[bytes] = (algorithm,) + else: + host_key_algorithms = sig_algorithms + + super().__init__(algorithm, sig_algorithm, sig_algorithms, + host_key_algorithms, public_data, comment) + + self._agent = agent + self._is_cert = is_cert + self._flags = 0 + + @property + def has_cert(self) -> bool: + """ Return if this key pair has an associated cert""" + + return self._is_cert + + @property + def has_x509_chain(self) -> bool: + """ Return if this key pair has an associated X.509 cert chain""" + + return False + + def set_certificate(self, cert: SSHCertificate) -> None: + """Set certificate to use with this key""" + + super().set_certificate(cert) + + self._is_cert = True + + def set_sig_algorithm(self, sig_algorithm: bytes) -> None: + """Set the signature algorithm to use when signing data""" + + super().set_sig_algorithm(sig_algorithm) + + if sig_algorithm in (b'rsa-sha2-256', b'x509v3-rsa2048-sha256'): + self._flags |= SSH_AGENT_RSA_SHA2_256 + elif sig_algorithm == b'rsa-sha2-512': + self._flags |= SSH_AGENT_RSA_SHA2_512 + + async def sign_async(self, data: bytes) -> bytes: + """Asynchronously sign a block of data with this private key""" + + return await self._agent.sign(self.key_public_data, data, self._flags) + + async def remove(self) -> None: + """Remove this key pair from the agent""" + + await self._agent.remove_keys([self]) + + +class SSHAgentClient: + """SSH agent client""" + + def __init__(self, agent_path: _AgentPath): + self._agent_path = agent_path + self._reader: Optional[AgentReader] = None + self._writer: Optional[AgentWriter] = None + self._lock = asyncio.Lock() + + async def __aenter__(self) -> 'SSHAgentClient': + """Allow SSHAgentClient to be used as an async context manager""" + + return self + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType]) -> bool: + """Wait for connection close when used as an async context manager""" + + await self._cleanup() + return False + + async def _cleanup(self) -> None: + """Clean up this SSH agent client""" + + self.close() + await self.wait_closed() + + @staticmethod + def encode_constraints(lifetime: Optional[int], confirm: bool) -> bytes: + """Encode key constraints""" + + result = b'' + + if lifetime: + result += Byte(SSH_AGENT_CONSTRAIN_LIFETIME) + UInt32(lifetime) + + if confirm: + result += Byte(SSH_AGENT_CONSTRAIN_CONFIRM) + + return result + + async def connect(self) -> None: + """Connect to the SSH agent""" + + if isinstance(self._agent_path, str): + self._reader, self._writer = await open_agent(self._agent_path) + else: + self._reader, self._writer = \ + await self._agent_path.open_agent_connection() + + async def _make_request(self, msgtype: int, *args: bytes) -> \ + Tuple[int, SSHPacket]: + """Send an SSH agent request""" + + async with self._lock: + try: + if not self._writer: + await self.connect() + + reader = self._reader + writer = 
self._writer + + assert reader is not None + assert writer is not None + + payload = Byte(msgtype) + b''.join(args) + writer.write(UInt32(len(payload)) + payload) + + resplen = int.from_bytes((await reader.readexactly(4)), 'big') + + resp = SSHPacket((await reader.readexactly(resplen))) + resptype = resp.get_byte() + + return resptype, resp + except (OSError, EOFError, PacketDecodeError) as exc: + await self._cleanup() + raise ValueError(str(exc)) from None + + async def get_keys(self, identities: Optional[Sequence[bytes]] = None) -> \ + Sequence[SSHKeyPair]: + """Request the available client keys + + This method is a coroutine which returns a list of client keys + available in the ssh-agent. + + :param identities: (optional) + A list of allowed byte string identities to return. If empty, + all identities on the SSH agent will be returned. + + :returns: A list of :class:`SSHKeyPair` objects + + """ + + resptype, resp = \ + await self._make_request(SSH_AGENTC_REQUEST_IDENTITIES) + + if resptype == SSH_AGENT_IDENTITIES_ANSWER: + result: List[SSHKeyPair] = [] + + num_keys = resp.get_uint32() + for _ in range(num_keys): + key_blob = resp.get_string() + comment = resp.get_string() + + if identities and key_blob not in identities: + continue + + packet = SSHPacket(key_blob) + algorithm = packet.get_string() + + result.append(SSHAgentKeyPair(self, algorithm, + key_blob, comment)) + + resp.check_end() + return result + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def sign(self, key_blob: bytes, data: bytes, + flags: int = 0) -> bytes: + """Sign a block of data with the requested key""" + + resptype, resp = await self._make_request(SSH_AGENTC_SIGN_REQUEST, + String(key_blob), + String(data), UInt32(flags)) + + if resptype == SSH_AGENT_SIGN_RESPONSE: + sig = resp.get_string() + resp.check_end() + return sig + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Unable to sign with requested key') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def add_keys(self, keylist: KeyPairListArg = (), + passphrase: Optional[str] = None, + lifetime: Optional[int] = None, + confirm: bool = False) -> None: + """Add keys to the agent + + This method adds a list of local private keys and optional + matching certificates to the agent. + + :param keylist: (optional) + The list of keys to add. If not specified, an attempt will + be made to load keys from the files + :file:`.ssh/id_ed25519_sk`, :file:`.ssh/id_ecdsa_sk`, + :file:`.ssh/id_ed448`, :file:`.ssh/id_ed25519`, + :file:`.ssh/id_ecdsa`, :file:`.ssh/id_rsa` and + :file:`.ssh/id_dsa` in the user's home directory with + optional matching certificates loaded from the files + :file:`.ssh/id_ed25519_sk-cert.pub`, + :file:`.ssh/id_ecdsa_sk-cert.pub`, + :file:`.ssh/id_ed448-cert.pub`, + :file:`.ssh/id_ed25519-cert.pub`, + :file:`.ssh/id_ecdsa-cert.pub`, :file:`.ssh/id_rsa-cert.pub`, + and :file:`.ssh/id_dsa-cert.pub`. Failures when adding keys + are ignored in this case, as the agent may not recognize + some of these key types. + :param passphrase: (optional) + The passphrase to use to decrypt the keys. + :param lifetime: (optional) + The time in seconds after which the keys should be + automatically deleted, or `None` to store these keys + indefinitely (the default). + :param confirm: (optional) + Whether or not to require confirmation for each private + key operation which uses these keys, defaulting to `False`. 
+ :type keylist: *see* :ref:`SpecifyingPrivateKeys` + :type passphrase: `str` + :type lifetime: `int` or `None` + :type confirm: `bool` + + :raises: :exc:`ValueError` if the keys cannot be added + + """ + + if keylist: + keypairs = load_keypairs(keylist, passphrase) + ignore_failures = False + else: + keypairs = load_default_keypairs(passphrase) + ignore_failures = True + + base_constraints = self.encode_constraints(lifetime, confirm) + + provider = os.environ.get('SSH_SK_PROVIDER') or 'internal' + + sk_constraints = Byte(SSH_AGENT_CONSTRAIN_EXTENSION) + \ + String('sk-provider@openssh.com') + \ + String(provider) + + for keypair in keypairs: + constraints = base_constraints + + if keypair.algorithm.startswith(b'sk-'): + constraints += sk_constraints + + msgtype = SSH_AGENTC_ADD_ID_CONSTRAINED if constraints else \ + SSH_AGENTC_ADD_IDENTITY + + comment = keypair.get_comment_bytes() + + resptype, resp = \ + await self._make_request(msgtype, + keypair.get_agent_private_key(), + String(comment or b''), constraints) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + if not ignore_failures: + raise ValueError('Unable to add key') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def add_smartcard_keys(self, provider: str, + pin: Optional[str] = None, + lifetime: Optional[int] = None, + confirm: bool = False) -> None: + """Store keys associated with a smart card in the agent + + :param provider: + The name of the smart card provider + :param pin: (optional) + The PIN to use to unlock the smart card + :param lifetime: (optional) + The time in seconds after which the keys should be + automatically deleted, or `None` to store these keys + indefinitely (the default). + :param confirm: (optional) + Whether or not to require confirmation for each private + key operation which uses these keys, defaulting to `False`. + :type provider: `str` + :type pin: `str` or `None` + :type lifetime: `int` or `None` + :type confirm: `bool` + + :raises: :exc:`ValueError` if the keys cannot be added + + """ + + constraints = self.encode_constraints(lifetime, confirm) + msgtype = SSH_AGENTC_ADD_SMARTCARD_KEY_CONSTRAINED \ + if constraints else SSH_AGENTC_ADD_SMARTCARD_KEY + + resptype, resp = await self._make_request(msgtype, String(provider), + String(pin or ''), + constraints) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Unable to add keys') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def remove_keys(self, keylist: Sequence[SSHKeyPair]) -> None: + """Remove a key stored in the agent + + :param keylist: + The list of keys to remove. 
+ :type keylist: `list` of :class:`SSHKeyPair` + + :raises: :exc:`ValueError` if any keys are not found + + """ + + for keypair in keylist: + resptype, resp = \ + await self._make_request(SSH_AGENTC_REMOVE_IDENTITY, + String(keypair.public_data)) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Key not found') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def remove_smartcard_keys(self, provider: str, + pin: Optional[str] = None) -> None: + """Remove keys associated with a smart card stored in the agent + + :param provider: + The name of the smart card provider + :param pin: (optional) + The PIN to use to unlock the smart card + :type provider: `str` + :type pin: `str` or `None` + + :raises: :exc:`ValueError` if the keys are not found + + """ + + resptype, resp = \ + await self._make_request(SSH_AGENTC_REMOVE_SMARTCARD_KEY, + String(provider), String(pin or '')) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Keys not found') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def remove_all(self) -> None: + """Remove all keys stored in the agent + + :raises: :exc:`ValueError` if the keys can't be removed + + """ + + resptype, resp = \ + await self._make_request(SSH_AGENTC_REMOVE_ALL_IDENTITIES) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Unable to remove all keys') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def lock(self, passphrase: str) -> None: + """Lock the agent using the specified passphrase + + .. note:: The lock and unlock actions don't appear to be + supported on the Windows 10 OpenSSH agent. + + :param passphrase: + The passphrase required to later unlock the agent + :type passphrase: `str` + + :raises: :exc:`ValueError` if the agent can't be locked + + """ + + resptype, resp = await self._make_request(SSH_AGENTC_LOCK, + String(passphrase)) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Unable to lock SSH agent') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def unlock(self, passphrase: str) -> None: + """Unlock the agent using the specified passphrase + + .. note:: The lock and unlock actions don't appear to be + supported on the Windows 10 OpenSSH agent. 
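To round out the removal and locking methods above, a short sketch; the assertion assumes OpenSSH's ssh-agent, which answers with an empty identity list while locked:

    import asyncio
    import asyncssh

    async def main() -> None:
        async with asyncssh.connect_agent() as agent:
            await agent.lock('correct horse battery staple')

            # a locked OpenSSH agent hides its identities
            assert await agent.get_keys() == []

            await agent.unlock('correct horse battery staple')

    asyncio.run(main())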
+ + :param passphrase: + The passphrase to use to unlock the agent + :type passphrase: `str` + + :raises: :exc:`ValueError` if the agent can't be unlocked + + """ + + resptype, resp = await self._make_request(SSH_AGENTC_UNLOCK, + String(passphrase)) + + if resptype == SSH_AGENT_SUCCESS: + resp.check_end() + elif resptype == SSH_AGENT_FAILURE: + raise ValueError('Unable to unlock SSH agent') + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + async def query_extensions(self) -> Sequence[str]: + """Return a list of extensions supported by the agent + + :returns: A list of strings of supported extension names + + """ + + resptype, resp = await self._make_request(SSH_AGENTC_EXTENSION, + String('query')) + + if resptype == SSH_AGENT_SUCCESS: + result = [] + + while resp: + exttype = resp.get_string() + + try: + exttype_str = exttype.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('Invalid extension type name') from None + + result.append(exttype_str) + + return result + elif resptype == SSH_AGENT_FAILURE: + return [] + else: + raise ValueError('Unknown SSH agent response: %d' % resptype) + + def close(self) -> None: + """Close the SSH agent connection + + This method closes the connection to the ssh-agent. Any + attempts to use this :class:`SSHAgentClient` or the key + pairs it previously returned will result in an error. + + """ + + if self._writer: + self._writer.close() + + async def wait_closed(self) -> None: + """Wait for this agent connection to close + + This method is a coroutine which can be called to block until + the connection to the agent has finished closing. + + """ + + if self._writer: + await maybe_wait_closed(self._writer) + + self._reader = None + self._writer = None + + +class SSHAgentListener: + """Listener used to forward agent connections""" + + def __init__(self, tempdir: 'TemporaryDirectory[str]', path: str, + unix_listener: SSHForwardListener): + self._tempdir = tempdir + self._path = path + self._unix_listener = unix_listener + + def get_path(self) -> str: + """Return the path being listened on""" + + return self._path + + def close(self) -> None: + """Close the agent listener""" + + self._unix_listener.close() + self._tempdir.cleanup() + + +@async_context_manager +async def connect_agent(agent_path: _AgentPath = '') -> 'SSHAgentClient': + """Make a connection to the SSH agent + + This function attempts to connect to an ssh-agent process + listening on a UNIX domain socket at `agent_path`. If not + provided, it will attempt to get the path from the `SSH_AUTH_SOCK` + environment variable. + + If the connection is successful, an :class:`SSHAgentClient` object + is returned that has methods on it you can use to query the + ssh-agent. If no path is specified and the environment variable + is not set or the connection to the agent fails, an error is + raised. + + :param agent_path: (optional) + The path to use to contact the ssh-agent process, or the + :class:`SSHServerConnection` to forward the agent request + over. 
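connect_agent() above is wrapped with async_context_manager, so it can either be awaited directly or used as an async context manager; a hedged sketch of the latter, paired with query_extensions():

    import asyncio
    import asyncssh

    async def main() -> None:
        # falls back to $SSH_AUTH_SOCK because no path is given
        async with asyncssh.connect_agent() as agent:
            # an OpenSSH agent typically reports extensions such as
            # 'session-bind@openssh.com'
            print(await agent.query_extensions())

    asyncio.run(main())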
+ :type agent_path: `str` or :class:`SSHServerConnection` + + :returns: An :class:`SSHAgentClient` + + :raises: :exc:`OSError` or :exc:`ChannelOpenError` if the + connection to the agent can't be opened + + """ + + if not agent_path: + agent_path = os.environ.get('SSH_AUTH_SOCK', '') + + agent = SSHAgentClient(agent_path) + await agent.connect() + + return agent diff --git a/venv/lib/python3.12/site-packages/asyncssh/agent_unix.py b/venv/lib/python3.12/site-packages/asyncssh/agent_unix.py new file mode 100644 index 0000000..a095876 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/agent_unix.py @@ -0,0 +1,39 @@ +# Copyright (c) 2016-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH agent support code for UNIX""" + +import asyncio +import errno +from typing import TYPE_CHECKING, Tuple + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .agent import AgentReader, AgentWriter + + +async def open_agent(agent_path: str) -> Tuple['AgentReader', 'AgentWriter']: + """Open a connection to ssh-agent""" + + if not agent_path: + raise OSError(errno.ENOENT, 'Agent not found') + + return await asyncio.open_unix_connection(agent_path) diff --git a/venv/lib/python3.12/site-packages/asyncssh/agent_win32.py b/venv/lib/python3.12/site-packages/asyncssh/agent_win32.py new file mode 100644 index 0000000..350cf30 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/agent_win32.py @@ -0,0 +1,182 @@ +# Copyright (c) 2016-2021 by Ron Frederick and others. 
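The UNIX open_agent() above is a thin wrapper over asyncio's stream API; combined with the SSH_AUTH_SOCK fallback performed by connect_agent(), the whole connection path reduces to the following standalone sketch:

    import asyncio
    import errno
    import os

    async def open_agent_socket():
        """Connect to ssh-agent roughly the way agent_unix.open_agent() does"""

        path = os.environ.get('SSH_AUTH_SOCK', '')

        if not path:
            raise OSError(errno.ENOENT, 'Agent not found')

        # yields an (asyncio.StreamReader, asyncio.StreamWriter) pair
        return await asyncio.open_unix_connection(path)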
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH agent support code for Windows""" + +# Some of the imports below won't be found when running pylint on UNIX +# pylint: disable=import-error + +import asyncio +import ctypes +import ctypes.wintypes +import errno +from typing import TYPE_CHECKING, Tuple, Union, cast + +from .misc import open_file + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .agent import AgentReader, AgentWriter + + +try: + import mmapfile + import win32api + import win32con + import win32ui + _pywin32_available = True +except ImportError: + _pywin32_available = False + + +_AGENT_COPYDATA_ID = 0x804e50ba +_AGENT_MAX_MSGLEN = 8192 +_AGENT_NAME = 'Pageant' + +_DEFAULT_OPENSSH_PATH = r'\\.\pipe\openssh-ssh-agent' + + +def _find_agent_window() -> 'win32ui.PyCWnd': + """Find and return the Pageant window""" + + if _pywin32_available: + try: + return win32ui.FindWindow(_AGENT_NAME, _AGENT_NAME) + except win32ui.error: + raise OSError(errno.ENOENT, 'Agent not found') from None + else: + raise OSError(errno.ENOENT, 'PyWin32 not installed') from None + + +class _CopyDataStruct(ctypes.Structure): + """Windows COPYDATASTRUCT argument for WM_COPYDATA message""" + + _fields_ = (('dwData', ctypes.wintypes.LPARAM), + ('cbData', ctypes.wintypes.DWORD), + ('lpData', ctypes.c_char_p)) + + +class _PageantTransport: + """Transport to connect to Pageant agent on Windows""" + + def __init__(self) -> None: + self._mapname = '%s%08x' % (_AGENT_NAME, win32api.GetCurrentThreadId()) + + try: + self._mapfile = mmapfile.mmapfile('', self._mapname, + _AGENT_MAX_MSGLEN, 0, 0) + except mmapfile.error as exc: + raise OSError(errno.EIO, str(exc)) from None + + self._cds = _CopyDataStruct(_AGENT_COPYDATA_ID, len(self._mapname) + 1, + self._mapname.encode()) + + self._writing = False + + def write(self, data: bytes) -> None: + """Write request data to Pageant agent""" + + if not self._writing: + self._mapfile.seek(0) + self._writing = True + + try: + self._mapfile.write(data) + except ValueError as exc: + raise OSError(errno.EIO, str(exc)) from None + + async def readexactly(self, n: int) -> bytes: + """Read response data from Pageant agent""" + + if self._writing: + cwnd = _find_agent_window() + + if not cwnd.SendMessage(win32con.WM_COPYDATA, 0, + cast(int, self._cds)): + raise OSError(errno.EIO, 'Unable to send agent request') + + self._writing = False + self._mapfile.seek(0) + + result = self._mapfile.read(n) + + if len(result) != n: + raise asyncio.IncompleteReadError(result, n) + + return result + + def close(self) -> None: + """Close the connection to Pageant""" + + if self._mapfile: + self._mapfile.close() + + async def wait_closed(self) -> None: + """Wait for the transport to close""" + + +class _W10OpenSSHTransport: + """Transport to connect to OpenSSH agent on Windows 10""" + + def __init__(self, agent_path: str): + self._agentfile = open_file(agent_path, 'r+b') + + async def 
readexactly(self, n: int) -> bytes: + """Read response data from OpenSSH agent""" + + result = self._agentfile.read(n) + + if len(result) != n: + raise asyncio.IncompleteReadError(result, n) + + return result + + def write(self, data: bytes) -> None: + """Write request data to OpenSSH agent""" + + self._agentfile.write(data) + + def close(self) -> None: + """Close the connection to OpenSSH""" + + if self._agentfile: + self._agentfile.close() + + async def wait_closed(self) -> None: + """Wait for the transport to close""" + + +async def open_agent(agent_path: str) -> Tuple['AgentReader', 'AgentWriter']: + """Open a connection to the Pageant or Windows 10 OpenSSH agent""" + + transport: Union[None, _PageantTransport, _W10OpenSSHTransport] = None + + if not agent_path: + try: + _find_agent_window() + transport = _PageantTransport() + except OSError: + agent_path = _DEFAULT_OPENSSH_PATH + + if not transport: + transport = _W10OpenSSHTransport(agent_path) + + return transport, transport diff --git a/venv/lib/python3.12/site-packages/asyncssh/asn1.py b/venv/lib/python3.12/site-packages/asyncssh/asn1.py new file mode 100644 index 0000000..fc7ec1f --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/asn1.py @@ -0,0 +1,786 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Utilities for encoding and decoding ASN.1 DER data + + The der_encode function takes a Python value and encodes it in DER + format, returning a byte string. In addition to supporting standard + Python types, BitString can be used to encode a DER bit string, + ObjectIdentifier can be used to encode OIDs, values can be wrapped + in a TaggedDERObject to set an alternate DER tag on them, and + non-standard types can be encoded by placing them in a RawDERObject. + + The der_decode function takes a byte string in DER format and decodes + it into the corresponding Python values. 
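A round-trip sketch of the der_encode()/der_decode() behavior this docstring describes; asyncssh.asn1 is an internal module, so the import path here is an assumption rather than documented API:

    from asyncssh.asn1 import ObjectIdentifier, der_decode, der_encode

    data = der_encode((1, 'name', b'\x01\x02', ObjectIdentifier('1.2.840')))

    # a SEQUENCE round-trips as a tuple of the original values
    assert der_decode(data) == (1, 'name', b'\x01\x02',
                                ObjectIdentifier('1.2.840'))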
+ +""" + +from typing import Dict, FrozenSet, Sequence, Set, Tuple, Type, TypeVar, Union +from typing import cast + + +_DERClass = Type['DERType'] +_DERClassVar = TypeVar('_DERClassVar', bound='_DERClass') + + +# ASN.1 object classes +UNIVERSAL = 0x00 +APPLICATION = 0x01 +CONTEXT_SPECIFIC = 0x02 +PRIVATE = 0x03 + +# ASN.1 universal object tags +END_OF_CONTENT = 0x00 +BOOLEAN = 0x01 +INTEGER = 0x02 +BIT_STRING = 0x03 +OCTET_STRING = 0x04 +NULL = 0x05 +OBJECT_IDENTIFIER = 0x06 +UTF8_STRING = 0x0c +SEQUENCE = 0x10 +SET = 0x11 +IA5_STRING = 0x16 + +_asn1_class = ('Universal', 'Application', 'Context-specific', 'Private') + +_der_class_by_tag: Dict[int, _DERClass] = {} +_der_class_by_type: Dict[Union[object, _DERClass], _DERClass] = {} + + +def _encode_identifier(asn1_class: int, constructed: bool, tag: int) -> bytes: + """Encode a DER object's identifier""" + + if asn1_class not in (UNIVERSAL, APPLICATION, CONTEXT_SPECIFIC, PRIVATE): + raise ASN1EncodeError('Invalid ASN.1 class') + + flags = (asn1_class << 6) | (0x20 if constructed else 0x00) + + if tag < 0x20: + identifier = [flags | tag] + else: + identifier = [tag & 0x7f] + + while tag >= 0x80: + tag >>= 7 + identifier.append(0x80 | (tag & 0x7f)) + + identifier.append(flags | 0x1f) + + return bytes(identifier[::-1]) + + +class ASN1Error(ValueError): + """ASN.1 coding error""" + + +class ASN1EncodeError(ASN1Error): + """ASN.1 DER encoding error""" + + +class ASN1DecodeError(ASN1Error): + """ASN.1 DER decoding error""" + + +class DERType: + """Parent class for classes which use DERTag decorator""" + + identifier: bytes = b'' + + @staticmethod + def encode(value: object) -> bytes: + """Encode value as a DER byte string""" + + raise NotImplementedError + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> object: + """Decode a DER byte string into an object""" + + raise NotImplementedError + + +class DERTag: + """A decorator used by classes which convert values to/from DER + + Classes which convert Python values to and from DER format + should use the DERTag decorator to indicate what DER tag value + they understand. When DER data is decoded, the tag is looked + up in the list to see which class to call to perform the + decoding. + + Classes which convert existing Python types to and from DER + format can specify the list of types they understand in the + optional "types" argument. Otherwise, conversion is expected + to be to and from the new class being defined. + + """ + + def __init__(self, tag: int, types: Sequence[object] = (), + constructed: bool = False): + self._tag = tag + self._types = types + self._identifier = _encode_identifier(UNIVERSAL, constructed, tag) + + def __call__(self, cls: _DERClassVar) -> _DERClassVar: + cls.identifier = self._identifier + + _der_class_by_tag[self._tag] = cls + + if self._types: + for t in self._types: + _der_class_by_type[t] = cls + else: + _der_class_by_type[cls] = cls + + return cls + + +class RawDERObject: + """A class which can encode a DER object of an arbitrary type + + This object is initialized with an ASN.1 class, tag, and a + byte string representing the already encoded data. Such + objects will never have the constructed flag set, since + that is represented here as a TaggedDERObject. 
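The identifier byte built by _encode_identifier() above packs the ASN.1 class into the top two bits, the constructed flag into bit 0x20, and the tag into the low five bits, switching to the multi-byte long form for tags of 0x1f and above. A few worked values:

    # universal, constructed, tag 0x10 (SEQUENCE) -> 0x30
    assert (0x00 << 6) | 0x20 | 0x10 == 0x30

    # universal, primitive, tag 0x02 (INTEGER)    -> 0x02
    assert (0x00 << 6) | 0x00 | 0x02 == 0x02

    # context-specific, constructed, tag 1        -> 0xa1
    assert (0x02 << 6) | 0x20 | 0x01 == 0xa1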
+ + """ + + def __init__(self, tag: int, content: bytes, asn1_class: int): + self.asn1_class = asn1_class + self.tag = tag + self.content = content + + def __repr__(self) -> str: + return ('RawDERObject(%s, %s, %r)' % + (_asn1_class[self.asn1_class], self.tag, self.content)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RawDERObject): # pragma: no cover + return NotImplemented + + return (self.asn1_class == other.asn1_class and + self.tag == other.tag and self.content == other.content) + + def __hash__(self) -> int: + return hash((self.asn1_class, self.tag, self.content)) + + def encode_identifier(self) -> bytes: + """Encode the DER identifier for this object as a byte string""" + + return _encode_identifier(self.asn1_class, False, self.tag) + + @staticmethod + def encode(value: object) -> bytes: + """Encode the content for this object as a DER byte string""" + + return cast('RawDERObject', value).content + + +class TaggedDERObject: + """An explicitly tagged DER object + + This object provides a way to wrap an ASN.1 object with an + explicit tag. The value (including the tag representing its + actual type) is then encoded as part of its value. By + default, the ASN.1 class for these objects is CONTEXT_SPECIFIC, + and the DER encoding always marks these values as constructed. + + """ + + def __init__(self, tag: int, value: object, + asn1_class: int = CONTEXT_SPECIFIC): + self.asn1_class = asn1_class + self.tag = tag + self.value = value + + def __repr__(self) -> str: + if self.asn1_class == CONTEXT_SPECIFIC: + return 'TaggedDERObject(%s, %r)' % (self.tag, self.value) + else: + return ('TaggedDERObject(%s, %s, %r)' % + (_asn1_class[self.asn1_class], self.tag, self.value)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TaggedDERObject): # pragma: no cover + return NotImplemented + + return (self.asn1_class == other.asn1_class and + self.tag == other.tag and self.value == other.value) + + def __hash__(self) -> int: + return hash((self.asn1_class, self.tag, self.value)) + + def encode_identifier(self) -> bytes: + """Encode the DER identifier for this object as a byte string""" + + return _encode_identifier(self.asn1_class, True, self.tag) + + @staticmethod + def encode(value: object) -> bytes: + """Encode the content for this object as a DER byte string""" + + return der_encode(cast('TaggedDERObject', value).value) + + +@DERTag(NULL, (type(None),)) +class _Null(DERType): + """A null value""" + + @staticmethod + def encode(value: object) -> bytes: + """Encode a DER null value""" + + # pylint: disable=unused-argument + + return b'' + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> None: + """Decode a DER null value""" + + if constructed: + raise ASN1DecodeError('NULL should not be constructed') + + if content: + raise ASN1DecodeError('NULL should not have associated content') + + return None + + +@DERTag(BOOLEAN, (bool,)) +class _Boolean(DERType): + """A boolean value""" + + @staticmethod + def encode(value: object) -> bytes: + """Encode a DER boolean value""" + + return b'\xff' if value else b'\0' + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> bool: + """Decode a DER boolean value""" + + if constructed: + raise ASN1DecodeError('BOOLEAN should not be constructed') + + if content not in {b'\x00', b'\xff'}: + raise ASN1DecodeError('BOOLEAN content must be 0x00 or 0xff') + + return bool(content[0]) + + +@DERTag(INTEGER, (int,)) +class _Integer(DERType): + """An integer value""" + + @staticmethod + def 
encode(value: object) -> bytes: + """Encode a DER integer value""" + + i = cast(int, value) + l = i.bit_length() + l = l // 8 + 1 if l % 8 == 0 else (l + 7) // 8 + result = i.to_bytes(l, 'big', signed=True) + return result[1:] if result.startswith(b'\xff\x80') else result + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> int: + """Decode a DER integer value""" + + if constructed: + raise ASN1DecodeError('INTEGER should not be constructed') + + return int.from_bytes(content, 'big', signed=True) + + +@DERTag(OCTET_STRING, (bytes, bytearray)) +class _OctetString(DERType): + """An octet string value""" + + @staticmethod + def encode(value: object) -> bytes: + """Encode a DER octet string""" + + return cast(bytes, value) + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> bytes: + """Decode a DER octet string""" + + if constructed: + raise ASN1DecodeError('OCTET STRING should not be constructed') + + return content + + +@DERTag(UTF8_STRING, (str,)) +class _UTF8String(DERType): + """A UTF-8 string value""" + + @staticmethod + def encode(value: object) -> bytes: + """Encode a DER UTF-8 string""" + + return cast(str, value).encode('utf-8') + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> str: + """Decode a DER UTF-8 string""" + + if constructed: + raise ASN1DecodeError('UTF8 STRING should not be constructed') + + return content.decode('utf-8') + + +@DERTag(SEQUENCE, (list, tuple), constructed=True) +class _Sequence(DERType): + """A sequence of values""" + + @staticmethod + def encode(value: object) -> bytes: + """Encode a sequence of DER values""" + + seq_value = cast(Sequence[object], value) + return b''.join(der_encode(item) for item in seq_value) + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> Sequence[object]: + """Decode a sequence of DER values""" + + if not constructed: + raise ASN1DecodeError('SEQUENCE should always be constructed') + + offset = 0 + length = len(content) + + value = [] + while offset < length: + item, consumed = der_decode_partial(content[offset:]) + value.append(item) + offset += consumed + + return tuple(value) + + +@DERTag(SET, (set, frozenset), constructed=True) +class _Set(DERType): + """A set of DER values""" + + @staticmethod + def encode(value: object) -> bytes: + """Encode a set of DER values""" + + set_value = cast(Union[FrozenSet[object], Set[object]], value) + return b''.join(sorted(der_encode(item) for item in set_value)) + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> FrozenSet[object]: + """Decode a set of DER values""" + + if not constructed: + raise ASN1DecodeError('SET should always be constructed') + + offset = 0 + length = len(content) + + value = set() + while offset < length: + item, consumed = der_decode_partial(content[offset:]) + value.add(item) + offset += consumed + + return frozenset(value) + + +@DERTag(BIT_STRING) +class BitString(DERType): + """A string of bits + + This object can be initialized either with a byte string and an + optional count of the number of least-significant bits in the last + byte which should not be included in the value, or with a string + consisting only of the digits '0' and '1'. + + An optional 'named' flag can also be set, indicating that the + BitString was specified with named bits, indicating that the proper + DER encoding of it should strip any trailing zeroes. 
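The sizing logic in _Integer.encode() earlier in this file can be traced by hand; the final startswith(b'\xff\x80') check trims a redundant leading sign byte. The der_encode import again assumes the internal asyncssh.asn1 module path:

    from asyncssh.asn1 import der_encode

    # 128 has bit_length 8, so one extra byte is needed for the sign bit
    assert (128).to_bytes(2, 'big', signed=True) == b'\x00\x80'
    assert der_encode(128) == b'\x02\x02\x00\x80'

    # -32768 encodes as ff 80 00; the leading ff carries no information
    # and is trimmed, leaving the two-byte content 80 00
    assert (-32768).to_bytes(3, 'big', signed=True) == b'\xff\x80\x00'
    assert der_encode(-32768) == b'\x02\x02\x80\x00'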
+ + """ + + def __init__(self, value: object, unused: int = 0, named: bool = False): + if unused < 0 or unused > 7: + raise ASN1EncodeError('Unused bit count must be between 0 and 7') + + if isinstance(value, bytes): + if unused: + if not value: + raise ASN1EncodeError('Can\'t have unused bits with empty ' + 'value') + elif value[-1] & ((1 << unused) - 1): + raise ASN1EncodeError('Unused bits in value should be ' + 'zero') + elif isinstance(value, str): + if unused: + raise ASN1EncodeError('Unused bit count should not be set ' + 'when providing a string') + + used = len(value) % 8 + unused = 8 - used if used else 0 + value += unused * '0' + value = bytes(int(value[i:i+8], 2) + for i in range(0, len(value), 8)) + else: + raise ASN1EncodeError('Unexpected type of bit string value') + + if named: + while value and not value[-1] & (1 << unused): + unused += 1 + if unused == 8: + value = value[:-1] + unused = 0 + + self.value = value + self.unused = unused + + def __str__(self) -> str: + result = ''.join(bin(b)[2:].zfill(8) for b in self.value) + if self.unused: + result = result[:-self.unused] + return result + + def __repr__(self) -> str: + return "BitString('%s')" % self + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BitString): # pragma: no cover + return NotImplemented + + return self.value == other.value and self.unused == other.unused + + def __hash__(self) -> int: + return hash((self.value, self.unused)) + + @staticmethod + def encode(value: object) -> bytes: + """Encode a DER bit string""" + + bitstr_value = cast('BitString', value) + return bytes((bitstr_value.unused,)) + bitstr_value.value + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> 'BitString': + """Decode a DER bit string""" + + if constructed: + raise ASN1DecodeError('BIT STRING should not be constructed') + + if not content or content[0] > 7: + raise ASN1DecodeError('Invalid unused bit count') + + return cls(content[1:], unused=content[0]) + + +@DERTag(IA5_STRING) +class IA5String(DERType): + """An ASCII string value""" + + def __init__(self, value: Union[bytes, bytearray]): + self.value = value + + def __str__(self) -> str: + return '%s' % self.value.decode('ascii') + + def __repr__(self) -> str: + return 'IA5String(%r)' % self.value + + def __eq__(self, other: object) -> bool: # pragma: no cover + if not isinstance(other, IA5String): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + @staticmethod + def encode(value: object) -> bytes: + """Encode a DER IA5 string""" + + # ASN.1 defines this type as only containing ASCII characters, but + # some tools expecting ASN.1 allow IA5Strings to contain other + # characters, so we leave it up to the caller to pass in a byte + # string which has already done the appropriate encoding of any + # non-ASCII characters. + + return cast('IA5String', value).value + + @classmethod + def decode(cls, constructed: bool, content: bytes) -> 'IA5String': + """Decode a DER IA5 string""" + + if constructed: + raise ASN1DecodeError('IA5 STRING should not be constructed') + + # As noted in the encode method above, the decoded value for this + # type is a byte string, leaving the decoding of any non-ASCII + # characters up to the caller. + + return cls(content) + + +@DERTag(OBJECT_IDENTIFIER) +class ObjectIdentifier(DERType): + """An object identifier (OID) value + + This object can be initialized from a string of dot-separated + integer values, representing a hierarchical namespace. 
All OIDs
+       should have at least two components, with the first being between
+       0 and 2 (indicating ITU-T, ISO, or joint assignment). In cases
+       where the first component is 0 or 1, the second component must
+       be in the range 0 to 39 due to the way these first two components
+       are encoded.
+
+    """
+
+    def __init__(self, value: str):
+        self.value = value
+
+    def __str__(self) -> str:
+        return self.value
+
+    def __repr__(self) -> str:
+        return "ObjectIdentifier('%s')" % self.value
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, ObjectIdentifier): # pragma: no cover
+            return NotImplemented
+
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash(self.value)
+
+    @staticmethod
+    def encode(value: object) -> bytes:
+        """Encode a DER object identifier"""
+
+        def _bytes(component: int) -> bytes:
+            """Convert a single element of an OID to a DER byte string"""
+
+            if component < 0:
+                raise ASN1EncodeError('Components of object identifier must '
+                                      'be greater than or equal to 0')
+
+            result = [component & 0x7f]
+            while component >= 0x80:
+                component >>= 7
+                result.append(0x80 | (component & 0x7f))
+
+            return bytes(result[::-1])
+
+        oid_value = cast('ObjectIdentifier', value)
+
+        try:
+            components = [int(c) for c in oid_value.value.split('.')]
+        except ValueError:
+            raise ASN1EncodeError('Component values must be '
+                                  'integers') from None
+
+        if len(components) < 2:
+            raise ASN1EncodeError('Object identifiers must have at least two '
+                                  'components')
+        elif components[0] < 0 or components[0] > 2:
+            raise ASN1EncodeError('First component of object identifier must '
+                                  'be between 0 and 2')
+        elif components[0] < 2 and (components[1] < 0 or components[1] > 39):
+            raise ASN1EncodeError('Second component of object identifier must '
+                                  'be between 0 and 39')
+
+        components[0:2] = [components[0]*40 + components[1]]
+        return b''.join(_bytes(c) for c in components)
+
+    @classmethod
+    def decode(cls, constructed: bool, content: bytes) -> 'ObjectIdentifier':
+        """Decode a DER object identifier"""
+
+        if constructed:
+            raise ASN1DecodeError('OBJECT IDENTIFIER should not be '
+                                  'constructed')
+
+        if not content:
+            raise ASN1DecodeError('Empty object identifier')
+
+        b = content[0]
+        components = list(divmod(b, 40)) if b < 80 else [2, b-80]
+
+        component = 0
+        for b in content[1:]:
+            if b == 0x80 and component == 0:
+                raise ASN1DecodeError('Invalid component')
+            elif b < 0x80:
+                components.append(component | b)
+                component = 0
+            else:
+                component |= b & 0x7f
+                component <<= 7
+
+        if component:
+            raise ASN1DecodeError('Incomplete component')
+
+        return cls('.'.join(str(c) for c in components))
+
+
+def der_encode(value: object) -> bytes:
+    """Encode a value in DER format
+
+       This function takes a Python value and encodes it in DER format.
+       The following mapping of types is used:
+
+         NoneType -> NULL
+         bool -> BOOLEAN
+         int -> INTEGER
+         bytes, bytearray -> OCTET STRING
+         str -> UTF8 STRING
+         list, tuple -> SEQUENCE
+         set, frozenset -> SET
+         BitString -> BIT STRING
+         ObjectIdentifier -> OBJECT IDENTIFIER
+
+       An explicitly tagged DER object can be encoded by passing in a
+       TaggedDERObject which specifies the ASN.1 class, tag, and value
+       to encode.
+
+       Other types can be encoded by passing in a RawDERObject which
+       specifies the ASN.1 class, tag, and raw content octets to encode.
+
+    """
+
+    t = type(value)
+    if t in (RawDERObject, TaggedDERObject):
+        value = cast(Union[RawDERObject, TaggedDERObject], value)
+        identifier = value.encode_identifier()
+        content = value.encode(value)
+    elif t in _der_class_by_type:
+        cls = _der_class_by_type[t]
+        identifier = cls.identifier
+        content = cls.encode(value)
+    else:
+        raise ASN1EncodeError('Cannot DER encode type %s' % t.__name__)
+
+    length = len(content)
+    if length < 0x80:
+        len_bytes = bytes((length,))
+    else:
+        len_bytes = length.to_bytes((length.bit_length() + 7) // 8, 'big')
+        len_bytes = bytes((0x80 | len(len_bytes),)) + len_bytes
+
+    return identifier + len_bytes + content
+
+
+def der_decode_partial(data: bytes) -> Tuple[object, int]:
+    """Decode a value in DER format and return the number of bytes consumed"""
+
+    if len(data) < 2:
+        raise ASN1DecodeError('Incomplete data')
+
+    tag = data[0]
+    asn1_class, constructed, tag = tag >> 6, bool(tag & 0x20), tag & 0x1f
+    offset = 1
+    if tag == 0x1f:
+        tag = 0
+        for b in data[offset:]:
+            offset += 1
+
+            if b < 0x80:
+                tag |= b
+                break
+            else:
+                tag |= b & 0x7f
+                tag <<= 7
+        else:
+            raise ASN1DecodeError('Incomplete tag')
+
+    if offset >= len(data):
+        raise ASN1DecodeError('Incomplete data')
+
+    length = data[offset]
+    offset += 1
+    if length > 0x80:
+        len_size = length & 0x7f
+        length = int.from_bytes(data[offset:offset+len_size], 'big')
+        offset += len_size
+    elif length == 0x80:
+        raise ASN1DecodeError('Indefinite length not allowed')
+
+    end = offset + length
+    content = data[offset:end]
+
+    if end > len(data):
+        raise ASN1DecodeError('Incomplete data')
+
+    if asn1_class == UNIVERSAL and tag in _der_class_by_tag:
+        cls = _der_class_by_tag[tag]
+        value = cls.decode(constructed, content)
+    elif constructed:
+        value = TaggedDERObject(tag, der_decode(content), asn1_class)
+    else:
+        value = RawDERObject(tag, content, asn1_class)
+
+    return value, end
+
+
+def der_decode(data: bytes) -> object:
+    """Decode a value in DER format
+
+       This function takes a byte string in DER format and converts it
+       to a corresponding set of Python objects. The following mapping
+       of ASN.1 tags to Python types is used:
+
+         NULL -> NoneType
+         BOOLEAN -> bool
+         INTEGER -> int
+         OCTET STRING -> bytes
+         UTF8 STRING -> str
+         SEQUENCE -> tuple
+         SET -> frozenset
+         BIT STRING -> BitString
+         OBJECT IDENTIFIER -> ObjectIdentifier
+
+       Explicitly tagged objects are returned as type TaggedDERObject,
+       with fields holding the object class, tag, and tagged value.
+
+       Other object tags are returned as type RawDERObject, with fields
+       holding the object class, tag, and raw content octets.
+
+       All data bytes must be consumed and only the decoded value is
+       returned. To decode a value which may be followed by additional
+       data, use der_decode_partial, which returns a tuple of the
+       decoded value and the number of bytes consumed.
+
+    """
+
+    value, end = der_decode_partial(data)
+
+    if end < len(data):
+        raise ASN1DecodeError('Data contains unexpected bytes at end')
+
+    return value
diff --git a/venv/lib/python3.12/site-packages/asyncssh/auth.py b/venv/lib/python3.12/site-packages/asyncssh/auth.py
new file mode 100644
index 0000000..7437b0a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/auth.py
@@ -0,0 +1,992 @@
+# Copyright (c) 2013-2022 by Ron Frederick and others.
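Where der_decode() above insists on consuming every byte, der_decode_partial() supports walking a buffer of concatenated DER values; a sketch (with the same internal-import caveat as before):

    from asyncssh.asn1 import der_decode_partial, der_encode

    data = der_encode(1) + der_encode('two')

    # der_decode(data) would raise ASN1DecodeError here, since trailing
    # bytes remain after the first value; the partial form reports how
    # far it got instead
    value, consumed = der_decode_partial(data)
    assert value == 1

    value, _ = der_decode_partial(data[consumed:])
    assert value == 'two'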
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH authentication handlers""" + +from typing import TYPE_CHECKING, Awaitable, Dict, List, Optional +from typing import Sequence, Tuple, Type, Union, cast + +from .constants import DEFAULT_LANG +from .gss import GSSBase, GSSError +from .logging import SSHLogger +from .misc import ProtocolError, PasswordChangeRequired, get_symbol_names +from .packet import Boolean, String, UInt32, SSHPacket, SSHPacketHandler +from .public_key import SigningKey +from .saslprep import saslprep, SASLPrepError + + +if TYPE_CHECKING: + import asyncio + + # pylint: disable=cyclic-import + from .connection import SSHConnection, SSHClientConnection + from .connection import SSHServerConnection + + +KbdIntPrompts = Sequence[Tuple[str, bool]] +KbdIntNewChallenge = Tuple[str, str, str, KbdIntPrompts] +KbdIntChallenge = Union[bool, KbdIntNewChallenge] +KbdIntResponse = Sequence[str] + +PasswordChangeResponse = Tuple[str, str] + + +# SSH message values for GSS auth +MSG_USERAUTH_GSSAPI_RESPONSE = 60 +MSG_USERAUTH_GSSAPI_TOKEN = 61 +MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = 63 +MSG_USERAUTH_GSSAPI_ERROR = 64 +MSG_USERAUTH_GSSAPI_ERRTOK = 65 +MSG_USERAUTH_GSSAPI_MIC = 66 + +# SSH message values for public key auth +MSG_USERAUTH_PK_OK = 60 + +# SSH message values for keyboard-interactive auth +MSG_USERAUTH_INFO_REQUEST = 60 +MSG_USERAUTH_INFO_RESPONSE = 61 + +# SSH message values for password auth +MSG_USERAUTH_PASSWD_CHANGEREQ = 60 + +_auth_methods: List[bytes] = [] +_client_auth_handlers: Dict[bytes, Type['ClientAuth']] = {} +_server_auth_handlers: Dict[bytes, Type['ServerAuth']] = {} + + +class Auth(SSHPacketHandler): + """Parent class for authentication""" + + def __init__(self, conn: 'SSHConnection', coro: Awaitable[None]): + self._conn = conn + self._logger = conn.logger + self._coro: Optional['asyncio.Task[None]'] = conn.create_task(coro) + + def send_packet(self, pkttype: int, *args: bytes, + trivial: bool = True) -> None: + """Send an auth packet""" + + self._conn.send_userauth_packet(pkttype, *args, handler=self, + trivial=trivial) + + @property + def logger(self) -> SSHLogger: + """A logger associated with this authentication handler""" + + return self._logger + + def create_task(self, coro: Awaitable[None]) -> None: + """Create an asynchronous auth task""" + + self.cancel() + self._coro = self._conn.create_task(coro) + + def cancel(self) -> None: + """Cancel any authentication in progress""" + + if self._coro: # pragma: no branch + self._coro.cancel() + self._coro = None + + +class ClientAuth(Auth): + """Parent class for client authentication""" + + _conn: 'SSHClientConnection' + + def __init__(self, conn: 'SSHClientConnection', method: bytes): + self._method = method + + super().__init__(conn, self._start()) + + async def _start(self) -> None: + """Abstract method for starting client authentication""" + + # Provided by subclass + raise NotImplementedError + + def 
auth_succeeded(self) -> None: + """Callback when auth succeeds""" + + def auth_failed(self) -> None: + """Callback when auth fails""" + + async def send_request(self, *args: bytes, + key: Optional[SigningKey] = None, + trivial: bool = True) -> None: + """Send a user authentication request""" + + await self._conn.send_userauth_request(self._method, *args, key=key, + trivial=trivial) + + +class _ClientNullAuth(ClientAuth): + """Client side implementation of null auth""" + + async def _start(self) -> None: + """Start client null authentication""" + + await self.send_request() + + +class _ClientGSSKexAuth(ClientAuth): + """Client side implementation of GSS key exchange auth""" + + async def _start(self) -> None: + """Start client GSS key exchange authentication""" + + if self._conn.gss_kex_auth_requested(): + self.logger.debug1('Trying GSS key exchange auth') + + await self.send_request(key=self._conn.get_gss_context(), + trivial=False) + else: + self._conn.try_next_auth() + + +class _ClientGSSMICAuth(ClientAuth): + """Client side implementation of GSS MIC auth""" + + _handler_names = get_symbol_names(globals(), 'MSG_USERAUTH_GSSAPI_') + + def __init__(self, conn: 'SSHClientConnection', method: bytes): + super().__init__(conn, method) + + self._gss: Optional[GSSBase] = None + self._got_error = False + + async def _start(self) -> None: + """Start client GSS MIC authentication""" + + if self._conn.gss_mic_auth_requested(): + self.logger.debug1('Trying GSS MIC auth') + + self._gss = self._conn.get_gss_context() + self._gss.reset() + mechs = b''.join((String(mech) for mech in self._gss.mechs)) + await self.send_request(UInt32(len(self._gss.mechs)), mechs) + else: + self._conn.try_next_auth() + + def _finish(self) -> None: + """Finish client GSS MIC authentication""" + + assert self._gss is not None + + if self._gss.provides_integrity: + data = self._conn.get_userauth_request_data(self._method) + + self.send_packet(MSG_USERAUTH_GSSAPI_MIC, + String(self._gss.sign(data)), + trivial=False) + else: + self.send_packet(MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE) + + def _process_response(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS response from the server""" + + mech = packet.get_string() + packet.check_end() + + assert self._gss is not None + + if mech not in self._gss.mechs: + raise ProtocolError('Mechanism mismatch') + + try: + token = self._gss.step() + assert token is not None + + self.send_packet(MSG_USERAUTH_GSSAPI_TOKEN, String(token)) + + if self._gss.complete: + self._finish() + except GSSError as exc: + if exc.token: + self.send_packet(MSG_USERAUTH_GSSAPI_ERRTOK, String(exc.token)) + + self._conn.try_next_auth() + + def _process_token(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS token from the server""" + + token: Optional[bytes] = packet.get_string() + packet.check_end() + + assert self._gss is not None + + try: + token = self._gss.step(token) + + if token: + self.send_packet(MSG_USERAUTH_GSSAPI_TOKEN, String(token)) + + if self._gss.complete: + self._finish() + except GSSError as exc: + if exc.token: + self.send_packet(MSG_USERAUTH_GSSAPI_ERRTOK, String(exc.token)) + + self._conn.try_next_auth() + + def _process_error(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS error from the server""" + + _ = packet.get_uint32() # major_status + _ = packet.get_uint32() # minor_status + msg = packet.get_string() + _ = packet.get_string() # lang + packet.check_end() + + self.logger.debug1('GSS error 
from server: %s', msg) + self._got_error = True + + def _process_error_token(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS error token from the server""" + + token = packet.get_string() + packet.check_end() + + assert self._gss is not None + + try: + self._gss.step(token) + except GSSError as exc: + if not self._got_error: # pragma: no cover + self.logger.debug1('GSS error from server: %s', str(exc)) + + _packet_handlers = { + MSG_USERAUTH_GSSAPI_RESPONSE: _process_response, + MSG_USERAUTH_GSSAPI_TOKEN: _process_token, + MSG_USERAUTH_GSSAPI_ERROR: _process_error, + MSG_USERAUTH_GSSAPI_ERRTOK: _process_error_token + } + + +class _ClientHostBasedAuth(ClientAuth): + """Client side implementation of host based auth""" + + async def _start(self) -> None: + """Start client host based authentication""" + + keypair, client_host, client_username = \ + await self._conn.host_based_auth_requested() + + if keypair is None: + self._conn.try_next_auth() + return + + self.logger.debug1('Trying host based auth of user %s on host %s ' + 'with %s host key', client_username, client_host, + keypair.algorithm) + + try: + await self.send_request(String(keypair.algorithm), + String(keypair.public_data), + String(client_host), + String(client_username), key=keypair) + except ValueError as exc: + self.logger.debug1('Host based auth failed: %s', str(exc)) + self._conn.try_next_auth() + + +class _ClientPublicKeyAuth(ClientAuth): + """Client side implementation of public key auth""" + + _handler_names = get_symbol_names(globals(), 'MSG_USERAUTH_PK_') + + async def _start(self) -> None: + """Start client public key authentication""" + + self._keypair = await self._conn.public_key_auth_requested() + + if self._keypair is None: + self._conn.try_next_auth() + return + + self.logger.debug1('Trying public key auth with %s key', + self._keypair.algorithm) + + await self.send_request(Boolean(False), + String(self._keypair.algorithm), + String(self._keypair.public_data)) + + async def _send_signed_request(self) -> None: + """Send signed public key request""" + + assert self._keypair is not None + + self.logger.debug1('Signing request with %s key', + self._keypair.algorithm) + + await self.send_request(Boolean(True), + String(self._keypair.algorithm), + String(self._keypair.public_data), + key=self._keypair, trivial=False) + + def _process_public_key_ok(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a public key ok response""" + + algorithm = packet.get_string() + key_data = packet.get_string() + packet.check_end() + + assert self._keypair is not None + + if (algorithm != self._keypair.algorithm or + key_data != self._keypair.public_data): + raise ProtocolError('Key mismatch') + + self.create_task(self._send_signed_request()) + + _packet_handlers = { + MSG_USERAUTH_PK_OK: _process_public_key_ok + } + + +class _ClientKbdIntAuth(ClientAuth): + """Client side implementation of keyboard-interactive auth""" + + _handler_names = get_symbol_names(globals(), 'MSG_USERAUTH_INFO_') + + async def _start(self) -> None: + """Start client keyboard interactive authentication""" + + submethods = await self._conn.kbdint_auth_requested() + + if submethods is None: + self._conn.try_next_auth() + return + + self.logger.debug1('Trying keyboard-interactive auth') + + await self.send_request(String(''), String(submethods)) + + async def _receive_challenge(self, name: str, instruction: str, lang: str, + prompts: KbdIntPrompts) -> None: + """Receive and respond to a keyboard interactive 
challenge""" + + responses = \ + await self._conn.kbdint_challenge_received(name, instruction, + lang, prompts) + + if responses is None: + self._conn.try_next_auth() + return + + self.send_packet(MSG_USERAUTH_INFO_RESPONSE, UInt32(len(responses)), + b''.join(String(r) for r in responses), + trivial=not responses) + + def _process_info_request(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a keyboard interactive authentication request""" + + name_bytes = packet.get_string() + instruction_bytes = packet.get_string() + lang_bytes = packet.get_string() + + try: + name = name_bytes.decode('utf-8') + instruction = instruction_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid keyboard interactive ' + 'info request') from None + + num_prompts = packet.get_uint32() + prompts = [] + for _ in range(num_prompts): + prompt_bytes = packet.get_string() + echo = packet.get_boolean() + + try: + prompt = prompt_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid keyboard interactive ' + 'info request') from None + + prompts.append((prompt, echo)) + + self.create_task(self._receive_challenge(name, instruction, + lang, prompts)) + + _packet_handlers = { + MSG_USERAUTH_INFO_REQUEST: _process_info_request + } + + +class _ClientPasswordAuth(ClientAuth): + """Client side implementation of password auth""" + + _handler_names = get_symbol_names(globals(), 'MSG_USERAUTH_PASSWD_') + + def __init__(self, conn: 'SSHClientConnection', method: bytes): + super().__init__(conn, method) + + self._password_change = False + + async def _start(self) -> None: + """Start client password authentication""" + + password = await self._conn.password_auth_requested() + + if password is None: + self._conn.try_next_auth() + return + + self.logger.debug1('Trying password auth') + + await self.send_request(Boolean(False), String(password), + trivial=False) + + async def _change_password(self, prompt: str, lang: str) -> None: + """Start password change""" + + result = await self._conn.password_change_requested(prompt, lang) + + if result == NotImplemented: + # Password change not supported - move on to the next auth method + self._conn.try_next_auth() + return + + self.logger.debug1('Trying to chsnge password') + + old_password, new_password = cast(PasswordChangeResponse, result) + + self._password_change = True + + await self.send_request(Boolean(True), + String(old_password.encode('utf-8')), + String(new_password.encode('utf-8')), + trivial=False) + + def auth_succeeded(self) -> None: + if self._password_change: + self._password_change = False + self._conn.password_changed() + + def auth_failed(self) -> None: + if self._password_change: + self._password_change = False + self._conn.password_change_failed() + + def _process_password_change(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a password change request""" + + prompt_bytes = packet.get_string() + lang_bytes = packet.get_string() + + try: + prompt = prompt_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid password change request') from None + + self.auth_failed() + self.create_task(self._change_password(prompt, lang)) + + _packet_handlers = { + MSG_USERAUTH_PASSWD_CHANGEREQ: _process_password_change + } + + +class ServerAuth(Auth): + """Parent class for server authentication""" + + _conn: 'SSHServerConnection' + + def __init__(self, conn: 
'SSHServerConnection', username: str, + method: bytes, packet: SSHPacket): + self._username = username + self._method = method + + super().__init__(conn, self._start(packet)) + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether this authentication method is supported""" + + raise NotImplementedError + + async def _start(self, packet: SSHPacket) -> None: + """Abstract method for starting server authentication""" + + # Provided by subclass + raise NotImplementedError + + def send_failure(self, partial_success: bool = False) -> None: + """Send a user authentication failure response""" + + self._conn.send_userauth_failure(partial_success) + + def send_success(self) -> None: + """Send a user authentication success response""" + + self._conn.send_userauth_success() + + +class _ServerNullAuth(ServerAuth): + """Server side implementation of null auth""" + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return that null authentication is never a supported auth mode""" + + return False + + async def _start(self, packet: SSHPacket) -> None: + """Supported always returns false, so we never get here""" + + +class _ServerGSSKexAuth(ServerAuth): + """Server side implementation of GSS key exchange auth""" + + def __init__(self, conn: 'SSHServerConnection', username: str, + method: bytes, packet: SSHPacket): + super().__init__(conn, username, method, packet) + + self._gss = conn.get_gss_context() + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether GSS key exchange authentication is supported""" + + return conn.gss_kex_auth_supported() + + async def _start(self, packet: SSHPacket) -> None: + """Start server GSS key exchange authentication""" + + mic = packet.get_string() + packet.check_end() + + self.logger.debug1('Trying GSS key exchange auth') + + data = self._conn.get_userauth_request_data(self._method) + + if (self._gss.complete and self._gss.verify(data, mic) and + (await self._conn.validate_gss_principal(self._username, + self._gss.user, + self._gss.host))): + self.send_success() + else: + self.send_failure() + + +class _ServerGSSMICAuth(ServerAuth): + """Server side implementation of GSS MIC auth""" + + _handler_names = get_symbol_names(globals(), 'MSG_USERAUTH_GSSAPI_') + + def __init__(self, conn: 'SSHServerConnection', username: str, + method: bytes, packet: SSHPacket) -> None: + super().__init__(conn, username, method, packet) + + self._gss = conn.get_gss_context() + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether GSS MIC authentication is supported""" + + return conn.gss_mic_auth_supported() + + async def _start(self, packet: SSHPacket) -> None: + """Start server GSS MIC authentication""" + + mechs = set() + + n = packet.get_uint32() + for _ in range(n): + mechs.add(packet.get_string()) + packet.check_end() + + match = None + + for mech in self._gss.mechs: + if mech in mechs: + match = mech + break + + if not match: + self.send_failure() + return + + self.logger.debug1('Trying GSS MIC auth') + self._gss.reset() + + self.send_packet(MSG_USERAUTH_GSSAPI_RESPONSE, String(match)) + + async def _finish(self) -> None: + """Finish server GSS MIC authentication""" + + if (await self._conn.validate_gss_principal(self._username, + self._gss.user, + self._gss.host)): + self.send_success() + else: + self.send_failure() + + def _process_token(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS token from the 
client""" + + token: Optional[bytes] = packet.get_string() + packet.check_end() + + try: + token = self._gss.step(token) + + if token: + self.send_packet(MSG_USERAUTH_GSSAPI_TOKEN, String(token)) + except GSSError as exc: + self.send_packet(MSG_USERAUTH_GSSAPI_ERROR, UInt32(exc.maj_code), + UInt32(exc.min_code), String(str(exc)), + String(DEFAULT_LANG)) + + if exc.token: + self.send_packet(MSG_USERAUTH_GSSAPI_ERRTOK, String(exc.token)) + + self.send_failure() + + def _process_exchange_complete(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS exchange complete message from the client""" + + packet.check_end() + + if self._gss.complete and not self._gss.provides_integrity: + self.create_task(self._finish()) + else: + self.send_failure() + + def _process_error_token(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS error token from the client""" + + token = packet.get_string() + packet.check_end() + + try: + self._gss.step(token) + except GSSError as exc: + self.logger.debug1('GSS error from client: %s', str(exc)) + + def _process_mic(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS MIC from the client""" + + mic = packet.get_string() + packet.check_end() + + data = self._conn.get_userauth_request_data(self._method) + + if (self._gss.complete and self._gss.provides_integrity and + self._gss.verify(data, mic)): + self.create_task(self._finish()) + else: + self.send_failure() + + _packet_handlers = { + MSG_USERAUTH_GSSAPI_TOKEN: _process_token, + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: _process_exchange_complete, + MSG_USERAUTH_GSSAPI_ERRTOK: _process_error_token, + MSG_USERAUTH_GSSAPI_MIC: _process_mic + } + + +class _ServerHostBasedAuth(ServerAuth): + """Server side implementation of host based auth""" + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether host based authentication is supported""" + + return conn.host_based_auth_supported() + + async def _start(self, packet: SSHPacket) -> None: + """Start server host based authentication""" + + algorithm = packet.get_string() + key_data = packet.get_string() + client_host_bytes = packet.get_string() + client_username_bytes = packet.get_string() + msg = packet.get_consumed_payload() + signature = packet.get_string() + + packet.check_end() + + try: + client_host = client_host_bytes.decode('utf-8') + client_username = saslprep(client_username_bytes.decode('utf-8')) + except (UnicodeDecodeError, SASLPrepError): + raise ProtocolError('Invalid host-based auth request') from None + + self.logger.debug1('Verifying host based auth of user %s ' + 'on host %s with %s host key', client_username, + client_host, algorithm) + + if (await self._conn.validate_host_based_auth(self._username, + key_data, client_host, + client_username, + msg, signature)): + self.send_success() + else: + self.send_failure() + + +class _ServerPublicKeyAuth(ServerAuth): + """Server side implementation of public key auth""" + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether public key authentication is supported""" + + return conn.public_key_auth_supported() + + async def _start(self, packet: SSHPacket) -> None: + """Start server public key authentication""" + + sig_present = packet.get_boolean() + algorithm = packet.get_string() + key_data = packet.get_string() + + if sig_present: + msg = packet.get_consumed_payload() + signature = packet.get_string() + else: + msg = b'' + signature = b'' + + 
packet.check_end() + + if sig_present: + self.logger.debug1('Verifying request with %s key', algorithm) + else: + self.logger.debug1('Trying public key auth with %s key', algorithm) + + if (await self._conn.validate_public_key(self._username, key_data, + msg, signature)): + if sig_present: + self.send_success() + else: + self.send_packet(MSG_USERAUTH_PK_OK, String(algorithm), + String(key_data)) + else: + self.send_failure() + + +class _ServerKbdIntAuth(ServerAuth): + """Server side implementation of keyboard-interactive auth""" + + _handler_names = get_symbol_names(globals(), 'MSG_USERAUTH_INFO_') + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether keyboard interactive authentication is supported""" + + return conn.kbdint_auth_supported() + + async def _start(self, packet: SSHPacket) -> None: + """Start server keyboard interactive authentication""" + + lang_bytes = packet.get_string() + submethods_bytes = packet.get_string() + packet.check_end() + + try: + lang = lang_bytes.decode('ascii') + submethods = submethods_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid keyboard interactive ' + 'auth request') from None + + self.logger.debug1('Trying keyboard-interactive auth') + + challenge = await self._conn.get_kbdint_challenge(self._username, + lang, submethods) + self._send_challenge(challenge) + + def _send_challenge(self, challenge: KbdIntChallenge) -> None: + """Send a keyboard interactive authentication request""" + + if isinstance(challenge, (tuple, list)): + name, instruction, lang, prompts = challenge + + num_prompts = len(prompts) + prompts_bytes = (String(prompt) + Boolean(echo) + for prompt, echo in prompts) + + self.send_packet(MSG_USERAUTH_INFO_REQUEST, String(name), + String(instruction), String(lang), + UInt32(num_prompts), *prompts_bytes) + elif challenge: + self.send_success() + else: + self.send_failure() + + async def _validate_response(self, responses: KbdIntResponse) -> None: + """Validate a keyboard interactive authentication response""" + + next_challenge = \ + await self._conn.validate_kbdint_response(self._username, responses) + self._send_challenge(next_challenge) + + def _process_info_response(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a keyboard interactive authentication response""" + + num_responses = packet.get_uint32() + responses = [] + for _ in range(num_responses): + response_bytes = packet.get_string() + + try: + response = response_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid keyboard interactive ' + 'info response') from None + + responses.append(response) + + packet.check_end() + + self.create_task(self._validate_response(responses)) + + _packet_handlers = { + MSG_USERAUTH_INFO_RESPONSE: _process_info_response + } + + +class _ServerPasswordAuth(ServerAuth): + """Server side implementation of password auth""" + + @classmethod + def supported(cls, conn: 'SSHServerConnection') -> bool: + """Return whether password authentication is supported""" + + return conn.password_auth_supported() + + async def _start(self, packet: SSHPacket) -> None: + """Start server password authentication""" + + password_change = packet.get_boolean() + password_bytes = packet.get_string() + new_password_bytes = packet.get_string() if password_change else b'' + packet.check_end() + + try: + password = saslprep(password_bytes.decode('utf-8')) + new_password = saslprep(new_password_bytes.decode('utf-8')) + except (UnicodeDecodeError, 
SASLPrepError): + raise ProtocolError('Invalid password auth request') from None + + try: + if password_change: + self.logger.debug1('Trying to change password') + + result = await self._conn.change_password(self._username, + password, + new_password) + else: + self.logger.debug1('Trying password auth') + + result = \ + await self._conn.validate_password(self._username, password) + + if result: + self.send_success() + else: + self.send_failure() + except PasswordChangeRequired as exc: + self.send_packet(MSG_USERAUTH_PASSWD_CHANGEREQ, + String(exc.prompt), String(exc.lang)) + + +def register_auth_method(alg: bytes, client_handler: Type[ClientAuth], + server_handler: Type[ServerAuth]) -> None: + """Register an authentication method""" + + _auth_methods.append(alg) + _client_auth_handlers[alg] = client_handler + _server_auth_handlers[alg] = server_handler + + +def get_supported_client_auth_methods() -> Sequence[bytes]: + """Return a list of supported client auth methods""" + + return [method for method in _client_auth_handlers + if method != b'none'] + + +def lookup_client_auth(conn: 'SSHClientConnection', + method: bytes) -> Optional[ClientAuth]: + """Look up the client authentication method to use""" + + if method in _auth_methods: + return _client_auth_handlers[method](conn, method) + else: + return None + + +def get_supported_server_auth_methods(conn: 'SSHServerConnection') -> \ + Sequence[bytes]: + """Return a list of supported server auth methods""" + + auth_methods = [] + + for method in _auth_methods: + if _server_auth_handlers[method].supported(conn): + auth_methods.append(method) + + return auth_methods + + +def lookup_server_auth(conn: 'SSHServerConnection', username: str, + method: bytes, packet: SSHPacket) -> \ + Optional[ServerAuth]: + """Look up the server authentication method to use""" + + handler = _server_auth_handlers.get(method) + + if handler and handler.supported(conn): + return handler(conn, username, method, packet) + else: + conn.send_userauth_failure(False) + return None + + +_auth_method_list = ( + (b'none', _ClientNullAuth, _ServerNullAuth), + (b'gssapi-keyex', _ClientGSSKexAuth, _ServerGSSKexAuth), + (b'gssapi-with-mic', _ClientGSSMICAuth, _ServerGSSMICAuth), + (b'hostbased', _ClientHostBasedAuth, _ServerHostBasedAuth), + (b'publickey', _ClientPublicKeyAuth, _ServerPublicKeyAuth), + (b'keyboard-interactive', _ClientKbdIntAuth, _ServerKbdIntAuth), + (b'password', _ClientPasswordAuth, _ServerPasswordAuth) +) + +for _args in _auth_method_list: + register_auth_method(*_args) diff --git a/venv/lib/python3.12/site-packages/asyncssh/auth_keys.py b/venv/lib/python3.12/site-packages/asyncssh/auth_keys.py new file mode 100644 index 0000000..4a1c517 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/auth_keys.py @@ -0,0 +1,350 @@ +# Copyright (c) 2015-2021 by Ron Frederick and others.
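
The `register_auth_method` calls above build a simple ordered registry: `_auth_methods` records the order in which methods are offered, while two dicts map each method name to its client-side and server-side handler classes. A minimal standalone sketch of the same pattern follows; all names in it are illustrative, not part of asyncssh's public API.

    # Minimal sketch of the ordered handler-registry pattern used by
    # register_auth_method() above. All names here are illustrative.
    from typing import Dict, List, Optional, Type


    class Handler:
        """Base class for per-method handlers"""


    class PasswordHandler(Handler):
        """Handler for the hypothetical 'password' method"""


    _methods: List[bytes] = []                 # preference order
    _handlers: Dict[bytes, Type[Handler]] = {}


    def register(method: bytes, handler: Type[Handler]) -> None:
        """Register a handler class under a method name"""

        _methods.append(method)
        _handlers[method] = handler


    def lookup(method: bytes) -> Optional[Handler]:
        """Instantiate the handler for a method, or return None if unknown"""

        cls = _handlers.get(method)
        return cls() if cls else None


    register(b'password', PasswordHandler)
    assert isinstance(lookup(b'password'), PasswordHandler)

Keeping the ordered list separate from the lookup dicts is what lets `get_supported_server_auth_methods` above advertise methods in a fixed preference order while still doing O(1) handler lookups.
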
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Parser for SSH authorized_keys files""" + +from typing import Dict, List, Mapping, Optional, Sequence +from typing import Set, Tuple, Union, cast + +try: + # pylint: disable=unused-import + from .crypto import X509Name, X509NamePattern + _x509_available = True +except ImportError: # pragma: no cover + _x509_available = False + +from .misc import ip_address, read_file +from .pattern import HostPatternList, WildcardPatternList +from .public_key import KeyImportError, SSHKey +from .public_key import SSHX509Certificate, SSHX509CertificateChain +from .public_key import import_public_key, import_certificate +from .public_key import import_certificate_subject + + +_EntryOptions = Mapping[str, object] + +class _SSHAuthorizedKeyEntry: + """An entry in an SSH authorized_keys list""" + + def __init__(self, line: str): + self.key: Optional[SSHKey] = None + self.cert: Optional[SSHX509Certificate] = None + self.options: Dict[str, object] = {} + + try: + self._import_key_or_cert(line) + return + except KeyImportError: + pass + + line = self._parse_options(line) + self._import_key_or_cert(line) + + def _import_key_or_cert(self, line: str) -> None: + """Import key or certificate in this entry""" + + try: + self.key = import_public_key(line) + return + except KeyImportError: + pass + + try: + self.cert = cast(SSHX509Certificate, import_certificate(line)) + + if ('cert-authority' in self.options and + self.cert.subject != self.cert.issuer): + raise ValueError('X.509 cert-authority entries must ' + 'contain a root CA certificate') + + return + except KeyImportError: + pass + + if 'cert-authority' not in self.options: + try: + self.key = None + self.cert = None + self._add_subject('subject', import_certificate_subject(line)) + return + except KeyImportError: + pass + + raise KeyImportError('Unrecognized key, certificate, or subject') + + def _set_string(self, option: str, value: str) -> None: + """Set an option with a string value""" + + self.options[option] = value + + def _add_environment(self, option: str, value: str) -> None: + """Add an environment key/value pair""" + + if value.startswith('=') or '=' not in value: + raise ValueError('Invalid environment entry in authorized_keys') + + name, value = value.split('=', 1) + cast(Dict[str, str], self.options.setdefault(option, {}))[name] = value + + def _add_from(self, option: str, value: str) -> None: + """Add a from host pattern""" + + from_patterns = cast(List[HostPatternList], + self.options.setdefault(option, [])) + from_patterns.append(HostPatternList(value)) + + def _add_permitopen(self, option: str, value: str) -> None: + """Add a permitopen host/port pair""" + + try: + host, port_str = value.rsplit(':', 1) + + if host.startswith('[') and host.endswith(']'): + host = host[1:-1] + + port = None if port_str == '*' else int(port_str) + except: + raise ValueError('Illegal permitopen value: %s' % value) from 
None + + permitted_opens = cast(Set[Tuple[str, Optional[int]]], + self.options.setdefault(option, set())) + permitted_opens.add((host, port)) + + def _add_principals(self, option: str, value: str) -> None: + """Add a principals wildcard pattern list""" + + principal_patterns = cast(List[WildcardPatternList], + self.options.setdefault(option, [])) + principal_patterns.append(WildcardPatternList(value)) + + def _add_subject(self, option: str, value: str) -> None: + """Add an X.509 subject pattern""" + + if _x509_available: # pragma: no branch + subject_patterns = cast(List[X509NamePattern], + self.options.setdefault(option, [])) + subject_patterns.append(X509NamePattern(value)) + + _handlers = { + 'command': _set_string, + 'environment': _add_environment, + 'from': _add_from, + 'permitopen': _add_permitopen, + 'principals': _add_principals, + 'subject': _add_subject + } + + def _add_option(self) -> None: + """Add an option value""" + + if self._option.startswith('='): + raise ValueError('Missing option name in authorized_keys') + + if '=' in self._option: + option, value = self._option.split('=', 1) + + handler = self._handlers.get(option) + if handler: + handler(self, option, value) + else: + values = cast(List[str], self.options.setdefault(option, [])) + values.append(value) + else: + self.options[self._option] = True + + def _parse_options(self, line: str) -> str: + """Parse options in this entry""" + + self._option = '' + + idx = 0 + quoted = False + escaped = False + + for idx, ch in enumerate(line): + if escaped: + self._option += ch + escaped = False + elif ch == '\\': + escaped = True + elif ch == '"': + quoted = not quoted + elif quoted: + self._option += ch + elif ch in ' \t': + break + elif ch == ',': + self._add_option() + self._option = '' + else: + self._option += ch + + self._add_option() + + if quoted: + raise ValueError('Unbalanced quote in authorized_keys') + elif escaped: + raise ValueError('Unbalanced backslash in authorized_keys') + + return line[idx:].strip() + + def match_options(self, client_host: str, client_addr: str, + cert_principals: Optional[Sequence[str]], + cert_subject: Optional['X509Name'] = None) -> bool: + """Match "from", "principals" and "subject" options in entry""" + + from_patterns = cast(List[HostPatternList], self.options.get('from')) + + if from_patterns: + client_ip = ip_address(client_addr) + + if not all(pattern.matches(client_host, client_addr, client_ip) + for pattern in from_patterns): + return False + + principal_patterns = cast(List[WildcardPatternList], + self.options.get('principals')) + + if cert_principals is not None and principal_patterns is not None: + if not all(any(pattern.matches(principal) + for principal in cert_principals) + for pattern in principal_patterns): + return False + + subject_patterns = cast(List['X509NamePattern'], + self.options.get('subject')) + + if cert_subject is not None and subject_patterns is not None: + if not all(pattern.matches(cert_subject) + for pattern in subject_patterns): + return False + + return True + + +class SSHAuthorizedKeys: + """An SSH authorized keys list""" + + def __init__(self, authorized_keys: Optional[str] = None): + self._user_entries: List[_SSHAuthorizedKeyEntry] = [] + self._ca_entries: List[_SSHAuthorizedKeyEntry] = [] + self._x509_entries: List[_SSHAuthorizedKeyEntry] = [] + + if authorized_keys: + self.load(authorized_keys) + + def load(self, authorized_keys: str) -> None: + """Load authorized keys data into this object""" + + for line in authorized_keys.splitlines(): + line = 
line.strip() + if not line or line.startswith('#'): + continue + + try: + entry = _SSHAuthorizedKeyEntry(line) + except KeyImportError: + continue + + if entry.key: + if 'cert-authority' in entry.options: + self._ca_entries.append(entry) + else: + self._user_entries.append(entry) + else: + self._x509_entries.append(entry) + + if (not self._user_entries and not self._ca_entries and + not self._x509_entries): + raise ValueError('No valid entries found') + + def validate(self, key: SSHKey, client_host: str, client_addr: str, + cert_principals: Optional[Sequence[str]] = None, + ca: bool = False) -> Optional[Mapping[str, object]]: + """Return whether a public key or CA is valid for authentication""" + + for entry in self._ca_entries if ca else self._user_entries: + if (entry.key == key and + entry.match_options(client_host, client_addr, + cert_principals)): + return entry.options + + return None + + def validate_x509(self, cert: SSHX509CertificateChain, client_host: str, + client_addr: str) -> Tuple[Optional[_EntryOptions], + Optional[SSHX509Certificate]]: + """Return whether an X.509 certificate is valid for authentication""" + + for entry in self._x509_entries: + if (entry.cert and 'cert-authority' not in entry.options and + (cert.key != entry.cert.key or + cert.subject != entry.cert.subject)): + continue # pragma: no cover (work around bug in coverage tool) + + if entry.match_options(client_host, client_addr, + cert.user_principals, cert.subject): + return entry.options, entry.cert + + return None, None + +def import_authorized_keys(data: str) -> SSHAuthorizedKeys: + """Import SSH authorized keys + + This function imports public keys and associated options in + OpenSSH authorized keys format. + + :param data: + The key data to import. + :type data: `str` + + :returns: An :class:`SSHAuthorizedKeys` object + + """ + + return SSHAuthorizedKeys(data) + + +def read_authorized_keys(filelist: Union[str, Sequence[str]]) -> \ + SSHAuthorizedKeys: + """Read SSH authorized keys from a file or list of files + + This function reads public keys and associated options in + OpenSSH authorized_keys format from a file or list of files. + + :param filelist: + The file or list of files to read the keys from. + :type filelist: `str` or `list` of `str` + + :returns: An :class:`SSHAuthorizedKeys` object + + """ + + authorized_keys = SSHAuthorizedKeys() + + if isinstance(filelist, str): + files: Sequence[str] = [filelist] + else: + files = filelist + + for filename in files: + authorized_keys.load(read_file(filename, 'r')) + + return authorized_keys diff --git a/venv/lib/python3.12/site-packages/asyncssh/channel.py b/venv/lib/python3.12/site-packages/asyncssh/channel.py new file mode 100644 index 0000000..7744c06 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/channel.py @@ -0,0 +1,2114 @@ +# Copyright (c) 2013-2023 by Ron Frederick and others.
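
Stepping back to the `auth_keys` module just shown: `import_authorized_keys` and `read_authorized_keys` are its public entry points, and `SSHAuthorizedKeys.validate` is what a server consults per key. A usage sketch follows; the file paths, hostname, and address in it are hypothetical placeholders.

    # Usage sketch for the authorized_keys parser shown above.
    # The file paths, 'client.example.com', and '198.51.100.7' are
    # hypothetical values used only for illustration.
    import asyncssh

    # Parse one or more authorized_keys files into an SSHAuthorizedKeys object
    auth_keys = asyncssh.read_authorized_keys('/etc/ssh/authorized_keys')

    # Check whether a given public key may authenticate from a client host;
    # validate() returns the matching entry's options, or None on no match
    key = asyncssh.read_public_key('/etc/ssh/id_ed25519.pub')
    options = auth_keys.validate(key, 'client.example.com', '198.51.100.7')

    if options is not None:
        print('key accepted, options:', dict(options))

Per the `load` logic above, entries carrying the `cert-authority` option are kept in a separate list, so the same `validate` call with `ca=True` checks CA keys rather than plain user keys.
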
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH channel and session handlers""" + +import asyncio +import binascii +import codecs +import inspect +import re +import signal as _signal +from types import MappingProxyType +from typing import TYPE_CHECKING, Any, AnyStr, Awaitable, Callable +from typing import Dict, Generic, Iterable, List, Mapping, Optional +from typing import Set, Tuple, Union, cast + +from . import constants +from .constants import DEFAULT_LANG, EXTENDED_DATA_STDERR +from .constants import MSG_CHANNEL_OPEN, MSG_CHANNEL_WINDOW_ADJUST +from .constants import MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA +from .constants import MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MSG_CHANNEL_REQUEST +from .constants import MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE +from .constants import OPEN_CONNECT_FAILED, PTY_OP_RESERVED, PTY_OP_END +from .constants import OPEN_REQUEST_X11_FORWARDING_FAILED +from .constants import OPEN_REQUEST_PTY_FAILED, OPEN_REQUEST_SESSION_FAILED + +from .editor import SSHLineEditorChannel, SSHLineEditorSession + +from .logging import SSHLogger + +from .misc import ChannelOpenError, MaybeAwait, ProtocolError +from .misc import get_symbol_names, map_handler_name + +from .packet import Boolean, Byte, String, UInt32, SSHPacket, SSHPacketHandler + +from .session import TermModes, TermSize, TermSizeArg +from .session import SSHSession, SSHClientSession, SSHServerSession +from .session import SSHTCPSession, SSHUNIXSession +from .session import SSHSessionFactory, SSHClientSessionFactory +from .session import SSHTCPSessionFactory, SSHUNIXSessionFactory + +from .stream import DataType + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .connection import SSHConnection, SSHClientConnection + from .connection import SSHServerConnection + + +_const_dict: Mapping[str, int] = constants.__dict__ +_pty_mode_names = get_symbol_names(_const_dict, 'PTY_', 4) +_data_type_names = get_symbol_names(_const_dict, 'EXTENDED_DATA_', 14) + +_signal_regex = re.compile(r'SIG[^_]') +_signal_numbers = {k[3:]: int(v) for (k, v) in vars(_signal).items() + if _signal_regex.match(k)} +_signal_names = {v: k for (k, v) in _signal_numbers.items()} + +_ExitSignal = Tuple[str, bool, str, str] +_RequestHandler = Optional[Callable[[SSHPacket], Optional[bool]]] + + +class SSHChannel(Generic[AnyStr], SSHPacketHandler): + """Parent class for SSH channels""" + + _handler_names = get_symbol_names(globals(), 'MSG_CHANNEL_') + + _read_datatypes: Set[int] = set() + _write_datatypes: Set[int] = set() + + def __init__(self, conn: 'SSHConnection', + loop: asyncio.AbstractEventLoop, encoding: Optional[str], + errors: str, window: int, max_pktsize: int): + """Initialize an SSH channel + + If encoding is set, data sent and received will be in the form + of strings, converted on the wire to bytes using the specified + encoding. If encoding is None, data sent and received must be + provided as bytes. 
+ + Window specifies the initial receive window size. + + Max_pktsize specifies the maximum length of a single data packet. + + """ + + self._conn: Optional['SSHConnection'] = conn + self._loop = loop + self._session: Optional[SSHSession[AnyStr]] = None + self._extra: Dict[str, object] = {'connection': conn} + self._encoding: Optional[str] + self._errors: str + self._send_high_water: int + self._send_low_water: int + + self._env: Dict[str, str] = {} + self._command: Optional[str] = None + self._subsystem: Optional[str] = None + + self._send_state = 'closed' + self._send_chan: Optional[int] = None + self._send_window: int = 0 + self._send_pktsize: int = 0 + self._send_paused = False + self._send_buf: List[Tuple[bytearray, DataType]] = [] + self._send_buf_len = 0 + + self._recv_state = 'closed' + self._init_recv_window = window + self._recv_window = window + self._recv_pktsize = max_pktsize + self._recv_paused: Union[bool, str] = 'starting' + self._recv_buf: List[Tuple[bytes, DataType]] = [] + + self._request_queue: List[Tuple[str, SSHPacket, bool]] = [] + + self._open_waiter: Optional[asyncio.Future[SSHPacket]] = None + self._request_waiters: List[asyncio.Future[bool]] = [] + + self._close_event = asyncio.Event() + + self._recv_chan: Optional[int] = conn.add_channel(self) + + self._logger = conn.logger.get_child(context='chan=%d' % + self._recv_chan) + + self.set_encoding(encoding, errors) + self.set_write_buffer_limits() + + @property + def logger(self) -> SSHLogger: + """A logger associated with this channel""" + + return self._logger + + def get_connection(self) -> 'SSHConnection': + """Return the connection used by this channel""" + + assert self._conn is not None + return self._conn + + def get_loop(self) -> asyncio.AbstractEventLoop: + """Return the event loop used by this channel""" + + return self._loop + + def get_encoding(self) -> Tuple[Optional[str], str]: + """Return the encoding used by this channel""" + + return self._encoding, self._errors + + def set_encoding(self, encoding: Optional[str], + errors: str = 'strict') -> None: + """Set the encoding on this channel""" + + self._encoding = encoding + self._errors = errors + + if encoding: + self._encoder: Optional[codecs.IncrementalEncoder] = \ + codecs.getincrementalencoder(encoding)(errors) + self._decoder: Optional[codecs.IncrementalDecoder] = \ + codecs.getincrementaldecoder(encoding)(errors) + else: + self._encoder = None + self._decoder = None + + def get_recv_window(self) -> int: + """Return the configured receive window for this channel""" + + return self._init_recv_window + + def get_read_datatypes(self) -> Set[int]: + """Return the legal read data types for this channel""" + + return self._read_datatypes + + def get_write_datatypes(self) -> Set[int]: + """Return the legal write data types for this channel""" + + return self._write_datatypes + + def _cleanup(self, exc: Optional[Exception] = None) -> None: + """Clean up this channel""" + + if self._open_waiter: + if not self._open_waiter.cancelled(): # pragma: no branch + self._open_waiter.set_exception( + ChannelOpenError(OPEN_CONNECT_FAILED, + 'SSH connection closed')) + + self._open_waiter = None + + if self._request_waiters: + for waiter in self._request_waiters: + if not waiter.cancelled(): # pragma: no cover + if exc: + waiter.set_exception(exc) + else: + waiter.set_result(False) + + self._request_waiters = [] + + if self._session is not None: + self._session.connection_lost(exc) + self._session = None + + self._close_event.set() + + if self._conn: # pragma: no 
branch + self.logger.info('Channel closed%s', + ': ' + str(exc) if exc else '') + + self._conn.detach_x11_listener(self) + + assert self._recv_chan is not None + self._conn.remove_channel(self._recv_chan) + self._send_chan = None + self._recv_chan = None + self._conn = None + + def _close_send(self) -> None: + """Discard unsent data and close the channel for sending""" + + # Discard unsent data + self._send_buf = [] + self._send_buf_len = 0 + + if self._send_state != 'closed': + self.send_packet(MSG_CHANNEL_CLOSE) + self._send_chan = None + self._send_state = 'closed' + + def _discard_recv(self) -> None: + """Discard unreceived data and clean up if close received""" + + # Discard unreceived data + self._recv_buf = [] + self._recv_paused = False + + # If recv is close_pending, we know send is already closed + if self._recv_state == 'close_pending': + self._recv_state = 'closed' + self._loop.call_soon(self._cleanup) + + async def _start_reading(self) -> None: + """Start processing data on a new connection""" + + # If owner of the channel didn't explicitly pause it at + # startup, begin processing incoming data. + + if self._recv_paused == 'starting': + self.logger.debug2('Reading from channel started') + self._recv_paused = False + self._flush_recv_buf() + + def _pause_resume_writing(self) -> None: + """Pause or resume writing based on send buffer low/high water marks""" + + if self._send_paused: + if self._send_buf_len <= self._send_low_water: + self.logger.debug2('Writing from session resumed') + + self._send_paused = False + assert self._session is not None + self._session.resume_writing() + else: + if self._send_buf_len > self._send_high_water: + self.logger.debug2('Writing from session paused') + + self._send_paused = True + assert self._session is not None + self._session.pause_writing() + + def _flush_send_buf(self) -> None: + """Flush as much data in send buffer as the send window allows""" + + while self._send_buf and self._send_window: + pktsize = min(self._send_window, self._send_pktsize) + buf, datatype = self._send_buf[0] + + if len(buf) > pktsize: + data = buf[:pktsize] + del buf[:pktsize] + else: + data = buf + del self._send_buf[0] + + self._send_buf_len -= len(data) + self._send_window -= len(data) + + if datatype is None: + self.send_packet(MSG_CHANNEL_DATA, String(data)) + else: + self.send_packet(MSG_CHANNEL_EXTENDED_DATA, + UInt32(datatype), String(data)) + + self._pause_resume_writing() + + if not self._send_buf: + if self._send_state == 'eof_pending': + self.send_packet(MSG_CHANNEL_EOF) + self._send_state = 'eof' + elif self._send_state == 'close_pending': + self._close_send() + + def _flush_recv_buf(self, exc: Optional[Exception] = None) -> None: + """Flush as much data in the recv buffer as the application allows""" + + while self._recv_buf and not self._recv_paused: + self._deliver_data(*self._recv_buf.pop(0)) + + if not self._recv_buf and self._recv_paused != 'starting': + if self._encoding and not exc and \ + self._recv_state in ('eof_pending', 'close_pending'): + try: + assert self._decoder is not None + self._decoder.decode(b'', True) + except UnicodeDecodeError as unicode_exc: + raise ProtocolError(str(unicode_exc)) from None + + if self._recv_state == 'eof_pending': + self._recv_state = 'eof' + + assert self._session is not None + + if (not self._session.eof_received() and + self._send_state == 'open'): + self.write_eof() + + if not self._recv_buf and self._recv_state == 'close_pending': + self._recv_state = 'closed' + self._loop.call_soon(self._cleanup, exc) 
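
The pause/resume logic in `_pause_resume_writing` above is classic watermark flow control with hysteresis: writing pauses only once the buffered byte count exceeds the high-water mark, and resumes only after the buffer drains to the low-water mark, so a sender hovering near a single threshold does not flap between states. A self-contained sketch of the same idea, with illustrative numbers:

    # Watermark (hysteresis) flow control as used by _pause_resume_writing
    # above. Standalone sketch; the byte counts are illustrative.
    class WatermarkBuffer:
        def __init__(self, low: int, high: int):
            self.low, self.high = low, high
            self.buffered = 0
            self.paused = False

        def adjust(self, delta: int) -> None:
            """Add or drain bytes, then re-evaluate the pause state"""

            self.buffered += delta

            if self.paused:
                if self.buffered <= self.low:
                    self.paused = False       # resume only at/below low water
            elif self.buffered > self.high:
                self.paused = True            # pause only above high water


    buf = WatermarkBuffer(low=16384, high=65536)
    buf.adjust(70000)
    assert buf.paused
    buf.adjust(-40000)    # 30000 buffered: still paused (hysteresis)
    assert buf.paused
    buf.adjust(-20000)    # 10000 buffered: below low water, resume
    assert not buf.paused
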
+ + def _deliver_data(self, data: bytes, datatype: DataType) -> None: + """Deliver incoming data to the session""" + + self._recv_window -= len(data) + + if self._recv_window < self._init_recv_window / 2: + adjust = self._init_recv_window - self._recv_window + + self.logger.debug2('Sending window adjust of %d bytes, ' + 'new window %d', adjust, self._init_recv_window) + + self.send_packet(MSG_CHANNEL_WINDOW_ADJUST, UInt32(adjust)) + self._recv_window = self._init_recv_window + + if self._encoding: + try: + assert self._decoder is not None + decoded_data = cast(AnyStr, self._decoder.decode(data)) + except UnicodeDecodeError as unicode_exc: + raise ProtocolError(str(unicode_exc)) from None + else: + decoded_data = cast(AnyStr, data) + + if self._session is not None: + self._session.data_received(decoded_data, datatype) + + def _accept_data(self, data: bytes, datatype: DataType = None) -> None: + """Accept new data on the channel + + This method accepts new data on the channel, immediately + delivering it to the session if it hasn't paused reading. + If it has paused, data is buffered until reading is resumed. + + Data sent after the channel has been closed by the session + is dropped. + + """ + + if not data: + return + + if self._send_state in {'close_pending', 'closed'}: + return + + datalen = len(data) + + if datalen > self._recv_window: + raise ProtocolError('Window exceeded') + + if datatype: + typename = ' from %s' % _data_type_names[datatype] + else: + typename = '' + + self.logger.debug2('Received %d data byte%s%s', datalen, + 's' if datalen > 1 else '', typename) + + if self._recv_paused: + self._recv_buf.append((data, datatype)) + else: + self._deliver_data(data, datatype) + + def _service_next_request(self) -> None: + """Process next item on channel request queue""" + + request, packet, _ = self._request_queue[0] + + name = '_process_' + map_handler_name(request) + '_request' + handler = cast(_RequestHandler, getattr(self, name, None)) + + if handler: + result = cast(Optional[bool], handler(packet)) + else: + self.logger.debug1('Received unknown channel request: %s', request) + result = False + + if result is not None: + self._report_response(result) + + def _report_response(self, result: bool) -> None: + """Report back the response to a previously issued channel request""" + + request, _, want_reply = self._request_queue.pop(0) + + if want_reply and self._send_state not in {'close_pending', 'closed'}: + if result: + self.send_packet(MSG_CHANNEL_SUCCESS) + else: + self.send_packet(MSG_CHANNEL_FAILURE) + + if result and request in {'shell', 'exec', 'subsystem'}: + assert self._session is not None + self._session.session_started() + self.resume_reading() + + if self._request_queue: + self._service_next_request() + + def process_connection_close(self, exc: Optional[Exception]) -> None: + """Process the SSH connection closing""" + + self.logger.info('Closing channel due to connection close') + + self._send_state = 'closed' + self._close_send() + self._cleanup(exc) + + def process_open(self, send_chan: int, send_window: int, send_pktsize: int, + session: MaybeAwait[SSHSession[AnyStr]]) -> None: + """Process a channel open request""" + + self._send_chan = send_chan + self._send_window = send_window + self._send_pktsize = send_pktsize + + self.logger.debug2(' Initial send window %d, packet size %d', + send_window, send_pktsize) + + assert self._conn is not None + self._conn.create_task(self._finish_open_request(session), self.logger) + + def _wrap_session(self, session: 
SSHSession[AnyStr]) -> \ + Tuple['SSHChannel[AnyStr]', SSHSession[AnyStr]]: + """Hook to optionally wrap channel and session objects""" + + # By default, return the original channel and session objects + return self, session + + async def _finish_open_request( + self, result: MaybeAwait[SSHSession[AnyStr]]) -> None: + """Finish processing a channel open request""" + + try: + if inspect.isawaitable(result): + session = await cast(Awaitable[SSHSession[AnyStr]], result) + else: + session = cast(SSHSession[AnyStr], result) + + if not self._conn: + raise ChannelOpenError(OPEN_CONNECT_FAILED, + 'SSH connection closed') + + chan, self._session = self._wrap_session(session) + + self.logger.debug2(' Initial recv window %d, packet size %d', + self._recv_window, self._recv_pktsize) + + assert self._send_chan is not None + assert self._recv_chan is not None + + self._conn.send_channel_open_confirmation(self._send_chan, + self._recv_chan, + self._recv_window, + self._recv_pktsize) + + self._send_state = 'open' + self._recv_state = 'open' + + self._session.connection_made(chan) + except ChannelOpenError as exc: + if self._conn: + assert self._send_chan is not None + self._conn.send_channel_open_failure(self._send_chan, exc.code, + exc.reason, exc.lang) + + self._loop.call_soon(self._cleanup) + + def process_open_confirmation(self, send_chan: int, send_window: int, + send_pktsize: int, packet: SSHPacket) -> None: + """Process a channel open confirmation""" + + if not self._open_waiter: + raise ProtocolError('Channel not being opened') + + self._send_chan = send_chan + self._send_window = send_window + self._send_pktsize = send_pktsize + + self.logger.debug2(' Initial send window %d, packet size %d', + send_window, send_pktsize) + + self._send_state = 'open' + self._recv_state = 'open' + + if not self._open_waiter.cancelled(): # pragma: no branch + self._open_waiter.set_result(packet) + + self._open_waiter = None + + def process_open_failure(self, code: int, reason: str, lang: str) -> None: + """Process a channel open failure""" + + if not self._open_waiter: + raise ProtocolError('Channel not being opened') + + if not self._open_waiter.cancelled(): # pragma: no branch + self._open_waiter.set_exception( + ChannelOpenError(code, reason, lang)) + + self._open_waiter = None + self._loop.call_soon(self._cleanup) + + def _process_window_adjust(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a send window adjustment""" + + if self._recv_state not in {'open', 'eof_pending', 'eof'}: + raise ProtocolError('Channel not open') + + adjust = packet.get_uint32() + packet.check_end() + + self._send_window += adjust + + self.logger.debug2('Received window adjust of %d bytes, ' + 'new window %d', adjust, self._send_window) + + self._flush_send_buf() + + def _process_data(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process incoming data""" + + if self._recv_state != 'open': + raise ProtocolError('Channel not open for sending') + + data = packet.get_string() + packet.check_end() + + self._accept_data(data) + + def _process_extended_data(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process incoming extended data""" + + if self._recv_state != 'open': + raise ProtocolError('Channel not open for sending') + + datatype = packet.get_uint32() + data = packet.get_string() + packet.check_end() + + if datatype not in self._read_datatypes: + raise ProtocolError('Invalid extended data type') + + self._accept_data(data, datatype) + + def _process_eof(self, 
_pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process an incoming end of file""" + + if self._recv_state != 'open': + raise ProtocolError('Channel not open for sending') + + packet.check_end() + + self.logger.debug2('Received EOF') + + self._recv_state = 'eof_pending' + self._flush_recv_buf() + + def _process_close(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process an incoming channel close""" + + if self._recv_state not in {'open', 'eof_pending', 'eof'}: + raise ProtocolError('Channel not open') + + packet.check_end() + + self.logger.info('Received channel close') + + self._close_send() + + self._recv_state = 'close_pending' + self._flush_recv_buf() + + def _process_request(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process an incoming channel request""" + + if self._recv_state not in {'open', 'eof_pending', 'eof'}: + raise ProtocolError('Channel not open') + + request_bytes = packet.get_string() + want_reply = packet.get_boolean() + + try: + request = request_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid channel request') from None + + self._request_queue.append((request, packet, want_reply)) + if len(self._request_queue) == 1: + self._service_next_request() + + def _process_response(self, pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a success or failure response""" + + packet.check_end() + + if self._request_waiters: + waiter = self._request_waiters.pop(0) + if not waiter.cancelled(): # pragma: no branch + waiter.set_result(pkttype == MSG_CHANNEL_SUCCESS) + else: + raise ProtocolError('Unexpected channel response') + + def _process_keepalive_at_openssh_dot_com_request( + self, packet: SSHPacket) -> bool: + """Process an incoming OpenSSH keepalive request""" + + packet.check_end() + + self.logger.debug2('Received OpenSSH keepalive channel request') + return False + + _packet_handlers = { + MSG_CHANNEL_WINDOW_ADJUST: _process_window_adjust, + MSG_CHANNEL_DATA: _process_data, + MSG_CHANNEL_EXTENDED_DATA: _process_extended_data, + MSG_CHANNEL_EOF: _process_eof, + MSG_CHANNEL_CLOSE: _process_close, + MSG_CHANNEL_REQUEST: _process_request, + MSG_CHANNEL_SUCCESS: _process_response, + MSG_CHANNEL_FAILURE: _process_response + } + + async def _open(self, chantype: bytes, *args: bytes) -> SSHPacket: + """Make a request to open the channel""" + + if self._send_state != 'closed': + raise OSError('Channel already open') + + self._open_waiter = self._loop.create_future() + + self.logger.debug2(' Initial recv window %d, packet size %d', + self._recv_window, self._recv_pktsize) + + assert self._conn is not None + assert self._recv_chan is not None + + self._conn.send_packet(MSG_CHANNEL_OPEN, String(chantype), + UInt32(self._recv_chan), + UInt32(self._recv_window), + UInt32(self._recv_pktsize), *args, handler=self) + + return await self._open_waiter + + def send_packet(self, pkttype: int, *args: bytes) -> None: + """Send a packet on the channel""" + + if self._send_chan is None: # pragma: no cover + return + + payload = UInt32(self._send_chan) + b''.join(args) + + assert self._conn is not None + self._conn.send_packet(pkttype, payload, handler=self) + + def _send_request(self, request: bytes, *args: bytes, + want_reply: bool = False) -> None: + """Send a channel request""" + + self.send_packet(MSG_CHANNEL_REQUEST, String(request), + Boolean(want_reply), *args) + + async def _make_request(self, request: bytes, + *args: bytes) -> Optional[bool]: + """Make a channel request and 
wait for the response""" + + if self._send_chan is None: + return False + + waiter = self._loop.create_future() + self._request_waiters.append(waiter) + self._send_request(request, *args, want_reply=True) + return await waiter + + def abort(self) -> None: + """Forcibly close the channel + + This method can be called to forcibly close the channel, after + which no more data can be sent or received. Any unsent buffered + data and any incoming data in flight will be discarded. + + """ + + self.logger.info('Aborting channel') + + if self._send_state not in {'close_pending', 'closed'}: + # Send an immediate close, discarding unsent data + self._close_send() + + if self._recv_state != 'closed': + # Discard unreceived data + self._discard_recv() + + def close(self) -> None: + """Cleanly close the channel + + This method can be called to cleanly close the channel, after + which no more data can be sent or received. Any unsent buffered + data will be flushed asynchronously before the channel is + closed. + + """ + + self.logger.info('Closing channel') + + if self._send_state not in {'close_pending', 'closed'}: + # Send a close only after sending unsent data + self._send_state = 'close_pending' + self._flush_send_buf() + + if self._recv_state != 'closed': + # Discard unreceived data + self._discard_recv() + + def is_closing(self) -> bool: + """Return if the channel is closing or is closed""" + + return self._send_state != 'open' + + async def wait_closed(self) -> None: + """Wait for this channel to close + + This method is a coroutine which can be called to block until + this channel has finished closing. + + """ + + await self._close_event.wait() + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Get additional information about the channel + + This method returns extra information about the channel once + it is established. Supported values include `'connection'` + to return the SSH connection this channel is running over plus + all of the values supported on that connection. + + For TCP channels, the values `'local_peername'` and + `'remote_peername'` are added to return the local and remote + host and port information for the tunneled TCP connection. + + For UNIX channels, the values `'local_peername'` and + `'remote_peername'` are added to return the local and remote + path information for the tunneled UNIX domain socket connection. + Since UNIX domain sockets provide no "source" address, only + one of these will be filled in. + + See :meth:`get_extra_info() ` + on :class:`SSHClientConnection` for more information. + + Additional information stored on the channel by calling + :meth:`set_extra_info` can also be returned here. + + """ + + return self._extra.get(name, self._conn.get_extra_info(name, default) + if self._conn else default) + + def set_extra_info(self, **kwargs: Any) -> None: + """Store additional information associated with the channel + + This method allows extra information to be associated with the + channel. The information to store should be passed in as + keyword parameters and can later be returned by calling + :meth:`get_extra_info` with one of the keywords as the name + to retrieve. + + """ + + self._extra.update(**kwargs) + + def can_write_eof(self) -> bool: + """Return whether the channel supports :meth:`write_eof` + + This method always returns `True`. 
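
The extra-info mechanism documented above is a small per-channel key/value store whose lookups fall through to the underlying connection for unknown keys. A brief usage sketch, assuming an already-established channel; `request_id` is a hypothetical application-defined key:

    # Usage sketch for the channel extra-info mechanism described above.
    # 'request_id' is a hypothetical application-defined key.
    import asyncssh

    def tag_channel(chan: asyncssh.SSHClientChannel) -> None:
        """Attach an application-level tag to a channel and read it back"""

        chan.set_extra_info(request_id='req-42')
        assert chan.get_extra_info('request_id') == 'req-42'

        # Keys not stored on the channel fall through to the connection,
        # e.g. transport-level info such as the peer address:
        print(chan.get_extra_info('peername'))
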
+ + """ + + # pylint: disable=no-self-use + return True + + def get_write_buffer_size(self) -> int: + """Return the current size of the channel's output buffer + + This method returns how many bytes are currently in the + channel's output buffer waiting to be written. + + """ + + return self._send_buf_len + + def set_write_buffer_limits(self, high: Optional[int] = None, + low: Optional[int] = None) -> None: + """Set the high- and low-water limits for write flow control + + This method sets the limits used when deciding when to call + the :meth:`pause_writing() ` + and :meth:`resume_writing() ` + methods on SSH sessions. Writing will be paused when the write + buffer size exceeds the high-water mark, and resumed when the + write buffer size equals or drops below the low-water mark. + + """ + + if high is None: + high = 4*low if low is not None else 65536 + + if low is None: + low = high // 4 + + if not 0 <= low <= high: + raise ValueError('high (%r) must be >= low (%r) must be >= 0' % + (high, low)) + + self.logger.debug1('Set write buffer limits: low-water=%d, ' + 'high-water=%d', low, high) + + self._send_high_water = high + self._send_low_water = low + self._pause_resume_writing() + + def write(self, data: AnyStr, datatype: DataType = None) -> None: + """Write data on the channel + + This method can be called to send data on the channel. If + an encoding was specified when the channel was created, the + data should be provided as a string and will be converted + using that encoding. Otherwise, the data should be provided + as bytes. + + An extended data type can optionally be provided. For + instance, this is used from a :class:`SSHServerSession` + to write data to `stderr`. + + :param data: + The data to send on the channel + :param datatype: (optional) + The extended data type of the data, from :ref:`extended + data types ` + :type data: `str` or `bytes` + :type datatype: `int` + + :raises: :exc:`OSError` if the channel isn't open for sending + or the extended data type is not valid for this type + of channel + + """ + + if self._send_state != 'open': + raise BrokenPipeError('Channel not open for sending') + + if datatype is not None and datatype not in self._write_datatypes: + raise OSError('Invalid extended data type') + + if not data: + return + + if self._encoding: + assert self._encoder is not None + encoded_data = self._encoder.encode(cast(str, data)) + else: + encoded_data = cast(bytes, data) + + datalen = len(encoded_data) + + if datatype: + typename = ' to %s' % _data_type_names[datatype] + else: + typename = '' + + self.logger.debug2('Sending %d data byte%s%s', datalen, + 's' if datalen > 1 else '', typename) + + self._send_buf.append((bytearray(encoded_data), datatype)) + self._send_buf_len += datalen + self._flush_send_buf() + + def writelines(self, list_of_data: Iterable[AnyStr], + datatype: DataType = None) -> None: + """Write a list of data bytes on the channel + + This method can be called to write a list (or any iterable) of + data bytes to the channel. It is functionality equivalent to + calling :meth:`write` on each element in the list. 
+ + :param list_of_data: + The data to send on the channel + :param datatype: (optional) + The extended data type of the data, from :ref:`extended + data types ` + :type list_of_data: iterable of `str` or `bytes` + :type datatype: `int` + + :raises: :exc:`OSError` if the channel isn't open for sending + or the extended data type is not valid for this type + of channel + + """ + + if self._encoding: + data = cast(AnyStr, ''.join(cast(Iterable[str], list_of_data))) + else: + data = cast(AnyStr, b''.join(cast(Iterable[bytes], list_of_data))) + + return self.write(data, datatype) + + def write_eof(self) -> None: + """Write EOF on the channel + + This method sends an end-of-file indication on the + channel, after which no more data can be sent. The + channel remains open, though, and data may still be + sent in the other direction. + + :raises: :exc:`OSError` if the channel isn't open for sending + + """ + + self.logger.debug2('Sending EOF') + + if self._send_state == 'open': + self._send_state = 'eof_pending' + self._flush_send_buf() + + def pause_reading(self) -> None: + """Pause delivery of incoming data + + This method is used to temporarily suspend delivery of incoming + channel data. After this call, incoming data will no longer + be delivered until :meth:`resume_reading` is called. Data will be + buffered locally up to the configured SSH channel window size, + but window updates will no longer be sent, eventually causing + back pressure on the remote system. + + .. note:: Channel close notifications are not suspended by this + call. If the remote system closes the channel while + delivery is suspended, the channel will be closed even + though some buffered data may not have been delivered. + + """ + + self.logger.debug2('Reading from channel paused') + + self._recv_paused = True + + def resume_reading(self) -> None: + """Resume delivery of incoming data + + This method can be called to resume delivery of incoming data + which was suspended by a call to :meth:`pause_reading`. As soon + as this method is called, any buffered data will be delivered + immediately. A pending end-of-file notification may also be + delivered if one was queued while reading was paused. + + """ + + if self._recv_paused: + self.logger.debug2('Reading from channel resumed') + + self._recv_paused = False + self._flush_recv_buf() + + def get_environment(self) -> Mapping[str, str]: + """Return the environment for this session + + This method returns the environment set by the client when + the session was opened. On the server, calls to this method + should only be made after :meth:`session_started + ` has been called on the + :class:`SSHServerSession`. When using the stream-based API, + calls to this can be made at any time after the handler + function has started up. + + :returns: A dictionary containing the environment variables + set by the client + + """ + + return MappingProxyType(self._env) + + def get_command(self) -> Optional[str]: + """Return the command the client requested to execute, if any + + This method returns the command the client requested to + execute when the session was opened, if any. If the client + did not request that a command be executed, this method + will return `None`. On the server, calls to this method + should only be made after :meth:`session_started + ` has been called on the + :class:`SSHServerSession`. When using the stream-based API, + calls to this can be made at any time after the handler + function has started up. 
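
On the server side, the accessors documented above (`get_environment`, `get_command`, and the `get_subsystem` that follows) are normally consulted once the session has started. A sketch using asyncssh's higher-level process API, which surfaces the same information; the port number and host-key path are placeholders, and authentication setup is omitted:

    # Server-side sketch: reading the client-supplied environment and
    # command. Port and host-key path are placeholder values.
    import asyncio
    import asyncssh

    async def handle_process(process: asyncssh.SSHServerProcess) -> None:
        # SSHServerProcess exposes the same data as the channel accessors
        # above (get_environment / get_command / get_subsystem)
        process.stdout.write(f'env: {dict(process.env)}\n')
        process.stdout.write(f'command: {process.command}\n')
        process.exit(0)

    async def main() -> None:
        await asyncssh.listen(port=8022, server_host_keys=['ssh_host_key'],
                              process_factory=handle_process)
        await asyncio.Future()  # serve forever

    # asyncio.run(main())
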
+ + """ + + return self._command + + def get_subsystem(self) -> Optional[str]: + """Return the subsystem the client requested to open, if any + + This method returns the subsystem the client requested to + open when the session was opened, if any. If the client + did not request that a subsystem be opened, this method will + return `None`. On the server, calls to this method should + only be made after :meth:`session_started + ` has been called on the + :class:`SSHServerSession`. When using the stream-based API, + calls to this can be made at any time after the handler + function has started up. + + """ + + return self._subsystem + + +class SSHClientChannel(SSHChannel, Generic[AnyStr]): + """SSH client channel""" + + _conn: 'SSHClientConnection' + _session: SSHClientSession[AnyStr] + + _read_datatypes = {EXTENDED_DATA_STDERR} + + def __init__(self, conn: 'SSHClientConnection', + loop: asyncio.AbstractEventLoop, encoding: Optional[str], + errors: str, window: int, max_pktsize: int): + super().__init__(conn, loop, encoding, errors, window, max_pktsize) + + self._exit_status: Optional[int] = None + self._exit_signal: Optional[_ExitSignal] = None + + async def create(self, session_factory: SSHClientSessionFactory[AnyStr], + command: Optional[str], subsystem: Optional[str], + env: Dict[str, str], request_pty: bool, + term_type: Optional[str], term_size: TermSizeArg, + term_modes: TermModes, x11_forwarding: Union[bool, str], + x11_display: Optional[str], x11_auth_path: Optional[str], + x11_single_connection: bool, + agent_forwarding: bool) -> SSHClientSession[AnyStr]: + """Create an SSH client session""" + + self.logger.info('Requesting new SSH session') + + packet = await self._open(b'session') + + # Client sessions should have no extra data in the open confirmation + packet.check_end() + + self._session = session_factory() + self._session.connection_made(self) + + self._env = env + self._command = command + self._subsystem = subsystem + + for name, env_value in env.items(): + self.logger.debug1(' Env: %s=%s', name, env_value) + self._send_request(b'env', String(str(name)), + String(str(env_value))) + + if request_pty: + self.logger.debug1(' Terminal type: %s', term_type or 'None') + + if not term_size: + width = height = pixwidth = pixheight = 0 + elif len(term_size) == 2: + width, height = cast(Tuple[int, int], term_size) + pixwidth = pixheight = 0 + self.logger.debug1(' Terminal size: %sx%s', width, height) + elif len(term_size) == 4: + width, height, pixwidth, pixheight = cast(TermSize, term_size) + self.logger.debug1(' Terminal size: %sx%s (%sx%s pixels)', + width, height, pixwidth, pixheight) + else: + raise ValueError('If set, terminal size must be a tuple of ' + '2 or 4 integers') + + modes = b'' + for mode, mode_value in term_modes.items(): + if mode <= PTY_OP_END or mode >= PTY_OP_RESERVED: + raise ValueError('Invalid pty mode: %s' % mode) + + name = _pty_mode_names.get(mode, str(mode)) + self.logger.debug2(' Mode %s: %d', name, mode_value) + modes += Byte(mode) + UInt32(mode_value) + + modes += Byte(PTY_OP_END) + + if not (await self._make_request(b'pty-req', + String(term_type or ''), + UInt32(width), UInt32(height), + UInt32(pixwidth), + UInt32(pixheight), + String(modes))): + self.close() + raise ChannelOpenError(OPEN_REQUEST_PTY_FAILED, + 'PTY request failed') + + if x11_forwarding: + self.logger.debug1(' X11 forwarding enabled') + + try: + attach_result: Optional[Tuple[bytes, bytes, int]] = \ + await self._conn.attach_x11_listener( + self, x11_display, x11_auth_path, 
x11_single_connection) + except ValueError as exc: + if x11_forwarding != 'ignore_failure': + raise ChannelOpenError(OPEN_REQUEST_X11_FORWARDING_FAILED, + str(exc)) from None + else: + attach_result = None + self.logger.info(' X11 forwarding attach failure ignored') + + if attach_result: + auth_proto, remote_auth, screen = attach_result + + result = await self._make_request( + b'x11-req', Boolean(x11_single_connection), + String(auth_proto), String(binascii.b2a_hex(remote_auth)), + UInt32(screen)) + + if not result: + if self._conn: # pragma: no branch + self._conn.detach_x11_listener(self) + + if x11_forwarding != 'ignore_failure': + raise ChannelOpenError( + OPEN_REQUEST_X11_FORWARDING_FAILED, + 'X11 forwarding request failed') + else: + self.logger.info( + ' X11 forwarding request failure ignored') + + if agent_forwarding: + self.logger.debug1(' Agent forwarding enabled') + self._send_request(b'auth-agent-req@openssh.com') + + if command: + self.logger.info(' Command: %s', command) + result = await self._make_request(b'exec', String(command)) + elif subsystem: + self.logger.info(' Subsystem: %s', subsystem) + result = await self._make_request(b'subsystem', String(subsystem)) + else: + self.logger.info(' Interactive shell requested') + result = await self._make_request(b'shell') + + if not result: + self.close() + raise ChannelOpenError(OPEN_REQUEST_SESSION_FAILED, + 'Session request failed') + + self._session.session_started() + self._conn.create_task(self._start_reading(), self.logger) + + return self._session + + def _process_xon_xoff_request(self, packet: SSHPacket) -> bool: + """Process a request to set up XON/XOFF processing""" + + client_can_do = packet.get_boolean() + packet.check_end() + + self.logger.info('Received XON/XOFF flow control %s request', + 'enable' if client_can_do else 'disable') + + self._session.xon_xoff_requested(client_can_do) + return True + + def _process_exit_status_request(self, packet: SSHPacket) -> bool: + """Process a request to deliver exit status""" + + status = packet.get_uint32() & 0xff + packet.check_end() + + self.logger.info('Received exit status %d', status) + + self._exit_status = status + self._session.exit_status_received(status) + return True + + def _process_exit_signal_request(self, packet: SSHPacket) -> bool: + """Process a request to deliver an exit signal""" + + signal_bytes = packet.get_string() + core_dumped = packet.get_boolean() + msg_bytes = packet.get_string() + lang_bytes = packet.get_string() + packet.check_end() + + try: + signal = signal_bytes.decode('ascii') + msg = msg_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid exit signal request') from None + + self.logger.info('Received exit signal %s', signal) + self.logger.debug1(' Core dumped: %s', core_dumped) + self.logger.debug1(' Message: %s', msg) + + self._exit_signal = (signal, core_dumped, msg, lang) + self._session.exit_signal_received(signal, core_dumped, msg, lang) + return True + + def get_exit_status(self) -> Optional[int]: + """Return the session's exit status + + This method returns the exit status of the session if one has + been sent. If an exit signal was sent, this method returns -1 + and the exit signal information can be collected by calling + :meth:`get_exit_signal`. If neither has been sent, this method + returns `None`. 
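
Seen from asyncssh's high-level client API, the exit-status and exit-signal plumbing above surfaces on completed commands. A brief sketch, with a placeholder hostname:

    # Client-side sketch of the exit-status handling described above.
    # 'host.example.com' is a placeholder.
    import asyncio
    import asyncssh

    async def run_remote() -> None:
        async with asyncssh.connect('host.example.com') as conn:
            result = await conn.run('exit 3')

            # exit_status mirrors get_exit_status(): the command's status,
            # or -1 if the process died on a signal, in which case
            # exit_signal carries (name, core_dumped, msg, lang)
            print(result.exit_status)   # -> 3
            print(result.exit_signal)   # -> None

    # asyncio.run(run_remote())
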
+ + """ + + if self._exit_status is not None: + return self._exit_status + elif self._exit_signal: + return -1 + else: + return None + + def get_exit_signal(self) -> Optional[_ExitSignal]: + """Return the session's exit signal, if one was sent + + This method returns information about the exit signal sent on + this session. If an exit signal was sent, a tuple is returned + containing the signal name, a boolean for whether a core dump + occurred, a message associated with the signal, and the language + the message was in. Otherwise, this method returns `None`. + + """ + + return self._exit_signal + + def get_returncode(self) -> Optional[int]: + """Return the session's exit status or signal + + This method returns the exit status of the session if one has + been sent. If an exit signal was sent, this method returns + the negative of the numeric value of that signal, matching + the behavior of :meth:`asyncio.SubprocessTransport.get_returncode`. + If neither has been sent, this method returns `None`. + + :returns: `int` or `None` + + """ + + if self._exit_status is not None: + return self._exit_status + elif self._exit_signal: + return -_signal_numbers.get(self._exit_signal[0], 99) + else: + return None + + def change_terminal_size(self, width: int, height: int, + pixwidth: int = 0, pixheight: int = 0) -> None: + """Change the terminal window size for this session + + This method changes the width and height of the terminal + associated with this session. + + :param width: + The width of the terminal in characters + :param height: + The height of the terminal in characters + :param pixwidth: (optional) + The width of the terminal in pixels + :param pixheight: (optional) + The height of the terminal in pixels + :type width: `int` + :type height: `int` + :type pixwidth: `int` + :type pixheight: `int` + + """ + + if pixwidth or pixheight: + self.logger.info('Sending window size change: %sx%s (%sx%s pixels)', + width, height, pixwidth, pixheight) + else: + self.logger.info('Sending window size change: %sx%s', width, height) + + self._send_request(b'window-change', UInt32(width), UInt32(height), + UInt32(pixwidth), UInt32(pixheight)) + + def send_break(self, msec: int) -> None: + """Send a break to the remote process + + This method requests that the server perform a break + operation on the remote process or service as described in + :rfc:`4335`. + + :param msec: + The duration of the break in milliseconds + :type msec: `int` + + :raises: :exc:`OSError` if the channel is not open + + """ + + self.logger.info('Sending %d msec break', msec) + + self._send_request(b'break', UInt32(msec)) + + def send_signal(self, signal: Union[str, int]) -> None: + """Send a signal to the remote process + + This method can be called to deliver a signal to the remote + process or service. Signal names should be as described in + section 6.10 of :rfc:`RFC 4254 <4254#section-6.10>`, or + can be integer values as defined in the :mod:`signal` + module, in which case they will be translated to their + corresponding signal name before being sent. + + .. note:: OpenSSH's SSH server implementation prior to version + 7.9 does not support this message, so attempts to + use :meth:`send_signal`, :meth:`terminate`, or + :meth:`kill` with an older OpenSSH SSH server will + end up being ignored. This was tracked in OpenSSH + `bug 1424`__. 
+ + __ https://bugzilla.mindrot.org/show_bug.cgi?id=1424 + + :param signal: + The signal to deliver + :type signal: `str` or `int` + + :raises: | :exc:`OSError` if the channel is not open + | :exc:`ValueError` if the signal number is unknown + + """ + + if isinstance(signal, int): + try: + signal = _signal_names[signal] + except KeyError: + raise ValueError('Unknown signal: %s' % int(signal)) from None + + self.logger.info('Sending %s signal', signal) + + self._send_request(b'signal', String(signal)) + + def terminate(self) -> None: + """Terminate the remote process + + This method can be called to terminate the remote process or + service by sending it a `TERM` signal. + + :raises: :exc:`OSError` if the channel is not open + + .. note:: If your server-side runs on OpenSSH, + this might be ineffective; + for more details, see the note in + :meth:`send_signal` + + """ + + self.send_signal('TERM') + + def kill(self) -> None: + """Forcibly kill the remote process + + This method can be called to forcibly stop the remote process + or service by sending it a `KILL` signal. + + :raises: :exc:`OSError` if the channel is not open + + .. note:: If your server-side runs on OpenSSH, + this might be ineffective; + for more details, see the note in + :meth:`send_signal` + + """ + + self.send_signal('KILL') + + +class SSHServerChannel(SSHChannel, Generic[AnyStr]): + """SSH server channel""" + + _conn: 'SSHServerConnection' + _session: SSHServerSession[AnyStr] + + _write_datatypes = {EXTENDED_DATA_STDERR} + + def __init__(self, conn: 'SSHServerConnection', + loop: asyncio.AbstractEventLoop, allow_pty: bool, + line_editor: bool, line_echo: bool, line_history: int, + max_line_length: int, encoding: Optional[str], errors: str, + window: int, max_pktsize: int): + """Initialize an SSH server channel""" + + super().__init__(conn, loop, encoding, errors, window, max_pktsize) + + self._env = cast(Dict[str, str], + conn.get_key_option('environment', {})) + + self._allow_pty = allow_pty + self._line_editor = line_editor + self._line_echo = line_echo + self._line_history = line_history + self._max_line_length = max_line_length + self._term_type: Optional[str] = None + self._term_size = (0, 0, 0, 0) + self._term_modes: TermModes = {} + self._x11_display: Optional[str] = None + + self.logger.info('New SSH session requested') + + def _wrap_session(self, session: SSHSession[AnyStr]) -> \ + Tuple[SSHChannel[AnyStr], SSHSession[AnyStr]]: + """Wrap a line editor around the session if enabled""" + + if self._line_editor: + server_chan = cast(SSHServerChannel[str], self) + server_session = cast(SSHServerSession[str], session) + + editor_chan = SSHLineEditorChannel(server_chan, server_session, + self._line_echo, + self._line_history, + self._max_line_length) + editor_session = SSHLineEditorSession(editor_chan, server_session) + + chan = cast(SSHChannel[AnyStr], editor_chan) + session = cast(SSHSession[AnyStr], editor_session) + else: + chan = self + + return chan, session + + def _process_pty_req_request(self, packet: SSHPacket) -> bool: + """Process a request to open a pseudo-terminal""" + + term_type_bytes = packet.get_string() + width = packet.get_uint32() + height = packet.get_uint32() + pixwidth = packet.get_uint32() + pixheight = packet.get_uint32() + modes = packet.get_string() + packet.check_end() + + if not self._allow_pty or \ + not self._conn.check_key_permission('pty') or \ + not self._conn.check_certificate_permission('pty'): + self.logger.info('PTY request denied: PTY not permitted') + return False + + try: + 
term_type = term_type_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid pty request') from None + + term_size = (width, height, pixwidth, pixheight) + term_modes = {} + + self.logger.debug1(' Terminal type: %s', term_type) + + if pixwidth or pixheight: + self.logger.debug1(' Terminal size: %sx%s (%sx%s pixels)', + width, height, pixwidth, pixheight) + else: + self.logger.debug1(' Terminal size: %sx%s', width, height) + + idx = 0 + while idx < len(modes): + mode = modes[idx] + idx += 1 + if mode == PTY_OP_END or mode >= PTY_OP_RESERVED: + break + + if idx+4 <= len(modes): + name = _pty_mode_names.get(mode, str(mode)) + value = int.from_bytes(modes[idx:idx+4], 'big') + self.logger.debug2(' Mode %s: %s', name, value) + term_modes[mode] = value + idx += 4 + else: + raise ProtocolError('Invalid pty modes string') + + result = self._session.pty_requested(term_type, term_size, term_modes) + + if result: + self.logger.info(' PTY created') + + if self._line_editor: + self.logger.info(' Line editor enabled') + + self._term_type = term_type + self._term_size = term_size + self._term_modes = term_modes + else: + self.logger.info(' PTY creation failed') + + return result + + def _process_x11_req_request(self, packet: SSHPacket) -> Optional[bool]: + """Process request to enable X11 forwarding""" + + _ = packet.get_boolean() # single_connection + auth_proto = packet.get_string() + auth_data = packet.get_string() + screen = packet.get_uint32() + packet.check_end() + + try: + auth_data = binascii.a2b_hex(auth_data) + except binascii.Error: + self.logger.debug1(' Invalid X11 auth data') + return False + + self._conn.create_task(self._finish_x11_req_request(auth_proto, + auth_data, screen), + self.logger) + return None + + async def _finish_x11_req_request(self, auth_proto: bytes, + auth_data: bytes, screen: int) -> None: + """Finish processing request to enable X11 forwarding""" + + self._x11_display = await self._conn.attach_x11_listener( + self, auth_proto, auth_data, screen) + + if self._x11_display: + self.logger.debug1(' X11 forwarding enabled') + self._report_response(True) + else: + self.logger.debug1(' X11 forwarding failed') + self._report_response(False) + + def _process_auth_agent_req_at_openssh_dot_com_request( + self, packet: SSHPacket) -> None: + """Process a request to enable ssh-agent forwarding""" + + packet.check_end() + + self._conn.create_task(self._finish_agent_req_request(), self.logger) + + async def _finish_agent_req_request(self) -> None: + """Finish processing request to enable agent forwarding""" + + if await self._conn.create_agent_listener(): + self.logger.debug1(' Agent forwarding enabled') + self._report_response(True) + else: + self.logger.debug1(' Agent forwarding failed') + self._report_response(False) + + def _process_env_request(self, packet: SSHPacket) -> bool: + """Process a request to set an environment variable""" + + name_bytes = packet.get_string() + value_bytes = packet.get_string() + packet.check_end() + + try: + name = name_bytes.decode('utf-8') + value = value_bytes.decode('utf-8') + except UnicodeDecodeError: + self.logger.debug1('Invalid environment data') + return False + + self.logger.debug1(' Env: %s=%s', name, value) + self._env[name] = value + return True + + def _start_session(self, command: Optional[str] = None, + subsystem: Optional[str] = None) -> bool: + """Tell the session what type of channel is being requested""" + + forced_command = \ + cast(str, self._conn.get_certificate_option('force-command')) + + if 
forced_command is None:
+            forced_command = cast(str, self._conn.get_key_option('command'))
+
+        if forced_command is not None:
+            self.logger.info('  Forced command override: %s', forced_command)
+
+            command = forced_command
+
+        if command is not None:
+            self._command = command
+            result = self._session.exec_requested(command)
+        elif subsystem is not None:
+            self._subsystem = subsystem
+            result = self._session.subsystem_requested(subsystem)
+        else:
+            result = self._session.shell_requested()
+
+        return result
+
+    def _process_shell_request(self, packet: SSHPacket) -> bool:
+        """Process a request to open a shell"""
+
+        packet.check_end()
+
+        self.logger.info('  Interactive shell requested')
+        return self._start_session()
+
+    def _process_exec_request(self, packet: SSHPacket) -> bool:
+        """Process a request to execute a command"""
+
+        command_bytes = packet.get_string()
+        packet.check_end()
+
+        try:
+            command = command_bytes.decode('utf-8')
+        except UnicodeDecodeError:
+            return False
+
+        self.logger.info('  Command: %s', command)
+        return self._start_session(command=command)
+
+    def _process_subsystem_request(self, packet: SSHPacket) -> bool:
+        """Process a request to open a subsystem"""
+
+        subsystem_bytes = packet.get_string()
+        packet.check_end()
+
+        try:
+            subsystem = subsystem_bytes.decode('ascii')
+        except UnicodeDecodeError:
+            return False
+
+        self.logger.info('  Subsystem: %s', subsystem)
+        return self._start_session(subsystem=subsystem)
+
+    def _process_window_change_request(self, packet: SSHPacket) -> bool:
+        """Process a request to change the window size"""
+
+        width = packet.get_uint32()
+        height = packet.get_uint32()
+        pixwidth = packet.get_uint32()
+        pixheight = packet.get_uint32()
+        packet.check_end()
+
+        if pixwidth or pixheight:
+            self.logger.info('Received window change: %sx%s (%sx%s pixels)',
+                             width, height, pixwidth, pixheight)
+        else:
+            self.logger.info('Received window change: %sx%s', width, height)
+
+        self._term_size = (width, height, pixwidth, pixheight)
+        self._session.terminal_size_changed(width, height,
+                                            pixwidth, pixheight)
+        return True
+
+    def _process_signal_request(self, packet: SSHPacket) -> bool:
+        """Process a request to send a signal"""
+
+        signal_bytes = packet.get_string()
+        packet.check_end()
+
+        try:
+            signal = signal_bytes.decode('ascii')
+        except UnicodeDecodeError:
+            return False
+
+        self.logger.info('Received %s signal', signal)
+
+        self._session.signal_received(signal)
+        return True
+
+    def _process_break_request(self, packet: SSHPacket) -> bool:
+        """Process a request to send a break"""
+
+        msec = packet.get_uint32()
+        packet.check_end()
+
+        self.logger.info('Received %d msec break', msec)
+
+        return self._session.break_received(msec)
+
+    def get_terminal_type(self) -> Optional[str]:
+        """Return the terminal type for this session
+
+        This method returns the terminal type set by the client
+        when the session was opened. If the client didn't request
+        a pseudo-terminal, this method will return `None`. Calls
+        to this method should only be made after :meth:`session_started
+        <SSHServerSession.session_started>` has been called on the
+        :class:`SSHServerSession`. When using the stream-based API,
+        calls to this can be made at any time after the handler
+        function has started up.
+
+        :returns: A `str` containing the terminal type or `None` if
+                  no pseudo-terminal was requested
+
+        """
+
+        return self._term_type
+
+    def get_terminal_size(self) -> TermSize:
+        """Return terminal size information for this session
+
+        This method returns the latest terminal size information set
+        by the client. If the client didn't set any terminal size
+        information, all values returned will be zero. Calls to
+        this method should only be made after :meth:`session_started
+        <SSHServerSession.session_started>` has been called on the
+        :class:`SSHServerSession`. When using the stream-based API,
+        calls to this can be made at any time after the handler
+        function has started up.
+
+        Also see :meth:`terminal_size_changed()
+        <SSHServerSession.terminal_size_changed>` or the
+        :exc:`TerminalSizeChanged` exception for how to get notified
+        when the terminal size changes.
+
+        :returns: A tuple of four `int` values containing the width and
+                  height of the terminal in characters and the width
+                  and height of the terminal in pixels
+
+        """
+
+        return self._term_size
+
+    def get_terminal_mode(self, mode: int) -> Optional[int]:
+        """Return the requested TTY mode for this session
+
+        This method looks up the value of a POSIX terminal mode
+        set by the client when the session was opened. If the client
+        didn't request a pseudo-terminal or didn't set the requested
+        TTY mode opcode, this method will return `None`. Calls to
+        this method should only be made after :meth:`session_started
+        <SSHServerSession.session_started>` has been called on the
+        :class:`SSHServerSession`. When using the stream-based API,
+        calls to this can be made at any time after the handler
+        function has started up.
+
+        :param mode:
+            POSIX terminal mode taken from :ref:`POSIX terminal modes
+            <PTYModes>` to look up
+        :type mode: `int`
+
+        :returns: An `int` containing the value of the requested
+                  POSIX terminal mode or `None` if the requested
+                  mode was not set
+
+        """
+
+        return self._term_modes.get(mode)
+
+    def get_terminal_modes(self) -> TermModes:
+        """Return the TTY modes for this session
+
+        This method returns a mapping of all the POSIX terminal modes
+        set by the client when the session was opened. If the client
+        didn't request a pseudo-terminal, this method will return an
+        empty mapping. Calls to this method should only be made after
+        :meth:`session_started <SSHServerSession.session_started>`
+        has been called on the :class:`SSHServerSession`. When using
+        the stream-based API, calls to this can be made at any time
+        after the handler function has started up.
+
+        :returns: A mapping containing all the POSIX terminal modes
+                  set by the client or an empty mapping if no
+                  pseudo-terminal was requested
+
+        """
+
+        return MappingProxyType(self._term_modes)
+
+    def get_x11_display(self) -> Optional[str]:
+        """Return the display to use for X11 forwarding
+
+        When X11 forwarding has been requested by the client, this
+        method returns the X11 display which should be used to open
+        a forwarded connection. If the client did not request X11
+        forwarding, this method returns `None`.
+
+        :returns: A `str` containing the X11 display or `None` if
+                  X11 forwarding was not requested
+
+        """
+
+        return self._x11_display
+
+    def get_agent_path(self) -> Optional[str]:
+        """Return the path of the ssh-agent listening socket
+
+        When agent forwarding has been requested by the client,
+        this method returns the path of the listening socket which
+        should be used to open a forwarded agent connection. If the
+        client did not request agent forwarding, this method returns
+        `None`.
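+
+        A server-side sketch of using this path to reach the forwarded
+        agent (illustrative only; assumes agent forwarding was in fact
+        requested by the client)::
+
+            agent_path = chan.get_agent_path()
+
+            if agent_path:
+                agent = await asyncssh.connect_agent(agent_path)
+                keys = await agent.get_keys()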
+
+        :returns: A `str` containing the ssh-agent socket path or
+                  `None` if agent forwarding was not requested
+
+        """
+
+        return self._conn.get_agent_path()
+
+    def set_xon_xoff(self, client_can_do: bool) -> None:
+        """Set whether the client should enable XON/XOFF flow control
+
+        This method can be called to tell the client whether or not
+        to enable XON/XOFF flow control, indicating that it should
+        intercept Control-S and Control-Q coming from its local
+        terminal to pause and resume output, respectively.
+        Applications should set client_can_do to `True` to
+        enable this functionality or to `False` to tell the client
+        to forward Control-S and Control-Q through as normal input.
+
+        :param client_can_do:
+            Whether or not the client should enable XON/XOFF flow control
+        :type client_can_do: `bool`
+
+        """
+
+        self.logger.info('Sending XON/XOFF flow control %s',
+                         'enable' if client_can_do else 'disable')
+
+        self._send_request(b'xon-xoff', Boolean(client_can_do))
+
+    def write_stderr(self, data: AnyStr) -> None:
+        """Write output to stderr
+
+        This method can be called to send output to the client which
+        is intended to be displayed on stderr. If an encoding was
+        specified when the channel was created, the data should be
+        provided as a string and will be converted using that
+        encoding. Otherwise, the data should be provided as bytes.
+
+        :param data:
+            The data to send to stderr
+        :type data: `str` or `bytes`
+
+        :raises: :exc:`OSError` if the channel isn't open for sending
+
+        """
+
+        self.write(data, EXTENDED_DATA_STDERR)
+
+    def writelines_stderr(self, list_of_data: Iterable[AnyStr]) -> None:
+        """Write a list of data bytes to stderr
+
+        This method can be called to write a list (or any iterable) of
+        data bytes to the channel. It is functionally equivalent to
+        calling :meth:`write_stderr` on each element in the list.
+
+        """
+
+        self.writelines(list_of_data, EXTENDED_DATA_STDERR)
+
+    def exit(self, status: int) -> None:
+        """Send exit status and close the channel
+
+        This method can be called to report an exit status for the
+        process back to the client and close the channel. A zero
+        exit status is generally returned when the operation was
+        successful. After reporting the status, the channel is
+        closed.
+
+        :param status:
+            The exit status to report to the client
+        :type status: `int`
+
+        :raises: :exc:`OSError` if the channel isn't open
+
+        """
+
+        status &= 0xff
+
+        if self._send_state not in {'close_pending', 'closed'}:
+            self.logger.info('Sending exit status %d', status)
+
+            self._send_request(b'exit-status', UInt32(status))
+            self.close()
+
+    def exit_with_signal(self, signal: str, core_dumped: bool = False,
+                         msg: str = '', lang: str = DEFAULT_LANG) -> None:
+        """Send exit signal and close the channel
+
+        This method can be called to report that the process
+        terminated abnormally with a signal. A more detailed
+        error message may also be provided, along with an indication
+        of whether or not the process dumped core. After
+        reporting the signal, the channel is closed.
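+
+        For example, a session handler might report a crashed process
+        like this (an illustrative sketch; the signal name comes from
+        the set defined in :rfc:`4254`)::
+
+            chan.exit_with_signal('SEGV', core_dumped=True,
+                                  msg='Segmentation fault')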
+ + :param signal: + The signal which caused the process to exit + :param core_dumped: (optional) + Whether or not the process dumped core + :param msg: (optional) + Details about what error occurred + :param lang: (optional) + The language the error message is in + :type signal: `str` + :type core_dumped: `bool` + :type msg: `str` + :type lang: `str` + + :raises: :exc:`OSError` if the channel isn't open + + """ + + self.logger.info('Sending exit signal %s', signal) + self.logger.debug1(' Core dumped: %s', core_dumped) + self.logger.debug1(' Message: %s', msg) + + if self._send_state not in {'close_pending', 'closed'}: + self._send_request(b'exit-signal', String(signal), + Boolean(core_dumped), String(msg), String(lang)) + self.close() + + +class SSHForwardChannel(SSHChannel, Generic[AnyStr]): + """SSH channel for forwarding TCP and UNIX domain connections""" + + async def _finish_open_request( + self, result: MaybeAwait[SSHSession[AnyStr]]) -> None: + """Finish processing a forward channel open request""" + + await super()._finish_open_request(result) + + if self._session is not None: + self._session.session_started() + self.resume_reading() + + async def _open_forward(self, session_factory: SSHSessionFactory[AnyStr], + chantype: bytes, *args: bytes) -> \ + SSHSession[AnyStr]: + """Open a forward channel""" + + packet = await super()._open(chantype, *args) + + # Forward channels should have no extra data in the open confirmation + packet.check_end() + + self._session = session_factory() + self._session.connection_made(self) + self._session.session_started() + + assert self._conn is not None + self._conn.create_task(self._start_reading(), self.logger) + + return self._session + + +class SSHTCPChannel(SSHForwardChannel, Generic[AnyStr]): + """SSH TCP channel""" + + async def _open_tcp(self, session_factory: SSHTCPSessionFactory[AnyStr], + chantype: bytes, host: str, port: int, orig_host: str, + orig_port: int) -> SSHTCPSession[AnyStr]: + """Open a TCP channel""" + + self.set_extra_info(peername=('', 0), + local_peername=(orig_host, orig_port), + remote_peername=(host, port)) + + return cast(SSHTCPSession[AnyStr], + await self._open_forward(session_factory, chantype, + String(host), UInt32(port), + String(orig_host), + UInt32(orig_port))) + + async def connect(self, session_factory: SSHTCPSessionFactory[AnyStr], + host: str, port: int, orig_host: str, orig_port: int) -> \ + SSHTCPSession[AnyStr]: + """Create a new outbound TCP session""" + + return (await self._open_tcp(session_factory, b'direct-tcpip', + host, port, orig_host, orig_port)) + + async def accept(self, session_factory: SSHTCPSessionFactory[AnyStr], + host: str, port: int, orig_host: str, + orig_port: int) -> SSHTCPSession[AnyStr]: + """Create a new forwarded TCP session""" + + return (await self._open_tcp(session_factory, b'forwarded-tcpip', + host, port, orig_host, orig_port)) + + def set_inbound_peer_names(self, dest_host: str, dest_port: int, + orig_host: str, orig_port: int) -> None: + """Set local and remote peer names for inbound connections""" + + self.set_extra_info(peername=('', 0), + local_peername=(dest_host, dest_port), + remote_peername=(orig_host, orig_port)) + + +class SSHUNIXChannel(SSHForwardChannel, Generic[AnyStr]): + """SSH UNIX channel""" + + async def _open_unix(self, session_factory: SSHUNIXSessionFactory[AnyStr], + chantype: bytes, path: str, + *args: bytes) -> SSHUNIXSession[AnyStr]: + """Open a UNIX channel""" + + self.set_extra_info(local_peername='', remote_peername=path) + + return 
cast(SSHUNIXSession[AnyStr], + await self._open_forward(session_factory, chantype, + String(path), *args)) + + async def connect(self, session_factory: SSHUNIXSessionFactory[AnyStr], + path: str) -> SSHUNIXSession[AnyStr]: + """Create a new outbound UNIX session""" + + # OpenSSH appears to have a bug which requires an originator + # host and port to be sent after the path name to connect to + # when opening a direct streamlocal channel. + return await self._open_unix(session_factory, + b'direct-streamlocal@openssh.com', + path, String(''), UInt32(0)) + + async def accept(self, session_factory: SSHUNIXSessionFactory[AnyStr], + path: str) -> SSHUNIXSession[AnyStr]: + """Create a new forwarded UNIX session""" + + return await self._open_unix(session_factory, + b'forwarded-streamlocal@openssh.com', + path, String('')) + + def set_inbound_peer_names(self, dest_path: str) -> None: + """Set local and remote peer names for inbound connections""" + + self.set_extra_info(local_peername=dest_path, remote_peername='') + + +class SSHX11Channel(SSHForwardChannel[bytes]): + """SSH X11 channel""" + + async def open(self, session_factory: SSHTCPSessionFactory[bytes], + orig_host: str, orig_port: int) -> SSHTCPSession[bytes]: + """Open an SSH X11 channel""" + + self.set_extra_info(local_peername=(orig_host, orig_port), + remote_peername=('', 0)) + + return cast(SSHTCPSession[bytes], + await self._open_forward(session_factory, b'x11', + String(orig_host), + UInt32(orig_port))) + + def set_inbound_peer_names(self, orig_host: str, orig_port: int) -> None: + """Set local and remote peer name for inbound connections""" + + self.set_extra_info(local_peername=('', 0), + remote_peername=(orig_host, orig_port)) + + +class SSHAgentChannel(SSHForwardChannel[bytes]): + """SSH agent channel""" + + async def open(self, session_factory: SSHUNIXSessionFactory[bytes]) -> \ + SSHUNIXSession[bytes]: + """Open an SSH agent channel""" + + return cast(SSHUNIXSession[bytes], + await self._open_forward(session_factory, + b'auth-agent@openssh.com')) diff --git a/venv/lib/python3.12/site-packages/asyncssh/client.py b/venv/lib/python3.12/site-packages/asyncssh/client.py new file mode 100644 index 0000000..83f229d --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/client.py @@ -0,0 +1,409 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH client protocol handler""" + +from typing import TYPE_CHECKING, Optional + +from .auth import KbdIntPrompts, KbdIntResponse, PasswordChangeResponse +from .misc import MaybeAwait +from .public_key import KeyPairListArg, SSHKey + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .connection import SSHClientConnection + + +class SSHClient: + """SSH client protocol handler + + Applications may subclass this when implementing an SSH client + to receive callbacks when certain events occur on the SSH + connection. 
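+
+    As a minimal illustrative sketch (hypothetical application code,
+    not part of this module), an application might subclass this to
+    log the server's authentication banner::
+
+        class MyClient(asyncssh.SSHClient):
+            def auth_banner_received(self, msg: str, lang: str) -> None:
+                print('Auth banner:', msg)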
+ + For simple password or public key based authentication, nothing + needs to be defined here if the password or client keys are passed + in when the connection is created. However, to prompt interactively + or otherwise dynamically select these values, the methods + :meth:`password_auth_requested` and/or :meth:`public_key_auth_requested` + can be defined. Keyboard-interactive authentication is also supported + via :meth:`kbdint_auth_requested` and :meth:`kbdint_challenge_received`. + + If the server sends an authentication banner, the method + :meth:`auth_banner_received` will be called. + + If the server requires a password change, the method + :meth:`password_change_requested` will be called, followed by either + :meth:`password_changed` or :meth:`password_change_failed` depending + on whether the password change is successful. + + .. note:: The authentication callbacks described here can be + defined as coroutines. However, they may be cancelled if + they are running when the SSH connection is closed by + the server. If they attempt to catch the CancelledError + exception to perform cleanup, they should make sure to + re-raise it to allow AsyncSSH to finish its own cleanup. + + """ + + # pylint: disable=no-self-use,unused-argument + + def connection_made(self, conn: 'SSHClientConnection') -> None: + """Called when a connection is made + + This method is called as soon as the TCP connection completes. + The `conn` parameter should be stored if needed for later use. + + :param conn: + The connection which was successfully opened + :type conn: :class:`SSHClientConnection` + + """ + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Called when a connection is lost or closed + + This method is called when a connection is closed. If the + connection is shut down cleanly, *exc* will be `None`. + Otherwise, it will be an exception explaining the reason for + the disconnect. + + :param exc: + The exception which caused the connection to close, or + `None` if the connection closed cleanly + :type exc: :class:`Exception` + + """ + + def debug_msg_received(self, msg: str, lang: str, + always_display: bool) -> None: + """A debug message was received on this connection + + This method is called when the other end of the connection sends + a debug message. Applications should implement this method if + they wish to process these debug messages. + + :param msg: + The debug message sent + :param lang: + The language the message is in + :param always_display: + Whether or not to display the message + :type msg: `str` + :type lang: `str` + :type always_display: `bool` + + """ + + def validate_host_public_key(self, host: str, addr: str, + port: int, key: SSHKey) -> bool: + """Return whether key is an authorized key for this host + + Server host key validation can be supported by passing known + host keys in the `known_hosts` argument of + :func:`create_connection`. However, for more flexibility + in matching on the allowed set of keys, this method can be + implemented by the application to do the matching itself. It + should return `True` if the specified key is a valid host key + for the server being connected to. + + By default, this method returns `False` for all host keys. + + .. note:: This function only needs to report whether the + public key provided is a valid key for this + host. If it is, AsyncSSH will verify that the + server possesses the corresponding private key + before allowing the validation to succeed. 
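+
+        A sketch of pinning a single expected key (illustrative;
+        `pinned_key` is a hypothetical :class:`SSHKey` loaded elsewhere
+        by the application)::
+
+            def validate_host_public_key(self, host, addr, port, key):
+                return key.export_public_key() == \
+                    pinned_key.export_public_key()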
+ + :param host: + The hostname of the target host + :param addr: + The IP address of the target host + :param port: + The port number on the target host + :param key: + The public key sent by the server + :type host: `str` + :type addr: `str` + :type port: `int` + :type key: :class:`SSHKey` *public key* + + :returns: A `bool` indicating if the specified key is a valid + key for the target host + + """ + + return False # pragma: no cover + + def validate_host_ca_key(self, host: str, addr: str, + port: int, key: SSHKey) -> bool: + """Return whether key is an authorized CA key for this host + + Server host certificate validation can be supported by passing + known host CA keys in the `known_hosts` argument of + :func:`create_connection`. However, for more flexibility + in matching on the allowed set of keys, this method can be + implemented by the application to do the matching itself. It + should return `True` if the specified key is a valid certificate + authority key for the server being connected to. + + By default, this method returns `False` for all CA keys. + + .. note:: This function only needs to report whether the + public key provided is a valid CA key for this + host. If it is, AsyncSSH will verify that the + certificate is valid, that the host is one of + the valid principals for the certificate, and + that the server possesses the private key + corresponding to the public key in the certificate + before allowing the validation to succeed. + + :param host: + The hostname of the target host + :param addr: + The IP address of the target host + :param port: + The port number on the target host + :param key: + The public key which signed the certificate sent by the server + :type host: `str` + :type addr: `str` + :type port: `int` + :type key: :class:`SSHKey` *public key* + + :returns: A `bool` indicating if the specified key is a valid + CA key for the target host + + """ + + return False # pragma: no cover + + def auth_banner_received(self, msg: str, lang: str) -> None: + """An incoming authentication banner was received + + This method is called when the server sends a banner to display + during authentication. Applications should implement this method + if they wish to do something with the banner. + + :param msg: + The message the server wanted to display + :param lang: + The language the message is in + :type msg: `str` + :type lang: `str` + + """ + + def auth_completed(self) -> None: + """Authentication was completed successfully + + This method is called when authentication has completed + successfully. Applications may use this method to create + whatever client sessions and direct TCP/IP or UNIX domain + connections are needed and/or set up listeners for incoming + TCP/IP or UNIX domain connections coming from the server. + However, :func:`create_connection` now blocks until + authentication is complete, so any code which wishes to + use the SSH connection can simply follow that call and + doesn't need to be performed in a callback. + + """ + + def public_key_auth_requested(self) -> \ + MaybeAwait[Optional[KeyPairListArg]]: + """Public key authentication has been requested + + This method should return a private key corresponding to + the user that authentication is being attempted for. + + This method may be called multiple times and can return a + different key to try each time it is called. When there are + no keys left to try, it should return `None` to indicate + that some other authentication method should be tried. 
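+
+        A sketch that offers one key and then gives up (illustrative;
+        the key path is hypothetical)::
+
+            def public_key_auth_requested(self):
+                if getattr(self, '_key_sent', False):
+                    return None
+
+                self._key_sent = True
+                return asyncssh.read_private_key('/path/to/client_key')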
+
+        If client keys were provided when the connection was opened,
+        they will be tried before this method is called.
+
+        If blocking operations need to be performed to determine the
+        key to authenticate with, this method may be defined as a
+        coroutine.
+
+        :returns: A key as described in :ref:`SpecifyingPrivateKeys`
+                  or `None` to move on to another authentication
+                  method
+
+        """
+
+        return None # pragma: no cover
+
+    def password_auth_requested(self) -> MaybeAwait[Optional[str]]:
+        """Password authentication has been requested
+
+        This method should return a string containing the password
+        corresponding to the user that authentication is being
+        attempted for. It may be called multiple times and can
+        return a different password to try each time, but most
+        servers have a limit on the number of attempts allowed.
+        When there's no password left to try, this method should
+        return `None` to indicate that some other authentication
+        method should be tried.
+
+        If a password was provided when the connection was opened,
+        it will be tried before this method is called.
+
+        If blocking operations need to be performed to determine the
+        password to authenticate with, this method may be defined as
+        a coroutine.
+
+        :returns: A string containing the password to authenticate
+                  with or `None` to move on to another authentication
+                  method
+
+        """
+
+        return None # pragma: no cover
+
+    def password_change_requested(self, prompt: str, lang: str) -> \
+            MaybeAwait[PasswordChangeResponse]:
+        """A password change has been requested
+
+        This method is called when password authentication was
+        attempted and the user's password was expired on the
+        server. To request a password change, this method should
+        return a tuple of two strings containing the old and new
+        passwords. Otherwise, it should return `NotImplemented`.
+
+        If blocking operations need to be performed to determine the
+        passwords to authenticate with, this method may be defined
+        as a coroutine.
+
+        By default, this method returns `NotImplemented`.
+
+        :param prompt:
+            The prompt requesting that the user enter a new password
+        :param lang:
+            The language that the prompt is in
+        :type prompt: `str`
+        :type lang: `str`
+
+        :returns: A tuple of two strings containing the old and new
+                  passwords or `NotImplemented` if password changes
+                  aren't supported
+
+        """
+
+        return NotImplemented # pragma: no cover
+
+    def password_changed(self) -> None:
+        """The requested password change was successful
+
+        This method is called to indicate that a requested password
+        change was successful. It is generally followed by a call to
+        :meth:`auth_completed` since this means authentication was
+        also successful.
+
+        """
+
+    def password_change_failed(self) -> None:
+        """The requested password change has failed
+
+        This method is called to indicate that a requested password
+        change failed, generally because the requested new password
+        doesn't meet the password criteria on the remote system.
+        After this method is called, other forms of authentication
+        will automatically be attempted.
+
+        """
+
+    def kbdint_auth_requested(self) -> MaybeAwait[Optional[str]]:
+        """Keyboard-interactive authentication has been requested
+
+        This method should return a string containing a comma-separated
+        list of submethods that the server should use for
+        keyboard-interactive authentication. An empty string can be
+        returned to let the server pick the type of keyboard-interactive
+        authentication to perform.
If keyboard-interactive authentication + is not supported, `None` should be returned. + + By default, keyboard-interactive authentication is supported + if a password was provided when the :class:`SSHClient` was + created and it hasn't been sent yet. If the challenge is not + a password challenge, this authentication will fail. This + method and the :meth:`kbdint_challenge_received` method can be + overridden if other forms of challenge should be supported. + + If blocking operations need to be performed to determine the + submethods to request, this method may be defined as a + coroutine. + + :returns: A string containing the submethods the server should + use for authentication or `None` to move on to + another authentication method + + """ + + return NotImplemented # pragma: no cover + + def kbdint_challenge_received(self, name: str, instructions: str, + lang: str, prompts: KbdIntPrompts) -> \ + MaybeAwait[Optional[KbdIntResponse]]: + """A keyboard-interactive auth challenge has been received + + This method is called when the server sends a keyboard-interactive + authentication challenge. + + The return value should be a list of strings of the same length + as the number of prompts provided if the challenge can be + answered, or `None` to indicate that some other form of + authentication should be attempted. + + If blocking operations need to be performed to determine the + responses to authenticate with, this method may be defined + as a coroutine. + + By default, this method will look for a challenge consisting + of a single 'Password:' prompt, and call the method + :meth:`password_auth_requested` to provide the response. + It will also ignore challenges with no prompts (generally used + to provide instructions). Any other form of challenge will + cause this method to return `None` to move on to another + authentication method. + + :param name: + The name of the challenge + :param instructions: + Instructions to the user about how to respond to the challenge + :param lang: + The language the challenge is in + :param prompts: + The challenges the user should respond to and whether or + not the responses should be echoed when they are entered + :type name: `str` + :type instructions: `str` + :type lang: `str` + :type prompts: `list` of tuples of `str` and `bool` + + :returns: List of string responses to the challenge or `None` + to move on to another authentication method + + """ + + return None # pragma: no cover diff --git a/venv/lib/python3.12/site-packages/asyncssh/compression.py b/venv/lib/python3.12/site-packages/asyncssh/compression.py new file mode 100644 index 0000000..f014760 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/compression.py @@ -0,0 +1,157 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH compression handlers""" + +from typing import Callable, List, Optional +import zlib + +_cmp_algs: List[bytes] = [] +_default_cmp_algs: List[bytes] = [] + +_cmp_params = {} + +_cmp_compressors = {} +_cmp_decompressors = {} + + +class Compressor: + """Base class for data compressor""" + + def compress(self, data: bytes) -> Optional[bytes]: + """Compress data""" + + raise NotImplementedError + + +class Decompressor: + """Base class for data decompressor""" + + def decompress(self, data: bytes) -> Optional[bytes]: + """Decompress data""" + + raise NotImplementedError + + +_CompressorType = Callable[[], Optional[Compressor]] +_DecompressorType = Callable[[], Optional[Decompressor]] + + +def _none() -> None: + """Compressor/decompressor for no compression""" + + return None + + +class _ZLibCompress(Compressor): + """Wrapper class to force a sync flush and handle exceptions""" + + def __init__(self) -> None: + self._comp = zlib.compressobj() + + def compress(self, data: bytes) -> Optional[bytes]: + """Compress data using zlib compression with sync flush""" + + try: + return self._comp.compress(data) + \ + self._comp.flush(zlib.Z_SYNC_FLUSH) + except zlib.error: # pragma: no cover + return None + + +class _ZLibDecompress(Decompressor): + """Wrapper class to handle exceptions""" + + def __init__(self) -> None: + self._decomp = zlib.decompressobj() + + def decompress(self, data: bytes) -> Optional[bytes]: + """Decompress data using zlib compression""" + + try: + return self._decomp.decompress(data) + except zlib.error: # pragma: no cover + return None + + +def register_compression_alg(alg: bytes, compressor: _CompressorType, + decompressor: _DecompressorType, + after_auth: bool, default: bool) -> None: + """Register a compression algorithm""" + + _cmp_algs.append(alg) + + if default: + _default_cmp_algs.append(alg) + + _cmp_params[alg] = after_auth + + _cmp_compressors[alg] = compressor + _cmp_decompressors[alg] = decompressor + + +def get_compression_algs() -> List[bytes]: + """Return supported compression algorithms""" + + return _cmp_algs + + +def get_default_compression_algs() -> List[bytes]: + """Return default compression algorithms""" + + return _default_cmp_algs + + +def get_compression_params(alg: bytes) -> bool: + """Get parameters of a compression algorithm + + This function returns whether or not a compression algorithm should + be delayed until after authentication completes. + + """ + + return _cmp_params[alg] + + +def get_compressor(alg: bytes) -> Optional[Compressor]: + """Return an instance of a compressor + + This function returns an object that can be used for data compression. + + """ + + return _cmp_compressors[alg]() + + +def get_decompressor(alg: bytes) -> Optional[Decompressor]: + """Return an instance of a decompressor + + This function returns an object that can be used for data decompression. 
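+
+    A round-trip sketch using the zlib algorithm registered below
+    (illustrative)::
+
+        comp = get_compressor(b'zlib')
+        decomp = get_decompressor(b'zlib')
+        assert decomp.decompress(comp.compress(b'data')) == b'data'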
+ + """ + + return _cmp_decompressors[alg]() + +register_compression_alg(b'zlib@openssh.com', + _ZLibCompress, _ZLibDecompress, True, True) +register_compression_alg(b'zlib', + _ZLibCompress, _ZLibDecompress, False, False) +register_compression_alg(b'none', + _none, _none, False, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/config.py b/venv/lib/python3.12/site-packages/asyncssh/config.py new file mode 100644 index 0000000..d481cc2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/config.py @@ -0,0 +1,635 @@ +# Copyright (c) 2020-2022 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Parser for OpenSSH config files""" + +import os +import re +import shlex +import socket +import subprocess + +from hashlib import sha1 +from pathlib import Path, PurePath +from subprocess import DEVNULL +from typing import Callable, Dict, List, NoReturn, Optional, Sequence +from typing import Set, Tuple, Union, cast + +from .constants import DEFAULT_PORT +from .logging import logger +from .misc import DefTuple, FilePath, ip_address +from .pattern import HostPatternList, WildcardPatternList + + +ConfigPaths = Union[None, FilePath, Sequence[FilePath]] + + +def _exec(cmd: str) -> bool: + """Execute a command and return if exit status is 0""" + + return subprocess.run(cmd, check=False, shell=True, stdin=DEVNULL, + stdout=DEVNULL, stderr=DEVNULL).returncode == 0 + + +class ConfigParseError(ValueError): + """Configuration parsing exception""" + + +class SSHConfig: + """Settings from an OpenSSH config file""" + + _conditionals = {'match'} + _no_split: Set[str] = set() + _percent_expand = {'AuthorizedKeysFile'} + _handlers: Dict[str, Tuple[str, Callable]] = {} + + def __init__(self, last_config: Optional['SSHConfig'], reload: bool): + if last_config: + self._last_options = last_config.get_options(reload) + else: + self._last_options = {} + + self._default_path = Path('~', '.ssh').expanduser() + self._path = Path() + self._line_no = 0 + self._matching = True + self._options = self._last_options.copy() + self._tokens: Dict[str, str] = {} + + self.loaded = False + + def _error(self, reason: str, *args: object) -> NoReturn: + """Raise a configuration parsing error""" + + raise ConfigParseError('%s line %s: %s' % (self._path, self._line_no, + reason % args)) + + def _match_val(self, match: str) -> object: + """Return the value to match against in a match condition""" + + raise NotImplementedError + + def _set_tokens(self) -> None: + """Set the tokens available for percent expansion""" + + raise NotImplementedError + + def _expand_val(self, value: str) -> str: + """Perform percent token expansion on a string""" + + last_idx = 0 + result: List[str] = [] + + for match in re.finditer(r'%', value): + idx = match.start() + + if idx < last_idx: + continue + + try: + token = value[idx+1] + result.extend([value[last_idx:idx], self._tokens[token]]) + last_idx = idx + 2 + except 
IndexError: + raise ConfigParseError('Invalid token substitution') from None + except KeyError: + if token == 'd': + raise ConfigParseError('Home directory is ' + 'not available') from None + elif token == 'i': + raise ConfigParseError('User id not available') from None + else: + raise ConfigParseError('Invalid token substitution: %s' % + value[idx+1]) from None + + result.append(value[last_idx:]) + return ''.join(result) + + def _include(self, option: str, args: List[str]) -> None: + """Read config from a list of other config files""" + + # pylint: disable=unused-argument + + old_path = self._path + + for pattern in args: + path = Path(pattern).expanduser() + + if path.anchor: + pattern = str(Path(*path.parts[1:])) + path = Path(path.anchor) + else: + path = self._default_path + + paths = list(path.glob(pattern)) + + if not paths: + logger.debug1('Config pattern "%s" matched no files', pattern) + + for path in paths: + self.parse(path) + + self._path = old_path + args.clear() + + def _match(self, option: str, args: List[str]) -> None: + """Begin a conditional block""" + + # pylint: disable=unused-argument + + while args: + match = args.pop(0).lower() + + if match == 'all': + self._matching = True + continue + + match_val = self._match_val(match) + + if match != 'exec' and match_val is None: + self._error('Invalid match condition') + + try: + if match == 'exec': + self._matching = _exec(args.pop(0)) + elif match in ('address', 'localaddress'): + host_pat = HostPatternList(args.pop(0)) + ip = ip_address(cast(str, match_val)) \ + if match_val else None + self._matching = host_pat.matches(None, match_val, ip) + else: + wild_pat = WildcardPatternList(args.pop(0)) + self._matching = wild_pat.matches(match_val) + except IndexError: + self._error('Missing %s match pattern', match) + + if not self._matching: + args.clear() + break + + def _set_bool(self, option: str, args: List[str]) -> None: + """Set a boolean config option""" + + value_str = args.pop(0).lower() + + if value_str in ('yes', 'true'): + value = True + elif value_str in ('no', 'false'): + value = False + else: + self._error('Invalid %s boolean value: %s', option, value_str) + + if option not in self._options: + self._options[option] = value + + def _set_int(self, option: str, args: List[str]) -> None: + """Set an integer config option""" + + value_str = args.pop(0) + + try: + value = int(value_str) + except ValueError: + self._error('Invalid %s integer value: %s', option, value_str) + + if option not in self._options: + self._options[option] = value + + def _set_string(self, option: str, args: List[str]) -> None: + """Set a string config option""" + + value_str = args.pop(0) + + if value_str.lower() == 'none': + value = None + else: + value = value_str + + if option not in self._options: + self._options[option] = value + + def _append_string(self, option: str, args: List[str]) -> None: + """Append a string config option to a list""" + + value_str = args.pop(0) + + if value_str.lower() != 'none': + if option in self._options: + cast(List[str], self._options[option]).append(value_str) + else: + self._options[option] = [value_str] + else: + if option not in self._options: + self._options[option] = [] + + def _set_string_list(self, option: str, args: List[str]) -> None: + """Set whitespace-separated string config options as a list""" + + if option not in self._options: + if len(args) == 1 and args[0].lower() == 'none': + self._options[option] = [] + else: + self._options[option] = args[:] + + args.clear() + + def 
_append_string_list(self, option: str, args: List[str]) -> None: + """Append whitespace-separated string config options to a list""" + + if option in self._options: + cast(List[str], self._options[option]).extend(args) + else: + self._options[option] = args[:] + + args.clear() + + def _set_address_family(self, option: str, args: List[str]) -> None: + """Set an address family config option""" + + value_str = args.pop(0).lower() + + if value_str == 'any': + value = socket.AF_UNSPEC + elif value_str == 'inet': + value = socket.AF_INET + elif value_str == 'inet6': + value = socket.AF_INET6 + else: + self._error('Invalid %s value: %s', option, value_str) + + if option not in self._options: + self._options[option] = value + + def _set_rekey_limits(self, option: str, args: List[str]) -> None: + """Set rekey limits config option""" + + byte_limit: Union[str, Tuple[()]] = args.pop(0).lower() + + if byte_limit == 'default': + byte_limit = () + + if args: + time_limit: Optional[Union[str, Tuple[()]]] = args.pop(0).lower() + + if time_limit == 'none': + time_limit = None + else: + time_limit = () + + if option not in self._options: + self._options[option] = byte_limit, time_limit + + def parse(self, path: Path) -> None: + """Parse an OpenSSH config file and return matching declarations""" + + self._path = path + self._line_no = 0 + self._matching = True + self._tokens = {'%': '%'} + + logger.debug1('Reading config from "%s"', path) + + with open(path) as file: + for line in file: + self._line_no += 1 + + line = line.strip() + if not line or line[0] == '#': + continue + + try: + split_args = shlex.split(line) + except ValueError as exc: + self._error(str(exc)) + + args = [] + + for arg in split_args: + if arg.startswith('='): + if len(arg) > 1: + args.append(arg[1:]) + elif arg.endswith('='): + args.append(arg[:-1]) + elif '=' in arg: + arg, val = arg.split('=', 1) + args.append(arg) + args.append(val) + else: + args.append(arg) + + option = args.pop(0) + loption = option.lower() + + if loption in self._no_split: + args = [line.lstrip()[len(loption):].strip()] + + if not self._matching and loption not in self._conditionals: + continue + + try: + option, handler = self._handlers[loption] + except KeyError: + continue + + if not args: + self._error('Missing %s value', option) + + handler(self, option, args) + + if args: + self._error('Extra data at end: %s', ' '.join(args)) + + self._set_tokens() + + for option in self._percent_expand: + try: + value = self._options[option] + except KeyError: + pass + else: + if isinstance(value, list): + value = [self._expand_val(item) for item in value] + elif isinstance(value, str): + value = self._expand_val(value) + + self._options[option] = value + + def get_options(self, reload: bool) -> Dict[str, object]: + """Return options to base a new config object on""" + + return self._last_options.copy() if reload else self._options.copy() + + @classmethod + def load(cls, last_config: Optional['SSHConfig'], + config_paths: ConfigPaths, reload: bool, + *args: object) -> 'SSHConfig': + """Load a list of OpenSSH config files into a config object""" + + config = cls(last_config, reload, *args) + + if config_paths: + if isinstance(config_paths, (str, PurePath)): + paths: Sequence[FilePath] = [config_paths] + else: + paths = config_paths + + for path in paths: + config.parse(Path(path)) + + config.loaded = True + + return config + + def get(self, option: str, default: object = None) -> object: + """Get the value of a config option""" + + return self._options.get(option, default) 
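+
+    # Note: get_compression_algs() below mirrors OpenSSH's semantics for
+    # the Compression option: "yes" prefers zlib@openssh.com, while "no"
+    # only demotes compression to lowest preference instead of removing
+    # it from the list entirely.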
+ + def get_compression_algs(self) -> DefTuple[str]: + """Return the compression algorithms to use""" + + compression = self.get('Compression') + + if compression is None: + return () + elif compression: + return 'zlib@openssh.com,zlib,none' + else: + return 'none,zlib@openssh.com,zlib' + + +class SSHClientConfig(SSHConfig): + """Settings from an OpenSSH client config file""" + + _conditionals = {'host', 'match'} + _no_split = {'remotecommand'} + _percent_expand = {'CertificateFile', 'IdentityAgent', + 'IdentityFile', 'ProxyCommand', 'RemoteCommand'} + + def __init__(self, last_config: 'SSHConfig', reload: bool, + local_user: str, user: str, host: str, port: int) -> None: + super().__init__(last_config, reload) + + self._local_user = local_user + self._orig_host = host + + if user != (): + self._options['User'] = user + + if port != (): + self._options['Port'] = port + + def _match_val(self, match: str) -> object: + """Return the value to match against in a match condition""" + + if match == 'host': + return self._options.get('Hostname', self._orig_host) + elif match == 'originalhost': + return self._orig_host + elif match == 'localuser': + return self._local_user + elif match == 'user': + return self._options.get('User', self._local_user) + else: + return None + + def _match_host(self, option: str, args: List[str]) -> None: + """Begin a conditional block matching on host""" + + # pylint: disable=unused-argument + + pattern = ','.join(args) + self._matching = WildcardPatternList(pattern).matches(self._orig_host) + args.clear() + + def _set_hostname(self, option: str, args: List[str]) -> None: + """Set hostname config option""" + + value = args.pop(0) + + if option not in self._options: + self._tokens['h'] = \ + cast(str, self._options.get(option, self._orig_host)) + self._options[option] = self._expand_val(value) + + def _set_request_tty(self, option: str, args: List[str]) -> None: + """Set a pseudo-terminal request config option""" + + value_str = args.pop(0).lower() + + if value_str in ('yes', 'true'): + value: Union[bool, str] = True + elif value_str in ('no', 'false'): + value = False + elif value_str not in ('force', 'auto'): + self._error('Invalid %s value: %s', option, value_str) + else: + value = value_str + + if option not in self._options: + self._options[option] = value + + def _set_tokens(self) -> None: + """Set the tokens available for percent expansion""" + + local_host = socket.gethostname() + + idx = local_host.find('.') + short_local_host = local_host if idx < 0 else local_host[:idx] + + host = cast(str, self._options.get('Hostname', self._orig_host)) + port = str(self._options.get('Port', DEFAULT_PORT)) + user = cast(str, self._options.get('User') or self._local_user) + home = os.path.expanduser('~') + + conn_info = ''.join((local_host, host, port, user)) + conn_hash = sha1(conn_info.encode('utf-8')).hexdigest() + + self._tokens.update({'C': conn_hash, + 'h': host, + 'L': short_local_host, + 'l': local_host, + 'n': self._orig_host, + 'p': port, + 'r': user, + 'u': self._local_user}) + + if home != '~': + self._tokens['d'] = home + + if hasattr(os, 'getuid'): + self._tokens['i'] = str(os.getuid()) + + _handlers = {option.lower(): (option, handler) for option, handler in ( + ('Host', _match_host), + ('Match', SSHConfig._match), + ('Include', SSHConfig._include), + + ('AddressFamily', SSHConfig._set_address_family), + ('BindAddress', SSHConfig._set_string), + ('CASignatureAlgorithms', SSHConfig._set_string), + ('CertificateFile', SSHConfig._append_string), + 
('ChallengeResponseAuthentication', SSHConfig._set_bool), + ('Ciphers', SSHConfig._set_string), + ('Compression', SSHConfig._set_bool), + ('ConnectTimeout', SSHConfig._set_int), + ('EnableSSHKeySign', SSHConfig._set_bool), + ('ForwardAgent', SSHConfig._set_bool), + ('ForwardX11Trusted', SSHConfig._set_bool), + ('GlobalKnownHostsFile', SSHConfig._set_string_list), + ('GSSAPIAuthentication', SSHConfig._set_bool), + ('GSSAPIDelegateCredentials', SSHConfig._set_bool), + ('GSSAPIKeyExchange', SSHConfig._set_bool), + ('HostbasedAuthentication', SSHConfig._set_bool), + ('HostKeyAlgorithms', SSHConfig._set_string), + ('Hostname', _set_hostname), + ('HostKeyAlias', SSHConfig._set_string), + ('IdentitiesOnly', SSHConfig._set_bool), + ('IdentityAgent', SSHConfig._set_string), + ('IdentityFile', SSHConfig._append_string), + ('KbdInteractiveAuthentication', SSHConfig._set_bool), + ('KexAlgorithms', SSHConfig._set_string), + ('MACs', SSHConfig._set_string), + ('PasswordAuthentication', SSHConfig._set_bool), + ('PKCS11Provider', SSHConfig._set_string), + ('PreferredAuthentications', SSHConfig._set_string), + ('Port', SSHConfig._set_int), + ('ProxyCommand', SSHConfig._set_string_list), + ('ProxyJump', SSHConfig._set_string), + ('PubkeyAuthentication', SSHConfig._set_bool), + ('RekeyLimit', SSHConfig._set_rekey_limits), + ('RemoteCommand', SSHConfig._set_string), + ('RequestTTY', _set_request_tty), + ('SendEnv', SSHConfig._append_string_list), + ('ServerAliveCountMax', SSHConfig._set_int), + ('ServerAliveInterval', SSHConfig._set_int), + ('SetEnv', SSHConfig._append_string_list), + ('TCPKeepAlive', SSHConfig._set_bool), + ('User', SSHConfig._set_string), + ('UserKnownHostsFile', SSHConfig._set_string_list) + )} + + +class SSHServerConfig(SSHConfig): + """Settings from an OpenSSH server config file""" + + def __init__(self, last_config: 'SSHConfig', reload: bool, + local_addr: str, local_port: int, user: str, + host: str, addr: str) -> None: + super().__init__(last_config, reload) + + self._local_addr = local_addr + self._local_port = local_port + self._user = user + self._host = host or addr + self._addr = addr + + def _match_val(self, match: str) -> object: + """Return the value to match against in a match condition""" + + if match == 'localaddress': + return self._local_addr + elif match == 'localport': + return str(self._local_port) + elif match == 'user': + return self._user + elif match == 'host': + return self._host + elif match == 'address': + return self._addr + else: + return None + + def _set_tokens(self) -> None: + """Set the tokens available for percent expansion""" + + self._tokens.update({'u': self._user}) + + _handlers = {option.lower(): (option, handler) for option, handler in ( + ('Match', SSHConfig._match), + ('Include', SSHConfig._include), + + ('AddressFamily', SSHConfig._set_address_family), + ('AuthorizedKeysFile', SSHConfig._set_string_list), + ('AllowAgentForwarding', SSHConfig._set_bool), + ('BindAddress', SSHConfig._set_string), + ('CASignatureAlgorithms', SSHConfig._set_string), + ('ChallengeResponseAuthentication', SSHConfig._set_bool), + ('Ciphers', SSHConfig._set_string), + ('ClientAliveCountMax', SSHConfig._set_int), + ('ClientAliveInterval', SSHConfig._set_int), + ('Compression', SSHConfig._set_bool), + ('GSSAPIAuthentication', SSHConfig._set_bool), + ('GSSAPIKeyExchange', SSHConfig._set_bool), + ('HostbasedAuthentication', SSHConfig._set_bool), + ('HostCertificate', SSHConfig._append_string), + ('HostKey', SSHConfig._append_string), + ('KbdInteractiveAuthentication', 
SSHConfig._set_bool), + ('KexAlgorithms', SSHConfig._set_string), + ('LoginGraceTime', SSHConfig._set_int), + ('MACs', SSHConfig._set_string), + ('PasswordAuthentication', SSHConfig._set_bool), + ('PermitTTY', SSHConfig._set_bool), + ('Port', SSHConfig._set_int), + ('PubkeyAuthentication', SSHConfig._set_bool), + ('RekeyLimit', SSHConfig._set_rekey_limits), + ('TCPKeepAlive', SSHConfig._set_bool), + ('UseDNS', SSHConfig._set_bool) + )} diff --git a/venv/lib/python3.12/site-packages/asyncssh/connection.py b/venv/lib/python3.12/site-packages/asyncssh/connection.py new file mode 100644 index 0000000..ffe5703 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/connection.py @@ -0,0 +1,8965 @@ +# Copyright (c) 2013-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH connection handlers""" + +import asyncio +import functools +import getpass +import inspect +import io +import os +import shlex +import socket +import sys +import tempfile +import time + +from collections import OrderedDict +from functools import partial +from pathlib import Path +from types import TracebackType +from typing import TYPE_CHECKING, Any, AnyStr, Awaitable, Callable, Dict +from typing import List, Mapping, Optional, Sequence, Set, Tuple, Type +from typing import TypeVar, Union, cast +from typing_extensions import Protocol + +from .agent import SSHAgentClient, SSHAgentListener + +from .auth import Auth, ClientAuth, KbdIntChallenge, KbdIntPrompts +from .auth import KbdIntResponse, PasswordChangeResponse +from .auth import get_supported_client_auth_methods, lookup_client_auth +from .auth import get_supported_server_auth_methods, lookup_server_auth + +from .auth_keys import SSHAuthorizedKeys, read_authorized_keys + +from .channel import SSHChannel, SSHClientChannel, SSHServerChannel +from .channel import SSHTCPChannel, SSHUNIXChannel +from .channel import SSHX11Channel, SSHAgentChannel + +from .client import SSHClient + +from .compression import Compressor, Decompressor, get_compression_algs +from .compression import get_default_compression_algs, get_compression_params +from .compression import get_compressor, get_decompressor + +from .config import ConfigPaths, SSHConfig, SSHClientConfig, SSHServerConfig + +from .constants import DEFAULT_LANG, DEFAULT_PORT +from .constants import DISC_BY_APPLICATION +from .constants import EXTENDED_DATA_STDERR +from .constants import MSG_DISCONNECT, MSG_IGNORE, MSG_UNIMPLEMENTED, MSG_DEBUG +from .constants import MSG_SERVICE_REQUEST, MSG_SERVICE_ACCEPT, MSG_EXT_INFO +from .constants import MSG_CHANNEL_OPEN, MSG_CHANNEL_OPEN_CONFIRMATION +from .constants import MSG_CHANNEL_OPEN_FAILURE +from .constants import MSG_CHANNEL_FIRST, MSG_CHANNEL_LAST +from .constants import MSG_KEXINIT, MSG_NEWKEYS, MSG_KEX_FIRST, MSG_KEX_LAST +from .constants import MSG_USERAUTH_REQUEST, MSG_USERAUTH_FAILURE +from .constants import MSG_USERAUTH_SUCCESS, 
MSG_USERAUTH_BANNER +from .constants import MSG_USERAUTH_FIRST, MSG_USERAUTH_LAST +from .constants import MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS +from .constants import MSG_REQUEST_FAILURE +from .constants import OPEN_ADMINISTRATIVELY_PROHIBITED, OPEN_CONNECT_FAILED +from .constants import OPEN_UNKNOWN_CHANNEL_TYPE + +from .encryption import Encryption, get_encryption_algs +from .encryption import get_default_encryption_algs +from .encryption import get_encryption_params, get_encryption + +from .forward import SSHForwarder + +from .gss import GSSBase, GSSClient, GSSServer, GSSError + +from .kex import Kex, get_kex_algs, get_default_kex_algs +from .kex import expand_kex_algs, get_kex + +from .keysign import KeySignPath, SSHKeySignKeyPair +from .keysign import find_keysign, get_keysign_keys + +from .known_hosts import KnownHostsArg, match_known_hosts + +from .listener import ListenKey, SSHListener +from .listener import SSHTCPClientListener, SSHUNIXClientListener +from .listener import TCPListenerFactory, UNIXListenerFactory +from .listener import create_tcp_forward_listener, create_unix_forward_listener +from .listener import create_socks_listener + +from .logging import SSHLogger, logger + +from .mac import get_mac_algs, get_default_mac_algs + +from .misc import BytesOrStr, DefTuple, FilePath, HostPort, IPNetwork +from .misc import MaybeAwait, OptExcInfo, Options, SockAddr +from .misc import ChannelListenError, ChannelOpenError, CompressionError +from .misc import DisconnectError, ConnectionLost, HostKeyNotVerifiable +from .misc import KeyExchangeFailed, IllegalUserName, MACError +from .misc import PasswordChangeRequired, PermissionDenied, ProtocolError +from .misc import ProtocolNotSupported, ServiceNotAvailable +from .misc import async_context_manager, construct_disc_error +from .misc import get_symbol_names, ip_address, map_handler_name +from .misc import parse_byte_count, parse_time_interval + +from .packet import Boolean, Byte, NameList, String, UInt32, PacketDecodeError +from .packet import SSHPacket, SSHPacketHandler, SSHPacketLogger + +from .pattern import WildcardPattern + +from .pkcs11 import load_pkcs11_keys + +from .process import PIPE, ProcessSource, ProcessTarget +from .process import SSHServerProcessFactory, SSHCompletedProcess +from .process import SSHClientProcess, SSHServerProcess + +from .public_key import CERT_TYPE_HOST, CERT_TYPE_USER, KeyImportError +from .public_key import CertListArg, IdentityListArg, KeyListArg, SigningKey +from .public_key import KeyPairListArg, X509CertPurposes, SSHKey, SSHKeyPair +from .public_key import SSHCertificate, SSHOpenSSHCertificate +from .public_key import SSHX509Certificate, SSHX509CertificateChain +from .public_key import decode_ssh_public_key, decode_ssh_certificate +from .public_key import get_public_key_algs, get_default_public_key_algs +from .public_key import get_certificate_algs, get_default_certificate_algs +from .public_key import get_x509_certificate_algs +from .public_key import get_default_x509_certificate_algs +from .public_key import load_keypairs, load_default_keypairs +from .public_key import load_public_keys, load_default_host_public_keys +from .public_key import load_certificates +from .public_key import load_identities, load_default_identities + +from .saslprep import saslprep, SASLPrepError + +from .server import SSHServer + +from .session import DataType, TermModesArg, TermSizeArg +from .session import SSHClientSession, SSHServerSession +from .session import SSHTCPSession, SSHUNIXSession +from .session import 
SSHClientSessionFactory, SSHTCPSessionFactory +from .session import SSHUNIXSessionFactory + +from .sftp import MIN_SFTP_VERSION, SFTPClient, SFTPServer +from .sftp import start_sftp_client + +from .stream import SSHReader, SSHWriter, SFTPServerFactory +from .stream import SSHSocketSessionFactory, SSHServerSessionFactory +from .stream import SSHClientStreamSession, SSHServerStreamSession +from .stream import SSHTCPStreamSession, SSHUNIXStreamSession + +from .subprocess import SSHSubprocessTransport, SSHSubprocessProtocol +from .subprocess import SubprocessFactory, SSHSubprocessWritePipe + +from .version import __version__ + +from .x11 import SSHX11ClientForwarder +from .x11 import SSHX11ClientListener, SSHX11ServerListener +from .x11 import create_x11_client_listener, create_x11_server_listener + +if TYPE_CHECKING: + # pylint: disable=unused-import + from .crypto import X509NamePattern + + +_ClientFactory = Callable[[], SSHClient] +_ServerFactory = Callable[[], SSHServer] +_ProtocolFactory = Union[_ClientFactory, _ServerFactory] + +_Conn = TypeVar('_Conn', 'SSHClientConnection', 'SSHServerConnection') +_ConnSelf = TypeVar('_ConnSelf', bound='SSHConnection') + +class _TunnelProtocol(Protocol): + """Base protocol for connections to tunnel SSH over""" + + def close(self) -> None: + """Close this tunnel""" + +class _TunnelConnectorProtocol(_TunnelProtocol, Protocol): + """Protocol to open a connection to tunnel an SSH connection over""" + + async def create_connection( + self, session_factory: SSHTCPSessionFactory[bytes], + remote_host: str, remote_port: int) -> \ + Tuple[SSHTCPChannel[bytes], SSHTCPSession[bytes]]: + """Create an outbound tunnel connection""" + +class _TunnelListenerProtocol(_TunnelProtocol, Protocol): + """Protocol to open a listener to tunnel SSH connections over""" + + async def create_server(self, session_factory: TCPListenerFactory, + listen_host: str, listen_port: int) -> SSHListener: + """Create an inbound tunnel listener""" + +_AcceptHandler = Optional[Callable[['SSHConnection'], MaybeAwait[None]]] +_ErrorHandler = Optional[Callable[['SSHConnection', + Optional[Exception]], None]] + +_OpenHandler = Callable[[SSHPacket], Tuple[SSHClientChannel, SSHClientSession]] +_PacketHandler = Callable[[SSHPacket], None] + +_AlgsArg = DefTuple[Union[str, Sequence[str]]] +_AuthArg = DefTuple[bool] +_AuthKeysArg = DefTuple[Union[None, str, List[str], SSHAuthorizedKeys]] +_ClientHostKey = Union[SSHKeyPair, SSHKeySignKeyPair] +_ClientKeysArg = Union[KeyListArg, KeyPairListArg] + +_Env = Optional[Union[Mapping[str, str], Sequence[str]]] +_SendEnv = Optional[Sequence[str]] + +_GlobalRequest = Tuple[Optional[_PacketHandler], SSHPacket, bool] +_GlobalRequestResult = Tuple[int, SSHPacket] +_KeyOrCertOptions = Mapping[str, object] +_ListenerArg = Union[bool, SSHListener] +_ProxyCommand = Optional[Sequence[str]] +_RequestPTY = Union[bool, str] + +_TCPServerHandlerFactory = Callable[[str, int], SSHSocketSessionFactory] +_UNIXServerHandlerFactory = Callable[[], SSHSocketSessionFactory] + +_TunnelConnector = Union[None, str, _TunnelConnectorProtocol] +_TunnelListener = Union[None, str, _TunnelListenerProtocol] + +_VersionArg = DefTuple[BytesOrStr] + +SSHAcceptHandler = Callable[[str, int], MaybeAwait[bool]] + +# SSH service names +_USERAUTH_SERVICE = b'ssh-userauth' +_CONNECTION_SERVICE = b'ssh-connection' + +# Max banner and version line length and count +_MAX_BANNER_LINES = 1024 +_MAX_BANNER_LINE_LEN = 8192 +_MAX_VERSION_LINE_LEN = 255 + +# Max allowed username length +_MAX_USERNAME_LEN = 1024 
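The limits above bound the identification exchange that precedes key exchange: each side sends one `SSH-2.0-...` line, and a client will skip up to `_MAX_BANNER_LINES` non-version lines of at most `_MAX_BANNER_LINE_LEN` bytes each that a server may emit first. A minimal standalone sketch of the length and character checks that `_validate_version` below applies to an application-supplied version suffix (the helper name and sample version string are illustrative only, not asyncssh API):

# Standalone illustration of the version-string checks enforced by
# _validate_version() later in this module; not part of asyncssh.
def check_version_suffix(version: bytes) -> bytes:
    # 'SSH-2.0-' (8 bytes) plus the trailing CRLF (2 bytes) must fit in a
    # 255-byte identification line, leaving at most 245 bytes for the suffix
    if len(version) > 245:
        raise ValueError('Version string is too long')

    # RFC 4253 requires printable US-ASCII in the identification string
    for b in version:
        if b < 0x20 or b > 0x7e:
            raise ValueError('Version string must be printable ASCII')

    return b'SSH-2.0-' + version + b'\r\n'

print(check_version_suffix(b'AsyncSSH_2.14.0'))  # b'SSH-2.0-AsyncSSH_2.14.0\r\n'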
+ +# Default rekey parameters +_DEFAULT_REKEY_BYTES = 1 << 30 # 1 GiB +_DEFAULT_REKEY_SECONDS = 3600 # 1 hour + +# Default login timeout +_DEFAULT_LOGIN_TIMEOUT = 120 # 2 minutes + +# Default keepalive interval and count max +_DEFAULT_KEEPALIVE_INTERVAL = 0 # disabled by default +_DEFAULT_KEEPALIVE_COUNT_MAX = 3 + +# Default channel parameters +_DEFAULT_WINDOW = 2*1024*1024 # 2 MiB +_DEFAULT_MAX_PKTSIZE = 32768 # 32 kiB + +# Default line editor parameters +_DEFAULT_LINE_HISTORY = 1000 # 1000 lines +_DEFAULT_MAX_LINE_LENGTH = 1024 # 1024 characters + + +async def _open_proxy( + loop: asyncio.AbstractEventLoop, command: Sequence[str], + conn_factory: Callable[[], _Conn]) -> _Conn: + """Open a tunnel running a proxy command""" + + class _ProxyCommandTunnel(asyncio.SubprocessProtocol): + """SSH proxy command tunnel""" + + def __init__(self) -> None: + super().__init__() + + self._transport: Optional[asyncio.SubprocessTransport] = None + self._stdin: Optional[asyncio.WriteTransport] = None + self._conn = conn_factory() + self._close_event = asyncio.Event() + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Return extra information associated with this tunnel""" + + assert self._transport is not None + return self._transport.get_extra_info(name, default) + + def get_conn(self) -> _Conn: + """Return the connection associated with this tunnel""" + + return self._conn + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle startup of the subprocess""" + + self._transport = cast(asyncio.SubprocessTransport, transport) + self._stdin = cast(asyncio.WriteTransport, + self._transport.get_pipe_transport(0)) + self._conn.connection_made(cast(asyncio.BaseTransport, self)) + + def pipe_data_received(self, fd: int, data: bytes) -> None: + """Handle data received from this tunnel""" + + # pylint: disable=unused-argument + + self._conn.data_received(data) + + def pipe_connection_lost(self, fd: int, + exc: Optional[Exception]) -> None: + """Handle when this tunnel is closed""" + + # pylint: disable=unused-argument + + self._conn.connection_lost(exc) + + def write(self, data: bytes) -> None: + """Write data to this tunnel""" + + assert self._stdin is not None + self._stdin.write(data) + + def abort(self) -> None: + """Forcibly close this tunnel""" + + self.close() + + def close(self) -> None: + """Close this tunnel""" + + if self._transport: # pragma: no cover + self._transport.close() + + self._close_event.set() + + + _, tunnel = await loop.subprocess_exec(_ProxyCommandTunnel, *command) + + return cast(_Conn, cast(_ProxyCommandTunnel, tunnel).get_conn()) + + +async def _open_tunnel(tunnel: object, passphrase: Optional[BytesOrStr]) -> \ + Optional['SSHClientConnection']: + """Parse and open connection to tunnel over""" + + username: DefTuple[str] + port: DefTuple[int] + + if isinstance(tunnel, str): + if '@' in tunnel: + username, host = tunnel.rsplit('@', 1) + else: + username, host = (), tunnel + + if ':' in host: + host, port_str = host.rsplit(':', 1) + port = int(port_str) + else: + port = () + + return await connect(host, port, username=username, + passphrase=passphrase) + else: + return None + + +async def _connect(options: 'SSHConnectionOptions', + loop: asyncio.AbstractEventLoop, flags: int, + sock: Optional[socket.socket], + conn_factory: Callable[[], _Conn], msg: str) -> _Conn: + """Make outbound TCP or SSH tunneled connection""" + + host = options.host + port = options.port + tunnel = options.tunnel + family = options.family + local_addr = 
options.local_addr + proxy_command = options.proxy_command + free_conn = True + + options.waiter = loop.create_future() + + new_tunnel = await _open_tunnel(tunnel, options.passphrase) + tunnel: _TunnelConnectorProtocol + + try: + if sock: + logger.info('%s already-connected socket', msg) + + _, session = await loop.create_connection(conn_factory, sock=sock) + + conn = cast(_Conn, session) + elif new_tunnel: + new_tunnel.logger.info('%s %s via %s', msg, (host, port), tunnel) + + # pylint: disable=broad-except + try: + _, tunnel_session = await new_tunnel.create_connection( + cast(SSHTCPSessionFactory[bytes], conn_factory), + host, port) + except Exception: + new_tunnel.close() + await new_tunnel.wait_closed() + raise + else: + conn = cast(_Conn, tunnel_session) + conn.set_tunnel(new_tunnel) + elif tunnel: + tunnel_logger = getattr(tunnel, 'logger', logger) + tunnel_logger.info('%s %s via SSH tunnel', msg, (host, port)) + + _, tunnel_session = await tunnel.create_connection( + cast(SSHTCPSessionFactory[bytes], conn_factory), + host, port) + + conn = cast(_Conn, tunnel_session) + elif proxy_command: + conn = await _open_proxy(loop, proxy_command, conn_factory) + else: + logger.info('%s %s', msg, (host, port)) + + _, session = await loop.create_connection( + conn_factory, host, port, family=family, + flags=flags, local_addr=local_addr) + + conn = cast(_Conn, session) + except asyncio.CancelledError: + options.waiter.cancel() + raise + + try: + await options.waiter + free_conn = False + + if new_tunnel: + conn.set_tunnel(new_tunnel) + + return conn + finally: + if free_conn: + conn.abort() + await conn.wait_closed() + + +async def _listen(options: 'SSHConnectionOptions', + loop: asyncio.AbstractEventLoop, flags: int, + backlog: int, sock: Optional[socket.socket], + reuse_address: bool, reuse_port: bool, + conn_factory: Callable[[], _Conn], + msg: str) -> 'SSHAcceptor': + """Make inbound TCP or SSH tunneled listener""" + + def tunnel_factory(_orig_host: str, _orig_port: int) -> SSHTCPSession: + """Ignore original host and port""" + + return cast(SSHTCPSession, conn_factory()) + + host = options.host + port = options.port + tunnel = options.tunnel + family = options.family + + new_tunnel = await _open_tunnel(tunnel, options.passphrase) + tunnel: _TunnelListenerProtocol + + if sock: + logger.info('%s already-connected socket', msg) + + server: asyncio.AbstractServer = await loop.create_server( + conn_factory, sock=sock, backlog=backlog, + reuse_address=reuse_address, reuse_port=reuse_port) + elif new_tunnel: + new_tunnel.logger.info('%s %s via %s', msg, (host, port), tunnel) + + # pylint: disable=broad-except + try: + tunnel_server = await new_tunnel.create_server( + tunnel_factory, host, port) + except Exception: + new_tunnel.close() + await new_tunnel.wait_closed() + raise + else: + tunnel_server.set_tunnel(new_tunnel) + server = cast(asyncio.AbstractServer, tunnel_server) + elif tunnel: + tunnel_logger = getattr(tunnel, 'logger', logger) + tunnel_logger.info('%s %s via SSH tunnel', msg, (host, port)) + + tunnel_server = await tunnel.create_server(tunnel_factory, host, port) + server = cast(asyncio.AbstractServer, tunnel_server) + else: + logger.info('%s %s', msg, (host, port)) + + server = await loop.create_server( + conn_factory, host, port, family=family, flags=flags, + backlog=backlog, reuse_address=reuse_address, + reuse_port=reuse_port) + + return SSHAcceptor(server, options) + + +async def _run_in_executor(loop: asyncio.AbstractEventLoop, func: Callable, + *args: object, **kwargs: object) 
-> object: + """Run a potentially blocking call in an executor""" + + return await loop.run_in_executor( + None, functools.partial(func, *args, **kwargs)) + + +def _validate_version(version: DefTuple[BytesOrStr]) -> bytes: + """Validate requested SSH version""" + + if version == (): + version = b'AsyncSSH_' + __version__.encode('ascii') + else: + if isinstance(version, str): + version = version.encode('ascii') + else: + assert isinstance(version, bytes) + + # Version including 'SSH-2.0-' and CRLF must be 255 chars or less + if len(version) > 245: + raise ValueError('Version string is too long') + + for b in version: + if b < 0x20 or b > 0x7e: + raise ValueError('Version string must be printable ASCII') + + return version + + +def _expand_algs(alg_type: str, algs: str, + possible_algs: List[bytes], + default_algs: List[bytes], + strict_match: bool) -> Sequence[bytes]: + """Expand the set of allowed algorithms""" + + if algs[:1] in '^+-': + prefix = algs[:1] + algs = algs[1:] + else: + prefix = '' + + matched: List[bytes] = [] + + for pat in algs.split(','): + pattern = WildcardPattern(pat) + + matches = [alg for alg in possible_algs + if pattern.matches(alg.decode('ascii'))] + + if not matches and strict_match: + raise ValueError('"%s" matches no valid %s algorithms' % + (pat, alg_type)) + + matched.extend(matches) + + if prefix == '^': + return matched + default_algs + elif prefix == '+': + return default_algs + matched + elif prefix == '-': + return [alg for alg in default_algs if alg not in matched] + else: + return matched + + +def _select_algs(alg_type: str, algs: _AlgsArg, config_algs: _AlgsArg, + possible_algs: List[bytes], default_algs: List[bytes], + none_value: Optional[bytes] = None) -> Sequence[bytes]: + """Select a set of allowed algorithms""" + + if algs == (): + algs = config_algs + strict_match = False + else: + strict_match = True + + if algs in ((), 'default'): + return default_algs + elif algs: + if isinstance(algs, str): + expanded_algs = _expand_algs(alg_type, algs, possible_algs, + default_algs, strict_match) + else: + expanded_algs = [alg.encode('ascii') for alg in algs] + + result: List[bytes] = [] + + for alg in expanded_algs: + if alg not in possible_algs: + raise ValueError('%s is not a valid %s algorithm' % + (alg.decode('ascii'), alg_type)) + + if alg not in result: + result.append(alg) + + return result + elif none_value: + return [none_value] + else: + raise ValueError('No %s algorithms selected' % alg_type) + + +def _select_host_key_algs(algs: _AlgsArg, config_algs: _AlgsArg, + default_algs: List[bytes]) -> Sequence[bytes]: + """Select a set of allowed host key algorithms""" + + possible_algs = (get_x509_certificate_algs() + get_certificate_algs() + + get_public_key_algs()) + + return _select_algs('host key', algs, config_algs, + possible_algs, default_algs) + + +def _validate_algs(config: SSHConfig, kex_algs_arg: _AlgsArg, + enc_algs_arg: _AlgsArg, mac_algs_arg: _AlgsArg, + cmp_algs_arg: _AlgsArg, sig_algs_arg: _AlgsArg, + allow_x509: bool) -> \ + Tuple[Sequence[bytes], Sequence[bytes], Sequence[bytes], + Sequence[bytes], Sequence[bytes]]: + """Validate requested algorithms""" + + kex_algs = _select_algs('key exchange', kex_algs_arg, + cast(_AlgsArg, config.get('KexAlgorithms', ())), + get_kex_algs(), get_default_kex_algs()) + enc_algs = _select_algs('encryption', enc_algs_arg, + cast(_AlgsArg, config.get('Ciphers', ())), + get_encryption_algs(), + get_default_encryption_algs()) + mac_algs = _select_algs('MAC', mac_algs_arg, + cast(_AlgsArg, 
config.get('MACs', ())), + get_mac_algs(), get_default_mac_algs()) + cmp_algs = _select_algs('compression', cmp_algs_arg, + cast(_AlgsArg, config.get_compression_algs()), + get_compression_algs(), + get_default_compression_algs(), b'none') + + allowed_sig_algs = get_x509_certificate_algs() if allow_x509 else [] + allowed_sig_algs = allowed_sig_algs + get_public_key_algs() + + default_sig_algs = get_default_x509_certificate_algs() if allow_x509 else [] + default_sig_algs = default_sig_algs + get_default_public_key_algs() + + sig_algs = _select_algs('signature', sig_algs_arg, + cast(_AlgsArg, + config.get('CASignatureAlgorithms', ())), + allowed_sig_algs, default_sig_algs) + + return kex_algs, enc_algs, mac_algs, cmp_algs, sig_algs + + +class SSHAcceptor: + """SSH acceptor + + This class is a wrapper around an :class:`asyncio.Server` listener + which provides the ability to update the set of SSH client or + server connection options associated with that listener. This is + accomplished by calling the :meth:`update` method, which takes the + same keyword arguments as the :class:`SSHClientConnectionOptions` + and :class:`SSHServerConnectionOptions` classes. + + In addition, this class supports all of the methods supported by + :class:`asyncio.Server` to control accepting of new connections. + + """ + + def __init__(self, server: asyncio.AbstractServer, + options: 'SSHConnectionOptions'): + self._server = server + self._options = options + + async def __aenter__(self) -> 'SSHAcceptor': + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + self.close() + await self.wait_closed() + return False + + def __getattr__(self, name: str) -> Any: + return getattr(self._server, name) + + def get_addresses(self) -> List[Tuple]: + """Return socket addresses being listened on + + This method returns the socket addresses being listened on. + It returns tuples of the form returned by + :meth:`socket.getsockname`. If the listener was created + using a hostname, the host's resolved IPs will be returned. + If the requested listening port was `0`, the selected + listening ports will be returned. + + :returns: A list of socket addresses being listened on + + """ + + if hasattr(self._server, 'get_addresses'): + return self._server.get_addresses() + else: + return [sock.getsockname() for sock in self.sockets] + + def get_port(self) -> int: + """Return the port number being listened on + + This method returns the port number being listened on. + If it is listening on multiple sockets with different port + numbers, this function will return `0`. In that case, + :meth:`get_addresses` can be used to retrieve the full + list of listening addresses and ports. + + :returns: The port number being listened on, if there's only one + + """ + + if hasattr(self._server, 'get_port'): + return self._server.get_port() + else: + ports = set(addr[1] for addr in self.get_addresses()) + return ports.pop() if len(ports) == 1 else 0 + + def close(self) -> None: + """Stop listening for new connections + + This method can be called to stop listening for new + SSH connections. Existing connections will remain open. + + """ + + self._server.close() + + async def wait_closed(self) -> None: + """Wait for this listener to close + + This method is a coroutine which waits for this + listener to be closed. 
+ + """ + + await self._server.wait_closed() + + def update(self, **kwargs: object) -> None: + """Update options on an SSH listener + + Acceptors started by :func:`listen` support options defined + in :class:`SSHServerConnectionOptions`. Acceptors started + by :func:`listen_reverse` support options defined in + :class:`SSHClientConnectionOptions`. + + Changes apply only to SSH client/server connections accepted + after the change is made. Previously accepted connections + will continue to use the options set when they were accepted. + + """ + + self._options.update(kwargs) + + +class SSHConnection(SSHPacketHandler, asyncio.Protocol): + """Parent class for SSH connections""" + + _handler_names = get_symbol_names(globals(), 'MSG_') + + next_conn = 0 # Next connection number, for logging + + @staticmethod + def _get_next_conn() -> int: + """Return the next available connection number (for logging)""" + + next_conn = SSHConnection.next_conn + SSHConnection.next_conn += 1 + return next_conn + + def __init__(self, loop: asyncio.AbstractEventLoop, + options: 'SSHConnectionOptions', + acceptor: _AcceptHandler, error_handler: _ErrorHandler, + wait: Optional[str], server: bool): + self._loop = loop + self._options = options + self._protocol_factory = options.protocol_factory + self._acceptor = acceptor + self._error_handler = error_handler + self._server = server + self._wait = wait + self._waiter = options.waiter if wait else None + + self._transport: Optional[asyncio.Transport] = None + self._local_addr = '' + self._local_port = 0 + self._peer_host = '' + self._peer_addr = '' + self._peer_port = 0 + self._tcp_keepalive = options.tcp_keepalive + self._owner: Optional[Union[SSHClient, SSHServer]] = None + self._extra: Dict[str, object] = {} + + self._inpbuf = b'' + self._packet = b'' + self._pktlen = 0 + self._banner_lines = 0 + + self._version = options.version + self._client_version = b'' + self._server_version = b'' + self._client_kexinit = b'' + self._server_kexinit = b'' + self._session_id = b'' + + self._send_seq = 0 + self._send_encryption: Optional[Encryption] = None + self._send_enchdrlen = 5 + self._send_blocksize = 8 + self._compressor: Optional[Compressor] = None + self._compress_after_auth = False + self._deferred_packets: List[Tuple[int, Sequence[bytes]]] = [] + + self._recv_handler = self._recv_version + self._recv_seq = 0 + self._recv_encryption: Optional[Encryption] = None + self._recv_blocksize = 8 + self._recv_macsize = 0 + self._decompressor: Optional[Decompressor] = None + self._decompress_after_auth = False + self._next_recv_encryption: Optional[Encryption] = None + self._next_recv_blocksize = 0 + self._next_recv_macsize = 0 + self._next_decompressor: Optional[Decompressor] = None + self._next_decompress_after_auth = False + + self._trusted_host_keys: Optional[Set[SSHKey]] = set() + self._trusted_host_key_algs: List[bytes] = [] + self._trusted_ca_keys: Optional[Set[SSHKey]] = set() + self._revoked_host_keys: Set[SSHKey] = set() + + self._x509_trusted_certs = options.x509_trusted_certs + self._x509_trusted_cert_paths = options.x509_trusted_cert_paths + self._x509_revoked_certs: Set[SSHX509Certificate] = set() + self._x509_trusted_subjects: Sequence['X509NamePattern'] = [] + self._x509_revoked_subjects: Sequence['X509NamePattern'] = [] + self._x509_purposes = options.x509_purposes + + self._kex_algs = options.kex_algs + self._enc_algs = options.encryption_algs + self._mac_algs = options.mac_algs + self._cmp_algs = options.compression_algs + self._sig_algs = options.signature_algs 
+ + self._host_based_auth = options.host_based_auth + self._public_key_auth = options.public_key_auth + self._kbdint_auth = options.kbdint_auth + self._password_auth = options.password_auth + + self._kex: Optional[Kex] = None + self._kexinit_sent = False + self._kex_complete = False + self._ignore_first_kex = False + self._strict_kex = False + + self._gss: Optional[GSSBase] = None + self._gss_kex = False + self._gss_auth = False + self._gss_kex_auth = False + self._gss_mic_auth = False + + self._preferred_auth: Optional[Sequence[bytes]] = None + + self._rekey_bytes = options.rekey_bytes + self._rekey_seconds = options.rekey_seconds + self._rekey_bytes_sent = 0 + self._rekey_time = 0. + + self._keepalive_count = 0 + self._keepalive_count_max = options.keepalive_count_max + self._keepalive_interval = options.keepalive_interval + self._keepalive_timer: Optional[asyncio.TimerHandle] = None + + self._tunnel: Optional[_TunnelProtocol] = None + + self._enc_alg_cs = b'' + self._enc_alg_sc = b'' + + self._mac_alg_cs = b'' + self._mac_alg_sc = b'' + + self._cmp_alg_cs = b'' + self._cmp_alg_sc = b'' + + self._can_send_ext_info = False + self._extensions_to_send: 'OrderedDict[bytes, bytes]' = OrderedDict() + + self._can_recv_ext_info = False + + self._server_sig_algs: Set[bytes] = set() + + self._next_service: Optional[bytes] = None + + self._agent: Optional[SSHAgentClient] = None + + self._auth: Optional[Auth] = None + self._auth_in_progress = False + self._auth_complete = False + self._auth_final = False + self._auth_methods = [b'none'] + self._auth_was_trivial = True + self._username = '' + + self._channels: Dict[int, SSHChannel] = {} + self._next_recv_chan = 0 + + self._global_request_queue: List[_GlobalRequest] = [] + self._global_request_waiters: \ + 'List[asyncio.Future[_GlobalRequestResult]]' = [] + + self._local_listeners: Dict[ListenKey, SSHListener] = {} + + self._x11_listener: Union[None, SSHX11ClientListener, + SSHX11ServerListener] = None + + self._close_event = asyncio.Event() + + self._server_host_key_algs: Optional[Sequence[bytes]] = None + + self._logger = logger.get_child(context='conn=%d' % + self._get_next_conn()) + + self._login_timer: Optional[asyncio.TimerHandle] + + if options.login_timeout: + self._login_timer = self._loop.call_later( + options.login_timeout, self._login_timer_callback) + else: + self._login_timer = None + + self._disable_trivial_auth = False + + async def __aenter__(self: _ConnSelf) -> _ConnSelf: + """Allow SSHConnection to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + """Wait for connection close when used as an async context manager""" + + if not self._loop.is_closed(): # pragma: no branch + self.close() + + await self.wait_closed() + return False + + @property + def logger(self) -> SSHLogger: + """A logger associated with this connection""" + + return self._logger + + def _cleanup(self, exc: Optional[Exception]) -> None: + """Clean up this connection""" + + self._cancel_keepalive_timer() + + for chan in list(self._channels.values()): + chan.process_connection_close(exc) + + for listener in list(self._local_listeners.values()): + listener.close() + + while self._global_request_waiters: + self._process_global_response(MSG_REQUEST_FAILURE, 0, + SSHPacket(b'')) + + if self._auth: + self._auth.cancel() + self._auth = None + + if self._error_handler: + self._error_handler(self, exc) + 
self._acceptor = None + self._error_handler = None + + if self._wait and self._waiter and not self._waiter.cancelled(): + if exc: + self._waiter.set_exception(exc) + else: # pragma: no cover + self._waiter.set_result(None) + + self._wait = None + + if self._owner: # pragma: no branch + self._owner.connection_lost(exc) + self._owner = None + + self._cancel_login_timer() + self._close_event.set() + + self._inpbuf = b'' + + if self._tunnel: + self._tunnel.close() + self._tunnel = None + + def _cancel_login_timer(self) -> None: + """Cancel the login timer""" + + if self._login_timer: + self._login_timer.cancel() + self._login_timer = None + + def _login_timer_callback(self) -> None: + """Close the connection if authentication hasn't completed yet""" + + self._login_timer = None + + self.connection_lost(ConnectionLost('Login timeout expired')) + + def _cancel_keepalive_timer(self) -> None: + """Cancel the keepalive timer""" + + if self._keepalive_timer: + self._keepalive_timer.cancel() + self._keepalive_timer = None + + def _set_keepalive_timer(self) -> None: + """Set the keepalive timer""" + + if self._keepalive_interval: + self._keepalive_timer = self._loop.call_later( + self._keepalive_interval, self._keepalive_timer_callback) + + def _reset_keepalive_timer(self) -> None: + """Reset the keepalive timer""" + + if self._auth_complete: + self._cancel_keepalive_timer() + self._set_keepalive_timer() + + async def _make_keepalive_request(self) -> None: + """Send keepalive request""" + + self.logger.debug1('Sending keepalive request') + + await self._make_global_request(b'keepalive@openssh.com') + + if self._keepalive_timer: + self.logger.debug1('Got keepalive response') + + self._keepalive_count = 0 + + def _keepalive_timer_callback(self) -> None: + """Handle keepalive check""" + + self._keepalive_count += 1 + + if self._keepalive_count > self._keepalive_count_max: + self.connection_lost( + ConnectionLost(('Server' if self.is_client() else 'Client') + + ' not responding to keepalive')) + else: + self._set_keepalive_timer() + self.create_task(self._make_keepalive_request()) + + def _force_close(self, exc: Optional[BaseException]) -> None: + """Force this connection to close immediately""" + + if not self._transport: + return + + self._transport.abort() + self._transport = None + + self._loop.call_soon(self._cleanup, exc) + + def _reap_task(self, task_logger: SSHLogger, + task: 'asyncio.Task[None]') -> None: + """Collect result of an async task, reporting errors""" + + # pylint: disable=broad-except + try: + task.result() + except asyncio.CancelledError: + pass + except DisconnectError as exc: + self._send_disconnect(exc.code, exc.reason, exc.lang) + self._force_close(exc) + except Exception: + self.internal_error(error_logger=task_logger) + + def create_task(self, coro: Awaitable[None], + task_logger: Optional[SSHLogger] = None) -> \ + 'asyncio.Task[None]': + """Create an asynchronous task which catches and reports errors""" + + task = asyncio.ensure_future(coro) + task.add_done_callback(partial(self._reap_task, task_logger)) + return task + + def is_client(self) -> bool: + """Return if this is a client connection""" + + return not self._server + + def is_server(self) -> bool: + """Return if this is a server connection""" + + return self._server + + def get_owner(self) -> Optional[Union[SSHClient, SSHServer]]: + """Return the SSHClient or SSHServer which owns this connection""" + + return self._owner + + def get_hash_prefix(self) -> bytes: + """Return the bytes used in calculating unique connection 
hashes + + This method returns a packetized version of the client and + server version and kexinit strings which is needed to perform + key exchange hashes. + + """ + + return b''.join((String(self._client_version), + String(self._server_version), + String(self._client_kexinit), + String(self._server_kexinit))) + + def set_tunnel(self, tunnel: _TunnelProtocol) -> None: + """Set tunnel used to open this connection""" + + self._tunnel = tunnel + + def _match_known_hosts(self, known_hosts: KnownHostsArg, host: str, + addr: str, port: Optional[int]) -> None: + """Determine the set of trusted host keys and certificates""" + + trusted_host_keys, trusted_ca_keys, revoked_host_keys, \ + trusted_x509_certs, revoked_x509_certs, \ + trusted_x509_subjects, revoked_x509_subjects = \ + match_known_hosts(known_hosts, host, addr, port) + + assert self._trusted_host_keys is not None + + for key in trusted_host_keys: + self._trusted_host_keys.add(key) + + if key.algorithm not in self._trusted_host_key_algs: + self._trusted_host_key_algs.extend(key.sig_algorithms) + + self._trusted_ca_keys = set(trusted_ca_keys) + self._revoked_host_keys = set(revoked_host_keys) + + if self._x509_trusted_certs is not None: + self._x509_trusted_certs = list(self._x509_trusted_certs) + self._x509_trusted_certs.extend(trusted_x509_certs) + self._x509_revoked_certs = set(revoked_x509_certs) + + self._x509_trusted_subjects = trusted_x509_subjects + self._x509_revoked_subjects = revoked_x509_subjects + + def _validate_openssh_host_certificate( + self, host: str, addr: str, port: int, + cert: SSHOpenSSHCertificate) -> SSHKey: + """Validate an OpenSSH host certificate""" + + if self._trusted_ca_keys is not None: + if cert.signing_key in self._revoked_host_keys: + raise ValueError('Host CA key is revoked') + + if not self._owner: # pragma: no cover + raise ValueError('Connection closed') + + if cert.signing_key not in self._trusted_ca_keys and \ + not self._owner.validate_host_ca_key(host, addr, port, + cert.signing_key): + raise ValueError('Host CA key is not trusted') + + cert.validate(CERT_TYPE_HOST, host) + + return cert.key + + def _validate_x509_host_certificate_chain( + self, host: str, cert: SSHX509CertificateChain) -> SSHKey: + """Validate an X.509 host certificate""" + + if (self._x509_revoked_subjects and + any(pattern.matches(cert.subject) + for pattern in self._x509_revoked_subjects)): + raise ValueError('X.509 subject name is revoked') + + if (self._x509_trusted_subjects and + not any(pattern.matches(cert.subject) + for pattern in self._x509_trusted_subjects)): + raise ValueError('X.509 subject name is not trusted') + + # Only validate hostname against X.509 certificate host + # principals when there are no X.509 trusted subject + # entries matched in known_hosts. 
+ if self._x509_trusted_subjects: + host = '' + + assert self._x509_trusted_certs is not None + + cert.validate_chain(self._x509_trusted_certs, + self._x509_trusted_cert_paths, + self._x509_revoked_certs, + self._x509_purposes, + host_principal=host) + + return cert.key + + def _validate_host_key(self, host: str, addr: str, port: int, + key_data: bytes) -> SSHKey: + """Validate and return a trusted host key""" + + try: + cert = decode_ssh_certificate(key_data) + except KeyImportError: + pass + else: + if cert.is_x509_chain: + return self._validate_x509_host_certificate_chain( + host, cast(SSHX509CertificateChain, cert)) + else: + return self._validate_openssh_host_certificate( + host, addr, port, cast(SSHOpenSSHCertificate, cert)) + + try: + key = decode_ssh_public_key(key_data) + except KeyImportError: + pass + else: + if self._trusted_host_keys is not None: + if key in self._revoked_host_keys: + raise ValueError('Host key is revoked') + + if not self._owner: # pragma: no cover + raise ValueError('Connection closed') + + if key not in self._trusted_host_keys and \ + not self._owner.validate_host_public_key(host, addr, + port, key): + raise ValueError('Host key is not trusted') + + return key + + raise ValueError('Unable to decode host key') + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle a newly opened connection""" + + self._transport = cast(asyncio.Transport, transport) + + sock = cast(socket.socket, transport.get_extra_info('socket')) + + if sock: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, + self._tcp_keepalive) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + sockname = cast(SockAddr, transport.get_extra_info('sockname')) + + if sockname: # pragma: no branch + self._local_addr, self._local_port = sockname[:2] + + peername = cast(SockAddr, transport.get_extra_info('peername')) + + if peername: # pragma: no branch + self._peer_addr, self._peer_port = peername[:2] + + self._owner = self._protocol_factory() + + # pylint: disable=broad-except + try: + self._connection_made() + self._owner.connection_made(self) # type: ignore + self._send_version() + except Exception: + self._loop.call_soon(self.internal_error, sys.exc_info()) + + def connection_lost(self, exc: Optional[Exception] = None) -> None: + """Handle the closing of a connection""" + + if exc is None and self._transport: + exc = ConnectionLost('Connection lost') + + self._force_close(exc) + + def internal_error(self, exc_info: Optional[OptExcInfo] = None, + error_logger: Optional[SSHLogger] = None) -> None: + """Handle a fatal error in connection processing""" + + if not exc_info: + exc_info = sys.exc_info() + + if not error_logger: + error_logger = self.logger + + error_logger.debug1('Uncaught exception', exc_info=exc_info) + self._force_close(exc_info[1]) + + def session_started(self) -> None: + """Handle session start when opening tunneled SSH connection""" + + # pylint: disable=arguments-differ + def data_received(self, data: bytes, datatype: DataType = None) -> None: + """Handle incoming data on the connection""" + + # pylint: disable=unused-argument + + self._inpbuf += data + + self._reset_keepalive_timer() + + # pylint: disable=broad-except + try: + while self._inpbuf and self._recv_handler(): + pass + except DisconnectError as exc: + self._send_disconnect(exc.code, exc.reason, exc.lang) + self._force_close(exc) + except Exception: + self.internal_error() + # pylint: enable=arguments-differ + + def eof_received(self) -> None: + """Handle an incoming end of 
file on the connection""" + + self.connection_lost(None) + + def pause_writing(self) -> None: + """Handle a request from the transport to pause writing data""" + + # Do nothing with this for now + + def resume_writing(self) -> None: + """Handle a request from the transport to resume writing data""" + + # Do nothing with this for now + + def add_channel(self, chan: SSHChannel[AnyStr]) -> int: + """Add a new channel, returning its channel number""" + + if not self._transport: + raise ChannelOpenError(OPEN_CONNECT_FAILED, + 'SSH connection closed') + + while self._next_recv_chan in self._channels: # pragma: no cover + self._next_recv_chan = (self._next_recv_chan + 1) & 0xffffffff + + recv_chan = self._next_recv_chan + self._next_recv_chan = (self._next_recv_chan + 1) & 0xffffffff + + self._channels[recv_chan] = chan + return recv_chan + + def remove_channel(self, recv_chan: int) -> None: + """Remove the channel with the specified channel number""" + + del self._channels[recv_chan] + + def get_gss_context(self) -> GSSBase: + """Return the GSS context associated with this connection""" + + assert self._gss is not None + return self._gss + + def enable_gss_kex_auth(self) -> None: + """Enable GSS key exchange authentication""" + + self._gss_kex_auth = self._gss_auth + + def _choose_alg(self, alg_type: str, local_algs: Sequence[bytes], + remote_algs: Sequence[bytes]) -> bytes: + """Choose a common algorithm from the client & server lists + + This method returns the earliest algorithm on the client's + list which is supported by the server. + + """ + + if self.is_client(): + client_algs, server_algs = local_algs, remote_algs + else: + client_algs, server_algs = remote_algs, local_algs + + for alg in client_algs: + if alg in server_algs: + return alg + + raise KeyExchangeFailed( + 'No matching %s algorithm found, sent %s and received %s' % + (alg_type, b','.join(local_algs).decode('ascii'), + b','.join(remote_algs).decode('ascii'))) + + def _get_extra_kex_algs(self) -> List[bytes]: + """Return the extra kex algs to add""" + + if self.is_client(): + return [b'ext-info-c', b'kex-strict-c-v00@openssh.com'] + else: + return [b'ext-info-s', b'kex-strict-s-v00@openssh.com'] + + def _send(self, data: bytes) -> None: + """Send data to the SSH connection""" + + if self._transport: + try: + self._transport.write(data) + except BrokenPipeError: # pragma: no cover + pass + + def _send_version(self) -> None: + """Start the SSH handshake""" + + version = b'SSH-2.0-' + self._version + + self.logger.debug1('Sending version %s', version) + + if self.is_client(): + self._client_version = version + self.set_extra_info(client_version=version.decode('ascii')) + else: + self._server_version = version + self.set_extra_info(server_version=version.decode('ascii')) + + self._send(version + b'\r\n') + + def _recv_version(self) -> bool: + """Receive and parse the remote SSH version""" + + idx = self._inpbuf.find(b'\n', 0, _MAX_BANNER_LINE_LEN) + if idx < 0: + if len(self._inpbuf) >= _MAX_BANNER_LINE_LEN: + self._force_close(ProtocolError('Banner line too long')) + + return False + + version = self._inpbuf[:idx] + if version.endswith(b'\r'): + version = version[:-1] + + self._inpbuf = self._inpbuf[idx+1:] + + if (version.startswith(b'SSH-2.0-') or + (self.is_client() and version.startswith(b'SSH-1.99-'))): + if len(version) > _MAX_VERSION_LINE_LEN: + self._force_close(ProtocolError('Version too long')) + + # Accept version 2.0, or 1.99 if we're a client + if self.is_server(): + self._client_version = version + 
self.set_extra_info(client_version=version.decode('ascii')) + else: + self._server_version = version + self.set_extra_info(server_version=version.decode('ascii')) + + self.logger.debug1('Received version %s', version) + + self._send_kexinit() + self._kexinit_sent = True + self._recv_handler = self._recv_pkthdr + elif self.is_client() and not version.startswith(b'SSH-'): + # As a client, ignore the line if it doesn't appear to be a version + self._banner_lines += 1 + + if self._banner_lines > _MAX_BANNER_LINES: + self._force_close(ProtocolError('Too many banner lines')) + return False + else: + # Otherwise, reject the unknown version + self._force_close(ProtocolNotSupported('Unsupported SSH version')) + return False + + return True + + def _recv_pkthdr(self) -> bool: + """Receive and parse an SSH packet header""" + + if len(self._inpbuf) < self._recv_blocksize: + return False + + self._packet = self._inpbuf[:self._recv_blocksize] + self._inpbuf = self._inpbuf[self._recv_blocksize:] + + if self._recv_encryption: + self._packet, pktlen = \ + self._recv_encryption.decrypt_header(self._recv_seq, + self._packet, 4) + else: + pktlen = self._packet[:4] + + self._pktlen = int.from_bytes(pktlen, 'big') + self._recv_handler = self._recv_packet + return True + + def _recv_packet(self) -> bool: + """Receive the remainder of an SSH packet and process it""" + + rem = 4 + self._pktlen + self._recv_macsize - self._recv_blocksize + if len(self._inpbuf) < rem: + return False + + seq = self._recv_seq + rest = self._inpbuf[:rem-self._recv_macsize] + mac = self._inpbuf[rem-self._recv_macsize:rem] + + if self._recv_encryption: + packet_data = self._recv_encryption.decrypt_packet( + seq, self._packet, rest, 4, mac) + + if not packet_data: + raise MACError('MAC verification failed') + else: + packet_data = self._packet[4:] + rest + + self._inpbuf = self._inpbuf[rem:] + self._packet = b'' + + orig_payload = packet_data[1:-packet_data[0]] + + if self._decompressor and (self._auth_complete or + not self._decompress_after_auth): + payload = self._decompressor.decompress(orig_payload) + + if payload is None: + raise CompressionError('Decompression failed') + else: + payload = orig_payload + + packet = SSHPacket(payload) + pkttype = packet.get_byte() + handler: SSHPacketHandler = self + skip_reason = '' + exc_reason = '' + + if MSG_KEX_FIRST <= pkttype <= MSG_KEX_LAST: + if self._kex: + if self._ignore_first_kex: # pragma: no cover + skip_reason = 'ignored first kex' + self._ignore_first_kex = False + else: + handler = self._kex + else: + skip_reason = 'kex not in progress' + exc_reason = 'Key exchange not in progress' + elif self._strict_kex and not self._recv_encryption and \ + MSG_IGNORE <= pkttype <= MSG_DEBUG: + skip_reason = 'strict kex violation' + exc_reason = 'Strict key exchange violation: ' \ + 'unexpected packet type %d received' % pkttype + elif MSG_USERAUTH_FIRST <= pkttype <= MSG_USERAUTH_LAST: + if self._auth: + handler = self._auth + else: + skip_reason = 'auth not in progress' + exc_reason = 'Authentication not in progress' + elif pkttype > MSG_KEX_LAST and not self._recv_encryption: + skip_reason = 'invalid request before kex complete' + exc_reason = 'Invalid request before key exchange was complete' + elif pkttype > MSG_USERAUTH_LAST and not self._auth_complete: + skip_reason = 'invalid request before auth complete' + exc_reason = 'Invalid request before authentication was complete' + elif MSG_CHANNEL_FIRST <= pkttype <= MSG_CHANNEL_LAST: + try: + recv_chan = packet.get_uint32() + except 
PacketDecodeError: + skip_reason = 'incomplete channel request' + exc_reason = 'Incomplete channel request received' + else: + try: + handler = self._channels[recv_chan] + except KeyError: + skip_reason = 'invalid channel number' + exc_reason = 'Invalid channel number %d ' \ + 'received' % recv_chan + + handler.log_received_packet(pkttype, seq, packet, skip_reason) + + if not skip_reason: + try: + processed = handler.process_packet(pkttype, seq, packet) + except PacketDecodeError as exc: + raise ProtocolError(str(exc)) from None + + if not processed: + if self._strict_kex and not self._recv_encryption: + exc_reason = 'Strict key exchange violation: ' \ + 'unexpected packet type %d received' % pkttype + else: + self.logger.debug1('Unknown packet type %d received', + pkttype) + self.send_packet(MSG_UNIMPLEMENTED, UInt32(seq)) + + if exc_reason: + raise ProtocolError(exc_reason) + + if pkttype > MSG_USERAUTH_LAST: + self._auth_final = True + + if self._transport: + self._recv_handler = self._recv_pkthdr + + if self._recv_seq == 0xffffffff and not self._recv_encryption: + raise ProtocolError('Sequence rollover before kex complete') + + if pkttype == MSG_NEWKEYS and self._strict_kex: + self._recv_seq = 0 + else: + self._recv_seq = (seq + 1) & 0xffffffff + + return True + + def send_packet(self, pkttype: int, *args: bytes, + handler: Optional[SSHPacketLogger] = None) -> None: + """Send an SSH packet""" + + if (self._auth_complete and self._kex_complete and + (self._rekey_bytes_sent >= self._rekey_bytes or + (self._rekey_seconds and + time.monotonic() >= self._rekey_time))): + self._send_kexinit() + self._kexinit_sent = True + + if ((pkttype == MSG_USERAUTH_BANNER and + not (self._auth_in_progress or self._auth_complete)) or + (pkttype > MSG_USERAUTH_LAST and not self._auth_complete)): + self._deferred_packets.append((pkttype, args)) + return + + # If we're encrypting and we have no data outstanding, insert an + # ignore packet into the stream + if self._send_encryption and pkttype not in (MSG_IGNORE, MSG_EXT_INFO): + self.send_packet(MSG_IGNORE, String(b'')) + + orig_payload = Byte(pkttype) + b''.join(args) + + if self._compressor and (self._auth_complete or + not self._compress_after_auth): + payload = self._compressor.compress(orig_payload) + + if payload is None: # pragma: no cover + raise CompressionError('Compression failed') + else: + payload = orig_payload + + padlen = -(self._send_enchdrlen + len(payload)) % self._send_blocksize + if padlen < 4: + padlen += self._send_blocksize + + packet = Byte(padlen) + payload + os.urandom(padlen) + pktlen = len(packet) + hdr = UInt32(pktlen) + seq = self._send_seq + + if self._send_encryption: + packet, mac = self._send_encryption.encrypt_packet(seq, hdr, packet) + else: + packet = hdr + packet + mac = b'' + + self._send(packet + mac) + + if self._send_seq == 0xffffffff and not self._send_encryption: + self._send_seq = 0 + raise ProtocolError('Sequence rollover before kex complete') + + if pkttype == MSG_NEWKEYS and self._strict_kex: + self._send_seq = 0 + else: + self._send_seq = (seq + 1) & 0xffffffff + + if self._kex_complete: + self._rekey_bytes_sent += pktlen + + if not handler: + handler = self + + handler.log_sent_packet(pkttype, seq, orig_payload) + + def _send_deferred_packets(self) -> None: + """Send packets deferred due to key exchange or auth""" + + deferred_packets = self._deferred_packets + self._deferred_packets = [] + + for pkttype, args in deferred_packets: + self.send_packet(pkttype, *args) + + def _send_disconnect(self, code: int, 
reason: str, lang: str) -> None: + """Send a disconnect packet""" + + self.logger.info('Sending disconnect: %s (%d)', reason, code) + + self.send_packet(MSG_DISCONNECT, UInt32(code), + String(reason), String(lang)) + + def _send_kexinit(self) -> None: + """Start a key exchange""" + + self._kex_complete = False + self._rekey_bytes_sent = 0 + + if self._rekey_seconds: + self._rekey_time = time.monotonic() + self._rekey_seconds + + if self._gss_kex: + assert self._gss is not None + gss_mechs = self._gss.mechs + else: + gss_mechs = [] + + kex_algs = expand_kex_algs(self._kex_algs, gss_mechs, + bool(self._server_host_key_algs)) + \ + self._get_extra_kex_algs() + + host_key_algs = self._server_host_key_algs or [b'null'] + + self.logger.debug1('Requesting key exchange') + self.logger.debug2(' Key exchange algs: %s', kex_algs) + self.logger.debug2(' Host key algs: %s', host_key_algs) + self.logger.debug2(' Encryption algs: %s', self._enc_algs) + self.logger.debug2(' MAC algs: %s', self._mac_algs) + self.logger.debug2(' Compression algs: %s', self._cmp_algs) + + cookie = os.urandom(16) + kex_algs = NameList(kex_algs) + host_key_algs = NameList(host_key_algs) + enc_algs = NameList(self._enc_algs) + mac_algs = NameList(self._mac_algs) + cmp_algs = NameList(self._cmp_algs) + langs = NameList([]) + + packet = b''.join((Byte(MSG_KEXINIT), cookie, kex_algs, host_key_algs, + enc_algs, enc_algs, mac_algs, mac_algs, cmp_algs, + cmp_algs, langs, langs, Boolean(False), UInt32(0))) + + if self.is_server(): + self._server_kexinit = packet + else: + self._client_kexinit = packet + + self.send_packet(MSG_KEXINIT, packet[1:]) + + def _send_ext_info(self) -> None: + """Send extension information""" + + packet = UInt32(len(self._extensions_to_send)) + + self.logger.debug2('Sending extension info') + + for name, value in self._extensions_to_send.items(): + packet += String(name) + String(value) + + self.logger.debug2(' %s: %s', name, value) + + self.send_packet(MSG_EXT_INFO, packet) + + def send_newkeys(self, k: bytes, h: bytes) -> None: + """Finish a key exchange and send a new keys message""" + + if not self._session_id: + first_kex = True + self._session_id = h + else: + first_kex = False + + enc_keysize_cs, enc_ivsize_cs, enc_blocksize_cs, \ + mac_keysize_cs, mac_hashsize_cs, etm_cs = \ + get_encryption_params(self._enc_alg_cs, self._mac_alg_cs) + + enc_keysize_sc, enc_ivsize_sc, enc_blocksize_sc, \ + mac_keysize_sc, mac_hashsize_sc, etm_sc = \ + get_encryption_params(self._enc_alg_sc, self._mac_alg_sc) + + if mac_keysize_cs == 0: + self._mac_alg_cs = self._enc_alg_cs + + if mac_keysize_sc == 0: + self._mac_alg_sc = self._enc_alg_sc + + cmp_after_auth_cs = get_compression_params(self._cmp_alg_cs) + cmp_after_auth_sc = get_compression_params(self._cmp_alg_sc) + + self.logger.debug2(' Client to server:') + self.logger.debug2(' Encryption alg: %s', self._enc_alg_cs) + self.logger.debug2(' MAC alg: %s', self._mac_alg_cs) + self.logger.debug2(' Compression alg: %s', self._cmp_alg_cs) + self.logger.debug2(' Server to client:') + self.logger.debug2(' Encryption alg: %s', self._enc_alg_sc) + self.logger.debug2(' MAC alg: %s', self._mac_alg_sc) + self.logger.debug2(' Compression alg: %s', self._cmp_alg_sc) + + assert self._kex is not None + + iv_cs = self._kex.compute_key(k, h, b'A', self._session_id, + enc_ivsize_cs) + iv_sc = self._kex.compute_key(k, h, b'B', self._session_id, + enc_ivsize_sc) + enc_key_cs = self._kex.compute_key(k, h, b'C', self._session_id, + enc_keysize_cs) + enc_key_sc = self._kex.compute_key(k, h, 
b'D', self._session_id, + enc_keysize_sc) + mac_key_cs = self._kex.compute_key(k, h, b'E', self._session_id, + mac_keysize_cs) + mac_key_sc = self._kex.compute_key(k, h, b'F', self._session_id, + mac_keysize_sc) + self._kex = None + + next_enc_cs = get_encryption(self._enc_alg_cs, enc_key_cs, iv_cs, + self._mac_alg_cs, mac_key_cs, etm_cs) + next_enc_sc = get_encryption(self._enc_alg_sc, enc_key_sc, iv_sc, + self._mac_alg_sc, mac_key_sc, etm_sc) + + self.send_packet(MSG_NEWKEYS) + + self._extensions_to_send[b'global-requests-ok'] = b'' + + if self.is_client(): + self._send_encryption = next_enc_cs + self._send_enchdrlen = 1 if etm_cs else 5 + self._send_blocksize = max(8, enc_blocksize_cs) + self._compressor = get_compressor(self._cmp_alg_cs) + self._compress_after_auth = cmp_after_auth_cs + + self._next_recv_encryption = next_enc_sc + self._next_recv_blocksize = max(8, enc_blocksize_sc) + self._next_recv_macsize = mac_hashsize_sc + self._next_decompressor = get_decompressor(self._cmp_alg_sc) + self._next_decompress_after_auth = cmp_after_auth_sc + + self.set_extra_info( + send_cipher=self._enc_alg_cs.decode('ascii'), + send_mac=self._mac_alg_cs.decode('ascii'), + send_compression=self._cmp_alg_cs.decode('ascii'), + recv_cipher=self._enc_alg_sc.decode('ascii'), + recv_mac=self._mac_alg_sc.decode('ascii'), + recv_compression=self._cmp_alg_sc.decode('ascii')) + + if first_kex: + if self._wait == 'kex' and self._waiter and \ + not self._waiter.cancelled(): + self._waiter.set_result(None) + self._wait = None + return + else: + self._extensions_to_send[b'server-sig-algs'] = \ + b','.join(self._sig_algs) + + self._send_encryption = next_enc_sc + self._send_enchdrlen = 1 if etm_sc else 5 + self._send_blocksize = max(8, enc_blocksize_sc) + self._compressor = get_compressor(self._cmp_alg_sc) + self._compress_after_auth = cmp_after_auth_sc + + self._next_recv_encryption = next_enc_cs + self._next_recv_blocksize = max(8, enc_blocksize_cs) + self._next_recv_macsize = mac_hashsize_cs + self._next_decompressor = get_decompressor(self._cmp_alg_cs) + self._next_decompress_after_auth = cmp_after_auth_cs + + self.set_extra_info( + send_cipher=self._enc_alg_sc.decode('ascii'), + send_mac=self._mac_alg_sc.decode('ascii'), + send_compression=self._cmp_alg_sc.decode('ascii'), + recv_cipher=self._enc_alg_cs.decode('ascii'), + recv_mac=self._mac_alg_cs.decode('ascii'), + recv_compression=self._cmp_alg_cs.decode('ascii')) + + if self._can_send_ext_info: + self._send_ext_info() + self._can_send_ext_info = False + + self._kex_complete = True + + if first_kex: + if self.is_client(): + self.send_service_request(_USERAUTH_SERVICE) + else: + self._next_service = _USERAUTH_SERVICE + + self._send_deferred_packets() + + def send_service_request(self, service: bytes) -> None: + """Send a service request""" + + self.logger.debug2('Requesting service %s', service) + + self._next_service = service + self.send_packet(MSG_SERVICE_REQUEST, String(service)) + + def _get_userauth_request_packet(self, method: bytes, + args: Tuple[bytes, ...]) -> bytes: + """Get packet data for a user authentication request""" + + return b''.join((Byte(MSG_USERAUTH_REQUEST), String(self._username), + String(_CONNECTION_SERVICE), String(method)) + args) + + def get_userauth_request_data(self, method: bytes, *args: bytes) -> bytes: + """Get signature data for a user authentication request""" + + return (String(self._session_id) + + self._get_userauth_request_packet(method, args)) + + def send_userauth_packet(self, pkttype: int, *args: bytes, + handler: 
Optional[SSHPacketLogger] = None, + trivial: bool = True) -> None: + """Send a user authentication packet""" + + self._auth_was_trivial &= trivial + self.send_packet(pkttype, *args, handler=handler) + + async def send_userauth_request(self, method: bytes, *args: bytes, + key: Optional[SigningKey] = None, + trivial: bool = True) -> None: + """Send a user authentication request""" + + packet = self._get_userauth_request_packet(method, args) + + if key: + data = String(self._session_id) + packet + + sign_async: Optional[Callable[[bytes], Awaitable[bytes]]] = \ + getattr(key, 'sign_async', None) + + if sign_async: + # pylint: disable=not-callable + sig = await sign_async(data) + elif getattr(key, 'use_executor', False): + sig = await self._loop.run_in_executor(None, key.sign, data) + else: + sig = key.sign(data) + + packet += String(sig) + + self.send_userauth_packet(MSG_USERAUTH_REQUEST, packet[1:], + trivial=trivial) + + def send_userauth_failure(self, partial_success: bool) -> None: + """Send a user authentication failure response""" + + methods = get_supported_server_auth_methods( + cast(SSHServerConnection, self)) + + self.logger.debug2('Remaining auth methods: %s', methods or 'None') + + self._auth = None + self.send_packet(MSG_USERAUTH_FAILURE, NameList(methods), + Boolean(partial_success)) + + def send_userauth_success(self) -> None: + """Send a user authentication success response""" + + self.logger.info('Auth for user %s succeeded', self._username) + + self.send_packet(MSG_USERAUTH_SUCCESS) + self._auth = None + self._auth_in_progress = False + self._auth_complete = True + self._next_service = None + self.set_extra_info(username=self._username) + self._send_deferred_packets() + + self._cancel_login_timer() + self._set_keepalive_timer() + + if self._owner: # pragma: no branch + self._owner.auth_completed() + + if self._acceptor: + result = self._acceptor(self) + + if inspect.isawaitable(result): + assert result is not None + self.create_task(result) + + self._acceptor = None + self._error_handler = None + + if self._wait == 'auth' and self._waiter and \ + not self._waiter.cancelled(): + self._waiter.set_result(None) + self._wait = None + + def send_channel_open_confirmation(self, send_chan: int, recv_chan: int, + recv_window: int, recv_pktsize: int, + *result_args: bytes) -> None: + """Send a channel open confirmation""" + + self.send_packet(MSG_CHANNEL_OPEN_CONFIRMATION, UInt32(send_chan), + UInt32(recv_chan), UInt32(recv_window), + UInt32(recv_pktsize), *result_args) + + def send_channel_open_failure(self, send_chan: int, code: int, + reason: str, lang: str) -> None: + """Send a channel open failure""" + + self.send_packet(MSG_CHANNEL_OPEN_FAILURE, UInt32(send_chan), + UInt32(code), String(reason), String(lang)) + + async def _make_global_request(self, request: bytes, + *args: bytes) -> Tuple[int, SSHPacket]: + """Send a global request and wait for the response""" + + if not self._transport: + return MSG_REQUEST_FAILURE, SSHPacket(b'') + + waiter: 'asyncio.Future[_GlobalRequestResult]' = \ + self._loop.create_future() + + self._global_request_waiters.append(waiter) + + self.send_packet(MSG_GLOBAL_REQUEST, String(request), + Boolean(True), *args) + + return await waiter + + def _report_global_response(self, result: Union[bool, bytes]) -> None: + """Report back the response to a previously issued global request""" + + _, _, want_reply = self._global_request_queue.pop(0) + + if want_reply: # pragma: no branch + if result: + response = b'' if result is True else cast(bytes, result) + 
self.send_packet(MSG_REQUEST_SUCCESS, response) + else: + self.send_packet(MSG_REQUEST_FAILURE) + + if self._global_request_queue: + self._service_next_global_request() + + def _service_next_global_request(self) -> None: + """Process next item on global request queue""" + + handler, packet, _ = self._global_request_queue[0] + if callable(handler): + handler(packet) + else: + self._report_global_response(False) + + def _connection_made(self) -> None: + """Handle the opening of a new connection""" + + raise NotImplementedError + + def _process_disconnect(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a disconnect message""" + + code = packet.get_uint32() + reason_bytes = packet.get_string() + lang_bytes = packet.get_string() + packet.check_end() + + try: + reason = reason_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid disconnect message') from None + + self.logger.debug1('Received disconnect: %s (%d)', reason, code) + + if code != DISC_BY_APPLICATION or self._wait: + exc: Optional[Exception] = construct_disc_error(code, reason, lang) + else: + exc = None + + self._force_close(exc) + + def _process_ignore(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process an ignore message""" + + # Work around missing payload bytes in an ignore message + # in some Cisco SSH servers + if b'Cisco' not in self._server_version: # pragma: no branch + _ = packet.get_string() # data + packet.check_end() + + def _process_unimplemented(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process an unimplemented message response""" + + # pylint: disable=no-self-use + + _ = packet.get_uint32() # seq + packet.check_end() + + def _process_debug(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a debug message""" + + always_display = packet.get_boolean() + msg_bytes = packet.get_string() + lang_bytes = packet.get_string() + packet.check_end() + + try: + msg = msg_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid debug message') from None + + self.logger.debug1('Received debug message: %s%s', msg, + ' (always display)' if always_display else '') + + if self._owner: # pragma: no branch + self._owner.debug_msg_received(msg, lang, always_display) + + def _process_service_request(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a service request""" + + service = packet.get_string() + packet.check_end() + + if self.is_client(): + raise ProtocolError('Unexpected service request received') + + if not self._recv_encryption: + raise ProtocolError('Service request received before kex complete') + + if service != self._next_service: + raise ServiceNotAvailable('Unexpected service in service request') + + self.logger.debug2('Accepting request for service %s', service) + + self.send_packet(MSG_SERVICE_ACCEPT, String(service)) + + self._next_service = None + + if service == _USERAUTH_SERVICE: # pragma: no branch + self._auth_in_progress = True + self._can_recv_ext_info = False + self._send_deferred_packets() + + def _process_service_accept(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a service accept response""" + + service = packet.get_string() + packet.check_end() + + if self.is_server(): + raise ProtocolError('Unexpected service accept received') + + if not self._recv_encryption: + raise ProtocolError('Service accept received before kex 
complete') + + if service != self._next_service: + raise ServiceNotAvailable('Unexpected service in service accept') + + self.logger.debug2('Request for service %s accepted', service) + + self._next_service = None + + if service == _USERAUTH_SERVICE: # pragma: no branch + self.logger.info('Beginning auth for user %s', self._username) + + self._auth_in_progress = True + + # This method is only in SSHClientConnection + # pylint: disable=no-member + cast('SSHClientConnection', self).try_next_auth() + + def _process_ext_info(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process extension information""" + + if not self._can_recv_ext_info: + raise ProtocolError('Unexpected ext_info received') + + extensions: Dict[bytes, bytes] = {} + + self.logger.debug2('Received extension info') + + num_extensions = packet.get_uint32() + for _ in range(num_extensions): + name = packet.get_string() + value = packet.get_string() + extensions[name] = value + + self.logger.debug2(' %s: %s', name, value) + + packet.check_end() + + if self.is_client(): + self._server_sig_algs = \ + set(extensions.get(b'server-sig-algs', b'').split(b',')) + + def _process_kexinit(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a key exchange request""" + + if self._kex: + raise ProtocolError('Key exchange already in progress') + + _ = packet.get_bytes(16) # cookie + peer_kex_algs = packet.get_namelist() + peer_host_key_algs = packet.get_namelist() + enc_algs_cs = packet.get_namelist() + enc_algs_sc = packet.get_namelist() + mac_algs_cs = packet.get_namelist() + mac_algs_sc = packet.get_namelist() + cmp_algs_cs = packet.get_namelist() + cmp_algs_sc = packet.get_namelist() + _ = packet.get_namelist() # lang_cs + _ = packet.get_namelist() # lang_sc + first_kex_follows = packet.get_boolean() + _ = packet.get_uint32() # reserved + packet.check_end() + + if self.is_server(): + self._client_kexinit = packet.get_consumed_payload() + + if not self._session_id: + if b'ext-info-c' in peer_kex_algs: + self._can_send_ext_info = True + + if b'kex-strict-c-v00@openssh.com' in peer_kex_algs: + self._strict_kex = True + else: + self._server_kexinit = packet.get_consumed_payload() + + if not self._session_id: + if b'ext-info-s' in peer_kex_algs: + self._can_send_ext_info = True + + if b'kex-strict-s-v00@openssh.com' in peer_kex_algs: + self._strict_kex = True + + if self._strict_kex and not self._recv_encryption and \ + self._recv_seq != 0: + raise ProtocolError('Strict key exchange violation: ' + 'KEXINIT was not the first packet') + + + if self._kexinit_sent: + self._kexinit_sent = False + else: + self._send_kexinit() + + if self._gss: + self._gss.reset() + + if self._gss_kex: + assert self._gss is not None + gss_mechs = self._gss.mechs + else: + gss_mechs = [] + + kex_algs = expand_kex_algs(self._kex_algs, gss_mechs, + bool(self._server_host_key_algs)) + + self.logger.debug1('Received key exchange request') + self.logger.debug2(' Key exchange algs: %s', peer_kex_algs) + self.logger.debug2(' Host key algs: %s', peer_host_key_algs) + self.logger.debug2(' Client to server:') + self.logger.debug2(' Encryption algs: %s', enc_algs_cs) + self.logger.debug2(' MAC algs: %s', mac_algs_cs) + self.logger.debug2(' Compression algs: %s', cmp_algs_cs) + self.logger.debug2(' Server to client:') + self.logger.debug2(' Encryption algs: %s', enc_algs_sc) + self.logger.debug2(' MAC algs: %s', mac_algs_sc) + self.logger.debug2(' Compression algs: %s', cmp_algs_sc) + + kex_alg = self._choose_alg('key exchange', 
kex_algs, peer_kex_algs) + self._kex = get_kex(self, kex_alg) + self._ignore_first_kex = (first_kex_follows and + self._kex.algorithm != peer_kex_algs[0]) + + if self.is_server(): + # This method is only in SSHServerConnection + # pylint: disable=no-member + if (not cast(SSHServerConnection, self).choose_server_host_key( + peer_host_key_algs) and not kex_alg.startswith(b'gss-')): + raise KeyExchangeFailed('Unable to find compatible ' + 'server host key') + + self._enc_alg_cs = self._choose_alg('encryption', self._enc_algs, + enc_algs_cs) + self._enc_alg_sc = self._choose_alg('encryption', self._enc_algs, + enc_algs_sc) + + self._mac_alg_cs = self._choose_alg('MAC', self._mac_algs, mac_algs_cs) + self._mac_alg_sc = self._choose_alg('MAC', self._mac_algs, mac_algs_sc) + + self._cmp_alg_cs = self._choose_alg('compression', self._cmp_algs, + cmp_algs_cs) + self._cmp_alg_sc = self._choose_alg('compression', self._cmp_algs, + cmp_algs_sc) + + self.logger.debug1('Beginning key exchange') + self.logger.debug2(' Key exchange alg: %s', self._kex.algorithm) + + self._kex.start() + + def _process_newkeys(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a new keys message, finishing a key exchange""" + + packet.check_end() + + if self._next_recv_encryption: + self._recv_encryption = self._next_recv_encryption + self._recv_blocksize = self._next_recv_blocksize + self._recv_macsize = self._next_recv_macsize + self._decompressor = self._next_decompressor + self._decompress_after_auth = self._next_decompress_after_auth + + self._next_recv_encryption = None + self._can_recv_ext_info = True + else: + raise ProtocolError('New keys not negotiated') + + self.logger.debug1('Completed key exchange') + + def _process_userauth_request(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a user authentication request""" + + username_bytes = packet.get_string() + service = packet.get_string() + method = packet.get_string() + + if len(username_bytes) >= _MAX_USERNAME_LEN: + raise IllegalUserName('Username too long') + + if service != _CONNECTION_SERVICE: + raise ServiceNotAvailable('Unexpected service in auth request') + + try: + username = saslprep(username_bytes.decode('utf-8')) + except (UnicodeDecodeError, SASLPrepError) as exc: + raise IllegalUserName(str(exc)) from None + + if self.is_client(): + raise ProtocolError('Unexpected userauth request') + elif self._auth_complete: + # Silently ignore additional auth requests after auth succeeds, + # until the client sends a non-auth message + if self._auth_final: + raise ProtocolError('Unexpected userauth request') + else: + if username != self._username: + self.logger.info('Beginning auth for user %s', username) + + self._username = username + begin_auth = True + else: + begin_auth = False + + self.create_task(self._finish_userauth(begin_auth, method, packet)) + + async def _finish_userauth(self, begin_auth: bool, method: bytes, + packet: SSHPacket) -> None: + """Finish processing a user authentication request""" + + if not self._owner: # pragma: no cover + return + + if begin_auth: + # This method is only in SSHServerConnection + # pylint: disable=no-member + await cast(SSHServerConnection, self).reload_config() + + result = cast(SSHServer, self._owner).begin_auth(self._username) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + if not result: + self.send_userauth_success() + return + + if not self._owner: # pragma: no cover + return + + if self._auth: + self._auth.cancel() + + 
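+        # Hand the request off to the method-specific server auth handler;
+        # any auth attempt already in flight for this user was cancelled
+        # above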
self._auth = lookup_server_auth(cast(SSHServerConnection, self), + self._username, method, packet) + + def _process_userauth_failure(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a user authentication failure response""" + + auth_methods = packet.get_namelist() + partial_success = packet.get_boolean() + packet.check_end() + + self.logger.debug2('Remaining auth methods: %s', + auth_methods or 'None') + + if self._wait == 'auth_methods' and self._waiter and \ + not self._waiter.cancelled(): + self._waiter.set_result(None) + self._auth_methods = list(auth_methods) + self._wait = None + return + + if self._preferred_auth: + self.logger.debug2('Preferred auth methods: %s', + self._preferred_auth or 'None') + + auth_methods = [method for method in self._preferred_auth + if method in auth_methods] + + self._auth_methods = list(auth_methods) + + if self.is_client() and self._auth: + auth = cast(ClientAuth, self._auth) + + if partial_success: # pragma: no cover + # Partial success not implemented yet + auth.auth_succeeded() + else: + auth.auth_failed() + + # This method is only in SSHClientConnection + # pylint: disable=no-member + cast(SSHClientConnection, self).try_next_auth() + else: + raise ProtocolError('Unexpected userauth failure response') + + def _process_userauth_success(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a user authentication success response""" + + packet.check_end() + + if self.is_client() and self._auth: + auth = cast(ClientAuth, self._auth) + + if self._auth_was_trivial and self._disable_trivial_auth: + raise PermissionDenied('Trivial auth disabled') + + self.logger.info('Auth for user %s succeeded', self._username) + + if self._wait == 'auth_methods' and self._waiter and \ + not self._waiter.cancelled(): + self._waiter.set_result(None) + self._auth_methods = [b'none'] + self._wait = None + return + + auth.auth_succeeded() + auth.cancel() + self._auth = None + self._auth_in_progress = False + self._auth_complete = True + self._can_recv_ext_info = False + + if self._agent: + self._agent.close() + + self.set_extra_info(username=self._username) + self._cancel_login_timer() + self._send_deferred_packets() + self._set_keepalive_timer() + + if self._owner: # pragma: no branch + self._owner.auth_completed() + + if self._acceptor: + result = self._acceptor(self) + + if inspect.isawaitable(result): + assert result is not None + self.create_task(result) + + self._acceptor = None + self._error_handler = None + + if self._wait == 'auth' and self._waiter and \ + not self._waiter.cancelled(): + self._waiter.set_result(None) + self._wait = None + else: + raise ProtocolError('Unexpected userauth success response') + + def _process_userauth_banner(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a user authentication banner message""" + + msg_bytes = packet.get_string() + lang_bytes = packet.get_string() + packet.check_end() + + try: + msg = msg_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid userauth banner') from None + + self.logger.debug1('Received authentication banner') + + if self.is_client(): + cast(SSHClient, self._owner).auth_banner_received(msg, lang) + else: + raise ProtocolError('Unexpected userauth banner') + + def _process_global_request(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a global request""" + + request_bytes = packet.get_string() + want_reply = packet.get_boolean() + + try: + 
request = request_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid global request') from None + + name = '_process_' + map_handler_name(request) + '_global_request' + handler = cast(Optional[_PacketHandler], getattr(self, name, None)) + + if not handler: + self.logger.debug1('Received unknown global request: %s', request) + + self._global_request_queue.append((handler, packet, want_reply)) + if len(self._global_request_queue) == 1: + self._service_next_global_request() + + def _process_global_response(self, pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a global response""" + + if self._global_request_waiters: + waiter = self._global_request_waiters.pop(0) + if not waiter.cancelled(): # pragma: no branch + waiter.set_result((pkttype, packet)) + else: + raise ProtocolError('Unexpected global response') + + def _process_channel_open(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a channel open request""" + + chantype_bytes = packet.get_string() + send_chan = packet.get_uint32() + send_window = packet.get_uint32() + send_pktsize = packet.get_uint32() + + # Work around an off-by-one error in dropbear introduced in + # https://github.com/mkj/dropbear/commit/49263b5 + if b'dropbear' in self._client_version and self._compressor: + send_pktsize -= 1 + + try: + chantype = chantype_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid channel open request') from None + + try: + name = '_process_' + map_handler_name(chantype) + '_open' + handler = cast(Optional[_OpenHandler], getattr(self, name, None)) + + if callable(handler): + chan, session = handler(packet) + chan.process_open(send_chan, send_window, + send_pktsize, session) + else: + raise ChannelOpenError(OPEN_UNKNOWN_CHANNEL_TYPE, + 'Unknown channel type') + except ChannelOpenError as exc: + self.logger.debug1('Open failed for channel type %s: %s', + chantype, exc.reason) + + self.send_channel_open_failure(send_chan, exc.code, + exc.reason, exc.lang) + + def _process_channel_open_confirmation(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a channel open confirmation response""" + + recv_chan = packet.get_uint32() + send_chan = packet.get_uint32() + send_window = packet.get_uint32() + send_pktsize = packet.get_uint32() + + # Work around an off-by-one error in dropbear introduced in + # https://github.com/mkj/dropbear/commit/49263b5 + if b'dropbear' in self._server_version and self._compressor: + send_pktsize -= 1 + + chan = self._channels.get(recv_chan) + if chan: + chan.process_open_confirmation(send_chan, send_window, + send_pktsize, packet) + else: + self.logger.debug1('Received open confirmation for unknown ' + 'channel %d', recv_chan) + + raise ProtocolError('Invalid channel number') + + def _process_channel_open_failure(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a channel open failure response""" + + recv_chan = packet.get_uint32() + code = packet.get_uint32() + reason_bytes = packet.get_string() + lang_bytes = packet.get_string() + packet.check_end() + + try: + reason = reason_bytes.decode('utf-8') + lang = lang_bytes.decode('ascii') + except UnicodeDecodeError: + raise ProtocolError('Invalid channel open failure') from None + + chan = self._channels.get(recv_chan) + if chan: + chan.process_open_failure(code, reason, lang) + else: + self.logger.debug1('Received open failure for unknown ' + 'channel %d', recv_chan) + + raise ProtocolError('Invalid channel number') 
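A channel open failure processed above is surfaced to awaiting callers as a ChannelOpenError. As a minimal usage sketch from the client side (assuming this module is AsyncSSH imported as asyncssh; the host and port are placeholders, not values from this repository):

import asyncio

import asyncssh

async def try_direct_connection() -> None:
    async with asyncssh.connect('host.example.com') as conn:
        try:
            # Request a direct TCP/IP channel; a MSG_CHANNEL_OPEN_FAILURE
            # reply from the server is raised here as ChannelOpenError
            reader, writer = await conn.open_connection('127.0.0.1', 8080)
        except asyncssh.ChannelOpenError as exc:
            print(f'Open failed: {exc.reason} (code {exc.code})')
        else:
            writer.close()

asyncio.run(try_direct_connection())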
+
+    def _process_keepalive_at_openssh_dot_com_global_request(
+            self, packet: SSHPacket) -> None:
+        """Process an incoming OpenSSH keepalive request"""
+
+        packet.check_end()
+
+        self.logger.debug2('Received OpenSSH keepalive request')
+        self._report_global_response(True)
+
+    _packet_handlers = {
+        MSG_DISCONNECT: _process_disconnect,
+        MSG_IGNORE: _process_ignore,
+        MSG_UNIMPLEMENTED: _process_unimplemented,
+        MSG_DEBUG: _process_debug,
+        MSG_SERVICE_REQUEST: _process_service_request,
+        MSG_SERVICE_ACCEPT: _process_service_accept,
+        MSG_EXT_INFO: _process_ext_info,
+
+        MSG_KEXINIT: _process_kexinit,
+        MSG_NEWKEYS: _process_newkeys,
+
+        MSG_USERAUTH_REQUEST: _process_userauth_request,
+        MSG_USERAUTH_FAILURE: _process_userauth_failure,
+        MSG_USERAUTH_SUCCESS: _process_userauth_success,
+        MSG_USERAUTH_BANNER: _process_userauth_banner,
+
+        MSG_GLOBAL_REQUEST: _process_global_request,
+        MSG_REQUEST_SUCCESS: _process_global_response,
+        MSG_REQUEST_FAILURE: _process_global_response,
+
+        MSG_CHANNEL_OPEN: _process_channel_open,
+        MSG_CHANNEL_OPEN_CONFIRMATION: _process_channel_open_confirmation,
+        MSG_CHANNEL_OPEN_FAILURE: _process_channel_open_failure
+    }
+
+    def abort(self) -> None:
+        """Forcibly close the SSH connection
+
+           This method closes the SSH connection immediately, without
+           waiting for pending operations to complete and without sending
+           an explicit SSH disconnect message. Buffered data waiting to be
+           sent will be lost and no more data will be received. When the
+           connection is closed, :meth:`connection_lost()
+           <SSHClient.connection_lost>` on the associated :class:`SSHClient`
+           object will be called with the value `None`.
+
+        """
+
+        self.logger.info('Aborting connection')
+
+        self._force_close(None)
+
+    def close(self) -> None:
+        """Cleanly close the SSH connection
+
+           This method calls :meth:`disconnect` with the reason set to
+           indicate that the connection was closed explicitly by the
+           application.
+
+        """
+
+        self.logger.info('Closing connection')
+
+        self.disconnect(DISC_BY_APPLICATION, 'Disconnected by application')
+
+    async def wait_closed(self) -> None:
+        """Wait for this connection to close
+
+           This method is a coroutine which can be called to block until
+           this connection has finished closing.
+
+        """
+
+        if self._agent:
+            await self._agent.wait_closed()
+
+        await self._close_event.wait()
+
+    def disconnect(self, code: int, reason: str,
+                   lang: str = DEFAULT_LANG) -> None:
+        """Disconnect the SSH connection
+
+           This method sends a disconnect message and closes the SSH
+           connection after buffered data waiting to be written has been
+           sent. No more data will be received. When the connection is
+           fully closed, :meth:`connection_lost()
+           <SSHClient.connection_lost>` on the associated :class:`SSHClient`
+           or :class:`SSHServer` object will be called with the value `None`.
+
+           :param code:
+               The reason for the disconnect, from
+               :ref:`disconnect reason codes <DisconnectReasons>`
+           :param reason:
+               A human readable reason for the disconnect
+           :param lang:
+               The language the reason is in
+           :type code: `int`
+           :type reason: `str`
+           :type lang: `str`
+
+        """
+
+        for chan in list(self._channels.values()):
+            chan.close()
+
+        self._send_disconnect(code, reason, lang)
+        self._force_close(None)
+
+    def get_extra_info(self, name: str, default: Any = None) -> Any:
+        """Get additional information about the connection
+
+           This method returns extra information about the connection once
+           it is established.
Supported values include everything supported + by a socket transport plus: + + | username + | client_version + | server_version + | send_cipher + | send_mac + | send_compression + | recv_cipher + | recv_mac + | recv_compression + + See :meth:`get_extra_info() ` + in :class:`asyncio.BaseTransport` for more information. + + Additional information stored on the connection by calling + :meth:`set_extra_info` can also be returned here. + + """ + + return self._extra.get(name, + self._transport.get_extra_info(name, default) + if self._transport else default) + + def set_extra_info(self, **kwargs: Any) -> None: + """Store additional information associated with the connection + + This method allows extra information to be associated with the + connection. The information to store should be passed in as + keyword parameters and can later be returned by calling + :meth:`get_extra_info` with one of the keywords as the name + to retrieve. + + """ + + self._extra.update(**kwargs) + + def set_keepalive(self, interval: Union[None, float, str] = None, + count_max: Optional[int] = None) -> None: + """Set keep-alive timer on this connection + + This method sets the parameters of the keepalive timer on the + connection. If *interval* is set to a non-zero value, + keep-alive requests will be sent whenever the connection is + idle, and if a response is not received after *count_max* + attempts, the connection is closed. + + :param interval: (optional) + The time in seconds to wait before sending a keep-alive message + if no data has been received. This defaults to 0, which + disables sending these messages. + :param count_max: (optional) + The maximum number of keepalive messages which will be sent + without getting a response before closing the connection. + This defaults to 3, but only applies when *interval* is + non-zero. + :type interval: `int`, `float`, or `str` + :type count_max: `int` + + """ + + if interval is not None: + if isinstance(interval, str): + interval = parse_time_interval(interval) + + if interval < 0: + raise ValueError('Keepalive interval cannot be negative') + + self._keepalive_interval = interval + + if count_max is not None: + if count_max < 0: + raise ValueError('Keepalive count max cannot be negative') + + self._keepalive_count_max = count_max + + self._reset_keepalive_timer() + + def send_debug(self, msg: str, lang: str = DEFAULT_LANG, + always_display: bool = False) -> None: + """Send a debug message on this connection + + This method can be called to send a debug message to the + other end of the connection. + + :param msg: + The debug message to send + :param lang: + The language the message is in + :param always_display: + Whether or not to display the message + :type msg: `str` + :type lang: `str` + :type always_display: `bool` + + """ + + self.logger.debug1('Sending debug message: %s%s', msg, + ' (always display)' if always_display else '') + + self.send_packet(MSG_DEBUG, Boolean(always_display), + String(msg), String(lang)) + + def create_tcp_channel(self, encoding: Optional[str] = None, + errors: str = 'strict', + window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + SSHTCPChannel: + """Create an SSH TCP channel for a new direct TCP connection + + This method can be called by :meth:`connection_requested() + ` to create an + :class:`SSHTCPChannel` with the desired encoding, Unicode + error handling strategy, window, and max packet size for + a newly created SSH direct connection. 
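The keepalive and extra-info calls documented above can be combined as follows; a minimal sketch, assuming this module is AsyncSSH imported as asyncssh and using a placeholder host:

import asyncio

import asyncssh

async def show_connection_info() -> None:
    async with asyncssh.connect('host.example.com') as conn:
        # Send a keepalive request after 30 idle seconds; close the
        # connection after 3 unanswered requests
        conn.set_keepalive(interval=30, count_max=3)

        # Algorithms recorded via set_extra_info() during key exchange
        print('send cipher:', conn.get_extra_info('send_cipher'))
        print('recv mac:', conn.get_extra_info('recv_mac'))

asyncio.run(show_connection_info())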
+ + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the + connection. This defaults to `None`, allowing the + application to send and receive raw bytes. + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: :class:`SSHTCPChannel` + + """ + + return SSHTCPChannel(self, self._loop, encoding, + errors, window, max_pktsize) + + def create_unix_channel(self, encoding: Optional[str] = None, + errors: str = 'strict', + window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + SSHUNIXChannel: + """Create an SSH UNIX channel for a new direct UNIX domain connection + + This method can be called by :meth:`unix_connection_requested() + ` to create an + :class:`SSHUNIXChannel` with the desired encoding, Unicode + error handling strategy, window, and max packet size for + a newly created SSH direct UNIX domain socket connection. + + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the + connection. This defaults to `None`, allowing the + application to send and receive raw bytes. + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: :class:`SSHUNIXChannel` + + """ + + return SSHUNIXChannel(self, self._loop, encoding, + errors, window, max_pktsize) + + def create_x11_channel( + self, window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> SSHX11Channel: + """Create an SSH X11 channel to use in X11 forwarding""" + + return SSHX11Channel(self, self._loop, None, 'strict', + window, max_pktsize) + + def create_agent_channel( + self, window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> SSHAgentChannel: + """Create an SSH agent channel to use in agent forwarding""" + + return SSHAgentChannel(self, self._loop, None, 'strict', + window, max_pktsize) + + async def create_connection( + self, session_factory: SSHTCPSessionFactory[AnyStr], + remote_host: str, remote_port: int, orig_host: str = '', + orig_port: int = 0, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + Tuple[SSHTCPChannel[AnyStr], SSHTCPSession[AnyStr]]: + """Create an SSH direct or forwarded TCP connection""" + + raise NotImplementedError + + async def create_unix_connection( + self, session_factory: SSHUNIXSessionFactory[AnyStr], + remote_path: str, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + Tuple[SSHUNIXChannel[AnyStr], SSHUNIXSession[AnyStr]]: + + """Create an SSH direct or forwarded UNIX domain socket connection""" + + raise NotImplementedError + + async def forward_connection( + self, dest_host: str, dest_port: int) -> SSHForwarder: + """Forward a tunneled TCP connection + + This method is a coroutine which can be returned by a + `session_factory` to forward connections tunneled over + SSH to the specified destination host and port. 
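As a sketch of the pattern the forward_connection() docstring above describes (an assumption-laden example: it relies on AsyncSSH's documented create_server() API, in which the session factory receives the originating host and port; host names and ports are placeholders):

import asyncio

import asyncssh

async def serve_forwarded_connections() -> None:
    async with asyncssh.connect('host.example.com') as conn:
        # Ask the server to listen on port 8080 and relay each tunneled
        # connection to a local web server on port 80
        listener = await conn.create_server(
            lambda orig_host, orig_port:
                conn.forward_connection('localhost', 80), '', 8080)
        await listener.wait_closed()

asyncio.run(serve_forwarded_connections())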
+ + :param dest_host: + The hostname or address to forward the connections to + :param dest_port: + The port number to forward the connections to + :type dest_host: `str` or `None` + :type dest_port: `int` + + :returns: :class:`asyncio.BaseProtocol` + + """ + + try: + _, peer = await self._loop.create_connection(SSHForwarder, + dest_host, dest_port) + + self.logger.info(' Forwarding TCP connection to %s', + (dest_host, dest_port)) + except OSError as exc: + raise ChannelOpenError(OPEN_CONNECT_FAILED, str(exc)) from None + + return SSHForwarder(cast(SSHForwarder, peer)) + + async def forward_unix_connection(self, dest_path: str) -> SSHForwarder: + """Forward a tunneled UNIX domain socket connection + + This method is a coroutine which can be returned by a + `session_factory` to forward connections tunneled over + SSH to the specified destination path. + + :param dest_path: + The path to forward the connection to + :type dest_path: `str` + + :returns: :class:`asyncio.BaseProtocol` + + """ + + try: + _, peer = \ + await self._loop.create_unix_connection(SSHForwarder, dest_path) + + self.logger.info(' Forwarding UNIX connection to %s', dest_path) + except OSError as exc: + raise ChannelOpenError(OPEN_CONNECT_FAILED, str(exc)) from None + + return SSHForwarder(cast(SSHForwarder, peer)) + + @async_context_manager + async def forward_local_port( + self, listen_host: str, listen_port: int, + dest_host: str, dest_port: int, + accept_handler: Optional[SSHAcceptHandler] = None) -> SSHListener: + """Set up local port forwarding + + This method is a coroutine which attempts to set up port + forwarding from a local listening port to a remote host and port + via the SSH connection. If the request is successful, the + return value is an :class:`SSHListener` object which can be used + later to shut down the port forwarding. + + :param listen_host: + The hostname or address on the local host to listen on + :param listen_port: + The port number on the local host to listen on + :param dest_host: + The hostname or address to forward the connections to + :param dest_port: + The port number to forward the connections to + :param accept_handler: + A `callable` or coroutine which takes arguments of the + original host and port of the client and decides whether + or not to allow connection forwarding, returning `True` to + accept the connection and begin forwarding or `False` to + reject and close it. 
+ :type listen_host: `str` + :type listen_port: `int` + :type dest_host: `str` + :type dest_port: `int` + :type accept_handler: `callable` or coroutine + + :returns: :class:`SSHListener` + + :raises: :exc:`OSError` if the listener can't be opened + + """ + + async def tunnel_connection( + session_factory: SSHTCPSessionFactory[bytes], + orig_host: str, orig_port: int) -> \ + Tuple[SSHTCPChannel[bytes], SSHTCPSession[bytes]]: + """Forward a local connection over SSH""" + + if accept_handler: + result = accept_handler(orig_host, orig_port) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + if not result: + self.logger.info('Request for TCP forwarding from ' + '%s to %s denied by application', + (orig_host, orig_port), + (dest_host, dest_port)) + + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Connection forwarding denied') + + return (await self.create_connection(session_factory, + dest_host, dest_port, + orig_host, orig_port)) + + if (listen_host, listen_port) == (dest_host, dest_port): + self.logger.info('Creating local TCP forwarder on %s', + (listen_host, listen_port)) + else: + self.logger.info('Creating local TCP forwarder from %s to %s', + (listen_host, listen_port), + (dest_host, dest_port)) + + try: + listener = await create_tcp_forward_listener(self, self._loop, + tunnel_connection, + listen_host, + listen_port) + except OSError as exc: + self.logger.debug1('Failed to create local TCP listener: %s', exc) + raise + + if listen_port == 0: + listen_port = listener.get_port() + + if dest_port == 0: + dest_port = listen_port + + self._local_listeners[listen_host, listen_port] = listener + + return listener + + @async_context_manager + async def forward_local_path(self, listen_path: str, + dest_path: str) -> SSHListener: + """Set up local UNIX domain socket forwarding + + This method is a coroutine which attempts to set up UNIX domain + socket forwarding from a local listening path to a remote path + via the SSH connection. If the request is successful, the + return value is an :class:`SSHListener` object which can be used + later to shut down the UNIX domain socket forwarding. + + :param listen_path: + The path on the local host to listen on + :param dest_path: + The path on the remote host to forward the connections to + :type listen_path: `str` + :type dest_path: `str` + + :returns: :class:`SSHListener` + + :raises: :exc:`OSError` if the listener can't be opened + + """ + + async def tunnel_connection( + session_factory: SSHUNIXSessionFactory[bytes]) -> \ + Tuple[SSHUNIXChannel[bytes], SSHUNIXSession[bytes]]: + """Forward a local connection over SSH""" + + return await self.create_unix_connection(session_factory, + dest_path) + + self.logger.info('Creating local UNIX forwarder from %s to %s', + listen_path, dest_path) + + try: + listener = await create_unix_forward_listener(self, self._loop, + tunnel_connection, + listen_path) + except OSError as exc: + self.logger.debug1('Failed to create local UNIX listener: %s', exc) + raise + + self._local_listeners[listen_path] = listener + + return listener + + def close_forward_listener(self, listen_key: ListenKey) -> None: + """Mark a local forwarding listener as closed""" + + self._local_listeners.pop(listen_key, None) + + def detach_x11_listener(self, chan: SSHChannel[AnyStr]) -> None: + """Detach a session from a local X11 listener""" + + raise NotImplementedError + + +class SSHClientConnection(SSHConnection): + """SSH client connection + + This class represents an SSH client connection. 
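A usage sketch for forward_local_port() as documented above (assuming this module is AsyncSSH imported as asyncssh; host names and ports are placeholders):

import asyncio

import asyncssh

async def tunnel_web_port() -> None:
    async with asyncssh.connect('host.example.com') as conn:
        # Listen on localhost:8080 and tunnel each connection over SSH
        # to web.internal:80, as resolved on the remote side
        listener = await conn.forward_local_port(
            'localhost', 8080, 'web.internal', 80)
        await listener.wait_closed()

asyncio.run(tunnel_web_port())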
+ + Once authentication is successful on a connection, new client + sessions can be opened by calling :meth:`create_session`. + + Direct TCP connections can be opened by calling + :meth:`create_connection`. + + Remote listeners for forwarded TCP connections can be opened by + calling :meth:`create_server`. + + Direct UNIX domain socket connections can be opened by calling + :meth:`create_unix_connection`. + + Remote listeners for forwarded UNIX domain socket connections + can be opened by calling :meth:`create_unix_server`. + + TCP port forwarding can be set up by calling :meth:`forward_local_port` + or :meth:`forward_remote_port`. + + UNIX domain socket forwarding can be set up by calling + :meth:`forward_local_path` or :meth:`forward_remote_path`. + + Mixed forwarding from a TCP port to a UNIX domain socket or + vice-versa can be set up by calling :meth:`forward_local_port_to_path`, + :meth:`forward_local_path_to_port`, + :meth:`forward_remote_port_to_path`, or + :meth:`forward_remote_path_to_port`. + + """ + + _options: 'SSHClientConnectionOptions' + _owner: SSHClient + _x11_listener: Optional[SSHX11ClientListener] + + def __init__(self, loop: asyncio.AbstractEventLoop, + options: 'SSHClientConnectionOptions', + acceptor: _AcceptHandler = None, + error_handler: _ErrorHandler = None, + wait: Optional[str] = None): + super().__init__(loop, options, acceptor, error_handler, + wait, server=False) + + self._host = options.host + self._port = options.port + + self._known_hosts = options.known_hosts + self._host_key_alias = options.host_key_alias + + self._server_host_key_algs: Optional[Sequence[bytes]] = None + self._server_host_key: Optional[SSHKey] = None + + self._username = options.username + self._password = options.password + + self._client_host_keys: List[_ClientHostKey] = [] + + self._client_keys: List[SSHKeyPair] = \ + list(options.client_keys) if options.client_keys else [] + self._saved_rsa_key: Optional[_ClientHostKey] = None + + if options.preferred_auth != (): + self._preferred_auth = [method.encode('ascii') for method in + options.preferred_auth] + else: + self._preferred_auth = get_supported_client_auth_methods() + + self._disable_trivial_auth = options.disable_trivial_auth + + if options.agent_path is not None: + self._agent = SSHAgentClient(options.agent_path) + + self._agent_identities = options.agent_identities + self._agent_forward_path = options.agent_forward_path + self._get_agent_keys = bool(self._agent) + + self._pkcs11_provider = options.pkcs11_provider + self._pkcs11_pin = options.pkcs11_pin + self._get_pkcs11_keys = bool(self._pkcs11_provider) + + gss_host = options.gss_host if options.gss_host != () else options.host + + if gss_host: + try: + self._gss = GSSClient(gss_host, options.gss_delegate_creds) + self._gss_kex = options.gss_kex + self._gss_auth = options.gss_auth + self._gss_mic_auth = self._gss_auth + except GSSError: + pass + + self._kbdint_password_auth = False + + self._remote_listeners: \ + Dict[ListenKey, Union[SSHTCPClientListener, + SSHUNIXClientListener]] = {} + + self._dynamic_remote_listeners: Dict[str, SSHTCPClientListener] = {} + + def _connection_made(self) -> None: + """Handle the opening of a new connection""" + + assert self._transport is not None + + if not self._host: + if self._peer_addr: + self._host = self._peer_addr + self._port = self._peer_port + else: + remote_peer = self.get_extra_info('remote_peername') + self._host, self._port = cast(HostPort, remote_peer) + + if self._options.client_host_keysign: + sock = cast(socket.socket, + 
self._transport.get_extra_info('socket')) + + self._client_host_keys = list(get_keysign_keys( + self._options.client_host_keysign, sock.fileno(), + self._options.client_host_pubkeys)) + elif self._options.client_host_keypairs: + self._client_host_keys = list(self._options.client_host_keypairs) + else: + self._client_host_keys = [] + + if self._known_hosts is None: + self._trusted_host_keys = None + self._trusted_ca_keys = None + else: + if not self._known_hosts: + default_known_hosts = Path('~', '.ssh', + 'known_hosts').expanduser() + + if (default_known_hosts.is_file() and + os.access(default_known_hosts, os.R_OK)): + self._known_hosts = str(default_known_hosts) + else: + self._known_hosts = b'' + + port = self._port if self._port != DEFAULT_PORT else None + + self._match_known_hosts(cast(KnownHostsArg, self._known_hosts), + self._host_key_alias or self._host, + self._peer_addr, port) + + default_host_key_algs = [] + + if self._options.server_host_key_algs != 'default': + if self._trusted_host_key_algs: + default_host_key_algs = self._trusted_host_key_algs + + if self._trusted_ca_keys: + default_host_key_algs = \ + get_default_certificate_algs() + default_host_key_algs + + if not default_host_key_algs: + default_host_key_algs = \ + get_default_certificate_algs() + get_default_public_key_algs() + + if self._x509_trusted_certs is not None: + if self._x509_trusted_certs or self._x509_trusted_cert_paths: + default_host_key_algs = \ + get_default_x509_certificate_algs() + default_host_key_algs + + self._server_host_key_algs = _select_host_key_algs( + self._options.server_host_key_algs, + cast(DefTuple[str], self._options.config.get( + 'HostKeyAlgorithms', ())), + default_host_key_algs) + + self.logger.info('Connected to SSH server at %s', + (self._host, self._port)) + + if self._options.proxy_command: + proxy_command = ' '.join(shlex.quote(arg) for arg in + self._options.proxy_command) + self.logger.info(' Proxy command: %s', proxy_command) + else: + self.logger.info(' Local address: %s', + (self._local_addr, self._local_port)) + self.logger.info(' Peer address: %s', + (self._peer_addr, self._peer_port)) + + + def _cleanup(self, exc: Optional[Exception]) -> None: + """Clean up this client connection""" + + if self._agent: + self._agent.close() + + if self._remote_listeners: + for tcp_listener in list(self._remote_listeners.values()): + tcp_listener.close() + + self._remote_listeners = {} + self._dynamic_remote_listeners = {} + + if exc is None: + self.logger.info('Connection closed') + elif isinstance(exc, ConnectionLost): + self.logger.info(str(exc)) + else: + self.logger.info('Connection failure: ' + str(exc)) + + super()._cleanup(exc) + + + def _choose_signature_alg(self, keypair: _ClientHostKey) -> bool: + """Choose signature algorithm to use for key-based authentication""" + + if self._server_sig_algs: + for alg in keypair.sig_algorithms: + if alg in self._sig_algs and alg in self._server_sig_algs: + keypair.set_sig_algorithm(alg) + return True + + return keypair.sig_algorithms[-1] in self._sig_algs + + def validate_server_host_key(self, key_data: bytes) -> SSHKey: + """Validate and return the server's host key""" + + try: + host_key = self._validate_host_key( + self._host_key_alias or self._host, + self._peer_addr, self._port, key_data) + except ValueError as exc: + host = self._host + + if self._host_key_alias: + host += f' with alias {self._host_key_alias}' + + raise HostKeyNotVerifiable(f'{exc} for host {host}') from None + + self._server_host_key = host_key + return host_key + + def 
get_server_host_key(self) -> Optional[SSHKey]:
+        """Return the server host key used in the key exchange
+
+           This method returns the server host key used to complete the
+           key exchange with the server.
+
+           If GSS key exchange is used, `None` is returned.
+
+           :returns: An :class:`SSHKey` public key or `None`
+
+        """
+
+        return self._server_host_key
+
+    def get_server_auth_methods(self) -> Sequence[str]:
+        """Return the authentication methods available on the server
+
+           This method returns the auth methods available to authenticate
+           to the server.
+
+           :returns: `list` of `str`
+
+        """
+
+        return [method.decode('ascii') for method in self._auth_methods]
+
+    def try_next_auth(self) -> None:
+        """Attempt client authentication using the next compatible method"""
+
+        if self._auth:
+            self._auth.cancel()
+            self._auth = None
+
+        while self._auth_methods:
+            method = self._auth_methods.pop(0)
+
+            self._auth = lookup_client_auth(self, method)
+
+            if self._auth:
+                return
+
+        self.logger.info('Auth failed for user %s', self._username)
+
+        self._force_close(PermissionDenied('Permission denied for user '
+                                           f'{self._username} on host '
+                                           f'{self._host}'))
+
+    def gss_kex_auth_requested(self) -> bool:
+        """Return whether to allow GSS key exchange authentication or not"""
+
+        if self._gss_kex_auth:
+            self._gss_kex_auth = False
+            return True
+        else:
+            return False
+
+    def gss_mic_auth_requested(self) -> bool:
+        """Return whether to allow GSS MIC authentication or not"""
+
+        if self._gss_mic_auth:
+            self._gss_mic_auth = False
+            return True
+        else:
+            return False
+
+    async def host_based_auth_requested(self) -> \
+            Tuple[Optional[_ClientHostKey], str, str]:
+        """Return a host key, host, and user to authenticate with"""
+
+        if not self._host_based_auth:
+            return None, '', ''
+
+        key: Optional[_ClientHostKey]
+
+        while True:
+            if self._saved_rsa_key:
+                key = self._saved_rsa_key
+                key.algorithm = key.sig_algorithm + b'-cert-v01@openssh.com'
+                self._saved_rsa_key = None
+            else:
+                try:
+                    key = self._client_host_keys.pop(0)
+                except IndexError:
+                    key = None
+                    break
+
+            assert key is not None
+
+            if self._choose_signature_alg(key):
+                if key.algorithm == b'ssh-rsa-cert-v01@openssh.com' and \
+                        key.sig_algorithm != b'ssh-rsa':
+                    self._saved_rsa_key = key
+
+                break
+
+        client_host = self._options.client_host
+
+        if client_host is None:
+            sockname = self.get_extra_info('sockname')
+
+            if sockname:
+                client_host, _ = await self._loop.getnameinfo(
+                    cast(SockAddr, sockname), socket.NI_NUMERICSERV)
+            else:
+                client_host = ''
+
+        # Add a trailing '.' to the client host to be compatible with
+        # ssh-keysign from OpenSSH
+        if self._options.client_host_keysign and client_host[-1:] != '.':
+            client_host += '.'
+
+        return key, client_host, self._options.client_username
+
+    async def public_key_auth_requested(self) -> Optional[SSHKeyPair]:
+        """Return a client key pair to authenticate with"""
+
+        if not self._public_key_auth:
+            return None
+
+        if self._get_agent_keys:
+            assert self._agent is not None
+
+            try:
+                agent_keys = await self._agent.get_keys(self._agent_identities)
+                self._client_keys[:0] = list(agent_keys)
+            except ValueError:
+                pass
+
+            self._get_agent_keys = False
+
+        if self._get_pkcs11_keys:
+            pkcs11_keys = await self._loop.run_in_executor(
+                None, load_pkcs11_keys, self._pkcs11_provider, self._pkcs11_pin)
+
+            self._client_keys[:0] = list(pkcs11_keys)
+            self._get_pkcs11_keys = False
+
+        while True:
+            if not self._client_keys:
+                result = self._owner.public_key_auth_requested()
+
+                if inspect.isawaitable(result):
+                    result = await cast(Awaitable[KeyPairListArg], result)
+
+                if not result:
+                    return None
+
+                result: KeyPairListArg
+
+                self._client_keys = list(load_keypairs(result))
+
+            # OpenSSH versions before 7.8 didn't support RSA SHA-2
+            # signature names in certificate key types, requiring the
+            # use of ssh-rsa-cert-v01@openssh.com as the key type even
+            # when using SHA-2 signatures. However, OpenSSH 8.8 and
+            # later reject ssh-rsa-cert-v01@openssh.com as a key type
+            # by default, requiring that the RSA SHA-2 version of the key
+            # type be used. This makes it difficult to use RSA keys with
+            # certificates without knowing the version of the remote
+            # server and which key types it will accept.
+            #
+            # The code below works around this by trying multiple key
+            # types during public key and host-based authentication when
+            # using SHA-2 signatures with RSA keys signed by certificates.
+
+            if self._saved_rsa_key:
+                key = self._saved_rsa_key
+                key.algorithm = key.sig_algorithm + b'-cert-v01@openssh.com'
+                self._saved_rsa_key = None
+            else:
+                key = self._client_keys.pop(0)
+
+            if self._choose_signature_alg(key):
+                if key.algorithm == b'ssh-rsa-cert-v01@openssh.com' and \
+                        key.sig_algorithm != b'ssh-rsa':
+                    self._saved_rsa_key = key
+
+                return key
+
+    async def password_auth_requested(self) -> Optional[str]:
+        """Return a password to authenticate with"""
+
+        if not self._password_auth and not self._kbdint_password_auth:
+            return None
+
+        if self._password is not None:
+            password: Optional[str] = self._password
+            self._password = None
+        else:
+            result = self._owner.password_auth_requested()
+
+            if inspect.isawaitable(result):
+                password = await cast(Awaitable[Optional[str]], result)
+            else:
+                password = cast(Optional[str], result)
+
+        return password
+
+    async def password_change_requested(self, prompt: str,
+                                        lang: str) -> Tuple[str, str]:
+        """Return a password to authenticate with and what to change it to"""
+
+        result = self._owner.password_change_requested(prompt, lang)
+
+        if inspect.isawaitable(result):
+            result = await cast(Awaitable[PasswordChangeResponse], result)
+
+        return cast(PasswordChangeResponse, result)
+
+    def password_changed(self) -> None:
+        """Report a successful password change"""
+
+        self._owner.password_changed()
+
+    def password_change_failed(self) -> None:
+        """Report a failed password change"""
+
+        self._owner.password_change_failed()
+
+    async def kbdint_auth_requested(self) -> Optional[str]:
+        """Return the list of supported keyboard-interactive auth methods
+
+           If keyboard-interactive auth is not supported in the client but
+           a password was provided when the connection was opened, this
+           will allow sending the password via keyboard-interactive auth.
+ + """ + + if not self._kbdint_auth: + return None + + result = self._owner.kbdint_auth_requested() + + if inspect.isawaitable(result): + result = await cast(Awaitable[Optional[str]], result) + + if result is NotImplemented: + if self._password is not None and not self._kbdint_password_auth: + self._kbdint_password_auth = True + result = '' + else: + result = None + + return cast(Optional[str], result) + + async def kbdint_challenge_received( + self, name: str, instructions: str, lang: str, + prompts: KbdIntPrompts) -> Optional[KbdIntResponse]: + """Return responses to a keyboard-interactive auth challenge""" + + if self._kbdint_password_auth: + if not prompts: + # Silently drop any empty challenges used to print messages + response: Optional[KbdIntResponse] = [] + elif len(prompts) == 1: + prompt = prompts[0][0].lower() + + if 'password' in prompt or 'passcode' in prompt: + password = await self.password_auth_requested() + + response = [password] if password is not None else None + else: + response = None + else: + response = None + else: + result = self._owner.kbdint_challenge_received(name, instructions, + lang, prompts) + + if inspect.isawaitable(result): + response = await cast(Awaitable[KbdIntResponse], result) + else: + response = cast(KbdIntResponse, result) + + return response + + def _process_session_open(self, _packet: SSHPacket) -> \ + Tuple[SSHServerChannel, SSHServerSession]: + """Process an inbound session open request + + These requests are disallowed on an SSH client. + + """ + + # pylint: disable=no-self-use + + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Session open forbidden on client') + + def _process_direct_tcpip_open(self, _packet: SSHPacket) -> \ + Tuple[SSHTCPChannel[bytes], SSHTCPSession[bytes]]: + """Process an inbound direct TCP/IP channel open request + + These requests are disallowed on an SSH client. + + """ + + # pylint: disable=no-self-use + + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Direct TCP/IP open forbidden on client') + + def _process_forwarded_tcpip_open(self, packet: SSHPacket) -> \ + Tuple[SSHTCPChannel, MaybeAwait[SSHTCPSession]]: + """Process an inbound forwarded TCP/IP channel open request""" + + dest_host_bytes = packet.get_string() + dest_port = packet.get_uint32() + orig_host_bytes = packet.get_string() + orig_port = packet.get_uint32() + packet.check_end() + + try: + dest_host = dest_host_bytes.decode('utf-8') + orig_host = orig_host_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid forwarded TCP/IP channel ' + 'open request') from None + + # Some buggy servers send back a port of `0` instead of the actual + # listening port when reporting connections which arrive on a listener + # set up on a dynamic port. This lookup attempts to work around that. 
+ listener = cast(SSHTCPClientListener[bytes], + self._remote_listeners.get((dest_host, dest_port)) or + self._dynamic_remote_listeners.get(dest_host)) + + if listener: + chan, session = listener.process_connection(orig_host, orig_port) + + self.logger.info('Accepted forwarded TCP connection on %s', + (dest_host, dest_port)) + self.logger.info(' Client address: %s', (orig_host, orig_port)) + + return chan, session + else: + raise ChannelOpenError(OPEN_CONNECT_FAILED, 'No such listener') + + async def close_client_tcp_listener(self, listen_host: str, + listen_port: int) -> None: + """Close a remote TCP/IP listener""" + + await self._make_global_request( + b'cancel-tcpip-forward', String(listen_host), UInt32(listen_port)) + + self.logger.info('Closed remote TCP listener on %s', + (listen_host, listen_port)) + + listener = self._remote_listeners.get((listen_host, listen_port)) + + if listener: + if self._dynamic_remote_listeners.get(listen_host) == listener: + del self._dynamic_remote_listeners[listen_host] + + del self._remote_listeners[listen_host, listen_port] + + def _process_direct_streamlocal_at_openssh_dot_com_open( + self, _packet: SSHPacket) -> \ + Tuple[SSHUNIXChannel, SSHUNIXSession]: + """Process an inbound direct UNIX domain channel open request + + These requests are disallowed on an SSH client. + + """ + + # pylint: disable=no-self-use + + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Direct UNIX domain socket open ' + 'forbidden on client') + + def _process_forwarded_streamlocal_at_openssh_dot_com_open( + self, packet: SSHPacket) -> \ + Tuple[SSHUNIXChannel, MaybeAwait[SSHUNIXSession]]: + """Process an inbound forwarded UNIX domain channel open request""" + + dest_path_bytes = packet.get_string() + _ = packet.get_string() # reserved + packet.check_end() + + try: + dest_path = dest_path_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid forwarded UNIX domain channel ' + 'open request') from None + + listener = cast(SSHUNIXClientListener[bytes], + self._remote_listeners.get(dest_path)) + + if listener: + chan, session = listener.process_connection() + + self.logger.info('Accepted remote UNIX connection on %s', dest_path) + + return chan, session + else: + raise ChannelOpenError(OPEN_CONNECT_FAILED, 'No such listener') + + async def close_client_unix_listener(self, listen_path: str) -> None: + """Close a remote UNIX domain socket listener""" + + await self._make_global_request( + b'cancel-streamlocal-forward@openssh.com', String(listen_path)) + + self.logger.info('Closed UNIX listener on %s', listen_path) + + if listen_path in self._remote_listeners: + del self._remote_listeners[listen_path] + + def _process_x11_open(self, packet: SSHPacket) -> \ + Tuple[SSHX11Channel, Awaitable[SSHX11ClientForwarder]]: + """Process an inbound X11 channel open request""" + + orig_host_bytes = packet.get_string() + orig_port = packet.get_uint32() + + packet.check_end() + + try: + orig_host = orig_host_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid forwarded X11 channel ' + 'open request') from None + + if self._x11_listener: + self.logger.info('Accepted X11 connection') + self.logger.info(' Client address: %s', (orig_host, orig_port)) + + chan = self.create_x11_channel() + + chan.set_inbound_peer_names(orig_host, orig_port) + + return chan, self._x11_listener.forward_connection() + else: + raise ChannelOpenError(OPEN_CONNECT_FAILED, + 'X11 forwarding disabled') + + def _process_auth_agent_at_openssh_dot_com_open( + 
self, packet: SSHPacket) -> \ + Tuple[SSHUNIXChannel, Awaitable[SSHForwarder]]: + """Process an inbound auth agent channel open request""" + + packet.check_end() + + if self._agent_forward_path: + self.logger.info('Accepted SSH agent connection') + + return (self.create_unix_channel(), + self.forward_unix_connection(self._agent_forward_path)) + else: + raise ChannelOpenError(OPEN_CONNECT_FAILED, + 'Auth agent forwarding disabled') + + async def attach_x11_listener(self, chan: SSHClientChannel[AnyStr], + display: Optional[str], + auth_path: Optional[str], + single_connection: bool) -> \ + Tuple[bytes, bytes, int]: + """Attach a channel to a local X11 display""" + + if not display: + display = os.environ.get('DISPLAY') + + if not display: + raise ValueError('X11 display not set') + + if not self._x11_listener: + self._x11_listener = await create_x11_client_listener( + self._loop, display, auth_path) + + return self._x11_listener.attach(display, chan, single_connection) + + def detach_x11_listener(self, chan: SSHChannel[AnyStr]) -> None: + """Detach a session from a local X11 listener""" + + if self._x11_listener: + if self._x11_listener.detach(chan): + self._x11_listener = None + + async def create_session(self, session_factory: SSHClientSessionFactory, + command: DefTuple[Optional[str]] = (), *, + subsystem: DefTuple[Optional[str]]= (), + env: DefTuple[_Env] = (), + send_env: DefTuple[_SendEnv] = (), + request_pty: DefTuple[Union[bool, str]] = (), + term_type: DefTuple[Optional[str]] = (), + term_size: DefTuple[TermSizeArg] = (), + term_modes: DefTuple[TermModesArg] = (), + x11_forwarding: DefTuple[Union[int, str]] = (), + x11_display: DefTuple[Optional[str]] = (), + x11_auth_path: DefTuple[Optional[str]] = (), + x11_single_connection: DefTuple[bool] = (), + encoding: DefTuple[Optional[str]] = (), + errors: DefTuple[str] = (), + window: DefTuple[int] = (), + max_pktsize: DefTuple[int] = ()) -> \ + Tuple[SSHClientChannel, SSHClientSession]: + """Create an SSH client session + + This method is a coroutine which can be called to create an SSH + client session used to execute a command, start a subsystem + such as sftp, or if no command or subsystem is specified run an + interactive shell. Optional arguments allow terminal and + environment information to be provided. + + By default, this class expects string data in its send and + receive functions, which it encodes on the SSH connection in + UTF-8 (ISO 10646) format. An optional encoding argument can + be passed in to select a different encoding, or `None` can + be passed in if the application wishes to send and receive + raw bytes. When an encoding is set, an optional errors + argument can be passed in to select what Unicode error + handling strategy to use. + + Other optional arguments include the SSH receive window size and + max packet size which default to 2 MB and 32 KB, respectively. + + :param session_factory: + A `callable` which returns an :class:`SSHClientSession` object + that will be created to handle activity on this session + :param command: (optional) + The remote command to execute. By default, an interactive + shell is started if no command or subsystem is provided. + :param subsystem: (optional) + The name of a remote subsystem to start up. + :param env: (optional) + The environment variables to set for this session. Keys and + values passed in here will be converted to Unicode strings + encoded as UTF-8 (ISO 10646) for transmission. + + .. 
note:: Many SSH servers restrict which environment + variables a client is allowed to set. The + server's configuration may need to be edited + before environment variables can be + successfully set in the remote environment. + :param send_env: (optional) + A list of environment variable names to pull from + `os.environ` and set for this session. Wildcards patterns + using `'*'` and `'?'` are allowed, and all variables with + matching names will be sent with whatever value is set + in the local environment. If a variable is present in both + env and send_env, the value from env will be used. + :param request_pty: (optional) + Whether or not to request a pseudo-terminal (PTY) for this + session. This defaults to `True`, which means to request a + PTY whenever the `term_type` is set. Other possible values + include `False` to never request a PTY, `'force'` to always + request a PTY even without `term_type` being set, or `'auto'` + to request a TTY when `term_type` is set but only when + starting an interactive shell. + :param term_type: (optional) + The terminal type to set for this session. + :param term_size: (optional) + The terminal width and height in characters and optionally + the width and height in pixels. + :param term_modes: (optional) + POSIX terminal modes to set for this session, where keys are + taken from :ref:`POSIX terminal modes ` with values + defined in section 8 of :rfc:`RFC 4254 <4254#section-8>`. + :param x11_forwarding: (optional) + Whether or not to request X11 forwarding for this session, + defaulting to `False`. If set to `True`, X11 forwarding will + be requested and a failure will raise :exc:`ChannelOpenError`. + It can also be set to `'ignore_failure'` to attempt X11 + forwarding but ignore failures. + :param x11_display: (optional) + The display that X11 connections should be forwarded to, + defaulting to the value in the environment variable `DISPLAY`. + :param x11_auth_path: (optional) + The path to the Xauthority file to read X11 authentication + data from, defaulting to the value in the environment variable + `XAUTHORITY` or the file :file:`.Xauthority` in the user's + home directory if that's not set. + :param x11_single_connection: (optional) + Whether or not to limit X11 forwarding to a single connection, + defaulting to `False`. + :param encoding: (optional) + The Unicode encoding to use for data exchanged on this session. + :param errors: (optional) + The error handling strategy to apply on Unicode encode/decode + errors. + :param window: (optional) + The receive window size for this session. + :param max_pktsize: (optional) + The maximum packet size for this session. 
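+
+        As a usage illustration, a minimal sketch (the command, the
+        session class, and the established connection `conn` are
+        hypothetical names; run inside a coroutine)::
+
+            import asyncssh
+
+            class MySession(asyncssh.SSHClientSession):
+                def data_received(self, data, datatype):
+                    print(data, end='')
+
+                def connection_lost(self, exc):
+                    if exc:
+                        print('Session error: ' + str(exc))
+
+            chan, session = await conn.create_session(MySession, 'ls -l')
+            await chan.wait_closed()
+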
+ :type session_factory: `callable` + :type command: `str` + :type subsystem: `str` + :type env: `dict` with `str` keys and values + :type send_env: `list` of `str` + :type request_pty: `bool`, `'force'`, or `'auto'` + :type term_type: `str` + :type term_size: `tuple` of 2 or 4 `int` values + :type term_modes: `dict` with `int` keys and values + :type x11_forwarding: `bool` or `'ignore_failure'` + :type x11_display: `str` + :type x11_auth_path: `str` + :type x11_single_connection: `bool` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: an :class:`SSHClientChannel` and :class:`SSHClientSession` + + :raises: :exc:`ChannelOpenError` if the session can't be opened + + """ + + if command == (): + command = self._options.command + + if subsystem == (): + subsystem = self._options.subsystem + + if env == (): + env = self._options.env + + if send_env == (): + send_env = self._options.send_env + + if request_pty == (): + request_pty = self._options.request_pty + + if term_type == (): + term_type = self._options.term_type + + if term_size == (): + term_size = self._options.term_size + + if term_modes == (): + term_modes = self._options.term_modes + + if x11_forwarding == (): + x11_forwarding = self._options.x11_forwarding + + if x11_display == (): + x11_display = self._options.x11_display + + if x11_auth_path == (): + x11_auth_path = self._options.x11_auth_path + + if x11_single_connection == (): + x11_single_connection = self._options.x11_single_connection + + if encoding == (): + encoding = self._options.encoding + + if errors == (): + errors = self._options.errors + + if window == (): + window = self._options.window + + if max_pktsize == (): + max_pktsize = self._options.max_pktsize + + new_env: Dict[str, str] = {} + + if send_env: + for key in send_env: + pattern = WildcardPattern(key) + new_env.update((key, value) for key, value in os.environ.items() + if pattern.matches(key)) + + if env: + try: + if isinstance(env, list): + new_env.update((item.split('=', 2) for item in env)) + else: + new_env.update(cast(Mapping[str, str], env)) + except ValueError: + raise ValueError('Invalid environment value') from None + + if request_pty == 'force': + request_pty = True + elif request_pty == 'auto': + request_pty = bool(term_type and not (command or subsystem)) + elif request_pty: + request_pty = bool(term_type) + + command: Optional[str] + subsystem: Optional[str] + request_pty: bool + term_type: Optional[str] + term_size: TermSizeArg + term_modes: TermModesArg + x11_forwarding: Union[bool, str] + x11_display: Optional[str] + x11_auth_path: Optional[str] + x11_single_connection: bool + encoding: Optional[str] + errors: str + window: int + max_pktsize: int + + chan = SSHClientChannel(self, self._loop, encoding, errors, + window, max_pktsize) + + session = await chan.create(session_factory, command, subsystem, + new_env, request_pty, term_type, term_size, + term_modes or {}, x11_forwarding, + x11_display, x11_auth_path, + x11_single_connection, + bool(self._agent_forward_path)) + + return chan, session + + async def open_session(self, *args: object, **kwargs: object) -> \ + Tuple[SSHWriter, SSHReader, SSHReader]: + """Open an SSH client session + + This method is a coroutine wrapper around :meth:`create_session` + designed to provide a "high-level" stream interface for creating + an SSH client session. 
Instead of taking a `session_factory` + argument for constructing an object which will handle activity + on the session via callbacks, it returns an :class:`SSHWriter` + and two :class:`SSHReader` objects representing stdin, stdout, + and stderr which can be used to perform I/O on the session. With + the exception of `session_factory`, all of the arguments to + :meth:`create_session` are supported and have the same meaning. + + """ + + chan, session = await self.create_session( + SSHClientStreamSession, *args, **kwargs) # type: ignore + + session: SSHClientStreamSession + + return (SSHWriter(session, chan), SSHReader(session, chan), + SSHReader(session, chan, EXTENDED_DATA_STDERR)) + + # pylint: disable=redefined-builtin + @async_context_manager + async def create_process(self, *args: object, + input: Optional[AnyStr] = None, + stdin: ProcessSource = PIPE, + stdout: ProcessTarget = PIPE, + stderr: ProcessTarget = PIPE, + bufsize: int = io.DEFAULT_BUFFER_SIZE, + send_eof: bool = True, recv_eof: bool = True, + **kwargs: object) -> SSHClientProcess[AnyStr]: + """Create a process on the remote system + + This method is a coroutine wrapper around :meth:`create_session` + which can be used to execute a command, start a subsystem, + or start an interactive shell, optionally redirecting stdin, + stdout, and stderr to and from files or pipes attached to + other local and remote processes. + + By default, the stdin, stdout, and stderr arguments default + to the special value `PIPE` which means that they can be + read and written interactively via stream objects which are + members of the :class:`SSHClientProcess` object this method + returns. If other file-like objects are provided as arguments, + input or output will automatically be redirected to them. The + special value `DEVNULL` can be used to provide no input or + discard all output, and the special value `STDOUT` can be + provided as `stderr` to send its output to the same stream + as `stdout`. + + In addition to the arguments below, all arguments to + :meth:`create_session` except for `session_factory` are + supported and have the same meaning. + + :param input: (optional) + Input data to feed to standard input of the remote process. + If specified, this argument takes precedence over stdin. + Data should be a `str` if encoding is set, or `bytes` if not. + :param stdin: (optional) + A filename, file-like object, file descriptor, socket, or + :class:`SSHReader` to feed to standard input of the remote + process, or `DEVNULL` to provide no input. + :param stdout: (optional) + A filename, file-like object, file descriptor, socket, or + :class:`SSHWriter` to feed standard output of the remote + process to, or `DEVNULL` to discard this output. + :param stderr: (optional) + A filename, file-like object, file descriptor, socket, or + :class:`SSHWriter` to feed standard error of the remote + process to, `DEVNULL` to discard this output, or `STDOUT` + to feed standard error to the same place as stdout. + :param bufsize: (optional) + Buffer size to use when feeding data from a file to stdin + :param send_eof: + Whether or not to send EOF to the channel when EOF is + received from stdin, defaulting to `True`. If set to `False`, + the channel will remain open after EOF is received on stdin, + and multiple sources can be redirected to the channel. + :param recv_eof: + Whether or not to send EOF to stdout and stderr when EOF is + received from the channel, defaulting to `True`. 
If set to + `False`, the redirect targets of stdout and stderr will remain + open after EOF is received on the channel and can be used for + multiple redirects. + :type input: `str` or `bytes` + :type bufsize: `int` + :type send_eof: `bool` + :type recv_eof: `bool` + + :returns: :class:`SSHClientProcess` + + :raises: :exc:`ChannelOpenError` if the channel can't be opened + + """ + + chan, process = await self.create_session( + SSHClientProcess, *args, **kwargs) # type: ignore + + new_stdin: Optional[ProcessSource] = stdin + process: SSHClientProcess + + if input: + chan.write(input) + chan.write_eof() + new_stdin = None + + await process.redirect(new_stdin, stdout, stderr, + bufsize, send_eof, recv_eof) + + return process + + async def create_subprocess(self, protocol_factory: SubprocessFactory, + command: DefTuple[Optional[str]] = (), + bufsize: int = io.DEFAULT_BUFFER_SIZE, + input: Optional[AnyStr] = None, + stdin: ProcessSource = PIPE, + stdout: ProcessTarget = PIPE, + stderr: ProcessTarget = PIPE, + encoding: Optional[str] = None, + **kwargs: object) -> \ + Tuple[SSHSubprocessTransport, SSHSubprocessProtocol]: + """Create a subprocess on the remote system + + This method is a coroutine wrapper around :meth:`create_session` + which can be used to execute a command, start a subsystem, + or start an interactive shell, optionally redirecting stdin, + stdout, and stderr to and from files or pipes attached to + other local and remote processes similar to :meth:`create_process`. + However, instead of performing interactive I/O using + :class:`SSHReader` and :class:`SSHWriter` objects, the caller + provides a function which returns an object which conforms + to the :class:`asyncio.SubprocessProtocol` and this call + returns that and an :class:`SSHSubprocessTransport` object which + conforms to :class:`asyncio.SubprocessTransport`. + + With the exception of the addition of `protocol_factory`, all + of the arguments are the same as :meth:`create_process`. + + :param protocol_factory: + A `callable` which returns an :class:`SSHSubprocessProtocol` + object that will be created to handle activity on this + session. + :type protocol_factory: `callable` + + :returns: an :class:`SSHSubprocessTransport` and + :class:`SSHSubprocessProtocol` + + :raises: :exc:`ChannelOpenError` if the channel can't be opened + + """ + + def transport_factory() -> SSHSubprocessTransport: + """Return a subprocess transport""" + + return SSHSubprocessTransport(protocol_factory) + + _, transport = await self.create_session(transport_factory, command, + encoding=encoding, + **kwargs) # type: ignore + + new_stdin: Optional[ProcessSource] = stdin + transport: SSHSubprocessTransport + + if input: + stdin_pipe = cast(SSHSubprocessWritePipe, + transport.get_pipe_transport(0)) + stdin_pipe.write(input) + stdin_pipe.write_eof() + new_stdin = None + + await transport.redirect(new_stdin, stdout, stderr, bufsize) + + return transport, transport.get_protocol() + # pylint: enable=redefined-builtin + + async def run(self, *args: object, check: bool = False, + timeout: Optional[float] = None, + **kwargs: object) -> SSHCompletedProcess: + """Run a command on the remote system and collect its output + + This method is a coroutine wrapper around :meth:`create_process` + which can be used to run a process to completion when no + interactivity is needed. 
All of the arguments to + :meth:`create_process` can be passed in to provide input or + redirect stdin, stdout, and stderr, but this method waits until + the process exits and returns an :class:`SSHCompletedProcess` + object with the exit status or signal information and the + output to stdout and stderr (if not redirected). + + If the check argument is set to `True`, a non-zero exit status + from the remote process will trigger the :exc:`ProcessError` + exception to be raised. + + In addition to the argument below, all arguments to + :meth:`create_process` are supported and have the same meaning. + + If a timeout is specified and it expires before the process + exits, the :exc:`TimeoutError` exception will be raised. By + default, no timeout is set and this call will wait indefinitely. + + :param check: (optional) + Whether or not to raise :exc:`ProcessError` when a non-zero + exit status is returned + :param timeout: + Amount of time in seconds to wait for process to exit or + `None` to wait indefinitely + :type check: `bool` + :type timeout: `int`, `float`, or `None` + + :returns: :class:`SSHCompletedProcess` + + :raises: | :exc:`ChannelOpenError` if the session can't be opened + | :exc:`ProcessError` if checking non-zero exit status + | :exc:`TimeoutError` if the timeout expires before exit + + """ + + process = await self.create_process(*args, **kwargs) # type: ignore + + return await process.wait(check, timeout) + + async def create_connection( + self, session_factory: SSHTCPSessionFactory[AnyStr], + remote_host: str, remote_port: int, orig_host: str = '', + orig_port: int = 0, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + Tuple[SSHTCPChannel[AnyStr], SSHTCPSession[AnyStr]]: + """Create an SSH TCP direct connection + + This method is a coroutine which can be called to request that + the server open a new outbound TCP connection to the specified + destination host and port. If the connection is successfully + opened, a new SSH channel will be opened with data being handled + by a :class:`SSHTCPSession` object created by `session_factory`. + + Optional arguments include the host and port of the original + client opening the connection when performing TCP port forwarding. + + By default, this class expects data to be sent and received as + raw bytes. However, an optional encoding argument can be passed + in to select the encoding to use, allowing the application send + and receive string data. When encoding is set, an optional errors + argument can be passed in to select what Unicode error handling + strategy to use. + + Other optional arguments include the SSH receive window size and + max packet size which default to 2 MB and 32 KB, respectively. 
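+
+        For example, a minimal sketch (the destination host and port
+        and the session class are hypothetical), run inside a coroutine
+        with an established connection `conn`::
+
+            import asyncssh
+
+            class MyTCPSession(asyncssh.SSHTCPSession):
+                def data_received(self, data, datatype):
+                    print('Received: ' + repr(data))
+
+            chan, session = await conn.create_connection(
+                MyTCPSession, 'www.example.com', 80)
+            chan.write(b'HEAD / HTTP/1.0\r\n\r\n')
+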
+ + :param session_factory: + A `callable` which returns an :class:`SSHTCPSession` object + that will be created to handle activity on this session + :param remote_host: + The remote hostname or address to connect to + :param remote_port: + The remote port number to connect to + :param orig_host: (optional) + The hostname or address of the client requesting the connection + :param orig_port: (optional) + The port number of the client requesting the connection + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the connection + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type session_factory: `callable` + :type remote_host: `str` + :type remote_port: `int` + :type orig_host: `str` + :type orig_port: `int` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: an :class:`SSHTCPChannel` and :class:`SSHTCPSession` + + :raises: :exc:`ChannelOpenError` if the connection can't be opened + + """ + + self.logger.info('Opening direct TCP connection to %s', + (remote_host, remote_port)) + self.logger.info(' Client address: %s', (orig_host, orig_port)) + + chan = self.create_tcp_channel(encoding, errors, window, max_pktsize) + + session = await chan.connect(session_factory, remote_host, remote_port, + orig_host, orig_port) + + return chan, session + + async def open_connection(self, *args: object, **kwargs: object) -> \ + Tuple[SSHReader, SSHWriter]: + """Open an SSH TCP direct connection + + This method is a coroutine wrapper around :meth:`create_connection` + designed to provide a "high-level" stream interface for creating + an SSH TCP direct connection. Instead of taking a + `session_factory` argument for constructing an object which will + handle activity on the session via callbacks, it returns + :class:`SSHReader` and :class:`SSHWriter` objects which can be + used to perform I/O on the connection. + + With the exception of `session_factory`, all of the arguments + to :meth:`create_connection` are supported and have the same + meaning here. + + :returns: an :class:`SSHReader` and :class:`SSHWriter` + + :raises: :exc:`ChannelOpenError` if the connection can't be opened + + """ + + chan, session = await self.create_connection( + SSHTCPStreamSession, *args, **kwargs) # type: ignore + + session: SSHTCPStreamSession + + return SSHReader(session, chan), SSHWriter(session, chan) + + @async_context_manager + async def create_server( + self, session_factory: TCPListenerFactory[AnyStr], + listen_host: str, listen_port: int, *, + encoding: Optional[str] = None, errors: str = 'strict', + window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> SSHListener: + """Create a remote SSH TCP listener + + This method is a coroutine which can be called to request that + the server listen on the specified remote address and port for + incoming TCP connections. If the request is successful, the + return value is an :class:`SSHListener` object which can be + used later to shut down the listener. If the request fails, + `None` is returned. 
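+
+        For instance, a minimal sketch (the port is hypothetical, and
+        `MyTCPSession` is a session class like the one sketched for
+        :meth:`create_connection` above), run inside a coroutine with
+        an established connection `conn`::
+
+            def connection_requested(orig_host, orig_port):
+                # Accept every forwarded connection with a new session
+                return MyTCPSession()
+
+            listener = await conn.create_server(
+                connection_requested, '', 8888)
+            await listener.wait_closed()
+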
+ + :param session_factory: + A `callable` or coroutine which takes arguments of the + original host and port of the client and decides whether + to accept the connection or not, either returning an + :class:`SSHTCPSession` object used to handle activity + on that connection or raising :exc:`ChannelOpenError` + to indicate that the connection should not be accepted + :param listen_host: + The hostname or address on the remote host to listen on + :param listen_port: + The port number on the remote host to listen on + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the connection + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type session_factory: `callable` or coroutine + :type listen_host: `str` + :type listen_port: `int` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + listen_host = listen_host.lower() + + self.logger.info('Creating remote TCP listener on %s', + (listen_host, listen_port)) + + pkttype, packet = await self._make_global_request( + b'tcpip-forward', String(listen_host), UInt32(listen_port)) + + if pkttype == MSG_REQUEST_SUCCESS: + if listen_port == 0: + listen_port = packet.get_uint32() + dynamic = True + else: + # OpenSSH 6.8 introduced a bug which causes the reply + # to contain an extra uint32 value of 0 when non-dynamic + # ports are requested, causing the check_end() call below + # to fail. This check works around this problem. + if len(packet.get_remaining_payload()) == 4: # pragma: no cover + packet.get_uint32() + + dynamic = False + + packet.check_end() + + listener = SSHTCPClientListener[AnyStr](self, session_factory, + listen_host, listen_port, + encoding, errors, + window, max_pktsize) + + if dynamic: + self.logger.debug1('Assigning dynamic port %d', listen_port) + + self._dynamic_remote_listeners[listen_host] = listener + + self._remote_listeners[listen_host, listen_port] = listener + return listener + else: + packet.check_end() + self.logger.debug1('Failed to create remote TCP listener') + raise ChannelListenError('Failed to create remote TCP listener') + + @async_context_manager + async def start_server(self, handler_factory: _TCPServerHandlerFactory, + *args: object, **kwargs: object) -> SSHListener: + """Start a remote SSH TCP listener + + This method is a coroutine wrapper around :meth:`create_server` + designed to provide a "high-level" stream interface for creating + remote SSH TCP listeners. Instead of taking a `session_factory` + argument for constructing an object which will handle activity on + the session via callbacks, it takes a `handler_factory` which + returns a `callable` or coroutine that will be passed + :class:`SSHReader` and :class:`SSHWriter` objects which can be + used to perform I/O on each new connection which arrives. Like + :meth:`create_server`, `handler_factory` can also raise + :exc:`ChannelOpenError` if the connection should not be accepted. + + With the exception of `handler_factory` replacing + `session_factory`, all of the arguments to :meth:`create_server` + are supported and have the same meaning here. 
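+
+        As an illustration, a minimal echo sketch (the port and the
+        handler names are hypothetical), run inside a coroutine with an
+        established connection `conn`::
+
+            async def handle_connection(reader, writer):
+                writer.write(await reader.read())
+                writer.close()
+
+            def handler_factory(orig_host, orig_port):
+                return handle_connection
+
+            listener = await conn.start_server(handler_factory, '', 8888)
+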
+ + :param handler_factory: + A `callable` or coroutine which takes arguments of the + original host and port of the client and decides whether to + accept the connection or not, either returning a callback + or coroutine used to handle activity on that connection + or raising :exc:`ChannelOpenError` to indicate that the + connection should not be accepted + :type handler_factory: `callable` or coroutine + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + def session_factory(orig_host: str, orig_port: int) -> SSHTCPSession: + """Return a TCP stream session handler""" + + return SSHTCPStreamSession(handler_factory(orig_host, orig_port)) + + return await self.create_server(session_factory, + *args, **kwargs) # type: ignore + + async def create_unix_connection( + self, session_factory: SSHUNIXSessionFactory[AnyStr], + remote_path: str, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + Tuple[SSHUNIXChannel[AnyStr], SSHUNIXSession[AnyStr]]: + """Create an SSH UNIX domain socket direct connection + + This method is a coroutine which can be called to request that + the server open a new outbound UNIX domain socket connection to + the specified destination path. If the connection is successfully + opened, a new SSH channel will be opened with data being handled + by a :class:`SSHUNIXSession` object created by `session_factory`. + + By default, this class expects data to be sent and received as + raw bytes. However, an optional encoding argument can be passed + in to select the encoding to use, allowing the application to + send and receive string data. When encoding is set, an optional + errors argument can be passed in to select what Unicode error + handling strategy to use. + + Other optional arguments include the SSH receive window size and + max packet size which default to 2 MB and 32 KB, respectively. + + :param session_factory: + A `callable` which returns an :class:`SSHUNIXSession` object + that will be created to handle activity on this session + :param remote_path: + The remote path to connect to + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the connection + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type session_factory: `callable` + :type remote_path: `str` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: an :class:`SSHUNIXChannel` and :class:`SSHUNIXSession` + + :raises: :exc:`ChannelOpenError` if the connection can't be opened + + """ + + self.logger.info('Opening direct UNIX connection to %s', remote_path) + + chan = self.create_unix_channel(encoding, errors, window, max_pktsize) + + session = await chan.connect(session_factory, remote_path) + + return chan, session + + async def open_unix_connection(self, *args: object, **kwargs: object) -> \ + Tuple[SSHReader, SSHWriter]: + """Open an SSH UNIX domain socket direct connection + + This method is a coroutine wrapper around + :meth:`create_unix_connection` designed to provide a "high-level" + stream interface for creating an SSH UNIX domain socket direct + connection. 
Instead of taking a `session_factory` argument for + constructing an object which will handle activity on the session + via callbacks, it returns :class:`SSHReader` and :class:`SSHWriter` + objects which can be used to perform I/O on the connection. + + With the exception of `session_factory`, all of the arguments + to :meth:`create_unix_connection` are supported and have the same + meaning here. + + :returns: an :class:`SSHReader` and :class:`SSHWriter` + + :raises: :exc:`ChannelOpenError` if the connection can't be opened + + """ + + chan, session = \ + await self.create_unix_connection(SSHUNIXStreamSession, + *args, **kwargs) # type: ignore + + session: SSHUNIXStreamSession + + return SSHReader(session, chan), SSHWriter(session, chan) + + @async_context_manager + async def create_unix_server( + self, session_factory: UNIXListenerFactory[AnyStr], + listen_path: str, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> SSHListener: + """Create a remote SSH UNIX domain socket listener + + This method is a coroutine which can be called to request that + the server listen on the specified remote path for incoming UNIX + domain socket connections. If the request is successful, the + return value is an :class:`SSHListener` object which can be + used later to shut down the listener. If the request fails, + `None` is returned. + + :param session_factory: + A `callable` or coroutine which decides whether to accept + the connection or not, either returning an + :class:`SSHUNIXSession` object used to handle activity + on that connection or raising :exc:`ChannelOpenError` + to indicate that the connection should not be accepted + :param listen_path: + The path on the remote host to listen on + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the connection + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type session_factory: `callable` + :type listen_path: `str` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + self.logger.info('Creating remote UNIX listener on %s', listen_path) + + pkttype, packet = await self._make_global_request( + b'streamlocal-forward@openssh.com', String(listen_path)) + + packet.check_end() + + if pkttype == MSG_REQUEST_SUCCESS: + listener = SSHUNIXClientListener[AnyStr](self, session_factory, + listen_path, encoding, + errors, window, + max_pktsize) + + self._remote_listeners[listen_path] = listener + return listener + else: + self.logger.debug1('Failed to create remote UNIX listener') + raise ChannelListenError('Failed to create remote UNIX listener') + + @async_context_manager + async def start_unix_server( + self, handler_factory: _UNIXServerHandlerFactory, + *args: object, **kwargs: object) -> SSHListener: + """Start a remote SSH UNIX domain socket listener + + This method is a coroutine wrapper around :meth:`create_unix_server` + designed to provide a "high-level" stream interface for creating + remote SSH UNIX domain socket listeners. 
Instead of taking a + `session_factory` argument for constructing an object which + will handle activity on the session via callbacks, it takes a + `handler_factory` which returns a `callable` or coroutine that + will be passed :class:`SSHReader` and :class:`SSHWriter` objects + which can be used to perform I/O on each new connection which + arrives. Like :meth:`create_unix_server`, `handler_factory` + can also raise :exc:`ChannelOpenError` if the connection should + not be accepted. + + With the exception of `handler_factory` replacing + `session_factory`, all of the arguments to + :meth:`create_unix_server` are supported and have the same + meaning here. + + :param handler_factory: + A `callable` or coroutine which decides whether to accept + the UNIX domain socket connection or not, either returning + a callback or coroutine used to handle activity on that + connection or raising :exc:`ChannelOpenError` to indicate + that the connection should not be accepted + :type handler_factory: `callable` or coroutine + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + def session_factory() -> SSHUNIXStreamSession: + """Return a UNIX domain socket stream session handler""" + + return SSHUNIXStreamSession(handler_factory()) + + return await self.create_unix_server(session_factory, + *args, **kwargs) # type: ignore + + async def create_ssh_connection(self, client_factory: _ClientFactory, + host: str, port: DefTuple[int] = (), + **kwargs: object) -> \ + Tuple['SSHClientConnection', SSHClient]: + """Create a tunneled SSH client connection + + This method is a coroutine which can be called to open an + SSH client connection to the requested host and port tunneled + inside this already established connection. It takes all the + same arguments as :func:`create_connection` but requests + that the upstream SSH server open the connection rather than + connecting directly. + + """ + + return (await create_connection(client_factory, host, port, + tunnel=self, **kwargs)) # type: ignore + + @async_context_manager + async def connect_ssh(self, host: str, port: DefTuple[int] = (), + **kwargs: object) -> 'SSHClientConnection': + """Make a tunneled SSH client connection + + This method is a coroutine which can be called to open an + SSH client connection to the requested host and port tunneled + inside this already established connection. It takes all the + same arguments as :func:`connect` but requests that the upstream + SSH server open the connection rather than connecting directly. + + """ + + return await connect(host, port, tunnel=self, **kwargs) # type: ignore + + @async_context_manager + async def connect_reverse_ssh(self, host: str, port: DefTuple[int] = (), + **kwargs: object) -> 'SSHServerConnection': + """Make a tunneled reverse direction SSH connection + + This method is a coroutine which can be called to open an + SSH client connection to the requested host and port tunneled + inside this already established connection. It takes all the + same arguments as :func:`connect` but requests that the upstream + SSH server open the connection rather than connecting directly. 
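+
+        A minimal sketch (the host, port, and key path are
+        hypothetical; the usual server-side options accepted by
+        :func:`connect_reverse`, such as `server_host_keys`, can be
+        passed through), run inside a coroutine with an established
+        connection `conn`::
+
+            server_conn = await conn.connect_reverse_ssh(
+                'inner.example.com', 8022, server_host_keys=['skey'])
+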
+ + """ + + return await connect_reverse(host, port, tunnel=self, + **kwargs) # type: ignore + + @async_context_manager + async def listen_ssh(self, host: str = '', port: DefTuple[int] = (), + **kwargs: object) -> SSHAcceptor: + """Create a tunneled SSH listener + + This method is a coroutine which can be called to open a remote + SSH listener on the requested host and port tunneled inside this + already established connection. It takes all the same arguments as + :func:`listen` but requests that the upstream SSH server open the + listener rather than listening directly via TCP/IP. + + """ + + return await listen(host, port, tunnel=self, **kwargs) # type: ignore + + @async_context_manager + async def listen_reverse_ssh(self, host: str = '', + port: DefTuple[int] = (), + **kwargs: object) -> SSHAcceptor: + """Create a tunneled reverse direction SSH listener + + This method is a coroutine which can be called to open a remote + SSH listener on the requested host and port tunneled inside this + already established connection. It takes all the same arguments as + :func:`listen_reverse` but requests that the upstream SSH server + open the listener rather than listening directly via TCP/IP. + + """ + + return await listen_reverse(host, port, tunnel=self, + **kwargs) # type: ignore + + @async_context_manager + async def forward_local_port_to_path( + self, listen_host: str, listen_port: int, dest_path: str, + accept_handler: Optional[SSHAcceptHandler] = None) -> SSHListener: + """Set up local TCP port forwarding to a remote UNIX domain socket + + This method is a coroutine which attempts to set up port + forwarding from a local TCP listening port to a remote UNIX + domain path via the SSH connection. If the request is successful, + the return value is an :class:`SSHListener` object which can be + used later to shut down the port forwarding. + + :param listen_host: + The hostname or address on the local host to listen on + :param listen_port: + The port number on the local host to listen on + :param dest_path: + The path on the remote host to forward the connections to + :param accept_handler: + A `callable` or coroutine which takes arguments of the + original host and port of the client and decides whether + or not to allow connection forwarding, returning `True` to + accept the connection and begin forwarding or `False` to + reject and close it. 
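+
+        For example, a minimal sketch (the listening port and the
+        remote socket path are hypothetical), run inside a coroutine
+        with an established connection `conn`::
+
+            listener = await conn.forward_local_port_to_path(
+                '', 8080, '/run/app.sock')
+            await listener.wait_closed()
+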
+ :type listen_host: `str` + :type listen_port: `int` + :type dest_path: `str` + :type accept_handler: `callable` or coroutine + + :returns: :class:`SSHListener` + + :raises: :exc:`OSError` if the listener can't be opened + + """ + + async def tunnel_connection( + session_factory: SSHUNIXSessionFactory[bytes], + orig_host: str, orig_port: int) -> \ + Tuple[SSHUNIXChannel[bytes], SSHUNIXSession[bytes]]: + """Forward a local connection over SSH""" + + if accept_handler: + result = accept_handler(orig_host, orig_port) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + if not result: + self.logger.info('Request for TCP forwarding from ' + '%s to %s denied by application', + (orig_host, orig_port), dest_path) + + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Connection forwarding denied') + + return (await self.create_unix_connection(session_factory, + dest_path)) + + self.logger.info('Creating local TCP forwarder from %s to %s', + (listen_host, listen_port), dest_path) + + try: + listener = await create_tcp_forward_listener(self, self._loop, + tunnel_connection, + listen_host, + listen_port) + except OSError as exc: + self.logger.debug1('Failed to create local TCP listener: %s', exc) + raise + + if listen_port == 0: + listen_port = listener.get_port() + + self._local_listeners[listen_host, listen_port] = listener + + return listener + + @async_context_manager + async def forward_local_path_to_port(self, listen_path: str, + dest_host: str, + dest_port: int) -> SSHListener: + """Set up local UNIX domain socket forwarding to a remote TCP port + + This method is a coroutine which attempts to set up UNIX domain + socket forwarding from a local listening path to a remote host + and port via the SSH connection. If the request is successful, + the return value is an :class:`SSHListener` object which can + be used later to shut down the UNIX domain socket forwarding. + + :param listen_path: + The path on the local host to listen on + :param dest_host: + The hostname or address to forward the connections to + :param dest_port: + The port number to forward the connections to + :type listen_path: `str` + :type dest_host: `str` + :type dest_port: `int` + + :returns: :class:`SSHListener` + + :raises: :exc:`OSError` if the listener can't be opened + + """ + + async def tunnel_connection( + session_factory: SSHTCPSessionFactory[bytes]) -> \ + Tuple[SSHTCPChannel[bytes], SSHTCPSession[bytes]]: + """Forward a local connection over SSH""" + + return await self.create_connection(session_factory, dest_host, + dest_port, '', 0) + + self.logger.info('Creating local UNIX forwarder from %s to %s', + listen_path, (dest_host, dest_port)) + + try: + listener = await create_unix_forward_listener(self, self._loop, + tunnel_connection, + listen_path) + except OSError as exc: + self.logger.debug1('Failed to create local UNIX listener: %s', exc) + raise + + self._local_listeners[listen_path] = listener + + return listener + + @async_context_manager + async def forward_remote_port(self, listen_host: str, + listen_port: int, dest_host: str, + dest_port: int) -> SSHListener: + """Set up remote port forwarding + + This method is a coroutine which attempts to set up port + forwarding from a remote listening port to a local host and port + via the SSH connection. If the request is successful, the + return value is an :class:`SSHListener` object which can be + used later to shut down the port forwarding. If the request + fails, `None` is returned. 
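+
+        For example, a minimal sketch (the ports and the destination
+        are hypothetical) which listens on port 8080 on the remote host
+        and forwards connections to a local web server, run inside a
+        coroutine with an established connection `conn`::
+
+            listener = await conn.forward_remote_port(
+                '', 8080, 'localhost', 80)
+            await listener.wait_closed()
+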
+ + :param listen_host: + The hostname or address on the remote host to listen on + :param listen_port: + The port number on the remote host to listen on + :param dest_host: + The hostname or address to forward connections to + :param dest_port: + The port number to forward connections to + :type listen_host: `str` + :type listen_port: `int` + :type dest_host: `str` + :type dest_port: `int` + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + def session_factory(_orig_host: str, + _orig_port: int) -> Awaitable[SSHTCPSession]: + """Return an SSHTCPSession used to do remote port forwarding""" + + return cast(Awaitable[SSHTCPSession], + self.forward_connection(dest_host, dest_port)) + + self.logger.info('Creating remote TCP forwarder from %s to %s', + (listen_host, listen_port), (dest_host, dest_port)) + + return await self.create_server(session_factory, listen_host, + listen_port) + + @async_context_manager + async def forward_remote_path(self, listen_path: str, + dest_path: str) -> SSHListener: + """Set up remote UNIX domain socket forwarding + + This method is a coroutine which attempts to set up UNIX domain + socket forwarding from a remote listening path to a local path + via the SSH connection. If the request is successful, the + return value is an :class:`SSHListener` object which can be + used later to shut down the port forwarding. If the request + fails, `None` is returned. + + :param listen_path: + The path on the remote host to listen on + :param dest_path: + The path on the local host to forward connections to + :type listen_path: `str` + :type dest_path: `str` + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + def session_factory() -> Awaitable[SSHUNIXSession[bytes]]: + """Return an SSHUNIXSession used to do remote path forwarding""" + + return cast(Awaitable[SSHUNIXSession[bytes]], + self.forward_unix_connection(dest_path)) + + self.logger.info('Creating remote UNIX forwarder from %s to %s', + listen_path, dest_path) + + return await self.create_unix_server(session_factory, listen_path) + + @async_context_manager + async def forward_remote_port_to_path(self, listen_host: str, + listen_port: int, + dest_path: str) -> SSHListener: + """Set up remote TCP port forwarding to a local UNIX domain socket + + This method is a coroutine which attempts to set up port + forwarding from a remote TCP listening port to a local UNIX + domain socket path via the SSH connection. If the request is + successful, the return value is an :class:`SSHListener` object + which can be used later to shut down the port forwarding. If + the request fails, `None` is returned. 
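+
+        For instance, a minimal sketch (the remote port and the local
+        socket path are hypothetical), run inside a coroutine with an
+        established connection `conn`::
+
+            listener = await conn.forward_remote_port_to_path(
+                '', 8080, '/run/local-app.sock')
+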
+ + :param listen_host: + The hostname or address on the remote host to listen on + :param listen_port: + The port number on the remote host to listen on + :param dest_path: + The path on the local host to forward connections to + :type listen_host: `str` + :type listen_port: `int` + :type dest_path: `str` + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + def session_factory(_orig_host: str, + _orig_port: int) -> Awaitable[SSHUNIXSession]: + """Return an SSHTCPSession used to do remote port forwarding""" + + return cast(Awaitable[SSHUNIXSession], + self.forward_unix_connection(dest_path)) + + self.logger.info('Creating remote TCP forwarder from %s to %s', + (listen_host, listen_port), dest_path) + + return await self.create_server(session_factory, listen_host, + listen_port) + + @async_context_manager + async def forward_remote_path_to_port(self, listen_path: str, + dest_host: str, + dest_port: int) -> SSHListener: + """Set up remote UNIX domain socket forwarding to a local TCP port + + This method is a coroutine which attempts to set up UNIX domain + socket forwarding from a remote listening path to a local TCP + host and port via the SSH connection. If the request is + successful, the return value is an :class:`SSHListener` object + which can be used later to shut down the port forwarding. If + the request fails, `None` is returned. + + :param listen_path: + The path on the remote host to listen on + :param dest_host: + The hostname or address to forward connections to + :param dest_port: + The port number to forward connections to + :type listen_path: `str` + :type dest_host: `str` + :type dest_port: `int` + + :returns: :class:`SSHListener` + + :raises: :class:`ChannelListenError` if the listener can't be opened + + """ + + def session_factory() -> Awaitable[SSHTCPSession[bytes]]: + """Return an SSHUNIXSession used to do remote path forwarding""" + + return cast(Awaitable[SSHTCPSession[bytes]], + self.forward_connection(dest_host, dest_port)) + + self.logger.info('Creating remote UNIX forwarder from %s to %s', + listen_path, (dest_host, dest_port)) + + return await self.create_unix_server(session_factory, listen_path) + + @async_context_manager + async def forward_socks(self, listen_host: str, + listen_port: int) -> SSHListener: + """Set up local port forwarding via SOCKS + + This method is a coroutine which attempts to set up dynamic + port forwarding via SOCKS on the specified local host and + port. Each SOCKS request contains the destination host and + port to connect to and triggers a request to tunnel traffic + to the requested host and port via the SSH connection. + + If the request is successful, the return value is an + :class:`SSHListener` object which can be used later to shut + down the port forwarding. 
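+
+        For example, a minimal sketch (the listening address and port
+        are hypothetical) which starts a dynamic SOCKS forwarder that
+        tunnels each request over this connection, run inside a
+        coroutine with an established connection `conn`::
+
+            listener = await conn.forward_socks('localhost', 1080)
+            await listener.wait_closed()
+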
+ + :param listen_host: + The hostname or address on the local host to listen on + :param listen_port: + The port number on the local host to listen on + :type listen_host: `str` + :type listen_port: `int` + + :returns: :class:`SSHListener` + + :raises: :exc:`OSError` if the listener can't be opened + + """ + + async def tunnel_socks(session_factory: SSHTCPSessionFactory[bytes], + dest_host: str, dest_port: int, + orig_host: str, orig_port: int) -> \ + Tuple[SSHTCPChannel[bytes], SSHTCPSession[bytes]]: + """Forward a local SOCKS connection over SSH""" + + return await self.create_connection(session_factory, + dest_host, dest_port, + orig_host, orig_port) + + self.logger.info('Creating local SOCKS forwarder on %s', + (listen_host, listen_port)) + + try: + listener = await create_socks_listener(self, self._loop, + tunnel_socks, + listen_host, listen_port) + except OSError as exc: + self.logger.debug1('Failed to create local SOCKS listener: %s', exc) + raise + + if listen_port == 0: + listen_port = listener.get_port() + + self._local_listeners[listen_host, listen_port] = listener + + return listener + + @async_context_manager + async def start_sftp_client(self, env: DefTuple[_Env] = (), + send_env: DefTuple[_SendEnv] = (), + path_encoding: Optional[str] = 'utf-8', + path_errors = 'strict', + sftp_version = MIN_SFTP_VERSION) -> SFTPClient: + """Start an SFTP client + + This method is a coroutine which attempts to start a secure + file transfer session. If it succeeds, it returns an + :class:`SFTPClient` object which can be used to copy and + access files on the remote host. + + An optional Unicode encoding can be specified for sending and + receiving pathnames, defaulting to UTF-8 with strict error + checking. If an encoding of `None` is specified, pathnames + will be left as bytes rather than being converted to & from + strings. + + :param env: (optional) + The environment variables to set for this SFTP session. Keys + and values passed in here will be converted to Unicode + strings encoded as UTF-8 (ISO 10646) for transmission. + + .. note:: Many SSH servers restrict which environment + variables a client is allowed to set. The + server's configuration may need to be edited + before environment variables can be + successfully set in the remote environment. + :param send_env: (optional) + A list of environment variable names to pull from + `os.environ` and set for this SFTP session. Wildcards + patterns using `'*'` and `'?'` are allowed, and all variables + with matching names will be sent with whatever value is set + in the local environment. If a variable is present in both + env and send_env, the value from env will be used. + :param path_encoding: + The Unicode encoding to apply when sending and receiving + remote pathnames + :param path_errors: + The error handling strategy to apply on encode/decode errors + :param sftp_version: (optional) + The maximum version of the SFTP protocol to support, currently + either 3 or 4, defaulting to 3. 
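+
+        For example, a minimal sketch (the remote filename is
+        hypothetical), run inside a coroutine with an established
+        connection `conn`::
+
+            async with conn.start_sftp_client() as sftp:
+                await sftp.get('example.txt')
+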
+ :type env: `dict` with `str` keys and values + :type send_env: `list` of `str` + :type path_encoding: `str` or `None` + :type path_errors: `str` + :type sftp_version: `int` + + :returns: :class:`SFTPClient` + + :raises: :exc:`SFTPError` if the session can't be opened + + """ + + writer, reader, _ = await self.open_session(subsystem='sftp', + env=env, send_env=send_env, + encoding=None) + + return await start_sftp_client(self, self._loop, reader, writer, + path_encoding, path_errors, + sftp_version) + + +class SSHServerConnection(SSHConnection): + """SSH server connection + + This class represents an SSH server connection. + + During authentication, :meth:`send_auth_banner` can be called to + send an authentication banner to the client. + + Once authenticated, :class:`SSHServer` objects wishing to create + session objects with non-default channel properties can call + :meth:`create_server_channel` from their :meth:`session_requested() + ` method and return a tuple of + the :class:`SSHServerChannel` object returned from that and either + an :class:`SSHServerSession` object or a coroutine which returns + an :class:`SSHServerSession`. + + Similarly, :class:`SSHServer` objects wishing to create TCP + connection objects with non-default channel properties can call + :meth:`create_tcp_channel` from their :meth:`connection_requested() + ` method and return a tuple of + the :class:`SSHTCPChannel` object returned from that and either + an :class:`SSHTCPSession` object or a coroutine which returns an + :class:`SSHTCPSession`. + + :class:`SSHServer` objects wishing to create UNIX domain socket + connection objects with non-default channel properties can call + :meth:`create_unix_channel` from the :meth:`unix_connection_requested() + ` method and return a tuple of + the :class:`SSHUNIXChannel` object returned from that and either + an :class:`SSHUNIXSession` object or a coroutine which returns an + :class:`SSHUNIXSession`. 
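+
+    A minimal sketch of the first pattern above (the window size and
+    the session class `MyServerSession` are hypothetical)::
+
+        import asyncssh
+
+        class MyServer(asyncssh.SSHServer):
+            def connection_made(self, conn):
+                self._conn = conn
+
+            def session_requested(self):
+                # Return a channel with a custom receive window plus
+                # a session object to handle activity on it
+                chan = self._conn.create_server_channel(window=4194304)
+                return chan, MyServerSession()
+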
+ + """ + + _options: 'SSHServerConnectionOptions' + _owner: SSHServer + _x11_listener: Optional[SSHX11ServerListener] + + def __init__(self, loop: asyncio.AbstractEventLoop, + options: 'SSHServerConnectionOptions', + acceptor: _AcceptHandler = None, + error_handler: _ErrorHandler = None, + wait: Optional[str] = None): + super().__init__(loop, options, acceptor, error_handler, + wait, server=True) + + self._options = options + + self._server_host_keys = options.server_host_keys + self._server_host_key_algs = list(options.server_host_keys.keys()) + self._known_client_hosts = options.known_client_hosts + self._trust_client_host = options.trust_client_host + self._authorized_client_keys = options.authorized_client_keys + self._allow_pty = options.allow_pty + self._line_editor = options.line_editor + self._line_echo = options.line_echo + self._line_history = options.line_history + self._max_line_length = options.max_line_length + self._rdns_lookup = options.rdns_lookup + self._x11_forwarding = options.x11_forwarding + self._x11_auth_path = options.x11_auth_path + self._agent_forwarding = options.agent_forwarding + self._process_factory = options.process_factory + self._session_factory = options.session_factory + self._encoding = options.encoding + self._errors = options.errors + self._sftp_factory = options.sftp_factory + self._sftp_version = options.sftp_version + self._allow_scp = options.allow_scp + self._window = options.window + self._max_pktsize = options.max_pktsize + + if options.gss_host: + try: + self._gss = GSSServer(options.gss_host) + self._gss_kex = options.gss_kex + self._gss_auth = options.gss_auth + self._gss_mic_auth = self._gss_auth + except GSSError: + pass + + self._server_host_key: Optional[SSHKeyPair] = None + self._key_options: _KeyOrCertOptions = {} + self._cert_options: Optional[_KeyOrCertOptions] = None + self._kbdint_password_auth = False + + self._agent_listener: Optional[SSHAgentListener] = None + + def _cleanup(self, exc: Optional[Exception]) -> None: + """Clean up this server connection""" + + if self._agent_listener: + self._agent_listener.close() + self._agent_listener = None + + super()._cleanup(exc) + + def _connection_made(self) -> None: + """Handle the opening of a new connection""" + + self.logger.info('Accepted SSH client connection') + + if self._options.proxy_command: + proxy_command = ' '.join(shlex.quote(arg) for arg in + self._options.proxy_command) + self.logger.info(' Proxy command: %s', proxy_command) + else: + self.logger.info(' Local address: %s', + (self._local_addr, self._local_port)) + self.logger.info(' Peer address: %s', + (self._peer_addr, self._peer_port)) + + async def reload_config(self) -> None: + """Re-evaluate config with updated match options""" + + if self._rdns_lookup: + self._peer_host, _ = await self._loop.getnameinfo( + (self._peer_addr, self._peer_port), socket.NI_NUMERICSERV) + + options = cast(SSHServerConnectionOptions, await _run_in_executor( + self._loop, SSHServerConnectionOptions, options=self._options, + reload=True, accept_addr=self._local_addr, + accept_port=self._local_port, username=self._username, + client_host=self._peer_host, client_addr=self._peer_addr)) + + self._options = options + + self._host_based_auth = options.host_based_auth + self._public_key_auth = options.public_key_auth + self._kbdint_auth = options.kbdint_auth + self._password_auth = options.password_auth + + self._authorized_client_keys = options.authorized_client_keys + self._allow_pty = options.allow_pty + self._x11_forwarding = 
options.x11_forwarding + self._agent_forwarding = options.agent_forwarding + + self._rekey_bytes = options.rekey_bytes + self._rekey_seconds = options.rekey_seconds + + self._keepalive_count_max = options.keepalive_count_max + self._keepalive_interval = options.keepalive_interval + + def choose_server_host_key(self, + peer_host_key_algs: Sequence[bytes]) -> bool: + """Choose the server host key to use + + Given a list of host key algorithms supported by the client, + select the first compatible server host key we have and return + whether or not we were able to find a match. + + """ + + for alg in peer_host_key_algs: + keypair = self._server_host_keys.get(alg) + if keypair: + if alg != keypair.algorithm: + keypair.set_sig_algorithm(alg) + + self._server_host_key = keypair + return True + + return False + + def get_server_host_key(self) -> Optional[SSHKeyPair]: + """Return the chosen server host key + + This method returns a keypair object containing the + chosen server host key and a corresponding public key + or certificate. + + """ + + return self._server_host_key + + def gss_kex_auth_supported(self) -> bool: + """Return whether GSS key exchange authentication is supported""" + + if self._gss_kex_auth: + assert self._gss is not None + return self._gss.complete + else: + return False + + def gss_mic_auth_supported(self) -> bool: + """Return whether GSS MIC authentication is supported""" + + return self._gss_mic_auth + + async def validate_gss_principal(self, username: str, user_principal: str, + host_principal: str) -> bool: + """Validate the GSS principal name for the specified user + + Return whether the user principal acquired during GSS + authentication is valid for the specified user. + + """ + + result = self._owner.validate_gss_principal(username, user_principal, + host_principal) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + return cast(bool, result) + + def host_based_auth_supported(self) -> bool: + """Return whether or not host based authentication is supported""" + + return (self._host_based_auth and + (bool(self._known_client_hosts) or + self._owner.host_based_auth_supported())) + + async def validate_host_based_auth(self, username: str, key_data: bytes, + client_host: str, client_username: str, + msg: bytes, signature: bytes) -> bool: + """Validate host based authentication for the specified host and user""" + + # Remove a trailing '.' 
from the client host if present + if client_host[-1:] == '.': + client_host = client_host[:-1] + + if self._trust_client_host: + resolved_host = client_host + else: + resolved_host, _ = await self._loop.getnameinfo( + cast(SockAddr, self.get_extra_info('peername')), + socket.NI_NUMERICSERV) + + if resolved_host != client_host: + self.logger.info('Client host mismatch: received %s, ' + 'resolved %s', client_host, resolved_host) + + if self._known_client_hosts: + self._match_known_hosts(self._known_client_hosts, resolved_host, + self._peer_addr, None) + + try: + key = self._validate_host_key(resolved_host, self._peer_addr, + self._peer_port, key_data) + except ValueError as exc: + self.logger.debug1('Invalid host key: %s', exc) + return False + + if not key.verify(String(self._session_id) + msg, signature): + self.logger.debug1('Invalid host-based auth signature') + return False + + result = self._owner.validate_host_based_user(username, client_host, + client_username) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + return cast(bool, result) + + async def _validate_openssh_certificate( + self, username: str, cert: SSHOpenSSHCertificate) -> \ + Optional[SSHKey]: + """Validate an OpenSSH client certificate for the specified user""" + + options: Optional[_KeyOrCertOptions] = None + + if self._authorized_client_keys: + options = self._authorized_client_keys.validate( + cert.signing_key, self._peer_host, + self._peer_addr, cert.principals, ca=True) + + if options is None: + result = self._owner.validate_ca_key(username, cert.signing_key) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + if not result: + return None + + options = {} + + self._key_options = options + + if self.get_key_option('principals'): + username = '' + + try: + cert.validate(CERT_TYPE_USER, username) + except ValueError: + return None + + allowed_addresses = cast(Sequence[IPNetwork], + cert.options.get('source-address')) + if allowed_addresses: + ip = ip_address(self._peer_addr) + if not any(ip in network for network in allowed_addresses): + return None + + self._cert_options = cert.options + + cert.key.set_touch_required( + not (self.get_key_option('no-touch-required', False) and + self.get_certificate_option('no-touch-required', False))) + + return cert.key + + async def _validate_x509_certificate_chain( + self, username: str, cert: SSHX509CertificateChain) -> \ + Optional[SSHKey]: + """Validate an X.509 client certificate for the specified user""" + + if not self._authorized_client_keys: + return None + + options, trusted_cert = \ + self._authorized_client_keys.validate_x509( + cert, self._peer_host, self._peer_addr) + + if options is None: + return None + + self._key_options = options + + if self.get_key_option('principals'): + username = '' + + assert self._x509_trusted_certs is not None + trusted_certs = list(self._x509_trusted_certs) + + if trusted_cert: + trusted_certs += [trusted_cert] + + try: + cert.validate_chain(trusted_certs, self._x509_trusted_cert_paths, + set(), self._x509_purposes, + user_principal=username) + except ValueError: + return None + + return cert.key + + async def _validate_client_certificate( + self, username: str, key_data: bytes) -> Optional[SSHKey]: + """Validate a client certificate for the specified user""" + + try: + cert = decode_ssh_certificate(key_data) + except KeyImportError: + return None + + if cert.is_x509_chain: + return await self._validate_x509_certificate_chain( + username, cast(SSHX509CertificateChain, 
cert)) + else: + return await self._validate_openssh_certificate( + username, cast(SSHOpenSSHCertificate, cert)) + + async def _validate_client_public_key(self, username: str, + key_data: bytes) -> Optional[SSHKey]: + """Validate a client public key for the specified user""" + + try: + key = decode_ssh_public_key(key_data) + except KeyImportError: + return None + + options: Optional[_KeyOrCertOptions] = None + + if self._authorized_client_keys: + options = self._authorized_client_keys.validate( + key, self._peer_host, self._peer_addr) + + if options is None: + result = self._owner.validate_public_key(username, key) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + if not result: + return None + + options = {} + + self._key_options = options + + key.set_touch_required( + not self.get_key_option('no-touch-required', False)) + + return key + + def public_key_auth_supported(self) -> bool: + """Return whether or not public key authentication is supported""" + + return (self._public_key_auth and + (bool(self._authorized_client_keys) or + self._owner.public_key_auth_supported())) + + async def validate_public_key(self, username: str, key_data: bytes, + msg: bytes, signature: bytes) -> bool: + """Validate the public key or certificate for the specified user + + This method validates that the public key or certificate provided + is allowed for the specified user. If msg and signature are + provided, the key is used to also validate the message signature. + It returns `True` when the key is allowed and the signature (if + present) is valid. Otherwise, it returns `False`. + + """ + + key = ((await self._validate_client_certificate(username, key_data)) or + (await self._validate_client_public_key(username, key_data))) + + if key is None: + return False + elif msg: + return key.verify(String(self._session_id) + msg, signature) + else: + return True + + def password_auth_supported(self) -> bool: + """Return whether or not password authentication is supported""" + + return self._password_auth and self._owner.password_auth_supported() + + async def validate_password(self, username: str, password: str) -> bool: + """Return whether password is valid for this user""" + + result = self._owner.validate_password(username, password) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + return cast(bool, result) + + async def change_password(self, username: str, old_password: str, + new_password: str) -> bool: + """Handle a password change request for a user""" + + result = self._owner.change_password(username, old_password, + new_password) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bool], result) + + return cast(bool, result) + + def kbdint_auth_supported(self) -> bool: + """Return whether or not keyboard-interactive authentication + is supported""" + + result = self._kbdint_auth and self._owner.kbdint_auth_supported() + + if result is True: + return True + elif (result is NotImplemented and + self._owner.password_auth_supported()): + self._kbdint_password_auth = True + return True + else: + return False + + async def get_kbdint_challenge(self, username: str, lang: str, + submethods: str) -> KbdIntChallenge: + """Return a keyboard-interactive auth challenge""" + + if self._kbdint_password_auth: + challenge: KbdIntChallenge = ('', '', DEFAULT_LANG, + (('Password:', False),)) + else: + result = self._owner.get_kbdint_challenge(username, lang, + submethods) + + if inspect.isawaitable(result): + challenge = await 
cast(Awaitable[KbdIntChallenge], result) + else: + challenge = cast(KbdIntChallenge, result) + + return challenge + + async def validate_kbdint_response(self, username: str, + responses: KbdIntResponse) -> \ + KbdIntChallenge: + """Return whether the keyboard-interactive response is valid + for this user""" + + next_challenge: KbdIntChallenge + + if self._kbdint_password_auth: + if len(responses) != 1: + return False + + try: + pw_result = self._owner.validate_password( + username, responses[0]) + + if inspect.isawaitable(pw_result): + next_challenge = await cast(Awaitable[bool], pw_result) + else: + next_challenge = cast(bool, pw_result) + except PasswordChangeRequired: + # Don't support password change requests for now in + # keyboard-interactive auth + next_challenge = False + else: + result = self._owner.validate_kbdint_response(username, responses) + + if inspect.isawaitable(result): + next_challenge = await cast(Awaitable[KbdIntChallenge], result) + else: + next_challenge = cast(KbdIntChallenge, result) + + return next_challenge + + def _process_session_open(self, packet: SSHPacket) -> \ + Tuple[SSHServerChannel, SSHServerSession]: + """Process an incoming session open request""" + + packet.check_end() + + chan: SSHServerChannel + session: SSHServerSession + + if self._process_factory or self._session_factory or self._sftp_factory: + chan = self.create_server_channel(self._encoding, self._errors, + self._window, self._max_pktsize) + + if self._process_factory: + session = SSHServerProcess(self._process_factory, + self._sftp_factory, + self._sftp_version, + self._allow_scp) + else: + session = SSHServerStreamSession(self._session_factory, + self._sftp_factory, + self._sftp_version, + self._allow_scp) + else: + result = self._owner.session_requested() + + if not result: + raise ChannelOpenError(OPEN_CONNECT_FAILED, 'Session refused') + + if isinstance(result, tuple): + chan, result = result + else: + chan = self.create_server_channel(self._encoding, self._errors, + self._window, + self._max_pktsize) + + if callable(result): + session = SSHServerStreamSession(result) + else: + session = cast(SSHServerSession, result) + + return chan, session + + def _process_direct_tcpip_open(self, packet: SSHPacket) -> \ + Tuple[SSHTCPChannel[bytes], SSHTCPSession[bytes]]: + """Process an incoming direct TCP/IP open request""" + + dest_host_bytes = packet.get_string() + dest_port = packet.get_uint32() + orig_host_bytes = packet.get_string() + orig_port = packet.get_uint32() + packet.check_end() + + try: + dest_host = dest_host_bytes.decode('utf-8') + orig_host = orig_host_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid direct TCP/IP channel ' + 'open request') from None + + if not self.check_key_permission('port-forwarding') or \ + not self.check_certificate_permission('port-forwarding'): + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Port forwarding not permitted') + + permitted_opens = cast(Set[Tuple[str, int]], + self.get_key_option('permitopen')) + + if permitted_opens and \ + (dest_host, dest_port) not in permitted_opens and \ + (dest_host, None) not in permitted_opens: + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Port forwarding not permitted to %s ' + 'port %s' % (dest_host, dest_port)) + + result = self._owner.connection_requested(dest_host, dest_port, + orig_host, orig_port) + + if not result: + raise ChannelOpenError(OPEN_CONNECT_FAILED, 'Connection refused') + + if result is True: + result = cast(SSHTCPSession[bytes], + 
self.forward_connection(dest_host, dest_port)) + + if isinstance(result, tuple): + chan, result = result + else: + chan = self.create_tcp_channel() + + session: SSHTCPSession[bytes] + + if callable(result): + session = SSHTCPStreamSession[bytes](result) + else: + session = cast(SSHTCPSession[bytes], result) + + self.logger.info('Accepted direct TCP connection request to %s', + (dest_host, dest_port)) + self.logger.info(' Client address: %s', (orig_host, orig_port)) + + chan.set_inbound_peer_names(dest_host, dest_port, orig_host, orig_port) + + return chan, session + + def _process_tcpip_forward_global_request(self, packet: SSHPacket) -> None: + """Process an incoming TCP/IP port forwarding request""" + + listen_host_bytes = packet.get_string() + listen_port = packet.get_uint32() + packet.check_end() + + try: + listen_host = listen_host_bytes.decode('utf-8').lower() + except UnicodeDecodeError: + raise ProtocolError('Invalid TCP/IP forward request') from None + + if not self.check_key_permission('port-forwarding') or \ + not self.check_certificate_permission('port-forwarding'): + self.logger.info('Request for TCP listener on %s denied: port ' + 'forwarding not permitted', + (listen_host, listen_port)) + + self._report_global_response(False) + return + + self.create_task(self._finish_port_forward(listen_host, listen_port)) + + async def _finish_port_forward(self, listen_host: str, + listen_port: int) -> None: + """Finish processing a TCP/IP port forwarding request""" + + listener = self._owner.server_requested(listen_host, listen_port) + + try: + if inspect.isawaitable(listener): + listener = await cast(Awaitable[_ListenerArg], listener) + + if listener is True: + listener = await self.forward_local_port( + listen_host, listen_port, listen_host, listen_port) + elif callable(listener): + listener = await self.forward_local_port( + listen_host, listen_port, + listen_host, listen_port, listener) + except OSError: + self.logger.debug1('Failed to create TCP listener') + self._report_global_response(False) + return + + if not listener: + self.logger.info('Request for TCP listener on %s denied by ' + 'application', (listen_host, listen_port)) + + self._report_global_response(False) + return + + listener: SSHListener + result: Union[bool, bytes] + + if listen_port == 0: + listen_port = listener.get_port() + result = UInt32(listen_port) + else: + result = True + + self.logger.info('Created TCP listener on %s', + (listen_host, listen_port)) + + self._local_listeners[listen_host, listen_port] = listener + self._report_global_response(result) + + def _process_cancel_tcpip_forward_global_request( + self, packet: SSHPacket) -> None: + """Process a request to cancel TCP/IP port forwarding""" + + listen_host_bytes = packet.get_string() + listen_port = packet.get_uint32() + packet.check_end() + + try: + listen_host = listen_host_bytes.decode('utf-8').lower() + except UnicodeDecodeError: + raise ProtocolError('Invalid TCP/IP cancel ' + 'forward request') from None + + try: + listener = self._local_listeners.pop((listen_host, listen_port)) + except KeyError: + raise ProtocolError('TCP/IP listener not found') from None + + self.logger.info('Closed TCP listener on %s', + (listen_host, listen_port)) + + listener.close() + + self._report_global_response(True) + + def _process_direct_streamlocal_at_openssh_dot_com_open( + self, packet: SSHPacket) -> \ + Tuple[SSHUNIXChannel[bytes], SSHUNIXSession[bytes]]: + """Process an incoming direct UNIX domain socket open request""" + + dest_path_bytes = packet.get_string() + 
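+        # Wire format note (a descriptive comment, not in the original):
+        # only the destination socket path is meaningful for this channel
+        # type; the two extra fields read below are sent by OpenSSH
+        # clients and are simply discarded.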
+ # OpenSSH appears to have a bug which sends this extra data + _ = packet.get_string() # originator + _ = packet.get_uint32() # originator_port + + packet.check_end() + + try: + dest_path = dest_path_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid direct UNIX domain channel ' + 'open request') from None + + if not self.check_key_permission('port-forwarding') or \ + not self.check_certificate_permission('port-forwarding'): + raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED, + 'Port forwarding not permitted') + + result = self._owner.unix_connection_requested(dest_path) + + if not result: + raise ChannelOpenError(OPEN_CONNECT_FAILED, 'Connection refused') + + if result is True: + result = cast(SSHUNIXSession[bytes], + self.forward_unix_connection(dest_path)) + + if isinstance(result, tuple): + chan, result = result + else: + chan = self.create_unix_channel() + + session: SSHUNIXSession[bytes] + + if callable(result): + session = SSHUNIXStreamSession[bytes](result) + else: + session = cast(SSHUNIXSession[bytes], result) + + self.logger.info('Accepted direct UNIX connection on %s', dest_path) + + chan.set_inbound_peer_names(dest_path) + + return chan, session + + def _process_streamlocal_forward_at_openssh_dot_com_global_request( + self, packet: SSHPacket) -> None: + """Process an incoming UNIX domain socket forwarding request""" + + listen_path_bytes = packet.get_string() + packet.check_end() + + try: + listen_path = listen_path_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid UNIX domain socket ' + 'forward request') from None + + if not self.check_key_permission('port-forwarding') or \ + not self.check_certificate_permission('port-forwarding'): + self.logger.info('Request for UNIX listener on %s denied: port ' + 'forwarding not permitted', listen_path) + + self._report_global_response(False) + return + + self.create_task(self._finish_path_forward(listen_path)) + + async def _finish_path_forward(self, listen_path: str) -> None: + """Finish processing a UNIX domain socket forwarding request""" + + listener = self._owner.unix_server_requested(listen_path) + + try: + if inspect.isawaitable(listener): + listener = await cast(Awaitable[_ListenerArg], listener) + + if listener is True: + listener = await self.forward_local_path(listen_path, + listen_path) + except OSError: + self.logger.debug1('Failed to create UNIX listener') + self._report_global_response(False) + return + + if not listener: + self.logger.info('Request for UNIX listener on %s denied by ' + 'application', listen_path) + + self._report_global_response(False) + return + + self.logger.info('Created UNIX listener on %s', listen_path) + + self._local_listeners[listen_path] = cast(SSHListener, listener) + self._report_global_response(True) + + def _process_cancel_streamlocal_forward_at_openssh_dot_com_global_request( + self, packet: SSHPacket) -> None: + """Process a request to cancel UNIX domain socket forwarding""" + + listen_path_bytes = packet.get_string() + packet.check_end() + + try: + listen_path = listen_path_bytes.decode('utf-8') + except UnicodeDecodeError: + raise ProtocolError('Invalid UNIX domain cancel ' + 'forward request') from None + + try: + listener = self._local_listeners.pop(listen_path) + except KeyError: + raise ProtocolError('UNIX domain listener not found') from None + + self.logger.info('Closed UNIX listener on %s', listen_path) + + listener.close() + + self._report_global_response(True) + + async def attach_x11_listener(self, chan: 
SSHServerChannel[AnyStr], + auth_proto: bytes, auth_data: bytes, + screen: int) -> Optional[str]: + """Attach a channel to a remote X11 display""" + + if (not self._x11_forwarding or + not self.check_key_permission('X11-forwarding') or + not self.check_certificate_permission('X11-forwarding')): + self.logger.info('X11 forwarding request denied: X11 ' + 'forwarding not permitted') + + return None + + if not self._x11_listener: + self._x11_listener = await create_x11_server_listener( + self, self._loop, self._x11_auth_path, auth_proto, auth_data) + + if self._x11_listener: + return self._x11_listener.attach(chan, screen) + else: + return None + + def detach_x11_listener(self, chan: SSHChannel[AnyStr]) -> None: + """Detach a session from a remote X11 listener""" + + if self._x11_listener: + if self._x11_listener.detach(chan): + self._x11_listener = None + + async def create_agent_listener(self) -> bool: + """Create a listener for forwarding ssh-agent connections""" + + if (not self._agent_forwarding or + not self.check_key_permission('agent-forwarding') or + not self.check_certificate_permission('agent-forwarding')): + self.logger.info('Agent forwarding request denied: Agent ' + 'forwarding not permitted') + + return False + + if self._agent_listener: + return True + + try: + tempdir = tempfile.TemporaryDirectory(prefix='asyncssh-') + path = str(Path(tempdir.name, 'agent')) + + unix_listener = await create_unix_forward_listener( + self, self._loop, self.create_agent_connection, path) + + self._agent_listener = SSHAgentListener(tempdir, path, + unix_listener) + return True + except OSError: + return False + + def get_agent_path(self) -> Optional[str]: + """Return the path of the ssh-agent listener, if one exists""" + + if self._agent_listener: + return self._agent_listener.get_path() + else: + return None + + def send_auth_banner(self, msg: str, lang: str = DEFAULT_LANG) -> None: + """Send an authentication banner to the client + + This method can be called to send an authentication banner to + the client, displaying information while authentication is + in progress. It is an error to call this method after the + authentication is complete. + + :param msg: + The message to display + :param lang: + The language the message is in + :type msg: `str` + :type lang: `str` + + :raises: :exc:`OSError` if authentication is already completed + + """ + + if self._auth_complete: + raise OSError('Authentication already completed') + + self.logger.debug1('Sending authentication banner') + + self.send_packet(MSG_USERAUTH_BANNER, String(msg), String(lang)) + + def set_authorized_keys(self, authorized_keys: _AuthKeysArg) -> None: + """Set the keys trusted for client public key authentication + + This method can be called to set the trusted user and + CA keys for client public key authentication. It should + generally be called from the :meth:`begin_auth + ` method of :class:`SSHServer` to + set the appropriate keys for the user attempting to + authenticate. 
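+
+        For example (a minimal sketch, assuming per-user authorized keys
+        files under the hypothetical directory `/etc/ssh/users`)::
+
+            class MyServer(asyncssh.SSHServer):
+                def connection_made(self, conn):
+                    self._conn = conn
+
+                def begin_auth(self, username):
+                    self._conn.set_authorized_keys(
+                        '/etc/ssh/users/%s/authorized_keys' % username)
+                    return True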
+ + :param authorized_keys: + The keys to trust for client public key authentication + :type authorized_keys: *see* :ref:`SpecifyingAuthorizedKeys` + + """ + + if isinstance(authorized_keys, (str, list)): + authorized_keys = read_authorized_keys(authorized_keys) + + self._authorized_client_keys = authorized_keys + + def get_key_option(self, option: str, default: object = None) -> object: + """Return option from authorized_keys + + If a client key or certificate was presented during authentication, + this method returns the value of the requested option in the + corresponding authorized_keys entry if it was set. Otherwise, it + returns the default value provided. + + The following standard options are supported: + + | command (string) + | environment (dictionary of name/value pairs) + | from (list of host patterns) + | no-touch-required (boolean) + | permitopen (list of host/port tuples) + | principals (list of usernames) + + Non-standard options are also supported and will return the + value `True` if the option is present without a value or + return a list of strings containing the values associated + with each occurrence of that option name. If the option is + not present, the specified default value is returned. + + :param option: + The name of the option to look up. + :param default: + The default value to return if the option is not present. + :type option: `str` + + :returns: The value of the option in authorized_keys, if set + + """ + + return self._key_options.get(option, default) + + def check_key_permission(self, permission: str) -> bool: + """Check permissions in authorized_keys + + If a client key or certificate was presented during + authentication, this method returns whether the specified + permission is allowed by the corresponding authorized_keys + entry. By default, all permissions are granted, but they + can be revoked by specifying an option starting with + 'no-' without a value. + + The following standard options are supported: + + | X11-forwarding + | agent-forwarding + | port-forwarding + | pty + | user-rc + + AsyncSSH internally enforces X11-forwarding, agent-forwarding, + port-forwarding and pty permissions but ignores user-rc since + it does not implement that feature. + + Non-standard permissions can also be checked, as long as the + option follows the convention of starting with 'no-'. + + :param permission: + The name of the permission to check (without the 'no-'). + :type permission: `str` + + :returns: A `bool` indicating if the permission is granted. + + """ + + return not self._key_options.get('no-' + permission, False) + + def get_certificate_option(self, option: str, + default: object = None) -> object: + """Return option from user certificate + + If a user certificate was presented during authentication, + this method returns the value of the requested option in + the certificate if it was set. Otherwise, it returns the + default value provided. + + The following options are supported: + + | force-command (string) + | no-touch-required (boolean) + | source-address (list of CIDR-style IP network addresses) + + :param option: + The name of the option to look up. + :param default: + The default value to return if the option is not present. 
+ :type option: `str` + + :returns: The value of the option in the user certificate, if set + + """ + + if self._cert_options is not None: + return self._cert_options.get(option, default) + else: + return default + + def check_certificate_permission(self, permission: str) -> bool: + """Check permissions in user certificate + + If a user certificate was presented during authentication, + this method returns whether the specified permission was + granted in the certificate. Otherwise, it acts as if all + permissions are granted and returns `True`. + + The following permissions are supported: + + | X11-forwarding + | agent-forwarding + | port-forwarding + | pty + | user-rc + + AsyncSSH internally enforces agent-forwarding, port-forwarding + and pty permissions but ignores the other values since it does + not implement those features. + + :param permission: + The name of the permission to check (without the 'permit-'). + :type permission: `str` + + :returns: A `bool` indicating if the permission is granted. + + """ + + if self._cert_options is not None: + return cast(bool, self._cert_options.get('permit-' + permission, + False)) + else: + return True + + def create_server_channel(self, encoding: Optional[str] = '', + errors: str = '', window: int = 0, + max_pktsize: int = 0) -> SSHServerChannel: + """Create an SSH server channel for a new SSH session + + This method can be called by :meth:`session_requested() + ` to create an + :class:`SSHServerChannel` with the desired encoding, Unicode + error handling strategy, window, and max packet size for a + newly created SSH server session. + + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the + session, defaulting to UTF-8 (ISO 10646) format. If `None` + is passed in, the application can send and receive raw + bytes. + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: :class:`SSHServerChannel` + + """ + + return SSHServerChannel(self, self._loop, self._allow_pty, + self._line_editor, self._line_echo, + self._line_history, self._max_line_length, + self._encoding if encoding == '' else encoding, + self._errors if errors == '' else errors, + window or self._window, + max_pktsize or self._max_pktsize) + + async def create_connection( + self, session_factory: SSHTCPSessionFactory[AnyStr], + remote_host: str, remote_port: int, orig_host: str = '', + orig_port: int = 0, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + Tuple[SSHTCPChannel[AnyStr], SSHTCPSession[AnyStr]]: + """Create an SSH TCP forwarded connection + + This method is a coroutine which can be called to notify the + client about a new inbound TCP connection arriving on the + specified remote host and port. If the connection is successfully + opened, a new SSH channel will be opened with data being handled + by a :class:`SSHTCPSession` object created by `session_factory`. + + Optional arguments include the host and port of the original + client opening the connection when performing TCP port forwarding. + + By default, this class expects data to be sent and received as + raw bytes. 
However, an optional encoding argument can be + passed in to select the encoding to use, allowing the + application to send and receive string data. When encoding is + set, an optional errors argument can be passed in to select + what Unicode error handling strategy to use. + + Other optional arguments include the SSH receive window size and + max packet size which default to 2 MB and 32 KB, respectively. + + :param session_factory: + A `callable` which returns an :class:`SSHTCPSession` object + that will be created to handle activity on this session + :param remote_host: + The hostname or address the connection was received on + :param remote_port: + The port number the connection was received on + :param orig_host: (optional) + The hostname or address of the client requesting the connection + :param orig_port: (optional) + The port number of the client requesting the connection + :param encoding: (optional) + The Unicode encoding to use for data exchanged on the connection + :param errors: (optional) + The error handling strategy to apply on encode/decode errors + :param window: (optional) + The receive window size for this session + :param max_pktsize: (optional) + The maximum packet size for this session + :type session_factory: `callable` + :type remote_host: `str` + :type remote_port: `int` + :type orig_host: `str` + :type orig_port: `int` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + + :returns: an :class:`SSHTCPChannel` and :class:`SSHTCPSession` + + """ + + self.logger.info('Opening forwarded TCP connection to %s', + (remote_host, remote_port)) + self.logger.info(' Client address: %s', (orig_host, orig_port)) + + chan = self.create_tcp_channel(encoding, errors, window, max_pktsize) + + session = await chan.accept(session_factory, remote_host, + remote_port, orig_host, orig_port) + + return chan, session + + async def open_connection(self, *args: object, **kwargs: object) -> \ + Tuple[SSHReader, SSHWriter]: + """Open an SSH TCP forwarded connection + + This method is a coroutine wrapper around :meth:`create_connection` + designed to provide a "high-level" stream interface for creating + an SSH TCP forwarded connection. Instead of taking a + `session_factory` argument for constructing an object which will + handle activity on the session via callbacks, it returns + :class:`SSHReader` and :class:`SSHWriter` objects which can be + used to perform I/O on the connection. + + With the exception of `session_factory`, all of the arguments + to :meth:`create_connection` are supported and have the same + meaning here. + + :returns: an :class:`SSHReader` and :class:`SSHWriter` + + """ + + chan, session = await self.create_connection( + SSHTCPStreamSession, *args, **kwargs) # type: ignore + + session: SSHTCPStreamSession + + return SSHReader(session, chan), SSHWriter(session, chan) + + async def create_unix_connection( + self, session_factory: SSHUNIXSessionFactory[AnyStr], + remote_path: str, *, encoding: Optional[str] = None, + errors: str = 'strict', window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \ + Tuple[SSHUNIXChannel[AnyStr], SSHUNIXSession[AnyStr]]: + """Create an SSH UNIX domain socket forwarded connection + + This method is a coroutine which can be called to notify the + client about a new inbound UNIX domain socket connection arriving + on the specified remote path. 
If the connection is successfully
+        opened, a new SSH channel will be opened with data being handled
+        by a :class:`SSHUNIXSession` object created by `session_factory`.
+
+        By default, this class expects data to be sent and received as
+        raw bytes. However, an optional encoding argument can be
+        passed in to select the encoding to use, allowing the
+        application to send and receive string data. When encoding is
+        set, an optional errors argument can be passed in to select
+        what Unicode error handling strategy to use.
+
+        Other optional arguments include the SSH receive window size and
+        max packet size which default to 2 MB and 32 KB, respectively.
+
+        :param session_factory:
+            A `callable` which returns an :class:`SSHUNIXSession` object
+            that will be created to handle activity on this session
+        :param remote_path:
+            The path the connection was received on
+        :param encoding: (optional)
+            The Unicode encoding to use for data exchanged on the connection
+        :param errors: (optional)
+            The error handling strategy to apply on encode/decode errors
+        :param window: (optional)
+            The receive window size for this session
+        :param max_pktsize: (optional)
+            The maximum packet size for this session
+        :type session_factory: `callable`
+        :type remote_path: `str`
+        :type encoding: `str` or `None`
+        :type errors: `str`
+        :type window: `int`
+        :type max_pktsize: `int`
+
+        :returns: an :class:`SSHUNIXChannel` and :class:`SSHUNIXSession`
+
+        """
+
+        self.logger.info('Opening forwarded UNIX connection to %s', remote_path)
+
+        chan = self.create_unix_channel(encoding, errors, window, max_pktsize)
+
+        session = await chan.accept(session_factory, remote_path)
+
+        return chan, session
+
+    async def open_unix_connection(self, *args: object, **kwargs: object) -> \
+            Tuple[SSHReader, SSHWriter]:
+        """Open an SSH UNIX domain socket forwarded connection
+
+        This method is a coroutine wrapper around
+        :meth:`create_unix_connection` designed to provide a "high-level"
+        stream interface for creating an SSH UNIX domain socket forwarded
+        connection. Instead of taking a `session_factory` argument for
+        constructing an object which will handle activity on the session
+        via callbacks, it returns :class:`SSHReader` and :class:`SSHWriter`
+        objects which can be used to perform I/O on the connection.
+
+        With the exception of `session_factory`, all of the arguments
+        to :meth:`create_unix_connection` are supported and have the same
+        meaning here.
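+
+        For example (a minimal sketch, assuming `conn` is an established
+        :class:`SSHServerConnection` and that the connected client is
+        listening on the hypothetical forwarded path `/run/app.sock`)::
+
+            reader, writer = await conn.open_unix_connection('/run/app.sock')
+            writer.write(b'ping')
+            response = await reader.read(1024)
+            writer.close()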
+
+        :returns: an :class:`SSHReader` and :class:`SSHWriter`
+
+        """
+
+        chan, session = \
+            await self.create_unix_connection(
+                SSHUNIXStreamSession, *args, **kwargs) # type: ignore
+
+        session: SSHUNIXStreamSession
+
+        return SSHReader(session, chan), SSHWriter(session, chan)
+
+    async def create_x11_connection(
+            self, session_factory: SSHTCPSessionFactory[bytes],
+            orig_host: str = '', orig_port: int = 0, *,
+            window: int = _DEFAULT_WINDOW,
+            max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \
+            Tuple[SSHX11Channel, SSHTCPSession[bytes]]:
+        """Create an SSH X11 forwarded connection"""
+
+        self.logger.info('Opening forwarded X11 connection')
+
+        chan = self.create_x11_channel(window, max_pktsize)
+
+        session = await chan.open(session_factory, orig_host, orig_port)
+
+        return chan, session
+
+    async def create_agent_connection(
+            self, session_factory: SSHUNIXSessionFactory[bytes], *,
+            window: int = _DEFAULT_WINDOW,
+            max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> \
+            Tuple[SSHAgentChannel, SSHUNIXSession[bytes]]:
+        """Create a forwarded ssh-agent connection back to the client"""
+
+        if not self._agent_listener:
+            raise ChannelOpenError(OPEN_ADMINISTRATIVELY_PROHIBITED,
+                                   'Agent forwarding not permitted')
+
+        self.logger.info('Opening forwarded agent connection')
+
+        chan = self.create_agent_channel(window, max_pktsize)
+
+        session = await chan.open(session_factory)
+
+        return chan, session
+
+    async def open_agent_connection(self) -> \
+            Tuple[SSHReader[bytes], SSHWriter[bytes]]:
+        """Open a forwarded ssh-agent connection back to the client"""
+
+        chan, session = \
+            await self.create_agent_connection(SSHUNIXStreamSession)
+
+        session: SSHUNIXStreamSession[bytes]
+
+        return SSHReader[bytes](session, chan), SSHWriter[bytes](session, chan)
+
+
+class SSHConnectionOptions(Options):
+    """SSH connection options"""
+
+    config: SSHConfig
+    waiter: Optional[asyncio.Future]
+    protocol_factory: _ProtocolFactory
+    version: bytes
+    host: str
+    port: int
+    tunnel: object
+    proxy_command: Optional[Sequence[str]]
+    family: int
+    local_addr: HostPort
+    tcp_keepalive: bool
+    kex_algs: Sequence[bytes]
+    encryption_algs: Sequence[bytes]
+    mac_algs: Sequence[bytes]
+    compression_algs: Sequence[bytes]
+    signature_algs: Sequence[bytes]
+    host_based_auth: bool
+    public_key_auth: bool
+    kbdint_auth: bool
+    password_auth: bool
+    x509_trusted_certs: Optional[Sequence[SSHX509Certificate]]
+    x509_trusted_cert_paths: Sequence[FilePath]
+    x509_purposes: Union[None, str, Sequence[str]]
+    rekey_bytes: int
+    rekey_seconds: float
+    connect_timeout: Optional[float]
+    login_timeout: float
+    keepalive_interval: float
+    keepalive_count_max: int
+
+    def __init__(self, options: Optional['SSHConnectionOptions'] = None,
+                 **kwargs: object):
+        last_config = options.config if options else None
+        super().__init__(options=options, last_config=last_config, **kwargs)
+
+    # pylint: disable=arguments-differ
+    def prepare(self, config: SSHConfig, # type: ignore
+                protocol_factory: _ProtocolFactory, version: _VersionArg,
+                host: str, port: DefTuple[int], tunnel: object,
+                passphrase: Optional[BytesOrStr],
+                proxy_command: DefTuple[_ProxyCommand], family: DefTuple[int],
+                local_addr: DefTuple[HostPort], tcp_keepalive: DefTuple[bool],
+                kex_algs: _AlgsArg, encryption_algs: _AlgsArg,
+                mac_algs: _AlgsArg, compression_algs: _AlgsArg,
+                signature_algs: _AlgsArg, host_based_auth: _AuthArg,
+                public_key_auth: _AuthArg, kbdint_auth: _AuthArg,
+                password_auth: _AuthArg, x509_trusted_certs: CertListArg,
+                x509_trusted_cert_paths: Sequence[FilePath],
+                
x509_purposes: X509CertPurposes, + rekey_bytes: DefTuple[Union[int, str]], + rekey_seconds: DefTuple[Union[float, str]], + connect_timeout: Optional[Union[float, str]], + login_timeout: Union[float, str], + keepalive_interval: Union[float, str], + keepalive_count_max: int) -> None: + """Prepare common connection configuration options""" + + self.config = config + self.protocol_factory = protocol_factory + self.version = _validate_version(version) + + self.host = cast(str, config.get('Hostname', host)) + self.port = cast(int, port if port != () else + config.get('Port', DEFAULT_PORT)) + + self.tunnel = tunnel if tunnel != () else config.get('ProxyJump') + self.passphrase = passphrase + + if isinstance(proxy_command, str): + proxy_command = shlex.split(proxy_command) + + self.proxy_command = proxy_command if proxy_command != () else \ + cast(Sequence[str], config.get('ProxyCommand')) + + self.family = cast(int, family if family != () else + config.get('AddressFamily', socket.AF_UNSPEC)) + + bind_addr = config.get('BindAddress') + + self.local_addr = cast(HostPort, local_addr if local_addr != () + else (bind_addr, 0) if bind_addr else None) + + self.tcp_keepalive = cast(bool, tcp_keepalive if tcp_keepalive != () + else config.get('TCPKeepAlive', True)) + + self.kex_algs, self.encryption_algs, self.mac_algs, \ + self.compression_algs, self.signature_algs = \ + _validate_algs(config, kex_algs, encryption_algs, mac_algs, + compression_algs, signature_algs, + x509_trusted_certs is not None) + + self.host_based_auth = \ + cast(bool, host_based_auth if host_based_auth != () else + config.get('HostbasedAuthentication', True)) + + self.public_key_auth = \ + cast(bool, public_key_auth if public_key_auth != () else + config.get('PubkeyAuthentication', True)) + + self.kbdint_auth = \ + cast(bool, kbdint_auth if kbdint_auth != () else + config.get('KbdInteractiveAuthentication', + config.get('ChallengeResponseAuthentication', True))) + + self.password_auth = \ + cast(bool, password_auth if password_auth != () else + config.get('PasswordAuthentication', True)) + + if x509_trusted_certs is not None: + certs = load_certificates(x509_trusted_certs) + + for cert in certs: + if not cert.is_x509: + raise ValueError('OpenSSH certificates not allowed ' + 'in X.509 trusted certs') + + x509_trusted_certs = cast(Sequence[SSHX509Certificate], certs) + + if x509_trusted_cert_paths: + for path in x509_trusted_cert_paths: + if not Path(path).is_dir(): + raise ValueError('X.509 trusted certificate path not ' + f'a directory: {path}') + + self.x509_trusted_certs = x509_trusted_certs + self.x509_trusted_cert_paths = x509_trusted_cert_paths + self.x509_purposes = x509_purposes + + config_rekey_bytes, config_rekey_seconds = \ + cast(Tuple[DefTuple[int], DefTuple[int]], + config.get('RekeyLimit', ((), ()))) + + if rekey_bytes == (): + rekey_bytes = config_rekey_bytes + + if rekey_bytes == (): + rekey_bytes = _DEFAULT_REKEY_BYTES + elif isinstance(rekey_bytes, str): + rekey_bytes = parse_byte_count(rekey_bytes) + + rekey_bytes: int + + if rekey_bytes <= 0: + raise ValueError('Rekey bytes cannot be negative or zero') + + if rekey_seconds == (): + rekey_seconds = config_rekey_seconds + + if rekey_seconds == (): + rekey_seconds = _DEFAULT_REKEY_SECONDS + elif isinstance(rekey_seconds, str): + rekey_seconds = parse_time_interval(rekey_seconds) + + rekey_seconds: float + + if rekey_seconds and rekey_seconds <= 0: + raise ValueError('Rekey seconds cannot be negative or zero') + + if isinstance(connect_timeout, str): + 
connect_timeout = parse_time_interval(connect_timeout) + + if connect_timeout and connect_timeout < 0: + raise ValueError('Connect timeout cannot be negative') + + if isinstance(login_timeout, str): + login_timeout = parse_time_interval(login_timeout) + + if login_timeout and login_timeout < 0: + raise ValueError('Login timeout cannot be negative') + + if isinstance(keepalive_interval, str): + keepalive_interval = parse_time_interval(keepalive_interval) + + if keepalive_interval and keepalive_interval < 0: + raise ValueError('Keepalive interval cannot be negative') + + if keepalive_count_max <= 0: + raise ValueError('Keepalive count max cannot be negative or zero') + + self.rekey_bytes = rekey_bytes + self.rekey_seconds = rekey_seconds + self.connect_timeout = connect_timeout or None + self.login_timeout = login_timeout + self.keepalive_interval = keepalive_interval + self.keepalive_count_max = keepalive_count_max + + +class SSHClientConnectionOptions(SSHConnectionOptions): + """SSH client connection options + + The following options are available to control the establishment + of SSH client connections: + + :param client_factory: (optional) + A `callable` which returns an :class:`SSHClient` object that will + be created for each new connection. + :param proxy_command: (optional) + A string or list of strings specifying a command and arguments + to run to make a connection to the SSH server. Data will be + forwarded to this process over stdin/stdout instead of opening a + TCP connection. If specified as a string, standard shell quoting + will be applied when splitting the command and its arguments. + :param known_hosts: (optional) + The list of keys which will be used to validate the server host + key presented during the SSH handshake. If this is not specified, + the keys will be looked up in the file :file:`.ssh/known_hosts`. + If this is explicitly set to `None`, server host key validation + will be disabled. + :param host_key_alias: (optional) + An alias to use instead of the real host name when looking up a host + key in known_hosts and when validating host certificates. + :param server_host_key_algs: (optional) + A list of server host key algorithms to use instead of the + default of those present in known_hosts when performing the SSH + handshake, taken from :ref:`server host key algorithms + `. This is useful when using the + validate_host_public_key callback to validate server host keys, + since AsyncSSH can not determine which server host key algorithms + are preferred. This argument can also be set to 'default' to + specify that the client should always send its default list of + supported algorithms to avoid leaking information about what + algorithms are present for the server in known_hosts. + + .. note:: The 'default' keyword should be used with + caution, as it can result in a host key mismatch + if the client trusts only a subset of the host + keys the server might return. + :param x509_trusted_certs: (optional) + A list of certificates which should be trusted for X.509 server + certificate authentication. If no trusted certificates are + specified, an attempt will be made to load them from the file + :file:`.ssh/ca-bundle.crt`. If this argument is explicitly set + to `None`, X.509 server certificate authentication will not + be performed. + + .. note:: X.509 certificates to trust can also be provided + through a :ref:`known_hosts ` file + if they are converted into OpenSSH format. + This allows their trust to be limited to only + specific host names. 
+    :param x509_trusted_cert_paths: (optional)
+        A list of path names to "hash directories" containing certificates
+        which should be trusted for X.509 server certificate authentication.
+        Each certificate should be in a separate file with a name of the
+        form *hash.number*, where *hash* is the OpenSSL hash value of the
+        certificate subject name and *number* is an integer counting up
+        from zero if multiple certificates have the same hash. If no
+        paths are specified, an attempt will be made to use the directory
+        :file:`.ssh/crt` as a certificate hash directory.
+    :param x509_purposes: (optional)
+        A list of purposes allowed in the ExtendedKeyUsage of a
+        certificate used for X.509 server certificate authentication,
+        defaulting to 'secureShellServer'. If this argument is explicitly
+        set to `None`, the server certificate's ExtendedKeyUsage will
+        not be checked.
+    :param username: (optional)
+        Username to authenticate as on the server. If not specified,
+        the currently logged in user on the local machine will be used.
+    :param password: (optional)
+        The password to use for client password authentication or
+        keyboard-interactive authentication which prompts for a password.
+        If this is not specified, client password authentication will
+        not be performed.
+    :param client_host_keysign: (optional)
+        Whether or not to use `ssh-keysign` to sign host-based
+        authentication requests. If set to `True`, an attempt will be
+        made to find `ssh-keysign` in its typical locations. If set to
+        a string, that will be used as the `ssh-keysign` path. When set,
+        client_host_keys should be a list of public keys. Otherwise,
+        client_host_keys should be a list of private keys with optional
+        paired certificates.
+    :param client_host_keys: (optional)
+        A list of keys to use to authenticate this client via host-based
+        authentication. If `client_host_keysign` is set and no host keys
+        or certificates are specified, an attempt will be made to find
+        them in their typical locations. If `client_host_keysign` is
+        not set, host private keys must be specified explicitly or
+        host-based authentication will not be performed.
+    :param client_host_certs: (optional)
+        A list of optional certificates which can be paired with the
+        provided client host keys.
+    :param client_host: (optional)
+        The local hostname to use when performing host-based
+        authentication. If not specified, the hostname associated with
+        the local IP address of the SSH connection will be used.
+    :param client_username: (optional)
+        The local username to use when performing host-based
+        authentication. If not specified, the username of the currently
+        logged in user will be used.
+    :param client_keys: (optional)
+        A list of keys which will be used to authenticate this client
+        via public key authentication. These keys will be used after
+        trying keys from a PKCS11 provider or an ssh-agent, if either
+        of those are configured. If no client keys are specified,
+        an attempt will be made to load them from the files
+        :file:`.ssh/id_ed25519_sk`, :file:`.ssh/id_ecdsa_sk`,
+        :file:`.ssh/id_ed448`, :file:`.ssh/id_ed25519`,
+        :file:`.ssh/id_ecdsa`, :file:`.ssh/id_rsa`, and
+        :file:`.ssh/id_dsa` in the user's home directory, with
+        optional certificates loaded from the files
+        :file:`.ssh/id_ed25519_sk-cert.pub`,
+        :file:`.ssh/id_ecdsa_sk-cert.pub`, :file:`.ssh/id_ed448-cert.pub`,
+        :file:`.ssh/id_ed25519-cert.pub`, :file:`.ssh/id_ecdsa-cert.pub`,
+        :file:`.ssh/id_rsa-cert.pub`, and :file:`.ssh/id_dsa-cert.pub`.
+ If this argument is explicitly set to `None`, client public key + authentication will not be performed. + :param client_certs: (optional) + A list of optional certificates which can be paired with the + provided client keys. + :param passphrase: (optional) + The passphrase to use to decrypt client keys when loading them, + if they are encrypted. If this is not specified, only unencrypted + client keys can be loaded. If the keys passed into client_keys + are already loaded, this argument is ignored. + :param ignore_encrypted: (optional) + Whether or not to ignore encrypted keys when no passphrase is + specified. This defaults to `True` when keys are specified via + the IdentityFile config option, causing encrypted keys in the + config to be ignored when no passphrase is specified. Note + that encrypted keys loaded into an SSH agent can still be used + when this option is set. + :param host_based_auth: (optional) + Whether or not to allow host-based authentication. By default, + host-based authentication is enabled if client host keys are + made available. + :param public_key_auth: (optional) + Whether or not to allow public key authentication. By default, + public key authentication is enabled if client keys are made + available. + :param kbdint_auth: (optional) + Whether or not to allow keyboard-interactive authentication. By + default, keyboard-interactive authentication is enabled if a + password is specified or if callbacks to respond to challenges + are made available. + :param password_auth: (optional) + Whether or not to allow password authentication. By default, + password authentication is enabled if a password is specified + or if callbacks to provide a password are made available. + :param gss_host: (optional) + The principal name to use for the host in GSS key exchange and + authentication. If not specified, this value will be the same + as the `host` argument. If this argument is explicitly set to + `None`, GSS key exchange and authentication will not be performed. + :param gss_kex: (optional) + Whether or not to allow GSS key exchange. By default, GSS + key exchange is enabled. + :param gss_auth: (optional) + Whether or not to allow GSS authentication. By default, GSS + authentication is enabled. + :param gss_delegate_creds: (optional) + Whether or not to forward GSS credentials to the server being + accessed. By default, GSS credential delegation is disabled. + :param preferred_auth: + A list of authentication methods the client should attempt to + use in order of preference. By default, the preferred list is + gssapi-keyex, gssapi-with-mic, hostbased, publickey, + keyboard-interactive, and then password. This list may be + limited by which auth methods are implemented by the client + and which methods the server accepts. + :param disable_trivial_auth: (optional) + Whether or not to allow "trivial" forms of auth where the + client is not actually challenged for credentials. Setting + this will cause the connection to fail if a server does not + perform some non-trivial form of auth during the initial + SSH handshake. If not specified, all forms of auth supported + by the server are allowed, including none. + :param agent_path: (optional) + The path of a UNIX domain socket to use to contact an ssh-agent + process which will perform the operations needed for client + public key authentication, or the :class:`SSHServerConnection` + to use to forward ssh-agent requests over. 
If this is not + specified and the environment variable `SSH_AUTH_SOCK` is + set, its value will be used as the path. If this argument is + explicitly set to `None`, an ssh-agent will not be used. + :param agent_identities: (optional) + A list of identities used to restrict which SSH agent keys may + be used. These may be specified as byte strings in binary SSH + format or as public keys or certificates (*see* + :ref:`SpecifyingPublicKeys` and :ref:`SpecifyingCertificates`). + If set to `None`, all keys loaded into the SSH agent will be + made available for use. This is the default. + :param agent_forwarding: (optional) + Whether or not to allow forwarding of ssh-agent requests from + processes running on the server. By default, ssh-agent forwarding + requests from the server are not allowed. + :param pkcs11_provider: (optional) + The path of a shared library which should be used as a PKCS#11 + provider for accessing keys on PIV security tokens. By default, + no local security tokens will be accessed. + :param pkcs11_pin: (optional) + The PIN to use when accessing security tokens via PKCS#11. + + .. note:: If your application opens multiple SSH connections + using PKCS#11 keys, you should consider calling + :func:`load_pkcs11_keys` explicitly instead of + using these arguments. This allows you to pay + the cost of loading the key information from the + security tokens only once. You can then pass the + returned keys via the `client_keys` argument to + any calls that need them. + + Calling :func:`load_pkcs11_keys` explicitly also + gives you the ability to load keys from multiple + tokens with different PINs and to select which + tokens to load keys from and which keys on those + tokens to load. + + :param client_version: (optional) + An ASCII string to advertise to the SSH server as the version of + this client, defaulting to `'AsyncSSH'` and its version number. + :param kex_algs: (optional) + A list of allowed key exchange algorithms in the SSH handshake, + taken from :ref:`key exchange algorithms `. + :param encryption_algs: (optional) + A list of encryption algorithms to use during the SSH handshake, + taken from :ref:`encryption algorithms `. + :param mac_algs: (optional) + A list of MAC algorithms to use during the SSH handshake, taken + from :ref:`MAC algorithms `. + :param compression_algs: (optional) + A list of compression algorithms to use during the SSH handshake, + taken from :ref:`compression algorithms `, or + `None` to disable compression. + :param signature_algs: (optional) + A list of public key signature algorithms to use during the SSH + handshake, taken from :ref:`signature algorithms `. + :param rekey_bytes: (optional) + The number of bytes which can be sent before the SSH session + key is renegotiated. This defaults to 1 GB. + :param rekey_seconds: (optional) + The maximum time in seconds before the SSH session key is + renegotiated. This defaults to 1 hour. + :param connect_timeout: (optional) + The maximum time in seconds allowed to complete an outbound + SSH connection. This includes the time to establish the TCP + connection and the time to perform the initial SSH protocol + handshake, key exchange, and authentication. This is disabled + by default, relying on the system's default TCP connect timeout + and AsyncSSH's login timeout. + :param login_timeout: (optional) + The maximum time in seconds allowed for authentication to + complete, defaulting to 2 minutes. Setting this to 0 will + disable the login timeout. + + .. 
note:: This timeout only applies after the SSH TCP
+                  connection is established. To set a timeout
+                  which includes establishing the TCP connection,
+                  use the `connect_timeout` argument above.
+    :param keepalive_interval: (optional)
+        The time in seconds to wait before sending a keepalive message
+        if no data has been received from the server. This defaults to
+        0, which disables sending these messages.
+    :param keepalive_count_max: (optional)
+        The maximum number of keepalive messages which will be sent
+        without getting a response before disconnecting from the
+        server. This defaults to 3, but only applies when
+        keepalive_interval is non-zero.
+    :param command: (optional)
+        The default remote command to execute on client sessions.
+        An interactive shell is started if no command or subsystem is
+        specified.
+    :param subsystem: (optional)
+        The default remote subsystem to start on client sessions.
+    :param env: (optional)
+        The default environment variables to set for client sessions.
+        Keys and values passed in here will be converted to Unicode
+        strings encoded as UTF-8 (ISO 10646) for transmission.
+
+        .. note:: Many SSH servers restrict which environment
+                  variables a client is allowed to set. The
+                  server's configuration may need to be edited
+                  before environment variables can be
+                  successfully set in the remote environment.
+    :param send_env: (optional)
+        A list of environment variable names to pull from
+        `os.environ` and set by default for client sessions. Wildcard
+        patterns using `'*'` and `'?'` are allowed, and all variables
+        with matching names will be sent with whatever value is set in
+        the local environment. If a variable is present in both env
+        and send_env, the value from env will be used.
+    :param request_pty: (optional)
+        Whether or not to request a pseudo-terminal (PTY) by default for
+        client sessions. This defaults to `True`, which means to request
+        a PTY whenever the `term_type` is set. Other possible values
+        include `False` to never request a PTY, `'force'` to always
+        request a PTY even without `term_type` being set, or `'auto'`
+        to request a TTY when `term_type` is set but only when starting
+        an interactive shell.
+    :param term_type: (optional)
+        The default terminal type to set for client sessions.
+    :param term_size: (optional)
+        The terminal width and height in characters and optionally
+        the width and height in pixels to set for client sessions.
+    :param term_modes: (optional)
+        POSIX terminal modes to set for client sessions, where keys are
+        taken from :ref:`POSIX terminal modes ` with values
+        defined in section 8 of :rfc:`RFC 4254 <4254#section-8>`.
+    :param x11_forwarding: (optional)
+        Whether or not to request X11 forwarding for client sessions,
+        defaulting to `False`. If set to `True`, X11 forwarding will be
+        requested and a failure will raise :exc:`ChannelOpenError`. It
+        can also be set to `'ignore_failure'` to attempt X11 forwarding
+        but ignore failures.
+    :param x11_display: (optional)
+        The display that X11 connections should be forwarded to,
+        defaulting to the value in the environment variable `DISPLAY`.
+    :param x11_auth_path: (optional)
+        The path to the Xauthority file to read X11 authentication
+        data from, defaulting to the value in the environment variable
+        `XAUTHORITY` or the file :file:`.Xauthority` in the user's
+        home directory if that's not set.
+    :param x11_single_connection: (optional)
+        Whether or not to limit X11 forwarding to a single connection,
+        defaulting to `False`.
+ :param encoding: (optional) + The default Unicode encoding to use for data exchanged on client + sessions. + :param errors: (optional) + The default error handling strategy to apply on Unicode + encode/decode errors. + :param window: (optional) + The default receive window size to set for client sessions. + :param max_pktsize: (optional) + The default maximum packet size to set for client sessions. + :param config: (optional) + Paths to OpenSSH client configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. + + .. note:: Specifying configuration files when creating an + :class:`SSHClientConnectionOptions` object will + cause the config file to be read and parsed at + the time of creation of the object, including + evaluation of any conditional blocks. If you want + the config to be parsed for every new connection, + this argument should be added to the connect or + listen calls instead. However, if you want to + save the parsing overhead and your configuration + doesn't depend on conditions that would change + between calls, this argument may be an option. + :param options: (optional) + A previous set of options to use as the base to incrementally + build up a configuration. When an option is not explicitly + specified, its value will be pulled from this options object + (if present) before falling back to the default value. + :type client_factory: `callable` returning :class:`SSHClient` + :type proxy_command: `str` or `list` of `str` + :type known_hosts: *see* :ref:`SpecifyingKnownHosts` + :type host_key_alias: `str` + :type server_host_key_algs: `str` or `list` of `str` + :type x509_trusted_certs: *see* :ref:`SpecifyingCertificates` + :type x509_trusted_cert_paths: `list` of `str` + :type x509_purposes: *see* :ref:`SpecifyingX509Purposes` + :type username: `str` + :type password: `str` + :type client_host_keysign: `bool` or `str` + :type client_host_keys: + *see* :ref:`SpecifyingPrivateKeys` or :ref:`SpecifyingPublicKeys` + :type client_host_certs: *see* :ref:`SpecifyingCertificates` + :type client_host: `str` + :type client_username: `str` + :type client_keys: *see* :ref:`SpecifyingPrivateKeys` + :type client_certs: *see* :ref:`SpecifyingCertificates` + :type passphrase: `str` or `bytes` + :type ignore_encrypted: `bool` + :type host_based_auth: `bool` + :type public_key_auth: `bool` + :type kbdint_auth: `bool` + :type password_auth: `bool` + :type gss_host: `str` + :type gss_kex: `bool` + :type gss_auth: `bool` + :type gss_delegate_creds: `bool` + :type preferred_auth: `str` or `list` of `str` + :type disable_trivial_auth: `bool` + :type agent_path: `str` + :type agent_identities: + *see* :ref:`SpecifyingPublicKeys` and :ref:`SpecifyingCertificates` + :type agent_forwarding: `bool` + :type pkcs11_provider: `str` + :type pkcs11_pin: `str` + :type client_version: `str` + :type kex_algs: `str` or `list` of `str` + :type encryption_algs: `str` or `list` of `str` + :type mac_algs: `str` or `list` of `str` + :type compression_algs: `str` or `list` of `str` + :type signature_algs: `str` or `list` of `str` + :type rekey_bytes: *see* :ref:`SpecifyingByteCounts` + :type rekey_seconds: *see* :ref:`SpecifyingTimeIntervals` + :type connect_timeout: *see* :ref:`SpecifyingTimeIntervals` + :type login_timeout: *see* :ref:`SpecifyingTimeIntervals` + :type keepalive_interval: *see* :ref:`SpecifyingTimeIntervals` + :type keepalive_count_max: `int` + :type command: 
`str` + :type subsystem: `str` + :type env: `dict` with `str` keys and values + :type send_env: `list` of `str` + :type request_pty: `bool`, `'force'`, or `'auto'` + :type term_type: `str` + :type term_size: `tuple` of 2 or 4 `int` values + :type term_modes: `dict` with `int` keys and values + :type x11_forwarding: `bool` or `'ignore_failure'` + :type x11_display: `str` + :type x11_auth_path: `str` + :type x11_single_connection: `bool` + :type encoding: `str` or `None` + :type errors: `str` + :type window: `int` + :type max_pktsize: `int` + :type config: `list` of `str` + :type options: :class:`SSHClientConnectionOptions` + + """ + + config: SSHClientConfig + client_factory: _ClientFactory + client_version: bytes + known_hosts: KnownHostsArg + host_key_alias: Optional[str] + server_host_key_algs: Union[str, Sequence[str]] + username: str + password: Optional[str] + client_host_keysign: Optional[str] + client_host_keypairs: Sequence[SSHKeyPair] + client_host_pubkeys: Sequence[Union[SSHKey, SSHCertificate]] + client_host: Optional[str] + client_username: str + client_keys: Optional[Sequence[SSHKeyPair]] + client_certs: Sequence[FilePath] + ignore_encrypted: bool + gss_host: DefTuple[Optional[str]] + gss_kex: bool + gss_auth: bool + gss_delegate_creds: bool + preferred_auth: Sequence[str] + disable_trivial_auth: bool + agent_path: Optional[str] + agent_identities: Optional[Sequence[bytes]] + agent_forward_path: Optional[str] + pkcs11_provider: Optional[FilePath] + pkcs11_pin: Optional[str] + command: Optional[str] + subsystem: Optional[str] + env: _Env + send_env: _SendEnv + request_pty: _RequestPTY + term_type: Optional[str] + term_size: TermSizeArg + term_modes: TermModesArg + x11_forwarding: Union[bool, str] + x11_display: Optional[str] + x11_auth_path: Optional[str] + x11_single_connection: bool + encoding: Optional[str] + errors: str + window: int + max_pktsize: int + + # pylint: disable=arguments-differ + def prepare(self, last_config: Optional[SSHConfig] = None, # type: ignore + config: DefTuple[ConfigPaths] = None, reload: bool = False, + client_factory: Optional[_ClientFactory] = None, + client_version: _VersionArg = (), host: str = '', + port: DefTuple[int] = (), tunnel: object = (), + proxy_command: DefTuple[_ProxyCommand] = (), + family: DefTuple[int] = (), + local_addr: DefTuple[HostPort] = (), + tcp_keepalive: DefTuple[bool] = (), + kex_algs: _AlgsArg = (), encryption_algs: _AlgsArg = (), + mac_algs: _AlgsArg = (), compression_algs: _AlgsArg = (), + signature_algs: _AlgsArg = (), host_based_auth: _AuthArg = (), + public_key_auth: _AuthArg = (), kbdint_auth: _AuthArg = (), + password_auth: _AuthArg = (), + x509_trusted_certs: CertListArg = (), + x509_trusted_cert_paths: Sequence[FilePath] = (), + x509_purposes: X509CertPurposes = 'secureShellServer', + rekey_bytes: DefTuple[Union[int, str]] = (), + rekey_seconds: DefTuple[Union[float, str]] = (), + connect_timeout: DefTuple[Optional[Union[float, str]]] = (), + login_timeout: Union[float, str] = _DEFAULT_LOGIN_TIMEOUT, + keepalive_interval: DefTuple[Union[float, str]] = (), + keepalive_count_max: DefTuple[int] = (), + known_hosts: KnownHostsArg = (), + host_key_alias: DefTuple[Optional[str]] = (), + server_host_key_algs: _AlgsArg = (), + username: DefTuple[str] = (), password: Optional[str] = None, + client_host_keysign: DefTuple[KeySignPath] = (), + client_host_keys: Optional[_ClientKeysArg] = None, + client_host_certs: Sequence[FilePath] = (), + client_host: Optional[str] = None, + client_username: DefTuple[str] = (), + 
client_keys: _ClientKeysArg = (), + client_certs: Sequence[FilePath] = (), + passphrase: Optional[BytesOrStr] = None, + ignore_encrypted: DefTuple[bool] = (), + gss_host: DefTuple[Optional[str]] = (), + gss_kex: DefTuple[bool] = (), gss_auth: DefTuple[bool] = (), + gss_delegate_creds: DefTuple[bool] = (), + preferred_auth: DefTuple[Union[str, Sequence[str]]] = (), + disable_trivial_auth: bool = False, + agent_path: DefTuple[Optional[str]] = (), + agent_identities: DefTuple[Optional[IdentityListArg]] = (), + agent_forwarding: DefTuple[bool] = (), + pkcs11_provider: DefTuple[Optional[FilePath]] = (), + pkcs11_pin: Optional[str] = None, + command: DefTuple[Optional[str]] = (), + subsystem: Optional[str] = None, env: DefTuple[_Env] = (), + send_env: DefTuple[_SendEnv] = (), + request_pty: DefTuple[_RequestPTY] = (), + term_type: Optional[str] = None, + term_size: TermSizeArg = None, + term_modes: TermModesArg = None, + x11_forwarding: DefTuple[Union[bool, str]] = (), + x11_display: Optional[str] = None, + x11_auth_path: Optional[str] = None, + x11_single_connection: bool = False, + encoding: Optional[str] = 'utf-8', errors: str = 'strict', + window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> None: + """Prepare client connection configuration options""" + + try: + local_username = getpass.getuser() + except KeyError: + raise ValueError('Unknown local username: set one of ' + 'LOGNAME, USER, LNAME, or USERNAME in ' + 'the environment') from None + + if config == () and (not last_config or not last_config.loaded): + default_config = Path('~', '.ssh', 'config').expanduser() + config = [default_config] if os.access(default_config, + os.R_OK) else [] + + config = SSHClientConfig.load(last_config, config, reload, + local_username, username, host, port) + + if x509_trusted_certs == (): + default_x509_certs = Path('~', '.ssh', 'ca-bundle.crt').expanduser() + + if os.access(default_x509_certs, os.R_OK): + x509_trusted_certs = str(default_x509_certs) + + if x509_trusted_cert_paths == (): + default_x509_cert_path = Path('~', '.ssh', 'crt').expanduser() + + if default_x509_cert_path.is_dir(): + x509_trusted_cert_paths = [str(default_x509_cert_path)] + + if connect_timeout == (): + connect_timeout = cast(Optional[Union[float, str]], + config.get('ConnectTimeout', None)) + + connect_timeout: Optional[Union[float, str]] + + if keepalive_interval == (): + keepalive_interval = \ + cast(Union[float, str], config.get('ServerAliveInterval', + _DEFAULT_KEEPALIVE_INTERVAL)) + + keepalive_interval: Union[float, str] + + if keepalive_count_max == (): + keepalive_count_max = \ + cast(int, config.get('ServerAliveCountMax', + _DEFAULT_KEEPALIVE_COUNT_MAX)) + + keepalive_count_max: int + + super().prepare(config, client_factory or SSHClient, client_version, + host, port, tunnel, passphrase, proxy_command, family, + local_addr, tcp_keepalive, kex_algs, encryption_algs, + mac_algs, compression_algs, signature_algs, + host_based_auth, public_key_auth, kbdint_auth, + password_auth, x509_trusted_certs, + x509_trusted_cert_paths, x509_purposes, rekey_bytes, + rekey_seconds, connect_timeout, login_timeout, + keepalive_interval, keepalive_count_max) + + self.known_hosts = known_hosts if known_hosts != () else \ + (cast(List[str], config.get('UserKnownHostsFile', [])) + + cast(List[str], config.get('GlobalKnownHostsFile', []))) or () + + self.host_key_alias = \ + cast(Optional[str], host_key_alias if host_key_alias != () else + config.get('HostKeyAlias')) + + self.server_host_key_algs = 
server_host_key_algs + + # Just validate the input here -- the actual server host key + # selection is done later, after the known_hosts lookup is done. + _select_host_key_algs(server_host_key_algs, + cast(DefTuple[str], config.get('HostKeyAlgorithms', ())), []) + + self.username = saslprep(cast(str, username if username != () else + config.get('User', local_username))) + + self.password = password + + if client_host_keysign == (): + client_host_keysign = \ + cast(bool, config.get('EnableSSHKeySign', False)) + + if client_host_keysign: + client_host_keysign = find_keysign(client_host_keysign) + + if client_host_keys: + self.client_host_pubkeys = \ + load_public_keys(cast(KeyListArg, client_host_keys)) + else: + self.client_host_pubkeys = load_default_host_public_keys() + else: + client_host_keysign = None + + self.client_host_keypairs = \ + load_keypairs(cast(KeyPairListArg, client_host_keys), + passphrase, client_host_certs) + + self.client_host_keysign = client_host_keysign + self.client_host = client_host + + self.client_username = saslprep(cast(str, client_username + if client_username != () else local_username)) + + self.gss_host = gss_host + + self.gss_kex = cast(bool, gss_kex if gss_kex != () else + config.get('GSSAPIKeyExchange', True)) + + self.gss_auth = cast(bool, gss_auth if gss_auth != () else + config.get('GSSAPIAuthentication', True)) + + self.gss_delegate_creds = cast(bool, + gss_delegate_creds if gss_delegate_creds != () else + config.get('GSSAPIDelegateCredentials', False)) + + if preferred_auth == (): + preferred_auth = \ + cast(str, config.get('PreferredAuthentications', ())) + + if isinstance(preferred_auth, str): + preferred_auth = preferred_auth.split(',') + + preferred_auth: Sequence[str] + + self.preferred_auth = preferred_auth + + self.disable_trivial_auth = disable_trivial_auth + + if agent_path == (): + agent_path = cast(DefTuple[str], config.get('IdentityAgent', ())) + + if agent_path == (): + agent_path = os.environ.get('SSH_AUTH_SOCK', '') + + agent_path = str(Path(agent_path).expanduser()) if agent_path else '' + + if pkcs11_provider == (): + pkcs11_provider = \ + cast(Optional[FilePath], config.get('PKCS11Provider')) + + pkcs11_provider: Optional[FilePath] + + if ignore_encrypted == (): + ignore_encrypted = client_keys == () + + ignore_encrypted: bool + + if client_keys == (): + client_keys = cast(_ClientKeysArg, config.get('IdentityFile', ())) + + if client_certs == (): + client_certs = \ + cast(Sequence[FilePath], config.get('CertificateFile', ())) + + identities_only = cast(bool, config.get('IdentitiesOnly')) + + if agent_identities == (): + if identities_only: + agent_identities = cast(KeyListArg, client_keys) + else: + agent_identities = None + + if agent_identities: + self.agent_identities = load_identities(agent_identities, + identities_only) + elif agent_identities == (): + self.agent_identities = load_default_identities() + else: + self.agent_identities = None + + if client_keys: + self.client_keys = \ + load_keypairs(cast(KeyPairListArg, client_keys), passphrase, + client_certs, identities_only, ignore_encrypted) + elif client_keys is not None: + self.client_keys = load_default_keypairs(passphrase, client_certs) + else: + self.client_keys = None + + if self.client_keys is not None: + self.agent_path = agent_path + self.pkcs11_provider = pkcs11_provider + self.pkcs11_pin = pkcs11_pin + else: + self.agent_path = None + self.pkcs11_provider = None + self.pkcs11_pin = None + + if agent_forwarding == (): + agent_forwarding = cast(bool, 
config.get('ForwardAgent', False)) + + self.agent_forward_path = agent_path if agent_forwarding else None + + self.command = cast(Optional[str], command if command != () else + config.get('RemoteCommand')) + + self.subsystem = subsystem + + self.env = cast(_Env, env if env != () else config.get('SetEnv')) + + self.send_env = cast(_SendEnv, send_env if send_env != () else + config.get('SendEnv')) + + self.request_pty = cast(_RequestPTY, request_pty if request_pty != () + else config.get('RequestTTY', True)) + + self.term_type = term_type + self.term_size = term_size + self.term_modes = term_modes + + self.x11_forwarding = cast(Union[bool, str], + x11_forwarding if x11_forwarding != () else + config.get('ForwardX11Trusted') and 'ignore_failure') + + self.x11_display = x11_display + self.x11_auth_path = x11_auth_path + self.x11_single_connection = x11_single_connection + self.encoding = encoding + self.errors = errors + self.window = window + self.max_pktsize = max_pktsize + + +class SSHServerConnectionOptions(SSHConnectionOptions): + """SSH server connection options + + The following options are available to control the acceptance + of SSH server connections: + + :param server_factory: + A `callable` which returns an :class:`SSHServer` object that will + be created for each new connection. + :param proxy_command: (optional) + A string or list of strings specifying a command and arguments + to run when using :func:`connect_reverse` to make a reverse + direction connection to an SSH client. Data will be forwarded + to this process over stdin/stdout instead of opening a TCP + connection. If specified as a string, standard shell quoting + will be applied when splitting the command and its arguments. + :param server_host_keys: (optional) + A list of private keys and optional certificates which can be + used by the server as a host key. Either this argument or + `gss_host` must be specified. If this is not specified, + only GSS-based key exchange will be supported. + :param server_host_certs: (optional) + A list of optional certificates which can be paired with the + provided server host keys. + :param passphrase: (optional) + The passphrase to use to decrypt server host keys when loading + them, if they are encrypted. If this is not specified, only + unencrypted server host keys can be loaded. If the keys passed + into server_host_keys are already loaded, this argument is + ignored. + :param known_client_hosts: (optional) + A list of client hosts which should be trusted to perform + host-based client authentication. If this is not specified, + host-based client authentication will not be performed. + :param trust_client_host: (optional) + Whether or not to use the hostname provided by the client + when performing host-based authentication. By default, the + client-provided hostname is not trusted and is instead + determined by doing a reverse lookup of the IP address the + client connected from. + :param authorized_client_keys: (optional) + A list of authorized user and CA public keys which should be + trusted for certificate-based client public key authentication. + :param x509_trusted_certs: (optional) + A list of certificates which should be trusted for X.509 client + certificate authentication. If this argument is explicitly set + to `None`, X.509 client certificate authentication will not + be performed. + + .. note:: X.509 certificates to trust can also be provided + through an :ref:`authorized_keys ` + file if they are converted into OpenSSH format.
+ This allows their trust to be limited to only + specific client IPs or user names and allows + SSH functions to be restricted when these + certificates are used. + :param x509_trusted_cert_paths: (optional) + A list of path names to "hash directories" containing certificates + which should be trusted for X.509 client certificate authentication. + Each certificate should be in a separate file with a name of the + form *hash.number*, where *hash* is the OpenSSL hash value of the + certificate subject name and *number* is an integer counting up + from zero if multiple certificates have the same hash. + :param x509_purposes: (optional) + A list of purposes allowed in the ExtendedKeyUsage of a + certificate used for X.509 client certificate authentication, + defaulting to 'secureShellClient'. If this argument is explicitly + set to `None`, the client certificate's ExtendedKeyUsage will + not be checked. + :param host_based_auth: (optional) + Whether or not to allow host-based authentication. By default, + host-based authentication is enabled if known client host keys + are specified or if callbacks to validate client host keys + are made available. + :param public_key_auth: (optional) + Whether or not to allow public key authentication. By default, + public key authentication is enabled if authorized client keys + are specified or if callbacks to validate client keys are made + available. + :param kbdint_auth: (optional) + Whether or not to allow keyboard-interactive authentication. By + default, keyboard-interactive authentication is enabled if the + callbacks to generate challenges are made available. + :param password_auth: (optional) + Whether or not to allow password authentication. By default, + password authentication is enabled if callbacks to validate a + password are made available. + :param gss_host: (optional) + The principal name to use for the host in GSS key exchange and + authentication. If not specified, the value returned by + :func:`socket.gethostname` will be used if it is a fully qualified + name. Otherwise, the value returned by :func:`socket.getfqdn` will be + used. If this argument is explicitly set to `None`, GSS + key exchange and authentication will not be performed. + :param gss_kex: (optional) + Whether or not to allow GSS key exchange. By default, GSS + key exchange is enabled. + :param gss_auth: (optional) + Whether or not to allow GSS authentication. By default, GSS + authentication is enabled. + :param allow_pty: (optional) + Whether or not to allow allocation of a pseudo-tty in sessions, + defaulting to `True` + :param line_editor: (optional) + Whether or not to enable input line editing on sessions which + have a pseudo-tty allocated, defaulting to `True` + :param line_echo: (bool) + Whether or not to echo completed input lines when they are + entered, rather than waiting for the application to read and + echo them, defaulting to `True`. Setting this to `False` + and performing the echo in the application can better synchronize + input and output, especially when there are input prompts.
+ :param line_history: (int) + The number of lines of input line history to store in the + line editor when it is enabled, defaulting to 1000 + :param max_line_length: (int) + The maximum number of characters allowed in an input line when + the line editor is enabled, defaulting to 1024 + :param rdns_lookup: (optional) + Whether or not to perform reverse DNS lookups on the client's + IP address to enable hostname-based matches in authorized key + file "from" options and "Match Host" config options, defaulting + to `False`. + :param x11_forwarding: (optional) + Whether or not to allow forwarding of X11 connections back + to the client when the client supports it, defaulting to `False` + :param x11_auth_path: (optional) + The path to the Xauthority file to write X11 authentication + data to, defaulting to the value in the environment variable + `XAUTHORITY` or the file :file:`.Xauthority` in the user's + home directory if that's not set + :param agent_forwarding: (optional) + Whether or not to allow forwarding of ssh-agent requests back + to the client when the client supports it, defaulting to `True` + :param process_factory: (optional) + A `callable` or coroutine handler function which takes an AsyncSSH + :class:`SSHServerProcess` argument that will be called each time a + new shell, exec, or subsystem other than SFTP is requested by the + client. If set, this takes precedence over the `session_factory` + argument. + :param session_factory: (optional) + A `callable` or coroutine handler function which takes AsyncSSH + stream objects for stdin, stdout, and stderr that will be called + each time a new shell, exec, or subsystem other than SFTP is + requested by the client. If not specified, sessions are rejected + by default unless the :meth:`session_requested() + ` method is overridden on the + :class:`SSHServer` object returned by `server_factory` to make + this decision. + :param encoding: (optional) + The Unicode encoding to use for data exchanged on sessions on + this server, defaulting to UTF-8 (ISO 10646) format. If `None` + is passed in, the application can send and receive raw bytes. + :param errors: (optional) + The error handling strategy to apply on Unicode encode/decode + errors of data exchanged on sessions on this server, defaulting + to 'strict'. + :param sftp_factory: (optional) + A `callable` which returns an :class:`SFTPServer` object that + will be created each time an SFTP session is requested by the + client, or `True` to use the base :class:`SFTPServer` class + to handle SFTP requests. If not specified, SFTP sessions are + rejected by default. + :param sftp_version: (optional) + The maximum version of the SFTP protocol to support, currently + either 3 or 4, defaulting to 3. + :param allow_scp: (optional) + Whether or not to allow incoming scp requests to be accepted. + This option can only be used in conjunction with `sftp_factory`. + If not specified, scp requests will be passed as regular + commands to the `process_factory` or `session_factory`. + :param window: (optional) + The receive window size for sessions on this server + :param max_pktsize: (optional) + The maximum packet size for sessions on this server + :param server_version: (optional) + An ASCII string to advertise to SSH clients as the version of + this server, defaulting to `'AsyncSSH'` and its version number.
+ :param kex_algs: (optional) + A list of allowed key exchange algorithms in the SSH handshake, + taken from :ref:`key exchange algorithms ` + :param encryption_algs: (optional) + A list of encryption algorithms to use during the SSH handshake, + taken from :ref:`encryption algorithms ` + :param mac_algs: (optional) + A list of MAC algorithms to use during the SSH handshake, taken + from :ref:`MAC algorithms ` + :param compression_algs: (optional) + A list of compression algorithms to use during the SSH handshake, + taken from :ref:`compression algorithms `, or + `None` to disable compression + :param signature_algs: (optional) + A list of public key signature algorithms to use during the SSH + handshake, taken from :ref:`signature algorithms ` + :param rekey_bytes: (optional) + The number of bytes which can be sent before the SSH session + key is renegotiated, defaulting to 1 GB + :param rekey_seconds: (optional) + The maximum time in seconds before the SSH session key is + renegotiated, defaulting to 1 hour + :param connect_timeout: (optional) + The maximum time in seconds allowed to complete an outbound + SSH connection. This includes the time to establish the TCP + connection and the time to perform the initial SSH protocol + handshake, key exchange, and authentication. This is disabled + by default, relying on the system's default TCP connect timeout + and AsyncSSH's login timeout. + :param login_timeout: (optional) + The maximum time in seconds allowed for authentication to + complete, defaulting to 2 minutes. Setting this to 0 + will disable the login timeout. + + .. note:: This timeout only applies after the SSH TCP + connection is established. To set a timeout + which includes establishing the TCP connection, + use the `connect_timeout` argument above. + :param keepalive_interval: (optional) + The time in seconds to wait before sending a keepalive message + if no data has been received from the client. This defaults to + 0, which disables sending these messages. + :param keepalive_count_max: (optional) + The maximum number of keepalive messages which will be sent + without getting a response before disconnecting a client. + This defaults to 3, but only applies when keepalive_interval is + non-zero. + :param tcp_keepalive: (optional) + Whether or not to enable keepalive probes at the TCP level to + detect broken connections, defaulting to `True` + :param config: (optional) + Paths to OpenSSH server configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. + + .. note:: Specifying configuration files when creating an + :class:`SSHServerConnectionOptions` object will + cause the config file to be read and parsed at + the time of creation of the object, including + evaluation of any conditional blocks. If you want + the config to be parsed for every new connection, + this argument should be added to the connect or + listen calls instead. However, if you want to + save the parsing overhead and your configuration + doesn't depend on conditions that would change + between calls, this argument may be an option. + :param options: (optional) + A previous set of options to use as the base to incrementally + build up a configuration. When an option is not explicitly + specified, its value will be pulled from this options object + (if present) before falling back to the default value. 
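+
+    A minimal sketch of building these options up front and applying
+    them to a listener (the key path, port, and permissive demo
+    server class below are hypothetical; real servers must configure
+    client authentication)::
+
+        import asyncio
+
+        import asyncssh
+
+        class _DemoServer(asyncssh.SSHServer):
+            def begin_auth(self, username: str) -> bool:
+                return False   # demo only: no client auth required
+
+        options = asyncssh.SSHServerConnectionOptions(
+            server_factory=_DemoServer,
+            server_host_keys=['ssh_host_key'],   # hypothetical path
+            keepalive_interval=30, keepalive_count_max=3)
+
+        async def main() -> None:
+            server = await asyncssh.listen('', 8022, options=options)
+            await server.wait_closed()
+
+        asyncio.run(main())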
+ :type server_factory: `callable` returning :class:`SSHServer` + :type proxy_command: `str` or `list` of `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type server_host_keys: *see* :ref:`SpecifyingPrivateKeys` + :type server_host_certs: *see* :ref:`SpecifyingCertificates` + :type passphrase: `str` or `bytes` + :type known_client_hosts: *see* :ref:`SpecifyingKnownHosts` + :type trust_client_host: `bool` + :type authorized_client_keys: *see* :ref:`SpecifyingAuthorizedKeys` + :type x509_trusted_certs: *see* :ref:`SpecifyingCertificates` + :type x509_trusted_cert_paths: `list` of `str` + :type x509_purposes: *see* :ref:`SpecifyingX509Purposes` + :type host_based_auth: `bool` + :type public_key_auth: `bool` + :type kbdint_auth: `bool` + :type password_auth: `bool` + :type gss_host: `str` + :type gss_kex: `bool` + :type gss_auth: `bool` + :type allow_pty: `bool` + :type line_editor: `bool` + :type line_echo: `bool` + :type line_history: `int` + :type max_line_length: `int` + :type rdns_lookup: `bool` + :type x11_forwarding: `bool` + :type x11_auth_path: `str` + :type agent_forwarding: `bool` + :type process_factory: `callable` or coroutine + :type session_factory: `callable` or coroutine + :type encoding: `str` or `None` + :type errors: `str` + :type sftp_factory: `callable` + :type sftp_version: `int` + :type allow_scp: `bool` + :type window: `int` + :type max_pktsize: `int` + :type server_version: `str` + :type kex_algs: `str` or `list` of `str` + :type encryption_algs: `str` or `list` of `str` + :type mac_algs: `str` or `list` of `str` + :type compression_algs: `str` or `list` of `str` + :type signature_algs: `str` or `list` of `str` + :type rekey_bytes: *see* :ref:`SpecifyingByteCounts` + :type rekey_seconds: *see* :ref:`SpecifyingTimeIntervals` + :type connect_timeout: *see* :ref:`SpecifyingTimeIntervals` + :type login_timeout: *see* :ref:`SpecifyingTimeIntervals` + :type keepalive_interval: *see* :ref:`SpecifyingTimeIntervals` + :type keepalive_count_max: `int` + :type config: `list` of `str` + :type options: :class:`SSHServerConnectionOptions` + + """ + + config: SSHServerConfig + server_factory: _ServerFactory + server_version: bytes + server_host_keys: 'OrderedDict[bytes, SSHKeyPair]' + known_client_hosts: KnownHostsArg + trust_client_host: bool + authorized_client_keys: DefTuple[Optional[SSHAuthorizedKeys]] + gss_host: Optional[str] + gss_kex: bool + gss_auth: bool + allow_pty: bool + line_editor: bool + line_echo: bool + line_history: int + max_line_length: int + rdns_lookup: bool + x11_forwarding: bool + x11_auth_path: Optional[str] + agent_forwarding: bool + process_factory: Optional[SSHServerProcessFactory] + session_factory: Optional[SSHServerSessionFactory] + encoding: Optional[str] + errors: str + sftp_factory: Optional[SFTPServerFactory] + sftp_version: int + allow_scp: bool + window: int + max_pktsize: int + + # pylint: disable=arguments-differ + def prepare(self, last_config: Optional[SSHConfig] = None, # type: ignore + config: DefTuple[ConfigPaths] = None, reload: bool = False, + accept_addr: str = '', accept_port: int = 0, + username: str = '', client_host: str = '', + client_addr: str = '', + server_factory: Optional[_ServerFactory] = None, + server_version: _VersionArg = (), host: str = '', + port: DefTuple[int] = (), tunnel: object = (), + proxy_command: DefTuple[_ProxyCommand] = (), + family: DefTuple[int] = (), + local_addr: DefTuple[HostPort] = (), + tcp_keepalive: DefTuple[bool] = (), + kex_algs: _AlgsArg = (), encryption_algs: 
_AlgsArg = (), + mac_algs: _AlgsArg = (), compression_algs: _AlgsArg = (), + signature_algs: _AlgsArg = (), host_based_auth: _AuthArg = (), + public_key_auth: _AuthArg = (), kbdint_auth: _AuthArg = (), + password_auth: _AuthArg = (), + x509_trusted_certs: CertListArg = (), + x509_trusted_cert_paths: Sequence[FilePath] = (), + x509_purposes: X509CertPurposes = 'secureShellClient', + rekey_bytes: DefTuple[Union[int, str]] = (), + rekey_seconds: DefTuple[Union[float, str]] = (), + connect_timeout: Optional[Union[float, str]] = None, + login_timeout: DefTuple[Union[float, str]] = (), + keepalive_interval: DefTuple[Union[float, str]] = (), + keepalive_count_max: DefTuple[int] = (), + server_host_keys: KeyPairListArg = (), + server_host_certs: CertListArg = (), + passphrase: Optional[BytesOrStr] = None, + known_client_hosts: KnownHostsArg = None, + trust_client_host: bool = False, + authorized_client_keys: _AuthKeysArg = (), + gss_host: DefTuple[Optional[str]] = (), + gss_kex: DefTuple[bool] = (), + gss_auth: DefTuple[bool] = (), + allow_pty: DefTuple[bool] = (), + line_editor: bool = True, + line_echo: bool = True, + line_history: int = _DEFAULT_LINE_HISTORY, + max_line_length: int = _DEFAULT_MAX_LINE_LENGTH, + rdns_lookup: DefTuple[bool] = (), + x11_forwarding: bool = False, + x11_auth_path: Optional[str] = None, + agent_forwarding: DefTuple[bool] = (), + process_factory: Optional[SSHServerProcessFactory] = None, + session_factory: Optional[SSHServerSessionFactory] = None, + encoding: Optional[str] = 'utf-8', errors: str = 'strict', + sftp_factory: Optional[SFTPServerFactory] = None, + sftp_version: int = MIN_SFTP_VERSION, + allow_scp: bool = False, window: int = _DEFAULT_WINDOW, + max_pktsize: int = _DEFAULT_MAX_PKTSIZE) -> None: + """Prepare server connection configuration options""" + + config = SSHServerConfig.load(last_config, config, reload, + accept_addr, accept_port, username, + client_host, client_addr) + + if login_timeout == (): + login_timeout = \ + cast(Union[float, str], config.get('LoginGraceTime', + _DEFAULT_LOGIN_TIMEOUT)) + + login_timeout: Union[float, str] + + if keepalive_interval == (): + keepalive_interval = \ + cast(Union[float, str], config.get('ClientAliveInterval', + _DEFAULT_KEEPALIVE_INTERVAL)) + + keepalive_interval: Union[float, str] + + if keepalive_count_max == (): + keepalive_count_max = \ + cast(int, config.get('ClientAliveCountMax', + _DEFAULT_KEEPALIVE_COUNT_MAX)) + + keepalive_count_max: int + + super().prepare(config, server_factory or SSHServer, server_version, + host, port, tunnel, passphrase, proxy_command, family, + local_addr, tcp_keepalive, kex_algs, encryption_algs, + mac_algs, compression_algs, signature_algs, + host_based_auth, public_key_auth, kbdint_auth, + password_auth, x509_trusted_certs, + x509_trusted_cert_paths, x509_purposes, + rekey_bytes, rekey_seconds, connect_timeout, + login_timeout, keepalive_interval, keepalive_count_max) + + if server_host_keys == (): + server_host_keys = cast(Sequence[str], config.get('HostKey')) + + if server_host_certs == (): + server_host_certs = cast(Sequence[str], + config.get('HostCertificate', ())) + + server_keys = load_keypairs(server_host_keys, passphrase, + server_host_certs) + + self.server_host_keys = OrderedDict() + + for keypair in server_keys: + for alg in keypair.host_key_algorithms: + if alg in self.server_host_keys: + raise ValueError('Multiple keys of type %s found' % + alg.decode('ascii')) + + self.server_host_keys[alg] = keypair + + self.known_client_hosts = known_client_hosts + 
self.trust_client_host = trust_client_host + + if authorized_client_keys == () and reload: + authorized_client_keys = \ + cast(List[str], config.get('AuthorizedKeysFile')) + + if isinstance(authorized_client_keys, (str, list)): + self.authorized_client_keys = \ + read_authorized_keys(authorized_client_keys) + else: + self.authorized_client_keys = authorized_client_keys + + if gss_host == (): + gss_host = socket.gethostname() + + if '.' not in gss_host: + gss_host = socket.getfqdn() + + gss_host: Optional[str] + + self.gss_host = gss_host + + self.gss_kex = cast(bool, gss_kex if gss_kex != () else + config.get('GSSAPIKeyExchange', True)) + + self.gss_auth = cast(bool, gss_auth if gss_auth != () else + config.get('GSSAPIAuthentication', True)) + + if not server_keys and not gss_host: + raise ValueError('No server host keys provided') + + self.allow_pty = cast(bool, allow_pty if allow_pty != () else + config.get('PermitTTY', True)) + + self.line_editor = line_editor + self.line_echo = line_echo + self.line_history = line_history + self.max_line_length = max_line_length + + self.rdns_lookup = cast(bool, rdns_lookup if rdns_lookup != () else + config.get('UseDNS', False)) + + self.x11_forwarding = x11_forwarding + self.x11_auth_path = x11_auth_path + + self.agent_forwarding = cast(bool, + agent_forwarding if agent_forwarding != () else + config.get('AllowAgentForwarding', True)) + + self.process_factory = process_factory + self.session_factory = session_factory + self.encoding = encoding + self.errors = errors + self.sftp_factory = SFTPServer if sftp_factory is True else sftp_factory + self.sftp_version = sftp_version + self.allow_scp = allow_scp + self.window = window + self.max_pktsize = max_pktsize + + +@async_context_manager +async def run_client(sock: socket.socket, config: DefTuple[ConfigPaths] = (), + options: Optional[SSHClientConnectionOptions] = None, + **kwargs: object) -> SSHClientConnection: + """Start an SSH client connection on an already-connected socket + + This function is a coroutine which starts an SSH client on an + existing already-connected socket. It can be used instead of + :func:`connect` when a socket is connected outside of asyncio. + + :param sock: + An existing already-connected socket to run an SSH client on, + instead of opening up a new connection. + :param config: (optional) + Paths to OpenSSH client configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. If no paths are specified and + no config paths were set when constructing the `options` + argument (if any), an attempt will be made to load the + configuration from the file :file:`.ssh/config`. If this + argument is explicitly set to `None`, no new configuration + files will be loaded, but any configuration loaded when + constructing the `options` argument will still apply. See + :ref:`SupportedClientConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when establishing the SSH client connection. These + options can be specified either through this parameter or as direct + keyword arguments to this function. 
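+
+    A minimal usage sketch (the host and username are hypothetical):
+    the socket is connected outside of asyncio and then handed to
+    this coroutine, with :func:`run_server` being the mirror image
+    for inbound sockets::
+
+        import asyncio
+        import socket
+
+        import asyncssh
+
+        async def main() -> None:
+            # Establish the TCP connection ourselves
+            sock = socket.create_connection(('shell.example.com', 22))
+
+            async with asyncssh.run_client(sock, username='demo') as conn:
+                result = await conn.run('echo hello', check=True)
+                print(result.stdout, end='')
+
+        asyncio.run(main())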
+ :type sock: :class:`socket.socket` + :type config: `list` of `str` + :type options: :class:`SSHClientConnectionOptions` + + :returns: :class:`SSHClientConnection` + + """ + + def conn_factory() -> SSHClientConnection: + """Return an SSH client connection factory""" + + return SSHClientConnection(loop, new_options, wait='auth') + + loop = asyncio.get_event_loop() + + new_options = cast(SSHClientConnectionOptions, await _run_in_executor( + loop, SSHClientConnectionOptions, options, config=config, **kwargs)) + + return await asyncio.wait_for( + _connect(new_options, loop, 0, sock, conn_factory, + 'Starting SSH client on'), + timeout=new_options.connect_timeout) + + +@async_context_manager +async def run_server(sock: socket.socket, config: DefTuple[ConfigPaths] = (), + options: Optional[SSHServerConnectionOptions] = None, + **kwargs: object) -> SSHServerConnection: + """Start an SSH server connection on an already-connected socket + + This function is a coroutine which starts an SSH server on an + existing already-connected TCP socket. It can be used instead of + :func:`listen` when connections are accepted outside of asyncio. + + :param sock: + An existing already-connected socket to run SSH over, instead of + opening up a new connection. + :param config: (optional) + Paths to OpenSSH server configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. By default, no OpenSSH + configuration files will be loaded. See + :ref:`SupportedServerConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when starting the SSH server. + These options can be specified either through this parameter + or as direct keyword arguments to this function. + :type sock: :class:`socket.socket` + :type config: `list` of `str` + :type options: :class:`SSHServerConnectionOptions` + + :returns: :class:`SSHServerConnection` + + """ + + def conn_factory() -> SSHServerConnection: + """Return an SSH server connection factory""" + + return SSHServerConnection(loop, new_options, wait='auth') + + loop = asyncio.get_event_loop() + + new_options = cast(SSHServerConnectionOptions, await _run_in_executor( + loop, SSHServerConnectionOptions, options, config=config, **kwargs)) + + return await asyncio.wait_for( + _connect(new_options, loop, 0, sock, conn_factory, + 'Starting SSH server on'), + timeout=new_options.connect_timeout) + + +@async_context_manager +async def connect(host = '', port: DefTuple[int] = (), *, + tunnel: DefTuple[_TunnelConnector] = (), + family: DefTuple[int] = (), flags: int = 0, + local_addr: DefTuple[HostPort] = (), + sock: Optional[socket.socket] = None, + config: DefTuple[ConfigPaths] = (), + options: Optional[SSHClientConnectionOptions] = None, + **kwargs: object) -> SSHClientConnection: + """Make an SSH client connection + + This function is a coroutine which can be run to create an outbound SSH + client connection to the specified host and port. + + When successful, the following steps occur: + + 1. The connection is established and an instance of + :class:`SSHClientConnection` is created to represent it. + 2. The `client_factory` is called without arguments and should + return an instance of :class:`SSHClient` or a subclass. + 3. The client object is tied to the connection and its + :meth:`connection_made() ` method + is called. + 4. 
The SSH handshake and authentication process is initiated, + calling methods on the client object if needed. + 5. When authentication completes successfully, the client's + :meth:`auth_completed() ` method is + called. + 6. The coroutine returns the :class:`SSHClientConnection`. At + this point, the connection is ready for sessions to be opened + or port forwarding to be set up. + + If an error occurs, it will be raised as an exception and the partially + open connection and client objects will be cleaned up. + + :param host: (optional) + The hostname or address to connect to. + :param port: (optional) + The port number to connect to. If not specified, the default + SSH port is used. + :param tunnel: (optional) + An existing SSH client connection that this new connection should + be tunneled over. If set, a direct TCP/IP tunnel will be opened + over this connection to the requested host and port rather than + connecting directly via TCP. A string of the form + [user@]host[:port] may also be specified, in which case a + connection will first be made to that host and it will then be + used as a tunnel. + + .. note:: When specifying tunnel as a string, any config + options in the call will apply only when opening + the connection inside the tunnel. The tunnel + itself will be opened with default configuration + settings or settings in the default config file. + To get more control of config settings used to + open the tunnel, :func:`connect` can be called + explicitly, and the resulting client connection + can be passed as the tunnel argument. + + :param family: (optional) + The address family to use when creating the socket. By default, + the address family is automatically selected based on the host. + :param flags: (optional) + The flags to pass to getaddrinfo() when looking up the host address + :param local_addr: (optional) + The host and port to bind the socket to before connecting + :param sock: (optional) + An existing already-connected socket to run SSH over, instead of + opening up a new connection. When this is specified, none of + host, port, family, flags, or local_addr should be specified. + :param config: (optional) + Paths to OpenSSH client configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. If no paths are specified and + no config paths were set when constructing the `options` + argument (if any), an attempt will be made to load the + configuration from the file :file:`.ssh/config`. If this + argument is explicitly set to `None`, no new configuration + files will be loaded, but any configuration loaded when + constructing the `options` argument will still apply. See + :ref:`SupportedClientConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when establishing the SSH client connection. These + options can be specified either through this parameter or as direct + keyword arguments to this function. 
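+
+    A minimal usage sketch (the host is hypothetical; authentication
+    falls back to default keys and any loaded config)::
+
+        import asyncio
+
+        import asyncssh
+
+        async def main() -> None:
+            # connect_timeout covers TCP setup plus the SSH handshake
+            async with asyncssh.connect('shell.example.com',
+                                        connect_timeout=10) as conn:
+                result = await conn.run('uptime', check=True)
+                print(result.stdout, end='')
+
+        asyncio.run(main())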
+ :type host: `str` + :type port: `int` + :type tunnel: :class:`SSHClientConnection` or `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type flags: flags to pass to :meth:`getaddrinfo() ` + :type local_addr: tuple of `str` and `int` + :type sock: :class:`socket.socket` or `None` + :type config: `list` of `str` + :type options: :class:`SSHClientConnectionOptions` + + :returns: :class:`SSHClientConnection` + + """ + + def conn_factory() -> SSHClientConnection: + """Return an SSH client connection factory""" + + return SSHClientConnection(loop, new_options, wait='auth') + + loop = asyncio.get_event_loop() + + new_options = cast(SSHClientConnectionOptions, await _run_in_executor( + loop, SSHClientConnectionOptions, options, config=config, + host=host, port=port, tunnel=tunnel, family=family, + local_addr=local_addr, **kwargs)) + + return await asyncio.wait_for( + _connect(new_options, loop, flags, sock, conn_factory, + 'Opening SSH connection to'), + timeout=new_options.connect_timeout) + + +@async_context_manager +async def connect_reverse( + host = '', port: DefTuple[int] = (), *, + tunnel: DefTuple[_TunnelConnector] = (), + family: DefTuple[int] = (), flags: int = 0, + local_addr: DefTuple[HostPort] = (), + sock: Optional[socket.socket] = None, + config: DefTuple[ConfigPaths] = (), + options: Optional[SSHServerConnectionOptions] = None, + **kwargs: object) -> SSHServerConnection: + """Create a reverse direction SSH connection + + This function is a coroutine which behaves similarly to :func:`connect`, + making an outbound TCP connection to a remote server. However, instead + of starting up an SSH client which runs on that outbound connection, + this function starts up an SSH server, expecting the remote system to + start up a reverse-direction SSH client. + + Arguments to this function are the same as :func:`connect`, except + that the `options` are of type :class:`SSHServerConnectionOptions` + instead of :class:`SSHClientConnectionOptions`. + + :param host: (optional) + The hostname or address to connect to. + :param port: (optional) + The port number to connect to. If not specified, the default + SSH port is used. + :param tunnel: (optional) + An existing SSH client connection that this new connection should + be tunneled over. If set, a direct TCP/IP tunnel will be opened + over this connection to the requested host and port rather than + connecting directly via TCP. A string of the form + [user@]host[:port] may also be specified, in which case a + connection will first be made to that host and it will then be + used as a tunnel. + + .. note:: When specifying tunnel as a string, any config + options in the call will apply only when opening + the connection inside the tunnel. The tunnel + itself will be opened with default configuration + settings or settings in the default config file. + To get more control of config settings used to + open the tunnel, :func:`connect` can be called + explicitly, and the resulting client connection + can be passed as the tunnel argument. + + :param family: (optional) + The address family to use when creating the socket. By default, + the address family is automatically selected based on the host. + :param flags: (optional) + The flags to pass to getaddrinfo() when looking up the host address + :param local_addr: (optional) + The host and port to bind the socket to before connecting + :param sock: (optional) + An existing already-connected socket to run SSH over, instead of + opening up a new connection. 
When this is specified, none of + host, port, family, flags, or local_addr should be specified. + :param config: (optional) + Paths to OpenSSH server configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. By default, no OpenSSH + configuration files will be loaded. See + :ref:`SupportedServerConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when starting the reverse-direction SSH server. + These options can be specified either through this parameter + or as direct keyword arguments to this function. + :type host: `str` + :type port: `int` + :type tunnel: :class:`SSHClientConnection` or `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type flags: flags to pass to :meth:`getaddrinfo() ` + :type local_addr: tuple of `str` and `int` + :type sock: :class:`socket.socket` or `None` + :type config: `list` of `str` + :type options: :class:`SSHServerConnectionOptions` + + :returns: :class:`SSHServerConnection` + + """ + + def conn_factory() -> SSHServerConnection: + """Return an SSH server connection factory""" + + return SSHServerConnection(loop, new_options, wait='auth') + + loop = asyncio.get_event_loop() + + new_options = cast(SSHServerConnectionOptions, await _run_in_executor( + loop, SSHServerConnectionOptions, options, config=config, + host=host, port=port, tunnel=tunnel, family=family, + local_addr=local_addr, **kwargs)) + + return await asyncio.wait_for( + _connect(new_options, loop, flags, sock, conn_factory, + 'Opening reverse SSH connection to'), + timeout=new_options.connect_timeout) + + +@async_context_manager +async def listen(host = '', port: DefTuple[int] = (), *, + tunnel: DefTuple[_TunnelListener] = (), + family: DefTuple[int] = (), flags: int = socket.AI_PASSIVE, + backlog: int = 100, sock: Optional[socket.socket] = None, + reuse_address: bool = False, reuse_port: bool = False, + acceptor: _AcceptHandler = None, + error_handler: _ErrorHandler = None, + config: DefTuple[ConfigPaths] = (), + options: Optional[SSHServerConnectionOptions] = None, + **kwargs: object) -> SSHAcceptor: + """Start an SSH server + + This function is a coroutine which can be run to create an SSH server + listening on the specified host and port. The return value is an + :class:`SSHAcceptor` which can be used to shut down the listener. + + :param host: (optional) + The hostname or address to listen on. If not specified, listeners + are created for all addresses. + :param port: (optional) + The port number to listen on. If not specified, the default + SSH port is used. + :param tunnel: (optional) + An existing SSH client connection that this new listener should + be forwarded over. If set, a remote TCP/IP listener will be + opened on this connection on the requested host and port rather + than listening directly via TCP. A string of the form + [user@]host[:port] may also be specified, in which case a + connection will first be made to that host and it will then be + used as a tunnel. + + .. note:: When specifying tunnel as a string, any config + options in the call will apply only when opening + the connection inside the tunnel. The tunnel + itself will be opened with default configuration + settings or settings in the default config file. 
+ To get more control of config settings used to + open the tunnel, :func:`connect` can be called + explicitly, and the resulting client connection + can be passed as the tunnel argument. + + :param family: (optional) + The address family to use when creating the server. By default, + the address families are automatically selected based on the host. + :param flags: (optional) + The flags to pass to getaddrinfo() when looking up the host + :param backlog: (optional) + The maximum number of queued connections allowed on listeners + :param sock: (optional) + A pre-existing socket to use instead of creating and binding + a new socket. When this is specified, host and port should not + be specified. + :param reuse_address: (optional) + Whether or not to reuse a local socket in the TIME_WAIT state + without waiting for its natural timeout to expire. If not + specified, this will be automatically set to `True` on UNIX. + :param reuse_port: (optional) + Whether or not to allow this socket to be bound to the same + port other existing sockets are bound to, so long as they all + set this flag when being created. If not specified, the + default is to not allow this. This option is not supported + on Windows or Python versions prior to 3.4.4. + :param acceptor: (optional) + A `callable` or coroutine which will be called when the + SSH handshake completes on an accepted connection, taking + the :class:`SSHServerConnection` as an argument. + :param error_handler: (optional) + A `callable` which will be called whenever the SSH handshake + fails on an accepted connection. It is called with the failed + :class:`SSHServerConnection` and an exception object describing + the failure. If not specified, failed handshakes result in the + connection object being silently cleaned up. + :param config: (optional) + Paths to OpenSSH server configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. By default, no OpenSSH + configuration files will be loaded. See + :ref:`SupportedServerConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when accepting SSH server connections. These + options can be specified either through this parameter or + as direct keyword arguments to this function. 
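+
+    A minimal listener sketch (the host key path, port, and handler
+    are hypothetical; the permissive demo server class skips client
+    authentication and is for demonstration only)::
+
+        import asyncio
+
+        import asyncssh
+
+        class _DemoServer(asyncssh.SSHServer):
+            def begin_auth(self, username: str) -> bool:
+                return False   # demo only: skip client auth
+
+        async def handle(process: asyncssh.SSHServerProcess) -> None:
+            process.stdout.write('Hello from AsyncSSH\n')
+            process.exit(0)
+
+        async def main() -> None:
+            server = await asyncssh.listen(
+                '', 8022, server_factory=_DemoServer,
+                server_host_keys=['ssh_host_key'],   # hypothetical
+                process_factory=handle)
+            await server.wait_closed()
+
+        asyncio.run(main())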
+ :type host: `str` + :type port: `int` + :type tunnel: :class:`SSHClientConnection` or `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type flags: flags to pass to :meth:`getaddrinfo() ` + :type backlog: `int` + :type sock: :class:`socket.socket` or `None` + :type reuse_address: `bool` + :type reuse_port: `bool` + :type acceptor: `callable` or coroutine + :type error_handler: `callable` + :type config: `list` of `str` + :type options: :class:`SSHServerConnectionOptions` + + :returns: :class:`SSHAcceptor` + + """ + + def conn_factory() -> SSHServerConnection: + """Return an SSH server connection factory""" + + return SSHServerConnection(loop, new_options, acceptor, error_handler) + + loop = asyncio.get_event_loop() + + new_options = cast(SSHServerConnectionOptions, await _run_in_executor( + loop, SSHServerConnectionOptions, options, config=config, + host=host, port=port, tunnel=tunnel, family=family, **kwargs)) + + # pylint: disable=attribute-defined-outside-init + new_options.proxy_command = None + + return await asyncio.wait_for( + _listen(new_options, loop, flags, backlog, sock, reuse_address, + reuse_port, conn_factory, 'Creating SSH listener on'), + timeout=new_options.connect_timeout) + + +@async_context_manager +async def listen_reverse(host = '', port: DefTuple[int] = (), *, + tunnel: DefTuple[_TunnelListener] = (), + family: DefTuple[int] = (), + flags: int = socket.AI_PASSIVE, backlog: int = 100, + sock: Optional[socket.socket] = None, + reuse_address: bool = False, reuse_port: bool = False, + acceptor: _AcceptHandler = None, + error_handler: _ErrorHandler = None, + config: DefTuple[ConfigPaths] = (), + options: Optional[SSHClientConnectionOptions] = None, + **kwargs: object) -> SSHAcceptor: + """Create a reverse-direction SSH listener + + This function is a coroutine which behaves similarly to :func:`listen`, + creating a listener which accepts inbound connections on the specified + host and port. However, instead of starting up an SSH server on each + inbound connection, it starts up a reverse-direction SSH client, + expecting the remote system making the connection to start up a + reverse-direction SSH server. + + Arguments to this function are the same as :func:`listen`, except + that the `options` are of type :class:`SSHClientConnectionOptions` + instead of :class:`SSHServerConnectionOptions`. + + The return value is an :class:`SSHAcceptor` which can be used to + shut down the reverse listener. + + :param host: (optional) + The hostname or address to listen on. If not specified, listeners + are created for all addresses. + :param port: (optional) + The port number to listen on. If not specified, the default + SSH port is used. + :param tunnel: (optional) + An existing SSH client connection that this new listener should + be forwarded over. If set, a remote TCP/IP listener will be + opened on this connection on the requested host and port rather + than listening directly via TCP. A string of the form + [user@]host[:port] may also be specified, in which case a + connection will first be made to that host and it will then be + used as a tunnel. + + .. note:: When specifying tunnel as a string, any config + options in the call will apply only when opening + the connection inside the tunnel. The tunnel + itself will be opened with default configuration + settings or settings in the default config file. 
+ To get more control of config settings used to + open the tunnel, :func:`connect` can be called + explicitly, and the resulting client connection + can be passed as the tunnel argument. + + :param family: (optional) + The address family to use when creating the server. By default, + the address families are automatically selected based on the host. + :param flags: (optional) + The flags to pass to getaddrinfo() when looking up the host + :param backlog: (optional) + The maximum number of queued connections allowed on listeners + :param sock: (optional) + A pre-existing socket to use instead of creating and binding + a new socket. When this is specified, host and port should not + be specified. + :param reuse_address: (optional) + Whether or not to reuse a local socket in the TIME_WAIT state + without waiting for its natural timeout to expire. If not + specified, this will be automatically set to `True` on UNIX. + :param reuse_port: (optional) + Whether or not to allow this socket to be bound to the same + port other existing sockets are bound to, so long as they all + set this flag when being created. If not specified, the + default is to not allow this. This option is not supported + on Windows or Python versions prior to 3.4.4. + :param acceptor: (optional) + A `callable` or coroutine which will be called when the + SSH handshake completes on an accepted connection, taking + the :class:`SSHClientConnection` as an argument. + :param error_handler: (optional) + A `callable` which will be called whenever the SSH handshake + fails on an accepted connection. It is called with the failed + :class:`SSHClientConnection` and an exception object describing + the failure. If not specified, failed handshakes result in the + connection object being silently cleaned up. + :param config: (optional) + Paths to OpenSSH client configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. If no paths are specified and + no config paths were set when constructing the `options` + argument (if any), an attempt will be made to load the + configuration from the file :file:`.ssh/config`. If this + argument is explicitly set to `None`, no new configuration + files will be loaded, but any configuration loaded when + constructing the `options` argument will still apply. See + :ref:`SupportedClientConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when starting reverse-direction SSH clients. + These options can be specified either through this parameter + or as direct keyword arguments to this function. 
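+
+    A minimal sketch (the port, command, and username are
+    hypothetical; known_hosts=None disables host key checking and is
+    for demonstration only)::
+
+        import asyncio
+
+        import asyncssh
+
+        async def handle_conn(conn: asyncssh.SSHClientConnection) -> None:
+            # Runs once a reverse-direction handshake completes
+            result = await conn.run('hostname', check=True)
+            print('Connected to', result.stdout.strip())
+
+        async def main() -> None:
+            server = await asyncssh.listen_reverse(
+                '', 8022, acceptor=handle_conn,
+                known_hosts=None, username='demo')
+            await server.wait_closed()
+
+        asyncio.run(main())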
+ :type host: `str` + :type port: `int` + :type tunnel: :class:`SSHClientConnection` or `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type flags: flags to pass to :meth:`getaddrinfo() ` + :type backlog: `int` + :type sock: :class:`socket.socket` or `None` + :type reuse_address: `bool` + :type reuse_port: `bool` + :type acceptor: `callable` or coroutine + :type error_handler: `callable` + :type config: `list` of `str` + :type options: :class:`SSHClientConnectionOptions` + + :returns: :class:`SSHAcceptor` + + """ + + def conn_factory() -> SSHClientConnection: + """Return an SSH client connection factory""" + + return SSHClientConnection(loop, new_options, acceptor, error_handler) + + loop = asyncio.get_event_loop() + + new_options = cast(SSHClientConnectionOptions, await _run_in_executor( + loop, SSHClientConnectionOptions, options, config=config, + host=host, port=port, tunnel=tunnel, family=family, **kwargs)) + + # pylint: disable=attribute-defined-outside-init + new_options.proxy_command = None + + return await asyncio.wait_for( + _listen(new_options, loop, flags, backlog, sock, + reuse_address, reuse_port, conn_factory, + 'Creating reverse direction SSH listener on'), + timeout=new_options.connect_timeout) + + +async def create_connection(client_factory: _ClientFactory, + host = '', port: DefTuple[int] = (), + **kwargs: object) -> \ + Tuple[SSHClientConnection, SSHClient]: + """Create an SSH client connection + + This is a coroutine which wraps around :func:`connect`, providing + backward compatibility with older AsyncSSH releases. The only + differences are that the `client_factory` argument is the first + positional argument in this call rather than being a keyword argument + or specified via an :class:`SSHClientConnectionOptions` object and + the return value is a tuple of an :class:`SSHClientConnection` and + :class:`SSHClient` rather than just the connection, mirroring + :meth:`asyncio.AbstractEventLoop.create_connection`. + + :returns: An :class:`SSHClientConnection` and :class:`SSHClient` + + """ + + conn = await connect(host, port, client_factory=client_factory, + **kwargs) # type: ignore + + return conn, cast(SSHClient, conn.get_owner()) + + +@async_context_manager +async def create_server(server_factory: _ServerFactory, + host = '', port: DefTuple[int] = (), + **kwargs: object) -> SSHAcceptor: + """Create an SSH server + + This is a coroutine which wraps around :func:`listen`, providing + backward compatibility with older AsyncSSH releases. The only + difference is that the `server_factory` argument is the first + positional argument in this call rather than being a keyword argument + or specified via an :class:`SSHServerConnectionOptions` object, + mirroring :meth:`asyncio.AbstractEventLoop.create_server`. 
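+
+    A sketch of this older calling convention (the `MyServer`
+    :class:`SSHServer` subclass and the key path here are
+    hypothetical)::
+
+        acceptor = await asyncssh.create_server(
+            MyServer, '', 8022, server_host_keys=['ssh_host_key'])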
+ + """ + + return await listen(host, port, server_factory=server_factory, + **kwargs) # type: ignore + + +async def get_server_host_key( + host = '', port: DefTuple[int] = (), *, + tunnel: DefTuple[_TunnelConnector] = (), + proxy_command: DefTuple[str] = (), family: DefTuple[int] = (), + flags: int = 0, local_addr: DefTuple[HostPort] = (), + sock: Optional[socket.socket] = None, + client_version: DefTuple[BytesOrStr] = (), + kex_algs: _AlgsArg = (), server_host_key_algs: _AlgsArg = (), + config: DefTuple[ConfigPaths] = (), + options: Optional[SSHClientConnectionOptions] = None) \ + -> Optional[SSHKey]: + """Retrieve an SSH server's host key + + This is a coroutine which can be run to connect to an SSH server and + return the server host key presented during the SSH handshake. + + A list of server host key algorithms can be provided to specify + which host key types the server is allowed to choose from. If the + key exchange is successful, the server host key sent during the + handshake is returned. + + .. note:: Not all key exchange methods involve the server + presenting a host key. If something like GSS key + exchange is used without a server host key, this + method may return `None` even when the handshake + completes. + + :param host: (optional) + The hostname or address to connect to + :param port: (optional) + The port number to connect to. If not specified, the default + SSH port is used. + :param tunnel: (optional) + An existing SSH client connection that this new connection should + be tunneled over. If set, a direct TCP/IP tunnel will be opened + over this connection to the requested host and port rather than + connecting directly via TCP. A string of the form + [user@]host[:port] may also be specified, in which case a + connection will first be made to that host and it will then be + used as a tunnel. + + .. note:: When specifying tunnel as a string, any config + options in the call will apply only when opening + the connection inside the tunnel. The tunnel + itself will be opened with default configuration + settings or settings in the default config file. + To get more control of config settings used to + open the tunnel, :func:`connect` can be called + explicitly, and the resulting client connection + can be passed as the tunnel argument. + + :param proxy_command: (optional) + A string or list of strings specifying a command and arguments + to run to make a connection to the SSH server. Data will be + forwarded to this process over stdin/stdout instead of opening a + TCP connection. If specified as a string, standard shell quoting + will be applied when splitting the command and its arguments. + :param family: (optional) + The address family to use when creating the socket. By default, + the address family is automatically selected based on the host. + :param flags: (optional) + The flags to pass to getaddrinfo() when looking up the host address + :param local_addr: (optional) + The host and port to bind the socket to before connecting + :param sock: (optional) + An existing already-connected socket to run SSH over, instead of + opening up a new connection. When this is specified, none of + host, port, family, flags, or local_addr should be specified. + :param client_version: (optional) + An ASCII string to advertise to the SSH server as the version of + this client, defaulting to `'AsyncSSH'` and its version number.
+ :param kex_algs: (optional) + A list of allowed key exchange algorithms in the SSH handshake, + taken from :ref:`key exchange algorithms <KexAlgs>` + :param server_host_key_algs: (optional) + A list of server host key algorithms to allow during the SSH + handshake, taken from :ref:`server host key algorithms + <PublicKeyAlgs>`. + :param config: (optional) + Paths to OpenSSH client configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. If no paths are specified and + no config paths were set when constructing the `options` + argument (if any), an attempt will be made to load the + configuration from the file :file:`.ssh/config`. If this + argument is explicitly set to `None`, no new configuration + files will be loaded, but any configuration loaded when + constructing the `options` argument will still apply. See + :ref:`SupportedClientConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when establishing the SSH client connection used + to retrieve the server host key. These options can be specified + either through this parameter or as direct keyword arguments to + this function. + :type host: `str` + :type port: `int` + :type tunnel: :class:`SSHClientConnection` or `str` + :type proxy_command: `str` or `list` of `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type flags: flags to pass to :meth:`getaddrinfo() <socket.getaddrinfo>` + :type local_addr: tuple of `str` and `int` + :type sock: :class:`socket.socket` or `None` + :type client_version: `str` + :type kex_algs: `str` or `list` of `str` + :type server_host_key_algs: `str` or `list` of `str` + :type config: `list` of `str` + :type options: :class:`SSHClientConnectionOptions` + + :returns: An :class:`SSHKey` public key or `None` + + """ + + def conn_factory() -> SSHClientConnection: + """Return an SSH client connection factory""" + + return SSHClientConnection(loop, new_options, wait='kex') + + loop = asyncio.get_event_loop() + + new_options = cast(SSHClientConnectionOptions, await _run_in_executor( + loop, SSHClientConnectionOptions, options, config=config, + host=host, port=port, tunnel=tunnel, proxy_command=proxy_command, + family=family, local_addr=local_addr, known_hosts=None, + server_host_key_algs=server_host_key_algs, x509_trusted_certs=None, + x509_trusted_cert_paths=None, x509_purposes='any', gss_host=None, + kex_algs=kex_algs, client_version=client_version)) + + conn = await asyncio.wait_for( + _connect(new_options, loop, flags, sock, conn_factory, + 'Fetching server host key from'), + timeout=new_options.connect_timeout) + + server_host_key = conn.get_server_host_key() + + conn.abort() + + await conn.wait_closed() + + return server_host_key + + +async def get_server_auth_methods( + host = '', port: DefTuple[int] = (), username: DefTuple[str] = (), *, + tunnel: DefTuple[_TunnelConnector] = (), + proxy_command: DefTuple[str] = (), family: DefTuple[int] = (), + flags: int = 0, local_addr: DefTuple[HostPort] = (), + sock: Optional[socket.socket] = None, + client_version: DefTuple[BytesOrStr] = (), + kex_algs: _AlgsArg = (), server_host_key_algs: _AlgsArg = (), + config: DefTuple[ConfigPaths] = (), + options: Optional[SSHClientConnectionOptions] = None) -> Sequence[str]: + """Retrieve an SSH server's allowed auth methods + + This is a coroutine which can be run to connect to an SSH server and + return the auth methods
available to authenticate to it. + + .. note:: The key exchange with the server must complete + successfully before the list of available auth + methods can be returned, so be sure to specify any + arguments needed to complete the key exchange. + Also, auth methods may vary by user, so you may + want to specify the specific user you would like + to get auth methods for. + + :param host: (optional) + The hostname or address to connect to + :param port: (optional) + The port number to connect to. If not specified, the default + SSH port is used. + :param username: (optional) + Username to authenticate as on the server. If not specified, + the currently logged in user on the local machine will be used. + :param tunnel: (optional) + An existing SSH client connection that this new connection should + be tunneled over. If set, a direct TCP/IP tunnel will be opened + over this connection to the requested host and port rather than + connecting directly via TCP. A string of the form + [user@]host[:port] may also be specified, in which case a + connection will first be made to that host and it will then be + used as a tunnel. + + .. note:: When specifying tunnel as a string, any config + options in the call will apply only when opening + the connection inside the tunnel. The tunnel + itself will be opened with default configuration + settings or settings in the default config file. + To get more control of config settings used to + open the tunnel, :func:`connect` can be called + explicitly, and the resulting client connection + can be passed as the tunnel argument. + + :param proxy_command: (optional) + A string or list of strings specifying a command and arguments + to run to make a connection to the SSH server. Data will be + forwarded to this process over stdin/stdout instead of opening a + TCP connection. If specified as a string, standard shell quoting + will be applied when splitting the command and its arguments. + :param family: (optional) + The address family to use when creating the socket. By default, + the address family is automatically selected based on the host. + :param flags: (optional) + The flags to pass to getaddrinfo() when looking up the host address + :param local_addr: (optional) + The host and port to bind the socket to before connecting + :param sock: (optional) + An existing already-connected socket to run SSH over, instead of + opening up a new connection. When this is specified, none of + host, port, family, flags, or local_addr should be specified. + :param client_version: (optional) + An ASCII string to advertise to the SSH server as the version of + this client, defaulting to `'AsyncSSH'` and its version number. + :param kex_algs: (optional) + A list of allowed key exchange algorithms in the SSH handshake, + taken from :ref:`key exchange algorithms <KexAlgs>` + :param server_host_key_algs: (optional) + A list of server host key algorithms to allow during the SSH + handshake, taken from :ref:`server host key algorithms + <PublicKeyAlgs>`. + :param config: (optional) + Paths to OpenSSH client configuration files to load. This + configuration will be used as a fallback to override the + defaults for settings which are not explicitly specified using + AsyncSSH's configuration options. If no paths are specified and + no config paths were set when constructing the `options` + argument (if any), an attempt will be made to load the + configuration from the file :file:`.ssh/config`.
If this + argument is explicitly set to `None`, no new configuration + files will be loaded, but any configuration loaded when + constructing the `options` argument will still apply. See + :ref:`SupportedClientConfigOptions` for details on what + configuration options are currently supported. + :param options: (optional) + Options to use when establishing the SSH client connection used + to retrieve the server auth methods. These options can be specified + either through this parameter or as direct keyword arguments to + this function. + :type host: `str` + :type port: `int` + :type tunnel: :class:`SSHClientConnection` or `str` + :type proxy_command: `str` or `list` of `str` + :type family: `socket.AF_UNSPEC`, `socket.AF_INET`, or `socket.AF_INET6` + :type flags: flags to pass to :meth:`getaddrinfo() <socket.getaddrinfo>` + :type local_addr: tuple of `str` and `int` + :type sock: :class:`socket.socket` or `None` + :type client_version: `str` + :type kex_algs: `str` or `list` of `str` + :type server_host_key_algs: `str` or `list` of `str` + :type config: `list` of `str` + :type options: :class:`SSHClientConnectionOptions` + + :returns: a `list` of `str` + + """ + + def conn_factory() -> SSHClientConnection: + """Return an SSH client connection factory""" + + return SSHClientConnection(loop, new_options, wait='auth_methods') + + loop = asyncio.get_event_loop() + + new_options = cast(SSHClientConnectionOptions, await _run_in_executor( + loop, SSHClientConnectionOptions, options, config=config, + host=host, port=port, username=username, tunnel=tunnel, + proxy_command=proxy_command, family=family, local_addr=local_addr, + known_hosts=None, server_host_key_algs=server_host_key_algs, + x509_trusted_certs=None, x509_trusted_cert_paths=None, + x509_purposes='any', gss_host=None, kex_algs=kex_algs, + client_version=client_version)) + + conn = await asyncio.wait_for( + _connect(new_options, loop, flags, sock, conn_factory, + 'Fetching server auth methods from'), + timeout=new_options.connect_timeout) + + server_auth_methods = conn.get_server_auth_methods() + + conn.abort() + + await conn.wait_closed() + + return server_auth_methods diff --git a/venv/lib/python3.12/site-packages/asyncssh/constants.py b/venv/lib/python3.12/site-packages/asyncssh/constants.py new file mode 100644 index 0000000..bbed4fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/constants.py @@ -0,0 +1,363 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others.
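# A minimal usage sketch (illustrative, not part of this commit) for the
# two probing helpers above; the host name and username below are
# placeholders. Neither call authenticates, so no credentials are needed.
import asyncio
import asyncssh

async def probe(host: str = 'host.example.com') -> None:
    # May be None for key exchange methods that present no host key
    key = await asyncssh.get_server_host_key(host)
    if key:
        print('Host key algorithm:', key.get_algorithm())

    # Auth methods can vary by user, so pass the one you care about
    methods = await asyncssh.get_server_auth_methods(host, username='user')
    print('Allowed auth methods:', methods)

# asyncio.run(probe())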
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH constants""" + +# Default language for error messages +DEFAULT_LANG = 'en-US' + +# Default SSH listening port +DEFAULT_PORT = 22 + +# SSH message codes +MSG_DISCONNECT = 1 +MSG_IGNORE = 2 +MSG_UNIMPLEMENTED = 3 +MSG_DEBUG = 4 +MSG_SERVICE_REQUEST = 5 +MSG_SERVICE_ACCEPT = 6 +MSG_EXT_INFO = 7 + +MSG_KEXINIT = 20 +MSG_NEWKEYS = 21 + +MSG_KEX_FIRST = 30 +MSG_KEX_LAST = 49 + +MSG_USERAUTH_REQUEST = 50 +MSG_USERAUTH_FAILURE = 51 +MSG_USERAUTH_SUCCESS = 52 +MSG_USERAUTH_BANNER = 53 + +MSG_USERAUTH_FIRST = 60 +MSG_USERAUTH_LAST = 79 + +MSG_GLOBAL_REQUEST = 80 +MSG_REQUEST_SUCCESS = 81 +MSG_REQUEST_FAILURE = 82 + +MSG_CHANNEL_OPEN = 90 +MSG_CHANNEL_OPEN_CONFIRMATION = 91 +MSG_CHANNEL_OPEN_FAILURE = 92 + +MSG_CHANNEL_WINDOW_ADJUST = 93 +MSG_CHANNEL_DATA = 94 +MSG_CHANNEL_EXTENDED_DATA = 95 +MSG_CHANNEL_EOF = 96 +MSG_CHANNEL_CLOSE = 97 +MSG_CHANNEL_REQUEST = 98 +MSG_CHANNEL_SUCCESS = 99 +MSG_CHANNEL_FAILURE = 100 + +# Messages 90-92 are excluded here as they relate to opening a new channel +MSG_CHANNEL_FIRST = 93 +MSG_CHANNEL_LAST = 127 + +# SSH disconnect reason codes +DISC_HOST_NOT_ALLOWED_TO_CONNECT = 1 +DISC_PROTOCOL_ERROR = 2 +DISC_KEY_EXCHANGE_FAILED = 3 +DISC_RESERVED = 4 +DISC_MAC_ERROR = 5 +DISC_COMPRESSION_ERROR = 6 +DISC_SERVICE_NOT_AVAILABLE = 7 +DISC_PROTOCOL_VERSION_NOT_SUPPORTED = 8 +DISC_HOST_KEY_NOT_VERIFIABLE = 9 +DISC_CONNECTION_LOST = 10 +DISC_BY_APPLICATION = 11 +DISC_TOO_MANY_CONNECTIONS = 12 +DISC_AUTH_CANCELLED_BY_USER = 13 +DISC_NO_MORE_AUTH_METHODS_AVAILABLE = 14 +DISC_ILLEGAL_USER_NAME = 15 + +DISC_HOST_KEY_NOT_VERIFYABLE = 9 # Error in naming, left here to not + # break backward compatibility + +# SSH channel open failure reason codes +OPEN_ADMINISTRATIVELY_PROHIBITED = 1 +OPEN_CONNECT_FAILED = 2 +OPEN_UNKNOWN_CHANNEL_TYPE = 3 +OPEN_RESOURCE_SHORTAGE = 4 + +# Internal failure reason codes +OPEN_REQUEST_X11_FORWARDING_FAILED = 0xfffffffd +OPEN_REQUEST_PTY_FAILED = 0xfffffffe +OPEN_REQUEST_SESSION_FAILED = 0xffffffff + +# SFTPv3-v5 packet types +FXP_INIT = 1 +FXP_VERSION = 2 +FXP_OPEN = 3 +FXP_CLOSE = 4 +FXP_READ = 5 +FXP_WRITE = 6 +FXP_LSTAT = 7 +FXP_FSTAT = 8 +FXP_SETSTAT = 9 +FXP_FSETSTAT = 10 +FXP_OPENDIR = 11 +FXP_READDIR = 12 +FXP_REMOVE = 13 +FXP_MKDIR = 14 +FXP_RMDIR = 15 +FXP_REALPATH = 16 +FXP_STAT = 17 +FXP_RENAME = 18 +FXP_READLINK = 19 +FXP_SYMLINK = 20 +FXP_STATUS = 101 +FXP_HANDLE = 102 +FXP_DATA = 103 +FXP_NAME = 104 +FXP_ATTRS = 105 +FXP_EXTENDED = 200 +FXP_EXTENDED_REPLY = 201 + +# SFTPv6 packet types +FXP_LINK = 21 +FXP_BLOCK = 22 +FXP_UNBLOCK = 23 + +# SFTPv3 open flags +FXF_READ = 0x00000001 +FXF_WRITE = 0x00000002 +FXF_APPEND = 0x00000004 +FXF_CREAT = 0x00000008 +FXF_TRUNC = 0x00000010 +FXF_EXCL = 0x00000020 + +# SFTPv4 open flags +FXF_TEXT = 0x00000040 + +# SFTPv5 open flags +FXF_ACCESS_DISPOSITION = 0x00000007 +FXF_CREATE_NEW = 0x00000000 +FXF_CREATE_TRUNCATE = 0x00000001 +FXF_OPEN_EXISTING = 
0x00000002 +FXF_OPEN_OR_CREATE = 0x00000003 +FXF_TRUNCATE_EXISTING = 0x00000004 +FXF_APPEND_DATA = 0x00000008 +FXF_APPEND_DATA_ATOMIC = 0x00000010 +FXF_TEXT_MODE = 0x00000020 +FXF_BLOCK_READ = 0x00000040 +FXF_BLOCK_WRITE = 0x00000080 +FXF_BLOCK_DELETE = 0x00000100 + +# SFTPv6 open flags +FXF_BLOCK_ADVISORY = 0x00000200 +FXF_NOFOLLOW = 0x00000400 +FXF_DELETE_ON_CLOSE = 0x00000800 +FXF_ACCESS_AUDIT_ALARM_INFO = 0x00001000 +FXF_ACCESS_BACKUP = 0x00002000 +FXF_BACKUP_STREAM = 0x00004000 +FXF_OVERRIDE_OWNER = 0x00008000 + +# SFTPv5-v6 ACE mask values used in desired-access +ACE4_READ_DATA = 0x00000001 +ACE4_WRITE_DATA = 0x00000002 +ACE4_APPEND_DATA = 0x00000004 +ACE4_READ_ATTRIBUTES = 0x00000080 +ACE4_WRITE_ATTRIBUTES = 0x00000100 + +# SFTPv3 attribute flags +FILEXFER_ATTR_SIZE = 0x00000001 +FILEXFER_ATTR_UIDGID = 0x00000002 +FILEXFER_ATTR_PERMISSIONS = 0x00000004 +FILEXFER_ATTR_ACMODTIME = 0x00000008 +FILEXFER_ATTR_EXTENDED = 0x80000000 +FILEXFER_ATTR_DEFINED_V3 = 0x8000000f + +# SFTPv4 attribute flags +FILEXFER_ATTR_ACCESSTIME = 0x00000008 +FILEXFER_ATTR_CREATETIME = 0x00000010 +FILEXFER_ATTR_MODIFYTIME = 0x00000020 +FILEXFER_ATTR_ACL = 0x00000040 +FILEXFER_ATTR_OWNERGROUP = 0x00000080 +FILEXFER_ATTR_SUBSECOND_TIMES = 0x00000100 +FILEXFER_ATTR_DEFINED_V4 = 0x800001fd + +# SFTPv5 attribute flags +FILEXFER_ATTR_BITS = 0x00000200 +FILEXFER_ATTR_DEFINED_V5 = 0x800003fd + +# SFTPv6 attribute flags +FILEXFER_ATTR_ALLOCATION_SIZE = 0x00000400 +FILEXFER_ATTR_TEXT_HINT = 0x00000800 +FILEXFER_ATTR_MIME_TYPE = 0x00001000 +FILEXFER_ATTR_LINK_COUNT = 0x00002000 +FILEXFER_ATTR_UNTRANSLATED_NAME = 0x00004000 +FILEXFER_ATTR_CTIME = 0x00008000 +FILEXFER_ATTR_DEFINED_V6 = 0x8000fffd + +# SFTPv4 file types +FILEXFER_TYPE_REGULAR = 1 +FILEXFER_TYPE_DIRECTORY = 2 +FILEXFER_TYPE_SYMLINK = 3 +FILEXFER_TYPE_SPECIAL = 4 +FILEXFER_TYPE_UNKNOWN = 5 + +# SFTPv5 file types +FILEXFER_TYPE_SOCKET = 6 +FILEXFER_TYPE_CHAR_DEVICE = 7 +FILEXFER_TYPE_BLOCK_DEVICE = 8 +FILEXFER_TYPE_FIFO = 9 + +# SFTPv5 attrib bits +FILEXFER_ATTR_BITS_READONLY = 0x00000001 +FILEXFER_ATTR_BITS_SYSTEM = 0x00000002 +FILEXFER_ATTR_BITS_HIDDEN = 0x00000004 +FILEXFER_ATTR_BITS_CASE_INSENSITIVE = 0x00000008 +FILEXFER_ATTR_BITS_ARCHIVE = 0x00000010 +FILEXFER_ATTR_BITS_ENCRYPTED = 0x00000020 +FILEXFER_ATTR_BITS_COMPRESSED = 0x00000040 +FILEXFER_ATTR_BITS_SPARSE = 0x00000080 +FILEXFER_ATTR_BITS_APPEND_ONLY = 0x00000100 +FILEXFER_ATTR_BITS_IMMUTABLE = 0x00000200 +FILEXFER_ATTR_BITS_SYNC = 0x00000400 + +# SFTPv6 attrib bits +FILEXFER_ATTR_BITS_TRANSLATION_ERR = 0x00000800 + +# SFTPv6 text hint flags +FILEXFER_ATTR_KNOWN_TEXT = 0 +FILEXFER_ATTR_GUESSED_TEXT = 1 +FILEXFER_ATTR_KNOWN_BINARY = 2 +FILEXFER_ATTR_GUESSED_BINARY = 3 + +# SFTPv5 rename flags +FXR_OVERWRITE = 0x00000001 +FXR_ATOMIC = 0x00000002 +FXR_NATIVE = 0x00000004 + +# SFTPv6 realpath control byte +FXRP_NO_CHECK = 1 +FXRP_STAT_IF_EXISTS = 2 +FXRP_STAT_ALWAYS = 3 + +# OpenSSH statvfs attribute flags +FXE_STATVFS_ST_RDONLY = 0x1 +FXE_STATVFS_ST_NOSUID = 0x2 + +# SFTPv3 error codes +FX_OK = 0 +FX_EOF = 1 +FX_NO_SUCH_FILE = 2 +FX_PERMISSION_DENIED = 3 +FX_FAILURE = 4 +FX_BAD_MESSAGE = 5 +FX_NO_CONNECTION = 6 +FX_CONNECTION_LOST = 7 +FX_OP_UNSUPPORTED = 8 +FX_V3_END = FX_OP_UNSUPPORTED + +# SFTPv4 error codes +FX_INVALID_HANDLE = 9 +FX_NO_SUCH_PATH = 10 +FX_FILE_ALREADY_EXISTS = 11 +FX_WRITE_PROTECT = 12 +FX_NO_MEDIA = 13 +FX_V4_END = FX_NO_MEDIA + +# SFTPv5 error codes +FX_NO_SPACE_ON_FILESYSTEM = 14 +FX_QUOTA_EXCEEDED = 15 +FX_UNKNOWN_PRINCIPAL = 16 +FX_LOCK_CONFLICT = 17 +FX_V5_END = 
FX_LOCK_CONFLICT + +# SFTPv6 error codes +FX_DIR_NOT_EMPTY = 18 +FX_NOT_A_DIRECTORY = 19 +FX_INVALID_FILENAME = 20 +FX_LINK_LOOP = 21 +FX_CANNOT_DELETE = 22 +FX_INVALID_PARAMETER = 23 +FX_FILE_IS_A_DIRECTORY = 24 +FX_BYTE_RANGE_LOCK_CONFLICT = 25 +FX_BYTE_RANGE_LOCK_REFUSED = 26 +FX_DELETE_PENDING = 27 +FX_FILE_CORRUPT = 28 +FX_OWNER_INVALID = 29 +FX_GROUP_INVALID = 30 +FX_NO_MATCHING_BYTE_RANGE_LOCK = 31 +FX_V6_END = FX_NO_MATCHING_BYTE_RANGE_LOCK + +# SSH channel data type codes +EXTENDED_DATA_STDERR = 1 + +# SSH pty mode opcodes +PTY_OP_END = 0 +PTY_VINTR = 1 +PTY_VQUIT = 2 +PTY_VERASE = 3 +PTY_VKILL = 4 +PTY_VEOF = 5 +PTY_VEOL = 6 +PTY_VEOL2 = 7 +PTY_VSTART = 8 +PTY_VSTOP = 9 +PTY_VSUSP = 10 +PTY_VDSUSP = 11 +PTY_VREPRINT = 12 +PTY_WERASE = 13 +PTY_VLNEXT = 14 +PTY_VFLUSH = 15 +PTY_VSWTCH = 16 +PTY_VSTATUS = 17 +PTY_VDISCARD = 18 +PTY_IGNPAR = 30 +PTY_PARMRK = 31 +PTY_INPCK = 32 +PTY_ISTRIP = 33 +PTY_INLCR = 34 +PTY_IGNCR = 35 +PTY_ICRNL = 36 +PTY_IUCLC = 37 +PTY_IXON = 38 +PTY_IXANY = 39 +PTY_IXOFF = 40 +PTY_IMAXBEL = 41 +PTY_IUTF8 = 42 +PTY_ISIG = 50 +PTY_ICANON = 51 +PTY_XCASE = 52 +PTY_ECHO = 53 +PTY_ECHOE = 54 +PTY_ECHOK = 55 +PTY_ECHONL = 56 +PTY_NOFLSH = 57 +PTY_TOSTOP = 58 +PTY_IEXTEN = 59 +PTY_ECHOCTL = 60 +PTY_ECHOKE = 61 +PTY_PENDIN = 62 +PTY_OPOST = 70 +PTY_OLCUC = 71 +PTY_ONLCR = 72 +PTY_OCRNL = 73 +PTY_ONOCR = 74 +PTY_ONLRET = 75 +PTY_CS7 = 90 +PTY_CS8 = 91 +PTY_PARENB = 92 +PTY_PARODD = 93 +PTY_OP_ISPEED = 128 +PTY_OP_OSPEED = 129 +PTY_OP_RESERVED = 160 diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__init__.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/__init__.py new file mode 100644 index 0000000..ef6c8e5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/__init__.py @@ -0,0 +1,61 @@ +# Copyright (c) 2014-2021 by Ron Frederick and others. 
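# A minimal usage sketch (illustrative, not part of this commit): since
# asyncssh.constants is a flat namespace of integer codes, a reverse
# lookup table for logging can be built with one dict comprehension.
# Shown here for the DISC_* disconnect reason codes defined above; the
# legacy DISC_HOST_KEY_NOT_VERIFYABLE alias is skipped so that code 9
# maps to the correctly spelled name.
from asyncssh import constants

DISC_NAMES = {
    value: name for name, value in vars(constants).items()
    if name.startswith('DISC_') and name != 'DISC_HOST_KEY_NOT_VERIFYABLE'
}

print(DISC_NAMES[constants.DISC_BY_APPLICATION])   # 'DISC_BY_APPLICATION'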
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim for accessing cryptographic primitives needed by asyncssh""" + +from .cipher import BasicCipher, GCMCipher, register_cipher, get_cipher_params + +from .dsa import DSAPrivateKey, DSAPublicKey + +from .dh import DH + +from .ec import ECDSAPrivateKey, ECDSAPublicKey, ECDH + +from .ed import ed25519_available, ed448_available +from .ed import curve25519_available, curve448_available +from .ed import EdDSAPrivateKey, EdDSAPublicKey, Curve25519DH, Curve448DH + +from .ec_params import lookup_ec_curve_by_params + +from .kdf import pbkdf2_hmac + +from .misc import CryptoKey, PyCAKey + +from .rsa import RSAPrivateKey, RSAPublicKey + +from .sntrup import sntrup761_available +from .sntrup import sntrup761_pubkey_bytes, sntrup761_ciphertext_bytes +from .sntrup import sntrup761_keypair, sntrup761_encaps, sntrup761_decaps + +# Import chacha20-poly1305 cipher if available +from .chacha import ChachaCipher, chacha_available + +# Import umac cryptographic hash if available +try: + from .umac import umac32, umac64, umac96, umac128 +except (ImportError, AttributeError, OSError): # pragma: no cover + pass + +# Import X.509 certificate support if available +try: + from .x509 import X509Certificate, X509Name, X509NamePattern + from .x509 import generate_x509_certificate, import_x509_certificate +except ImportError: # pragma: no cover + pass diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f074ec4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/chacha.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/chacha.cpython-312.pyc new file mode 100644 index 0000000..9e34a54 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/chacha.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/cipher.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/cipher.cpython-312.pyc new file mode 100644 index 0000000..df56057 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/cipher.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/dh.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/dh.cpython-312.pyc new file mode 100644 index 0000000..35abb64 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/dh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/dsa.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/dsa.cpython-312.pyc new file mode 100644 index 0000000..0df08b7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/dsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ec.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ec.cpython-312.pyc new file mode 100644 index 0000000..27c9405 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ec_params.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ec_params.cpython-312.pyc new file mode 100644 index 0000000..9879aad Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ec_params.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ed.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ed.cpython-312.pyc new file mode 100644 index 0000000..cf36cd1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/ed.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/kdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/kdf.cpython-312.pyc new file mode 100644 index 0000000..bc6e075 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/kdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/misc.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/misc.cpython-312.pyc new file mode 100644 index 0000000..4b2b095 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/misc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 0000000..7bd9b93 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/rsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/sntrup.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/sntrup.cpython-312.pyc new file mode 100644 index 0000000..2105720 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/sntrup.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/umac.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/umac.cpython-312.pyc new file mode 100644 index 0000000..fcd6e33 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/umac.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/x509.cpython-312.pyc b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/x509.cpython-312.pyc new file mode 100644 index 0000000..bf0fc93 Binary files /dev/null and b/venv/lib/python3.12/site-packages/asyncssh/crypto/__pycache__/x509.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/chacha.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/chacha.py new file mode 100644 index 0000000..b112f97 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/asyncssh/crypto/chacha.py @@ -0,0 +1,162 @@ +# Copyright (c) 2015-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Chacha20-Poly1305 symmetric encryption handler""" + +from ctypes import c_ulonglong, create_string_buffer +from typing import Optional, Tuple + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends.openssl import backend +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import ChaCha20 +from cryptography.hazmat.primitives.poly1305 import Poly1305 + +from .cipher import register_cipher + + +if backend.poly1305_supported(): + _CTR_0 = (0).to_bytes(8, 'little') + _CTR_1 = (1).to_bytes(8, 'little') + + _POLY1305_KEYBYTES = 32 + + def chacha20(key: bytes, data: bytes, nonce: bytes, ctr: int) -> bytes: + """Encrypt/decrypt a block of data with the ChaCha20 cipher""" + + return Cipher(ChaCha20(key, (_CTR_1 if ctr else _CTR_0) + nonce), + mode=None).encryptor().update(data) + + def poly1305_key(key: bytes, nonce: bytes) -> bytes: + """Derive a Poly1305 key""" + + return chacha20(key, _POLY1305_KEYBYTES * b'\0', nonce, 0) + + def poly1305(key: bytes, data: bytes, nonce: bytes) -> bytes: + """Compute a Poly1305 tag for a block of data""" + + return Poly1305.generate_tag(poly1305_key(key, nonce), data) + + def poly1305_verify(key: bytes, data: bytes, + nonce: bytes, tag: bytes) -> bool: + """Verify a Poly1305 tag for a block of data""" + + try: + Poly1305.verify_tag(poly1305_key(key, nonce), data, tag) + return True + except InvalidSignature: + return False + + chacha_available = True +else: # pragma: no cover + try: + from libnacl import nacl + + _chacha20 = nacl.crypto_stream_chacha20 + _chacha20_xor_ic = nacl.crypto_stream_chacha20_xor_ic + + _POLY1305_BYTES = nacl.crypto_onetimeauth_poly1305_bytes() + _POLY1305_KEYBYTES = nacl.crypto_onetimeauth_poly1305_keybytes() + + _poly1305 = nacl.crypto_onetimeauth_poly1305 + _poly1305_verify = nacl.crypto_onetimeauth_poly1305_verify + + def chacha20(key: bytes, data: bytes, nonce: bytes, ctr: int) -> bytes: + """Encrypt/decrypt a block of data with the ChaCha20 cipher""" + + datalen = len(data) + result = create_string_buffer(datalen) + ull_datalen = c_ulonglong(datalen) + ull_ctr = c_ulonglong(ctr) + + _chacha20_xor_ic(result, data, ull_datalen, nonce, ull_ctr, key) + + return result.raw + + def poly1305_key(key: bytes, nonce: bytes) -> bytes: + """Derive a Poly1305 key""" + + polykey = create_string_buffer(_POLY1305_KEYBYTES) + ull_polykeylen = c_ulonglong(_POLY1305_KEYBYTES) + + _chacha20(polykey, ull_polykeylen, nonce, key) + + return polykey.raw + + def poly1305(key: bytes, data: bytes, nonce: bytes) -> bytes: + """Compute a Poly1305 tag for a block of data""" + + tag = create_string_buffer(_POLY1305_BYTES) + ull_datalen = c_ulonglong(len(data)) + polykey = 
poly1305_key(key, nonce) + + _poly1305(tag, data, ull_datalen, polykey) + + return tag.raw + + def poly1305_verify(key: bytes, data: bytes, + nonce: bytes, tag: bytes) -> bool: + """Verify a Poly1305 tag for a block of data""" + + ull_datalen = c_ulonglong(len(data)) + polykey = poly1305_key(key, nonce) + + return _poly1305_verify(tag, data, ull_datalen, polykey) == 0 + + chacha_available = True + except (ImportError, OSError, AttributeError): + chacha_available = False + + +class ChachaCipher: + """Shim for Chacha20-Poly1305 symmetric encryption""" + + def __init__(self, key: bytes): + keylen = len(key) // 2 + self._key = key[:keylen] + self._adkey = key[keylen:] + + def encrypt_and_sign(self, header: bytes, data: bytes, + nonce: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign a block of data""" + + header = chacha20(self._adkey, header, nonce, 0) + data = chacha20(self._key, data, nonce, 1) + tag = poly1305(self._key, header + data, nonce) + + return header + data, tag + + def decrypt_header(self, header: bytes, nonce: bytes) -> bytes: + """Decrypt header data""" + + return chacha20(self._adkey, header, nonce, 0) + + def verify_and_decrypt(self, header: bytes, data: bytes, + nonce: bytes, tag: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt a block of data""" + + if poly1305_verify(self._key, header + data, nonce, tag): + return chacha20(self._key, data, nonce, 1) + else: + return None + + +if chacha_available: # pragma: no branch + register_cipher('chacha20-poly1305', 64, 0, 1) diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/cipher.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/cipher.py new file mode 100644 index 0000000..68badae --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/cipher.py @@ -0,0 +1,166 @@ +# Copyright (c) 2014-2021 by Ron Frederick and others. 
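# A minimal round-trip sketch (illustrative, not part of this commit)
# for the ChachaCipher shim defined above. Sizes follow the code: a
# 64-byte key split into two 32-byte halves, and an 8-byte nonce, which
# in the SSH protocol is the packet sequence number.
import os
from asyncssh.crypto.chacha import chacha_available, ChachaCipher

if chacha_available:
    cipher = ChachaCipher(os.urandom(64))
    nonce = (0).to_bytes(8, 'big')            # packet sequence number 0

    header, payload = b'\x00\x00\x00\x0c', b'some payload'
    packet, tag = cipher.encrypt_and_sign(header, payload, nonce)

    # A receiver decrypts the length header first, then verifies the tag
    # over the encrypted packet before decrypting the payload
    assert cipher.decrypt_header(packet[:4], nonce) == header
    assert cipher.verify_and_decrypt(packet[:4], packet[4:],
                                     nonce, tag) == payload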
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA for accessing symmetric ciphers needed by AsyncSSH""" + +from typing import Any, MutableMapping, Optional, Tuple +import warnings + +from cryptography.exceptions import InvalidTag +from cryptography.hazmat.primitives.ciphers import Cipher, CipherContext +from cryptography.hazmat.primitives.ciphers.aead import AESGCM +from cryptography.hazmat.primitives.ciphers.algorithms import AES, ARC4 +from cryptography.hazmat.primitives.ciphers.algorithms import TripleDES +from cryptography.hazmat.primitives.ciphers.modes import CBC, CTR + +with warnings.catch_warnings(): + warnings.simplefilter('ignore') + + from cryptography.hazmat.primitives.ciphers.algorithms import Blowfish + from cryptography.hazmat.primitives.ciphers.algorithms import CAST5 + from cryptography.hazmat.primitives.ciphers.algorithms import SEED + + +_CipherAlgs = Tuple[Any, Any, int] +_CipherParams = Tuple[int, int, int] + + +_GCM_MAC_SIZE = 16 + +_cipher_algs: MutableMapping[str, _CipherAlgs] = {} +_cipher_params: MutableMapping[str, _CipherParams] = {} + + +class BasicCipher: + """Shim for basic ciphers""" + + def __init__(self, cipher_name: str, key: bytes, iv: bytes): + cipher, mode, initial_bytes = _cipher_algs[cipher_name] + + self._cipher = Cipher(cipher(key), mode(iv) if mode else None) + self._initial_bytes = initial_bytes + self._encryptor: Optional[CipherContext] = None + self._decryptor: Optional[CipherContext] = None + + def encrypt(self, data: bytes) -> bytes: + """Encrypt a block of data""" + + if not self._encryptor: + self._encryptor = self._cipher.encryptor() + + if self._initial_bytes: + assert self._encryptor is not None + self._encryptor.update(self._initial_bytes * b'\0') + + assert self._encryptor is not None + return self._encryptor.update(data) + + def decrypt(self, data: bytes) -> bytes: + """Decrypt a block of data""" + + if not self._decryptor: + self._decryptor = self._cipher.decryptor() + + if self._initial_bytes: + assert self._decryptor is not None + self._decryptor.update(self._initial_bytes * b'\0') + + assert self._decryptor is not None + return self._decryptor.update(data) + + +class GCMCipher: + """Shim for GCM ciphers""" + + def __init__(self, cipher_name: str, key: bytes, iv: bytes): + self._cipher = _cipher_algs[cipher_name][0] + self._key = key + self._iv = iv + + def _update_iv(self) -> None: + """Update the IV after each encrypt/decrypt operation""" + + invocation = int.from_bytes(self._iv[4:], 'big') + invocation = (invocation + 1) & 0xffffffffffffffff + self._iv = self._iv[:4] + invocation.to_bytes(8, 'big') + + def encrypt_and_sign(self, header: bytes, + data: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign a block of data""" + + data = AESGCM(self._key).encrypt(self._iv, data, header) + + self._update_iv() + + return header + data[:-_GCM_MAC_SIZE], data[-_GCM_MAC_SIZE:] + + def verify_and_decrypt(self, header: 
bytes, data: bytes, + mac: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt a block of data""" + + try: + decrypted_data: Optional[bytes] = \ + AESGCM(self._key).decrypt(self._iv, data + mac, header) + except InvalidTag: + decrypted_data = None + + self._update_iv() + + return decrypted_data + + +def register_cipher(cipher_name: str, key_size: int, + iv_size: int, block_size: int) -> None: + """Register a symmetric cipher""" + + _cipher_params[cipher_name] = (key_size, iv_size, block_size) + + +def get_cipher_params(cipher_name: str) -> _CipherParams: + """Get parameters of a symmetric cipher""" + + return _cipher_params[cipher_name] + + +_cipher_alg_list = ( + ('aes128-cbc', AES, CBC, 0, 16, 16, 16), + ('aes192-cbc', AES, CBC, 0, 24, 16, 16), + ('aes256-cbc', AES, CBC, 0, 32, 16, 16), + ('aes128-ctr', AES, CTR, 0, 16, 16, 16), + ('aes192-ctr', AES, CTR, 0, 24, 16, 16), + ('aes256-ctr', AES, CTR, 0, 32, 16, 16), + ('aes128-gcm', None, None, 0, 16, 12, 16), + ('aes256-gcm', None, None, 0, 32, 12, 16), + ('arcfour', ARC4, None, 0, 16, 1, 1), + ('arcfour40', ARC4, None, 0, 5, 1, 1), + ('arcfour128', ARC4, None, 1536, 16, 1, 1), + ('arcfour256', ARC4, None, 1536, 32, 1, 1), + ('blowfish-cbc', Blowfish, CBC, 0, 16, 8, 8), + ('cast128-cbc', CAST5, CBC, 0, 16, 8, 8), + ('des-cbc', TripleDES, CBC, 0, 8, 8, 8), + ('des2-cbc', TripleDES, CBC, 0, 16, 8, 8), + ('des3-cbc', TripleDES, CBC, 0, 24, 8, 8), + ('seed-cbc', SEED, CBC, 0, 16, 16, 16) +) + +for _cipher_name, _cipher, _mode, _initial_bytes, \ + _key_size, _iv_size, _block_size in _cipher_alg_list: + _cipher_algs[_cipher_name] = (_cipher, _mode, _initial_bytes) + register_cipher(_cipher_name, _key_size, _iv_size, _block_size) diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/dh.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/dh.py new file mode 100644 index 0000000..52e09e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/dh.py @@ -0,0 +1,46 @@ +# Copyright (c) 2022 by Ron Frederick and others. 
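# A minimal usage sketch (illustrative, not part of this commit) for the
# cipher shims above. Key/IV sizes come from the registered parameters,
# and GCMCipher advances its IV after every operation, so sender and
# receiver each keep their own instance initialized with the same key/IV.
import os
from asyncssh.crypto.cipher import BasicCipher, GCMCipher, get_cipher_params

key_size, iv_size, _block_size = get_cipher_params('aes256-ctr')
key, iv = os.urandom(key_size), os.urandom(iv_size)
cipher = BasicCipher('aes256-ctr', key, iv)

# CTR is a stream mode, so one instance can round-trip its own output
assert cipher.decrypt(cipher.encrypt(b'attack at dawn')) == b'attack at dawn'

gcm_key, gcm_iv = os.urandom(32), os.urandom(12)
sender = GCMCipher('aes256-gcm', gcm_key, gcm_iv)
receiver = GCMCipher('aes256-gcm', gcm_key, gcm_iv)

packet, mac = sender.encrypt_and_sign(b'\x00\x00\x00\x04', b'data')
assert receiver.verify_and_decrypt(packet[:4], packet[4:], mac) == b'data'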
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA for Diffie Hellman key exchange""" + +from cryptography.hazmat.primitives.asymmetric import dh + + +class DH: + """A shim around PyCA for Diffie Hellman key exchange""" + + def __init__(self, g: int, p: int): + self._pn = dh.DHParameterNumbers(p, g) + self._priv_key = self._pn.parameters().generate_private_key() + + def get_public(self) -> int: + """Return the public key to send in the handshake""" + + pub_key = self._priv_key.public_key() + + return pub_key.public_numbers().y + + def get_shared(self, peer_public: int) -> int: + """Return the shared key from the peer's public key""" + + peer_key = dh.DHPublicNumbers(peer_public, self._pn).public_key() + shared_key = self._priv_key.exchange(peer_key) + + return int.from_bytes(shared_key, 'big') diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/dsa.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/dsa.py new file mode 100644 index 0000000..befb544 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/dsa.py @@ -0,0 +1,132 @@ +# Copyright (c) 2014-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA for DSA public and private keys""" + +from typing import Optional, cast + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives.asymmetric import dsa + +from .misc import CryptoKey, PyCAKey, hashes + + +# Short variable names are used here, matching names in the spec +# pylint: disable=invalid-name + + +class _DSAKey(CryptoKey): + """Base class for shim around PyCA for DSA keys""" + + def __init__(self, pyca_key: PyCAKey, params: dsa.DSAParameterNumbers, + pub: dsa.DSAPublicNumbers, + priv: Optional[dsa.DSAPrivateNumbers] = None): + super().__init__(pyca_key) + + self._params = params + self._pub = pub + self._priv = priv + + @property + def p(self) -> int: + """Return the DSA public modulus""" + + return self._params.p + + @property + def q(self) -> int: + """Return the DSA sub-group order""" + + return self._params.q + + @property + def g(self) -> int: + """Return the DSA generator""" + + return self._params.g + + @property + def y(self) -> int: + """Return the DSA public value""" + + return self._pub.y + + @property + def x(self) -> Optional[int]: + """Return the 
DSA private value""" + + return self._priv.x if self._priv else None + + +class DSAPrivateKey(_DSAKey): + """A shim around PyCA for DSA private keys""" + + @classmethod + def construct(cls, p: int, q: int, g: int, + y: int, x: int) -> 'DSAPrivateKey': + """Construct a DSA private key""" + + params = dsa.DSAParameterNumbers(p, q, g) + pub = dsa.DSAPublicNumbers(y, params) + priv = dsa.DSAPrivateNumbers(x, pub) + priv_key = priv.private_key() + + return cls(priv_key, params, pub, priv) + + @classmethod + def generate(cls, key_size: int) -> 'DSAPrivateKey': + """Generate a new DSA private key""" + + priv_key = dsa.generate_private_key(key_size) + priv = priv_key.private_numbers() + pub = priv.public_numbers + params = pub.parameter_numbers + + return cls(priv_key, params, pub, priv) + + def sign(self, data: bytes, hash_name: str = '') -> bytes: + """Sign a block of data""" + + priv_key = cast('dsa.DSAPrivateKey', self.pyca_key) + return priv_key.sign(data, hashes[hash_name]()) + + +class DSAPublicKey(_DSAKey): + """A shim around PyCA for DSA public keys""" + + @classmethod + def construct(cls, p: int, q: int, g: int, y: int) -> 'DSAPublicKey': + """Construct a DSA public key""" + + params = dsa.DSAParameterNumbers(p, q, g) + pub = dsa.DSAPublicNumbers(y, params) + pub_key = pub.public_key() + + return cls(pub_key, params, pub) + + def verify(self, data: bytes, sig: bytes, hash_name: str = '') -> bool: + """Verify the signature on a block of data""" + + try: + pub_key = cast('dsa.DSAPublicKey', self.pyca_key) + pub_key.verify(sig, data, hashes[hash_name]()) + return True + except InvalidSignature: + return False diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/ec.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/ec.py new file mode 100644 index 0000000..8928378 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/ec.py @@ -0,0 +1,205 @@ +# Copyright (c) 2015-2021 by Ron Frederick and others. 
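# A minimal sign/verify sketch (illustrative, not part of this commit)
# for the DSA shim above. The hash_name argument indexes the shared
# `hashes` table in crypto/misc.py; 'sha256' is assumed to be present
# there.
from asyncssh.crypto.dsa import DSAPrivateKey, DSAPublicKey

priv = DSAPrivateKey.generate(2048)
sig = priv.sign(b'message', 'sha256')

# Rebuild the public half from its numbers and check the signature
pub = DSAPublicKey.construct(priv.p, priv.q, priv.g, priv.y)
assert pub.verify(b'message', sig, 'sha256')
assert not pub.verify(b'tampered', sig, 'sha256')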
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA for elliptic curve keys and key exchange""" + +from typing import Mapping, Optional, Type, cast + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.serialization import Encoding +from cryptography.hazmat.primitives.serialization import PublicFormat + +from .misc import CryptoKey, PyCAKey, hashes + + +# Short variable names are used here, matching names in the spec +# pylint: disable=invalid-name + +_curves: Mapping[bytes, Type[ec.EllipticCurve]] = { + b'1.3.132.0.10': ec.SECP256K1, + b'nistp256': ec.SECP256R1, + b'nistp384': ec.SECP384R1, + b'nistp521': ec.SECP521R1 +} + + +class _ECKey(CryptoKey): + """Base class for shim around PyCA for EC keys""" + + def __init__(self, pyca_key: PyCAKey, curve_id: bytes, + pub: ec.EllipticCurvePublicNumbers, point: bytes, + priv: Optional[ec.EllipticCurvePrivateNumbers] = None): + super().__init__(pyca_key) + + self._curve_id = curve_id + self._pub = pub + self._point = point + self._priv = priv + + @classmethod + def lookup_curve(cls, curve_id: bytes) -> Type[ec.EllipticCurve]: + """Look up curve and hash algorithm""" + + try: + return _curves[curve_id] + except KeyError: # pragma: no cover, other curves not registered + raise ValueError('Unknown EC curve %s' % + curve_id.decode()) from None + + @property + def curve_id(self) -> bytes: + """Return the EC curve name""" + + return self._curve_id + + @property + def x(self) -> int: + """Return the EC public x coordinate""" + + return self._pub.x + + @property + def y(self) -> int: + """Return the EC public y coordinate""" + + return self._pub.y + + @property + def d(self) -> Optional[int]: + """Return the EC private value as an integer""" + + return self._priv.private_value if self._priv else None + + @property + def public_value(self) -> bytes: + """Return the EC public point value encoded as a byte string""" + + return self._point + + @property + def private_value(self) -> Optional[bytes]: + """Return the EC private value encoded as a byte string""" + + if self._priv: + keylen = (self._pub.curve.key_size + 7) // 8 + return self._priv.private_value.to_bytes(keylen, 'big') + else: + return None + + +class ECDSAPrivateKey(_ECKey): + """A shim around PyCA for ECDSA private keys""" + + @classmethod + def construct(cls, curve_id: bytes, public_value: bytes, + private_value: int) -> 'ECDSAPrivateKey': + """Construct an ECDSA private key""" + + curve = cls.lookup_curve(curve_id) + + priv_key = ec.derive_private_key(private_value, curve()) + priv = priv_key.private_numbers() + pub = priv.public_numbers + + return cls(priv_key, curve_id, pub, public_value, priv) + + @classmethod + def generate(cls, curve_id: bytes) -> 'ECDSAPrivateKey': + """Generate a new ECDSA private key""" + + curve = cls.lookup_curve(curve_id) + + priv_key = 
ec.generate_private_key(curve()) + priv = priv_key.private_numbers() + + pub_key = priv_key.public_key() + pub = pub_key.public_numbers() + + public_value = pub_key.public_bytes(Encoding.X962, + PublicFormat.UncompressedPoint) + + return cls(priv_key, curve_id, pub, public_value, priv) + + def sign(self, data: bytes, hash_name: str = '') -> bytes: + """Sign a block of data""" + + # pylint: disable=unused-argument + + priv_key = cast('ec.EllipticCurvePrivateKey', self.pyca_key) + return priv_key.sign(data, ec.ECDSA(hashes[hash_name]())) + + +class ECDSAPublicKey(_ECKey): + """A shim around PyCA for ECDSA public keys""" + + @classmethod + def construct(cls, curve_id: bytes, + public_value: bytes) -> 'ECDSAPublicKey': + """Construct an ECDSA public key""" + + curve = cls.lookup_curve(curve_id) + + pub_key = ec.EllipticCurvePublicKey.from_encoded_point(curve(), + public_value) + pub = pub_key.public_numbers() + + return cls(pub_key, curve_id, pub, public_value) + + def verify(self, data: bytes, sig: bytes, hash_name: str = '') -> bool: + """Verify the signature on a block of data""" + + try: + pub_key = cast('ec.EllipticCurvePublicKey', self.pyca_key) + pub_key.verify(sig, data, ec.ECDSA(hashes[hash_name]())) + return True + except InvalidSignature: + return False + + +class ECDH: + """A shim around PyCA for ECDH key exchange""" + + def __init__(self, curve_id: bytes): + try: + curve = _curves[curve_id] + except KeyError: # pragma: no cover, other curves not registered + raise ValueError('Unknown EC curve %s' % + curve_id.decode()) from None + + self._priv_key = ec.generate_private_key(curve()) + + def get_public(self) -> bytes: + """Return the public key to send in the handshake""" + + pub_key = self._priv_key.public_key() + + return pub_key.public_bytes(Encoding.X962, + PublicFormat.UncompressedPoint) + + def get_shared(self, peer_public: bytes) -> int: + """Return the shared key from the peer's public key""" + + peer_key = ec.EllipticCurvePublicKey.from_encoded_point( + self._priv_key.curve, peer_public) + + shared_key = self._priv_key.exchange(ec.ECDH(), peer_key) + + return int.from_bytes(shared_key, 'big') diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/ec_params.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/ec_params.py new file mode 100644 index 0000000..1fd46a8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/ec_params.py @@ -0,0 +1,87 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
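# A minimal key-agreement sketch (illustrative, not part of this commit)
# for the EC shims above: two ECDH instances exchange public points and
# derive the same shared integer, and the ECDSA classes round-trip a
# signature. 'sha256' is assumed to be in the shared hashes table.
from asyncssh.crypto.ec import ECDH, ECDSAPrivateKey, ECDSAPublicKey

alice, bob = ECDH(b'nistp256'), ECDH(b'nistp256')
assert alice.get_shared(bob.get_public()) == bob.get_shared(alice.get_public())

key = ECDSAPrivateKey.generate(b'nistp256')
sig = key.sign(b'message', 'sha256')
pub = ECDSAPublicKey.construct(b'nistp256', key.public_value)
assert pub.verify(b'message', sig, 'sha256')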
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Functions for looking up named elliptic curves by their parameters""" + +_curve_param_map = {} + +# Short variable names are used here, matching names in the spec +# pylint: disable=invalid-name + + +def register_prime_curve(curve_id: bytes, p: int, a: int, b: int, + point: bytes, n: int) -> None: + """Register an elliptic curve prime domain + + This function registers an elliptic curve prime domain by + specifying the SSH identifier for the curve and the set of + parameters describing the curve, generator point, and order. + This allows EC keys encoded with explicit parameters to be + mapped back into their SSH curve IDs. + + """ + + _curve_param_map[p, a % p, b % p, point, n] = curve_id + + +def lookup_ec_curve_by_params(p: int, a: int, b: int, + point: bytes, n: int) -> bytes: + """Look up an elliptic curve by its parameters + + This function looks up an elliptic curve by its parameters + and returns the curve's name. + + """ + + try: + return _curve_param_map[p, a % p, b % p, point, n] + except (KeyError, ValueError): + raise ValueError('Unknown elliptic curve parameters') from None + + +# pylint: disable=line-too-long + +register_prime_curve(b'nistp521', + 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151, + -3, + 1093849038073734274511112390766805569936207598951683748994586394495953116150735016013708737573759623248592132296706313309438452531591012912142327488478985984, + b'\x04\x00\xc6\x85\x8e\x06\xb7\x04\x04\xe9\xcd\x9e...') ... diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/ed.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/ed.py new file mode 100644 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/ed.py +# Copyright (c) 2019-2021 by Ron Frederick and others.
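# A minimal sketch (illustrative, not part of this commit): keys encoded
# with explicit EC domain parameters are mapped back to an SSH curve ID
# via lookup_ec_curve_by_params(); parameter sets that were never
# registered raise ValueError. The toy numbers below are assumptions,
# not a real curve.
from asyncssh.crypto.ec_params import lookup_ec_curve_by_params

try:
    lookup_ec_curve_by_params(23, 1, 1, b'\x04\x00\x01', 19)
except ValueError:
    print('not a registered named curve')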
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA and libnacl for Edwards-curve keys and key exchange""" + +import ctypes +import os +from typing import Dict, Optional, Union, cast + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends.openssl import backend +from cryptography.hazmat.primitives.asymmetric import ed25519, ed448 +from cryptography.hazmat.primitives.asymmetric import x25519, x448 +from cryptography.hazmat.primitives.serialization import Encoding +from cryptography.hazmat.primitives.serialization import PrivateFormat +from cryptography.hazmat.primitives.serialization import PublicFormat +from cryptography.hazmat.primitives.serialization import NoEncryption + +from .misc import CryptoKey, PyCAKey + + +_EdPrivateKey = Union[ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey] +_EdPublicKey = Union[ed25519.Ed25519PublicKey, ed448.Ed448PublicKey] + + +ed25519_available = backend.ed25519_supported() +ed448_available = backend.ed448_supported() +curve25519_available = backend.x25519_supported() +curve448_available = backend.x448_supported() + + +if ed25519_available or ed448_available: # pragma: no branch + class _EdDSAKey(CryptoKey): + """Base class for shim around PyCA for EdDSA keys""" + + def __init__(self, pyca_key: PyCAKey, pub: bytes, + priv: Optional[bytes] = None): + super().__init__(pyca_key) + + self._pub = pub + self._priv = priv + + @property + def public_value(self) -> bytes: + """Return the public value encoded as a byte string""" + + return self._pub + + @property + def private_value(self) -> Optional[bytes]: + """Return the private value encoded as a byte string""" + + return self._priv + + + class EdDSAPrivateKey(_EdDSAKey): + """A shim around PyCA for EdDSA private keys""" + + _priv_classes: Dict[bytes, object] = {} + + if ed25519_available: # pragma: no branch + _priv_classes[b'ed25519'] = ed25519.Ed25519PrivateKey + + if ed448_available: # pragma: no branch + _priv_classes[b'ed448'] = ed448.Ed448PrivateKey + + @classmethod + def construct(cls, curve_id: bytes, priv: bytes) -> 'EdDSAPrivateKey': + """Construct an EdDSA private key""" + + priv_cls = cast('_EdPrivateKey', cls._priv_classes[curve_id]) + priv_key = priv_cls.from_private_bytes(priv) + pub_key = priv_key.public_key() + pub = pub_key.public_bytes(Encoding.Raw, PublicFormat.Raw) + + return cls(priv_key, pub, priv) + + @classmethod + def generate(cls, curve_id: bytes) -> 'EdDSAPrivateKey': + """Generate a new EdDSA private key""" + + priv_cls = cast('_EdPrivateKey', cls._priv_classes[curve_id]) + priv_key = priv_cls.generate() + priv = priv_key.private_bytes(Encoding.Raw, PrivateFormat.Raw, + NoEncryption()) + + pub_key = priv_key.public_key() + pub = pub_key.public_bytes(Encoding.Raw, PublicFormat.Raw) + + return cls(priv_key, pub, priv) + + def sign(self, data: bytes, hash_name: str = '') -> bytes: + """Sign a block of data""" + + # pylint: 
disable=unused-argument + + priv_key = cast('_EdPrivateKey', self.pyca_key) + return priv_key.sign(data) + + + class EdDSAPublicKey(_EdDSAKey): + """A shim around PyCA for EdDSA public keys""" + + _pub_classes: Dict[bytes, object] = { + b'ed25519': ed25519.Ed25519PublicKey, + b'ed448': ed448.Ed448PublicKey + } + + @classmethod + def construct(cls, curve_id: bytes, pub: bytes) -> 'EdDSAPublicKey': + """Construct an EdDSA public key""" + + pub_cls = cast('_EdPublicKey', cls._pub_classes[curve_id]) + pub_key = pub_cls.from_public_bytes(pub) + + return cls(pub_key, pub) + + def verify(self, data: bytes, sig: bytes, hash_name: str = '') -> bool: + """Verify the signature on a block of data""" + + # pylint: disable=unused-argument + + try: + pub_key = cast('_EdPublicKey', self.pyca_key) + pub_key.verify(sig, data) + return True + except InvalidSignature: + return False +else: # pragma: no cover + class _EdDSANaclKey: + """Base class for shim around libnacl for EdDSA keys""" + + def __init__(self, pub: bytes, priv: Optional[bytes] = None): + self._pub = pub + self._priv = priv + + @property + def public_value(self) -> bytes: + """Return the public value encoded as a byte string""" + + return self._pub + + @property + def private_value(self) -> Optional[bytes]: + """Return the private value encoded as a byte string""" + + return self._priv[:-len(self._pub)] if self._priv else None + + + class EdDSAPrivateKey(_EdDSANaclKey): # type: ignore + """A shim around libnacl for EdDSA private keys""" + + @classmethod + def construct(cls, curve_id: bytes, priv: bytes) -> 'EdDSAPrivateKey': + """Construct an EdDSA private key""" + + # pylint: disable=unused-argument + + return cls(*_ed25519_construct_keypair(priv)) + + @classmethod + def generate(cls, curve_id: str) -> 'EdDSAPrivateKey': + """Generate a new EdDSA private key""" + + # pylint: disable=unused-argument + + return cls(*_ed25519_generate_keypair()) + + def sign(self, data: bytes, hash_name: str = '') -> bytes: + """Sign a block of data""" + + # pylint: disable=unused-argument + + assert self._priv is not None + return _ed25519_sign(data, self._priv)[:-len(data)] + + + class EdDSAPublicKey(_EdDSANaclKey): # type: ignore + """A shim around libnacl for EdDSA public keys""" + + @classmethod + def construct(cls, curve_id: bytes, pub: bytes) -> 'EdDSAPublicKey': + """Construct an EdDSA public key""" + + # pylint: disable=unused-argument + + if len(pub) != _ED25519_PUBLIC_BYTES: + raise ValueError('Invalid EdDSA public key') + + return cls(pub) + + def verify(self, data: bytes, sig: bytes, hash_name: str = '') -> bool: + """Verify the signature on a block of data""" + + # pylint: disable=unused-argument + + try: + return _ed25519_verify(sig + data, self._pub) == data + except ValueError: + return False + + try: + import libnacl + + _ED25519_PUBLIC_BYTES = libnacl.crypto_sign_ed25519_PUBLICKEYBYTES + + _ed25519_construct_keypair = libnacl.crypto_sign_seed_keypair + _ed25519_generate_keypair = libnacl.crypto_sign_keypair + _ed25519_sign = libnacl.crypto_sign + _ed25519_verify = libnacl.crypto_sign_open + + ed25519_available = True + except (ImportError, OSError, AttributeError): + pass + + +if curve25519_available: # pragma: no branch + class Curve25519DH: + """Curve25519 Diffie Hellman implementation based on PyCA""" + + def __init__(self) -> None: + self._priv_key = x25519.X25519PrivateKey.generate() + + def get_public(self) -> bytes: + """Return the public key to send in the handshake""" + + return self._priv_key.public_key().public_bytes(Encoding.Raw, + 
+                                                        PublicFormat.Raw)
+
+        def get_shared_bytes(self, peer_public: bytes) -> bytes:
+            """Return the shared key from the peer's public key as bytes"""
+
+            peer_key = x25519.X25519PublicKey.from_public_bytes(peer_public)
+            return self._priv_key.exchange(peer_key)
+
+        def get_shared(self, peer_public: bytes) -> int:
+            """Return the shared key from the peer's public key"""
+
+            return int.from_bytes(self.get_shared_bytes(peer_public), 'big')
+else: # pragma: no cover
+    class Curve25519DH: # type: ignore
+        """Curve25519 Diffie Hellman implementation based on libnacl"""
+
+        def __init__(self) -> None:
+            self._private = os.urandom(_CURVE25519_SCALARBYTES)
+
+        def get_public(self) -> bytes:
+            """Return the public key to send in the handshake"""
+
+            public = ctypes.create_string_buffer(_CURVE25519_BYTES)
+
+            if _curve25519_base(public, self._private) != 0:
+                # This error is never returned by libsodium
+                raise ValueError('Curve25519 failed') # pragma: no cover
+
+            return public.raw
+
+        def get_shared_bytes(self, peer_public: bytes) -> bytes:
+            """Return the shared key from the peer's public key as bytes"""
+
+            if len(peer_public) != _CURVE25519_BYTES:
+                raise ValueError('Invalid curve25519 public key size')
+
+            shared = ctypes.create_string_buffer(_CURVE25519_BYTES)
+
+            if _curve25519(shared, self._private, peer_public) != 0:
+                raise ValueError('Curve25519 failed')
+
+            return shared.raw
+
+        def get_shared(self, peer_public: bytes) -> int:
+            """Return the shared key from the peer's public key"""
+
+            return int.from_bytes(self.get_shared_bytes(peer_public), 'big')
+
+    try:
+        from libnacl import nacl
+
+        _CURVE25519_BYTES = nacl.crypto_scalarmult_curve25519_bytes()
+        _CURVE25519_SCALARBYTES = \
+            nacl.crypto_scalarmult_curve25519_scalarbytes()
+
+        _curve25519 = nacl.crypto_scalarmult_curve25519
+        _curve25519_base = nacl.crypto_scalarmult_curve25519_base
+
+        curve25519_available = True
+    except (ImportError, OSError, AttributeError):
+        pass
+
+
+class Curve448DH:
+    """Curve448 Diffie Hellman implementation based on PyCA"""
+
+    def __init__(self) -> None:
+        self._priv_key = x448.X448PrivateKey.generate()
+
+    def get_public(self) -> bytes:
+        """Return the public key to send in the handshake"""
+
+        return self._priv_key.public_key().public_bytes(Encoding.Raw,
+                                                        PublicFormat.Raw)
+
+    def get_shared(self, peer_public: bytes) -> int:
+        """Return the shared key from the peer's public key"""
+
+        peer_key = x448.X448PublicKey.from_public_bytes(peer_public)
+        shared = self._priv_key.exchange(peer_key)
+        return int.from_bytes(shared, 'big')
diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/kdf.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/kdf.py
new file mode 100644
index 0000000..539ca84
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/kdf.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2017-2021 by Ron Frederick and others.
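Both Curve25519DH branches above expose the same small interface, so the key exchange reduces to each side handing the value from get_public() to the peer and feeding the peer's value to get_shared(). A minimal sketch of that round trip (assuming either the PyCA or the libnacl backend loaded):

    # Each endpoint generates an ephemeral key pair
    alice = Curve25519DH()
    bob = Curve25519DH()

    # Exchanging public values yields the same shared integer on both sides
    assert alice.get_shared(bob.get_public()) == \
        bob.get_shared(alice.get_public())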
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA for key derivation functions""" + +from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC + +from .misc import hashes + + +def pbkdf2_hmac(hash_name: str, passphrase: bytes, salt: bytes, + count: int, key_size: int) -> bytes: + """A shim around PyCA for PBKDF2 HMAC key derivation""" + + return PBKDF2HMAC(hashes[hash_name](), key_size, salt, + count).derive(passphrase) diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/misc.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/misc.py new file mode 100644 index 0000000..4fc2ce2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/misc.py @@ -0,0 +1,70 @@ +# Copyright (c) 2017-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Miscellaneous PyCA utility classes and functions""" + +from typing import Callable, Mapping, Union + +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.hazmat.primitives.asymmetric import ed25519, ed448 +from cryptography.hazmat.primitives.hashes import HashAlgorithm +from cryptography.hazmat.primitives.hashes import MD5, SHA1, SHA224 +from cryptography.hazmat.primitives.hashes import SHA256, SHA384, SHA512 + + +PyCAPrivateKey = Union[dsa.DSAPrivateKey, rsa.RSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey] + +PyCAPublicKey = Union[dsa.DSAPublicKey, rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, ed448.Ed448PublicKey] + +PyCAKey = Union[PyCAPrivateKey, PyCAPublicKey] + + +hashes: Mapping[str, Callable[[], HashAlgorithm]] = { + str(h.name): h for h in (MD5, SHA1, SHA224, SHA256, SHA384, SHA512) +} + + +class CryptoKey: + """Base class for PyCA private/public keys""" + + def __init__(self, pyca_key: PyCAKey): + self._pyca_key = pyca_key + + @property + def pyca_key(self) -> PyCAKey: + """Return the PyCA object associated with this key""" + + return self._pyca_key + + def sign(self, data: bytes, hash_name: str = '') -> bytes: + """Sign a block of data""" + + # pylint: disable=no-self-use + raise RuntimeError # pragma: no cover + + def verify(self, data: bytes, sig: bytes, hash_name: str = '') -> bool: + """Verify the signature on a block of data""" + + # pylint: disable=no-self-use 
+        raise RuntimeError # pragma: no cover
diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/rsa.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/rsa.py
new file mode 100644
index 0000000..66a2774
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/rsa.py
@@ -0,0 +1,169 @@
+# Copyright (c) 2014-2023 by Ron Frederick and others.
+#
+# This program and the accompanying materials are made available under
+# the terms of the Eclipse Public License v2.0 which accompanies this
+# distribution and is available at:
+#
+#     http://www.eclipse.org/legal/epl-2.0/
+#
+# This program may also be made available under the following secondary
+# licenses when the conditions for such availability set forth in the
+# Eclipse Public License v2.0 are satisfied:
+#
+#    GNU General Public License, Version 2.0, or any later versions of
+#    that license
+#
+# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
+#
+# Contributors:
+#     Ron Frederick - initial implementation, API, and documentation
+
+"""A shim around PyCA for RSA public and private keys"""
+
+from typing import Optional, cast
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives.asymmetric.padding import MGF1, OAEP
+from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
+from cryptography.hazmat.primitives.asymmetric import rsa
+
+from .misc import CryptoKey, PyCAKey, hashes
+
+
+# Short variable names are used here, matching names in the spec
+# pylint: disable=invalid-name
+
+
+class _RSAKey(CryptoKey):
+    """Base class for shim around PyCA for RSA keys"""
+
+    def __init__(self, pyca_key: PyCAKey, pub: rsa.RSAPublicNumbers,
+                 priv: Optional[rsa.RSAPrivateNumbers] = None):
+        super().__init__(pyca_key)
+
+        self._pub = pub
+        self._priv = priv
+
+    @property
+    def n(self) -> int:
+        """Return the RSA public modulus"""
+
+        return self._pub.n
+
+    @property
+    def e(self) -> int:
+        """Return the RSA public exponent"""
+
+        return self._pub.e
+
+    @property
+    def d(self) -> Optional[int]:
+        """Return the RSA private exponent"""
+
+        return self._priv.d if self._priv else None
+
+    @property
+    def p(self) -> Optional[int]:
+        """Return the RSA first private prime"""
+
+        return self._priv.p if self._priv else None
+
+    @property
+    def q(self) -> Optional[int]:
+        """Return the RSA second private prime"""
+
+        return self._priv.q if self._priv else None
+
+    @property
+    def dmp1(self) -> Optional[int]:
+        """Return d modulo p-1"""
+
+        return self._priv.dmp1 if self._priv else None
+
+    @property
+    def dmq1(self) -> Optional[int]:
+        """Return d modulo q-1"""
+
+        return self._priv.dmq1 if self._priv else None
+
+    @property
+    def iqmp(self) -> Optional[int]:
+        """Return the inverse of q modulo p"""
+
+        return self._priv.iqmp if self._priv else None
+
+
+class RSAPrivateKey(_RSAKey):
+    """A shim around PyCA for RSA private keys"""
+
+    @classmethod
+    def construct(cls, n: int, e: int, d: int, p: int, q: int,
+                  dmp1: int, dmq1: int, iqmp: int,
+                  skip_validation: bool) -> 'RSAPrivateKey':
+        """Construct an RSA private key"""
+
+        pub = rsa.RSAPublicNumbers(e, n)
+        priv = rsa.RSAPrivateNumbers(p, q, d, dmp1, dmq1, iqmp, pub)
+        priv_key = priv.private_key(
+            unsafe_skip_rsa_key_validation=skip_validation)
+
+        return cls(priv_key, pub, priv)
+
+    @classmethod
+    def generate(cls, key_size: int, exponent: int) -> 'RSAPrivateKey':
+        """Generate a new RSA private key"""
+
+        priv_key = rsa.generate_private_key(exponent, key_size)
+        priv = priv_key.private_numbers()
+        pub = priv.public_numbers
+
+        return
cls(priv_key, pub, priv) + + def decrypt(self, data: bytes, hash_name: str) -> Optional[bytes]: + """Decrypt a block of data""" + + try: + hash_alg = hashes[hash_name]() + priv_key = cast('rsa.RSAPrivateKey', self.pyca_key) + return priv_key.decrypt(data, OAEP(MGF1(hash_alg), hash_alg, None)) + except ValueError: + return None + + def sign(self, data: bytes, hash_name: str = '') -> bytes: + """Sign a block of data""" + + priv_key = cast('rsa.RSAPrivateKey', self.pyca_key) + return priv_key.sign(data, PKCS1v15(), hashes[hash_name]()) + + +class RSAPublicKey(_RSAKey): + """A shim around PyCA for RSA public keys""" + + @classmethod + def construct(cls, n: int, e: int) -> 'RSAPublicKey': + """Construct an RSA public key""" + + pub = rsa.RSAPublicNumbers(e, n) + pub_key = pub.public_key() + + return cls(pub_key, pub) + + def encrypt(self, data: bytes, hash_name: str) -> Optional[bytes]: + """Encrypt a block of data""" + + try: + hash_alg = hashes[hash_name]() + pub_key = cast('rsa.RSAPublicKey', self.pyca_key) + return pub_key.encrypt(data, OAEP(MGF1(hash_alg), hash_alg, None)) + except ValueError: + return None + + def verify(self, data: bytes, sig: bytes, hash_name: str = '') -> bool: + """Verify the signature on a block of data""" + + try: + pub_key = cast('rsa.RSAPublicKey', self.pyca_key) + pub_key.verify(sig, data, PKCS1v15(), hashes[hash_name]()) + return True + except InvalidSignature: + return False diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/sntrup.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/sntrup.py new file mode 100644 index 0000000..a671f00 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/sntrup.py @@ -0,0 +1,88 @@ +# Copyright (c) 2022 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around liboqs for Streamlined NTRU Prime post-quantum encryption""" + +import ctypes +import ctypes.util +from typing import Tuple + + +sntrup761_available = False + +sntrup761_pubkey_bytes = 1158 +sntrup761_privkey_bytes = 1763 +sntrup761_ciphertext_bytes = 1039 +sntrup761_secret_bytes = 32 + + +for lib in ('oqs', 'liboqs'): + _oqs_lib = ctypes.util.find_library(lib) + + if _oqs_lib: # pragma: no branch + break +else: # pragma: no cover + _oqs_lib = None + +if _oqs_lib: # pragma: no branch + _oqs = ctypes.cdll.LoadLibrary(_oqs_lib) + + _sntrup761_keypair = _oqs.OQS_KEM_ntruprime_sntrup761_keypair + _sntrup761_encaps = _oqs.OQS_KEM_ntruprime_sntrup761_encaps + _sntrup761_decaps = _oqs.OQS_KEM_ntruprime_sntrup761_decaps + + sntrup761_available = True + + +def sntrup761_keypair() -> Tuple[bytes, bytes]: + """Make a SNTRUP761 key pair""" + + pubkey = ctypes.create_string_buffer(sntrup761_pubkey_bytes) + privkey = ctypes.create_string_buffer(sntrup761_privkey_bytes) + _sntrup761_keypair(pubkey, privkey) + + return pubkey.raw, privkey.raw + + +def sntrup761_encaps(pubkey: bytes) -> Tuple[bytes, bytes]: + """Generate a 
random secret and encrypt it with a public key""" + + if len(pubkey) != sntrup761_pubkey_bytes: + raise ValueError('Invalid SNTRUP761 public key') + + ciphertext = ctypes.create_string_buffer(sntrup761_ciphertext_bytes) + secret = ctypes.create_string_buffer(sntrup761_secret_bytes) + + _sntrup761_encaps(ciphertext, secret, pubkey) + + return secret.raw, ciphertext.raw + + +def sntrup761_decaps(ciphertext: bytes, privkey: bytes) -> bytes: + """Decrypt an encrypted secret using a private key""" + + if len(ciphertext) != sntrup761_ciphertext_bytes: + raise ValueError('Invalid SNTRUP761 ciphertext') + + secret = ctypes.create_string_buffer(sntrup761_secret_bytes) + + _sntrup761_decaps(secret, ciphertext, privkey) + + return secret.raw diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/umac.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/umac.py new file mode 100644 index 0000000..0c41359 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/umac.py @@ -0,0 +1,139 @@ +# Copyright (c) 2016-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""UMAC cryptographic hash (RFC 4418) wrapper for Nettle library""" + +import binascii +import ctypes +import ctypes.util +from typing import TYPE_CHECKING, Callable, Optional + + +if TYPE_CHECKING: + _ByteArray = ctypes.Array[ctypes.c_char] + _SetKey = Callable[[_ByteArray, bytes], None] + _SetNonce = Callable[[_ByteArray, ctypes.c_size_t, bytes], None] + _Update = Callable[[_ByteArray, ctypes.c_size_t, bytes], None] + _Digest = Callable[[_ByteArray, ctypes.c_size_t, _ByteArray], None] + _New = Callable[[bytes, Optional[bytes], Optional[bytes]], object] + + +_UMAC_BLOCK_SIZE = 1024 +_UMAC_DEFAULT_CTX_SIZE = 4096 + + +def _build_umac(size: int) -> '_New': + """Function to build UMAC wrapper for a specific digest size""" + + _name = 'umac%d' % size + _prefix = 'nettle_%s_' % _name + + try: + _context_size: int = getattr(_nettle, _prefix + '_ctx_size')() + except AttributeError: + _context_size = _UMAC_DEFAULT_CTX_SIZE + + _set_key: _SetKey = getattr(_nettle, _prefix + 'set_key') + _set_nonce: _SetNonce = getattr(_nettle, _prefix + 'set_nonce') + _update: _Update = getattr(_nettle, _prefix + 'update') + _digest: _Digest = getattr(_nettle, _prefix + 'digest') + + + class _UMAC: + """Wrapper for UMAC cryptographic hash + + This class supports the cryptographic hash API defined in PEP 452. 
+ + """ + + name = _name + block_size = _UMAC_BLOCK_SIZE + digest_size = size // 8 + + def __init__(self, ctx: '_ByteArray', nonce: Optional[bytes] = None, + msg: Optional[bytes] = None): + self._ctx = ctx + + if nonce: + self.set_nonce(nonce) + + if msg: + self.update(msg) + + @classmethod + def new(cls, key: bytes, msg: Optional[bytes] = None, + nonce: Optional[bytes] = None) -> '_UMAC': + """Construct a new UMAC hash object""" + + ctx = ctypes.create_string_buffer(_context_size) + _set_key(ctx, key) + + return cls(ctx, nonce, msg) + + def copy(self) -> '_UMAC': + """Return a new hash object with this object's state""" + + ctx = ctypes.create_string_buffer(self._ctx.raw) + return self.__class__(ctx) + + def set_nonce(self, nonce: bytes) -> None: + """Reset the nonce associated with this object""" + + _set_nonce(self._ctx, ctypes.c_size_t(len(nonce)), nonce) + + def update(self, msg: bytes) -> None: + """Add the data in msg to the hash""" + + _update(self._ctx, ctypes.c_size_t(len(msg)), msg) + + def digest(self) -> bytes: + """Return the hash and increment nonce to begin a new message + + .. note:: The hash is reset and the nonce is incremented + when this function is called. This doesn't match + the behavior defined in PEP 452. + + """ + + result = ctypes.create_string_buffer(self.digest_size) + _digest(self._ctx, ctypes.c_size_t(self.digest_size), result) + return result.raw + + def hexdigest(self) -> str: + """Return the digest as a string of hexadecimal digits""" + + return binascii.b2a_hex(self.digest()).decode('ascii') + + + return _UMAC.new + + +for lib in ('nettle', 'libnettle', 'libnettle-6'): + _nettle_lib = ctypes.util.find_library(lib) + + if _nettle_lib: # pragma: no branch + break +else: # pragma: no cover + _nettle_lib = None + +if _nettle_lib: # pragma: no branch + _nettle = ctypes.cdll.LoadLibrary(_nettle_lib) + + umac32, umac64, umac96, umac128 = map(_build_umac, (32, 64, 96, 128)) diff --git a/venv/lib/python3.12/site-packages/asyncssh/crypto/x509.py b/venv/lib/python3.12/site-packages/asyncssh/crypto/x509.py new file mode 100644 index 0000000..e89fe4f --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/crypto/x509.py @@ -0,0 +1,417 @@ +# Copyright (c) 2017-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""A shim around PyCA and PyOpenSSL for X.509 certificates""" + +from datetime import datetime, timezone +import re +import sys +from typing import Iterable, List, Optional, Sequence, Set, Union, cast + +from cryptography.hazmat.primitives.serialization import Encoding +from cryptography.hazmat.primitives.serialization import PublicFormat +from cryptography import x509 + +from OpenSSL import crypto + +from ..asn1 import IA5String, der_decode, der_encode +from ..misc import ip_address + +from .misc import PyCAKey, PyCAPrivateKey, PyCAPublicKey, hashes + + +_Comment = Union[None, bytes, str] +_Principals = Union[str, Sequence[str]] +_Purposes = Union[None, str, Sequence[str]] +_PurposeOIDs = Union[None, Set[x509.ObjectIdentifier]] +_GeneralNameList = List[x509.GeneralName] +_NameInit = Union[str, x509.Name, Iterable[x509.RelativeDistinguishedName]] + + +_purpose_to_oid = { + 'serverAuth': x509.ExtendedKeyUsageOID.SERVER_AUTH, + 'clientAuth': x509.ExtendedKeyUsageOID.CLIENT_AUTH, + 'secureShellClient': x509.ObjectIdentifier('1.3.6.1.5.5.7.3.21'), + 'secureShellServer': x509.ObjectIdentifier('1.3.6.1.5.5.7.3.22')} + +_purpose_any = '2.5.29.37.0' + +_nscomment_oid = x509.ObjectIdentifier('2.16.840.1.113730.1.13') + +_datetime_min = datetime.fromtimestamp(0, timezone.utc).replace(microsecond=1) + +_datetime_32bit_max = datetime.fromtimestamp(2**31 - 1, timezone.utc) + +if sys.platform == 'win32': # pragma: no cover + # Windows' datetime.max is year 9999, but timestamps that large don't work + _datetime_max = datetime.max.replace(year=2999, tzinfo=timezone.utc) +else: + _datetime_max = datetime.max.replace(tzinfo=timezone.utc) + + +def _to_generalized_time(t: int) -> datetime: + """Convert a timestamp value to a datetime""" + + if t <= 0: + return _datetime_min + else: + try: + return datetime.fromtimestamp(t, timezone.utc) + except (OSError, OverflowError): + try: + # Work around a bug in cryptography which shows up on + # systems with a small time_t. 
+ datetime.fromtimestamp(_datetime_max.timestamp() - 1, + timezone.utc) + return _datetime_max + except (OSError, OverflowError): # pragma: no cover + return _datetime_32bit_max + + +def _to_purpose_oids(purposes: _Purposes) -> _PurposeOIDs: + """Convert a list of purposes to purpose OIDs""" + + if isinstance(purposes, str): + purposes = [p.strip() for p in purposes.split(',')] + + if not purposes or 'any' in purposes or _purpose_any in purposes: + purpose_oids = None + else: + purpose_oids = set(_purpose_to_oid.get(p) or x509.ObjectIdentifier(p) + for p in purposes) + + return purpose_oids + + +def _encode_user_principals(principals: _Principals) -> _GeneralNameList: + """Encode user principals as e-mail addresses""" + + if isinstance(principals, str): + principals = [p.strip() for p in principals.split(',')] + + return [x509.RFC822Name(name) for name in principals] + + +def _encode_host_principals(principals: _Principals) -> _GeneralNameList: + """Encode host principals as DNS names or IP addresses""" + + def _encode_host(name: str) -> x509.GeneralName: + """Encode a host principal as a DNS name or IP address""" + + try: + return x509.IPAddress(ip_address(name)) + except ValueError: + return x509.DNSName(name) + + if isinstance(principals, str): + principals = [p.strip() for p in principals.split(',')] + + return [_encode_host(name) for name in principals] + + +class X509Name(x509.Name): + """A shim around PyCA for X.509 distinguished names""" + + _escape = re.compile(r'([,+\\])') + _unescape = re.compile(r'\\([,+\\])') + _split_rdn = re.compile(r'(?:[^+\\]+|\\.)+') + _split_name = re.compile(r'(?:[^,\\]+|\\.)+') + + _attrs = ( + ('C', x509.NameOID.COUNTRY_NAME), + ('ST', x509.NameOID.STATE_OR_PROVINCE_NAME), + ('L', x509.NameOID.LOCALITY_NAME), + ('O', x509.NameOID.ORGANIZATION_NAME), + ('OU', x509.NameOID.ORGANIZATIONAL_UNIT_NAME), + ('CN', x509.NameOID.COMMON_NAME), + ('DC', x509.NameOID.DOMAIN_COMPONENT)) + + _to_oid = dict((k, v) for k, v in _attrs) + _from_oid = dict((v, k) for k, v in _attrs) + + def __init__(self, name: _NameInit): + if isinstance(name, str): + rdns = self._parse_name(name) + elif isinstance(name, x509.Name): + rdns = name.rdns + else: + rdns = name + + super().__init__(rdns) + + def __str__(self) -> str: + return ','.join(self._format_rdn(rdn) for rdn in self.rdns) + + def _format_rdn(self, rdn: x509.RelativeDistinguishedName) -> str: + """Format an X.509 RelativeDistinguishedName as a string""" + + return '+'.join(sorted(self._format_attr(nameattr) for nameattr in rdn)) + + def _format_attr(self, nameattr: x509.NameAttribute) -> str: + """Format an X.509 NameAttribute as a string""" + + attr = self._from_oid.get(nameattr.oid) or nameattr.oid.dotted_string + return attr + '=' + self._escape.sub(r'\\\1', cast(str, nameattr.value)) + + def _parse_name(self, name: str) -> \ + Iterable[x509.RelativeDistinguishedName]: + """Parse an X.509 distinguished name""" + + return [self._parse_rdn(rdn) for rdn in self._split_name.findall(name)] + + def _parse_rdn(self, rdn: str) -> x509.RelativeDistinguishedName: + """Parse an X.509 relative distinguished name""" + + return x509.RelativeDistinguishedName( + self._parse_nameattr(av) for av in self._split_rdn.findall(rdn)) + + def _parse_nameattr(self, av: str) -> x509.NameAttribute: + """Parse an X.509 name attribute/value pair""" + + try: + attr, value = av.split('=', 1) + except ValueError: + raise ValueError('Invalid X.509 name attribute: ' + av) from None + + try: + attr = attr.strip() + oid = self._to_oid.get(attr) or 
x509.ObjectIdentifier(attr)
+        except ValueError:
+            raise ValueError('Unknown X.509 attribute: ' + attr) from None
+
+        return x509.NameAttribute(oid, self._unescape.sub(r'\1', value))
+
+
+class X509NamePattern:
+    """Match X.509 distinguished names"""
+
+    def __init__(self, pattern: str):
+        if pattern.endswith(',*'):
+            self._pattern = X509Name(pattern[:-2])
+            self._prefix_len: Optional[int] = len(self._pattern.rdns)
+        else:
+            self._pattern = X509Name(pattern)
+            self._prefix_len = None
+
+    def __eq__(self, other: object) -> bool:
+        # This isn't protected access - both objects are
+        # X509NamePattern instances
+        # pylint: disable=protected-access
+
+        if not isinstance(other, X509NamePattern): # pragma: no cover
+            return NotImplemented
+
+        return (self._pattern == other._pattern and
+                self._prefix_len == other._prefix_len)
+
+    def __hash__(self) -> int:
+        return hash((self._pattern, self._prefix_len))
+
+    def matches(self, name: X509Name) -> bool:
+        """Return whether an X.509 name matches this pattern"""
+
+        return self._pattern.rdns == name.rdns[:self._prefix_len]
+
+
+class X509Certificate:
+    """A shim around PyCA and PyOpenSSL for X.509 certificates"""
+
+    def __init__(self, cert: x509.Certificate, data: bytes):
+        self.data = data
+
+        self.subject = X509Name(cert.subject)
+        self.issuer = X509Name(cert.issuer)
+        self.key_data = cert.public_key().public_bytes(
+            Encoding.DER, PublicFormat.SubjectPublicKeyInfo)
+
+        self.openssl_cert = crypto.X509.from_cryptography(cert)
+        self.subject_hash = hex(self.openssl_cert.get_subject().hash())[2:]
+        self.issuer_hash = hex(self.openssl_cert.get_issuer().hash())[2:]
+
+        try:
+            self.purposes: Optional[Set[bytes]] = \
+                set(cert.extensions.get_extension_for_class(
+                    x509.ExtendedKeyUsage).value)
+        except x509.ExtensionNotFound:
+            self.purposes = None
+
+        try:
+            sans = cert.extensions.get_extension_for_class(
+                x509.SubjectAlternativeName).value
+
+            self.user_principals = sans.get_values_for_type(x509.RFC822Name)
+            self.host_principals = sans.get_values_for_type(x509.DNSName) + \
+                [str(ip) for ip in sans.get_values_for_type(x509.IPAddress)]
+        except x509.ExtensionNotFound:
+            cn = cert.subject.get_attributes_for_oid(x509.NameOID.COMMON_NAME)
+            principals = [cast(str, attr.value) for attr in cn]
+
+            self.user_principals = principals
+            self.host_principals = principals
+
+        try:
+            comment = cert.extensions.get_extension_for_oid(_nscomment_oid)
+            comment_der = cast(x509.UnrecognizedExtension, comment.value).value
+            self.comment: Optional[bytes] = \
+                cast(IA5String, der_decode(comment_der)).value
+        except x509.ExtensionNotFound:
+            self.comment = None
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, X509Certificate): # pragma: no cover
+            return NotImplemented
+
+        return self.data == other.data
+
+    def __hash__(self) -> int:
+        return hash(self.data)
+
+    def validate(self, trust_store: Sequence['X509Certificate'],
+                 purposes: _Purposes, user_principal: str,
+                 host_principal: str) -> None:
+        """Validate an X.509 certificate"""
+
+        purpose_oids = _to_purpose_oids(purposes)
+
+        if purpose_oids and self.purposes and not purpose_oids & self.purposes:
+            raise ValueError('Certificate purpose mismatch')
+
+        if user_principal and user_principal not in self.user_principals:
+            raise ValueError('Certificate user principal mismatch')
+
+        if host_principal and host_principal not in self.host_principals:
+            raise ValueError('Certificate host principal mismatch')
+
+        x509_store = crypto.X509Store()
+
+        for c in trust_store:
+            x509_store.add_cert(c.openssl_cert)
+
+        try:
+ x509_ctx = crypto.X509StoreContext(x509_store, self.openssl_cert, + None) + x509_ctx.verify_certificate() + except crypto.X509StoreContextError as exc: + raise ValueError(f'X.509 chain validation error: {exc}') from None + + +def generate_x509_certificate(signing_key: PyCAKey, key: PyCAKey, + subject: _NameInit, issuer: Optional[_NameInit], + serial: Optional[int], valid_after: int, + valid_before: int, ca: bool, + ca_path_len: Optional[int], purposes: _Purposes, + user_principals: _Principals, + host_principals: _Principals, + hash_name: str, + comment: _Comment) -> X509Certificate: + """Generate a new X.509 certificate""" + + builder = x509.CertificateBuilder() + + subject = X509Name(subject) + issuer = X509Name(issuer) if issuer else subject + self_signed = subject == issuer + + builder = builder.subject_name(subject) + builder = builder.issuer_name(issuer) + + if serial is None: + serial = x509.random_serial_number() + + builder = builder.serial_number(serial) + + builder = builder.not_valid_before(_to_generalized_time(valid_after)) + builder = builder.not_valid_after(_to_generalized_time(valid_before)) + + builder = builder.public_key(cast(PyCAPublicKey, key)) + + if ca: + basic_constraints = x509.BasicConstraints(ca=True, + path_length=ca_path_len) + key_usage = x509.KeyUsage(digital_signature=False, + content_commitment=False, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, key_cert_sign=True, + crl_sign=True, encipher_only=False, + decipher_only=False) + else: + basic_constraints = x509.BasicConstraints(ca=False, path_length=None) + key_usage = x509.KeyUsage(digital_signature=True, + content_commitment=False, + key_encipherment=True, + data_encipherment=False, + key_agreement=True, key_cert_sign=False, + crl_sign=False, encipher_only=False, + decipher_only=False) + + builder = builder.add_extension(basic_constraints, critical=True) + + if ca or not self_signed: + builder = builder.add_extension(key_usage, critical=True) + + purpose_oids = _to_purpose_oids(purposes) + + if purpose_oids: + builder = builder.add_extension(x509.ExtendedKeyUsage(purpose_oids), + critical=False) + + skid = x509.SubjectKeyIdentifier.from_public_key(cast(PyCAPublicKey, key)) + + builder = builder.add_extension(skid, critical=False) + + if not self_signed: + issuer_pk = cast(PyCAPrivateKey, signing_key).public_key() + akid = x509.AuthorityKeyIdentifier.from_issuer_public_key(issuer_pk) + builder = builder.add_extension(akid, critical=False) + + sans = _encode_user_principals(user_principals) + \ + _encode_host_principals(host_principals) + + if sans: + builder = builder.add_extension(x509.SubjectAlternativeName(sans), + critical=False) + + if comment: + if isinstance(comment, str): + comment_bytes = comment.encode('utf-8') + else: + comment_bytes = comment + + comment_bytes = der_encode(IA5String(comment_bytes)) + builder = builder.add_extension( + x509.UnrecognizedExtension(_nscomment_oid, comment_bytes), + critical=False) + + try: + hash_alg = hashes[hash_name]() if hash_name else None + except KeyError: + raise ValueError('Unknown hash algorithm') from None + + cert = builder.sign(cast(PyCAPrivateKey, signing_key), hash_alg) + data = cert.public_bytes(Encoding.DER) + + return X509Certificate(cert, data) + + +def import_x509_certificate(data: bytes) -> X509Certificate: + """Construct an X.509 certificate from DER data""" + + cert = x509.load_der_x509_certificate(data) + return X509Certificate(cert, data) diff --git a/venv/lib/python3.12/site-packages/asyncssh/dsa.py 
b/venv/lib/python3.12/site-packages/asyncssh/dsa.py new file mode 100644 index 0000000..1972e1d --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/dsa.py @@ -0,0 +1,258 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""DSA public key encryption handler""" + +from typing import Optional, Tuple, Union, cast + +from .asn1 import ASN1DecodeError, ObjectIdentifier, der_encode, der_decode +from .crypto import DSAPrivateKey, DSAPublicKey +from .misc import all_ints +from .packet import MPInt, String, SSHPacket +from .public_key import SSHKey, SSHOpenSSHCertificateV01, KeyExportError +from .public_key import register_public_key_alg, register_certificate_alg +from .public_key import register_x509_certificate_alg + + +_PrivateKeyArgs = Tuple[int, int, int, int, int] +_PublicKeyArgs = Tuple[int, int, int, int] + + +class _DSAKey(SSHKey): + """Handler for DSA public key encryption""" + + _key: Union[DSAPrivateKey, DSAPublicKey] + + algorithm = b'ssh-dss' + default_x509_hash = 'sha256' + pem_name = b'DSA' + pkcs8_oid = ObjectIdentifier('1.2.840.10040.4.1') + sig_algorithms = (algorithm,) + x509_algorithms = (b'x509v3-' + algorithm,) + all_sig_algorithms = set(sig_algorithms) + + def __eq__(self, other: object) -> bool: + # This isn't protected access - both objects are _DSAKey instances + # pylint: disable=protected-access + + return (isinstance(other, type(self)) and + self._key.p == other._key.p and + self._key.q == other._key.q and + self._key.g == other._key.g and + self._key.y == other._key.y and + self._key.x == other._key.x) + + def __hash__(self) -> int: + return hash((self._key.p, self._key.q, self._key.g, + self._key.y, self._key.x)) + + @classmethod + def generate(cls, algorithm: bytes) -> '_DSAKey': # type: ignore + """Generate a new DSA private key""" + + # pylint: disable=arguments-differ,unused-argument + + return cls(DSAPrivateKey.generate(key_size=1024)) + + @classmethod + def make_private(cls, key_params: object) -> SSHKey: + """Construct a DSA private key""" + + p, q, g, y, x = cast(_PrivateKeyArgs, key_params) + + return cls(DSAPrivateKey.construct(p, q, g, y, x)) + + @classmethod + def make_public(cls, key_params: object) -> SSHKey: + """Construct a DSA public key""" + + p, q, g, y = cast(_PublicKeyArgs, key_params) + + return cls(DSAPublicKey.construct(p, q, g, y)) + + @classmethod + def decode_pkcs1_private(cls, key_data: object) -> \ + Optional[_PrivateKeyArgs]: + """Decode a PKCS#1 format DSA private key""" + + if (isinstance(key_data, tuple) and len(key_data) == 6 and + all_ints(key_data) and key_data[0] == 0): + return cast(_PrivateKeyArgs, key_data[1:]) + else: + return None + + @classmethod + def decode_pkcs1_public(cls, key_data: object) -> \ + Optional[_PublicKeyArgs]: + """Decode a PKCS#1 format DSA public key""" + + if (isinstance(key_data, tuple) and len(key_data) == 4 and + all_ints(key_data)): + y, 
p, q, g = key_data + return p, q, g, y + else: + return None + + @classmethod + def decode_pkcs8_private(cls, alg_params: object, + data: bytes) -> Optional[_PrivateKeyArgs]: + """Decode a PKCS#8 format DSA private key""" + + try: + x = der_decode(data) + except ASN1DecodeError: + return None + + if (isinstance(alg_params, tuple) and len(alg_params) == 3 and + all_ints(alg_params) and isinstance(x, int)): + p, q, g = alg_params + y: int = pow(g, x, p) + return p, q, g, y, x + else: + return None + + @classmethod + def decode_pkcs8_public(cls, alg_params: object, + data: bytes) -> Optional[_PublicKeyArgs]: + """Decode a PKCS#8 format DSA public key""" + + try: + y = der_decode(data) + except ASN1DecodeError: + return None + + if (isinstance(alg_params, tuple) and len(alg_params) == 3 and + all_ints(alg_params) and isinstance(y, int)): + p, q, g = alg_params + return p, q, g, y + else: + return None + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> _PrivateKeyArgs: + """Decode an SSH format DSA private key""" + + p = packet.get_mpint() + q = packet.get_mpint() + g = packet.get_mpint() + y = packet.get_mpint() + x = packet.get_mpint() + + return p, q, g, y, x + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> _PublicKeyArgs: + """Decode an SSH format DSA public key""" + + p = packet.get_mpint() + q = packet.get_mpint() + g = packet.get_mpint() + y = packet.get_mpint() + + return p, q, g, y + + def encode_pkcs1_private(self) -> object: + """Encode a PKCS#1 format DSA private key""" + + if not self._key.x: + raise KeyExportError('Key is not private') + + return (0, self._key.p, self._key.q, self._key.g, + self._key.y, self._key.x) + + def encode_pkcs1_public(self) -> object: + """Encode a PKCS#1 format DSA public key""" + + return (self._key.y, self._key.p, self._key.q, self._key.g) + + def encode_pkcs8_private(self) -> Tuple[object, object]: + """Encode a PKCS#8 format DSA private key""" + + if not self._key.x: + raise KeyExportError('Key is not private') + + return (self._key.p, self._key.q, self._key.g), der_encode(self._key.x) + + def encode_pkcs8_public(self) -> Tuple[object, object]: + """Encode a PKCS#8 format DSA public key""" + + return (self._key.p, self._key.q, self._key.g), der_encode(self._key.y) + + def encode_ssh_private(self) -> bytes: + """Encode an SSH format DSA private key""" + + if not self._key.x: + raise KeyExportError('Key is not private') + + return b''.join((MPInt(self._key.p), MPInt(self._key.q), + MPInt(self._key.g), MPInt(self._key.y), + MPInt(self._key.x))) + + def encode_ssh_public(self) -> bytes: + """Encode an SSH format DSA public key""" + + return b''.join((MPInt(self._key.p), MPInt(self._key.q), + MPInt(self._key.g), MPInt(self._key.y))) + + def encode_agent_cert_private(self) -> bytes: + """Encode DSA certificate private key data for agent""" + + if not self._key.x: + raise KeyExportError('Key is not private') + + return MPInt(self._key.x) + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Compute an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + if not self._key.x: + raise ValueError('Private key needed for signing') + + sig = der_decode(self._key.sign(data, 'sha1')) + r, s = cast(Tuple[int, int], sig) + return String(r.to_bytes(20, 'big') + s.to_bytes(20, 'big')) + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Verify an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + sig 
= packet.get_string() + packet.check_end() + + if len(sig) != 40: + return False + + r = int.from_bytes(sig[:20], 'big') + s = int.from_bytes(sig[20:], 'big') + + return self._key.verify(data, der_encode((r, s)), 'sha1') + + +register_public_key_alg(b'ssh-dss', _DSAKey, False) + +register_certificate_alg(1, b'ssh-dss', b'ssh-dss-cert-v01@openssh.com', + _DSAKey, SSHOpenSSHCertificateV01, False) + +for alg in _DSAKey.x509_algorithms: + register_x509_certificate_alg(alg, False) diff --git a/venv/lib/python3.12/site-packages/asyncssh/ecdsa.py b/venv/lib/python3.12/site-packages/asyncssh/ecdsa.py new file mode 100644 index 0000000..57d8d82 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/ecdsa.py @@ -0,0 +1,341 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""ECDSA public key encryption handler""" + +from typing import Dict, Optional, Tuple, Union, cast + +from .asn1 import ASN1DecodeError, BitString, ObjectIdentifier, TaggedDERObject +from .asn1 import der_encode, der_decode +from .crypto import CryptoKey, ECDSAPrivateKey, ECDSAPublicKey +from .crypto import lookup_ec_curve_by_params +from .packet import MPInt, String, SSHPacket +from .public_key import SSHKey, SSHOpenSSHCertificateV01 +from .public_key import KeyImportError, KeyExportError +from .public_key import register_public_key_alg, register_certificate_alg +from .public_key import register_x509_certificate_alg + + +_PrivateKeyArgs = Tuple[bytes, Union[bytes, int], bytes] +_PublicKeyArgs = Tuple[bytes, bytes] + + +# OID for EC prime fields +PRIME_FIELD = ObjectIdentifier('1.2.840.10045.1.1') + +_hash_algs = {b'1.3.132.0.10': 'sha256', + b'nistp256': 'sha256', + b'nistp384': 'sha384', + b'nistp521': 'sha512'} + +_alg_oids: Dict[bytes, ObjectIdentifier] = {} +_alg_oid_map: Dict[ObjectIdentifier, bytes] = {} + + +class _ECKey(SSHKey): + """Handler for elliptic curve public key encryption""" + + _key: Union[ECDSAPrivateKey, ECDSAPublicKey] + + default_x509_hash = 'sha256' + pem_name = b'EC' + pkcs8_oid = ObjectIdentifier('1.2.840.10045.2.1') + + def __init__(self, key: CryptoKey): + super().__init__(key) + + self.algorithm = b'ecdsa-sha2-' + self._key.curve_id + self.sig_algorithms = (self.algorithm,) + self.x509_algorithms = (b'x509v3-' + self.algorithm,) + self.all_sig_algorithms = set(self.sig_algorithms) + + self._alg_oid = _alg_oids[self._key.curve_id] + self._hash_alg = _hash_algs[self._key.curve_id] + + def __eq__(self, other: object) -> bool: + # This isn't protected access - both objects are _ECKey instances + # pylint: disable=protected-access + + return (isinstance(other, type(self)) and + self._key.curve_id == other._key.curve_id and + self._key.x == other._key.x and + self._key.y == other._key.y and + self._key.d == other._key.d) + + def __hash__(self) -> int: + return hash((self._key.curve_id, self._key.x, + self._key.y, self._key.d)) + + @classmethod + 
def _lookup_curve(cls, alg_params: object) -> bytes: + """Look up an EC curve matching the specified parameters""" + + if isinstance(alg_params, ObjectIdentifier): + try: + curve_id = _alg_oid_map[alg_params] + except KeyError: + raise KeyImportError('Unknown elliptic curve OID %s' % + alg_params) from None + elif (isinstance(alg_params, tuple) and len(alg_params) >= 5 and + alg_params[0] == 1 and isinstance(alg_params[1], tuple) and + len(alg_params[1]) == 2 and alg_params[1][0] == PRIME_FIELD and + isinstance(alg_params[2], tuple) and len(alg_params[2]) >= 2 and + isinstance(alg_params[3], bytes) and + isinstance(alg_params[2][0], bytes) and + isinstance(alg_params[2][1], bytes) and + isinstance(alg_params[4], int)): + p = alg_params[1][1] + a = int.from_bytes(alg_params[2][0], 'big') + b = int.from_bytes(alg_params[2][1], 'big') + point = alg_params[3] + n = alg_params[4] + + try: + curve_id = lookup_ec_curve_by_params(p, a, b, point, n) + except ValueError as exc: + raise KeyImportError(str(exc)) from None + else: + raise KeyImportError('Invalid EC curve parameters') + + return curve_id + + @classmethod + def generate(cls, algorithm: bytes) -> '_ECKey': # type: ignore + """Generate a new EC private key""" + + # pylint: disable=arguments-differ + + # Strip 'ecdsa-sha2-' prefix of algorithm to get curve_id + return cls(ECDSAPrivateKey.generate(algorithm[11:])) + + @classmethod + def make_private(cls, key_params: object) -> SSHKey: + """Construct an EC private key""" + + curve_id, private_value, public_value = \ + cast(_PrivateKeyArgs, key_params) + + if isinstance(private_value, bytes): + private_value = int.from_bytes(private_value, 'big') + + return cls(ECDSAPrivateKey.construct(curve_id, public_value, + private_value)) + + @classmethod + def make_public(cls, key_params: object) -> SSHKey: + """Construct an EC public key""" + + curve_id, public_value = cast(_PublicKeyArgs, key_params) + + return cls(ECDSAPublicKey.construct(curve_id, public_value)) + + @classmethod + def decode_pkcs1_private(cls, key_data: object) -> \ + Optional[_PrivateKeyArgs]: + """Decode a PKCS#1 format EC private key""" + + if (isinstance(key_data, tuple) and len(key_data) > 2 and + key_data[0] == 1 and isinstance(key_data[1], bytes) and + isinstance(key_data[2], TaggedDERObject) and + key_data[2].tag == 0): + alg_params = key_data[2].value + private_key = key_data[1] + + if (len(key_data) > 3 and + isinstance(key_data[3], TaggedDERObject) and + key_data[3].tag == 1 and + isinstance(key_data[3].value, BitString) and + key_data[3].value.unused == 0): + public_key: bytes = key_data[3].value.value + else: + public_key = b'' + + return cls._lookup_curve(alg_params), private_key, public_key + else: + return None + + @classmethod + def decode_pkcs1_public(cls, key_data: object) -> \ + Optional[_PublicKeyArgs]: + """Decode a PKCS#1 format EC public key""" + + # pylint: disable=unused-argument + + raise KeyImportError('PKCS#1 not supported for EC public keys') + + @classmethod + def decode_pkcs8_private(cls, alg_params: object, + data: bytes) -> Optional[_PrivateKeyArgs]: + """Decode a PKCS#8 format EC private key""" + + try: + key_data = der_decode(data) + except ASN1DecodeError: + key_data = None + + if (isinstance(key_data, tuple) and len(key_data) > 1 and + key_data[0] == 1 and isinstance(key_data[1], bytes)): + private_key = key_data[1] + + if (len(key_data) > 2 and + isinstance(key_data[2], TaggedDERObject) and + key_data[2].tag == 1 and + isinstance(key_data[2].value, BitString) and + key_data[2].value.unused == 
0): + public_key = key_data[2].value.value + else: + public_key = b'' + + return cls._lookup_curve(alg_params), private_key, public_key + else: + return None + + @classmethod + def decode_pkcs8_public(cls, alg_params: object, + data: bytes) -> Optional[_PublicKeyArgs]: + """Decode a PKCS#8 format EC public key""" + + if isinstance(alg_params, ObjectIdentifier): + return cls._lookup_curve(alg_params), data + else: + return None + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> _PrivateKeyArgs: + """Decode an SSH format EC private key""" + + curve_id = packet.get_string() + public_key = packet.get_string() + private_key = packet.get_mpint() + + return curve_id, private_key, public_key + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> _PublicKeyArgs: + """Decode an SSH format EC public key""" + + curve_id = packet.get_string() + public_key = packet.get_string() + + return curve_id, public_key + + def encode_public_tagged(self) -> object: + """Encode an EC public key blob as a tagged bitstring""" + + return TaggedDERObject(1, BitString(self._key.public_value)) + + def encode_pkcs1_private(self) -> object: + """Encode a PKCS#1 format EC private key""" + + if not self._key.private_value: + raise KeyExportError('Key is not private') + + return (1, self._key.private_value, + TaggedDERObject(0, self._alg_oid), + self.encode_public_tagged()) + + def encode_pkcs1_public(self) -> object: + """Encode a PKCS#1 format EC public key""" + + raise KeyExportError('PKCS#1 is not supported for EC public keys') + + def encode_pkcs8_private(self) -> Tuple[object, object]: + """Encode a PKCS#8 format EC private key""" + + if not self._key.private_value: + raise KeyExportError('Key is not private') + + return self._alg_oid, der_encode((1, self._key.private_value, + self.encode_public_tagged())) + + def encode_pkcs8_public(self) -> Tuple[object, object]: + """Encode a PKCS#8 format EC public key""" + + return self._alg_oid, self._key.public_value + + def encode_ssh_private(self) -> bytes: + """Encode an SSH format EC private key""" + + if not self._key.d: + raise KeyExportError('Key is not private') + + return b''.join((String(self._key.curve_id), + String(self._key.public_value), + MPInt(self._key.d))) + + def encode_ssh_public(self) -> bytes: + """Encode an SSH format EC public key""" + + return b''.join((String(self._key.curve_id), + String(self._key.public_value))) + + def encode_agent_cert_private(self) -> bytes: + """Encode ECDSA certificate private key data for agent""" + + if not self._key.d: + raise KeyExportError('Key is not private') + + return MPInt(self._key.d) + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Compute an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + if not self._key.private_value: + raise ValueError('Private key needed for signing') + + sig = der_decode(self._key.sign(data, self._hash_alg)) + r, s = cast(Tuple[int, int], sig) + return String(MPInt(r) + MPInt(s)) + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Verify an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + sig = packet.get_string() + packet.check_end() + + packet = SSHPacket(sig) + r = packet.get_mpint() + s = packet.get_mpint() + packet.check_end() + + return self._key.verify(data, der_encode((r, s)), self._hash_alg) + + +for _curve_id, _oid_str in ((b'nistp521', '1.3.132.0.35'), + (b'nistp384', '1.3.132.0.34'), + (b'nistp256', 
'1.2.840.10045.3.1.7'), + (b'1.3.132.0.10', '1.3.132.0.10')): + _algorithm = b'ecdsa-sha2-' + _curve_id + _cert_algorithm = _algorithm + b'-cert-v01@openssh.com' + _x509_algorithm = b'x509v3-' + _algorithm + + _oid = ObjectIdentifier(_oid_str) + _alg_oids[_curve_id] = _oid + _alg_oid_map[_oid] = _curve_id + + register_public_key_alg(_algorithm, _ECKey, True, (_algorithm,)) + register_certificate_alg(1, _algorithm, _cert_algorithm, + _ECKey, SSHOpenSSHCertificateV01, True) + register_x509_certificate_alg(_x509_algorithm, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/eddsa.py b/venv/lib/python3.12/site-packages/asyncssh/eddsa.py new file mode 100644 index 0000000..c97c33a --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/eddsa.py @@ -0,0 +1,220 @@ +# Copyright (c) 2019-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""EdDSA public key encryption handler""" + +from typing import Optional, Tuple, Union, cast + +from .asn1 import ASN1DecodeError, ObjectIdentifier, der_encode, der_decode +from .crypto import EdDSAPrivateKey, EdDSAPublicKey +from .crypto import ed25519_available, ed448_available +from .packet import String, SSHPacket +from .public_key import OMIT, SSHKey, SSHOpenSSHCertificateV01 +from .public_key import KeyImportError, KeyExportError +from .public_key import register_public_key_alg, register_certificate_alg +from .public_key import register_x509_certificate_alg + + +_PrivateKeyArgs = Tuple[bytes] +_PublicKeyArgs = Tuple[bytes] + + +class _EdKey(SSHKey): + """Handler for EdDSA public key encryption""" + + _key: Union[EdDSAPrivateKey, EdDSAPublicKey] + + algorithm = b'' + + def __eq__(self, other: object) -> bool: + # This isn't protected access - both objects are _EdKey instances + # pylint: disable=protected-access + + return (isinstance(other, type(self)) and + self._key.public_value == other._key.public_value and + self._key.private_value == other._key.private_value) + + def __hash__(self) -> int: + return hash((self._key.public_value, self._key.private_value)) + + @classmethod + def generate(cls, algorithm: bytes) -> '_EdKey': # type: ignore + """Generate a new EdDSA private key""" + + # pylint: disable=arguments-differ + + # Strip 'ssh-' prefix of algorithm to get curve_id + return cls(EdDSAPrivateKey.generate(algorithm[4:])) + + @classmethod + def make_private(cls, key_params: object) -> SSHKey: + """Construct an EdDSA private key""" + + try: + private_value, = cast(_PrivateKeyArgs, key_params) + + return cls(EdDSAPrivateKey.construct(cls.algorithm[4:], + private_value)) + except (TypeError, ValueError): + raise KeyImportError('Invalid EdDSA private key') from None + + @classmethod + def make_public(cls, key_params: object) -> SSHKey: + """Construct an EdDSA public key""" + + try: + public_value, = cast(_PublicKeyArgs, key_params) + + return cls(EdDSAPublicKey.construct(cls.algorithm[4:], + public_value)) + except 
(TypeError, ValueError): + raise KeyImportError('Invalid EdDSA public key') from None + + @classmethod + def decode_pkcs8_private(cls, alg_params: object, + data: bytes) -> Optional[_PrivateKeyArgs]: + """Decode a PKCS#8 format EdDSA private key""" + + # pylint: disable=unused-argument + + try: + return (cast(bytes, der_decode(data)),) + except ASN1DecodeError: + return None + + @classmethod + def decode_pkcs8_public(cls, alg_params: object, + data: bytes) -> Optional[_PublicKeyArgs]: + """Decode a PKCS#8 format EdDSA public key""" + + # pylint: disable=unused-argument + + return (data,) + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> _PrivateKeyArgs: + """Decode an SSH format EdDSA private key""" + + public_value = packet.get_string() + private_value = packet.get_string() + + return (private_value[:-len(public_value)],) + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> _PublicKeyArgs: + """Decode an SSH format EdDSA public key""" + + public_value = packet.get_string() + + return (public_value,) + + def encode_pkcs8_private(self) -> Tuple[object, object]: + """Encode a PKCS#8 format EdDSA private key""" + + if not self._key.private_value: + raise KeyExportError('Key is not private') + + return OMIT, der_encode(self._key.private_value) + + def encode_pkcs8_public(self) -> Tuple[object, object]: + """Encode a PKCS#8 format EdDSA public key""" + + return OMIT, self._key.public_value + + def encode_ssh_private(self) -> bytes: + """Encode an SSH format EdDSA private key""" + + if self._key.private_value is None: + raise KeyExportError('Key is not private') + + return b''.join((String(self._key.public_value), + String(self._key.private_value + + self._key.public_value))) + + def encode_ssh_public(self) -> bytes: + """Encode an SSH format EdDSA public key""" + + return String(self._key.public_value) + + def encode_agent_cert_private(self) -> bytes: + """Encode EdDSA certificate private key data for agent""" + + return self.encode_ssh_private() + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Compute an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + if not self._key.private_value: + raise ValueError('Private key needed for signing') + + return String(self._key.sign(data)) + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Verify an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + sig = packet.get_string() + packet.check_end() + + return self._key.verify(data, sig) + + +class _Ed25519Key(_EdKey): + """Handler for Curve25519 public key encryption""" + + algorithm = b'ssh-ed25519' + pkcs8_oid = ObjectIdentifier('1.3.101.112') + sig_algorithms = (algorithm,) + x509_algorithms = (b'x509v3-' + algorithm,) + all_sig_algorithms = set(sig_algorithms) + + +class _Ed448Key(_EdKey): + """Handler for Curve448 public key encryption""" + + algorithm = b'ssh-ed448' + pkcs8_oid = ObjectIdentifier('1.3.101.113') + sig_algorithms = (algorithm,) + x509_algorithms = (b'x509v3-' + algorithm,) + all_sig_algorithms = set(sig_algorithms) + + +if ed25519_available: # pragma: no branch + register_public_key_alg(b'ssh-ed25519', _Ed25519Key, True) + + register_certificate_alg(1, b'ssh-ed25519', + b'ssh-ed25519-cert-v01@openssh.com', + _Ed25519Key, SSHOpenSSHCertificateV01, True) + + for alg in _Ed25519Key.x509_algorithms: + register_x509_certificate_alg(alg, True) + +if ed448_available: # pragma: no branch + 
register_public_key_alg(b'ssh-ed448', _Ed448Key, True) + + register_certificate_alg(1, b'ssh-ed448', b'ssh-ed448-cert-v01@openssh.com', + _Ed448Key, SSHOpenSSHCertificateV01, True) + + for alg in _Ed448Key.x509_algorithms: + register_x509_certificate_alg(alg, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/editor.py b/venv/lib/python3.12/site-packages/asyncssh/editor.py new file mode 100644 index 0000000..174cae2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/editor.py @@ -0,0 +1,959 @@ +# Copyright (c) 2016-2022 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Input line editor""" + +import re + +from functools import partial +from typing import TYPE_CHECKING, Callable, Dict, List +from typing import Optional, Set, Tuple, Union, cast +from unicodedata import east_asian_width + +from .session import DataType + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHServerChannel + from .session import SSHServerSession + + +_CharDict = Dict[str, object] +_CharHandler = Callable[['SSHLineEditor'], None] +_KeyHandler = Callable[[str, int], Union[bool, Tuple[str, int]]] + + +_DEFAULT_WIDTH = 80 + +_ansi_terminals = ('ansi', 'cygwin', 'linux', 'putty', 'screen', 'teraterm', + 'cit80', 'vt100', 'vt102', 'vt220', 'vt320', 'xterm', + 'xterm-color', 'xterm-16color', 'xterm-256color', 'rxvt', + 'rxvt-color') + + +def _is_wide(ch: str) -> bool: + """Return display width of character""" + + return east_asian_width(ch) in 'WF' + + +class SSHLineEditor: + """Input line editor""" + + def __init__(self, chan: 'SSHServerChannel[str]', + session: 'SSHServerSession[str]', line_echo: bool, + history_size: int, max_line_length: int, term_type: str, + width: int): + self._chan = chan + self._session = session + self._line_echo = line_echo + self._line_pending = False + self._history_size = history_size if history_size > 0 else 0 + self._max_line_length = max_line_length + self._wrap = term_type in _ansi_terminals + self._width = width or _DEFAULT_WIDTH + self._line_mode = True + self._echo = True + self._start_column = 0 + self._end_column = 0 + self._cursor = 0 + self._left_pos = 0 + self._right_pos = 0 + self._pos = 0 + self._line = '' + self._bell_rung = False + self._early_wrap: Set[int] = set() + self._outbuf: List[str] = [] + self._keymap: _CharDict = {} + self._key_state = self._keymap + self._erased = '' + self._history: List[str] = [] + self._history_index = 0 + + for func, keys in self._keylist: + for key in keys: + self._add_key(key, func) + + self._build_printable() + + def _add_key(self, key: str, func: _CharHandler) -> None: + """Add a key to the keymap""" + + keymap = self._keymap + + for ch in key[:-1]: + if ch not in keymap: + keymap[ch] = {} + + keymap = cast(_CharDict, keymap[ch]) + + keymap[key[-1]] = func + + def _del_key(self, key: str) -> None: + """Delete a key from the keymap""" + + keymap = self._keymap + 
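+        # The keymap built by _add_key() above is a trie of nested
+        # dicts, one level per character of a multi-character key
+        # sequence. After registering the Up arrow ('\x1b[A'), for
+        # example, it looks schematically like:
+        #
+        #     {'\x1b': {'[': {'A': <handler>}}}
+        #
+        # so deleting a key descends to the innermost dict and pops
+        # only the final character, leaving other sequences that share
+        # the same prefix intact.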
+ for ch in key[:-1]: + if ch not in keymap: + return + + keymap = cast(_CharDict, keymap[ch]) + + keymap.pop(key[-1], None) + + def _build_printable(self) -> None: + """Build a regex of printable ASCII non-registered keys""" + + def _escape(c: int) -> str: + """Backslash escape special characters in regex character range""" + + ch = chr(c) + return ('\\' if (ch in '-&|[]\\^~') else '') + ch + + def _is_printable(ch: str) -> bool: + """Return if character is printable and has no handler""" + + return ch.isprintable() and ch not in keys + + pat: List[str] = [] + keys = self._keymap.keys() + start = ord(' ') + limit = 0x10000 + + while start < limit: + while start < limit and not _is_printable(chr(start)): + start += 1 + + end = start + + while _is_printable(chr(end)): + end += 1 + + pat.append(_escape(start)) + + if start != end - 1: + pat.append('-' + _escape(end - 1)) + + start = end + 1 + + self._printable = re.compile('[' + ''.join(pat) + ']*') + + def _char_width(self, pos: int) -> int: + """Return width of character at specified position""" + + return 1 + _is_wide(self._line[pos]) + ((pos + 1) in self._early_wrap) + + def _determine_column(self, data: str, column: int, + pos: Optional[int] = None) -> Tuple[str, int]: + """Determine new output column after output occurs""" + + escaped = False + offset = pos + last_wrap_pos = pos + wrapped_data = [] + + for ch in data: + if ch == '\b': + column -= 1 + elif ch == '\x1b': + escaped = True + elif escaped: + if ch == 'm': + escaped = False + else: + if _is_wide(ch) and (column % self._width) == self._width - 1: + column += 1 + + if pos is not None: + assert last_wrap_pos is not None + assert offset is not None + + wrapped_data.append(data[last_wrap_pos - offset: + pos - offset]) + last_wrap_pos = pos + + self._early_wrap.add(pos) + else: + if pos is not None: + self._early_wrap.discard(pos) + + column += 1 + _is_wide(ch) + + if pos is not None: + pos += 1 + + if pos is not None: + assert last_wrap_pos is not None + assert offset is not None + + wrapped_data.append(data[last_wrap_pos - offset:]) + return ' '.join(wrapped_data), column + else: + return data, column + + def _output(self, data: str, pos: Optional[int] = None) -> None: + """Generate output and calculate new output column""" + + idx = data.rfind('\n') + + if idx >= 0: + self._outbuf.append(data[:idx+1]) + tail = data[idx+1:] + self._cursor = 0 + else: + tail = data + + data, self._cursor = self._determine_column(tail, self._cursor, pos) + + self._outbuf.append(data) + + if self._cursor and self._cursor % self._width == 0: + self._outbuf.append(' \b') + + def _ring_bell(self) -> None: + """Ring the terminal bell""" + + if not self._bell_rung: + self._outbuf.append('\a') + self._bell_rung = True + + def _update_input_window(self, new_pos: int) -> int: + """Update visible input window when not wrapping onto multiple lines""" + + line_len = len(self._line) + + if new_pos < self._left_pos: + self._left_pos = new_pos + else: + if new_pos < line_len: + new_pos += 1 + + pos = self._pos + column = self._cursor + + while pos < new_pos: + column += self._char_width(pos) + pos += 1 + + if column >= self._width: + while column >= self._width: + column -= self._char_width(self._left_pos) + self._left_pos += 1 + else: + while self._left_pos > 0: + column += self._char_width(self._left_pos) + + if column < self._width: + self._left_pos -= 1 + else: + break + + column = self._start_column + self._right_pos = self._left_pos + + while self._right_pos < line_len: + ch_width = 
self._char_width(self._right_pos) + + if column + ch_width < self._width: + self._right_pos += 1 + column += ch_width + else: + break + + return column + + def _move_cursor(self, column: int) -> None: + """Move the cursor to selected position in input line""" + + start_row = self._cursor // self._width + start_col = self._cursor % self._width + + end_row = column // self._width + end_col = column % self._width + + if end_row < start_row: + self._outbuf.append('\x1b[' + str(start_row-end_row) + 'A') + elif end_row > start_row: + self._outbuf.append('\x1b[' + str(end_row-start_row) + 'B') + + if end_col > start_col: + self._outbuf.append('\x1b[' + str(end_col-start_col) + 'C') + elif end_col < start_col: + self._outbuf.append('\x1b[' + str(start_col-end_col) + 'D') + + self._cursor = column + + def _move_back(self, column: int) -> None: + """Move the cursor backward to selected position in input line""" + + if self._wrap: + self._move_cursor(column) + else: + self._outbuf.append('\b' * (self._cursor - column)) + self._cursor = column + + def _clear_to_end(self) -> None: + """Clear any remaining characters from previous input line""" + + column = self._cursor + remaining = self._end_column - column + + if remaining > 0: + self._outbuf.append(' ' * remaining) + self._cursor = self._end_column + + if self._cursor % self._width == 0: + self._outbuf.append(' \b') + + self._move_back(column) + self._end_column = column + + def _erase_input(self) -> None: + """Erase current input line""" + + self._move_cursor(self._start_column) + self._clear_to_end() + self._early_wrap.clear() + + def _draw_input(self) -> None: + """Draw current input line""" + + if self._line and self._echo: + if self._wrap: + self._output(self._line[:self._pos], 0) + column = self._cursor + self._output(self._line[self._pos:], self._pos) + else: + self._update_input_window(self._pos) + self._output(self._line[self._left_pos:self._pos]) + column = self._cursor + self._output(self._line[self._pos:self._right_pos]) + + self._end_column = self._cursor + self._move_back(column) + + def _reposition(self, new_pos: int, new_column: int) -> None: + """Reposition the cursor to selected position in input""" + + if self._echo: + if self._wrap: + self._move_cursor(new_column) + else: + self._update_input(self._pos, self._cursor, new_pos) + + self._pos = new_pos + + def _update_input(self, pos: int, column: int, new_pos: int) -> None: + """Update selected portion of current input line""" + + if self._echo: + if self._wrap: + if pos in self._early_wrap: + column -= 1 + + self._move_cursor(column) + prev_wrap = new_pos in self._early_wrap + self._output(self._line[pos:new_pos], pos) + column = self._cursor + self._output(self._line[new_pos:], new_pos) + column += (new_pos in self._early_wrap) - prev_wrap + else: + self._update_input_window(new_pos) + self._move_back(self._start_column) + self._output(self._line[self._left_pos:new_pos]) + column = self._cursor + self._output(self._line[new_pos:self._right_pos]) + + self._clear_to_end() + self._move_back(column) + + self._pos = new_pos + + def _reset_line(self) -> None: + """Reset input line to empty""" + + self._line = '' + self._left_pos = 0 + self._right_pos = 0 + self._pos = 0 + self._start_column = self._cursor + self._end_column = self._cursor + + def _reset_pending(self) -> None: + """Reset a pending echoed line if any""" + + if self._line_pending: + self._erase_input() + self._reset_line() + self._line_pending = False + + def _insert_printable(self, data: str) -> None: + """Insert data 
into the input line""" + + line_len = len(self._line) + data_len = len(data) + + if self._max_line_length: + if line_len + data_len > self._max_line_length: + self._ring_bell() + data_len = self._max_line_length - line_len + data = data[:data_len] + + if data: + pos = self._pos + new_pos = pos + data_len + self._line = self._line[:pos] + data + self._line[pos:] + + self._update_input(pos, self._cursor, new_pos) + + def _end_line(self) -> None: + """End the current input line and send it to the session""" + + line = self._line + + need_wrap = (self._echo and not self._wrap and + (self._left_pos > 0 or self._right_pos < len(line))) + + if self._line_echo or need_wrap: + if need_wrap: + self._output('\b' * (self._cursor - self._start_column) + line) + else: + self._move_to_end() + + self._output('\r\n') + self._reset_line() + else: + self._move_to_end() + self._line_pending = True + + if self._echo and self._history_size and line: + self._history.append(line) + self._history = self._history[-self._history_size:] + + self._history_index = len(self._history) + + self._session.data_received(line + '\n', None) + + def _eof_or_delete(self) -> None: + """Erase character to the right, or send EOF if input line is empty""" + + if not self._line: + self._session.soft_eof_received() + else: + self._erase_right() + + def _erase_left(self) -> None: + """Erase character to the left""" + + if self._pos > 0: + pos = self._pos - 1 + column = self._cursor - self._char_width(pos) + self._line = self._line[:pos] + self._line[pos+1:] + self._update_input(pos, column, pos) + else: + self._ring_bell() + + def _erase_right(self) -> None: + """Erase character to the right""" + + if self._pos < len(self._line): + pos = self._pos + self._line = self._line[:pos] + self._line[pos+1:] + self._update_input(pos, self._cursor, pos) + else: + self._ring_bell() + + def _erase_line(self) -> None: + """Erase entire input line""" + + self._erased = self._line + self._line = '' + self._update_input(0, self._start_column, 0) + + def _erase_to_end(self) -> None: + """Erase to end of input line""" + + pos = self._pos + self._erased = self._line[pos:] + self._line = self._line[:pos] + self._update_input(pos, self._cursor, pos) + + def _handle_key(self, key: str, handler: _KeyHandler) -> None: + """Call an external key handler""" + + result = handler(self._line, self._pos) + + if result is True: + if key.isprintable(): + self._insert_printable(key) + else: + self._ring_bell() + elif result is False: + self._ring_bell() + else: + line, new_pos = cast(Tuple[str, int], result) + + if new_pos < 0: + self._session.signal_received(line) + else: + self._line = line + self._update_input(0, self._start_column, new_pos) + + def _history_prev(self) -> None: + """Replace input with previous line in history""" + + if self._history_index > 0: + self._history_index -= 1 + self._line = self._history[self._history_index] + self._update_input(0, self._start_column, len(self._line)) + else: + self._ring_bell() + + def _history_next(self) -> None: + """Replace input with next line in history""" + + if self._history_index < len(self._history): + self._history_index += 1 + + if self._history_index < len(self._history): + self._line = self._history[self._history_index] + else: + self._line = '' + + self._update_input(0, self._start_column, len(self._line)) + else: + self._ring_bell() + + def _move_left(self) -> None: + """Move left in input line""" + + if self._pos > 0: + pos = self._pos - 1 + column = self._cursor - self._char_width(pos) + 
self._reposition(pos, column) + else: + self._ring_bell() + + def _move_right(self) -> None: + """Move right in input line""" + + if self._pos < len(self._line): + pos = self._pos + column = self._cursor + self._char_width(pos) + self._reposition(pos + 1, column) + else: + self._ring_bell() + + def _move_to_start(self) -> None: + """Move to start of input line""" + + self._reposition(0, self._start_column) + + def _move_to_end(self) -> None: + """Move to end of input line""" + + self._reposition(len(self._line), self._end_column) + + def _redraw(self) -> None: + """Redraw input line""" + + self._erase_input() + self._draw_input() + + def _insert_erased(self) -> None: + """Insert previously erased input""" + + self._insert_printable(self._erased) + + def _send_break(self) -> None: + """Send break to session""" + + self._session.break_received(0) + + _keylist = ((_end_line, ('\n', '\r', '\x1bOM')), + (_eof_or_delete, ('\x04',)), + (_erase_left, ('\x08', '\x7f')), + (_erase_right, ('\x1b[3~',)), + (_erase_line, ('\x15',)), + (_erase_to_end, ('\x0b',)), + (_history_prev, ('\x10', '\x1b[A', '\x1bOA')), + (_history_next, ('\x0e', '\x1b[B', '\x1bOB')), + (_move_left, ('\x02', '\x1b[D', '\x1bOD')), + (_move_right, ('\x06', '\x1b[C', '\x1bOC')), + (_move_to_start, ('\x01', '\x1b[H', '\x1b[1~')), + (_move_to_end, ('\x05', '\x1b[F', '\x1b[4~')), + (_redraw, ('\x12',)), + (_insert_erased, ('\x19',)), + (_send_break, ('\x03', '\x1b[33~'))) + + def register_key(self, key: str, handler: _KeyHandler) -> None: + """Register a handler to be called when a key is pressed""" + + self._add_key(key, partial(SSHLineEditor._handle_key, + key=key, handler=handler)) + self._build_printable() + + def unregister_key(self, key: str) -> None: + """Remove the handler associated with a key""" + + self._del_key(key) + self._build_printable() + + def set_input(self, line: str, pos: int) -> None: + """Set input line and cursor position""" + + self._reset_pending() + + self._line = line + self._update_input(0, self._start_column, pos) + + def set_line_mode(self, line_mode: bool) -> None: + """Enable/disable input line editing""" + + self._reset_pending() + + if self._line and not line_mode: + data = self._line + self._erase_input() + self._line = '' + + self._session.data_received(data, None) + + self._line_mode = line_mode + + def set_echo(self, echo: bool) -> None: + """Enable/disable echoing of input in line mode""" + + self._reset_pending() + + if self._echo and not echo: + self._erase_input() + self._echo = False + elif echo and not self._echo: + self._echo = True + self._draw_input() + + def set_width(self, width: int) -> None: + """Set terminal line width""" + + self._reset_pending() + + self._width = width or _DEFAULT_WIDTH + + if self._wrap: + _, self._cursor = self._determine_column(self._line, + self._start_column, 0) + + self._redraw() + + def process_input(self, data: str, datatype: DataType) -> None: + """Process input from channel""" + + if self._line_mode: + data_len = len(data) + idx = 0 + + while idx < data_len: + self._reset_pending() + + ch = data[idx] + idx += 1 + + if ch in self._key_state: + key_state = self._key_state[ch] + + if callable(key_state): + try: + cast(_CharHandler, key_state)(self) + finally: + self._key_state = self._keymap + else: + self._key_state = cast(_CharDict, key_state) + elif self._key_state == self._keymap and ch.isprintable(): + match = self._printable.match(data, idx - 1) + + assert match is not None + match = match[0] + + if match: + self._insert_printable(match) + idx += 
len(match) - 1 + else: + self._insert_printable(ch) + else: + self._key_state = self._keymap + self._ring_bell() + + self._bell_rung = False + + if self._outbuf: + self._chan.write(''.join(self._outbuf)) + self._outbuf.clear() + else: + self._session.data_received(data, datatype) + + def process_output(self, data: str) -> None: + """Process output to channel""" + + if self._line_pending: + if data.startswith(self._line): + self._start_column = self._cursor + data = data[len(self._line):] + else: + self._erase_input() + + self._reset_line() + self._line_pending = False + + data = data.replace('\n', '\r\n') + + self._erase_input() + self._output(data) + + if not self._wrap: + self._cursor %= self._width + + self._start_column = self._cursor + self._end_column = self._cursor + self._draw_input() + + self._chan.write(''.join(self._outbuf)) + self._outbuf.clear() + + +class SSHLineEditorChannel: + """Input line editor channel wrapper + + When creating server channels with `line_editor` set to `True`, + this class is wrapped around the channel, providing the caller with + the ability to enable and disable input line editing and echoing. + + .. note:: Line editing is only available when a pseudo-terminal + is requested on the server channel and the character + encoding on the channel is not set to `None`. + + """ + + def __init__(self, orig_chan: 'SSHServerChannel[str]', + orig_session: 'SSHServerSession[str]', line_echo: bool, + history_size: int, max_line_length: int): + self._orig_chan = orig_chan + self._orig_session = orig_session + self._line_echo = line_echo + self._history_size = history_size + self._max_line_length = max_line_length + self._editor: Optional[SSHLineEditor] = None + + def __getattr__(self, attr: str): + """Delegate most channel functions to original channel""" + + return getattr(self._orig_chan, attr) + + def create_editor(self) -> Optional[SSHLineEditor]: + """Create input line editor if encoding and terminal type are set""" + + encoding, _ = self._orig_chan.get_encoding() + term_type = self._orig_chan.get_terminal_type() + width = self._orig_chan.get_terminal_size()[0] + + if encoding and term_type: + self._editor = SSHLineEditor( + self._orig_chan, self._orig_session, self._line_echo, + self._history_size, self._max_line_length, term_type, width) + + return self._editor + + def register_key(self, key: str, handler: _KeyHandler) -> None: + """Register a handler to be called when a key is pressed + + This method registers a handler function which will be called + when a user presses the specified key while inputting a line. + + The handler will be called with arguments of the current + input line and cursor position, and updated versions of these + two values should be returned as a tuple. + + The handler can also return a tuple of a signal name and + negative cursor position to cause a signal to be delivered + on the channel. In this case, the current input line is left + unchanged but the signal is delivered before processing any + additional input. This can be used to define "hot keys" that + trigger actions unrelated to editing the input. + + If the registered key is printable text, returning `True` will + insert that text at the current cursor position, acting as if + no handler was registered for that key. This is useful if you + want to perform a special action in some cases but not others, + such as based on the current cursor position. 
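+
+        As a hypothetical illustration (not part of the library's
+        defaults), a handler bound to Tab that inserts a completion at
+        the current cursor position could look like:
+
+            def complete(line: str, pos: int) -> Tuple[str, int]:
+                text = 'completed'
+                return line[:pos] + text + line[pos:], pos + len(text)
+
+            channel.register_key('\t', complete)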
+
+        Returning `False` will ring the bell and leave the input
+        unchanged, indicating the requested action could not be
+        performed.
+
+        :param key:
+            The key sequence to look for
+        :param handler:
+            The handler function to call when the key is pressed
+        :type key: `str`
+        :type handler: `callable`
+
+        """
+
+        assert self._editor is not None
+        self._editor.register_key(key, handler)
+
+    def unregister_key(self, key: str) -> None:
+        """Remove the handler associated with a key
+
+        This method removes a handler function associated with
+        the specified key. If the key sequence is printable,
+        this will cause it to return to being inserted at the
+        current position when pressed. Otherwise, it will cause
+        the bell to ring to signal the key is not understood.
+
+        :param key:
+            The key sequence to look for
+        :type key: `str`
+
+        """
+
+        assert self._editor is not None
+        self._editor.unregister_key(key)
+
+    def clear_input(self) -> None:
+        """Clear input line
+
+        This method clears the current input line.
+
+        """
+
+        assert self._editor is not None
+        self._editor.set_input('', 0)
+
+    def set_input(self, line: str, pos: int) -> None:
+        """Set input line
+
+        This method sets the current input line and cursor position.
+
+        :param line:
+            The new input line
+        :param pos:
+            The new cursor position within the input line
+        :type line: `str`
+        :type pos: `int`
+
+        """
+
+        assert self._editor is not None
+        self._editor.set_input(line, pos)
+
+    def set_line_mode(self, line_mode: bool) -> None:
+        """Enable/disable input line editing
+
+        This method enables or disables input line editing. When set,
+        only full lines of input are sent to the session, and each
+        line of input can be edited before it is sent.
+
+        :param line_mode:
+            Whether or not to process input a line at a time
+        :type line_mode: `bool`
+
+        """
+
+        self._orig_chan.logger.info('%s line editor',
+                                    'Enabling' if line_mode else 'Disabling')
+
+        assert self._editor is not None
+        self._editor.set_line_mode(line_mode)
+
+    def set_echo(self, echo: bool) -> None:
+        """Enable/disable echoing of input in line mode
+
+        This method enables or disables echoing of input data when
+        input line editing is enabled.
+
+        :param echo:
+            Whether or not to echo input as it is entered
+        :type echo: `bool`
+
+        """
+
+        self._orig_chan.logger.info('%s echo',
+                                    'Enabling' if echo else 'Disabling')
+
+        assert self._editor is not None
+        self._editor.set_echo(echo)
+
+    def write(self, data: str, datatype: DataType = None) -> None:
+        """Process data written to the channel"""
+
+        if self._editor and datatype is None:
+            self._editor.process_output(data)
+        else:
+            self._orig_chan.write(data, datatype)
+
+
+class SSHLineEditorSession:
+    """Input line editor session wrapper"""
+
+    def __init__(self, chan: SSHLineEditorChannel,
+                 orig_session: 'SSHServerSession[str]'):
+        self._chan = chan
+        self._orig_session = orig_session
+        self._editor: Optional[SSHLineEditor] = None
+
+    def __getattr__(self, attr: str):
+        """Delegate most session functions to original session"""
+
+        return getattr(self._orig_session, attr)
+
+    def session_started(self) -> None:
+        """Start a session for this newly opened server channel"""
+
+        self._editor = self._chan.create_editor()
+        self._orig_session.session_started()
+
+    def terminal_size_changed(self, width: int, height: int,
+                              pixwidth: int, pixheight: int) -> None:
+        """The terminal size has changed"""
+
+        if self._editor:
+            self._editor.set_width(width)
+
+        self._orig_session.terminal_size_changed(width, height,
+                                                 pixwidth, pixheight)
+
+    def data_received(self, data: str, datatype: DataType) -> None:
+        """Process data received from the channel"""
+
+        if self._editor:
+            self._editor.process_input(data, datatype)
+        else:
+            self._orig_session.data_received(data, datatype)
+
+    def eof_received(self) -> Optional[bool]:
+        """Process EOF received from the channel"""
+
+        if self._editor:
+            self._editor.set_line_mode(False)
+
+        return self._orig_session.eof_received()
diff --git a/venv/lib/python3.12/site-packages/asyncssh/encryption.py b/venv/lib/python3.12/site-packages/asyncssh/encryption.py
new file mode 100644
index 0000000..feb9840
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/encryption.py
@@ -0,0 +1,318 @@
+# Copyright (c) 2013-2021 by Ron Frederick and others.
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Symmetric key encryption handlers""" + +from typing import Dict, List, Optional, Tuple, Type + +from .crypto import BasicCipher, GCMCipher, ChachaCipher, get_cipher_params +from .mac import MAC, get_mac_params, get_mac +from .packet import UInt64 + + +_EncParams = Tuple[int, int, int, int, int, bool] +_EncParamsMap = Dict[bytes, Tuple[Type['Encryption'], str]] + + +_enc_algs: List[bytes] = [] +_default_enc_algs: List[bytes] = [] +_enc_params: _EncParamsMap = {} + + +class Encryption: + """Parent class for SSH packet encryption objects""" + + @classmethod + def new(cls, cipher_name: str, key: bytes, iv: bytes, mac_alg: bytes = b'', + mac_key: bytes = b'', etm: bool = False) -> 'Encryption': + """Construct a new SSH packet encryption object""" + + raise NotImplementedError + + @classmethod + def get_mac_params(cls, mac_alg: bytes) -> Tuple[int, int, bool]: + """Get parameters of the MAC algorithm used with this encryption""" + + return get_mac_params(mac_alg) + + def encrypt_packet(self, seq: int, header: bytes, + packet: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign an SSH packet""" + + raise NotImplementedError + + def decrypt_header(self, seq: int, first_block: bytes, + header_len: int) -> Tuple[bytes, bytes]: + """Decrypt an SSH packet header""" + + raise NotImplementedError + + def decrypt_packet(self, seq: int, first: bytes, rest: bytes, + header_len: int, mac: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt an SSH packet""" + + raise NotImplementedError + + +class BasicEncryption(Encryption): + """Shim for basic encryption""" + + def __init__(self, cipher: BasicCipher, mac: MAC): + self._cipher = cipher + self._mac = mac + + @classmethod + def new(cls, cipher_name: str, key: bytes, iv: bytes, mac_alg: bytes = b'', + mac_key: bytes = b'', etm: bool = False) -> 'BasicEncryption': + """Construct a new SSH packet encryption object for basic ciphers""" + + cipher = BasicCipher(cipher_name, key, iv) + mac = get_mac(mac_alg, mac_key) + + if etm: + return ETMEncryption(cipher, mac) + else: + return cls(cipher, mac) + + def encrypt_packet(self, seq: int, header: bytes, + packet: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign an SSH packet""" + + packet = header + packet + mac = self._mac.sign(seq, packet) if self._mac else b'' + + return self._cipher.encrypt(packet), mac + + def decrypt_header(self, seq: int, first_block: bytes, + header_len: int) -> Tuple[bytes, bytes]: + """Decrypt an SSH packet header""" + + first_block = self._cipher.decrypt(first_block) + + return first_block, first_block[:header_len] + + def decrypt_packet(self, seq: int, first: bytes, rest: bytes, + header_len: int, mac: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt an SSH packet""" + + packet = first + self._cipher.decrypt(rest) + + if self._mac.verify(seq, packet, mac): + return packet[header_len:] + else: + return 
None + + +class ETMEncryption(BasicEncryption): + """Shim for encrypt-then-mac encryption""" + + def encrypt_packet(self, seq: int, header: bytes, + packet: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign an SSH packet""" + + packet = header + self._cipher.encrypt(packet) + return packet, self._mac.sign(seq, packet) + + def decrypt_header(self, seq: int, first_block: bytes, + header_len: int) -> Tuple[bytes, bytes]: + """Decrypt an SSH packet header""" + + return first_block, first_block[:header_len] + + def decrypt_packet(self, seq: int, first: bytes, rest: bytes, + header_len: int, mac: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt an SSH packet""" + + packet = first + rest + + if self._mac.verify(seq, packet, mac): + return self._cipher.decrypt(packet[header_len:]) + else: + return None + + +class GCMEncryption(Encryption): + """Shim for GCM encryption""" + + def __init__(self, cipher: GCMCipher): + self._cipher = cipher + + @classmethod + def new(cls, cipher_name: str, key: bytes, iv: bytes, mac_alg: bytes = b'', + mac_key: bytes = b'', etm: bool = False) -> 'GCMEncryption': + """Construct a new SSH packet encryption object for GCM ciphers""" + + return cls(GCMCipher(cipher_name, key, iv)) + + @classmethod + def get_mac_params(cls, mac_alg: bytes) -> Tuple[int, int, bool]: + """Get parameters of the MAC algorithm used with this encryption""" + + return 0, 16, True + + def encrypt_packet(self, seq: int, header: bytes, + packet: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign an SSH packet""" + + return self._cipher.encrypt_and_sign(header, packet) + + def decrypt_header(self, seq: int, first_block: bytes, + header_len: int) -> Tuple[bytes, bytes]: + """Decrypt an SSH packet header""" + + return first_block, first_block[:header_len] + + def decrypt_packet(self, seq: int, first: bytes, rest: bytes, + header_len: int, mac: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt an SSH packet""" + + return self._cipher.verify_and_decrypt(first[:header_len], + first[header_len:] + rest, mac) + + +class ChachaEncryption(Encryption): + """Shim for chacha20-poly1305 encryption""" + + def __init__(self, cipher: ChachaCipher): + self._cipher = cipher + + @classmethod + def new(cls, cipher_name: str, key: bytes, iv: bytes, mac_alg: bytes = b'', + mac_key: bytes = b'', etm: bool = False) -> 'ChachaEncryption': + """Construct a new SSH packet encryption object for Chacha ciphers""" + + return cls(ChachaCipher(key)) + + @classmethod + def get_mac_params(cls, mac_alg: bytes) -> Tuple[int, int, bool]: + """Get parameters of the MAC algorithm used with this encryption""" + + return 0, 16, True + + def encrypt_packet(self, seq: int, header: bytes, + packet: bytes) -> Tuple[bytes, bytes]: + """Encrypt and sign an SSH packet""" + + return self._cipher.encrypt_and_sign(header, packet, UInt64(seq)) + + def decrypt_header(self, seq: int, first_block: bytes, + header_len: int) -> Tuple[bytes, bytes]: + """Decrypt an SSH packet header""" + + return (first_block, + self._cipher.decrypt_header(first_block[:header_len], + UInt64(seq))) + + def decrypt_packet(self, seq: int, first: bytes, rest: bytes, + header_len: int, mac: bytes) -> Optional[bytes]: + """Verify the signature of and decrypt an SSH packet""" + + return self._cipher.verify_and_decrypt(first[:header_len], + first[header_len:] + rest, + UInt64(seq), mac) + + +def register_encryption_alg(enc_alg: bytes, encryption: Type[Encryption], + cipher_name: str, default: bool) -> None: + """Register an encryption 
algorithm""" + + try: + get_cipher_params(cipher_name) + except KeyError: + pass + else: + _enc_algs.append(enc_alg) + + if default: + _default_enc_algs.append(enc_alg) + + _enc_params[enc_alg] = (encryption, cipher_name) + + +def get_encryption_algs() -> List[bytes]: + """Return supported encryption algorithms""" + + return _enc_algs + + +def get_default_encryption_algs() -> List[bytes]: + """Return default encryption algorithms""" + + return _default_enc_algs + + +def get_encryption_params(enc_alg: bytes, + mac_alg: bytes = b'') -> _EncParams: + """Get parameters of an encryption and MAC algorithm""" + + encryption, cipher_name = _enc_params[enc_alg] + enc_keysize, enc_ivsize, enc_blocksize = get_cipher_params(cipher_name) + mac_keysize, mac_hashsize, etm = encryption.get_mac_params(mac_alg) + + return (enc_keysize, enc_ivsize, enc_blocksize, + mac_keysize, mac_hashsize, etm) + + +def get_encryption(enc_alg: bytes, key: bytes, iv: bytes, mac_alg: bytes = b'', + mac_key: bytes = b'', etm: bool = False) -> Encryption: + """Return an object which can encrypt and decrypt SSH packets""" + + encryption, cipher_name = _enc_params[enc_alg] + + return encryption.new(cipher_name, key, iv, mac_alg, mac_key, etm) + + +_enc_alg_list = ( + (b'chacha20-poly1305@openssh.com', ChachaEncryption, + 'chacha20-poly1305', True), + (b'aes256-gcm@openssh.com', GCMEncryption, + 'aes256-gcm', True), + (b'aes128-gcm@openssh.com', GCMEncryption, + 'aes128-gcm', True), + (b'aes256-ctr', BasicEncryption, + 'aes256-ctr', True), + (b'aes192-ctr', BasicEncryption, + 'aes192-ctr', True), + (b'aes128-ctr', BasicEncryption, + 'aes128-ctr', True), + (b'aes256-cbc', BasicEncryption, + 'aes256-cbc', False), + (b'aes192-cbc', BasicEncryption, + 'aes192-cbc', False), + (b'aes128-cbc', BasicEncryption, + 'aes128-cbc', False), + (b'3des-cbc', BasicEncryption, + 'des3-cbc', False), + (b'blowfish-cbc', BasicEncryption, + 'blowfish-cbc', False), + (b'cast128-cbc', BasicEncryption, + 'cast128-cbc', False), + (b'seed-cbc@ssh.com', BasicEncryption, + 'seed-cbc', False), + (b'arcfour256', BasicEncryption, + 'arcfour256', False), + (b'arcfour128', BasicEncryption, + 'arcfour128', False), + (b'arcfour', BasicEncryption, + 'arcfour', False) +) + +for _enc_alg_args in _enc_alg_list: + register_encryption_alg(*_enc_alg_args) diff --git a/venv/lib/python3.12/site-packages/asyncssh/forward.py b/venv/lib/python3.12/site-packages/asyncssh/forward.py new file mode 100644 index 0000000..b3896c0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/forward.py @@ -0,0 +1,216 @@ +# Copyright (c) 2013-2023 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH port forwarding handlers""" + +import asyncio +import socket +from typing import TYPE_CHECKING, Awaitable, Callable, Optional, cast + +from .misc import ChannelOpenError, SockAddr + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .connection import SSHConnection + + +SSHForwarderCoro = Callable[..., Awaitable] + + +class SSHForwarder(asyncio.BaseProtocol): + """SSH port forwarding connection handler""" + + def __init__(self, peer: Optional['SSHForwarder'] = None): + self._peer = peer + self._transport: Optional[asyncio.Transport] = None + self._inpbuf = b'' + self._eof_received = False + + if peer: + peer.set_peer(self) + + def set_peer(self, peer: 'SSHForwarder') -> None: + """Set the peer forwarder to exchange data with""" + + self._peer = peer + + def write(self, data: bytes) -> None: + """Write data to the transport""" + + assert self._transport is not None + self._transport.write(data) + + def write_eof(self) -> None: + """Write end of file to the transport""" + + assert self._transport is not None + + try: + self._transport.write_eof() + except OSError: # pragma: no cover + pass + + def was_eof_received(self) -> bool: + """Return whether end of file has been received or not""" + + return self._eof_received + + def pause_reading(self) -> None: + """Pause reading from the transport""" + + assert self._transport is not None + self._transport.pause_reading() + + def resume_reading(self) -> None: + """Resume reading on the transport""" + + assert self._transport is not None + self._transport.resume_reading() + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle a newly opened connection""" + + self._transport = cast(Optional['asyncio.Transport'], transport) + + sock = cast(socket.socket, transport.get_extra_info('socket')) + + if sock and sock.family in {socket.AF_INET, socket.AF_INET6}: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Handle an incoming connection close""" + + # pylint: disable=unused-argument + + self.close() + + def session_started(self) -> None: + """Handle session start""" + + def data_received(self, data: bytes, + datatype: Optional[int] = None) -> None: + """Handle incoming data from the transport""" + + # pylint: disable=unused-argument + + if self._peer: + self._peer.write(data) + else: + self._inpbuf += data + + def eof_received(self) -> bool: + """Handle an incoming end of file from the transport""" + + self._eof_received = True + + if self._peer: + self._peer.write_eof() + + return not self._peer.was_eof_received() + else: + return True + + def pause_writing(self) -> None: + """Pause writing by asking peer to pause reading""" + + if self._peer: # pragma: no branch + self._peer.pause_reading() + + def resume_writing(self) -> None: + """Resume writing by asking peer to resume reading""" 
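+        # Flow control propagates across the forwarder pair: when this
+        # side's transport drains, resume reading on the peer that
+        # feeds it (pause_writing above does the reverse). A minimal
+        # sketch of the pairing, with transports omitted and names
+        # purely illustrative:
+        #
+        #     src = SSHForwarder()
+        #     dst = SSHForwarder(peer=src)   # cross-links both ends
+        #     src.data_received(b'payload')  # relayed via dst.write()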
+ + if self._peer: # pragma: no branch + self._peer.resume_reading() + + def close(self) -> None: + """Close this port forwarder""" + + if self._transport: + self._transport.close() + self._transport = None + + if self._peer: + peer = self._peer + self._peer = None + peer.close() + + +class SSHLocalForwarder(SSHForwarder): + """Local forwarding connection handler""" + + def __init__(self, conn: 'SSHConnection', coro: SSHForwarderCoro): + super().__init__() + self._conn = conn + self._coro = coro + + async def _forward(self, *args: object) -> None: + """Begin local forwarding""" + + def session_factory() -> SSHForwarder: + """Return an SSH forwarder""" + + return SSHForwarder(self) + + try: + await self._coro(session_factory, *args) + except ChannelOpenError as exc: + self.connection_lost(exc) + return + + assert self._peer is not None + + if self._inpbuf: + self._peer.write(self._inpbuf) + self._inpbuf = b'' + + if self._eof_received: + self._peer.write_eof() + + def forward(self, *args: object) -> None: + """Start a task to begin local forwarding""" + + self._conn.create_task(self._forward(*args)) + + +class SSHLocalPortForwarder(SSHLocalForwarder): + """Local TCP port forwarding connection handler""" + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle a newly opened connection""" + + super().connection_made(transport) + + peername = cast(SockAddr, transport.get_extra_info('peername')) + + if peername: # pragma: no branch + orig_host, orig_port = peername[:2] + + self.forward(orig_host, orig_port) + + +class SSHLocalPathForwarder(SSHLocalForwarder): + """Local UNIX domain socket forwarding connection handler""" + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle a newly opened connection""" + + super().connection_made(transport) + self.forward() diff --git a/venv/lib/python3.12/site-packages/asyncssh/gss.py b/venv/lib/python3.12/site-packages/asyncssh/gss.py new file mode 100644 index 0000000..86e92e1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/gss.py @@ -0,0 +1,63 @@ +# Copyright (c) 2017-2023 by Ron Frederick and others. 
+#
+# This program and the accompanying materials are made available under
+# the terms of the Eclipse Public License v2.0 which accompanies this
+# distribution and is available at:
+#
+#     http://www.eclipse.org/legal/epl-2.0/
+#
+# This program may also be made available under the following secondary
+# licenses when the conditions for such availability set forth in the
+# Eclipse Public License v2.0 are satisfied:
+#
+#    GNU General Public License, Version 2.0, or any later versions of
+#    that license
+#
+# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
+#
+# Contributors:
+#     Ron Frederick - initial implementation, API, and documentation

+"""GSSAPI wrapper"""
+
+import sys
+
+from typing import Optional
+
+try:
+    # pylint: disable=unused-import
+
+    if sys.platform == 'win32':  # pragma: no cover
+        from .gss_win32 import GSSBase, GSSClient, GSSServer, GSSError
+    else:
+        from .gss_unix import GSSBase, GSSClient, GSSServer, GSSError
+
+    gss_available = True
+except ImportError:  # pragma: no cover
+    gss_available = False
+
+    class GSSError(ValueError):  # type: ignore
+        """Stub class for reporting that GSS is not available"""
+
+        def __init__(self, maj_code: int, min_code: int,
+                     token: Optional[bytes] = None):
+            super().__init__('GSS not available')
+
+            self.maj_code = maj_code
+            self.min_code = min_code
+            self.token = token
+
+    class GSSBase:  # type: ignore
+        """Base class for reporting that GSS is not available"""
+
+    class GSSClient(GSSBase):  # type: ignore
+        """Stub client class for reporting that GSS is not available"""
+
+        def __init__(self, _host: str, _delegate_creds: bool):
+            raise GSSError(0, 0)
+
+    class GSSServer(GSSBase):  # type: ignore
+        """Stub server class for reporting that GSS is not available"""
+
+        def __init__(self, _host: str):
+            raise GSSError(0, 0)
diff --git a/venv/lib/python3.12/site-packages/asyncssh/gss_unix.py b/venv/lib/python3.12/site-packages/asyncssh/gss_unix.py
new file mode 100644
index 0000000..66d3c96
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/gss_unix.py
@@ -0,0 +1,180 @@
+# Copyright (c) 2017-2021 by Ron Frederick and others.
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""GSSAPI wrapper for UNIX""" + +from typing import Optional, Sequence, SupportsBytes, cast + +from gssapi import Credentials, Name, NameType, OID +from gssapi import RequirementFlag, SecurityContext +from gssapi.exceptions import GSSError + +from .asn1 import OBJECT_IDENTIFIER + + +def _mech_to_oid(mech: OID) -> bytes: + """Return a DER-encoded OID corresponding to the requested GSS mechanism""" + + mech_bytes = bytes(cast(SupportsBytes, mech)) + return bytes((OBJECT_IDENTIFIER, len(mech_bytes))) + mech_bytes + + +class GSSBase: + """GSS base class""" + + def __init__(self, host: str): + if '@' in host: + self._host = Name(host) + else: + self._host = Name('host@' + host, NameType.hostbased_service) + + self._mechs = [_mech_to_oid(mech) for mech in self._creds.mechs] + self._ctx: Optional[SecurityContext] = None + + @property + def _creds(self) -> Credentials: + """Abstract method to construct GSS credentials""" + + raise NotImplementedError + + def _init_context(self) -> None: + """Abstract method to construct GSS security context""" + + raise NotImplementedError + + @property + def mechs(self) -> Sequence[bytes]: + """Return GSS mechanisms available for this host""" + + return self._mechs + + @property + def complete(self) -> bool: + """Return whether or not GSS negotiation is complete""" + + return self._ctx.complete if self._ctx else False + + @property + def provides_mutual_auth(self) -> bool: + """Return whether or not this context provides mutual authentication""" + + assert self._ctx is not None + + return bool(self._ctx.actual_flags & + RequirementFlag.mutual_authentication) + + @property + def provides_integrity(self) -> bool: + """Return whether or not this context provides integrity protection""" + + assert self._ctx is not None + + return bool(self._ctx.actual_flags & RequirementFlag.integrity) + + @property + def user(self) -> str: + """Return user principal associated with this context""" + + assert self._ctx is not None + + return str(self._ctx.initiator_name) + + @property + def host(self) -> str: + """Return host principal associated with this context""" + + assert self._ctx is not None + + return str(self._ctx.target_name) + + def reset(self) -> None: + """Reset GSS security context""" + + self._ctx = None + + def step(self, token: Optional[bytes] = None) -> Optional[bytes]: + """Perform next step in GSS security exchange""" + + if not self._ctx: + self._init_context() + + assert self._ctx is not None + + return self._ctx.step(token) + + def sign(self, data: bytes) -> bytes: + """Sign a block of data""" + + assert self._ctx is not None + + return self._ctx.get_signature(data) + + def verify(self, data: bytes, sig: bytes) -> bool: + """Verify a signature for a block of data""" + + assert self._ctx is not None + + try: + self._ctx.verify_signature(data, sig) + return True + except GSSError: + return False + + +class 
GSSClient(GSSBase): + """GSS client""" + + def __init__(self, host: str, delegate_creds: bool): + super().__init__(host) + + flags = RequirementFlag.mutual_authentication | \ + RequirementFlag.integrity + + if delegate_creds: + flags |= RequirementFlag.delegate_to_peer + + self._flags = flags + + @property + def _creds(self) -> Credentials: + """Abstract method to construct GSS credentials""" + + return Credentials(usage='initiate') + + def _init_context(self) -> None: + """Construct GSS client security context""" + + self._ctx = SecurityContext(name=self._host, creds=self._creds, + flags=self._flags) + + +class GSSServer(GSSBase): + """GSS server""" + + @property + def _creds(self) -> Credentials: + """Abstract method to construct GSS credentials""" + + return Credentials(name=self._host, usage='accept') + + def _init_context(self) -> None: + """Construct GSS server security context""" + + self._ctx = SecurityContext(creds=self._creds) diff --git a/venv/lib/python3.12/site-packages/asyncssh/gss_win32.py b/venv/lib/python3.12/site-packages/asyncssh/gss_win32.py new file mode 100644 index 0000000..8c68ba4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/gss_win32.py @@ -0,0 +1,202 @@ +# Copyright (c) 2017-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""GSSAPI wrapper for Windows""" + +# Some of the imports below won't be found when running pylint on UNIX +# pylint: disable=import-error + +from typing import Optional, Sequence, Union + +from sspi import ClientAuth, ServerAuth +from sspi import error as SSPIError + +from sspicon import ISC_REQ_DELEGATE, ISC_REQ_INTEGRITY, ISC_REQ_MUTUAL_AUTH +from sspicon import ISC_RET_INTEGRITY, ISC_RET_MUTUAL_AUTH +from sspicon import ASC_REQ_INTEGRITY, ASC_REQ_MUTUAL_AUTH +from sspicon import ASC_RET_INTEGRITY, ASC_RET_MUTUAL_AUTH +from sspicon import SECPKG_ATTR_NATIVE_NAMES + +from .asn1 import ObjectIdentifier, der_encode + + +_krb5_oid = der_encode(ObjectIdentifier('1.2.840.113554.1.2.2')) + + +class GSSBase: + """GSS base class""" + + # Overridden in child classes + _mutual_auth_flag = 0 + _integrity_flag = 0 + + def __init__(self, host: str): + if '@' in host: + self._host = host + else: + self._host = 'host/' + host + + self._ctx: Optional[Union[ClientAuth, ServerAuth]] = None + self._init_token: Optional[bytes] = None + + @property + def mechs(self) -> Sequence[bytes]: + """Return GSS mechanisms available for this host""" + + return [_krb5_oid] + + @property + def complete(self) -> bool: + """Return whether or not GSS negotiation is complete""" + + assert self._ctx is not None + + return self._ctx.authenticated + + @property + def provides_mutual_auth(self) -> bool: + """Return whether or not this context provides mutual authentication""" + + assert self._ctx is not None + + return bool(self._ctx.ctxt_attr & self._mutual_auth_flag) + + @property + def provides_integrity(self) -> bool: + """Return 
whether or not this context provides integrity protection""" + + assert self._ctx is not None + + return bool(self._ctx.ctxt_attr & self._integrity_flag) + + @property + def user(self) -> str: + """Return user principal associated with this context""" + + assert self._ctx is not None + + names = self._ctx.ctxt.QueryContextAttributes(SECPKG_ATTR_NATIVE_NAMES) + return names[0] + + @property + def host(self) -> str: + """Return host principal associated with this context""" + + assert self._ctx is not None + + names = self._ctx.ctxt.QueryContextAttributes(SECPKG_ATTR_NATIVE_NAMES) + return names[1] + + def reset(self) -> None: + """Reset GSS security context""" + + assert self._ctx is not None + + self._ctx.reset() + self._init_token = None + + def step(self, token: Optional[bytes] = None) -> Optional[bytes]: + """Perform next step in GSS security exchange""" + + assert self._ctx is not None + + if self._init_token: + token = self._init_token + self._init_token = None + return token + + try: + _, buf = self._ctx.authorize(token) + return buf[0].Buffer + except SSPIError as exc: + raise GSSError(details=exc.strerror) from None + + def sign(self, data: bytes) -> bytes: + """Sign a block of data""" + + assert self._ctx is not None + + try: + return self._ctx.sign(data) + except SSPIError as exc: + raise GSSError(details=exc.strerror) from None + + def verify(self, data: bytes, sig: bytes) -> bool: + """Verify a signature for a block of data""" + + assert self._ctx is not None + + try: + self._ctx.verify(data, sig) + return True + except SSPIError: + return False + + +class GSSClient(GSSBase): + """GSS client""" + + _mutual_auth_flag = ISC_RET_MUTUAL_AUTH + _integrity_flag = ISC_RET_INTEGRITY + + def __init__(self, host: str, delegate_creds: bool): + super().__init__(host) + + flags = ISC_REQ_MUTUAL_AUTH | ISC_REQ_INTEGRITY + + if delegate_creds: + flags |= ISC_REQ_DELEGATE + + try: + self._ctx = ClientAuth('Kerberos', targetspn=self._host, + scflags=flags) + except SSPIError as exc: + raise GSSError(1, 1, details=exc.strerror) from None + + self._init_token = self.step(None) + + +class GSSServer(GSSBase): + """GSS server""" + + _mutual_auth_flag = ASC_RET_MUTUAL_AUTH + _integrity_flag = ASC_RET_INTEGRITY + + def __init__(self, host: str): + super().__init__(host) + + flags = ASC_REQ_MUTUAL_AUTH | ASC_REQ_INTEGRITY + + try: + self._ctx = ServerAuth('Kerberos', spn=self._host, scflags=flags) + except SSPIError as exc: + raise GSSError(1, 1, details=exc.strerror) from None + + +class GSSError(Exception): + """Class for reporting GSS errors""" + + def __init__(self, maj_code: int = 0, min_code: int = 0, + token: Optional[bytes] = None, details: str = ''): + super().__init__(details) + + self.maj_code = maj_code + self.min_code = min_code + self.token = token diff --git a/venv/lib/python3.12/site-packages/asyncssh/kex.py b/venv/lib/python3.12/site-packages/asyncssh/kex.py new file mode 100644 index 0000000..b1c14e2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/kex.py @@ -0,0 +1,159 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
+#
+# This program and the accompanying materials are made available under
+# the terms of the Eclipse Public License v2.0 which accompanies this
+# distribution and is available at:
+#
+#     http://www.eclipse.org/legal/epl-2.0/
+#
+# This program may also be made available under the following secondary
+# licenses when the conditions for such availability set forth in the
+# Eclipse Public License v2.0 are satisfied:
+#
+#    GNU General Public License, Version 2.0, or any later versions of
+#    that license
+#
+# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
+#
+# Contributors:
+#     Ron Frederick - initial implementation, API, and documentation
+
+"""SSH key exchange handlers"""
+
+import binascii
+from hashlib import md5
+from typing import TYPE_CHECKING, Dict, List, Sequence, Tuple, Type
+
+from .logging import SSHLogger
+from .misc import HashType
+from .packet import SSHPacketHandler
+
+
+if TYPE_CHECKING:
+    # pylint: disable=cyclic-import
+    from .connection import SSHConnection
+
+
+_KexAlgList = List[bytes]
+_KexAlgMap = Dict[bytes, Tuple[Type['Kex'], HashType, object]]
+
+
+_kex_algs: _KexAlgList = []
+_default_kex_algs: _KexAlgList = []
+_kex_handlers: _KexAlgMap = {}
+
+_gss_kex_algs: _KexAlgList = []
+_default_gss_kex_algs: _KexAlgList = []
+_gss_kex_handlers: _KexAlgMap = {}
+
+
+class Kex(SSHPacketHandler):
+    """Parent class for key exchange handlers"""
+
+    def __init__(self, alg: bytes, conn: 'SSHConnection', hash_alg: HashType):
+        self.algorithm = alg
+
+        self._conn = conn
+        self._logger = conn.logger
+        self._hash_alg = hash_alg
+
+    def start(self) -> None:
+        """Start key exchange"""
+
+        raise NotImplementedError
+
+    def send_packet(self, pkttype: int, *args: bytes) -> None:
+        """Send a kex packet"""
+
+        self._conn.send_packet(pkttype, *args, handler=self)
+
+    @property
+    def logger(self) -> SSHLogger:
+        """A logger associated with this connection"""
+
+        return self._logger
+
+    def compute_key(self, k: bytes, h: bytes, x: bytes,
+                    session_id: bytes, keylen: int) -> bytes:
+        """Compute keys from output of key exchange"""
+
+        key = b''
+        while len(key) < keylen:
+            hash_obj = self._hash_alg()
+            hash_obj.update(k)
+            hash_obj.update(h)
+            hash_obj.update(key if key else x + session_id)
+            key += hash_obj.digest()
+
+        return key[:keylen]
+
+
+def register_kex_alg(alg: bytes, handler: Type[Kex], hash_alg: HashType,
+                     args: Tuple, default: bool) -> None:
+    """Register a key exchange algorithm"""
+
+    _kex_algs.append(alg)
+
+    if default:
+        _default_kex_algs.append(alg)
+
+    _kex_handlers[alg] = (handler, hash_alg, args)
+
+
+def register_gss_kex_alg(alg: bytes, handler: Type[Kex], hash_alg: HashType,
+                         args: Tuple, default: bool) -> None:
+    """Register a GSSAPI key exchange algorithm"""
+
+    _gss_kex_algs.append(alg)
+
+    if default:
+        _default_gss_kex_algs.append(alg)
+
+    _gss_kex_handlers[alg] = (handler, hash_alg, args)
+
+
+def get_kex_algs() -> List[bytes]:
+    """Return supported key exchange algorithms"""
+
+    return _gss_kex_algs + _kex_algs
+
+
+def get_default_kex_algs() -> List[bytes]:
+    """Return default key exchange algorithms"""
+
+    return _default_gss_kex_algs + _default_kex_algs
+
+
+def expand_kex_algs(kex_algs: Sequence[bytes], mechs: Sequence[bytes],
+                    host_key_available: bool) -> List[bytes]:
+    """Add mechanisms to GSS entries in key exchange algorithm list"""
+
+    expanded_kex_algs: List[bytes] = []
+
+    for alg in kex_algs:
+        if alg.startswith(b'gss-'):
+            for mech in mechs:
+                suffix = b'-' + binascii.b2a_base64(md5(mech).digest())[:-1]
+                expanded_kex_algs.append(alg + suffix)
+        elif host_key_available:
+            expanded_kex_algs.append(alg)
+
+    return expanded_kex_algs
+
+
+def get_kex(conn: 'SSHConnection', alg: bytes) -> Kex:
+    """Return a key exchange handler
+
+    The function looks up a key exchange algorithm and returns a
+    handler which can perform that type of key exchange.
+
+    """
+
+    if alg.startswith(b'gss-'):
+        alg = alg.rsplit(b'-', 1)[0]
+        handler, hash_alg, args = _gss_kex_handlers[alg]
+    else:
+        handler, hash_alg, args = _kex_handlers[alg]
+
+    return handler(alg, conn, hash_alg, *args)
diff --git a/venv/lib/python3.12/site-packages/asyncssh/kex_dh.py b/venv/lib/python3.12/site-packages/asyncssh/kex_dh.py
new file mode 100644
index 0000000..f0413ae
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/kex_dh.py
@@ -0,0 +1,801 @@
+# Copyright (c) 2013-2021 by Ron Frederick and others.
+#
+# This program and the accompanying materials are made available under
+# the terms of the Eclipse Public License v2.0 which accompanies this
+# distribution and is available at:
+#
+#     http://www.eclipse.org/legal/epl-2.0/
+#
+# This program may also be made available under the following secondary
+# licenses when the conditions for such availability set forth in the
+# Eclipse Public License v2.0 are satisfied:
+#
+#    GNU General Public License, Version 2.0, or any later versions of
+#    that license
+#
+# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
+#
+# Contributors:
+#     Ron Frederick - initial implementation, API, and documentation
+
+"""SSH Diffie-Hellman, ECDH, and Edwards DH key exchange handlers"""
+
+from hashlib import sha1, sha224, sha256, sha384, sha512
+from typing import TYPE_CHECKING, Callable, Mapping, Optional, cast
+from typing_extensions import Protocol
+
+from .constants import DEFAULT_LANG
+from .crypto import curve25519_available, curve448_available
+from .crypto import Curve25519DH, Curve448DH, DH, ECDH
+from .crypto import sntrup761_available
+from .crypto import sntrup761_pubkey_bytes, sntrup761_ciphertext_bytes
+from .crypto import sntrup761_keypair, sntrup761_encaps, sntrup761_decaps
+from .gss import GSSError
+from .kex import Kex, register_kex_alg, register_gss_kex_alg
+from .misc import HashType, KeyExchangeFailed, ProtocolError
+from .misc import get_symbol_names
+from .packet import Boolean, MPInt, String, UInt32, SSHPacket
+from .public_key import SigningKey, VerifyingKey
+
+
+if TYPE_CHECKING:
+    # pylint: disable=cyclic-import
+    from .connection import SSHConnection, SSHClientConnection
+    from .connection import SSHServerConnection
+
+
+class DHKey(Protocol):
+    """Protocol for performing Diffie-Hellman key exchange"""
+
+    def get_public(self) -> bytes:
+        """Return the public key to send to the peer"""
+
+    def get_shared(self, peer_public: bytes) -> int:
+        """Return the shared key from the peer's public key"""
+
+
+_ECDHClass = Callable[..., DHKey]
+
+
+# pylint: disable=line-too-long
+
+# SSH KEX DH message values
+MSG_KEXDH_INIT = 30
+MSG_KEXDH_REPLY = 31
+
+# SSH KEX DH group exchange message values
+MSG_KEX_DH_GEX_REQUEST_OLD = 30
+MSG_KEX_DH_GEX_GROUP = 31
+MSG_KEX_DH_GEX_INIT = 32
+MSG_KEX_DH_GEX_REPLY = 33
+MSG_KEX_DH_GEX_REQUEST = 34
+
+# SSH KEX ECDH message values
+MSG_KEX_ECDH_INIT = 30
+MSG_KEX_ECDH_REPLY = 31
+
+# SSH KEXGSS message values
+MSG_KEXGSS_INIT = 30
+MSG_KEXGSS_CONTINUE = 31
+MSG_KEXGSS_COMPLETE = 32
+MSG_KEXGSS_HOSTKEY = 33
+MSG_KEXGSS_ERROR = 34
+MSG_KEXGSS_GROUPREQ = 40
+MSG_KEXGSS_GROUP = 41
+
+# SSH KEX group exchange key sizes
+KEX_DH_GEX_MIN_SIZE = 1024
+KEX_DH_GEX_PREFERRED_SIZE = 2048
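[Editor's aside, not part of the vendored diff] The compute_key method in kex.py above implements the RFC 4253 section 7.2 key expansion: the first output block hashes the shared secret K, the exchange hash H, a single key-type letter, and the session ID, and every later block re-hashes K, H, and all key material produced so far. A minimal standalone sketch of the same loop, with sha256 standing in for the negotiated hash and toy byte strings for the inputs:

    from hashlib import sha256

    def derive_key(k: bytes, h: bytes, x: bytes, session_id: bytes,
                   keylen: int) -> bytes:
        # K1 = HASH(K || H || x || session_id)
        # Kn+1 = HASH(K || H || K1 || ... || Kn), truncated to keylen
        key = b''
        while len(key) < keylen:
            hash_obj = sha256()
            hash_obj.update(k)
            hash_obj.update(h)
            hash_obj.update(key if key else x + session_id)
            key += hash_obj.digest()
        return key[:keylen]

    # Toy inputs only: in a real exchange k is the shared secret encoded
    # as an SSH mpint, h is the exchange hash, and x is one of the letters
    # b'A' to b'F' selecting the IV, encryption, or MAC key per direction.
    print(derive_key(b'K', b'H', b'A', b'session', 64).hex())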
+KEX_DH_GEX_MAX_SIZE = 8192 + +# SSH Diffie-Hellman group 1 parameters +_group1_g = 2 +_group1_p = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece65381ffffffffffffffff + +# SSH Diffie-Hellman group 14 parameters +_group14_g = 2 +_group14_p = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca18217c32905e462e36ce3be39e772c180e86039b2783a2ec07a28fb5c55df06f4c52c9de2bcbf6955817183995497cea956ae515d2261898fa051015728e5a8aacaa68ffffffffffffffff + +# SSH Diffie-Hellman group 15 parameters +_group15_g = 2 +_group15_p = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca18217c32905e462e36ce3be39e772c180e86039b2783a2ec07a28fb5c55df06f4c52c9de2bcbf6955817183995497cea956ae515d2261898fa051015728e5a8aaac42dad33170d04507a33a85521abdf1cba64ecfb850458dbef0a8aea71575d060c7db3970f85a6e1e4c7abf5ae8cdb0933d71e8c94e04a25619dcee3d2261ad2ee6bf12ffa06d98a0864d87602733ec86a64521f2b18177b200cbbe117577a615d6c770988c0bad946e208e24fa074e5ab3143db5bfce0fd108e4b82d120a93ad2caffffffffffffffff + +# SSH Diffie-Hellman group 16 parameters +_group16_g = 2 +_group16_p = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca18217c32905e462e36ce3be39e772c180e86039b2783a2ec07a28fb5c55df06f4c52c9de2bcbf6955817183995497cea956ae515d2261898fa051015728e5a8aaac42dad33170d04507a33a85521abdf1cba64ecfb850458dbef0a8aea71575d060c7db3970f85a6e1e4c7abf5ae8cdb0933d71e8c94e04a25619dcee3d2261ad2ee6bf12ffa06d98a0864d87602733ec86a64521f2b18177b200cbbe117577a615d6c770988c0bad946e208e24fa074e5ab3143db5bfce0fd108e4b82d120a92108011a723c12a787e6d788719a10bdba5b2699c327186af4e23c1a946834b6150bda2583e9ca2ad44ce8dbbbc2db04de8ef92e8efc141fbecaa6287c59474e6bc05d99b2964fa090c3a2233ba186515be7ed1f612970cee2d7afb81bdd762170481cd0069127d5b05aa993b4ea988d8fddc186ffb7dc90a6c08f4df435c934063199ffffffffffffffff + +# SSH Diffie-Hellman group 17 parameters +_group17_g = 2 +_group17_p = 
0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca18217c32905e462e36ce3be39e772c180e86039b2783a2ec07a28fb5c55df06f4c52c9de2bcbf6955817183995497cea956ae515d2261898fa051015728e5a8aaac42dad33170d04507a33a85521abdf1cba64ecfb850458dbef0a8aea71575d060c7db3970f85a6e1e4c7abf5ae8cdb0933d71e8c94e04a25619dcee3d2261ad2ee6bf12ffa06d98a0864d87602733ec86a64521f2b18177b200cbbe117577a615d6c770988c0bad946e208e24fa074e5ab3143db5bfce0fd108e4b82d120a92108011a723c12a787e6d788719a10bdba5b2699c327186af4e23c1a946834b6150bda2583e9ca2ad44ce8dbbbc2db04de8ef92e8efc141fbecaa6287c59474e6bc05d99b2964fa090c3a2233ba186515be7ed1f612970cee2d7afb81bdd762170481cd0069127d5b05aa993b4ea988d8fddc186ffb7dc90a6c08f4df435c93402849236c3fab4d27c7026c1d4dcb2602646dec9751e763dba37bdf8ff9406ad9e530ee5db382f413001aeb06a53ed9027d831179727b0865a8918da3edbebcf9b14ed44ce6cbaced4bb1bdb7f1447e6cc254b332051512bd7af426fb8f401378cd2bf5983ca01c64b92ecf032ea15d1721d03f482d7ce6e74fef6d55e702f46980c82b5a84031900b1c9e59e7c97fbec7e8f323a97a7e36cc88be0f1d45b7ff585ac54bd407b22b4154aacc8f6d7ebf48e1d814cc5ed20f8037e0a79715eef29be32806a1d58bb7c5da76f550aa3d8a1fbff0eb19ccb1a313d55cda56c9ec2ef29632387fe8d76e3c0468043e8f663f4860ee12bf2d5b0b7474d6e694f91e6dcc4024ffffffffffffffff + +# SSH Diffie-Hellman group 18 parameters +_group18_g = 2 +_group18_p = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca18217c32905e462e36ce3be39e772c180e86039b2783a2ec07a28fb5c55df06f4c52c9de2bcbf6955817183995497cea956ae515d2261898fa051015728e5a8aaac42dad33170d04507a33a85521abdf1cba64ecfb850458dbef0a8aea71575d060c7db3970f85a6e1e4c7abf5ae8cdb0933d71e8c94e04a25619dcee3d2261ad2ee6bf12ffa06d98a0864d87602733ec86a64521f2b18177b200cbbe117577a615d6c770988c0bad946e208e24fa074e5ab3143db5bfce0fd108e4b82d120a92108011a723c12a787e6d788719a10bdba5b2699c327186af4e23c1a946834b6150bda2583e9ca2ad44ce8dbbbc2db04de8ef92e8efc141fbecaa6287c59474e6bc05d99b2964fa090c3a2233ba186515be7ed1f612970cee2d7afb81bdd762170481cd0069127d5b05aa993b4ea988d8fddc186ffb7dc90a6c08f4df435c93402849236c3fab4d27c7026c1d4dcb2602646dec9751e763dba37bdf8ff9406ad9e530ee5db382f413001aeb06a53ed9027d831179727b0865a8918da3edbebcf9b14ed44ce6cbaced4bb1bdb7f1447e6cc254b332051512bd7af426fb8f401378cd2bf5983ca01c64b92ecf032ea15d1721d03f482d7ce6e74fef6d55e702f46980c82b5a84031900b1c9e59e7c97fbec7e8f323a97a7e36cc88be0f1d45b7ff585ac54bd407b22b4154aacc8f6d7ebf48e1d814cc5ed20f8037e0a79715eef29be32806a1d58bb7c5da76f550aa3d8a1fbff0eb19ccb1a313d55cda56c9ec2ef29632387fe8d76e3c0468043e8f663f4860ee12bf2d5b0b7474d6e694f91e6dbe115974a3926f12fee5e438777cb6a932df8cd8bec4d073b931ba3bc832b68d9dd300741fa7bf8afc47ed2576f6936ba424663aab639c5ae4f5683423b4742bf1c978238f16cbe39d652de3fdb8befc848ad922222e04a4037c0713eb57a81a23f0c73473fc646cea306b4bcbc8862f8385ddfa9d4b7fa2c087e879683303ed5bdd3a062b3cf5b3a278a66d2a13f83f44f82ddf310ee074ab6a364597e899a0255dc164f31cc50846851df9ab48195ded7ea1b1d510bd7ee74d73faf36bc31ecfa268359046f4eb879f924009438b481c6cd7889a
002ed5ee382bc9190da6fc026e479558e4475677e9aa9e3050e2765694dfc81f56e880b96e7160c980dd98edd3dfffffffffffffffff + +_dh_gex_groups = ((1024, _group1_g, _group1_p), + (2048, _group14_g, _group14_p), + (3072, _group15_g, _group15_p), + (4096, _group16_g, _group16_p), + (6144, _group17_g, _group17_p), + (8192, _group18_g, _group18_p)) + +# pylint: enable=line-too-long + + +class _KexDHBase(Kex): + """Abstract base class for Diffie-Hellman key exchange""" + + _init_type: int = 0 + _reply_type: int = 0 + + def __init__(self, alg: bytes, conn: 'SSHConnection', hash_alg: HashType): + super().__init__(alg, conn, hash_alg) + + self._dh: Optional[DH] = None + self._g = 0 + self._p = 0 + self._e = 0 + self._f = 0 + self._gex_data = b'' + + def _init_group(self, g: int, p: int) -> None: + """Initialize DH group parameters""" + + self._g = g + self._p = p + + def _compute_hash(self, host_key_data: bytes, k: bytes) -> bytes: + """Compute a hash of key information associated with the connection""" + + hash_obj = self._hash_alg() + hash_obj.update(self._conn.get_hash_prefix()) + hash_obj.update(String(host_key_data)) + hash_obj.update(self._gex_data) + hash_obj.update(self._format_client_key()) + hash_obj.update(self._format_server_key()) + hash_obj.update(k) + return hash_obj.digest() + + def _parse_client_key(self, packet: SSHPacket) -> None: + """Parse a DH client key""" + + if not self._p: + raise ProtocolError('Kex DH p not specified') + + self._e = packet.get_mpint() + + def _parse_server_key(self, packet: SSHPacket) -> None: + """Parse a DH server key""" + + if not self._p: + raise ProtocolError('Kex DH p not specified') + + self._f = packet.get_mpint() + + def _format_client_key(self) -> bytes: + """Format a DH client key""" + + return MPInt(self._e) + + def _format_server_key(self) -> bytes: + """Format a DH server key""" + + return MPInt(self._f) + + def _send_init(self) -> None: + """Send a DH init message""" + + self.send_packet(self._init_type, self._format_client_key()) + + def _send_reply(self, key_data: bytes, sig: bytes) -> None: + """Send a DH reply message""" + + self.send_packet(self._reply_type, String(key_data), + self._format_server_key(), String(sig)) + + def _perform_init(self) -> None: + """Compute e and send init message""" + + self._dh = DH(self._g, self._p) + self._e = self._dh.get_public() + + self._send_init() + + def _compute_client_shared(self) -> bytes: + """Compute client shared key""" + + if not 1 <= self._f < self._p: + raise ProtocolError('Kex DH f out of range') + + assert self._dh is not None + return MPInt(self._dh.get_shared(self._f)) + + def _compute_server_shared(self) -> bytes: + """Compute server shared key""" + + if not 1 <= self._e < self._p: + raise ProtocolError('Kex DH e out of range') + + self._dh = DH(self._g, self._p) + self._f = self._dh.get_public() + + return MPInt(self._dh.get_shared(self._e)) + + def _perform_reply(self, key: SigningKey, key_data: bytes) -> None: + """Compute server shared key and send reply message""" + + k = self._compute_server_shared() + h = self._compute_hash(key_data, k) + self._send_reply(key_data, key.sign(h)) + + self._conn.send_newkeys(k, h) + + def _verify_reply(self, key: VerifyingKey, key_data: bytes, + sig: bytes) -> None: + """Verify a DH reply message""" + + k = self._compute_client_shared() + h = self._compute_hash(key_data, k) + + if not key.verify(h, sig): + raise KeyExchangeFailed('Key exchange hash mismatch') + + self._conn.send_newkeys(k, h) + + def _process_init(self, _pkttype: int, _pktid: int, + packet: 
SSHPacket) -> None: + """Process a DH init message""" + + if self._conn.is_client(): + raise ProtocolError('Unexpected kex init msg') + + self._parse_client_key(packet) + packet.check_end() + + server_conn = cast('SSHServerConnection', self._conn) + host_key = server_conn.get_server_host_key() + assert host_key is not None + + self._perform_reply(host_key, host_key.public_data) + + def _process_reply(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a DH reply message""" + + if self._conn.is_server(): + raise ProtocolError('Unexpected kex reply msg') + + host_key_data = packet.get_string() + self._parse_server_key(packet) + sig = packet.get_string() + packet.check_end() + + client_conn = cast('SSHClientConnection', self._conn) + host_key = client_conn.validate_server_host_key(host_key_data) + self._verify_reply(host_key, host_key_data, sig) + + def start(self) -> None: + """Start DH key exchange""" + + if self._conn.is_client(): + self._perform_init() + + +class _KexDH(_KexDHBase): + """Handler for Diffie-Hellman key exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEXDH_') + + _init_type = MSG_KEXDH_INIT + _reply_type = MSG_KEXDH_REPLY + + def __init__(self, alg: bytes, conn: 'SSHConnection', + hash_alg: HashType, g: int, p: int): + super().__init__(alg, conn, hash_alg) + + self._init_group(g, p) + + _packet_handlers: Mapping[int, Callable]= { + MSG_KEXDH_INIT: _KexDHBase._process_init, + MSG_KEXDH_REPLY: _KexDHBase._process_reply + } + + +class _KexDHGex(_KexDHBase): + """Handler for Diffie-Hellman group exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEX_DH_GEX_') + + _init_type = MSG_KEX_DH_GEX_INIT + _reply_type = MSG_KEX_DH_GEX_REPLY + _request_type = MSG_KEX_DH_GEX_REQUEST + _group_type = MSG_KEX_DH_GEX_GROUP + + def __init__(self, alg: bytes, conn: 'SSHConnection', hash_alg: HashType, + preferred_size: Optional[int] = None, + max_size: Optional[int] = None): + super().__init__(alg, conn, hash_alg) + + self._pref_size = preferred_size + self._max_size = max_size + + def _send_request(self) -> None: + """Send a DH gex request message""" + + if self._pref_size and not self._max_size: + # Send old request message for unit test + pkttype = MSG_KEX_DH_GEX_REQUEST_OLD + args = UInt32(self._pref_size) + else: + pkttype = self._request_type + args = (UInt32(KEX_DH_GEX_MIN_SIZE) + + UInt32(self._pref_size or KEX_DH_GEX_PREFERRED_SIZE) + + UInt32(self._max_size or KEX_DH_GEX_MAX_SIZE)) + + self._gex_data = args + self.send_packet(pkttype, args) + + def _process_request(self, pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a DH gex request message""" + + if self._conn.is_client(): + raise ProtocolError('Unexpected kex request msg') + + self._gex_data = packet.get_remaining_payload() + + if pkttype == MSG_KEX_DH_GEX_REQUEST_OLD: + preferred_size = packet.get_uint32() + max_size = KEX_DH_GEX_MAX_SIZE + else: + _ = packet.get_uint32() + preferred_size = packet.get_uint32() + max_size = packet.get_uint32() + + packet.check_end() + + g, p = _group1_g, _group1_p + + for gex_size, gex_g, gex_p in _dh_gex_groups: + if gex_size > max_size: + break + else: + g, p = gex_g, gex_p + + if gex_size >= preferred_size: + break + + self._init_group(g, p) + self._gex_data += MPInt(p) + MPInt(g) + self.send_packet(self._group_type, MPInt(p), MPInt(g)) + + def _process_group(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a DH gex group message""" + + if self._conn.is_server(): + raise 
ProtocolError('Unexpected kex group msg') + + p = packet.get_mpint() + g = packet.get_mpint() + packet.check_end() + + self._init_group(g, p) + self._gex_data += MPInt(p) + MPInt(g) + self._perform_init() + + def start(self) -> None: + """Start DH group exchange""" + + if self._conn.is_client(): + self._send_request() + + _packet_handlers: Mapping[int, Callable] = { + MSG_KEX_DH_GEX_REQUEST_OLD: _process_request, + MSG_KEX_DH_GEX_GROUP: _process_group, + MSG_KEX_DH_GEX_INIT: _KexDHBase._process_init, + MSG_KEX_DH_GEX_REPLY: _KexDHBase._process_reply, + MSG_KEX_DH_GEX_REQUEST: _process_request + } + + +class _KexECDH(_KexDHBase): + """Handler for elliptic curve Diffie-Hellman key exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEX_ECDH_') + + _init_type = MSG_KEX_ECDH_INIT + _reply_type = MSG_KEX_ECDH_REPLY + + def __init__(self, alg: bytes, conn: 'SSHConnection', hash_alg: HashType, + ecdh_class: _ECDHClass, *args: object): + super().__init__(alg, conn, hash_alg) + + self._priv = ecdh_class(*args) + pub = self._priv.get_public() + + if conn.is_client(): + self._client_pub = pub + else: + self._server_pub = pub + + def _parse_client_key(self, packet: SSHPacket) -> None: + """Parse an ECDH client key""" + + self._client_pub = packet.get_string() + + def _parse_server_key(self, packet: SSHPacket) -> None: + """Parse an ECDH server key""" + + self._server_pub = packet.get_string() + + def _format_client_key(self) -> bytes: + """Format an ECDH client key""" + + return String(self._client_pub) + + def _format_server_key(self) -> bytes: + """Format an ECDH server key""" + + return String(self._server_pub) + + def _compute_client_shared(self) -> bytes: + """Compute client shared key""" + + try: + return MPInt(self._priv.get_shared(self._server_pub)) + except ValueError: + raise ProtocolError('Invalid ECDH server public key') from None + + def _compute_server_shared(self) -> bytes: + """Compute server shared key""" + + try: + return MPInt(self._priv.get_shared(self._client_pub)) + except ValueError: + raise ProtocolError('Invalid ECDH client public key') from None + + def start(self) -> None: + """Start ECDH key exchange""" + + if self._conn.is_client(): + self._send_init() + + _packet_handlers: Mapping[int, Callable] = { + MSG_KEX_ECDH_INIT: _KexDHBase._process_init, + MSG_KEX_ECDH_REPLY: _KexDHBase._process_reply + } + + +class _KexSNTRUP761(_KexECDH): + """Handler for Streamlined NTRU Prime post-quantum key exchange""" + + def __init__(self, alg: bytes, conn: 'SSHConnection', hash_alg: HashType, + *args: object): + super().__init__(alg, conn, hash_alg, Curve25519DH) + + if conn.is_client(): + sntrup_pub, self._sntrup_priv = sntrup761_keypair() + self._client_pub = sntrup_pub + self._client_pub + + def _compute_client_shared(self) -> bytes: + """Compute client shared key""" + + ciphertext = self._server_pub[:sntrup761_ciphertext_bytes] + curve25519_pub = self._server_pub[sntrup761_ciphertext_bytes:] + + try: + sntrup_secret = sntrup761_decaps(ciphertext, self._sntrup_priv) + except ValueError: + raise ProtocolError('Invalid SNTRUP server ciphertext') from None + + try: + priv = cast(Curve25519DH, self._priv) + curve25519_shared = priv.get_shared_bytes(curve25519_pub) + except ValueError: + raise ProtocolError('Invalid ECDH server public key') from None + + return String(self._hash_alg(sntrup_secret + + curve25519_shared).digest()) + + def _compute_server_shared(self) -> bytes: + """Compute server shared key""" + + sntrup_pub = self._client_pub[:sntrup761_pubkey_bytes] + 
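        # (Editor's note, not in the original diff) The client's public
        # value concatenates the fixed-size sntrup761 KEM public key with
        # the 32-byte Curve25519 public key; the slice above peels off the
        # KEM part and the slice below takes the remainder for the
        # classical ECDH half of the hybrid exchange.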
curve25519_pub = self._client_pub[sntrup761_pubkey_bytes:] + + try: + sntrup_secret, ciphertext = sntrup761_encaps(sntrup_pub) + except ValueError: + raise ProtocolError('Invalid SNTRUP client public key') from None + + try: + priv = cast(Curve25519DH, self._priv) + curve25519_shared = priv.get_shared_bytes(curve25519_pub) + except ValueError: + raise ProtocolError('Invalid ECDH client public key') from None + + self._server_pub = ciphertext + self._server_pub + + return String(self._hash_alg(sntrup_secret + + curve25519_shared).digest()) + + +class _KexGSSBase(_KexDHBase): + """Handler for GSS key exchange""" + + def __init__(self, alg: bytes, conn: 'SSHConnection', + hash_alg: HashType, *args: object): + super().__init__(alg, conn, hash_alg, *args) + + self._gss = conn.get_gss_context() + self._token: Optional[bytes] = None + self._host_key_data = b'' + + def _check_secure(self) -> None: + """Check that GSS context is secure enough for key exchange""" + + if (not self._gss.provides_mutual_auth or + not self._gss.provides_integrity): + raise ProtocolError('GSS context not secure') + + def _send_init(self) -> None: + """Send a GSS init message""" + + if not self._token: + raise ProtocolError('Empty GSS token in init') + + self.send_packet(MSG_KEXGSS_INIT, String(self._token), + self._format_client_key()) + + def _send_reply(self, key_data: bytes, sig: bytes) -> None: + """Send a GSS reply message""" + + if self._token: + token_data = Boolean(True) + String(self._token) + else: + token_data = Boolean(False) + + self.send_packet(MSG_KEXGSS_COMPLETE, self._format_server_key(), + String(sig), token_data) + + def _send_continue(self) -> None: + """Send a GSS continue message""" + + if not self._token: + raise ProtocolError('Empty GSS token in continue') + + self.send_packet(MSG_KEXGSS_CONTINUE, String(self._token)) + + def _process_token(self, token: Optional[bytes] = None) -> None: + """Process a GSS token""" + + try: + self._token = self._gss.step(token) + except GSSError as exc: + if self._conn.is_server(): + self.send_packet(MSG_KEXGSS_ERROR, UInt32(exc.maj_code), + UInt32(exc.min_code), String(str(exc)), + String(DEFAULT_LANG)) + + if exc.token: + self.send_packet(MSG_KEXGSS_CONTINUE, String(exc.token)) + + raise KeyExchangeFailed(str(exc)) from None + + def _process_init(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS init message""" + + if self._conn.is_client(): + raise ProtocolError('Unexpected kexgss init msg') + + token = packet.get_string() + self._parse_client_key(packet) + packet.check_end() + + server_conn = cast('SSHServerConnection', self._conn) + host_key = server_conn.get_server_host_key() + + if host_key: + self._host_key_data = host_key.public_data + self.send_packet(MSG_KEXGSS_HOSTKEY, String(self._host_key_data)) + else: + self._host_key_data = b'' + + self._process_token(token) + + if self._gss.complete: + self._check_secure() + self._perform_reply(self._gss, self._host_key_data) + self._conn.enable_gss_kex_auth() + else: + self._send_continue() + + def _process_continue(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS continue message""" + + token = packet.get_string() + packet.check_end() + + if self._conn.is_client() and self._gss.complete: + raise ProtocolError('Unexpected kexgss continue msg') + + self._process_token(token) + + if self._conn.is_server() and self._gss.complete: + self._check_secure() + self._perform_reply(self._gss, self._host_key_data) + else: + self._send_continue() + + def 
_process_complete(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS complete message""" + + if self._conn.is_server(): + raise ProtocolError('Unexpected kexgss complete msg') + + self._parse_server_key(packet) + mic = packet.get_string() + token_present = packet.get_boolean() + token = packet.get_string() if token_present else None + packet.check_end() + + if token: + if self._gss.complete: + raise ProtocolError('Non-empty token after complete') + + self._process_token(token) + + if self._token: + raise ProtocolError('Non-empty token after complete') + + if not self._gss.complete: + raise ProtocolError('GSS exchange failed to complete') + + self._check_secure() + self._verify_reply(self._gss, self._host_key_data, mic) + self._conn.enable_gss_kex_auth() + + def _process_hostkey(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS hostkey message""" + + self._host_key_data = packet.get_string() + packet.check_end() + + def _process_error(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a GSS error message""" + + if self._conn.is_server(): + raise ProtocolError('Unexpected kexgss error msg') + + _ = packet.get_uint32() # major_status + _ = packet.get_uint32() # minor_status + msg = packet.get_string() + _ = packet.get_string() # lang + packet.check_end() + + self._conn.logger.debug1('GSS error: %s', + msg.decode('utf-8', errors='ignore')) + + def start(self) -> None: + """Start GSS key exchange""" + + if self._conn.is_client(): + self._process_token() + super().start() + + +class _KexGSS(_KexGSSBase, _KexDH): + """Handler for GSS key exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEXGSS_') + + _packet_handlers = { + MSG_KEXGSS_INIT: _KexGSSBase._process_init, + MSG_KEXGSS_CONTINUE: _KexGSSBase._process_continue, + MSG_KEXGSS_COMPLETE: _KexGSSBase._process_complete, + MSG_KEXGSS_HOSTKEY: _KexGSSBase._process_hostkey, + MSG_KEXGSS_ERROR: _KexGSSBase._process_error + } + + +class _KexGSSGex(_KexGSSBase, _KexDHGex): + """Handler for GSS group exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEXGSS_') + + _request_type = MSG_KEXGSS_GROUPREQ + _group_type = MSG_KEXGSS_GROUP + + _packet_handlers = { + MSG_KEXGSS_INIT: _KexGSSBase._process_init, + MSG_KEXGSS_CONTINUE: _KexGSSBase._process_continue, + MSG_KEXGSS_COMPLETE: _KexGSSBase._process_complete, + MSG_KEXGSS_HOSTKEY: _KexGSSBase._process_hostkey, + MSG_KEXGSS_ERROR: _KexGSSBase._process_error, + MSG_KEXGSS_GROUPREQ: _KexDHGex._process_request, + MSG_KEXGSS_GROUP: _KexDHGex._process_group + } + + +class _KexGSSECDH(_KexGSSBase, _KexECDH): + """Handler for GSS ECDH key exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEXGSS_') + + _packet_handlers = { + MSG_KEXGSS_INIT: _KexGSSBase._process_init, + MSG_KEXGSS_CONTINUE: _KexGSSBase._process_continue, + MSG_KEXGSS_COMPLETE: _KexGSSBase._process_complete, + MSG_KEXGSS_HOSTKEY: _KexGSSBase._process_hostkey, + MSG_KEXGSS_ERROR: _KexGSSBase._process_error + } + + +if curve25519_available: # pragma: no branch + if sntrup761_available: # pragma: no branch + register_kex_alg(b'sntrup761x25519-sha512@openssh.com', _KexSNTRUP761, + sha512, (), True) + + register_kex_alg(b'curve25519-sha256', _KexECDH, sha256, + (Curve25519DH,), True) + register_kex_alg(b'curve25519-sha256@libssh.org', _KexECDH, sha256, + (Curve25519DH,), True) + register_gss_kex_alg(b'gss-curve25519-sha256', _KexGSSECDH, sha256, + (Curve25519DH,), True) + +if curve448_available: # pragma: no branch 
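    # (Editor's note, not in the original diff) Curve448 ECDH and its
    # GSS-protected variant are only registered when the crypto backend
    # exposes X448 support.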
+    register_kex_alg(b'curve448-sha512', _KexECDH, sha512,
+                     (Curve448DH,), True)
+    register_gss_kex_alg(b'gss-curve448-sha512', _KexGSSECDH, sha512,
+                         (Curve448DH,), True)
+
+for _curve_id, _hash_name, _hash_alg, _default in (
+        (b'nistp521', b'sha512', sha512, True),
+        (b'nistp384', b'sha384', sha384, True),
+        (b'nistp256', b'sha256', sha256, True),
+        (b'1.3.132.0.10', b'sha256', sha256, True)):
+    register_kex_alg(b'ecdh-sha2-' + _curve_id, _KexECDH, _hash_alg,
+                     (ECDH, _curve_id), _default)
+    register_gss_kex_alg(b'gss-' + _curve_id + b'-' + _hash_name, _KexGSSECDH,
+                         _hash_alg, (ECDH, _curve_id), _default)
+
+for _hash_name, _hash_alg, _default in (
+        (b'sha256', sha256, True),
+        (b'sha224@ssh.com', sha224, False),
+        (b'sha384@ssh.com', sha384, False),
+        (b'sha512@ssh.com', sha512, False),
+        (b'sha1', sha1, False)):
+    register_kex_alg(b'diffie-hellman-group-exchange-' + _hash_name,
+                     _KexDHGex, _hash_alg, (), _default)
+
+    if not _hash_name.endswith(b'@ssh.com'):
+        register_gss_kex_alg(b'gss-gex-' + _hash_name,
+                             _KexGSSGex, _hash_alg, (), _default)
+
+for _name, _hash_alg, _g, _p, _default in (
+        (b'group14-sha256', sha256, _group14_g, _group14_p, True),
+        (b'group15-sha512', sha512, _group15_g, _group15_p, True),
+        (b'group16-sha512', sha512, _group16_g, _group16_p, True),
+        (b'group17-sha512', sha512, _group17_g, _group17_p, True),
+        (b'group18-sha512', sha512, _group18_g, _group18_p, True),
+        (b'group14-sha256@ssh.com', sha256, _group14_g, _group14_p, True),
+        (b'group14-sha224@ssh.com', sha224, _group14_g, _group14_p, False),
+        (b'group15-sha256@ssh.com', sha256, _group15_g, _group15_p, False),
+        (b'group15-sha384@ssh.com', sha384, _group15_g, _group15_p, False),
+        (b'group16-sha384@ssh.com', sha384, _group16_g, _group16_p, False),
+        (b'group16-sha512@ssh.com', sha512, _group16_g, _group16_p, False),
+        (b'group18-sha512@ssh.com', sha512, _group18_g, _group18_p, False),
+        (b'group14-sha1', sha1, _group14_g, _group14_p, True),
+        (b'group1-sha1', sha1, _group1_g, _group1_p, False)):
+    register_kex_alg(b'diffie-hellman-' + _name, _KexDH, _hash_alg,
+                     (_g, _p), _default)
+
+    if not _name.endswith(b'@ssh.com'):
+        register_gss_kex_alg(b'gss-' + _name, _KexGSS, _hash_alg,
+                             (_g, _p), _default)
diff --git a/venv/lib/python3.12/site-packages/asyncssh/kex_rsa.py b/venv/lib/python3.12/site-packages/asyncssh/kex_rsa.py
new file mode 100644
index 0000000..3e28dc6
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/asyncssh/kex_rsa.py
@@ -0,0 +1,177 @@
+# Copyright (c) 2018-2021 by Ron Frederick and others.
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""RSA key exchange handler""" + +from hashlib import sha1, sha256 +from typing import TYPE_CHECKING, Optional, cast + +from .kex import Kex, register_kex_alg +from .misc import HashType, KeyExchangeFailed, ProtocolError +from .misc import get_symbol_names, randrange +from .packet import MPInt, String, SSHPacket +from .public_key import KeyImportError, SSHKey +from .public_key import decode_ssh_public_key, generate_private_key +from .rsa import RSAKey + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .connection import SSHConnection, SSHClientConnection + from .connection import SSHServerConnection + + +# SSH KEXRSA message values +MSG_KEXRSA_PUBKEY = 30 +MSG_KEXRSA_SECRET = 31 +MSG_KEXRSA_DONE = 32 + + +class _KexRSA(Kex): + """Handler for RSA key exchange""" + + _handler_names = get_symbol_names(globals(), 'MSG_KEXRSA') + + def __init__(self, alg: bytes, conn: 'SSHConnection', hash_alg: HashType, + key_size: int, hash_size: int): + super().__init__(alg, conn, hash_alg) + + self._key_size = key_size + self._k_limit = 1 << (key_size - 2*hash_size - 49) + + self._host_key_data = b'' + + self._trans_key: Optional[SSHKey] = None + self._trans_key_data = b'' + + self._k = 0 + self._encrypted_k = b'' + + def start(self) -> None: + """Start RSA key exchange""" + + if self._conn.is_server(): + server_conn = cast('SSHServerConnection', self._conn) + host_key = server_conn.get_server_host_key() + assert host_key is not None + self._host_key_data = host_key.public_data + + self._trans_key = generate_private_key( + 'ssh-rsa', key_size=self._key_size) + self._trans_key_data = self._trans_key.public_data + + self.send_packet(MSG_KEXRSA_PUBKEY, String(self._host_key_data), + String(self._trans_key_data)) + + def _compute_hash(self) -> bytes: + """Compute a hash of key information associated with the connection""" + + hash_obj = self._hash_alg() + hash_obj.update(self._conn.get_hash_prefix()) + hash_obj.update(String(self._host_key_data)) + hash_obj.update(String(self._trans_key_data)) + hash_obj.update(String(self._encrypted_k)) + hash_obj.update(MPInt(self._k)) + return hash_obj.digest() + + def _process_pubkey(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a KEXRSA pubkey message""" + + if self._conn.is_server(): + raise ProtocolError('Unexpected KEXRSA pubkey msg') + + self._host_key_data = packet.get_string() + self._trans_key_data = packet.get_string() + packet.check_end() + + try: + pubkey = decode_ssh_public_key(self._trans_key_data) + except KeyImportError: + raise ProtocolError('Invalid KEXRSA pubkey msg') from None + + trans_key = cast(RSAKey, pubkey) + self._k = randrange(self._k_limit) + self._encrypted_k = \ + cast(bytes, trans_key.encrypt(MPInt(self._k), self.algorithm)) + + self.send_packet(MSG_KEXRSA_SECRET, String(self._encrypted_k)) + + def _process_secret(self, _pkttype: int, _pktid: int, + packet: 
SSHPacket) -> None: + """Process a KEXRSA secret message""" + + if self._conn.is_client(): + raise ProtocolError('Unexpected KEXRSA secret msg') + + self._encrypted_k = packet.get_string() + packet.check_end() + + trans_key = cast(RSAKey, self._trans_key) + decrypted_k = trans_key.decrypt(self._encrypted_k, self.algorithm) + if not decrypted_k: + raise KeyExchangeFailed('Key exchange decryption failed') + + packet = SSHPacket(decrypted_k) + self._k = packet.get_mpint() + packet.check_end() + + server_conn = cast('SSHServerConnection', self._conn) + host_key = server_conn.get_server_host_key() + assert host_key is not None + + h = self._compute_hash() + sig = host_key.sign(h) + + self.send_packet(MSG_KEXRSA_DONE, String(sig)) + + self._conn.send_newkeys(MPInt(self._k), h) + + def _process_done(self, _pkttype: int, _pktid: int, + packet: SSHPacket) -> None: + """Process a KEXRSA done message""" + + if self._conn.is_server(): + raise ProtocolError('Unexpected KEXRSA done msg') + + sig = packet.get_string() + packet.check_end() + + client_conn = cast('SSHClientConnection', self._conn) + host_key = client_conn.validate_server_host_key(self._host_key_data) + + h = self._compute_hash() + if not host_key.verify(h, sig): + raise KeyExchangeFailed('Key exchange hash mismatch') + + self._conn.send_newkeys(MPInt(self._k), h) + + _packet_handlers = { + MSG_KEXRSA_PUBKEY: _process_pubkey, + MSG_KEXRSA_SECRET: _process_secret, + MSG_KEXRSA_DONE: _process_done + } + + +for _name, _hash_alg, _key_size, _hash_size, _default in ( + (b'rsa2048-sha256', sha256, 2048, 256, True), + (b'rsa1024-sha1', sha1, 1024, 160, False)): + register_kex_alg(_name, _KexRSA, _hash_alg, + (_key_size, _hash_size), _default) diff --git a/venv/lib/python3.12/site-packages/asyncssh/keysign.py b/venv/lib/python3.12/site-packages/asyncssh/keysign.py new file mode 100644 index 0000000..ffb68a5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/keysign.py @@ -0,0 +1,115 @@ +# Copyright (c) 2018-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH keysign client""" + +import asyncio +from pathlib import Path +import subprocess +from typing import Iterable, Sequence, Union, cast + +from .misc import FilePath +from .packet import Byte, String, UInt32, PacketDecodeError, SSHPacket +from .public_key import SSHKey, SSHKeyPair, SSHCertificate + + +_KeySignKey = Union[SSHKey, SSHCertificate] +KeySignPath = Union[None, bool, FilePath] + + +KEYSIGN_VERSION = 2 + +_DEFAULT_KEYSIGN_DIRS = ('/opt/local/libexec', '/usr/local/libexec', + '/usr/libexec', '/usr/libexec/openssh', + '/usr/lib/openssh') + + +class SSHKeySignKeyPair(SSHKeyPair): + """Surrogate for a key where signing is done via ssh-keysign""" + + def __init__(self, keysign_path: str, sock_fd: int, + key_or_cert: _KeySignKey): + algorithm = key_or_cert.algorithm + sig_algorithms = key_or_cert.sig_algorithms[:1] + public_data = key_or_cert.public_data + comment = key_or_cert.get_comment_bytes() + + super().__init__(algorithm, algorithm, sig_algorithms, sig_algorithms, + public_data, comment) + + self._keysign_path = keysign_path + self._sock_fd = sock_fd + + async def sign_async(self, data: bytes) -> bytes: + """Use ssh-keysign to sign a block of data with this key""" + + proc = await asyncio.create_subprocess_exec( + self._keysign_path, stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, pass_fds=[self._sock_fd]) + + request = String(Byte(KEYSIGN_VERSION) + UInt32(self._sock_fd) + + String(data)) + stdout, stderr = await proc.communicate(request) + + if stderr: + error = stderr.decode().strip() + raise ValueError(error) + + try: + packet = SSHPacket(stdout) + resp = packet.get_string() + packet.check_end() + + packet = SSHPacket(resp) + version = packet.get_byte() + sig = packet.get_string() + packet.check_end() + + if version != KEYSIGN_VERSION: + raise ValueError('unexpected version') + + return sig + except PacketDecodeError: + raise ValueError('invalid response') from None + + +def find_keysign(path: KeySignPath) -> str: + """Return path to ssh-keysign executable""" + + if path is True: + for keysign_dir in _DEFAULT_KEYSIGN_DIRS: + path = Path(keysign_dir, 'ssh-keysign') + if path.exists(): + break + else: + raise ValueError('Keysign not found') + else: + if not path or not Path(cast(FilePath, path)).exists(): + raise ValueError('Keysign not found') + + return str(path) + + +def get_keysign_keys(keysign_path: str, sock_fd: int, + keys: Iterable[_KeySignKey]) -> \ + Sequence[SSHKeySignKeyPair]: + """Return keypair objects which invoke ssh-keysign""" + + return [SSHKeySignKeyPair(keysign_path, sock_fd, key) for key in keys] diff --git a/venv/lib/python3.12/site-packages/asyncssh/known_hosts.py b/venv/lib/python3.12/site-packages/asyncssh/known_hosts.py new file mode 100644 index 0000000..d80ac60 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/known_hosts.py @@ -0,0 +1,396 @@ +# Copyright (c) 
2015-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation +# Alexander Travov - proposed changes to add negated patterns, hashed +# entries, and support for the revoked marker +# Josh Yudaken - proposed change to split parsing and matching to avoid +# parsing large known_hosts lists multiple times + +"""Parser for SSH known_hosts files""" + +import binascii +from hashlib import sha1 +import hmac +from typing import Callable, Dict, List, Optional +from typing import Sequence, Tuple, Union, cast + +try: + from .crypto import X509NamePattern + _x509_available = True +except ImportError: # pragma: no cover + _x509_available = False + +from .misc import IPAddress, ip_address, read_file +from .pattern import HostPatternList +from .public_key import KeyImportError +from .public_key import SSHKey, SSHCertificate, SSHX509Certificate +from .public_key import import_public_key, import_certificate +from .public_key import import_certificate_subject +from .public_key import load_public_keys, load_certificates + + +_HostPattern = Union['_PlainHost', '_HashedHost'] +_HostEntry = Tuple[Optional[str], Optional[SSHKey], + Optional[SSHX509Certificate], Optional['X509NamePattern']] + +_KnownHostsKeys = Sequence[SSHKey] +_KnownHostsCerts = Sequence[SSHX509Certificate] +_KnownHostsNames = Sequence['X509NamePattern'] +_KnownHostsResult = Tuple[_KnownHostsKeys, _KnownHostsKeys, _KnownHostsKeys, + _KnownHostsCerts, _KnownHostsCerts, + _KnownHostsNames, _KnownHostsNames] + +_KnownHostsCallable = Callable[[str, str, Optional[int]], Sequence[str]] +_KnownHostsListArg = Union[str, Sequence[str], 'X509NamePattern'] +KnownHostsArg = Union[None, str, bytes, _KnownHostsCallable, 'SSHKnownHosts', + _KnownHostsResult, Sequence[_KnownHostsListArg]] + + +def _load_subject_names(names: Sequence[str]) -> Sequence['X509NamePattern']: + """Load a list of X.509 subject name patterns""" + + if not _x509_available: # pragma: no cover + return [] + + return list(map(X509NamePattern, names)) + + +class _PlainHost: + """A plain host entry in a known_hosts file""" + + def __init__(self, pattern: str): + self._pattern = HostPatternList(pattern) + + def matches(self, host: str, addr: str, ip: Optional[IPAddress]) -> bool: + """Return whether a host or address matches this host pattern list""" + + return self._pattern.matches(host, addr, ip) + + +class _HashedHost: + """A hashed host entry in a known_hosts file""" + + _HMAC_SHA1_MAGIC = '1' + + def __init__(self, pattern: str): + try: + magic, salt, hosthash = pattern[1:].split('|') + self._salt = binascii.a2b_base64(salt) + self._hosthash = binascii.a2b_base64(hosthash) + except (ValueError, binascii.Error): + raise ValueError('Invalid known hosts hash entry: %s' % + pattern) from None + + if magic != self._HMAC_SHA1_MAGIC: + # Only support HMAC SHA-1 for now + raise ValueError('Invalid known hosts hash type: %s' % + magic) from None + + def _match(self, value: str) 
-> bool: + """Return whether this host hash matches a value""" + + hosthash = hmac.new(self._salt, value.encode(), sha1).digest() + return hosthash == self._hosthash + + def matches(self, host: str, addr: str, _ip: Optional[IPAddress]) -> bool: + """Return whether a host or address matches this host hash""" + + return self._match(host) or self._match(addr) + + +class SSHKnownHosts: + """An SSH known hosts list""" + + def __init__(self, known_hosts: Optional[str] = None): + self._exact_entries: Dict[Optional[str], List[_HostEntry]] = {} + self._pattern_entries: List[Tuple[_HostPattern, _HostEntry]] = [] + + if known_hosts: + self.load(known_hosts) + + def load(self, known_hosts: str) -> None: + """Load known hosts data into this object""" + + for line in known_hosts.splitlines(): + line = line.strip() + if not line or line.startswith('#'): + continue + + marker: Optional[str] + + try: + if line.startswith('@'): + marker, pattern, data = line[1:].split(None, 2) + else: + marker = None + pattern, data = line.split(None, 1) + except ValueError: + raise ValueError('Invalid known hosts entry: %s' % + line) from None + + if marker not in (None, 'cert-authority', 'revoked'): + raise ValueError('Invalid known hosts marker: %s' % + marker) from None + + key: Optional[SSHKey] = None + cert: Optional[SSHCertificate] = None + subject: Optional['X509NamePattern'] = None + + try: + key = import_public_key(data) + except KeyImportError: + try: + cert = import_certificate(data) + except KeyImportError: + if not _x509_available: # pragma: no cover + continue + + try: + subject_text = import_certificate_subject(data) + except KeyImportError: + # Ignore keys in the file that we're unable to parse + continue + + subject = X509NamePattern(subject_text) + + entry = (marker, key, cast(SSHX509Certificate, cert), subject) + + if any(c in pattern for c in '*?|/!'): + self._add_pattern(pattern, entry) + else: + self._add_exact(pattern, entry) + + def _add_exact(self, pattern: str, entry: _HostEntry) -> None: + """Add an exact match entry""" + + for host_pat in pattern.split(','): + if host_pat not in self._exact_entries: + self._exact_entries[host_pat] = [] + + self._exact_entries[host_pat].append(entry) + + def _add_pattern(self, pattern: str, entry: _HostEntry) -> None: + """Add a pattern match entry""" + + if pattern.startswith('|'): + host_pat: _HostPattern = _HashedHost(pattern) + else: + host_pat = _PlainHost(pattern) + + self._pattern_entries.append((host_pat, entry)) + + def _match(self, host: str, addr: str, + port: Optional[int] = None) -> _KnownHostsResult: + """Find host keys matching specified host, address, and port""" + + if addr: + ip: Optional[IPAddress] = ip_address(addr) + else: + try: + ip = ip_address(host) + except ValueError: + ip = None + + if port: + host = '[{}]:{}'.format(host, port) if host else '' + addr = '[{}]:{}'.format(addr, port) if addr else '' + + matches = [] + matches += self._exact_entries.get(host, []) + matches += self._exact_entries.get(addr, []) + matches += (match for (entry, match) in self._pattern_entries + if entry.matches(host, addr, ip)) + + host_keys: List[SSHKey] = [] + ca_keys: List[SSHKey] = [] + revoked_keys: List[SSHKey] = [] + x509_certs: List[SSHX509Certificate] = [] + revoked_certs: List[SSHX509Certificate] = [] + x509_subjects: List['X509NamePattern'] = [] + revoked_subjects: List['X509NamePattern'] = [] + + for marker, key, cert, subject in matches: + if key: + if marker == 'revoked': + revoked_keys.append(key) + elif marker == 'cert-authority': + 
ca_keys.append(key) + else: + host_keys.append(key) + elif cert: + if marker == 'revoked': + revoked_certs.append(cert) + else: + x509_certs.append(cert) + else: + assert subject is not None + + if marker == 'revoked': + revoked_subjects.append(subject) + else: + x509_subjects.append(subject) + + return (host_keys, ca_keys, revoked_keys, x509_certs, revoked_certs, + x509_subjects, revoked_subjects) + + def match(self, host: str, addr: str, + port: Optional[int]) -> _KnownHostsResult: + """Match a host, IP address, and port against known_hosts patterns + + If the port is not the default port and no match is found + for it, the lookup is attempted again without a port number. + + :param host: + The hostname of the target host + :param addr: + The IP address of the target host + :param port: + The port number on the target host, or `None` for the default + :type host: `str` + :type addr: `str` + :type port: `int` + + + :returns: A tuple of matching host keys, CA keys, and revoked keys + + """ + + host_keys, ca_keys, revoked_keys, x509_certs, revoked_certs, \ + x509_subjects, revoked_subjects = self._match(host, addr, port) + + if port and not (host_keys or ca_keys or x509_certs or x509_subjects): + host_keys, ca_keys, revoked_keys, x509_certs, revoked_certs, \ + x509_subjects, revoked_subjects = self._match(host, addr) + + return (host_keys, ca_keys, revoked_keys, x509_certs, revoked_certs, + x509_subjects, revoked_subjects) + + +def import_known_hosts(data: str) -> SSHKnownHosts: + """Import SSH known hosts + + This function imports known host patterns and keys in + OpenSSH known hosts format. + + :param data: + The known hosts data to import + :type data: `str` + + :returns: An :class:`SSHKnownHosts` object + + """ + + return SSHKnownHosts(data) + + +def read_known_hosts(filelist: Union[str, Sequence[str]]) -> SSHKnownHosts: + """Read SSH known hosts from a file or list of files + + This function reads known host patterns and keys in + OpenSSH known hosts format from a file or list of files. + + :param filelist: + The file or list of files to read the known hosts from + :type filelist: `str` or `list` of `str` + + :returns: An :class:`SSHKnownHosts` object + + """ + + known_hosts = SSHKnownHosts() + + if isinstance(filelist, str): + filelist = [filelist] + + for filename in filelist: + known_hosts.load(read_file(filename, 'r')) + + return known_hosts + + +def match_known_hosts(known_hosts: KnownHostsArg, host: str, + addr: str, port: Optional[int]) -> _KnownHostsResult: + """Match a host, IP address, and port against a known_hosts list + + This function looks up a host, IP address, and port in a list of + host patterns in OpenSSH `known_hosts` format and returns the + host keys, CA keys, and revoked keys which match. + + The `known_hosts` argument can be any of the following: + + * a string containing the filename to load host patterns from + * a byte string containing host pattern data to load + * an already loaded :class:`SSHKnownHosts` object containing + host patterns to match against + * an alternate matching function which accepts a host, address, + and port and returns lists of trusted host keys, trusted CA + keys, and revoked keys to load + * lists of trusted host keys, trusted CA keys, and revoked keys + to load without doing any matching + + If the port is not the default port and no match is found + for it, the lookup is attempted again without a port number. 
+ + :param known_hosts: + The host patterns to match against + :param host: + The hostname of the target host + :param addr: + The IP address of the target host + :param port: + The port number on the target host, or `None` for the default + :type host: `str` + :type addr: `str` + :type port: `int` + + :returns: A tuple of matching host keys, CA keys, and revoked keys + + """ + + if isinstance(known_hosts, str) or \ + (known_hosts and isinstance(known_hosts, list) and + isinstance(known_hosts[0], str)): + known_hosts = read_known_hosts(known_hosts) + elif isinstance(known_hosts, bytes): + known_hosts = import_known_hosts(known_hosts.decode()) + + if isinstance(known_hosts, SSHKnownHosts): + known_hosts = known_hosts.match(host, addr, port) + else: + if callable(known_hosts): + known_hosts = known_hosts(host, addr, port) + + result = cast(Sequence[str], known_hosts) + + result = (tuple(map(load_public_keys, result[:3])) + + tuple(map(load_certificates, result[3:5])) + + tuple(map(_load_subject_names, result[5:7]))) + + if len(result) == 3: + # Provide backward compatibility for pre-X.509 releases + result += ((), (), (), ()) + + known_hosts = cast(_KnownHostsResult, result) + + for cert in list(known_hosts[3]) + list(known_hosts[4]): + if not cert.is_x509: + raise ValueError('OpenSSH certificates not ' + 'allowed in known hosts') from None + + return known_hosts diff --git a/venv/lib/python3.12/site-packages/asyncssh/listener.py b/venv/lib/python3.12/site-packages/asyncssh/listener.py new file mode 100644 index 0000000..3ce12dc --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/listener.py @@ -0,0 +1,379 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH listeners""" + +import asyncio +import errno +import socket +from types import TracebackType +from typing import TYPE_CHECKING, AnyStr, Callable, Generic, List, Optional +from typing import Sequence, Tuple, Type, Union + +from .forward import SSHForwarderCoro +from .forward import SSHLocalPortForwarder, SSHLocalPathForwarder +from .misc import HostPort, MaybeAwait +from .session import SSHTCPSession, SSHUNIXSession +from .socks import SSHSOCKSForwarder + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHTCPChannel, SSHUNIXChannel + from .connection import SSHConnection, SSHClientConnection + + +_LocalListenerFactory = Callable[[], asyncio.BaseProtocol] + +ListenKey = Union[HostPort, str] + +TCPListenerFactory = Callable[[str, int], MaybeAwait[SSHTCPSession[AnyStr]]] +UNIXListenerFactory = Callable[[], MaybeAwait[SSHUNIXSession[AnyStr]]] + + +class SSHListener: + """SSH listener for inbound connections""" + + def __init__(self) -> None: + self._tunnel: Optional['SSHConnection'] = None + + async def __aenter__(self) -> 'SSHListener': + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: 
Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + self.close() + await self.wait_closed() + return False + + def get_port(self) -> int: + """Return the port number being listened on + + This method returns the port number that the remote listener + was bound to. When the requested remote listening port is `0` + to indicate a dynamic port, this method can be called to + determine what listening port was selected. This function + only applies to TCP listeners. + + :returns: The port number being listened on + + """ + + # pylint: disable=no-self-use + + return 0 + + def set_tunnel(self, tunnel: 'SSHConnection') -> None: + """Set tunnel associated with listener""" + + self._tunnel = tunnel + + def close(self) -> None: + """Stop listening for new connections + + This method can be called to stop listening for connections. + Existing connections will remain open. + + """ + + if self._tunnel: + self._tunnel.close() + + async def wait_closed(self) -> None: + """Wait for the listener to close + + This method is a coroutine which waits for the associated + listeners to be closed. + + """ + + if self._tunnel: + await self._tunnel.wait_closed() + self._tunnel = None + + +class SSHClientListener(SSHListener): + """Client listener used to accept inbound forwarded connections""" + + def __init__(self, conn: 'SSHClientConnection', encoding: Optional[str], + errors: str, window: int, max_pktsize: int): + super().__init__() + + self._conn: Optional['SSHClientConnection'] = conn + self._encoding = encoding + self._errors = errors + self._window = window + self._max_pktsize = max_pktsize + self._close_event = asyncio.Event() + + async def _close(self) -> None: + """Close this listener""" + + self._close_event.set() + self._conn = None + + def close(self) -> None: + """Close this listener asynchronously""" + + super().close() + + if self._conn: + self._conn.create_task(self._close()) + + async def wait_closed(self) -> None: + """Wait for this listener to finish closing""" + + await super().wait_closed() + + await self._close_event.wait() + + +class SSHTCPClientListener(SSHClientListener, Generic[AnyStr]): + """Client listener used to accept inbound forwarded TCP connections""" + + def __init__(self, conn: 'SSHClientConnection', + session_factory: TCPListenerFactory[AnyStr], + listen_host: str, listen_port: int, + encoding: Optional[str], errors: str, + window: int, max_pktsize: int): + super().__init__(conn, encoding, errors, window, max_pktsize) + + self._session_factory: TCPListenerFactory[AnyStr] = session_factory + self._listen_host = listen_host + self._listen_port = listen_port + + async def _close(self) -> None: + """Close this listener""" + + if self._conn: # pragma: no branch + await self._conn.close_client_tcp_listener(self._listen_host, + self._listen_port) + + await super()._close() + + def process_connection(self, orig_host: str, orig_port: int) -> \ + Tuple['SSHTCPChannel[AnyStr]', + MaybeAwait[SSHTCPSession[AnyStr]]]: + """Process a forwarded TCP connection""" + + assert self._conn is not None + + chan = self._conn.create_tcp_channel(self._encoding, self._errors, + self._window, self._max_pktsize) + + chan.set_inbound_peer_names(self._listen_host, self._listen_port, + orig_host, orig_port) + + return chan, self._session_factory(orig_host, orig_port) + + def get_addresses(self) -> List[Tuple]: + """Return the socket addresses being listened on""" + + return [(self._listen_host, self._listen_port)] + + def get_port(self) -> int: + """Return the port number being listened 
on""" + + return self._listen_port + + +class SSHUNIXClientListener(SSHClientListener, Generic[AnyStr]): + """Client listener used to accept inbound forwarded UNIX connections""" + + def __init__(self, conn: 'SSHClientConnection', + session_factory: UNIXListenerFactory[AnyStr], + listen_path: str, encoding: Optional[str], + errors: str, window: int, max_pktsize: int): + super().__init__(conn, encoding, errors, window, max_pktsize) + + self._session_factory: UNIXListenerFactory[AnyStr] = session_factory + self._listen_path = listen_path + + async def _close(self) -> None: + """Close this listener""" + + if self._conn: # pragma: no branch + await self._conn.close_client_unix_listener(self._listen_path) + + await super()._close() + + def process_connection(self) -> \ + Tuple['SSHUNIXChannel[AnyStr]', + MaybeAwait[SSHUNIXSession[AnyStr]]]: + """Process a forwarded UNIX connection""" + + assert self._conn is not None + + chan = self._conn.create_unix_channel(self._encoding, self._errors, + self._window, self._max_pktsize) + + chan.set_inbound_peer_names(self._listen_path) + + return chan, self._session_factory() + + +class SSHForwardListener(SSHListener): + """A listener used when forwarding traffic from local ports""" + + def __init__(self, conn: 'SSHConnection', + servers: Sequence[asyncio.AbstractServer], + listen_key: ListenKey, listen_port: int = 0): + super().__init__() + + self._conn: Optional['SSHConnection'] = conn + self._servers = servers + self._listen_key = listen_key + self._listen_port = listen_port + + def get_port(self) -> int: + """Return the port number being listened on""" + + return self._listen_port + + def close(self) -> None: + """Close this listener""" + + if self._conn: # pragma: no branch + self._conn.close_forward_listener(self._listen_key) + + for server in self._servers: + server.close() + + self._conn = None + + async def wait_closed(self) -> None: + """Wait for this listener to finish closing""" + + await super().wait_closed() + + for server in self._servers: + await server.wait_closed() + + self._servers = [] + + +async def create_tcp_local_listener( + conn: 'SSHConnection', loop: asyncio.AbstractEventLoop, + protocol_factory: _LocalListenerFactory, listen_host: str, + listen_port: int) -> 'SSHForwardListener': + """Create a listener to forward traffic from a local TCP port over SSH""" + + addrinfo = await loop.getaddrinfo(listen_host or None, listen_port, + family=socket.AF_UNSPEC, + type=socket.SOCK_STREAM, + flags=socket.AI_PASSIVE) + + if not addrinfo: # pragma: no cover + raise OSError('getaddrinfo() returned empty list') + + servers: List[asyncio.AbstractServer] = [] + + for family, socktype, proto, _, sa in addrinfo: + try: + sock = socket.socket(family, socktype, proto) + except OSError: # pragma: no cover + continue + + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) + + if family == socket.AF_INET6: + try: + sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, True) + except AttributeError: # pragma: no cover + pass + + if sa[1] == 0: + sa = sa[:1] + (listen_port,) + sa[2:] # type: ignore + + try: + sock.bind(sa) + except (OSError, OverflowError) as exc: + sock.close() + + for server in servers: + server.close() + + if isinstance(exc, OverflowError): # pragma: no cover + exc.errno = errno.EOVERFLOW # type: ignore + exc.strerror = str(exc) # type: ignore + + # pylint: disable=no-member + raise OSError(exc.errno, 'error while attempting ' # type: ignore + 'to bind on address %r: %s' % + (sa, exc.strerror)) from None # type: ignore + + 
if listen_port == 0: + listen_port = sock.getsockname()[1] + conn.logger.debug1('Assigning dynamic port %d', listen_port) + + server = await loop.create_server(protocol_factory, sock=sock) + servers.append(server) + + listen_key = listen_host or '', listen_port + return SSHForwardListener(conn, servers, listen_key, listen_port) + + +async def create_tcp_forward_listener(conn: 'SSHConnection', + loop: asyncio.AbstractEventLoop, + coro: SSHForwarderCoro, listen_host: str, + listen_port: int) -> \ + 'SSHForwardListener': + """Create a listener to forward traffic from a local TCP port over SSH""" + + def protocol_factory() -> asyncio.BaseProtocol: + """Start a port forwarder for each new local connection""" + + return SSHLocalPortForwarder(conn, coro) + + return await create_tcp_local_listener(conn, loop, protocol_factory, + listen_host, listen_port) + + +async def create_unix_forward_listener(conn: 'SSHConnection', + loop: asyncio.AbstractEventLoop, + coro: SSHForwarderCoro, + listen_path: str) -> \ + 'SSHForwardListener': + """Create a listener to forward a local UNIX domain socket over SSH""" + + def protocol_factory() -> asyncio.BaseProtocol: + """Start a path forwarder for each new local connection""" + + return SSHLocalPathForwarder(conn, coro) + + server = await loop.create_unix_server(protocol_factory, listen_path) + + return SSHForwardListener(conn, [server], listen_path) + + +async def create_socks_listener(conn: 'SSHConnection', + loop: asyncio.AbstractEventLoop, + coro: SSHForwarderCoro, listen_host: str, + listen_port: int) -> SSHForwardListener: + """Create a SOCKS listener to forward traffic over SSH""" + + def protocol_factory() -> asyncio.BaseProtocol: + """Start a port forwarder for each new SOCKS connection""" + + return SSHSOCKSForwarder(conn, coro) + + return await create_tcp_local_listener(conn, loop, protocol_factory, + listen_host, listen_port) diff --git a/venv/lib/python3.12/site-packages/asyncssh/logging.py b/venv/lib/python3.12/site-packages/asyncssh/logging.py new file mode 100644 index 0000000..828d85c --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/logging.py @@ -0,0 +1,240 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Sam Crooks - initial implementation +# Ron Frederick - minor cleanup + +"""Logging functions""" + +import logging +from typing import MutableMapping, Optional, Tuple, Union, cast + + +_LogArg = object +_ObjDict = MutableMapping[str, object] + + +class SSHLogger(logging.LoggerAdapter): + """Adapter to add context to AsyncSSH log messages""" + + _debug_level = 1 + _pkg_logger = logging.getLogger(__package__ or 'asyncssh') + + def __init__(self, parent: logging.Logger = _pkg_logger, + child: str = '', context: str = ''): + self._context = context + self._logger = parent.getChild(child) if child else parent + + super().__init__(self._logger, {}) + + def _extend_context(self, context: str) -> str: + """Extend context provided by this logger""" + + if context: + if self._context: + context = self._context + ', ' + context + else: + context = self._context + + return context + + def get_child(self, child: str = '', context: str = '') -> 'SSHLogger': + """Return child logger with optional added context""" + + return type(self)(self._logger, child, self._extend_context(context)) + + def log(self, level: int, msg: object, *args, **kwargs) -> None: + """Log a message to the underlying logger""" + + def _text(arg: _LogArg) -> str: + """Convert a log argument to text""" + + if isinstance(arg, list): + if arg and isinstance(arg[0], bytes): + result = b','.join(arg).decode('utf-8', errors='replace') + else: + result = ','.join(arg) + elif isinstance(arg, tuple): + host, port = arg + + if host: + result = '%s, port %d' % (host, port) if port else host + else: + result = 'port %d' % port if port else 'dynamic port' + else: + result = cast(str, arg) + + if isinstance(result, bytes): + result = result.decode('ascii', errors='backslashreplace') + + if not result.isprintable(): + result = repr(result)[1:-1] + + return result + + log_args = [_text(arg) for arg in args] + + super().log(level, msg, *log_args, **kwargs) + + def process(self, msg: str, kwargs: _ObjDict) -> Tuple[str, _ObjDict]: + """Add context to log message""" + + extra = cast(_ObjDict, kwargs.get('extra', {})) + + context = self._extend_context(cast(str, extra.get('context'))) + context = '[' + context + '] ' if context else '' + + packet = cast(bytes, extra.get('packet')) + pktdata = '' + offset = 0 + + while packet: + line = '\n %08x:' % offset + + for b in packet[:16]: + line += ' %02x' % b + + line += (62 - len(line)) * ' ' + + for b in packet[:16]: + if b < 0x20 or b >= 0x80: + c = '.' 
+                elif b == ord('%'):
+                    c = '%%'
+                else:
+                    c = chr(b)
+
+                line += c
+
+            pktdata += line
+
+            packet = packet[16:]
+            offset += 16
+
+        return context + msg + pktdata, kwargs
+
+    @classmethod
+    def set_debug_level(cls, level: int) -> None:
+        """Set AsyncSSH debug log level"""
+
+        if level < 1 or level > 3:
+            raise ValueError('Debug log level must be between 1 and 3')
+
+        cls._debug_level = level
+
+    def debug1(self, msg: str, *args: _LogArg, **kwargs: object) -> None:
+        """Write a level 1 debug log message"""
+
+        self.log(logging.DEBUG, msg, *args, **kwargs)
+
+    def debug2(self, msg: str, *args: _LogArg, **kwargs: object) -> None:
+        """Write a level 2 debug log message"""
+
+        if self._debug_level >= 2:
+            self.log(logging.DEBUG, msg, *args, **kwargs)
+
+    def packet(self, pktid: Optional[int], packet: bytes, msg: str,
+               *args: _LogArg, **kwargs: object) -> None:
+        """Write a control packet debug log message"""
+
+        if self._debug_level >= 3:
+            kwargs.setdefault('extra', {})
+            extra = cast(_ObjDict, kwargs.get('extra'))
+
+            if pktid is not None:
+                extra.update(context='pktid=%d' % pktid)
+
+            extra.update(packet=packet)
+
+            self.log(logging.DEBUG, msg, *args, **kwargs)
+
+
+def set_log_level(level: Union[int, str]) -> None:
+    """Set the AsyncSSH log level
+
+       This function sets the log level of the AsyncSSH logger. It
+       defaults to `'NOTSET'`, meaning that it will track the debug
+       level set on the root Python logger.
+
+       For finer-grained control over the amount of debug logging,
+       see :func:`set_debug_level`.
+
+       :param level:
+           The log level to set, as defined by the `logging` module
+       :type level: `int` or `str`
+
+    """
+
+    logger.setLevel(level)
+
+
+def set_sftp_log_level(level: Union[int, str]) -> None:
+    """Set the AsyncSSH SFTP/SCP log level
+
+       This function sets the log level of the AsyncSSH SFTP/SCP logger.
+       It defaults to `'NOTSET'`, meaning that it will track the debug
+       level set on the main AsyncSSH logger.
+
+       For finer-grained control over the amount of debug logging,
+       see :func:`set_debug_level`.
+
+       :param level:
+           The log level to set, as defined by the `logging` module
+       :type level: `int` or `str`
+
+    """
+
+    sftp_logger.setLevel(level)
+
+
+def set_debug_level(level: int) -> None:
+    """Set the AsyncSSH debug log level
+
+       This function sets the level of debug logging done by the
+       AsyncSSH logger, from the following options:
+
+           ===== ====================================
+           Level Description
+           ===== ====================================
+             1   Minimal debug logging
+             2   Full debug logging
+             3   Full debug logging with packet dumps
+           ===== ====================================
+
+       The debug level defaults to level 1 (minimal debug logging).
+
+       .. note:: For this setting to have any effect, the effective log
+                 level of the AsyncSSH logger must be set to DEBUG.
+
+       .. warning:: Extreme caution should be used when setting debug
+                    level to 3, as this can expose user passwords in
+                    clear text. This level should generally only be
+                    needed when tracking down issues with malformed
+                    or incomplete packets.
+
+       :param level:
+           The debug level to set, as defined above.
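Taken together, `set_log_level()`, `set_sftp_log_level()`, and `set_debug_level()` control what AsyncSSH emits. A minimal sketch of a typical debugging setup (level 2 avoids the packet dumps that can leak passwords):

    import logging
    import asyncssh

    logging.basicConfig(level=logging.DEBUG)

    asyncssh.set_log_level('DEBUG')          # the 'asyncssh' logger itself
    asyncssh.set_debug_level(2)              # debug1 + debug2, no packet dumps
    asyncssh.set_sftp_log_level('WARNING')   # quiet the 'asyncssh.sftp' child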
+ :type level: `int` + + """ + + logger.set_debug_level(level) + + +logger = SSHLogger() +sftp_logger = logger.get_child('sftp') diff --git a/venv/lib/python3.12/site-packages/asyncssh/mac.py b/venv/lib/python3.12/site-packages/asyncssh/mac.py new file mode 100644 index 0000000..d6dc4af --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/mac.py @@ -0,0 +1,212 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH message authentication handlers""" + +from hashlib import md5, sha1, sha224, sha256, sha384, sha512 +import hmac +from typing import Dict, Callable, List, Tuple + +from .packet import UInt32, UInt64 + +try: + from .crypto import umac64, umac128 + _umac_available = True +except ImportError: # pragma: no cover + _umac_available = False + + +_MACAlgsArgs = Tuple[bytes, int, int, bool, Callable, Tuple, bool] +_MACHandler = Tuple[Callable, int, Tuple] +_MACParams = Tuple[int, int, bool] + + +_OPENSSH = b'@openssh.com' +_ETM = b'-etm' + _OPENSSH + +_mac_algs: List[bytes] = [] +_default_mac_algs: List[bytes] = [] +_mac_handler: Dict[bytes, _MACHandler] = {} +_mac_params: Dict[bytes, _MACParams] = {} + + +class MAC: + """Parent class for SSH message authentication handlers""" + + def __init__(self, key: bytes, hash_size: int): + self._key = key + self._hash_size = hash_size + + def sign(self, seq: int, packet: bytes) -> bytes: + """Compute a signature for a message""" + + raise NotImplementedError + + def verify(self, seq: int, packet: bytes, sig: bytes) -> bool: + """Verify the signature of a message""" + + raise NotImplementedError + + +class _NullMAC(MAC): + """Null message authentication handler""" + + def sign(self, seq: int, packet: bytes) -> bytes: + """Compute a signature for a message""" + + return b'' + + def verify(self, seq: int, packet: bytes, sig: bytes) -> bool: + """Verify the signature of a message""" + + return sig == b'' + + +class _HMAC(MAC): + """HMAC-based message authentication handler""" + + def __init__(self, key: bytes, hash_size: int, hash_alg: Callable): + super().__init__(key, hash_size) + self._hash_alg = hash_alg + + def sign(self, seq: int, packet: bytes) -> bytes: + """Compute a signature for a message""" + + data = UInt32(seq) + packet + sig = hmac.new(self._key, data, self._hash_alg).digest() + return sig[:self._hash_size] + + def verify(self, seq: int, packet: bytes, sig: bytes) -> bool: + """Verify the signature of a message""" + + return hmac.compare_digest(self.sign(seq, packet), sig) + + +class _UMAC(MAC): + """UMAC-based message authentication handler""" + + def __init__(self, key: bytes, hash_size: int, umac_alg: Callable): + super().__init__(key, hash_size) + self._umac_alg = umac_alg + + def sign(self, seq: int, packet: bytes) -> bytes: + """Compute a signature for a message""" + + return self._umac_alg(self._key, packet, UInt64(seq)).digest() + + def verify(self, seq: int, 
packet: bytes, sig: bytes) -> bool: + """Verify the signature of a message""" + + return hmac.compare_digest(self.sign(seq, packet), sig) + + +def register_mac_alg(mac_alg: bytes, key_size: int, hash_size: int, + etm: bool, handler: Callable, args: Tuple, + default: bool) -> None: + """Register a MAC algorithm""" + + if mac_alg: + _mac_algs.append(mac_alg) + + if default: + _default_mac_algs.append(mac_alg) + + _mac_handler[mac_alg] = (handler, hash_size, args) + _mac_params[mac_alg] = (key_size, hash_size, etm) + + +def get_mac_algs() -> List[bytes]: + """Return supported MAC algorithms""" + + return _mac_algs + + +def get_default_mac_algs() -> List[bytes]: + """Return default MAC algorithms""" + + return _default_mac_algs + + +def get_mac_params(mac_alg: bytes) -> _MACParams: + """Get parameters of a MAC algorithm + + This function returns the key and hash sizes of a MAC algorithm and + whether or not to compute the MAC before or after encryption. + + """ + + return _mac_params[mac_alg] + + +def get_mac(mac_alg: bytes, key: bytes) -> MAC: + """Return a MAC handler + + This function returns a MAC object initialized with the specified + key that can be used for data signing and verification. + + """ + + handler, hash_size, args = _mac_handler[mac_alg] + return handler(key, hash_size, *args) + + +_mac_algs_list: Tuple[_MACAlgsArgs, ...] = ( + (b'', 0, 0, False, _NullMAC, (), True), +) + +if _umac_available: # pragma: no branch + _mac_algs_list += ( + (b'umac-64' + _ETM, 16, 8, True, _UMAC, (umac64,), True), + (b'umac-128' + _ETM, 16, 16, True, _UMAC, (umac128,), True)) + +_mac_algs_list += ( + (b'hmac-sha2-256' + _ETM, 32, 32, True, _HMAC, (sha256,), True), + (b'hmac-sha2-512' + _ETM, 64, 64, True, _HMAC, (sha512,), True), + (b'hmac-sha1' + _ETM, 20, 20, True, _HMAC, (sha1,), True), + (b'hmac-md5' + _ETM, 16, 16, True, _HMAC, (md5,), False), + (b'hmac-sha2-256-96' + _ETM, 32, 12, True, _HMAC, (sha256,), False), + (b'hmac-sha2-512-96' + _ETM, 64, 12, True, _HMAC, (sha512,), False), + (b'hmac-sha1-96' + _ETM, 20, 12, True, _HMAC, (sha1,), False), + (b'hmac-md5-96' + _ETM, 16, 12, True, _HMAC, (md5,), False)) + +if _umac_available: # pragma: no branch + _mac_algs_list += ( + (b'umac-64' + _OPENSSH, 16, 8, False, _UMAC, (umac64,), True), + (b'umac-128' + _OPENSSH, 16, 16, False, _UMAC, (umac128,), True)) + +_mac_algs_list += ( + (b'hmac-sha2-256', 32, 32, False, _HMAC, (sha256,), True), + (b'hmac-sha2-512', 64, 64, False, _HMAC, (sha512,), True), + (b'hmac-sha1', 20, 20, False, _HMAC, (sha1,), True), + (b'hmac-sha256-2@ssh.com', 32, 32, False, _HMAC, (sha256,), True), + (b'hmac-sha224@ssh.com', 28, 28, False, _HMAC, (sha224,), True), + (b'hmac-sha256@ssh.com', 16, 32, False, _HMAC, (sha256,), True), + (b'hmac-sha384@ssh.com', 48, 48, False, _HMAC, (sha384,), True), + (b'hmac-sha512@ssh.com', 64, 64, False, _HMAC, (sha512,), True), + (b'hmac-md5', 16, 16, False, _HMAC, (md5,), False), + (b'hmac-sha2-256-96', 32, 12, False, _HMAC, (sha256,), False), + (b'hmac-sha2-512-96', 64, 12, False, _HMAC, (sha512,), False), + (b'hmac-sha1-96', 20, 12, False, _HMAC, (sha1,), False), + (b'hmac-md5-96', 16, 12, False, _HMAC, (md5,), False)) + +for _alg, _key_size, _hash_size, _etm, \ + _mac_alg, _args, _default in _mac_algs_list: + register_mac_alg(_alg, _key_size, _hash_size, _etm, + _mac_alg, _args, _default) diff --git a/venv/lib/python3.12/site-packages/asyncssh/misc.py b/venv/lib/python3.12/site-packages/asyncssh/misc.py new file mode 100644 index 0000000..22c75d9 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/asyncssh/misc.py @@ -0,0 +1,788 @@ +# Copyright (c) 2013-2022 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Miscellaneous utility classes and functions""" + +import functools +import ipaddress +import re +import socket + +from pathlib import Path, PurePath +from random import SystemRandom +from types import TracebackType +from typing import Any, AsyncContextManager, Awaitable, Callable, Dict +from typing import Generator, Generic, IO, Mapping, Optional, Sequence +from typing import Tuple, Type, TypeVar, Union, cast, overload +from typing_extensions import Literal, Protocol + +from .constants import DEFAULT_LANG +from .constants import DISC_COMPRESSION_ERROR, DISC_CONNECTION_LOST +from .constants import DISC_HOST_KEY_NOT_VERIFIABLE, DISC_ILLEGAL_USER_NAME +from .constants import DISC_KEY_EXCHANGE_FAILED, DISC_MAC_ERROR +from .constants import DISC_NO_MORE_AUTH_METHODS_AVAILABLE +from .constants import DISC_PROTOCOL_ERROR, DISC_PROTOCOL_VERSION_NOT_SUPPORTED +from .constants import DISC_SERVICE_NOT_AVAILABLE + + +class _Hash(Protocol): + """Protocol for hashing data""" + + @property + def digest_size(self) -> int: + """Return the hash digest size""" + + @property + def block_size(self) -> int: + """Return the hash block size""" + + @property + def name(self) -> str: + """Return the hash name""" + + def digest(self) -> bytes: + """Return the digest value as a bytes object""" + + def hexdigest(self) -> str: + """Return the digest value as a string of hexadecimal digits""" + + def update(self, __data: bytes) -> None: + """Update this hash object's state with the provided bytes""" + + +class HashType(Protocol): + """Protocol for returning the type of a hash function""" + + def __call__(self, __data: bytes = ...) -> _Hash: + """Create a new hash object""" + + +class _SupportsWaitClosed(Protocol): + """A class that supports async wait_closed""" + + async def wait_closed(self) -> None: + """Wait for transport to close""" + + +_T = TypeVar('_T') +DefTuple = Union[Tuple[()], _T] +MaybeAwait = Union[_T, Awaitable[_T]] + +ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] +OptExcInfo = Union[ExcInfo, Tuple[None, None, None]] + +BytesOrStr = Union[bytes, str] +FilePath = Union[str, PurePath] +HostPort = Tuple[str, int] +IPAddress = Union[ipaddress.IPv4Address, ipaddress.IPv6Address] +IPNetwork = Union[ipaddress.IPv4Network, ipaddress.IPv6Network] +SockAddr = Union[Tuple[str, int], Tuple[str, int, int, int]] + + +# Define a version of randrange which is based on SystemRandom(), so that +# we get back numbers suitable for cryptographic use. 
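Looking back at the `_HMAC` handler in mac.py above: the MAC is computed over a big-endian uint32 sequence number followed by the packet, then truncated to `hash_size` (the `-96` algorithms keep only 12 bytes). A self-contained sketch of that sign/verify scheme using only the standard library:

    import hmac
    from hashlib import sha256

    def sign(key: bytes, seq: int, packet: bytes,
             hash_size: int = 32) -> bytes:
        data = seq.to_bytes(4, 'big') + packet
        return hmac.new(key, data, sha256).digest()[:hash_size]

    def verify(key: bytes, seq: int, packet: bytes, sig: bytes,
               hash_size: int = 32) -> bool:
        # compare_digest runs in constant time, like _HMAC.verify above
        return hmac.compare_digest(sign(key, seq, packet, hash_size), sig)

    key = bytes(32)
    tag = sign(key, 3, b'payload')
    assert verify(key, 3, b'payload', tag)
    assert not verify(key, 4, b'payload', tag)   # wrong sequence number fails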
+_random = SystemRandom() +randrange = _random.randrange + +_unit_pattern = re.compile(r'([A-Za-z])') +_byte_units = {'': 1, 'k': 1024, 'm': 1024*1024, 'g': 1024*1024*1024} +_time_units = {'': 1, 's': 1, 'm': 60, 'h': 60*60, + 'd': 24*60*60, 'w': 7*24*60*60} + + +def hide_empty(value: object, prefix: str = ', ') -> str: + """Return a string with optional prefix if value is non-empty""" + + value = str(value) + return prefix + value if value else '' + + +def plural(length: int, label: str, suffix: str = 's') -> str: + """Return a label with an optional plural suffix""" + + return '%d %s%s' % (length, label, suffix if length != 1 else '') + + +def all_ints(seq: Sequence[object]) -> bool: + """Return if a sequence contains all integers""" + + return all(isinstance(i, int) for i in seq) + + +def get_symbol_names(symbols: Mapping[str, int], prefix: str, + strip_leading: int = 0) -> Mapping[int, str]: + """Return a mapping from values to symbol names for logging""" + + return {value: name[strip_leading:] for name, value in symbols.items() + if name.startswith(prefix)} + + +# Punctuation to map when creating handler names +_HANDLER_PUNCTUATION = (('@', '_at_'), ('.', '_dot_'), ('-', '_')) + +def map_handler_name(name: str) -> str: + """Map punctuation so a string can be used as a handler name""" + + for old, new in _HANDLER_PUNCTUATION: + name = name.replace(old, new) + + return name + + +def _normalize_scoped_ip(addr: str) -> str: + """Normalize scoped IP address + + The ipaddress module doesn't handle scoped addresses properly, + so we normalize scoped IP addresses using socket.getaddrinfo + before we pass them into ip_address/ip_network. + + """ + + try: + addrinfo = socket.getaddrinfo(addr, None, family=socket.AF_UNSPEC, + type=socket.SOCK_STREAM, + flags=socket.AI_NUMERICHOST)[0] + except socket.gaierror: + return addr + + if addrinfo[0] == socket.AF_INET6: + sa = addrinfo[4] + addr = sa[0] + + idx = addr.find('%') + if idx >= 0: # pragma: no cover + addr = addr[:idx] + + ip = ipaddress.ip_address(addr) + + if ip.is_link_local: + scope_id = cast(Tuple[str, int, int, int], sa)[3] + addr = str(ipaddress.ip_address(int(ip) | (scope_id << 96))) + + return addr + + +def ip_address(addr: str) -> IPAddress: + """Wrapper for ipaddress.ip_address which supports scoped addresses""" + + return ipaddress.ip_address(_normalize_scoped_ip(addr)) + + +def ip_network(addr: str) -> IPNetwork: + """Wrapper for ipaddress.ip_network which supports scoped addresses""" + + idx = addr.find('/') + if idx >= 0: + addr, mask = addr[:idx], addr[idx:] + else: + mask = '' + + return ipaddress.ip_network(_normalize_scoped_ip(addr) + mask) + + +def open_file(filename: FilePath, mode: str, buffering: int = -1) -> IO[bytes]: + """Open a file with home directory expansion""" + + return open(Path(filename).expanduser(), mode, buffering=buffering) + + +@overload +def read_file(filename: FilePath) -> bytes: + """Read from a binary file with home directory expansion""" + +@overload +def read_file(filename: FilePath, mode: Literal['rb']) -> bytes: + """Read from a binary file with home directory expansion""" + +@overload +def read_file(filename: FilePath, mode: Literal['r']) -> str: + """Read from a text file with home directory expansion""" + +def read_file(filename, mode = 'rb'): + """Read from a file with home directory expansion""" + + with open_file(filename, mode) as f: + return f.read() + + +def write_file(filename: FilePath, data: bytes, mode: str = 'wb') -> int: + """Write or append to a file with home directory 
expansion""" + + with open_file(filename, mode) as f: + return f.write(data) + + +def _parse_units(value: str, suffixes: Mapping[str, int], label: str) -> float: + """Parse a series of integers followed by unit suffixes""" + + matches = _unit_pattern.split(value) + + if matches[-1]: + matches.append('') + else: + matches.pop() + + try: + return sum(float(matches[i]) * suffixes[matches[i+1].lower()] + for i in range(0, len(matches), 2)) + except KeyError: + raise ValueError('Invalid ' + label) from None + + +def parse_byte_count(value: str) -> int: + """Parse a byte count with optional k, m, or g suffixes""" + + return int(_parse_units(value, _byte_units, 'byte count')) + + +def parse_time_interval(value: str) -> float: + """Parse a time interval with optional s, m, h, d, or w suffixes""" + + return _parse_units(value, _time_units, 'time interval') + + +_ACM = TypeVar('_ACM', bound=AsyncContextManager, covariant=True) + +class _ACMWrapper(Generic[_ACM]): + """Async context manager wrapper""" + + def __init__(self, coro: Awaitable[_ACM]): + self._coro = coro + self._coro_result: Optional[_ACM] = None + + def __await__(self) -> Generator[Any, None, _ACM]: + return self._coro.__await__() + + async def __aenter__(self) -> _ACM: + self._coro_result = await self._coro + + return await self._coro_result.__aenter__() + + async def __aexit__(self, exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType]) -> Optional[bool]: + assert self._coro_result is not None + + exit_result = await self._coro_result.__aexit__( + exc_type, exc_value, traceback) + + self._coro_result = None + + return exit_result + + +_ACMCoro = Callable[..., Awaitable[_ACM]] +_ACMWrapperFunc = Callable[..., _ACMWrapper[_ACM]] + +def async_context_manager(coro: _ACMCoro[_ACM]) -> _ACMWrapperFunc[_ACM]: + """Decorator for functions returning asynchronous context managers + + This decorator can be used on functions which return objects + intended to be async context managers. The object returned by + the function should implement __aenter__ and __aexit__ methods + to run when the async context is entered and exited. + + This wrapper also allows the use of "await" on the function being + decorated, to return the context manager without entering it. + + """ + + @functools.wraps(coro) + def context_wrapper(*args, **kwargs) -> _ACMWrapper[_ACM]: + """Return an async context manager wrapper for this coroutine""" + + return _ACMWrapper(coro(*args, **kwargs)) + + return context_wrapper + + +async def maybe_wait_closed(writer: '_SupportsWaitClosed') -> None: + """Wait for a StreamWriter to close, if Python version supports it + + Python 3.8 triggers a false error report about garbage collecting + an open stream if a close is in progress when a StreamWriter is + garbage collected. This can be avoided by calling wait_closed(), + but that method is not available in Python releases prior to 3.7. + This function wraps this call, ignoring the error if the method + is not available. 
+ + """ + + try: + await writer.wait_closed() + except AttributeError: # pragma: no cover + pass + + +class Options: + """Container for configuration options""" + + kwargs: Dict[str, object] + + def __init__(self, options: Optional['Options'] = None, **kwargs: object): + if options: + if not isinstance(options, type(self)): + raise TypeError('Invalid %s, got %s' % + (type(self).__name__, type(options).__name__)) + + self.kwargs = options.kwargs.copy() + else: + self.kwargs = {} + + self.kwargs.update(kwargs) + self.prepare(**self.kwargs) + + def prepare(self, **kwargs: object) -> None: + """Pre-process configuration options""" + + def update(self, kwargs: Dict[str, object]) -> None: + """Update options based on keyword parameters passed in""" + + self.kwargs.update(kwargs) + self.prepare(**self.kwargs) + + +class _RecordMeta(type): + """Metaclass for general-purpose record type""" + + def __new__(mcs: Type['_RecordMeta'], name: str, bases: Tuple[type, ...], + ns: Dict[str, object]) -> '_RecordMeta': + if name != 'Record': + fields = cast(Mapping[str, str], + ns.get('__annotations__', {})).keys() + defaults = {k: ns.get(k) for k in fields} + + ns = {k: v for k, v in ns.items() if k not in fields} + ns['__slots__'] = defaults + + return cast(_RecordMeta, super().__new__(mcs, name, bases, ns)) + + +class Record(metaclass=_RecordMeta): + """Generic Record class""" + + __slots__: Mapping[str, object] = {} + + def __init__(self, *args: object, **kwargs: object): + for k, v in self.__slots__.items(): + setattr(self, k, v) + + for k, v in zip(self.__slots__, args): + setattr(self, k, v) + + for k, v in kwargs.items(): + setattr(self, k, v) + + def __repr__(self) -> str: + return '%s(%s)' % (type(self).__name__, + ', '.join('%s=%r' % (k, getattr(self, k)) + for k in self.__slots__)) + + def __str__(self) -> str: + values = ((k, self._format(k, getattr(self, k))) + for k in self.__slots__) + + return ', '.join('%s: %s' % (k, v) for k, v in values if v is not None) + + def _format(self, k: str, v: object) -> Optional[str]: + """Format a field as a string""" + + # pylint: disable=no-self-use,unused-argument + + return str(v) + +class Error(Exception): + """General SSH error""" + + def __init__(self, code: int, reason: str, lang: str = DEFAULT_LANG): + super().__init__(reason) + self.code = code + self.reason = reason + self.lang = lang + + +class DisconnectError(Error): + """SSH disconnect error + + This exception is raised when a serious error occurs which causes + the SSH connection to be disconnected. Exception codes should be + taken from :ref:`disconnect reason codes `. + See below for exception subclasses tied to specific disconnect + reasons if you want to customize your handling by reason. + + :param code: + Disconnect reason, taken from :ref:`disconnect reason + codes ` + :param reason: + A human-readable reason for the disconnect + :param lang: (optional) + The language the reason is in + :type code: `int` + :type reason: `str` + :type lang: `str` + + """ + + +class CompressionError(DisconnectError): + """SSH compression error + + This exception is raised when an error occurs while compressing + or decompressing data sent on the SSH connection. 
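A quick aside on the `Record` class above, since the metaclass is easy to miss: annotated class attributes become `__slots__`, their assigned values become defaults, and `__init__` fills fields positionally or by keyword. A sketch, assuming the vendored module is importable as `asyncssh.misc` (the HostInfo class is a made-up example):

    from asyncssh.misc import Record

    class HostInfo(Record):
        host: str = ''
        port: int = 22

    info = HostInfo('pve.example.com', port=8006)
    print(repr(info))   # HostInfo(host='pve.example.com', port=8006)
    print(str(info))    # host: pve.example.com, port: 8006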
+
+       :param reason:
+           Details about the compression error
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_COMPRESSION_ERROR, reason, lang)
+
+
+class ConnectionLost(DisconnectError):
+    """SSH connection lost
+
+       This exception is raised when the SSH connection to the remote
+       system is unexpectedly lost. It can also occur as a result of
+       the remote system failing to respond to keepalive messages or
+       as a result of a login timeout, when those features are enabled.
+
+       :param reason:
+           Details about the connection failure
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_CONNECTION_LOST, reason, lang)
+
+
+class HostKeyNotVerifiable(DisconnectError):
+    """SSH host key not verifiable
+
+       This exception is raised when the SSH server's host key or
+       certificate is not verifiable.
+
+       :param reason:
+           Details about the host key verification failure
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_HOST_KEY_NOT_VERIFIABLE, reason, lang)
+
+
+class IllegalUserName(DisconnectError):
+    """SSH illegal user name
+
+       This exception is raised when an error occurs while processing
+       the username sent during the SSH handshake.
+
+       :param reason:
+           Details about the illegal username
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_ILLEGAL_USER_NAME, reason, lang)
+
+
+class KeyExchangeFailed(DisconnectError):
+    """SSH key exchange failed
+
+       This exception is raised when the SSH key exchange fails.
+
+       :param reason:
+           Details about the key exchange failure
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_KEY_EXCHANGE_FAILED, reason, lang)
+
+
+class MACError(DisconnectError):
+    """SSH MAC error
+
+       This exception is raised when an error occurs while processing
+       the message authentication code (MAC) of a message on the SSH
+       connection.
+
+       :param reason:
+           Details about the MAC error
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_MAC_ERROR, reason, lang)
+
+
+class PermissionDenied(DisconnectError):
+    """SSH permission denied
+
+       This exception is raised when there are no authentication methods
+       remaining to complete SSH client authentication.
+
+       :param reason:
+           Details about the authentication failure
+       :param lang: (optional)
+           The language the reason is in
+       :type reason: `str`
+       :type lang: `str`
+
+    """
+
+    def __init__(self, reason: str, lang: str = DEFAULT_LANG):
+        super().__init__(DISC_NO_MORE_AUTH_METHODS_AVAILABLE, reason, lang)
+
+
+class ProtocolError(DisconnectError):
+    """SSH protocol error
+
+       This exception is raised when the SSH connection is disconnected
+       due to an SSH protocol error being detected.
+ + :param reason: + Details about the SSH protocol error detected + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(DISC_PROTOCOL_ERROR, reason, lang) + + +class ProtocolNotSupported(DisconnectError): + """SSH protocol not supported + + This exception is raised when the remote system sends an SSH + protocol version which is not supported. + + :param reason: + Details about the unsupported SSH protocol version + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(DISC_PROTOCOL_ERROR, reason, lang) + + +class ServiceNotAvailable(DisconnectError): + """SSH service not available + + This exception is raised when an unexpected service name is + received during the SSH handshake. + + :param reason: + Details about the unexpected SSH service + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(DISC_SERVICE_NOT_AVAILABLE, reason, lang) + + +class ChannelOpenError(Error): + """SSH channel open error + + This exception is raised by connection handlers to report + channel open failures. + + :param code: + Channel open failure reason, taken from :ref:`channel open + failure reason codes ` + :param reason: + A human-readable reason for the channel open failure + :param lang: + The language the reason is in + :type code: `int` + :type reason: `str` + :type lang: `str` + + """ + + +class ChannelListenError(Exception): + """SSH channel listen error + + This exception is raised to report failures in setting up + remote SSH connection listeners. + + :param details: + Details of the listen failure + :type details: `str` + + """ + + +class PasswordChangeRequired(Exception): + """SSH password change required + + This exception is raised during password validation on the + server to indicate that a password change is required. It + should be raised when the password provided is valid but + expired, to trigger the client to provide a new password. + + :param prompt: + The prompt requesting that the user enter a new password + :param lang: + The language that the prompt is in + :type prompt: `str` + :type lang: `str` + + """ + + def __init__(self, prompt: str, lang: str = DEFAULT_LANG): + super().__init__('Password change required: %s' % prompt) + self.prompt = prompt + self.lang = lang + + +class BreakReceived(Exception): + """SSH break request received + + This exception is raised on an SSH server stdin stream when the + client sends a break on the channel. + + :param msec: + The duration of the break in milliseconds + :type msec: `int` + + """ + + def __init__(self, msec: int): + super().__init__('Break for %s msec' % msec) + self.msec = msec + + +class SignalReceived(Exception): + """SSH signal request received + + This exception is raised on an SSH server stdin stream when the + client sends a signal on the channel. 
+ + :param signal: + The name of the signal sent by the client + :type signal: `str` + + """ + + def __init__(self, signal: str): + super().__init__('Signal: %s' % signal) + self.signal = signal + + +class SoftEOFReceived(Exception): + """SSH soft EOF request received + + This exception is raised on an SSH server stdin stream when the + client sends an EOF from within the line editor on the channel. + + """ + + def __init__(self) -> None: + super().__init__('Soft EOF') + + +class TerminalSizeChanged(Exception): + """SSH terminal size change notification received + + This exception is raised on an SSH server stdin stream when the + client sends a terminal size change on the channel. + + :param width: + The new terminal width + :param height: + The new terminal height + :param pixwidth: + The new terminal width in pixels + :param pixheight: + The new terminal height in pixels + :type width: `int` + :type height: `int` + :type pixwidth: `int` + :type pixheight: `int` + + """ + + def __init__(self, width: int, height: int, pixwidth: int, pixheight: int): + super().__init__('Terminal size change: (%s, %s, %s, %s)' % + (width, height, pixwidth, pixheight)) + self.width = width + self.height = height + self.pixwidth = pixwidth + self.pixheight = pixheight + + +_disc_error_map = { + DISC_PROTOCOL_ERROR: ProtocolError, + DISC_KEY_EXCHANGE_FAILED: KeyExchangeFailed, + DISC_MAC_ERROR: MACError, + DISC_COMPRESSION_ERROR: CompressionError, + DISC_SERVICE_NOT_AVAILABLE: ServiceNotAvailable, + DISC_PROTOCOL_VERSION_NOT_SUPPORTED: ProtocolNotSupported, + DISC_HOST_KEY_NOT_VERIFIABLE: HostKeyNotVerifiable, + DISC_CONNECTION_LOST: ConnectionLost, + DISC_NO_MORE_AUTH_METHODS_AVAILABLE: PermissionDenied, + DISC_ILLEGAL_USER_NAME: IllegalUserName +} + + +def construct_disc_error(code: int, reason: str, lang: str) -> DisconnectError: + """Map disconnect error code to appropriate DisconnectError exception""" + + try: + return _disc_error_map[code](reason, lang) + except KeyError: + return DisconnectError(code, '%s (error %d)' % (reason, code), lang) diff --git a/venv/lib/python3.12/site-packages/asyncssh/packet.py b/venv/lib/python3.12/site-packages/asyncssh/packet.py new file mode 100644 index 0000000..916348d --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/packet.py @@ -0,0 +1,240 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
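Before moving on to the packet layer: the `_disc_error_map` dispatch above is what lets callers catch disconnects at whatever granularity they need, since `construct_disc_error()` maps a received reason code to the most specific subclass. A client-side sketch (host and credentials are placeholders):

    import asyncio
    import asyncssh

    async def main() -> None:
        try:
            async with asyncssh.connect('pve.example.com',
                                        username='root') as conn:
                result = await conn.run('uptime', check=True)
                print(result.stdout, end='')
        except asyncssh.PermissionDenied:
            print('no authentication methods left to try')
        except asyncssh.HostKeyNotVerifiable:
            print('host key did not match known_hosts')
        except asyncssh.DisconnectError as exc:
            print('disconnected: %s (code %d)' % (exc.reason, exc.code))

    asyncio.run(main())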
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH packet encoding and decoding functions""" + +from typing import Any, Callable, Iterable, Mapping, Optional, Sequence, Union + +from .logging import SSHLogger +from .misc import plural + + +_LoggedPacket = Union[bytes, 'SSHPacket'] +_PacketHandler = Callable[[Any, int, int, 'SSHPacket'], None] + + +class PacketDecodeError(ValueError): + """Packet decoding error""" + + +def Byte(value: int) -> bytes: + """Encode a single byte""" + + return bytes((value,)) + + +def Boolean(value: bool) -> bytes: + """Encode a boolean value""" + + return Byte(bool(value)) + + +def UInt16(value: int) -> bytes: + """Encode a 16-bit integer value""" + + return value.to_bytes(2, 'big') + + +def UInt32(value: int) -> bytes: + """Encode a 32-bit integer value""" + + return value.to_bytes(4, 'big') + + +def UInt64(value: int) -> bytes: + """Encode a 64-bit integer value""" + + return value.to_bytes(8, 'big') + + +def String(value: Union[bytes, str]) -> bytes: + """Encode a byte string or UTF-8 string value""" + + if isinstance(value, str): + value = value.encode('utf-8', errors='strict') + + return len(value).to_bytes(4, 'big') + value + + +def MPInt(value: int) -> bytes: + """Encode a multiple precision integer value""" + + l = value.bit_length() + l += (l % 8 == 0 and value != 0 and value != -1 << (l - 1)) + l = (l + 7) // 8 + + return l.to_bytes(4, 'big') + value.to_bytes(l, 'big', signed=True) + + +def NameList(value: Iterable[bytes]) -> bytes: + """Encode a comma-separated list of byte strings""" + + return String(b','.join(value)) + + +class SSHPacket: + """Decoder class for SSH packets""" + + def __init__(self, packet: bytes): + self._packet = packet + self._idx = 0 + self._len = len(packet) + + def __bool__(self) -> bool: + return self._idx != self._len + + def check_end(self) -> None: + """Confirm that all of the data in the packet has been consumed""" + + if self: + raise PacketDecodeError('Unexpected data at end of packet') + + def get_consumed_payload(self) -> bytes: + """Return the portion of the packet consumed so far""" + + return self._packet[:self._idx] + + def get_remaining_payload(self) -> bytes: + """Return the portion of the packet not yet consumed""" + + return self._packet[self._idx:] + + def get_full_payload(self) -> bytes: + """Return the full packet""" + + return self._packet + + def get_bytes(self, size: int) -> bytes: + """Extract the requested number of bytes from the packet""" + + if self._idx + size > self._len: + raise PacketDecodeError('Incomplete packet') + + value = self._packet[self._idx:self._idx+size] + self._idx += size + return value + + def get_byte(self) -> int: + """Extract a single byte from the packet""" + + return self.get_bytes(1)[0] + + def get_boolean(self) -> bool: + """Extract a boolean from the packet""" + + return bool(self.get_byte()) + + def get_uint16(self) -> int: + """Extract a 16-bit integer from the 
packet""" + + return int.from_bytes(self.get_bytes(2), 'big') + + def get_uint32(self) -> int: + """Extract a 32-bit integer from the packet""" + + return int.from_bytes(self.get_bytes(4), 'big') + + def get_uint64(self) -> int: + """Extract a 64-bit integer from the packet""" + + return int.from_bytes(self.get_bytes(8), 'big') + + def get_string(self) -> bytes: + """Extract a UTF-8 string from the packet""" + + return self.get_bytes(self.get_uint32()) + + def get_mpint(self) -> int: + """Extract a multiple precision integer from the packet""" + + return int.from_bytes(self.get_string(), 'big', signed=True) + + def get_namelist(self) -> Sequence[bytes]: + """Extract a comma-separated list of byte strings from the packet""" + + namelist = self.get_string() + return namelist.split(b',') if namelist else [] + + +class SSHPacketLogger: + """Parent class for SSH packet loggers""" + + _handler_names: Mapping[int, str] = {} + + @property + def logger(self) -> SSHLogger: + """The logger to use for packet logging""" + + raise NotImplementedError + + def _log_packet(self, msg: str, pkttype: int, pktid: Optional[int], + packet: _LoggedPacket, note: str) -> None: + """Log a sent/received packet""" + + if isinstance(packet, SSHPacket): + packet = packet.get_full_payload() + + try: + name = '%s (%d)' % (self._handler_names[pkttype], pkttype) + except KeyError: + name = 'packet type %d' % pkttype + + count = plural(len(packet), 'byte') + + if note: + note = ' (%s)' % note + + self.logger.packet(pktid, packet, '%s %s, %s%s', + msg, name, count, note) + + def log_sent_packet(self, pkttype: int, pktid: Optional[int], + packet: _LoggedPacket, note: str = '') -> None: + """Log a sent packet""" + + self._log_packet('Sent', pkttype, pktid, packet, note) + + + def log_received_packet(self, pkttype: int, pktid: Optional[int], + packet: _LoggedPacket, note: str = '') -> None: + """Log a received packet""" + + self._log_packet('Received', pkttype, pktid, packet, note) + + +class SSHPacketHandler(SSHPacketLogger): + """Parent class for SSH packet handlers""" + + _packet_handlers: Mapping[int, _PacketHandler] = {} + + @property + def logger(self) -> SSHLogger: + """The logger associated with this packet handler""" + + raise NotImplementedError + + def process_packet(self, pkttype: int, pktid: int, + packet: SSHPacket) -> bool: + """Log and process a received packet""" + + if pkttype in self._packet_handlers: + self._packet_handlers[pkttype](self, pkttype, pktid, packet) + return True + else: + return False diff --git a/venv/lib/python3.12/site-packages/asyncssh/pattern.py b/venv/lib/python3.12/site-packages/asyncssh/pattern.py new file mode 100644 index 0000000..7b02e42 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/pattern.py @@ -0,0 +1,145 @@ +# Copyright (c) 2015-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Pattern matching for principal and host names""" + +from fnmatch import fnmatch +from typing import Union + +from .misc import IPAddress, ip_network + + +_HostPattern = Union['WildcardHostPattern', 'CIDRHostPattern'] +_AnyPattern = Union['WildcardPattern', _HostPattern] + + +class _BaseWildcardPattern: + """A base class for matching '*' and '?' wildcards""" + + def __init__(self, pattern: str): + # We need to escape square brackets in host patterns if we + # want to use Python's fnmatch. + self._pattern = ''.join('[[]' if ch == '[' else + '[]]' if ch == ']' else + ch for ch in pattern) + + def _matches(self, value: str) -> bool: + """Return whether a wild card pattern matches a value""" + + return fnmatch(value, self._pattern) + + +class WildcardPattern(_BaseWildcardPattern): + """A pattern matcher for '*' and '?' wildcards""" + + def matches(self, value: str) -> bool: + """Return whether a wild card pattern matches a value""" + + return super()._matches(value) + + +class WildcardHostPattern(_BaseWildcardPattern): + """Match a host name or address against a wildcard pattern""" + + def matches(self, host: str, addr: str, _ip: IPAddress) -> bool: + """Return whether a host or address matches a wild card host pattern""" + + return (bool(host) and super()._matches(host)) or \ + (bool(addr) and super()._matches(addr)) + + +class CIDRHostPattern: + """Match IPv4/v6 address against CIDR-style subnet pattern""" + + def __init__(self, pattern: str): + self._network = ip_network(pattern) + + def matches(self, _host: str, _addr: str, ip: IPAddress) -> bool: + """Return whether an IP address matches a CIDR address pattern""" + + return bool(ip) and ip in self._network + + +class _PatternList: + """Match against a list of comma-separated positive and negative patterns + + This class is a base class for building a pattern matcher that + takes a set of comma-separated positive and negative patterns, + returning `True` if one or more positive patterns match and + no negative ones do. + + The pattern matching is done by objects returned by the + build_pattern method. The arguments passed in when a match + is performed will vary depending on what class build_pattern + returns. 
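The positive/negative rule described here is compact enough to restate in a few lines. A self-contained sketch of the same semantics using fnmatch, the way WildcardPattern does (the host names below are invented):

    from fnmatch import fnmatch

    def matches(patterns: str, value: str) -> bool:
        pos, neg = [], []
        for pattern in patterns.split(','):
            if pattern.startswith('!'):
                neg.append(pattern[1:])
            else:
                pos.append(pattern)
        return (any(fnmatch(value, p) for p in pos) and
                not any(fnmatch(value, p) for p in neg))

    assert matches('*.example.com,!bad.example.com', 'pve.example.com')
    assert not matches('*.example.com,!bad.example.com', 'bad.example.com')
    assert not matches('*.example.com,!bad.example.com', 'example.org')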
+ + """ + + def __init__(self, patterns: str): + self._pos_patterns = [] + self._neg_patterns = [] + + for pattern in patterns.split(','): + if pattern.startswith('!'): + negate = True + pattern = pattern[1:] + else: + negate = False + + matcher = self.build_pattern(pattern) + + if negate: + self._neg_patterns.append(matcher) + else: + self._pos_patterns.append(matcher) + + def build_pattern(self, pattern: str) -> _AnyPattern: + """Abstract method to build a pattern object""" + + raise NotImplementedError + + def matches(self, *args) -> bool: + """Match a set of values against positive & negative pattern lists""" + + pos_match = any(p.matches(*args) for p in self._pos_patterns) + neg_match = any(p.matches(*args) for p in self._neg_patterns) + + return pos_match and not neg_match + + +class WildcardPatternList(_PatternList): + """Match names against wildcard patterns""" + + def build_pattern(self, pattern: str) -> WildcardPattern: + """Build a wild card pattern""" + + return WildcardPattern(pattern) + + +class HostPatternList(_PatternList): + """Match host names & addresses against wildcard and CIDR patterns""" + + def build_pattern(self, pattern: str) -> _HostPattern: + """Build a CIDR address or wild card host pattern""" + + try: + return CIDRHostPattern(pattern) + except ValueError: + return WildcardHostPattern(pattern) diff --git a/venv/lib/python3.12/site-packages/asyncssh/pbe.py b/venv/lib/python3.12/site-packages/asyncssh/pbe.py new file mode 100644 index 0000000..c536f51 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/pbe.py @@ -0,0 +1,585 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""Asymmetric key password based encryption functions""" + +from hashlib import md5, sha1 +import os +from typing import Callable, Dict, Sequence, Tuple, Union + +from .asn1 import ASN1DecodeError, ObjectIdentifier, der_encode, der_decode +from .crypto import BasicCipher, get_cipher_params, pbkdf2_hmac +from .misc import BytesOrStr, HashType + + +_Cipher = Union[BasicCipher, '_RFC1423Pad'] + +_PKCS8CipherHandler = Callable[[object, BytesOrStr, Callable, str], _Cipher] +_PKCS8Cipher = Tuple[_PKCS8CipherHandler, Callable, str] + +_PBES2CipherHandler = Callable[[Sequence, str, bytes], _Cipher] +_PBES2Cipher = Tuple[_PBES2CipherHandler, str] + +_PBES2KDFHandler = Callable[[Sequence, BytesOrStr, int], bytes] +_PBES2KDF = Tuple[_PBES2KDFHandler, Tuple[object, ...]] + + +_ES1_MD5_DES = ObjectIdentifier('1.2.840.113549.1.5.3') +_ES1_SHA1_DES = ObjectIdentifier('1.2.840.113549.1.5.10') + +_ES2 = ObjectIdentifier('1.2.840.113549.1.5.13') + +_P12_RC4_128 = ObjectIdentifier('1.2.840.113549.1.12.1.1') +_P12_RC4_40 = ObjectIdentifier('1.2.840.113549.1.12.1.2') +_P12_DES3 = ObjectIdentifier('1.2.840.113549.1.12.1.3') +_P12_DES2 = ObjectIdentifier('1.2.840.113549.1.12.1.4') + +_ES2_CAST128 = ObjectIdentifier('1.2.840.113533.7.66.10') +_ES2_DES3 = 
ObjectIdentifier('1.2.840.113549.3.7') +_ES2_BF = ObjectIdentifier('1.3.6.1.4.1.3029.1.2') +_ES2_DES = ObjectIdentifier('1.3.14.3.2.7') +_ES2_AES128 = ObjectIdentifier('2.16.840.1.101.3.4.1.2') +_ES2_AES192 = ObjectIdentifier('2.16.840.1.101.3.4.1.22') +_ES2_AES256 = ObjectIdentifier('2.16.840.1.101.3.4.1.42') + +_ES2_PBKDF2 = ObjectIdentifier('1.2.840.113549.1.5.12') + +_ES2_SHA1 = ObjectIdentifier('1.2.840.113549.2.7') +_ES2_SHA224 = ObjectIdentifier('1.2.840.113549.2.8') +_ES2_SHA256 = ObjectIdentifier('1.2.840.113549.2.9') +_ES2_SHA384 = ObjectIdentifier('1.2.840.113549.2.10') +_ES2_SHA512 = ObjectIdentifier('1.2.840.113549.2.11') + +_pkcs1_cipher: Dict[bytes, str] = {} +_pkcs1_dek_name: Dict[str, bytes] = {} + +_pkcs8_handler: Dict[ObjectIdentifier, _PKCS8Cipher] = {} +_pkcs8_cipher_oid: Dict[Tuple[str, str], ObjectIdentifier] = {} + +_pbes2_cipher: Dict[ObjectIdentifier, _PBES2Cipher] = {} +_pbes2_cipher_oid: Dict[str, ObjectIdentifier] = {} + +_pbes2_kdf: Dict[ObjectIdentifier, _PBES2KDF] = {} +_pbes2_kdf_oid: Dict[str, ObjectIdentifier] = {} + +_pbes2_prf: Dict[ObjectIdentifier, str] = {} +_pbes2_prf_oid: Dict[str, ObjectIdentifier] = {} + + +class KeyEncryptionError(ValueError): + """Key encryption error + + This exception is raised by key decryption functions when the data + provided is not a valid encrypted private key. + + """ + + +class _RFC1423Pad: + """RFC 1423 padding functions + + This class implements RFC 1423 padding for encryption and + decryption of data by block ciphers. On encryption, the data is + padded by between 1 and the cipher's block size number of bytes, + with the padding value being equal to the length of the padding. + + """ + + def __init__(self, cipher_name: str, block_size: int, + key: bytes, iv: bytes): + self._cipher = BasicCipher(cipher_name, key, iv) + self._block_size = block_size + + def encrypt(self, data: bytes) -> bytes: + """Pad data before encrypting it""" + + pad = self._block_size - (len(data) % self._block_size) + data += pad * bytes((pad,)) + return self._cipher.encrypt(data) + + def decrypt(self, data: bytes) -> bytes: + """Remove padding from data after decrypting it""" + + data = self._cipher.decrypt(data) + + if data: + pad = data[-1] + if (1 <= pad <= self._block_size and + data[-pad:] == pad * bytes((pad,))): + return data[:-pad] + + raise KeyEncryptionError('Unable to decrypt key') + + +def _pbkdf1(hash_alg: HashType, passphrase: BytesOrStr, salt: bytes, + count: int, key_size: int) -> bytes: + """PKCS#5 v1.5 key derivation function for password-based encryption + + This function implements the PKCS#5 v1.5 algorithm for deriving + an encryption key from a passphrase and salt. + + The standard PBKDF1 function cannot generate more key bytes than + the hash digest size, but 3DES uses a modified form of it which + calls PBKDF1 recursively on the result to generate more key data. + Support for this is implemented here. + + """ + + if isinstance(passphrase, str): + passphrase = passphrase.encode('utf-8') + + key = passphrase + salt + for _ in range(count): + key = hash_alg(key).digest() + + if len(key) <= key_size: + return key + _pbkdf1(hash_alg, key + passphrase, salt, count, + key_size - len(key)) + else: + return key[:key_size] + + +def _pbkdf_p12(hash_alg: HashType, passphrase: BytesOrStr, salt: bytes, + count: int, key_size: int, idx: int) -> bytes: + """PKCS#12 key derivation function for password-based encryption + + This function implements the PKCS#12 algorithm for deriving an + encryption key from a passphrase and salt. 
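For reference, the `_pbkdf1` function just above is simple enough to exercise by hand. A sketch deriving a DES key and IV the way PBES1 splits the PBKDF1 output (the passphrase, salt, and count are illustrative; MD5 matches the pbeWithMD5AndDES scheme, and the recursive 3DES extension is omitted):

    from hashlib import md5

    def pbkdf1(passphrase: bytes, salt: bytes, count: int,
               key_size: int) -> bytes:
        key = passphrase + salt
        for _ in range(count):
            key = md5(key).digest()
        return key[:key_size]

    derived = pbkdf1(b'passphrase', b'\x00\x01\x02\x03\x04\x05\x06\x07',
                     2048, 16)
    key, iv = derived[:8], derived[8:]     # 8-byte DES key + 8-byte IV
    print(key.hex(), iv.hex())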
+ + """ + + def _make_block(data: bytes, v: int) -> bytes: + """Make a block a multiple of v bytes long by repeating data""" + + l = len(data) + size = ((l + v - 1) // v) * v + return (((size + l - 1) // l) * data)[:size] + + v = hash_alg().block_size + D = v * bytes((idx,)) + + if isinstance(passphrase, str): + passphrase = passphrase.encode('utf-16be') + + I = bytearray(_make_block(salt, v) + _make_block(passphrase + b'\0\0', v)) + + key = b'' + while len(key) < key_size: + A = D + I + for i in range(count): + A = hash_alg(A).digest() + + B = int.from_bytes(_make_block(A, v), 'big') + for i in range(0, len(I), v): + x = (int.from_bytes(I[i:i+v], 'big') + B + 1) % (1 << v*8) + I[i:i+v] = x.to_bytes(v, 'big') + + key += A + + return key[:key_size] + + +def _pbes1(params: object, passphrase: BytesOrStr, hash_alg: HashType, + cipher_name: str) -> _Cipher: + """PKCS#5 v1.5 cipher selection function for password-based encryption + + This function implements the PKCS#5 v1.5 algorithm for password-based + encryption. It returns a cipher object which can be used to encrypt + or decrypt data based on the specified encryption parameters, + passphrase, and salt. + + """ + + if (not isinstance(params, tuple) or len(params) != 2 or + not isinstance(params[0], bytes) or + not isinstance(params[1], int)): + raise KeyEncryptionError('Invalid PBES1 encryption parameters') + + salt, count = params + + key_size, iv_size, block_size = get_cipher_params(cipher_name) + key = _pbkdf1(hash_alg, passphrase, salt, count, key_size + iv_size) + key, iv = key[:key_size], key[key_size:] + + return _RFC1423Pad(cipher_name, block_size, key, iv) + + +def _pbe_p12(params: object, passphrase: BytesOrStr, hash_alg: HashType, + cipher_name: str) -> _Cipher: + """PKCS#12 cipher selection function for password-based encryption + + This function implements the PKCS#12 algorithm for password-based + encryption. It returns a cipher object which can be used to encrypt + or decrypt data based on the specified encryption parameters, + passphrase, and salt. + + """ + + if (not isinstance(params, tuple) or len(params) != 2 or + not isinstance(params[0], bytes) or not params[0] or + not isinstance(params[1], int) or params[1] == 0): + raise KeyEncryptionError('Invalid PBES1 PKCS#12 encryption parameters') + + salt, count = params + + key_size, iv_size, block_size = get_cipher_params(cipher_name) + key = _pbkdf_p12(hash_alg, passphrase, salt, count, key_size, 1) + + if block_size == 1: + cipher: _Cipher = BasicCipher(cipher_name, key, b'') + else: + iv = _pbkdf_p12(hash_alg, passphrase, salt, count, iv_size, 2) + cipher = _RFC1423Pad(cipher_name, block_size, key, iv) + + return cipher + + +def _pbes2_iv(enc_params: Sequence, cipher_name: str, key: bytes) -> _Cipher: + """PKCS#5 v2.0 handler for PBES2 ciphers with an IV as a parameter + + This function returns the appropriate cipher object to use for + PBES2 encryption for ciphers that have only an IV as an encryption + parameter. 
+ + """ + + _, iv_size, block_size = get_cipher_params(cipher_name) + + if len(enc_params) != 1 or not isinstance(enc_params[0], bytes): + raise KeyEncryptionError('Invalid PBES2 encryption parameters') + + if len(enc_params[0]) != iv_size: + raise KeyEncryptionError('Invalid length IV for PBES2 encryption') + + return _RFC1423Pad(cipher_name, block_size, key, enc_params[0]) + + +def _pbes2_pbkdf2(kdf_params: Sequence, passphrase: BytesOrStr, + default_key_size: int) -> bytes: + """PKCS#5 v2.0 handler for PBKDF2 key derivation + + This function parses the PBKDF2 arguments from a PKCS#8 encrypted key + and returns the encryption key to use for encryption. + + """ + + if (len(kdf_params) != 1 or not isinstance(kdf_params[0], tuple) or + len(kdf_params[0]) < 2): + raise KeyEncryptionError('Invalid PBES2 key derivation parameters') + + kdf_params = list(kdf_params[0]) + + if (not isinstance(kdf_params[0], bytes) or + not isinstance(kdf_params[1], int)): + raise KeyEncryptionError('Invalid PBES2 key derivation parameters') + + salt = kdf_params.pop(0) + count = kdf_params.pop(0) + + if kdf_params and isinstance(kdf_params[0], int): + key_size = kdf_params.pop(0) # pragma: no cover, used only by RC2 + else: + key_size = default_key_size + + if kdf_params: + if (isinstance(kdf_params[0], tuple) and len(kdf_params[0]) == 2 and + isinstance(kdf_params[0][0], ObjectIdentifier)): + prf_alg = kdf_params[0][0] + if prf_alg in _pbes2_prf: + hash_name = _pbes2_prf[prf_alg] + else: + raise KeyEncryptionError('Unknown PBES2 pseudo-random ' + 'function') + else: + raise KeyEncryptionError('Invalid PBES2 pseudo-random function ' + 'parameters') + else: + hash_name = 'sha1' + + if isinstance(passphrase, str): + passphrase = passphrase.encode('utf-8') + + return pbkdf2_hmac(hash_name, passphrase, salt, count, key_size) + + +def _pbes2(params: object, passphrase: BytesOrStr) -> _Cipher: + """PKCS#5 v2.0 cipher selection function for password-based encryption + + This function implements the PKCS#5 v2.0 algorithm for password-based + encryption. It returns a cipher object which can be used to encrypt + or decrypt data based on the specified encryption parameters and + passphrase. 
+ + """ + + if (not isinstance(params, tuple) or len(params) != 2 or + not isinstance(params[0], tuple) or len(params[0]) < 1 or + not isinstance(params[1], tuple) or len(params[1]) < 1): + raise KeyEncryptionError('Invalid PBES2 encryption parameters') + + kdf_params = list(params[0]) + + kdf_alg = kdf_params.pop(0) + if kdf_alg not in _pbes2_kdf: + raise KeyEncryptionError('Unknown PBES2 key derivation function') + + enc_params = list(params[1]) + + enc_alg = enc_params.pop(0) + if enc_alg not in _pbes2_cipher: + raise KeyEncryptionError('Unknown PBES2 encryption algorithm') + + kdf_handler, kdf_args = _pbes2_kdf[kdf_alg] + enc_handler, cipher_name = _pbes2_cipher[enc_alg] + default_key_size, _, _ = get_cipher_params(cipher_name) + + key = kdf_handler(kdf_params, passphrase, default_key_size, *kdf_args) + return enc_handler(enc_params, cipher_name, key) + + +def register_pkcs1_cipher(pkcs1_cipher_name: str, pkcs1_dek_name: bytes, + cipher_name: str) -> None: + """Register a cipher used for PKCS#1 private key encryption""" + + _pkcs1_cipher[pkcs1_dek_name] = cipher_name + _pkcs1_dek_name[pkcs1_cipher_name] = pkcs1_dek_name + + +def register_pkcs8_cipher(pkcs8_cipher_name: str, hash_name: str, + pkcs8_cipher_oid: ObjectIdentifier, + handler: _PKCS8CipherHandler, hash_alg: HashType, + cipher_name: str) -> None: + """Register a cipher used for PKCS#8 private key encryption""" + + _pkcs8_handler[pkcs8_cipher_oid] = (handler, hash_alg, cipher_name) + _pkcs8_cipher_oid[pkcs8_cipher_name, hash_name] = pkcs8_cipher_oid + + +def register_pbes2_cipher(pbes2_cipher_name: str, + pbes2_cipher_oid: ObjectIdentifier, + handler: _PBES2CipherHandler, + cipher_name: str) -> None: + """Register a PBES2 encryption algorithm""" + + _pbes2_cipher[pbes2_cipher_oid] = (handler, cipher_name) + _pbes2_cipher_oid[pbes2_cipher_name] = pbes2_cipher_oid + + +def register_pbes2_kdf(kdf_name: str, kdf_oid: ObjectIdentifier, + handler: _PBES2KDFHandler, *args: object) -> None: + """Register a PBES2 key derivation function""" + + _pbes2_kdf[kdf_oid] = (handler, args) + _pbes2_kdf_oid[kdf_name] = kdf_oid + + +def register_pbes2_prf(hash_name: str, prf_oid: ObjectIdentifier) -> None: + """Register a PBES2 pseudo-random function""" + + _pbes2_prf[prf_oid] = hash_name + _pbes2_prf_oid[hash_name] = prf_oid + + +def pkcs1_encrypt(data: bytes, pkcs1_cipher_name: str, + passphrase: BytesOrStr) -> Tuple[bytes, bytes, bytes]: + """Encrypt PKCS#1 key data + + This function encrypts PKCS#1 key data using the specified cipher + and passphrase. Available ciphers include: + + aes128-cbc, aes192-cbc, aes256-cbc, des-cbc, des3-cbc + + """ + + if pkcs1_cipher_name in _pkcs1_dek_name: + pkcs1_dek_name = _pkcs1_dek_name[pkcs1_cipher_name] + cipher_name = _pkcs1_cipher[pkcs1_dek_name] + key_size, iv_size, block_size = get_cipher_params(cipher_name) + + iv = os.urandom(iv_size) + key = _pbkdf1(md5, passphrase, iv[:8], 1, key_size) + + cipher = _RFC1423Pad(cipher_name, block_size, key, iv) + return pkcs1_dek_name, iv, cipher.encrypt(data) + else: + raise KeyEncryptionError('Unknown PKCS#1 encryption algorithm') + + +def pkcs1_decrypt(data: bytes, pkcs1_dek_name: bytes, iv: bytes, + passphrase: BytesOrStr) -> bytes: + """Decrypt PKCS#1 key data + + This function decrypts PKCS#1 key data using the specified algorithm, + initialization vector, and passphrase. The algorithm name and IV + should be taken from the PEM DEK-Info header. 
+ + """ + + if pkcs1_dek_name in _pkcs1_cipher: + cipher_name = _pkcs1_cipher[pkcs1_dek_name] + key_size, _, block_size = get_cipher_params(cipher_name) + key = _pbkdf1(md5, passphrase, iv[:8], 1, key_size) + + cipher = _RFC1423Pad(cipher_name, block_size, key, iv) + return cipher.decrypt(data) + else: + raise KeyEncryptionError('Unknown PKCS#1 encryption algorithm') + + +def pkcs8_encrypt(data: bytes, pkcs8_cipher_name: str, hash_name: str, + version: int, passphrase: BytesOrStr) -> bytes: + """Encrypt PKCS#8 key data + + This function encrypts PKCS#8 key data using the specified cipher, + hash, encryption version, and passphrase. + + Available ciphers include: + + aes128-cbc, aes192-cbc, aes256-cbc, blowfish-cbc, cast128-cbc, + des-cbc, des2-cbc, des3-cbc, rc4-40, and rc4-128 + + Available hashes include: + + md5, sha1, sha256, sha384, sha512 + + Available versions include 1 for PBES1 and 2 for PBES2. + + Only some combinations of cipher, hash, and version are supported. + + """ + + if version == 1 and (pkcs8_cipher_name, hash_name) in _pkcs8_cipher_oid: + pkcs8_cipher_oid = _pkcs8_cipher_oid[pkcs8_cipher_name, hash_name] + handler, hash_alg, cipher_name = _pkcs8_handler[pkcs8_cipher_oid] + + alg = pkcs8_cipher_oid + params: object = (os.urandom(8), 2048) + cipher = handler(params, passphrase, hash_alg, cipher_name) + elif version == 2 and pkcs8_cipher_name in _pbes2_cipher_oid: + pbes2_cipher_oid = _pbes2_cipher_oid[pkcs8_cipher_name] + _, cipher_name = _pbes2_cipher[pbes2_cipher_oid] + _, iv_size, _ = get_cipher_params(cipher_name) + + kdf_params = [os.urandom(8), 2048] + iv = os.urandom(iv_size) + enc_params = (pbes2_cipher_oid, iv) + + if hash_name != 'sha1': + if hash_name in _pbes2_prf_oid: + kdf_params.append((_pbes2_prf_oid[hash_name], None)) + else: + raise KeyEncryptionError('Unknown PBES2 hash function') + + alg = _ES2 + params = ((_ES2_PBKDF2, tuple(kdf_params)), enc_params) + cipher = _pbes2(params, passphrase) + else: + raise KeyEncryptionError('Unknown PKCS#8 encryption algorithm') + + return der_encode(((alg, params), cipher.encrypt(data))) + + +def pkcs8_decrypt(key_data: object, passphrase: BytesOrStr) -> object: + """Decrypt PKCS#8 key data + + This function decrypts key data in PKCS#8 EncryptedPrivateKeyInfo + format using the specified passphrase. 
+ + """ + + if not isinstance(key_data, tuple) or len(key_data) != 2: + raise KeyEncryptionError('Invalid PKCS#8 encrypted key format') + + alg_params, data = key_data + + if (not isinstance(alg_params, tuple) or len(alg_params) != 2 or + not isinstance(data, bytes)): + raise KeyEncryptionError('Invalid PKCS#8 encrypted key format') + + alg, params = alg_params + + if alg == _ES2: + cipher = _pbes2(params, passphrase) + elif alg in _pkcs8_handler: + handler, hash_alg, cipher_name = _pkcs8_handler[alg] + cipher = handler(params, passphrase, hash_alg, cipher_name) + else: + raise KeyEncryptionError('Unknown PKCS#8 encryption algorithm') + + try: + return der_decode(cipher.decrypt(data)) + except (ASN1DecodeError, UnicodeDecodeError): + raise KeyEncryptionError('Invalid PKCS#8 encrypted key data') from None + + +_pkcs1_cipher_list = ( + ('aes128-cbc', b'AES-128-CBC', 'aes128-cbc'), + ('aes192-cbc', b'AES-192-CBC', 'aes192-cbc'), + ('aes256-cbc', b'AES-256-CBC', 'aes256-cbc'), + ('des-cbc', b'DES-CBC', 'des-cbc'), + ('des3-cbc', b'DES-EDE3-CBC', 'des3-cbc') +) + +_pkcs8_cipher_list = ( + ('des-cbc', 'md5', _ES1_MD5_DES, _pbes1, md5, 'des-cbc'), + ('des-cbc', 'sha1', _ES1_SHA1_DES, _pbes1, sha1, 'des-cbc'), + + ('des2-cbc','sha1', _P12_DES2, _pbe_p12, sha1, 'des2-cbc'), + ('des3-cbc','sha1', _P12_DES3, _pbe_p12, sha1, 'des3-cbc'), + ('rc4-40', 'sha1', _P12_RC4_40, _pbe_p12, sha1, 'arcfour40'), + ('rc4-128', 'sha1', _P12_RC4_128, _pbe_p12, sha1, 'arcfour') +) + +_pbes2_cipher_list = ( + ('aes128-cbc', _ES2_AES128, _pbes2_iv, 'aes128-cbc'), + ('aes192-cbc', _ES2_AES192, _pbes2_iv, 'aes192-cbc'), + ('aes256-cbc', _ES2_AES256, _pbes2_iv, 'aes256-cbc'), + ('blowfish-cbc', _ES2_BF, _pbes2_iv, 'blowfish-cbc'), + ('cast128-cbc', _ES2_CAST128, _pbes2_iv, 'cast128-cbc'), + ('des-cbc', _ES2_DES, _pbes2_iv, 'des-cbc'), + ('des3-cbc', _ES2_DES3, _pbes2_iv, 'des3-cbc') +) + +_pbes2_kdf_list = ( + ('pbkdf2', _ES2_PBKDF2, _pbes2_pbkdf2), +) + +_pbes2_prf_list = ( + ('sha1', _ES2_SHA1), + ('sha224', _ES2_SHA224), + ('sha256', _ES2_SHA256), + ('sha384', _ES2_SHA384), + ('sha512', _ES2_SHA512) +) + +for _pkcs1_cipher_args in _pkcs1_cipher_list: + register_pkcs1_cipher(*_pkcs1_cipher_args) + +for _pkcs8_cipher_args in _pkcs8_cipher_list: + register_pkcs8_cipher(*_pkcs8_cipher_args) + +for _pbes2_cipher_args in _pbes2_cipher_list: + register_pbes2_cipher(*_pbes2_cipher_args) + +for _pbes2_kdf_args in _pbes2_kdf_list: + register_pbes2_kdf(*_pbes2_kdf_args) + +for _pbes2_prf_args in _pbes2_prf_list: + register_pbes2_prf(*_pbes2_prf_args) diff --git a/venv/lib/python3.12/site-packages/asyncssh/pkcs11.py b/venv/lib/python3.12/site-packages/asyncssh/pkcs11.py new file mode 100644 index 0000000..b9a4740 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/pkcs11.py @@ -0,0 +1,290 @@ +# Copyright (c) 2020-2023 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""PKCS#11 smart card handler""" + +from types import TracebackType +from typing import Dict, List, Optional, Sequence, Tuple, Type, Union, cast + +try: + import pkcs11 + from pkcs11 import Attribute, KeyType, Mechanism, ObjectClass + from pkcs11 import PrivateKey, Token + from pkcs11.util.rsa import encode_rsa_public_key + from pkcs11.util.ec import encode_ec_public_key + pkcs11_available = True +except (ImportError, ModuleNotFoundError): # pragma: no cover + pkcs11_available = False + +from .misc import BytesOrStr +from .packet import MPInt, String +from .public_key import SSHCertificate, SSHKey, SSHKeyPair +from .public_key import import_certificate_chain, import_public_key + + +_AttrDict = Dict['Attribute', Union[bool, bytes, str, 'ObjectClass']] +_TokenID = Tuple[str, bytes] +_SessionMap = Dict[_TokenID, 'SSHPKCS11Session'] + + +if pkcs11_available: + encoders = {KeyType.RSA: encode_rsa_public_key, + KeyType.EC: encode_ec_public_key} + + mechanisms = {b'ssh-rsa': Mechanism.SHA1_RSA_PKCS, + b'rsa-sha2-256': Mechanism.SHA256_RSA_PKCS, + b'rsa-sha2-512': Mechanism.SHA512_RSA_PKCS, + b'ssh-rsa-sha224@ssh.com': Mechanism.SHA224_RSA_PKCS, + b'ssh-rsa-sha256@ssh.com': Mechanism.SHA256_RSA_PKCS, + b'ssh-rsa-sha384@ssh.com': Mechanism.SHA384_RSA_PKCS, + b'ssh-rsa-sha512@ssh.com': Mechanism.SHA512_RSA_PKCS, + b'rsa1024-sha1': Mechanism.SHA1_RSA_PKCS, + b'rsa2048-sha256': Mechanism.SHA256_RSA_PKCS, + b'ecdsa-sha2-nistp256': Mechanism.ECDSA_SHA256, + b'ecdsa-sha2-nistp384': Mechanism.ECDSA_SHA384, + b'ecdsa-sha2-nistp521': Mechanism.ECDSA_SHA512} + + + class SSHPKCS11KeyPair(SSHKeyPair): + """Surrogate for a key accessed via a PKCS#11 provider""" + + _key_type = 'pkcs11' + + def __init__(self, session: 'SSHPKCS11Session', privkey: PrivateKey, + pubkey: SSHKey, cert: Optional[SSHCertificate] = None): + super().__init__(pubkey.algorithm, pubkey.algorithm, + pubkey.sig_algorithms, pubkey.sig_algorithms, + pubkey.public_data, privkey.label, cert, + use_executor=True) + + self._session = session + self._privkey = privkey + + def __del__(self) -> None: + self._session.close() + + def sign(self, data: bytes) -> bytes: + """Sign a block of data with this private key""" + + sig_algorithm = self.sig_algorithm + + if sig_algorithm.startswith(b'x509v3-'): + sig_algorithm = sig_algorithm[7:] + + sig = self._privkey.sign(data, mechanism=mechanisms[sig_algorithm]) + + if self._privkey.key_type == KeyType.EC: + length = len(sig) // 2 + r = int.from_bytes(sig[:length], 'big') + s = int.from_bytes(sig[length:], 'big') + sig = MPInt(r) + MPInt(s) + + return String(sig_algorithm) + String(sig) + + + class SSHPKCS11Session: + """Work around PKCS#11 sessions not supporting simultaneous opens""" + + _sessions: _SessionMap = {} + + def __init__(self, token_id: _TokenID, token: Token, + pin: Optional[str]): + self._token_id = token_id + self._session = token.open(user_pin=pin) 
+            self._refcount = 0
+
+        def __enter__(self) -> 'SSHPKCS11Session':
+            """Allow SSHPKCS11Session to be used as a context manager"""
+
+            return self
+
+        def __exit__(self, _exc_type: Type[BaseException],
+                     _exc_value: BaseException,
+                     _traceback: TracebackType) -> None:
+            """Drop one reference to the session when exiting"""
+
+            self.close()
+
+        @classmethod
+        def open(cls, token: Token, pin: Optional[str]) -> 'SSHPKCS11Session':
+            """Open a new session, or return an already-open one"""
+
+            token_id = (token.manufacturer_id, token.serial)
+
+            try:
+                session = cls._sessions[token_id]
+            except KeyError:
+                session = cls(token_id, token, pin)
+                cls._sessions[token_id] = session
+
+            session._refcount += 1
+            return session
+
+        def close(self) -> None:
+            """Drop one reference to an open session"""
+
+            self._refcount -= 1
+
+            if self._refcount == 0:
+                self._session.close()
+                del self._sessions[self._token_id]
+
+        def get_keys(self, load_certs: bool, key_label: Optional[str],
+                     key_id: Optional[BytesOrStr]) -> \
+                Sequence[SSHPKCS11KeyPair]:
+            """Return the private keys found on this token"""
+
+            if isinstance(key_id, str):
+                key_id = bytes.fromhex(key_id)
+
+            key_attrs: _AttrDict = {Attribute.CLASS: ObjectClass.PRIVATE_KEY,
+                                    Attribute.SIGN: True}
+
+            if key_label is not None:
+                key_attrs[Attribute.LABEL] = key_label
+
+            if key_id is not None:
+                key_attrs[Attribute.OBJECT_ID] = key_id
+
+            cert_attrs: _AttrDict = {Attribute.CLASS: ObjectClass.CERTIFICATE}
+
+            if load_certs:
+                certs = [import_certificate_chain(
+                            cast(bytes, cert[Attribute.VALUE]))
+                         for cert in self._session.get_objects(cert_attrs)]
+
+                certdict = {cert.key.public_data: cert for cert in certs
+                            if cert and 'Attest' not in str(cert.subject)}
+            else:
+                certdict = {}
+
+            keys = []
+
+            for key in self._session.get_objects(key_attrs):
+                privkey = cast(PrivateKey, key)
+                encoder = encoders.get(privkey.key_type)
+
+                if encoder:
+                    pubkey = import_public_key(encoder(privkey))
+
+                    cert = certdict.get(pubkey.public_data)
+
+                    if cert:
+                        keys.append(SSHPKCS11KeyPair(self, privkey,
+                                                     pubkey, cert))
+
+                    keys.append(SSHPKCS11KeyPair(self, privkey, pubkey))
+
+            self._refcount += len(keys)
+
+            return keys
+
+
+    def load_pkcs11_keys(provider: str, pin: Optional[str] = None, *,
+                         load_certs: bool = True,
+                         token_label: Optional[str] = None,
+                         token_serial: Optional[BytesOrStr] = None,
+                         key_label: Optional[str] = None,
+                         key_id: Optional[BytesOrStr] = None) -> \
+            Sequence[SSHPKCS11KeyPair]:
+        """Load PIV keys and X.509 certificates from a PKCS#11 token
+
+        This function loads a list of SSH keypairs with optional X.509
+        certificates from attached PKCS#11 security tokens. The PKCS#11
+        provider must be specified, along with a user PIN if the
+        tokens are set to require one.
+
+        By default, this function loads both private key handles
+        and the X.509 certificates associated with them, allowing for
+        X.509 certificate based auth to SSH servers that support it.
+        To disable loading of these certificates and perform only
+        key-based authentication, load_certs may be set to `False`.
+
+        If token_label and/or token_serial are specified, only tokens
+        matching those values will be accessed.
+
+        If key_label and/or key_id are specified, only keys matching
+        those values will be loaded. Key IDs can be specified as
+        either raw bytes or a string containing hex digits.
+
+        .. note:: If you have an active asyncio event loop at
+                  the time you call this function, you may want
+                  to consider running it via a call to
+                  :meth:`asyncio.AbstractEventLoop.run_in_executor`.
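+
+                  As a sketch (the provider path and PIN are
+                  placeholders, `loop` is the running event loop, and
+                  `functools` is imported by the caller):
+
+                      keys = await loop.run_in_executor(
+                          None, functools.partial(
+                              load_pkcs11_keys, '/path/to/provider.so',
+                              pin='123456'))
+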
+ While retrieving the keys generally takes only a + fraction of a second, calling this function + directly could block asyncio event processing + until it completes. + + :param provider: + The path to the PKCS#11 provider's shared library. + :param pin: (optional) + The PIN to use when accessing tokens, if needed. + :param load_certs: (optional) + Whether or not to load X.509 certificates from the + security tokens. + :param token_label: (optional) + A token label to match against. If set, only security + tokens with this label will be accessed. + :param token_serial: (optional) + A token serial number to match against. If set, only + security tokens with this serial number will be accessed. + :param key_label: (optional) + A key label to match against. If set, only keys with this + label will be loaded. + :param key_id: (optional) + A key ID to match against. If set, only keys with this ID + will be loaded. + :type provider: `str` + :type pin: `str` + :type load_certs: `bool` + :type token_label: `str` + :type token_serial: `bytes` or `str` + :type key_label: `str` + :type key_id: `bytes` or `str` + + :returns: list of class:`SSHKeyPair` + + """ + + lib = pkcs11.lib(provider) + + keys: List[SSHPKCS11KeyPair] = [] + + if isinstance(token_serial, str): + token_serial = token_serial.encode('utf-8') + + for token in lib.get_tokens(token_label=token_label, + token_serial=token_serial): + with SSHPKCS11Session.open(token, pin) as session: + keys.extend(session.get_keys(load_certs, key_label, key_id)) + + return keys +else: # pragma: no cover + def load_pkcs11_keys(provider: str, pin: Optional[str] = None, *, + load_certs: bool = True, + token_label: Optional[str] = None, + token_serial: Optional[BytesOrStr] = None, + key_label: Optional[str] = None, + key_id: Optional[BytesOrStr] = None) -> \ + Sequence['SSHPKCS11KeyPair']: + """Report that PKCS#11 support is not available""" + + raise ValueError('PKCS#11 support not available') from None diff --git a/venv/lib/python3.12/site-packages/asyncssh/process.py b/venv/lib/python3.12/site-packages/asyncssh/process.py new file mode 100644 index 0000000..33a0cdb --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/process.py @@ -0,0 +1,1785 @@ +# Copyright (c) 2016-2023 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH process handlers""" + +import asyncio +from asyncio.subprocess import DEVNULL, PIPE, STDOUT +import codecs +import inspect +import io +import os +from pathlib import PurePath +import socket +import stat +from types import TracebackType +from typing import Any, AnyStr, Awaitable, Callable, Dict, Generic, IO +from typing import Iterable, List, Mapping, Optional, Set, TextIO +from typing import Tuple, Type, TypeVar, Union, cast +from typing_extensions import Protocol + +from .channel import SSHChannel, SSHClientChannel, SSHServerChannel + +from .constants import DEFAULT_LANG, EXTENDED_DATA_STDERR + +from .logging import SSHLogger + +from .misc import BytesOrStr, Error, MaybeAwait +from .misc import ProtocolError, Record, open_file +from .misc import BreakReceived, SignalReceived, TerminalSizeChanged + +from .session import DataType, TermModes, TermSize + +from .stream import SSHReader, SSHWriter, SSHStreamSession +from .stream import SSHClientStreamSession, SSHServerStreamSession +from .stream import SFTPServerFactory + + +_AnyStrContra = TypeVar('_AnyStrContra', bytes, str, contravariant=True) + +_File = Union[IO[bytes], '_AsyncFileProtocol[bytes]'] + +ProcessSource = Union[int, str, socket.socket, PurePath, SSHReader[bytes], + asyncio.StreamReader, _File] + +ProcessTarget = Union[int, str, socket.socket, PurePath, SSHWriter[bytes], + asyncio.StreamWriter, _File] + +SSHServerProcessFactory = Callable[['SSHServerProcess[AnyStr]'], + MaybeAwait[None]] + + +class _AsyncFileProtocol(Protocol[AnyStr]): + """Protocol for an async file""" + + async def read(self, n: int = -1) -> AnyStr: + """Read from an async file""" + + async def write(self, data: AnyStr) -> None: + """Write to an async file""" + + async def close(self) -> None: + """Close an async file""" + + +class _ReaderProtocol(Protocol): + """A class that can be used as a reader in SSHProcess""" + + def pause_reading(self) -> None: + """Pause reading""" + + def resume_reading(self) -> None: + """Resume reading""" + + def close(self) -> None: + """Stop forwarding data""" + + +class _WriterProtocol(Protocol[_AnyStrContra]): + """A class that can be used as a writer in SSHProcess""" + + def write(self, data: _AnyStrContra) -> None: + """Write data""" + + def write_exception(self, exc: Exception) -> None: + """Write exception (break, signal, terminal size change)""" + + return # pragma: no cover + + def write_eof(self) -> None: + """Close output when end of file is received""" + + def close(self) -> None: + """Stop forwarding data""" + + +def _is_regular_file(file: IO[bytes]) -> bool: + """Return if argument is a regular file or file-like object""" + + try: + return stat.S_ISREG(os.fstat(file.fileno()).st_mode) + except OSError: + return True + +class _UnicodeReader(_ReaderProtocol, Generic[AnyStr]): + """Handle buffering partial Unicode data""" + + def __init__(self, encoding: Optional[str], 
errors: str, + textmode: bool = False): + super().__init__() + + if encoding and not textmode: + self._decoder: Optional[codecs.IncrementalDecoder] = \ + codecs.getincrementaldecoder(encoding)(errors) + else: + self._decoder = None + + def decode(self, data: bytes, final: bool = False) -> AnyStr: + """Decode Unicode bytes when reading from binary sources""" + + if self._decoder: + try: + decoded_data = cast(AnyStr, self._decoder.decode(data, final)) + except UnicodeDecodeError as exc: + raise ProtocolError(str(exc)) from None + else: + decoded_data = cast(AnyStr, data) + + return decoded_data + + def check_partial(self) -> None: + """Check if there's partial Unicode data left at EOF""" + + self.decode(b'', True) + + def close(self) -> None: + """Perform necessary cleanup on error (provided by derived classes)""" + + +class _UnicodeWriter(_WriterProtocol[AnyStr]): + """Handle encoding Unicode data before writing it""" + + def __init__(self, encoding: Optional[str], errors: str, + textmode: bool = False): + super().__init__() + + if encoding and not textmode: + self._encoder: Optional[codecs.IncrementalEncoder] = \ + codecs.getincrementalencoder(encoding)(errors) + else: + self._encoder = None + + def encode(self, data: AnyStr) -> bytes: + """Encode Unicode bytes when writing to binary targets""" + + if self._encoder: + assert self._encoder is not None + encoded_data = cast(bytes, self._encoder.encode(cast(str, data))) + else: + encoded_data = cast(bytes, data) + + return encoded_data + + +class _FileReader(_UnicodeReader[AnyStr]): + """Forward data from a file""" + + def __init__(self, process: 'SSHProcess[AnyStr]', file: IO[bytes], + bufsize: int, datatype: DataType, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors, hasattr(file, 'encoding')) + + self._process: 'SSHProcess[AnyStr]' = process + self._file = file + self._bufsize = bufsize + self._datatype = datatype + self._paused = False + + def feed(self) -> None: + """Feed file data""" + + while not self._paused: + data = self._file.read(self._bufsize) + + if data: + self._process.feed_data(self.decode(data), self._datatype) + else: + self.check_partial() + self._process.feed_eof(self._datatype) + break + + def pause_reading(self) -> None: + """Pause reading from the file""" + + self._paused = True + + def resume_reading(self) -> None: + """Resume reading from the file""" + + self._paused = False + self.feed() + + def close(self) -> None: + """Stop forwarding data from the file""" + + self._file.close() + + +class _AsyncFileReader(_UnicodeReader[AnyStr]): + """Forward data from an aiofile""" + + def __init__(self, process: 'SSHProcess[AnyStr]', + file: _AsyncFileProtocol[bytes], + bufsize: int, datatype: DataType, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors, hasattr(file, 'encoding')) + + self._conn = process.channel.get_connection() + self._process: 'SSHProcess[AnyStr]' = process + self._file = file + self._bufsize = bufsize + self._datatype = datatype + self._paused = False + + async def _feed(self) -> None: + """Feed file data""" + + while not self._paused: + data = await self._file.read(self._bufsize) + + if data: + self._process.feed_data(self.decode(data), self._datatype) + else: + self.check_partial() + self._process.feed_eof(self._datatype) + break + + def feed(self) -> None: + """Start feeding file data""" + + self._conn.create_task(self._feed()) + + def pause_reading(self) -> None: + """Pause reading from the file""" + + self._paused = True + + def resume_reading(self) 
-> None: + """Resume reading from the file""" + + self._paused = False + self.feed() + + def close(self) -> None: + """Stop forwarding data from the file""" + + self._conn.create_task(self._file.close()) + + +class _FileWriter(_UnicodeWriter[AnyStr]): + """Forward data to a file""" + + def __init__(self, file: IO[bytes], needs_close: bool, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors, hasattr(file, 'encoding')) + + self._file = file + self._needs_close = needs_close + + def write(self, data: AnyStr) -> None: + """Write data to the file""" + + self._file.write(self.encode(data)) + + def write_eof(self) -> None: + """Close output file when end of file is received""" + + self.close() + + def close(self) -> None: + """Stop forwarding data to the file""" + + if self._needs_close: + self._file.close() + + +class _AsyncFileWriter(_UnicodeWriter[AnyStr]): + """Forward data to an aiofile""" + + def __init__(self, process: 'SSHProcess[AnyStr]', + file: _AsyncFileProtocol[bytes], needs_close: bool, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors, hasattr(file, 'encoding')) + + self._process: 'SSHProcess[AnyStr]' = process + self._file = file + self._needs_close = needs_close + self._queue: asyncio.Queue[Optional[AnyStr]] = asyncio.Queue() + self._write_task: Optional[asyncio.Task[None]] = \ + process.channel.get_connection().create_task(self._writer()) + + async def _writer(self) -> None: + """Process writes to the file""" + + while True: + data = await self._queue.get() + + if data is None: + self._queue.task_done() + break + + await self._file.write(self.encode(data)) + self._queue.task_done() + + if self._needs_close: + await self._file.close() + + def write(self, data: AnyStr) -> None: + """Write data to the file""" + + self._queue.put_nowait(data) + + def write_eof(self) -> None: + """Close output file when end of file is received""" + + self.close() + + def close(self) -> None: + """Stop forwarding data to the file""" + + if self._write_task: + self._write_task = None + self._queue.put_nowait(None) + self._process.add_cleanup_task(self._queue.join()) + + +class _PipeReader(_UnicodeReader[AnyStr], asyncio.BaseProtocol): + """Forward data from a pipe""" + + def __init__(self, process: 'SSHProcess[AnyStr]', datatype: DataType, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors) + + self._process: 'SSHProcess[AnyStr]' = process + self._datatype = datatype + self._transport: Optional[asyncio.ReadTransport] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle a newly opened pipe""" + + self._transport = cast(asyncio.ReadTransport, transport) + + def data_received(self, data: bytes) -> None: + """Forward data from the pipe""" + + self._process.feed_data(self.decode(data), self._datatype) + + def eof_received(self) -> None: + """Forward EOF from the pipe""" + + self.check_partial() + self._process.feed_eof(self._datatype) + + def pause_reading(self) -> None: + """Pause reading from the pipe""" + + assert self._transport is not None + self._transport.pause_reading() + + def resume_reading(self) -> None: + """Resume reading from the pipe""" + + assert self._transport is not None + self._transport.resume_reading() + + def close(self) -> None: + """Stop forwarding data from the pipe""" + + assert self._transport is not None + self._transport.close() + + +class _PipeWriter(_UnicodeWriter[AnyStr], asyncio.BaseProtocol): + """Forward data to a pipe""" + + def __init__(self, process: 
'SSHProcess[AnyStr]', datatype: DataType, + needs_close: bool, encoding: Optional[str], errors: str): + super().__init__(encoding, errors) + + self._process: 'SSHProcess[AnyStr]' = process + self._datatype = datatype + self._needs_close = needs_close + self._transport: Optional[asyncio.WriteTransport] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Handle a newly opened pipe""" + + self._transport = cast(asyncio.WriteTransport, transport) + + def pause_writing(self) -> None: + """Pause writing to the pipe""" + + self._process.pause_feeding(self._datatype) + + def resume_writing(self) -> None: + """Resume writing to the pipe""" + + self._process.resume_feeding(self._datatype) + + def write(self, data: AnyStr) -> None: + """Write data to the pipe""" + + assert self._transport is not None + self._transport.write(self.encode(data)) + + def write_eof(self) -> None: + """Write EOF to the pipe""" + + assert self._transport is not None + self._transport.write_eof() + + def close(self) -> None: + """Stop forwarding data to the pipe""" + + assert self._transport is not None + + # There's currently no public API to tell connect_write_pipe() + # to not close the pipe on when the created transport is closed, + # and not closing it triggers an "unclosed transport" warning. + # This masks that warning when we want to keep the pipe open. + # + # pylint: disable=protected-access + + if self._needs_close: + self._transport.close() + else: + self._transport._pipe = None # type: ignore + + +class _ProcessReader(_ReaderProtocol, Generic[AnyStr]): + """Forward data from another SSH process""" + + def __init__(self, process: 'SSHProcess[AnyStr]', datatype: DataType): + super().__init__() + self._process: 'SSHProcess[AnyStr]' = process + self._datatype = datatype + + def pause_reading(self) -> None: + """Pause reading from the other channel""" + + self._process.pause_feeding(self._datatype) + + def resume_reading(self) -> None: + """Resume reading from the other channel""" + + self._process.resume_feeding(self._datatype) + + def close(self) -> None: + """Stop forwarding data from the other channel""" + + self._process.clear_writer(self._datatype) + + +class _ProcessWriter(_WriterProtocol[AnyStr]): + """Forward data to another SSH process""" + + def __init__(self, process: 'SSHProcess[AnyStr]', datatype: DataType): + super().__init__() + self._process: 'SSHProcess[AnyStr]' = process + self._datatype = datatype + + def write(self, data: AnyStr) -> None: + """Write data to the other channel""" + + self._process.feed_data(data, self._datatype) + + def write_exception(self, exc: Exception) -> None: + """Write an exception to the other channel""" + + cast(SSHClientProcess, self._process).feed_exception(exc) + + def write_eof(self) -> None: + """Write EOF to the other channel""" + + self._process.feed_eof(self._datatype) + + def close(self) -> None: + """Stop forwarding data to the other channel""" + + self._process.clear_reader(self._datatype) + + +class _StreamReader(_UnicodeReader[AnyStr]): + """Forward data from an asyncio stream""" + + def __init__(self, process: 'SSHProcess[AnyStr]', + reader: asyncio.StreamReader, + bufsize: int, datatype: DataType, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors) + + self._process: 'SSHProcess[AnyStr]' = process + self._conn = process.channel.get_connection() + self._reader = reader + self._bufsize = bufsize + self._datatype = datatype + self._paused = False + + async def _feed(self) -> None: + """Feed stream 
data""" + + while not self._paused: + data = await self._reader.read(self._bufsize) + + if data: + self._process.feed_data(self.decode(data), self._datatype) + else: + self.check_partial() + self._process.feed_eof(self._datatype) + break + + def feed(self) -> None: + """Start feeding stream data""" + + self._conn.create_task(self._feed()) + + def pause_reading(self) -> None: + """Pause reading from the stream""" + + self._paused = True + + def resume_reading(self) -> None: + """Resume reading from the stream""" + + self._paused = False + self.feed() + + def close(self) -> None: + """Ignore close -- the caller must clean up the associated transport""" + + +class _StreamWriter(_UnicodeWriter[AnyStr]): + """Forward data to an asyncio stream""" + + def __init__(self, writer: asyncio.StreamWriter, + encoding: Optional[str], errors: str): + super().__init__(encoding, errors) + + self._writer = writer + + def write(self, data: AnyStr) -> None: + """Write data to the stream""" + + self._writer.write(self.encode(data)) + + def write_eof(self) -> None: + """Write EOF to the stream""" + + self._writer.write_eof() + + def close(self) -> None: + """Ignore close -- the caller must clean up the associated transport""" + + +class _DevNullWriter(_WriterProtocol[AnyStr]): + """Discard data""" + + def write(self, data: AnyStr) -> None: + """Discard data being written""" + + def write_eof(self) -> None: + """Ignore end of file""" + + def close(self) -> None: + """Ignore close""" + + +class _StdoutWriter(_WriterProtocol[AnyStr]): + """Forward data to an SSH process' stdout instead of stderr""" + + def __init__(self, process: 'SSHProcess[AnyStr]'): + super().__init__() + self._process: 'SSHProcess[AnyStr]' = process + + def write(self, data: AnyStr) -> None: + """Pretend data was received on stdout""" + + self._process.data_received(data, None) + + def write_eof(self) -> None: + """Ignore end of file""" + + def close(self) -> None: + """Ignore close""" + + +class ProcessError(Error): + """SSH Process error + + This exception is raised when an :class:`SSHClientProcess` exits + with a non-zero exit status and error checking is enabled. 
In + addition to the usual error code, reason, and language, it + contains the following fields: + + ============ ======================================= ================= + Field Description Type + ============ ======================================= ================= + env The environment the client requested `str` or `None` + to be set for the process + command The command the client requested the `str` or `None` + process to execute (if any) + subsystem The subsystem the client requested the `str` or `None` + process to open (if any) + exit_status The exit status returned, or -1 if an `int` or `None` + exit signal is sent + exit_signal The exit signal sent (if any) in the `tuple` or `None` + form of a tuple containing the signal + name, a `bool` for whether a core dump + occurred, a message associated with the + signal, and the language the message + was in + returncode The exit status returned, or negative `int` or `None` + of the signal number when an exit + signal is sent + stdout The output sent by the process to `str` or `bytes` + stdout (if not redirected) + stderr The output sent by the process to `str` or `bytes` + stderr (if not redirected) + ============ ======================================= ================= + + """ + + def __init__(self, env: Optional[Mapping[str, str]], + command: Optional[str], subsystem: Optional[str], + exit_status: Optional[int], + exit_signal: Optional[Tuple[str, bool, str, str]], + returncode: Optional[int], stdout: BytesOrStr, + stderr: BytesOrStr, reason: str = '', + lang: str = DEFAULT_LANG): + self.env = env + self.command = command + self.subsystem = subsystem + self.exit_status = exit_status + self.exit_signal = exit_signal + self.returncode = returncode + self.stdout = stdout + self.stderr = stderr + + if exit_signal: + signal, core_dumped, msg, lang = exit_signal + reason = 'Process exited with signal %s%s%s' % \ + (signal, ': ' + msg if msg else '', + ' (core dumped)' if core_dumped else '') + elif exit_status: + reason = 'Process exited with non-zero exit status %s' % \ + exit_status + + super().__init__(exit_status or 0, reason, lang) + + +# pylint: disable=redefined-builtin +class TimeoutError(ProcessError, asyncio.TimeoutError): + """SSH Process timeout error + + This exception is raised when a timeout occurs when calling the + :meth:`wait ` method on :class:`SSHClientProcess` + or the :meth:`run ` method on + :class:`SSHClientConnection`. It is a subclass of :class:`ProcessError` + and contains all of the fields documented there, including any output + received on stdout and stderr prior to when the timeout occurred. It + is also a subclass of :class:`asyncio.TimeoutError`, for code that + might be expecting that. + + """ +# pylint: enable=redefined-builtin + + +class SSHCompletedProcess(Record): + """Results from running an SSH process + + This object is returned by the :meth:`run ` + method on :class:`SSHClientConnection` when the requested command + has finished running. 
It contains the following fields: + + ============ ======================================= ================= + Field Description Type + ============ ======================================= ================= + env The environment the client requested `dict` or `None` + to be set for the process + command The command the client requested the `str` or `None` + process to execute (if any) + subsystem The subsystem the client requested the `str` or `None` + process to open (if any) + exit_status The exit status returned, or -1 if an `int` + exit signal is sent + exit_signal The exit signal sent (if any) in the `tuple` or `None` + form of a tuple containing the signal + name, a `bool` for whether a core dump + occurred, a message associated with the + signal, and the language the message + was in + returncode The exit status returned, or negative `int` + of the signal number when an exit + signal is sent + stdout The output sent by the process to `str` or `bytes` + stdout (if not redirected) + stderr The output sent by the process to `str` or `bytes` + stderr (if not redirected) + ============ ======================================= ================= + + """ + + env: Optional[Mapping[str, str]] + command: Optional[str] + subsystem: Optional[str] + exit_status: Optional[int] + exit_signal: Optional[Tuple[str, bool, str, str]] + returncode: Optional[int] + stdout: Optional[BytesOrStr] + stderr: Optional[BytesOrStr] + + +class SSHProcess(SSHStreamSession, Generic[AnyStr]): + """SSH process handler""" + + def __init__(self, *args) -> None: + super().__init__(*args) + + self._cleanup_tasks: List[Awaitable[None]] = [] + + self._readers: Dict[Optional[int], _ReaderProtocol] = {} + self._send_eof: Dict[Optional[int], bool] = {} + + self._writers: Dict[Optional[int], _WriterProtocol[AnyStr]] = {} + self._recv_eof: Dict[Optional[int], bool] = {} + + self._paused_write_streams: Set[Optional[int]] = set() + + async def __aenter__(self) -> 'SSHProcess[AnyStr]': + """Allow SSHProcess to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + """Wait for a full channel close when exiting the async context""" + + self.close() + await self.wait_closed() + return False + + @property + def channel(self) -> SSHChannel[AnyStr]: + """The channel associated with the process""" + + assert self._chan is not None + return self._chan + + @property + def logger(self) -> SSHLogger: + """The logger associated with the process""" + + assert self._chan is not None + return self._chan.logger + + @property + def command(self) -> Optional[str]: + """The command the client requested to execute, if any + + If the client did not request that a command be executed, + this property will be set to `None`. + + """ + + assert self._chan is not None + return self._chan.get_command() + + @property + def subsystem(self) -> Optional[str]: + """The subsystem the client requested to open, if any + + If the client did not request that a subsystem be opened, + this property will be set to `None`. 
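+
+        For example, this would be `'sftp'` for an SFTP session and
+        `None` when a command was executed instead.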
+ + """ + + assert self._chan is not None + return self._chan.get_subsystem() + + @property + def env(self) -> Mapping[str, str]: + """A mapping containing the environment set by the client""" + + assert self._chan is not None + return self._chan.get_environment() + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Return additional information about this process + + This method returns extra information about the channel + associated with this process. See :meth:`get_extra_info() + ` on :class:`SSHClientChannel` + for additional information. + + """ + + assert self._chan is not None + return self._chan.get_extra_info(name, default) + + async def _create_reader(self, source: ProcessSource, bufsize: int, + send_eof: bool, recv_eof: bool, + datatype: DataType = None) -> None: + """Create a reader to forward data to the SSH channel""" + + def pipe_factory() -> _PipeReader: + """Return a pipe read handler""" + + return _PipeReader(self, datatype, self._encoding, self._errors) + + if source == PIPE: + reader: Optional[_ReaderProtocol] = None + elif source == DEVNULL: + assert self._chan is not None + self._chan.write_eof() + reader = None + elif isinstance(source, SSHReader): + reader_stream, reader_datatype = source.get_redirect_info() + reader_process = cast('SSHProcess[AnyStr]', reader_stream) + writer = _ProcessWriter[AnyStr](self, datatype) + reader_process.set_writer(writer, recv_eof, reader_datatype) + reader = _ProcessReader(reader_process, reader_datatype) + elif isinstance(source, asyncio.StreamReader): + reader = _StreamReader(self, source, bufsize, datatype, + self._encoding, self._errors) + else: + file: _File + + if isinstance(source, str): + file = open_file(source, 'rb', buffering=bufsize) + elif isinstance(source, PurePath): + file = open_file(str(source), 'rb', buffering=bufsize) + elif isinstance(source, int): + file = os.fdopen(source, 'rb', buffering=bufsize) + elif isinstance(source, socket.socket): + file = os.fdopen(source.detach(), 'rb', buffering=bufsize) + else: + file = source + + if hasattr(file, 'read') and \ + (asyncio.iscoroutinefunction(file.read) or + inspect.isgeneratorfunction(file.read)): + reader = _AsyncFileReader(self, cast(_AsyncFileProtocol, file), + bufsize, datatype, self._encoding, + self._errors) + elif _is_regular_file(cast(IO[bytes], file)): + reader = _FileReader(self, cast(IO[bytes], file), bufsize, + datatype, self._encoding, self._errors) + else: + if hasattr(source, 'buffer'): + # If file was opened in text mode, remove that wrapper + file = cast(TextIO, source).buffer + + assert self._loop is not None + _, protocol = \ + await self._loop.connect_read_pipe(pipe_factory, file) + reader = cast(_PipeReader, protocol) + + self.set_reader(reader, send_eof, datatype) + + if isinstance(reader, (_FileReader, _AsyncFileReader, _StreamReader)): + reader.feed() + elif isinstance(reader, _ProcessReader): + reader_process.feed_recv_buf(reader_datatype, writer) + + async def _create_writer(self, target: ProcessTarget, bufsize: int, + send_eof: bool, recv_eof: bool, + datatype: DataType = None) -> None: + """Create a writer to forward data from the SSH channel""" + + def pipe_factory() -> _PipeWriter: + """Return a pipe write handler""" + + return _PipeWriter(self, datatype, recv_eof, + self._encoding, self._errors) + + if target == PIPE: + writer: Optional[_WriterProtocol[AnyStr]] = None + elif target == DEVNULL: + writer = _DevNullWriter() + elif target == STDOUT: + writer = _StdoutWriter(self) + elif isinstance(target, SSHWriter): + 
writer_stream, writer_datatype = target.get_redirect_info() + writer_process = cast('SSHProcess[AnyStr]', writer_stream) + reader = _ProcessReader(self, datatype) + writer_process.set_reader(reader, send_eof, writer_datatype) + writer = _ProcessWriter[AnyStr](writer_process, writer_datatype) + elif isinstance(target, asyncio.StreamWriter): + writer = _StreamWriter(target, self._encoding, self._errors) + else: + file: _File + needs_close = True + + if isinstance(target, str): + file = open_file(target, 'wb', buffering=bufsize) + elif isinstance(target, PurePath): + file = open_file(str(target), 'wb', buffering=bufsize) + elif isinstance(target, int): + file = os.fdopen(target, 'wb', + buffering=bufsize, closefd=recv_eof) + elif isinstance(target, socket.socket): + fd = target.detach() if recv_eof else target.fileno() + file = os.fdopen(fd, 'wb', buffering=bufsize, closefd=recv_eof) + else: + file = target + needs_close = recv_eof + + if hasattr(file, 'write') and \ + (asyncio.iscoroutinefunction(file.write) or + inspect.isgeneratorfunction(file.write)): + writer = _AsyncFileWriter( + self, cast(_AsyncFileProtocol, file), needs_close, + self._encoding, self._errors) + elif _is_regular_file(cast(IO[bytes], file)): + writer = _FileWriter(cast(IO[bytes], file), needs_close, + self._encoding, self._errors) + else: + if hasattr(target, 'buffer'): + # If file was opened in text mode, remove that wrapper + file = cast(TextIO, target).buffer + + assert self._loop is not None + _, protocol = \ + await self._loop.connect_write_pipe(pipe_factory, file) + writer = cast(_PipeWriter, protocol) + + self.set_writer(writer, recv_eof, datatype) + + if writer: + self.feed_recv_buf(datatype, writer) + + def _should_block_drain(self, datatype: DataType) -> bool: + """Return whether output is still being written to the channel""" + + return (datatype in self._readers or + super()._should_block_drain(datatype)) + + def _should_pause_reading(self) -> bool: + """Return whether to pause reading from the channel""" + + return bool(self._paused_write_streams) or \ + super()._should_pause_reading() + + def add_cleanup_task(self, task: Awaitable[None]) -> None: + """Add a task to run when the process exits""" + + self._cleanup_tasks.append(task) + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Handle a close of the SSH channel""" + + super().connection_lost(exc) # type: ignore + + for reader in list(self._readers.values()): + reader.close() + + for writer in list(self._writers.values()): + writer.close() + + self._readers = {} + self._writers = {} + + def data_received(self, data: AnyStr, datatype: DataType) -> None: + """Handle incoming data from the SSH channel""" + + writer = self._writers.get(datatype) + + if writer: + writer.write(data) + else: + super().data_received(data, datatype) + + def eof_received(self) -> bool: + """Handle an incoming end of file from the SSH channel""" + + for datatype, writer in list(self._writers.items()): + if self._recv_eof[datatype]: + writer.write_eof() + + return super().eof_received() + + def pause_writing(self) -> None: + """Pause forwarding data to the channel""" + + super().pause_writing() + + for reader in list(self._readers.values()): + reader.pause_reading() + + def resume_writing(self) -> None: + """Resume forwarding data to the channel""" + + super().resume_writing() + + for reader in list(self._readers.values()): + reader.resume_reading() + + def feed_data(self, data: AnyStr, datatype: DataType) -> None: + """Feed data to the channel""" + + assert 
self._chan is not None + self._chan.write(data, datatype) + + def feed_eof(self, datatype: DataType) -> None: + """Feed EOF to the channel""" + + if self._send_eof[datatype]: + assert self._chan is not None + self._chan.write_eof() + + self._readers[datatype].close() + self.clear_reader(datatype) + + def feed_recv_buf(self, datatype: DataType, + writer: _WriterProtocol[AnyStr]) -> None: + """Feed current receive buffer to a newly set writer""" + + for buf in self._recv_buf[datatype]: + if isinstance(buf, Exception): + writer.write_exception(buf) + else: + writer.write(buf) + self._recv_buf_len -= len(buf) + + self._recv_buf[datatype].clear() + + if self._eof_received: + writer.write_eof() + + self._maybe_resume_reading() + + def pause_feeding(self, datatype: DataType) -> None: + """Pause feeding data from the channel""" + + self._paused_write_streams.add(datatype) + self._maybe_pause_reading() + + def resume_feeding(self, datatype: DataType) -> None: + """Resume feeding data from the channel""" + + self._paused_write_streams.remove(datatype) + self._maybe_resume_reading() + + def set_reader(self, reader: Optional[_ReaderProtocol], + send_eof: bool, datatype: DataType) -> None: + """Set a reader used to forward data to the channel""" + + old_reader = self._readers.get(datatype) + + if old_reader: + old_reader.close() + + if reader: + self._readers[datatype] = reader + self._send_eof[datatype] = send_eof + + if self._write_paused: + reader.pause_reading() + elif old_reader: + self.clear_reader(datatype) + + def clear_reader(self, datatype: DataType) -> None: + """Clear a reader forwarding data to the channel""" + + del self._readers[datatype] + del self._send_eof[datatype] + self._unblock_drain(datatype) + + def set_writer(self, writer: Optional[_WriterProtocol[AnyStr]], + recv_eof: bool, datatype: DataType) -> None: + """Set a writer used to forward data from the channel""" + + old_writer = self._writers.get(datatype) + + if old_writer: + old_writer.close() + self.clear_writer(datatype) + + if writer: + self._writers[datatype] = writer + self._recv_eof[datatype] = recv_eof + + def clear_writer(self, datatype: DataType) -> None: + """Clear a writer forwarding data from the channel""" + + if datatype in self._paused_write_streams: + self.resume_feeding(datatype) + + del self._writers[datatype] + + def close(self) -> None: + """Shut down the process""" + + assert self._chan is not None + self._chan.close() + + def is_closing(self) -> bool: + """Return if the channel is closing or is closed""" + + assert self._chan is not None + return self._chan.is_closing() + + async def wait_closed(self) -> None: + """Wait for the process to finish shutting down""" + + assert self._chan is not None + await self._chan.wait_closed() + + for task in self._cleanup_tasks: + await task + + +class SSHClientProcess(SSHProcess[AnyStr], SSHClientStreamSession[AnyStr]): + """SSH client process handler""" + + _chan: SSHClientChannel[AnyStr] + channel: SSHClientChannel[AnyStr] + + def __init__(self) -> None: + super().__init__() + + self._stdin: Optional[SSHWriter[AnyStr]] = None + self._stdout: Optional[SSHReader[AnyStr]] = None + self._stderr: Optional[SSHReader[AnyStr]] = None + + def _collect_output(self, datatype: DataType = None) -> AnyStr: + """Return output from the process""" + + recv_buf = self._recv_buf[datatype] + + if recv_buf and isinstance(recv_buf[-1], Exception): + recv_buf, self._recv_buf[datatype] = recv_buf[:-1], recv_buf[-1:] + else: + self._recv_buf[datatype] = [] + + buf = cast(AnyStr, '' if 
self._encoding else b'') + return buf.join(cast(Iterable[AnyStr], recv_buf)) + + def session_started(self) -> None: + """Start a process for this newly opened client channel""" + + self._stdin = SSHWriter[AnyStr](self, self._chan) + self._stdout = SSHReader[AnyStr](self, self._chan) + self._stderr = SSHReader[AnyStr](self, self._chan, EXTENDED_DATA_STDERR) + + @property + def exit_status(self) -> Optional[int]: + """The exit status of the process""" + + return self._chan.get_exit_status() + + @property + def exit_signal(self) -> Optional[Tuple[str, bool, str, str]]: + """Exit signal information for the process""" + + return self._chan.get_exit_signal() + + @property + def returncode(self) -> Optional[int]: + """The exit status or negative exit signal number for the process""" + + return self._chan.get_returncode() + + @property + def stdin(self) -> SSHWriter[AnyStr]: + """The :class:`SSHWriter` to use to write to stdin of the process""" + + assert self._stdin is not None + return self._stdin + + @property + def stdout(self) -> SSHReader[AnyStr]: + """The :class:`SSHReader` to use to read from stdout of the process""" + + assert self._stdout is not None + return self._stdout + + @property + def stderr(self) -> SSHReader[AnyStr]: + """The :class:`SSHReader` to use to read from stderr of the process""" + + assert self._stderr is not None + return self._stderr + + def feed_exception(self, exc: Exception) -> None: + """Feed exception to the channel""" + + if isinstance(exc, TerminalSizeChanged): + self._chan.change_terminal_size(exc.width, exc.height, + exc.pixwidth, exc.pixheight) + elif isinstance(exc, BreakReceived): + self._chan.send_break(exc.msec) + elif isinstance(exc, SignalReceived): # pragma: no branch + self._chan.send_signal(exc.signal) + + async def redirect(self, stdin: Optional[ProcessSource] = None, + stdout: Optional[ProcessTarget] = None, + stderr: Optional[ProcessTarget] = None, + bufsize: int =io.DEFAULT_BUFFER_SIZE, + send_eof: bool = True, recv_eof: bool = True) -> None: + """Perform I/O redirection for the process + + This method redirects data going to or from any or all of + standard input, standard output, and standard error for + the process. + + The `stdin` argument can be any of the following: + + * An :class:`SSHReader` object + * An :class:`asyncio.StreamReader` object + * A file object open for read + * An `int` file descriptor open for read + * A connected socket object + * A string or :class:`PurePath ` containing + the name of a file or device to open + * `DEVNULL` to provide no input to standard input + * `PIPE` to interactively write standard input + + The `stdout` and `stderr` arguments can be any of the following: + + * An :class:`SSHWriter` object + * An :class:`asyncio.StreamWriter` object + * A file object open for write + * An `int` file descriptor open for write + * A connected socket object + * A string or :class:`PurePath ` containing + the name of a file or device to open + * `DEVNULL` to discard standard error output + * `PIPE` to interactively read standard error output + + The `stderr` argument also accepts the value `STDOUT` to + request that standard error output be delivered to stdout. + + File objects passed in can be associated with plain files, pipes, + sockets, or ttys. + + The default value of `None` means to not change redirection + for that stream. + + .. note:: When passing in asyncio streams, it is the responsibility + of the caller to close the associated transport when it + is no longer needed. 
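+
+        A rough usage sketch (the file names here are placeholders):
+
+            process = await conn.create_process('cat')
+            await process.redirect(stdin='input.txt', stdout='output.txt')
+            await process.wait()
+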
+ + :param stdin: + Source of data to feed to standard input + :param stdout: + Target to feed data from standard output to + :param stderr: + Target to feed data from standard error to + :param bufsize: + Buffer size to use when forwarding data from a file + :param send_eof: + Whether or not to send EOF to the channel when EOF is + received from stdin, defaulting to `True`. If set to `False`, + the channel will remain open after EOF is received on stdin, + and multiple sources can be redirected to the channel. + :param recv_eof: + Whether or not to send EOF to stdout and stderr when EOF is + received from the channel, defaulting to `True`. If set to + `False`, the redirect targets of stdout and stderr will remain + open after EOF is received on the channel and can be used for + multiple redirects. + :type bufsize: `int` + :type send_eof: `bool` + :type recv_eof: `bool` + + """ + + if stdin: + await self._create_reader(stdin, bufsize, send_eof, recv_eof) + + if stdout: + await self._create_writer(stdout, bufsize, send_eof, recv_eof) + + if stderr: + await self._create_writer(stderr, bufsize, send_eof, recv_eof, + EXTENDED_DATA_STDERR) + + async def redirect_stdin(self, source: ProcessSource, + bufsize: int = io.DEFAULT_BUFFER_SIZE, + send_eof: bool = True) -> None: + """Redirect standard input of the process""" + + await self.redirect(source, None, None, bufsize, send_eof, True) + + async def redirect_stdout(self, target: ProcessTarget, + bufsize: int = io.DEFAULT_BUFFER_SIZE, + recv_eof: bool = True) -> None: + """Redirect standard output of the process""" + + await self.redirect(None, target, None, bufsize, True, recv_eof) + + async def redirect_stderr(self, target: ProcessTarget, + bufsize: int = io.DEFAULT_BUFFER_SIZE, + recv_eof: bool = True) -> None: + """Redirect standard error of the process""" + + await self.redirect(None, None, target, bufsize, True, recv_eof) + + def collect_output(self) -> Tuple[AnyStr, AnyStr]: + """Collect output from the process without blocking + + This method returns a tuple of the output that the process + has written to stdout and stderr which has not yet been read. + It is intended to be called instead of read() by callers + that want to collect received data without blocking. + + :returns: A tuple of output to stdout and stderr + + """ + + return (self._collect_output(), + self._collect_output(EXTENDED_DATA_STDERR)) + + # pylint: disable=redefined-builtin + async def communicate(self, input: Optional[AnyStr] = None) -> \ + Tuple[AnyStr, AnyStr]: + """Send input to and/or collect output from the process + + This method is a coroutine which optionally provides input + to the process and then waits for the process to exit, + returning a tuple of the data written to stdout and stderr. + + :param input: + Input data to feed to standard input of the process. Data + should be a `str` if encoding is set, or `bytes` if not. + :type input: `str` or `bytes` + + :returns: A tuple of output to stdout and stderr + + """ + + self._limit = 0 + self._maybe_resume_reading() + + if input: + self._chan.write(input) + self._chan.write_eof() + + await self.wait_closed() + + return self.collect_output() + # pylint: enable=redefined-builtin + + def change_terminal_size(self, width: int, height: int, + pixwidth: int = 0, pixheight: int = 0) -> None: + """Change the terminal window size for this process + + This method changes the width and height of the terminal + associated with this process. 
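+
+        For example, a minimal sketch of propagating a local window resize
+        (`proc` is assumed to be an already-started interactive process)::
+
+            proc.change_terminal_size(132, 48)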
+
+        :param width:
+            The width of the terminal in characters
+        :param height:
+            The height of the terminal in characters
+        :param pixwidth: (optional)
+            The width of the terminal in pixels
+        :param pixheight: (optional)
+            The height of the terminal in pixels
+        :type width: `int`
+        :type height: `int`
+        :type pixwidth: `int`
+        :type pixheight: `int`
+
+        :raises: :exc:`OSError` if the SSH channel is not open
+
+        """
+
+        self._chan.change_terminal_size(width, height, pixwidth, pixheight)
+
+    def send_break(self, msec: int) -> None:
+        """Send a break to the process
+
+        :param msec:
+            The duration of the break in milliseconds
+        :type msec: `int`
+
+        :raises: :exc:`OSError` if the SSH channel is not open
+
+        """
+
+        self._chan.send_break(msec)
+
+    def send_signal(self, signal: str) -> None:
+        """Send a signal to the process
+
+        :param signal:
+            The signal to deliver
+        :type signal: `str`
+
+        :raises: :exc:`OSError` if the SSH channel is not open
+
+        """
+
+        self._chan.send_signal(signal)
+
+    def terminate(self) -> None:
+        """Terminate the process
+
+        :raises: :exc:`OSError` if the SSH channel is not open
+
+        """
+
+        self._chan.terminate()
+
+    def kill(self) -> None:
+        """Forcibly kill the process
+
+        :raises: :exc:`OSError` if the SSH channel is not open
+
+        """
+
+        self._chan.kill()
+
+    async def wait(self, check: bool = False,
+                   timeout: Optional[float] = None) -> SSHCompletedProcess:
+        """Wait for process to exit
+
+        This method is a coroutine which waits for the process to
+        exit. It returns an :class:`SSHCompletedProcess` object with
+        the exit status or signal information and the output sent
+        to stdout and stderr if those are redirected to pipes.
+
+        If the check argument is set to `True`, a non-zero exit
+        status from the process will trigger the :exc:`ProcessError`
+        exception to be raised.
+
+        If a timeout is specified and it expires before the process
+        exits, the :exc:`TimeoutError` exception will be raised. By
+        default, no timeout is set and this call will wait indefinitely.
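+
+        A sketch of typical use (the process object `proc` and the
+        30-second timeout are illustrative)::
+
+            try:
+                result = await proc.wait(check=True, timeout=30)
+            except asyncssh.ProcessError as exc:
+                print('Non-zero exit status:', exc.exit_status)
+            except asyncssh.TimeoutError as exc:
+                print('Timed out; partial output:', exc.stdout)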
+ + :param check: + Whether or not to raise an error on non-zero exit status + :param timeout: + Amount of time in seconds to wait for process to exit, or + `None` to wait indefinitely + :type check: `bool` + :type timeout: `int`, `float`, or `None` + + :returns: :class:`SSHCompletedProcess` + + :raises: | :exc:`ProcessError` if check is set to `True` + and the process returns a non-zero exit status + | :exc:`TimeoutError` if the timeout expires + before the process exits + + """ + + try: + stdout_data, stderr_data = \ + await asyncio.wait_for(self.communicate(), timeout) + except asyncio.TimeoutError: + stdout_data, stderr_data = self.collect_output() + + raise TimeoutError(self.env, self.command, self.subsystem, + self.exit_status, self.exit_signal, + self.returncode, stdout_data, + stderr_data) from None + + if check and self.exit_status: + raise ProcessError(self.env, self.command, self.subsystem, + self.exit_status, self.exit_signal, + self.returncode, stdout_data, stderr_data) + else: + return SSHCompletedProcess(self.env, self.command, self.subsystem, + self.exit_status, self.exit_signal, + self.returncode, stdout_data, + stderr_data) + + +class SSHServerProcess(SSHProcess[AnyStr], SSHServerStreamSession[AnyStr]): + """SSH server process handler""" + + _chan: SSHServerChannel[AnyStr] + channel: SSHServerChannel[AnyStr] + + def __init__(self, process_factory: SSHServerProcessFactory, + sftp_factory: Optional[SFTPServerFactory], + sftp_version: int, allow_scp: bool): + super().__init__(self._start_process, sftp_factory, + sftp_version, allow_scp) + + self._process_factory = process_factory + + self._stdin: Optional[SSHReader[AnyStr]] = None + self._stdout: Optional[SSHWriter[AnyStr]] = None + self._stderr: Optional[SSHWriter[AnyStr]] = None + + def _start_process(self, stdin: SSHReader[AnyStr], + stdout: SSHWriter[AnyStr], + stderr: SSHWriter[AnyStr]) -> MaybeAwait[None]: + """Start a new server process""" + + self._stdin = stdin + self._stdout = stdout + self._stderr = stderr + + return self._process_factory(self) + + @property + def term_type(self) -> Optional[str]: + """The terminal type set by the client + + If the client didn't request a pseudo-terminal, this + property will be set to `None`. + + """ + + return self._chan.get_terminal_type() + + @property + def term_size(self) -> TermSize: + """The terminal size set by the client + + This property contains a tuple of four `int` values + representing the width and height of the terminal in + characters followed by the width and height of the + terminal in pixels. If the client hasn't set terminal + size information, the values will be set to zero. + + """ + + return self._chan.get_terminal_size() + + @property + def term_modes(self) -> TermModes: + """A mapping containing the TTY modes set by the client + + If the client didn't request a pseudo-terminal, this + property will be set to an empty mapping. 
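+
+        As a sketch, a server-side handler might check the client's echo
+        setting (assuming the `asyncssh.PTY_ECHO` mode opcode and a
+        `process` object for this session)::
+
+            echo_enabled = bool(process.term_modes.get(asyncssh.PTY_ECHO))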
+ + """ + + return self._chan.get_terminal_modes() + + @property + def stdin(self) -> SSHReader[AnyStr]: + """The :class:`SSHReader` to use to read from stdin of the process""" + + assert self._stdin is not None + return self._stdin + + @property + def stdout(self) -> SSHWriter[AnyStr]: + """The :class:`SSHWriter` to use to write to stdout of the process""" + + assert self._stdout is not None + return self._stdout + + @property + def stderr(self) -> SSHWriter[AnyStr]: + """The :class:`SSHWriter` to use to write to stderr of the process""" + + assert self._stderr is not None + return self._stderr + + def exception_received(self, exc: Exception) -> None: + """Handle an incoming exception on the channel""" + + writer = self._writers.get(None) + + if writer: + writer.write_exception(exc) + else: + super().exception_received(exc) + + async def redirect(self, stdin: Optional[ProcessTarget] = None, + stdout: Optional[ProcessSource] = None, + stderr: Optional[ProcessSource] = None, + bufsize: int = io.DEFAULT_BUFFER_SIZE, + send_eof: bool = True, recv_eof: bool = True) -> None: + """Perform I/O redirection for the process + + This method redirects data going to or from any or all of + standard input, standard output, and standard error for + the process. + + The `stdin` argument can be any of the following: + + * An :class:`SSHWriter` object + * An :class:`asyncio.StreamWriter` object + * A file object open for write + * An `int` file descriptor open for write + * A connected socket object + * A string or :class:`PurePath ` containing + the name of a file or device to open + * `DEVNULL` to discard standard error output + * `PIPE` to interactively read standard error output + + The `stdout` and `stderr` arguments can be any of the following: + + * An :class:`SSHReader` object + * An :class:`asyncio.StreamReader` object + * A file object open for read + * An `int` file descriptor open for read + * A connected socket object + * A string or :class:`PurePath ` containing + the name of a file or device to open + * `DEVNULL` to provide no input to standard input + * `PIPE` to interactively write standard input + + File objects passed in can be associated with plain files, pipes, + sockets, or ttys. + + The default value of `None` means to not change redirection + for that stream. + + .. note:: When passing in asyncio streams, it is the responsibility + of the caller to close the associated transport when it + is no longer needed. + + :param stdin: + Target to feed data from standard input to + :param stdout: + Source of data to feed to standard output + :param stderr: + Source of data to feed to standard error + :param bufsize: + Buffer size to use when forwarding data from a file + :param send_eof: + Whether or not to send EOF to the channel when EOF is + received from stdout or stderr, defaulting to `True`. If + set to `False`, the channel will remain open after EOF is + received on stdout or stderr, and multiple sources can be + redirected to the channel. + :param recv_eof: + Whether or not to send EOF to stdin when EOF is received + on the channel, defaulting to `True`. If set to `False`, + the redirect target of stdin will remain open after EOF + is received on the channel and can be used for multiple + redirects. 
+        :type bufsize: `int`
+        :type send_eof: `bool`
+        :type recv_eof: `bool`
+
+        """
+
+        if stdin:
+            await self._create_writer(stdin, bufsize, send_eof, recv_eof)
+
+        if stdout:
+            await self._create_reader(stdout, bufsize, send_eof, recv_eof)
+
+        if stderr:
+            await self._create_reader(stderr, bufsize, send_eof, recv_eof,
+                                      EXTENDED_DATA_STDERR)
+
+    async def redirect_stdin(self, target: ProcessTarget,
+                             bufsize: int = io.DEFAULT_BUFFER_SIZE,
+                             recv_eof: bool = True) -> None:
+        """Redirect standard input of the process"""
+
+        await self.redirect(target, None, None, bufsize, True, recv_eof)
+
+    async def redirect_stdout(self, source: ProcessSource,
+                              bufsize: int = io.DEFAULT_BUFFER_SIZE,
+                              send_eof: bool = True) -> None:
+        """Redirect standard output of the process"""
+
+        await self.redirect(None, source, None, bufsize, send_eof, True)
+
+    async def redirect_stderr(self, source: ProcessSource,
+                              bufsize: int = io.DEFAULT_BUFFER_SIZE,
+                              send_eof: bool = True) -> None:
+        """Redirect standard error of the process"""
+
+        await self.redirect(None, None, source, bufsize, send_eof, True)
+
+    def get_terminal_type(self) -> Optional[str]:
+        """Return the terminal type set by the client for the process
+
+        This method returns the terminal type set by the client
+        when the process was started. If the client didn't request
+        a pseudo-terminal, this method will return `None`.
+
+        :returns: A `str` containing the terminal type or `None` if
+                  no pseudo-terminal was requested
+
+        """
+
+        return self.term_type
+
+    def get_terminal_size(self) -> Tuple[int, int, int, int]:
+        """Return the terminal size set by the client for the process
+
+        This method returns the latest terminal size information set
+        by the client. If the client didn't set any terminal size
+        information, all values returned will be zero.
+
+        :returns: A tuple of four `int` values containing the width and
+                  height of the terminal in characters and the width
+                  and height of the terminal in pixels
+
+        """
+
+        return self.term_size
+
+    def get_terminal_mode(self, mode: int) -> Optional[int]:
+        """Return the requested TTY mode for this session
+
+        This method looks up the value of a POSIX terminal mode
+        set by the client when the process was started. If the client
+        didn't request a pseudo-terminal or didn't set the requested
+        TTY mode opcode, this method will return `None`.
+
+        :param mode:
+            POSIX terminal mode taken from :ref:`POSIX terminal modes
+            ` to look up
+        :type mode: `int`
+
+        :returns: An `int` containing the value of the requested
+                  POSIX terminal mode or `None` if the requested
+                  mode was not set
+
+        """
+
+        return self.term_modes.get(mode)
+
+    def exit(self, status: int) -> None:
+        """Send exit status and close the channel
+
+        This method can be called to report an exit status for the
+        process back to the client and close the channel.
+
+        :param status:
+            The exit status to report to the client
+        :type status: `int`
+
+        """
+
+        self._chan.exit(status)
+
+    def exit_with_signal(self, signal: str, core_dumped: bool = False,
+                         msg: str = '', lang: str = DEFAULT_LANG) -> None:
+        """Send exit signal and close the channel
+
+        This method can be called to report that the process
+        terminated abnormally with a signal. A more detailed
+        error message may also be provided, along with an indication
+        of whether or not the process dumped core. After
+        reporting the signal, the channel is closed.
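+
+        For example, a handler might report a forced termination (a
+        sketch; note that SSH signal names omit the 'SIG' prefix)::
+
+            process.exit_with_signal('TERM', msg='terminated by server')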
+ + :param signal: + The signal which caused the process to exit + :param core_dumped: (optional) + Whether or not the process dumped core + :param msg: (optional) + Details about what error occurred + :param lang: (optional) + The language the error message is in + :type signal: `str` + :type core_dumped: `bool` + :type msg: `str` + :type lang: `str` + + """ + + return self._chan.exit_with_signal(signal, core_dumped, msg, lang) diff --git a/venv/lib/python3.12/site-packages/asyncssh/public_key.py b/venv/lib/python3.12/site-packages/asyncssh/public_key.py new file mode 100644 index 0000000..2184118 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/public_key.py @@ -0,0 +1,3856 @@ +# Copyright (c) 2013-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH asymmetric encryption handlers""" + +import binascii +import os +import re +import time + +from datetime import datetime +from hashlib import md5, sha1, sha256, sha384, sha512 +from pathlib import Path, PurePath +from typing import Callable, Dict, List, Mapping, Optional, Sequence, Set +from typing import Tuple, Type, Union, cast +from typing_extensions import Protocol + +from .crypto import ed25519_available, ed448_available +from .encryption import Encryption +from .sk import sk_available + +try: + # pylint: disable=unused-import + from .crypto import X509Certificate + from .crypto import generate_x509_certificate, import_x509_certificate + _x509_available = True +except ImportError: # pragma: no cover + _x509_available = False + +try: + import bcrypt + _bcrypt_available = hasattr(bcrypt, 'kdf') +except ImportError: # pragma: no cover + _bcrypt_available = False + +from .asn1 import ASN1DecodeError, BitString, ObjectIdentifier +from .asn1 import der_encode, der_decode, der_decode_partial +from .crypto import CryptoKey, PyCAKey +from .encryption import get_encryption_params, get_encryption +from .misc import BytesOrStr, DefTuple, FilePath, IPNetwork +from .misc import ip_network, read_file, write_file, parse_time_interval +from .packet import NameList, String, UInt32, UInt64 +from .packet import PacketDecodeError, SSHPacket +from .pbe import KeyEncryptionError, pkcs1_encrypt, pkcs8_encrypt +from .pbe import pkcs1_decrypt, pkcs8_decrypt +from .sk import SSH_SK_USER_PRESENCE_REQD, sk_get_resident + + +_Comment = Optional[BytesOrStr] +_CertPrincipals = Union[str, Sequence[str]] +_Time = Union[int, float, datetime, str] + +_PubKeyAlgMap = Dict[bytes, Type['SSHKey']] +_CertAlgMap = Dict[bytes, Tuple[Optional[Type['SSHKey']], + Type['SSHCertificate']]] +_CertSigAlgMap = Dict[bytes, bytes] +_CertVersionMap = Dict[Tuple[bytes, int], + Tuple[bytes, Type['SSHOpenSSHCertificate']]] + +_PEMMap = Dict[bytes, Type['SSHKey']] +_PKCS8OIDMap = Dict[ObjectIdentifier, Type['SSHKey']] +_SKAlgMap = Dict[int, Tuple[Type['SSHKey'], Tuple[object, ...]]] + +_OpenSSHCertOptions = Dict[str, object] +_OpenSSHCertParams = 
Tuple[object, int, int, bytes, bytes, + int, int, bytes, bytes] +_OpenSSHCertEncoders = Sequence[Tuple[str, Callable[[object], bytes]]] +_OpenSSHCertDecoders = Dict[bytes, Callable[[SSHPacket], object]] + +X509CertPurposes = Union[None, str, Sequence[str]] + +_IdentityArg = Union[bytes, FilePath, 'SSHKey', 'SSHCertificate'] +IdentityListArg = Union[_IdentityArg, Sequence[_IdentityArg]] +_KeyArg = Union[bytes, FilePath, 'SSHKey'] +KeyListArg = Union[FilePath, Sequence[_KeyArg]] +_CertArg = Union[bytes, FilePath, 'SSHCertificate'] +CertListArg = Union[_CertArg, Sequence[_CertArg]] +_KeyPairArg = Union['SSHKeyPair', _KeyArg, Tuple[_KeyArg, _CertArg]] +KeyPairListArg = Union[_KeyPairArg, Sequence[_KeyPairArg]] + + +# Default file names in .ssh directory to read private keys from +_DEFAULT_KEY_FILES = ( + ('id_ed25519_sk', ed25519_available and sk_available), + ('id_ecdsa_sk', sk_available), + ('id_ed448', ed448_available), + ('id_ed25519', ed25519_available), + ('id_ecdsa', True), + ('id_rsa', True), + ('id_dsa', True) +) + +# Default directories and file names to read host keys from +_DEFAULT_HOST_KEY_DIRS = ('/opt/local/etc', '/opt/local/etc/ssh', + '/usr/local/etc', '/usr/local/etc/ssh', + '/etc', '/etc/ssh') +_DEFAULT_HOST_KEY_FILES = ('ssh_host_ed448_key', 'ssh_host_ed25519_key', + 'ssh_host_ecdsa_key', 'ssh_host_rsa_key', + 'ssh_host_dsa_key') + +_hashes = {'md5': md5, 'sha1': sha1, 'sha256': sha256, + 'sha384': sha384, 'sha512': sha512} + +_public_key_algs: List[bytes] = [] +_default_public_key_algs: List[bytes] = [] + +_certificate_algs: List[bytes] = [] +_default_certificate_algs: List[bytes] = [] + +_x509_certificate_algs: List[bytes] = [] +_default_x509_certificate_algs: List[bytes] = [] + +_public_key_alg_map: _PubKeyAlgMap = {} +_certificate_alg_map: _CertAlgMap = {} +_certificate_sig_alg_map: _CertSigAlgMap = {} +_certificate_version_map: _CertVersionMap = {} +_pem_map: _PEMMap = {} +_pkcs8_oid_map: _PKCS8OIDMap = {} +_sk_alg_map: _SKAlgMap = {} + +_abs_date_pattern = re.compile(r'\d{8}') +_abs_time_pattern = re.compile(r'\d{14}') + +_subject_pattern = re.compile(r'(?:Distinguished[ -_]?Name|Subject|DN)[=:]?\s?', + re.IGNORECASE) + +# SSH certificate types +CERT_TYPE_USER = 1 +CERT_TYPE_HOST = 2 + +# Flag to omit second argument in alg_params +OMIT = object() + +_OPENSSH_KEY_V1 = b'openssh-key-v1\0' +_OPENSSH_SALT_LEN = 16 +_OPENSSH_WRAP_LEN = 70 + + +def _parse_time(t: _Time) -> int: + """Parse a time value""" + + if isinstance(t, int): + return t + elif isinstance(t, float): + return int(t) + elif isinstance(t, datetime): + return int(t.timestamp()) + elif isinstance(t, str): + if t == 'now': + return int(time.time()) + + match = _abs_date_pattern.fullmatch(t) + if match: + return int(datetime.strptime(t, '%Y%m%d').timestamp()) + + match = _abs_time_pattern.fullmatch(t) + if match: + return int(datetime.strptime(t, '%Y%m%d%H%M%S').timestamp()) + + try: + return int(time.time() + parse_time_interval(t)) + except ValueError: + pass + + raise ValueError('Unrecognized time value') + + +def _wrap_base64(data: bytes, wrap: int = 64) -> bytes: + """Break a Base64 value into multiple lines.""" + + data = binascii.b2a_base64(data)[:-1] + return b'\n'.join(data[i:i+wrap] + for i in range(0, len(data), wrap)) + b'\n' + + +class KeyGenerationError(ValueError): + """Key generation error + + This exception is raised by :func:`generate_private_key`, + :meth:`generate_user_certificate() ` + or :meth:`generate_host_certificate() + ` when the requested parameters are + unsupported. 
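+
+    For example, requesting an unsupported certificate version raises
+    this error (a sketch; the algorithm and key ID are arbitrary)::
+
+        ca_key = asyncssh.generate_private_key('ssh-ed25519')
+        user_key = asyncssh.generate_private_key('ssh-ed25519')
+        try:
+            cert = ca_key.generate_user_certificate(
+                user_key, 'some-key-id', version=99)
+        except asyncssh.KeyGenerationError:
+            ...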
+ + """ + + +class KeyImportError(ValueError): + """Key import error + + This exception is raised by key import functions when the + data provided cannot be imported as a valid key. + + """ + + +class KeyExportError(ValueError): + """Key export error + + This exception is raised by key export functions when the + requested format is unknown or encryption is requested for a + format which doesn't support it. + + """ + + +class SigningKey(Protocol): + """Protocol for signing a block of data""" + + def sign(self, data: bytes) -> bytes: + """Sign a block of data with a private key""" + + +class VerifyingKey(Protocol): + """Protocol for verifying a signature on a block of data""" + + def verify(self, data: bytes, sig: bytes) -> bool: + """Verify a signature on a block of data with a public key""" + + +class SSHKey: + """Parent class which holds an asymmetric encryption key""" + + algorithm: bytes = b'' + sig_algorithms: Sequence[bytes] = () + cert_algorithms: Sequence[bytes] = () + x509_algorithms: Sequence[bytes] = () + all_sig_algorithms: Set[bytes] = set() + default_x509_hash: str = '' + pem_name: bytes = b'' + pkcs8_oid: Optional[ObjectIdentifier] = None + use_executor: bool = False + + def __init__(self, key: Optional[CryptoKey] = None): + self._key = key + self._comment: Optional[bytes] = None + self._filename: Optional[bytes] = None + self._touch_required = False + + @classmethod + def generate(cls, algorithm: bytes, **kwargs) -> 'SSHKey': + """Generate a new SSH private key""" + + raise NotImplementedError + + @classmethod + def make_private(cls, key_params: object) -> 'SSHKey': + """Construct a private key""" + + raise NotImplementedError + + @classmethod + def make_public(cls, key_params: object) -> 'SSHKey': + """Construct a public key""" + + raise NotImplementedError + + @classmethod + def decode_pkcs1_private(cls, key_data: object) -> object: + """Decode a PKCS#1 format private key""" + + @classmethod + def decode_pkcs1_public(cls, key_data: object) -> object: + """Decode a PKCS#1 format public key""" + + @classmethod + def decode_pkcs8_private(cls, alg_params: object, data: bytes) -> object: + """Decode a PKCS#8 format private key""" + + @classmethod + def decode_pkcs8_public(cls, alg_params: object, data: bytes) -> object: + """Decode a PKCS#8 format public key""" + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> object: + """Decode an SSH format private key""" + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> object: + """Decode an SSH format public key""" + + @property + def private_data(self) -> bytes: + """Return private key data in OpenSSH binary format""" + + return String(self.algorithm) + self.encode_ssh_private() + + @property + def public_data(self) -> bytes: + """Return public key data in OpenSSH binary format""" + + return String(self.algorithm) + self.encode_ssh_public() + + @property + def pyca_key(self) -> PyCAKey: + """Return PyCA key for use in X.509 module""" + + assert self._key is not None + return self._key.pyca_key + + def _generate_certificate(self, key: 'SSHKey', version: int, serial: int, + cert_type: int, key_id: str, + principals: _CertPrincipals, + valid_after: _Time, valid_before: _Time, + cert_options: _OpenSSHCertOptions, + sig_alg_name: DefTuple[str], + comment: DefTuple[_Comment]) -> \ + 'SSHOpenSSHCertificate': + """Generate a new SSH certificate""" + + if isinstance(principals, str): + principals = [p.strip() for p in principals.split(',')] + else: + principals = list(principals) + + valid_after = 
_parse_time(valid_after) + valid_before = _parse_time(valid_before) + + if valid_before <= valid_after: + raise ValueError('Valid before time must be later than ' + 'valid after time') + + if sig_alg_name == (): + sig_alg = self.sig_algorithms[0] + else: + sig_alg = cast(str, sig_alg_name).encode() + + if comment == (): + comment = key.get_comment_bytes() + + comment: _Comment + + try: + algorithm, cert_handler = _certificate_version_map[key.algorithm, + version] + except KeyError: + raise KeyGenerationError('Unknown certificate version') from None + + return cert_handler.generate(self, algorithm, key, serial, cert_type, + key_id, principals, valid_after, + valid_before, cert_options, + sig_alg, comment) + + def _generate_x509_certificate(self, key: 'SSHKey', subject: str, + issuer: Optional[str], + serial: Optional[int], + valid_after: _Time, valid_before: _Time, + ca: bool, ca_path_len: Optional[int], + purposes: X509CertPurposes, + user_principals: _CertPrincipals, + host_principals: _CertPrincipals, + hash_name: DefTuple[str], + comment: DefTuple[_Comment]) -> \ + 'SSHX509Certificate': + """Generate a new X.509 certificate""" + + if not _x509_available: # pragma: no cover + raise KeyGenerationError('X.509 certificate generation ' + 'requires PyOpenSSL') + + if not self.x509_algorithms: + raise KeyGenerationError('X.509 certificate generation not ' + 'supported for ' + self.get_algorithm() + + ' keys') + + valid_after = _parse_time(valid_after) + valid_before = _parse_time(valid_before) + + if valid_before <= valid_after: + raise ValueError('Valid before time must be later than ' + 'valid after time') + + if hash_name == (): + hash_name = key.default_x509_hash + + if comment == (): + comment = key.get_comment_bytes() + + hash_name: str + comment: _Comment + + return SSHX509Certificate.generate(self, key, subject, issuer, + serial, valid_after, valid_before, + ca, ca_path_len, purposes, + user_principals, host_principals, + hash_name, comment) + + def get_algorithm(self) -> str: + """Return the algorithm associated with this key""" + + return self.algorithm.decode('ascii') + + def has_comment(self) -> bool: + """Return whether a comment is set for this key + + :returns: `bool` + + """ + + return bool(self._comment) + + def get_comment_bytes(self) -> Optional[bytes]: + """Return the comment associated with this key as a byte string + + :returns: `bytes` or `None` + + """ + + return self._comment or self._filename + + def get_comment(self, encoding: str = 'utf-8', + errors: str = 'strict') -> Optional[str]: + """Return the comment associated with this key as a Unicode string + + :param encoding: + The encoding to use to decode the comment as a Unicode + string, defaulting to UTF-8 + :param errors: + The error handling scheme to use for Unicode decode errors + :type encoding: `str` + :type errors: `str` + + :returns: `str` or `None` + + :raises: :exc:`UnicodeDecodeError` if the comment cannot be + decoded using the specified encoding + + """ + + comment = self.get_comment_bytes() + + return comment.decode(encoding, errors) if comment else None + + def set_comment(self, comment: _Comment, encoding: str = 'utf-8', + errors: str = 'strict') -> None: + """Set the comment associated with this key + + :param comment: + The new comment to associate with this key + :param encoding: + The Unicode encoding to use to encode the comment, + defaulting to UTF-8 + :param errors: + The error handling scheme to use for Unicode encode errors + :type comment: `str`, `bytes`, or `None` + :type encoding: `str` 
+ :type errors: `str` + + :raises: :exc:`UnicodeEncodeError` if the comment cannot be + encoded using the specified encoding + + """ + + if isinstance(comment, str): + comment = comment.encode(encoding, errors) + + self._comment = comment or None + + def get_filename(self) -> Optional[bytes]: + """Return the filename associated with this key + + :returns: `bytes` or `None` + + """ + + return self._filename + + def set_filename(self, filename: Union[None, bytes, FilePath]) -> None: + """Set the filename associated with this key + + :param filename: + The new filename to associate with this key + :type filename: `PurePath`, `str`, `bytes`, or `None` + + """ + + if isinstance(filename, PurePath): + filename = str(filename) + + if isinstance(filename, str): + filename = filename.encode('utf-8') + + self._filename = filename or None + + def get_fingerprint(self, hash_name: str = 'sha256') -> str: + """Get the fingerprint of this key + + Available hashes include: + + md5, sha1, sha256, sha384, sha512 + + :param hash_name: (optional) + The hash algorithm to use to construct the fingerprint. + :type hash_name: `str` + + :returns: `str` + + :raises: :exc:`ValueError` if the hash name is invalid + + """ + + try: + hash_alg = _hashes[hash_name] + except KeyError: + raise ValueError('Unknown hash algorithm') from None + + h = hash_alg(self.public_data) + + if hash_name == 'md5': + fp = h.hexdigest() + fp_text = ':'.join(fp[i:i+2] for i in range(0, len(fp), 2)) + else: + fpb = h.digest() + fp_text = binascii.b2a_base64(fpb).decode('ascii')[:-1].strip('=') + + return hash_name.upper() + ':' + fp_text + + def set_touch_required(self, touch_required: bool) -> None: + """Set whether touch is required when using a security key""" + + self._touch_required = touch_required + + def sign_raw(self, data: bytes, hash_name: str) -> bytes: + """Return a raw signature of the specified data""" + + assert self._key is not None + return self._key.sign(data, hash_name) + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Abstract method to compute an SSH-encoded signature""" + + raise NotImplementedError + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Abstract method to verify an SSH-encoded signature""" + + raise NotImplementedError + + def sign(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Return an SSH-encoded signature of the specified data""" + + if sig_algorithm.startswith(b'x509v3-'): + sig_algorithm = sig_algorithm[7:] + + if sig_algorithm not in self.all_sig_algorithms: + raise ValueError('Unrecognized signature algorithm') + + return b''.join((String(sig_algorithm), + self.sign_ssh(data, sig_algorithm))) + + def verify(self, data: bytes, sig: bytes) -> bool: + """Verify an SSH signature of the specified data using this key""" + + try: + packet = SSHPacket(sig) + sig_algorithm = packet.get_string() + + if sig_algorithm not in self.all_sig_algorithms: + return False + + return self.verify_ssh(data, sig_algorithm, packet) + except PacketDecodeError: + return False + + def encode_pkcs1_private(self) -> object: + """Export parameters associated with a PKCS#1 private key""" + + # pylint: disable=no-self-use + raise KeyExportError('PKCS#1 private key export not supported') + + def encode_pkcs1_public(self) -> object: + """Export parameters associated with a PKCS#1 public key""" + + # pylint: disable=no-self-use + raise KeyExportError('PKCS#1 public key export not supported') + + def encode_pkcs8_private(self) -> Tuple[object, object]: + 
"""Export parameters associated with a PKCS#8 private key""" + + # pylint: disable=no-self-use + raise KeyExportError('PKCS#8 private key export not supported') + + def encode_pkcs8_public(self) -> Tuple[object, object]: + """Export parameters associated with a PKCS#8 public key""" + + # pylint: disable=no-self-use + raise KeyExportError('PKCS#8 public key export not supported') + + def encode_ssh_private(self) -> bytes: + """Export parameters associated with an OpenSSH private key""" + + # pylint: disable=no-self-use + raise KeyExportError('OpenSSH private key export not supported') + + def encode_ssh_public(self) -> bytes: + """Export parameters associated with an OpenSSH public key""" + + # pylint: disable=no-self-use + raise KeyExportError('OpenSSH public key export not supported') + + def encode_agent_cert_private(self) -> bytes: + """Encode certificate private key data for agent""" + + raise NotImplementedError + + def convert_to_public(self) -> 'SSHKey': + """Return public key corresponding to this key + + This method converts an :class:`SSHKey` object which contains + a private key into one which contains only the corresponding + public key. If it is called on something which is already + a public key, it has no effect. + + """ + + result = decode_ssh_public_key(self.public_data) + result.set_comment(self._comment) + result.set_filename(self._filename) + return result + + def generate_user_certificate( + self, user_key: 'SSHKey', key_id: str, version: int = 1, + serial: int = 0, principals: _CertPrincipals = (), + valid_after: _Time = 0, valid_before: _Time = 0xffffffffffffffff, + force_command: Optional[str] = None, + source_address: Optional[Sequence[str]] = None, + permit_x11_forwarding: bool = True, + permit_agent_forwarding: bool = True, + permit_port_forwarding: bool = True, permit_pty: bool = True, + permit_user_rc: bool = True, touch_required: bool = True, + sig_alg: DefTuple[str] = (), + comment: DefTuple[_Comment] = ()) -> 'SSHOpenSSHCertificate': + """Generate a new SSH user certificate + + This method returns an SSH user certificate with the requested + attributes signed by this private key. + + :param user_key: + The user's public key. + :param key_id: + The key identifier associated with this certificate. + :param version: (optional) + The version of certificate to create, defaulting to 1. + :param serial: (optional) + The serial number of the certificate, defaulting to 0. + :param principals: (optional) + The user names this certificate is valid for. By default, + it can be used with any user name. + :param valid_after: (optional) + The earliest time the certificate is valid for, defaulting to + no restriction on when the certificate starts being valid. + See :ref:`SpecifyingTimeValues` for allowed time specifications. + :param valid_before: (optional) + The latest time the certificate is valid for, defaulting to + no restriction on when the certificate stops being valid. + See :ref:`SpecifyingTimeValues` for allowed time specifications. + :param force_command: (optional) + The command (if any) to force a session to run when this + certificate is used. + :param source_address: (optional) + A list of source addresses and networks for which the + certificate is valid, defaulting to all addresses. + :param permit_x11_forwarding: (optional) + Whether or not to allow this user to use X11 forwarding, + defaulting to `True`. + :param permit_agent_forwarding: (optional) + Whether or not to allow this user to use agent forwarding, + defaulting to `True`. 
+ :param permit_port_forwarding: (optional) + Whether or not to allow this user to use port forwarding, + defaulting to `True`. + :param permit_pty: (optional) + Whether or not to allow this user to allocate a + pseudo-terminal, defaulting to `True`. + :param permit_user_rc: (optional) + Whether or not to run the user rc file when this certificate + is used, defaulting to `True`. + :param touch_required: (optional) + Whether or not to require the user to touch the security key + when authenticating with it, defaulting to `True`. + :param sig_alg: (optional) + The algorithm to use when signing the new certificate. + :param comment: + The comment to associate with this certificate. By default, + the comment will be set to the comment currently set on + user_key. + :type user_key: :class:`SSHKey` + :type key_id: `str` + :type version: `int` + :type serial: `int` + :type principals: `str` or `list` of `str` + :type force_command: `str` or `None` + :type source_address: list of ip_address and ip_network values + :type permit_x11_forwarding: `bool` + :type permit_agent_forwarding: `bool` + :type permit_port_forwarding: `bool` + :type permit_pty: `bool` + :type permit_user_rc: `bool` + :type touch_required: `bool` + :type sig_alg: `str` + :type comment: `str`, `bytes`, or `None` + + :returns: :class:`SSHCertificate` + + :raises: | :exc:`ValueError` if the validity times are invalid + | :exc:`KeyGenerationError` if the requested certificate + parameters are unsupported + + """ + + cert_options: _OpenSSHCertOptions = {} + + if force_command: + cert_options['force-command'] = force_command + + if source_address: + cert_options['source-address'] = [ip_network(addr) + for addr in source_address] + + if permit_x11_forwarding: + cert_options['permit-X11-forwarding'] = True + + if permit_agent_forwarding: + cert_options['permit-agent-forwarding'] = True + + if permit_port_forwarding: + cert_options['permit-port-forwarding'] = True + + if permit_pty: + cert_options['permit-pty'] = True + + if permit_user_rc: + cert_options['permit-user-rc'] = True + + if not touch_required: + cert_options['no-touch-required'] = True + + return self._generate_certificate(user_key, version, serial, + CERT_TYPE_USER, key_id, + principals, valid_after, + valid_before, cert_options, + sig_alg, comment) + + def generate_host_certificate(self, host_key: 'SSHKey', key_id: str, + version: int = 1, serial: int = 0, + principals: _CertPrincipals = (), + valid_after: _Time = 0, + valid_before: _Time = 0xffffffffffffffff, + sig_alg: DefTuple[str] = (), + comment: DefTuple[_Comment] = ()) -> \ + 'SSHOpenSSHCertificate': + """Generate a new SSH host certificate + + This method returns an SSH host certificate with the requested + attributes signed by this private key. + + :param host_key: + The host's public key. + :param key_id: + The key identifier associated with this certificate. + :param version: (optional) + The version of certificate to create, defaulting to 1. + :param serial: (optional) + The serial number of the certificate, defaulting to 0. + :param principals: (optional) + The host names this certificate is valid for. By default, + it can be used with any host name. + :param valid_after: (optional) + The earliest time the certificate is valid for, defaulting to + no restriction on when the certificate starts being valid. + See :ref:`SpecifyingTimeValues` for allowed time specifications. 
+        :param valid_before: (optional)
+            The latest time the certificate is valid for, defaulting to
+            no restriction on when the certificate stops being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param sig_alg: (optional)
+            The algorithm to use when signing the new certificate.
+        :param comment:
+            The comment to associate with this certificate. By default,
+            the comment will be set to the comment currently set on
+            host_key.
+        :type host_key: :class:`SSHKey`
+        :type key_id: `str`
+        :type version: `int`
+        :type serial: `int`
+        :type principals: `str` or `list` of `str`
+        :type sig_alg: `str`
+        :type comment: `str`, `bytes`, or `None`
+
+        :returns: :class:`SSHCertificate`
+
+        :raises: | :exc:`ValueError` if the validity times are invalid
+                 | :exc:`KeyGenerationError` if the requested certificate
+                   parameters are unsupported
+        """
+
+        if comment == ():
+            comment = host_key.get_comment_bytes()
+
+        return self._generate_certificate(host_key, version, serial,
+                                          CERT_TYPE_HOST, key_id,
+                                          principals, valid_after,
+                                          valid_before, {}, sig_alg, comment)
+
+    def generate_x509_user_certificate(
+            self, user_key: 'SSHKey', subject: str,
+            issuer: Optional[str] = None, serial: Optional[int] = None,
+            principals: _CertPrincipals = (), valid_after: _Time = 0,
+            valid_before: _Time = 0xffffffffffffffff,
+            purposes: X509CertPurposes = 'secureShellClient',
+            hash_alg: DefTuple[str] = (),
+            comment: DefTuple[_Comment] = ()) -> 'SSHX509Certificate':
+        """Generate a new X.509 user certificate
+
+        This method returns an X.509 user certificate with the requested
+        attributes signed by this private key.
+
+        :param user_key:
+            The user's public key.
+        :param subject:
+            The subject name in the certificate, expressed as a
+            comma-separated list of X.509 `name=value` pairs.
+        :param issuer: (optional)
+            The issuer name in the certificate, expressed as a
+            comma-separated list of X.509 `name=value` pairs. If
+            not specified, the subject name will be used, creating
+            a self-signed certificate.
+        :param serial: (optional)
+            The serial number of the certificate, defaulting to a random
+            64-bit value.
+        :param principals: (optional)
+            The user names this certificate is valid for. By default,
+            it can be used with any user name.
+        :param valid_after: (optional)
+            The earliest time the certificate is valid for, defaulting to
+            no restriction on when the certificate starts being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param valid_before: (optional)
+            The latest time the certificate is valid for, defaulting to
+            no restriction on when the certificate stops being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param purposes: (optional)
+            The allowed purposes for this certificate or `None` to
+            not restrict the certificate's purpose, defaulting to
+            'secureShellClient'
+        :param hash_alg: (optional)
+            The hash algorithm to use when signing the new certificate,
+            defaulting to SHA256.
+        :param comment: (optional)
+            The comment to associate with this certificate. By default,
+            the comment will be set to the comment currently set on
+            user_key.
+        :type user_key: :class:`SSHKey`
+        :type subject: `str`
+        :type issuer: `str`
+        :type serial: `int`
+        :type principals: `str` or `list` of `str`
+        :type purposes: `str`, `list` of `str`, or `None`
+        :type hash_alg: `str`
+        :type comment: `str`, `bytes`, or `None`
+
+        :returns: :class:`SSHCertificate`
+
+        :raises: | :exc:`ValueError` if the validity times are invalid
+                 | :exc:`KeyGenerationError` if the requested certificate
+                   parameters are unsupported
+
+        """
+
+        return self._generate_x509_certificate(user_key, subject, issuer,
+                                               serial, valid_after,
+                                               valid_before, False, None,
+                                               purposes, principals, (),
+                                               hash_alg, comment)
+
+    def generate_x509_host_certificate(
+            self, host_key: 'SSHKey', subject: str,
+            issuer: Optional[str] = None, serial: Optional[int] = None,
+            principals: _CertPrincipals = (), valid_after: _Time = 0,
+            valid_before: _Time = 0xffffffffffffffff,
+            purposes: X509CertPurposes = 'secureShellServer',
+            hash_alg: DefTuple[str] = (),
+            comment: DefTuple[_Comment] = ()) -> 'SSHX509Certificate':
+        """Generate a new X.509 host certificate
+
+        This method returns an X.509 host certificate with the requested
+        attributes signed by this private key.
+
+        :param host_key:
+            The host's public key.
+        :param subject:
+            The subject name in the certificate, expressed as a
+            comma-separated list of X.509 `name=value` pairs.
+        :param issuer: (optional)
+            The issuer name in the certificate, expressed as a
+            comma-separated list of X.509 `name=value` pairs. If
+            not specified, the subject name will be used, creating
+            a self-signed certificate.
+        :param serial: (optional)
+            The serial number of the certificate, defaulting to a random
+            64-bit value.
+        :param principals: (optional)
+            The host names this certificate is valid for. By default,
+            it can be used with any host name.
+        :param valid_after: (optional)
+            The earliest time the certificate is valid for, defaulting to
+            no restriction on when the certificate starts being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param valid_before: (optional)
+            The latest time the certificate is valid for, defaulting to
+            no restriction on when the certificate stops being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param purposes: (optional)
+            The allowed purposes for this certificate or `None` to
+            not restrict the certificate's purpose, defaulting to
+            'secureShellServer'
+        :param hash_alg: (optional)
+            The hash algorithm to use when signing the new certificate,
+            defaulting to SHA256.
+        :param comment: (optional)
+            The comment to associate with this certificate. By default,
+            the comment will be set to the comment currently set on
+            host_key.
+        :type host_key: :class:`SSHKey`
+        :type subject: `str`
+        :type issuer: `str`
+        :type serial: `int`
+        :type principals: `str` or `list` of `str`
+        :type purposes: `str`, `list` of `str`, or `None`
+        :type hash_alg: `str`
+        :type comment: `str`, `bytes`, or `None`
+
+        :returns: :class:`SSHCertificate`
+
+        :raises: | :exc:`ValueError` if the validity times are invalid
+                 | :exc:`KeyGenerationError` if the requested certificate
+                   parameters are unsupported
+        """
+
+        return self._generate_x509_certificate(host_key, subject, issuer,
+                                               serial, valid_after,
+                                               valid_before, False, None,
+                                               purposes, (), principals,
+                                               hash_alg, comment)
+
+    def generate_x509_ca_certificate(self, ca_key: 'SSHKey', subject: str,
+                                     issuer: Optional[str] = None,
+                                     serial: Optional[int] = None,
+                                     valid_after: _Time = 0,
+                                     valid_before: _Time = 0xffffffffffffffff,
+                                     ca_path_len: Optional[int] = None,
+                                     hash_alg: DefTuple[str] = (),
+                                     comment: DefTuple[_Comment] = ()) -> \
+            'SSHX509Certificate':
+        """Generate a new X.509 CA certificate
+
+        This method returns an X.509 CA certificate with the requested
+        attributes signed by this private key.
+
+        :param ca_key:
+            The new CA's public key.
+        :param subject:
+            The subject name in the certificate, expressed as a
+            comma-separated list of X.509 `name=value` pairs.
+        :param issuer: (optional)
+            The issuer name in the certificate, expressed as a
+            comma-separated list of X.509 `name=value` pairs. If
+            not specified, the subject name will be used, creating
+            a self-signed certificate.
+        :param serial: (optional)
+            The serial number of the certificate, defaulting to a random
+            64-bit value.
+        :param valid_after: (optional)
+            The earliest time the certificate is valid for, defaulting to
+            no restriction on when the certificate starts being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param valid_before: (optional)
+            The latest time the certificate is valid for, defaulting to
+            no restriction on when the certificate stops being valid.
+            See :ref:`SpecifyingTimeValues` for allowed time specifications.
+        :param ca_path_len: (optional)
+            The maximum number of levels of intermediate CAs allowed
+            below this new CA or `None` to not enforce a limit,
+            defaulting to no limit.
+        :param hash_alg: (optional)
+            The hash algorithm to use when signing the new certificate,
+            defaulting to SHA256.
+        :param comment: (optional)
+            The comment to associate with this certificate. By default,
+            the comment will be set to the comment currently set on
+            ca_key.
+        :type ca_key: :class:`SSHKey`
+        :type subject: `str`
+        :type issuer: `str`
+        :type serial: `int`
+        :type ca_path_len: `int` or `None`
+        :type hash_alg: `str`
+        :type comment: `str`, `bytes`, or `None`
+
+        :returns: :class:`SSHCertificate`
+
+        :raises: | :exc:`ValueError` if the validity times are invalid
+                 | :exc:`KeyGenerationError` if the requested certificate
+                   parameters are unsupported
+        """
+
+        return self._generate_x509_certificate(ca_key, subject, issuer,
+                                               serial, valid_after,
+                                               valid_before, True,
+                                               ca_path_len, None, (), (),
+                                               hash_alg, comment)
+
+    def export_private_key(self, format_name: str = 'openssh',
+                           passphrase: Optional[BytesOrStr] = None,
+                           cipher_name: str = 'aes256-cbc',
+                           hash_name: str = 'sha256',
+                           pbe_version: int = 2, rounds: int = 128,
+                           ignore_few_rounds: bool = False) -> bytes:
+        """Export a private key in the requested format
+
+        This method returns this object's private key encoded in the
+        requested format. If a passphrase is specified, the key will
+        be exported in encrypted form.
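+
+        For example, a hedged sketch of exporting a newly generated key
+        as encrypted PKCS#8 PEM (the passphrase is illustrative)::
+
+            key = asyncssh.generate_private_key('ssh-ed25519')
+            pem = key.export_private_key('pkcs8-pem', passphrase='secret')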
+
+        Available formats include:
+
+            pkcs1-der, pkcs1-pem, pkcs8-der, pkcs8-pem, openssh
+
+        By default, openssh format will be used.
+
+        Encryption is supported in pkcs1-pem, pkcs8-der, pkcs8-pem,
+        and openssh formats. For pkcs1-pem, only the cipher can be
+        specified. For pkcs8-der and pkcs8-pem, cipher, hash and PBE
+        version can be specified. For openssh, cipher and rounds
+        can be specified.
+
+        Available ciphers for pkcs1-pem are:
+
+            aes128-cbc, aes192-cbc, aes256-cbc, des-cbc, des3-cbc
+
+        Available ciphers for pkcs8-der and pkcs8-pem are:
+
+            aes128-cbc, aes192-cbc, aes256-cbc, blowfish-cbc,
+            cast128-cbc, des-cbc, des2-cbc, des3-cbc, rc4-40, rc4-128
+
+        Available ciphers for openssh format include the following
+        :ref:`encryption algorithms `.
+
+        Available hashes include:
+
+            md5, sha1, sha256, sha384, sha512
+
+        Available PBE versions include 1 for PBES1 and 2 for PBES2.
+
+        Not all combinations of cipher, hash, and version are supported.
+
+        The default cipher is aes256. In the pkcs8 formats, the default
+        hash is sha256 and default version is PBES2.
+
+        In openssh format, the default number of rounds is 128.
+
+        .. note:: The openssh format uses bcrypt for encryption, but
+                  unlike the traditional bcrypt cost factor used in
+                  password hashing which scales logarithmically, the
+                  encryption strength here scales linearly with the
+                  rounds value. Since the cipher is rekeyed 64 times
+                  per round, the default rounds value of 128 corresponds
+                  to 8192 total iterations, which is the equivalent of
+                  a bcrypt cost factor of 13.
+
+        :param format_name: (optional)
+            The format to export the key in.
+        :param passphrase: (optional)
+            A passphrase to encrypt the private key with.
+        :param cipher_name: (optional)
+            The cipher to use for private key encryption.
+        :param hash_name: (optional)
+            The hash to use for private key encryption.
+        :param pbe_version: (optional)
+            The PBE version to use for private key encryption.
+        :param rounds: (optional)
+            The number of KDF rounds to apply to the passphrase.
+ :type format_name: `str` + :type passphrase: `str` or `bytes` + :type cipher_name: `str` + :type hash_name: `str` + :type pbe_version: `int` + :type rounds: `int` + + :returns: `bytes` representing the exported private key + + """ + + if format_name in ('pkcs1-der', 'pkcs1-pem'): + data = der_encode(self.encode_pkcs1_private()) + + if passphrase is not None: + if format_name == 'pkcs1-der': + raise KeyExportError('PKCS#1 DER format does not support ' + 'private key encryption') + + alg, iv, data = pkcs1_encrypt(data, cipher_name, passphrase) + headers = (b'Proc-Type: 4,ENCRYPTED\n' + + b'DEK-Info: ' + alg + b',' + + binascii.b2a_hex(iv).upper() + b'\n\n') + else: + headers = b'' + + if format_name == 'pkcs1-pem': + keytype = self.pem_name + b' PRIVATE KEY' + data = (b'-----BEGIN ' + keytype + b'-----\n' + + headers + _wrap_base64(data) + + b'-----END ' + keytype + b'-----\n') + + return data + elif format_name in ('pkcs8-der', 'pkcs8-pem'): + alg_params, pkcs8_data = self.encode_pkcs8_private() + + if alg_params is OMIT: + data = der_encode((0, (self.pkcs8_oid,), pkcs8_data)) + else: + data = der_encode((0, (self.pkcs8_oid, alg_params), pkcs8_data)) + + if passphrase is not None: + data = pkcs8_encrypt(data, cipher_name, hash_name, + pbe_version, passphrase) + + if format_name == 'pkcs8-pem': + if passphrase is not None: + keytype = b'ENCRYPTED PRIVATE KEY' + else: + keytype = b'PRIVATE KEY' + + data = (b'-----BEGIN ' + keytype + b'-----\n' + + _wrap_base64(data) + + b'-----END ' + keytype + b'-----\n') + + return data + elif format_name == 'openssh': + check = os.urandom(4) + nkeys = 1 + + data = b''.join((check, check, self.private_data, + String(self._comment or b''))) + + cipher: Optional[Encryption] + + if passphrase is not None: + try: + alg = cipher_name.encode('ascii') + key_size, iv_size, block_size, _, _, _ = \ + get_encryption_params(alg) + except (KeyError, UnicodeEncodeError): + raise KeyEncryptionError('Unknown cipher: %s' % + cipher_name) from None + + if not _bcrypt_available: # pragma: no cover + raise KeyExportError('OpenSSH private key encryption ' + 'requires bcrypt with KDF support') + + kdf = b'bcrypt' + salt = os.urandom(_OPENSSH_SALT_LEN) + kdf_data = b''.join((String(salt), UInt32(rounds))) + + if isinstance(passphrase, str): + passphrase = passphrase.encode('utf-8') + + key = bcrypt.kdf(passphrase, salt, key_size + iv_size, + rounds, ignore_few_rounds) + + cipher = get_encryption(alg, key[:key_size], key[key_size:]) + block_size = max(block_size, 8) + else: + cipher = None + alg = b'none' + kdf = b'none' + kdf_data = b'' + block_size = 8 + mac = b'' + + pad = len(data) % block_size + if pad: # pragma: no branch + data = data + bytes(range(1, block_size + 1 - pad)) + + if cipher: + data, mac = cipher.encrypt_packet(0, b'', data) + else: + mac = b'' + + data = b''.join((_OPENSSH_KEY_V1, String(alg), String(kdf), + String(kdf_data), UInt32(nkeys), + String(self.public_data), String(data), mac)) + + return (b'-----BEGIN OPENSSH PRIVATE KEY-----\n' + + _wrap_base64(data, _OPENSSH_WRAP_LEN) + + b'-----END OPENSSH PRIVATE KEY-----\n') + else: + raise KeyExportError('Unknown export format') + + def export_public_key(self, format_name: str = 'openssh') -> bytes: + """Export a public key in the requested format + + This method returns this object's public key encoded in the + requested format. Available formats include: + + pkcs1-der, pkcs1-pem, pkcs8-der, pkcs8-pem, openssh, rfc4716 + + By default, openssh format will be used. 
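+
+        For example, a sketch producing a single OpenSSH-format public
+        key line (the comment value is illustrative)::
+
+            key = asyncssh.generate_private_key('ssh-ed25519',
+                                                comment='backup key')
+            line = key.export_public_key('openssh')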
+ + :param format_name: (optional) + The format to export the key in. + :type format_name: `str` + + :returns: `bytes` representing the exported public key + + """ + + if format_name in ('pkcs1-der', 'pkcs1-pem'): + data = der_encode(self.encode_pkcs1_public()) + + if format_name == 'pkcs1-pem': + keytype = self.pem_name + b' PUBLIC KEY' + data = (b'-----BEGIN ' + keytype + b'-----\n' + + _wrap_base64(data) + + b'-----END ' + keytype + b'-----\n') + + return data + elif format_name in ('pkcs8-der', 'pkcs8-pem'): + alg_params, pkcs8_data = self.encode_pkcs8_public() + pkcs8_data = BitString(pkcs8_data) + + if alg_params is OMIT: + data = der_encode(((self.pkcs8_oid,), pkcs8_data)) + else: + data = der_encode(((self.pkcs8_oid, alg_params), pkcs8_data)) + + if format_name == 'pkcs8-pem': + data = (b'-----BEGIN PUBLIC KEY-----\n' + + _wrap_base64(data) + + b'-----END PUBLIC KEY-----\n') + + return data + elif format_name == 'openssh': + if self._comment: + comment = b' ' + self._comment + else: + comment = b'' + + return (self.algorithm + b' ' + + binascii.b2a_base64(self.public_data)[:-1] + + comment + b'\n') + elif format_name == 'rfc4716': + if self._comment: + comment = (b'Comment: "' + self._comment + b'"\n') + else: + comment = b'' + + return (b'---- BEGIN SSH2 PUBLIC KEY ----\n' + + comment + _wrap_base64(self.public_data) + + b'---- END SSH2 PUBLIC KEY ----\n') + else: + raise KeyExportError('Unknown export format') + + def write_private_key(self, filename: FilePath, *args, **kwargs) -> None: + """Write a private key to a file in the requested format + + This method is a simple wrapper around :meth:`export_private_key` + which writes the exported key data to a file. + + :param filename: + The filename to write the private key to. + :param \\*args,\\ \\*\\*kwargs: + Additional arguments to pass through to + :meth:`export_private_key`. + :type filename: :class:`PurePath ` or `str` + + """ + + write_file(filename, self.export_private_key(*args, **kwargs)) + + def write_public_key(self, filename: FilePath, *args, **kwargs) -> None: + """Write a public key to a file in the requested format + + This method is a simple wrapper around :meth:`export_public_key` + which writes the exported key data to a file. + + :param filename: + The filename to write the public key to. + :param \\*args,\\ \\*\\*kwargs: + Additional arguments to pass through to + :meth:`export_public_key`. + :type filename: :class:`PurePath ` or `str` + + """ + + write_file(filename, self.export_public_key(*args, **kwargs)) + + def append_private_key(self, filename: FilePath, *args, **kwargs) -> None: + """Append a private key to a file in the requested format + + This method is a simple wrapper around :meth:`export_private_key` + which appends the exported key data to an existing file. + + :param filename: + The filename to append the private key to. + :param \\*args,\\ \\*\\*kwargs: + Additional arguments to pass through to + :meth:`export_private_key`. + :type filename: :class:`PurePath ` or `str` + + """ + + write_file(filename, self.export_private_key(*args, **kwargs), 'ab') + + def append_public_key(self, filename: FilePath, *args, **kwargs) -> None: + """Append a public key to a file in the requested format + + This method is a simple wrapper around :meth:`export_public_key` + which appends the exported key data to an existing file. + + :param filename: + The filename to append the public key to. + :param \\*args,\\ \\*\\*kwargs: + Additional arguments to pass through to + :meth:`export_public_key`. 
+        :type filename: :class:`PurePath <pathlib.PurePath>` or `str`
+
+        """
+
+        write_file(filename, self.export_public_key(*args, **kwargs), 'ab')
+
+
+class SSHCertificate:
+    """Parent class which holds an SSH certificate"""
+
+    is_x509 = False
+    is_x509_chain = False
+
+    def __init__(self, algorithm: bytes, sig_algorithms: Sequence[bytes],
+                 host_key_algorithms: Sequence[bytes], key: SSHKey,
+                 public_data: bytes, comment: _Comment):
+        self.algorithm = algorithm
+        self.sig_algorithms = sig_algorithms
+        self.host_key_algorithms = host_key_algorithms
+        self.key = key
+        self.public_data = public_data
+
+        self.set_comment(comment)
+
+    @classmethod
+    def construct(cls, packet: SSHPacket, algorithm: bytes,
+                  key_handler: Optional[Type[SSHKey]],
+                  comment: _Comment) -> 'SSHCertificate':
+        """Construct an SSH certificate from packetized data"""
+
+        raise NotImplementedError
+
+    def __eq__(self, other: object) -> bool:
+        return (isinstance(other, type(self)) and
+                self.public_data == other.public_data)
+
+    def __hash__(self) -> int:
+        return hash(self.public_data)
+
+    def get_algorithm(self) -> str:
+        """Return the algorithm associated with this certificate"""
+
+        return self.algorithm.decode('ascii')
+
+    def has_comment(self) -> bool:
+        """Return whether a comment is set for this certificate
+
+        :returns: `bool`
+
+        """
+
+        return bool(self._comment)
+
+    def get_comment_bytes(self) -> Optional[bytes]:
+        """Return the comment associated with this certificate as a
+           byte string
+
+        :returns: `bytes` or `None`
+
+        """
+
+        return self._comment
+
+    def get_comment(self, encoding: str = 'utf-8',
+                    errors: str = 'strict') -> Optional[str]:
+        """Return the comment associated with this certificate as a
+           Unicode string
+
+        :param encoding:
+            The encoding to use to decode the comment as a Unicode
+            string, defaulting to UTF-8
+        :param errors:
+            The error handling scheme to use for Unicode decode errors
+        :type encoding: `str`
+        :type errors: `str`
+
+        :returns: `str` or `None`
+
+        :raises: :exc:`UnicodeDecodeError` if the comment cannot be
+                 decoded using the specified encoding
+
+        """
+
+        return self._comment.decode(encoding, errors) if self._comment else None
+
+    def set_comment(self, comment: _Comment, encoding: str = 'utf-8',
+                    errors: str = 'strict') -> None:
+        """Set the comment associated with this certificate
+
+        :param comment:
+            The new comment to associate with this certificate
+        :param encoding:
+            The Unicode encoding to use to encode the comment,
+            defaulting to UTF-8
+        :param errors:
+            The error handling scheme to use for Unicode encode errors
+        :type comment: `str`, `bytes`, or `None`
+        :type encoding: `str`
+        :type errors: `str`
+
+        :raises: :exc:`UnicodeEncodeError` if the comment cannot be
+                 encoded using the specified encoding
+
+        """
+
+        if isinstance(comment, str):
+            comment = comment.encode(encoding, errors)
+
+        self._comment = comment or None
+
+    def export_certificate(self, format_name: str = 'openssh') -> bytes:
+        """Export a certificate in the requested format
+
+        This method returns this certificate encoded in the requested
+        format. Available formats include:
+
+            der, pem, openssh, rfc4716
+
+        By default, OpenSSH format will be used.
+
+        :param format_name: (optional)
+            The format to export the certificate in.
+        :type format_name: `str`
+
+        :returns: `bytes` representing the exported certificate
+
+        """
+
+        if self.is_x509:
+            if format_name == 'rfc4716':
+                raise KeyExportError('RFC4716 format is not supported for '
+                                     'X.509 certificates')
+        else:
+            if format_name in ('der', 'pem'):
+                raise KeyExportError('DER and PEM formats are not supported '
+                                     'for OpenSSH certificates')
+
+        if format_name == 'der':
+            return self.public_data
+        elif format_name == 'pem':
+            return (b'-----BEGIN CERTIFICATE-----\n' +
+                    _wrap_base64(self.public_data) +
+                    b'-----END CERTIFICATE-----\n')
+        elif format_name == 'openssh':
+            if self._comment:
+                comment = b' ' + self._comment
+            else:
+                comment = b''
+
+            return (self.algorithm + b' ' +
+                    binascii.b2a_base64(self.public_data)[:-1] +
+                    comment + b'\n')
+        elif format_name == 'rfc4716':
+            if self._comment:
+                comment = (b'Comment: "' + self._comment + b'"\n')
+            else:
+                comment = b''
+
+            return (b'---- BEGIN SSH2 PUBLIC KEY ----\n' +
+                    comment + _wrap_base64(self.public_data) +
+                    b'---- END SSH2 PUBLIC KEY ----\n')
+        else:
+            raise KeyExportError('Unknown export format')
+
+    def write_certificate(self, filename: FilePath,
+                          format_name: str = 'openssh') -> None:
+        """Write a certificate to a file in the requested format
+
+        This method is a simple wrapper around :meth:`export_certificate`
+        which writes the exported certificate to a file.
+
+        :param filename:
+            The filename to write the certificate to.
+        :param format_name: (optional)
+            The format to export the certificate in.
+        :type filename: :class:`PurePath <pathlib.PurePath>` or `str`
+        :type format_name: `str`
+
+        """
+
+        write_file(filename, self.export_certificate(format_name))
+
+    def append_certificate(self, filename: FilePath,
+                           format_name: str = 'openssh') -> None:
+        """Append a certificate to a file in the requested format
+
+        This method is a simple wrapper around :meth:`export_certificate`
+        which appends the exported certificate to an existing file.
+
+        :param filename:
+            The filename to append the certificate to.
+        :param format_name: (optional)
+            The format to export the certificate in.
+ :type filename: :class:`PurePath ` or `str` + :type format_name: `str` + + """ + + write_file(filename, self.export_certificate(format_name), 'ab') + + +class SSHOpenSSHCertificate(SSHCertificate): + """Class which holds an OpenSSH certificate""" + + _user_option_encoders: _OpenSSHCertEncoders = () + _user_extension_encoders: _OpenSSHCertEncoders = () + _host_option_encoders: _OpenSSHCertEncoders = () + _host_extension_encoders: _OpenSSHCertEncoders = () + + _user_option_decoders: _OpenSSHCertDecoders = {} + _user_extension_decoders: _OpenSSHCertDecoders = {} + _host_option_decoders: _OpenSSHCertDecoders = {} + _host_extension_decoders: _OpenSSHCertDecoders = {} + + def __init__(self, algorithm: bytes, key: SSHKey, data: bytes, + principals: Sequence[str], options: _OpenSSHCertOptions, + signing_key: SSHKey, serial: int, cert_type: int, + key_id: str, valid_after: int, valid_before: int, + comment: _Comment): + super().__init__(algorithm, key.sig_algorithms, + key.cert_algorithms or (algorithm,), + key, data, comment) + + self.principals = principals + self.options = options + self.signing_key = signing_key + + self._serial = serial + self._cert_type = cert_type + self._key_id = key_id + self._valid_after = valid_after + self._valid_before = valid_before + + @classmethod + def generate(cls, signing_key: 'SSHKey', algorithm: bytes, key: 'SSHKey', + serial: int, cert_type: int, key_id: str, + principals: Sequence[str], valid_after: int, + valid_before: int, options: _OpenSSHCertOptions, + sig_alg: bytes, comment: _Comment) -> 'SSHOpenSSHCertificate': + """Generate a new SSH certificate""" + + principal_bytes = b''.join(String(p) for p in principals) + + if cert_type == CERT_TYPE_USER: + cert_options = cls._encode_options(options, + cls._user_option_encoders) + cert_extensions = cls._encode_options(options, + cls._user_extension_encoders) + else: + cert_options = cls._encode_options(options, + cls._host_option_encoders) + cert_extensions = cls._encode_options(options, + cls._host_extension_encoders) + + key = key.convert_to_public() + + data = b''.join((String(algorithm), + cls._encode(key, serial, cert_type, key_id, + principal_bytes, valid_after, + valid_before, cert_options, + cert_extensions), + String(signing_key.public_data))) + + data += String(signing_key.sign(data, sig_alg)) + + signing_key = signing_key.convert_to_public() + + return cls(algorithm, key, data, principals, options, signing_key, + serial, cert_type, key_id, valid_after, valid_before, + comment) + + @classmethod + def construct(cls, packet: SSHPacket, algorithm: bytes, + key_handler: Optional[Type[SSHKey]], + comment: _Comment) -> 'SSHOpenSSHCertificate': + """Construct an SSH certificate from packetized data""" + + assert key_handler is not None + + key_params, serial, cert_type, key_id, \ + principals, valid_after, valid_before, \ + options, extensions = cls._decode(packet, key_handler) + + signing_key = decode_ssh_public_key(packet.get_string()) + data = packet.get_consumed_payload() + signature = packet.get_string() + packet.check_end() + + if not signing_key.verify(data, signature): + raise KeyImportError('Invalid certificate signature') + + key = key_handler.make_public(key_params) + data = packet.get_consumed_payload() + + try: + key_id_bytes = key_id.decode('utf-8') + except UnicodeDecodeError: + raise KeyImportError('Invalid characters in key ID') from None + + packet = SSHPacket(principals) + principals: List[str] = [] + + while packet: + try: + principal = packet.get_string().decode('utf-8') + except 
UnicodeDecodeError: + raise KeyImportError('Invalid characters in principal ' + 'name') from None + + principals.append(principal) + + if cert_type == CERT_TYPE_USER: + cert_options = cls._decode_options( + options, cls._user_option_decoders, True) + cert_options.update(cls._decode_options( + extensions, cls._user_extension_decoders, False)) + elif cert_type == CERT_TYPE_HOST: + cert_options = cls._decode_options( + options, cls._host_option_decoders, True) + cert_options.update(cls._decode_options( + extensions, cls._host_extension_decoders, False)) + else: + raise KeyImportError('Unknown certificate type') + + return cls(algorithm, key, data, principals, cert_options, signing_key, + serial, cert_type, key_id_bytes, valid_after, valid_before, + comment) + + @classmethod + def _encode(cls, key: SSHKey, serial: int, cert_type: int, key_id: str, + principals: bytes, valid_after: int, valid_before: int, + options: bytes, extensions: bytes) -> bytes: + + """Encode an SSH certificate""" + + raise NotImplementedError + + @classmethod + def _decode(cls, packet: SSHPacket, + key_handler: Type[SSHKey]) -> _OpenSSHCertParams: + """Decode an SSH certificate""" + + raise NotImplementedError + + @staticmethod + def _encode_options(options: _OpenSSHCertOptions, + encoders: _OpenSSHCertEncoders) -> bytes: + """Encode options found in this certificate""" + + result = [] + + for name, encoder in encoders: + value = options.get(name) + if value: + result.append(String(name) + String(encoder(value))) + + return b''.join(result) + + @staticmethod + def _encode_bool(_value: object) -> bytes: + """Encode a boolean option value""" + + return b'' + + @staticmethod + def _encode_force_cmd(force_command: object) -> bytes: + """Encode a force-command option""" + + return String(cast(BytesOrStr, force_command)) + + @staticmethod + def _encode_source_addr(source_address: object) -> bytes: + """Encode a source-address option""" + + return NameList(str(addr).encode('ascii') + for addr in cast(Sequence[IPNetwork], source_address)) + + @staticmethod + def _decode_bool(_packet: SSHPacket) -> bool: + """Decode a boolean option value""" + + return True + + @staticmethod + def _decode_force_cmd(packet: SSHPacket) -> str: + """Decode a force-command option""" + + try: + return packet.get_string().decode('utf-8') + except UnicodeDecodeError: + raise KeyImportError('Invalid characters in command') from None + + @staticmethod + def _decode_source_addr(packet: SSHPacket) -> Sequence[IPNetwork]: + """Decode a source-address option""" + + try: + return [ip_network(addr.decode('ascii')) + for addr in packet.get_namelist()] + except (UnicodeDecodeError, ValueError): + raise KeyImportError('Invalid source address') from None + + @staticmethod + def _decode_options(options: bytes, decoders: _OpenSSHCertDecoders, + critical: bool = True) -> _OpenSSHCertOptions: + """Decode options found in this certificate""" + + packet = SSHPacket(options) + result: _OpenSSHCertOptions = {} + + while packet: + name = packet.get_string() + + decoder = decoders.get(name) + if decoder: + data_packet = SSHPacket(packet.get_string()) + result[name.decode('ascii')] = decoder(data_packet) + data_packet.check_end() + elif critical: + raise KeyImportError('Unrecognized critical option: %s' % + name.decode('ascii', errors='replace')) + + return result + + def validate(self, cert_type: int, principal: str) -> None: + """Validate an OpenSSH certificate""" + + if self._cert_type != cert_type: + raise ValueError('Invalid certificate type') + + now = time.time() + + 
if now < self._valid_after: + raise ValueError('Certificate not yet valid') + + if now >= self._valid_before: + raise ValueError('Certificate expired') + + if principal and self.principals and principal not in self.principals: + raise ValueError('Certificate principal mismatch') + + +class SSHOpenSSHCertificateV01(SSHOpenSSHCertificate): + """Encoder/decoder class for version 01 OpenSSH certificates""" + + _user_option_encoders = ( + ('force-command', SSHOpenSSHCertificate._encode_force_cmd), + ('source-address', SSHOpenSSHCertificate._encode_source_addr) + ) + + _user_extension_encoders = ( + ('permit-X11-forwarding', SSHOpenSSHCertificate._encode_bool), + ('permit-agent-forwarding', SSHOpenSSHCertificate._encode_bool), + ('permit-port-forwarding', SSHOpenSSHCertificate._encode_bool), + ('permit-pty', SSHOpenSSHCertificate._encode_bool), + ('permit-user-rc', SSHOpenSSHCertificate._encode_bool), + ('no-touch-required', SSHOpenSSHCertificate._encode_bool) + ) + + _user_option_decoders = { + b'force-command': SSHOpenSSHCertificate._decode_force_cmd, + b'source-address': SSHOpenSSHCertificate._decode_source_addr + } + + _user_extension_decoders = { + b'permit-X11-forwarding': SSHOpenSSHCertificate._decode_bool, + b'permit-agent-forwarding': SSHOpenSSHCertificate._decode_bool, + b'permit-port-forwarding': SSHOpenSSHCertificate._decode_bool, + b'permit-pty': SSHOpenSSHCertificate._decode_bool, + b'permit-user-rc': SSHOpenSSHCertificate._decode_bool, + b'no-touch-required': SSHOpenSSHCertificate._decode_bool + } + + @classmethod + def _encode(cls, key: SSHKey, serial: int, cert_type: int, key_id: str, + principals: bytes, valid_after: int, valid_before: int, + options: bytes, extensions: bytes) -> bytes: + """Encode a version 01 SSH certificate""" + + return b''.join((String(os.urandom(32)), key.encode_ssh_public(), + UInt64(serial), UInt32(cert_type), String(key_id), + String(principals), UInt64(valid_after), + UInt64(valid_before), String(options), + String(extensions), String(''))) + + @classmethod + def _decode(cls, packet: SSHPacket, + key_handler: Type[SSHKey]) -> _OpenSSHCertParams: + """Decode a version 01 SSH certificate""" + + _ = packet.get_string() # nonce + key_params = key_handler.decode_ssh_public(packet) + serial = packet.get_uint64() + cert_type = packet.get_uint32() + key_id = packet.get_string() + principals = packet.get_string() + valid_after = packet.get_uint64() + valid_before = packet.get_uint64() + options = packet.get_string() + extensions = packet.get_string() + _ = packet.get_string() # reserved + + return (key_params, serial, cert_type, key_id, principals, + valid_after, valid_before, options, extensions) + + +class SSHX509Certificate(SSHCertificate): + """Encoder/decoder class for SSH X.509 certificates""" + + is_x509 = True + + def __init__(self, key: SSHKey, x509_cert: 'X509Certificate', + comment: _Comment = None): + super().__init__(b'x509v3-' + key.algorithm, key.x509_algorithms, + key.x509_algorithms, key, x509_cert.data, + x509_cert.comment or comment) + + self.subject = x509_cert.subject + self.issuer = x509_cert.issuer + self.issuer_hash = x509_cert.issuer_hash + self.user_principals = x509_cert.user_principals + self.x509_cert = x509_cert + + def _expand_trust_store(self, cert: 'SSHX509Certificate', + trusted_cert_paths: Sequence[FilePath], + trust_store: Set['SSHX509Certificate']) -> None: + """Look up certificates by issuer hash to build a trust store""" + + issuer_hash = cert.issuer_hash + + for path in trusted_cert_paths: + idx = 0 + + try: + while 
True: + cert_path = Path(path, issuer_hash + '.' + str(idx)) + idx += 1 + + c = cast('SSHX509Certificate', read_certificate(cert_path)) + + if c.subject != cert.issuer or c in trust_store: + continue + + trust_store.add(c) + self._expand_trust_store(c, trusted_cert_paths, trust_store) + except (OSError, KeyImportError): + pass + + @classmethod + def construct(cls, packet: SSHPacket, algorithm: bytes, + key_handler: Optional[Type[SSHKey]], + comment: _Comment) -> 'SSHX509Certificate': + """Construct an SSH X.509 certificate from packetized data""" + + raise RuntimeError # pragma: no cover + + @classmethod + def generate(cls, signing_key: 'SSHKey', key: 'SSHKey', subject: str, + issuer: Optional[str], serial: Optional[int], + valid_after: int, valid_before: int, ca: bool, + ca_path_len: Optional[int], purposes: X509CertPurposes, + user_principals: _CertPrincipals, + host_principals: _CertPrincipals, hash_name: str, + comment: _Comment) -> 'SSHX509Certificate': + """Generate a new X.509 certificate""" + + key = key.convert_to_public() + + x509_cert = generate_x509_certificate(signing_key.pyca_key, + key.pyca_key, subject, issuer, + serial, valid_after, valid_before, + ca, ca_path_len, purposes, + user_principals, host_principals, + hash_name, comment) + + return cls(key, x509_cert) + + @classmethod + def construct_from_der(cls, data: bytes, + comment: _Comment = None) -> 'SSHX509Certificate': + """Construct an SSH X.509 certificate from DER data""" + + try: + x509_cert = import_x509_certificate(data) + key = import_public_key(x509_cert.key_data) + except ValueError as exc: + raise KeyImportError(str(exc)) from None + + return cls(key, x509_cert, comment) + + def validate_chain(self, trust_chain: Sequence['SSHX509Certificate'], + trusted_certs: Sequence['SSHX509Certificate'], + trusted_cert_paths: Sequence[FilePath], + purposes: X509CertPurposes, user_principal: str = '', + host_principal: str = '') -> None: + """Validate an X.509 certificate chain""" + + trust_store = set(c for c in trust_chain if c.subject != c.issuer) | \ + set(c for c in trusted_certs) + + if trusted_cert_paths: + self._expand_trust_store(self, trusted_cert_paths, trust_store) + + for c in trust_chain: + self._expand_trust_store(c, trusted_cert_paths, trust_store) + + self.x509_cert.validate([c.x509_cert for c in trust_store], + purposes, user_principal, host_principal) + + +class SSHX509CertificateChain(SSHCertificate): + """Encoder/decoder class for an SSH X.509 certificate chain""" + + is_x509_chain = True + + def __init__(self, algorithm: bytes, certs: Sequence[SSHCertificate], + ocsp_responses: Sequence[bytes], comment: _Comment): + key = certs[0].key + data = self._public_data(algorithm, certs, ocsp_responses) + + super().__init__(algorithm, key.x509_algorithms, key.x509_algorithms, + key, data, comment) + + x509_certs = cast(Sequence[SSHX509Certificate], certs) + first_cert = x509_certs[0] + last_cert = x509_certs[-1] + + self.subject = first_cert.subject + self.issuer = last_cert.issuer + self.user_principals = first_cert.user_principals + + self._certs = x509_certs + self._ocsp_responses = ocsp_responses + + @staticmethod + def _public_data(algorithm: bytes, certs: Sequence[SSHCertificate], + ocsp_responses: Sequence[bytes]) -> bytes: + """Return the X509 chain public data""" + + return (String(algorithm) + UInt32(len(certs)) + + b''.join(String(c.public_data) for c in certs) + + UInt32(len(ocsp_responses)) + + b''.join(String(resp) for resp in ocsp_responses)) + + @classmethod + def construct(cls, packet: 
SSHPacket, algorithm: bytes, + key_handler: Optional[Type[SSHKey]], + comment: _Comment) -> 'SSHX509CertificateChain': + """Construct an SSH X.509 certificate from packetized data""" + + cert_count = packet.get_uint32() + certs = [import_certificate(packet.get_string()) + for _ in range(cert_count)] + + ocsp_resp_count = packet.get_uint32() + ocsp_responses = [packet.get_string() for _ in range(ocsp_resp_count)] + + packet.check_end() + + if not certs: + raise KeyImportError('No certificates present') + + return cls(algorithm, certs, ocsp_responses, comment) + + @classmethod + def construct_from_certs(cls, certs: Sequence['SSHCertificate']) -> \ + 'SSHX509CertificateChain': + """Construct an SSH X.509 certificate chain from certificates""" + + cert = certs[0] + + return cls(cert.algorithm, certs, (), cert.get_comment_bytes()) + + def adjust_public_data(self, algorithm: bytes) -> bytes: + """Adjust public data to reflect chosen signature algorithm""" + + return self._public_data(algorithm, self._certs, self._ocsp_responses) + + def validate_chain(self, trusted_certs: Sequence[SSHX509Certificate], + trusted_cert_paths: Sequence[FilePath], + revoked_certs: Set[SSHX509Certificate], + purposes: X509CertPurposes, user_principal: str = '', + host_principal: str = '') -> None: + """Validate an X.509 certificate chain""" + + if revoked_certs: + for cert in self._certs: + if cert in revoked_certs: + raise ValueError('Revoked X.509 certificate in ' + 'certificate chain') + + self._certs[0].validate_chain(self._certs[1:], trusted_certs, + trusted_cert_paths, purposes, + user_principal, host_principal) + + +class SSHKeyPair: + """Parent class which represents an asymmetric key pair + + This is an abstract class which provides a method to sign data + with a private key and members to access the corresponding + algorithm and public key or certificate information needed to + identify what key was used for signing. + + """ + + _key_type = 'unknown' + + def __init__(self, algorithm: bytes, sig_algorithm: bytes, + sig_algorithms: Sequence[bytes], + host_key_algorithms: Sequence[bytes], + public_data: bytes, comment: _Comment, + cert: Optional[SSHCertificate] = None, + filename: Optional[bytes] = None, + use_executor: bool = False): + self.key_algorithm = algorithm + self.key_public_data = public_data + + self.set_comment(comment) + self._cert = cert + self._filename = filename + + self.use_executor = use_executor + + if cert: + if cert.key.public_data != self.key_public_data: + raise ValueError('Certificate key mismatch') + + self.algorithm = cert.algorithm + + if cert.is_x509_chain: + self.sig_algorithm = cert.algorithm + else: + self.sig_algorithm = sig_algorithm + + self.sig_algorithms = cert.sig_algorithms + self.host_key_algorithms = cert.host_key_algorithms + self.public_data = cert.public_data + else: + self.algorithm = algorithm + self.sig_algorithm = algorithm + self.sig_algorithms = sig_algorithms + self.host_key_algorithms = host_key_algorithms + self.public_data = public_data + + def get_key_type(self) -> str: + """Return what type of key pair this is + + This method returns 'local' for locally loaded keys, and + 'agent' for keys managed by an SSH agent. 
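+
+        :returns: `str`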
+ + """ + + return self._key_type + + @property + def has_cert(self) -> bool: + """ Return if this key pair has an associated cert""" + + return bool(self._cert) + + @property + def has_x509_chain(self) -> bool: + """ Return if this key pair has an associated X.509 cert chain""" + + return self._cert.is_x509_chain if self._cert else False + + def get_algorithm(self) -> str: + """Return the algorithm associated with this key pair""" + + return self.algorithm.decode('ascii') + + def get_agent_private_key(self) -> bytes: + """Return binary encoding of keypair for upload to SSH agent""" + + # pylint: disable=no-self-use + raise KeyImportError('Private key export to agent not supported') + + def get_comment_bytes(self) -> Optional[bytes]: + """Return the comment associated with this key pair as a + byte string + + :returns: `bytes` or `None` + + """ + + return self._comment or self._filename + + def get_comment(self, encoding: str = 'utf-8', + errors: str = 'strict') -> Optional[str]: + """Return the comment associated with this key pair as a + Unicode string + + :param encoding: + The encoding to use to decode the comment as a Unicode + string, defaulting to UTF-8 + :param errors: + The error handling scheme to use for Unicode decode errors + :type encoding: `str` + :type errors: `str` + + :returns: `str` or `None` + + :raises: :exc:`UnicodeDecodeError` if the comment cannot be + decoded using the specified encoding + + """ + + comment = self.get_comment_bytes() + + return comment.decode(encoding, errors) if comment else None + + def set_comment(self, comment: _Comment, encoding: str = 'utf-8', + errors: str = 'strict') -> None: + """Set the comment associated with this key pair + + :param comment: + The new comment to associate with this key + :param encoding: + The Unicode encoding to use to encode the comment, + defaulting to UTF-8 + :param errors: + The error handling scheme to use for Unicode encode errors + :type comment: `str`, `bytes`, or `None` + :type encoding: `str` + :type errors: `str` + + :raises: :exc:`UnicodeEncodeError` if the comment cannot be + encoded using the specified encoding + + """ + + if isinstance(comment, str): + comment = comment.encode(encoding, errors) + + self._comment = comment or None + + def set_certificate(self, cert: SSHCertificate) -> None: + """Set certificate to use with this key""" + + if cert.key.public_data != self.key_public_data: + raise ValueError('Certificate key mismatch') + + self._cert = cert + self.algorithm = cert.algorithm + + if cert.is_x509_chain: + self.sig_algorithm = cert.algorithm + else: + self.sig_algorithm = self.key_algorithm + + self.sig_algorithms = cert.sig_algorithms + self.host_key_algorithms = cert.host_key_algorithms + self.public_data = cert.public_data + + def set_sig_algorithm(self, sig_algorithm: bytes) -> None: + """Set the signature algorithm to use when signing data""" + + try: + sig_algorithm = _certificate_sig_alg_map[sig_algorithm] + except KeyError: + pass + + self.sig_algorithm = sig_algorithm + + if not self.has_cert: + self.algorithm = sig_algorithm + elif self.has_x509_chain: + self.algorithm = sig_algorithm + + cert = cast('SSHX509CertificateChain', self._cert) + self.public_data = cert.adjust_public_data(sig_algorithm) + + def sign(self, data: bytes) -> bytes: + """Sign a block of data with this private key""" + + # pylint: disable=no-self-use + raise RuntimeError # pragma: no cover + + +class SSHLocalKeyPair(SSHKeyPair): + """Class which holds a local asymmetric key pair + + This class holds a private key 
and associated public data + which can either be the matching public key or a certificate + which has signed that public key. + + """ + + _key_type = 'local' + + def __init__(self, key: SSHKey, pubkey: Optional[SSHKey] = None, + cert: Optional[SSHCertificate] = None): + if pubkey and pubkey.public_data != key.public_data: + raise ValueError('Public key mismatch') + + if key.has_comment(): + comment = key.get_comment_bytes() + elif cert and cert.has_comment(): + comment = cert.get_comment_bytes() + elif pubkey and pubkey.has_comment(): + comment = pubkey.get_comment_bytes() + else: + comment = None + + super().__init__(key.algorithm, key.algorithm, key.sig_algorithms, + key.sig_algorithms, key.public_data, comment, cert, + key.get_filename(), key.use_executor) + + self._key = key + + def get_agent_private_key(self) -> bytes: + """Return binary encoding of keypair for upload to SSH agent""" + + if self._cert: + data = String(self.public_data) + \ + self._key.encode_agent_cert_private() + else: + data = self._key.encode_ssh_private() + + return String(self.algorithm) + data + + def sign(self, data: bytes) -> bytes: + """Sign a block of data with this private key""" + + return self._key.sign(data, self.sig_algorithm) + + +def _parse_openssh(data: bytes) -> Tuple[bytes, Optional[bytes], bytes]: + """Parse an OpenSSH format public key or certificate""" + + line = data.split(None, 2) + + if len(line) < 2: + raise KeyImportError('Invalid OpenSSH public key or certificate') + elif len(line) == 2: + comment = None + else: + comment = line[2] + + if (line[0] not in _public_key_alg_map and + line[0] not in _certificate_alg_map): + raise KeyImportError('Unknown OpenSSH public key algorithm') + + try: + return line[0], comment, binascii.a2b_base64(line[1]) + except binascii.Error: + raise KeyImportError('Invalid OpenSSH public key ' + 'or certificate') from None + + +def _parse_pem(data: bytes) -> Tuple[Mapping[bytes, bytes], bytes]: + """Parse a PEM data block""" + + start = 0 + end: Optional[int] = None + headers: Dict[bytes, bytes] = {} + + while True: + end = data.find(b'\n', start) + 1 + + line = data[start:end] if end else data[start:] + line = line.rstrip() + + if b':' in line: + hdr, value = line.split(b':', 1) + headers[hdr.strip()] = value.strip() + else: + break + + start = end if end != 0 else len(data) + + try: + return headers, binascii.a2b_base64(data[start:]) + except binascii.Error: + raise KeyImportError('Invalid PEM data') from None + + +def _parse_rfc4716(data: bytes) -> Tuple[Optional[bytes], bytes]: + """Parse an RFC 4716 data block""" + + start = 0 + end = None + hdr = b'' + comment = None + + while True: + end = data.find(b'\n', start) + 1 + line = data[start:end] if end else data[start:] + line = line.rstrip() + + if line[-1:] == b'\\': + hdr += line[:-1] + else: + hdr += line + if b':' in hdr: + hdr, value = hdr.split(b':', 1) + + if hdr.strip() == b'Comment': + comment = value.strip() + if comment[:1] == b'"' and comment[-1:] == b'"': + comment = comment[1:-1] + + hdr = b'' + else: + break + + start = end if end != 0 else len(data) + + try: + return comment, binascii.a2b_base64(data[start:]) + except binascii.Error: + raise KeyImportError('Invalid RFC 4716 data') from None + + +def _match_block(data: bytes, start: int, header: bytes, + fmt: str) -> Tuple[bytes, int]: + """Match a block of data wrapped in a header/footer""" + + match = re.compile(b'^' + header[:5] + b'END' + header[10:] + + rb'[ \t\r\f\v]*$', re.M).search(data, start) + + if not match: + raise 
KeyImportError('Missing %s footer' % fmt) + + return data[start:match.start()], match.end() + + +def _match_next(data: bytes, keytype: bytes, public: bool = False) -> \ + Tuple[Optional[str], Tuple, Optional[int]]: + """Find the next key/certificate and call the appropriate decode""" + + end: Optional[int] + + if data.startswith(b'\x30'): + try: + key_data, end = der_decode_partial(data) + return 'der', (key_data,), end + except ASN1DecodeError: + pass + + start = 0 + end = None + + while end != 0: + end = data.find(b'\n', start) + 1 + + line = data[start:end] if end else data[start:] + line = line.rstrip() + + if (line.startswith(b'-----BEGIN ') and + line.endswith(b' ' + keytype + b'-----')): + pem_name = line[11:-(6+len(keytype))].strip() + data, end = _match_block(data, end, line, 'PEM') + headers, data = _parse_pem(data) + return 'pem', (pem_name, headers, data), end + elif public: + if line == b'---- BEGIN SSH2 PUBLIC KEY ----': + data, end = _match_block(data, end, line, 'RFC 4716') + return 'rfc4716', _parse_rfc4716(data), end + else: + try: + cert = _parse_openssh(line) + except KeyImportError: + pass + else: + return 'openssh', cert, (end if end else len(data)) + + start = end + + return None, (), len(data) + + +def _decode_pkcs1_private( + pem_name: bytes, key_data: object, + unsafe_skip_rsa_key_validation: Optional[bool]) -> SSHKey: + """Decode a PKCS#1 format private key""" + + handler = _pem_map.get(pem_name) + if handler is None: + raise KeyImportError('Unknown PEM key type: %s' % + pem_name.decode('ascii')) + + key_params = handler.decode_pkcs1_private(key_data) + if key_params is None: + raise KeyImportError('Invalid %s private key' % + pem_name.decode('ascii')) + + if pem_name == b'RSA': + key_params = cast(Tuple, key_params) + \ + (unsafe_skip_rsa_key_validation,) + + return handler.make_private(key_params) + + +def _decode_pkcs1_public(pem_name: bytes, key_data: object) -> SSHKey: + """Decode a PKCS#1 format public key""" + + handler = _pem_map.get(pem_name) + if handler is None: + raise KeyImportError('Unknown PEM key type: %s' % + pem_name.decode('ascii')) + + key_params = handler.decode_pkcs1_public(key_data) + if key_params is None: + raise KeyImportError('Invalid %s public key' % + pem_name.decode('ascii')) + + return handler.make_public(key_params) + + +def _decode_pkcs8_private( + key_data: object, + unsafe_skip_rsa_key_validation: Optional[bool]) -> SSHKey: + """Decode a PKCS#8 format private key""" + + if (isinstance(key_data, tuple) and len(key_data) >= 3 and + key_data[0] in (0, 1) and isinstance(key_data[1], tuple) and + 1 <= len(key_data[1]) <= 2 and isinstance(key_data[2], bytes)): + if len(key_data[1]) == 2: + alg, alg_params = key_data[1] + else: + alg, alg_params = key_data[1][0], OMIT + + handler = _pkcs8_oid_map.get(alg) + if handler is None: + raise KeyImportError('Unknown PKCS#8 algorithm') + + key_params = handler.decode_pkcs8_private(alg_params, key_data[2]) + if key_params is None: + raise KeyImportError('Invalid %s private key' % + handler.pem_name.decode('ascii') + if handler.pem_name else 'PKCS#8') + + if alg == ObjectIdentifier('1.2.840.113549.1.1.1'): + key_params = cast(Tuple, key_params) + \ + (unsafe_skip_rsa_key_validation,) + + return handler.make_private(key_params) + else: + raise KeyImportError('Invalid PKCS#8 private key') + + +def _decode_pkcs8_public(key_data: object) -> SSHKey: + """Decode a PKCS#8 format public key""" + + if (isinstance(key_data, tuple) and len(key_data) == 2 and + isinstance(key_data[0], tuple) and 1 <= 
len(key_data[0]) <= 2 and + isinstance(key_data[1], BitString) and key_data[1].unused == 0): + if len(key_data[0]) == 2: + alg, alg_params = key_data[0] + else: + alg, alg_params = key_data[0][0], OMIT + + handler = _pkcs8_oid_map.get(alg) + if handler is None: + raise KeyImportError('Unknown PKCS#8 algorithm') + + key_params = handler.decode_pkcs8_public(alg_params, key_data[1].value) + if key_params is None: + raise KeyImportError('Invalid %s public key' % + handler.pem_name.decode('ascii') + if handler.pem_name else 'PKCS#8') + + return handler.make_public(key_params) + else: + raise KeyImportError('Invalid PKCS#8 public key') + + +def _decode_openssh_private( + data: bytes, passphrase: Optional[BytesOrStr], + unsafe_skip_rsa_key_validation: Optional[bool]) -> SSHKey: + """Decode an OpenSSH format private key""" + + try: + if not data.startswith(_OPENSSH_KEY_V1): + raise KeyImportError('Unrecognized OpenSSH private key type') + + data = data[len(_OPENSSH_KEY_V1):] + packet = SSHPacket(data) + + cipher_name = packet.get_string() + kdf = packet.get_string() + kdf_data = packet.get_string() + nkeys = packet.get_uint32() + _ = packet.get_string() # public_key + key_data = packet.get_string() + mac = packet.get_remaining_payload() + + if nkeys != 1: + raise KeyImportError('Invalid OpenSSH private key') + + if cipher_name != b'none': + if passphrase is None: + raise KeyImportError('Passphrase must be specified to import ' + 'encrypted private keys') + + try: + key_size, iv_size, block_size, _, _, _ = \ + get_encryption_params(cipher_name) + except KeyError: + raise KeyEncryptionError('Unknown cipher: %s' % + cipher_name.decode('ascii')) from None + + if kdf != b'bcrypt': + raise KeyEncryptionError('Unknown kdf: %s' % + kdf.decode('ascii')) + + if not _bcrypt_available: # pragma: no cover + raise KeyEncryptionError('OpenSSH private key encryption ' + 'requires bcrypt with KDF support') + + packet = SSHPacket(kdf_data) + salt = packet.get_string() + rounds = packet.get_uint32() + packet.check_end() + + if isinstance(passphrase, str): + passphrase = passphrase.encode('utf-8') + + try: + bcrypt_key = bcrypt.kdf(passphrase, salt, key_size + iv_size, + rounds, ignore_few_rounds=True) + except ValueError: + raise KeyEncryptionError('Invalid OpenSSH ' + 'private key') from None + + cipher = get_encryption(cipher_name, bcrypt_key[:key_size], + bcrypt_key[key_size:]) + + decrypted_key = cipher.decrypt_packet(0, b'', key_data, 0, mac) + + if decrypted_key is None: + raise KeyEncryptionError('Incorrect passphrase') + + key_data = decrypted_key + block_size = max(block_size, 8) + else: + block_size = 8 + + packet = SSHPacket(key_data) + + check1 = packet.get_uint32() + check2 = packet.get_uint32() + if check1 != check2: + if cipher_name != b'none': + raise KeyEncryptionError('Incorrect passphrase') from None + else: + raise KeyImportError('Invalid OpenSSH private key') + + alg = packet.get_string() + handler = _public_key_alg_map.get(alg) + if not handler: + raise KeyImportError('Unknown OpenSSH private key algorithm') + + key_params = handler.decode_ssh_private(packet) + comment = packet.get_string() + pad = packet.get_remaining_payload() + + if len(pad) >= block_size or pad != bytes(range(1, len(pad) + 1)): + raise KeyImportError('Invalid OpenSSH private key') + + if alg == b'ssh-rsa': + key_params = cast(Tuple, key_params) + \ + (unsafe_skip_rsa_key_validation,) + + key = handler.make_private(key_params) + key.set_comment(comment) + return key + except PacketDecodeError: + raise KeyImportError('Invalid 
OpenSSH private key') from None + + +def _decode_openssh_public(data: bytes) -> SSHKey: + """Decode public key within OpenSSH format private key""" + + try: + if not data.startswith(_OPENSSH_KEY_V1): + raise KeyImportError('Unrecognized OpenSSH private key type') + + data = data[len(_OPENSSH_KEY_V1):] + packet = SSHPacket(data) + + _ = packet.get_string() # cipher_name + _ = packet.get_string() # kdf + _ = packet.get_string() # kdf_data + nkeys = packet.get_uint32() + pubkey = packet.get_string() + + if nkeys != 1: + raise KeyImportError('Invalid OpenSSH private key') + + return decode_ssh_public_key(pubkey) + except PacketDecodeError: + raise KeyImportError('Invalid OpenSSH private key') from None + + +def _decode_der_private( + key_data: object, passphrase: Optional[BytesOrStr], + unsafe_skip_rsa_key_validation: Optional[bool]) -> SSHKey: + """Decode a DER format private key""" + + # First, if there's a passphrase, try to decrypt PKCS#8 + if passphrase is not None: + try: + key_data = pkcs8_decrypt(key_data, passphrase) + except KeyEncryptionError: + # Decryption failed - try decoding it as unencrypted + pass + + # Then, try to decode PKCS#8 + try: + return _decode_pkcs8_private(key_data, unsafe_skip_rsa_key_validation) + except KeyImportError: + # PKCS#8 failed - try PKCS#1 instead + pass + + # If that fails, try each of the possible PKCS#1 encodings + for pem_name in _pem_map: + try: + return _decode_pkcs1_private(pem_name, key_data, + unsafe_skip_rsa_key_validation) + except KeyImportError: + # Try the next PKCS#1 encoding + pass + + raise KeyImportError('Invalid DER private key') + + +def _decode_der_public(key_data: object) -> SSHKey: + """Decode a DER format public key""" + + # First, try to decode PKCS#8 + try: + return _decode_pkcs8_public(key_data) + except KeyImportError: + # PKCS#8 failed - try PKCS#1 instead + pass + + # If that fails, try each of the possible PKCS#1 encodings + for pem_name in _pem_map: + try: + return _decode_pkcs1_public(pem_name, key_data) + except KeyImportError: + # Try the next PKCS#1 encoding + pass + + raise KeyImportError('Invalid DER public key') + + +def _decode_der_certificate(data: bytes, + comment: _Comment = None) -> SSHCertificate: + """Decode a DER format X.509 certificate""" + + return SSHX509Certificate.construct_from_der(data, comment) + + +def _decode_pem_private( + pem_name: bytes, headers: Mapping[bytes, bytes], + data: bytes, passphrase: Optional[BytesOrStr], + unsafe_skip_rsa_key_validation: Optional[bool]) -> SSHKey: + """Decode a PEM format private key""" + + if pem_name == b'OPENSSH': + return _decode_openssh_private(data, passphrase, + unsafe_skip_rsa_key_validation) + + if headers.get(b'Proc-Type') == b'4,ENCRYPTED': + if passphrase is None: + raise KeyImportError('Passphrase must be specified to import ' + 'encrypted private keys') + + dek_info = headers.get(b'DEK-Info', b'').split(b',') + if len(dek_info) != 2: + raise KeyImportError('Invalid PEM encryption params') + + alg, iv = dek_info + try: + iv = binascii.a2b_hex(iv) + except binascii.Error: + raise KeyImportError('Invalid PEM encryption params') from None + + try: + data = pkcs1_decrypt(data, alg, iv, passphrase) + except KeyEncryptionError: + raise KeyImportError('Unable to decrypt PKCS#1 ' + 'private key') from None + + try: + key_data = der_decode(data) + except ASN1DecodeError: + raise KeyImportError('Invalid PEM private key') from None + + if pem_name == b'ENCRYPTED': + if passphrase is None: + raise KeyImportError('Passphrase must be specified to import ' + 
'encrypted private keys') + + pem_name = b'' + + try: + key_data = pkcs8_decrypt(key_data, passphrase) + except KeyEncryptionError: + raise KeyImportError('Unable to decrypt PKCS#8 ' + 'private key') from None + + if pem_name: + return _decode_pkcs1_private(pem_name, key_data, + unsafe_skip_rsa_key_validation) + else: + return _decode_pkcs8_private(key_data, unsafe_skip_rsa_key_validation) + + +def _decode_pem_public(pem_name: bytes, data: bytes) -> SSHKey: + """Decode a PEM format public key""" + + try: + key_data = der_decode(data) + except ASN1DecodeError: + raise KeyImportError('Invalid PEM public key') from None + + if pem_name: + return _decode_pkcs1_public(pem_name, key_data) + else: + return _decode_pkcs8_public(key_data) + + +def _decode_pem_certificate(pem_name: bytes, data: bytes) -> SSHCertificate: + """Decode a PEM format X.509 certificate""" + + if pem_name == b'TRUSTED': + # Strip off OpenSSL trust information + try: + _, end = der_decode_partial(data) + data = data[:end] + except ASN1DecodeError: + raise KeyImportError('Invalid PEM trusted certificate') from None + elif pem_name: + raise KeyImportError('Invalid PEM certificate') + + return SSHX509Certificate.construct_from_der(data) + + +def _decode_private( + data: bytes, passphrase: Optional[BytesOrStr], + unsafe_skip_rsa_key_validation: Optional[bool]) -> \ + Tuple[Optional[SSHKey], Optional[int]]: + """Decode a private key""" + + fmt, key_info, end = _match_next(data, b'PRIVATE KEY') + + key: Optional[SSHKey] + + if fmt == 'der': + key = _decode_der_private(key_info[0], passphrase, + unsafe_skip_rsa_key_validation) + elif fmt == 'pem': + pem_name, headers, data = key_info + key = _decode_pem_private(pem_name, headers, data, passphrase, + unsafe_skip_rsa_key_validation) + else: + key = None + + return key, end + + +def _decode_public(data: bytes) -> Tuple[Optional[SSHKey], Optional[int]]: + """Decode a public key""" + + fmt, key_info, end = _match_next(data, b'PUBLIC KEY', public=True) + + key: Optional[SSHKey] + + if fmt == 'der': + key = _decode_der_public(key_info[0]) + elif fmt == 'pem': + pem_name, _, data = key_info + key = _decode_pem_public(pem_name, data) + elif fmt == 'openssh': + algorithm, comment, data = key_info + key = decode_ssh_public_key(data) + + if algorithm != key.algorithm: + raise KeyImportError('Public key algorithm mismatch') + + key.set_comment(comment) + elif fmt == 'rfc4716': + comment, data = key_info + key = decode_ssh_public_key(data) + key.set_comment(comment) + else: + fmt, key_info, end = _match_next(data, b'PRIVATE KEY') + + if fmt == 'pem' and key_info[0] == b'OPENSSH': + key = _decode_openssh_public(key_info[2]) + else: + key, _ = _decode_private(data, None, False) + + if key: + key = key.convert_to_public() + + return key, end + + +def _decode_certificate(data: bytes) -> \ + Tuple[Optional[SSHCertificate], Optional[int]]: + """Decode a certificate""" + + fmt, key_info, end = _match_next(data, b'CERTIFICATE', public=True) + + cert: Optional[SSHCertificate] + + if fmt == 'der': + cert = _decode_der_certificate(data[:end]) + elif fmt == 'pem': + pem_name, _, data = key_info + cert = _decode_pem_certificate(pem_name, data) + elif fmt == 'openssh': + algorithm, comment, data = key_info + + if algorithm.startswith(b'x509v3-'): + cert = _decode_der_certificate(data, comment) + else: + cert = decode_ssh_certificate(data, comment) + elif fmt == 'rfc4716': + comment, data = key_info + cert = decode_ssh_certificate(data, comment) + else: + cert = None + + return cert, end + + +def 
_decode_private_list( + data: bytes, passphrase: Optional[BytesOrStr], + unsafe_skip_rsa_key_validation: Optional[bool]) -> Sequence[SSHKey]: + """Decode a private key list""" + + keys: List[SSHKey] = [] + + while data: + key, end = _decode_private(data, passphrase, + unsafe_skip_rsa_key_validation) + + if key: + keys.append(key) + + data = data[end:] + + return keys + + +def _decode_public_list(data: bytes) -> Sequence[SSHKey]: + """Decode a public key list""" + + keys: List[SSHKey] = [] + + while data: + key, end = _decode_public(data) + + if key: + keys.append(key) + + data = data[end:] + + return keys + + +def _decode_certificate_list(data: bytes) -> Sequence[SSHCertificate]: + """Decode a certificate list""" + + certs: List[SSHCertificate] = [] + + while data: + cert, end = _decode_certificate(data) + + if cert: + certs.append(cert) + + data = data[end:] + + return certs + + +def register_sk_alg(sk_alg: int, handler: Type[SSHKey], *args: object) -> None: + """Register a new security key algorithm""" + + _sk_alg_map[sk_alg] = handler, args + + +def register_public_key_alg(algorithm: bytes, handler: Type[SSHKey], + default: bool, + sig_algorithms: Optional[Sequence[bytes]] = \ + None) -> None: + """Register a new public key algorithm""" + + if not sig_algorithms: + sig_algorithms = handler.sig_algorithms + + _public_key_algs.extend(sig_algorithms) + + if default: + _default_public_key_algs.extend(sig_algorithms) + + _public_key_alg_map[algorithm] = handler + + if handler.pem_name: + _pem_map[handler.pem_name] = handler + + if handler.pkcs8_oid: # pragma: no branch + _pkcs8_oid_map[handler.pkcs8_oid] = handler + + +def register_certificate_alg(version: int, algorithm: bytes, + cert_algorithm: bytes, + key_handler: Type[SSHKey], + cert_handler: Type[SSHOpenSSHCertificate], + default: bool) -> None: + """Register a new certificate algorithm""" + + _certificate_algs.append(cert_algorithm) + + if default: + _default_certificate_algs.append(cert_algorithm) + + _certificate_alg_map[cert_algorithm] = (key_handler, cert_handler) + + _certificate_sig_alg_map[cert_algorithm] = algorithm + + _certificate_version_map[algorithm, version] = \ + (cert_algorithm, cert_handler) + + +def register_x509_certificate_alg(cert_algorithm: bytes, default: bool) -> None: + """Register a new X.509 certificate algorithm""" + + if _x509_available: # pragma: no branch + _x509_certificate_algs.append(cert_algorithm) + + if default: + _default_x509_certificate_algs.append(cert_algorithm) + + _certificate_alg_map[cert_algorithm] = (None, SSHX509CertificateChain) + + +def get_public_key_algs() -> List[bytes]: + """Return supported public key algorithms""" + + return _public_key_algs + + +def get_default_public_key_algs() -> List[bytes]: + """Return default public key algorithms""" + + return _default_public_key_algs + + +def get_certificate_algs() -> List[bytes]: + """Return supported certificate-based public key algorithms""" + + return _certificate_algs + + +def get_default_certificate_algs() -> List[bytes]: + """Return default certificate-based public key algorithms""" + + return _default_certificate_algs + + +def get_x509_certificate_algs() -> List[bytes]: + """Return supported X.509 certificate-based public key algorithms""" + + return _x509_certificate_algs + + +def get_default_x509_certificate_algs() -> List[bytes]: + """Return default X.509 certificate-based public key algorithms""" + + return _default_x509_certificate_algs + + +def decode_ssh_public_key(data: bytes) -> SSHKey: + """Decode a packetized SSH public 
key""" + + try: + packet = SSHPacket(data) + alg = packet.get_string() + handler = _public_key_alg_map.get(alg) + + if handler: + key_params = handler.decode_ssh_public(packet) + packet.check_end() + + key = handler.make_public(key_params) + key.algorithm = alg + return key + else: + raise KeyImportError('Unknown key algorithm: %s' % + alg.decode('ascii', errors='replace')) + except PacketDecodeError: + raise KeyImportError('Invalid public key') from None + + +def decode_ssh_certificate(data: bytes, + comment: _Comment = None) -> SSHCertificate: + """Decode a packetized SSH certificate""" + + try: + packet = SSHPacket(data) + alg = packet.get_string() + key_handler, cert_handler = _certificate_alg_map.get(alg, (None, None)) + + if cert_handler: + return cert_handler.construct(packet, alg, key_handler, comment) + else: + raise KeyImportError('Unknown certificate algorithm: %s' % + alg.decode('ascii', errors='replace')) + except (PacketDecodeError, ValueError): + raise KeyImportError('Invalid OpenSSH certificate') from None + + +def generate_private_key(alg_name: str, comment: _Comment = None, + **kwargs) -> SSHKey: + """Generate a new private key + + This function generates a new private key of a type matching + the requested SSH algorithm. Depending on the algorithm, additional + parameters can be passed which affect the generated key. + + Available algorithms include: + + ssh-dss, ssh-rsa, ecdsa-sha2-nistp256, ecdsa-sha2-nistp384, + ecdsa-sha2-nistp521, ecdsa-sha2-1.3.132.0.10, ssh-ed25519, + ssh-ed448, sk-ecdsa-sha2-nistp256\\@openssh.com, + sk-ssh-ed25519\\@openssh.com + + For dss keys, no parameters are supported. The key size is fixed at + 1024 bits due to the use of SHA1 signatures. + + For rsa keys, the key size can be specified using the `key_size` + parameter, and the RSA public exponent can be changed using the + `exponent` parameter. By default, generated keys are 2048 bits + with a public exponent of 65537. + + For ecdsa keys, the curve to use is part of the SSH algorithm name + and that determines the key size. No other parameters are supported. + + For ed25519 and ed448 keys, no parameters are supported. The key size + is fixed by the algorithms at 256 bits and 448 bits, respectively. + + For sk keys, the application name to associate with the generated + key can be specified using the `application` parameter. It defaults + to `'ssh:'`. The user name to associate with the generated key can + be specified using the `user` parameter. It defaults to `'AsyncSSH'`. + + When generating an sk key, a PIN can be provided via the `pin` + parameter if the security key requires it. + + The `resident` parameter can be set to `True` to request that a + resident key be created on the security key. This allows the key + handle and public key information to later be retrieved so that + the generated key can be used without having to store any + information on the client system. It defaults to `False`. + + You can enable or disable the security key touch requirement by + setting the `touch_required` parameter. It defaults to `True`, + requiring that the user confirm their presence by touching the + security key each time they use it to authenticate. + + :param alg_name: + The SSH algorithm name corresponding to the desired type of key. + :param comment: (optional) + A comment to associate with this key. + :param key_size: (optional) + The key size in bits for RSA keys. + :param exponent: (optional) + The public exponent for RSA keys. 
+    :param application: (optional)
+        The application name to associate with the generated SK key,
+        defaulting to `'ssh:'`.
+    :param user: (optional)
+        The user name to associate with the generated SK key, defaulting
+        to `'AsyncSSH'`.
+    :param pin: (optional)
+        The PIN to use to access the security key, defaulting to `None`.
+    :param resident: (optional)
+        Whether or not to create a resident key on the security key,
+        defaulting to `False`.
+    :param touch_required: (optional)
+        Whether or not to require the user to touch the security key
+        when authenticating with it, defaulting to `True`.
+    :type alg_name: `str`
+    :type comment: `str`, `bytes`, or `None`
+    :type key_size: `int`
+    :type exponent: `int`
+    :type application: `str`
+    :type user: `str`
+    :type pin: `str`
+    :type resident: `bool`
+    :type touch_required: `bool`
+
+    :returns: An :class:`SSHKey` private key
+
+    :raises: :exc:`KeyGenerationError` if the requested key parameters
+             are unsupported
+
+    """
+
+    algorithm = alg_name.encode('utf-8')
+    handler = _public_key_alg_map.get(algorithm)
+
+    if handler:
+        try:
+            key = handler.generate(algorithm, **kwargs)
+        except (TypeError, ValueError) as exc:
+            raise KeyGenerationError(str(exc)) from None
+    else:
+        raise KeyGenerationError('Unknown algorithm: %s' % alg_name)
+
+    key.set_comment(comment)
+    return key
+
+
+def import_private_key(
+        data: BytesOrStr, passphrase: Optional[BytesOrStr] = None,
+        unsafe_skip_rsa_key_validation: Optional[bool] = None) -> SSHKey:
+    """Import a private key
+
+    This function imports a private key encoded in PKCS#1 or PKCS#8 DER
+    or PEM format or OpenSSH format. Encrypted private keys can be
+    imported by specifying the passphrase needed to decrypt them.
+
+    :param data:
+        The data to import.
+    :param passphrase: (optional)
+        The passphrase to use to decrypt the key.
+    :param unsafe_skip_rsa_key_validation: (optional)
+        Whether or not to skip key validation when loading RSA private
+        keys, defaulting to performing these checks unless changed by
+        calling :func:`set_default_skip_rsa_key_validation`.
+    :type data: `bytes` or ASCII `str`
+    :type passphrase: `str` or `bytes`
+    :type unsafe_skip_rsa_key_validation: `bool`
+
+    :returns: An :class:`SSHKey` private key
+
+    """
+
+    if isinstance(data, str):
+        try:
+            data = data.encode('ascii')
+        except UnicodeEncodeError:
+            raise KeyImportError('Invalid encoding for key') from None
+
+    key, _ = _decode_private(data, passphrase, unsafe_skip_rsa_key_validation)
+
+    if key:
+        return key
+    else:
+        raise KeyImportError('Invalid private key')
+
+
+def import_private_key_and_certs(
+        data: bytes, passphrase: Optional[BytesOrStr] = None,
+        unsafe_skip_rsa_key_validation: Optional[bool] = None) -> \
+            Tuple[SSHKey, Optional[SSHX509CertificateChain]]:
+    """Import a private key and optional certificate chain"""
+
+    key, end = _decode_private(data, passphrase,
+                               unsafe_skip_rsa_key_validation)
+
+    if key:
+        return key, import_certificate_chain(data[end:])
+    else:
+        raise KeyImportError('Invalid private key')
+
+
+def import_public_key(data: BytesOrStr) -> SSHKey:
+    """Import a public key
+
+    This function imports a public key encoded in OpenSSH, RFC4716, or
+    PKCS#1 or PKCS#8 DER or PEM format.
+
+    :param data:
+        The data to import.
+    :type data: `bytes` or ASCII `str`
+
+    :returns: An :class:`SSHKey` public key
+
+    """
+
+    if isinstance(data, str):
+        try:
+            data = data.encode('ascii')
+        except UnicodeEncodeError:
+            raise KeyImportError('Invalid encoding for key') from None
+
+    key, _ = _decode_public(data)
+
+    if key:
+        return key
+    else:
+        raise KeyImportError('Invalid public key')
+
+
+def import_certificate(data: BytesOrStr) -> SSHCertificate:
+    """Import a certificate
+
+    This function imports an SSH certificate in DER, PEM, OpenSSH, or
+    RFC4716 format.
+
+    :param data:
+        The data to import.
+    :type data: `bytes` or ASCII `str`
+
+    :returns: An :class:`SSHCertificate` object
+
+    """
+
+    if isinstance(data, str):
+        try:
+            data = data.encode('ascii')
+        except UnicodeEncodeError:
+            raise KeyImportError('Invalid encoding for key') from None
+
+    cert, _ = _decode_certificate(data)
+
+    if cert:
+        return cert
+    else:
+        raise KeyImportError('Invalid certificate')
+
+
+def import_certificate_chain(data: bytes) -> Optional[SSHX509CertificateChain]:
+    """Import an X.509 certificate chain"""
+
+    certs = _decode_certificate_list(data)
+
+    chain: Optional[SSHX509CertificateChain]
+
+    if certs:
+        chain = SSHX509CertificateChain.construct_from_certs(certs)
+    else:
+        chain = None
+
+    return chain
+
+
+def import_certificate_subject(data: str) -> str:
+    """Import an X.509 certificate subject name"""
+
+    try:
+        algorithm, data = data.strip().split(None, 1)
+    except ValueError:
+        raise KeyImportError('Missing certificate subject algorithm') from None
+
+    if algorithm.startswith('x509v3-'):
+        match = _subject_pattern.match(data)
+
+        if match:
+            return data[match.end():]
+
+    raise KeyImportError('Invalid certificate subject')
+
+
+def read_private_key(
+        filename: FilePath, passphrase: Optional[BytesOrStr] = None,
+        unsafe_skip_rsa_key_validation: Optional[bool] = None) -> SSHKey:
+    """Read a private key from a file
+
+    This function reads a private key from a file. See the function
+    :func:`import_private_key` for information about the formats
+    supported.
+
+    :param filename:
+        The file to read the key from.
+    :param passphrase: (optional)
+        The passphrase to use to decrypt the key.
+    :param unsafe_skip_rsa_key_validation: (optional)
+        Whether or not to skip key validation when loading RSA private
+        keys, defaulting to performing these checks unless changed by
+        calling :func:`set_default_skip_rsa_key_validation`.
+    :type filename: :class:`PurePath <pathlib.PurePath>` or `str`
+    :type passphrase: `str` or `bytes`
+    :type unsafe_skip_rsa_key_validation: `bool`
+
+    :returns: An :class:`SSHKey` private key
+
+    """
+
+    key = import_private_key(read_file(filename), passphrase,
+                             unsafe_skip_rsa_key_validation)
+
+    key.set_filename(filename)
+
+    return key
+
+
+def read_private_key_and_certs(
+        filename: FilePath, passphrase: Optional[BytesOrStr] = None,
+        unsafe_skip_rsa_key_validation: Optional[bool] = None) -> \
+            Tuple[SSHKey, Optional[SSHX509CertificateChain]]:
+    """Read a private key and optional certificate chain from a file"""
+
+    key, cert = import_private_key_and_certs(read_file(filename), passphrase,
+                                             unsafe_skip_rsa_key_validation)
+
+    key.set_filename(filename)
+
+    return key, cert
+
+
+def read_public_key(filename: FilePath) -> SSHKey:
+    """Read a public key from a file
+
+    This function reads a public key from a file. See the function
+    :func:`import_public_key` for information about the formats
+    supported.
+
+    :param filename:
+        The file to read the key from.
+ :type filename: :class:`PurePath ` or `str` + + :returns: An :class:`SSHKey` public key + + """ + + key = import_public_key(read_file(filename)) + + key.set_filename(filename) + + return key + + +def read_certificate(filename: FilePath) -> SSHCertificate: + """Read a certificate from a file + + This function reads an SSH certificate from a file. See the + function :func:`import_certificate` for information about the + formats supported. + + :param filename: + The file to read the certificate from. + :type filename: :class:`PurePath ` or `str` + + :returns: An :class:`SSHCertificate` object + + """ + + return import_certificate(read_file(filename)) + + +def read_private_key_list( + filename: FilePath, passphrase: Optional[BytesOrStr] = None, + unsafe_skip_rsa_key_validation: Optional[bool] = None) -> \ + Sequence[SSHKey]: + """Read a list of private keys from a file + + This function reads a list of private keys from a file. See the + function :func:`import_private_key` for information about the + formats supported. If any of the keys are encrypted, they must + all be encrypted with the same passphrase. + + :param filename: + The file to read the keys from. + :param passphrase: (optional) + The passphrase to use to decrypt the keys. + :param unsafe_skip_rsa_key_validation: (optional) + Whether or not to skip key validation when loading RSA private + keys, defaulting to performing these checks unless changed by + calling :func:`set_default_skip_rsa_key_validation`. + :type filename: :class:`PurePath ` or `str` + :type passphrase: `str` or `bytes` + :type unsafe_skip_rsa_key_validation: bool + + :returns: A list of :class:`SSHKey` private keys + + """ + + keys = _decode_private_list(read_file(filename), passphrase, + unsafe_skip_rsa_key_validation) + + for key in keys: + key.set_filename(filename) + + return keys + + +def read_public_key_list(filename: FilePath) -> Sequence[SSHKey]: + """Read a list of public keys from a file + + This function reads a list of public keys from a file. See the + function :func:`import_public_key` for information about the + formats supported. + + :param filename: + The file to read the keys from. + :type filename: :class:`PurePath ` or `str` + + :returns: A list of :class:`SSHKey` public keys + + """ + + keys = _decode_public_list(read_file(filename)) + + for key in keys: + key.set_filename(filename) + + return keys + + +def read_certificate_list(filename: FilePath) -> Sequence[SSHCertificate]: + """Read a list of certificates from a file + + This function reads a list of SSH certificates from a file. See + the function :func:`import_certificate` for information about + the formats supported. + + :param filename: + The file to read the certificates from. + :type filename: :class:`PurePath ` or `str` + + :returns: A list of :class:`SSHCertificate` certificates + + """ + + return _decode_certificate_list(read_file(filename)) + + +def load_keypairs( + keylist: KeyPairListArg, passphrase: Optional[BytesOrStr] = None, + certlist: CertListArg = (), skip_public: bool = False, + ignore_encrypted: bool = False, + unsafe_skip_rsa_key_validation: Optional[bool] = None) -> \ + Sequence[SSHKeyPair]: + """Load SSH private keys and optional matching certificates + + This function loads a list of SSH keys and optional matching + certificates. + + When certificates are specified, the private key is added to + the list both with and without the certificate. + + :param keylist: + The list of private keys and certificates to load. 
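+           Each element can, for example, be a key file name, raw key
+           data, an already-loaded :class:`SSHKey` or :class:`SSHKeyPair`,
+           or a tuple pairing one of these with matching certificates
+           or a public key.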
+ :param passphrase: (optional) + The passphrase to use to decrypt private keys. + :param certlist: (optional) + A list of certificates to attempt to pair with the provided + list of private keys. + :param skip_public: (optional) + An internal parameter used to skip public keys and certificates + when IdentitiesOnly and IdentityFile are used to specify a + mixture of private and public keys. + :param unsafe_skip_rsa_key_validation: (optional) + Whether or not to skip key validation when loading RSA private + keys, defaulting to performing these checks unless changed by + calling :func:`set_default_skip_rsa_key_validation`. + :type keylist: *see* :ref:`SpecifyingPrivateKeys` + :type passphrase: `str` or `bytes` + :type certlist: *see* :ref:`SpecifyingCertificates` + :type skip_public: `bool` + :type unsafe_skip_rsa_key_validation: bool + + :returns: A list of :class:`SSHKeyPair` objects + + """ + + keys_to_load: Sequence[_KeyPairArg] + result: List[SSHKeyPair] = [] + + certlist = load_certificates(certlist) + certdict = {cert.key.public_data: cert for cert in certlist} + + if isinstance(keylist, (PurePath, str)): + try: + priv_keys = read_private_key_list(keylist, passphrase, + unsafe_skip_rsa_key_validation) + keys_to_load = [keylist] if len(priv_keys) <= 1 else priv_keys + except KeyImportError: + keys_to_load = [keylist] + elif isinstance(keylist, (tuple, bytes, SSHKey, SSHKeyPair)): + keys_to_load = [cast(_KeyPairArg, keylist)] + else: + keys_to_load = keylist if keylist else [] + + for key_to_load in keys_to_load: + allow_certs = False + key_prefix = None + saved_exc = None + pubkey_or_certs = None + pubkey_to_load: Optional[_KeyArg] = None + certs_to_load: Optional[_CertArg] = None + key: Union['SSHKey', 'SSHKeyPair'] + + if isinstance(key_to_load, (PurePath, str, bytes)): + allow_certs = True + elif isinstance(key_to_load, tuple): + key_to_load, pubkey_or_certs = key_to_load + + try: + if isinstance(key_to_load, (PurePath, str)): + key_prefix = str(key_to_load) + + if allow_certs: + key, certs_to_load = read_private_key_and_certs( + key_to_load, passphrase, + unsafe_skip_rsa_key_validation) + + if not certs_to_load: + certs_to_load = key_prefix + '-cert.pub' + else: + key = read_private_key(key_to_load, passphrase, + unsafe_skip_rsa_key_validation) + + pubkey_to_load = key_prefix + '.pub' + elif isinstance(key_to_load, bytes): + if allow_certs: + key, certs_to_load = import_private_key_and_certs( + key_to_load, passphrase, + unsafe_skip_rsa_key_validation) + else: + key = import_private_key(key_to_load, passphrase, + unsafe_skip_rsa_key_validation) + else: + key = key_to_load + except KeyImportError as exc: + if skip_public or \ + (ignore_encrypted and str(exc).startswith('Passphrase')): + continue + + raise + + certs: Optional[Sequence[SSHCertificate]] + + if pubkey_or_certs: + try: + certs = load_certificates(pubkey_or_certs) + except (TypeError, OSError, KeyImportError) as exc: + saved_exc = exc + certs = None + + if not certs: + pubkey_to_load = cast(_KeyArg, pubkey_or_certs) + elif certs_to_load: + try: + certs = load_certificates(certs_to_load) + except (OSError, KeyImportError): + certs = None + else: + certs = None + + pubkey: Optional[SSHKey] + + if pubkey_to_load: + try: + if isinstance(pubkey_to_load, (PurePath, str)): + pubkey = read_public_key(pubkey_to_load) + elif isinstance(pubkey_to_load, bytes): + pubkey = import_public_key(pubkey_to_load) + else: + pubkey = pubkey_to_load + except (OSError, KeyImportError): + pubkey = None + else: + saved_exc = None + else: + 
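# No separate public key was provided for this entry, so none is
+            # passed to SSHLocalKeyPair below
+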
pubkey = None + + if saved_exc: + raise saved_exc # pylint: disable=raising-bad-type + + if not certs: + if isinstance(key, SSHKeyPair): + pubdata = key.key_public_data + else: + pubdata = key.public_data + + cert = certdict.get(pubdata) + + if cert and cert.is_x509: + cert = SSHX509CertificateChain.construct_from_certs(certlist) + elif len(certs) == 1 and not certs[0].is_x509: + cert = certs[0] + else: + cert = SSHX509CertificateChain.construct_from_certs(certs) + + if isinstance(key, SSHKeyPair): + if cert: + key.set_certificate(cert) + + result.append(key) + else: + if cert: + result.append(SSHLocalKeyPair(key, pubkey, cert)) + + result.append(SSHLocalKeyPair(key, pubkey)) + + return result + + +def load_default_keypairs(passphrase: Optional[BytesOrStr] = None, + certlist: CertListArg = ()) -> \ + Sequence[SSHKeyPair]: + """Return a list of default keys from the user's home directory""" + + result: List[SSHKeyPair] = [] + + for file, condition in _DEFAULT_KEY_FILES: + if condition: # pragma: no branch + try: + path = Path('~', '.ssh', file).expanduser() + result.extend(load_keypairs(path, passphrase, certlist, + ignore_encrypted=True)) + except OSError: + pass + + return result + + +def load_public_keys(keylist: KeyListArg) -> Sequence[SSHKey]: + """Load public keys + + This function loads a list of SSH public keys. + + :param keylist: + The list of public keys to load. + :type keylist: *see* :ref:`SpecifyingPublicKeys` + + :returns: A list of :class:`SSHKey` objects + + """ + + if isinstance(keylist, (PurePath, str)): + return read_public_key_list(keylist) + else: + result: List[SSHKey] = [] + + for key in keylist: + if isinstance(key, (PurePath, str)): + key = read_public_key(key) + elif isinstance(key, bytes): + key = import_public_key(key) + + result.append(key) + + return result + + +def load_default_host_public_keys() -> Sequence[Union[SSHKey, SSHCertificate]]: + """Return a list of default host public keys or certificates""" + + result: List[Union[SSHKey, SSHCertificate]] = [] + + for host_key_dir in _DEFAULT_HOST_KEY_DIRS: + for file in _DEFAULT_HOST_KEY_FILES: + try: + cert = read_certificate(Path(host_key_dir, file + '-cert.pub')) + except (OSError, KeyImportError): + pass + else: + result.append(cert) + + for host_key_dir in _DEFAULT_HOST_KEY_DIRS: + for file in _DEFAULT_HOST_KEY_FILES: + try: + key = read_public_key(Path(host_key_dir, file + '.pub')) + except (OSError, KeyImportError): + pass + else: + result.append(key) + + return result + + +def load_certificates(certlist: CertListArg) -> Sequence[SSHCertificate]: + """Load certificates + + This function loads a list of OpenSSH or X.509 certificates. + + :param certlist: + The list of certificates to load. 
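+           Each element can, for example, be a certificate file name,
+           raw certificate data, or an already-loaded
+           :class:`SSHCertificate` object.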
+       :type certlist: *see* :ref:`SpecifyingCertificates`
+
+       :returns: A list of :class:`SSHCertificate` objects
+
+    """
+
+    if isinstance(certlist, SSHCertificate):
+        return [certlist]
+    elif isinstance(certlist, (PurePath, str, bytes)):
+        certlist = [certlist]
+
+    result: List[SSHCertificate] = []
+
+    for cert in certlist:
+        if isinstance(cert, (PurePath, str)):
+            certs = read_certificate_list(cert)
+        elif isinstance(cert, bytes):
+            certs = _decode_certificate_list(cert)
+        elif isinstance(cert, SSHCertificate):
+            certs = [cert]
+        else:
+            certs = cert
+
+        result.extend(certs)
+
+    return result
+
+
+def load_identities(keylist: IdentityListArg,
+                    skip_private: bool = False) -> Sequence[bytes]:
+    """Load public key and certificate identities"""
+
+    if isinstance(keylist, (bytes, str, PurePath, SSHKey, SSHCertificate)):
+        identities: Sequence[_IdentityArg] = [keylist]
+    else:
+        identities = keylist
+
+    result = []
+
+    for identity in identities:
+        if isinstance(identity, (PurePath, str)):
+            try:
+                pubdata = read_certificate(identity).public_data
+            except KeyImportError:
+                try:
+                    pubdata = read_public_key(identity).public_data
+                except KeyImportError:
+                    if skip_private:
+                        continue
+
+                    raise
+        elif isinstance(identity, (SSHKey, SSHCertificate)):
+            pubdata = identity.public_data
+        else:
+            pubdata = identity
+
+        result.append(pubdata)
+
+    return result
+
+
+def load_default_identities() -> Sequence[bytes]:
+    """Return a list of default public key and certificate identities"""
+
+    result: List[bytes] = []
+
+    for file, condition in _DEFAULT_KEY_FILES:
+        if condition:  # pragma: no branch
+            try:
+                cert = read_certificate(Path('~', '.ssh', file + '-cert.pub'))
+            except (OSError, KeyImportError):
+                pass
+            else:
+                result.append(cert.public_data)
+
+            try:
+                key = read_public_key(Path('~', '.ssh', file + '.pub'))
+            except (OSError, KeyImportError):
+                pass
+            else:
+                result.append(key.public_data)
+
+    return result
+
+
+def load_resident_keys(pin: str, *, application: str = 'ssh:',
+                       user: Optional[str] = None,
+                       touch_required: bool = True) -> Sequence[SSHKey]:
+    """Load keys resident on attached FIDO2 security keys
+
+       This function loads keys resident on any FIDO2 security keys
+       currently attached to the system. The user name associated
+       with each key is returned in the key's comment field.
+
+       :param pin:
+           The PIN to use to access the security keys.
+       :param application: (optional)
+           The application name associated with the keys to load,
+           defaulting to `'ssh:'`.
+       :param user: (optional)
+           The user name associated with the keys to load. By default,
+           keys for all users are loaded.
+       :param touch_required: (optional)
+           Whether or not to require the user to touch the security key
+           when authenticating with it, defaulting to `True`.
+ :type application: `str` + :type user: `str` + :type pin: `str` + :type touch_required: `bool` + + """ + + application = application.encode('utf-8') + flags = SSH_SK_USER_PRESENCE_REQD if touch_required else 0 + reserved = b'' + + try: + resident_keys = sk_get_resident(application, user, pin) + except ValueError as exc: + raise KeyImportError(str(exc)) from None + + result: List[SSHKey] = [] + + for sk_alg, name, public_value, key_handle in resident_keys: + handler, key_params = _sk_alg_map[sk_alg] + key_params += (public_value, application, flags, key_handle, reserved) + + key = handler.make_private(key_params) + key.set_comment(name) + + result.append(key) + + return result diff --git a/venv/lib/python3.12/site-packages/asyncssh/py.typed b/venv/lib/python3.12/site-packages/asyncssh/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/asyncssh/rsa.py b/venv/lib/python3.12/site-packages/asyncssh/rsa.py new file mode 100644 index 0000000..c1b67c4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/rsa.py @@ -0,0 +1,321 @@ +# Copyright (c) 2013-2022 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""RSA public key encryption handler""" + +from typing import Optional, Tuple, Union, cast + +from .asn1 import ASN1DecodeError, ObjectIdentifier, der_encode, der_decode +from .crypto import RSAPrivateKey, RSAPublicKey +from .misc import all_ints +from .packet import MPInt, String, SSHPacket +from .public_key import SSHKey, SSHOpenSSHCertificateV01, KeyExportError +from .public_key import register_public_key_alg, register_certificate_alg +from .public_key import register_x509_certificate_alg + + +_hash_algs = {b'ssh-rsa': 'sha1', + b'rsa-sha2-256': 'sha256', + b'rsa-sha2-512': 'sha512', + b'ssh-rsa-sha224@ssh.com': 'sha224', + b'ssh-rsa-sha256@ssh.com': 'sha256', + b'ssh-rsa-sha384@ssh.com': 'sha384', + b'ssh-rsa-sha512@ssh.com': 'sha512', + b'rsa1024-sha1': 'sha1', + b'rsa2048-sha256': 'sha256'} + + +_PrivateKeyArgs = Tuple[int, int, int, int, int, int, int, int] +_PrivateKeyConstructArgs = Tuple[int, int, int, int, int, int, int, int, bool] +_PublicKeyArgs = Tuple[int, int] + + +_default_skip_rsa_key_validation = False + + +def set_default_skip_rsa_key_validation(skip_validation: bool) -> None: + """Set whether to disable RSA key validation in OpenSSL + + OpenSSL 3.x does additional validation when loading RSA keys + as an added security measure. However, the result is that + loading a key can take significantly longer than it did before. + + If all your RSA keys are coming from a trusted source, you can + call this function with a value of `True` to default to skipping + these checks on RSA keys, reducing the cost back down to what it + was in earlier releases. + + This can also be set on a case by case basis by using the new + `unsafe_skip_rsa_key_validation` argument on the functions used + to load keys. 
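For example, a single trusted key can be loaded with `read_private_key(path, unsafe_skip_rsa_key_validation=True)` while leaving the global default unchanged.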
This will only affect loading keys of type RSA. + + .. note:: The extra cost only applies to loading existing keys, and + not to generating new keys. Also, in cases where a key is + used repeatedly, it can be loaded once into an `SSHKey` + object and reused without having to pay the cost each time. + So, this call should not be needed in most applications. + + If an application does need this, it is strongly + recommended that the `unsafe_skip_rsa_key_validation` + argument be used rather than using this function to + change the default behavior for all load operations. + + """ + + # pylint: disable=global-statement + + global _default_skip_rsa_key_validation + + _default_skip_rsa_key_validation = skip_validation + + +class RSAKey(SSHKey): + """Handler for RSA public key encryption""" + + _key: Union[RSAPrivateKey, RSAPublicKey] + + algorithm = b'ssh-rsa' + default_x509_hash = 'sha256' + pem_name = b'RSA' + pkcs8_oid = ObjectIdentifier('1.2.840.113549.1.1.1') + sig_algorithms = (b'rsa-sha2-256', b'rsa-sha2-512', + b'ssh-rsa-sha224@ssh.com', b'ssh-rsa-sha256@ssh.com', + b'ssh-rsa-sha384@ssh.com', b'ssh-rsa-sha512@ssh.com', + b'ssh-rsa') + cert_sig_algorithms = (b'rsa-sha2-256', b'rsa-sha2-512', b'ssh-rsa') + cert_algorithms = tuple(alg + b'-cert-v01@openssh.com' + for alg in cert_sig_algorithms) + x509_sig_algorithms = (b'rsa2048-sha256', b'ssh-rsa') + x509_algorithms = tuple(b'x509v3-' + alg for alg in x509_sig_algorithms) + all_sig_algorithms = set(x509_sig_algorithms + sig_algorithms) + + def __eq__(self, other: object) -> bool: + # This isn't protected access - both objects are RSAKey instances + # pylint: disable=protected-access + + if not isinstance(other, RSAKey): + return NotImplemented + + return (self._key.n == other._key.n and + self._key.e == other._key.e and + self._key.d == other._key.d) + + def __hash__(self) -> int: + return hash((self._key.n, self._key.e, self._key.d, + self._key.p, self._key.q)) + + @classmethod + def generate(cls, algorithm: bytes, *, # type: ignore + key_size: int = 2048, exponent: int = 65537) -> 'RSAKey': + """Generate a new RSA private key""" + + # pylint: disable=arguments-differ,unused-argument + + return cls(RSAPrivateKey.generate(key_size, exponent)) + + @classmethod + def make_private(cls, key_params: object) -> SSHKey: + """Construct an RSA private key""" + + n, e, d, p, q, dmp1, dmq1, iqmp, unsafe_skip_rsa_key_validation = \ + cast(_PrivateKeyConstructArgs, key_params) + + if unsafe_skip_rsa_key_validation is None: + unsafe_skip_rsa_key_validation = _default_skip_rsa_key_validation + + return cls(RSAPrivateKey.construct(n, e, d, p, q, dmp1, dmq1, iqmp, + unsafe_skip_rsa_key_validation)) + + @classmethod + def make_public(cls, key_params: object) -> SSHKey: + """Construct an RSA public key""" + + n, e = cast(_PublicKeyArgs, key_params) + + return cls(RSAPublicKey.construct(n, e)) + + @classmethod + def decode_pkcs1_private(cls, key_data: object) -> \ + Optional[_PrivateKeyArgs]: + """Decode a PKCS#1 format RSA private key""" + + if (isinstance(key_data, tuple) and all_ints(key_data) and + len(key_data) >= 9): + return cast(_PrivateKeyArgs, key_data[1:9]) + else: + return None + + @classmethod + def decode_pkcs1_public(cls, key_data: object) -> \ + Optional[_PublicKeyArgs]: + """Decode a PKCS#1 format RSA public key""" + + if (isinstance(key_data, tuple) and all_ints(key_data) and + len(key_data) == 2): + return cast(_PublicKeyArgs, key_data) + else: + return None + + @classmethod + def decode_pkcs8_private(cls, alg_params: object, + data: 
bytes) -> Optional[_PrivateKeyArgs]: + """Decode a PKCS#8 format RSA private key""" + + if alg_params is not None: + return None + + try: + key_data = der_decode(data) + except ASN1DecodeError: + return None + + return cls.decode_pkcs1_private(key_data) + + @classmethod + def decode_pkcs8_public(cls, alg_params: object, + data: bytes) -> Optional[_PublicKeyArgs]: + """Decode a PKCS#8 format RSA public key""" + + if alg_params is not None: + return None + + try: + key_data = der_decode(data) + except ASN1DecodeError: + return None + + return cls.decode_pkcs1_public(key_data) + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> _PrivateKeyArgs: + """Decode an SSH format RSA private key""" + + n = packet.get_mpint() + e = packet.get_mpint() + d = packet.get_mpint() + iqmp = packet.get_mpint() + p = packet.get_mpint() + q = packet.get_mpint() + + return n, e, d, p, q, d % (p-1), d % (q-1), iqmp + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> _PublicKeyArgs: + """Decode an SSH format RSA public key""" + + e = packet.get_mpint() + n = packet.get_mpint() + + return n, e + + def encode_pkcs1_private(self) -> object: + """Encode a PKCS#1 format RSA private key""" + + if not self._key.d: + raise KeyExportError('Key is not private') + + return (0, self._key.n, self._key.e, self._key.d, self._key.p, + self._key.q, self._key.dmp1, self._key.dmq1, self._key.iqmp) + + def encode_pkcs1_public(self) -> object: + """Encode a PKCS#1 format RSA public key""" + + return self._key.n, self._key.e + + def encode_pkcs8_private(self) -> Tuple[object, object]: + """Encode a PKCS#8 format RSA private key""" + + return None, der_encode(self.encode_pkcs1_private()) + + def encode_pkcs8_public(self) -> Tuple[object, object]: + """Encode a PKCS#8 format RSA public key""" + + return None, der_encode(self.encode_pkcs1_public()) + + def encode_ssh_private(self) -> bytes: + """Encode an SSH format RSA private key""" + + if not self._key.d: + raise KeyExportError('Key is not private') + + assert self._key.iqmp is not None + assert self._key.p is not None + assert self._key.q is not None + + return b''.join((MPInt(self._key.n), MPInt(self._key.e), + MPInt(self._key.d), MPInt(self._key.iqmp), + MPInt(self._key.p), MPInt(self._key.q))) + + def encode_ssh_public(self) -> bytes: + """Encode an SSH format RSA public key""" + + return b''.join((MPInt(self._key.e), MPInt(self._key.n))) + + def encode_agent_cert_private(self) -> bytes: + """Encode RSA certificate private key data for agent""" + + if not self._key.d: + raise KeyExportError('Key is not private') + + assert self._key.iqmp is not None + assert self._key.p is not None + assert self._key.q is not None + + return b''.join((MPInt(self._key.d), MPInt(self._key.iqmp), + MPInt(self._key.p), MPInt(self._key.q))) + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Compute an SSH-encoded signature of the specified data""" + + if not self._key.d: + raise ValueError('Private key needed for signing') + + return String(self._key.sign(data, _hash_algs[sig_algorithm])) + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Verify an SSH-encoded signature of the specified data""" + + sig = packet.get_string() + packet.check_end() + + return self._key.verify(data, sig, _hash_algs[sig_algorithm]) + + def encrypt(self, data: bytes, algorithm: bytes) -> Optional[bytes]: + """Encrypt a block of data with this key""" + + pub_key = cast(RSAPublicKey, self._key) + return pub_key.encrypt(data, 
_hash_algs[algorithm]) + + def decrypt(self, data: bytes, algorithm: bytes) -> Optional[bytes]: + """Decrypt a block of data with this key""" + + priv_key = cast(RSAPrivateKey, self._key) + return priv_key.decrypt(data, _hash_algs[algorithm]) + + +register_public_key_alg(b'ssh-rsa', RSAKey, True) + +for _alg in RSAKey.cert_sig_algorithms: + register_certificate_alg(1, _alg, _alg + b'-cert-v01@openssh.com', + RSAKey, SSHOpenSSHCertificateV01, True) + +for _alg in RSAKey.x509_algorithms: + register_x509_certificate_alg(_alg, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/saslprep.py b/venv/lib/python3.12/site-packages/asyncssh/saslprep.py new file mode 100644 index 0000000..569e502 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/saslprep.py @@ -0,0 +1,116 @@ +# Copyright (c) 2013-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SASLprep implementation + + This module implements the stringprep algorithm defined in RFC 3454 + and the SASLprep profile of stringprep defined in RFC 4013. This + profile is used to normalize usernames and passwords sent in the + SSH protocol. + +""" + +# The stringprep module should not be flagged as deprecated +# pylint: disable=deprecated-module +import stringprep +# pylint: enable=deprecated-module +from typing import Callable, Optional, Sequence +import unicodedata + + +class SASLPrepError(ValueError): + """Invalid data provided to saslprep""" + + +def _check_bidi(s: str) -> None: + """Enforce bidirectional character check from RFC 3454 (stringprep)""" + + r_and_al_cat = False + l_cat = False + + for c in s: + if not r_and_al_cat and stringprep.in_table_d1(c): + r_and_al_cat = True + + if not l_cat and stringprep.in_table_d2(c): + l_cat = True + + if r_and_al_cat and l_cat: + raise SASLPrepError('Both RandALCat and LCat characters present') + + if r_and_al_cat and not (stringprep.in_table_d1(s[0]) and + stringprep.in_table_d1(s[-1])): + raise SASLPrepError('RandALCat character not at both start and end') + + +def _stringprep(s: str, check_unassigned: bool, + mapping: Optional[Callable[[str], str]], + normalization: str, + prohibited: Sequence[Callable[[str], bool]], + bidi: bool) -> str: + """Implement a stringprep profile as defined in RFC 3454""" + + if check_unassigned: # pragma: no branch + for c in s: + if stringprep.in_table_a1(c): + raise SASLPrepError('Unassigned character: %r' % c) + + if mapping: # pragma: no branch + s = mapping(s) + + if normalization: # pragma: no branch + s = unicodedata.normalize(normalization, s) + + if prohibited: # pragma: no branch + for c in s: + for lookup in prohibited: + if lookup(c): + raise SASLPrepError('Prohibited character: %r' % c) + + if bidi: # pragma: no branch + _check_bidi(s) + + return s + + +def _map_saslprep(s: str) -> str: + """Map stringprep table B.1 to nothing and C.1.2 to ASCII space""" + + r = [] + + for c in s: + if 
stringprep.in_table_c12(c): + r.append(' ') + elif not stringprep.in_table_b1(c): + r.append(c) + + return ''.join(r) + + +def saslprep(s: str) -> str: + """Implement SASLprep profile defined in RFC 4013""" + + prohibited = (stringprep.in_table_c12, stringprep.in_table_c21_c22, + stringprep.in_table_c3, stringprep.in_table_c4, + stringprep.in_table_c5, stringprep.in_table_c6, + stringprep.in_table_c7, stringprep.in_table_c8, + stringprep.in_table_c9) + + return _stringprep(s, True, _map_saslprep, 'NFKC', prohibited, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/scp.py b/venv/lib/python3.12/site-packages/asyncssh/scp.py new file mode 100644 index 0000000..85b5f22 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/scp.py @@ -0,0 +1,1083 @@ +# Copyright (c) 2017-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation +# Jonathan Slenders - proposed changes to allow SFTP server callbacks +# to be coroutines + +"""SCP handlers""" + +import argparse +import asyncio +import posixpath +from pathlib import PurePath +import shlex +import string +import sys +from types import TracebackType +from typing import TYPE_CHECKING, AsyncIterator, List, NoReturn, Optional +from typing import Sequence, Tuple, Type, Union, cast +from typing_extensions import Protocol + +from .constants import DEFAULT_LANG +from .constants import FILEXFER_TYPE_REGULAR, FILEXFER_TYPE_DIRECTORY +from .logging import SSHLogger +from .misc import BytesOrStr, FilePath, HostPort, MaybeAwait +from .misc import async_context_manager, plural +from .sftp import SFTPAttrs, SFTPGlob, SFTPName, SFTPServer, SFTPServerFS +from .sftp import SFTPFileProtocol, SFTPError, SFTPFailure, SFTPBadMessage +from .sftp import SFTPConnectionLost, SFTPErrorHandler, SFTPProgressHandler +from .sftp import SFTP_BLOCK_SIZE, local_fs + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHServerChannel + from .connection import SSHClientConnection + from .stream import SSHReader, SSHWriter + + +_SCPConn = Union[None, bytes, str, HostPort, 'SSHClientConnection'] +_SCPPath = Union[bytes, FilePath] +_SCPConnPath = Union[Tuple[_SCPConn, _SCPPath], _SCPConn, _SCPPath] + + +class _SCPFSProtocol(Protocol): + """Protocol for accessing a filesystem during an SCP copy""" + + @staticmethod + def basename(path: bytes) -> bytes: + """Return the final component of a POSIX-style path""" + + async def stat(self, path: bytes) -> 'SFTPAttrs': + """Get attributes of a file or directory, following symlinks""" + + async def setstat(self, path: bytes, attrs: 'SFTPAttrs') -> None: + """Set attributes of a file or directory""" + + async def exists(self, path: bytes) -> bool: + """Return if a path exists""" + + async def isdir(self, path: bytes) -> bool: + """Return if the path refers to a directory""" + + def scandir(self, path: bytes) -> AsyncIterator[SFTPName]: + """Read the names and attributes of files in a 
directory""" + + async def mkdir(self, path: bytes) -> None: + """Create a directory""" + + @async_context_manager + async def open(self, path: bytes, mode: str) -> SFTPFileProtocol: + """Open a file""" + + +def _scp_error(exc_class: Type[Exception], reason: BytesOrStr, + path: Optional[bytes] = None, fatal: bool = False, + suppress_send: bool = False, + lang: str = DEFAULT_LANG) -> Exception: + """Construct SCP version of SFTPError exception""" + + if isinstance(reason, bytes): + reason = reason.decode('utf-8', errors='replace') + + if path: + reason = reason + ': ' + path.decode('utf-8', errors='replace') + + exc = exc_class(reason, lang) + + setattr(exc, 'fatal', fatal) + setattr(exc, 'suppress_send', suppress_send) + + return exc + + +def _parse_cd_args(args: bytes) -> Tuple[int, int, bytes]: + """Parse arguments to an SCP copy or dir request""" + + try: + permissions, size, name = args.split(None, 2) + return int(permissions, 8), int(size), name + except ValueError: + raise _scp_error(SFTPBadMessage, + 'Invalid copy or dir request') from None + + +def _parse_t_args(args: bytes) -> Tuple[int, int]: + """Parse argument to an SCP time request""" + + try: + mtime, _, atime, _ = args.split() + return int(atime), int(mtime) + except ValueError: + raise _scp_error(SFTPBadMessage, 'Invalid time request') from None + + +async def _parse_path(path: _SCPConnPath, **kwargs) -> \ + Tuple[Optional['SSHClientConnection'], _SCPPath, bool]: + """Convert an SCP path into an SSHClientConnection and path""" + + # pylint: disable=cyclic-import,import-outside-toplevel + from . import connect + + conn: _SCPConn + + if isinstance(path, tuple): + conn, path = cast(Tuple[_SCPConn, _SCPPath], path) + elif isinstance(path, str) and sys.platform == 'win32' and \ + path[:1] in string.ascii_letters and \ + path[1:2] == ':': # pragma: no cover (win32) + conn = None + elif isinstance(path, str) and ':' in path: + conn, path = path.split(':', 1) + elif isinstance(path, bytes) and b':' in path: + conn, path = path.split(b':', 1) + conn = conn.decode('utf-8') + elif isinstance(path, (bytes, str, PurePath)): + conn = None + else: + conn = path + path = b'.' 
+ + if isinstance(conn, str): + close_conn = True + conn = await connect(conn, **kwargs) + elif isinstance(conn, tuple): + close_conn = True + conn = await connect(*conn, **kwargs) + else: + close_conn = False + + return (cast(Optional['SSHClientConnection'], conn), + cast(_SCPPath, path), close_conn) + + +async def _start_remote(conn: 'SSHClientConnection', source: bool, + must_be_dir: bool, preserve: bool, + recurse: bool, path: _SCPPath) -> \ + Tuple['SSHReader[bytes]', 'SSHWriter[bytes]']: + """Start remote SCP server""" + + if isinstance(path, PurePath): + path = str(path) + + if isinstance(path, str): + path = path.encode('utf-8') + + command = (b'scp ' + (b'-f ' if source else b'-t ') + + (b'-d ' if must_be_dir else b'') + + (b'-p ' if preserve else b'') + + (b'-r ' if recurse else b'') + path) + + conn.logger.get_child('sftp').info('Starting remote SCP, args: %s', + command[4:]) + + writer, reader, _ = await conn.open_session(command, encoding=None) + + return reader, writer + + +class _SCPArgs(argparse.Namespace): + """SCP command line arguments""" + + path: str + source: bool + must_be_dir: bool + preserve: bool + recurse: bool + + +class _SCPArgParser(argparse.ArgumentParser): + """A parser for SCP arguments""" + + def __init__(self) -> None: + super().__init__(add_help=False) + + group = self.add_mutually_exclusive_group(required=True) + group.add_argument('-f', dest='source', action='store_true') + group.add_argument('-t', dest='source', action='store_false') + + self.add_argument('-d', dest='must_be_dir', action='store_true') + self.add_argument('-p', dest='preserve', action='store_true') + self.add_argument('-r', dest='recurse', action='store_true') + self.add_argument('-v', dest='verbose', action='store_true') + + self.add_argument('path') + + def error(self, message: str) -> NoReturn: + raise ValueError(message) + + def parse(self, command: str) -> _SCPArgs: + """Parse an SCP command""" + + return self.parse_args(shlex.split(command)[1:], namespace=_SCPArgs()) + + +class _SCPHandler: + """SCP handler""" + + def __init__(self, reader: 'SSHReader[bytes]', writer: 'SSHWriter[bytes]', + error_handler: SFTPErrorHandler = None, server: bool = False): + self._reader = reader + self._writer = writer + self._error_handler = error_handler + self._server = server + + self._logger = reader.logger.get_child('sftp') + + async def __aenter__(self) -> '_SCPHandler': # pragma: no cover + """Allow _SCPHandler to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> \ + bool: # pragma: no cover + """Wait for file close when used as an async context manager""" + + await self.close() + return False + + @property + def logger(self) -> SSHLogger: + """A logger associated with this SCP handler""" + + return self._logger + + async def await_response(self) -> Optional[Exception]: + """Wait for an SCP response""" + + result = await self._reader.read(1) + + if result != b'\0': + reason = await self._reader.readline() + + if not result or not reason.endswith(b'\n'): + raise _scp_error(SFTPConnectionLost, 'Connection lost', + fatal=True, suppress_send=True) + + if result not in b'\x01\x02': + reason = result + reason + + return _scp_error(SFTPFailure, reason[:-1], + fatal=result != b'\x01', suppress_send=True) + + self.logger.debug1('Received SCP OK') + + return None + + def send_request(self, *args: bytes) -> None: + """Send an SCP request""" + + 
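# SCP control requests are single text lines, e.g. b'C0644 123 name'
+        # (copy file), b'D0755 0 name' (enter directory), b'T<mtime> 0
+        # <atime> 0' (set times) or b'E' (end directory), each terminated
+        # by the newline added below
+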
request = b''.join(args) + + self.logger.debug1('Sending SCP request: %s', request) + + self._writer.write(request + b'\n') + + async def make_request(self, *args: bytes) -> None: + """Send an SCP request and wait for a response""" + + self.send_request(*args) + + exc = await self.await_response() + + if exc: + raise exc + + async def send_data(self, data: bytes) -> None: + """Send SCP file data""" + + self.logger.debug1('Sending %s', plural(len(data), 'SCP data byte')) + + self._writer.write(data) + await self._writer.drain() + await asyncio.sleep(0) + + def send_ok(self) -> None: + """Send an SCP OK response""" + + self.logger.debug1('Sending SCP OK') + + self._writer.write(b'\0') + + def send_error(self, exc: Exception) -> None: + """Send an SCP error response""" + + if isinstance(exc, SFTPError): + reason = exc.reason.encode('utf-8') + elif isinstance(exc, OSError): # pragma: no branch (win32) + reason = exc.strerror.encode('utf-8') + + filename = cast(BytesOrStr, exc.filename) + + if filename: + if isinstance(filename, str): # pragma: no cover (win32) + filename = filename.encode('utf-8') + + reason += b': ' + filename + else: # pragma: no cover (win32) + reason = str(exc).encode('utf-8') + + fatal = cast(bool, getattr(exc, 'fatal', False)) + + self.logger.debug1('Sending SCP %serror: %s', + 'fatal ' if fatal else '', reason) + + self._writer.write((b'\x02' if fatal else b'\x01') + + b'scp: ' + reason + b'\n') + + async def recv_request(self) -> Tuple[Optional[bytes], Optional[bytes]]: + """Receive SCP request""" + + request = await self._reader.readline() + + if not request: + return None, None + + action, args = request[:1], request[1:-1] + + if action not in b'\x01\x02': + self.logger.debug1('Received SCP request: %s%s', action, args) + else: + self.logger.debug1('Received SCP %serror: %s', + 'fatal ' if action != b'\x01' else '', args) + + return action, args + + + async def recv_data(self, n: int) -> bytes: + """Receive SCP file data""" + + data = await self._reader.read(n) + + self.logger.debug1('Received %s', plural(len(data), 'SCP data byte')) + + return data + + def handle_error(self, exc: Exception) -> None: + """Handle an SCP error""" + + if isinstance(exc, BrokenPipeError): + exc = _scp_error(SFTPConnectionLost, 'Connection lost', + fatal=True, suppress_send=True) + + if not getattr(exc, 'suppress_send', False): + self.send_error(exc) + + self.logger.debug1('Handling SCP error: %s', str(exc)) + + if getattr(exc, 'fatal', False) or self._error_handler is None: + raise exc from None + elif self._error_handler: + self._error_handler(exc) + + async def close(self) -> None: + """Close an SCP session""" + + self.logger.info('Stopping remote SCP') + + if self._server: + cast('SSHServerChannel', self._writer.channel).exit(0) + else: + self._writer.close() + + await self._writer.channel.wait_closed() + + +class _SCPSource(_SCPHandler): + """SCP handler for sending files""" + + def __init__(self, fs: _SCPFSProtocol, reader: 'SSHReader[bytes]', + writer: 'SSHWriter[bytes]', preserve: bool, recurse: bool, + block_size: int = SFTP_BLOCK_SIZE, + progress_handler: SFTPProgressHandler = None, + error_handler: SFTPErrorHandler = None, server: bool = False): + super().__init__(reader, writer, error_handler, server) + + self._fs = fs + self._preserve = preserve + self._recurse = recurse + self._block_size = block_size + self._progress_handler = progress_handler + + async def _make_cd_request(self, action: bytes, attrs: SFTPAttrs, + size: int, path: bytes) -> None: + """Make an SCP copy or 
dir request""" + + assert attrs.permissions is not None + + args = '%04o %d ' % (attrs.permissions & 0o7777, size) + await self.make_request(action, args.encode('ascii'), + self._fs.basename(path)) + + async def _make_t_request(self, attrs: SFTPAttrs) -> None: + """Make an SCP time request""" + + self.logger.info(' Preserving attrs: %s', + SFTPAttrs(atime=attrs.atime, mtime=attrs.mtime)) + + assert attrs.mtime is not None + assert attrs.atime is not None + + args = '%d 0 %d 0' % (attrs.mtime, attrs.atime) + await self.make_request(b'T', args.encode('ascii')) + + async def _send_file(self, srcpath: bytes, + dstpath: bytes, attrs: SFTPAttrs) -> None: + """Send a file over SCP""" + + assert attrs.size is not None + + file_obj = await self._fs.open(srcpath, 'rb') + size = attrs.size + local_exc = None + offset = 0 + + self.logger.info(' Sending file %s, size %d', srcpath, size) + + try: + await self._make_cd_request(b'C', attrs, size, srcpath) + + if self._progress_handler and size == 0: + self._progress_handler(srcpath, dstpath, 0, 0) + + while offset < size: + blocklen = min(size - offset, self._block_size) + + if local_exc: + data = blocklen * b'\0' + else: + try: + data = cast(bytes, + await file_obj.read(blocklen, offset)) + + if not data: + raise _scp_error(SFTPFailure, 'Unexpected EOF') + except (OSError, SFTPError) as exc: + local_exc = exc + + await self.send_data(data) + offset += len(data) + + if self._progress_handler: + self._progress_handler(srcpath, dstpath, offset, size) + finally: + await file_obj.close() + + if local_exc: + self.send_error(local_exc) + setattr(local_exc, 'suppress_send', True) + else: + self.send_ok() + + remote_exc = await self.await_response() + final_exc = remote_exc or local_exc + + if final_exc: + raise final_exc + + async def _send_dir(self, srcpath: bytes, dstpath: bytes, + attrs: SFTPAttrs) -> None: + """Send directory over SCP""" + + self.logger.info(' Starting send of directory %s', srcpath) + + await self._make_cd_request(b'D', attrs, 0, srcpath) + + async for entry in self._fs.scandir(srcpath): + name = cast(bytes, entry.filename) + + if name in (b'.', b'..'): + continue + + await self._send_files(posixpath.join(srcpath, name), + posixpath.join(dstpath, name), + entry.attrs) + + await self.make_request(b'E') + + self.logger.info(' Finished send of directory %s', srcpath) + + async def _send_files(self, srcpath: bytes, dstpath: bytes, + attrs: SFTPAttrs) -> None: + """Send files via SCP""" + + try: + if self._preserve: + await self._make_t_request(attrs) + + if self._recurse and attrs.type == FILEXFER_TYPE_DIRECTORY: + await self._send_dir(srcpath, dstpath, attrs) + elif attrs.type == FILEXFER_TYPE_REGULAR: + await self._send_file(srcpath, dstpath, attrs) + else: + raise _scp_error(SFTPFailure, 'Not a regular file', srcpath) + except (OSError, SFTPError, ValueError) as exc: + self.handle_error(exc) + + async def run(self, srcpath: _SCPPath) -> None: + """Start SCP transfer""" + + try: + if isinstance(srcpath, PurePath): + srcpath = str(srcpath) + + if isinstance(srcpath, str): + srcpath = srcpath.encode('utf-8') + + exc = await self.await_response() + + if exc: + raise exc + + for name in await SFTPGlob(self._fs).match(srcpath): + await self._send_files(cast(bytes, name.filename), + b'', name.attrs) + except (OSError, SFTPError) as exc: + self.handle_error(exc) + finally: + await self.close() + + +class _SCPSink(_SCPHandler): + """SCP handler for receiving files""" + + def __init__(self, fs: _SCPFSProtocol, reader: 'SSHReader[bytes]', + writer: 
'SSHWriter[bytes]', must_be_dir: bool, preserve: bool, + recurse: bool, block_size: int = SFTP_BLOCK_SIZE, + progress_handler: SFTPProgressHandler = None, + error_handler: SFTPErrorHandler = None, server: bool = False): + super().__init__(reader, writer, error_handler, server) + + self._fs = fs + self._must_be_dir = must_be_dir + self._preserve = preserve + self._recurse = recurse + self._block_size = block_size + self._progress_handler = progress_handler + + async def _recv_file(self, srcpath: bytes, + dstpath: bytes, size: int) -> None: + """Receive a file via SCP""" + + file_obj = await self._fs.open(dstpath, 'wb') + local_exc = None + offset = 0 + + self.logger.info(' Receiving file %s, size %d', dstpath, size) + + try: + self.send_ok() + + if self._progress_handler and size == 0: + self._progress_handler(srcpath, dstpath, 0, 0) + + while offset < size: + blocklen = min(size - offset, self._block_size) + data = await self.recv_data(blocklen) + + if not data: + raise _scp_error(SFTPConnectionLost, 'Connection lost', + fatal=True, suppress_send=True) + + if not local_exc: + try: + await file_obj.write(data, offset) + except (OSError, SFTPError) as exc: + local_exc = exc + + offset += len(data) + + if self._progress_handler: + self._progress_handler(srcpath, dstpath, offset, size) + finally: + await file_obj.close() + + remote_exc = await self.await_response() + + if local_exc: + self.send_error(local_exc) + setattr(local_exc, 'suppress_send',True) + else: + self.send_ok() + + final_exc = remote_exc or local_exc + + if final_exc: + raise final_exc + + async def _recv_dir(self, srcpath: bytes, dstpath: bytes) -> None: + """Receive a directory over SCP""" + + if not self._recurse: + raise _scp_error(SFTPBadMessage, + 'Directory received without recurse') + + self.logger.info(' Starting receive of directory %s', dstpath) + + if await self._fs.exists(dstpath): + if not await self._fs.isdir(dstpath): + raise _scp_error(SFTPFailure, 'Not a directory', dstpath) + else: + await self._fs.mkdir(dstpath) + + await self._recv_files(srcpath, dstpath) + + self.logger.info(' Finished receive of directory %s', dstpath) + + async def _recv_files(self, srcpath: bytes, dstpath: bytes) -> None: + """Receive files over SCP""" + + self.send_ok() + + attrs = SFTPAttrs() + + while True: + action, args = await self.recv_request() + + if not action: + break + + assert args is not None + + try: + if action in b'\x01\x02': + raise _scp_error(SFTPFailure, args, fatal=action != b'\x01', + suppress_send=True) + elif action == b'T': + if self._preserve: + attrs.atime, attrs.mtime = _parse_t_args(args) + + self.send_ok() + elif action == b'E': + self.send_ok() + break + elif action in b'CD': + try: + attrs.permissions, size, name = _parse_cd_args(args) + + new_srcpath = posixpath.join(srcpath, name) + + if await self._fs.isdir(dstpath): + new_dstpath = posixpath.join(dstpath, name) + else: + new_dstpath = dstpath + + if action == b'D': + await self._recv_dir(new_srcpath, new_dstpath) + else: + await self._recv_file(new_srcpath, + new_dstpath, size) + + if self._preserve: + self.logger.info(' Preserving attrs: %s', attrs) + await self._fs.setstat(new_dstpath, attrs) + finally: + attrs = SFTPAttrs() + else: + raise _scp_error(SFTPBadMessage, 'Unknown request') + except (OSError, SFTPError) as exc: + self.handle_error(exc) + + async def run(self, dstpath: _SCPPath) -> None: + """Start SCP file receive""" + + try: + if isinstance(dstpath, PurePath): + dstpath = str(dstpath) + + if isinstance(dstpath, str): + dstpath = 
dstpath.encode('utf-8') + + if self._must_be_dir and not await self._fs.isdir(dstpath): + self.handle_error(_scp_error(SFTPFailure, 'Not a directory', + dstpath)) + else: + await self._recv_files(b'', dstpath) + except (OSError, SFTPError, ValueError) as exc: + self.handle_error(exc) + finally: + await self.close() + + +class _SCPCopier: + """SCP handler for remote-to-remote copies""" + + def __init__(self, src_reader: 'SSHReader[bytes]', + src_writer: 'SSHWriter[bytes]', + dst_reader: 'SSHReader[bytes]', + dst_writer: 'SSHWriter[bytes]', + block_size: int = SFTP_BLOCK_SIZE, + progress_handler: SFTPProgressHandler = None, + error_handler: SFTPErrorHandler = None): + self._source = _SCPHandler(src_reader, src_writer) + self._sink = _SCPHandler(dst_reader, dst_writer) + self._logger = self._source.logger + self._block_size = block_size + self._progress_handler = progress_handler + self._error_handler = error_handler + + @property + def logger(self) -> SSHLogger: + """A logger associated with this SCP handler""" + + return self._logger + + def _handle_error(self, exc: Exception) -> None: + """Handle an SCP error""" + + if isinstance(exc, BrokenPipeError): + exc = _scp_error(SFTPConnectionLost, 'Connection lost', fatal=True) + + self.logger.debug1('Handling SCP error: %s', str(exc)) + + if self._error_handler and not getattr(exc, 'fatal', False): + self._error_handler(exc) + else: + raise exc + + async def _forward_response(self, src: _SCPHandler, + dst: _SCPHandler) -> Optional[Exception]: + """Forward an SCP response between two remote SCP servers""" + + # pylint: disable=no-self-use + + try: + exc = await src.await_response() + + if exc: + dst.send_error(exc) + return exc + else: + dst.send_ok() + return None + except OSError as exc: + return exc + + async def _copy_file(self, path: bytes, size: int) -> None: + """Copy a file from one remote SCP server to another""" + + self.logger.info(' Copying file %s, size %d', path, size) + + offset = 0 + + if self._progress_handler and size == 0: + self._progress_handler(path, path, 0, 0) + + while offset < size: + blocklen = min(size - offset, self._block_size) + data = await self._source.recv_data(blocklen) + + if not data: + raise _scp_error(SFTPConnectionLost, 'Connection lost', + fatal=True) + + await self._sink.send_data(data) + offset += len(data) + + if self._progress_handler: + self._progress_handler(path, path, offset, size) + + source_exc = await self._forward_response(self._source, self._sink) + sink_exc = await self._forward_response(self._sink, self._source) + + exc = sink_exc or source_exc + + if exc: + self._handle_error(exc) + + async def _copy_files(self) -> None: + """Copy files from one SCP server to another""" + + exc = await self._forward_response(self._sink, self._source) + + if exc: + self._handle_error(exc) + + pathlist: List[bytes] = [] + attrlist: List[SFTPAttrs] = [] + attrs = SFTPAttrs() + + while True: + action, args = await self._source.recv_request() + + if not action: + break + + assert args is not None + + self._sink.send_request(action, args) + + if action in b'\x01\x02': + exc = _scp_error(SFTPFailure, args, fatal=action != b'\x01') + self._handle_error(exc) + continue + + exc = await self._forward_response(self._sink, self._source) + + if exc: + self._handle_error(exc) + continue + + if action in b'CD': + try: + attrs.permissions, size, name = _parse_cd_args(args) + + if action == b'C': + path = b'/'.join(pathlist + [name]) + await self._copy_file(path, size) + self.logger.info(' Preserving attrs: %s', attrs) + 
else:
+                        pathlist.append(name)
+                        attrlist.append(attrs)
+                        self.logger.info('  Starting copy of directory %s',
+                                         b'/'.join(pathlist))
+                finally:
+                    attrs = SFTPAttrs()
+            elif action == b'E':
+                if pathlist:
+                    self.logger.info('  Finished copy of directory %s',
+                                     b'/'.join(pathlist))
+
+                    pathlist.pop()
+                    attrs = attrlist.pop()
+
+                    self.logger.info('  Preserving attrs: %s', attrs)
+                else:
+                    break
+            elif action == b'T':
+                attrs.atime, attrs.mtime = _parse_t_args(args)
+            else:
+                raise _scp_error(SFTPBadMessage, 'Unknown SCP action')
+
+    async def run(self) -> None:
+        """Start SCP remote-to-remote transfer"""
+
+        try:
+            await self._copy_files()
+        except (OSError, SFTPError) as exc:
+            self._handle_error(exc)
+        finally:
+            await self._source.close()
+            await self._sink.close()
+
+
+async def scp(srcpaths: Union[_SCPConnPath, Sequence[_SCPConnPath]],
+              dstpath: _SCPConnPath = None, *, preserve: bool = False,
+              recurse: bool = False, block_size: int = SFTP_BLOCK_SIZE,
+              progress_handler: SFTPProgressHandler = None,
+              error_handler: SFTPErrorHandler = None, **kwargs) -> None:
+    """Copy files using SCP
+
+       This function is a coroutine which copies one or more files or
+       directories using the SCP protocol. Source and destination paths
+       can be `str` or `bytes` values to reference local files or can be
+       a tuple of the form `(conn, path)` where `conn` is an open
+       :class:`SSHClientConnection` to reference files and directories
+       on a remote system.
+
+       For convenience, a host name or tuple of the form `(host, port)`
+       can be provided in place of the :class:`SSHClientConnection` to
+       request that a new SSH connection be opened to a host using
+       default connect arguments. A `str` or `bytes` value of the form
+       `'host:path'` may also be used in place of the `(conn, path)`
+       tuple to make a new connection to the requested host on the
+       default SSH port.
+
+       Either a single source path or a sequence of source paths can be
+       provided, and each path can contain '*' and '?' wildcard characters
+       which can be used to match multiple source files or directories.
+
+       When copying a single file or directory, the destination path
+       can be either the full path to copy data into or the path to an
+       existing directory where the data should be placed. In the latter
+       case, the base file name from the source path will be used as the
+       destination name.
+
+       When copying multiple files, the destination path must refer to
+       a directory. If it doesn't already exist, a directory will be
+       created with that name.
+
+       If the destination path is an :class:`SSHClientConnection` without
+       a path or the path provided is empty, files are copied into the
+       default destination working directory.
+
+       If preserve is `True`, the access and modification times and
+       permissions of the original files and directories are set on the
+       copied files. However, due to the timing of when this information
+       is sent, the preserved access time will be what was set on the
+       source file before the copy begins. So, the access time on the
+       source file will no longer match the destination after the
+       transfer completes.
+
+       If recurse is `True` and the source path points at a directory,
+       the entire subtree under that directory is copied.
+
+       Symbolic links found on the source will have the contents of their
+       target copied rather than creating a destination symbolic link.
+       When using this option during a recursive copy, one needs to watch
+       out for links that result in loops. SCP does not provide a
+       mechanism for preserving links. If you need this, consider using
+       SFTP instead.
+
+       The block_size value controls the size of read and write operations
+       issued to copy the files. It defaults to 16 KB.
+
+       If progress_handler is specified, it will be called after each
+       block of a file is successfully copied. The arguments passed to
+       this handler will be the source path, destination path, bytes
+       copied so far, and total bytes in the file being copied. If
+       multiple source paths are provided or recurse is set to `True`,
+       the progress_handler will be called consecutively on each file
+       being copied.
+
+       If error_handler is specified and an error occurs during the copy,
+       this handler will be called with the exception instead of it being
+       raised. This is intended to primarily be used when multiple source
+       paths are provided or when recurse is set to `True`, to allow
+       error information to be collected without aborting the copy of the
+       remaining files. The error handler can raise an exception if it
+       wants the copy to completely stop. Otherwise, after an error, the
+       copy will continue starting with the next file.
+
+       If any other keyword arguments are specified, they will be passed
+       to the AsyncSSH connect() call when attempting to open any new SSH
+       connections needed to perform the file transfer.
+
+       :param srcpaths:
+           The paths of the source files or directories to copy
+       :param dstpath: (optional)
+           The path of the destination file or directory to copy into
+       :param preserve: (optional)
+           Whether or not to preserve the original file attributes
+       :param recurse: (optional)
+           Whether or not to recursively copy directories
+       :param block_size: (optional)
+           The block size to use for file reads and writes
+       :param progress_handler: (optional)
+           The function to call to report copy progress
+       :param error_handler: (optional)
+           The function to call when an error occurs
+       :type preserve: `bool`
+       :type recurse: `bool`
+       :type block_size: `int`
+       :type progress_handler: `callable`
+       :type error_handler: `callable`
+
+       :raises: | :exc:`OSError` if a local file I/O error occurs
+                | :exc:`SFTPError` if the server returns an error
+                | :exc:`ValueError` if both source and destination are local
+
+    """
+
+    if (isinstance(srcpaths, (bytes, str, PurePath)) or
+            (isinstance(srcpaths, tuple) and len(srcpaths) == 2)):
+        srcpaths = [srcpaths]  # type: ignore
+
+    srcpaths: Sequence[_SCPConnPath]
+
+    must_be_dir = len(srcpaths) > 1
+
+    dstconn, dstpath, close_dst = await _parse_path(dstpath, **kwargs)
+
+    try:
+        for srcpath in srcpaths:
+            srcconn, srcpath, close_src = await _parse_path(srcpath, **kwargs)
+
+            try:
+                if srcconn and dstconn:
+                    src_reader, src_writer = await _start_remote(
+                        srcconn, True, must_be_dir, preserve, recurse, srcpath)
+
+                    dst_reader, dst_writer = await _start_remote(
+                        dstconn, False, must_be_dir, preserve, recurse, dstpath)
+
+                    copier = _SCPCopier(src_reader, src_writer, dst_reader,
+                                        dst_writer, block_size,
+                                        progress_handler, error_handler)
+
+                    await copier.run()
+                elif srcconn:
+                    reader, writer = await _start_remote(
+                        srcconn, True, must_be_dir, preserve, recurse, srcpath)
+
+                    sink = _SCPSink(local_fs, reader, writer, must_be_dir,
+                                    preserve, recurse, block_size,
+                                    progress_handler, error_handler)
+
+                    await sink.run(dstpath)
+                elif dstconn:
+                    reader, writer = await _start_remote(
+                        dstconn, False, must_be_dir, preserve, recurse, dstpath)
+
+                    source = _SCPSource(local_fs, reader, writer,
+                                        preserve, recurse, block_size,
+                                        progress_handler, error_handler)
+
+                    await source.run(srcpath)
+                else:
+                    raise 
ValueError('Local copy not supported') + finally: + if close_src: + assert srcconn is not None + srcconn.close() + await srcconn.wait_closed() + finally: + if close_dst: + assert dstconn is not None + dstconn.close() + await dstconn.wait_closed() + + +def run_scp_server(sftp_server: SFTPServer, command: str, + stdin: 'SSHReader[bytes]', stdout: 'SSHWriter[bytes]', + stderr: 'SSHWriter[bytes]') -> MaybeAwait[None]: + """Return a handler for an SCP server session""" + + async def _run_handler() -> None: + """Run an SCP server to handle this request""" + + try: + await handler.run(args.path) + finally: + sftp_server.exit() + + try: + args = _SCPArgParser().parse(command) + except ValueError as exc: + stdin.logger.info('Error starting SCP server: %s', str(exc)) + stderr.write(b'scp: ' + str(exc).encode('utf-8') + b'\n') + cast('SSHServerChannel', stderr.channel).exit(1) + return None + + stdin.logger.info('Starting SCP server, args: %s', command[4:].strip()) + + fs = SFTPServerFS(sftp_server) + + handler: Union[_SCPSource, _SCPSink] + + if args.source: + handler = _SCPSource(fs, stdin, stdout, args.preserve, args.recurse, + error_handler=False, server=True) + else: + handler = _SCPSink(fs, stdin, stdout, args.must_be_dir, args.preserve, + args.recurse, error_handler=False, server=True) + + return _run_handler() diff --git a/venv/lib/python3.12/site-packages/asyncssh/server.py b/venv/lib/python3.12/site-packages/asyncssh/server.py new file mode 100644 index 0000000..0bab344 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/server.py @@ -0,0 +1,932 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH server protocol handler""" + +from typing import TYPE_CHECKING, Optional, Tuple, Union + +from .auth import KbdIntChallenge, KbdIntResponse +from .listener import SSHListener +from .misc import MaybeAwait +from .public_key import SSHKey +from .stream import SSHSocketSessionFactory, SSHServerSessionFactory + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .connection import SSHServerConnection, SSHAcceptHandler + from .channel import SSHServerChannel, SSHTCPChannel, SSHUNIXChannel + from .session import SSHServerSession, SSHTCPSession, SSHUNIXSession + + +_NewSession = Union[bool, 'SSHServerSession', SSHServerSessionFactory, + Tuple['SSHServerChannel', 'SSHServerSession'], + Tuple['SSHServerChannel', SSHServerSessionFactory]] +_NewTCPSession = Union[bool, 'SSHTCPSession', SSHSocketSessionFactory, + Tuple['SSHTCPChannel', 'SSHTCPSession'], + Tuple['SSHTCPChannel', SSHSocketSessionFactory]] +_NewUNIXSession = Union[bool, 'SSHUNIXSession', SSHSocketSessionFactory, + Tuple['SSHUNIXChannel', 'SSHUNIXSession'], + Tuple['SSHUNIXChannel', SSHSocketSessionFactory]] +_NewListener = Union[bool, 'SSHAcceptHandler', SSHListener] + + +class SSHServer: + """SSH server protocol handler + + Applications may subclass this when 
implementing an SSH server to + provide custom authentication and request handlers. + + The method :meth:`begin_auth` can be overridden to decide whether + or not authentication is required, and additional callbacks are + provided for each form of authentication in cases where authentication + information is not provided in the call to :func:`create_server`. + + In addition, the methods :meth:`session_requested`, + :meth:`connection_requested`, :meth:`server_requested`, + :meth:`unix_connection_requested`, or :meth:`unix_server_requested` + can be overridden to handle requests to open sessions or direct + connections or set up listeners for forwarded connections. + + .. note:: The authentication callbacks described here can be + defined as coroutines. However, they may be cancelled if + they are running when the SSH connection is closed by + the client. If they attempt to catch the CancelledError + exception to perform cleanup, they should make sure to + re-raise it to allow AsyncSSH to finish its own cleanup. + + """ + + # pylint: disable=no-self-use,unused-argument + + def connection_made(self, conn: 'SSHServerConnection') -> None: + """Called when a connection is made + + This method is called when a new TCP connection is accepted. The + `conn` parameter should be stored if needed for later use. + + :param conn: + The connection which was successfully opened + :type conn: :class:`SSHServerConnection` + + """ + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Called when a connection is lost or closed + + This method is called when a connection is closed. If the + connection is shut down cleanly, *exc* will be `None`. + Otherwise, it will be an exception explaining the reason for + the disconnect. + + """ + + def debug_msg_received(self, msg: str, lang: str, + always_display: bool) -> None: + """A debug message was received on this connection + + This method is called when the other end of the connection sends + a debug message. Applications should implement this method if + they wish to process these debug messages. + + :param msg: + The debug message sent + :param lang: + The language the message is in + :param always_display: + Whether or not to display the message + :type msg: `str` + :type lang: `str` + :type always_display: `bool` + + """ + + def begin_auth(self, username: str) -> MaybeAwait[bool]: + """Authentication has been requested by the client + + This method will be called when authentication is attempted for + the specified user. Applications should use this method to + prepare whatever state they need to complete the authentication, + such as loading in the set of authorized keys for that user. If + no authentication is required for this user, this method should + return `False` to cause the authentication to immediately + succeed. Otherwise, it should return `True` to indicate that + authentication should proceed. + + If blocking operations need to be performed to prepare the + state needed to complete the authentication, this method may + be defined as a coroutine. + + :param username: + The name of the user being authenticated + :type username: `str` + + :returns: A `bool` indicating whether authentication is required + + """ + + return True # pragma: no cover + + def auth_completed(self) -> None: + """Authentication was completed successfully + + This method is called when authentication has completed + successfully. 
Applications may use this method to perform + processing based on the authenticated username or options in + the authorized keys list or certificate associated with the + user before any sessions are opened or forwarding requests + are handled. + + """ + + def validate_gss_principal(self, username: str, user_principal: str, + host_principal: str) -> MaybeAwait[bool]: + """Return whether a GSS principal is valid for this user + + This method should return `True` if the specified user + principal is valid for the user being authenticated. It can + be overridden by applications wishing to perform their own + authentication. + + If blocking operations need to be performed to determine the + validity of the principal, this method may be defined as a + coroutine. + + By default, this method will return `True` only when the + name in the user principal exactly matches the username and + the domain of the user principal matches the domain of the + host principal. + + :param username: + The user being authenticated + :param user_principal: + The user principal sent by the client + :param host_principal: + The host principal sent by the server + :type username: `str` + :type user_principal: `str` + :type host_principal: `str` + + :returns: A `bool` indicating if the specified user principal + is valid for the user being authenticated + + """ + + host_domain = host_principal.rsplit('@')[-1] + return user_principal == username + '@' + host_domain + + def host_based_auth_supported(self) -> bool: + """Return whether or not host-based authentication is supported + + This method should return `True` if client host-based + authentication is supported. Applications wishing to support + it must have this method return `True` and implement + :meth:`validate_host_public_key` and/or :meth:`validate_host_ca_key` + to return whether or not the key provided by the client is valid + for the client host being authenticated. + + By default, it returns `False` indicating the client host + based authentication is not supported. + + :returns: A `bool` indicating if host-based authentication is + supported or not + + """ + + return False # pragma: no cover + + def validate_host_public_key(self, client_host: str, client_addr: str, + client_port: int, key: SSHKey) -> bool: + """Return whether key is an authorized host key for this client host + + Host key based client authentication can be supported by + passing authorized host keys in the `known_client_hosts` + argument of :func:`create_server`. However, for more flexibility + in matching on the allowed set of keys, this method can be + implemented by the application to do the matching itself. It + should return `True` if the specified key is a valid host key + for the client host being authenticated. + + This method may be called multiple times with different keys + provided by the client. Applications should precompute as + much as possible in the :meth:`begin_auth` method so that + this function can quickly return whether the key provided is + in the list. + + By default, this method returns `False` for all client host keys. + + .. note:: This function only needs to report whether the + public key provided is a valid key for this client + host. If it is, AsyncSSH will verify that the + client possesses the corresponding private key + before allowing the authentication to succeed. 
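+
+        As an illustration, an application might preload the allowed
+        client host keys in :meth:`begin_auth` and check fingerprints
+        here. This is a hypothetical sketch, not part of AsyncSSH, and
+        'client_host_keys.pub' is an assumed file of public keys::
+
+            import asyncssh
+
+            class MyServer(asyncssh.SSHServer):
+                def begin_auth(self, username: str) -> bool:
+                    keys = asyncssh.read_public_key_list(
+                        'client_host_keys.pub')
+                    # Precompute fingerprints so later calls are fast
+                    self._host_key_fps = {k.get_fingerprint() for k in keys}
+                    return True
+
+                def host_based_auth_supported(self) -> bool:
+                    return True
+
+                def validate_host_public_key(self, client_host, client_addr,
+                                             client_port, key) -> bool:
+                    return key.get_fingerprint() in self._host_key_fps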
+ + :param client_host: + The hostname of the client host + :param client_addr: + The IP address of the client host + :param client_port: + The port number on the client host + :param key: + The host public key sent by the client + :type client_host: `str` + :type client_addr: `str` + :type client_port: `int` + :type key: :class:`SSHKey` *public key* + + :returns: A `bool` indicating if the specified key is a valid + key for the client host being authenticated + + """ + + return False # pragma: no cover + + def validate_host_ca_key(self, client_host: str, client_addr: str, + client_port: int, key: SSHKey) -> bool: + """Return whether key is an authorized CA key for this client host + + Certificate based client host authentication can be + supported by passing authorized host CA keys in the + `known_client_hosts` argument of :func:`create_server`. + However, for more flexibility in matching on the allowed + set of keys, this method can be implemented by the application + to do the matching itself. It should return `True` if the + specified key is a valid certificate authority key for the + client host being authenticated. + + This method may be called multiple times with different keys + provided by the client. Applications should precompute as + much as possible in the :meth:`begin_auth` method so that + this function can quickly return whether the key provided is + in the list. + + By default, this method returns `False` for all CA keys. + + .. note:: This function only needs to report whether the + public key provided is a valid CA key for this + client host. If it is, AsyncSSH will verify that + the certificate is valid, that the client host is + one of the valid principals for the certificate, + and that the client possesses the private key + corresponding to the public key in the certificate + before allowing the authentication to succeed. + + :param client_host: + The hostname of the client host + :param client_addr: + The IP address of the client host + :param client_port: + The port number on the client host + :param key: + The public key which signed the certificate sent by the client + :type client_host: `str` + :type client_addr: `str` + :type client_port: `int` + :type key: :class:`SSHKey` *public key* + + :returns: A `bool` indicating if the specified key is a valid + CA key for the client host being authenticated + + """ + + return False # pragma: no cover + + def validate_host_based_user(self, username: str, client_host: str, + client_username: str) -> MaybeAwait[bool]: + """Return whether remote host and user is authorized for this user + + This method should return `True` if the specified client host + and user is valid for the user being authenticated. It can be + overridden by applications wishing to enforce restrictions on + which remote users are allowed to authenticate as particular + local users. + + If blocking operations need to be performed to determine the + validity of the client host and user, this method may be defined + as a coroutine. + + By default, this method will return `True` when the client + username matches the name of the user being authenticated. 
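+
+        For example, to also require that requests come from a single
+        trusted client host ('trusted.example.com' is a placeholder;
+        hypothetical application code)::
+
+            import asyncssh
+
+            class MyServer(asyncssh.SSHServer):
+                def validate_host_based_user(self, username, client_host,
+                                             client_username) -> bool:
+                    return (client_host == 'trusted.example.com' and
+                            username == client_username)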
+ + :param username: + The user being authenticated + :param client_host: + The hostname of the client host making the request + :param client_username: + The username of the user on the client host + :type username: `str` + :type client_host: `str` + :type client_username: `str` + + :returns: A `bool` indicating if the specified client host + and user is valid for the user being authenticated + + """ + + return username == client_username + + def public_key_auth_supported(self) -> bool: + """Return whether or not public key authentication is supported + + This method should return `True` if client public key + authentication is supported. Applications wishing to support + it must have this method return `True` and implement + :meth:`validate_public_key` and/or :meth:`validate_ca_key` + to return whether or not the key provided by the client is + valid for the user being authenticated. + + By default, it returns `False` indicating the client public + key authentication is not supported. + + :returns: A `bool` indicating if public key authentication is + supported or not + + """ + + return False # pragma: no cover + + def validate_public_key(self, username: str, key: SSHKey) -> \ + MaybeAwait[bool]: + """Return whether key is an authorized client key for this user + + Key based client authentication can be supported by + passing authorized keys in the `authorized_client_keys` + argument of :func:`create_server`, or by calling + :meth:`set_authorized_keys + ` on the server + connection from the :meth:`begin_auth` method. However, for + more flexibility in matching on the allowed set of keys, this + method can be implemented by the application to do the + matching itself. It should return `True` if the specified + key is a valid client key for the user being authenticated. + + This method may be called multiple times with different keys + provided by the client. Applications should precompute as + much as possible in the :meth:`begin_auth` method so that + this function can quickly return whether the key provided is + in the list. + + If blocking operations need to be performed to determine the + validity of the key, this method may be defined as a coroutine. + + By default, this method returns `False` for all client keys. + + .. note:: This function only needs to report whether the + public key provided is a valid client key for this + user. If it is, AsyncSSH will verify that the + client possesses the corresponding private key + before allowing the authentication to succeed. + + :param username: + The user being authenticated + :param key: + The public key sent by the client + :type username: `str` + :type key: :class:`SSHKey` *public key* + + :returns: A `bool` indicating if the specified key is a valid + client key for the user being authenticated + + """ + + return False # pragma: no cover + + def validate_ca_key(self, username: str, key: SSHKey) -> MaybeAwait[bool]: + """Return whether key is an authorized CA key for this user + + Certificate based client authentication can be supported by + passing authorized CA keys in the `authorized_client_keys` + argument of :func:`create_server`, or by calling + :meth:`set_authorized_keys + ` on the server + connection from the :meth:`begin_auth` method. However, for + more flexibility in matching on the allowed set of keys, this + method can be implemented by the application to do the + matching itself. It should return `True` if the specified + key is a valid certificate authority key for the user being + authenticated. 
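+
+        For example, with a single trusted CA key ('ca_key.pub' is an
+        assumed filename; hypothetical application code, not part of
+        AsyncSSH)::
+
+            import asyncssh
+
+            class MyServer(asyncssh.SSHServer):
+                def begin_auth(self, username: str) -> bool:
+                    self._ca_key = asyncssh.read_public_key('ca_key.pub')
+                    return True
+
+                def public_key_auth_supported(self) -> bool:
+                    return True
+
+                def validate_ca_key(self, username, key) -> bool:
+                    # Compare fingerprints rather than key objects to keep
+                    # the sketch independent of key equality semantics
+                    return (key.get_fingerprint() ==
+                            self._ca_key.get_fingerprint())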
+ + This method may be called multiple times with different keys + provided by the client. Applications should precompute as + much as possible in the :meth:`begin_auth` method so that + this function can quickly return whether the key provided is + in the list. + + If blocking operations need to be performed to determine the + validity of the key, this method may be defined as a coroutine. + + By default, this method returns `False` for all CA keys. + + .. note:: This function only needs to report whether the + public key provided is a valid CA key for this + user. If it is, AsyncSSH will verify that the + certificate is valid, that the user is one of + the valid principals for the certificate, and + that the client possesses the private key + corresponding to the public key in the certificate + before allowing the authentication to succeed. + + :param username: + The user being authenticated + :param key: + The public key which signed the certificate sent by the client + :type username: `str` + :type key: :class:`SSHKey` *public key* + + :returns: A `bool` indicating if the specified key is a valid + CA key for the user being authenticated + + """ + + return False # pragma: no cover + + def password_auth_supported(self) -> bool: + """Return whether or not password authentication is supported + + This method should return `True` if password authentication + is supported. Applications wishing to support it must have + this method return `True` and implement :meth:`validate_password` + to return whether or not the password provided by the client + is valid for the user being authenticated. + + By default, this method returns `False` indicating that + password authentication is not supported. + + :returns: A `bool` indicating if password authentication is + supported or not + + """ + + return False # pragma: no cover + + def validate_password(self, username: str, password: str) -> \ + MaybeAwait[bool]: + """Return whether password is valid for this user + + This method should return `True` if the specified password + is a valid password for the user being authenticated. It must + be overridden by applications wishing to support password + authentication. + + If the password provided is valid but expired, this method + may raise :exc:`PasswordChangeRequired` to request that the + client provide a new password before authentication is + allowed to complete. In this case, the application must + override :meth:`change_password` to handle the password + change request. + + This method may be called multiple times with different + passwords provided by the client. Applications may wish + to limit the number of attempts which are allowed. This + can be done by having :meth:`password_auth_supported` begin + returning `False` after the maximum number of attempts is + exceeded. + + If blocking operations need to be performed to determine the + validity of the password, this method may be defined as a + coroutine. + + By default, this method returns `False` for all passwords. 
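+
+        As an illustration, passwords could be checked against a small
+        table of crypted values (hypothetical application code; the
+        `crypt` module is POSIX-only and deprecated in recent Python
+        versions, so this is only a sketch)::
+
+            import asyncssh
+            from crypt import crypt
+
+            passwords = {'guest': '',                  # no password
+                         'user123': 'qV5sxk9Mn9NaY'}   # crypted stand-in
+
+            class MyServer(asyncssh.SSHServer):
+                def password_auth_supported(self) -> bool:
+                    return True
+
+                def validate_password(self, username: str,
+                                      password: str) -> bool:
+                    if username not in passwords:
+                        return False
+                    pw = passwords[username]
+                    if not password and not pw:
+                        return True     # allow empty password for guest
+                    return crypt(password, pw) == pw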
+ + :param username: + The user being authenticated + :param password: + The password sent by the client + :type username: `str` + :type password: `str` + + :returns: A `bool` indicating if the specified password is + valid for the user being authenticated + + :raises: :exc:`PasswordChangeRequired` if the password + provided is expired and needs to be changed + + """ + + return False # pragma: no cover + + def change_password(self, username: str, old_password: str, + new_password: str) -> MaybeAwait[bool]: + """Handle a request to change a user's password + + This method is called when a user makes a request to + change their password. It should first validate that + the old password provided is correct and then attempt + to change the user's password to the new value. + + If the old password provided is valid and the change to + the new password is successful, this method should + return `True`. If the old password is not valid or + password changes are not supported, it should return + `False`. It may also raise :exc:`PasswordChangeRequired` + to request that the client try again if the new password + is not acceptable for some reason. + + If blocking operations need to be performed to determine the + validity of the old password or to change to the new password, + this method may be defined as a coroutine. + + By default, this method returns `False`, rejecting all + password changes. + + :param username: + The user whose password should be changed + :param old_password: + The user's current password + :param new_password: + The new password being requested + :type username: `str` + :type old_password: `str` + :type new_password: `str` + + :returns: A `bool` indicating if the password change + is successful or not + + :raises: :exc:`PasswordChangeRequired` if the new password + is not acceptable and the client should be asked + to provide another + + """ + + return False # pragma: no cover + + def kbdint_auth_supported(self) -> bool: + """Return whether or not keyboard-interactive authentication + is supported + + This method should return `True` if keyboard-interactive + authentication is supported. Applications wishing to support + it must have this method return `True` and implement + :meth:`get_kbdint_challenge` and :meth:`validate_kbdint_response` + to generate the appropriate challenges and validate the responses + for the user being authenticated. + + By default, this method returns `NotImplemented` tying + this authentication to password authentication. If the + application implements password authentication and this + method is not overridden, keyboard-interactive authentication + will be supported by prompting for a password and passing + that to the password authentication callbacks. + + :returns: A `bool` indicating if keyboard-interactive + authentication is supported or not + + """ + + return NotImplemented # pragma: no cover + + def get_kbdint_challenge(self, username: str, lang: str, + submethods: str) -> MaybeAwait[KbdIntChallenge]: + """Return a keyboard-interactive auth challenge + + This method should return `True` if authentication should + succeed without any challenge, `False` if authentication + should fail without any challenge, or an auth challenge + consisting of a challenge name, instructions, a language tag, + and a list of tuples containing prompt strings and booleans + indicating whether input should be echoed when a value is + entered for that prompt. 
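+
+        For example, a single hidden "Token" prompt could be issued and
+        checked like this (hypothetical application code; the expected
+        token value is a stand-in)::
+
+            import asyncssh
+
+            class MyServer(asyncssh.SSHServer):
+                def kbdint_auth_supported(self) -> bool:
+                    return True
+
+                def get_kbdint_challenge(self, username, lang, submethods):
+                    # (name, instructions, language, [(prompt, echo), ...])
+                    return ('', '', 'en-US', [('Token: ', False)])
+
+                def validate_kbdint_response(self, username, responses):
+                    return len(responses) == 1 and responses[0] == '12345'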
+ + If blocking operations need to be performed to determine the + challenge to issue, this method may be defined as a coroutine. + + :param username: + The user being authenticated + :param lang: + The language requested by the client for the challenge + :param submethods: + A comma-separated list of the types of challenges the client + can support, or the empty string if the server should choose + :type username: `str` + :type lang: `str` + :type submethods: `str` + + :returns: An authentication challenge as described above + + """ + + return False # pragma: no cover + + def validate_kbdint_response( + self, username: str, responses: KbdIntResponse) -> \ + MaybeAwait[KbdIntChallenge]: + """Return whether the keyboard-interactive response is valid + for this user + + This method should validate the keyboard-interactive responses + provided and return `True` if authentication should succeed + with no further challenge, `False` if authentication should + fail, or an additional auth challenge in the same format returned + by :meth:`get_kbdint_challenge`. Any series of challenges can be + returned this way. To print a message in the middle of a sequence + of challenges without prompting for additional data, a challenge + can be returned with an empty list of prompts. After the client + acknowledges this message, this function will be called again + with an empty list of responses to continue the authentication. + + If blocking operations need to be performed to determine the + validity of the response or the next challenge to issue, this + method may be defined as a coroutine. + + :param username: + The user being authenticated + :param responses: + A list of responses to the last challenge + :type username: `str` + :type responses: `list` of `str` + + :returns: `True`, `False`, or the next challenge + + """ + + return False # pragma: no cover + + def session_requested(self) -> MaybeAwait[_NewSession]: + """Handle an incoming session request + + This method is called when a session open request is received + from the client, indicating it wishes to open a channel to be + used for running a shell, executing a command, or connecting + to a subsystem. If the application wishes to accept the session, + it must override this method to return either an + :class:`SSHServerSession` object to use to process + the data received on the channel or a tuple consisting of an + :class:`SSHServerChannel` object created with + :meth:`create_server_channel + ` and an + :class:`SSHServerSession`, if the application + wishes to pass non-default arguments when creating the channel. + + If blocking operations need to be performed before the session + can be created, a coroutine which returns an + :class:`SSHServerSession` object can be returned instead of + the session itself. This can be either returned directly or as + a part of a tuple with an :class:`SSHServerChannel` object. + + To reject this request, this method should return `False` + to send back a "Session refused" response or raise a + :exc:`ChannelOpenError` exception with the reason for + the failure. + + The details of what type of session the client wants to start + will be delivered to methods on the :class:`SSHServerSession` + object which is returned, along with other information such + as environment variables, terminal type, size, and modes. + + By default, all session requests are rejected. 
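+
+        For example, a server could accept every session and handle it
+        with a stream-based coroutine (hypothetical application code)::
+
+            import asyncssh
+
+            async def handle_session(stdin, stdout, stderr):
+                # Echo everything the client sends back to it
+                async for line in stdin:
+                    stdout.write(line)
+                stdout.channel.exit(0)
+
+            class MyServer(asyncssh.SSHServer):
+                def session_requested(self):
+                    return handle_session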
+ + :returns: One of the following: + + * An :class:`SSHServerSession` object or a coroutine + which returns an :class:`SSHServerSession` + * A tuple consisting of an :class:`SSHServerChannel` + and the above + * A `callable` or coroutine handler function which + takes AsyncSSH stream objects for stdin, stdout, + and stderr as arguments + * A tuple consisting of an :class:`SSHServerChannel` + and the above + * `False` to refuse the request + + :raises: :exc:`ChannelOpenError` if the session shouldn't + be accepted + + """ + + return False # pragma: no cover + + def connection_requested(self, dest_host: str, dest_port: int, + orig_host: str, orig_port: int) -> _NewTCPSession: + """Handle a direct TCP/IP connection request + + This method is called when a direct TCP/IP connection + request is received by the server. Applications wishing + to accept such connections must override this method. + + To allow standard port forwarding of data on the connection + to the requested destination host and port, this method + should return `True`. + + To reject this request, this method should return `False` + to send back a "Connection refused" response or raise an + :exc:`ChannelOpenError` exception with the reason for + the failure. + + If the application wishes to process the data on the + connection itself, this method should return either an + :class:`SSHTCPSession` object which can be used to process the + data received on the channel or a tuple consisting of an + :class:`SSHTCPChannel` object created with + :meth:`create_tcp_channel() + ` and an + :class:`SSHTCPSession`, if the application wishes + to pass non-default arguments when creating the channel. + + If blocking operations need to be performed before the session + can be created, a coroutine which returns an + :class:`SSHTCPSession` object can be returned instead of + the session itself. This can be either returned directly or as + a part of a tuple with an :class:`SSHTCPChannel` object. + + By default, all connection requests are rejected. + + :param dest_host: + The address the client wishes to connect to + :param dest_port: + The port the client wishes to connect to + :param orig_host: + The address the connection was originated from + :param orig_port: + The port the connection was originated from + :type dest_host: `str` + :type dest_port: `int` + :type orig_host: `str` + :type orig_port: `int` + + :returns: One of the following: + + * An :class:`SSHTCPSession` object or a coroutine + which returns an :class:`SSHTCPSession` + * A tuple consisting of an :class:`SSHTCPChannel` + and the above + * A `callable` or coroutine handler function which + takes AsyncSSH stream objects for reading from + and writing to the connection + * A tuple consisting of an :class:`SSHTCPChannel` + and the above + * `True` to request standard port forwarding + * `False` to refuse the connection + + :raises: :exc:`ChannelOpenError` if the connection shouldn't + be accepted + + """ + + return False # pragma: no cover + + def server_requested(self, listen_host: str, + listen_port: int) -> MaybeAwait[_NewListener]: + """Handle a request to listen on a TCP/IP address and port + + This method is called when a client makes a request to + listen on an address and port for incoming TCP connections. + The port to listen on may be `0` to request a dynamically + allocated port. Applications wishing to allow TCP/IP connection + forwarding must override this method. 
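+
+        For example, an application willing to allow standard remote
+        port forwarding on any requested address and port could simply
+        return `True` (the full set of return values is described
+        below; hypothetical application code)::
+
+            import asyncssh
+
+            class MyServer(asyncssh.SSHServer):
+                def server_requested(self, listen_host: str,
+                                     listen_port: int) -> bool:
+                    return True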
+ + To set up standard port forwarding of connections received + on this address and port, this method should return `True`. + + If the application wishes to manage listening for incoming + connections itself, this method should return an + :class:`SSHListener` object that listens for new connections + and calls :meth:`create_connection + ` on each of them to + forward them back to the client or return `None` if the + listener can't be set up. + + If blocking operations need to be performed to set up the + listener, a coroutine which returns an :class:`SSHListener` + can be returned instead of the listener itself. + + To reject this request, this method should return `False`. + + By default, this method rejects all server requests. + + :param listen_host: + The address the server should listen on + :param listen_port: + The port the server should listen on, or the value `0` + to request that the server dynamically allocate a port + :type listen_host: `str` + :type listen_port: `int` + + :returns: One of the following: + + * An :class:`SSHListener` object + * `True` to set up standard port forwarding + * `False` to reject the request + * A coroutine object which returns one of the above + + """ + + return False # pragma: no cover + + def unix_connection_requested(self, dest_path: str) -> _NewUNIXSession: + """Handle a direct UNIX domain socket connection request + + This method is called when a direct UNIX domain socket connection + request is received by the server. Applications wishing to accept + such connections must override this method. + + To allow standard path forwarding of data on the connection to the + requested destination path, this method should return `True`. + + To reject this request, this method should return `False` + to send back a "Connection refused" response or raise an + :exc:`ChannelOpenError` exception with the reason for + the failure. + + If the application wishes to process the data on the + connection itself, this method should return either an + :class:`SSHUNIXSession` object which can be used to process the + data received on the channel or a tuple consisting of an + :class:`SSHUNIXChannel` object created with + :meth:`create_unix_channel() + ` and an + :class:`SSHUNIXSession`, if the application wishes + to pass non-default arguments when creating the channel. + + If blocking operations need to be performed before the session + can be created, a coroutine which returns an + :class:`SSHUNIXSession` object can be returned instead of + the session itself. This can be either returned directly or as + a part of a tuple with an :class:`SSHUNIXChannel` object. + + By default, all connection requests are rejected. 
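+
+        For example, to allow forwarding only to a single well-known
+        socket path ('/run/example.sock' is a placeholder; hypothetical
+        application code)::
+
+            import asyncssh
+
+            class MyServer(asyncssh.SSHServer):
+                def unix_connection_requested(self, dest_path: str) -> bool:
+                    return dest_path == '/run/example.sock'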
+ + :param dest_path: + The path the client wishes to connect to + :type dest_path: `str` + + :returns: One of the following: + + * An :class:`SSHUNIXSession` object or a coroutine + which returns an :class:`SSHUNIXSession` + * A tuple consisting of an :class:`SSHUNIXChannel` + and the above + * A `callable` or coroutine handler function which + takes AsyncSSH stream objects for reading from + and writing to the connection + * A tuple consisting of an :class:`SSHUNIXChannel` + and the above + * `True` to request standard path forwarding + * `False` to refuse the connection + + :raises: :exc:`ChannelOpenError` if the connection shouldn't + be accepted + + """ + + return False # pragma: no cover + + def unix_server_requested(self, listen_path: str) -> \ + MaybeAwait[_NewListener]: + """Handle a request to listen on a UNIX domain socket + + This method is called when a client makes a request to + listen on a path for incoming UNIX domain socket connections. + Applications wishing to allow UNIX domain socket forwarding + must override this method. + + To set up standard path forwarding of connections received + on this path, this method should return `True`. + + If the application wishes to manage listening for incoming + connections itself, this method should return an + :class:`SSHListener` object that listens for new connections + and calls :meth:`create_unix_connection + ` on each of them to + forward them back to the client or return `None` if the + listener can't be set up. + + If blocking operations need to be performed to set up the + listener, a coroutine which returns an :class:`SSHListener` + can be returned instead of the listener itself. + + To reject this request, this method should return `False`. + + By default, this method rejects all server requests. + + :param listen_path: + The path the server should listen on + :type listen_path: `str` + + :returns: One of the following: + + * An :class:`SSHListener` object or a coroutine + which returns an :class:`SSHListener` or `False` + if the listener can't be opened + * `True` to set up standard path forwarding + * `False` to reject the request + + """ + + return False # pragma: no cover diff --git a/venv/lib/python3.12/site-packages/asyncssh/session.py b/venv/lib/python3.12/site-packages/asyncssh/session.py new file mode 100644 index 0000000..291c511 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/session.py @@ -0,0 +1,543 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH session handlers""" + +from typing import TYPE_CHECKING, Any, AnyStr, Callable, Generic +from typing import Mapping, Optional, Tuple, Union + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHClientChannel, SSHServerChannel + from .channel import SSHTCPChannel, SSHUNIXChannel + +DataType = Optional[int] + +TermModes = Mapping[int, int] +TermModesArg = Optional[TermModes] +TermSize = Tuple[int, int, int, int] +TermSizeArg = Union[None, Tuple[int, int], TermSize] + + +class SSHSession(Generic[AnyStr]): + """SSH session handler""" + + # pylint: disable=no-self-use,unused-argument + + def connection_made(self, chan: Any) -> None: + """Called when a channel is opened successfully""" + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Called when a channel is closed + + This method is called when a channel is closed. If the channel + is shut down cleanly, *exc* will be `None`. Otherwise, it + will be an exception explaining the reason for the channel close. + + :param exc: + The exception which caused the channel to close, or + `None` if the channel closed cleanly. + :type exc: :class:`Exception` + + """ + + def session_started(self) -> None: + """Called when the session is started + + This method is called when a session has started up. For + client and server sessions, this will be called once a + shell, exec, or subsystem request has been successfully + completed. For TCP and UNIX domain socket sessions, it will + be called immediately after the connection is opened. + + """ + + def data_received(self, data: AnyStr, datatype: DataType) -> None: + """Called when data is received on the channel + + This method is called when data is received on the channel. + If an encoding was specified when the channel was created, + the data will be delivered as a string after decoding with + the requested encoding. Otherwise, the data will be delivered + as bytes. + + :param data: + The data received on the channel + :param datatype: + The extended data type of the data, from :ref:`extended + data types ` + :type data: `str` or `bytes` + + """ + + def eof_received(self) -> bool: + """Called when EOF is received on the channel + + This method is called when an end-of-file indication is received + on the channel, after which no more data will be received. If this + method returns `True`, the channel remains half open and data + may still be sent. Otherwise, the channel is automatically closed + after this method returns. This is the default behavior for + classes derived directly from :class:`SSHSession`, but not when + using the higher-level streams API. Because input is buffered + in that case, streaming sessions enable half-open channels to + allow applications to respond to input read after an end-of-file + indication is received. 
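+
+        For example, a session which still has queued output to deliver
+        after the client closes its side of the channel could return
+        `True` here (hypothetical subclass)::
+
+            import asyncssh
+
+            class MySession(asyncssh.SSHServerSession):
+                def eof_received(self) -> bool:
+                    return True   # keep the channel half open for writes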
+ + """ + + return False # pragma: no cover + + def pause_writing(self) -> None: + """Called when the write buffer becomes full + + This method is called when the channel's write buffer becomes + full and no more data can be sent until the remote system + adjusts its window. While data can still be buffered locally, + applications may wish to stop producing new data until the + write buffer has drained. + + """ + + def resume_writing(self) -> None: + """Called when the write buffer has sufficiently drained + + This method is called when the channel's send window reopens + and enough data has drained from the write buffer to allow the + application to produce more data. + + """ + + +class SSHClientSession(SSHSession[AnyStr]): + """SSH client session handler + + Applications should subclass this when implementing an SSH client + session handler. The functions listed below should be implemented + to define application-specific behavior. In particular, the standard + `asyncio` protocol methods such as :meth:`connection_made`, + :meth:`connection_lost`, :meth:`data_received`, :meth:`eof_received`, + :meth:`pause_writing`, and :meth:`resume_writing` are all supported. + In addition, :meth:`session_started` is called as soon as the SSH + session is fully started, :meth:`xon_xoff_requested` can be used to + determine if the server wants the client to support XON/XOFF flow + control, and :meth:`exit_status_received` and + :meth:`exit_signal_received` can be used to receive session exit + information. + + """ + + # pylint: disable=no-self-use,unused-argument + + def connection_made(self, chan: 'SSHClientChannel[AnyStr]') -> None: + """Called when a channel is opened successfully + + This method is called when a channel is opened successfully. The + channel parameter should be stored if needed for later use. + + :param chan: + The channel which was successfully opened. + :type chan: :class:`SSHClientChannel` + + """ + + def xon_xoff_requested(self, client_can_do: bool) -> None: + """XON/XOFF flow control has been enabled or disabled + + This method is called to notify the client whether or not + to enable XON/XOFF flow control. If client_can_do is + `True` and output is being sent to an interactive + terminal the application should allow input of Control-S + and Control-Q to pause and resume output, respectively. + If client_can_do is `False`, Control-S and Control-Q + should be treated as normal input and passed through to + the server. Non-interactive applications can ignore this + request. + + By default, this message is ignored. + + :param client_can_do: + Whether or not to enable XON/XOFF flow control + :type client_can_do: `bool` + + """ + + def exit_status_received(self, status: int) -> None: + """A remote exit status has been received for this session + + This method is called when the shell, command, or subsystem + running on the server terminates and returns an exit status. + A zero exit status generally means that the operation was + successful. This call will generally be followed by a call + to :meth:`connection_lost`. + + By default, the exit status is ignored. + + :param status: + The exit status returned by the remote process + :type status: `int` + + """ + + def exit_signal_received(self, signal: str, core_dumped: bool, + msg: str, lang: str) -> None: + """A remote exit signal has been received for this session + + This method is called when the shell, command, or subsystem + running on the server terminates abnormally with a signal. 
+ A more detailed error may also be provided, along with an + indication of whether the remote process dumped core. This call + will generally be followed by a call to :meth:`connection_lost`. + + By default, exit signals are ignored. + + :param signal: + The signal which caused the remote process to exit + :param core_dumped: + Whether or not the remote process dumped core + :param msg: + Details about what error occurred + :param lang: + The language the error message is in + :type signal: `str` + :type core_dumped: `bool` + :type msg: `str` + :type lang: `str` + + """ + + +class SSHServerSession(SSHSession[AnyStr]): + """SSH server session handler + + Applications should subclass this when implementing an SSH server + session handler. The functions listed below should be implemented + to define application-specific behavior. In particular, the + standard `asyncio` protocol methods such as :meth:`connection_made`, + :meth:`connection_lost`, :meth:`data_received`, :meth:`eof_received`, + :meth:`pause_writing`, and :meth:`resume_writing` are all supported. + In addition, :meth:`pty_requested` is called when the client requests a + pseudo-terminal, one of :meth:`shell_requested`, :meth:`exec_requested`, + or :meth:`subsystem_requested` is called depending on what type of + session the client wants to start, :meth:`session_started` is called + once the SSH session is fully started, :meth:`terminal_size_changed` is + called when the client's terminal size changes, :meth:`signal_received` + is called when the client sends a signal, and :meth:`break_received` + is called when the client sends a break. + + """ + + # pylint: disable=no-self-use,unused-argument + + def connection_made(self, chan: 'SSHServerChannel[AnyStr]') -> None: + """Called when a channel is opened successfully + + This method is called when a channel is opened successfully. The + channel parameter should be stored if needed for later use. + + :param chan: + The channel which was successfully opened. + :type chan: :class:`SSHServerChannel` + + """ + + def pty_requested(self, term_type: str, + term_size: Tuple[int, int, int, int], + term_modes: Mapping[int, int]) -> bool: + """A pseudo-terminal has been requested + + This method is called when the client sends a request to allocate + a pseudo-terminal with the requested terminal type, size, and + POSIX terminal modes. This method should return `True` if the + request for the pseudo-terminal is accepted. Otherwise, it should + return `False` to reject the request. + + By default, requests to allocate a pseudo-terminal are accepted + but nothing is done with the associated terminal information. + Applications wishing to use this information should implement + this method and have it return `True`, or call + :meth:`get_terminal_type() `, + :meth:`get_terminal_size() `, + or :meth:`get_terminal_mode() ` + on the :class:`SSHServerChannel` to get the information they need + after a shell, command, or subsystem is started. + + :param term_type: + Terminal type to set for this session + :param term_size: + Terminal size to set for this session provided as a + tuple of four `int` values: the width and height of the + terminal in characters followed by the width and height + of the terminal in pixels + :param term_modes: + POSIX terminal modes to set for this session, where keys + are taken from :ref:`POSIX terminal modes ` with + values defined in section 8 of :rfc:`RFC 4254 <4254#section-8>`. 
:type term_type: `str` + :type term_size: tuple of 4 `int` values + :type term_modes: `dict` + + :returns: A `bool` indicating if the request for a + pseudo-terminal was allowed or not + + """ + + return True # pragma: no cover + + def terminal_size_changed(self, width: int, height: int, + pixwidth: int, pixheight: int) -> None: + """The terminal size has changed + + This method is called when a client requests a + pseudo-terminal and again whenever the size of + the client's terminal window changes. + + By default, this information is ignored, but applications + wishing to use the terminal size can implement this method + to get notified whenever it changes. + + :param width: + The width of the terminal in characters + :param height: + The height of the terminal in characters + :param pixwidth: (optional) + The width of the terminal in pixels + :param pixheight: (optional) + The height of the terminal in pixels + :type width: `int` + :type height: `int` + :type pixwidth: `int` + :type pixheight: `int` + + """ + + def shell_requested(self) -> bool: + """The client has requested a shell + + This method should be implemented by the application to + perform whatever processing is required when a client makes + a request to open an interactive shell. It should return + `True` to accept the request, or `False` to reject it. + + If the application returns `True`, the :meth:`session_started` + method will be called once the channel is fully open. No output + should be sent until this method is called. + + By default this method returns `False` to reject all requests. + + :returns: A `bool` indicating if the shell request was + allowed or not + + """ + + return False # pragma: no cover + + def exec_requested(self, command: str) -> bool: + """The client has requested to execute a command + + This method should be implemented by the application to + perform whatever processing is required when a client makes + a request to execute a command. It should return `True` to + accept the request, or `False` to reject it. + + If the application returns `True`, the :meth:`session_started` + method will be called once the channel is fully open. No output + should be sent until this method is called. + + By default this method returns `False` to reject all requests. + + :param command: + The command the client has requested to execute + :type command: `str` + + :returns: A `bool` indicating if the exec request was + allowed or not + + """ + + return False # pragma: no cover + + def subsystem_requested(self, subsystem: str) -> bool: + """The client has requested to start a subsystem + + This method should be implemented by the application to + perform whatever processing is required when a client makes + a request to start a subsystem. It should return `True` to + accept the request, or `False` to reject it. + + If the application returns `True`, the :meth:`session_started` + method will be called once the channel is fully open. No output + should be sent until this method is called. + + By default this method returns `False` to reject all requests. + + :param subsystem: + The subsystem to start + :type subsystem: `str` + + :returns: A `bool` indicating if the request to open the + subsystem was allowed or not + + """ + + return False # pragma: no cover + + def break_received(self, msec: int) -> bool: + """The client has sent a break + + This method is called when the client requests that the + server perform a break operation on the terminal. If the + break is performed, this method should return `True`. 
Otherwise, it should return `False`. + + By default, this method returns `False` indicating that + no break was performed. + + :param msec: + The duration of the break in milliseconds + :type msec: `int` + + :returns: A `bool` to indicate if the break operation was + performed or not + + """ + + return False # pragma: no cover + + def signal_received(self, signal: str) -> None: + """The client has sent a signal + + This method is called when the client delivers a signal + on the channel. + + By default, signals from the client are ignored. + + :param signal: + The name of the signal received + :type signal: `str` + + """ + + def soft_eof_received(self) -> None: + """The client has sent a soft EOF + + This method is called by the line editor when the client + sends a soft EOF (Ctrl-D on an empty input line). + + By default, soft EOF will trigger an EOF to an outstanding + read call but still allow additional input to be received + from the client after that. + + """ + + +class SSHTCPSession(SSHSession[AnyStr]): + """SSH TCP session handler + + Applications should subclass this when implementing a handler for + SSH direct or forwarded TCP connections. + + SSH client applications wishing to open a direct connection should call + :meth:`create_connection() ` + on their :class:`SSHClientConnection`, passing in a factory which + returns instances of this class. + + Server applications wishing to allow direct connections should + implement the coroutine :meth:`connection_requested() + ` on their :class:`SSHServer` + object and have it return instances of this class. + + Server applications wishing to allow connection forwarding back + to the client should implement the coroutine :meth:`server_requested() + ` on their :class:`SSHServer` object + and call :meth:`create_connection() + ` on their + :class:`SSHServerConnection` for each new connection, passing it a + factory which returns instances of this class. + + When a connection is successfully opened, :meth:`session_started` + will be called, after which the application can begin sending data. + Received data will be passed to the :meth:`data_received` method. + + """ + + def connection_made(self, chan: 'SSHTCPChannel[AnyStr]') -> None: + """Called when a channel is opened successfully + + This method is called when a channel is opened successfully. The + channel parameter should be stored if needed for later use. + + :param chan: + The channel which was successfully opened. + :type chan: :class:`SSHTCPChannel` + + """ + + +class SSHUNIXSession(SSHSession[AnyStr]): + """SSH UNIX domain socket session handler + + Applications should subclass this when implementing a handler for + SSH direct or forwarded UNIX domain socket connections. + + SSH client applications wishing to open a direct connection should call + :meth:`create_unix_connection() + ` on their + :class:`SSHClientConnection`, passing in a factory which returns + instances of this class. + + Server applications wishing to allow direct connections should + implement the coroutine :meth:`unix_connection_requested() + ` on their :class:`SSHServer` + object and have it return instances of this class. + + Server applications wishing to allow connection forwarding back + to the client should implement the coroutine + :meth:`unix_server_requested() ` + on their :class:`SSHServer` object and call + :meth:`create_unix_connection() + ` on their + :class:`SSHServerConnection` for each new connection, passing it a + factory which returns instances of this class. 
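+
+    As an illustration, a client could open a direct UNIX domain socket
+    connection using a custom session class like this (hypothetical
+    application code; 'host' and '/run/example.sock' are placeholders)::
+
+        import asyncio, asyncssh
+
+        class MySession(asyncssh.SSHUNIXSession):
+            def data_received(self, data, datatype):
+                print('Received:', data)
+
+        async def main() -> None:
+            async with asyncssh.connect('host') as conn:
+                chan, session = await conn.create_unix_connection(
+                    MySession, '/run/example.sock')
+                await chan.wait_closed()
+
+        asyncio.run(main())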
+ + When a connection is successfully opened, :meth:`session_started` + will be called, after which the application can begin sending data. + Received data will be passed to the :meth:`data_received` method. + + """ + + def connection_made(self, chan: 'SSHUNIXChannel[AnyStr]') -> None: + """Called when a channel is opened successfully + + This method is called when a channel is opened successfully. The + channel parameter should be stored if needed for later use. + + :param chan: + The channel which was successfully opened. + :type chan: :class:`SSHUNIXChannel` + + """ + + +SSHSessionFactory = Callable[[], SSHSession[AnyStr]] +SSHClientSessionFactory = Callable[[], SSHClientSession[AnyStr]] +SSHTCPSessionFactory = Callable[[], SSHTCPSession[AnyStr]] +SSHUNIXSessionFactory = Callable[[], SSHUNIXSession[AnyStr]] diff --git a/venv/lib/python3.12/site-packages/asyncssh/sftp.py b/venv/lib/python3.12/site-packages/asyncssh/sftp.py new file mode 100644 index 0000000..96f1e6a --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/sftp.py @@ -0,0 +1,7572 @@ +# Copyright (c) 2015-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation +# Jonathan Slenders - proposed changes to allow SFTP server callbacks +# to be coroutines + +"""SFTP handlers""" + +import asyncio +import errno +from fnmatch import fnmatch +import inspect +import os +from os import SEEK_SET, SEEK_CUR, SEEK_END +from pathlib import PurePath +import posixpath +import stat +import sys +import time +from types import TracebackType +from typing import TYPE_CHECKING, AnyStr, AsyncIterator, Awaitable, Callable +from typing import Dict, Generic, IO, Iterable, List, Mapping, Optional +from typing import Sequence, Set, Tuple, Type, TypeVar, Union, cast, overload +from typing_extensions import Literal, Protocol + +from . 
import constants +from .constants import DEFAULT_LANG + +from .constants import FXP_INIT, FXP_VERSION, FXP_OPEN, FXP_CLOSE, FXP_READ +from .constants import FXP_WRITE, FXP_LSTAT, FXP_FSTAT, FXP_SETSTAT +from .constants import FXP_FSETSTAT, FXP_OPENDIR, FXP_READDIR, FXP_REMOVE +from .constants import FXP_MKDIR, FXP_RMDIR, FXP_REALPATH, FXP_STAT, FXP_RENAME +from .constants import FXP_READLINK, FXP_SYMLINK, FXP_LINK, FXP_BLOCK +from .constants import FXP_UNBLOCK, FXP_STATUS, FXP_HANDLE, FXP_DATA +from .constants import FXP_NAME, FXP_ATTRS, FXP_EXTENDED, FXP_EXTENDED_REPLY + +from .constants import FXR_OVERWRITE + +from .constants import FXRP_NO_CHECK, FXRP_STAT_IF_EXISTS, FXRP_STAT_ALWAYS + +from .constants import FXF_READ, FXF_WRITE, FXF_APPEND +from .constants import FXF_CREAT, FXF_TRUNC, FXF_EXCL + +from .constants import FXF_ACCESS_DISPOSITION, FXF_CREATE_NEW +from .constants import FXF_CREATE_TRUNCATE, FXF_OPEN_EXISTING +from .constants import FXF_OPEN_OR_CREATE, FXF_TRUNCATE_EXISTING +from .constants import FXF_APPEND_DATA + +from .constants import ACE4_READ_DATA, ACE4_WRITE_DATA, ACE4_APPEND_DATA +from .constants import ACE4_READ_ATTRIBUTES, ACE4_WRITE_ATTRIBUTES + +from .constants import FILEXFER_ATTR_SIZE, FILEXFER_ATTR_UIDGID +from .constants import FILEXFER_ATTR_PERMISSIONS, FILEXFER_ATTR_ACMODTIME +from .constants import FILEXFER_ATTR_EXTENDED, FILEXFER_ATTR_DEFINED_V3 + +from .constants import FILEXFER_ATTR_ACCESSTIME, FILEXFER_ATTR_CREATETIME +from .constants import FILEXFER_ATTR_MODIFYTIME, FILEXFER_ATTR_ACL +from .constants import FILEXFER_ATTR_OWNERGROUP, FILEXFER_ATTR_SUBSECOND_TIMES +from .constants import FILEXFER_ATTR_DEFINED_V4 + +from .constants import FILEXFER_ATTR_BITS, FILEXFER_ATTR_DEFINED_V5 + +from .constants import FILEXFER_ATTR_ALLOCATION_SIZE, FILEXFER_ATTR_TEXT_HINT +from .constants import FILEXFER_ATTR_MIME_TYPE, FILEXFER_ATTR_LINK_COUNT +from .constants import FILEXFER_ATTR_UNTRANSLATED_NAME, FILEXFER_ATTR_CTIME +from .constants import FILEXFER_ATTR_DEFINED_V6 + +from .constants import FX_OK, FX_EOF, FX_NO_SUCH_FILE, FX_PERMISSION_DENIED +from .constants import FX_FAILURE, FX_BAD_MESSAGE, FX_NO_CONNECTION +from .constants import FX_CONNECTION_LOST, FX_OP_UNSUPPORTED, FX_V3_END +from .constants import FX_INVALID_HANDLE, FX_NO_SUCH_PATH +from .constants import FX_FILE_ALREADY_EXISTS, FX_WRITE_PROTECT, FX_NO_MEDIA +from .constants import FX_V4_END, FX_NO_SPACE_ON_FILESYSTEM, FX_QUOTA_EXCEEDED +from .constants import FX_UNKNOWN_PRINCIPAL, FX_LOCK_CONFLICT, FX_V5_END +from .constants import FX_DIR_NOT_EMPTY, FX_NOT_A_DIRECTORY +from .constants import FX_INVALID_FILENAME, FX_LINK_LOOP, FX_CANNOT_DELETE +from .constants import FX_INVALID_PARAMETER, FX_FILE_IS_A_DIRECTORY +from .constants import FX_BYTE_RANGE_LOCK_CONFLICT, FX_BYTE_RANGE_LOCK_REFUSED +from .constants import FX_DELETE_PENDING, FX_FILE_CORRUPT, FX_OWNER_INVALID +from .constants import FX_GROUP_INVALID, FX_NO_MATCHING_BYTE_RANGE_LOCK +from .constants import FX_V6_END + +from .constants import FILEXFER_TYPE_REGULAR, FILEXFER_TYPE_DIRECTORY +from .constants import FILEXFER_TYPE_SYMLINK, FILEXFER_TYPE_SPECIAL +from .constants import FILEXFER_TYPE_UNKNOWN, FILEXFER_TYPE_SOCKET +from .constants import FILEXFER_TYPE_CHAR_DEVICE, FILEXFER_TYPE_BLOCK_DEVICE +from .constants import FILEXFER_TYPE_FIFO + +from .logging import SSHLogger + +from .misc import BytesOrStr, Error, FilePath, MaybeAwait, OptExcInfo, Record +from .misc import ConnectionLost +from .misc import async_context_manager, get_symbol_names, 
hide_empty, plural + +from .packet import Boolean, Byte, String, UInt16, UInt32, UInt64 +from .packet import PacketDecodeError, SSHPacket, SSHPacketLogger + +from .version import __author__, __version__ + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHServerChannel + from .connection import SSHClientConnection, SSHServerConnection + from .stream import SSHReader, SSHWriter + + +if TYPE_CHECKING: + _RequestWaiter = asyncio.Future[Tuple[int, SSHPacket]] +else: + _RequestWaiter = asyncio.Future + +if sys.platform == 'win32': # pragma: no cover + _LocalPath = str +else: + _LocalPath = bytes + +_SFTPFileObj = IO[bytes] +_SFTPPath = Union[bytes, FilePath] +_SFTPPaths = Union[_SFTPPath, Sequence[_SFTPPath]] +_SFTPPatList = List[Union[bytes, List[bytes]]] +_SFTPStatFunc = Callable[[_SFTPPath], Awaitable['SFTPAttrs']] + +_SFTPNames = Tuple[Sequence['SFTPName'], bool] +_SFTPOSAttrs = Union[os.stat_result, 'SFTPAttrs'] +_SFTPOSVFSAttrs = Union[os.statvfs_result, 'SFTPVFSAttrs'] + +_SFTPOnErrorHandler = Optional[Callable[[Callable, bytes, OptExcInfo], None]] +_SFTPPacketHandler = Optional[Callable[['SFTPServerHandler', SSHPacket], + Awaitable[object]]] + +SFTPErrorHandler = Union[None, Literal[False], Callable[[Exception], None]] +SFTPProgressHandler = Optional[Callable[[bytes, bytes, int, int], None]] + + +MIN_SFTP_VERSION = 3 +MAX_SFTP_VERSION = 6 + +SFTP_BLOCK_SIZE = 16384 +_MAX_SFTP_READ_SIZE = 4*1024*1024 # 4 MiB + +_MAX_SFTP_REQUESTS = 128 +_MAX_READDIR_NAMES = 128 + +_NSECS_IN_SEC = 1_000_000_000 + +_T = TypeVar('_T') + + +_const_dict: Mapping[str, int] = constants.__dict__ + +_valid_attr_flags = { + 3: FILEXFER_ATTR_DEFINED_V3, + 4: FILEXFER_ATTR_DEFINED_V4, + 5: FILEXFER_ATTR_DEFINED_V5, + 6: FILEXFER_ATTR_DEFINED_V6 +} + +_open_modes = { + 'r': FXF_READ, + 'w': FXF_WRITE | FXF_CREAT | FXF_TRUNC, + 'a': FXF_WRITE | FXF_CREAT | FXF_APPEND, + 'x': FXF_WRITE | FXF_CREAT | FXF_EXCL, + + 'r+': FXF_READ | FXF_WRITE, + 'w+': FXF_READ | FXF_WRITE | FXF_CREAT | FXF_TRUNC, + 'a+': FXF_READ | FXF_WRITE | FXF_CREAT | FXF_APPEND, + 'x+': FXF_READ | FXF_WRITE | FXF_CREAT | FXF_EXCL +} + +_file_types = {k: v.lower() for k, v in + get_symbol_names(_const_dict, 'FILEXFER_TYPE_', 14).items()} + + +class _SupportsEncode(Protocol): + """Protocol for applying encoding to path names""" + + def encode(self, sftp_version: int) -> bytes: + """Encode result as bytes in an SSH packet""" + + +class _SFTPGlobProtocol(Protocol): + """Protocol for getting files to perform glob matching against""" + + async def stat(self, path: bytes) -> 'SFTPAttrs': + """Get attributes of a file""" + + def scandir(self, path: bytes) -> AsyncIterator['SFTPName']: + """Return names and attributes of the files in a directory""" + + +class SFTPFileProtocol(Protocol): + """Protocol for accessing a file via an SFTP server""" + + async def __aenter__(self) -> 'SFTPFileProtocol': + """Allow SFTPFileProtocol to be used as an async context manager""" + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + """Wait for file close when used as an async context manager""" + + async def read(self, size: int, offset: int) -> bytes: + """Read data from the local file""" + + async def write(self, data: bytes, offset: int) -> int: + """Write data to the local file""" + + async def close(self) -> None: + """Close the local file""" + + +class _SFTPFSProtocol(Protocol): + """Protocol for accessing a filesystem via an SFTP 
server""" + + @staticmethod + def basename(path: bytes) -> bytes: + """Return the final component of a POSIX-style path""" + + def encode(self, path: _SFTPPath) -> bytes: + """Encode path name using configured path encoding""" + + def compose_path(self, path: bytes, + parent: Optional[bytes] = None) -> bytes: + """Compose a path""" + + async def stat(self, path: bytes) -> 'SFTPAttrs': + """Get attributes of a file or directory, following symlinks""" + + async def setstat(self, path: bytes, attrs: 'SFTPAttrs') -> None: + """Set attributes of a file or directory""" + + async def isdir(self, path: bytes) -> bool: + """Return if the path refers to a directory""" + + def scandir(self, path: bytes) -> AsyncIterator['SFTPName']: + """Return names and attributes of the files in a directory""" + + async def mkdir(self, path: bytes) -> None: + """Create a directory""" + + async def readlink(self, path: bytes) -> bytes: + """Return the target of a symbolic link""" + + async def symlink(self, oldpath: bytes, newpath: bytes) -> None: + """Create a symbolic link""" + + @async_context_manager + async def open(self, path: bytes, mode: str) -> SFTPFileProtocol: + """Open a file""" + + +def _parse_acl_supported(data: bytes) -> int: + """Parse an SFTPv6 "acl-supported" extension""" + + packet = SSHPacket(data) + capabilities = packet.get_uint32() + packet.check_end() + + return capabilities + + +def _parse_supported(data: bytes) -> \ + Tuple[int, int, int, int, int, Sequence[bytes]]: + """Parse an SFTPv5 "supported" extension""" + + packet = SSHPacket(data) + attr_mask = packet.get_uint32() + attrib_mask = packet.get_uint32() + open_flags = packet.get_uint32() + access_mask = packet.get_uint32() + max_read_size = packet.get_uint32() + + ext_names: List[bytes] = [] + + while packet: + name = packet.get_string() + ext_names.append(name) + + return (attr_mask, attrib_mask, open_flags, access_mask, + max_read_size, ext_names) + + +def _parse_supported2(data: bytes) -> Tuple[int, int, int, int, int, int, int, + Sequence[bytes], Sequence[bytes]]: + """Parse an SFTPv6 "supported2" extension""" + + packet = SSHPacket(data) + attr_mask = packet.get_uint32() + attrib_mask = packet.get_uint32() + open_flags = packet.get_uint32() + access_mask = packet.get_uint32() + max_read_size = packet.get_uint32() + open_block_vector = packet.get_uint16() + block_vector = packet.get_uint16() + + attrib_ext_count = packet.get_uint32() + attrib_ext_names: List[bytes] = [] + + for _ in range(attrib_ext_count): + attrib_ext_names.append(packet.get_string()) + + ext_count = packet.get_uint32() + ext_names: List[bytes] = [] + + for _ in range(ext_count): + ext_names.append(packet.get_string()) + + packet.check_end() + + return (attr_mask, attrib_mask, open_flags, access_mask, + max_read_size, open_block_vector, block_vector, + attrib_ext_names, ext_names) + + +def _parse_vendor_id(data: bytes) -> Tuple[str, str, str, int]: + """Parse a "vendor-id" extension""" + + packet = SSHPacket(data) + + vendor_name = packet.get_string().decode('utf-8', 'backslashreplace') + product_name = packet.get_string().decode('utf-8', 'backslashreplace') + product_version = packet.get_string().decode('utf-8', 'backslashreplace') + product_build = packet.get_uint64() + + return vendor_name, product_name, product_version, product_build + + +def _stat_mode_to_filetype(mode: int) -> int: + """Convert stat mode/permissions to file type""" + + if stat.S_ISREG(mode): + filetype = FILEXFER_TYPE_REGULAR + elif stat.S_ISDIR(mode): + filetype = FILEXFER_TYPE_DIRECTORY + 
elif stat.S_ISLNK(mode): + filetype = FILEXFER_TYPE_SYMLINK + elif stat.S_ISSOCK(mode): + filetype = FILEXFER_TYPE_SOCKET + elif stat.S_ISCHR(mode): + filetype = FILEXFER_TYPE_CHAR_DEVICE + elif stat.S_ISBLK(mode): + filetype = FILEXFER_TYPE_BLOCK_DEVICE + elif stat.S_ISFIFO(mode): + filetype = FILEXFER_TYPE_FIFO + elif stat.S_IFMT(mode) != 0: + filetype = FILEXFER_TYPE_SPECIAL + else: + filetype = FILEXFER_TYPE_UNKNOWN + + return filetype + + +def _nsec_to_tuple(nsec: int) -> Tuple[int, int]: + """Convert nanoseconds since epoch to seconds & remainder""" + + return divmod(nsec, _NSECS_IN_SEC) + + +def _float_sec_to_tuple(sec: float) -> Tuple[int, int]: + """Convert float seconds since epoch to seconds & remainder""" + + return (int(sec), int((sec % 1) * _NSECS_IN_SEC)) + + +def _tuple_to_float_sec(sec: int, nsec: Optional[int]) -> float: + """Convert seconds and remainder to float seconds since epoch""" + + return sec + float(nsec or 0) / _NSECS_IN_SEC + + +def _tuple_to_nsec(sec: int, nsec: Optional[int]) -> int: + """Convert seconds and remainder to nanoseconds since epoch""" + + return sec * _NSECS_IN_SEC + (nsec or 0) + + +def _utime_to_attrs(times: Optional[Tuple[float, float]] = None, + ns: Optional[Tuple[int, int]] = None) -> 'SFTPAttrs': + """Convert utime arguments to SFTPAttrs""" + + if ns: + atime, atime_ns = _nsec_to_tuple(ns[0]) + mtime, mtime_ns = _nsec_to_tuple(ns[1]) + elif times: + atime, atime_ns = _float_sec_to_tuple(times[0]) + mtime, mtime_ns = _float_sec_to_tuple(times[1]) + else: + if hasattr(time, 'time_ns'): + atime, atime_ns = _nsec_to_tuple(time.time_ns()) + else: # pragma: no cover + atime, atime_ns = _float_sec_to_tuple(time.time()) + + mtime, mtime_ns = atime, atime_ns + + return SFTPAttrs(atime=atime, atime_ns=atime_ns, + mtime=mtime, mtime_ns=mtime_ns) + + +def _lookup_uid(user: Optional[str]) -> Optional[int]: + """Return the uid associated with a user name""" + + if user is not None: + try: + # pylint: disable=import-outside-toplevel + import pwd + uid = pwd.getpwnam(user).pw_uid + except (ImportError, KeyError): + try: + uid = int(user) + except ValueError: + raise SFTPOwnerInvalid('Invalid owner: %s' % user) from None + else: + uid = None + + return uid + + +def _lookup_gid(group: Optional[str]) -> Optional[int]: + """Return the gid associated with a group name""" + + if group is not None: + try: + # pylint: disable=import-outside-toplevel + import grp + gid = grp.getgrnam(group).gr_gid + except (ImportError, KeyError): + try: + gid = int(group) + except ValueError: + raise SFTPGroupInvalid('Invalid group: %s' % group) from None + else: + gid = None + + return gid + + +def _lookup_user(uid: Optional[int]) -> str: + """Return the user name associated with a uid""" + + if uid is not None: + try: + # pylint: disable=import-outside-toplevel + import pwd + user = pwd.getpwuid(uid).pw_name + except (ImportError, KeyError): + user = str(uid) + else: + user = '' + + return user + + +def _lookup_group(gid: Optional[int]) -> str: + """Return the group name associated with a gid""" + + if gid is not None: + try: + # pylint: disable=import-outside-toplevel + import grp + group = grp.getgrgid(gid).gr_name + except (ImportError, KeyError): + group = str(gid) + else: + group = '' + + return group + + +def _mode_to_pflags(mode: str) -> Tuple[int, bool]: + """Convert open mode to SFTP open flags""" + + if 'b' in mode: + mode = mode.replace('b', '') + binary = True + else: + binary = False + + pflags = _open_modes.get(mode) + + if not pflags: + raise ValueError('Invalid 
mode: %r' % mode) + + return pflags, binary + + +def _pflags_to_flags(pflags: int) -> Tuple[int, int]: + """Convert SFTPv3 pflags to SFTPv5 desired-access and flags""" + + desired_access = 0 + flags = 0 + + if pflags & (FXF_CREAT | FXF_EXCL) == (FXF_CREAT | FXF_EXCL): + flags = FXF_CREATE_NEW + elif pflags & (FXF_CREAT | FXF_TRUNC) == (FXF_CREAT | FXF_TRUNC): + flags = FXF_CREATE_TRUNCATE + elif pflags & FXF_CREAT: + flags = FXF_OPEN_OR_CREATE + elif pflags & FXF_TRUNC: + flags = FXF_TRUNCATE_EXISTING + else: + flags = FXF_OPEN_EXISTING + + if pflags & FXF_READ: + desired_access |= ACE4_READ_DATA | ACE4_READ_ATTRIBUTES + + if pflags & FXF_WRITE: + desired_access |= ACE4_WRITE_DATA | ACE4_WRITE_ATTRIBUTES + + if pflags & FXF_APPEND: + desired_access |= ACE4_APPEND_DATA + flags |= FXF_APPEND_DATA + + return desired_access, flags + + +def _from_local_path(path: _SFTPPath) -> bytes: + """Convert local path to SFTP path""" + + path = os.fsencode(path) + + if sys.platform == 'win32': # pragma: no cover + path = path.replace(b'\\', b'/') + + if path[:1] != b'/' and path[1:2] == b':': + path = b'/' + path + + return path + + +def _to_local_path(path: bytes) -> _LocalPath: + """Convert SFTP path to local path""" + + if sys.platform == 'win32': # pragma: no cover + path = os.fsdecode(path) + + if path[:1] == '/' and path[2:3] == ':': + path = path[1:] + + path = path.replace('/', '\\') + else: + path = os.fsencode(path) + + return path + + +def _setstat(path: Union[int, _SFTPPath], attrs: 'SFTPAttrs') -> None: + """Utility function to set file attributes""" + + if attrs.size is not None: + os.truncate(path, attrs.size) + + uid = _lookup_uid(attrs.owner) if attrs.uid is None else attrs.uid + gid = _lookup_gid(attrs.group) if attrs.gid is None else attrs.gid + + atime_ns = _tuple_to_nsec(attrs.atime, attrs.atime_ns) \ + if attrs.atime is not None else None + + mtime_ns = _tuple_to_nsec(attrs.mtime, attrs.mtime_ns) \ + if attrs.mtime is not None else None + + if ((atime_ns is None and mtime_ns is not None) or + (atime_ns is not None and mtime_ns is None)): + stat_result = os.stat(path) + + if atime_ns is None and mtime_ns is not None: + atime_ns = stat_result.st_atime_ns + + if atime_ns is not None and mtime_ns is None: + mtime_ns = stat_result.st_mtime_ns + + if uid is not None and gid is not None: + try: + os.chown(path, uid, gid) + except AttributeError: # pragma: no cover + raise NotImplementedError from None + + if attrs.permissions is not None: + os.chmod(path, stat.S_IMODE(attrs.permissions)) + + if atime_ns is not None and mtime_ns is not None: + os.utime(path, ns=(atime_ns, mtime_ns)) + + +class _SFTPParallelIO(Generic[_T]): + """Parallelize I/O requests on files + + This class issues parallel read and write requests on files. 
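+
+ Subclasses override :meth:`run_task` to perform the I/O on a single
+ byte range, and callers consume :meth:`iter`, which yields
+ ``(offset, result)`` pairs and re-queues any range which was only
+ partially completed. A minimal sketch of a subclass (illustrative
+ only, not part of the API)::
+
+     class _NullIO(_SFTPParallelIO[int]):
+         async def run_task(self, offset: int, size: int) -> Tuple[int, int]:
+             return size, size  # report the full range as processed
+
+     async def drain() -> None:
+         # Process bytes 0-1023 in parallel 16 KiB blocks
+         async for offset, result in _NullIO(16384, 128, 0, 1024).iter():
+             pass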
+ + """ + + def __init__(self, block_size: int, max_requests: int, + offset: int, size: int): + self._block_size = block_size + self._max_requests = max_requests + self._offset = offset + self._bytes_left = size + self._pending: Set['asyncio.Task[Tuple[int, int, int, _T]]'] = set() + + async def _start_task(self, offset: int, size: int) -> \ + Tuple[int, int, int, _T]: + """Start a task to perform file I/O on a particular byte range""" + + count, result = await self.run_task(offset, size) + return offset, size, count, result + + def _start_tasks(self) -> None: + """Create parallel file I/O tasks""" + + while self._bytes_left and len(self._pending) < self._max_requests: + size = min(self._bytes_left, self._block_size) + + task = asyncio.ensure_future(self._start_task(self._offset, size)) + self._pending.add(task) + + self._offset += size + self._bytes_left -= size + + async def run_task(self, offset: int, size: int) -> Tuple[int, _T]: + """Perform file I/O on a particular byte range""" + + raise NotImplementedError + + async def iter(self) -> AsyncIterator[Tuple[int, _T]]: + """Perform file I/O and return async iterator of results""" + + self._start_tasks() + + while self._pending: + done, self._pending = await asyncio.wait( + self._pending, return_when=asyncio.FIRST_COMPLETED) + + exceptions = [] + + for task in done: + try: + offset, size, count, result = task.result() + yield offset, result + + if count < size: + self._pending.add(asyncio.ensure_future( + self._start_task(offset+count, size-count))) + except SFTPEOFError: + pass + except (OSError, SFTPError) as exc: + exceptions.append(exc) + + if exceptions: + for task in self._pending: + task.cancel() + + raise exceptions[0] + + self._start_tasks() + + +class _SFTPFileReader(_SFTPParallelIO[bytes]): + """Parallelized SFTP file reader""" + + def __init__(self, block_size: int, max_requests: int, + handler: 'SFTPClientHandler', handle: bytes, + offset: int, size: int): + super().__init__(block_size, max_requests, offset, size) + + self._handler = handler + self._handle = handle + self._start = offset + + async def run_task(self, offset: int, size: int) -> Tuple[int, bytes]: + """Read a block of the file""" + + data, _ = await self._handler.read(self._handle, offset, size) + + return len(data), data + + async def run(self) -> bytes: + """Reassemble and return data from parallel reads""" + + result = bytearray() + + async for offset, data in self.iter(): + pos = offset - self._start + pad = pos - len(result) + + if pad > 0: + result += pad * b'\0' + + result[pos:pos+len(data)] = data + + return bytes(result) + + +class _SFTPFileWriter(_SFTPParallelIO[int]): + """Parallelized SFTP file writer""" + + def __init__(self, block_size: int, max_requests: int, + handler: 'SFTPClientHandler', handle: bytes, + offset: int, data: bytes): + super().__init__(block_size, max_requests, offset, len(data)) + + self._handler = handler + self._handle = handle + self._start = offset + self._data = data + + async def run_task(self, offset: int, size: int) -> Tuple[int, int]: + """Write a block to the file""" + + pos = offset - self._start + await self._handler.write(self._handle, offset, + self._data[pos:pos+size]) + return size, size + + async def run(self): + """Perform parallel writes""" + + async for _ in self.iter(): + pass + +class _SFTPFileCopier(_SFTPParallelIO[int]): + """SFTP file copier + + This class parforms an SFTP file copy, initiating multiple + read and write requests to copy chunks of the file in parallel. 
+ + """ + + def __init__(self, block_size: int, max_requests: int, offset: int, + total_bytes: int, srcfs: _SFTPFSProtocol, + dstfs: _SFTPFSProtocol, srcpath: bytes, dstpath: bytes, + progress_handler: SFTPProgressHandler): + super().__init__(block_size, max_requests, offset, total_bytes) + + self._srcfs = srcfs + self._dstfs = dstfs + + self._srcpath = srcpath + self._dstpath = dstpath + + self._src: Optional[SFTPFileProtocol] = None + self._dst: Optional[SFTPFileProtocol] = None + + self._bytes_copied = 0 + self._total_bytes = total_bytes + self._progress_handler = progress_handler + + async def run_task(self, offset: int, size: int) -> Tuple[int, int]: + """Copy a block of the source file""" + + assert self._src is not None + assert self._dst is not None + + data = await self._src.read(size, offset) + await self._dst.write(data, offset) + datalen = len(data) + + return datalen, datalen + + async def run(self) -> None: + """Perform parallel file copy""" + + try: + self._src = await self._srcfs.open(self._srcpath, 'rb') + self._dst = await self._dstfs.open(self._dstpath, 'wb') + + if self._progress_handler and self._total_bytes == 0: + self._progress_handler(self._srcpath, self._dstpath, 0, 0) + + async for offset, datalen in self.iter(): + if not datalen: + exc = SFTPFailure('Unexpected EOF during file copy') + + setattr(exc, 'filename', self._srcpath) + setattr(exc, 'offset', offset) + + raise exc + + if self._progress_handler: + self._bytes_copied += datalen + self._progress_handler(self._srcpath, self._dstpath, + self._bytes_copied, + self._total_bytes) + finally: + if self._src: # pragma: no branch + await self._src.close() + + if self._dst: # pragma: no branch + await self._dst.close() + + +class SFTPError(Error): + """SFTP error + + This exception is raised when an error occurs while processing + an SFTP request. Exception codes should be taken from + :ref:`SFTP error codes `. + + :param code: + Disconnect reason, taken from :ref:`disconnect reason + codes ` + :param reason: + A human-readable reason for the disconnect + :param lang: (optional) + The language the reason is in + :type code: `int` + :type reason: `str` + :type lang: `str` + + """ + + @staticmethod + def construct(packet: SSHPacket) -> Optional['SFTPError']: + """Construct an SFTPError from an FXP_STATUS response""" + + code = packet.get_uint32() + + if packet: + try: + reason = packet.get_string().decode('utf-8') + lang = packet.get_string().decode('ascii') + except UnicodeDecodeError: + raise SFTPBadMessage('Invalid status message') from None + else: + # Some servers may not always send reason and lang (usually + # when responding with FX_OK). Tolerate this, automatically + # filling in empty strings for them if they're not present. 
+ + reason = '' + lang = '' + + if code == FX_OK: + return None + else: + try: + exc = _sftp_error_map[code](reason, lang) + except KeyError: + exc = SFTPError(code, '%s (error %d)' % (reason, code), lang) + + exc.decode(packet) + return exc + + def encode(self, version: int) -> bytes: + """Encode an SFTPError as bytes in an SSHPacket""" + + if self.code == FX_NOT_A_DIRECTORY and version < 6: + code = FX_NO_SUCH_FILE + elif (self.code <= FX_V6_END and + ((self.code > FX_V3_END and version <= 3) or + (self.code > FX_V4_END and version <= 4) or + (self.code > FX_V5_END and version <= 5))): + code = FX_FAILURE + else: + code = self.code + + return UInt32(code) + String(self.reason) + String(self.lang) + + def decode(self, packet: SSHPacket) -> None: + """Decode error-specific data""" + + # pylint: disable=no-self-use + + # By default, expect no error-specific data + + +class SFTPEOFError(SFTPError): + """SFTP EOF error + + This exception is raised when end of file is reached when + reading a file or directory. + + :param reason: (optional) + Details about the EOF + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str = '', lang: str = DEFAULT_LANG): + super().__init__(FX_EOF, reason, lang) + + +class SFTPNoSuchFile(SFTPError): + """SFTP no such file + + This exception is raised when the requested file is not found. + + :param reason: + Details about the missing file + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NO_SUCH_FILE, reason, lang) + + +class SFTPPermissionDenied(SFTPError): + """SFTP permission denied + + This exception is raised when the permissions are not available + to perform the requested operation. + + :param reason: + Details about the invalid permissions + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_PERMISSION_DENIED, reason, lang) + + +class SFTPFailure(SFTPError): + """SFTP failure + + This exception is raised when an unexpected SFTP failure occurs. + + :param reason: + Details about the failure + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_FAILURE, reason, lang) + + +class SFTPBadMessage(SFTPError): + """SFTP bad message + + This exception is raised when an invalid SFTP message is + received. + + :param reason: + Details about the invalid message + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_BAD_MESSAGE, reason, lang) + + +class SFTPNoConnection(SFTPError): + """SFTP no connection + + This exception is raised when an SFTP request is made on a + closed SSH connection. + + :param reason: + Details about the closed connection + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NO_CONNECTION, reason, lang) + + +class SFTPConnectionLost(SFTPError): + """SFTP connection lost + + This exception is raised when the SSH connection is lost or + closed while making an SFTP request. 
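+
+ Callers that want to survive a dropped connection can catch this
+ exception and retry, for example (a sketch, assuming an established
+ :class:`SFTPClient` named ``sftp`` and some way to reconnect)::
+
+     try:
+         await sftp.get('remote.dat', 'local.dat')
+     except SFTPConnectionLost:
+         ...  # reconnect and retry the transfer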
+ + :param reason: + Details about the connection failure + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_CONNECTION_LOST, reason, lang) + + +class SFTPOpUnsupported(SFTPError): + """SFTP operation unsupported + + This exception is raised when the requested SFTP operation + is not supported. + + :param reason: + Details about the unsupported operation + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_OP_UNSUPPORTED, reason, lang) + + +class SFTPInvalidHandle(SFTPError): + """SFTP invalid handle (SFTPv4+) + + This exception is raised when the handle provided is invalid. + + :param reason: + Details about the invalid handle + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_INVALID_HANDLE, reason, lang) + + +class SFTPNoSuchPath(SFTPError): + """SFTP no such path (SFTPv4+) + + This exception is raised when the requested path is not found. + + :param reason: + Details about the missing path + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NO_SUCH_PATH, reason, lang) + + +class SFTPFileAlreadyExists(SFTPError): + """SFTP file already exists (SFTPv4+) + + This exception is raised when the requested file already exists. + + :param reason: + Details about the existing file + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_FILE_ALREADY_EXISTS, reason, lang) + + +class SFTPWriteProtect(SFTPError): + """SFTP write protect (SFTPv4+) + + This exception is raised when a write is attempted to a file + on read-only or write protected media. + + :param reason: + Details about the requested file + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_WRITE_PROTECT, reason, lang) + + +class SFTPNoMedia(SFTPError): + """SFTP no media (SFTPv4+) + + This exception is raised when there is no media in the + requested drive. + + :param reason: + Details about the requested drive + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NO_MEDIA, reason, lang) + + +class SFTPNoSpaceOnFilesystem(SFTPError): + """SFTP no space on filesystem (SFTPv5+) + + This exception is raised when there is no space available + on the filesystem a file is being written to. + + :param reason: + Details about the filesystem which has filled up + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NO_SPACE_ON_FILESYSTEM, reason, lang) + + +class SFTPQuotaExceeded(SFTPError): + """SFTP quota exceeded (SFTPv5+) + + This exception is raised when the user's storage quota + is exceeded. 
+ + :param reason: + Details about the exceeded quota + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_QUOTA_EXCEEDED, reason, lang) + + +class SFTPUnknownPrincipal(SFTPError): + """SFTP unknown principal (SFTPv5+) + + This exception is raised when a file owner or group is + not recognized. + + :param reason: + Details about the unknown principal + :param lang: (optional) + The language the reason is in + :param unknown_names: (optional) + A list of unknown principal names + :type reason: `str` + :type lang: `str` + :type unknown_names: list of `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG, + unknown_names: Sequence[str] = ()): + super().__init__(FX_UNKNOWN_PRINCIPAL, reason, lang) + self.unknown_names = unknown_names + + def encode(self, version: int) -> bytes: + """Encode an SFTPUnknownPrincipal as bytes in an SSHPacket""" + + return super().encode(version) + \ + b''.join(String(name) for name in self.unknown_names) + + def decode(self, packet: SSHPacket) -> None: + """Decode error-specific data""" + + self.unknown_names = [] + + try: + while packet: + self.unknown_names.append( + packet.get_string().decode('utf-8')) + except UnicodeDecodeError: + raise SFTPBadMessage('Invalid status message') from None + + +class SFTPLockConflict(SFTPError): + """SFTP lock conflict (SFTPv5+) + + This exception is raised when a requested lock is held by + another process. + + :param reason: + Details about the conflicting lock + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_LOCK_CONFLICT, reason, lang) + + +class SFTPDirNotEmpty(SFTPError): + """SFTP directory not empty (SFTPv6+) + + This exception is raised when a directory is not empty. + + :param reason: + Details about the non-empty directory + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_DIR_NOT_EMPTY, reason, lang) + + +class SFTPNotADirectory(SFTPError): + """SFTP not a directory (SFTPv6+) + + This exception is raised when a specified file is + not a directory where one was expected. + + :param reason: + Details about the file expected to be a directory + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NOT_A_DIRECTORY, reason, lang) + + +class SFTPInvalidFilename(SFTPError): + """SFTP invalid filename (SFTPv6+) + + This exception is raised when a filename is not valid. + + :param reason: + Details about the invalid filename + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_INVALID_FILENAME, reason, lang) + + +class SFTPLinkLoop(SFTPError): + """SFTP link loop (SFTPv6+) + + This exception is raised when a symbolic link loop is detected. 
+ + :param reason: + Details about the link loop + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_LINK_LOOP, reason, lang) + + +class SFTPCannotDelete(SFTPError): + """SFTP cannot delete (SFTPv6+) + + This exception is raised when a file cannot be deleted. + + :param reason: + Details about the undeletable file + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_CANNOT_DELETE, reason, lang) + + +class SFTPInvalidParameter(SFTPError): + """SFTP invalid parameter (SFTPv6+) + + This exception is raised when parameters in a request are + out of range or incompatible with one another. + + :param reason: + Details about the invalid parameter + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_INVALID_PARAMETER, reason, lang) + + +class SFTPFileIsADirectory(SFTPError): + """SFTP file is a directory (SFTPv6+) + + This exception is raised when a specified file is a + directory where one isn't allowed. + + :param reason: + Details about the unexpected directory + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_FILE_IS_A_DIRECTORY, reason, lang) + + +class SFTPByteRangeLockConflict(SFTPError): + """SFTP byte range lock conflict (SFTPv6+) + + This exception is raised when a read or write request overlaps + a byte range lock held by another process. + + :param reason: + Details about the conflicting byte range lock + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_BYTE_RANGE_LOCK_CONFLICT, reason, lang) + + +class SFTPByteRangeLockRefused(SFTPError): + """SFTP byte range lock refused (SFTPv6+) + + This exception is raised when a request for a byte range + lock was refused. + + :param reason: + Details about the refused byte range lock + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_BYTE_RANGE_LOCK_REFUSED, reason, lang) + + +class SFTPDeletePending(SFTPError): + """SFTP delete pending (SFTPv6+) + + This exception is raised when an operation was attempted + on a file for which a delete operation is pending. + + :param reason: + Details about the file being deleted + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_DELETE_PENDING, reason, lang) + + +class SFTPFileCorrupt(SFTPError): + """SFTP file corrupt (SFTPv6+) + + This exception is raised when filesystem corruption is detected. 
+ + :param reason: + Details about the corrupted filesystem + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_FILE_CORRUPT, reason, lang) + + +class SFTPOwnerInvalid(SFTPError): + """SFTP owner invalid (SFTPv6+) + + This exception is raised when a principal cannot be assigned + as the owner of a file. + + :param reason: + Details about the principal being set as a file's owner + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_OWNER_INVALID, reason, lang) + + +class SFTPGroupInvalid(SFTPError): + """SFTP group invalid (SFTPv6+) + + This exception is raised when a principal cannot be assigned + as the primary group of a file. + + :param reason: + Details about the principal being set as a file's group + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_GROUP_INVALID, reason, lang) + + +class SFTPNoMatchingByteRangeLock(SFTPError): + """SFTP no matching byte range lock (SFTPv6+) + + This exception is raised when an unlock is requested for a + byte range lock which is not currently held. + + :param reason: + Details about the byte range lock being released + :param lang: (optional) + The language the reason is in + :type reason: `str` + :type lang: `str` + + """ + + def __init__(self, reason: str, lang: str = DEFAULT_LANG): + super().__init__(FX_NO_MATCHING_BYTE_RANGE_LOCK, reason, lang) + + +_sftp_error_map: Dict[int, Callable[[str, str], SFTPError]] = { + FX_EOF: SFTPEOFError, + FX_NO_SUCH_FILE: SFTPNoSuchFile, + FX_PERMISSION_DENIED: SFTPPermissionDenied, + FX_FAILURE: SFTPFailure, + FX_BAD_MESSAGE: SFTPBadMessage, + FX_NO_CONNECTION: SFTPNoConnection, + FX_CONNECTION_LOST: SFTPConnectionLost, + FX_OP_UNSUPPORTED: SFTPOpUnsupported, + FX_INVALID_HANDLE: SFTPInvalidHandle, + FX_NO_SUCH_PATH: SFTPNoSuchPath, + FX_FILE_ALREADY_EXISTS: SFTPFileAlreadyExists, + FX_WRITE_PROTECT: SFTPWriteProtect, + FX_NO_MEDIA: SFTPNoMedia, + FX_NO_SPACE_ON_FILESYSTEM: SFTPNoSpaceOnFilesystem, + FX_QUOTA_EXCEEDED: SFTPQuotaExceeded, + FX_UNKNOWN_PRINCIPAL: SFTPUnknownPrincipal, + FX_LOCK_CONFLICT: SFTPLockConflict, + FX_DIR_NOT_EMPTY: SFTPDirNotEmpty, + FX_NOT_A_DIRECTORY: SFTPNotADirectory, + FX_INVALID_FILENAME: SFTPInvalidFilename, + FX_LINK_LOOP: SFTPLinkLoop, + FX_CANNOT_DELETE: SFTPCannotDelete, + FX_INVALID_PARAMETER: SFTPInvalidParameter, + FX_FILE_IS_A_DIRECTORY: SFTPFileIsADirectory, + FX_BYTE_RANGE_LOCK_CONFLICT: SFTPByteRangeLockConflict, + FX_BYTE_RANGE_LOCK_REFUSED: SFTPByteRangeLockRefused, + FX_DELETE_PENDING: SFTPDeletePending, + FX_FILE_CORRUPT: SFTPFileCorrupt, + FX_OWNER_INVALID: SFTPOwnerInvalid, + FX_GROUP_INVALID: SFTPGroupInvalid, + FX_NO_MATCHING_BYTE_RANGE_LOCK: SFTPNoMatchingByteRangeLock +} + + +class SFTPAttrs(Record): + """SFTP file attributes + + SFTPAttrs is a simple record class with the following fields: + + ============ ================================================= ====== + Field Description Type + ============ ================================================= ====== + type File type (SFTPv4+) byte + size File size in bytes uint64 + alloc_size Allocation file size in bytes (SFTPv6+) uint64 + uid User id of file owner uint32 + gid Group id of file owner 
uint32 + owner User name of file owner (SFTPv4+) string + group Group name of file owner (SFTPv4+) string + permissions Bit mask of POSIX file permissions uint32 + atime Last access time, UNIX epoch seconds uint64 + atime_ns Last access time, nanoseconds (SFTPv4+) uint32 + crtime Creation time, UNIX epoch seconds (SFTPv4+) uint64 + crtime_ns Creation time, nanoseconds (SFTPv4+) uint32 + mtime Last modify time, UNIX epoch seconds uint64 + mtime_ns Last modify time, nanoseconds (SFTPv4+) uint32 + ctime Last change time, UNIX epoch seconds (SFTPv6+) uint64 + ctime_ns Last change time, nanoseconds (SFTPv6+) uint32 + acl Access control list for file (SFTPv4+) bytes + attrib_bits Attribute bits set for file (SFTPv5+) uint32 + attrib_valid Valid attribute bits for file (SFTPv5+) uint32 + text_hint Text/binary hint for file (SFTPv6+) byte + mime_type MIME type for file (SFTPv6+) string + nlink Link count for file (SFTPv6+) uint32 + untrans_name Untranslated name for file (SFTPv6+) bytes + ============ ================================================= ====== + + Extended attributes can also be added via a field named + `extended` which is a list of bytes name/value pairs. + + When setting attributes using an :class:`SFTPAttrs`, only fields + which have been initialized will be changed on the selected file. + + """ + + type: int = FILEXFER_TYPE_UNKNOWN + size: Optional[int] + alloc_size: Optional[int] + uid: Optional[int] + gid: Optional[int] + owner: Optional[str] + group: Optional[str] + permissions: Optional[int] + atime: Optional[int] + atime_ns: Optional[int] + crtime: Optional[int] + crtime_ns: Optional[int] + mtime: Optional[int] + mtime_ns: Optional[int] + ctime: Optional[int] + ctime_ns: Optional[int] + acl: Optional[bytes] + attrib_bits: Optional[int] + attrib_valid: Optional[int] + text_hint: Optional[int] + mime_type: Optional[str] + nlink: Optional[int] + untrans_name: Optional[bytes] + extended: Sequence[Tuple[bytes, bytes]] = () + + def _format_ns(self, k: str): + """Convert epoch seconds & nanoseconds to a string date & time""" + + result = time.ctime(getattr(self, k)) + nsec = getattr(self, k + '_ns') + + if result and nsec: + result = result[:19] + f'.{nsec:09d}' + result[19:] + + return result + + def _format(self, k: str, v: object) -> Optional[str]: + """Convert attributes to more readable values""" + + if v is None or k == 'extended' and not v: + return None + + if k == 'type': + return _file_types.get(cast(int, v), str(v)) \ + if v != FILEXFER_TYPE_UNKNOWN else None + elif k == 'permissions': + return '{:04o}'.format(cast(int, v)) + elif k in ('atime', 'crtime', 'mtime', 'ctime'): + return self._format_ns(k) + elif k in ('atime_ns', 'crtime_ns', 'mtime_ns', 'ctime_ns'): + return None + else: + return str(v) or None + + def encode(self, sftp_version: int) -> bytes: + """Encode SFTP attributes as bytes in an SSH packet""" + + flags = 0 + attrs = [] + + if sftp_version >= 4: + if sftp_version < 5 and self.type >= FILEXFER_TYPE_SOCKET: + filetype = FILEXFER_TYPE_SPECIAL + else: + filetype = self.type + + attrs.append(Byte(filetype)) + + if self.size is not None: + flags |= FILEXFER_ATTR_SIZE + attrs.append(UInt64(self.size)) + + if self.alloc_size is not None: + flags |= FILEXFER_ATTR_ALLOCATION_SIZE + attrs.append(UInt64(self.alloc_size)) + + if sftp_version == 3: + if self.uid is not None and self.gid is not None: + flags |= FILEXFER_ATTR_UIDGID + attrs.append(UInt32(self.uid) + UInt32(self.gid)) + elif self.owner is not None and self.group is not None: + raise 
ValueError('Setting owner and group requires SFTPv4 ' + 'or later') + else: + if self.owner is not None and self.group is not None: + flags |= FILEXFER_ATTR_OWNERGROUP + attrs.append(String(self.owner) + String(self.group)) + elif self.uid is not None and self.gid is not None: + flags |= FILEXFER_ATTR_OWNERGROUP + attrs.append(String(str(self.uid)) + String(str(self.gid))) + + if self.permissions is not None: + flags |= FILEXFER_ATTR_PERMISSIONS + attrs.append(UInt32(self.permissions)) + + if sftp_version == 3: + if self.atime is not None and self.mtime is not None: + flags |= FILEXFER_ATTR_ACMODTIME + attrs.append(UInt32(int(self.atime)) + UInt32(int(self.mtime))) + else: + subsecond = (self.atime_ns is not None or + self.crtime_ns is not None or + self.mtime_ns is not None or + self.ctime_ns is not None) + + if subsecond: + flags |= FILEXFER_ATTR_SUBSECOND_TIMES + + if self.atime is not None: + flags |= FILEXFER_ATTR_ACCESSTIME + attrs.append(UInt64(int(self.atime))) + + if subsecond: + attrs.append(UInt32(self.atime_ns or 0)) + + if self.crtime is not None: + flags |= FILEXFER_ATTR_CREATETIME + attrs.append(UInt64(int(self.crtime))) + + if subsecond: + attrs.append(UInt32(self.crtime_ns or 0)) + + if self.mtime is not None: + flags |= FILEXFER_ATTR_MODIFYTIME + attrs.append(UInt64(int(self.mtime))) + + if subsecond: + attrs.append(UInt32(self.mtime_ns or 0)) + + if sftp_version >= 6 and self.ctime is not None: + flags |= FILEXFER_ATTR_CTIME + attrs.append(UInt64(int(self.ctime))) + + if subsecond: + attrs.append(UInt32(self.ctime_ns or 0)) + + if sftp_version >= 4 and self.acl is not None: + flags |= FILEXFER_ATTR_ACL + attrs.append(String(self.acl)) + + if sftp_version >= 5 and \ + self.attrib_bits is not None and \ + self.attrib_valid is not None: + flags |= FILEXFER_ATTR_BITS + attrs.append(UInt32(self.attrib_bits) + UInt32(self.attrib_valid)) + + if sftp_version >= 6: + if self.text_hint is not None: + flags |= FILEXFER_ATTR_TEXT_HINT + attrs.append(Byte(self.text_hint)) + + if self.mime_type is not None: + flags |= FILEXFER_ATTR_MIME_TYPE + attrs.append(String(self.mime_type)) + + if self.nlink is not None: + flags |= FILEXFER_ATTR_LINK_COUNT + attrs.append(UInt32(self.nlink)) + + if self.untrans_name is not None: + flags |= FILEXFER_ATTR_UNTRANSLATED_NAME + attrs.append(String(self.untrans_name)) + + if self.extended: + flags |= FILEXFER_ATTR_EXTENDED + attrs.append(UInt32(len(self.extended))) + attrs.extend(String(type) + String(data) + for type, data in self.extended) + + return UInt32(flags) + b''.join(attrs) + + @classmethod + def decode(cls, packet: SSHPacket, sftp_version: int) -> 'SFTPAttrs': + """Decode bytes in an SSH packet as SFTP attributes""" + + flags = packet.get_uint32() + attrs = cls() + + # Work around a bug seen in a Huawei SFTP server where + # FILEXFER_ATTR_MODIFYTIME is included in flags, even though + # the SFTP version is set to 3. That flag is only defined for + # SFTPv4 and later. 
+ if sftp_version == 3 and flags & (FILEXFER_ATTR_ACMODTIME | + FILEXFER_ATTR_MODIFYTIME): + flags &= ~FILEXFER_ATTR_MODIFYTIME + + unsupported_attrs = flags & ~_valid_attr_flags[sftp_version] + + if unsupported_attrs: + raise SFTPBadMessage('Unsupported attribute flags: 0x%08x' % + unsupported_attrs) + + if sftp_version >= 4: + attrs.type = packet.get_byte() + + if flags & FILEXFER_ATTR_SIZE: + attrs.size = packet.get_uint64() + + if flags & FILEXFER_ATTR_ALLOCATION_SIZE: + attrs.alloc_size = packet.get_uint64() + + if sftp_version == 3: + if flags & FILEXFER_ATTR_UIDGID: + attrs.uid = packet.get_uint32() + attrs.gid = packet.get_uint32() + else: + if flags & FILEXFER_ATTR_OWNERGROUP: + owner = packet.get_string() + + try: + attrs.owner = owner.decode('utf-8') + except UnicodeDecodeError: + raise SFTPOwnerInvalid('Invalid owner name: %s' % + owner.decode('utf-8', 'backslashreplace')) from None + + group = packet.get_string() + + try: + attrs.group = group.decode('utf-8') + except UnicodeDecodeError: + raise SFTPGroupInvalid('Invalid group name: %s' % + group.decode('utf-8', 'backslashreplace')) from None + + if flags & FILEXFER_ATTR_PERMISSIONS: + mode = packet.get_uint32() + + if sftp_version == 3: + attrs.type = _stat_mode_to_filetype(mode) + attrs.permissions = mode & 0xffff + else: + attrs.permissions = mode & 0xfff + + if sftp_version == 3: + if flags & FILEXFER_ATTR_ACMODTIME: + attrs.atime = packet.get_uint32() + attrs.mtime = packet.get_uint32() + else: + if flags & FILEXFER_ATTR_ACCESSTIME: + attrs.atime = packet.get_uint64() + + if flags & FILEXFER_ATTR_SUBSECOND_TIMES: + attrs.atime_ns = packet.get_uint32() + + if flags & FILEXFER_ATTR_CREATETIME: + attrs.crtime = packet.get_uint64() + + if flags & FILEXFER_ATTR_SUBSECOND_TIMES: + attrs.crtime_ns = packet.get_uint32() + + if flags & FILEXFER_ATTR_MODIFYTIME: + attrs.mtime = packet.get_uint64() + + if flags & FILEXFER_ATTR_SUBSECOND_TIMES: + attrs.mtime_ns = packet.get_uint32() + + if flags & FILEXFER_ATTR_CTIME: + attrs.ctime = packet.get_uint64() + + if flags & FILEXFER_ATTR_SUBSECOND_TIMES: + attrs.ctime_ns = packet.get_uint32() + + if flags & FILEXFER_ATTR_ACL: + attrs.acl = packet.get_string() + + if flags & FILEXFER_ATTR_BITS: + attrs.attrib_bits = packet.get_uint32() + attrs.attrib_valid = packet.get_uint32() + + if flags & FILEXFER_ATTR_TEXT_HINT: + attrs.text_hint = packet.get_byte() + + if flags & FILEXFER_ATTR_MIME_TYPE: + try: + attrs.mime_type = packet.get_string().decode('utf-8') + except UnicodeDecodeError: + raise SFTPBadMessage('Invalid MIME type') from None + + if flags & FILEXFER_ATTR_LINK_COUNT: + attrs.nlink = packet.get_uint32() + + if flags & FILEXFER_ATTR_UNTRANSLATED_NAME: + attrs.untrans_name = packet.get_string() + + if flags & FILEXFER_ATTR_EXTENDED: + count = packet.get_uint32() + attrs.extended = [] + + for _ in range(count): + attr = packet.get_string() + data = packet.get_string() + attrs.extended.append((attr, data)) + + return attrs + + @classmethod + def from_local(cls, result: os.stat_result) -> 'SFTPAttrs': + """Convert from local stat attributes""" + + mode = result.st_mode + filetype = _stat_mode_to_filetype(mode) + + if sys.platform == 'win32': # pragma: no cover + uid = 0 + gid = 0 + owner = '' + group = '' + else: + uid = result.st_uid + gid = result.st_gid + owner = _lookup_user(uid) + group = _lookup_group(gid) + + atime, atime_ns = _nsec_to_tuple(result.st_atime_ns) + mtime, mtime_ns = _nsec_to_tuple(result.st_mtime_ns) + ctime, ctime_ns = _nsec_to_tuple(result.st_ctime_ns) + + if 
sys.platform == 'win32': # pragma: no cover + crtime, crtime_ns = ctime, ctime_ns + elif hasattr(result, 'st_birthtime'): # pragma: no cover + crtime, crtime_ns = _float_sec_to_tuple(result.st_birthtime) + else: # pragma: no cover + crtime, crtime_ns = mtime, mtime_ns + + return cls(filetype, result.st_size, None, uid, gid, owner, group, + mode, atime, atime_ns, crtime, crtime_ns, mtime, mtime_ns, + ctime, ctime_ns, None, None, None, None, None, + result.st_nlink, None) + + +class SFTPVFSAttrs(Record): + """SFTP file system attributes + + SFTPVFSAttrs is a simple record class with the following fields: + + ============ =========================================== ====== + Field Description Type + ============ =========================================== ====== + bsize File system block size (I/O size) uint64 + frsize Fundamental block size (allocation size) uint64 + blocks Total data blocks (in frsize units) uint64 + bfree Free data blocks uint64 + bavail Available data blocks (for non-root) uint64 + files Total file inodes uint64 + ffree Free file inodes uint64 + favail Available file inodes (for non-root) uint64 + fsid File system id uint64 + flags File system flags (read-only, no-setuid) uint64 + namemax Maximum filename length uint64 + ============ =========================================== ====== + + """ + + bsize: int = 0 + frsize: int = 0 + blocks: int = 0 + bfree: int = 0 + bavail: int = 0 + files: int = 0 + ffree: int = 0 + favail: int = 0 + fsid: int = 0 + flags: int = 0 + namemax: int = 0 + + def encode(self, sftp_version: int) -> bytes: + """Encode SFTP statvfs attributes as bytes in an SSH packet""" + + # pylint: disable=unused-argument + + return b''.join((UInt64(self.bsize), UInt64(self.frsize), + UInt64(self.blocks), UInt64(self.bfree), + UInt64(self.bavail), UInt64(self.files), + UInt64(self.ffree), UInt64(self.favail), + UInt64(self.fsid), UInt64(self.flags), + UInt64(self.namemax))) + + @classmethod + def decode(cls, packet: SSHPacket, sftp_version: int) -> 'SFTPVFSAttrs': + """Decode bytes in an SSH packet as SFTP statvfs attributes""" + + # pylint: disable=unused-argument + + vfsattrs = cls() + + vfsattrs.bsize = packet.get_uint64() + vfsattrs.frsize = packet.get_uint64() + vfsattrs.blocks = packet.get_uint64() + vfsattrs.bfree = packet.get_uint64() + vfsattrs.bavail = packet.get_uint64() + vfsattrs.files = packet.get_uint64() + vfsattrs.ffree = packet.get_uint64() + vfsattrs.favail = packet.get_uint64() + vfsattrs.fsid = packet.get_uint64() + vfsattrs.flags = packet.get_uint64() + vfsattrs.namemax = packet.get_uint64() + + return vfsattrs + + @classmethod + def from_local(cls, result: os.statvfs_result) -> 'SFTPVFSAttrs': + """Convert from local statvfs attributes""" + + return cls(result.f_bsize, result.f_frsize, result.f_blocks, + result.f_bfree, result.f_bavail, result.f_files, + result.f_ffree, result.f_favail, 0, result.f_flag, + result.f_namemax) + + +class SFTPName(Record): + """SFTP file name and attributes + + SFTPName is a simple record class with the following fields: + + ========= ================================== ================== + Field Description Type + ========= ================================== ================== + filename Filename `str` or `bytes` + longname Expanded form of filename & attrs `str` or `bytes` + attrs File attributes :class:`SFTPAttrs` + ========= ================================== ================== + + A list of these is returned by :meth:`readdir() ` + in :class:`SFTPClient` when retrieving the contents of a directory. 
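+
+ For example, to list a directory along with each entry's size (a
+ sketch, assuming an established :class:`SFTPClient` named ``sftp``)::
+
+     for name in await sftp.readdir('.'):
+         print(name.filename, name.attrs.size)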
+ + """ + + filename: BytesOrStr = '' + longname: BytesOrStr = '' + attrs: SFTPAttrs = SFTPAttrs() + + def _format(self, k: str, v: object) -> Optional[str]: + """Convert name fields to more readable values""" + + if k == 'longname' and not v: + return None + + if isinstance(v, bytes): + v = v.decode('utf-8', 'backslashreplace') + + return str(v) or None + + def encode(self, sftp_version: int) -> bytes: + """Encode an SFTP name as bytes in an SSH packet""" + + longname = String(self.longname) if sftp_version == 3 else b'' + + return (String(self.filename) + longname + + self.attrs.encode(sftp_version)) + + @classmethod + def decode(cls, packet: SSHPacket, sftp_version: int) -> 'SFTPName': + """Decode bytes in an SSH packet as an SFTP name""" + + filename = packet.get_string() + longname = packet.get_string() if sftp_version == 3 else None + attrs = SFTPAttrs.decode(packet, sftp_version) + + return cls(filename, longname, attrs) + + +class SFTPGlob: + """SFTP glob matcher""" + + def __init__(self, fs: _SFTPGlobProtocol, multiple=False): + self._fs = fs + self._multiple = multiple + self._prev_matches: Set[bytes] = set() + self._new_matches: List[SFTPName] = [] + self._matched = False + self._stat_cache: Dict[bytes, Optional[SFTPAttrs]] = {} + self._scandir_cache: Dict[bytes, List[SFTPName]] = {} + + def _split(self, pattern: bytes) -> Tuple[bytes, _SFTPPatList]: + """Split out exact parts of a glob pattern""" + + patlist: _SFTPPatList = [] + + if any(c in pattern for c in b'*?[]'): + path = b'' + plain: List[bytes] = [] + + for current in pattern.split(b'/'): + if any(c in current for c in b'*?[]'): + if plain: + if patlist: + patlist.append(plain) + else: + path = b'/'.join(plain) or b'/' + + plain = [] + + patlist.append(current) + else: + plain.append(current) + + if plain: + patlist.append(plain) + else: + path = pattern + + return path, patlist + + def _report_match(self, path, attrs): + """Report a matching name""" + + self._matched = True + + if self._multiple: + if path not in self._prev_matches: + self._prev_matches.add(path) + else: + return + + self._new_matches.append(SFTPName(path, attrs=attrs)) + + async def _stat(self, path) -> Optional[SFTPAttrs]: + """Cache results of calls to stat""" + + try: + return self._stat_cache[path] + except KeyError: + pass + + try: + attrs = await self._fs.stat(path) + except (SFTPNoSuchFile, SFTPPermissionDenied, SFTPNoSuchPath): + attrs = None + + self._stat_cache[path] = attrs + return attrs + + async def _scandir(self, path) -> AsyncIterator[SFTPName]: + """Cache results of calls to scandir""" + + try: + for entry in self._scandir_cache[path]: + yield entry + + return + except KeyError: + pass + + entries: List[SFTPName] = [] + + try: + async for entry in self._fs.scandir(path): + entries.append(entry) + yield entry + except (SFTPNoSuchFile, SFTPPermissionDenied, SFTPNoSuchPath): + pass + + self._scandir_cache[path] = entries + + async def _match_exact(self, path: bytes, pattern: Sequence[bytes], + patlist: _SFTPPatList) -> None: + """Match on an exact portion of a path""" + + newpath = posixpath.join(path, *pattern) + newpatlist = patlist[1:] + + attrs = await self._stat(newpath) + + if attrs is None: + return + + if newpatlist: + if attrs.type == FILEXFER_TYPE_DIRECTORY: + await self._match(newpath, attrs, newpatlist) + else: + self._report_match(newpath, attrs) + + async def _match_pattern(self, path: bytes, attrs: SFTPAttrs, + pattern: bytes, patlist: _SFTPPatList) -> None: + """Match on a pattern portion of a path""" + + newpatlist = 
patlist[1:] + + if pattern == b'**': + if newpatlist: + await self._match(path, attrs, newpatlist) + else: + self._report_match(path, attrs) + + async for entry in self._scandir(path or b'.'): + filename = cast(bytes, entry.filename) + + if filename in (b'.', b'..'): + continue + + if not pattern or fnmatch(filename, pattern): + newpath = posixpath.join(path, filename) + attrs = entry.attrs + + if pattern == b'**' and attrs.type == FILEXFER_TYPE_DIRECTORY: + await self._match(newpath, attrs, patlist) + elif newpatlist: + if attrs.type == FILEXFER_TYPE_DIRECTORY: + await self._match(newpath, attrs, newpatlist) + else: + self._report_match(newpath, attrs) + + async def _match(self, path: bytes, attrs: SFTPAttrs, + patlist: _SFTPPatList) -> None: + """Recursively match against a glob pattern""" + + pattern = patlist[0] + + if isinstance(pattern, list): + await self._match_exact(path, pattern, patlist) + else: + await self._match_pattern(path, attrs, pattern, patlist) + + async def match(self, pattern: bytes, + error_handler: SFTPErrorHandler = None, + sftp_version = MIN_SFTP_VERSION) -> Sequence[SFTPName]: + """Match against a glob pattern""" + + self._new_matches = [] + self._matched = False + + path, patlist = self._split(pattern) + + try: + attrs = await self._stat(path or b'.') + + if attrs: + if patlist: + if attrs.type == FILEXFER_TYPE_DIRECTORY: + await self._match(path, attrs, patlist) + elif path: + self._report_match(path, attrs) + + if pattern and not self._matched: + exc = SFTPNoSuchPath if sftp_version >= 4 else SFTPNoSuchFile + raise exc('No matches found') + except (OSError, SFTPError) as exc: + setattr(exc, 'srcpath', pattern) + + if error_handler: + error_handler(exc) + else: + raise + + return self._new_matches + + +class SFTPHandler(SSHPacketLogger): + """SFTP session handler""" + + _data_pkttypes = {FXP_WRITE, FXP_DATA} + + _handler_names = get_symbol_names(_const_dict, 'FXP_') + _realpath_check_names = get_symbol_names(_const_dict, 'FXRP_', 5) + + # SFTP implementations with broken order for SYMLINK arguments + _nonstandard_symlink_impls = ['OpenSSH', 'paramiko'] + + # Return types by message -- unlisted entries always return FXP_STATUS, + # those below return FXP_STATUS on error + _return_types = { + FXP_OPEN: FXP_HANDLE, + FXP_READ: FXP_DATA, + FXP_LSTAT: FXP_ATTRS, + FXP_FSTAT: FXP_ATTRS, + FXP_OPENDIR: FXP_HANDLE, + FXP_READDIR: FXP_NAME, + FXP_REALPATH: FXP_NAME, + FXP_STAT: FXP_ATTRS, + FXP_READLINK: FXP_NAME, + b'statvfs@openssh.com': FXP_EXTENDED_REPLY, + b'fstatvfs@openssh.com': FXP_EXTENDED_REPLY + } + + def __init__(self, reader: 'SSHReader[bytes]', writer: 'SSHWriter[bytes]'): + self._reader: Optional['SSHReader[bytes]'] = reader + self._writer: Optional['SSHWriter[bytes]'] = writer + self._logger = reader.logger.get_child('sftp') + + @property + def logger(self) -> SSHLogger: + """A logger associated with this SFTP handler""" + + return self._logger + + async def _cleanup(self, exc: Optional[Exception]) -> None: + """Clean up this SFTP session""" + + # pylint: disable=unused-argument + + if self._writer: # pragma: no branch + self._writer.close() + self._reader = None + self._writer = None + + def _log_extensions(self, extensions: Sequence[Tuple[bytes, bytes]]): + """Dump a formatted list of extensions to the debug log""" + + for name, data in extensions: + if name == b'acl-supported': + capabilities = _parse_acl_supported(data) + + self.logger.debug1(' acl-supported:') + self.logger.debug1(' capabilities: 0x%08x', capabilities) + elif name == b'supported': + 
attr_mask, attrib_mask, open_flags, access_mask, \ + max_read_size, ext_names = _parse_supported(data) + + self.logger.debug1(' supported:') + self.logger.debug1(' attr_mask: 0x%08x', attr_mask) + self.logger.debug1(' attrib_mask: 0x%08x', attrib_mask) + self.logger.debug1(' open_flags: 0x%08x', open_flags) + self.logger.debug1(' access_mask: 0x%08x', access_mask) + self.logger.debug1(' max_read_size: %d', max_read_size) + + if ext_names: + self.logger.debug1(' extensions:') + + for ext_name in ext_names: + self.logger.debug1(' %s', ext_name) + elif name == b'supported2': + attr_mask, attrib_mask, open_flags, access_mask, \ + max_read_size, open_block_vector, block_vector, \ + attrib_ext_names, ext_names = _parse_supported2(data) + + self.logger.debug1(' supported2:') + self.logger.debug1(' attr_mask: 0x%08x', attr_mask) + self.logger.debug1(' attrib_mask: 0x%08x', attrib_mask) + self.logger.debug1(' open_flags: 0x%08x', open_flags) + self.logger.debug1(' access_mask: 0x%08x', access_mask) + self.logger.debug1(' max_read_size: %d', max_read_size) + self.logger.debug1(' open_block_vector: 0x%04x', + open_block_vector) + self.logger.debug1(' block_vector: 0x%04x', block_vector) + + if attrib_ext_names: + self.logger.debug1(' attrib_extensions:') + + for attrib_ext_name in attrib_ext_names: + self.logger.debug1(' %s', attrib_ext_name) + + if ext_names: + self.logger.debug1(' extensions:') + + for ext_name in ext_names: + self.logger.debug1(' %s', ext_name) + elif name == b'vendor-id': + vendor_name, product_name, product_version, product_build = \ + _parse_vendor_id(data) + + self.logger.debug1(' vendor-id:') + self.logger.debug1(' vendor_name: %s', vendor_name) + self.logger.debug1(' product_name: %s', product_name) + self.logger.debug1(' product_version: %s', product_version) + self.logger.debug1(' product_build: %d', product_build) + else: + self.logger.debug1(' %s%s%s', name, + ': ' if data else '', data) + + async def _process_packet(self, pkttype: int, pktid: int, + packet: SSHPacket) -> None: + """Abstract method for processing SFTP packets""" + + raise NotImplementedError + + def send_packet(self, pkttype: int, pktid: Optional[int], + *args: bytes) -> None: + """Send an SFTP packet""" + + if not self._writer: + raise SFTPNoConnection('Connection not open') + + payload = Byte(pkttype) + b''.join(args) + + try: + self._writer.write(UInt32(len(payload)) + payload) + except ConnectionError as exc: + raise SFTPConnectionLost(str(exc)) from None + + self.log_sent_packet(pkttype, pktid, payload) + + async def recv_packet(self) -> SSHPacket: + """Receive an SFTP packet""" + + assert self._reader is not None + + pktlen = await self._reader.readexactly(4) + pktlen = int.from_bytes(pktlen, 'big') + + packet = await self._reader.readexactly(pktlen) + return SSHPacket(packet) + + async def recv_packets(self) -> None: + """Receive and process SFTP packets""" + + try: + while self._reader: # pragma: no branch + packet = await self.recv_packet() + + pkttype = packet.get_byte() + pktid = packet.get_uint32() + + self.log_received_packet(pkttype, pktid, packet) + + await self._process_packet(pkttype, pktid, packet) + except PacketDecodeError as exc: + await self._cleanup(SFTPBadMessage(str(exc))) + except EOFError: + await self._cleanup(None) + except (OSError, Error) as exc: + await self._cleanup(exc) + + +class SFTPClientHandler(SFTPHandler): + """An SFTP client session handler""" + + def __init__(self, loop: asyncio.AbstractEventLoop, + reader: 'SSHReader[bytes]', writer: 'SSHWriter[bytes]', + 
sftp_version: int): + super().__init__(reader, writer) + + self._loop = loop + self._version = sftp_version + self._next_pktid = 0 + self._requests: Dict[int, _RequestWaiter] = {} + self._nonstandard_symlink = False + self._supports_posix_rename = False + self._supports_statvfs = False + self._supports_fstatvfs = False + self._supports_hardlink = False + self._supports_fsync = False + + @property + def version(self) -> int: + """SFTP version associated with this SFTP session""" + + return self._version + + async def _cleanup(self, exc: Optional[Exception]) -> None: + """Clean up this SFTP client session""" + + req_exc = exc or SFTPConnectionLost('Connection closed') + + for waiter in list(self._requests.values()): + if not waiter.cancelled(): # pragma: no branch + waiter.set_exception(req_exc) + + self._requests = {} + + self.logger.info('SFTP client exited%s', ': ' + str(exc) if exc else '') + + await super()._cleanup(exc) + + async def _process_packet(self, pkttype: int, pktid: int, + packet: SSHPacket) -> None: + """Process incoming SFTP responses""" + + try: + waiter = self._requests.pop(pktid) + except KeyError: + await self._cleanup(SFTPBadMessage('Invalid response id')) + else: + if not waiter.cancelled(): # pragma: no branch + waiter.set_result((pkttype, packet)) + + def _send_request(self, pkttype: Union[int, bytes], args: Sequence[bytes], + waiter: _RequestWaiter) -> None: + """Send an SFTP request""" + + pktid = self._next_pktid + self._next_pktid = (self._next_pktid + 1) & 0xffffffff + + self._requests[pktid] = waiter + + if isinstance(pkttype, bytes): + hdr = UInt32(pktid) + String(pkttype) + pkttype = FXP_EXTENDED + else: + hdr = UInt32(pktid) + + self.send_packet(pkttype, pktid, hdr, *args) + + async def _make_request(self, pkttype: Union[int, bytes], + *args: bytes) -> object: + """Make an SFTP request and wait for a response""" + + waiter: _RequestWaiter = self._loop.create_future() + self._send_request(pkttype, args, waiter) + resptype, resp = await waiter + + return_type = self._return_types.get(pkttype) + + if resptype not in (FXP_STATUS, return_type): + raise SFTPBadMessage('Unexpected response type: %s' % resptype) + + result = self._packet_handlers[resptype](self, resp) + + if result is not None or return_type is None: + return result + else: + raise SFTPBadMessage('Unexpected FX_OK response') + + def _process_status(self, packet: SSHPacket) -> None: + """Process an incoming SFTP status response""" + + exc = SFTPError.construct(packet) + + if self._version < 6: + packet.check_end() + + if exc: + raise exc + else: + self.logger.debug1('Received OK') + + def _process_handle(self, packet: SSHPacket) -> bytes: + """Process an incoming SFTP handle response""" + + handle = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received handle %s', handle.hex()) + + return handle + + def _process_data(self, packet: SSHPacket) -> Tuple[bytes, bool]: + """Process an incoming SFTP data response""" + + data = packet.get_string() + at_end = packet.get_boolean() if packet and self._version >= 6 \ + else False + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received %s%s', plural(len(data), 'data byte'), + ' (at end)' if at_end else '') + + return data, at_end + + def _process_name(self, packet: SSHPacket) -> _SFTPNames: + """Process an incoming SFTP name response""" + + count = packet.get_uint32() + names = [SFTPName.decode(packet, self._version) for _ in range(count)] + at_end = packet.get_boolean() if packet and 
self._version >= 6 \ + else False + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received %s%s', plural(len(names), 'name'), + ' (at end)' if at_end else '') + + for name in names: + self.logger.debug1(' %s', name) + + return names, at_end + + def _process_attrs(self, packet: SSHPacket) -> SFTPAttrs: + """Process an incoming SFTP attributes response""" + + attrs = SFTPAttrs().decode(packet, self._version) + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received %s', attrs) + + return attrs + + def _process_extended_reply(self, packet: SSHPacket) -> SSHPacket: + """Process an incoming SFTP extended reply response""" + + # pylint: disable=no-self-use + + # Let the caller do the decoding for extended replies + return packet + + _packet_handlers = { + FXP_STATUS: _process_status, + FXP_HANDLE: _process_handle, + FXP_DATA: _process_data, + FXP_NAME: _process_name, + FXP_ATTRS: _process_attrs, + FXP_EXTENDED_REPLY: _process_extended_reply + } + + async def start(self) -> None: + """Start an SFTP client""" + + assert self._reader is not None + + self.logger.debug1('Sending init, version=%d', self._version) + + self.send_packet(FXP_INIT, None, UInt32(self._version)) + + try: + resp = await self.recv_packet() + + resptype = resp.get_byte() + + self.log_received_packet(resptype, None, resp) + + if resptype != FXP_VERSION: + raise SFTPBadMessage('Expected version message') + + version = resp.get_uint32() + + if not MIN_SFTP_VERSION <= version <= MAX_SFTP_VERSION: + raise SFTPBadMessage('Unsupported version: %d' % version) + + rcvd_extensions: List[Tuple[bytes, bytes]] = [] + + while resp: + name = resp.get_string() + data = resp.get_string() + rcvd_extensions.append((name, data)) + except PacketDecodeError as exc: + raise SFTPBadMessage(str(exc)) from None + except SFTPError as exc: + raise + except ConnectionLost as exc: + raise SFTPConnectionLost(str(exc)) from None + except (asyncio.IncompleteReadError, Error) as exc: + raise SFTPConnectionLost(str(exc)) from None + + self.logger.debug1('Received version=%d%s', version, + ', extensions:' if rcvd_extensions else '') + + self._log_extensions(rcvd_extensions) + + self._version = version + + for name, data in rcvd_extensions: + if name == b'posix-rename@openssh.com' and data == b'1': + self._supports_posix_rename = True + elif name == b'statvfs@openssh.com' and data == b'2': + self._supports_statvfs = True + elif name == b'fstatvfs@openssh.com' and data == b'2': + self._supports_fstatvfs = True + elif name == b'hardlink@openssh.com' and data == b'1': + self._supports_hardlink = True + elif name == b'fsync@openssh.com' and data == b'1': + self._supports_fsync = True + + if version == 3: + # Check if the server has a buggy SYMLINK implementation + + server_version = cast(str, + self._reader.get_extra_info('server_version', '')) + + if any(name in server_version + for name in self._nonstandard_symlink_impls): + self.logger.debug1('Adjusting for non-standard symlink ' + 'implementation') + self._nonstandard_symlink = True + + async def open(self, filename: bytes, pflags: int, + attrs: SFTPAttrs) -> bytes: + """Make an SFTP open request""" + + if self._version >= 5: + desired_access, flags = _pflags_to_flags(pflags) + + self.logger.debug1('Sending open for %s, desired_access=0x%08x, ' + 'flags=0x%08x%s', filename, desired_access, + flags, hide_empty(attrs)) + + return cast(bytes, await self._make_request( + FXP_OPEN, String(filename), UInt32(desired_access), + UInt32(flags), attrs.encode(self._version))) + 
else: + self.logger.debug1('Sending open for %s, mode 0x%02x%s', + filename, pflags, hide_empty(attrs)) + + return cast(bytes, await self._make_request( + FXP_OPEN, String(filename), UInt32(pflags), + attrs.encode(self._version))) + + async def open56(self, filename: bytes, desired_access: int, + flags: int, attrs: SFTPAttrs) -> bytes: + """Make an SFTPv5/v6 open request""" + + self.logger.debug1('Sending open for %s, desired_access=0x%08x, ' + 'flags=0x%08x%s', filename, desired_access, + flags, hide_empty(attrs)) + + if self._version >= 5: + return cast(bytes, await self._make_request( + FXP_OPEN, String(filename), UInt32(desired_access), + UInt32(flags), attrs.encode(self._version))) + else: + raise SFTPOpUnsupported('SFTPv5/v6 open not supported by server') + + async def close(self, handle: bytes) -> None: + """Make an SFTP close request""" + + self.logger.debug1('Sending close for handle %s', handle.hex()) + + if self._writer: + await self._make_request(FXP_CLOSE, String(handle)) + + async def read(self, handle: bytes, offset: int, + length: int) -> Tuple[bytes, bool]: + """Make an SFTP read request""" + + self.logger.debug1('Sending read for %s at offset %d in handle %s', + plural(length, 'byte'), offset, handle.hex()) + + return cast(Tuple[bytes, bool], await self._make_request( + FXP_READ, String(handle), UInt64(offset), UInt32(length))) + + async def write(self, handle: bytes, offset: int, data: bytes) -> int: + """Make an SFTP write request""" + + self.logger.debug1('Sending write for %s at offset %d in handle %s', + plural(len(data), 'byte'), offset, handle.hex()) + + return cast(int, await self._make_request( + FXP_WRITE, String(handle), UInt64(offset), String(data))) + + async def stat(self, path: bytes, flags: int) -> SFTPAttrs: + """Make an SFTP stat request""" + + if self._version >= 4: + flag_bytes = UInt32(flags) + flag_text = ', flags 0x%08x' % flags + else: + flag_bytes = b'' + flag_text = '' + + self.logger.debug1('Sending stat for %s%s', path, flag_text) + + return cast(SFTPAttrs, await self._make_request( + FXP_STAT, String(path), flag_bytes)) + + async def lstat(self, path: bytes, flags: int) -> SFTPAttrs: + """Make an SFTP lstat request""" + + if self._version >= 4: + flag_bytes = UInt32(flags) + flag_text = ', flags 0x%08x' % flags + else: + flag_bytes = b'' + flag_text = '' + + self.logger.debug1('Sending lstat for %s%s', path, flag_text) + + return cast(SFTPAttrs, await self._make_request( + FXP_LSTAT, String(path), flag_bytes)) + + async def fstat(self, handle: bytes, flags: int) -> SFTPAttrs: + """Make an SFTP fstat request""" + + if self._version >= 4: + flag_bytes = UInt32(flags) + flag_text = ', flags 0x%08x' % flags + else: + flag_bytes = b'' + flag_text = '' + + self.logger.debug1('Sending fstat for handle %s%s', + handle.hex(), flag_text) + + return cast(SFTPAttrs, await self._make_request( + FXP_FSTAT, String(handle), flag_bytes)) + + async def setstat(self, path: bytes, attrs: SFTPAttrs) -> None: + """Make an SFTP setstat request""" + + self.logger.debug1('Sending setstat for %s%s', path, hide_empty(attrs)) + + await self._make_request(FXP_SETSTAT, String(path), + attrs.encode(self._version)) + + async def fsetstat(self, handle: bytes, attrs: SFTPAttrs) -> None: + """Make an SFTP fsetstat request""" + + self.logger.debug1('Sending fsetstat for handle %s%s', + handle.hex(), hide_empty(attrs)) + + await self._make_request(FXP_FSETSTAT, String(handle), + attrs.encode(self._version)) + + async def statvfs(self, path: bytes) -> SFTPVFSAttrs: + """Make an 
SFTP statvfs request""" + + if self._supports_statvfs: + self.logger.debug1('Sending statvfs for %s', path) + + packet = cast(SSHPacket, await self._make_request( + b'statvfs@openssh.com', String(path))) + + vfsattrs = SFTPVFSAttrs.decode(packet, self._version) + packet.check_end() + + self.logger.debug1('Received %s', vfsattrs) + + return vfsattrs + else: + raise SFTPOpUnsupported('statvfs not supported') + + async def fstatvfs(self, handle: bytes) -> SFTPVFSAttrs: + """Make an SFTP fstatvfs request""" + + if self._supports_fstatvfs: + self.logger.debug1('Sending fstatvfs for handle %s', handle.hex()) + + packet = cast(SSHPacket, await self._make_request( + b'fstatvfs@openssh.com', String(handle))) + + vfsattrs = SFTPVFSAttrs.decode(packet, self._version) + packet.check_end() + + self.logger.debug1('Received %s', vfsattrs) + + return vfsattrs + else: + raise SFTPOpUnsupported('fstatvfs not supported') + + async def remove(self, path: bytes) -> None: + """Make an SFTP remove request""" + + self.logger.debug1('Sending remove for %s', path) + + await self._make_request(FXP_REMOVE, String(path)) + + async def rename(self, oldpath: bytes, newpath: bytes, flags: int) -> None: + """Make an SFTP rename request""" + + if self._version >= 5: + self.logger.debug1('Sending rename request from %s to %s%s', + oldpath, newpath, ', flags=0x%x' % flags + if flags else '') + + await self._make_request(FXP_RENAME, String(oldpath), + String(newpath), UInt32(flags)) + elif flags and self._supports_posix_rename: + self.logger.debug1('Sending OpenSSH POSIX rename request ' + 'from %s to %s', oldpath, newpath) + + await self._make_request(b'posix-rename@openssh.com', + String(oldpath), String(newpath)) + elif not flags: + self.logger.debug1('Sending rename request from %s to %s', + oldpath, newpath) + + await self._make_request(FXP_RENAME, String(oldpath), + String(newpath)) + else: + raise SFTPOpUnsupported('Rename with overwrite not supported') + + async def posix_rename(self, oldpath: bytes, newpath: bytes) -> None: + """Make an SFTP POSIX rename request""" + + if self._supports_posix_rename: + self.logger.debug1('Sending OpenSSH POSIX rename request ' + 'from %s to %s', oldpath, newpath) + + await self._make_request(b'posix-rename@openssh.com', + String(oldpath), String(newpath)) + elif self._version >= 5: + self.logger.debug1('Sending rename request from %s to %s ' + 'with overwrite', oldpath, newpath) + + await self._make_request(FXP_RENAME, String(oldpath), + String(newpath), UInt32(FXR_OVERWRITE)) + else: + raise SFTPOpUnsupported('POSIX rename not supported') + + async def opendir(self, path: bytes) -> bytes: + """Make an SFTP opendir request""" + + self.logger.debug1('Sending opendir for %s', path) + + return cast(bytes, await self._make_request( + FXP_OPENDIR, String(path))) + + async def readdir(self, handle: bytes) -> _SFTPNames: + """Make an SFTP readdir request""" + + self.logger.debug1('Sending readdir for handle %s', handle.hex()) + + return cast(_SFTPNames, await self._make_request( + FXP_READDIR, String(handle))) + + async def mkdir(self, path: bytes, attrs: SFTPAttrs) -> None: + """Make an SFTP mkdir request""" + + self.logger.debug1('Sending mkdir for %s', path) + + await self._make_request(FXP_MKDIR, String(path), + attrs.encode(self._version)) + + async def rmdir(self, path: bytes) -> None: + """Make an SFTP rmdir request""" + + self.logger.debug1('Sending rmdir for %s', path) + + await self._make_request(FXP_RMDIR, String(path)) + + async def realpath(self, path: bytes, *compose_paths: 
bytes, + check: int = FXRP_NO_CHECK) -> _SFTPNames: + """Make an SFTP realpath request""" + + if check == FXRP_NO_CHECK: + checkmsg = '' + else: + try: + checkmsg = ', check=%s' % self._realpath_check_names[check] + except KeyError: + checkmsg = ', check=%d' % check + + self.logger.debug1('Sending realpath of %s%s%s', path, + b', compose_path: %s' % b', '.join(compose_paths) + if compose_paths else b'', checkmsg) + + if self._version >= 6: + return cast(_SFTPNames, await self._make_request( + FXP_REALPATH, String(path), Byte(check), + *map(String, compose_paths))) + else: + return cast(_SFTPNames, await self._make_request( + FXP_REALPATH, String(path))) + + async def readlink(self, path: bytes) -> _SFTPNames: + """Make an SFTP readlink request""" + + self.logger.debug1('Sending readlink for %s', path) + + return cast(_SFTPNames, await self._make_request( + FXP_READLINK, String(path))) + + async def symlink(self, oldpath: bytes, newpath: bytes) -> None: + """Make an SFTP symlink request""" + + self.logger.debug1('Sending symlink request from %s to %s', + oldpath, newpath) + + if self._version >= 6: + await self._make_request(FXP_LINK, String(newpath), + String(oldpath), Boolean(True)) + else: + if self._nonstandard_symlink: + args = String(oldpath) + String(newpath) + else: + args = String(newpath) + String(oldpath) + + await self._make_request(FXP_SYMLINK, args) + + async def link(self, oldpath: bytes, newpath: bytes) -> None: + """Make an SFTP hard link request""" + + if self._version >= 6 or self._supports_hardlink: + self.logger.debug1('Sending hardlink request from %s to %s', + oldpath, newpath) + + if self._version >= 6: + await self._make_request(FXP_LINK, String(newpath), + String(oldpath), Boolean(False)) + else: + await self._make_request(b'hardlink@openssh.com', + String(oldpath), String(newpath)) + else: + raise SFTPOpUnsupported('link not supported') + + async def lock(self, handle: bytes, offset: int, length: int, + flags: int) -> None: + """Make an SFTP byte range lock request""" + + if self._version >= 6: + self.logger.debug1('Sending byte range lock request for ' + 'handle %s, offset %d, length %d, ' + 'flags 0x%04x', handle.hex(), offset, + length, flags) + + await self._make_request(FXP_BLOCK, String(handle), + UInt64(offset), UInt64(length), + UInt32(flags)) + else: + raise SFTPOpUnsupported('Byte range locks not supported') + + async def unlock(self, handle: bytes, offset: int, length: int) -> None: + """Make an SFTP byte range unlock request""" + + if self._version >= 6: + self.logger.debug1('Sending byte range unlock request for ' + 'handle %s, offset %d, length %d', + handle.hex(), offset, length) + + await self._make_request(FXP_UNBLOCK, String(handle), + UInt64(offset), UInt64(length)) + else: + raise SFTPOpUnsupported('Byte range locks not supported') + + async def fsync(self, handle: bytes) -> None: + """Make an SFTP fsync request""" + + if self._supports_fsync: + self.logger.debug1('Sending fsync for handle %s', handle.hex()) + + await self._make_request(b'fsync@openssh.com', String(handle)) + else: + raise SFTPOpUnsupported('fsync not supported') + + def exit(self) -> None: + """Handle a request to close the SFTP session""" + + if self._writer: + self._writer.write_eof() + + async def wait_closed(self) -> None: + """Wait for this SFTP session to close""" + + if self._writer: + await self._writer.channel.wait_closed() + + +class SFTPClientFile: + """SFTP client remote file object + + This class represents an open file on a remote SFTP server. 
It + is opened with the :meth:`open() ` method on the + :class:`SFTPClient` class and provides methods to read and write + data and get and set attributes on the open file. + + """ + + def __init__(self, handler: SFTPClientHandler, handle: bytes, + appending: bool, encoding: Optional[str], errors: str, + block_size: int, max_requests: int): + self._handler = handler + self._handle: Optional[bytes] = handle + self._appending = appending + self._encoding = encoding + self._errors = errors + self._block_size = block_size + self._max_requests = max_requests + self._offset = None if appending else 0 + + async def __aenter__(self) -> 'SFTPClientFile': + """Allow SFTPClientFile to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + """Wait for file close when used as an async context manager""" + + await self.close() + return False + + async def _end(self) -> int: + """Return the offset of the end of the file""" + + attrs = await self.stat() + return attrs.size or 0 + + async def read(self, size: int = -1, + offset: Optional[int] = None) -> AnyStr: + """Read data from the remote file + + This method reads and returns up to `size` bytes of data + from the remote file. If size is negative, all data up to + the end of the file is returned. + + If offset is specified, the read will be performed starting + at that offset rather than the current file position. This + argument should be provided if you want to issue parallel + reads on the same file, since the file position is not + predictable in that case. + + Data will be returned as a string if an encoding was set when + the file was opened. Otherwise, data is returned as bytes. + + An empty `str` or `bytes` object is returned when at EOF. + + :param size: + The number of bytes to read + :param offset: (optional) + The offset from the beginning of the file to begin reading + :type size: `int` + :type offset: `int` + + :returns: data read from the file, as a `str` or `bytes` + + :raises: | :exc:`ValueError` if the file has been closed + | :exc:`UnicodeDecodeError` if the data can't be + decoded using the requested encoding + | :exc:`SFTPError` if the server returns an error + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + if offset is None: + offset = self._offset + + # If self._offset is None, we're appending and haven't sought + # backward in the file since the last write, so there's no + # data to return + + data = b'' + + if offset is not None: + if size is None or size < 0: + size = (await self._end()) - offset + + try: + if self._block_size and size > self._block_size: + data = await _SFTPFileReader( + self._block_size, self._max_requests, self._handler, + self._handle, offset, size).run() + else: + data, _ = await self._handler.read(self._handle, + offset, size) + + self._offset = offset + len(data) + except SFTPEOFError: + pass + + if self._encoding: + return cast(AnyStr, data.decode(self._encoding, self._errors)) + else: + return cast(AnyStr, data) + + async def read_parallel(self, size: int = -1, + offset: Optional[int] = None) -> \ + AsyncIterator[Tuple[int, bytes]]: + """Read parallel blocks of data from the remote file + + This method reads and returns up to `size` bytes of data + from the remote file. If size is negative, all data up to + the end of the file is returned. 
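# Illustrative sketch (not part of the original file): reading a remote
# file with SFTPClientFile.read() as described above. The host and path
# are placeholder assumptions; a reachable SSH server is required.
import asyncio
import asyncssh

async def read_remote(path: str) -> str:
    async with asyncssh.connect('example.com') as conn:
        async with conn.start_sftp_client() as sftp:
            async with sftp.open(path) as f:
                # With the default 'utf-8' encoding, read() returns str;
                # the default size of -1 reads through EOF
                return await f.read()

print(asyncio.run(read_remote('/etc/hostname')))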
+ + If offset is specified, the read will be performed starting + at that offset rather than the current file position. + + Data is returned as a series of tuples delivered by an + async iterator, where each tuple contains an offset and + data bytes. Encoding is ignored here, since multi-byte + characters may be split across block boundaries. + + To maximize performance, multiple reads are issued in + parallel, and data blocks may be returned out of order. + The size of the blocks and the maximum number of + outstanding read requests can be controlled using + the `block_size` and `max_requests` arguments passed + in the call to the :meth:`open() ` + method on the :class:`SFTPClient` class. + + :param size: + The number of bytes to read + :param offset: (optional) + The offset from the beginning of the file to begin reading + :type size: `int` + :type offset: `int` + + :returns: an async iterator of tuples of offset and data bytes + + :raises: | :exc:`ValueError` if the file has been closed + | :exc:`SFTPError` if the server returns an error + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + if offset is None: + offset = self._offset + + # If self._offset is None, we're appending and haven't sought + # backward in the file since the last write, so there's no + # data to return + + if offset is not None: + if size is None or size < 0: + size = (await self._end()) - offset + else: + offset = 0 + size = 0 + + return _SFTPFileReader(self._block_size, self._max_requests, + self._handler, self._handle, offset, + size).iter() + + async def write(self, data: AnyStr, offset: Optional[int] = None) -> int: + """Write data to the remote file + + This method writes the specified data at the current + position in the remote file. + + :param data: + The data to write to the file + :param offset: (optional) + The offset from the beginning of the file to begin writing + :type data: `str` or `bytes` + :type offset: `int` + + If offset is specified, the write will be performed starting + at that offset rather than the current file position. This + argument should be provided if you want to issue parallel + writes on the same file, since the file position is not + predictable in that case. + + :returns: number of bytes written + + :raises: | :exc:`ValueError` if the file has been closed + | :exc:`UnicodeEncodeError` if the data can't be + encoded using the requested encoding + | :exc:`SFTPError` if the server returns an error + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + if offset is None: + # Offset is ignored when appending, so fill in an offset of 0 + # if we don't have a current file position + offset = self._offset or 0 + + if self._encoding: + data_bytes = cast(str, data).encode(self._encoding, self._errors) + else: + data_bytes = cast(bytes, data) + + datalen = len(data_bytes) + + if self._block_size and datalen > self._block_size: + await _SFTPFileWriter( + self._block_size, self._max_requests, self._handler, + self._handle, offset, data_bytes).run() + else: + await self._handler.write(self._handle, offset, data_bytes) + + self._offset = None if self._appending else offset + datalen + return datalen + + async def seek(self, offset: int, from_what: int = SEEK_SET) -> int: + """Seek to a new position in the remote file + + This method changes the position in the remote file. 
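# Illustrative sketch (not part of the original file): consuming
# read_parallel(), whose (offset, data) blocks may arrive out of order.
# 'sftp' is assumed to be an already-started SFTPClient; the path is
# a placeholder.
async def fetch_parallel(sftp, path: str) -> bytes:
    async with sftp.open(path, 'rb') as f:
        blocks = {}

        # read_parallel() returns an async iterator of (offset, data)
        async for offset, data in await f.read_parallel():
            blocks[offset] = data

        # Reassemble the file in offset order
        return b''.join(blocks[off] for off in sorted(blocks))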
The + `offset` passed in is treated as relative to the beginning + of the file if `from_what` is set to `SEEK_SET` (the + default), relative to the current file position if it is + set to `SEEK_CUR`, or relative to the end of the file + if it is set to `SEEK_END`. + + :param offset: + The amount to seek + :param from_what: (optional) + The reference point to use + :type offset: `int` + :type from_what: `SEEK_SET`, `SEEK_CUR`, or `SEEK_END` + + :returns: The new byte offset from the beginning of the file + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + if from_what == SEEK_SET: + self._offset = offset + elif from_what == SEEK_CUR: + if self._offset is None: + self._offset = (await self._end()) + offset + else: + self._offset += offset + elif from_what == SEEK_END: + self._offset = (await self._end()) + offset + else: + raise ValueError('Invalid reference point') + + return self._offset + + async def tell(self) -> int: + """Return the current position in the remote file + + This method returns the current position in the remote file. + + :returns: The current byte offset from the beginning of the file + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + if self._offset is None: + self._offset = await self._end() + + return self._offset + + async def stat(self, flags = FILEXFER_ATTR_DEFINED_V4) -> SFTPAttrs: + """Return file attributes of the remote file + + This method queries file attributes of the currently open file. + + :param flags: (optional) + Flags indicating attributes of interest (SFTPv4 or later) + :type flags: `int` + + :returns: An :class:`SFTPAttrs` containing the file attributes + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + return await self._handler.fstat(self._handle, flags) + + async def setstat(self, attrs: SFTPAttrs) -> None: + """Set attributes of the remote file + + This method sets file attributes of the currently open file. + + :param attrs: + File attributes to set on the file + :type attrs: :class:`SFTPAttrs` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + await self._handler.fsetstat(self._handle, attrs) + + async def statvfs(self) -> SFTPVFSAttrs: + """Return file system attributes of the remote file + + This method queries attributes of the file system containing + the currently open file. + + :returns: An :class:`SFTPVFSAttrs` containing the file system + attributes + + :raises: :exc:`SFTPError` if the server doesn't support this + extension or returns an error + + """ + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + return await self._handler.fstatvfs(self._handle) + + async def truncate(self, size: Optional[int] = None) -> None: + """Truncate the remote file to the specified size + + This method changes the remote file's size to the specified + value. If a size is not provided, the current file position + is used. + + :param size: (optional) + The desired size of the file, in bytes + :type size: `int` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if size is None: + size = self._offset + + await self.setstat(SFTPAttrs(size=size)) + + @overload + async def chown(self, uid: int, gid: int) -> None: ... # pragma: no cover + + @overload + async def chown(self, owner: str, + group: str) -> None: ... 
# pragma: no cover + + async def chown(self, uid_or_owner = None, gid_or_group = None, + uid = None, gid = None, owner = None, group = None): + """Change the owner user and group of the remote file + + This method changes the user and group of the currently open file. + + :param uid: + The new user id to assign to the file + :param gid: + The new group id to assign to the file + :param owner: + The new owner to assign to the file (SFTPv4 only) + :param group: + The new group to assign to the file (SFTPv4 only) + :type uid: `int` + :type gid: `int` + :type owner: `str` + :type group: `str` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if isinstance(uid_or_owner, int): + uid = uid_or_owner + elif isinstance(uid_or_owner, str): + owner = uid_or_owner + + if isinstance(gid_or_group, int): + gid = gid_or_group + elif isinstance(gid_or_group, str): + group = gid_or_group + + await self.setstat(SFTPAttrs(uid=uid, gid=gid, + owner=owner, group=group)) + + async def chmod(self, mode: int) -> None: + """Change the file permissions of the remote file + + This method changes the permissions of the currently + open file. + + :param mode: + The new file permissions, expressed as an int + :type mode: `int` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + await self.setstat(SFTPAttrs(permissions=mode)) + + async def utime(self, times: Optional[Tuple[float, float]] = None, + ns: Optional[Tuple[int, int]] = None) -> None: + """Change the access and modify times of the remote file + + This method changes the access and modify times of the + currently open file. If `times` is not provided, + the times will be changed to the current time. + + :param times: (optional) + The new access and modify times, as seconds relative to + the UNIX epoch + :param ns: (optional) + The new access and modify times, as nanoseconds relative to + the UNIX epoch + :type times: tuple of two `int` or `float` values + :type ns: tuple of two `int` values + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + await self.setstat(_utime_to_attrs(times, ns)) + + async def lock(self, offset: int, length: int, flags: int) -> None: + """Acquire a byte range lock on the remote file""" + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + await self._handler.lock(self._handle, offset, length, flags) + + async def unlock(self, offset: int, length: int) -> None: + """Release a byte range lock on the remote file""" + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + await self._handler.unlock(self._handle, offset, length) + + async def fsync(self) -> None: + """Force the remote file data to be written to disk""" + + if self._handle is None: + raise ValueError('I/O operation on closed file') + + await self._handler.fsync(self._handle) + + async def close(self) -> None: + """Close the remote file""" + + if self._handle: + await self._handler.close(self._handle) + self._handle = None + + +class SFTPClient: + """SFTP client + + This class represents the client side of an SFTP session. It is + started by calling the :meth:`start_sftp_client() + ` method on the + :class:`SSHClientConnection` class. 
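# Illustrative sketch (not part of the original file): the documented way
# to obtain an SFTPClient from an SSH connection. Host and login details
# are placeholders.
import asyncio
import asyncssh

async def main() -> None:
    async with asyncssh.connect('example.com', username='user') as conn:
        async with conn.start_sftp_client() as sftp:
            # SFTPClient is an async context manager; leaving the block
            # calls exit() and wait_closed()
            print(await sftp.realpath('.'))

asyncio.run(main())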
+ + """ + + def __init__(self, handler: SFTPClientHandler, + path_encoding: Optional[str], path_errors: str): + self._handler = handler + self._path_encoding = path_encoding + self._path_errors = path_errors + self._cwd: Optional[bytes] = None + + async def __aenter__(self) -> 'SFTPClient': + """Allow SFTPClient to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> bool: + """Wait for client close when used as an async context manager""" + + self.exit() + await self.wait_closed() + return False + + @property + def logger(self) -> SSHLogger: + """A logger associated with this SFTP client""" + + return self._handler.logger + + @property + def version(self) -> int: + """SFTP version associated with this SFTP session""" + + return self._handler.version + + @staticmethod + def basename(path: bytes) -> bytes: + """Return the final component of a POSIX-style path""" + + return posixpath.basename(path) + + def encode(self, path: _SFTPPath) -> bytes: + """Encode path name using configured path encoding + + This method has no effect if the path is already bytes. + + """ + + if isinstance(path, PurePath): + path = str(path) + + if isinstance(path, str): + if self._path_encoding: + path = path.encode(self._path_encoding, self._path_errors) + else: + raise SFTPBadMessage('Path must be bytes when ' + 'encoding is not set') + + return path + + def decode(self, path: bytes, want_string: bool = True) -> BytesOrStr: + """Decode path name using configured path encoding + + This method has no effect if want_string is set to `False`. + + """ + + if want_string and self._path_encoding: + try: + return path.decode(self._path_encoding, self._path_errors) + except UnicodeDecodeError: + raise SFTPBadMessage('Unable to decode name') from None + + return path + + def compose_path(self, path: _SFTPPath, + parent: Optional[bytes] = None) -> bytes: + """Compose a path + + If parent is not specified, return a path relative to the + current remote working directory. 
+ + """ + + if parent is None: + parent = self._cwd + + path = self.encode(path) + + return posixpath.join(parent, path) if parent else path + + async def _type(self, path: _SFTPPath, + statfunc: Optional[_SFTPStatFunc] = None) -> int: + """Return the file type of a remote path, or FILEXFER_TYPE_UNKNOWN + if it can't be accessed""" + + if statfunc is None: + statfunc = self.stat + + try: + return (await statfunc(path)).type + except (SFTPNoSuchFile, SFTPNoSuchPath, SFTPPermissionDenied): + return FILEXFER_TYPE_UNKNOWN + + async def _copy(self, srcfs: _SFTPFSProtocol, dstfs: _SFTPFSProtocol, + srcpath: bytes, dstpath: bytes, srcattrs: SFTPAttrs, + preserve: bool, recurse: bool, follow_symlinks: bool, + block_size: int, max_requests: int, + progress_handler: SFTPProgressHandler, + error_handler: SFTPErrorHandler) -> None: + """Copy a file, directory, or symbolic link""" + + try: + filetype = srcattrs.type + + if follow_symlinks and filetype == FILEXFER_TYPE_SYMLINK: + srcattrs = await srcfs.stat(srcpath) + filetype = srcattrs.type + + if filetype == FILEXFER_TYPE_DIRECTORY: + if not recurse: + exc = SFTPFileIsADirectory if self.version >= 6 \ + else SFTPFailure + + raise exc('%s is a directory' % + srcpath.decode('utf-8', 'backslashreplace')) + + self.logger.info(' Starting copy of directory %s to %s', + srcpath, dstpath) + + if not await dstfs.isdir(dstpath): + await dstfs.mkdir(dstpath) + + async for srcname in srcfs.scandir(srcpath): + filename = cast(bytes, srcname.filename) + + if filename in (b'.', b'..'): + continue + + srcfile = posixpath.join(srcpath, filename) + dstfile = posixpath.join(dstpath, filename) + + await self._copy(srcfs, dstfs, srcfile, dstfile, + srcname.attrs, preserve, recurse, + follow_symlinks, block_size, max_requests, + progress_handler, error_handler) + + self.logger.info(' Finished copy of directory %s to %s', + srcpath, dstpath) + + elif filetype == FILEXFER_TYPE_SYMLINK: + targetpath = await srcfs.readlink(srcpath) + + self.logger.info(' Copying symlink %s to %s', srcpath, dstpath) + self.logger.info(' Target path: %s', targetpath) + + await dstfs.symlink(targetpath, dstpath) + else: + self.logger.info(' Copying file %s to %s', srcpath, dstpath) + + await _SFTPFileCopier(block_size, max_requests, 0, + srcattrs.size or 0, srcfs, dstfs, + srcpath, dstpath, progress_handler).run() + + if preserve: + attrs = await srcfs.stat(srcpath) + + attrs = SFTPAttrs(permissions=attrs.permissions, + atime=attrs.atime, atime_ns=attrs.atime_ns, + mtime=attrs.mtime, mtime_ns=attrs.mtime_ns) + + self.logger.info(' Preserving attrs: %s', attrs) + + await dstfs.setstat(dstpath, attrs) + except (OSError, SFTPError) as exc: + setattr(exc, 'srcpath', srcpath) + setattr(exc, 'dstpath', dstpath) + + if error_handler: + error_handler(exc) + else: + raise + + async def _begin_copy(self, srcfs: _SFTPFSProtocol, dstfs: _SFTPFSProtocol, + srcpaths: Sequence[_SFTPPath], + dstpath: Optional[_SFTPPath], + copy_type: str, expand_glob: bool, preserve: bool, + recurse: bool, follow_symlinks: bool, + block_size: int, max_requests: int, + progress_handler: SFTPProgressHandler, + error_handler: SFTPErrorHandler) -> None: + """Begin a new file upload, download, or copy""" + + if isinstance(srcpaths, tuple): + srcpaths = list(srcpaths) + + self.logger.info('Starting SFTP %s of %s to %s', + copy_type, srcpaths, dstpath) + + if isinstance(srcpaths, (bytes, str, PurePath)): + srcpaths = [srcpaths] + + srcnames: List[SFTPName] = [] + + if expand_glob: + glob = SFTPGlob(srcfs, len(srcpaths) > 1) + + for 
srcpath in srcpaths: + srcnames.extend(await glob.match(srcfs.encode(srcpath), + error_handler, self.version)) + else: + for srcpath in srcpaths: + srcpath = srcfs.encode(srcpath) + srcattrs = await srcfs.stat(srcpath) + srcnames.append(SFTPName(srcpath, attrs=srcattrs)) + + if dstpath: + dstpath = dstfs.encode(dstpath) + + dstpath: Optional[bytes] + + dst_isdir = dstpath is None or (await dstfs.isdir(dstpath)) + + if len(srcnames) > 1 and not dst_isdir: + assert dstpath is not None + exc = SFTPNotADirectory if self.version >= 6 else SFTPFailure + + raise exc('%s must be a directory' % + dstpath.decode('utf-8', 'backslashreplace')) + + for srcname in srcnames: + srcfile = cast(bytes, srcname.filename) + basename = srcfs.basename(srcfile) + + if dstpath is None: + dstfile = basename + elif dst_isdir: + dstfile = dstfs.compose_path(basename, parent=dstpath) + else: + dstfile = dstpath + + await self._copy(srcfs, dstfs, srcfile, dstfile, srcname.attrs, + preserve, recurse, follow_symlinks, block_size, + max_requests, progress_handler, error_handler) + + async def get(self, remotepaths: Sequence[_SFTPPath], + localpath: Optional[_SFTPPath] = None, *, + preserve: bool = False, recurse: bool = False, + follow_symlinks: bool = False, + block_size: int = SFTP_BLOCK_SIZE, + max_requests: int = _MAX_SFTP_REQUESTS, + progress_handler: SFTPProgressHandler = None, + error_handler: SFTPErrorHandler = None) -> None: + """Download remote files + + This method downloads one or more files or directories from + the remote system. Either a single remote path or a sequence + of remote paths to download can be provided. + + When downloading a single file or directory, the local path can + be either the full path to download data into or the path to an + existing directory where the data should be placed. In the + latter case, the base file name from the remote path will be + used as the local name. + + When downloading multiple files, the local path must refer to + an existing directory. + + If no local path is provided, the file is downloaded + into the current local working directory. + + If preserve is `True`, the access and modification times + and permissions of the original file are set on the + downloaded file. + + If recurse is `True` and the remote path points at a + directory, the entire subtree under that directory is + downloaded. + + If follow_symlinks is set to `True`, symbolic links found + on the remote system will have the contents of their target + downloaded rather than creating a local symbolic link. When + using this option during a recursive download, one needs to + watch out for links that result in loops. + + The block_size argument specifies the size of read and write + requests issued when downloading the files, defaulting to 16 KB. + + The max_requests argument specifies the maximum number of + parallel read or write requests issued, defaulting to 128. + + If progress_handler is specified, it will be called after + each block of a file is successfully downloaded. The arguments + passed to this handler will be the source path, destination + path, bytes downloaded so far, and total bytes in the file + being downloaded. If multiple source paths are provided or + recurse is set to `True`, the progress_handler will be + called consecutively on each file being downloaded. + + If error_handler is specified and an error occurs during + the download, this handler will be called with the exception + instead of it being raised. 
This is intended to primarily be + used when multiple remote paths are provided or when recurse + is set to `True`, to allow error information to be collected + without aborting the download of the remaining files. The + error handler can raise an exception if it wants the download + to completely stop. Otherwise, after an error, the download + will continue starting with the next file. + + :param remotepaths: + The paths of the remote files or directories to download + :param localpath: (optional) + The path of the local file or directory to download into + :param preserve: (optional) + Whether or not to preserve the original file attributes + :param recurse: (optional) + Whether or not to recursively copy directories + :param follow_symlinks: (optional) + Whether or not to follow symbolic links + :param block_size: (optional) + The block size to use for file reads and writes + :param max_requests: (optional) + The maximum number of parallel read or write requests + :param progress_handler: (optional) + The function to call to report download progress + :param error_handler: (optional) + The function to call when an error occurs + :type remotepaths: + :class:`PurePath `, `str`, or `bytes`, + or a sequence of these + :type localpath: + :class:`PurePath `, `str`, or `bytes` + :type preserve: `bool` + :type recurse: `bool` + :type follow_symlinks: `bool` + :type block_size: `int` + :type max_requests: `int` + :type progress_handler: `callable` + :type error_handler: `callable` + + :raises: | :exc:`OSError` if a local file I/O error occurs + | :exc:`SFTPError` if the server returns an error + + """ + + await self._begin_copy(self, local_fs, remotepaths, localpath, 'get', + False, preserve, recurse, follow_symlinks, + block_size, max_requests, progress_handler, + error_handler) + + async def put(self, localpaths: Sequence[_SFTPPath], + remotepath: Optional[_SFTPPath] = None, *, + preserve: bool = False, recurse: bool = False, + follow_symlinks: bool = False, + block_size: int = SFTP_BLOCK_SIZE, + max_requests: int = _MAX_SFTP_REQUESTS, + progress_handler: SFTPProgressHandler = None, + error_handler: SFTPErrorHandler = None) -> None: + """Upload local files + + This method uploads one or more files or directories to the + remote system. Either a single local path or a sequence of + local paths to upload can be provided. + + When uploading a single file or directory, the remote path can + be either the full path to upload data into or the path to an + existing directory where the data should be placed. In the + latter case, the base file name from the local path will be + used as the remote name. + + When uploading multiple files, the remote path must refer to + an existing directory. + + If no remote path is provided, the file is uploaded into the + current remote working directory. + + If preserve is `True`, the access and modification times + and permissions of the original file are set on the + uploaded file. + + If recurse is `True` and the local path points at a + directory, the entire subtree under that directory is + uploaded. + + If follow_symlinks is set to `True`, symbolic links found + on the local system will have the contents of their target + uploaded rather than creating a remote symbolic link. When + using this option during a recursive upload, one needs to + watch out for links that result in loops. + + The block_size argument specifies the size of read and write + requests issued when uploading the files, defaulting to 16 KB. 
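# Illustrative sketch (not part of the original file): put() with a
# progress handler of the callback shape described above. The paths are
# placeholders; 'sftp' is assumed to be a started SFTPClient.
def report(srcpath: bytes, dstpath: bytes, copied: int, total: int) -> None:
    # Invoked after each block: source, destination, bytes so far, total
    pct = 100 * copied // total if total else 100
    print('%r -> %r: %d%%' % (srcpath, dstpath, pct))

async def upload(sftp) -> None:
    await sftp.put(['dist/app.tar.gz'], '/srv/releases',
                   preserve=True, progress_handler=report)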
+
+           The max_requests argument specifies the maximum number of
+           parallel read or write requests issued, defaulting to 128.
+
+           If progress_handler is specified, it will be called after
+           each block of a file is successfully uploaded. The arguments
+           passed to this handler will be the source path, destination
+           path, bytes uploaded so far, and total bytes in the file
+           being uploaded. If multiple source paths are provided or
+           recurse is set to `True`, the progress_handler will be
+           called consecutively on each file being uploaded.
+
+           If error_handler is specified and an error occurs during
+           the upload, this handler will be called with the exception
+           instead of it being raised. This is intended to primarily be
+           used when multiple local paths are provided or when recurse
+           is set to `True`, to allow error information to be collected
+           without aborting the upload of the remaining files. The
+           error handler can raise an exception if it wants the upload
+           to completely stop. Otherwise, after an error, the upload
+           will continue starting with the next file.
+
+           :param localpaths:
+               The paths of the local files or directories to upload
+           :param remotepath: (optional)
+               The path of the remote file or directory to upload into
+           :param preserve: (optional)
+               Whether or not to preserve the original file attributes
+           :param recurse: (optional)
+               Whether or not to recursively copy directories
+           :param follow_symlinks: (optional)
+               Whether or not to follow symbolic links
+           :param block_size: (optional)
+               The block size to use for file reads and writes
+           :param max_requests: (optional)
+               The maximum number of parallel read or write requests
+           :param progress_handler: (optional)
+               The function to call to report upload progress
+           :param error_handler: (optional)
+               The function to call when an error occurs
+           :type localpaths:
+               :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`,
+               or a sequence of these
+           :type remotepath:
+               :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+           :type preserve: `bool`
+           :type recurse: `bool`
+           :type follow_symlinks: `bool`
+           :type block_size: `int`
+           :type max_requests: `int`
+           :type progress_handler: `callable`
+           :type error_handler: `callable`
+
+           :raises: | :exc:`OSError` if a local file I/O error occurs
+                    | :exc:`SFTPError` if the server returns an error
+
+        """
+
+        await self._begin_copy(local_fs, self, localpaths, remotepath, 'put',
+                               False, preserve, recurse, follow_symlinks,
+                               block_size, max_requests, progress_handler,
+                               error_handler)
+
+    async def copy(self, srcpaths: Sequence[_SFTPPath],
+                   dstpath: Optional[_SFTPPath] = None, *,
+                   preserve: bool = False, recurse: bool = False,
+                   follow_symlinks: bool = False,
+                   block_size: int = SFTP_BLOCK_SIZE,
+                   max_requests: int = _MAX_SFTP_REQUESTS,
+                   progress_handler: SFTPProgressHandler = None,
+                   error_handler: SFTPErrorHandler = None) -> None:
+        """Copy remote files to a new location
+
+           This method copies one or more files or directories on the
+           remote system to a new location. Either a single source path
+           or a sequence of source paths to copy can be provided.
+
+           When copying a single file or directory, the destination path
+           can be either the full path to copy data into or the path to
+           an existing directory where the data should be placed. In the
+           latter case, the base file name from the source path will be
+           used as the destination name.
+
+           When copying multiple files, the destination path must refer
+           to an existing remote directory.
+
+           If no destination path is provided, the file is copied into
+           the current remote working directory.
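# Illustrative sketch (not part of the original file): a server-side copy
# between two remote paths using copy(); the data never transits the
# client. Paths are placeholders; 'sftp' is a started SFTPClient.
async def snapshot(sftp) -> None:
    await sftp.copy('/var/log/app.log', '/var/log/app.log.bak',
                    preserve=True)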
+ + If preserve is `True`, the access and modification times + and permissions of the original file are set on the + copied file. + + If recurse is `True` and the source path points at a + directory, the entire subtree under that directory is + copied. + + If follow_symlinks is set to `True`, symbolic links found + in the source will have the contents of their target copied + rather than creating a copy of the symbolic link. When + using this option during a recursive copy, one needs to + watch out for links that result in loops. + + The block_size argument specifies the size of read and write + requests issued when copying the files, defaulting to 16 KB. + + The max_requests argument specifies the maximum number of + parallel read or write requests issued, defaulting to 128. + + If progress_handler is specified, it will be called after + each block of a file is successfully copied. The arguments + passed to this handler will be the source path, destination + path, bytes copied so far, and total bytes in the file + being copied. If multiple source paths are provided or + recurse is set to `True`, the progress_handler will be + called consecutively on each file being copied. + + If error_handler is specified and an error occurs during + the copy, this handler will be called with the exception + instead of it being raised. This is intended to primarily be + used when multiple source paths are provided or when recurse + is set to `True`, to allow error information to be collected + without aborting the copy of the remaining files. The error + handler can raise an exception if it wants the copy to + completely stop. Otherwise, after an error, the copy will + continue starting with the next file. + + :param srcpaths: + The paths of the remote files or directories to copy + :param dstpath: (optional) + The path of the remote file or directory to copy into + :param preserve: (optional) + Whether or not to preserve the original file attributes + :param recurse: (optional) + Whether or not to recursively copy directories + :param follow_symlinks: (optional) + Whether or not to follow symbolic links + :param block_size: (optional) + The block size to use for file reads and writes + :param max_requests: (optional) + The maximum number of parallel read or write requests + :param progress_handler: (optional) + The function to call to report copy progress + :param error_handler: (optional) + The function to call when an error occurs + :type srcpaths: + :class:`PurePath `, `str`, or `bytes`, + or a sequence of these + :type dstpath: + :class:`PurePath `, `str`, or `bytes` + :type preserve: `bool` + :type recurse: `bool` + :type follow_symlinks: `bool` + :type block_size: `int` + :type max_requests: `int` + :type progress_handler: `callable` + :type error_handler: `callable` + + :raises: | :exc:`OSError` if a local file I/O error occurs + | :exc:`SFTPError` if the server returns an error + + """ + + await self._begin_copy(self, self, srcpaths, dstpath, 'remote copy', + False, preserve, recurse, follow_symlinks, + block_size, max_requests, progress_handler, + error_handler) + + async def mget(self, remotepaths: Sequence[_SFTPPath], + localpath: Optional[_SFTPPath] = None, *, + preserve: bool = False, recurse: bool = False, + follow_symlinks: bool = False, + block_size: int = SFTP_BLOCK_SIZE, + max_requests: int = _MAX_SFTP_REQUESTS, + progress_handler: SFTPProgressHandler = None, + error_handler: SFTPErrorHandler = None) -> None: + """Download remote files with glob pattern match + + This method downloads 
files and directories from the remote
+           system matching one or more glob patterns.
+
+           The arguments to this method are identical to the :meth:`get`
+           method, except that the remote paths specified can contain
+           wildcard patterns.
+
+        """
+
+        await self._begin_copy(self, local_fs, remotepaths, localpath, 'mget',
+                               True, preserve, recurse, follow_symlinks,
+                               block_size, max_requests, progress_handler,
+                               error_handler)
+
+    async def mput(self, localpaths: Sequence[_SFTPPath],
+                   remotepath: Optional[_SFTPPath] = None, *,
+                   preserve: bool = False, recurse: bool = False,
+                   follow_symlinks: bool = False,
+                   block_size: int = SFTP_BLOCK_SIZE,
+                   max_requests: int = _MAX_SFTP_REQUESTS,
+                   progress_handler: SFTPProgressHandler = None,
+                   error_handler: SFTPErrorHandler = None) -> None:
+        """Upload local files with glob pattern match
+
+           This method uploads files and directories to the remote
+           system matching one or more glob patterns.
+
+           The arguments to this method are identical to the :meth:`put`
+           method, except that the local paths specified can contain
+           wildcard patterns.
+
+        """
+
+        await self._begin_copy(local_fs, self, localpaths, remotepath, 'mput',
+                               True, preserve, recurse, follow_symlinks,
+                               block_size, max_requests, progress_handler,
+                               error_handler)
+
+    async def mcopy(self, srcpaths: Sequence[_SFTPPath],
+                    dstpath: Optional[_SFTPPath] = None, *,
+                    preserve: bool = False, recurse: bool = False,
+                    follow_symlinks: bool = False,
+                    block_size: int = SFTP_BLOCK_SIZE,
+                    max_requests: int = _MAX_SFTP_REQUESTS,
+                    progress_handler: SFTPProgressHandler = None,
+                    error_handler: SFTPErrorHandler = None) -> None:
+        """Copy remote files with glob pattern match
+
+           This method copies files and directories on the remote
+           system matching one or more glob patterns.
+
+           The arguments to this method are identical to the :meth:`copy`
+           method, except that the source paths specified can contain
+           wildcard patterns.
+
+        """
+
+        await self._begin_copy(self, self, srcpaths, dstpath, 'remote mcopy',
+                               True, preserve, recurse, follow_symlinks,
+                               block_size, max_requests, progress_handler,
+                               error_handler)
+
+    async def glob(self, patterns: _SFTPPaths,
+                   error_handler: SFTPErrorHandler = None) -> \
+            Sequence[BytesOrStr]:
+        """Match remote files against glob patterns
+
+           This method matches remote files against one or more glob
+           patterns. Either a single pattern or a sequence of patterns
+           can be provided to match against.
+
+           Supported wildcard characters include '*', '?', and
+           character ranges in square brackets. In addition, '**'
+           can be used to trigger a recursive directory search at
+           that point in the pattern, and a trailing slash can be
+           used to request that only directories get returned.
+
+           If error_handler is specified and an error occurs during
+           the match, this handler will be called with the exception
+           instead of it being raised. This is intended to primarily be
+           used when multiple patterns are provided to allow error
+           information to be collected without aborting the match
+           against the remaining patterns. The error handler can raise
+           an exception if it wants to completely abort the match.
+           Otherwise, after an error, the match will continue starting
+           with the next pattern.
+
+           An error will be raised if any of the patterns completely
+           fail to match, and this can either stop the match against
+           the remaining patterns or be handled by the error_handler
+           just like other errors.
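# Illustrative sketch (not part of the original file): glob() with an
# error handler that records failing patterns instead of aborting.
# The patterns are placeholders; 'sftp' is a started SFTPClient.
async def find_logs(sftp):
    failed = []

    def on_error(exc: Exception) -> None:
        # SFTPGlob.match() attaches the failing pattern as exc.srcpath;
        # raising here instead would abort the whole match
        failed.append(getattr(exc, 'srcpath', None))

    names = await sftp.glob(['/var/log/**/*.log', '/opt/*/logs/*.log'],
                            error_handler=on_error)
    return names, failed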
+
+           :param patterns:
+               Glob patterns to try and match remote files against
+           :param error_handler: (optional)
+               The function to call when an error occurs
+           :type patterns:
+               :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`,
+               or a sequence of these
+           :type error_handler: `callable`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+               or no match is found
+
+        """
+
+        return [name.filename for name in
+                await self.glob_sftpname(patterns, error_handler)]
+
+    async def glob_sftpname(self, patterns: _SFTPPaths,
+                            error_handler: SFTPErrorHandler = None) -> \
+            Sequence[SFTPName]:
+        """Match glob patterns and return SFTPNames
+
+           This method is similar to :meth:`glob`, but it returns matching
+           file names and attributes as :class:`SFTPName` objects.
+
+        """
+
+        if isinstance(patterns, (bytes, str, PurePath)):
+            patterns = [patterns]
+
+        glob = SFTPGlob(self, len(patterns) > 1)
+        matches: List[SFTPName] = []
+
+        for pattern in patterns:
+            new_matches = await glob.match(self.encode(pattern),
+                                           error_handler, self.version)
+
+            if isinstance(pattern, (str, PurePath)):
+                for name in new_matches:
+                    name.filename = self.decode(cast(bytes, name.filename))
+
+            matches.extend(new_matches)
+
+        return matches
+
+    async def makedirs(self, path: _SFTPPath, attrs: SFTPAttrs = SFTPAttrs(),
+                       exist_ok: bool = False) -> None:
+        """Create a remote directory with the specified attributes
+
+           This method creates a remote directory at the specified path
+           similar to :meth:`mkdir`, but it will also create any
+           intermediate directories which don't yet exist.
+
+           If the target directory already exists and exist_ok is set
+           to `False`, this method will raise an error.
+
+           :param path:
+               The path of where the new remote directory should be created
+           :param attrs: (optional)
+               The file attributes to use when creating the directory or
+               any intermediate directories
+           :param exist_ok: (optional)
+               Whether or not to raise an error if the target directory
+               already exists
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+           :type attrs: :class:`SFTPAttrs`
+           :type exist_ok: `bool`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        path = self.encode(path)
+        curpath = b'/' if posixpath.isabs(path) else (self._cwd or b'')
+        exists = True
+        parts = path.split(b'/')
+        last = len(parts) - 1
+
+        for i, part in enumerate(parts):
+            curpath = posixpath.join(curpath, part)
+
+            try:
+                await self.mkdir(curpath, attrs)
+                exists = False
+            except (SFTPFailure, SFTPFileAlreadyExists):
+                filetype = await self._type(curpath)
+
+                if filetype != FILEXFER_TYPE_DIRECTORY:
+                    curpath_str = curpath.decode('utf-8', 'backslashreplace')
+
+                    exc = SFTPNotADirectory if self.version >= 6 \
+                        else SFTPFailure
+
+                    raise exc('%s is not a directory' % curpath_str) from None
+            except SFTPPermissionDenied:
+                if i == last:
+                    raise
+
+        if exists and not exist_ok:
+            exc = SFTPFileAlreadyExists if self.version >= 6 else SFTPFailure
+
+            raise exc('%s already exists' %
+                      curpath.decode('utf-8', 'backslashreplace'))
+
+    async def rmtree(self, path: _SFTPPath, ignore_errors: bool = False,
+                     onerror: _SFTPOnErrorHandler = None) -> None:
+        """Recursively delete a directory tree
+
+           This method removes all the files in a directory tree.
+
+           If ignore_errors is set, errors are ignored. Otherwise,
+           if onerror is set, it will be called with arguments of
+           the function which failed, the path it failed on, and
+           exception information returned by :func:`sys.exc_info()`.
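+
+           For example, failures can be recorded instead of raised by
+           passing a collecting handler (a sketch; the directory name
+           and the established client `sftp` are placeholders)::
+
+               failures = []
+
+               def onerror(func, path, exc_info):
+                   # Record which operation failed on which path
+                   failures.append((func.__name__, path, exc_info[1]))
+
+               await sftp.rmtree('old-builds', onerror=onerror)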
+
+           :param path:
+               The path of the parent directory to remove
+           :param ignore_errors: (optional)
+               Whether or not to ignore errors during the remove
+           :param onerror: (optional)
+               A function to call when errors occur
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+           :type ignore_errors: `bool`
+           :type onerror: `callable`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        async def _unlink(path: bytes) -> None:
+            """Internal helper for unlinking non-directories"""
+
+            assert onerror is not None
+
+            try:
+                await self.unlink(path)
+            except SFTPError:
+                onerror(self.unlink, path, sys.exc_info())
+
+        async def _rmtree(path: bytes) -> None:
+            """Internal helper for rmtree recursion"""
+
+            assert onerror is not None
+
+            tasks = []
+
+            try:
+                async with sem:
+                    async for entry in self.scandir(path):
+                        filename = cast(bytes, entry.filename)
+
+                        if filename in (b'.', b'..'):
+                            continue
+
+                        filename = posixpath.join(path, filename)
+
+                        if entry.attrs.type == FILEXFER_TYPE_DIRECTORY:
+                            task = _rmtree(filename)
+                        else:
+                            task = _unlink(filename)
+
+                        tasks.append(asyncio.ensure_future(task))
+            except SFTPError:
+                onerror(self.scandir, path, sys.exc_info())
+
+            results = await asyncio.gather(*tasks, return_exceptions=True)
+            exc = next((result for result in results
+                        if isinstance(result, Exception)), None)
+
+            if exc:
+                raise exc
+
+            try:
+                await self.rmdir(path)
+            except SFTPError:
+                onerror(self.rmdir, path, sys.exc_info())
+
+        # pylint: disable=function-redefined
+        if ignore_errors:
+            def onerror(*_args: object) -> None:
+                pass
+        elif onerror is None:
+            def onerror(*_args: object) -> None:
+                raise  # pylint: disable=misplaced-bare-raise
+        # pylint: enable=function-redefined
+
+        assert onerror is not None
+
+        path = self.encode(path)
+        sem = asyncio.Semaphore(_MAX_SFTP_REQUESTS)
+
+        try:
+            if await self.islink(path):
+                raise SFTPNoSuchFile('%s must not be a symlink' %
+                                     path.decode('utf-8', 'backslashreplace'))
+        except SFTPError:
+            onerror(self.islink, path, sys.exc_info())
+            return
+
+        await _rmtree(path)
+
+    @async_context_manager
+    async def open(self, path: _SFTPPath,
+                   pflags_or_mode: Union[int, str] = FXF_READ,
+                   attrs: SFTPAttrs = SFTPAttrs(),
+                   encoding: Optional[str] = 'utf-8', errors: str = 'strict',
+                   block_size: int = SFTP_BLOCK_SIZE,
+                   max_requests: int = _MAX_SFTP_REQUESTS) -> SFTPClientFile:
+        """Open a remote file
+
+           This method opens a remote file and returns an
+           :class:`SFTPClientFile` object which can be used to read and
+           write data and get and set file attributes.
+
+           The path can be either a `str` or `bytes` value. If it is a
+           str, it will be encoded using the file encoding specified
+           when the :class:`SFTPClient` was started.
+
+           The following open mode flags are supported:
+
+           ========== ======================================================
+           Mode       Description
+           ========== ======================================================
+           FXF_READ   Open the file for reading.
+           FXF_WRITE  Open the file for writing. If both this and FXF_READ
+                      are set, open the file for both reading and writing.
+           FXF_APPEND Force writes to append data to the end of the file
+                      regardless of seek position.
+           FXF_CREAT  Create the file if it doesn't exist. Without this,
+                      attempts to open a non-existent file will fail.
+           FXF_TRUNC  Truncate the file to zero length if it already exists.
+ FXF_EXCL Return an error when trying to open a file which + already exists. + ========== ====================================================== + + Instead of these flags, a Python open mode string can also be + provided. Python open modes map to the above flags as follows: + + ==== ============================================= + Mode Flags + ==== ============================================= + r FXF_READ + w FXF_WRITE | FXF_CREAT | FXF_TRUNC + a FXF_WRITE | FXF_CREAT | FXF_APPEND + x FXF_WRITE | FXF_CREAT | FXF_EXCL + + r+ FXF_READ | FXF_WRITE + w+ FXF_READ | FXF_WRITE | FXF_CREAT | FXF_TRUNC + a+ FXF_READ | FXF_WRITE | FXF_CREAT | FXF_APPEND + x+ FXF_READ | FXF_WRITE | FXF_CREAT | FXF_EXCL + ==== ============================================= + + Including a 'b' in the mode causes the `encoding` to be set + to `None`, forcing all data to be read and written as bytes + in binary format. + + Most applications should be able to use this method regardless + of the version of the SFTP protocol negotiated with the SFTP + server. A conversion from the pflags_or_mode values to the + SFTPv5/v6 flag values will happen automatically. However, if + an application wishes to set flags only available in SFTPv5/v6, + the :meth:`open56` method may be used to specify these flags + explicitly. + + The attrs argument is used to set initial attributes of the + file if it needs to be created. Otherwise, this argument is + ignored. + + The block_size argument specifies the size of parallel read and + write requests issued on the file. If set to `None`, each read + or write call will become a single request to the SFTP server. + Otherwise, read or write calls larger than this size will be + turned into parallel requests to the server of the requested + size, defaulting to 16 KB. + + .. note:: The OpenSSH SFTP server will close the connection + if it receives a message larger than 256 KB, and + limits read requests to returning no more than + 64 KB. So, when connecting to an OpenSSH SFTP + server, it is recommended that the block_size be + set below these sizes. + + The max_requests argument specifies the maximum number of + parallel read or write requests issued, defaulting to 128. 
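+
+           As a brief usage sketch (the file name, its content, and the
+           established client `sftp` are placeholders)::
+
+               # 'w' maps to FXF_WRITE | FXF_CREAT | FXF_TRUNC
+               async with sftp.open('motd.txt', 'w') as f:
+                   await f.write('Welcome!\n')
+
+               # A 'b' in the mode sets encoding to None and returns bytes
+               async with sftp.open('motd.txt', 'rb') as f:
+                   data = await f.read()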
+ + :param path: + The name of the remote file to open + :param pflags_or_mode: (optional) + The access mode to use for the remote file (see above) + :param attrs: (optional) + File attributes to use if the file needs to be created + :param encoding: (optional) + The Unicode encoding to use for data read and written + to the remote file + :param errors: (optional) + The error-handling mode if an invalid Unicode byte + sequence is detected, defaulting to 'strict' which + raises an exception + :param block_size: (optional) + The block size to use for read and write requests + :param max_requests: (optional) + The maximum number of parallel read or write requests + :type path: :class:`PurePath `, `str`, or `bytes` + :type pflags_or_mode: `int` or `str` + :type attrs: :class:`SFTPAttrs` + :type encoding: `str` + :type errors: `str` + :type block_size: `int` or `None` + :type max_requests: `int` + + :returns: An :class:`SFTPClientFile` to use to access the file + + :raises: | :exc:`ValueError` if the mode is not valid + | :exc:`SFTPError` if the server returns an error + + """ + + if isinstance(pflags_or_mode, str): + pflags, binary = _mode_to_pflags(pflags_or_mode) + + if binary: + encoding = None + else: + pflags = pflags_or_mode + + path = self.compose_path(path) + handle = await self._handler.open(path, pflags, attrs) + + return SFTPClientFile(self._handler, handle, bool(pflags & FXF_APPEND), + encoding, errors, block_size, max_requests) + + @async_context_manager + async def open56(self, path: _SFTPPath, + desired_access: int = ACE4_READ_DATA | + ACE4_READ_ATTRIBUTES, + flags: int = FXF_OPEN_EXISTING, + attrs: SFTPAttrs = SFTPAttrs(), + encoding: Optional[str] = 'utf-8', errors: str = 'strict', + block_size: int = SFTP_BLOCK_SIZE, + max_requests: int = _MAX_SFTP_REQUESTS) -> SFTPClientFile: + """Open a remote file using SFTP v5/v6 flags + + This method is very similar to :meth:`open`, but the pflags_or_mode + argument is replaced with SFTPv5/v6 desired_access and flags + arguments. Most applications can continue to use :meth:`open` + even when talking to an SFTPv5/v6 server and the translation of + the flags will happen automatically. However, if an application + wishes to set flags only available in SFTPv5/v6, this method + provides that capability. + + The following desired_access flags can be specified: + + | ACE4_READ_DATA + | ACE4_WRITE_DATA + | ACE4_APPEND_DATA + | ACE4_READ_ATTRIBUTES + | ACE4_WRITE_ATTRIBUTES + + The following flags can be specified: + + | FXF_CREATE_NEW + | FXF_CREATE_TRUNCATE + | FXF_OPEN_EXISTING + | FXF_OPEN_OR_CREATE + | FXF_TRUNCATE_EXISTING + | FXF_APPEND_DATA + | FXF_APPEND_DATA_ATOMIC + | FXF_BLOCK_READ + | FXF_BLOCK_WRITE + | FXF_BLOCK_DELETE + | FXF_BLOCK_ADVISORY (SFTPv6) + | FXF_NOFOLLOW (SFTPv6) + | FXF_DELETE_ON_CLOSE (SFTPv6) + | FXF_ACCESS_AUDIT_ALARM_INFO (SFTPv6) + | FXF_ACCESS_BACKUP (SFTPv6) + | FXF_BACKUP_STREAM (SFTPv6) + | FXF_OVERRIDE_OWNER (SFTPv6) + + At this time, FXF_TEXT_MODE is not supported. Also, servers + may support only a subset of these flags. For example, + the AsyncSSH SFTP server doesn't currently support ACLs, + file locking, or most of the SFTPv6 open flags, but + support for some of these may be added over time. 
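+
+           A minimal sketch of appending with explicit SFTPv5/v6 flags
+           (the path and the established client `sftp` are placeholders,
+           and servers may support only a subset of these flags)::
+
+               async with sftp.open56('audit.log',
+                                      ACE4_WRITE_DATA | ACE4_APPEND_DATA,
+                                      FXF_OPEN_OR_CREATE) as f:
+                   await f.write('one more entry\n')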
+ + :param path: + The name of the remote file to open + :param desired_access: (optional) + The access mode to use for the remote file (see above) + :param flags: (optional) + The access flags to use for the remote file (see above) + :param attrs: (optional) + File attributes to use if the file needs to be created + :param encoding: (optional) + The Unicode encoding to use for data read and written + to the remote file + :param errors: (optional) + The error-handling mode if an invalid Unicode byte + sequence is detected, defaulting to 'strict' which + raises an exception + :param block_size: (optional) + The block size to use for read and write requests + :param max_requests: (optional) + The maximum number of parallel read or write requests + :type path: :class:`PurePath `, `str`, or `bytes` + :type desired_access: int + :type flags: int + :type attrs: :class:`SFTPAttrs` + :type encoding: `str` + :type errors: `str` + :type block_size: `int` or `None` + :type max_requests: `int` + + :returns: An :class:`SFTPClientFile` to use to access the file + + :raises: | :exc:`ValueError` if the mode is not valid + | :exc:`SFTPError` if the server returns an error + + """ + + path = self.compose_path(path) + handle = await self._handler.open56(path, desired_access, flags, attrs) + + return SFTPClientFile(self._handler, handle, + bool(desired_access & ACE4_APPEND_DATA or + flags & FXF_APPEND_DATA), + encoding, errors, block_size, max_requests) + + async def stat(self, path: _SFTPPath, + flags = FILEXFER_ATTR_DEFINED_V4) -> SFTPAttrs: + """Get attributes of a remote file or directory, following symlinks + + This method queries the attributes of a remote file or + directory. If the path provided is a symbolic link, the + returned attributes will correspond to the target of the + link. + + :param path: + The path of the remote file or directory to get attributes for + :param flags: (optional) + Flags indicating attributes of interest (SFTPv4 only) + :type path: :class:`PurePath `, `str`, or `bytes` + :type flags: `int` + + :returns: An :class:`SFTPAttrs` containing the file attributes + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + path = self.compose_path(path) + return await self._handler.stat(path, flags) + + async def lstat(self, path: _SFTPPath, + flags = FILEXFER_ATTR_DEFINED_V4) -> SFTPAttrs: + """Get attributes of a remote file, directory, or symlink + + This method queries the attributes of a remote file, + directory, or symlink. Unlike :meth:`stat`, this method + returns the attributes of a symlink itself rather than + the target of that link. + + :param path: + The path of the remote file, directory, or link to get + attributes for + :param flags: (optional) + Flags indicating attributes of interest (SFTPv4 only) + :type path: :class:`PurePath `, `str`, or `bytes` + :type flags: `int` + + :returns: An :class:`SFTPAttrs` containing the file attributes + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + path = self.compose_path(path) + return await self._handler.lstat(path, flags) + + async def setstat(self, path: _SFTPPath, attrs: SFTPAttrs) -> None: + """Set attributes of a remote file or directory + + This method sets attributes of a remote file or directory. + If the path provided is a symbolic link, the attributes + will be set on the target of the link. A subset of the + fields in `attrs` can be initialized and only those + attributes will be changed. 
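+
+           For example, a permissions-only change leaves the times and
+           ownership fields untouched (a sketch; the path and the
+           established client `sftp` are placeholders)::
+
+               await sftp.setstat('app.cfg', SFTPAttrs(permissions=0o600))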
+ + :param path: + The path of the remote file or directory to set attributes for + :param attrs: + File attributes to set + :type path: :class:`PurePath `, `str`, or `bytes` + :type attrs: :class:`SFTPAttrs` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + path = self.compose_path(path) + await self._handler.setstat(path, attrs) + + async def statvfs(self, path: _SFTPPath) -> SFTPVFSAttrs: + """Get attributes of a remote file system + + This method queries the attributes of the file system containing + the specified path. + + :param path: + The path of the remote file system to get attributes for + :type path: :class:`PurePath `, `str`, or `bytes` + + :returns: An :class:`SFTPVFSAttrs` containing the file system + attributes + + :raises: :exc:`SFTPError` if the server doesn't support this + extension or returns an error + + """ + + path = self.compose_path(path) + return await self._handler.statvfs(path) + + async def truncate(self, path: _SFTPPath, size: int) -> None: + """Truncate a remote file to the specified size + + This method truncates a remote file to the specified size. + If the path provided is a symbolic link, the target of + the link will be truncated. + + :param path: + The path of the remote file to be truncated + :param size: + The desired size of the file, in bytes + :type path: :class:`PurePath `, `str`, or `bytes` + :type size: `int` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + await self.setstat(path, SFTPAttrs(size=size)) + + @overload + async def chown(self, path: _SFTPPath, + uid: int, gid: int) -> None: ... # pragma: no cover + + @overload + async def chown(self, path: _SFTPPath, + owner: str, group: str) -> None: ... # pragma: no cover + + async def chown(self, path, uid_or_owner = None, gid_or_group = None, + uid = None, gid = None, owner = None, group = None): + """Change the owner user and group id of a remote file or directory + + This method changes the user and group id of a remote + file or directory. If the path provided is a symbolic + link, the target of the link will be changed. + + :param path: + The path of the remote file to change + :param uid: + The new user id to assign to the file + :param gid: + The new group id to assign to the file + :param owner: + The new owner to assign to the file (SFTPv4 only) + :param group: + The new group to assign to the file (SFTPv4 only) + :type path: :class:`PurePath `, `str`, or `bytes` + :type uid: `int` + :type gid: `int` + :type owner: `str` + :type group: `str` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if isinstance(uid_or_owner, int): + uid = uid_or_owner + elif isinstance(uid_or_owner, str): + owner = uid_or_owner + + if isinstance(gid_or_group, int): + gid = gid_or_group + elif isinstance(gid_or_group, str): + group = gid_or_group + + await self.setstat(path, SFTPAttrs(uid=uid, gid=gid, + owner=owner, group=group)) + + async def chmod(self, path: _SFTPPath, mode: int) -> None: + """Change the file permissions of a remote file or directory + + This method changes the permissions of a remote file or + directory. If the path provided is a symbolic link, the + target of the link will be changed. 
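+
+           For example (a sketch; the path and the established client
+           `sftp` are placeholders)::
+
+               # Equivalent to setstat() with only permissions filled in
+               await sftp.chmod('deploy.sh', 0o755)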
+
+           :param path:
+               The path of the remote file to change
+           :param mode:
+               The new file permissions, expressed as an int
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+           :type mode: `int`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        await self.setstat(path, SFTPAttrs(permissions=mode))
+
+    async def utime(self, path: _SFTPPath,
+                    times: Optional[Tuple[float, float]] = None,
+                    ns: Optional[Tuple[int, int]] = None) -> None:
+        """Change the access and modify times of a remote file or directory
+
+           This method changes the access and modify times of a
+           remote file or directory. If neither `times` nor `ns` is
+           provided, the times will be changed to the current time.
+
+           If the path provided is a symbolic link, the target of the
+           link will be changed.
+
+           :param path:
+               The path of the remote file to change
+           :param times: (optional)
+               The new access and modify times, as seconds relative to
+               the UNIX epoch
+           :param ns: (optional)
+               The new access and modify times, as nanoseconds relative to
+               the UNIX epoch
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+           :type times: tuple of two `int` or `float` values
+           :type ns: tuple of two `int` values
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        await self.setstat(path, _utime_to_attrs(times, ns))
+
+    async def exists(self, path: _SFTPPath) -> bool:
+        """Return if the remote path exists and isn't a broken symbolic link
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        return (await self._type(path)) != FILEXFER_TYPE_UNKNOWN
+
+    async def lexists(self, path: _SFTPPath) -> bool:
+        """Return if the remote path exists, without following symbolic links
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        return (await self._type(path, statfunc=self.lstat)) != \
+            FILEXFER_TYPE_UNKNOWN
+
+    async def getatime(self, path: _SFTPPath) -> Optional[float]:
+        """Return the last access time of a remote file or directory
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        attrs = await self.stat(path)
+
+        return _tuple_to_float_sec(attrs.atime, attrs.atime_ns) \
+            if attrs.atime is not None else None
+
+    async def getatime_ns(self, path: _SFTPPath) -> Optional[int]:
+        """Return the last access time of a remote file or directory
+
+           The time returned is nanoseconds since the epoch.
+ + :param path: + The remote path to check + :type path: :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + attrs = await self.stat(path) + + return _tuple_to_nsec(attrs.atime, attrs.atime_ns) \ + if attrs.atime is not None else None + + async def getcrtime(self, path: _SFTPPath) -> Optional[float]: + """Return the creation time of a remote file or directory (SFTPv4 only) + + :param path: + The remote path to check + :type path: :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + attrs = await self.stat(path) + + return _tuple_to_float_sec(attrs.crtime, attrs.crtime_ns) \ + if attrs.crtime is not None else None + + async def getcrtime_ns(self, path: _SFTPPath) -> Optional[int]: + """Return the creation time of a remote file or directory + + The time returned is nanoseconds since the epoch. + + :param path: + The remote path to check + :type path: :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + attrs = await self.stat(path) + + return _tuple_to_nsec(attrs.crtime, attrs.crtime_ns) \ + if attrs.crtime is not None else None + + async def getmtime(self, path: _SFTPPath) -> Optional[float]: + """Return the last modification time of a remote file or directory + + :param path: + The remote path to check + :type path: :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + attrs = await self.stat(path) + + return _tuple_to_float_sec(attrs.mtime, attrs.mtime_ns) \ + if attrs.mtime is not None else None + + async def getmtime_ns(self, path: _SFTPPath) -> Optional[int]: + """Return the last modification time of a remote file or directory + + The time returned is nanoseconds since the epoch. 
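+
+           For example (a sketch; the path and the established client
+           `sftp` are placeholders)::
+
+               mtime_ns = await sftp.getmtime_ns('data.bin')
+               mtime = await sftp.getmtime('data.bin')  # same time, float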
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        attrs = await self.stat(path)
+
+        return _tuple_to_nsec(attrs.mtime, attrs.mtime_ns) \
+            if attrs.mtime is not None else None
+
+    async def getsize(self, path: _SFTPPath) -> Optional[int]:
+        """Return the size of a remote file or directory
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        return (await self.stat(path)).size
+
+    async def isdir(self, path: _SFTPPath) -> bool:
+        """Return if the remote path refers to a directory
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        return (await self._type(path)) == FILEXFER_TYPE_DIRECTORY
+
+    async def isfile(self, path: _SFTPPath) -> bool:
+        """Return if the remote path refers to a regular file
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        return (await self._type(path)) == FILEXFER_TYPE_REGULAR
+
+    async def islink(self, path: _SFTPPath) -> bool:
+        """Return if the remote path refers to a symbolic link
+
+           :param path:
+               The remote path to check
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        return (await self._type(path, statfunc=self.lstat)) == \
+            FILEXFER_TYPE_SYMLINK
+
+    async def remove(self, path: _SFTPPath) -> None:
+        """Remove a remote file
+
+           This method removes a remote file or symbolic link.
+
+           :param path:
+               The path of the remote file or link to remove
+           :type path: :class:`PurePath <pathlib.PurePath>`, `str`, or `bytes`
+
+           :raises: :exc:`SFTPError` if the server returns an error
+
+        """
+
+        path = self.compose_path(path)
+        await self._handler.remove(path)
+
+    async def unlink(self, path: _SFTPPath) -> None:
+        """Remove a remote file (see :meth:`remove`)"""
+
+        await self.remove(path)
+
+    async def rename(self, oldpath: _SFTPPath, newpath: _SFTPPath,
+                     flags: int = 0) -> None:
+        """Rename a remote file, directory, or link
+
+           This method renames a remote file, directory, or link.
+
+           .. note:: By default, this version of rename will not overwrite
+                     the new path if it already exists. However, this
+                     can be controlled using the `flags` argument,
+                     available in SFTPv5 and later. When a connection
+                     is negotiated to use an earlier version of SFTP
+                     and `flags` is set, this method will attempt to
+                     fall back to the OpenSSH "posix-rename" extension
+                     if it is available. That can also be invoked
+                     directly by calling :meth:`posix_rename`.
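+
+           For example, overwriting an existing destination when the
+           server negotiates SFTPv5 or later (a sketch; the paths and
+           the established client `sftp` are placeholders)::
+
+               await sftp.rename('report.tmp', 'report.txt',
+                                 flags=FXR_OVERWRITE)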
+ + :param oldpath: + The path of the remote file, directory, or link to rename + :param newpath: + The new name for this file, directory, or link + :param flags: (optional) + A combination of the `FXR_OVERWRITE`, `FXR_ATOMIC`, and + `FXR_NATIVE` flags to specify what happens when `newpath` + already exists, defaulting to not allowing the overwrite + (SFTPv5 and later) + :type oldpath: + :class:`PurePath `, `str`, or `bytes` + :type newpath: + :class:`PurePath `, `str`, or `bytes` + :type flags: `int` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + oldpath = self.compose_path(oldpath) + newpath = self.compose_path(newpath) + await self._handler.rename(oldpath, newpath, flags) + + async def posix_rename(self, oldpath: _SFTPPath, + newpath: _SFTPPath) -> None: + """Rename a remote file, directory, or link with POSIX semantics + + This method renames a remote file, directory, or link, + removing the prior instance of new path if it previously + existed. + + This method may not be supported by all SFTP servers. If it + is not available but the server supports SFTPv5 or later, + this method will attempt to send the standard SFTP rename + request with the `FXR_OVERWRITE` flag set. + + :param oldpath: + The path of the remote file, directory, or link to rename + :param newpath: + The new name for this file, directory, or link + :type oldpath: + :class:`PurePath `, `str`, or `bytes` + :type newpath: + :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server doesn't support this + extension or returns an error + + """ + + oldpath = self.compose_path(oldpath) + newpath = self.compose_path(newpath) + await self._handler.posix_rename(oldpath, newpath) + + async def scandir(self, path: _SFTPPath = '.') -> AsyncIterator[SFTPName]: + """Return names and attributes of the files in a remote directory + + This method reads the contents of a directory, returning + the names and attributes of what is contained there as an + async iterator. If no path is provided, it defaults to the + current remote working directory. + + :param path: (optional) + The path of the remote directory to read + :type path: :class:`PurePath `, `str`, or `bytes` + + :returns: An async iterator of :class:`SFTPName` entries, with + path names matching the type used to pass in the path + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + dirpath = self.compose_path(path) + handle = await self._handler.opendir(dirpath) + at_end = False + + try: + while not at_end: + names, at_end = await self._handler.readdir(handle) + + for entry in names: + if isinstance(path, (str, PurePath)): + entry.filename = \ + self.decode(cast(bytes, entry.filename)) + + if entry.longname is not None: + entry.longname = \ + self.decode(cast(bytes, entry.longname)) + + yield entry + except SFTPEOFError: + pass + finally: + await self._handler.close(handle) + + async def readdir(self, path: _SFTPPath = '.') -> Sequence[SFTPName]: + """Read the contents of a remote directory + + This method reads the contents of a directory, returning + the names and attributes of what is contained there. If no + path is provided, it defaults to the current remote working + directory. 
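+
+           As a usage sketch (the directory name and the established
+           client `sftp` are placeholders), the async iterator in
+           :meth:`scandir` avoids building the whole list that this
+           method returns::
+
+               async for entry in sftp.scandir('uploads'):
+                   if entry.filename not in ('.', '..'):
+                       print(entry.filename, entry.attrs.size)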
+ + :param path: (optional) + The path of the remote directory to read + :type path: :class:`PurePath `, `str`, or `bytes` + + :returns: A list of :class:`SFTPName` entries, with path + names matching the type used to pass in the path + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + return [entry async for entry in self.scandir(path)] + + @overload + async def listdir(self, path: bytes) -> \ + Sequence[bytes]: ... # pragma: no cover + + @overload + async def listdir(self, path: FilePath) -> \ + Sequence[str]: ... # pragma: no cover + + async def listdir(self, path: _SFTPPath = '.') -> Sequence[BytesOrStr]: + """Read the names of the files in a remote directory + + This method reads the names of files and subdirectories + in a remote directory. If no path is provided, it defaults + to the current remote working directory. + + :param path: (optional) + The path of the remote directory to read + :type path: :class:`PurePath `, `str`, or `bytes` + + :returns: A list of file/subdirectory names, as a `str` or `bytes` + matching the type used to pass in the path + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + names = await self.readdir(path) + return [name.filename for name in names] + + async def mkdir(self, path: _SFTPPath, + attrs: SFTPAttrs = SFTPAttrs()) -> None: + """Create a remote directory with the specified attributes + + This method creates a new remote directory at the + specified path with the requested attributes. + + :param path: + The path of where the new remote directory should be created + :param attrs: (optional) + The file attributes to use when creating the directory + :type path: :class:`PurePath `, `str`, or `bytes` + :type attrs: :class:`SFTPAttrs` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + path = self.compose_path(path) + await self._handler.mkdir(path, attrs) + + async def rmdir(self, path: _SFTPPath) -> None: + """Remove a remote directory + + This method removes a remote directory. The directory + must be empty for the removal to succeed. + + :param path: + The path of the remote directory to remove + :type path: :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + path = self.compose_path(path) + await self._handler.rmdir(path) + + @overload + async def realpath(self, path: bytes, # pragma: no cover + *compose_paths: bytes) -> bytes: ... + + @overload + async def realpath(self, path: FilePath, # pragma: no cover + *compose_paths: FilePath) -> str: ... + + @overload + async def realpath(self, path: bytes, # pragma: no cover + *compose_paths: bytes, check: int) -> SFTPName: ... + + @overload + async def realpath(self, path: FilePath, # pragma: no cover + *compose_paths: FilePath, check: int) -> SFTPName: ... + + async def realpath(self, path: _SFTPPath, *compose_paths: _SFTPPath, + check: int = FXRP_NO_CHECK) -> \ + Union[BytesOrStr, SFTPName]: + """Return the canonical version of a remote path + + This method returns a canonical version of the requested path. 
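+
+           For example (a sketch; the paths and the established client
+           `sftp` are placeholders)::
+
+               # Resolve '..' components on the server
+               target = await sftp.realpath('uploads/../archive')
+
+               # With a check value, an SFTPName with attributes is
+               # returned instead of a plain path
+               name = await sftp.realpath('archive', check=FXRP_STAT_ALWAYS)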
+ + :param path: (optional) + The path of the remote directory to canonicalize + :param compose_paths: (optional) + A list of additional paths that the server should compose + with `path` before canonicalizing it + :param check: (optional) + One of `FXRP_NO_CHECK`, `FXRP_STAT_IF_EXISTS`, and + `FXRP_STAT_ALWAYS`, specifying when to perform a + stat operation on the resulting path, defaulting to + `FXRP_NO_CHECK` + :type path: + :class:`PurePath `, `str`, or `bytes` + :type compose_paths: + :class:`PurePath `, `str`, or `bytes` + :type check: int + + :returns: The canonical path as a `str` or `bytes`, matching + the type used to pass in the path if `check` is set + to `FXRP_NO_CHECK`, or an :class:`SFTPName` + containing the canonical path name and attributes + otherwise + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if compose_paths and isinstance(compose_paths[-1], int): + check = compose_paths[-1] + compose_paths = compose_paths[:-1] + + path_bytes = self.compose_path(path) + + if self.version >= 6: + names, _ = await self._handler.realpath( + path_bytes, *map(self.encode, compose_paths), check=check) + else: + for cpath in compose_paths: + path_bytes = self.compose_path(cpath, path_bytes) + + names, _ = await self._handler.realpath(path_bytes) + + if len(names) > 1: + raise SFTPBadMessage('Too many names returned') + + if check != FXRP_NO_CHECK: + if self.version < 6: + try: + names[0].attrs = await self._handler.stat( + self.encode(names[0].filename), + _valid_attr_flags[self.version]) + except SFTPError: + if check == FXRP_STAT_IF_EXISTS: + names[0].attrs = SFTPAttrs(type=FILEXFER_TYPE_UNKNOWN) + else: + raise + + return names[0] + else: + return self.decode(cast(bytes, names[0].filename), + isinstance(path, (str, PurePath))) + + async def getcwd(self) -> BytesOrStr: + """Return the current remote working directory + + :returns: The current remote working directory, decoded using + the specified path encoding + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + if self._cwd is None: + self._cwd = await self.realpath(b'.') + + return self.decode(self._cwd) + + async def chdir(self, path: _SFTPPath) -> None: + """Change the current remote working directory + + :param path: + The path to set as the new remote working directory + :type path: :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + self._cwd = await self.realpath(self.encode(path)) + + @overload + async def readlink(self, path: bytes) -> bytes: ... # pragma: no cover + + @overload + async def readlink(self, path: FilePath) -> str: ... # pragma: no cover + + async def readlink(self, path: _SFTPPath) -> BytesOrStr: + """Return the target of a remote symbolic link + + This method returns the target of a symbolic link. + + :param path: + The path of the remote symbolic link to follow + :type path: :class:`PurePath `, `str`, or `bytes` + + :returns: The target path of the link as a `str` or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + linkpath = self.compose_path(path) + names, _ = await self._handler.readlink(linkpath) + + if len(names) > 1: + raise SFTPBadMessage('Too many names returned') + + return self.decode(cast(bytes, names[0].filename), + isinstance(path, (str, PurePath))) + + async def symlink(self, oldpath: _SFTPPath, newpath: _SFTPPath) -> None: + """Create a remote symbolic link + + This method creates a symbolic link. 
The argument order here + matches the standard Python :meth:`os.symlink` call. The + argument order sent on the wire is automatically adapted + depending on the version information sent by the server, as + a number of servers (OpenSSH in particular) did not follow + the SFTP standard when implementing this call. + + :param oldpath: + The path the link should point to + :param newpath: + The path of where to create the remote symbolic link + :type oldpath: + :class:`PurePath `, `str`, or `bytes` + :type newpath: + :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server returns an error + + """ + + oldpath = self.compose_path(oldpath) + newpath = self.encode(newpath) + await self._handler.symlink(oldpath, newpath) + + async def link(self, oldpath: _SFTPPath, newpath: _SFTPPath) -> None: + """Create a remote hard link + + This method creates a hard link to the remote file specified + by oldpath at the location specified by newpath. + + This method may not be supported by all SFTP servers. + + :param oldpath: + The path of the remote file the hard link should point to + :param newpath: + The path of where to create the remote hard link + :type oldpath: + :class:`PurePath `, `str`, or `bytes` + :type newpath: + :class:`PurePath `, `str`, or `bytes` + + :raises: :exc:`SFTPError` if the server doesn't support this + extension or returns an error + + """ + + oldpath = self.compose_path(oldpath) + newpath = self.compose_path(newpath) + await self._handler.link(oldpath, newpath) + + def exit(self) -> None: + """Exit the SFTP client session + + This method exits the SFTP client session, closing the + corresponding channel opened on the server. + + """ + + self._handler.exit() + + async def wait_closed(self) -> None: + """Wait for this SFTP client session to close""" + + await self._handler.wait_closed() + + +class SFTPServerHandler(SFTPHandler): + """An SFTP server session handler""" + + # Supported attribute flags in setstat/fsetstat + _supported_attr_mask = FILEXFER_ATTR_SIZE | \ + FILEXFER_ATTR_PERMISSIONS | \ + FILEXFER_ATTR_ACCESSTIME | \ + FILEXFER_ATTR_MODIFYTIME | \ + FILEXFER_ATTR_OWNERGROUP | \ + FILEXFER_ATTR_SUBSECOND_TIMES + + # No attrib bits currently supported + _supported_attrib_mask = 0 + + # Supported SFTPv5/v6 open flags + _supported_open_flags = FXF_ACCESS_DISPOSITION | FXF_APPEND_DATA + + # Supported SFTPv5/v6 desired access flags + _supported_access_mask = ACE4_READ_DATA | ACE4_WRITE_DATA | \ + ACE4_APPEND_DATA | ACE4_READ_ATTRIBUTES | \ + ACE4_WRITE_ATTRIBUTES + + # Locking not currently supported + _supported_open_block_vector = _supported_block_vector = 0x0001 + + _vendor_id = String(__author__) + String('AsyncSSH') + \ + String(__version__) + UInt64(0) + + _extensions: List[Tuple[bytes, bytes]] = [ + (b'newline', os.linesep.encode('utf-8')), + (b'vendor-id', _vendor_id), + (b'posix-rename@openssh.com', b'1'), + (b'hardlink@openssh.com', b'1'), + (b'fsync@openssh.com', b'1')] + + _attrib_extensions: List[bytes] = [] + + if hasattr(os, 'statvfs'): # pragma: no branch + _extensions += [(b'statvfs@openssh.com', b'2'), + (b'fstatvfs@openssh.com', b'2')] + + def __init__(self, server: 'SFTPServer', reader: 'SSHReader[bytes]', + writer: 'SSHWriter[bytes]', sftp_version: int): + super().__init__(reader, writer) + + self._server = server + self._version = sftp_version + self._nonstandard_symlink = False + self._next_handle = 0 + self._file_handles: Dict[bytes, object] = {} + self._dir_handles: Dict[bytes, AsyncIterator[SFTPName]] = {} + + async def 
_cleanup(self, exc: Optional[Exception]) -> None: + """Clean up this SFTP server session""" + + if self._server: # pragma: no branch + for file_obj in list(self._file_handles.values()): + result = self._server.close(file_obj) + + if inspect.isawaitable(result): + assert result is not None + await result + + self._server.exit() + + self._file_handles = {} + self._dir_handles = {} + + self.logger.info('SFTP server exited%s', ': ' + str(exc) if exc else '') + + await super()._cleanup(exc) + + def _get_next_handle(self) -> bytes: + """Get the next available unique file handle number""" + + while True: + handle = self._next_handle.to_bytes(4, 'big') + self._next_handle = (self._next_handle + 1) & 0xffffffff + + if (handle not in self._file_handles and + handle not in self._dir_handles): + return handle + + async def _process_packet(self, pkttype: int, pktid: int, + packet: SSHPacket) -> None: + """Process incoming SFTP requests""" + + # pylint: disable=broad-except + try: + if pkttype == FXP_EXTENDED: + handler_type: Union[int, bytes] = packet.get_string() + else: + handler_type = pkttype + + handler = self._packet_handlers.get(handler_type) + if not handler: + raise SFTPOpUnsupported('Unsupported request type: %s' % + pkttype) + + return_type = self._return_types.get(handler_type, FXP_STATUS) + result = await handler(self, packet) + + if return_type == FXP_STATUS: + self.logger.debug1('Sending OK') + + response = UInt32(FX_OK) + String('') + String('') + elif return_type == FXP_HANDLE: + handle = cast(bytes, result) + + self.logger.debug1('Sending handle %s', handle.hex()) + + response = String(handle) + elif return_type == FXP_DATA: + data, at_end = cast(Tuple[bytes, bool], result) + + self.logger.debug1('Sending %s%s', + plural(len(data), 'data byte'), + ' (at end)' if at_end else '') + + end = Boolean(at_end) if at_end and self._version >= 6 else b'' + + response = String(data) + end + elif return_type == FXP_NAME: + names, at_end = cast(_SFTPNames, result) + + self.logger.debug1('Sending %s%s', plural(len(names), 'name'), + ' (at end)' if at_end else '') + + for name in names: + self.logger.debug1(' %s', name) + + end = Boolean(at_end) if at_end and self._version >= 6 else b'' + + response = (UInt32(len(names)) + + b''.join(name.encode(self._version) + for name in names) + end) + else: + attrs: _SupportsEncode + + if isinstance(result, os.stat_result): + attrs = SFTPAttrs.from_local(cast(os.stat_result, result)) + elif isinstance(result, os.statvfs_result): + attrs = SFTPVFSAttrs.from_local(cast(os.statvfs_result, + result)) + else: + attrs = cast(_SupportsEncode, result) + + self.logger.debug1('Sending %s', attrs) + response = attrs.encode(self._version) + except PacketDecodeError as exc: + return_type = FXP_STATUS + + self.logger.debug1('Sending bad message error: %s', str(exc)) + + response = (UInt32(FX_BAD_MESSAGE) + String(str(exc)) + + String(DEFAULT_LANG)) + except SFTPError as exc: + return_type = FXP_STATUS + + if exc.code == FX_EOF: + self.logger.debug1('Sending EOF') + else: + self.logger.debug1('Sending %s: %s', exc.__class__.__name__, + str(exc.reason)) + + response = exc.encode(self._version) + except NotImplementedError as exc: + assert handler is not None + + return_type = FXP_STATUS + op_name = handler.__name__[9:] + + self.logger.debug1('Sending operation not supported: %s', op_name) + + response = (UInt32(FX_OP_UNSUPPORTED) + + String('Operation not supported: %s' % op_name) + + String(DEFAULT_LANG)) + except OSError as exc: + return_type = FXP_STATUS + reason = 
exc.strerror or str(exc) + + if exc.errno == errno.ENOENT: + self.logger.debug1('Sending no such file: %s', reason) + code = FX_NO_SUCH_FILE + elif exc.errno == errno.EACCES: + self.logger.debug1('Sending permission denied: %s', reason) + code = FX_PERMISSION_DENIED + elif exc.errno == errno.EEXIST: + self.logger.debug1('Sending file already exists: %s', reason) + code = FX_FILE_ALREADY_EXISTS + elif exc.errno == errno.EROFS: + self.logger.debug1('Sending write protect: %s', reason) + code = FX_WRITE_PROTECT + elif exc.errno == errno.ENOSPC: + self.logger.debug1('Sending no space on ' + 'filesystem: %s', reason) + code = FX_NO_SPACE_ON_FILESYSTEM + elif exc.errno == errno.EDQUOT: + self.logger.debug1('Sending disk quota exceeded: %s', reason) + code = FX_QUOTA_EXCEEDED + elif exc.errno == errno.ENOTEMPTY: + self.logger.debug1('Sending directory not empty: %s', reason) + code = FX_DIR_NOT_EMPTY + elif exc.errno == errno.ENOTDIR: + self.logger.debug1('Sending not a directory: %s', reason) + code = FX_NOT_A_DIRECTORY + elif exc.errno in (errno.ENAMETOOLONG, errno.EILSEQ): + self.logger.debug1('Sending invalid filename: %s', reason) + code = FX_INVALID_FILENAME + elif exc.errno == errno.ELOOP: + self.logger.debug1('Sending link loop: %s', reason) + code = FX_LINK_LOOP + elif exc.errno == errno.EINVAL: + self.logger.debug1('Sending invalid parameter: %s', reason) + code = FX_INVALID_PARAMETER + elif exc.errno == errno.EISDIR: + self.logger.debug1('Sending file is a directory: %s', reason) + code = FX_FILE_IS_A_DIRECTORY + else: + self.logger.debug1('Sending failure: %s', reason) + code = FX_FAILURE + + response = SFTPError(code, reason).encode(self._version) + except Exception as exc: # pragma: no cover + return_type = FXP_STATUS + reason = 'Uncaught exception: %s' % str(exc) + + self.logger.debug1('Sending failure: %s', reason, + exc_info=sys.exc_info) + + response = (UInt32(FX_FAILURE) + String(reason) + + String(DEFAULT_LANG)) + + self.send_packet(return_type, pktid, UInt32(pktid), response) + + async def _process_open(self, packet: SSHPacket) -> bytes: + """Process an incoming SFTP open request""" + + path = packet.get_string() + + if self._version >= 5: + desired_access = packet.get_uint32() + flags = packet.get_uint32() + flagmsg = 'access=0x%04x, flags=0x%04x' % (desired_access, flags) + else: + pflags = packet.get_uint32() + flagmsg = 'pflags=0x%02x' % pflags + + attrs = SFTPAttrs.decode(packet, self._version) + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received open request for %s, %s%s', + path, flagmsg, hide_empty(attrs)) + + if self._version >= 5: + unsupported_access = desired_access & ~self._supported_access_mask + + if unsupported_access: + raise SFTPInvalidParameter('Unsupported access flags: 0x%08x' % + unsupported_access) + + unsupported_flags = flags & ~self._supported_open_flags + + if unsupported_flags: + raise SFTPInvalidParameter('Unsupported open flags: 0x%08x' % + unsupported_flags) + + result = self._server.open56(path, desired_access, flags, attrs) + else: + result = self._server.open(path, pflags, attrs) + + if inspect.isawaitable(result): + result = await cast(Awaitable[object], result) + + handle = self._get_next_handle() + self._file_handles[handle] = result + return handle + + async def _process_close(self, packet: SSHPacket) -> None: + """Process an incoming SFTP close request""" + + handle = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received close for handle %s', handle.hex()) + + 
file_obj = self._file_handles.pop(handle, None)
+
+        if file_obj:
+            result = self._server.close(file_obj)
+
+            if inspect.isawaitable(result):
+                assert result is not None
+                await result
+
+            return
+
+        if self._dir_handles.pop(handle, None) is not None:
+            return
+
+        raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_read(self, packet: SSHPacket) -> Tuple[bytes, bool]:
+        """Process an incoming SFTP read request"""
+
+        handle = packet.get_string()
+        offset = packet.get_uint64()
+        length = packet.get_uint32()
+
+        if self._version < 6:
+            packet.check_end()
+
+        self.logger.debug1('Received read for %s at offset %d in handle %s',
+                           plural(length, 'byte'), offset, handle.hex())
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            result = self._server.read(file_obj, offset, length)
+
+            if inspect.isawaitable(result):
+                result = await cast(Awaitable[bytes], result)
+
+            result: bytes
+
+            if self._version >= 6:
+                attrs = await self._server.convert_attrs(
+                    self._server.fstat(file_obj))
+
+                at_end = offset + len(result) == attrs.size
+            else:
+                at_end = False
+
+            if result:
+                return cast(bytes, result), at_end
+            else:
+                raise SFTPEOFError
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_write(self, packet: SSHPacket) -> int:
+        """Process an incoming SFTP write request"""
+
+        handle = packet.get_string()
+        offset = packet.get_uint64()
+        data = packet.get_string()
+
+        if self._version < 6:
+            packet.check_end()
+
+        self.logger.debug1('Received write for %s at offset %d in handle %s',
+                           plural(len(data), 'byte'), offset, handle.hex())
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            result = self._server.write(file_obj, offset, data)
+
+            if inspect.isawaitable(result):
+                result = await cast(Awaitable[int], result)
+
+            return cast(int, result)
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_lstat(self, packet: SSHPacket) -> _SFTPOSAttrs:
+        """Process an incoming SFTP lstat request"""
+
+        path = packet.get_string()
+
+        flags = packet.get_uint32() if self._version >= 4 else 0
+
+        if self._version < 6:
+            packet.check_end()
+
+        self.logger.debug1('Received lstat for %s%s', path,
+                           ', flags=0x%08x' % flags if flags else '')
+
+        # Ignore flags for now, returning all available fields
+
+        result = self._server.lstat(path)
+
+        if inspect.isawaitable(result):
+            result = await cast(Awaitable[_SFTPOSAttrs], result)
+
+        result: _SFTPOSAttrs
+
+        return result
+
+    async def _process_fstat(self, packet: SSHPacket) -> _SFTPOSAttrs:
+        """Process an incoming SFTP fstat request"""
+
+        handle = packet.get_string()
+
+        flags = packet.get_uint32() if self._version >= 4 else 0
+
+        if self._version < 6:
+            packet.check_end()
+
+        self.logger.debug1('Received fstat for handle %s%s', handle.hex(),
+                           ', flags=0x%08x' % flags if flags else '')
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            # Ignore flags for now, returning all available fields
+            result = self._server.fstat(file_obj)
+
+            if inspect.isawaitable(result):
+                result = await cast(Awaitable[_SFTPOSAttrs], result)
+
+            result: _SFTPOSAttrs
+
+            return result
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_setstat(self, packet: SSHPacket) -> None:
+        """Process an incoming SFTP setstat request"""
+
+        path = packet.get_string()
+        attrs = SFTPAttrs.decode(packet, self._version)
+
+        if self._version < 6:
+            packet.check_end()
+
+        self.logger.debug1('Received setstat for %s%s', path, hide_empty(attrs))
+
+        result =
self._server.setstat(path, attrs) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_fsetstat(self, packet: SSHPacket) -> None: + """Process an incoming SFTP fsetstat request""" + + handle = packet.get_string() + attrs = SFTPAttrs.decode(packet, self._version) + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received fsetstat for handle %s%s', + handle.hex(), hide_empty(attrs)) + + file_obj = self._file_handles.get(handle) + + if file_obj: + result = self._server.fsetstat(file_obj, attrs) + + if inspect.isawaitable(result): + assert result is not None + await result + else: + raise SFTPInvalidHandle('Invalid file handle') + + async def _process_opendir(self, packet: SSHPacket) -> bytes: + """Process an incoming SFTP opendir request""" + + path = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received opendir for %s', path) + + handle = self._get_next_handle() + self._dir_handles[handle] = self._server.scandir(path) + return handle + + async def _process_readdir(self, packet: SSHPacket) -> _SFTPNames: + """Process an incoming SFTP readdir request""" + + handle = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received readdir for handle %s', handle.hex()) + + names = self._dir_handles.get(handle) + + if names: + count = 0 + result: List[SFTPName] = [] + + async for name in names: + if not name.longname and self._version == 3: + longname_result = self._server.format_longname(name) + + if inspect.isawaitable(longname_result): + assert longname_result is not None + await longname_result + + result.append(name) + count += 1 + + if count == _MAX_READDIR_NAMES: + break + + if result: + return result, count < _MAX_READDIR_NAMES + else: + raise SFTPEOFError + else: + raise SFTPInvalidHandle('Invalid file handle') + + async def _process_remove(self, packet: SSHPacket) -> None: + """Process an incoming SFTP remove request""" + + path = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received remove for %s', path) + + result = self._server.remove(path) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_mkdir(self, packet: SSHPacket) -> None: + """Process an incoming SFTP mkdir request""" + + path = packet.get_string() + attrs = SFTPAttrs.decode(packet, self._version) + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received mkdir for %s', path) + + result = self._server.mkdir(path, attrs) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_rmdir(self, packet: SSHPacket) -> None: + """Process an incoming SFTP rmdir request""" + + path = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received rmdir for %s', path) + + result = self._server.rmdir(path) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_realpath(self, packet: SSHPacket) -> _SFTPNames: + """Process an incoming SFTP realpath request""" + + path = packet.get_string() + + checkmsg = '' + compose_paths: List[bytes] = [] + + if self._version >= 6: + check = packet.get_byte() + + while packet: + compose_paths.append(packet.get_string()) + + try: + checkmsg = ', check=%s' % self._realpath_check_names[check] + except KeyError: + raise SFTPInvalidParameter('Invalid check value') from None + else: + check = FXRP_NO_CHECK + + 
self.logger.debug1('Received realpath for %s%s%s', path, + b', compose_path: %s' % b', '.join(compose_paths) + if compose_paths else b'', checkmsg) + + for cpath in compose_paths: + path = posixpath.join(path, cpath) + + result = self._server.realpath(path) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bytes], result) + + result: bytes + + attrs = SFTPAttrs() + + if check != FXRP_NO_CHECK: + try: + attrs = await self._server.convert_attrs( + self._server.stat(result)) + except (OSError, SFTPError): + if check == FXRP_STAT_ALWAYS: + raise + + return [SFTPName(result, attrs=attrs)], False + + async def _process_stat(self, packet: SSHPacket) -> _SFTPOSAttrs: + """Process an incoming SFTP stat request""" + + path = packet.get_string() + + flags = packet.get_uint32() if self._version >= 4 else 0 + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received stat for %s%s', path, + ', flags=0x%08x' % flags if flags else '') + + # Ignore flags for now, returning all available fields + result = self._server.stat(path) + + if inspect.isawaitable(result): + result = await cast(Awaitable[_SFTPOSAttrs], result) + + result: _SFTPOSAttrs + + return result + + async def _process_rename(self, packet: SSHPacket) -> None: + """Process an incoming SFTP rename request""" + + oldpath = packet.get_string() + newpath = packet.get_string() + + if self._version >= 5: + flags = packet.get_uint32() + flag_text = ', flags=0x%08x' % flags + else: + flags = 0 + flag_text = '' + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received rename request from %s to %s%s', + oldpath, newpath, flag_text) + + if flags: + result = self._server.posix_rename(oldpath, newpath) + else: + result = self._server.rename(oldpath, newpath) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_readlink(self, packet: SSHPacket) -> _SFTPNames: + """Process an incoming SFTP readlink request""" + + path = packet.get_string() + + if self._version < 6: + packet.check_end() + + self.logger.debug1('Received readlink for %s', path) + + result = self._server.readlink(path) + + if inspect.isawaitable(result): + result = await cast(Awaitable[bytes], result) + + result: bytes + + return [SFTPName(result)], False + + async def _process_symlink(self, packet: SSHPacket) -> None: + """Process an incoming SFTP symlink request""" + + if self._nonstandard_symlink: + oldpath = packet.get_string() + newpath = packet.get_string() + else: + newpath = packet.get_string() + oldpath = packet.get_string() + + packet.check_end() + + self.logger.debug1('Received symlink request from %s to %s', + oldpath, newpath) + + result = self._server.symlink(oldpath, newpath) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_link(self, packet: SSHPacket) -> None: + """Process an incoming SFTP hard link request""" + + newpath = packet.get_string() + oldpath = packet.get_string() + symlink = packet.get_boolean() + + if symlink: + self.logger.debug1('Received symlink request from %s to %s', + oldpath, newpath) + + result = self._server.symlink(oldpath, newpath) + else: + self.logger.debug1('Received hardlink request from %s to %s', + oldpath, newpath) + + result = self._server.link(oldpath, newpath) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _process_lock(self, packet: SSHPacket) -> None: + """Process an incoming SFTP byte range lock request""" + + handle = packet.get_string() 
+        offset = packet.get_uint64()
+        length = packet.get_uint64()
+        flags = packet.get_uint32()
+
+        self.logger.debug1('Received byte range lock request for '
+                           'handle %s, offset %d, length %d, '
+                           'flags 0x%04x', handle.hex(), offset,
+                           length, flags)
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            result = self._server.lock(file_obj, offset, length, flags)
+
+            if inspect.isawaitable(result):  # pragma: no branch
+                assert result is not None
+                await result
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_unlock(self, packet: SSHPacket) -> None:
+        """Process an incoming SFTP byte range unlock request"""
+
+        handle = packet.get_string()
+        offset = packet.get_uint64()
+        length = packet.get_uint64()
+
+        self.logger.debug1('Received byte range unlock request for '
+                           'handle %s, offset %d, length %d',
+                           handle.hex(), offset, length)
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            result = self._server.unlock(file_obj, offset, length)
+
+            if inspect.isawaitable(result):  # pragma: no branch
+                assert result is not None
+                await result
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_posix_rename(self, packet: SSHPacket) -> None:
+        """Process an incoming SFTP POSIX rename request"""
+
+        oldpath = packet.get_string()
+        newpath = packet.get_string()
+        packet.check_end()
+
+        self.logger.debug1('Received POSIX rename request from %s to %s',
+                           oldpath, newpath)
+
+        result = self._server.posix_rename(oldpath, newpath)
+
+        if inspect.isawaitable(result):
+            assert result is not None
+            await result
+
+    async def _process_statvfs(self, packet: SSHPacket) -> _SFTPOSVFSAttrs:
+        """Process an incoming SFTP statvfs request"""
+
+        path = packet.get_string()
+        packet.check_end()
+
+        self.logger.debug1('Received statvfs for %s', path)
+
+        result = self._server.statvfs(path)
+
+        if inspect.isawaitable(result):
+            result = await cast(Awaitable[_SFTPOSVFSAttrs], result)
+
+        result: _SFTPOSVFSAttrs
+
+        return result
+
+    async def _process_fstatvfs(self, packet: SSHPacket) -> _SFTPOSVFSAttrs:
+        """Process an incoming SFTP fstatvfs request"""
+
+        handle = packet.get_string()
+        packet.check_end()
+
+        self.logger.debug1('Received fstatvfs for handle %s', handle.hex())
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            result = self._server.fstatvfs(file_obj)
+
+            if inspect.isawaitable(result):
+                result = await cast(Awaitable[_SFTPOSVFSAttrs], result)
+
+            result: _SFTPOSVFSAttrs
+
+            return result
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    async def _process_openssh_link(self, packet: SSHPacket) -> None:
+        """Process an incoming SFTP hard link request"""
+
+        oldpath = packet.get_string()
+        newpath = packet.get_string()
+        packet.check_end()
+
+        self.logger.debug1('Received hardlink request from %s to %s',
+                           oldpath, newpath)
+
+        result = self._server.link(oldpath, newpath)
+
+        if inspect.isawaitable(result):
+            assert result is not None
+            await result
+
+    async def _process_fsync(self, packet: SSHPacket) -> None:
+        """Process an incoming SFTP fsync request"""
+
+        handle = packet.get_string()
+        packet.check_end()
+
+        self.logger.debug1('Received fsync for handle %s', handle.hex())
+
+        file_obj = self._file_handles.get(handle)
+
+        if file_obj:
+            result = self._server.fsync(file_obj)
+
+            if inspect.isawaitable(result):
+                assert result is not None
+                await result
+        else:
+            raise SFTPInvalidHandle('Invalid file handle')
+
+    _packet_handlers: Dict[Union[int, bytes], _SFTPPacketHandler] = {
+        FXP_OPEN: _process_open,
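+        # [Editor's note, not in the original source] Dispatch keys are
+        # either numeric FXP_* packet types or the bytes name of an
+        # OpenSSH extension (e.g. b'posix-rename@openssh.com'), since
+        # extended requests are looked up by name rather than by type.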
+ FXP_CLOSE: _process_close, + FXP_READ: _process_read, + FXP_WRITE: _process_write, + FXP_LSTAT: _process_lstat, + FXP_FSTAT: _process_fstat, + FXP_SETSTAT: _process_setstat, + FXP_FSETSTAT: _process_fsetstat, + FXP_OPENDIR: _process_opendir, + FXP_READDIR: _process_readdir, + FXP_REMOVE: _process_remove, + FXP_MKDIR: _process_mkdir, + FXP_RMDIR: _process_rmdir, + FXP_REALPATH: _process_realpath, + FXP_STAT: _process_stat, + FXP_RENAME: _process_rename, + FXP_READLINK: _process_readlink, + FXP_SYMLINK: _process_symlink, + FXP_LINK: _process_link, + FXP_BLOCK: _process_lock, + FXP_UNBLOCK: _process_unlock, + b'posix-rename@openssh.com': _process_posix_rename, + b'statvfs@openssh.com': _process_statvfs, + b'fstatvfs@openssh.com': _process_fstatvfs, + b'hardlink@openssh.com': _process_openssh_link, + b'fsync@openssh.com': _process_fsync + } + + async def run(self) -> None: + """Run an SFTP server""" + + assert self._reader is not None + + try: + packet = await self.recv_packet() + pkttype = packet.get_byte() + self.log_received_packet(pkttype, None, packet) + + if pkttype != FXP_INIT: + await self._cleanup(SFTPBadMessage('Expected init message')) + return + + version = packet.get_uint32() + rcvd_extensions: List[Tuple[bytes, bytes]] = [] + + if version == 3: + while packet: + name = packet.get_string() + data = packet.get_string() + rcvd_extensions.append((name, data)) + else: + packet.check_end() + except PacketDecodeError as exc: + await self._cleanup(SFTPBadMessage(str(exc))) + return + except Error as exc: + await self._cleanup(exc) + return + + self.logger.debug1('Received init, version=%d%s', version, + ', extensions:' if rcvd_extensions else '') + + self._log_extensions(rcvd_extensions) + + self._version = min(version, self._version) + + extensions: List[Tuple[bytes, bytes]] = [] + + ext_names = b''.join(String(name) for (name, _) in self._extensions) + + attrib_ext_names = b''.join(String(name) for name in + self._attrib_extensions) + + if self._version == 5: + supported = UInt32(self._supported_attr_mask) + \ + UInt32(self._supported_attrib_mask) + \ + UInt32(self._supported_open_flags) + \ + UInt32(self._supported_access_mask) + \ + UInt32(_MAX_SFTP_READ_SIZE) + ext_names + \ + attrib_ext_names + + extensions.append((b'supported', supported)) + elif self._version == 6: + acl_supported = UInt32(0) # No ACL support yet + + supported2 = UInt32(self._supported_attr_mask) + \ + UInt32(self._supported_attrib_mask) + \ + UInt32(self._supported_open_flags) + \ + UInt32(self._supported_access_mask) + \ + UInt32(_MAX_SFTP_READ_SIZE) + \ + UInt16(self._supported_open_block_vector) + \ + UInt16(self._supported_block_vector) + \ + UInt32(len(self._attrib_extensions)) + \ + attrib_ext_names + \ + UInt32(len(self._extensions)) + \ + ext_names + + extensions.append((b'acl-supported', acl_supported)) + extensions.append((b'supported2', supported2)) + + extensions.extend(self._extensions) + + self.logger.debug1('Sending version=%d%s', self._version, + ', extensions:' if extensions else '') + + self._log_extensions(extensions) + + sent_extensions: Iterable[bytes] = \ + (String(name) + String(data) for name, data in extensions) + + try: + self.send_packet(FXP_VERSION, None, UInt32(self._version), + *sent_extensions) + except SFTPError as exc: + await self._cleanup(exc) + return + + if self._version == 3: + # Check if the client has a buggy SYMLINK implementation + + client_version = cast(str, + self._reader.get_extra_info('client_version', '')) + + if any(name in client_version + for name in 
self._nonstandard_symlink_impls):
+                self.logger.debug1('Adjusting for non-standard symlink '
+                                   'implementation')
+                self._nonstandard_symlink = True
+
+        await self.recv_packets()
+
+
+class SFTPServer:
+    """SFTP server
+
+    Applications should subclass this when implementing an SFTP
+    server. The methods listed below should be implemented to
+    provide the desired application behavior.
+
+        .. note:: Any method can optionally be defined as a
+                  coroutine if that method needs to perform
+                  blocking operations to determine its result.
+
+    The `chan` object provided here is the :class:`SSHServerChannel`
+    instance this SFTP server is associated with. It can be queried to
+    determine which user the client authenticated as, environment
+    variables set on the channel when it was opened, and key and
+    certificate options or permissions associated with this session.
+
+        .. note:: In AsyncSSH 1.x, this first argument was an
+                  :class:`SSHServerConnection`, not an
+                  :class:`SSHServerChannel`. When moving to AsyncSSH
+                  2.x, subclasses of :class:`SFTPServer` which
+                  implement an __init__ method will need to be
+                  updated to account for this change, and pass this
+                  through to the parent.
+
+    If the `chroot` argument is specified when this object is
+    created, the default :meth:`map_path` and :meth:`reverse_map_path`
+    methods will enforce a virtual root directory starting in that
+    location, limiting access to only files within that directory
+    tree. This will also affect path names returned by the
+    :meth:`realpath` and :meth:`readlink` methods.
+
+    """
+
+    # The default implementation of a number of these methods don't need self
+    # pylint: disable=no-self-use
+
+    def __init__(self, chan: 'SSHServerChannel',
+                 chroot: Optional[bytes] = None):
+        self._chan = chan
+
+        self._chroot: Optional[bytes]
+
+        if chroot:
+            self._chroot = _from_local_path(os.path.realpath(chroot))
+        else:
+            self._chroot = None
+
+    @property
+    def channel(self) -> 'SSHServerChannel':
+        """The channel associated with this SFTP server session"""
+
+        return self._chan
+
+    @property
+    def connection(self) -> 'SSHServerConnection':
+        """The connection associated with this SFTP server session"""
+
+        return cast('SSHServerConnection', self._chan.get_connection())
+
+    @property
+    def env(self) -> Mapping[str, str]:
+        """The environment associated with this SFTP server session
+
+        This method returns the environment set by the client
+        when this SFTP session was opened.
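+
+        For example, if the client requested ``LANG`` via an env request
+        when opening the session, this might return (hypothetical
+        value)::
+
+            {'LANG': 'en_US.UTF-8'}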
+ + :returns: A dictionary containing the environment variables + set by the client + + """ + + return self._chan.get_environment() + + @property + def logger(self) -> SSHLogger: + """A logger associated with this SFTP server""" + + return self._chan.logger + + async def convert_attrs(self, result: MaybeAwait[_SFTPOSAttrs]) -> \ + SFTPAttrs: + """Convert stat result to SFTPAttrs""" + + if inspect.isawaitable(result): + result = await cast(Awaitable[_SFTPOSAttrs], result) + + result: _SFTPOSAttrs + + if isinstance(result, os.stat_result): + result = SFTPAttrs.from_local(result) + + result: SFTPAttrs + + return result + + async def _to_sftpname(self, parent: bytes, name: bytes) -> SFTPName: + """Construct an SFTPName for a filename in a directory""" + + path = posixpath.join(parent, name) + attrs = await self.convert_attrs(self.lstat(path)) + return SFTPName(name, attrs=attrs) + + def format_user(self, uid: Optional[int]) -> str: + """Return the user name associated with a uid + + This method returns a user name string to insert into + the `longname` field of an :class:`SFTPName` object. + + By default, it calls the Python :func:`pwd.getpwuid` + function if it is available, or returns the numeric + uid as a string if not. If there is no uid, it returns + an empty string. + + :param uid: + The uid value to look up + :type uid: `int` or `None` + + :returns: The formatted user name string + + """ + + return _lookup_user(uid) + + def format_group(self, gid: Optional[int]) -> str: + """Return the group name associated with a gid + + This method returns a group name string to insert into + the `longname` field of an :class:`SFTPName` object. + + By default, it calls the Python :func:`grp.getgrgid` + function if it is available, or returns the numeric + gid as a string if not. If there is no gid, it returns + an empty string. + + :param gid: + The gid value to look up + :type gid: `int` or `None` + + :returns: The formatted group name string + + """ + + return _lookup_group(gid) + + def format_longname(self, name: SFTPName) -> MaybeAwait[None]: + """Format the long name associated with an SFTP name + + This method fills in the `longname` field of a + :class:`SFTPName` object. By default, it generates + something similar to UNIX "ls -l" output. The `filename` + and `attrs` fields of the :class:`SFTPName` should + already be filled in before this method is called. 
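+
+        The generated long name is similar to "ls -l" output
+        (illustrative)::
+
+            -rw-r--r--    1 alice    users        1024 Jan 15 12:34 file.txt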
+ + :param name: + The :class:`SFTPName` instance to format the long name for + :type name: :class:`SFTPName` + + """ + + if name.attrs.permissions is not None: + mode = stat.filemode(name.attrs.permissions) + else: + mode = '' + + nlink = str(name.attrs.nlink) if name.attrs.nlink else '' + + user = self.format_user(name.attrs.uid) + group = self.format_group(name.attrs.gid) + + size = str(name.attrs.size) if name.attrs.size is not None else '' + + if name.attrs.mtime is not None: + now = time.time() + mtime = time.localtime(name.attrs.mtime) + modtime = time.strftime('%b ', mtime) + + try: + modtime += time.strftime('%e', mtime) + except ValueError: + modtime += time.strftime('%d', mtime) + + if now - 365*24*60*60/2 < name.attrs.mtime <= now: + modtime += time.strftime(' %H:%M', mtime) + else: + modtime += time.strftime(' %Y', mtime) + else: + modtime = '' + + detail = '{:10s} {:>4s} {:8s} {:8s} {:>8s} {:12s} '.format( + mode, nlink, user, group, size, modtime) + + name.longname = detail.encode('utf-8') + cast(bytes, name.filename) + + return None + + def map_path(self, path: bytes) -> bytes: + """Map the path requested by the client to a local path + + This method can be overridden to provide a custom mapping + from path names requested by the client to paths in the local + filesystem. By default, it will enforce a virtual "chroot" + if one was specified when this server was created. Otherwise, + path names are left unchanged, with relative paths being + interpreted based on the working directory of the currently + running process. + + :param path: + The path name to map + :type path: `bytes` + + :returns: bytes containing the local path name to operate on + + """ + + if self._chroot: + normpath = posixpath.normpath(posixpath.join(b'/', path)) + return posixpath.join(self._chroot, normpath[1:]) + else: + return path + + def reverse_map_path(self, path: bytes) -> bytes: + """Reverse map a local path into the path reported to the client + + This method can be overridden to provide a custom reverse + mapping for the mapping provided by :meth:`map_path`. By + default, it hides the portion of the local path associated + with the virtual "chroot" if one was specified. + + :param path: + The local path name to reverse map + :type path: `bytes` + + :returns: bytes containing the path name to report to the client + + """ + + if self._chroot: + if path == self._chroot: + return b'/' + elif path.startswith(self._chroot + b'/'): + return path[len(self._chroot):] + else: + raise SFTPNoSuchFile('File not found') + else: + return path + + def open(self, path: bytes, pflags: int, attrs: SFTPAttrs) -> \ + MaybeAwait[object]: + """Open a file to serve to a remote client + + This method returns a file object which can be used to read + and write data and get and set file attributes. + + The possible open mode flags and their meanings are: + + ========== ====================================================== + Mode Description + ========== ====================================================== + FXF_READ Open the file for reading. If neither FXF_READ nor + FXF_WRITE are set, this is the default. + FXF_WRITE Open the file for writing. If both this and FXF_READ + are set, open the file for both reading and writing. + FXF_APPEND Force writes to append data to the end of the file + regardless of seek position. + FXF_CREAT Create the file if it doesn't exist. Without this, + attempts to open a non-existent file will fail. + FXF_TRUNC Truncate the file to zero length if it already exists. 
+ FXF_EXCL Return an error when trying to open a file which + already exists. + ========== ====================================================== + + The attrs argument is used to set initial attributes of the + file if it needs to be created. Otherwise, this argument is + ignored. + + :param path: + The name of the file to open + :param pflags: + The access mode to use for the file (see above) + :param attrs: + File attributes to use if the file needs to be created + :type path: `bytes` + :type pflags: `int` + :type attrs: :class:`SFTPAttrs` + + :returns: A file object to use to access the file + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + if pflags & FXF_EXCL: + mode = 'xb' + elif pflags & FXF_APPEND: + mode = 'ab' + elif pflags & FXF_WRITE and not pflags & FXF_READ: + mode = 'wb' + else: + mode = 'rb' + + if pflags & FXF_READ and pflags & FXF_WRITE: + mode += '+' + flags = os.O_RDWR + elif pflags & FXF_WRITE: + flags = os.O_WRONLY + else: + flags = os.O_RDONLY + + if pflags & FXF_APPEND: + flags |= os.O_APPEND + + if pflags & FXF_CREAT: + flags |= os.O_CREAT + + if pflags & FXF_TRUNC: + flags |= os.O_TRUNC + + if pflags & FXF_EXCL: + flags |= os.O_EXCL + + try: + flags |= os.O_BINARY + except AttributeError: # pragma: no cover + pass + + perms = 0o666 if attrs.permissions is None else attrs.permissions + return open(_to_local_path(self.map_path(path)), mode, buffering=0, + opener=lambda path, _: os.open(path, flags, perms)) + + def open56(self, path: bytes, desired_access: int, flags: int, + attrs: SFTPAttrs) -> MaybeAwait[object]: + """Open a file to serve to a remote client (SFTPv5 and later) + + This method returns a file object which can be used to read + and write data and get and set file attributes. + + Supported desired_access bits include `ACE4_READ_DATA`, + `ACE4_WRITE_DATA`, `ACE4_APPEND_DATA`, `ACE4_READ_ATTRIBUTES`, + and `ACE4_WRITE_ATTRIBUTES`. + + Supported disposition bits in flags and their meanings are: + + ===================== ============================================ + Disposition Description + ===================== ============================================ + FXF_OPEN_EXISTING Open an existing file + FXF_OPEN_OR_CREATE Open an existing file or create a new one + FXF_CREATE_NEW Create a new file + FXF_CREATE_TRUNCATE Create a new file or truncate an existing + one + FXF_TRUNCATE_EXISTING Truncate an existing file + ===================== ============================================ + + Other supported flag bits are: + + ===================== ============================================ + Flag Description + ===================== ============================================ + FXF_APPEND_DATA Append data writes to the end of the file + ===================== ============================================ + + The attrs argument is used to set initial attributes of the + file if it needs to be created. Otherwise, this argument is + ignored. 
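+
+        For reference, the default implementation below maps these
+        dispositions onto ``os.open`` flags as follows::
+
+            FXF_CREATE_NEW         ->  O_CREAT | O_EXCL
+            FXF_CREATE_TRUNCATE    ->  O_CREAT | O_TRUNC
+            FXF_OPEN_OR_CREATE     ->  O_CREAT
+            FXF_TRUNCATE_EXISTING  ->  O_TRUNC
+            FXF_OPEN_EXISTING      ->  (no extra flags)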
+ + :param path: + The name of the file to open + :param desired_access: + The access mode to use for the file (see above) + :param flags: + The access flags to use for the file (see above) + :param attrs: + File attributes to use if the file needs to be created + :type path: `bytes` + :type desired_access: `int` + :type flags: `int` + :type attrs: :class:`SFTPAttrs` + + :returns: A file object to use to access the file + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + if desired_access & ACE4_READ_DATA and \ + desired_access & ACE4_WRITE_DATA: + open_flags = os.O_RDWR + elif desired_access & ACE4_WRITE_DATA: + open_flags = os.O_WRONLY + else: + open_flags = os.O_RDONLY + + disp = flags & FXF_ACCESS_DISPOSITION + + if disp == FXF_CREATE_NEW: + mode = 'xb' + open_flags |= os.O_CREAT | os.O_EXCL + elif disp == FXF_CREATE_TRUNCATE: + mode = 'wb' + open_flags |= os.O_CREAT | os.O_TRUNC + elif disp == FXF_OPEN_OR_CREATE: + mode = 'wb' + open_flags |= os.O_CREAT + elif disp == FXF_TRUNCATE_EXISTING: + mode = 'wb' + open_flags |= os.O_TRUNC + else: + mode = 'wb' if desired_access & ACE4_WRITE_DATA else 'rb' + + if desired_access & ACE4_APPEND_DATA or flags & FXF_APPEND_DATA: + mode = 'ab' + open_flags |= os.O_APPEND + + if desired_access & ACE4_READ_DATA and \ + desired_access & ACE4_WRITE_DATA: + mode += '+' + + try: + open_flags |= os.O_BINARY + except AttributeError: # pragma: no cover + pass + + perms = 0o666 if attrs.permissions is None else attrs.permissions + return open(_to_local_path(self.map_path(path)), mode, buffering=0, + opener=lambda path, _: os.open(path, open_flags, perms)) + + def close(self, file_obj: object) -> MaybeAwait[None]: + """Close an open file or directory + + :param file_obj: + The file or directory object to close + :type file_obj: file + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + file_obj.close() + return None + + def read(self, file_obj: object, offset: int, size: int) -> \ + MaybeAwait[bytes]: + """Read data from an open file + + :param file_obj: + The file to read from + :param offset: + The offset from the beginning of the file to begin reading + :param size: + The number of bytes to read + :type file_obj: file + :type offset: `int` + :type size: `int` + + :returns: bytes read from the file + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + file_obj.seek(offset) + return file_obj.read(size) + + def write(self, file_obj: object, offset: int, data: bytes) -> \ + MaybeAwait[int]: + """Write data to an open file + + :param file_obj: + The file to write to + :param offset: + The offset from the beginning of the file to begin writing + :param data: + The data to write to the file + :type file_obj: file + :type offset: `int` + :type data: `bytes` + + :returns: number of bytes written + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + file_obj.seek(offset) + return file_obj.write(data) + + def lstat(self, path: bytes) -> MaybeAwait[_SFTPOSAttrs]: + """Get attributes of a file, directory, or symlink + + This method queries the attributes of a file, directory, + or symlink. Unlike :meth:`stat`, this method should + return the attributes of a symlink itself rather than + the target of that link. 
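+
+        For example, given a symlink ``link -> target`` (hypothetical
+        paths), ``lstat(b'link')`` describes the link itself, while
+        :meth:`stat` on the same path describes ``target``.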
+ + :param path: + The path of the file, directory, or link to get attributes for + :type path: `bytes` + + :returns: An :class:`SFTPAttrs` or an os.stat_result containing + the file attributes + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + return os.lstat(_to_local_path(self.map_path(path))) + + def fstat(self, file_obj: object) -> MaybeAwait[_SFTPOSAttrs]: + """Get attributes of an open file + + :param file_obj: + The file to get attributes for + :type file_obj: file + + :returns: An :class:`SFTPAttrs` or an os.stat_result containing + the file attributes + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + file_obj.flush() + return os.fstat(file_obj.fileno()) + + def setstat(self, path: bytes, attrs: SFTPAttrs) -> MaybeAwait[None]: + """Set attributes of a file or directory + + This method sets attributes of a file or directory. If + the path provided is a symbolic link, the attributes + should be set on the target of the link. A subset of the + fields in `attrs` can be initialized and only those + attributes should be changed. + + :param path: + The path of the remote file or directory to set attributes for + :param attrs: + File attributes to set + :type path: `bytes` + :type attrs: :class:`SFTPAttrs` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + _setstat(_to_local_path(self.map_path(path)), attrs) + return None + + def fsetstat(self, file_obj: object, attrs: SFTPAttrs) -> MaybeAwait[None]: + """Set attributes of an open file + + :param file_obj: + The file to set attributes for + :param attrs: + File attributes to set on the file + :type file_obj: file + :type attrs: :class:`SFTPAttrs` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + file_obj.flush() + + if sys.platform == 'win32': # pragma: no cover + _setstat(file_obj.name, attrs) + else: + _setstat(file_obj.fileno(), attrs) + + return None + + async def scandir(self, path: bytes) -> AsyncIterator[SFTPName]: + """Return names and attributes of the files in a directory + + This function returns an async iterator of :class:`SFTPName` + entries corresponding to files in the requested directory. + + :param path: + The path of the directory to scan + :type path: `bytes` + + :returns: An async iterator of :class:`SFTPName` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + if hasattr(self, 'listdir'): + # Support backward compatibility with older AsyncSSH versions + # which allowed listdir() to be overridden, returning a list + # of either :class:`SFTPName` objects or plain filenames, in + # which case :meth:`lstat` is called to retrieve attribute + # information. 
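+            # [Editor's note, not in the original source] For instance, a
+            # subclass written against that older interface might have
+            # defined (hypothetical override):
+            #
+            #     def listdir(self, path):
+            #         return [b'.', b'..', b'data.txt']
+            #
+            # Plain bytes names are converted below via _to_sftpname(),
+            # which calls lstat() to fill in the attributes.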
+ + # pylint: disable=no-member + listdir_result = self.listdir(path) # type: ignore + + if inspect.isawaitable(listdir_result): + listdir_result = await cast( + Awaitable[Sequence[Union[bytes, SFTPName]]], + listdir_result) + + listdir_result: Sequence[Union[bytes, SFTPName]] + + for name in listdir_result: + if isinstance(name, bytes): + yield await self._to_sftpname(path, name) + else: + yield name + else: + for name in (b'.', b'..'): + yield await self._to_sftpname(path, name) + + with os.scandir(_to_local_path(self.map_path(path))) as entries: + for entry in entries: + filename = entry.name + + if sys.platform == 'win32': # pragma: no cover + filename = os.fsencode(filename) + + attrs = SFTPAttrs.from_local( + entry.stat(follow_symlinks=False)) + + yield SFTPName(filename, attrs=attrs) + + def remove(self, path: bytes) -> MaybeAwait[None]: + """Remove a file or symbolic link + + :param path: + The path of the file or link to remove + :type path: `bytes` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + os.remove(_to_local_path(self.map_path(path))) + return None + + def mkdir(self, path: bytes, attrs: SFTPAttrs) -> MaybeAwait[None]: + """Create a directory with the specified attributes + + :param path: + The path of where the new directory should be created + :param attrs: + The file attributes to use when creating the directory + :type path: `bytes` + :type attrs: :class:`SFTPAttrs` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + mode = 0o777 if attrs.permissions is None else attrs.permissions + os.mkdir(_to_local_path(self.map_path(path)), mode) + return None + + def rmdir(self, path: bytes) -> MaybeAwait[None]: + """Remove a directory + + :param path: + The path of the directory to remove + :type path: `bytes` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + os.rmdir(_to_local_path(self.map_path(path))) + return None + + def realpath(self, path: bytes) -> MaybeAwait[bytes]: + """Return the canonical version of a path + + :param path: + The path of the directory to canonicalize + :type path: `bytes` + + :returns: bytes containing the canonical path + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + path = os.path.realpath(_to_local_path(self.map_path(path))) + return self.reverse_map_path(_from_local_path(path)) + + def stat(self, path: bytes) -> MaybeAwait[_SFTPOSAttrs]: + """Get attributes of a file or directory, following symlinks + + This method queries the attributes of a file or directory. + If the path provided is a symbolic link, the returned + attributes should correspond to the target of the link. + + :param path: + The path of the remote file or directory to get attributes for + :type path: `bytes` + + :returns: An :class:`SFTPAttrs` or an os.stat_result containing + the file attributes + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + return os.stat(_to_local_path(self.map_path(path))) + + def rename(self, oldpath: bytes, newpath: bytes) -> MaybeAwait[None]: + """Rename a file, directory, or link + + This method renames a file, directory, or link. + + .. note:: This is a request for the standard SFTP version + of rename which will not overwrite the new path + if it already exists. The :meth:`posix_rename` + method will be called if the client requests the + POSIX behavior where an existing instance of the + new path is removed before the rename. 
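+
+        For example (hypothetical paths), if ``b'/tmp/new'`` already
+        exists, the default implementation raises
+        :exc:`SFTPFileAlreadyExists`, whereas :meth:`posix_rename` would
+        replace it.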
+ + :param oldpath: + The path of the file, directory, or link to rename + :param newpath: + The new name for this file, directory, or link + :type oldpath: `bytes` + :type newpath: `bytes` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + oldpath = _to_local_path(self.map_path(oldpath)) + newpath = _to_local_path(self.map_path(newpath)) + + if os.path.exists(newpath): + raise SFTPFileAlreadyExists('File already exists') + + os.rename(oldpath, newpath) + return None + + def readlink(self, path: bytes) -> MaybeAwait[bytes]: + """Return the target of a symbolic link + + :param path: + The path of the symbolic link to follow + :type path: `bytes` + + :returns: bytes containing the target path of the link + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + path = os.readlink(_to_local_path(self.map_path(path))) + return self.reverse_map_path(_from_local_path(path)) + + def symlink(self, oldpath: bytes, newpath: bytes) -> MaybeAwait[None]: + """Create a symbolic link + + :param oldpath: + The path the link should point to + :param newpath: + The path of where to create the symbolic link + :type oldpath: `bytes` + :type newpath: `bytes` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + if posixpath.isabs(oldpath): + oldpath = self.map_path(oldpath) + else: + newdir = posixpath.dirname(newpath) + abspath1 = self.map_path(posixpath.join(newdir, oldpath)) + + mapped_newdir = self.map_path(newdir) + abspath2 = os.path.join(mapped_newdir, oldpath) + + # Make sure the symlink doesn't point outside the chroot + if os.path.realpath(abspath1) != os.path.realpath(abspath2): + oldpath = os.path.relpath(abspath1, start=mapped_newdir) + + newpath = self.map_path(newpath) + + os.symlink(_to_local_path(oldpath), _to_local_path(newpath)) + return None + + def link(self, oldpath: bytes, newpath: bytes) -> MaybeAwait[None]: + """Create a hard link + + :param oldpath: + The path of the file the hard link should point to + :param newpath: + The path of where to create the hard link + :type oldpath: `bytes` + :type newpath: `bytes` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + oldpath = _to_local_path(self.map_path(oldpath)) + newpath = _to_local_path(self.map_path(newpath)) + + os.link(oldpath, newpath) + return None + + def lock(self, file_obj: object, offset: int, length: int, + flags: int) -> MaybeAwait[None]: + """Acquire a byte range lock on an open file""" + + raise SFTPOpUnsupported('Byte range locks not supported') + + def unlock(self, file_obj: object, offset: int, + length: int) -> MaybeAwait[None]: + """Release a byte range lock on an open file""" + + raise SFTPOpUnsupported('Byte range locks not supported') + + def posix_rename(self, oldpath: bytes, newpath: bytes) -> MaybeAwait[None]: + """Rename a file, directory, or link with POSIX semantics + + This method renames a file, directory, or link, removing + the prior instance of new path if it previously existed. 
+ + :param oldpath: + The path of the file, directory, or link to rename + :param newpath: + The new name for this file, directory, or link + :type oldpath: `bytes` + :type newpath: `bytes` + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + oldpath = _to_local_path(self.map_path(oldpath)) + newpath = _to_local_path(self.map_path(newpath)) + + os.replace(oldpath, newpath) + return None + + def statvfs(self, path: bytes) -> MaybeAwait[_SFTPOSVFSAttrs]: + """Get attributes of the file system containing a file + + :param path: + The path of the file system to get attributes for + :type path: `bytes` + + :returns: An :class:`SFTPVFSAttrs` or an os.statvfs_result + containing the file system attributes + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + try: + return os.statvfs(_to_local_path(self.map_path(path))) + except AttributeError: # pragma: no cover + raise SFTPOpUnsupported('statvfs not supported') from None + + def fstatvfs(self, file_obj: object) -> MaybeAwait[_SFTPOSVFSAttrs]: + """Return attributes of the file system containing an open file + + :param file_obj: + The open file to get file system attributes for + :type file_obj: file + + :returns: An :class:`SFTPVFSAttrs` or an os.statvfs_result + containing the file system attributes + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + + try: + return os.statvfs(file_obj.fileno()) + except AttributeError: # pragma: no cover + raise SFTPOpUnsupported('fstatvfs not supported') from None + + def fsync(self, file_obj: object) -> MaybeAwait[None]: + """Force file data to be written to disk + + :param file_obj: + The open file containing the data to flush to disk + :type file_obj: file + + :raises: :exc:`SFTPError` to return an error to the client + + """ + + file_obj = cast(_SFTPFileObj, file_obj) + os.fsync(file_obj.fileno()) + return None + + def exit(self) -> MaybeAwait[None]: + """Shut down this SFTP server""" + + return None + +class LocalFile: + """An async wrapper around local file I/O""" + + def __init__(self, file: _SFTPFileObj): + self._file = file + + async def __aenter__(self) -> 'LocalFile': # pragma: no cover + """Allow LocalFile to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> \ + bool: # pragma: no cover + """Wait for file close when used as an async context manager""" + + await self.close() + return False + + async def read(self, size: int, offset: int) -> bytes: + """Read data from the local file""" + + self._file.seek(offset) + return self._file.read(size) + + async def write(self, data: bytes, offset: int) -> int: + """Write data to the local file""" + + self._file.seek(offset) + return self._file.write(data) + + async def close(self) -> None: + """Close the local file""" + + self._file.close() + + +class LocalFS: + """An async wrapper around local filesystem access""" + + @staticmethod + def basename(path: bytes) -> bytes: + """Return the final component of a local file path""" + + return os.path.basename(path) + + def encode(self, path: _SFTPPath) -> bytes: + """Encode path name using filesystem native encoding + + This method has no effect if the path is already bytes. 
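+
+        For example (illustrative)::
+
+            fs.encode('logs/app.log')   # -> b'logs/app.log'
+            fs.encode(b'logs/app.log')  # -> b'logs/app.log' (unchanged)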
+ + """ + + # pylint: disable=no-self-use + + return os.fsencode(path) + + def compose_path(self, path: bytes, + parent: Optional[bytes] = None) -> bytes: + """Compose a path + + If parent is not specified, just encode the path. + + """ + + path = self.encode(path) + + return posixpath.join(parent, path) if parent else path + + async def stat(self, path: bytes) -> 'SFTPAttrs': + """Get attributes of a local file or directory, following symlinks""" + + return SFTPAttrs.from_local(os.stat(_to_local_path(path))) + + async def setstat(self, path: bytes, attrs: 'SFTPAttrs') -> None: + """Set attributes of a local file or directory""" + + _setstat(_to_local_path(path), attrs) + + async def exists(self, path: bytes) -> bool: + """Return if the local path exists and isn't a broken symbolic link""" + + return os.path.exists(_to_local_path(path)) + + async def isdir(self, path: bytes) -> bool: + """Return if the local path refers to a directory""" + + return os.path.isdir(_to_local_path(path)) + + async def scandir(self, path: bytes) -> AsyncIterator[SFTPName]: + """Return names and attributes of the files in a local directory""" + + with os.scandir(_to_local_path(path)) as entries: + for entry in entries: + filename = entry.name + + if sys.platform == 'win32': # pragma: no cover + filename = os.fsencode(filename) + + attrs = SFTPAttrs.from_local(entry.stat(follow_symlinks=False)) + yield SFTPName(filename, attrs=attrs) + + async def mkdir(self, path: bytes) -> None: + """Create a local directory with the specified attributes""" + + os.mkdir(_to_local_path(path)) + + async def readlink(self, path: bytes) -> bytes: + """Return the target of a local symbolic link""" + + return _from_local_path(os.readlink(_to_local_path(path))) + + async def symlink(self, oldpath: bytes, newpath: bytes) -> None: + """Create a local symbolic link""" + + os.symlink(_to_local_path(oldpath), _to_local_path(newpath)) + + @async_context_manager + async def open(self, path: bytes, mode: str) -> LocalFile: + """Open a local file""" + + # pylint: disable=unused-argument + + return LocalFile(open(_to_local_path(path), mode)) + +local_fs = LocalFS() + + +class SFTPServerFile: + """A wrapper around SFTPServer used to access files it manages""" + + def __init__(self, server: SFTPServer, file_obj: object): + self._server = server + self._file_obj = file_obj + + async def __aenter__(self) -> 'SFTPServerFile': # pragma: no cover + """Allow SFTPServerFile to be used as an async context manager""" + + return self + + async def __aexit__(self, _exc_type: Optional[Type[BaseException]], + _exc_value: Optional[BaseException], + _traceback: Optional[TracebackType]) -> \ + bool: # pragma: no cover + """Wait for client close when used as an async context manager""" + + await self.close() + return False + + async def read(self, size: int, offset: int) -> bytes: + """Read bytes from the file""" + + data = self._server.read(self._file_obj, offset, size) + + if inspect.isawaitable(data): + data = await cast(Awaitable[bytes], data) + + data: bytes + + return data + + async def write(self, data: bytes, offset: int) -> int: + """Write bytes to the file""" + + size = self._server.write(self._file_obj, offset, data) + + if inspect.isawaitable(size): + size = await cast(Awaitable[int], size) + + size: int + + return size + + async def close(self) -> None: + """Close a file managed by the associated SFTPServer""" + + result = self._server.close(self._file_obj) + + if inspect.isawaitable(result): + assert result is not None + await result + + +class 
SFTPServerFS: + """A wrapper around SFTPServer used to access its filesystem""" + + def __init__(self, server: SFTPServer): + self._server = server + + @staticmethod + def basename(path: bytes) -> bytes: + """Return the final component of a POSIX-style path""" + + return posixpath.basename(path) + + async def stat(self, path: bytes) -> SFTPAttrs: + """Get attributes of a file or directory, following symlinks""" + + attrs = self._server.stat(path) + + if inspect.isawaitable(attrs): + attrs = await cast(Awaitable[_SFTPOSAttrs], attrs) + + attrs: _SFTPOSAttrs + + if isinstance(attrs, os.stat_result): + attrs = SFTPAttrs.from_local(attrs) + + return attrs + + async def setstat(self, path: bytes, attrs: SFTPAttrs) -> None: + """Set attributes of a file or directory""" + + result = self._server.setstat(path, attrs) + + if inspect.isawaitable(result): + assert result is not None + await result + + async def _type(self, path: bytes) -> int: + """Return the file type of a path, or 0 if it can't be accessed""" + + try: + return (await self.stat(path)).type + except OSError as exc: + if exc.errno in (errno.ENOENT, errno.EACCES): + return FILEXFER_TYPE_UNKNOWN + else: + raise + except (SFTPNoSuchFile, SFTPNoSuchPath, SFTPPermissionDenied): + return FILEXFER_TYPE_UNKNOWN + + async def exists(self, path: bytes) -> bool: + """Return if a path exists""" + + return (await self._type(path)) != FILEXFER_TYPE_UNKNOWN + + async def isdir(self, path: bytes) -> bool: + """Return if the path refers to a directory""" + + return (await self._type(path)) == FILEXFER_TYPE_DIRECTORY + + def scandir(self, path: bytes) -> AsyncIterator[SFTPName]: + """Return names and attributes of the files in a directory""" + + return self._server.scandir(path) + + async def mkdir(self, path: bytes) -> None: + """Create a directory""" + + result = self._server.mkdir(path, SFTPAttrs()) + + if inspect.isawaitable(result): + assert result is not None + await result + + @async_context_manager + async def open(self, path: bytes, mode: str) -> SFTPServerFile: + """Open a file""" + + pflags, _ = _mode_to_pflags(mode) + file_obj = self._server.open(path, pflags, SFTPAttrs()) + + if inspect.isawaitable(file_obj): + file_obj = await cast(Awaitable[object], file_obj) + + return SFTPServerFile(self._server, file_obj) + + +async def start_sftp_client(conn: 'SSHClientConnection', + loop: asyncio.AbstractEventLoop, + reader: 'SSHReader[bytes]', + writer: 'SSHWriter[bytes]', + path_encoding: Optional[str], + path_errors: str, sftp_version: int) -> SFTPClient: + """Start an SFTP client""" + + handler = SFTPClientHandler(loop, reader, writer, sftp_version) + + handler.logger.info('Starting SFTP client') + + await handler.start() + + conn.create_task(handler.recv_packets(), handler.logger) + + return SFTPClient(handler, path_encoding, path_errors) + + +def run_sftp_server(sftp_server: SFTPServer, reader: 'SSHReader[bytes]', + writer: 'SSHWriter[bytes]', + sftp_version: int) -> Awaitable[None]: + """Return a handler for an SFTP server session""" + + handler = SFTPServerHandler(sftp_server, reader, writer, sftp_version) + + handler.logger.info('Starting SFTP server') + + return handler.run() diff --git a/venv/lib/python3.12/site-packages/asyncssh/sk.py b/venv/lib/python3.12/site-packages/asyncssh/sk.py new file mode 100644 index 0000000..969bff9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/sk.py @@ -0,0 +1,277 @@ +# Copyright (c) 2019-2022 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""U2F security key handler""" + +from hashlib import sha256 +import hmac +import time +from typing import Callable, List, Mapping, NoReturn, Optional +from typing import Sequence, Tuple, TypeVar, cast + + +_PollResult = TypeVar('_PollResult') +_SKResidentKey = Tuple[int, str, bytes, bytes] + + +_CTAP1_POLL_INTERVAL = 0.1 + +_dummy_hash = 32 * b'\0' + +# Flags +SSH_SK_USER_PRESENCE_REQD = 0x01 + +# Algorithms +SSH_SK_ECDSA = -7 +SSH_SK_ED25519 = -8 + + +def _decode_public_key(alg: int, public_key: Mapping[int, object]) -> bytes: + """Decode algorithm and public value from a CTAP public key""" + + result = cast(bytes, public_key[-2]) + + if alg == SSH_SK_ED25519: + return result + else: + return b'\x04' + result + cast(bytes, public_key[-3]) + + +def _ctap1_poll(poll_interval: float, func: Callable[..., _PollResult], + *args: object) -> _PollResult: + """Poll until a CTAP1 response is received""" + + while True: + try: + return func(*args) + except ApduError as exc: + if exc.code != APDU.USE_NOT_SATISFIED: + raise + + time.sleep(poll_interval) + + +def _ctap1_enroll(dev: 'CtapHidDevice', alg: int, + application: bytes) -> Tuple[bytes, bytes]: + """Enroll a new security key using CTAP version 1""" + + ctap1 = Ctap1(dev) + + if alg != SSH_SK_ECDSA: + raise ValueError('Unsupported algorithm') + + app_hash = sha256(application).digest() + registration = _ctap1_poll(_CTAP1_POLL_INTERVAL, ctap1.register, + _dummy_hash, app_hash) + + return registration.public_key, registration.key_handle + + +def _ctap2_enroll(dev: 'CtapHidDevice', alg: int, application: bytes, + user: str, pin: Optional[str], + resident: bool) -> Tuple[bytes, bytes]: + """Enroll a new security key using CTAP version 2""" + + ctap2 = Ctap2(dev) + + application = application.decode('utf-8') + rp = {'id': application, 'name': application} + user_cred = {'id': user.encode('utf-8'), 'name': user} + key_params = [{'type': 'public-key', 'alg': alg}] + options = {'rk': resident} + + pin_protocol: Optional[PinProtocolV1] + pin_auth: Optional[bytes] + + if pin: + pin_protocol = PinProtocolV1() + pin_token = ClientPin(ctap2, pin_protocol).get_pin_token(pin) + pin_auth = hmac.new(pin_token, _dummy_hash, sha256).digest()[:16] + else: + pin_protocol = None + pin_auth = None + + pin_version = pin_protocol.VERSION if pin_protocol else None + cred = ctap2.make_credential(_dummy_hash, rp, user_cred, key_params, + options=options, pin_uv_param=pin_auth, + pin_uv_protocol=pin_version) + cdata = cred.auth_data.credential_data + + # pylint: disable=no-member + return _decode_public_key(alg, cdata.public_key), cdata.credential_id + + +def _ctap1_sign(dev: 'CtapHidDevice', message_hash: bytes, application: bytes, + key_handle: bytes) -> Tuple[int, int, bytes]: + """Sign a message with a security key using CTAP version 1""" + + ctap1 = Ctap1(dev) + + app_hash = sha256(application).digest() + + auth_response = 
_ctap1_poll(_CTAP1_POLL_INTERVAL, ctap1.authenticate, + message_hash, app_hash, key_handle) + + flags = auth_response[0] + counter = int.from_bytes(auth_response[1:5], 'big') + sig = auth_response[5:] + + return flags, counter, sig + + +def _ctap2_sign(dev: 'CtapHidDevice', message_hash: bytes, + application: bytes, key_handle: bytes, + touch_required: bool) -> Tuple[int, int, bytes]: + """Sign a message with a security key using CTAP version 2""" + + ctap2 = Ctap2(dev) + + application = application.decode('utf-8') + allow_creds = [{'type': 'public-key', 'id': key_handle}] + options = {'up': touch_required} + + assertion = ctap2.get_assertions(application, message_hash, allow_creds, + options=options)[0] + + auth_data = assertion.auth_data + + return auth_data.flags, auth_data.counter, assertion.signature + + +def sk_enroll(alg: int, application: bytes, user: str, + pin: Optional[str], resident: bool) -> Tuple[bytes, bytes]: + """Enroll a new security key""" + + try: + dev = next(CtapHidDevice.list_devices()) + except StopIteration: + raise ValueError('No security key found') from None + + try: + return _ctap2_enroll(dev, alg, application, user, pin, resident) + except CtapError as exc: + if exc.code == CtapError.ERR.PUAT_REQUIRED: + raise ValueError('PIN required') from None + elif exc.code == CtapError.ERR.PIN_INVALID: + raise ValueError('Invalid PIN') from None + else: + raise ValueError(str(exc)) from None + except ValueError as exc: + try: + return _ctap1_enroll(dev, alg, application) + except ApduError as exc: + raise ValueError(str(exc)) from None + finally: + dev.close() + + +def sk_sign(message_hash: bytes, application: bytes, key_handle: bytes, + flags: int) -> Tuple[int, int, bytes]: + """Sign a message with a security key""" + + touch_required = bool(flags & SSH_SK_USER_PRESENCE_REQD) + + for dev in CtapHidDevice.list_devices(): + try: + return _ctap2_sign(dev, message_hash, application, + key_handle, touch_required) + except CtapError as exc: + if exc.code != CtapError.ERR.NO_CREDENTIALS: + raise ValueError(str(exc)) from None + except ValueError: + try: + return _ctap1_sign(dev, message_hash, application, key_handle) + except ApduError as exc: + if exc.code != APDU.WRONG_DATA: + raise ValueError(str(exc)) from None + finally: + dev.close() + + raise ValueError('Security key credential not found') + + +def sk_get_resident(application: bytes, user: Optional[str], + pin: str) -> Sequence[_SKResidentKey]: + """Get keys resident on a security key""" + + app_hash = sha256(application).digest() + result: List[_SKResidentKey] = [] + + for dev in CtapHidDevice.list_devices(): + try: + ctap2 = Ctap2(dev) + + pin_protocol = PinProtocolV1() + pin_token = ClientPin(ctap2, pin_protocol).get_pin_token(pin) + cred_mgmt = CredentialManagement(ctap2, pin_protocol, pin_token) + + for cred in cred_mgmt.enumerate_creds(app_hash): + user_info = cast(Mapping[str, object], + cred[CredentialManagement.RESULT.USER]) + name = cast(str, user_info['name']) + + if user and name != user: + continue + + cred_id = cast(Mapping[str, object], + cred[CredentialManagement.RESULT.CREDENTIAL_ID]) + key_handle = cast(bytes, cred_id['id']) + + public_key = cast(Mapping[int, object], + cred[CredentialManagement.RESULT.PUBLIC_KEY]) + + alg = cast(int, public_key[3]) + public_value = _decode_public_key(alg, public_key) + + result.append((alg, name, public_value, key_handle)) + except CtapError as exc: + if exc.code == CtapError.ERR.NO_CREDENTIALS: + continue + elif exc.code == CtapError.ERR.PIN_INVALID: + raise 
ValueError('Invalid PIN') from None + elif exc.code == CtapError.ERR.PIN_NOT_SET: + raise ValueError('PIN not set') from None + else: + raise ValueError(str(exc)) from None + finally: + dev.close() + + return result + + +try: + from fido2.hid import CtapHidDevice + from fido2.ctap import CtapError + from fido2.ctap1 import Ctap1, APDU, ApduError + from fido2.ctap2 import Ctap2, ClientPin, PinProtocolV1 + from fido2.ctap2 import CredentialManagement + + sk_available = True +except (ImportError, OSError, AttributeError): # pragma: no cover + sk_available = False + + def _sk_not_available(*args: object, **kwargs: object) -> NoReturn: + """Report that security key support is unavailable""" + + raise ValueError('Security key support not available') + + sk_enroll = _sk_not_available + sk_sign = _sk_not_available + sk_get_resident = _sk_not_available diff --git a/venv/lib/python3.12/site-packages/asyncssh/sk_ecdsa.py b/venv/lib/python3.12/site-packages/asyncssh/sk_ecdsa.py new file mode 100644 index 0000000..107de5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/sk_ecdsa.py @@ -0,0 +1,213 @@ +# Copyright (c) 2019-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""U2F ECDSA public key encryption handler""" + +from hashlib import sha256 +from typing import Optional, Tuple, cast + +from .asn1 import der_encode, der_decode +from .crypto import ECDSAPublicKey +from .packet import Byte, MPInt, String, UInt32, SSHPacket +from .public_key import KeyExportError, SSHKey, SSHOpenSSHCertificateV01 +from .public_key import register_public_key_alg, register_certificate_alg +from .public_key import register_sk_alg +from .sk import SSH_SK_ECDSA, SSH_SK_USER_PRESENCE_REQD, sk_enroll, sk_sign + + +_PrivateKeyArgs = Tuple[bytes, bytes, bytes, int, bytes, bytes] +_PublicKeyArgs = Tuple[bytes, bytes, bytes] + + +class _SKECDSAKey(SSHKey): + """Handler for elliptic curve public key encryption""" + + _key: ECDSAPublicKey + + use_executor = True + + def __init__(self, curve_id: bytes, public_value: bytes, + application: bytes, flags: int = 0, + key_handle: Optional[bytes] = None, reserved: bytes = b''): + super().__init__(ECDSAPublicKey.construct(curve_id, public_value)) + + self.algorithm = b'sk-ecdsa-sha2-' + curve_id + b'@openssh.com' + self.sig_algorithms = (self.algorithm,) + self.all_sig_algorithms = set(self.sig_algorithms) + + self._application = application + self._flags = flags + self._key_handle = key_handle + self._reserved = reserved + + def __eq__(self, other: object) -> bool: + # This isn't protected access - both objects are _SKECDSAKey instances + # pylint: disable=protected-access + + return (isinstance(other, type(self)) and + self._key.curve_id == other._key.curve_id and + self._key.public_value == other._key.public_value and + self._application == other._application and + self._flags == other._flags and + self._key_handle == other._key_handle and + 
self._reserved == other._reserved) + + def __hash__(self) -> int: + return hash((self._key.curve_id, self._key.public_value, + self._application, self._flags, self._key_handle, + self._reserved)) + + @classmethod + def generate(cls, algorithm: bytes, *, # type: ignore + application: str = 'ssh:', user: str = 'AsyncSSH', + pin: Optional[str] = None, resident: bool = False, + touch_required: bool = True) -> '_SKECDSAKey': + """Generate a new SK ECDSA private key""" + + # pylint: disable=arguments-differ + + application = application.encode('utf-8') + flags = SSH_SK_USER_PRESENCE_REQD if touch_required else 0 + + public_value, key_handle = sk_enroll(SSH_SK_ECDSA, application, + user, pin, resident) + + # Strip prefix and suffix of algorithm to get curve_id + return cls(algorithm[14:-12], public_value, application, + flags, key_handle, b'') + + @classmethod + def make_private(cls, key_params: object) -> SSHKey: + """Construct a U2F ECDSA private key""" + + curve_id, public_value, application, flags, key_handle, reserved = \ + cast(_PrivateKeyArgs, key_params) + + return cls(curve_id, public_value, application, + flags, key_handle, reserved) + + @classmethod + def make_public(cls, key_params: object) -> SSHKey: + """Construct a U2F ECDSA public key""" + + curve_id, public_value, application = cast(_PublicKeyArgs, key_params) + + return cls(curve_id, public_value, application) + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> _PrivateKeyArgs: + """Decode an SSH format SK ECDSA private key""" + + curve_id = packet.get_string() + public_value = packet.get_string() + application = packet.get_string() + flags = packet.get_byte() + key_handle = packet.get_string() + reserved = packet.get_string() + + return curve_id, public_value, application, flags, key_handle, reserved + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> _PublicKeyArgs: + """Decode an SSH format SK ECDSA public key""" + + curve_id = packet.get_string() + public_value = packet.get_string() + application = packet.get_string() + + return curve_id, public_value, application + + def encode_ssh_private(self) -> bytes: + """Encode an SSH format SK ECDSA private key""" + + if self._key_handle is None: + raise KeyExportError('Key is not private') + + return b''.join((String(self._key.curve_id), + String(self._key.public_value), + String(self._application), Byte(self._flags), + String(self._key_handle), String(self._reserved))) + + def encode_ssh_public(self) -> bytes: + """Encode an SSH format SK ECDSA public key""" + + return b''.join((String(self._key.curve_id), + String(self._key.public_value), + String(self._application))) + + def encode_agent_cert_private(self) -> bytes: + """Encode U2F ECDSA certificate private key data for agent""" + + if self._key_handle is None: + raise KeyExportError('Key is not private') + + return b''.join((String(self._application), Byte(self._flags), + String(self._key_handle), String(self._reserved))) + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Compute an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + if self._key_handle is None: + raise ValueError('Key handle needed for signing') + + flags, counter, sig = sk_sign(sha256(data).digest(), self._application, + self._key_handle, self._flags) + + r, s = cast(Tuple[int, int], der_decode(sig)) + + return String(MPInt(r) + MPInt(s)) + Byte(flags) + UInt32(counter) + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Verify 
an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + sig = packet.get_string() + flags = packet.get_byte() + counter = packet.get_uint32() + packet.check_end() + + if self._touch_required and not flags & SSH_SK_USER_PRESENCE_REQD: + return False + + packet = SSHPacket(sig) + r = packet.get_mpint() + s = packet.get_mpint() + packet.check_end() + + sig = der_encode((r, s)) + + return self._key.verify(sha256(self._application).digest() + + Byte(flags) + UInt32(counter) + + sha256(data).digest(), sig, 'sha256') + + +_algorithm = b'sk-ecdsa-sha2-nistp256@openssh.com' +_cert_algorithm = b'sk-ecdsa-sha2-nistp256-cert-v01@openssh.com' + +register_sk_alg(SSH_SK_ECDSA, _SKECDSAKey, b'nistp256') + +register_public_key_alg(_algorithm, _SKECDSAKey, True, (_algorithm,)) + +register_certificate_alg(1, _algorithm, _cert_algorithm, + _SKECDSAKey, SSHOpenSSHCertificateV01, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/sk_eddsa.py b/venv/lib/python3.12/site-packages/asyncssh/sk_eddsa.py new file mode 100644 index 0000000..7ca8da7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/sk_eddsa.py @@ -0,0 +1,188 @@ +# Copyright (c) 2019-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""U2F EdDSA public key encryption handler""" + +from hashlib import sha256 +from typing import Optional, Tuple, cast + +from .crypto import EdDSAPublicKey, ed25519_available +from .packet import Byte, String, UInt32, SSHPacket +from .public_key import KeyExportError, SSHKey, SSHOpenSSHCertificateV01 +from .public_key import register_public_key_alg, register_certificate_alg +from .public_key import register_sk_alg +from .sk import SSH_SK_ED25519, SSH_SK_USER_PRESENCE_REQD, sk_enroll, sk_sign + + +_PrivateKeyArgs = Tuple[bytes, bytes, int, bytes, bytes] +_PublicKeyArgs = Tuple[bytes, bytes] + + +class _SKEd25519Key(SSHKey): + """Handler for U2F Ed25519 public key encryption""" + + _key: EdDSAPublicKey + + algorithm = b'sk-ssh-ed25519@openssh.com' + sig_algorithms = (algorithm,) + all_sig_algorithms = set(sig_algorithms) + use_executor = True + + def __init__(self, public_value: bytes, application: bytes, + flags: int = 0, key_handle: Optional[bytes] = None, + reserved: bytes = b''): + super().__init__(EdDSAPublicKey.construct(b'ed25519', public_value)) + + self._application = application + self._flags = flags + self._key_handle = key_handle + self._reserved = reserved + + def __eq__(self, other: object) -> bool: + # This isn't protected access - both objects are _SKEd25519Key instances + # pylint: disable=protected-access + + return (isinstance(other, type(self)) and + self._key.public_value == other._key.public_value and + self._application == other._application and + self._flags == other._flags and + self._key_handle == other._key_handle and + self._reserved == other._reserved) + + def __hash__(self) -> int: + return 
hash((self._key.public_value, self._application, self._flags, + self._key_handle, self._reserved)) + + @classmethod + def generate(cls, algorithm: bytes, *, # type: ignore + application: str = 'ssh:', user: str = 'AsyncSSH', + pin: Optional[str] = None, resident: bool = False, + touch_required: bool = True) -> '_SKEd25519Key': + """Generate a new U2F Ed25519 private key""" + + # pylint: disable=arguments-differ + + application = application.encode('utf-8') + flags = SSH_SK_USER_PRESENCE_REQD if touch_required else 0 + + public_value, key_handle = sk_enroll(SSH_SK_ED25519, application, + user, pin, resident) + + return cls(public_value, application, flags, key_handle, b'') + + @classmethod + def make_private(cls, key_params: object) -> SSHKey: + """Construct a U2F Ed25519 private key""" + + public_value, application, flags, key_handle, reserved = \ + cast(_PrivateKeyArgs, key_params) + + return cls(public_value, application, flags, key_handle, reserved) + + @classmethod + def make_public(cls, key_params: object) -> SSHKey: + """Construct a U2F Ed25519 public key""" + + public_value, application = cast(_PublicKeyArgs, key_params) + + return cls(public_value, application) + + @classmethod + def decode_ssh_private(cls, packet: SSHPacket) -> _PrivateKeyArgs: + """Decode an SSH format U2F Ed25519 private key""" + + public_value = packet.get_string() + application = packet.get_string() + flags = packet.get_byte() + key_handle = packet.get_string() + reserved = packet.get_string() + + return public_value, application, flags, key_handle, reserved + + @classmethod + def decode_ssh_public(cls, packet: SSHPacket) -> _PublicKeyArgs: + """Decode an SSH format U2F Ed25519 public key""" + + public_value = packet.get_string() + application = packet.get_string() + + return public_value, application + + def encode_ssh_private(self) -> bytes: + """Encode an SSH format U2F Ed25519 private key""" + + if self._key_handle is None: + raise KeyExportError('Key is not private') + + return b''.join((String(self._key.public_value), + String(self._application), Byte(self._flags), + String(self._key_handle), String(self._reserved))) + + def encode_ssh_public(self) -> bytes: + """Encode an SSH format U2F Ed25519 public key""" + + return b''.join((String(self._key.public_value), + String(self._application))) + + def encode_agent_cert_private(self) -> bytes: + """Encode U2F Ed25519 certificate private key data for agent""" + + return self.encode_ssh_private() + + def sign_ssh(self, data: bytes, sig_algorithm: bytes) -> bytes: + """Compute an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + if self._key_handle is None: + raise ValueError('Key handle needed for signing') + + flags, counter, sig = sk_sign(sha256(data).digest(), self._application, + self._key_handle, self._flags) + + return String(sig) + Byte(flags) + UInt32(counter) + + def verify_ssh(self, data: bytes, sig_algorithm: bytes, + packet: SSHPacket) -> bool: + """Verify an SSH-encoded signature of the specified data""" + + # pylint: disable=unused-argument + + sig = packet.get_string() + flags = packet.get_byte() + counter = packet.get_uint32() + packet.check_end() + + if self._touch_required and not flags & SSH_SK_USER_PRESENCE_REQD: + return False + + return self._key.verify(sha256(self._application).digest() + + Byte(flags) + UInt32(counter) + + sha256(data).digest(), sig) + + +if ed25519_available: # pragma: no branch + register_sk_alg(SSH_SK_ED25519, _SKEd25519Key) + + 
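# --- Illustrative sketch (not part of AsyncSSH) ----------------------
# The signature blob built by sign_ssh() above is an SSH string holding
# the raw Ed25519 signature, followed by one flags byte and a big-endian
# 32-bit counter, and the message the authenticator actually signs is
# sha256(application) + flags + counter + sha256(data), exactly as
# verify_ssh() reconstructs. The helpers below decode and rebuild those
# values; their names are hypothetical.

import struct
from hashlib import sha256 as _sha256

def _split_sk_ed25519_sig(blob: bytes) -> Tuple[bytes, int, int]:
    """Split an SK Ed25519 signature blob into (sig, flags, counter)"""

    (siglen,) = struct.unpack_from('>I', blob)
    sig = blob[4:4 + siglen]
    flags, counter = struct.unpack_from('>BI', blob, 4 + siglen)
    return sig, flags, counter

def _sk_signed_message(application: bytes, flags: int, counter: int,
                       data: bytes) -> bytes:
    """Rebuild the byte string the security key signed"""

    return (_sha256(application).digest() + bytes((flags,)) +
            struct.pack('>I', counter) + _sha256(data).digest())
# ----------------------------------------------------------------------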
register_public_key_alg(b'sk-ssh-ed25519@openssh.com', _SKEd25519Key, True) + + register_certificate_alg(1, b'sk-ssh-ed25519@openssh.com', + b'sk-ssh-ed25519-cert-v01@openssh.com', + _SKEd25519Key, SSHOpenSSHCertificateV01, True) diff --git a/venv/lib/python3.12/site-packages/asyncssh/socks.py b/venv/lib/python3.12/site-packages/asyncssh/socks.py new file mode 100644 index 0000000..960cda6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/socks.py @@ -0,0 +1,247 @@ +# Copyright (c) 2018-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SOCKS forwarding support""" + +from ipaddress import ip_address +from typing import TYPE_CHECKING, Callable, Optional + +from .forward import SSHForwarderCoro, SSHLocalForwarder +from .session import DataType + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .connection import SSHConnection + + +_RecvHandler = Optional[Callable[[bytes], None]] + + +SOCKS4 = 0x04 +SOCKS5 = 0x05 + +SOCKS_CONNECT = 0x01 + +SOCKS4_OK = 0x5a +SOCKS5_OK = 0x00 + +SOCKS5_AUTH_NONE = 0x00 + +SOCKS5_ADDR_IPV4 = 0x01 +SOCKS5_ADDR_HOSTNAME = 0x03 +SOCKS5_ADDR_IPV6 = 0x04 + +SOCKS4_OK_RESPONSE = bytes((0, SOCKS4_OK, 0, 0, 0, 0, 0, 0)) +SOCKS5_OK_RESPONSE_HDR = bytes((SOCKS5, SOCKS5_OK, 0)) + +_socks5_addr_len = { SOCKS5_ADDR_IPV4: 4, SOCKS5_ADDR_IPV6: 16 } + + +class SSHSOCKSForwarder(SSHLocalForwarder): + """SOCKS dynamic port forwarding connection handler""" + + def __init__(self, conn: 'SSHConnection', coro: SSHForwarderCoro): + super().__init__(conn, coro) + + self._inpbuf = b'' + self._bytes_needed = 2 + self._recv_handler: _RecvHandler = self._recv_version + self._addrtype = 0 + self._host = '' + self._port = 0 + + def _connect(self) -> None: + """Send request to open a new tunnel connection""" + + assert self._transport is not None + + self._recv_handler = None + + orig_host, orig_port = self._transport.get_extra_info('peername')[:2] + self.forward(self._host, self._port, orig_host, orig_port) + + def _send_socks4_ok(self) -> None: + """Send SOCKS4 success response""" + + assert self._transport is not None + + self._transport.write(SOCKS4_OK_RESPONSE) + + def _send_socks5_ok(self) -> None: + """Send SOCKS5 success response""" + + assert self._transport is not None + + addrlen = _socks5_addr_len[self._addrtype] + 2 + + self._transport.write(SOCKS5_OK_RESPONSE_HDR + + bytes((self._addrtype,)) + + addrlen * b'\0') + + def _recv_version(self, data: bytes) -> None: + """Parse SOCKS version""" + + if data[0] == SOCKS4: + if data[1] == SOCKS_CONNECT: + self._bytes_needed = 6 + self._recv_handler = self._recv_socks4_addr + else: + self.close() + elif data[0] == SOCKS5: + self._bytes_needed = data[1] + self._recv_handler = self._recv_socks5_authlist + else: + self.close() + + def _recv_socks4_addr(self, data: bytes) -> None: + """Parse SOCKSv4 address and port""" + + self._port = (data[0] << 8) + data[1] + + # If address is 
0.0.0.x, read a hostname later + if data[2:5] != b'\0\0\0' or data[5] == 0: + self._host = str(ip_address(data[2:])) + + self._bytes_needed = -1 + self._recv_handler = self._recv_socks4_user + + def _recv_socks4_user(self, data: bytes) -> None: + """Parse SOCKSv4 username""" + + # pylint: disable=unused-argument + + if self._host: + self._send_socks4_ok() + self._connect() + else: + self._bytes_needed = -1 + self._recv_handler = self._recv_socks4_hostname + + def _recv_socks4_hostname(self, data: bytes) -> None: + """Parse SOCKSv4 hostname""" + + try: + self._host = data.decode('utf-8') + except UnicodeDecodeError: + self.close() + return + + self._send_socks4_ok() + self._connect() + + def _recv_socks5_authlist(self, data: bytes) -> None: + """Parse SOCKSv5 list of authentication methods""" + + assert self._transport is not None + + if SOCKS5_AUTH_NONE in data: + self._transport.write(bytes((SOCKS5, SOCKS5_AUTH_NONE))) + + self._bytes_needed = 4 + self._recv_handler = self._recv_socks5_command + else: + self.close() + + def _recv_socks5_command(self, data: bytes) -> None: + """Parse SOCKSv5 command""" + + if data[0] == SOCKS5 and data[1] == SOCKS_CONNECT and data[2] == 0: + if data[3] == SOCKS5_ADDR_HOSTNAME: + self._bytes_needed = 1 + self._recv_handler = self._recv_socks5_hostlen + self._addrtype = SOCKS5_ADDR_IPV4 + else: + addrlen = _socks5_addr_len.get(data[3]) + + if addrlen: + self._bytes_needed = addrlen + self._recv_handler = self._recv_socks5_addr + self._addrtype = data[3] + else: + self.close() + else: + self.close() + + def _recv_socks5_addr(self, data: bytes) -> None: + """Parse SOCKSv5 address""" + + self._host = str(ip_address(data)) + + self._bytes_needed = 2 + self._recv_handler = self._recv_socks5_port + + def _recv_socks5_hostlen(self, data: bytes) -> None: + """Parse SOCKSv5 host length""" + + self._bytes_needed = data[0] + self._recv_handler = self._recv_socks5_host + + def _recv_socks5_host(self, data: bytes) -> None: + """Parse SOCKSv5 host""" + + try: + self._host = data.decode('utf-8') + except UnicodeDecodeError: + self.close() + return + + self._bytes_needed = 2 + self._recv_handler = self._recv_socks5_port + + def _recv_socks5_port(self, data: bytes) -> None: + """Parse SOCKSv5 port""" + + self._port = (data[0] << 8) + data[1] + self._send_socks5_ok() + self._connect() + + def data_received(self, data: bytes, datatype: DataType = None) -> None: + """Handle incoming data from the SOCKS client""" + + if self._recv_handler: + self._inpbuf += data + + while self._recv_handler: # type: ignore[truthy-function] + if self._bytes_needed < 0: + idx = self._inpbuf.find(b'\0') + if idx >= 0: + data = self._inpbuf[:idx] + self._inpbuf = self._inpbuf[idx+1:] + self._recv_handler(data) + elif len(self._inpbuf) > 255: + # SOCKSv4 user or hostname too long + self.close() + return + else: + return + else: + if len(self._inpbuf) >= self._bytes_needed: + data = self._inpbuf[:self._bytes_needed] + self._inpbuf = self._inpbuf[self._bytes_needed:] + self._recv_handler(data) + else: + return + + data = self._inpbuf + self._inpbuf = b'' + + if data: + super().data_received(data, datatype) diff --git a/venv/lib/python3.12/site-packages/asyncssh/stream.py b/venv/lib/python3.12/site-packages/asyncssh/stream.py new file mode 100644 index 0000000..c0fc266 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/stream.py @@ -0,0 +1,827 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
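Seen from the client side, the SOCKS5 state machine above corresponds to three short exchanges: a greeting offering authentication methods, a method-selection reply, and a CONNECT request. A minimal sketch of a hostname CONNECT through such a listener (illustrative only; it assumes a dynamic-forwarding listener such as one created by `conn.forward_socks()`, and it glosses over short `recv()` reads):

import socket
import struct

def socks5_connect(proxy_host: str, proxy_port: int,
                   dest_host: str, dest_port: int) -> socket.socket:
    sock = socket.create_connection((proxy_host, proxy_port))

    # Greeting: version 5, one method offered, "no authentication"
    sock.sendall(bytes((0x05, 0x01, 0x00)))
    if sock.recv(2) != bytes((0x05, 0x00)):
        raise OSError('SOCKS5 auth negotiation failed')

    # CONNECT request: ver, cmd, reserved, addr type 3 (hostname),
    # length-prefixed host, big-endian port
    host = dest_host.encode('idna')
    sock.sendall(bytes((0x05, 0x01, 0x00, 0x03, len(host))) + host +
                 struct.pack('>H', dest_port))

    # Reply: ver, status, reserved, addr type, then bound addr + port
    reply = sock.recv(4)
    if reply[:2] != bytes((0x05, 0x00)):
        raise OSError('SOCKS5 connect failed')

    if reply[3] == 0x01:            # IPv4 bound address
        sock.recv(4 + 2)
    elif reply[3] == 0x04:          # IPv6 bound address
        sock.recv(16 + 2)
    else:                           # length-prefixed hostname
        sock.recv(sock.recv(1)[0] + 2)

    return sock                     # now carries the tunneled stream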
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH stream handlers""" + +import asyncio +import inspect +import re +from typing import TYPE_CHECKING, Any, AnyStr, AsyncIterator +from typing import Callable, Dict, Generic, Iterable, List +from typing import Optional, Pattern, Set, Tuple, Union, cast + +from .constants import EXTENDED_DATA_STDERR +from .logging import SSHLogger +from .misc import MaybeAwait, BreakReceived, SignalReceived +from .misc import SoftEOFReceived, TerminalSizeChanged +from .session import DataType, SSHClientSession, SSHServerSession +from .session import SSHTCPSession, SSHUNIXSession +from .sftp import SFTPServer, run_sftp_server +from .scp import run_scp_server + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHChannel + from .connection import SSHConnection + + +if TYPE_CHECKING: + _WaiterFuture = asyncio.Future[None] +else: + _WaiterFuture = asyncio.Future + +_RecvBuf = List[Union[AnyStr, Exception]] +_RecvBufMap = Dict[DataType, _RecvBuf[AnyStr]] +_ReadLocks = Dict[DataType, asyncio.Lock] +_ReadWaiters = Dict[DataType, Optional[_WaiterFuture]] +_DrainWaiters = Dict[DataType, Set[_WaiterFuture]] + +SSHSocketSessionFactory = Callable[['SSHReader', 'SSHWriter'], + MaybeAwait[None]] +_OptSocketSessionFactory = Optional[SSHSocketSessionFactory] + +SSHServerSessionFactory = Callable[['SSHReader', 'SSHWriter', + 'SSHWriter'], MaybeAwait[None]] +_OptServerSessionFactory = Optional[SSHServerSessionFactory] + +SFTPServerFactory = Callable[['SSHChannel[bytes]'], SFTPServer] +_OptSFTPServerFactory = Optional[SFTPServerFactory] + + +_NEWLINE = object() + + +class SSHReader(Generic[AnyStr]): + """SSH read stream handler""" + + def __init__(self, session: 'SSHStreamSession[AnyStr]', + chan: 'SSHChannel[AnyStr]', datatype: DataType = None): + self._session: 'SSHStreamSession[AnyStr]' = session + self._chan: 'SSHChannel[AnyStr]' = chan + self._datatype = datatype + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + """Allow SSHReader to be an async iterator""" + + while not self.at_eof(): + yield await self.readline() + + @property + def channel(self) -> 'SSHChannel[AnyStr]': + """The SSH channel associated with this stream""" + + return self._chan + + @property + def logger(self) -> SSHLogger: + """The SSH logger associated with this stream""" + + return self._chan.logger + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Return additional information about this stream + + This method returns extra information about the channel + associated with this stream. See :meth:`get_extra_info() + ` on :class:`SSHClientChannel` + for additional information. 
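           For example (illustrative; the keys available depend on
           the underlying channel and connection)::

               peername = reader.get_extra_info('peername')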
+ + """ + + return self._chan.get_extra_info(name, default) + + def feed_data(self, data: AnyStr) -> None: + """Feed data to the associated session + + This method feeds data to the SSH session associated with + this stream, providing compatibility with the + :meth:`feed_data() ` method + on :class:`asyncio.StreamReader`. This is mostly useful + for testing. + + """ + + self._session.data_received(data, self._datatype) + + def feed_eof(self) -> None: + """Feed EOF to the associated session + + This method feeds an end-of-file indication to the SSH session + associated with this stream, providing compatibility with the + :meth:`feed_eof() ` method + on :class:`asyncio.StreamReader`. This is mostly useful + for testing. + + """ + + self._session.eof_received() + + async def read(self, n: int = -1) -> AnyStr: + """Read data from the stream + + This method is a coroutine which reads up to `n` bytes + or characters from the stream. If `n` is not provided or + set to `-1`, it reads until EOF or a signal is received. + + If EOF is received and the receive buffer is empty, an + empty `bytes` or `str` object is returned. + + If the next data in the stream is a signal, the signal is + delivered as a raised exception. + + .. note:: Unlike traditional `asyncio` stream readers, + the data will be delivered as either `bytes` or + a `str` depending on whether an encoding was + specified when the underlying channel was opened. + + """ + + return await self._session.read(n, self._datatype, exact=False) + + async def readline(self) -> AnyStr: + """Read one line from the stream + + This method is a coroutine which reads one line, ending in + `'\\n'`. + + If EOF is received before `'\\n'` is found, the partial + line is returned. If EOF is received and the receive buffer + is empty, an empty `bytes` or `str` object is returned. + + If the next data in the stream is a signal, the signal is + delivered as a raised exception. + + .. note:: In Python 3.5 and later, :class:`SSHReader` objects + can also be used as async iterators, returning input + data one line at a time. + + """ + + try: + return await self.readuntil(_NEWLINE) + except asyncio.IncompleteReadError as exc: + return cast(AnyStr, exc.partial) + + async def readuntil(self, separator: object, + max_separator_len = 0) -> AnyStr: + """Read data from the stream until `separator` is seen + + This method is a coroutine which reads from the stream until + the requested separator is seen. If a match is found, the + returned data will include the separator at the end. + + The `separator` argument can be a single `bytes` or `str` + value, a sequence of multiple `bytes` or `str` values, + or a compiled regex (`re.Pattern`) to match against, + returning data as soon as a matching separator is found + in the stream. + + When passing a regex pattern as the separator, the + `max_separator_len` argument should be set to the + maximum length of an expected separator match. This + can greatly improve performance, by minimizing how far + back into the stream must be searched for a match. + When passing literal separators to match against, the + max separator length will be set automatically. + + .. note:: For best results, a separator regex should + both begin and end with data which is as + unique as possible, and should not start or + end with optional or repeated elements. + Otherwise, you run the risk of failing to + match parts of a separator when it is split + across multiple reads. 
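        For example (illustrative; assumes this reader's channel was
        opened with an encoding, so separators are `str` values)::

            import re

            # Read through a literal CRLF delimiter
            line = await reader.readuntil('\\r\\n')

            # Accept CRLF or bare LF, bounding the separator search
            # window to at most 2 characters
            line = await reader.readuntil(re.compile('\\r?\\n'), 2)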
+ + If EOF or a signal is received before a match occurs, an + :exc:`IncompleteReadError ` + is raised and its `partial` attribute will contain the + data in the stream prior to the EOF or signal. + + If the next data in the stream is a signal, the signal is + delivered as a raised exception. + + """ + + return await self._session.readuntil(separator, self._datatype, + max_separator_len) + + async def readexactly(self, n: int) -> AnyStr: + """Read an exact amount of data from the stream + + This method is a coroutine which reads exactly n bytes or + characters from the stream. + + If EOF or a signal is received in the stream before `n` + bytes are read, an :exc:`IncompleteReadError + ` is raised and its `partial` + attribute will contain the data before the EOF or signal. + + If the next data in the stream is a signal, the signal is + delivered as a raised exception. + + """ + + return await self._session.read(n, self._datatype, exact=True) + + def at_eof(self) -> bool: + """Return whether the stream is at EOF + + This method returns `True` when EOF has been received and + all data in the stream has been read. + + """ + + return self._session.at_eof(self._datatype) + + def get_redirect_info(self) -> Tuple['SSHStreamSession[AnyStr]', DataType]: + """Get information needed to redirect from this SSHReader""" + + return self._session, self._datatype + + +class SSHWriter(Generic[AnyStr]): + """SSH write stream handler""" + + def __init__(self, session: 'SSHStreamSession[AnyStr]', + chan: 'SSHChannel[AnyStr]', datatype: DataType = None): + self._session: 'SSHStreamSession[AnyStr]' = session + self._chan: 'SSHChannel[AnyStr]' = chan + self._datatype = datatype + + @property + def channel(self) -> 'SSHChannel[AnyStr]': + """The SSH channel associated with this stream""" + + return self._chan + + @property + def logger(self) -> SSHLogger: + """The SSH logger associated with this stream""" + + return self._chan.logger + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Return additional information about this stream + + This method returns extra information about the channel + associated with this stream. See :meth:`get_extra_info() + ` on :class:`SSHClientChannel` + for additional information. + + """ + + return self._chan.get_extra_info(name, default) + + def can_write_eof(self) -> bool: + """Return whether the stream supports :meth:`write_eof`""" + + return self._chan.can_write_eof() + + def close(self) -> None: + """Close the channel + + .. note:: After this is called, no data can be read or written + from any of the streams associated with this channel. + + """ + + return self._chan.close() + + def is_closing(self) -> bool: + """Return if the stream is closing or is closed""" + + return self._chan.is_closing() + + async def wait_closed(self) -> None: + """Wait until the stream is closed + + This should be called after :meth:`close` to wait until + the underlying connection is closed. + + """ + + await self._chan.wait_closed() + + async def drain(self) -> None: + """Wait until the write buffer on the channel is flushed + + This method is a coroutine which blocks the caller if the + stream is currently paused for writing, returning when + enough data has been sent on the channel to allow writing + to resume. This can be used to avoid buffering an excessive + amount of data in the channel's send buffer. 
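        For example (illustrative)::

            writer.write(data)
            await writer.drain()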
+ + """ + + await self._session.drain(self._datatype) + + def write(self, data: AnyStr) -> None: + """Write data to the stream + + This method writes bytes or characters to the stream. + + .. note:: Unlike traditional `asyncio` stream writers, + the data must be supplied as either `bytes` or + a `str` depending on whether an encoding was + specified when the underlying channel was opened. + + """ + + return self._chan.write(data, self._datatype) + + def writelines(self, list_of_data: Iterable[AnyStr]) -> None: + """Write a collection of data to the stream""" + + return self._chan.writelines(list_of_data, self._datatype) + + def write_eof(self) -> None: + """Write EOF on the channel + + This method sends an end-of-file indication on the channel, + after which no more data can be written. + + .. note:: On an :class:`SSHServerChannel` where multiple + output streams are created, writing EOF on one + stream signals EOF for all of them, since it + applies to the channel as a whole. + + """ + + return self._chan.write_eof() + + def get_redirect_info(self) -> Tuple['SSHStreamSession[AnyStr]', DataType]: + """Get information needed to redirect to this SSHWriter""" + + return self._session, self._datatype + + +class SSHStreamSession(Generic[AnyStr]): + """SSH stream session handler""" + + def __init__(self) -> None: + self._chan: Optional['SSHChannel[AnyStr]'] = None + self._conn: Optional['SSHConnection'] = None + self._encoding: Optional[str] = None + self._errors = 'strict' + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._limit = 0 + self._exception: Optional[Exception] = None + self._eof_received = False + self._connection_lost = False + self._read_paused = False + self._write_paused = False + self._recv_buf_len = 0 + self._recv_buf: _RecvBufMap[AnyStr] = {None: []} + self._read_locks: _ReadLocks = {None: asyncio.Lock()} + self._read_waiters: _ReadWaiters = {None: None} + self._drain_waiters: _DrainWaiters = {None: set()} + + async def _block_read(self, datatype: DataType) -> None: + """Wait for more data to arrive on the stream""" + + try: + assert self._loop is not None + waiter: _WaiterFuture = self._loop.create_future() + self._read_waiters[datatype] = waiter + await waiter + finally: + self._read_waiters[datatype] = None + + def _unblock_read(self, datatype: DataType) -> None: + """Signal that more data has arrived on the stream""" + + waiter = self._read_waiters[datatype] + if waiter and not waiter.done(): + waiter.set_result(None) + + def _should_block_drain(self, datatype: DataType) -> bool: + """Return whether output is still being written to the channel""" + + # pylint: disable=unused-argument + + return self._write_paused and not self._connection_lost + + def _unblock_drain(self, datatype: DataType) -> None: + """Signal that more data can be written on the stream""" + + if not self._should_block_drain(datatype): + for waiter in self._drain_waiters[datatype]: + if not waiter.done(): # pragma: no branch + waiter.set_result(None) + + def _should_pause_reading(self) -> bool: + """Return whether to pause reading from the channel""" + + return bool(self._limit) and self._recv_buf_len >= self._limit + + def _maybe_pause_reading(self) -> bool: + """Pause reading if necessary""" + + if not self._read_paused and self._should_pause_reading(): + assert self._chan is not None + self._read_paused = True + self._chan.pause_reading() + return True + else: + return False + + def _maybe_resume_reading(self) -> bool: + """Resume reading if necessary""" + + if self._read_paused and not 
self._should_pause_reading(): + assert self._chan is not None + self._read_paused = False + self._chan.resume_reading() + return True + else: + return False + + def connection_made(self, chan: 'SSHChannel[AnyStr]') -> None: + """Handle a newly opened channel""" + + self._chan = chan + self._conn = chan.get_connection() + self._encoding, self._errors = chan.get_encoding() + self._loop = chan.get_loop() + self._limit = self._chan.get_recv_window() + + for datatype in chan.get_read_datatypes(): + self._recv_buf[datatype] = [] + self._read_locks[datatype] = asyncio.Lock() + self._read_waiters[datatype] = None + + for datatype in chan.get_write_datatypes(): + self._drain_waiters[datatype] = set() + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Handle an incoming channel close""" + + self._connection_lost = True + self._exception = exc + + if not self._eof_received: + if exc: + for datatype in self._read_waiters: + self._recv_buf[datatype].append(exc) + + self.eof_received() + + for datatype in self._drain_waiters: + self._unblock_drain(datatype) + + def data_received(self, data: AnyStr, datatype: DataType) -> None: + """Handle incoming data on the channel""" + + self._recv_buf[datatype].append(data) + self._recv_buf_len += len(data) + self._unblock_read(datatype) + self._maybe_pause_reading() + + def eof_received(self) -> bool: + """Handle an incoming end of file on the channel""" + + self._eof_received = True + + for datatype in self._read_waiters: + self._unblock_read(datatype) + + return True + + def at_eof(self, datatype: DataType) -> bool: + """Return whether end of file has been received on the channel""" + + return self._eof_received and not self._recv_buf[datatype] + + def pause_writing(self) -> None: + """Handle a request to pause writing on the channel""" + + self._write_paused = True + + def resume_writing(self) -> None: + """Handle a request to resume writing on the channel""" + + self._write_paused = False + + for datatype in self._drain_waiters: + self._unblock_drain(datatype) + + async def read(self, n: int, datatype: DataType, exact: bool) -> AnyStr: + """Read data from the channel""" + + recv_buf = self._recv_buf[datatype] + data: List[AnyStr] = [] + break_read = False + + async with self._read_locks[datatype]: + while True: + while recv_buf and n != 0: + if isinstance(recv_buf[0], Exception): + if data: + break_read = True + break + else: + exc = cast(Exception, recv_buf.pop(0)) + + if isinstance(exc, SoftEOFReceived): + n = 0 + break + else: + raise exc + + l = len(recv_buf[0]) + + if l > n > 0: + data.append(recv_buf[0][:n]) + recv_buf[0] = recv_buf[0][n:] + self._recv_buf_len -= n + n = 0 + break + + data.append(cast(AnyStr, recv_buf.pop(0))) + self._recv_buf_len -= l + n -= l + + if self._maybe_resume_reading(): + continue + + if n == 0 or (n > 0 and data and not exact) or \ + (n < 0 and recv_buf) or \ + self._eof_received or break_read: + break + + await self._block_read(datatype) + + result = cast(AnyStr, '' if self._encoding else b'').join(data) + + if n > 0 and exact: + raise asyncio.IncompleteReadError(cast(bytes, result), + len(result) + n) + + return result + + async def readuntil(self, separator: object, datatype: DataType, + max_separator_len: int) -> AnyStr: + """Read data from the channel until a separator is seen""" + + if not separator: + raise ValueError('Separator cannot be empty') + + buf = cast(AnyStr, '' if self._encoding else b'') + recv_buf = self._recv_buf[datatype] + + if separator is _NEWLINE: + seplen = 1 + separators = 
cast(AnyStr, '\n' if self._encoding else b'\n') + pat = re.compile(separators) + elif isinstance(separator, (bytes, str)): + seplen = len(separator) + pat = re.compile(re.escape(cast(AnyStr, separator))) + elif isinstance(separator, Pattern): + seplen = max_separator_len + pat = cast(Pattern[AnyStr], separator) + else: + bar = cast(AnyStr, '|' if self._encoding else b'|') + seplist = list(cast(Iterable[AnyStr], separator)) + seplen = max(len(sep) for sep in seplist) + separators = bar.join(re.escape(sep) for sep in seplist) + pat = re.compile(separators) + + curbuf = 0 + buflen = 0 + + async with self._read_locks[datatype]: + while True: + while curbuf < len(recv_buf): + if isinstance(recv_buf[curbuf], Exception): + if buf: + recv_buf[:curbuf] = [] + self._recv_buf_len -= buflen + raise asyncio.IncompleteReadError( + cast(bytes, buf), None) + else: + exc = recv_buf.pop(0) + + if isinstance(exc, SoftEOFReceived): + return buf + else: + raise cast(Exception, exc) + + newbuf = cast(AnyStr, recv_buf[curbuf]) + buf += newbuf + start = 0 if seplen == 0 else max(buflen + 1 - seplen, 0) + + match = pat.search(buf, start) + if match: + idx = match.end() + recv_buf[:curbuf] = [] + recv_buf[0] = buf[idx:] + buf = buf[:idx] + self._recv_buf_len -= idx + + if not recv_buf[0]: + recv_buf.pop(0) + + self._maybe_resume_reading() + return buf + + buflen += len(newbuf) + curbuf += 1 + + if self._read_paused or self._eof_received: + recv_buf[:curbuf] = [] + self._recv_buf_len -= buflen + self._maybe_resume_reading() + raise asyncio.IncompleteReadError(cast(bytes, buf), None) + + await self._block_read(datatype) + + async def drain(self, datatype: DataType) -> None: + """Wait for data written to the channel to drain""" + + while self._should_block_drain(datatype): + try: + assert self._loop is not None + waiter: _WaiterFuture = self._loop.create_future() + self._drain_waiters[datatype].add(waiter) + await waiter + finally: + self._drain_waiters[datatype].remove(waiter) + + if self._connection_lost: + exc = self._exception + + if not exc and self._write_paused: + exc = BrokenPipeError() + + if exc: + raise exc + + +class SSHClientStreamSession(SSHStreamSession[AnyStr], + SSHClientSession[AnyStr]): + """SSH client stream session handler""" + + +class SSHServerStreamSession(SSHStreamSession[AnyStr], + SSHServerSession[AnyStr]): + """SSH server stream session handler""" + + def __init__(self, session_factory: _OptServerSessionFactory, + sftp_factory: _OptSFTPServerFactory = None, + sftp_version = 0, allow_scp = False): + super().__init__() + + self._session_factory = session_factory + self._sftp_factory = sftp_factory + self._sftp_version = sftp_version + self._allow_scp = allow_scp and bool(sftp_factory) + + def _init_sftp_server(self) -> SFTPServer: + """Initialize an SFTP server for this stream to use""" + + assert self._chan is not None + + self._chan.set_encoding(None) + self._encoding = None + + assert self._sftp_factory is not None + return self._sftp_factory(cast('SSHChannel[bytes]', self._chan)) + + def shell_requested(self) -> bool: + """Return whether a shell can be requested""" + + return bool(self._session_factory) + + def exec_requested(self, command: str) -> bool: + """Return whether execution of a command can be requested""" + + # Avoid incorrect pylint suggestion to use ternary + # pylint: disable=consider-using-ternary + + return ((self._allow_scp and command.startswith('scp ')) or + bool(self._session_factory)) + + def subsystem_requested(self, subsystem: str) -> bool: + """Return whether 
starting a subsystem can be requested""" + + if subsystem == 'sftp': + return bool(self._sftp_factory) + else: + return bool(self._session_factory) + + def session_started(self) -> None: + """Start a session for this newly opened server channel""" + + assert self._chan is not None + + command = self._chan.get_command() + + stdin = SSHReader[AnyStr](self, self._chan) + stdout = SSHWriter[AnyStr](self, self._chan) + stderr = SSHWriter[AnyStr](self, self._chan, EXTENDED_DATA_STDERR) + + handler: MaybeAwait[None] + + if self._chan.get_subsystem() == 'sftp': + stdin_bytes = cast(SSHReader[bytes], stdin) + stdout_bytes = cast(SSHWriter[bytes], stdout) + + handler = run_sftp_server(self._init_sftp_server(), + stdin_bytes, stdout_bytes, + self._sftp_version) + elif self._allow_scp and command and command.startswith('scp '): + stdin_bytes = cast(SSHReader[bytes], stdin) + stdout_bytes = cast(SSHWriter[bytes], stdout) + stderr_bytes = cast(SSHWriter[bytes], stderr) + + handler = run_scp_server(self._init_sftp_server(), command, + stdin_bytes, stdout_bytes, stderr_bytes) + else: + assert self._session_factory is not None + handler = self._session_factory(stdin, stdout, stderr) + + if inspect.isawaitable(handler): + assert self._conn is not None + assert handler is not None + self._conn.create_task(handler, stdin.logger) + + def exception_received(self, exc: Exception) -> None: + """Handle an incoming exception on the channel""" + + self._recv_buf[None].append(exc) + self._unblock_read(None) + + def break_received(self, msec: int) -> bool: + """Handle an incoming break on the channel""" + + self.exception_received(BreakReceived(msec)) + return True + + def signal_received(self, signal: str) -> None: + """Handle an incoming signal on the channel""" + + self.exception_received(SignalReceived(signal)) + + def soft_eof_received(self) -> None: + """Handle an incoming soft EOF on the channel""" + + self.exception_received(SoftEOFReceived()) + + def terminal_size_changed(self, width: int, height: int, + pixwidth: int, pixheight: int) -> None: + """Handle an incoming terminal size change on the channel""" + + self.exception_received(TerminalSizeChanged(width, height, + pixwidth, pixheight)) + + +class SSHSocketStreamSession(SSHStreamSession[AnyStr]): + """Socket stream session handler""" + + def __init__(self, session_factory: _OptSocketSessionFactory = None): + super().__init__() + + self._session_factory = session_factory + + def session_started(self) -> None: + """Start a session for this newly opened socket channel""" + + if self._session_factory: + assert self._chan is not None + reader = SSHReader[AnyStr](self, self._chan) + writer = SSHWriter[AnyStr](self, self._chan) + + handler = self._session_factory(reader, writer) + + if inspect.isawaitable(handler): + assert self._conn is not None + assert handler is not None + self._conn.create_task(handler, reader.logger) + + +class SSHTCPStreamSession(SSHSocketStreamSession[AnyStr], + SSHTCPSession[AnyStr]): + """TCP stream session handler""" + + +class SSHUNIXStreamSession(SSHSocketStreamSession[AnyStr], + SSHUNIXSession[AnyStr]): + """UNIX stream session handler""" diff --git a/venv/lib/python3.12/site-packages/asyncssh/subprocess.py b/venv/lib/python3.12/site-packages/asyncssh/subprocess.py new file mode 100644 index 0000000..3b1c30f --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/subprocess.py @@ -0,0 +1,318 @@ +# Copyright (c) 2019-2023 by Ron Frederick and others. 
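For reference, the session factory that SSHServerStreamSession above dispatches shell and exec requests to is a coroutine taking the three streams created in session_started(). A minimal sketch (illustrative; for example, a handler returned from SSHServer.session_requested()):

async def handle_session(stdin, stdout, stderr):
    # Echo every input line back until the client sends EOF.
    # Breaks, signals, and terminal size changes surface here as
    # raised exceptions (see exception_received() above).
    async for line in stdin:
        stdout.write(line)

    stdout.channel.exit(0)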
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""SSH subprocess handlers""" + +from typing import TYPE_CHECKING, Any, AnyStr, Callable +from typing import Dict, Generic, Iterable, Optional + +from .constants import EXTENDED_DATA_STDERR +from .process import SSHClientProcess +from .session import DataType + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHChannel, SSHClientChannel + + +SubprocessFactory = Callable[[], 'SSHSubprocessProtocol'] + + +class SSHSubprocessPipe(Generic[AnyStr]): + """SSH subprocess pipe""" + + def __init__(self, chan: 'SSHClientChannel[AnyStr]', + datatype: DataType = None): + self._chan: 'SSHClientChannel[AnyStr]' = chan + self._datatype = datatype + + def close(self) -> None: + """Shut down the remote process""" + + self._chan.close() + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Return additional information about the remote process + + This method returns extra information about the channel + associated with this subprocess. See :meth:`get_extra_info() + ` on :class:`SSHClientChannel` + for additional information. + + """ + + return self._chan.get_extra_info(name, default) + + +class SSHSubprocessReadPipe(SSHSubprocessPipe[AnyStr]): + """SSH subprocess pipe reader""" + + def pause_reading(self) -> None: + """Pause delivery of incoming data from the remote process""" + + self._chan.pause_reading() + + def resume_reading(self) -> None: + """Resume delivery of incoming data from the remote process""" + + self._chan.resume_reading() + + +class SSHSubprocessWritePipe(SSHSubprocessPipe[AnyStr]): + """SSH subprocess pipe writer""" + + def abort(self) -> None: + """Forcibly close the channel to the remote process""" + + self._chan.abort() + + def can_write_eof(self) -> bool: + """Return whether the pipe supports :meth:`write_eof`""" + + return self._chan.can_write_eof() + + def get_write_buffer_size(self) -> int: + """Return the current size of the pipe's output buffer""" + + return self._chan.get_write_buffer_size() + + def set_write_buffer_limits(self, high: Optional[int] = None, + low: Optional[int] = None) -> None: + """Set the high- and low-water limits for write flow control""" + + self._chan.set_write_buffer_limits(high, low) + + def write(self, data: AnyStr) -> None: + """Write data on this pipe""" + + self._chan.write(data, self._datatype) + + def writelines(self, list_of_data: Iterable[AnyStr]) -> None: + """Write a list of data bytes on this pipe""" + + self._chan.writelines(list_of_data, self._datatype) + + def write_eof(self) -> None: + """Write EOF on this pipe""" + + self._chan.write_eof() + + +class SSHSubprocessProtocol(Generic[AnyStr]): + """SSH subprocess protocol + + This class conforms to :class:`asyncio.SubprocessProtocol`, but with + the following enhancement: + + * If encoding is set when the subprocess is created, all data + passed to :meth:`pipe_data_received` will be string values + 
containing Unicode data. However, for compatibility with + :class:`asyncio.SubprocessProtocol`, encoding defaults to + `None`, in which case all data is delivered as bytes. + + """ + + def connection_made(self, + transport: 'SSHSubprocessTransport[AnyStr]') -> None: + """Called when a remote process is successfully started + + This method is called when a remote process is successfully + started. The transport parameter should be stored if needed + for later use. + + :param transport: + The transport to use to communicate with the remote process. + :type transport: :class:`SSHSubprocessTransport` + + """ + + def pipe_data_received(self, fd: int, data: AnyStr) -> None: + """Called when data is received from the remote process + + This method is called when data is received from the remote + process. If an encoding was specified when the process was + started, the data will be delivered as a string after decoding + with the requested encoding. Otherwise, the data will be + delivered as bytes. + + :param fd: + The integer file descriptor of the pipe data was received + on. This will be 1 for stdout or 2 for stderr. + :param data: + The data received from the remote process + :type fd: `int` + :type data: `str` or `bytes` + + """ + + def pipe_connection_lost(self, fd: int, exc: Optional[Exception]) -> None: + """Called when the pipe to a remote process is closed + + This method is called when a pipe to a remote process is + closed. If the channel is shut down cleanly, *exc* will be + `None`. Otherwise, it will be an exception explaining the + reason the pipe was closed. + + :param fd: + The integer file descriptor of the pipe which was + closed. This will be 1 for stdout or 2 for stderr. + :param exc: + The exception which caused the channel to close, or + `None` if the channel closed cleanly. + :type fd: `int` + :type exc: :class:`Exception` or `None` + + """ + + def process_exited(self) -> None: + """Called when a remote process has exited + + This method is called when the remote process has exited. + Exit status information can be retrieved by calling + :meth:`get_returncode() ` + on the transport provided in :meth:`connection_made`. + + """ + + +class SSHSubprocessTransport(SSHClientProcess[AnyStr]): + """SSH subprocess transport + + This class conforms to :class:`asyncio.SubprocessTransport`, but with + the following enhancements: + + * All functionality available through :class:`SSHClientProcess` + is also available here, such as the ability to dynamically + redirect stdin, stdout, and stderr at any time during the + lifetime of the process. + + * If encoding is set when the subprocess is created, all data + written to the transports created by :meth:`get_pipe_transport` + should be strings containing Unicode data. The encoding defaults + to `None`, though, to preserve compatibility with + :class:`asyncio.SubprocessTransport`, which expects data + to be written as bytes. 
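    A typical way to obtain one of these transports (illustrative;
    assumes an already-established connection `conn`)::

        class PrintProtocol(asyncssh.SSHSubprocessProtocol):
            def pipe_data_received(self, fd, data):
                print(fd, data)

            def process_exited(self):
                print('process exited')

        transport, protocol = await conn.create_subprocess(
            PrintProtocol, 'ls -l')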
+ + """ + + _chan: 'SSHClientChannel[AnyStr]' + + def __init__(self, protocol_factory: SubprocessFactory): + super().__init__() + + self._pipes: Dict[int, SSHSubprocessPipe[AnyStr]] = {} + self._protocol: SSHSubprocessProtocol[AnyStr] = protocol_factory() + + def get_protocol(self) -> SSHSubprocessProtocol[AnyStr]: + """Return the subprocess protocol associated with this transport""" + + return self._protocol + + def connection_made(self, chan: 'SSHChannel[AnyStr]') -> None: + """Handle a newly opened channel""" + + super().connection_made(chan) + + self._protocol.connection_made(self) + + self._pipes = { + 0: SSHSubprocessWritePipe(self._chan), + 1: SSHSubprocessReadPipe(self._chan), + 2: SSHSubprocessReadPipe(self._chan, EXTENDED_DATA_STDERR) + } + + def session_started(self) -> None: + """Override SSHClientProcess to avoid creating SSHReader/SSHWriter + streams, since this class uses read/write pipe objects instead""" + + def connection_lost(self, exc: Optional[Exception]) -> None: + """Handle an incoming channel close""" + + self._protocol.pipe_connection_lost(1, exc) + self._protocol.pipe_connection_lost(2, exc) + super().connection_lost(exc) + + def data_received(self, data: AnyStr, datatype: DataType) -> None: + """Handle incoming data from the remote process""" + + writer = self._writers.get(datatype) + + if writer: + writer.write(data) + else: + fd = 2 if datatype == EXTENDED_DATA_STDERR else 1 + self._protocol.pipe_data_received(fd, data) + + def exit_status_received(self, status: int) -> None: + """Handle exit status for the remote process""" + + super().exit_status_received(status) + self._protocol.process_exited() + + def exit_signal_received(self, signal: str, core_dumped: bool, + msg: str, lang: str) -> None: + """Handle exit signal for the remote process""" + + super().exit_signal_received(signal, core_dumped, msg, lang) + self._protocol.process_exited() + + def get_pid(self) -> Optional[int]: + """Return the PID of the remote process + + This method always returns `None`, since SSH doesn't report + remote PIDs. + + """ + + # pylint: disable=no-self-use + + return None + + def get_pipe_transport(self, fd: int) -> \ + Optional[SSHSubprocessPipe[AnyStr]]: + """Return a transport for the requested stream + + :param fd: + The integer file descriptor (0-2) to return the transport for, + where 0 means stdin, 1 means stdout, and 2 means stderr. + :type fd: `int` + + :returns: an :class:`SSHSubprocessReadPipe` or + :class:`SSHSubprocessWritePipe` + + """ + + return self._pipes.get(fd) + + def get_returncode(self) -> Optional[int]: + """Return the exit status or signal for the remote process + + This method returns the exit status of the session if one has + been sent. If an exit signal was sent, this method returns + the negative of the numeric value of that signal, matching + the behavior of :meth:`asyncio.SubprocessTransport.get_returncode`. + If neither has been sent, this method returns `None`. + + :returns: `int` or `None` + + """ + + return self.returncode diff --git a/venv/lib/python3.12/site-packages/asyncssh/version.py b/venv/lib/python3.12/site-packages/asyncssh/version.py new file mode 100644 index 0000000..5fae86e --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/version.py @@ -0,0 +1,29 @@ +# Copyright (c) 2013-2021 by Ron Frederick and others. 
+# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""AsyncSSH version information""" + +__author__ = 'Ron Frederick' + +__author_email__ = 'ronf@timeheart.net' + +__url__ = 'http://asyncssh.timeheart.net' + +__version__ = '2.14.2' diff --git a/venv/lib/python3.12/site-packages/asyncssh/x11.py b/venv/lib/python3.12/site-packages/asyncssh/x11.py new file mode 100644 index 0000000..0c7a808 --- /dev/null +++ b/venv/lib/python3.12/site-packages/asyncssh/x11.py @@ -0,0 +1,548 @@ +# Copyright (c) 2016-2023 by Ron Frederick and others. +# +# This program and the accompanying materials are made available under +# the terms of the Eclipse Public License v2.0 which accompanies this +# distribution and is available at: +# +# http://www.eclipse.org/legal/epl-2.0/ +# +# This program may also be made available under the following secondary +# licenses when the conditions for such availability set forth in the +# Eclipse Public License v2.0 are satisfied: +# +# GNU General Public License, Version 2.0, or any later versions of +# that license +# +# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later +# +# Contributors: +# Ron Frederick - initial implementation, API, and documentation + +"""X11 forwarding support""" + +import asyncio +import os +from pathlib import Path +import socket +import time +from typing import TYPE_CHECKING, Callable, Dict, Iterable +from typing import NamedTuple, Optional, Sequence, Set, Tuple + +from .constants import OPEN_CONNECT_FAILED +from .forward import SSHForwarder, SSHForwarderCoro +from .listener import SSHListener, create_tcp_forward_listener +from .logging import logger +from .misc import ChannelOpenError +from .session import DataType + + +if TYPE_CHECKING: + # pylint: disable=cyclic-import + from .channel import SSHChannel + from .connection import SSHServerConnection + + +_RecvHandler = Optional[Callable[[bytes], None]] + + +# Xauth address families +XAUTH_FAMILY_IPV4 = 0 +XAUTH_FAMILY_DECNET = 1 +XAUTH_FAMILY_IPV6 = 6 +XAUTH_FAMILY_HOSTNAME = 256 +XAUTH_FAMILY_WILD = 65535 + +# Xauth protocol values +XAUTH_PROTO_COOKIE = b'MIT-MAGIC-COOKIE-1' +XAUTH_COOKIE_LEN = 16 + +# Xauth lock information +XAUTH_LOCK_SUFFIX = '-c' +XAUTH_LOCK_TRIES = 5 +XAUTH_LOCK_DELAY = 0.2 +XAUTH_LOCK_DEAD = 5 + +# X11 display and port numbers +X11_BASE_PORT = 6000 +X11_DISPLAY_START = 10 +X11_MAX_DISPLAYS = 64 + +# Host to listen on when doing X11 forwarding +X11_LISTEN_HOST = 'localhost' + + +def _parse_display(display: str) -> Tuple[str, str, int]: + """Parse an X11 display value""" + + try: + host, dpynum = display.rsplit(':', 1) + + if host.startswith('[') and host.endswith(']'): + host = host[1:-1] + + idx = dpynum.find('.') + if idx >= 0: + screen = int(dpynum[idx+1:]) + dpynum = dpynum[:idx] + else: + screen = 0 + except (ValueError, UnicodeEncodeError): + raise ValueError('Invalid X11 display') from None + + return host, dpynum, screen + +async def _lookup_host(loop: 
asyncio.AbstractEventLoop, host: str, + family: int) -> Sequence[str]: + """Look up IPv4 or IPv6 addresses of a host name""" + + try: + addrinfo = await loop.getaddrinfo(host, 0, family=family, + type=socket.SOCK_STREAM) + except socket.gaierror: + return [] + + return [ai[4][0] for ai in addrinfo] + + +class SSHXAuthorityEntry(NamedTuple): + """An entry in an Xauthority file""" + + family: int + addr: bytes + dpynum: bytes + proto: bytes + data: bytes + + def __bytes__(self) -> bytes: + """Construct an Xauthority entry""" + + def _uint16(value: int) -> bytes: + """Construct a big-endian 16-bit unsigned integer""" + + return value.to_bytes(2, 'big') + + def _string(data: bytes) -> bytes: + """Construct a binary string with a 16-bit length""" + + return _uint16(len(data)) + data + + return b''.join((_uint16(self.family), _string(self.addr), + _string(self.dpynum), _string(self.proto), + _string(self.data))) + + +class SSHX11ClientForwarder(SSHForwarder): + """X11 forwarding connection handler""" + + def __init__(self, listener: 'SSHX11ClientListener', peer: SSHForwarder): + super().__init__(peer) + + self._listener = listener + + self._inpbuf = b'' + self._bytes_needed = 12 + self._recv_handler: _RecvHandler = self._recv_prefix + + self._endian = b'' + self._prefix = b'' + self._auth_proto_len = 0 + self._auth_data_len = 0 + + self._auth_proto = b'' + self._auth_proto_pad = b'' + + self._auth_data = b'' + self._auth_data_pad = b'' + + def _encode_uint16(self, value: int) -> bytes: + """Encode a 16-bit unsigned integer""" + + if self._endian == b'B': + return bytes((value >> 8, value & 255)) + else: + return bytes((value & 255, value >> 8)) + + def _decode_uint16(self, value: bytes) -> int: + """Decode a 16-bit unsigned integer""" + + if self._endian == b'B': + return (value[0] << 8) + value[1] + else: + return (value[1] << 8) + value[0] + + @staticmethod + def _padded_len(length: int) -> int: + """Return length rounded up to the next multiple of 4 bytes""" + + return ((length + 3) // 4) * 4 + + @staticmethod + def _pad(data: bytes) -> bytes: + """Pad a string to a multiple of 4 bytes""" + + length = len(data) % 4 + return data + ((4 - length) * b'\00' if length else b'') + + def _recv_prefix(self, data: bytes) -> None: + """Parse X11 client prefix""" + + self._endian = data[:1] + self._prefix = data + + self._auth_proto_len = self._decode_uint16(data[6:8]) + self._auth_data_len = self._decode_uint16(data[8:10]) + + self._recv_handler = self._recv_auth_proto + self._bytes_needed = self._padded_len(self._auth_proto_len) + + def _recv_auth_proto(self, data: bytes) -> None: + """Extract X11 auth protocol""" + + self._auth_proto = data[:self._auth_proto_len] + self._auth_proto_pad = data[self._auth_proto_len:] + + self._recv_handler = self._recv_auth_data + self._bytes_needed = self._padded_len(self._auth_data_len) + + def _recv_auth_data(self, data: bytes) -> None: + """Extract X11 auth data""" + + self._auth_data = data[:self._auth_data_len] + self._auth_data_pad = data[self._auth_data_len:] + + try: + self._auth_data = self._listener.validate_auth(self._auth_data) + except KeyError: + reason = b'Invalid authentication key\n' + + response = b''.join((bytes((0, len(reason))), + self._encode_uint16(11), + self._encode_uint16(0), + self._encode_uint16((len(reason) + 3) // 4), + self._pad(reason))) + + try: + self.write(response) + self.write_eof() + except OSError: # pragma: no cover + pass + + self._inpbuf = b'' + else: + self._inpbuf = (self._prefix + self._auth_proto + + self._auth_proto_pad + 
self._auth_data + + self._auth_data_pad) + + self._recv_handler = None + self._bytes_needed = 0 + + def data_received(self, data: bytes, datatype: DataType = None) -> None: + """Handle incoming data from the X11 client""" + + if self._recv_handler: + self._inpbuf += data + + while self._recv_handler: # type: ignore[truthy-function] + if len(self._inpbuf) >= self._bytes_needed: + data = self._inpbuf[:self._bytes_needed] + self._inpbuf = self._inpbuf[self._bytes_needed:] + self._recv_handler(data) + else: + return + + data = self._inpbuf + self._inpbuf = b'' + + if data: + super().data_received(data, datatype) + + +class SSHX11ClientListener: + """Client listener used to accept forwarded X11 connections""" + + def __init__(self, loop: asyncio.AbstractEventLoop, host: str, dpynum: str, + auth_proto: bytes, auth_data: bytes): + self._host = host + self._dpynum = dpynum + self._auth_proto = auth_proto + self._local_auth = auth_data + + if host.startswith('/'): + self._connect_coro: SSHForwarderCoro = loop.create_unix_connection + self._connect_args: Sequence[object] = (host + ':' + dpynum,) + elif host in ('', 'unix'): + self._connect_coro = loop.create_unix_connection + self._connect_args = ('/tmp/.X11-unix/X' + dpynum,) + else: + self._connect_coro = loop.create_connection + self._connect_args = (host, X11_BASE_PORT + int(dpynum)) + + self._remote_auth: Dict['SSHChannel', bytes] = {} + self._channel: Dict[bytes, Tuple['SSHChannel', bool]] = {} + + def attach(self, display: str, chan: 'SSHChannel', + single_connection: bool) -> Tuple[bytes, bytes, int]: + """Attach a channel to this listener""" + + host, dpynum, screen = _parse_display(display) + + if self._host != host or self._dpynum != dpynum: + raise ValueError('Already forwarding to another X11 display') + + remote_auth = os.urandom(len(self._local_auth)) + + self._remote_auth[chan] = remote_auth + self._channel[remote_auth] = chan, single_connection + + return self._auth_proto, remote_auth, screen + + def detach(self, chan: 'SSHChannel') -> bool: + """Detach a channel from this listener""" + + try: + remote_auth = self._remote_auth.pop(chan) + del self._channel[remote_auth] + except KeyError: + pass + + return not bool(self._remote_auth) + + async def forward_connection(self) -> SSHX11ClientForwarder: + """Forward an incoming connection to the local X server""" + + peer: SSHForwarder + + try: + _, peer = await self._connect_coro(SSHForwarder, + *self._connect_args) + except OSError as exc: + raise ChannelOpenError(OPEN_CONNECT_FAILED, str(exc)) from None + + return SSHX11ClientForwarder(self, peer) + + def validate_auth(self, remote_auth: bytes) -> bytes: + """Validate client auth and enforce single connection flag""" + + chan, single_connection = self._channel[remote_auth] + + if single_connection: + del self._channel[remote_auth] + del self._remote_auth[chan] + + return self._local_auth + + +class SSHX11ServerListener: + """Server listener used to forward X11 connections""" + + def __init__(self, tcp_listener: SSHListener, display: str): + self._tcp_listener = tcp_listener + self._display = display + self._channels: Set[object] = set() + + def attach(self, chan: 'SSHChannel', screen: int) -> str: + """Attach a channel to this listener and return its display""" + + self._channels.add(chan) + + return '%s.%s' % (self._display, screen) + + def detach(self, chan: 'SSHChannel') -> bool: + """Detach a channel from this listener""" + + try: + self._channels.remove(chan) + except KeyError: + pass + + if not self._channels: + 
self._tcp_listener.close() + return True + else: + return False + + +def get_xauth_path(auth_path: Optional[str]) -> str: + """Compute the path to the Xauthority file""" + + if not auth_path: + auth_path = os.environ.get('XAUTHORITY') + + if not auth_path: + auth_path = str(Path('~', '.Xauthority').expanduser()) + + return auth_path + + +def walk_xauth(auth_path: str) -> Iterable[SSHXAuthorityEntry]: + """Walk the entries in an Xauthority file""" + + def _read_bytes(n: int) -> bytes: + """Read exactly n bytes""" + + data = auth_file.read(n) + + if len(data) != n: + raise EOFError + + return data + + def _read_uint16() -> int: + """Read a 16-bit unsigned integer""" + + return int.from_bytes(_read_bytes(2), 'big') + + def _read_string() -> bytes: + """Read a string""" + + return _read_bytes(_read_uint16()) + + try: + with open(auth_path, 'rb') as auth_file: + while True: + try: + family = _read_uint16() + except EOFError: + break + + try: + yield SSHXAuthorityEntry(family, _read_string(), + _read_string(), _read_string(), + _read_string()) + except EOFError: + raise ValueError('Incomplete Xauthority entry') from None + except OSError: + pass + + +async def lookup_xauth(loop: asyncio.AbstractEventLoop, + auth_path: Optional[str], host: str, + dpynum: str) -> Tuple[bytes, bytes]: + """Look up Xauthority data for the specified display""" + + auth_path = get_xauth_path(auth_path) + + if host.startswith('/') or host in ('', 'unix', 'localhost'): + host = socket.gethostname() + + dpynum = dpynum.encode('ascii') + + ipv4_addrs: Sequence[str] = [] + ipv6_addrs: Sequence[str] = [] + + for entry in walk_xauth(auth_path): + if entry.dpynum and entry.dpynum != dpynum: + continue + + if entry.family == XAUTH_FAMILY_IPV4: + if not ipv4_addrs: + ipv4_addrs = await _lookup_host(loop, host, socket.AF_INET) + + addr = socket.inet_ntop(socket.AF_INET, entry.addr) + match = addr in ipv4_addrs + elif entry.family == XAUTH_FAMILY_IPV6: + if not ipv6_addrs: + ipv6_addrs = await _lookup_host(loop, host, socket.AF_INET6) + + addr = socket.inet_ntop(socket.AF_INET6, entry.addr) + match = addr in ipv6_addrs + elif entry.family == XAUTH_FAMILY_HOSTNAME: + match = entry.addr == host.encode('idna') + elif entry.family == XAUTH_FAMILY_WILD: + match = True + else: + match = False + + if match: + return entry.proto, entry.data + + logger.debug1('No xauth entry found for display: using random auth') + return XAUTH_PROTO_COOKIE, os.urandom(XAUTH_COOKIE_LEN) + + +async def update_xauth(auth_path: Optional[str], host: str, dpynum: str, + auth_proto: bytes, auth_data: bytes) -> None: + """Update Xauthority data for the specified display""" + + if host.startswith('/') or host in ('', 'unix', 'localhost'): + host = socket.gethostname() + + host = host.encode('idna') + dpynum = str(dpynum).encode('ascii') + + auth_path = get_xauth_path(auth_path) + new_auth_path = auth_path + XAUTH_LOCK_SUFFIX + new_file = None + + try: + if time.time() - os.stat(new_auth_path).st_ctime > XAUTH_LOCK_DEAD: + os.unlink(new_auth_path) + except FileNotFoundError: + pass + + for _ in range(XAUTH_LOCK_TRIES): + try: + new_file = open(new_auth_path, 'xb') + except FileExistsError: + await asyncio.sleep(XAUTH_LOCK_DELAY) + else: + break + + if not new_file: + raise ValueError('Unable to acquire Xauthority lock') + + new_entry = SSHXAuthorityEntry(XAUTH_FAMILY_HOSTNAME, host, + dpynum, auth_proto, auth_data) + + new_file.write(bytes(new_entry)) + + for entry in walk_xauth(auth_path): + if (entry.family != new_entry.family or entry.addr != new_entry.addr or 
+ entry.dpynum != new_entry.dpynum): + new_file.write(bytes(entry)) + + new_file.close() + + os.replace(new_auth_path, auth_path) + + +async def create_x11_client_listener(loop: asyncio.AbstractEventLoop, + display: str, + auth_path: Optional[str]) -> \ + SSHX11ClientListener: + """Create a listener to accept X11 connections forwarded over SSH""" + + host, dpynum, _ = _parse_display(display) + + auth_proto, auth_data = await lookup_xauth(loop, auth_path, host, dpynum) + + return SSHX11ClientListener(loop, host, dpynum, auth_proto, auth_data) + + +async def create_x11_server_listener(conn: 'SSHServerConnection', + loop: asyncio.AbstractEventLoop, + auth_path: Optional[str], + auth_proto: bytes, auth_data: bytes) -> \ + Optional[SSHX11ServerListener]: + """Create a listener to forward X11 connections over SSH""" + + for dpynum in range(X11_DISPLAY_START, X11_MAX_DISPLAYS): + try: + tcp_listener = await create_tcp_forward_listener( + conn, loop, conn.create_x11_connection, + X11_LISTEN_HOST, X11_BASE_PORT + dpynum) + except OSError: + continue + + display = '%s:%d' % (X11_LISTEN_HOST, dpynum) + + try: + await update_xauth(auth_path, X11_LISTEN_HOST, str(dpynum), + auth_proto, auth_data) + except ValueError: + tcp_listener.close() + break + + return SSHX11ServerListener(tcp_listener, display) + + return None diff --git a/venv/lib/python3.12/site-packages/attr/__init__.py b/venv/lib/python3.12/site-packages/attr/__init__.py new file mode 100644 index 0000000..5c6e065 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/__init__.py @@ -0,0 +1,104 @@ +# SPDX-License-Identifier: MIT + +""" +Classes Without Boilerplate +""" + +from functools import partial +from typing import Callable, Literal, Protocol + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Converter, + Factory, + _Nothing, + attrib, + attrs, + evolve, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance(Protocol): + pass + + +NothingType = Literal[_Nothing.NOTHING] + +__all__ = [ + "NOTHING", + "Attribute", + "AttrsInstance", + "Converter", + "Factory", + "NothingType", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + + +def _make_getattr(mod_name: str) -> Callable: + """ + Create a metadata proxy for packaging information that uses *mod_name* in + its warnings and errors. 
+ """ + + def __getattr__(name: str) -> str: + if name not in ("__version__", "__version_info__"): + msg = f"module {mod_name} has no attribute {name}" + raise AttributeError(msg) + + from importlib.metadata import metadata + + meta = metadata("attrs") + + if name == "__version_info__": + return VersionInfo._from_version_string(meta["version"]) + + return meta["version"] + + return __getattr__ + + +__getattr__ = _make_getattr(__name__) diff --git a/venv/lib/python3.12/site-packages/attr/__init__.pyi b/venv/lib/python3.12/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..8d78fa1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/__init__.pyi @@ -0,0 +1,389 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Generic, + Literal, + Mapping, + Protocol, + Sequence, + TypeVar, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo +from attrs import ( + define as define, + field as field, + mutable as mutable, + frozen as frozen, + _EqOrderType, + _ValidatorType, + _ConverterType, + _ReprArgType, + _OnSetAttrType, + _OnSetAttrArgType, + _FieldTransformer, + _ValidatorArgType, +) + +if sys.version_info >= (3, 10): + from typing import TypeGuard, TypeAlias +else: + from typing_extensions import TypeGuard, TypeAlias + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_FilterType = Callable[["Attribute[_T]", _T], bool] + +# We subclass this here to keep the protocol's qualified name clean. +class AttrsInstance(AttrsInstance_, Protocol): + pass + +_A = TypeVar("_A", bound=type[AttrsInstance]) + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING +NothingType: TypeAlias = Literal[_Nothing.NOTHING] + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. + +@overload +def Factory(factory: Callable[[], _T]) -> _T: ... +@overload +def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], +) -> _T: ... +@overload +def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], +) -> _T: ... + +In = TypeVar("In") +Out = TypeVar("Out") + +class Converter(Generic[In, Out]): + @overload + def __init__(self, converter: Callable[[In], Out]) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, AttrsInstance, Attribute], Out], + *, + takes_self: Literal[True], + takes_field: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, Attribute], Out], + *, + takes_field: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, AttrsInstance], Out], + *, + takes_self: Literal[True], + ) -> None: ... 
+
+class Attribute(Generic[_T]):
+    name: str
+    default: _T | None
+    validator: _ValidatorType[_T] | None
+    repr: _ReprArgType
+    cmp: _EqOrderType
+    eq: _EqOrderType
+    order: _EqOrderType
+    hash: bool | None
+    init: bool
+    converter: Converter | None
+    metadata: dict[Any, Any]
+    type: type[_T] | None
+    kw_only: bool
+    on_setattr: _OnSetAttrType
+    alias: str | None
+
+    def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+#    - Pros: Handles simple cases correctly
+#    - Cons: Might produce less informative errors in the case of conflicting
+#      TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+#    - Pros: Better error messages than #1 for conflicting TypeVars
+#    - Cons: Terrible error messages for validator checks.
+#      e.g. attr.ib(type=int, validator=validate_str)
+#           -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+#    - Pros: Simple here, and we could customize the plugin with our own errors.
+#    - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+#     attr() -> Any
+#     attr(8) -> int
+#     attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignment possible:
+#     x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+    default: None = ...,
+    validator: None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: None = ...,
+    converter: None = ...,
+    factory: None = ...,
+    kw_only: bool | None = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+    default: None = ...,
+    validator: _ValidatorArgType[_T] | None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: type[_T] | None = ...,
+    converter: _ConverterType
+    | list[_ConverterType]
+    | tuple[_ConverterType]
+    | None = ...,
+    factory: Callable[[], _T] | None = ...,
+    kw_only: bool | None = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+    default: _T,
+    validator: _ValidatorArgType[_T] | None = ...,
+    repr: _ReprArgType = ...,
+    cmp: _EqOrderType | None = ...,
+    hash: bool | None = ...,
+    init: bool = ...,
+    metadata: Mapping[Any, Any] | None = ...,
+    type: type[_T] | None = ...,
+    converter: _ConverterType
+    | list[_ConverterType]
+    | tuple[_ConverterType]
+    | None = ...,
+    factory: Callable[[], _T] | None = ...,
+    kw_only: bool | None = ...,
+    eq: _EqOrderType | None = ...,
+    order: _EqOrderType | None = ...,
+    on_setattr: _OnSetAttrArgType | None = ...,
+    alias: str | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g.
forward references (str), Any +@overload +def attrib( + default: _T | None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: object = ..., + converter: _ConverterType + | list[_ConverterType] + | tuple[_ConverterType] + | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool | None = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> Any: ... +@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: _C, + these: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., + unsafe_hash: bool | None = ..., +) -> _C: ... +@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., + unsafe_hash: bool | None = ..., +) -> Callable[[_C], _C]: ... +def fields(cls: type[AttrsInstance]) -> Any: ... +def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _A, + globalns: dict[str, Any] | None = ..., + localns: dict[str, Any] | None = ..., + attribs: list[Attribute[Any]] | None = ..., + include_extras: bool = ..., +) -> _A: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: list[str] | tuple[str, ...] | dict[str, Any], + bases: tuple[type, ...] = ..., + class_body: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + collect_by_mro: bool = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., +) -> type: ... 
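# --- A quick sketch of the make_class() API stubbed above (not part of this
# --- diff; the Point name and fields are illustrative). It builds an attrs
# --- class programmatically from a dict of name -> attr.ib().
import attr

Point = attr.make_class(
    "Point", {"x": attr.ib(type=int), "y": attr.ib(default=0)}
)

print(Point(1))  # Point(x=1, y=0)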
+ +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: _FilterType[Any] | None = ..., + dict_factory: type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ..., + tuple_keys: bool | None = ..., +) -> dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: _FilterType[Any] | None = ..., + tuple_factory: type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4e33ea0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/_cmp.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_cmp.cpython-312.pyc new file mode 100644 index 0000000..79611f4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_cmp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..7d0c682 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/_config.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_config.cpython-312.pyc new file mode 100644 index 0000000..dfae98b Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/_funcs.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_funcs.cpython-312.pyc new file mode 100644 index 0000000..266c9ab Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_funcs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/_make.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_make.cpython-312.pyc new file mode 100644 index 0000000..2e0e92f Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_make.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc new file mode 100644 index 0000000..592132f Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_next_gen.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/attr/__pycache__/_version_info.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/_version_info.cpython-312.pyc new file mode 100644 index 0000000..06be12b Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/_version_info.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/converters.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/converters.cpython-312.pyc new file mode 100644 index 0000000..0ced51b Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/converters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..820b2e7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/filters.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/filters.cpython-312.pyc new file mode 100644 index 0000000..7d91cd7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/filters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/setters.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/setters.cpython-312.pyc new file mode 100644 index 0000000..f3455c8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/setters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/__pycache__/validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/attr/__pycache__/validators.cpython-312.pyc new file mode 100644 index 0000000..2b67098 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attr/__pycache__/validators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attr/_cmp.py b/venv/lib/python3.12/site-packages/attr/_cmp.py new file mode 100644 index 0000000..09bab49 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_cmp.py @@ -0,0 +1,160 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import __ne__ + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attrs.field`'s ``eq``, ``order``, + and ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if at least + one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + Args: + eq (typing.Callable | None): + Callable used to evaluate equality of two objects. + + lt (typing.Callable | None): + Callable used to evaluate whether one object is less than another + object. + + le (typing.Callable | None): + Callable used to evaluate whether one object is less than or equal + to another object. + + gt (typing.Callable | None): + Callable used to evaluate whether one object is greater than + another object. + + ge (typing.Callable | None): + Callable used to evaluate whether one object is greater than or + equal to another object. + + require_same_type (bool): + When `True`, equality and ordering methods will return + `NotImplemented` if objects are not of the same type. + + class_name (str | None): Name of class. Defaults to "Comparable". 
+
+    See `comparison` for more details.
+
+    .. versionadded:: 21.1.0
+    """
+
+    body = {
+        "__slots__": ["value"],
+        "__init__": _make_init(),
+        "_requirements": [],
+        "_is_comparable_to": _is_comparable_to,
+    }
+
+    # Add operations.
+    num_order_functions = 0
+    has_eq_function = False
+
+    if eq is not None:
+        has_eq_function = True
+        body["__eq__"] = _make_operator("eq", eq)
+        body["__ne__"] = __ne__
+
+    if lt is not None:
+        num_order_functions += 1
+        body["__lt__"] = _make_operator("lt", lt)
+
+    if le is not None:
+        num_order_functions += 1
+        body["__le__"] = _make_operator("le", le)
+
+    if gt is not None:
+        num_order_functions += 1
+        body["__gt__"] = _make_operator("gt", gt)
+
+    if ge is not None:
+        num_order_functions += 1
+        body["__ge__"] = _make_operator("ge", ge)
+
+    type_ = types.new_class(
+        class_name, (object,), {}, lambda ns: ns.update(body)
+    )
+
+    # Add same type requirement.
+    if require_same_type:
+        type_._requirements.append(_check_same_type)
+
+    # Add total ordering if at least one operation was defined.
+    if 0 < num_order_functions < 4:
+        if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise an early error here to keep a nice stack.
+            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
+            raise ValueError(msg)
+        type_ = functools.total_ordering(type_)
+
+    return type_
+
+
+def _make_init():
+    """
+    Create __init__ method.
+    """
+
+    def __init__(self, value):
+        """
+        Initialize object with *value*.
+        """
+        self.value = value
+
+    return __init__
+
+
+def _make_operator(name, func):
+    """
+    Create operator method.
+    """
+
+    def method(self, other):
+        if not self._is_comparable_to(other):
+            return NotImplemented
+
+        result = func(self.value, other.value)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return result
+
+    method.__name__ = f"__{name}__"
+    method.__doc__ = (
+        f"Return a {_operation_names[name]} b. Computed by attrs."
+    )
+
+    return method
+
+
+def _is_comparable_to(self, other):
+    """
+    Check whether `other` is comparable to `self`.
+    """
+    return all(func(self, other) for func in self._requirements)
+
+
+def _check_same_type(self, other):
+    """
+    Return True if *self* and *other* are of the same type, False otherwise.
+    """
+    return other.value.__class__ is self.value.__class__
diff --git a/venv/lib/python3.12/site-packages/attr/_cmp.pyi b/venv/lib/python3.12/site-packages/attr/_cmp.pyi
new file mode 100644
index 0000000..cc7893b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Any, Callable
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+    eq: _CompareWithType | None = ...,
+    lt: _CompareWithType | None = ...,
+    le: _CompareWithType | None = ...,
+    gt: _CompareWithType | None = ...,
+    ge: _CompareWithType | None = ...,
+    require_same_type: bool = ...,
+    class_name: str = ...,
+) -> type: ...
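# --- A minimal usage sketch for cmp_using() as implemented above (not part of
# --- this diff; Box and the 1e-6 tolerance are illustrative). Passing the
# --- generated class to field(eq=...) wraps the field value so comparisons go
# --- through the supplied callable instead of the default ==.
from attr import cmp_using, define, field

@define
class Box:
    # Compare widths with an absolute tolerance rather than strict float ==.
    width: float = field(eq=cmp_using(eq=lambda a, b: abs(a - b) < 1e-6))

print(Box(1.0) == Box(1.0 + 1e-9))  # True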
diff --git a/venv/lib/python3.12/site-packages/attr/_compat.py b/venv/lib/python3.12/site-packages/attr/_compat.py new file mode 100644 index 0000000..bc68ed9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_compat.py @@ -0,0 +1,99 @@ +# SPDX-License-Identifier: MIT + +import inspect +import platform +import sys +import threading + +from collections.abc import Mapping, Sequence # noqa: F401 +from typing import _GenericAlias + + +PYPY = platform.python_implementation() == "PyPy" +PY_3_10_PLUS = sys.version_info[:2] >= (3, 10) +PY_3_11_PLUS = sys.version_info[:2] >= (3, 11) +PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) +PY_3_13_PLUS = sys.version_info[:2] >= (3, 13) +PY_3_14_PLUS = sys.version_info[:2] >= (3, 14) + + +if PY_3_14_PLUS: + import annotationlib + + # We request forward-ref annotations to not break in the presence of + # forward references. + + def _get_annotations(cls): + return annotationlib.get_annotations( + cls, format=annotationlib.Format.FORWARDREF + ) + +else: + + def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + return cls.__dict__.get("__annotations__", {}) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() + + +def get_generic_base(cl): + """If this is a generic class (A[str]), return the generic base for it.""" + if cl.__class__ is _GenericAlias: + return cl.__origin__ + return None diff --git a/venv/lib/python3.12/site-packages/attr/_config.py b/venv/lib/python3.12/site-packages/attr/_config.py new file mode 100644 index 0000000..4b25772 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + +__all__ = ["get_run_validators", "set_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. 
+ """ + if not isinstance(run, bool): + msg = "'run' must be bool." + raise TypeError(msg) + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/venv/lib/python3.12/site-packages/attr/_funcs.py b/venv/lib/python3.12/site-packages/attr/_funcs.py new file mode 100644 index 0000000..1adb500 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_funcs.py @@ -0,0 +1,497 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._compat import get_generic_base +from ._make import _OBJ_SETATTR, NOTHING, fields +from .exceptions import AttrsAttributeNotFoundError + + +_ATOMIC_TYPES = frozenset( + { + type(None), + bool, + int, + float, + str, + complex, + bytes, + type(...), + type, + range, + property, + } +) + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the *attrs* attribute values of *inst* as a dict. + + Optionally recurse into other *attrs*-decorated classes. + + Args: + inst: Instance of an *attrs*-decorated class. + + recurse (bool): Recurse into classes that are also *attrs*-decorated. + + filter (~typing.Callable): + A callable whose return code determines whether an attribute or + element is included (`True`) or dropped (`False`). Is called with + the `attrs.Attribute` as the first argument and the value as the + second argument. + + dict_factory (~typing.Callable): + A callable to produce dictionaries from. For example, to produce + ordered dictionaries instead of normal Python dictionaries, pass in + ``collections.OrderedDict``. + + retain_collection_types (bool): + Do not convert to `list` when encountering an attribute whose type + is `tuple` or `set`. Only meaningful if *recurse* is `True`. + + value_serializer (typing.Callable | None): + A hook that is called for every attribute or dict key/value. It + receives the current instance, field and value and must return the + (updated) value. The hook is run *after* the optional *filter* has + been applied. + + Returns: + Return type of *dict_factory*. + + Raises: + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 + If a dict has a collection for a key, it is serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + value_type = type(v) + if value_type in _ATOMIC_TYPES: + rv[a.name] = v + elif has(value_type): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif issubclass(value_type, (tuple, list, set, frozenset)): + cf = value_type if retain_collection_types is True else list + items = [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + try: + rv[a.name] = cf(items) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv[a.name] = cf(*items) + elif issubclass(value_type, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + val_type = type(val) + if val_type in _ATOMIC_TYPES: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + elif getattr(val_type, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif issubclass(val_type, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif issubclass(val_type, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the *attrs* attribute values of *inst* as a tuple. + + Optionally recurse into other *attrs*-decorated classes. + + Args: + inst: Instance of an *attrs*-decorated class. + + recurse (bool): + Recurse into classes that are also *attrs*-decorated. 
+
+        filter (~typing.Callable):
+            A callable whose return code determines whether an attribute or
+            element is included (`True`) or dropped (`False`). Is called with
+            the `attrs.Attribute` as the first argument and the value as the
+            second argument.
+
+        tuple_factory (~typing.Callable):
+            A callable to produce tuples from. For example, pass `list` to
+            produce lists instead of tuples.
+
+        retain_collection_types (bool):
+            Do not convert to `list` or `dict` when encountering an attribute
+            whose type is `tuple`, `dict` or `set`. Only meaningful if
+            *recurse* is `True`.
+
+    Returns:
+        Return type of *tuple_factory*
+
+    Raises:
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    .. versionadded:: 16.2.0
+    """
+    attrs = fields(inst.__class__)
+    rv = []
+    retain = retain_collection_types  # Very long. :/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        value_type = type(v)
+        if recurse is True:
+            if value_type in _ATOMIC_TYPES:
+                rv.append(v)
+            elif has(value_type):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif issubclass(value_type, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                items = [
+                    (
+                        astuple(
+                            j,
+                            recurse=True,
+                            filter=filter,
+                            tuple_factory=tuple_factory,
+                            retain_collection_types=retain,
+                        )
+                        if has(j.__class__)
+                        else j
+                    )
+                    for j in v
+                ]
+                try:
+                    rv.append(cf(items))
+                except TypeError:
+                    if not issubclass(cf, tuple):
+                        raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears for a namedtuple)
+                    rv.append(cf(*items))
+            elif issubclass(value_type, dict):
+                df = value_type if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            (
+                                astuple(
+                                    kk,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(kk.__class__)
+                                else kk
+                            ),
+                            (
+                                astuple(
+                                    vv,
+                                    tuple_factory=tuple_factory,
+                                    retain_collection_types=retain,
+                                )
+                                if has(vv.__class__)
+                                else vv
+                            ),
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+    """
+    Check whether *cls* is a class with *attrs* attributes.
+
+    Args:
+        cls (type): Class to introspect.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+    Returns:
+        bool:
+    """
+    attrs = getattr(cls, "__attrs_attrs__", None)
+    if attrs is not None:
+        return True
+
+    # No attrs, maybe it's a specialized generic (A[str])?
+    generic_base = get_generic_base(cls)
+    if generic_base is not None:
+        generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+        if generic_attrs is not None:
+            # Stick it on here for speed next time.
+            cls.__attrs_attrs__ = generic_attrs
+        return generic_attrs is not None
+    return False
+
+
+def assoc(inst, **changes):
+    """
+    Copy *inst* and apply *changes*.
+
+    This is different from `evolve`, which applies the changes to the
+    arguments that create the new instance.
+
+    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
+    doesn't work. Therefore `assoc` is deprecated, but will not be removed.
+
+    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
+
+    Args:
+        inst: Instance of a class with *attrs* attributes.
+
+        changes: Keyword changes in the new copy.
+
+    Returns:
+        A copy of inst with *changes* incorporated.
+
+    Raises:
+        attrs.exceptions.AttrsAttributeNotFoundError:
+            If *attr_name* couldn't be found on *cls*.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class.
+
+    .. deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can. This function will not be
+        removed due to the slightly different approach compared to
+        `attrs.evolve`, though.
+    """
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            msg = f"{k} is not an attrs attribute on {new.__class__}."
+            raise AttrsAttributeNotFoundError(msg)
+        _OBJ_SETATTR(new, k, v)
+    return new
+
+
+def resolve_types(
+    cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
+    """
+    Resolve any strings and forward annotations in type annotations.
+
+    This is only required if you need concrete types in :class:`Attribute`'s
+    *type* field. In other words, you don't need to resolve your types if you
+    only use them for static type checking.
+
+    With no arguments, names will be looked up in the module in which the class
+    was created. If this is not what you want, for example, if the name only
+    exists inside a method, you may pass *globalns* or *localns* to specify
+    other dictionaries in which to look up these names. See the docs of
+    `typing.get_type_hints` for more details.
+
+    Args:
+        cls (type): Class to resolve.
+
+        globalns (dict | None): Dictionary containing global variables.
+
+        localns (dict | None): Dictionary containing local variables.
+
+        attribs (list | None):
+            List of attribs for the given class. This is necessary when calling
+            from inside a ``field_transformer`` since *cls* is not an *attrs*
+            class yet.
+
+        include_extras (bool):
+            Resolve more accurately, if possible. Pass ``include_extras`` to
+            ``typing.get_type_hints``, if supported by the typing module. On
+            supported Python versions (3.9+), this resolves the types more
+            accurately.
+
+    Raises:
+        TypeError: If *cls* is not a class.
+
+        attrs.exceptions.NotAnAttrsClassError:
+            If *cls* is not an *attrs* class and you didn't pass any attribs.
+
+        NameError: If types cannot be resolved because of missing variables.
+
+    Returns:
+        *cls* so you can use this function also as a class decorator. Please
+        note that you have to apply it **after** `attrs.define`. That means the
+        decorator has to come in the line **before** `attrs.define`.
+
+    .. versionadded:: 20.1.0
+    .. versionadded:: 21.1.0 *attribs*
+    .. versionadded:: 23.1.0 *include_extras*
+    """
+    # Since calling get_type_hints is expensive we cache whether we've
+    # done it already.
+    if getattr(cls, "__attrs_types_resolved__", None) != cls:
+        import typing
+
+        kwargs = {
+            "globalns": globalns,
+            "localns": localns,
+            "include_extras": include_extras,
+        }
+
+        hints = typing.get_type_hints(cls, **kwargs)
+        for field in fields(cls) if attribs is None else attribs:
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _OBJ_SETATTR(field, "type", hints[field.name])
+        # We store the class we resolved so that subclasses know they haven't
+        # been resolved.
+        cls.__attrs_types_resolved__ = cls
+
+    # Return the class so you can use it as a decorator too.
+ return cls diff --git a/venv/lib/python3.12/site-packages/attr/_make.py b/venv/lib/python3.12/site-packages/attr/_make.py new file mode 100644 index 0000000..d24d9ba --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_make.py @@ -0,0 +1,3362 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import abc +import contextlib +import copy +import enum +import inspect +import itertools +import linecache +import sys +import types +import unicodedata +import weakref + +from collections.abc import Callable, Mapping +from functools import cached_property +from typing import Any, NamedTuple, TypeVar + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + PY_3_10_PLUS, + PY_3_11_PLUS, + PY_3_13_PLUS, + _AnnotationExtractor, + _get_annotations, + get_generic_base, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_OBJ_SETATTR = object.__setattr__ +_INIT_FACTORY_PAT = "__attr_factory_%s" +_CLASSVAR_PREFIXES = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_HASH_CACHE_FIELD = "_attrs_cached_hash" + +_EMPTY_METADATA_SINGLETON = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_SENTINEL = object() + +_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when `None` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when `None` is ambiguous. + +When using in 3rd party code, use `attrs.NothingType` for type annotations. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since `None` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=None, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new field / attribute on a class. + + Identical to `attrs.field`, except it's not keyword-only. + + Consider using `attrs.field` in new code (``attr.ib`` will *never* go away, + though). + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attr.s` (or similar)! + + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. 
versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+    .. versionchanged:: 17.1.0
+       *hash* is `None` and therefore mirrors *eq* by default.
+    .. versionadded:: 17.3.0 *type*
+    .. deprecated:: 17.4.0 *convert*
+    .. versionadded:: 17.4.0
+       *converter* as a replacement for the deprecated *convert* to achieve
+       consistency with other noun-based arguments.
+    .. versionadded:: 18.1.0
+       ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+    .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+    .. versionchanged:: 21.1.0
+       *eq*, *order*, and *cmp* also accept a custom callable
+    .. versionchanged:: 21.1.0 *cmp* undeprecated
+    .. versionadded:: 22.2.0 *alias*
+    .. versionchanged:: 25.4.0
+       *kw_only* can now be None, and its default is also changed from False to
+       None.
+    """
+    eq, eq_key, order, order_key = _determine_attrib_eq_order(
+        cmp, eq, order, True
+    )
+
+    if hash is not None and hash is not True and hash is not False:
+        msg = "Invalid value for hash. Must be True, False, or None."
+        raise TypeError(msg)
+
+    if factory is not None:
+        if default is not NOTHING:
+            msg = (
+                "The `default` and `factory` arguments are mutually exclusive."
+            )
+            raise ValueError(msg)
+        if not callable(factory):
+            msg = "The `factory` argument must be a callable."
+            raise ValueError(msg)
+        default = Factory(factory)
+
+    if metadata is None:
+        metadata = {}
+
+    # Apply syntactic sugar by auto-wrapping.
+    if isinstance(on_setattr, (list, tuple)):
+        on_setattr = setters.pipe(*on_setattr)
+
+    if validator and isinstance(validator, (list, tuple)):
+        validator = and_(*validator)
+
+    if converter and isinstance(converter, (list, tuple)):
+        converter = pipe(*converter)
+
+    return _CountingAttr(
+        default=default,
+        validator=validator,
+        repr=repr,
+        cmp=None,
+        hash=hash,
+        init=init,
+        converter=converter,
+        metadata=metadata,
+        type=type,
+        kw_only=kw_only,
+        eq=eq,
+        eq_key=eq_key,
+        order=order,
+        order_key=order_key,
+        on_setattr=on_setattr,
+        alias=alias,
+    )
+
+
+def _compile_and_eval(
+    script: str,
+    globs: dict[str, Any] | None,
+    locs: Mapping[str, object] | None = None,
+    filename: str = "",
+) -> None:
+    """
+    Evaluate the script with the given global (globs) and local (locs)
+    variables.
+    """
+    bytecode = compile(script, filename, "exec")
+    eval(bytecode, globs, locs)
+
+
+def _linecache_and_compile(
+    script: str,
+    filename: str,
+    globs: dict[str, Any] | None,
+    locals: Mapping[str, object] | None = None,
+) -> dict[str, Any]:
+    """
+    Cache the script with _linecache_, compile it and return the _locals_.
+    """
+
+    locs = {} if locals is None else locals
+
+    # In order for debuggers like PDB to be able to step through the code,
+    # we add a fake linecache entry.
+ count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs + + +def _make_attr_tuple_class(cls_name: str, attr_names: list[str]) -> type: + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. + + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + body = {} + for i, attr_name in enumerate(attr_names): + + def getter(self, i=i): + return self[i] + + body[attr_name] = property(getter) + return type(attr_class_name, (tuple,), body) + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +class _Attributes(NamedTuple): + attrs: type + base_attrs: list[Attribute] + base_attrs_map: dict[str, type] + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_CLASSVAR_PREFIXES) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + return attrib_name in cls.__dict__ + + +def _collect_base_attrs( + cls, taken_attr_names +) -> tuple[list[Attribute], dict[str, type]]: + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, +) -> _Attributes: + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + If *collect_by_mro* is True, collect them in the correct MRO order, + otherwise use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = list(these.items()) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if attr.__class__ is _CountingAttr + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if a.__class__ is not _CountingAttr: + a = attrib(a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if unannotated: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if attr.__class__ is _CountingAttr + ), + key=lambda e: e[1].counter, + ) + + fca = Attribute.from_counting_attr + no = ClassProps.KeywordOnly.NO + own_attrs = [ + fca( + attr_name, + ca, + kw_only is not no, + anns.get(attr_name), + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only is ClassProps.KeywordOnly.FORCE: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + if field_transformer is not None: + attrs = tuple(field_transformer(cls, attrs)) + + # Check attr order after executing the field_transformer. + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" + raise ValueError(msg) + + if had_default is False and a.default is not NOTHING: + had_default = True + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + for a in attrs: + if not a.alias: + # Evolve is very slow, so we hold our nose and do it dirty. + _OBJ_SETATTR.__get__(a)("alias", _default_init_alias_for(a.name)) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! 
+ attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes(AttrsClass(attrs), base_attrs, base_attr_map) + + +def _make_cached_property_getattr(cached_properties, original_getattr, cls): + lines = [ + # Wrapped to get `__class__` into closure cell for super() + # (It will be replaced with the newly constructed class after construction). + "def wrapper(_cls):", + " __class__ = _cls", + " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", + " func = cached_properties.get(item)", + " if func is not None:", + " result = func(self)", + " _setter = _cached_setattr_get(self)", + " _setter(item, result)", + " return result", + ] + if original_getattr is not None: + lines.append( + " return original_getattr(self, item)", + ) + else: + lines.extend( + [ + " try:", + " return super().__getattribute__(item)", + " except AttributeError:", + " if not hasattr(super(), '__getattr__'):", + " raise", + " return super().__getattr__(item)", + " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", + " raise AttributeError(original_error)", + ] + ) + + lines.extend( + [ + " return __getattr__", + "__getattr__ = wrapper(_cls)", + ] + ) + + unique_filename = _generate_unique_filename(cls, "getattr") + + glob = { + "cached_properties": cached_properties, + "_cached_setattr_get": _OBJ_SETATTR.__get__, + "original_getattr": original_getattr, + } + + return _linecache_and_compile( + "\n".join(lines), unique_filename, glob, locals={"_cls": cls} + )["__getattr__"] + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + "__traceback__", + "__suppress_context__", + "__notes__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + if isinstance(self, BaseException) and name in ("__notes__",): + BaseException.__delattr__(self, name) + return + + raise FrozenInstanceError + + +def evolve(*args, **changes): + """ + Create a new instance, based on the first positional argument with + *changes* applied. + + .. tip:: + + On Python 3.13 and later, you can also use `copy.replace` instead. + + Args: + + inst: + Instance of a class with *attrs* attributes. *inst* must be passed + as a positional argument. + + changes: + Keyword changes in the new copy. + + Returns: + A copy of inst with *changes* incorporated. + + Raises: + TypeError: + If *attr_name* couldn't be found in the class ``__init__``. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. versionadded:: 17.1.0 + .. deprecated:: 23.1.0 + It is now deprecated to pass the instance using the keyword argument + *inst*. It will raise a warning until at least April 2024, after which + it will become an error. Always pass the instance as a positional + argument. + .. versionchanged:: 24.1.0 + *inst* can't be passed as a keyword argument anymore. + """ + try: + (inst,) = args + except ValueError: + msg = ( + f"evolve() takes 1 positional argument, but {len(args)} were given" + ) + raise TypeError(msg) from None + + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. 
+ init_name = a.alias + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_add_method_dunders", + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_custom_setattr", + "_has_post_init", + "_has_pre_init", + "_is_exc", + "_on_setattr", + "_pre_init_has_args", + "_repr_added", + "_script_snippets", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + ) + + def __init__( + self, + cls: type, + these, + auto_attribs: bool, + props: ClassProps, + has_custom_setattr: bool, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + props.kw_only, + props.collected_fields_by_mro, + props.field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if props.is_slotted else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = props.is_slotted + self._frozen = props.is_frozen + self._weakref_slot = props.has_weakref_slot + self._cache_hash = ( + props.hashability is ClassProps.Hashability.HASHABLE_CACHED + ) + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._pre_init_has_args = False + if self._has_pre_init: + # Check if the pre init method has more arguments than just `self` + # We want to pass arguments if pre init expects arguments + pre_init_func = cls.__attrs_pre_init__ + pre_init_signature = inspect.signature(pre_init_func) + self._pre_init_has_args = len(pre_init_signature.parameters) > 1 + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = props.is_exception + self._on_setattr = props.on_setattr_hook + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + self._cls_dict["__attrs_props__"] = props + + if props.is_frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif self._on_setattr in ( + _DEFAULT_ON_SETATTR, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + self._on_setattr == _DEFAULT_ON_SETATTR + and not (has_validator or has_converter) + ) + or (self._on_setattr == setters.validate and not has_validator) + or (self._on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if props.added_pickling: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + # tuples of script, globs, hook + self._script_snippets: list[ + tuple[str, dict, Callable[[dict, dict], Any]] + ] = [] + self._repr_added = False + + # We want to only do this check once; in 99.9% of cases these + # exist. 
+ if not hasattr(self._cls, "__module__") or not hasattr( + self._cls, "__qualname__" + ): + self._add_method_dunders = self._add_method_dunders_safe + else: + self._add_method_dunders = self._add_method_dunders_unsafe + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + def _eval_snippets(self) -> None: + """ + Evaluate any registered snippets in one go. + """ + script = "\n".join([snippet[0] for snippet in self._script_snippets]) + globs = {} + for _, snippet_globs, _ in self._script_snippets: + globs.update(snippet_globs) + + locs = _linecache_and_compile( + script, + _generate_unique_filename(self._cls, "methods"), + globs, + ) + + for _, _, hook in self._script_snippets: + hook(self._cls_dict, locs) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + self._eval_snippets() + if self._slots is True: + cls = self._create_slots_class() + self._cls.__attrs_base_of_slotted__ = weakref.ref(cls) + else: + cls = self._patch_original_class() + if PY_3_10_PLUS: + cls = abc.update_abstractmethods(cls) + + # The method gets only called if it's not inherited from a base class. + # _has_own_attribute does NOT work properly for classmethods. + if ( + getattr(cls, "__attrs_init_subclass__", None) + and "__attrs_init_subclass__" not in cls.__dict__ + ): + cls.__attrs_init_subclass__() + + return cls + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _SENTINEL) is not _SENTINEL + ): + # An AttributeError can happen if a base class defines a + # class variable and we want to set an attribute with the + # same name by using only a type annotation. + with contextlib.suppress(AttributeError): + delattr(cls, name) + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _OBJ_SETATTR + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") + } + + # 3.14.0rc2+ + if hasattr(sys, "_clear_type_descriptors"): + sys._clear_type_descriptors(self._cls) + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. 
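+        # A sketch of the hierarchy the XXX note means (hypothetical names):
+        #
+        #     @attr.s(slots=True, on_setattr=setters.validate)
+        #     class A:
+        #         x = attrib(validator=...)    # A gets an attrs __setattr__
+        #
+        #     class B(A):                      # plain, non-attrs class
+        #         pass
+        #
+        #     @attr.s(slots=True)
+        #     class C(B):                      # checks only B.__dict__, so
+        #         pass                         # A's hook goes unnoticed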
+ if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _OBJ_SETATTR + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = {} + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + cached_properties = { + name: cached_prop.func + for name, cached_prop in cd.items() + if isinstance(cached_prop, cached_property) + } + + # Collect methods with a `__class__` reference that are shadowed in the new class. + # To know to update them. + additional_closure_functions_to_update = [] + if cached_properties: + class_annotations = _get_annotations(self._cls) + for name, func in cached_properties.items(): + # Add cached properties to names for slotting. + names += (name,) + # Clear out function from class to avoid clashing. + del cd[name] + additional_closure_functions_to_update.append(func) + annotation = inspect.signature(func).return_annotation + if annotation is not inspect.Parameter.empty: + class_annotations[name] = annotation + + original_getattr = cd.get("__getattr__") + if original_getattr is not None: + additional_closure_functions_to_update.append(original_getattr) + + cd["__getattr__"] = _make_cached_property_getattr( + cached_properties, original_getattr, self._cls + ) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_HASH_CACHE_FIELD) + + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in itertools.chain( + cls.__dict__.values(), additional_closure_functions_to_update + ): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). 
+ # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # noqa: PERF203 + # ValueError: Cell is empty + pass + else: + if match: + cell.cell_contents = cls + return cls + + def add_repr(self, ns): + script, globs = _make_repr_script(self._attrs, ns) + + def _attach_repr(cls_dict, globs): + cls_dict["__repr__"] = self._add_method_dunders(globs["__repr__"]) + + self._script_snippets.append((script, globs, _attach_repr)) + self._repr_added = True + return self + + def add_str(self): + if not self._repr_added: + msg = "__str__ can only be generated if a __repr__ exists." + raise ValueError(msg) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _OBJ_SETATTR.__get__(self) + if isinstance(state, tuple): + # Backward compatibility with attrs instances pickled with + # attrs versions before v22.2.0 which stored tuples. + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + else: + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_HASH_CACHE_FIELD, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + script, globs = _make_hash_script( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + + def attach_hash(cls_dict: dict, locs: dict) -> None: + cls_dict["__hash__"] = self._add_method_dunders(locs["__hash__"]) + + self._script_snippets.append((script, globs, attach_hash)) + + return self + + def add_init(self): + script, globs, annotations = _make_init_script( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + + def _attach_init(cls_dict, globs): + init = globs["__init__"] + init.__annotations__ = annotations + cls_dict["__init__"] = self._add_method_dunders(init) + + self._script_snippets.append((script, globs, _attach_init)) + + return self + + def add_replace(self): + self._cls_dict["__replace__"] = self._add_method_dunders( + lambda self, **changes: evolve(self, **changes) + ) + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + script, globs, annotations = _make_init_script( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + + def _attach_attrs_init(cls_dict, globs): + init = globs["__attrs_init__"] + init.__annotations__ = annotations + cls_dict["__attrs_init__"] = self._add_method_dunders(init) + + self._script_snippets.append((script, globs, _attach_attrs_init)) + + return self + + def add_eq(self): + cd = self._cls_dict + + script, globs = _make_eq_script(self._attrs) + + def _attach_eq(cls_dict, globs): + cls_dict["__eq__"] = self._add_method_dunders(globs["__eq__"]) + + self._script_snippets.append((script, globs, _attach_eq)) + + cd["__ne__"] = __ne__ + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + msg = "Can't combine custom __setattr__ with on_setattr hooks." + raise ValueError(msg) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _OBJ_SETATTR(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders_unsafe(self, method: Callable) -> Callable: + """ + Add __module__ and __qualname__ to a *method*. 
+ """ + method.__module__ = self._cls.__module__ + + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" + + method.__doc__ = ( + f"Method generated by attrs for class {self._cls.__qualname__}." + ) + + return method + + def _add_method_dunders_safe(self, method: Callable) -> Callable: + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + with contextlib.suppress(AttributeError): + method.__module__ = self._cls.__module__ + + with contextlib.suppress(AttributeError): + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" + + with contextlib.suppress(AttributeError): + method.__doc__ = f"Method generated by attrs for class {self._cls.__qualname__}." + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. 
+ for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, + force_kw_only=True, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will + *never* go away, though). + + Args: + repr_ns (str): + When using nested classes, there was no way in Python 2 to + automatically detect that. This argument allows to set a custom + name for a more meaningful ``repr`` output. This argument is + pointless in Python 3 and is therefore deprecated. + + .. caution:: + Refer to `attrs.define` for the rest of the parameters, but note that they + can have different defaults. + + Notably, leaving *on_setattr* as `None` will **not** add any hooks. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports `None` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + .. deprecated:: 24.1.0 *repr_ns* + .. versionchanged:: 24.1.0 + Instances are not compared as tuples of attributes anymore, but using a + big ``and`` condition. This is faster and has more correct behavior for + uncomparable values like `math.nan`. + .. versionadded:: 24.1.0 + If a class has an *inherited* classmethod called + ``__attrs_init_subclass__``, it is executed after the class is created. + .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*. + .. 
versionchanged:: 25.4.0 + *kw_only* now only applies to attributes defined in the current class, + and respects attribute-level ``kw_only=False`` settings. + .. versionadded:: 25.4.0 *force_kw_only* + """ + if repr_ns is not None: + import warnings + + warnings.warn( + DeprecationWarning( + "The `repr_ns` argument is deprecated and will be removed in or after August 2025." + ), + stacklevel=2, + ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + nonlocal hash + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + msg = "Can't freeze a class with a custom __setattr__." + raise ValueError(msg) + + eq = not is_exc and _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + + Hashability = ClassProps.Hashability + + if is_exc: + hashability = Hashability.LEAVE_ALONE + elif hash is True: + hashability = ( + Hashability.HASHABLE_CACHED + if cache_hash + else Hashability.HASHABLE + ) + elif hash is False: + hashability = Hashability.LEAVE_ALONE + elif hash is None: + if auto_detect is True and _has_own_attribute(cls, "__hash__"): + hashability = Hashability.LEAVE_ALONE + elif eq is True and is_frozen is True: + hashability = ( + Hashability.HASHABLE_CACHED + if cache_hash + else Hashability.HASHABLE + ) + elif eq is False: + hashability = Hashability.LEAVE_ALONE + else: + hashability = Hashability.UNHASHABLE + else: + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + KeywordOnly = ClassProps.KeywordOnly + if kw_only: + kwo = KeywordOnly.FORCE if force_kw_only else KeywordOnly.YES + else: + kwo = KeywordOnly.NO + + props = ClassProps( + is_exception=is_exc, + is_frozen=is_frozen, + is_slotted=slots, + collected_fields_by_mro=collect_by_mro, + added_init=_determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ), + added_repr=_determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ), + added_eq=eq, + added_ordering=not is_exc + and _determine_whether_to_implement( + cls, + order_, + auto_detect, + ("__lt__", "__le__", "__gt__", "__ge__"), + ), + hashability=hashability, + added_match_args=match_args, + kw_only=kwo, + has_weakref_slot=weakref_slot, + added_str=str, + added_pickling=_determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + on_setattr_hook=on_setattr, + field_transformer=field_transformer, + ) + + if not props.is_hashable and cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." 
+            raise TypeError(msg)
+
+        builder = _ClassBuilder(
+            cls,
+            these,
+            auto_attribs=auto_attribs,
+            props=props,
+            has_custom_setattr=has_own_setattr,
+        )
+
+        if props.added_repr:
+            builder.add_repr(repr_ns)
+
+        if props.added_str:
+            builder.add_str()
+
+        if props.added_eq:
+            builder.add_eq()
+        if props.added_ordering:
+            builder.add_order()
+
+        if not frozen:
+            builder.add_setattr()
+
+        if props.is_hashable:
+            builder.add_hash()
+        elif props.hashability is Hashability.UNHASHABLE:
+            builder.make_unhashable()
+
+        if props.added_init:
+            builder.add_init()
+        else:
+            builder.add_attrs_init()
+            if cache_hash:
+                msg = "Invalid value for cache_hash. To use hash caching, init must be True."
+                raise TypeError(msg)
+
+        if PY_3_13_PLUS and not _has_own_attribute(cls, "__replace__"):
+            builder.add_replace()
+
+        if (
+            PY_3_10_PLUS
+            and match_args
+            and not _has_own_attribute(cls, "__match_args__")
+        ):
+            builder.add_match_args()
+
+        return builder.build_class()
+
+    # maybe_cls's type depends on the usage of the decorator. It's a class
+    # if it's used as `@attrs` but `None` if used as `@attrs()`.
+    if maybe_cls is None:
+        return wrap
+
+    return wrap(maybe_cls)
+
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+    """
+    Check whether *cls* has a frozen ancestor by looking at its
+    __setattr__.
+    """
+    return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls: type, func_name: str) -> str:
+    """
+    Create a "filename" suitable for a function being generated.
+    """
+    return (
+        f"<attrs generated {func_name} {cls.__module__}."
+        f"{getattr(cls, '__qualname__', cls.__name__)}>"
+    )
+
+
+def _make_hash_script(
+    cls: type, attrs: list[Attribute], frozen: bool, cache_hash: bool
+) -> tuple[str, dict]:
+    attrs = tuple(
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+    )
+
+    tab = "        "
+
+    type_hash = hash(_generate_unique_filename(cls, "hash"))
+    # If eq is custom generated, we need to include the functions in globs
+    globs = {}
+
+    hash_def = "def __hash__(self"
+    hash_func = "hash(("
+    closing_braces = "))"
+    if not cache_hash:
+        hash_def += "):"
+    else:
+        hash_def += ", *"
+
+        hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
+        hash_func = "_cache_wrapper(" + hash_func
+        closing_braces += ")"
+
+    method_lines = [hash_def]
+
+    def append_hash_computation_lines(prefix, indent):
+        """
+        Generate the code for actually computing the hash code.
+ Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + f" {type_hash},", + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + globs[cmp_name] = a.eq_key + method_lines.append( + indent + f" {cmp_name}(self.{a.name})," + ) + else: + method_lines.append(indent + f" self.{a.name},") + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:") + if frozen: + append_hash_computation_lines( + f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + f"self.{_HASH_CACHE_FIELD} = ", tab * 2 + ) + method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}") + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return script, globs + + +def _add_hash(cls: type, attrs: list[Attribute]): + """ + Add a hash method to *cls*. + """ + script, globs = _make_hash_script( + cls, attrs, frozen=False, cache_hash=False + ) + _compile_and_eval( + script, globs, filename=_generate_unique_filename(cls, "__hash__") + ) + cls.__hash__ = globs["__hash__"] + return cls + + +def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + +def _make_eq_script(attrs: list) -> tuple[str, dict]: + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + globs = {} + if attrs: + lines.append(" return (") + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})" + ) + else: + lines.append(f" self.{a.name} == other.{a.name}") + if a is not attrs[-1]: + lines[-1] = f"{lines[-1]} and" + lines.append(" )") + else: + lines.append(" return True") + + script = "\n".join(lines) + + return script, globs + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. 
+ """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + script, globs = _make_eq_script(attrs) + _compile_and_eval( + script, globs, filename=_generate_unique_filename(cls, "__eq__") + ) + cls.__eq__ = globs["__eq__"] + cls.__ne__ = __ne__ + + return cls + + +def _make_repr_script(attrs, ns) -> tuple[str, dict]: + """ + Create the source and globs for a __repr__ and return it. + """ + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return "\n".join(lines), globs + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + script, globs = _make_repr_script(attrs, ns) + _compile_and_eval( + script, globs, filename=_generate_unique_filename(cls, "__repr__") + ) + cls.__repr__ = globs["__repr__"] + return cls + + +def fields(cls): + """ + Return the tuple of *attrs* attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + Returns: + tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + .. versionchanged:: 23.1.0 Add support for generic classes. + """ + generic_base = get_generic_base(cls) + + if generic_base is None and not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + + attrs = getattr(cls, "__attrs_attrs__", None) + + if attrs is None: + if generic_base is not None: + attrs = getattr(generic_base, "__attrs_attrs__", None) + if attrs is not None: + # Even though this is global state, stick it on here to speed + # it up. We rely on `cls` being cached for this to be + # efficient. 
+ cls.__attrs_attrs__ = attrs + return attrs + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of *attrs* attributes for a class, whose keys + are the attribute names. + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + Returns: + dict[str, attrs.Attribute]: Dict of attribute name to definition + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + Args: + inst: Instance of a class with *attrs* attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + cls = base_attr_map.get(a_name) + return cls and "__slots__" in cls.__dict__ + + +def _make_init_script( + cls, + attrs, + pre_init, + pre_init_has_args, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +) -> tuple[str, dict, dict]: + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + pre_init_has_args, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + "__attrs_init__" if attrs_init else "__init__", + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__ + + return script, globs, annotations + + +def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. 
+ """ + return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})" + + +def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True, converter) + + return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}" + + +def _determine_setters( + frozen: bool, slots: bool, base_attr_map: dict[str, type] +): + """ + Determine the correct setter functions based on whether a class is frozen + and/or slotted. + """ + if frozen is True: + if slots is True: + return (), _setattr, _setattr_with_converter + + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + + def fmt_setter( + attr_name: str, value_var: str, has_on_setattr: bool + ) -> str: + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name: str, + value_var: str, + has_on_setattr: bool, + converter: Converter, + ) -> str: + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr, converter + ) + + return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}" + + return ( + ("_inst_dict = self.__dict__",), + fmt_setter, + fmt_setter_with_converter, + ) + + # Not frozen -- we can just assign directly. + return (), _assign, _assign_with_converter + + +def _attrs_to_init_script( + attrs: list[Attribute], + is_frozen: bool, + is_slotted: bool, + call_pre_init: bool, + pre_init_has_args: bool, + call_post_init: bool, + does_cache_hash: bool, + base_attr_map: dict[str, type], + is_exc: bool, + needs_cached_setattr: bool, + has_cls_on_setattr: bool, + method_name: str, +) -> tuple[str, dict, dict]: + """ + Return a script of an initializer for *attrs*, a dict of globals, and + annotations for the initializer. + + The globals are required by the generated script. + """ + lines = ["self.__attrs_pre_init__()"] if call_pre_init else [] + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. Note _setattr will be used again below if + # does_cache_hash is True. + "_setattr = _cached_setattr_get(self)" + ) + + extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters( + is_frozen, is_slotted, base_attr_map + ) + lines.extend(extra_lines) + + args = [] # Parameters in the definition of __init__ + pre_init_args = [] # Parameters in the call to __attrs_pre_init__ + kw_only_args = [] # Used for both 'args' and 'pre_init_args' above + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. 
+ names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + maybe_self = "self" if has_factory and a.default.takes_self else "" + + if a.converter is not None and not isinstance(a.converter, Converter): + converter = Converter(a.converter) + else: + converter = a.converter + + if a.init is False: + if has_factory: + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + elif converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + pre_init_args.append(arg_name) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + pre_init_args.append(arg_name) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + pre_init_args.append(arg_name) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and converter is None: + annotations[arg_name] = a.type + elif converter is not None and 
converter._first_param_type: + # Use the type from the converter if present. + annotations[arg_name] = converter._first_param_type + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if call_post_init: + lines.append("self.__attrs_post_init__()") + + # Because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to field + # values during post-init combined with post-init accessing the hash code + # would result in silent bugs. + if does_cache_hash: + if is_frozen: + if is_slotted: + init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)" + else: + init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None" + else: + init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None" + lines.append(init_hash_cache) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + pre_init_args = ", ".join(pre_init_args) + if kw_only_args: + # leading comma & kw_only args + args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}" + pre_init_kw_only_args = ", ".join( + [ + f"{kw_arg_name}={kw_arg_name}" + # We need to remove the defaults from the kw_only_args. + for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args) + ] + ) + pre_init_args += ", " if pre_init_args else "" + pre_init_args += pre_init_kw_only_args + + if call_pre_init and pre_init_has_args: + # If pre init method has arguments, pass the values given to __init__. + lines[0] = f"self.__attrs_pre_init__({pre_init_args})" + + # Python <3.12 doesn't allow backslashes in f-strings. + NL = "\n " + return ( + f"""def {method_name}(self, {args}): + {NL.join(lines) if lines else "pass"} +""", + names_for_globals, + annotations, + ) + + +def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-unscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + +class Attribute: + """ + *Read-only* representation of an attribute. + + .. warning:: + + You should never instantiate this class yourself. + + The class has *all* arguments of `attr.ib` (except for ``factory`` which is + only syntactic sugar for ``default=Factory(...)`` plus the following: + + - ``name`` (`str`): The name of the attribute. + - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The + callables that are used for comparing and ordering objects by this + attribute, respectively. These are set by passing a callable to + `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also + :ref:`comparison customization `. 
+ + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. + """ + + # These slots must NOT be reordered because we use them later for + # instantiation. + __slots__ = ( # noqa: RUF023 + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _OBJ_SETATTR.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _EMPTY_METADATA_SINGLETON + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError + + @classmethod + def from_counting_attr( + cls, name: str, ca: _CountingAttr, kw_only: bool, type=None + ): + # The 'kw_only' argument is the class-level setting, and is used if the + # attribute itself does not explicitly set 'kw_only'. + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + msg = f"Type annotation and type argument cannot both be present for '{name}'." + raise ValueError(msg) + return cls( + name, + ca._default, + ca._validator, + ca.repr, + None, + ca.hash, + ca.init, + False, + ca.metadata, + type, + ca.converter, + kw_only if ca.kw_only is None else ca.kw_only, + ca.eq, + ca.eq_key, + ca.order, + ca.order_key, + ca.on_setattr, + ca.alias, + ) + + # Don't use attrs.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attrs.evolve` but that function does not work + with :class:`attrs.Attribute`. + + It is mainly meant to be used for `transform-fields`. + + .. 
versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _OBJ_SETATTR.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + ( + types.MappingProxyType(dict(value)) + if value + else _EMPTY_METADATA_SINGLETON + ), + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "_default", + "_validator", + "alias", + "converter", + "counter", + "eq", + "eq_key", + "hash", + "init", + "kw_only", + "metadata", + "on_setattr", + "order", + "order_key", + "repr", + "type", + ) + __attrs_attrs__ = ( + *tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ), + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. 
+ + Raises: + DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class ClassProps: + """ + Effective class properties as derived from parameters to `attr.s()` or + `define()` decorators. + + This is the same data structure that *attrs* uses internally to decide how + to construct the final class. + + Warning: + + This feature is currently **experimental** and is not covered by our + strict backwards-compatibility guarantees. + + + Attributes: + is_exception (bool): + Whether the class is treated as an exception class. + + is_slotted (bool): + Whether the class is `slotted `. + + has_weakref_slot (bool): + Whether the class has a slot for weak references. + + is_frozen (bool): + Whether the class is frozen. + + kw_only (KeywordOnly): + Whether / how the class enforces keyword-only arguments on the + ``__init__`` method. + + collected_fields_by_mro (bool): + Whether the class fields were collected by method resolution order. + That is, correctly but unlike `dataclasses`. + + added_init (bool): + Whether the class has an *attrs*-generated ``__init__`` method. + + added_repr (bool): + Whether the class has an *attrs*-generated ``__repr__`` method. + + added_eq (bool): + Whether the class has *attrs*-generated equality methods. + + added_ordering (bool): + Whether the class has *attrs*-generated ordering methods. + + hashability (Hashability): How `hashable ` the class is. + + added_match_args (bool): + Whether the class supports positional `match ` over its + fields. + + added_str (bool): + Whether the class has an *attrs*-generated ``__str__`` method. + + added_pickling (bool): + Whether the class has *attrs*-generated ``__getstate__`` and + ``__setstate__`` methods for `pickle`. + + on_setattr_hook (Callable[[Any, Attribute[Any], Any], Any] | None): + The class's ``__setattr__`` hook. + + field_transformer (Callable[[Attribute[Any]], Attribute[Any]] | None): + The class's `field transformers `. + + .. versionadded:: 25.4.0 + """ + + class Hashability(enum.Enum): + """ + The hashability of a class. + + .. versionadded:: 25.4.0 + """ + + HASHABLE = "hashable" + """Write a ``__hash__``.""" + HASHABLE_CACHED = "hashable_cache" + """Write a ``__hash__`` and cache the hash.""" + UNHASHABLE = "unhashable" + """Set ``__hash__`` to ``None``.""" + LEAVE_ALONE = "leave_alone" + """Don't touch ``__hash__``.""" + + class KeywordOnly(enum.Enum): + """ + How attributes should be treated regarding keyword-only parameters. + + .. 
versionadded:: 25.4.0 + """ + + NO = "no" + """Attributes are not keyword-only.""" + YES = "yes" + """Attributes in current class without kw_only=False are keyword-only.""" + FORCE = "force" + """All attributes are keyword-only.""" + + __slots__ = ( # noqa: RUF023 -- order matters for __init__ + "is_exception", + "is_slotted", + "has_weakref_slot", + "is_frozen", + "kw_only", + "collected_fields_by_mro", + "added_init", + "added_repr", + "added_eq", + "added_ordering", + "hashability", + "added_match_args", + "added_str", + "added_pickling", + "on_setattr_hook", + "field_transformer", + ) + + def __init__( + self, + is_exception, + is_slotted, + has_weakref_slot, + is_frozen, + kw_only, + collected_fields_by_mro, + added_init, + added_repr, + added_eq, + added_ordering, + hashability, + added_match_args, + added_str, + added_pickling, + on_setattr_hook, + field_transformer, + ): + self.is_exception = is_exception + self.is_slotted = is_slotted + self.has_weakref_slot = has_weakref_slot + self.is_frozen = is_frozen + self.kw_only = kw_only + self.collected_fields_by_mro = collected_fields_by_mro + self.added_init = added_init + self.added_repr = added_repr + self.added_eq = added_eq + self.added_ordering = added_ordering + self.hashability = hashability + self.added_match_args = added_match_args + self.added_str = added_str + self.added_pickling = added_pickling + self.on_setattr_hook = on_setattr_hook + self.field_transformer = field_transformer + + @property + def is_hashable(self): + return ( + self.hashability is ClassProps.Hashability.HASHABLE + or self.hashability is ClassProps.Hashability.HASHABLE_CACHED + ) + + +_cas = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in ClassProps.__slots__ +] + +ClassProps = _add_eq(_add_repr(ClassProps, attrs=_cas), attrs=_cas) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + Args: + factory (typing.Callable): + A callable that takes either none or exactly one mandatory + positional argument depending on *takes_self*. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +class Converter: + """ + Stores a converter callable. + + Allows for the wrapped converter to take additional arguments. The + arguments are passed in the order they are documented. + + Args: + converter (Callable): A callable that converts the passed value. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. 
(default: `False`) + + takes_field (bool): + Pass the field definition (an :class:`Attribute`) into the + converter as a positional argument. (default: `False`) + + .. versionadded:: 24.1.0 + """ + + __slots__ = ( + "__call__", + "_first_param_type", + "_global_name", + "converter", + "takes_field", + "takes_self", + ) + + def __init__(self, converter, *, takes_self=False, takes_field=False): + self.converter = converter + self.takes_self = takes_self + self.takes_field = takes_field + + ex = _AnnotationExtractor(converter) + self._first_param_type = ex.get_first_param_type() + + if not (self.takes_self or self.takes_field): + self.__call__ = lambda value, _, __: self.converter(value) + elif self.takes_self and not self.takes_field: + self.__call__ = lambda value, instance, __: self.converter( + value, instance + ) + elif not self.takes_self and self.takes_field: + self.__call__ = lambda value, __, field: self.converter( + value, field + ) + else: + self.__call__ = lambda value, instance, field: self.converter( + value, instance, field + ) + + rt = ex.get_return_type() + if rt is not None: + self.__call__.__annotations__["return"] = rt + + @staticmethod + def _get_global_name(attr_name: str) -> str: + """ + Return the name that a converter for an attribute name *attr_name* + would have. + """ + return f"__attr_converter_{attr_name}" + + def _fmt_converter_call(self, attr_name: str, value_var: str) -> str: + """ + Return a string that calls the converter for an attribute name + *attr_name* and the value in variable named *value_var* according to + `self.takes_self` and `self.takes_field`. + """ + if not (self.takes_self or self.takes_field): + return f"{self._get_global_name(attr_name)}({value_var})" + + if self.takes_self and self.takes_field: + return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])" + + if self.takes_self: + return f"{self._get_global_name(attr_name)}({value_var}, self)" + + return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])" + + def __getstate__(self): + """ + Return a dict containing only converter and takes_self -- the rest gets + computed when loading. + """ + return { + "converter": self.converter, + "takes_self": self.takes_self, + "takes_field": self.takes_field, + } + + def __setstate__(self, state): + """ + Load instance from state. + """ + self.__init__(**state) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in ("converter", "takes_self", "takes_field") +] + +Converter = _add_hash( + _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f +) + + +def make_class( + name, attrs, bases=(object,), class_body=None, **attributes_arguments +): + r""" + A quick way to create a new class called *name* with *attrs*. + + .. note:: + + ``make_class()`` is a thin wrapper around `attr.s`, not `attrs.define` + which means that it doesn't come with some of the improved defaults. + + For example, if you want the same ``on_setattr`` behavior as in + `attrs.define`, you have to pass the hooks yourself: ``make_class(..., + on_setattr=setters.pipe(setters.convert, setters.validate)`` + + .. warning:: + + It is *your* duty to ensure that the class name and the attribute names + are valid identifiers. ``make_class()`` will *not* validate them for + you. + + Args: + name (str): The name for the new class. 
+ + attrs (list | dict): + A list of names or a dictionary of mappings of names to `attr.ib`\ + s / `attrs.field`\ s. + + The order is deduced from the order of the names or attributes + inside *attrs*. Otherwise the order of the definition of the + attributes is used. + + bases (tuple[type, ...]): Classes that the new class will subclass. + + class_body (dict): + An optional dictionary of class attributes for the new class. + + attributes_arguments: Passed unmodified to `attr.s`. + + Returns: + type: A new class with *attrs*. + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + .. versionchanged:: 23.2.0 *class_body* + .. versionchanged:: 25.2.0 Class names can now be unicode. + """ + # Class identifiers are converted into the normal form NFKC while parsing + name = unicodedata.normalize("NFKC", name) + + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + msg = "attrs argument must be a dict or a list." + raise TypeError(msg) + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if class_body is not None: + body.update(class_body) + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + with contextlib.suppress(AttributeError, ValueError): + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + cls = _attrs(these=cls_dict, **attributes_arguments)(type_) + # Only add type annotations now or "_attrs()" will complain: + cls.__annotations__ = { + k: v.type for k, v in cls_dict.items() if v.type is not None + } + return cls + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, unsafe_hash=True) +class _AndValidator: + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. 
+
+    When called on a value, it runs all wrapped converters, returning the
+    *last* value.
+
+    Type annotations will be inferred from the wrapped converters', if they
+    have any.
+
+    Args:
+        converters (~collections.abc.Iterable[typing.Callable]):
+            Arbitrary number of converters.
+
+    .. versionadded:: 20.1.0
+    """
+
+    return_instance = any(isinstance(c, Converter) for c in converters)
+
+    if return_instance:
+
+        def pipe_converter(val, inst, field):
+            for c in converters:
+                val = (
+                    c(val, inst, field) if isinstance(c, Converter) else c(val)
+                )
+
+            return val
+
+    else:
+
+        def pipe_converter(val):
+            for c in converters:
+                val = c(val)
+
+            return val
+
+    if not converters:
+        # If the converter list is empty, pipe_converter is the identity.
+        A = TypeVar("A")
+        pipe_converter.__annotations__.update({"val": A, "return": A})
+    else:
+        # Get parameter type from first converter.
+        t = _AnnotationExtractor(converters[0]).get_first_param_type()
+        if t:
+            pipe_converter.__annotations__["val"] = t
+
+        last = converters[-1]
+        if not PY_3_11_PLUS and isinstance(last, Converter):
+            last = last.__call__
+
+        # Get return type from last converter.
+        rt = _AnnotationExtractor(last).get_return_type()
+        if rt:
+            pipe_converter.__annotations__["return"] = rt
+
+    if return_instance:
+        return Converter(pipe_converter, takes_self=True, takes_field=True)
+    return pipe_converter
diff --git a/venv/lib/python3.12/site-packages/attr/_next_gen.py b/venv/lib/python3.12/site-packages/attr/_next_gen.py
new file mode 100644
index 0000000..4ccd0da
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/_next_gen.py
@@ -0,0 +1,674 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are keyword-only APIs that call `attr.s` and `attr.ib` with different
+default values.
+"""
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+    _DEFAULT_ON_SETATTR,
+    NOTHING,
+    _frozen_setattrs,
+    attrib,
+    attrs,
+)
+from .exceptions import NotAnAttrsClassError, UnannotatedAttributeError
+
+
+def define(
+    maybe_cls=None,
+    *,
+    these=None,
+    repr=None,
+    unsafe_hash=None,
+    hash=None,
+    init=None,
+    slots=True,
+    frozen=False,
+    weakref_slot=True,
+    str=False,
+    auto_attribs=None,
+    kw_only=False,
+    cache_hash=False,
+    auto_exc=True,
+    eq=None,
+    order=False,
+    auto_detect=True,
+    getstate_setstate=None,
+    on_setattr=None,
+    field_transformer=None,
+    match_args=True,
+    force_kw_only=False,
+):
+    r"""
+    A class decorator that adds :term:`dunder methods` according to
+    :term:`fields <field>` specified using :doc:`type annotations <types>`,
+    `field()` calls, or the *these* argument.
+
+    Since *attrs* patches or replaces an existing class, you cannot use
+    `object.__init_subclass__` with *attrs* classes, because it runs too early.
+    As a replacement, you can define ``__attrs_init_subclass__`` on your class.
+    It will be called by *attrs* classes that subclass it after they're
+    created. See also :ref:`init-subclass`.
+
+    Args:
+        slots (bool):
+            Create a :term:`slotted class <slotted classes>` that's more
+            memory-efficient. Slotted classes are generally superior to the
+            default dict classes, but have some gotchas you should know about,
+            so we encourage you to read the
+            :term:`glossary entry <slotted classes>`.
+ + auto_detect (bool): + Instead of setting the *init*, *repr*, *eq*, and *hash* arguments + explicitly, assume they are set to True **unless any** of the + involved methods for one of the arguments is implemented in the + *current* class (meaning, it is *not* inherited from some base + class). + + So, for example by implementing ``__eq__`` on a class yourself, + *attrs* will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a + sensible ``__ne__`` by default, so it *should* be enough to only + implement ``__eq__`` in most cases). + + Passing :data:`True` or :data:`False` to *init*, *repr*, *eq*, or *hash* + overrides whatever *auto_detect* would determine. + + auto_exc (bool): + If the class subclasses `BaseException` (which implicitly includes + any subclass of any exception), the following happens to behave + like a well-behaved Python exception class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids [#]_ , + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the + ``args`` attribute, + - the value of *str* is ignored leaving ``__str__`` to base + classes. + + .. [#] + Note that *attrs* will *not* remove existing implementations of + ``__hash__`` or the equality methods. It just won't add own + ones. + + on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]): + A callable that is run whenever the user attempts to set an + attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same + arguments as validators: the instance, the attribute that is being + modified, and the new value. + + If no exception is raised, the attribute is set to the return value + of the callable. + + If a list of callables is passed, they're automatically wrapped in + an `attrs.setters.pipe`. + + If left None, the default behavior is to run converters and + validators whenever an attribute is set. + + init (bool): + Create a ``__init__`` method that initializes the *attrs* + attributes. Leading underscores are stripped for the argument name, + unless an alias is set on the attribute. + + .. seealso:: + `init` shows advanced ways to customize the generated + ``__init__`` method, including executing code before and after. + + repr(bool): + Create a ``__repr__`` method with a human readable representation + of *attrs* attributes. + + str (bool): + Create a ``__str__`` method that is identical to ``__repr__``. This + is usually not necessary except for `Exception`\ s. + + eq (bool | None): + If True or None (default), add ``__eq__`` and ``__ne__`` methods + that check two instances for equality. + + .. seealso:: + `comparison` describes how to customize the comparison behavior + going as far comparing NumPy arrays. + + order (bool | None): + If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` + methods that behave like *eq* above and allow instances to be + ordered. + + They compare the instances as if they were tuples of their *attrs* + attributes if and only if the types of both classes are + *identical*. + + If `None` mirror value of *eq*. + + .. seealso:: `comparison` + + unsafe_hash (bool | None): + If None (default), the ``__hash__`` method is generated according + how *eq* and *frozen* are set. + + 1. If *both* are True, *attrs* will generate a ``__hash__`` for + you. + 2. 
If *eq* is True and *frozen* is False, ``__hash__`` will be set
+               to None, marking it unhashable (which it is).
+            3. If *eq* is False, ``__hash__`` will be left untouched meaning
+               the ``__hash__`` method of the base class will be used. If the
+               base class is `object`, this means it will fall back to
+               id-based hashing.
+
+            Although not recommended, you can decide for yourself and force
+            *attrs* to create one (for example, if the class is immutable even
+            though you didn't freeze it programmatically) by passing True or
+            not. Both of these cases are rather special and should be used
+            carefully.
+
+            .. seealso::
+
+                - Our documentation on `hashing`,
+                - Python's documentation on `object.__hash__`,
+                - and the `GitHub issue that led to the default \ behavior
+                  <https://github.com/python-attrs/attrs/issues/136>`_ for more
+                  details.
+
+        hash (bool | None):
+            Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
+
+        cache_hash (bool):
+            Ensure that the object's hash code is computed only once and stored
+            on the object. If this is set to True, hashing must be either
+            explicitly or implicitly enabled for this class. If the hash code
+            is cached, avoid any reassignments of fields involved in hash code
+            computation or mutations of the objects those fields point to after
+            object creation. If such changes occur, the behavior of the
+            object's hash code is undefined.
+
+        frozen (bool):
+            Make instances immutable after initialization. If someone attempts
+            to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
+            is raised.
+
+            .. note::
+
+                1. This is achieved by installing a custom ``__setattr__``
+                   method on your class, so you can't implement your own.
+
+                2. True immutability is impossible in Python.
+
+                3. This *does* have a minor runtime performance impact
+                   when initializing new instances. In other
+                   words: ``__init__`` is slightly slower with ``frozen=True``.
+
+                4. If a class is frozen, you cannot modify ``self`` in
+                   ``__attrs_post_init__`` or a self-written ``__init__``. You
+                   can circumvent that limitation by using
+                   ``object.__setattr__(self, "attribute_name", value)``.
+
+                5. Subclasses of a frozen class are frozen too.
+
+        kw_only (bool):
+            Make attributes keyword-only in the generated ``__init__`` (if
+            *init* is False, this parameter is ignored). Attributes that
+            explicitly set ``kw_only=False`` are not affected; base class
+            attributes are also not affected.
+
+            Also see *force_kw_only*.
+
+        weakref_slot (bool):
+            Make instances weak-referenceable. This has no effect unless
+            *slots* is True.
+
+        field_transformer (~typing.Callable | None):
+            A function that is called with the original class object and all
+            fields right before *attrs* finalizes the class. You can use this,
+            for example, to automatically add converters or validators to
+            fields based on their types.
+
+            .. seealso:: `transform-fields`
+
+        match_args (bool):
+            If True (default), set ``__match_args__`` on the class to support
+            :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
+            non-keyword-only ``__init__`` parameter names on Python 3.10 and
+            later. Ignored on older Python versions.
+
+        collect_by_mro (bool):
+            If True, *attrs* collects attributes from base classes correctly
+            according to the `method resolution order
+            <https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
+            will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
+
+            See also `issue #428
+            <https://github.com/python-attrs/attrs/issues/428>`_.
+
+        force_kw_only (bool):
+            A back-compat flag for restoring pre-25.4.0 behavior.
If True and
+            ``kw_only=True``, all attributes are made keyword-only, including
+            base class attributes, and those set to ``kw_only=False`` at the
+            attribute level. Defaults to False.
+
+            See also `issue #980
+            <https://github.com/python-attrs/attrs/issues/980>`_.
+
+        getstate_setstate (bool | None):
+            .. note::
+
+                This is usually only interesting for slotted classes and you
+                should probably just set *auto_detect* to True.
+
+            If True, ``__getstate__`` and ``__setstate__`` are generated and
+            attached to the class. This is necessary for slotted classes to be
+            pickleable. If left None, it's True by default for slotted classes
+            and False for dict classes.
+
+            If *auto_detect* is True, and *getstate_setstate* is left None, and
+            **either** ``__getstate__`` or ``__setstate__`` is detected
+            directly on the class (meaning: not inherited), it is set to False
+            (this is usually what you want).
+
+        auto_attribs (bool | None):
+            If True, look at type annotations to determine which attributes to
+            use, like `dataclasses`. If False, it will only look for explicit
+            :func:`field` class attributes, like classic *attrs*.
+
+            If left None, it will guess:
+
+            1. If any attributes are annotated and no unannotated
+               `attrs.field`\ s are found, it assumes *auto_attribs=True*.
+            2. Otherwise it assumes *auto_attribs=False* and tries to collect
+               `attrs.field`\ s.
+
+            If *attrs* decides to look at type annotations, **all** fields
+            **must** be annotated. If *attrs* encounters a field that is set to
+            a :func:`field` / `attr.ib` but lacks a type annotation, an
+            `attrs.exceptions.UnannotatedAttributeError` is raised. Use
+            ``field_name: typing.Any = field(...)`` if you don't want to set a
+            type.
+
+            .. warning::
+
+                For features that use the attribute name to create decorators
+                (for example, :ref:`validators <validators>`), you still *must*
+                assign :func:`field` / `attr.ib` to them. Otherwise Python will
+                either not find the name or try to use the default value to
+                call, for example, ``validator`` on it.
+
+            Attributes annotated as `typing.ClassVar`, and attributes that are
+            neither annotated nor set to a `field()` are **ignored**.
+
+        these (dict[str, object]):
+            A dictionary of name to the (private) return value of `field()`
+            mappings. This is useful to avoid the definition of your attributes
+            within the class body because you can't (for example, if you want
+            to add ``__repr__`` methods to Django models) or don't want to.
+
+            If *these* is not `None`, *attrs* will *not* search the class body
+            for attributes and will *not* remove any attributes from it.
+
+            The order is deduced from the order of the attributes inside
+            *these*.
+
+            Arguably, this is a rather obscure feature.
+
+    .. versionadded:: 20.1.0
+    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+    .. versionadded:: 22.2.0
+       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
+    .. versionchanged:: 24.1.0
+       Instances are not compared as tuples of attributes anymore, but using a
+       big ``and`` condition. This is faster and has more correct behavior for
+       uncomparable values like `math.nan`.
+    .. versionadded:: 24.1.0
+       If a class has an *inherited* classmethod called
+       ``__attrs_init_subclass__``, it is executed after the class is created.
+    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
+    .. versionadded:: 24.3.0
+       Unless already present, a ``__replace__`` method is automatically
+       created for `copy.replace` (Python 3.13+ only).
+    ..
versionchanged:: 25.4.0 + *kw_only* now only applies to attributes defined in the current class, + and respects attribute-level ``kw_only=False`` settings. + .. versionadded:: 25.4.0 + Added *force_kw_only* to go back to the previous *kw_only* behavior. + + .. note:: + + The main differences to the classic `attr.s` are: + + - Automatically detect whether or not *auto_attribs* should be `True` + (c.f. *auto_attribs* parameter). + - Converters and validators run when attributes are set by default -- + if *frozen* is `False`. + - *slots=True* + + Usually, this has only upsides and few visible effects in everyday + programming. But it *can* lead to some surprising behaviors, so + please make sure to read :term:`slotted classes`. + + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - *force_kw_only=False* + - Some options that were only relevant on Python 2 or were kept around + for backwards-compatibility have been removed. + + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + unsafe_hash=unsafe_hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + force_kw_only=force_kw_only, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _DEFAULT_ON_SETATTR + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." + raise ValueError(msg) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but `None` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=None, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new :term:`field` / :term:`attribute` on a class. + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attrs.define` (or similar)! + + Args: + default: + A value that is used if an *attrs*-generated ``__init__`` is used + and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will + be used to construct a new value (useful for mutable data types + like lists or dicts). 
+
+            If a default is not set (or set manually to `attrs.NOTHING`), a
+            value *must* be supplied when instantiating; otherwise a
+            `TypeError` will be raised.
+
+            .. seealso:: `defaults`
+
+        factory (~typing.Callable):
+            Syntactic sugar for ``default=attr.Factory(factory)``.
+
+        validator (~typing.Callable | list[~typing.Callable]):
+            Callable that is called by *attrs*-generated ``__init__`` methods
+            after the instance has been initialized. They receive the
+            initialized instance, the :func:`~attrs.Attribute`, and the passed
+            value.
+
+            The return value is *not* inspected so the validator has to throw
+            an exception itself.
+
+            If a `list` is passed, its items are treated as validators and must
+            all pass.
+
+            Validators can be globally disabled and re-enabled using
+            `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
+
+            The validator can also be set using decorator notation as shown
+            below.
+
+            .. seealso:: :ref:`validators`
+
+        repr (bool | ~typing.Callable):
+            Include this attribute in the generated ``__repr__`` method. If
+            True, include the attribute; if False, omit it. By default, the
+            built-in ``repr()`` function is used. To override how the attribute
+            value is formatted, pass a ``callable`` that takes a single value
+            and returns a string. Note that the resulting string is used as-is,
+            which means it will be used directly *instead* of calling
+            ``repr()`` (the default).
+
+        eq (bool | ~typing.Callable):
+            If True (default), include this attribute in the generated
+            ``__eq__`` and ``__ne__`` methods that check two instances for
+            equality. To override how the attribute value is compared, pass a
+            callable that takes a single value and returns the value to be
+            compared.
+
+            .. seealso:: `comparison`
+
+        order (bool | ~typing.Callable):
+            If True (default), include this attribute in the generated
+            ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
+            override how the attribute value is ordered, pass a callable that
+            takes a single value and returns the value to be ordered.
+
+            .. seealso:: `comparison`
+
+        hash (bool | None):
+            Include this attribute in the generated ``__hash__`` method. If
+            None (default), mirror *eq*'s value. This is the correct behavior
+            according to the Python spec. Setting this value to anything else
+            than None is *discouraged*.
+
+            .. seealso:: `hashing`
+
+        init (bool):
+            Include this attribute in the generated ``__init__`` method.
+
+            It is possible to set this to False and set a default value. In
+            that case this attribute is unconditionally initialized with the
+            specified default value or factory.
+
+            .. seealso:: `init`
+
+        converter (typing.Callable | Converter):
+            A callable that is called by *attrs*-generated ``__init__`` methods
+            to convert the attribute's value to the desired format.
+
+            If a vanilla callable is passed, it is given the passed-in value as
+            the only positional argument. It is possible to receive additional
+            arguments by wrapping the callable in a `Converter`.
+
+            Either way, the returned value will be used as the new value of the
+            attribute. The value is converted before being passed to the
+            validator, if any.
+
+            .. seealso:: :ref:`converters`
+
+        metadata (dict | None):
+            An arbitrary mapping, to be used by third-party code.
+
+            .. seealso:: `extending-metadata`.
+
+        type (type):
+            The type of the attribute. Nowadays, the preferred method to
+            specify the type is using a variable annotation (see :pep:`526`).
+            This argument is provided for backwards-compatibility and for usage
+            with `make_class`.
Regardless of the approach used, the type will
+            be stored on ``Attribute.type``.
+
+            Please note that *attrs* doesn't do anything with this metadata by
+            itself. You can use it as part of your own code or for
+            `static type checking <types>`.
+
+        kw_only (bool | None):
+            Make this attribute keyword-only in the generated ``__init__`` (if
+            *init* is False, this parameter is ignored). If None (default),
+            mirror the setting from `attrs.define`.
+
+        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+            Allows overriding the *on_setattr* setting from `attr.s`. If left
+            None, the *on_setattr* value from `attr.s` is used. Set to
+            `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+            attribute -- regardless of the setting in `define()`.
+
+        alias (str | None):
+            Override this attribute's parameter name in the generated
+            ``__init__`` method. If left None, defaults to ``name`` stripped
+            of leading underscores. See `private-attributes`.
+
+    .. versionadded:: 20.1.0
+    .. versionchanged:: 21.1.0
+       *eq*, *order*, and *cmp* also accept a custom callable
+    .. versionadded:: 22.2.0 *alias*
+    .. versionadded:: 23.1.0
+       The *type* parameter has been re-added; mostly for `attrs.make_class`.
+       Please note that type checkers ignore this metadata.
+    .. versionchanged:: 25.4.0
+       *kw_only* can now be None, and its default is also changed from False to
+       None.
+
+    .. seealso::
+
+        `attr.ib`
+    """
+    return attrib(
+        default=default,
+        validator=validator,
+        repr=repr,
+        hash=hash,
+        init=init,
+        metadata=metadata,
+        type=type,
+        converter=converter,
+        factory=factory,
+        kw_only=kw_only,
+        eq=eq,
+        order=order,
+        on_setattr=on_setattr,
+        alias=alias,
+    )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+    """
+    Same as `attr.asdict`, except that collection types are always retained
+    and `dict` is always used as the *dict_factory*.
+
+    .. versionadded:: 21.3.0
+    """
+    return _asdict(
+        inst=inst,
+        recurse=recurse,
+        filter=filter,
+        value_serializer=value_serializer,
+        retain_collection_types=True,
+    )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+    """
+    Same as `attr.astuple`, except that collection types are always retained
+    and `tuple` is always used as the *tuple_factory*.
+
+    .. versionadded:: 21.3.0
+    """
+    return _astuple(
+        inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+    )
+
+
+def inspect(cls):
+    """
+    Inspect the class and return its effective build parameters.
+
+    Warning:
+        This feature is currently **experimental** and is not covered by our
+        strict backwards-compatibility guarantees.
+
+    Args:
+        cls: The *attrs*-decorated class to inspect.
+
+    Returns:
+        The effective build parameters of the class.
+
+    Raises:
+        NotAnAttrsClassError: If the class is not an *attrs*-decorated class.
+
+    .. versionadded:: 25.4.0
+    """
+    try:
+        return cls.__dict__["__attrs_props__"]
+    except KeyError:
+        msg = f"{cls!r} is not an attrs-decorated class."
+        raise NotAnAttrsClassError(msg) from None
diff --git a/venv/lib/python3.12/site-packages/attr/_typing_compat.pyi b/venv/lib/python3.12/site-packages/attr/_typing_compat.pyi
new file mode 100644
index 0000000..ca7b71e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/_typing_compat.pyi
@@ -0,0 +1,15 @@
+from typing import Any, ClassVar, Protocol
+
+# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
+MYPY = False
+
+if MYPY:
+    # A protocol to be able to statically accept an attrs class.
+ class AttrsInstance_(Protocol): + __attrs_attrs__: ClassVar[Any] + +else: + # For type checkers without plug-in support use an empty protocol that + # will (hopefully) be combined into a union. + class AttrsInstance_(Protocol): + pass diff --git a/venv/lib/python3.12/site-packages/attr/_version_info.py b/venv/lib/python3.12/site-packages/attr/_version_info.py new file mode 100644 index 0000000..27f1888 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_version_info.py @@ -0,0 +1,89 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them + + def __hash__(self): + return hash((self.year, self.minor, self.micro, self.releaselevel)) diff --git a/venv/lib/python3.12/site-packages/attr/_version_info.pyi b/venv/lib/python3.12/site-packages/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/venv/lib/python3.12/site-packages/attr/converters.py b/venv/lib/python3.12/site-packages/attr/converters.py new file mode 100644 index 0000000..0a79dee --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/converters.py @@ -0,0 +1,162 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + +import typing + +from ._compat import _AnnotationExtractor +from ._make import NOTHING, Converter, Factory, pipe + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. 
An optional attribute + is one which can be set to `None`. + + Type annotations will be inferred from the wrapped converter's, if it has + any. + + Args: + converter (typing.Callable): + the converter that is used for non-`None` values. + + .. versionadded:: 17.1.0 + """ + + if isinstance(converter, Converter): + + def optional_converter(val, inst, field): + if val is None: + return None + return converter(val, inst, field) + + else: + + def optional_converter(val): + if val is None: + return None + return converter(val) + + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] + + if isinstance(converter, Converter): + return Converter(optional_converter, takes_self=True, takes_field=True) + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace `None` values by *default* or the result + of *factory*. + + Args: + default: + Value to be used if `None` is passed. Passing an instance of + `attrs.Factory` is supported, however the ``takes_self`` option is + *not*. + + factory (typing.Callable): + A callable that takes no parameters whose result is used if `None` + is passed. + + Raises: + TypeError: If **neither** *default* or *factory* is passed. + + TypeError: If **both** *default* and *factory* are passed. + + ValueError: + If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + msg = "Must pass either `default` or `factory`." + raise TypeError(msg) + + if default is not NOTHING and factory is not None: + msg = "Must pass either `default` or `factory` but not both." + raise TypeError(msg) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + msg = "`takes_self` is not supported by default_if_none." + raise ValueError(msg) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (for example, from environment variables) to real + booleans. + + Values mapping to `True`: + + - ``True`` + - ``"true"`` / ``"t"`` + - ``"yes"`` / ``"y"`` + - ``"on"`` + - ``"1"`` + - ``1`` + + Values mapping to `False`: + + - ``False`` + - ``"false"`` / ``"f"`` + - ``"no"`` / ``"n"`` + - ``"off"`` + - ``"0"`` + - ``0`` + + Raises: + ValueError: For any other value. + + .. versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + + if val in (True, "true", "t", "yes", "y", "on", "1", 1): + return True + if val in (False, "false", "f", "no", "n", "off", "0", 0): + return False + + msg = f"Cannot convert value to bool: {val!r}" + raise ValueError(msg) diff --git a/venv/lib/python3.12/site-packages/attr/converters.pyi b/venv/lib/python3.12/site-packages/attr/converters.pyi new file mode 100644 index 0000000..12bd0c4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/converters.pyi @@ -0,0 +1,19 @@ +from typing import Callable, Any, overload + +from attrs import _ConverterType, _CallableConverterType + +@overload +def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ... 
+@overload +def pipe(*validators: _ConverterType) -> _ConverterType: ... +@overload +def optional(converter: _CallableConverterType) -> _CallableConverterType: ... +@overload +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: Any) -> _CallableConverterType: ... +@overload +def default_if_none( + *, factory: Callable[[], Any] +) -> _CallableConverterType: ... +def to_bool(val: str | int | bool) -> bool: ... diff --git a/venv/lib/python3.12/site-packages/attr/exceptions.py b/venv/lib/python3.12/site-packages/attr/exceptions.py new file mode 100644 index 0000000..3b7abb8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/exceptions.py @@ -0,0 +1,95 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +from typing import ClassVar + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args: ClassVar[tuple[str]] = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An *attrs* function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-*attrs* class has been passed into an *attrs* function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set when defining the field and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has a field without a type annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an *attrs* feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A field requiring a callable has been set with a value that is not + callable. + + .. versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/venv/lib/python3.12/site-packages/attr/exceptions.pyi b/venv/lib/python3.12/site-packages/attr/exceptions.pyi new file mode 100644 index 0000000..f268011 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... 
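Since the exception taxonomy above is abstract, a short usage sketch may help. This is a hedged editorial illustration, not part of the diff: the class names `Config` and `NotAttrs` are invented, and it relies only on `attrs.frozen` and `attrs.fields`, which appear elsewhere in this vendored copy.

# Hedged sketch: exercising the exceptions defined above.
import attrs
from attr.exceptions import FrozenInstanceError, NotAnAttrsClassError

@attrs.frozen
class Config:
    url: str

cfg = Config(url="https://example.com")
try:
    cfg.url = "https://other.example"  # frozen classes reject assignment
except FrozenInstanceError:
    print("frozen instances cannot be mutated")

class NotAttrs:  # a plain class, not decorated with attrs
    pass

try:
    attrs.fields(NotAttrs)
except NotAnAttrsClassError:
    print("fields() only works on attrs classes")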
diff --git a/venv/lib/python3.12/site-packages/attr/filters.py b/venv/lib/python3.12/site-packages/attr/filters.py
new file mode 100644
index 0000000..689b170
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/filters.py
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+    """
+    Returns a tuple of `frozenset`s of classes, names, and attributes.
+    """
+    return (
+        frozenset(cls for cls in what if isinstance(cls, type)),
+        frozenset(cls for cls in what if isinstance(cls, str)),
+        frozenset(cls for cls in what if isinstance(cls, Attribute)),
+    )
+
+
+def include(*what):
+    """
+    Create a filter that only allows *what*.
+
+    Args:
+        what (list[type, str, attrs.Attribute]):
+            What to include. Can be a type, a name, or an attribute.
+
+    Returns:
+        Callable:
+            A callable that can be passed to `attrs.asdict`'s and
+            `attrs.astuple`'s *filter* argument.
+
+    .. versionchanged:: 23.1.0 Accept strings with field names.
+    """
+    cls, names, attrs = _split_what(what)
+
+    def include_(attribute, value):
+        return (
+            value.__class__ in cls
+            or attribute.name in names
+            or attribute in attrs
+        )
+
+    return include_
+
+
+def exclude(*what):
+    """
+    Create a filter that does **not** allow *what*.
+
+    Args:
+        what (list[type, str, attrs.Attribute]):
+            What to exclude. Can be a type, a name, or an attribute.
+
+    Returns:
+        Callable:
+            A callable that can be passed to `attrs.asdict`'s and
+            `attrs.astuple`'s *filter* argument.
+
+    .. versionchanged:: 23.3.0 Accept field name string as input argument
+    """
+    cls, names, attrs = _split_what(what)
+
+    def exclude_(attribute, value):
+        return not (
+            value.__class__ in cls
+            or attribute.name in names
+            or attribute in attrs
+        )
+
+    return exclude_
diff --git a/venv/lib/python3.12/site-packages/attr/filters.pyi b/venv/lib/python3.12/site-packages/attr/filters.pyi
new file mode 100644
index 0000000..974abdc
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/filters.pyi
@@ -0,0 +1,6 @@
+from typing import Any
+
+from . import Attribute, _FilterType
+
+def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
+def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
diff --git a/venv/lib/python3.12/site-packages/attr/py.typed b/venv/lib/python3.12/site-packages/attr/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/attr/setters.py b/venv/lib/python3.12/site-packages/attr/setters.py
new file mode 100644
index 0000000..78b0839
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/setters.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+    """
+    Run all *setters* and return the return value of the last one.
+
+    .. versionadded:: 20.1.0
+    """
+
+    def wrapped_pipe(instance, attrib, new_value):
+        rv = new_value
+
+        for setter in setters:
+            rv = setter(instance, attrib, rv)
+
+        return rv
+
+    return wrapped_pipe
+
+
+def frozen(_, __, ___):
+    """
+    Prevent an attribute from being modified.
+
+    .. versionadded:: 20.1.0
+    """
+    raise FrozenAttributeError
+
+
+def validate(instance, attrib, new_value):
+    """
+    Run *attrib*'s validator on *new_value* if it has one.
+
+    .. versionadded:: 20.1.0
+    """
+    if _config._run_validators is False:
+        return new_value
+
+    v = attrib.validator
+    if not v:
+        return new_value
+
+    v(instance, attrib, new_value)
+
+    return new_value
+
+
+def convert(instance, attrib, new_value):
+    """
+    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+    result.
+
+    .. versionadded:: 20.1.0
+    """
+    c = attrib.converter
+    if c:
+        # This can be removed once we drop 3.8 and use attrs.Converter instead.
+        from ._make import Converter
+
+        if not isinstance(c, Converter):
+            return c(new_value)
+
+        return c(new_value, instance, attrib)
+
+    return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# Sphinx's autodata stopped working, so the docstring is inlined in the API
+# docs.
+NO_OP = object()
diff --git a/venv/lib/python3.12/site-packages/attr/setters.pyi b/venv/lib/python3.12/site-packages/attr/setters.pyi
new file mode 100644
index 0000000..73abf36
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/setters.pyi
@@ -0,0 +1,20 @@
+from typing import Any, NewType, NoReturn, TypeVar
+
+from . import Attribute
+from attrs import _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+    instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+    instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
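The hooks above are meant to be passed as *on_setattr*. A hedged sketch follows: the `Server` class is invented, and note that `define()` already applies `pipe(convert, validate)` by default for mutable classes, so the explicit form is purely illustrative.

# Hedged sketch: wiring convert/validate as on_setattr hooks.
import attr
from attr import setters, validators

@attr.s(on_setattr=setters.pipe(setters.convert, setters.validate))
class Server:
    port = attr.ib(converter=int, validator=validators.instance_of(int))
    name = attr.ib(default="srv", on_setattr=setters.NO_OP)  # skip hooks

s = Server(port="8080")   # converter runs in __init__: port becomes 8080
s.port = "9090"           # converter + validator also run on assignment
assert s.port == 9090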
diff --git a/venv/lib/python3.12/site-packages/attr/validators.py b/venv/lib/python3.12/site-packages/attr/validators.py
new file mode 100644
index 0000000..837e003
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/attr/validators.py
@@ -0,0 +1,748 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+import operator
+import re
+
+from contextlib import contextmanager
+from re import Pattern
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .converters import default_if_none
+from .exceptions import NotCallableError
+
+
+__all__ = [
+    "and_",
+    "deep_iterable",
+    "deep_mapping",
+    "disabled",
+    "ge",
+    "get_disabled",
+    "gt",
+    "in_",
+    "instance_of",
+    "is_callable",
+    "le",
+    "lt",
+    "matches_re",
+    "max_len",
+    "min_len",
+    "not_",
+    "optional",
+    "or_",
+    "set_disabled",
+]
+
+
+def set_disabled(disabled):
+    """
+    Globally disable or enable running validators.
+
+    By default, they are run.
+
+    Args:
+        disabled (bool): If `True`, disable running all validators.
+
+    .. warning::
+
+        This function is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(not disabled)
+
+
+def get_disabled():
+    """
+    Return a bool indicating whether validators are currently disabled or not.
+
+    Returns:
+        bool: `True` if validators are currently disabled.
+
+    .. versionadded:: 21.3.0
+    """
+    return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+    """
+    Context manager that disables running validators within its context.
+
+    .. warning::
+
+        This context manager is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(False)
+    try:
+        yield
+    finally:
+        set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InstanceOfValidator:
+    type = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not isinstance(value, self.type):
+            msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
+            raise TypeError(
+                msg,
+                attr,
+                self.type,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<instance_of validator for type {self.type!r}>"
+
+
+def instance_of(type):
+    """
+    A validator that raises a `TypeError` if the initializer is called with a
+    wrong type for this particular attribute (checks are performed using
+    `isinstance` therefore it's also valid to pass a tuple of types).
+
+    Args:
+        type (type | tuple[type]): The type to check for.
+
+    Raises:
+        TypeError:
+            With a human readable error message, the attribute (of type
+            `attrs.Attribute`), the expected type, and the value it got.
+    """
+    return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+    pattern = attrib()
+    match_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.match_func(value):
+            msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
+            raise ValueError(
+                msg,
+                attr,
+                self.pattern,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<matches_re validator for pattern {self.pattern!r}>"
+
+
+def matches_re(regex, flags=0, func=None):
+    r"""
+    A validator that raises `ValueError` if the initializer is called with a
+    string that doesn't match *regex*.
+
+    Args:
+        regex (str, re.Pattern):
+            A regex string or precompiled pattern to match against
+
+        flags (int):
+            Flags that will be passed to the underlying re function (default 0)
+
+        func (typing.Callable):
+            Which underlying `re` function to call. Valid options are
+            `re.fullmatch`, `re.search`, and `re.match`; the default `None`
+            means `re.fullmatch`. For performance reasons, the pattern is
+            always precompiled using `re.compile`.
+
+    .. versionadded:: 19.2.0
+    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+    """
+    valid_funcs = (re.fullmatch, None, re.search, re.match)
+    if func not in valid_funcs:
+        msg = "'func' must be one of {}.".format(
+            ", ".join(
+                sorted((e and e.__name__) or "None" for e in set(valid_funcs))
+            )
+        )
+        raise ValueError(msg)
+
+    if isinstance(regex, Pattern):
+        if flags:
+            msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
+            raise TypeError(msg)
+        pattern = regex
+    else:
+        pattern = re.compile(regex, flags)
+
+    if func is re.match:
+        match_func = pattern.match
+    elif func is re.search:
+        match_func = pattern.search
+    else:
+        match_func = pattern.fullmatch
+
+    return _MatchesReValidator(pattern, match_func)
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OptionalValidator:
+    validator = attrib()
+
+    def __call__(self, inst, attr, value):
+        if value is None:
+            return
+
+        self.validator(inst, attr, value)
+
+    def __repr__(self):
+        return f"<optional validator for {self.validator!r} or None>"
+
+
+def optional(validator):
+    """
+    A validator that makes an attribute optional. An optional attribute is one
+    which can be set to `None` in addition to satisfying the requirements of
+    the sub-validator.
+
+    Args:
+        validator
+            (typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
+            A validator (or validators) that is used for non-`None` values.
+
+    .. versionadded:: 15.1.0
+    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+    .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
+    """
+    if isinstance(validator, (list, tuple)):
+        return _OptionalValidator(_AndValidator(validator))
+
+    return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InValidator:
+    options = attrib()
+    _original_options = attrib(hash=False)
+
+    def __call__(self, inst, attr, value):
+        try:
+            in_options = value in self.options
+        except TypeError:  # e.g. `1 in "abc"`
+            in_options = False
+
+        if not in_options:
+            msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
+            raise ValueError(
+                msg,
+                attr,
+                self._original_options,
+                value,
+            )
+
+    def __repr__(self):
+        return f"<in_ validator with options {self._original_options!r}>"
+
+
+def in_(options):
+    """
+    A validator that raises a `ValueError` if the initializer is called with a
+    value that does not belong in the *options* provided.
+
+    The check is performed using ``value in options``, so *options* has to
+    support that operation.
+
+    To keep the validator hashable, dicts, lists, and sets are transparently
+    transformed into a `tuple`.
+
+    Args:
+        options: Allowed options.
+
+    Raises:
+        ValueError:
+            With a human readable error message, the attribute (of type
+            `attrs.Attribute`), the expected options, and the value it got.
+
+    .. versionadded:: 17.1.0
+    .. versionchanged:: 22.1.0
+       The ValueError was incomplete until now and only contained the human
+       readable error message. Now it contains all the information that has
+       been promised since 17.1.0.
+    .. versionchanged:: 24.1.0
+       *options* that are a list, dict, or a set are now transformed into a
+       tuple to keep the validator hashable.
+    """
+    repr_options = options
+    if isinstance(options, (list, dict, set)):
+        options = tuple(options)
+
+    return _InValidator(options, repr_options)
+
+
+@attrs(repr=False, slots=False, unsafe_hash=True)
+class _IsCallableValidator:
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not callable(value):
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
+
+    def __repr__(self):
+        return "<is_callable validator>"
+
+
+def is_callable():
+    """
+    A validator that raises a `attrs.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute that is
+    not callable.
+
+    .. versionadded:: 19.1.0
+
+    Raises:
+        attrs.exceptions.NotCallableError:
+            With a human readable error message containing the attribute
+            (`attrs.Attribute`) name, and the value it got.
+    """
+    return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepIterable:
+    member_validator = attrib(validator=is_callable())
+    iterable_validator = attrib(
+        default=None, validator=optional(is_callable())
+    )
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.iterable_validator is not None:
+            self.iterable_validator(inst, attr, value)
+
+        for member in value:
+            self.member_validator(inst, attr, member)
+
+    def __repr__(self):
+        iterable_identifier = (
+            ""
+            if self.iterable_validator is None
+            else f" {self.iterable_validator!r}"
+        )
+        return (
+            f"<deep_iterable validator for{iterable_identifier}"
+            f" iterables of {self.member_validator!r}>"
+        )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+    """
+    A validator that performs deep validation of an iterable.
+
+    Args:
+        member_validator: Validator(s) to apply to iterable members.
+
+        iterable_validator:
+            Validator(s) to apply to iterable itself (optional).
+
+    Raises:
+        TypeError: If any sub-validator fails.
+
+    .. versionadded:: 19.1.0
+
+    .. versionchanged:: 25.4.0
+       *member_validator* and *iterable_validator* can now be a list or tuple
+       of validators.
+    """
+    if isinstance(member_validator, (list, tuple)):
+        member_validator = and_(*member_validator)
+    if isinstance(iterable_validator, (list, tuple)):
+        iterable_validator = and_(*iterable_validator)
+    return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepMapping:
+    key_validator = attrib(validator=optional(is_callable()))
+    value_validator = attrib(validator=optional(is_callable()))
+    mapping_validator = attrib(validator=optional(is_callable()))
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.mapping_validator is not None:
+            self.mapping_validator(inst, attr, value)
+
+        for key in value:
+            if self.key_validator is not None:
+                self.key_validator(inst, attr, key)
+            if self.value_validator is not None:
+                self.value_validator(inst, attr, value[key])
+
+    def __repr__(self):
+        return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
+
+
+def deep_mapping(
+    key_validator=None, value_validator=None, mapping_validator=None
+):
+    """
+    A validator that performs deep validation of a dictionary.
+
+    All validators are optional, but at least one of *key_validator* or
+    *value_validator* must be provided.
+
+    Args:
+        key_validator: Validator(s) to apply to dictionary keys.
+
+        value_validator: Validator(s) to apply to dictionary values.
+
+        mapping_validator:
+            Validator(s) to apply to top-level mapping attribute.
+
+    .. versionadded:: 19.1.0
+
+    .. versionchanged:: 25.4.0
+       *key_validator* and *value_validator* are now optional, but at least one
+       of them must be provided.
+
+    .. versionchanged:: 25.4.0
+       *key_validator*, *value_validator*, and *mapping_validator* can now be a
+       list or tuple of validators.
+
+    Raises:
+        TypeError: If any sub-validator fails on validation.
+
+        ValueError:
+            If neither *key_validator* nor *value_validator* is provided on
+            instantiation.
+    """
+    if key_validator is None and value_validator is None:
+        msg = (
+            "At least one of key_validator or value_validator must be provided"
+        )
+        raise ValueError(msg)
+
+    if isinstance(key_validator, (list, tuple)):
+        key_validator = and_(*key_validator)
+    if isinstance(value_validator, (list, tuple)):
+        value_validator = and_(*value_validator)
+    if isinstance(mapping_validator, (list, tuple)):
+        mapping_validator = and_(*mapping_validator)
+
+    return _DeepMapping(key_validator, value_validator, mapping_validator)
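A hedged sketch of the deep validators above; the `Table` class and its fields are invented for illustration.

# Hedged sketch: validating container members and mapping keys/values.
import attrs
from attrs import validators

@attrs.define
class Table:
    rows: list = attrs.field(
        validator=validators.deep_iterable(
            member_validator=validators.instance_of(int),
            iterable_validator=validators.instance_of(list),
        )
    )
    labels: dict = attrs.field(
        factory=dict,
        validator=validators.deep_mapping(
            key_validator=validators.instance_of(str),
            value_validator=validators.instance_of(int),
        ),
    )

Table(rows=[1, 2, 3], labels={"a": 1})
try:
    Table(rows=[1, "2"])  # member validator rejects the str element
except TypeError:
    print("all members must be ints")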
+ """ + if not self.compare_func(value, self.bound): + msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number larger or equal to *val*. + + The validator uses `operator.lt` to compare the values. + + Args: + val: Exclusive upper bound for values. + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number greater than *val*. + + The validator uses `operator.le` to compare the values. + + Args: + val: Inclusive upper bound for values. + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number smaller than *val*. + + The validator uses `operator.ge` to compare the values. + + Args: + val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number smaller or equal to *val*. + + The validator uses `operator.gt` to compare the values. + + Args: + val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator: + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + Args: + length (int): Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) + + +@attrs(repr=False, frozen=True, slots=True) +class _MinLengthValidator: + min_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) < self.min_length: + msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def min_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is shorter than *length*. + + Args: + length (int): Minimum length of the string or iterable + + .. versionadded:: 22.1.0 + """ + return _MinLengthValidator(length) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _SubclassOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not issubclass(value, self.type): + msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})." 
+ raise TypeError( + msg, + attr, + self.type, + value, + ) + + def __repr__(self): + return f"<subclass_of validator for type {self.type!r}>" + + +def _subclass_of(type): + """ + A validator that raises a `TypeError` if the initializer is called with a + wrong type for this particular attribute (checks are performed using + `issubclass` therefore it's also valid to pass a tuple of types). + + Args: + type (type | tuple[type, ...]): The type(s) to check for. + + Raises: + TypeError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the expected type, and the value it got. + """ + return _SubclassOfValidator(type) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _NotValidator: + validator = attrib() + msg = attrib( + converter=default_if_none( + "not_ validator child '{validator!r}' " + "did not raise a captured error" + ) + ) + exc_types = attrib( + validator=deep_iterable( + member_validator=_subclass_of(Exception), + iterable_validator=instance_of(tuple), + ), + ) + + def __call__(self, inst, attr, value): + try: + self.validator(inst, attr, value) + except self.exc_types: + pass # suppress error to invert validity + else: + raise ValueError( + self.msg.format( + validator=self.validator, + exc_types=self.exc_types, + ), + attr, + self.validator, + value, + self.exc_types, + ) + + def __repr__(self): + return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>" + + +def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): + """ + A validator that wraps and logically 'inverts' the validator passed to it. + It will raise a `ValueError` if the provided validator *doesn't* raise a + `ValueError` or `TypeError` (by default), and will suppress the exception + if the provided validator *does*. + + Intended to be used with existing validators to compose logic without + needing to create inverted variants, for example, ``not_(in_(...))``. + + Args: + validator: A validator to be logically inverted. + + msg (str): + Message to raise if validator fails. Formatted with keys + ``exc_types`` and ``validator``. + + exc_types (tuple[type, ...]): + Exception type(s) to capture. Other types raised by child + validators will not be intercepted and pass through. + + Raises: + ValueError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the validator that failed to raise an + exception, the value it got, and the expected exception types. + + .. versionadded:: 22.2.0 + """ + try: + exc_types = tuple(exc_types) + except TypeError: + exc_types = (exc_types,) + return _NotValidator(validator, msg, exc_types) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _OrValidator: + validators = attrib() + + def __call__(self, inst, attr, value): + for v in self.validators: + try: + v(inst, attr, value) + except Exception: # noqa: BLE001, PERF203, S112 + continue + else: + return + + msg = f"None of {self.validators!r} satisfied for value {value!r}" + raise ValueError(msg) + + def __repr__(self): + return f"<or validator wrapping {self.validators!r}>" + + +def or_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators until one of them is + satisfied. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + Raises: + ValueError: + If no validator is satisfied. Raised with a human-readable error + message listing all the wrapped validators and the value that + failed all of them. + + ..
versionadded:: 24.1.0 + """ + vals = [] + for v in validators: + vals.extend(v.validators if isinstance(v, _OrValidator) else [v]) + + return _OrValidator(tuple(vals)) diff --git a/venv/lib/python3.12/site-packages/attr/validators.pyi b/venv/lib/python3.12/site-packages/attr/validators.pyi new file mode 100644 index 0000000..36a7e80 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attr/validators.pyi @@ -0,0 +1,140 @@ +from types import UnionType +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + Mapping, + Match, + Pattern, + TypeVar, + overload, +) + +from attrs import _ValidatorType +from attrs import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_T6 = TypeVar("_T6") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: tuple[type[_T1], type[_T2]], +) -> _ValidatorType[_T1 | _T2]: ... +@overload +def instance_of( + type: tuple[type[_T1], type[_T2], type[_T3]], +) -> _ValidatorType[_T1 | _T2 | _T3]: ... +@overload +def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ... +@overload +def instance_of(type: UnionType) -> _ValidatorType[Any]: ... +def optional( + validator: ( + _ValidatorType[_T] + | list[_ValidatorType[_T]] + | tuple[_ValidatorType[_T]] + ), +) -> _ValidatorType[_T | None]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Pattern[AnyStr] | AnyStr, + flags: int = ..., + func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: _ValidatorArgType[_I] | None = ..., +) -> _ValidatorType[_I]: ... +@overload +def deep_mapping( + key_validator: _ValidatorArgType[_K], + value_validator: _ValidatorArgType[_V] | None = ..., + mapping_validator: _ValidatorArgType[_M] | None = ..., +) -> _ValidatorType[_M]: ... +@overload +def deep_mapping( + key_validator: _ValidatorArgType[_K] | None = ..., + value_validator: _ValidatorArgType[_V] = ..., + mapping_validator: _ValidatorArgType[_M] | None = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... +def not_( + validator: _ValidatorType[_T], + *, + msg: str | None = None, + exc_types: type[Exception] | Iterable[type[Exception]] = ..., +) -> _ValidatorType[_T]: ... +@overload +def or_( + __v1: _ValidatorType[_T1], + __v2: _ValidatorType[_T2], +) -> _ValidatorType[_T1 | _T2]: ... +@overload +def or_( + __v1: _ValidatorType[_T1], + __v2: _ValidatorType[_T2], + __v3: _ValidatorType[_T3], +) -> _ValidatorType[_T1 | _T2 | _T3]: ... 
+@overload +def or_( + __v1: _ValidatorType[_T1], + __v2: _ValidatorType[_T2], + __v3: _ValidatorType[_T3], + __v4: _ValidatorType[_T4], +) -> _ValidatorType[_T1 | _T2 | _T3 | _T4]: ... +@overload +def or_( + __v1: _ValidatorType[_T1], + __v2: _ValidatorType[_T2], + __v3: _ValidatorType[_T3], + __v4: _ValidatorType[_T4], + __v5: _ValidatorType[_T5], +) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5]: ... +@overload +def or_( + __v1: _ValidatorType[_T1], + __v2: _ValidatorType[_T2], + __v3: _ValidatorType[_T3], + __v4: _ValidatorType[_T4], + __v5: _ValidatorType[_T5], + __v6: _ValidatorType[_T6], +) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5 | _T6]: ... +@overload +def or_( + __v1: _ValidatorType[Any], + __v2: _ValidatorType[Any], + __v3: _ValidatorType[Any], + __v4: _ValidatorType[Any], + __v5: _ValidatorType[Any], + __v6: _ValidatorType[Any], + *validators: _ValidatorType[Any], +) -> _ValidatorType[Any]: ... diff --git a/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/METADATA new file mode 100644 index 0000000..51128bb --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/METADATA @@ -0,0 +1,235 @@ +Metadata-Version: 2.4 +Name: attrs +Version: 25.4.0 +Summary: Classes Without Boilerplate +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: GitHub, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Author-email: Hynek Schlawack +License-Expression: MIT +License-File: LICENSE +Keywords: attribute,boilerplate,class +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.9 +Description-Content-Type: text/markdown + +

+ [attrs logo]

    + + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +Trusted by NASA for [Mars missions since 2020](https://github.com/readme/featured/nasa-ingenuity-helicopter)! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + + + +

+ [sponsor logos]


    + Please consider joining them to help make attrs’s maintenance more sustainable! +

    + + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation! + + +### Hate Type Annotations!? + +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types: + +```python +from attrs import define, field + +@define +class SomeClass: + a_number = field(default=42) + list_of_numbers = field(factory=list) +``` + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). +In practice it does a lot more and is more flexible. +For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger. + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice. 
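To make the NumPy point above concrete, here is a minimal sketch of that equality customization. It assumes NumPy is installed; the `Sample` class and its `data` field are hypothetical names invented for illustration, not part of the *attrs* docs.

```python
# Minimal sketch (assumes NumPy is available). An attrs-generated __eq__
# would otherwise trip over np.ndarray, whose == returns an element-wise
# array rather than a bool, so we plug np.array_equal in via cmp_using().
import attrs
import numpy as np


@attrs.define
class Sample:
    data: np.ndarray = attrs.field(eq=attrs.cmp_using(eq=np.array_equal))


assert Sample(np.array([1, 2, 3])) == Sample(np.array([1, 2, 3]))
```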
+ + +## Project Information + + - [**Changelog**](https://www.attrs.org/en/stable/changelog.html) + - [**Documentation**](https://www.attrs.org/) + - [**PyPI**](https://pypi.org/project/attrs/) + - [**Source Code**](https://github.com/python-attrs/attrs) + - [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) + - [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) + - **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs) + + +### *attrs* for Enterprise + +Available as part of the [Tidelift Subscription](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek). + +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. + +## Release Information + +### Backwards-incompatible Changes + +- Class-level `kw_only=True` behavior is now consistent with `dataclasses`. + + Previously, a class that set `kw_only=True` made all attributes keyword-only, including those from base classes. + If an attribute set `kw_only=False`, that setting was ignored, and it was still made keyword-only. + + Now, only the attributes defined in that class that don't explicitly set `kw_only=False` are made keyword-only. + + This shouldn't be a problem for most users, unless you have a pattern like this: + + ```python + @attrs.define(kw_only=True) + class Base: + a: int + b: int = attrs.field(default=1, kw_only=False) + + @attrs.define + class Subclass(Base): + c: int + ``` + + Here, we have a `kw_only=True` *attrs* class (`Base`) with an attribute that sets `kw_only=False` and has a default (`Base.b`), and then create a subclass (`Subclass`) with required arguments (`Subclass.c`). + Previously this would work, since it would make `Base.b` keyword-only, but now this fails since `Base.b` is positional, and we have a required positional argument (`Subclass.c`) following another argument with defaults. + [#1457](https://github.com/python-attrs/attrs/issues/1457) + + +### Changes + +- Values passed to the `__init__()` method of `attrs` classes are now correctly passed to `__attrs_pre_init__()` instead of their default values (in cases where *kw_only* was not specified). + [#1427](https://github.com/python-attrs/attrs/issues/1427) +- Added support for Python 3.14 and [PEP 749](https://peps.python.org/pep-0749/). + [#1446](https://github.com/python-attrs/attrs/issues/1446), + [#1451](https://github.com/python-attrs/attrs/issues/1451) +- `attrs.validators.deep_mapping()` now allows leaving out either *key_validator* or *value_validator* (but not both). + [#1448](https://github.com/python-attrs/attrs/issues/1448) +- `attrs.validators.deep_iterable()` and `attrs.validators.deep_mapping()` now accept lists and tuples for all validators and wrap them into an `attrs.validators.and_()`. + [#1449](https://github.com/python-attrs/attrs/issues/1449) +- Added a new **experimental** way to inspect classes: + + `attrs.inspect(cls)` returns the _effective_ class-wide parameters that were used by *attrs* to construct the class. + + The returned object is the same data structure that *attrs* uses internally to decide how to construct the final class.
+ [#1454](https://github.com/python-attrs/attrs/issues/1454) +- Fixed annotations for `attrs.field(converter=...)`. + Previously, a `tuple` of converters was only accepted if it had exactly one element. + [#1461](https://github.com/python-attrs/attrs/issues/1461) +- The performance of `attrs.asdict()` has been improved by 45–260%. + [#1463](https://github.com/python-attrs/attrs/issues/1463) +- The performance of `attrs.astuple()` has been improved by 49–270%. + [#1469](https://github.com/python-attrs/attrs/issues/1469) +- The type annotation for `attrs.validators.or_()` now allows for different types of validators. + + This was only an issue on Pyright. + [#1474](https://github.com/python-attrs/attrs/issues/1474) + + + +--- + +[Full changelog →](https://www.attrs.org/en/stable/changelog.html) diff --git a/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/RECORD new file mode 100644 index 0000000..70e06bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/RECORD @@ -0,0 +1,55 @@ +attr/__init__.py,sha256=fOYIvt1eGSqQre4uCS3sJWKZ0mwAuC8UD6qba5OS9_U,2057 +attr/__init__.pyi,sha256=IZkzIjvtbRqDWGkDBIF9dd12FgDa379JYq3GHnVOvFQ,11309 +attr/__pycache__/__init__.cpython-312.pyc,, +attr/__pycache__/_cmp.cpython-312.pyc,, +attr/__pycache__/_compat.cpython-312.pyc,, +attr/__pycache__/_config.cpython-312.pyc,, +attr/__pycache__/_funcs.cpython-312.pyc,, +attr/__pycache__/_make.cpython-312.pyc,, +attr/__pycache__/_next_gen.cpython-312.pyc,, +attr/__pycache__/_version_info.cpython-312.pyc,, +attr/__pycache__/converters.cpython-312.pyc,, +attr/__pycache__/exceptions.cpython-312.pyc,, +attr/__pycache__/filters.cpython-312.pyc,, +attr/__pycache__/setters.cpython-312.pyc,, +attr/__pycache__/validators.cpython-312.pyc,, +attr/_cmp.py,sha256=3Nn1TjxllUYiX_nJoVnEkXoDk0hM1DYKj5DE7GZe4i0,4117 +attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368 +attr/_compat.py,sha256=x0g7iEUOnBVJC72zyFCgb1eKqyxS-7f2LGnNyZ_r95s,2829 +attr/_config.py,sha256=dGq3xR6fgZEF6UBt_L0T-eUHIB4i43kRmH0P28sJVw8,843 +attr/_funcs.py,sha256=Ix5IETTfz5F01F-12MF_CSFomIn2h8b67EVVz2gCtBE,16479 +attr/_make.py,sha256=NRJDGS8syg2h3YNflVNoK2FwR3CpdSZxx8M6lacwljA,104141 +attr/_next_gen.py,sha256=BQtCUlzwg2gWHTYXBQvrEYBnzBUrDvO57u0Py6UCPhc,26274 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=w4R-FYC3NK_kMkGUWJlYP4cVAlH9HRaC-um3fcjYkHM,2222 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=GlDeOzPeTFgeBBLbj9G57Ez5lAk68uhSALRYJ_exe84,3861 +attr/converters.pyi,sha256=orU2bff-VjQa2kMDyvnMQV73oJT2WRyQuw4ZR1ym1bE,643 +attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795 +attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=5-dcT63GQK35ONEzSgfXCkbB7pPkaR-qv15mm4PVSzQ,1617 +attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584 +attr/validators.py,sha256=1BnYGTuYvSucGEI4ju-RPNJteVzG0ZlfWpJiWoSFHQ8,21458 +attr/validators.pyi,sha256=ftmW3m4KJ3pQcIXAj-BejT7BY4ZfqrC1G-5W7XvoPds,4082 +attrs-25.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+attrs-25.4.0.dist-info/METADATA,sha256=2Rerxj7agcMRxiwdkt6lC2guqHAmkGKCH13nWWK7ZoQ,10473 +attrs-25.4.0.dist-info/RECORD,, +attrs-25.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +attrs-25.4.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs/__init__.py,sha256=RxaAZNwYiEh-fcvHLZNpQ_DWKni73M_jxEPEftiq1Zc,1183 +attrs/__init__.pyi,sha256=2gV79g9UxJppGSM48hAZJ6h_MHb70dZoJL31ZNJeZYI,9416 +attrs/__pycache__/__init__.cpython-312.pyc,, +attrs/__pycache__/converters.cpython-312.pyc,, +attrs/__pycache__/exceptions.cpython-312.pyc,, +attrs/__pycache__/filters.cpython-312.pyc,, +attrs/__pycache__/setters.cpython-312.pyc,, +attrs/__pycache__/validators.cpython-312.pyc,, +attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76 +attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76 +attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73 +attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76 diff --git a/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/WHEEL new file mode 100644 index 0000000..12228d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..2bd6453 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/venv/lib/python3.12/site-packages/attrs/__init__.py b/venv/lib/python3.12/site-packages/attrs/__init__.py new file mode 100644 index 0000000..dc1ce4b --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/__init__.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Converter, + Factory, + NothingType, + _make_getattr, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._make import ClassProps +from attr._next_gen import asdict, astuple, inspect + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "NOTHING", + "Attribute", + "AttrsInstance", + "ClassProps", + "Converter", + "Factory", + "NothingType", + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "has", + "inspect", + "make_class", + "mutable", + "resolve_types", + "setters", + "validate", + "validators", +] + +__getattr__ = _make_getattr(__name__) diff --git a/venv/lib/python3.12/site-packages/attrs/__init__.pyi b/venv/lib/python3.12/site-packages/attrs/__init__.pyi new file mode 100644 index 0000000..6364bac --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/__init__.pyi @@ -0,0 +1,314 @@ +import sys + +from typing import ( + Any, + Callable, + Mapping, + Sequence, + overload, + TypeVar, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import Converter as Converter +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import has as has +from attr import make_class as make_class +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators +from attr import attrib, asdict as asdict, astuple as astuple +from attr import NothingType as NothingType + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = bool | Callable[[Any], Any] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_CallableConverterType = Callable[[Any], Any] +_ConverterType = _CallableConverterType | Converter[Any, Any] +_ReprType = Callable[[Any], str] +_ReprArgType = bool | _ReprType +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType +_FieldTransformer = Callable[ + [type, list["Attribute[Any]"]], list["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]] + +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool | None = ..., + eq: bool | None = ..., + order: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType + | list[_ConverterType] + | tuple[_ConverterType, ...] + | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool | None = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> _T: ... + +# This form catches an explicit default argument. 
+@overload +def field( + *, + default: _T, + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType + | list[_ConverterType] + | tuple[_ConverterType, ...] + | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool | None = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: _T | None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType + | list[_ConverterType] + | tuple[_ConverterType, ...] + | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool | None = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> Any: ... +@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define + +@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: _C, + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: None = ..., + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +class ClassProps: + # XXX: somehow when defining/using enums Mypy starts looking at our own + # (untyped) code and causes tons of errors. + Hashability: Any + KeywordOnly: Any + + is_exception: bool + is_slotted: bool + has_weakref_slot: bool + is_frozen: bool + # kw_only: ClassProps.KeywordOnly + kw_only: Any + collected_fields_by_mro: bool + added_init: bool + added_repr: bool + added_eq: bool + added_ordering: bool + # hashability: ClassProps.Hashability + hashability: Any + added_match_args: bool + added_str: bool + added_pickling: bool + on_setattr_hook: _OnSetAttrType | None + field_transformer: Callable[[Attribute[Any]], Attribute[Any]] | None + + def __init__( + self, + is_exception: bool, + is_slotted: bool, + has_weakref_slot: bool, + is_frozen: bool, + # kw_only: ClassProps.KeywordOnly + kw_only: Any, + collected_fields_by_mro: bool, + added_init: bool, + added_repr: bool, + added_eq: bool, + added_ordering: bool, + # hashability: ClassProps.Hashability + hashability: Any, + added_match_args: bool, + added_str: bool, + added_pickling: bool, + on_setattr_hook: _OnSetAttrType, + field_transformer: Callable[[Attribute[Any]], Attribute[Any]], + ) -> None: ... + @property + def is_hashable(self) -> bool: ... + +def inspect(cls: type) -> ClassProps: ... 
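Since `inspect()` and `ClassProps` appear here only as type stubs, a short usage sketch may help. This is a hypothetical illustration derived solely from the stub above; the API is marked experimental in the changelog, so field names and behavior may change.

```python
# Hypothetical sketch of the experimental attrs.inspect() API, based only
# on the ClassProps stub above; treat the attribute names as provisional.
import attrs


@attrs.define(frozen=True)
class Point:
    x: int
    y: int


props = attrs.inspect(Point)
assert props.is_frozen    # set by @define(frozen=True)
assert props.is_slotted   # @define creates slotted classes by default
assert props.added_init   # attrs generated __init__ for this class
assert props.is_hashable  # frozen attrs classes are hashable
```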
diff --git a/venv/lib/python3.12/site-packages/attrs/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/attrs/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..dbefa8d Binary files /dev/null and b/venv/lib/python3.12/site-packages/attrs/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attrs/__pycache__/converters.cpython-312.pyc b/venv/lib/python3.12/site-packages/attrs/__pycache__/converters.cpython-312.pyc new file mode 100644 index 0000000..2baf70e Binary files /dev/null and b/venv/lib/python3.12/site-packages/attrs/__pycache__/converters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..a9089f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attrs/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attrs/__pycache__/filters.cpython-312.pyc b/venv/lib/python3.12/site-packages/attrs/__pycache__/filters.cpython-312.pyc new file mode 100644 index 0000000..5f3b7a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attrs/__pycache__/filters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attrs/__pycache__/setters.cpython-312.pyc b/venv/lib/python3.12/site-packages/attrs/__pycache__/setters.cpython-312.pyc new file mode 100644 index 0000000..1044255 Binary files /dev/null and b/venv/lib/python3.12/site-packages/attrs/__pycache__/setters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attrs/__pycache__/validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/attrs/__pycache__/validators.cpython-312.pyc new file mode 100644 index 0000000..c63e24a Binary files /dev/null and b/venv/lib/python3.12/site-packages/attrs/__pycache__/validators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/attrs/converters.py b/venv/lib/python3.12/site-packages/attrs/converters.py new file mode 100644 index 0000000..7821f6c --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa: F403 diff --git a/venv/lib/python3.12/site-packages/attrs/exceptions.py b/venv/lib/python3.12/site-packages/attrs/exceptions.py new file mode 100644 index 0000000..3323f9d --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa: F403 diff --git a/venv/lib/python3.12/site-packages/attrs/filters.py b/venv/lib/python3.12/site-packages/attrs/filters.py new file mode 100644 index 0000000..3080f48 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa: F403 diff --git a/venv/lib/python3.12/site-packages/attrs/py.typed b/venv/lib/python3.12/site-packages/attrs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/attrs/setters.py b/venv/lib/python3.12/site-packages/attrs/setters.py new file mode 100644 index 0000000..f3d73bb --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa: F403 diff --git a/venv/lib/python3.12/site-packages/attrs/validators.py 
b/venv/lib/python3.12/site-packages/attrs/validators.py new file mode 100644 index 0000000..037e124 --- /dev/null +++ b/venv/lib/python3.12/site-packages/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa: F403 diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/LICENSE b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/LICENSE new file mode 100644 index 0000000..11069ed --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/METADATA new file mode 100644 index 0000000..789f784 --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/METADATA @@ -0,0 +1,292 @@ +Metadata-Version: 2.1 +Name: bcrypt +Version: 4.0.1 +Summary: Modern password hashing for your software and your servers +Home-page: https://github.com/pyca/bcrypt/ +Author: The Python Cryptographic Authority developers +Author-email: cryptography-dev@python.org +License: Apache License, Version 2.0 +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: tests +Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests' +Provides-Extra: typecheck +Requires-Dist: mypy ; extra == 'typecheck' + +bcrypt +====== + +.. image:: https://img.shields.io/pypi/v/bcrypt.svg + :target: https://pypi.org/project/bcrypt/ + :alt: Latest Version + +.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain + +Acceptable password hashing for your software and your servers (but you should +really use argon2id or scrypt) + + +Installation +============ + +To install bcrypt, simply: + +.. code:: bash + + $ pip install bcrypt + +Note that bcrypt should build very easily on Linux provided you have a C +compiler and a Rust compiler (the minimum supported Rust version is 1.56.0). + +For Debian and Ubuntu, the following command will ensure that the required dependencies are installed: + +.. 
code:: bash + + $ sudo apt-get install build-essential cargo + +For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed: + +.. code:: bash + + $ sudo yum install gcc cargo + +For Alpine, the following command will ensure that the required dependencies are installed: + +.. code:: bash + + $ apk add --update musl-dev gcc cargo + + +Alternatives +============ + +While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_. + +Changelog +========= + +4.0.1 +----- + +* We now build PyPy ``manylinux`` wheels. +* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in + a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``. + +4.0.0 +----- + +* ``bcrypt`` is now implemented in Rust. Users building from source will need + to have a Rust compiler available. Nothing will change for users downloading + wheels. +* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest + ``pip`` to ensure this doesn’t cause issues downloading wheels on their + platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms. +* ``NUL`` bytes are now allowed in inputs. + + +3.2.2 +----- + +* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works. + +3.2.1 +----- + +* Added support for compilation on z/OS +* The next release of ``bcrypt`` will be 4.0 and it will require Rust at + compile time, for users building from source. There will be no additional + requirement for users who are installing from wheels. Users on most + platforms will be able to obtain a wheel by making sure they have an up to + date ``pip``. The minimum supported Rust version will be 1.56.0. +* This will be the final release for which we ship ``manylinux2010`` wheels. + Going forward the minimum supported manylinux ABI for our wheels will be + ``manylinux2014``. The vast majority of users will continue to receive + ``manylinux`` wheels provided they have an up to date ``pip``. + + +3.2.0 +----- + +* Added typehints for library functions. +* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5). +* Shipped ``abi3`` Windows wheels (requires pip >= 20). + +3.1.7 +----- + +* Set a ``setuptools`` lower bound for PEP517 wheel building. +* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce + them was a maintenance burden. + +3.1.6 +----- + +* Added support for compilation on Haiku. + +3.1.5 +----- + +* Added support for compilation on AIX. +* Dropped Python 2.6 and 3.3 support. +* Switched to using ``abi3`` wheels for Python 3. If you are not getting a + wheel on a compatible platform please upgrade your ``pip`` version. + +3.1.4 +----- + +* Fixed compilation with mingw and on illumos. + +3.1.3 +----- +* Fixed a compilation issue on Solaris. +* Added a warning when using too few rounds with ``kdf``. + +3.1.2 +----- +* Fixed a compile issue affecting big endian platforms. +* Fixed invalid escape sequence warnings on Python 3.6. +* Fixed building in non-UTF8 environments on Python 2. + +3.1.1 +----- +* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3. + +3.1.0 +----- +* Added support for ``checkpw``, a convenience method for verifying a password. +* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt. +* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug.
+* Fixed compilation under Alpine Linux.
+
+3.0.0
+-----
+* Switched the C backend to code obtained from the OpenBSD project rather than
+  openwall.
+* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function.
+
+2.0.0
+-----
+* Added support for an adjustable prefix when calling ``gensalt``.
+* Switched to CFFI 1.0+.
+
+Usage
+-----
+
+Password Hashing
+~~~~~~~~~~~~~~~~
+
+Hashing a password, and later checking that a candidate password matches the
+previously stored hash, is very simple:
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> password = b"super secret password"
+    >>> # Hash a password for the first time, with a randomly-generated salt
+    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt())
+    >>> # Check that an unhashed password matches one that has previously been
+    >>> # hashed
+    >>> if bcrypt.checkpw(password, hashed):
+    ...     print("It Matches!")
+    ... else:
+    ...     print("It Does not Match :(")
+
+KDF
+~~~
+
+As of 3.0.0, ``bcrypt`` offers a ``kdf`` function which does ``bcrypt_pbkdf``.
+This KDF is used in OpenSSH's newer encrypted private key format.
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> key = bcrypt.kdf(
+    ...     password=b'password',
+    ...     salt=b'salt',
+    ...     desired_key_bytes=32,
+    ...     rounds=100)
+
+
+Adjustable Work Factor
+~~~~~~~~~~~~~~~~~~~~~~
+One of bcrypt's features is an adjustable logarithmic work factor. To adjust
+the work factor, pass the desired number of rounds to ``bcrypt.gensalt``
+(``rounds`` defaults to 12):
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> password = b"super secret password"
+    >>> # Hash a password for the first time, with a certain number of rounds
+    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14))
+    >>> # Check that an unhashed password matches one that has previously been
+    >>> # hashed
+    >>> if bcrypt.checkpw(password, hashed):
+    ...     print("It Matches!")
+    ... else:
+    ...     print("It Does not Match :(")
+
+
+Adjustable Prefix
+~~~~~~~~~~~~~~~~~
+
+Another one of bcrypt's features is an adjustable prefix to let you define what
+libraries you'll remain compatible with. To adjust this, pass either ``2a`` or
+``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object.
+
+As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated.
+
+Maximum Password Length
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The bcrypt algorithm only handles passwords up to 72 bytes; any bytes
+beyond that are ignored. To work around this, a common approach is to hash a
+password with a cryptographic hash (such as ``sha256``) and then base64
+encode it to prevent NULL byte problems before hashing the result with
+``bcrypt``:
+
+.. code:: pycon
+
+    >>> import base64
+    >>> import hashlib
+    >>> import bcrypt
+    >>> password = b"an incredibly long password" * 10
+    >>> hashed = bcrypt.hashpw(
+    ...     base64.b64encode(hashlib.sha256(password).digest()),
+    ...     bcrypt.gensalt()
+    ... )
+
+Compatibility
+-------------
+
+This library should be compatible with py-bcrypt and it will run on Python
+3.6+ and PyPy 3.
+
+C Code
+------
+
+This library uses code from OpenBSD.
+
+Security
+--------
+
+``bcrypt`` follows the `same security policy as cryptography`_; if you
+identify a vulnerability, we ask you to contact us privately.
+
+.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html
+.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt
+.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io
+..
_`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt + + diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/RECORD new file mode 100644 index 0000000..217979e --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/RECORD @@ -0,0 +1,14 @@ +bcrypt-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +bcrypt-4.0.1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850 +bcrypt-4.0.1.dist-info/METADATA,sha256=peZwWFa95xnpp4NiIE7gJkV01CTkbVXIzoEN66SXd3c,8972 +bcrypt-4.0.1.dist-info/RECORD,, +bcrypt-4.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bcrypt-4.0.1.dist-info/WHEEL,sha256=ZXaM-AC_dnzk1sUAdQV_bMrIMG6zI-GthFaEkNkWsgU,112 +bcrypt-4.0.1.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7 +bcrypt/__about__.py,sha256=F7i0CQOa8G3Yjw1T71jQv8yi__Z_4TzLyZJv1GFqVx0,1320 +bcrypt/__init__.py,sha256=EpUdbfHaiHlSoaM-SSUB6MOgNpWOIkS0ZrjxogPIRLM,3781 +bcrypt/__pycache__/__about__.cpython-312.pyc,, +bcrypt/__pycache__/__init__.cpython-312.pyc,, +bcrypt/_bcrypt.abi3.so,sha256=_T-y5IrekziUzkYio4hWH7Xzw92XBKewSLd8kmERhGU,1959696 +bcrypt/_bcrypt.pyi,sha256=O-vvHdooGyAxIkdKemVqOzBF5aMhh0evPSaDMgETgEk,214 +bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/WHEEL new file mode 100644 index 0000000..dc50279 --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp36-abi3-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/top_level.txt new file mode 100644 index 0000000..7f0b6e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt-4.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +bcrypt diff --git a/venv/lib/python3.12/site-packages/bcrypt/__about__.py b/venv/lib/python3.12/site-packages/bcrypt/__about__.py new file mode 100644 index 0000000..020b748 --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt/__about__.py @@ -0,0 +1,41 @@ +# Author:: Donald Stufft () +# Copyright:: Copyright (c) 2013 Donald Stufft +# License:: Apache License, Version 2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "bcrypt" +__summary__ = "Modern password hashing for your software and your servers" +__uri__ = "https://github.com/pyca/bcrypt/" + +__version__ = "4.0.1" + +__author__ = "The Python Cryptographic Authority developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2022 {0}".format(__author__) diff --git a/venv/lib/python3.12/site-packages/bcrypt/__init__.py b/venv/lib/python3.12/site-packages/bcrypt/__init__.py new file mode 100644 index 0000000..1f2886f --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt/__init__.py @@ -0,0 +1,127 @@ +# Author:: Donald Stufft () +# Copyright:: Copyright (c) 2013 Donald Stufft +# License:: Apache License, Version 2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import absolute_import +from __future__ import division + +import hmac +import os +import warnings + +from .__about__ import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) +from . import _bcrypt # noqa: I100 + + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", + "gensalt", + "hashpw", + "kdf", + "checkpw", +] + + +def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: + if prefix not in (b"2a", b"2b"): + raise ValueError("Supported prefixes are b'2a' or b'2b'") + + if rounds < 4 or rounds > 31: + raise ValueError("Invalid rounds") + + salt = os.urandom(16) + output = _bcrypt.encode_base64(salt) + + return ( + b"$" + + prefix + + b"$" + + ("%2.2u" % rounds).encode("ascii") + + b"$" + + output + ) + + +def hashpw(password: bytes, salt: bytes) -> bytes: + if isinstance(password, str) or isinstance(salt, str): + raise TypeError("Strings must be encoded before hashing") + + # bcrypt originally suffered from a wraparound bug: + # http://www.openwall.com/lists/oss-security/2012/01/02/4 + # This bug was corrected in the OpenBSD source by truncating inputs to 72 + # bytes on the updated prefix $2b$, but leaving $2a$ unchanged for + # compatibility. 
However, pyca/bcrypt 2.0.0 *did* correctly truncate inputs + # on $2a$, so we do it here to preserve compatibility with 2.0.0 + password = password[:72] + + return _bcrypt.hashpass(password, salt) + + +def checkpw(password: bytes, hashed_password: bytes) -> bool: + if isinstance(password, str) or isinstance(hashed_password, str): + raise TypeError("Strings must be encoded before checking") + + ret = hashpw(password, hashed_password) + return hmac.compare_digest(ret, hashed_password) + + +def kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, +) -> bytes: + if isinstance(password, str) or isinstance(salt, str): + raise TypeError("Strings must be encoded before hashing") + + if len(password) == 0 or len(salt) == 0: + raise ValueError("password and salt must not be empty") + + if desired_key_bytes <= 0 or desired_key_bytes > 512: + raise ValueError("desired_key_bytes must be 1-512") + + if rounds < 1: + raise ValueError("rounds must be 1 or more") + + if rounds < 50 and not ignore_few_rounds: + # They probably think bcrypt.kdf()'s rounds parameter is logarithmic, + # expecting this value to be slow enough (it probably would be if this + # were bcrypt). Emit a warning. + warnings.warn( + ( + "Warning: bcrypt.kdf() called with only {0} round(s). " + "This few is not secure: the parameter is linear, like PBKDF2." + ).format(rounds), + UserWarning, + stacklevel=2, + ) + + return _bcrypt.pbkdf(password, salt, rounds, desired_key_bytes) diff --git a/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__about__.cpython-312.pyc b/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__about__.cpython-312.pyc new file mode 100644 index 0000000..42c9cc4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__about__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6f9823b Binary files /dev/null and b/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.abi3.so b/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.abi3.so new file mode 100644 index 0000000..5651953 Binary files /dev/null and b/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.abi3.so differ diff --git a/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.pyi b/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.pyi new file mode 100644 index 0000000..640e913 --- /dev/null +++ b/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.pyi @@ -0,0 +1,7 @@ +import typing + +def encode_base64(data: bytes) -> bytes: ... +def hashpass(password: bytes, salt: bytes) -> bytes: ... +def pbkdf( + password: bytes, salt: bytes, rounds: int, desired_key_bytes: int +) -> bytes: ... 
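The "Adjustable Prefix" section of the bcrypt README above is the one usage section without a code sample. As a minimal sketch, assuming only the vendored API defined in the ``bcrypt/__init__.py`` listing just shown (``gensalt``, ``hashpw``, ``checkpw``), prefix selection looks like this; the ``$2a$``/``$2b$`` salt layout follows directly from ``gensalt``'s return expression:

.. code:: python

    import bcrypt

    # gensalt() accepts prefix=b"2a" or b"2b" (the default) and rounds in
    # 4..31; per the listing above it returns b"$" + prefix + b"$" +
    # a two-digit rounds field + b"$" + a 22-character bcrypt-base64 salt.
    salt_2a = bcrypt.gensalt(rounds=12, prefix=b"2a")
    assert salt_2a.startswith(b"$2a$12$")

    # The prefix chosen for the salt is carried into the resulting hash,
    # so consumers that only understand $2a$ can still verify it.
    hashed = bcrypt.hashpw(b"super secret password", salt_2a)
    assert hashed.startswith(b"$2a$")
    assert bcrypt.checkpw(b"super secret password", hashed)

Note that ``checkpw`` works by re-hashing the candidate password with the stored hash as the salt and comparing the results via ``hmac.compare_digest``, which is why the full hash doubles as the ``salt`` argument to ``hashpw``.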
diff --git a/venv/lib/python3.12/site-packages/bcrypt/py.typed b/venv/lib/python3.12/site-packages/bcrypt/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/METADATA b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/METADATA new file mode 100644 index 0000000..6939bac --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/METADATA @@ -0,0 +1,78 @@ +Metadata-Version: 2.4 +Name: certifi +Version: 2025.11.12 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Requires-Python: >=3.7 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
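The certifi README above stops at printing the bundle path. A minimal sketch of the usual next step, assuming only ``certifi.where()`` as documented above plus the standard library's ``ssl`` and ``urllib.request`` modules (the URL is an arbitrary placeholder):

.. code:: python

    import ssl
    import urllib.request

    import certifi

    # Build a client SSLContext whose trust store is Mozilla's curated
    # bundle shipped by certifi, rather than whatever the OS provides.
    context = ssl.create_default_context(cafile=certifi.where())

    with urllib.request.urlopen("https://example.com", context=context) as resp:
        print(resp.status)

Higher-level clients such as ``requests`` perform this wiring themselves, which is how certifi is most often consumed in practice.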
diff --git a/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/RECORD b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/RECORD new file mode 100644 index 0000000..6c79fae --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2025.11.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2025.11.12.dist-info/METADATA,sha256=_JprGu_1lWSdHlruRBKcorXnrfvBDhvX_6KRr8HQbLc,2475 +certifi-2025.11.12.dist-info/RECORD,, +certifi-2025.11.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +certifi-2025.11.12.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2025.11.12.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=1BRSxNMnZW7CZ2oJtYWLoJgfHfcB9i273exwiPwfjJM,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-312.pyc,, +certifi/__pycache__/__main__.cpython-312.pyc,, +certifi/__pycache__/core.cpython-312.pyc,, +certifi/cacert.pem,sha256=oa1dZD4hxDtb7XTH4IkdzbWPavUcis4eTwINZUqlKhY,283932 +certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/WHEEL b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/licenses/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. 
+ +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2025.11.12.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/venv/lib/python3.12/site-packages/certifi/__init__.py b/venv/lib/python3.12/site-packages/certifi/__init__.py new file mode 100644 index 0000000..f11f5ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2025.11.12" diff --git a/venv/lib/python3.12/site-packages/certifi/__main__.py b/venv/lib/python3.12/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c36d2fa Binary files /dev/null and b/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..0b6dd8b Binary files /dev/null and b/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..52f8d53 Binary files /dev/null and b/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/certifi/cacert.pem b/venv/lib/python3.12/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..ebcb66f --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi/cacert.pem @@ -0,0 +1,4678 @@ + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud 
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s 
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR 
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd 
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 
3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END 
CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# 
Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG 
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- + +# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" +# Serial: 65916896770016886708751106294915943533 +# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 +# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 +# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a +-----BEGIN CERTIFICATE----- +MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf +e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C +cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O +BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO +PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw +hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 
3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Label: "TrustAsia TLS ECC Root CA" +# Serial: 310892014698942880364840003424242768478804666567 +# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c +# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 +# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 +-----BEGIN CERTIFICATE----- +MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw +WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw +NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE +ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB +c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ +AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp +guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw +DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 +L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR +OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia TLS RSA Root CA" +# Serial: 160405846464868906657516898462547310235378010780 +# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 +# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa +# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 +-----BEGIN CERTIFICATE----- +MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM +BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN +MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG +A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 +c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC +AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ +NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ +Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 +HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 +ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb +xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX +i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ +UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j +TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT +bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 +S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 +Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 +iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt +7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp +2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ +g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj +pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M +pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP +XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe +SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 +ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy +323imttUQ/hHWKNddBWcwauwxzQ= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
+RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Label: "SwissSign RSA TLS Root CA 2022 - 1" +# Serial: 388078645722908516278762308316089881486363258315 +# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 +# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce +# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 +-----BEGIN CERTIFICATE----- +MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE +AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx +MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT +d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg +MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX +vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 +LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX +5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE +EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt +/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x +0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 +KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM +0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd +OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta +clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK +wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 +DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL +BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 +10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz +Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ +iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc +gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM +ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
+LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp +zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td +Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 +rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO +gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Label: "OISTE Server Root ECC G1" +# Serial: 47819833811561661340092227008453318557 +# MD5 Fingerprint: 42:a7:d2:35:ae:02:92:db:19:76:08:de:2f:05:b4:d4 +# SHA1 Fingerprint: 3b:f6:8b:09:ae:2a:92:7b:ba:e3:8d:3f:11:95:d9:e6:44:0c:45:e2 +# SHA256 Fingerprint: ee:c9:97:c0:c3:0f:21:6f:7e:3b:8b:30:7d:2b:ae:42:41:2d:75:3f:c8:21:9d:af:d1:52:0b:25:72:85:0f:49 +-----BEGIN CERTIFICATE----- +MIICNTCCAbqgAwIBAgIQI/nD1jWvjyhLH/BU6n6XnTAKBggqhkjOPQQDAzBLMQsw +CQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UEAwwY +T0lTVEUgU2VydmVyIFJvb3QgRUNDIEcxMB4XDTIzMDUzMTE0NDIyOFoXDTQ4MDUy +NDE0NDIyN1owSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5kYXRp +b24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IEVDQyBHMTB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABBcv+hK8rBjzCvRE1nZCnrPoH7d5qVi2+GXROiFPqOujvqQy +cvO2Ackr/XeFblPdreqqLiWStukhEaivtUwL85Zgmjvn6hp4LrQ95SjeHIC6XG4N +2xml4z+cKrhAS93mT6NjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBQ3 +TYhlz/w9itWj8UnATgwQb0K0nDAdBgNVHQ4EFgQUN02IZc/8PYrVo/FJwE4MEG9C +tJwwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCpKjAd0MKfkFFR +QD6VVCHNFmb3U2wIFjnQEnx/Yxvf4zgAOdktUyBFCxxgZzFDJe0CMQCSia7pXGKD +YmH5LVerVrkR3SW+ak5KGoJr3M/TvEqzPNcum9v4KGm8ay3sMaE641c= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Label: " OISTE Server Root RSA G1" +# Serial: 113845518112613905024960613408179309848 +# MD5 Fingerprint: 23:a7:9e:d4:70:b8:b9:14:57:41:8a:7e:44:59:e2:68 +# SHA1 Fingerprint: f7:00:34:25:94:88:68:31:e4:34:87:3f:70:fe:86:b3:86:9f:f0:6e +# SHA256 Fingerprint: 9a:e3:62:32:a5:18:9f:fd:db:35:3d:fd:26:52:0c:01:53:95:d2:27:77:da:c5:9d:b5:7b:98:c0:89:a6:51:e6 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIQVaXZZ5Qoxu0M+ifdWwFNGDANBgkqhkiG9w0BAQwFADBL +MQswCQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UE +AwwYT0lTVEUgU2VydmVyIFJvb3QgUlNBIEcxMB4XDTIzMDUzMTE0MzcxNloXDTQ4 +MDUyNDE0MzcxNVowSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5k +YXRpb24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IFJTQSBHMTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAKqu9KuCz/vlNwvn1ZatkOhLKdxVYOPM +vLO8LZK55KN68YG0nnJyQ98/qwsmtO57Gmn7KNByXEptaZnwYx4M0rH/1ow00O7b +rEi56rAUjtgHqSSY3ekJvqgiG1k50SeH3BzN+Puz6+mTeO0Pzjd8JnduodgsIUzk +ik/HEzxux9UTl7Ko2yRpg1bTacuCErudG/L4NPKYKyqOBGf244ehHa1uzjZ0Dl4z +O8vbUZeUapU8zhhabkvG/AePLhq5SvdkNCncpo1Q4Y2LS+VIG24ugBA/5J8bZT8R +tOpXaZ+0AOuFJJkk9SGdl6r7NH8CaxWQrbueWhl/pIzY+m0o/DjH40ytas7ZTpOS +jswMZ78LS5bOZmdTaMsXEY5Z96ycG7mOaES3GK/m5Q9l3JUJsJMStR8+lKXHiHUh +sd4JJCpM4rzsTGdHwimIuQq6+cF0zowYJmXa92/GjHtoXAvuY8BeS/FOzJ8vD+Ho +mnqT8eDI278n5mUpezbgMxVz8p1rhAhoKzYHKyfMeNhqhw5HdPSqoBNdZH702xSu ++zrkL8Fl47l6QGzwBrd7KJvX4V84c5Ss2XCTLdyEr0YconosP4EmQufU2MVshGYR +i3drVByjtdgQ8K4p92cIiBdcuJd5z+orKu5YM+Vt6SmqZQENghPsJQtdLEByFSnT +kCz3GkPVavBpAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU +8snBDw1jALvsRQ5KH7WxszbNDo0wHQYDVR0OBBYEFPLJwQ8NYwC77EUOSh+1sbM2 +zQ6NMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQwFAAOCAgEANGd5sjrG5T33 +I3K5Ce+SrScfoE4KsvXaFwyihdJ+klH9FWXXXGtkFu6KRcoMQzZENdl//nk6HOjG +5D1rd9QhEOP28yBOqb6J8xycqd+8MDoX0TJD0KqKchxRKEzdNsjkLWd9kYccnbz8 
+qyiWXmFcuCIzGEgWUOrKL+mlSdx/PKQZvDatkuK59EvV6wit53j+F8Bdh3foZ3dP +AGav9LEDOr4SfEE15fSmG0eLy3n31r8Xbk5l8PjaV8GUgeV6Vg27Rn9vkf195hfk +gSe7BYhW3SCl95gtkRlpMV+bMPKZrXJAlszYd2abtNUOshD+FKrDgHGdPY3ofRRs +YWSGRqbXVMW215AWRqWFyp464+YTFrYVI8ypKVL9AMb2kI5Wj4kI3Zaq5tNqqYY1 +9tVFeEJKRvwDyF7YZvZFZSS0vod7VSCd9521Kvy5YhnLbDuv0204bKt7ph6N/Ome +/msVuduCmsuY33OhkKCgxeDoAaijFJzIwZqsFVAzje18KotzlUBDJvyBpCpfOZC3 +J8tRd/iWkx7P8nd9H0aTolkelUTFLXVksNb54Dxp6gS1HAviRkRNQzuXSXERvSS2 +wq1yVAb+axj5d9spLFKebXd7Yv0PTY6YMjAwcRLWJTXjn/hvnLXrahut6hDTlhZy +BiElxky8j3C7DOReIoMt0r7+hVu05L0= +-----END CERTIFICATE----- diff --git a/venv/lib/python3.12/site-packages/certifi/core.py b/venv/lib/python3.12/site-packages/certifi/core.py new file mode 100644 index 0000000..1c9661c --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi/core.py @@ -0,0 +1,83 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +else: + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. 
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we will also store that at the global level as well.
+            _CACERT_CTX = get_path("certifi", "cacert.pem")
+            _CACERT_PATH = str(_CACERT_CTX.__enter__())
+            atexit.register(exit_cacert_ctx)
+
+        return _CACERT_PATH
+
+    def contents() -> str:
+        return read_text("certifi", "cacert.pem", encoding="ascii")
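Both branches of certifi/core.py above expose the same two-function API. The following is a minimal consumption sketch (standard-library `ssl` plus certifi itself; nothing in it is specific to this repository):

    import ssl

    import certifi

    # where() returns a filesystem path to the bundled cacert.pem,
    # lazily extracting it first if certifi was imported from a zip.
    ctx = ssl.create_default_context(cafile=certifi.where())

    # contents() returns the same PEM bundle as one string.
    assert "BEGIN CERTIFICATE" in certifi.contents()

A context created this way trusts exactly the Mozilla-derived roots listed in the cacert.pem above, independent of the operating system's certificate store.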
diff --git a/venv/lib/python3.12/site-packages/certifi/py.typed b/venv/lib/python3.12/site-packages/certifi/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA
new file mode 100644
index 0000000..67508e5
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA
@@ -0,0 +1,68 @@
+Metadata-Version: 2.4
+Name: cffi
+Version: 2.0.0
+Summary: Foreign Function Interface for Python calling C code.
+Author: Armin Rigo, Maciej Fijalkowski
+Maintainer: Matt Davis, Matt Clay, Matti Picus
+License-Expression: MIT
+Project-URL: Documentation, https://cffi.readthedocs.io/
+Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html
+Project-URL: Downloads, https://github.com/python-cffi/cffi/releases
+Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi
+Project-URL: Source Code, https://github.com/python-cffi/cffi
+Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: pycparser; implementation_name != "PyPy"
+Dynamic: license-file
+
+[![GitHub Actions Status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++)
+[![PyPI version](https://img.shields.io/pypi/v/cffi.svg)](https://pypi.org/project/cffi)
+[![Read the Docs](https://img.shields.io/badge/docs-latest-blue.svg)][Documentation]
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+
+Please see the [Documentation], or the uncompiled sources in the `doc/` subdirectory.
+
+Download
+--------
+
+[Download page](https://github.com/python-cffi/cffi/releases)
+
+Source Code
+-----------
+
+Source code is publicly available on
+[GitHub](https://github.com/python-cffi/cffi).
+
+Contact
+-------
+
+[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)
+
+Testing/development tips
+------------------------
+
+After `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`;
+we call it `repo-directory`. To run tests under CPython, run the following in the `repo-directory`:
+
+    pip install pytest
+    pip install -e .   # editable install of CFFI for local development
+    pytest src/c/ testing/
+
+[Documentation]: http://cffi.readthedocs.org/
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD
new file mode 100644
index 0000000..148b18a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD
@@ -0,0 +1,49 @@
+_cffi_backend.cpython-312-x86_64-linux-gnu.so,sha256=AGLtw5fn9u4Cmwk3BbGlsXG7VZEvQekABMyEGuRZmcE,348808
+cffi-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cffi-2.0.0.dist-info/METADATA,sha256=uYzn40F68Im8EtXHNBLZs7FoPM-OxzyYbDWsjJvhujk,2559
+cffi-2.0.0.dist-info/RECORD,,
+cffi-2.0.0.dist-info/WHEEL,sha256=aSgG0F4rGPZtV0iTEIfy6dtHq6g67Lze3uLfk0vWn88,151
+cffi-2.0.0.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75
+cffi-2.0.0.dist-info/licenses/AUTHORS,sha256=KmemC7-zN1nWfWRf8TG45ta8TK_CMtdR_Kw-2k0xTMg,208
+cffi-2.0.0.dist-info/licenses/LICENSE,sha256=W6JN3FcGf5JJrdZEw6_EGl1tw34jQz73Wdld83Cwr2M,1123
+cffi-2.0.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
+cffi/__init__.py,sha256=-ksBQ7MfDzVvbBlV_ftYBWAmEqfA86ljIzMxzaZeAlI,511
+cffi/__pycache__/__init__.cpython-312.pyc,,
+cffi/__pycache__/_imp_emulation.cpython-312.pyc,,
+cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc,,
+cffi/__pycache__/api.cpython-312.pyc,,
+cffi/__pycache__/backend_ctypes.cpython-312.pyc,,
+cffi/__pycache__/cffi_opcode.cpython-312.pyc,,
+cffi/__pycache__/commontypes.cpython-312.pyc,,
+cffi/__pycache__/cparser.cpython-312.pyc,,
+cffi/__pycache__/error.cpython-312.pyc,,
+cffi/__pycache__/ffiplatform.cpython-312.pyc,,
+cffi/__pycache__/lock.cpython-312.pyc,,
+cffi/__pycache__/model.cpython-312.pyc,,
+cffi/__pycache__/pkgconfig.cpython-312.pyc,,
+cffi/__pycache__/recompiler.cpython-312.pyc,,
+cffi/__pycache__/setuptools_ext.cpython-312.pyc,,
+cffi/__pycache__/vengine_cpy.cpython-312.pyc,,
+cffi/__pycache__/vengine_gen.cpython-312.pyc,,
+cffi/__pycache__/verifier.cpython-312.pyc,,
+cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908
+cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055
+cffi/_embedding.h,sha256=Ai33FHblE7XSpHOCp8kPcWwN5_9BV14OvN0JVa6ITpw,18786
+cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960
+cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230
+cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169
+cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454
+cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731
+cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805
+cffi/cparser.py,sha256=QUTfmlL-aO-MYR8bFGlvAUHc36OQr7XYLe0WLkGFjRo,44790
+cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
+cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584
+cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747
+cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797
+cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976
+cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374
+cffi/recompiler.py,sha256=78J6lMEEOygXNmjN9-fOFFO3j7eW-iFxSrxfvQb54bY,65509
+cffi/setuptools_ext.py,sha256=0rCwBJ1W7FHWtiMKfNXsSST88V8UXrui5oeXFlDNLG8,9411
+cffi/vengine_cpy.py,sha256=oyQKD23kpE0aChUKA8Jg0e723foPiYzLYEdb-J0MiNs,43881
+cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939
+cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182
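Each row of the RECORD file above is a `path,digest,size` triple: the digest field is `sha256=` followed by the unpadded urlsafe base64 of the file's SHA-256 hash (per PEP 376 and PEP 627), and it is left empty for RECORD itself and for `.pyc` files. A self-check sketch, where the `site_packages` path is a placeholder:

    import base64
    import hashlib
    from pathlib import Path

    site_packages = Path("venv/lib/python3.12/site-packages")  # placeholder

    def record_digest(path: Path) -> str:
        # "sha256=" + urlsafe base64 of the raw digest, '=' padding stripped
        raw = hashlib.sha256(path.read_bytes()).digest()
        return "sha256=" + base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

    record = site_packages / "cffi-2.0.0.dist-info" / "RECORD"
    for row in record.read_text().splitlines():
        name, digest, _size = row.rsplit(",", 2)
        if digest:  # skip the hashless RECORD/.pyc entries
            ok = record_digest(site_packages / name) == digest
            print("ok" if ok else "MISMATCH", name)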
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL
new file mode 100644
index 0000000..e21e9f2
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-manylinux_2_17_x86_64
+Tag: cp312-cp312-manylinux2014_x86_64
+
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt
new file mode 100644
index 0000000..4b0274f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[distutils.setup_keywords]
+cffi_modules = cffi.setuptools_ext:cffi_modules
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS
new file mode 100644
index 0000000..370a25d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS
@@ -0,0 +1,8 @@
+This package has been mostly done by Armin Rigo with help from
+Maciej Fijałkowski.  The idea is heavily based (although not directly
+copied) from LuaJIT ffi by Mike Pall.
+
+
+Other contributors:
+
+  Google Inc.
diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000..0a1dbfb
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE
@@ -0,0 +1,23 @@
+
+Except when otherwise stated (look for LICENSE files in directories or
+information at the beginning of each file) all software and
+documentation is licensed as follows:
+
+    MIT No Attribution
+
+    Permission is hereby granted, free of charge, to any person
+    obtaining a copy of this software and associated documentation
+    files (the "Software"), to deal in the Software without
+    restriction, including without limitation the rights to use,
+    copy, modify, merge, publish, distribute, sublicense, and/or
+    sell copies of the Software, and to permit persons to whom the
+    Software is furnished to do so.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+    THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+    DEALINGS IN THE SOFTWARE.
+ diff --git a/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/venv/lib/python3.12/site-packages/cffi/__init__.py b/venv/lib/python3.12/site-packages/cffi/__init__.py new file mode 100644 index 0000000..c99ec3d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "2.0.0" +__version_info__ = (2, 0, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d5c6184 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc new file mode 100644 index 0000000..291098a Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc new file mode 100644 index 0000000..22747d7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc new file mode 100644 index 0000000..90ab91b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc new file mode 100644 index 0000000..3e97367 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc new file mode 100644 index 0000000..7aac9c5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc new file mode 100644 index 0000000..62883c7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc new file mode 100644 index 0000000..6c6209e Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc new file mode 100644 index 0000000..7325b62 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc new file mode 100644 index 0000000..e3c5360 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc new file mode 100644 index 0000000..e2bc13f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc new file mode 100644 index 0000000..42938fd Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc new file mode 100644 index 0000000..ef8db67 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc new file mode 100644 index 0000000..40fd9c9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc new file mode 100644 index 0000000..788a81f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc new file mode 100644 index 0000000..0b477ec Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc new file mode 100644 index 0000000..b2ecad2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc b/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc new file mode 100644 index 0000000..063a549 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h 
b/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h new file mode 100644 index 0000000..158e059 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. +*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. 
*/
+    Py_DECREF(s);
+    PyErr_Clear();
+    return;
+
+ error:
+    _cffi_bootstrap_text = NULL;
+    PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif
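Neither `_cffi_errors.h` above nor `_cffi_include.h` below is meant to be included by hand: cffi's recompiler folds them into the C source it generates from an out-of-line build script, which is also where the `ffi.set_source()` mentioned in the header comment comes from. A minimal sketch of that flow, with illustrative module and function names:

    # build_example.py -- hypothetical out-of-line cffi build script
    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("int add(int, int);")  # declarations exposed to Python
    ffibuilder.set_source(
        "_example",  # name of the generated C extension module
        "static int add(int a, int b) { return a + b; }",
    )

    if __name__ == "__main__":
        # Emits _example.c (carrying this support code) and compiles it.
        ffibuilder.compile(verbose=True)

After running the script, `from _example import lib; lib.add(2, 3)` returns 5.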
diff --git a/venv/lib/python3.12/site-packages/cffi/_cffi_include.h b/venv/lib/python3.12/site-packages/cffi/_cffi_include.h
new file mode 100644
index 0000000..908a1d7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/_cffi_include.h
@@ -0,0 +1,389 @@
+#define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+   Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+   Py_REF_DEBUG are not defined.  This is a best-effort approximation:
+   we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+   the same works for the other two macros.  Py_DEBUG implies them,
+   but not the other way around.
+
+   The implementation is messy (issue #350): on Windows, with _MSC_VER,
+   we have to define Py_LIMITED_API even before including pyconfig.h.
+   In that case, we guess what pyconfig.h will do to the macros above,
+   and check our guess after the #include.
+
+   Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+   version >= 16.0.0.  With older versions of either, you don't get a
+   copy of PYTHON3.DLL in the virtualenv.  We can't check the version of
+   CPython *before* we even include pyconfig.h.  ffi.set_source() puts
+   a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+   running on Windows < 3.5, as an attempt at fixing it, but that's
+   arguably wrong because it may not be the target version of Python.
+   Still better than nothing I guess.  As another workaround, you can
+   remove the definition of Py_LIMITED_API here.
+
+   See also 'py_limited_api' in cffi/setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+# ifdef _MSC_VER
+#  if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+#   define Py_LIMITED_API
+#  endif
+#  include <pyconfig.h>
+   /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+      are also defined.  Normally, the Python file PC/pyconfig.h does not
+      cause any of these to be defined, with the exception that _DEBUG
+      causes Py_DEBUG.  Double-check that. */
+#  ifdef Py_LIMITED_API
+#   if defined(Py_DEBUG)
+#    error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+#   endif
+#   if defined(Py_TRACE_REFS)
+#    error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+#   endif
+#   if defined(Py_REF_DEBUG)
+#    error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+#   endif
+#  endif
+# else
+#  include <pyconfig.h>
+#  if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+#   define Py_LIMITED_API
+#  endif
+# endif
+#endif
+
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+   c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+   and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h>   /* for alloca() */
+# if _MSC_VER < 1600   /* MSVC < 2010 */
+   typedef __int8 int8_t;
+   typedef __int16 int16_t;
+   typedef __int32 int32_t;
+   typedef __int64 int64_t;
+   typedef unsigned __int8 uint8_t;
+   typedef unsigned __int16 uint16_t;
+   typedef unsigned __int32 uint32_t;
+   typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
+# else
+#  include <stdint.h>
+# endif
+# if _MSC_VER < 1800   /* MSVC < 2013 */
+#  ifndef __cplusplus
+   typedef unsigned char _Bool;
+#  endif
+# endif
+# define _cffi_float_complex_t   _Fcomplex    /* include <complex.h> for it */
+# define _cffi_double_complex_t  _Dcomplex    /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+#  include <alloca.h>
+# endif
+# define _cffi_float_complex_t   float _Complex
+# define _cffi_double_complex_t  double _Complex
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN  __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN  /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+   typedef bool _Bool;   /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/**********  CPython-specific section  **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type)                                        \
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = 
PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return (int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \
+     _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected)      \
+    ((got_nonpos) == (expected <= 0) &&                 \
+     (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall  __stdcall
+#else
+# define _cffi_stdcall  /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif
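`_embedding.h`, which the diff reaches next, backs cffi's embedding mode: the `_CFFI_PYTHON_STARTUP_CODE` string it compiles in `_cffi_initialize_python()` is whatever the build script passed to `ffi.embedding_init_code()`. A sketch of that Python side, again with illustrative names:

    # build_embedded.py -- hypothetical cffi embedding build script
    from cffi import FFI

    ffibuilder = FFI()

    # C-level functions exported by the resulting shared library.
    ffibuilder.embedding_api("int do_stuff(int);")

    ffibuilder.set_source("_embedded_example", "")

    # Becomes _CFFI_PYTHON_STARTUP_CODE; runs once, inside the first
    # call that ends up initializing Python.
    ffibuilder.embedding_init_code("""
        from _embedded_example import ffi

        @ffi.def_extern()
        def do_stuff(x):
            return x + 1
    """)

    ffibuilder.compile(target="libembedded_example.*", verbose=True)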
diff --git a/venv/lib/python3.12/site-packages/cffi/_embedding.h b/venv/lib/python3.12/site-packages/cffi/_embedding.h
new file mode 100644
index 0000000..64c04f6
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/_embedding.h
@@ -0,0 +1,550 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+#  define CFFI_DLLEXPORT  __declspec(dllexport)
+#elif defined(__GNUC__)
+#  define CFFI_DLLEXPORT  __attribute__((visibility("default")))
+#else
+#  define CFFI_DLLEXPORT  /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+   * _cffi_call_python, which we declare just below, is the one called
+     by ``extern "Python"`` implementations.
+
+   * _cffi_call_python_org, which on CPython is actually part of the
+     _cffi_exports[] array, is the function pointer copied from
+     _cffi_backend.  If _cffi_start_python() fails, then this is set
+     to NULL; otherwise, it should never be NULL.
+
+   After initialization is complete, both are equal.  However, the
+   first one remains equal to &_cffi_start_and_call_python until the
+   very end of initialization, when we are (or should be) sure that
+   concurrent threads also see a completely initialized world, and
+   only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+   /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n)  __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier()          __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) &&    \
+     !defined(__i386__) && !defined(__i386)
+#   define cffi_read_barrier()         __sync_synchronize()
+# else
+#   define cffi_read_barrier()         (void)0
+# endif
+#else
+   /* --- Windows threads version --- */
+# include <Windows.h>
+# define cffi_compare_and_swap(l,o,n)                          \
+                        (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier()   InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier()    (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+#  include <pthread.h>
+   static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+   static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+  static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+    static void *volatile lock = NULL;
+
+    while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+        /* should ideally do a spin loop instruction here, but
+           hard to do it portably and doesn't really matter I
+           think: pthread_mutex_init() should be very fast, and
+           this is only run at start-up anyway. */
+    }
+
+#ifdef WITH_THREAD
+    if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+        pthread_mutexattr_t attr;
+        pthread_mutexattr_init(&attr);
+        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+        pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+        InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+        _cffi_embed_startup_lock_ready = 1;
+    }
+#endif
+
+    while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+        ;
+
+#ifndef _MSC_VER
+    pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+    EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+    pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+    LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/**********  CPython-specific section  **********/
+#ifndef PYPY_VERSION
+
+#include "_cffi_errors.h"
+
+
+#define _cffi_call_python_org  _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void);   /* forward */
+
+static void _cffi_py_initialize(void)
+{
+    /* XXX use initsigs=0, which "skips initialization registration of
+       signal handlers, which might be useful when Python is
+       embedded" according to the Python docs.  But review and think
+       if it should be a user-controllable setting.
+
+       XXX we should also give a way to write errors to a buffer
+       instead of to stderr.
+
+       XXX if importing 'site' fails, CPython (any version) calls
+       exit().  Should we try to work around this behavior here?
+    */
+    Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+    /* This initializes Python, imports _cffi_backend, and then the
+       present .dll/.so is set up as a CPython C extension module.
+    */
+    int result;
+    PyGILState_STATE state;
+    PyObject *pycode=NULL, *global_dict=NULL, *x;
+    PyObject *builtins;
+
+    state = PyGILState_Ensure();
+
+    /* Call the initxxx() function from the present module.  It will
+       create and initialize us as a CPython extension module, instead
+       of letting the startup Python code do it---it might reimport
+       the same .dll/.so and get maybe confused on some platforms.
+       It might also have troubles locating the .dll/.so again for all
+       I know.
+    */
+    (void)_CFFI_PYTHON_STARTUP_FUNC();
+    if (PyErr_Occurred())
+        goto error;
+
+    /* Now run the Python code provided to ffi.embedding_init_code().
+    */
+    pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+                              "<init code for '" _CFFI_MODULE_NAME "'>",
+                              Py_file_input);
+    if (pycode == NULL)
+        goto error;
+    global_dict = PyDict_New();
+    if (global_dict == NULL)
+        goto error;
+    builtins = PyEval_GetBuiltins();
+    if (builtins == NULL)
+        goto error;
+    if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
+        goto error;
+    x = PyEval_EvalCode(
+#if PY_MAJOR_VERSION < 3
+                        (PyCodeObject *)
+#endif
+                        pycode, global_dict, global_dict);
+    if (x == NULL)
+        goto error;
+    Py_DECREF(x);
+
+    /* Done!  Now if we've been called from
+       _cffi_start_and_call_python() in an ``extern "Python"``, we can
+       only hope that the Python code did correctly set up the
+       corresponding @ffi.def_extern() function.  Otherwise, the
+       general logic of ``extern "Python"`` functions (inside the
+       _cffi_backend module) will find that the reference is still
+       missing and print an error.
+    */
+    result = 0;
+ done:
+    Py_XDECREF(pycode);
+    Py_XDECREF(global_dict);
+    PyGILState_Release(state);
+    return result;
+
+ error:;
+    {
+        /* Print as much information as potentially useful.
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 2.0.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + + In Python >= 3.12, this stopped working because that particular + tp_version_tag gets modified during interpreter startup. It's + arguably a bad idea before 3.12 too, but again we can't change + that because someone might use a mixture of cffi embedded + modules, and no-one reported a bug so far. In Python >= 3.12 + we go instead for PyCapsuleType.tp_as_buffer, which is supposed + to always be NULL. We write to it temporarily a pointer to + a struct full of NULLs, which is semantically the same. 
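+
+       The common idea in all three cases: pick one pointer-sized slot
+       that lives inside libpythonX.Y itself (so that every cffi-based
+       extension module in the process sees the same slot) and that is
+       normally constant, and temporarily reuse it as the spin-lock
+       word around Py_InitializeEx().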
+ */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# else +# if PY_VERSION_HEX < 0x030C0000 + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value = -42; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); +# else + static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; + empty_buffer_procs.mark = -42; + PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) + &PyCapsule_Type.tp_as_buffer; + PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; +# endif + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { +# if PY_VERSION_HEX < 0x030C0000 + assert(old_value == locked_value); +# else + /* The pointer should point to a possibly different + empty_buffer_procs from another C extension module */ + assert(((struct ebp_s *)old_value)->mark == -42); +# endif + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py b/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py new file mode 100644 index 0000000..136abdd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py @@ -0,0 +1,83 @@ + +try: + # this works on Python < 3.12 + from imp import * + +except ImportError: + # this is a limited emulation for Python >= 3.12. + # Note that this is used only for tests or for the old ffi.verify(). + # This is copied from the source code of Python 3.11. + + from _imp import (acquire_lock, release_lock, + is_builtin, is_frozen) + + from importlib._bootstrap import _load + + from importlib import machinery + import os + import sys + import tokenize + + SEARCH_ERROR = 0 + PY_SOURCE = 1 + PY_COMPILED = 2 + C_EXTENSION = 3 + PY_RESOURCE = 4 + PKG_DIRECTORY = 5 + C_BUILTIN = 6 + PY_FROZEN = 7 + PY_CODERESOURCE = 8 + IMP_HOOK = 9 + + def get_suffixes(): + extensions = [(s, 'rb', C_EXTENSION) + for s in machinery.EXTENSION_SUFFIXES] + source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] + bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] + return extensions + source + bytecode + + def find_module(name, path=None): + if not isinstance(name, str): + raise TypeError("'name' must be a str, not {}".format(type(name))) + elif not isinstance(path, (type(None), list)): + # Backwards-compatibility + raise RuntimeError("'path' must be None or a list, " + "not {}".format(type(path))) + + if path is None: + if is_builtin(name): + return None, None, ('', '', C_BUILTIN) + elif is_frozen(name): + return None, None, ('', '', PY_FROZEN) + else: + path = sys.path + + for entry in path: + package_directory = os.path.join(entry, name) + for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: + package_file_name = '__init__' + suffix + file_path = os.path.join(package_directory, package_file_name) + if os.path.isfile(file_path): + return None, package_directory, ('', '', PKG_DIRECTORY) + for suffix, mode, type_ in get_suffixes(): + file_name = name + suffix + file_path = os.path.join(entry, file_name) + if os.path.isfile(file_path): + break + else: + continue + break # Break out of outer loop when breaking out of inner loop. 
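+        # (for/else: the 'else' clause below runs only when no entry of
+        # 'path' yielded a package directory or a matching file)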
+ else: + raise ImportError(name, name=name) + + encoding = None + if 'b' not in mode: + with open(file_path, 'rb') as file: + encoding = tokenize.detect_encoding(file.readline)[0] + file = open(file_path, mode, encoding=encoding) + return file, file_path, (suffix, mode, type_) + + def load_dynamic(name, path, file=None): + loader = machinery.ExtensionFileLoader(name, path) + spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) + return _load(spec) diff --git a/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py b/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py new file mode 100644 index 0000000..c3d2312 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py @@ -0,0 +1,45 @@ +""" +Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful +error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. + +This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. +""" +import sys + +try: + # import setuptools first; this is the most robust way to ensure its embedded distutils is available + # (the .pth shim should usually work, but this is even more robust) + import setuptools +except Exception as ex: + if sys.version_info >= (3, 12): + # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex + + # silently ignore on older Pythons (support fallback to stdlib distutils where available) +else: + del setuptools + +try: + # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils + from distutils import log, sysconfig + from distutils.ccompiler import CCompiler + from distutils.command.build_ext import build_ext + from distutils.core import Distribution, Extension + from distutils.dir_util import mkpath + from distutils.errors import DistutilsSetupError, CompileError, LinkError + from distutils.log import set_threshold, set_verbosity + + if sys.platform == 'win32': + try: + # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` + from distutils.msvc9compiler import MSVCCompiler + except ImportError: + MSVCCompiler = None +except Exception as ex: + if sys.version_info >= (3, 12): + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex + + # anything older, just let the underlying distutils import error fly + raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex + +del sys diff --git a/venv/lib/python3.12/site-packages/cffi/api.py b/venv/lib/python3.12/site-packages/cffi/api.py new file mode 100644 index 0000000..5a474f3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/api.py @@ -0,0 +1,967 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . 
import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. 
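+
+        A minimal example (sketch):
+
+            ffi.cdef("typedef struct { int x, y; } point_t;")
+            p = ffi.new("point_t *", [1, 2])
+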
+ If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). + """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. 
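+
+        For example, ffi.typeof("long[10]") and ffi.typeof("long(*)(long)")
+        both return ctype objects, usable wherever a type string is accepted.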
+ """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. 
+ """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. + """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. 
+ 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. 
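+
+        Example (assuming the declaration was first passed to cdef()):
+
+            ffi.cdef("double sin(double x);")
+            lib = ffi.verify('#include <math.h>', libraries=['m'])
+            assert lib.sin(0.0) == 0.0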
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. + """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. 
If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. + """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from cffi._shimmed_dist_utils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from cffi._shimmed_dist_utils import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, + uses_ffiplatform=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, + uses_ffiplatform=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. 
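+        # Typical usage:  db = ffi.init_once(open_database, "db") -- every
+        # caller gets the same 'db', and open_database() runs only once
+        # per tag (unless it raises, in which case a later call retries).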
+ try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. 
Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not 
isinstance(libname, str):    # unicode, on Python 2
+                libname = libname.encode('utf-8')
+            FFILibrary.__name__ = 'FFILibrary_%s' % libname
+        except UnicodeError:
+            pass
+    library = FFILibrary()
+    return library, library.__dict__
+
+def _builtin_function_type(func):
+    # a hack to make at least ffi.typeof(builtin_function) work,
+    # if the builtin function was obtained by 'vengine_cpy'.
+    import sys
+    try:
+        module = sys.modules[func.__module__]
+        ffi = module._cffi_original_ffi
+        types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+        tp = types_of_builtin_funcs[func]
+    except (KeyError, AttributeError, TypeError):
+        return None
+    else:
+        with ffi._lock:
+            return ffi._get_cached_btype(tp)
diff --git a/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py b/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py
new file mode 100644
index 0000000..e7956a7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py
@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+    bytechr = chr
+else:
+    unicode = str
+    long = int
+    xrange = range
+    bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+    pass
+
+class CTypesData(object):
+    __metaclass__ = CTypesType
+    __slots__ = ['__weakref__']
+    __name__ = '<cdata>'
+
+    def __init__(self, *args):
+        raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+    @classmethod
+    def _newp(cls, init):
+        raise TypeError("expected a pointer or array ctype, got '%s'"
+                        % (cls._get_c_name(),))
+
+    @staticmethod
+    def _to_ctypes(value):
+        raise TypeError
+
+    @classmethod
+    def _arg_to_ctypes(cls, *value):
+        try:
+            ctype = cls._ctype
+        except AttributeError:
+            raise TypeError("cannot create an instance of %r" % (cls,))
+        if value:
+            res = cls._to_ctypes(*value)
+            if not isinstance(res, ctype):
+                res = cls._ctype(res)
+        else:
+            res = cls._ctype()
+        return res
+
+    @classmethod
+    def _create_ctype_obj(cls, init):
+        if init is None:
+            return cls._arg_to_ctypes()
+        else:
+            return cls._arg_to_ctypes(init)
+
+    @staticmethod
+    def _from_ctypes(ctypes_value):
+        raise TypeError
+
+    @classmethod
+    def _get_c_name(cls, replace_with=''):
+        return cls._reftypename.replace(' &', replace_with)
+
+    @classmethod
+    def _fix_class(cls):
+        cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+        cls.__module__ = 'ffi'
+
+    def _get_own_repr(self):
+        raise NotImplementedError
+
+    def _addr_repr(self, address):
+        if address == 0:
+            return 'NULL'
+        else:
+            if address < 0:
+                address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+            return '0x%x' % address
+
+    def __repr__(self, c_name=None):
+        own = self._get_own_repr()
+        return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+    def _convert_to_address(self, BClass):
+        if BClass is None:
+            raise TypeError("cannot convert %r to an address" % (
+                self._get_c_name(),))
+        else:
+            raise TypeError("cannot convert %r to %r" % (
+                self._get_c_name(), BClass._get_c_name()))
+
+    @classmethod
+    def _get_size(cls):
+        return ctypes.sizeof(cls._ctype)
+
+    def _get_size_of_instance(self):
+        return ctypes.sizeof(self._ctype)
+
+    @classmethod
+    def _cast_from(cls, source):
+        raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+    def _cast_to_integer(self):
+        return self._convert_to_address(None)
+
+    @classmethod
+    def _alignment(cls):
+        return ctypes.alignment(cls._ctype)
+
+    def __iter__(self):
+        raise TypeError("cdata %r does not support iteration" % (
+            self._get_c_name()),)
+
+    def _make_cmp(name):
+        cmpfunc =
getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' 
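+
+    # For pointers, casting to an integer (and therefore hashing and
+    # comparison in the base class) simply uses the raw address.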
+ + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def 
new_void_type(self):
+        class CTypesVoid(CTypesData):
+            __slots__ = []
+            _reftypename = 'void &'
+            @staticmethod
+            def _from_ctypes(novalue):
+                return None
+            @staticmethod
+            def _to_ctypes(novalue):
+                if novalue is not None:
+                    raise TypeError("None expected, got %s object" %
+                                    (type(novalue).__name__,))
+                return None
+        CTypesVoid._fix_class()
+        return CTypesVoid
+
+    def new_primitive_type(self, name):
+        if name == 'wchar_t':
+            raise NotImplementedError(name)
+        ctype = self.PRIMITIVE_TYPES[name]
+        if name == 'char':
+            kind = 'char'
+        elif name in ('float', 'double'):
+            kind = 'float'
+        else:
+            if name in ('signed char', 'unsigned char'):
+                kind = 'byte'
+            elif name == '_Bool':
+                kind = 'bool'
+            else:
+                kind = 'int'
+        is_signed = (ctype(-1).value == -1)
+        #
+        def _cast_source_to_int(source):
+            if isinstance(source, (int, long, float)):
+                source = int(source)
+            elif isinstance(source, CTypesData):
+                source = source._cast_to_integer()
+            elif isinstance(source, bytes):
+                source = ord(source)
+            elif source is None:
+                source = 0
+            else:
+                raise TypeError("bad type for cast to %r: %r" %
+                                (CTypesPrimitive, type(source).__name__))
+            return source
+        #
+        kind1 = kind
+        class CTypesPrimitive(CTypesGenericPrimitive):
+            __slots__ = ['_value']
+            _ctype = ctype
+            _reftypename = '%s &' % name
+            kind = kind1
+
+            def __init__(self, value):
+                self._value = value
+
+            @staticmethod
+            def _create_ctype_obj(init):
+                if init is None:
+                    return ctype()
+                return ctype(CTypesPrimitive._to_ctypes(init))
+
+            if kind == 'int' or kind == 'byte':
+                @classmethod
+                def _cast_from(cls, source):
+                    source = _cast_source_to_int(source)
+                    source = ctype(source).value     # cast within range
+                    return cls(source)
+                def __int__(self):
+                    return self._value
+
+            if kind == 'bool':
+                @classmethod
+                def _cast_from(cls, source):
+                    if not isinstance(source, (int, long, float)):
+                        source = _cast_source_to_int(source)
+                    return cls(bool(source))
+                def __int__(self):
+                    return int(self._value)
+
+            if kind == 'char':
+                @classmethod
+                def _cast_from(cls, source):
+                    source = _cast_source_to_int(source)
+                    source = bytechr(source & 0xFF)
+                    return cls(source)
+                def __int__(self):
+                    return ord(self._value)
+
+            if kind == 'float':
+                @classmethod
+                def _cast_from(cls, source):
+                    if isinstance(source, float):
+                        pass
+                    elif isinstance(source, CTypesGenericPrimitive):
+                        if hasattr(source, '__float__'):
+                            source = float(source)
+                        else:
+                            source = int(source)
+                    else:
+                        source = _cast_source_to_int(source)
+                    source = ctype(source).value     # fix precision
+                    return cls(source)
+                def __int__(self):
+                    return int(self._value)
+                def __float__(self):
+                    return self._value
+
+            _cast_to_integer = __int__
+
+            if kind == 'int' or kind == 'byte' or kind == 'bool':
+                @staticmethod
+                def _to_ctypes(x):
+                    if not isinstance(x, (int, long)):
+                        if isinstance(x, CTypesData):
+                            x = int(x)
+                        else:
+                            raise TypeError("integer expected, got %s" %
+                                            type(x).__name__)
+                    if ctype(x).value != x:
+                        if not is_signed and x < 0:
+                            raise OverflowError("%s: negative integer" % name)
+                        else:
+                            raise OverflowError("%s: integer out of bounds"
+                                                % name)
+                    return x
+
+            if kind == 'char':
+                @staticmethod
+                def _to_ctypes(x):
+                    if isinstance(x, bytes) and len(x) == 1:
+                        return x
+                    if isinstance(x, CTypesPrimitive):    # <CData <char>>
+                        return x._value
+                    raise TypeError("character expected, got %s" %
+                                    type(x).__name__)
+                def __nonzero__(self):
+                    return ord(self._value) != 0
+            else:
+                def __nonzero__(self):
+                    return self._value != 0
+            __bool__ = __nonzero__
+
+            if kind == 'float':
+                @staticmethod
+                def _to_ctypes(x):
+                    if 
not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if 
BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def 
new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + 
value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object 
(got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset 
= BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py b/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..6421df6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_cffi_float_complex_t': PRIM_FLOATCOMPLEX, + '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/venv/lib/python3.12/site-packages/cffi/commontypes.py b/venv/lib/python3.12/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..d4dae35 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/commontypes.py @@ -0,0 +1,82 @@ +import sys +from . 
import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above +COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t' +COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t' + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/venv/lib/python3.12/site-packages/cffi/cparser.py b/venv/lib/python3.12/site-packages/cffi/cparser.py new file mode 100644 index 0000000..dd590d8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/cparser.py @@ -0,0 +1,1015 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. 
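+    # Explanatory note (not in the upstream source): freezers only follow
+    # imports that appear literally in the code; pycparser loads its
+    # yacctab/lextab modules via exec(), so these otherwise-dead imports
+    # are the only hint a static analyzer gets.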
+    import pycparser.yacctab
+    import pycparser.lextab
+
+CDEF_SOURCE_STRING = "<cdef source string>"
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                        r"\b((?:[^\n\\]|\\.)*?)$",
+                        re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile(       # matches "* const "
+    r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+                              r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+    global _parser_cache
+    if _parser_cache is None:
+        _parser_cache = pycparser.CParser()
+    return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+    # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+    # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+    # for "char***(*const)".  This means we can't tell the difference
+    # afterwards.  But "char(*const(***))" gives us the right syntax
+    # tree.  The issue only occurs if there are several stars in
+    # sequence with no parenthesis in between, just possibly qualifiers.
+    # Attempt to fix it by adding some parentheses in the source: each
+    # time we see "* const" or "* const *", we add an opening
+    # parenthesis before each star---the hard part is figuring out where
+    # to close them.
+    parts = []
+    while True:
+        match = _r_star_const_space.search(csource)
+        if not match:
+            break
+        #print repr(''.join(parts)+csource), '=>',
+        parts.append(csource[:match.start()])
+        parts.append('('); closing = ')'
+        parts.append(match.group())   # e.g. 
"* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+    line_directives = []
+    def replace(m):
+        i = len(line_directives)
+        line_directives.append(m.group())
+        return '#line@%d' % i
+    csource = _r_line_directive.sub(replace, csource)
+    return csource, line_directives
+
+def _put_back_line_directives(csource, line_directives):
+    def replace(m):
+        s = m.group()
+        if not s.startswith('#line@'):
+            raise AssertionError("unexpected #line directive "
+                                 "(should have been processed and removed)")
+        return line_directives[int(s[6:])]
+    return _r_line_directive.sub(replace, csource)
+
+def _preprocess(csource):
+    # First, remove the lines of the form '#line N "filename"' because
+    # the "filename" part could confuse the rest
+    csource, line_directives = _remove_line_directives(csource)
+    # Remove comments.  NOTE: this only works because the cdef() section
+    # should not contain any string literals (except in line directives)!
+    def replace_keeping_newlines(m):
+        return ' ' + m.group().count('\n') * '\n'
+    csource = _r_comment.sub(replace_keeping_newlines, csource)
+    # Remove the "#define FOO x" lines
+    macros = {}
+    for match in _r_define.finditer(csource):
+        macroname, macrovalue = match.groups()
+        macrovalue = macrovalue.replace('\\\n', '').strip()
+        macros[macroname] = macrovalue
+    csource = _r_define.sub('', csource)
+    #
+    if pycparser.__version__ < '2.14':
+        csource = _workaround_for_old_pycparser(csource)
+    #
+    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
+    # It doesn't make sense for the return type of a function to be
+    # "volatile volatile const", so we abuse it to detect __stdcall...
+    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
+    # syntax, so we place the "volatile" before the opening parenthesis.
+    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
+    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
+    csource = _r_cdecl.sub(' ', csource)
+    #
+    # Replace `extern "Python"` with start/end markers
+    csource = _preprocess_extern_python(csource)
+    #
+    # Now there should not be any string literal left; warn if we get one
+    _warn_for_string_literal(csource)
+    #
+    # Replace "[...]" with "[__dotdotdotarray__]"
+    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
+    #
+    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
+    # occur only at the end of enums; at the end of structs we have "...;}"
+    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
+    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
+    # giving an unknown value.
+    matches = list(_r_partial_enum.finditer(csource))
+    for number, match in enumerate(reversed(matches)):
+        p = match.start()
+        if csource[p] == '=':
+            p2 = csource.find('...', p, match.end())
+            assert p2 > p
+            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
+                                                 csource[p2+3:])
+        else:
+            assert csource[p:p+3] == '...'
+            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
+                                                 csource[p+3:])
+    # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
+    csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
+    # Replace "float ..." or "double..." with "__dotdotdotfloat__"
+    csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
+    # Replace all remaining "..." with the same name, "__dotdotdot__",
+    # which is declared with a typedef for the purpose of C parsing.
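+    # Illustrative example (not in the upstream source): an input like
+    #     'int foo(int, ...);'
+    # becomes roughly 'int foo(int,  __dotdotdot__ );' after the next line.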
+    csource = csource.replace('...', ' __dotdotdot__ ')
+    # Finally, put back the line directives
+    csource = _put_back_line_directives(csource, line_directives)
+    return csource, macros
+
+def _common_type_names(csource):
+    # Look in the source for what looks like usages of types from the
+    # list of common types.  A "usage" is approximated here as the
+    # appearance of the word, minus a "definition" of the type, which
+    # is the last word in a "typedef" statement.  Approximative only
+    # but should be fine for all the common types.
+    look_for_words = set(COMMON_TYPES)
+    look_for_words.add(';')
+    look_for_words.add(',')
+    look_for_words.add('(')
+    look_for_words.add(')')
+    look_for_words.add('typedef')
+    words_used = set()
+    is_typedef = False
+    paren = 0
+    previous_word = ''
+    for word in _r_words.findall(csource):
+        if word in look_for_words:
+            if word == ';':
+                if is_typedef:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+                    is_typedef = False
+            elif word == 'typedef':
+                is_typedef = True
+                paren = 0
+            elif word == '(':
+                paren += 1
+            elif word == ')':
+                paren -= 1
+            elif word == ',':
+                if is_typedef and paren == 0:
+                    words_used.discard(previous_word)
+                    look_for_words.discard(previous_word)
+            else:   # word in COMMON_TYPES
+                words_used.add(word)
+        previous_word = word
+    return words_used
+
+
+class Parser(object):
+
+    def __init__(self):
+        self._declarations = {}
+        self._included_declarations = set()
+        self._anonymous_counter = 0
+        self._structnode2type = weakref.WeakKeyDictionary()
+        self._options = {}
+        self._int_constants = {}
+        self._recomplete = []
+        self._uses_new_feature = None
+
+    def _parse(self, csource):
+        csource, macros = _preprocess(csource)
+        # XXX: for more efficiency we would need to poke into the
+        # internals of CParser...  the following registers the
+        # typedefs, because their presence or absence influences the
+        # parsing itself (but what they are typedef'ed to plays no role)
+        ctn = _common_type_names(csource)
+        typenames = []
+        for name in sorted(self._declarations):
+            if name.startswith('typedef '):
+                name = name[8:]
+                typenames.append(name)
+                ctn.discard(name)
+        typenames += sorted(ctn)
+        #
+        csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+        for typename in typenames:
+            csourcelines.append('typedef int %s;' % typename)
+        csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+                            ' __dotdotdot__;')
+        # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+        csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
+        csourcelines.append(csource)
+        csourcelines.append('')   # see test_missing_newline_bug
+        fullcsource = '\n'.join(csourcelines)
+        if lock is not None:
+            lock.acquire()     # pycparser is not thread-safe...
+        try:
+            ast = _get_parser().parse(fullcsource)
+        except pycparser.c_parser.ParseError as e:
+            self.convert_pycparser_error(e, csource)
+        finally:
+            if lock is not None:
+                lock.release()
+        # csource will be used to find buggy source text
+        return ast, macros, csource
+
+    def _convert_pycparser_error(self, e, csource):
+        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
+        # and interpret that as a line number.  This will not work if
+        # the user gives explicit ``# NUM "FILE"`` directives.
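+        # Illustrative example (not in the upstream source): a message like
+        #     '<cdef source string>:4: before: foo'
+        # points at line 4 of the preprocessed csource.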
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + # skip pragma, only in pycparser 2.15 + import warnings + warnings.warn( + "#pragma in cdef() are entirely ignored. " + "They should be removed for now, otherwise your " + "code might behave differently in a future version " + "of CFFI if #pragma support gets added. 
Note that " + "'#pragma pack' needs to be replaced with the " + "'packed' keyword argument to cdef().") + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # 
"void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. 
This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with 
only '(...)' as argument"
+                " is not correct C" % (funcname or 'in expression'))
+        args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
+                for argdeclnode in params]
+        if not ellipsis and args == [model.void_type]:
+            args = []
+        result, quals = self._get_type_and_quals(typenode.type)
+        # the 'quals' on the result type are ignored.  HACK: we abuse them
+        # to detect __stdcall functions: we textually replace "__stdcall"
+        # with "volatile volatile const" above.
+        abi = None
+        if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
+            if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
+                abi = '__stdcall'
+        return model.RawFunctionType(tuple(args), result, ellipsis, abi)
+
+    def _as_func_arg(self, type, quals):
+        if isinstance(type, model.ArrayType):
+            return model.PointerType(type.item, quals)
+        elif isinstance(type, model.RawFunctionType):
+            return type.as_function_pointer()
+        else:
+            return type
+
+    def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
+        # First, a level of caching on the exact 'type' node of the AST.
+        # This is obscure, but needed because pycparser "unrolls" declarations
+        # such as "typedef struct { } foo_t, *foo_p" and we end up with
+        # an AST that is not a tree, but a DAG, with the "type" node of the
+        # two branches foo_t and foo_p of the trees being the same node.
+        # It's a bit silly but detecting "DAG-ness" in the AST tree seems
+        # to be the only way to distinguish this case from two independent
+        # structs.  See test_struct_with_two_usages.
+        try:
+            return self._structnode2type[type]
+        except KeyError:
+            pass
+        #
+        # Note that this must handle parsing "struct foo" any number of
+        # times and always return the same StructType object.  Additionally,
+        # one of these times (not necessarily the first), the fields of
+        # the struct can be specified with "struct foo { ...fields... }".
+        # If no name is given, then we have to create a new anonymous struct
+        # with no caching; in this case, the fields are either specified
+        # right now or never.
+        #
+        force_name = name
+        name = type.name
+        #
+        # get the type or create it if needed
+        if name is None:
+            # 'force_name' is used to guess a more readable name for
+            # anonymous structs, for the common case "typedef struct { } foo".
+            if force_name is not None:
+                explicit_name = '$%s' % force_name
+            else:
+                self._anonymous_counter += 1
+                explicit_name = '$%d' % self._anonymous_counter
+            tp = None
+        else:
+            explicit_name = name
+            key = '%s %s' % (kind, name)
+            tp, _ = self._declarations.get(key, (None, None))
+        #
+        if tp is None:
+            if kind == 'struct':
+                tp = model.StructType(explicit_name, None, None, None)
+            elif kind == 'union':
+                tp = model.UnionType(explicit_name, None, None, None)
+            elif kind == 'enum':
+                if explicit_name == '__dotdotdot__':
+                    raise CDefError("Enums cannot be declared with ...")
+                tp = self._build_enum_type(explicit_name, type.values)
+            else:
+                raise AssertionError("kind = %r" % (kind,))
+            if name is not None:
+                self._declare(key, tp)
+        else:
+            if kind == 'enum' and type.values is not None:
+                raise NotImplementedError(
+                    "enum %s: the '{}' declaration should appear on the first "
+                    "time the enum is mentioned, not later" % explicit_name)
+        if not tp.forcename:
+            tp.force_the_name(force_name)
+        if tp.forcename and '$' in tp.name:
+            self._declare('anonymous %s' % tp.forcename, tp)
+        #
+        self._structnode2type[type] = tp
+        #
+        # enums: done here
+        if kind == 'enum':
+            return tp
+        #
+        # is there a 'type.decls'? 
If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' 
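
As an aside before the rest of _parse_constant continues below: the literal-handling rules at the top of the method, and the C-style division that _c_div implements further down, are easy to check in isolation. The sketch below is a hypothetical standalone rewrite (parse_c_int and c_div are illustration names, not part of cffi's API):

def parse_c_int(s):
    # mirrors _parse_constant: strip C suffixes, then pick the base
    s = s.rstrip('uUlL')                     # e.g. '42UL' -> '42'
    try:
        return int(s, 8) if s.startswith('0') else int(s, 10)
    except ValueError:
        if s[:2].lower() == '0x':
            return int(s, 16)                # hexadecimal
        if s[:2].lower() == '0b':
            return int(s, 2)                 # binary
        raise

def c_div(a, b):
    # C truncates the quotient toward zero; Python's // floors it
    q = a // b
    if (a < 0) != (b < 0) and a % b != 0:
        q += 1
    return q

assert parse_c_int('0x1F') == 31
assert parse_c_int('010') == 8               # leading zero means octal in C
assert c_div(-7, 2) == -3                    # C: -7/2 == -3; Python: -7 // 2 == -4
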
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/venv/lib/python3.12/site-packages/cffi/error.py b/venv/lib/python3.12/site-packages/cffi/error.py new file mode 100644 index 0000000..0a27247 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/venv/lib/python3.12/site-packages/cffi/ffiplatform.py b/venv/lib/python3.12/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000..adca28f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/ffiplatform.py @@ -0,0 +1,113 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from cffi._shimmed_dist_utils import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity + + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = set_threshold(0) or 0 + try: + set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + set_threshold(old_level) + except (CompileError, LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + 
return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/venv/lib/python3.12/site-packages/cffi/lock.py b/venv/lib/python3.12/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/venv/lib/python3.12/site-packages/cffi/model.py b/venv/lib/python3.12/site-packages/cffi/model.py new file mode 100644 index 0000000..e5f4cae --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/model.py @@ -0,0 +1,618 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_cffi_float_complex_t': 'j', + '_cffi_double_complex_t': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + 
_attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = " *&" + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + extra = qualify(quals, extra) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + 
brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
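
Before force_flatten's body below, it may help to see the '&'-marker convention used by get_c_name(), PointerType and ArrayType above reduced to plain string operations. The values are exactly what the constructors shown earlier build for a pointer to an array of ten ints (traced by hand from the code, as an illustration only):

int_t = 'int&'                       # PrimitiveType('int').c_name_with_marker
arr   = int_t.replace('&', '&[10]')  # ArrayType(int, 10)   -> 'int&[10]'
ptr   = arr.replace('&', '(*&)')     # PointerType(array)   -> 'int(*&)[10]'
print(ptr.replace('&', ' x'))        # get_c_name('x')      -> 'int(* x)[10]'
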
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. 
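
The __warningregistry__.clear() on the next line works around warnings.warn()'s per-call-site deduplication. A standalone demonstration of the suppression being defeated (nothing here is cffi-specific):

import warnings

def nag():
    warnings.warn("enum has no values")   # same call site every time

with warnings.catch_warnings(record=True) as log:
    warnings.simplefilter("default")      # 'default' = show once per location
    nag()
    nag()
print(len(log))                           # 1: the second warning was suppressed
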
+ __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/venv/lib/python3.12/site-packages/cffi/parse_c_type.h b/venv/lib/python3.12/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..84e4ef8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). 
*/ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/venv/lib/python3.12/site-packages/cffi/pkgconfig.py b/venv/lib/python3.12/site-packages/cffi/pkgconfig.py new file mode 100644 index 0000000..5c93f15 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags cfg2 into cfg1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise
PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. + """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/venv/lib/python3.12/site-packages/cffi/recompiler.py b/venv/lib/python3.12/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..7734a34 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/recompiler.py @@ -0,0 +1,1598 @@ +import io, os, sys, sysconfig +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = ((sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) and + not sysconfig.get_config_var("Py_GIL_DISABLED")) # free-threaded doesn't yet support limited API + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): 
+ return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. 
+ expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + 
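
(write_c_source_to_f continues below.) For orientation, everything this writer emits is normally reached through cffi's public build API rather than called directly; a typical driver script looks like this ('_example' and the C source are made-up placeholders):

import cffi

ffibuilder = cffi.FFI()
ffibuilder.cdef("int add(int, int);")    # parsed by the cparser seen earlier
ffibuilder.set_source("_example",
                      "static int add(int a, int b) { return a + b; }")

if __name__ == "__main__":
    # runs the Recompiler to emit _example.c, then invokes the C compiler
    ffibuilder.compile(verbose=True)
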
self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 
1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would lose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ?
' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', 
'.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. 
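+        # An illustrative sketch of that rewrite (assumed shape, not emitted
+        # verbatim; 'struct pt' and 'add' are hypothetical names): a cdef like
+        #     struct pt add(struct pt a, struct pt b);
+        # comes out of the branch below roughly as
+        #     static void _cffi_f_add(struct pt *result,
+        #                             struct pt *x0, struct pt *x1)
+        #     { *result = add(*x0, *x1); }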
+ def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
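+                # For instance, for a hypothetical field 'double x' the
+                # emitted check takes the shape
+                #     { double *tmp = &p->x; (void)tmp; }
+                # which compiles cleanly only if 'p->x' really is a 'double'.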
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '(size_t)(((char *)&((%s)4096)->%s) - (char *)4096)' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very 
nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. + lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
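+    # A sketch of the helpers emitted below (illustrative; FOO and BAR are
+    # hypothetical constant names):
+    #     static int _cffi_const_FOO(unsigned long long *o) { ... }  /* integer */
+    #     static void _cffi_const_BAR(char *o) { ... }               /* other */
+    # the integer variant returns flags describing the constant's sign and
+    # whether an optional value check failed.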
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + 
else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _is_file_like(maybefile): + # compare to xml.etree.ElementTree._get_writer + return hasattr(maybefile, 'write') + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + if _is_file_like(target_file): + recompiler.write_source_to_f(target_file, preamble) + return True + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. 
Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! + # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed, + # since the toolchain it supports (VS2005-2008) is also long dead. + from cffi._shimmed_dist_utils import MSVCCompiler + if MSVCCompiler is not None: + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from cffi._shimmed_dist_utils import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from cffi._shimmed_dist_utils import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, + uses_ffiplatform=True, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + if call_c_compiler and _is_file_like(c_file): + raise TypeError("Writing to file-like objects is not supported " + "with call_c_compiler=True") + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + if uses_ffiplatform: + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + else: + ext = None + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + 
return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py b/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..5cdd246 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,229 @@ +import os +import sys +import sysconfig + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from cffi._shimmed_dist_utils import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. + + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. 
So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi._shimmed_dist_utils import log + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + + if sysconfig.get_config_var("Py_GIL_DISABLED"): + if kwds.get('py_limited_api'): + log.info("Ignoring py_limited_api=True for free-threaded build.") + + kwds['py_limited_api'] = False + + if kwds.get('py_limited_api') is False: + # avoid setting Py_LIMITED_API if py_limited_api=False + # which _cffi_include.h does unless _CFFI_NO_LIMITED_API is defined + kwds.setdefault("define_macros", []).append(("_CFFI_NO_LIMITED_API", None)) + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import Extension, log, mkpath + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. 
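+
+# For orientation, a sketch (illustrative only; the file name and the
+# 'ffibuilder' variable are hypothetical) of the setup.py that typically
+# drives the machinery above:
+#
+#     from setuptools import setup
+#     setup(
+#         name="example",
+#         setup_requires=["cffi"],
+#         cffi_modules=["builder/build_foo.py:ffibuilder"],
+#         install_requires=["cffi"],
+#     )
+#
+# setuptools resolves the 'cffi_modules' keyword through cffi's entry
+# point, which calls cffi_modules() below once per "path:variable" spec.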
+
+
+def _add_py_module(dist, ffi, module_name):
+    from setuptools.command.build_py import build_py
+    from setuptools.command.build_ext import build_ext
+    from cffi._shimmed_dist_utils import log, mkpath
+    from cffi import recompiler
+
+    def generate_mod(py_file):
+        log.info("generating cffi module %r" % py_file)
+        mkpath(os.path.dirname(py_file))
+        updated = recompiler.make_py_source(ffi, module_name, py_file)
+        if not updated:
+            log.info("already up-to-date")
+
+    base_class = dist.cmdclass.get('build_py', build_py)
+    class build_py_make_mod(base_class):
+        def run(self):
+            base_class.run(self)
+            module_path = module_name.split('.')
+            module_path[-1] += '.py'
+            generate_mod(os.path.join(self.build_lib, *module_path))
+        def get_source_files(self):
+            # This is called from 'setup.py sdist' only.  Exclude
+            # the generated .py module in this case.
+            saved_py_modules = self.py_modules
+            try:
+                if saved_py_modules:
+                    self.py_modules = [m for m in saved_py_modules
+                                         if m != module_name]
+                return base_class.get_source_files(self)
+            finally:
+                self.py_modules = saved_py_modules
+    dist.cmdclass['build_py'] = build_py_make_mod
+
+    # distutils and setuptools have no notion I could find of a
+    # generated python module.  If we don't add module_name to
+    # dist.py_modules, then things mostly work but there are some
+    # combination of options (--root and --record) that will miss
+    # the module.  So we add it here, which gives a few apparently
+    # harmless warnings about not finding the file outside the
+    # build directory.
+    # Then we need to hack more in get_source_files(); see above.
+    if dist.py_modules is None:
+        dist.py_modules = []
+    dist.py_modules.append(module_name)
+
+    # the following is only for "build_ext -i"
+    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
+    class build_ext_make_mod(base_class_2):
+        def run(self):
+            base_class_2.run(self)
+            if self.inplace:
+                # from get_ext_fullpath() in distutils/command/build_ext.py
+                module_path = module_name.split('.')
+                package = '.'.join(module_path[:-1])
+                build_py = self.get_finalized_command('build_py')
+                package_dir = build_py.get_package_dir(package)
+                file_name = module_path[-1] + '.py'
+                generate_mod(os.path.join(package_dir, file_name))
+    dist.cmdclass['build_ext'] = build_ext_make_mod
+
+def cffi_modules(dist, attr, value):
+    assert attr == 'cffi_modules'
+    if isinstance(value, basestring):
+        value = [value]
+
+    for cffi_module in value:
+        add_cffi_module(dist, cffi_module)
diff --git a/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py b/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py
new file mode 100644
index 0000000..02e6a47
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py
@@ -0,0 +1,1087 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys
+from . import model
+from .error import VerificationError
+from . import _imp_emulation as imp
+
+
+class VCPythonEngine(object):
+    _class_key = 'x'
+    _gen_python_module = True
+
+    def __init__(self, verifier):
+        self.verifier = verifier
+        self.ffi = verifier.ffi
+        self._struct_pending_verification = {}
+        self._types_of_builtin_functions = {}
+
+    def patch_extension_kwds(self, kwds):
+        pass
+
+    def find_module(self, module_name, path, so_suffixes):
+        try:
+            f, filename, descr = imp.find_module(module_name, path)
+        except ImportError:
+            return None
+        if f is not None:
+            f.close()
+        # Note that after a setuptools installation, there are both .py
+        # and .so files with the same basename.  The code here relies on
+        # imp.find_module() locating the .so in priority.
+        if descr[0] not in so_suffixes:
+            return None
+        return filename
+
+    def collect_types(self):
+        self._typesdict = {}
+        self._generate("collecttype")
+
+    def _prnt(self, what=''):
+        self._f.write(what + '\n')
+
+    def _gettypenum(self, type):
+        # a KeyError here is a bug.  please report it!  :-)
+        return self._typesdict[type]
+
+    def _do_collect_type(self, tp):
+        if ((not isinstance(tp, model.PrimitiveType)
+             or tp.name == 'long double')
+                and tp not in self._typesdict):
+            num = len(self._typesdict)
+            self._typesdict[tp] = num
+
+    def write_source_to_f(self):
+        self.collect_types()
+        #
+        # The new module will have a _cffi_setup() function that receives
+        # objects from the ffi world, and that calls some setup code in
+        # the module.  This setup code is split in several independent
+        # functions, e.g. one per constant.  The functions are "chained"
+        # by ending in a tail call to each other.
+        #
+        # This is further split in two chained lists, depending on if we
+        # can do it at import-time or if we must wait for _cffi_setup() to
+        # provide us with the <ctype> objects.  This is needed because we
+        # need the values of the enum constants in order to build the
+        # <ctype 'enum'> that we may have to pass to _cffi_setup().
+        #
+        # The following two 'chained_list_constants' items contain
+        # the head of these two chained lists, as a string that gives the
+        # call to do, if any.
+        self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
+        #
+        prnt = self._prnt
+        # first paste some standard set of lines that are mostly '#define'
+        prnt(cffimod_header)
+        prnt()
+        # then paste the C source given by the user, verbatim.
+        prnt(self.verifier.preamble)
+        prnt()
+        #
+        # call generate_cpy_xxx_decl(), for every xxx found from
+        # ffi._parser._declarations.  This generates all the functions.
+        self._generate("decl")
+        #
+        # implement the function _cffi_setup_custom() as calling the
+        # head of the chained list.
+        self._generate_setup_custom()
+        prnt()
+        #
+        # produce the method table, including the entries for the
+        # generated Python->C function wrappers, which are done
+        # by generate_cpy_function_method().
+        prnt('static PyMethodDef _cffi_methods[] = {')
+        self._generate("method")
+        prnt('  {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+        prnt('  {NULL, NULL, 0, NULL}    /* Sentinel */')
+        prnt('};')
+        prnt()
+        #
+        # standard init.
+        modname = self.verifier.get_module_name()
+        constants = self._chained_list_constants[False]
+        prnt('#if PY_MAJOR_VERSION >= 3')
+        prnt()
+        prnt('static struct PyModuleDef _cffi_module_def = {')
+        prnt('  PyModuleDef_HEAD_INIT,')
+        prnt('  "%s",' % modname)
+        prnt('  NULL,')
+        prnt('  -1,')
+        prnt('  _cffi_methods,')
+        prnt('  NULL, NULL, NULL, NULL')
+        prnt('};')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('PyInit_%s(void)' % modname)
+        prnt('{')
+        prnt('  PyObject *lib;')
+        prnt('  lib = PyModule_Create(&_cffi_module_def);')
+        prnt('  if (lib == NULL)')
+        prnt('    return NULL;')
+        prnt('  if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+        prnt('    Py_DECREF(lib);')
+        prnt('    return NULL;')
+        prnt('  }')
+        prnt('#if Py_GIL_DISABLED')
+        prnt('  PyUnstable_Module_SetGIL(lib, Py_MOD_GIL_NOT_USED);')
+        prnt('#endif')
+        prnt('  return lib;')
+        prnt('}')
+        prnt()
+        prnt('#else')
+        prnt()
+        prnt('PyMODINIT_FUNC')
+        prnt('init%s(void)' % modname)
+        prnt('{')
+        prnt('  PyObject *lib;')
+        prnt('  lib = Py_InitModule("%s", _cffi_methods);' % modname)
+        prnt('  if (lib == NULL)')
+        prnt('    return;')
+        prnt('  if (%s < 0 || _cffi_init() < 0)' % (constants,))
+        prnt('    return;')
+        prnt('  return;')
+        prnt('}')
+        prnt()
+        prnt('#endif')
+
+    def load_library(self, flags=None):
+        # XXX review all usages of 'self' here!
+        # import it as a new extension module
+        imp.acquire_lock()
+        try:
+            if hasattr(sys, "getdlopenflags"):
+                previous_flags = sys.getdlopenflags()
+            try:
+                if hasattr(sys, "setdlopenflags") and flags is not None:
+                    sys.setdlopenflags(flags)
+                module = imp.load_dynamic(self.verifier.get_module_name(),
+                                          self.verifier.modulefilename)
+            except ImportError as e:
+                error = "importing %r: %s" % (self.verifier.modulefilename, e)
+                raise VerificationError(error)
+            finally:
+                if hasattr(sys, "setdlopenflags"):
+                    sys.setdlopenflags(previous_flags)
+        finally:
+            imp.release_lock()
+        #
+        # call loading_cpy_struct() to get the struct layout inferred by
+        # the C compiler
+        self._load(module, 'loading')
+        #
+        # the C code will need the <ctype> objects.  Collect them in
+        # order in a list.
+        revmapping = dict([(value, key)
+                           for (key, value) in self._typesdict.items()])
+        lst = [revmapping[i] for i in range(len(revmapping))]
+        lst = list(map(self.ffi._get_cached_btype, lst))
+        #
+        # build the FFILibrary class and instance and call _cffi_setup().
+        # this will set up some fields like '_cffi_types', and only then
+        # it will invoke the chained list of functions that will really
+        # build (notably) the constant objects, as if they are
+        # pointers, and store them as attributes on the 'library' object.
+        class FFILibrary(object):
+            _cffi_python_module = module
+            _cffi_ffi = self.ffi
+            _cffi_dir = []
+            def __dir__(self):
+                return FFILibrary._cffi_dir + list(self.__dict__)
+        library = FFILibrary()
+        if module._cffi_setup(lst, VerificationError, library):
+            import warnings
+            warnings.warn("reimporting %r might overwrite older definitions"
+                          % (self.verifier.get_module_name()))
+        #
+        # finally, call the loaded_cpy_xxx() functions.  This will perform
+        # the final adjustments, like copying the Python->C wrapper
+        # functions from the module to the 'library' object, and setting
+        # up the FFILibrary class with properties for the global C variables.
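+        # (Illustrative only: in the deprecated ffi.verify() workflow this
+        # is what ultimately lets user code do, e.g.,
+        #     lib = ffi.verify("...")
+        #     lib.some_function(42); lib.SOME_CONSTANT
+        # where 'some_function' and 'SOME_CONSTANT' are hypothetical names
+        # attached to 'library' by the _loaded_cpy_xxx() steps below.)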
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif tp.is_complex_type(): + raise VerificationError( + "not implemented in verify(): complex types") + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type 
in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
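# Sketch of the layout list decoded above, for a hypothetical struct with an
# int field at offset 0 and a double field at offset 8: the generated
# _cffi_layout_* function reports [total size, total alignment,
# offset0, size0, offset1, size1, ...], terminated by -1 on the C side.
_example_layout = [16, 8, 0, 4, 8, 8]
_fieldofs = _example_layout[2::2]    # [0, 8]
_fieldsize = _example_layout[3::2]   # [4, 8]
assert _fieldofs == [0, 8] and _fieldsize == [4, 8]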
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr=<cdata 'int *'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0].
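# A pure-Python sketch of the property trick described above, with a plain
# list standing in for the real <cdata> pointer 'ptr':
_ptr = [0]

class _ExampleLibrary(object):
    pass

_ExampleLibrary.x = property(lambda self: _ptr[0],
                             lambda self, value: _ptr.__setitem__(0, value))
_lib = _ExampleLibrary()
_lib.x = 42             # writes into _ptr[0]
assert _lib.x == 42     # reads back from _ptr[0]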
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include <Python.h> +#include <stddef.h> + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ?
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + 
__attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/venv/lib/python3.12/site-packages/cffi/vengine_gen.py b/venv/lib/python3.12/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..bffc821 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/vengine_gen.py @@ -0,0 +1,679 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
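# Sketch of the getattr-based dispatch that _generate()/_load() perform:
# declaration keys look like "function add" or "struct point" (the key below
# is hypothetical), so the kind prefix selects a method name such as
# _generate_gen_function_decl.
_key = 'function add'
_kind, _realname = _key.split(' ', 1)
assert '_generate_gen_%s_%s' % (_kind, 'decl') == '_generate_gen_function_decl'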
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. + self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = 
tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + 
and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
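# Hedged sketch of the indirection built by _make_struct_wrapper above: a C
# function returning a struct by value is exported as taking a result pointer
# first, and the Python-side wrapper allocates that buffer. 'alloc_result' is
# a hypothetical stand-in for something like 'lambda: ffi.new("struct S *")'.
def _wrap_struct_result(oldfunc, alloc_result):
    def newfunc(*args):
        res = alloc_result()     # fresh output buffer
        oldfunc(res, *args)      # the C side fills *res
        return res[0]            # dereference to a struct value
    return newfunc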
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr=<cdata 'int *'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include <stdio.h> +#include <stddef.h> +#include <stdarg.h> +#include <errno.h> +#include <sys/types.h> /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */ +# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */ +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +# define _cffi_float_complex_t float _Complex +# define _cffi_double_complex_t double _Complex +#endif +''' diff --git a/venv/lib/python3.12/site-packages/cffi/verifier.py b/venv/lib/python3.12/site-packages/cffi/verifier.py new file mode 100644 index 0000000..e392a2b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cffi/verifier.py @@ -0,0 +1,306 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . 
import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA new file mode 100644 index 0000000..3f433af --- /dev/null +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA @@ -0,0 +1,84 @@ +Metadata-Version: 2.4 +Name: click +Version: 8.3.1 +Summary: Composable command line interface toolkit +Maintainer-email: Pallets <contact@palletsprojects.com> +Requires-Python: >=3.10 +Description-Content-Type: text/markdown +License-Expression: BSD-3-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Typing :: Typed +License-File: LICENSE.txt +Requires-Dist: colorama; platform_system == 'Windows' +Project-URL: Changes, https://click.palletsprojects.com/page/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Source, https://github.com/pallets/click/ +
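A quick way to read the headers above at runtime is the stdlib `importlib.metadata` (assuming click is installed in the running environment); the commented values follow the fields shown here:

```python
from importlib import metadata

md = metadata.metadata("click")
print(md["Version"])           # 8.3.1
print(md["Requires-Python"])   # >=3.10
```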
    + +# Click + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +## A Simple Example + +```python +import click + +@click.command() +@click.option("--count", default=1, help="Number of greetings.") +@click.option("--name", prompt="Your name", help="The person to greet.") +def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + +if __name__ == '__main__': + hello() +``` + +``` +$ python hello.py --count=3 +Your name: Click +Hello, Click! +Hello, Click! +Hello, Click! +``` + + +## Donate + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, [please +donate today][]. + +[please donate today]: https://palletsprojects.com/donate + +## Contributing + +See our [detailed contributing documentation][contrib] for many ways to +contribute, including reporting issues, requesting features, asking or answering +questions, and making PRs. + +[contrib]: https://palletsprojects.com/contributing/ + diff --git a/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD new file mode 100644 index 0000000..008d697 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD @@ -0,0 +1,40 @@ +click-8.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.3.1.dist-info/METADATA,sha256=XZeBrMAE0ghTE88SjfrSDuSyNCpBPplxJR1tbwD9oZg,2621 +click-8.3.1.dist-info/RECORD,, +click-8.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +click-8.3.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473 +click/__pycache__/__init__.cpython-312.pyc,, +click/__pycache__/_compat.cpython-312.pyc,, +click/__pycache__/_termui_impl.cpython-312.pyc,, +click/__pycache__/_textwrap.cpython-312.pyc,, +click/__pycache__/_utils.cpython-312.pyc,, +click/__pycache__/_winconsole.cpython-312.pyc,, +click/__pycache__/core.cpython-312.pyc,, +click/__pycache__/decorators.cpython-312.pyc,, +click/__pycache__/exceptions.cpython-312.pyc,, +click/__pycache__/formatting.cpython-312.pyc,, +click/__pycache__/globals.cpython-312.pyc,, +click/__pycache__/parser.cpython-312.pyc,, +click/__pycache__/shell_completion.cpython-312.pyc,, +click/__pycache__/termui.cpython-312.pyc,, +click/__pycache__/testing.cpython-312.pyc,, +click/__pycache__/types.cpython-312.pyc,, +click/__pycache__/utils.cpython-312.pyc,, +click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693 +click/_termui_impl.py,sha256=rgCb3On8X5A4200rA5L6i13u5iapmFer7sru57Jy6zA,27093 +click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400 +click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943 
+click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465 +click/core.py,sha256=U6Bfxt8GkjNDqyJ0HqXvluJHtyZ4sY5USAvM1Cdq7mQ,132105 +click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461 +click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954 +click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730 +click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923 +click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994 +click/termui.py,sha256=hqCEjNndU-nzW08nRAkBaVgfZp_FdCA9KxfIWlKYaMc,31037 +click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102 +click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927 +click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257 diff --git a/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL new file mode 100644 index 0000000..d8b9936 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000..d12a849 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/click/__init__.py b/venv/lib/python3.12/site-packages/click/__init__.py new file mode 100644 index 0000000..1aa547c --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/__init__.py @@ -0,0 +1,123 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. 
Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" + +from __future__ import annotations + +from .core import Argument as Argument +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + + +def __getattr__(name: str) -> object: + import warnings + + if name == "BaseCommand": + from .core import _BaseCommand + + warnings.warn( + "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" + " 'Command' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _BaseCommand + + if name == "MultiCommand": + from .core import _MultiCommand + + warnings.warn( + "'MultiCommand' is deprecated and will be removed in Click 9.0. 
Use" + " 'Group' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _MultiCommand + + if name == "OptionParser": + from .parser import _OptionParser + + warnings.warn( + "'OptionParser' is deprecated and will be removed in Click 9.0. The" + " old parser is available in 'optparse'.", + DeprecationWarning, + stacklevel=2, + ) + return _OptionParser + + if name == "__version__": + import importlib.metadata + import warnings + + warnings.warn( + "The '__version__' attribute is deprecated and will be removed in" + " Click 9.1. Use feature detection or" + " 'importlib.metadata.version(\"click\")' instead.", + DeprecationWarning, + stacklevel=2, + ) + return importlib.metadata.version("click") + + raise AttributeError(name) diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..7a14ac3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..51d14fb Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc new file mode 100644 index 0000000..bae6521 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc new file mode 100644 index 0000000..b0e38a1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 0000000..d8bb22b Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc new file mode 100644 index 0000000..b048b7f Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..9467ab7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc new file mode 100644 index 0000000..6d70b85 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 
0000000..82f62ca Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc new file mode 100644 index 0000000..b57e03f Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc new file mode 100644 index 0000000..fb770be Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc new file mode 100644 index 0000000..f2e292f Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc new file mode 100644 index 0000000..2cca434 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc new file mode 100644 index 0000000..edd2cd1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc new file mode 100644 index 0000000..74fa8b4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000..f33cfaf Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..494ba90 Binary files /dev/null and b/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/click/_compat.py b/venv/lib/python3.12/site-packages/click/_compat.py new file mode 100644 index 0000000..f2726b9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/_compat.py @@ -0,0 +1,622 @@ +from __future__ import annotations + +import codecs +import collections.abc as cabc +import io +import os +import re +import sys +import typing as t +from types import TracebackType +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: str | None, + errors: str | None, + force_readable: bool = False, + force_writable: 
bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: str | None, + errors: str | None, + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). + """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write(b"") + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. 
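Aside, for orientation while reading this vendored `_compat.py` hunk: the reader/writer probes above decide whether a stream is binary by attempting a zero-length I/O operation and inspecting the result type. A minimal standalone sketch of that trick, assuming ordinary `io` streams (the helper name is made up for illustration and is not part of Click):

```python
import io

def looks_like_binary_reader(stream) -> bool:
    # Zero-byte probe: a binary stream returns b"" (bytes), a text
    # stream returns "" (str). If the stream is closed or unreadable,
    # the read raises and we conservatively report False.
    try:
        return isinstance(stream.read(0), bytes)
    except Exception:
        return False

assert looks_like_binary_reader(io.BytesIO(b"data"))
assert not looks_like_binary_reader(io.StringIO("data"))
```

Click's real helpers go one step further: when the wrapper stream itself is text, `_find_binary_reader`/`_find_binary_writer` (next in this hunk) fall back to its `.buffer` attribute, which is assumed to be binary even if closed.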
+ + +def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. + return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: str | None, errors: str | None +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: str | None, + errors: str | None, + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. 
+ possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. + return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: str | None, + errors: str | None, + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: str | None, + errors: str | None, + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: str | os.PathLike[str] | int, + mode: str, + encoding: str | None, + errors: str | None, +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: str | os.PathLike[str], + mode: str = "r", + encoding: str | None = None, + errors: str | None = "strict", + atomic: bool = False, +) -> tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). 
+ if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. + if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: int | None = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> _AtomicFile: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.IO[t.Any] | None = None, color: bool | None = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s: str) -> int: + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write # type: ignore[method-assign] + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: str | None, errors: str | None + ) -> t.TextIO | None: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.TextIO | None], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.TextIO | None]: + cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.TextIO | None: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/venv/lib/python3.12/site-packages/click/_termui_impl.py b/venv/lib/python3.12/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..ee8225c --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/_termui_impl.py @@ -0,0 +1,852 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" + +from __future__ import annotations + +import collections.abc as cabc +import contextlib +import math +import os +import shlex +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from pathlib import Path +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: cabc.Iterable[V] | None, + length: int | None = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + item_show_func: t.Callable[[V | None], str | None] | None = None, + label: str | None = None, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.hidden = hidden + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast("cabc.Iterable[V]", range(length)) + self.iter: cabc.Iterable[V] = iter(iterable) + self.length = length + self.pos: int = 0 + self.avg: list[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: int | None = None + self.entered: bool = False + self.current_item: V | None = None + self._is_atty = isatty(self.file) + self._last_line: str | None = None + + def __enter__(self) -> ProgressBar[V]: + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.render_finish() + + def __iter__(self) -> cabc.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.hidden or not self._is_atty: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + if self.hidden: + return + + if not self._is_atty: + # Only output the label once if the output is not a TTY. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + import shutil + + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width and self.max_width is not None: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: V | None = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> cabc.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if not self._is_atty: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. + if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if stdout is None: + stdout = StringIO() + + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + + # Split and normalize the pager command into parts. 
+ pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False) + if pager_cmd_parts: + if WIN: + if _tempfilepager(generator, pager_cmd_parts, color): + return + elif _pipepager(generator, pager_cmd_parts, color): + return + + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if (WIN or sys.platform.startswith("os2")) and _tempfilepager( + generator, ["more"], color + ): + return + if _pipepager(generator, ["less"], color): + return + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if _pipepager(generator, ["more"], color): + return + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager( + generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None +) -> bool: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + + Returns `True` if the command was found, `False` otherwise and thus another + pager should be attempted. + """ + # Split the command into the invoked CLI and its parameters. + if not cmd_parts: + return False + + import shutil + + cmd = cmd_parts[0] + cmd_params = cmd_parts[1:] + + cmd_filepath = shutil.which(cmd) + if not cmd_filepath: + return False + + # Produces a normalized absolute path string. + # multi-call binaries such as busybox derive their identity from the symlink + # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) + cmd_path = Path(cmd_filepath).absolute() + cmd_name = cmd_path.name + + import subprocess + + # Make a local copy of the environment to not affect the global one. + env = dict(os.environ) + + # If we're piping to less and the user hasn't decided on colors, we enable + # them by default we find the -R flag in the command line arguments. + if color is None and cmd_name == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen( + [str(cmd_path)] + cmd_params, + shell=False, + stdin=subprocess.PIPE, + env=env, + errors="replace", + text=True, + ) + assert c.stdin is not None + try: + for text in generator: + if not color: + text = strip_ansi(text) + + c.stdin.write(text) + except BrokenPipeError: + # In case the pager exited unexpectedly, ignore the broken pipe error. + pass + except Exception as e: + # In case there is an exception we want to close the pager immediately + # and let the caller handle it. + # Otherwise the pager will keep running, and the user may not notice + # the error message, or worse yet it may leave the terminal in a broken state. + c.terminate() + raise e + finally: + # We must close stdin and wait for the pager to exit before we continue + try: + c.stdin.close() + # Close implies flush, so it might throw a BrokenPipeError if the pager + # process exited already. + except BrokenPipeError: + pass + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. 
+ while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + return True + + +def _tempfilepager( + generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None +) -> bool: + """Page through text by invoking a program on a temporary file. + + Returns `True` if the command was found, `False` otherwise and thus another + pager should be attempted. + """ + # Split the command into the invoked CLI and its parameters. + if not cmd_parts: + return False + + import shutil + + cmd = cmd_parts[0] + + cmd_filepath = shutil.which(cmd) + if not cmd_filepath: + return False + # Produces a normalized absolute path string. + # multi-call binaries such as busybox derive their identity from the symlink + # less -> busybox. resolve() causes them to misbehave. (eg. less becomes busybox) + cmd_path = Path(cmd_filepath).absolute() + + import subprocess + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + subprocess.call([str(cmd_path), filename]) + except OSError: + # Command not found + pass + finally: + os.close(fd) + os.unlink(filename) + + return True + + +def _nullpager( + stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None +) -> None: + """Simply print unformatted text. This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + + from shutil import which + + for editor in "sensible-editor", "vim", "nano": + if which(editor) is not None: + return editor + return "vi" + + def edit_files(self, filenames: cabc.Iterable[str]) -> None: + import subprocess + + editor = self.get_editor() + environ: dict[str, str] | None = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + exc_filename = " ".join(f'"{filename}"' for filename in filenames) + + try: + c = subprocess.Popen( + args=f"{editor} {exc_filename}", env=environ, shell=True + ) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + @t.overload + def edit(self, text: bytes | bytearray) -> bytes | None: ... + + # We cannot know whether or not the type expected is str or bytes when None + # is passed, so str is returned as that was what was done before. + @t.overload + def edit(self, text: str | None) -> str | None: ... 
+ + def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None: + import tempfile + + if text is None: + data: bytes | bytearray = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. + timestamp = os.path.getmtime(name) + + self.edit_files((name,)) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url) + args = ["explorer", f"/select,{url}"] + else: + args = ["start"] + if wait: + args.append("/WAIT") + args.append("") + args.append(url) + try: + return subprocess.call(args) + except OSError: + # Command not found + return 127 + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = ["cygstart", os.path.dirname(url)] + else: + args = ["cygstart"] + if wait: + args.append("-w") + args.append(url) + try: + return subprocess.call(args) + except OSError: + # Command not found + return 127 + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> None: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if sys.platform == "win32": + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> cabc.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. 
+ # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + + if echo: + func = t.cast(t.Callable[[], str], msvcrt.getwche) + else: + func = t.cast(t.Callable[[], str], msvcrt.getwch) + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import termios + import tty + + @contextlib.contextmanager + def raw_terminal() -> cabc.Iterator[int]: + f: t.TextIO | None + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/venv/lib/python3.12/site-packages/click/_textwrap.py b/venv/lib/python3.12/site-packages/click/_textwrap.py new file mode 100644 index 0000000..97fbee3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/_textwrap.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +import collections.abc as cabc +import textwrap +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: list[str], + cur_line: list[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> cabc.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in 
enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/venv/lib/python3.12/site-packages/click/_utils.py b/venv/lib/python3.12/site-packages/click/_utils.py new file mode 100644 index 0000000..09fb008 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/_utils.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +import enum +import typing as t + + +class Sentinel(enum.Enum): + """Enum used to define sentinel values. + + .. seealso:: + + `PEP 661 - Sentinel Values `_. + """ + + UNSET = object() + FLAG_NEEDS_VALUE = object() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}.{self.name}" + + +UNSET = Sentinel.UNSET +"""Sentinel used to indicate that a value is not set.""" + +FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE +"""Sentinel used to indicate an option was passed as a flag without a +value but is not a flag option. + +``Option.consume_value`` uses this to prompt or use the ``flag_value``. +""" + +T_UNSET = t.Literal[UNSET] # type: ignore[valid-type] +"""Type hint for the :data:`UNSET` sentinel value.""" + +T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type] +"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value.""" diff --git a/venv/lib/python3.12/site-packages/click/_winconsole.py b/venv/lib/python3.12/site-packages/click/_winconsole.py new file mode 100644 index 0000000..e56c7c6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/_winconsole.py @@ -0,0 +1,296 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
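The `_winconsole.py` hunk that opens above talks to the Win32 console directly through `ctypes`. As a hedged illustration of the detection idea the module relies on (`GetConsoleMode` succeeds only on a genuine console handle, which is what filters out pipes and redirected files), here is a small Windows-only sketch; the function name is hypothetical and not part of the vendored file:

```python
import sys

if sys.platform == "win32":
    import msvcrt
    from ctypes import byref, windll
    from ctypes.wintypes import DWORD

    def fd_is_real_console(fileno: int) -> bool:
        # Convert the C-runtime file descriptor to a Win32 HANDLE, then
        # ask the kernel for its console mode. The call returns 0 for
        # anything that is not an attached console (pipes, files).
        handle = msvcrt.get_osfhandle(fileno)
        return bool(windll.kernel32.GetConsoleMode(handle, byref(DWORD())))

    print(fd_is_real_console(sys.stdout.fileno()))
```

The module's own `_is_console` applies the same check before swapping in the UTF-16-LE `ReadConsoleW`/`WriteConsoleW` wrappers.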
+from __future__ import annotations + +import collections.abc as cabc +import io +import sys +import time +import typing as t +from ctypes import Array +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +if t.TYPE_CHECKING: + try: + # Using `typing_extensions.Buffer` instead of `collections.abc` + # on Windows for some reason does not have `Sized` implemented. + from collections.abc import Buffer # type: ignore + except ImportError: + from typing_extensions import Buffer + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. 
+ get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ # noqa: RUF012 + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]: + buf = Py_buffer() + flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + out: Array[c_char] = buffer_type.from_address(buf.buf) + return out + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle: int | None) -> None: + self.handle = handle + + def isatty(self) -> t.Literal[True]: + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self) -> t.Literal[True]: + return True + + def readinto(self, b: Buffer) -> int: + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self) -> t.Literal[True]: + return True + + @staticmethod + def _get_error_message(errno: int) -> str: + if errno == ERROR_SUCCESS: + return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b: Buffer) -> int: + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self) -> str: + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + 
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: str | None, errors: str | None +) -> t.TextIO | None: + if ( + get_buffer is None + or encoding not in {"utf-16-le", None} + or errors not in {"strict", None} + or not _is_console(f) + ): + return None + + func = _stream_factories.get(f.fileno()) + if func is None: + return None + + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/venv/lib/python3.12/site-packages/click/core.py b/venv/lib/python3.12/site-packages/click/core.py new file mode 100644 index 0000000..57f549c --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/core.py @@ -0,0 +1,3415 @@ +from __future__ import annotations + +import collections.abc as cabc +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from collections import Counter +from contextlib import AbstractContextManager +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from gettext import gettext as _ +from gettext import ngettext +from itertools import repeat +from types import TracebackType + +from . 
import types
+from ._utils import FLAG_NEEDS_VALUE
+from ._utils import UNSET
+from .exceptions import Abort
+from .exceptions import BadParameter
+from .exceptions import ClickException
+from .exceptions import Exit
+from .exceptions import MissingParameter
+from .exceptions import NoArgsIsHelpError
+from .exceptions import UsageError
+from .formatting import HelpFormatter
+from .formatting import join_options
+from .globals import pop_context
+from .globals import push_context
+from .parser import _OptionParser
+from .parser import _split_opt
+from .termui import confirm
+from .termui import prompt
+from .termui import style
+from .utils import _detect_program_name
+from .utils import _expand_args
+from .utils import echo
+from .utils import make_default_short_help
+from .utils import make_str
+from .utils import PacifyFlushWrapper
+
+if t.TYPE_CHECKING:
+    from .shell_completion import CompletionItem
+
+F = t.TypeVar("F", bound="t.Callable[..., t.Any]")
+V = t.TypeVar("V")
+
+
+def _complete_visible_commands(
+    ctx: Context, incomplete: str
+) -> cabc.Iterator[tuple[str, Command]]:
+    """List all the subcommands of a group that start with the
+    incomplete value and aren't hidden.
+
+    :param ctx: Invocation context for the group.
+    :param incomplete: Value being completed. May be empty.
+    """
+    multi = t.cast(Group, ctx.command)
+
+    for name in multi.list_commands(ctx):
+        if name.startswith(incomplete):
+            command = multi.get_command(ctx, name)
+
+            if command is not None and not command.hidden:
+                yield name, command
+
+
+def _check_nested_chain(
+    base_command: Group, cmd_name: str, cmd: Command, register: bool = False
+) -> None:
+    if not base_command.chain or not isinstance(cmd, Group):
+        return
+
+    if register:
+        message = (
+            f"It is not possible to add the group {cmd_name!r} to another"
+            f" group {base_command.name!r} that is in chain mode."
+        )
+    else:
+        message = (
+            f"Found the group {cmd_name!r} as a subcommand to another group"
+            f" {base_command.name!r} that is in chain mode. This is not supported."
+        )
+
+    raise RuntimeError(message)
+
+
+def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]:
+    return list(zip(*repeat(iter(iterable), batch_size), strict=False))
+
+
+@contextmanager
+def augment_usage_errors(
+    ctx: Context, param: Parameter | None = None
+) -> cabc.Iterator[None]:
+    """Context manager that attaches extra information to exceptions."""
+    try:
+        yield
+    except BadParameter as e:
+        if e.ctx is None:
+            e.ctx = ctx
+        if param is not None and e.param is None:
+            e.param = param
+        raise
+    except UsageError as e:
+        if e.ctx is None:
+            e.ctx = ctx
+        raise
+
+
+def iter_params_for_processing(
+    invocation_order: cabc.Sequence[Parameter],
+    declaration_order: cabc.Sequence[Parameter],
+) -> list[Parameter]:
+    """Returns all declared parameters in the order they should be processed.
+
+    The declared parameters are re-shuffled depending on the order in which
+    they were invoked, as well as the eagerness of each parameter.
+
+    The invocation order takes precedence over the declaration order. I.e. the
+    order in which the user provided them to the CLI is respected.
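+
+    For example (an editorial illustration, not upstream text), with two
+    hypothetical parameters, a non-eager ``name`` and an eager ``help``,
+    the eager one sorts to the front even when ``name`` was invoked first::
+
+        iter_params_for_processing([name, help], [name, help])
+        # -> [help, name]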
+
+    This behavior and its effect on callback evaluation is detailed at:
+    https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order
+    """
+
+    def sort_key(item: Parameter) -> tuple[bool, float]:
+        try:
+            idx: float = invocation_order.index(item)
+        except ValueError:
+            idx = float("inf")
+
+        return not item.is_eager, idx
+
+    return sorted(declaration_order, key=sort_key)
+
+
+class ParameterSource(enum.Enum):
+    """This is an :class:`~enum.Enum` that indicates the source of a
+    parameter's value.
+
+    Use :meth:`click.Context.get_parameter_source` to get the
+    source for a parameter by name.
+
+    .. versionchanged:: 8.0
+        Use :class:`~enum.Enum` and drop the ``validate`` method.
+
+    .. versionchanged:: 8.0
+        Added the ``PROMPT`` value.
+    """
+
+    COMMANDLINE = enum.auto()
+    """The value was provided by the command line args."""
+    ENVIRONMENT = enum.auto()
+    """The value was provided with an environment variable."""
+    DEFAULT = enum.auto()
+    """Used the default specified by the parameter."""
+    DEFAULT_MAP = enum.auto()
+    """Used a default provided by :attr:`Context.default_map`."""
+    PROMPT = enum.auto()
+    """Used a prompt to confirm a default or provide a value."""
+
+
+class Context:
+    """The context is a special internal object that holds state relevant
+    for the script execution at every single level. It's normally invisible
+    to commands unless they opt-in to getting access to it.
+
+    The context is useful as it can pass internal objects around and can
+    control special execution features such as reading data from
+    environment variables.
+
+    A context can be used as a context manager in which case it will call
+    :meth:`close` on teardown.
+
+    :param command: the command class for this context.
+    :param parent: the parent context.
+    :param info_name: the info name for this invocation. Generally this
+                      is the most descriptive name for the script or
+                      command. For the toplevel script it is usually
+                      the name of the script, for commands below it it's
+                      the name of the command.
+    :param obj: an arbitrary object of user data.
+    :param auto_envvar_prefix: the prefix to use for automatic environment
+                               variables. If this is `None` then reading
+                               from environment variables is disabled. This
+                               does not affect manually set environment
+                               variables which are always read.
+    :param default_map: a dictionary (-like object) with default values
+                        for parameters.
+    :param terminal_width: the width of the terminal. The default is to
+                           inherit from the parent context. If no context
+                           defines the terminal width then auto
+                           detection will be applied.
+    :param max_content_width: the maximum width for content rendered by
+                              Click (this currently only affects help
+                              pages). This defaults to 80 characters if
+                              not overridden. In other words: even if the
+                              terminal is larger than that, Click will not
+                              format things wider than 80 characters by
+                              default. In addition to that, formatters might
+                              add some safety margin on the right.
+    :param resilient_parsing: if this flag is enabled then Click will
+                              parse without any interactivity or callback
+                              invocation. Default values will also be
+                              ignored. This is useful for implementing
+                              things such as completion support.
+    :param allow_extra_args: if this is set to `True` then extra arguments
+                             at the end will not raise an error and will be
+                             kept on the context. The default is to inherit
+                             from the command.
+    :param allow_interspersed_args: if this is set to `False` then options
+                                    and arguments cannot be mixed. The
+                                    default is to inherit from the command.
+    :param ignore_unknown_options: instructs click to ignore options it does
+                                   not know and keeps them for later
+                                   processing.
+    :param help_option_names: optionally a list of strings that define how
+                              the default help parameter is named. The
+                              default is ``['--help']``.
+    :param token_normalize_func: an optional function that is used to
+                                 normalize tokens (options, choices,
+                                 etc.). This for instance can be used to
+                                 implement case-insensitive behavior.
+    :param color: controls if the terminal supports ANSI colors or not. The
+                  default is autodetection. This is only needed if ANSI
+                  codes are used in texts that Click prints which is by
+                  default not the case. This for instance would affect
+                  help output.
+    :param show_default: Show the default value for commands. If this
+        value is not set, it defaults to the value from the parent
+        context. ``Command.show_default`` overrides this default for the
+        specific command.
+
+    .. versionchanged:: 8.2
+        The ``protected_args`` attribute is deprecated and will be removed in
+        Click 9.0. ``args`` will contain remaining unparsed tokens.
+
+    .. versionchanged:: 8.1
+        The ``show_default`` parameter is overridden by
+        ``Command.show_default``, instead of the other way around.
+
+    .. versionchanged:: 8.0
+        The ``show_default`` parameter defaults to the value from the
+        parent context.
+
+    .. versionchanged:: 7.1
+        Added the ``show_default`` parameter.
+
+    .. versionchanged:: 4.0
+        Added the ``color``, ``ignore_unknown_options``, and
+        ``max_content_width`` parameters.
+
+    .. versionchanged:: 3.0
+        Added the ``allow_extra_args`` and ``allow_interspersed_args``
+        parameters.
+
+    .. versionchanged:: 2.0
+        Added the ``resilient_parsing``, ``help_option_names``, and
+        ``token_normalize_func`` parameters.
+    """
+
+    #: The formatter class to create with :meth:`make_formatter`.
+    #:
+    #: .. versionadded:: 8.0
+    formatter_class: type[HelpFormatter] = HelpFormatter
+
+    def __init__(
+        self,
+        command: Command,
+        parent: Context | None = None,
+        info_name: str | None = None,
+        obj: t.Any | None = None,
+        auto_envvar_prefix: str | None = None,
+        default_map: cabc.MutableMapping[str, t.Any] | None = None,
+        terminal_width: int | None = None,
+        max_content_width: int | None = None,
+        resilient_parsing: bool = False,
+        allow_extra_args: bool | None = None,
+        allow_interspersed_args: bool | None = None,
+        ignore_unknown_options: bool | None = None,
+        help_option_names: list[str] | None = None,
+        token_normalize_func: t.Callable[[str], str] | None = None,
+        color: bool | None = None,
+        show_default: bool | None = None,
+    ) -> None:
+        #: the parent context or `None` if none exists.
+        self.parent = parent
+        #: the :class:`Command` for this context.
+        self.command = command
+        #: the descriptive information name
+        self.info_name = info_name
+        #: Map of parameter names to their parsed values. Parameters
+        #: with ``expose_value=False`` are not stored.
+        self.params: dict[str, t.Any] = {}
+        #: the leftover arguments.
+        self.args: list[str] = []
+        #: protected arguments. These are arguments that are prepended
+        #: to `args` when certain parsing scenarios are encountered but
+        #: must never be propagated to other arguments. This is used
+        #: to implement nested parsing.
+        self._protected_args: list[str] = []
+        #: the collected prefixes of the command's options.
+        self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set()
+
+        if obj is None and parent is not None:
+            obj = parent.obj
+
+        #: the user object stored.
+        self.obj: t.Any = obj
+        self._meta: dict[str, t.Any] = getattr(parent, "meta", {})
+
+        #: A dictionary (-like object) with defaults for parameters.
+        if (
+            default_map is None
+            and info_name is not None
+            and parent is not None
+            and parent.default_map is not None
+        ):
+            default_map = parent.default_map.get(info_name)
+
+        self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map
+
+        #: This flag indicates if a subcommand is going to be executed. A
+        #: group callback can use this information to figure out if it's
+        #: being executed directly or because the execution flow passes
+        #: onwards to a subcommand. By default it's None, but it can be
+        #: the name of the subcommand to execute.
+        #:
+        #: If chaining is enabled this will be set to ``'*'`` in case
+        #: any commands are executed. It is however not possible to
+        #: figure out which ones. If you require this knowledge you
+        #: should use a :func:`result_callback`.
+        self.invoked_subcommand: str | None = None
+
+        if terminal_width is None and parent is not None:
+            terminal_width = parent.terminal_width
+
+        #: The width of the terminal (None is autodetection).
+        self.terminal_width: int | None = terminal_width
+
+        if max_content_width is None and parent is not None:
+            max_content_width = parent.max_content_width
+
+        #: The maximum width of formatted content (None implies a sensible
+        #: default which is 80 for most things).
+        self.max_content_width: int | None = max_content_width
+
+        if allow_extra_args is None:
+            allow_extra_args = command.allow_extra_args
+
+        #: Indicates if the context allows extra args or if it should
+        #: fail on parsing.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_extra_args = allow_extra_args
+
+        if allow_interspersed_args is None:
+            allow_interspersed_args = command.allow_interspersed_args
+
+        #: Indicates if the context allows mixing of arguments and
+        #: options or not.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_interspersed_args: bool = allow_interspersed_args
+
+        if ignore_unknown_options is None:
+            ignore_unknown_options = command.ignore_unknown_options
+
+        #: Instructs click to ignore options that a command does not
+        #: understand and to store them on the context for later
+        #: processing. This is primarily useful for situations where you
+        #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+        #: forward all arguments.
+        #:
+        #: .. versionadded:: 4.0
+        self.ignore_unknown_options: bool = ignore_unknown_options
+
+        if help_option_names is None:
+            if parent is not None:
+                help_option_names = parent.help_option_names
+            else:
+                help_option_names = ["--help"]
+
+        #: The names for the help options.
+        self.help_option_names: list[str] = help_option_names
+
+        if token_normalize_func is None and parent is not None:
+            token_normalize_func = parent.token_normalize_func
+
+        #: An optional normalization function for tokens. This applies to
+        #: options, choices, commands, etc.
+        self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func
+
+        #: Indicates if resilient parsing is enabled. In that case Click
+        #: will do its best to not cause any failures and default values
+        #: will be ignored. Useful for completion.
+        self.resilient_parsing: bool = resilient_parsing
+
+        # If there is no envvar prefix yet, but the parent has one and
+        # the command on this level has a name, we can expand the envvar
+        # prefix automatically.
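+        # Editorial illustration (not upstream Click): with a parent prefix
+        # of "FOO" and a subcommand named "serve", the expansion below gives
+        # "FOO_SERVE", so an option ``--port`` on that subcommand would read
+        # the FOO_SERVE_PORT environment variable.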
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: str | None = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: bool | None = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: bool | None = show_default + + self._close_callbacks: list[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + @property + def protected_args(self) -> list[str]: + import warnings + + warnings.warn( + "'protected_args' is deprecated and will be removed in Click 9.0." + " 'args' will contain remaining unparsed tokens.", + DeprecationWarning, + stacklevel=2, + ) + return self._protected_args + + def to_info_dict(self) -> dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> Context: + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> bool | None: + self._depth -= 1 + exit_result: bool | None = None + if self._depth == 0: + exit_result = self._close_with_exception_info(exc_type, exc_value, tb) + pop_context() + + return exit_result + + @contextmanager + def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. 
It exists so that click utilities can store some
+        state here if they need to. It is however the responsibility of
+        that code to manage this dictionary well.
+
+        The keys are supposed to be unique dotted strings. For instance
+        module paths are a good choice for it. What is stored in there is
+        irrelevant for the operation of click. However, what is important is
+        that code that places data here adheres to the general semantics of
+        the system.
+
+        Example usage::
+
+            LANG_KEY = f'{__name__}.lang'
+
+            def set_language(value):
+                ctx = get_current_context()
+                ctx.meta[LANG_KEY] = value
+
+            def get_language():
+                return get_current_context().meta.get(LANG_KEY, 'en_US')
+
+        .. versionadded:: 5.0
+        """
+        return self._meta
+
+    def make_formatter(self) -> HelpFormatter:
+        """Creates the :class:`~click.HelpFormatter` for the help and
+        usage output.
+
+        To quickly customize the formatter class used without overriding
+        this method, set the :attr:`formatter_class` attribute.
+
+        .. versionchanged:: 8.0
+            Added the :attr:`formatter_class` attribute.
+        """
+        return self.formatter_class(
+            width=self.terminal_width, max_width=self.max_content_width
+        )
+
+    def with_resource(self, context_manager: AbstractContextManager[V]) -> V:
+        """Register a resource as if it were used in a ``with``
+        statement. The resource will be cleaned up when the context is
+        popped.
+
+        Uses :meth:`contextlib.ExitStack.enter_context`. It calls the
+        resource's ``__enter__()`` method and returns the result. When
+        the context is popped, it closes the stack, which calls the
+        resource's ``__exit__()`` method.
+
+        To register a cleanup function for something that isn't a
+        context manager, use :meth:`call_on_close`. Or use something
+        from :mod:`contextlib` to turn it into a context manager first.
+
+        .. code-block:: python
+
+            @click.group()
+            @click.option("--name")
+            @click.pass_context
+            def cli(ctx, name):
+                ctx.obj = ctx.with_resource(connect_db(name))
+
+        :param context_manager: The context manager to enter.
+        :return: Whatever ``context_manager.__enter__()`` returns.
+
+        .. versionadded:: 8.0
+        """
+        return self._exit_stack.enter_context(context_manager)
+
+    def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
+        """Register a function to be called when the context tears down.
+
+        This can be used to close resources opened during the script
+        execution. Resources that support Python's context manager
+        protocol which would be used in a ``with`` statement should be
+        registered with :meth:`with_resource` instead.
+
+        :param f: The function to execute on teardown.
+        """
+        return self._exit_stack.callback(f)
+
+    def close(self) -> None:
+        """Invoke all close callbacks registered with
+        :meth:`call_on_close`, and exit all context managers entered
+        with :meth:`with_resource`.
+        """
+        self._close_with_exception_info(None, None, None)
+
+    def _close_with_exception_info(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        tb: TracebackType | None,
+    ) -> bool | None:
+        """Unwind the exit stack by calling its :meth:`__exit__` providing the exception
+        information to allow for exception handling by the various resources registered
+        using :meth:`with_resource`.
+
+        :return: Whatever ``exit_stack.__exit__()`` returns.
+        """
+        exit_result = self._exit_stack.__exit__(exc_type, exc_value, tb)
+        # In case the context is reused, create a new exit stack.
+        self._exit_stack = ExitStack()
+
+        return exit_result
+
+    @property
+    def command_path(self) -> str:
+        """The computed command path.
This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. + """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> Context: + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: type[V]) -> V | None: + """Finds the closest object of a given type.""" + node: Context | None = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: t.Literal[True] = True + ) -> t.Any | None: ... + + @t.overload + def lookup_default( + self, name: str, call: t.Literal[False] = ... + ) -> t.Any | t.Callable[[], t.Any] | None: ... + + def lookup_default(self, name: str, call: bool = True) -> t.Any | None: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name, UNSET) + + if call and callable(value): + return value() + + return value + + return UNSET + + def fail(self, message: str) -> t.NoReturn: + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> t.NoReturn: + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> t.NoReturn: + """Exits the application with a given exit code. + + .. versionchanged:: 8.2 + Callbacks and context managers registered with :meth:`call_on_close` + and :meth:`with_resource` are closed before exiting. + """ + self.close() + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: Command) -> Context: + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any + ) -> V: ... + + @t.overload + def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ... + + def invoke( + self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any + ) -> t.Any | V: + """Invokes a command callback in exactly the way it expects. 
There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + + .. versionchanged:: 3.2 + A new context is created, and missing arguments use default values. + """ + if isinstance(callback, Command): + other_cmd = callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + default_value = param.get_default(ctx) + # We explicitly hide the :attr:`UNSET` value to the user, as we + # choose to make it an implementation detail. And because ``invoke`` + # has been designed as part of Click public API, we return ``None`` + # instead. Refs: + # https://github.com/pallets/click/issues/3066 + # https://github.com/pallets/click/issues/3065 + # https://github.com/pallets/click/pull/3068 + if default_value is UNSET: + default_value = None + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, default_value + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = self + + with augment_usage_errors(self): + with ctx: + return callback(*args, **kwargs) + + def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(cmd, Command): + raise TypeError("Callback is not a command.") + + for param in self.params: + if param not in kwargs: + kwargs[param] = self.params[param] + + return self.invoke(cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> ParameterSource | None: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. 
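+
+        A hedged usage sketch (an editorial addition, not upstream text)::
+
+            import click
+
+            @click.command()
+            @click.option("--count", default=1)
+            @click.pass_context
+            def cli(ctx, count):
+                # ParameterSource.DEFAULT unless --count was given
+                click.echo(ctx.get_parameter_source("count"))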
+ """ + return self._parameter_source.get(name) + + +class Command: + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + :param deprecated: If ``True`` or non-empty string, issues a message + indicating that the command is deprecated and highlights + its deprecation in --help. The message can be customized + by using a string as the value. + + .. versionchanged:: 8.2 + This is the base class for all commands, not ``BaseCommand``. + ``deprecated`` can be set to a string as well to customize the + deprecation message. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: type[Context] = Context + + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: str | None, + context_settings: cabc.MutableMapping[str, t.Any] | None = None, + callback: t.Callable[..., t.Any] | None = None, + params: list[Parameter] | None = None, + help: str | None = None, + epilog: str | None = None, + short_help: str | None = None, + options_metavar: str | None = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool | str = False, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. 
+        self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings
+
+        #: the callback to execute when the command fires. This might be
+        #: `None` in which case nothing happens.
+        self.callback = callback
+        #: the list of parameters for this command in the order they
+        #: should show up in the help page and execute. Eager parameters
+        #: will automatically be handled before non-eager ones.
+        self.params: list[Parameter] = params or []
+        self.help = help
+        self.epilog = epilog
+        self.options_metavar = options_metavar
+        self.short_help = short_help
+        self.add_help_option = add_help_option
+        self._help_option = None
+        self.no_args_is_help = no_args_is_help
+        self.hidden = hidden
+        self.deprecated = deprecated
+
+    def to_info_dict(self, ctx: Context) -> dict[str, t.Any]:
+        return {
+            "name": self.name,
+            "params": [param.to_info_dict() for param in self.get_params(ctx)],
+            "help": self.help,
+            "epilog": self.epilog,
+            "short_help": self.short_help,
+            "hidden": self.hidden,
+            "deprecated": self.deprecated,
+        }
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {self.name}>"
+
+    def get_usage(self, ctx: Context) -> str:
+        """Formats the usage line into a string and returns it.
+
+        Calls :meth:`format_usage` internally.
+        """
+        formatter = ctx.make_formatter()
+        self.format_usage(ctx, formatter)
+        return formatter.getvalue().rstrip("\n")
+
+    def get_params(self, ctx: Context) -> list[Parameter]:
+        params = self.params
+        help_option = self.get_help_option(ctx)
+
+        if help_option is not None:
+            params = [*params, help_option]
+
+        if __debug__:
+            import warnings
+
+            opts = [opt for param in params for opt in param.opts]
+            opts_counter = Counter(opts)
+            duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1)
+
+            for duplicate_opt in duplicate_opts:
+                warnings.warn(
+                    (
+                        f"The parameter {duplicate_opt} is used more than once. "
+                        "Remove its duplicate as parameters should be unique."
+                    ),
+                    stacklevel=3,
+                )
+
+        return params
+
+    def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Writes the usage line into the formatter.
+
+        This is a low-level method called by :meth:`get_usage`.
+        """
+        pieces = self.collect_usage_pieces(ctx)
+        formatter.write_usage(ctx.command_path, " ".join(pieces))
+
+    def collect_usage_pieces(self, ctx: Context) -> list[str]:
+        """Returns all the pieces that go into the usage line and returns
+        it as a list of strings.
+        """
+        rv = [self.options_metavar] if self.options_metavar else []
+
+        for param in self.get_params(ctx):
+            rv.extend(param.get_usage_pieces(ctx))
+
+        return rv
+
+    def get_help_option_names(self, ctx: Context) -> list[str]:
+        """Returns the names for the help option."""
+        all_names = set(ctx.help_option_names)
+        for param in self.params:
+            all_names.difference_update(param.opts)
+            all_names.difference_update(param.secondary_opts)
+        return list(all_names)
+
+    def get_help_option(self, ctx: Context) -> Option | None:
+        """Returns the help option object.
+
+        Skipped if :attr:`add_help_option` is ``False``.
+
+        .. versionchanged:: 8.1.8
+            The help option is now cached to avoid creating it multiple times.
+        """
+        help_option_names = self.get_help_option_names(ctx)
+
+        if not help_option_names or not self.add_help_option:
+            return None
+
+        # Cache the help option object in the private _help_option attribute to
+        # avoid creating it multiple times. Not doing this will break the
+        # callback ordering by iter_params_for_processing(), which relies on
+        # object comparison.
+ if self._help_option is None: + # Avoid circular import. + from .decorators import help_option + + # Apply help_option decorator and pop resulting option + help_option(*help_option_names)(self) + self._help_option = self.params.pop() # type: ignore[assignment] + + return self._help_option + + def make_parser(self, ctx: Context) -> _OptionParser: + """Creates the underlying option parser for this command.""" + parser = _OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + deprecated_message = ( + f"(DEPRECATED: {self.deprecated})" + if isinstance(self.deprecated, str) + else "(DEPRECATED)" + ) + text = _("{text} {deprecated_message}").format( + text=text, deprecated_message=deprecated_message + ) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. + + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + deprecated_message = ( + f"(DEPRECATED: {self.deprecated})" + if isinstance(self.deprecated, str) + else "(DEPRECATED)" + ) + text = _("{text} {deprecated_message}").format( + text=text, deprecated_message=deprecated_message + ) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def make_context( + self, + info_name: str | None, + args: list[str], + parent: Context | None = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. 
+ + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class(self, info_name=info_name, parent=parent, **extra) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + raise NoArgsIsHelpError(ctx) + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + _, args = param.handle_parse_result(ctx, opts, args) + + # We now have all parameters' values into `ctx.params`, but the data may contain + # the `UNSET` sentinel. + # Convert `UNSET` to `None` to ensure that the user doesn't see `UNSET`. + # + # Waiting until after the initial parse to convert allows us to treat `UNSET` + # more like a missing value when multiple params use the same name. + # Refs: + # https://github.com/pallets/click/issues/3071 + # https://github.com/pallets/click/pull/3079 + for name, value in ctx.params.items(): + if value is UNSET: + ctx.params[name] = None + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + extra_message = ( + f" {self.deprecated}" if isinstance(self.deprecated, str) else "" + ) + message = _( + "DeprecationWarning: The command {name!r} is deprecated.{extra_message}" + ).format(name=self.name, extra_message=extra_message) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        results: list[CompletionItem] = []
+
+        if incomplete and not incomplete[0].isalnum():
+            for param in self.get_params(ctx):
+                if (
+                    not isinstance(param, Option)
+                    or param.hidden
+                    or (
+                        not param.multiple
+                        and ctx.get_parameter_source(param.name)  # type: ignore
+                        is ParameterSource.COMMANDLINE
+                    )
+                ):
+                    continue
+
+                results.extend(
+                    CompletionItem(name, help=param.help)
+                    for name in [*param.opts, *param.secondary_opts]
+                    if name.startswith(incomplete)
+                )
+
+        while ctx.parent is not None:
+            ctx = ctx.parent
+
+            if isinstance(ctx.command, Group) and ctx.command.chain:
+                results.extend(
+                    CompletionItem(name, help=command.get_short_help_str())
+                    for name, command in _complete_visible_commands(ctx, incomplete)
+                    if name not in ctx._protected_args
+                )
+
+        return results
+
+    @t.overload
+    def main(
+        self,
+        args: cabc.Sequence[str] | None = None,
+        prog_name: str | None = None,
+        complete_var: str | None = None,
+        standalone_mode: t.Literal[True] = True,
+        **extra: t.Any,
+    ) -> t.NoReturn: ...
+
+    @t.overload
+    def main(
+        self,
+        args: cabc.Sequence[str] | None = None,
+        prog_name: str | None = None,
+        complete_var: str | None = None,
+        standalone_mode: bool = ...,
+        **extra: t.Any,
+    ) -> t.Any: ...
+
+    def main(
+        self,
+        args: cabc.Sequence[str] | None = None,
+        prog_name: str | None = None,
+        complete_var: str | None = None,
+        standalone_mode: bool = True,
+        windows_expand_args: bool = True,
+        **extra: t.Any,
+    ) -> t.Any:
+        """This is the way to invoke a script with all the bells and
+        whistles as a command line application. This will always terminate
+        the application after a call. If this is not wanted, ``SystemExit``
+        needs to be caught.
+
+        This method is also available by directly calling the instance of
+        a :class:`Command`.
+
+        :param args: the arguments that should be used for parsing. If not
+                     provided, ``sys.argv[1:]`` is used.
+        :param prog_name: the program name that should be used. By default
+                          the program name is constructed by taking the file
+                          name from ``sys.argv[0]``.
+        :param complete_var: the environment variable that controls the
+                             bash completion support. The default is
+                             ``"_<prog_name>_COMPLETE"`` with prog_name in
+                             uppercase.
+        :param standalone_mode: the default behavior is to invoke the script
+                                in standalone mode. Click will then
+                                handle exceptions and convert them into
+                                error messages and the function will never
+                                return but shut down the interpreter. If
+                                this is set to `False` they will be
+                                propagated to the caller and the return
+                                value of this function is the return value
+                                of :meth:`invoke`.
+        :param windows_expand_args: Expand glob patterns, user dir, and
+                                    env vars in command line args on Windows.
+        :param extra: extra keyword arguments are forwarded to the context
+                      constructor. See :class:`Context` for more information.
+
+        .. versionchanged:: 8.0.1
+            Added the ``windows_expand_args`` parameter to allow
+            disabling command line arg expansion on Windows.
+
+        .. versionchanged:: 8.0
+            When taking arguments from ``sys.argv`` on Windows, glob
+            patterns, user dir, and env vars are expanded.
+
+        .. versionchanged:: 3.0
+            Added the ``standalone_mode`` parameter.
+        """
+        if args is None:
+            args = sys.argv[1:]
+
+        if os.name == "nt" and windows_expand_args:
+            args = _expand_args(args)
+        else:
+            args = list(args)
+
+        if prog_name is None:
+            prog_name = _detect_program_name()
+
+        # Process shell completion requests and exit early.
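+        # Editorial illustration (not upstream Click): the completion
+        # instruction arrives in an environment variable derived from the
+        # program name; e.g. for a program "my-tool",
+        #
+        #     _MY_TOOL_COMPLETE=bash_source my-tool
+        #
+        # prints a bash completion script and exits instead of running the
+        # command.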
+ self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str | None = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). + """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class _FakeSubclassCheck(type): + def __subclasscheck__(cls, subclass: type) -> bool: + return issubclass(subclass, cls.__bases__[0]) + + def __instancecheck__(cls, instance: t.Any) -> bool: + return isinstance(instance, cls.__bases__[0]) + + +class _BaseCommand(Command, metaclass=_FakeSubclassCheck): + """ + .. deprecated:: 8.2 + Will be removed in Click 9.0. Use ``Command`` instead. + """ + + +class Group(Command): + """A group is a command that nests other commands (or more groups). + + :param name: The name of the group command. + :param commands: Map names to :class:`Command` objects. Can be a list, which + will use :attr:`Command.name` as the keys. + :param invoke_without_command: Invoke the group's callback even if a + subcommand is not given. + :param no_args_is_help: If no arguments are given, show the group's help and + exit. 
Defaults to the opposite of ``invoke_without_command``. + :param subcommand_metavar: How to represent the subcommand argument in help. + The default will represent whether ``chain`` is set or not. + :param chain: Allow passing more than one subcommand argument. After parsing + a command's arguments, if any arguments remain another command will be + matched, and so on. + :param result_callback: A function to call after the group's and + subcommand's callbacks. The value returned by the subcommand is passed. + If ``chain`` is enabled, the value will be a list of values returned by + all the commands. If ``invoke_without_command`` is enabled, the value + will be the value returned by the group's callback, or an empty list if + ``chain`` is enabled. + :param kwargs: Other arguments passed to :class:`Command`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + + .. versionchanged:: 8.2 + Merged with and replaces the ``MultiCommand`` base class. + """ + + allow_extra_args = True + allow_interspersed_args = False + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: type[Command] | None = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. versionadded:: 8.0 + group_class: type[Group] | type[type] | None = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: str | None = None, + commands: cabc.MutableMapping[str, Command] + | cabc.Sequence[Command] + | None = None, + invoke_without_command: bool = False, + no_args_is_help: bool | None = None, + subcommand_metavar: str | None = None, + chain: bool = False, + result_callback: t.Callable[..., t.Any] | None = None, + **kwargs: t.Any, + ) -> None: + super().__init__(name, **kwargs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: cabc.MutableMapping[str, Command] = commands + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "A group in chain mode cannot have optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def add_command(self, cmd: Command, name: str | None = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_nested_chain(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Callable[..., t.Any] | None = None + + if args and callable(args[0]): + assert len(args) == 1 and not kwargs, ( + "Use 'command(**kwargs)(callable)' to provide arguments." + ) + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> Group: ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Callable[..., t.Any] | None = None + + if args and callable(args[0]): + assert len(args) == 1 and not kwargs, ( + "Use 'group(**kwargs)(callable)' to provide arguments." 
+ ) + (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> Group: + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any: + inner = old_callback(value, *args, **kwargs) + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv # type: ignore[return-value] + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> Command | None: + """Given a context and a command name, this returns a :class:`Command` + object if it exists or returns ``None``. + """ + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> list[str]: + """Returns a list of subcommand names in the order they should appear.""" + return sorted(self.commands) + + def collect_usage_pieces(self, ctx: Context) -> list[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + raise NoArgsIsHelpError(ctx) + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx._protected_args = rest + ctx.args = [] + elif rest: + ctx._protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx._protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx._protected_args, *ctx.args] + ctx.args = [] + ctx._protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: list[str] + ) -> tuple[str | None, Command | None, list[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
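+        # A hypothetical illustration: with a context created using
+        # token_normalize_func=str.lower, a user typing "STATUS" still
+        # resolves the registered "status" subcommand, because the lookup
+        # below is retried with the normalized token.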
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if _split_opt(cmd_name)[0]: + self.parse_args(ctx, args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class _MultiCommand(Group, metaclass=_FakeSubclassCheck): + """ + .. deprecated:: 8.2 + Will be removed in Click 9.0. Use ``Group`` instead. + """ + + +class CommandCollection(Group): + """A :class:`Group` that looks up subcommands on other groups. If a command + is not found on this group, each registered source is checked in order. + Parameters on a source are not added to this group, and a source's callback + is not invoked when invoking its commands. In other words, this "flattens" + commands in many groups into this one group. + + :param name: The name of the group command. + :param sources: A list of :class:`Group` objects to look up commands from. + :param kwargs: Other arguments passed to :class:`Group`. + + .. versionchanged:: 8.2 + This is a subclass of ``Group``. Commands are looked up first on this + group, then each of its sources. + """ + + def __init__( + self, + name: str | None = None, + sources: list[Group] | None = None, + **kwargs: t.Any, + ) -> None: + super().__init__(name, **kwargs) + #: The list of registered groups. + self.sources: list[Group] = sources or [] + + def add_source(self, group: Group) -> None: + """Add a group as a source of commands.""" + self.sources.append(group) + + def get_command(self, ctx: Context, cmd_name: str) -> Command | None: + rv = super().get_command(ctx, cmd_name) + + if rv is not None: + return rv + + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_nested_chain(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> list[str]: + rv: set[str] = set(super().list_commands(ctx)) + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. 
Other subclasses are currently
+    not supported by design as some of the internals for parsing are
+    intentionally not finalized.
+
+    Some settings are supported by both options and arguments.
+
+    :param param_decls: the parameter declarations for this option or
+                        argument. This is a list of flags or argument
+                        names.
+    :param type: the type that should be used. Either a :class:`ParamType`
+                 or a Python type. The latter is converted into the former
+                 automatically if supported.
+    :param required: controls if this is optional or not.
+    :param default: the default value if omitted. This can also be a callable,
+                    in which case it's invoked when the default is needed
+                    without any arguments.
+    :param callback: A function to further process or validate the value
+        after type conversion. It is called as ``f(ctx, param, value)``
+        and must return the value. It is called for all sources,
+        including prompts.
+    :param nargs: the number of arguments to match. If not ``1`` the return
+                  value is a tuple instead of a single value. The default for
+                  nargs is ``1`` (except if the type is a tuple, then it's
+                  the arity of the tuple). If ``nargs=-1``, all remaining
+                  parameters are collected.
+    :param metavar: how the value is represented in the help page.
+    :param expose_value: if this is `True` then the value is passed onwards
+                         to the command callback and stored on the context,
+                         otherwise it's skipped.
+    :param is_eager: eager values are processed before non-eager ones. This
+                     should not be set for arguments or it will invert the
+                     order of processing.
+    :param envvar: environment variable(s) that are used to provide a default value for
+        this parameter. This can be a string or a sequence of strings. If a sequence is
+        given, only the first non-empty environment variable is used for the parameter.
+    :param shell_complete: A function that returns custom shell
+        completions. Used instead of the param's type completion if
+        given. Takes ``ctx, param, incomplete`` and must return a list
+        of :class:`~click.shell_completion.CompletionItem` or a list of
+        strings.
+    :param deprecated: If ``True`` or a non-empty string, issues a message
+        indicating that the parameter is deprecated and highlights
+        its deprecation in --help. The message can be customized
+        by using a string as the value. A deprecated parameter
+        cannot be required; a ``ValueError`` is raised otherwise.
+
+    .. versionchanged:: 8.2.0
+        Introduction of ``deprecated``.
+
+    .. versionchanged:: 8.2
+        Adding duplicate parameter names to a :class:`~click.core.Command` will
+        result in a ``UserWarning`` being shown.
+
+    .. versionchanged:: 8.0
+        ``process_value`` validates required parameters and bounded
+        ``nargs``, and invokes the parameter callback before returning
+        the value. This allows the callback to validate prompts.
+        ``full_process_value`` is removed.
+
+    .. versionchanged:: 8.0
+        ``autocompletion`` is renamed to ``shell_complete`` and has new
+        semantics described above. The old name is deprecated and will
+        be removed in 8.1, until then it will be wrapped to match the
+        new requirements.
+
+    .. versionchanged:: 8.0
+        For ``multiple=True, nargs>1``, the default must be a list of
+        tuples.
+
+    .. versionchanged:: 8.0
+        Setting a default is no longer required for ``nargs>1``, it will
+        default to ``None``. ``multiple=True`` or ``nargs=-1`` will
+        default to ``()``.
+
+    ..
versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: cabc.Sequence[str] | None = None, + type: types.ParamType | t.Any | None = None, + required: bool = False, + # XXX The default historically embed two concepts: + # - the declaration of a Parameter object carrying the default (handy to + # arbitrage the default value of coupled Parameters sharing the same + # self.name, like flag options), + # - and the actual value of the default. + # It is confusing and is the source of many issues discussed in: + # https://github.com/pallets/click/pull/3030 + # In the future, we might think of splitting it in two, not unlike + # Option.is_flag and Option.flag_value: we could have something like + # Parameter.is_default and Parameter.default_value. + default: t.Any | t.Callable[[], t.Any] | None = UNSET, + callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None, + nargs: int | None = None, + multiple: bool = False, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | cabc.Sequence[str] | None = None, + shell_complete: t.Callable[ + [Context, Parameter, str], list[CompletionItem] | list[str] + ] + | None = None, + deprecated: bool | str = False, + ) -> None: + self.name: str | None + self.opts: list[str] + self.secondary_opts: list[str] + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type: types.ParamType = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default: t.Any | t.Callable[[], t.Any] | None = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + self.deprecated = deprecated + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." + ) + + if required and deprecated: + raise ValueError( + f"The {self.param_type_name} '{self.human_readable_name}' " + "is deprecated and still required. A deprecated " + f"{self.param_type_name} cannot be required." + ) + + def to_info_dict(self) -> dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionchanged:: 8.3.0 + Returns ``None`` for the :attr:`default` if it was not set. + + .. 
versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + # We explicitly hide the :attr:`UNSET` value to the user, as we choose to + # make it an implementation detail. And because ``to_info_dict`` has been + # designed for documentation purposes, we return ``None`` instead. + "default": self.default if self.default is not UNSET else None, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: cabc.Sequence[str], expose_value: bool + ) -> tuple[str | None, list[str], list[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self, ctx: Context) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(param=self, ctx=ctx) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: t.Literal[True] = True + ) -> t.Any | None: ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Any | t.Callable[[], t.Any] | None: ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Any | t.Callable[[], t.Any] | None: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is UNSET: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: cabc.Mapping[str, t.Any] + ) -> tuple[t.Any, ParameterSource]: + """Returns the parameter value produced by the parser. + + If the parser did not produce a value from user input, the value is either + sourced from the environment variable, the default map, or the parameter's + default value. In that order of precedence. + + If no value is found, an internal sentinel value is returned. + + :meta private: + """ + # Collect from the parse the value passed by the user to the CLI. + value = opts.get(self.name, UNSET) # type: ignore + # If the value is set, it means it was sourced from the command line by the + # parser, otherwise it left unset by default. 
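+        # The cascade below implements the documented precedence: command
+        # line, then environment variable, then the context's default_map,
+        # then the parameter's own default. As a hypothetical example, an
+        # option with default=8000 and envvar="APP_PORT" resolves to 9000
+        # with ParameterSource.ENVIRONMENT when APP_PORT=9000 is set and the
+        # option is absent from the command line.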
+        source = (
+            ParameterSource.COMMANDLINE
+            if value is not UNSET
+            else ParameterSource.DEFAULT
+        )
+
+        if value is UNSET:
+            envvar_value = self.value_from_envvar(ctx)
+            if envvar_value is not None:
+                value = envvar_value
+                source = ParameterSource.ENVIRONMENT
+
+        if value is UNSET:
+            default_map_value = ctx.lookup_default(self.name)  # type: ignore
+            if default_map_value is not UNSET:
+                value = default_map_value
+                source = ParameterSource.DEFAULT_MAP
+
+        if value is UNSET:
+            default_value = self.get_default(ctx)
+            if default_value is not UNSET:
+                value = default_value
+                source = ParameterSource.DEFAULT
+
+        return value, source
+
+    def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
+        """Convert and validate a value against the parameter's
+        :attr:`type`, :attr:`multiple`, and :attr:`nargs`.
+        """
+        if value is None:
+            if self.multiple or self.nargs == -1:
+                return ()
+            else:
+                return value
+
+        def check_iter(value: t.Any) -> cabc.Iterator[t.Any]:
+            try:
+                return _check_iter(value)
+            except TypeError:
+                # This should only happen when passing in args manually,
+                # the parser should construct an iterable when parsing
+                # the command line.
+                raise BadParameter(
+                    _("Value must be an iterable."), ctx=ctx, param=self
+                ) from None
+
+        # Define the conversion function based on nargs and type.
+
+        if self.nargs == 1 or self.type.is_composite:
+
+            def convert(value: t.Any) -> t.Any:
+                return self.type(value, param=self, ctx=ctx)
+
+        elif self.nargs == -1:
+
+            def convert(value: t.Any) -> t.Any:  # tuple[t.Any, ...]
+                return tuple(self.type(x, self, ctx) for x in check_iter(value))
+
+        else:  # nargs > 1
+
+            def convert(value: t.Any) -> t.Any:  # tuple[t.Any, ...]
+                value = tuple(check_iter(value))
+
+                if len(value) != self.nargs:
+                    raise BadParameter(
+                        ngettext(
+                            "Takes {nargs} values but 1 was given.",
+                            "Takes {nargs} values but {len} were given.",
+                            len(value),
+                        ).format(nargs=self.nargs, len=len(value)),
+                        ctx=ctx,
+                        param=self,
+                    )
+
+                return tuple(self.type(x, self, ctx) for x in value)
+
+        if self.multiple:
+            return tuple(convert(x) for x in check_iter(value))
+
+        return convert(value)
+
+    def value_is_missing(self, value: t.Any) -> bool:
+        """A value is considered missing if:
+
+        - it is :attr:`UNSET`,
+        - or if it is an empty sequence while the parameter is supposed to have
+          a non-single value (i.e. :attr:`nargs` is not ``1`` or
+          :attr:`multiple` is set).
+
+        :meta private:
+        """
+        if value is UNSET:
+            return True
+
+        if (self.nargs != 1 or self.multiple) and value == ():
+            return True
+
+        return False
+
+    def process_value(self, ctx: Context, value: t.Any) -> t.Any:
+        """Process the value of this parameter:
+
+        1. Type cast the value using :meth:`type_cast_value`.
+        2. Check if the value is missing (see: :meth:`value_is_missing`), and raise
+           :exc:`MissingParameter` if it is required.
+        3. If a :attr:`callback` is set, call it to have the value replaced by the
+           result of the callback. If the value was not set, the callback receives
+           ``None``. This keeps the legacy behavior as it was before the
+           introduction of the :attr:`UNSET` sentinel.
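+
+        For illustration only, a hypothetical sketch of a callback taking part
+        in step 3 (the option, command, and validator names are made up)::
+
+            def non_negative(ctx, param, value):
+                if value is not None and value < 0:
+                    raise click.BadParameter("must be non-negative")
+                return value
+
+            @click.command()
+            @click.option("--count", type=int, callback=non_negative)
+            def cli(count):
+                ...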
+
+        :meta private:
+        """
+        # shelter `type_cast_value` from ever seeing an `UNSET` value by handling the
+        # cases in which `UNSET` gets special treatment explicitly at this layer
+        #
+        # Refs:
+        # https://github.com/pallets/click/issues/3069
+        if value is UNSET:
+            if self.multiple or self.nargs == -1:
+                value = ()
+        else:
+            value = self.type_cast_value(ctx, value)
+
+        if self.required and self.value_is_missing(value):
+            raise MissingParameter(ctx=ctx, param=self)
+
+        if self.callback is not None:
+            # Legacy case: UNSET is not exposed directly to the callback, but converted
+            # to None.
+            if value is UNSET:
+                value = None
+
+            # Search for parameters with UNSET values in the context.
+            unset_keys = {k: None for k, v in ctx.params.items() if v is UNSET}
+            # No UNSET values, call the callback as usual.
+            if not unset_keys:
+                value = self.callback(ctx, self, value)
+
+            # Legacy case: provide a temporarily manipulated context to the callback
+            # to hide UNSET values as None.
+            #
+            # Refs:
+            # https://github.com/pallets/click/issues/3136
+            # https://github.com/pallets/click/pull/3137
+            else:
+                # Add another layer to the context stack to clearly hint that the
+                # context is temporarily modified.
+                with ctx:
+                    # Update the context parameters to replace UNSET with None.
+                    ctx.params.update(unset_keys)
+                    # Feed these fake context parameters to the callback.
+                    value = self.callback(ctx, self, value)
+                # Restore the UNSET values in the context parameters.
+                ctx.params.update(
+                    {
+                        k: UNSET
+                        for k in unset_keys
+                        # Only restore keys that are present and still None, in case
+                        # the callback modified other parameters.
+                        if k in ctx.params and ctx.params[k] is None
+                    }
+                )
+
+        return value
+
+    def resolve_envvar_value(self, ctx: Context) -> str | None:
+        """Returns the value found in the environment variable(s) attached to this
+        parameter.
+
+        Environment variable values are always returned as strings.
+
+        This method returns ``None`` if:
+
+        - the :attr:`envvar` property is not set on the :class:`Parameter`,
+        - the environment variable is not found in the environment,
+        - the variable is found in the environment but its value is empty (i.e. the
+          environment variable is present but has an empty string).
+
+        If :attr:`envvar` is set up with multiple environment variables,
+        then only the first non-empty value is returned.
+
+        .. caution::
+
+            The raw value extracted from the environment is not normalized and is
+            returned as-is. Any normalization or reconciliation is performed later by
+            the :class:`Parameter`'s :attr:`type`.
+
+        :meta private:
+        """
+        if not self.envvar:
+            return None
+
+        if isinstance(self.envvar, str):
+            rv = os.environ.get(self.envvar)
+
+            if rv:
+                return rv
+        else:
+            for envvar in self.envvar:
+                rv = os.environ.get(envvar)
+
+                # Return the first non-empty value of the list of environment variables.
+                if rv:
+                    return rv
+                # Else, absence of value is interpreted as an environment variable that
+                # is not set, so proceed to the next one.
+
+        return None
+
+    def value_from_envvar(self, ctx: Context) -> str | cabc.Sequence[str] | None:
+        """Process the raw environment variable string for this parameter.
+
+        Returns the string as-is or splits it into a sequence of strings if the
+        parameter is expecting multiple values (i.e. its :attr:`nargs` property is set
+        to a value other than ``1``).
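+
+        As a hypothetical example, an option declared with ``nargs=2`` whose
+        environment variable contains ``"1 2"`` is split into ``["1", "2"]``
+        here; each item is converted by the parameter's :attr:`type` later on.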
+ + :meta private: + """ + rv = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + return self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str] + ) -> tuple[t.Any, list[str]]: + """Process the value produced by the parser from user input. + + Always process the value through the Parameter's :attr:`type`, wherever it + comes from. + + If the parameter is deprecated, this method warn the user about it. But only if + the value has been explicitly set by the user (and as such, is not coming from + a default). + + :meta private: + """ + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + + ctx.set_parameter_source(self.name, source) # type: ignore + + # Display a deprecation warning if necessary. + if ( + self.deprecated + and value is not UNSET + and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP) + ): + extra_message = ( + f" {self.deprecated}" if isinstance(self.deprecated, str) else "" + ) + message = _( + "DeprecationWarning: The {param_type} {name!r} is deprecated." + "{extra_message}" + ).format( + param_type=self.param_type_name, + name=self.human_readable_name, + extra_message=extra_message, + ) + echo(style(message, fg="red"), err=True) + + # Process the value through the parameter's type. + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + # In resilient parsing mode, we do not want to fail the command if the + # value is incompatible with the parameter type, so we reset the value + # to UNSET, which will be interpreted as a missing value. + value = UNSET + + # Add parameter's value to the context. + if ( + self.expose_value + # We skip adding the value if it was previously set by another parameter + # targeting the same variable name. This prevents parameters competing for + # the same name to override each other. + and (self.name not in ctx.params or ctx.params[self.name] is UNSET) + ): + # Click is logically enforcing that the name is None if the parameter is + # not to be exposed. We still assert it here to please the type checker. + assert self.name is not None, ( + f"{self!r} parameter's name should not be None when exposing value." + ) + ctx.params[self.name] = value + + return value, args + + def get_help_record(self, ctx: Context) -> tuple[str, str] | None: + pass + + def get_usage_pieces(self, ctx: Context) -> list[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast("list[CompletionItem]", results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page and error messages. + Normally, environment variables are not shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. A deprecated option cannot be + prompted. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + :param attrs: Other command arguments described in :class:`Parameter`. + + .. versionchanged:: 8.2 + ``envvar`` used with ``flag_value`` will always use the ``flag_value``, + previously it would use the value of the environment variable. + + .. versionchanged:: 8.1 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. 
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: cabc.Sequence[str] | None = None, + show_default: bool | str | None = None, + prompt: bool | str = False, + confirmation_prompt: bool | str = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: bool | None = None, + flag_value: t.Any = UNSET, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: types.ParamType | t.Any | None = None, + help: str | None = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + deprecated: bool | str = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + super().__init__( + param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs + ) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: str | None = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + if deprecated: + deprecated_message = ( + f"(DEPRECATED: {deprecated})" + if isinstance(deprecated, str) + else "(DEPRECATED)" + ) + help = help + deprecated_message if help is not None else deprecated_message + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # The _flag_needs_value property tells the parser that this option is a flag + # that cannot be used standalone and needs a value. With this information, the + # parser can determine whether to consider the next user-provided argument in + # the CLI as a value for this flag or as a new option. + # If prompt is enabled but not required, then it opens the possibility for the + # option to gets its value from the user. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + # Auto-detect if this is a flag or not. + if is_flag is None: + # Implicitly a flag because flag_value was set. + if flag_value is not UNSET: + is_flag = True + # Not a flag, but when used as a flag it shows a prompt. + elif self._flag_needs_value: + is_flag = False + # Implicitly a flag because secondary options names were given. + elif self.secondary_opts: + is_flag = True + # The option is explicitly not a flag. But we do not know yet if it needs a + # value or not. So we look at the default value to determine it. + elif is_flag is False and not self._flag_needs_value: + self._flag_needs_value = self.default is UNSET + + if is_flag: + # Set missing default for flags if not explicitly required or prompted. + if self.default is UNSET and not self.required and not self.prompt: + if multiple: + self.default = () + + # Auto-detect the type of the flag based on the flag_value. + if type is None: + # A flag without a flag_value is a boolean flag. + if flag_value is UNSET: + self.type: types.ParamType = types.BoolParamType() + # If the flag value is a boolean, use BoolParamType. + elif isinstance(flag_value, bool): + self.type = types.BoolParamType() + # Otherwise, guess the type from the flag value. + else: + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = bool(is_flag) + self.is_bool_flag: bool = bool( + is_flag and isinstance(self.type, types.BoolParamType) + ) + self.flag_value: t.Any = flag_value + + # Set boolean flag default to False if unset and not required. 
+ if self.is_bool_flag: + if self.default is UNSET and not self.required: + self.default = False + + # Support the special case of aligning the default value with the flag_value + # for flags whose default is explicitly set to True. Note that as long as we + # have this condition, there is no way a flag can have a default set to True, + # and a flag_value set to something else. Refs: + # https://github.com/pallets/click/issues/3024#issuecomment-3146199461 + # https://github.com/pallets/click/pull/3030/commits/06847da + if self.default is True and self.flag_value is not UNSET: + self.default = self.flag_value + + # Set the default flag_value if it is not set. + if self.flag_value is UNSET: + if self.is_flag: + self.flag_value = True + else: + self.flag_value = None + + # Counting. + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if self.default is UNSET: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if deprecated and prompt: + raise ValueError("`deprecated` options cannot use `prompt`.") + + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." + ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> dict[str, t.Any]: + """ + .. versionchanged:: 8.3.0 + Returns ``None`` for the :attr:`flag_value` if it was not set. + """ + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + # We explicitly hide the :attr:`UNSET` value to the user, as we choose to + # make it an implementation detail. And because ``to_info_dict`` has been + # designed for documentation purposes, we return ``None`` instead. + flag_value=self.flag_value if self.flag_value is not UNSET else None, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def get_error_hint(self, ctx: Context) -> str: + result = super().get_error_hint(ctx) + if self.show_envvar and self.envvar is not None: + result += f" (env var: '{self.envvar}')" + return result + + def _parse_decls( + self, decls: cabc.Sequence[str], expose_value: bool + ) -> tuple[str | None, list[str], list[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(_split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." 
+ ) + else: + possible_names.append(_split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError( + f"Could not determine name for option with declarations {decls!r}" + ) + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" + ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> tuple[str, str] | None: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: cabc.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar(ctx=ctx)}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + + extra = self.get_help_extra(ctx) + extra_items = [] + if "envvars" in extra: + extra_items.append( + _("env var: {var}").format(var=", ".join(extra["envvars"])) + ) + if "default" in extra: + extra_items.append(_("default: {default}").format(default=extra["default"])) + if "range" in extra: + extra_items.append(extra["range"]) + if "required" in extra: + extra_items.append(_(extra["required"])) + + if extra_items: + extra_str = "; ".join(extra_items) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra: + extra: types.OptionHelpExtra = {} + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + if isinstance(envvar, str): + extra["envvars"] = (envvar,) + else: + extra["envvars"] = tuple(str(d) for d in envvar) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. 
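+        # The previous flag is saved and restored in the `finally` clause
+        # below, so a default that fails to convert cannot leave the context
+        # permanently in resilient mode.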
+ resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or ( + show_default and (default_value not in (None, UNSET)) + ): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif isinstance(default_value, enum.Enum): + default_string = default_value.name + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. + default_string = _split_opt( + (self.opts if default_value else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + elif default_value == "": + default_string = '""' + else: + default_string = str(default_value) + + if default_string: + extra["default"] = default_string + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra["range"] = range_str + + if self.required: + extra["required"] = "required" + + return extra + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to lock in the value before + # attempting any user interaction. + default = self.get_default(ctx) + + # A boolean flag can use a simplified [y/n] confirmation prompt. + if self.is_bool_flag: + # If we have no boolean default, we force the user to explicitly provide + # one. + if default in (UNSET, None): + default = None + # Nothing prevent you to declare an option that is simultaneously: + # 1) auto-detected as a boolean flag, + # 2) allowed to prompt, and + # 3) still declare a non-boolean default. + # This forced casting into a boolean is necessary to align any non-boolean + # default to the prompt, which is going to be a [y/n]-style confirmation + # because the option is still a boolean flag. That way, instead of [y/n], + # we get [Y/n] or [y/N] depending on the truthy value of the default. + # Refs: https://github.com/pallets/click/pull/3030#discussion_r2289180249 + else: + default = bool(default) + return confirm(self.prompt, default) + + # If show_default is set to True/False, provide this to `prompt` as well. For + # non-bool values of `show_default`, we use `prompt`'s default behavior + prompt_kwargs: t.Any = {} + if isinstance(self.show_default, bool): + prompt_kwargs["show_default"] = self.show_default + + return prompt( + self.prompt, + # Use ``None`` to inform the prompt() function to reiterate until a valid + # value is provided by the user if we have no default. 
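+            # (A hypothetical trace: with no declared default, prompt() keeps
+            # re-asking on empty input instead of offering a suggestion, and
+            # value_proc routes each answer back through process_value.)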
+            default=None if default is UNSET else default,
+            type=self.type,
+            hide_input=self.hide_input,
+            show_choices=self.show_choices,
+            confirmation_prompt=self.confirmation_prompt,
+            value_proc=lambda x: self.process_value(ctx, x),
+            **prompt_kwargs,
+        )
+
+    def resolve_envvar_value(self, ctx: Context) -> str | None:
+        """:class:`Option` resolves its environment variable the same way as
+        :func:`Parameter.resolve_envvar_value`, but it also supports
+        :attr:`Context.auto_envvar_prefix`. If we could not find an environment
+        variable from the :attr:`envvar` property, we fall back on
+        :attr:`Context.auto_envvar_prefix` to dynamically build the environment
+        variable name using the
+        :python:`{ctx.auto_envvar_prefix}_{self.name.upper()}` template.
+
+        :meta private:
+        """
+        rv = super().resolve_envvar_value(ctx)
+
+        if rv is not None:
+            return rv
+
+        if (
+            self.allow_from_autoenv
+            and ctx.auto_envvar_prefix is not None
+            and self.name is not None
+        ):
+            envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+            rv = os.environ.get(envvar)
+
+            if rv:
+                return rv
+
+        return None
+
+    def value_from_envvar(self, ctx: Context) -> t.Any:
+        """For :class:`Option`, this method processes the raw environment variable
+        string the same way as :func:`Parameter.value_from_envvar` does.
+
+        But in the case of non-boolean flags, the value is analyzed to determine if the
+        flag is activated or not, and returns a boolean of its activation, or the
+        :attr:`flag_value` if the latter is set.
+
+        This method also takes care of repeated options (i.e. options with
+        :attr:`multiple` set to ``True``).
+
+        :meta private:
+        """
+        rv = self.resolve_envvar_value(ctx)
+
+        # Absent environment variable or an empty string is interpreted as unset.
+        if rv is None:
+            return None
+
+        # Non-boolean flags are more liberal in what they accept. But a flag being a
+        # flag, its envvar value still needs to be analyzed to determine if the flag is
+        # activated or not.
+        if self.is_flag and not self.is_bool_flag:
+            # If the flag_value is set and matches the envvar value, return it
+            # directly.
+            if self.flag_value is not UNSET and rv == self.flag_value:
+                return self.flag_value
+            # Analyze the envvar value as a boolean to know if the flag is
+            # activated or not.
+            return types.BoolParamType.str_to_bool(rv)
+
+        # Split the envvar value if it is allowed to be repeated.
+        value_depth = (self.nargs != 1) + bool(self.multiple)
+        if value_depth > 0:
+            multi_rv = self.type.split_envvar_value(rv)
+            if self.multiple and self.nargs != 1:
+                multi_rv = batch(multi_rv, self.nargs)  # type: ignore[assignment]
+
+            return multi_rv
+
+        return rv
+
+    def consume_value(
+        self, ctx: Context, opts: cabc.Mapping[str, Parameter]
+    ) -> tuple[t.Any, ParameterSource]:
+        """For :class:`Option`, the value can be collected from an interactive prompt
+        if the option is a flag that needs a value (and the :attr:`prompt` property is
+        set).
+
+        Additionally, this method handles flag options that are activated without a
+        value, in which case the :attr:`flag_value` is returned.
+
+        :meta private:
+        """
+        value, source = super().consume_value(ctx, opts)
+
+        # The parser will emit a sentinel value if the option is allowed to act as a
+        # flag without a value.
+        if value is FLAG_NEEDS_VALUE:
+            # If the option allows for a prompt, we start an interaction with the user.
+            if self.prompt is not None and not ctx.resilient_parsing:
+                value = self.prompt_for_value(ctx)
+                source = ParameterSource.PROMPT
+            # Else the flag takes its flag_value as value.
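+            # (That fallback is reached e.g. during resilient parsing, such as
+            # shell completion, where prompting the user would be disruptive.)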
+            else:
+                value = self.flag_value
+                source = ParameterSource.COMMANDLINE
+
+        # A flag which is activated always returns the flag value, unless the value
+        # comes from the explicitly set default.
+        elif (
+            self.is_flag
+            and value is True
+            and not self.is_bool_flag
+            and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+        ):
+            value = self.flag_value
+
+        # Re-interpret a multiple option which has been sent as-is by the parser.
+        # Here we replace each occurrence of value-less flags (marked by the
+        # FLAG_NEEDS_VALUE sentinel) with the flag_value.
+        elif (
+            self.multiple
+            and value is not UNSET
+            and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+            and any(v is FLAG_NEEDS_VALUE for v in value)
+        ):
+            value = [self.flag_value if v is FLAG_NEEDS_VALUE else v for v in value]
+            source = ParameterSource.COMMANDLINE
+
+        # If the value wasn't set, or came from the param's default, prompt the
+        # user for a value if prompting is enabled.
+        elif (
+            (
+                value is UNSET
+                or source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+            )
+            and self.prompt is not None
+            and (self.required or self.prompt_required)
+            and not ctx.resilient_parsing
+        ):
+            value = self.prompt_for_value(ctx)
+            source = ParameterSource.PROMPT
+
+        return value, source
+
+    def process_value(self, ctx: Context, value: t.Any) -> t.Any:
+        # process_value has to be overridden on Options in order to capture
+        # `value == UNSET` cases before `type_cast_value()` gets called.
+        #
+        # Refs:
+        # https://github.com/pallets/click/issues/3069
+        if self.is_flag and not self.required and self.is_bool_flag and value is UNSET:
+            value = False
+
+            if self.callback is not None:
+                value = self.callback(ctx, self, value)
+
+            return value
+
+        # in the normal case, rely on Parameter.process_value
+        return super().process_value(ctx, value)
+
+
+class Argument(Parameter):
+    """Arguments are positional parameters to a command. They generally
+    provide fewer features than options but can have infinite ``nargs``
+    and are required by default.
+
+    All parameters are passed onwards to the constructor of :class:`Parameter`.
+    """
+
+    param_type_name = "argument"
+
+    def __init__(
+        self,
+        param_decls: cabc.Sequence[str],
+        required: bool | None = None,
+        **attrs: t.Any,
+    ) -> None:
+        # Auto-detect the requirement status of the argument if not explicitly set.
+        if required is None:
+            # The argument gets automatically required if it has no explicit default
+            # value set and is set up to match at least one value.
+            if attrs.get("default", UNSET) is UNSET:
+                required = attrs.get("nargs", 1) > 0
+            # If the argument has a default value, it is not required.
+            else:
+                required = False
+
+        if "multiple" in attrs:
+            raise TypeError("__init__() got an unexpected keyword argument 'multiple'.")
+
+        super().__init__(param_decls, required=required, **attrs)
+
+    @property
+    def human_readable_name(self) -> str:
+        if self.metavar is not None:
+            return self.metavar
+        return self.name.upper()  # type: ignore
+
+    def make_metavar(self, ctx: Context) -> str:
+        if self.metavar is not None:
+            return self.metavar
+        var = self.type.get_metavar(param=self, ctx=ctx)
+        if not var:
+            var = self.name.upper()  # type: ignore
+        if self.deprecated:
+            var += "!"
+        if not self.required:
+            var = f"[{var}]"
+        if self.nargs != 1:
+            var += "..."
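+        # A hypothetical illustration: an optional, deprecated argument named
+        # "src" with nargs=-1 renders here as "[SRC!]..." in the usage line.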
+ return var + + def _parse_decls( + self, decls: cabc.Sequence[str], expose_value: bool + ) -> tuple[str | None, list[str], list[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Argument is marked as exposed, but does not have a name.") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}: {decls}." + ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> list[str]: + return [self.make_metavar(ctx)] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar(ctx)}'" + + def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) + + +def __getattr__(name: str) -> object: + import warnings + + if name == "BaseCommand": + warnings.warn( + "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" + " 'Command' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _BaseCommand + + if name == "MultiCommand": + warnings.warn( + "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" + " 'Group' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _MultiCommand + + raise AttributeError(name) diff --git a/venv/lib/python3.12/site-packages/click/decorators.py b/venv/lib/python3.12/site-packages/click/decorators.py new file mode 100644 index 0000000..21f4c34 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/decorators.py @@ -0,0 +1,551 @@ +from __future__ import annotations + +import inspect +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound="_AnyCallable | Command") + + +def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]: + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: type[T], ensure: bool = False +) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. 
+ + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + ctx = get_current_context() + + obj: T | None + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." + ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator + + +def pass_meta_key( + key: str, *, doc_description: str | None = None +) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: + def new_func(*args: P.args, **kwargs: P.kwargs) -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: str | None, + cls: type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: str | None = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: ... + + +def command( + name: str | _AnyCallable | None = None, + cls: type[CmdType] | None = None, + **attrs: t.Any, +) -> Command | t.Callable[[_AnyCallable], Command | CmdType]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. 
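+
+    A minimal illustration (the command, option, and function names here are
+    hypothetical)::
+
+        @click.command()
+        @click.option("--name", default="world")
+        def hello(name):
+            click.echo(f"Hello {name}!")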
+ + The name of the command defaults to the name of the function, converted to + lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes + ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example, + ``init_data_command`` becomes ``init-data``. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: The name of the command. Defaults to modifying the function's + name as described above. + :param cls: The command class to create. Defaults to :class:`Command`. + + .. versionchanged:: 8.2 + The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are + removed when generating the name. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. + """ + + func: t.Callable[[_AnyCallable], t.Any] | None = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast("type[CmdType]", Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + if name is not None: + cmd_name = name + else: + cmd_name = f.__name__.lower().replace("_", "-") + cmd_left, sep, suffix = cmd_name.rpartition("-") + + if sep and suffix in {"command", "cmd", "group", "grp"}: + cmd_name = cmd_left + + cmd = cls(name=cmd_name, callback=f, params=params, **attrs) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: str | None, + cls: type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: str | None = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: ... 
+ + +def group( + name: str | _AnyCallable | None = None, + cls: type[GrpType] | None = None, + **attrs: t.Any, +) -> Group | t.Callable[[_AnyCallable], Group | GrpType]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast("type[GrpType]", Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
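+
+    A typical sketch (the command name and prompt text are illustrative)::
+
+        @click.command("drop-db")
+        @confirmation_option(prompt="Are you sure you want to drop the db?")
+        def drop_db():
+            ...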
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: str | None = None, + *param_decls: str, + package_name: str | None = None, + prog_name: str | None = None, + message: str | None = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. 
+ """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + import importlib.metadata + + try: + version = importlib.metadata.version(package_name) + except importlib.metadata.PackageNotFoundError: + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Pre-configured ``--help`` option which immediately prints the help page + and exits the program. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
+ """ + + def show_help(ctx: Context, param: Parameter, value: bool) -> None: + """Callback that print the help page on ```` and exits.""" + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs.setdefault("callback", show_help) + + return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.12/site-packages/click/exceptions.py b/venv/lib/python3.12/site-packages/click/exceptions.py new file mode 100644 index 0000000..4d782ee --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/exceptions.py @@ -0,0 +1,308 @@ +from __future__ import annotations + +import collections.abc as cabc +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .globals import resolve_color_default +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + # The context will be removed by the time we print the message, so cache + # the color settings here to be used later on (in `show`) + self.show_color: bool | None = resolve_color_default() + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.IO[t.Any] | None = None) -> None: + if file is None: + file = get_text_stderr() + + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=self.show_color, + ) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: Context | None = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: Command | None = self.ctx.command if self.ctx else None + + def show(self, file: t.IO[t.Any] | None = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. 
This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: Context | None = None, + param: Parameter | None = None, + param_hint: cabc.Sequence[str] | str | None = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: str | None = None, + ctx: Context | None = None, + param: Parameter | None = None, + param_hint: cabc.Sequence[str] | str | None = None, + param_type: str | None = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: cabc.Sequence[str] | str | None = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message( + param=self.param, ctx=self.ctx + ) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: str | None = None, + possibilities: cabc.Sequence[str] | None = None, + ctx: Context | None = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: Context | None = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class NoArgsIsHelpError(UsageError): + def __init__(self, ctx: Context) -> None: + self.ctx: Context + super().__init__(ctx.get_help(), ctx=ctx) + + def show(self, file: t.IO[t.Any] | None = None) -> None: + echo(self.format_message(), file=file, err=True, color=self.ctx.color) + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: str | None = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/venv/lib/python3.12/site-packages/click/formatting.py b/venv/lib/python3.12/site-packages/click/formatting.py new file mode 100644 index 0000000..0b64f83 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +from __future__ import annotations + +import collections.abc as cabc +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import _split_opt + +# Can force a width. 
This is used by the test system +FORCED_WIDTH: int | None = None + + +def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]: + widths: dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: cabc.Iterable[tuple[str, str]], col_count: int +) -> cabc.Iterator[tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: list[tuple[int, bool, str]] = [] + buf: list[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
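+
+    A short sketch of driving the formatter by hand (names and texts are
+    illustrative)::
+
+        formatter = HelpFormatter()
+        formatter.write_usage("tool", "[OPTIONS] SRC DST")
+        with formatter.section("Options"):
+            formatter.write_dl([("--verbose", "Enable verbose output.")])
+        text = formatter.getvalue()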
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: int | None = None, + max_width: int | None = None, + ) -> None: + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + import shutil + + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent: int = 0 + self.buffer: list[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: cabc.Sequence[tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> cabc.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> cabc.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = _split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.12/site-packages/click/globals.py b/venv/lib/python3.12/site-packages/click/globals.py new file mode 100644 index 0000000..a2f9172 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/globals.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +import typing as t +from threading import local + +if t.TYPE_CHECKING: + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: t.Literal[False] = False) -> Context: ... + + +@t.overload +def get_current_context(silent: bool = ...) -> Context | None: ... + + +def get_current_context(silent: bool = False) -> Context | None: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: Context) -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: bool | None = None) -> bool | None: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/venv/lib/python3.12/site-packages/click/parser.py b/venv/lib/python3.12/site-packages/click/parser.py new file mode 100644 index 0000000..1ea1f71 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/parser.py @@ -0,0 +1,532 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" + +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +from __future__ import annotations + +import collections.abc as cabc +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from ._utils import FLAG_NEEDS_VALUE +from ._utils import UNSET +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + from ._utils import T_FLAG_NEEDS_VALUE + from ._utils import T_UNSET + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + + +def _unpack_args( + args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int] +) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with ``UNSET``. + """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: list[str | tuple[str | T_UNSET, ...] 
| T_UNSET] = [] + spos: int | None = None + + def _fetch(c: deque[V]) -> V | T_UNSET: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return UNSET + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) # type: ignore[arg-type] + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(UNSET) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def _split_opt(opt: str) -> tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def _normalize_opt(opt: str, ctx: Context | None) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = _split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +class _Option: + def __init__( + self, + obj: CoreOption, + opts: cabc.Sequence[str], + dest: str | None, + action: str | None = None, + nargs: int = 1, + const: t.Any | None = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: set[str] = set() + + for opt in opts: + prefix, value = _split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: _ParsingState) -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class _Argument: + def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: str | cabc.Sequence[str | None] | None | T_UNSET, + state: _ParsingState, + ) -> None: + if self.nargs > 1: + assert isinstance(value, cabc.Sequence) + holes = sum(1 for x in value if x is UNSET) + if holes == len(value): + value = UNSET + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + # We failed to collect any argument value so we consider the argument as unset. 
+ if value == (): + value = UNSET + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class _ParsingState: + def __init__(self, rargs: list[str]) -> None: + self.opts: dict[str, t.Any] = {} + self.largs: list[str] = [] + self.rargs = rargs + self.order: list[CoreParameter] = [] + + +class _OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + + .. deprecated:: 8.2 + Will be removed in Click 9.0. + """ + + def __init__(self, ctx: Context | None = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. + self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: dict[str, _Option] = {} + self._long_opt: dict[str, _Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: list[_Argument] = [] + + def add_option( + self, + obj: CoreOption, + opts: cabc.Sequence[str], + dest: str | None, + action: str | None = None, + nargs: int = 1, + const: t.Any | None = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [_normalize_opt(opt, self.ctx) for opt in opts] + option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(_Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: list[str] + ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. 
If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = _ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: _ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: _ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: str | None, state: _ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = UNSET + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: _ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = _normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. 
+ if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = UNSET + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: _Option, state: _ParsingState + ) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE: + nargs = option.nargs + + value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = FLAG_NEEDS_VALUE + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. + value = FLAG_NEEDS_VALUE + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: _ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = _normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) + + +def __getattr__(name: str) -> object: + import warnings + + if name in { + "OptionParser", + "Argument", + "Option", + "split_opt", + "normalize_opt", + "ParsingState", + }: + warnings.warn( + f"'parser.{name}' is deprecated and will be removed in Click 9.0." 
+ " The old parser is available in 'optparse'.", + DeprecationWarning, + stacklevel=2, + ) + return globals()[f"_{name}"] + + if name == "split_arg_string": + from .shell_completion import split_arg_string + + warnings.warn( + "Importing 'parser.split_arg_string' is deprecated, it will only be" + " available in 'shell_completion' in Click 9.0.", + DeprecationWarning, + stacklevel=2, + ) + return split_arg_string + + raise AttributeError(name) diff --git a/venv/lib/python3.12/site-packages/click/py.typed b/venv/lib/python3.12/site-packages/click/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/click/shell_completion.py b/venv/lib/python3.12/site-packages/click/shell_completion.py new file mode 100644 index 0000000..8f1564c --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/shell_completion.py @@ -0,0 +1,667 @@ +from __future__ import annotations + +import collections.abc as cabc +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .utils import echo + + +def shell_complete( + cli: Command, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: str | None = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: str | None = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. 
+_SOURCE_BASH = """\
+%(complete_func)s() {
+    local IFS=$'\\n'
+    local response
+
+    response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \
+%(complete_var)s=bash_complete $1)
+
+    for completion in $response; do
+        IFS=',' read type value <<< "$completion"
+
+        if [[ $type == 'dir' ]]; then
+            COMPREPLY=()
+            compopt -o dirnames
+        elif [[ $type == 'file' ]]; then
+            COMPREPLY=()
+            compopt -o default
+        elif [[ $type == 'plain' ]]; then
+            COMPREPLY+=($value)
+        fi
+    done
+
+    return 0
+}
+
+%(complete_func)s_setup() {
+    complete -o nosort -F %(complete_func)s %(prog_name)s
+}
+
+%(complete_func)s_setup;
+"""
+
+# See ZshComplete.format_completion below, and issue #2703, before
+# changing this script.
+#
+# (TL;DR: _describe is picky about the format, but this Zsh script snippet
+# is already widely deployed. So freeze this script, and use clever-ish
+# handling of colons in ZshComplete.format_completion.)
+_SOURCE_ZSH = """\
+#compdef %(prog_name)s
+
+%(complete_func)s() {
+    local -a completions
+    local -a completions_with_descriptions
+    local -a response
+    (( ! $+commands[%(prog_name)s] )) && return 1
+
+    response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \
+%(complete_var)s=zsh_complete %(prog_name)s)}")
+
+    for type key descr in ${response}; do
+        if [[ "$type" == "plain" ]]; then
+            if [[ "$descr" == "_" ]]; then
+                completions+=("$key")
+            else
+                completions_with_descriptions+=("$key":"$descr")
+            fi
+        elif [[ "$type" == "dir" ]]; then
+            _path_files -/
+        elif [[ "$type" == "file" ]]; then
+            _path_files -f
+        fi
+    done
+
+    if [ -n "$completions_with_descriptions" ]; then
+        _describe -V unsorted completions_with_descriptions -U
+    fi
+
+    if [ -n "$completions" ]; then
+        compadd -U -V unsorted -a completions
+    fi
+}
+
+if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
+    # autoload from fpath, call function directly
+    %(complete_func)s "$@"
+else
+    # eval/source/. command, register function for later
+    compdef %(complete_func)s %(prog_name)s
+fi
+"""
+
+_SOURCE_FISH = """\
+function %(complete_func)s;
+    set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \
+COMP_CWORD=(commandline -t) %(prog_name)s);
+
+    for completion in $response;
+        set -l metadata (string split "," $completion);
+
+        if test $metadata[1] = "dir";
+            __fish_complete_directories $metadata[2];
+        else if test $metadata[1] = "file";
+            __fish_complete_path $metadata[2];
+        else if test $metadata[1] = "plain";
+            echo $metadata[2];
+        end;
+    end;
+end;
+
+complete --no-files --command %(prog_name)s --arguments \
+"(%(complete_func)s)";
+"""
+
+
+class ShellComplete:
+    """Base class for providing shell completion support. A subclass for
+    a given shell will override attributes and methods to implement the
+    completion instructions (``source`` and ``complete``).
+
+    :param cli: Command being called.
+    :param prog_name: Name of the executable in the shell.
+    :param complete_var: Name of the environment variable that holds
+        the completion instruction.
+
+    .. versionadded:: 8.0
+    """
+
+    name: t.ClassVar[str]
+    """Name to register the shell as with :func:`add_completion_class`.
+    This is used in completion instructions (``{name}_source`` and
+    ``{name}_complete``).
+    """
+
+    source_template: t.ClassVar[str]
+    """Completion script template formatted by :meth:`source`. This must
+    be provided by subclasses.
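+
+    For example, a hypothetical subclass might define::
+
+        class MyShellComplete(ShellComplete):
+            name = "mysh"
+            source_template = "complete %(prog_name)s %(complete_var)s"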
+ """ + + def __init__( + self, + cli: Command, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> tuple[list[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. + """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import shutil + import subprocess + + bash_exe = shutil.which("bash") + + if bash_exe is None: + match = None + else: + output = subprocess.run( + [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], + stdout=subprocess.PIPE, + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." 
+ ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + help_ = item.help or "_" + # The zsh completion script uses `_describe` on items with help + # texts (which splits the item help from the item value at the + # first unescaped colon) and `compadd` on items without help + # text (which uses the item value as-is and does not support + # colon escaping). So escape colons in the item value if and + # only if the item help is not the sentinel "_" value, as used + # by the completion script. + # + # (The zsh completion script is potentially widely deployed, and + # thus harder to fix than this method.) + # + # See issue #1812 and issue #2703 for further context. + value = item.value.replace(":", r"\:") if help_ != "_" else item.value + return f"{item.type}\n{value}\n{help_}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + if incomplete: + incomplete = split_arg_string(incomplete)[0] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]") + + +_available_shells: dict[str, type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: str | None = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> type[ShellComplete] | None: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. 
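+
+    For example, ``get_completion_class("bash")`` returns
+    :class:`BashComplete`, which is registered by default.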
+ + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def split_arg_string(string: str) -> list[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + + .. versionchanged:: 8.2 + Moved to ``shell_completion`` from ``parser``. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. + out.append(lex.token) + + return out + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. + value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + break + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: Command, + ctx_args: cabc.MutableMapping[str, t.Any], + prog_name: str, + args: list[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. 
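+
+    For example, given ``args == ["db", "migrate"]`` for a group with a
+    ``db`` subgroup (hypothetical names), the returned context is the one
+    for the ``migrate`` command.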
+ """ + ctx_args["resilient_parsing"] = True + with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx: + args = ctx._protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, Group): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + with cmd.make_context( + name, args, parent=ctx, resilient_parsing=True + ) as sub_ctx: + ctx = sub_ctx + args = ctx._protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + with cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) as sub_sub_ctx: + sub_ctx = sub_sub_ctx + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx._protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: list[str], incomplete: str +) -> tuple[Command | Parameter, str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. 
+ return ctx.command, incomplete diff --git a/venv/lib/python3.12/site-packages/click/termui.py b/venv/lib/python3.12/site-packages/click/termui.py new file mode 100644 index 0000000..2e98a07 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/termui.py @@ -0,0 +1,883 @@ +from __future__ import annotations + +import collections.abc as cabc +import inspect +import io +import itertools +import sys +import typing as t +from contextlib import AbstractContextManager +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Any | None = None, + show_choices: bool = True, + type: ParamType | None = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Any | None = None, + hide_input: bool = False, + confirmation_prompt: bool | str = False, + type: ParamType | t.Any | None = None, + value_proc: t.Callable[[str], t.Any] | None = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. 
+    :param err: if set to true the file defaults to ``stderr`` instead of
+                ``stdout``, the same as with echo.
+    :param show_choices: Show or hide choices if the passed type is a Choice.
+                         For example if type is a Choice of either day or week,
+                         show_choices is true and text is "Group by" then the
+                         prompt will be "Group by (day, week): ".
+
+    .. versionchanged:: 8.3.1
+        A space is no longer appended to the prompt.
+
+    .. versionadded:: 8.0
+        ``confirmation_prompt`` can be a custom string.
+
+    .. versionadded:: 7.0
+        Added the ``show_choices`` parameter.
+
+    .. versionadded:: 6.0
+        Added unicode support for cmd.exe on Windows.
+
+    .. versionadded:: 4.0
+        Added the `err` parameter.
+
+    """
+
+    def prompt_func(text: str) -> str:
+        f = hidden_prompt_func if hide_input else visible_prompt_func
+        try:
+            # Write the prompt separately so that we get nice
+            # coloring through colorama on Windows
+            echo(text[:-1], nl=False, err=err)
+            # Echo the last character to stdout to work around an issue where
+            # readline causes backspace to clear the whole line.
+            return f(text[-1:])
+        except (KeyboardInterrupt, EOFError):
+            # getpass doesn't print a newline if the user aborts input with ^C.
+            # Allegedly this behavior is inherited from getpass(3).
+            # A doc bug has been filed at https://bugs.python.org/issue24711
+            if hide_input:
+                echo(None, err=err)
+            raise Abort() from None
+
+    if value_proc is None:
+        value_proc = convert_type(type, default)
+
+    prompt = _build_prompt(
+        text, prompt_suffix, show_default, default, show_choices, type
+    )
+
+    if confirmation_prompt:
+        if confirmation_prompt is True:
+            confirmation_prompt = _("Repeat for confirmation")
+
+        confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
+
+    while True:
+        while True:
+            value = prompt_func(prompt)
+            if value:
+                break
+            elif default is not None:
+                value = default
+                break
+        try:
+            result = value_proc(value)
+        except UsageError as e:
+            if hide_input:
+                echo(_("Error: The value you entered was invalid."), err=err)
+            else:
+                echo(_("Error: {e.message}").format(e=e), err=err)
+            continue
+        if not confirmation_prompt:
+            return result
+        while True:
+            value2 = prompt_func(confirmation_prompt)
+            is_empty = not value and not value2
+            if value2 or is_empty:
+                break
+        if value == value2:
+            return result
+        echo(_("Error: The two entered values do not match."), err=err)
+
+
+def confirm(
+    text: str,
+    default: bool | None = False,
+    abort: bool = False,
+    prompt_suffix: str = ": ",
+    show_default: bool = True,
+    err: bool = False,
+) -> bool:
+    """Prompts for confirmation (yes/no question).
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise an :exc:`Abort` exception.
+
+    :param text: the question to ask.
+    :param default: The default value to use when no input is given. If
+        ``None``, repeat until input is given.
+    :param abort: if this is set to `True` a negative answer aborts by
+        raising :exc:`Abort`.
+    :param prompt_suffix: a suffix that should be added to the prompt.
+    :param show_default: shows or hides the default value in the prompt.
+    :param err: if set to true the file defaults to ``stderr`` instead of
+        ``stdout``, the same as with echo.
+
+    .. versionchanged:: 8.3.1
+        A space is no longer appended to the prompt.
+
+    .. versionchanged:: 8.0
+        Repeat until input is given if ``default`` is ``None``.
+
+    .. versionadded:: 4.0
+        Added the ``err`` parameter.
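+
+    Example (a minimal sketch; ``delete_everything`` is a placeholder)::
+
+        if click.confirm("Really delete everything?"):
+            delete_everything()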
+ """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt[:-1], nl=False, err=err) + # Echo the last character to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(prompt[-1:]).lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str, + color: bool | None = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast("cabc.Iterable[str]", text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +@t.overload +def progressbar( + *, + length: int, + label: str | None = None, + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, +) -> ProgressBar[int]: ... + + +@t.overload +def progressbar( + iterable: cabc.Iterable[V] | None = None, + length: int | None = None, + label: str | None = None, + hidden: bool = False, + show_eta: bool = True, + show_percent: bool | None = None, + show_pos: bool = False, + item_show_func: t.Callable[[V | None], str | None] | None = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.TextIO | None = None, + color: bool | None = None, + update_min_steps: int = 1, +) -> ProgressBar[V]: ... 
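+
+# The two overloads above capture the two calling conventions: passing only
+# ``length`` yields a ``ProgressBar[int]`` that is advanced manually with
+# ``update()``, while passing an ``iterable`` yields a ``ProgressBar[V]``
+# that is advanced by iterating over it.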
+
+
+def progressbar(
+    iterable: cabc.Iterable[V] | None = None,
+    length: int | None = None,
+    label: str | None = None,
+    hidden: bool = False,
+    show_eta: bool = True,
+    show_percent: bool | None = None,
+    show_pos: bool = False,
+    item_show_func: t.Callable[[V | None], str | None] | None = None,
+    fill_char: str = "#",
+    empty_char: str = "-",
+    bar_template: str = "%(label)s [%(bar)s] %(info)s",
+    info_sep: str = " ",
+    width: int = 36,
+    file: t.TextIO | None = None,
+    color: bool | None = None,
+    update_min_steps: int = 1,
+) -> ProgressBar[V]:
+    """This function creates an iterable context manager that can be used
+    to iterate over something while showing a progress bar. It will
+    either iterate over the `iterable` or `length` items (that are counted
+    up). While iteration happens, this function will print a rendered
+    progress bar to the given `file` (defaults to stdout) and will attempt
+    to calculate remaining time and more. By default, this progress bar
+    will not be rendered if the file is not a terminal.
+
+    The context manager creates the progress bar. When the context
+    manager is entered the progress bar is already created. With every
+    iteration over the progress bar, the iterable passed to the bar is
+    advanced and the bar is updated. When the context manager exits,
+    a newline is printed and the progress bar is finalized on screen.
+
+    Note: The progress bar is currently designed for use cases where the
+    total progress can be expected to take at least several seconds.
+    Because of this, the ProgressBar class object won't display
+    progress that is considered too fast, and progress where the time
+    between steps is less than a second.
+
+    No other output may be printed while the progress bar is active, or
+    the bar will be unintentionally destroyed.
+
+    Example usage::
+
+        with progressbar(items) as bar:
+            for item in bar:
+                do_something_with(item)
+
+    Alternatively, if no iterable is specified, one can manually update the
+    progress bar through the `update()` method instead of directly
+    iterating over the progress bar. The update method accepts the number
+    of steps to increment the bar with::
+
+        with progressbar(length=chunks.total_bytes) as bar:
+            for chunk in chunks:
+                process_chunk(chunk)
+                bar.update(chunks.bytes)
+
+    The ``update()`` method also takes an optional value specifying the
+    ``current_item`` at the new position. This is useful when used
+    together with ``item_show_func`` to customize the output for each
+    manual step::
+
+        with click.progressbar(
+            length=total_size,
+            label='Unzipping archive',
+            item_show_func=lambda a: a.filename
+        ) as bar:
+            for archive in zip_file:
+                archive.extract()
+                bar.update(archive.size, archive)
+
+    :param iterable: an iterable to iterate over. If not provided the length
+                     is required.
+    :param length: the number of items to iterate over. By default the
+                   progressbar will attempt to ask the iterator about its
+                   length, which might or might not work. If an iterable is
+                   also provided this parameter can be used to override the
+                   length. If an iterable is not provided the progress bar
+                   will iterate over a range of that length.
+    :param label: the label to show next to the progress bar.
+    :param hidden: hide the progressbar. Defaults to ``False``. When no tty is
+        detected, it will only print the progressbar label. Setting this to
+        ``True`` also disables that.
+    :param show_eta: enables or disables the estimated time display. This is
+                     automatically disabled if the length cannot be
+                     determined.
+ :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionadded:: 8.2 + The ``hidden`` argument. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + The ``update_min_steps`` parameter. + + .. versionadded:: 4.0 + The ``color`` parameter and ``update`` method. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + hidden=hidden, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: int | tuple[int, int, int] | str | None = None, + bg: int | tuple[int, int, int] | str | None = None, + bold: bool | None = None, + dim: bool | None = None, + underline: bool | None = None, + overline: bool | None = None, + italic: bool | None = None, + blink: bool | None = None, + reverse: bool | None = None, + strikethrough: bool | None = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. versionchanged:: 7.0 + Added support for bright colors. + + .. 
versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Any | None = None, + file: t.IO[t.AnyStr] | None = None, + nl: bool = True, + err: bool = False, + color: bool | None = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +@t.overload +def edit( + text: bytes | bytearray, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = False, + extension: str = ".txt", +) -> bytes | None: ... + + +@t.overload +def edit( + text: str, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", +) -> str | None: ... + + +@t.overload +def edit( + text: None = None, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", + filename: str | cabc.Iterable[str] | None = None, +) -> None: ... + + +def edit( + text: str | bytes | bytearray | None = None, + editor: str | None = None, + env: cabc.Mapping[str, str] | None = None, + require_save: bool = True, + extension: str = ".txt", + filename: str | cabc.Iterable[str] | None = None, +) -> str | bytes | bytearray | None: + r"""Edits the given text in the defined editor. 
If an editor is given
+    (should be the full path to the executable but the regular operating
+    system search path is used for finding the executable) it overrides
+    the detected editor. Optionally, some environment variables can be
+    used. If the editor is closed without changes, `None` is returned. In
+    case a file is edited directly the return value is always `None` and
+    `require_save` and `extension` are ignored.
+
+    If the editor cannot be opened a :exc:`UsageError` is raised.
+
+    Note for Windows: to simplify cross-platform usage, the newlines are
+    automatically converted from POSIX to Windows and vice versa. As such,
+    the message here will have ``\n`` as newline markers.
+
+    :param text: the text to edit.
+    :param editor: optionally the editor to use. Defaults to automatic
+                   detection.
+    :param env: environment variables to forward to the editor.
+    :param require_save: if this is true, then not saving in the editor
+                         will make the return value become `None`.
+    :param extension: the extension to tell the editor about. This defaults
+                      to `.txt` but changing this might change syntax
+                      highlighting.
+    :param filename: if provided it will edit this file instead of the
+                     provided text contents. It will not use a temporary
+                     file as an indirection in that case. If the editor supports
+                     editing multiple files at once, a sequence of files may be
+                     passed as well. Invoke `click.edit` once per file instead
+                     if multiple files cannot be managed at once or editing the
+                     files serially is desired.
+
+    .. versionchanged:: 8.2.0
+        ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str``
+        if the ``editor`` supports editing multiple files at once.
+
+    """
+    from ._termui_impl import Editor
+
+    ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension)
+
+    if filename is None:
+        return ed.edit(text)
+
+    if isinstance(filename, str):
+        filename = (filename,)
+
+    ed.edit_files(filenames=filename)
+    return None
+
+
+def launch(url: str, wait: bool = False, locate: bool = False) -> int:
+    """This function launches the given URL (or filename) in the default
+    viewer application for this file type. If this is an executable, it
+    might launch the executable in a new session. The return value is
+    the exit code of the launched application. Usually, ``0`` indicates
+    success.
+
+    Examples::
+
+        click.launch('https://click.palletsprojects.com/')
+        click.launch('/my/downloaded/file', locate=True)
+
+    .. versionadded:: 2.0
+
+    :param url: URL or filename of the thing to launch.
+    :param wait: Wait for the program to exit before returning. This
+        only works if the launched program blocks. In particular,
+        ``xdg-open`` on Linux does not block.
+    :param locate: if this is set to `True` then instead of launching the
+                   application associated with the URL it will attempt to
+                   launch a file manager with the file located. This
+                   might have weird effects if the URL does not point to
+                   the filesystem.
+    """
+    from ._termui_impl import open_url
+
+    return open_url(url, wait=wait, locate=locate)
+
+
+# If this is provided, getchar() calls into this instead. This is used
+# for unittesting purposes.
+_getchar: t.Callable[[bool], str] | None = None
+
+
+def getchar(echo: bool = False) -> str:
+    """Fetches a single character from the terminal and returns it. This
+    will always return a unicode character and under certain rare
+    circumstances this might return more than one character.
+    The situations in which more than one character is returned are when,
+    for whatever reason, multiple characters end up in the terminal buffer
+    or standard input was not actually a terminal.
+
+    Note that this will always read from the terminal, even if something
+    is piped into the standard input.
+
+    Note for Windows: in rare cases when typing non-ASCII characters, this
+    function might wait for a second character and then return both at once.
+    This is because certain Unicode characters look like special-key markers.
+
+    .. versionadded:: 2.0
+
+    :param echo: if set to `True`, the character read will also show up on
+                 the terminal. The default is to not show it.
+    """
+    global _getchar
+
+    if _getchar is None:
+        from ._termui_impl import getchar as f
+
+        _getchar = f
+
+    return _getchar(echo)
+
+
+def raw_terminal() -> AbstractContextManager[int]:
+    from ._termui_impl import raw_terminal as f
+
+    return f()
+
+
+def pause(info: str | None = None, err: bool = False) -> None:
+    """This command stops execution and waits for the user to press any
+    key to continue. This is similar to the Windows batch "pause"
+    command. If the program is not run through a terminal, this command
+    will instead do nothing.
+
+    .. versionadded:: 2.0
+
+    .. versionadded:: 4.0
+        Added the `err` parameter.
+
+    :param info: The message to print before pausing. Defaults to
+        ``"Press any key to continue..."``.
+    :param err: if set to true the message goes to ``stderr`` instead of
+        ``stdout``, the same as with echo.
+    """
+    if not isatty(sys.stdin) or not isatty(sys.stdout):
+        return
+
+    if info is None:
+        info = _("Press any key to continue...")
+
+    try:
+        if info:
+            echo(info, nl=False, err=err)
+        try:
+            getchar()
+        except (KeyboardInterrupt, EOFError):
+            pass
+    finally:
+        if info:
+            echo(err=err)
diff --git a/venv/lib/python3.12/site-packages/click/testing.py b/venv/lib/python3.12/site-packages/click/testing.py
new file mode 100644
index 0000000..f6f60b8
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/click/testing.py
@@ -0,0 +1,577 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import contextlib
+import io
+import os
+import shlex
+import sys
+import tempfile
+import typing as t
+from types import TracebackType
+
+from . import _compat
+from . import formatting
+from . import termui
+from . import utils
+from ._compat import _find_binary_reader
+
+if t.TYPE_CHECKING:
+    from _typeshed import ReadableBuffer
+
+    from .core import Command
+
+
+class EchoingStdin:
+    def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
+        self._input = input
+        self._output = output
+        self._paused = False
+
+    def __getattr__(self, x: str) -> t.Any:
+        return getattr(self._input, x)
+
+    def _echo(self, rv: bytes) -> bytes:
+        if not self._paused:
+            self._output.write(rv)
+
+        return rv
+
+    def read(self, n: int = -1) -> bytes:
+        return self._echo(self._input.read(n))
+
+    def read1(self, n: int = -1) -> bytes:
+        return self._echo(self._input.read1(n))  # type: ignore
+
+    def readline(self, n: int = -1) -> bytes:
+        return self._echo(self._input.readline(n))
+
+    def readlines(self) -> list[bytes]:
+        return [self._echo(x) for x in self._input.readlines()]
+
+    def __iter__(self) -> cabc.Iterator[bytes]:
+        return iter(self._echo(x) for x in self._input)
+
+    def __repr__(self) -> str:
+        return repr(self._input)
+
+
+@contextlib.contextmanager
+def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]:
+    if stream is None:
+        yield
+    else:
+        stream._paused = True
+        yield
+        stream._paused = False
+
+
+class BytesIOCopy(io.BytesIO):
+    """Patch ``io.BytesIO`` to let the written stream be copied to another.
+
+    .. versionadded:: 8.2
+    """
+
+    def __init__(self, copy_to: io.BytesIO) -> None:
+        super().__init__()
+        self.copy_to = copy_to
+
+    def flush(self) -> None:
+        super().flush()
+        self.copy_to.flush()
+
+    def write(self, b: ReadableBuffer) -> int:
+        self.copy_to.write(b)
+        return super().write(b)
+
+
+class StreamMixer:
+    """Mixes ``<stdout>`` and ``<stderr>`` streams.
+
+    The result is available in the ``output`` attribute.
+
+    .. versionadded:: 8.2
+    """
+
+    def __init__(self) -> None:
+        self.output: io.BytesIO = io.BytesIO()
+        self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output)
+        self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output)
+
+    def __del__(self) -> None:
+        """
+        Guarantee that embedded file-like objects are closed in a
+        predictable order, protecting against races between
+        self.output being closed and other streams being flushed on close
+
+        .. versionadded:: 8.2.2
+        """
+        self.stderr.close()
+        self.stdout.close()
+        self.output.close()
+
+
+class _NamedTextIOWrapper(io.TextIOWrapper):
+    def __init__(
+        self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
+    ) -> None:
+        super().__init__(buffer, **kwargs)
+        self._name = name
+        self._mode = mode
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def mode(self) -> str:
+        return self._mode
+
+
+def make_input_stream(
+    input: str | bytes | t.IO[t.Any] | None, charset: str
+) -> t.BinaryIO:
+    # Is already an input stream.
+    if hasattr(input, "read"):
+        rv = _find_binary_reader(t.cast("t.IO[t.Any]", input))
+
+        if rv is not None:
+            return rv
+
+        raise TypeError("Could not find binary reader for input stream.")
+
+    if input is None:
+        input = b""
+    elif isinstance(input, str):
+        input = input.encode(charset)
+
+    return io.BytesIO(input)
+
+
+class Result:
+    """Holds the captured result of an invoked CLI script.
+
+    :param runner: The runner that created the result
+    :param stdout_bytes: The standard output as bytes.
+    :param stderr_bytes: The standard error as bytes.
+    :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the
+        user would see it in its terminal.
+    :param return_value: The value returned from the invoked command.
+    :param exit_code: The exit code as integer.
+    :param exception: The exception that happened if one did.
+    :param exc_info: Exception information (exception type, exception instance,
+        traceback type).
+
+    .. versionchanged:: 8.2
+        ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and
+        ``mix_stderr`` has been removed.
+
+    .. versionadded:: 8.0
+        Added ``return_value``.
+    """
+
+    def __init__(
+        self,
+        runner: CliRunner,
+        stdout_bytes: bytes,
+        stderr_bytes: bytes,
+        output_bytes: bytes,
+        return_value: t.Any,
+        exit_code: int,
+        exception: BaseException | None,
+        exc_info: tuple[type[BaseException], BaseException, TracebackType]
+        | None = None,
+    ):
+        self.runner = runner
+        self.stdout_bytes = stdout_bytes
+        self.stderr_bytes = stderr_bytes
+        self.output_bytes = output_bytes
+        self.return_value = return_value
+        self.exit_code = exit_code
+        self.exception = exception
+        self.exc_info = exc_info
+
+    @property
+    def output(self) -> str:
+        """The terminal output as unicode string, as the user would see it.
+
+        .. versionchanged:: 8.2
+            No longer a proxy for ``self.stdout``. Now has its own independent
+            stream that is mixing ``<stdout>`` and ``<stderr>``, in the order
+            they were written.
+        """
+        return self.output_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    @property
+    def stdout(self) -> str:
+        """The standard output as unicode string."""
+        return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    @property
+    def stderr(self) -> str:
+        """The standard error as unicode string.
+
+        .. versionchanged:: 8.2
+            No longer raise an exception, always returns the ``<stderr>``
+            string.
+        """
+        return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    def __repr__(self) -> str:
+        exc_str = repr(self.exception) if self.exception else "okay"
+        return f"<{type(self).__name__} {exc_str}>"
+
+
+class CliRunner:
+    """The CLI runner provides functionality to invoke a Click command line
+    script for unittesting purposes in an isolated environment. This only
+    works in single-threaded systems without any concurrency as it changes the
+    global interpreter state.
+
+    :param charset: the character set for the input and output data.
+    :param env: a dictionary with environment variables for overriding.
+    :param echo_stdin: if this is set to `True`, then reading from ``<stdin>``
+        writes to ``<stdout>``. This is useful for showing examples in
+        some circumstances. Note that regular prompts
+        will automatically echo the input.
+    :param catch_exceptions: Whether to catch any exceptions other than
+        ``SystemExit`` when running :meth:`~CliRunner.invoke`.
+
+    .. versionchanged:: 8.2
+        Added the ``catch_exceptions`` parameter.
+
+    .. versionchanged:: 8.2
+        ``mix_stderr`` parameter has been removed.
+    """
+
+    def __init__(
+        self,
+        charset: str = "utf-8",
+        env: cabc.Mapping[str, str | None] | None = None,
+        echo_stdin: bool = False,
+        catch_exceptions: bool = True,
+    ) -> None:
+        self.charset = charset
+        self.env: cabc.Mapping[str, str | None] = env or {}
+        self.echo_stdin = echo_stdin
+        self.catch_exceptions = catch_exceptions
+
+    def get_default_prog_name(self, cli: Command) -> str:
+        """Given a command object it will return the default program name
+        for it. The default is the `name` attribute or ``"root"`` if not
+        set.
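+
+        For example, a command created with ``@click.command(name="hello")``
+        yields ``"hello"`` here.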
+        """
+        return cli.name or "root"
+
+    def make_env(
+        self, overrides: cabc.Mapping[str, str | None] | None = None
+    ) -> cabc.Mapping[str, str | None]:
+        """Returns the environment overrides for invoking a script."""
+        rv = dict(self.env)
+        if overrides:
+            rv.update(overrides)
+        return rv
+
+    @contextlib.contextmanager
+    def isolation(
+        self,
+        input: str | bytes | t.IO[t.Any] | None = None,
+        env: cabc.Mapping[str, str | None] | None = None,
+        color: bool = False,
+    ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]:
+        """A context manager that sets up the isolation for invoking a
+        command line tool. This sets up ``<stdin>`` with the given input data
+        and `os.environ` with the overrides from the given dictionary.
+        This also rebinds some internals in Click to be mocked (like the
+        prompt functionality).
+
+        This is automatically done in the :meth:`invoke` method.
+
+        :param input: the input stream to put into `sys.stdin`.
+        :param env: the environment overrides as dictionary.
+        :param color: whether the output should contain color codes. The
+            application can still override this explicitly.
+
+        .. versionadded:: 8.2
+            An additional output stream is returned, which is a mix of
+            ``<stdout>`` and ``<stderr>`` streams.
+
+        .. versionchanged:: 8.2
+            Always returns the ``<stderr>`` stream.
+
+        .. versionchanged:: 8.0
+            ``<stderr>`` is opened with ``errors="backslashreplace"``
+            instead of the default ``"strict"``.
+
+        .. versionchanged:: 4.0
+            Added the ``color`` parameter.
+        """
+        bytes_input = make_input_stream(input, self.charset)
+        echo_input = None
+
+        old_stdin = sys.stdin
+        old_stdout = sys.stdout
+        old_stderr = sys.stderr
+        old_forced_width = formatting.FORCED_WIDTH
+        formatting.FORCED_WIDTH = 80
+
+        env = self.make_env(env)
+
+        stream_mixer = StreamMixer()
+
+        if self.echo_stdin:
+            bytes_input = echo_input = t.cast(
+                t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout)
+            )
+
+        sys.stdin = text_input = _NamedTextIOWrapper(
+            bytes_input, encoding=self.charset, name="<stdin>", mode="r"
+        )
+
+        if self.echo_stdin:
+            # Force unbuffered reads, otherwise TextIOWrapper reads a
+            # large chunk which is echoed early.
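+            # (_CHUNK_SIZE is a private attribute of TextIOWrapper; setting
+            # it to 1 makes each read fetch a single byte so echoed input
+            # interleaves with output in the expected order.)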
+            text_input._CHUNK_SIZE = 1  # type: ignore
+
+        sys.stdout = _NamedTextIOWrapper(
+            stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w"
+        )
+
+        sys.stderr = _NamedTextIOWrapper(
+            stream_mixer.stderr,
+            encoding=self.charset,
+            name="<stderr>",
+            mode="w",
+            errors="backslashreplace",
+        )
+
+        @_pause_echo(echo_input)  # type: ignore
+        def visible_input(prompt: str | None = None) -> str:
+            sys.stdout.write(prompt or "")
+            try:
+                val = next(text_input).rstrip("\r\n")
+            except StopIteration as e:
+                raise EOFError() from e
+            sys.stdout.write(f"{val}\n")
+            sys.stdout.flush()
+            return val
+
+        @_pause_echo(echo_input)  # type: ignore
+        def hidden_input(prompt: str | None = None) -> str:
+            sys.stdout.write(f"{prompt or ''}\n")
+            sys.stdout.flush()
+            try:
+                return next(text_input).rstrip("\r\n")
+            except StopIteration as e:
+                raise EOFError() from e
+
+        @_pause_echo(echo_input)  # type: ignore
+        def _getchar(echo: bool) -> str:
+            char = sys.stdin.read(1)
+
+            if echo:
+                sys.stdout.write(char)
+
+            sys.stdout.flush()
+            return char
+
+        default_color = color
+
+        def should_strip_ansi(
+            stream: t.IO[t.Any] | None = None, color: bool | None = None
+        ) -> bool:
+            if color is None:
+                return not default_color
+            return not color
+
+        old_visible_prompt_func = termui.visible_prompt_func
+        old_hidden_prompt_func = termui.hidden_prompt_func
+        old__getchar_func = termui._getchar
+        old_should_strip_ansi = utils.should_strip_ansi  # type: ignore
+        old__compat_should_strip_ansi = _compat.should_strip_ansi
+        termui.visible_prompt_func = visible_input
+        termui.hidden_prompt_func = hidden_input
+        termui._getchar = _getchar
+        utils.should_strip_ansi = should_strip_ansi  # type: ignore
+        _compat.should_strip_ansi = should_strip_ansi
+
+        old_env = {}
+        try:
+            for key, value in env.items():
+                old_env[key] = os.environ.get(key)
+                if value is None:
+                    try:
+                        del os.environ[key]
+                    except Exception:
+                        pass
+                else:
+                    os.environ[key] = value
+            yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output)
+        finally:
+            for key, value in old_env.items():
+                if value is None:
+                    try:
+                        del os.environ[key]
+                    except Exception:
+                        pass
+                else:
+                    os.environ[key] = value
+            sys.stdout = old_stdout
+            sys.stderr = old_stderr
+            sys.stdin = old_stdin
+            termui.visible_prompt_func = old_visible_prompt_func
+            termui.hidden_prompt_func = old_hidden_prompt_func
+            termui._getchar = old__getchar_func
+            utils.should_strip_ansi = old_should_strip_ansi  # type: ignore
+            _compat.should_strip_ansi = old__compat_should_strip_ansi
+            formatting.FORCED_WIDTH = old_forced_width
+
+    def invoke(
+        self,
+        cli: Command,
+        args: str | cabc.Sequence[str] | None = None,
+        input: str | bytes | t.IO[t.Any] | None = None,
+        env: cabc.Mapping[str, str | None] | None = None,
+        catch_exceptions: bool | None = None,
+        color: bool = False,
+        **extra: t.Any,
+    ) -> Result:
+        """Invokes a command in an isolated environment. The arguments are
+        forwarded directly to the command line script, the `extra` keyword
+        arguments are passed to the :meth:`~clickpkg.Command.main` function of
+        the command.
+
+        This returns a :class:`Result` object.
+
+        :param cli: the command to invoke
+        :param args: the arguments to invoke. It may be given as an iterable
+            or a string. When given as string it will be interpreted
+            as a Unix shell command. More details at
+            :func:`shlex.split`.
+        :param input: the input data for `sys.stdin`.
+        :param env: the environment overrides.
+        :param catch_exceptions: Whether to catch any other exceptions than
+            ``SystemExit``.
If :data:`None`, the value + from :class:`CliRunner` is used. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionadded:: 8.2 + The result object has the ``output_bytes`` attribute with + the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would + see it in its terminal. + + .. versionchanged:: 8.2 + The result object always returns the ``stderr_bytes`` stream. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + if catch_exceptions is None: + catch_exceptions = self.catch_exceptions + + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: BaseException | None = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast("int | t.Any | None", e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + sys.stderr.flush() + stdout = outstreams[0].getvalue() + stderr = outstreams[1].getvalue() + output = outstreams[2].getvalue() + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + output_bytes=output, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: str | os.PathLike[str] | None = None + ) -> cabc.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
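+
+        Example (a minimal sketch; ``cli`` stands for any Click command
+        under test)::
+
+            runner = CliRunner()
+
+            with runner.isolated_filesystem():
+                with open("hello.txt", "w") as f:
+                    f.write("Hello World!")
+
+                result = runner.invoke(cli, ["hello.txt"])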
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + import shutil + + try: + shutil.rmtree(dt) + except OSError: + pass diff --git a/venv/lib/python3.12/site-packages/click/types.py b/venv/lib/python3.12/site-packages/click/types.py new file mode 100644 index 0000000..e71c1c2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/click/types.py @@ -0,0 +1,1209 @@ +from __future__ import annotations + +import collections.abc as cabc +import enum +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + +ParamTypeValue = t.TypeVar("ParamTypeValue") + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[str | None] = None + + def to_info_dict(self) -> dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: Parameter | None = None, + ctx: Context | None = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: Parameter, ctx: Context) -> str | None: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None: + """Optionally might return extra information about a missing + parameter. + + .. 
versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> cabc.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: Parameter | None = None, + ctx: Context | None = None, + ) -> t.NoReturn: + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0
+        """
+        return []
+
+
+class CompositeParamType(ParamType):
+    is_composite = True
+
+    @property
+    def arity(self) -> int:  # type: ignore
+        raise NotImplementedError()
+
+
+class FuncParamType(ParamType):
+    def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None:
+        self.name: str = func.__name__
+        self.func = func
+
+    def to_info_dict(self) -> dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict["func"] = self.func
+        return info_dict
+
+    def convert(
+        self, value: t.Any, param: Parameter | None, ctx: Context | None
+    ) -> t.Any:
+        try:
+            return self.func(value)
+        except ValueError:
+            try:
+                value = str(value)
+            except UnicodeError:
+                value = value.decode("utf-8", "replace")
+
+            self.fail(value, param, ctx)
+
+
+class UnprocessedParamType(ParamType):
+    name = "text"
+
+    def convert(
+        self, value: t.Any, param: Parameter | None, ctx: Context | None
+    ) -> t.Any:
+        return value
+
+    def __repr__(self) -> str:
+        return "UNPROCESSED"
+
+
+class StringParamType(ParamType):
+    name = "text"
+
+    def convert(
+        self, value: t.Any, param: Parameter | None, ctx: Context | None
+    ) -> t.Any:
+        if isinstance(value, bytes):
+            enc = _get_argv_encoding()
+            try:
+                value = value.decode(enc)
+            except UnicodeError:
+                fs_enc = sys.getfilesystemencoding()
+                if fs_enc != enc:
+                    try:
+                        value = value.decode(fs_enc)
+                    except UnicodeError:
+                        value = value.decode("utf-8", "replace")
+                else:
+                    value = value.decode("utf-8", "replace")
+            return value
+        return str(value)
+
+    def __repr__(self) -> str:
+        return "STRING"
+
+
+class Choice(ParamType, t.Generic[ParamTypeValue]):
+    """The choice type allows a value to be checked against a fixed set
+    of supported values.
+
+    You may pass any iterable value, which will be converted to a tuple
+    and thus will only be iterated once.
+
+    The resulting value will always be one of the originally passed choices.
+    See :meth:`normalize_choice` for more info on the mapping of strings
+    to choices. See :ref:`choice-opts` for an example.
+
+    :param case_sensitive: Set to false to make choices case
+        insensitive. Defaults to true.
+
+    .. versionchanged:: 8.2.0
+        Non-``str`` ``choices`` are now supported, and ``choices`` may be any
+        iterable. Previously, passing anything but a list or tuple was not
+        recommended.
+
+    .. versionadded:: 8.2.0
+        Choice normalization can be overridden via :meth:`normalize_choice`.
+    """
+
+    name = "choice"
+
+    def __init__(
+        self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True
+    ) -> None:
+        self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices)
+        self.case_sensitive = case_sensitive
+
+    def to_info_dict(self) -> dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict["choices"] = self.choices
+        info_dict["case_sensitive"] = self.case_sensitive
+        return info_dict
+
+    def _normalized_mapping(
+        self, ctx: Context | None = None
+    ) -> cabc.Mapping[ParamTypeValue, str]:
+        """
+        Returns a mapping where the keys are the original choices and the
+        values are the normalized values accepted via the command line.
+
+        This is a simple wrapper around :meth:`normalize_choice`; prefer
+        that method, which is the supported extension point.
+        """
+        return {
+            choice: self.normalize_choice(
+                choice=choice,
+                ctx=ctx,
+            )
+            for choice in self.choices
+        }
+
+    def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str:
+        """
+        Normalize a choice value, used to map a passed string to a choice.
+        Each choice must have a unique normalized value.
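As a minimal usage sketch of ``Choice`` (the command and option names here are
hypothetical, not from this repository), ``case_sensitive=False`` lets any
casing match on the command line while the callback still receives the
original choice:

.. code-block:: python

    import click

    @click.command()
    @click.option("--fmt", type=click.Choice(["json", "yaml"], case_sensitive=False))
    def export(fmt):
        # "--fmt JSON" is accepted; the callback receives the original "json".
        click.echo(fmt)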
+ + By default uses :meth:`Context.token_normalize_func` and if not case + sensitive, convert it to a casefolded value. + + .. versionadded:: 8.2.0 + """ + normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice) + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(normed_value) + + if not self.case_sensitive: + normed_value = normed_value.casefold() + + return normed_value + + def get_metavar(self, param: Parameter, ctx: Context) -> str | None: + if param.param_type_name == "option" and not param.show_choices: # type: ignore + choice_metavars = [ + convert_type(type(choice)).name.upper() for choice in self.choices + ] + choices_str = "|".join([*dict.fromkeys(choice_metavars)]) + else: + choices_str = "|".join( + [str(i) for i in self._normalized_mapping(ctx=ctx).values()] + ) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. + return f"[{choices_str}]" + + def get_missing_message(self, param: Parameter, ctx: Context | None) -> str: + """ + Message shown when no choice is passed. + + .. versionchanged:: 8.2.0 Added ``ctx`` argument. + """ + return _("Choose from:\n\t{choices}").format( + choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values()) + ) + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> ParamTypeValue: + """ + For a given value from the parser, normalize it and find its + matching normalized value in the list of choices. Then return the + matched "original" choice. + """ + normed_value = self.normalize_choice(choice=value, ctx=ctx) + normalized_mapping = self._normalized_mapping(ctx=ctx) + + try: + return next( + original + for original, normalized in normalized_mapping.items() + if normalized == normed_value + ) + except StopIteration: + self.fail( + self.get_invalid_choice_message(value=value, ctx=ctx), + param=param, + ctx=ctx, + ) + + def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str: + """Get the error message when the given choice is invalid. + + :param value: The invalid value. + + .. versionadded:: 8.2 + """ + choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values())) + return ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. 
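A minimal usage sketch (command and option names are hypothetical; only the
``%Y-%m-%d`` format is accepted in this example):

.. code-block:: python

    import click

    @click.command()
    @click.option("--when", type=click.DateTime(formats=["%Y-%m-%d"]))
    def schedule(when):
        # ``when`` arrives as a ``datetime.datetime`` parsed by ``strptime``.
        click.echo(when.isoformat())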
+ + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + + name = "datetime" + + def __init__(self, formats: cabc.Sequence[str] | None = None): + self.formats: cabc.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: Parameter, ctx: Context) -> str | None: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[type[t.Any]] + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: float | None = None, + max: float | None = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, 
range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. + """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: t.Literal[1, -1], open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: float | None = None, + max: float | None = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: + if not open: + return bound + + # Could use math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
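A hedged sketch of both range types with made-up option names; clamping is
only combined with closed bounds, as required above:

.. code-block:: python

    import click

    @click.command()
    @click.option("--retries", type=click.IntRange(0, 10, clamp=True))
    @click.option("--ratio", type=click.FloatRange(0.0, 1.0, min_open=True))
    def run(retries, ratio):
        # "--retries 99" is clamped to 10; "--ratio 0.0" fails because the
        # lower bound is open.
        click.echo(f"{retries} {ratio}")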
+            raise RuntimeError("Clamping is not supported for open bounds.")
+
+
+class BoolParamType(ParamType):
+    name = "boolean"
+
+    bool_states: dict[str, bool] = {
+        "1": True,
+        "0": False,
+        "yes": True,
+        "no": False,
+        "true": True,
+        "false": False,
+        "on": True,
+        "off": False,
+        "t": True,
+        "f": False,
+        "y": True,
+        "n": False,
+        # Absence of value is considered False.
+        "": False,
+    }
+    """A mapping of string values to boolean states.
+
+    Mapping is inspired by :py:attr:`configparser.ConfigParser.BOOLEAN_STATES`
+    and extends it.
+
+    .. caution::
+        String values are lower-cased, as the ``str_to_bool`` comparison function
+        below is case-insensitive.
+
+    .. warning::
+        The mapping is not exhaustive, and does not cover all possible boolean
+        string representations. It will remain as it is to avoid endless
+        bikeshedding.
+
+        Future work may be considered to make this mapping user-configurable via
+        the public API.
+    """
+
+    @staticmethod
+    def str_to_bool(value: str | bool) -> bool | None:
+        """Convert a string to a boolean value.
+
+        If the value is already a boolean, it is returned as-is. If the value is a
+        string, it is stripped of whitespace and lower-cased, then checked against
+        the known boolean states pre-defined in the `BoolParamType.bool_states`
+        mapping above.
+
+        Returns `None` if the value does not match any known boolean state.
+        """
+        if isinstance(value, bool):
+            return value
+        return BoolParamType.bool_states.get(value.strip().lower())
+
+    def convert(
+        self, value: t.Any, param: Parameter | None, ctx: Context | None
+    ) -> bool:
+        normalized = self.str_to_bool(value)
+        if normalized is None:
+            self.fail(
+                _(
+                    "{value!r} is not a valid boolean. Recognized values: {states}"
+                ).format(value=value, states=", ".join(sorted(self.bool_states))),
+                param,
+                ctx,
+            )
+        return normalized
+
+    def __repr__(self) -> str:
+        return "BOOL"
+
+
+class UUIDParameterType(ParamType):
+    name = "uuid"
+
+    def convert(
+        self, value: t.Any, param: Parameter | None, ctx: Context | None
+    ) -> t.Any:
+        import uuid
+
+        if isinstance(value, uuid.UUID):
+            return value
+
+        value = value.strip()
+
+        try:
+            return uuid.UUID(value)
+        except ValueError:
+            self.fail(
+                _("{value!r} is not a valid UUID.").format(value=value), param, ctx
+            )
+
+    def __repr__(self) -> str:
+        return "UUID"
+
+
+class File(ParamType):
+    """Declares a parameter to be a file for reading or writing. The file
+    is automatically closed once the context tears down (after the command
+    finished working).
+
+    Files can be opened for reading or writing. The special value ``-``
+    indicates stdin or stdout depending on the mode.
+
+    By default, the file is opened for reading text data, but it can also be
+    opened in binary mode or for writing. The encoding parameter can be used
+    to force a specific encoding.
+
+    The `lazy` flag controls if the file should be opened immediately or upon
+    first IO. The default is to be non-lazy for standard input and output
+    streams as well as files opened for reading, `lazy` otherwise. When opening a
+    file lazily for reading, it is still opened temporarily for validation, but
+    will not be held open until first IO. lazy is mainly useful when opening
+    for writing to avoid creating the file until it is needed.
+
+    Files can also be opened atomically in which case all writes go into a
+    separate file in the same folder and upon completion the file will
+    be moved over to the original location. This is useful if a file
+    regularly read by other users is modified.
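A minimal usage sketch (the argument names are illustrative):

.. code-block:: python

    import click

    @click.command()
    @click.argument("src", type=click.File("r"))
    @click.argument("dst", type=click.File("w", lazy=True, atomic=True))
    def copy(src, dst):
        # "-" for SRC or DST selects stdin/stdout; both handles are closed
        # automatically when the context tears down.
        dst.write(src.read())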
+ + See :ref:`file-args` for more information. + + .. versionchanged:: 2.0 + Added the ``atomic`` parameter. + """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: str | os.PathLike[str] | t.IO[t.Any], + param: Parameter | None, + ctx: Context | None, + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("str | os.PathLike[str]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast("t.IO[t.Any]", lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]: + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. 
+ :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: type[t.Any] | None = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: str | os.PathLike[str] + ) -> str | bytes | os.PathLike[str]: + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: str | os.PathLike[str], + param: Parameter | None, + ctx: Context | None, + ) -> str | bytes | os.PathLike[str]: + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + rv = os.path.realpath(rv) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} {filename!r} is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return 
self.coerce_path_result(rv) + + def shell_complete( + self, ctx: Context, param: Parameter, incomplete: str + ) -> list[CompletionItem]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None: + self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: Parameter | None, ctx: Context | None + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple( + ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False) + ) + + +def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. 
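A hedged sketch of ``click.Tuple`` (the option name and values are made up);
``nargs`` must match the number of item types:

.. code-block:: python

    import click

    @click.command()
    @click.option("--item", nargs=2, type=click.Tuple([str, int]))
    def put(item):
        # "--item peanuts 12" yields the tuple ("peanuts", 12).
        name, count = item
        click.echo(f"{name}={count}")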
+            pass
+
+    return FuncParamType(ty)
+
+
+#: A dummy parameter type that just does nothing. From a user's
+#: perspective this appears to just be the same as `STRING` but
+#: internally no string conversion takes place if the input was bytes.
+#: This is usually useful when working with file paths as they can
+#: appear in bytes and unicode.
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful, which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default. This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter. This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter. This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter. This is the default for boolean flags. This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
+
+
+class OptionHelpExtra(t.TypedDict, total=False):
+    envvars: tuple[str, ...]
+    default: str
+    range: str
+    required: str
diff --git a/venv/lib/python3.12/site-packages/click/utils.py b/venv/lib/python3.12/site-packages/click/utils.py
new file mode 100644
index 0000000..beae26f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/click/utils.py
@@ -0,0 +1,627 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import os
+import re
+import sys
+import typing as t
+from functools import update_wrapper
+from types import ModuleType
+from types import TracebackType
+
+from ._compat import _default_text_stderr
+from ._compat import _default_text_stdout
+from ._compat import _find_binary_writer
+from ._compat import auto_wrap_for_ansi
+from ._compat import binary_streams
+from ._compat import open_stream
+from ._compat import should_strip_ansi
+from ._compat import strip_ansi
+from ._compat import text_streams
+from ._compat import WIN
+from .globals import resolve_color_default
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+    P = te.ParamSpec("P")
+
+R = t.TypeVar("R")
+
+
+def _posixify(name: str) -> str:
+    return "-".join(name.split()).lower()
+
+
+def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]:
+    """Wraps a function so that it swallows exceptions."""
+
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None:
+        try:
+            return func(*args, **kwargs)
+        except Exception:
+            pass
+        return None
+
+    return update_wrapper(wrapper, func)
+
+
+def make_str(value: t.Any) -> str:
+    """Converts a value into a valid string."""
+    if isinstance(value, bytes):
+        try:
+            return value.decode(sys.getfilesystemencoding())
+        except UnicodeError:
+            return value.decode("utf-8", "replace")
+    return str(value)
+
+
+def make_default_short_help(help: str, max_length: int = 45) -> str:
+    """Returns a condensed version of help string."""
+    # Consider only the first paragraph.
+    paragraph_end = help.find("\n\n")
+
+    if paragraph_end != -1:
+        help = help[:paragraph_end]
+
+    # Collapse newlines, tabs, and spaces.
+    words = help.split()
+
+    if not words:
+        return ""
+
+    # The first paragraph started with a "no rewrap" marker, ignore it.
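A small sketch of the type guessing above, with hypothetical option names;
omitting ``type=`` lets ``convert_type`` infer the type from the default:

.. code-block:: python

    import click

    @click.command()
    @click.option("--workers", default=3)     # inferred as INT from the default
    @click.option("--name", default="guest")  # inferred as STRING
    def main(workers, name):
        click.echo(f"{name}: {workers}")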
+    if words[0] == "\b":
+        words = words[1:]
+
+    total_length = 0
+    last_index = len(words) - 1
+
+    for i, word in enumerate(words):
+        total_length += len(word) + (i > 0)
+
+        if total_length > max_length:  # too long, truncate
+            break
+
+        if word[-1] == ".":  # sentence end, truncate without "..."
+            return " ".join(words[: i + 1])
+
+        if total_length == max_length and i != last_index:
+            break  # not at sentence end, truncate with "..."
+    else:
+        return " ".join(words)  # no truncation needed
+
+    # Account for the length of the suffix.
+    total_length += len("...")
+
+    # remove words until the length is short enough
+    while i > 0:
+        total_length -= len(words[i]) + (i > 0)
+
+        if total_length <= max_length:
+            break
+
+        i -= 1
+
+    return " ".join(words[:i]) + "..."
+
+
+class LazyFile:
+    """A lazy file works like a regular file, but does not fully open the
+    file until first IO. It does perform some basic checks early to see if
+    the filename parameter makes sense. This is useful for safely opening
+    files for writing.
+    """
+
+    def __init__(
+        self,
+        filename: str | os.PathLike[str],
+        mode: str = "r",
+        encoding: str | None = None,
+        errors: str | None = "strict",
+        atomic: bool = False,
+    ):
+        self.name: str = os.fspath(filename)
+        self.mode = mode
+        self.encoding = encoding
+        self.errors = errors
+        self.atomic = atomic
+        self._f: t.IO[t.Any] | None
+        self.should_close: bool
+
+        if self.name == "-":
+            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
+        else:
+            if "r" in mode:
+                # Open and close the file in case we're opening it for
+                # reading so that we can catch at least some errors in
+                # some cases early.
+                open(filename, mode).close()
+            self._f = None
+            self.should_close = True
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self.open(), name)
+
+    def __repr__(self) -> str:
+        if self._f is not None:
+            return repr(self._f)
+        return f"<unopened file '{format_filename(self.name)}' mode '{self.mode}'>"
+
+    def open(self) -> t.IO[t.Any]:
+        """Opens the file if it's not yet open. This call might fail with
+        a :exc:`FileError`. Not handling this error will produce an error
+        that Click shows.
+        """
+        if self._f is not None:
+            return self._f
+        try:
+            rv, self.should_close = open_stream(
+                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
+            )
+        except OSError as e:
+            from .exceptions import FileError
+
+            raise FileError(self.name, hint=e.strerror) from e
+        self._f = rv
+        return rv
+
+    def close(self) -> None:
+        """Closes the underlying file, no matter what."""
+        if self._f is not None:
+            self._f.close()
+
+    def close_intelligently(self) -> None:
+        """This function only closes the file if it was opened by the lazy
+        file wrapper. For instance this will never close stdin.
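A minimal sketch (the file name is hypothetical); in write mode nothing is
opened until the first attribute access:

.. code-block:: python

    from click.utils import LazyFile

    lf = LazyFile("report.txt", mode="w")
    lf.write("done\n")        # the first access opens the underlying file
    lf.close_intelligently()  # closes it, since the wrapper opened it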
+ """ + if self.should_close: + self.close() + + def __enter__(self) -> LazyFile: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.close_intelligently() + + def __iter__(self) -> cabc.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> KeepOpenFile: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> cabc.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Any | None = None, + file: t.IO[t.Any] | None = None, + nl: bool = True, + err: bool = False, + color: bool | None = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. + :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: str | bytes | bytearray | None = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. 
+ if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file, color) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: t.Literal["stdin", "stdout", "stderr"], + encoding: str | None = None, + errors: str | None = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str | os.PathLike[str], + mode: str = "r", + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name or Path of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast("t.IO[t.Any]", KeepOpenFile(f)) + + return f + + +def format_filename( + filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], + shorten: bool = False, +) -> str: + """Format a filename as a string for display. 
Ensures the filename can be
+    displayed by replacing any invalid bytes or surrogate escapes in the name
+    with the replacement character ``�``.
+
+    Invalid bytes or surrogate escapes will raise an error when written to a
+    stream with ``errors="strict"``. This will typically happen with ``stdout``
+    when the locale is something like ``en_GB.UTF-8``.
+
+    Many scenarios *are* safe to write surrogates though, due to PEP 538 and
+    PEP 540, including:
+
+    - Writing to ``stderr``, which uses ``errors="backslashreplace"``.
+    - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
+      stdout and stderr with ``errors="surrogateescape"``.
+    - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
+    - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
+      Python opens stdout and stderr with ``errors="surrogateescape"``.
+
+    :param filename: formats a filename for UI display. This will also convert
+        the filename into unicode without failing.
+    :param shorten: this optionally shortens the filename to strip off the
+        path that leads up to it.
+    """
+    if shorten:
+        filename = os.path.basename(filename)
+    else:
+        filename = os.fspath(filename)
+
+    if isinstance(filename, bytes):
+        filename = filename.decode(sys.getfilesystemencoding(), "replace")
+    else:
+        filename = filename.encode("utf-8", "surrogateescape").decode(
+            "utf-8", "replace"
+        )
+
+    return filename
+
+
+def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
+    r"""Returns the config folder for the application. The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Windows (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Windows (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name. This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no effect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
+    """
+    if WIN:
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+class PacifyFlushWrapper:
+    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+    from ``.flush()`` being called on a broken pipe during the shutdown/final-GC
+    of the Python interpreter. Notably ``.flush()`` is always called on
+    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+    other cleanup code, and the case where the underlying file is not a broken
+    pipe, all calls and attributes are proxied.
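A hedged sketch of how such a wrapper can be applied after a
``BrokenPipeError`` (for example when output is piped into ``head``), so the
interpreter's final implicit ``flush()`` does not raise again:

.. code-block:: python

    import sys

    from click.utils import PacifyFlushWrapper

    sys.stdout = PacifyFlushWrapper(sys.stdout)  # type: ignore[assignment]
    sys.stderr = PacifyFlushWrapper(sys.stderr)  # type: ignore[assignment]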
+ """ + + def __init__(self, wrapped: t.IO[t.Any]) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: str | None = None, _main: ModuleType | None = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + # It is set to "" inside a Shiv or PEX zipapp. + if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: cabc.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> list[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. 
versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/METADATA b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/METADATA new file mode 100644 index 0000000..7b07ee7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/METADATA @@ -0,0 +1,139 @@ +Metadata-Version: 2.4 +Name: cryptography +Version: 46.0.3 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable +Classifier: Topic :: Security :: Cryptography +Requires-Dist: cffi>=1.14 ; python_full_version == '3.8.*' and platform_python_implementation != 'PyPy' +Requires-Dist: cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy' +Requires-Dist: typing-extensions>=4.13.2 ; python_full_version < '3.11' +Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh' +Requires-Dist: nox[uv]>=2024.4.15 ; extra == 'nox' +Requires-Dist: cryptography-vectors==46.0.3 ; extra == 'test' +Requires-Dist: pytest>=7.4.0 ; extra == 'test' +Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test' +Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test' +Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test' +Requires-Dist: pretend>=0.7 ; extra == 'test' +Requires-Dist: certifi>=2024 ; extra == 'test' +Requires-Dist: pytest-randomly ; extra == 'test-randomorder' +Requires-Dist: sphinx>=5.3.0 ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme>=3.0.0 ; extra == 'docs' +Requires-Dist: sphinx-inline-tabs ; extra == 'docs' +Requires-Dist: pyenchant>=3 ; extra == 'docstest' +Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest' +Requires-Dist: build>=1.0.0 ; extra == 'sdist' +Requires-Dist: ruff>=0.11.11 ; extra == 'pep8test' +Requires-Dist: mypy>=1.14 ; extra == 'pep8test' +Requires-Dist: check-sdist ; extra == 
'pep8test' +Requires-Dist: click>=8.0.1 ; extra == 'pep8test' +Provides-Extra: ssh +Provides-Extra: nox +Provides-Extra: test +Provides-Extra: test-randomorder +Provides-Extra: docs +Provides-Extra: docstest +Provides-Extra: sdist +Provides-Extra: pep8test +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Author-email: The Python Cryptographic Authority and individual contributors +License-Expression: Apache-2.0 OR BSD-3-Clause +Requires-Python: >=3.8, !=3.9.0, !=3.9.1 +Description-Content-Type: text/x-rst; charset=UTF-8 +Project-URL: homepage, https://github.com/pyca/cryptography +Project-URL: documentation, https://cryptography.io/ +Project-URL: source, https://github.com/pyca/cryptography/ +Project-URL: issues, https://github.com/pyca/cryptography/issues +Project-URL: changelog, https://cryptography.io/en/latest/changelog/ + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg + :target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 3.8+ and PyPy3 7.3.11+. + +``cryptography`` includes both high level recipes and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests, and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + b'...' + >>> f.decrypt(token) + b'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +You can install ``cryptography`` with: + +.. code-block:: console + + $ pip install cryptography + +For full details see `the installation documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get +involved. + +Security +~~~~~~~~ + +Need to report a security issue? Please consult our `security reporting`_ +documentation. + + +.. _`documentation`: https://cryptography.io/ +.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev +.. 
_`security reporting`: https://cryptography.io/en/latest/security/ + diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/RECORD b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/RECORD new file mode 100644 index 0000000..e7774bd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/RECORD @@ -0,0 +1,180 @@ +cryptography-46.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography-46.0.3.dist-info/METADATA,sha256=bx2LyCEmOVUC8FH5hsGEZewWPiZoIIYTq0hM9mu9r4s,5748 +cryptography-46.0.3.dist-info/RECORD,, +cryptography-46.0.3.dist-info/WHEEL,sha256=jkxrJemT4jZpYSr-u9xPalWqoow8benNmiXfjKXLlJw,108 +cryptography-46.0.3.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 +cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-46.0.3.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography/__about__.py,sha256=QCLxNH_Abbygdc9RQGpUmrK14Wp3Cl_SEiB2byLwyxo,445 +cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364 +cryptography/__pycache__/__about__.cpython-312.pyc,, +cryptography/__pycache__/__init__.cpython-312.pyc,, +cryptography/__pycache__/exceptions.cpython-312.pyc,, +cryptography/__pycache__/fernet.cpython-312.pyc,, +cryptography/__pycache__/utils.cpython-312.pyc,, +cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 +cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963 +cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 +cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,, +cryptography/hazmat/_oid.py,sha256=p8ThjwJB56Ci_rAIrjyJ1f8VjgD6e39es2dh8JIUBOw,17240 +cryptography/hazmat/asn1/__init__.py,sha256=hS_EWx3wVvZzfbCcNV8hzcDnyMM8H-BhIoS1TipUosk,293 +cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc,, +cryptography/hazmat/asn1/asn1.py,sha256=eMEThEXa19LQjcyVofgHsW6tsZnjp3ddH7bWkkcxfLM,3860 +cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 +cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,, +cryptography/hazmat/backends/openssl/backend.py,sha256=tV5AxBoFJ2GfA0DMWSY-0TxQJrpQoexzI9R4Kybb--4,10215 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/bindings/_rust.abi3.so,sha256=4bUN0J2p_ZQMdgmAc9eL0VMj_lgbTsHUmX4doekVIJ4,12955672 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257 +cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 +cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 +cryptography/hazmat/bindings/_rust/declarative_asn1.pyi,sha256=2ECFmYue1EPkHEE2Bm7aLwkjB0mSUTpr23v9MN4pri4,892 +cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 
+cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020 +cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522 +cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688 +cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315 +cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 +cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 +cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 +cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 +cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532 +cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514 +cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984 +cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702 +cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=xXfFBb9QehHfDtEaxV_65Z0YK7NquOVIChpTLkgAs_k,2029 +cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912 +cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585 +cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 +cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523 +cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505 +cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605 +cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601 +cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=n9X0IQ6ICbdIi-ExdCFZoBgeY6njm3QOVAVZwDQdnbk,9784 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,, +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DMOpA_XN4l70zTc5_J9DpwlbQeUBRTWpfIJ4yRIn1-U,5791 +cryptography/hazmat/bindings/openssl/binding.py,sha256=x8eocEmukO4cm7cHqfVmOoYY7CCXdoF1v1WhZQt9neo,4610 +cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc,, +cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595 
+cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,, +cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,, +cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522 +cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,, +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=Vf5ig2PcS3PVnsb5N49Kx1uIkFBJyhg4BWXThDz5cug,12999 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=vQ6l6gOg9HqcbOsvHrSiJRVLdEj9L4m4HkRGYziTyFA,2854 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=ZnKOo2f34MCCOupC03Y1uR-_jiSG5IrelHEmxaME3D4,8303 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613 +cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 
+cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,, +cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Q7ZJwcsx83Mgxv5y7r6CyJKSdsOwC-my-5A67-ma2vw,3407 +cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318 +cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 +cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 +cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184 +cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,, +cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460 +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=M0lAEfRoc4kpp4-nwDj9yB-vNZukIOYEQrUlWsBNn9o,543 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=oZepvo4evhKkkJQWRDwaPoIbyTaFmDc5NPimxg6lfKg,9165 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999 +cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 +cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865 +cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705 +cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,, +cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104 +cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=hPV5obFznz0QhFfXFPOeQ8y6MsurA0xVMQiLnLESEs8,53700 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,, +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=bZAjFC5KVpfmF29qS_18vvpW3mKxmdiRALcusHhTTkg,4301 +cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257 +cryptography/x509/__pycache__/__init__.cpython-312.pyc,, +cryptography/x509/__pycache__/base.cpython-312.pyc,, +cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,, +cryptography/x509/__pycache__/extensions.cpython-312.pyc,, +cryptography/x509/__pycache__/general_name.cpython-312.pyc,, +cryptography/x509/__pycache__/name.cpython-312.pyc,, +cryptography/x509/__pycache__/ocsp.cpython-312.pyc,, +cryptography/x509/__pycache__/oid.cpython-312.pyc,, +cryptography/x509/__pycache__/verification.cpython-312.pyc,, +cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997 +cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797 +cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724 +cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 +cryptography/x509/name.py,sha256=ty0_xf0LnHwZAdEf-d8FLO1K4hGqx_7DsD3CHwoLJiY,15101 +cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699 +cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931 +cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958 diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/WHEEL b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/WHEEL new file mode 100644 index 0000000..8e48aa1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: maturin (1.9.4) +Root-Is-Purelib: false +Tag: cp311-abi3-manylinux_2_34_x86_64 + diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE new file mode 100644 index 0000000..b11f379 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. 
diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE new file mode 100644 index 0000000..62589ed --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD new file mode 100644 index 0000000..ec1a29d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography-46.0.3.dist-info/licenses/LICENSE.BSD @@ -0,0 +1,27 @@ +Copyright (c) Individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/cryptography/__about__.py b/venv/lib/python3.12/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..a811628 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/__about__.py @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
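+# Descriptive note (editorial comment, not in the upstream file): this module
+# is the package's single source of version metadata; cryptography/__init__.py
+# re-exports __author__, __copyright__, and __version__ from here.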
+ +from __future__ import annotations + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] + +__version__ = "46.0.3" + + +__author__ = "The Python Cryptographic Authority and individual contributors" +__copyright__ = f"Copyright 2013-2025 {__author__}" diff --git a/venv/lib/python3.12/site-packages/cryptography/__init__.py b/venv/lib/python3.12/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..d374f75 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.__about__ import __author__, __copyright__, __version__ + +__all__ = [ + "__author__", + "__copyright__", + "__version__", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc new file mode 100644 index 0000000..b5b1fae Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c742b33 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..b8a3bae Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc new file mode 100644 index 0000000..ed3ab67 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..6598244 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/exceptions.py b/venv/lib/python3.12/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..fe125ea --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/exceptions.py @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
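+# Descriptive note (editorial comment, not in the upstream file): shared
+# exception types for the library. UnsupportedAlgorithm carries an optional
+# _Reasons value (re-exported below from the Rust bindings) explaining why an
+# algorithm is unavailable, and InternalError preserves the raw OpenSSL error
+# queue in its err_code attribute.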
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions + +if typing.TYPE_CHECKING: + from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +_Reasons = rust_exceptions._Reasons + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message: str, reason: _Reasons | None = None) -> None: + super().__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__( + self, msg: str, err_code: list[rust_openssl.OpenSSLError] + ) -> None: + super().__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/cryptography/fernet.py b/venv/lib/python3.12/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..c6744ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/fernet.py @@ -0,0 +1,224 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import base64 +import binascii +import os +import time +import typing +from collections.abc import Iterable + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet: + def __init__( + self, + key: bytes | str, + backend: typing.Any = None, + ) -> None: + try: + key = base64.urlsafe_b64decode(key) + except binascii.Error as exc: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." + ) from exc + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." 
+ ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + + @classmethod + def generate_key(cls) -> bytes: + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data: bytes) -> bytes: + return self.encrypt_at_time(data, int(time.time())) + + def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts( + self, data: bytes, current_time: int, iv: bytes + ) -> bytes: + utils._check_bytes("data", data) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), + modes.CBC(iv), + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + + current_time.to_bytes(length=8, byteorder="big") + + iv + + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(token) + if ttl is None: + time_info = None + else: + time_info = (ttl, int(time.time())) + return self._decrypt_data(data, timestamp, time_info) + + def decrypt_at_time( + self, token: bytes | str, ttl: int, current_time: int + ) -> bytes: + if ttl is None: + raise ValueError( + "decrypt_at_time() can only be used with a non-None ttl" + ) + timestamp, data = Fernet._get_unverified_token_data(token) + return self._decrypt_data(data, timestamp, (ttl, current_time)) + + def extract_timestamp(self, token: bytes | str) -> int: + timestamp, data = Fernet._get_unverified_token_data(token) + # Verify the token was not tampered with. 
+ self._verify_signature(data) + return timestamp + + @staticmethod + def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: + if not isinstance(token, (str, bytes)): + raise TypeError("token must be bytes or str") + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or data[0] != 0x80: + raise InvalidToken + + if len(data) < 9: + raise InvalidToken + + timestamp = int.from_bytes(data[1:9], byteorder="big") + return timestamp, data + + def _verify_signature(self, data: bytes) -> None: + h = HMAC(self._signing_key, hashes.SHA256()) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + def _decrypt_data( + self, + data: bytes, + timestamp: int, + time_info: tuple[int, int] | None, + ) -> bytes: + if time_info is not None: + ttl, current_time = time_info + if timestamp + ttl < current_time: + raise InvalidToken + + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + + self._verify_signature(data) + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv) + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet: + def __init__(self, fernets: Iterable[Fernet]): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg: bytes) -> bytes: + return self.encrypt_at_time(msg, int(time.time())) + + def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: + return self._fernets[0].encrypt_at_time(msg, current_time) + + def rotate(self, msg: bytes | str) -> bytes: + timestamp, data = Fernet._get_unverified_token_data(msg) + for f in self._fernets: + try: + p = f._decrypt_data(data, timestamp, None) + break + except InvalidToken: + pass + else: + raise InvalidToken + + iv = os.urandom(16) + return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) + + def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken + + def decrypt_at_time( + self, msg: bytes | str, ttl: int, current_time: int + ) -> bytes: + for f in self._fernets: + try: + return f.decrypt_at_time(msg, ttl, current_time) + except InvalidToken: + pass + raise InvalidToken + + def extract_timestamp(self, msg: bytes | str) -> int: + for f in self._fernets: + try: + return f.extract_timestamp(msg) + except InvalidToken: + pass + raise InvalidToken diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..b9f1187 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +""" +Hazardous Materials + +This is a "Hazardous Materials" module. 
You should ONLY use it if you're +100% absolutely sure that you know what you're doing because this module +is full of land mines, dragons, and dinosaurs with laser guns. +""" diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..7f2bf46 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc new file mode 100644 index 0000000..fb1120b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py new file mode 100644 index 0000000..4bf138d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py @@ -0,0 +1,356 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import ( + ObjectIdentifier as ObjectIdentifier, +) +from cryptography.hazmat.primitives import hashes + + +class ExtensionOID: + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") + PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( + "1.3.6.1.4.1.11129.2.4.2" + ) + PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") + SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") + MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") + ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3") + + +class OCSPExtensionOID: + NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") + ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") + + +class CRLEntryExtensionOID: + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class 
NameOID: + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + STREET_ADDRESS = ObjectIdentifier("2.5.4.9") + ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + INITIALS = ObjectIdentifier("2.5.4.43") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") + POSTAL_CODE = ObjectIdentifier("2.5.4.17") + INN = ObjectIdentifier("1.2.643.3.131.1.1") + OGRN = ObjectIdentifier("1.2.643.100.1") + SNILS = ObjectIdentifier("1.2.643.100.3") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +class SignatureAlgorithmOID: + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + # This is an alternate OID for RSA with SHA1 that is occasionally seen + _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") + RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") + RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") + RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") + ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") + ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") + ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") + DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") + GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") + 
GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") + + +_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { + SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), + SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), + SignatureAlgorithmOID.ED25519: None, + SignatureAlgorithmOID.ED448: None, + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, +} + + +class HashAlgorithmOID: + SHA1 = ObjectIdentifier("1.3.14.3.2.26") + SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4") + SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1") + SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2") + SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3") + SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224") + SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256") + SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384") + SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512") + SHA3_224_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.7") + SHA3_256_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.8") + SHA3_384_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.9") + SHA3_512_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.10") + + +class PublicKeyAlgorithmOID: + DSA = ObjectIdentifier("1.2.840.10040.4.1") + EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") + RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") + RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") + X25519 = ObjectIdentifier("1.3.101.110") + X448 = ObjectIdentifier("1.3.101.111") + ED25519 = ObjectIdentifier("1.3.101.112") + ED448 = ObjectIdentifier("1.3.101.113") + + +class ExtendedKeyUsageOID: + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") + SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") + KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") 
+ IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") + BUNDLE_SECURITY = ObjectIdentifier("1.3.6.1.5.5.7.3.35") + CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") + + +class OtherNameFormOID: + PERMANENT_IDENTIFIER = ObjectIdentifier("1.3.6.1.5.5.7.8.3") + HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4") + DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7") + NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8") + SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9") + ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10") + BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11") + + +class AuthorityInformationAccessOID: + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class SubjectInformationAccessOID: + CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") + + +class CertificatePoliciesOID: + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + + +class AttributeOID: + CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") + UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") + + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.STREET_ADDRESS: "streetAddress", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.USER_ID: "userID", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + NameOID.POSTAL_ADDRESS: "postalAddress", + NameOID.POSTAL_CODE: "postalCode", + NameOID.INN: "INN", + NameOID.OGRN: "OGRN", + NameOID.SNILS: "SNILS", + NameOID.UNSTRUCTURED_NAME: "unstructuredName", + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + SignatureAlgorithmOID.ED25519: "ed25519", + SignatureAlgorithmOID.ED448: "ed448", + SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( + "GOST R 34.11-94 with 
GOST R 34.10-2001" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" + ), + SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( + "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" + ), + HashAlgorithmOID.SHA1: "sha1", + HashAlgorithmOID.SHA224: "sha224", + HashAlgorithmOID.SHA256: "sha256", + HashAlgorithmOID.SHA384: "sha384", + HashAlgorithmOID.SHA512: "sha512", + HashAlgorithmOID.SHA3_224: "sha3_224", + HashAlgorithmOID.SHA3_256: "sha3_256", + HashAlgorithmOID.SHA3_384: "sha3_384", + HashAlgorithmOID.SHA3_512: "sha3_512", + HashAlgorithmOID.SHA3_224_NIST: "sha3_224", + HashAlgorithmOID.SHA3_256_NIST: "sha3_256", + HashAlgorithmOID.SHA3_384_NIST: "sha3_384", + HashAlgorithmOID.SHA3_512_NIST: "sha3_512", + PublicKeyAlgorithmOID.DSA: "dsaEncryption", + PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", + PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", + PublicKeyAlgorithmOID.X25519: "X25519", + PublicKeyAlgorithmOID.X448: "X448", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", + ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( + "signedCertificateTimestampList" + ), + ExtensionOID.PRECERT_POISON: "ctPoison", + ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", + ExtensionOID.ADMISSIONS: "Admissions", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", + ExtensionOID.TLS_FEATURE: "TLSFeature", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", + 
OCSPExtensionOID.NONCE: "OCSPNonce", + AttributeOID.CHALLENGE_PASSWORD: "challengePassword", +} diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__init__.py new file mode 100644 index 0000000..be68373 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.asn1.asn1 import encode_der, sequence + +__all__ = [ + "encode_der", + "sequence", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3778c45 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc new file mode 100644 index 0000000..badcd10 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/asn1.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/asn1.py new file mode 100644 index 0000000..dedad6f --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/asn1/asn1.py @@ -0,0 +1,116 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import dataclasses +import sys +import typing + +if sys.version_info < (3, 11): + import typing_extensions + + # We use the `include_extras` parameter of `get_type_hints`, which was + # added in Python 3.9. This can be replaced by the `typing` version + # once the min version is >= 3.9 + if sys.version_info < (3, 9): + get_type_hints = typing_extensions.get_type_hints + else: + get_type_hints = typing.get_type_hints +else: + get_type_hints = typing.get_type_hints + +from cryptography.hazmat.bindings._rust import declarative_asn1 + +T = typing.TypeVar("T", covariant=True) +U = typing.TypeVar("U") + + +encode_der = declarative_asn1.encode_der + + +def _normalize_field_type( + field_type: typing.Any, field_name: str +) -> declarative_asn1.AnnotatedType: + annotation = declarative_asn1.Annotation() + + if hasattr(field_type, "__asn1_root__"): + annotated_root = field_type.__asn1_root__ + if not isinstance(annotated_root, declarative_asn1.AnnotatedType): + raise TypeError(f"unsupported root type: {annotated_root}") + return annotated_root + else: + rust_field_type = declarative_asn1.non_root_python_to_rust(field_type) + + return declarative_asn1.AnnotatedType(rust_field_type, annotation) + + +def _annotate_fields( + raw_fields: dict[str, type], +) -> dict[str, declarative_asn1.AnnotatedType]: + fields = {} + for field_name, field_type in raw_fields.items(): + # Recursively normalize the field type into something that the + # Rust code can understand. 
+ annotated_field_type = _normalize_field_type(field_type, field_name) + fields[field_name] = annotated_field_type + + return fields + + +def _register_asn1_sequence(cls: type[U]) -> None: + raw_fields = get_type_hints(cls, include_extras=True) + root = declarative_asn1.AnnotatedType( + declarative_asn1.Type.Sequence(cls, _annotate_fields(raw_fields)), + declarative_asn1.Annotation(), + ) + + setattr(cls, "__asn1_root__", root) + + +# Due to https://github.com/python/mypy/issues/19731, we can't define an alias +# for `dataclass_transform` that conditionally points to `typing` or +# `typing_extensions` depending on the Python version (like we do for +# `get_type_hints`). +# We work around it by making the whole decorated class conditional on the +# Python version. +if sys.version_info < (3, 11): + + @typing_extensions.dataclass_transform(kw_only_default=True) + def sequence(cls: type[U]) -> type[U]: + # We use `dataclasses.dataclass` to add an __init__ method + # to the class with keyword-only parameters. + if sys.version_info >= (3, 10): + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + # `match_args` was added in Python 3.10 and defaults + # to True + match_args=False, + # `kw_only` was added in Python 3.10 and defaults to + # False + kw_only=True, + )(cls) + else: + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + )(cls) + _register_asn1_sequence(dataclass_cls) + return dataclass_cls + +else: + + @typing.dataclass_transform(kw_only_default=True) + def sequence(cls: type[U]) -> type[U]: + # Only add an __init__ method, with keyword-only + # parameters. + dataclass_cls = dataclasses.dataclass( + repr=False, + eq=False, + match_args=False, + kw_only=True, + )(cls) + _register_asn1_sequence(dataclass_cls) + return dataclass_cls diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..b4400aa --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from typing import Any + + +def default_backend() -> Any: + from cryptography.hazmat.backends.openssl.backend import backend + + return backend diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..da33775 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..51b0447 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.backends.openssl.backend import backend + +__all__ = ["backend"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b6033f9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc new file mode 100644 index 0000000..08c3105 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..248b8c5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,302 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, + OAEP, + PSS, + PKCS1v15, +) +from cryptography.hazmat.primitives.ciphers import ( + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, + Mode, +) + + +class Backend: + """ + OpenSSL API binding interfaces. + """ + + name = "openssl" + + # TripleDES encryption is disallowed/deprecated throughout 2023 in + # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). + _fips_ciphers = (AES,) + # Sometimes SHA1 is still permissible. That logic is contained + # within the various *_supported methods. 
+ _fips_hashes = (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+ hashes.SHAKE128,
+ hashes.SHAKE256,
+ )
+ _fips_ecdh_curves = (
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+ ec.SECP521R1,
+ )
+ _fips_rsa_min_key_size = 2048
+ _fips_rsa_min_public_exponent = 65537
+ _fips_dsa_min_modulus = 1 << 2048
+ _fips_dh_min_key_size = 2048
+ _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
+
+ def __init__(self) -> None:
+ self._binding = binding.Binding()
+ self._ffi = self._binding.ffi
+ self._lib = self._binding.lib
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ def __repr__(self) -> str:
+ return (
+ f"<OpenSSLBackend(version: {self.openssl_version_text()}, "
+ f"FIPS: {self._fips_enabled})>"
+ )
+
+ def openssl_assert(self, ok: bool) -> None:
+ return binding._openssl_assert(ok)
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ rust_openssl.enable_fips(rust_openssl._providers)
+ assert rust_openssl.is_fips_enabled()
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ def openssl_version_text(self) -> str:
+ """
+ Friendly string name of the loaded OpenSSL library. This is not
+ necessarily the same version as it was compiled against.
+
+ Example: OpenSSL 3.2.1 30 Jan 2024
+ """
+ return rust_openssl.openssl_version_text()
+
+ def openssl_version_number(self) -> int:
+ return rust_openssl.openssl_version()
+
+ def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ return rust_openssl.hashes.hash_supported(algorithm)
+
+ def signature_hash_supported(
+ self, algorithm: hashes.HashAlgorithm
+ ) -> bool:
+ # Dedicated check for hashing algorithm use in message digest for
+ # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption).
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+ return self.hash_supported(algorithm)
+
+ def scrypt_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ else:
+ return hasattr(rust_openssl.kdf.Scrypt, "derive")
+
+ def argon2_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ else:
+ return hasattr(rust_openssl.kdf.Argon2id, "derive")
+
+ def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ # FIPS mode still allows SHA1 for HMAC
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return True
+ if rust_openssl.CRYPTOGRAPHY_IS_AWSLC:
+ return isinstance(
+ algorithm,
+ (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ ),
+ )
+ return self.hash_supported(algorithm)
+
+ def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool:
+ if self._fips_enabled:
+ # FIPS mode requires AES. TripleDES is disallowed/deprecated in
+ # FIPS 140-3.
+ if not isinstance(cipher, self._fips_ciphers): + return False + + return rust_openssl.ciphers.cipher_supported(cipher, mode) + + def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + return self.hmac_supported(algorithm) + + def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: + return rust_openssl.capture_error_stack() + + def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if self._fips_enabled and isinstance(algorithm, hashes.SHA1): + return False + + return isinstance( + algorithm, + ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ) + + def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + # FIPS 186-4 only allows salt length == digest length for PSS + # It is technically acceptable to set an explicit salt length + # equal to the digest length and this will incorrectly fail, but + # since we don't do that in the tests and this method is + # private, we'll ignore that until we need to do otherwise. + if ( + self._fips_enabled + and padding._salt_length != PSS.DIGEST_LENGTH + ): + return False + return self.hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return self._oaep_hash_supported( + padding._mgf._algorithm + ) and self._oaep_hash_supported(padding._algorithm) + else: + return False + + def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: + if self._fips_enabled and isinstance(padding, PKCS1v15): + return False + else: + return self.rsa_padding_supported(padding) + + def dsa_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not self._fips_enabled + ) + + def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: + if not self.dsa_supported(): + return False + return self.signature_hash_supported(algorithm) + + def cmac_algorithm_supported(self, algorithm) -> bool: + return self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + + def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: + if self._fips_enabled and not isinstance( + curve, self._fips_ecdh_curves + ): + return False + + return rust_openssl.ec.curve_supported(curve) + + def elliptic_curve_signature_algorithm_supported( + self, + signature_algorithm: ec.EllipticCurveSignatureAlgorithm, + curve: ec.EllipticCurve, + ) -> bool: + # We only support ECDSA right now. 
+ if not isinstance(signature_algorithm, ec.ECDSA): + return False + + return self.elliptic_curve_supported(curve) and ( + isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) + or self.hash_supported(signature_algorithm.algorithm) + ) + + def elliptic_curve_exchange_algorithm_supported( + self, algorithm: ec.ECDH, curve: ec.EllipticCurve + ) -> bool: + return self.elliptic_curve_supported(curve) and isinstance( + algorithm, ec.ECDH + ) + + def dh_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def dh_x942_serialization_supported(self) -> bool: + return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 + + def x25519_supported(self) -> bool: + return not self._fips_enabled + + def x448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def ed25519_supported(self) -> bool: + return not self._fips_enabled + + def ed448_supported(self) -> bool: + if self._fips_enabled: + return False + return ( + not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + def ecdsa_deterministic_supported(self) -> bool: + return ( + rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER + and not self._fips_enabled + ) + + def poly1305_supported(self) -> bool: + return not self._fips_enabled + + def pkcs7_supported(self) -> bool: + return ( + not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not rust_openssl.CRYPTOGRAPHY_IS_AWSLC + ) + + +backend = Backend() diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9f7a36e Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so new file mode 100644 index 0000000..f088397 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi new file mode 100644 index 0000000..2f4eef4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + +from cryptography.hazmat.primitives import padding +from cryptography.utils import Buffer + +class PKCS7PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ANSIX923PaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class PKCS7UnpaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ANSIX923UnpaddingContext(padding.PaddingContext): + def __init__(self, block_size: int) -> None: ... + def update(self, data: Buffer) -> bytes: ... + def finalize(self) -> bytes: ... + +class ObjectIdentifier: + def __init__(self, value: str) -> None: ... + @property + def dotted_string(self) -> str: ... + @property + def _name(self) -> str: ... + +T = typing.TypeVar("T") diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi new file mode 100644 index 0000000..8010008 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi @@ -0,0 +1,8 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +lib = typing.Any +ffi = typing.Any diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi new file mode 100644 index 0000000..3b5f208 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi @@ -0,0 +1,7 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... +def encode_dss_signature(r: int, s: int) -> bytes: ... +def parse_spki_for_data(data: bytes) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi new file mode 100644 index 0000000..8563c11 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/declarative_asn1.pyi @@ -0,0 +1,32 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +import typing + +def encode_der(value: typing.Any) -> bytes: ... +def non_root_python_to_rust(cls: type) -> Type: ... + +# Type is a Rust enum with tuple variants. For now, we express the type +# annotations like this: +class Type: + Sequence: typing.ClassVar[type] + PyInt: typing.ClassVar[type] + +class Annotation: + def __new__( + cls, + ) -> Annotation: ... + +class AnnotatedType: + inner: Type + annotation: Annotation + + def __new__(cls, inner: Type, annotation: Annotation) -> AnnotatedType: ... 
+ +class AnnotatedTypeObject: + annotated_type: AnnotatedType + value: typing.Any + + def __new__( + cls, annotated_type: AnnotatedType, value: typing.Any + ) -> AnnotatedTypeObject: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi new file mode 100644 index 0000000..09f46b1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi @@ -0,0 +1,17 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +class _Reasons: + BACKEND_MISSING_INTERFACE: _Reasons + UNSUPPORTED_HASH: _Reasons + UNSUPPORTED_CIPHER: _Reasons + UNSUPPORTED_PADDING: _Reasons + UNSUPPORTED_MGF: _Reasons + UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons + UNSUPPORTED_ELLIPTIC_CURVE: _Reasons + UNSUPPORTED_SERIALIZATION: _Reasons + UNSUPPORTED_X509: _Reasons + UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons + UNSUPPORTED_DIFFIE_HELLMAN: _Reasons + UNSUPPORTED_MAC: _Reasons diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi new file mode 100644 index 0000000..103e96c --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi @@ -0,0 +1,117 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import datetime +from collections.abc import Iterator + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.x509 import ocsp + +class OCSPRequest: + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + @property + def extensions(self) -> x509.Extensions: ... + +class OCSPResponse: + @property + def responses(self) -> Iterator[OCSPSingleResponse]: ... + @property + def response_status(self) -> ocsp.OCSPResponseStatus: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_response_bytes(self) -> bytes: ... + @property + def certificates(self) -> list[x509.Certificate]: ... + @property + def responder_key_hash(self) -> bytes | None: ... + @property + def responder_name(self) -> x509.Name | None: ... + @property + def produced_at(self) -> datetime.datetime: ... + @property + def produced_at_utc(self) -> datetime.datetime: ... + @property + def certificate_status(self) -> ocsp.OCSPCertStatus: ... + @property + def revocation_time(self) -> datetime.datetime | None: ... + @property + def revocation_time_utc(self) -> datetime.datetime | None: ... + @property + def revocation_reason(self) -> x509.ReasonFlags | None: ... + @property + def this_update(self) -> datetime.datetime: ... + @property + def this_update_utc(self) -> datetime.datetime: ... 
+ @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def single_extensions(self) -> x509.Extensions: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + +class OCSPSingleResponse: + @property + def certificate_status(self) -> ocsp.OCSPCertStatus: ... + @property + def revocation_time(self) -> datetime.datetime | None: ... + @property + def revocation_time_utc(self) -> datetime.datetime | None: ... + @property + def revocation_reason(self) -> x509.ReasonFlags | None: ... + @property + def this_update(self) -> datetime.datetime: ... + @property + def this_update_utc(self) -> datetime.datetime: ... + @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def issuer_key_hash(self) -> bytes: ... + @property + def issuer_name_hash(self) -> bytes: ... + @property + def hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def serial_number(self) -> int: ... + +def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... +def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... +def create_ocsp_request( + builder: ocsp.OCSPRequestBuilder, +) -> ocsp.OCSPRequest: ... +def create_ocsp_response( + status: ocsp.OCSPResponseStatus, + builder: ocsp.OCSPResponseBuilder | None, + private_key: PrivateKeyTypes | None, + hash_algorithm: hashes.HashAlgorithm | None, +) -> ocsp.OCSPResponse: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi new file mode 100644 index 0000000..5fb3cb2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi @@ -0,0 +1,75 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.bindings._rust.openssl import ( + aead, + ciphers, + cmac, + dh, + dsa, + ec, + ed448, + ed25519, + hashes, + hmac, + kdf, + keys, + poly1305, + rsa, + x448, + x25519, +) + +__all__ = [ + "aead", + "ciphers", + "cmac", + "dh", + "dsa", + "ec", + "ed448", + "ed25519", + "hashes", + "hmac", + "kdf", + "keys", + "openssl_version", + "openssl_version_text", + "poly1305", + "raise_openssl_error", + "rsa", + "x448", + "x25519", +] + +CRYPTOGRAPHY_IS_LIBRESSL: bool +CRYPTOGRAPHY_IS_BORINGSSL: bool +CRYPTOGRAPHY_IS_AWSLC: bool +CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_309_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_330_OR_GREATER: bool +CRYPTOGRAPHY_OPENSSL_350_OR_GREATER: bool + +class Providers: ... + +_legacy_provider_loaded: bool +_providers: Providers + +def openssl_version() -> int: ... +def openssl_version_text() -> str: ... +def raise_openssl_error() -> typing.NoReturn: ... +def capture_error_stack() -> list[OpenSSLError]: ... +def is_fips_enabled() -> bool: ... +def enable_fips(providers: Providers) -> None: ... 
+ +class OpenSSLError: + @property + def lib(self) -> int: ... + @property + def reason(self) -> int: ... + @property + def reason_text(self) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi new file mode 100644 index 0000000..831fcd1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi @@ -0,0 +1,107 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from collections.abc import Sequence + +from cryptography.utils import Buffer + +class AESGCM: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class ChaCha20Poly1305: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key() -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESCCM: + def __init__(self, key: Buffer, tag_length: int = 16) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESSIV: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + data: Buffer, + associated_data: Sequence[Buffer] | None, + ) -> bytes: ... + def decrypt( + self, + data: Buffer, + associated_data: Sequence[Buffer] | None, + ) -> bytes: ... + +class AESOCB3: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + +class AESGCMSIV: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_key(bit_length: int) -> bytes: ... + def encrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... + def decrypt( + self, + nonce: Buffer, + data: Buffer, + associated_data: Buffer | None, + ) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi new file mode 100644 index 0000000..a48fb01 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi @@ -0,0 +1,38 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +import typing + +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADEncryptionContext: ... +@typing.overload +def create_encryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None +) -> ciphers.CipherContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag +) -> ciphers.AEADDecryptionContext: ... +@typing.overload +def create_decryption_ctx( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None +) -> ciphers.CipherContext: ... +def cipher_supported( + algorithm: ciphers.CipherAlgorithm, mode: modes.Mode +) -> bool: ... +def _advance( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... +def _advance_aad( + ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int +) -> None: ... + +class CipherContext: ... +class AEADEncryptionContext: ... +class AEADDecryptionContext: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi new file mode 100644 index 0000000..9c03508 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi @@ -0,0 +1,18 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import ciphers + +class CMAC: + def __init__( + self, + algorithm: ciphers.BlockCipherAlgorithm, + backend: typing.Any = None, + ) -> None: ... + def update(self, data: bytes) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> CMAC: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi new file mode 100644 index 0000000..08733d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi @@ -0,0 +1,51 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dh + +MIN_MODULUS_SIZE: int + +class DHPrivateKey: ... +class DHPublicKey: ... +class DHParameters: ... + +class DHPrivateNumbers: + def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... + def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DHPublicNumbers: ... + +class DHPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DHParameterNumbers + ) -> None: ... + def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DHParameterNumbers: ... + +class DHParameterNumbers: + def __init__(self, p: int, g: int, q: int | None = None) -> None: ... + def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... + @property + def p(self) -> int: ... 
+ @property + def g(self) -> int: ... + @property + def q(self) -> int | None: ... + +def generate_parameters( + generator: int, key_size: int, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_pem_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... +def from_der_parameters( + data: bytes, backend: typing.Any = None +) -> dh.DHParameters: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi new file mode 100644 index 0000000..0922a4c --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi @@ -0,0 +1,41 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import dsa + +class DSAPrivateKey: ... +class DSAPublicKey: ... +class DSAParameters: ... + +class DSAPrivateNumbers: + def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... + @property + def x(self) -> int: ... + @property + def public_numbers(self) -> DSAPublicNumbers: ... + def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... + +class DSAPublicNumbers: + def __init__( + self, y: int, parameter_numbers: DSAParameterNumbers + ) -> None: ... + @property + def y(self) -> int: ... + @property + def parameter_numbers(self) -> DSAParameterNumbers: ... + def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... + +class DSAParameterNumbers: + def __init__(self, p: int, q: int, g: int) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def g(self) -> int: ... + def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... + +def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi new file mode 100644 index 0000000..5c3b7bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import ec + +class ECPrivateKey: ... +class ECPublicKey: ... + +class EllipticCurvePrivateNumbers: + def __init__( + self, private_value: int, public_numbers: EllipticCurvePublicNumbers + ) -> None: ... + def private_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePrivateKey: ... + @property + def private_value(self) -> int: ... + @property + def public_numbers(self) -> EllipticCurvePublicNumbers: ... + +class EllipticCurvePublicNumbers: + def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... + def public_key( + self, backend: typing.Any = None + ) -> ec.EllipticCurvePublicKey: ... + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + @property + def curve(self) -> ec.EllipticCurve: ... + def __eq__(self, other: object) -> bool: ... + +def curve_supported(curve: ec.EllipticCurve) -> bool: ... 
+def generate_private_key( + curve: ec.EllipticCurve, backend: typing.Any = None +) -> ec.EllipticCurvePrivateKey: ... +def from_private_numbers( + numbers: ec.EllipticCurvePrivateNumbers, +) -> ec.EllipticCurvePrivateKey: ... +def from_public_numbers( + numbers: ec.EllipticCurvePublicNumbers, +) -> ec.EllipticCurvePublicKey: ... +def from_public_bytes( + curve: ec.EllipticCurve, data: bytes +) -> ec.EllipticCurvePublicKey: ... +def derive_private_key( + private_value: int, curve: ec.EllipticCurve +) -> ec.EllipticCurvePrivateKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi new file mode 100644 index 0000000..f85b3d1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed25519 +from cryptography.utils import Buffer + +class Ed25519PrivateKey: ... +class Ed25519PublicKey: ... + +def generate_key() -> ed25519.Ed25519PrivateKey: ... +def from_private_bytes(data: Buffer) -> ed25519.Ed25519PrivateKey: ... +def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi new file mode 100644 index 0000000..c8ca0ec --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import ed448 +from cryptography.utils import Buffer + +class Ed448PrivateKey: ... +class Ed448PublicKey: ... + +def generate_key() -> ed448.Ed448PrivateKey: ... +def from_private_bytes(data: Buffer) -> ed448.Ed448PrivateKey: ... +def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi new file mode 100644 index 0000000..6bfd295 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi @@ -0,0 +1,28 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.utils import Buffer + +class Hash(hashes.HashContext): + def __init__( + self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def copy(self) -> Hash: ... + +def hash_supported(algorithm: hashes.HashAlgorithm) -> bool: ... + +class XOFHash: + def __init__(self, algorithm: hashes.ExtendableOutputFunction) -> None: ... + @property + def algorithm(self) -> hashes.ExtendableOutputFunction: ... 
+ def update(self, data: Buffer) -> None: ... + def squeeze(self, length: int) -> bytes: ... + def copy(self) -> XOFHash: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi new file mode 100644 index 0000000..3883d1b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi @@ -0,0 +1,22 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives import hashes +from cryptography.utils import Buffer + +class HMAC(hashes.HashContext): + def __init__( + self, + key: Buffer, + algorithm: hashes.HashAlgorithm, + backend: typing.Any = None, + ) -> None: ... + @property + def algorithm(self) -> hashes.HashAlgorithm: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, signature: bytes) -> None: ... + def copy(self) -> HMAC: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi new file mode 100644 index 0000000..9e2d8d9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi @@ -0,0 +1,72 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.hashes import HashAlgorithm +from cryptography.utils import Buffer + +def derive_pbkdf2_hmac( + key_material: Buffer, + algorithm: HashAlgorithm, + salt: bytes, + iterations: int, + length: int, +) -> bytes: ... + +class Scrypt: + def __init__( + self, + salt: bytes, + length: int, + n: int, + r: int, + p: int, + backend: typing.Any = None, + ) -> None: ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + +class Argon2id: + def __init__( + self, + *, + salt: bytes, + length: int, + iterations: int, + lanes: int, + memory_cost: int, + ad: bytes | None = None, + secret: bytes | None = None, + ) -> None: ... + def derive(self, key_material: bytes) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + def derive_phc_encoded(self, key_material: bytes) -> str: ... + @classmethod + def verify_phc_encoded( + cls, key_material: bytes, phc_encoded: str, secret: bytes | None = None + ) -> None: ... + +class HKDF: + def __init__( + self, + algorithm: HashAlgorithm, + length: int, + salt: bytes | None, + info: bytes | None, + backend: typing.Any = None, + ): ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... + +class HKDFExpand: + def __init__( + self, + algorithm: HashAlgorithm, + length: int, + info: bytes | None, + backend: typing.Any = None, + ): ... + def derive(self, key_material: Buffer) -> bytes: ... + def verify(self, key_material: bytes, expected_key: bytes) -> None: ... 
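Aside: the kdf.pyi stubs above only type the Rust-backed primitives; application code normally reaches them through the public wrappers in cryptography.hazmat.primitives.kdf. A minimal sketch of deriving a 32-byte key with PBKDF2-HMAC-SHA256 — illustrative only, not part of this commit; the passphrase and iteration count are placeholders:

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)  # persist the salt next to the derived key
kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(),
    length=32,               # bytes of key material to produce
    salt=salt,
    iterations=600_000,      # placeholder; tune to your latency budget
)
key = kdf.derive(b"example-passphrase")  # a KDF instance can derive only once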
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi new file mode 100644 index 0000000..404057e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi @@ -0,0 +1,34 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric.types import ( + PrivateKeyTypes, + PublicKeyTypes, +) +from cryptography.utils import Buffer + +def load_der_private_key( + data: Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_pem_private_key( + data: Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> PrivateKeyTypes: ... +def load_der_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... +def load_pem_public_key( + data: bytes, + backend: typing.Any = None, +) -> PublicKeyTypes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi new file mode 100644 index 0000000..45a2a39 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.utils import Buffer + +class Poly1305: + def __init__(self, key: Buffer) -> None: ... + @staticmethod + def generate_tag(key: Buffer, data: Buffer) -> bytes: ... + @staticmethod + def verify_tag(key: Buffer, data: Buffer, tag: bytes) -> None: ... + def update(self, data: Buffer) -> None: ... + def finalize(self) -> bytes: ... + def verify(self, tag: bytes) -> None: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi new file mode 100644 index 0000000..ef7752d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + +from cryptography.hazmat.primitives.asymmetric import rsa + +class RSAPrivateKey: ... +class RSAPublicKey: ... + +class RSAPrivateNumbers: + def __init__( + self, + p: int, + q: int, + d: int, + dmp1: int, + dmq1: int, + iqmp: int, + public_numbers: RSAPublicNumbers, + ) -> None: ... + @property + def p(self) -> int: ... + @property + def q(self) -> int: ... + @property + def d(self) -> int: ... + @property + def dmp1(self) -> int: ... + @property + def dmq1(self) -> int: ... + @property + def iqmp(self) -> int: ... + @property + def public_numbers(self) -> RSAPublicNumbers: ... + def private_key( + self, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, + ) -> rsa.RSAPrivateKey: ... + +class RSAPublicNumbers: + def __init__(self, e: int, n: int) -> None: ... 
+ @property + def n(self) -> int: ... + @property + def e(self) -> int: ... + def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... + +def generate_private_key( + public_exponent: int, + key_size: int, +) -> rsa.RSAPrivateKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi new file mode 100644 index 0000000..38d2add --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x25519 +from cryptography.utils import Buffer + +class X25519PrivateKey: ... +class X25519PublicKey: ... + +def generate_key() -> x25519.X25519PrivateKey: ... +def from_private_bytes(data: Buffer) -> x25519.X25519PrivateKey: ... +def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi new file mode 100644 index 0000000..3ac0980 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.primitives.asymmetric import x448 +from cryptography.utils import Buffer + +class X448PrivateKey: ... +class X448PublicKey: ... + +def generate_key() -> x448.X448PrivateKey: ... +def from_private_bytes(data: Buffer) -> x448.X448PrivateKey: ... +def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi new file mode 100644 index 0000000..b25becb --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi @@ -0,0 +1,52 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes +from cryptography.hazmat.primitives.serialization import ( + KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.pkcs12 import ( + PKCS12KeyAndCertificates, + PKCS12PrivateKeyTypes, +) +from cryptography.utils import Buffer + +class PKCS12Certificate: + def __init__( + self, cert: x509.Certificate, friendly_name: bytes | None + ) -> None: ... + @property + def friendly_name(self) -> bytes | None: ... + @property + def certificate(self) -> x509.Certificate: ... + +def load_key_and_certificates( + data: Buffer, + password: Buffer | None, + backend: typing.Any = None, +) -> tuple[ + PrivateKeyTypes | None, + x509.Certificate | None, + list[x509.Certificate], +]: ... +def load_pkcs12( + data: bytes, + password: bytes | None, + backend: typing.Any = None, +) -> PKCS12KeyAndCertificates: ... 
+def serialize_java_truststore( + certs: Iterable[PKCS12Certificate], + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... +def serialize_key_and_certificates( + name: bytes | None, + key: PKCS12PrivateKeyTypes | None, + cert: x509.Certificate | None, + cas: Iterable[x509.Certificate | PKCS12Certificate] | None, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi new file mode 100644 index 0000000..358b135 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi @@ -0,0 +1,50 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.serialization import pkcs7 + +def serialize_certificates( + certs: list[x509.Certificate], + encoding: serialization.Encoding, +) -> bytes: ... +def encrypt_and_serialize( + builder: pkcs7.PKCS7EnvelopeBuilder, + content_encryption_algorithm: pkcs7.ContentEncryptionAlgorithm, + encoding: serialization.Encoding, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def sign_and_serialize( + builder: pkcs7.PKCS7SignatureBuilder, + encoding: serialization.Encoding, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_der( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_pem( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def decrypt_smime( + data: bytes, + certificate: x509.Certificate, + private_key: rsa.RSAPrivateKey, + options: Iterable[pkcs7.PKCS7Options], +) -> bytes: ... +def load_pem_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_der_pkcs7_certificates( + data: bytes, +) -> list[x509.Certificate]: ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi new file mode 100644 index 0000000..c6c6d0b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography import x509 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.serialization import pkcs7 +from cryptography.utils import Buffer + +class TestCertificate: + not_after_tag: int + not_before_tag: int + issuer_value_tags: list[int] + subject_value_tags: list[int] + +def test_parse_certificate(data: bytes) -> TestCertificate: ... +def pkcs7_verify( + encoding: serialization.Encoding, + sig: bytes, + msg: Buffer | None, + certs: list[x509.Certificate], + options: list[pkcs7.PKCS7Options], +) -> None: ... 
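Aside: the pkcs12.pyi stubs above surface in application code via cryptography.hazmat.primitives.serialization.pkcs12. A minimal sketch of unpacking a PKCS#12 bundle — illustrative only, not part of this commit; bundle.p12 and the passphrase are placeholders:

from cryptography.hazmat.primitives.serialization import pkcs12

with open("bundle.p12", "rb") as f:  # hypothetical bundle file
    key, cert, additional_certs = pkcs12.load_key_and_certificates(
        f.read(), b"example-passphrase"
    )
# key is the private key (or None), cert the end-entity certificate (or None),
# additional_certs any extra CA certificates carried in the bundle.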
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi new file mode 100644 index 0000000..83c3441 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi @@ -0,0 +1,301 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import datetime +import typing +from collections.abc import Iterator + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, + PrivateKeyTypes, +) +from cryptography.x509 import certificate_transparency + +def load_pem_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_der_x509_certificate( + data: bytes, backend: typing.Any = None +) -> x509.Certificate: ... +def load_pem_x509_certificates( + data: bytes, +) -> list[x509.Certificate]: ... +def load_pem_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_der_x509_crl( + data: bytes, backend: typing.Any = None +) -> x509.CertificateRevocationList: ... +def load_pem_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def load_der_x509_csr( + data: bytes, backend: typing.Any = None +) -> x509.CertificateSigningRequest: ... +def encode_name_bytes(name: x509.Name) -> bytes: ... +def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... +def create_x509_certificate( + builder: x509.CertificateBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.Certificate: ... +def create_x509_csr( + builder: x509.CertificateSigningRequestBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.CertificateSigningRequest: ... +def create_x509_crl( + builder: x509.CertificateRevocationListBuilder, + private_key: PrivateKeyTypes, + hash_algorithm: hashes.HashAlgorithm | None, + rsa_padding: PKCS1v15 | PSS | None, + ecdsa_deterministic: bool | None, +) -> x509.CertificateRevocationList: ... + +class Sct: + @property + def version(self) -> certificate_transparency.Version: ... + @property + def log_id(self) -> bytes: ... + @property + def timestamp(self) -> datetime.datetime: ... + @property + def entry_type(self) -> certificate_transparency.LogEntryType: ... + @property + def signature_hash_algorithm(self) -> hashes.HashAlgorithm: ... + @property + def signature_algorithm( + self, + ) -> certificate_transparency.SignatureAlgorithm: ... + @property + def signature(self) -> bytes: ... + @property + def extension_bytes(self) -> bytes: ... + +class Certificate: + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... + @property + def serial_number(self) -> int: ... + @property + def version(self) -> x509.Version: ... + def public_key(self) -> CertificatePublicKeyTypes: ... + @property + def public_key_algorithm_oid(self) -> x509.ObjectIdentifier: ... 
+ @property + def not_valid_before(self) -> datetime.datetime: ... + @property + def not_valid_before_utc(self) -> datetime.datetime: ... + @property + def not_valid_after(self) -> datetime.datetime: ... + @property + def not_valid_after_utc(self) -> datetime.datetime: ... + @property + def issuer(self) -> x509.Name: ... + @property + def subject(self) -> x509.Name: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certificate_bytes(self) -> bytes: ... + @property + def tbs_precertificate_bytes(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + def verify_directly_issued_by(self, issuer: Certificate) -> None: ... + +class RevokedCertificate: ... + +class CertificateRevocationList: + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ... + def get_revoked_certificate_by_serial_number( + self, serial_number: int + ) -> x509.RevokedCertificate | None: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def issuer(self) -> x509.Name: ... + @property + def next_update(self) -> datetime.datetime | None: ... + @property + def next_update_utc(self) -> datetime.datetime | None: ... + @property + def last_update(self) -> datetime.datetime: ... + @property + def last_update_utc(self) -> datetime.datetime: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certlist_bytes(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + @typing.overload + def __getitem__(self, idx: int) -> x509.RevokedCertificate: ... + @typing.overload + def __getitem__(self, idx: slice) -> list[x509.RevokedCertificate]: ... + def __iter__(self) -> Iterator[x509.RevokedCertificate]: ... + def is_signature_valid( + self, public_key: CertificateIssuerPublicKeyTypes + ) -> bool: ... + +class CertificateSigningRequest: + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def public_key(self) -> CertificatePublicKeyTypes: ... + @property + def subject(self) -> x509.Name: ... + @property + def signature_hash_algorithm( + self, + ) -> hashes.HashAlgorithm | None: ... + @property + def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ... + @property + def signature_algorithm_parameters( + self, + ) -> PSS | PKCS1v15 | ECDSA | None: ... + @property + def extensions(self) -> x509.Extensions: ... + @property + def attributes(self) -> x509.Attributes: ... + def public_bytes(self, encoding: serialization.Encoding) -> bytes: ... + @property + def signature(self) -> bytes: ... + @property + def tbs_certrequest_bytes(self) -> bytes: ... + @property + def is_signature_valid(self) -> bool: ... + +class PolicyBuilder: + def time(self, time: datetime.datetime) -> PolicyBuilder: ... 
+ def store(self, store: Store) -> PolicyBuilder: ... + def max_chain_depth(self, max_chain_depth: int) -> PolicyBuilder: ... + def extension_policies( + self, *, ca_policy: ExtensionPolicy, ee_policy: ExtensionPolicy + ) -> PolicyBuilder: ... + def build_client_verifier(self) -> ClientVerifier: ... + def build_server_verifier( + self, subject: x509.verification.Subject + ) -> ServerVerifier: ... + +class Policy: + @property + def max_chain_depth(self) -> int: ... + @property + def subject(self) -> x509.verification.Subject | None: ... + @property + def validation_time(self) -> datetime.datetime: ... + @property + def extended_key_usage(self) -> x509.ObjectIdentifier: ... + @property + def minimum_rsa_modulus(self) -> int: ... + +class Criticality: + CRITICAL: Criticality + AGNOSTIC: Criticality + NON_CRITICAL: Criticality + +T = typing.TypeVar("T", contravariant=True, bound=x509.ExtensionType) + +MaybeExtensionValidatorCallback = typing.Callable[ + [ + Policy, + x509.Certificate, + T | None, + ], + None, +] + +PresentExtensionValidatorCallback = typing.Callable[ + [Policy, x509.Certificate, T], + None, +] + +class ExtensionPolicy: + @staticmethod + def permit_all() -> ExtensionPolicy: ... + @staticmethod + def webpki_defaults_ca() -> ExtensionPolicy: ... + @staticmethod + def webpki_defaults_ee() -> ExtensionPolicy: ... + def require_not_present( + self, extension_type: type[x509.ExtensionType] + ) -> ExtensionPolicy: ... + def may_be_present( + self, + extension_type: type[T], + criticality: Criticality, + validator: MaybeExtensionValidatorCallback[T] | None, + ) -> ExtensionPolicy: ... + def require_present( + self, + extension_type: type[T], + criticality: Criticality, + validator: PresentExtensionValidatorCallback[T] | None, + ) -> ExtensionPolicy: ... + +class VerifiedClient: + @property + def subjects(self) -> list[x509.GeneralName] | None: ... + @property + def chain(self) -> list[x509.Certificate]: ... + +class ClientVerifier: + @property + def policy(self) -> Policy: ... + @property + def store(self) -> Store: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> VerifiedClient: ... + +class ServerVerifier: + @property + def policy(self) -> Policy: ... + @property + def store(self) -> Store: ... + def verify( + self, + leaf: x509.Certificate, + intermediates: list[x509.Certificate], + ) -> list[x509.Certificate]: ... + +class Store: + def __init__(self, certs: list[x509.Certificate]) -> None: ... + +class VerificationError(Exception): ... diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
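The PolicyBuilder, ServerVerifier, and Store declarations in the x509.pyi stub above back the library's public verification API (exposed as cryptography.x509.verification in recent releases). A minimal sketch of verifying a server chain against a trust store; the PEM byte strings root_pem, inter_pem, and leaf_pem are assumed to be loaded elsewhere:

from cryptography.x509 import DNSName, load_pem_x509_certificate
from cryptography.x509.verification import PolicyBuilder, Store

root = load_pem_x509_certificate(root_pem)
intermediates = [load_pem_x509_certificate(inter_pem)]
leaf = load_pem_x509_certificate(leaf_pem)

store = Store([root])  # trusted roots
verifier = PolicyBuilder().store(store).build_server_verifier(
    DNSName("example.com")
)
# Per the stub above, verify() returns the validated chain and raises
# VerificationError on failure.
chain = verifier.verify(leaf, intermediates)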
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..241fbd7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc new file mode 100644 index 0000000..49e3fe6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc new file mode 100644 index 0000000..accd62a Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..063bcf5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,207 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + + +def cryptography_has_set_cert_cb() -> list[str]: + return [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ] + + +def cryptography_has_ssl_st() -> list[str]: + return [ + "SSL_ST_BEFORE", + "SSL_ST_OK", + "SSL_ST_INIT", + "SSL_ST_RENEGOTIATE", + ] + + +def cryptography_has_tls_st() -> list[str]: + return [ + "TLS_ST_BEFORE", + "TLS_ST_OK", + ] + + +def cryptography_has_ssl_sigalgs() -> list[str]: + return [ + "SSL_CTX_set1_sigalgs_list", + ] + + +def cryptography_has_psk() -> list[str]: + return [ + "SSL_CTX_use_psk_identity_hint", + "SSL_CTX_set_psk_server_callback", + "SSL_CTX_set_psk_client_callback", + ] + + +def cryptography_has_psk_tlsv13() -> list[str]: + return [ + "SSL_CTX_set_psk_find_session_callback", + "SSL_CTX_set_psk_use_session_callback", + "Cryptography_SSL_SESSION_new", + "SSL_CIPHER_find", + "SSL_SESSION_set1_master_key", + "SSL_SESSION_set_cipher", + "SSL_SESSION_set_protocol_version", + ] + + +def cryptography_has_custom_ext() -> list[str]: + return [ + "SSL_CTX_add_client_custom_ext", + "SSL_CTX_add_server_custom_ext", + "SSL_extension_supported", + ] + + +def cryptography_has_tlsv13_functions() -> list[str]: + return [ + "SSL_CTX_set_ciphersuites", + ] + + +def cryptography_has_tlsv13_hs_functions() -> list[str]: + return [ + "SSL_VERIFY_POST_HANDSHAKE", + "SSL_verify_client_post_handshake", + "SSL_CTX_set_post_handshake_auth", + "SSL_set_post_handshake_auth", + "SSL_SESSION_get_max_early_data", + "SSL_write_early_data", + "SSL_read_early_data", + "SSL_CTX_set_max_early_data", + ] + + +def cryptography_has_ssl_verify_client_post_handshake() -> list[str]: + return [ + "SSL_verify_client_post_handshake", + ] + + +def cryptography_has_engine() -> list[str]: + return [ + "ENGINE_by_id", + "ENGINE_init", + "ENGINE_finish", + "ENGINE_get_default_RAND", + "ENGINE_set_default_RAND", + "ENGINE_unregister_RAND", + "ENGINE_ctrl_cmd", + "ENGINE_free", + "ENGINE_get_name", + "ENGINE_ctrl_cmd_string", + "ENGINE_load_builtin_engines", + "ENGINE_load_private_key", + "ENGINE_load_public_key", + "SSL_CTX_set_client_cert_engine", + ] + + +def cryptography_has_verified_chain() -> list[str]: + return [ + "SSL_get0_verified_chain", + ] + + +def cryptography_has_srtp() -> list[str]: + return [ + "SSL_CTX_set_tlsext_use_srtp", + "SSL_set_tlsext_use_srtp", + "SSL_get_selected_srtp_profile", + ] + + +def cryptography_has_op_no_renegotiation() -> list[str]: + return [ + "SSL_OP_NO_RENEGOTIATION", + ] + + +def cryptography_has_dtls_get_data_mtu() -> list[str]: + return [ + "DTLS_get_data_mtu", + ] + + +def cryptography_has_ssl_cookie() -> list[str]: + return [ + "SSL_OP_COOKIE_EXCHANGE", + "DTLSv1_listen", + "SSL_CTX_set_cookie_generate_cb", + "SSL_CTX_set_cookie_verify_cb", + ] + + +def cryptography_has_prime_checks() -> list[str]: + return [ + "BN_prime_checks_for_size", + ] + + +def cryptography_has_unexpected_eof_while_reading() -> list[str]: + return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] + + +def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: + return [ + "SSL_OP_IGNORE_UNEXPECTED_EOF", + ] + + +def cryptography_has_get_extms_support() -> list[str]: + return ["SSL_get_extms_support"] + + +def cryptography_has_ssl_get0_group_name() -> list[str]: + return ["SSL_get0_group_name"] + + +# This is a mapping of +# {condition: function-returning-names-dependent-on-that-condition} so we can +# loop over them and delete unsupported names at runtime. It will be removed +# when cffi supports #if in cdef. 
We use functions instead of just a dict of +# lists so we can use coverage to measure which are used. +CONDITIONAL_NAMES = { + "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, + "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, + "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, + "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, + "Cryptography_HAS_PSK": cryptography_has_psk, + "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, + "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, + "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, + "Cryptography_HAS_TLSv1_3_HS_FUNCTIONS": ( + cryptography_has_tlsv13_hs_functions + ), + "Cryptography_HAS_SSL_VERIFY_CLIENT_POST_HANDSHAKE": ( + cryptography_has_ssl_verify_client_post_handshake + ), + "Cryptography_HAS_ENGINE": cryptography_has_engine, + "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, + "Cryptography_HAS_SRTP": cryptography_has_srtp, + "Cryptography_HAS_OP_NO_RENEGOTIATION": ( + cryptography_has_op_no_renegotiation + ), + "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, + "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, + "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, + "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( + cryptography_has_unexpected_eof_while_reading + ), + "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( + cryptography_has_ssl_op_ignore_unexpected_eof + ), + "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, + "Cryptography_HAS_SSL_GET0_GROUP_NAME": ( + cryptography_has_ssl_get0_group_name + ), +} diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..4494c71 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,137 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import os +import sys +import threading +import types +import typing +import warnings +from collections.abc import Callable + +import cryptography +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._rust import _openssl, openssl +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES +from cryptography.utils import CryptographyDeprecationWarning + + +def _openssl_assert(ok: bool) -> None: + if not ok: + errors = openssl.capture_error_stack() + + raise InternalError( + "Unknown OpenSSL error. This error is commonly encountered when " + "another library is not cleaning up the OpenSSL error stack. If " + "you are using cryptography with another library that uses " + "OpenSSL try disabling it before reporting a bug. Otherwise " + "please file an issue at https://github.com/pyca/cryptography/" + "issues with information on how to reproduce " + f"this. 
({errors!r})", + errors, + ) + + +def build_conditional_library( + lib: typing.Any, + conditional_names: dict[str, Callable[[], list[str]]], +) -> typing.Any: + conditional_lib = types.ModuleType("lib") + conditional_lib._original_lib = lib # type: ignore[attr-defined] + excluded_names = set() + for condition, names_cb in conditional_names.items(): + if not getattr(lib, condition): + excluded_names.update(names_cb()) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding: + """ + OpenSSL API wrapper. + """ + + lib: typing.ClassVar[typing.Any] = None + ffi = _openssl.ffi + _lib_loaded = False + _init_lock = threading.Lock() + + def __init__(self) -> None: + self._ensure_ffi_initialized() + + @classmethod + def _ensure_ffi_initialized(cls) -> None: + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library( + _openssl.lib, CONDITIONAL_NAMES + ) + cls._lib_loaded = True + + @classmethod + def init_static_locks(cls) -> None: + cls._ensure_ffi_initialized() + + +def _verify_package_version(version: str) -> None: + # Occasionally we run into situations where the version of the Python + # package does not match the version of the shared object that is loaded. + # This may occur in environments where multiple versions of cryptography + # are installed and available in the python path. To avoid errors cropping + # up later this code checks that the currently imported package and the + # shared object that were loaded have the same version and raise an + # ImportError if they do not + so_package_version = _openssl.ffi.string( + _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION + ) + if version.encode("ascii") != so_package_version: + raise ImportError( + "The version of cryptography does not match the loaded " + "shared object. This can happen if you have multiple copies of " + "cryptography installed in your Python path. Please try creating " + "a new virtual environment to resolve this issue. " + f"Loaded python version: {version}, " + f"shared object version: {so_package_version}" + ) + + _openssl_assert( + _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), + ) + + +_verify_package_version(cryptography.__version__) + +Binding.init_static_locks() + +if ( + sys.platform == "win32" + and os.environ.get("PROCESSOR_ARCHITEW6432") is not None +): + warnings.warn( + "You are using cryptography on a 32-bit Python on a 64-bit Windows " + "Operating System. Cryptography will be significantly faster if you " + "switch to using a 64-bit Python.", + UserWarning, + stacklevel=2, + ) + +if ( + not openssl.CRYPTOGRAPHY_IS_LIBRESSL + and not openssl.CRYPTOGRAPHY_IS_BORINGSSL + and not openssl.CRYPTOGRAPHY_IS_AWSLC + and not openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER +): + warnings.warn( + "You are using OpenSSL < 3.0. Support for OpenSSL < 3.0 is deprecated " + "and will be removed in the next release. Please upgrade to OpenSSL " + "3.0 or later.", + CryptographyDeprecationWarning, + stacklevel=2, + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py new file mode 100644 index 0000000..41d7318 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..670960a Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py new file mode 100644 index 0000000..41d7318 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c9a6ce5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc new file mode 100644 index 0000000..d84b0d8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py new file mode 100644 index 0000000..072a991 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py @@ -0,0 +1,112 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, + _verify_key_size, +) + + +class ARC4(CipherAlgorithm): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class TripleDES(BlockCipherAlgorithm): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key: bytes): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# Not actually supported, marker for tests +class _DES: + key_size = 64 + + +class Blowfish(BlockCipherAlgorithm): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class CAST5(BlockCipherAlgorithm): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SEED(BlockCipherAlgorithm): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class IDEA(BlockCipherAlgorithm): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +# This class only allows RC2 with a 128-bit key. No support for +# effective key bits or other key sizes is provided. +class RC2(BlockCipherAlgorithm): + name = "RC2" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
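The decrepit package introduced above holds cipher algorithms kept only for reading legacy data; they still plug into the regular Cipher interface. A minimal sketch with TripleDES in CBC mode (the key and IV are generated here purely for illustration; real uses would take them from the legacy system):

import os

from cryptography.hazmat.decrepit.ciphers.algorithms import TripleDES
from cryptography.hazmat.primitives.ciphers import Cipher, modes

key = os.urandom(24)  # 192-bit key; the __init__ above expands 8- and 16-byte keys
iv = os.urandom(8)    # one 64-bit block, matching block_size = 64
encryptor = Cipher(TripleDES(key), modes.CBC(iv)).encryptor()
# CBC without padding requires the plaintext length to be a multiple of 8.
ct = encryptor.update(b"legacy 8-byte-aligned payload!!!") + encryptor.finalize()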
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1ba1dc9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc new file mode 100644 index 0000000..3fe454c Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc new file mode 100644 index 0000000..00f7c4a Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc new file mode 100644 index 0000000..b470c7d Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc new file mode 100644 index 0000000..8d50e5f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc new file mode 100644 index 0000000..4da4fab Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc new file mode 100644 index 0000000..06065a9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc new file mode 100644 index 0000000..214a1d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc new file mode 100644 index 0000000..2633d77 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc new file mode 100644 index 0000000..af0ee84 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc new file mode 100644 index 0000000..c9bc8de Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py new file mode 100644 index 0000000..ea55ffd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +# This exists to break an import cycle. It is normally accessible from the +# asymmetric padding module. + + +class AsymmetricPadding(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this padding (e.g. "PSS", "PKCS1"). + """ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py new file mode 100644 index 0000000..305a9fd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py @@ -0,0 +1,60 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils + +# This exists to break an import cycle. It is normally accessible from the +# ciphers module. + + +class CipherAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @property + @abc.abstractmethod + def key_sizes(self) -> frozenset[int]: + """ + Valid key sizes for this algorithm in bits + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +class BlockCipherAlgorithm(CipherAlgorithm): + key: utils.Buffer + + @property + @abc.abstractmethod + def block_size(self) -> int: + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +def _verify_key_size( + algorithm: CipherAlgorithm, key: utils.Buffer +) -> utils.Buffer: + # Verify that the key is instance of bytes + utils._check_byteslike("key", key) + + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError( + f"Invalid key size ({len(key) * 8}) for {algorithm.name}." 
+ ) + return key diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py new file mode 100644 index 0000000..e998865 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py @@ -0,0 +1,168 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.primitives.hashes import HashAlgorithm + +# This exists to break an import cycle. These classes are normally accessible +# from the serialization module. + + +class PBES(utils.Enum): + PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" + PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" + + +class Encoding(utils.Enum): + PEM = "PEM" + DER = "DER" + OpenSSH = "OpenSSH" + Raw = "Raw" + X962 = "ANSI X9.62" + SMIME = "S/MIME" + + +class PrivateFormat(utils.Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + Raw = "Raw" + OpenSSH = "OpenSSH" + PKCS12 = "PKCS12" + + def encryption_builder(self) -> KeySerializationEncryptionBuilder: + if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): + raise ValueError( + "encryption_builder only supported with PrivateFormat.OpenSSH" + " and PrivateFormat.PKCS12" + ) + return KeySerializationEncryptionBuilder(self) + + +class PublicFormat(utils.Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + OpenSSH = "OpenSSH" + Raw = "Raw" + CompressedPoint = "X9.62 Compressed Point" + UncompressedPoint = "X9.62 Uncompressed Point" + + +class ParameterFormat(utils.Enum): + PKCS3 = "PKCS3" + + +class KeySerializationEncryption(metaclass=abc.ABCMeta): + pass + + +class BestAvailableEncryption(KeySerializationEncryption): + def __init__(self, password: bytes): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +class NoEncryption(KeySerializationEncryption): + pass + + +class KeySerializationEncryptionBuilder: + def __init__( + self, + format: PrivateFormat, + *, + _kdf_rounds: int | None = None, + _hmac_hash: HashAlgorithm | None = None, + _key_cert_algorithm: PBES | None = None, + ) -> None: + self._format = format + + self._kdf_rounds = _kdf_rounds + self._hmac_hash = _hmac_hash + self._key_cert_algorithm = _key_cert_algorithm + + def kdf_rounds(self, rounds: int) -> KeySerializationEncryptionBuilder: + if self._kdf_rounds is not None: + raise ValueError("kdf_rounds already set") + + if not isinstance(rounds, int): + raise TypeError("kdf_rounds must be an integer") + + if rounds < 1: + raise ValueError("kdf_rounds must be a positive integer") + + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=self._key_cert_algorithm, + ) + + def hmac_hash( + self, algorithm: HashAlgorithm + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "hmac_hash only supported with PrivateFormat.PKCS12" + ) + + if self._hmac_hash is not None: + raise ValueError("hmac_hash already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=algorithm, + 
_key_cert_algorithm=self._key_cert_algorithm, + ) + + def key_cert_algorithm( + self, algorithm: PBES + ) -> KeySerializationEncryptionBuilder: + if self._format is not PrivateFormat.PKCS12: + raise TypeError( + "key_cert_algorithm only supported with PrivateFormat.PKCS12" + ) + if self._key_cert_algorithm is not None: + raise ValueError("key_cert_algorithm already set") + return KeySerializationEncryptionBuilder( + self._format, + _kdf_rounds=self._kdf_rounds, + _hmac_hash=self._hmac_hash, + _key_cert_algorithm=algorithm, + ) + + def build(self, password: bytes) -> KeySerializationEncryption: + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + return _KeySerializationEncryption( + self._format, + password, + kdf_rounds=self._kdf_rounds, + hmac_hash=self._hmac_hash, + key_cert_algorithm=self._key_cert_algorithm, + ) + + +class _KeySerializationEncryption(KeySerializationEncryption): + def __init__( + self, + format: PrivateFormat, + password: bytes, + *, + kdf_rounds: int | None, + hmac_hash: HashAlgorithm | None, + key_cert_algorithm: PBES | None, + ): + self._format = format + self.password = password + + self._kdf_rounds = kdf_rounds + self._hmac_hash = hmac_hash + self._key_cert_algorithm = key_cert_algorithm diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..b509336 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,3 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
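KeySerializationEncryptionBuilder above is reached through PrivateFormat.encryption_builder(); each setter returns a fresh builder, and build() yields the KeySerializationEncryption object that serializers such as pkcs12.serialize_key_and_certificates accept as their encryption_algorithm. A minimal sketch for the PKCS12 format:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.serialization import PrivateFormat
from cryptography.hazmat.primitives.serialization.pkcs12 import PBES

encryption = (
    PrivateFormat.PKCS12.encryption_builder()
    .kdf_rounds(50_000)
    .key_cert_algorithm(PBES.PBESv2SHA256AndAES256CBC)
    .hmac_hash(hashes.SHA256())
    .build(b"my-passphrase")
)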
diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a6e66cf Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc new file mode 100644 index 0000000..97e6d1c Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc new file mode 100644 index 0000000..eefdd91 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc new file mode 100644 index 0000000..c2a3a44 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc new file mode 100644 index 0000000..6bcecf1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc new file mode 100644 index 0000000..499789f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc new file mode 100644 index 0000000..a1031a6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 0000000..395e134 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000..1b45bfe Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..88b601f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc new file mode 100644 index 0000000..3df2d66 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc new file mode 100644 index 0000000..dad690a Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..1822e99 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,147 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization + +generate_parameters = rust_openssl.dh.generate_parameters + + +DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers +DHPublicNumbers = rust_openssl.dh.DHPublicNumbers +DHParameterNumbers = rust_openssl.dh.DHParameterNumbers + + +class DHParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DHPrivateKey: + """ + Generates and returns a DHPrivateKey. + """ + + @abc.abstractmethod + def parameter_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.ParameterFormat, + ) -> bytes: + """ + Returns the parameters serialized as bytes. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DHParameterNumbers: + """ + Returns a DHParameterNumbers. + """ + + +DHParametersWithSerialization = DHParameters +DHParameters.register(rust_openssl.dh.DHParameters) + + +class DHPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DHPublicNumbers: + """ + Returns a DHPublicNumbers. 
+ """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> DHPublicKey: + """ + Returns a copy. + """ + + +DHPublicKeyWithSerialization = DHPublicKey +DHPublicKey.register(rust_openssl.dh.DHPublicKey) + + +class DHPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DHPublicKey: + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DHParameters: + """ + The DHParameters object associated with this private key. + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: DHPublicKey) -> bytes: + """ + Given peer's DHPublicKey, carry out the key exchange and + return shared key as bytes. + """ + + @abc.abstractmethod + def private_numbers(self) -> DHPrivateNumbers: + """ + Returns a DHPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> DHPrivateKey: + """ + Returns a copy. + """ + + +DHPrivateKeyWithSerialization = DHPrivateKey +DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..21d78ba --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,167 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.utils import Buffer + + +class DSAParameters(metaclass=abc.ABCMeta): + @abc.abstractmethod + def generate_private_key(self) -> DSAPrivateKey: + """ + Generates and returns a DSAPrivateKey. + """ + + @abc.abstractmethod + def parameter_numbers(self) -> DSAParameterNumbers: + """ + Returns a DSAParameterNumbers. + """ + + +DSAParametersWithNumbers = DSAParameters +DSAParameters.register(rust_openssl.dsa.DSAParameters) + + +class DSAPrivateKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self) -> DSAPublicKey: + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this private key. 
+ """ + + @abc.abstractmethod + def sign( + self, + data: Buffer, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> DSAPrivateNumbers: + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> DSAPrivateKey: + """ + Returns a copy. + """ + + +DSAPrivateKeyWithSerialization = DSAPrivateKey +DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) + + +class DSAPublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self) -> DSAParameters: + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def public_numbers(self) -> DSAPublicNumbers: + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: Buffer, + data: Buffer, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> DSAPublicKey: + """ + Returns a copy. + """ + + +DSAPublicKeyWithSerialization = DSAPublicKey +DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) + +DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers +DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers +DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers + + +def generate_parameters( + key_size: int, backend: typing.Any = None +) -> DSAParameters: + if key_size not in (1024, 2048, 3072, 4096): + raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") + + return rust_openssl.dsa.generate_parameters(key_size) + + +def generate_private_key( + key_size: int, backend: typing.Any = None +) -> DSAPrivateKey: + parameters = generate_parameters(key_size) + return parameters.generate_private_key() diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..a13d982 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,447 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import abc +import typing + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat._oid import ObjectIdentifier +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class EllipticCurveOID: + SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") + SECP224R1 = ObjectIdentifier("1.3.132.0.33") + SECP256K1 = ObjectIdentifier("1.3.132.0.10") + SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") + SECP384R1 = ObjectIdentifier("1.3.132.0.34") + SECP521R1 = ObjectIdentifier("1.3.132.0.35") + BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") + BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") + BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") + SECT163K1 = ObjectIdentifier("1.3.132.0.1") + SECT163R2 = ObjectIdentifier("1.3.132.0.15") + SECT233K1 = ObjectIdentifier("1.3.132.0.26") + SECT233R1 = ObjectIdentifier("1.3.132.0.27") + SECT283K1 = ObjectIdentifier("1.3.132.0.16") + SECT283R1 = ObjectIdentifier("1.3.132.0.17") + SECT409K1 = ObjectIdentifier("1.3.132.0.36") + SECT409R1 = ObjectIdentifier("1.3.132.0.37") + SECT571K1 = ObjectIdentifier("1.3.132.0.38") + SECT571R1 = ObjectIdentifier("1.3.132.0.39") + + +class EllipticCurve(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + The name of the curve. e.g. secp256r1. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @property + @abc.abstractmethod + def group_order(self) -> int: + """ + The order of the curve's group. + """ + + +class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + """ + The digest algorithm used with this signature. + """ + + +class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def exchange( + self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey + ) -> bytes: + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self) -> EllipticCurvePublicKey: + """ + The EllipticCurvePublicKey for this private key. + """ + + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def sign( + self, + data: utils.Buffer, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> bytes: + """ + Signs the data + """ + + @abc.abstractmethod + def private_numbers(self) -> EllipticCurvePrivateNumbers: + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> EllipticCurvePrivateKey: + """ + Returns a copy. 
+ """ + + +EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey +EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) + + +class EllipticCurvePublicKey(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def curve(self) -> EllipticCurve: + """ + The EllipticCurve that this key is on. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + Bit size of a secret scalar for the curve. + """ + + @abc.abstractmethod + def public_numbers(self) -> EllipticCurvePublicNumbers: + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: utils.Buffer, + data: utils.Buffer, + signature_algorithm: EllipticCurveSignatureAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @classmethod + def from_encoded_point( + cls, curve: EllipticCurve, data: bytes + ) -> EllipticCurvePublicKey: + utils._check_bytes("data", data) + + if len(data) == 0: + raise ValueError("data must not be an empty byte string") + + if data[0] not in [0x02, 0x03, 0x04]: + raise ValueError("Unsupported elliptic curve point type") + + return rust_openssl.ec.from_public_bytes(curve, data) + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> EllipticCurvePublicKey: + """ + Returns a copy. + """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey +EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) + +EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers +EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers + + +class SECT571R1(EllipticCurve): + name = "sect571r1" + key_size = 570 + group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E8382E9BB2FE84E47 # noqa: E501 + + +class SECT409R1(EllipticCurve): + name = "sect409r1" + key_size = 409 + group_order = 0x10000000000000000000000000000000000000000000000000001E2AAD6A612F33307BE5FA47C3C9E052F838164CD37D9A21173 # noqa: E501 + + +class SECT283R1(EllipticCurve): + name = "sect283r1" + key_size = 283 + group_order = 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7CEFADB307 # noqa: E501 + + +class SECT233R1(EllipticCurve): + name = "sect233r1" + key_size = 233 + group_order = 0x1000000000000000000000000000013E974E72F8A6922031D2603CFE0D7 + + +class SECT163R2(EllipticCurve): + name = "sect163r2" + key_size = 163 + group_order = 0x40000000000000000000292FE77E70C12A4234C33 + + +class SECT571K1(EllipticCurve): + name = "sect571k1" + key_size = 571 + group_order = 0x20000000000000000000000000000000000000000000000000000000000000000000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB45CFE778F637C1001 # noqa: E501 + + +class SECT409K1(EllipticCurve): + name = "sect409k1" + key_size = 409 + group_order = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF # noqa: E501 + + +class SECT283K1(EllipticCurve): + name = "sect283k1" + key_size = 283 + group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E061E163C61 # noqa: E501 + + +class SECT233K1(EllipticCurve): + name = "sect233k1" + key_size = 233 + group_order = 
0x8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF + + +class SECT163K1(EllipticCurve): + name = "sect163k1" + key_size = 163 + group_order = 0x4000000000000000000020108A2E0CC0D99F8A5EF + + +class SECP521R1(EllipticCurve): + name = "secp521r1" + key_size = 521 + group_order = 0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409 # noqa: E501 + + +class SECP384R1(EllipticCurve): + name = "secp384r1" + key_size = 384 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973 # noqa: E501 + + +class SECP256R1(EllipticCurve): + name = "secp256r1" + key_size = 256 + group_order = ( + 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551 + ) + + +class SECP256K1(EllipticCurve): + name = "secp256k1" + key_size = 256 + group_order = ( + 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + ) + + +class SECP224R1(EllipticCurve): + name = "secp224r1" + key_size = 224 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D + + +class SECP192R1(EllipticCurve): + name = "secp192r1" + key_size = 192 + group_order = 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831 + + +class BrainpoolP256R1(EllipticCurve): + name = "brainpoolP256r1" + key_size = 256 + group_order = ( + 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + ) + + +class BrainpoolP384R1(EllipticCurve): + name = "brainpoolP384r1" + key_size = 384 + group_order = 0x8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7CF3AB6AF6B7FC3103B883202E9046565 # noqa: E501 + + +class BrainpoolP512R1(EllipticCurve): + name = "brainpoolP512r1" + key_size = 512 + group_order = 0xAADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069 # noqa: E501 + + +_CURVE_TYPES: dict[str, EllipticCurve] = { + "prime192v1": SECP192R1(), + "prime256v1": SECP256R1(), + "secp192r1": SECP192R1(), + "secp224r1": SECP224R1(), + "secp256r1": SECP256R1(), + "secp384r1": SECP384R1(), + "secp521r1": SECP521R1(), + "secp256k1": SECP256K1(), + "sect163k1": SECT163K1(), + "sect233k1": SECT233K1(), + "sect283k1": SECT283K1(), + "sect409k1": SECT409K1(), + "sect571k1": SECT571K1(), + "sect163r2": SECT163R2(), + "sect233r1": SECT233R1(), + "sect283r1": SECT283R1(), + "sect409r1": SECT409R1(), + "sect571r1": SECT571R1(), + "brainpoolP256r1": BrainpoolP256R1(), + "brainpoolP384r1": BrainpoolP384R1(), + "brainpoolP512r1": BrainpoolP512R1(), +} + + +class ECDSA(EllipticCurveSignatureAlgorithm): + def __init__( + self, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + deterministic_signing: bool = False, + ): + from cryptography.hazmat.backends.openssl.backend import backend + + if ( + deterministic_signing + and not backend.ecdsa_deterministic_supported() + ): + raise UnsupportedAlgorithm( + "ECDSA with deterministic signature (RFC 6979) is not " + "supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + self._algorithm = algorithm + self._deterministic_signing = deterministic_signing + + @property + def algorithm( + self, + ) -> asym_utils.Prehashed | hashes.HashAlgorithm: + return self._algorithm + + @property + def deterministic_signing( + self, + ) -> bool: + return self._deterministic_signing + + +generate_private_key = rust_openssl.ec.generate_private_key + + +def derive_private_key( + private_value: int, + curve: EllipticCurve, + backend: typing.Any = 
None, +) -> EllipticCurvePrivateKey: + if not isinstance(private_value, int): + raise TypeError("private_value must be an integer type.") + + if private_value <= 0: + raise ValueError("private_value must be a positive integer.") + + return rust_openssl.ec.derive_private_key(private_value, curve) + + +class ECDH: + pass + + +_OID_TO_CURVE = { + EllipticCurveOID.SECP192R1: SECP192R1, + EllipticCurveOID.SECP224R1: SECP224R1, + EllipticCurveOID.SECP256K1: SECP256K1, + EllipticCurveOID.SECP256R1: SECP256R1, + EllipticCurveOID.SECP384R1: SECP384R1, + EllipticCurveOID.SECP521R1: SECP521R1, + EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, + EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, + EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, + EllipticCurveOID.SECT163K1: SECT163K1, + EllipticCurveOID.SECT163R2: SECT163R2, + EllipticCurveOID.SECT233K1: SECT233K1, + EllipticCurveOID.SECT233R1: SECT233R1, + EllipticCurveOID.SECT283K1: SECT283K1, + EllipticCurveOID.SECT283R1: SECT283R1, + EllipticCurveOID.SECT409K1: SECT409K1, + EllipticCurveOID.SECT409R1: SECT409R1, + EllipticCurveOID.SECT571K1: SECT571K1, + EllipticCurveOID.SECT571R1: SECT571R1, +} + + +def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: + try: + return _OID_TO_CURVE[oid] + except KeyError: + raise LookupError( + "The provided object identifier has no matching elliptic " + "curve class" + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py new file mode 100644 index 0000000..e576dc9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py @@ -0,0 +1,129 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class Ed25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: Buffer, data: Buffer) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed25519PublicKey: + """ + Returns a copy. 
+ """ + + +Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) + + +class Ed25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> Ed25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed25519_supported(): + raise UnsupportedAlgorithm( + "ed25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed25519PublicKey: + """ + The Ed25519PublicKey derived from the private key. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def sign(self, data: Buffer) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def __copy__(self) -> Ed25519PrivateKey: + """ + Returns a copy. + """ + + +Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py new file mode 100644 index 0000000..89db209 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py @@ -0,0 +1,131 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class Ed448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def verify(self, signature: Buffer, data: Buffer) -> None: + """ + Verify the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. 
+ """ + + @abc.abstractmethod + def __copy__(self) -> Ed448PublicKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "ed448"): + Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) + + +class Ed448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> Ed448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.ed448_supported(): + raise UnsupportedAlgorithm( + "ed448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, + ) + + return rust_openssl.ed448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> Ed448PublicKey: + """ + The Ed448PublicKey derived from the private key. + """ + + @abc.abstractmethod + def sign(self, data: Buffer) -> bytes: + """ + Signs the data. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def __copy__(self) -> Ed448PrivateKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "x448"): + Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..5121a28 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives._asymmetric import ( + AsymmetricPadding as AsymmetricPadding, +) +from cryptography.hazmat.primitives.asymmetric import rsa + + +class PKCS1v15(AsymmetricPadding): + name = "EMSA-PKCS1-v1_5" + + +class _MaxLength: + "Sentinel value for `MAX_LENGTH`." + + +class _Auto: + "Sentinel value for `AUTO`." + + +class _DigestLength: + "Sentinel value for `DIGEST_LENGTH`." 
+ + +class PSS(AsymmetricPadding): + MAX_LENGTH = _MaxLength() + AUTO = _Auto() + DIGEST_LENGTH = _DigestLength() + name = "EMSA-PSS" + _salt_length: int | _MaxLength | _Auto | _DigestLength + + def __init__( + self, + mgf: MGF, + salt_length: int | _MaxLength | _Auto | _DigestLength, + ) -> None: + self._mgf = mgf + + if not isinstance( + salt_length, (int, _MaxLength, _Auto, _DigestLength) + ): + raise TypeError( + "salt_length must be an integer, MAX_LENGTH, " + "DIGEST_LENGTH, or AUTO" + ) + + if isinstance(salt_length, int) and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + @property + def mgf(self) -> MGF: + return self._mgf + + +class OAEP(AsymmetricPadding): + name = "EME-OAEP" + + def __init__( + self, + mgf: MGF, + algorithm: hashes.HashAlgorithm, + label: bytes | None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + @property + def algorithm(self) -> hashes.HashAlgorithm: + return self._algorithm + + @property + def mgf(self) -> MGF: + return self._mgf + + +class MGF(metaclass=abc.ABCMeta): + _algorithm: hashes.HashAlgorithm + + +class MGF1(MGF): + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm + + +def calculate_max_pss_salt_length( + key: rsa.RSAPrivateKey | rsa.RSAPublicKey, + hash_algorithm: hashes.HashAlgorithm, +) -> int: + if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + raise TypeError("key must be an RSA public or private key") + # bit length - 1 per RFC 3447 + emlen = (key.key_size + 6) // 8 + salt_length = emlen - hash_algorithm.digest_size - 2 + assert salt_length >= 0 + return salt_length diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..f94812e --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,285 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import random +import typing +from math import gcd + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization, hashes +from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils + + +class RSAPrivateKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Decrypts the provided ciphertext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self) -> RSAPublicKey: + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def sign( + self, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> bytes: + """ + Signs the data. 
+ """ + + @abc.abstractmethod + def private_numbers(self) -> RSAPrivateNumbers: + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def __copy__(self) -> RSAPrivateKey: + """ + Returns a copy. + """ + + +RSAPrivateKeyWithSerialization = RSAPrivateKey +RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) + + +class RSAPublicKey(metaclass=abc.ABCMeta): + @abc.abstractmethod + def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: + """ + Encrypts the given plaintext. + """ + + @property + @abc.abstractmethod + def key_size(self) -> int: + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self) -> RSAPublicNumbers: + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + Returns the key serialized as bytes. + """ + + @abc.abstractmethod + def verify( + self, + signature: bytes, + data: bytes, + padding: AsymmetricPadding, + algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, + ) -> None: + """ + Verifies the signature of the data. + """ + + @abc.abstractmethod + def recover_data_from_signature( + self, + signature: bytes, + padding: AsymmetricPadding, + algorithm: hashes.HashAlgorithm | None, + ) -> bytes: + """ + Recovers the original data from the signature. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> RSAPublicKey: + """ + Returns a copy. + """ + + +RSAPublicKeyWithSerialization = RSAPublicKey +RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) + +RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers +RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers + + +def generate_private_key( + public_exponent: int, + key_size: int, + backend: typing.Any = None, +) -> RSAPrivateKey: + _verify_rsa_parameters(public_exponent, key_size) + return rust_openssl.rsa.generate_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: + if public_exponent not in (3, 65537): + raise ValueError( + "public_exponent must be either 3 (for legacy compatibility) or " + "65537. Almost everyone should choose 65537 here!" + ) + + if key_size < 1024: + raise ValueError("key_size must be at least 1024-bits.") + + +def _modinv(e: int, m: int) -> int: + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, x2 = 1, 0 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn = x1 - q * x2 + a, b, x1, x2 = b, r, x2, xn + return x1 % m + + +def rsa_crt_iqmp(p: int, q: int) -> int: + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + if p <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent: int, p: int) -> int: + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent (d) and p. 
+ """ + if private_exponent <= 1 or p <= 1: + raise ValueError("Values can't be <= 1") + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent: int, q: int) -> int: + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent (d) and q. + """ + if private_exponent <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + return private_exponent % (q - 1) + + +def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: + """ + Compute the RSA private_exponent (d) given the public exponent (e) + and the RSA primes p and q. + + This uses the Carmichael totient function to generate the + smallest possible working value of the private exponent. + """ + # This lambda_n is the Carmichael totient function. + # The original RSA paper uses the Euler totient function + # here: phi_n = (p - 1) * (q - 1) + # Either version of the private exponent will work, but the + # one generated by the older formulation may be larger + # than necessary. (lambda_n always divides phi_n) + # + # TODO: Replace with lcm(p - 1, q - 1) once the minimum + # supported Python version is >= 3.9. + if e <= 1 or p <= 1 or q <= 1: + raise ValueError("Values can't be <= 1") + lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) + return _modinv(e, lambda_n) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. +_MAX_RECOVERY_ATTEMPTS = 500 + + +def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # reject invalid values early + if d <= 1 or e <= 1: + raise ValueError("d, e can't be <= 1") + if 17 != pow(17, e * d, n): + raise ValueError("n, d, e don't match") + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + tries = 0 + while not spotted and tries < _MAX_RECOVERY_ATTEMPTS: + a = random.randint(2, n - 1) + tries += 1 + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py new file mode 100644 index 0000000..1fe4eaf --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py @@ -0,0 +1,111 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import ( + dh, + dsa, + ec, + ed448, + ed25519, + rsa, + x448, + x25519, +) + +# Every asymmetric key type +PublicKeyTypes = typing.Union[ + dh.DHPublicKey, + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +PUBLIC_KEY_TYPES = PublicKeyTypes +utils.deprecated( + PUBLIC_KEY_TYPES, + __name__, + "Use PublicKeyTypes instead", + utils.DeprecatedIn40, + name="PUBLIC_KEY_TYPES", +) +# Every asymmetric key type +PrivateKeyTypes = typing.Union[ + dh.DHPrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + x25519.X25519PrivateKey, + x448.X448PrivateKey, +] +PRIVATE_KEY_TYPES = PrivateKeyTypes +utils.deprecated( + PRIVATE_KEY_TYPES, + __name__, + "Use PrivateKeyTypes instead", + utils.DeprecatedIn40, + name="PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate public key types +CertificateIssuerPrivateKeyTypes = typing.Union[ + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, +] +CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes +utils.deprecated( + CERTIFICATE_PRIVATE_KEY_TYPES, + __name__, + "Use CertificateIssuerPrivateKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PRIVATE_KEY_TYPES", +) +# Just the key types we allow to be used for x509 signing. This mirrors +# the certificate private key types +CertificateIssuerPublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, +] +CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes +utils.deprecated( + CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, + __name__, + "Use CertificateIssuerPublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", +) +# This type removes DHPublicKey. x448/x25519 can be a public key +# but cannot be used in signing so they are allowed here. +CertificatePublicKeyTypes = typing.Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, +] +CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes +utils.deprecated( + CERTIFICATE_PUBLIC_KEY_TYPES, + __name__, + "Use CertificatePublicKeyTypes instead", + utils.DeprecatedIn40, + name="CERTIFICATE_PUBLIC_KEY_TYPES", +) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..826b956 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,24 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
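These unions exist for type annotations at API boundaries; code that receives one of them narrows the type with isinstance() before using algorithm-specific attributes. A small sketch:

from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes

def describe_key(key: PrivateKeyTypes) -> str:
    # Narrow the union before touching per-algorithm attributes.
    if isinstance(key, rsa.RSAPrivateKey):
        return f"RSA-{key.key_size}"
    if isinstance(key, ec.EllipticCurvePrivateKey):
        return f"EC/{key.curve.name}"
    return type(key).__name__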
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.primitives import hashes + +decode_dss_signature = asn1.decode_dss_signature +encode_dss_signature = asn1.encode_dss_signature + + +class Prehashed: + def __init__(self, algorithm: hashes.HashAlgorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of HashAlgorithm.") + + self._algorithm = algorithm + self._digest_size = algorithm.digest_size + + @property + def digest_size(self) -> int: + return self._digest_size diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py new file mode 100644 index 0000000..a499376 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py @@ -0,0 +1,122 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class X25519PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X25519PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> X25519PublicKey: + """ + Returns a copy. 
+ """ + + +X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) + + +class X25519PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + return rust_openssl.x25519.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> X25519PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x25519_supported(): + raise UnsupportedAlgorithm( + "X25519 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x25519.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X25519PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X25519PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + @abc.abstractmethod + def __copy__(self) -> X25519PrivateKey: + """ + Returns a copy. + """ + + +X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py new file mode 100644 index 0000000..c6fd71b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import _serialization +from cryptography.utils import Buffer + + +class X448PublicKey(metaclass=abc.ABCMeta): + @classmethod + def from_public_bytes(cls, data: bytes) -> X448PublicKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_public_bytes(data) + + @abc.abstractmethod + def public_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PublicFormat, + ) -> bytes: + """ + The serialized bytes of the public key. + """ + + @abc.abstractmethod + def public_bytes_raw(self) -> bytes: + """ + The raw bytes of the public key. + Equivalent to public_bytes(Raw, Raw). + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Checks equality. + """ + + @abc.abstractmethod + def __copy__(self) -> X448PublicKey: + """ + Returns a copy. 
+ """ + + +if hasattr(rust_openssl, "x448"): + X448PublicKey.register(rust_openssl.x448.X448PublicKey) + + +class X448PrivateKey(metaclass=abc.ABCMeta): + @classmethod + def generate(cls) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.generate_key() + + @classmethod + def from_private_bytes(cls, data: Buffer) -> X448PrivateKey: + from cryptography.hazmat.backends.openssl.backend import backend + + if not backend.x448_supported(): + raise UnsupportedAlgorithm( + "X448 is not supported by this version of OpenSSL.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, + ) + + return rust_openssl.x448.from_private_bytes(data) + + @abc.abstractmethod + def public_key(self) -> X448PublicKey: + """ + Returns the public key associated with this private key + """ + + @abc.abstractmethod + def private_bytes( + self, + encoding: _serialization.Encoding, + format: _serialization.PrivateFormat, + encryption_algorithm: _serialization.KeySerializationEncryption, + ) -> bytes: + """ + The serialized bytes of the private key. + """ + + @abc.abstractmethod + def private_bytes_raw(self) -> bytes: + """ + The raw bytes of the private key. + Equivalent to private_bytes(Raw, Raw, NoEncryption()). + """ + + @abc.abstractmethod + def exchange(self, peer_public_key: X448PublicKey) -> bytes: + """ + Performs a key exchange operation using the provided peer's public key. + """ + + @abc.abstractmethod + def __copy__(self) -> X448PrivateKey: + """ + Returns a copy. + """ + + +if hasattr(rust_openssl, "x448"): + X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..10c15d0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, + AEADDecryptionContext, + AEADEncryptionContext, + Cipher, + CipherContext, +) + +__all__ = [ + "AEADCipherContext", + "AEADDecryptionContext", + "AEADEncryptionContext", + "BlockCipherAlgorithm", + "Cipher", + "CipherAlgorithm", + "CipherContext", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..37213a9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc new file mode 100644 index 0000000..11b5ded Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc new file mode 100644 index 0000000..42e660d Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..822bc4f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc new file mode 100644 index 0000000..b8b7bf9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py new file mode 100644 index 0000000..c8a582d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
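The aead module that follows simply re-exports one-shot AEAD constructions from the Rust bindings. Typical AESGCM use, as a sketch (the nonce must never repeat under the same key; 96 bits is the conventional size):

import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)
ciphertext = aesgcm.encrypt(nonce, b"secret message", b"associated data")
assert aesgcm.decrypt(nonce, ciphertext, b"associated data") == b"secret message"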
+ +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = [ + "AESCCM", + "AESGCM", + "AESGCMSIV", + "AESOCB3", + "AESSIV", + "ChaCha20Poly1305", +] + +AESGCM = rust_openssl.aead.AESGCM +ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 +AESCCM = rust_openssl.aead.AESCCM +AESSIV = rust_openssl.aead.AESSIV +AESOCB3 = rust_openssl.aead.AESOCB3 +AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..1e402c7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,136 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + ARC4 as ARC4, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + CAST5 as CAST5, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + IDEA as IDEA, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + SEED as SEED, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + Blowfish as Blowfish, +) +from cryptography.hazmat.decrepit.ciphers.algorithms import ( + TripleDES as TripleDES, +) +from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) + + +class AES(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + # 512 added to support AES-256-XTS, which uses 512-bit keys + key_sizes = frozenset([128, 192, 256, 512]) + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class AES128(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([128]) + key_size = 128 + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + +class AES256(BlockCipherAlgorithm): + name = "AES" + block_size = 128 + key_sizes = frozenset([256]) + key_size = 256 + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + +class Camellia(BlockCipherAlgorithm): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key: utils.Buffer): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +utils.deprecated( + ARC4, + __name__, + "ARC4 has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " + "will be removed from " + "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", + utils.DeprecatedIn43, + name="ARC4", +) + + +utils.deprecated( + TripleDES, + __name__, + "TripleDES has been moved to " + "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " + "will be removed from " + "cryptography.hazmat.primitives.ciphers.algorithms in 48.0.0.", + utils.DeprecatedIn43, + name="TripleDES", +) + + +class ChaCha20(CipherAlgorithm): + name = "ChaCha20" + key_sizes = frozenset([256]) + + def __init__(self, key: utils.Buffer, nonce: utils.Buffer): + self.key = _verify_key_size(self, key) + 
utils._check_byteslike("nonce", nonce) + + if len(nonce) != 16: + raise ValueError("nonce must be 128-bits (16 bytes)") + + self._nonce = nonce + + @property + def nonce(self) -> utils.Buffer: + return self._nonce + + @property + def key_size(self) -> int: + return len(self.key) * 8 + + +class SM4(BlockCipherAlgorithm): + name = "SM4" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key: bytes): + self.key = _verify_key_size(self, key) + + @property + def key_size(self) -> int: + return len(self.key) * 8 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 0000000..24fceea --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,146 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import typing + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm +from cryptography.hazmat.primitives.ciphers import modes +from cryptography.utils import Buffer + + +class CipherContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: Buffer) -> bytes: + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def update_into(self, data: Buffer, buf: Buffer) -> int: + """ + Processes the provided bytes and writes the resulting data into the + provided buffer. Returns the number of bytes written. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Returns the results of processing the final block as bytes. + """ + + @abc.abstractmethod + def reset_nonce(self, nonce: bytes) -> None: + """ + Resets the nonce for the cipher context to the provided value. + Raises an exception if it does not support reset or if the + provided nonce does not have a valid length. + """ + + +class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def authenticate_additional_data(self, data: Buffer) -> None: + """ + Authenticates the provided bytes. + """ + + +class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @abc.abstractmethod + def finalize_with_tag(self, tag: bytes) -> bytes: + """ + Returns the results of processing the final block as bytes and allows + delayed passing of the authentication tag. + """ + + +class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes: + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +Mode = typing.TypeVar( + "Mode", bound=typing.Optional[modes.Mode], covariant=True +) + + +class Cipher(typing.Generic[Mode]): + def __init__( + self, + algorithm: CipherAlgorithm, + mode: Mode, + backend: typing.Any = None, + ) -> None: + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + # mypy needs this assert to narrow the type from our generic + # type. Maybe it won't some time in the future. 
+ assert isinstance(mode, modes.Mode) + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + + @typing.overload + def encryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADEncryptionContext: ... + + @typing.overload + def encryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." + ) + + return rust_openssl.ciphers.create_encryption_ctx( + self.algorithm, self.mode + ) + + @typing.overload + def decryptor( + self: Cipher[modes.ModeWithAuthenticationTag], + ) -> AEADDecryptionContext: ... + + @typing.overload + def decryptor( + self: _CIPHER_TYPE, + ) -> CipherContext: ... + + def decryptor(self): + return rust_openssl.ciphers.create_decryption_ctx( + self.algorithm, self.mode + ) + + +_CIPHER_TYPE = Cipher[ + typing.Union[ + modes.ModeWithNonce, + modes.ModeWithTweak, + modes.ECB, + modes.ModeWithInitializationVector, + None, + ] +] + +CipherContext.register(rust_openssl.ciphers.CipherContext) +AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) +AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..36c555c --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,268 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives._cipheralgorithm import ( + BlockCipherAlgorithm, + CipherAlgorithm, +) +from cryptography.hazmat.primitives.ciphers import algorithms + + +class Mode(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def initialization_vector(self) -> utils.Buffer: + """ + The value of the initialization vector for this mode as bytes. + """ + + +class ModeWithTweak(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tweak(self) -> utils.Buffer: + """ + The value of the tweak for this mode as bytes. + """ + + +class ModeWithNonce(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def nonce(self) -> utils.Buffer: + """ + The value of the nonce for this mode as bytes. + """ + + +class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def tag(self) -> bytes | None: + """ + The value of the tag supplied to the constructor of this mode. 
+ """ + + +def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: + if algorithm.key_size > 256 and algorithm.name == "AES": + raise ValueError( + "Only 128, 192, and 256 bit keys are allowed for this AES mode" + ) + + +def _check_iv_length( + self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm +) -> None: + iv_len = len(self.initialization_vector) + if iv_len * 8 != algorithm.block_size: + raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") + + +def _check_nonce_length( + nonce: utils.Buffer, name: str, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{name} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + if len(nonce) * 8 != algorithm.block_size: + raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") + + +def _check_iv_and_key_length( + self: ModeWithInitializationVector, algorithm: CipherAlgorithm +) -> None: + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + f"{self} requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + _check_aes_key_length(self, algorithm) + _check_iv_length(self, algorithm) + + +class CBC(ModeWithInitializationVector): + name = "CBC" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class XTS(ModeWithTweak): + name = "XTS" + + def __init__(self, tweak: utils.Buffer): + utils._check_byteslike("tweak", tweak) + + if len(tweak) != 16: + raise ValueError("tweak must be 128-bits (16 bytes)") + + self._tweak = tweak + + @property + def tweak(self) -> utils.Buffer: + return self._tweak + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): + raise TypeError( + "The AES128 and AES256 classes do not support XTS, please use " + "the standard AES class instead." 
+ ) + + if algorithm.key_size not in (256, 512): + raise ValueError( + "The XTS specification requires a 256-bit key for AES-128-XTS" + " and 512-bit key for AES-256-XTS" + ) + + +class ECB(Mode): + name = "ECB" + + validate_for_algorithm = _check_aes_key_length + + +class OFB(ModeWithInitializationVector): + name = "OFB" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB(ModeWithInitializationVector): + name = "CFB" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CFB8(ModeWithInitializationVector): + name = "CFB8" + + def __init__(self, initialization_vector: utils.Buffer): + utils._check_byteslike("initialization_vector", initialization_vector) + self._initialization_vector = initialization_vector + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + validate_for_algorithm = _check_iv_and_key_length + + +class CTR(ModeWithNonce): + name = "CTR" + + def __init__(self, nonce: utils.Buffer): + utils._check_byteslike("nonce", nonce) + self._nonce = nonce + + @property + def nonce(self) -> utils.Buffer: + return self._nonce + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + _check_nonce_length(self.nonce, self.name, algorithm) + + +class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 + _MAX_AAD_BYTES = (2**64) // 8 + + def __init__( + self, + initialization_vector: utils.Buffer, + tag: bytes | None = None, + min_tag_length: int = 16, + ): + # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive + # This is a sane limit anyway so we'll enforce it here. + utils._check_byteslike("initialization_vector", initialization_vector) + if len(initialization_vector) < 8 or len(initialization_vector) > 128: + raise ValueError( + "initialization_vector must be between 8 and 128 bytes (64 " + "and 1024 bits)." + ) + self._initialization_vector = initialization_vector + if tag is not None: + utils._check_bytes("tag", tag) + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if len(tag) < min_tag_length: + raise ValueError( + f"Authentication tag must be {min_tag_length} bytes or " + "longer." 
+ ) + self._tag = tag + self._min_tag_length = min_tag_length + + @property + def tag(self) -> bytes | None: + return self._tag + + @property + def initialization_vector(self) -> utils.Buffer: + return self._initialization_vector + + def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: + _check_aes_key_length(self, algorithm) + if not isinstance(algorithm, BlockCipherAlgorithm): + raise UnsupportedAlgorithm( + "GCM requires a block cipher algorithm", + _Reasons.UNSUPPORTED_CIPHER, + ) + block_size_bytes = algorithm.block_size // 8 + if self._tag is not None and len(self._tag) > block_size_bytes: + raise ValueError( + f"Authentication tag cannot be more than {block_size_bytes} " + "bytes." + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 0000000..2c67ce2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["CMAC"] +CMAC = rust_openssl.cmac.CMAC diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..3975c71 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import hmac + + +def bytes_eq(a: bytes, b: bytes) -> bool: + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..4b55ec3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,246 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.utils import Buffer + +__all__ = [ + "MD5", + "SHA1", + "SHA3_224", + "SHA3_256", + "SHA3_384", + "SHA3_512", + "SHA224", + "SHA256", + "SHA384", + "SHA512", + "SHA512_224", + "SHA512_256", + "SHAKE128", + "SHAKE256", + "SM3", + "BLAKE2b", + "BLAKE2s", + "ExtendableOutputFunction", + "Hash", + "HashAlgorithm", + "HashContext", + "XOFHash", +] + + +class HashAlgorithm(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def name(self) -> str: + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @property + @abc.abstractmethod + def digest_size(self) -> int: + """ + The size of the resulting digest in bytes. 
+ """ + + @property + @abc.abstractmethod + def block_size(self) -> int | None: + """ + The internal block size of the hash function, or None if the hash + function does not use blocks internally (e.g. SHA3). + """ + + +class HashContext(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def algorithm(self) -> HashAlgorithm: + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data: Buffer) -> None: + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self) -> HashContext: + """ + Return a HashContext that is a copy of the current context. + """ + + +Hash = rust_openssl.hashes.Hash +HashContext.register(Hash) + +XOFHash = rust_openssl.hashes.XOFHash + + +class ExtendableOutputFunction(metaclass=abc.ABCMeta): + """ + An interface for extendable output functions. + """ + + +class SHA1(HashAlgorithm): + name = "sha1" + digest_size = 20 + block_size = 64 + + +class SHA512_224(HashAlgorithm): # noqa: N801 + name = "sha512-224" + digest_size = 28 + block_size = 128 + + +class SHA512_256(HashAlgorithm): # noqa: N801 + name = "sha512-256" + digest_size = 32 + block_size = 128 + + +class SHA224(HashAlgorithm): + name = "sha224" + digest_size = 28 + block_size = 64 + + +class SHA256(HashAlgorithm): + name = "sha256" + digest_size = 32 + block_size = 64 + + +class SHA384(HashAlgorithm): + name = "sha384" + digest_size = 48 + block_size = 128 + + +class SHA512(HashAlgorithm): + name = "sha512" + digest_size = 64 + block_size = 128 + + +class SHA3_224(HashAlgorithm): # noqa: N801 + name = "sha3-224" + digest_size = 28 + block_size = None + + +class SHA3_256(HashAlgorithm): # noqa: N801 + name = "sha3-256" + digest_size = 32 + block_size = None + + +class SHA3_384(HashAlgorithm): # noqa: N801 + name = "sha3-384" + digest_size = 48 + block_size = None + + +class SHA3_512(HashAlgorithm): # noqa: N801 + name = "sha3-512" + digest_size = 64 + block_size = None + + +class SHAKE128(HashAlgorithm, ExtendableOutputFunction): + name = "shake128" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SHAKE256(HashAlgorithm, ExtendableOutputFunction): + name = "shake256" + block_size = None + + def __init__(self, digest_size: int): + if not isinstance(digest_size, int): + raise TypeError("digest_size must be an integer") + + if digest_size < 1: + raise ValueError("digest_size must be a positive integer") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class MD5(HashAlgorithm): + name = "md5" + digest_size = 16 + block_size = 64 + + +class BLAKE2b(HashAlgorithm): + name = "blake2b" + _max_digest_size = 64 + _min_digest_size = 1 + block_size = 128 + + def __init__(self, digest_size: int): + if digest_size != 64: + raise ValueError("Digest size must be 64") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class BLAKE2s(HashAlgorithm): + name = "blake2s" + block_size = 64 + _max_digest_size = 32 + _min_digest_size = 1 + + def __init__(self, digest_size: 
int): + if digest_size != 32: + raise ValueError("Digest size must be 32") + + self._digest_size = digest_size + + @property + def digest_size(self) -> int: + return self._digest_size + + +class SM3(HashAlgorithm): + name = "sm3" + digest_size = 32 + block_size = 64 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..a9442d5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import hashes + +__all__ = ["HMAC"] + +HMAC = rust_openssl.hmac.HMAC +hashes.HashContext.register(HMAC) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..79bb459 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + + +class KeyDerivationFunction(metaclass=abc.ABCMeta): + @abc.abstractmethod + def derive(self, key_material: bytes) -> bytes: + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material: bytes, expected_key: bytes) -> None: + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. 
+ """ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..185279f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc new file mode 100644 index 0000000..9c6246f Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc new file mode 100644 index 0000000..7c827f6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc new file mode 100644 index 0000000..decda41 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc new file mode 100644 index 0000000..8830968 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc new file mode 100644 index 0000000..fd40dda Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc new file mode 100644 index 0000000..8d9a59c Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc new file mode 100644 index 0000000..7dd5046 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/argon2.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/argon2.py new file mode 100644 index 0000000..405fc8d --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/argon2.py @@ -0,0 +1,13 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +Argon2id = rust_openssl.kdf.Argon2id +KeyDerivationFunction.register(Argon2id) + +__all__ = ["Argon2id"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..1b92841 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing +from collections.abc import Callable + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +def _common_args_checks( + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, +) -> None: + max_length = algorithm.digest_size * (2**32 - 1) + if length > max_length: + raise ValueError(f"Cannot derive keys larger than {max_length} bits.") + if otherinfo is not None: + utils._check_bytes("otherinfo", otherinfo) + + +def _concatkdf_derive( + key_material: utils.Buffer, + length: int, + auxfn: Callable[[], hashes.HashContext], + otherinfo: bytes, +) -> bytes: + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while length > outlen: + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +class ConcatKDFHash(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" + + self._used = False + + def _hash(self) -> hashes.Hash: + return hashes.Hash(self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hash, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class ConcatKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes | None, + otherinfo: bytes | None, + backend: typing.Any = None, + ): + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo: bytes = otherinfo if otherinfo is not None 
else b"" + + if algorithm.block_size is None: + raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") + + if salt is None: + salt = b"\x00" * algorithm.block_size + else: + utils._check_bytes("salt", salt) + + self._salt = salt + + self._used = False + + def _hmac(self) -> hmac.HMAC: + return hmac.HMAC(self._salt, self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive( + key_material, self._length, self._hmac, self._otherinfo + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..1e162d9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,16 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +HKDF = rust_openssl.kdf.HKDF +HKDFExpand = rust_openssl.kdf.HKDFExpand + +KeyDerivationFunction.register(HKDF) +KeyDerivationFunction.register(HKDFExpand) + +__all__ = ["HKDF", "HKDFExpand"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py new file mode 100644 index 0000000..5b47137 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py @@ -0,0 +1,303 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +import typing +from collections.abc import Callable + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.primitives import ( + ciphers, + cmac, + constant_time, + hashes, + hmac, +) +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class Mode(utils.Enum): + CounterMode = "ctr" + + +class CounterLocation(utils.Enum): + BeforeFixed = "before_fixed" + AfterFixed = "after_fixed" + MiddleFixed = "middle_fixed" + + +class _KBKDFDeriver: + def __init__( + self, + prf: Callable, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + break_location: int | None, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + ): + assert callable(prf) + + if not isinstance(mode, Mode): + raise TypeError("mode must be of type Mode") + + if not isinstance(location, CounterLocation): + raise TypeError("location must be of type CounterLocation") + + if break_location is None and location is CounterLocation.MiddleFixed: + raise ValueError("Please specify a break_location") + + if ( + break_location is not None + and location != CounterLocation.MiddleFixed + ): + raise ValueError( + "break_location is ignored when location is not" + " CounterLocation.MiddleFixed" + ) + + if break_location is not None and not isinstance(break_location, int): + raise TypeError("break_location must be an integer") + + if break_location is not None and break_location < 0: + raise ValueError("break_location must be a positive integer") + + if (label or context) and fixed: + raise ValueError( + "When supplying fixed data, label and context are ignored." + ) + + if rlen is None or not self._valid_byte_length(rlen): + raise ValueError("rlen must be between 1 and 4") + + if llen is None and fixed is None: + raise ValueError("Please specify an llen") + + if llen is not None and not isinstance(llen, int): + raise TypeError("llen must be an integer") + + if llen == 0: + raise ValueError("llen must be non-zero") + + if label is None: + label = b"" + + if context is None: + context = b"" + + utils._check_bytes("label", label) + utils._check_bytes("context", context) + self._prf = prf + self._mode = mode + self._length = length + self._rlen = rlen + self._llen = llen + self._location = location + self._break_location = break_location + self._label = label + self._context = context + self._used = False + self._fixed_data = fixed + + @staticmethod + def _valid_byte_length(value: int) -> bool: + if not isinstance(value, int): + raise TypeError("value must be of type int") + + value_bin = utils.int_to_bytes(1, value) + return 1 <= len(value_bin) <= 4 + + def derive( + self, key_material: utils.Buffer, prf_output_size: int + ) -> bytes: + if self._used: + raise AlreadyFinalized + + utils._check_byteslike("key_material", key_material) + self._used = True + + # inverse floor division (equivalent to ceiling) + rounds = -(-self._length // prf_output_size) + + output = [b""] + + # For counter mode, the number of iterations shall not be + # larger than 2^r-1, where r <= 32 is the binary length of the counter + # This ensures that the counter values used as an input to the + # PRF will not repeat during a particular call to the KDF function. 
+ r_bin = utils.int_to_bytes(1, self._rlen) + if rounds > pow(2, len(r_bin) * 8) - 1: + raise ValueError("There are too many iterations.") + + fixed = self._generate_fixed_input() + + if self._location == CounterLocation.BeforeFixed: + data_before_ctr = b"" + data_after_ctr = fixed + elif self._location == CounterLocation.AfterFixed: + data_before_ctr = fixed + data_after_ctr = b"" + else: + if isinstance( + self._break_location, int + ) and self._break_location > len(fixed): + raise ValueError("break_location offset > len(fixed)") + data_before_ctr = fixed[: self._break_location] + data_after_ctr = fixed[self._break_location :] + + for i in range(1, rounds + 1): + h = self._prf(key_material) + + counter = utils.int_to_bytes(i, self._rlen) + input_data = data_before_ctr + counter + data_after_ctr + + h.update(input_data) + + output.append(h.finalize()) + + return b"".join(output)[: self._length] + + def _generate_fixed_input(self) -> bytes: + if self._fixed_data and isinstance(self._fixed_data, bytes): + return self._fixed_data + + l_val = utils.int_to_bytes(self._length * 8, self._llen) + + return b"".join([self._label, b"\x00", self._context, l_val]) + + +class KBKDFHMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hash algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported hmac algorithm.", + _Reasons.UNSUPPORTED_HASH, + ) + + self._algorithm = algorithm + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, key_material: bytes) -> hmac.HMAC: + return hmac.HMAC(key_material, self._algorithm) + + def derive(self, key_material: utils.Buffer) -> bytes: + return self._deriver.derive(key_material, self._algorithm.digest_size) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +class KBKDFCMAC(KeyDerivationFunction): + def __init__( + self, + algorithm, + mode: Mode, + length: int, + rlen: int, + llen: int | None, + location: CounterLocation, + label: bytes | None, + context: bytes | None, + fixed: bytes | None, + backend: typing.Any = None, + *, + break_location: int | None = None, + ): + if not issubclass( + algorithm, ciphers.BlockCipherAlgorithm + ) or not issubclass(algorithm, ciphers.CipherAlgorithm): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + self._algorithm = algorithm + self._cipher: ciphers.BlockCipherAlgorithm | None = None + + self._deriver = _KBKDFDeriver( + self._prf, + mode, + length, + rlen, + llen, + location, + break_location, + label, + context, + fixed, + ) + + def _prf(self, _: bytes) -> cmac.CMAC: + assert self._cipher is not None + + return cmac.CMAC(self._cipher) + + def derive(self, key_material: utils.Buffer) -> bytes: + self._cipher = self._algorithm(key_material) + + assert 
self._cipher is not None + + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.cmac_algorithm_supported(self._cipher): + raise UnsupportedAlgorithm( + "Algorithm supplied is not a supported cipher algorithm.", + _Reasons.UNSUPPORTED_CIPHER, + ) + + return self._deriver.derive(key_material, self._cipher.block_size // 8) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..d539f13 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,62 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, + InvalidKey, + UnsupportedAlgorithm, + _Reasons, +) +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +class PBKDF2HMAC(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + salt: bytes, + iterations: int, + backend: typing.Any = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + f"{algorithm.name} is not supported for PBKDF2.", + _Reasons.UNSUPPORTED_HASH, + ) + self._used = False + self._algorithm = algorithm + self._length = length + utils._check_bytes("salt", salt) + self._salt = salt + self._iterations = iterations + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + return rust_openssl.kdf.derive_pbkdf2_hmac( + key_material, + self._algorithm, + self._salt, + self._iterations, + self._length, + ) + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py new file mode 100644 index 0000000..f791cee --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py @@ -0,0 +1,19 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
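Before the scrypt wrapper that follows, a minimal usage sketch of the KBKDFHMAC counter-mode KDF defined above; the label and context bytes are illustrative only:

# Hedged sketch: NIST SP 800-108 counter-mode KDF over HMAC-SHA256.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.kbkdf import (
    CounterLocation,
    KBKDFHMAC,
    Mode,
)

kdf = KBKDFHMAC(
    algorithm=hashes.SHA256(),
    mode=Mode.CounterMode,
    length=32,
    rlen=4,  # 4-byte big-endian counter
    llen=4,  # 4-byte encoding of the output length (required when fixed=None)
    location=CounterLocation.BeforeFixed,
    label=b"KBKDF HMAC Label",      # illustrative
    context=b"KBKDF HMAC Context",  # illustrative
    fixed=None,
)
key = kdf.derive(b"input key material")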
+ +from __future__ import annotations + +import sys + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + +# This is used by the scrypt tests to skip tests that require more memory +# than the MEM_LIMIT +_MEM_LIMIT = sys.maxsize // 2 + +Scrypt = rust_openssl.kdf.Scrypt +KeyDerivationFunction.register(Scrypt) + +__all__ = ["Scrypt"] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..63870cd --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized, InvalidKey +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n: int) -> bytes: + return n.to_bytes(length=4, byteorder="big") + + +class X963KDF(KeyDerivationFunction): + def __init__( + self, + algorithm: hashes.HashAlgorithm, + length: int, + sharedinfo: bytes | None, + backend: typing.Any = None, + ): + max_len = algorithm.digest_size * (2**32 - 1) + if length > max_len: + raise ValueError(f"Cannot derive keys larger than {max_len} bits.") + if sharedinfo is not None: + utils._check_bytes("sharedinfo", sharedinfo) + + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + self._used = False + + def derive(self, key_material: utils.Buffer) -> bytes: + if self._used: + raise AlreadyFinalized + self._used = True + utils._check_byteslike("key_material", key_material) + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[: self._length] + + def verify(self, key_material: bytes, expected_key: bytes) -> None: + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 0000000..b93d87d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,177 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
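Before the key-wrapping helpers that follow, a minimal usage sketch of the Scrypt wrapper above; the cost parameters shown are common interactive-login values, not a recommendation from this repository:

import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1)
key = kdf.derive(b"my great password")
# verify() re-derives and compares in constant time, raising InvalidKey on
# mismatch; like the other KDFs here, each instance is single-use.
Scrypt(salt=salt, length=32, n=2**14, r=8, p=1).verify(b"my great password", key)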
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def _wrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> bytes: + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + a = ( + int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_wrap( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, a, r) + + +def _unwrap_core( + wrapping_key: bytes, + a: bytes, + r: list[bytes], +) -> tuple[bytes, list[bytes]]: + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + atr = ( + int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) + ).to_bytes(length=8, byteorder="big") + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + return a, r + + +def aes_key_wrap_with_padding( + wrapping_key: bytes, + key_to_wrap: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( + length=4, byteorder="big" + ) + # pad the key to wrap if necessary + pad = (8 - (len(key_to_wrap) % 8)) % 8 + key_to_wrap = key_to_wrap + b"\x00" * pad + if len(key_to_wrap) == 8: + # RFC 5649 - 4.1 - exactly 8 octets after padding + encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() + b = encryptor.update(aiv + key_to_wrap) + assert encryptor.finalize() == b"" + return b + else: + r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] + return _wrap_core(wrapping_key, aiv, r) + + +def aes_key_unwrap_with_padding( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 16: + raise InvalidUnwrap("Must be at least 16 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(wrapped_key) == 16: + # RFC 5649 - 4.2 - exactly two 64-bit blocks + decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() + out = decryptor.update(wrapped_key) + assert decryptor.finalize() == b"" + a = out[:8] + data = 
out[8:] + n = 1 + else: + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + encrypted_aiv = r.pop(0) + n = len(r) + a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) + data = b"".join(r) + + # 1) Check that MSB(32,A) = A65959A6. + # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let + # MLI = LSB(32,A). + # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of + # the output data are zero. + mli = int.from_bytes(a[4:], byteorder="big") + b = (8 * n) - mli + if ( + not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") + or not 8 * (n - 1) < mli <= 8 * n + or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) + ): + raise InvalidUnwrap() + + if b == 0: + return data + else: + return data[:-b] + + +def aes_key_unwrap( + wrapping_key: bytes, + wrapped_key: bytes, + backend: typing.Any = None, +) -> bytes: + if len(wrapped_key) < 24: + raise InvalidUnwrap("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + a, r = _unwrap_core(wrapping_key, a, r) + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..f9cd1f1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc + +from cryptography import utils +from cryptography.hazmat.bindings._rust import ( + ANSIX923PaddingContext, + ANSIX923UnpaddingContext, + PKCS7PaddingContext, + PKCS7UnpaddingContext, +) + + +class PaddingContext(metaclass=abc.ABCMeta): + @abc.abstractmethod + def update(self, data: utils.Buffer) -> bytes: + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self) -> bytes: + """ + Finalize the padding, returns bytes. 
+ """ + + +def _byte_padding_check(block_size: int) -> None: + if not (0 <= block_size <= 2040): + raise ValueError("block_size must be in range(0, 2041).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + +class PKCS7: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return PKCS7PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return PKCS7UnpaddingContext(self.block_size) + + +PaddingContext.register(PKCS7PaddingContext) +PaddingContext.register(PKCS7UnpaddingContext) + + +class ANSIX923: + def __init__(self, block_size: int): + _byte_padding_check(block_size) + self.block_size = block_size + + def padder(self) -> PaddingContext: + return ANSIX923PaddingContext(self.block_size) + + def unpadder(self) -> PaddingContext: + return ANSIX923UnpaddingContext(self.block_size) + + +PaddingContext.register(ANSIX923PaddingContext) +PaddingContext.register(ANSIX923UnpaddingContext) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py new file mode 100644 index 0000000..7f5a77a --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py @@ -0,0 +1,11 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +__all__ = ["Poly1305"] + +Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py new file mode 100644 index 0000000..62283cc --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py @@ -0,0 +1,65 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import annotations + +from cryptography.hazmat.primitives._serialization import ( + BestAvailableEncryption, + Encoding, + KeySerializationEncryption, + NoEncryption, + ParameterFormat, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) +from cryptography.hazmat.primitives.serialization.base import ( + load_der_parameters, + load_der_private_key, + load_der_public_key, + load_pem_parameters, + load_pem_private_key, + load_pem_public_key, +) +from cryptography.hazmat.primitives.serialization.ssh import ( + SSHCertificate, + SSHCertificateBuilder, + SSHCertificateType, + SSHCertPrivateKeyTypes, + SSHCertPublicKeyTypes, + SSHPrivateKeyTypes, + SSHPublicKeyTypes, + load_ssh_private_key, + load_ssh_public_identity, + load_ssh_public_key, + ssh_key_fingerprint, +) + +__all__ = [ + "BestAvailableEncryption", + "Encoding", + "KeySerializationEncryption", + "NoEncryption", + "ParameterFormat", + "PrivateFormat", + "PublicFormat", + "SSHCertPrivateKeyTypes", + "SSHCertPublicKeyTypes", + "SSHCertificate", + "SSHCertificateBuilder", + "SSHCertificateType", + "SSHPrivateKeyTypes", + "SSHPublicKeyTypes", + "_KeySerializationEncryption", + "load_der_parameters", + "load_der_private_key", + "load_der_public_key", + "load_pem_parameters", + "load_pem_private_key", + "load_pem_public_key", + "load_ssh_private_key", + "load_ssh_public_identity", + "load_ssh_public_key", + "ssh_key_fingerprint", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..15312f6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..f6bd2bb Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc new file mode 100644 index 0000000..5011fc5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc new file mode 100644 index 0000000..dd632d6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc new file mode 100644 index 0000000..7107c81 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py new file mode 100644 index 0000000..e7c998b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from cryptography.hazmat.bindings._rust import openssl as rust_openssl + +load_pem_private_key = rust_openssl.keys.load_pem_private_key +load_der_private_key = rust_openssl.keys.load_der_private_key + +load_pem_public_key = rust_openssl.keys.load_pem_public_key +load_der_public_key = rust_openssl.keys.load_der_public_key + +load_pem_parameters = rust_openssl.dh.from_pem_parameters +load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py new file mode 100644 index 0000000..58884ff --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py @@ -0,0 +1,176 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing +from collections.abc import Iterable + +from cryptography import x509 +from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives._serialization import PBES as PBES +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + rsa, +) +from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes + +__all__ = [ + "PBES", + "PKCS12Certificate", + "PKCS12KeyAndCertificates", + "PKCS12PrivateKeyTypes", + "load_key_and_certificates", + "load_pkcs12", + "serialize_java_truststore", + "serialize_key_and_certificates", +] + +PKCS12PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, +] + + +PKCS12Certificate = rust_pkcs12.PKCS12Certificate + + +class PKCS12KeyAndCertificates: + def __init__( + self, + key: PrivateKeyTypes | None, + cert: PKCS12Certificate | None, + additional_certs: list[PKCS12Certificate], + ): + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None." 
+ ) + if cert is not None and not isinstance(cert, PKCS12Certificate): + raise TypeError("cert must be a PKCS12Certificate object or None") + if not all( + isinstance(add_cert, PKCS12Certificate) + for add_cert in additional_certs + ): + raise TypeError( + "all values in additional_certs must be PKCS12Certificate" + " objects" + ) + self._key = key + self._cert = cert + self._additional_certs = additional_certs + + @property + def key(self) -> PrivateKeyTypes | None: + return self._key + + @property + def cert(self) -> PKCS12Certificate | None: + return self._cert + + @property + def additional_certs(self) -> list[PKCS12Certificate]: + return self._additional_certs + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PKCS12KeyAndCertificates): + return NotImplemented + + return ( + self.key == other.key + and self.cert == other.cert + and self.additional_certs == other.additional_certs + ) + + def __hash__(self) -> int: + return hash((self.key, self.cert, tuple(self.additional_certs))) + + def __repr__(self) -> str: + fmt = ( + "<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>" + ) + return fmt.format(self.key, self.cert, self.additional_certs) + + +load_key_and_certificates = rust_pkcs12.load_key_and_certificates +load_pkcs12 = rust_pkcs12.load_pkcs12 + + +_PKCS12CATypes = typing.Union[ + x509.Certificate, + PKCS12Certificate, +] + + +def serialize_java_truststore( + certs: Iterable[PKCS12Certificate], + encryption_algorithm: serialization.KeySerializationEncryption, +) -> bytes: + if not certs: + raise ValueError("You must supply at least one cert") + + if not isinstance( + encryption_algorithm, serialization.KeySerializationEncryption + ): + raise TypeError( + "Key encryption algorithm must be a " + "KeySerializationEncryption instance" + ) + + return rust_pkcs12.serialize_java_truststore(certs, encryption_algorithm) + + +def serialize_key_and_certificates( + name: bytes | None, + key: PKCS12PrivateKeyTypes | None, + cert: x509.Certificate | None, + cas: Iterable[_PKCS12CATypes] | None, + encryption_algorithm: serialization.KeySerializationEncryption, +) -> bytes: + if key is not None and not isinstance( + key, + ( + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ec.EllipticCurvePrivateKey, + ed25519.Ed25519PrivateKey, + ed448.Ed448PrivateKey, + ), + ): + raise TypeError( + "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" + " private key, or None." + ) + + if not isinstance( + encryption_algorithm, serialization.KeySerializationEncryption + ): + raise TypeError( + "Key encryption algorithm must be a " + "KeySerializationEncryption instance" + ) + + if key is None and cert is None and not cas: + raise ValueError("You must supply at least one of key, cert, or cas") + + return rust_pkcs12.serialize_key_and_certificates( + name, key, cert, cas, encryption_algorithm + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py new file mode 100644 index 0000000..456dc5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py @@ -0,0 +1,411 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
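Before the PKCS#7 module body that follows, a minimal end-to-end sketch of the PKCS#12 APIs above, generating a throwaway RSA key and self-signed certificate; the name and passphrase are illustrative assumptions:

import datetime
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import pkcs12
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example")])
now = datetime.datetime.now(datetime.timezone.utc)
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed purely for illustration
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=1))
    .sign(key, hashes.SHA256())
)
p12 = pkcs12.serialize_key_and_certificates(
    b"example", key, cert, None,
    serialization.BestAvailableEncryption(b"passphrase"),
)
loaded_key, loaded_cert, extra_certs = pkcs12.load_key_and_certificates(
    p12, b"passphrase"
)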
+ +from __future__ import annotations + +import email.base64mime +import email.generator +import email.message +import email.policy +import io +import typing +from collections.abc import Iterable + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa +from cryptography.hazmat.primitives.ciphers import ( + algorithms, +) +from cryptography.utils import _check_byteslike + +load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates + +load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates + +serialize_certificates = rust_pkcs7.serialize_certificates + +PKCS7HashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +] + +PKCS7PrivateKeyTypes = typing.Union[ + rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey +] + +ContentEncryptionAlgorithm = typing.Union[ + typing.Type[algorithms.AES128], typing.Type[algorithms.AES256] +] + + +class PKCS7Options(utils.Enum): + Text = "Add text/plain MIME type" + Binary = "Don't translate input data into canonical MIME format" + DetachedSignature = "Don't embed data in the PKCS7 structure" + NoCapabilities = "Don't embed SMIME capabilities" + NoAttributes = "Don't embed authenticatedAttributes" + NoCerts = "Don't embed signer certificate" + + +class PKCS7SignatureBuilder: + def __init__( + self, + data: utils.Buffer | None = None, + signers: list[ + tuple[ + x509.Certificate, + PKCS7PrivateKeyTypes, + PKCS7HashTypes, + padding.PSS | padding.PKCS1v15 | None, + ] + ] = [], + additional_certs: list[x509.Certificate] = [], + ): + self._data = data + self._signers = signers + self._additional_certs = additional_certs + + def set_data(self, data: utils.Buffer) -> PKCS7SignatureBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7SignatureBuilder(data, self._signers) + + def add_signer( + self, + certificate: x509.Certificate, + private_key: PKCS7PrivateKeyTypes, + hash_algorithm: PKCS7HashTypes, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ) -> PKCS7SignatureBuilder: + if not isinstance( + hash_algorithm, + ( + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + ), + ): + raise TypeError( + "hash_algorithm must be one of hashes.SHA224, " + "SHA256, SHA384, or SHA512" + ) + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance( + private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) + ): + raise TypeError("Only RSA & EC keys are supported at this time.") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + return PKCS7SignatureBuilder( + self._data, + [ + *self._signers, + (certificate, private_key, hash_algorithm, rsa_padding), + ], + ) + + def add_certificate( + self, certificate: x509.Certificate + ) -> PKCS7SignatureBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + return PKCS7SignatureBuilder( + self._data, self._signers, [*self._additional_certs, certificate] + ) + 
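+ # sign() below validates the encoding and option combinations in Python, then delegates the actual PKCS#7 (CMS) construction to the Rust backend via rust_pkcs7.sign_and_serialize().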
+ def sign( + self, + encoding: serialization.Encoding, + options: Iterable[PKCS7Options], + backend: typing.Any = None, + ) -> bytes: + if len(self._signers) == 0: + raise ValueError("Must have at least one signer") + if self._data is None: + raise ValueError("You must add data to sign") + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Text is a meaningless option unless it is accompanied by + # DetachedSignature + if ( + PKCS7Options.Text in options + and PKCS7Options.DetachedSignature not in options + ): + raise ValueError( + "When passing the Text option you must also pass " + "DetachedSignature" + ) + + if PKCS7Options.Text in options and encoding in ( + serialization.Encoding.DER, + serialization.Encoding.PEM, + ): + raise ValueError( + "The Text option is only available for SMIME serialization" + ) + + # No attributes implies no capabilities so we'll error if you try to + # pass both. + if ( + PKCS7Options.NoAttributes in options + and PKCS7Options.NoCapabilities in options + ): + raise ValueError( + "NoAttributes is a superset of NoCapabilities. Do not pass " + "both values." + ) + + return rust_pkcs7.sign_and_serialize(self, encoding, options) + + +class PKCS7EnvelopeBuilder: + def __init__( + self, + *, + _data: bytes | None = None, + _recipients: list[x509.Certificate] | None = None, + _content_encryption_algorithm: ContentEncryptionAlgorithm + | None = None, + ): + from cryptography.hazmat.backends.openssl.backend import ( + backend as ossl, + ) + + if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): + raise UnsupportedAlgorithm( + "RSA with PKCS1 v1.5 padding is not supported by this version" + " of OpenSSL.", + _Reasons.UNSUPPORTED_PADDING, + ) + self._data = _data + self._recipients = _recipients if _recipients is not None else [] + self._content_encryption_algorithm = _content_encryption_algorithm + + def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: + _check_byteslike("data", data) + if self._data is not None: + raise ValueError("data may only be set once") + + return PKCS7EnvelopeBuilder( + _data=data, + _recipients=self._recipients, + _content_encryption_algorithm=self._content_encryption_algorithm, + ) + + def add_recipient( + self, + certificate: x509.Certificate, + ) -> PKCS7EnvelopeBuilder: + if not isinstance(certificate, x509.Certificate): + raise TypeError("certificate must be a x509.Certificate") + + if not isinstance(certificate.public_key(), rsa.RSAPublicKey): + raise TypeError("Only RSA keys are supported at this time.") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=[ + *self._recipients, + certificate, + ], + _content_encryption_algorithm=self._content_encryption_algorithm, + ) + + def set_content_encryption_algorithm( + self, content_encryption_algorithm: ContentEncryptionAlgorithm + ) -> PKCS7EnvelopeBuilder: + if self._content_encryption_algorithm is not None: + raise ValueError("Content encryption algo may only be set once") + if content_encryption_algorithm not in { + algorithms.AES128, + algorithms.AES256, + }: + raise TypeError("Only AES128 and AES256 are supported") + + return PKCS7EnvelopeBuilder( + _data=self._data, + _recipients=self._recipients, + _content_encryption_algorithm=content_encryption_algorithm, 
+ ) + + def encrypt( + self, + encoding: serialization.Encoding, + options: Iterable[PKCS7Options], + ) -> bytes: + if len(self._recipients) == 0: + raise ValueError("Must have at least one recipient") + if self._data is None: + raise ValueError("You must add data to encrypt") + + # The default content encryption algorithm is AES-128, which the S/MIME + # v3.2 RFC specifies as MUST support (https://datatracker.ietf.org/doc/html/rfc5751#section-2.7) + content_encryption_algorithm = ( + self._content_encryption_algorithm or algorithms.AES128 + ) + + options = list(options) + if not all(isinstance(x, PKCS7Options) for x in options): + raise ValueError("options must be from the PKCS7Options enum") + if encoding not in ( + serialization.Encoding.PEM, + serialization.Encoding.DER, + serialization.Encoding.SMIME, + ): + raise ValueError( + "Must be PEM, DER, or SMIME from the Encoding enum" + ) + + # Only allow options that make sense for encryption + if any( + opt not in [PKCS7Options.Text, PKCS7Options.Binary] + for opt in options + ): + raise ValueError( + "Only the following options are supported for encryption: " + "Text, Binary" + ) + elif PKCS7Options.Text in options and PKCS7Options.Binary in options: + # OpenSSL accepts both options at the same time, but ignores Text. + # We fail defensively to avoid unexpected outputs. + raise ValueError( + "Cannot use Binary and Text options at the same time" + ) + + return rust_pkcs7.encrypt_and_serialize( + self, content_encryption_algorithm, encoding, options + ) + + +pkcs7_decrypt_der = rust_pkcs7.decrypt_der +pkcs7_decrypt_pem = rust_pkcs7.decrypt_pem +pkcs7_decrypt_smime = rust_pkcs7.decrypt_smime + + +def _smime_signed_encode( + data: bytes, signature: bytes, micalg: str, text_mode: bool +) -> bytes: + # This function works pretty hard to replicate what OpenSSL does + # precisely. For good and for ill. 
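+ # The result is a multipart/signed MIME message: the first part carries the (optionally text/plain-wrapped) payload, the second a base64-encoded detached application/x-pkcs7-signature attachment named smime.p7s.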
+ + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header( + "Content-Type", + "multipart/signed", + protocol="application/x-pkcs7-signature", + micalg=micalg, + ) + + m.preamble = "This is an S/MIME signed message\n" + + msg_part = OpenSSLMimePart() + msg_part.set_payload(data) + if text_mode: + msg_part.add_header("Content-Type", "text/plain") + m.attach(msg_part) + + sig_part = email.message.MIMEPart() + sig_part.add_header( + "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" + ) + sig_part.add_header("Content-Transfer-Encoding", "base64") + sig_part.add_header( + "Content-Disposition", "attachment", filename="smime.p7s" + ) + sig_part.set_payload( + email.base64mime.body_encode(signature, maxlinelen=65) + ) + del sig_part["MIME-Version"] + m.attach(sig_part) + + fp = io.BytesIO() + g = email.generator.BytesGenerator( + fp, + maxheaderlen=0, + mangle_from_=False, + policy=m.policy.clone(linesep="\r\n"), + ) + g.flatten(m) + return fp.getvalue() + + +def _smime_enveloped_encode(data: bytes) -> bytes: + m = email.message.Message() + m.add_header("MIME-Version", "1.0") + m.add_header("Content-Disposition", "attachment", filename="smime.p7m") + m.add_header( + "Content-Type", + "application/pkcs7-mime", + smime_type="enveloped-data", + name="smime.p7m", + ) + m.add_header("Content-Transfer-Encoding", "base64") + + m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) + + return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) + + +def _smime_enveloped_decode(data: bytes) -> bytes: + m = email.message_from_bytes(data) + if m.get_content_type() not in { + "application/x-pkcs7-mime", + "application/pkcs7-mime", + }: + raise ValueError("Not an S/MIME enveloped message") + return bytes(m.get_payload(decode=True)) + + +def _smime_remove_text_headers(data: bytes) -> bytes: + m = email.message_from_bytes(data) + # Using get() instead of get_content_type() since it has None as default, + # where the latter has "text/plain". Both methods are case-insensitive. + content_type = m.get("content-type") + if content_type is None: + raise ValueError( + "Decrypted MIME data has no 'Content-Type' header. " + "Please remove the 'Text' option to parse it manually." + ) + if "text/plain" not in content_type: + raise ValueError( + f"Decrypted MIME data content type is '{content_type}', not " + "'text/plain'. Remove the 'Text' option to parse it manually." + ) + return bytes(m.get_payload(decode=True)) + + +class OpenSSLMimePart(email.message.MIMEPart): + # A MIMEPart subclass that replicates OpenSSL's behavior of not including + # a newline if there are no headers. + def _write_headers(self, generator) -> None: + if list(self.raw_items()): + generator._write_headers(self) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py new file mode 100644 index 0000000..cb10cf8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py @@ -0,0 +1,1619 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
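Before the SSH key serialization module that follows, a minimal usage sketch of the PKCS7SignatureBuilder defined above; it reuses the illustrative `key` and `cert` from the PKCS#12 sketch earlier rather than redefining them:

from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.serialization import pkcs7

# `key` and `cert` as generated in the PKCS#12 sketch above (illustrative).
signed = (
    pkcs7.PKCS7SignatureBuilder()
    .set_data(b"hello world")
    .add_signer(cert, key, hashes.SHA256())
    .sign(serialization.Encoding.PEM, [])  # no options: attached signature
)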
+ +from __future__ import annotations + +import binascii +import enum +import os +import re +import typing +import warnings +from base64 import encodebytes as _base64_encode +from dataclasses import dataclass + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed25519, + padding, + rsa, +) +from cryptography.hazmat.primitives.asymmetric import utils as asym_utils +from cryptography.hazmat.primitives.ciphers import ( + AEADDecryptionContext, + Cipher, + algorithms, + modes, +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, + KeySerializationEncryption, + NoEncryption, + PrivateFormat, + PublicFormat, + _KeySerializationEncryption, +) + +try: + from bcrypt import kdf as _bcrypt_kdf + + _bcrypt_supported = True +except ImportError: + _bcrypt_supported = False + + def _bcrypt_kdf( + password: bytes, + salt: bytes, + desired_key_bytes: int, + rounds: int, + ignore_few_rounds: bool = False, + ) -> bytes: + raise UnsupportedAlgorithm("Need bcrypt module") + + +_SSH_ED25519 = b"ssh-ed25519" +_SSH_RSA = b"ssh-rsa" +_SSH_DSA = b"ssh-dss" +_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" +_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" +_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" +_CERT_SUFFIX = b"-cert-v01@openssh.com" + +# U2F application string suffixed pubkey +_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" +_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" + +# These are not key types, only algorithms, so they cannot appear +# as a public key type +_SSH_RSA_SHA256 = b"rsa-sha2-256" +_SSH_RSA_SHA512 = b"rsa-sha2-512" + +_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") +_SK_MAGIC = b"openssh-key-v1\0" +_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" +_SK_END = b"-----END OPENSSH PRIVATE KEY-----" +_BCRYPT = b"bcrypt" +_NONE = b"none" +_DEFAULT_CIPHER = b"aes256-ctr" +_DEFAULT_ROUNDS = 16 + +# re is only way to work on bytes-like data +_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) + +# padding for max blocksize +_PADDING = memoryview(bytearray(range(1, 1 + 16))) + + +@dataclass +class _SSHCipher: + alg: type[algorithms.AES] + key_len: int + mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] + block_len: int + iv_len: int + tag_len: int | None + is_aead: bool + + +# ciphers that are actually used in key wrapping +_SSH_CIPHERS: dict[bytes, _SSHCipher] = { + b"aes256-ctr": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CTR, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-cbc": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.CBC, + block_len=16, + iv_len=16, + tag_len=None, + is_aead=False, + ), + b"aes256-gcm@openssh.com": _SSHCipher( + alg=algorithms.AES, + key_len=32, + mode=modes.GCM, + block_len=16, + iv_len=12, + tag_len=16, + is_aead=True, + ), +} + +# map local curve name to key type +_ECDSA_KEY_TYPE = { + "secp256r1": _ECDSA_NISTP256, + "secp384r1": _ECDSA_NISTP384, + "secp521r1": _ECDSA_NISTP521, +} + + +def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: + if isinstance(key, ec.EllipticCurvePrivateKey): + key_type = _ecdsa_key_type(key.public_key()) + elif isinstance(key, ec.EllipticCurvePublicKey): + key_type = _ecdsa_key_type(key) + elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): + key_type = _SSH_RSA + elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): + key_type = _SSH_DSA + 
elif isinstance( + key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) + ): + key_type = _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + + return key_type + + +def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: + """Return SSH key_type and curve_name for private key.""" + curve = public_key.curve + if curve.name not in _ECDSA_KEY_TYPE: + raise ValueError( + f"Unsupported curve for ssh private key: {curve.name!r}" + ) + return _ECDSA_KEY_TYPE[curve.name] + + +def _ssh_pem_encode( + data: utils.Buffer, + prefix: bytes = _SK_START + b"\n", + suffix: bytes = _SK_END + b"\n", +) -> bytes: + return b"".join([prefix, _base64_encode(data), suffix]) + + +def _check_block_size(data: utils.Buffer, block_len: int) -> None: + """Require data to be full blocks""" + if not data or len(data) % block_len != 0: + raise ValueError("Corrupt data: missing padding") + + +def _check_empty(data: utils.Buffer) -> None: + """All data should have been parsed.""" + if data: + raise ValueError("Corrupt data: unparsed data") + + +def _init_cipher( + ciphername: bytes, + password: bytes | None, + salt: bytes, + rounds: int, +) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: + """Generate key + iv and return cipher.""" + if not password: + raise TypeError( + "Key is password-protected, but password was not provided." + ) + + ciph = _SSH_CIPHERS[ciphername] + seed = _bcrypt_kdf( + password, salt, ciph.key_len + ciph.iv_len, rounds, True + ) + return Cipher( + ciph.alg(seed[: ciph.key_len]), + ciph.mode(seed[ciph.key_len :]), + ) + + +def _get_u32(data: memoryview) -> tuple[int, memoryview]: + """Uint32""" + if len(data) < 4: + raise ValueError("Invalid data") + return int.from_bytes(data[:4], byteorder="big"), data[4:] + + +def _get_u64(data: memoryview) -> tuple[int, memoryview]: + """Uint64""" + if len(data) < 8: + raise ValueError("Invalid data") + return int.from_bytes(data[:8], byteorder="big"), data[8:] + + +def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: + """Bytes with u32 length prefix""" + n, data = _get_u32(data) + if n > len(data): + raise ValueError("Invalid data") + return data[:n], data[n:] + + +def _get_mpint(data: memoryview) -> tuple[int, memoryview]: + """Big integer.""" + val, data = _get_sshstr(data) + if val and val[0] > 0x7F: + raise ValueError("Invalid data") + return int.from_bytes(val, "big"), data + + +def _to_mpint(val: int) -> bytes: + """Storage format for signed bigint.""" + if val < 0: + raise ValueError("negative mpint not allowed") + if not val: + return b"" + nbytes = (val.bit_length() + 8) // 8 + return utils.int_to_bytes(val, nbytes) + + +class _FragList: + """Build recursive structure without data copy.""" + + flist: list[utils.Buffer] + + def __init__(self, init: list[utils.Buffer] | None = None) -> None: + self.flist = [] + if init: + self.flist.extend(init) + + def put_raw(self, val: utils.Buffer) -> None: + """Add plain bytes""" + self.flist.append(val) + + def put_u32(self, val: int) -> None: + """Big-endian uint32""" + self.flist.append(val.to_bytes(length=4, byteorder="big")) + + def put_u64(self, val: int) -> None: + """Big-endian uint64""" + self.flist.append(val.to_bytes(length=8, byteorder="big")) + + def put_sshstr(self, val: bytes | _FragList) -> None: + """Bytes prefixed with u32 length""" + if isinstance(val, (bytes, memoryview, bytearray)): + self.put_u32(len(val)) + self.flist.append(val) + else: + self.put_u32(val.size()) + self.flist.extend(val.flist) + + def put_mpint(self, val: int) -> None: + 
"""Big-endian bigint prefixed with u32 length""" + self.put_sshstr(_to_mpint(val)) + + def size(self) -> int: + """Current number of bytes""" + return sum(map(len, self.flist)) + + def render(self, dstbuf: memoryview, pos: int = 0) -> int: + """Write into bytearray""" + for frag in self.flist: + flen = len(frag) + start, pos = pos, pos + flen + dstbuf[start:pos] = frag + return pos + + def tobytes(self) -> bytes: + """Return as bytes""" + buf = memoryview(bytearray(self.size())) + self.render(buf) + return buf.tobytes() + + +class _SSHFormatRSA: + """Format for RSA keys. + + Public: + mpint e, n + Private: + mpint n, e, d, iqmp, p, q + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[int, int], memoryview]: + """RSA public fields""" + e, data = _get_mpint(data) + n, data = _get_mpint(data) + return (e, n), data + + def load_public( + self, data: memoryview + ) -> tuple[rsa.RSAPublicKey, memoryview]: + """Make RSA public key from data.""" + (e, n), data = self.get_public(data) + public_numbers = rsa.RSAPublicNumbers(e, n) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[rsa.RSAPrivateKey, memoryview]: + """Make RSA private key from data.""" + n, data = _get_mpint(data) + e, data = _get_mpint(data) + d, data = _get_mpint(data) + iqmp, data = _get_mpint(data) + p, data = _get_mpint(data) + q, data = _get_mpint(data) + + if (e, n) != pubfields: + raise ValueError("Corrupt data: rsa field mismatch") + dmp1 = rsa.rsa_crt_dmp1(d, p) + dmq1 = rsa.rsa_crt_dmq1(d, q) + public_numbers = rsa.RSAPublicNumbers(e, n) + private_numbers = rsa.RSAPrivateNumbers( + p, q, d, dmp1, dmq1, iqmp, public_numbers + ) + private_key = private_numbers.private_key( + unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation + ) + return private_key, data + + def encode_public( + self, public_key: rsa.RSAPublicKey, f_pub: _FragList + ) -> None: + """Write RSA public key""" + pubn = public_key.public_numbers() + f_pub.put_mpint(pubn.e) + f_pub.put_mpint(pubn.n) + + def encode_private( + self, private_key: rsa.RSAPrivateKey, f_priv: _FragList + ) -> None: + """Write RSA private key""" + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + f_priv.put_mpint(public_numbers.n) + f_priv.put_mpint(public_numbers.e) + + f_priv.put_mpint(private_numbers.d) + f_priv.put_mpint(private_numbers.iqmp) + f_priv.put_mpint(private_numbers.p) + f_priv.put_mpint(private_numbers.q) + + +class _SSHFormatDSA: + """Format for DSA keys. 
+ + Public: + mpint p, q, g, y + Private: + mpint p, q, g, y, x + """ + + def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: + """DSA public fields""" + p, data = _get_mpint(data) + q, data = _get_mpint(data) + g, data = _get_mpint(data) + y, data = _get_mpint(data) + return (p, q, g, y), data + + def load_public( + self, data: memoryview + ) -> tuple[dsa.DSAPublicKey, memoryview]: + """Make DSA public key from data.""" + (p, q, g, y), data = self.get_public(data) + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + public_key = public_numbers.public_key() + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[dsa.DSAPrivateKey, memoryview]: + """Make DSA private key from data.""" + (p, q, g, y), data = self.get_public(data) + x, data = _get_mpint(data) + + if (p, q, g, y) != pubfields: + raise ValueError("Corrupt data: dsa field mismatch") + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + self._validate(public_numbers) + private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) + private_key = private_numbers.private_key() + return private_key, data + + def encode_public( + self, public_key: dsa.DSAPublicKey, f_pub: _FragList + ) -> None: + """Write DSA public key""" + public_numbers = public_key.public_numbers() + parameter_numbers = public_numbers.parameter_numbers + self._validate(public_numbers) + + f_pub.put_mpint(parameter_numbers.p) + f_pub.put_mpint(parameter_numbers.q) + f_pub.put_mpint(parameter_numbers.g) + f_pub.put_mpint(public_numbers.y) + + def encode_private( + self, private_key: dsa.DSAPrivateKey, f_priv: _FragList + ) -> None: + """Write DSA private key""" + self.encode_public(private_key.public_key(), f_priv) + f_priv.put_mpint(private_key.private_numbers().x) + + def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: + parameter_numbers = public_numbers.parameter_numbers + if parameter_numbers.p.bit_length() != 1024: + raise ValueError("SSH supports only 1024 bit DSA keys") + + +class _SSHFormatECDSA: + """Format for ECDSA keys. 
+ + Public: + str curve + bytes point + Private: + str curve + bytes point + mpint secret + """ + + def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): + self.ssh_curve_name = ssh_curve_name + self.curve = curve + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview, memoryview], memoryview]: + """ECDSA public fields""" + curve, data = _get_sshstr(data) + point, data = _get_sshstr(data) + if curve != self.ssh_curve_name: + raise ValueError("Curve name mismatch") + if point[0] != 4: + raise NotImplementedError("Need uncompressed point") + return (curve, point), data + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + (_, point), data = self.get_public(data) + public_key = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: + """Make ECDSA private key from data.""" + (curve_name, point), data = self.get_public(data) + secret, data = _get_mpint(data) + + if (curve_name, point) != pubfields: + raise ValueError("Corrupt data: ecdsa field mismatch") + private_key = ec.derive_private_key(secret, self.curve) + return private_key, data + + def encode_public( + self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList + ) -> None: + """Write ECDSA public key""" + point = public_key.public_bytes( + Encoding.X962, PublicFormat.UncompressedPoint + ) + f_pub.put_sshstr(self.ssh_curve_name) + f_pub.put_sshstr(point) + + def encode_private( + self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList + ) -> None: + """Write ECDSA private key""" + public_key = private_key.public_key() + private_numbers = private_key.private_numbers() + + self.encode_public(public_key, f_priv) + f_priv.put_mpint(private_numbers.private_value) + + +class _SSHFormatEd25519: + """Format for Ed25519 keys. 
+ + Public: + bytes point + Private: + bytes point + bytes secret_and_point + """ + + def get_public( + self, data: memoryview + ) -> tuple[tuple[memoryview], memoryview]: + """Ed25519 public fields""" + point, data = _get_sshstr(data) + return (point,), data + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + (point,), data = self.get_public(data) + public_key = ed25519.Ed25519PublicKey.from_public_bytes( + point.tobytes() + ) + return public_key, data + + def load_private( + self, data: memoryview, pubfields, unsafe_skip_rsa_key_validation: bool + ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: + """Make Ed25519 private key from data.""" + (point,), data = self.get_public(data) + keypair, data = _get_sshstr(data) + + secret = keypair[:32] + point2 = keypair[32:] + if point != point2 or (point,) != pubfields: + raise ValueError("Corrupt data: ed25519 field mismatch") + private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) + return private_key, data + + def encode_public( + self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList + ) -> None: + """Write Ed25519 public key""" + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_pub.put_sshstr(raw_public_key) + + def encode_private( + self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList + ) -> None: + """Write Ed25519 private key""" + public_key = private_key.public_key() + raw_private_key = private_key.private_bytes( + Encoding.Raw, PrivateFormat.Raw, NoEncryption() + ) + raw_public_key = public_key.public_bytes( + Encoding.Raw, PublicFormat.Raw + ) + f_keypair = _FragList([raw_private_key, raw_public_key]) + + self.encode_public(public_key, f_priv) + f_priv.put_sshstr(f_keypair) + + +def load_application(data) -> tuple[memoryview, memoryview]: + """ + U2F application strings + """ + application, data = _get_sshstr(data) + if not application.tobytes().startswith(b"ssh:"): + raise ValueError( + "U2F application string does not start with b'ssh:' " + f"({application})" + ) + return application, data + + +class _SSHFormatSKEd25519: + """ + The format of a sk-ssh-ed25519@openssh.com public key is: + + string "sk-ssh-ed25519@openssh.com" + string public key + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: + """Make Ed25519 public key from data.""" + public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) + _, data = load_application(data) + return public_key, data + + def get_public(self, data: memoryview) -> typing.NoReturn: + # Confusingly `get_public` is an entry point used by private key + # loading. + raise UnsupportedAlgorithm( + "sk-ssh-ed25519 private keys cannot be loaded" + ) + + +class _SSHFormatSKECDSA: + """ + The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: + + string "sk-ecdsa-sha2-nistp256@openssh.com" + string curve name + ec_point Q + string application (user-specified, but typically "ssh:") + """ + + def load_public( + self, data: memoryview + ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: + """Make ECDSA public key from data.""" + public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) + _, data = load_application(data) + return public_key, data + + def get_public(self, data: memoryview) -> typing.NoReturn: + # Confusingly `get_public` is an entry point used by private key + # loading. 
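+ # Illustrative note (editor's addition, not upstream code): for the
+ # sk-* security-key types the on-disk private portion is a FIDO key
+ # handle for the authenticator rather than the raw secret, and this
+ # module does not implement that format, so private-key loading is
+ # rejected for both SK classes.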
+ raise UnsupportedAlgorithm( + "sk-ecdsa-sha2-nistp256 private keys cannot be loaded" + ) + + +_KEY_FORMATS = { + _SSH_RSA: _SSHFormatRSA(), + _SSH_DSA: _SSHFormatDSA(), + _SSH_ED25519: _SSHFormatEd25519(), + _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), + _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), + _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), + _SK_SSH_ED25519: _SSHFormatSKEd25519(), + _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), +} + + +def _lookup_kformat(key_type: utils.Buffer): + """Return valid format or throw error""" + if not isinstance(key_type, bytes): + key_type = memoryview(key_type).tobytes() + if key_type in _KEY_FORMATS: + return _KEY_FORMATS[key_type] + raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") + + +SSHPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + dsa.DSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +def load_ssh_private_key( + data: utils.Buffer, + password: bytes | None, + backend: typing.Any = None, + *, + unsafe_skip_rsa_key_validation: bool = False, +) -> SSHPrivateKeyTypes: + """Load private key from OpenSSH custom encoding.""" + utils._check_byteslike("data", data) + if password is not None: + utils._check_bytes("password", password) + + m = _PEM_RC.search(data) + if not m: + raise ValueError("Not OpenSSH private key format") + p1 = m.start(1) + p2 = m.end(1) + data = binascii.a2b_base64(memoryview(data)[p1:p2]) + if not data.startswith(_SK_MAGIC): + raise ValueError("Not OpenSSH private key format") + data = memoryview(data)[len(_SK_MAGIC) :] + + # parse header + ciphername, data = _get_sshstr(data) + kdfname, data = _get_sshstr(data) + kdfoptions, data = _get_sshstr(data) + nkeys, data = _get_u32(data) + if nkeys != 1: + raise ValueError("Only one key supported") + + # load public key data + pubdata, data = _get_sshstr(data) + pub_key_type, pubdata = _get_sshstr(pubdata) + kformat = _lookup_kformat(pub_key_type) + pubfields, pubdata = kformat.get_public(pubdata) + _check_empty(pubdata) + + if ciphername != _NONE or kdfname != _NONE: + ciphername_bytes = ciphername.tobytes() + if ciphername_bytes not in _SSH_CIPHERS: + raise UnsupportedAlgorithm( + f"Unsupported cipher: {ciphername_bytes!r}" + ) + if kdfname != _BCRYPT: + raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") + blklen = _SSH_CIPHERS[ciphername_bytes].block_len + tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len + # load secret data + edata, data = _get_sshstr(data) + # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for + # information about how OpenSSH handles AEAD tags + if _SSH_CIPHERS[ciphername_bytes].is_aead: + tag = bytes(data) + if len(tag) != tag_len: + raise ValueError("Corrupt data: invalid tag length for cipher") + else: + _check_empty(data) + _check_block_size(edata, blklen) + salt, kbuf = _get_sshstr(kdfoptions) + rounds, kbuf = _get_u32(kbuf) + _check_empty(kbuf) + ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) + dec = ciph.decryptor() + edata = memoryview(dec.update(edata)) + if _SSH_CIPHERS[ciphername_bytes].is_aead: + assert isinstance(dec, AEADDecryptionContext) + _check_empty(dec.finalize_with_tag(tag)) + else: + # _check_block_size requires data to be a full block so there + # should be no output from finalize + _check_empty(dec.finalize()) + else: + if password: + raise TypeError( + "Password was given but private key is not encrypted." 
+ ) + # load secret data + edata, data = _get_sshstr(data) + _check_empty(data) + blklen = 8 + _check_block_size(edata, blklen) + ck1, edata = _get_u32(edata) + ck2, edata = _get_u32(edata) + if ck1 != ck2: + raise ValueError("Corrupt data: broken checksum") + + # load per-key struct + key_type, edata = _get_sshstr(edata) + if key_type != pub_key_type: + raise ValueError("Corrupt data: key type mismatch") + private_key, edata = kformat.load_private( + edata, + pubfields, + unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation, + ) + # We don't use the comment + _, edata = _get_sshstr(edata) + + # yes, SSH does padding check *after* all other parsing is done. + # need to follow as it writes zero-byte padding too. + if edata != _PADDING[: len(edata)]: + raise ValueError("Corrupt data: invalid padding") + + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + + return private_key + + +def _serialize_ssh_private_key( + private_key: SSHPrivateKeyTypes, + password: bytes, + encryption_algorithm: KeySerializationEncryption, +) -> bytes: + """Serialize private key with OpenSSH custom encoding.""" + utils._check_bytes("password", password) + if isinstance(private_key, dsa.DSAPrivateKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + + key_type = _get_ssh_key_type(private_key) + kformat = _lookup_kformat(key_type) + + # setup parameters + f_kdfoptions = _FragList() + if password: + ciphername = _DEFAULT_CIPHER + blklen = _SSH_CIPHERS[ciphername].block_len + kdfname = _BCRYPT + rounds = _DEFAULT_ROUNDS + if ( + isinstance(encryption_algorithm, _KeySerializationEncryption) + and encryption_algorithm._kdf_rounds is not None + ): + rounds = encryption_algorithm._kdf_rounds + salt = os.urandom(16) + f_kdfoptions.put_sshstr(salt) + f_kdfoptions.put_u32(rounds) + ciph = _init_cipher(ciphername, password, salt, rounds) + else: + ciphername = kdfname = _NONE + blklen = 8 + ciph = None + nkeys = 1 + checkval = os.urandom(4) + comment = b"" + + # encode public and private parts together + f_public_key = _FragList() + f_public_key.put_sshstr(key_type) + kformat.encode_public(private_key.public_key(), f_public_key) + + f_secrets = _FragList([checkval, checkval]) + f_secrets.put_sshstr(key_type) + kformat.encode_private(private_key, f_secrets) + f_secrets.put_sshstr(comment) + f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) + + # top-level structure + f_main = _FragList() + f_main.put_raw(_SK_MAGIC) + f_main.put_sshstr(ciphername) + f_main.put_sshstr(kdfname) + f_main.put_sshstr(f_kdfoptions) + f_main.put_u32(nkeys) + f_main.put_sshstr(f_public_key) + f_main.put_sshstr(f_secrets) + + # copy result into bytearray + slen = f_secrets.size() + mlen = f_main.size() + buf = memoryview(bytearray(mlen + blklen)) + f_main.render(buf) + ofs = mlen - slen + + # encrypt in-place + if ciph is not None: + ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) + + return _ssh_pem_encode(buf[:mlen]) + + +SSHPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + dsa.DSAPublicKey, + ed25519.Ed25519PublicKey, +] + +SSHCertPublicKeyTypes = typing.Union[ + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, +] + + +class SSHCertificateType(enum.Enum): + USER = 1 + HOST = 2 + + +class SSHCertificate: + def __init__( + self, +
_nonce: memoryview, + _public_key: SSHPublicKeyTypes, + _serial: int, + _cctype: int, + _key_id: memoryview, + _valid_principals: list[bytes], + _valid_after: int, + _valid_before: int, + _critical_options: dict[bytes, bytes], + _extensions: dict[bytes, bytes], + _sig_type: memoryview, + _sig_key: memoryview, + _inner_sig_type: memoryview, + _signature: memoryview, + _tbs_cert_body: memoryview, + _cert_key_type: bytes, + _cert_body: memoryview, + ): + self._nonce = _nonce + self._public_key = _public_key + self._serial = _serial + try: + self._type = SSHCertificateType(_cctype) + except ValueError: + raise ValueError("Invalid certificate type") + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_after = _valid_after + self._valid_before = _valid_before + self._critical_options = _critical_options + self._extensions = _extensions + self._sig_type = _sig_type + self._sig_key = _sig_key + self._inner_sig_type = _inner_sig_type + self._signature = _signature + self._cert_key_type = _cert_key_type + self._cert_body = _cert_body + self._tbs_cert_body = _tbs_cert_body + + @property + def nonce(self) -> bytes: + return bytes(self._nonce) + + def public_key(self) -> SSHCertPublicKeyTypes: + # make mypy happy until we remove DSA support entirely and + # the underlying union won't have a disallowed type + return typing.cast(SSHCertPublicKeyTypes, self._public_key) + + @property + def serial(self) -> int: + return self._serial + + @property + def type(self) -> SSHCertificateType: + return self._type + + @property + def key_id(self) -> bytes: + return bytes(self._key_id) + + @property + def valid_principals(self) -> list[bytes]: + return self._valid_principals + + @property + def valid_before(self) -> int: + return self._valid_before + + @property + def valid_after(self) -> int: + return self._valid_after + + @property + def critical_options(self) -> dict[bytes, bytes]: + return self._critical_options + + @property + def extensions(self) -> dict[bytes, bytes]: + return self._extensions + + def signature_key(self) -> SSHCertPublicKeyTypes: + sigformat = _lookup_kformat(self._sig_type) + signature_key, sigkey_rest = sigformat.load_public(self._sig_key) + _check_empty(sigkey_rest) + return signature_key + + def public_bytes(self) -> bytes: + return ( + bytes(self._cert_key_type) + + b" " + + binascii.b2a_base64(bytes(self._cert_body), newline=False) + ) + + def verify_cert_signature(self) -> None: + signature_key = self.signature_key() + if isinstance(signature_key, ed25519.Ed25519PublicKey): + signature_key.verify( + bytes(self._signature), bytes(self._tbs_cert_body) + ) + elif isinstance(signature_key, ec.EllipticCurvePublicKey): + # The signature is encoded as a pair of big-endian integers + r, data = _get_mpint(self._signature) + s, data = _get_mpint(data) + _check_empty(data) + computed_sig = asym_utils.encode_dss_signature(r, s) + hash_alg = _get_ec_hash_alg(signature_key.curve) + signature_key.verify( + computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) + ) + else: + assert isinstance(signature_key, rsa.RSAPublicKey) + if self._inner_sig_type == _SSH_RSA: + hash_alg = hashes.SHA1() + elif self._inner_sig_type == _SSH_RSA_SHA256: + hash_alg = hashes.SHA256() + else: + assert self._inner_sig_type == _SSH_RSA_SHA512 + hash_alg = hashes.SHA512() + signature_key.verify( + bytes(self._signature), + bytes(self._tbs_cert_body), + padding.PKCS1v15(), + hash_alg, + ) + + +def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: + if isinstance(curve, 
ec.SECP256R1): + return hashes.SHA256() + elif isinstance(curve, ec.SECP384R1): + return hashes.SHA384() + else: + assert isinstance(curve, ec.SECP521R1) + return hashes.SHA512() + + +def _load_ssh_public_identity( + data: utils.Buffer, + _legacy_dsa_allowed=False, +) -> SSHCertificate | SSHPublicKeyTypes: + utils._check_byteslike("data", data) + + m = _SSH_PUBKEY_RC.match(data) + if not m: + raise ValueError("Invalid line format") + key_type = orig_key_type = m.group(1) + key_body = m.group(2) + with_cert = False + if key_type.endswith(_CERT_SUFFIX): + with_cert = True + key_type = key_type[: -len(_CERT_SUFFIX)] + if key_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA keys aren't supported in SSH certificates" + ) + kformat = _lookup_kformat(key_type) + + try: + rest = memoryview(binascii.a2b_base64(key_body)) + except (TypeError, binascii.Error): + raise ValueError("Invalid format") + + if with_cert: + cert_body = rest + inner_key_type, rest = _get_sshstr(rest) + if inner_key_type != orig_key_type: + raise ValueError("Invalid key format") + if with_cert: + nonce, rest = _get_sshstr(rest) + public_key, rest = kformat.load_public(rest) + if with_cert: + serial, rest = _get_u64(rest) + cctype, rest = _get_u32(rest) + key_id, rest = _get_sshstr(rest) + principals, rest = _get_sshstr(rest) + valid_principals = [] + while principals: + principal, principals = _get_sshstr(principals) + valid_principals.append(bytes(principal)) + valid_after, rest = _get_u64(rest) + valid_before, rest = _get_u64(rest) + crit_options, rest = _get_sshstr(rest) + critical_options = _parse_exts_opts(crit_options) + exts, rest = _get_sshstr(rest) + extensions = _parse_exts_opts(exts) + # Get the reserved field, which is unused. + _, rest = _get_sshstr(rest) + sig_key_raw, rest = _get_sshstr(rest) + sig_type, sig_key = _get_sshstr(sig_key_raw) + if sig_type == _SSH_DSA and not _legacy_dsa_allowed: + raise UnsupportedAlgorithm( + "DSA signatures aren't supported in SSH certificates" + ) + # Get the entire cert body and subtract the signature + tbs_cert_body = cert_body[: -len(rest)] + signature_raw, rest = _get_sshstr(rest) + _check_empty(rest) + inner_sig_type, sig_rest = _get_sshstr(signature_raw) + # RSA certs can have multiple algorithm types + if ( + sig_type == _SSH_RSA + and inner_sig_type + not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] + ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): + raise ValueError("Signature key type does not match") + signature, sig_rest = _get_sshstr(sig_rest) + _check_empty(sig_rest) + return SSHCertificate( + nonce, + public_key, + serial, + cctype, + key_id, + valid_principals, + valid_after, + valid_before, + critical_options, + extensions, + sig_type, + sig_key, + inner_sig_type, + signature, + tbs_cert_body, + orig_key_type, + cert_body, + ) + else: + _check_empty(rest) + return public_key + + +def load_ssh_public_identity( + data: utils.Buffer, +) -> SSHCertificate | SSHPublicKeyTypes: + return _load_ssh_public_identity(data) + + +def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: + result: dict[bytes, bytes] = {} + last_name = None + while exts_opts: + name, exts_opts = _get_sshstr(exts_opts) + bname: bytes = bytes(name) + if bname in result: + raise ValueError("Duplicate name") + if last_name is not None and bname < last_name: + raise ValueError("Fields not lexically sorted") + value, exts_opts = _get_sshstr(exts_opts) + if len(value) > 0: + value, extra = _get_sshstr(value) + if len(extra) > 0: + raise 
ValueError("Unexpected extra data after value") + result[bname] = bytes(value) + last_name = bname + return result + + +def ssh_key_fingerprint( + key: SSHPublicKeyTypes, + hash_algorithm: hashes.MD5 | hashes.SHA256, +) -> bytes: + if not isinstance(hash_algorithm, (hashes.MD5, hashes.SHA256)): + raise TypeError("hash_algorithm must be either MD5 or SHA256") + + key_type = _get_ssh_key_type(key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(key, f_pub) + + ssh_binary_data = f_pub.tobytes() + + # Hash the binary data + hash_obj = hashes.Hash(hash_algorithm) + hash_obj.update(ssh_binary_data) + return hash_obj.finalize() + + +def load_ssh_public_key( + data: utils.Buffer, backend: typing.Any = None +) -> SSHPublicKeyTypes: + cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) + public_key: SSHPublicKeyTypes + if isinstance(cert_or_key, SSHCertificate): + public_key = cert_or_key.public_key() + else: + public_key = cert_or_key + + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA keys are deprecated and will be removed in a future " + "release.", + utils.DeprecatedIn40, + stacklevel=2, + ) + return public_key + + +def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: + """One-line public key format for OpenSSH""" + if isinstance(public_key, dsa.DSAPublicKey): + warnings.warn( + "SSH DSA key support is deprecated and will be " + "removed in a future release", + utils.DeprecatedIn40, + stacklevel=4, + ) + key_type = _get_ssh_key_type(public_key) + kformat = _lookup_kformat(key_type) + + f_pub = _FragList() + f_pub.put_sshstr(key_type) + kformat.encode_public(public_key, f_pub) + + pub = binascii.b2a_base64(f_pub.tobytes()).strip() + return b"".join([key_type, b" ", pub]) + + +SSHCertPrivateKeyTypes = typing.Union[ + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, +] + + +# This is an undocumented limit enforced in the openssh codebase for sshd and +# ssh-keygen, but it is undefined in the ssh certificates spec. 
+_SSHKEY_CERT_MAX_PRINCIPALS = 256 + + +class SSHCertificateBuilder: + def __init__( + self, + _public_key: SSHCertPublicKeyTypes | None = None, + _serial: int | None = None, + _type: SSHCertificateType | None = None, + _key_id: bytes | None = None, + _valid_principals: list[bytes] = [], + _valid_for_all_principals: bool = False, + _valid_before: int | None = None, + _valid_after: int | None = None, + _critical_options: list[tuple[bytes, bytes]] = [], + _extensions: list[tuple[bytes, bytes]] = [], + ): + self._public_key = _public_key + self._serial = _serial + self._type = _type + self._key_id = _key_id + self._valid_principals = _valid_principals + self._valid_for_all_principals = _valid_for_all_principals + self._valid_before = _valid_before + self._valid_after = _valid_after + self._critical_options = _critical_options + self._extensions = _extensions + + def public_key( + self, public_key: SSHCertPublicKeyTypes + ) -> SSHCertificateBuilder: + if not isinstance( + public_key, + ( + ec.EllipticCurvePublicKey, + rsa.RSAPublicKey, + ed25519.Ed25519PublicKey, + ), + ): + raise TypeError("Unsupported key type") + if self._public_key is not None: + raise ValueError("public_key already set") + + return SSHCertificateBuilder( + _public_key=public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def serial(self, serial: int) -> SSHCertificateBuilder: + if not isinstance(serial, int): + raise TypeError("serial must be an integer") + if not 0 <= serial < 2**64: + raise ValueError("serial must be between 0 and 2**64") + if self._serial is not None: + raise ValueError("serial already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: + if not isinstance(type, SSHCertificateType): + raise TypeError("type must be an SSHCertificateType") + if self._type is not None: + raise ValueError("type already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def key_id(self, key_id: bytes) -> SSHCertificateBuilder: + if not isinstance(key_id, bytes): + raise TypeError("key_id must be bytes") + if self._key_id is not None: + raise ValueError("key_id already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_principals( + self, valid_principals: list[bytes] 
+ ) -> SSHCertificateBuilder: + if self._valid_for_all_principals: + raise ValueError( + "Principals can't be set because the cert is valid " + "for all principals" + ) + if ( + not all(isinstance(x, bytes) for x in valid_principals) + or not valid_principals + ): + raise TypeError( + "principals must be a list of bytes and can't be empty" + ) + if self._valid_principals: + raise ValueError("valid_principals already set") + + if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS: + raise ValueError( + "Reached or exceeded the maximum number of valid_principals" + ) + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_for_all_principals(self): + if self._valid_principals: + raise ValueError( + "valid_principals already set, can't set " + "valid_for_all_principals" + ) + if self._valid_for_all_principals: + raise ValueError("valid_for_all_principals already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=True, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder: + if not isinstance(valid_before, (int, float)): + raise TypeError("valid_before must be an int or float") + valid_before = int(valid_before) + if valid_before < 0 or valid_before >= 2**64: + raise ValueError("valid_before must be in [0, 2**64)") + if self._valid_before is not None: + raise ValueError("valid_before already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder: + if not isinstance(valid_after, (int, float)): + raise TypeError("valid_after must be an int or float") + valid_after = int(valid_after) + if valid_after < 0 or valid_after >= 2**64: + raise ValueError("valid_after must be in [0, 2**64)") + if self._valid_after is not None: + raise ValueError("valid_after already set") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=valid_after, + _critical_options=self._critical_options, + _extensions=self._extensions, + ) + + def add_critical_option( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._critical_options]: + raise ValueError("Duplicate critical option name") + + return SSHCertificateBuilder( + _public_key=self._public_key, +
_serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=[*self._critical_options, (name, value)], + _extensions=self._extensions, + ) + + def add_extension( + self, name: bytes, value: bytes + ) -> SSHCertificateBuilder: + if not isinstance(name, bytes) or not isinstance(value, bytes): + raise TypeError("name and value must be bytes") + # This is O(n**2) + if name in [name for name, _ in self._extensions]: + raise ValueError("Duplicate extension name") + + return SSHCertificateBuilder( + _public_key=self._public_key, + _serial=self._serial, + _type=self._type, + _key_id=self._key_id, + _valid_principals=self._valid_principals, + _valid_for_all_principals=self._valid_for_all_principals, + _valid_before=self._valid_before, + _valid_after=self._valid_after, + _critical_options=self._critical_options, + _extensions=[*self._extensions, (name, value)], + ) + + def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: + if not isinstance( + private_key, + ( + ec.EllipticCurvePrivateKey, + rsa.RSAPrivateKey, + ed25519.Ed25519PrivateKey, + ), + ): + raise TypeError("Unsupported private key type") + + if self._public_key is None: + raise ValueError("public_key must be set") + + # Not required + serial = 0 if self._serial is None else self._serial + + if self._type is None: + raise ValueError("type must be set") + + # Not required + key_id = b"" if self._key_id is None else self._key_id + + # A zero length list is valid, but means the certificate + # is valid for any principal of the specified type. We require + # the user to explicitly set valid_for_all_principals to get + # that behavior. 
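+ # Illustrative note (editor's addition): a certificate usable by any
+ # principal is built via .valid_for_all_principals(); passing an empty
+ # list to .valid_principals([]) is rejected with a TypeError instead.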
+ if not self._valid_principals and not self._valid_for_all_principals: + raise ValueError( + "valid_principals must be set if valid_for_all_principals " + "is False" + ) + + if self._valid_before is None: + raise ValueError("valid_before must be set") + + if self._valid_after is None: + raise ValueError("valid_after must be set") + + if self._valid_after > self._valid_before: + raise ValueError("valid_after must be earlier than valid_before") + + # lexically sort our byte strings + self._critical_options.sort(key=lambda x: x[0]) + self._extensions.sort(key=lambda x: x[0]) + + key_type = _get_ssh_key_type(self._public_key) + cert_prefix = key_type + _CERT_SUFFIX + + # Marshal the bytes to be signed + nonce = os.urandom(32) + kformat = _lookup_kformat(key_type) + f = _FragList() + f.put_sshstr(cert_prefix) + f.put_sshstr(nonce) + kformat.encode_public(self._public_key, f) + f.put_u64(serial) + f.put_u32(self._type.value) + f.put_sshstr(key_id) + fprincipals = _FragList() + for p in self._valid_principals: + fprincipals.put_sshstr(p) + f.put_sshstr(fprincipals.tobytes()) + f.put_u64(self._valid_after) + f.put_u64(self._valid_before) + fcrit = _FragList() + for name, value in self._critical_options: + fcrit.put_sshstr(name) + if len(value) > 0: + foptval = _FragList() + foptval.put_sshstr(value) + fcrit.put_sshstr(foptval.tobytes()) + else: + fcrit.put_sshstr(value) + f.put_sshstr(fcrit.tobytes()) + fext = _FragList() + for name, value in self._extensions: + fext.put_sshstr(name) + if len(value) > 0: + fextval = _FragList() + fextval.put_sshstr(value) + fext.put_sshstr(fextval.tobytes()) + else: + fext.put_sshstr(value) + f.put_sshstr(fext.tobytes()) + f.put_sshstr(b"") # RESERVED FIELD + # encode CA public key + ca_type = _get_ssh_key_type(private_key) + caformat = _lookup_kformat(ca_type) + caf = _FragList() + caf.put_sshstr(ca_type) + caformat.encode_public(private_key.public_key(), caf) + f.put_sshstr(caf.tobytes()) + # Sigs according to the rules defined for the CA's public key + # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, + # and RFC8032 for Ed25519). + if isinstance(private_key, ed25519.Ed25519PrivateKey): + signature = private_key.sign(f.tobytes()) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + elif isinstance(private_key, ec.EllipticCurvePrivateKey): + hash_alg = _get_ec_hash_alg(private_key.curve) + signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) + r, s = asym_utils.decode_dss_signature(signature) + fsig = _FragList() + fsig.put_sshstr(ca_type) + fsigblob = _FragList() + fsigblob.put_mpint(r) + fsigblob.put_mpint(s) + fsig.put_sshstr(fsigblob.tobytes()) + f.put_sshstr(fsig.tobytes()) + + else: + assert isinstance(private_key, rsa.RSAPrivateKey) + # Just like Golang, we're going to use SHA512 for RSA + # https://cs.opensource.google/go/x/crypto/+/refs/tags/ + # v0.4.0:ssh/certs.go;l=445 + # RFC 8332 defines SHA256 and 512 as options + fsig = _FragList() + fsig.put_sshstr(_SSH_RSA_SHA512) + signature = private_key.sign( + f.tobytes(), padding.PKCS1v15(), hashes.SHA512() + ) + fsig.put_sshstr(signature) + f.put_sshstr(fsig.tobytes()) + + cert_data = binascii.b2a_base64(f.tobytes()).strip() + # load_ssh_public_identity returns a union, but this is + # guaranteed to be an SSHCertificate, so we cast to make + # mypy happy. 
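+ # Minimal usage sketch (editor's addition, not upstream code); assumes
+ # ca_key / user_key are pre-existing Ed25519 private and public keys:
+ #
+ # cert = (
+ #     SSHCertificateBuilder()
+ #     .public_key(user_key)
+ #     .type(SSHCertificateType.USER)
+ #     .key_id(b"alice@example")
+ #     .valid_principals([b"alice"])
+ #     .valid_after(0)
+ #     .valid_before(2**32)
+ #     .sign(ca_key)
+ # )
+ # cert.verify_cert_signature()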
+ return typing.cast( + SSHCertificate, + load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..c1af423 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + + +class InvalidToken(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..736e8d6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc new file mode 100644 index 0000000..00e61cb Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc new file mode 100644 index 0000000..9bb6963 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..21fb000 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,101 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
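+# Minimal round-trip sketch (editor's addition, not upstream code): this
+# module implements RFC 4226 HOTP; assuming a 20-byte random secret:
+#
+#     import os
+#     from cryptography.hazmat.primitives.hashes import SHA1
+#     from cryptography.hazmat.primitives.twofactor.hotp import HOTP
+#     otp = HOTP(os.urandom(20), 6, SHA1())
+#     token = otp.generate(0)   # six ASCII digits as bytes
+#     otp.verify(token, 0)      # raises InvalidToken on mismatch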
+ +from __future__ import annotations + +import base64 +import typing +from urllib.parse import quote, urlencode + +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.utils import Buffer + +HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] + + +def _generate_uri( + hotp: HOTP, + type_name: str, + account_name: str, + issuer: str | None, + extra_parameters: list[tuple[str, int]], +) -> str: + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + label = ( + f"{quote(issuer)}:{quote(account_name)}" + if issuer + else quote(account_name) + ) + return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" + + +class HOTP: + def __init__( + self, + key: Buffer, + length: int, + algorithm: HOTPHashTypes, + backend: typing.Any = None, + enforce_key_length: bool = True, + ) -> None: + if len(key) < 16 and enforce_key_length is True: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, int): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 and 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + + def generate(self, counter: int) -> bytes: + if not isinstance(counter, int): + raise TypeError("Counter parameter must be an integer type.") + + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10**self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp: bytes, counter: int) -> None: + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter: int) -> int: + ctx = hmac.HMAC(self._key, self._algorithm) + + try: + ctx.update(counter.to_bytes(length=8, byteorder="big")) + except OverflowError: + raise ValueError(f"Counter must be between 0 and {2**64 - 1}.") + + hmac_value = ctx.finalize() + + offset = hmac_value[len(hmac_value) - 1] & 0b1111 + p = hmac_value[offset : offset + 4] + return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF + + def get_provisioning_uri( + self, account_name: str, counter: int, issuer: str | None + ) -> str: + return _generate_uri( + self, "hotp", account_name, issuer, [("counter", int(counter))] + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..10c725c --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,56 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
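+# Minimal sketch (editor's addition, not upstream code): this module
+# implements RFC 6238 TOTP by mapping wall-clock time onto an HOTP counter,
+# counter = int(time / time_step); with a 30-second step:
+#
+#     import os, time
+#     from cryptography.hazmat.primitives.hashes import SHA1
+#     from cryptography.hazmat.primitives.twofactor.totp import TOTP
+#     otp = TOTP(os.urandom(20), 6, SHA1(), 30)
+#     token = otp.generate(int(time.time()))
+#     otp.verify(token, int(time.time()))  # raises InvalidToken on mismatch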
+ +from __future__ import annotations + +import typing + +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import ( + HOTP, + HOTPHashTypes, + _generate_uri, +) +from cryptography.utils import Buffer + + +class TOTP: + def __init__( + self, + key: Buffer, + length: int, + algorithm: HOTPHashTypes, + time_step: int, + backend: typing.Any = None, + enforce_key_length: bool = True, + ): + self._time_step = time_step + self._hotp = HOTP( + key, length, algorithm, enforce_key_length=enforce_key_length + ) + + def generate(self, time: int | float) -> bytes: + if not isinstance(time, (int, float)): + raise TypeError( + "Time parameter must be an integer type or float type." + ) + + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp: bytes, time: int) -> None: + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri( + self, account_name: str, issuer: str | None + ) -> str: + return _generate_uri( + self._hotp, + "totp", + account_name, + issuer, + [("period", int(self._time_step))], + ) diff --git a/venv/lib/python3.12/site-packages/cryptography/py.typed b/venv/lib/python3.12/site-packages/cryptography/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/cryptography/utils.py b/venv/lib/python3.12/site-packages/cryptography/utils.py new file mode 100644 index 0000000..a0fc3b1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/utils.py @@ -0,0 +1,137 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import enum +import sys +import types +import typing +import warnings +from collections.abc import Callable, Sequence + + +# We use a UserWarning subclass, instead of DeprecationWarning, because CPython +# decided deprecation warnings should be invisible by default. +class CryptographyDeprecationWarning(UserWarning): + pass + + +# Several APIs were deprecated with no specific end-of-life date because of the +# ubiquity of their use. They should not be removed until we agree on when that +# cycle ends. +DeprecatedIn36 = CryptographyDeprecationWarning +DeprecatedIn40 = CryptographyDeprecationWarning +DeprecatedIn41 = CryptographyDeprecationWarning +DeprecatedIn42 = CryptographyDeprecationWarning +DeprecatedIn43 = CryptographyDeprecationWarning + + +# If you're wondering why we don't use `Buffer`, it's because `Buffer` would +# be more accurately named: Bufferable. It means something which has an +# `__buffer__`. Which means you can't actually treat the result as a buffer +# (and do things like take a `len()`). 
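+# Editor's note: concretely, on Python 3.9+ Buffer below is the closed union
+# bytes | bytearray | memoryview, all of which support len() and slicing; the
+# typing.ByteString fallback only applies to older interpreters.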
+if sys.version_info >= (3, 9): + Buffer = typing.Union[bytes, bytearray, memoryview] +else: + Buffer = typing.ByteString + + +def _check_bytes(name: str, value: bytes) -> None: + if not isinstance(value, bytes): + raise TypeError(f"{name} must be bytes") + + +def _check_byteslike(name: str, value: Buffer) -> None: + try: + memoryview(value) + except TypeError: + raise TypeError(f"{name} must be bytes-like") + + +def int_to_bytes(integer: int, length: int | None = None) -> bytes: + if length == 0: + raise ValueError("length argument can't be 0") + return integer.to_bytes( + length or (integer.bit_length() + 7) // 8 or 1, "big" + ) + + +class InterfaceNotImplemented(Exception): + pass + + +class _DeprecatedValue: + def __init__(self, value: object, message: str, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(types.ModuleType): + def __init__(self, module: types.ModuleType): + super().__init__(module.__name__) + self.__dict__["_module"] = module + + def __getattr__(self, attr: str) -> object: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr: str, value: object) -> None: + setattr(self._module, attr, value) + + def __delattr__(self, attr: str) -> None: + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + + delattr(self._module, attr) + + def __dir__(self) -> Sequence[str]: + return ["_module", *dir(self._module)] + + +def deprecated( + value: object, + module_name: str, + message: str, + warning_class: type[Warning], + name: str | None = None, +) -> _DeprecatedValue: + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + dv = _DeprecatedValue(value, message, warning_class) + # Maintain backwards compatibility with `name is None` for pyOpenSSL. + if name is not None: + setattr(module, name, dv) + return dv + + +def cached_property(func: Callable) -> property: + cached_name = f"_cached_{func}" + sentinel = object() + + def inner(instance: object): + cache = getattr(instance, cached_name, sentinel) + if cache is not sentinel: + return cache + result = func(instance) + setattr(instance, cached_name, result) + return result + + return property(inner) + + +# Python 3.10 changed representation of enums. We use well-defined object +# representation and string representation from Python 3.9. +class Enum(enum.Enum): + def __repr__(self) -> str: + return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" + + def __str__(self) -> str: + return f"{self.__class__.__name__}.{self._name_}" diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py b/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..318eecc --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,270 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
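+# Editor's note: this module is a flat re-export surface for the x509
+# package; the OID_* module-level constants defined after the imports appear
+# to be kept as backwards-compatible aliases, e.g. OID_COMMON_NAME is the
+# same object as NameOID.COMMON_NAME.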
+ +from __future__ import annotations + +from cryptography.x509 import certificate_transparency, verification +from cryptography.x509.base import ( + Attribute, + AttributeNotFound, + Attributes, + Certificate, + CertificateBuilder, + CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, + CertificateSigningRequestBuilder, + InvalidVersion, + RevokedCertificate, + RevokedCertificateBuilder, + Version, + load_der_x509_certificate, + load_der_x509_crl, + load_der_x509_csr, + load_pem_x509_certificate, + load_pem_x509_certificates, + load_pem_x509_crl, + load_pem_x509_csr, + random_serial_number, +) +from cryptography.x509.extensions import ( + AccessDescription, + Admission, + Admissions, + AuthorityInformationAccess, + AuthorityKeyIdentifier, + BasicConstraints, + CertificateIssuer, + CertificatePolicies, + CRLDistributionPoints, + CRLNumber, + CRLReason, + DeltaCRLIndicator, + DistributionPoint, + DuplicateExtension, + ExtendedKeyUsage, + Extension, + ExtensionNotFound, + Extensions, + ExtensionType, + FreshestCRL, + GeneralNames, + InhibitAnyPolicy, + InvalidityDate, + IssuerAlternativeName, + IssuingDistributionPoint, + KeyUsage, + MSCertificateTemplate, + NameConstraints, + NamingAuthority, + NoticeReference, + OCSPAcceptableResponses, + OCSPNoCheck, + OCSPNonce, + PolicyConstraints, + PolicyInformation, + PrecertificateSignedCertificateTimestamps, + PrecertPoison, + PrivateKeyUsagePeriod, + ProfessionInfo, + ReasonFlags, + SignedCertificateTimestamps, + SubjectAlternativeName, + SubjectInformationAccess, + SubjectKeyIdentifier, + TLSFeature, + TLSFeatureType, + UnrecognizedExtension, + UserNotice, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + UnsupportedGeneralNameType, +) +from cryptography.x509.name import ( + Name, + NameAttribute, + RelativeDistinguishedName, +) +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, +) + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_PRIVATE_KEY_USAGE_PERIOD = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = 
SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 +OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + +__all__ = [ + "OID_CA_ISSUERS", + "OID_OCSP", + "AccessDescription", + "Admission", + "Admissions", + "Attribute", + "AttributeNotFound", + "Attributes", + "AuthorityInformationAccess", + "AuthorityKeyIdentifier", + "BasicConstraints", + "CRLDistributionPoints", + "CRLNumber", + "CRLReason", + "Certificate", + "CertificateBuilder", + "CertificateIssuer", + "CertificatePolicies", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "CertificateSigningRequestBuilder", + "DNSName", + "DeltaCRLIndicator", + "DirectoryName", + "DistributionPoint", + "DuplicateExtension", + "ExtendedKeyUsage", + "Extension", + "ExtensionNotFound", + "ExtensionType", + "Extensions", + "FreshestCRL", + "GeneralName", + "GeneralNames", + "IPAddress", + "InhibitAnyPolicy", + "InvalidVersion", + "InvalidityDate", + "IssuerAlternativeName", + "IssuingDistributionPoint", + "KeyUsage", + "MSCertificateTemplate", + "Name", + "NameAttribute", + "NameConstraints", + "NameOID", + "NamingAuthority", + "NoticeReference", + "OCSPAcceptableResponses", + "OCSPNoCheck", + "OCSPNonce", + "ObjectIdentifier", + "OtherName", + "PolicyConstraints", + "PolicyInformation", + "PrecertPoison", + "PrecertificateSignedCertificateTimestamps", + "PrivateKeyUsagePeriod", + "ProfessionInfo", + "PublicKeyAlgorithmOID", + 
"RFC822Name", + "ReasonFlags", + "RegisteredID", + "RelativeDistinguishedName", + "RevokedCertificate", + "RevokedCertificateBuilder", + "SignatureAlgorithmOID", + "SignedCertificateTimestamps", + "SubjectAlternativeName", + "SubjectInformationAccess", + "SubjectKeyIdentifier", + "TLSFeature", + "TLSFeatureType", + "UniformResourceIdentifier", + "UnrecognizedExtension", + "UnsupportedGeneralNameType", + "UserNotice", + "Version", + "certificate_transparency", + "load_der_x509_certificate", + "load_der_x509_crl", + "load_der_x509_csr", + "load_pem_x509_certificate", + "load_pem_x509_certificates", + "load_pem_x509_crl", + "load_pem_x509_csr", + "random_serial_number", + "verification", + "verification", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..76d7d93 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..e197c5b Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc new file mode 100644 index 0000000..7066077 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc new file mode 100644 index 0000000..d32712d Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc new file mode 100644 index 0000000..c4ae9b1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc new file mode 100644 index 0000000..9534648 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc new file mode 100644 index 0000000..3af945e Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc new file mode 100644 index 0000000..7893b1f Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc new file mode 100644 index 0000000..c476032 Binary files /dev/null and b/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/verification.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/base.py b/venv/lib/python3.12/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..1be612b --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/base.py @@ -0,0 +1,848 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import datetime +import os +import typing +import warnings +from collections.abc import Iterable + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + padding, + rsa, + x448, + x25519, +) +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.extensions import ( + Extension, + Extensions, + ExtensionType, + _make_sequence_methods, +) +from cryptography.x509.name import Name, _ASN1Type +from cryptography.x509.oid import ObjectIdentifier + +_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) + +# This must be kept in sync with sign.rs's list of allowable types in +# identify_hash_type +_AllowedHashTypes = typing.Union[ + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, + hashes.SHA3_224, + hashes.SHA3_256, + hashes.SHA3_384, + hashes.SHA3_512, +] + + +class AttributeNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +def _reject_duplicate_extension( + extension: Extension[ExtensionType], + extensions: list[Extension[ExtensionType]], +) -> None: + # This is quadratic in the number of extensions + for e in extensions: + if e.oid == extension.oid: + raise ValueError("This extension has already been set.") + + +def _reject_duplicate_attribute( + oid: ObjectIdentifier, + attributes: list[tuple[ObjectIdentifier, bytes, int | None]], +) -> None: + # This is quadratic in the number of attributes + for attr_oid, _, _ in attributes: + if attr_oid == oid: + raise ValueError("This attribute has already been set.") + + +def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: + """Normalizes a datetime to a naive datetime in UTC. + + time -- datetime to normalize. Assumed to be in UTC if not timezone + aware. 
+ """ + if time.tzinfo is not None: + offset = time.utcoffset() + offset = offset if offset else datetime.timedelta() + return time.replace(tzinfo=None) - offset + else: + return time + + +class Attribute: + def __init__( + self, + oid: ObjectIdentifier, + value: bytes, + _type: int = _ASN1Type.UTF8String.value, + ) -> None: + self._oid = oid + self._value = value + self._type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Attribute): + return NotImplemented + + return ( + self.oid == other.oid + and self.value == other.value + and self._type == other._type + ) + + def __hash__(self) -> int: + return hash((self.oid, self.value, self._type)) + + +class Attributes: + def __init__( + self, + attributes: Iterable[Attribute], + ) -> None: + self._attributes = list(attributes) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") + + def __repr__(self) -> str: + return f"" + + def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: + for attr in self: + if attr.oid == oid: + return attr + + raise AttributeNotFound(f"No {oid} attribute was found", oid) + + +class Version(utils.Enum): + v1 = 0 + v3 = 2 + + +class InvalidVersion(Exception): + def __init__(self, msg: str, parsed_version: int) -> None: + super().__init__(msg) + self.parsed_version = parsed_version + + +Certificate = rust_x509.Certificate + + +class RevokedCertificate(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def serial_number(self) -> int: + """ + Returns the serial number of the revoked certificate. + """ + + @property + @abc.abstractmethod + def revocation_date(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked. + """ + + @property + @abc.abstractmethod + def revocation_date_utc(self) -> datetime.datetime: + """ + Returns the date of when this certificate was revoked as a non-naive + UTC datetime. + """ + + @property + @abc.abstractmethod + def extensions(self) -> Extensions: + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +# Runtime isinstance checks need this since the rust class is not a subclass. +RevokedCertificate.register(rust_x509.RevokedCertificate) + + +class _RawRevokedCertificate(RevokedCertificate): + def __init__( + self, + serial_number: int, + revocation_date: datetime.datetime, + extensions: Extensions, + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + @property + def serial_number(self) -> int: + return self._serial_number + + @property + def revocation_date(self) -> datetime.datetime: + warnings.warn( + "Properties that return a naïve datetime object have been " + "deprecated. 
Please switch to revocation_date_utc.", + utils.DeprecatedIn42, + stacklevel=2, + ) + return self._revocation_date + + @property + def revocation_date_utc(self) -> datetime.datetime: + return self._revocation_date.replace(tzinfo=datetime.timezone.utc) + + @property + def extensions(self) -> Extensions: + return self._extensions + + +CertificateRevocationList = rust_x509.CertificateRevocationList +CertificateSigningRequest = rust_x509.CertificateSigningRequest + + +load_pem_x509_certificate = rust_x509.load_pem_x509_certificate +load_der_x509_certificate = rust_x509.load_der_x509_certificate + +load_pem_x509_certificates = rust_x509.load_pem_x509_certificates + +load_pem_x509_csr = rust_x509.load_pem_x509_csr +load_der_x509_csr = rust_x509.load_der_x509_csr + +load_pem_x509_crl = rust_x509.load_pem_x509_crl +load_der_x509_crl = rust_x509.load_der_x509_crl + + +class CertificateSigningRequestBuilder: + def __init__( + self, + subject_name: Name | None = None, + extensions: list[Extension[ExtensionType]] = [], + attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], + ): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + self._attributes = attributes + + def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateSigningRequestBuilder( + name, self._extensions, self._attributes + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateSigningRequestBuilder( + self._subject_name, + [*self._extensions, extension], + self._attributes, + ) + + def add_attribute( + self, + oid: ObjectIdentifier, + value: bytes, + *, + _tag: _ASN1Type | None = None, + ) -> CertificateSigningRequestBuilder: + """ + Adds an X.509 attribute with an OID and associated value. + """ + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + + if not isinstance(value, bytes): + raise TypeError("value must be bytes") + + if _tag is not None and not isinstance(_tag, _ASN1Type): + raise TypeError("tag must be _ASN1Type") + + _reject_duplicate_attribute(oid, self._attributes) + + if _tag is not None: + tag = _tag.value + else: + tag = None + + return CertificateSigningRequestBuilder( + self._subject_name, + self._extensions, + [*self._attributes, (oid, value, tag)], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> CertificateSigningRequest: + """ + Signs the request using the requestor's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_csr( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class CertificateBuilder: + _extensions: list[Extension[ExtensionType]] + + def __init__( + self, + issuer_name: Name | None = None, + subject_name: Name | None = None, + public_key: CertificatePublicKeyTypes | None = None, + serial_number: int | None = None, + not_valid_before: datetime.datetime | None = None, + not_valid_after: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ) -> None: + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name: Name) -> CertificateBuilder: + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateBuilder( + name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def subject_name(self, name: Name) -> CertificateBuilder: + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError("Expecting x509.Name object.") + if self._subject_name is not None: + raise ValueError("The subject name may only be set once.") + return CertificateBuilder( + self._issuer_name, + name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def public_key( + self, + key: CertificatePublicKeyTypes, + ) -> CertificateBuilder: + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance( + key, + ( + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, + ), + ): + raise TypeError( + "Expecting one of DSAPublicKey, RSAPublicKey," + " EllipticCurvePublicKey, Ed25519PublicKey," + " Ed448PublicKey, X25519PublicKey, or " + "X448PublicKey." + ) + if self._public_key is not None: + raise ValueError("The public key may only be set once.") + return CertificateBuilder( + self._issuer_name, + self._subject_name, + key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def serial_number(self, number: int) -> CertificateBuilder: + """ + Sets the certificate serial number. 
+ """ + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive.") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + number, + self._not_valid_before, + self._not_valid_after, + self._extensions, + ) + + def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_before is not None: + raise ValueError("The not valid before may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid before date must be on or after" + " 1950 January 1)." + ) + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + "The not valid before date must be before the not valid after " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + time, + self._not_valid_after, + self._extensions, + ) + + def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._not_valid_after is not None: + raise ValueError("The not valid after may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The not valid after date must be on or after 1950 January 1." + ) + if ( + self._not_valid_before is not None + and time < self._not_valid_before + ): + raise ValueError( + "The not valid after date must be after the not valid before " + "date." + ) + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + time, + self._extensions, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateBuilder: + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return CertificateBuilder( + self._issuer_name, + self._subject_name, + self._public_key, + self._serial_number, + self._not_valid_before, + self._not_valid_after, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> Certificate: + """ + Signs the certificate using the CA's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_certificate( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class CertificateRevocationListBuilder: + _extensions: list[Extension[ExtensionType]] + _revoked_certificates: list[RevokedCertificate] + + def __init__( + self, + issuer_name: Name | None = None, + last_update: datetime.datetime | None = None, + next_update: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + revoked_certificates: list[RevokedCertificate] = [], + ): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name( + self, issuer_name: Name + ) -> CertificateRevocationListBuilder: + if not isinstance(issuer_name, Name): + raise TypeError("Expecting x509.Name object.") + if self._issuer_name is not None: + raise ValueError("The issuer name may only be set once.") + return CertificateRevocationListBuilder( + issuer_name, + self._last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def last_update( + self, last_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(last_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._last_update is not None: + raise ValueError("Last update may only be set once.") + last_update = _convert_to_naive_utc_time(last_update) + if last_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." + ) + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + "The last update date must be before the next update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + last_update, + self._next_update, + self._extensions, + self._revoked_certificates, + ) + + def next_update( + self, next_update: datetime.datetime + ) -> CertificateRevocationListBuilder: + if not isinstance(next_update, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._next_update is not None: + raise ValueError("Last update may only be set once.") + next_update = _convert_to_naive_utc_time(next_update) + if next_update < _EARLIEST_UTC_TIME: + raise ValueError( + "The last update date must be on or after 1950 January 1." 
+ ) + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + "The next update date must be after the last update date." + ) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + next_update, + self._extensions, + self._revoked_certificates, + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> CertificateRevocationListBuilder: + """ + Adds an X.509 extension to the certificate revocation list. + """ + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + [*self._extensions, extension], + self._revoked_certificates, + ) + + def add_revoked_certificate( + self, revoked_certificate: RevokedCertificate + ) -> CertificateRevocationListBuilder: + """ + Adds a revoked certificate to the CRL. + """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, + self._last_update, + self._next_update, + self._extensions, + [*self._revoked_certificates, revoked_certificate], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: _AllowedHashTypes | None, + backend: typing.Any = None, + *, + rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, + ecdsa_deterministic: bool | None = None, + ) -> CertificateRevocationList: + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + if rsa_padding is not None: + if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): + raise TypeError("Padding must be PSS or PKCS1v15") + if not isinstance(private_key, rsa.RSAPrivateKey): + raise TypeError("Padding is only supported for RSA keys") + + if ecdsa_deterministic is not None: + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise TypeError( + "Deterministic ECDSA is only supported for EC keys" + ) + + return rust_x509.create_x509_crl( + self, + private_key, + algorithm, + rsa_padding, + ecdsa_deterministic, + ) + + +class RevokedCertificateBuilder: + def __init__( + self, + serial_number: int | None = None, + revocation_date: datetime.datetime | None = None, + extensions: list[Extension[ExtensionType]] = [], + ): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number: int) -> RevokedCertificateBuilder: + if not isinstance(number, int): + raise TypeError("Serial number must be of integral type.") + if self._serial_number is not None: + raise ValueError("The serial number may only be set once.") + if number <= 0: + raise ValueError("The serial number should be positive") + + # ASN.1 integers are always signed, so most significant bit must be + # zero. + if number.bit_length() >= 160: # As defined in RFC 5280 + raise ValueError( + "The serial number should not be more than 159 bits." 
+ ) + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date( + self, time: datetime.datetime + ) -> RevokedCertificateBuilder: + if not isinstance(time, datetime.datetime): + raise TypeError("Expecting datetime object.") + if self._revocation_date is not None: + raise ValueError("The revocation date may only be set once.") + time = _convert_to_naive_utc_time(time) + if time < _EARLIEST_UTC_TIME: + raise ValueError( + "The revocation date must be on or after 1950 January 1." + ) + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def add_extension( + self, extval: ExtensionType, critical: bool + ) -> RevokedCertificateBuilder: + if not isinstance(extval, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + return RevokedCertificateBuilder( + self._serial_number, + self._revocation_date, + [*self._extensions, extension], + ) + + def build(self, backend: typing.Any = None) -> RevokedCertificate: + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + return _RawRevokedCertificate( + self._serial_number, + self._revocation_date, + Extensions(self._extensions), + ) + + +def random_serial_number() -> int: + return int.from_bytes(os.urandom(20), "big") >> 1 diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py b/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py new file mode 100644 index 0000000..fb66cc6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py @@ -0,0 +1,35 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 + + +class LogEntryType(utils.Enum): + X509_CERTIFICATE = 0 + PRE_CERTIFICATE = 1 + + +class Version(utils.Enum): + v1 = 0 + + +class SignatureAlgorithm(utils.Enum): + """ + Signature algorithms that are valid for SCTs. + + These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2). + + See: <https://datatracker.ietf.org/doc/html/rfc5246#section-7.4.1.4.1> + """ + + ANONYMOUS = 0 + RSA = 1 + DSA = 2 + ECDSA = 3 + + +SignedCertificateTimestamp = rust_x509.Sct diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py b/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..dfa472d --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,2528 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
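+# Usage sketch (illustrative only, assuming the vendored API above): issuing
+# a self-signed certificate with the CertificateBuilder chain from base.py.
+# All names here are hypothetical.
+#
+#   import datetime
+#   from cryptography import x509
+#   from cryptography.hazmat.primitives import hashes
+#   from cryptography.hazmat.primitives.asymmetric import ec
+#   from cryptography.x509.oid import NameOID
+#
+#   key = ec.generate_private_key(ec.SECP256R1())
+#   name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.local")])
+#   now = datetime.datetime.now(datetime.timezone.utc)
+#   cert = (
+#       x509.CertificateBuilder()
+#       .subject_name(name)
+#       .issuer_name(name)  # self-signed: issuer == subject
+#       .public_key(key.public_key())
+#       .serial_number(x509.random_serial_number())  # fits the 159-bit check above
+#       .not_valid_before(now)
+#       .not_valid_after(now + datetime.timedelta(days=365))
+#       .sign(key, hashes.SHA256())  # each setter returns a new builder
+#   )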
+ +from __future__ import annotations + +import abc +import datetime +import hashlib +import ipaddress +import typing +from collections.abc import Iterable, Iterator + +from cryptography import utils +from cryptography.hazmat.bindings._rust import asn1 +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPublicKeyTypes, + CertificatePublicKeyTypes, +) +from cryptography.x509.certificate_transparency import ( + SignedCertificateTimestamp, +) +from cryptography.x509.general_name import ( + DirectoryName, + DNSName, + GeneralName, + IPAddress, + OtherName, + RegisteredID, + RFC822Name, + UniformResourceIdentifier, + _IPAddressTypes, +) +from cryptography.x509.name import Name, RelativeDistinguishedName +from cryptography.x509.oid import ( + CRLEntryExtensionOID, + ExtensionOID, + ObjectIdentifier, + OCSPExtensionOID, +) + +ExtensionTypeVar = typing.TypeVar( + "ExtensionTypeVar", bound="ExtensionType", covariant=True +) + + +def _key_identifier_from_public_key( + public_key: CertificatePublicKeyTypes, +) -> bytes: + if isinstance(public_key, RSAPublicKey): + data = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.PKCS1, + ) + elif isinstance(public_key, EllipticCurvePublicKey): + data = public_key.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + else: + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo, + ) + data = asn1.parse_spki_for_data(serialized) + + return hashlib.sha1(data).digest() + + +def _make_sequence_methods(field_name: str): + def len_method(self) -> int: + return len(getattr(self, field_name)) + + def iter_method(self): + return iter(getattr(self, field_name)) + + def getitem_method(self, idx): + return getattr(self, field_name)[idx] + + return len_method, iter_method, getitem_method + + +class DuplicateExtension(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg: str, oid: ObjectIdentifier) -> None: + super().__init__(msg) + self.oid = oid + + +class ExtensionType(metaclass=abc.ABCMeta): + oid: typing.ClassVar[ObjectIdentifier] + + def public_bytes(self) -> bytes: + """ + Serializes the extension type to DER. + """ + raise NotImplementedError( + f"public_bytes is not implemented for extension type {self!r}" + ) + + +class Extensions: + def __init__(self, extensions: Iterable[Extension[ExtensionType]]) -> None: + self._extensions = list(extensions) + + def get_extension_for_oid( + self, oid: ObjectIdentifier + ) -> Extension[ExtensionType]: + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound(f"No {oid} extension was found", oid) + + def get_extension_for_class( + self, extclass: type[ExtensionTypeVar] + ) -> Extension[ExtensionTypeVar]: + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." 
+ ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + f"No {extclass} extension was found", extclass.oid + ) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions") + + def __repr__(self) -> str: + return f"<Extensions({self._extensions})>" + + +class CRLNumber(ExtensionType): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"<CRLNumber({self.crl_number})>" + + @property + def crl_number(self) -> int: + return self._crl_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityKeyIdentifier(ExtensionType): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__( + self, + key_identifier: bytes | None, + authority_cert_issuer: Iterable[GeneralName] | None, + authority_cert_serial_number: int | None, + ) -> None: + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None: + authority_cert_issuer = list(authority_cert_issuer) + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, int + ): + raise TypeError("authority_cert_serial_number must be an integer") + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + # This takes a subset of CertificatePublicKeyTypes because an issuer + # cannot have an X25519/X448 key. This introduces some unfortunate + # asymmetry that requires typing users to explicitly + # narrow their type, but we should make this accurate and not just + # convenient. 
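+    # Sketch (illustrative only): the two classmethods below are the usual way
+    # to derive this extension when issuing a certificate, e.g.
+    #   ski = x509.SubjectKeyIdentifier.from_public_key(ca_key.public_key())
+    #   aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ski)
+    # where ca_key is a hypothetical issuer private key.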
+ @classmethod + def from_issuer_public_key( + cls, public_key: CertificateIssuerPublicKeyTypes + ) -> AuthorityKeyIdentifier: + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + @classmethod + def from_issuer_subject_key_identifier( + cls, ski: SubjectKeyIdentifier + ) -> AuthorityKeyIdentifier: + return cls( + key_identifier=ski.digest, + authority_cert_issuer=None, + authority_cert_serial_number=None, + ) + + def __repr__(self) -> str: + return ( + f"<AuthorityKeyIdentifier(key_identifier={self.key_identifier!r}, " + f"authority_cert_issuer={self.authority_cert_issuer}, " + f"authority_cert_serial_number={self.authority_cert_serial_number})>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier + and self.authority_cert_issuer == other.authority_cert_issuer + and self.authority_cert_serial_number + == other.authority_cert_serial_number + ) + + def __hash__(self) -> int: + if self.authority_cert_issuer is None: + aci = None + else: + aci = tuple(self.authority_cert_issuer) + return hash( + (self.key_identifier, aci, self.authority_cert_serial_number) + ) + + @property + def key_identifier(self) -> bytes | None: + return self._key_identifier + + @property + def authority_cert_issuer( + self, + ) -> list[GeneralName] | None: + return self._authority_cert_issuer + + @property + def authority_cert_serial_number(self) -> int | None: + return self._authority_cert_serial_number + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectKeyIdentifier(ExtensionType): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest: bytes) -> None: + self._digest = digest + + @classmethod + def from_public_key( + cls, public_key: CertificatePublicKeyTypes + ) -> SubjectKeyIdentifier: + return cls(_key_identifier_from_public_key(public_key)) + + @property + def digest(self) -> bytes: + return self._digest + + @property + def key_identifier(self) -> bytes: + return self._digest + + def __repr__(self) -> str: + return f"<SubjectKeyIdentifier(digest={self.digest!r})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __hash__(self) -> int: + return hash(self.digest) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AuthorityInformationAccess(ExtensionType): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions: Iterable[AccessDescription]) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"<AuthorityInformationAccess({self._descriptions})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class SubjectInformationAccess(ExtensionType): + oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS + + def __init__(self, descriptions: Iterable[AccessDescription]) -> None: + descriptions = list(descriptions) + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( 
"Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __hash__(self) -> int: + return hash(tuple(self._descriptions)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class AccessDescription: + def __init__( + self, access_method: ObjectIdentifier, access_location: GeneralName + ) -> None: + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method + and self.access_location == other.access_location + ) + + def __hash__(self) -> int: + return hash((self.access_method, self.access_location)) + + @property + def access_method(self) -> ObjectIdentifier: + return self._access_method + + @property + def access_location(self) -> GeneralName: + return self._access_location + + +class BasicConstraints(ExtensionType): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca: bool, path_length: int | None) -> None: + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if path_length is not None and ( + not isinstance(path_length, int) or path_length < 0 + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + @property + def ca(self) -> bool: + return self._ca + + @property + def path_length(self) -> int | None: + return self._path_length + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __hash__(self) -> int: + return hash((self.ca, self.path_length)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DeltaCRLIndicator(ExtensionType): + oid = ExtensionOID.DELTA_CRL_INDICATOR + + def __init__(self, crl_number: int) -> None: + if not isinstance(crl_number, int): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + @property + def crl_number(self) -> int: + return self._crl_number + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DeltaCRLIndicator): + return NotImplemented + + return self.crl_number == other.crl_number + + def __hash__(self) -> int: + return hash(self.crl_number) + + def __repr__(self) -> str: + return f"" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CRLDistributionPoints(ExtensionType): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__( + self, distribution_points: Iterable[DistributionPoint] + ) -> None: + distribution_points = 
list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"<CRLDistributionPoints({self._distribution_points})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class FreshestCRL(ExtensionType): + oid = ExtensionOID.FRESHEST_CRL + + def __init__( + self, distribution_points: Iterable[DistributionPoint] + ) -> None: + distribution_points = list(distribution_points) + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + __len__, __iter__, __getitem__ = _make_sequence_methods( + "_distribution_points" + ) + + def __repr__(self) -> str: + return f"<FreshestCRL({self._distribution_points})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FreshestCRL): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __hash__(self) -> int: + return hash(tuple(self._distribution_points)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class DistributionPoint: + def __init__( + self, + full_name: Iterable[GeneralName] | None, + relative_name: RelativeDistinguishedName | None, + reasons: frozenset[ReasonFlags] | None, + crl_issuer: Iterable[GeneralName] | None, + ) -> None: + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + if not full_name and not relative_name and not crl_issuer: + raise ValueError( + "Either full_name, relative_name or crl_issuer must be " + "provided." 
+ ) + + if full_name is not None: + full_name = list(full_name) + if not all(isinstance(x, GeneralName) for x in full_name): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name: + if not isinstance(relative_name, RelativeDistinguishedName): + raise TypeError( + "relative_name must be a RelativeDistinguishedName" + ) + + if crl_issuer is not None: + crl_issuer = list(crl_issuer) + if not all(isinstance(x, GeneralName) for x in crl_issuer): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and ( + not isinstance(reasons, frozenset) + or not all(isinstance(x, ReasonFlags) for x in reasons) + ): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons + or ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self) -> str: + return ( + "<DistributionPoint(full_name={0.full_name}, relative_name={0." + "relative_name}, reasons={0.reasons}, " + "crl_issuer={0.crl_issuer})>".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.reasons == other.reasons + and self.crl_issuer == other.crl_issuer + ) + + def __hash__(self) -> int: + if self.full_name is not None: + fn: tuple[GeneralName, ...] | None = tuple(self.full_name) + else: + fn = None + + if self.crl_issuer is not None: + crl_issuer: tuple[GeneralName, ...] | None = tuple(self.crl_issuer) + else: + crl_issuer = None + + return hash((fn, self.relative_name, self.reasons, crl_issuer)) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def reasons(self) -> frozenset[ReasonFlags] | None: + return self._reasons + + @property + def crl_issuer(self) -> list[GeneralName] | None: + return self._crl_issuer + + +class ReasonFlags(utils.Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +# These are distribution point bit string mappings. Not to be confused with +# CRLReason reason flags bit string mappings. 
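+# Sketch (illustrative only): a CRLDistributionPoints value built from the
+# classes above; reasons must be a frozenset of ReasonFlags, and the
+# constructor above rejects unspecified/remove_from_crl. The URL is made up.
+#   x509.CRLDistributionPoints([
+#       x509.DistributionPoint(
+#           full_name=[x509.UniformResourceIdentifier("http://example.com/crl")],
+#           relative_name=None,
+#           reasons=frozenset([x509.ReasonFlags.key_compromise]),
+#           crl_issuer=None,
+#       )
+#   ])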
+# ReasonFlags ::= BIT STRING { +# unused (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# privilegeWithdrawn (7), +# aACompromise (8) } +_REASON_BIT_MAPPING = { + 1: ReasonFlags.key_compromise, + 2: ReasonFlags.ca_compromise, + 3: ReasonFlags.affiliation_changed, + 4: ReasonFlags.superseded, + 5: ReasonFlags.cessation_of_operation, + 6: ReasonFlags.certificate_hold, + 7: ReasonFlags.privilege_withdrawn, + 8: ReasonFlags.aa_compromise, +} + +_CRLREASONFLAGS = { + ReasonFlags.key_compromise: 1, + ReasonFlags.ca_compromise: 2, + ReasonFlags.affiliation_changed: 3, + ReasonFlags.superseded: 4, + ReasonFlags.cessation_of_operation: 5, + ReasonFlags.certificate_hold: 6, + ReasonFlags.privilege_withdrawn: 7, + ReasonFlags.aa_compromise: 8, +} + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + ReasonFlags.unspecified: 0, + ReasonFlags.key_compromise: 1, + ReasonFlags.ca_compromise: 2, + ReasonFlags.affiliation_changed: 3, + ReasonFlags.superseded: 4, + ReasonFlags.cessation_of_operation: 5, + ReasonFlags.certificate_hold: 6, + ReasonFlags.remove_from_crl: 8, + ReasonFlags.privilege_withdrawn: 9, + ReasonFlags.aa_compromise: 10, +} + + +class PolicyConstraints(ExtensionType): + oid = ExtensionOID.POLICY_CONSTRAINTS + + def __init__( + self, + require_explicit_policy: int | None, + inhibit_policy_mapping: int | None, + ) -> None: + if require_explicit_policy is not None and not isinstance( + require_explicit_policy, int + ): + raise TypeError( + "require_explicit_policy must be a non-negative integer or " + "None" + ) + + if inhibit_policy_mapping is not None and not isinstance( + inhibit_policy_mapping, int + ): + raise TypeError( + "inhibit_policy_mapping must be a non-negative integer or None" + ) + + if inhibit_policy_mapping is None and require_explicit_policy is None: + raise ValueError( + "At least one of require_explicit_policy and " + "inhibit_policy_mapping must not be None" + ) + + self._require_explicit_policy = require_explicit_policy + self._inhibit_policy_mapping = inhibit_policy_mapping + + def __repr__(self) -> str: + return ( + "<PolicyConstraints(require_explicit_policy={0.require_explicit" + "_policy}, inhibit_policy_mapping={0.inhibit_policy_" + "mapping})>".format(self) + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyConstraints): + return NotImplemented + + return ( + self.require_explicit_policy == other.require_explicit_policy + and self.inhibit_policy_mapping == other.inhibit_policy_mapping + ) + + def __hash__(self) -> int: + return hash( + (self.require_explicit_policy, self.inhibit_policy_mapping) + ) + + @property + def require_explicit_policy(self) -> int | None: + return self._require_explicit_policy + + @property + def inhibit_policy_mapping(self) -> int | None: + return self._inhibit_policy_mapping + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificatePolicies(ExtensionType): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies: Iterable[PolicyInformation]) -> None: + policies = list(policies) + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a PolicyInformation" + ) + + self._policies = policies + + __len__, __iter__, __getitem__ = 
_make_sequence_methods("_policies") + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __hash__(self) -> int: + return hash(tuple(self._policies)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PolicyInformation: + def __init__( + self, + policy_identifier: ObjectIdentifier, + policy_qualifiers: Iterable[str | UserNotice] | None, + ) -> None: + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + + if policy_qualifiers is not None: + policy_qualifiers = list(policy_qualifiers) + if not all( + isinstance(x, (str, UserNotice)) for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or " + "UserNotice objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier + and self.policy_qualifiers == other.policy_qualifiers + ) + + def __hash__(self) -> int: + if self.policy_qualifiers is not None: + pq = tuple(self.policy_qualifiers) + else: + pq = None + + return hash((self.policy_identifier, pq)) + + @property + def policy_identifier(self) -> ObjectIdentifier: + return self._policy_identifier + + @property + def policy_qualifiers( + self, + ) -> list[str | UserNotice] | None: + return self._policy_qualifiers + + +class UserNotice: + def __init__( + self, + notice_reference: NoticeReference | None, + explicit_text: str | None, + ) -> None: + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference + and self.explicit_text == other.explicit_text + ) + + def __hash__(self) -> int: + return hash((self.notice_reference, self.explicit_text)) + + @property + def notice_reference(self) -> NoticeReference | None: + return self._notice_reference + + @property + def explicit_text(self) -> str | None: + return self._explicit_text + + +class NoticeReference: + def __init__( + self, + organization: str | None, + notice_numbers: Iterable[int], + ) -> None: + self._organization = organization + notice_numbers = list(notice_numbers) + if not all(isinstance(x, int) for x in notice_numbers): + raise TypeError("notice_numbers must be a list of integers") + + self._notice_numbers = notice_numbers + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization + and self.notice_numbers == other.notice_numbers + ) + + def __hash__(self) -> int: + return hash((self.organization, tuple(self.notice_numbers))) + + @property + def organization(self) -> str | None: + return self._organization + + @property + def notice_numbers(self) -> list[int]: + return 
self._notice_numbers + + +class ExtendedKeyUsage(ExtensionType): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages: Iterable[ObjectIdentifier]) -> None: + usages = list(usages) + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + __len__, __iter__, __getitem__ = _make_sequence_methods("_usages") + + def __repr__(self) -> str: + return f"<ExtendedKeyUsage({self._usages})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return self._usages == other._usages + + def __hash__(self) -> int: + return hash(tuple(self._usages)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class OCSPNoCheck(ExtensionType): + oid = ExtensionOID.OCSP_NO_CHECK + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OCSPNoCheck): + return NotImplemented + + return True + + def __hash__(self) -> int: + return hash(OCSPNoCheck) + + def __repr__(self) -> str: + return "<OCSPNoCheck()>" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrecertPoison(ExtensionType): + oid = ExtensionOID.PRECERT_POISON + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrecertPoison): + return NotImplemented + + return True + + def __hash__(self) -> int: + return hash(PrecertPoison) + + def __repr__(self) -> str: + return "<PrecertPoison()>" + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class TLSFeature(ExtensionType): + oid = ExtensionOID.TLS_FEATURE + + def __init__(self, features: Iterable[TLSFeatureType]) -> None: + features = list(features) + if ( + not all(isinstance(x, TLSFeatureType) for x in features) + or len(features) == 0 + ): + raise TypeError( + "features must be a list of elements from the TLSFeatureType " + "enum" + ) + + self._features = features + + __len__, __iter__, __getitem__ = _make_sequence_methods("_features") + + def __repr__(self) -> str: + return f"<TLSFeature(features={self._features})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, TLSFeature): + return NotImplemented + + return self._features == other._features + + def __hash__(self) -> int: + return hash(tuple(self._features)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class TLSFeatureType(utils.Enum): + # status_request is defined in RFC 6066 and is used for what is commonly + # called OCSP Must-Staple when present in the TLS Feature extension in an + # X.509 certificate. + status_request = 5 + # status_request_v2 is defined in RFC 6961 and allows multiple OCSP + # responses to be provided. It is not currently in use by clients or + # servers. 
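+    # Sketch (illustrative only): OCSP Must-Staple is requested by adding a
+    # TLSFeature extension carrying status_request, e.g. on a builder:
+    #   builder.add_extension(
+    #       x509.TLSFeature([x509.TLSFeatureType.status_request]),
+    #       critical=False,
+    #   )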
+ status_request_v2 = 17 + + +_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType} + + +class InhibitAnyPolicy(ExtensionType): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs: int) -> None: + if not isinstance(skip_certs, int): + raise TypeError("skip_certs must be an integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self) -> str: + return f"<InhibitAnyPolicy(skip_certs={self.skip_certs})>" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __hash__(self) -> int: + return hash(self.skip_certs) + + @property + def skip_certs(self) -> int: + return self._skip_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class KeyUsage(ExtensionType): + oid = ExtensionOID.KEY_USAGE + + def __init__( + self, + digital_signature: bool, + content_commitment: bool, + key_encipherment: bool, + data_encipherment: bool, + key_agreement: bool, + key_cert_sign: bool, + crl_sign: bool, + encipher_only: bool, + decipher_only: bool, + ) -> None: + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + @property + def digital_signature(self) -> bool: + return self._digital_signature + + @property + def content_commitment(self) -> bool: + return self._content_commitment + + @property + def key_encipherment(self) -> bool: + return self._key_encipherment + + @property + def data_encipherment(self) -> bool: + return self._data_encipherment + + @property + def key_agreement(self) -> bool: + return self._key_agreement + + @property + def key_cert_sign(self) -> bool: + return self._key_cert_sign + + @property + def crl_sign(self) -> bool: + return self._crl_sign + + @property + def encipher_only(self) -> bool: + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def decipher_only(self) -> bool: + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self) -> str: + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + # Users found None confusing because even though encipher/decipher + # have no meaning unless key_agreement is true, to construct an + # instance of the class you still need to pass False. 
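+            # Sketch (illustrative only): a typical CA KeyUsage still passes
+            # False for the key_agreement-dependent flags, per the guard above:
+            #   x509.KeyUsage(
+            #       digital_signature=True, content_commitment=False,
+            #       key_encipherment=False, data_encipherment=False,
+            #       key_agreement=False, key_cert_sign=True, crl_sign=True,
+            #       encipher_only=False, decipher_only=False,
+            #   )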
+ encipher_only = False + decipher_only = False + + return ( + f"<KeyUsage(digital_signature={self.digital_signature}, " + f"content_commitment={self.content_commitment}, " + f"key_encipherment={self.key_encipherment}, " + f"data_encipherment={self.data_encipherment}, " + f"key_agreement={self.key_agreement}, " + f"key_cert_sign={self.key_cert_sign}, crl_sign={self.crl_sign}, " + f"encipher_only={encipher_only}, decipher_only=" + f"{decipher_only})>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature + and self.content_commitment == other.content_commitment + and self.key_encipherment == other.key_encipherment + and self.data_encipherment == other.data_encipherment + and self.key_agreement == other.key_agreement + and self.key_cert_sign == other.key_cert_sign + and self.crl_sign == other.crl_sign + and self._encipher_only == other._encipher_only + and self._decipher_only == other._decipher_only + ) + + def __hash__(self) -> int: + return hash( + ( + self.digital_signature, + self.content_commitment, + self.key_encipherment, + self.data_encipherment, + self.key_agreement, + self.key_cert_sign, + self.crl_sign, + self._encipher_only, + self._decipher_only, + ) + ) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class PrivateKeyUsagePeriod(ExtensionType): + oid = ExtensionOID.PRIVATE_KEY_USAGE_PERIOD + + def __init__( + self, + not_before: datetime.datetime | None, + not_after: datetime.datetime | None, + ) -> None: + if ( + not isinstance(not_before, datetime.datetime) + and not_before is not None + ): + raise TypeError("not_before must be a datetime.datetime or None") + + if ( + not isinstance(not_after, datetime.datetime) + and not_after is not None + ): + raise TypeError("not_after must be a datetime.datetime or None") + + if not_before is None and not_after is None: + raise ValueError( + "At least one of not_before and not_after must not be None" + ) + + if ( + not_before is not None + and not_after is not None + and not_before > not_after + ): + raise ValueError("not_before must be before not_after") + + self._not_before = not_before + self._not_after = not_after + + @property + def not_before(self) -> datetime.datetime | None: + return self._not_before + + @property + def not_after(self) -> datetime.datetime | None: + return self._not_after + + def __repr__(self) -> str: + return ( + f"<PrivateKeyUsagePeriod(not_before={self.not_before}, " + f"not_after={self.not_after})>" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PrivateKeyUsagePeriod): + return NotImplemented + + return ( + self.not_before == other.not_before + and self.not_after == other.not_after + ) + + def __hash__(self) -> int: + return hash((self.not_before, self.not_after)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class NameConstraints(ExtensionType): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__( + self, + permitted_subtrees: Iterable[GeneralName] | None, + excluded_subtrees: Iterable[GeneralName] | None, + ) -> None: + if permitted_subtrees is not None: + permitted_subtrees = list(permitted_subtrees) + if not permitted_subtrees: + raise ValueError( + "permitted_subtrees must be a non-empty list or None" + ) + if not all(isinstance(x, GeneralName) for x in permitted_subtrees): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_tree(permitted_subtrees) + + if excluded_subtrees is not None: + excluded_subtrees = list(excluded_subtrees) + if not excluded_subtrees: + raise ValueError( + "excluded_subtrees must be a non-empty list or None" + ) + if not all(isinstance(x, GeneralName) for x in excluded_subtrees): + raise TypeError( + "excluded_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_tree(excluded_subtrees) + + if permitted_subtrees is 
None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees + and self.permitted_subtrees == other.permitted_subtrees + ) + + def _validate_tree(self, tree: Iterable[GeneralName]) -> None: + self._validate_ip_name(tree) + self._validate_dns_name(tree) + + def _validate_ip_name(self, tree: Iterable[GeneralName]) -> None: + if any( + isinstance(name, IPAddress) + and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) + for name in tree + ): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def _validate_dns_name(self, tree: Iterable[GeneralName]) -> None: + if any( + isinstance(name, DNSName) and "*" in name.value for name in tree + ): + raise ValueError( + "DNSName name constraints must not contain the '*' wildcard" + " character" + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + def __hash__(self) -> int: + if self.permitted_subtrees is not None: + ps: tuple[GeneralName, ...] | None = tuple(self.permitted_subtrees) + else: + ps = None + + if self.excluded_subtrees is not None: + es: tuple[GeneralName, ...] | None = tuple(self.excluded_subtrees) + else: + es = None + + return hash((ps, es)) + + @property + def permitted_subtrees( + self, + ) -> list[GeneralName] | None: + return self._permitted_subtrees + + @property + def excluded_subtrees( + self, + ) -> list[GeneralName] | None: + return self._excluded_subtrees + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class Extension(typing.Generic[ExtensionTypeVar]): + def __init__( + self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar + ) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def critical(self) -> bool: + return self._critical + + @property + def value(self) -> ExtensionTypeVar: + return self._value + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid + and self.critical == other.critical + and self.value == other.value + ) + + def __hash__(self) -> int: + return hash((self.oid, self.critical, self.value)) + + +class GeneralNames: + def __init__(self, general_names: Iterable[GeneralName]) -> None: + general_names = list(general_names) + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... 
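+    # The @typing.overload stubs above and below never execute; they only
+    # let static type checkers narrow get_values_for_type()'s return type
+    # by the requested GeneralName subclass. The real implementation
+    # follows the last overload.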
+ + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. + objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + return [i.value for i in objs] + return list(objs) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(tuple(self._general_names)) + + +class SubjectAlternativeName(ExtensionType): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names: Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class IssuerAlternativeName(ExtensionType): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names: Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... 
+ + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... + + def get_values_for_type( + self, + type: type[DNSName] + | type[DirectoryName] + | type[IPAddress] + | type[OtherName] + | type[RFC822Name] + | type[RegisteredID] + | type[UniformResourceIdentifier], + ) -> ( + list[_IPAddressTypes] + | list[str] + | list[OtherName] + | list[Name] + | list[ObjectIdentifier] + ): + return self._general_names.get_values_for_type(type) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __hash__(self) -> int: + return hash(self._general_names) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class CertificateIssuer(ExtensionType): + oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER + + def __init__(self, general_names: Iterable[GeneralName]) -> None: + self._general_names = GeneralNames(general_names) + + __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names") + + @typing.overload + def get_values_for_type( + self, + type: type[DNSName] + | type[UniformResourceIdentifier] + | type[RFC822Name], + ) -> list[str]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[DirectoryName], + ) -> list[Name]: ... + + @typing.overload + def get_values_for_type( + self, + type: type[RegisteredID], + ) -> list[ObjectIdentifier]: ... + + @typing.overload + def get_values_for_type( + self, type: type[IPAddress] + ) -> list[_IPAddressTypes]: ... + + @typing.overload + def get_values_for_type( + self, type: type[OtherName] + ) -> list[OtherName]: ... 
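+    # As in GeneralNames, OtherName entries are returned whole rather than
+    # as bare values, since an OtherName carries both a type_id and a value.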
+
+    def get_values_for_type(
+        self,
+        type: type[DNSName]
+        | type[DirectoryName]
+        | type[IPAddress]
+        | type[OtherName]
+        | type[RFC822Name]
+        | type[RegisteredID]
+        | type[UniformResourceIdentifier],
+    ) -> (
+        list[_IPAddressTypes]
+        | list[str]
+        | list[OtherName]
+        | list[Name]
+        | list[ObjectIdentifier]
+    ):
+        return self._general_names.get_values_for_type(type)
+
+    def __repr__(self) -> str:
+        return f"<CertificateIssuer({self._general_names})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CertificateIssuer):
+            return NotImplemented
+
+        return self._general_names == other._general_names
+
+    def __hash__(self) -> int:
+        return hash(self._general_names)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class CRLReason(ExtensionType):
+    oid = CRLEntryExtensionOID.CRL_REASON
+
+    def __init__(self, reason: ReasonFlags) -> None:
+        if not isinstance(reason, ReasonFlags):
+            raise TypeError("reason must be an element from ReasonFlags")
+
+        self._reason = reason
+
+    def __repr__(self) -> str:
+        return f"<CRLReason(reason={self._reason})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, CRLReason):
+            return NotImplemented
+
+        return self.reason == other.reason
+
+    def __hash__(self) -> int:
+        return hash(self.reason)
+
+    @property
+    def reason(self) -> ReasonFlags:
+        return self._reason
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class InvalidityDate(ExtensionType):
+    oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+    def __init__(self, invalidity_date: datetime.datetime) -> None:
+        if not isinstance(invalidity_date, datetime.datetime):
+            raise TypeError("invalidity_date must be a datetime.datetime")
+
+        self._invalidity_date = invalidity_date
+
+    def __repr__(self) -> str:
+        return f"<InvalidityDate(invalidity_date={self._invalidity_date})>"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, InvalidityDate):
+            return NotImplemented
+
+        return self.invalidity_date == other.invalidity_date
+
+    def __hash__(self) -> int:
+        return hash(self.invalidity_date)
+
+    @property
+    def invalidity_date(self) -> datetime.datetime:
+        return self._invalidity_date
+
+    @property
+    def invalidity_date_utc(self) -> datetime.datetime:
+        if self._invalidity_date.tzinfo is None:
+            return self._invalidity_date.replace(tzinfo=datetime.timezone.utc)
+        else:
+            return self._invalidity_date.astimezone(tz=datetime.timezone.utc)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class PrecertificateSignedCertificateTimestamps(ExtensionType):
+    oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
+
+    def __init__(
+        self,
+        signed_certificate_timestamps: Iterable[SignedCertificateTimestamp],
+    ) -> None:
+        signed_certificate_timestamps = list(signed_certificate_timestamps)
+        if not all(
+            isinstance(sct, SignedCertificateTimestamp)
+            for sct in signed_certificate_timestamps
+        ):
+            raise TypeError(
+                "Every item in the signed_certificate_timestamps list must be "
+                "a SignedCertificateTimestamp"
+            )
+        self._signed_certificate_timestamps = signed_certificate_timestamps
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods(
+        "_signed_certificate_timestamps"
+    )
+
+    def __repr__(self) -> str:
+        return f"<PrecertificateSignedCertificateTimestamps({self._signed_certificate_timestamps})>"
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._signed_certificate_timestamps))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+            return NotImplemented
+
+        return (
+            self._signed_certificate_timestamps
+            == other._signed_certificate_timestamps
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class SignedCertificateTimestamps(ExtensionType):
+    oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+    def __init__(
+        self,
+        signed_certificate_timestamps: Iterable[SignedCertificateTimestamp],
+    ) -> None:
+        signed_certificate_timestamps = list(signed_certificate_timestamps)
+        if not all(
+            isinstance(sct, SignedCertificateTimestamp)
+            for sct in signed_certificate_timestamps
+        ):
+            raise TypeError(
+                "Every item in the signed_certificate_timestamps list must be "
+                "a SignedCertificateTimestamp"
+            )
+        self._signed_certificate_timestamps = signed_certificate_timestamps
+
+    __len__, __iter__, __getitem__ = _make_sequence_methods(
+        "_signed_certificate_timestamps"
+    )
+
+    def __repr__(self) -> str:
+        return f"<SignedCertificateTimestamps({self._signed_certificate_timestamps})>"
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._signed_certificate_timestamps))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, SignedCertificateTimestamps):
+            return NotImplemented
+
+        return (
+            self._signed_certificate_timestamps
+            == other._signed_certificate_timestamps
+        )
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPNonce(ExtensionType):
+    oid = OCSPExtensionOID.NONCE
+
+    def __init__(self, nonce: bytes) -> None:
+        if not isinstance(nonce, bytes):
+            raise TypeError("nonce must be bytes")
+
+        self._nonce = nonce
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPNonce):
+            return NotImplemented
+
+        return self.nonce == other.nonce
+
+    def __hash__(self) -> int:
+        return hash(self.nonce)
+
+    def __repr__(self) -> str:
+        return f"<OCSPNonce(nonce={self.nonce!r})>"
+
+    @property
+    def nonce(self) -> bytes:
+        return self._nonce
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class OCSPAcceptableResponses(ExtensionType):
+    oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES
+
+    def __init__(self, responses: Iterable[ObjectIdentifier]) -> None:
+        responses = list(responses)
+        if any(not isinstance(r, ObjectIdentifier) for r in responses):
+            raise TypeError("All responses must be ObjectIdentifiers")
+
+        self._responses = responses
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OCSPAcceptableResponses):
+            return NotImplemented
+
+        return self._responses == other._responses
+
+    def __hash__(self) -> int:
+        return hash(tuple(self._responses))
+
+    def __repr__(self) -> str:
+        return f"<OCSPAcceptableResponses(responses={self._responses})>"
+
+    def __iter__(self) -> Iterator[ObjectIdentifier]:
+        return iter(self._responses)
+
+    def public_bytes(self) -> bytes:
+        return rust_x509.encode_extension_value(self)
+
+
+class IssuingDistributionPoint(ExtensionType):
+    oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
+
+    def __init__(
+        self,
+        full_name: Iterable[GeneralName] | None,
+        relative_name: RelativeDistinguishedName | None,
+        only_contains_user_certs: bool,
+        only_contains_ca_certs: bool,
+        only_some_reasons: frozenset[ReasonFlags] | None,
+        indirect_crl: bool,
+        only_contains_attribute_certs: bool,
+    ) -> None:
+        if full_name is not None:
+            full_name = list(full_name)
+
+        if only_some_reasons and (
+            not isinstance(only_some_reasons, frozenset)
+            or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
+        ):
+            raise TypeError(
+                "only_some_reasons must be None or frozenset of ReasonFlags"
+            )
+
+        if only_some_reasons and (
+            ReasonFlags.unspecified in only_some_reasons
+            or ReasonFlags.remove_from_crl in only_some_reasons
+        ):
+            raise ValueError(
+                "unspecified and remove_from_crl are not valid reasons in an "
+
"IssuingDistributionPoint" + ) + + if not ( + isinstance(only_contains_user_certs, bool) + and isinstance(only_contains_ca_certs, bool) + and isinstance(indirect_crl, bool) + and isinstance(only_contains_attribute_certs, bool) + ): + raise TypeError( + "only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl and only_contains_attribute_certs " + "must all be boolean." + ) + + # Per RFC5280 Section 5.2.5, the Issuing Distribution Point extension + # in a CRL can have only one of onlyContainsUserCerts, + # onlyContainsCACerts, onlyContainsAttributeCerts set to TRUE. + crl_constraints = [ + only_contains_user_certs, + only_contains_ca_certs, + only_contains_attribute_certs, + ] + + if len([x for x in crl_constraints if x]) > 1: + raise ValueError( + "Only one of the following can be set to True: " + "only_contains_user_certs, only_contains_ca_certs, " + "only_contains_attribute_certs" + ) + + if not any( + [ + only_contains_user_certs, + only_contains_ca_certs, + indirect_crl, + only_contains_attribute_certs, + full_name, + relative_name, + only_some_reasons, + ] + ): + raise ValueError( + "Cannot create empty extension: " + "if only_contains_user_certs, only_contains_ca_certs, " + "indirect_crl, and only_contains_attribute_certs are all False" + ", then either full_name, relative_name, or only_some_reasons " + "must have a value." + ) + + self._only_contains_user_certs = only_contains_user_certs + self._only_contains_ca_certs = only_contains_ca_certs + self._indirect_crl = indirect_crl + self._only_contains_attribute_certs = only_contains_attribute_certs + self._only_some_reasons = only_some_reasons + self._full_name = full_name + self._relative_name = relative_name + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IssuingDistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name + and self.relative_name == other.relative_name + and self.only_contains_user_certs == other.only_contains_user_certs + and self.only_contains_ca_certs == other.only_contains_ca_certs + and self.only_some_reasons == other.only_some_reasons + and self.indirect_crl == other.indirect_crl + and self.only_contains_attribute_certs + == other.only_contains_attribute_certs + ) + + def __hash__(self) -> int: + return hash( + ( + self.full_name, + self.relative_name, + self.only_contains_user_certs, + self.only_contains_ca_certs, + self.only_some_reasons, + self.indirect_crl, + self.only_contains_attribute_certs, + ) + ) + + @property + def full_name(self) -> list[GeneralName] | None: + return self._full_name + + @property + def relative_name(self) -> RelativeDistinguishedName | None: + return self._relative_name + + @property + def only_contains_user_certs(self) -> bool: + return self._only_contains_user_certs + + @property + def only_contains_ca_certs(self) -> bool: + return self._only_contains_ca_certs + + @property + def only_some_reasons( + self, + ) -> frozenset[ReasonFlags] | None: + return self._only_some_reasons + + @property + def indirect_crl(self) -> bool: + return self._indirect_crl + + @property + def only_contains_attribute_certs(self) -> bool: + return self._only_contains_attribute_certs + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class MSCertificateTemplate(ExtensionType): + oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE + + def __init__( + self, + template_id: ObjectIdentifier, + major_version: int | None, + minor_version: int | None, + ) -> None: + 
if not isinstance(template_id, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._template_id = template_id + if ( + major_version is not None and not isinstance(major_version, int) + ) or ( + minor_version is not None and not isinstance(minor_version, int) + ): + raise TypeError( + "major_version and minor_version must be integers or None" + ) + self._major_version = major_version + self._minor_version = minor_version + + @property + def template_id(self) -> ObjectIdentifier: + return self._template_id + + @property + def major_version(self) -> int | None: + return self._major_version + + @property + def minor_version(self) -> int | None: + return self._minor_version + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, MSCertificateTemplate): + return NotImplemented + + return ( + self.template_id == other.template_id + and self.major_version == other.major_version + and self.minor_version == other.minor_version + ) + + def __hash__(self) -> int: + return hash((self.template_id, self.major_version, self.minor_version)) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class NamingAuthority: + def __init__( + self, + id: ObjectIdentifier | None, + url: str | None, + text: str | None, + ) -> None: + if id is not None and not isinstance(id, ObjectIdentifier): + raise TypeError("id must be an ObjectIdentifier") + + if url is not None and not isinstance(url, str): + raise TypeError("url must be a str") + + if text is not None and not isinstance(text, str): + raise TypeError("text must be a str") + + self._id = id + self._url = url + self._text = text + + @property + def id(self) -> ObjectIdentifier | None: + return self._id + + @property + def url(self) -> str | None: + return self._url + + @property + def text(self) -> str | None: + return self._text + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NamingAuthority): + return NotImplemented + + return ( + self.id == other.id + and self.url == other.url + and self.text == other.text + ) + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.url, + self.text, + ) + ) + + +class ProfessionInfo: + def __init__( + self, + naming_authority: NamingAuthority | None, + profession_items: Iterable[str], + profession_oids: Iterable[ObjectIdentifier] | None, + registration_number: str | None, + add_profession_info: bytes | None, + ) -> None: + if naming_authority is not None and not isinstance( + naming_authority, NamingAuthority + ): + raise TypeError("naming_authority must be a NamingAuthority") + + profession_items = list(profession_items) + if not all(isinstance(item, str) for item in profession_items): + raise TypeError( + "Every item in the profession_items list must be a str" + ) + + if profession_oids is not None: + profession_oids = list(profession_oids) + if not all( + isinstance(oid, ObjectIdentifier) for oid in profession_oids + ): + raise TypeError( + "Every item in the profession_oids list must be an " + "ObjectIdentifier" + ) + + if registration_number is not None and not isinstance( + registration_number, str + ): + raise TypeError("registration_number must be a str") + + if add_profession_info is not None and not isinstance( + add_profession_info, bytes + ): + raise TypeError("add_profession_info must be bytes") + + self._naming_authority = naming_authority + self._profession_items = profession_items + 
self._profession_oids = profession_oids + self._registration_number = registration_number + self._add_profession_info = add_profession_info + + @property + def naming_authority(self) -> NamingAuthority | None: + return self._naming_authority + + @property + def profession_items(self) -> list[str]: + return self._profession_items + + @property + def profession_oids(self) -> list[ObjectIdentifier] | None: + return self._profession_oids + + @property + def registration_number(self) -> str | None: + return self._registration_number + + @property + def add_profession_info(self) -> bytes | None: + return self._add_profession_info + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ProfessionInfo): + return NotImplemented + + return ( + self.naming_authority == other.naming_authority + and self.profession_items == other.profession_items + and self.profession_oids == other.profession_oids + and self.registration_number == other.registration_number + and self.add_profession_info == other.add_profession_info + ) + + def __hash__(self) -> int: + if self.profession_oids is not None: + profession_oids = tuple(self.profession_oids) + else: + profession_oids = None + return hash( + ( + self.naming_authority, + tuple(self.profession_items), + profession_oids, + self.registration_number, + self.add_profession_info, + ) + ) + + +class Admission: + def __init__( + self, + admission_authority: GeneralName | None, + naming_authority: NamingAuthority | None, + profession_infos: Iterable[ProfessionInfo], + ) -> None: + if admission_authority is not None and not isinstance( + admission_authority, GeneralName + ): + raise TypeError("admission_authority must be a GeneralName") + + if naming_authority is not None and not isinstance( + naming_authority, NamingAuthority + ): + raise TypeError("naming_authority must be a NamingAuthority") + + profession_infos = list(profession_infos) + if not all( + isinstance(info, ProfessionInfo) for info in profession_infos + ): + raise TypeError( + "Every item in the profession_infos list must be a " + "ProfessionInfo" + ) + + self._admission_authority = admission_authority + self._naming_authority = naming_authority + self._profession_infos = profession_infos + + @property + def admission_authority(self) -> GeneralName | None: + return self._admission_authority + + @property + def naming_authority(self) -> NamingAuthority | None: + return self._naming_authority + + @property + def profession_infos(self) -> list[ProfessionInfo]: + return self._profession_infos + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Admission): + return NotImplemented + + return ( + self.admission_authority == other.admission_authority + and self.naming_authority == other.naming_authority + and self.profession_infos == other.profession_infos + ) + + def __hash__(self) -> int: + return hash( + ( + self.admission_authority, + self.naming_authority, + tuple(self.profession_infos), + ) + ) + + +class Admissions(ExtensionType): + oid = ExtensionOID.ADMISSIONS + + def __init__( + self, + authority: GeneralName | None, + admissions: Iterable[Admission], + ) -> None: + if authority is not None and not isinstance(authority, GeneralName): + raise TypeError("authority must be a GeneralName") + + admissions = list(admissions) + if not all( + isinstance(admission, Admission) for admission in admissions + ): + raise TypeError( + "Every item in the contents_of_admissions list 
must be an " + "Admission" + ) + + self._authority = authority + self._admissions = admissions + + __len__, __iter__, __getitem__ = _make_sequence_methods("_admissions") + + @property + def authority(self) -> GeneralName | None: + return self._authority + + def __repr__(self) -> str: + return ( + f"" + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Admissions): + return NotImplemented + + return ( + self.authority == other.authority + and self._admissions == other._admissions + ) + + def __hash__(self) -> int: + return hash((self.authority, tuple(self._admissions))) + + def public_bytes(self) -> bytes: + return rust_x509.encode_extension_value(self) + + +class UnrecognizedExtension(ExtensionType): + def __init__(self, oid: ObjectIdentifier, value: bytes) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._oid = oid + self._value = value + + @property + def oid(self) -> ObjectIdentifier: # type: ignore[override] + return self._oid + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnrecognizedExtension): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def public_bytes(self) -> bytes: + return self.value diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py b/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py new file mode 100644 index 0000000..672f287 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py @@ -0,0 +1,281 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import abc +import ipaddress +import typing +from email.utils import parseaddr + +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + +_IPAddressTypes = typing.Union[ + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network, +] + + +class UnsupportedGeneralNameType(Exception): + pass + + +class GeneralName(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def value(self) -> typing.Any: + """ + Return the value of the object + """ + + +class RFC822Name(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "RFC822Name values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + name, address = parseaddr(value) + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. 
+ raise ValueError("Invalid rfc822name value") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> RFC822Name: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class DNSName(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "DNSName values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> DNSName: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class UniformResourceIdentifier(GeneralName): + def __init__(self, value: str) -> None: + if isinstance(value, str): + try: + value.encode("ascii") + except UnicodeEncodeError: + raise ValueError( + "URI values should be passed as an A-label string. " + "This means unicode characters should be encoded via " + "a library like idna." + ) + else: + raise TypeError("value must be string") + + self._value = value + + @property + def value(self) -> str: + return self._value + + @classmethod + def _init_without_validation(cls, value: str) -> UniformResourceIdentifier: + instance = cls.__new__(cls) + instance._value = value + return instance + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class DirectoryName(GeneralName): + def __init__(self, value: Name) -> None: + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + @property + def value(self) -> Name: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class RegisteredID(GeneralName): + def __init__(self, value: ObjectIdentifier) -> None: + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + @property + def value(self) -> ObjectIdentifier: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class IPAddress(GeneralName): + def __init__(self, value: _IPAddressTypes) -> None: + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + 
ipaddress.IPv6Network, + ), + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + "ipaddress.IPv6Network" + ) + + self._value = value + + @property + def value(self) -> _IPAddressTypes: + return self._value + + def _packed(self) -> bytes: + if isinstance( + self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address) + ): + return self.value.packed + else: + return ( + self.value.network_address.packed + self.value.netmask.packed + ) + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + +class OtherName(GeneralName): + def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None: + if not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + @property + def type_id(self) -> ObjectIdentifier: + return self._type_id + + @property + def value(self) -> bytes: + return self._value + + def __repr__(self) -> str: + return f"" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __hash__(self) -> int: + return hash((self.type_id, self.value)) diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/name.py b/venv/lib/python3.12/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..685f921 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/name.py @@ -0,0 +1,476 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
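For context, a minimal usage sketch of the GeneralName classes vendored above; the hostnames and addresses are placeholders, not values from this repository:

    # Illustrative only: constructing GeneralName subclasses and querying
    # them through a SubjectAlternativeName extension value.
    import ipaddress

    from cryptography import x509

    san = x509.SubjectAlternativeName(
        [
            x509.DNSName("example.com"),  # ASCII/A-label strings only
            x509.RFC822Name("user@example.com"),  # bare addr-spec, no display name
            x509.UniformResourceIdentifier("https://example.com"),
            x509.IPAddress(ipaddress.ip_address("192.0.2.1")),
        ]
    )
    # get_values_for_type() narrows the result by GeneralName subclass:
    assert san.get_values_for_type(x509.DNSName) == ["example.com"]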
+ +from __future__ import annotations + +import binascii +import re +import sys +import typing +import warnings +from collections.abc import Iterable, Iterator + +from cryptography import utils +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.oid import NameOID, ObjectIdentifier + + +class _ASN1Type(utils.Enum): + BitString = 3 + OctetString = 4 + UTF8String = 12 + NumericString = 18 + PrintableString = 19 + T61String = 20 + IA5String = 22 + UTCTime = 23 + GeneralizedTime = 24 + VisibleString = 26 + UniversalString = 28 + BMPString = 30 + + +_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} +_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = { + NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, + NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, + NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, + NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, + NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, +} + +# Type alias +_OidNameMap = typing.Mapping[ObjectIdentifier, str] +_NameOidMap = typing.Mapping[str, ObjectIdentifier] + +#: Short attribute names from RFC 4514: +#: https://tools.ietf.org/html/rfc4514#page-7 +_NAMEOID_TO_NAME: _OidNameMap = { + NameOID.COMMON_NAME: "CN", + NameOID.LOCALITY_NAME: "L", + NameOID.STATE_OR_PROVINCE_NAME: "ST", + NameOID.ORGANIZATION_NAME: "O", + NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", + NameOID.COUNTRY_NAME: "C", + NameOID.STREET_ADDRESS: "STREET", + NameOID.DOMAIN_COMPONENT: "DC", + NameOID.USER_ID: "UID", +} +_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} + +_NAMEOID_LENGTH_LIMIT = { + NameOID.COUNTRY_NAME: (2, 2), + NameOID.JURISDICTION_COUNTRY_NAME: (2, 2), + NameOID.COMMON_NAME: (1, 64), +} + + +def _escape_dn_value(val: str | bytes) -> str: + """Escape special characters in RFC4514 Distinguished Name value.""" + + if not val: + return "" + + # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character + # followed by the hexadecimal encoding of the octets. + if isinstance(val, bytes): + return "#" + binascii.hexlify(val).decode("utf8") + + # See https://tools.ietf.org/html/rfc4514#section-2.4 + val = val.replace("\\", "\\\\") + val = val.replace('"', '\\"') + val = val.replace("+", "\\+") + val = val.replace(",", "\\,") + val = val.replace(";", "\\;") + val = val.replace("<", "\\<") + val = val.replace(">", "\\>") + val = val.replace("\0", "\\00") + + if val[0] in ("#", " "): + val = "\\" + val + if val[-1] == " ": + val = val[:-1] + "\\ " + + return val + + +def _unescape_dn_value(val: str) -> str: + if not val: + return "" + + # See https://tools.ietf.org/html/rfc4514#section-3 + + # special = escaped / SPACE / SHARP / EQUALS + # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE + def sub(m): + val = m.group(1) + # Regular escape + if len(val) == 1: + return val + # Hex-value scape + return chr(int(val, 16)) + + return _RFC4514NameParser._PAIR_RE.sub(sub, val) + + +NameAttributeValueType = typing.TypeVar( + "NameAttributeValueType", + typing.Union[str, bytes], + str, + bytes, + covariant=True, +) + + +class NameAttribute(typing.Generic[NameAttributeValueType]): + def __init__( + self, + oid: ObjectIdentifier, + value: NameAttributeValueType, + _type: _ASN1Type | None = None, + *, + _validate: bool = True, + ) -> None: + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." 
+ ) + if _type == _ASN1Type.BitString: + if oid != NameOID.X500_UNIQUE_IDENTIFIER: + raise TypeError( + "oid must be X500_UNIQUE_IDENTIFIER for BitString type." + ) + if not isinstance(value, bytes): + raise TypeError("value must be bytes for BitString") + elif not isinstance(value, str): + raise TypeError("value argument must be a str") + + length_limits = _NAMEOID_LENGTH_LIMIT.get(oid) + if length_limits is not None: + min_length, max_length = length_limits + assert isinstance(value, str) + c_len = len(value.encode("utf8")) + if c_len < min_length or c_len > max_length: + msg = ( + f"Attribute's length must be >= {min_length} and " + f"<= {max_length}, but it was {c_len}" + ) + if _validate is True: + raise ValueError(msg) + else: + warnings.warn(msg, stacklevel=2) + + # The appropriate ASN1 string type varies by OID and is defined across + # multiple RFCs including 2459, 3280, and 5280. In general UTF8String + # is preferred (2459), but 3280 and 5280 specify several OIDs with + # alternate types. This means when we see the sentinel value we need + # to look up whether the OID has a non-UTF8 type. If it does, set it + # to that. Otherwise, UTF8! + if _type is None: + _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String) + + if not isinstance(_type, _ASN1Type): + raise TypeError("_type must be from the _ASN1Type enum") + + self._oid = oid + self._value: NameAttributeValueType = value + self._type: _ASN1Type = _type + + @property + def oid(self) -> ObjectIdentifier: + return self._oid + + @property + def value(self) -> NameAttributeValueType: + return self._value + + @property + def rfc4514_attribute_name(self) -> str: + """ + The short attribute name (for example "CN") if available, + otherwise the OID dotted string. + """ + return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string) + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + + Use short attribute name if available, otherwise fall back to OID + dotted string. + """ + attr_name = ( + attr_name_overrides.get(self.oid) if attr_name_overrides else None + ) + if attr_name is None: + attr_name = self.rfc4514_attribute_name + + return f"{attr_name}={_escape_dn_value(self.value)}" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, NameAttribute): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __hash__(self) -> int: + return hash((self.oid, self.value)) + + def __repr__(self) -> str: + return f"" + + +class RelativeDistinguishedName: + def __init__(self, attributes: Iterable[NameAttribute]): + attributes = list(attributes) + if not attributes: + raise ValueError("a relative distinguished name cannot be empty") + if not all(isinstance(x, NameAttribute) for x in attributes): + raise TypeError("attributes must be an iterable of NameAttribute") + + # Keep list and frozenset to preserve attribute order where it matters + self._attributes = attributes + self._attribute_set = frozenset(attributes) + + if len(self._attribute_set) != len(attributes): + raise ValueError("duplicate attributes are not allowed") + + def get_attributes_for_oid( + self, + oid: ObjectIdentifier, + ) -> list[NameAttribute[str | bytes]]: + return [i for i in self if i.oid == oid] + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. 
+ + Within each RDN, attributes are joined by '+', although that is rarely + used in certificates. + """ + return "+".join( + attr.rfc4514_string(attr_name_overrides) + for attr in self._attributes + ) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, RelativeDistinguishedName): + return NotImplemented + + return self._attribute_set == other._attribute_set + + def __hash__(self) -> int: + return hash(self._attribute_set) + + def __iter__(self) -> Iterator[NameAttribute]: + return iter(self._attributes) + + def __len__(self) -> int: + return len(self._attributes) + + def __repr__(self) -> str: + return f"" + + +class Name: + @typing.overload + def __init__(self, attributes: Iterable[NameAttribute]) -> None: ... + + @typing.overload + def __init__( + self, attributes: Iterable[RelativeDistinguishedName] + ) -> None: ... + + def __init__( + self, + attributes: Iterable[NameAttribute | RelativeDistinguishedName], + ) -> None: + attributes = list(attributes) + if all(isinstance(x, NameAttribute) for x in attributes): + self._attributes = [ + RelativeDistinguishedName([typing.cast(NameAttribute, x)]) + for x in attributes + ] + elif all(isinstance(x, RelativeDistinguishedName) for x in attributes): + self._attributes = typing.cast( + typing.List[RelativeDistinguishedName], attributes + ) + else: + raise TypeError( + "attributes must be a list of NameAttribute" + " or a list RelativeDistinguishedName" + ) + + @classmethod + def from_rfc4514_string( + cls, + data: str, + attr_name_overrides: _NameOidMap | None = None, + ) -> Name: + return _RFC4514NameParser(data, attr_name_overrides or {}).parse() + + def rfc4514_string( + self, attr_name_overrides: _OidNameMap | None = None + ) -> str: + """ + Format as RFC4514 Distinguished Name string. + For example 'CN=foobar.com,O=Foo Corp,C=US' + + An X.509 name is a two-level structure: a list of sets of attributes. + Each list element is separated by ',' and within each list element, set + elements are separated by '+'. The latter is almost never used in + real world certificates. According to RFC4514 section 2.1 the + RDNSequence must be reversed when converting to string representation. + """ + return ",".join( + attr.rfc4514_string(attr_name_overrides) + for attr in reversed(self._attributes) + ) + + def get_attributes_for_oid( + self, + oid: ObjectIdentifier, + ) -> list[NameAttribute[str | bytes]]: + return [i for i in self if i.oid == oid] + + @property + def rdns(self) -> list[RelativeDistinguishedName]: + return self._attributes + + def public_bytes(self, backend: typing.Any = None) -> bytes: + return rust_x509.encode_name_bytes(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Name): + return NotImplemented + + return self._attributes == other._attributes + + def __hash__(self) -> int: + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! 
+ return hash(tuple(self._attributes)) + + def __iter__(self) -> Iterator[NameAttribute]: + for rdn in self._attributes: + yield from rdn + + def __len__(self) -> int: + return sum(len(rdn) for rdn in self._attributes) + + def __repr__(self) -> str: + rdns = ",".join(attr.rfc4514_string() for attr in self._attributes) + return f"" + + +class _RFC4514NameParser: + _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+") + _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*") + + _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})" + _PAIR_RE = re.compile(_PAIR) + _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]" + _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]" + _LEADCHAR = rf"{_LUTF1}|{_UTFMB}" + _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}" + _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}" + _STRING_RE = re.compile( + rf""" + ( + ({_LEADCHAR}|{_PAIR}) + ( + ({_STRINGCHAR}|{_PAIR})* + ({_TRAILCHAR}|{_PAIR}) + )? + )? + """, + re.VERBOSE, + ) + _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+") + + def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None: + self._data = data + self._idx = 0 + + self._attr_name_overrides = attr_name_overrides + + def _has_data(self) -> bool: + return self._idx < len(self._data) + + def _peek(self) -> str | None: + if self._has_data(): + return self._data[self._idx] + return None + + def _read_char(self, ch: str) -> None: + if self._peek() != ch: + raise ValueError + self._idx += 1 + + def _read_re(self, pat) -> str: + match = pat.match(self._data, pos=self._idx) + if match is None: + raise ValueError + val = match.group() + self._idx += len(val) + return val + + def parse(self) -> Name: + """ + Parses the `data` string and converts it to a Name. + + According to RFC4514 section 2.1 the RDNSequence must be + reversed when converting to string representation. So, when + we parse it, we need to reverse again to get the RDNs on the + correct order. + """ + + if not self._has_data(): + return Name([]) + + rdns = [self._parse_rdn()] + + while self._has_data(): + self._read_char(",") + rdns.append(self._parse_rdn()) + + return Name(reversed(rdns)) + + def _parse_rdn(self) -> RelativeDistinguishedName: + nas = [self._parse_na()] + while self._peek() == "+": + self._read_char("+") + nas.append(self._parse_na()) + + return RelativeDistinguishedName(nas) + + def _parse_na(self) -> NameAttribute: + try: + oid_value = self._read_re(self._OID_RE) + except ValueError: + name = self._read_re(self._DESCR_RE) + oid = self._attr_name_overrides.get( + name, _NAME_TO_NAMEOID.get(name) + ) + if oid is None: + raise ValueError + else: + oid = ObjectIdentifier(oid_value) + + self._read_char("=") + if self._peek() == "#": + value = self._read_re(self._HEXSTRING_RE) + value = binascii.unhexlify(value[1:]).decode() + else: + raw_value = self._read_re(self._STRING_RE) + value = _unescape_dn_value(raw_value) + + return NameAttribute(oid, value) diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/ocsp.py b/venv/lib/python3.12/site-packages/cryptography/x509/ocsp.py new file mode 100644 index 0000000..f61ed80 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/ocsp.py @@ -0,0 +1,379 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
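For context, a short sketch of the Name / RFC 4514 round-trip implemented in name.py above; the attribute values are placeholders taken from the docstring example:

    # Illustrative only: build a Name, render it per RFC 4514, and parse
    # it back. rfc4514_string() reverses the RDN sequence (RFC 4514
    # section 2.1), and from_rfc4514_string() reverses it again.
    from cryptography import x509
    from cryptography.x509.oid import NameOID

    name = x509.Name(
        [
            x509.NameAttribute(NameOID.COUNTRY_NAME, "US"),
            x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Foo Corp"),
            x509.NameAttribute(NameOID.COMMON_NAME, "foobar.com"),
        ]
    )
    assert name.rfc4514_string() == "CN=foobar.com,O=Foo Corp,C=US"
    assert x509.Name.from_rfc4514_string(name.rfc4514_string()) == name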
+ +from __future__ import annotations + +import datetime +from collections.abc import Iterable + +from cryptography import utils, x509 +from cryptography.hazmat.bindings._rust import ocsp +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric.types import ( + CertificateIssuerPrivateKeyTypes, +) +from cryptography.x509.base import _reject_duplicate_extension + + +class OCSPResponderEncoding(utils.Enum): + HASH = "By Hash" + NAME = "By Name" + + +class OCSPResponseStatus(utils.Enum): + SUCCESSFUL = 0 + MALFORMED_REQUEST = 1 + INTERNAL_ERROR = 2 + TRY_LATER = 3 + SIG_REQUIRED = 5 + UNAUTHORIZED = 6 + + +_ALLOWED_HASHES = ( + hashes.SHA1, + hashes.SHA224, + hashes.SHA256, + hashes.SHA384, + hashes.SHA512, +) + + +def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None: + if not isinstance(algorithm, _ALLOWED_HASHES): + raise ValueError( + "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512" + ) + + +class OCSPCertStatus(utils.Enum): + GOOD = 0 + REVOKED = 1 + UNKNOWN = 2 + + +class _SingleResponse: + def __init__( + self, + resp: tuple[x509.Certificate, x509.Certificate] | None, + resp_hash: tuple[bytes, bytes, int] | None, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ): + _verify_algorithm(algorithm) + if not isinstance(this_update, datetime.datetime): + raise TypeError("this_update must be a datetime object") + if next_update is not None and not isinstance( + next_update, datetime.datetime + ): + raise TypeError("next_update must be a datetime object or None") + + self._resp = resp + self._resp_hash = resp_hash + self._algorithm = algorithm + self._this_update = this_update + self._next_update = next_update + + if not isinstance(cert_status, OCSPCertStatus): + raise TypeError( + "cert_status must be an item from the OCSPCertStatus enum" + ) + if cert_status is not OCSPCertStatus.REVOKED: + if revocation_time is not None: + raise ValueError( + "revocation_time can only be provided if the certificate " + "is revoked" + ) + if revocation_reason is not None: + raise ValueError( + "revocation_reason can only be provided if the certificate" + " is revoked" + ) + else: + if not isinstance(revocation_time, datetime.datetime): + raise TypeError("revocation_time must be a datetime object") + + if revocation_reason is not None and not isinstance( + revocation_reason, x509.ReasonFlags + ): + raise TypeError( + "revocation_reason must be an item from the ReasonFlags " + "enum or None" + ) + + self._cert_status = cert_status + self._revocation_time = revocation_time + self._revocation_reason = revocation_reason + + +OCSPRequest = ocsp.OCSPRequest +OCSPResponse = ocsp.OCSPResponse +OCSPSingleResponse = ocsp.OCSPSingleResponse + + +class OCSPRequestBuilder: + def __init__( + self, + request: tuple[ + x509.Certificate, x509.Certificate, hashes.HashAlgorithm + ] + | None = None, + request_hash: tuple[bytes, bytes, int, hashes.HashAlgorithm] + | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ) -> None: + self._request = request + self._request_hash = request_hash + self._extensions = extensions + + def add_certificate( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one 
certificate can be added to a request") + + _verify_algorithm(algorithm) + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + return OCSPRequestBuilder( + (cert, issuer, algorithm), self._request_hash, self._extensions + ) + + def add_certificate_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + ) -> OCSPRequestBuilder: + if self._request is not None or self._request_hash is not None: + raise ValueError("Only one certificate can be added to a request") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + _verify_algorithm(algorithm) + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + return OCSPRequestBuilder( + self._request, + (issuer_name_hash, issuer_key_hash, serial_number, algorithm), + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPRequestBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPRequestBuilder( + self._request, self._request_hash, [*self._extensions, extension] + ) + + def build(self) -> OCSPRequest: + if self._request is None and self._request_hash is None: + raise ValueError("You must add a certificate before building") + + return ocsp.create_ocsp_request(self) + + +class OCSPResponseBuilder: + def __init__( + self, + response: _SingleResponse | None = None, + responder_id: tuple[x509.Certificate, OCSPResponderEncoding] + | None = None, + certs: list[x509.Certificate] | None = None, + extensions: list[x509.Extension[x509.ExtensionType]] = [], + ): + self._response = response + self._responder_id = responder_id + self._certs = certs + self._extensions = extensions + + def add_response( + self, + cert: x509.Certificate, + issuer: x509.Certificate, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response is not None: + raise ValueError("Only one response per OCSPResponse.") + + if not isinstance(cert, x509.Certificate) or not isinstance( + issuer, x509.Certificate + ): + raise TypeError("cert and issuer must be a Certificate") + + singleresp = _SingleResponse( + (cert, issuer), + None, + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def add_response_by_hash( + self, + issuer_name_hash: bytes, + issuer_key_hash: bytes, + serial_number: int, + algorithm: hashes.HashAlgorithm, + cert_status: OCSPCertStatus, + this_update: datetime.datetime, + next_update: datetime.datetime | None, + revocation_time: datetime.datetime | None, + revocation_reason: x509.ReasonFlags | None, + ) -> OCSPResponseBuilder: + if self._response 
is not None: + raise ValueError("Only one response per OCSPResponse.") + + if not isinstance(serial_number, int): + raise TypeError("serial_number must be an integer") + + utils._check_bytes("issuer_name_hash", issuer_name_hash) + utils._check_bytes("issuer_key_hash", issuer_key_hash) + _verify_algorithm(algorithm) + if algorithm.digest_size != len( + issuer_name_hash + ) or algorithm.digest_size != len(issuer_key_hash): + raise ValueError( + "issuer_name_hash and issuer_key_hash must be the same length " + "as the digest size of the algorithm" + ) + + singleresp = _SingleResponse( + None, + (issuer_name_hash, issuer_key_hash, serial_number), + algorithm, + cert_status, + this_update, + next_update, + revocation_time, + revocation_reason, + ) + return OCSPResponseBuilder( + singleresp, + self._responder_id, + self._certs, + self._extensions, + ) + + def responder_id( + self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate + ) -> OCSPResponseBuilder: + if self._responder_id is not None: + raise ValueError("responder_id can only be set once") + if not isinstance(responder_cert, x509.Certificate): + raise TypeError("responder_cert must be a Certificate") + if not isinstance(encoding, OCSPResponderEncoding): + raise TypeError( + "encoding must be an element from OCSPResponderEncoding" + ) + + return OCSPResponseBuilder( + self._response, + (responder_cert, encoding), + self._certs, + self._extensions, + ) + + def certificates( + self, certs: Iterable[x509.Certificate] + ) -> OCSPResponseBuilder: + if self._certs is not None: + raise ValueError("certificates may only be set once") + certs = list(certs) + if len(certs) == 0: + raise ValueError("certs must not be an empty list") + if not all(isinstance(x, x509.Certificate) for x in certs): + raise TypeError("certs must be a list of Certificates") + return OCSPResponseBuilder( + self._response, + self._responder_id, + certs, + self._extensions, + ) + + def add_extension( + self, extval: x509.ExtensionType, critical: bool + ) -> OCSPResponseBuilder: + if not isinstance(extval, x509.ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = x509.Extension(extval.oid, critical, extval) + _reject_duplicate_extension(extension, self._extensions) + + return OCSPResponseBuilder( + self._response, + self._responder_id, + self._certs, + [*self._extensions, extension], + ) + + def sign( + self, + private_key: CertificateIssuerPrivateKeyTypes, + algorithm: hashes.HashAlgorithm | None, + ) -> OCSPResponse: + if self._response is None: + raise ValueError("You must add a response before signing") + if self._responder_id is None: + raise ValueError("You must add a responder_id before signing") + + return ocsp.create_ocsp_response( + OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm + ) + + @classmethod + def build_unsuccessful( + cls, response_status: OCSPResponseStatus + ) -> OCSPResponse: + if not isinstance(response_status, OCSPResponseStatus): + raise TypeError( + "response_status must be an item from OCSPResponseStatus" + ) + if response_status is OCSPResponseStatus.SUCCESSFUL: + raise ValueError("response_status cannot be SUCCESSFUL") + + return ocsp.create_ocsp_response(response_status, None, None, None) + + +load_der_ocsp_request = ocsp.load_der_ocsp_request +load_der_ocsp_response = ocsp.load_der_ocsp_response diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/oid.py b/venv/lib/python3.12/site-packages/cryptography/x509/oid.py new file mode 100644 index 0000000..520fc7a --- /dev/null 
+++ b/venv/lib/python3.12/site-packages/cryptography/x509/oid.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +from cryptography.hazmat._oid import ( + AttributeOID, + AuthorityInformationAccessOID, + CertificatePoliciesOID, + CRLEntryExtensionOID, + ExtendedKeyUsageOID, + ExtensionOID, + NameOID, + ObjectIdentifier, + OCSPExtensionOID, + OtherNameFormOID, + PublicKeyAlgorithmOID, + SignatureAlgorithmOID, + SubjectInformationAccessOID, +) + +__all__ = [ + "AttributeOID", + "AuthorityInformationAccessOID", + "CRLEntryExtensionOID", + "CertificatePoliciesOID", + "ExtendedKeyUsageOID", + "ExtensionOID", + "NameOID", + "OCSPExtensionOID", + "ObjectIdentifier", + "OtherNameFormOID", + "PublicKeyAlgorithmOID", + "SignatureAlgorithmOID", + "SubjectInformationAccessOID", +] diff --git a/venv/lib/python3.12/site-packages/cryptography/x509/verification.py b/venv/lib/python3.12/site-packages/cryptography/x509/verification.py new file mode 100644 index 0000000..2db4324 --- /dev/null +++ b/venv/lib/python3.12/site-packages/cryptography/x509/verification.py @@ -0,0 +1,34 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import typing + +from cryptography.hazmat.bindings._rust import x509 as rust_x509 +from cryptography.x509.general_name import DNSName, IPAddress + +__all__ = [ + "ClientVerifier", + "Criticality", + "ExtensionPolicy", + "Policy", + "PolicyBuilder", + "ServerVerifier", + "Store", + "Subject", + "VerificationError", + "VerifiedClient", +] + +Store = rust_x509.Store +Subject = typing.Union[DNSName, IPAddress] +VerifiedClient = rust_x509.VerifiedClient +ClientVerifier = rust_x509.ClientVerifier +ServerVerifier = rust_x509.ServerVerifier +PolicyBuilder = rust_x509.PolicyBuilder +Policy = rust_x509.Policy +ExtensionPolicy = rust_x509.ExtensionPolicy +Criticality = rust_x509.Criticality +VerificationError = rust_x509.VerificationError diff --git a/venv/lib/python3.12/site-packages/dateutil/__init__.py b/venv/lib/python3.12/site-packages/dateutil/__init__.py new file mode 100644 index 0000000..0defb82 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..257d565 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/_common.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/_common.cpython-312.pyc new file mode 100644 index 0000000..14a823f Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/_common.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/_version.cpython-312.pyc new 
file mode 100644 index 0000000..1da105f Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/easter.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/easter.cpython-312.pyc new file mode 100644 index 0000000..080af20 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/easter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc new file mode 100644 index 0000000..5a618c1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/relativedelta.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc new file mode 100644 index 0000000..de15cd8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/rrule.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc new file mode 100644 index 0000000..208464d Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/tzwin.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..5ad2f59 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/_common.py b/venv/lib/python3.12/site-packages/dateutil/_common.py new file mode 100644 index 0000000..4eb2659 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules. 
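+
+Illustrative usage (a doctest added for clarity, not in the upstream
+docstring; the output follows the ``__repr__`` defined below):
+
+    >>> from dateutil._common import weekday
+    >>> weekday(0)
+    MO
+    >>> weekday(0)(+2)
+    MO(+2)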
+""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.12/site-packages/dateutil/_version.py b/venv/lib/python3.12/site-packages/dateutil/_version.py new file mode 100644 index 0000000..b723056 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/_version.py @@ -0,0 +1,5 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '2.8.2' +version_tuple = (2, 8, 2) diff --git a/venv/lib/python3.12/site-packages/dateutil/easter.py b/venv/lib/python3.12/site-packages/dateutil/easter.py new file mode 100644 index 0000000..f74d1f7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic Easter computing method for any given year, using +Western, Orthodox or Julian algorithms. +""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different Easter + calculation methods: + + 1. Original calculation in Julian calendar, valid in + dates after 326 AD + 2. Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3. Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. 
+ + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms `_ + + and + + `The Calendar FAQ: Easter `_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/venv/lib/python3.12/site-packages/dateutil/parser/__init__.py b/venv/lib/python3.12/site-packages/dateutil/parser/__init__.py new file mode 100644 index 0000000..d174b0e --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/parser/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo, ParserError +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'ParserError', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is given *some* notice. 
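+#
+# A sketch of the effect (illustrative, not part of upstream dateutil):
+# touching one of the deprecated private names defined at the bottom of
+# this module emits a DeprecationWarning, e.g.:
+#
+#     >>> import warnings
+#     >>> from dateutil.parser import _timelex
+#     >>> with warnings.catch_warnings(record=True) as w:
+#     ...     warnings.simplefilter("always")
+#     ...     _ = _timelex("2003-09-25")
+#     >>> issubclass(w[-1].category, DeprecationWarning)
+#     True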
+ + +def __deprecated_private_func(f): + from functools import wraps + import warnings + + msg = ('{name} is a private function and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=f.__name__) + + @wraps(f) + def deprecated_func(*args, **kwargs): + warnings.warn(msg, DeprecationWarning) + return f(*args, **kwargs) + + return deprecated_func + +def __deprecate_private_class(c): + import warnings + + msg = ('{name} is a private class and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=c.__name__) + + class private_class(c): + __doc__ = c.__doc__ + + def __init__(self, *args, **kwargs): + warnings.warn(msg, DeprecationWarning) + super(private_class, self).__init__(*args, **kwargs) + + private_class.__name__ = c.__name__ + + return private_class + + +from ._parser import _timelex, _resultbase +from ._parser import _tzparser, _parsetz + +_timelex = __deprecate_private_class(_timelex) +_tzparser = __deprecate_private_class(_tzparser) +_resultbase = __deprecate_private_class(_resultbase) +_parsetz = __deprecated_private_func(_parsetz) diff --git a/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2979a84 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc new file mode 100644 index 0000000..129420e Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/_parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc new file mode 100644 index 0000000..f92f51b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/parser/__pycache__/isoparser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/parser/_parser.py b/venv/lib/python3.12/site-packages/dateutil/parser/_parser.py new file mode 100644 index 0000000..37d1663 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/parser/_parser.py @@ -0,0 +1,1613 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: + +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. +- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. 
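+
+For example (an illustrative doctest, not in the upstream docstring): with a
+default of 31 January, parsing a bare month name falls back to that month's
+last day rather than raising:
+
+    >>> from datetime import datetime
+    >>> from dateutil.parser import parse
+    >>> parse("February", default=datetime(2021, 1, 31))
+    datetime.datetime(2021, 2, 28, 0, 0)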
+ +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo", "ParserError"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. +class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. 
+ if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. 
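+
+    For instance (illustrative, not part of the upstream docstring), the
+    ambiguous date "01/05/09" resolves differently under each flag:
+
+        >>> from dateutil.parser import parse, parserinfo
+        >>> parse("01/05/09", parserinfo(dayfirst=True))
+        datetime.datetime(2009, 5, 1, 0, 0)
+        >>> parse("01/05/09", parserinfo(yearfirst=True))
+        datetime.datetime(2001, 5, 9, 0, 0)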
+ """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + 
return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leap year + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. + """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precedence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? 
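+                # i.e. a month name in the final slot (e.g. "01-99-Jan"):
+                # whichever numeric member exceeds 31 must be the year.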
+ if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
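+
+        A minimal round trip (an illustrative doctest, not in the upstream
+        docstring; the output is the naive datetime built from the string):
+
+        .. doctest::
+
+            >>> from dateutil.parser import parser
+            >>> parser().parse("2003-09-25T10:49:41")
+            datetime.datetime(2003, 9, 25, 10, 49, 41)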
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ParserError("Unknown string format: %s", timestr) + + if len(res) == 0: + raise ParserError("String does not contain a date: %s", timestr) + + try: + ret = self._build_naive(res, default) + except ValueError as e: + six.raise_from(ParserError(str(e) + ": %s", timestr), e) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. 
doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
+            (res.minute, res.second) = self._parse_min_sec(value)
+
+            if idx + 4 < len_l and tokens[idx + 3] == ':':
+                res.second, res.microsecond = self._parsems(tokens[idx + 4])
+
+                idx += 2
+
+            idx += 2
+
+        elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'):
+            sep = tokens[idx + 1]
+            ymd.append(value_repr)
+
+            if idx + 2 < len_l and not info.jump(tokens[idx + 2]):
+                if tokens[idx + 2].isdigit():
+                    # 01-01[-01]
+                    ymd.append(tokens[idx + 2])
+                else:
+                    # 01-Jan[-01]
+                    value = info.month(tokens[idx + 2])
+
+                    if value is not None:
+                        ymd.append(value, 'M')
+                    else:
+                        raise ValueError()
+
+                if idx + 3 < len_l and tokens[idx + 3] == sep:
+                    # We have three members
+                    value = info.month(tokens[idx + 4])
+
+                    if value is not None:
+                        ymd.append(value, 'M')
+                    else:
+                        ymd.append(tokens[idx + 4])
+                    idx += 2
+
+                idx += 1
+            idx += 1
+
+        elif idx + 1 >= len_l or info.jump(tokens[idx + 1]):
+            if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None:
+                # 12 am
+                hour = int(value)
+                res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2]))
+                idx += 1
+            else:
+                # Year, month or day
+                ymd.append(value)
+            idx += 1
+
+        elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24):
+            # 12am
+            hour = int(value)
+            res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1]))
+            idx += 1
+
+        elif ymd.could_be_day(value):
+            ymd.append(value)
+
+        elif not fuzzy:
+            raise ValueError()
+
+        return idx
+
+    def _find_hms_idx(self, idx, tokens, info, allow_jump):
+        len_l = len(tokens)
+
+        if idx+1 < len_l and info.hms(tokens[idx+1]) is not None:
+            # There is an "h", "m", or "s" label following this token. We
+            # assign the upcoming label to the current token,
+            # e.g. the "12" in "12h".
+            hms_idx = idx + 1
+
+        elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and
+              info.hms(tokens[idx+2]) is not None):
+            # There is a space and then an "h", "m", or "s" label,
+            # e.g. the "12" in "12 h".
+            hms_idx = idx + 2
+
+        elif idx > 0 and info.hms(tokens[idx-1]) is not None:
+            # There is an "h", "m", or "s" preceding this token. Since neither
+            # of the previous cases was hit, there is no label following this
+            # token, so we use the previous label,
+            # e.g. the "04" in "12h04".
+            hms_idx = idx-1
+
+        elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and
+              info.hms(tokens[idx-2]) is not None):
+            # If we are looking at the final token, we allow for a
+            # backward-looking check to skip over a space.
+            # TODO: Are we sure this is the right condition here?
+            hms_idx = idx - 2
+
+        else:
+            hms_idx = None
+
+        return hms_idx
+
+    def _assign_hms(self, res, value_repr, hms):
+        # See GH issue #427, fixing float rounding
+        value = self._to_decimal(value_repr)
+
+        if hms == 0:
+            # Hour
+            res.hour = int(value)
+            if value % 1:
+                res.minute = int(60*(value % 1))
+
+        elif hms == 1:
+            (res.minute, res.second) = self._parse_min_sec(value)
+
+        elif hms == 2:
+            (res.second, res.microsecond) = self._parsems(value_repr)
+
+    def _could_be_tzname(self, hour, tzname, tzoffset, token):
+        return (hour is not None and
+                tzname is None and
+                tzoffset is None and
+                len(token) <= 5 and
+                (all(x in string.ascii_uppercase for x in token)
+                 or token in self.info.UTCZONE))
+
+    def _ampm_valid(self, hour, ampm, fuzzy):
+        """
+        For fuzzy parsing, 'a' or 'am' (both valid English words)
+        may erroneously trigger the AM/PM flag. Deal with that
+        here.
+        """
+        val_is_ampm = True
+
+        # If there's already an AM/PM flag, this one isn't one.
+ if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *don't* want to set res.second = None? + minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + # ------------------------------------------------------------------ + # Handling for individual tokens. These are kept as methods instead + # of functions for the sake of customizability via subclassing. + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted + # via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + # ------------------------------------------------------------------ + # Post-Parsing construction of datetime output. These are kept as + # methods instead of functions for the sake of customizability via + # subclassing. 
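+
+    # Illustrative note (not from upstream dateutil): the ``tzinfos``
+    # argument handled below may be either a mapping or a callable, e.g.:
+    #
+    #     tzinfos = {"BRST": -7200, "CST": tz.gettz("America/Chicago")}
+    #
+    #     def tzinfos(tzname, tzoffset):
+    #         return {"BRST": -7200}.get(tzname, tzoffset)
+    #
+    # Integer values are interpreted as UTC offsets in seconds and wrapped
+    # in ``tz.tzoffset`` by ``_build_tzinfo``.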
+
+    def _build_tzinfo(self, tzinfos, tzname, tzoffset):
+        if callable(tzinfos):
+            tzdata = tzinfos(tzname, tzoffset)
+        else:
+            tzdata = tzinfos.get(tzname)
+        # handle the case where tzinfos is passed a value that maps to None,
+        # e.g. tzinfos = {'BRST': None}
+        if isinstance(tzdata, datetime.tzinfo) or tzdata is None:
+            tzinfo = tzdata
+        elif isinstance(tzdata, text_type):
+            tzinfo = tz.tzstr(tzdata)
+        elif isinstance(tzdata, integer_types):
+            tzinfo = tz.tzoffset(tzname, tzdata)
+        else:
+            raise TypeError("Offset must be tzinfo subclass, tz string, "
+                            "or int offset.")
+        return tzinfo
+
+    def _build_tzaware(self, naive, res, tzinfos):
+        if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)):
+            tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset)
+            aware = naive.replace(tzinfo=tzinfo)
+            aware = self._assign_tzname(aware, res.tzname)
+
+        elif res.tzname and res.tzname in time.tzname:
+            aware = naive.replace(tzinfo=tz.tzlocal())
+
+            # Handle ambiguous local datetime
+            aware = self._assign_tzname(aware, res.tzname)
+
+            # This is mostly relevant for winter GMT zones parsed in the UK
+            if (aware.tzname() != res.tzname and
+                    res.tzname in self.info.UTCZONE):
+                aware = aware.replace(tzinfo=tz.UTC)
+
+        elif res.tzoffset == 0:
+            aware = naive.replace(tzinfo=tz.UTC)
+
+        elif res.tzoffset:
+            aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset))
+
+        elif not res.tzname and not res.tzoffset:
+            # i.e. no timezone information was found.
+            aware = naive
+
+        elif res.tzname:
+            # tz-like string was parsed but we don't know what to do
+            # with it
+            warnings.warn("tzname {tzname} identified but not understood. "
+                          "Pass `tzinfos` argument in order to correctly "
+                          "return a timezone-aware datetime. In a future "
+                          "version, this will raise an "
+                          "exception.".format(tzname=res.tzname),
+                          category=UnknownTimezoneWarning)
+            aware = naive
+
+        return aware
+
+    def _build_naive(self, res, default):
+        repl = {}
+        for attr in ("year", "month", "day", "hour",
+                     "minute", "second", "microsecond"):
+            value = getattr(res, attr)
+            if value is not None:
+                repl[attr] = value
+
+        if 'day' not in repl:
+            # If the default day exceeds the last day of the month, fall back
+            # to the end of the month.
+            cyear = default.year if res.year is None else res.year
+            cmonth = default.month if res.month is None else res.month
+            cday = default.day if res.day is None else res.day
+
+            if cday > monthrange(cyear, cmonth)[1]:
+                repl['day'] = monthrange(cyear, cmonth)[1]
+
+        naive = default.replace(**repl)
+
+        if res.weekday is not None and not res.day:
+            naive = naive + relativedelta.relativedelta(weekday=res.weekday)
+
+        return naive
+
+    def _assign_tzname(self, dt, tzname):
+        if dt.tzname() != tzname:
+            new_dt = tz.enfold(dt, fold=1)
+            if new_dt.tzname() == tzname:
+                return new_dt
+
+        return dt
+
+    def _recombine_skipped(self, tokens, skipped_idxs):
+        """
+        >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"]
+        >>> skipped_idxs = [0, 1, 2, 5]
+        >>> _recombine_skipped(tokens, skipped_idxs)
+        ["foo bar", "baz"]
+        """
+        skipped_tokens = []
+        for i, idx in enumerate(sorted(skipped_idxs)):
+            if i > 0 and idx - 1 == skipped_idxs[i - 1]:
+                skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx]
+            else:
+                skipped_tokens.append(tokens[idx])
+
+        return skipped_tokens
+
+
+DEFAULTPARSER = parser()
+
+
+def parse(timestr, parserinfo=None, **kwargs):
+    """
+    Parse a string in one of the supported formats, using the
+    ``parserinfo`` parameters.
+
+    :param timestr:
+        A string containing a date/time stamp.
+ + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string formats, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date would + be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
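+
+    Two short examples (illustrative, not part of the upstream docstring):
+
+    .. doctest::
+
+        >>> from dateutil.parser import parse
+        >>> parse("2003-09-25")
+        datetime.datetime(2003, 9, 25, 0, 0)
+        >>> parse("10.09.2003", dayfirst=True)
+        datetime.datetime(2003, 9, 10, 0, 0)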
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + + +class ParserError(ValueError): + """Exception subclass used for any failure to parse a datetime string. + + This is a subclass of :py:exc:`ValueError`, and should be raised any time + earlier versions of ``dateutil`` would have raised ``ValueError``. + + .. versionadded:: 2.8.1 + """ + def __str__(self): + try: + return self.args[0] % self.args[1:] + except (TypeError, IndexError): + return super(ParserError, self).__str__() + + def __repr__(self): + args = ", ".join("'%s'" % arg for arg in self.args) + return "%s(%s)" % (self.__class__.__name__, args) + + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo. + + .. versionadded:: 2.7.0 + """ +# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.12/site-packages/dateutil/parser/isoparser.py b/venv/lib/python3.12/site-packages/dateutil/parser/isoparser.py new file mode 100644 index 0000000..5d7bee3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/parser/isoparser.py @@ -0,0 +1,416 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. 
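+
+For example (illustrative, not part of the upstream docstring):
+
+    >>> from dateutil.parser import isoparse
+    >>> isoparse("2018-04-29T17:45:25Z")
+    datetime.datetime(2018, 4, 29, 17, 45, 25, tzinfo=tzutc())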
+ +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. + """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. + + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. 
versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {!r}'.format(datestr.decode('ascii'))) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. + + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. + + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? 
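+            # e.g. b'2018-W15-1' means ISO year 2018, week 15, Monday;
+            # _calculate_weekdate below resolves this to date(2018, 4, 9).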
+ pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. + + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len_str < 2: + raise ValueError('ISO time too short') + + has_sep = False + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP: + has_sep = True + pos += 1 + elif comp == 2 and has_sep: + if timestr[pos:pos+1] != self._TIME_SEP: + raise ValueError('Inconsistent use of colon separator') + pos += 1 + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.UTC + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP 
else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.UTC + else: + if minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/venv/lib/python3.12/site-packages/dateutil/relativedelta.py b/venv/lib/python3.12/site-packages/dateutil/relativedelta.py new file mode 100644 index 0000000..a9e85f7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. 
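+
+    For instance, ``relativedelta(months=+2)`` is relative (advance the
+    datetime by two months), while ``relativedelta(month=2)`` is absolute
+    (set the month to February).
+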
For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). + + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and 
int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. + warn("Non-integer value passed as absolute information. " + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + 
self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.12/site-packages/dateutil/rrule.py b/venv/lib/python3.12/site-packages/dateutil/rrule.py new file mode 100644 index 0000000..b320339 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/rrule.py @@ -0,0 +1,1737 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support for caching of results. +""" +import calendar +import datetime +import heapq +import itertools +import re +import sys +from functools import wraps +# For warning about deprecation of until and count +from warnings import warn + +from six import advance_iterator, integer_types + +from six.moves import _thread, range + +from ._common import weekday as weekdaybase + +try: + from math import gcd +except ImportError: + from fractions import gcd + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
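+# e.g. M366MASK[0] == 1 (a January day) and MDAY366MASK[31] == 1
+# (February 1st); the NMDAY masks hold negative day numbers counted
+# back from the end of each month.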
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + @wraps(f) + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penalty. 
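+    # (len() would force a full iteration of the recurrence), so it is
+    # intentionally left undefined here; use count() instead.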
+ def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. 
Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. 
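+
+        For example, assuming ``dtstart=datetime(2018, 4, 9)``,
+        ``str(rrule(DAILY, count=3, dtstart=dtstart))`` yields a
+        ``DTSTART:20180409T000000`` line followed by
+        ``RRULE:FREQ=DAILY;COUNT=3``.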
+ """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in 
bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + 
rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. 
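+
+        For example, with ``value=17``, ``interval=4``, ``base=24`` and
+        ``byxxx={1, 5, 9, 13, 17, 21}``, a single repetition lands on
+        hour 21 without wrapping past midnight, so this returns ``(0, 21)``.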
+ + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
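+                    # Worked example (illustrative, not from the original
+                    # comments): in 2016, Jan 1 falls on a Friday
+                    # (weekday() == 4), so with wkst == 0 (Monday)
+                    # no1wkst == (7 - 4 + 0) % 7 == 3; the first three days
+                    # of January belong to the previous year's last week
+                    # number, which is what this branch accounts for.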
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
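+
+    **Example:**
+
+    A minimal usage sketch (the dates below are illustrative and assume
+    only the ``rrule``/``rruleset`` API defined in this module):
+
+    .. doctest::
+
+        >>> import datetime
+        >>> from dateutil.rrule import rrule, rruleset, DAILY
+        >>> rset = rruleset()
+        >>> rset.rrule(rrule(DAILY, count=3,
+        ...                  dtstart=datetime.datetime(2000, 1, 1)))
+        >>> rset.exdate(datetime.datetime(2000, 1, 2))
+        >>> list(rset)
+        [datetime.datetime(2000, 1, 1, 0, 0), datetime.datetime(2000, 1, 3, 0, 0)]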
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. 
+ + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. + + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . 
import tz
+                    tzlookup = tz.gettz
+                elif callable(tzids):
+                    tzlookup = tzids
+                else:
+                    tzlookup = getattr(tzids, 'get', None)
+                    if tzlookup is None:
+                        msg = ('tzids must be a callable, mapping, or None, '
+                               'not %s' % tzids)
+                        raise ValueError(msg)
+
+                TZID = tzlookup(tzkey)
+                continue
+
+            # RFC 5545 3.8.2.4: The VALUE parameter is optional, but may be found
+            # only once.
+            if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}:
+                raise ValueError("unsupported parm: " + parm)
+            else:
+                if value_found:
+                    msg = ("Duplicate value parameter found in: " + parm)
+                    raise ValueError(msg)
+                value_found = True
+
+        for datestr in date_value.split(','):
+            date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos)
+            if TZID is not None:
+                if date.tzinfo is None:
+                    date = date.replace(tzinfo=TZID)
+                else:
+                    raise ValueError('DTSTART/EXDATE specifies multiple timezones')
+            datevals.append(date)
+
+        return datevals
+
+    def _parse_rfc(self, s,
+                   dtstart=None,
+                   cache=False,
+                   unfold=False,
+                   forceset=False,
+                   compatible=False,
+                   ignoretz=False,
+                   tzids=None,
+                   tzinfos=None):
+        global parser
+        if compatible:
+            forceset = True
+            unfold = True
+
+        TZID_NAMES = dict(map(
+            lambda x: (x.upper(), x),
+            re.findall('TZID=(?P<name>[^:]+):', s)
+        ))
+        s = s.upper()
+        if not s.strip():
+            raise ValueError("empty string")
+        if unfold:
+            lines = s.splitlines()
+            i = 0
+            while i < len(lines):
+                line = lines[i].rstrip()
+                if not line:
+                    del lines[i]
+                elif i > 0 and line[0] == " ":
+                    lines[i-1] += line[1:]
+                    del lines[i]
+                else:
+                    i += 1
+        else:
+            lines = s.split()
+        if (not forceset and len(lines) == 1 and (s.find(':') == -1 or
+                                                  s.startswith('RRULE:'))):
+            return self._parse_rfc_rrule(lines[0], cache=cache,
+                                         dtstart=dtstart, ignoretz=ignoretz,
+                                         tzinfos=tzinfos)
+        else:
+            rrulevals = []
+            rdatevals = []
+            exrulevals = []
+            exdatevals = []
+            for line in lines:
+                if not line:
+                    continue
+                if line.find(':') == -1:
+                    name = "RRULE"
+                    value = line
+                else:
+                    name, value = line.split(':', 1)
+                parms = name.split(';')
+                if not parms:
+                    raise ValueError("empty property name")
+                name = parms[0]
+                parms = parms[1:]
+                if name == "RRULE":
+                    for parm in parms:
+                        raise ValueError("unsupported RRULE parm: "+parm)
+                    rrulevals.append(value)
+                elif name == "RDATE":
+                    for parm in parms:
+                        if parm != "VALUE=DATE-TIME":
+                            raise ValueError("unsupported RDATE parm: "+parm)
+                    rdatevals.append(value)
+                elif name == "EXRULE":
+                    for parm in parms:
+                        raise ValueError("unsupported EXRULE parm: "+parm)
+                    exrulevals.append(value)
+                elif name == "EXDATE":
+                    exdatevals.extend(
+                        self._parse_date_value(value, parms,
+                                               TZID_NAMES, ignoretz,
+                                               tzids, tzinfos)
+                    )
+                elif name == "DTSTART":
+                    dtvals = self._parse_date_value(value, parms, TZID_NAMES,
+                                                    ignoretz, tzids, tzinfos)
+                    if len(dtvals) != 1:
+                        raise ValueError("Multiple DTSTART values specified:" +
+                                         value)
+                    dtstart = dtvals[0]
+                else:
+                    raise ValueError("unsupported property: "+name)
+            if (forceset or len(rrulevals) > 1 or rdatevals
+                    or exrulevals or exdatevals):
+                if not parser and (rdatevals or exdatevals):
+                    from dateutil import parser
+                rset = rruleset(cache=cache)
+                for value in rrulevals:
+                    rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
+                                                     ignoretz=ignoretz,
+                                                     tzinfos=tzinfos))
+                for value in rdatevals:
+                    for datestr in value.split(','):
+                        rset.rdate(parser.parse(datestr,
+                                                ignoretz=ignoretz,
+                                                tzinfos=tzinfos))
+                for value in exrulevals:
+                    rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
+                                                      ignoretz=ignoretz,
+                                                      tzinfos=tzinfos))
+                for value in exdatevals:
+                    rset.exdate(value)
+                if
compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/__init__.py b/venv/lib/python3.12/site-packages/dateutil/tz/__init__.py new file mode 100644 index 0000000..af1352c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/tz/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c97036a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc new file mode 100644 index 0000000..f8d916b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_common.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc new file mode 100644 index 0000000..f682ed4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/_factories.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc new file mode 100644 index 0000000..db8f0ab Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/tz.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc new file mode 100644 index 0000000..cb77cb5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/tz/__pycache__/win.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/_common.py b/venv/lib/python3.12/site-packages/dateutil/tz/_common.py new file mode 100644 index 0000000..e6ac118 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. 
It used to return bytes, but was changed
+    to unicode strings.
+    """
+    if PY2:
+        @wraps(namefunc)
+        def adjust_encoding(*args, **kwargs):
+            name = namefunc(*args, **kwargs)
+            if name is not None:
+                name = name.encode()
+
+            return name
+
+        return adjust_encoding
+    else:
+        return namefunc
+
+
+# The following is adapted from Alexander Belopolsky's tz library
+# https://github.com/abalkin/tz
+if hasattr(datetime, 'fold'):
+    # Python 3.6+ already has a native ``fold`` attribute, so enfold can
+    # simply delegate to datetime.replace().
+    def enfold(dt, fold=1):
+        """
+        Provides a unified interface for assigning the ``fold`` attribute to
+        datetimes both before and after the implementation of PEP-495.
+
+        :param fold:
+            The value for the ``fold`` attribute in the returned datetime. This
+            should be either 0 or 1.
+
+        :return:
+            Returns an object for which ``getattr(dt, 'fold', 0)`` returns
+            ``fold`` for all versions of Python. In versions prior to
+            Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
+            subclass of :py:class:`datetime.datetime` with the ``fold``
+            attribute added, if ``fold`` is 1.
+
+        .. versionadded:: 2.6.0
+        """
+        return dt.replace(fold=fold)
+
+else:
+    class _DatetimeWithFold(datetime):
+        """
+        This is a class designed to provide a PEP 495-compliant interface for
+        Python versions before 3.6. It is used only for dates in a fold, so
+        the ``fold`` attribute is fixed at ``1``.
+
+        .. versionadded:: 2.6.0
+        """
+        __slots__ = ()
+
+        def replace(self, *args, **kwargs):
+            """
+            Return a datetime with the same attributes, except for those
+            attributes given new values by whichever keyword arguments are
+            specified. Note that tzinfo=None can be specified to create a naive
+            datetime from an aware datetime with no conversion of date and time
+            data.
+
+            This is reimplemented in ``_DatetimeWithFold`` because pypy3 will
+            return a ``datetime.datetime`` even if ``fold`` is unchanged.
+            """
+            argnames = (
+                'year', 'month', 'day', 'hour', 'minute', 'second',
+                'microsecond', 'tzinfo'
+            )
+
+            for arg, argname in zip(args, argnames):
+                if argname in kwargs:
+                    raise TypeError('Duplicate argument: {}'.format(argname))
+
+                kwargs[argname] = arg
+
+            for argname in argnames:
+                if argname not in kwargs:
+                    kwargs[argname] = getattr(self, argname)
+
+            dt_class = self.__class__ if kwargs.get('fold', 1) else datetime
+
+            return dt_class(**kwargs)
+
+        @property
+        def fold(self):
+            return 1
+
+    def enfold(dt, fold=1):
+        """
+        Provides a unified interface for assigning the ``fold`` attribute to
+        datetimes both before and after the implementation of PEP-495.
+
+        :param fold:
+            The value for the ``fold`` attribute in the returned datetime. This
+            should be either 0 or 1.
+
+        :return:
+            Returns an object for which ``getattr(dt, 'fold', 0)`` returns
+            ``fold`` for all versions of Python. In versions prior to
+            Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
+            subclass of :py:class:`datetime.datetime` with the ``fold``
+            attribute added, if ``fold`` is 1.
+
+        .. versionadded:: 2.6.0
+        """
+        if getattr(dt, 'fold', 0) == fold:
+            return dt
+
+        args = dt.timetuple()[:6]
+        args += (dt.microsecond, dt.tzinfo)
+
+        if fold:
+            return _DatetimeWithFold(*args)
+        else:
+            return datetime(*args)
+
+
+def _validate_fromutc_inputs(f):
+    """
+    The CPython version of ``fromutc`` checks that the input is a ``datetime``
+    object and that ``self`` is attached as its ``tzinfo``.
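+
+    Illustrative behavior (dates are arbitrary; this sketch assumes only
+    the :class:`tzutc` singleton defined later in this module):
+
+    .. doctest::
+
+        >>> import datetime
+        >>> from dateutil import tz
+        >>> aware = datetime.datetime(2020, 1, 1, tzinfo=tz.UTC)
+        >>> tz.UTC.fromutc(aware)
+        datetime.datetime(2020, 1, 1, 0, 0, tzinfo=tzutc())
+        >>> tz.UTC.fromutc(datetime.datetime(2020, 1, 1))
+        Traceback (most recent call last):
+        ...
+        ValueError: dt.tzinfo is not self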
+ """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). 
+ + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/_factories.py b/venv/lib/python3.12/site-packages/dateutil/tz/_factories.py new file mode 100644 index 0000000..f8a6589 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/tz/_factories.py @@ -0,0 +1,80 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + +from six.moves import _thread + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + + +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls._cache_lock = _thread.allocate_lock() + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls._cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls.__cache_lock = _thread.allocate_lock() + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + # This lock may not be necessary in Python 3. 
See GH issue #901 + with cls.__cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/tz.py b/venv/lib/python3.12/site-packages/dateutil/tz/tz.py new file mode 100644 index 0000000..c67f56d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1849 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. 
versionadded:: 2.7.0 +UTC = tzutc() + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. 
+ # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. 
If omitted
+        and ``fileobj`` is a file stream, this parameter will be set either to
+        ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``.
+
+    See `Sources for Time Zone and Daylight Saving Time Data
+    <https://data.iana.org/time-zones/tz-link.html>`_ for more information.
+    Time zone files can be compiled from the `IANA Time Zone database files
+    <https://www.iana.org/time-zones>`_ with the `zic time zone compiler
+    <https://www.freebsd.org/cgi/man.cgi?query=zic&sektion=8>`_.
+
+    .. note::
+
+        Only construct a ``tzfile`` directly if you have a specific timezone
+        file on disk that you want to read into a Python ``tzinfo`` object.
+        If you want to get a ``tzfile`` representing a specific IANA zone
+        (e.g. ``'America/New_York'``), you should call
+        :func:`dateutil.tz.gettz` with the zone identifier.
+
+
+    **Examples:**
+
+    Using the US Eastern time zone as an example, we can see that a ``tzfile``
+    provides time zone information for the standard and Daylight Saving
+    offsets:
+
+    .. testsetup:: tzfile
+
+        from dateutil.tz import gettz
+        from datetime import datetime
+
+    .. doctest:: tzfile
+
+        >>> NYC = gettz('America/New_York')
+        >>> NYC
+        tzfile('/usr/share/zoneinfo/America/New_York')
+
+        >>> print(datetime(2016, 1, 3, tzinfo=NYC))     # EST
+        2016-01-03 00:00:00-05:00
+
+        >>> print(datetime(2016, 7, 7, tzinfo=NYC))     # EDT
+        2016-07-07 00:00:00-04:00
+
+
+    The ``tzfile`` structure contains a full history of the time zone,
+    so historical dates will also have the right offsets. For example, before
+    the adoption of the UTC standards, New York used local solar mean time:
+
+    .. doctest:: tzfile
+
+        >>> print(datetime(1901, 4, 12, tzinfo=NYC))    # LMT
+        1901-04-12 00:00:00-04:56
+
+    And during World War II, New York was on "Eastern War Time", which was a
+    state of permanent daylight saving time:
+
+    .. doctest:: tzfile
+
+        >>> print(datetime(1944, 2, 7, tzinfo=NYC))     # EWT
+        1944-02-07 00:00:00-04:00
+
+    """
+
+    def __init__(self, fileobj, filename=None):
+        super(tzfile, self).__init__()
+
+        file_opened_here = False
+        if isinstance(fileobj, string_types):
+            self._filename = fileobj
+            fileobj = open(fileobj, 'rb')
+            file_opened_here = True
+        elif filename is not None:
+            self._filename = filename
+        elif hasattr(fileobj, "name"):
+            self._filename = fileobj.name
+        else:
+            self._filename = repr(fileobj)
+
+        if fileobj is not None:
+            if not file_opened_here:
+                fileobj = _nullcontext(fileobj)
+
+            with fileobj as file_stream:
+                tzobj = self._read_tzfile(file_stream)
+
+            self._set_tzdata(tzobj)
+
+    def _set_tzdata(self, tzobj):
+        """ Set the time zone data of this object from a _tzfile object """
+        # Copy the relevant attributes over as private attributes
+        for attr in _tzfile.attrs:
+            setattr(self, '_' + attr, getattr(tzobj, attr))
+
+    def _read_tzfile(self, fileobj):
+        out = _tzfile()
+
+        # From tzfile(5):
+        #
+        # The time zone information files used by tzset(3)
+        # begin with the magic characters "TZif" to identify
+        # them as time zone information files, followed by
+        # sixteen bytes reserved for future use, followed by
+        # six four-byte values of type long, written in a
+        # ``standard'' byte order (the high-order byte
+        # of the value is written first).
+        if fileobj.read(4).decode() != "TZif":
+            raise ValueError("magic not found")
+
+        fileobj.read(16)
+
+        (
+            # The number of UTC/local indicators stored in the file.
+            ttisgmtcnt,
+
+            # The number of standard/wall indicators stored in the file.
+            ttisstdcnt,
+
+            # The number of leap seconds for which data is
+            # stored in the file.
+            leapcnt,
+
+            # The number of "transition times" for which data
+            # is stored in the file.
+ timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. 
+ + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. + baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. 
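+        # Illustrative: with trans_list == (100, 200) and timestamp == 200,
+        # bisect_right() returns 2, so idx - 1 == 1: the timestamp is
+        # counted in the period that begins at the 200 transition.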
+ trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. 
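+        # e.g. (illustrative, not from the original comments) a zone with a
+        # base offset of -5:00 observing one hour of DST reports
+        # utcoffset() == -4:00 and dst() == 1:00 while DST is in effect.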
+        return tti.dstoffset
+
+    @tzname_in_python2
+    def tzname(self, dt):
+        if not self._ttinfo_std or dt is None:
+            return None
+        return self._find_ttinfo(dt).abbr
+
+    def __eq__(self, other):
+        if not isinstance(other, tzfile):
+            return NotImplemented
+        return (self._trans_list == other._trans_list and
+                self._trans_idx == other._trans_idx and
+                self._ttinfo_list == other._ttinfo_list)
+
+    __hash__ = None
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
+
+    def __reduce__(self):
+        return self.__reduce_ex__(None)
+
+    def __reduce_ex__(self, protocol):
+        return (self.__class__, (None, self._filename), self.__dict__)
+
+
+class tzrange(tzrangebase):
+    """
+    The ``tzrange`` object is a time zone specified by a set of offsets and
+    abbreviations, equivalent to the way the ``TZ`` variable can be specified
+    in POSIX-like systems, but using Python delta objects to specify DST
+    start, end and offsets.
+
+    :param stdabbr:
+        The abbreviation for standard time (e.g. ``'EST'``).
+
+    :param stdoffset:
+        An integer or :class:`datetime.timedelta` object or equivalent
+        specifying the base offset from UTC.
+
+        If unspecified, +00:00 is used.
+
+    :param dstabbr:
+        The abbreviation for DST / "Summer" time (e.g. ``'EDT'``).
+
+        If specified, with no other DST information, DST is assumed to occur
+        and the default behavior for ``dstoffset``, ``start`` and ``end`` is
+        used. If unspecified and no other DST information is specified, it
+        is assumed that this zone has no DST.
+
+        If this is unspecified and other DST information *is* specified,
+        DST occurs in the zone but the time zone abbreviation is left
+        unchanged.
+
+    :param dstoffset:
+        An integer or :class:`datetime.timedelta` object or equivalent
+        specifying the UTC offset during DST. If unspecified and any other DST
+        information is specified, it is assumed to be the STD offset +1 hour.
+
+    :param start:
+        A :class:`relativedelta.relativedelta` object or equivalent specifying
+        the time of day and time of year that daylight savings time starts. To
+        specify, for example, that DST starts at 2AM on the 2nd Sunday in
+        March, pass:
+
+            ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))``
+
+        If unspecified and any other DST information is specified, the default
+        value is 2 AM on the first Sunday in April.
+
+    :param end:
+        A :class:`relativedelta.relativedelta` object or equivalent
+        representing the time of day and time of year that daylight savings
+        time ends, with the same specification method as in ``start``. One
+        note is that this should point to the first time in the *standard*
+        zone, so if a transition occurs at 2AM in the DST zone and the clocks
+        are set back 1 hour to 1AM, set the ``hours`` parameter to +1.
+
+
+    **Examples:**
+
+    .. testsetup:: tzrange
+
+        from dateutil.tz import tzrange, tzstr
+
+    .. doctest:: tzrange
+
+        >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT")
+        True
+
+        >>> from dateutil.relativedelta import *
+        >>> range1 = tzrange("EST", -18000, "EDT")
+        >>> range2 = tzrange("EST", -18000, "EDT", -14400,
+        ...                  relativedelta(hours=+2, month=4, day=1,
+        ...                                weekday=SU(+1)),
+        ...                  relativedelta(hours=+1, month=10, day=31,
+        >>> tzstr('EST5EDT') == range1 == range2
+        True
+
+    """
+    def __init__(self, stdabbr, stdoffset=None,
+                 dstabbr=None, dstoffset=None,
+                 start=None, end=None):
+
+        global relativedelta
+        from dateutil import relativedelta
+
+        self._std_abbr = stdabbr
+        self._dst_abbr = dstabbr
+
+        try:
+            stdoffset = stdoffset.total_seconds()
+        except (TypeError, AttributeError):
+            pass
+
+        try:
+            dstoffset = dstoffset.total_seconds()
+        except (TypeError, AttributeError):
+            pass
+
+        if stdoffset is not None:
+            self._std_offset = datetime.timedelta(seconds=stdoffset)
+        else:
+            self._std_offset = ZERO
+
+        if dstoffset is not None:
+            self._dst_offset = datetime.timedelta(seconds=dstoffset)
+        elif dstabbr and stdoffset is not None:
+            self._dst_offset = self._std_offset + datetime.timedelta(hours=+1)
+        else:
+            self._dst_offset = ZERO
+
+        if dstabbr and start is None:
+            self._start_delta = relativedelta.relativedelta(
+                hours=+2, month=4, day=1, weekday=relativedelta.SU(+1))
+        else:
+            self._start_delta = start
+
+        if dstabbr and end is None:
+            self._end_delta = relativedelta.relativedelta(
+                hours=+1, month=10, day=31, weekday=relativedelta.SU(-1))
+        else:
+            self._end_delta = end
+
+        self._dst_base_offset_ = self._dst_offset - self._std_offset
+        self.hasdst = bool(self._start_delta)
+
+    def transitions(self, year):
+        """
+        For a given year, get the DST on and off transition times, expressed
+        always on the standard time side. For zones with no transitions, this
+        function returns ``None``.
+
+        :param year:
+            The year whose transitions you would like to query.
+
+        :return:
+            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
+            ``(dston, dstoff)`` for zones with an annual DST transition, or
+            ``None`` for fixed offset zones.
+        """
+        if not self.hasdst:
+            return None
+
+        base_year = datetime.datetime(year, 1, 1)
+
+        start = base_year + self._start_delta
+        end = base_year + self._end_delta
+
+        return (start, end)
+
+    def __eq__(self, other):
+        if not isinstance(other, tzrange):
+            return NotImplemented
+
+        return (self._std_abbr == other._std_abbr and
+                self._dst_abbr == other._dst_abbr and
+                self._std_offset == other._std_offset and
+                self._dst_offset == other._dst_offset and
+                self._start_delta == other._start_delta and
+                self._end_delta == other._end_delta)
+
+    @property
+    def _dst_base_offset(self):
+        return self._dst_base_offset_
+
+
+@six.add_metaclass(_TzStrFactory)
+class tzstr(tzrange):
+    """
+    ``tzstr`` objects are time zone objects specified by a time-zone string as
+    it would be passed to a ``TZ`` variable on POSIX-style systems (see
+    the `GNU C Library: TZ Variable`_ for more details).
+
+    There is one notable exception, which is that POSIX-style time zones use an
+    inverted offset format, so normally ``GMT+3`` would be parsed as an offset
+    3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an
+    offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX
+    behavior, pass a ``True`` value to ``posix_offset``.
+
+    The :class:`tzrange` object provides the same functionality, but is
+    specified using :class:`relativedelta.relativedelta` objects rather than
+    strings.
+
+    :param s:
+        A time zone string in ``TZ`` variable format. This can be a
+        :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x:
+        :class:`unicode`) or a stream emitting unicode characters
+        (e.g. :class:`StringIO`).
+
+    :param posix_offset:
+        Optional.
If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. caution:: + + Prior to version 2.7.0, this function also supported time zones + in the format: + + * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` + * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` + + This format is non-standard and has been deprecated; this function + will raise a :class:`DeprecatedTZFormatWarning` until + support is removed in a future version. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil.parser import _parser as parser + + self._s = s + + res = parser._parsetz(s) + if res is None or res.any_unused_tokens: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. + tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. 
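+            # Worked example (illustrative only, assuming a US Eastern-style
+            # zone): dst_offset = -4:00 and std_offset = -5:00 give a delta
+            # of 1:00, so an end rule parsed as 02:00 wall time
+            # (seconds=7200) becomes 7200 - 3600 = 3600, i.e. 01:00
+            # expressed on the standard-time side.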
+            delta = self._dst_offset - self._std_offset
+            kwargs["seconds"] -= delta.seconds + delta.days * 86400
+        return relativedelta.relativedelta(**kwargs)
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, repr(self._s))
+
+
+class _tzicalvtzcomp(object):
+    def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
+                 tzname=None, rrule=None):
+        self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom)
+        self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto)
+        self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom
+        self.isdst = isdst
+        self.tzname = tzname
+        self.rrule = rrule
+
+
+class _tzicalvtz(_tzinfo):
+    def __init__(self, tzid, comps=[]):
+        super(_tzicalvtz, self).__init__()
+
+        self._tzid = tzid
+        self._comps = comps
+        self._cachedate = []
+        self._cachecomp = []
+        self._cache_lock = _thread.allocate_lock()
+
+    def _find_comp(self, dt):
+        if len(self._comps) == 1:
+            return self._comps[0]
+
+        dt = dt.replace(tzinfo=None)
+
+        try:
+            with self._cache_lock:
+                return self._cachecomp[self._cachedate.index(
+                    (dt, self._fold(dt)))]
+        except ValueError:
+            pass
+
+        lastcompdt = None
+        lastcomp = None
+
+        for comp in self._comps:
+            compdt = self._find_compdt(comp, dt)
+
+            if compdt and (not lastcompdt or lastcompdt < compdt):
+                lastcompdt = compdt
+                lastcomp = comp
+
+        if not lastcomp:
+            # RFC says nothing about what to do when a given
+            # time is before the first onset date. We'll look for the
+            # first standard component, or the first component, if
+            # none is found.
+            for comp in self._comps:
+                if not comp.isdst:
+                    lastcomp = comp
+                    break
+            else:
+                lastcomp = self._comps[0]
+
+        with self._cache_lock:
+            self._cachedate.insert(0, (dt, self._fold(dt)))
+            self._cachecomp.insert(0, lastcomp)
+
+            if len(self._cachedate) > 10:
+                self._cachedate.pop()
+                self._cachecomp.pop()
+
+        return lastcomp
+
+    def _find_compdt(self, comp, dt):
+        if comp.tzoffsetdiff < ZERO and self._fold(dt):
+            dt -= comp.tzoffsetdiff
+
+        compdt = comp.rrule.before(dt, inc=True)
+
+        return compdt
+
+    def utcoffset(self, dt):
+        if dt is None:
+            return None
+
+        return self._find_comp(dt).tzoffsetto
+
+    def dst(self, dt):
+        comp = self._find_comp(dt)
+        if comp.isdst:
+            return comp.tzoffsetdiff
+        else:
+            return ZERO
+
+    @tzname_in_python2
+    def tzname(self, dt):
+        return self._find_comp(dt).tzname
+
+    def __repr__(self):
+        return "<tzicaltz %s>" % repr(self._tzid)
+
+    __reduce__ = object.__reduce__
+
+
+class tzical(object):
+    """
+    This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure
+    as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects.
+
+    :param `fileobj`:
+        A file or stream in iCalendar format, which should be UTF-8 encoded
+        with CRLF endings.
+
+    .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545
+    """
+    def __init__(self, fileobj):
+        global rrule
+        from dateutil import rrule
+
+        if isinstance(fileobj, string_types):
+            self._s = fileobj
+            # ical should be encoded in UTF-8 with CRLF
+            fileobj = open(fileobj, 'r')
+        else:
+            self._s = getattr(fileobj, 'name', repr(fileobj))
+            fileobj = _nullcontext(fileobj)
+
+        self._vtz = {}
+
+        with fileobj as fobj:
+            self._parse_rfc(fobj.read())
+
+    def keys(self):
+        """
+        Retrieves the available time zones as a list.
+        """
+        return list(self._vtz.keys())
+
+    def get(self, tzid=None):
+        """
+        Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``.
+
+        :param tzid:
+            If there is exactly one time zone available, omitting ``tzid``
+            or passing :py:const:`None` value returns it. Otherwise a valid
+            key (which can be retrieved from :func:`keys`) is required.
+
+        :raises ValueError:
+            Raised if ``tzid`` is not specified but there are either more
+            or fewer than 1 zone defined.
+
+        :returns:
+            Returns either a :py:class:`datetime.tzinfo` object representing
+            the relevant time zone or :py:const:`None` if the ``tzid`` was
+            not found.
+        """
+        if tzid is None:
+            if len(self._vtz) == 0:
+                raise ValueError("no timezones defined")
+            elif len(self._vtz) > 1:
+                raise ValueError("more than one timezone available")
+            tzid = next(iter(self._vtz))
+
+        return self._vtz.get(tzid)
+
+    def _parse_offset(self, s):
+        s = s.strip()
+        if not s:
+            raise ValueError("empty offset")
+        if s[0] in ('+', '-'):
+            signal = (-1, +1)[s[0] == '+']
+            s = s[1:]
+        else:
+            signal = +1
+        if len(s) == 4:
+            return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal
+        elif len(s) == 6:
+            return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal
+        else:
+            raise ValueError("invalid offset: " + s)
+
+    def _parse_rfc(self, s):
+        lines = s.splitlines()
+        if not lines:
+            raise ValueError("empty string")
+
+        # Unfold
+        i = 0
+        while i < len(lines):
+            line = lines[i].rstrip()
+            if not line:
+                del lines[i]
+            elif i > 0 and line[0] == " ":
+                lines[i-1] += line[1:]
+                del lines[i]
+            else:
+                i += 1
+
+        tzid = None
+        comps = []
+        invtz = False
+        comptype = None
+        for line in lines:
+            if not line:
+                continue
+            name, value = line.split(':', 1)
+            parms = name.split(';')
+            if not parms:
+                raise ValueError("empty property name")
+            name = parms[0].upper()
+            parms = parms[1:]
+            if invtz:
+                if name == "BEGIN":
+                    if value in ("STANDARD", "DAYLIGHT"):
+                        # Process component
+                        pass
+                    else:
+                        raise ValueError("unknown component: "+value)
+                    comptype = value
+                    founddtstart = False
+                    tzoffsetfrom = None
+                    tzoffsetto = None
+                    rrulelines = []
+                    tzname = None
+                elif name == "END":
+                    if value == "VTIMEZONE":
+                        if comptype:
+                            raise ValueError("component not closed: "+comptype)
+                        if not tzid:
+                            raise ValueError("mandatory TZID not found")
+                        if not comps:
+                            raise ValueError(
+                                "at least one component is needed")
+                        # Process vtimezone
+                        self._vtz[tzid] = _tzicalvtz(tzid, comps)
+                        invtz = False
+                    elif value == comptype:
+                        if not founddtstart:
+                            raise ValueError("mandatory DTSTART not found")
+                        if tzoffsetfrom is None:
+                            raise ValueError(
+                                "mandatory TZOFFSETFROM not found")
+                        if tzoffsetto is None:
+                            raise ValueError(
+                                "mandatory TZOFFSETTO not found")
+                        # Process component
+                        rr = None
+                        if rrulelines:
+                            rr = rrule.rrulestr("\n".join(rrulelines),
+                                                compatible=True,
+                                                ignoretz=True,
+                                                cache=True)
+                        comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
+                                              (comptype == "DAYLIGHT"),
+                                              tzname, rr)
+                        comps.append(comp)
+                        comptype = None
+                    else:
+                        raise ValueError("invalid component end: "+value)
+                elif comptype:
+                    if name == "DTSTART":
+                        # DTSTART in VTIMEZONE takes a subset of valid RRULE
+                        # values under RFC 5545.
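+                        # Illustrative (hypothetical) property lines: a bare
+                        # local DATE-TIME such as
+                        #   DTSTART:19671029T020000
+                        # or the explicit DTSTART;VALUE=DATE-TIME:... form is
+                        # accepted; any other parameter (e.g. TZID=...) makes
+                        # the loop below raise ValueError.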
+ for parm in parms: + if parm != 'VALUE=DATE-TIME': + msg = ('Unsupported DTSTART param in ' + + 'VTIMEZONE: ' + parm) + raise ValueError(msg) + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. 
_`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. + self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name in ("", ":"): + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + try: + if name.startswith(":"): + name = name[1:] + except TypeError as e: + if isinstance(name, bytes): + new_msg = "gettz argument should be str, not bytes" + six.raise_from(TypeError(new_msg), e) + else: + raise + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = UTC + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) 
+
+    :param tz:
+        A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If
+        ``None`` or not provided, the datetime's own time zone will be used.
+
+    :return:
+        Returns a boolean value indicating whether or not the "wall time"
+        exists in ``tz``.
+
+    .. versionadded:: 2.7.0
+    """
+    if tz is None:
+        if dt.tzinfo is None:
+            raise ValueError('Datetime is naive and no time zone provided.')
+        tz = dt.tzinfo
+
+    dt = dt.replace(tzinfo=None)
+
+    # This is essentially a test of whether or not the datetime can survive
+    # a round trip to UTC.
+    dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz)
+    dt_rt = dt_rt.replace(tzinfo=None)
+
+    return dt == dt_rt
+
+
+def datetime_ambiguous(dt, tz=None):
+    """
+    Given a datetime and a time zone, determine whether or not a given datetime
+    is ambiguous (i.e., if there are two times differentiated only by their DST
+    status).
+
+    :param dt:
+        A :class:`datetime.datetime` (whose time zone will be ignored if
+        ``tz`` is provided.)
+
+    :param tz:
+        A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If
+        ``None`` or not provided, the datetime's own time zone will be used.
+
+    :return:
+        Returns a boolean value indicating whether or not the "wall time" is
+        ambiguous in ``tz``.
+
+    .. versionadded:: 2.6.0
+    """
+    if tz is None:
+        if dt.tzinfo is None:
+            raise ValueError('Datetime is naive and no time zone provided.')
+
+        tz = dt.tzinfo
+
+    # If a time zone defines its own "is_ambiguous" function, we'll use that.
+    is_ambiguous_fn = getattr(tz, 'is_ambiguous', None)
+    if is_ambiguous_fn is not None:
+        try:
+            return tz.is_ambiguous(dt)
+        except Exception:
+            pass
+
+    # If it doesn't come out and tell us it's ambiguous, we'll just check if
+    # the fold attribute has any effect on this particular date and time.
+    dt = dt.replace(tzinfo=tz)
+    wall_0 = enfold(dt, fold=0)
+    wall_1 = enfold(dt, fold=1)
+
+    same_offset = wall_0.utcoffset() == wall_1.utcoffset()
+    same_dst = wall_0.dst() == wall_1.dst()
+
+    return not (same_offset and same_dst)
+
+
+def resolve_imaginary(dt):
+    """
+    Given a datetime that may be imaginary, return an existing datetime.
+
+    This function assumes that an imaginary datetime represents what the
+    wall time would be in a zone had the offset transition not occurred, so
+    it will always fall forward by the transition's change in offset.
+
+    .. doctest::
+
+        >>> from dateutil import tz
+        >>> from datetime import datetime
+        >>> NYC = tz.gettz('America/New_York')
+        >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC)))
+        2017-03-12 03:30:00-04:00
+
+        >>> KIR = tz.gettz('Pacific/Kiritimati')
+        >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR)))
+        1995-01-02 12:30:00+14:00
+
+    As a note, :func:`datetime.astimezone` is guaranteed to produce a valid,
+    existing datetime, so a round-trip to and from UTC is sufficient to get
+    an extant datetime, however, this generally "falls back" to an earlier time
+    rather than falling forward to the STD side (though no guarantees are made
+    about this behavior).
+
+    :param dt:
+        A :class:`datetime.datetime` which may or may not exist.
+
+    :return:
+        Returns an existing :class:`datetime.datetime`. If ``dt`` was not
+        imaginary, the datetime returned is guaranteed to be the same object
+        passed to the function.
+
+    ..
versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextlib import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.12/site-packages/dateutil/tz/win.py b/venv/lib/python3.12/site-packages/dateutil/tz/win.py new file mode 100644 index 0000000..cde07ba --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. + +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. 
versionadded:: 2.5.0
+    """
+    p_wchar = ctypes.POINTER(wintypes.WCHAR)  # Pointer to a wide char
+
+    def __init__(self, tzres_loc='tzres.dll'):
+        # Load the user32 DLL so we can load strings from tzres
+        user32 = ctypes.WinDLL('user32')
+
+        # Specify the LoadStringW function
+        user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
+                                       wintypes.UINT,
+                                       wintypes.LPWSTR,
+                                       ctypes.c_int)
+
+        self.LoadStringW = user32.LoadStringW
+        self._tzres = ctypes.WinDLL(tzres_loc)
+        self.tzres_loc = tzres_loc
+
+    def load_name(self, offset):
+        """
+        Load a timezone name from a DLL offset (integer).
+
+        >>> from dateutil.tzwin import tzres
+        >>> tzr = tzres()
+        >>> print(tzr.load_name(112))
+        'Eastern Standard Time'
+
+        :param offset:
+            A positive integer value referring to a string from the tzres dll.
+
+        .. note::
+
+            Offsets found in the registry are generally of the form
+            ``@tzres.dll,-114``. The offset in this case is 114, not -114.
+
+        """
+        resource = self.p_wchar()
+        lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
+        nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
+        return resource[:nchar]
+
+    def name_from_string(self, tzname_str):
+        """
+        Parse strings as returned from the Windows registry into the time zone
+        name as defined in the registry.
+
+        >>> from dateutil.tzwin import tzres
+        >>> tzr = tzres()
+        >>> print(tzr.name_from_string('@tzres.dll,-251'))
+        'Dateline Daylight Time'
+        >>> print(tzr.name_from_string('Eastern Standard Time'))
+        'Eastern Standard Time'
+
+        :param tzname_str:
+            A timezone name string as returned from a Windows registry key.
+
+        :return:
+            Returns the localized timezone string from tzres.dll if the string
+            is of the form `@tzres.dll,-offset`, else returns the input string.
+        """
+        if not tzname_str.startswith('@'):
+            return tzname_str
+
+        name_splt = tzname_str.split(',-')
+        try:
+            offset = int(name_splt[1])
+        except (IndexError, ValueError):
+            raise ValueError("Malformed timezone string.")
+
+        return self.load_name(offset)
+
+
+class tzwinbase(tzrangebase):
+    """tzinfo class based on win32's timezones available in the registry."""
+    def __init__(self):
+        raise NotImplementedError('tzwinbase is an abstract base class')
+
+    def __eq__(self, other):
+        # Compare on all relevant dimensions, including name.
+        if not isinstance(other, tzwinbase):
+            return NotImplemented
+
+        return (self._std_offset == other._std_offset and
+                self._dst_offset == other._dst_offset and
+                self._stddayofweek == other._stddayofweek and
+                self._dstdayofweek == other._dstdayofweek and
+                self._stdweeknumber == other._stdweeknumber and
+                self._dstweeknumber == other._dstweeknumber and
+                self._stdhour == other._stdhour and
+                self._dsthour == other._dsthour and
+                self._stdminute == other._stdminute and
+                self._dstminute == other._dstminute and
+                self._std_abbr == other._std_abbr and
+                self._dst_abbr == other._dst_abbr)
+
+    @staticmethod
+    def list():
+        """Return a list of all time zones known to the system."""
+        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
+            with winreg.OpenKey(handle, TZKEYNAME) as tzkey:
+                result = [winreg.EnumKey(tzkey, i)
+                          for i in range(winreg.QueryInfoKey(tzkey)[0])]
+        return result
+
+    def display(self):
+        """
+        Return the display name of the time zone.
+        """
+        return self._display
+
+    def transitions(self, year):
+        """
+        For a given year, get the DST on and off transition times, expressed
+        always on the standard time side. For zones with no transitions, this
+        function returns ``None``.
+
+        :param year:
+            The year whose transitions you would like to query.
+
+        :return:
+            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
+            ``(dston, dstoff)`` for zones with an annual DST transition, or
+            ``None`` for fixed offset zones.
+        """
+
+        if not self.hasdst:
+            return None
+
+        dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
+                               self._dsthour, self._dstminute,
+                               self._dstweeknumber)
+
+        dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
+                                self._stdhour, self._stdminute,
+                                self._stdweeknumber)
+
+        # Ambiguous dates default to the STD side
+        dstoff -= self._dst_base_offset
+
+        return dston, dstoff
+
+    def _get_hasdst(self):
+        return self._dstmonth != 0
+
+    @property
+    def _dst_base_offset(self):
+        return self._dst_base_offset_
+
+
+class tzwin(tzwinbase):
+    """
+    Time zone object created from the zone info in the Windows registry
+
+    These are similar to :py:class:`dateutil.tz.tzrange` objects in that
+    the time zone data is provided in the format of a single offset rule
+    for either 0 or 2 time zone transitions per year.
+
+    :param name:
+        The name of a Windows time zone key, e.g. "Eastern Standard Time".
+        The full list of keys can be retrieved with :func:`tzwin.list`.
+    """
+
+    def __init__(self, name):
+        self._name = name
+
+        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
+            tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
+            with winreg.OpenKey(handle, tzkeyname) as tzkey:
+                keydict = valuestodict(tzkey)
+
+        self._std_abbr = keydict["Std"]
+        self._dst_abbr = keydict["Dlt"]
+
+        self._display = keydict["Display"]
+
+        # See http://www.jsiinc.com/SUBA/tip0300/rh0398.htm
+        tup = struct.unpack("=3l16h", keydict["TZI"])
+        stdoffset = -tup[0]-tup[1]      # Bias + StandardBias * -1
+        dstoffset = stdoffset-tup[2]    # + DaylightBias * -1
+        self._std_offset = datetime.timedelta(minutes=stdoffset)
+        self._dst_offset = datetime.timedelta(minutes=dstoffset)
+
+        # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
+        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
+        (self._stdmonth,
+         self._stddayofweek,   # Sunday = 0
+         self._stdweeknumber,  # Last = 5
+         self._stdhour,
+         self._stdminute) = tup[4:9]
+
+        (self._dstmonth,
+         self._dstdayofweek,   # Sunday = 0
+         self._dstweeknumber,  # Last = 5
+         self._dsthour,
+         self._dstminute) = tup[12:17]
+
+        self._dst_base_offset_ = self._dst_offset - self._std_offset
+        self.hasdst = self._get_hasdst()
+
+    def __repr__(self):
+        return "tzwin(%s)" % repr(self._name)
+
+    def __reduce__(self):
+        return (self.__class__, (self._name,))
+
+
+class tzwinlocal(tzwinbase):
+    """
+    Class representing the local time zone information in the Windows registry
+
+    While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time`
+    module) to retrieve time zone information, ``tzwinlocal`` retrieves the
+    rules directly from the Windows registry and creates an object like
+    :class:`dateutil.tz.tzwin`.
+
+    Because Windows does not have an equivalent of :func:`time.tzset`, on
+    Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the
+    time zone settings *at the time that the process was started*, meaning
+    changes to the machine's time zone settings during the run of a program
+    on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`.
+    Because ``tzwinlocal`` reads the registry directly, it is unaffected by
+    this issue.
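+
+    A minimal illustration (output assumes a machine whose local zone is set
+    to US Eastern; the abbreviation on another system will differ)::
+
+        >>> from dateutil.tz.win import tzwinlocal
+        >>> str(tzwinlocal())
+        "tzwinlocal('Eastern Standard Time')"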
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/venv/lib/python3.12/site-packages/dateutil/tzwin.py b/venv/lib/python3.12/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000..cebc673 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/venv/lib/python3.12/site-packages/dateutil/utils.py b/venv/lib/python3.12/site-packages/dateutil/utils.py new file mode 100644 index 0000000..dd2d245 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility functions for dealing with +datetimes. + +.. 
versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may have a negligible difference + to be considered equal. + """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__init__.py b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000..34f11ad --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method
+        for retrieving zones from the zone dictionary.
+
+        :param name:
+            The name of the zone to retrieve. (Generally IANA zone names)
+
+        :param default:
+            The value to return in the event of a missing key.
+
+        .. versionadded:: 2.6.0
+
+        """
+        return self.zones.get(name, default)
+
+
+# The current API has gettz as a module function, although in fact it taps into
+# a stateful class. So as a workaround for now, without changing the API, we
+# will create a new "global" class instance the first time a user requests a
+# timezone. Ugly, but adheres to the api.
+#
+# TODO: Remove after deprecation period.
+_CLASS_ZONE_INSTANCE = []
+
+
+def get_zonefile_instance(new_instance=False):
+    """
+    This is a convenience function which provides a :class:`ZoneInfoFile`
+    instance using the data provided by the ``dateutil`` package. By default, it
+    caches a single instance of the ZoneInfoFile object and returns that.
+
+    :param new_instance:
+        If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and
+        used as the cached instance for the next call. Otherwise, new instances
+        are created only as necessary.
+
+    :return:
+        Returns a :class:`ZoneInfoFile` object.
+
+    .. versionadded:: 2.6
+    """
+    if new_instance:
+        zif = None
+    else:
+        zif = getattr(get_zonefile_instance, '_cached_instance', None)
+
+    if zif is None:
+        zif = ZoneInfoFile(getzoneinfofile_stream())
+
+        get_zonefile_instance._cached_instance = zif
+
+    return zif
+
+
+def gettz(name):
+    """
+    This retrieves a time zone from the local zoneinfo tarball that is packaged
+    with dateutil.
+
+    :param name:
+        An IANA-style time zone name, as found in the zoneinfo file.
+
+    :return:
+        Returns a :class:`dateutil.tz.tzfile` time zone object.
+
+    .. warning::
+        It is generally inadvisable to use this function, and it is only
+        provided for API compatibility with earlier versions. This is *not*
+        equivalent to ``dateutil.tz.gettz()``, which selects an appropriate
+        time zone based on the inputs, favoring system zoneinfo. This is ONLY
+        for accessing the dateutil-specific zoneinfo (which may be out of
+        date compared to the system zoneinfo).
+
+    .. deprecated:: 2.6
+        If you need to use a specific zoneinfofile over the system zoneinfo,
+        instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call
+        :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead.
+
+        Use :func:`get_zonefile_instance` to retrieve an instance of the
+        dateutil-provided zoneinfo.
+    """
+    warnings.warn("zoneinfo.gettz() will be removed in future versions, "
+                  "to use the dateutil-provided zoneinfo files, instantiate a "
+                  "ZoneInfoFile object and use ZoneInfoFile.zones.get() "
+                  "instead. See the documentation for details.",
+                  DeprecationWarning)
+
+    if len(_CLASS_ZONE_INSTANCE) == 0:
+        _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
+    return _CLASS_ZONE_INSTANCE[0].zones.get(name)
+
+
+def gettz_db_metadata():
+    """ Get the zonefile metadata
+
+    See `zonefile_metadata`_
+
+    :returns:
+        A dictionary with the database metadata
+
+    .. deprecated:: 2.6
+        See deprecation warning in :func:`zoneinfo.gettz`. To get metadata,
+        query the attribute ``zoneinfo.ZoneInfoFile.metadata``.
+    """
+    warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future "
+                  "versions, to use the dateutil-provided zoneinfo files, "
+                  "instantiate a ZoneInfoFile object and query the "
+                  "'metadata' attribute instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5967193 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc new file mode 100644 index 0000000..54cc03c Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000..524c48e Binary files /dev/null and b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/venv/lib/python3.12/site-packages/dateutil/zoneinfo/rebuild.py b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/rebuild.py new file mode 100644 index 0000000..684c658 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,75 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call, check_output +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. + + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + + _run_zic(zonedir, filepaths) + + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _run_zic(zonedir, filepaths): + """Calls the ``zic`` compiler in a compatible way to get a "fat" binary. + + Recent versions of ``zic`` default to ``-b slim``, while older versions + don't even have the ``-b`` option (but default to "fat" binaries). The + current version of dateutil does not support Version 2+ TZif files, which + causes problems when used in conjunction with "slim" binaries, so this + function is used to ensure that we always get a "fat" binary. 
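+
+    For example, with a ``zic`` new enough to advertise ``-b`` in its help
+    text, the call below reduces to (paths illustrative)::
+
+        zic -b fat -d <zonedir> <filepath> [<filepath> ...]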
+ """ + + try: + help_text = check_output(["zic", "--help"]) + except OSError as e: + _print_on_nosuchfile(e) + raise + + if b"-b " in help_text: + bloat_args = ["-b", "fat"] + else: + bloat_args = [] + + check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/venv/lib/python3.12/site-packages/dns/__init__.py b/venv/lib/python3.12/site-packages/dns/__init__.py new file mode 100644 index 0000000..d30fd74 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/__init__.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""dnspython DNS toolkit""" + +__all__ = [ + "asyncbackend", + "asyncquery", + "asyncresolver", + "btree", + "btreezone", + "dnssec", + "dnssecalgs", + "dnssectypes", + "e164", + "edns", + "entropy", + "exception", + "flags", + "immutable", + "inet", + "ipv4", + "ipv6", + "message", + "name", + "namedict", + "node", + "opcode", + "query", + "quic", + "rcode", + "rdata", + "rdataclass", + "rdataset", + "rdatatype", + "renderer", + "resolver", + "reversename", + "rrset", + "serial", + "set", + "tokenizer", + "transaction", + "tsig", + "tsigkeyring", + "ttl", + "rdtypes", + "update", + "version", + "versioned", + "wire", + "xfr", + "zone", + "zonetypes", + "zonefile", +] + +from dns.version import version as __version__ # noqa diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..743d4fc Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncbackend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncbackend.cpython-312.pyc new file mode 100644 index 0000000..3c1d913 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncbackend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncio_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncio_backend.cpython-312.pyc new file mode 100644 index 0000000..56ba977 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncio_backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_ddr.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/dns/__pycache__/_ddr.cpython-312.pyc new file mode 100644 index 0000000..a481816 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_ddr.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_features.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_features.cpython-312.pyc new file mode 100644 index 0000000..952badc Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_features.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_immutable_ctx.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_immutable_ctx.cpython-312.pyc new file mode 100644 index 0000000..257c090 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_immutable_ctx.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_no_ssl.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_no_ssl.cpython-312.pyc new file mode 100644 index 0000000..c80ca07 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_no_ssl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_tls_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_tls_util.cpython-312.pyc new file mode 100644 index 0000000..a6c9be6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_tls_util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_trio_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_trio_backend.cpython-312.pyc new file mode 100644 index 0000000..7757e55 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/_trio_backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncbackend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncbackend.cpython-312.pyc new file mode 100644 index 0000000..f8744cf Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncbackend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncquery.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncquery.cpython-312.pyc new file mode 100644 index 0000000..3b494b2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncquery.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncresolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncresolver.cpython-312.pyc new file mode 100644 index 0000000..f1bf5bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncresolver.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/btree.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/btree.cpython-312.pyc new file mode 100644 index 0000000..a684755 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/btree.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/btreezone.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/btreezone.cpython-312.pyc new file mode 100644 index 0000000..4b987fb Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/btreezone.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/dns/__pycache__/dnssec.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/dnssec.cpython-312.pyc new file mode 100644 index 0000000..c686828 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/dnssec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/dnssectypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/dnssectypes.cpython-312.pyc new file mode 100644 index 0000000..6db9178 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/dnssectypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/e164.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/e164.cpython-312.pyc new file mode 100644 index 0000000..8f22f80 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/e164.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/edns.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/edns.cpython-312.pyc new file mode 100644 index 0000000..62e34bf Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/edns.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/entropy.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/entropy.cpython-312.pyc new file mode 100644 index 0000000..83727db Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/entropy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/enum.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/enum.cpython-312.pyc new file mode 100644 index 0000000..9dfc7ea Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/enum.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/exception.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/exception.cpython-312.pyc new file mode 100644 index 0000000..65dd272 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/exception.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/flags.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/flags.cpython-312.pyc new file mode 100644 index 0000000..762ae2a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/flags.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/grange.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/grange.cpython-312.pyc new file mode 100644 index 0000000..5064152 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/grange.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/immutable.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/immutable.cpython-312.pyc new file mode 100644 index 0000000..305a506 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/immutable.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/inet.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/inet.cpython-312.pyc new file mode 100644 index 0000000..a1537b0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/inet.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/ipv4.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/dns/__pycache__/ipv4.cpython-312.pyc new file mode 100644 index 0000000..0a33db7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/__pycache__/ipv4.cpython-312.pyc differ
[33 further identical "new file mode 100644 ... Binary files /dev/null and b/... differ" entries for the remaining committed dns/__pycache__/*.cpython-312.pyc bytecode caches: ipv6, message, name, namedict, nameserver, node, opcode, query, rcode, rdata, rdataclass, rdataset, rdatatype, renderer, resolver, reversename, rrset, serial, set, tokenizer, transaction, tsig, tsigkeyring, ttl, update, version, versioned, win32util, wire, xfr, zone, zonefile, zonetypes; compiled bytecode has no reviewable diff]
diff --git a/venv/lib/python3.12/site-packages/dns/_asyncbackend.py b/venv/lib/python3.12/site-packages/dns/_asyncbackend.py new file mode 100644 index 0000000..23455db --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_asyncbackend.py @@ -0,0 +1,100 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This is a nullcontext for both sync and async. 3.7 has a nullcontext, +# but it is only for sync use. + + +class NullContext: + def __init__(self, enter_result=None): + self.enter_result = enter_result + + def __enter__(self): + return self.enter_result + + def __exit__(self, exc_type, exc_value, traceback): + pass + + async def __aenter__(self): + return self.enter_result + + async def __aexit__(self, exc_type, exc_value, traceback): + pass + + +# These are declared here so backends can import them without creating +# circular dependencies with dns.asyncbackend. + + +class Socket: # pragma: no cover + def __init__(self, family: int, type: int): + self.family = family + self.type = type + + async def close(self): + pass + + async def getpeername(self): + raise NotImplementedError + + async def getsockname(self): + raise NotImplementedError + + async def getpeercert(self, timeout): + raise NotImplementedError + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.close() + + +class DatagramSocket(Socket): # pragma: no cover + async def sendto(self, what, destination, timeout): + raise NotImplementedError + + async def recvfrom(self, size, timeout): + raise NotImplementedError + + +class StreamSocket(Socket): # pragma: no cover + async def sendall(self, what, timeout): + raise NotImplementedError + + async def recv(self, size, timeout): + raise NotImplementedError + + +class NullTransport: + async def connect_tcp(self, host, port, timeout, local_address): + raise NotImplementedError + + +class Backend: # pragma: no cover + def name(self) -> str: + return "unknown" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + raise NotImplementedError + + def datagram_connection_required(self): + return False + + async def sleep(self, interval): + raise NotImplementedError + + def get_transport_class(self): + raise NotImplementedError + + async def wait_for(self, awaitable, timeout): + raise NotImplementedError diff --git a/venv/lib/python3.12/site-packages/dns/_asyncio_backend.py b/venv/lib/python3.12/site-packages/dns/_asyncio_backend.py new file mode 100644 index 0000000..303908c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_asyncio_backend.py @@ -0,0 +1,276 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""asyncio library query support""" + +import asyncio +import socket +import sys + +import dns._asyncbackend +import dns._features +import dns.exception +import dns.inet + +_is_win32 = sys.platform == "win32" + + +def _get_running_loop(): + try: + return asyncio.get_running_loop() + except AttributeError: # pragma: no cover + return asyncio.get_event_loop() + + +class _DatagramProtocol: + def __init__(self): + self.transport = None + self.recvfrom = None + + def connection_made(self, transport): + self.transport = transport + + def datagram_received(self, data, addr): + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_result((data, addr)) + + def error_received(self, exc): # pragma: no cover + if self.recvfrom and not 
self.recvfrom.done(): + self.recvfrom.set_exception(exc) + + def connection_lost(self, exc): + if self.recvfrom and not self.recvfrom.done(): + if exc is None: + # EOF we triggered. Is there a better way to do this? + try: + raise EOFError("EOF") + except EOFError as e: + self.recvfrom.set_exception(e) + else: + self.recvfrom.set_exception(exc) + + def close(self): + if self.transport is not None: + self.transport.close() + + +async def _maybe_wait_for(awaitable, timeout): + if timeout is not None: + try: + return await asyncio.wait_for(awaitable, timeout) + except asyncio.TimeoutError: + raise dns.exception.Timeout(timeout=timeout) + else: + return await awaitable + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, family, transport, protocol): + super().__init__(family, socket.SOCK_DGRAM) + self.transport = transport + self.protocol = protocol + + async def sendto(self, what, destination, timeout): # pragma: no cover + # no timeout for asyncio sendto + self.transport.sendto(what, destination) + return len(what) + + async def recvfrom(self, size, timeout): + # ignore size as there's no way I know to tell protocol about it + done = _get_running_loop().create_future() + try: + assert self.protocol.recvfrom is None + self.protocol.recvfrom = done + await _maybe_wait_for(done, timeout) + return done.result() + finally: + self.protocol.recvfrom = None + + async def close(self): + self.protocol.close() + + async def getpeername(self): + return self.transport.get_extra_info("peername") + + async def getsockname(self): + return self.transport.get_extra_info("sockname") + + async def getpeercert(self, timeout): + raise NotImplementedError + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, af, reader, writer): + super().__init__(af, socket.SOCK_STREAM) + self.reader = reader + self.writer = writer + + async def sendall(self, what, timeout): + self.writer.write(what) + return await _maybe_wait_for(self.writer.drain(), timeout) + + async def recv(self, size, timeout): + return await _maybe_wait_for(self.reader.read(size), timeout) + + async def close(self): + self.writer.close() + + async def getpeername(self): + return self.writer.get_extra_info("peername") + + async def getsockname(self): + return self.writer.get_extra_info("sockname") + + async def getpeercert(self, timeout): + return self.writer.get_extra_info("peercert") + + +if dns._features.have("doh"): + import anyio + import httpcore + import httpcore._backends.anyio + import httpx + + _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend + _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream # pyright: ignore + + from dns.query import _compute_times, _expiration_for_this_attempt, _remaining + + class _NetworkBackend(_CoreAsyncNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + if local_port != 0: + raise NotImplementedError( + "the asyncio transport for HTTPX cannot set the local port" + ) + + async def connect_tcp( + self, host, port, timeout=None, local_address=None, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family 
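+ # The host was neither a literal address nor covered by a bootstrap
+ # address, so it is resolved with dnspython's own resolver below
+ # (narrowed to the local address's family when one is given) rather
+ # than the OS stub resolver.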
+ if local_address: + family = dns.inet.af_for_address(local_address) + answers = await self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + try: + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + timeout = _remaining(attempt_expiration) + with anyio.fail_after(timeout): + stream = await anyio.connect_tcp( + remote_host=address, + remote_port=port, + local_host=local_address, + ) + return _CoreAnyIOStream(stream) + except Exception: + pass + raise httpcore.ConnectError + + async def connect_unix_socket( + self, path, timeout=None, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + async def sleep(self, seconds): # pylint: disable=signature-differs + await anyio.sleep(seconds) + + class _HTTPTransport(httpx.AsyncHTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None and bootstrap_address is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +else: + _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return "asyncio" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + loop = _get_running_loop() + if socktype == socket.SOCK_DGRAM: + if _is_win32 and source is None: + # Win32 wants explicit binding before recvfrom(). This is the + # proper fix for [#637]. + source = (dns.inet.any_for_af(af), 0) + transport, protocol = await loop.create_datagram_endpoint( + _DatagramProtocol, # pyright: ignore + source, + family=af, + proto=proto, + remote_addr=destination, + ) + return DatagramSocket(af, transport, protocol) + elif socktype == socket.SOCK_STREAM: + if destination is None: + # This shouldn't happen, but we check to make code analysis software + # happier. 
+ raise ValueError("destination required for stream sockets") + (r, w) = await _maybe_wait_for( + asyncio.open_connection( + destination[0], + destination[1], + ssl=ssl_context, + family=af, + proto=proto, + local_addr=source, + server_hostname=server_hostname, + ), + timeout, + ) + return StreamSocket(af, r, w) + raise NotImplementedError( + "unsupported socket " + f"type {socktype}" + ) # pragma: no cover + + async def sleep(self, interval): + await asyncio.sleep(interval) + + def datagram_connection_required(self): + return False + + def get_transport_class(self): + return _HTTPTransport + + async def wait_for(self, awaitable, timeout): + return await _maybe_wait_for(awaitable, timeout) diff --git a/venv/lib/python3.12/site-packages/dns/_ddr.py b/venv/lib/python3.12/site-packages/dns/_ddr.py new file mode 100644 index 0000000..bf5c11e --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_ddr.py @@ -0,0 +1,154 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license +# +# Support for Discovery of Designated Resolvers + +import socket +import time +from urllib.parse import urlparse + +import dns.asyncbackend +import dns.inet +import dns.name +import dns.nameserver +import dns.query +import dns.rdtypes.svcbbase + +# The special name of the local resolver when using DDR +_local_resolver_name = dns.name.from_text("_dns.resolver.arpa") + + +# +# Processing is split up into I/O independent and I/O dependent parts to +# make supporting sync and async versions easy. +# + + +class _SVCBInfo: + def __init__(self, bootstrap_address, port, hostname, nameservers): + self.bootstrap_address = bootstrap_address + self.port = port + self.hostname = hostname + self.nameservers = nameservers + + def ddr_check_certificate(self, cert): + """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)""" + for name, value in cert["subjectAltName"]: + if name == "IP Address" and value == self.bootstrap_address: + return True + return False + + def make_tls_context(self): + ssl = dns.query.ssl + ctx = ssl.create_default_context() + ctx.minimum_version = ssl.TLSVersion.TLSv1_2 + return ctx + + def ddr_tls_check_sync(self, lifetime): + ctx = self.make_tls_context() + expiration = time.time() + lifetime + with socket.create_connection( + (self.bootstrap_address, self.port), lifetime + ) as s: + with ctx.wrap_socket(s, server_hostname=self.hostname) as ts: + ts.settimeout(dns.query._remaining(expiration)) + ts.do_handshake() + cert = ts.getpeercert() + return self.ddr_check_certificate(cert) + + async def ddr_tls_check_async(self, lifetime, backend=None): + if backend is None: + backend = dns.asyncbackend.get_default_backend() + ctx = self.make_tls_context() + expiration = time.time() + lifetime + async with await backend.make_socket( + dns.inet.af_for_address(self.bootstrap_address), + socket.SOCK_STREAM, + 0, + None, + (self.bootstrap_address, self.port), + lifetime, + ctx, + self.hostname, + ) as ts: + cert = await ts.getpeercert(dns.query._remaining(expiration)) + return self.ddr_check_certificate(cert) + + +def _extract_nameservers_from_svcb(answer): + bootstrap_address = answer.nameserver + if not dns.inet.is_address(bootstrap_address): + return [] + infos = [] + for rr in answer.rrset.processing_order(): + nameservers = [] + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN) + if param is None: + continue + alpns = set(param.ids) + host = rr.target.to_text(omit_final_dot=True) + port = None + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT) + if param 
is not None: + port = param.port + # For now we ignore address hints and address resolution and always use the + # bootstrap address + if b"h2" in alpns: + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH) + if param is None or not param.value.endswith(b"{?dns}"): + continue + path = param.value[:-6].decode() + if not path.startswith("/"): + path = "/" + path + if port is None: + port = 443 + url = f"https://{host}:{port}{path}" + # check the URL + try: + urlparse(url) + nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address)) + except Exception: + # continue processing other ALPN types + pass + if b"dot" in alpns: + if port is None: + port = 853 + nameservers.append( + dns.nameserver.DoTNameserver(bootstrap_address, port, host) + ) + if b"doq" in alpns: + if port is None: + port = 853 + nameservers.append( + dns.nameserver.DoQNameserver(bootstrap_address, port, True, host) + ) + if len(nameservers) > 0: + infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers)) + return infos + + +def _get_nameservers_sync(answer, lifetime): + """Return a list of TLS-validated resolver nameservers extracted from an SVCB + answer.""" + nameservers = [] + infos = _extract_nameservers_from_svcb(answer) + for info in infos: + try: + if info.ddr_tls_check_sync(lifetime): + nameservers.extend(info.nameservers) + except Exception: + pass + return nameservers + + +async def _get_nameservers_async(answer, lifetime): + """Return a list of TLS-validated resolver nameservers extracted from an SVCB + answer.""" + nameservers = [] + infos = _extract_nameservers_from_svcb(answer) + for info in infos: + try: + if await info.ddr_tls_check_async(lifetime): + nameservers.extend(info.nameservers) + except Exception: + pass + return nameservers diff --git a/venv/lib/python3.12/site-packages/dns/_features.py b/venv/lib/python3.12/site-packages/dns/_features.py new file mode 100644 index 0000000..65a9a2a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_features.py @@ -0,0 +1,95 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import importlib.metadata +import itertools +import string +from typing import Dict, List, Tuple + + +def _tuple_from_text(version: str) -> Tuple: + text_parts = version.split(".") + int_parts = [] + for text_part in text_parts: + digit_prefix = "".join( + itertools.takewhile(lambda x: x in string.digits, text_part) + ) + try: + int_parts.append(int(digit_prefix)) + except Exception: + break + return tuple(int_parts) + + +def _version_check( + requirement: str, +) -> bool: + """Is the requirement fulfilled? + + The requirement must be of the form + + package>=version + """ + package, minimum = requirement.split(">=") + try: + version = importlib.metadata.version(package) + # This shouldn't happen, but it apparently can. + if version is None: + return False + except Exception: + return False + t_version = _tuple_from_text(version) + t_minimum = _tuple_from_text(minimum) + if t_version < t_minimum: + return False + return True + + +_cache: Dict[str, bool] = {} + + +def have(feature: str) -> bool: + """Is *feature* available? + + This tests if all optional packages needed for the + feature are available and recent enough. + + Returns ``True`` if the feature is available, + and ``False`` if it is not or if metadata is + missing. 
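+ + For example, ``have("doh")`` is true only when all of the ``httpcore``, + ``httpx``, and ``h2`` requirements listed in ``_requirements`` below are + importable and recent enough.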
+ """ + value = _cache.get(feature) + if value is not None: + return value + requirements = _requirements.get(feature) + if requirements is None: + # we make a cache entry here for consistency not performance + _cache[feature] = False + return False + ok = True + for requirement in requirements: + if not _version_check(requirement): + ok = False + break + _cache[feature] = ok + return ok + + +def force(feature: str, enabled: bool) -> None: + """Force the status of *feature* to be *enabled*. + + This method is provided as a workaround for any cases + where importlib.metadata is ineffective, or for testing. + """ + _cache[feature] = enabled + + +_requirements: Dict[str, List[str]] = { + ### BEGIN generated requirements + "dnssec": ["cryptography>=45"], + "doh": ["httpcore>=1.0.0", "httpx>=0.28.0", "h2>=4.2.0"], + "doq": ["aioquic>=1.2.0"], + "idna": ["idna>=3.10"], + "trio": ["trio>=0.30"], + "wmi": ["wmi>=1.5.1"], + ### END generated requirements +} diff --git a/venv/lib/python3.12/site-packages/dns/_immutable_ctx.py b/venv/lib/python3.12/site-packages/dns/_immutable_ctx.py new file mode 100644 index 0000000..b3d72de --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_immutable_ctx.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This implementation of the immutable decorator requires python >= +# 3.7, and is significantly more storage efficient when making classes +# with slots immutable. It's also faster. + +import contextvars +import inspect + +_in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False) + + +class _Immutable: + """Immutable mixin class""" + + # We set slots to the empty list to say "we don't have any attributes". + # We do this so that if we're mixed in with a class with __slots__, we + # don't cause a __dict__ to be added which would waste space. + + __slots__ = () + + def __setattr__(self, name, value): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__setattr__(name, value) + + def __delattr__(self, name): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__delattr__(name) + + +def _immutable_init(f): + def nf(*args, **kwargs): + previous = _in__init__.set(args[0]) + try: + # call the actual __init__ + f(*args, **kwargs) + finally: + _in__init__.reset(previous) + + nf.__signature__ = inspect.signature(f) # pyright: ignore + return nf + + +def immutable(cls): + if _Immutable in cls.__mro__: + # Some ancestor already has the mixin, so just make sure we keep + # following the __init__ protocol. + cls.__init__ = _immutable_init(cls.__init__) + if hasattr(cls, "__setstate__"): + cls.__setstate__ = _immutable_init(cls.__setstate__) + ncls = cls + else: + # Mixin the Immutable class and follow the __init__ protocol. + class ncls(_Immutable, cls): + # We have to do the __slots__ declaration here too! 
+ __slots__ = () + + @_immutable_init + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + if hasattr(cls, "__setstate__"): + + @_immutable_init + def __setstate__(self, *args, **kwargs): + super().__setstate__(*args, **kwargs) + + # make ncls have the same name and module as cls + ncls.__name__ = cls.__name__ + ncls.__qualname__ = cls.__qualname__ + ncls.__module__ = cls.__module__ + return ncls diff --git a/venv/lib/python3.12/site-packages/dns/_no_ssl.py b/venv/lib/python3.12/site-packages/dns/_no_ssl.py new file mode 100644 index 0000000..edb452d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_no_ssl.py @@ -0,0 +1,61 @@ +import enum +from typing import Any + +CERT_NONE = 0 + + +class TLSVersion(enum.IntEnum): + TLSv1_2 = 12 + + +class WantReadException(Exception): + pass + + +class WantWriteException(Exception): + pass + + +class SSLWantReadError(Exception): + pass + + +class SSLWantWriteError(Exception): + pass + + +class SSLContext: + def __init__(self) -> None: + self.minimum_version: Any = TLSVersion.TLSv1_2 + self.check_hostname: bool = False + self.verify_mode: int = CERT_NONE + + def wrap_socket(self, *args, **kwargs) -> "SSLSocket": # type: ignore + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + def set_alpn_protocols(self, *args, **kwargs): # type: ignore + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + +class SSLSocket: + def pending(self) -> bool: + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + def do_handshake(self) -> None: + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + def settimeout(self, value: Any) -> None: + pass + + def getpeercert(self) -> Any: + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + return False + + +def create_default_context(*args, **kwargs) -> SSLContext: # type: ignore + raise Exception("no ssl support") # pylint: disable=broad-exception-raised diff --git a/venv/lib/python3.12/site-packages/dns/_tls_util.py b/venv/lib/python3.12/site-packages/dns/_tls_util.py new file mode 100644 index 0000000..10ddf72 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_tls_util.py @@ -0,0 +1,19 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import os +from typing import Tuple + + +def convert_verify_to_cafile_and_capath( + verify: bool | str, +) -> Tuple[str | None, str | None]: + cafile: str | None = None + capath: str | None = None + if isinstance(verify, str): + if os.path.isfile(verify): + cafile = verify + elif os.path.isdir(verify): + capath = verify + else: + raise ValueError("invalid verify string") + return cafile, capath diff --git a/venv/lib/python3.12/site-packages/dns/_trio_backend.py b/venv/lib/python3.12/site-packages/dns/_trio_backend.py new file mode 100644 index 0000000..bde7e8b --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/_trio_backend.py @@ -0,0 +1,255 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""trio async I/O library query support""" + +import socket + +import trio +import trio.socket # type: ignore + +import dns._asyncbackend +import dns._features +import dns.exception +import dns.inet + +if not dns._features.have("trio"): + raise ImportError("trio not found or too old") + + +def _maybe_timeout(timeout): + if timeout is not None: + return trio.move_on_after(timeout) + 
else: + return dns._asyncbackend.NullContext() + + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + +# pylint: disable=redefined-outer-name + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, sock): + super().__init__(sock.family, socket.SOCK_DGRAM) + self.socket = sock + + async def sendto(self, what, destination, timeout): + with _maybe_timeout(timeout): + if destination is None: + return await self.socket.send(what) + else: + return await self.socket.sendto(what, destination) + raise dns.exception.Timeout( + timeout=timeout + ) # pragma: no cover lgtm[py/unreachable-statement] + + async def recvfrom(self, size, timeout): + with _maybe_timeout(timeout): + return await self.socket.recvfrom(size) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def close(self): + self.socket.close() + + async def getpeername(self): + return self.socket.getpeername() + + async def getsockname(self): + return self.socket.getsockname() + + async def getpeercert(self, timeout): + raise NotImplementedError + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, family, stream, tls=False): + super().__init__(family, socket.SOCK_STREAM) + self.stream = stream + self.tls = tls + + async def sendall(self, what, timeout): + with _maybe_timeout(timeout): + return await self.stream.send_all(what) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def recv(self, size, timeout): + with _maybe_timeout(timeout): + return await self.stream.receive_some(size) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def close(self): + await self.stream.aclose() + + async def getpeername(self): + if self.tls: + return self.stream.transport_stream.socket.getpeername() + else: + return self.stream.socket.getpeername() + + async def getsockname(self): + if self.tls: + return self.stream.transport_stream.socket.getsockname() + else: + return self.stream.socket.getsockname() + + async def getpeercert(self, timeout): + if self.tls: + with _maybe_timeout(timeout): + await self.stream.do_handshake() + return self.stream.getpeercert() + else: + raise NotImplementedError + + +if dns._features.have("doh"): + import httpcore + import httpcore._backends.trio + import httpx + + _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend + _CoreTrioStream = httpcore._backends.trio.TrioStream + + from dns.query import _compute_times, _expiration_for_this_attempt, _remaining + + class _NetworkBackend(_CoreAsyncNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + + async def connect_tcp( + self, host, port, timeout=None, local_address=None, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = await self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + try: + af = dns.inet.af_for_address(address) + if local_address is not None or self._local_port != 0: + 
source = (local_address, self._local_port) + else: + source = None + destination = (address, port) + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + timeout = _remaining(attempt_expiration) + sock = await Backend().make_socket( + af, socket.SOCK_STREAM, 0, source, destination, timeout + ) + assert isinstance(sock, StreamSocket) + return _CoreTrioStream(sock.stream) + except Exception: + continue + raise httpcore.ConnectError + + async def connect_unix_socket( + self, path, timeout=None, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + async def sleep(self, seconds): # pylint: disable=signature-differs + await trio.sleep(seconds) + + class _HTTPTransport(httpx.AsyncHTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None and bootstrap_address is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +else: + _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return "trio" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + s = trio.socket.socket(af, socktype, proto) + stream = None + try: + if source: + await s.bind(_lltuple(source, af)) + if socktype == socket.SOCK_STREAM or destination is not None: + connected = False + with _maybe_timeout(timeout): + assert destination is not None + await s.connect(_lltuple(destination, af)) + connected = True + if not connected: + raise dns.exception.Timeout( + timeout=timeout + ) # lgtm[py/unreachable-statement] + except Exception: # pragma: no cover + s.close() + raise + if socktype == socket.SOCK_DGRAM: + return DatagramSocket(s) + elif socktype == socket.SOCK_STREAM: + stream = trio.SocketStream(s) + tls = False + if ssl_context: + tls = True + try: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=server_hostname + ) + except Exception: # pragma: no cover + await stream.aclose() + raise + return StreamSocket(af, stream, tls) + raise NotImplementedError( + "unsupported socket " + f"type {socktype}" + ) # pragma: no cover + + async def sleep(self, interval): + await trio.sleep(interval) + + def get_transport_class(self): + return _HTTPTransport + + async def wait_for(self, awaitable, timeout): + with _maybe_timeout(timeout): + return await awaitable + raise dns.exception.Timeout( + timeout=timeout + ) # pragma: no cover lgtm[py/unreachable-statement] diff --git a/venv/lib/python3.12/site-packages/dns/asyncbackend.py b/venv/lib/python3.12/site-packages/dns/asyncbackend.py new file mode 100644 index 0000000..0ec58b0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/asyncbackend.py @@ -0,0 +1,101 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +from typing import Dict + +import dns.exception + +# pylint: disable=unused-import +from dns._asyncbackend import ( # noqa: F401 lgtm[py/unused-import] + Backend, + DatagramSocket, + Socket, + StreamSocket, +) + +# pylint: enable=unused-import + +_default_backend = None + +_backends: Dict[str, Backend] = {} + +# Allow sniffio import to be disabled for testing 
purposes +_no_sniffio = False + + +class AsyncLibraryNotFoundError(dns.exception.DNSException): + pass + + +def get_backend(name: str) -> Backend: + """Get the specified asynchronous backend. + + *name*, a ``str``, the name of the backend. Currently the "trio" + and "asyncio" backends are available. + + Raises NotImplementedError if an unknown backend name is specified. + """ + # pylint: disable=import-outside-toplevel,redefined-outer-name + backend = _backends.get(name) + if backend: + return backend + if name == "trio": + import dns._trio_backend + + backend = dns._trio_backend.Backend() + elif name == "asyncio": + import dns._asyncio_backend + + backend = dns._asyncio_backend.Backend() + else: + raise NotImplementedError(f"unimplemented async backend {name}") + _backends[name] = backend + return backend + + +def sniff() -> str: + """Attempt to determine the in-use asynchronous I/O library by using + the ``sniffio`` module if it is available. + + Returns the name of the library, or raises AsyncLibraryNotFoundError + if the library cannot be determined. + """ + # pylint: disable=import-outside-toplevel + try: + if _no_sniffio: + raise ImportError + import sniffio + + try: + return sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + raise AsyncLibraryNotFoundError("sniffio cannot determine async library") + except ImportError: + import asyncio + + try: + asyncio.get_running_loop() + return "asyncio" + except RuntimeError: + raise AsyncLibraryNotFoundError("no async library detected") + + +def get_default_backend() -> Backend: + """Get the default backend, initializing it if necessary.""" + if _default_backend: + return _default_backend + + return set_default_backend(sniff()) + + +def set_default_backend(name: str) -> Backend: + """Set the default backend. + + It's not normally necessary to call this method, as + ``get_default_backend()`` will initialize the backend + appropriately in many cases. If ``sniffio`` is not installed, or + in testing situations, this function allows the backend to be set + explicitly. + """ + global _default_backend + _default_backend = get_backend(name) + return _default_backend diff --git a/venv/lib/python3.12/site-packages/dns/asyncquery.py b/venv/lib/python3.12/site-packages/dns/asyncquery.py new file mode 100644 index 0000000..bb77045 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/asyncquery.py @@ -0,0 +1,953 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
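A minimal usage sketch for the backend-selection API in dns/asyncbackend.py above (the choice of the "asyncio" backend here is illustrative, not something this commit itself exercises):

    import dns.asyncbackend

    # Pin the backend explicitly; per set_default_backend()'s docstring this
    # is useful when sniffio is unavailable or in testing situations.
    backend = dns.asyncbackend.set_default_backend("asyncio")
    assert backend.name() == "asyncio"

    # Subsequent calls reuse the pinned backend without sniffing.
    assert dns.asyncbackend.get_default_backend() is backend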
+ +"""Talk to a DNS server.""" + +import base64 +import contextlib +import random +import socket +import struct +import time +import urllib.parse +from typing import Any, Dict, Optional, Tuple, cast + +import dns.asyncbackend +import dns.exception +import dns.inet +import dns.message +import dns.name +import dns.quic +import dns.rdatatype +import dns.transaction +import dns.tsig +import dns.xfr +from dns._asyncbackend import NullContext +from dns.query import ( + BadResponse, + HTTPVersion, + NoDOH, + NoDOQ, + UDPMode, + _check_status, + _compute_times, + _matches_destination, + _remaining, + have_doh, + make_ssl_context, +) + +try: + import ssl +except ImportError: + import dns._no_ssl as ssl # type: ignore + +if have_doh: + import httpx + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + + +def _source_tuple(af, address, port): + # Make a high level source tuple, or return None if address and port + # are both None + if address or port: + if address is None: + if af == socket.AF_INET: + address = "0.0.0.0" + elif af == socket.AF_INET6: + address = "::" + else: + raise NotImplementedError(f"unknown address family {af}") + return (address, port) + else: + return None + + +def _timeout(expiration, now=None): + if expiration is not None: + if not now: + now = time.time() + return max(expiration - now, 0) + else: + return None + + +async def send_udp( + sock: dns.asyncbackend.DatagramSocket, + what: dns.message.Message | bytes, + destination: Any, + expiration: float | None = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. The expiration value is meaningless for the asyncio backend, as + asyncio's transport sendto() never blocks. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = await sock.sendto(what, destination, _timeout(expiration, sent_time)) + return (n, sent_time) + + +async def receive_udp( + sock: dns.asyncbackend.DatagramSocket, + destination: Any | None = None, + expiration: float | None = None, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + keyring: Dict[dns.name.Name, dns.tsig.Key] | None = None, + request_mac: bytes | None = b"", + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + ignore_errors: bool = False, + query: dns.message.Message | None = None, +) -> Any: + """Read a DNS message from a UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + See :py:func:`dns.query.receive_udp()` for the documentation of the other + parameters, and exceptions. + + Returns a ``(dns.message.Message, float, tuple)`` tuple of the received message, the + received time, and the address where the message arrived from. 
+ """ + + wire = b"" + while True: + (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration)) + if not _matches_destination( + sock.family, from_address, destination, ignore_unexpected + ): + continue + received_time = time.time() + try: + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation, + ) + except dns.message.Truncated as e: + # See the comment in query.py for details. + if ( + ignore_errors + and query is not None + and not query.is_response(e.message()) + ): + continue + else: + raise + except Exception: + if ignore_errors: + continue + else: + raise + if ignore_errors and query is not None and not query.is_response(r): + continue + return (r, received_time, from_address) + + +async def udp( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 53, + source: str | None = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + sock: dns.asyncbackend.DatagramSocket | None = None, + backend: dns.asyncbackend.Backend | None = None, + ignore_errors: bool = False, +) -> dns.message.Message: + """Return the response obtained after sending a query via UDP. + + *sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the query. If ``None``, the default, a + socket is created. Note that if a socket is provided, the + *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + af = dns.inet.af_for_address(where) + destination = _lltuple((where, port), af) + if sock: + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + if not backend: + backend = dns.asyncbackend.get_default_backend() + stuple = _source_tuple(af, source, source_port) + if backend.datagram_connection_required(): + dtuple = (where, port) + else: + dtuple = None + cm = await backend.make_socket(af, socket.SOCK_DGRAM, 0, stuple, dtuple) + async with cm as s: + await send_udp(s, wire, destination, expiration) # pyright: ignore + (r, received_time, _) = await receive_udp( + s, # pyright: ignore + destination, + expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + raise_on_truncation, + ignore_errors, + q, + ) + r.time = received_time - begin_time + # We don't need to check q.is_response() if we are in ignore_errors mode + # as receive_udp() will have checked it. 
+ if not (ignore_errors or q.is_response(r)): + raise BadResponse + return r + + +async def udp_with_fallback( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 53, + source: str | None = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + udp_sock: dns.asyncbackend.DatagramSocket | None = None, + tcp_sock: dns.asyncbackend.StreamSocket | None = None, + backend: dns.asyncbackend.Backend | None = None, + ignore_errors: bool = False, +) -> Tuple[dns.message.Message, bool]: + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *udp_sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the UDP query. If ``None``, the default, a + socket is created. Note that if a socket is provided the *source*, + *source_port*, and *backend* are ignored for the UDP query. + + *tcp_sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the + socket to use for the TCP query. If ``None``, the default, a + socket is created. Note that if a socket is provided *where*, + *source*, *source_port*, and *backend* are ignored for the TCP query. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp_with_fallback()` for the documentation + of the other parameters, exceptions, and return type of this + method. + """ + try: + response = await udp( + q, + where, + timeout, + port, + source, + source_port, + ignore_unexpected, + one_rr_per_rrset, + ignore_trailing, + True, + udp_sock, + backend, + ignore_errors, + ) + return (response, False) + except dns.message.Truncated: + response = await tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + tcp_sock, + backend, + ) + return (response, True) + + +async def send_tcp( + sock: dns.asyncbackend.StreamSocket, + what: dns.message.Message | bytes, + expiration: float | None = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified TCP socket. + + *sock*, a ``dns.asyncbackend.StreamSocket``. + + See :py:func:`dns.query.send_tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + if isinstance(what, dns.message.Message): + tcpmsg = what.to_wire(prepend_length=True) + else: + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = len(what).to_bytes(2, "big") + what + sent_time = time.time() + await sock.sendall(tcpmsg, _timeout(expiration, sent_time)) + return (len(tcpmsg), sent_time) + + +async def _read_exactly(sock, count, expiration): + """Read the specified number of bytes from stream. Keep trying until we + either get the desired amount, or we hit EOF. + """ + s = b"" + while count > 0: + n = await sock.recv(count, _timeout(expiration)) + if n == b"": + raise EOFError("EOF") + count = count - len(n) + s = s + n + return s + + +async def receive_tcp( + sock: dns.asyncbackend.StreamSocket, + expiration: float | None = None, + one_rr_per_rrset: bool = False, + keyring: Dict[dns.name.Name, dns.tsig.Key] | None = None, + request_mac: bytes | None = b"", + ignore_trailing: bool = False, +) -> Tuple[dns.message.Message, float]: + """Read a DNS message from a TCP socket. + + *sock*, a ``dns.asyncbackend.StreamSocket``. 
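+ + The message is read as a two-byte big-endian length prefix followed by + that many bytes of DNS wire format, the standard DNS-over-TCP framing.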
+ + See :py:func:`dns.query.receive_tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + ldata = await _read_exactly(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = await _read_exactly(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return (r, received_time) + + +async def tcp( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 53, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: dns.asyncbackend.StreamSocket | None = None, + backend: dns.asyncbackend.Backend | None = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TCP. + + *sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the + socket to use for the query. If ``None``, the default, a socket + is created. Note that if a socket is provided + *where*, *port*, *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + if sock: + # Verify that the socket is connected, as if it's not connected, + # it's not writable, and the polling in send_tcp() will time out or + # hang forever. + await sock.getpeername() + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + # These are simple (address, port) pairs, not family-dependent tuples + # you pass to low-level socket code. + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + cm = await backend.make_socket( + af, socket.SOCK_STREAM, 0, stuple, dtuple, timeout + ) + async with cm as s: + await send_tcp(s, wire, expiration) # pyright: ignore + (r, received_time) = await receive_tcp( + s, # pyright: ignore + expiration, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + + +async def tls( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 853, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: dns.asyncbackend.StreamSocket | None = None, + backend: dns.asyncbackend.Backend | None = None, + ssl_context: ssl.SSLContext | None = None, + server_hostname: str | None = None, + verify: bool | str = True, +) -> dns.message.Message: + """Return the response obtained after sending a query via TLS. + + *sock*, an ``asyncbackend.StreamSocket``, or ``None``, the socket + to use for the query. If ``None``, the default, a socket is + created. Note that if a socket is provided, it must be a + connected SSL stream socket, and *where*, *port*, + *source*, *source_port*, *backend*, *ssl_context*, and *server_hostname* + are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend.
+ + See :py:func:`dns.query.tls()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + if ssl_context is None: + ssl_context = make_ssl_context(verify, server_hostname is not None, ["dot"]) + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + cm = await backend.make_socket( + af, + socket.SOCK_STREAM, + 0, + stuple, + dtuple, + timeout, + ssl_context, + server_hostname, + ) + async with cm as s: + timeout = _timeout(expiration) + response = await tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + s, + backend, + ) + end_time = time.time() + response.time = end_time - begin_time + return response + + +def _maybe_get_resolver( + resolver: Optional["dns.asyncresolver.Resolver"], # pyright: ignore +) -> "dns.asyncresolver.Resolver": # pyright: ignore + # We need a separate method for this to avoid overriding the global + # variable "dns" with the as-yet undefined local variable "dns" + # in https(). + if resolver is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + return resolver + + +async def https( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 443, + source: str | None = None, + source_port: int = 0, # pylint: disable=W0613 + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + client: Optional["httpx.AsyncClient|dns.quic.AsyncQuicConnection"] = None, + path: str = "/dns-query", + post: bool = True, + verify: bool | str | ssl.SSLContext = True, + bootstrap_address: str | None = None, + resolver: Optional["dns.asyncresolver.Resolver"] = None, # pyright: ignore + family: int = socket.AF_UNSPEC, + http_version: HTTPVersion = HTTPVersion.DEFAULT, +) -> dns.message.Message: + """Return the response obtained after sending a query via DNS-over-HTTPS. + + *client*, a ``httpx.AsyncClient``. If provided, the client to use for + the query. + + Unlike the other dnspython async functions, a backend cannot be provided + in this function because httpx always auto-detects the async backend. + + See :py:func:`dns.query.https()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + try: + af = dns.inet.af_for_address(where) + except ValueError: + af = None + # we bind url and then override as pyright can't figure out all paths bind. 
+ url = where + if af is not None and dns.inet.is_address(where): + if af == socket.AF_INET: + url = f"https://{where}:{port}{path}" + elif af == socket.AF_INET6: + url = f"https://[{where}]:{port}{path}" + + extensions = {} + if bootstrap_address is None: + # pylint: disable=possibly-used-before-assignment + parsed = urllib.parse.urlparse(url) + if parsed.hostname is None: + raise ValueError("no hostname in URL") + if dns.inet.is_address(parsed.hostname): + bootstrap_address = parsed.hostname + extensions["sni_hostname"] = parsed.hostname + if parsed.port is not None: + port = parsed.port + + if http_version == HTTPVersion.H3 or ( + http_version == HTTPVersion.DEFAULT and not have_doh + ): + if bootstrap_address is None: + resolver = _maybe_get_resolver(resolver) + assert parsed.hostname is not None # pyright: ignore + answers = await resolver.resolve_name( # pyright: ignore + parsed.hostname, family # pyright: ignore + ) + bootstrap_address = random.choice(list(answers.addresses())) + if client and not isinstance( + client, dns.quic.AsyncQuicConnection + ): # pyright: ignore + raise ValueError("client parameter must be a dns.quic.AsyncQuicConnection.") + assert client is None or isinstance(client, dns.quic.AsyncQuicConnection) + return await _http3( + q, + bootstrap_address, + url, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + verify=verify, + post=post, + connection=client, + ) + + if not have_doh: + raise NoDOH # pragma: no cover + # pylint: disable=possibly-used-before-assignment + if client and not isinstance(client, httpx.AsyncClient): # pyright: ignore + raise ValueError("client parameter must be an httpx.AsyncClient") + # pylint: enable=possibly-used-before-assignment + + wire = q.to_wire() + headers = {"accept": "application/dns-message"} + + h1 = http_version in (HTTPVersion.H1, HTTPVersion.DEFAULT) + h2 = http_version in (HTTPVersion.H2, HTTPVersion.DEFAULT) + + backend = dns.asyncbackend.get_default_backend() + + if source is None: + local_address = None + local_port = 0 + else: + local_address = source + local_port = source_port + + if client: + cm: contextlib.AbstractAsyncContextManager = NullContext(client) + else: + transport = backend.get_transport_class()( + local_address=local_address, + http1=h1, + http2=h2, + verify=verify, + local_port=local_port, + bootstrap_address=bootstrap_address, + resolver=resolver, + family=family, + ) + + cm = httpx.AsyncClient( # pyright: ignore + http1=h1, http2=h2, verify=verify, transport=transport # type: ignore + ) + + async with cm as the_client: + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update( + { + "content-type": "application/dns-message", + "content-length": str(len(wire)), + } + ) + response = await backend.wait_for( + the_client.post( # pyright: ignore + url, + headers=headers, + content=wire, + extensions=extensions, + ), + timeout, + ) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + twire = wire.decode() # httpx does a repr() if we give it bytes + response = await backend.wait_for( + the_client.get( # pyright: ignore + url, + headers=headers, + params={"dns": twire}, + extensions=extensions, + ), + timeout, + ) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError( + f"{where} responded with status code {response.status_code}" + f"\nResponse body: {response.content!r}" + ) + r = 
dns.message.from_wire( + response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = response.elapsed.total_seconds() + if not q.is_response(r): + raise BadResponse + return r + + +async def _http3( + q: dns.message.Message, + where: str, + url: str, + timeout: float | None = None, + port: int = 443, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + verify: bool | str | ssl.SSLContext = True, + backend: dns.asyncbackend.Backend | None = None, + post: bool = True, + connection: dns.quic.AsyncQuicConnection | None = None, +) -> dns.message.Message: + if not dns.quic.have_quic: + raise NoDOH("DNS-over-HTTP3 is not available.") # pragma: no cover + + url_parts = urllib.parse.urlparse(url) + hostname = url_parts.hostname + assert hostname is not None + if url_parts.port is not None: + port = url_parts.port + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.AsyncQuicConnection + if connection: + cfactory = dns.quic.null_factory + mfactory = dns.quic.null_factory + else: + (cfactory, mfactory) = dns.quic.factories_for_backend(backend) + + async with cfactory() as context: + async with mfactory( + context, verify_mode=verify, server_name=hostname, h3=True + ) as the_manager: + if connection: + the_connection = connection + else: + the_connection = the_manager.connect( # pyright: ignore + where, port, source, source_port + ) + (start, expiration) = _compute_times(timeout) + stream = await the_connection.make_stream(timeout) # pyright: ignore + async with stream: + # note that send_h3() does not need await + stream.send_h3(url, wire, post) + wire = await stream.receive(_remaining(expiration)) + _check_status(stream.headers(), where, wire) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +async def quic( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 853, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + connection: dns.quic.AsyncQuicConnection | None = None, + verify: bool | str = True, + backend: dns.asyncbackend.Backend | None = None, + hostname: str | None = None, + server_hostname: str | None = None, +) -> dns.message.Message: + """Return the response obtained after sending an asynchronous query via + DNS-over-QUIC. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.quic()` for the documentation of the other + parameters, exceptions, and return type of this method. 
+ """ + + if not dns.quic.have_quic: + raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover + + if server_hostname is not None and hostname is None: + hostname = server_hostname + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.AsyncQuicConnection + if connection: + cfactory = dns.quic.null_factory + mfactory = dns.quic.null_factory + the_connection = connection + else: + (cfactory, mfactory) = dns.quic.factories_for_backend(backend) + + async with cfactory() as context: + async with mfactory( + context, + verify_mode=verify, + server_name=server_hostname, + ) as the_manager: + if not connection: + the_connection = the_manager.connect( # pyright: ignore + where, port, source, source_port + ) + (start, expiration) = _compute_times(timeout) + stream = await the_connection.make_stream(timeout) # pyright: ignore + async with stream: + await stream.send(wire, True) + wire = await stream.receive(_remaining(expiration)) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +async def _inbound_xfr( + txn_manager: dns.transaction.TransactionManager, + s: dns.asyncbackend.Socket, + query: dns.message.Message, + serial: int | None, + timeout: float | None, + expiration: float, +) -> Any: + """Given a socket, does the zone transfer.""" + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + is_udp = s.type == socket.SOCK_DGRAM + if is_udp: + udp_sock = cast(dns.asyncbackend.DatagramSocket, s) + await udp_sock.sendto(wire, None, _timeout(expiration)) + else: + tcp_sock = cast(dns.asyncbackend.StreamSocket, s) + tcpmsg = struct.pack("!H", len(wire)) + wire + await tcp_sock.sendall(tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: + done = False + tsig_ctx = None + r: dns.message.Message | None = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if is_udp: + timeout = _timeout(mexpiration) + (rwire, _) = await udp_sock.recvfrom(65535, timeout) # pyright: ignore + else: + ldata = await _read_exactly(tcp_sock, 2, mexpiration) # pyright: ignore + (l,) = struct.unpack("!H", ldata) + rwire = await _read_exactly(tcp_sock, l, mexpiration) # pyright: ignore + r = dns.message.from_wire( + rwire, + keyring=query.keyring, + request_mac=query.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr, + ) + done = inbound.process_message(r) + yield r + tsig_ctx = r.tsig_ctx + if query.keyring and r is not None and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + + +async def inbound_xfr( + where: str, + txn_manager: dns.transaction.TransactionManager, + query: dns.message.Message | None = None, + port: int = 53, + timeout: float | None = None, + lifetime: float | None = None, + source: str | None = None, + source_port: int = 0, + udp_mode: UDPMode = UDPMode.NEVER, + backend: dns.asyncbackend.Backend | None = None, +) -> None: + """Conduct an inbound transfer and apply it via a transaction from the + txn_manager. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. 
+ + See :py:func:`dns.query.inbound_xfr()` for the documentation of + the other parameters, exceptions, and return type of this method. + """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + (_, expiration) = _compute_times(lifetime) + if query.question[0].rdtype == dns.rdatatype.IXFR and udp_mode != UDPMode.NEVER: + s = await backend.make_socket( + af, socket.SOCK_DGRAM, 0, stuple, dtuple, _timeout(expiration) + ) + async with s: + try: + async for _ in _inbound_xfr( # pyright: ignore + txn_manager, + s, + query, + serial, + timeout, + expiration, # pyright: ignore + ): + pass + return + except dns.xfr.UseTCP: + if udp_mode == UDPMode.ONLY: + raise + + s = await backend.make_socket( + af, socket.SOCK_STREAM, 0, stuple, dtuple, _timeout(expiration) + ) + async with s: + async for _ in _inbound_xfr( # pyright: ignore + txn_manager, s, query, serial, timeout, expiration # pyright: ignore + ): + pass diff --git a/venv/lib/python3.12/site-packages/dns/asyncresolver.py b/venv/lib/python3.12/site-packages/dns/asyncresolver.py new file mode 100644 index 0000000..6f8c69f --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/asyncresolver.py @@ -0,0 +1,478 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Asynchronous DNS stub resolver.""" + +import socket +import time +from typing import Any, Dict, List + +import dns._ddr +import dns.asyncbackend +import dns.asyncquery +import dns.exception +import dns.inet +import dns.name +import dns.nameserver +import dns.query +import dns.rdataclass +import dns.rdatatype +import dns.resolver # lgtm[py/import-and-import-from] +import dns.reversename + +# import some resolver symbols for brevity +from dns.resolver import NXDOMAIN, NoAnswer, NoRootSOA, NotAbsolute + +# for indentation purposes below +_udp = dns.asyncquery.udp +_tcp = dns.asyncquery.tcp + + +class Resolver(dns.resolver.BaseResolver): + """Asynchronous DNS stub resolver.""" + + async def resolve( + self, + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + tcp: bool = False, + source: str | None = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: float | None = None, + search: bool | None = None, + backend: dns.asyncbackend.Backend | None = None, + ) -> dns.resolver.Answer: + """Query nameservers asynchronously to find the answer to the question. 
+ + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.resolver.Resolver.resolve()` for the + documentation of the other parameters, exceptions, and return + type of this method. + """ + + resolution = dns.resolver._Resolution( + self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search + ) + if not backend: + backend = dns.asyncbackend.get_default_backend() + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + assert request is not None # needed for type checking + done = False + while not done: + (nameserver, tcp, backoff) = resolution.next_nameserver() + if backoff: + await backend.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, resolution.errors) + try: + response = await nameserver.async_query( + request, + timeout=timeout, + source=source, + source_port=source_port, + max_size=tcp, + backend=backend, + ) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + async def resolve_address( + self, ipaddr: str, *args: Any, **kwargs: Any + ) -> dns.resolver.Answer: + """Use an asynchronous resolver to run a reverse query for PTR + records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. + modified_kwargs: Dict[str, Any] = {} + modified_kwargs.update(kwargs) + modified_kwargs["rdtype"] = dns.rdatatype.PTR + modified_kwargs["rdclass"] = dns.rdataclass.IN + return await self.resolve( + dns.reversename.from_address(ipaddr), *args, **modified_kwargs + ) + + async def resolve_name( + self, + name: dns.name.Name | str, + family: int = socket.AF_UNSPEC, + **kwargs: Any, + ) -> dns.resolver.HostAnswers: + """Use an asynchronous resolver to query for address records. + + This utilizes the resolve() method to perform A and/or AAAA lookups on + the specified name. + + *qname*, a ``dns.name.Name`` or ``str``, the name to resolve. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC + (the default), both A and AAAA records will be retrieved. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. 
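+ # Illustrative usage sketch (the example domain is hypothetical):
+ # resolver = dns.asyncresolver.Resolver()
+ # answers = await resolver.resolve_name("www.example.org")
+ # addrs = list(answers.addresses()) # mixed AAAA/A addresses, as strings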
+
+ modified_kwargs: Dict[str, Any] = {}
+ modified_kwargs.update(kwargs)
+ modified_kwargs.pop("rdtype", None)
+ modified_kwargs["rdclass"] = dns.rdataclass.IN
+
+ if family == socket.AF_INET:
+ v4 = await self.resolve(name, dns.rdatatype.A, **modified_kwargs)
+ return dns.resolver.HostAnswers.make(v4=v4)
+ elif family == socket.AF_INET6:
+ v6 = await self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
+ return dns.resolver.HostAnswers.make(v6=v6)
+ elif family != socket.AF_UNSPEC:
+ raise NotImplementedError(f"unknown address family {family}")
+
+ raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
+ lifetime = modified_kwargs.pop("lifetime", None)
+ start = time.time()
+ v6 = await self.resolve(
+ name,
+ dns.rdatatype.AAAA,
+ raise_on_no_answer=False,
+ lifetime=self._compute_timeout(start, lifetime),
+ **modified_kwargs,
+ )
+ # Note that setting name ensures we query the same name
+ # for A as we did for AAAA. (This is just in case search lists
+ # are active by default in the resolver configuration and
+ # we might be talking to a server that says NXDOMAIN when it
+ # wants to say NOERROR no data.)
+ name = v6.qname
+ v4 = await self.resolve(
+ name,
+ dns.rdatatype.A,
+ raise_on_no_answer=False,
+ lifetime=self._compute_timeout(start, lifetime),
+ **modified_kwargs,
+ )
+ answers = dns.resolver.HostAnswers.make(
+ v6=v6, v4=v4, add_empty=not raise_on_no_answer
+ )
+ if not answers:
+ raise NoAnswer(response=v6.response)
+ return answers
+
+ # pylint: disable=redefined-outer-name
+
+ async def canonical_name(self, name: dns.name.Name | str) -> dns.name.Name:
+ """Determine the canonical name of *name*.
+
+ The canonical name is the name the resolver uses for queries
+ after all CNAME and DNAME renamings have been applied.
+
+ *name*, a ``dns.name.Name`` or ``str``, the query name.
+
+ This method can raise any exception that ``resolve()`` can
+ raise, other than ``dns.resolver.NoAnswer`` and
+ ``dns.resolver.NXDOMAIN``.
+
+ Returns a ``dns.name.Name``.
+ """
+ try:
+ answer = await self.resolve(name, raise_on_no_answer=False)
+ canonical_name = answer.canonical_name
+ except dns.resolver.NXDOMAIN as e:
+ canonical_name = e.canonical_name
+ return canonical_name
+
+ async def try_ddr(self, lifetime: float = 5.0) -> None:
+ """Try to update the resolver's nameservers using Discovery of Designated
+ Resolvers (DDR). If successful, the resolver will subsequently use
+ DNS-over-HTTPS or DNS-over-TLS for future queries.
+
+ *lifetime*, a float, is the maximum time to spend attempting DDR. The default
+ is 5 seconds.
+
+ If the SVCB query is successful and results in a non-empty list of nameservers,
+ then the resolver's nameservers are set to the returned servers in priority
+ order.
+
+ The current implementation does not use any address hints from the SVCB record,
+ nor does it resolve addresses for the SVCB target name; rather, it assumes that
+ the bootstrap nameserver will always be one of the addresses and uses it.
+ A future revision to the code may offer fuller support. The code verifies that
+ the bootstrap nameserver is in the Subject Alternative Name field of the
+ TLS certificate.
+ """ + try: + expiration = time.time() + lifetime + answer = await self.resolve( + dns._ddr._local_resolver_name, "svcb", lifetime=lifetime + ) + timeout = dns.query._remaining(expiration) + nameservers = await dns._ddr._get_nameservers_async(answer, timeout) + if len(nameservers) > 0: + self.nameservers = nameservers + except Exception: + pass + + +default_resolver = None + + +def get_default_resolver() -> Resolver: + """Get the default asynchronous resolver, initializing it if necessary.""" + if default_resolver is None: + reset_default_resolver() + assert default_resolver is not None + return default_resolver + + +def reset_default_resolver() -> None: + """Re-initialize default asynchronous resolver. + + Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX + systems) will be re-read immediately. + """ + + global default_resolver + default_resolver = Resolver() + + +async def resolve( + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + tcp: bool = False, + source: str | None = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: float | None = None, + search: bool | None = None, + backend: dns.asyncbackend.Backend | None = None, +) -> dns.resolver.Answer: + """Query nameservers asynchronously to find the answer to the question. + + This is a convenience function that uses the default resolver + object to make the query. + + See :py:func:`dns.asyncresolver.Resolver.resolve` for more + information on the parameters. + """ + + return await get_default_resolver().resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + backend, + ) + + +async def resolve_address( + ipaddr: str, *args: Any, **kwargs: Any +) -> dns.resolver.Answer: + """Use a resolver to run a reverse query for PTR records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_address` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +async def resolve_name( + name: dns.name.Name | str, family: int = socket.AF_UNSPEC, **kwargs: Any +) -> dns.resolver.HostAnswers: + """Use a resolver to asynchronously query for address records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_name` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_name(name, family, **kwargs) + + +async def canonical_name(name: dns.name.Name | str) -> dns.name.Name: + """Determine the canonical name of *name*. + + See :py:func:`dns.resolver.Resolver.canonical_name` for more + information on the parameters and possible exceptions. + """ + + return await get_default_resolver().canonical_name(name) + + +async def try_ddr(timeout: float = 5.0) -> None: + """Try to update the default resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + See :py:func:`dns.resolver.Resolver.try_ddr` for more information. + """ + return await get_default_resolver().try_ddr(timeout) + + +async def zone_for_name( + name: dns.name.Name | str, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + tcp: bool = False, + resolver: Resolver | None = None, + backend: dns.asyncbackend.Backend | None = None, +) -> dns.name.Name: + """Find the name of the zone which contains the specified name. 
+
+ See :py:func:`dns.resolver.Resolver.zone_for_name` for more
+ information on the parameters and possible exceptions.
+ """
+
+ if isinstance(name, str):
+ name = dns.name.from_text(name, dns.name.root)
+ if resolver is None:
+ resolver = get_default_resolver()
+ if not name.is_absolute():
+ raise NotAbsolute(name)
+ while True:
+ try:
+ answer = await resolver.resolve(
+ name, dns.rdatatype.SOA, rdclass, tcp, backend=backend
+ )
+ assert answer.rrset is not None
+ if answer.rrset.name == name:
+ return name
+ # otherwise we were CNAMEd or DNAMEd and need to look higher
+ except (NXDOMAIN, NoAnswer):
+ pass
+ try:
+ name = name.parent()
+ except dns.name.NoParent: # pragma: no cover
+ raise NoRootSOA
+
+
+async def make_resolver_at(
+ where: dns.name.Name | str,
+ port: int = 53,
+ family: int = socket.AF_UNSPEC,
+ resolver: Resolver | None = None,
+) -> Resolver:
+ """Make a stub resolver using the specified destination as the full resolver.
+
+ *where*, a ``dns.name.Name`` or ``str``, the domain name or IP address of the
+ full resolver.
+
+ *port*, an ``int``, the port to use. If not specified, the default is 53.
+
+ *family*, an ``int``, the address family to use. This parameter is used if
+ *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case
+ the first address returned by ``resolve_name()`` will be used, otherwise the
+ first address of the specified family will be used.
+
+ *resolver*, a ``dns.asyncresolver.Resolver`` or ``None``, the resolver to use for
+ resolution of hostnames. If not specified, the default resolver will be used.
+
+ Returns a ``dns.asyncresolver.Resolver`` or raises an exception.
+ """
+ if resolver is None:
+ resolver = get_default_resolver()
+ nameservers: List[str | dns.nameserver.Nameserver] = []
+ if isinstance(where, str) and dns.inet.is_address(where):
+ nameservers.append(dns.nameserver.Do53Nameserver(where, port))
+ else:
+ answers = await resolver.resolve_name(where, family)
+ for address in answers.addresses():
+ nameservers.append(dns.nameserver.Do53Nameserver(address, port))
+ res = Resolver(configure=False)
+ res.nameservers = nameservers
+ return res
+
+
+async def resolve_at(
+ where: dns.name.Name | str,
+ qname: dns.name.Name | str,
+ rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A,
+ rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN,
+ tcp: bool = False,
+ source: str | None = None,
+ raise_on_no_answer: bool = True,
+ source_port: int = 0,
+ lifetime: float | None = None,
+ search: bool | None = None,
+ backend: dns.asyncbackend.Backend | None = None,
+ port: int = 53,
+ family: int = socket.AF_UNSPEC,
+ resolver: Resolver | None = None,
+) -> dns.resolver.Answer:
+ """Query nameservers to find the answer to the question.
+
+ This is a convenience function that calls ``dns.asyncresolver.make_resolver_at()``
+ to make a resolver, and then uses it to resolve the query.
+
+ See ``dns.asyncresolver.Resolver.resolve`` for more information on the resolution
+ parameters, and ``dns.asyncresolver.make_resolver_at`` for information about the
+ resolver parameters *where*, *port*, *family*, and *resolver*.
+
+ If making more than one query, it is more efficient to call
+ ``dns.asyncresolver.make_resolver_at()`` and then use that resolver for the queries
+ instead of calling ``resolve_at()`` multiple times.
+ """ + res = await make_resolver_at(where, port, family, resolver) + return await res.resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + backend, + ) diff --git a/venv/lib/python3.12/site-packages/dns/btree.py b/venv/lib/python3.12/site-packages/dns/btree.py new file mode 100644 index 0000000..12da9f5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/btree.py @@ -0,0 +1,850 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +""" +A BTree in the style of Cormen, Leiserson, and Rivest's "Algorithms" book, with +copy-on-write node updates, cursors, and optional space optimization for mostly-in-order +insertion. +""" + +from collections.abc import MutableMapping, MutableSet +from typing import Any, Callable, Generic, Optional, Tuple, TypeVar, cast + +DEFAULT_T = 127 + +KT = TypeVar("KT") # the type of a key in Element + + +class Element(Generic[KT]): + """All items stored in the BTree are Elements.""" + + def key(self) -> KT: + """The key for this element; the returned type must implement comparison.""" + raise NotImplementedError # pragma: no cover + + +ET = TypeVar("ET", bound=Element) # the type of a value in a _KV + + +def _MIN(t: int) -> int: + """The minimum number of keys in a non-root node for a BTree with the specified + ``t`` + """ + return t - 1 + + +def _MAX(t: int) -> int: + """The maximum number of keys in node for a BTree with the specified ``t``""" + return 2 * t - 1 + + +class _Creator: + """A _Creator class instance is used as a unique id for the BTree which created + a node. + + We use a dedicated creator rather than just a BTree reference to avoid circularity + that would complicate GC. + """ + + def __str__(self): # pragma: no cover + return f"{id(self):x}" + + +class _Node(Generic[KT, ET]): + """A Node in the BTree. + + A Node (leaf or internal) of the BTree. + """ + + __slots__ = ["t", "creator", "is_leaf", "elts", "children"] + + def __init__(self, t: int, creator: _Creator, is_leaf: bool): + assert t >= 3 + self.t = t + self.creator = creator + self.is_leaf = is_leaf + self.elts: list[ET] = [] + self.children: list[_Node[KT, ET]] = [] + + def is_maximal(self) -> bool: + """Does this node have the maximal number of keys?""" + assert len(self.elts) <= _MAX(self.t) + return len(self.elts) == _MAX(self.t) + + def is_minimal(self) -> bool: + """Does this node have the minimal number of keys?""" + assert len(self.elts) >= _MIN(self.t) + return len(self.elts) == _MIN(self.t) + + def search_in_node(self, key: KT) -> tuple[int, bool]: + """Get the index of the ``Element`` matching ``key`` or the index of its + least successor. + + Returns a tuple of the index and an ``equal`` boolean that is ``True`` iff. + the key was found. + """ + l = len(self.elts) + if l > 0 and key > self.elts[l - 1].key(): + # This is optimizing near in-order insertion. 
+ return l, False + l = 0 + i = len(self.elts) + r = i - 1 + equal = False + while l <= r: + m = (l + r) // 2 + k = self.elts[m].key() + if key == k: + i = m + equal = True + break + elif key < k: + i = m + r = m - 1 + else: + l = m + 1 + return i, equal + + def maybe_cow_child(self, index: int) -> "_Node[KT, ET]": + assert not self.is_leaf + child = self.children[index] + cloned = child.maybe_cow(self.creator) + if cloned: + self.children[index] = cloned + return cloned + else: + return child + + def _get_node(self, key: KT) -> Tuple[Optional["_Node[KT, ET]"], int]: + """Get the node associated with key and its index, doing + copy-on-write if we have to descend. + + Returns a tuple of the node and the index, or the tuple ``(None, 0)`` + if the key was not found. + """ + i, equal = self.search_in_node(key) + if equal: + return (self, i) + elif self.is_leaf: + return (None, 0) + else: + child = self.maybe_cow_child(i) + return child._get_node(key) + + def get(self, key: KT) -> ET | None: + """Get the element associated with *key* or return ``None``""" + i, equal = self.search_in_node(key) + if equal: + return self.elts[i] + elif self.is_leaf: + return None + else: + return self.children[i].get(key) + + def optimize_in_order_insertion(self, index: int) -> None: + """Try to minimize the number of Nodes in a BTree where the insertion + is done in-order or close to it, by stealing as much as we can from our + right sibling. + + If we don't do this, then an in-order insertion will produce a BTree + where most of the nodes are minimal. + """ + if index == 0: + return + left = self.children[index - 1] + if len(left.elts) == _MAX(self.t): + return + left = self.maybe_cow_child(index - 1) + while len(left.elts) < _MAX(self.t): + if not left.try_right_steal(self, index - 1): + break + + def insert_nonfull(self, element: ET, in_order: bool) -> ET | None: + assert not self.is_maximal() + while True: + key = element.key() + i, equal = self.search_in_node(key) + if equal: + # replace + old = self.elts[i] + self.elts[i] = element + return old + elif self.is_leaf: + self.elts.insert(i, element) + return None + else: + child = self.maybe_cow_child(i) + if child.is_maximal(): + self.adopt(*child.split()) + # Splitting might result in our target moving to us, so + # search again. + continue + oelt = child.insert_nonfull(element, in_order) + if in_order: + self.optimize_in_order_insertion(i) + return oelt + + def split(self) -> tuple["_Node[KT, ET]", ET, "_Node[KT, ET]"]: + """Split a maximal node into two minimal ones and a central element.""" + assert self.is_maximal() + right = self.__class__(self.t, self.creator, self.is_leaf) + right.elts = list(self.elts[_MIN(self.t) + 1 :]) + middle = self.elts[_MIN(self.t)] + self.elts = list(self.elts[: _MIN(self.t)]) + if not self.is_leaf: + right.children = list(self.children[_MIN(self.t) + 1 :]) + self.children = list(self.children[: _MIN(self.t) + 1]) + return self, middle, right + + def try_left_steal(self, parent: "_Node[KT, ET]", index: int) -> bool: + """Try to steal from this Node's left sibling for balancing purposes. + + Returns ``True`` if the theft was successful, or ``False`` if not. 
+ """ + if index != 0: + left = parent.children[index - 1] + if not left.is_minimal(): + left = parent.maybe_cow_child(index - 1) + elt = parent.elts[index - 1] + parent.elts[index - 1] = left.elts.pop() + self.elts.insert(0, elt) + if not left.is_leaf: + assert not self.is_leaf + child = left.children.pop() + self.children.insert(0, child) + return True + return False + + def try_right_steal(self, parent: "_Node[KT, ET]", index: int) -> bool: + """Try to steal from this Node's right sibling for balancing purposes. + + Returns ``True`` if the theft was successful, or ``False`` if not. + """ + if index + 1 < len(parent.children): + right = parent.children[index + 1] + if not right.is_minimal(): + right = parent.maybe_cow_child(index + 1) + elt = parent.elts[index] + parent.elts[index] = right.elts.pop(0) + self.elts.append(elt) + if not right.is_leaf: + assert not self.is_leaf + child = right.children.pop(0) + self.children.append(child) + return True + return False + + def adopt(self, left: "_Node[KT, ET]", middle: ET, right: "_Node[KT, ET]") -> None: + """Adopt left, middle, and right into our Node (which must not be maximal, + and which must not be a leaf). In the case were we are not the new root, + then the left child must already be in the Node.""" + assert not self.is_maximal() + assert not self.is_leaf + key = middle.key() + i, equal = self.search_in_node(key) + assert not equal + self.elts.insert(i, middle) + if len(self.children) == 0: + # We are the new root + self.children = [left, right] + else: + assert self.children[i] == left + self.children.insert(i + 1, right) + + def merge(self, parent: "_Node[KT, ET]", index: int) -> None: + """Merge this node's parent and its right sibling into this node.""" + right = parent.children.pop(index + 1) + self.elts.append(parent.elts.pop(index)) + self.elts.extend(right.elts) + if not self.is_leaf: + self.children.extend(right.children) + + def minimum(self) -> ET: + """The least element in this subtree.""" + if self.is_leaf: + return self.elts[0] + else: + return self.children[0].minimum() + + def maximum(self) -> ET: + """The greatest element in this subtree.""" + if self.is_leaf: + return self.elts[-1] + else: + return self.children[-1].maximum() + + def balance(self, parent: "_Node[KT, ET]", index: int) -> None: + """This Node is minimal, and we want to make it non-minimal so we can delete. + We try to steal from our siblings, and if that doesn't work we will merge + with one of them.""" + assert not parent.is_leaf + if self.try_left_steal(parent, index): + return + if self.try_right_steal(parent, index): + return + # Stealing didn't work, so both siblings must be minimal. + if index == 0: + # We are the left-most node so merge with our right sibling. + self.merge(parent, index) + else: + # Have our left sibling merge with us. This lets us only have "merge right" + # code. + left = parent.maybe_cow_child(index - 1) + left.merge(parent, index - 1) + + def delete( + self, key: KT, parent: Optional["_Node[KT, ET]"], exact: ET | None + ) -> ET | None: + """Delete an element matching *key* if it exists. If *exact* is not ``None`` + then it must be an exact match with that element. The Node must not be + minimal unless it is the root.""" + assert parent is None or not self.is_minimal() + i, equal = self.search_in_node(key) + original_key = None + if equal: + # Note we use "is" here as we meant "exactly this object". 
+ if exact is not None and self.elts[i] is not exact: + raise ValueError("exact delete did not match existing elt") + if self.is_leaf: + return self.elts.pop(i) + # Note we need to ensure exact is None going forward as we've + # already checked exactness and are about to change our target key + # to the least successor. + exact = None + original_key = key + least_successor = self.children[i + 1].minimum() + key = least_successor.key() + i = i + 1 + if self.is_leaf: + # No match + if exact is not None: + raise ValueError("exact delete had no match") + return None + # recursively delete in the appropriate child + child = self.maybe_cow_child(i) + if child.is_minimal(): + child.balance(self, i) + # Things may have moved. + i, equal = self.search_in_node(key) + assert not equal + child = self.children[i] + assert not child.is_minimal() + elt = child.delete(key, self, exact) + if original_key is not None: + node, i = self._get_node(original_key) + assert node is not None + assert elt is not None + oelt = node.elts[i] + node.elts[i] = elt + elt = oelt + return elt + + def visit_in_order(self, visit: Callable[[ET], None]) -> None: + """Call *visit* on all of the elements in order.""" + for i, elt in enumerate(self.elts): + if not self.is_leaf: + self.children[i].visit_in_order(visit) + visit(elt) + if not self.is_leaf: + self.children[-1].visit_in_order(visit) + + def _visit_preorder_by_node(self, visit: Callable[["_Node[KT, ET]"], None]) -> None: + """Visit nodes in preorder. This method is only used for testing.""" + visit(self) + if not self.is_leaf: + for child in self.children: + child._visit_preorder_by_node(visit) + + def maybe_cow(self, creator: _Creator) -> Optional["_Node[KT, ET]"]: + """Return a clone of this Node if it was not created by *creator*, or ``None`` + otherwise (i.e. copy for copy-on-write if we haven't already copied it).""" + if self.creator is not creator: + return self.clone(creator) + else: + return None + + def clone(self, creator: _Creator) -> "_Node[KT, ET]": + """Make a shallow-copy duplicate of this node.""" + cloned = self.__class__(self.t, creator, self.is_leaf) + cloned.elts.extend(self.elts) + if not self.is_leaf: + cloned.children.extend(self.children) + return cloned + + def __str__(self): # pragma: no cover + if not self.is_leaf: + children = " " + " ".join([f"{id(c):x}" for c in self.children]) + else: + children = "" + return f"{id(self):x} {self.creator} {self.elts}{children}" + + +class Cursor(Generic[KT, ET]): + """A seekable cursor for a BTree. + + If you are going to use a cursor on a mutable BTree, you should use it + in a ``with`` block so that any mutations of the BTree automatically park + the cursor. + """ + + def __init__(self, btree: "BTree[KT, ET]"): + self.btree = btree + self.current_node: _Node | None = None + # The current index is the element index within the current node, or + # if there is no current node then it is 0 on the left boundary and 1 + # on the right boundary. 
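+ # e.g. after seek_first() the state is (None, 0), so the first next()
+ # descends from the root to the least element; after seek_last() it is
+ # (None, 1), so the first prev() descends to the greatest element.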
+ self.current_index: int = 0 + self.recurse = False + self.increasing = True + self.parents: list[tuple[_Node, int]] = [] + self.parked = False + self.parking_key: KT | None = None + self.parking_key_read = False + + def _seek_least(self) -> None: + # seek to the least value in the subtree beneath the current index of the + # current node + assert self.current_node is not None + while not self.current_node.is_leaf: + self.parents.append((self.current_node, self.current_index)) + self.current_node = self.current_node.children[self.current_index] + assert self.current_node is not None + self.current_index = 0 + + def _seek_greatest(self) -> None: + # seek to the greatest value in the subtree beneath the current index of the + # current node + assert self.current_node is not None + while not self.current_node.is_leaf: + self.parents.append((self.current_node, self.current_index)) + self.current_node = self.current_node.children[self.current_index] + assert self.current_node is not None + self.current_index = len(self.current_node.elts) + + def park(self): + """Park the cursor. + + A cursor must be "parked" before mutating the BTree to avoid undefined behavior. + Cursors created in a ``with`` block register with their BTree and will park + automatically. Note that a parked cursor may not observe some changes made when + it is parked; for example a cursor being iterated with next() will not see items + inserted before its current position. + """ + if not self.parked: + self.parked = True + + def _maybe_unpark(self): + if self.parked: + if self.parking_key is not None: + # remember our increasing hint, as seeking might change it + increasing = self.increasing + if self.parking_key_read: + # We've already returned the parking key, so we want to be before it + # if decreasing and after it if increasing. + before = not self.increasing + else: + # We haven't returned the parking key, so we've parked right + # after seeking or are on a boundary. Either way, the before + # hint we want is the value of self.increasing. + before = self.increasing + self.seek(self.parking_key, before) + self.increasing = increasing # might have been altered by seek() + self.parked = False + self.parking_key = None + + def prev(self) -> ET | None: + """Get the previous element, or return None if on the left boundary.""" + self._maybe_unpark() + self.parking_key = None + if self.current_node is None: + # on a boundary + if self.current_index == 0: + # left boundary, there is no prev + return None + else: + assert self.current_index == 1 + # right boundary; seek to the actual boundary + # so we can do a prev() + self.current_node = self.btree.root + self.current_index = len(self.btree.root.elts) + self._seek_greatest() + while True: + if self.recurse: + if not self.increasing: + # We only want to recurse if we are continuing in the decreasing + # direction. 
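+ # Descend to the greatest element of the left subtree of the element
+ # just returned; that element is its in-order predecessor.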
+ self._seek_greatest()
+ self.recurse = False
+ self.increasing = False
+ self.current_index -= 1
+ if self.current_index >= 0:
+ elt = self.current_node.elts[self.current_index]
+ if not self.current_node.is_leaf:
+ self.recurse = True
+ self.parking_key = elt.key()
+ self.parking_key_read = True
+ return elt
+ else:
+ if len(self.parents) > 0:
+ self.current_node, self.current_index = self.parents.pop()
+ else:
+ self.current_node = None
+ self.current_index = 0
+ return None
+
+ def next(self) -> ET | None:
+ """Get the next element, or return None if on the right boundary."""
+ self._maybe_unpark()
+ self.parking_key = None
+ if self.current_node is None:
+ # on a boundary
+ if self.current_index == 1:
+ # right boundary, there is no next
+ return None
+ else:
+ assert self.current_index == 0
+ # left boundary; seek to the actual boundary
+ # so we can do a next()
+ self.current_node = self.btree.root
+ self.current_index = 0
+ self._seek_least()
+ while True:
+ if self.recurse:
+ if self.increasing:
+ # We only want to recurse if we are continuing in the increasing
+ # direction.
+ self._seek_least()
+ self.recurse = False
+ self.increasing = True
+ if self.current_index < len(self.current_node.elts):
+ elt = self.current_node.elts[self.current_index]
+ self.current_index += 1
+ if not self.current_node.is_leaf:
+ self.recurse = True
+ self.parking_key = elt.key()
+ self.parking_key_read = True
+ return elt
+ else:
+ if len(self.parents) > 0:
+ self.current_node, self.current_index = self.parents.pop()
+ else:
+ self.current_node = None
+ self.current_index = 1
+ return None
+
+ def _adjust_for_before(self, before: bool, i: int) -> None:
+ if before:
+ self.current_index = i
+ else:
+ self.current_index = i + 1
+
+ def seek(self, key: KT, before: bool = True) -> None:
+ """Seek to the specified key.
+
+ If *before* is ``True`` (the default) then the cursor is positioned just
+ before *key* if it exists, or before its least successor if it doesn't. A
+ subsequent next() will retrieve this value. If *before* is ``False``, then
+ the cursor is positioned just after *key* if it exists, or its greatest
+ predecessor if it doesn't. A subsequent prev() will return this value.
+ """
+ self.current_node = self.btree.root
+ assert self.current_node is not None
+ self.recurse = False
+ self.parents = []
+ self.increasing = before
+ self.parked = False
+ self.parking_key = key
+ self.parking_key_read = False
+ while not self.current_node.is_leaf:
+ i, equal = self.current_node.search_in_node(key)
+ if equal:
+ self._adjust_for_before(before, i)
+ if before:
+ self._seek_greatest()
+ else:
+ self._seek_least()
+ return
+ self.parents.append((self.current_node, i))
+ self.current_node = self.current_node.children[i]
+ assert self.current_node is not None
+ i, equal = self.current_node.search_in_node(key)
+ if equal:
+ self._adjust_for_before(before, i)
+ else:
+ self.current_index = i
+
+ def seek_first(self) -> None:
+ """Seek to the left boundary (i.e. just before the least element).
+
+ A subsequent next() will return the least element if the BTree isn't empty."""
+ self.current_node = None
+ self.current_index = 0
+ self.recurse = False
+ self.increasing = True
+ self.parents = []
+ self.parked = False
+ self.parking_key = None
+
+ def seek_last(self) -> None:
+ """Seek to the right boundary (i.e. just after the greatest element).
+
+ A subsequent prev() will return the greatest element if the BTree isn't empty.
+ """ + self.current_node = None + self.current_index = 1 + self.recurse = False + self.increasing = False + self.parents = [] + self.parked = False + self.parking_key = None + + def __enter__(self): + self.btree.register_cursor(self) + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.btree.deregister_cursor(self) + return False + + +class Immutable(Exception): + """The BTree is immutable.""" + + +class BTree(Generic[KT, ET]): + """An in-memory BTree with copy-on-write and cursors.""" + + def __init__(self, *, t: int = DEFAULT_T, original: Optional["BTree"] = None): + """Create a BTree. + + If *original* is not ``None``, then the BTree is shallow-cloned from + *original* using copy-on-write. Otherwise a new BTree with the specified + *t* value is created. + + The BTree is not thread-safe. + """ + # We don't use a reference to ourselves as a creator as we don't want + # to prevent GC of old btrees. + self.creator = _Creator() + self._immutable = False + self.t: int + self.root: _Node + self.size: int + self.cursors: set[Cursor] = set() + if original is not None: + if not original._immutable: + raise ValueError("original BTree is not immutable") + self.t = original.t + self.root = original.root + self.size = original.size + else: + if t < 3: + raise ValueError("t must be >= 3") + self.t = t + self.root = _Node(self.t, self.creator, True) + self.size = 0 + + def make_immutable(self): + """Make the BTree immutable. + + Attempts to alter the BTree after making it immutable will raise an + Immutable exception. This operation cannot be undone. + """ + if not self._immutable: + self._immutable = True + + def _check_mutable_and_park(self) -> None: + if self._immutable: + raise Immutable + for cursor in self.cursors: + cursor.park() + + # Note that we don't use insert() and delete() but rather insert_element() and + # delete_key() so that BTreeDict can be a proper MutableMapping and supply the + # rest of the standard mapping API. + + def insert_element(self, elt: ET, in_order: bool = False) -> ET | None: + """Insert the element into the BTree. + + If *in_order* is ``True``, then extra work will be done to make left siblings + full, which optimizes storage space when the the elements are inserted in-order + or close to it. + + Returns the previously existing element at the element's key or ``None``. + """ + self._check_mutable_and_park() + cloned = self.root.maybe_cow(self.creator) + if cloned: + self.root = cloned + if self.root.is_maximal(): + old_root = self.root + self.root = _Node(self.t, self.creator, False) + self.root.adopt(*old_root.split()) + oelt = self.root.insert_nonfull(elt, in_order) + if oelt is None: + # We did not replace, so something was added. + self.size += 1 + return oelt + + def get_element(self, key: KT) -> ET | None: + """Get the element matching *key* from the BTree, or return ``None`` if it + does not exist. + """ + return self.root.get(key) + + def _delete(self, key: KT, exact: ET | None) -> ET | None: + self._check_mutable_and_park() + cloned = self.root.maybe_cow(self.creator) + if cloned: + self.root = cloned + elt = self.root.delete(key, None, exact) + if elt is not None: + # We deleted something + self.size -= 1 + if len(self.root.elts) == 0: + # The root is now empty. If there is a child, then collapse this root + # level and make the child the new root. 
+ if not self.root.is_leaf:
+ assert len(self.root.children) == 1
+ self.root = self.root.children[0]
+ return elt
+
+ def delete_key(self, key: KT) -> ET | None:
+ """Delete the element matching *key* from the BTree.
+
+ Returns the matching element or ``None`` if it does not exist.
+ """
+ return self._delete(key, None)
+
+ def delete_exact(self, element: ET) -> ET | None:
+ """Delete *element* from the BTree.
+
+ Returns *element*; raises ``ValueError`` if the exact element (compared
+ by identity) was not in the BTree.
+ """
+ delt = self._delete(element.key(), element)
+ assert delt is element
+ return delt
+
+ def __len__(self):
+ return self.size
+
+ def visit_in_order(self, visit: Callable[[ET], None]) -> None:
+ """Call *visit*(element) on all elements in the tree in sorted order."""
+ self.root.visit_in_order(visit)
+
+ def _visit_preorder_by_node(self, visit: Callable[[_Node], None]) -> None:
+ self.root._visit_preorder_by_node(visit)
+
+ def cursor(self) -> Cursor[KT, ET]:
+ """Create a cursor."""
+ return Cursor(self)
+
+ def register_cursor(self, cursor: Cursor) -> None:
+ """Register a cursor for the automatic parking service."""
+ self.cursors.add(cursor)
+
+ def deregister_cursor(self, cursor: Cursor) -> None:
+ """Deregister a cursor from the automatic parking service."""
+ self.cursors.discard(cursor)
+
+ def __copy__(self):
+ return self.__class__(original=self)
+
+ def __iter__(self):
+ with self.cursor() as cursor:
+ while True:
+ elt = cursor.next()
+ if elt is None:
+ break
+ yield elt.key()
+
+
+VT = TypeVar("VT") # the type of a value in a BTreeDict
+
+
+class KV(Element, Generic[KT, VT]):
+ """The BTree element type used in a ``BTreeDict``."""
+
+ def __init__(self, key: KT, value: VT):
+ self._key = key
+ self._value = value
+
+ def key(self) -> KT:
+ return self._key
+
+ def value(self) -> VT:
+ return self._value
+
+ def __str__(self): # pragma: no cover
+ return f"KV({self._key}, {self._value})"
+
+ def __repr__(self): # pragma: no cover
+ return f"KV({self._key}, {self._value})"
+
+
+class BTreeDict(Generic[KT, VT], BTree[KT, KV[KT, VT]], MutableMapping[KT, VT]):
+ """A MutableMapping implemented with a BTree.
+
+ Unlike a normal Python dict, the BTreeDict may be mutated while iterating.
+ """
+
+ def __init__(
+ self,
+ *,
+ t: int = DEFAULT_T,
+ original: BTree | None = None,
+ in_order: bool = False,
+ ):
+ super().__init__(t=t, original=original)
+ self.in_order = in_order
+
+ def __getitem__(self, key: KT) -> VT:
+ elt = self.get_element(key)
+ if elt is None:
+ raise KeyError
+ else:
+ return cast(KV, elt).value()
+
+ def __setitem__(self, key: KT, value: VT) -> None:
+ elt = KV(key, value)
+ self.insert_element(elt, self.in_order)
+
+ def __delitem__(self, key: KT) -> None:
+ if self.delete_key(key) is None:
+ raise KeyError
+
+
+class Member(Element, Generic[KT]):
+ """The BTree element type used in a ``BTreeSet``."""
+
+ def __init__(self, key: KT):
+ self._key = key
+
+ def key(self) -> KT:
+ return self._key
+
+
+class BTreeSet(BTree, Generic[KT], MutableSet[KT]):
+ """A MutableSet implemented with a BTree.
+
+ Unlike a normal Python set, the BTreeSet may be mutated while iterating.
+ """ + + def __init__( + self, + *, + t: int = DEFAULT_T, + original: BTree | None = None, + in_order: bool = False, + ): + super().__init__(t=t, original=original) + self.in_order = in_order + + def __contains__(self, key: Any) -> bool: + return self.get_element(key) is not None + + def add(self, value: KT) -> None: + elt = Member(value) + self.insert_element(elt, self.in_order) + + def discard(self, value: KT) -> None: + self.delete_key(value) diff --git a/venv/lib/python3.12/site-packages/dns/btreezone.py b/venv/lib/python3.12/site-packages/dns/btreezone.py new file mode 100644 index 0000000..27b5bb6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/btreezone.py @@ -0,0 +1,367 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# A derivative of a dnspython VersionedZone and related classes, using a BTreeDict and +# a separate per-version delegation index. These additions let us +# +# 1) Do efficient CoW versioning (useful for future online updates). +# 2) Maintain sort order +# 3) Allow delegations to be found easily +# 4) Handle glue +# 5) Add Node flags ORIGIN, DELEGATION, and GLUE whenever relevant. The ORIGIN +# flag is set at the origin node, the DELEGATION FLAG is set at delegation +# points, and the GLUE flag is set on nodes beneath delegation points. + +import enum +from dataclasses import dataclass +from typing import Callable, MutableMapping, Tuple, cast + +import dns.btree +import dns.immutable +import dns.name +import dns.node +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.versioned +import dns.zone + + +class NodeFlags(enum.IntFlag): + ORIGIN = 0x01 + DELEGATION = 0x02 + GLUE = 0x04 + + +class Node(dns.node.Node): + __slots__ = ["flags", "id"] + + def __init__(self, flags: NodeFlags | None = None): + super().__init__() + if flags is None: + # We allow optional flags rather than a default + # as pyright doesn't like assigning a literal 0 + # to flags. 
+ flags = NodeFlags(0)
+ self.flags = flags
+ self.id = 0
+
+ def is_delegation(self):
+ return (self.flags & NodeFlags.DELEGATION) != 0
+
+ def is_glue(self):
+ return (self.flags & NodeFlags.GLUE) != 0
+
+ def is_origin(self):
+ return (self.flags & NodeFlags.ORIGIN) != 0
+
+ def is_origin_or_glue(self):
+ return (self.flags & (NodeFlags.ORIGIN | NodeFlags.GLUE)) != 0
+
+
+@dns.immutable.immutable
+class ImmutableNode(Node):
+ def __init__(self, node: Node):
+ super().__init__()
+ self.id = node.id
+ self.rdatasets = tuple( # type: ignore
+ [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets]
+ )
+ self.flags = node.flags
+
+ def find_rdataset(
+ self,
+ rdclass: dns.rdataclass.RdataClass,
+ rdtype: dns.rdatatype.RdataType,
+ covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+ create: bool = False,
+ ) -> dns.rdataset.Rdataset:
+ if create:
+ raise TypeError("immutable")
+ return super().find_rdataset(rdclass, rdtype, covers, False)
+
+ def get_rdataset(
+ self,
+ rdclass: dns.rdataclass.RdataClass,
+ rdtype: dns.rdatatype.RdataType,
+ covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+ create: bool = False,
+ ) -> dns.rdataset.Rdataset | None:
+ if create:
+ raise TypeError("immutable")
+ return super().get_rdataset(rdclass, rdtype, covers, False)
+
+ def delete_rdataset(
+ self,
+ rdclass: dns.rdataclass.RdataClass,
+ rdtype: dns.rdatatype.RdataType,
+ covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+ ) -> None:
+ raise TypeError("immutable")
+
+ def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None:
+ raise TypeError("immutable")
+
+ def is_immutable(self) -> bool:
+ return True
+
+
+class Delegations(dns.btree.BTreeSet[dns.name.Name]):
+ def get_delegation(self, name: dns.name.Name) -> Tuple[dns.name.Name | None, bool]:
+ """Get the delegation applicable to *name*, if it exists.
+
+ If there is a delegation, then return a tuple consisting of the name of
+ the delegation point, and a boolean which is `True` if the name is a proper
+ subdomain of the delegation point, and `False` if it is equal to the delegation
+ point.
+ """
+ cursor = self.cursor()
+ cursor.seek(name, before=False)
+ prev = cursor.prev()
+ if prev is None:
+ return None, False
+ cut = prev.key()
+ reln, _, _ = name.fullcompare(cut)
+ is_subdomain = reln == dns.name.NameRelation.SUBDOMAIN
+ if is_subdomain or reln == dns.name.NameRelation.EQUAL:
+ return cut, is_subdomain
+ else:
+ return None, False
+
+ def is_glue(self, name: dns.name.Name) -> bool:
+ """Is *name* glue, i.e.
is it beneath a delegation?"""
+ cut, is_subdomain = self.get_delegation(name)
+ if cut is None:
+ return False
+ return is_subdomain
+
+
+class WritableVersion(dns.zone.WritableVersion):
+ def __init__(self, zone: dns.zone.Zone, replacement: bool = False):
+ super().__init__(zone, True)
+ if not replacement:
+ assert isinstance(zone, dns.versioned.Zone)
+ version = zone._versions[-1]
+ self.nodes: dns.btree.BTreeDict[dns.name.Name, Node] = dns.btree.BTreeDict(
+ original=version.nodes # type: ignore
+ )
+ self.delegations = Delegations(original=version.delegations) # type: ignore
+ else:
+ self.delegations = Delegations()
+
+ def _is_origin(self, name: dns.name.Name) -> bool:
+ # Assumes name has already been validated (and thus adjusted to the right
+ # relativity too)
+ if self.zone.relativize:
+ return name == dns.name.empty
+ else:
+ return name == self.zone.origin
+
+ def _maybe_cow_with_name(
+ self, name: dns.name.Name
+ ) -> Tuple[dns.node.Node, dns.name.Name]:
+ (node, name) = super()._maybe_cow_with_name(name)
+ node = cast(Node, node)
+ if self._is_origin(name):
+ node.flags |= NodeFlags.ORIGIN
+ elif self.delegations.is_glue(name):
+ node.flags |= NodeFlags.GLUE
+ return (node, name)
+
+ def update_glue_flag(self, name: dns.name.Name, is_glue: bool) -> None:
+ cursor = self.nodes.cursor() # type: ignore
+ cursor.seek(name, False)
+ updates = []
+ while True:
+ elt = cursor.next()
+ if elt is None:
+ break
+ ename = elt.key()
+ if not ename.is_subdomain(name):
+ break
+ node = cast(dns.node.Node, elt.value())
+ if ename not in self.changed:
+ new_node = self.zone.node_factory()
+ new_node.id = self.id # type: ignore
+ new_node.rdatasets.extend(node.rdatasets)
+ self.changed.add(ename)
+ node = new_node
+ assert isinstance(node, Node)
+ if is_glue:
+ node.flags |= NodeFlags.GLUE
+ else:
+ node.flags &= ~NodeFlags.GLUE
+ # We don't update node here as any insertion could disturb the
+ # btree and invalidate our cursor. We could use the cursor in a
+ # with block and avoid this, but it would do a lot of parking and
+ # unparking so the deferred update mode may still be better.
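+ # (Each self.nodes[ename] assignment below may trigger copy-on-write
+ # inside the btree, which is exactly the kind of mutation that would
+ # require parking an open cursor.)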
+ updates.append((ename, node)) + for ename, node in updates: + self.nodes[ename] = node + + def delete_node(self, name: dns.name.Name) -> None: + name = self._validate_name(name) + node = self.nodes.get(name) + if node is not None: + if node.is_delegation(): # type: ignore + self.delegations.discard(name) + self.update_glue_flag(name, False) + del self.nodes[name] + self.changed.add(name) + + def put_rdataset( + self, name: dns.name.Name, rdataset: dns.rdataset.Rdataset + ) -> None: + (node, name) = self._maybe_cow_with_name(name) + if ( + rdataset.rdtype == dns.rdatatype.NS and not node.is_origin_or_glue() # type: ignore + ): + node.flags |= NodeFlags.DELEGATION # type: ignore + if name not in self.delegations: + self.delegations.add(name) + self.update_glue_flag(name, True) + node.replace_rdataset(rdataset) + + def delete_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> None: + (node, name) = self._maybe_cow_with_name(name) + if rdtype == dns.rdatatype.NS and name in self.delegations: # type: ignore + node.flags &= ~NodeFlags.DELEGATION # type: ignore + self.delegations.discard(name) # type: ignore + self.update_glue_flag(name, False) + node.delete_rdataset(self.zone.rdclass, rdtype, covers) + if len(node) == 0: + del self.nodes[name] + + +@dataclass(frozen=True) +class Bounds: + name: dns.name.Name + left: dns.name.Name + right: dns.name.Name | None + closest_encloser: dns.name.Name + is_equal: bool + is_delegation: bool + + def __str__(self): + if self.is_equal: + op = "=" + else: + op = "<" + if self.is_delegation: + zonecut = " zonecut" + else: + zonecut = "" + return ( + f"{self.left} {op} {self.name} < {self.right}{zonecut}; " + f"{self.closest_encloser}" + ) + + +@dns.immutable.immutable +class ImmutableVersion(dns.zone.Version): + def __init__(self, version: dns.zone.Version): + if not isinstance(version, WritableVersion): + raise ValueError( + "a dns.btreezone.ImmutableVersion requires a " + "dns.btreezone.WritableVersion" + ) + super().__init__(version.zone, True) + self.id = version.id + self.origin = version.origin + for name in version.changed: + node = version.nodes.get(name) + if node: + version.nodes[name] = ImmutableNode(node) + # the cast below is for mypy + self.nodes = cast(MutableMapping[dns.name.Name, dns.node.Node], version.nodes) + self.nodes.make_immutable() # type: ignore + self.delegations = version.delegations + self.delegations.make_immutable() + + def bounds(self, name: dns.name.Name | str) -> Bounds: + """Return the 'bounds' of *name* in its zone. + + The bounds information is useful when making an authoritative response, as + it can be used to determine whether the query name is at or beneath a delegation + point. The other data in the ``Bounds`` object is useful for making on-the-fly + DNSSEC signatures. + + The left bound of *name* is *name* itself if it is in the zone, or the greatest + predecessor which is in the zone. + + The right bound of *name* is the least successor of *name*, or ``None`` if + no name in the zone is greater than *name*. + + The closest encloser of *name* is *name* itself, if *name* is in the zone; + otherwise it is the name with the largest number of labels in common with + *name* that is in the zone, either explicitly or by the implied existence + of empty non-terminals. + + The bounds *is_equal* field is ``True`` if and only if *name* is equal to + its left bound. 
+ + The bounds *is_delegation* field is ``True`` if and only if the left bound is a + delegation point. + """ + assert self.origin is not None + # validate the origin because we may need to relativize + origin = self.zone._validate_name(self.origin) + name = self.zone._validate_name(name) + cut, _ = self.delegations.get_delegation(name) + if cut is not None: + target = cut + is_delegation = True + else: + target = name + is_delegation = False + c = cast(dns.btree.BTreeDict, self.nodes).cursor() + c.seek(target, False) + left = c.prev() + assert left is not None + c.next() # skip over left + while True: + right = c.next() + if right is None or not right.value().is_glue(): + break + left_comparison = left.key().fullcompare(name) + if right is not None: + right_key = right.key() + right_comparison = right_key.fullcompare(name) + else: + right_comparison = ( + dns.name.NAMERELN_COMMONANCESTOR, + -1, + len(origin), + ) + right_key = None + closest_encloser = dns.name.Name( + name[-max(left_comparison[2], right_comparison[2]) :] + ) + return Bounds( + name, + left.key(), + right_key, + closest_encloser, + left_comparison[0] == dns.name.NameRelation.EQUAL, + is_delegation, + ) + + +class Zone(dns.versioned.Zone): + node_factory: Callable[[], dns.node.Node] = Node + map_factory: Callable[[], MutableMapping[dns.name.Name, dns.node.Node]] = cast( + Callable[[], MutableMapping[dns.name.Name, dns.node.Node]], + dns.btree.BTreeDict[dns.name.Name, Node], + ) + writable_version_factory: ( + Callable[[dns.zone.Zone, bool], dns.zone.Version] | None + ) = WritableVersion + immutable_version_factory: Callable[[dns.zone.Version], dns.zone.Version] | None = ( + ImmutableVersion + ) diff --git a/venv/lib/python3.12/site-packages/dns/dnssec.py b/venv/lib/python3.12/site-packages/dns/dnssec.py new file mode 100644 index 0000000..0b2aa70 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssec.py @@ -0,0 +1,1242 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""Common DNSSEC-related functions and constants.""" + +# pylint: disable=unused-import + +import base64 +import contextlib +import functools +import hashlib +import struct +import time +from datetime import datetime +from typing import Callable, Dict, List, Set, Tuple, Union, cast + +import dns._features +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.transaction +import dns.zone +from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash +from dns.exception import AlgorithmKeyMismatch as AlgorithmKeyMismatch +from dns.exception import DeniedByPolicy, UnsupportedAlgorithm, ValidationFailure +from dns.rdtypes.ANY.CDNSKEY import CDNSKEY +from dns.rdtypes.ANY.CDS import CDS +from dns.rdtypes.ANY.DNSKEY import DNSKEY +from dns.rdtypes.ANY.DS import DS +from dns.rdtypes.ANY.NSEC import NSEC, Bitmap +from dns.rdtypes.ANY.NSEC3PARAM import NSEC3PARAM +from dns.rdtypes.ANY.RRSIG import RRSIG, sigtime_to_posixtime +from dns.rdtypes.dnskeybase import Flag + +PublicKey = Union[ + "GenericPublicKey", + "rsa.RSAPublicKey", + "ec.EllipticCurvePublicKey", + "ed25519.Ed25519PublicKey", + "ed448.Ed448PublicKey", +] + +PrivateKey = Union[ + "GenericPrivateKey", + "rsa.RSAPrivateKey", + "ec.EllipticCurvePrivateKey", + "ed25519.Ed25519PrivateKey", + "ed448.Ed448PrivateKey", +] + +RRsetSigner = Callable[[dns.transaction.Transaction, dns.rrset.RRset], None] + + +def algorithm_from_text(text: str) -> Algorithm: + """Convert text into a DNSSEC algorithm value. + + *text*, a ``str``, the text to convert to into an algorithm value. + + Returns an ``int``. + """ + + return Algorithm.from_text(text) + + +def algorithm_to_text(value: Algorithm | int) -> str: + """Convert a DNSSEC algorithm value to text + + *value*, a ``dns.dnssec.Algorithm``. + + Returns a ``str``, the name of a DNSSEC algorithm. + """ + + return Algorithm.to_text(value) + + +def to_timestamp(value: datetime | str | float | int) -> int: + """Convert various format to a timestamp""" + if isinstance(value, datetime): + return int(value.timestamp()) + elif isinstance(value, str): + return sigtime_to_posixtime(value) + elif isinstance(value, float): + return int(value) + elif isinstance(value, int): + return value + else: + raise TypeError("Unsupported timestamp type") + + +def key_id(key: DNSKEY | CDNSKEY) -> int: + """Return the key id (a 16-bit number) for the specified key. 
+
+    *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.CDNSKEY.CDNSKEY``
+
+    Returns an ``int`` between 0 and 65535
+    """
+
+    rdata = key.to_wire()
+    assert rdata is not None  # for mypy
+    if key.algorithm == Algorithm.RSAMD5:
+        return (rdata[-3] << 8) + rdata[-2]
+    else:
+        total = 0
+        for i in range(len(rdata) // 2):
+            total += (rdata[2 * i] << 8) + rdata[2 * i + 1]
+        if len(rdata) % 2 != 0:
+            total += rdata[len(rdata) - 1] << 8
+        total += (total >> 16) & 0xFFFF
+        return total & 0xFFFF
+
+
+class Policy:
+    def __init__(self):
+        pass
+
+    def ok_to_sign(self, key: DNSKEY) -> bool:  # pragma: no cover
+        return False
+
+    def ok_to_validate(self, key: DNSKEY) -> bool:  # pragma: no cover
+        return False
+
+    def ok_to_create_ds(self, algorithm: DSDigest) -> bool:  # pragma: no cover
+        return False
+
+    def ok_to_validate_ds(self, algorithm: DSDigest) -> bool:  # pragma: no cover
+        return False
+
+
+class SimpleDeny(Policy):
+    def __init__(self, deny_sign, deny_validate, deny_create_ds, deny_validate_ds):
+        super().__init__()
+        self._deny_sign = deny_sign
+        self._deny_validate = deny_validate
+        self._deny_create_ds = deny_create_ds
+        self._deny_validate_ds = deny_validate_ds
+
+    def ok_to_sign(self, key: DNSKEY) -> bool:
+        return key.algorithm not in self._deny_sign
+
+    def ok_to_validate(self, key: DNSKEY) -> bool:
+        return key.algorithm not in self._deny_validate
+
+    def ok_to_create_ds(self, algorithm: DSDigest) -> bool:
+        return algorithm not in self._deny_create_ds
+
+    def ok_to_validate_ds(self, algorithm: DSDigest) -> bool:
+        return algorithm not in self._deny_validate_ds
+
+
+rfc_8624_policy = SimpleDeny(
+    {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1, Algorithm.ECCGOST},
+    {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1},
+    {DSDigest.NULL, DSDigest.SHA1, DSDigest.GOST},
+    {DSDigest.NULL},
+)
+
+allow_all_policy = SimpleDeny(set(), set(), set(), set())
+
+
+default_policy = rfc_8624_policy
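+
+# NOTE (editor's illustrative sketch, not upstream dnspython code): a custom
+# policy is just another SimpleDeny instance. The deny sets below extend the
+# RFC 8624 sets above to also refuse SHA-1; the resulting object could be
+# passed as the policy= argument of make_ds()/sign() in this module.
+#
+#     no_sha1_policy = SimpleDeny(
+#         {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1,
+#          Algorithm.ECCGOST, Algorithm.RSASHA1, Algorithm.RSASHA1NSEC3SHA1},
+#         {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1,
+#          Algorithm.RSASHA1, Algorithm.RSASHA1NSEC3SHA1},
+#         {DSDigest.NULL, DSDigest.SHA1, DSDigest.GOST},
+#         {DSDigest.NULL, DSDigest.SHA1},
+#     )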
+
+
+def make_ds(
+    name: dns.name.Name | str,
+    key: dns.rdata.Rdata,
+    algorithm: DSDigest | str,
+    origin: dns.name.Name | None = None,
+    policy: Policy | None = None,
+    validating: bool = False,
+) -> DS:
+    """Create a DS record for a DNSSEC key.
+
+    *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record.
+
+    *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.CDNSKEY.CDNSKEY``,
+    the key the DS is about.
+
+    *algorithm*, a ``str`` or ``int`` specifying the hash algorithm.
+    The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case
+    does not matter for these strings.
+
+    *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name,
+    then it will be made absolute using the specified origin.
+
+    *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy,
+    ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624.
+
+    *validating*, a ``bool``. If ``True``, then policy is checked in
+    validating mode, i.e. "Is it ok to validate using this digest algorithm?".
+    Otherwise the policy is checked in creating mode, i.e. "Is it ok to create a DS
+    with this digest algorithm?".
+
+    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
+
+    Raises ``DeniedByPolicy`` if the algorithm is denied by policy.
+
+    Returns a ``dns.rdtypes.ANY.DS.DS``
+    """
+
+    if policy is None:
+        policy = default_policy
+    try:
+        if isinstance(algorithm, str):
+            algorithm = DSDigest[algorithm.upper()]
+    except Exception:
+        raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"')
+    if validating:
+        check = policy.ok_to_validate_ds
+    else:
+        check = policy.ok_to_create_ds
+    if not check(algorithm):
+        raise DeniedByPolicy
+    if not isinstance(key, DNSKEY | CDNSKEY):
+        raise ValueError("key is not a DNSKEY | CDNSKEY")
+    if algorithm == DSDigest.SHA1:
+        dshash = hashlib.sha1()
+    elif algorithm == DSDigest.SHA256:
+        dshash = hashlib.sha256()
+    elif algorithm == DSDigest.SHA384:
+        dshash = hashlib.sha384()
+    else:
+        raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"')
+
+    if isinstance(name, str):
+        name = dns.name.from_text(name, origin)
+    wire = name.canonicalize().to_wire()
+    kwire = key.to_wire(origin=origin)
+    assert wire is not None and kwire is not None  # for mypy
+    dshash.update(wire)
+    dshash.update(kwire)
+    digest = dshash.digest()
+
+    dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, algorithm) + digest
+    ds = dns.rdata.from_wire(
+        dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, len(dsrdata)
+    )
+    return cast(DS, ds)
+
+
+def make_cds(
+    name: dns.name.Name | str,
+    key: dns.rdata.Rdata,
+    algorithm: DSDigest | str,
+    origin: dns.name.Name | None = None,
+) -> CDS:
+    """Create a CDS record for a DNSSEC key.
+
+    *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record.
+
+    *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.CDNSKEY.CDNSKEY``,
+    the key the DS is about.
+
+    *algorithm*, a ``str`` or ``int`` specifying the hash algorithm.
+    The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case
+    does not matter for these strings.
+
+    *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name,
+    then it will be made absolute using the specified origin.
+
+    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
+ + Returns a ``dns.rdtypes.ANY.DS.CDS`` + """ + + ds = make_ds(name, key, algorithm, origin) + return CDS( + rdclass=ds.rdclass, + rdtype=dns.rdatatype.CDS, + key_tag=ds.key_tag, + algorithm=ds.algorithm, + digest_type=ds.digest_type, + digest=ds.digest, + ) + + +def _find_candidate_keys( + keys: Dict[dns.name.Name, dns.rdataset.Rdataset | dns.node.Node], rrsig: RRSIG +) -> List[DNSKEY] | None: + value = keys.get(rrsig.signer) + if isinstance(value, dns.node.Node): + rdataset = value.get_rdataset(dns.rdataclass.IN, dns.rdatatype.DNSKEY) + else: + rdataset = value + if rdataset is None: + return None + return [ + cast(DNSKEY, rd) + for rd in rdataset + if rd.algorithm == rrsig.algorithm + and key_id(rd) == rrsig.key_tag + and (rd.flags & Flag.ZONE) == Flag.ZONE # RFC 4034 2.1.1 + and rd.protocol == 3 # RFC 4034 2.1.2 + ] + + +def _get_rrname_rdataset( + rrset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset], +) -> Tuple[dns.name.Name, dns.rdataset.Rdataset]: + if isinstance(rrset, tuple): + return rrset[0], rrset[1] + else: + return rrset.name, rrset + + +def _validate_signature(sig: bytes, data: bytes, key: DNSKEY) -> None: + # pylint: disable=possibly-used-before-assignment + public_cls = get_algorithm_cls_from_dnskey(key).public_cls + try: + public_key = public_cls.from_dnskey(key) + except ValueError: + raise ValidationFailure("invalid public key") + public_key.verify(sig, data) + + +def _validate_rrsig( + rrset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset], + rrsig: RRSIG, + keys: Dict[dns.name.Name, dns.node.Node | dns.rdataset.Rdataset], + origin: dns.name.Name | None = None, + now: float | None = None, + policy: Policy | None = None, +) -> None: + """Validate an RRset against a single signature rdata, throwing an + exception if validation is not successful. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + *now*, a ``float`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. 
+ """ + + if policy is None: + policy = default_policy + + candidate_keys = _find_candidate_keys(keys, rrsig) + if candidate_keys is None: + raise ValidationFailure("unknown key") + + if now is None: + now = time.time() + if rrsig.expiration < now: + raise ValidationFailure("expired") + if rrsig.inception > now: + raise ValidationFailure("not yet valid") + + data = _make_rrsig_signature_data(rrset, rrsig, origin) + + # pylint: disable=possibly-used-before-assignment + for candidate_key in candidate_keys: + if not policy.ok_to_validate(candidate_key): + continue + try: + _validate_signature(rrsig.signature, data, candidate_key) + return + except (InvalidSignature, ValidationFailure): + # this happens on an individual validation failure + continue + # nothing verified -- raise failure: + raise ValidationFailure("verify failure") + + +def _validate( + rrset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset], + rrsigset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset], + keys: Dict[dns.name.Name, dns.node.Node | dns.rdataset.Rdataset], + origin: dns.name.Name | None = None, + now: float | None = None, + policy: Policy | None = None, +) -> None: + """Validate an RRset against a signature RRset, throwing an exception + if none of the signatures validate. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsigset*, the signature RRset. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name``, the origin to use for relative names; + defaults to None. + + *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. 
+ """ + + if policy is None: + policy = default_policy + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + if isinstance(rrset, tuple): + rrname = rrset[0] + else: + rrname = rrset.name + + if isinstance(rrsigset, tuple): + rrsigname = rrsigset[0] + rrsigrdataset = rrsigset[1] + else: + rrsigname = rrsigset.name + rrsigrdataset = rrsigset + + rrname = rrname.choose_relativity(origin) + rrsigname = rrsigname.choose_relativity(origin) + if rrname != rrsigname: + raise ValidationFailure("owner names do not match") + + for rrsig in rrsigrdataset: + if not isinstance(rrsig, RRSIG): + raise ValidationFailure("expected an RRSIG") + try: + _validate_rrsig(rrset, rrsig, keys, origin, now, policy) + return + except (ValidationFailure, UnsupportedAlgorithm): + pass + raise ValidationFailure("no RRSIGs validated") + + +def _sign( + rrset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset], + private_key: PrivateKey, + signer: dns.name.Name, + dnskey: DNSKEY, + inception: datetime | str | int | float | None = None, + expiration: datetime | str | int | float | None = None, + lifetime: int | None = None, + verify: bool = False, + policy: Policy | None = None, + origin: dns.name.Name | None = None, + deterministic: bool = True, +) -> RRSIG: + """Sign RRset using private key. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *private_key*, the private key to use for signing, a + ``cryptography.hazmat.primitives.asymmetric`` private key class applicable + for DNSSEC. + + *signer*, a ``dns.name.Name``, the Signer's name. + + *dnskey*, a ``DNSKEY`` matching ``private_key``. + + *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the + signature inception time. If ``None``, the current time is used. If a ``str``, the + format is "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX + epoch in text form; this is the same the RRSIG rdata's text form. + Values of type `int` or `float` are interpreted as seconds since the UNIX epoch. + + *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature + expiration time. If ``None``, the expiration time will be the inception time plus + the value of the *lifetime* parameter. See the description of *inception* above + for how the various parameter types are interpreted. + + *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This + parameter is only meaningful if *expiration* is ``None``. + + *verify*, a ``bool``. If set to ``True``, the signer will verify signatures + after they are created; the default is ``False``. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + *origin*, a ``dns.name.Name`` or ``None``. If ``None``, the default, then all + names in the rrset (including its owner name) must be absolute; otherwise the + specified origin will be used to make names absolute when signing. + + *deterministic*, a ``bool``. If ``True``, the default, use deterministic + (reproducible) signatures when supported by the algorithm used for signing. + Currently, this only affects ECDSA. + + Raises ``DeniedByPolicy`` if the signature is denied by policy. 
+ """ + + if policy is None: + policy = default_policy + if not policy.ok_to_sign(dnskey): + raise DeniedByPolicy + + if isinstance(rrset, tuple): + rdclass = rrset[1].rdclass + rdtype = rrset[1].rdtype + rrname = rrset[0] + original_ttl = rrset[1].ttl + else: + rdclass = rrset.rdclass + rdtype = rrset.rdtype + rrname = rrset.name + original_ttl = rrset.ttl + + if inception is not None: + rrsig_inception = to_timestamp(inception) + else: + rrsig_inception = int(time.time()) + + if expiration is not None: + rrsig_expiration = to_timestamp(expiration) + elif lifetime is not None: + rrsig_expiration = rrsig_inception + lifetime + else: + raise ValueError("expiration or lifetime must be specified") + + # Derelativize now because we need a correct labels length for the + # rrsig_template. + if origin is not None: + rrname = rrname.derelativize(origin) + labels = len(rrname) - 1 + + # Adjust labels appropriately for wildcards. + if rrname.is_wild(): + labels -= 1 + + rrsig_template = RRSIG( + rdclass=rdclass, + rdtype=dns.rdatatype.RRSIG, + type_covered=rdtype, + algorithm=dnskey.algorithm, + labels=labels, + original_ttl=original_ttl, + expiration=rrsig_expiration, + inception=rrsig_inception, + key_tag=key_id(dnskey), + signer=signer, + signature=b"", + ) + + data = _make_rrsig_signature_data(rrset, rrsig_template, origin) + + # pylint: disable=possibly-used-before-assignment + if isinstance(private_key, GenericPrivateKey): + signing_key = private_key + else: + try: + private_cls = get_algorithm_cls_from_dnskey(dnskey) + signing_key = private_cls(key=private_key) + except UnsupportedAlgorithm: + raise TypeError("Unsupported key algorithm") + + signature = signing_key.sign(data, verify, deterministic) + + return cast(RRSIG, rrsig_template.replace(signature=signature)) + + +def _make_rrsig_signature_data( + rrset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset], + rrsig: RRSIG, + origin: dns.name.Name | None = None, +) -> bytes: + """Create signature rdata. + + *rrset*, the RRset to sign/validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate, or the + signature template used when signing. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. + """ + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + signer = rrsig.signer + if not signer.is_absolute(): + if origin is None: + raise ValidationFailure("relative RR name without an origin specified") + signer = signer.derelativize(origin) + + # For convenience, allow the rrset to be specified as a (name, + # rdataset) tuple as well as a proper rrset + rrname, rdataset = _get_rrname_rdataset(rrset) + + data = b"" + wire = rrsig.to_wire(origin=signer) + assert wire is not None # for mypy + data += wire[:18] + data += rrsig.signer.to_digestable(signer) + + # Derelativize the name before considering labels. 
+    if not rrname.is_absolute():
+        if origin is None:
+            raise ValidationFailure("relative RR name without an origin specified")
+        rrname = rrname.derelativize(origin)
+
+    name_len = len(rrname)
+    if rrname.is_wild() and rrsig.labels != name_len - 2:
+        raise ValidationFailure("wild owner name has wrong label length")
+    if name_len - 1 < rrsig.labels:
+        raise ValidationFailure("owner name longer than RRSIG labels")
+    elif rrsig.labels < name_len - 1:
+        suffix = rrname.split(rrsig.labels + 1)[1]
+        rrname = dns.name.from_text("*", suffix)
+    rrnamebuf = rrname.to_digestable()
+    rrfixed = struct.pack("!HHI", rdataset.rdtype, rdataset.rdclass, rrsig.original_ttl)
+    rdatas = [rdata.to_digestable(origin) for rdata in rdataset]
+    for rdata in sorted(rdatas):
+        data += rrnamebuf
+        data += rrfixed
+        rrlen = struct.pack("!H", len(rdata))
+        data += rrlen
+        data += rdata
+
+    return data
+
+
+def _make_dnskey(
+    public_key: PublicKey,
+    algorithm: int | str,
+    flags: int = Flag.ZONE,
+    protocol: int = 3,
+) -> DNSKEY:
+    """Convert a public key to DNSKEY Rdata
+
+    *public_key*, a ``PublicKey`` (``GenericPublicKey`` or
+    ``cryptography.hazmat.primitives.asymmetric``) to convert.
+
+    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
+
+    *flags*: DNSKEY flags field as an integer.
+
+    *protocol*: DNSKEY protocol field as an integer.
+
+    Raises ``ValueError`` if the specified key algorithm parameters are not
+    supported, ``TypeError`` if the key type is unsupported,
+    `UnsupportedAlgorithm` if the algorithm is unknown and
+    `AlgorithmKeyMismatch` if the algorithm does not match the key type.
+
+    Return DNSKEY ``Rdata``.
+    """
+
+    algorithm = Algorithm.make(algorithm)
+
+    # pylint: disable=possibly-used-before-assignment
+    if isinstance(public_key, GenericPublicKey):
+        return public_key.to_dnskey(flags=flags, protocol=protocol)
+    else:
+        public_cls = get_algorithm_cls(algorithm).public_cls
+        return public_cls(key=public_key).to_dnskey(flags=flags, protocol=protocol)
+
+
+def _make_cdnskey(
+    public_key: PublicKey,
+    algorithm: int | str,
+    flags: int = Flag.ZONE,
+    protocol: int = 3,
+) -> CDNSKEY:
+    """Convert a public key to CDNSKEY Rdata
+
+    *public_key*, the public key to convert, a
+    ``cryptography.hazmat.primitives.asymmetric`` public key class applicable
+    for DNSSEC.
+
+    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
+
+    *flags*: DNSKEY flags field as an integer.
+
+    *protocol*: DNSKEY protocol field as an integer.
+
+    Raises ``ValueError`` if the specified key algorithm parameters are not
+    supported, ``TypeError`` if the key type is unsupported,
+    `UnsupportedAlgorithm` if the algorithm is unknown and
+    `AlgorithmKeyMismatch` if the algorithm does not match the key type.
+
+    Return CDNSKEY ``Rdata``.
+    """
+
+    dnskey = _make_dnskey(public_key, algorithm, flags, protocol)
+
+    return CDNSKEY(
+        rdclass=dnskey.rdclass,
+        rdtype=dns.rdatatype.CDNSKEY,
+        flags=dnskey.flags,
+        protocol=dnskey.protocol,
+        algorithm=dnskey.algorithm,
+        key=dnskey.key,
+    )
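+
+# NOTE (editor's illustrative sketch, not upstream dnspython code): with the
+# python cryptography package installed, the public wrappers exported at the
+# bottom of this module (make_dnskey, plus make_ds above) turn a freshly
+# generated key into DNSKEY and DS rdata; "example." is a placeholder zone.
+#
+#     from cryptography.hazmat.primitives.asymmetric import ed25519
+#     import dns.dnssec
+#     key = ed25519.Ed25519PrivateKey.generate()
+#     dnskey = dns.dnssec.make_dnskey(key.public_key(), dns.dnssec.ED25519)
+#     ds = dns.dnssec.make_ds("example.", dnskey, "SHA256")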
+def nsec3_hash(
+    domain: dns.name.Name | str,
+    salt: str | bytes | None,
+    iterations: int,
+    algorithm: int | str,
+) -> str:
+    """
+    Calculate the NSEC3 hash, according to
+    https://tools.ietf.org/html/rfc5155#section-5
+
+    *domain*, a ``dns.name.Name`` or ``str``, the name to hash.
+
+    *salt*, a ``str``, ``bytes``, or ``None``, the hash salt. If a
+    string, it is decoded as a hex string.
+
+    *iterations*, an ``int``, the number of iterations.
+
+    *algorithm*, a ``str`` or ``int``, the hash algorithm.
+    The only defined algorithm is SHA1.
+
+    Returns a ``str``, the encoded NSEC3 hash.
+    """
+
+    b32_conversion = str.maketrans(
+        "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", "0123456789ABCDEFGHIJKLMNOPQRSTUV"
+    )
+
+    try:
+        if isinstance(algorithm, str):
+            algorithm = NSEC3Hash[algorithm.upper()]
+    except Exception:
+        raise ValueError("Wrong hash algorithm (only SHA1 is supported)")
+
+    if algorithm != NSEC3Hash.SHA1:
+        raise ValueError("Wrong hash algorithm (only SHA1 is supported)")
+
+    if salt is None:
+        salt_encoded = b""
+    elif isinstance(salt, str):
+        if len(salt) % 2 == 0:
+            salt_encoded = bytes.fromhex(salt)
+        else:
+            raise ValueError("Invalid salt length")
+    else:
+        salt_encoded = salt
+
+    if not isinstance(domain, dns.name.Name):
+        domain = dns.name.from_text(domain)
+    domain_encoded = domain.canonicalize().to_wire()
+    assert domain_encoded is not None
+
+    digest = hashlib.sha1(domain_encoded + salt_encoded).digest()
+    for _ in range(iterations):
+        digest = hashlib.sha1(digest + salt_encoded).digest()
+
+    output = base64.b32encode(digest).decode("utf-8")
+    output = output.translate(b32_conversion)
+
+    return output
+
+
+def make_ds_rdataset(
+    rrset: dns.rrset.RRset | Tuple[dns.name.Name, dns.rdataset.Rdataset],
+    algorithms: Set[DSDigest | str],
+    origin: dns.name.Name | None = None,
+) -> dns.rdataset.Rdataset:
+    """Create a DS record from DNSKEY/CDNSKEY/CDS.
+
+    *rrset*, the RRset to create DS Rdataset for. This can be a
+    ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``)
+    tuple.
+
+    *algorithms*, a set of ``str`` or ``int`` specifying the hash algorithms.
+    The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case
+    does not matter for these strings. If the RRset is a CDS, only digest
+    algorithms matching algorithms are accepted.
+
+    *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name,
+    then it will be made absolute using the specified origin.
+
+    Raises ``UnsupportedAlgorithm`` if any of the algorithms are unknown and
+    ``ValueError`` if the given RRset is not usable.
+
+    Returns a ``dns.rdataset.Rdataset``
+    """
+
+    rrname, rdataset = _get_rrname_rdataset(rrset)
+
+    if rdataset.rdtype not in (
+        dns.rdatatype.DNSKEY,
+        dns.rdatatype.CDNSKEY,
+        dns.rdatatype.CDS,
+    ):
+        raise ValueError("rrset not a DNSKEY/CDNSKEY/CDS")
+
+    _algorithms = set()
+    for algorithm in algorithms:
+        try:
+            if isinstance(algorithm, str):
+                algorithm = DSDigest[algorithm.upper()]
+        except Exception:
+            raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"')
+        _algorithms.add(algorithm)
+
+    if rdataset.rdtype == dns.rdatatype.CDS:
+        res = []
+        for rdata in cds_rdataset_to_ds_rdataset(rdataset):
+            if rdata.digest_type in _algorithms:
+                res.append(rdata)
+        if len(res) == 0:
+            raise ValueError("no acceptable CDS rdata found")
+        return dns.rdataset.from_rdata_list(rdataset.ttl, res)
+
+    res = []
+    for algorithm in _algorithms:
+        res.extend(dnskey_rdataset_to_cds_rdataset(rrname, rdataset, algorithm, origin))
+    return dns.rdataset.from_rdata_list(rdataset.ttl, res)
+
+
+def cds_rdataset_to_ds_rdataset(
+    rdataset: dns.rdataset.Rdataset,
+) -> dns.rdataset.Rdataset:
+    """Create a DS rdataset from a CDS rdataset.
+
+    *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for.
+
+    Raises ``ValueError`` if the rdataset is not CDS.
+ + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype != dns.rdatatype.CDS: + raise ValueError("rdataset not a CDS") + res = [] + for rdata in rdataset: + res.append( + CDS( + rdclass=rdata.rdclass, + rdtype=dns.rdatatype.DS, + key_tag=rdata.key_tag, + algorithm=rdata.algorithm, + digest_type=rdata.digest_type, + digest=rdata.digest, + ) + ) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def dnskey_rdataset_to_cds_rdataset( + name: dns.name.Name | str, + rdataset: dns.rdataset.Rdataset, + algorithm: DSDigest | str, + origin: dns.name.Name | None = None, +) -> dns.rdataset.Rdataset: + """Create a CDS record from DNSKEY/CDNSKEY. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the CDS record. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown or + ``ValueError`` if the rdataset is not DNSKEY/CDNSKEY. + + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype not in (dns.rdatatype.DNSKEY, dns.rdatatype.CDNSKEY): + raise ValueError("rdataset not a DNSKEY/CDNSKEY") + res = [] + for rdata in rdataset: + res.append(make_cds(name, rdata, algorithm, origin)) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def dnskey_rdataset_to_cdnskey_rdataset( + rdataset: dns.rdataset.Rdataset, +) -> dns.rdataset.Rdataset: + """Create a CDNSKEY record from DNSKEY. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create CDNSKEY Rdataset for. 
+
+    Returns a ``dns.rdataset.Rdataset``
+    """
+
+    if rdataset.rdtype != dns.rdatatype.DNSKEY:
+        raise ValueError("rdataset not a DNSKEY")
+    res = []
+    for rdata in rdataset:
+        res.append(
+            CDNSKEY(
+                rdclass=rdataset.rdclass,
+                rdtype=rdataset.rdtype,
+                flags=rdata.flags,
+                protocol=rdata.protocol,
+                algorithm=rdata.algorithm,
+                key=rdata.key,
+            )
+        )
+    return dns.rdataset.from_rdata_list(rdataset.ttl, res)
+
+
+def default_rrset_signer(
+    txn: dns.transaction.Transaction,
+    rrset: dns.rrset.RRset,
+    signer: dns.name.Name,
+    ksks: List[Tuple[PrivateKey, DNSKEY]],
+    zsks: List[Tuple[PrivateKey, DNSKEY]],
+    inception: datetime | str | int | float | None = None,
+    expiration: datetime | str | int | float | None = None,
+    lifetime: int | None = None,
+    policy: Policy | None = None,
+    origin: dns.name.Name | None = None,
+    deterministic: bool = True,
+) -> None:
+    """Default RRset signer"""
+
+    if rrset.rdtype in set(
+        [
+            dns.rdatatype.RdataType.DNSKEY,
+            dns.rdatatype.RdataType.CDS,
+            dns.rdatatype.RdataType.CDNSKEY,
+        ]
+    ):
+        keys = ksks
+    else:
+        keys = zsks
+
+    for private_key, dnskey in keys:
+        rrsig = sign(
+            rrset=rrset,
+            private_key=private_key,
+            dnskey=dnskey,
+            inception=inception,
+            expiration=expiration,
+            lifetime=lifetime,
+            signer=signer,
+            policy=policy,
+            origin=origin,
+            deterministic=deterministic,
+        )
+        txn.add(rrset.name, rrset.ttl, rrsig)
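+
+# NOTE (editor's illustrative sketch, not upstream dnspython code): sign_zone()
+# below accepts any callable matching RRsetSigner, so a caller can wrap
+# default_rrset_signer, for example to log every RRset as it is signed. Here
+# `zone` and `keys` are placeholders for a dns.zone.Zone with an absolute
+# origin and a list of (private_key, DNSKEY) tuples.
+#
+#     import functools
+#
+#     def logging_signer(txn, rrset, **kwargs):
+#         print("signing", rrset.name, dns.rdatatype.to_text(rrset.rdtype))
+#         default_rrset_signer(txn, rrset, **kwargs)
+#
+#     signer = functools.partial(
+#         logging_signer, signer=zone.origin, ksks=keys, zsks=keys, lifetime=3600
+#     )
+#     dns.dnssec.sign_zone(zone, keys=keys, rrset_signer=signer)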
+
+
+def sign_zone(
+    zone: dns.zone.Zone,
+    txn: dns.transaction.Transaction | None = None,
+    keys: List[Tuple[PrivateKey, DNSKEY]] | None = None,
+    add_dnskey: bool = True,
+    dnskey_ttl: int | None = None,
+    inception: datetime | str | int | float | None = None,
+    expiration: datetime | str | int | float | None = None,
+    lifetime: int | None = None,
+    nsec3: NSEC3PARAM | None = None,
+    rrset_signer: RRsetSigner | None = None,
+    policy: Policy | None = None,
+    deterministic: bool = True,
+) -> None:
+    """Sign zone.
+
+    *zone*, a ``dns.zone.Zone``, the zone to sign.
+
+    *txn*, a ``dns.transaction.Transaction``, an optional transaction to use for
+    signing.
+
+    *keys*, a list of (``PrivateKey``, ``DNSKEY``) tuples, to use for signing. KSK/ZSK
+    roles are assigned automatically if the SEP flag is used, otherwise all RRsets are
+    signed by all keys.
+
+    *add_dnskey*, a ``bool``. If ``True``, the default, all specified DNSKEYs are
+    automatically added to the zone on signing.
+
+    *dnskey_ttl*, an ``int``, specifies the TTL for DNSKEY RRs. If not specified,
+    the TTL of the existing DNSKEY RRset is used, or else the TTL of the SOA RRset.
+
+    *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature
+    inception time. If ``None``, the current time is used. If a ``str``, the format is
+    "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX epoch in text
+    form; this is the same as the RRSIG rdata's text form. Values of type `int` or
+    `float` are interpreted as seconds since the UNIX epoch.
+
+    *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature
+    expiration time. If ``None``, the expiration time will be the inception time plus
+    the value of the *lifetime* parameter. See the description of *inception* above for
+    how the various parameter types are interpreted.
+
+    *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This
+    parameter is only meaningful if *expiration* is ``None``.
+
+    *nsec3*, a ``NSEC3PARAM`` Rdata, configures signing using NSEC3. Not yet
+    implemented.
+
+    *rrset_signer*, a ``Callable``, an optional function for signing RRsets. The
+    function requires two arguments: transaction and RRset. If not specified,
+    ``dns.dnssec.default_rrset_signer`` will be used.
+
+    *deterministic*, a ``bool``. If ``True``, the default, use deterministic
+    (reproducible) signatures when supported by the algorithm used for signing.
+    Currently, this only affects ECDSA.
+
+    Returns ``None``.
+    """
+
+    ksks = []
+    zsks = []
+
+    # if we have both KSKs and ZSKs, split by SEP flag. if not, sign all
+    # records with all keys
+    if keys:
+        for key in keys:
+            if key[1].flags & Flag.SEP:
+                ksks.append(key)
+            else:
+                zsks.append(key)
+        if not ksks:
+            ksks = keys
+        if not zsks:
+            zsks = keys
+    else:
+        keys = []
+
+    if txn:
+        cm: contextlib.AbstractContextManager = contextlib.nullcontext(txn)
+    else:
+        cm = zone.writer()
+
+    if zone.origin is None:
+        raise ValueError("no zone origin")
+
+    with cm as _txn:
+        if add_dnskey:
+            if dnskey_ttl is None:
+                dnskey = _txn.get(zone.origin, dns.rdatatype.DNSKEY)
+                if dnskey:
+                    dnskey_ttl = dnskey.ttl
+                else:
+                    soa = _txn.get(zone.origin, dns.rdatatype.SOA)
+                    dnskey_ttl = soa.ttl
+            for _, dnskey in keys:
+                _txn.add(zone.origin, dnskey_ttl, dnskey)
+
+        if nsec3:
+            raise NotImplementedError("Signing with NSEC3 not yet implemented")
+        else:
+            _rrset_signer = rrset_signer or functools.partial(
+                default_rrset_signer,
+                signer=zone.origin,
+                ksks=ksks,
+                zsks=zsks,
+                inception=inception,
+                expiration=expiration,
+                lifetime=lifetime,
+                policy=policy,
+                origin=zone.origin,
+                deterministic=deterministic,
+            )
+            return _sign_zone_nsec(zone, _txn, _rrset_signer)
+
+
+def _sign_zone_nsec(
+    zone: dns.zone.Zone,
+    txn: dns.transaction.Transaction,
+    rrset_signer: RRsetSigner | None = None,
+) -> None:
+    """NSEC zone signer"""
+
+    def _txn_add_nsec(
+        txn: dns.transaction.Transaction,
+        name: dns.name.Name,
+        next_secure: dns.name.Name | None,
+        rdclass: dns.rdataclass.RdataClass,
+        ttl: int,
+        rrset_signer: RRsetSigner | None = None,
+    ) -> None:
+        """NSEC zone signer helper"""
+        mandatory_types = set(
+            [dns.rdatatype.RdataType.RRSIG, dns.rdatatype.RdataType.NSEC]
+        )
+        node = txn.get_node(name)
+        if node and next_secure:
+            types = (
+                set([rdataset.rdtype for rdataset in node.rdatasets]) | mandatory_types
+            )
+            windows = Bitmap.from_rdtypes(list(types))
+            rrset = dns.rrset.from_rdata(
+                name,
+                ttl,
+                NSEC(
+                    rdclass=rdclass,
+                    rdtype=dns.rdatatype.RdataType.NSEC,
+                    next=next_secure,
+                    windows=windows,
+                ),
+            )
+            txn.add(rrset)
+            if rrset_signer:
+                rrset_signer(txn, rrset)
+
+    rrsig_ttl = zone.get_soa(txn).minimum
+    delegation = None
+    last_secure = None
+
+    for name in sorted(txn.iterate_names()):
+        if delegation and name.is_subdomain(delegation):
+            # names below delegations are not secure
+            continue
+        elif txn.get(name, dns.rdatatype.NS) and name != zone.origin:
+            # inside delegation
+            delegation = name
+        else:
+            # outside delegation
+            delegation = None
+
+        if rrset_signer:
+            node = txn.get_node(name)
+            if node:
+                for rdataset in node.rdatasets:
+                    if rdataset.rdtype == dns.rdatatype.RRSIG:
+                        # do not sign RRSIGs
+                        continue
+                    elif delegation and rdataset.rdtype != dns.rdatatype.DS:
+                        # do not sign delegations except DS records
+                        continue
+                    else:
+                        rrset = dns.rrset.from_rdata(name, rdataset.ttl, *rdataset)
+                        rrset_signer(txn, rrset)
+
+        # We need "is not None" as the empty name is False because its length is 0.
+ if last_secure is not None: + _txn_add_nsec(txn, last_secure, name, zone.rdclass, rrsig_ttl, rrset_signer) + last_secure = name + + if last_secure: + _txn_add_nsec( + txn, last_secure, zone.origin, zone.rdclass, rrsig_ttl, rrset_signer + ) + + +def _need_pyca(*args, **kwargs): + raise ImportError( + "DNSSEC validation requires python cryptography" + ) # pragma: no cover + + +if dns._features.have("dnssec"): + from cryptography.exceptions import InvalidSignature + from cryptography.hazmat.primitives.asymmetric import ec # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ed448 # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import rsa # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ( # pylint: disable=W0611 + ed25519, + ) + + from dns.dnssecalgs import ( # pylint: disable=C0412 + get_algorithm_cls, + get_algorithm_cls_from_dnskey, + ) + from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey + + validate = _validate # type: ignore + validate_rrsig = _validate_rrsig # type: ignore + sign = _sign + make_dnskey = _make_dnskey + make_cdnskey = _make_cdnskey + _have_pyca = True +else: # pragma: no cover + validate = _need_pyca + validate_rrsig = _need_pyca + sign = _need_pyca + make_dnskey = _need_pyca + make_cdnskey = _need_pyca + _have_pyca = False + +### BEGIN generated Algorithm constants + +RSAMD5 = Algorithm.RSAMD5 +DH = Algorithm.DH +DSA = Algorithm.DSA +ECC = Algorithm.ECC +RSASHA1 = Algorithm.RSASHA1 +DSANSEC3SHA1 = Algorithm.DSANSEC3SHA1 +RSASHA1NSEC3SHA1 = Algorithm.RSASHA1NSEC3SHA1 +RSASHA256 = Algorithm.RSASHA256 +RSASHA512 = Algorithm.RSASHA512 +ECCGOST = Algorithm.ECCGOST +ECDSAP256SHA256 = Algorithm.ECDSAP256SHA256 +ECDSAP384SHA384 = Algorithm.ECDSAP384SHA384 +ED25519 = Algorithm.ED25519 +ED448 = Algorithm.ED448 +INDIRECT = Algorithm.INDIRECT +PRIVATEDNS = Algorithm.PRIVATEDNS +PRIVATEOID = Algorithm.PRIVATEOID + +### END generated Algorithm constants diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__init__.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__init__.py new file mode 100644 index 0000000..0810b19 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__init__.py @@ -0,0 +1,124 @@ +from typing import Dict, Tuple, Type + +import dns._features +import dns.name +from dns.dnssecalgs.base import GenericPrivateKey +from dns.dnssectypes import Algorithm +from dns.exception import UnsupportedAlgorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + +# pyright: reportPossiblyUnboundVariable=false + +if dns._features.have("dnssec"): + from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1 + from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384 + from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519 + from dns.dnssecalgs.rsa import ( + PrivateRSAMD5, + PrivateRSASHA1, + PrivateRSASHA1NSEC3SHA1, + PrivateRSASHA256, + PrivateRSASHA512, + ) + + _have_cryptography = True +else: + _have_cryptography = False + +AlgorithmPrefix = bytes | dns.name.Name | None + +algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {} +if _have_cryptography: + # pylint: disable=possibly-used-before-assignment + algorithms.update( + { + (Algorithm.RSAMD5, None): PrivateRSAMD5, + (Algorithm.DSA, None): PrivateDSA, + (Algorithm.RSASHA1, None): PrivateRSASHA1, + (Algorithm.DSANSEC3SHA1, None): PrivateDSANSEC3SHA1, + (Algorithm.RSASHA1NSEC3SHA1, None): PrivateRSASHA1NSEC3SHA1, + (Algorithm.RSASHA256, None): 
PrivateRSASHA256,
+            (Algorithm.RSASHA512, None): PrivateRSASHA512,
+            (Algorithm.ECDSAP256SHA256, None): PrivateECDSAP256SHA256,
+            (Algorithm.ECDSAP384SHA384, None): PrivateECDSAP384SHA384,
+            (Algorithm.ED25519, None): PrivateED25519,
+            (Algorithm.ED448, None): PrivateED448,
+        }
+    )
+
+
+def get_algorithm_cls(
+    algorithm: int | str, prefix: AlgorithmPrefix = None
+) -> Type[GenericPrivateKey]:
+    """Get Private Key class from Algorithm.
+
+    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
+
+    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
+
+    Returns a ``dns.dnssecalgs.GenericPrivateKey``
+    """
+    algorithm = Algorithm.make(algorithm)
+    cls = algorithms.get((algorithm, prefix))
+    if cls:
+        return cls
+    raise UnsupportedAlgorithm(
+        f'algorithm "{Algorithm.to_text(algorithm)}" not supported by dnspython'
+    )
+
+
+def get_algorithm_cls_from_dnskey(dnskey: DNSKEY) -> Type[GenericPrivateKey]:
+    """Get Private Key class from DNSKEY.
+
+    *dnskey*, a ``DNSKEY`` to get Algorithm class for.
+
+    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
+
+    Returns a ``dns.dnssecalgs.GenericPrivateKey``
+    """
+    prefix: AlgorithmPrefix = None
+    if dnskey.algorithm == Algorithm.PRIVATEDNS:
+        prefix, _ = dns.name.from_wire(dnskey.key, 0)
+    elif dnskey.algorithm == Algorithm.PRIVATEOID:
+        length = int(dnskey.key[0])
+        prefix = dnskey.key[0 : length + 1]
+    return get_algorithm_cls(dnskey.algorithm, prefix)
+
+
+def register_algorithm_cls(
+    algorithm: int | str,
+    algorithm_cls: Type[GenericPrivateKey],
+    name: dns.name.Name | str | None = None,
+    oid: bytes | None = None,
+) -> None:
+    """Register Algorithm Private Key class.
+
+    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
+
+    *algorithm_cls*: A `GenericPrivateKey` class.
+
+    *name*, an optional ``dns.name.Name`` or ``str``, for PRIVATEDNS algorithms.
+
+    *oid*: an optional BER-encoded `bytes` for PRIVATEOID algorithms.
+
+    Raises ``ValueError`` if a name or oid is specified incorrectly.
+ """ + if not issubclass(algorithm_cls, GenericPrivateKey): + raise TypeError("Invalid algorithm class") + algorithm = Algorithm.make(algorithm) + prefix: AlgorithmPrefix = None + if algorithm == Algorithm.PRIVATEDNS: + if name is None: + raise ValueError("Name required for PRIVATEDNS algorithms") + if isinstance(name, str): + name = dns.name.from_text(name) + prefix = name + elif algorithm == Algorithm.PRIVATEOID: + if oid is None: + raise ValueError("OID required for PRIVATEOID algorithms") + prefix = bytes([len(oid)]) + oid + elif name: + raise ValueError("Name only supported for PRIVATEDNS algorithm") + elif oid: + raise ValueError("OID only supported for PRIVATEOID algorithm") + algorithms[(algorithm, prefix)] = algorithm_cls diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..842f2cc Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..ab7a198 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc new file mode 100644 index 0000000..6f16506 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc new file mode 100644 index 0000000..9925509 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc new file mode 100644 index 0000000..a3147ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc new file mode 100644 index 0000000..146aa17 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc new file mode 100644 index 0000000..9c709f0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/base.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/base.py new file mode 100644 index 0000000..0334fe6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/base.py @@ -0,0 +1,89 @@ +from abc import ABC, abstractmethod # pylint: disable=no-name-in-module +from typing import Any, Type + +import 
dns.rdataclass +import dns.rdatatype +from dns.dnssectypes import Algorithm +from dns.exception import AlgorithmKeyMismatch +from dns.rdtypes.ANY.DNSKEY import DNSKEY +from dns.rdtypes.dnskeybase import Flag + + +class GenericPublicKey(ABC): + algorithm: Algorithm + + @abstractmethod + def __init__(self, key: Any) -> None: + pass + + @abstractmethod + def verify(self, signature: bytes, data: bytes) -> None: + """Verify signed DNSSEC data""" + + @abstractmethod + def encode_key_bytes(self) -> bytes: + """Encode key as bytes for DNSKEY""" + + @classmethod + def _ensure_algorithm_key_combination(cls, key: DNSKEY) -> None: + if key.algorithm != cls.algorithm: + raise AlgorithmKeyMismatch + + def to_dnskey(self, flags: int = Flag.ZONE, protocol: int = 3) -> DNSKEY: + """Return public key as DNSKEY""" + return DNSKEY( + rdclass=dns.rdataclass.IN, + rdtype=dns.rdatatype.DNSKEY, + flags=flags, + protocol=protocol, + algorithm=self.algorithm, + key=self.encode_key_bytes(), + ) + + @classmethod + @abstractmethod + def from_dnskey(cls, key: DNSKEY) -> "GenericPublicKey": + """Create public key from DNSKEY""" + + @classmethod + @abstractmethod + def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": + """Create public key from PEM-encoded SubjectPublicKeyInfo as specified + in RFC 5280""" + + @abstractmethod + def to_pem(self) -> bytes: + """Return public-key as PEM-encoded SubjectPublicKeyInfo as specified + in RFC 5280""" + + +class GenericPrivateKey(ABC): + public_cls: Type[GenericPublicKey] + + @abstractmethod + def __init__(self, key: Any) -> None: + pass + + @abstractmethod + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign DNSSEC data""" + + @abstractmethod + def public_key(self) -> "GenericPublicKey": + """Return public key instance""" + + @classmethod + @abstractmethod + def from_pem( + cls, private_pem: bytes, password: bytes | None = None + ) -> "GenericPrivateKey": + """Create private key from PEM-encoded PKCS#8""" + + @abstractmethod + def to_pem(self, password: bytes | None = None) -> bytes: + """Return private key as PEM-encoded PKCS#8""" diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/cryptography.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/cryptography.py new file mode 100644 index 0000000..a5dde6a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/cryptography.py @@ -0,0 +1,68 @@ +from typing import Any, Type + +from cryptography.hazmat.primitives import serialization + +from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey +from dns.exception import AlgorithmKeyMismatch + + +class CryptographyPublicKey(GenericPublicKey): + key: Any = None + key_cls: Any = None + + def __init__(self, key: Any) -> None: # pylint: disable=super-init-not-called + if self.key_cls is None: + raise TypeError("Undefined private key class") + if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type + key, self.key_cls + ): + raise AlgorithmKeyMismatch + self.key = key + + @classmethod + def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": + key = serialization.load_pem_public_key(public_pem) + return cls(key=key) + + def to_pem(self) -> bytes: + return self.key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + + +class CryptographyPrivateKey(GenericPrivateKey): + key: Any = None + key_cls: Any = None + public_cls: Type[CryptographyPublicKey] # pyright: ignore + + def __init__(self, key: Any) 
-> None: # pylint: disable=super-init-not-called + if self.key_cls is None: + raise TypeError("Undefined private key class") + if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type + key, self.key_cls + ): + raise AlgorithmKeyMismatch + self.key = key + + def public_key(self) -> "CryptographyPublicKey": + return self.public_cls(key=self.key.public_key()) + + @classmethod + def from_pem( + cls, private_pem: bytes, password: bytes | None = None + ) -> "GenericPrivateKey": + key = serialization.load_pem_private_key(private_pem, password=password) + return cls(key=key) + + def to_pem(self, password: bytes | None = None) -> bytes: + encryption_algorithm: serialization.KeySerializationEncryption + if password: + encryption_algorithm = serialization.BestAvailableEncryption(password) + else: + encryption_algorithm = serialization.NoEncryption() + return self.key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=encryption_algorithm, + ) diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/dsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/dsa.py new file mode 100644 index 0000000..a4eb987 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/dsa.py @@ -0,0 +1,108 @@ +import struct + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import dsa, utils + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicDSA(CryptographyPublicKey): + key: dsa.DSAPublicKey + key_cls = dsa.DSAPublicKey + algorithm = Algorithm.DSA + chosen_hash = hashes.SHA1() + + def verify(self, signature: bytes, data: bytes) -> None: + sig_r = signature[1:21] + sig_s = signature[21:] + sig = utils.encode_dss_signature( + int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") + ) + self.key.verify(sig, data, self.chosen_hash) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 2536, section 2.""" + pn = self.key.public_numbers() + dsa_t = (self.key.key_size // 8 - 64) // 8 + if dsa_t > 8: + raise ValueError("unsupported DSA key size") + octets = 64 + dsa_t * 8 + res = struct.pack("!B", dsa_t) + res += pn.parameter_numbers.q.to_bytes(20, "big") + res += pn.parameter_numbers.p.to_bytes(octets, "big") + res += pn.parameter_numbers.g.to_bytes(octets, "big") + res += pn.y.to_bytes(octets, "big") + return res + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicDSA": + cls._ensure_algorithm_key_combination(key) + keyptr = key.key + (t,) = struct.unpack("!B", keyptr[0:1]) + keyptr = keyptr[1:] + octets = 64 + t * 8 + dsa_q = keyptr[0:20] + keyptr = keyptr[20:] + dsa_p = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_g = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_y = keyptr[0:octets] + return cls( + key=dsa.DSAPublicNumbers( # type: ignore + int.from_bytes(dsa_y, "big"), + dsa.DSAParameterNumbers( + int.from_bytes(dsa_p, "big"), + int.from_bytes(dsa_q, "big"), + int.from_bytes(dsa_g, "big"), + ), + ).public_key(default_backend()), + ) + + +class PrivateDSA(CryptographyPrivateKey): + key: dsa.DSAPrivateKey + key_cls = dsa.DSAPrivateKey + public_cls = PublicDSA + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 2536, section 3.""" + public_dsa_key 
= self.key.public_key() + if public_dsa_key.key_size > 1024: + raise ValueError("DSA key size overflow") + der_signature = self.key.sign( + data, self.public_cls.chosen_hash # pyright: ignore + ) + dsa_r, dsa_s = utils.decode_dss_signature(der_signature) + dsa_t = (public_dsa_key.key_size // 8 - 64) // 8 + octets = 20 + signature = ( + struct.pack("!B", dsa_t) + + int.to_bytes(dsa_r, length=octets, byteorder="big") + + int.to_bytes(dsa_s, length=octets, byteorder="big") + ) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls, key_size: int) -> "PrivateDSA": + return cls( + key=dsa.generate_private_key(key_size=key_size), + ) + + +class PublicDSANSEC3SHA1(PublicDSA): + algorithm = Algorithm.DSANSEC3SHA1 + + +class PrivateDSANSEC3SHA1(PrivateDSA): + public_cls = PublicDSANSEC3SHA1 diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/ecdsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/ecdsa.py new file mode 100644 index 0000000..e3f3f06 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/ecdsa.py @@ -0,0 +1,100 @@ +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec, utils + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicECDSA(CryptographyPublicKey): + key: ec.EllipticCurvePublicKey + key_cls = ec.EllipticCurvePublicKey + algorithm: Algorithm + chosen_hash: hashes.HashAlgorithm + curve: ec.EllipticCurve + octets: int + + def verify(self, signature: bytes, data: bytes) -> None: + sig_r = signature[0 : self.octets] + sig_s = signature[self.octets :] + sig = utils.encode_dss_signature( + int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") + ) + self.key.verify(sig, data, ec.ECDSA(self.chosen_hash)) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 6605, section 4.""" + pn = self.key.public_numbers() + return pn.x.to_bytes(self.octets, "big") + pn.y.to_bytes(self.octets, "big") + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicECDSA": + cls._ensure_algorithm_key_combination(key) + ecdsa_x = key.key[0 : cls.octets] + ecdsa_y = key.key[cls.octets : cls.octets * 2] + return cls( + key=ec.EllipticCurvePublicNumbers( + curve=cls.curve, + x=int.from_bytes(ecdsa_x, "big"), + y=int.from_bytes(ecdsa_y, "big"), + ).public_key(default_backend()), + ) + + +class PrivateECDSA(CryptographyPrivateKey): + key: ec.EllipticCurvePrivateKey + key_cls = ec.EllipticCurvePrivateKey + public_cls = PublicECDSA + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 6605, section 4.""" + algorithm = ec.ECDSA( + self.public_cls.chosen_hash, # pyright: ignore + deterministic_signing=deterministic, + ) + der_signature = self.key.sign(data, algorithm) + dsa_r, dsa_s = utils.decode_dss_signature(der_signature) + signature = int.to_bytes( + dsa_r, length=self.public_cls.octets, byteorder="big" # pyright: ignore + ) + int.to_bytes( + dsa_s, length=self.public_cls.octets, byteorder="big" # pyright: ignore + ) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls) -> "PrivateECDSA": + return cls( + key=ec.generate_private_key( + curve=cls.public_cls.curve, backend=default_backend() # pyright: ignore + ), 
+ ) + + +class PublicECDSAP256SHA256(PublicECDSA): + algorithm = Algorithm.ECDSAP256SHA256 + chosen_hash = hashes.SHA256() + curve = ec.SECP256R1() + octets = 32 + + +class PrivateECDSAP256SHA256(PrivateECDSA): + public_cls = PublicECDSAP256SHA256 + + +class PublicECDSAP384SHA384(PublicECDSA): + algorithm = Algorithm.ECDSAP384SHA384 + chosen_hash = hashes.SHA384() + curve = ec.SECP384R1() + octets = 48 + + +class PrivateECDSAP384SHA384(PrivateECDSA): + public_cls = PublicECDSAP384SHA384 diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/eddsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/eddsa.py new file mode 100644 index 0000000..1cbb407 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/eddsa.py @@ -0,0 +1,70 @@ +from typing import Type + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ed448, ed25519 + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicEDDSA(CryptographyPublicKey): + def verify(self, signature: bytes, data: bytes) -> None: + self.key.verify(signature, data) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 8080, section 3.""" + return self.key.public_bytes( + encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw + ) + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicEDDSA": + cls._ensure_algorithm_key_combination(key) + return cls( + key=cls.key_cls.from_public_bytes(key.key), + ) + + +class PrivateEDDSA(CryptographyPrivateKey): + public_cls: Type[PublicEDDSA] # pyright: ignore + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 8080, section 4.""" + signature = self.key.sign(data) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls) -> "PrivateEDDSA": + return cls(key=cls.key_cls.generate()) + + +class PublicED25519(PublicEDDSA): + key: ed25519.Ed25519PublicKey + key_cls = ed25519.Ed25519PublicKey + algorithm = Algorithm.ED25519 + + +class PrivateED25519(PrivateEDDSA): + key: ed25519.Ed25519PrivateKey + key_cls = ed25519.Ed25519PrivateKey + public_cls = PublicED25519 + + +class PublicED448(PublicEDDSA): + key: ed448.Ed448PublicKey + key_cls = ed448.Ed448PublicKey + algorithm = Algorithm.ED448 + + +class PrivateED448(PrivateEDDSA): + key: ed448.Ed448PrivateKey + key_cls = ed448.Ed448PrivateKey + public_cls = PublicED448 diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/rsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/rsa.py new file mode 100644 index 0000000..de9160b --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssecalgs/rsa.py @@ -0,0 +1,126 @@ +import math +import struct + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import padding, rsa + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicRSA(CryptographyPublicKey): + key: rsa.RSAPublicKey + key_cls = rsa.RSAPublicKey + algorithm: Algorithm + chosen_hash: hashes.HashAlgorithm + + def verify(self, signature: bytes, data: bytes) -> None: + self.key.verify(signature, data, padding.PKCS1v15(), 
self.chosen_hash) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 3110, section 2.""" + pn = self.key.public_numbers() + _exp_len = math.ceil(int.bit_length(pn.e) / 8) + exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big") + if _exp_len > 255: + exp_header = b"\0" + struct.pack("!H", _exp_len) + else: + exp_header = struct.pack("!B", _exp_len) + if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096: + raise ValueError("unsupported RSA key length") + return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big") + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicRSA": + cls._ensure_algorithm_key_combination(key) + keyptr = key.key + (bytes_,) = struct.unpack("!B", keyptr[0:1]) + keyptr = keyptr[1:] + if bytes_ == 0: + (bytes_,) = struct.unpack("!H", keyptr[0:2]) + keyptr = keyptr[2:] + rsa_e = keyptr[0:bytes_] + rsa_n = keyptr[bytes_:] + return cls( + key=rsa.RSAPublicNumbers( + int.from_bytes(rsa_e, "big"), int.from_bytes(rsa_n, "big") + ).public_key(default_backend()) + ) + + +class PrivateRSA(CryptographyPrivateKey): + key: rsa.RSAPrivateKey + key_cls = rsa.RSAPrivateKey + public_cls = PublicRSA + default_public_exponent = 65537 + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 3110, section 3.""" + signature = self.key.sign( + data, padding.PKCS1v15(), self.public_cls.chosen_hash # pyright: ignore + ) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls, key_size: int) -> "PrivateRSA": + return cls( + key=rsa.generate_private_key( + public_exponent=cls.default_public_exponent, + key_size=key_size, + backend=default_backend(), + ) + ) + + +class PublicRSAMD5(PublicRSA): + algorithm = Algorithm.RSAMD5 + chosen_hash = hashes.MD5() + + +class PrivateRSAMD5(PrivateRSA): + public_cls = PublicRSAMD5 + + +class PublicRSASHA1(PublicRSA): + algorithm = Algorithm.RSASHA1 + chosen_hash = hashes.SHA1() + + +class PrivateRSASHA1(PrivateRSA): + public_cls = PublicRSASHA1 + + +class PublicRSASHA1NSEC3SHA1(PublicRSA): + algorithm = Algorithm.RSASHA1NSEC3SHA1 + chosen_hash = hashes.SHA1() + + +class PrivateRSASHA1NSEC3SHA1(PrivateRSA): + public_cls = PublicRSASHA1NSEC3SHA1 + + +class PublicRSASHA256(PublicRSA): + algorithm = Algorithm.RSASHA256 + chosen_hash = hashes.SHA256() + + +class PrivateRSASHA256(PrivateRSA): + public_cls = PublicRSASHA256 + + +class PublicRSASHA512(PublicRSA): + algorithm = Algorithm.RSASHA512 + chosen_hash = hashes.SHA512() + + +class PrivateRSASHA512(PrivateRSA): + public_cls = PublicRSASHA512 diff --git a/venv/lib/python3.12/site-packages/dns/dnssectypes.py b/venv/lib/python3.12/site-packages/dns/dnssectypes.py new file mode 100644 index 0000000..02131e0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/dnssectypes.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related types.""" + +# This is a separate file to avoid import circularity between dns.dnssec and +# the implementations of the DS and DNSKEY types. + +import dns.enum + + +class Algorithm(dns.enum.IntEnum): + RSAMD5 = 1 + DH = 2 + DSA = 3 + ECC = 4 + RSASHA1 = 5 + DSANSEC3SHA1 = 6 + RSASHA1NSEC3SHA1 = 7 + RSASHA256 = 8 + RSASHA512 = 10 + ECCGOST = 12 + ECDSAP256SHA256 = 13 + ECDSAP384SHA384 = 14 + ED25519 = 15 + ED448 = 16 + INDIRECT = 252 + PRIVATEDNS = 253 + PRIVATEOID = 254 + + @classmethod + def _maximum(cls): + return 255 + + +class DSDigest(dns.enum.IntEnum): + """DNSSEC Delegation Signer Digest Algorithm""" + + NULL = 0 + SHA1 = 1 + SHA256 = 2 + GOST = 3 + SHA384 = 4 + + @classmethod + def _maximum(cls): + return 255 + + +class NSEC3Hash(dns.enum.IntEnum): + """NSEC3 hash algorithm""" + + SHA1 = 1 + + @classmethod + def _maximum(cls): + return 255 diff --git a/venv/lib/python3.12/site-packages/dns/e164.py b/venv/lib/python3.12/site-packages/dns/e164.py new file mode 100644 index 0000000..942d2c0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/e164.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS E.164 helpers.""" + +from typing import Iterable + +import dns.exception +import dns.name +import dns.resolver + +#: The public E.164 domain. +public_enum_domain = dns.name.from_text("e164.arpa.") + + +def from_e164( + text: str, origin: dns.name.Name | None = public_enum_domain +) -> dns.name.Name: + """Convert an E.164 number in textual form into a Name object whose + value is the ENUM domain name for that number. + + Non-digits in the text are ignored, i.e. "16505551212", + "+1.650.555.1212" and "1 (650) 555-1212" are all the same. + + *text*, a ``str``, is an E.164 number in textual form. + + *origin*, a ``dns.name.Name``, the domain in which the number + should be constructed. The default is ``e164.arpa.``. + + Returns a ``dns.name.Name``. + """ + + parts = [d for d in text if d.isdigit()] + parts.reverse() + return dns.name.from_text(".".join(parts), origin=origin) + + +def to_e164( + name: dns.name.Name, + origin: dns.name.Name | None = public_enum_domain, + want_plus_prefix: bool = True, +) -> str: + """Convert an ENUM domain name into an E.164 number. 
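+
+    For example (an illustrative note, not upstream documentation), the
+    ENUM name ``2.1.2.1.5.5.5.0.5.6.1.e164.arpa.`` converts to
+    ``"+16505551212"``.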
+ + Note that dnspython does not have any information about preferred + number formats within national numbering plans, so all numbers are + emitted as a simple string of digits, prefixed by a '+' (unless + *want_plus_prefix* is ``False``). + + *name* is a ``dns.name.Name``, the ENUM domain name. + + *origin* is a ``dns.name.Name``, a domain containing the ENUM + domain name. The name is relativized to this domain before being + converted to text. If ``None``, no relativization is done. + + *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of + the returned number. + + Returns a ``str``. + + """ + if origin is not None: + name = name.relativize(origin) + dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] + if len(dlabels) != len(name.labels): + raise dns.exception.SyntaxError("non-digit labels in ENUM domain name") + dlabels.reverse() + text = b"".join(dlabels) + if want_plus_prefix: + text = b"+" + text + return text.decode() + + +def query( + number: str, + domains: Iterable[dns.name.Name | str], + resolver: dns.resolver.Resolver | None = None, +) -> dns.resolver.Answer: + """Look for NAPTR RRs for the specified number in the specified domains. + + e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) + + *number*, a ``str`` is the number to look for. + + *domains* is an iterable containing ``dns.name.Name`` values. + + *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If + ``None``, the default resolver is used. + """ + + if resolver is None: + resolver = dns.resolver.get_default_resolver() + e_nx = dns.resolver.NXDOMAIN() + for domain in domains: + if isinstance(domain, str): + domain = dns.name.from_text(domain) + qname = from_e164(number, domain) + try: + return resolver.resolve(qname, "NAPTR") + except dns.resolver.NXDOMAIN as e: + e_nx += e + raise e_nx diff --git a/venv/lib/python3.12/site-packages/dns/edns.py b/venv/lib/python3.12/site-packages/dns/edns.py new file mode 100644 index 0000000..eb98548 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/edns.py @@ -0,0 +1,591 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
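+
+# An illustrative usage sketch (editorial note, not upstream dnspython
+# code); it assumes the dns.message.make_query() API, which accepts the
+# EDNS options defined in this module:
+#
+#   import dns.edns
+#   import dns.message
+#
+#   ecs = dns.edns.ECSOption("192.0.2.0", 24)  # advertise a /24 client subnet
+#   q = dns.message.make_query("example.com", "A", use_edns=0, options=[ecs])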
+
+"""EDNS Options"""
+
+import binascii
+import math
+import socket
+import struct
+from typing import Any, Dict
+
+import dns.enum
+import dns.inet
+import dns.ipv4
+import dns.ipv6
+import dns.name
+import dns.rdata
+import dns.wire
+
+
+class OptionType(dns.enum.IntEnum):
+    """EDNS option type codes"""
+
+    #: NSID
+    NSID = 3
+    #: DAU
+    DAU = 5
+    #: DHU
+    DHU = 6
+    #: N3U
+    N3U = 7
+    #: ECS (client-subnet)
+    ECS = 8
+    #: EXPIRE
+    EXPIRE = 9
+    #: COOKIE
+    COOKIE = 10
+    #: KEEPALIVE
+    KEEPALIVE = 11
+    #: PADDING
+    PADDING = 12
+    #: CHAIN
+    CHAIN = 13
+    #: EDE (extended-dns-error)
+    EDE = 15
+    #: REPORTCHANNEL
+    REPORTCHANNEL = 18
+
+    @classmethod
+    def _maximum(cls):
+        return 65535
+
+
+class Option:
+    """Base class for all EDNS option types."""
+
+    def __init__(self, otype: OptionType | str):
+        """Initialize an option.
+
+        *otype*, a ``dns.edns.OptionType``, is the option type.
+        """
+        self.otype = OptionType.make(otype)
+
+    def to_wire(self, file: Any | None = None) -> bytes | None:
+        """Convert an option to wire format.
+
+        Returns a ``bytes`` or ``None``.
+
+        """
+        raise NotImplementedError  # pragma: no cover
+
+    def to_text(self) -> str:
+        raise NotImplementedError  # pragma: no cover
+
+    def to_generic(self) -> "GenericOption":
+        """Creates a dns.edns.GenericOption equivalent of this rdata.
+
+        Returns a ``dns.edns.GenericOption``.
+        """
+        wire = self.to_wire()
+        assert wire is not None  # for mypy
+        return GenericOption(self.otype, wire)
+
+    @classmethod
+    def from_wire_parser(cls, otype: OptionType, parser: "dns.wire.Parser") -> "Option":
+        """Build an EDNS option object from wire format.
+
+        *otype*, a ``dns.edns.OptionType``, is the option type.
+
+        *parser*, a ``dns.wire.Parser``, the parser, which should be
+        restricted to the option length.
+
+        Returns a ``dns.edns.Option``.
+        """
+        raise NotImplementedError  # pragma: no cover
+
+    def _cmp(self, other):
+        """Compare an EDNS option with another option of the same type.
+
+        Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*.
+        """
+        wire = self.to_wire()
+        owire = other.to_wire()
+        if wire == owire:
+            return 0
+        if wire > owire:
+            return 1
+        return -1
+
+    def __eq__(self, other):
+        if not isinstance(other, Option):
+            return False
+        if self.otype != other.otype:
+            return False
+        return self._cmp(other) == 0
+
+    def __ne__(self, other):
+        if not isinstance(other, Option):
+            return True
+        if self.otype != other.otype:
+            return True
+        return self._cmp(other) != 0
+
+    def __lt__(self, other):
+        if not isinstance(other, Option) or self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) < 0
+
+    def __le__(self, other):
+        if not isinstance(other, Option) or self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) <= 0
+
+    def __ge__(self, other):
+        if not isinstance(other, Option) or self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) >= 0
+
+    def __gt__(self, other):
+        if not isinstance(other, Option) or self.otype != other.otype:
+            return NotImplemented
+        return self._cmp(other) > 0
+
+    def __str__(self):
+        return self.to_text()
+
+
+class GenericOption(Option):  # lgtm[py/missing-equals]
+    """Generic Option Class
+
+    This class is used for EDNS option types for which we have no better
+    implementation.
+ """ + + def __init__(self, otype: OptionType | str, data: bytes | str): + super().__init__(otype) + self.data = dns.rdata.Rdata._as_bytes(data, True) + + def to_wire(self, file: Any | None = None) -> bytes | None: + if file: + file.write(self.data) + return None + else: + return self.data + + def to_text(self) -> str: + return f"Generic {self.otype}" + + def to_generic(self) -> "GenericOption": + return self + + @classmethod + def from_wire_parser( + cls, otype: OptionType | str, parser: "dns.wire.Parser" + ) -> Option: + return cls(otype, parser.get_remaining()) + + +class ECSOption(Option): # lgtm[py/missing-equals] + """EDNS Client Subnet (ECS, RFC7871)""" + + def __init__(self, address: str, srclen: int | None = None, scopelen: int = 0): + """*address*, a ``str``, is the client address information. + + *srclen*, an ``int``, the source prefix length, which is the + leftmost number of bits of the address to be used for the + lookup. The default is 24 for IPv4 and 56 for IPv6. + + *scopelen*, an ``int``, the scope prefix length. This value + must be 0 in queries, and should be set in responses. + """ + + super().__init__(OptionType.ECS) + af = dns.inet.af_for_address(address) + + if af == socket.AF_INET6: + self.family = 2 + if srclen is None: + srclen = 56 + address = dns.rdata.Rdata._as_ipv6_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 128) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 128) + elif af == socket.AF_INET: + self.family = 1 + if srclen is None: + srclen = 24 + address = dns.rdata.Rdata._as_ipv4_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 32) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 32) + else: # pragma: no cover (this will never happen) + raise ValueError("Bad address family") + + assert srclen is not None + self.address = address + self.srclen = srclen + self.scopelen = scopelen + + addrdata = dns.inet.inet_pton(af, address) + nbytes = int(math.ceil(srclen / 8.0)) + + # Truncate to srclen and pad to the end of the last octet needed + # See RFC section 6 + self.addrdata = addrdata[:nbytes] + nbits = srclen % 8 + if nbits != 0: + last = struct.pack("B", ord(self.addrdata[-1:]) & (0xFF << (8 - nbits))) + self.addrdata = self.addrdata[:-1] + last + + def to_text(self) -> str: + return f"ECS {self.address}/{self.srclen} scope/{self.scopelen}" + + @staticmethod + def from_text(text: str) -> Option: + """Convert a string into a `dns.edns.ECSOption` + + *text*, a `str`, the text form of the option. + + Returns a `dns.edns.ECSOption`. 
+ + Examples: + + >>> import dns.edns + >>> + >>> # basic example + >>> dns.edns.ECSOption.from_text('1.2.3.4/24') + >>> + >>> # also understands scope + >>> dns.edns.ECSOption.from_text('1.2.3.4/24/32') + >>> + >>> # IPv6 + >>> dns.edns.ECSOption.from_text('2001:4b98::1/64/64') + >>> + >>> # it understands results from `dns.edns.ECSOption.to_text()` + >>> dns.edns.ECSOption.from_text('ECS 1.2.3.4/24/32') + """ + optional_prefix = "ECS" + tokens = text.split() + ecs_text = None + if len(tokens) == 1: + ecs_text = tokens[0] + elif len(tokens) == 2: + if tokens[0] != optional_prefix: + raise ValueError(f'could not parse ECS from "{text}"') + ecs_text = tokens[1] + else: + raise ValueError(f'could not parse ECS from "{text}"') + n_slashes = ecs_text.count("/") + if n_slashes == 1: + address, tsrclen = ecs_text.split("/") + tscope = "0" + elif n_slashes == 2: + address, tsrclen, tscope = ecs_text.split("/") + else: + raise ValueError(f'could not parse ECS from "{text}"') + try: + scope = int(tscope) + except ValueError: + raise ValueError("invalid scope " + f'"{tscope}": scope must be an integer') + try: + srclen = int(tsrclen) + except ValueError: + raise ValueError( + "invalid srclen " + f'"{tsrclen}": srclen must be an integer' + ) + return ECSOption(address, srclen, scope) + + def to_wire(self, file: Any | None = None) -> bytes | None: + value = ( + struct.pack("!HBB", self.family, self.srclen, self.scopelen) + self.addrdata + ) + if file: + file.write(value) + return None + else: + return value + + @classmethod + def from_wire_parser( + cls, otype: OptionType | str, parser: "dns.wire.Parser" + ) -> Option: + family, src, scope = parser.get_struct("!HBB") + addrlen = int(math.ceil(src / 8.0)) + prefix = parser.get_bytes(addrlen) + if family == 1: + pad = 4 - addrlen + addr = dns.ipv4.inet_ntoa(prefix + b"\x00" * pad) + elif family == 2: + pad = 16 - addrlen + addr = dns.ipv6.inet_ntoa(prefix + b"\x00" * pad) + else: + raise ValueError("unsupported family") + + return cls(addr, src, scope) + + +class EDECode(dns.enum.IntEnum): + """Extended DNS Error (EDE) codes""" + + OTHER = 0 + UNSUPPORTED_DNSKEY_ALGORITHM = 1 + UNSUPPORTED_DS_DIGEST_TYPE = 2 + STALE_ANSWER = 3 + FORGED_ANSWER = 4 + DNSSEC_INDETERMINATE = 5 + DNSSEC_BOGUS = 6 + SIGNATURE_EXPIRED = 7 + SIGNATURE_NOT_YET_VALID = 8 + DNSKEY_MISSING = 9 + RRSIGS_MISSING = 10 + NO_ZONE_KEY_BIT_SET = 11 + NSEC_MISSING = 12 + CACHED_ERROR = 13 + NOT_READY = 14 + BLOCKED = 15 + CENSORED = 16 + FILTERED = 17 + PROHIBITED = 18 + STALE_NXDOMAIN_ANSWER = 19 + NOT_AUTHORITATIVE = 20 + NOT_SUPPORTED = 21 + NO_REACHABLE_AUTHORITY = 22 + NETWORK_ERROR = 23 + INVALID_DATA = 24 + + @classmethod + def _maximum(cls): + return 65535 + + +class EDEOption(Option): # lgtm[py/missing-equals] + """Extended DNS Error (EDE, RFC8914)""" + + _preserve_case = {"DNSKEY", "DS", "DNSSEC", "RRSIGs", "NSEC", "NXDOMAIN"} + + def __init__(self, code: EDECode | str, text: str | None = None): + """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the + extended error. + + *text*, a ``str`` or ``None``, specifying additional information about + the error. 
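+
+        For example (an illustrative note, not upstream documentation),
+        ``EDEOption(EDECode.BLOCKED, "filtered by policy")`` carries info
+        code 15 together with the explanatory text.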
+ """ + + super().__init__(OptionType.EDE) + + self.code = EDECode.make(code) + if text is not None and not isinstance(text, str): + raise ValueError("text must be string or None") + self.text = text + + def to_text(self) -> str: + output = f"EDE {self.code}" + if self.code in EDECode: + desc = EDECode.to_text(self.code) + desc = " ".join( + word if word in self._preserve_case else word.title() + for word in desc.split("_") + ) + output += f" ({desc})" + if self.text is not None: + output += f": {self.text}" + return output + + def to_wire(self, file: Any | None = None) -> bytes | None: + value = struct.pack("!H", self.code) + if self.text is not None: + value += self.text.encode("utf8") + + if file: + file.write(value) + return None + else: + return value + + @classmethod + def from_wire_parser( + cls, otype: OptionType | str, parser: "dns.wire.Parser" + ) -> Option: + code = EDECode.make(parser.get_uint16()) + text = parser.get_remaining() + + if text: + if text[-1] == 0: # text MAY be null-terminated + text = text[:-1] + btext = text.decode("utf8") + else: + btext = None + + return cls(code, btext) + + +class NSIDOption(Option): + def __init__(self, nsid: bytes): + super().__init__(OptionType.NSID) + self.nsid = nsid + + def to_wire(self, file: Any = None) -> bytes | None: + if file: + file.write(self.nsid) + return None + else: + return self.nsid + + def to_text(self) -> str: + if all(c >= 0x20 and c <= 0x7E for c in self.nsid): + # All ASCII printable, so it's probably a string. + value = self.nsid.decode() + else: + value = binascii.hexlify(self.nsid).decode() + return f"NSID {value}" + + @classmethod + def from_wire_parser( + cls, otype: OptionType | str, parser: dns.wire.Parser + ) -> Option: + return cls(parser.get_remaining()) + + +class CookieOption(Option): + def __init__(self, client: bytes, server: bytes): + super().__init__(OptionType.COOKIE) + self.client = client + self.server = server + if len(client) != 8: + raise ValueError("client cookie must be 8 bytes") + if len(server) != 0 and (len(server) < 8 or len(server) > 32): + raise ValueError("server cookie must be empty or between 8 and 32 bytes") + + def to_wire(self, file: Any = None) -> bytes | None: + if file: + file.write(self.client) + if len(self.server) > 0: + file.write(self.server) + return None + else: + return self.client + self.server + + def to_text(self) -> str: + client = binascii.hexlify(self.client).decode() + if len(self.server) > 0: + server = binascii.hexlify(self.server).decode() + else: + server = "" + return f"COOKIE {client}{server}" + + @classmethod + def from_wire_parser( + cls, otype: OptionType | str, parser: dns.wire.Parser + ) -> Option: + return cls(parser.get_bytes(8), parser.get_remaining()) + + +class ReportChannelOption(Option): + # RFC 9567 + def __init__(self, agent_domain: dns.name.Name): + super().__init__(OptionType.REPORTCHANNEL) + self.agent_domain = agent_domain + + def to_wire(self, file: Any = None) -> bytes | None: + return self.agent_domain.to_wire(file) + + def to_text(self) -> str: + return "REPORTCHANNEL " + self.agent_domain.to_text() + + @classmethod + def from_wire_parser( + cls, otype: OptionType | str, parser: dns.wire.Parser + ) -> Option: + return cls(parser.get_name()) + + +_type_to_class: Dict[OptionType, Any] = { + OptionType.ECS: ECSOption, + OptionType.EDE: EDEOption, + OptionType.NSID: NSIDOption, + OptionType.COOKIE: CookieOption, + OptionType.REPORTCHANNEL: ReportChannelOption, +} + + +def get_option_class(otype: OptionType) -> Any: + """Return the class for 
the specified option type. + + The GenericOption class is used if a more specific class is not + known. + """ + + cls = _type_to_class.get(otype) + if cls is None: + cls = GenericOption + return cls + + +def option_from_wire_parser( + otype: OptionType | str, parser: "dns.wire.Parser" +) -> Option: + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the option length. + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + otype = OptionType.make(otype) + cls = get_option_class(otype) + return cls.from_wire_parser(otype, parser) + + +def option_from_wire( + otype: OptionType | str, wire: bytes, current: int, olen: int +) -> Option: + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *wire*, a ``bytes``, is the wire-format message. + + *current*, an ``int``, is the offset in *wire* of the beginning + of the rdata. + + *olen*, an ``int``, is the length of the wire-format option data + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(olen): + return option_from_wire_parser(otype, parser) + + +def register_type(implementation: Any, otype: OptionType) -> None: + """Register the implementation of an option type. + + *implementation*, a ``class``, is a subclass of ``dns.edns.Option``. + + *otype*, an ``int``, is the option type. + """ + + _type_to_class[otype] = implementation + + +### BEGIN generated OptionType constants + +NSID = OptionType.NSID +DAU = OptionType.DAU +DHU = OptionType.DHU +N3U = OptionType.N3U +ECS = OptionType.ECS +EXPIRE = OptionType.EXPIRE +COOKIE = OptionType.COOKIE +KEEPALIVE = OptionType.KEEPALIVE +PADDING = OptionType.PADDING +CHAIN = OptionType.CHAIN +EDE = OptionType.EDE +REPORTCHANNEL = OptionType.REPORTCHANNEL + +### END generated OptionType constants diff --git a/venv/lib/python3.12/site-packages/dns/entropy.py b/venv/lib/python3.12/site-packages/dns/entropy.py new file mode 100644 index 0000000..6430926 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/entropy.py @@ -0,0 +1,130 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import hashlib +import os +import random +import threading +import time +from typing import Any + + +class EntropyPool: + # This is an entropy pool for Python implementations that do not + # have a working SystemRandom. I'm not sure there are any, but + # leaving this code doesn't hurt anything as the library code + # is used if present. 
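+    #
+    # An illustrative note (editorial, not upstream dnspython): entropy fed
+    # to stir() is XOR-mixed into a 20-byte pool that is then hashed with
+    # SHA-1 to produce output, e.g.:
+    #
+    #   pool = EntropyPool()
+    #   pool.stir(b"timing jitter or other event bytes")
+    #   n = pool.random_16()  # an int in [0, 65535]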
+ + def __init__(self, seed: bytes | None = None): + self.pool_index = 0 + self.digest: bytearray | None = None + self.next_byte = 0 + self.lock = threading.Lock() + self.hash = hashlib.sha1() + self.hash_len = 20 + self.pool = bytearray(b"\0" * self.hash_len) + if seed is not None: + self._stir(seed) + self.seeded = True + self.seed_pid = os.getpid() + else: + self.seeded = False + self.seed_pid = 0 + + def _stir(self, entropy: bytes | bytearray) -> None: + for c in entropy: + if self.pool_index == self.hash_len: + self.pool_index = 0 + b = c & 0xFF + self.pool[self.pool_index] ^= b + self.pool_index += 1 + + def stir(self, entropy: bytes | bytearray) -> None: + with self.lock: + self._stir(entropy) + + def _maybe_seed(self) -> None: + if not self.seeded or self.seed_pid != os.getpid(): + try: + seed = os.urandom(16) + except Exception: # pragma: no cover + try: + with open("/dev/urandom", "rb", 0) as r: + seed = r.read(16) + except Exception: + seed = str(time.time()).encode() + self.seeded = True + self.seed_pid = os.getpid() + self.digest = None + seed = bytearray(seed) + self._stir(seed) + + def random_8(self) -> int: + with self.lock: + self._maybe_seed() + if self.digest is None or self.next_byte == self.hash_len: + self.hash.update(bytes(self.pool)) + self.digest = bytearray(self.hash.digest()) + self._stir(self.digest) + self.next_byte = 0 + value = self.digest[self.next_byte] + self.next_byte += 1 + return value + + def random_16(self) -> int: + return self.random_8() * 256 + self.random_8() + + def random_32(self) -> int: + return self.random_16() * 65536 + self.random_16() + + def random_between(self, first: int, last: int) -> int: + size = last - first + 1 + if size > 4294967296: + raise ValueError("too big") + if size > 65536: + rand = self.random_32 + max = 4294967295 + elif size > 256: + rand = self.random_16 + max = 65535 + else: + rand = self.random_8 + max = 255 + return first + size * rand() // (max + 1) + + +pool = EntropyPool() + +system_random: Any | None +try: + system_random = random.SystemRandom() +except Exception: # pragma: no cover + system_random = None + + +def random_16() -> int: + if system_random is not None: + return system_random.randrange(0, 65536) + else: + return pool.random_16() + + +def between(first: int, last: int) -> int: + if system_random is not None: + return system_random.randrange(first, last + 1) + else: + return pool.random_between(first, last) diff --git a/venv/lib/python3.12/site-packages/dns/enum.py b/venv/lib/python3.12/site-packages/dns/enum.py new file mode 100644 index 0000000..822c995 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/enum.py @@ -0,0 +1,113 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
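+
+# An illustrative note (editorial, not upstream dnspython): for a concrete
+# subclass such as dns.edns.OptionType, the contract below means that
+# OptionType.make("COOKIE"), OptionType.make(10), and
+# OptionType.from_text("COOKIE") all yield OptionType.COOKIE, while values
+# outside 0..65535 raise ValueError.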
+ +import enum +from typing import Any, Type, TypeVar + +TIntEnum = TypeVar("TIntEnum", bound="IntEnum") + + +class IntEnum(enum.IntEnum): + @classmethod + def _missing_(cls, value): + cls._check_value(value) + val = int.__new__(cls, value) # pyright: ignore + val._name_ = cls._extra_to_text(value, None) or f"{cls._prefix()}{value}" + val._value_ = value # pyright: ignore + return val + + @classmethod + def _check_value(cls, value): + max = cls._maximum() + if not isinstance(value, int): + raise TypeError + if value < 0 or value > max: + name = cls._short_name() + raise ValueError(f"{name} must be an int between >= 0 and <= {max}") + + @classmethod + def from_text(cls: Type[TIntEnum], text: str) -> TIntEnum: + text = text.upper() + try: + return cls[text] + except KeyError: + pass + value = cls._extra_from_text(text) + if value: + return value + prefix = cls._prefix() + if text.startswith(prefix) and text[len(prefix) :].isdigit(): + value = int(text[len(prefix) :]) + cls._check_value(value) + return cls(value) + raise cls._unknown_exception_class() + + @classmethod + def to_text(cls: Type[TIntEnum], value: int) -> str: + cls._check_value(value) + try: + text = cls(value).name + except ValueError: + text = None + text = cls._extra_to_text(value, text) + if text is None: + text = f"{cls._prefix()}{value}" + return text + + @classmethod + def make(cls: Type[TIntEnum], value: int | str) -> TIntEnum: + """Convert text or a value into an enumerated type, if possible. + + *value*, the ``int`` or ``str`` to convert. + + Raises a class-specific exception if a ``str`` is provided that + cannot be converted. + + Raises ``ValueError`` if the value is out of range. + + Returns an enumeration from the calling class corresponding to the + value, if one is defined, or an ``int`` otherwise. + """ + + if isinstance(value, str): + return cls.from_text(value) + cls._check_value(value) + return cls(value) + + @classmethod + def _maximum(cls): + raise NotImplementedError # pragma: no cover + + @classmethod + def _short_name(cls): + return cls.__name__.lower() + + @classmethod + def _prefix(cls) -> str: + return "" + + @classmethod + def _extra_from_text(cls, text: str) -> Any | None: # pylint: disable=W0613 + return None + + @classmethod + def _extra_to_text(cls, value, current_text): # pylint: disable=W0613 + return current_text + + @classmethod + def _unknown_exception_class(cls) -> Type[Exception]: + return ValueError diff --git a/venv/lib/python3.12/site-packages/dns/exception.py b/venv/lib/python3.12/site-packages/dns/exception.py new file mode 100644 index 0000000..c3d42ff --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/exception.py @@ -0,0 +1,169 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNS Exceptions. + +Dnspython modules may also define their own exceptions, which will +always be subclasses of ``DNSException``. +""" + + +from typing import Set + + +class DNSException(Exception): + """Abstract base class shared by all dnspython exceptions. + + It supports two basic modes of operation: + + a) Old/compatible mode is used if ``__init__`` was called with + empty *kwargs*. In compatible mode all *args* are passed + to the standard Python Exception class as before and all *args* are + printed by the standard ``__str__`` implementation. Class variable + ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` + if *args* is empty. + + b) New/parametrized mode is used if ``__init__`` was called with + non-empty *kwargs*. + In the new mode *args* must be empty and all kwargs must match + those set in class variable ``supp_kwargs``. All kwargs are stored inside + ``self.kwargs`` and used in a new ``__str__`` implementation to construct + a formatted message based on the ``fmt`` class variable, a ``string``. + + In the simplest case it is enough to override the ``supp_kwargs`` + and ``fmt`` class variables to get nice parametrized messages. + """ + + msg: str | None = None # non-parametrized message + supp_kwargs: Set[str] = set() # accepted parameters for _fmt_kwargs (sanity check) + fmt: str | None = None # message parametrized with results from _fmt_kwargs + + def __init__(self, *args, **kwargs): + self._check_params(*args, **kwargs) + if kwargs: + # This call to a virtual method from __init__ is ok in our usage + self.kwargs = self._check_kwargs(**kwargs) # lgtm[py/init-calls-subclass] + self.msg = str(self) + else: + self.kwargs = dict() # defined but empty for old mode exceptions + if self.msg is None: + # doc string is better implicit message than empty string + self.msg = self.__doc__ + if args: + super().__init__(*args) + else: + super().__init__(self.msg) + + def _check_params(self, *args, **kwargs): + """Old exceptions supported only args and not kwargs. + + For sanity we do not allow to mix old and new behavior.""" + if args or kwargs: + assert bool(args) != bool( + kwargs + ), "keyword arguments are mutually exclusive with positional args" + + def _check_kwargs(self, **kwargs): + if kwargs: + assert ( + set(kwargs.keys()) == self.supp_kwargs + ), f"following set of keyword args is required: {self.supp_kwargs}" + return kwargs + + def _fmt_kwargs(self, **kwargs): + """Format kwargs before printing them. + + Resulting dictionary has to have keys necessary for str.format call + on fmt class variable. 
+        """
+        fmtargs = {}
+        for kw, data in kwargs.items():
+            if isinstance(data, list | set):
+                # convert list of <someobj> to list of str(<someobj>)
+                fmtargs[kw] = list(map(str, data))
+                if len(fmtargs[kw]) == 1:
+                    # remove list brackets [] from single-item lists
+                    fmtargs[kw] = fmtargs[kw].pop()
+            else:
+                fmtargs[kw] = data
+        return fmtargs
+
+    def __str__(self):
+        if self.kwargs and self.fmt:
+            # provide custom message constructed from keyword arguments
+            fmtargs = self._fmt_kwargs(**self.kwargs)
+            return self.fmt.format(**fmtargs)
+        else:
+            # print *args directly in the same way as old DNSException
+            return super().__str__()
+
+
+class FormError(DNSException):
+    """DNS message is malformed."""
+
+
+class SyntaxError(DNSException):
+    """Text input is malformed."""
+
+
+class UnexpectedEnd(SyntaxError):
+    """Text input ended unexpectedly."""
+
+
+class TooBig(DNSException):
+    """The DNS message is too big."""
+
+
+class Timeout(DNSException):
+    """The DNS operation timed out."""
+
+    supp_kwargs = {"timeout"}
+    fmt = "The DNS operation timed out after {timeout:.3f} seconds"
+
+    # We do this as otherwise mypy complains about unexpected keyword argument
+    # idna_exception
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+
+class UnsupportedAlgorithm(DNSException):
+    """The DNSSEC algorithm is not supported."""
+
+
+class AlgorithmKeyMismatch(UnsupportedAlgorithm):
+    """The DNSSEC algorithm is not supported for the given key type."""
+
+
+class ValidationFailure(DNSException):
+    """The DNSSEC signature is invalid."""
+
+
+class DeniedByPolicy(DNSException):
+    """Denied by DNSSEC policy."""
+
+
+class ExceptionWrapper:
+    def __init__(self, exception_class):
+        self.exception_class = exception_class
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is not None and not isinstance(exc_val, self.exception_class):
+            raise self.exception_class(str(exc_val)) from exc_val
+        return False
diff --git a/venv/lib/python3.12/site-packages/dns/flags.py b/venv/lib/python3.12/site-packages/dns/flags.py
new file mode 100644
index 0000000..4c60be1
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/flags.py
@@ -0,0 +1,123 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2001-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
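+
+# A quick illustrative example (editorial note, not upstream dnspython) of
+# the helpers defined below:
+#
+#   import dns.flags
+#   dns.flags.from_text("QR RD RA")  # == 0x8180
+#   dns.flags.to_text(0x8180)        # == "QR RD RA"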
+ +"""DNS Message Flags.""" + +import enum +from typing import Any + +# Standard DNS flags + + +class Flag(enum.IntFlag): + #: Query Response + QR = 0x8000 + #: Authoritative Answer + AA = 0x0400 + #: Truncated Response + TC = 0x0200 + #: Recursion Desired + RD = 0x0100 + #: Recursion Available + RA = 0x0080 + #: Authentic Data + AD = 0x0020 + #: Checking Disabled + CD = 0x0010 + + +# EDNS flags + + +class EDNSFlag(enum.IntFlag): + #: DNSSEC answer OK + DO = 0x8000 + + +def _from_text(text: str, enum_class: Any) -> int: + flags = 0 + tokens = text.split() + for t in tokens: + flags |= enum_class[t.upper()] + return flags + + +def _to_text(flags: int, enum_class: Any) -> str: + text_flags = [] + for k, v in enum_class.__members__.items(): + if flags & v != 0: + text_flags.append(k) + return " ".join(text_flags) + + +def from_text(text: str) -> int: + """Convert a space-separated list of flag text values into a flags + value. + + Returns an ``int`` + """ + + return _from_text(text, Flag) + + +def to_text(flags: int) -> str: + """Convert a flags value into a space-separated list of flag text + values. + + Returns a ``str``. + """ + + return _to_text(flags, Flag) + + +def edns_from_text(text: str) -> int: + """Convert a space-separated list of EDNS flag text values into a EDNS + flags value. + + Returns an ``int`` + """ + + return _from_text(text, EDNSFlag) + + +def edns_to_text(flags: int) -> str: + """Convert an EDNS flags value into a space-separated list of EDNS flag + text values. + + Returns a ``str``. + """ + + return _to_text(flags, EDNSFlag) + + +### BEGIN generated Flag constants + +QR = Flag.QR +AA = Flag.AA +TC = Flag.TC +RD = Flag.RD +RA = Flag.RA +AD = Flag.AD +CD = Flag.CD + +### END generated Flag constants + +### BEGIN generated EDNSFlag constants + +DO = EDNSFlag.DO + +### END generated EDNSFlag constants diff --git a/venv/lib/python3.12/site-packages/dns/grange.py b/venv/lib/python3.12/site-packages/dns/grange.py new file mode 100644 index 0000000..8d366dc --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/grange.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2012-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS GENERATE range conversion.""" + +from typing import Tuple + +import dns.exception + + +def from_text(text: str) -> Tuple[int, int, int]: + """Convert the text form of a range in a ``$GENERATE`` statement to an + integer. + + *text*, a ``str``, the textual range in ``$GENERATE`` form. + + Returns a tuple of three ``int`` values ``(start, stop, step)``. 
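+
+    For example (illustrative), ``from_text("2-10/2")`` returns
+    ``(2, 10, 2)`` and ``from_text("5-20")`` returns ``(5, 20, 1)``.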
+ """ + + start = -1 + stop = -1 + step = 1 + cur = "" + state = 0 + # state 0 1 2 + # x - y / z + + if text and text[0] == "-": + raise dns.exception.SyntaxError("Start cannot be a negative number") + + for c in text: + if c == "-" and state == 0: + start = int(cur) + cur = "" + state = 1 + elif c == "/": + stop = int(cur) + cur = "" + state = 2 + elif c.isdigit(): + cur += c + else: + raise dns.exception.SyntaxError(f"Could not parse {c}") + + if state == 0: + raise dns.exception.SyntaxError("no stop value specified") + elif state == 1: + stop = int(cur) + else: + assert state == 2 + step = int(cur) + + assert step >= 1 + assert start >= 0 + if start > stop: + raise dns.exception.SyntaxError("start must be <= stop") + + return (start, stop, step) diff --git a/venv/lib/python3.12/site-packages/dns/immutable.py b/venv/lib/python3.12/site-packages/dns/immutable.py new file mode 100644 index 0000000..36b0362 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/immutable.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections.abc +from typing import Any, Callable + +from dns._immutable_ctx import immutable + + +@immutable +class Dict(collections.abc.Mapping): # lgtm[py/missing-equals] + def __init__( + self, + dictionary: Any, + no_copy: bool = False, + map_factory: Callable[[], collections.abc.MutableMapping] = dict, + ): + """Make an immutable dictionary from the specified dictionary. + + If *no_copy* is `True`, then *dictionary* will be wrapped instead + of copied. Only set this if you are sure there will be no external + references to the dictionary. + """ + if no_copy and isinstance(dictionary, collections.abc.MutableMapping): + self._odict = dictionary + else: + self._odict = map_factory() + self._odict.update(dictionary) + self._hash = None + + def __getitem__(self, key): + return self._odict.__getitem__(key) + + def __hash__(self): # pylint: disable=invalid-hash-returned + if self._hash is None: + h = 0 + for key in sorted(self._odict.keys()): + h ^= hash(key) + object.__setattr__(self, "_hash", h) + # this does return an int, but pylint doesn't figure that out + return self._hash + + def __len__(self): + return len(self._odict) + + def __iter__(self): + return iter(self._odict) + + +def constify(o: Any) -> Any: + """ + Convert mutable types to immutable types. + """ + if isinstance(o, bytearray): + return bytes(o) + if isinstance(o, tuple): + try: + hash(o) + return o + except Exception: + return tuple(constify(elt) for elt in o) + if isinstance(o, list): + return tuple(constify(elt) for elt in o) + if isinstance(o, dict): + cdict = dict() + for k, v in o.items(): + cdict[k] = constify(v) + return Dict(cdict, True) + return o diff --git a/venv/lib/python3.12/site-packages/dns/inet.py b/venv/lib/python3.12/site-packages/dns/inet.py new file mode 100644 index 0000000..765203b --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/inet.py @@ -0,0 +1,195 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Generic Internet address helper functions.""" + +import socket +from typing import Any, Tuple + +import dns.ipv4 +import dns.ipv6 + +# We assume that AF_INET and AF_INET6 are always defined. We keep +# these here for the benefit of any old code (unlikely though that +# is!). +AF_INET = socket.AF_INET +AF_INET6 = socket.AF_INET6 + + +def inet_pton(family: int, text: str) -> bytes: + """Convert the textual form of a network address into its binary form. + + *family* is an ``int``, the address family. + + *text* is a ``str``, the textual address. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``bytes``. + """ + + if family == AF_INET: + return dns.ipv4.inet_aton(text) + elif family == AF_INET6: + return dns.ipv6.inet_aton(text, True) + else: + raise NotImplementedError + + +def inet_ntop(family: int, address: bytes) -> str: + """Convert the binary form of a network address into its textual form. + + *family* is an ``int``, the address family. + + *address* is a ``bytes``, the network address in binary form. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``str``. + """ + + if family == AF_INET: + return dns.ipv4.inet_ntoa(address) + elif family == AF_INET6: + return dns.ipv6.inet_ntoa(address) + else: + raise NotImplementedError + + +def af_for_address(text: str) -> int: + """Determine the address family of a textual-form network address. + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns an ``int``. + """ + + try: + dns.ipv4.inet_aton(text) + return AF_INET + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return AF_INET6 + except Exception: + raise ValueError + + +def is_multicast(text: str) -> bool: + """Is the textual-form network address a multicast address? + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns a ``bool``. + """ + + try: + first = dns.ipv4.inet_aton(text)[0] + return first >= 224 and first <= 239 + except Exception: + try: + first = dns.ipv6.inet_aton(text, True)[0] + return first == 255 + except Exception: + raise ValueError + + +def is_address(text: str) -> bool: + """Is the specified string an IPv4 or IPv6 address? + + *text*, a ``str``, the textual address. + + Returns a ``bool``. + """ + + try: + dns.ipv4.inet_aton(text) + return True + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return True + except Exception: + return False + + +def low_level_address_tuple(high_tuple: Tuple[str, int], af: int | None = None) -> Any: + """Given a "high-level" address tuple, i.e. + an (address, port) return the appropriate "low-level" address tuple + suitable for use in socket calls. + + If an *af* other than ``None`` is provided, it is assumed the + address in the high-level tuple is valid and has that af. If af + is ``None``, then af_for_address will be called. 
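+
+    For example (illustrative), ``low_level_address_tuple(("127.0.0.1", 53))``
+    returns ``("127.0.0.1", 53)``, while ``low_level_address_tuple(("::1", 53))``
+    returns ``("::1", 53, 0, 0)``.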
+ """ + address, port = high_tuple + if af is None: + af = af_for_address(address) + if af == AF_INET: + return (address, port) + elif af == AF_INET6: + i = address.find("%") + if i < 0: + # no scope, shortcut! + return (address, port, 0, 0) + # try to avoid getaddrinfo() + addrpart = address[:i] + scope = address[i + 1 :] + if scope.isdigit(): + return (addrpart, port, 0, int(scope)) + try: + return (addrpart, port, 0, socket.if_nametoindex(scope)) + except AttributeError: # pragma: no cover (we can't really test this) + ai_flags = socket.AI_NUMERICHOST + ((*_, tup), *_) = socket.getaddrinfo(address, port, flags=ai_flags) + return tup + else: + raise NotImplementedError(f"unknown address family {af}") + + +def any_for_af(af): + """Return the 'any' address for the specified address family.""" + if af == socket.AF_INET: + return "0.0.0.0" + elif af == socket.AF_INET6: + return "::" + raise NotImplementedError(f"unknown address family {af}") + + +def canonicalize(text: str) -> str: + """Verify that *address* is a valid text form IPv4 or IPv6 address and return its + canonical text form. IPv6 addresses with scopes are rejected. + + *text*, a ``str``, the address in textual form. + + Raises ``ValueError`` if the text is not valid. + """ + try: + return dns.ipv6.canonicalize(text) + except Exception: + try: + return dns.ipv4.canonicalize(text) + except Exception: + raise ValueError diff --git a/venv/lib/python3.12/site-packages/dns/ipv4.py b/venv/lib/python3.12/site-packages/dns/ipv4.py new file mode 100644 index 0000000..a7161bc --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/ipv4.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv4 helper functions.""" + +import struct + +import dns.exception + + +def inet_ntoa(address: bytes) -> str: + """Convert an IPv4 address in binary form to text form. + + *address*, a ``bytes``, the IPv4 address in binary form. + + Returns a ``str``. + """ + + if len(address) != 4: + raise dns.exception.SyntaxError + return f"{address[0]}.{address[1]}.{address[2]}.{address[3]}" + + +def inet_aton(text: str | bytes) -> bytes: + """Convert an IPv4 address in text form to binary form. + + *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. + + Returns a ``bytes``. 
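+
+    For example (illustrative), ``inet_aton("10.0.0.1")`` returns the four
+    bytes ``bytes([10, 0, 0, 1])``.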
+ """ + + if not isinstance(text, bytes): + btext = text.encode() + else: + btext = text + parts = btext.split(b".") + if len(parts) != 4: + raise dns.exception.SyntaxError + for part in parts: + if not part.isdigit(): + raise dns.exception.SyntaxError + if len(part) > 1 and part[0] == ord("0"): + # No leading zeros + raise dns.exception.SyntaxError + try: + b = [int(part) for part in parts] + return struct.pack("BBBB", *b) + except Exception: + raise dns.exception.SyntaxError + + +def canonicalize(text: str | bytes) -> str: + """Verify that *address* is a valid text form IPv4 address and return its + canonical text form. + + *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. + + Raises ``dns.exception.SyntaxError`` if the text is not valid. + """ + # Note that inet_aton() only accepts canonial form, but we still run through + # inet_ntoa() to ensure the output is a str. + return inet_ntoa(inet_aton(text)) diff --git a/venv/lib/python3.12/site-packages/dns/ipv6.py b/venv/lib/python3.12/site-packages/dns/ipv6.py new file mode 100644 index 0000000..eaa0f6c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/ipv6.py @@ -0,0 +1,217 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv6 helper functions.""" + +import binascii +import re +from typing import List + +import dns.exception +import dns.ipv4 + +_leading_zero = re.compile(r"0+([0-9a-f]+)") + + +def inet_ntoa(address: bytes) -> str: + """Convert an IPv6 address in binary form to text form. + + *address*, a ``bytes``, the IPv6 address in binary form. + + Raises ``ValueError`` if the address isn't 16 bytes long. + Returns a ``str``. + """ + + if len(address) != 16: + raise ValueError("IPv6 addresses are 16 bytes long") + hex = binascii.hexlify(address) + chunks = [] + i = 0 + l = len(hex) + while i < l: + chunk = hex[i : i + 4].decode() + # strip leading zeros. 
+        # we do this with a regex instead of lstrip() because lstrip()
+        # did not accept a chars argument until Python 2.2.2.
+        m = _leading_zero.match(chunk)
+        if m is not None:
+            chunk = m.group(1)
+        chunks.append(chunk)
+        i += 4
+    #
+    # Compress the longest subsequence of 0-value chunks to ::
+    #
+    best_start = 0
+    best_len = 0
+    start = -1
+    last_was_zero = False
+    for i in range(8):
+        if chunks[i] != "0":
+            if last_was_zero:
+                end = i
+                current_len = end - start
+                if current_len > best_len:
+                    best_start = start
+                    best_len = current_len
+                last_was_zero = False
+        elif not last_was_zero:
+            start = i
+            last_was_zero = True
+    if last_was_zero:
+        end = 8
+        current_len = end - start
+        if current_len > best_len:
+            best_start = start
+            best_len = current_len
+    if best_len > 1:
+        if best_start == 0 and (best_len == 6 or best_len == 5 and chunks[5] == "ffff"):
+            # We have an embedded IPv4 address
+            if best_len == 6:
+                prefix = "::"
+            else:
+                prefix = "::ffff:"
+            thex = prefix + dns.ipv4.inet_ntoa(address[12:])
+        else:
+            thex = (
+                ":".join(chunks[:best_start])
+                + "::"
+                + ":".join(chunks[best_start + best_len :])
+            )
+    else:
+        thex = ":".join(chunks)
+    return thex
+
+
+_v4_ending = re.compile(rb"(.*):(\d+\.\d+\.\d+\.\d+)$")
+_colon_colon_start = re.compile(rb"::.*")
+_colon_colon_end = re.compile(rb".*::$")
+
+
+def inet_aton(text: str | bytes, ignore_scope: bool = False) -> bytes:
+    """Convert an IPv6 address in text form to binary form.
+
+    *text*, a ``str`` or ``bytes``, the IPv6 address in textual form.
+
+    *ignore_scope*, a ``bool``. If ``True``, a scope will be ignored.
+    If ``False``, the default, it is an error for a scope to be present.
+
+    Returns a ``bytes``.
+    """
+
+    #
+    # Our aim here is not something fast; we just want something that works.
+    #
+    if not isinstance(text, bytes):
+        btext = text.encode()
+    else:
+        btext = text
+
+    if ignore_scope:
+        parts = btext.split(b"%")
+        l = len(parts)
+        if l == 2:
+            btext = parts[0]
+        elif l > 2:
+            raise dns.exception.SyntaxError
+
+    if btext == b"":
+        raise dns.exception.SyntaxError
+    elif btext.endswith(b":") and not btext.endswith(b"::"):
+        raise dns.exception.SyntaxError
+    elif btext.startswith(b":") and not btext.startswith(b"::"):
+        raise dns.exception.SyntaxError
+    elif btext == b"::":
+        btext = b"0::"
+    #
+    # Get rid of the icky dot-quad syntax if we have it.
+    #
+    m = _v4_ending.match(btext)
+    if m is not None:
+        b = dns.ipv4.inet_aton(m.group(2))
+        btext = (
+            f"{m.group(1).decode()}:{b[0]:02x}{b[1]:02x}:{b[2]:02x}{b[3]:02x}"
+        ).encode()
+    #
+    # Try to turn '::<whatever>' into ':<whatever>'; if no match try to
+    # turn '<whatever>::' into '<whatever>:'
+    #
+    m = _colon_colon_start.match(btext)
+    if m is not None:
+        btext = btext[1:]
+    else:
+        m = _colon_colon_end.match(btext)
+        if m is not None:
+            btext = btext[:-1]
+    #
+    # Now canonicalize into 8 chunks of 4 hex digits each
+    #
+    chunks = btext.split(b":")
+    l = len(chunks)
+    if l > 8:
+        raise dns.exception.SyntaxError
+    seen_empty = False
+    canonical: List[bytes] = []
+    for c in chunks:
+        if c == b"":
+            if seen_empty:
+                raise dns.exception.SyntaxError
+            seen_empty = True
+            for _ in range(0, 8 - l + 1):
+                canonical.append(b"0000")
+        else:
+            lc = len(c)
+            if lc > 4:
+                raise dns.exception.SyntaxError
+            if lc != 4:
+                c = (b"0" * (4 - lc)) + c
+            canonical.append(c)
+    if l < 8 and not seen_empty:
+        raise dns.exception.SyntaxError
+    btext = b"".join(canonical)
+
+    #
+    # Finally we can go to binary.
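+    # (Illustration: at this point every accepted input is exactly 32 hex
+    # digits; e.g. "::1" has been expanded to seven "0000" groups followed
+    # by "0001", so unhexlify() yields the full 16-byte address.)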
+ # + try: + return binascii.unhexlify(btext) + except (binascii.Error, TypeError): + raise dns.exception.SyntaxError + + +_mapped_prefix = b"\x00" * 10 + b"\xff\xff" + + +def is_mapped(address: bytes) -> bool: + """Is the specified address a mapped IPv4 address? + + *address*, a ``bytes`` is an IPv6 address in binary form. + + Returns a ``bool``. + """ + + return address.startswith(_mapped_prefix) + + +def canonicalize(text: str | bytes) -> str: + """Verify that *address* is a valid text form IPv6 address and return its + canonical text form. Addresses with scopes are rejected. + + *text*, a ``str`` or ``bytes``, the IPv6 address in textual form. + + Raises ``dns.exception.SyntaxError`` if the text is not valid. + """ + return inet_ntoa(inet_aton(text)) diff --git a/venv/lib/python3.12/site-packages/dns/message.py b/venv/lib/python3.12/site-packages/dns/message.py new file mode 100644 index 0000000..bbfccfc --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/message.py @@ -0,0 +1,1954 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Messages""" + +import contextlib +import enum +import io +import time +from typing import Any, Dict, List, Tuple, cast + +import dns.edns +import dns.entropy +import dns.enum +import dns.exception +import dns.flags +import dns.name +import dns.opcode +import dns.rcode +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.OPT +import dns.rdtypes.ANY.SOA +import dns.rdtypes.ANY.TSIG +import dns.renderer +import dns.rrset +import dns.tokenizer +import dns.tsig +import dns.ttl +import dns.wire + + +class ShortHeader(dns.exception.FormError): + """The DNS packet passed to from_wire() is too short.""" + + +class TrailingJunk(dns.exception.FormError): + """The DNS packet passed to from_wire() has extra junk at the end of it.""" + + +class UnknownHeaderField(dns.exception.DNSException): + """The header field name was not recognized when converting from text + into a message.""" + + +class BadEDNS(dns.exception.FormError): + """An OPT record occurred somewhere other than + the additional data section.""" + + +class BadTSIG(dns.exception.FormError): + """A TSIG record occurred somewhere other than the end of + the additional data section.""" + + +class UnknownTSIGKey(dns.exception.DNSException): + """A TSIG with an unknown key was received.""" + + +class Truncated(dns.exception.DNSException): + """The truncated flag is set.""" + + supp_kwargs = {"message"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def message(self): + """As much of the message as could be processed. 
+
+        Returns a ``dns.message.Message``.
+        """
+        return self.kwargs["message"]
+
+
+class NotQueryResponse(dns.exception.DNSException):
+    """Message is not a response to a query."""
+
+
+class ChainTooLong(dns.exception.DNSException):
+    """The CNAME chain is too long."""
+
+
+class AnswerForNXDOMAIN(dns.exception.DNSException):
+    """The rcode is NXDOMAIN but an answer was found."""
+
+
+class NoPreviousName(dns.exception.SyntaxError):
+    """No previous name was known."""
+
+
+class MessageSection(dns.enum.IntEnum):
+    """Message sections"""
+
+    QUESTION = 0
+    ANSWER = 1
+    AUTHORITY = 2
+    ADDITIONAL = 3
+
+    @classmethod
+    def _maximum(cls):
+        return 3
+
+
+class MessageError:
+    def __init__(self, exception: Exception, offset: int):
+        self.exception = exception
+        self.offset = offset
+
+
+DEFAULT_EDNS_PAYLOAD = 1232
+MAX_CHAIN = 16
+
+IndexKeyType = Tuple[
+    int,
+    dns.name.Name,
+    dns.rdataclass.RdataClass,
+    dns.rdatatype.RdataType,
+    dns.rdatatype.RdataType | None,
+    dns.rdataclass.RdataClass | None,
+]
+IndexType = Dict[IndexKeyType, dns.rrset.RRset]
+SectionType = int | str | List[dns.rrset.RRset]
+
+
+class Message:
+    """A DNS message."""
+
+    _section_enum = MessageSection
+
+    def __init__(self, id: int | None = None):
+        if id is None:
+            self.id = dns.entropy.random_16()
+        else:
+            self.id = id
+        self.flags = 0
+        self.sections: List[List[dns.rrset.RRset]] = [[], [], [], []]
+        self.opt: dns.rrset.RRset | None = None
+        self.request_payload = 0
+        self.pad = 0
+        self.keyring: Any = None
+        self.tsig: dns.rrset.RRset | None = None
+        self.want_tsig_sign = False
+        self.request_mac = b""
+        self.xfr = False
+        self.origin: dns.name.Name | None = None
+        self.tsig_ctx: Any | None = None
+        self.index: IndexType = {}
+        self.errors: List[MessageError] = []
+        self.time = 0.0
+        self.wire: bytes | None = None
+
+    @property
+    def question(self) -> List[dns.rrset.RRset]:
+        """The question section."""
+        return self.sections[0]
+
+    @question.setter
+    def question(self, v):
+        self.sections[0] = v
+
+    @property
+    def answer(self) -> List[dns.rrset.RRset]:
+        """The answer section."""
+        return self.sections[1]
+
+    @answer.setter
+    def answer(self, v):
+        self.sections[1] = v
+
+    @property
+    def authority(self) -> List[dns.rrset.RRset]:
+        """The authority section."""
+        return self.sections[2]
+
+    @authority.setter
+    def authority(self, v):
+        self.sections[2] = v
+
+    @property
+    def additional(self) -> List[dns.rrset.RRset]:
+        """The additional data section."""
+        return self.sections[3]
+
+    @additional.setter
+    def additional(self, v):
+        self.sections[3] = v
+
+    def __repr__(self):
+        return "<DNS message, ID " + repr(self.id) + ">"
+
+    def __str__(self):
+        return self.to_text()
+
+    def to_text(
+        self,
+        origin: dns.name.Name | None = None,
+        relativize: bool = True,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert the message to text.
+
+        The *origin*, *relativize*, and any other keyword
+        arguments are passed to the RRset ``to_text()`` method.
+
+        Returns a ``str``.
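+
+        For example, a freshly created query renders roughly as (the id is
+        random)::
+
+            id 12345
+            opcode QUERY
+            rcode NOERROR
+            flags RD
+            ;QUESTION
+            example.com. IN A
+            ;ANSWER
+            ;AUTHORITY
+            ;ADDITIONAL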
+ """ + + s = io.StringIO() + s.write(f"id {self.id}\n") + s.write(f"opcode {dns.opcode.to_text(self.opcode())}\n") + s.write(f"rcode {dns.rcode.to_text(self.rcode())}\n") + s.write(f"flags {dns.flags.to_text(self.flags)}\n") + if self.edns >= 0: + s.write(f"edns {self.edns}\n") + if self.ednsflags != 0: + s.write(f"eflags {dns.flags.edns_to_text(self.ednsflags)}\n") + s.write(f"payload {self.payload}\n") + for opt in self.options: + s.write(f"option {opt.to_text()}\n") + for name, which in self._section_enum.__members__.items(): + s.write(f";{name}\n") + for rrset in self.section_from_number(which): + s.write(rrset.to_text(origin, relativize, **kw)) + s.write("\n") + if self.tsig is not None: + s.write(self.tsig.to_text(origin, relativize, **kw)) + s.write("\n") + # + # We strip off the final \n so the caller can print the result without + # doing weird things to get around eccentricities in Python print + # formatting + # + return s.getvalue()[:-1] + + def __eq__(self, other): + """Two messages are equal if they have the same content in the + header, question, answer, and authority sections. + + Returns a ``bool``. + """ + + if not isinstance(other, Message): + return False + if self.id != other.id: + return False + if self.flags != other.flags: + return False + for i, section in enumerate(self.sections): + other_section = other.sections[i] + for n in section: + if n not in other_section: + return False + for n in other_section: + if n not in section: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def is_response(self, other: "Message") -> bool: + """Is *other*, also a ``dns.message.Message``, a response to this + message? + + Returns a ``bool``. + """ + + if ( + other.flags & dns.flags.QR == 0 + or self.id != other.id + or dns.opcode.from_flags(self.flags) != dns.opcode.from_flags(other.flags) + ): + return False + if other.rcode() in { + dns.rcode.FORMERR, + dns.rcode.SERVFAIL, + dns.rcode.NOTIMP, + dns.rcode.REFUSED, + }: + # We don't check the question section in these cases if + # the other question section is empty, even though they + # still really ought to have a question section. + if len(other.question) == 0: + return True + if dns.opcode.is_update(self.flags): + # This is assuming the "sender doesn't include anything + # from the update", but we don't care to check the other + # case, which is that all the sections are returned and + # identical. + return True + for n in self.question: + if n not in other.question: + return False + for n in other.question: + if n not in self.question: + return False + return True + + def section_number(self, section: List[dns.rrset.RRset]) -> int: + """Return the "section number" of the specified section for use + in indexing. + + *section* is one of the section attributes of this message. + + Raises ``ValueError`` if the section isn't known. + + Returns an ``int``. + """ + + for i, our_section in enumerate(self.sections): + if section is our_section: + return self._section_enum(i) + raise ValueError("unknown section") + + def section_from_number(self, number: int) -> List[dns.rrset.RRset]: + """Return the section list associated with the specified section + number. + + *number* is a section number `int` or the text form of a section + name. + + Raises ``ValueError`` if the section isn't known. + + Returns a ``list``. 
+ """ + + section = self._section_enum.make(number) + return self.sections[section] + + def find_rrset( + self, + section: SectionType, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: dns.rdataclass.RdataClass | None = None, + create: bool = False, + force_unique: bool = False, + idna_codec: dns.name.IDNACodec | None = None, + ) -> dns.rrset.RRset: + """Find the RRset with the given attributes in the specified section. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.find_rrset(my_message.answer, name, rdclass, rdtype) + my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) + my_message.find_rrset("ANSWER", name, rdclass, rdtype) + + *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. + + *rdclass*, an ``int`` or ``str``, the class of the RRset. + + *rdtype*, an ``int`` or ``str``, the type of the RRset. + + *covers*, an ``int`` or ``str``, the covers value of the RRset. + The default is ``dns.rdatatype.NONE``. + + *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the + RRset. The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Raises ``KeyError`` if the RRset was not found and create was + ``False``. + + Returns a ``dns.rrset.RRset object``. 
+ """ + + if isinstance(section, int): + section_number = section + section = self.section_from_number(section_number) + elif isinstance(section, str): + section_number = self._section_enum.from_text(section) + section = self.section_from_number(section_number) + else: + section_number = self.section_number(section) + if isinstance(name, str): + name = dns.name.from_text(name, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + covers = dns.rdatatype.RdataType.make(covers) + if deleting is not None: + deleting = dns.rdataclass.RdataClass.make(deleting) + key = (section_number, name, rdclass, rdtype, covers, deleting) + if not force_unique: + if self.index is not None: + rrset = self.index.get(key) + if rrset is not None: + return rrset + else: + for rrset in section: + if rrset.full_match(name, rdclass, rdtype, covers, deleting): + return rrset + if not create: + raise KeyError + rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) + section.append(rrset) + if self.index is not None: + self.index[key] = rrset + return rrset + + def get_rrset( + self, + section: SectionType, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: dns.rdataclass.RdataClass | None = None, + create: bool = False, + force_unique: bool = False, + idna_codec: dns.name.IDNACodec | None = None, + ) -> dns.rrset.RRset | None: + """Get the RRset with the given attributes in the specified section. + + If the RRset is not found, None is returned. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.get_rrset(my_message.answer, name, rdclass, rdtype) + my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) + my_message.get_rrset("ANSWER", name, rdclass, rdtype) + + *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. + + *rdclass*, an ``int`` or ``str``, the class of the RRset. + + *rdtype*, an ``int`` or ``str``, the type of the RRset. + + *covers*, an ``int`` or ``str``, the covers value of the RRset. + The default is ``dns.rdatatype.NONE``. + + *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the + RRset. The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.rrset.RRset object`` or ``None``. + """ + + try: + rrset = self.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + create, + force_unique, + idna_codec, + ) + except KeyError: + rrset = None + return rrset + + def section_count(self, section: SectionType) -> int: + """Returns the number of records in the specified section. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to count. 
+        For example::
+
+            my_message.section_count(my_message.answer)
+            my_message.section_count(dns.message.ANSWER)
+            my_message.section_count("ANSWER")
+        """
+
+        if isinstance(section, int):
+            section_number = section
+            section = self.section_from_number(section_number)
+        elif isinstance(section, str):
+            section_number = self._section_enum.from_text(section)
+            section = self.section_from_number(section_number)
+        else:
+            section_number = self.section_number(section)
+        count = sum(max(1, len(rrs)) for rrs in section)
+        if section_number == MessageSection.ADDITIONAL:
+            if self.opt is not None:
+                count += 1
+            if self.tsig is not None:
+                count += 1
+        return count
+
+    def _compute_opt_reserve(self) -> int:
+        """Compute the size required for the OPT RR, padding excluded"""
+        if not self.opt:
+            return 0
+        # 1 byte for the root name, 10 for the standard RR fields
+        size = 11
+        # This would be more efficient if options had a size() method, but we won't
+        # worry about that for now. We also don't worry if there is an existing padding
+        # option, as it is unlikely and probably harmless, as the worst case is that we
+        # may add another, and this seems to be legal.
+        opt_rdata = cast(dns.rdtypes.ANY.OPT.OPT, self.opt[0])
+        for option in opt_rdata.options:
+            wire = option.to_wire()
+            # We add 4 here to account for the option type and length
+            size += len(wire) + 4
+        if self.pad:
+            # Padding will be added, so again add the option type and length.
+            size += 4
+        return size
+
+    def _compute_tsig_reserve(self) -> int:
+        """Compute the size required for the TSIG RR"""
+        # This would be more efficient if TSIGs had a size method, but we won't
+        # worry about that for now. Also, we can't really cope with the potential
+        # compressibility of the TSIG owner name, so we estimate with the uncompressed
+        # size. We will disable compression when TSIG and padding are both active
+        # so that the padding comes out right.
+        if not self.tsig:
+            return 0
+        f = io.BytesIO()
+        self.tsig.to_wire(f)
+        return len(f.getvalue())
+
+    def to_wire(
+        self,
+        origin: dns.name.Name | None = None,
+        max_size: int = 0,
+        multi: bool = False,
+        tsig_ctx: Any | None = None,
+        prepend_length: bool = False,
+        prefer_truncation: bool = False,
+        **kw: Dict[str, Any],
+    ) -> bytes:
+        """Return the message in DNS compressed wire format.
+
+        Additional keyword arguments are passed to the RRset ``to_wire()``
+        method.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended
+        to any relative names. If ``None``, and the message has an origin
+        attribute that is not ``None``, then it will be used.
+
+        *max_size*, an ``int``, the maximum size of the wire format
+        output; default is 0, which means "the message's request
+        payload, if nonzero, or 65535".
+
+        *multi*, a ``bool``, should be set to ``True`` if this message is
+        part of a multiple message sequence.
+
+        *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the
+        ongoing TSIG context, used when signing zone transfers.
+
+        *prepend_length*, a ``bool``, should be set to ``True`` if the caller
+        wants the message length prepended to the message itself. This is
+        useful for messages sent over TCP, TLS (DoT), or QUIC (DoQ).
+
+        *prefer_truncation*, a ``bool``, should be set to ``True`` if the caller
+        wants the message to be truncated if it would otherwise exceed the
+        maximum length. If the truncation occurs before the additional section,
+        the TC bit will be set.
+
+        Raises ``dns.exception.TooBig`` if *max_size* was exceeded.
+
+        Returns a ``bytes``.
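+
+        For example, a minimal sketch for sending over TCP, where the 2-byte
+        length prefix is required (``msg`` is the message to send)::
+
+            wire = msg.to_wire(prepend_length=True)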
+ """ + + if origin is None and self.origin is not None: + origin = self.origin + if max_size == 0: + if self.request_payload != 0: + max_size = self.request_payload + else: + max_size = 65535 + if max_size < 512: + max_size = 512 + elif max_size > 65535: + max_size = 65535 + r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) + opt_reserve = self._compute_opt_reserve() + r.reserve(opt_reserve) + tsig_reserve = self._compute_tsig_reserve() + r.reserve(tsig_reserve) + try: + for rrset in self.question: + r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) + for rrset in self.answer: + r.add_rrset(dns.renderer.ANSWER, rrset, **kw) + for rrset in self.authority: + r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) + for rrset in self.additional: + r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) + except dns.exception.TooBig: + if prefer_truncation: + if r.section < dns.renderer.ADDITIONAL: + r.flags |= dns.flags.TC + else: + raise + r.release_reserved() + if self.opt is not None: + r.add_opt(self.opt, self.pad, opt_reserve, tsig_reserve) + r.write_header() + if self.tsig is not None: + if self.want_tsig_sign: + (new_tsig, ctx) = dns.tsig.sign( + r.get_wire(), + self.keyring, + self.tsig[0], + int(time.time()), + self.request_mac, + tsig_ctx, + multi, + ) + self.tsig.clear() + self.tsig.add(new_tsig) + if multi: + self.tsig_ctx = ctx + r.add_rrset(dns.renderer.ADDITIONAL, self.tsig) + r.write_header() + wire = r.get_wire() + self.wire = wire + if prepend_length: + wire = len(wire).to_bytes(2, "big") + wire + return wire + + @staticmethod + def _make_tsig( + keyname, algorithm, time_signed, fudge, mac, original_id, error, other + ): + tsig = dns.rdtypes.ANY.TSIG.TSIG( + dns.rdataclass.ANY, + dns.rdatatype.TSIG, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) + return dns.rrset.from_rdata(keyname, 0, tsig) + + def use_tsig( + self, + keyring: Any, + keyname: dns.name.Name | str | None = None, + fudge: int = 300, + original_id: int | None = None, + tsig_error: int = 0, + other_data: bytes = b"", + algorithm: dns.name.Name | str = dns.tsig.default_algorithm, + ) -> None: + """When sending, a TSIG signature using the specified key + should be added. + + *keyring*, a ``dict``, ``callable`` or ``dns.tsig.Key``, is either + the TSIG keyring or key to use. + + The format of a keyring dict is a mapping from TSIG key name, as + ``dns.name.Name`` to ``dns.tsig.Key`` or a TSIG secret, a ``bytes``. + If a ``dict`` *keyring* is specified but a *keyname* is not, the key + used will be the first key in the *keyring*. Note that the order of + keys in a dictionary is not defined, so applications should supply a + keyname when a ``dict`` keyring is used, unless they know the keyring + contains only one key. If a ``callable`` keyring is specified, the + callable will be called with the message and the keyname, and is + expected to return a key. + + *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of + this TSIG key to use; defaults to ``None``. If *keyring* is a + ``dict``, the key must be defined in it. If *keyring* is a + ``dns.tsig.Key``, this is ignored. + + *fudge*, an ``int``, the TSIG time fudge. + + *original_id*, an ``int``, the TSIG original id. If ``None``, + the message's id is used. + + *tsig_error*, an ``int``, the TSIG error code. + + *other_data*, a ``bytes``, the TSIG other data. + + *algorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. This is + only used if *keyring* is a ``dict``, and the key entry is a ``bytes``. 
+ """ + + if isinstance(keyring, dns.tsig.Key): + key = keyring + keyname = key.name + elif callable(keyring): + key = keyring(self, keyname) + else: + if isinstance(keyname, str): + keyname = dns.name.from_text(keyname) + if keyname is None: + keyname = next(iter(keyring)) + key = keyring[keyname] + if isinstance(key, bytes): + key = dns.tsig.Key(keyname, key, algorithm) + self.keyring = key + if original_id is None: + original_id = self.id + self.tsig = self._make_tsig( + keyname, + self.keyring.algorithm, + 0, + fudge, + b"\x00" * dns.tsig.mac_sizes[self.keyring.algorithm], + original_id, + tsig_error, + other_data, + ) + self.want_tsig_sign = True + + @property + def keyname(self) -> dns.name.Name | None: + if self.tsig: + return self.tsig.name + else: + return None + + @property + def keyalgorithm(self) -> dns.name.Name | None: + if self.tsig: + rdata = cast(dns.rdtypes.ANY.TSIG.TSIG, self.tsig[0]) + return rdata.algorithm + else: + return None + + @property + def mac(self) -> bytes | None: + if self.tsig: + rdata = cast(dns.rdtypes.ANY.TSIG.TSIG, self.tsig[0]) + return rdata.mac + else: + return None + + @property + def tsig_error(self) -> int | None: + if self.tsig: + rdata = cast(dns.rdtypes.ANY.TSIG.TSIG, self.tsig[0]) + return rdata.error + else: + return None + + @property + def had_tsig(self) -> bool: + return bool(self.tsig) + + @staticmethod + def _make_opt(flags=0, payload=DEFAULT_EDNS_PAYLOAD, options=None): + opt = dns.rdtypes.ANY.OPT.OPT(payload, dns.rdatatype.OPT, options or ()) + return dns.rrset.from_rdata(dns.name.root, int(flags), opt) + + def use_edns( + self, + edns: int | bool | None = 0, + ednsflags: int = 0, + payload: int = DEFAULT_EDNS_PAYLOAD, + request_payload: int | None = None, + options: List[dns.edns.Option] | None = None, + pad: int = 0, + ) -> None: + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying ``None``, ``False``, + or ``-1`` means "do not use EDNS", and in this case the other parameters are + ignored. Specifying ``True`` is equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the maximum + size of UDP datagram the sender can handle. I.e. how big a response to this + message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when sending this + message. If not specified, defaults to the value of *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS options. + + *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add + padding bytes to make the message size a multiple of *pad*. Note that if + padding is non-zero, an EDNS PADDING option will always be added to the + message. 
+ """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + if edns < 0: + self.opt = None + self.request_payload = 0 + else: + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= edns << 16 + if options is None: + options = [] + self.opt = self._make_opt(ednsflags, payload, options) + if request_payload is None: + request_payload = payload + self.request_payload = request_payload + if pad < 0: + raise ValueError("pad must be non-negative") + self.pad = pad + + @property + def edns(self) -> int: + if self.opt: + return (self.ednsflags & 0xFF0000) >> 16 + else: + return -1 + + @property + def ednsflags(self) -> int: + if self.opt: + return self.opt.ttl + else: + return 0 + + @ednsflags.setter + def ednsflags(self, v): + if self.opt: + self.opt.ttl = v + elif v: + self.opt = self._make_opt(v) + + @property + def payload(self) -> int: + if self.opt: + rdata = cast(dns.rdtypes.ANY.OPT.OPT, self.opt[0]) + return rdata.payload + else: + return 0 + + @property + def options(self) -> Tuple: + if self.opt: + rdata = cast(dns.rdtypes.ANY.OPT.OPT, self.opt[0]) + return rdata.options + else: + return () + + def want_dnssec(self, wanted: bool = True) -> None: + """Enable or disable 'DNSSEC desired' flag in requests. + + *wanted*, a ``bool``. If ``True``, then DNSSEC data is + desired in the response, EDNS is enabled if required, and then + the DO bit is set. If ``False``, the DO bit is cleared if + EDNS is enabled. + """ + + if wanted: + self.ednsflags |= dns.flags.DO + elif self.opt: + self.ednsflags &= ~int(dns.flags.DO) + + def rcode(self) -> dns.rcode.Rcode: + """Return the rcode. + + Returns a ``dns.rcode.Rcode``. + """ + return dns.rcode.from_flags(int(self.flags), int(self.ednsflags)) + + def set_rcode(self, rcode: dns.rcode.Rcode) -> None: + """Set the rcode. + + *rcode*, a ``dns.rcode.Rcode``, is the rcode to set. + """ + (value, evalue) = dns.rcode.to_flags(rcode) + self.flags &= 0xFFF0 + self.flags |= value + self.ednsflags &= 0x00FFFFFF + self.ednsflags |= evalue + + def opcode(self) -> dns.opcode.Opcode: + """Return the opcode. + + Returns a ``dns.opcode.Opcode``. + """ + return dns.opcode.from_flags(int(self.flags)) + + def set_opcode(self, opcode: dns.opcode.Opcode) -> None: + """Set the opcode. + + *opcode*, a ``dns.opcode.Opcode``, is the opcode to set. + """ + self.flags &= 0x87FF + self.flags |= dns.opcode.to_flags(opcode) + + def get_options(self, otype: dns.edns.OptionType) -> List[dns.edns.Option]: + """Return the list of options of the specified type.""" + return [option for option in self.options if option.otype == otype] + + def extended_errors(self) -> List[dns.edns.EDEOption]: + """Return the list of Extended DNS Error (EDE) options in the message""" + return cast(List[dns.edns.EDEOption], self.get_options(dns.edns.OptionType.EDE)) + + def _get_one_rr_per_rrset(self, value): + # What the caller picked is fine. 
+ return value + + # pylint: disable=unused-argument + + def _parse_rr_header(self, section, name, rdclass, rdtype): + return (rdclass, rdtype, None, False) + + # pylint: enable=unused-argument + + def _parse_special_rr_header(self, section, count, position, name, rdclass, rdtype): + if rdtype == dns.rdatatype.OPT: + if ( + section != MessageSection.ADDITIONAL + or self.opt + or name != dns.name.root + ): + raise BadEDNS + elif rdtype == dns.rdatatype.TSIG: + if ( + section != MessageSection.ADDITIONAL + or rdclass != dns.rdatatype.ANY + or position != count - 1 + ): + raise BadTSIG + return (rdclass, rdtype, None, False) + + +class ChainingResult: + """The result of a call to dns.message.QueryMessage.resolve_chaining(). + + The ``answer`` attribute is the answer RRSet, or ``None`` if it doesn't + exist. + + The ``canonical_name`` attribute is the canonical name after all + chaining has been applied (this is the same name as ``rrset.name`` in cases + where rrset is not ``None``). + + The ``minimum_ttl`` attribute is the minimum TTL, i.e. the TTL to + use if caching the data. It is the smallest of all the CNAME TTLs + and either the answer TTL if it exists or the SOA TTL and SOA + minimum values for negative answers. + + The ``cnames`` attribute is a list of all the CNAME RRSets followed to + get to the canonical name. + """ + + def __init__( + self, + canonical_name: dns.name.Name, + answer: dns.rrset.RRset | None, + minimum_ttl: int, + cnames: List[dns.rrset.RRset], + ): + self.canonical_name = canonical_name + self.answer = answer + self.minimum_ttl = minimum_ttl + self.cnames = cnames + + +class QueryMessage(Message): + def resolve_chaining(self) -> ChainingResult: + """Follow the CNAME chain in the response to determine the answer + RRset. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + + Returns a ChainingResult object. + """ + if self.flags & dns.flags.QR == 0: + raise NotQueryResponse + if len(self.question) != 1: + raise dns.exception.FormError + question = self.question[0] + qname = question.name + min_ttl = dns.ttl.MAX_TTL + answer = None + count = 0 + cnames = [] + while count < MAX_CHAIN: + try: + answer = self.find_rrset( + self.answer, qname, question.rdclass, question.rdtype + ) + min_ttl = min(min_ttl, answer.ttl) + break + except KeyError: + if question.rdtype != dns.rdatatype.CNAME: + try: + crrset = self.find_rrset( + self.answer, qname, question.rdclass, dns.rdatatype.CNAME + ) + cnames.append(crrset) + min_ttl = min(min_ttl, crrset.ttl) + for rd in crrset: + qname = rd.target + break + count += 1 + continue + except KeyError: + # Exit the chaining loop + break + else: + # Exit the chaining loop + break + if count >= MAX_CHAIN: + raise ChainTooLong + if self.rcode() == dns.rcode.NXDOMAIN and answer is not None: + raise AnswerForNXDOMAIN + if answer is None: + # Further minimize the TTL with NCACHE. + auname = qname + while True: + # Look for an SOA RR whose owner name is a superdomain + # of qname. 
+ try: + srrset = self.find_rrset( + self.authority, auname, question.rdclass, dns.rdatatype.SOA + ) + srdata = cast(dns.rdtypes.ANY.SOA.SOA, srrset[0]) + min_ttl = min(min_ttl, srrset.ttl, srdata.minimum) + break + except KeyError: + try: + auname = auname.parent() + except dns.name.NoParent: + break + return ChainingResult(qname, answer, min_ttl, cnames) + + def canonical_name(self) -> dns.name.Name: + """Return the canonical name of the first name in the question + section. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + """ + return self.resolve_chaining().canonical_name + + +def _maybe_import_update(): + # We avoid circular imports by doing this here. We do it in another + # function as doing it in _message_factory_from_opcode() makes "dns" + # a local symbol, and the first line fails :) + + # pylint: disable=redefined-outer-name,import-outside-toplevel,unused-import + import dns.update # noqa: F401 + + +def _message_factory_from_opcode(opcode): + if opcode == dns.opcode.QUERY: + return QueryMessage + elif opcode == dns.opcode.UPDATE: + _maybe_import_update() + return dns.update.UpdateMessage # pyright: ignore + else: + return Message + + +class _WireReader: + """Wire format reader. + + parser: the binary parser + message: The message object being built + initialize_message: Callback to set message parsing options + question_only: Are we only reading the question? + one_rr_per_rrset: Put each RR into its own RRset? + keyring: TSIG keyring + ignore_trailing: Ignore trailing junk at end of request? + multi: Is this message part of a multi-message sequence? + DNS dynamic updates. + continue_on_error: try to extract as much information as possible from + the message, accumulating MessageErrors in the *errors* attribute instead of + raising them. + """ + + def __init__( + self, + wire, + initialize_message, + question_only=False, + one_rr_per_rrset=False, + ignore_trailing=False, + keyring=None, + multi=False, + continue_on_error=False, + ): + self.parser = dns.wire.Parser(wire) + self.message = None + self.initialize_message = initialize_message + self.question_only = question_only + self.one_rr_per_rrset = one_rr_per_rrset + self.ignore_trailing = ignore_trailing + self.keyring = keyring + self.multi = multi + self.continue_on_error = continue_on_error + self.errors = [] + + def _get_question(self, section_number, qcount): + """Read the next *qcount* records from the wire data and add them to + the question section. + """ + assert self.message is not None + section = self.message.sections[section_number] + for _ in range(qcount): + qname = self.parser.get_name(self.message.origin) + (rdtype, rdclass) = self.parser.get_struct("!HH") + (rdclass, rdtype, _, _) = self.message._parse_rr_header( + section_number, qname, rdclass, rdtype + ) + self.message.find_rrset( + section, qname, rdclass, rdtype, create=True, force_unique=True + ) + + def _add_error(self, e): + self.errors.append(MessageError(e, self.parser.current)) + + def _get_section(self, section_number, count): + """Read the next I{count} records from the wire data and add them to + the specified section. 
+ + section_number: the section of the message to which to add records + count: the number of records to read + """ + assert self.message is not None + section = self.message.sections[section_number] + force_unique = self.one_rr_per_rrset + for i in range(count): + rr_start = self.parser.current + absolute_name = self.parser.get_name() + if self.message.origin is not None: + name = absolute_name.relativize(self.message.origin) + else: + name = absolute_name + (rdtype, rdclass, ttl, rdlen) = self.parser.get_struct("!HHIH") + if rdtype in (dns.rdatatype.OPT, dns.rdatatype.TSIG): + ( + rdclass, + rdtype, + deleting, + empty, + ) = self.message._parse_special_rr_header( + section_number, count, i, name, rdclass, rdtype + ) + else: + (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + rdata_start = self.parser.current + try: + if empty: + if rdlen > 0: + raise dns.exception.FormError + rd = None + covers = dns.rdatatype.NONE + else: + with self.parser.restrict_to(rdlen): + rd = dns.rdata.from_wire_parser( + rdclass, # pyright: ignore + rdtype, + self.parser, + self.message.origin, + ) + covers = rd.covers() + if self.message.xfr and rdtype == dns.rdatatype.SOA: + force_unique = True + if rdtype == dns.rdatatype.OPT: + self.message.opt = dns.rrset.from_rdata(name, ttl, rd) + elif rdtype == dns.rdatatype.TSIG: + trd = cast(dns.rdtypes.ANY.TSIG.TSIG, rd) + if self.keyring is None or self.keyring is True: + raise UnknownTSIGKey("got signed message without keyring") + elif isinstance(self.keyring, dict): + key = self.keyring.get(absolute_name) + if isinstance(key, bytes): + key = dns.tsig.Key(absolute_name, key, trd.algorithm) + elif callable(self.keyring): + key = self.keyring(self.message, absolute_name) + else: + key = self.keyring + if key is None: + raise UnknownTSIGKey(f"key '{name}' unknown") + if key: + self.message.keyring = key + self.message.tsig_ctx = dns.tsig.validate( + self.parser.wire, + key, + absolute_name, + rd, + int(time.time()), + self.message.request_mac, + rr_start, + self.message.tsig_ctx, + self.multi, + ) + self.message.tsig = dns.rrset.from_rdata(absolute_name, 0, rd) + else: + rrset = self.message.find_rrset( + section, + name, + rdclass, # pyright: ignore + rdtype, + covers, + deleting, + True, + force_unique, + ) + if rd is not None: + if ttl > 0x7FFFFFFF: + ttl = 0 + rrset.add(rd, ttl) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + self.parser.seek(rdata_start + rdlen) + else: + raise + + def read(self): + """Read a wire format DNS message and build a dns.message.Message + object.""" + + if self.parser.remaining() < 12: + raise ShortHeader + (id, flags, qcount, ancount, aucount, adcount) = self.parser.get_struct( + "!HHHHHH" + ) + factory = _message_factory_from_opcode(dns.opcode.from_flags(flags)) + self.message = factory(id=id) + self.message.flags = dns.flags.Flag(flags) + self.message.wire = self.parser.wire + self.initialize_message(self.message) + self.one_rr_per_rrset = self.message._get_one_rr_per_rrset( + self.one_rr_per_rrset + ) + try: + self._get_question(MessageSection.QUESTION, qcount) + if self.question_only: + return self.message + self._get_section(MessageSection.ANSWER, ancount) + self._get_section(MessageSection.AUTHORITY, aucount) + self._get_section(MessageSection.ADDITIONAL, adcount) + if not self.ignore_trailing and self.parser.remaining() != 0: + raise TrailingJunk + if self.multi and self.message.tsig_ctx and not self.message.had_tsig: + 
self.message.tsig_ctx.update(self.parser.wire) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + else: + raise + return self.message + + +def from_wire( + wire: bytes, + keyring: Any | None = None, + request_mac: bytes | None = b"", + xfr: bool = False, + origin: dns.name.Name | None = None, + tsig_ctx: dns.tsig.HMACTSig | dns.tsig.GSSTSig | None = None, + multi: bool = False, + question_only: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + continue_on_error: bool = False, +) -> Message: + """Convert a DNS wire format message into a message object. + + *keyring*, a ``dns.tsig.Key``, ``dict``, ``bool``, or ``None``, the key or keyring + to use if the message is signed. If ``None`` or ``True``, then trying to decode + a message with a TSIG will fail as it cannot be validated. If ``False``, then + TSIG validation is disabled. + + *request_mac*, a ``bytes`` or ``None``. If the message is a response to a + TSIG-signed request, *request_mac* should be set to the MAC of that request. + + *xfr*, a ``bool``, should be set to ``True`` if this message is part of a zone + transfer. + + *origin*, a ``dns.name.Name`` or ``None``. If the message is part of a zone + transfer, *origin* should be the origin name of the zone. If not ``None``, names + will be relativized to the origin. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the ongoing TSIG + context, used when validating zone transfers. + + *multi*, a ``bool``, should be set to ``True`` if this message is part of a multiple + message sequence. + + *question_only*, a ``bool``. If ``True``, read only up to the end of the question + section. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if the TC bit is + set. + + *continue_on_error*, a ``bool``. If ``True``, try to continue parsing even if + errors occur. Erroneous rdata will be ignored. Errors will be accumulated as a + list of MessageError objects in the message's ``errors`` attribute. This option is + recommended only for DNS analysis tools, or for use in a server as part of an error + handling path. The default is ``False``. + + Raises ``dns.message.ShortHeader`` if the message is less than 12 octets long. + + Raises ``dns.message.TrailingJunk`` if there were octets in the message past the end + of the proper DNS message, and *ignore_trailing* is ``False``. + + Raises ``dns.message.BadEDNS`` if an OPT record was in the wrong section, or + occurred more than once. + + Raises ``dns.message.BadTSIG`` if a TSIG record was not the last record of the + additional data section. + + Raises ``dns.message.Truncated`` if the TC flag is set and *raise_on_truncation* is + ``True``. + + Returns a ``dns.message.Message``. 
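+
+    A guarded call might look like this (``wire`` is assumed to hold a
+    complete DNS response)::
+
+        try:
+            msg = dns.message.from_wire(wire, raise_on_truncation=True)
+        except dns.message.Truncated as e:
+            msg = e.message()  # the partial message; typically retry over TCP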
+ """ + + # We permit None for request_mac solely for backwards compatibility + if request_mac is None: + request_mac = b"" + + def initialize_message(message): + message.request_mac = request_mac + message.xfr = xfr + message.origin = origin + message.tsig_ctx = tsig_ctx + + reader = _WireReader( + wire, + initialize_message, + question_only, + one_rr_per_rrset, + ignore_trailing, + keyring, + multi, + continue_on_error, + ) + try: + m = reader.read() + except dns.exception.FormError: + if ( + reader.message + and (reader.message.flags & dns.flags.TC) + and raise_on_truncation + ): + raise Truncated(message=reader.message) + else: + raise + # Reading a truncated message might not have any errors, so we + # have to do this check here too. + if m.flags & dns.flags.TC and raise_on_truncation: + raise Truncated(message=m) + if continue_on_error: + m.errors = reader.errors + + return m + + +class _TextReader: + """Text format reader. + + tok: the tokenizer. + message: The message object being built. + DNS dynamic updates. + last_name: The most recently read name when building a message object. + one_rr_per_rrset: Put each RR into its own RRset? + origin: The origin for relative names + relativize: relativize names? + relativize_to: the origin to relativize to. + """ + + def __init__( + self, + text: str, + idna_codec: dns.name.IDNACodec | None, + one_rr_per_rrset: bool = False, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, + ): + self.message: Message | None = None # mypy: ignore + self.tok = dns.tokenizer.Tokenizer(text, idna_codec=idna_codec) + self.last_name = None + self.one_rr_per_rrset = one_rr_per_rrset + self.origin = origin + self.relativize = relativize + self.relativize_to = relativize_to + self.id = None + self.edns = -1 + self.ednsflags = 0 + self.payload = DEFAULT_EDNS_PAYLOAD + self.rcode = None + self.opcode = dns.opcode.QUERY + self.flags = 0 + + def _header_line(self, _): + """Process one line from the text format header section.""" + + token = self.tok.get() + what = token.value + if what == "id": + self.id = self.tok.get_int() + elif what == "flags": + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.flags = self.flags | dns.flags.from_text(token.value) + elif what == "edns": + self.edns = self.tok.get_int() + self.ednsflags = self.ednsflags | (self.edns << 16) + elif what == "eflags": + if self.edns < 0: + self.edns = 0 + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.ednsflags = self.ednsflags | dns.flags.edns_from_text(token.value) + elif what == "payload": + self.payload = self.tok.get_int() + if self.edns < 0: + self.edns = 0 + elif what == "opcode": + text = self.tok.get_string() + self.opcode = dns.opcode.from_text(text) + self.flags = self.flags | dns.opcode.to_flags(self.opcode) + elif what == "rcode": + text = self.tok.get_string() + self.rcode = dns.rcode.from_text(text) + else: + raise UnknownHeaderField + self.tok.get_eol() + + def _question_line(self, section_number): + """Process one line from the text format question section.""" + + assert self.message is not None + section = self.message.sections[section_number] + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name( + token, self.message.origin, self.relativize, self.relativize_to + ) + name = self.last_name + if name is None: + raise NoPreviousName + token = 
self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, _, _) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + self.message.find_rrset( + section, name, rdclass, rdtype, create=True, force_unique=True + ) + self.tok.get_eol() + + def _rr_line(self, section_number): + """Process one line from the text format answer, authority, or + additional data sections. + """ + + assert self.message is not None + section = self.message.sections[section_number] + # Name + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name( + token, self.message.origin, self.relativize, self.relativize_to + ) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # TTL + try: + ttl = int(token.value, 0) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + ttl = 0 + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + token = self.tok.get() + if empty and not token.is_eol_or_eof(): + raise dns.exception.SyntaxError + if not empty and token.is_eol_or_eof(): + raise dns.exception.UnexpectedEnd + if not token.is_eol_or_eof(): + self.tok.unget(token) + rd = dns.rdata.from_text( + rdclass, + rdtype, + self.tok, + self.message.origin, + self.relativize, + self.relativize_to, + ) + covers = rd.covers() + else: + rd = None + covers = dns.rdatatype.NONE + rrset = self.message.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + True, + self.one_rr_per_rrset, + ) + if rd is not None: + rrset.add(rd, ttl) + + def _make_message(self): + factory = _message_factory_from_opcode(self.opcode) + message = factory(id=self.id) + message.flags = self.flags + if self.edns >= 0: + message.use_edns(self.edns, self.ednsflags, self.payload) + if self.rcode: + message.set_rcode(self.rcode) + if self.origin: + message.origin = self.origin + return message + + def read(self): + """Read a text format DNS message and build a dns.message.Message + object.""" + + line_method = self._header_line + section_number = None + while 1: + token = self.tok.get(True, True) + if token.is_eol_or_eof(): + break + if token.is_comment(): + u = token.value.upper() + if u == "HEADER": + line_method = self._header_line + + if self.message: + message = self.message + else: + # If we don't have a message, create one with the current + # opcode, so that we know which section names to parse. + message = self._make_message() + try: + section_number = message._section_enum.from_text(u) + # We found a section name. If we don't have a message, + # use the one we just created. 
+ if not self.message: + self.message = message + self.one_rr_per_rrset = message._get_one_rr_per_rrset( + self.one_rr_per_rrset + ) + if section_number == MessageSection.QUESTION: + line_method = self._question_line + else: + line_method = self._rr_line + except Exception: + # It's just a comment. + pass + self.tok.get_eol() + continue + self.tok.unget(token) + line_method(section_number) + if not self.message: + self.message = self._make_message() + return self.message + + +def from_text( + text: str, + idna_codec: dns.name.IDNACodec | None = None, + one_rr_per_rrset: bool = False, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, +) -> Message: + """Convert the text format message into a message object. + + The reader stops after reading the first blank line in the input to + facilitate reading multiple messages from a single file with + ``dns.message.from_file()``. + + *text*, a ``str``, the text format message. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). + + reader = _TextReader( + text, idna_codec, one_rr_per_rrset, origin, relativize, relativize_to + ) + return reader.read() + + +def from_file( + f: Any, + idna_codec: dns.name.IDNACodec | None = None, + one_rr_per_rrset: bool = False, +) -> Message: + """Read the next text format message from the specified file. + + Message blocks are separated by a single blank line. + + *f*, a ``file`` or ``str``. If *f* is text, it is treated as the + pathname of a file to open. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. 
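+
+    A minimal sketch (the path is illustrative)::
+
+        with open("messages.txt", encoding="utf-8") as f:
+            msg = dns.message.from_file(f)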
+ + Returns a ``dns.message.Message object`` + """ + + if isinstance(f, str): + cm: contextlib.AbstractContextManager = open(f, encoding="utf-8") + else: + cm = contextlib.nullcontext(f) + with cm as f: + return from_text(f, idna_codec, one_rr_per_rrset) + assert False # for mypy lgtm[py/unreachable-statement] + + +def make_query( + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + use_edns: int | bool | None = None, + want_dnssec: bool = False, + ednsflags: int | None = None, + payload: int | None = None, + request_payload: int | None = None, + options: List[dns.edns.Option] | None = None, + idna_codec: dns.name.IDNACodec | None = None, + id: int | None = None, + flags: int = dns.flags.RD, + pad: int = 0, +) -> QueryMessage: + """Make a query message. + + The query name, type, and class may all be specified either + as objects of the appropriate type, or as strings. + + The query will have a randomly chosen query id, and its DNS flags + will be set to dns.flags.RD. + + qname, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the desired rdata type. + + *rdclass*, an ``int`` or ``str``, the desired rdata class; the default + is class IN. + + *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the + default is ``None``. If ``None``, EDNS will be enabled only if other + parameters (*ednsflags*, *payload*, *request_payload*, or *options*) are + set. + See the description of dns.message.Message.use_edns() for the possible + values for use_edns and their meanings. + + *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *id*, an ``int`` or ``None``, the desired query id. The default is + ``None``, which generates a random query id. + + *flags*, an ``int``, the desired query flags. The default is + ``dns.flags.RD``. + + *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add + padding bytes to make the message size a multiple of *pad*. Note that if + padding is non-zero, an EDNS PADDING option will always be added to the + message. + + Returns a ``dns.message.QueryMessage`` + """ + + if isinstance(qname, str): + qname = dns.name.from_text(qname, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + m = QueryMessage(id=id) + m.flags = dns.flags.Flag(flags) + m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True) + # only pass keywords on to use_edns if they have been set to a + # non-None value. Setting a field will turn EDNS on if it hasn't + # been configured. 
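+    # (Illustration: a call like make_query("example.com", "A", payload=1232)
+    # populates only the payload entry below, which flips use_edns from None
+    # to 0 so that EDNS0 is enabled.)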
+ kwargs: Dict[str, Any] = {} + if ednsflags is not None: + kwargs["ednsflags"] = ednsflags + if payload is not None: + kwargs["payload"] = payload + if request_payload is not None: + kwargs["request_payload"] = request_payload + if options is not None: + kwargs["options"] = options + if kwargs and use_edns is None: + use_edns = 0 + kwargs["edns"] = use_edns + kwargs["pad"] = pad + m.use_edns(**kwargs) + if want_dnssec: + m.want_dnssec(want_dnssec) + return m + + +class CopyMode(enum.Enum): + """ + How should sections be copied when making an update response? + """ + + NOTHING = 0 + QUESTION = 1 + EVERYTHING = 2 + + +def make_response( + query: Message, + recursion_available: bool = False, + our_payload: int = 8192, + fudge: int = 300, + tsig_error: int = 0, + pad: int | None = None, + copy_mode: CopyMode | None = None, +) -> Message: + """Make a message which is a response for the specified query. + The message returned is really a response skeleton; it has all of the infrastructure + required of a response, but none of the content. + + Response section(s) which are copied are shallow copies of the matching section(s) + in the query, so the query's RRsets should not be changed. + + *query*, a ``dns.message.Message``, the query to respond to. + + *recursion_available*, a ``bool``, should RA be set in the response? + + *our_payload*, an ``int``, the payload size to advertise in EDNS responses. + + *fudge*, an ``int``, the TSIG time fudge. + + *tsig_error*, an ``int``, the TSIG error. + + *pad*, a non-negative ``int`` or ``None``. If 0, the default, do not pad; otherwise + if not ``None`` add padding bytes to make the message size a multiple of *pad*. Note + that if padding is non-zero, an EDNS PADDING option will always be added to the + message. If ``None``, add padding following RFC 8467, namely if the request is + padded, pad the response to 468 otherwise do not pad. + + *copy_mode*, a ``dns.message.CopyMode`` or ``None``, determines how sections are + copied. The default, ``None`` copies sections according to the default for the + message's opcode, which is currently ``dns.message.CopyMode.QUESTION`` for all + opcodes. ``dns.message.CopyMode.QUESTION`` copies only the question section. + ``dns.message.CopyMode.EVERYTHING`` copies all sections other than OPT or TSIG + records, which are created appropriately if needed. ``dns.message.CopyMode.NOTHING`` + copies no sections; note that this mode is for server testing purposes and is + otherwise not recommended for use. In particular, ``dns.message.is_response()`` + will be ``False`` if you create a response this way and the rcode is not + ``FORMERR``, ``SERVFAIL``, ``NOTIMP``, or ``REFUSED``. + + Returns a ``dns.message.Message`` object whose specific class is appropriate for the + query. For example, if query is a ``dns.update.UpdateMessage``, the response will + be one too. 
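+
+    A minimal sketch of typical server-side use (the query is built locally
+    here purely for illustration)::
+
+        import dns.message
+
+        query = dns.message.make_query("example.", "A")
+        response = dns.message.make_response(query)
+        # The skeleton mirrors the query: same id, QR set, question copied.
+        assert response.id == query.id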
+ """ + + if query.flags & dns.flags.QR: + raise dns.exception.FormError("specified query message is not a query") + opcode = query.opcode() + factory = _message_factory_from_opcode(opcode) + response = factory(id=query.id) + response.flags = dns.flags.QR | (query.flags & dns.flags.RD) + if recursion_available: + response.flags |= dns.flags.RA + response.set_opcode(opcode) + if copy_mode is None: + copy_mode = CopyMode.QUESTION + if copy_mode != CopyMode.NOTHING: + response.question = list(query.question) + if copy_mode == CopyMode.EVERYTHING: + response.answer = list(query.answer) + response.authority = list(query.authority) + response.additional = list(query.additional) + if query.edns >= 0: + if pad is None: + # Set response padding per RFC 8467 + pad = 0 + for option in query.options: + if option.otype == dns.edns.OptionType.PADDING: + pad = 468 + response.use_edns(0, 0, our_payload, query.payload, pad=pad) + if query.had_tsig and query.keyring: + assert query.mac is not None + assert query.keyalgorithm is not None + response.use_tsig( + query.keyring, + query.keyname, + fudge, + None, + tsig_error, + b"", + query.keyalgorithm, + ) + response.request_mac = query.mac + return response + + +### BEGIN generated MessageSection constants + +QUESTION = MessageSection.QUESTION +ANSWER = MessageSection.ANSWER +AUTHORITY = MessageSection.AUTHORITY +ADDITIONAL = MessageSection.ADDITIONAL + +### END generated MessageSection constants diff --git a/venv/lib/python3.12/site-packages/dns/name.py b/venv/lib/python3.12/site-packages/dns/name.py new file mode 100644 index 0000000..45c8f45 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/name.py @@ -0,0 +1,1289 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Names.""" + +import copy +import encodings.idna # type: ignore +import functools +import struct +from typing import Any, Callable, Dict, Iterable, Optional, Tuple + +import dns._features +import dns.enum +import dns.exception +import dns.immutable +import dns.wire + +# Dnspython will never access idna if the import fails, but pyright can't figure +# that out, so... +# +# pyright: reportAttributeAccessIssue = false, reportPossiblyUnboundVariable = false + +if dns._features.have("idna"): + import idna # type: ignore + + have_idna_2008 = True +else: # pragma: no cover + have_idna_2008 = False + + +CompressType = Dict["Name", int] + + +class NameRelation(dns.enum.IntEnum): + """Name relation result from fullcompare().""" + + # This is an IntEnum for backwards compatibility in case anyone + # has hardwired the constants. + + #: The compared names have no relationship to each other. + NONE = 0 + #: the first name is a superdomain of the second. 
+ SUPERDOMAIN = 1 + #: The first name is a subdomain of the second. + SUBDOMAIN = 2 + #: The compared names are equal. + EQUAL = 3 + #: The compared names have a common ancestor. + COMMONANCESTOR = 4 + + @classmethod + def _maximum(cls): + return cls.COMMONANCESTOR # pragma: no cover + + @classmethod + def _short_name(cls): + return cls.__name__ # pragma: no cover + + +# Backwards compatibility +NAMERELN_NONE = NameRelation.NONE +NAMERELN_SUPERDOMAIN = NameRelation.SUPERDOMAIN +NAMERELN_SUBDOMAIN = NameRelation.SUBDOMAIN +NAMERELN_EQUAL = NameRelation.EQUAL +NAMERELN_COMMONANCESTOR = NameRelation.COMMONANCESTOR + + +class EmptyLabel(dns.exception.SyntaxError): + """A DNS label is empty.""" + + +class BadEscape(dns.exception.SyntaxError): + """An escaped code in a text format of DNS name is invalid.""" + + +class BadPointer(dns.exception.FormError): + """A DNS compression pointer points forward instead of backward.""" + + +class BadLabelType(dns.exception.FormError): + """The label type in DNS name wire format is unknown.""" + + +class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): + """An attempt was made to convert a non-absolute name to + wire when there was also a non-absolute (or missing) origin.""" + + +class NameTooLong(dns.exception.FormError): + """A DNS name is > 255 octets long.""" + + +class LabelTooLong(dns.exception.SyntaxError): + """A DNS label is > 63 octets long.""" + + +class AbsoluteConcatenation(dns.exception.DNSException): + """An attempt was made to append anything other than the + empty name to an absolute DNS name.""" + + +class NoParent(dns.exception.DNSException): + """An attempt was made to get the parent of the root name + or the empty name.""" + + +class NoIDNA2008(dns.exception.DNSException): + """IDNA 2008 processing was requested but the idna module is not + available.""" + + +class IDNAException(dns.exception.DNSException): + """IDNA processing raised an exception.""" + + supp_kwargs = {"idna_exception"} + fmt = "IDNA processing exception: {idna_exception}" + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class NeedSubdomainOfOrigin(dns.exception.DNSException): + """An absolute name was provided that is not a subdomain of the specified origin.""" + + +_escaped = b'"().;\\@$' +_escaped_text = '"().;\\@$' + + +def _escapify(label: bytes | str) -> str: + """Escape the characters in label which need it. + @returns: the escaped string + @rtype: string""" + if isinstance(label, bytes): + # Ordinary DNS label mode. Escape special characters and values + # < 0x20 or > 0x7f. + text = "" + for c in label: + if c in _escaped: + text += "\\" + chr(c) + elif c > 0x20 and c < 0x7F: + text += chr(c) + else: + text += f"\\{c:03d}" + return text + + # Unicode label mode. Escape only special characters and values < 0x20 + text = "" + for uc in label: + if uc in _escaped_text: + text += "\\" + uc + elif uc <= "\x20": + text += f"\\{ord(uc):03d}" + else: + text += uc + return text + + +class IDNACodec: + """Abstract base class for IDNA encoder/decoders.""" + + def __init__(self): + pass + + def is_idna(self, label: bytes) -> bool: + return label.lower().startswith(b"xn--") + + def encode(self, label: str) -> bytes: + raise NotImplementedError # pragma: no cover + + def decode(self, label: bytes) -> str: + # We do not apply any IDNA policy on decode. 
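+        # E.g. the ACE label b"xn--bcher-kva" decodes to "bücher" below; the
+        # result is escaped but never validated against an IDNA profile.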
+ if self.is_idna(label): + try: + slabel = label[4:].decode("punycode") + return _escapify(slabel) + except Exception as e: + raise IDNAException(idna_exception=e) + else: + return _escapify(label) + + +class IDNA2003Codec(IDNACodec): + """IDNA 2003 encoder/decoder.""" + + def __init__(self, strict_decode: bool = False): + """Initialize the IDNA 2003 encoder/decoder. + + *strict_decode* is a ``bool``. If `True`, then IDNA2003 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2008. The default is `False`. + """ + + super().__init__() + self.strict_decode = strict_decode + + def encode(self, label: str) -> bytes: + """Encode *label*.""" + + if label == "": + return b"" + try: + return encodings.idna.ToASCII(label) + except UnicodeError: + raise LabelTooLong + + def decode(self, label: bytes) -> str: + """Decode *label*.""" + if not self.strict_decode: + return super().decode(label) + if label == b"": + return "" + try: + return _escapify(encodings.idna.ToUnicode(label)) + except Exception as e: + raise IDNAException(idna_exception=e) + + +class IDNA2008Codec(IDNACodec): + """IDNA 2008 encoder/decoder.""" + + def __init__( + self, + uts_46: bool = False, + transitional: bool = False, + allow_pure_ascii: bool = False, + strict_decode: bool = False, + ): + """Initialize the IDNA 2008 encoder/decoder. + + *uts_46* is a ``bool``. If True, apply Unicode IDNA + compatibility processing as described in Unicode Technical + Standard #46 (https://unicode.org/reports/tr46/). + If False, do not apply the mapping. The default is False. + + *transitional* is a ``bool``: If True, use the + "transitional" mode described in Unicode Technical Standard + #46. The default is False. + + *allow_pure_ascii* is a ``bool``. If True, then a label which + consists of only ASCII characters is allowed. This is less + strict than regular IDNA 2008, but is also necessary for mixed + names, e.g. a name with starting with "_sip._tcp." and ending + in an IDN suffix which would otherwise be disallowed. The + default is False. + + *strict_decode* is a ``bool``: If True, then IDNA2008 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2003. The default is False. 
+ """ + super().__init__() + self.uts_46 = uts_46 + self.transitional = transitional + self.allow_pure_ascii = allow_pure_ascii + self.strict_decode = strict_decode + + def encode(self, label: str) -> bytes: + if label == "": + return b"" + if self.allow_pure_ascii and is_all_ascii(label): + encoded = label.encode("ascii") + if len(encoded) > 63: + raise LabelTooLong + return encoded + if not have_idna_2008: + raise NoIDNA2008 + try: + if self.uts_46: + # pylint: disable=possibly-used-before-assignment + label = idna.uts46_remap(label, False, self.transitional) + return idna.alabel(label) + except idna.IDNAError as e: + if e.args[0] == "Label too long": + raise LabelTooLong + else: + raise IDNAException(idna_exception=e) + + def decode(self, label: bytes) -> str: + if not self.strict_decode: + return super().decode(label) + if label == b"": + return "" + if not have_idna_2008: + raise NoIDNA2008 + try: + ulabel = idna.ulabel(label) + if self.uts_46: + ulabel = idna.uts46_remap(ulabel, False, self.transitional) + return _escapify(ulabel) + except (idna.IDNAError, UnicodeError) as e: + raise IDNAException(idna_exception=e) + + +IDNA_2003_Practical = IDNA2003Codec(False) +IDNA_2003_Strict = IDNA2003Codec(True) +IDNA_2003 = IDNA_2003_Practical +IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) +IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) +IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) +IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) +IDNA_2008 = IDNA_2008_Practical + + +def _validate_labels(labels: Tuple[bytes, ...]) -> None: + """Check for empty labels in the middle of a label sequence, + labels that are too long, and for too many labels. + + Raises ``dns.name.NameTooLong`` if the name as a whole is too long. + + Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root + label) and appears in a position other than the end of the label + sequence + + """ + + l = len(labels) + total = 0 + i = -1 + j = 0 + for label in labels: + ll = len(label) + total += ll + 1 + if ll > 63: + raise LabelTooLong + if i < 0 and label == b"": + i = j + j += 1 + if total > 255: + raise NameTooLong + if i >= 0 and i != l - 1: + raise EmptyLabel + + +def _maybe_convert_to_binary(label: bytes | str) -> bytes: + """If label is ``str``, convert it to ``bytes``. If it is already + ``bytes`` just return it. + + """ + + if isinstance(label, bytes): + return label + if isinstance(label, str): + return label.encode() + raise ValueError # pragma: no cover + + +@dns.immutable.immutable +class Name: + """A DNS name. + + The dns.name.Name class represents a DNS name as a tuple of + labels. Each label is a ``bytes`` in DNS wire format. Instances + of the class are immutable. + """ + + __slots__ = ["labels"] + + def __init__(self, labels: Iterable[bytes | str]): + """*labels* is any iterable whose values are ``str`` or ``bytes``.""" + + blabels = [_maybe_convert_to_binary(x) for x in labels] + self.labels = tuple(blabels) + _validate_labels(self.labels) + + def __copy__(self): + return Name(self.labels) + + def __deepcopy__(self, memo): + return Name(copy.deepcopy(self.labels, memo)) + + def __getstate__(self): + # Names can be pickled + return {"labels": self.labels} + + def __setstate__(self, state): + super().__setattr__("labels", state["labels"]) + _validate_labels(self.labels) + + def is_absolute(self) -> bool: + """Is the most significant label of this name the root label? + + Returns a ``bool``. 
+ """ + + return len(self.labels) > 0 and self.labels[-1] == b"" + + def is_wild(self) -> bool: + """Is this name wild? (I.e. Is the least significant label '*'?) + + Returns a ``bool``. + """ + + return len(self.labels) > 0 and self.labels[0] == b"*" + + def __hash__(self) -> int: + """Return a case-insensitive hash of the name. + + Returns an ``int``. + """ + + h = 0 + for label in self.labels: + for c in label.lower(): + h += (h << 3) + c + return h + + def fullcompare(self, other: "Name") -> Tuple[NameRelation, int, int]: + """Compare two names, returning a 3-tuple + ``(relation, order, nlabels)``. + + *relation* describes the relation ship between the names, + and is one of: ``dns.name.NameRelation.NONE``, + ``dns.name.NameRelation.SUPERDOMAIN``, ``dns.name.NameRelation.SUBDOMAIN``, + ``dns.name.NameRelation.EQUAL``, or ``dns.name.NameRelation.COMMONANCESTOR``. + + *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and == + 0 if *self* == *other*. A relative name is always less than an + absolute name. If both names have the same relativity, then + the DNSSEC order relation is used to order them. + + *nlabels* is the number of significant labels that the two names + have in common. + + Here are some examples. Names ending in "." are absolute names, + those not ending in "." are relative names. + + ============= ============= =========== ===== ======= + self other relation order nlabels + ============= ============= =========== ===== ======= + www.example. www.example. equal 0 3 + www.example. example. subdomain > 0 2 + example. www.example. superdomain < 0 2 + example1.com. example2.com. common anc. < 0 2 + example1 example2. none < 0 0 + example1. example2 none > 0 0 + ============= ============= =========== ===== ======= + """ + + sabs = self.is_absolute() + oabs = other.is_absolute() + if sabs != oabs: + if sabs: + return (NameRelation.NONE, 1, 0) + else: + return (NameRelation.NONE, -1, 0) + l1 = len(self.labels) + l2 = len(other.labels) + ldiff = l1 - l2 + if ldiff < 0: + l = l1 + else: + l = l2 + + order = 0 + nlabels = 0 + namereln = NameRelation.NONE + while l > 0: + l -= 1 + l1 -= 1 + l2 -= 1 + label1 = self.labels[l1].lower() + label2 = other.labels[l2].lower() + if label1 < label2: + order = -1 + if nlabels > 0: + namereln = NameRelation.COMMONANCESTOR + return (namereln, order, nlabels) + elif label1 > label2: + order = 1 + if nlabels > 0: + namereln = NameRelation.COMMONANCESTOR + return (namereln, order, nlabels) + nlabels += 1 + order = ldiff + if ldiff < 0: + namereln = NameRelation.SUPERDOMAIN + elif ldiff > 0: + namereln = NameRelation.SUBDOMAIN + else: + namereln = NameRelation.EQUAL + return (namereln, order, nlabels) + + def is_subdomain(self, other: "Name") -> bool: + """Is self a subdomain of other? + + Note that the notion of subdomain includes equality, e.g. + "dnspython.org" is a subdomain of itself. + + Returns a ``bool``. + """ + + (nr, _, _) = self.fullcompare(other) + if nr == NameRelation.SUBDOMAIN or nr == NameRelation.EQUAL: + return True + return False + + def is_superdomain(self, other: "Name") -> bool: + """Is self a superdomain of other? + + Note that the notion of superdomain includes equality, e.g. + "dnspython.org" is a superdomain of itself. + + Returns a ``bool``. + """ + + (nr, _, _) = self.fullcompare(other) + if nr == NameRelation.SUPERDOMAIN or nr == NameRelation.EQUAL: + return True + return False + + def canonicalize(self) -> "Name": + """Return a name which is equal to the current name, but is in + DNSSEC canonical form. 
+ """ + + return Name([x.lower() for x in self.labels]) + + def __eq__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] == 0 + else: + return False + + def __ne__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] != 0 + else: + return True + + def __lt__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] < 0 + else: + return NotImplemented + + def __le__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] <= 0 + else: + return NotImplemented + + def __ge__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] >= 0 + else: + return NotImplemented + + def __gt__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] > 0 + else: + return NotImplemented + + def __repr__(self): + return "" + + def __str__(self): + return self.to_text(False) + + def to_text(self, omit_final_dot: bool = False) -> str: + """Convert name to DNS text format. + + *omit_final_dot* is a ``bool``. If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. + + Returns a ``str``. + """ + + if len(self.labels) == 0: + return "@" + if len(self.labels) == 1 and self.labels[0] == b"": + return "." + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + s = ".".join(map(_escapify, l)) + return s + + def to_unicode( + self, omit_final_dot: bool = False, idna_codec: IDNACodec | None = None + ) -> str: + """Convert name to Unicode text format. + + IDN ACE labels are converted to Unicode. + + *omit_final_dot* is a ``bool``. If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. + *idna_codec* specifies the IDNA encoder/decoder. If None, the + dns.name.IDNA_2003_Practical encoder/decoder is used. + The IDNA_2003_Practical decoder does + not impose any policy, it just decodes punycode, so if you + don't want checking for compliance, you can use this decoder + for IDNA2008 as well. + + Returns a ``str``. + """ + + if len(self.labels) == 0: + return "@" + if len(self.labels) == 1 and self.labels[0] == b"": + return "." + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + if idna_codec is None: + idna_codec = IDNA_2003_Practical + return ".".join([idna_codec.decode(x) for x in l]) + + def to_digestable(self, origin: Optional["Name"] = None) -> bytes: + """Convert name to a format suitable for digesting in hashes. + + The name is canonicalized and converted to uncompressed wire + format. All names in wire format are absolute. If the name + is a relative name, then an origin must be supplied. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then origin will be appended + to the name. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes``. + """ + + digest = self.to_wire(origin=origin, canonicalize=True) + assert digest is not None + return digest + + def to_wire( + self, + file: Any | None = None, + compress: CompressType | None = None, + origin: Optional["Name"] = None, + canonicalize: bool = False, + ) -> bytes | None: + """Convert name to wire format, possibly compressing it. + + *file* is the file where the name is emitted (typically an + io.BytesIO file). If ``None`` (the default), a ``bytes`` + containing the wire name will be returned. 
+ + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. Note that + the compression code assumes that compression offset 0 is the + start of *file*, and thus compression will not be correct + if this is not the case. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *canonicalize*, a ``bool``, indicates whether the name should + be canonicalized; that is, converted to a format suitable for + digesting in hashes. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes`` or ``None``. + """ + + if file is None: + out = bytearray() + for label in self.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + for label in origin.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + return bytes(out) + + labels: Iterable[bytes] + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + labels = list(self.labels) + labels.extend(list(origin.labels)) + else: + labels = self.labels + i = 0 + for label in labels: + n = Name(labels[i:]) + i += 1 + if compress is not None: + pos = compress.get(n) + else: + pos = None + if pos is not None: + value = 0xC000 + pos + s = struct.pack("!H", value) + file.write(s) + break + else: + if compress is not None and len(n) > 1: + pos = file.tell() + if pos <= 0x3FFF: + compress[n] = pos + l = len(label) + file.write(struct.pack("!B", l)) + if l > 0: + if canonicalize: + file.write(label.lower()) + else: + file.write(label) + return None + + def __len__(self) -> int: + """The length of the name (in labels). + + Returns an ``int``. + """ + + return len(self.labels) + + def __getitem__(self, index): + return self.labels[index] + + def __add__(self, other): + return self.concatenate(other) + + def __sub__(self, other): + return self.relativize(other) + + def split(self, depth: int) -> Tuple["Name", "Name"]: + """Split a name into a prefix and suffix names at the specified depth. + + *depth* is an ``int`` specifying the number of labels in the suffix + + Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the + name. + + Returns the tuple ``(prefix, suffix)``. + """ + + l = len(self.labels) + if depth == 0: + return (self, dns.name.empty) + elif depth == l: + return (dns.name.empty, self) + elif depth < 0 or depth > l: + raise ValueError("depth must be >= 0 and <= the length of the name") + return (Name(self[:-depth]), Name(self[-depth:])) + + def concatenate(self, other: "Name") -> "Name": + """Return a new name which is the concatenation of self and other. + + Raises ``dns.name.AbsoluteConcatenation`` if the name is + absolute and *other* is not the empty name. + + Returns a ``dns.name.Name``. + """ + + if self.is_absolute() and len(other) > 0: + raise AbsoluteConcatenation + labels = list(self.labels) + labels.extend(list(other.labels)) + return Name(labels) + + def relativize(self, origin: "Name") -> "Name": + """If the name is a subdomain of *origin*, return a new name which is + the name relative to origin. Otherwise return the name. + + For example, relativizing ``www.dnspython.org.`` to origin + ``dnspython.org.`` returns the name ``www``. 
Relativizing ``example.``
+        to origin ``dnspython.org.`` returns ``example.``.
+
+        Returns a ``dns.name.Name``.
+        """
+
+        if origin is not None and self.is_subdomain(origin):
+            return Name(self[: -len(origin)])
+        else:
+            return self
+
+    def derelativize(self, origin: "Name") -> "Name":
+        """If the name is a relative name, return a new name which is the
+        concatenation of the name and origin. Otherwise return the name.
+
+        For example, derelativizing ``www`` to origin ``dnspython.org.``
+        returns the name ``www.dnspython.org.``. Derelativizing ``example.``
+        to origin ``dnspython.org.`` returns ``example.``.
+
+        Returns a ``dns.name.Name``.
+        """
+
+        if not self.is_absolute():
+            return self.concatenate(origin)
+        else:
+            return self
+
+    def choose_relativity(
+        self, origin: Optional["Name"] = None, relativize: bool = True
+    ) -> "Name":
+        """Return a name with the relativity desired by the caller.
+
+        If *origin* is ``None``, then the name is returned.
+        Otherwise, if *relativize* is ``True`` the name is
+        relativized, and if *relativize* is ``False`` the name is
+        derelativized.
+
+        Returns a ``dns.name.Name``.
+        """
+
+        if origin:
+            if relativize:
+                return self.relativize(origin)
+            else:
+                return self.derelativize(origin)
+        else:
+            return self
+
+    def parent(self) -> "Name":
+        """Return the parent of the name.
+
+        For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``.
+
+        Raises ``dns.name.NoParent`` if the name is either the root name or the
+        empty name, and thus has no parent.
+
+        Returns a ``dns.name.Name``.
+        """
+
+        if self == root or self == empty:
+            raise NoParent
+        return Name(self.labels[1:])
+
+    def predecessor(self, origin: "Name", prefix_ok: bool = True) -> "Name":
+        """Return the maximal predecessor of *name* in the DNSSEC ordering in the zone
+        whose origin is *origin*, or return the longest name under *origin* if the
+        name is origin (i.e. wrap around to the longest name, which may still be
+        *origin* due to length considerations).
+
+        The relativity of the name is preserved, so if this name is relative
+        then the method will return a relative name, and likewise if this name
+        is absolute then the predecessor will be absolute.
+
+        *prefix_ok* indicates if prefixing labels is allowed, and
+        defaults to ``True``. Normally it is good to allow this, but if computing
+        a maximal predecessor at a zone cut point then ``False`` must be specified.
+        """
+        return _handle_relativity_and_call(
+            _absolute_predecessor, self, origin, prefix_ok
+        )
+
+    def successor(self, origin: "Name", prefix_ok: bool = True) -> "Name":
+        """Return the minimal successor of *name* in the DNSSEC ordering in the zone
+        whose origin is *origin*, or return *origin* if the successor cannot be
+        computed due to name length limitations.
+
+        Note that *origin* is returned in the "too long" cases because wrapping
+        around to the origin is how NSEC records express "end of the zone".
+
+        The relativity of the name is preserved, so if this name is relative
+        then the method will return a relative name, and likewise if this name
+        is absolute then the successor will be absolute.
+
+        *prefix_ok* indicates if prefixing a new minimal label is allowed, and
+        defaults to ``True``. Normally it is good to allow this, but if computing
+        a minimal successor at a zone cut point then ``False`` must be specified.
+        """
+        return _handle_relativity_and_call(_absolute_successor, self, origin, prefix_ok)
+
+
+#: The root name, '.'
+root = Name([b""])
+
+#: The empty name.
+empty = Name([]) + + +def from_unicode( + text: str, origin: Name | None = root, idna_codec: IDNACodec | None = None +) -> Name: + """Convert unicode text into a Name object. + + Labels are encoded in IDN ACE form according to rules specified by + the IDNA codec. + + *text*, a ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if not isinstance(text, str): + raise ValueError("input to from_unicode() must be a unicode string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = "" + escaping = False + edigits = 0 + total = 0 + if idna_codec is None: + idna_codec = IDNA_2003 + if text == "@": + text = "" + if text: + if text in [".", "\u3002", "\uff0e", "\uff61"]: + return Name([b""]) # no Unicode "u" on this constant! + for c in text: + if escaping: + if edigits == 0: + if c.isdigit(): + total = int(c) + edigits += 1 + else: + label += c + escaping = False + else: + if not c.isdigit(): + raise BadEscape + total *= 10 + total += int(c) + edigits += 1 + if edigits == 3: + escaping = False + label += chr(total) + elif c in [".", "\u3002", "\uff0e", "\uff61"]: + if len(label) == 0: + raise EmptyLabel + labels.append(idna_codec.encode(label)) + label = "" + elif c == "\\": + escaping = True + edigits = 0 + total = 0 + else: + label += c + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(idna_codec.encode(label)) + else: + labels.append(b"") + + if (len(labels) == 0 or labels[-1] != b"") and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +def is_all_ascii(text: str) -> bool: + for c in text: + if ord(c) > 0x7F: + return False + return True + + +def from_text( + text: bytes | str, + origin: Name | None = root, + idna_codec: IDNACodec | None = None, +) -> Name: + """Convert text into a Name object. + + *text*, a ``bytes`` or ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if isinstance(text, str): + if not is_all_ascii(text): + # Some codepoint in the input text is > 127, so IDNA applies. + return from_unicode(text, origin, idna_codec) + # The input is all ASCII, so treat this like an ordinary non-IDNA + # domain name. Note that "all ASCII" is about the input text, + # not the codepoints in the domain name. E.g. if text has value + # + # r'\150\151\152\153\154\155\156\157\158\159' + # + # then it's still "all ASCII" even though the domain name has + # codepoints > 127. 
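+        #
+        # Two illustrative calls (the names are hypothetical):
+        #
+        #     from_text("www.example.")     # all ASCII: encoded and parsed here
+        #     from_text("bücher.example.")  # codepoint > 127: handled by
+        #                                   # from_unicode() above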
+ text = text.encode("ascii") + if not isinstance(text, bytes): + raise ValueError("input to from_text() must be a string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = b"" + escaping = False + edigits = 0 + total = 0 + if text == b"@": + text = b"" + if text: + if text == b".": + return Name([b""]) + for c in text: + byte_ = struct.pack("!B", c) + if escaping: + if edigits == 0: + if byte_.isdigit(): + total = int(byte_) + edigits += 1 + else: + label += byte_ + escaping = False + else: + if not byte_.isdigit(): + raise BadEscape + total *= 10 + total += int(byte_) + edigits += 1 + if edigits == 3: + escaping = False + label += struct.pack("!B", total) + elif byte_ == b".": + if len(label) == 0: + raise EmptyLabel + labels.append(label) + label = b"" + elif byte_ == b"\\": + escaping = True + edigits = 0 + total = 0 + else: + label += byte_ + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(label) + else: + labels.append(b"") + if (len(labels) == 0 or labels[-1] != b"") and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +# we need 'dns.wire.Parser' quoted as dns.name and dns.wire depend on each other. + + +def from_wire_parser(parser: "dns.wire.Parser") -> Name: + """Convert possibly compressed wire format into a Name. + + *parser* is a dns.wire.Parser. + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``dns.name.Name`` + """ + + labels = [] + biggest_pointer = parser.current + with parser.restore_furthest(): + count = parser.get_uint8() + while count != 0: + if count < 64: + labels.append(parser.get_bytes(count)) + elif count >= 192: + current = (count & 0x3F) * 256 + parser.get_uint8() + if current >= biggest_pointer: + raise BadPointer + biggest_pointer = current + parser.seek(current) + else: + raise BadLabelType + count = parser.get_uint8() + labels.append(b"") + return Name(labels) + + +def from_wire(message: bytes, current: int) -> Tuple[Name, int]: + """Convert possibly compressed wire format into a Name. + + *message* is a ``bytes`` containing an entire DNS message in DNS + wire form. + + *current*, an ``int``, is the offset of the beginning of the name + from the start of the message + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``(dns.name.Name, int)`` tuple consisting of the name + that was read and the number of bytes of the wire format message + which were consumed reading it. 
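+
+    A minimal sketch, assuming *wire* holds a complete DNS message: the
+    question name begins at offset 12, immediately after the fixed
+    12-byte header::
+
+        name, used = dns.name.from_wire(wire, 12)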
+ """ + + if not isinstance(message, bytes): + raise ValueError("input to from_wire() must be a byte string") + parser = dns.wire.Parser(message, current) + name = from_wire_parser(parser) + return (name, parser.current - current) + + +# RFC 4471 Support + +_MINIMAL_OCTET = b"\x00" +_MINIMAL_OCTET_VALUE = ord(_MINIMAL_OCTET) +_SUCCESSOR_PREFIX = Name([_MINIMAL_OCTET]) +_MAXIMAL_OCTET = b"\xff" +_MAXIMAL_OCTET_VALUE = ord(_MAXIMAL_OCTET) +_AT_SIGN_VALUE = ord("@") +_LEFT_SQUARE_BRACKET_VALUE = ord("[") + + +def _wire_length(labels): + return functools.reduce(lambda v, x: v + len(x) + 1, labels, 0) + + +def _pad_to_max_name(name): + needed = 255 - _wire_length(name.labels) + new_labels = [] + while needed > 64: + new_labels.append(_MAXIMAL_OCTET * 63) + needed -= 64 + if needed >= 2: + new_labels.append(_MAXIMAL_OCTET * (needed - 1)) + # Note we're already maximal in the needed == 1 case as while we'd like + # to add one more byte as a new label, we can't, as adding a new non-empty + # label requires at least 2 bytes. + new_labels = list(reversed(new_labels)) + new_labels.extend(name.labels) + return Name(new_labels) + + +def _pad_to_max_label(label, suffix_labels): + length = len(label) + # We have to subtract one here to account for the length byte of label. + remaining = 255 - _wire_length(suffix_labels) - length - 1 + if remaining <= 0: + # Shouldn't happen! + return label + needed = min(63 - length, remaining) + return label + _MAXIMAL_OCTET * needed + + +def _absolute_predecessor(name: Name, origin: Name, prefix_ok: bool) -> Name: + # This is the RFC 4471 predecessor algorithm using the "absolute method" of section + # 3.1.1. + # + # Our caller must ensure that the name and origin are absolute, and that name is a + # subdomain of origin. + if name == origin: + return _pad_to_max_name(name) + least_significant_label = name[0] + if least_significant_label == _MINIMAL_OCTET: + return name.parent() + least_octet = least_significant_label[-1] + suffix_labels = name.labels[1:] + if least_octet == _MINIMAL_OCTET_VALUE: + new_labels = [least_significant_label[:-1]] + else: + octets = bytearray(least_significant_label) + octet = octets[-1] + if octet == _LEFT_SQUARE_BRACKET_VALUE: + octet = _AT_SIGN_VALUE + else: + octet -= 1 + octets[-1] = octet + least_significant_label = bytes(octets) + new_labels = [_pad_to_max_label(least_significant_label, suffix_labels)] + new_labels.extend(suffix_labels) + name = Name(new_labels) + if prefix_ok: + return _pad_to_max_name(name) + else: + return name + + +def _absolute_successor(name: Name, origin: Name, prefix_ok: bool) -> Name: + # This is the RFC 4471 successor algorithm using the "absolute method" of section + # 3.1.2. + # + # Our caller must ensure that the name and origin are absolute, and that name is a + # subdomain of origin. + if prefix_ok: + # Try prefixing \000 as new label + try: + return _SUCCESSOR_PREFIX.concatenate(name) + except NameTooLong: + pass + while name != origin: + # Try extending the least significant label. + least_significant_label = name[0] + if len(least_significant_label) < 63: + # We may be able to extend the least label with a minimal additional byte. + # This is only "may" because we could have a maximal length name even though + # the least significant label isn't maximally long. 
+            new_labels = [least_significant_label + _MINIMAL_OCTET]
+            new_labels.extend(name.labels[1:])
+            try:
+                return dns.name.Name(new_labels)
+            except dns.name.NameTooLong:
+                pass
+        # We can't extend the label either, so we'll try to increment the least
+        # significant non-maximal byte in it.
+        octets = bytearray(least_significant_label)
+        # We do this reversed iteration with an explicit indexing variable because
+        # if we find something to increment, we're going to want to truncate everything
+        # to the right of it.
+        for i in range(len(octets) - 1, -1, -1):
+            octet = octets[i]
+            if octet == _MAXIMAL_OCTET_VALUE:
+                # We can't increment this, so keep looking.
+                continue
+            # Finally, something we can increment. We have to apply a special rule for
+            # incrementing "@", sending it to "[", because RFC 4034 6.1 says that when
+            # comparing names, uppercase letters compare as if they were their
+            # lower-case equivalents. If we increment "@" to "A", then it would compare
+            # as "a", which is after "[", "\", "]", "^", "_", and "`", so we would have
+            # skipped the most minimal successor, namely "[".
+            if octet == _AT_SIGN_VALUE:
+                octet = _LEFT_SQUARE_BRACKET_VALUE
+            else:
+                octet += 1
+            octets[i] = octet
+            # We can now truncate all of the maximal values we skipped (if any)
+            new_labels = [bytes(octets[: i + 1])]
+            new_labels.extend(name.labels[1:])
+            # We haven't changed the length of the name, so the Name constructor will
+            # always work.
+            return Name(new_labels)
+        # We couldn't increment, so chop off the least significant label and try
+        # again.
+        name = name.parent()
+
+    # We couldn't increment at all, so return the origin, as wrapping around is the
+    # DNSSEC way.
+    return origin
+
+
+def _handle_relativity_and_call(
+    function: Callable[[Name, Name, bool], Name],
+    name: Name,
+    origin: Name,
+    prefix_ok: bool,
+) -> Name:
+    # Make "name" absolute if needed, ensure that the origin is absolute,
+    # call function(), and then relativize the result if needed.
+    if not origin.is_absolute():
+        raise NeedAbsoluteNameOrOrigin
+    relative = not name.is_absolute()
+    if relative:
+        name = name.derelativize(origin)
+    elif not name.is_subdomain(origin):
+        raise NeedSubdomainOfOrigin
+    result_name = function(name, origin, prefix_ok)
+    if relative:
+        result_name = result_name.relativize(origin)
+    return result_name
diff --git a/venv/lib/python3.12/site-packages/dns/namedict.py b/venv/lib/python3.12/site-packages/dns/namedict.py
new file mode 100644
index 0000000..ca8b197
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/namedict.py
@@ -0,0 +1,109 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+# Copyright (C) 2016 Coresec Systems AB
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC +# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS name dictionary""" + +# pylint seems to be confused about this one! +from collections.abc import MutableMapping # pylint: disable=no-name-in-module + +import dns.name + + +class NameDict(MutableMapping): + """A dictionary whose keys are dns.name.Name objects. + + In addition to being like a regular Python dictionary, this + dictionary can also get the deepest match for a given key. + """ + + __slots__ = ["max_depth", "max_depth_items", "__store"] + + def __init__(self, *args, **kwargs): + super().__init__() + self.__store = dict() + #: the maximum depth of the keys that have ever been added + self.max_depth = 0 + #: the number of items of maximum depth + self.max_depth_items = 0 + self.update(dict(*args, **kwargs)) + + def __update_max_depth(self, key): + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items + 1 + elif len(key) > self.max_depth: + self.max_depth = len(key) + self.max_depth_items = 1 + + def __getitem__(self, key): + return self.__store[key] + + def __setitem__(self, key, value): + if not isinstance(key, dns.name.Name): + raise ValueError("NameDict key must be a name") + self.__store[key] = value + self.__update_max_depth(key) + + def __delitem__(self, key): + self.__store.pop(key) + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items - 1 + if self.max_depth_items == 0: + self.max_depth = 0 + for k in self.__store: + self.__update_max_depth(k) + + def __iter__(self): + return iter(self.__store) + + def __len__(self): + return len(self.__store) + + def has_key(self, key): + return key in self.__store + + def get_deepest_match(self, name): + """Find the deepest match to *name* in the dictionary. + + The deepest match is the longest name in the dictionary which is + a superdomain of *name*. Note that *superdomain* includes matching + *name* itself. + + *name*, a ``dns.name.Name``, the name to find. + + Returns a ``(key, value)`` where *key* is the deepest + ``dns.name.Name``, and *value* is the value associated with *key*. 
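+
+        A small sketch (the names are illustrative)::
+
+            d = NameDict()
+            d[dns.name.from_text("example.")] = 1
+            d[dns.name.from_text("sub.example.")] = 2
+            key, value = d.get_deepest_match(
+                dns.name.from_text("www.sub.example.")
+            )
+            # key is sub.example. and value is 2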
+ """ + + depth = len(name) + if depth > self.max_depth: + depth = self.max_depth + for i in range(-depth, 0): + n = dns.name.Name(name[i:]) + if n in self: + return (n, self[n]) + v = self[dns.name.empty] + return (dns.name.empty, v) diff --git a/venv/lib/python3.12/site-packages/dns/nameserver.py b/venv/lib/python3.12/site-packages/dns/nameserver.py new file mode 100644 index 0000000..c9307d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/nameserver.py @@ -0,0 +1,361 @@ +from urllib.parse import urlparse + +import dns.asyncbackend +import dns.asyncquery +import dns.message +import dns.query + + +class Nameserver: + def __init__(self): + pass + + def __str__(self): + raise NotImplementedError + + def kind(self) -> str: + raise NotImplementedError + + def is_always_max_size(self) -> bool: + raise NotImplementedError + + def answer_nameserver(self) -> str: + raise NotImplementedError + + def answer_port(self) -> int: + raise NotImplementedError + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + raise NotImplementedError + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + raise NotImplementedError + + +class AddressAndPortNameserver(Nameserver): + def __init__(self, address: str, port: int): + super().__init__() + self.address = address + self.port = port + + def kind(self) -> str: + raise NotImplementedError + + def is_always_max_size(self) -> bool: + return False + + def __str__(self): + ns_kind = self.kind() + return f"{ns_kind}:{self.address}@{self.port}" + + def answer_nameserver(self) -> str: + return self.address + + def answer_port(self) -> int: + return self.port + + +class Do53Nameserver(AddressAndPortNameserver): + def __init__(self, address: str, port: int = 53): + super().__init__(address, port) + + def kind(self): + return "Do53" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + if max_size: + response = dns.query.tcp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + else: + response = dns.query.udp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + raise_on_truncation=True, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ignore_errors=True, + ignore_unexpected=True, + ) + return response + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + if max_size: + response = await dns.asyncquery.tcp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + backend=backend, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + else: + response = await 
dns.asyncquery.udp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + raise_on_truncation=True, + backend=backend, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ignore_errors=True, + ignore_unexpected=True, + ) + return response + + +class DoHNameserver(Nameserver): + def __init__( + self, + url: str, + bootstrap_address: str | None = None, + verify: bool | str = True, + want_get: bool = False, + http_version: dns.query.HTTPVersion = dns.query.HTTPVersion.DEFAULT, + ): + super().__init__() + self.url = url + self.bootstrap_address = bootstrap_address + self.verify = verify + self.want_get = want_get + self.http_version = http_version + + def kind(self): + return "DoH" + + def is_always_max_size(self) -> bool: + return True + + def __str__(self): + return self.url + + def answer_nameserver(self) -> str: + return self.url + + def answer_port(self) -> int: + port = urlparse(self.url).port + if port is None: + port = 443 + return port + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.https( + request, + self.url, + timeout=timeout, + source=source, + source_port=source_port, + bootstrap_address=self.bootstrap_address, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + post=(not self.want_get), + http_version=self.http_version, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.https( + request, + self.url, + timeout=timeout, + source=source, + source_port=source_port, + bootstrap_address=self.bootstrap_address, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + post=(not self.want_get), + http_version=self.http_version, + ) + + +class DoTNameserver(AddressAndPortNameserver): + def __init__( + self, + address: str, + port: int = 853, + hostname: str | None = None, + verify: bool | str = True, + ): + super().__init__(address, port) + self.hostname = hostname + self.verify = verify + + def kind(self): + return "DoT" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.tls( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + server_hostname=self.hostname, + verify=self.verify, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.tls( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + server_hostname=self.hostname, + verify=self.verify, + ) + + +class DoQNameserver(AddressAndPortNameserver): + def __init__( + self, + address: 
str, + port: int = 853, + verify: bool | str = True, + server_hostname: str | None = None, + ): + super().__init__(address, port) + self.verify = verify + self.server_hostname = server_hostname + + def kind(self): + return "DoQ" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.quic( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + server_hostname=self.server_hostname, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: str | None, + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.quic( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + server_hostname=self.server_hostname, + ) diff --git a/venv/lib/python3.12/site-packages/dns/node.py b/venv/lib/python3.12/site-packages/dns/node.py new file mode 100644 index 0000000..b2cbf1b --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/node.py @@ -0,0 +1,358 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS nodes. A node is a set of rdatasets.""" + +import enum +import io +from typing import Any, Dict + +import dns.immutable +import dns.name +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset + +_cname_types = { + dns.rdatatype.CNAME, +} + +# "neutral" types can coexist with a CNAME and thus are not "other data" +_neutral_types = { + dns.rdatatype.NSEC, # RFC 4035 section 2.5 + dns.rdatatype.NSEC3, # This is not likely to happen, but not impossible! 
+    dns.rdatatype.KEY,  # RFC 4035 section 2.5, RFC 3007
+}
+
+
+def _matches_type_or_its_signature(rdtypes, rdtype, covers):
+    return rdtype in rdtypes or (rdtype == dns.rdatatype.RRSIG and covers in rdtypes)
+
+
+@enum.unique
+class NodeKind(enum.Enum):
+    """Rdatasets in nodes"""
+
+    REGULAR = 0  # a.k.a "other data"
+    NEUTRAL = 1
+    CNAME = 2
+
+    @classmethod
+    def classify(
+        cls, rdtype: dns.rdatatype.RdataType, covers: dns.rdatatype.RdataType
+    ) -> "NodeKind":
+        if _matches_type_or_its_signature(_cname_types, rdtype, covers):
+            return NodeKind.CNAME
+        elif _matches_type_or_its_signature(_neutral_types, rdtype, covers):
+            return NodeKind.NEUTRAL
+        else:
+            return NodeKind.REGULAR
+
+    @classmethod
+    def classify_rdataset(cls, rdataset: dns.rdataset.Rdataset) -> "NodeKind":
+        return cls.classify(rdataset.rdtype, rdataset.covers)
+
+
+class Node:
+    """A Node is a set of rdatasets.
+
+    A node is either a CNAME node or an "other data" node. A CNAME
+    node contains only CNAME, KEY, NSEC, and NSEC3 rdatasets along with their
+    covering RRSIG rdatasets. An "other data" node contains any
+    rdataset other than a CNAME or RRSIG(CNAME) rdataset. When
+    changes are made to a node, the CNAME or "other data" state is
+    always consistent with the update, i.e. the most recent change
+    wins. For example, if you have a node which contains a CNAME
+    rdataset, and then add an MX rdataset to it, then the CNAME
+    rdataset will be deleted. Likewise if you have a node containing
+    an MX rdataset and add a CNAME rdataset, the MX rdataset will be
+    deleted.
+    """
+
+    __slots__ = ["rdatasets"]
+
+    def __init__(self):
+        # the set of rdatasets, represented as a list.
+        self.rdatasets = []
+
+    def to_text(self, name: dns.name.Name, **kw: Dict[str, Any]) -> str:
+        """Convert a node to text format.
+
+        Each rdataset at the node is printed. Any keyword arguments
+        to this method are passed on to the rdataset's to_text() method.
+
+        *name*, a ``dns.name.Name``, the owner name of the
+        rdatasets.
+
+        Returns a ``str``.
+
+        """
+
+        s = io.StringIO()
+        for rds in self.rdatasets:
+            if len(rds) > 0:
+                s.write(rds.to_text(name, **kw))  # type: ignore[arg-type]
+                s.write("\n")
+        return s.getvalue()[:-1]
+
+    def __repr__(self):
+        return "<DNS node " + str(id(self)) + ">"
+
+    def __eq__(self, other):
+        #
+        # This is inefficient. Good thing we don't need to do it much.
+        #
+        for rd in self.rdatasets:
+            if rd not in other.rdatasets:
+                return False
+        for rd in other.rdatasets:
+            if rd not in self.rdatasets:
+                return False
+        return True
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __len__(self):
+        return len(self.rdatasets)
+
+    def __iter__(self):
+        return iter(self.rdatasets)
+
+    def _append_rdataset(self, rdataset):
+        """Append rdataset to the node with special handling for CNAME and
+        other data conditions.
+
+        Specifically, if the rdataset being appended has ``NodeKind.CNAME``,
+        then all rdatasets other than KEY, NSEC, NSEC3, and their covering
+        RRSIGs are deleted. If the rdataset being appended has
+        ``NodeKind.REGULAR`` then CNAME and RRSIG(CNAME) are deleted.
+        """
+        # Make having just one rdataset at the node fast.
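+        # E.g. appending an MX rdataset to a node that currently holds a
+        # CNAME removes the CNAME (and RRSIG(CNAME)) below, while NEUTRAL
+        # types such as NSEC or KEY are left alone in either direction.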
+ if len(self.rdatasets) > 0: + kind = NodeKind.classify_rdataset(rdataset) + if kind == NodeKind.CNAME: + self.rdatasets = [ + rds + for rds in self.rdatasets + if NodeKind.classify_rdataset(rds) != NodeKind.REGULAR + ] + elif kind == NodeKind.REGULAR: + self.rdatasets = [ + rds + for rds in self.rdatasets + if NodeKind.classify_rdataset(rds) != NodeKind.CNAME + ] + # Otherwise the rdataset is NodeKind.NEUTRAL and we do not need to + # edit self.rdatasets. + self.rdatasets.append(rdataset) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + """Find an rdataset matching the specified properties in the + current node. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the class of the rdataset. + + *rdtype*, a ``dns.rdatatype.RdataType``, the type of the rdataset. + + *covers*, a ``dns.rdatatype.RdataType``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Raises ``KeyError`` if an rdataset of the desired type and class does + not exist and *create* is not ``True``. + + Returns a ``dns.rdataset.Rdataset``. + """ + + for rds in self.rdatasets: + if rds.match(rdclass, rdtype, covers): + return rds + if not create: + raise KeyError + rds = dns.rdataset.Rdataset(rdclass, rdtype, covers) + self._append_rdataset(rds) + return rds + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset | None: + """Get an rdataset matching the specified properties in the + current node. + + None is returned if an rdataset of the specified type and + class does not exist and *create* is not ``True``. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. Usually this value is + dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or + dns.rdatatype.RRSIG, then the covers value will be the rdata + type the SIG/RRSIG covers. The library treats the SIG and RRSIG + types as if they were a family of + types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much + easier to work with than if RRSIGs covering different rdata + types were aggregated into a single RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. + """ + + try: + rds = self.find_rdataset(rdclass, rdtype, covers, create) + except KeyError: + rds = None + return rds + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + """Delete the rdataset matching the specified properties in the + current node. + + If a matching rdataset does not exist, it is not an error. + + *rdclass*, an ``int``, the class of the rdataset. 
+ + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. + """ + + rds = self.get_rdataset(rdclass, rdtype, covers) + if rds is not None: + self.rdatasets.remove(rds) + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + """Replace an rdataset. + + It is not an error if there is no rdataset matching *replacement*. + + Ownership of the *replacement* object is transferred to the node; + in other words, this method does not store a copy of *replacement* + at the node, it stores *replacement* itself. + + *replacement*, a ``dns.rdataset.Rdataset``. + + Raises ``ValueError`` if *replacement* is not a + ``dns.rdataset.Rdataset``. + """ + + if not isinstance(replacement, dns.rdataset.Rdataset): + raise ValueError("replacement is not an rdataset") + if isinstance(replacement, dns.rrset.RRset): + # RRsets are not good replacements as the match() method + # is not compatible. + replacement = replacement.to_rdataset() + self.delete_rdataset( + replacement.rdclass, replacement.rdtype, replacement.covers + ) + self._append_rdataset(replacement) + + def classify(self) -> NodeKind: + """Classify a node. + + A node which contains a CNAME or RRSIG(CNAME) is a + ``NodeKind.CNAME`` node. + + A node which contains only "neutral" types, i.e. types allowed to + co-exist with a CNAME, is a ``NodeKind.NEUTRAL`` node. The neutral + types are NSEC, NSEC3, KEY, and their associated RRSIGS. An empty node + is also considered neutral. + + A node which contains some rdataset which is not a CNAME, RRSIG(CNAME), + or a neutral type is a a ``NodeKind.REGULAR`` node. Regular nodes are + also commonly referred to as "other data". + """ + for rdataset in self.rdatasets: + kind = NodeKind.classify(rdataset.rdtype, rdataset.covers) + if kind != NodeKind.NEUTRAL: + return kind + return NodeKind.NEUTRAL + + def is_immutable(self) -> bool: + return False + + +@dns.immutable.immutable +class ImmutableNode(Node): + def __init__(self, node): + super().__init__() + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset | None: + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + raise TypeError("immutable") + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + raise TypeError("immutable") + + def is_immutable(self) -> bool: + return True diff --git a/venv/lib/python3.12/site-packages/dns/opcode.py b/venv/lib/python3.12/site-packages/dns/opcode.py new file mode 100644 index 0000000..3fa610d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/opcode.py @@ -0,0 +1,119 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. 
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS Opcodes."""
+
+from typing import Type
+
+import dns.enum
+import dns.exception
+
+
+class Opcode(dns.enum.IntEnum):
+    #: Query
+    QUERY = 0
+    #: Inverse Query (historical)
+    IQUERY = 1
+    #: Server Status (unspecified and unimplemented anywhere)
+    STATUS = 2
+    #: Notify
+    NOTIFY = 4
+    #: Dynamic Update
+    UPDATE = 5
+
+    @classmethod
+    def _maximum(cls):
+        return 15
+
+    @classmethod
+    def _unknown_exception_class(cls) -> Type[Exception]:
+        return UnknownOpcode
+
+
+class UnknownOpcode(dns.exception.DNSException):
+    """A DNS opcode is unknown."""
+
+
+def from_text(text: str) -> Opcode:
+    """Convert text into an opcode.
+
+    *text*, a ``str``, the textual opcode.
+
+    Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown.
+
+    Returns an ``int``.
+    """
+
+    return Opcode.from_text(text)
+
+
+def from_flags(flags: int) -> Opcode:
+    """Extract an opcode from DNS message flags.
+
+    *flags*, an ``int``, the DNS flags.
+
+    Returns an ``int``.
+    """
+
+    return Opcode((flags & 0x7800) >> 11)
+
+
+def to_flags(value: Opcode) -> int:
+    """Convert an opcode to a value suitable for ORing into DNS message
+    flags.
+
+    *value*, an ``int``, the DNS opcode value.
+
+    Returns an ``int``.
+    """
+
+    return (value << 11) & 0x7800
+
+
+def to_text(value: Opcode) -> str:
+    """Convert an opcode to text.
+
+    *value*, an ``int``, the opcode value.
+
+    Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown.
+
+    Returns a ``str``.
+    """
+
+    return Opcode.to_text(value)
+
+
+def is_update(flags: int) -> bool:
+    """Is the opcode in flags UPDATE?
+
+    *flags*, an ``int``, the DNS message flags.
+
+    Returns a ``bool``.
+    """
+
+    return from_flags(flags) == Opcode.UPDATE
+
+
+### BEGIN generated Opcode constants
+
+QUERY = Opcode.QUERY
+IQUERY = Opcode.IQUERY
+STATUS = Opcode.STATUS
+NOTIFY = Opcode.NOTIFY
+UPDATE = Opcode.UPDATE
+
+### END generated Opcode constants
diff --git a/venv/lib/python3.12/site-packages/dns/py.typed b/venv/lib/python3.12/site-packages/dns/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/dns/query.py b/venv/lib/python3.12/site-packages/dns/query.py
new file mode 100644
index 0000000..17b1862
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/query.py
@@ -0,0 +1,1786 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
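A quick round-trip sketch of the flag packing defined by the opcode helpers above (the opcode occupies bits 11-14 of the DNS header flags word):

    import dns.opcode

    flags = dns.opcode.to_flags(dns.opcode.Opcode.UPDATE)
    assert flags == 0x2800  # 5 << 11, masked by 0x7800
    assert dns.opcode.from_flags(flags) == dns.opcode.Opcode.UPDATE
    assert dns.opcode.is_update(flags)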
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Talk to a DNS server.""" + +import base64 +import contextlib +import enum +import errno +import os +import random +import selectors +import socket +import struct +import time +import urllib.parse +from typing import Any, Callable, Dict, Optional, Tuple, cast + +import dns._features +import dns._tls_util +import dns.exception +import dns.inet +import dns.message +import dns.name +import dns.quic +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.transaction +import dns.tsig +import dns.xfr + +try: + import ssl +except ImportError: + import dns._no_ssl as ssl # type: ignore + + +def _remaining(expiration): + if expiration is None: + return None + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + return timeout + + +def _expiration_for_this_attempt(timeout, expiration): + if expiration is None: + return None + return min(time.time() + timeout, expiration) + + +_have_httpx = dns._features.have("doh") +if _have_httpx: + import httpcore._backends.sync + import httpx + + _CoreNetworkBackend = httpcore.NetworkBackend + _CoreSyncStream = httpcore._backends.sync.SyncStream + + class _NetworkBackend(_CoreNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + + def connect_tcp( + self, host, port, timeout=None, local_address=None, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + af = dns.inet.af_for_address(address) + if local_address is not None or self._local_port != 0: + if local_address is None: + local_address = "0.0.0.0" + source = dns.inet.low_level_address_tuple( + (local_address, self._local_port), af + ) + else: + source = None + try: + sock = make_socket(af, socket.SOCK_STREAM, source) + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + _connect( + sock, + dns.inet.low_level_address_tuple((address, port), af), + attempt_expiration, + ) + return _CoreSyncStream(sock) + except Exception: + pass + raise httpcore.ConnectError + + def connect_unix_socket( + self, path, timeout=None, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + class _HTTPTransport(httpx.HTTPTransport): # pyright: ignore + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None and bootstrap_address is None: + # pylint: 
disable=import-outside-toplevel,redefined-outer-name + import dns.resolver + + resolver = dns.resolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +else: + + class _HTTPTransport: # type: ignore + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + pass + + def connect_tcp(self, host, port, timeout, local_address): + raise NotImplementedError + + +have_doh = _have_httpx + + +def default_socket_factory( + af: socket.AddressFamily | int, + kind: socket.SocketKind, + proto: int, +) -> socket.socket: + return socket.socket(af, kind, proto) + + +# Function used to create a socket. Can be overridden if needed in special +# situations. +socket_factory: Callable[ + [socket.AddressFamily | int, socket.SocketKind, int], socket.socket +] = default_socket_factory + + +class UnexpectedSource(dns.exception.DNSException): + """A DNS query response came from an unexpected address or port.""" + + +class BadResponse(dns.exception.FormError): + """A DNS query response does not respond to the question asked.""" + + +class NoDOH(dns.exception.DNSException): + """DNS over HTTPS (DOH) was requested but the httpx module is not + available.""" + + +class NoDOQ(dns.exception.DNSException): + """DNS over QUIC (DOQ) was requested but the aioquic module is not + available.""" + + +# for backwards compatibility +TransferError = dns.xfr.TransferError + + +def _compute_times(timeout): + now = time.time() + if timeout is None: + return (now, None) + else: + return (now, now + timeout) + + +def _wait_for(fd, readable, writable, _, expiration): + # Use the selected selector class to wait for any of the specified + # events. An "expiration" absolute time is converted into a relative + # timeout. + # + # The unused parameter is 'error', which is always set when + # selecting for read or write, and we have no error-only selects. + + if readable and isinstance(fd, ssl.SSLSocket) and fd.pending() > 0: + return True + with selectors.DefaultSelector() as sel: + events = 0 + if readable: + events |= selectors.EVENT_READ + if writable: + events |= selectors.EVENT_WRITE + if events: + sel.register(fd, events) # pyright: ignore + if expiration is None: + timeout = None + else: + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + if not sel.select(timeout): + raise dns.exception.Timeout + + +def _wait_for_readable(s, expiration): + _wait_for(s, True, False, True, expiration) + + +def _wait_for_writable(s, expiration): + _wait_for(s, False, True, True, expiration) + + +def _addresses_equal(af, a1, a2): + # Convert the first value of the tuple, which is a textual format + # address into binary form, so that we are not confused by different + # textual representations of the same address + try: + n1 = dns.inet.inet_pton(af, a1[0]) + n2 = dns.inet.inet_pton(af, a2[0]) + except dns.exception.SyntaxError: + return False + return n1 == n2 and a1[1:] == a2[1:] + + +def _matches_destination(af, from_address, destination, ignore_unexpected): + # Check that from_address is appropriate for a response to a query + # sent to destination. 
+ if not destination: + return True + if _addresses_equal(af, from_address, destination) or ( + dns.inet.is_multicast(destination[0]) and from_address[1:] == destination[1:] + ): + return True + elif ignore_unexpected: + return False + raise UnexpectedSource( + f"got a response from {from_address} instead of " f"{destination}" + ) + + +def _destination_and_source( + where, port, source, source_port, where_must_be_address=True +): + # Apply defaults and compute destination and source tuples + # suitable for use in connect(), sendto(), or bind(). + af = None + destination = None + try: + af = dns.inet.af_for_address(where) + destination = where + except Exception: + if where_must_be_address: + raise + # URLs are ok so eat the exception + if source: + saf = dns.inet.af_for_address(source) + if af: + # We know the destination af, so source had better agree! + if saf != af: + raise ValueError( + "different address families for source and destination" + ) + else: + # We didn't know the destination af, but we know the source, + # so that's our af. + af = saf + if source_port and not source: + # Caller has specified a source_port but not an address, so we + # need to return a source, and we need to use the appropriate + # wildcard address as the address. + try: + source = dns.inet.any_for_af(af) + except Exception: + # we catch this and raise ValueError for backwards compatibility + raise ValueError("source_port specified but address family is unknown") + # Convert high-level (address, port) tuples into low-level address + # tuples. + if destination: + destination = dns.inet.low_level_address_tuple((destination, port), af) + if source: + source = dns.inet.low_level_address_tuple((source, source_port), af) + return (af, destination, source) + + +def make_socket( + af: socket.AddressFamily | int, + type: socket.SocketKind, + source: Any | None = None, +) -> socket.socket: + """Make a socket. + + This function uses the module's ``socket_factory`` to make a socket of the + specified address family and type. + + *af*, a ``socket.AddressFamily`` or ``int`` is the address family, either + ``socket.AF_INET`` or ``socket.AF_INET6``. + + *type*, a ``socket.SocketKind`` is the type of socket, e.g. ``socket.SOCK_DGRAM``, + a datagram socket, or ``socket.SOCK_STREAM``, a stream socket. Note that the + ``proto`` attribute of a socket is always zero with this API, so a datagram socket + will always be a UDP socket, and a stream socket will always be a TCP socket. + + *source* is the source address and port to bind to, if any. The default is + ``None`` which will bind to the wildcard address and a randomly chosen port. + If not ``None``, it should be a (low-level) address tuple appropriate for *af*. + """ + s = socket_factory(af, type, 0) + try: + s.setblocking(False) + if source is not None: + s.bind(source) + return s + except Exception: + s.close() + raise + + +def make_ssl_socket( + af: socket.AddressFamily | int, + type: socket.SocketKind, + ssl_context: ssl.SSLContext, + server_hostname: dns.name.Name | str | None = None, + source: Any | None = None, +) -> ssl.SSLSocket: + """Make a socket. + + This function uses the module's ``socket_factory`` to make a socket of the + specified address family and type. + + *af*, a ``socket.AddressFamily`` or ``int`` is the address family, either + ``socket.AF_INET`` or ``socket.AF_INET6``. + + *type*, a ``socket.SocketKind`` is the type of socket, e.g. ``socket.SOCK_DGRAM``, + a datagram socket, or ``socket.SOCK_STREAM``, a stream socket. 
Note that the
+    ``proto`` attribute of a socket is always zero with this API, so a datagram socket
+    will always be a UDP socket, and a stream socket will always be a TCP socket.
+
+    If *ssl_context* is not ``None``, then it specifies the SSL context to use,
+    typically created with ``make_ssl_context()``.
+
+    If *server_hostname* is not ``None``, then it is the hostname to use for server
+    certificate validation.  A valid hostname must be supplied if *ssl_context*
+    requires hostname checking.
+
+    *source* is the source address and port to bind to, if any.  The default is
+    ``None`` which will bind to the wildcard address and a randomly chosen port.
+    If not ``None``, it should be a (low-level) address tuple appropriate for *af*.
+    """
+    sock = make_socket(af, type, source)
+    if isinstance(server_hostname, dns.name.Name):
+        server_hostname = server_hostname.to_text()
+    # LGTM gets a false positive here, as our default context is OK
+    return ssl_context.wrap_socket(
+        sock,
+        do_handshake_on_connect=False,  # lgtm[py/insecure-protocol]
+        server_hostname=server_hostname,
+    )
+
+
+# for backwards compatibility
+def _make_socket(
+    af,
+    type,
+    source,
+    ssl_context,
+    server_hostname,
+):
+    if ssl_context is not None:
+        return make_ssl_socket(af, type, ssl_context, server_hostname, source)
+    else:
+        return make_socket(af, type, source)
+
+
+def _maybe_get_resolver(
+    resolver: Optional["dns.resolver.Resolver"],  # pyright: ignore
+) -> "dns.resolver.Resolver":  # pyright: ignore
+    # We need a separate method for this to avoid overriding the global
+    # variable "dns" with the as-yet undefined local variable "dns"
+    # in https().
+    if resolver is None:
+        # pylint: disable=import-outside-toplevel,redefined-outer-name
+        import dns.resolver
+
+        resolver = dns.resolver.Resolver()
+    return resolver
+
+
+class HTTPVersion(enum.IntEnum):
+    """Which version of HTTP should be used?
+
+    DEFAULT will select the first version from the list [2, 1.1, 3] that
+    is available.
+    """
+
+    DEFAULT = 0
+    HTTP_1 = 1
+    H1 = 1
+    HTTP_2 = 2
+    H2 = 2
+    HTTP_3 = 3
+    H3 = 3
+
+
+def https(
+    q: dns.message.Message,
+    where: str,
+    timeout: float | None = None,
+    port: int = 443,
+    source: str | None = None,
+    source_port: int = 0,
+    one_rr_per_rrset: bool = False,
+    ignore_trailing: bool = False,
+    session: Any | None = None,
+    path: str = "/dns-query",
+    post: bool = True,
+    bootstrap_address: str | None = None,
+    verify: bool | str | ssl.SSLContext = True,
+    resolver: Optional["dns.resolver.Resolver"] = None,  # pyright: ignore
+    family: int = socket.AF_UNSPEC,
+    http_version: HTTPVersion = HTTPVersion.DEFAULT,
+) -> dns.message.Message:
+    """Return the response obtained after sending a query via DNS-over-HTTPS.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``str``, the nameserver IP address or the full URL.  If an IP address
+    is given, the URL will be constructed using the following schema:
+    ``https://<IP-address>:<port>/<path>``.
+
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
+    times out.  If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the query to.  The default is 443.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
+    address.  The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.  The default
+    is 0.
+
+    *one_rr_per_rrset*, a ``bool``.  If ``True``, put each RR into its own RRset.
+
+    *ignore_trailing*, a ``bool``.
If ``True``, ignore trailing junk at end of the + received message. + + *session*, an ``httpx.Client``. If provided, the client session to use to send the + queries. + + *path*, a ``str``. If *where* is an IP address, then *path* will be used to + construct the URL to send the DNS query to. + + *post*, a ``bool``. If ``True``, the default, POST method will be used. + + *bootstrap_address*, a ``str``, the IP address to use to bypass resolution. + + *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification + of the server is done using the default CA bundle; if ``False``, then no + verification is done; if a `str` then it specifies the path to a certificate file or + directory which will be used for verification. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames in URLs. If not specified, a new resolver with a default + configuration will be used; note this is *not* the default resolver as that resolver + might have been configured to use DoH causing a chicken-and-egg problem. This + parameter only has an effect if the HTTP library is httpx. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC (the default), both A + and AAAA records will be retrieved. + + *http_version*, a ``dns.query.HTTPVersion``, indicating which HTTP version to use. + + Returns a ``dns.message.Message``. + """ + + (af, _, the_source) = _destination_and_source( + where, port, source, source_port, False + ) + # we bind url and then override as pyright can't figure out all paths bind. + url = where + if af is not None and dns.inet.is_address(where): + if af == socket.AF_INET: + url = f"https://{where}:{port}{path}" + elif af == socket.AF_INET6: + url = f"https://[{where}]:{port}{path}" + + extensions = {} + if bootstrap_address is None: + # pylint: disable=possibly-used-before-assignment + parsed = urllib.parse.urlparse(url) + if parsed.hostname is None: + raise ValueError("no hostname in URL") + if dns.inet.is_address(parsed.hostname): + bootstrap_address = parsed.hostname + extensions["sni_hostname"] = parsed.hostname + if parsed.port is not None: + port = parsed.port + + if http_version == HTTPVersion.H3 or ( + http_version == HTTPVersion.DEFAULT and not have_doh + ): + if bootstrap_address is None: + resolver = _maybe_get_resolver(resolver) + assert parsed.hostname is not None # pyright: ignore + answers = resolver.resolve_name(parsed.hostname, family) # pyright: ignore + bootstrap_address = random.choice(list(answers.addresses())) + if session and not isinstance( + session, dns.quic.SyncQuicConnection + ): # pyright: ignore + raise ValueError("session parameter must be a dns.quic.SyncQuicConnection.") + return _http3( + q, + bootstrap_address, + url, # pyright: ignore + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + verify=verify, + post=post, + connection=session, + ) + + if not have_doh: + raise NoDOH # pragma: no cover + if session and not isinstance(session, httpx.Client): # pyright: ignore + raise ValueError("session parameter must be an httpx.Client") + + wire = q.to_wire() + headers = {"accept": "application/dns-message"} + + h1 = http_version in (HTTPVersion.H1, HTTPVersion.DEFAULT) + h2 = http_version in (HTTPVersion.H2, HTTPVersion.DEFAULT) + + # set source port and source address + + if the_source is None: + local_address = None + local_port = 0 + else: + local_address = the_source[0] + local_port = the_source[1] + + if session: + cm: contextlib.AbstractContextManager = 
contextlib.nullcontext(session) + else: + transport = _HTTPTransport( + local_address=local_address, + http1=h1, + http2=h2, + verify=verify, + local_port=local_port, + bootstrap_address=bootstrap_address, + resolver=resolver, + family=family, # pyright: ignore + ) + + cm = httpx.Client( # type: ignore + http1=h1, http2=h2, verify=verify, transport=transport # type: ignore + ) + with cm as session: + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + assert session is not None + if post: + headers.update( + { + "content-type": "application/dns-message", + "content-length": str(len(wire)), + } + ) + response = session.post( + url, + headers=headers, + content=wire, + timeout=timeout, + extensions=extensions, + ) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + twire = wire.decode() # httpx does a repr() if we give it bytes + response = session.get( + url, + headers=headers, + timeout=timeout, + params={"dns": twire}, + extensions=extensions, + ) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError( + f"{where} responded with status code {response.status_code}" + f"\nResponse body: {response.content}" + ) + r = dns.message.from_wire( + response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = response.elapsed.total_seconds() + if not q.is_response(r): + raise BadResponse + return r + + +def _find_header(headers: dns.quic.Headers, name: bytes) -> bytes: + if headers is None: + raise KeyError + for header, value in headers: + if header == name: + return value + raise KeyError + + +def _check_status(headers: dns.quic.Headers, peer: str, wire: bytes) -> None: + value = _find_header(headers, b":status") + if value is None: + raise SyntaxError("no :status header in response") + status = int(value) + if status < 0: + raise SyntaxError("status is negative") + if status < 200 or status > 299: + error = "" + if len(wire) > 0: + try: + error = ": " + wire.decode() + except Exception: + pass + raise ValueError(f"{peer} responded with status code {status}{error}") + + +def _http3( + q: dns.message.Message, + where: str, + url: str, + timeout: float | None = None, + port: int = 443, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + verify: bool | str | ssl.SSLContext = True, + post: bool = True, + connection: dns.quic.SyncQuicConnection | None = None, +) -> dns.message.Message: + if not dns.quic.have_quic: + raise NoDOH("DNS-over-HTTP3 is not available.") # pragma: no cover + + url_parts = urllib.parse.urlparse(url) + hostname = url_parts.hostname + assert hostname is not None + if url_parts.port is not None: + port = url_parts.port + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.SyncQuicConnection + the_manager: dns.quic.SyncQuicManager + if connection: + manager: contextlib.AbstractContextManager = contextlib.nullcontext(None) + else: + manager = dns.quic.SyncQuicManager( + verify_mode=verify, server_name=hostname, h3=True # pyright: ignore + ) + the_manager = manager # for type checking happiness + + with manager: + if connection: + the_connection = connection + else: + the_connection = the_manager.connect( # pyright: ignore + where, port, source, source_port + ) + (start, expiration) = _compute_times(timeout) + with 
the_connection.make_stream(timeout) as stream: # pyright: ignore + stream.send_h3(url, wire, post) + wire = stream.receive(_remaining(expiration)) + _check_status(stream.headers(), where, wire) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +def _udp_recv(sock, max_size, expiration): + """Reads a datagram from the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + return sock.recvfrom(max_size) + except BlockingIOError: + _wait_for_readable(sock, expiration) + + +def _udp_send(sock, data, destination, expiration): + """Sends the specified datagram to destination over the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + if destination: + return sock.sendto(data, destination) + else: + return sock.send(data) + except BlockingIOError: # pragma: no cover + _wait_for_writable(sock, expiration) + + +def send_udp( + sock: Any, + what: dns.message.Message | bytes, + destination: Any, + expiration: float | None = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified UDP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = _udp_send(sock, what, destination, expiration) + return (n, sent_time) + + +def receive_udp( + sock: Any, + destination: Any | None = None, + expiration: float | None = None, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + keyring: Dict[dns.name.Name, dns.tsig.Key] | None = None, + request_mac: bytes | None = b"", + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + ignore_errors: bool = False, + query: dns.message.Message | None = None, +) -> Any: + """Read a DNS message from a UDP socket. + + *sock*, a ``socket``. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where the message is expected to arrive from. + When receiving a response, this would be where the associated query was + sent. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. 
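The send_udp()/receive_udp() helpers documented here can be driven directly; a minimal sketch, where 192.0.2.1 is a placeholder resolver address (the top-level udp() wrapper below does the same thing with full error handling):

    import socket
    import time

    import dns.message
    import dns.query

    q = dns.message.make_query("example.com.", "A")
    destination = ("192.0.2.1", 53)  # hypothetical resolver
    with dns.query.make_socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        expiration = time.time() + 5.0
        dns.query.send_udp(s, q, destination, expiration)
        (r, received_time) = dns.query.receive_udp(s, destination, expiration)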
+ + If *destination* is not ``None``, returns a ``(dns.message.Message, float)`` + tuple of the received message and the received time. + + If *destination* is ``None``, returns a + ``(dns.message.Message, float, tuple)`` + tuple of the received message, the received time, and the address where + the message arrived from. + + *ignore_errors*, a ``bool``. If various format errors or response + mismatches occur, ignore them and keep listening for a valid response. + The default is ``False``. + + *query*, a ``dns.message.Message`` or ``None``. If not ``None`` and + *ignore_errors* is ``True``, check that the received message is a response + to this query, and if not keep listening for a valid response. + """ + + wire = b"" + while True: + (wire, from_address) = _udp_recv(sock, 65535, expiration) + if not _matches_destination( + sock.family, from_address, destination, ignore_unexpected + ): + continue + received_time = time.time() + try: + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation, + ) + except dns.message.Truncated as e: + # If we got Truncated and not FORMERR, we at least got the header with TC + # set, and very likely the question section, so we'll re-raise if the + # message seems to be a response as we need to know when truncation happens. + # We need to check that it seems to be a response as we don't want a random + # injected message with TC set to cause us to bail out. + if ( + ignore_errors + and query is not None + and not query.is_response(e.message()) + ): + continue + else: + raise + except Exception: + if ignore_errors: + continue + else: + raise + if ignore_errors and query is not None and not query.is_response(r): + continue + if destination: + return (r, received_time) + else: + return (r, received_time, from_address) + + +def udp( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 53, + source: str | None = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + sock: Any | None = None, + ignore_errors: bool = False, +) -> dns.message.Message: + """Return the response obtained after sending a query via UDP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + *sock*, a ``socket.socket``, or ``None``, the socket to use for the + query. If ``None``, the default, a socket is created. 
Note that + if a socket is provided, it must be a nonblocking datagram socket, + and the *source* and *source_port* are ignored. + + *ignore_errors*, a ``bool``. If various format errors or response + mismatches occur, ignore them and keep listening for a valid response. + The default is ``False``. + + Returns a ``dns.message.Message``. + """ + + wire = q.to_wire() + (af, destination, source) = _destination_and_source( + where, port, source, source_port, True + ) + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) + else: + assert af is not None + cm = make_socket(af, socket.SOCK_DGRAM, source) + with cm as s: + send_udp(s, wire, destination, expiration) + (r, received_time) = receive_udp( + s, + destination, + expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + raise_on_truncation, + ignore_errors, + q, + ) + r.time = received_time - begin_time + # We don't need to check q.is_response() if we are in ignore_errors mode + # as receive_udp() will have checked it. + if not (ignore_errors or q.is_response(r)): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def udp_with_fallback( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 53, + source: str | None = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + udp_sock: Any | None = None, + tcp_sock: Any | None = None, + ignore_errors: bool = False, +) -> Tuple[dns.message.Message, bool]: + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query + times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source + address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. The default is + 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from unexpected + sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + received message. + + *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the UDP query. + If ``None``, the default, a socket is created. Note that if a socket is provided, + it must be a nonblocking datagram socket, and the *source* and *source_port* are + ignored for the UDP query. + + *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + TCP query. If ``None``, the default, a socket is created. Note that if a socket is + provided, it must be a nonblocking connected stream socket, and *where*, *source* + and *source_port* are ignored for the TCP query. + + *ignore_errors*, a ``bool``. If various format errors or response mismatches occur + while listening for UDP, ignore them and keep listening for a valid response. The + default is ``False``. + + Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True`` if and only if + TCP was used. 
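An illustrative udp_with_fallback() call, again with a placeholder server address, showing how the returned flag reports whether the TCP fallback was taken:

    import dns.message
    import dns.query

    q = dns.message.make_query("example.com.", "TXT")
    (response, used_tcp) = dns.query.udp_with_fallback(q, "192.0.2.1", timeout=5)
    if used_tcp:
        print("UDP response was truncated; the answer came over TCP")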
+ """ + try: + response = udp( + q, + where, + timeout, + port, + source, + source_port, + ignore_unexpected, + one_rr_per_rrset, + ignore_trailing, + True, + udp_sock, + ignore_errors, + ) + return (response, False) + except dns.message.Truncated: + response = tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + tcp_sock, + ) + return (response, True) + + +def _net_read(sock, count, expiration): + """Read the specified number of bytes from sock. Keep trying until we + either get the desired amount, or we hit EOF. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + s = b"" + while count > 0: + try: + n = sock.recv(count) + if n == b"": + raise EOFError("EOF") + count -= len(n) + s += n + except (BlockingIOError, ssl.SSLWantReadError): + _wait_for_readable(sock, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(sock, expiration) + return s + + +def _net_write(sock, data, expiration): + """Write the specified data to the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + current = 0 + l = len(data) + while current < l: + try: + current += sock.send(data[current:]) + except (BlockingIOError, ssl.SSLWantWriteError): + _wait_for_writable(sock, expiration) + except ssl.SSLWantReadError: # pragma: no cover + _wait_for_readable(sock, expiration) + + +def send_tcp( + sock: Any, + what: dns.message.Message | bytes, + expiration: float | None = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified TCP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + tcpmsg = what.to_wire(prepend_length=True) + else: + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = len(what).to_bytes(2, "big") + what + sent_time = time.time() + _net_write(sock, tcpmsg, expiration) + return (len(tcpmsg), sent_time) + + +def receive_tcp( + sock: Any, + expiration: float | None = None, + one_rr_per_rrset: bool = False, + keyring: Dict[dns.name.Name, dns.tsig.Key] | None = None, + request_mac: bytes | None = b"", + ignore_trailing: bool = False, +) -> Tuple[dns.message.Message, float]: + """Read a DNS message from a TCP socket. + + *sock*, a ``socket``. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + Returns a ``(dns.message.Message, float)`` tuple of the received message + and the received time. 
+ """ + + ldata = _net_read(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return (r, received_time) + + +def _connect(s, address, expiration): + err = s.connect_ex(address) + if err == 0: + return + if err in (errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY): + _wait_for_writable(s, expiration) + err = s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + raise OSError(err, os.strerror(err)) + + +def tcp( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 53, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Any | None = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TCP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking connected stream + socket, and *where*, *port*, *source* and *source_port* are ignored. + + Returns a ``dns.message.Message``. + """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) + else: + (af, destination, source) = _destination_and_source( + where, port, source, source_port, True + ) + assert af is not None + cm = make_socket(af, socket.SOCK_STREAM, source) + with cm as s: + if not sock: + # pylint: disable=possibly-used-before-assignment + _connect(s, destination, expiration) # pyright: ignore + send_tcp(s, wire, expiration) + (r, received_time) = receive_tcp( + s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def _tls_handshake(s, expiration): + while True: + try: + s.do_handshake() + return + except ssl.SSLWantReadError: + _wait_for_readable(s, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(s, expiration) + + +def make_ssl_context( + verify: bool | str = True, + check_hostname: bool = True, + alpns: list[str] | None = None, +) -> ssl.SSLContext: + """Make an SSL context + + If *verify* is ``True``, the default, then certificate verification will occur using + the standard CA roots. If *verify* is ``False``, then certificate verification will + be disabled. 
If *verify* is a string which is a valid pathname, then if the + pathname is a regular file, the CA roots will be taken from the file, otherwise if + the pathname is a directory roots will be taken from the directory. + + If *check_hostname* is ``True``, the default, then the hostname of the server must + be specified when connecting and the server's certificate must authorize the + hostname. If ``False``, then hostname checking is disabled. + + *aplns* is ``None`` or a list of TLS ALPN (Application Layer Protocol Negotiation) + strings to use in negotiation. For DNS-over-TLS, the right value is `["dot"]`. + """ + cafile, capath = dns._tls_util.convert_verify_to_cafile_and_capath(verify) + ssl_context = ssl.create_default_context(cafile=cafile, capath=capath) + # the pyright ignores below are because it gets confused between the + # _no_ssl compatibility types and the real ones. + ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 # type: ignore + ssl_context.check_hostname = check_hostname + if verify is False: + ssl_context.verify_mode = ssl.CERT_NONE # type: ignore + if alpns is not None: + ssl_context.set_alpn_protocols(alpns) + return ssl_context # type: ignore + + +# for backwards compatibility +def _make_dot_ssl_context( + server_hostname: str | None, verify: bool | str +) -> ssl.SSLContext: + return make_ssl_context(verify, server_hostname is not None, ["dot"]) + + +def tls( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 853, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: ssl.SSLSocket | None = None, + ssl_context: ssl.SSLContext | None = None, + server_hostname: str | None = None, + verify: bool | str = True, +) -> dns.message.Message: + """Return the response obtained after sending a query via TLS. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 853. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, an ``ssl.SSLSocket``, or ``None``, the socket to use for + the query. If ``None``, the default, a socket is created. Note + that if a socket is provided, it must be a nonblocking connected + SSL stream socket, and *where*, *port*, *source*, *source_port*, + and *ssl_context* are ignored. + + *ssl_context*, an ``ssl.SSLContext``, the context to use when establishing + a TLS connection. If ``None``, the default, creates one with the default + configuration. + + *server_hostname*, a ``str`` containing the server's hostname. The + default is ``None``, which means that no hostname is known, and if an + SSL context is created, hostname checking will be disabled. + + *verify*, a ``bool`` or ``str``. 
If a ``True``, then TLS certificate verification + of the server is done using the default CA bundle; if ``False``, then no + verification is done; if a `str` then it specifies the path to a certificate file or + directory which will be used for verification. + + Returns a ``dns.message.Message``. + + """ + + if sock: + # + # If a socket was provided, there's no special TLS handling needed. + # + return tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + sock, + ) + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + (af, destination, source) = _destination_and_source( + where, port, source, source_port, True + ) + assert af is not None # where must be an address + if ssl_context is None: + ssl_context = make_ssl_context(verify, server_hostname is not None, ["dot"]) + + with make_ssl_socket( + af, + socket.SOCK_STREAM, + ssl_context=ssl_context, + server_hostname=server_hostname, + source=source, + ) as s: + _connect(s, destination, expiration) + _tls_handshake(s, expiration) + send_tcp(s, wire, expiration) + (r, received_time) = receive_tcp( + s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def quic( + q: dns.message.Message, + where: str, + timeout: float | None = None, + port: int = 853, + source: str | None = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + connection: dns.quic.SyncQuicConnection | None = None, + verify: bool | str = True, + hostname: str | None = None, + server_hostname: str | None = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via DNS-over-QUIC. + + *q*, a ``dns.message.Message``, the query to send. + + *where*, a ``str``, the nameserver IP address. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query + times out. If ``None``, the default, wait forever. + + *port*, a ``int``, the port to send the query to. The default is 853. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source + address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. The default is + 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + received message. + + *connection*, a ``dns.quic.SyncQuicConnection``. If provided, the connection to use + to send the query. + + *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification + of the server is done using the default CA bundle; if ``False``, then no + verification is done; if a `str` then it specifies the path to a certificate file or + directory which will be used for verification. + + *hostname*, a ``str`` containing the server's hostname or ``None``. The default is + ``None``, which means that no hostname is known, and if an SSL context is created, + hostname checking will be disabled. This value is ignored if *url* is not + ``None``. + + *server_hostname*, a ``str`` or ``None``. This item is for backwards compatibility + only, and has the same meaning as *hostname*. + + Returns a ``dns.message.Message``. 
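A minimal DNS-over-QUIC call sketch; the server address and hostname below are placeholders:

    import dns.message
    import dns.query

    q = dns.message.make_query("example.com.", "A")
    r = dns.query.quic(q, "192.0.2.1", timeout=5, hostname="doq.example.net")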
+ """ + + if not dns.quic.have_quic: + raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover + + if server_hostname is not None and hostname is None: + hostname = server_hostname + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.SyncQuicConnection + the_manager: dns.quic.SyncQuicManager + if connection: + manager: contextlib.AbstractContextManager = contextlib.nullcontext(None) + the_connection = connection + else: + manager = dns.quic.SyncQuicManager( + verify_mode=verify, server_name=hostname # pyright: ignore + ) + the_manager = manager # for type checking happiness + + with manager: + if not connection: + the_connection = the_manager.connect( # pyright: ignore + where, port, source, source_port + ) + (start, expiration) = _compute_times(timeout) + with the_connection.make_stream(timeout) as stream: # pyright: ignore + stream.send(wire, True) + wire = stream.receive(_remaining(expiration)) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +class UDPMode(enum.IntEnum): + """How should UDP be used in an IXFR from :py:func:`inbound_xfr()`? + + NEVER means "never use UDP; always use TCP" + TRY_FIRST means "try to use UDP but fall back to TCP if needed" + ONLY means "raise ``dns.xfr.UseTCP`` if trying UDP does not succeed" + """ + + NEVER = 0 + TRY_FIRST = 1 + ONLY = 2 + + +def _inbound_xfr( + txn_manager: dns.transaction.TransactionManager, + s: socket.socket | ssl.SSLSocket, + query: dns.message.Message, + serial: int | None, + timeout: float | None, + expiration: float | None, +) -> Any: + """Given a socket, does the zone transfer.""" + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + is_udp = isinstance(s, socket.socket) and s.type == socket.SOCK_DGRAM + if is_udp: + _udp_send(s, wire, None, expiration) + else: + tcpmsg = struct.pack("!H", len(wire)) + wire + _net_write(s, tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: + done = False + tsig_ctx = None + r: dns.message.Message | None = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if is_udp: + (rwire, _) = _udp_recv(s, 65535, mexpiration) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = _net_read(s, l, mexpiration) + r = dns.message.from_wire( + rwire, + keyring=query.keyring, + request_mac=query.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr, + ) + done = inbound.process_message(r) + yield r + tsig_ctx = r.tsig_ctx + if query.keyring and r is not None and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + + +def xfr( + where: str, + zone: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.AXFR, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + timeout: float | None = None, + port: int = 53, + keyring: Dict[dns.name.Name, dns.tsig.Key] | None = None, + keyname: dns.name.Name | str | None = None, + relativize: bool = True, + lifetime: float | None = None, + source: str | None = None, + source_port: int = 0, + serial: int = 0, + use_udp: bool = False, + 
keyalgorithm: dns.name.Name | str = dns.tsig.default_algorithm,
+) -> Any:
+    """Return a generator for the responses to a zone transfer.
+
+    *where*, a ``str`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *zone*, a ``dns.name.Name`` or ``str``, the name of the zone to transfer.
+
+    *rdtype*, an ``int`` or ``str``, the type of zone transfer. The
+    default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be
+    used to do an incremental transfer instead.
+
+    *rdclass*, an ``int`` or ``str``, the class of the zone transfer.
+    The default is ``dns.rdataclass.IN``.
+
+    *timeout*, a ``float``, the number of seconds to wait for each
+    response message. If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the message to. The default is 53.
+
+    *keyring*, a ``dict``, the keyring to use for TSIG.
+
+    *keyname*, a ``dns.name.Name`` or ``str``, the name of the TSIG
+    key to use.
+
+    *relativize*, a ``bool``. If ``True``, all names in the zone will be
+    relativized to the zone origin. It is essential that the
+    relativize setting matches the one specified to
+    ``dns.zone.from_xfr()`` if using this generator to make a zone.
+
+    *lifetime*, a ``float``, the total number of seconds to spend
+    doing the transfer. If ``None``, the default, then there is no
+    limit on the time the transfer may take.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
+    the source address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.
+    The default is 0.
+
+    *serial*, an ``int``, the SOA serial number to use as the base for
+    an IXFR diff sequence (only meaningful if *rdtype* is
+    ``dns.rdatatype.IXFR``).
+
+    *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR).
+
+    *keyalgorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use.
+
+    Raises on errors, and so does the generator.
+
+    Returns a generator of ``dns.message.Message`` objects.
+    """
+
+    class DummyTransactionManager(dns.transaction.TransactionManager):
+        def __init__(self, origin, relativize):
+            self.info = (origin, relativize, dns.name.empty if relativize else origin)
+
+        def origin_information(self):
+            return self.info
+
+        def get_class(self) -> dns.rdataclass.RdataClass:
+            raise NotImplementedError  # pragma: no cover
+
+        def reader(self):
+            raise NotImplementedError  # pragma: no cover
+
+        def writer(self, replacement: bool = False) -> dns.transaction.Transaction:
+            class DummyTransaction:
+                def nop(self, *args, **kw):
+                    pass
+
+                def __getattr__(self, _):
+                    return self.nop
+
+            return cast(dns.transaction.Transaction, DummyTransaction())
+
+    if isinstance(zone, str):
+        zone = dns.name.from_text(zone)
+    rdtype = dns.rdatatype.RdataType.make(rdtype)
+    q = dns.message.make_query(zone, rdtype, rdclass)
+    if rdtype == dns.rdatatype.IXFR:
+        rrset = q.find_rrset(
+            q.authority, zone, dns.rdataclass.IN, dns.rdatatype.SOA, create=True
+        )
+        soa = dns.rdata.from_text("IN", "SOA", f". . {serial} 0 0 0 0")
+        rrset.add(soa, 0)
+    if keyring is not None:
+        q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
+    (af, destination, source) = _destination_and_source(
+        where, port, source, source_port, True
+    )
+    assert af is not None
+    (_, expiration) = _compute_times(lifetime)
+    tm = DummyTransactionManager(zone, relativize)
+    if use_udp and rdtype != dns.rdatatype.IXFR:
+        raise ValueError("cannot do a UDP AXFR")
+    sock_type = socket.SOCK_DGRAM if use_udp else socket.SOCK_STREAM
+    with make_socket(af, sock_type, source) as s:
+        _connect(s, destination, expiration)
+        yield from _inbound_xfr(tm, s, q, serial, timeout, expiration)
+
+
+def inbound_xfr(
+    where: str,
+    txn_manager: dns.transaction.TransactionManager,
+    query: dns.message.Message | None = None,
+    port: int = 53,
+    timeout: float | None = None,
+    lifetime: float | None = None,
+    source: str | None = None,
+    source_port: int = 0,
+    udp_mode: UDPMode = UDPMode.NEVER,
+) -> None:
+    """Conduct an inbound transfer and apply it via a transaction from the
+    txn_manager.
+
+    *where*, a ``str`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *txn_manager*, a ``dns.transaction.TransactionManager``, the txn_manager
+    for this transfer (typically a ``dns.zone.Zone``).
+
+    *query*, the query to send. If not supplied, a default query is
+    constructed using information from the *txn_manager*.
+
+    *port*, an ``int``, the port to send the message to. The default is 53.
+
+    *timeout*, a ``float``, the number of seconds to wait for each
+    response message. If ``None``, the default, wait forever.
+
+    *lifetime*, a ``float``, the total number of seconds to spend
+    doing the transfer. If ``None``, the default, then there is no
+    limit on the time the transfer may take.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
+    the source address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.
+    The default is 0.
+
+    *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used
+    for IXFRs. The default is ``dns.query.UDPMode.NEVER``, i.e. only use
+    TCP. Other possibilities are ``dns.query.UDPMode.TRY_FIRST``, which
+    means "try UDP but fall back to TCP if needed", and
+    ``dns.query.UDPMode.ONLY``, which means "try UDP and raise
+    ``dns.xfr.UseTCP`` if it does not succeed".
+
+    Raises on errors.
+ """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + + (af, destination, source) = _destination_and_source( + where, port, source, source_port, True + ) + assert af is not None + (_, expiration) = _compute_times(lifetime) + if query.question[0].rdtype == dns.rdatatype.IXFR and udp_mode != UDPMode.NEVER: + with make_socket(af, socket.SOCK_DGRAM, source) as s: + _connect(s, destination, expiration) + try: + for _ in _inbound_xfr( + txn_manager, s, query, serial, timeout, expiration + ): + pass + return + except dns.xfr.UseTCP: + if udp_mode == UDPMode.ONLY: + raise + + with make_socket(af, socket.SOCK_STREAM, source) as s: + _connect(s, destination, expiration) + for _ in _inbound_xfr(txn_manager, s, query, serial, timeout, expiration): + pass diff --git a/venv/lib/python3.12/site-packages/dns/quic/__init__.py b/venv/lib/python3.12/site-packages/dns/quic/__init__.py new file mode 100644 index 0000000..7c2a699 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/quic/__init__.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +from typing import Any, Dict, List, Tuple + +import dns._features +import dns.asyncbackend + +if dns._features.have("doq"): + from dns._asyncbackend import NullContext + from dns.quic._asyncio import AsyncioQuicConnection as AsyncioQuicConnection + from dns.quic._asyncio import AsyncioQuicManager + from dns.quic._asyncio import AsyncioQuicStream as AsyncioQuicStream + from dns.quic._common import AsyncQuicConnection # pyright: ignore + from dns.quic._common import AsyncQuicManager as AsyncQuicManager + from dns.quic._sync import SyncQuicConnection # pyright: ignore + from dns.quic._sync import SyncQuicStream # pyright: ignore + from dns.quic._sync import SyncQuicManager as SyncQuicManager + + have_quic = True + + def null_factory( + *args, # pylint: disable=unused-argument + **kwargs, # pylint: disable=unused-argument + ): + return NullContext(None) + + def _asyncio_manager_factory( + context, *args, **kwargs # pylint: disable=unused-argument + ): + return AsyncioQuicManager(*args, **kwargs) + + # We have a context factory and a manager factory as for trio we need to have + # a nursery. 
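+    # A caller-side sketch (illustrative; the real call site is in
+    # dns.asyncquery):
+    #
+    #   (cfactory, mfactory) = dns.quic.factories_for_backend()
+    #   async with cfactory() as context:
+    #       async with mfactory(context, verify_mode=True) as manager:
+    #           connection = manager.connect(where, port)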
+ + _async_factories: Dict[str, Tuple[Any, Any]] = { + "asyncio": (null_factory, _asyncio_manager_factory) + } + + if dns._features.have("trio"): + import trio + + # pylint: disable=ungrouped-imports + from dns.quic._trio import TrioQuicConnection as TrioQuicConnection + from dns.quic._trio import TrioQuicManager + from dns.quic._trio import TrioQuicStream as TrioQuicStream + + def _trio_context_factory(): + return trio.open_nursery() + + def _trio_manager_factory(context, *args, **kwargs): + return TrioQuicManager(context, *args, **kwargs) + + _async_factories["trio"] = (_trio_context_factory, _trio_manager_factory) + + def factories_for_backend(backend=None): + if backend is None: + backend = dns.asyncbackend.get_default_backend() + return _async_factories[backend.name()] + +else: # pragma: no cover + have_quic = False + + class AsyncQuicStream: # type: ignore + pass + + class AsyncQuicConnection: # type: ignore + async def make_stream(self) -> Any: + raise NotImplementedError + + class SyncQuicStream: # type: ignore + pass + + class SyncQuicConnection: # type: ignore + def make_stream(self) -> Any: + raise NotImplementedError + + +Headers = List[Tuple[bytes, bytes]] diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f38aeb2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_asyncio.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_asyncio.cpython-312.pyc new file mode 100644 index 0000000..fb4b21b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_asyncio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_common.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_common.cpython-312.pyc new file mode 100644 index 0000000..8d85761 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_common.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_sync.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_sync.cpython-312.pyc new file mode 100644 index 0000000..ea00c86 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_sync.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_trio.cpython-312.pyc new file mode 100644 index 0000000..d7f7db2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_trio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/_asyncio.py b/venv/lib/python3.12/site-packages/dns/quic/_asyncio.py new file mode 100644 index 0000000..0a177b6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/quic/_asyncio.py @@ -0,0 +1,276 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import asyncio +import socket +import ssl +import struct +import time + +import aioquic.h3.connection # type: ignore +import aioquic.h3.events # type: ignore +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore + +import dns.asyncbackend +import dns.exception +import dns.inet +from 
dns.quic._common import ( + QUIC_MAX_DATAGRAM, + AsyncQuicConnection, + AsyncQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + + +class AsyncioQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = asyncio.Condition() + + async def _wait_for_wake_up(self): + async with self._wake_up: + await self._wake_up.wait() + + async def wait_for(self, amount, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + if self._buffer.have(amount): + return + self._expecting = amount + try: + await asyncio.wait_for(self._wait_for_wake_up(), timeout) + except TimeoutError: + raise dns.exception.Timeout + self._expecting = 0 + + async def wait_for_end(self, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + if self._buffer.seen_end(): + return + try: + await asyncio.wait_for(self._wait_for_wake_up(), timeout) + except TimeoutError: + raise dns.exception.Timeout + + async def receive(self, timeout=None): + expiration = self._expiration_from_timeout(timeout) + if self._connection.is_h3(): + await self.wait_for_end(expiration) + return self._buffer.get_all() + else: + await self.wait_for(2, expiration) + (size,) = struct.unpack("!H", self._buffer.get(2)) + await self.wait_for(size, expiration) + return self._buffer.get(size) + + async def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + await self._connection.write(self._stream_id, data, is_end) + + async def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + async with self._wake_up: + self._wake_up.notify() + + async def close(self): + self._close() + + # Streams are async context managers + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + async with self._wake_up: + self._wake_up.notify() + return False + + +class AsyncioQuicConnection(AsyncQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager=None): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = None + self._handshake_complete = asyncio.Event() + self._socket_created = asyncio.Event() + self._wake_timer = asyncio.Condition() + self._receiver_task = None + self._sender_task = None + self._wake_pending = False + + async def _receiver(self): + try: + af = dns.inet.af_for_address(self._address) + backend = dns.asyncbackend.get_backend("asyncio") + # Note that peer is a low-level address tuple, but make_socket() wants + # a high-level address tuple, so we convert. + self._socket = await backend.make_socket( + af, socket.SOCK_DGRAM, 0, self._source, (self._peer[0], self._peer[1]) + ) + self._socket_created.set() + async with self._socket: + while not self._done: + (datagram, address) = await self._socket.recvfrom( + QUIC_MAX_DATAGRAM, None + ) + if address[0] != self._peer[0] or address[1] != self._peer[1]: + continue + self._connection.receive_datagram(datagram, address, time.time()) + # Wake up the timer in case the sender is sleeping, as there may be + # stuff to send now. 
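+                    # (_wakeup() notifies the _wake_timer condition that
+                    # _sender() sleeps on via _wait_for_wake_timer().)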
+ await self._wakeup() + except Exception: + pass + finally: + self._done = True + await self._wakeup() + self._handshake_complete.set() + + async def _wakeup(self): + self._wake_pending = True + async with self._wake_timer: + self._wake_timer.notify_all() + + async def _wait_for_wake_timer(self): + async with self._wake_timer: + if not self._wake_pending: + await self._wake_timer.wait() + self._wake_pending = False + + async def _sender(self): + await self._socket_created.wait() + while not self._done: + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, address in datagrams: + assert address == self._peer + assert self._socket is not None + await self._socket.sendto(datagram, self._peer, None) + (expiration, interval) = self._get_timer_values() + try: + await asyncio.wait_for(self._wait_for_wake_timer(), interval) + except Exception: + pass + self._handle_timer(expiration) + await self._handle_events() + + async def _handle_events(self): + count = 0 + while True: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + if self.is_h3(): + assert self._h3_conn is not None + h3_events = self._h3_conn.handle_event(event) + for h3_event in h3_events: + if isinstance(h3_event, aioquic.h3.events.HeadersReceived): + stream = self._streams.get(event.stream_id) + if stream: + if stream._headers is None: + stream._headers = h3_event.headers + elif stream._trailers is None: + stream._trailers = h3_event.headers + if h3_event.stream_ended: + await stream._add_input(b"", True) + elif isinstance(h3_event, aioquic.h3.events.DataReceived): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input( + h3_event.data, h3_event.stream_ended + ) + else: + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance(event, aioquic.quic.events.ConnectionTerminated): + self._done = True + if self._receiver_task is not None: + self._receiver_task.cancel() + elif isinstance(event, aioquic.quic.events.StreamReset): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(b"", True) + + count += 1 + if count > 10: + # yield + count = 0 + await asyncio.sleep(0) + + async def write(self, stream, data, is_end=False): + self._connection.send_stream_data(stream, data, is_end) + await self._wakeup() + + def run(self): + if self._closed: + return + self._receiver_task = asyncio.Task(self._receiver()) + self._sender_task = asyncio.Task(self._sender()) + + async def make_stream(self, timeout=None): + try: + await asyncio.wait_for(self._handshake_complete.wait(), timeout) + except TimeoutError: + raise dns.exception.Timeout + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = AsyncioQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + + async def close(self): + if not self._closed: + if self._manager is not None: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + # sender might be blocked on this, so set it + self._socket_created.set() + await self._wakeup() + try: + if self._receiver_task is not None: + await self._receiver_task + except asyncio.CancelledError: + pass + try: + if self._sender_task is not None: + await self._sender_task + except 
asyncio.CancelledError:
+            pass
+        if self._socket is not None:
+            await self._socket.close()
+
+
+class AsyncioQuicManager(AsyncQuicManager):
+    def __init__(
+        self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None, h3=False
+    ):
+        super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name, h3)
+
+    def connect(
+        self, address, port=853, source=None, source_port=0, want_session_ticket=True
+    ):
+        (connection, start) = self._connect(
+            address, port, source, source_port, want_session_ticket
+        )
+        if start:
+            connection.run()
+        return connection
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        # Copy the iterator into a list as exiting things will mutate the connections
+        # table.
+        connections = list(self._connections.values())
+        for connection in connections:
+            await connection.close()
+        return False
diff --git a/venv/lib/python3.12/site-packages/dns/quic/_common.py b/venv/lib/python3.12/site-packages/dns/quic/_common.py
new file mode 100644
index 0000000..ba9d245
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/quic/_common.py
@@ -0,0 +1,344 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+import base64
+import copy
+import functools
+import socket
+import struct
+import time
+import urllib.parse
+from typing import Any
+
+import aioquic.h3.connection  # type: ignore
+import aioquic.quic.configuration  # type: ignore
+import aioquic.quic.connection  # type: ignore
+
+import dns._tls_util
+import dns.inet
+
+QUIC_MAX_DATAGRAM = 2048
+MAX_SESSION_TICKETS = 8
+# If we hit the max sessions limit we will delete this many of the oldest connections.
+# The value must be an integer > 0 and <= MAX_SESSION_TICKETS.
+SESSIONS_TO_DELETE = MAX_SESSION_TICKETS // 4
+
+
+class UnexpectedEOF(Exception):
+    pass
+
+
+class Buffer:
+    def __init__(self):
+        self._buffer = b""
+        self._seen_end = False
+
+    def put(self, data, is_end):
+        if self._seen_end:
+            return
+        self._buffer += data
+        if is_end:
+            self._seen_end = True
+
+    def have(self, amount):
+        if len(self._buffer) >= amount:
+            return True
+        if self._seen_end:
+            raise UnexpectedEOF
+        return False
+
+    def seen_end(self):
+        return self._seen_end
+
+    def get(self, amount):
+        assert self.have(amount)
+        data = self._buffer[:amount]
+        self._buffer = self._buffer[amount:]
+        return data
+
+    def get_all(self):
+        assert self.seen_end()
+        data = self._buffer
+        self._buffer = b""
+        return data
+
+
+class BaseQuicStream:
+    def __init__(self, connection, stream_id):
+        self._connection = connection
+        self._stream_id = stream_id
+        self._buffer = Buffer()
+        self._expecting = 0
+        self._headers = None
+        self._trailers = None
+
+    def id(self):
+        return self._stream_id
+
+    def headers(self):
+        return self._headers
+
+    def trailers(self):
+        return self._trailers
+
+    def _expiration_from_timeout(self, timeout):
+        if timeout is not None:
+            expiration = time.time() + timeout
+        else:
+            expiration = None
+        return expiration
+
+    def _timeout_from_expiration(self, expiration):
+        if expiration is not None:
+            timeout = max(expiration - time.time(), 0.0)
+        else:
+            timeout = None
+        return timeout
+
+    # Subclass must implement receive() as sync / async and which returns a message
+    # or raises.
+
+    # Subclass must implement send() as sync / async and which takes a message and
+    # an EOF indicator.
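+
+    # For illustration (not part of the API): for a 29-byte DNS message,
+    # the plain-DoQ framing produced by _encapsulate() below is
+    #
+    #   struct.pack("!H", 29) + wire  ->  b"\x00\x1d" + wire
+    #
+    # while H3 sends the unframed message as an HTTP request body (see
+    # send_h3()).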
+
+    def send_h3(self, url, datagram, post=True):
+        if not self._connection.is_h3():
+            raise SyntaxError("cannot send H3 to a non-H3 connection")
+        url_parts = urllib.parse.urlparse(url)
+        path = url_parts.path.encode()
+        if post:
+            method = b"POST"
+        else:
+            method = b"GET"
+            path += b"?dns=" + base64.urlsafe_b64encode(datagram).rstrip(b"=")
+        headers = [
+            (b":method", method),
+            (b":scheme", url_parts.scheme.encode()),
+            (b":authority", url_parts.netloc.encode()),
+            (b":path", path),
+            (b"accept", b"application/dns-message"),
+        ]
+        if post:
+            headers.extend(
+                [
+                    (b"content-type", b"application/dns-message"),
+                    (b"content-length", str(len(datagram)).encode()),
+                ]
+            )
+        self._connection.send_headers(self._stream_id, headers, not post)
+        if post:
+            self._connection.send_data(self._stream_id, datagram, True)
+
+    def _encapsulate(self, datagram):
+        if self._connection.is_h3():
+            return datagram
+        l = len(datagram)
+        return struct.pack("!H", l) + datagram
+
+    def _common_add_input(self, data, is_end):
+        self._buffer.put(data, is_end)
+        try:
+            return (
+                self._expecting > 0 and self._buffer.have(self._expecting)
+            ) or self._buffer.seen_end()
+        except UnexpectedEOF:
+            return True
+
+    def _close(self):
+        self._connection.close_stream(self._stream_id)
+        self._buffer.put(b"", True)  # send EOF in case we haven't seen it.
+
+
+class BaseQuicConnection:
+    def __init__(
+        self,
+        connection,
+        address,
+        port,
+        source=None,
+        source_port=0,
+        manager=None,
+    ):
+        self._done = False
+        self._connection = connection
+        self._address = address
+        self._port = port
+        self._closed = False
+        self._manager = manager
+        self._streams = {}
+        if manager is not None and manager.is_h3():
+            self._h3_conn = aioquic.h3.connection.H3Connection(connection, False)
+        else:
+            self._h3_conn = None
+        self._af = dns.inet.af_for_address(address)
+        self._peer = dns.inet.low_level_address_tuple((address, port))
+        if source is None and source_port != 0:
+            if self._af == socket.AF_INET:
+                source = "0.0.0.0"
+            elif self._af == socket.AF_INET6:
+                source = "::"
+            else:
+                raise NotImplementedError
+        if source:
+            self._source = (source, source_port)
+        else:
+            self._source = None
+
+    def is_h3(self):
+        return self._h3_conn is not None
+
+    def close_stream(self, stream_id):
+        del self._streams[stream_id]
+
+    def send_headers(self, stream_id, headers, is_end=False):
+        assert self._h3_conn is not None
+        self._h3_conn.send_headers(stream_id, headers, is_end)
+
+    def send_data(self, stream_id, data, is_end=False):
+        assert self._h3_conn is not None
+        self._h3_conn.send_data(stream_id, data, is_end)
+
+    def _get_timer_values(self, closed_is_special=True):
+        now = time.time()
+        expiration = self._connection.get_timer()
+        if expiration is None:
+            expiration = now + 3600  # arbitrary "big" value
+        interval = max(expiration - now, 0)
+        if self._closed and closed_is_special:
+            # lower sleep interval to avoid a race in the closing process
+            # which can lead to higher latency closing due to sleeping when
+            # we have events.
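+            # The 0.05s cap below keeps the closing loop polling roughly
+            # every 50 ms instead of sleeping out a long QUIC timer.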
+            interval = min(interval, 0.05)
+        return (expiration, interval)
+
+    def _handle_timer(self, expiration):
+        now = time.time()
+        if expiration <= now:
+            self._connection.handle_timer(now)
+
+
+class AsyncQuicConnection(BaseQuicConnection):
+    async def make_stream(self, timeout: float | None = None) -> Any:
+        pass
+
+
+class BaseQuicManager:
+    def __init__(
+        self, conf, verify_mode, connection_factory, server_name=None, h3=False
+    ):
+        self._connections = {}
+        self._connection_factory = connection_factory
+        self._session_tickets = {}
+        self._tokens = {}
+        self._h3 = h3
+        if conf is None:
+            verify_path = None
+            if isinstance(verify_mode, str):
+                verify_path = verify_mode
+                verify_mode = True
+            if h3:
+                alpn_protocols = ["h3"]
+            else:
+                alpn_protocols = ["doq", "doq-i03"]
+            conf = aioquic.quic.configuration.QuicConfiguration(
+                alpn_protocols=alpn_protocols,
+                verify_mode=verify_mode,
+                server_name=server_name,
+            )
+            if verify_path is not None:
+                cafile, capath = dns._tls_util.convert_verify_to_cafile_and_capath(
+                    verify_path
+                )
+                conf.load_verify_locations(cafile=cafile, capath=capath)
+        self._conf = conf
+
+    def _connect(
+        self,
+        address,
+        port=853,
+        source=None,
+        source_port=0,
+        want_session_ticket=True,
+        want_token=True,
+    ):
+        connection = self._connections.get((address, port))
+        if connection is not None:
+            return (connection, False)
+        conf = self._conf
+        if want_session_ticket:
+            try:
+                session_ticket = self._session_tickets.pop((address, port))
+                # We found a session ticket, so make a configuration that uses it.
+                conf = copy.copy(conf)
+                conf.session_ticket = session_ticket
+            except KeyError:
+                # No session ticket.
+                pass
+            # Whether or not we found a session ticket, we want a handler to save
+            # one.
+            session_ticket_handler = functools.partial(
+                self.save_session_ticket, address, port
+            )
+        else:
+            session_ticket_handler = None
+        if want_token:
+            try:
+                token = self._tokens.pop((address, port))
+                # We found a token, so make a configuration that uses it.
+                conf = copy.copy(conf)
+                conf.token = token
+            except KeyError:
+                # No token.
+                pass
+            # Whether or not we found a token, we want a handler to save
+            # one.
+            token_handler = functools.partial(self.save_token, address, port)
+        else:
+            token_handler = None
+
+        qconn = aioquic.quic.connection.QuicConnection(
+            configuration=conf,
+            session_ticket_handler=session_ticket_handler,
+            token_handler=token_handler,
+        )
+        lladdress = dns.inet.low_level_address_tuple((address, port))
+        qconn.connect(lladdress, time.time())
+        connection = self._connection_factory(
+            qconn, address, port, source, source_port, self
+        )
+        self._connections[(address, port)] = connection
+        return (connection, True)
+
+    def closed(self, address, port):
+        try:
+            del self._connections[(address, port)]
+        except KeyError:
+            pass
+
+    def is_h3(self):
+        return self._h3
+
+    def save_session_ticket(self, address, port, ticket):
+        # We rely on dictionaries keys() being in insertion order here. We
+        # can't just popitem() as that would be LIFO which is the opposite of
+        # what we want.
+        l = len(self._session_tickets)
+        if l >= MAX_SESSION_TICKETS:
+            keys_to_delete = list(self._session_tickets.keys())[0:SESSIONS_TO_DELETE]
+            for key in keys_to_delete:
+                del self._session_tickets[key]
+        self._session_tickets[(address, port)] = ticket
+
+    def save_token(self, address, port, token):
+        # We rely on dictionaries keys() being in insertion order here. We
+        # can't just popitem() as that would be LIFO which is the opposite of
+        # what we want.
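+        # FIFO eviction sketch: with MAX_SESSION_TICKETS == 8 and
+        # SESSIONS_TO_DELETE == 2, storing a token when 8 are already
+        # cached first drops the two oldest entries.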
+ l = len(self._tokens) + if l >= MAX_SESSION_TICKETS: + keys_to_delete = list(self._tokens.keys())[0:SESSIONS_TO_DELETE] + for key in keys_to_delete: + del self._tokens[key] + self._tokens[(address, port)] = token + + +class AsyncQuicManager(BaseQuicManager): + def connect(self, address, port=853, source=None, source_port=0): + raise NotImplementedError diff --git a/venv/lib/python3.12/site-packages/dns/quic/_sync.py b/venv/lib/python3.12/site-packages/dns/quic/_sync.py new file mode 100644 index 0000000..18f9d05 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/quic/_sync.py @@ -0,0 +1,306 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import selectors +import socket +import ssl +import struct +import threading +import time + +import aioquic.h3.connection # type: ignore +import aioquic.h3.events # type: ignore +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore + +import dns.exception +import dns.inet +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + BaseQuicConnection, + BaseQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + +# Function used to create a socket. Can be overridden if needed in special +# situations. +socket_factory = socket.socket + + +class SyncQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = threading.Condition() + self._lock = threading.Lock() + + def wait_for(self, amount, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + with self._lock: + if self._buffer.have(amount): + return + self._expecting = amount + with self._wake_up: + if not self._wake_up.wait(timeout): + raise dns.exception.Timeout + self._expecting = 0 + + def wait_for_end(self, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + with self._lock: + if self._buffer.seen_end(): + return + with self._wake_up: + if not self._wake_up.wait(timeout): + raise dns.exception.Timeout + + def receive(self, timeout=None): + expiration = self._expiration_from_timeout(timeout) + if self._connection.is_h3(): + self.wait_for_end(expiration) + with self._lock: + return self._buffer.get_all() + else: + self.wait_for(2, expiration) + with self._lock: + (size,) = struct.unpack("!H", self._buffer.get(2)) + self.wait_for(size, expiration) + with self._lock: + return self._buffer.get(size) + + def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + self._connection.write(self._stream_id, data, is_end) + + def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + with self._wake_up: + self._wake_up.notify() + + def close(self): + with self._lock: + self._close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + with self._wake_up: + self._wake_up.notify() + return False + + +class SyncQuicConnection(BaseQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = socket_factory(self._af, socket.SOCK_DGRAM, 0) + if self._source is not None: + try: + self._socket.bind( + dns.inet.low_level_address_tuple(self._source, self._af) + ) + except Exception: + self._socket.close() + raise + self._socket.connect(self._peer) + (self._send_wakeup, self._receive_wakeup) = socket.socketpair() + self._receive_wakeup.setblocking(False) + 
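+        # The socketpair is a self-pipe: write() and friends send one byte
+        # to _send_wakeup, making _receive_wakeup readable so the selector
+        # loop in _worker() wakes up and flushes datagrams_to_send().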
self._socket.setblocking(False) + self._handshake_complete = threading.Event() + self._worker_thread = None + self._lock = threading.Lock() + + def _read(self): + count = 0 + while count < 10: + count += 1 + try: + datagram = self._socket.recv(QUIC_MAX_DATAGRAM) + except BlockingIOError: + return + with self._lock: + self._connection.receive_datagram(datagram, self._peer, time.time()) + + def _drain_wakeup(self): + while True: + try: + self._receive_wakeup.recv(32) + except BlockingIOError: + return + + def _worker(self): + try: + with selectors.DefaultSelector() as sel: + sel.register(self._socket, selectors.EVENT_READ, self._read) + sel.register( + self._receive_wakeup, selectors.EVENT_READ, self._drain_wakeup + ) + while not self._done: + (expiration, interval) = self._get_timer_values(False) + items = sel.select(interval) + for key, _ in items: + key.data() + with self._lock: + self._handle_timer(expiration) + self._handle_events() + with self._lock: + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, _ in datagrams: + try: + self._socket.send(datagram) + except BlockingIOError: + # we let QUIC handle any lossage + pass + except Exception: + # Eat all exceptions as we have no way to pass them back to the + # caller currently. It might be nice to fix this in the future. + pass + finally: + with self._lock: + self._done = True + self._socket.close() + # Ensure anyone waiting for this gets woken up. + self._handshake_complete.set() + + def _handle_events(self): + while True: + with self._lock: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + if self.is_h3(): + assert self._h3_conn is not None + h3_events = self._h3_conn.handle_event(event) + for h3_event in h3_events: + if isinstance(h3_event, aioquic.h3.events.HeadersReceived): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + if stream._headers is None: + stream._headers = h3_event.headers + elif stream._trailers is None: + stream._trailers = h3_event.headers + if h3_event.stream_ended: + stream._add_input(b"", True) + elif isinstance(h3_event, aioquic.h3.events.DataReceived): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(h3_event.data, h3_event.stream_ended) + else: + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance(event, aioquic.quic.events.ConnectionTerminated): + with self._lock: + self._done = True + elif isinstance(event, aioquic.quic.events.StreamReset): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(b"", True) + + def write(self, stream, data, is_end=False): + with self._lock: + self._connection.send_stream_data(stream, data, is_end) + self._send_wakeup.send(b"\x01") + + def send_headers(self, stream_id, headers, is_end=False): + with self._lock: + super().send_headers(stream_id, headers, is_end) + if is_end: + self._send_wakeup.send(b"\x01") + + def send_data(self, stream_id, data, is_end=False): + with self._lock: + super().send_data(stream_id, data, is_end) + if is_end: + self._send_wakeup.send(b"\x01") + + def run(self): + if self._closed: + return + self._worker_thread = threading.Thread(target=self._worker) + self._worker_thread.start() + + def make_stream(self, timeout=None): + if not 
self._handshake_complete.wait(timeout): + raise dns.exception.Timeout + with self._lock: + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = SyncQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + + def close_stream(self, stream_id): + with self._lock: + super().close_stream(stream_id) + + def close(self): + with self._lock: + if self._closed: + return + if self._manager is not None: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + self._send_wakeup.send(b"\x01") + if self._worker_thread is not None: + self._worker_thread.join() + + +class SyncQuicManager(BaseQuicManager): + def __init__( + self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None, h3=False + ): + super().__init__(conf, verify_mode, SyncQuicConnection, server_name, h3) + self._lock = threading.Lock() + + def connect( + self, + address, + port=853, + source=None, + source_port=0, + want_session_ticket=True, + want_token=True, + ): + with self._lock: + (connection, start) = self._connect( + address, port, source, source_port, want_session_ticket, want_token + ) + if start: + connection.run() + return connection + + def closed(self, address, port): + with self._lock: + super().closed(address, port) + + def save_session_ticket(self, address, port, ticket): + with self._lock: + super().save_session_ticket(address, port, ticket) + + def save_token(self, address, port, token): + with self._lock: + super().save_token(address, port, token) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. + connections = list(self._connections.values()) + for connection in connections: + connection.close() + return False diff --git a/venv/lib/python3.12/site-packages/dns/quic/_trio.py b/venv/lib/python3.12/site-packages/dns/quic/_trio.py new file mode 100644 index 0000000..046e6aa --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/quic/_trio.py @@ -0,0 +1,250 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import socket +import ssl +import struct +import time + +import aioquic.h3.connection # type: ignore +import aioquic.h3.events # type: ignore +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore +import trio + +import dns.exception +import dns.inet +from dns._asyncbackend import NullContext +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + AsyncQuicConnection, + AsyncQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + + +class TrioQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = trio.Condition() + + async def wait_for(self, amount): + while True: + if self._buffer.have(amount): + return + self._expecting = amount + async with self._wake_up: + await self._wake_up.wait() + self._expecting = 0 + + async def wait_for_end(self): + while True: + if self._buffer.seen_end(): + return + async with self._wake_up: + await self._wake_up.wait() + + async def receive(self, timeout=None): + if timeout is None: + context = NullContext(None) + else: + context = trio.move_on_after(timeout) + with context: + if self._connection.is_h3(): + await self.wait_for_end() + return self._buffer.get_all() + else: + await self.wait_for(2) + (size,) = struct.unpack("!H", 
self._buffer.get(2)) + await self.wait_for(size) + return self._buffer.get(size) + raise dns.exception.Timeout + + async def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + await self._connection.write(self._stream_id, data, is_end) + + async def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + async with self._wake_up: + self._wake_up.notify() + + async def close(self): + self._close() + + # Streams are async context managers + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + async with self._wake_up: + self._wake_up.notify() + return False + + +class TrioQuicConnection(AsyncQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager=None): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = trio.socket.socket(self._af, socket.SOCK_DGRAM, 0) + self._handshake_complete = trio.Event() + self._run_done = trio.Event() + self._worker_scope = None + self._send_pending = False + + async def _worker(self): + try: + if self._source: + await self._socket.bind( + dns.inet.low_level_address_tuple(self._source, self._af) + ) + await self._socket.connect(self._peer) + while not self._done: + (expiration, interval) = self._get_timer_values(False) + if self._send_pending: + # Do not block forever if sends are pending. Even though we + # have a wake-up mechanism if we've already started the blocking + # read, the possibility of context switching in send means that + # more writes can happen while we have no wake up context, so + # we need self._send_pending to avoid (effectively) a "lost wakeup" + # race. + interval = 0.0 + with trio.CancelScope( + deadline=trio.current_time() + interval # pyright: ignore + ) as self._worker_scope: + datagram = await self._socket.recv(QUIC_MAX_DATAGRAM) + self._connection.receive_datagram(datagram, self._peer, time.time()) + self._worker_scope = None + self._handle_timer(expiration) + await self._handle_events() + # We clear this now, before sending anything, as sending can cause + # context switches that do more sends. We want to know if that + # happens so we don't block a long time on the recv() above. 
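+                # (write() and close() set _send_pending and cancel
+                # _worker_scope to break out of the recv() early.)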
+ self._send_pending = False + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, _ in datagrams: + await self._socket.send(datagram) + finally: + self._done = True + self._socket.close() + self._handshake_complete.set() + + async def _handle_events(self): + count = 0 + while True: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + if self.is_h3(): + assert self._h3_conn is not None + h3_events = self._h3_conn.handle_event(event) + for h3_event in h3_events: + if isinstance(h3_event, aioquic.h3.events.HeadersReceived): + stream = self._streams.get(event.stream_id) + if stream: + if stream._headers is None: + stream._headers = h3_event.headers + elif stream._trailers is None: + stream._trailers = h3_event.headers + if h3_event.stream_ended: + await stream._add_input(b"", True) + elif isinstance(h3_event, aioquic.h3.events.DataReceived): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input( + h3_event.data, h3_event.stream_ended + ) + else: + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance(event, aioquic.quic.events.ConnectionTerminated): + self._done = True + self._socket.close() + elif isinstance(event, aioquic.quic.events.StreamReset): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(b"", True) + count += 1 + if count > 10: + # yield + count = 0 + await trio.sleep(0) + + async def write(self, stream, data, is_end=False): + self._connection.send_stream_data(stream, data, is_end) + self._send_pending = True + if self._worker_scope is not None: + self._worker_scope.cancel() + + async def run(self): + if self._closed: + return + async with trio.open_nursery() as nursery: + nursery.start_soon(self._worker) + self._run_done.set() + + async def make_stream(self, timeout=None): + if timeout is None: + context = NullContext(None) + else: + context = trio.move_on_after(timeout) + with context: + await self._handshake_complete.wait() + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = TrioQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + raise dns.exception.Timeout + + async def close(self): + if not self._closed: + if self._manager is not None: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + self._send_pending = True + if self._worker_scope is not None: + self._worker_scope.cancel() + await self._run_done.wait() + + +class TrioQuicManager(AsyncQuicManager): + def __init__( + self, + nursery, + conf=None, + verify_mode=ssl.CERT_REQUIRED, + server_name=None, + h3=False, + ): + super().__init__(conf, verify_mode, TrioQuicConnection, server_name, h3) + self._nursery = nursery + + def connect( + self, address, port=853, source=None, source_port=0, want_session_ticket=True + ): + (connection, start) = self._connect( + address, port, source, source_port, want_session_ticket + ) + if start: + self._nursery.start_soon(connection.run) + return connection + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. 
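+        # (connection.close() calls closed() on this manager, which deletes
+        # the entry; iterating self._connections directly would raise
+        # "dictionary changed size during iteration".)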
+        connections = list(self._connections.values())
+        for connection in connections:
+            await connection.close()
+        return False
diff --git a/venv/lib/python3.12/site-packages/dns/rcode.py b/venv/lib/python3.12/site-packages/dns/rcode.py
new file mode 100644
index 0000000..7bb8467
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/rcode.py
@@ -0,0 +1,168 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2001-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS Result Codes."""
+
+from typing import Tuple, Type
+
+import dns.enum
+import dns.exception
+
+
+class Rcode(dns.enum.IntEnum):
+    #: No error
+    NOERROR = 0
+    #: Format error
+    FORMERR = 1
+    #: Server failure
+    SERVFAIL = 2
+    #: Name does not exist ("Name Error" in RFC 1035 terminology).
+    NXDOMAIN = 3
+    #: Not implemented
+    NOTIMP = 4
+    #: Refused
+    REFUSED = 5
+    #: Name exists.
+    YXDOMAIN = 6
+    #: RRset exists.
+    YXRRSET = 7
+    #: RRset does not exist.
+    NXRRSET = 8
+    #: Not authoritative.
+    NOTAUTH = 9
+    #: Name not in zone.
+    NOTZONE = 10
+    #: DSO-TYPE Not Implemented
+    DSOTYPENI = 11
+    #: Bad EDNS version.
+    BADVERS = 16
+    #: TSIG Signature Failure
+    BADSIG = 16
+    #: Key not recognized.
+    BADKEY = 17
+    #: Signature out of time window.
+    BADTIME = 18
+    #: Bad TKEY Mode.
+    BADMODE = 19
+    #: Duplicate key name.
+    BADNAME = 20
+    #: Algorithm not supported.
+    BADALG = 21
+    #: Bad Truncation
+    BADTRUNC = 22
+    #: Bad/missing Server Cookie
+    BADCOOKIE = 23
+
+    @classmethod
+    def _maximum(cls):
+        return 4095
+
+    @classmethod
+    def _unknown_exception_class(cls) -> Type[Exception]:
+        return UnknownRcode
+
+
+class UnknownRcode(dns.exception.DNSException):
+    """A DNS rcode is unknown."""
+
+
+def from_text(text: str) -> Rcode:
+    """Convert text into an rcode.
+
+    *text*, a ``str``, the textual rcode or an integer in textual form.
+
+    Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown.
+
+    Returns a ``dns.rcode.Rcode``.
+    """
+
+    return Rcode.from_text(text)
+
+
+def from_flags(flags: int, ednsflags: int) -> Rcode:
+    """Return the rcode value encoded by flags and ednsflags.
+
+    *flags*, an ``int``, the DNS flags field.
+
+    *ednsflags*, an ``int``, the EDNS flags field.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns a ``dns.rcode.Rcode``.
+    """
+
+    value = (flags & 0x000F) | ((ednsflags >> 20) & 0xFF0)
+    return Rcode.make(value)
+
+
+def to_flags(value: Rcode) -> Tuple[int, int]:
+    """Return a (flags, ednsflags) tuple which encodes the rcode.
+
+    *value*, a ``dns.rcode.Rcode``, the rcode.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns an ``(int, int)`` tuple.
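+
+    For example, ``to_flags(Rcode.BADVERS)`` returns ``(0, 0x1000000)``:
+    the low four bits of 16 (``0x10``) go into *flags*, and bits 4-11
+    shift up by 20 into the EXTENDED-RCODE bits 24-31 of *ednsflags*.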
+ """ + + if value < 0 or value > 4095: + raise ValueError("rcode must be >= 0 and <= 4095") + v = value & 0xF + ev = (value & 0xFF0) << 20 + return (v, ev) + + +def to_text(value: Rcode, tsig: bool = False) -> str: + """Convert rcode into text. + + *value*, a ``dns.rcode.Rcode``, the rcode. + + Raises ``ValueError`` if rcode is < 0 or > 4095. + + Returns a ``str``. + """ + + if tsig and value == Rcode.BADVERS: + return "BADSIG" + return Rcode.to_text(value) + + +### BEGIN generated Rcode constants + +NOERROR = Rcode.NOERROR +FORMERR = Rcode.FORMERR +SERVFAIL = Rcode.SERVFAIL +NXDOMAIN = Rcode.NXDOMAIN +NOTIMP = Rcode.NOTIMP +REFUSED = Rcode.REFUSED +YXDOMAIN = Rcode.YXDOMAIN +YXRRSET = Rcode.YXRRSET +NXRRSET = Rcode.NXRRSET +NOTAUTH = Rcode.NOTAUTH +NOTZONE = Rcode.NOTZONE +DSOTYPENI = Rcode.DSOTYPENI +BADVERS = Rcode.BADVERS +BADSIG = Rcode.BADSIG +BADKEY = Rcode.BADKEY +BADTIME = Rcode.BADTIME +BADMODE = Rcode.BADMODE +BADNAME = Rcode.BADNAME +BADALG = Rcode.BADALG +BADTRUNC = Rcode.BADTRUNC +BADCOOKIE = Rcode.BADCOOKIE + +### END generated Rcode constants diff --git a/venv/lib/python3.12/site-packages/dns/rdata.py b/venv/lib/python3.12/site-packages/dns/rdata.py new file mode 100644 index 0000000..c4522e6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdata.py @@ -0,0 +1,935 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata.""" + +import base64 +import binascii +import inspect +import io +import ipaddress +import itertools +import random +from importlib import import_module +from typing import Any, Dict, Tuple + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdataclass +import dns.rdatatype +import dns.tokenizer +import dns.ttl +import dns.wire + +_chunksize = 32 + +# We currently allow comparisons for rdata with relative names for backwards +# compatibility, but in the future we will not, as these kinds of comparisons +# can lead to subtle bugs if code is not carefully written. +# +# This switch allows the future behavior to be turned on so code can be +# tested with it. +_allow_relative_comparisons = True + + +class NoRelativeRdataOrdering(dns.exception.DNSException): + """An attempt was made to do an ordered comparison of one or more + rdata with relative names. The only reliable way of sorting rdata + is to use non-relativized rdata. + + """ + + +def _wordbreak(data, chunksize=_chunksize, separator=b" "): + """Break a binary string into chunks of chunksize characters separated by + a space. 
+ """ + + if not chunksize: + return data.decode() + return separator.join( + [data[i : i + chunksize] for i in range(0, len(data), chunksize)] + ).decode() + + +# pylint: disable=unused-argument + + +def _hexify(data, chunksize=_chunksize, separator=b" ", **kw): + """Convert a binary string into its hex encoding, broken up into chunks + of chunksize characters separated by a separator. + """ + + return _wordbreak(binascii.hexlify(data), chunksize, separator) + + +def _base64ify(data, chunksize=_chunksize, separator=b" ", **kw): + """Convert a binary string into its base64 encoding, broken up into chunks + of chunksize characters separated by a separator. + """ + + return _wordbreak(base64.b64encode(data), chunksize, separator) + + +# pylint: enable=unused-argument + +__escaped = b'"\\' + + +def _escapify(qstring): + """Escape the characters in a quoted string which need it.""" + + if isinstance(qstring, str): + qstring = qstring.encode() + if not isinstance(qstring, bytearray): + qstring = bytearray(qstring) + + text = "" + for c in qstring: + if c in __escaped: + text += "\\" + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += f"\\{c:03d}" + return text + + +def _truncate_bitmap(what): + """Determine the index of greatest byte that isn't all zeros, and + return the bitmap that contains all the bytes less than that index. + """ + + for i in range(len(what) - 1, -1, -1): + if what[i] != 0: + return what[0 : i + 1] + return what[0:1] + + +# So we don't have to edit all the rdata classes... +_constify = dns.immutable.constify + + +@dns.immutable.immutable +class Rdata: + """Base class for all DNS rdata types.""" + + __slots__ = ["rdclass", "rdtype", "rdcomment"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + ) -> None: + """Initialize an rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + """ + + self.rdclass = self._as_rdataclass(rdclass) + self.rdtype = self._as_rdatatype(rdtype) + self.rdcomment = None + + def _get_all_slots(self): + return itertools.chain.from_iterable( + getattr(cls, "__slots__", []) for cls in self.__class__.__mro__ + ) + + def __getstate__(self): + # We used to try to do a tuple of all slots here, but it + # doesn't work as self._all_slots isn't available at + # __setstate__() time. Before that we tried to store a tuple + # of __slots__, but that didn't work as it didn't store the + # slots defined by ancestors. This older way didn't fail + # outright, but ended up with partially broken objects, e.g. + # if you unpickled an A RR it wouldn't have rdclass and rdtype + # attributes, and would compare badly. + state = {} + for slot in self._get_all_slots(): + state[slot] = getattr(self, slot) + return state + + def __setstate__(self, state): + for slot, val in state.items(): + object.__setattr__(self, slot, val) + if not hasattr(self, "rdcomment"): + # Pickled rdata from 2.0.x might not have a rdcomment, so add + # it if needed. + object.__setattr__(self, "rdcomment", None) + + def covers(self) -> dns.rdatatype.RdataType: + """Return the type a Rdata covers. + + DNS SIG/RRSIG rdatas apply to a specific type; this type is + returned by the covers() function. If the rdata type is not + SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when + creating rdatasets, allowing the rdataset to contain only RRSIGs + of a particular type, e.g. RRSIG(NS). + + Returns a ``dns.rdatatype.RdataType``. 
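+
+        For example, an ``RRSIG NS ...`` rdata covers ``dns.rdatatype.NS``,
+        while an ordinary A rdata covers ``dns.rdatatype.NONE``.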
+        """
+
+        return dns.rdatatype.NONE
+
+    def extended_rdatatype(self) -> int:
+        """Return a 32-bit type value, the least significant 16 bits of
+        which are the ordinary DNS type, and the upper 16 bits of which are
+        the "covered" type, if any.
+
+        Returns an ``int``.
+        """
+
+        return self.covers() << 16 | self.rdtype
+
+    def to_text(
+        self,
+        origin: dns.name.Name | None = None,
+        relativize: bool = True,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert an rdata to text format.
+
+        Returns a ``str``.
+        """
+
+        raise NotImplementedError  # pragma: no cover
+
+    def _to_wire(
+        self,
+        file: Any,
+        compress: dns.name.CompressType | None = None,
+        origin: dns.name.Name | None = None,
+        canonicalize: bool = False,
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover
+
+    def to_wire(
+        self,
+        file: Any | None = None,
+        compress: dns.name.CompressType | None = None,
+        origin: dns.name.Name | None = None,
+        canonicalize: bool = False,
+    ) -> bytes | None:
+        """Convert an rdata to wire format.
+
+        Returns a ``bytes`` if no output file was specified, or ``None`` otherwise.
+        """
+
+        if file:
+            # We call _to_wire() and then return None explicitly instead of
+            # just returning the None from _to_wire() as mypy's func-returns-value
+            # unhelpfully errors out with "error: "_to_wire" of "Rdata" does not return
+            # a value (it only ever returns None)"
+            self._to_wire(file, compress, origin, canonicalize)
+            return None
+        else:
+            f = io.BytesIO()
+            self._to_wire(f, compress, origin, canonicalize)
+            return f.getvalue()
+
+    def to_generic(self, origin: dns.name.Name | None = None) -> "GenericRdata":
+        """Creates a dns.rdata.GenericRdata equivalent of this rdata.
+
+        Returns a ``dns.rdata.GenericRdata``.
+        """
+        wire = self.to_wire(origin=origin)
+        assert wire is not None  # for type checkers
+        return GenericRdata(self.rdclass, self.rdtype, wire)
+
+    def to_digestable(self, origin: dns.name.Name | None = None) -> bytes:
+        """Convert rdata to a format suitable for digesting in hashes. This
+        is also the DNSSEC canonical form.
+
+        Returns a ``bytes``.
+        """
+        wire = self.to_wire(origin=origin, canonicalize=True)
+        assert wire is not None  # for mypy
+        return wire
+
+    def __repr__(self):
+        covers = self.covers()
+        if covers == dns.rdatatype.NONE:
+            ctext = ""
+        else:
+            ctext = "(" + dns.rdatatype.to_text(covers) + ")"
+        return (
+            "<DNS "
+            + dns.rdataclass.to_text(self.rdclass)
+            + " "
+            + dns.rdatatype.to_text(self.rdtype)
+            + ctext
+            + " rdata: "
+            + str(self)
+            + ">"
+        )
+
+    def __str__(self):
+        return self.to_text()
+
+    def _cmp(self, other):
+        """Compare an rdata with another rdata of the same rdtype and
+        rdclass.
+
+        For rdata with only absolute names:
+            Return < 0 if self < other in the DNSSEC ordering, 0 if self
+            == other, and > 0 if self > other.
+        For rdata with at least one relative name:
+            The rdata sorts before any rdata with only absolute names.
+            When compared with another relative rdata, all names are
+            made absolute as if they were relative to the root, as the
+            proper origin is not available. While this creates a stable
+            ordering, it is NOT guaranteed to be the DNSSEC ordering.
+            In the future, all ordering comparisons for rdata with
+            relative names will be disallowed.
+ """ + # the next two lines are for type checkers, so they are bound + our = b"" + their = b"" + try: + our = self.to_digestable() + our_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + their_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + their = other.to_digestable(dns.name.root) + their_relative = True + if _allow_relative_comparisons: + if our_relative != their_relative: + # For the purpose of comparison, all rdata with at least one + # relative name is less than an rdata with only absolute names. + if our_relative: + return -1 + else: + return 1 + elif our_relative or their_relative: + raise NoRelativeRdataOrdering + if our == their: + return 0 + elif our > their: + return 1 + else: + return -1 + + def __eq__(self, other): + if not isinstance(other, Rdata): + return False + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return False + our_relative = False + their_relative = False + try: + our = self.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + their = other.to_digestable(dns.name.root) + their_relative = True + if our_relative != their_relative: + return False + return our == their + + def __ne__(self, other): + if not isinstance(other, Rdata): + return True + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return True + return not self.__eq__(other) + + def __lt__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) > 0 + + def __hash__(self): + return hash(self.to_digestable(dns.name.root)) + + @classmethod + def from_text( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + tok: dns.tokenizer.Tokenizer, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, + ) -> "Rdata": + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + parser: dns.wire.Parser, + origin: dns.name.Name | None = None, + ) -> "Rdata": + raise NotImplementedError # pragma: no cover + + def replace(self, **kwargs: Any) -> "Rdata": + """ + Create a new Rdata instance based on the instance replace was + invoked on. It is possible to pass different parameters to + override the corresponding properties of the base Rdata. + + Any field specific to the Rdata type can be replaced, but the + *rdtype* and *rdclass* fields cannot. + + Returns an instance of the same Rdata subclass as *self*. + """ + + # Get the constructor parameters. 
+ parameters = inspect.signature(self.__init__).parameters # type: ignore + + # Ensure that all of the arguments correspond to valid fields. + # Don't allow rdclass or rdtype to be changed, though. + for key in kwargs: + if key == "rdcomment": + continue + if key not in parameters: + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{key}'" + ) + if key in ("rdclass", "rdtype"): + raise AttributeError( + f"Cannot overwrite '{self.__class__.__name__}' attribute '{key}'" + ) + + # Construct the parameter list. For each field, use the value in + # kwargs if present, and the current value otherwise. + args = (kwargs.get(key, getattr(self, key)) for key in parameters) + + # Create, validate, and return the new object. + rd = self.__class__(*args) + # The comment is not set in the constructor, so give it special + # handling. + rdcomment = kwargs.get("rdcomment", self.rdcomment) + if rdcomment is not None: + object.__setattr__(rd, "rdcomment", rdcomment) + return rd + + # Type checking and conversion helpers. These are class methods as + # they don't touch object state and may be useful to others. + + @classmethod + def _as_rdataclass(cls, value): + return dns.rdataclass.RdataClass.make(value) + + @classmethod + def _as_rdatatype(cls, value): + return dns.rdatatype.RdataType.make(value) + + @classmethod + def _as_bytes( + cls, + value: Any, + encode: bool = False, + max_length: int | None = None, + empty_ok: bool = True, + ) -> bytes: + if encode and isinstance(value, str): + bvalue = value.encode() + elif isinstance(value, bytearray): + bvalue = bytes(value) + elif isinstance(value, bytes): + bvalue = value + else: + raise ValueError("not bytes") + if max_length is not None and len(bvalue) > max_length: + raise ValueError("too long") + if not empty_ok and len(bvalue) == 0: + raise ValueError("empty bytes not allowed") + return bvalue + + @classmethod + def _as_name(cls, value): + # Note that proper name conversion (e.g. with origin and IDNA + # awareness) is expected to be done via from_text. This is just + # a simple thing for people invoking the constructor directly. 
+ if isinstance(value, str): + return dns.name.from_text(value) + elif not isinstance(value, dns.name.Name): + raise ValueError("not a name") + return value + + @classmethod + def _as_uint8(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 255: + raise ValueError("not a uint8") + return value + + @classmethod + def _as_uint16(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 65535: + raise ValueError("not a uint16") + return value + + @classmethod + def _as_uint32(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 4294967295: + raise ValueError("not a uint32") + return value + + @classmethod + def _as_uint48(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 281474976710655: + raise ValueError("not a uint48") + return value + + @classmethod + def _as_int(cls, value, low=None, high=None): + if not isinstance(value, int): + raise ValueError("not an integer") + if low is not None and value < low: + raise ValueError("value too small") + if high is not None and value > high: + raise ValueError("value too large") + return value + + @classmethod + def _as_ipv4_address(cls, value): + if isinstance(value, str): + return dns.ipv4.canonicalize(value) + elif isinstance(value, bytes): + return dns.ipv4.inet_ntoa(value) + elif isinstance(value, ipaddress.IPv4Address): + return dns.ipv4.inet_ntoa(value.packed) + else: + raise ValueError("not an IPv4 address") + + @classmethod + def _as_ipv6_address(cls, value): + if isinstance(value, str): + return dns.ipv6.canonicalize(value) + elif isinstance(value, bytes): + return dns.ipv6.inet_ntoa(value) + elif isinstance(value, ipaddress.IPv6Address): + return dns.ipv6.inet_ntoa(value.packed) + else: + raise ValueError("not an IPv6 address") + + @classmethod + def _as_bool(cls, value): + if isinstance(value, bool): + return value + else: + raise ValueError("not a boolean") + + @classmethod + def _as_ttl(cls, value): + if isinstance(value, int): + return cls._as_int(value, 0, dns.ttl.MAX_TTL) + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError("not a TTL") + + @classmethod + def _as_tuple(cls, value, as_value): + try: + # For user convenience, if value is a singleton of the list + # element type, wrap it in a tuple. + return (as_value(value),) + except Exception: + # Otherwise, check each element of the iterable *value* + # against *as_value*. + return tuple(as_value(v) for v in value) + + # Processing order + + @classmethod + def _processing_order(cls, iterable): + items = list(iterable) + random.shuffle(items) + return items + + +@dns.immutable.immutable +class GenericRdata(Rdata): + """Generic Rdata Class + + This class is used for rdata types for which we have no better + implementation. It implements the DNS "unknown RRs" scheme. 
+ """ + + __slots__ = ["data"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + data: bytes, + ) -> None: + super().__init__(rdclass, rdtype) + self.data = data + + def to_text( + self, + origin: dns.name.Name | None = None, + relativize: bool = True, + **kw: Dict[str, Any], + ) -> str: + return rf"\# {len(self.data)} " + _hexify(self.data, **kw) # pyright: ignore + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + token = tok.get() + if not token.is_identifier() or token.value != r"\#": + raise dns.exception.SyntaxError(r"generic rdata does not start with \#") + length = tok.get_int() + hex = tok.concatenate_remaining_identifiers(True).encode() + data = binascii.unhexlify(hex) + if len(data) != length: + raise dns.exception.SyntaxError("generic rdata hex data has wrong length") + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + def to_generic(self, origin: dns.name.Name | None = None) -> "GenericRdata": + return self + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + return cls(rdclass, rdtype, parser.get_remaining()) + + +_rdata_classes: Dict[Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any] = ( + {} +) +_module_prefix = "dns.rdtypes" +_dynamic_load_allowed = True + + +def get_rdata_class(rdclass, rdtype, use_generic=True): + cls = _rdata_classes.get((rdclass, rdtype)) + if not cls: + cls = _rdata_classes.get((dns.rdataclass.ANY, rdtype)) + if not cls and _dynamic_load_allowed: + rdclass_text = dns.rdataclass.to_text(rdclass) + rdtype_text = dns.rdatatype.to_text(rdtype) + rdtype_text = rdtype_text.replace("-", "_") + try: + mod = import_module( + ".".join([_module_prefix, rdclass_text, rdtype_text]) + ) + cls = getattr(mod, rdtype_text) + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + try: + mod = import_module(".".join([_module_prefix, "ANY", rdtype_text])) + cls = getattr(mod, rdtype_text) + _rdata_classes[(dns.rdataclass.ANY, rdtype)] = cls + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + pass + if not cls and use_generic: + cls = GenericRdata + _rdata_classes[(rdclass, rdtype)] = cls + return cls + + +def load_all_types(disable_dynamic_load=True): + """Load all rdata types for which dnspython has a non-generic implementation. + + Normally dnspython loads DNS rdatatype implementations on demand, but in some + specialized cases loading all types at an application-controlled time is preferred. + + If *disable_dynamic_load*, a ``bool``, is ``True`` then dnspython will not attempt + to use its dynamic loading mechanism if an unknown type is subsequently encountered, + and will simply use the ``GenericRdata`` class. + """ + # Load class IN and ANY types. + for rdtype in dns.rdatatype.RdataType: + get_rdata_class(dns.rdataclass.IN, rdtype, False) + # Load the one non-ANY implementation we have in CH. Everything + # else in CH is an ANY type, and we'll discover those on demand but won't + # have to import anything. + get_rdata_class(dns.rdataclass.CH, dns.rdatatype.A, False) + if disable_dynamic_load: + # Now disable dynamic loading so any subsequent unknown type immediately becomes + # GenericRdata without a load attempt. 
+ global _dynamic_load_allowed + _dynamic_load_allowed = False + + +def from_text( + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + tok: dns.tokenizer.Tokenizer | str, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, + idna_codec: dns.name.IDNACodec | None = None, +) -> Rdata: + """Build an rdata object from text format. + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_text() class method is called + with the parameters to this function. + + If *tok* is a ``str``, then a tokenizer is created and the string + is used as its input. + + *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. + + *tok*, a ``dns.tokenizer.Tokenizer`` or a ``str``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use if a tokenizer needs to be created. If + ``None``, the default IDNA 2003 encoder/decoder is used. If a + tokenizer is not created, then the codec associated with the tokenizer + is the one that is used. + + Returns an instance of the chosen Rdata subclass. + + """ + if isinstance(tok, str): + tok = dns.tokenizer.Tokenizer(tok, idna_codec=idna_codec) + if not isinstance(tok, dns.tokenizer.Tokenizer): + raise ValueError("tok must be a string or a Tokenizer") + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + assert cls is not None # for type checkers + with dns.exception.ExceptionWrapper(dns.exception.SyntaxError): + rdata = None + if cls != GenericRdata: + # peek at first token + token = tok.get() + tok.unget(token) + if token.is_identifier() and token.value == r"\#": + # + # Known type using the generic syntax. Extract the + # wire form from the generic syntax, and then run + # from_wire on it. + # + grdata = GenericRdata.from_text( + rdclass, rdtype, tok, origin, relativize, relativize_to + ) + rdata = from_wire( + rdclass, rdtype, grdata.data, 0, len(grdata.data), origin + ) + # + # If this comparison isn't equal, then there must have been + # compressed names in the wire format, which is an error, + # there being no reasonable context to decompress with. + # + rwire = rdata.to_wire() + if rwire != grdata.data: + raise dns.exception.SyntaxError( + "compressed data in " + "generic syntax form " + "of known rdatatype" + ) + if rdata is None: + rdata = cls.from_text( + rdclass, rdtype, tok, origin, relativize, relativize_to + ) + token = tok.get_eol_as_token() + if token.comment is not None: + object.__setattr__(rdata, "rdcomment", token.comment) + return rdata + + +def from_wire_parser( + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + parser: dns.wire.Parser, + origin: dns.name.Name | None = None, +) -> Rdata: + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. 
If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the rdata length. + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + assert cls is not None # for type checkers + with dns.exception.ExceptionWrapper(dns.exception.FormError): + return cls.from_wire_parser(rdclass, rdtype, parser, origin) + + +def from_wire( + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + wire: bytes, + current: int, + rdlen: int, + origin: dns.name.Name | None = None, +) -> Rdata: + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *wire*, a ``bytes``, the wire-format message. + + *current*, an ``int``, the offset in wire of the beginning of + the rdata. + + *rdlen*, an ``int``, the length of the wire-format rdata + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(rdlen): + return from_wire_parser(rdclass, rdtype, parser, origin) + + +class RdatatypeExists(dns.exception.DNSException): + """DNS rdatatype already exists.""" + + supp_kwargs = {"rdclass", "rdtype"} + fmt = ( + "The rdata type with class {rdclass:d} and rdtype {rdtype:d} " + + "already exists." + ) + + +def register_type( + implementation: Any, + rdtype: int, + rdtype_text: str, + is_singleton: bool = False, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, +) -> None: + """Dynamically register a module to handle an rdatatype. + + *implementation*, a subclass of ``dns.rdata.Rdata`` implementing the type, + or a module containing such a class named by its text form. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + + *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if + it applies to all classes. 
+ """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + existing_cls = get_rdata_class(rdclass, rdtype) + if existing_cls != GenericRdata or dns.rdatatype.is_metatype(rdtype): + raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) + if isinstance(implementation, type) and issubclass(implementation, Rdata): + impclass = implementation + else: + impclass = getattr(implementation, rdtype_text.replace("-", "_")) + _rdata_classes[(rdclass, rdtype)] = impclass + dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/venv/lib/python3.12/site-packages/dns/rdataclass.py b/venv/lib/python3.12/site-packages/dns/rdataclass.py new file mode 100644 index 0000000..89b85a7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdataclass.py @@ -0,0 +1,118 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Rdata Classes.""" + +import dns.enum +import dns.exception + + +class RdataClass(dns.enum.IntEnum): + """DNS Rdata Class""" + + RESERVED0 = 0 + IN = 1 + INTERNET = IN + CH = 3 + CHAOS = CH + HS = 4 + HESIOD = HS + NONE = 254 + ANY = 255 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "class" + + @classmethod + def _prefix(cls): + return "CLASS" + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdataclass + + +_metaclasses = {RdataClass.NONE, RdataClass.ANY} + + +class UnknownRdataclass(dns.exception.DNSException): + """A DNS class is unknown.""" + + +def from_text(text: str) -> RdataClass: + """Convert text into a DNS rdata class value. + + The input text can be a defined DNS RR class mnemonic or + instance of the DNS generic class syntax. + + For example, "IN" and "CLASS1" will both result in a value of 1. + + Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``dns.rdataclass.RdataClass``. + """ + + return RdataClass.from_text(text) + + +def to_text(value: RdataClass) -> str: + """Convert a DNS rdata class value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic class syntax will be used. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataClass.to_text(value) + + +def is_metaclass(rdclass: RdataClass) -> bool: + """True if the specified class is a metaclass. + + The currently defined metaclasses are ANY and NONE. + + *rdclass* is a ``dns.rdataclass.RdataClass``. 
+ """ + + if rdclass in _metaclasses: + return True + return False + + +### BEGIN generated RdataClass constants + +RESERVED0 = RdataClass.RESERVED0 +IN = RdataClass.IN +INTERNET = RdataClass.INTERNET +CH = RdataClass.CH +CHAOS = RdataClass.CHAOS +HS = RdataClass.HS +HESIOD = RdataClass.HESIOD +NONE = RdataClass.NONE +ANY = RdataClass.ANY + +### END generated RdataClass constants diff --git a/venv/lib/python3.12/site-packages/dns/rdataset.py b/venv/lib/python3.12/site-packages/dns/rdataset.py new file mode 100644 index 0000000..1edf67d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdataset.py @@ -0,0 +1,508 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" + +import io +import random +import struct +from typing import Any, Collection, Dict, List, cast + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.renderer +import dns.set +import dns.ttl + +# define SimpleSet here for backwards compatibility +SimpleSet = dns.set.Set + + +class DifferingCovers(dns.exception.DNSException): + """An attempt was made to add a DNS SIG/RRSIG whose covered type + is not the same as that of the other rdatas in the rdataset.""" + + +class IncompatibleTypes(dns.exception.DNSException): + """An attempt was made to add DNS RR data of an incompatible type.""" + + +class Rdataset(dns.set.Set): + """A DNS rdataset.""" + + __slots__ = ["rdclass", "rdtype", "covers", "ttl"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ttl: int = 0, + ): + """Create a new rdataset of the specified class and type. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the rdataclass. + + *rdtype*, an ``dns.rdatatype.RdataType``, the rdatatype. + + *covers*, an ``dns.rdatatype.RdataType``, the covered rdatatype. + + *ttl*, an ``int``, the TTL. + """ + + super().__init__() + self.rdclass = rdclass + self.rdtype: dns.rdatatype.RdataType = rdtype + self.covers: dns.rdatatype.RdataType = covers + self.ttl = ttl + + def _clone(self): + obj = cast(Rdataset, super()._clone()) + obj.rdclass = self.rdclass + obj.rdtype = self.rdtype + obj.covers = self.covers + obj.ttl = self.ttl + return obj + + def update_ttl(self, ttl: int) -> None: + """Perform TTL minimization. + + Set the TTL of the rdataset to be the lesser of the set's current + TTL or the specified TTL. If the set contains no rdatas, set the TTL + to the specified TTL. + + *ttl*, an ``int`` or ``str``. 
+        """
+        ttl = dns.ttl.make(ttl)
+        if len(self) == 0:
+            self.ttl = ttl
+        elif ttl < self.ttl:
+            self.ttl = ttl
+
+    # pylint: disable=arguments-differ,arguments-renamed
+    def add(  # pyright: ignore
+        self, rd: dns.rdata.Rdata, ttl: int | None = None
+    ) -> None:
+        """Add the specified rdata to the rdataset.
+
+        If the optional *ttl* parameter is supplied, then
+        ``self.update_ttl(ttl)`` will be called prior to adding the rdata.
+
+        *rd*, a ``dns.rdata.Rdata``, the rdata
+
+        *ttl*, an ``int``, the TTL.
+
+        Raises ``dns.rdataset.IncompatibleTypes`` if the type and class
+        do not match the type and class of the rdataset.
+
+        Raises ``dns.rdataset.DifferingCovers`` if the type is a signature
+        type and the covered type does not match that of the rdataset.
+        """
+
+        #
+        # If we're adding a signature, do some special handling to
+        # check that the signature covers the same type as the
+        # other rdatas in this rdataset.  If this is the first rdata
+        # in the set, initialize the covers field.
+        #
+        if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
+            raise IncompatibleTypes
+        if ttl is not None:
+            self.update_ttl(ttl)
+        if self.rdtype == dns.rdatatype.RRSIG or self.rdtype == dns.rdatatype.SIG:
+            covers = rd.covers()
+            if len(self) == 0 and self.covers == dns.rdatatype.NONE:
+                self.covers = covers
+            elif self.covers != covers:
+                raise DifferingCovers
+        if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
+            self.clear()
+        super().add(rd)
+
+    def union_update(self, other):
+        self.update_ttl(other.ttl)
+        super().union_update(other)
+
+    def intersection_update(self, other):
+        self.update_ttl(other.ttl)
+        super().intersection_update(other)
+
+    def update(self, other):
+        """Add all rdatas in other to self.
+
+        *other*, a ``dns.rdataset.Rdataset``, the rdataset from which
+        to update.
+        """
+
+        self.update_ttl(other.ttl)
+        super().update(other)
+
+    def _rdata_repr(self):
+        def maybe_truncate(s):
+            if len(s) > 100:
+                return s[:100] + "..."
+            return s
+
+        return "[" + ", ".join(f"<{maybe_truncate(str(rr))}>" for rr in self) + "]"
+
+    def __repr__(self):
+        if self.covers == 0:
+            ctext = ""
+        else:
+            ctext = "(" + dns.rdatatype.to_text(self.covers) + ")"
+        return (
+            "<DNS "
+            + dns.rdataclass.to_text(self.rdclass)
+            + " "
+            + dns.rdatatype.to_text(self.rdtype)
+            + ctext
+            + " rdataset: "
+            + self._rdata_repr()
+            + ">"
+        )
+
+    def __str__(self):
+        return self.to_text()
+
+    def __eq__(self, other):
+        if not isinstance(other, Rdataset):
+            return False
+        if (
+            self.rdclass != other.rdclass
+            or self.rdtype != other.rdtype
+            or self.covers != other.covers
+        ):
+            return False
+        return super().__eq__(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def to_text(
+        self,
+        name: dns.name.Name | None = None,
+        origin: dns.name.Name | None = None,
+        relativize: bool = True,
+        override_rdclass: dns.rdataclass.RdataClass | None = None,
+        want_comments: bool = False,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert the rdataset into DNS zone file format.
+
+        See ``dns.name.Name.choose_relativity`` for more information
+        on how *origin* and *relativize* determine the way names
+        are emitted.
+
+        Any additional keyword arguments are passed on to the rdata
+        ``to_text()`` method.
+
+        *name*, a ``dns.name.Name``.  If name is not ``None``, emit RRs with
+        *name* as the owner name.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
+        names.
+
+        *relativize*, a ``bool``.  If ``True``, names will be relativized
+        to *origin*.
+
+        *override_rdclass*, a ``dns.rdataclass.RdataClass`` or ``None``.
+        If not ``None``, use this class instead of the Rdataset's class.
+
+        *want_comments*, a ``bool``.
If ``True``, emit comments for rdata + which have them. The default is ``False``. + """ + + if name is not None: + name = name.choose_relativity(origin, relativize) + ntext = str(name) + pad = " " + else: + ntext = "" + pad = "" + s = io.StringIO() + if override_rdclass is not None: + rdclass = override_rdclass + else: + rdclass = self.rdclass + if len(self) == 0: + # + # Empty rdatasets are used for the question section, and in + # some dynamic updates, so we don't need to print out the TTL + # (which is meaningless anyway). + # + s.write( + f"{ntext}{pad}{dns.rdataclass.to_text(rdclass)} " + f"{dns.rdatatype.to_text(self.rdtype)}\n" + ) + else: + for rd in self: + extra = "" + if want_comments: + if rd.rdcomment: + extra = f" ;{rd.rdcomment}" + s.write( + f"{ntext}{pad}{self.ttl} " + f"{dns.rdataclass.to_text(rdclass)} " + f"{dns.rdatatype.to_text(self.rdtype)} " + f"{rd.to_text(origin=origin, relativize=relativize, **kw)}" + f"{extra}\n" + ) + # + # We strip off the final \n for the caller's convenience in printing + # + return s.getvalue()[:-1] + + def to_wire( + self, + name: dns.name.Name, + file: Any, + compress: dns.name.CompressType | None = None, + origin: dns.name.Name | None = None, + override_rdclass: dns.rdataclass.RdataClass | None = None, + want_shuffle: bool = True, + ) -> int: + """Convert the rdataset to wire format. + + *name*, a ``dns.name.Name`` is the owner name to use. + + *file* is the file where the name is emitted (typically a + BytesIO file). + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *override_rdclass*, an ``int``, is used as the class instead of the + class of the rdataset. This is useful when rendering rdatasets + associated with dynamic updates. + + *want_shuffle*, a ``bool``. If ``True``, then the order of the + Rdatas within the Rdataset will be shuffled before rendering. + + Returns an ``int``, the number of records emitted. + """ + + if override_rdclass is not None: + rdclass = override_rdclass + want_shuffle = False + else: + rdclass = self.rdclass + if len(self) == 0: + name.to_wire(file, compress, origin) + file.write(struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)) + return 1 + else: + l: Rdataset | List[dns.rdata.Rdata] + if want_shuffle: + l = list(self) + random.shuffle(l) + else: + l = self + for rd in l: + name.to_wire(file, compress, origin) + file.write(struct.pack("!HHI", self.rdtype, rdclass, self.ttl)) + with dns.renderer.prefixed_length(file, 2): + rd.to_wire(file, compress, origin) + return len(self) + + def match( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> bool: + """Returns ``True`` if this rdataset matches the specified class, + type, and covers. + """ + if self.rdclass == rdclass and self.rdtype == rdtype and self.covers == covers: + return True + return False + + def processing_order(self) -> List[dns.rdata.Rdata]: + """Return rdatas in a valid processing order according to the type's + specification. For example, MX records are in preference order from + lowest to highest preferences, with items of the same preference + shuffled. + + For types that do not define a processing order, the rdatas are + simply shuffled. 
+ """ + if len(self) == 0: + return [] + else: + return self[0]._processing_order(iter(self)) # pyright: ignore + + +@dns.immutable.immutable +class ImmutableRdataset(Rdataset): # lgtm[py/missing-equals] + """An immutable DNS rdataset.""" + + _clone_class = Rdataset + + def __init__(self, rdataset: Rdataset): + """Create an immutable rdataset from the specified rdataset.""" + + super().__init__( + rdataset.rdclass, rdataset.rdtype, rdataset.covers, rdataset.ttl + ) + self.items = dns.immutable.Dict(rdataset.items) + + def update_ttl(self, ttl): + raise TypeError("immutable") + + def add(self, rd, ttl=None): + raise TypeError("immutable") + + def union_update(self, other): + raise TypeError("immutable") + + def intersection_update(self, other): + raise TypeError("immutable") + + def update(self, other): + raise TypeError("immutable") + + def __delitem__(self, i): + raise TypeError("immutable") + + # lgtm complains about these not raising ArithmeticError, but there is + # precedent for overrides of these methods in other classes to raise + # TypeError, and it seems like the better exception. + + def __ior__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __iand__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __iadd__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __isub__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def clear(self): + raise TypeError("immutable") + + def __copy__(self): + return ImmutableRdataset(super().copy()) # pyright: ignore + + def copy(self): + return ImmutableRdataset(super().copy()) # pyright: ignore + + def union(self, other): + return ImmutableRdataset(super().union(other)) # pyright: ignore + + def intersection(self, other): + return ImmutableRdataset(super().intersection(other)) # pyright: ignore + + def difference(self, other): + return ImmutableRdataset(super().difference(other)) # pyright: ignore + + def symmetric_difference(self, other): + return ImmutableRdataset(super().symmetric_difference(other)) # pyright: ignore + + +def from_text_list( + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + ttl: int, + text_rdatas: Collection[str], + idna_codec: dns.name.IDNACodec | None = None, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, +) -> Rdataset: + """Create an rdataset with the specified class, type, and TTL, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rdataset.Rdataset`` object. 
+ """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = Rdataset(rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text( + r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec + ) + r.add(rd) + return r + + +def from_text( + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + ttl: int, + *text_rdatas: Any, +) -> Rdataset: + """Create an rdataset with the specified class, type, and TTL, and with + the specified rdatas in text format. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_text_list(rdclass, rdtype, ttl, cast(Collection[str], text_rdatas)) + + +def from_rdata_list(ttl: int, rdatas: Collection[dns.rdata.Rdata]) -> Rdataset: + """Create an rdataset with the specified TTL, and with + the specified list of rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = Rdataset(rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + assert r is not None + return r + + +def from_rdata(ttl: int, *rdatas: Any) -> Rdataset: + """Create an rdataset with the specified TTL, and with + the specified rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_rdata_list(ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/venv/lib/python3.12/site-packages/dns/rdatatype.py b/venv/lib/python3.12/site-packages/dns/rdatatype.py new file mode 100644 index 0000000..211d810 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdatatype.py @@ -0,0 +1,338 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
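Since the flattened dump makes the rdataset constructors above easy to miss, here is a quick usage sketch (illustrative only; the addresses and TTLs are invented, but dns.rdataset.from_text, from_rdata_list, and the TTL-minimization behavior are the ones defined in this file):

    import dns.rdata
    import dns.rdataset

    # Build an A rdataset from text rdatas; the TTL applies to the whole set.
    rs = dns.rdataset.from_text("IN", "A", 300, "203.0.113.1", "203.0.113.2")

    # The same set built from rdata objects; from_rdata_list takes the
    # class and type from the first rdata it sees.
    rds = [dns.rdata.from_text("IN", "A", t) for t in ("203.0.113.1", "203.0.113.2")]
    assert dns.rdataset.from_rdata_list(300, rds) == rs

    # update_ttl() keeps the minimum TTL seen, so adding with a lower TTL
    # lowers the whole set's TTL.
    rs.add(dns.rdata.from_text("IN", "A", "203.0.113.3"), ttl=60)
    assert rs.ttl == 60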
+ +"""DNS Rdata Types.""" + +from typing import Dict + +import dns.enum +import dns.exception + + +class RdataType(dns.enum.IntEnum): + """DNS Rdata Type""" + + TYPE0 = 0 + NONE = 0 + A = 1 + NS = 2 + MD = 3 + MF = 4 + CNAME = 5 + SOA = 6 + MB = 7 + MG = 8 + MR = 9 + NULL = 10 + WKS = 11 + PTR = 12 + HINFO = 13 + MINFO = 14 + MX = 15 + TXT = 16 + RP = 17 + AFSDB = 18 + X25 = 19 + ISDN = 20 + RT = 21 + NSAP = 22 + NSAP_PTR = 23 + SIG = 24 + KEY = 25 + PX = 26 + GPOS = 27 + AAAA = 28 + LOC = 29 + NXT = 30 + SRV = 33 + NAPTR = 35 + KX = 36 + CERT = 37 + A6 = 38 + DNAME = 39 + OPT = 41 + APL = 42 + DS = 43 + SSHFP = 44 + IPSECKEY = 45 + RRSIG = 46 + NSEC = 47 + DNSKEY = 48 + DHCID = 49 + NSEC3 = 50 + NSEC3PARAM = 51 + TLSA = 52 + SMIMEA = 53 + HIP = 55 + NINFO = 56 + CDS = 59 + CDNSKEY = 60 + OPENPGPKEY = 61 + CSYNC = 62 + ZONEMD = 63 + SVCB = 64 + HTTPS = 65 + DSYNC = 66 + SPF = 99 + UNSPEC = 103 + NID = 104 + L32 = 105 + L64 = 106 + LP = 107 + EUI48 = 108 + EUI64 = 109 + TKEY = 249 + TSIG = 250 + IXFR = 251 + AXFR = 252 + MAILB = 253 + MAILA = 254 + ANY = 255 + URI = 256 + CAA = 257 + AVC = 258 + AMTRELAY = 260 + RESINFO = 261 + WALLET = 262 + TA = 32768 + DLV = 32769 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "type" + + @classmethod + def _prefix(cls): + return "TYPE" + + @classmethod + def _extra_from_text(cls, text): + if text.find("-") >= 0: + try: + return cls[text.replace("-", "_")] + except KeyError: # pragma: no cover + pass + return _registered_by_text.get(text) + + @classmethod + def _extra_to_text(cls, value, current_text): + if current_text is None: + return _registered_by_value.get(value) + if current_text.find("_") >= 0: + return current_text.replace("_", "-") + return current_text + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdatatype + + +_registered_by_text: Dict[str, RdataType] = {} +_registered_by_value: Dict[RdataType, str] = {} + +_metatypes = {RdataType.OPT} + +_singletons = { + RdataType.SOA, + RdataType.NXT, + RdataType.DNAME, + RdataType.NSEC, + RdataType.CNAME, +} + + +class UnknownRdatatype(dns.exception.DNSException): + """DNS resource record type is unknown.""" + + +def from_text(text: str) -> RdataType: + """Convert text into a DNS rdata type value. + + The input text can be a defined DNS RR type mnemonic or + instance of the DNS generic type syntax. + + For example, "NS" and "TYPE2" will both result in a value of 2. + + Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``dns.rdatatype.RdataType``. + """ + + return RdataType.from_text(text) + + +def to_text(value: RdataType) -> str: + """Convert a DNS rdata type value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic type syntax will be used. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataType.to_text(value) + + +def is_metatype(rdtype: RdataType) -> bool: + """True if the specified type is a metatype. + + *rdtype* is a ``dns.rdatatype.RdataType``. + + The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, + MAILB, ANY, and OPT. + + Returns a ``bool``. + """ + + return (256 > rdtype >= 128) or rdtype in _metatypes + + +def is_singleton(rdtype: RdataType) -> bool: + """Is the specified type a singleton type? 
+ + Singleton types can only have a single rdata in an rdataset, or a single + RR in an RRset. + + The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and + SOA. + + *rdtype* is an ``int``. + + Returns a ``bool``. + """ + + if rdtype in _singletons: + return True + return False + + +# pylint: disable=redefined-outer-name +def register_type( + rdtype: RdataType, rdtype_text: str, is_singleton: bool = False +) -> None: + """Dynamically register an rdatatype. + + *rdtype*, a ``dns.rdatatype.RdataType``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + """ + + _registered_by_text[rdtype_text] = rdtype + _registered_by_value[rdtype] = rdtype_text + if is_singleton: + _singletons.add(rdtype) + + +### BEGIN generated RdataType constants + +TYPE0 = RdataType.TYPE0 +NONE = RdataType.NONE +A = RdataType.A +NS = RdataType.NS +MD = RdataType.MD +MF = RdataType.MF +CNAME = RdataType.CNAME +SOA = RdataType.SOA +MB = RdataType.MB +MG = RdataType.MG +MR = RdataType.MR +NULL = RdataType.NULL +WKS = RdataType.WKS +PTR = RdataType.PTR +HINFO = RdataType.HINFO +MINFO = RdataType.MINFO +MX = RdataType.MX +TXT = RdataType.TXT +RP = RdataType.RP +AFSDB = RdataType.AFSDB +X25 = RdataType.X25 +ISDN = RdataType.ISDN +RT = RdataType.RT +NSAP = RdataType.NSAP +NSAP_PTR = RdataType.NSAP_PTR +SIG = RdataType.SIG +KEY = RdataType.KEY +PX = RdataType.PX +GPOS = RdataType.GPOS +AAAA = RdataType.AAAA +LOC = RdataType.LOC +NXT = RdataType.NXT +SRV = RdataType.SRV +NAPTR = RdataType.NAPTR +KX = RdataType.KX +CERT = RdataType.CERT +A6 = RdataType.A6 +DNAME = RdataType.DNAME +OPT = RdataType.OPT +APL = RdataType.APL +DS = RdataType.DS +SSHFP = RdataType.SSHFP +IPSECKEY = RdataType.IPSECKEY +RRSIG = RdataType.RRSIG +NSEC = RdataType.NSEC +DNSKEY = RdataType.DNSKEY +DHCID = RdataType.DHCID +NSEC3 = RdataType.NSEC3 +NSEC3PARAM = RdataType.NSEC3PARAM +TLSA = RdataType.TLSA +SMIMEA = RdataType.SMIMEA +HIP = RdataType.HIP +NINFO = RdataType.NINFO +CDS = RdataType.CDS +CDNSKEY = RdataType.CDNSKEY +OPENPGPKEY = RdataType.OPENPGPKEY +CSYNC = RdataType.CSYNC +ZONEMD = RdataType.ZONEMD +SVCB = RdataType.SVCB +HTTPS = RdataType.HTTPS +DSYNC = RdataType.DSYNC +SPF = RdataType.SPF +UNSPEC = RdataType.UNSPEC +NID = RdataType.NID +L32 = RdataType.L32 +L64 = RdataType.L64 +LP = RdataType.LP +EUI48 = RdataType.EUI48 +EUI64 = RdataType.EUI64 +TKEY = RdataType.TKEY +TSIG = RdataType.TSIG +IXFR = RdataType.IXFR +AXFR = RdataType.AXFR +MAILB = RdataType.MAILB +MAILA = RdataType.MAILA +ANY = RdataType.ANY +URI = RdataType.URI +CAA = RdataType.CAA +AVC = RdataType.AVC +AMTRELAY = RdataType.AMTRELAY +RESINFO = RdataType.RESINFO +WALLET = RdataType.WALLET +TA = RdataType.TA +DLV = RdataType.DLV + +### END generated RdataType constants diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AFSDB.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AFSDB.py new file mode 100644 index 0000000..06a3b97 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AFSDB.py @@ -0,0 +1,45 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): + """AFSDB record""" + + # Use the property mechanism to make "subtype" an alias for the + # "preference" attribute, and "hostname" an alias for the "exchange" + # attribute. + # + # This lets us inherit the UncompressedMX implementation but lets + # the caller use appropriate attribute names for the rdata type. + # + # We probably lose some performance vs. a cut-and-paste + # implementation, but this way we don't copy code, and that's + # good. + + @property + def subtype(self): + "the AFSDB subtype" + return self.preference + + @property + def hostname(self): + "the AFSDB hostname" + return self.exchange diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AMTRELAY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AMTRELAY.py new file mode 100644 index 0000000..dc9fa87 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AMTRELAY.py @@ -0,0 +1,89 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
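To make the alias-property comment in the AFSDB class above concrete, a small sketch (the record text is invented; subtype and hostname simply forward to the preference and exchange fields inherited from mxbase):

    import dns.rdata

    afsdb = dns.rdata.from_text("IN", "AFSDB", "1 afsdb.example.com.")
    # The aliases read the same values as the inherited mxbase attributes.
    assert afsdb.subtype == afsdb.preference == 1
    assert afsdb.hostname == afsdb.exchange
    assert str(afsdb.hostname) == "afsdb.example.com."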
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdtypes.util + + +class Relay(dns.rdtypes.util.Gateway): + name = "AMTRELAY relay" + + @property + def relay(self): + return self.gateway + + +@dns.immutable.immutable +class AMTRELAY(dns.rdata.Rdata): + """AMTRELAY record""" + + # see: RFC 8777 + + __slots__ = ["precedence", "discovery_optional", "relay_type", "relay"] + + def __init__( + self, rdclass, rdtype, precedence, discovery_optional, relay_type, relay + ): + super().__init__(rdclass, rdtype) + relay = Relay(relay_type, relay) + self.precedence = self._as_uint8(precedence) + self.discovery_optional = self._as_bool(discovery_optional) + self.relay_type = relay.type + self.relay = relay.relay + + def to_text(self, origin=None, relativize=True, **kw): + relay = Relay(self.relay_type, self.relay).to_text(origin, relativize) + return ( + f"{self.precedence} {self.discovery_optional:d} {self.relay_type} {relay}" + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + precedence = tok.get_uint8() + discovery_optional = tok.get_uint8() + if discovery_optional > 1: + raise dns.exception.SyntaxError("expecting 0 or 1") + discovery_optional = bool(discovery_optional) + relay_type = tok.get_uint8() + if relay_type > 0x7F: + raise dns.exception.SyntaxError("expecting an integer <= 127") + relay = Relay.from_text(relay_type, tok, origin, relativize, relativize_to) + return cls( + rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + relay_type = self.relay_type | (self.discovery_optional << 7) + header = struct.pack("!BB", self.precedence, relay_type) + file.write(header) + Relay(self.relay_type, self.relay).to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (precedence, relay_type) = parser.get_struct("!BB") + discovery_optional = bool(relay_type >> 7) + relay_type &= 0x7F + relay = Relay.from_wire_parser(relay_type, parser, origin) + return cls( + rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay + ) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AVC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AVC.py new file mode 100644 index 0000000..a27ae2d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AVC.py @@ -0,0 +1,26 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
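A round-trip sketch of the D-bit packing that AMTRELAY._to_wire performs above (the record values are invented, following the RFC 8777 text syntax "precedence D-bit relay-type relay"; relay type 3 is a domain name):

    import dns.rdata

    r = dns.rdata.from_text("IN", "AMTRELAY", "10 1 3 amtrelay.example.com.")
    wire = r.to_wire()
    # Byte 0 is the precedence; byte 1 folds discovery_optional into the
    # top bit of the 7-bit relay type, so 0x80 | 3 == 0x83 here.
    assert wire[0] == 10 and wire[1] == 0x83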
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class AVC(dns.rdtypes.txtbase.TXTBase): + """AVC record""" + + # See: IANA dns parameters for AVC diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CAA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CAA.py new file mode 100644 index 0000000..8c62e62 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CAA.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class CAA(dns.rdata.Rdata): + """CAA (Certification Authority Authorization) record""" + + # see: RFC 6844 + + __slots__ = ["flags", "tag", "value"] + + def __init__(self, rdclass, rdtype, flags, tag, value): + super().__init__(rdclass, rdtype) + self.flags = self._as_uint8(flags) + self.tag = self._as_bytes(tag, True, 255) + if not tag.isalnum(): + raise ValueError("tag is not alphanumeric") + self.value = self._as_bytes(value) + + def to_text(self, origin=None, relativize=True, **kw): + return f'{self.flags} {dns.rdata._escapify(self.tag)} "{dns.rdata._escapify(self.value)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + flags = tok.get_uint8() + tag = tok.get_string().encode() + value = tok.get_string().encode() + return cls(rdclass, rdtype, flags, tag, value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!B", self.flags)) + l = len(self.tag) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.tag) + file.write(self.value) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + flags = parser.get_uint8() + tag = parser.get_counted_bytes() + value = parser.get_remaining() + return cls(rdclass, rdtype, flags, tag, value) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDNSKEY.py new file mode 100644 index 0000000..b613409 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDNSKEY.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] + REVOKE, + SEP, + ZONE, +) + +# pylint: enable=unused-import + + +@dns.immutable.immutable +class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + """CDNSKEY record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDS.py new file mode 100644 index 0000000..8312b97 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDS.py @@ -0,0 +1,29 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class CDS(dns.rdtypes.dsbase.DSBase): + """CDS record""" + + _digest_length_by_type = { + **dns.rdtypes.dsbase.DSBase._digest_length_by_type, + 0: 1, # delete, RFC 8078 Sec. 4 (including Errata ID 5049) + } diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CERT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CERT.py new file mode 100644 index 0000000..4d5e5bd --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CERT.py @@ -0,0 +1,113 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
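A short sketch against the CAA class a little earlier in this dump (the issuer value is an example; note that the tag is length-prefixed on the wire while the value simply runs to the end of the rdata):

    import dns.rdata

    caa = dns.rdata.from_text("IN", "CAA", '0 issue "ca.example.net"')
    assert (caa.flags, caa.tag, caa.value) == (0, b"issue", b"ca.example.net")

    wire = caa.to_wire()
    # flags (1 byte), tag length (1 byte), tag, then the raw value.
    assert wire[1] == len(b"issue") and wire[2:7] == b"issue"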
+ +import base64 +import struct + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + +_ctype_by_value = { + 1: "PKIX", + 2: "SPKI", + 3: "PGP", + 4: "IPKIX", + 5: "ISPKI", + 6: "IPGP", + 7: "ACPKIX", + 8: "IACPKIX", + 253: "URI", + 254: "OID", +} + +_ctype_by_name = { + "PKIX": 1, + "SPKI": 2, + "PGP": 3, + "IPKIX": 4, + "ISPKI": 5, + "IPGP": 6, + "ACPKIX": 7, + "IACPKIX": 8, + "URI": 253, + "OID": 254, +} + + +def _ctype_from_text(what): + v = _ctype_by_name.get(what) + if v is not None: + return v + return int(what) + + +def _ctype_to_text(what): + v = _ctype_by_value.get(what) + if v is not None: + return v + return str(what) + + +@dns.immutable.immutable +class CERT(dns.rdata.Rdata): + """CERT record""" + + # see RFC 4398 + + __slots__ = ["certificate_type", "key_tag", "algorithm", "certificate"] + + def __init__( + self, rdclass, rdtype, certificate_type, key_tag, algorithm, certificate + ): + super().__init__(rdclass, rdtype) + self.certificate_type = self._as_uint16(certificate_type) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = self._as_uint8(algorithm) + self.certificate = self._as_bytes(certificate) + + def to_text(self, origin=None, relativize=True, **kw): + certificate_type = _ctype_to_text(self.certificate_type) + algorithm = dns.dnssectypes.Algorithm.to_text(self.algorithm) + certificate = dns.rdata._base64ify(self.certificate, **kw) # pyright: ignore + return f"{certificate_type} {self.key_tag} {algorithm} {certificate}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + certificate_type = _ctype_from_text(tok.get_string()) + key_tag = tok.get_uint16() + algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) + b64 = tok.concatenate_remaining_identifiers().encode() + certificate = base64.b64decode(b64) + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + prefix = struct.pack( + "!HHB", self.certificate_type, self.key_tag, self.algorithm + ) + file.write(prefix) + file.write(self.certificate) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (certificate_type, key_tag, algorithm) = parser.get_struct("!HHB") + certificate = parser.get_remaining() + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CNAME.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CNAME.py new file mode 100644 index 0000000..665e407 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CNAME.py @@ -0,0 +1,28 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
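
A sketch of how the CERT mnemonic tables above are used during parsing; the certificate bytes are arbitrary base64 ("aGVsbG8=" is just "hello"), not a real certificate:

import dns.rdata

rd = dns.rdata.from_text("IN", "CERT", "PKIX 12345 RSASHA256 aGVsbG8=")
assert rd.certificate_type == 1   # _ctype_from_text("PKIX")
assert rd.algorithm == 8          # dns.dnssectypes.Algorithm.RSASHA256
print(rd.to_text())               # PKIX 12345 RSASHA256 aGVsbG8=
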
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class CNAME(dns.rdtypes.nsbase.NSBase): + """CNAME record + + Note: although CNAME is officially a singleton type, dnspython allows + non-singleton CNAME rdatasets because such sets have been commonly + used by BIND and other nameservers for load balancing.""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CSYNC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CSYNC.py new file mode 100644 index 0000000..103486d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CSYNC.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
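
The non-singleton note in the CNAME docstring above is observable behavior; a small sketch, with placeholder names:

import dns.rrset

# Two CNAMEs in one rrset: a violation of the singleton rule on paper,
# but accepted by dnspython for compatibility with load-balancing setups.
rrs = dns.rrset.from_text(
    "alias.example.", 300, "IN", "CNAME", "a.example.", "b.example."
)
assert len(rrs) == 2
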
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "CSYNC" + + +@dns.immutable.immutable +class CSYNC(dns.rdata.Rdata): + """CSYNC record""" + + __slots__ = ["serial", "flags", "windows"] + + def __init__(self, rdclass, rdtype, serial, flags, windows): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.flags = self._as_uint16(flags) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + text = Bitmap(self.windows).to_text() + return f"{self.serial} {self.flags}{text}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + serial = tok.get_uint32() + flags = tok.get_uint16() + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, serial, flags, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!IH", self.serial, self.flags)) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (serial, flags) = parser.get_struct("!IH") + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, serial, flags, bitmap) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DLV.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DLV.py new file mode 100644 index 0000000..6c134f1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DLV.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class DLV(dns.rdtypes.dsbase.DSBase): + """DLV record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNAME.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNAME.py new file mode 100644 index 0000000..bbf9186 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNAME.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
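
The CSYNC serial/flags/type-bitmap layout above maps directly onto RFC 7477 presentation form; a short sketch using the RFC's own sample values:

import dns.rdata

rd = dns.rdata.from_text("IN", "CSYNC", "66 3 A NS AAAA")
assert rd.serial == 66 and rd.flags == 3  # 3 = immediate | soaminimum
print(rd.to_text())                       # 66 3 A NS AAAA
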
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class DNAME(dns.rdtypes.nsbase.UncompressedNS): + """DNAME record""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNSKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNSKEY.py new file mode 100644 index 0000000..6d961a9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNSKEY.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] + REVOKE, + SEP, + ZONE, +) + +# pylint: enable=unused-import + + +@dns.immutable.immutable +class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + """DNSKEY record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DS.py new file mode 100644 index 0000000..58b3108 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DS.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
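
The REVOKE/SEP/ZONE constants re-exported above make DNSKEY flag tests readable; a sketch with a throwaway 4-byte key ("AwEAAQ==" is not a usable key):

import dns.rdata
from dns.rdtypes.dnskeybase import SEP, ZONE

rd = dns.rdata.from_text("IN", "DNSKEY", "257 3 8 AwEAAQ==")
assert rd.flags == ZONE | SEP  # 257: the conventional key-signing-key flags
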
+ +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class DS(dns.rdtypes.dsbase.DSBase): + """DS record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DSYNC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DSYNC.py new file mode 100644 index 0000000..e8d1394 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DSYNC.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.enum +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +class UnknownScheme(dns.exception.DNSException): + """Unknown DSYNC scheme""" + + +class Scheme(dns.enum.IntEnum): + """DSYNC SCHEME""" + + NOTIFY = 1 + + @classmethod + def _maximum(cls): + return 255 + + @classmethod + def _unknown_exception_class(cls): + return UnknownScheme + + +@dns.immutable.immutable +class DSYNC(dns.rdata.Rdata): + """DSYNC record""" + + # see: draft-ietf-dnsop-generalized-notify + + __slots__ = ["rrtype", "scheme", "port", "target"] + + def __init__(self, rdclass, rdtype, rrtype, scheme, port, target): + super().__init__(rdclass, rdtype) + self.rrtype = self._as_rdatatype(rrtype) + self.scheme = Scheme.make(scheme) + self.port = self._as_uint16(port) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return ( + f"{dns.rdatatype.to_text(self.rrtype)} {Scheme.to_text(self.scheme)} " + f"{self.port} {target}" + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + rrtype = dns.rdatatype.from_text(tok.get_string()) + scheme = Scheme.make(tok.get_string()) + port = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, rrtype, scheme, port, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + three_ints = struct.pack("!HBH", self.rrtype, self.scheme, self.port) + file.write(three_ints) + self.target.to_wire(file, None, origin, False) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (rrtype, scheme, port) = parser.get_struct("!HBH") + target = parser.get_name(origin) + return cls(rdclass, rdtype, rrtype, scheme, port, target) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI48.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI48.py new file mode 100644 index 0000000..c843be5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI48.py @@ -0,0 +1,30 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
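
DSYNC above implements draft-ietf-dnsop-generalized-notify, so this sketch assumes a dnspython build that registers the DSYNC type; the target name and port are placeholders:

import dns.rdata

# rrtype being watched, scheme, port, then the notification target
rd = dns.rdata.from_text("IN", "DSYNC", "CDS NOTIFY 5359 notify.example.")
assert rd.scheme == 1  # Scheme.NOTIFY
print(rd.to_text())    # CDS NOTIFY 5359 notify.example.
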
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.euibase + + +@dns.immutable.immutable +class EUI48(dns.rdtypes.euibase.EUIBase): + """EUI48 record""" + + # see: rfc7043.txt + + byte_len = 6 # 0123456789ab (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI64.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI64.py new file mode 100644 index 0000000..f6d7e25 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI64.py @@ -0,0 +1,30 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.euibase + + +@dns.immutable.immutable +class EUI64(dns.rdtypes.euibase.EUIBase): + """EUI64 record""" + + # see: rfc7043.txt + + byte_len = 8 # 0123456789abcdef (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/GPOS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/GPOS.py new file mode 100644 index 0000000..d79f4a0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/GPOS.py @@ -0,0 +1,126 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
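
The byte_len/text_len arithmetic above (6 bytes gives 17 text characters for EUI48, 8 gives 23 for EUI64) fixes the accepted presentation format; a sketch with arbitrary addresses:

import dns.rdata

rd48 = dns.rdata.from_text("IN", "EUI48", "01-23-45-67-89-ab")
rd64 = dns.rdata.from_text("IN", "EUI64", "01-23-45-67-89-ab-cd-ef")
assert len(rd48.to_text()) == 6 * 3 - 1  # 17
assert len(rd64.to_text()) == 8 * 3 - 1  # 23
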
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +def _validate_float_string(what): + if len(what) == 0: + raise dns.exception.FormError + if what[0] == b"-"[0] or what[0] == b"+"[0]: + what = what[1:] + if what.isdigit(): + return + try: + (left, right) = what.split(b".") + except ValueError: + raise dns.exception.FormError + if left == b"" and right == b"": + raise dns.exception.FormError + if not left == b"" and not left.decode().isdigit(): + raise dns.exception.FormError + if not right == b"" and not right.decode().isdigit(): + raise dns.exception.FormError + + +@dns.immutable.immutable +class GPOS(dns.rdata.Rdata): + """GPOS record""" + + # see: RFC 1712 + + __slots__ = ["latitude", "longitude", "altitude"] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude): + super().__init__(rdclass, rdtype) + if isinstance(latitude, float) or isinstance(latitude, int): + latitude = str(latitude) + if isinstance(longitude, float) or isinstance(longitude, int): + longitude = str(longitude) + if isinstance(altitude, float) or isinstance(altitude, int): + altitude = str(altitude) + latitude = self._as_bytes(latitude, True, 255) + longitude = self._as_bytes(longitude, True, 255) + altitude = self._as_bytes(altitude, True, 255) + _validate_float_string(latitude) + _validate_float_string(longitude) + _validate_float_string(altitude) + self.latitude = latitude + self.longitude = longitude + self.altitude = altitude + flat = self.float_latitude + if flat < -90.0 or flat > 90.0: + raise dns.exception.FormError("bad latitude") + flong = self.float_longitude + if flong < -180.0 or flong > 180.0: + raise dns.exception.FormError("bad longitude") + + def to_text(self, origin=None, relativize=True, **kw): + return ( + f"{self.latitude.decode()} {self.longitude.decode()} " + f"{self.altitude.decode()}" + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + latitude = tok.get_string() + longitude = tok.get_string() + altitude = tok.get_string() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.latitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.latitude) + l = len(self.longitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.longitude) + l = len(self.altitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.altitude) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + latitude = parser.get_counted_bytes() + longitude = parser.get_counted_bytes() + altitude = parser.get_counted_bytes() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + @property + def float_latitude(self): + "latitude as a floating point value" + return float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return float(self.longitude) + + @property + def float_altitude(self): + "altitude as a floating point value" + return float(self.altitude) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HINFO.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HINFO.py new file mode 100644 index 0000000..06ad348 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HINFO.py @@ -0,0 +1,64 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, 
Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class HINFO(dns.rdata.Rdata): + """HINFO record""" + + # see: RFC 1035 + + __slots__ = ["cpu", "os"] + + def __init__(self, rdclass, rdtype, cpu, os): + super().__init__(rdclass, rdtype) + self.cpu = self._as_bytes(cpu, True, 255) + self.os = self._as_bytes(os, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return f'"{dns.rdata._escapify(self.cpu)}" "{dns.rdata._escapify(self.os)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + cpu = tok.get_string(max_length=255) + os = tok.get_string(max_length=255) + return cls(rdclass, rdtype, cpu, os) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.cpu) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.cpu) + l = len(self.os) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.os) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + cpu = parser.get_counted_bytes() + os = parser.get_counted_bytes() + return cls(rdclass, rdtype, cpu, os) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HIP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HIP.py new file mode 100644 index 0000000..dc7948a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HIP.py @@ -0,0 +1,85 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
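
A sketch for the HINFO rdtype just shown; the tokenizer strips the quotes, so cpu and os come back as bytes (the values here are placeholders):

import dns.rdata

rd = dns.rdata.from_text("IN", "HINFO", '"ARM64" "Linux"')
assert rd.cpu == b"ARM64" and rd.os == b"Linux"
print(rd.to_text())  # "ARM64" "Linux"
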
+ +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class HIP(dns.rdata.Rdata): + """HIP record""" + + # see: RFC 5205 + + __slots__ = ["hit", "algorithm", "key", "servers"] + + def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): + super().__init__(rdclass, rdtype) + self.hit = self._as_bytes(hit, True, 255) + self.algorithm = self._as_uint8(algorithm) + self.key = self._as_bytes(key, True) + self.servers = self._as_tuple(servers, self._as_name) + + def to_text(self, origin=None, relativize=True, **kw): + hit = binascii.hexlify(self.hit).decode() + key = base64.b64encode(self.key).replace(b"\n", b"").decode() + text = "" + servers = [] + for server in self.servers: + servers.append(server.choose_relativity(origin, relativize)) + if len(servers) > 0: + text += " " + " ".join(x.to_unicode() for x in servers) + return f"{self.algorithm} {hit} {key}{text}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + hit = binascii.unhexlify(tok.get_string().encode()) + key = base64.b64decode(tok.get_string().encode()) + servers = [] + for token in tok.get_remaining(): + server = tok.as_name(token, origin, relativize, relativize_to) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + lh = len(self.hit) + lk = len(self.key) + file.write(struct.pack("!BBH", lh, self.algorithm, lk)) + file.write(self.hit) + file.write(self.key) + for server in self.servers: + server.to_wire(file, None, origin, False) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (lh, algorithm, lk) = parser.get_struct("!BBH") + hit = parser.get_bytes(lh) + key = parser.get_bytes(lk) + servers = [] + while parser.remaining() > 0: + server = parser.get_name(origin) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ISDN.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ISDN.py new file mode 100644 index 0000000..6428a0a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ISDN.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
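
A parsing sketch for the HIP rdtype above; the HIT hex, the base64 key, and the rendezvous server are all dummy values:

import dns.rdata

# presentation order: algorithm, HIT (hex), public key (base64), optional servers
rd = dns.rdata.from_text("IN", "HIP", "2 200100107b1a74df AwEAAQ== rvs.example.")
assert rd.algorithm == 2
assert rd.hit.hex() == "200100107b1a74df"
assert len(rd.servers) == 1
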
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class ISDN(dns.rdata.Rdata): + """ISDN record""" + + # see: RFC 1183 + + __slots__ = ["address", "subaddress"] + + def __init__(self, rdclass, rdtype, address, subaddress): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + self.subaddress = self._as_bytes(subaddress, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.subaddress: + return ( + f'"{dns.rdata._escapify(self.address)}" ' + f'"{dns.rdata._escapify(self.subaddress)}"' + ) + else: + return f'"{dns.rdata._escapify(self.address)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + tokens = tok.get_remaining(max_tokens=1) + if len(tokens) >= 1: + subaddress = tokens[0].unescape().value + else: + subaddress = "" + return cls(rdclass, rdtype, address, subaddress) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.address) + l = len(self.subaddress) + if l > 0: + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.subaddress) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + if parser.remaining() > 0: + subaddress = parser.get_counted_bytes() + else: + subaddress = b"" + return cls(rdclass, rdtype, address, subaddress) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L32.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L32.py new file mode 100644 index 0000000..f51e5c7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L32.py @@ -0,0 +1,42 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.ipv4 +import dns.rdata + + +@dns.immutable.immutable +class L32(dns.rdata.Rdata): + """L32 record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "locator32"] + + def __init__(self, rdclass, rdtype, preference, locator32): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.locator32 = self._as_ipv4_address(locator32) + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.locator32}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.ipv4.inet_aton(self.locator32)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator32 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator32) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L64.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L64.py new file mode 100644 index 0000000..a47da19 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L64.py @@ -0,0 +1,48 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class 
L64(dns.rdata.Rdata): + """L64 record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "locator64"] + + def __init__(self, rdclass, rdtype, preference, locator64): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(locator64, bytes): + if len(locator64) != 8: + raise ValueError("invalid locator64") + self.locator64 = dns.rdata._hexify(locator64, 4, b":") + else: + dns.rdtypes.util.parse_formatted_hex(locator64, 4, 4, ":") + self.locator64 = locator64 + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.locator64}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + locator64 = tok.get_identifier() + return cls(rdclass, rdtype, preference, locator64) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.locator64, 4, 4, ":")) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator64 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator64) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LOC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LOC.py new file mode 100644 index 0000000..6c7fe5e --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LOC.py @@ -0,0 +1,347 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
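
The ILNP locator handling above accepts either 8 raw bytes or the colon-grouped hex form; a sketch using the sample locator from the RFC 6742 examples:

import dns.rdata

rd = dns.rdata.from_text("IN", "L64", "10 2001:0db8:1140:1000")
assert rd.preference == 10
print(rd.locator64)  # 2001:0db8:1140:1000
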
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata + +_pows = tuple(10**i for i in range(0, 11)) + +# default values are in centimeters +_default_size = 100.0 +_default_hprec = 1000000.0 +_default_vprec = 1000.0 + +# for use by from_wire() +_MAX_LATITUDE = 0x80000000 + 90 * 3600000 +_MIN_LATITUDE = 0x80000000 - 90 * 3600000 +_MAX_LONGITUDE = 0x80000000 + 180 * 3600000 +_MIN_LONGITUDE = 0x80000000 - 180 * 3600000 + + +def _exponent_of(what, desc): + if what == 0: + return 0 + exp = None + for i, pow in enumerate(_pows): + if what < pow: + exp = i - 1 + break + if exp is None or exp < 0: + raise dns.exception.SyntaxError(f"{desc} value out of bounds") + return exp + + +def _float_to_tuple(what): + if what < 0: + sign = -1 + what *= -1 + else: + sign = 1 + what = round(what * 3600000) + degrees = int(what // 3600000) + what -= degrees * 3600000 + minutes = int(what // 60000) + what -= minutes * 60000 + seconds = int(what // 1000) + what -= int(seconds * 1000) + what = int(what) + return (degrees, minutes, seconds, what, sign) + + +def _tuple_to_float(what): + value = float(what[0]) + value += float(what[1]) / 60.0 + value += float(what[2]) / 3600.0 + value += float(what[3]) / 3600000.0 + return float(what[4]) * value + + +def _encode_size(what, desc): + what = int(what) + exponent = _exponent_of(what, desc) & 0xF + base = what // pow(10, exponent) & 0xF + return base * 16 + exponent + + +def _decode_size(what, desc): + exponent = what & 0x0F + if exponent > 9: + raise dns.exception.FormError(f"bad {desc} exponent") + base = (what & 0xF0) >> 4 + if base > 9: + raise dns.exception.FormError(f"bad {desc} base") + return base * pow(10, exponent) + + +def _check_coordinate_list(value, low, high): + if value[0] < low or value[0] > high: + raise ValueError(f"not in range [{low}, {high}]") + if value[1] < 0 or value[1] > 59: + raise ValueError("bad minutes value") + if value[2] < 0 or value[2] > 59: + raise ValueError("bad seconds value") + if value[3] < 0 or value[3] > 999: + raise ValueError("bad milliseconds value") + if value[4] != 1 and value[4] != -1: + raise ValueError("bad hemisphere value") + + +@dns.immutable.immutable +class LOC(dns.rdata.Rdata): + """LOC record""" + + # see: RFC 1876 + + __slots__ = [ + "latitude", + "longitude", + "altitude", + "size", + "horizontal_precision", + "vertical_precision", + ] + + def __init__( + self, + rdclass, + rdtype, + latitude, + longitude, + altitude, + size=_default_size, + hprec=_default_hprec, + vprec=_default_vprec, + ): + """Initialize a LOC record instance. + + The parameters I{latitude} and I{longitude} may be either a 4-tuple + of integers specifying (degrees, minutes, seconds, milliseconds), + or they may be floating point values specifying the number of + degrees. The other parameters are floats. 
Size, horizontal precision, + and vertical precision are specified in centimeters.""" + + super().__init__(rdclass, rdtype) + if isinstance(latitude, int): + latitude = float(latitude) + if isinstance(latitude, float): + latitude = _float_to_tuple(latitude) + _check_coordinate_list(latitude, -90, 90) + self.latitude = tuple(latitude) # pyright: ignore + if isinstance(longitude, int): + longitude = float(longitude) + if isinstance(longitude, float): + longitude = _float_to_tuple(longitude) + _check_coordinate_list(longitude, -180, 180) + self.longitude = tuple(longitude) # pyright: ignore + self.altitude = float(altitude) + self.size = float(size) + self.horizontal_precision = float(hprec) + self.vertical_precision = float(vprec) + + def to_text(self, origin=None, relativize=True, **kw): + if self.latitude[4] > 0: + lat_hemisphere = "N" + else: + lat_hemisphere = "S" + if self.longitude[4] > 0: + long_hemisphere = "E" + else: + long_hemisphere = "W" + text = ( + f"{self.latitude[0]} {self.latitude[1]} " + f"{self.latitude[2]}.{self.latitude[3]:03d} {lat_hemisphere} " + f"{self.longitude[0]} {self.longitude[1]} " + f"{self.longitude[2]}.{self.longitude[3]:03d} {long_hemisphere} " + f"{(self.altitude / 100.0):0.2f}m" + ) + + # do not print default values + if ( + self.size != _default_size + or self.horizontal_precision != _default_hprec + or self.vertical_precision != _default_vprec + ): + text += ( + f" {self.size / 100.0:0.2f}m {self.horizontal_precision / 100.0:0.2f}m" + f" {self.vertical_precision / 100.0:0.2f}m" + ) + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + latitude = [0, 0, 0, 0, 1] + longitude = [0, 0, 0, 0, 1] + size = _default_size + hprec = _default_hprec + vprec = _default_vprec + + latitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + latitude[1] = int(t) + t = tok.get_string() + if "." in t: + (seconds, milliseconds) = t.split(".") + if not seconds.isdigit(): + raise dns.exception.SyntaxError("bad latitude seconds value") + latitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError("bad latitude milliseconds value") + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + latitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + latitude[2] = int(t) + t = tok.get_string() + if t == "S": + latitude[4] = -1 + elif t != "N": + raise dns.exception.SyntaxError("bad latitude hemisphere value") + + longitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + longitude[1] = int(t) + t = tok.get_string() + if "." 
in t: + (seconds, milliseconds) = t.split(".") + if not seconds.isdigit(): + raise dns.exception.SyntaxError("bad longitude seconds value") + longitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError("bad longitude milliseconds value") + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + longitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + longitude[2] = int(t) + t = tok.get_string() + if t == "W": + longitude[4] = -1 + elif t != "E": + raise dns.exception.SyntaxError("bad longitude hemisphere value") + + t = tok.get_string() + if t[-1] == "m": + t = t[0:-1] + altitude = float(t) * 100.0 # m -> cm + + tokens = tok.get_remaining(max_tokens=3) + if len(tokens) >= 1: + value = tokens[0].unescape().value + if value[-1] == "m": + value = value[0:-1] + size = float(value) * 100.0 # m -> cm + if len(tokens) >= 2: + value = tokens[1].unescape().value + if value[-1] == "m": + value = value[0:-1] + hprec = float(value) * 100.0 # m -> cm + if len(tokens) >= 3: + value = tokens[2].unescape().value + if value[-1] == "m": + value = value[0:-1] + vprec = float(value) * 100.0 # m -> cm + + # Try encoding these now so we raise if they are bad + _encode_size(size, "size") + _encode_size(hprec, "horizontal precision") + _encode_size(vprec, "vertical precision") + + return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + milliseconds = ( + self.latitude[0] * 3600000 + + self.latitude[1] * 60000 + + self.latitude[2] * 1000 + + self.latitude[3] + ) * self.latitude[4] + latitude = 0x80000000 + milliseconds + milliseconds = ( + self.longitude[0] * 3600000 + + self.longitude[1] * 60000 + + self.longitude[2] * 1000 + + self.longitude[3] + ) * self.longitude[4] + longitude = 0x80000000 + milliseconds + altitude = int(self.altitude) + 10000000 + size = _encode_size(self.size, "size") + hprec = _encode_size(self.horizontal_precision, "horizontal precision") + vprec = _encode_size(self.vertical_precision, "vertical precision") + wire = struct.pack( + "!BBBBIII", 0, size, hprec, vprec, latitude, longitude, altitude + ) + file.write(wire) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + ( + version, + size, + hprec, + vprec, + latitude, + longitude, + altitude, + ) = parser.get_struct("!BBBBIII") + if version != 0: + raise dns.exception.FormError("LOC version not zero") + if latitude < _MIN_LATITUDE or latitude > _MAX_LATITUDE: + raise dns.exception.FormError("bad latitude") + if latitude > 0x80000000: + latitude = (latitude - 0x80000000) / 3600000 + else: + latitude = -1 * (0x80000000 - latitude) / 3600000 + if longitude < _MIN_LONGITUDE or longitude > _MAX_LONGITUDE: + raise dns.exception.FormError("bad longitude") + if longitude > 0x80000000: + longitude = (longitude - 0x80000000) / 3600000 + else: + longitude = -1 * (0x80000000 - longitude) / 3600000 + altitude = float(altitude) - 10000000.0 + size = _decode_size(size, "size") + hprec = _decode_size(hprec, "horizontal precision") + vprec = _decode_size(vprec, "vertical precision") + return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) + + @property + def float_latitude(self): + "latitude as a floating point value" + return _tuple_to_float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return _tuple_to_float(self.longitude) diff --git 
a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LP.py new file mode 100644 index 0000000..379c862 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LP.py @@ -0,0 +1,42 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class LP(dns.rdata.Rdata): + """LP record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "fqdn"] + + def __init__(self, rdclass, rdtype, preference, fqdn): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.fqdn = self._as_name(fqdn) + + def to_text(self, origin=None, relativize=True, **kw): + fqdn = self.fqdn.choose_relativity(origin, relativize) + return f"{self.preference} {fqdn}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + fqdn = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, fqdn) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + self.fqdn.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + fqdn = parser.get_name(origin) + return cls(rdclass, rdtype, preference, fqdn) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/MX.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/MX.py new file mode 100644 index 0000000..0c300c5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/MX.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
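
The LOC tuple/float conversions defined in LOC.py above can be exercised end to end; the coordinates below are illustrative RFC 1876-style values, not anything from this project:

import dns.rdata

rd = dns.rdata.from_text("IN", "LOC", "51 30 12.748 N 0 7 39.612 W 0.00m")
# _tuple_to_float reassembles degrees/minutes/seconds/milliseconds and the sign
assert round(rd.float_latitude, 4) == 51.5035
assert round(rd.float_longitude, 4) == -0.1277
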
+ +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class MX(dns.rdtypes.mxbase.MXBase): + """MX record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NID.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NID.py new file mode 100644 index 0000000..fa0dad5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NID.py @@ -0,0 +1,48 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class NID(dns.rdata.Rdata): + """NID record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "nodeid"] + + def __init__(self, rdclass, rdtype, preference, nodeid): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(nodeid, bytes): + if len(nodeid) != 8: + raise ValueError("invalid nodeid") + self.nodeid = dns.rdata._hexify(nodeid, 4, b":") + else: + dns.rdtypes.util.parse_formatted_hex(nodeid, 4, 4, ":") + self.nodeid = nodeid + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.nodeid}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.nodeid, 4, 4, ":")) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + nodeid = parser.get_remaining() + return cls(rdclass, rdtype, preference, nodeid) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NINFO.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NINFO.py new file mode 100644 index 0000000..b177bdd --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NINFO.py @@ -0,0 +1,26 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class NINFO(dns.rdtypes.txtbase.TXTBase): + """NINFO record""" + + # see: draft-reid-dnsext-zs-01 diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NS.py new file mode 100644 index 0000000..c3f34ce --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NS.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
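
A sketch for the NID rdtype above, using the node identifier from the RFC 6742 examples; note the same 4x4 colon-grouped hex validation as L64:

import dns.rdata

rd = dns.rdata.from_text("IN", "NID", "10 0014:4fff:ff20:ee64")
assert rd.preference == 10 and rd.nodeid == "0014:4fff:ff20:ee64"
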
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class NS(dns.rdtypes.nsbase.NSBase): + """NS record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC.py new file mode 100644 index 0000000..3c78b72 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "NSEC" + + +@dns.immutable.immutable +class NSEC(dns.rdata.Rdata): + """NSEC record""" + + __slots__ = ["next", "windows"] + + def __init__(self, rdclass, rdtype, next, windows): + super().__init__(rdclass, rdtype) + self.next = self._as_name(next) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + next = self.next.choose_relativity(origin, relativize) + text = Bitmap(self.windows).to_text() + return f"{next}{text}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + next = tok.get_name(origin, relativize, relativize_to) + windows = Bitmap.from_text(tok) + return cls(rdclass, rdtype, next, windows) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + # Note that NSEC downcasing, originally mandated by RFC 4034 + # section 6.2 was removed by RFC 6840 section 5.1. 
+ self.next.to_wire(file, None, origin, False) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + next = parser.get_name(origin) + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, next, bitmap) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3.py new file mode 100644 index 0000000..6899418 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3.py @@ -0,0 +1,120 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + +b32_hex_to_normal = bytes.maketrans( + b"0123456789ABCDEFGHIJKLMNOPQRSTUV", b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" +) +b32_normal_to_hex = bytes.maketrans( + b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", b"0123456789ABCDEFGHIJKLMNOPQRSTUV" +) + +# hash algorithm constants +SHA1 = 1 + +# flag constants +OPTOUT = 1 + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "NSEC3" + + +@dns.immutable.immutable +class NSEC3(dns.rdata.Rdata): + """NSEC3 record""" + + __slots__ = ["algorithm", "flags", "iterations", "salt", "next", "windows"] + + def __init__( + self, rdclass, rdtype, algorithm, flags, iterations, salt, next, windows + ): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + self.next = self._as_bytes(next, True, 255) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def _next_text(self): + next = base64.b32encode(self.next).translate(b32_normal_to_hex).lower().decode() + next = next.rstrip("=") + return next + + def to_text(self, origin=None, relativize=True, **kw): + next = self._next_text() + if self.salt == b"": + salt = "-" + else: + salt = binascii.hexlify(self.salt).decode() + text = Bitmap(self.windows).to_text() + return f"{self.algorithm} {self.flags} {self.iterations} {salt} {next}{text}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == "-": + salt = b"" + else: + salt = binascii.unhexlify(salt.encode("ascii")) + next = tok.get_string().encode("ascii").upper().translate(b32_hex_to_normal) + if next.endswith(b"="): + raise binascii.Error("Incorrect padding") + if 
len(next) % 8 != 0: + next += b"=" * (8 - len(next) % 8) + next = base64.b32decode(next) + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) + file.write(self.salt) + l = len(self.next) + file.write(struct.pack("!B", l)) + file.write(self.next) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct("!BBH") + salt = parser.get_counted_bytes() + next = parser.get_counted_bytes() + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) + + def next_name(self, origin=None): + return dns.name.from_text(self._next_text(), origin) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py new file mode 100644 index 0000000..e867872 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
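
The base32hex translation tables above let the next-hashed-owner field round-trip through the standard base64 module; a sketch with values lifted from the RFC 5155 examples:

import dns.rdata

rd = dns.rdata.from_text(
    "IN", "NSEC3", "1 1 12 aabbccdd 2vptu5timamqttgl4luu9kg21e0aor3s A RRSIG"
)
assert (rd.algorithm, rd.flags, rd.iterations) == (1, 1, 12)
assert len(rd.next) == 20  # a SHA-1 sized hash after base32hex decoding
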
+ +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class NSEC3PARAM(dns.rdata.Rdata): + """NSEC3PARAM record""" + + __slots__ = ["algorithm", "flags", "iterations", "salt"] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.salt == b"": + salt = "-" + else: + salt = binascii.hexlify(self.salt).decode() + return f"{self.algorithm} {self.flags} {self.iterations} {salt}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == "-": + salt = "" + else: + salt = binascii.unhexlify(salt.encode()) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) + file.write(self.salt) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct("!BBH") + salt = parser.get_counted_bytes() + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py new file mode 100644 index 0000000..ac1841c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py @@ -0,0 +1,53 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
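[Editor's note] NSEC3PARAM, completed above, carries the same algorithm/flags/iterations/salt tuple as NSEC3 but no next-owner or type bitmap. A quick presentation-format round trip, including the "-" sentinel for an empty salt (a sketch with made-up values, assuming a standard dnspython install):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

param = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NSEC3PARAM, "1 0 12 aabbccdd")
assert param.to_text() == "1 0 12 aabbccdd"

# "-" in the text form denotes an empty salt and maps to b"".
empty = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NSEC3PARAM, "1 0 0 -")
assert empty.salt == b"" and empty.to_text() == "1 0 0 -"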
+ +import base64 + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class OPENPGPKEY(dns.rdata.Rdata): + """OPENPGPKEY record""" + + # see: RFC 7929 + + def __init__(self, rdclass, rdtype, key): + super().__init__(rdclass, rdtype) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.key, chunksize=None, **kw) # pyright: ignore + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + key = parser.get_remaining() + return cls(rdclass, rdtype, key) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPT.py new file mode 100644 index 0000000..d343dfa --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPT.py @@ -0,0 +1,77 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.edns +import dns.exception +import dns.immutable +import dns.rdata + +# We don't implement from_text, and that's ok. +# pylint: disable=abstract-method + + +@dns.immutable.immutable +class OPT(dns.rdata.Rdata): + """OPT record""" + + __slots__ = ["options"] + + def __init__(self, rdclass, rdtype, options): + """Initialize an OPT rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata, + which is also the payload size. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. 
+ + *options*, a tuple of ``bytes`` + """ + + super().__init__(rdclass, rdtype) + + def as_option(option): + if not isinstance(option, dns.edns.Option): + raise ValueError("option is not a dns.edns.option") + return option + + self.options = self._as_tuple(options, as_option) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for opt in self.options: + owire = opt.to_wire() + file.write(struct.pack("!HH", opt.otype, len(owire))) + file.write(owire) + + def to_text(self, origin=None, relativize=True, **kw): + return " ".join(opt.to_text() for opt in self.options) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + options = [] + while parser.remaining() > 0: + (otype, olen) = parser.get_struct("!HH") + with parser.restrict_to(olen): + opt = dns.edns.option_from_wire_parser(otype, parser) + options.append(opt) + return cls(rdclass, rdtype, options) + + @property + def payload(self): + "payload size" + return self.rdclass diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/PTR.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/PTR.py new file mode 100644 index 0000000..98c3616 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/PTR.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class PTR(dns.rdtypes.nsbase.NSBase): + """PTR record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RESINFO.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RESINFO.py new file mode 100644 index 0000000..76c8ea2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RESINFO.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
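[Editor's note] The OPT pseudo-record above is unusual in that its rdclass field doubles as the advertised EDNS payload size (see the payload property at the end of the class). Normally dns.message builds OPT records for you; a direct-construction sketch looks like this (the NSID option type and the 1232-byte payload are arbitrary example choices):

import dns.edns
import dns.rdatatype
from dns.rdtypes.ANY.OPT import OPT

# 1232 is not a real rdata class here; for OPT it encodes the payload size.
opt = OPT(1232, dns.rdatatype.OPT, (dns.edns.GenericOption(dns.edns.OptionType.NSID, b""),))
assert opt.payload == 1232
print(opt.to_text())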
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class RESINFO(dns.rdtypes.txtbase.TXTBase): + """RESINFO record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RP.py new file mode 100644 index 0000000..a66cfc5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RP.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class RP(dns.rdata.Rdata): + """RP record""" + + # see: RFC 1183 + + __slots__ = ["mbox", "txt"] + + def __init__(self, rdclass, rdtype, mbox, txt): + super().__init__(rdclass, rdtype) + self.mbox = self._as_name(mbox) + self.txt = self._as_name(txt) + + def to_text(self, origin=None, relativize=True, **kw): + mbox = self.mbox.choose_relativity(origin, relativize) + txt = self.txt.choose_relativity(origin, relativize) + return f"{str(mbox)} {str(txt)}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + mbox = tok.get_name(origin, relativize, relativize_to) + txt = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, mbox, txt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mbox.to_wire(file, None, origin, canonicalize) + self.txt.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mbox = parser.get_name(origin) + txt = parser.get_name(origin) + return cls(rdclass, rdtype, mbox, txt) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RRSIG.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RRSIG.py new file mode 100644 index 0000000..5556cba --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RRSIG.py @@ -0,0 +1,155 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import calendar +import struct +import time + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +class BadSigTime(dns.exception.DNSException): + """Time in DNS SIG or RRSIG resource record cannot be parsed.""" + + +def sigtime_to_posixtime(what): + if len(what) <= 10 and what.isdigit(): + return int(what) + if len(what) != 14: + raise BadSigTime + year = int(what[0:4]) + month = int(what[4:6]) + day = int(what[6:8]) + hour = int(what[8:10]) + minute = int(what[10:12]) + second = int(what[12:14]) + return calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0)) + + +def posixtime_to_sigtime(what): + return time.strftime("%Y%m%d%H%M%S", time.gmtime(what)) + + +@dns.immutable.immutable +class RRSIG(dns.rdata.Rdata): + """RRSIG record""" + + __slots__ = [ + "type_covered", + "algorithm", + "labels", + "original_ttl", + "expiration", + "inception", + "key_tag", + "signer", + "signature", + ] + + def __init__( + self, + rdclass, + rdtype, + type_covered, + algorithm, + labels, + original_ttl, + expiration, + inception, + key_tag, + signer, + signature, + ): + super().__init__(rdclass, rdtype) + self.type_covered = self._as_rdatatype(type_covered) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.labels = self._as_uint8(labels) + self.original_ttl = self._as_ttl(original_ttl) + self.expiration = self._as_uint32(expiration) + self.inception = self._as_uint32(inception) + self.key_tag = self._as_uint16(key_tag) + self.signer = self._as_name(signer) + self.signature = self._as_bytes(signature) + + def covers(self): + return self.type_covered + + def to_text(self, origin=None, relativize=True, **kw): + return ( + f"{dns.rdatatype.to_text(self.type_covered)} " + f"{self.algorithm} {self.labels} {self.original_ttl} " + f"{posixtime_to_sigtime(self.expiration)} " + f"{posixtime_to_sigtime(self.inception)} " + f"{self.key_tag} " + f"{self.signer.choose_relativity(origin, relativize)} " + f"{dns.rdata._base64ify(self.signature, **kw)}" # pyright: ignore + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + type_covered = dns.rdatatype.from_text(tok.get_string()) + algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) + labels = tok.get_int() + original_ttl = tok.get_ttl() + expiration = sigtime_to_posixtime(tok.get_string()) + inception = sigtime_to_posixtime(tok.get_string()) + key_tag = tok.get_int() + signer = tok.get_name(origin, relativize, relativize_to) + b64 = tok.concatenate_remaining_identifiers().encode() + signature = base64.b64decode(b64) + return cls( + rdclass, + rdtype, + type_covered, + algorithm, + labels, + original_ttl, + expiration, + inception, + key_tag, + signer, + signature, + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack( + "!HBBIIIH", + self.type_covered, + self.algorithm, + self.labels, + self.original_ttl, + self.expiration, + self.inception, + self.key_tag, + ) + file.write(header) + self.signer.to_wire(file, None, origin, canonicalize) + file.write(self.signature) + + @classmethod + def from_wire_parser(cls, 
rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBBIIIH") + signer = parser.get_name(origin) + signature = parser.get_remaining() + return cls(rdclass, rdtype, *header, signer, signature) # pyright: ignore diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RT.py new file mode 100644 index 0000000..5a4d45c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RT.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): + """RT record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SMIMEA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SMIMEA.py new file mode 100644 index 0000000..55d87bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SMIMEA.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class SMIMEA(dns.rdtypes.tlsabase.TLSABase): + """SMIMEA record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SOA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SOA.py new file mode 100644 index 0000000..3c7cd8c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SOA.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
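[Editor's note] Besides the RRSIG rdata class, the module above defines two module-level helpers, sigtime_to_posixtime and posixtime_to_sigtime, that convert between the YYYYMMDDHHMMSS signature-time notation and POSIX time; decimal strings of at most ten digits are accepted as epoch seconds directly. For example:

from dns.rdtypes.ANY.RRSIG import posixtime_to_sigtime, sigtime_to_posixtime

assert sigtime_to_posixtime("20250101000000") == 1735689600  # 2025-01-01T00:00:00Z
assert posixtime_to_sigtime(1735689600) == "20250101000000"
assert sigtime_to_posixtime("1735689600") == 1735689600  # short digit strings pass through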
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class SOA(dns.rdata.Rdata): + """SOA record""" + + # see: RFC 1035 + + __slots__ = ["mname", "rname", "serial", "refresh", "retry", "expire", "minimum"] + + def __init__( + self, rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum + ): + super().__init__(rdclass, rdtype) + self.mname = self._as_name(mname) + self.rname = self._as_name(rname) + self.serial = self._as_uint32(serial) + self.refresh = self._as_ttl(refresh) + self.retry = self._as_ttl(retry) + self.expire = self._as_ttl(expire) + self.minimum = self._as_ttl(minimum) + + def to_text(self, origin=None, relativize=True, **kw): + mname = self.mname.choose_relativity(origin, relativize) + rname = self.rname.choose_relativity(origin, relativize) + return f"{mname} {rname} {self.serial} {self.refresh} {self.retry} {self.expire} {self.minimum}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + mname = tok.get_name(origin, relativize, relativize_to) + rname = tok.get_name(origin, relativize, relativize_to) + serial = tok.get_uint32() + refresh = tok.get_ttl() + retry = tok.get_ttl() + expire = tok.get_ttl() + minimum = tok.get_ttl() + return cls( + rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mname.to_wire(file, compress, origin, canonicalize) + self.rname.to_wire(file, compress, origin, canonicalize) + five_ints = struct.pack( + "!IIIII", self.serial, self.refresh, self.retry, self.expire, self.minimum + ) + file.write(five_ints) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mname = parser.get_name(origin) + rname = parser.get_name(origin) + return cls(rdclass, rdtype, mname, rname, *parser.get_struct("!IIIII")) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SPF.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SPF.py new file mode 100644 index 0000000..1df3b70 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SPF.py @@ -0,0 +1,26 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
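[Editor's note] The SOA implementation above serializes the two names followed by five packed 32-bit timers. A wire-format round trip under made-up zone values (a sketch assuming a standard dnspython install):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

soa = dns.rdata.from_text(
    dns.rdataclass.IN,
    dns.rdatatype.SOA,
    "ns1.example.com. hostmaster.example.com. 2025120801 7200 900 1209600 300",
)
wire = soa.to_wire()  # returns bytes when no file object is given
parsed = dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.SOA, wire, 0, len(wire))
assert parsed == soa and parsed.serial == 2025120801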
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class SPF(dns.rdtypes.txtbase.TXTBase): + """SPF record""" + + # see: RFC 4408 diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SSHFP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SSHFP.py new file mode 100644 index 0000000..3f08f3a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SSHFP.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class SSHFP(dns.rdata.Rdata): + """SSHFP record""" + + # See RFC 4255 + + __slots__ = ["algorithm", "fp_type", "fingerprint"] + + def __init__(self, rdclass, rdtype, algorithm, fp_type, fingerprint): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.fp_type = self._as_uint8(fp_type) + self.fingerprint = self._as_bytes(fingerprint, True) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + fingerprint = dns.rdata._hexify( + self.fingerprint, chunksize=chunksize, **kw # pyright: ignore + ) + return f"{self.algorithm} {self.fp_type} {fingerprint}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + fp_type = tok.get_uint8() + fingerprint = tok.concatenate_remaining_identifiers().encode() + fingerprint = binascii.unhexlify(fingerprint) + return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BB", self.algorithm, self.fp_type) + file.write(header) + file.write(self.fingerprint) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BB") + fingerprint = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TKEY.py new file mode 100644 index 0000000..f9189b1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TKEY.py @@ -0,0 +1,135 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class TKEY(dns.rdata.Rdata): + """TKEY Record""" + + __slots__ = [ + "algorithm", + "inception", + "expiration", + "mode", + "error", + "key", + "other", + ] + + def __init__( + self, + rdclass, + rdtype, + algorithm, + inception, + expiration, + mode, + error, + key, + other=b"", + ): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.inception = self._as_uint32(inception) + self.expiration = self._as_uint32(expiration) + self.mode = self._as_uint16(mode) + self.error = self._as_uint16(error) + self.key = self._as_bytes(key) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + _algorithm = self.algorithm.choose_relativity(origin, relativize) + key = dns.rdata._base64ify(self.key, 0) + other = "" + if len(self.other) > 0: + other = " " + dns.rdata._base64ify(self.other, 0) + return f"{_algorithm} {self.inception} {self.expiration} {self.mode} {self.error} {key}{other}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_name(relativize=False) + inception = tok.get_uint32() + expiration = tok.get_uint32() + mode = tok.get_uint16() + error = tok.get_uint16() + key_b64 = tok.get_string().encode() + key = base64.b64decode(key_b64) + other_b64 = tok.concatenate_remaining_identifiers(True).encode() + other = base64.b64decode(other_b64) + + return cls( + rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, compress, origin) + file.write( + struct.pack("!IIHH", self.inception, self.expiration, self.mode, self.error) + ) + file.write(struct.pack("!H", len(self.key))) + file.write(self.key) + file.write(struct.pack("!H", len(self.other))) + if len(self.other) > 0: + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name(origin) + inception, expiration, mode, error = parser.get_struct("!IIHH") + key = parser.get_counted_bytes(2) + other = parser.get_counted_bytes(2) + + return cls( + rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other + ) + + # Constants for the mode field - from RFC 2930: + # 2.5 The Mode Field + # + # The mode field specifies the general scheme for key agreement or + # the purpose of the TKEY DNS message. Servers and resolvers + # supporting this specification MUST implement the Diffie-Hellman key + # agreement mode and the key deletion mode for queries. All other + # modes are OPTIONAL. A server supporting TKEY that receives a TKEY + # request with a mode it does not support returns the BADMODE error. 
+ # The following values of the Mode octet are defined, available, or + # reserved: + # + # Value Description + # ----- ----------- + # 0 - reserved, see section 7 + # 1 server assignment + # 2 Diffie-Hellman exchange + # 3 GSS-API negotiation + # 4 resolver assignment + # 5 key deletion + # 6-65534 - available, see section 7 + # 65535 - reserved, see section 7 + SERVER_ASSIGNMENT = 1 + DIFFIE_HELLMAN_EXCHANGE = 2 + GSSAPI_NEGOTIATION = 3 + RESOLVER_ASSIGNMENT = 4 + KEY_DELETION = 5 diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TLSA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TLSA.py new file mode 100644 index 0000000..4dffc55 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TLSA.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class TLSA(dns.rdtypes.tlsabase.TLSABase): + """TLSA record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TSIG.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TSIG.py new file mode 100644 index 0000000..7942382 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TSIG.py @@ -0,0 +1,160 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rcode +import dns.rdata + + +@dns.immutable.immutable +class TSIG(dns.rdata.Rdata): + """TSIG record""" + + __slots__ = [ + "algorithm", + "time_signed", + "fudge", + "mac", + "original_id", + "error", + "other", + ] + + def __init__( + self, + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ): + """Initialize a TSIG rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + + *algorithm*, a ``dns.name.Name``. + + *time_signed*, an ``int``. + + *fudge*, an ``int``.
+ + *mac*, a ``bytes`` + + *original_id*, an ``int`` + + *error*, an ``int`` + + *other*, a ``bytes`` + """ + + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.time_signed = self._as_uint48(time_signed) + self.fudge = self._as_uint16(fudge) + self.mac = self._as_bytes(mac) + self.original_id = self._as_uint16(original_id) + self.error = dns.rcode.Rcode.make(error) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + algorithm = self.algorithm.choose_relativity(origin, relativize) + error = dns.rcode.to_text(self.error, True) + text = ( + f"{algorithm} {self.time_signed} {self.fudge} " + + f"{len(self.mac)} {dns.rdata._base64ify(self.mac, 0)} " + + f"{self.original_id} {error} {len(self.other)}" + ) + if self.other: + text += f" {dns.rdata._base64ify(self.other, 0)}" + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_name(relativize=False) + time_signed = tok.get_uint48() + fudge = tok.get_uint16() + mac_len = tok.get_uint16() + mac = base64.b64decode(tok.get_string()) + if len(mac) != mac_len: + raise SyntaxError("invalid MAC") + original_id = tok.get_uint16() + error = dns.rcode.from_text(tok.get_string()) + other_len = tok.get_uint16() + if other_len > 0: + other = base64.b64decode(tok.get_string()) + if len(other) != other_len: + raise SyntaxError("invalid other data") + else: + other = b"" + return cls( + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, None, origin, False) + file.write( + struct.pack( + "!HIHH", + (self.time_signed >> 32) & 0xFFFF, + self.time_signed & 0xFFFFFFFF, + self.fudge, + len(self.mac), + ) + ) + file.write(self.mac) + file.write(struct.pack("!HHH", self.original_id, self.error, len(self.other))) + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name() + time_signed = parser.get_uint48() + fudge = parser.get_uint16() + mac = parser.get_counted_bytes(2) + (original_id, error) = parser.get_struct("!HH") + other = parser.get_counted_bytes(2) + return cls( + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TXT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TXT.py new file mode 100644 index 0000000..6d4dae2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TXT.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class TXT(dns.rdtypes.txtbase.TXTBase): + """TXT record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/URI.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/URI.py new file mode 100644 index 0000000..021391d --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/URI.py @@ -0,0 +1,79 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# Copyright (C) 2015 Red Hat, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class URI(dns.rdata.Rdata): + """URI record""" + + # see RFC 7553 + + __slots__ = ["priority", "weight", "target"] + + def __init__(self, rdclass, rdtype, priority, weight, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.target = self._as_bytes(target, True) + if len(self.target) == 0: + raise dns.exception.SyntaxError("URI target cannot be empty") + + def to_text(self, origin=None, relativize=True, **kw): + return f'{self.priority} {self.weight} "{self.target.decode()}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + weight = tok.get_uint16() + target = tok.get().unescape() + if not (target.is_quoted_string() or target.is_identifier()): + raise dns.exception.SyntaxError("URI target must be a string") + return cls(rdclass, rdtype, priority, weight, target.value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.priority, self.weight) + file.write(two_ints) + file.write(self.target) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight) = parser.get_struct("!HH") + target = parser.get_remaining() + if len(target) == 0: + raise dns.exception.FormError("URI target may not be empty") + return cls(rdclass, rdtype, priority, weight, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git 
a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/WALLET.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/WALLET.py new file mode 100644 index 0000000..ff46476 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/WALLET.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class WALLET(dns.rdtypes.txtbase.TXTBase): + """WALLET record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/X25.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/X25.py new file mode 100644 index 0000000..2436ddb --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/X25.py @@ -0,0 +1,57 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class X25(dns.rdata.Rdata): + """X25 record""" + + # see RFC 1183 + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return f'"{dns.rdata._escapify(self.address)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ZONEMD.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ZONEMD.py new file mode 100644 index 0000000..acef4f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ZONEMD.py @@ -0,0 +1,64 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.zonetypes + + +@dns.immutable.immutable +class ZONEMD(dns.rdata.Rdata): + """ZONEMD record""" + + # See RFC 8976 + + __slots__ = ["serial", "scheme", "hash_algorithm", "digest"] + + def __init__(self, rdclass, rdtype, serial, scheme, hash_algorithm, digest): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.scheme = dns.zonetypes.DigestScheme.make(scheme) + self.hash_algorithm = 
dns.zonetypes.DigestHashAlgorithm.make(hash_algorithm) + self.digest = self._as_bytes(digest) + + if self.scheme == 0: # reserved, RFC 8976 Sec. 5.2 + raise ValueError("scheme 0 is reserved") + if self.hash_algorithm == 0: # reserved, RFC 8976 Sec. 5.3 + raise ValueError("hash_algorithm 0 is reserved") + + hasher = dns.zonetypes._digest_hashers.get(self.hash_algorithm) + if hasher and hasher().digest_size != len(self.digest): + raise ValueError("digest length inconsistent with hash algorithm") + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + digest = dns.rdata._hexify( + self.digest, chunksize=chunksize, **kw # pyright: ignore + ) + return f"{self.serial} {self.scheme} {self.hash_algorithm} {digest}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + serial = tok.get_uint32() + scheme = tok.get_uint8() + hash_algorithm = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, serial, scheme, hash_algorithm, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!IBB", self.serial, self.scheme, self.hash_algorithm) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!IBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__init__.py new file mode 100644 index 0000000..cc39f86 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__init__.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
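[Editor's note] ZONEMD above validates the digest length against the declared hash algorithm at construction time: with scheme 1 (SIMPLE) and hash algorithm 1 (SHA-384), the digest must be exactly 48 bytes. A sketch with a placeholder digest (assuming a standard dnspython install):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

digest_hex = "ab" * 48  # placeholder 48-byte digest, hex-encoded
zmd = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.ZONEMD, f"2025120801 1 1 {digest_hex}"
)
assert len(zmd.digest) == 48

# A digest whose length does not match the algorithm is rejected.
try:
    dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.ZONEMD, "2025120801 1 1 abcd")
except ValueError as exc:
    print("rejected:", exc)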
+ +"""Class ANY (generic) rdata type classes.""" + +__all__ = [ + "AFSDB", + "AMTRELAY", + "AVC", + "CAA", + "CDNSKEY", + "CDS", + "CERT", + "CNAME", + "CSYNC", + "DLV", + "DNAME", + "DNSKEY", + "DS", + "DSYNC", + "EUI48", + "EUI64", + "GPOS", + "HINFO", + "HIP", + "ISDN", + "L32", + "L64", + "LOC", + "LP", + "MX", + "NID", + "NINFO", + "NS", + "NSEC", + "NSEC3", + "NSEC3PARAM", + "OPENPGPKEY", + "OPT", + "PTR", + "RESINFO", + "RP", + "RRSIG", + "RT", + "SMIMEA", + "SOA", + "SPF", + "SSHFP", + "TKEY", + "TLSA", + "TSIG", + "TXT", + "URI", + "WALLET", + "X25", + "ZONEMD", +] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc new file mode 100644 index 0000000..d20bc19 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc new file mode 100644 index 0000000..1609fae Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc new file mode 100644 index 0000000..69dbd83 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc new file mode 100644 index 0000000..4a5b61a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc new file mode 100644 index 0000000..73a96e6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc new file mode 100644 index 0000000..cb152a9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc new file mode 100644 index 0000000..12c0b66 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc new file mode 100644 index 0000000..02fe27b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-312.pyc new file mode 100644 
index 0000000..da1fc47 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-312.pyc new file mode 100644 index 0000000..673861c Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-312.pyc new file mode 100644 index 0000000..5f29e2f Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-312.pyc new file mode 100644 index 0000000..16a39c3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-312.pyc new file mode 100644 index 0000000..1a78b4e Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DSYNC.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DSYNC.cpython-312.pyc new file mode 100644 index 0000000..f7265d6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/DSYNC.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-312.pyc new file mode 100644 index 0000000..f348fa6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-312.pyc new file mode 100644 index 0000000..a63e7df Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-312.pyc new file mode 100644 index 0000000..39ba564 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-312.pyc new file mode 100644 index 0000000..3766c87 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-312.pyc new file mode 100644 index 0000000..a3a3f1f Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-312.pyc new file mode 100644 index 0000000..cb7f1ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-312.pyc new file mode 100644 index 0000000..11137c9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-312.pyc new file mode 100644 index 0000000..36ef491 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-312.pyc new file mode 100644 index 0000000..1985e8f Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-312.pyc new file mode 100644 index 0000000..7909bd0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-312.pyc new file mode 100644 index 0000000..51d4ccd Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-312.pyc new file mode 100644 index 0000000..10c7cf3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-312.pyc new file mode 100644 index 0000000..86aabec Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-312.pyc new file mode 100644 index 0000000..30f4156 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-312.pyc new file mode 100644 index 0000000..bf15303 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-312.pyc new file mode 100644 index 0000000..5d662f5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-312.pyc new file mode 100644 index 0000000..20f6c4a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-312.pyc new file mode 100644 index 0000000..f290afc Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-312.pyc new file mode 100644 index 0000000..321ad3c Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-312.pyc new file mode 100644 index 0000000..d516d4a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RESINFO.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RESINFO.cpython-312.pyc new file mode 100644 index 0000000..57b19cb Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RESINFO.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-312.pyc new file mode 100644 index 0000000..7904202 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-312.pyc new file mode 100644 index 0000000..46a687e Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-312.pyc new file mode 100644 index 0000000..e8fedd8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-312.pyc new file mode 100644 index 0000000..d04cd0b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-312.pyc new file mode 100644 index 0000000..591c724 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-312.pyc new file mode 100644 index 0000000..234bd4e Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-312.pyc new file mode 100644 index 0000000..c2a7b05 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-312.pyc new file mode 100644 index 0000000..401b29b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-312.pyc new file mode 100644 index 0000000..b26af7d Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-312.pyc new file mode 100644 index 0000000..fe522b5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-312.pyc new file mode 100644 index 0000000..65150ed Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-312.pyc new file mode 100644 index 0000000..4cd28ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/WALLET.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/WALLET.cpython-312.pyc new file mode 100644 index 0000000..31b4a27 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/WALLET.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-312.pyc new file mode 100644 index 0000000..22fc6ed Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-312.pyc new file mode 100644 index 0000000..079bd04 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4931ffa Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/A.py b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/A.py new file mode 100644 index 0000000..e3e0752 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/A.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.immutable +import dns.rdata +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + """A record for Chaosnet""" + + # domain: the domain of the address + # address: the 16-bit address + + __slots__ = ["domain", "address"] + + def __init__(self, rdclass, rdtype, domain, address): + super().__init__(rdclass, rdtype) + self.domain = self._as_name(domain) + self.address = self._as_uint16(address) + + def to_text(self, origin=None, relativize=True, **kw): + domain = self.domain.choose_relativity(origin, relativize) + return f"{domain} {self.address:o}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + domain = tok.get_name(origin, relativize, relativize_to) + address = tok.get_uint16(base=8) + return cls(rdclass, rdtype, domain, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.domain.to_wire(file, compress, origin, canonicalize) + pref = struct.pack("!H", self.address) + file.write(pref) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + domain = parser.get_name(origin) + address = parser.get_uint16() + return cls(rdclass, rdtype, domain, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__init__.py new file mode 100644 index 0000000..0760c26 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__init__.py @@ -0,0 +1,22 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
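# --- Editorial sketch (not part of the commit above): the Chaosnet CH A class
# just defined stores a domain plus a 16-bit address that is parsed and printed
# in octal (tok.get_uint16(base=8) / f"{address:o}"). A minimal round trip via
# the public dns.rdata API, assuming the vendored dnspython matches upstream:
import dns.rdata

rd = dns.rdata.from_text("CH", "A", "gateway.example. 3412")
print(rd.address)    # 1802 -- "3412" was decoded as octal (0o3412)
print(rd.to_text())  # gateway.example. 3412 -- re-encoded in octal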
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class CH rdata type classes.""" + +__all__ = [ + "A", +] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-312.pyc new file mode 100644 index 0000000..66c7e59 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b7e8bab Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/A.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/A.py new file mode 100644 index 0000000..e09d611 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/A.py @@ -0,0 +1,51 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + """A record.""" + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/AAAA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/AAAA.py new file mode 100644 index 0000000..0cd139e --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/AAAA.py @@ -0,0 +1,51 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class AAAA(dns.rdata.Rdata): + """AAAA record.""" + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv6_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv6.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/APL.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/APL.py new file mode 100644 index 0000000..c4ce6e4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/APL.py @@ -0,0 +1,150 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. 
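# --- Editorial sketch (not part of the commit above): the IN A and AAAA classes
# above keep the address as text and only pack it on the wire via
# dns.ipv4/dns.ipv6 inet_aton. A quick check of the wire sizes (to_wire()
# returns bytes when no file object is passed), assuming upstream behavior:
import dns.rdata

a = dns.rdata.from_text("IN", "A", "192.0.2.1")
aaaa = dns.rdata.from_text("IN", "AAAA", "2001:db8::1")
print(len(a.to_wire()), len(aaaa.to_wire()))  # 4 16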
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import codecs +import struct + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class APLItem: + """An APL list item.""" + + __slots__ = ["family", "negation", "address", "prefix"] + + def __init__(self, family, negation, address, prefix): + self.family = dns.rdata.Rdata._as_uint16(family) + self.negation = dns.rdata.Rdata._as_bool(negation) + if self.family == 1: + self.address = dns.rdata.Rdata._as_ipv4_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 32) + elif self.family == 2: + self.address = dns.rdata.Rdata._as_ipv6_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 128) + else: + self.address = dns.rdata.Rdata._as_bytes(address, max_length=127) + self.prefix = dns.rdata.Rdata._as_uint8(prefix) + + def __str__(self): + if self.negation: + return f"!{self.family}:{self.address}/{self.prefix}" + else: + return f"{self.family}:{self.address}/{self.prefix}" + + def to_wire(self, file): + if self.family == 1: + address = dns.ipv4.inet_aton(self.address) + elif self.family == 2: + address = dns.ipv6.inet_aton(self.address) + else: + address = binascii.unhexlify(self.address) + # + # Truncate least significant zero bytes. 
+ # + last = 0 + for i in range(len(address) - 1, -1, -1): + if address[i] != 0: + last = i + 1 + break + address = address[0:last] + l = len(address) + assert l < 128 + if self.negation: + l |= 0x80 + header = struct.pack("!HBB", self.family, self.prefix, l) + file.write(header) + file.write(address) + + +@dns.immutable.immutable +class APL(dns.rdata.Rdata): + """APL record.""" + + # see: RFC 3123 + + __slots__ = ["items"] + + def __init__(self, rdclass, rdtype, items): + super().__init__(rdclass, rdtype) + for item in items: + if not isinstance(item, APLItem): + raise ValueError("item not an APLItem") + self.items = tuple(items) + + def to_text(self, origin=None, relativize=True, **kw): + return " ".join(map(str, self.items)) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + items = [] + for token in tok.get_remaining(): + item = token.unescape().value + if item[0] == "!": + negation = True + item = item[1:] + else: + negation = False + (family, rest) = item.split(":", 1) + family = int(family) + (address, prefix) = rest.split("/", 1) + prefix = int(prefix) + item = APLItem(family, negation, address, prefix) + items.append(item) + + return cls(rdclass, rdtype, items) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for item in self.items: + item.to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + items = [] + while parser.remaining() > 0: + header = parser.get_struct("!HBB") + afdlen = header[2] + if afdlen > 127: + negation = True + afdlen -= 128 + else: + negation = False + address = parser.get_bytes(afdlen) + l = len(address) + if header[0] == 1: + if l < 4: + address += b"\x00" * (4 - l) + elif header[0] == 2: + if l < 16: + address += b"\x00" * (16 - l) + else: + # + # This isn't really right according to the RFC, but it + # seems better than throwing an exception + # + address = codecs.encode(address, "hex_codec") + item = APLItem(header[0], negation, address, header[1]) + items.append(item) + return cls(rdclass, rdtype, items) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/DHCID.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/DHCID.py new file mode 100644 index 0000000..8de8cdf --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/DHCID.py @@ -0,0 +1,54 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
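# --- Editorial sketch (not part of the commit above): APLItem.to_wire above
# drops trailing zero octets of each address (RFC 3123), so 10.0.0.0/8 needs
# only one address byte on the wire. A hedged round-trip example:
import dns.rdata

rd = dns.rdata.from_text("IN", "APL", "1:10.0.0.0/8 !1:192.168.38.0/28")
print(rd.to_text())       # 1:10.0.0.0/8 !1:192.168.38.0/28
print(len(rd.to_wire()))  # 12 -- the zero tails of both prefixes are truncated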
+ +import base64 + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class DHCID(dns.rdata.Rdata): + """DHCID record""" + + # see: RFC 4701 + + __slots__ = ["data"] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + self.data = self._as_bytes(data) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.data, **kw) # pyright: ignore + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + b64 = tok.concatenate_remaining_identifiers().encode() + data = base64.b64decode(b64) + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + data = parser.get_remaining() + return cls(rdclass, rdtype, data) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/HTTPS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/HTTPS.py new file mode 100644 index 0000000..15464cb --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/HTTPS.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.svcbbase + + +@dns.immutable.immutable +class HTTPS(dns.rdtypes.svcbbase.SVCBBase): + """HTTPS record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/IPSECKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/IPSECKEY.py new file mode 100644 index 0000000..aef93ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/IPSECKEY.py @@ -0,0 +1,87 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
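# --- Editorial sketch (not part of the commit above): DHCID payloads are
# opaque base64 bytes; the class above round-trips them without interpretation.
# The payload below is a placeholder, not a real RFC 4701 digest:
import base64
import dns.rdata

raw = b"dhcid-example-24-bytes!!"
rd = dns.rdata.from_text("IN", "DHCID", base64.b64encode(raw).decode())
print(rd.data == raw)  # True -- the bytes come back unmodified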
+ +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdtypes.util + + +class Gateway(dns.rdtypes.util.Gateway): + name = "IPSECKEY gateway" + + +@dns.immutable.immutable +class IPSECKEY(dns.rdata.Rdata): + """IPSECKEY record""" + + # see: RFC 4025 + + __slots__ = ["precedence", "gateway_type", "algorithm", "gateway", "key"] + + def __init__( + self, rdclass, rdtype, precedence, gateway_type, algorithm, gateway, key + ): + super().__init__(rdclass, rdtype) + gateway = Gateway(gateway_type, gateway) + self.precedence = self._as_uint8(precedence) + self.gateway_type = gateway.type + self.algorithm = self._as_uint8(algorithm) + self.gateway = gateway.gateway + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + gateway = Gateway(self.gateway_type, self.gateway).to_text(origin, relativize) + key = dns.rdata._base64ify(self.key, **kw) # pyright: ignore + return f"{self.precedence} {self.gateway_type} {self.algorithm} {gateway} {key}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + precedence = tok.get_uint8() + gateway_type = tok.get_uint8() + algorithm = tok.get_uint8() + gateway = Gateway.from_text( + gateway_type, tok, origin, relativize, relativize_to + ) + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls( + rdclass, rdtype, precedence, gateway_type, algorithm, gateway.gateway, key + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.precedence, self.gateway_type, self.algorithm) + file.write(header) + Gateway(self.gateway_type, self.gateway).to_wire( + file, compress, origin, canonicalize + ) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!BBB") + gateway_type = header[1] + gateway = Gateway.from_wire_parser(gateway_type, parser, origin) + key = parser.get_remaining() + return cls( + rdclass, rdtype, header[0], gateway_type, header[2], gateway.gateway, key + ) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/KX.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/KX.py new file mode 100644 index 0000000..6073df4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/KX.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
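# --- Editorial sketch (not part of the commit above): IPSECKEY's gateway field
# is polymorphic via dns.rdtypes.util.Gateway; gateway_type 1 denotes an IPv4
# gateway. The key is opaque base64 -- placeholder bytes below, not real key
# material -- assuming upstream parsing behavior:
import base64
import dns.rdata

key = base64.b64encode(b"opaque-public-key-bytes!").decode()
rd = dns.rdata.from_text("IN", "IPSECKEY", f"10 1 2 192.0.2.38 {key}")
print(rd.precedence, rd.gateway_type, rd.gateway)  # 10 1 192.0.2.38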
+ +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class KX(dns.rdtypes.mxbase.UncompressedDowncasingMX): + """KX record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NAPTR.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NAPTR.py new file mode 100644 index 0000000..98bbf4a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NAPTR.py @@ -0,0 +1,109 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +def _write_string(file, s): + l = len(s) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(s) + + +@dns.immutable.immutable +class NAPTR(dns.rdata.Rdata): + """NAPTR record""" + + # see: RFC 3403 + + __slots__ = ["order", "preference", "flags", "service", "regexp", "replacement"] + + def __init__( + self, rdclass, rdtype, order, preference, flags, service, regexp, replacement + ): + super().__init__(rdclass, rdtype) + self.flags = self._as_bytes(flags, True, 255) + self.service = self._as_bytes(service, True, 255) + self.regexp = self._as_bytes(regexp, True, 255) + self.order = self._as_uint16(order) + self.preference = self._as_uint16(preference) + self.replacement = self._as_name(replacement) + + def to_text(self, origin=None, relativize=True, **kw): + replacement = self.replacement.choose_relativity(origin, relativize) + return ( + f"{self.order} {self.preference} " + f'"{dns.rdata._escapify(self.flags)}" ' + f'"{dns.rdata._escapify(self.service)}" ' + f'"{dns.rdata._escapify(self.regexp)}" ' + f"{replacement}" + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + order = tok.get_uint16() + preference = tok.get_uint16() + flags = tok.get_string() + service = tok.get_string() + regexp = tok.get_string() + replacement = tok.get_name(origin, relativize, relativize_to) + return cls( + rdclass, rdtype, order, preference, flags, service, regexp, replacement + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.order, self.preference) + file.write(two_ints) + _write_string(file, self.flags) + _write_string(file, self.service) + _write_string(file, self.regexp) + self.replacement.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (order, preference) = parser.get_struct("!HH") + strings = [] + for _ in range(3): + s = parser.get_counted_bytes() + strings.append(s) + replacement = parser.get_name(origin) + return cls( + rdclass, + rdtype, + order, + preference, + 
strings[0], + strings[1], + strings[2], + replacement, + ) + + def _processing_priority(self): + return (self.order, self.preference) + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP.py new file mode 100644 index 0000000..d55edb7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class NSAP(dns.rdata.Rdata): + """NSAP record.""" + + # see: RFC 1706 + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address) + + def to_text(self, origin=None, relativize=True, **kw): + return f"0x{binascii.hexlify(self.address).decode()}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + if address[0:2] != "0x": + raise dns.exception.SyntaxError("string does not start with 0x") + address = address[2:].replace(".", "") + if len(address) % 2 != 0: + raise dns.exception.SyntaxError("hexstring has odd length") + address = binascii.unhexlify(address.encode()) + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP_PTR.py new file mode 100644 index 0000000..ce1c663 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP_PTR.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
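# --- Editorial sketch (not part of the commit above): NSAP.from_text above
# requires a "0x" prefix and strips the cosmetic dots before hex-decoding, so
# the dotted RFC 1706 presentation form and the flat form are equivalent:
import dns.rdata

rd = dns.rdata.from_text("IN", "NSAP",
                         "0x47.0005.80.005a00.0000.0001.e133.ffffff000161.00")
print(rd.to_text())  # 0x47000580005a0000000001e133ffffff00016100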
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): + """NSAP-PTR record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/PX.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/PX.py new file mode 100644 index 0000000..20143bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/PX.py @@ -0,0 +1,73 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class PX(dns.rdata.Rdata): + """PX record.""" + + # see: RFC 2163 + + __slots__ = ["preference", "map822", "mapx400"] + + def __init__(self, rdclass, rdtype, preference, map822, mapx400): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.map822 = self._as_name(map822) + self.mapx400 = self._as_name(mapx400) + + def to_text(self, origin=None, relativize=True, **kw): + map822 = self.map822.choose_relativity(origin, relativize) + mapx400 = self.mapx400.choose_relativity(origin, relativize) + return f"{self.preference} {map822} {mapx400}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + map822 = tok.get_name(origin, relativize, relativize_to) + mapx400 = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.map822.to_wire(file, None, origin, canonicalize) + self.mapx400.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + map822 = parser.get_name(origin) + mapx400 = parser.get_name(origin) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SRV.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SRV.py new file mode 100644 index 0000000..044c10e --- 
/dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SRV.py @@ -0,0 +1,75 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class SRV(dns.rdata.Rdata): + """SRV record""" + + # see: RFC 2782 + + __slots__ = ["priority", "weight", "port", "target"] + + def __init__(self, rdclass, rdtype, priority, weight, port, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.port = self._as_uint16(port) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return f"{self.priority} {self.weight} {self.port} {target}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + weight = tok.get_uint16() + port = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) + file.write(three_ints) + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight, port) = parser.get_struct("!HHH") + target = parser.get_name(origin) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SVCB.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SVCB.py new file mode 100644 index 0000000..ff3e932 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SVCB.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.svcbbase + + +@dns.immutable.immutable +class SVCB(dns.rdtypes.svcbbase.SVCBBase): + """SVCB record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/WKS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/WKS.py new file mode 100644 index 0000000..cc6c373 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/WKS.py @@ -0,0 +1,100 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC 
license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import socket +import struct + +import dns.immutable +import dns.ipv4 +import dns.rdata + +try: + _proto_tcp = socket.getprotobyname("tcp") + _proto_udp = socket.getprotobyname("udp") +except OSError: + # Fall back to defaults in case /etc/protocols is unavailable. + _proto_tcp = 6 + _proto_udp = 17 + + +@dns.immutable.immutable +class WKS(dns.rdata.Rdata): + """WKS record""" + + # see: RFC 1035 + + __slots__ = ["address", "protocol", "bitmap"] + + def __init__(self, rdclass, rdtype, address, protocol, bitmap): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + self.protocol = self._as_uint8(protocol) + self.bitmap = self._as_bytes(bitmap) + + def to_text(self, origin=None, relativize=True, **kw): + bits = [] + for i, byte in enumerate(self.bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + bits.append(str(i * 8 + j)) + text = " ".join(bits) + return f"{self.address} {self.protocol} {text}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + protocol = tok.get_string() + if protocol.isdigit(): + protocol = int(protocol) + else: + protocol = socket.getprotobyname(protocol) + bitmap = bytearray() + for token in tok.get_remaining(): + value = token.unescape().value + if value.isdigit(): + serv = int(value) + else: + if protocol != _proto_udp and protocol != _proto_tcp: + raise NotImplementedError("protocol must be TCP or UDP") + if protocol == _proto_udp: + protocol_text = "udp" + else: + protocol_text = "tcp" + serv = socket.getservbyname(value, protocol_text) + i = serv // 8 + l = len(bitmap) + if l < i + 1: + for _ in range(l, i + 1): + bitmap.append(0) + bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) + bitmap = dns.rdata._truncate_bitmap(bitmap) + return cls(rdclass, rdtype, address, protocol, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + protocol = struct.pack("!B", self.protocol) + file.write(protocol) + file.write(self.bitmap) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_bytes(4) + protocol = parser.get_uint8() + bitmap = parser.get_remaining() + return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__init__.py new file mode 100644 index 0000000..dcec4dd --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__init__.py @@ -0,0 +1,35 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
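# --- Editorial sketch (not part of the commit above): WKS.from_text above maps
# each listed port into a bitmap (bit 0x80 >> (port % 8) of byte port // 8).
# Numeric protocol and ports keep the example independent of /etc/services:
import dns.rdata

rd = dns.rdata.from_text("IN", "WKS", "192.0.2.1 6 25 80")  # protocol 6 = TCP
print(rd.to_text())  # 192.0.2.1 6 25 80 -- ports recovered from the bitmap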
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class IN rdata type classes.""" + +__all__ = [ + "A", + "AAAA", + "APL", + "DHCID", + "HTTPS", + "IPSECKEY", + "KX", + "NAPTR", + "NSAP", + "NSAP_PTR", + "PX", + "SRV", + "SVCB", + "WKS", +] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-312.pyc new file mode 100644 index 0000000..269ffb7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc new file mode 100644 index 0000000..3ca850b Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc new file mode 100644 index 0000000..b9dffa6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc new file mode 100644 index 0000000..8d32cb9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc new file mode 100644 index 0000000..ed48d15 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc new file mode 100644 index 0000000..12d98cc Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc new file mode 100644 index 0000000..d76179a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc new file mode 100644 index 
0000000..600f50c Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc new file mode 100644 index 0000000..2fdcf55 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc new file mode 100644 index 0000000..487e753 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc new file mode 100644 index 0000000..7201610 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc new file mode 100644 index 0000000..92cd20d Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc new file mode 100644 index 0000000..9a671da Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc new file mode 100644 index 0000000..088f004 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..aa81bf7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/__init__.py new file mode 100644 index 0000000..3997f84 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/__init__.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata type classes""" + +__all__ = [ + "ANY", + "IN", + "CH", + "dnskeybase", + "dsbase", + "euibase", + "mxbase", + "nsbase", + "svcbbase", + "tlsabase", + "txtbase", + "util", +] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1d7b496 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc new file mode 100644 index 0000000..0fa408c Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-312.pyc new file mode 100644 index 0000000..be7b92c Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/euibase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/euibase.cpython-312.pyc new file mode 100644 index 0000000..5f0aacd Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/euibase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-312.pyc new file mode 100644 index 0000000..abfa6e0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-312.pyc new file mode 100644 index 0000000..45a3aff Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc new file mode 100644 index 0000000..0326a5a Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc new file mode 100644 index 0000000..a4e5581 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-312.pyc new file mode 100644 index 0000000..75eaf27 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..e11aba4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/dnskeybase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/dnskeybase.py new file mode 100644 index 0000000..fb49f92 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/dnskeybase.py @@ -0,0 +1,83 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import enum +import struct + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata + +# wildcard import +__all__ = ["SEP", "REVOKE", "ZONE"] # noqa: F822 + + +class Flag(enum.IntFlag): + SEP = 0x0001 + REVOKE = 0x0080 + ZONE = 0x0100 + + +@dns.immutable.immutable +class DNSKEYBase(dns.rdata.Rdata): + """Base class for rdata that is like a DNSKEY record""" + + __slots__ = ["flags", "protocol", "algorithm", "key"] + + def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): + super().__init__(rdclass, rdtype) + self.flags = Flag(self._as_uint16(flags)) + self.protocol = self._as_uint8(protocol) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + key = dns.rdata._base64ify(self.key, **kw) # pyright: ignore + return f"{self.flags} {self.protocol} {self.algorithm} {key}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + flags = tok.get_uint16() + protocol = tok.get_uint8() + algorithm = tok.get_string() + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, flags, protocol, algorithm, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) + file.write(header) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + key = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], key) + + +### BEGIN generated Flag constants + +SEP = Flag.SEP +REVOKE = Flag.REVOKE +ZONE = Flag.ZONE + +### END generated Flag constants diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/dsbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/dsbase.py 
new file mode 100644 index 0000000..8e05c2a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/dsbase.py @@ -0,0 +1,83 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.dnssectypes +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class DSBase(dns.rdata.Rdata): + """Base class for rdata that is like a DS record""" + + __slots__ = ["key_tag", "algorithm", "digest_type", "digest"] + + # Digest types registry: + # https://www.iana.org/assignments/ds-rr-types/ds-rr-types.xhtml + _digest_length_by_type = { + 1: 20, # SHA-1, RFC 3658 Sec. 2.4 + 2: 32, # SHA-256, RFC 4509 Sec. 2.2 + 3: 32, # GOST R 34.11-94, RFC 5933 Sec. 4 in conjunction with RFC 4490 Sec. 2.1 + 4: 48, # SHA-384, RFC 6605 Sec. 2 + } + + def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, digest): + super().__init__(rdclass, rdtype) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.digest_type = dns.dnssectypes.DSDigest.make(self._as_uint8(digest_type)) + self.digest = self._as_bytes(digest) + try: + if len(self.digest) != self._digest_length_by_type[self.digest_type]: + raise ValueError("digest length inconsistent with digest type") + except KeyError: + if self.digest_type == 0: # reserved, RFC 3658 Sec. 
2.4 + raise ValueError("digest type 0 is reserved") + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + digest = dns.rdata._hexify( + self.digest, chunksize=chunksize, **kw # pyright: ignore + ) + return f"{self.key_tag} {self.algorithm} {self.digest_type} {digest}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + key_tag = tok.get_uint16() + algorithm = tok.get_string() + digest_type = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, key_tag, algorithm, digest_type, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.key_tag, self.algorithm, self.digest_type) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/euibase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/euibase.py new file mode 100644 index 0000000..4eb82eb --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/euibase.py @@ -0,0 +1,73 @@ +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import binascii + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class EUIBase(dns.rdata.Rdata): + """EUIxx record""" + + # see: rfc7043.txt + + __slots__ = ["eui"] + # redefine these in subclasses + byte_len = 0 + text_len = 0 + # byte_len = 6 # 0123456789ab (in hex) + # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab + + def __init__(self, rdclass, rdtype, eui): + super().__init__(rdclass, rdtype) + self.eui = self._as_bytes(eui) + if len(self.eui) != self.byte_len: + raise dns.exception.FormError( + f"EUI{self.byte_len * 8} rdata has to have {self.byte_len} bytes" + ) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._hexify(self.eui, chunksize=2, separator=b"-", **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + text = tok.get_string() + if len(text) != cls.text_len: + raise dns.exception.SyntaxError( + f"Input text must have {cls.text_len} characters" + ) + for i in range(2, cls.byte_len * 3 - 1, 3): + if text[i] != "-": + raise dns.exception.SyntaxError(f"Dash expected at position {i}") + text = text.replace("-", "") + try: + data = binascii.unhexlify(text.encode()) + except (ValueError, TypeError) as ex: + raise dns.exception.SyntaxError(f"Hex decoding error: {str(ex)}") + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.eui) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + eui = parser.get_bytes(cls.byte_len) + return cls(rdclass, rdtype, eui) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/mxbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/mxbase.py new file mode 100644 index 0000000..5d33e61 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/mxbase.py @@ -0,0 +1,87 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""MX-like base classes.""" + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class MXBase(dns.rdata.Rdata): + """Base class for rdata that is like an MX record.""" + + __slots__ = ["preference", "exchange"] + + def __init__(self, rdclass, rdtype, preference, exchange): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.exchange = self._as_name(exchange) + + def to_text(self, origin=None, relativize=True, **kw): + exchange = self.exchange.choose_relativity(origin, relativize) + return f"{self.preference} {exchange}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + exchange = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, exchange) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.exchange.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + exchange = parser.get_name(origin) + return cls(rdclass, rdtype, preference, exchange) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) + + +@dns.immutable.immutable +class UncompressedMX(MXBase): + """Base class for rdata that is like an MX record, but whose name + is not compressed when converted to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, False) + + +@dns.immutable.immutable +class UncompressedDowncasingMX(MXBase): + """Base class for rdata that is like an MX record, but whose name + is not compressed when convert to DNS wire format.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/nsbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/nsbase.py new file mode 100644 index 0000000..904224f --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/nsbase.py @@ -0,0 +1,63 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""NS-like base classes.""" + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class NSBase(dns.rdata.Rdata): + """Base class for rdata that is like an NS record.""" + + __slots__ = ["target"] + + def __init__(self, rdclass, rdtype, target): + super().__init__(rdclass, rdtype) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return str(target) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + target = parser.get_name(origin) + return cls(rdclass, rdtype, target) + + +@dns.immutable.immutable +class UncompressedNS(NSBase): + """Base class for rdata that is like an NS record, but whose name + is not compressed when convert to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, False) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/svcbbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/svcbbase.py new file mode 100644 index 0000000..7338b66 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/svcbbase.py @@ -0,0 +1,587 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import base64 +import enum +import struct +from typing import Any, Dict + +import dns.enum +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata +import dns.rdtypes.util +import dns.renderer +import dns.tokenizer +import dns.wire + +# Until there is an RFC, this module is experimental and may be changed in +# incompatible ways. + + +class UnknownParamKey(dns.exception.DNSException): + """Unknown SVCB ParamKey""" + + +class ParamKey(dns.enum.IntEnum): + """SVCB ParamKey""" + + MANDATORY = 0 + ALPN = 1 + NO_DEFAULT_ALPN = 2 + PORT = 3 + IPV4HINT = 4 + ECH = 5 + IPV6HINT = 6 + DOHPATH = 7 + OHTTP = 8 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "SVCBParamKey" + + @classmethod + def _prefix(cls): + return "KEY" + + @classmethod + def _unknown_exception_class(cls): + return UnknownParamKey + + +class Emptiness(enum.IntEnum): + NEVER = 0 + ALWAYS = 1 + ALLOWED = 2 + + +def _validate_key(key): + force_generic = False + if isinstance(key, bytes): + # We decode to latin-1 so we get 0-255 as valid and do NOT interpret + # UTF-8 sequences + key = key.decode("latin-1") + if isinstance(key, str): + if key.lower().startswith("key"): + force_generic = True + if key[3:].startswith("0") and len(key) != 4: + # key has leading zeros + raise ValueError("leading zeros in key") + key = key.replace("-", "_") + return (ParamKey.make(key), force_generic) + + +def key_to_text(key): + return ParamKey.to_text(key).replace("_", "-").lower() + + +# Like rdata escapify, but escapes ',' too. 
+ +_escaped = b'",\\' + + +def _escapify(qstring): + text = "" + for c in qstring: + if c in _escaped: + text += "\\" + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += f"\\{c:03d}" + return text + + +def _unescape(value: str) -> bytes: + if value == "": + return b"" + unescaped = b"" + l = len(value) + i = 0 + while i < l: + c = value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b"%c" % (codepoint) + continue + unescaped += c.encode() + return unescaped + + +def _split(value): + l = len(value) + i = 0 + items = [] + unescaped = b"" + while i < l: + c = value[i] + i += 1 + if c == ord("\\"): + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + unescaped += b"%c" % (c) + elif c == ord(","): + items.append(unescaped) + unescaped = b"" + else: + unescaped += b"%c" % (c) + items.append(unescaped) + return items + + +@dns.immutable.immutable +class Param: + """Abstract base class for SVCB parameters""" + + @classmethod + def emptiness(cls) -> Emptiness: + return Emptiness.NEVER + + +@dns.immutable.immutable +class GenericParam(Param): + """Generic SVCB parameter""" + + def __init__(self, value): + self.value = dns.rdata.Rdata._as_bytes(value, True) + + @classmethod + def emptiness(cls): + return Emptiness.ALLOWED + + @classmethod + def from_value(cls, value): + if value is None or len(value) == 0: + return None + else: + return cls(_unescape(value)) + + def to_text(self): + return '"' + dns.rdata._escapify(self.value) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + if len(value) == 0: + return None + else: + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.value) + + +@dns.immutable.immutable +class MandatoryParam(Param): + def __init__(self, keys): + # check for duplicates + keys = sorted([_validate_key(key)[0] for key in keys]) + prior_k = None + for k in keys: + if k == prior_k: + raise ValueError(f"duplicate key {k:d}") + prior_k = k + if k == ParamKey.MANDATORY: + raise ValueError("listed the mandatory key as mandatory") + self.keys = tuple(keys) + + @classmethod + def from_value(cls, value): + keys = [k.encode() for k in value.split(",")] + return cls(keys) + + def to_text(self): + return '"' + ",".join([key_to_text(key) for key in self.keys]) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + keys = [] + last_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < last_key: + raise dns.exception.FormError("mandatory keys not ascending") + last_key = key + keys.append(key) + return cls(keys) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for key in self.keys: + file.write(struct.pack("!H", key)) + + +@dns.immutable.immutable +class ALPNParam(Param): + def __init__(self, ids): + self.ids = dns.rdata.Rdata._as_tuple( + ids, lambda x: dns.rdata.Rdata._as_bytes(x, True, 255, False) + ) + 
+ @classmethod + def from_value(cls, value): + return cls(_split(_unescape(value))) + + def to_text(self): + value = ",".join([_escapify(id) for id in self.ids]) + return '"' + dns.rdata._escapify(value.encode()) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + ids = [] + while parser.remaining() > 0: + id = parser.get_counted_bytes() + ids.append(id) + return cls(ids) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for id in self.ids: + file.write(struct.pack("!B", len(id))) + file.write(id) + + +@dns.immutable.immutable +class NoDefaultALPNParam(Param): + # We don't ever expect to instantiate this class, but we need + # a from_value() and a from_wire_parser(), so we just return None + # from the class methods when things are OK. + + @classmethod + def emptiness(cls): + return Emptiness.ALWAYS + + @classmethod + def from_value(cls, value): + if value is None or value == "": + return None + else: + raise ValueError("no-default-alpn with non-empty value") + + def to_text(self): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + if parser.remaining() != 0: + raise dns.exception.FormError + return None + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + raise NotImplementedError # pragma: no cover + + +@dns.immutable.immutable +class PortParam(Param): + def __init__(self, port): + self.port = dns.rdata.Rdata._as_uint16(port) + + @classmethod + def from_value(cls, value): + value = int(value) + return cls(value) + + def to_text(self): + return f'"{self.port}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + port = parser.get_uint16() + return cls(port) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(struct.pack("!H", self.port)) + + +@dns.immutable.immutable +class IPv4HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv4_address + ) + + @classmethod + def from_value(cls, value): + addresses = value.split(",") + return cls(addresses) + + def to_text(self): + return '"' + ",".join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(4) + addresses.append(dns.ipv4.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv4.inet_aton(address)) + + +@dns.immutable.immutable +class IPv6HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv6_address + ) + + @classmethod + def from_value(cls, value): + addresses = value.split(",") + return cls(addresses) + + def to_text(self): + return '"' + ",".join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(16) + addresses.append(dns.ipv6.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv6.inet_aton(address)) + + +@dns.immutable.immutable +class ECHParam(Param): + def __init__(self, ech): + self.ech = dns.rdata.Rdata._as_bytes(ech, True) + + @classmethod + def from_value(cls, value): + 
if "\\" in value: + raise ValueError("escape in ECH value") + value = base64.b64decode(value.encode()) + return cls(value) + + def to_text(self): + b64 = base64.b64encode(self.ech).decode("ascii") + return f'"{b64}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.ech) + + +@dns.immutable.immutable +class OHTTPParam(Param): + # We don't ever expect to instantiate this class, but we need + # a from_value() and a from_wire_parser(), so we just return None + # from the class methods when things are OK. + + @classmethod + def emptiness(cls): + return Emptiness.ALWAYS + + @classmethod + def from_value(cls, value): + if value is None or value == "": + return None + else: + raise ValueError("ohttp with non-empty value") + + def to_text(self): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + if parser.remaining() != 0: + raise dns.exception.FormError + return None + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + raise NotImplementedError # pragma: no cover + + +_class_for_key: Dict[ParamKey, Any] = { + ParamKey.MANDATORY: MandatoryParam, + ParamKey.ALPN: ALPNParam, + ParamKey.NO_DEFAULT_ALPN: NoDefaultALPNParam, + ParamKey.PORT: PortParam, + ParamKey.IPV4HINT: IPv4HintParam, + ParamKey.ECH: ECHParam, + ParamKey.IPV6HINT: IPv6HintParam, + ParamKey.OHTTP: OHTTPParam, +} + + +def _validate_and_define(params, key, value): + (key, force_generic) = _validate_key(_unescape(key)) + if key in params: + raise SyntaxError(f'duplicate key "{key:d}"') + cls = _class_for_key.get(key, GenericParam) + emptiness = cls.emptiness() + if value is None: + if emptiness == Emptiness.NEVER: + raise SyntaxError("value cannot be empty") + value = cls.from_value(value) + else: + if force_generic: + value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) + else: + value = cls.from_value(value) + params[key] = value + + +@dns.immutable.immutable +class SVCBBase(dns.rdata.Rdata): + """Base class for SVCB-like records""" + + # see: draft-ietf-dnsop-svcb-https-11 + + __slots__ = ["priority", "target", "params"] + + def __init__(self, rdclass, rdtype, priority, target, params): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.target = self._as_name(target) + for k, v in params.items(): + k = ParamKey.make(k) + if not isinstance(v, Param) and v is not None: + raise ValueError(f"{k:d} not a Param") + self.params = dns.immutable.Dict(params) + # Make sure any parameter listed as mandatory is present in the + # record. + mandatory = params.get(ParamKey.MANDATORY) + if mandatory: + for key in mandatory.keys: + # Note we have to say "not in" as we have None as a value + # so a get() and a not None test would be wrong. + if key not in params: + raise ValueError(f"key {key:d} declared mandatory but not present") + # The no-default-alpn parameter requires the alpn parameter. 
+ if ParamKey.NO_DEFAULT_ALPN in params: + if ParamKey.ALPN not in params: + raise ValueError("no-default-alpn present, but alpn missing") + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + params = [] + for key in sorted(self.params.keys()): + value = self.params[key] + if value is None: + params.append(key_to_text(key)) + else: + kv = key_to_text(key) + "=" + value.to_text() + params.append(kv) + if len(params) > 0: + space = " " + else: + space = "" + return f"{self.priority} {target}{space}{' '.join(params)}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + if priority == 0: + token = tok.get() + if not token.is_eol_or_eof(): + raise SyntaxError("parameters in AliasMode") + tok.unget(token) + params = {} + while True: + token = tok.get() + if token.is_eol_or_eof(): + tok.unget(token) + break + if token.ttype != dns.tokenizer.IDENTIFIER: + raise SyntaxError("parameter is not an identifier") + equals = token.value.find("=") + if equals == len(token.value) - 1: + # 'key=', so next token should be a quoted string without + # any intervening whitespace. + key = token.value[:-1] + token = tok.get(want_leading=True) + if token.ttype != dns.tokenizer.QUOTED_STRING: + raise SyntaxError("whitespace after =") + value = token.value + elif equals > 0: + # key=value + key = token.value[:equals] + value = token.value[equals + 1 :] + elif equals == 0: + # =key + raise SyntaxError('parameter cannot start with "="') + else: + # key + key = token.value + value = None + _validate_and_define(params, key, value) + return cls(rdclass, rdtype, priority, target, params) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.priority)) + self.target.to_wire(file, None, origin, False) + for key in sorted(self.params): + file.write(struct.pack("!H", key)) + value = self.params[key] + with dns.renderer.prefixed_length(file, 2): + # Note that we're still writing a length of zero if the value is None + if value is not None: + value.to_wire(file, origin) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + priority = parser.get_uint16() + target = parser.get_name(origin) + if priority == 0 and parser.remaining() != 0: + raise dns.exception.FormError("parameters in AliasMode") + params = {} + prior_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < prior_key: + raise dns.exception.FormError("keys not in order") + prior_key = key + vlen = parser.get_uint16() + pkey = ParamKey.make(key) + pcls = _class_for_key.get(pkey, GenericParam) + with parser.restrict_to(vlen): + value = pcls.from_wire_parser(parser, origin) + params[pkey] = value + return cls(rdclass, rdtype, priority, target, params) + + def _processing_priority(self): + return self.priority + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/tlsabase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/tlsabase.py new file mode 100644 index 0000000..ddc196f --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/tlsabase.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class TLSABase(dns.rdata.Rdata): + """Base class for TLSA and SMIMEA records""" + + # see: RFC 6698 + + __slots__ = ["usage", "selector", "mtype", "cert"] + + def __init__(self, rdclass, rdtype, usage, selector, mtype, cert): + super().__init__(rdclass, rdtype) + self.usage = self._as_uint8(usage) + self.selector = self._as_uint8(selector) + self.mtype = self._as_uint8(mtype) + self.cert = self._as_bytes(cert) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + cert = dns.rdata._hexify( + self.cert, chunksize=chunksize, **kw # pyright: ignore + ) + return f"{self.usage} {self.selector} {self.mtype} {cert}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + usage = tok.get_uint8() + selector = tok.get_uint8() + mtype = tok.get_uint8() + cert = tok.concatenate_remaining_identifiers().encode() + cert = binascii.unhexlify(cert) + return cls(rdclass, rdtype, usage, selector, mtype, cert) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.usage, self.selector, self.mtype) + file.write(header) + file.write(self.cert) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BBB") + cert = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/txtbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/txtbase.py new file mode 100644 index 0000000..5e5b24f --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/txtbase.py @@ -0,0 +1,109 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
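# --- Editor's note: illustrative sketch, not part of the vendored diff. ---
# txtbase.py below stores TXT-like rdata as a tuple of byte strings, each at
# most 255 bytes, since the wire format length-prefixes every chunk. A hedged
# public-API example:
import dns.rdata

txt = dns.rdata.from_text("IN", "TXT", '"hello" "world"')
assert txt.strings == (b"hello", b"world")  # one entry per quoted chunk
assert txt.to_text() == '"hello" "world"'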
+ +"""TXT-like base class.""" + +from typing import Any, Dict, Iterable, Tuple + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.renderer +import dns.tokenizer + + +@dns.immutable.immutable +class TXTBase(dns.rdata.Rdata): + """Base class for rdata that is like a TXT record (see RFC 1035).""" + + __slots__ = ["strings"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + strings: Iterable[bytes | str], + ): + """Initialize a TXT-like rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + + *strings*, a tuple of ``bytes`` + """ + super().__init__(rdclass, rdtype) + self.strings: Tuple[bytes] = self._as_tuple( + strings, lambda x: self._as_bytes(x, True, 255) + ) + if len(self.strings) == 0: + raise ValueError("the list of strings must not be empty") + + def to_text( + self, + origin: dns.name.Name | None = None, + relativize: bool = True, + **kw: Dict[str, Any], + ) -> str: + txt = "" + prefix = "" + for s in self.strings: + txt += f'{prefix}"{dns.rdata._escapify(s)}"' + prefix = " " + return txt + + @classmethod + def from_text( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + tok: dns.tokenizer.Tokenizer, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, + ) -> dns.rdata.Rdata: + strings = [] + for token in tok.get_remaining(): + token = token.unescape_to_bytes() + # The 'if' below is always true in the current code, but we + # are leaving this check in in case things change some day. + if not ( + token.is_quoted_string() or token.is_identifier() + ): # pragma: no cover + raise dns.exception.SyntaxError("expected a string") + if len(token.value) > 255: + raise dns.exception.SyntaxError("string too long") + strings.append(token.value) + if len(strings) == 0: + raise dns.exception.UnexpectedEnd + return cls(rdclass, rdtype, strings) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for s in self.strings: + with dns.renderer.prefixed_length(file, 1): + file.write(s) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + strings = [] + while parser.remaining() > 0: + s = parser.get_counted_bytes() + strings.append(s) + return cls(rdclass, rdtype, strings) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/util.py b/venv/lib/python3.12/site-packages/dns/rdtypes/util.py new file mode 100644 index 0000000..c17b154 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/rdtypes/util.py @@ -0,0 +1,269 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import collections +import random +import struct +from typing import Any, Iterable, List, Tuple + +import dns.exception +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata +import dns.rdatatype +import dns.tokenizer +import dns.wire + + +class Gateway: + """A helper class for the IPSECKEY gateway and AMTRELAY relay fields""" + + name = "" + + def __init__(self, type: Any, gateway: str | dns.name.Name | None = None): + self.type = dns.rdata.Rdata._as_uint8(type) + self.gateway = gateway + self._check() + + @classmethod + def _invalid_type(cls, gateway_type): + return f"invalid {cls.name} type: {gateway_type}" + + def _check(self): + if self.type == 0: + if self.gateway not in (".", None): + raise SyntaxError(f"invalid {self.name} for type 0") + self.gateway = None + elif self.type == 1: + # check that it's OK + assert isinstance(self.gateway, str) + dns.ipv4.inet_aton(self.gateway) + elif self.type == 2: + # check that it's OK + assert isinstance(self.gateway, str) + dns.ipv6.inet_aton(self.gateway) + elif self.type == 3: + if not isinstance(self.gateway, dns.name.Name): + raise SyntaxError(f"invalid {self.name}; not a name") + else: + raise SyntaxError(self._invalid_type(self.type)) + + def to_text(self, origin=None, relativize=True): + if self.type == 0: + return "." + elif self.type in (1, 2): + return self.gateway + elif self.type == 3: + assert isinstance(self.gateway, dns.name.Name) + return str(self.gateway.choose_relativity(origin, relativize)) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + @classmethod + def from_text( + cls, gateway_type, tok, origin=None, relativize=True, relativize_to=None + ): + if gateway_type in (0, 1, 2): + gateway = tok.get_string() + elif gateway_type == 3: + gateway = tok.get_name(origin, relativize, relativize_to) + else: + raise dns.exception.SyntaxError( + cls._invalid_type(gateway_type) + ) # pragma: no cover + return cls(gateway_type, gateway) + + # pylint: disable=unused-argument + def to_wire(self, file, compress=None, origin=None, canonicalize=False): + if self.type == 0: + pass + elif self.type == 1: + assert isinstance(self.gateway, str) + file.write(dns.ipv4.inet_aton(self.gateway)) + elif self.type == 2: + assert isinstance(self.gateway, str) + file.write(dns.ipv6.inet_aton(self.gateway)) + elif self.type == 3: + assert isinstance(self.gateway, dns.name.Name) + self.gateway.to_wire(file, None, origin, False) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + # pylint: enable=unused-argument + + @classmethod + def from_wire_parser(cls, gateway_type, parser, origin=None): + if gateway_type == 0: + gateway = None + elif gateway_type == 1: + gateway = dns.ipv4.inet_ntoa(parser.get_bytes(4)) + elif gateway_type == 2: + gateway = dns.ipv6.inet_ntoa(parser.get_bytes(16)) + elif gateway_type == 3: + gateway = parser.get_name(origin) + else: + raise dns.exception.FormError(cls._invalid_type(gateway_type)) + return cls(gateway_type, gateway) + + +class Bitmap: + """A helper class for the NSEC/NSEC3/CSYNC type bitmaps""" + + type_name = "" + + def __init__(self, windows: Iterable[Tuple[int, bytes]] | None = None): + last_window = -1 + if windows is 
None: + windows = [] + self.windows = windows + for window, bitmap in self.windows: + if not isinstance(window, int): + raise ValueError(f"bad {self.type_name} window type") + if window <= last_window: + raise ValueError(f"bad {self.type_name} window order") + if window > 256: + raise ValueError(f"bad {self.type_name} window number") + last_window = window + if not isinstance(bitmap, bytes): + raise ValueError(f"bad {self.type_name} octets type") + if len(bitmap) == 0 or len(bitmap) > 32: + raise ValueError(f"bad {self.type_name} octets") + + def to_text(self) -> str: + text = "" + for window, bitmap in self.windows: + bits = [] + for i, byte in enumerate(bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + rdtype = dns.rdatatype.RdataType.make(window * 256 + i * 8 + j) + bits.append(dns.rdatatype.to_text(rdtype)) + text += " " + " ".join(bits) + return text + + @classmethod + def from_text(cls, tok: "dns.tokenizer.Tokenizer") -> "Bitmap": + rdtypes = [] + for token in tok.get_remaining(): + rdtype = dns.rdatatype.from_text(token.unescape().value) + if rdtype == 0: + raise dns.exception.SyntaxError(f"{cls.type_name} with bit 0") + rdtypes.append(rdtype) + return cls.from_rdtypes(rdtypes) + + @classmethod + def from_rdtypes(cls, rdtypes: List[dns.rdatatype.RdataType]) -> "Bitmap": + rdtypes = sorted(rdtypes) + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b"\0" * 32) + windows = [] + for rdtype in rdtypes: + if rdtype == prior_rdtype: + continue + prior_rdtype = rdtype + new_window = rdtype // 256 + if new_window != window: + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + bitmap = bytearray(b"\0" * 32) + window = new_window + offset = rdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + return cls(windows) + + def to_wire(self, file: Any) -> None: + for window, bitmap in self.windows: + file.write(struct.pack("!BB", window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire_parser(cls, parser: "dns.wire.Parser") -> "Bitmap": + windows = [] + while parser.remaining() > 0: + window = parser.get_uint8() + bitmap = parser.get_counted_bytes() + windows.append((window, bitmap)) + return cls(windows) + + +def _priority_table(items): + by_priority = collections.defaultdict(list) + for rdata in items: + by_priority[rdata._processing_priority()].append(rdata) + return by_priority + + +def priority_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + random.shuffle(rdatas) + ordered.extend(rdatas) + return ordered + + +_no_weight = 0.1 + + +def weighted_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + total = sum(rdata._processing_weight() or _no_weight for rdata in rdatas) + while len(rdatas) > 1: + r = random.uniform(0, total) + for n, rdata in enumerate(rdatas): # noqa: B007 + weight = rdata._processing_weight() or _no_weight + if weight > r: + break + r -= weight + total -= weight # pyright: ignore[reportPossiblyUnboundVariable] + # pylint: disable=undefined-loop-variable + ordered.append(rdata) # pyright: ignore[reportPossiblyUnboundVariable] + del rdatas[n] # pyright: 
ignore[reportPossiblyUnboundVariable] + ordered.append(rdatas[0]) + return ordered + + +def parse_formatted_hex(formatted, num_chunks, chunk_size, separator): + if len(formatted) != num_chunks * (chunk_size + 1) - 1: + raise ValueError("invalid formatted hex string") + value = b"" + for _ in range(num_chunks): + chunk = formatted[0:chunk_size] + value += int(chunk, 16).to_bytes(chunk_size // 2, "big") + formatted = formatted[chunk_size:] + if len(formatted) > 0 and formatted[0] != separator: + raise ValueError("invalid formatted hex string") + formatted = formatted[1:] + return value diff --git a/venv/lib/python3.12/site-packages/dns/renderer.py b/venv/lib/python3.12/site-packages/dns/renderer.py new file mode 100644 index 0000000..cc912b2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/renderer.py @@ -0,0 +1,355 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Help for building DNS wire format messages""" + +import contextlib +import io +import random +import struct +import time + +import dns.edns +import dns.exception +import dns.rdataclass +import dns.rdatatype +import dns.tsig + +# Note we can't import dns.message for circularity reasons + +QUESTION = 0 +ANSWER = 1 +AUTHORITY = 2 +ADDITIONAL = 3 + + +@contextlib.contextmanager +def prefixed_length(output, length_length): + output.write(b"\00" * length_length) + start = output.tell() + yield + end = output.tell() + length = end - start + if length > 0: + try: + output.seek(start - length_length) + try: + output.write(length.to_bytes(length_length, "big")) + except OverflowError: + raise dns.exception.FormError + finally: + output.seek(end) + + +class Renderer: + """Helper class for building DNS wire-format messages. + + Most applications can use the higher-level L{dns.message.Message} + class and its to_wire() method to generate wire-format messages. + This class is for those applications which need finer control + over the generation of messages. + + Typical use:: + + r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) + r.add_question(qname, qtype, qclass) + r.add_rrset(dns.renderer.ANSWER, rrset_1) + r.add_rrset(dns.renderer.ANSWER, rrset_2) + r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) + r.add_edns(0, 0, 4096) + r.write_header() + r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) + wire = r.get_wire() + + If padding is going to be used, then the OPT record MUST be + written after everything else in the additional section except for + the TSIG (if any).
+ + output, an io.BytesIO, where rendering is written + + id: the message id + + flags: the message flags + + max_size: the maximum size of the message + + origin: the origin to use when rendering relative names + + compress: the compression table + + section: an int, the section currently being rendered + + counts: list of the number of RRs in each section + + mac: the MAC of the rendered message (if TSIG was used) + """ + + def __init__(self, id=None, flags=0, max_size=65535, origin=None): + """Initialize a new renderer.""" + + self.output = io.BytesIO() + if id is None: + self.id = random.randint(0, 65535) + else: + self.id = id + self.flags = flags + self.max_size = max_size + self.origin = origin + self.compress = {} + self.section = QUESTION + self.counts = [0, 0, 0, 0] + self.output.write(b"\x00" * 12) + self.mac = "" + self.reserved = 0 + self.was_padded = False + + def _rollback(self, where): + """Truncate the output buffer at offset *where*, and remove any + compression table entries that pointed beyond the truncation + point. + """ + + self.output.seek(where) + self.output.truncate() + keys_to_delete = [] + for k, v in self.compress.items(): + if v >= where: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.compress[k] + + def _set_section(self, section): + """Set the renderer's current section. + + Sections must be rendered in order: QUESTION, ANSWER, AUTHORITY, + ADDITIONAL. Sections may be empty. + + Raises dns.exception.FormError if an attempt was made to set + a section value less than the current section. + """ + + if self.section != section: + if self.section > section: + raise dns.exception.FormError + self.section = section + + @contextlib.contextmanager + def _track_size(self): + start = self.output.tell() + yield start + if self.output.tell() > self.max_size: + self._rollback(start) + raise dns.exception.TooBig + + @contextlib.contextmanager + def _temporarily_seek_to(self, where): + current = self.output.tell() + try: + self.output.seek(where) + yield + finally: + self.output.seek(current) + + def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): + """Add a question to the message.""" + + self._set_section(QUESTION) + with self._track_size(): + qname.to_wire(self.output, self.compress, self.origin) + self.output.write(struct.pack("!HH", rdtype, rdclass)) + self.counts[QUESTION] += 1 + + def add_rrset(self, section, rrset, **kw): + """Add the rrset to the specified section. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rrset.to_wire(self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_rdataset(self, section, name, rdataset, **kw): + """Add the rdataset to the specified section, using the specified + name as the owner name. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_opt(self, opt, pad=0, opt_size=0, tsig_size=0): + """Add *opt* to the additional section, applying padding if desired. The + padding will take the specified precomputed OPT size and TSIG size into + account.
+ + Note that we don't have a reliable way of knowing how big a GSS-TSIG digest + might be, so we might not get an even multiple of the pad in that case.""" + if pad: + ttl = opt.ttl + assert opt_size >= 11 + opt_rdata = opt[0] + size_without_padding = self.output.tell() + opt_size + tsig_size + remainder = size_without_padding % pad + if remainder: + pad = b"\x00" * (pad - remainder) + else: + pad = b"" + options = list(opt_rdata.options) + options.append(dns.edns.GenericOption(dns.edns.OptionType.PADDING, pad)) + opt = dns.message.Message._make_opt( # pyright: ignore + ttl, opt_rdata.rdclass, options + ) + self.was_padded = True + self.add_rrset(ADDITIONAL, opt) + + def add_edns(self, edns, ednsflags, payload, options=None): + """Add an EDNS OPT record to the message.""" + + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= edns << 16 + opt = dns.message.Message._make_opt( # pyright: ignore + ednsflags, payload, options + ) + self.add_opt(opt) + + def add_tsig( + self, + keyname, + secret, + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=dns.tsig.default_algorithm, + ): + """Add a TSIG signature to the message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig( # pyright: ignore + keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data + ) + (tsig, _) = dns.tsig.sign(s, key, tsig[0], int(time.time()), request_mac) + self._write_tsig(tsig, keyname) + + def add_multi_tsig( + self, + ctx, + keyname, + secret, + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=dns.tsig.default_algorithm, + ): + """Add a TSIG signature to the message. Unlike add_tsig(), this can be + used for a series of consecutive DNS envelopes, e.g. for a zone + transfer over TCP [RFC2845, 4.4]. + + For the first message in the sequence, give ctx=None. For each + subsequent message, give the ctx that was returned from the + add_multi_tsig() call for the previous message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig( # pyright: ignore + keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data + ) + (tsig, ctx) = dns.tsig.sign( + s, key, tsig[0], int(time.time()), request_mac, ctx, True + ) + self._write_tsig(tsig, keyname) + return ctx + + def _write_tsig(self, tsig, keyname): + if self.was_padded: + compress = None + else: + compress = self.compress + self._set_section(ADDITIONAL) + with self._track_size(): + keyname.to_wire(self.output, compress, self.origin) + self.output.write( + struct.pack("!HHI", dns.rdatatype.TSIG, dns.rdataclass.ANY, 0) + ) + with prefixed_length(self.output, 2): + tsig.to_wire(self.output) + + self.counts[ADDITIONAL] += 1 + with self._temporarily_seek_to(10): + self.output.write(struct.pack("!H", self.counts[ADDITIONAL])) + + def write_header(self): + """Write the DNS message header. + + Writing the DNS message header is done after all sections + have been rendered, but before the optional TSIG signature + is added.
+ """ + + with self._temporarily_seek_to(0): + self.output.write( + struct.pack( + "!HHHHHH", + self.id, + self.flags, + self.counts[0], + self.counts[1], + self.counts[2], + self.counts[3], + ) + ) + + def get_wire(self): + """Return the wire format message.""" + + return self.output.getvalue() + + def reserve(self, size: int) -> None: + """Reserve *size* bytes.""" + if size < 0: + raise ValueError("reserved amount must be non-negative") + if size > self.max_size: + raise ValueError("cannot reserve more than the maximum size") + self.reserved += size + self.max_size -= size + + def release_reserved(self) -> None: + """Release the reserved bytes.""" + self.max_size += self.reserved + self.reserved = 0 diff --git a/venv/lib/python3.12/site-packages/dns/resolver.py b/venv/lib/python3.12/site-packages/dns/resolver.py new file mode 100644 index 0000000..923bb4b --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/resolver.py @@ -0,0 +1,2068 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS stub resolver.""" + +import contextlib +import random +import socket +import sys +import threading +import time +import warnings +from typing import Any, Dict, Iterator, List, Sequence, Tuple, cast +from urllib.parse import urlparse + +import dns._ddr +import dns.edns +import dns.exception +import dns.flags +import dns.inet +import dns.ipv4 +import dns.ipv6 +import dns.message +import dns.name +import dns.nameserver +import dns.query +import dns.rcode +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.PTR +import dns.rdtypes.svcbbase +import dns.reversename +import dns.tsig + +if sys.platform == "win32": # pragma: no cover + import dns.win32util + + +class NXDOMAIN(dns.exception.DNSException): + """The DNS query name does not exist.""" + + supp_kwargs = {"qnames", "responses"} + fmt = None # we have our own __str__ implementation + + # pylint: disable=arguments-differ + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _check_kwargs(self, qnames, responses=None): # pyright: ignore + if not isinstance(qnames, list | tuple | set): + raise AttributeError("qnames must be a list, tuple or set") + if len(qnames) == 0: + raise AttributeError("qnames must contain at least one element") + if responses is None: + responses = {} + elif not isinstance(responses, dict): + raise AttributeError("responses must be a dict(qname=response)") + kwargs = dict(qnames=qnames, responses=responses) + return kwargs + + def __str__(self) -> str: + if "qnames" not in self.kwargs: + return super().__str__() + qnames = self.kwargs["qnames"] + if len(qnames) > 1: + msg = "None of DNS query names exist" + else: + msg = "The DNS query name does not exist" + qnames = ", ".join(map(str, qnames)) + return f"{msg}: {qnames}" + + @property + def canonical_name(self): + """Return the unresolved canonical name.""" + if "qnames" not in self.kwargs: + raise TypeError("parametrized exception required") + for qname in self.kwargs["qnames"]: + response = self.kwargs["responses"][qname] + try: + cname = response.canonical_name() + if cname != qname: + return cname + except Exception: # pragma: no cover + # We can just eat this exception as it means there was + # something wrong with the response. + pass + return self.kwargs["qnames"][0] + + def __add__(self, e_nx): + """Augment by results from another NXDOMAIN exception.""" + qnames0 = list(self.kwargs.get("qnames", [])) + responses0 = dict(self.kwargs.get("responses", {})) + responses1 = e_nx.kwargs.get("responses", {}) + for qname1 in e_nx.kwargs.get("qnames", []): + if qname1 not in qnames0: + qnames0.append(qname1) + if qname1 in responses1: + responses0[qname1] = responses1[qname1] + return NXDOMAIN(qnames=qnames0, responses=responses0) + + def qnames(self): + """All of the names that were tried. + + Returns a list of ``dns.name.Name``. + """ + return self.kwargs["qnames"] + + def responses(self): + """A map from queried names to their NXDOMAIN responses. + + Returns a dict mapping a ``dns.name.Name`` to a + ``dns.message.Message``. + """ + return self.kwargs["responses"] + + def response(self, qname): + """The response for query *qname*. + + Returns a ``dns.message.Message``. 
+ """ + return self.kwargs["responses"][qname] + + +class YXDOMAIN(dns.exception.DNSException): + """The DNS query name is too long after DNAME substitution.""" + + +ErrorTuple = Tuple[ + str | None, + bool, + int, + Exception | str, + dns.message.Message | None, +] + + +def _errors_to_text(errors: List[ErrorTuple]) -> List[str]: + """Turn a resolution errors trace into a list of text.""" + texts = [] + for err in errors: + texts.append(f"Server {err[0]} answered {err[3]}") + return texts + + +class LifetimeTimeout(dns.exception.Timeout): + """The resolution lifetime expired.""" + + msg = "The resolution lifetime expired." + fmt = f"{msg[:-1]} after {{timeout:.3f}} seconds: {{errors}}" + supp_kwargs = {"timeout", "errors"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs["errors"]) + return super()._fmt_kwargs( + timeout=kwargs["timeout"], errors="; ".join(srv_msgs) + ) + + +# We added more detail to resolution timeouts, but they are still +# subclasses of dns.exception.Timeout for backwards compatibility. We also +# keep dns.resolver.Timeout defined for backwards compatibility. +Timeout = LifetimeTimeout + + +class NoAnswer(dns.exception.DNSException): + """The DNS response does not contain an answer to the question.""" + + fmt = "The DNS response does not contain an answer to the question: {query}" + supp_kwargs = {"response"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + return super()._fmt_kwargs(query=kwargs["response"].question) + + def response(self): + return self.kwargs["response"] + + +class NoNameservers(dns.exception.DNSException): + """All nameservers failed to answer the query. + + errors: list of servers and respective errors + The type of errors is + [(server IP address, any object convertible to string)]. + Non-empty errors list will add explanatory message () + """ + + msg = "All nameservers failed to answer the query." + fmt = f"{msg[:-1]} {{query}}: {{errors}}" + supp_kwargs = {"request", "errors"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs["errors"]) + return super()._fmt_kwargs( + query=kwargs["request"].question, errors="; ".join(srv_msgs) + ) + + +class NotAbsolute(dns.exception.DNSException): + """An absolute domain name is required but a relative name was provided.""" + + +class NoRootSOA(dns.exception.DNSException): + """There is no SOA RR at the DNS root name. This should never happen!""" + + +class NoMetaqueries(dns.exception.DNSException): + """DNS metaqueries are not allowed.""" + + +class NoResolverConfiguration(dns.exception.DNSException): + """Resolver configuration could not be read or specified no nameservers.""" + + +class Answer: + """DNS stub resolver answer. + + Instances of this class bundle up the result of a successful DNS + resolution. + + For convenience, the answer object implements much of the sequence + protocol, forwarding to its ``rrset`` attribute. E.g. + ``for a in answer`` is equivalent to ``for a in answer.rrset``. 
+ ``answer[i]`` is equivalent to ``answer.rrset[i]``, and + ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. + + Note that CNAMEs or DNAMEs in the response may mean that answer + RRset's name might not be the query name. + """ + + def __init__( + self, + qname: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + rdclass: dns.rdataclass.RdataClass, + response: dns.message.QueryMessage, + nameserver: str | None = None, + port: int | None = None, + ) -> None: + self.qname = qname + self.rdtype = rdtype + self.rdclass = rdclass + self.response = response + self.nameserver = nameserver + self.port = port + self.chaining_result = response.resolve_chaining() + # Copy some attributes out of chaining_result for backwards + # compatibility and convenience. + self.canonical_name = self.chaining_result.canonical_name + self.rrset = self.chaining_result.answer + self.expiration = time.time() + self.chaining_result.minimum_ttl + + def __getattr__(self, attr): # pragma: no cover + if self.rrset is not None: + if attr == "name": + return self.rrset.name + elif attr == "ttl": + return self.rrset.ttl + elif attr == "covers": + return self.rrset.covers + elif attr == "rdclass": + return self.rrset.rdclass + elif attr == "rdtype": + return self.rrset.rdtype + else: + raise AttributeError(attr) + + def __len__(self) -> int: + return self.rrset is not None and len(self.rrset) or 0 + + def __iter__(self) -> Iterator[Any]: + return self.rrset is not None and iter(self.rrset) or iter(tuple()) + + def __getitem__(self, i): + if self.rrset is None: + raise IndexError + return self.rrset[i] + + def __delitem__(self, i): + if self.rrset is None: + raise IndexError + del self.rrset[i] + + +class Answers(dict): + """A dict of DNS stub resolver answers, indexed by type.""" + + +class EmptyHostAnswers(dns.exception.DNSException): + """The HostAnswers has no addresses""" + + +class HostAnswers(Answers): + """A dict of DNS stub resolver answers to a host name lookup, indexed by + type. + """ + + @classmethod + def make( + cls, + v6: Answer | None = None, + v4: Answer | None = None, + add_empty: bool = True, + ) -> "HostAnswers": + answers = HostAnswers() + if v6 is not None and (add_empty or v6.rrset): + answers[dns.rdatatype.AAAA] = v6 + if v4 is not None and (add_empty or v4.rrset): + answers[dns.rdatatype.A] = v4 + return answers + + # Returns pairs of (address, family) from this result, potentially + # filtering by address family. + def addresses_and_families( + self, family: int = socket.AF_UNSPEC + ) -> Iterator[Tuple[str, int]]: + if family == socket.AF_UNSPEC: + yield from self.addresses_and_families(socket.AF_INET6) + yield from self.addresses_and_families(socket.AF_INET) + return + elif family == socket.AF_INET6: + answer = self.get(dns.rdatatype.AAAA) + elif family == socket.AF_INET: + answer = self.get(dns.rdatatype.A) + else: # pragma: no cover + raise NotImplementedError(f"unknown address family {family}") + if answer: + for rdata in answer: + yield (rdata.address, family) + + # Returns addresses from this result, potentially filtering by + # address family. + def addresses(self, family: int = socket.AF_UNSPEC) -> Iterator[str]: + return (pair[0] for pair in self.addresses_and_families(family)) + + # Returns the canonical name from this result. 
+ def canonical_name(self) -> dns.name.Name: + answer = self.get(dns.rdatatype.AAAA, self.get(dns.rdatatype.A)) + if answer is None: + raise EmptyHostAnswers + return answer.canonical_name + + +class CacheStatistics: + """Cache Statistics""" + + def __init__(self, hits: int = 0, misses: int = 0) -> None: + self.hits = hits + self.misses = misses + + def reset(self) -> None: + self.hits = 0 + self.misses = 0 + + def clone(self) -> "CacheStatistics": + return CacheStatistics(self.hits, self.misses) + + +class CacheBase: + def __init__(self) -> None: + self.lock = threading.Lock() + self.statistics = CacheStatistics() + + def reset_statistics(self) -> None: + """Reset all statistics to zero.""" + with self.lock: + self.statistics.reset() + + def hits(self) -> int: + """How many hits has the cache had?""" + with self.lock: + return self.statistics.hits + + def misses(self) -> int: + """How many misses has the cache had?""" + with self.lock: + return self.statistics.misses + + def get_statistics_snapshot(self) -> CacheStatistics: + """Return a consistent snapshot of all the statistics. + + If running with multiple threads, it's better to take a + snapshot than to call statistics methods such as hits() and + misses() individually. + """ + with self.lock: + return self.statistics.clone() + + +CacheKey = Tuple[dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass] + + +class Cache(CacheBase): + """Simple thread-safe DNS answer cache.""" + + def __init__(self, cleaning_interval: float = 300.0) -> None: + """*cleaning_interval*, a ``float`` is the number of seconds between + periodic cleanings. + """ + + super().__init__() + self.data: Dict[CacheKey, Answer] = {} + self.cleaning_interval = cleaning_interval + self.next_cleaning: float = time.time() + self.cleaning_interval + + def _maybe_clean(self) -> None: + """Clean the cache if it's time to do so.""" + + now = time.time() + if self.next_cleaning <= now: + keys_to_delete = [] + for k, v in self.data.items(): + if v.expiration <= now: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.data[k] + now = time.time() + self.next_cleaning = now + self.cleaning_interval + + def get(self, key: CacheKey) -> Answer | None: + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + self._maybe_clean() + v = self.data.get(key) + if v is None or v.expiration <= time.time(): + self.statistics.misses += 1 + return None + self.statistics.hits += 1 + return v + + def put(self, key: CacheKey, value: Answer) -> None: + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + self._maybe_clean() + self.data[key] = value + + def flush(self, key: CacheKey | None = None) -> None: + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise the entire cache + is flushed. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. 
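+
+        A sketch of flushing one cached entry (the query name below is an
+        assumption for illustration)::
+
+            key = (dns.name.from_text("www.example."),
+                   dns.rdatatype.A, dns.rdataclass.IN)
+            cache.flush(key)  # assumes `cache` is a dns.resolver.Cache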
+ """ + + with self.lock: + if key is not None: + if key in self.data: + del self.data[key] + else: + self.data = {} + self.next_cleaning = time.time() + self.cleaning_interval + + +class LRUCacheNode: + """LRUCache node.""" + + def __init__(self, key, value): + self.key = key + self.value = value + self.hits = 0 + self.prev = self + self.next = self + + def link_after(self, node: "LRUCacheNode") -> None: + self.prev = node + self.next = node.next + node.next.prev = self + node.next = self + + def unlink(self) -> None: + self.next.prev = self.prev + self.prev.next = self.next + + +class LRUCache(CacheBase): + """Thread-safe, bounded, least-recently-used DNS answer cache. + + This cache is better than the simple cache (above) if you're + running a web crawler or other process that does a lot of + resolutions. The LRUCache has a maximum number of nodes, and when + it is full, the least-recently used node is removed to make space + for a new one. + """ + + def __init__(self, max_size: int = 100000) -> None: + """*max_size*, an ``int``, is the maximum number of nodes to cache; + it must be greater than 0. + """ + + super().__init__() + self.data: Dict[CacheKey, LRUCacheNode] = {} + self.set_max_size(max_size) + self.sentinel: LRUCacheNode = LRUCacheNode(None, None) + self.sentinel.prev = self.sentinel + self.sentinel.next = self.sentinel + + def set_max_size(self, max_size: int) -> None: + if max_size < 1: + max_size = 1 + self.max_size = max_size + + def get(self, key: CacheKey) -> Answer | None: + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + node = self.data.get(key) + if node is None: + self.statistics.misses += 1 + return None + # Unlink because we're either going to move the node to the front + # of the LRU list or we're going to free it. + node.unlink() + if node.value.expiration <= time.time(): + del self.data[node.key] + self.statistics.misses += 1 + return None + node.link_after(self.sentinel) + self.statistics.hits += 1 + node.hits += 1 + return node.value + + def get_hits_for_key(self, key: CacheKey) -> int: + """Return the number of cache hits associated with the specified key.""" + with self.lock: + node = self.data.get(key) + if node is None or node.value.expiration <= time.time(): + return 0 + else: + return node.hits + + def put(self, key: CacheKey, value: Answer) -> None: + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + while len(self.data) >= self.max_size: + gnode = self.sentinel.prev + gnode.unlink() + del self.data[gnode.key] + node = LRUCacheNode(key, value) + node.link_after(self.sentinel) + self.data[key] = node + + def flush(self, key: CacheKey | None = None) -> None: + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise the entire cache + is flushed. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. 
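+
+        A sketch of wiring an LRU cache into a resolver and emptying it
+        (illustrative; the size is an assumption, not a recommendation)::
+
+            res = dns.resolver.Resolver()
+            res.cache = dns.resolver.LRUCache(max_size=10000)
+            res.cache.flush()  # drop every cached answer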
+ """ + + with self.lock: + if key is not None: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + else: + gnode = self.sentinel.next + while gnode != self.sentinel: + next = gnode.next + gnode.unlink() + gnode = next + self.data = {} + + +class _Resolution: + """Helper class for dns.resolver.Resolver.resolve(). + + All of the "business logic" of resolution is encapsulated in this + class, allowing us to have multiple resolve() implementations + using different I/O schemes without copying all of the + complicated logic. + + This class is a "friend" to dns.resolver.Resolver and manipulates + resolver data structures directly. + """ + + def __init__( + self, + resolver: "BaseResolver", + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + rdclass: dns.rdataclass.RdataClass | str, + tcp: bool, + raise_on_no_answer: bool, + search: bool | None, + ) -> None: + if isinstance(qname, str): + qname = dns.name.from_text(qname, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if dns.rdatatype.is_metatype(rdtype): + raise NoMetaqueries + rdclass = dns.rdataclass.RdataClass.make(rdclass) + if dns.rdataclass.is_metaclass(rdclass): + raise NoMetaqueries + self.resolver = resolver + self.qnames_to_try = resolver._get_qnames_to_try(qname, search) + self.qnames = self.qnames_to_try[:] + self.rdtype = rdtype + self.rdclass = rdclass + self.tcp = tcp + self.raise_on_no_answer = raise_on_no_answer + self.nxdomain_responses: Dict[dns.name.Name, dns.message.QueryMessage] = {} + # Initialize other things to help analysis tools + self.qname = dns.name.empty + self.nameservers: List[dns.nameserver.Nameserver] = [] + self.current_nameservers: List[dns.nameserver.Nameserver] = [] + self.errors: List[ErrorTuple] = [] + self.nameserver: dns.nameserver.Nameserver | None = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request: dns.message.QueryMessage | None = None + self.backoff = 0.0 + + def next_request( + self, + ) -> Tuple[dns.message.QueryMessage | None, Answer | None]: + """Get the next request to send, and check the cache. + + Returns a (request, answer) tuple. At most one of request or + answer will not be None. + """ + + # We return a tuple instead of Union[Message,Answer] as it lets + # the caller avoid isinstance(). + + while len(self.qnames) > 0: + self.qname = self.qnames.pop(0) + + # Do we know the answer? + if self.resolver.cache: + answer = self.resolver.cache.get( + (self.qname, self.rdtype, self.rdclass) + ) + if answer is not None: + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + else: + return (None, answer) + answer = self.resolver.cache.get( + (self.qname, dns.rdatatype.ANY, self.rdclass) + ) + if answer is not None and answer.response.rcode() == dns.rcode.NXDOMAIN: + # cached NXDOMAIN; record it and continue to next + # name. 
+ self.nxdomain_responses[self.qname] = answer.response + continue + + # Build the request + request = dns.message.make_query(self.qname, self.rdtype, self.rdclass) + if self.resolver.keyname is not None: + request.use_tsig( + self.resolver.keyring, + self.resolver.keyname, + algorithm=self.resolver.keyalgorithm, + ) + request.use_edns( + self.resolver.edns, + self.resolver.ednsflags, + self.resolver.payload, + options=self.resolver.ednsoptions, + ) + if self.resolver.flags is not None: + request.flags = self.resolver.flags + + self.nameservers = self.resolver._enrich_nameservers( + self.resolver._nameservers, + self.resolver.nameserver_ports, + self.resolver.port, + ) + if self.resolver.rotate: + random.shuffle(self.nameservers) + self.current_nameservers = self.nameservers[:] + self.errors = [] + self.nameserver = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request = request + self.backoff = 0.10 + + return (request, None) + + # + # We've tried everything and only gotten NXDOMAINs. (We know + # it's only NXDOMAINs as anything else would have returned + # before now.) + # + raise NXDOMAIN(qnames=self.qnames_to_try, responses=self.nxdomain_responses) + + def next_nameserver(self) -> Tuple[dns.nameserver.Nameserver, bool, float]: + if self.retry_with_tcp: + assert self.nameserver is not None + assert not self.nameserver.is_always_max_size() + self.tcp_attempt = True + self.retry_with_tcp = False + return (self.nameserver, True, 0) + + backoff = 0.0 + if not self.current_nameservers: + if len(self.nameservers) == 0: + # Out of things to try! + raise NoNameservers(request=self.request, errors=self.errors) + self.current_nameservers = self.nameservers[:] + backoff = self.backoff + self.backoff = min(self.backoff * 2, 2) + + self.nameserver = self.current_nameservers.pop(0) + self.tcp_attempt = self.tcp or self.nameserver.is_always_max_size() + return (self.nameserver, self.tcp_attempt, backoff) + + def query_result( + self, response: dns.message.Message | None, ex: Exception | None + ) -> Tuple[Answer | None, bool]: + # + # returns an (answer: Answer, end_loop: bool) tuple. + # + assert self.nameserver is not None + if ex: + # Exception during I/O or from_wire() + assert response is None + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + ex, + response, + ) + ) + if ( + isinstance(ex, dns.exception.FormError) + or isinstance(ex, EOFError) + or isinstance(ex, OSError) + or isinstance(ex, NotImplementedError) + ): + # This nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + elif isinstance(ex, dns.message.Truncated): + if self.tcp_attempt: + # Truncation with TCP is no good! + self.nameservers.remove(self.nameserver) + else: + self.retry_with_tcp = True + return (None, False) + # We got an answer! + assert response is not None + assert isinstance(response, dns.message.QueryMessage) + rcode = response.rcode() + if rcode == dns.rcode.NOERROR: + try: + answer = Answer( + self.qname, + self.rdtype, + self.rdclass, + response, + self.nameserver.answer_nameserver(), + self.nameserver.answer_port(), + ) + except Exception as e: + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + e, + response, + ) + ) + # The nameserver is no good, take it out of the mix. 
+ self.nameservers.remove(self.nameserver) + return (None, False) + if self.resolver.cache: + self.resolver.cache.put((self.qname, self.rdtype, self.rdclass), answer) + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + return (answer, True) + elif rcode == dns.rcode.NXDOMAIN: + # Further validate the response by making an Answer, even + # if we aren't going to cache it. + try: + answer = Answer( + self.qname, dns.rdatatype.ANY, dns.rdataclass.IN, response + ) + except Exception as e: + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + e, + response, + ) + ) + # The nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + return (None, False) + self.nxdomain_responses[self.qname] = response + if self.resolver.cache: + self.resolver.cache.put( + (self.qname, dns.rdatatype.ANY, self.rdclass), answer + ) + # Make next_nameserver() return None, so caller breaks its + # inner loop and calls next_request(). + return (None, True) + elif rcode == dns.rcode.YXDOMAIN: + yex = YXDOMAIN() + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + yex, + response, + ) + ) + raise yex + else: + # + # We got a response, but we're not happy with the + # rcode in it. + # + if rcode != dns.rcode.SERVFAIL or not self.resolver.retry_servfail: + self.nameservers.remove(self.nameserver) + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + dns.rcode.to_text(rcode), + response, + ) + ) + return (None, False) + + +class BaseResolver: + """DNS stub resolver.""" + + # We initialize in reset() + # + # pylint: disable=attribute-defined-outside-init + + domain: dns.name.Name + nameserver_ports: Dict[str, int] + port: int + search: List[dns.name.Name] + use_search_by_default: bool + timeout: float + lifetime: float + keyring: Any | None + keyname: dns.name.Name | str | None + keyalgorithm: dns.name.Name | str + edns: int + ednsflags: int + ednsoptions: List[dns.edns.Option] | None + payload: int + cache: Any + flags: int | None + retry_servfail: bool + rotate: bool + ndots: int | None + _nameservers: Sequence[str | dns.nameserver.Nameserver] + + def __init__( + self, filename: str = "/etc/resolv.conf", configure: bool = True + ) -> None: + """*filename*, a ``str`` or file object, specifying a file + in standard /etc/resolv.conf format. This parameter is meaningful + only when *configure* is true and the platform is POSIX. + + *configure*, a ``bool``. If True (the default), the resolver + instance is configured in the normal fashion for the operating + system the resolver is running on. (I.e. by reading a + /etc/resolv.conf file on POSIX systems and from the registry + on Windows systems.) 
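+
+        A construction sketch (illustrative; the nameserver address is an
+        assumption)::
+
+            res = dns.resolver.Resolver(configure=False)
+            res.nameservers = ["192.0.2.53"]  # no system config is read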
+        """
+
+        self.reset()
+        if configure:
+            if sys.platform == "win32":  # pragma: no cover
+                self.read_registry()
+            elif filename:
+                self.read_resolv_conf(filename)
+
+    def reset(self) -> None:
+        """Reset all resolver configuration to the defaults."""
+
+        self.domain = dns.name.Name(dns.name.from_text(socket.gethostname())[1:])
+        if len(self.domain) == 0:  # pragma: no cover
+            self.domain = dns.name.root
+        self._nameservers = []
+        self.nameserver_ports = {}
+        self.port = 53
+        self.search = []
+        self.use_search_by_default = False
+        self.timeout = 2.0
+        self.lifetime = 5.0
+        self.keyring = None
+        self.keyname = None
+        self.keyalgorithm = dns.tsig.default_algorithm
+        self.edns = -1
+        self.ednsflags = 0
+        self.ednsoptions = None
+        self.payload = 0
+        self.cache = None
+        self.flags = None
+        self.retry_servfail = False
+        self.rotate = False
+        self.ndots = None
+
+    def read_resolv_conf(self, f: Any) -> None:
+        """Process *f* as a file in the /etc/resolv.conf format. If f is
+        a ``str``, it is used as the name of the file to open; otherwise it
+        is treated as the file itself.
+
+        Interprets the following items:
+
+        - nameserver - name server IP address
+
+        - domain - local domain name
+
+        - search - search list for host-name lookup
+
+        - options - supported options are rotate, timeout, edns0, and ndots
+
+        """
+
+        nameservers = []
+        if isinstance(f, str):
+            try:
+                cm: contextlib.AbstractContextManager = open(f, encoding="utf-8")
+            except OSError:
+                # /etc/resolv.conf doesn't exist, can't be read, etc.
+                raise NoResolverConfiguration(f"cannot open {f}")
+        else:
+            cm = contextlib.nullcontext(f)
+        with cm as f:
+            for l in f:
+                if len(l) == 0 or l[0] == "#" or l[0] == ";":
+                    continue
+                tokens = l.split()
+
+                # Any line containing fewer than 2 tokens is malformed
+                if len(tokens) < 2:
+                    continue
+
+                if tokens[0] == "nameserver":
+                    nameservers.append(tokens[1])
+                elif tokens[0] == "domain":
+                    self.domain = dns.name.from_text(tokens[1])
+                    # domain and search are exclusive
+                    self.search = []
+                elif tokens[0] == "search":
+                    # the last search wins
+                    self.search = []
+                    for suffix in tokens[1:]:
+                        self.search.append(dns.name.from_text(suffix))
+                    # We don't set domain as it is not used if
+                    # len(self.search) > 0
+                elif tokens[0] == "options":
+                    for opt in tokens[1:]:
+                        if opt == "rotate":
+                            self.rotate = True
+                        elif opt == "edns0":
+                            self.use_edns()
+                        elif "timeout" in opt:
+                            try:
+                                self.timeout = int(opt.split(":")[1])
+                            except (ValueError, IndexError):
+                                pass
+                        elif "ndots" in opt:
+                            try:
+                                self.ndots = int(opt.split(":")[1])
+                            except (ValueError, IndexError):
+                                pass
+        if len(nameservers) == 0:
+            raise NoResolverConfiguration("no nameservers")
+        # Assigning directly instead of appending means we invoke the
+        # setter logic, with additional checking and enrichment.
+        self.nameservers = nameservers
+
+    def read_registry(self) -> None:  # pragma: no cover
+        """Extract resolver configuration from the Windows registry."""
+        try:
+            info = dns.win32util.get_dns_info()  # type: ignore
+            if info.domain is not None:
+                self.domain = info.domain
+            self.nameservers = info.nameservers
+            self.search = info.search
+        except AttributeError:
+            raise NotImplementedError
+
+    def _compute_timeout(
+        self,
+        start: float,
+        lifetime: float | None = None,
+        errors: List[ErrorTuple] | None = None,
+    ) -> float:
+        lifetime = self.lifetime if lifetime is None else lifetime
+        now = time.time()
+        duration = now - start
+        if errors is None:
+            errors = []
+        if duration < 0:
+            if duration < -1:
+                # Time going backwards is bad. Just give up.
+ raise LifetimeTimeout(timeout=duration, errors=errors) + else: + # Time went backwards, but only a little. This can + # happen, e.g. under vmware with older linux kernels. + # Pretend it didn't happen. + duration = 0 + if duration >= lifetime: + raise LifetimeTimeout(timeout=duration, errors=errors) + return min(lifetime - duration, self.timeout) + + def _get_qnames_to_try( + self, qname: dns.name.Name, search: bool | None + ) -> List[dns.name.Name]: + # This is a separate method so we can unit test the search + # rules without requiring the Internet. + if search is None: + search = self.use_search_by_default + qnames_to_try = [] + if qname.is_absolute(): + qnames_to_try.append(qname) + else: + abs_qname = qname.concatenate(dns.name.root) + if search: + if len(self.search) > 0: + # There is a search list, so use it exclusively + search_list = self.search[:] + elif self.domain != dns.name.root and self.domain is not None: + # We have some notion of a domain that isn't the root, so + # use it as the search list. + search_list = [self.domain] + else: + search_list = [] + # Figure out the effective ndots (default is 1) + if self.ndots is None: + ndots = 1 + else: + ndots = self.ndots + for suffix in search_list: + qnames_to_try.append(qname + suffix) + if len(qname) > ndots: + # The name has at least ndots dots, so we should try an + # absolute query first. + qnames_to_try.insert(0, abs_qname) + else: + # The name has less than ndots dots, so we should search + # first, then try the absolute name. + qnames_to_try.append(abs_qname) + else: + qnames_to_try.append(abs_qname) + return qnames_to_try + + def use_tsig( + self, + keyring: Any, + keyname: dns.name.Name | str | None = None, + algorithm: dns.name.Name | str = dns.tsig.default_algorithm, + ) -> None: + """Add a TSIG signature to each query. + + The parameters are passed to ``dns.message.Message.use_tsig()``; + see its documentation for details. + """ + + self.keyring = keyring + self.keyname = keyname + self.keyalgorithm = algorithm + + def use_edns( + self, + edns: int | bool | None = 0, + ednsflags: int = 0, + payload: int = dns.message.DEFAULT_EDNS_PAYLOAD, + options: List[dns.edns.Option] | None = None, + ) -> None: + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + self.edns = edns + self.ednsflags = ednsflags + self.payload = payload + self.ednsoptions = options + + def set_flags(self, flags: int) -> None: + """Overrides the default flags with your own. + + *flags*, an ``int``, the message flags to use. 
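+
+        For example, to set the RD and CD bits (an illustrative choice,
+        not a recommendation)::
+
+            resolver.set_flags(dns.flags.RD | dns.flags.CD)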
+ """ + + self.flags = flags + + @classmethod + def _enrich_nameservers( + cls, + nameservers: Sequence[str | dns.nameserver.Nameserver], + nameserver_ports: Dict[str, int], + default_port: int, + ) -> List[dns.nameserver.Nameserver]: + enriched_nameservers = [] + if isinstance(nameservers, list | tuple): + for nameserver in nameservers: + enriched_nameserver: dns.nameserver.Nameserver + if isinstance(nameserver, dns.nameserver.Nameserver): + enriched_nameserver = nameserver + elif dns.inet.is_address(nameserver): + port = nameserver_ports.get(nameserver, default_port) + enriched_nameserver = dns.nameserver.Do53Nameserver( + nameserver, port + ) + else: + try: + if urlparse(nameserver).scheme != "https": + raise NotImplementedError + except Exception: + raise ValueError( + f"nameserver {nameserver} is not a " + "dns.nameserver.Nameserver instance or text form, " + "IP address, nor a valid https URL" + ) + enriched_nameserver = dns.nameserver.DoHNameserver(nameserver) + enriched_nameservers.append(enriched_nameserver) + else: + raise ValueError( + f"nameservers must be a list or tuple (not a {type(nameservers)})" + ) + return enriched_nameservers + + @property + def nameservers( + self, + ) -> Sequence[str | dns.nameserver.Nameserver]: + return self._nameservers + + @nameservers.setter + def nameservers( + self, nameservers: Sequence[str | dns.nameserver.Nameserver] + ) -> None: + """ + *nameservers*, a ``list`` or ``tuple`` of nameservers, where a nameserver is either + a string interpretable as a nameserver, or a ``dns.nameserver.Nameserver`` + instance. + + Raises ``ValueError`` if *nameservers* is not a list of nameservers. + """ + # We just call _enrich_nameservers() for checking + self._enrich_nameservers(nameservers, self.nameserver_ports, self.port) + self._nameservers = nameservers + + +class Resolver(BaseResolver): + """DNS stub resolver.""" + + def resolve( + self, + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + tcp: bool = False, + source: str | None = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: float | None = None, + search: bool | None = None, + ) -> Answer: # pylint: disable=arguments-differ + """Query nameservers to find the answer to the question. + + The *qname*, *rdtype*, and *rdclass* parameters may be objects + of the appropriate type, or strings that can be converted into objects + of the appropriate type. + + *qname*, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the query type. + + *rdclass*, an ``int`` or ``str``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *source*, a ``str`` or ``None``. If not ``None``, bind to this IP + address when making queries. + + *raise_on_no_answer*, a ``bool``. If ``True``, raise + ``dns.resolver.NoAnswer`` if there's no answer to the question. + + *source_port*, an ``int``, the port from which to send the message. + + *lifetime*, a ``float``, how many seconds a query should run + before timing out. + + *search*, a ``bool`` or ``None``, determines whether the + search list configured in the system's resolver configuration + are used for relative names, and whether the resolver's domain + may be added to relative names. The default is ``None``, + which causes the value of the resolver's + ``use_search_by_default`` attribute to be used. 
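+
+        A typical call, as a sketch (the query name is an assumption)::
+
+            answer = resolver.resolve("www.example.", "A", lifetime=10.0)
+            for rdata in answer:
+                print(rdata.address)  # each A record's address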
+ + Raises ``dns.resolver.LifetimeTimeout`` if no answers could be found + in the specified lifetime. + + Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. + + Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after + DNAME substitution. + + Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is + ``True`` and the query name exists but has no RRset of the + desired type and class. + + Raises ``dns.resolver.NoNameservers`` if no non-broken + nameservers are available to answer the question. + + Returns a ``dns.resolver.Answer`` instance. + + """ + + resolution = _Resolution( + self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search + ) + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + assert request is not None # needed for type checking + done = False + while not done: + (nameserver, tcp, backoff) = resolution.next_nameserver() + if backoff: + time.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, resolution.errors) + try: + response = nameserver.query( + request, + timeout=timeout, + source=source, + source_port=source_port, + max_size=tcp, + ) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + def query( + self, + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + tcp: bool = False, + source: str | None = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: float | None = None, + ) -> Answer: # pragma: no cover + """Query nameservers to find the answer to the question. + + This method calls resolve() with ``search=True``, and is + provided for backwards compatibility with prior versions of + dnspython. See the documentation for the resolve() method for + further details. + """ + warnings.warn( + "please use dns.resolver.Resolver.resolve() instead", + DeprecationWarning, + stacklevel=2, + ) + return self.resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + True, + ) + + def resolve_address(self, ipaddr: str, *args: Any, **kwargs: Any) -> Answer: + """Use a resolver to run a reverse query for PTR records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. 
+        modified_kwargs: Dict[str, Any] = {}
+        modified_kwargs.update(kwargs)
+        modified_kwargs["rdtype"] = dns.rdatatype.PTR
+        modified_kwargs["rdclass"] = dns.rdataclass.IN
+        return self.resolve(
+            dns.reversename.from_address(ipaddr), *args, **modified_kwargs
+        )
+
+    def resolve_name(
+        self,
+        name: dns.name.Name | str,
+        family: int = socket.AF_UNSPEC,
+        **kwargs: Any,
+    ) -> HostAnswers:
+        """Use a resolver to query for address records.
+
+        This utilizes the resolve() method to perform A and/or AAAA lookups on
+        the specified name.
+
+        *name*, a ``dns.name.Name`` or ``str``, the name to resolve.
+
+        *family*, an ``int``, the address family. If socket.AF_UNSPEC
+        (the default), both A and AAAA records will be retrieved.
+
+        All other arguments that can be passed to the resolve() function
+        except for rdtype and rdclass are also supported by this
+        function.
+        """
+        # We make a modified kwargs for type checking happiness, as otherwise
+        # we get a legit warning about possibly having rdtype and rdclass
+        # in the kwargs more than once.
+        modified_kwargs: Dict[str, Any] = {}
+        modified_kwargs.update(kwargs)
+        modified_kwargs.pop("rdtype", None)
+        modified_kwargs["rdclass"] = dns.rdataclass.IN
+
+        if family == socket.AF_INET:
+            v4 = self.resolve(name, dns.rdatatype.A, **modified_kwargs)
+            return HostAnswers.make(v4=v4)
+        elif family == socket.AF_INET6:
+            v6 = self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
+            return HostAnswers.make(v6=v6)
+        elif family != socket.AF_UNSPEC:  # pragma: no cover
+            raise NotImplementedError(f"unknown address family {family}")
+
+        raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
+        lifetime = modified_kwargs.pop("lifetime", None)
+        start = time.time()
+        v6 = self.resolve(
+            name,
+            dns.rdatatype.AAAA,
+            raise_on_no_answer=False,
+            lifetime=self._compute_timeout(start, lifetime),
+            **modified_kwargs,
+        )
+        # Note that setting name ensures we query the same name
+        # for A as we did for AAAA. (This is just in case search lists
+        # are active by default in the resolver configuration and
+        # we might be talking to a server that says NXDOMAIN when it
+        # wants to say NOERROR no data.)
+        name = v6.qname
+        v4 = self.resolve(
+            name,
+            dns.rdatatype.A,
+            raise_on_no_answer=False,
+            lifetime=self._compute_timeout(start, lifetime),
+            **modified_kwargs,
+        )
+        answers = HostAnswers.make(v6=v6, v4=v4, add_empty=not raise_on_no_answer)
+        if not answers:
+            raise NoAnswer(response=v6.response)
+        return answers
+
+    # pylint: disable=redefined-outer-name
+
+    def canonical_name(self, name: dns.name.Name | str) -> dns.name.Name:
+        """Determine the canonical name of *name*.
+
+        The canonical name is the name the resolver uses for queries
+        after all CNAME and DNAME renamings have been applied.
+
+        *name*, a ``dns.name.Name`` or ``str``, the query name.
+
+        This method can raise any exception that ``resolve()`` can
+        raise, other than ``dns.resolver.NoAnswer`` and
+        ``dns.resolver.NXDOMAIN``.
+
+        Returns a ``dns.name.Name``.
+        """
+        try:
+            answer = self.resolve(name, raise_on_no_answer=False)
+            canonical_name = answer.canonical_name
+        except NXDOMAIN as e:
+            canonical_name = e.canonical_name
+        return canonical_name
+
+    # pylint: enable=redefined-outer-name
+
+    def try_ddr(self, lifetime: float = 5.0) -> None:
+        """Try to update the resolver's nameservers using Discovery of Designated
+        Resolvers (DDR). If successful, the resolver will subsequently use
+        DNS-over-HTTPS or DNS-over-TLS for future queries.
+
+    *lifetime*, a float, is the maximum time to spend attempting DDR. The default
+    is 5 seconds.
+
+    If the SVCB query is successful and results in a non-empty list of nameservers,
+    then the resolver's nameservers are set to the returned servers in priority
+    order.
+
+    The current implementation does not use any address hints from the SVCB record,
+    nor does it resolve addresses for the SVCB target name, rather it assumes that
+    the bootstrap nameserver will always be one of the addresses and uses it.
+    A future revision to the code may offer fuller support. The code verifies that
+    the bootstrap nameserver is in the Subject Alternative Name field of the
+    TLS certificate.
+    """
+    try:
+        expiration = time.time() + lifetime
+        answer = self.resolve(
+            dns._ddr._local_resolver_name, "SVCB", lifetime=lifetime
+        )
+        timeout = dns.query._remaining(expiration)
+        nameservers = dns._ddr._get_nameservers_sync(answer, timeout)
+        if len(nameservers) > 0:
+            self.nameservers = nameservers
+    except Exception:  # pragma: no cover
+        pass
+
+
+#: The default resolver.
+default_resolver: Resolver | None = None
+
+
+def get_default_resolver() -> Resolver:
+    """Get the default resolver, initializing it if necessary."""
+    if default_resolver is None:
+        reset_default_resolver()
+    assert default_resolver is not None
+    return default_resolver
+
+
+def reset_default_resolver() -> None:
+    """Re-initialize default resolver.
+
+    Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX
+    systems) will be re-read immediately.
+    """
+
+    global default_resolver
+    default_resolver = Resolver()
+
+
+def resolve(
+    qname: dns.name.Name | str,
+    rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A,
+    rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: str | None = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: float | None = None,
+    search: bool | None = None,
+) -> Answer:  # pragma: no cover
+    """Query nameservers to find the answer to the question.
+
+    This is a convenience function that uses the default resolver
+    object to make the query.
+
+    See ``dns.resolver.Resolver.resolve`` for more information on the
+    parameters.
+    """
+
+    return get_default_resolver().resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        search,
+    )
+
+
+def query(
+    qname: dns.name.Name | str,
+    rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A,
+    rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: str | None = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: float | None = None,
+) -> Answer:  # pragma: no cover
+    """Query nameservers to find the answer to the question.
+
+    This method calls resolve() with ``search=True``, and is
+    provided for backwards compatibility with prior versions of
+    dnspython. See the documentation for the resolve() method for
+    further details.
+    """
+    warnings.warn(
+        "please use dns.resolver.resolve() instead", DeprecationWarning, stacklevel=2
+    )
+    return resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        True,
+    )
+
+
+def resolve_address(ipaddr: str, *args: Any, **kwargs: Any) -> Answer:
+    """Use a resolver to run a reverse query for PTR records.
+
+    See ``dns.resolver.Resolver.resolve_address`` for more information on the
+    parameters.
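+
+    A sketch (the address is an assumption)::
+
+        answer = dns.resolver.resolve_address("192.0.2.1")
+        target = answer[0].target  # the PTR target, a dns.name.Name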
+ """ + + return get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +def resolve_name( + name: dns.name.Name | str, family: int = socket.AF_UNSPEC, **kwargs: Any +) -> HostAnswers: + """Use a resolver to query for address records. + + See ``dns.resolver.Resolver.resolve_name`` for more information on the + parameters. + """ + + return get_default_resolver().resolve_name(name, family, **kwargs) + + +def canonical_name(name: dns.name.Name | str) -> dns.name.Name: + """Determine the canonical name of *name*. + + See ``dns.resolver.Resolver.canonical_name`` for more information on the + parameters and possible exceptions. + """ + + return get_default_resolver().canonical_name(name) + + +def try_ddr(lifetime: float = 5.0) -> None: # pragma: no cover + """Try to update the default resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + See :py:func:`dns.resolver.Resolver.try_ddr` for more information. + """ + return get_default_resolver().try_ddr(lifetime) + + +def zone_for_name( + name: dns.name.Name | str, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + tcp: bool = False, + resolver: Resolver | None = None, + lifetime: float | None = None, +) -> dns.name.Name: # pyright: ignore[reportReturnType] + """Find the name of the zone which contains the specified name. + + *name*, an absolute ``dns.name.Name`` or ``str``, the query name. + + *rdclass*, an ``int``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + If ``None``, the default, then the default resolver is used. + + *lifetime*, a ``float``, the total time to allow for the queries needed + to determine the zone. If ``None``, the default, then only the individual + query limits of the resolver apply. + + Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS + root. (This is only likely to happen if you're using non-default + root servers in your network and they are misconfigured.) + + Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be + found in the allotted lifetime. + + Returns a ``dns.name.Name``. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + start = time.time() + expiration: float | None + if lifetime is not None: + expiration = start + lifetime + else: + expiration = None + while 1: + try: + rlifetime: float | None + if expiration is not None: + rlifetime = expiration - time.time() + if rlifetime <= 0: + rlifetime = 0 + else: + rlifetime = None + answer = resolver.resolve( + name, dns.rdatatype.SOA, rdclass, tcp, lifetime=rlifetime + ) + assert answer.rrset is not None + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (NXDOMAIN, NoAnswer) as e: + if isinstance(e, NXDOMAIN): + response = e.responses().get(name) + else: + response = e.response() # pylint: disable=no-value-for-parameter + if response: + for rrs in response.authority: + if rrs.rdtype == dns.rdatatype.SOA and rrs.rdclass == rdclass: + (nr, _, _) = rrs.name.fullcompare(name) + if nr == dns.name.NAMERELN_SUPERDOMAIN: + # We're doing a proper superdomain check as + # if the name were equal we ought to have gotten + # it in the answer section! 
We are ignoring the + # possibility that the authority is insane and + # is including multiple SOA RRs for different + # authorities. + return rrs.name + # we couldn't extract anything useful from the response (e.g. it's + # a type 3 NXDOMAIN) + try: + name = name.parent() + except dns.name.NoParent: + raise NoRootSOA + + +def make_resolver_at( + where: dns.name.Name | str, + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Resolver | None = None, +) -> Resolver: + """Make a stub resolver using the specified destination as the full resolver. + + *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the + full resolver. + + *port*, an ``int``, the port to use. If not specified, the default is 53. + + *family*, an ``int``, the address family to use. This parameter is used if + *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case + the first address returned by ``resolve_name()`` will be used, otherwise the + first address of the specified family will be used. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames. If not specified, the default resolver will be used. + + Returns a ``dns.resolver.Resolver`` or raises an exception. + """ + if resolver is None: + resolver = get_default_resolver() + nameservers: List[str | dns.nameserver.Nameserver] = [] + if isinstance(where, str) and dns.inet.is_address(where): + nameservers.append(dns.nameserver.Do53Nameserver(where, port)) + else: + for address in resolver.resolve_name(where, family).addresses(): + nameservers.append(dns.nameserver.Do53Nameserver(address, port)) + res = Resolver(configure=False) + res.nameservers = nameservers + return res + + +def resolve_at( + where: dns.name.Name | str, + qname: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.A, + rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN, + tcp: bool = False, + source: str | None = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: float | None = None, + search: bool | None = None, + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Resolver | None = None, +) -> Answer: + """Query nameservers to find the answer to the question. + + This is a convenience function that calls ``dns.resolver.make_resolver_at()`` to + make a resolver, and then uses it to resolve the query. + + See ``dns.resolver.Resolver.resolve`` for more information on the resolution + parameters, and ``dns.resolver.make_resolver_at`` for information about the resolver + parameters *where*, *port*, *family*, and *resolver*. + + If making more than one query, it is more efficient to call + ``dns.resolver.make_resolver_at()`` and then use that resolver for the queries + instead of calling ``resolve_at()`` multiple times. + """ + return make_resolver_at(where, port, family, resolver).resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + ) + + +# +# Support for overriding the system resolver for all python code in the +# running process. 
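+#
+# A usage sketch (illustrative): route this process's socket name lookups
+# through dnspython, then restore the originals.
+#
+#     import socket
+#     import dns.resolver
+#     dns.resolver.override_system_resolver()
+#     try:
+#         print(socket.gethostbyname("www.example.com"))  # assumed reachable
+#     finally:
+#         dns.resolver.restore_system_resolver()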
+# + +_protocols_for_socktype: Dict[Any, List[Any]] = { + socket.SOCK_DGRAM: [socket.SOL_UDP], + socket.SOCK_STREAM: [socket.SOL_TCP], +} + +_resolver: Resolver | None = None +_original_getaddrinfo = socket.getaddrinfo +_original_getnameinfo = socket.getnameinfo +_original_getfqdn = socket.getfqdn +_original_gethostbyname = socket.gethostbyname +_original_gethostbyname_ex = socket.gethostbyname_ex +_original_gethostbyaddr = socket.gethostbyaddr + + +def _getaddrinfo( + host=None, service=None, family=socket.AF_UNSPEC, socktype=0, proto=0, flags=0 +): + if flags & socket.AI_NUMERICHOST != 0: + # Short circuit directly into the system's getaddrinfo(). We're + # not adding any value in this case, and this avoids infinite loops + # because dns.query.* needs to call getaddrinfo() for IPv6 scoping + # reasons. We will also do this short circuit below if we + # discover that the host is an address literal. + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: + # Not implemented. We raise a gaierror as opposed to a + # NotImplementedError as it helps callers handle errors more + # appropriately. [Issue #316] + # + # We raise EAI_FAIL as opposed to EAI_SYSTEM because there is + # no EAI_SYSTEM on Windows [Issue #416]. We didn't go for + # EAI_BADFLAGS as the flags aren't bad, we just don't + # implement them. + raise socket.gaierror( + socket.EAI_FAIL, "Non-recoverable failure in name resolution" + ) + if host is None and service is None: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + addrs = [] + canonical_name = None # pylint: disable=redefined-outer-name + # Is host None or an address literal? If so, use the system's + # getaddrinfo(). + if host is None: + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + try: + # We don't care about the result of af_for_address(), we're just + # calling it so it raises an exception if host is not an IPv4 or + # IPv6 address. + dns.inet.af_for_address(host) + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + except Exception: + pass + # Something needs resolution! + try: + assert _resolver is not None + answers = _resolver.resolve_name(host, family) + addrs = answers.addresses_and_families() + canonical_name = answers.canonical_name().to_text(True) + except NXDOMAIN: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + except Exception: + # We raise EAI_AGAIN here as the failure may be temporary + # (e.g. a timeout) and EAI_SYSTEM isn't defined on Windows. + # [Issue #416] + raise socket.gaierror(socket.EAI_AGAIN, "Temporary failure in name resolution") + port = None + try: + # Is it a port literal? 
+ if service is None: + port = 0 + else: + port = int(service) + except Exception: + if flags & socket.AI_NUMERICSERV == 0: + try: + port = socket.getservbyname(service) # pyright: ignore + except Exception: + pass + if port is None: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + tuples = [] + if socktype == 0: + socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] + else: + socktypes = [socktype] + if flags & socket.AI_CANONNAME != 0: + cname = canonical_name + else: + cname = "" + for addr, af in addrs: + for socktype in socktypes: + for sockproto in _protocols_for_socktype[socktype]: + proto = int(sockproto) + addr_tuple = dns.inet.low_level_address_tuple((addr, port), af) + tuples.append((af, socktype, proto, cname, addr_tuple)) + if len(tuples) == 0: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + return tuples + + +def _getnameinfo(sockaddr, flags=0): + host = sockaddr[0] + port = sockaddr[1] + if len(sockaddr) == 4: + scope = sockaddr[3] + family = socket.AF_INET6 + else: + scope = None + family = socket.AF_INET + tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, socket.SOL_TCP, 0) + if len(tuples) > 1: + raise OSError("sockaddr resolved to multiple addresses") + addr = tuples[0][4][0] + if flags & socket.NI_DGRAM: + pname = "udp" + else: + pname = "tcp" + assert isinstance(addr, str) + qname = dns.reversename.from_address(addr) + if flags & socket.NI_NUMERICHOST == 0: + try: + assert _resolver is not None + answer = _resolver.resolve(qname, "PTR") + assert answer.rrset is not None + rdata = cast(dns.rdtypes.ANY.PTR.PTR, answer.rrset[0]) + hostname = rdata.target.to_text(True) + except (NXDOMAIN, NoAnswer): + if flags & socket.NI_NAMEREQD: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + hostname = addr + if scope is not None: + hostname += "%" + str(scope) + else: + hostname = addr + if scope is not None: + hostname += "%" + str(scope) + if flags & socket.NI_NUMERICSERV: + service = str(port) + else: + service = socket.getservbyport(port, pname) + return (hostname, service) + + +def _getfqdn(name=None): + if name is None: + name = socket.gethostname() + try: + (name, _, _) = _gethostbyaddr(name) + # Python's version checks aliases too, but our gethostbyname + # ignores them, so we do so here as well. + except Exception: # pragma: no cover + pass + return name + + +def _gethostbyname(name): + return _gethostbyname_ex(name)[2][0] + + +def _gethostbyname_ex(name): + aliases = [] + addresses = [] + tuples = _getaddrinfo( + name, 0, socket.AF_INET, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME + ) + canonical = tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def _gethostbyaddr(ip): + try: + dns.ipv6.inet_aton(ip) + sockaddr = (ip, 80, 0, 0) + family = socket.AF_INET6 + except Exception: + try: + dns.ipv4.inet_aton(ip) + except Exception: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + sockaddr = (ip, 80) + family = socket.AF_INET + (name, _) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) + aliases = [] + addresses = [] + tuples = _getaddrinfo( + name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME + ) + canonical = tuples[0][3] + # We only want to include an address from the tuples if it's the + # same as the one we asked about. We do this comparison in binary + # to avoid any differences in text representations. 
+ bin_ip = dns.inet.inet_pton(family, ip) + for item in tuples: + addr = item[4][0] + assert isinstance(addr, str) + bin_addr = dns.inet.inet_pton(family, addr) + if bin_ip == bin_addr: + addresses.append(addr) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def override_system_resolver(resolver: Resolver | None = None) -> None: + """Override the system resolver routines in the socket module with + versions which use dnspython's resolver. + + This can be useful in testing situations where you want to control + the resolution behavior of python code without having to change + the system's resolver settings (e.g. /etc/resolv.conf). + + The resolver to use may be specified; if it's not, the default + resolver will be used. + + resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + """ + + if resolver is None: + resolver = get_default_resolver() + global _resolver + _resolver = resolver + socket.getaddrinfo = _getaddrinfo + socket.getnameinfo = _getnameinfo + socket.getfqdn = _getfqdn + socket.gethostbyname = _gethostbyname + socket.gethostbyname_ex = _gethostbyname_ex + socket.gethostbyaddr = _gethostbyaddr + + +def restore_system_resolver() -> None: + """Undo the effects of prior override_system_resolver().""" + + global _resolver + _resolver = None + socket.getaddrinfo = _original_getaddrinfo + socket.getnameinfo = _original_getnameinfo + socket.getfqdn = _original_getfqdn + socket.gethostbyname = _original_gethostbyname + socket.gethostbyname_ex = _original_gethostbyname_ex + socket.gethostbyaddr = _original_gethostbyaddr diff --git a/venv/lib/python3.12/site-packages/dns/reversename.py b/venv/lib/python3.12/site-packages/dns/reversename.py new file mode 100644 index 0000000..60a4e83 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/reversename.py @@ -0,0 +1,106 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Reverse Map Names.""" + +import binascii + +import dns.exception +import dns.ipv4 +import dns.ipv6 +import dns.name + +ipv4_reverse_domain = dns.name.from_text("in-addr.arpa.") +ipv6_reverse_domain = dns.name.from_text("ip6.arpa.") + + +def from_address( + text: str, + v4_origin: dns.name.Name = ipv4_reverse_domain, + v6_origin: dns.name.Name = ipv6_reverse_domain, +) -> dns.name.Name: + """Convert an IPv4 or IPv6 address in textual form into a Name object whose + value is the reverse-map domain name of the address. + + *text*, a ``str``, is an IPv4 or IPv6 address in textual form + (e.g. '127.0.0.1', '::1') + + *v4_origin*, a ``dns.name.Name`` to append to the labels corresponding to + the address if the address is an IPv4 address, instead of the default + (in-addr.arpa.) 
+
+    *v6_origin*, a ``dns.name.Name`` to append to the labels corresponding to
+    the address if the address is an IPv6 address, instead of the default
+    (ip6.arpa.)
+
+    Raises ``dns.exception.SyntaxError`` if the address is badly formed.
+
+    Returns a ``dns.name.Name``.
+    """
+
+    try:
+        v6 = dns.ipv6.inet_aton(text)
+        if dns.ipv6.is_mapped(v6):
+            parts = [str(byte) for byte in v6[12:]]
+            origin = v4_origin
+        else:
+            parts = [x for x in str(binascii.hexlify(v6).decode())]
+            origin = v6_origin
+    except Exception:
+        parts = [str(byte) for byte in dns.ipv4.inet_aton(text)]
+        origin = v4_origin
+    return dns.name.from_text(".".join(reversed(parts)), origin=origin)
+
+
+def to_address(
+    name: dns.name.Name,
+    v4_origin: dns.name.Name = ipv4_reverse_domain,
+    v6_origin: dns.name.Name = ipv6_reverse_domain,
+) -> str:
+    """Convert a reverse map domain name into textual address form.
+
+    *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name
+    form.
+
+    *v4_origin*, a ``dns.name.Name`` representing the top-level domain for
+    IPv4 addresses, instead of the default (in-addr.arpa.)
+
+    *v6_origin*, a ``dns.name.Name`` representing the top-level domain for
+    IPv6 addresses, instead of the default (ip6.arpa.)
+
+    Raises ``dns.exception.SyntaxError`` if the name does not have a
+    reverse-map form.
+
+    Returns a ``str``.
+    """
+
+    if name.is_subdomain(v4_origin):
+        name = name.relativize(v4_origin)
+        text = b".".join(reversed(name.labels))
+        # run through inet_ntoa() to check syntax and make pretty.
+        return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text))
+    elif name.is_subdomain(v6_origin):
+        name = name.relativize(v6_origin)
+        labels = list(reversed(name.labels))
+        parts = []
+        for i in range(0, len(labels), 4):
+            parts.append(b"".join(labels[i : i + 4]))
+        text = b":".join(parts)
+        # run through inet_ntoa() to check syntax and make pretty.
+        return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text))
+    else:
+        raise dns.exception.SyntaxError("unknown reverse-map address family")
diff --git a/venv/lib/python3.12/site-packages/dns/rrset.py b/venv/lib/python3.12/site-packages/dns/rrset.py
new file mode 100644
index 0000000..271ddbe
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/rrset.py
@@ -0,0 +1,287 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS RRsets (an RRset is a named rdataset)"""
+
+from typing import Any, Collection, Dict, cast
+
+import dns.name
+import dns.rdata
+import dns.rdataclass
+import dns.rdataset
+import dns.rdatatype
+import dns.renderer
+
+
+class RRset(dns.rdataset.Rdataset):
+    """A DNS RRset (named rdataset).
+
+    RRset inherits from Rdataset, and RRsets can be treated as
+    Rdatasets in most cases. There are, however, a few notable
+    exceptions.
+    RRsets have different to_wire() and to_text() method
+    arguments, reflecting the fact that RRsets always have an owner
+    name.
+    """
+
+    __slots__ = ["name", "deleting"]
+
+    def __init__(
+        self,
+        name: dns.name.Name,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+        deleting: dns.rdataclass.RdataClass | None = None,
+    ):
+        """Create a new RRset."""
+
+        super().__init__(rdclass, rdtype, covers)
+        self.name = name
+        self.deleting = deleting
+
+    def _clone(self):
+        obj = cast(RRset, super()._clone())
+        obj.name = self.name
+        obj.deleting = self.deleting
+        return obj
+
+    def __repr__(self):
+        if self.covers == 0:
+            ctext = ""
+        else:
+            ctext = "(" + dns.rdatatype.to_text(self.covers) + ")"
+        if self.deleting is not None:
+            dtext = " delete=" + dns.rdataclass.to_text(self.deleting)
+        else:
+            dtext = ""
+        return (
+            "<DNS "
+            + str(self.name)
+            + " "
+            + dns.rdataclass.to_text(self.rdclass)
+            + " "
+            + dns.rdatatype.to_text(self.rdtype)
+            + ctext
+            + dtext
+            + " RRset: "
+            + self._rdata_repr()
+            + ">"
+        )
+
+    def __str__(self):
+        return self.to_text()
+
+    def __eq__(self, other):
+        if isinstance(other, RRset):
+            if self.name != other.name:
+                return False
+        elif not isinstance(other, dns.rdataset.Rdataset):
+            return False
+        return super().__eq__(other)
+
+    def match(self, *args: Any, **kwargs: Any) -> bool:  # type: ignore[override]
+        """Does this rrset match the specified attributes?
+
+        Behaves as :py:func:`full_match()` if the first argument is a
+        ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()`
+        otherwise.
+
+        (This behavior fixes a design mistake where the signature of this
+        method became incompatible with that of its superclass. The fix
+        makes RRsets matchable as Rdatasets while preserving backwards
+        compatibility.)
+        """
+        if isinstance(args[0], dns.name.Name):
+            return self.full_match(*args, **kwargs)  # type: ignore[arg-type]
+        else:
+            return super().match(*args, **kwargs)  # type: ignore[arg-type]
+
+    def full_match(
+        self,
+        name: dns.name.Name,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType,
+        deleting: dns.rdataclass.RdataClass | None = None,
+    ) -> bool:
+        """Returns ``True`` if this rrset matches the specified name, class,
+        type, covers, and deletion state.
+        """
+        if not super().match(rdclass, rdtype, covers):
+            return False
+        if self.name != name or self.deleting != deleting:
+            return False
+        return True
+
+    # pylint: disable=arguments-differ
+
+    def to_text(  # type: ignore[override]
+        self,
+        origin: dns.name.Name | None = None,
+        relativize: bool = True,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert the RRset into DNS zone file format.
+
+        See ``dns.name.Name.choose_relativity`` for more information
+        on how *origin* and *relativize* determine the way names
+        are emitted.
+
+        Any additional keyword arguments are passed on to the rdata
+        ``to_text()`` method.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
+        names.
+
+        *relativize*, a ``bool``. If ``True``, names will be relativized
+        to *origin*.
+        """
+
+        return super().to_text(
+            self.name, origin, relativize, self.deleting, **kw  # type: ignore
+        )
+
+    def to_wire(  # type: ignore[override]
+        self,
+        file: Any,
+        compress: dns.name.CompressType | None = None,  # type: ignore
+        origin: dns.name.Name | None = None,
+        **kw: Dict[str, Any],
+    ) -> int:
+        """Convert the RRset to wire format.
+
+        All keyword arguments are passed to ``dns.rdataset.to_wire()``; see
+        that function for details.
+
+        Returns an ``int``, the number of records emitted.
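+
+        A minimal usage sketch (the ``from_text`` call and the 192.0.2.1
+        documentation address below are illustrative only)::
+
+            import io
+            import dns.rrset
+
+            rrs = dns.rrset.from_text("example.", 300, "IN", "A", "192.0.2.1")
+            buf = io.BytesIO()
+            count = rrs.to_wire(buf)  # count == 1; buf now holds the wire form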
+ """ + + return super().to_wire( + self.name, file, compress, origin, self.deleting, **kw # type:ignore + ) + + # pylint: enable=arguments-differ + + def to_rdataset(self) -> dns.rdataset.Rdataset: + """Convert an RRset into an Rdataset. + + Returns a ``dns.rdataset.Rdataset``. + """ + return dns.rdataset.from_rdata_list(self.ttl, list(self)) + + +def from_text_list( + name: dns.name.Name | str, + ttl: int, + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + text_rdatas: Collection[str], + idna_codec: dns.name.IDNACodec | None = None, + origin: dns.name.Name | None = None, + relativize: bool = True, + relativize_to: dns.name.Name | None = None, +) -> RRset: + """Create an RRset with the specified name, TTL, class, and type, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rrset.RRset`` object. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = RRset(name, rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text( + r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec + ) + r.add(rd) + return r + + +def from_text( + name: dns.name.Name | str, + ttl: int, + rdclass: dns.rdataclass.RdataClass | str, + rdtype: dns.rdatatype.RdataType | str, + *text_rdatas: Any, +) -> RRset: + """Create an RRset with the specified name, TTL, class, and type and with + the specified rdatas in text format. + + Returns a ``dns.rrset.RRset`` object. + """ + + return from_text_list( + name, ttl, rdclass, rdtype, cast(Collection[str], text_rdatas) + ) + + +def from_rdata_list( + name: dns.name.Name | str, + ttl: int, + rdatas: Collection[dns.rdata.Rdata], + idna_codec: dns.name.IDNACodec | None = None, +) -> RRset: + """Create an RRset with the specified name and TTL, and with + the specified list of rdata objects. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + Returns a ``dns.rrset.RRset`` object. + + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = RRset(name, rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + assert r is not None + return r + + +def from_rdata(name: dns.name.Name | str, ttl: int, *rdatas: Any) -> RRset: + """Create an RRset with the specified name and TTL, and with + the specified rdata objects. + + Returns a ``dns.rrset.RRset`` object. 
+ """ + + return from_rdata_list(name, ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/venv/lib/python3.12/site-packages/dns/serial.py b/venv/lib/python3.12/site-packages/dns/serial.py new file mode 100644 index 0000000..3417299 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/serial.py @@ -0,0 +1,118 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""Serial Number Arthimetic from RFC 1982""" + + +class Serial: + def __init__(self, value: int, bits: int = 32): + self.value = value % 2**bits + self.bits = bits + + def __repr__(self): + return f"dns.serial.Serial({self.value}, {self.bits})" + + def __eq__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + return self.value == other.value + + def __ne__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + return self.value != other.value + + def __lt__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + if self.value < other.value and other.value - self.value < 2 ** (self.bits - 1): + return True + elif self.value > other.value and self.value - other.value > 2 ** ( + self.bits - 1 + ): + return True + else: + return False + + def __le__(self, other): + return self == other or self < other + + def __gt__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + if self.value < other.value and other.value - self.value > 2 ** (self.bits - 1): + return True + elif self.value > other.value and self.value - other.value < 2 ** ( + self.bits - 1 + ): + return True + else: + return False + + def __ge__(self, other): + return self == other or self > other + + def __add__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v += delta + v = v % 2**self.bits + return Serial(v, self.bits) + + def __iadd__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v += delta + v = v % 2**self.bits + self.value = v + return self + + def __sub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2**self.bits + return Serial(v, self.bits) + + def __isub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2**self.bits + self.value = v + return self diff --git a/venv/lib/python3.12/site-packages/dns/set.py b/venv/lib/python3.12/site-packages/dns/set.py new file mode 100644 index 0000000..ae8f0dd --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/set.py @@ -0,0 +1,308 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for 
text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import itertools + + +class Set: + """A simple set class. + + This class was originally used to deal with python not having a set class, and + originally the class used lists in its implementation. The ordered and indexable + nature of RRsets and Rdatasets is unfortunately widely used in dnspython + applications, so for backwards compatibility sets continue to be a custom class, now + based on an ordered dictionary. + """ + + __slots__ = ["items"] + + def __init__(self, items=None): + """Initialize the set. + + *items*, an iterable or ``None``, the initial set of items. + """ + + self.items = dict() + if items is not None: + for item in items: + # This is safe for how we use set, but if other code + # subclasses it could be a legitimate issue. + self.add(item) # lgtm[py/init-calls-subclass] + + def __repr__(self): + return f"dns.set.Set({repr(list(self.items.keys()))})" # pragma: no cover + + def add(self, item): + """Add an item to the set.""" + + if item not in self.items: + self.items[item] = None + + def remove(self, item): + """Remove an item from the set.""" + + try: + del self.items[item] + except KeyError: + raise ValueError + + def discard(self, item): + """Remove an item from the set if present.""" + + self.items.pop(item, None) + + def pop(self): + """Remove an arbitrary item from the set.""" + (k, _) = self.items.popitem() + return k + + def _clone(self) -> "Set": + """Make a (shallow) copy of the set. + + There is a 'clone protocol' that subclasses of this class + should use. To make a copy, first call your super's _clone() + method, and use the object returned as the new instance. Then + make shallow copies of the attributes defined in the subclass. + + This protocol allows us to write the set algorithms that + return new instances (e.g. union) once, and keep using them in + subclasses. + """ + + if hasattr(self, "_clone_class"): + cls = self._clone_class # type: ignore + else: + cls = self.__class__ + obj = cls.__new__(cls) + obj.items = dict() + obj.items.update(self.items) + return obj + + def __copy__(self): + """Make a (shallow) copy of the set.""" + + return self._clone() + + def copy(self): + """Make a (shallow) copy of the set.""" + + return self._clone() + + def union_update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + return + for item in other.items: + self.add(item) + + def intersection_update(self, other): + """Update the set, removing any elements from other which are not + in both sets. 
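+
+        A minimal sketch (plain integers stand in for the usual items)::
+
+            import dns.set
+
+            a = dns.set.Set([1, 2, 3])
+            b = dns.set.Set([2, 3, 4])
+            a.intersection_update(b)
+            list(a)  # [2, 3]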
+ """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + return + # we make a copy of the list so that we can remove items from + # the list without breaking the iterator. + for item in list(self.items): + if item not in other.items: + del self.items[item] + + def difference_update(self, other): + """Update the set, removing any elements from other which are in + the set. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + self.items.clear() + else: + for item in other.items: + self.discard(item) + + def symmetric_difference_update(self, other): + """Update the set, retaining only elements unique to both sets.""" + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + self.items.clear() + else: + overlap = self.intersection(other) + self.union_update(other) + self.difference_update(overlap) + + def union(self, other): + """Return a new set which is the union of ``self`` and ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.union_update(other) + return obj + + def intersection(self, other): + """Return a new set which is the intersection of ``self`` and + ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.intersection_update(other) + return obj + + def difference(self, other): + """Return a new set which ``self`` - ``other``, i.e. the items + in ``self`` which are not also in ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.difference_update(other) + return obj + + def symmetric_difference(self, other): + """Return a new set which (``self`` - ``other``) | (``other`` + - ``self), ie: the items in either ``self`` or ``other`` which + are not contained in their intersection. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.symmetric_difference_update(other) + return obj + + def __or__(self, other): + return self.union(other) + + def __and__(self, other): + return self.intersection(other) + + def __add__(self, other): + return self.union(other) + + def __sub__(self, other): + return self.difference(other) + + def __xor__(self, other): + return self.symmetric_difference(other) + + def __ior__(self, other): + self.union_update(other) + return self + + def __iand__(self, other): + self.intersection_update(other) + return self + + def __iadd__(self, other): + self.union_update(other) + return self + + def __isub__(self, other): + self.difference_update(other) + return self + + def __ixor__(self, other): + self.symmetric_difference_update(other) + return self + + def update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + + *other*, the collection of items with which to update the set, which + may be any iterable type. 
+ """ + + for item in other: + self.add(item) + + def clear(self): + """Make the set empty.""" + self.items.clear() + + def __eq__(self, other): + return self.items == other.items + + def __ne__(self, other): + return not self.__eq__(other) + + def __len__(self): + return len(self.items) + + def __iter__(self): + return iter(self.items) + + def __getitem__(self, i): + if isinstance(i, slice): + return list(itertools.islice(self.items, i.start, i.stop, i.step)) + else: + return next(itertools.islice(self.items, i, i + 1)) + + def __delitem__(self, i): + if isinstance(i, slice): + for elt in list(self[i]): + del self.items[elt] + else: + del self.items[self[i]] + + def issubset(self, other): + """Is this set a subset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in self.items: + if item not in other.items: + return False + return True + + def issuperset(self, other): + """Is this set a superset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in other.items: + if item not in self.items: + return False + return True + + def isdisjoint(self, other): + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in other.items: + if item in self.items: + return False + return True diff --git a/venv/lib/python3.12/site-packages/dns/tokenizer.py b/venv/lib/python3.12/site-packages/dns/tokenizer.py new file mode 100644 index 0000000..86ae3e2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/tokenizer.py @@ -0,0 +1,706 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Tokenize DNS zone file format""" + +import io +import sys +from typing import Any, List, Tuple + +import dns.exception +import dns.name +import dns.ttl + +_DELIMITERS = {" ", "\t", "\n", ";", "(", ")", '"'} +_QUOTING_DELIMITERS = {'"'} + +EOF = 0 +EOL = 1 +WHITESPACE = 2 +IDENTIFIER = 3 +QUOTED_STRING = 4 +COMMENT = 5 +DELIMITER = 6 + + +class UngetBufferFull(dns.exception.DNSException): + """An attempt was made to unget a token when the unget buffer was full.""" + + +class Token: + """A DNS zone file format token. + + ttype: The token type + value: The token value + has_escape: Does the token value contain escapes? 
+ """ + + def __init__( + self, + ttype: int, + value: Any = "", + has_escape: bool = False, + comment: str | None = None, + ): + """Initialize a token instance.""" + + self.ttype = ttype + self.value = value + self.has_escape = has_escape + self.comment = comment + + def is_eof(self) -> bool: + return self.ttype == EOF + + def is_eol(self) -> bool: + return self.ttype == EOL + + def is_whitespace(self) -> bool: + return self.ttype == WHITESPACE + + def is_identifier(self) -> bool: + return self.ttype == IDENTIFIER + + def is_quoted_string(self) -> bool: + return self.ttype == QUOTED_STRING + + def is_comment(self) -> bool: + return self.ttype == COMMENT + + def is_delimiter(self) -> bool: # pragma: no cover (we don't return delimiters yet) + return self.ttype == DELIMITER + + def is_eol_or_eof(self) -> bool: + return self.ttype == EOL or self.ttype == EOF + + def __eq__(self, other): + if not isinstance(other, Token): + return False + return self.ttype == other.ttype and self.value == other.value + + def __ne__(self, other): + if not isinstance(other, Token): + return True + return self.ttype != other.ttype or self.value != other.value + + def __str__(self): + return f'{self.ttype} "{self.value}"' + + def unescape(self) -> "Token": + if not self.has_escape: + return self + unescaped = "" + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + c = chr(codepoint) + unescaped += c + return Token(self.ttype, unescaped) + + def unescape_to_bytes(self) -> "Token": + # We used to use unescape() for TXT-like records, but this + # caused problems as we'd process DNS escapes into Unicode code + # points instead of byte values, and then a to_text() of the + # processed data would not equal the original input. For + # example, \226 in the TXT record would have a to_text() of + # \195\162 because we applied UTF-8 encoding to Unicode code + # point 226. + # + # We now apply escapes while converting directly to bytes, + # avoiding this double encoding. + # + # This code also handles cases where the unicode input has + # non-ASCII code-points in it by converting it to UTF-8. TXT + # records aren't defined for Unicode, but this is the best we + # can do to preserve meaning. 
For example, + # + # foo\u200bbar + # + # (where \u200b is Unicode code point 0x200b) will be treated + # as if the input had been the UTF-8 encoding of that string, + # namely: + # + # foo\226\128\139bar + # + unescaped = b"" + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b"%c" % (codepoint) + else: + # Note that as mentioned above, if c is a Unicode + # code point outside of the ASCII range, then this + # += is converting that code point to its UTF-8 + # encoding and appending multiple bytes to + # unescaped. + unescaped += c.encode() + else: + unescaped += c.encode() + return Token(self.ttype, bytes(unescaped)) + + +class Tokenizer: + """A DNS zone file format tokenizer. + + A token object is basically a (type, value) tuple. The valid + types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, + COMMENT, and DELIMITER. + + file: The file to tokenize + + ungotten_char: The most recently ungotten character, or None. + + ungotten_token: The most recently ungotten token, or None. + + multiline: The current multiline level. This value is increased + by one every time a '(' delimiter is read, and decreased by one every time + a ')' delimiter is read. + + quoting: This variable is true if the tokenizer is currently + reading a quoted string. + + eof: This variable is true if the tokenizer has encountered EOF. + + delimiters: The current delimiter dictionary. + + line_number: The current line number + + filename: A filename that will be returned by the where() method. + + idna_codec: A dns.name.IDNACodec, specifies the IDNA + encoder/decoder. If None, the default IDNA 2003 + encoder/decoder is used. + """ + + def __init__( + self, + f: Any = sys.stdin, + filename: str | None = None, + idna_codec: dns.name.IDNACodec | None = None, + ): + """Initialize a tokenizer instance. + + f: The file to tokenize. The default is sys.stdin. + This parameter may also be a string, in which case the tokenizer + will take its input from the contents of the string. + + filename: the name of the filename that the where() method + will return. + + idna_codec: A dns.name.IDNACodec, specifies the IDNA + encoder/decoder. If None, the default IDNA 2003 + encoder/decoder is used. 
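+
+        A minimal construction sketch (the NS record text is illustrative)::
+
+            tok = Tokenizer("example. 3600 IN NS ns1.example.\n")
+            tok.where()  # ('<string>', 1)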
+ """ + + if isinstance(f, str): + f = io.StringIO(f) + if filename is None: + filename = "" + elif isinstance(f, bytes): + f = io.StringIO(f.decode()) + if filename is None: + filename = "" + else: + if filename is None: + if f is sys.stdin: + filename = "" + else: + filename = "" + self.file = f + self.ungotten_char: str | None = None + self.ungotten_token: Token | None = None + self.multiline = 0 + self.quoting = False + self.eof = False + self.delimiters = _DELIMITERS + self.line_number = 1 + assert filename is not None + self.filename = filename + if idna_codec is None: + self.idna_codec: dns.name.IDNACodec = dns.name.IDNA_2003 + else: + self.idna_codec = idna_codec + + def _get_char(self) -> str: + """Read a character from input.""" + + if self.ungotten_char is None: + if self.eof: + c = "" + else: + c = self.file.read(1) + if c == "": + self.eof = True + elif c == "\n": + self.line_number += 1 + else: + c = self.ungotten_char + self.ungotten_char = None + return c + + def where(self) -> Tuple[str, int]: + """Return the current location in the input. + + Returns a (string, int) tuple. The first item is the filename of + the input, the second is the current line number. + """ + + return (self.filename, self.line_number) + + def _unget_char(self, c: str) -> None: + """Unget a character. + + The unget buffer for characters is only one character large; it is + an error to try to unget a character when the unget buffer is not + empty. + + c: the character to unget + raises UngetBufferFull: there is already an ungotten char + """ + + if self.ungotten_char is not None: + # this should never happen! + raise UngetBufferFull # pragma: no cover + self.ungotten_char = c + + def skip_whitespace(self) -> int: + """Consume input until a non-whitespace character is encountered. + + The non-whitespace character is then ungotten, and the number of + whitespace characters consumed is returned. + + If the tokenizer is in multiline mode, then newlines are whitespace. + + Returns the number of characters skipped. + """ + + skipped = 0 + while True: + c = self._get_char() + if c != " " and c != "\t": + if (c != "\n") or not self.multiline: + self._unget_char(c) + return skipped + skipped += 1 + + def get(self, want_leading: bool = False, want_comment: bool = False) -> Token: + """Get the next token. + + want_leading: If True, return a WHITESPACE token if the + first character read is whitespace. The default is False. + + want_comment: If True, return a COMMENT token if the + first token read is a comment. The default is False. + + Raises dns.exception.UnexpectedEnd: input ended prematurely + + Raises dns.exception.SyntaxError: input was badly formed + + Returns a Token. 
+ """ + + if self.ungotten_token is not None: + utoken = self.ungotten_token + self.ungotten_token = None + if utoken.is_whitespace(): + if want_leading: + return utoken + elif utoken.is_comment(): + if want_comment: + return utoken + else: + return utoken + skipped = self.skip_whitespace() + if want_leading and skipped > 0: + return Token(WHITESPACE, " ") + token = "" + ttype = IDENTIFIER + has_escape = False + while True: + c = self._get_char() + if c == "" or c in self.delimiters: + if c == "" and self.quoting: + raise dns.exception.UnexpectedEnd + if token == "" and ttype != QUOTED_STRING: + if c == "(": + self.multiline += 1 + self.skip_whitespace() + continue + elif c == ")": + if self.multiline <= 0: + raise dns.exception.SyntaxError + self.multiline -= 1 + self.skip_whitespace() + continue + elif c == '"': + if not self.quoting: + self.quoting = True + self.delimiters = _QUOTING_DELIMITERS + ttype = QUOTED_STRING + continue + else: + self.quoting = False + self.delimiters = _DELIMITERS + self.skip_whitespace() + continue + elif c == "\n": + return Token(EOL, "\n") + elif c == ";": + while 1: + c = self._get_char() + if c == "\n" or c == "": + break + token += c + if want_comment: + self._unget_char(c) + return Token(COMMENT, token) + elif c == "": + if self.multiline: + raise dns.exception.SyntaxError( + "unbalanced parentheses" + ) + return Token(EOF, comment=token) + elif self.multiline: + self.skip_whitespace() + token = "" + continue + else: + return Token(EOL, "\n", comment=token) + else: + # This code exists in case we ever want a + # delimiter to be returned. It never produces + # a token currently. + token = c + ttype = DELIMITER + else: + self._unget_char(c) + break + elif self.quoting and c == "\n": + raise dns.exception.SyntaxError("newline in quoted string") + elif c == "\\": + # + # It's an escape. Put it and the next character into + # the token; it will be checked later for goodness. + # + token += c + has_escape = True + c = self._get_char() + if c == "" or (c == "\n" and not self.quoting): + raise dns.exception.UnexpectedEnd + token += c + if token == "" and ttype != QUOTED_STRING: + if self.multiline: + raise dns.exception.SyntaxError("unbalanced parentheses") + ttype = EOF + return Token(ttype, token, has_escape) + + def unget(self, token: Token) -> None: + """Unget a token. + + The unget buffer for tokens is only one token large; it is + an error to try to unget a token when the unget buffer is not + empty. + + token: the token to unget + + Raises UngetBufferFull: there is already an ungotten token + """ + + if self.ungotten_token is not None: + raise UngetBufferFull + self.ungotten_token = token + + def next(self): + """Return the next item in an iteration. + + Returns a Token. + """ + + token = self.get() + if token.is_eof(): + raise StopIteration + return token + + __next__ = next + + def __iter__(self): + return self + + # Helpers + + def get_int(self, base: int = 10) -> int: + """Read the next token and interpret it as an unsigned integer. + + Raises dns.exception.SyntaxError if not an unsigned integer. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + if not token.value.isdigit(): + raise dns.exception.SyntaxError("expecting an integer") + return int(token.value, base) + + def get_uint8(self) -> int: + """Read the next token and interpret it as an 8-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. 
+ + Returns an int. + """ + + value = self.get_int() + if value < 0 or value > 255: + raise dns.exception.SyntaxError(f"{value} is not an unsigned 8-bit integer") + return value + + def get_uint16(self, base: int = 10) -> int: + """Read the next token and interpret it as a 16-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 65535: + if base == 8: + raise dns.exception.SyntaxError( + f"{value:o} is not an octal unsigned 16-bit integer" + ) + else: + raise dns.exception.SyntaxError( + f"{value} is not an unsigned 16-bit integer" + ) + return value + + def get_uint32(self, base: int = 10) -> int: + """Read the next token and interpret it as a 32-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 4294967295: + raise dns.exception.SyntaxError( + f"{value} is not an unsigned 32-bit integer" + ) + return value + + def get_uint48(self, base: int = 10) -> int: + """Read the next token and interpret it as a 48-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 48-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 281474976710655: + raise dns.exception.SyntaxError( + f"{value} is not an unsigned 48-bit integer" + ) + return value + + def get_string(self, max_length: int | None = None) -> str: + """Read the next token and interpret it as a string. + + Raises dns.exception.SyntaxError if not a string. + Raises dns.exception.SyntaxError if token value length + exceeds max_length (if specified). + + Returns a string. + """ + + token = self.get().unescape() + if not (token.is_identifier() or token.is_quoted_string()): + raise dns.exception.SyntaxError("expecting a string") + if max_length and len(token.value) > max_length: + raise dns.exception.SyntaxError("string too long") + return token.value + + def get_identifier(self) -> str: + """Read the next token, which should be an identifier. + + Raises dns.exception.SyntaxError if not an identifier. + + Returns a string. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + return token.value + + def get_remaining(self, max_tokens: int | None = None) -> List[Token]: + """Return the remaining tokens on the line, until an EOL or EOF is seen. + + max_tokens: If not None, stop after this number of tokens. + + Returns a list of tokens. + """ + + tokens = [] + while True: + token = self.get() + if token.is_eol_or_eof(): + self.unget(token) + break + tokens.append(token) + if len(tokens) == max_tokens: + break + return tokens + + def concatenate_remaining_identifiers(self, allow_empty: bool = False) -> str: + """Read the remaining tokens on the line, which should be identifiers. + + Raises dns.exception.SyntaxError if there are no remaining tokens, + unless `allow_empty=True` is given. + + Raises dns.exception.SyntaxError if a token is seen that is not an + identifier. + + Returns a string containing a concatenation of the remaining + identifiers. 
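+
+        A minimal sketch (the identifiers are illustrative)::
+
+            tok = Tokenizer("AAAA BBBB\n")
+            tok.concatenate_remaining_identifiers()  # 'AAAABBBB'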
+ """ + s = "" + while True: + token = self.get().unescape() + if token.is_eol_or_eof(): + self.unget(token) + break + if not token.is_identifier(): + raise dns.exception.SyntaxError + s += token.value + if not (allow_empty or s): + raise dns.exception.SyntaxError("expecting another identifier") + return s + + def as_name( + self, + token: Token, + origin: dns.name.Name | None = None, + relativize: bool = False, + relativize_to: dns.name.Name | None = None, + ) -> dns.name.Name: + """Try to interpret the token as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + name = dns.name.from_text(token.value, origin, self.idna_codec) + return name.choose_relativity(relativize_to or origin, relativize) + + def get_name( + self, + origin: dns.name.Name | None = None, + relativize: bool = False, + relativize_to: dns.name.Name | None = None, + ) -> dns.name.Name: + """Read the next token and interpret it as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + + token = self.get() + return self.as_name(token, origin, relativize, relativize_to) + + def get_eol_as_token(self) -> Token: + """Read the next token and raise an exception if it isn't EOL or + EOF. + + Returns a string. + """ + + token = self.get() + if not token.is_eol_or_eof(): + raise dns.exception.SyntaxError( + f'expected EOL or EOF, got {token.ttype} "{token.value}"' + ) + return token + + def get_eol(self) -> str: + return self.get_eol_as_token().value + + def get_ttl(self) -> int: + """Read the next token and interpret it as a DNS TTL. + + Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an + identifier or badly formed. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + return dns.ttl.from_text(token.value) diff --git a/venv/lib/python3.12/site-packages/dns/transaction.py b/venv/lib/python3.12/site-packages/dns/transaction.py new file mode 100644 index 0000000..9ecd737 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/transaction.py @@ -0,0 +1,651 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections +from typing import Any, Callable, Iterator, List, Tuple + +import dns.exception +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.serial +import dns.ttl + + +class TransactionManager: + def reader(self) -> "Transaction": + """Begin a read-only transaction.""" + raise NotImplementedError # pragma: no cover + + def writer(self, replacement: bool = False) -> "Transaction": + """Begin a writable transaction. + + *replacement*, a ``bool``. If `True`, the content of the + transaction completely replaces any prior content. If False, + the default, then the content of the transaction updates the + existing content. + """ + raise NotImplementedError # pragma: no cover + + def origin_information( + self, + ) -> Tuple[dns.name.Name | None, bool, dns.name.Name | None]: + """Returns a tuple + + (absolute_origin, relativize, effective_origin) + + giving the absolute name of the default origin for any + relative domain names, the "effective origin", and whether + names should be relativized. The "effective origin" is the + absolute origin if relativize is False, and the empty name if + relativize is true. 
(The effective origin is provided even + though it can be computed from the absolute_origin and + relativize setting because it avoids a lot of code + duplication.) + + If the returned names are `None`, then no origin information is + available. + + This information is used by code working with transactions to + allow it to coordinate relativization. The transaction code + itself takes what it gets (i.e. does not change name + relativity). + + """ + raise NotImplementedError # pragma: no cover + + def get_class(self) -> dns.rdataclass.RdataClass: + """The class of the transaction manager.""" + raise NotImplementedError # pragma: no cover + + def from_wire_origin(self) -> dns.name.Name | None: + """Origin to use in from_wire() calls.""" + (absolute_origin, relativize, _) = self.origin_information() + if relativize: + return absolute_origin + else: + return None + + +class DeleteNotExact(dns.exception.DNSException): + """Existing data did not match data specified by an exact delete.""" + + +class ReadOnly(dns.exception.DNSException): + """Tried to write to a read-only transaction.""" + + +class AlreadyEnded(dns.exception.DNSException): + """Tried to use an already-ended transaction.""" + + +def _ensure_immutable_rdataset(rdataset): + if rdataset is None or isinstance(rdataset, dns.rdataset.ImmutableRdataset): + return rdataset + return dns.rdataset.ImmutableRdataset(rdataset) + + +def _ensure_immutable_node(node): + if node is None or node.is_immutable(): + return node + return dns.node.ImmutableNode(node) + + +CheckPutRdatasetType = Callable[ + ["Transaction", dns.name.Name, dns.rdataset.Rdataset], None +] +CheckDeleteRdatasetType = Callable[ + ["Transaction", dns.name.Name, dns.rdatatype.RdataType, dns.rdatatype.RdataType], + None, +] +CheckDeleteNameType = Callable[["Transaction", dns.name.Name], None] + + +class Transaction: + def __init__( + self, + manager: TransactionManager, + replacement: bool = False, + read_only: bool = False, + ): + self.manager = manager + self.replacement = replacement + self.read_only = read_only + self._ended = False + self._check_put_rdataset: List[CheckPutRdatasetType] = [] + self._check_delete_rdataset: List[CheckDeleteRdatasetType] = [] + self._check_delete_name: List[CheckDeleteNameType] = [] + + # + # This is the high level API + # + # Note that we currently use non-immutable types in the return type signature to + # avoid covariance problems, e.g. if the caller has a List[Rdataset], mypy will be + # unhappy if we return an ImmutableRdataset. + + def get( + self, + name: dns.name.Name | str | None, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + ) -> dns.rdataset.Rdataset: + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + + Note that the returned rdataset is immutable. + """ + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + rdataset = self._get_rdataset(name, rdtype, covers) + return _ensure_immutable_rdataset(rdataset) + + def get_node(self, name: dns.name.Name) -> dns.node.Node | None: + """Return the node at *name*, if any. + + Returns an immutable node or ``None``. + """ + return _ensure_immutable_node(self._get_node(name)) + + def _check_read_only(self) -> None: + if self.read_only: + raise ReadOnly + + def add(self, *args: Any) -> None: + """Add records. 
+
+        The arguments may be:
+
+            - rrset
+
+            - name, rdataset...
+
+            - name, ttl, rdata...
+        """
+        self._check_ended()
+        self._check_read_only()
+        self._add(False, args)
+
+    def replace(self, *args: Any) -> None:
+        """Replace the existing rdataset at the name with the specified
+        rdataset, or add the specified rdataset if there was no existing
+        rdataset.
+
+        The arguments may be:
+
+            - rrset
+
+            - name, rdataset...
+
+            - name, ttl, rdata...
+
+        Note that if you want to replace the entire node, you should do
+        a delete of the name followed by one or more calls to add() or
+        replace().
+        """
+        self._check_ended()
+        self._check_read_only()
+        self._add(True, args)
+
+    def delete(self, *args: Any) -> None:
+        """Delete records.
+
+        It is not an error if some of the records are not in the existing
+        set.
+
+        The arguments may be:
+
+            - rrset
+
+            - name
+
+            - name, rdatatype, [covers]
+
+            - name, rdataset...
+
+            - name, rdata...
+        """
+        self._check_ended()
+        self._check_read_only()
+        self._delete(False, args)
+
+    def delete_exact(self, *args: Any) -> None:
+        """Delete records.
+
+        The arguments may be:
+
+            - rrset
+
+            - name
+
+            - name, rdatatype, [covers]
+
+            - name, rdataset...
+
+            - name, rdata...
+
+        Raises dns.transaction.DeleteNotExact if some of the records
+        are not in the existing set.
+
+        """
+        self._check_ended()
+        self._check_read_only()
+        self._delete(True, args)
+
+    def name_exists(self, name: dns.name.Name | str) -> bool:
+        """Does the specified name exist?"""
+        self._check_ended()
+        if isinstance(name, str):
+            name = dns.name.from_text(name, None)
+        return self._name_exists(name)
+
+    def update_serial(
+        self,
+        value: int = 1,
+        relative: bool = True,
+        name: dns.name.Name = dns.name.empty,
+    ) -> None:
+        """Update the serial number.
+
+        *value*, an `int`, is an increment if *relative* is `True`, or the
+        actual value to set if *relative* is `False`.
+
+        Raises `KeyError` if there is no SOA rdataset at *name*.
+
+        Raises `ValueError` if *value* is negative or if the increment is
+        so large that it would cause the new serial to be less than the
+        prior value.
+        """
+        self._check_ended()
+        if value < 0:
+            raise ValueError("negative update_serial() value")
+        if isinstance(name, str):
+            name = dns.name.from_text(name, None)
+        rdataset = self._get_rdataset(name, dns.rdatatype.SOA, dns.rdatatype.NONE)
+        if rdataset is None or len(rdataset) == 0:
+            raise KeyError
+        if relative:
+            serial = dns.serial.Serial(rdataset[0].serial) + value
+        else:
+            serial = dns.serial.Serial(value)
+        serial = serial.value  # convert back to int
+        if serial == 0:
+            serial = 1
+        rdata = rdataset[0].replace(serial=serial)
+        new_rdataset = dns.rdataset.from_rdata(rdataset.ttl, rdata)
+        self.replace(name, new_rdataset)
+
+    def __iter__(self):
+        self._check_ended()
+        return self._iterate_rdatasets()
+
+    def changed(self) -> bool:
+        """Has this transaction changed anything?
+
+        For read-only transactions, the result is always `False`.
+
+        For writable transactions, the result is `True` if at some time
+        during the life of the transaction, the content was changed.
+        """
+        self._check_ended()
+        return self._changed()
+
+    def commit(self) -> None:
+        """Commit the transaction.
+
+        Normally transactions are used as context managers and commit
+        or rollback automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Raises an exception if the commit fails (in which case the transaction
+        is also rolled back).
+        """
+        self._end(True)
+
+    def rollback(self) -> None:
+        """Rollback the transaction.
+
+        Normally transactions are used as context managers and commit
+        or rollback automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Rollback cannot otherwise fail.
+        """
+        self._end(False)
+
+    def check_put_rdataset(self, check: CheckPutRdatasetType) -> None:
+        """Call *check* before putting (storing) an rdataset.
+
+        The function is called with the transaction, the name, and the rdataset.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the put, and otherwise should return ``None``.
+        """
+        self._check_put_rdataset.append(check)
+
+    def check_delete_rdataset(self, check: CheckDeleteRdatasetType) -> None:
+        """Call *check* before deleting an rdataset.
+
+        The function is called with the transaction, the name, the rdatatype,
+        and the covered rdatatype.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the deletion, and otherwise should return ``None``.
+        """
+        self._check_delete_rdataset.append(check)
+
+    def check_delete_name(self, check: CheckDeleteNameType) -> None:
+        """Call *check* before deleting a name.
+
+        The function is called with the transaction and the name.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the deletion, and otherwise should return ``None``.
+        """
+        self._check_delete_name.append(check)
+
+    def iterate_rdatasets(
+        self,
+    ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]:
+        """Iterate all the rdatasets in the transaction, returning
+        (`dns.name.Name`, `dns.rdataset.Rdataset`) tuples.
+
+        Note that as is usual with python iterators, adding or removing items
+        while iterating will invalidate the iterator and may raise `RuntimeError`
+        or fail to iterate over all entries."""
+        self._check_ended()
+        return self._iterate_rdatasets()
+
+    def iterate_names(self) -> Iterator[dns.name.Name]:
+        """Iterate all the names in the transaction.
+ + Note that as is usual with python iterators, adding or removing names + while iterating will invalidate the iterator and may raise `RuntimeError` + or fail to iterate over all entries.""" + self._check_ended() + return self._iterate_names() + + # + # Helper methods + # + + def _raise_if_not_empty(self, method, args): + if len(args) != 0: + raise TypeError(f"extra parameters to {method}") + + def _rdataset_from_args(self, method, deleting, args): + try: + arg = args.popleft() + if isinstance(arg, dns.rrset.RRset): + rdataset = arg.to_rdataset() + elif isinstance(arg, dns.rdataset.Rdataset): + rdataset = arg + else: + if deleting: + ttl = 0 + else: + if isinstance(arg, int): + ttl = arg + if ttl > dns.ttl.MAX_TTL: + raise ValueError(f"{method}: TTL value too big") + else: + raise TypeError(f"{method}: expected a TTL") + arg = args.popleft() + if isinstance(arg, dns.rdata.Rdata): + rdataset = dns.rdataset.from_rdata(ttl, arg) + else: + raise TypeError(f"{method}: expected an Rdata") + return rdataset + except IndexError: + if deleting: + return None + else: + # reraise + raise TypeError(f"{method}: expected more arguments") + + def _add(self, replace, args): + if replace: + method = "replace()" + else: + method = "add()" + try: + args = collections.deque(args) + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + rdataset = self._rdataset_from_args(method, False, args) + elif isinstance(arg, dns.rrset.RRset): + rrset = arg + name = rrset.name + # rrsets are also rdatasets, but they don't print the + # same and can't be stored in nodes, so convert. + rdataset = rrset.to_rdataset() + else: + raise TypeError( + f"{method} requires a name or RRset as the first argument" + ) + assert rdataset is not None # for type checkers + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f"{method} has objects of wrong RdataClass") + if rdataset.rdtype == dns.rdatatype.SOA: + (_, _, origin) = self._origin_information() + if name != origin: + raise ValueError(f"{method} has non-origin SOA") + self._raise_if_not_empty(method, args) + if not replace: + existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) + if existing is not None: + if isinstance(existing, dns.rdataset.ImmutableRdataset): + trds = dns.rdataset.Rdataset( + existing.rdclass, existing.rdtype, existing.covers + ) + trds.update(existing) + existing = trds + rdataset = existing.union(rdataset) + self._checked_put_rdataset(name, rdataset) + except IndexError: + raise TypeError(f"not enough parameters to {method}") + + def _delete(self, exact, args): + if exact: + method = "delete_exact()" + else: + method = "delete()" + try: + args = collections.deque(args) + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + if len(args) > 0 and ( + isinstance(args[0], int) or isinstance(args[0], str) + ): + # deleting by type and (optionally) covers + rdtype = dns.rdatatype.RdataType.make(args.popleft()) + if len(args) > 0: + covers = dns.rdatatype.RdataType.make(args.popleft()) + else: + covers = dns.rdatatype.NONE + self._raise_if_not_empty(method, args) + existing = self._get_rdataset(name, rdtype, covers) + if existing is None: + if exact: + raise DeleteNotExact(f"{method}: missing rdataset") + else: + self._checked_delete_rdataset(name, rdtype, covers) + return + else: + rdataset = self._rdataset_from_args(method, True, args) + elif isinstance(arg, 
dns.rrset.RRset): + rdataset = arg # rrsets are also rdatasets + name = rdataset.name + else: + raise TypeError( + f"{method} requires a name or RRset as the first argument" + ) + self._raise_if_not_empty(method, args) + if rdataset: + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f"{method} has objects of wrong RdataClass") + existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) + if existing is not None: + if exact: + intersection = existing.intersection(rdataset) + if intersection != rdataset: + raise DeleteNotExact(f"{method}: missing rdatas") + rdataset = existing.difference(rdataset) + if len(rdataset) == 0: + self._checked_delete_rdataset( + name, rdataset.rdtype, rdataset.covers + ) + else: + self._checked_put_rdataset(name, rdataset) + elif exact: + raise DeleteNotExact(f"{method}: missing rdataset") + else: + if exact and not self._name_exists(name): + raise DeleteNotExact(f"{method}: name not known") + self._checked_delete_name(name) + except IndexError: + raise TypeError(f"not enough parameters to {method}") + + def _check_ended(self): + if self._ended: + raise AlreadyEnded + + def _end(self, commit): + self._check_ended() + try: + self._end_transaction(commit) + finally: + self._ended = True + + def _checked_put_rdataset(self, name, rdataset): + for check in self._check_put_rdataset: + check(self, name, rdataset) + self._put_rdataset(name, rdataset) + + def _checked_delete_rdataset(self, name, rdtype, covers): + for check in self._check_delete_rdataset: + check(self, name, rdtype, covers) + self._delete_rdataset(name, rdtype, covers) + + def _checked_delete_name(self, name): + for check in self._check_delete_name: + check(self, name) + self._delete_name(name) + + # + # Transactions are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self._ended: + if exc_type is None: + self.commit() + else: + self.rollback() + return False + + # + # This is the low level API, which must be implemented by subclasses + # of Transaction. + # + + def _get_rdataset(self, name, rdtype, covers): + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + """ + raise NotImplementedError # pragma: no cover + + def _put_rdataset(self, name, rdataset): + """Store the rdataset.""" + raise NotImplementedError # pragma: no cover + + def _delete_name(self, name): + """Delete all data associated with *name*. + + It is not an error if the name does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _delete_rdataset(self, name, rdtype, covers): + """Delete all data associated with *name*, *rdtype*, and *covers*. + + It is not an error if the rdataset does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _name_exists(self, name): + """Does name exist? + + Returns a bool. + """ + raise NotImplementedError # pragma: no cover + + def _changed(self): + """Has this transaction changed anything?""" + raise NotImplementedError # pragma: no cover + + def _end_transaction(self, commit): + """End the transaction. + + *commit*, a bool. If ``True``, commit the transaction, otherwise + roll it back. + + If committing and the commit fails, then roll back and raise an + exception. + """ + raise NotImplementedError # pragma: no cover + + def _set_origin(self, origin): + """Set the origin. + + This method is called when reading a possibly relativized + source, and an origin setting operation occurs (e.g. $ORIGIN + in a zone file). 
+ """ + raise NotImplementedError # pragma: no cover + + def _iterate_rdatasets(self): + """Return an iterator that yields (name, rdataset) tuples.""" + raise NotImplementedError # pragma: no cover + + def _iterate_names(self): + """Return an iterator that yields a name.""" + raise NotImplementedError # pragma: no cover + + def _get_node(self, name): + """Return the node at *name*, if any. + + Returns a node or ``None``. + """ + raise NotImplementedError # pragma: no cover + + # + # Low-level API with a default implementation, in case a subclass needs + # to override. + # + + def _origin_information(self): + # This is only used by _add() + return self.manager.origin_information() diff --git a/venv/lib/python3.12/site-packages/dns/tsig.py b/venv/lib/python3.12/site-packages/dns/tsig.py new file mode 100644 index 0000000..333f9aa --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/tsig.py @@ -0,0 +1,359 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS TSIG support.""" + +import base64 +import hashlib +import hmac +import struct + +import dns.exception +import dns.name +import dns.rcode +import dns.rdataclass +import dns.rdatatype + + +class BadTime(dns.exception.DNSException): + """The current time is not within the TSIG's validity time.""" + + +class BadSignature(dns.exception.DNSException): + """The TSIG signature fails to verify.""" + + +class BadKey(dns.exception.DNSException): + """The TSIG record owner name does not match the key.""" + + +class BadAlgorithm(dns.exception.DNSException): + """The TSIG algorithm does not match the key.""" + + +class PeerError(dns.exception.DNSException): + """Base class for all TSIG errors generated by the remote peer""" + + +class PeerBadKey(PeerError): + """The peer didn't know the key we used""" + + +class PeerBadSignature(PeerError): + """The peer didn't like the signature we sent""" + + +class PeerBadTime(PeerError): + """The peer didn't like the time we sent""" + + +class PeerBadTruncation(PeerError): + """The peer didn't like amount of truncation in the TSIG we sent""" + + +# TSIG Algorithms + +HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") +HMAC_SHA1 = dns.name.from_text("hmac-sha1") +HMAC_SHA224 = dns.name.from_text("hmac-sha224") +HMAC_SHA256 = dns.name.from_text("hmac-sha256") +HMAC_SHA256_128 = dns.name.from_text("hmac-sha256-128") +HMAC_SHA384 = dns.name.from_text("hmac-sha384") +HMAC_SHA384_192 = dns.name.from_text("hmac-sha384-192") +HMAC_SHA512 = dns.name.from_text("hmac-sha512") +HMAC_SHA512_256 = dns.name.from_text("hmac-sha512-256") +GSS_TSIG = dns.name.from_text("gss-tsig") + +default_algorithm = HMAC_SHA256 + +mac_sizes = { + HMAC_SHA1: 20, + HMAC_SHA224: 28, + HMAC_SHA256: 32, + HMAC_SHA256_128: 16, + HMAC_SHA384: 48, + HMAC_SHA384_192: 24, + HMAC_SHA512: 64, + HMAC_SHA512_256: 32, + HMAC_MD5: 16, + GSS_TSIG: 128, # This is what we assume to be the worst case! +} + + +class GSSTSig: + """ + GSS-TSIG TSIG implementation. This uses the GSS-API context established + in the TKEY message handshake to sign messages using GSS-API message + integrity codes, per the RFC. + + In order to avoid a direct GSSAPI dependency, the keyring holds a ref + to the GSSAPI object required, rather than the key itself. 
+ """ + + def __init__(self, gssapi_context): + self.gssapi_context = gssapi_context + self.data = b"" + self.name = "gss-tsig" + + def update(self, data): + self.data += data + + def sign(self): + # defer to the GSSAPI function to sign + return self.gssapi_context.get_signature(self.data) + + def verify(self, expected): + try: + # defer to the GSSAPI function to verify + return self.gssapi_context.verify_signature(self.data, expected) + except Exception: + # note the usage of a bare exception + raise BadSignature + + +class GSSTSigAdapter: + def __init__(self, keyring): + self.keyring = keyring + + def __call__(self, message, keyname): + if keyname in self.keyring: + key = self.keyring[keyname] + if isinstance(key, Key) and key.algorithm == GSS_TSIG: + if message: + GSSTSigAdapter.parse_tkey_and_step(key, message, keyname) + return key + else: + return None + + @classmethod + def parse_tkey_and_step(cls, key, message, keyname): + # if the message is a TKEY type, absorb the key material + # into the context using step(); this is used to allow the + # client to complete the GSSAPI negotiation before attempting + # to verify the signed response to a TKEY message exchange + try: + rrset = message.find_rrset( + message.answer, keyname, dns.rdataclass.ANY, dns.rdatatype.TKEY + ) + if rrset: + token = rrset[0].key + gssapi_context = key.secret + return gssapi_context.step(token) + except KeyError: + pass + + +class HMACTSig: + """ + HMAC TSIG implementation. This uses the HMAC python module to handle the + sign/verify operations. + """ + + _hashes = { + HMAC_SHA1: hashlib.sha1, + HMAC_SHA224: hashlib.sha224, + HMAC_SHA256: hashlib.sha256, + HMAC_SHA256_128: (hashlib.sha256, 128), + HMAC_SHA384: hashlib.sha384, + HMAC_SHA384_192: (hashlib.sha384, 192), + HMAC_SHA512: hashlib.sha512, + HMAC_SHA512_256: (hashlib.sha512, 256), + HMAC_MD5: hashlib.md5, + } + + def __init__(self, key, algorithm): + try: + hashinfo = self._hashes[algorithm] + except KeyError: + raise NotImplementedError(f"TSIG algorithm {algorithm} is not supported") + + # create the HMAC context + if isinstance(hashinfo, tuple): + self.hmac_context = hmac.new(key, digestmod=hashinfo[0]) + self.size = hashinfo[1] + else: + self.hmac_context = hmac.new(key, digestmod=hashinfo) + self.size = None + self.name = self.hmac_context.name + if self.size: + self.name += f"-{self.size}" + + def update(self, data): + return self.hmac_context.update(data) + + def sign(self): + # defer to the HMAC digest() function for that digestmod + digest = self.hmac_context.digest() + if self.size: + digest = digest[: (self.size // 8)] + return digest + + def verify(self, expected): + # re-digest and compare the results + mac = self.sign() + if not hmac.compare_digest(mac, expected): + raise BadSignature + + +def _digest(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=None): + """Return a context containing the TSIG rdata for the input parameters + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + first = not (ctx and multi) + if first: + ctx = get_context(key) + if request_mac: + ctx.update(struct.pack("!H", len(request_mac))) + ctx.update(request_mac) + assert ctx is not None # for type checkers + ctx.update(struct.pack("!H", rdata.original_id)) + ctx.update(wire[2:]) + if first: + ctx.update(key.name.to_digestable()) + ctx.update(struct.pack("!H", dns.rdataclass.ANY)) + ctx.update(struct.pack("!I", 0)) + if time is None: + 
+        time = rdata.time_signed
+    upper_time = (time >> 32) & 0xFFFF
+    lower_time = time & 0xFFFFFFFF
+    time_encoded = struct.pack("!HIH", upper_time, lower_time, rdata.fudge)
+    other_len = len(rdata.other)
+    if other_len > 65535:
+        raise ValueError("TSIG Other Data is > 65535 bytes")
+    if first:
+        ctx.update(key.algorithm.to_digestable() + time_encoded)
+        ctx.update(struct.pack("!HH", rdata.error, other_len) + rdata.other)
+    else:
+        ctx.update(time_encoded)
+    return ctx
+
+
+def _maybe_start_digest(key, mac, multi):
+    """If this is the first message in a multi-message sequence,
+    start a new context.
+    @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object
+    """
+    if multi:
+        ctx = get_context(key)
+        ctx.update(struct.pack("!H", len(mac)))
+        ctx.update(mac)
+        return ctx
+    else:
+        return None
+
+
+def sign(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=False):
+    """Return a (tsig, ctx) tuple containing the signed TSIG rdata for the
+    input parameters and the TSIG digest context to use for the next message
+    in a multi-message sequence (or ``None`` if there is no such context).
+    @rtype: (TSIG rdata, dns.tsig.HMACTSig or dns.tsig.GSSTSig object or None)
+    @raises ValueError: I{other_data} is too long
+    @raises NotImplementedError: I{algorithm} is not supported
+    """
+
+    ctx = _digest(wire, key, rdata, time, request_mac, ctx, multi)
+    mac = ctx.sign()
+    tsig = rdata.replace(time_signed=time, mac=mac)
+
+    return (tsig, _maybe_start_digest(key, mac, multi))
+
+
+def validate(
+    wire, key, owner, rdata, now, request_mac, tsig_start, ctx=None, multi=False
+):
+    """Validate the specified TSIG rdata against the other input parameters.
+
+    @raises FormError: The TSIG is badly formed.
+    @raises BadTime: There is too much time skew between the client and the
+    server.
+    @raises BadSignature: The TSIG signature did not validate
+    @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object"""
+
+    (adcount,) = struct.unpack("!H", wire[10:12])
+    if adcount == 0:
+        raise dns.exception.FormError
+    adcount -= 1
+    new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start]
+    if rdata.error != 0:
+        if rdata.error == dns.rcode.BADSIG:
+            raise PeerBadSignature
+        elif rdata.error == dns.rcode.BADKEY:
+            raise PeerBadKey
+        elif rdata.error == dns.rcode.BADTIME:
+            raise PeerBadTime
+        elif rdata.error == dns.rcode.BADTRUNC:
+            raise PeerBadTruncation
+        else:
+            raise PeerError(f"unknown TSIG error code {rdata.error}")
+    if abs(rdata.time_signed - now) > rdata.fudge:
+        raise BadTime
+    if key.name != owner:
+        raise BadKey
+    if key.algorithm != rdata.algorithm:
+        raise BadAlgorithm
+    ctx = _digest(new_wire, key, rdata, None, request_mac, ctx, multi)
+    ctx.verify(rdata.mac)
+    return _maybe_start_digest(key, rdata.mac, multi)
+
+
+def get_context(key):
+    """Returns an HMAC context for the specified key.
+
+    @rtype: HMAC context
+    @raises NotImplementedError: I{algorithm} is not supported
+    """
+
+    if key.algorithm == GSS_TSIG:
+        return GSSTSig(key.secret)
+    else:
+        return HMACTSig(key.secret, key.algorithm)
+
+
+class Key:
+    def __init__(
+        self,
+        name: dns.name.Name | str,
+        secret: bytes | str,
+        algorithm: dns.name.Name | str = default_algorithm,
+    ):
+        if isinstance(name, str):
+            name = dns.name.from_text(name)
+        self.name = name
+        if isinstance(secret, str):
+            secret = base64.decodebytes(secret.encode())
+        self.secret = secret
+        if isinstance(algorithm, str):
+            algorithm = dns.name.from_text(algorithm)
+        self.algorithm = algorithm
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, Key)
+            and self.name == other.name
+            and self.secret == other.secret
+            and self.algorithm == other.algorithm
+        )
+
+    def __repr__(self):
+        r = f"<DNS key name='{self.name}', algorithm='{self.algorithm}'"
+        if self.algorithm != GSS_TSIG:
+            r += f", secret='{base64.b64encode(self.secret).decode()}'"
+        r += ">"
+        return r
diff --git a/venv/lib/python3.12/site-packages/dns/tsigkeyring.py b/venv/lib/python3.12/site-packages/dns/tsigkeyring.py
new file mode 100644
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/tsigkeyring.py
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""A place to store TSIG keys."""
+
+import base64
+from typing import Any, Dict
+
+import dns.name
+import dns.tsig
+
+
+def from_text(textring: Dict[str, Any]) -> Dict[dns.name.Name, Any]:
+    """Convert a dictionary containing (textual DNS name, base64 secret)
+    pairs into a binary keyring which has (dns.name.Name, bytes) pairs, or
+    a dictionary containing (textual DNS name, (algorithm, base64 secret))
+    pairs into a binary keyring which has (dns.name.Name, dns.tsig.Key) pairs.
+    @rtype: dict"""
+
+    keyring: Dict[dns.name.Name, Any] = {}
+    for name, value in textring.items():
+        kname = dns.name.from_text(name)
+        if isinstance(value, str):
+            keyring[kname] = dns.tsig.Key(kname, value).secret
+        else:
+            (algorithm, secret) = value
+            keyring[kname] = dns.tsig.Key(kname, secret, algorithm)
+    return keyring
+
+
+def to_text(keyring: Dict[dns.name.Name, Any]) -> Dict[str, Any]:
+    """Convert a dictionary containing (dns.name.Name, dns.tsig.Key) pairs
+    into a text keyring which has (textual DNS name, (textual algorithm,
+    base64 secret)) pairs, or a dictionary containing (dns.name.Name, bytes)
+    pairs into a text keyring which has (textual DNS name, base64 secret) pairs.
+    @rtype: dict"""
+
+    textring = {}
+
+    def b64encode(secret):
+        return base64.encodebytes(secret).decode().rstrip()
+
+    for name, key in keyring.items():
+        tname = name.to_text()
+        if isinstance(key, bytes):
+            textring[tname] = b64encode(key)
+        else:
+            if isinstance(key.secret, bytes):
+                text_secret = b64encode(key.secret)
+            else:
+                text_secret = str(key.secret)
+
+            textring[tname] = (key.algorithm.to_text(), text_secret)
+    return textring
diff --git a/venv/lib/python3.12/site-packages/dns/ttl.py b/venv/lib/python3.12/site-packages/dns/ttl.py
new file mode 100644
index 0000000..16289cd
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dns/ttl.py
@@ -0,0 +1,90 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
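A minimal usage sketch for the TSIG key machinery above (``dns.tsig.Key`` plus the ``dns.tsigkeyring`` converters); the key name and base64 secret are invented for illustration:

import dns.name
import dns.tsig
import dns.tsigkeyring

# from_text() accepts bare base64 secrets or (algorithm, secret) pairs;
# the tuple form yields dns.tsig.Key objects.
keyring = dns.tsigkeyring.from_text(
    {"transfer-key.": ("hmac-sha256", "SxEKsXm4b8M7C4whUOOywzzBfVyoxjRIkAdcdqSyCmI=")}
)
key = keyring[dns.name.from_text("transfer-key.")]
assert key.algorithm == dns.tsig.HMAC_SHA256
# to_text() round-trips the keyring back to its textual form.
print(dns.tsigkeyring.to_text(keyring))

A keyring built this way can be handed to the *keyring* parameters that appear throughout dnspython, for example ``dns.update.UpdateMessage`` later in this commit.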
+ +"""DNS TTL conversion.""" + +import dns.exception + +# Technically TTLs are supposed to be between 0 and 2**31 - 1, with values +# greater than that interpreted as 0, but we do not impose this policy here +# as values > 2**31 - 1 occur in real world data. +# +# We leave it to applications to impose tighter bounds if desired. +MAX_TTL = 2**32 - 1 + + +class BadTTL(dns.exception.SyntaxError): + """DNS TTL value is not well-formed.""" + + +def from_text(text: str) -> int: + """Convert the text form of a TTL to an integer. + + The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. + + *text*, a ``str``, the textual TTL. + + Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. + + Returns an ``int``. + """ + + if text.isdigit(): + total = int(text) + elif len(text) == 0: + raise BadTTL + else: + total = 0 + current = 0 + need_digit = True + for c in text: + if c.isdigit(): + current *= 10 + current += int(c) + need_digit = False + else: + if need_digit: + raise BadTTL + c = c.lower() + if c == "w": + total += current * 604800 + elif c == "d": + total += current * 86400 + elif c == "h": + total += current * 3600 + elif c == "m": + total += current * 60 + elif c == "s": + total += current + else: + raise BadTTL(f"unknown unit '{c}'") + current = 0 + need_digit = True + if not current == 0: + raise BadTTL("trailing integer") + if total < 0 or total > MAX_TTL: + raise BadTTL("TTL should be between 0 and 2**32 - 1 (inclusive)") + return total + + +def make(value: int | str) -> int: + if isinstance(value, int): + return value + elif isinstance(value, str): + return from_text(value) + else: + raise ValueError("cannot convert value to TTL") diff --git a/venv/lib/python3.12/site-packages/dns/update.py b/venv/lib/python3.12/site-packages/dns/update.py new file mode 100644 index 0000000..0e4aee4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/update.py @@ -0,0 +1,389 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS Dynamic Update Support""" + +from typing import Any, List + +import dns.enum +import dns.exception +import dns.message +import dns.name +import dns.opcode +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.tsig + + +class UpdateSection(dns.enum.IntEnum): + """Update sections""" + + ZONE = 0 + PREREQ = 1 + UPDATE = 2 + ADDITIONAL = 3 + + @classmethod + def _maximum(cls): + return 3 + + +class UpdateMessage(dns.message.Message): # lgtm[py/missing-equals] + # ignore the mypy error here as we mean to use a different enum + _section_enum = UpdateSection # type: ignore + + def __init__( + self, + zone: dns.name.Name | str | None = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + keyring: Any | None = None, + keyname: dns.name.Name | None = None, + keyalgorithm: dns.name.Name | str = dns.tsig.default_algorithm, + id: int | None = None, + ): + """Initialize a new DNS Update object. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone + which is being updated. ``None`` should only be used by dnspython's + message constructors, as a zone is required for the convenience + methods like ``add()``, ``replace()``, etc. + + *rdclass*, an ``int`` or ``str``, the class of the zone. + + The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to + ``use_tsig()``; see its documentation for details. + """ + super().__init__(id=id) + self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) + if isinstance(zone, str): + zone = dns.name.from_text(zone) + self.origin = zone + rdclass = dns.rdataclass.RdataClass.make(rdclass) + self.zone_rdclass = rdclass + if self.origin: + self.find_rrset( + self.zone, + self.origin, + rdclass, + dns.rdatatype.SOA, + create=True, + force_unique=True, + ) + if keyring is not None: + self.use_tsig(keyring, keyname, algorithm=keyalgorithm) + + @property + def zone(self) -> List[dns.rrset.RRset]: + """The zone section.""" + return self.sections[0] + + @zone.setter + def zone(self, v): + self.sections[0] = v + + @property + def prerequisite(self) -> List[dns.rrset.RRset]: + """The prerequisite section.""" + return self.sections[1] + + @prerequisite.setter + def prerequisite(self, v): + self.sections[1] = v + + @property + def update(self) -> List[dns.rrset.RRset]: + """The update section.""" + return self.sections[2] + + @update.setter + def update(self, v): + self.sections[2] = v + + def _add_rr(self, name, ttl, rd, deleting=None, section=None): + """Add a single RR to the update section.""" + + if section is None: + section = self.update + covers = rd.covers() + rrset = self.find_rrset( + section, name, self.zone_rdclass, rd.rdtype, covers, deleting, True, True + ) + rrset.add(rd, ttl) + + def _add(self, replace, section, name, *args): + """Add records. + + *replace* is the replacement mode. If ``False``, + RRs are added to an existing RRset; if ``True``, the RRset + is replaced with the specified contents. The second + argument is the section to add to. The third argument + is always a name. The other arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... 
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + if replace: + self.delete(name, rds.rdtype) + for rd in rds: + self._add_rr(name, rds.ttl, rd, section=section) + else: + args = list(args) + ttl = int(args.pop(0)) + if isinstance(args[0], dns.rdata.Rdata): + if replace: + self.delete(name, args[0].rdtype) + for rd in args: + self._add_rr(name, ttl, rd, section=section) + else: + rdtype = dns.rdatatype.RdataType.make(args.pop(0)) + if replace: + self.delete(name, rdtype) + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, self.origin) + self._add_rr(name, ttl, rd, section=section) + + def add(self, name: dns.name.Name | str, *args: Any) -> None: + """Add records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + self._add(False, self.update, name, *args) + + def delete(self, name: dns.name.Name | str, *args: Any) -> None: + """Delete records. + + The first argument is always a name. The other + arguments can be: + + - *empty* + + - rdataset... + + - rdata... + + - rdtype, [string...] + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset( + self.update, + name, + dns.rdataclass.ANY, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, + True, + ) + elif isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + for rd in rds: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + largs = list(args) + if isinstance(largs[0], dns.rdata.Rdata): + for rd in largs: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + rdtype = dns.rdatatype.RdataType.make(largs.pop(0)) + if len(largs) == 0: + self.find_rrset( + self.update, + name, + self.zone_rdclass, + rdtype, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, + True, + ) + else: + for s in largs: + rd = dns.rdata.from_text( + self.zone_rdclass, + rdtype, + s, # type: ignore[arg-type] + self.origin, + ) + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + + def replace(self, name: dns.name.Name | str, *args: Any) -> None: + """Replace records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add. + """ + + self._add(True, self.update, name, *args) + + def present(self, name: dns.name.Name | str, *args: Any) -> None: + """Require that an owner name (and optionally an rdata type, + or specific rdataset) exists as a prerequisite to the + execution of the update. + + The first argument is always a name. + The other arguments can be: + + - rdataset... + + - rdata... + + - rdtype, string... 
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.ANY, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + None, + True, + True, + ) + elif ( + isinstance(args[0], dns.rdataset.Rdataset) + or isinstance(args[0], dns.rdata.Rdata) + or len(args) > 1 + ): + if not isinstance(args[0], dns.rdataset.Rdataset): + # Add a 0 TTL + largs = list(args) + largs.insert(0, 0) # type: ignore[arg-type] + self._add(False, self.prerequisite, name, *largs) + else: + self._add(False, self.prerequisite, name, *args) + else: + rdtype = dns.rdatatype.RdataType.make(args[0]) + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.ANY, + rdtype, + dns.rdatatype.NONE, + None, + True, + True, + ) + + def absent( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str | None = None, + ) -> None: + """Require that an owner name (and optionally an rdata type) does + not exist as a prerequisite to the execution of the update.""" + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if rdtype is None: + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.NONE, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + None, + True, + True, + ) + else: + rdtype = dns.rdatatype.RdataType.make(rdtype) + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.NONE, + rdtype, + dns.rdatatype.NONE, + None, + True, + True, + ) + + def _get_one_rr_per_rrset(self, value): + # Updates are always one_rr_per_rrset + return True + + def _parse_rr_header(self, section, name, rdclass, rdtype): # pyright: ignore + deleting = None + empty = False + if section == UpdateSection.ZONE: + if ( + dns.rdataclass.is_metaclass(rdclass) + or rdtype != dns.rdatatype.SOA + or self.zone + ): + raise dns.exception.FormError + else: + if not self.zone: + raise dns.exception.FormError + if rdclass in (dns.rdataclass.ANY, dns.rdataclass.NONE): + deleting = rdclass + rdclass = self.zone[0].rdclass + empty = ( + deleting == dns.rdataclass.ANY or section == UpdateSection.PREREQ + ) + return (rdclass, rdtype, deleting, empty) + + +# backwards compatibility +Update = UpdateMessage + +### BEGIN generated UpdateSection constants + +ZONE = UpdateSection.ZONE +PREREQ = UpdateSection.PREREQ +UPDATE = UpdateSection.UPDATE +ADDITIONAL = UpdateSection.ADDITIONAL + +### END generated UpdateSection constants diff --git a/venv/lib/python3.12/site-packages/dns/version.py b/venv/lib/python3.12/site-packages/dns/version.py new file mode 100644 index 0000000..e11dd29 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/version.py @@ -0,0 +1,42 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""dnspython release version information.""" + +#: MAJOR +MAJOR = 2 +#: MINOR +MINOR = 8 +#: MICRO +MICRO = 0 +#: RELEASELEVEL +RELEASELEVEL = 0x0F +#: SERIAL +SERIAL = 0 + +if RELEASELEVEL == 0x0F: # pragma: no cover lgtm[py/unreachable-statement] + #: version + version = f"{MAJOR}.{MINOR}.{MICRO}" # lgtm[py/unreachable-statement] +elif RELEASELEVEL == 0x00: # pragma: no cover lgtm[py/unreachable-statement] + version = f"{MAJOR}.{MINOR}.{MICRO}dev{SERIAL}" # lgtm[py/unreachable-statement] +elif RELEASELEVEL == 0x0C: # pragma: no cover lgtm[py/unreachable-statement] + version = f"{MAJOR}.{MINOR}.{MICRO}rc{SERIAL}" # lgtm[py/unreachable-statement] +else: # pragma: no cover lgtm[py/unreachable-statement] + version = f"{MAJOR}.{MINOR}.{MICRO}{RELEASELEVEL:x}{SERIAL}" # lgtm[py/unreachable-statement] + +#: hexversion +hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | SERIAL diff --git a/venv/lib/python3.12/site-packages/dns/versioned.py b/venv/lib/python3.12/site-packages/dns/versioned.py new file mode 100644 index 0000000..3644711 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/versioned.py @@ -0,0 +1,320 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""DNS Versioned Zones.""" + +import collections +import threading +from typing import Callable, Deque, Set, cast + +import dns.exception +import dns.name +import dns.node +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.zone + + +class UseTransaction(dns.exception.DNSException): + """To alter a versioned zone, use a transaction.""" + + +# Backwards compatibility +Node = dns.zone.VersionedNode +ImmutableNode = dns.zone.ImmutableVersionedNode +Version = dns.zone.Version +WritableVersion = dns.zone.WritableVersion +ImmutableVersion = dns.zone.ImmutableVersion +Transaction = dns.zone.Transaction + + +class Zone(dns.zone.Zone): # lgtm[py/missing-equals] + __slots__ = [ + "_versions", + "_versions_lock", + "_write_txn", + "_write_waiters", + "_write_event", + "_pruning_policy", + "_readers", + ] + + node_factory: Callable[[], dns.node.Node] = Node + + def __init__( + self, + origin: dns.name.Name | str | None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + pruning_policy: Callable[["Zone", Version], bool | None] | None = None, + ): + """Initialize a versioned zone object. + + *origin* is the origin of the zone. It may be a ``dns.name.Name``, + a ``str``, or ``None``. If ``None``, then the zone's origin will + be set by the first ``$ORIGIN`` line in a zone file. + + *rdclass*, an ``int``, the zone's rdata class; the default is class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + + *pruning policy*, a function taking a ``Zone`` and a ``Version`` and returning + a ``bool``, or ``None``. Should the version be pruned? If ``None``, + the default policy, which retains one version is used. 
+ """ + super().__init__(origin, rdclass, relativize) + self._versions: Deque[Version] = collections.deque() + self._version_lock = threading.Lock() + if pruning_policy is None: + self._pruning_policy = self._default_pruning_policy + else: + self._pruning_policy = pruning_policy + self._write_txn: Transaction | None = None + self._write_event: threading.Event | None = None + self._write_waiters: Deque[threading.Event] = collections.deque() + self._readers: Set[Transaction] = set() + self._commit_version_unlocked( + None, WritableVersion(self, replacement=True), origin + ) + + def reader( + self, id: int | None = None, serial: int | None = None + ) -> Transaction: # pylint: disable=arguments-differ + if id is not None and serial is not None: + raise ValueError("cannot specify both id and serial") + with self._version_lock: + if id is not None: + version = None + for v in reversed(self._versions): + if v.id == id: + version = v + break + if version is None: + raise KeyError("version not found") + elif serial is not None: + if self.relativize: + oname = dns.name.empty + else: + assert self.origin is not None + oname = self.origin + version = None + for v in reversed(self._versions): + n = v.nodes.get(oname) + if n: + rds = n.get_rdataset(self.rdclass, dns.rdatatype.SOA) + if rds is None: + continue + soa = cast(dns.rdtypes.ANY.SOA.SOA, rds[0]) + if rds and soa.serial == serial: + version = v + break + if version is None: + raise KeyError("serial not found") + else: + version = self._versions[-1] + txn = Transaction(self, False, version) + self._readers.add(txn) + return txn + + def writer(self, replacement: bool = False) -> Transaction: + event = None + while True: + with self._version_lock: + # Checking event == self._write_event ensures that either + # no one was waiting before we got lucky and found no write + # txn, or we were the one who was waiting and got woken up. + # This prevents "taking cuts" when creating a write txn. + if self._write_txn is None and event == self._write_event: + # Creating the transaction defers version setup + # (i.e. copying the nodes dictionary) until we + # give up the lock, so that we hold the lock as + # short a time as possible. This is why we call + # _setup_version() below. + self._write_txn = Transaction( + self, replacement, make_immutable=True + ) + # give up our exclusive right to make a Transaction + self._write_event = None + break + # Someone else is writing already, so we will have to + # wait, but we want to do the actual wait outside the + # lock. + event = threading.Event() + self._write_waiters.append(event) + # wait (note we gave up the lock!) + # + # We only wake one sleeper at a time, so it's important + # that no event waiter can exit this method (e.g. via + # cancellation) without returning a transaction or waking + # someone else up. + # + # This is not a problem with Threading module threads as + # they cannot be canceled, but could be an issue with trio + # tasks when we do the async version of writer(). + # I.e. we'd need to do something like: + # + # try: + # event.wait() + # except trio.Cancelled: + # with self._version_lock: + # self._maybe_wakeup_one_waiter_unlocked() + # raise + # + event.wait() + # Do the deferred version setup. 
+ self._write_txn._setup_version() + return self._write_txn + + def _maybe_wakeup_one_waiter_unlocked(self): + if len(self._write_waiters) > 0: + self._write_event = self._write_waiters.popleft() + self._write_event.set() + + # pylint: disable=unused-argument + def _default_pruning_policy(self, zone, version): + return True + + # pylint: enable=unused-argument + + def _prune_versions_unlocked(self): + assert len(self._versions) > 0 + # Don't ever prune a version greater than or equal to one that + # a reader has open. This pins versions in memory while the + # reader is open, and importantly lets the reader open a txn on + # a successor version (e.g. if generating an IXFR). + # + # Note our definition of least_kept also ensures we do not try to + # delete the greatest version. + if len(self._readers) > 0: + least_kept = min(txn.version.id for txn in self._readers) # pyright: ignore + else: + least_kept = self._versions[-1].id + while self._versions[0].id < least_kept and self._pruning_policy( + self, self._versions[0] + ): + self._versions.popleft() + + def set_max_versions(self, max_versions: int | None) -> None: + """Set a pruning policy that retains up to the specified number + of versions + """ + if max_versions is not None and max_versions < 1: + raise ValueError("max versions must be at least 1") + if max_versions is None: + # pylint: disable=unused-argument + def policy(zone, _): # pyright: ignore + return False + + else: + + def policy(zone, _): + return len(zone._versions) > max_versions + + self.set_pruning_policy(policy) + + def set_pruning_policy( + self, policy: Callable[["Zone", Version], bool | None] | None + ) -> None: + """Set the pruning policy for the zone. + + The *policy* function takes a `Version` and returns `True` if + the version should be pruned, and `False` otherwise. `None` + may also be specified for policy, in which case the default policy + is used. + + Pruning checking proceeds from the least version and the first + time the function returns `False`, the checking stops. I.e. the + retained versions are always a consecutive sequence. + """ + if policy is None: + policy = self._default_pruning_policy + with self._version_lock: + self._pruning_policy = policy + self._prune_versions_unlocked() + + def _end_read(self, txn): + with self._version_lock: + self._readers.remove(txn) + self._prune_versions_unlocked() + + def _end_write_unlocked(self, txn): + assert self._write_txn == txn + self._write_txn = None + self._maybe_wakeup_one_waiter_unlocked() + + def _end_write(self, txn): + with self._version_lock: + self._end_write_unlocked(txn) + + def _commit_version_unlocked(self, txn, version, origin): + self._versions.append(version) + self._prune_versions_unlocked() + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + # txn can be None in __init__ when we make the empty version. 
+ if txn is not None: + self._end_write_unlocked(txn) + + def _commit_version(self, txn, version, origin): + with self._version_lock: + self._commit_version_unlocked(txn, version, origin) + + def _get_next_version_id(self): + if len(self._versions) > 0: + id = self._versions[-1].id + 1 + else: + id = 1 + return id + + def find_node( + self, name: dns.name.Name | str, create: bool = False + ) -> dns.node.Node: + if create: + raise UseTransaction + return super().find_node(name) + + def delete_node(self, name: dns.name.Name | str) -> None: + raise UseTransaction + + def find_rdataset( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise UseTransaction + rdataset = super().find_rdataset(name, rdtype, covers) + return dns.rdataset.ImmutableRdataset(rdataset) + + def get_rdataset( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset | None: + if create: + raise UseTransaction + rdataset = super().get_rdataset(name, rdtype, covers) + if rdataset is not None: + return dns.rdataset.ImmutableRdataset(rdataset) + else: + return None + + def delete_rdataset( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + ) -> None: + raise UseTransaction + + def replace_rdataset( + self, name: dns.name.Name | str, replacement: dns.rdataset.Rdataset + ) -> None: + raise UseTransaction diff --git a/venv/lib/python3.12/site-packages/dns/win32util.py b/venv/lib/python3.12/site-packages/dns/win32util.py new file mode 100644 index 0000000..2d77b4c --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/win32util.py @@ -0,0 +1,438 @@ +import sys + +import dns._features + +# pylint: disable=W0612,W0613,C0301 + +if sys.platform == "win32": + import ctypes + import ctypes.wintypes as wintypes + import winreg # pylint: disable=import-error + from enum import IntEnum + + import dns.name + + # Keep pylint quiet on non-windows. + try: + _ = WindowsError # pylint: disable=used-before-assignment + except NameError: + WindowsError = Exception + + class ConfigMethod(IntEnum): + Registry = 1 + WMI = 2 + Win32 = 3 + + class DnsInfo: + def __init__(self): + self.domain = None + self.nameservers = [] + self.search = [] + + _config_method = ConfigMethod.Registry + + if dns._features.have("wmi"): + import threading + + import pythoncom # pylint: disable=import-error + import wmi # pylint: disable=import-error + + # Prefer WMI by default if wmi is installed. 
+ _config_method = ConfigMethod.WMI + + class _WMIGetter(threading.Thread): + # pylint: disable=possibly-used-before-assignment + def __init__(self): + super().__init__() + self.info = DnsInfo() + + def run(self): + pythoncom.CoInitialize() + try: + system = wmi.WMI() + for interface in system.Win32_NetworkAdapterConfiguration(): + if interface.IPEnabled and interface.DNSServerSearchOrder: + self.info.nameservers = list(interface.DNSServerSearchOrder) + if interface.DNSDomain: + self.info.domain = _config_domain(interface.DNSDomain) + if interface.DNSDomainSuffixSearchOrder: + self.info.search = [ + _config_domain(x) + for x in interface.DNSDomainSuffixSearchOrder + ] + break + finally: + pythoncom.CoUninitialize() + + def get(self): + # We always run in a separate thread to avoid any issues with + # the COM threading model. + self.start() + self.join() + return self.info + + else: + + class _WMIGetter: # type: ignore + pass + + def _config_domain(domain): + # Sometimes DHCP servers add a '.' prefix to the default domain, and + # Windows just stores such values in the registry (see #687). + # Check for this and fix it. + if domain.startswith("."): + domain = domain[1:] + return dns.name.from_text(domain) + + class _RegistryGetter: + def __init__(self): + self.info = DnsInfo() + + def _split(self, text): + # The windows registry has used both " " and "," as a delimiter, and while + # it is currently using "," in Windows 10 and later, updates can seemingly + # leave a space in too, e.g. "a, b". So we just convert all commas to + # spaces, and use split() in its default configuration, which splits on + # all whitespace and ignores empty strings. + return text.replace(",", " ").split() + + def _config_nameservers(self, nameservers): + for ns in self._split(nameservers): + if ns not in self.info.nameservers: + self.info.nameservers.append(ns) + + def _config_search(self, search): + for s in self._split(search): + s = _config_domain(s) + if s not in self.info.search: + self.info.search.append(s) + + def _config_fromkey(self, key, always_try_domain): + try: + servers, _ = winreg.QueryValueEx(key, "NameServer") + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + if servers or always_try_domain: + try: + dom, _ = winreg.QueryValueEx(key, "Domain") + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + else: + try: + servers, _ = winreg.QueryValueEx(key, "DhcpNameServer") + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + try: + dom, _ = winreg.QueryValueEx(key, "DhcpDomain") + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + try: + search, _ = winreg.QueryValueEx(key, "SearchList") + except WindowsError: + search = None + if search is None: + try: + search, _ = winreg.QueryValueEx(key, "DhcpSearchList") + except WindowsError: + search = None + if search: + self._config_search(search) + + def _is_nic_enabled(self, lm, guid): + # Look in the Windows Registry to determine whether the network + # interface corresponding to the given guid is enabled. + # + # (Code contributed by Paul Marks, thanks!) + # + try: + # This hard-coded location seems to be consistent, at least + # from Windows 2000 through Vista. 
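+            # {4D36E972-E325-11CE-BFC1-08002BE10318} is the well-known device
+            # class GUID for network adapters; each interface is a subkey.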
+ connection_key = winreg.OpenKey( + lm, + r"SYSTEM\CurrentControlSet\Control\Network" + r"\{4D36E972-E325-11CE-BFC1-08002BE10318}" + rf"\{guid}\Connection", + ) + + try: + # The PnpInstanceID points to a key inside Enum + (pnp_id, ttype) = winreg.QueryValueEx( + connection_key, "PnpInstanceID" + ) + + if ttype != winreg.REG_SZ: + raise ValueError # pragma: no cover + + device_key = winreg.OpenKey( + lm, rf"SYSTEM\CurrentControlSet\Enum\{pnp_id}" + ) + + try: + # Get ConfigFlags for this device + (flags, ttype) = winreg.QueryValueEx(device_key, "ConfigFlags") + + if ttype != winreg.REG_DWORD: + raise ValueError # pragma: no cover + + # Based on experimentation, bit 0x1 indicates that the + # device is disabled. + # + # XXXRTH I suspect we really want to & with 0x03 so + # that CONFIGFLAGS_REMOVED devices are also ignored, + # but we're shifting to WMI as ConfigFlags is not + # supposed to be used. + return not flags & 0x1 + + finally: + device_key.Close() + finally: + connection_key.Close() + except Exception: # pragma: no cover + return False + + def get(self): + """Extract resolver configuration from the Windows registry.""" + + lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + tcp_params = winreg.OpenKey( + lm, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" + ) + try: + self._config_fromkey(tcp_params, True) + finally: + tcp_params.Close() + interfaces = winreg.OpenKey( + lm, + r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces", + ) + try: + i = 0 + while True: + try: + guid = winreg.EnumKey(interfaces, i) + i += 1 + key = winreg.OpenKey(interfaces, guid) + try: + if not self._is_nic_enabled(lm, guid): + continue + self._config_fromkey(key, False) + finally: + key.Close() + except OSError: + break + finally: + interfaces.Close() + finally: + lm.Close() + return self.info + + class _Win32Getter(_RegistryGetter): + + def get(self): + """Get the attributes using the Windows API.""" + # Load the IP Helper library + # # https://learn.microsoft.com/en-us/windows/win32/api/iphlpapi/nf-iphlpapi-getadaptersaddresses + IPHLPAPI = ctypes.WinDLL("Iphlpapi.dll") + + # Constants + AF_UNSPEC = 0 + ERROR_SUCCESS = 0 + GAA_FLAG_INCLUDE_PREFIX = 0x00000010 + AF_INET = 2 + AF_INET6 = 23 + IF_TYPE_SOFTWARE_LOOPBACK = 24 + + # Define necessary structures + class SOCKADDRV4(ctypes.Structure): + _fields_ = [ + ("sa_family", wintypes.USHORT), + ("sa_data", ctypes.c_ubyte * 14), + ] + + class SOCKADDRV6(ctypes.Structure): + _fields_ = [ + ("sa_family", wintypes.USHORT), + ("sa_data", ctypes.c_ubyte * 26), + ] + + class SOCKET_ADDRESS(ctypes.Structure): + _fields_ = [ + ("lpSockaddr", ctypes.POINTER(SOCKADDRV4)), + ("iSockaddrLength", wintypes.INT), + ] + + class IP_ADAPTER_DNS_SERVER_ADDRESS(ctypes.Structure): + pass # Forward declaration + + IP_ADAPTER_DNS_SERVER_ADDRESS._fields_ = [ + ("Length", wintypes.ULONG), + ("Reserved", wintypes.DWORD), + ("Next", ctypes.POINTER(IP_ADAPTER_DNS_SERVER_ADDRESS)), + ("Address", SOCKET_ADDRESS), + ] + + class IF_LUID(ctypes.Structure): + _fields_ = [("Value", ctypes.c_ulonglong)] + + class NET_IF_NETWORK_GUID(ctypes.Structure): + _fields_ = [("Value", ctypes.c_ubyte * 16)] + + class IP_ADAPTER_PREFIX_XP(ctypes.Structure): + pass # Left undefined here for simplicity + + class IP_ADAPTER_GATEWAY_ADDRESS_LH(ctypes.Structure): + pass # Left undefined here for simplicity + + class IP_ADAPTER_DNS_SUFFIX(ctypes.Structure): + _fields_ = [ + ("String", ctypes.c_wchar * 256), + ("Next", ctypes.POINTER(ctypes.c_void_p)), + ] + + class 
IP_ADAPTER_UNICAST_ADDRESS_LH(ctypes.Structure): + pass # Left undefined here for simplicity + + class IP_ADAPTER_MULTICAST_ADDRESS_XP(ctypes.Structure): + pass # Left undefined here for simplicity + + class IP_ADAPTER_ANYCAST_ADDRESS_XP(ctypes.Structure): + pass # Left undefined here for simplicity + + class IP_ADAPTER_DNS_SERVER_ADDRESS_XP(ctypes.Structure): + pass # Left undefined here for simplicity + + class IP_ADAPTER_ADDRESSES(ctypes.Structure): + pass # Forward declaration + + IP_ADAPTER_ADDRESSES._fields_ = [ + ("Length", wintypes.ULONG), + ("IfIndex", wintypes.DWORD), + ("Next", ctypes.POINTER(IP_ADAPTER_ADDRESSES)), + ("AdapterName", ctypes.c_char_p), + ("FirstUnicastAddress", ctypes.POINTER(SOCKET_ADDRESS)), + ("FirstAnycastAddress", ctypes.POINTER(SOCKET_ADDRESS)), + ("FirstMulticastAddress", ctypes.POINTER(SOCKET_ADDRESS)), + ( + "FirstDnsServerAddress", + ctypes.POINTER(IP_ADAPTER_DNS_SERVER_ADDRESS), + ), + ("DnsSuffix", wintypes.LPWSTR), + ("Description", wintypes.LPWSTR), + ("FriendlyName", wintypes.LPWSTR), + ("PhysicalAddress", ctypes.c_ubyte * 8), + ("PhysicalAddressLength", wintypes.ULONG), + ("Flags", wintypes.ULONG), + ("Mtu", wintypes.ULONG), + ("IfType", wintypes.ULONG), + ("OperStatus", ctypes.c_uint), + # Remaining fields removed for brevity + ] + + def format_ipv4(sockaddr_in): + return ".".join(map(str, sockaddr_in.sa_data[2:6])) + + def format_ipv6(sockaddr_in6): + # The sa_data is: + # + # USHORT sin6_port; + # ULONG sin6_flowinfo; + # IN6_ADDR sin6_addr; + # ULONG sin6_scope_id; + # + # which is 2 + 4 + 16 + 4 = 26 bytes, and we need the plus 6 below + # to be in the sin6_addr range. + parts = [ + sockaddr_in6.sa_data[i + 6] << 8 | sockaddr_in6.sa_data[i + 6 + 1] + for i in range(0, 16, 2) + ] + return ":".join(f"{part:04x}" for part in parts) + + buffer_size = ctypes.c_ulong(15000) + while True: + buffer = ctypes.create_string_buffer(buffer_size.value) + + ret_val = IPHLPAPI.GetAdaptersAddresses( + AF_UNSPEC, + GAA_FLAG_INCLUDE_PREFIX, + None, + buffer, + ctypes.byref(buffer_size), + ) + + if ret_val == ERROR_SUCCESS: + break + elif ret_val != 0x6F: # ERROR_BUFFER_OVERFLOW + print(f"Error retrieving adapter information: {ret_val}") + return + + adapter_addresses = ctypes.cast( + buffer, ctypes.POINTER(IP_ADAPTER_ADDRESSES) + ) + + current_adapter = adapter_addresses + while current_adapter: + + # Skip non-operational adapters. + oper_status = current_adapter.contents.OperStatus + if oper_status != 1: + current_adapter = current_adapter.contents.Next + continue + + # Exclude loopback adapters. + if current_adapter.contents.IfType == IF_TYPE_SOFTWARE_LOOPBACK: + current_adapter = current_adapter.contents.Next + continue + + # Get the domain from the DnsSuffix attribute. 
+ dns_suffix = current_adapter.contents.DnsSuffix + if dns_suffix: + self.info.domain = dns.name.from_text(dns_suffix) + + current_dns_server = current_adapter.contents.FirstDnsServerAddress + while current_dns_server: + sockaddr = current_dns_server.contents.Address.lpSockaddr + sockaddr_family = sockaddr.contents.sa_family + + ip = None + if sockaddr_family == AF_INET: # IPv4 + ip = format_ipv4(sockaddr.contents) + elif sockaddr_family == AF_INET6: # IPv6 + sockaddr = ctypes.cast(sockaddr, ctypes.POINTER(SOCKADDRV6)) + ip = format_ipv6(sockaddr.contents) + + if ip: + if ip not in self.info.nameservers: + self.info.nameservers.append(ip) + + current_dns_server = current_dns_server.contents.Next + + current_adapter = current_adapter.contents.Next + + # Use the registry getter to get the search info, since it is set at the system level. + registry_getter = _RegistryGetter() + info = registry_getter.get() + self.info.search = info.search + return self.info + + def set_config_method(method: ConfigMethod) -> None: + global _config_method + _config_method = method + + def get_dns_info() -> DnsInfo: + """Extract resolver configuration.""" + if _config_method == ConfigMethod.Win32: + getter = _Win32Getter() + elif _config_method == ConfigMethod.WMI: + getter = _WMIGetter() + else: + getter = _RegistryGetter() + return getter.get() diff --git a/venv/lib/python3.12/site-packages/dns/wire.py b/venv/lib/python3.12/site-packages/dns/wire.py new file mode 100644 index 0000000..cd027fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/wire.py @@ -0,0 +1,98 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import contextlib +import struct +from typing import Iterator, Optional, Tuple + +import dns.exception +import dns.name + + +class Parser: + """Helper class for parsing DNS wire format.""" + + def __init__(self, wire: bytes, current: int = 0): + """Initialize a Parser + + *wire*, a ``bytes`` contains the data to be parsed, and possibly other data. + Typically it is the whole message or a slice of it. + + *current*, an `int`, the offset within *wire* where parsing should begin. 
+ """ + self.wire = wire + self.current = 0 + self.end = len(self.wire) + if current: + self.seek(current) + self.furthest = current + + def remaining(self) -> int: + return self.end - self.current + + def get_bytes(self, size: int) -> bytes: + assert size >= 0 + if size > self.remaining(): + raise dns.exception.FormError + output = self.wire[self.current : self.current + size] + self.current += size + self.furthest = max(self.furthest, self.current) + return output + + def get_counted_bytes(self, length_size: int = 1) -> bytes: + length = int.from_bytes(self.get_bytes(length_size), "big") + return self.get_bytes(length) + + def get_remaining(self) -> bytes: + return self.get_bytes(self.remaining()) + + def get_uint8(self) -> int: + return struct.unpack("!B", self.get_bytes(1))[0] + + def get_uint16(self) -> int: + return struct.unpack("!H", self.get_bytes(2))[0] + + def get_uint32(self) -> int: + return struct.unpack("!I", self.get_bytes(4))[0] + + def get_uint48(self) -> int: + return int.from_bytes(self.get_bytes(6), "big") + + def get_struct(self, format: str) -> Tuple: + return struct.unpack(format, self.get_bytes(struct.calcsize(format))) + + def get_name(self, origin: Optional["dns.name.Name"] = None) -> "dns.name.Name": + name = dns.name.from_wire_parser(self) + if origin: + name = name.relativize(origin) + return name + + def seek(self, where: int) -> None: + # Note that seeking to the end is OK! (If you try to read + # after such a seek, you'll get an exception as expected.) + if where < 0 or where > self.end: + raise dns.exception.FormError + self.current = where + + @contextlib.contextmanager + def restrict_to(self, size: int) -> Iterator: + assert size >= 0 + if size > self.remaining(): + raise dns.exception.FormError + saved_end = self.end + try: + self.end = self.current + size + yield + # We make this check here and not in the finally as we + # don't want to raise if we're already raising for some + # other reason. + if self.current != self.end: + raise dns.exception.FormError + finally: + self.end = saved_end + + @contextlib.contextmanager + def restore_furthest(self) -> Iterator: + try: + yield None + finally: + self.current = self.furthest diff --git a/venv/lib/python3.12/site-packages/dns/xfr.py b/venv/lib/python3.12/site-packages/dns/xfr.py new file mode 100644 index 0000000..219fdc8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/xfr.py @@ -0,0 +1,356 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+
+from typing import Any, List, Tuple, cast
+
+import dns.edns
+import dns.exception
+import dns.message
+import dns.name
+import dns.rcode
+import dns.rdata
+import dns.rdataset
+import dns.rdatatype
+import dns.rdtypes
+import dns.rdtypes.ANY
+import dns.rdtypes.ANY.SMIMEA
+import dns.rdtypes.ANY.SOA
+import dns.rdtypes.svcbbase
+import dns.serial
+import dns.transaction
+import dns.tsig
+import dns.zone
+
+
+class TransferError(dns.exception.DNSException):
+    """A zone transfer response got a non-zero rcode."""
+
+    def __init__(self, rcode):
+        message = f"Zone transfer error: {dns.rcode.to_text(rcode)}"
+        super().__init__(message)
+        self.rcode = rcode
+
+
+class SerialWentBackwards(dns.exception.FormError):
+    """The current serial number is less than the serial we know."""
+
+
+class UseTCP(dns.exception.DNSException):
+    """This IXFR cannot be completed with UDP."""
+
+
+class Inbound:
+    """
+    State machine for zone transfers.
+    """
+
+    def __init__(
+        self,
+        txn_manager: dns.transaction.TransactionManager,
+        rdtype: dns.rdatatype.RdataType = dns.rdatatype.AXFR,
+        serial: int | None = None,
+        is_udp: bool = False,
+    ):
+        """Initialize an inbound zone transfer.
+
+        *txn_manager* is a :py:class:`dns.transaction.TransactionManager`.
+
+        *rdtype* can be `dns.rdatatype.AXFR` or `dns.rdatatype.IXFR`.
+
+        *serial* is the base serial number for IXFRs, and is required in
+        that case.
+
+        *is_udp*, a ``bool``, indicates if UDP is being used for this
+        XFR.
+        """
+        self.txn_manager = txn_manager
+        self.txn: dns.transaction.Transaction | None = None
+        self.rdtype = rdtype
+        if rdtype == dns.rdatatype.IXFR:
+            if serial is None:
+                raise ValueError("a starting serial must be supplied for IXFRs")
+            self.incremental = True
+        elif rdtype == dns.rdatatype.AXFR:
+            if is_udp:
+                raise ValueError("is_udp specified for AXFR")
+            self.incremental = False
+        else:
+            raise ValueError("rdtype is not IXFR or AXFR")
+        self.serial = serial
+        self.is_udp = is_udp
+        (_, _, self.origin) = txn_manager.origin_information()
+        self.soa_rdataset: dns.rdataset.Rdataset | None = None
+        self.done = False
+        self.expecting_SOA = False
+        self.delete_mode = False
+
+    def process_message(self, message: dns.message.Message) -> bool:
+        """Process one message in the transfer.
+
+        The message should have the same relativization as was specified when
+        the `dns.xfr.Inbound` was created. The message should also have been
+        created with `one_rr_per_rrset=True` because order matters.
+
+        Returns `True` if the transfer is complete, and `False` otherwise.
+        """
+        if self.txn is None:
+            self.txn = self.txn_manager.writer(not self.incremental)
+        rcode = message.rcode()
+        if rcode != dns.rcode.NOERROR:
+            raise TransferError(rcode)
+        #
+        # We don't require a question section, but if it is present it
+        # should be correct.
+        #
+        if len(message.question) > 0:
+            if message.question[0].name != self.origin:
+                raise dns.exception.FormError("wrong question name")
+            if message.question[0].rdtype != self.rdtype:
+                raise dns.exception.FormError("wrong question rdatatype")
+        answer_index = 0
+        if self.soa_rdataset is None:
+            #
+            # This is the first message. We're expecting an SOA at
+            # the origin.
+ # + if not message.answer or message.answer[0].name != self.origin: + raise dns.exception.FormError("No answer or RRset not for zone origin") + rrset = message.answer[0] + rdataset = rrset + if rdataset.rdtype != dns.rdatatype.SOA: + raise dns.exception.FormError("first RRset is not an SOA") + answer_index = 1 + self.soa_rdataset = rdataset.copy() # pyright: ignore + if self.incremental: + assert self.soa_rdataset is not None + soa = cast(dns.rdtypes.ANY.SOA.SOA, self.soa_rdataset[0]) + if soa.serial == self.serial: + # + # We're already up-to-date. + # + self.done = True + elif dns.serial.Serial(soa.serial) < self.serial: + # It went backwards! + raise SerialWentBackwards + else: + if self.is_udp and len(message.answer[answer_index:]) == 0: + # + # There are no more records, so this is the + # "truncated" response. Say to use TCP + # + raise UseTCP + # + # Note we're expecting another SOA so we can detect + # if this IXFR response is an AXFR-style response. + # + self.expecting_SOA = True + # + # Process the answer section (other than the initial SOA in + # the first message). + # + for rrset in message.answer[answer_index:]: + name = rrset.name + rdataset = rrset + if self.done: + raise dns.exception.FormError("answers after final SOA") + assert self.txn is not None # for mypy + if rdataset.rdtype == dns.rdatatype.SOA and name == self.origin: + # + # Every time we see an origin SOA delete_mode inverts + # + if self.incremental: + self.delete_mode = not self.delete_mode + # + # If this SOA Rdataset is equal to the first we saw + # then we're finished. If this is an IXFR we also + # check that we're seeing the record in the expected + # part of the response. + # + if rdataset == self.soa_rdataset and ( + (not self.incremental) or self.delete_mode + ): + # + # This is the final SOA + # + soa = cast(dns.rdtypes.ANY.SOA.SOA, rdataset[0]) + if self.expecting_SOA: + # We got an empty IXFR sequence! + raise dns.exception.FormError("empty IXFR sequence") + if self.incremental and self.serial != soa.serial: + raise dns.exception.FormError("unexpected end of IXFR sequence") + self.txn.replace(name, rdataset) + self.txn.commit() + self.txn = None + self.done = True + else: + # + # This is not the final SOA + # + self.expecting_SOA = False + soa = cast(dns.rdtypes.ANY.SOA.SOA, rdataset[0]) + if self.incremental: + if self.delete_mode: + # This is the start of an IXFR deletion set + if soa.serial != self.serial: + raise dns.exception.FormError( + "IXFR base serial mismatch" + ) + else: + # This is the start of an IXFR addition set + self.serial = soa.serial + self.txn.replace(name, rdataset) + else: + # We saw a non-final SOA for the origin in an AXFR. + raise dns.exception.FormError("unexpected origin SOA in AXFR") + continue + if self.expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. + # + self.incremental = False + self.expecting_SOA = False + self.delete_mode = False + self.txn.rollback() + self.txn = self.txn_manager.writer(True) + # + # Note we are falling through into the code below + # so whatever rdataset this was gets written. 
+ # + # Add or remove the data + if self.delete_mode: + self.txn.delete_exact(name, rdataset) + else: + self.txn.add(name, rdataset) + if self.is_udp and not self.done: + # + # This is a UDP IXFR and we didn't get to done, and we didn't + # get the proper "truncated" response + # + raise dns.exception.FormError("unexpected end of UDP IXFR") + return self.done + + # + # Inbounds are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.txn: + self.txn.rollback() + return False + + +def make_query( + txn_manager: dns.transaction.TransactionManager, + serial: int | None = 0, + use_edns: int | bool | None = None, + ednsflags: int | None = None, + payload: int | None = None, + request_payload: int | None = None, + options: List[dns.edns.Option] | None = None, + keyring: Any = None, + keyname: dns.name.Name | None = None, + keyalgorithm: dns.name.Name | str = dns.tsig.default_algorithm, +) -> Tuple[dns.message.QueryMessage, int | None]: + """Make an AXFR or IXFR query. + + *txn_manager* is a ``dns.transaction.TransactionManager``, typically a + ``dns.zone.Zone``. + + *serial* is an ``int`` or ``None``. If 0, then IXFR will be + attempted using the most recent serial number from the + *txn_manager*; it is the caller's responsibility to ensure there + are no write transactions active that could invalidate the + retrieved serial. If a serial cannot be determined, AXFR will be + forced. Other integer values are the starting serial to use. + ``None`` forces an AXFR. + + Please see the documentation for :py:func:`dns.message.make_query` and + :py:func:`dns.message.Message.use_tsig` for details on the other parameters + to this function. + + Returns a `(query, serial)` tuple. + """ + (zone_origin, _, origin) = txn_manager.origin_information() + if zone_origin is None: + raise ValueError("no zone origin") + if serial is None: + rdtype = dns.rdatatype.AXFR + elif not isinstance(serial, int): + raise ValueError("serial is not an integer") + elif serial == 0: + with txn_manager.reader() as txn: + rdataset = txn.get(origin, "SOA") + if rdataset: + soa = cast(dns.rdtypes.ANY.SOA.SOA, rdataset[0]) + serial = soa.serial + rdtype = dns.rdatatype.IXFR + else: + serial = None + rdtype = dns.rdatatype.AXFR + elif serial > 0 and serial < 4294967296: + rdtype = dns.rdatatype.IXFR + else: + raise ValueError("serial out-of-range") + rdclass = txn_manager.get_class() + q = dns.message.make_query( + zone_origin, + rdtype, + rdclass, + use_edns, + False, + ednsflags, + payload, + request_payload, + options, + ) + if serial is not None: + rdata = dns.rdata.from_text(rdclass, "SOA", f". . {serial} 0 0 0 0") + rrset = q.find_rrset( + q.authority, zone_origin, rdclass, dns.rdatatype.SOA, create=True + ) + rrset.add(rdata, 0) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + return (q, serial) + + +def extract_serial_from_query(query: dns.message.Message) -> int | None: + """Extract the SOA serial number from query if it is an IXFR and return + it, otherwise return None. + + *query* is a dns.message.QueryMessage that is an IXFR or AXFR request. + + Raises if the query is not an IXFR or AXFR, or if an IXFR doesn't have + an appropriate SOA RRset in the authority section. 
+ """ + if not isinstance(query, dns.message.QueryMessage): + raise ValueError("query not a QueryMessage") + question = query.question[0] + if question.rdtype == dns.rdatatype.AXFR: + return None + elif question.rdtype != dns.rdatatype.IXFR: + raise ValueError("query is not an AXFR or IXFR") + soa_rrset = query.find_rrset( + query.authority, question.name, question.rdclass, dns.rdatatype.SOA + ) + soa = cast(dns.rdtypes.ANY.SOA.SOA, soa_rrset[0]) + return soa.serial diff --git a/venv/lib/python3.12/site-packages/dns/zone.py b/venv/lib/python3.12/site-packages/dns/zone.py new file mode 100644 index 0000000..f916ffe --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/zone.py @@ -0,0 +1,1462 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Zones.""" + +import contextlib +import io +import os +import struct +from typing import ( + Any, + Callable, + Iterable, + Iterator, + List, + MutableMapping, + Set, + Tuple, + cast, +) + +import dns.exception +import dns.grange +import dns.immutable +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.rdtypes.ANY.ZONEMD +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl +import dns.zonefile +from dns.zonetypes import DigestHashAlgorithm, DigestScheme, _digest_hashers + + +class BadZone(dns.exception.DNSException): + """The DNS zone is malformed.""" + + +class NoSOA(BadZone): + """The DNS zone has no SOA RR at its origin.""" + + +class NoNS(BadZone): + """The DNS zone has no NS RRset at its origin.""" + + +class UnknownOrigin(BadZone): + """The DNS zone's origin is unknown.""" + + +class UnsupportedDigestScheme(dns.exception.DNSException): + """The zone digest's scheme is unsupported.""" + + +class UnsupportedDigestHashAlgorithm(dns.exception.DNSException): + """The zone digest's origin is unsupported.""" + + +class NoDigest(dns.exception.DNSException): + """The DNS zone has no ZONEMD RRset at its origin.""" + + +class DigestVerificationFailure(dns.exception.DNSException): + """The ZONEMD digest failed to verify.""" + + +def _validate_name( + name: dns.name.Name, + origin: dns.name.Name | None, + relativize: bool, +) -> dns.name.Name: + # This name validation code is shared by Zone and Version + if origin is None: + # This should probably never happen as other code (e.g. + # _rr_line) will notice the lack of an origin before us, but + # we check just in case! 
+        raise KeyError("no zone origin is defined")
+    if name.is_absolute():
+        if not name.is_subdomain(origin):
+            raise KeyError("name parameter must be a subdomain of the zone origin")
+        if relativize:
+            name = name.relativize(origin)
+    else:
+        # We have a relative name.  Make sure that the derelativized name is
+        # not too long.
+        try:
+            abs_name = name.derelativize(origin)
+        except dns.name.NameTooLong:
+            # We map dns.name.NameTooLong to KeyError to be consistent with
+            # the other exceptions above.
+            raise KeyError("relative name too long for zone")
+        if not relativize:
+            # We have a relative name in a non-relative zone, so use the
+            # derelativized name.
+            name = abs_name
+    return name
+
+
+class Zone(dns.transaction.TransactionManager):
+    """A DNS zone.
+
+    A ``Zone`` is a mapping from names to nodes.  The zone object may be
+    treated like a Python dictionary, e.g. ``zone[name]`` will retrieve
+    the node associated with that name.  The *name* may be a
+    ``dns.name.Name`` object, or it may be a string.  In either case,
+    if the name is relative it is treated as relative to the origin of
+    the zone.
+    """
+
+    node_factory: Callable[[], dns.node.Node] = dns.node.Node
+    map_factory: Callable[[], MutableMapping[dns.name.Name, dns.node.Node]] = dict
+    # We only require the version types as "Version" to allow for flexibility, as
+    # only the version protocol matters
+    writable_version_factory: Callable[["Zone", bool], "Version"] | None = None
+    immutable_version_factory: Callable[["Version"], "Version"] | None = None
+
+    __slots__ = ["rdclass", "origin", "nodes", "relativize"]
+
+    def __init__(
+        self,
+        origin: dns.name.Name | str | None,
+        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+        relativize: bool = True,
+    ):
+        """Initialize a zone object.
+
+        *origin* is the origin of the zone.  It may be a ``dns.name.Name``,
+        a ``str``, or ``None``.  If ``None``, then the zone's origin will
+        be set by the first ``$ORIGIN`` line in a zone file.
+
+        *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class;
+        the default is class IN.
+
+        *relativize*, a ``bool``, determines whether domain names are
+        relativized to the zone's origin.  The default is ``True``.
+        """
+
+        if origin is not None:
+            if isinstance(origin, str):
+                origin = dns.name.from_text(origin)
+            elif not isinstance(origin, dns.name.Name):
+                raise ValueError("origin parameter must be convertible to a DNS name")
+            if not origin.is_absolute():
+                raise ValueError("origin parameter must be an absolute name")
+        self.origin = origin
+        self.rdclass = rdclass
+        self.nodes: MutableMapping[dns.name.Name, dns.node.Node] = self.map_factory()
+        self.relativize = relativize
+
+    def __eq__(self, other):
+        """Two zones are equal if they have the same origin, class, and
+        nodes.
+
+        Returns a ``bool``.
+        """
+
+        if not isinstance(other, Zone):
+            return False
+        if (
+            self.rdclass != other.rdclass
+            or self.origin != other.origin
+            or self.nodes != other.nodes
+        ):
+            return False
+        return True
+
+    def __ne__(self, other):
+        """Are two zones not equal?
+
+        Returns a ``bool``.
+        """
+
+        return not self.__eq__(other)
+
+    def _validate_name(self, name: dns.name.Name | str) -> dns.name.Name:
+        # Note that any changes in this method should have corresponding changes
+        # made in the Version _validate_name() method.
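+        # Accept either a ``str`` or a ``dns.name.Name``, then apply the
+        # shared module-level validation: absolute names must fall under the
+        # zone origin, and names are (de)relativized per ``self.relativize``.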
+ if isinstance(name, str): + name = dns.name.from_text(name, None) + elif not isinstance(name, dns.name.Name): + raise KeyError("name parameter must be convertible to a DNS name") + return _validate_name(name, self.origin, self.relativize) + + def __getitem__(self, key): + key = self._validate_name(key) + return self.nodes[key] + + def __setitem__(self, key, value): + key = self._validate_name(key) + self.nodes[key] = value + + def __delitem__(self, key): + key = self._validate_name(key) + del self.nodes[key] + + def __iter__(self): + return self.nodes.__iter__() + + def keys(self): + return self.nodes.keys() + + def values(self): + return self.nodes.values() + + def items(self): + return self.nodes.items() + + def get(self, key): + key = self._validate_name(key) + return self.nodes.get(key) + + def __contains__(self, key): + key = self._validate_name(key) + return key in self.nodes + + def find_node( + self, name: dns.name.Name | str, create: bool = False + ) -> dns.node.Node: + """Find a node in the zone, possibly creating it. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.node.Node``. + """ + + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None: + if not create: + raise KeyError + node = self.node_factory() + self.nodes[name] = node + return node + + def get_node( + self, name: dns.name.Name | str, create: bool = False + ) -> dns.node.Node | None: + """Get a node in the zone, possibly creating it. + + This method is like ``find_node()``, except it returns None instead + of raising an exception if the node does not exist and creation + has not been requested. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Returns a ``dns.node.Node`` or ``None``. + """ + + try: + node = self.find_node(name, create) + except KeyError: + node = None + return node + + def delete_node(self, name: dns.name.Name | str) -> None: + """Delete the specified node if it exists. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + It is not an error if the node does not exist. + """ + + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + + def find_rdataset( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + """Look for an rdataset with the specified name and type in the zone, + and return an rdataset encapsulating it. + + The rdataset returned is not a copy; changes to it will change + the zone. + + KeyError is raised if the name or type are not found. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. 
If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str`` the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset``. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + node = self.find_node(name, create) + return node.find_rdataset(self.rdclass, rdtype, covers, create) + + def get_rdataset( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset | None: + """Look for an rdataset with the specified name and type in the zone. + + This method is like ``find_rdataset()``, except it returns None instead + of raising an exception if the rdataset does not exist and creation + has not been requested. + + The rdataset returned is not a copy; changes to it will change + the zone. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. + """ + + try: + rdataset = self.find_rdataset(name, rdtype, covers, create) + except KeyError: + rdataset = None + return rdataset + + def delete_rdataset( + self, + name: dns.name.Name | str, + rdtype: dns.rdatatype.RdataType | str, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + ) -> None: + """Delete the rdataset matching *rdtype* and *covers*, if it + exists at the node specified by *name*. + + It is not an error if the node does not exist, or if there is no matching + rdataset at the node. + + If the node has no rdatasets after the deletion, it will itself be deleted. + + *name*: the name of the node to find. 
The value may be a ``dns.name.Name`` or a
+        ``str``.  If absolute, the name must be a subdomain of the zone's origin.  If
+        ``zone.relativize`` is ``True``, then the name will be relativized.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str`` or ``None``, the covered
+        type.  Usually this value is ``dns.rdatatype.NONE``, but if the rdtype is
+        ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, then the covers value will be
+        the rdata type the SIG/RRSIG covers.  The library treats the SIG and RRSIG types
+        as if they were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).  This
+        makes RRSIGs much easier to work with than if RRSIGs covering different rdata
+        types were aggregated into a single RRSIG rdataset.
+        """
+
+        name = self._validate_name(name)
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        covers = dns.rdatatype.RdataType.make(covers)
+        node = self.get_node(name)
+        if node is not None:
+            node.delete_rdataset(self.rdclass, rdtype, covers)
+            if len(node) == 0:
+                self.delete_node(name)
+
+    def replace_rdataset(
+        self, name: dns.name.Name | str, replacement: dns.rdataset.Rdataset
+    ) -> None:
+        """Replace an rdataset at name.
+
+        It is not an error if there is no rdataset matching *replacement*.
+
+        Ownership of the *replacement* object is transferred to the zone;
+        in other words, this method does not store a copy of *replacement*
+        at the node, it stores *replacement* itself.
+
+        If the node does not exist, it is created.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``.  If absolute, the
+        name must be a subdomain of the zone's origin.  If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *replacement*, a ``dns.rdataset.Rdataset``, the replacement rdataset.
+        """
+
+        if replacement.rdclass != self.rdclass:
+            raise ValueError("replacement.rdclass != zone.rdclass")
+        node = self.find_node(name, True)
+        node.replace_rdataset(replacement)
+
+    def find_rrset(
+        self,
+        name: dns.name.Name | str,
+        rdtype: dns.rdatatype.RdataType | str,
+        covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE,
+    ) -> dns.rrset.RRset:
+        """Look for an rdataset with the specified name and type in the zone,
+        and return an RRset encapsulating it.
+
+        This method is less efficient than the similar
+        ``find_rdataset()`` because it creates an RRset instead of
+        returning the matching rdataset.  It may be more convenient
+        for some uses since it returns an object which binds the owner
+        name to the rdataset.
+
+        This method may not be used to create new nodes or rdatasets;
+        use ``find_rdataset`` instead.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``.  If absolute, the
+        name must be a subdomain of the zone's origin.  If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers.  The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+
+        Raises ``KeyError`` if the name is not known, or if the name was not
+        a subdomain of the origin.
+
+        Returns a ``dns.rrset.RRset``.
+        """
+
+        vname = self._validate_name(name)
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        covers = dns.rdatatype.RdataType.make(covers)
+        rdataset = self.nodes[vname].find_rdataset(self.rdclass, rdtype, covers)
+        rrset = dns.rrset.RRset(vname, self.rdclass, rdtype, covers)
+        rrset.update(rdataset)
+        return rrset
+
+    def get_rrset(
+        self,
+        name: dns.name.Name | str,
+        rdtype: dns.rdatatype.RdataType | str,
+        covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE,
+    ) -> dns.rrset.RRset | None:
+        """Look for an rdataset with the specified name and type in the zone,
+        and return an RRset encapsulating it.
+
+        This method is less efficient than the similar ``get_rdataset()``
+        because it creates an RRset instead of returning the matching
+        rdataset.  It may be more convenient for some uses since it
+        returns an object which binds the owner name to the rdataset.
+
+        This method may not be used to create new nodes or rdatasets;
+        use ``get_rdataset()`` instead.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``.  If absolute, the
+        name must be a subdomain of the zone's origin.  If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers.  The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+
+        Returns a ``dns.rrset.RRset`` or ``None``.
+        """
+
+        try:
+            rrset = self.find_rrset(name, rdtype, covers)
+        except KeyError:
+            rrset = None
+        return rrset
+
+    def iterate_rdatasets(
+        self,
+        rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.ANY,
+        covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE,
+    ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]:
+        """Return a generator which yields (name, rdataset) tuples for
+        all rdatasets in the zone which have the specified *rdtype*
+        and *covers*.  If *rdtype* is ``dns.rdatatype.ANY``, the default,
+        then all rdatasets will be matched.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers.  The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
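+
+        An illustrative sketch (``zone`` is an existing ``dns.zone.Zone``)::
+
+            for name, rdataset in zone.iterate_rdatasets("MX"):
+                print(name, rdataset)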
+ """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + for name, node in self.items(): + for rds in node: + if rdtype == dns.rdatatype.ANY or ( + rds.rdtype == rdtype and rds.covers == covers + ): + yield (name, rds) + + def iterate_rdatas( + self, + rdtype: dns.rdatatype.RdataType | str = dns.rdatatype.ANY, + covers: dns.rdatatype.RdataType | str = dns.rdatatype.NONE, + ) -> Iterator[Tuple[dns.name.Name, int, dns.rdata.Rdata]]: + """Return a generator which yields (name, ttl, rdata) tuples for + all rdatas in the zone which have the specified *rdtype* + and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, + then all rdatas will be matched. + + *rdtype*, a ``dns.rdataset.Rdataset`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdataset.Rdataset`` or ``str``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + for name, node in self.items(): + for rds in node: + if rdtype == dns.rdatatype.ANY or ( + rds.rdtype == rdtype and rds.covers == covers + ): + for rdata in rds: + yield (name, rds.ttl, rdata) + + def to_file( + self, + f: Any, + sorted: bool = True, + relativize: bool = True, + nl: str | None = None, + want_comments: bool = False, + want_origin: bool = False, + ) -> None: + """Write a zone to a file. + + *f*, a file or `str`. If *f* is a string, it is treated + as the name of a file to open. + + *sorted*, a ``bool``. If True, the default, then the file + will be written with the names sorted in DNSSEC order from + least to greatest. Otherwise the names will be written in + whatever order they happen to have in the zone's dictionary. + + *relativize*, a ``bool``. If True, the default, then domain + names in the output will be relativized to the zone's origin + if possible. + + *nl*, a ``str`` or None. The end of line string. If not + ``None``, the output will use the platform's native + end-of-line marker (i.e. LF on POSIX, CRLF on Windows). + + *want_comments*, a ``bool``. If ``True``, emit end-of-line comments + as part of writing the file. If ``False``, the default, do not + emit them. + + *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at + the start of the file. If ``False``, the default, do not emit + one. 
+ """ + + if isinstance(f, str): + cm: contextlib.AbstractContextManager = open(f, "wb") + else: + cm = contextlib.nullcontext(f) + with cm as f: + # must be in this way, f.encoding may contain None, or even + # attribute may not be there + file_enc = getattr(f, "encoding", None) + if file_enc is None: + file_enc = "utf-8" + + if nl is None: + # binary mode, '\n' is not enough + nl_b = os.linesep.encode(file_enc) + nl = "\n" + elif isinstance(nl, str): + nl_b = nl.encode(file_enc) + else: + nl_b = nl + nl = nl.decode() + + if want_origin: + assert self.origin is not None + l = "$ORIGIN " + self.origin.to_text() + l_b = l.encode(file_enc) + try: + f.write(l_b) + f.write(nl_b) + except TypeError: # textual mode + f.write(l) + f.write(nl) + + if sorted: + names = list(self.keys()) + names.sort() + else: + names = self.keys() + for n in names: + l = self[n].to_text( + n, + origin=self.origin, # pyright: ignore + relativize=relativize, # pyright: ignore + want_comments=want_comments, # pyright: ignore + ) + l_b = l.encode(file_enc) + + try: + f.write(l_b) + f.write(nl_b) + except TypeError: # textual mode + f.write(l) + f.write(nl) + + def to_text( + self, + sorted: bool = True, + relativize: bool = True, + nl: str | None = None, + want_comments: bool = False, + want_origin: bool = False, + ) -> str: + """Return a zone's text as though it were written to a file. + + *sorted*, a ``bool``. If True, the default, then the file + will be written with the names sorted in DNSSEC order from + least to greatest. Otherwise the names will be written in + whatever order they happen to have in the zone's dictionary. + + *relativize*, a ``bool``. If True, the default, then domain + names in the output will be relativized to the zone's origin + if possible. + + *nl*, a ``str`` or None. The end of line string. If not + ``None``, the output will use the platform's native + end-of-line marker (i.e. LF on POSIX, CRLF on Windows). + + *want_comments*, a ``bool``. If ``True``, emit end-of-line comments + as part of writing the file. If ``False``, the default, do not + emit them. + + *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at + the start of the output. If ``False``, the default, do not emit + one. + + Returns a ``str``. + """ + temp_buffer = io.StringIO() + self.to_file(temp_buffer, sorted, relativize, nl, want_comments, want_origin) + return_value = temp_buffer.getvalue() + temp_buffer.close() + return return_value + + def check_origin(self) -> None: + """Do some simple checking of the zone's origin. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + """ + if self.relativize: + name = dns.name.empty + else: + assert self.origin is not None + name = self.origin + if self.get_rdataset(name, dns.rdatatype.SOA) is None: + raise NoSOA + if self.get_rdataset(name, dns.rdatatype.NS) is None: + raise NoNS + + def get_soa( + self, txn: dns.transaction.Transaction | None = None + ) -> dns.rdtypes.ANY.SOA.SOA: + """Get the zone SOA rdata. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Returns a ``dns.rdtypes.ANY.SOA.SOA`` Rdata. + """ + if self.relativize: + origin_name = dns.name.empty + else: + if self.origin is None: + # get_soa() has been called very early, and there must not be + # an SOA if there is no origin. 
+ raise NoSOA + origin_name = self.origin + soa_rds: dns.rdataset.Rdataset | None + if txn: + soa_rds = txn.get(origin_name, dns.rdatatype.SOA) + else: + soa_rds = self.get_rdataset(origin_name, dns.rdatatype.SOA) + if soa_rds is None: + raise NoSOA + else: + soa = cast(dns.rdtypes.ANY.SOA.SOA, soa_rds[0]) + return soa + + def _compute_digest( + self, + hash_algorithm: DigestHashAlgorithm, + scheme: DigestScheme = DigestScheme.SIMPLE, + ) -> bytes: + hashinfo = _digest_hashers.get(hash_algorithm) + if not hashinfo: + raise UnsupportedDigestHashAlgorithm + if scheme != DigestScheme.SIMPLE: + raise UnsupportedDigestScheme + + if self.relativize: + origin_name = dns.name.empty + else: + assert self.origin is not None + origin_name = self.origin + hasher = hashinfo() + for name, node in sorted(self.items()): + rrnamebuf = name.to_digestable(self.origin) + for rdataset in sorted(node, key=lambda rds: (rds.rdtype, rds.covers)): + if name == origin_name and dns.rdatatype.ZONEMD in ( + rdataset.rdtype, + rdataset.covers, + ): + continue + rrfixed = struct.pack( + "!HHI", rdataset.rdtype, rdataset.rdclass, rdataset.ttl + ) + rdatas = [rdata.to_digestable(self.origin) for rdata in rdataset] + for rdata in sorted(rdatas): + rrlen = struct.pack("!H", len(rdata)) + hasher.update(rrnamebuf + rrfixed + rrlen + rdata) + return hasher.digest() + + def compute_digest( + self, + hash_algorithm: DigestHashAlgorithm, + scheme: DigestScheme = DigestScheme.SIMPLE, + ) -> dns.rdtypes.ANY.ZONEMD.ZONEMD: + serial = self.get_soa().serial + digest = self._compute_digest(hash_algorithm, scheme) + return dns.rdtypes.ANY.ZONEMD.ZONEMD( + self.rdclass, dns.rdatatype.ZONEMD, serial, scheme, hash_algorithm, digest + ) + + def verify_digest( + self, zonemd: dns.rdtypes.ANY.ZONEMD.ZONEMD | None = None + ) -> None: + digests: dns.rdataset.Rdataset | List[dns.rdtypes.ANY.ZONEMD.ZONEMD] + if zonemd: + digests = [zonemd] + else: + assert self.origin is not None + rds = self.get_rdataset(self.origin, dns.rdatatype.ZONEMD) + if rds is None: + raise NoDigest + digests = rds + for digest in digests: + try: + computed = self._compute_digest(digest.hash_algorithm, digest.scheme) + if computed == digest.digest: + return + except Exception: + pass + raise DigestVerificationFailure + + # TransactionManager methods + + def reader(self) -> "Transaction": + return Transaction(self, False, Version(self, 1, self.nodes, self.origin)) + + def writer(self, replacement: bool = False) -> "Transaction": + txn = Transaction(self, replacement) + txn._setup_version() + return txn + + def origin_information( + self, + ) -> Tuple[dns.name.Name | None, bool, dns.name.Name | None]: + effective: dns.name.Name | None + if self.relativize: + effective = dns.name.empty + else: + effective = self.origin + return (self.origin, self.relativize, effective) + + def get_class(self): + return self.rdclass + + # Transaction methods + + def _end_read(self, txn): + pass + + def _end_write(self, txn): + pass + + def _commit_version(self, txn, version, origin): + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + + def _get_next_version_id(self) -> int: + # Versions are ephemeral and all have id 1 + return 1 + + +# These classes used to be in dns.versioned, but have moved here so we can use +# the copy-on-write transaction mechanism for both kinds of zones. In a +# regular zone, the version only exists during the transaction, and the nodes +# are regular dns.node.Nodes. + +# A node with a version id. 
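+# (Historically the id was used for the "do we need to copy-on-write?"
+# test; newer code uses the changed-name set instead, but the id is kept
+# for backwards compatibility -- see WritableVersion._maybe_cow_with_name()
+# below.)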
+ + +class VersionedNode(dns.node.Node): # lgtm[py/missing-equals] + __slots__ = ["id"] + + def __init__(self): + super().__init__() + # A proper id will get set by the Version + self.id = 0 + + +@dns.immutable.immutable +class ImmutableVersionedNode(VersionedNode): + def __init__(self, node): + super().__init__() + self.id = node.id + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset | None: + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + raise TypeError("immutable") + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + raise TypeError("immutable") + + def is_immutable(self) -> bool: + return True + + +class Version: + def __init__( + self, + zone: Zone, + id: int, + nodes: MutableMapping[dns.name.Name, dns.node.Node] | None = None, + origin: dns.name.Name | None = None, + ): + self.zone = zone + self.id = id + if nodes is not None: + self.nodes = nodes + else: + self.nodes = zone.map_factory() + self.origin = origin + + def _validate_name(self, name: dns.name.Name) -> dns.name.Name: + return _validate_name(name, self.origin, self.zone.relativize) + + def get_node(self, name: dns.name.Name) -> dns.node.Node | None: + name = self._validate_name(name) + return self.nodes.get(name) + + def get_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> dns.rdataset.Rdataset | None: + node = self.get_node(name) + if node is None: + return None + return node.get_rdataset(self.zone.rdclass, rdtype, covers) + + def keys(self): + return self.nodes.keys() + + def items(self): + return self.nodes.items() + + +class WritableVersion(Version): + def __init__(self, zone: Zone, replacement: bool = False): + # The zone._versions_lock must be held by our caller in a versioned + # zone. + id = zone._get_next_version_id() + super().__init__(zone, id) + if not replacement: + # We copy the map, because that gives us a simple and thread-safe + # way of doing versions, and we have a garbage collector to help + # us. We only make new node objects if we actually change the + # node. + self.nodes.update(zone.nodes) + # We have to copy the zone origin as it may be None in the first + # version, and we don't want to mutate the zone until we commit. + self.origin = zone.origin + self.changed: Set[dns.name.Name] = set() + + def _maybe_cow_with_name( + self, name: dns.name.Name + ) -> Tuple[dns.node.Node, dns.name.Name]: + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None or name not in self.changed: + new_node = self.zone.node_factory() + if hasattr(new_node, "id"): + # We keep doing this for backwards compatibility, as earlier + # code used new_node.id != self.id for the "do we need to CoW?" + # test. 
Now we use the changed set as this works with both + # regular zones and versioned zones. + # + # We ignore the mypy error as this is safe but it doesn't see it. + new_node.id = self.id # type: ignore + if node is not None: + # moo! copy on write! + new_node.rdatasets.extend(node.rdatasets) + self.nodes[name] = new_node + self.changed.add(name) + return (new_node, name) + else: + return (node, name) + + def _maybe_cow(self, name: dns.name.Name) -> dns.node.Node: + return self._maybe_cow_with_name(name)[0] + + def delete_node(self, name: dns.name.Name) -> None: + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + self.changed.add(name) + + def put_rdataset( + self, name: dns.name.Name, rdataset: dns.rdataset.Rdataset + ) -> None: + node = self._maybe_cow(name) + node.replace_rdataset(rdataset) + + def delete_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> None: + node = self._maybe_cow(name) + node.delete_rdataset(self.zone.rdclass, rdtype, covers) + if len(node) == 0: + del self.nodes[name] + + +@dns.immutable.immutable +class ImmutableVersion(Version): + def __init__(self, version: Version): + if not isinstance(version, WritableVersion): + raise ValueError( + "a dns.zone.ImmutableVersion requires a dns.zone.WritableVersion" + ) + # We tell super() that it's a replacement as we don't want it + # to copy the nodes, as we're about to do that with an + # immutable Dict. + super().__init__(version.zone, True) + # set the right id! + self.id = version.id + # keep the origin + self.origin = version.origin + # Make changed nodes immutable + for name in version.changed: + node = version.nodes.get(name) + # it might not exist if we deleted it in the version + if node: + version.nodes[name] = ImmutableVersionedNode(node) + # We're changing the type of the nodes dictionary here on purpose, so + # we ignore the mypy error. 
+ self.nodes = dns.immutable.Dict( + version.nodes, True, self.zone.map_factory + ) # type: ignore + + +class Transaction(dns.transaction.Transaction): + def __init__(self, zone, replacement, version=None, make_immutable=False): + read_only = version is not None + super().__init__(zone, replacement, read_only) + self.version = version + self.make_immutable = make_immutable + + @property + def zone(self): + return self.manager + + def _setup_version(self): + assert self.version is None + factory = self.manager.writable_version_factory # pyright: ignore + if factory is None: + factory = WritableVersion + self.version = factory(self.zone, self.replacement) # pyright: ignore + + def _get_rdataset(self, name, rdtype, covers): + assert self.version is not None + return self.version.get_rdataset(name, rdtype, covers) + + def _put_rdataset(self, name, rdataset): + assert not self.read_only + assert self.version is not None + self.version.put_rdataset(name, rdataset) + + def _delete_name(self, name): + assert not self.read_only + assert self.version is not None + self.version.delete_node(name) + + def _delete_rdataset(self, name, rdtype, covers): + assert not self.read_only + assert self.version is not None + self.version.delete_rdataset(name, rdtype, covers) + + def _name_exists(self, name): + assert self.version is not None + return self.version.get_node(name) is not None + + def _changed(self): + if self.read_only: + return False + else: + assert self.version is not None + return len(self.version.changed) > 0 + + def _end_transaction(self, commit): + assert self.zone is not None + assert self.version is not None + if self.read_only: + self.zone._end_read(self) # pyright: ignore + elif commit and len(self.version.changed) > 0: + if self.make_immutable: + factory = self.manager.immutable_version_factory # pyright: ignore + if factory is None: + factory = ImmutableVersion + version = factory(self.version) + else: + version = self.version + self.zone._commit_version( # pyright: ignore + self, version, self.version.origin + ) + + else: + # rollback + self.zone._end_write(self) # pyright: ignore + + def _set_origin(self, origin): + assert self.version is not None + if self.version.origin is None: + self.version.origin = origin + + def _iterate_rdatasets(self): + assert self.version is not None + for name, node in self.version.items(): + for rdataset in node: + yield (name, rdataset) + + def _iterate_names(self): + assert self.version is not None + return self.version.keys() + + def _get_node(self, name): + assert self.version is not None + return self.version.get_node(name) + + def _origin_information(self): + assert self.version is not None + (absolute, relativize, effective) = self.manager.origin_information() + if absolute is None and self.version.origin is not None: + # No origin has been committed yet, but we've learned one as part of + # this txn. Use it. + absolute = self.version.origin + if relativize: + effective = dns.name.empty + else: + effective = absolute + return (absolute, relativize, effective) + + +def _from_text( + text: Any, + origin: dns.name.Name | str | None = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + zone_factory: Any = Zone, + filename: str | None = None, + allow_include: bool = False, + check_origin: bool = True, + idna_codec: dns.name.IDNACodec | None = None, + allow_directives: bool | Iterable[str] = True, +) -> Zone: + # See the comments for the public APIs from_text() and from_file() for + # details. 
+
+    # 'text' can also be a file, but we don't publish that fact
+    # since it's an implementation detail.  The official file
+    # interface is from_file().
+
+    if filename is None:
+        filename = "<string>"
+    zone = zone_factory(origin, rdclass, relativize=relativize)
+    with zone.writer(True) as txn:
+        tok = dns.tokenizer.Tokenizer(text, filename, idna_codec=idna_codec)
+        reader = dns.zonefile.Reader(
+            tok,
+            rdclass,
+            txn,
+            allow_include=allow_include,
+            allow_directives=allow_directives,
+        )
+        try:
+            reader.read()
+        except dns.zonefile.UnknownOrigin:
+            # for backwards compatibility
+            raise UnknownOrigin
+    # Now that we're done reading, do some basic checking of the zone.
+    if check_origin:
+        zone.check_origin()
+    return zone
+
+
+def from_text(
+    text: str,
+    origin: dns.name.Name | str | None = None,
+    rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+    relativize: bool = True,
+    zone_factory: Any = Zone,
+    filename: str | None = None,
+    allow_include: bool = False,
+    check_origin: bool = True,
+    idna_codec: dns.name.IDNACodec | None = None,
+    allow_directives: bool | Iterable[str] = True,
+) -> Zone:
+    """Build a zone object from a zone file format string.
+
+    *text*, a ``str``, the zone file format input.
+
+    *origin*, a ``dns.name.Name``, a ``str``, or ``None``.  The origin
+    of the zone; if not specified, the first ``$ORIGIN`` statement in the
+    zone file will determine the origin of the zone.
+
+    *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class; the default is
+    class IN.
+
+    *relativize*, a ``bool``, determines whether domain names are
+    relativized to the zone's origin.  The default is ``True``.
+
+    *zone_factory*, the zone factory to use or ``None``.  If ``None``, then
+    ``dns.zone.Zone`` will be used.  The value may be any class or callable
+    that returns a subclass of ``dns.zone.Zone``.
+
+    *filename*, a ``str`` or ``None``, the filename to emit when
+    describing where an error occurred; the default is ``'<string>'``.
+
+    *allow_include*, a ``bool``.  If ``True``, then ``$INCLUDE`` directives
+    are permitted.  If ``False``, the default, then encountering a
+    ``$INCLUDE`` will raise a ``SyntaxError`` exception.
+
+    *check_origin*, a ``bool``.  If ``True``, the default, then sanity
+    checks of the origin node will be made by calling the zone's
+    ``check_origin()`` method.
+
+    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
+    encoder/decoder.  If ``None``, the default IDNA 2003 encoder/decoder
+    is used.
+
+    *allow_directives*, a ``bool`` or an iterable of `str`.  If ``True``, the default,
+    then directives are permitted, and the *allow_include* parameter controls whether
+    ``$INCLUDE`` is permitted.  If ``False`` or an empty iterable, then no directive
+    processing is done and any directive-like text will be treated as a regular owner
+    name.  If a non-empty iterable, then only the listed directives (including the
+    ``$``) are allowed.
+
+    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+    Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+    Raises ``KeyError`` if there is no origin node.
+
+    Returns a subclass of ``dns.zone.Zone``.
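+
+    An illustrative sketch (hypothetical zone data)::
+
+        zone = dns.zone.from_text(
+            "@ 3600 IN SOA ns1 hostmaster 1 7200 900 1209600 86400\n"
+            "@ 3600 IN NS ns1\n"
+            "ns1 3600 IN A 10.0.0.1\n",
+            origin="example.",
+        )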
+ """ + return _from_text( + text, + origin, + rdclass, + relativize, + zone_factory, + filename, + allow_include, + check_origin, + idna_codec, + allow_directives, + ) + + +def from_file( + f: Any, + origin: dns.name.Name | str | None = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + zone_factory: Any = Zone, + filename: str | None = None, + allow_include: bool = True, + check_origin: bool = True, + idna_codec: dns.name.IDNACodec | None = None, + allow_directives: bool | Iterable[str] = True, +) -> Zone: + """Read a zone file and build a zone object. + + *f*, a file or ``str``. If *f* is a string, it is treated + as the name of a file to open. + + *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin + of the zone; if not specified, the first ``$ORIGIN`` statement in the + zone file will determine the origin of the zone. + + *rdclass*, an ``int``, the zone's rdata class; the default is class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + + *zone_factory*, the zone factory to use or ``None``. If ``None``, then + ``dns.zone.Zone`` will be used. The value may be any class or callable + that returns a subclass of ``dns.zone.Zone``. + + *filename*, a ``str`` or ``None``, the filename to emit when + describing where an error occurred; the default is ``''``. + + *allow_include*, a ``bool``. If ``True``, the default, then ``$INCLUDE`` + directives are permitted. If ``False``, then encoutering a ``$INCLUDE`` + will raise a ``SyntaxError`` exception. + + *check_origin*, a ``bool``. If ``True``, the default, then sanity + checks of the origin node will be made by calling the zone's + ``check_origin()`` method. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *allow_directives*, a ``bool`` or an iterable of `str`. If ``True``, the default, + then directives are permitted, and the *allow_include* parameter controls whether + ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive + processing is done and any directive-like text will be treated as a regular owner + name. If a non-empty iterable, then only the listed directives (including the + ``$``) are allowed. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + + Returns a subclass of ``dns.zone.Zone``. + """ + + if isinstance(f, str): + if filename is None: + filename = f + cm: contextlib.AbstractContextManager = open(f, encoding="utf-8") + else: + cm = contextlib.nullcontext(f) + with cm as f: + return _from_text( + f, + origin, + rdclass, + relativize, + zone_factory, + filename, + allow_include, + check_origin, + idna_codec, + allow_directives, + ) + assert False # make mypy happy lgtm[py/unreachable-statement] + + +def from_xfr( + xfr: Any, + zone_factory: Any = Zone, + relativize: bool = True, + check_origin: bool = True, +) -> Zone: + """Convert the output of a zone transfer generator into a zone object. + + *xfr*, a generator of ``dns.message.Message`` objects, typically + ``dns.query.xfr()``. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + It is essential that the relativize setting matches the one specified + to the generator. + + *check_origin*, a ``bool``. 
If ``True``, the default, then sanity + checks of the origin node will be made by calling the zone's + ``check_origin()`` method. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + + Raises ``ValueError`` if no messages are yielded by the generator. + + Returns a subclass of ``dns.zone.Zone``. + """ + + z = None + for r in xfr: + if z is None: + if relativize: + origin = r.origin + else: + origin = r.answer[0].name + rdclass = r.answer[0].rdclass + z = zone_factory(origin, rdclass, relativize=relativize) + for rrset in r.answer: + znode = z.nodes.get(rrset.name) + if not znode: + znode = z.node_factory() + z.nodes[rrset.name] = znode + zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, rrset.covers, True) + zrds.update_ttl(rrset.ttl) + for rd in rrset: + zrds.add(rd) + if z is None: + raise ValueError("empty transfer") + if check_origin: + z.check_origin() + return z diff --git a/venv/lib/python3.12/site-packages/dns/zonefile.py b/venv/lib/python3.12/site-packages/dns/zonefile.py new file mode 100644 index 0000000..7a81454 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/zonefile.py @@ -0,0 +1,756 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Zones.""" + +import re +import sys +from typing import Any, Iterable, List, Set, Tuple, cast + +import dns.exception +import dns.grange +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl + + +class UnknownOrigin(dns.exception.DNSException): + """Unknown origin""" + + +class CNAMEAndOtherData(dns.exception.DNSException): + """A node has a CNAME and other data""" + + +def _check_cname_and_other_data(txn, name, rdataset): + rdataset_kind = dns.node.NodeKind.classify_rdataset(rdataset) + node = txn.get_node(name) + if node is None: + # empty nodes are neutral. 
+ return + node_kind = node.classify() + if ( + node_kind == dns.node.NodeKind.CNAME + and rdataset_kind == dns.node.NodeKind.REGULAR + ): + raise CNAMEAndOtherData("rdataset type is not compatible with a CNAME node") + elif ( + node_kind == dns.node.NodeKind.REGULAR + and rdataset_kind == dns.node.NodeKind.CNAME + ): + raise CNAMEAndOtherData( + "CNAME rdataset is not compatible with a regular data node" + ) + # Otherwise at least one of the node and the rdataset is neutral, so + # adding the rdataset is ok + + +SavedStateType = Tuple[ + dns.tokenizer.Tokenizer, + dns.name.Name | None, # current_origin + dns.name.Name | None, # last_name + Any | None, # current_file + int, # last_ttl + bool, # last_ttl_known + int, # default_ttl + bool, +] # default_ttl_known + + +def _upper_dollarize(s): + s = s.upper() + if not s.startswith("$"): + s = "$" + s + return s + + +class Reader: + """Read a DNS zone file into a transaction.""" + + def __init__( + self, + tok: dns.tokenizer.Tokenizer, + rdclass: dns.rdataclass.RdataClass, + txn: dns.transaction.Transaction, + allow_include: bool = False, + allow_directives: bool | Iterable[str] = True, + force_name: dns.name.Name | None = None, + force_ttl: int | None = None, + force_rdclass: dns.rdataclass.RdataClass | None = None, + force_rdtype: dns.rdatatype.RdataType | None = None, + default_ttl: int | None = None, + ): + self.tok = tok + (self.zone_origin, self.relativize, _) = txn.manager.origin_information() + self.current_origin = self.zone_origin + self.last_ttl = 0 + self.last_ttl_known = False + if force_ttl is not None: + default_ttl = force_ttl + if default_ttl is None: + self.default_ttl = 0 + self.default_ttl_known = False + else: + self.default_ttl = default_ttl + self.default_ttl_known = True + self.last_name = self.current_origin + self.zone_rdclass = rdclass + self.txn = txn + self.saved_state: List[SavedStateType] = [] + self.current_file: Any | None = None + self.allowed_directives: Set[str] + if allow_directives is True: + self.allowed_directives = {"$GENERATE", "$ORIGIN", "$TTL"} + if allow_include: + self.allowed_directives.add("$INCLUDE") + elif allow_directives is False: + # allow_include was ignored in earlier releases if allow_directives was + # False, so we continue that. + self.allowed_directives = set() + else: + # Note that if directives are explicitly specified, then allow_include + # is ignored. + self.allowed_directives = set(_upper_dollarize(d) for d in allow_directives) + self.force_name = force_name + self.force_ttl = force_ttl + self.force_rdclass = force_rdclass + self.force_rdtype = force_rdtype + self.txn.check_put_rdataset(_check_cname_and_other_data) + + def _eat_line(self): + while 1: + token = self.tok.get() + if token.is_eol_or_eof(): + break + + def _get_identifier(self): + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + return token + + def _rr_line(self): + """Process one line from a DNS zone file.""" + token = None + # Name + if self.force_name is not None: + name = self.force_name + else: + if self.current_origin is None: + raise UnknownOrigin + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name(token, self.current_origin) + else: + token = self.tok.get() + if token.is_eol_or_eof(): + # treat leading WS followed by EOL/EOF as if they were EOL/EOF. 
+                    return
+                self.tok.unget(token)
+            name = self.last_name
+            if name is None:
+                raise dns.exception.SyntaxError("the last used name is undefined")
+        assert self.zone_origin is not None
+        if not name.is_subdomain(self.zone_origin):
+            self._eat_line()
+            return
+        if self.relativize:
+            name = name.relativize(self.zone_origin)
+
+        # TTL
+        if self.force_ttl is not None:
+            ttl = self.force_ttl
+            self.last_ttl = ttl
+            self.last_ttl_known = True
+        else:
+            token = self._get_identifier()
+            ttl = None
+            try:
+                ttl = dns.ttl.from_text(token.value)
+                self.last_ttl = ttl
+                self.last_ttl_known = True
+                token = None
+            except dns.ttl.BadTTL:
+                self.tok.unget(token)
+
+        # Class
+        if self.force_rdclass is not None:
+            rdclass = self.force_rdclass
+        else:
+            token = self._get_identifier()
+            try:
+                rdclass = dns.rdataclass.from_text(token.value)
+            except dns.exception.SyntaxError:
+                raise
+            except Exception:
+                rdclass = self.zone_rdclass
+                self.tok.unget(token)
+        if rdclass != self.zone_rdclass:
+            raise dns.exception.SyntaxError("RR class is not zone's class")
+
+        if ttl is None:
+            # support for <class> <ttl> syntax
+            token = self._get_identifier()
+            ttl = None
+            try:
+                ttl = dns.ttl.from_text(token.value)
+                self.last_ttl = ttl
+                self.last_ttl_known = True
+                token = None
+            except dns.ttl.BadTTL:
+                if self.default_ttl_known:
+                    ttl = self.default_ttl
+                elif self.last_ttl_known:
+                    ttl = self.last_ttl
+                self.tok.unget(token)
+
+        # Type
+        if self.force_rdtype is not None:
+            rdtype = self.force_rdtype
+        else:
+            token = self._get_identifier()
+            try:
+                rdtype = dns.rdatatype.from_text(token.value)
+            except Exception:
+                raise dns.exception.SyntaxError(f"unknown rdatatype '{token.value}'")
+
+        try:
+            rd = dns.rdata.from_text(
+                rdclass,
+                rdtype,
+                self.tok,
+                self.current_origin,
+                self.relativize,
+                self.zone_origin,
+            )
+        except dns.exception.SyntaxError:
+            # Catch and reraise.
+            raise
+        except Exception:
+            # All exceptions that occur in the processing of rdata
+            # are treated as syntax errors.  This is not strictly
+            # correct, but it is correct almost all of the time.
+            # We convert them to syntax errors so that we can emit
+            # helpful filename:line info.
+            (ty, va) = sys.exc_info()[:2]
+            raise dns.exception.SyntaxError(f"caught exception {str(ty)}: {str(va)}")
+
+        if not self.default_ttl_known and rdtype == dns.rdatatype.SOA:
+            # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default
+            # TTL from the SOA minttl if no $TTL statement is present before the
+            # SOA is parsed.
+            soa_rd = cast(dns.rdtypes.ANY.SOA.SOA, rd)
+            self.default_ttl = soa_rd.minimum
+            self.default_ttl_known = True
+            if ttl is None:
+                # if we didn't have a TTL on the SOA, set it!
+                ttl = soa_rd.minimum
+
+        # TTL check.  We had to wait until now to do this as the SOA RR's
+        # own TTL can be inferred from its minimum.
+        if ttl is None:
+            raise dns.exception.SyntaxError("Missing default TTL value")
+
+        self.txn.add(name, ttl, rd)
+
+    def _parse_modify(self, side: str) -> Tuple[str, str, int, int, str]:
+        # Here we catch everything in '{' '}' in a group so we can replace it
+        # with ''.
+        is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$")
+        is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$")
+        is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$")
+        # Sometimes there are modifiers in the hostname.  These come after
+        # the dollar sign.  They are in the form: ${offset[,width[,base]]}.
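+        # For example (illustrative): a modifier of ${-20,3,d} applies an
+        # offset of -20, zero-fills to width 3, and formats in decimal, so a
+        # $GENERATE counter value of 123 yields "103".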
+ # Make names + mod = "" + sign = "+" + offset = "0" + width = "0" + base = "d" + g1 = is_generate1.match(side) + if g1: + mod, sign, offset, width, base = g1.groups() + if sign == "": + sign = "+" + else: + g2 = is_generate2.match(side) + if g2: + mod, sign, offset = g2.groups() + if sign == "": + sign = "+" + width = "0" + base = "d" + else: + g3 = is_generate3.match(side) + if g3: + mod, sign, offset, width = g3.groups() + if sign == "": + sign = "+" + base = "d" + + ioffset = int(offset) + iwidth = int(width) + + if sign not in ["+", "-"]: + raise dns.exception.SyntaxError(f"invalid offset sign {sign}") + if base not in ["d", "o", "x", "X", "n", "N"]: + raise dns.exception.SyntaxError(f"invalid type {base}") + + return mod, sign, ioffset, iwidth, base + + def _generate_line(self): + # range lhs [ttl] [class] type rhs [ comment ] + """Process one line containing the GENERATE statement from a DNS + zone file.""" + if self.current_origin is None: + raise UnknownOrigin + + token = self.tok.get() + # Range (required) + try: + start, stop, step = dns.grange.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # lhs (required) + try: + lhs = token.value + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # TTL + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.ttl.BadTTL: + if not (self.last_ttl_known or self.default_ttl_known): + raise dns.exception.SyntaxError("Missing default TTL value") + if self.default_ttl_known: + ttl = self.default_ttl + elif self.last_ttl_known: + ttl = self.last_ttl + else: + # We don't go to the extra "look at the SOA" level of effort for + # $GENERATE, because the user really ought to have defined a TTL + # somehow! 
+ raise dns.exception.SyntaxError("Missing default TTL value") + + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = self.zone_rdclass + if rdclass != self.zone_rdclass: + raise dns.exception.SyntaxError("RR class is not zone's class") + # Type + try: + rdtype = dns.rdatatype.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError(f"unknown rdatatype '{token.value}'") + + # rhs (required) + rhs = token.value + + def _calculate_index(counter: int, offset_sign: str, offset: int) -> int: + """Calculate the index from the counter and offset.""" + if offset_sign == "-": + offset *= -1 + return counter + offset + + def _format_index(index: int, base: str, width: int) -> str: + """Format the index with the given base, and zero-fill it + to the given width.""" + if base in ["d", "o", "x", "X"]: + return format(index, base).zfill(width) + + # base can only be n or N here + hexa = _format_index(index, "x", width) + nibbles = ".".join(hexa[::-1])[:width] + if base == "N": + nibbles = nibbles.upper() + return nibbles + + lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs) + rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs) + for i in range(start, stop + 1, step): + # +1 because bind is inclusive and python is exclusive + + lindex = _calculate_index(i, lsign, loffset) + rindex = _calculate_index(i, rsign, roffset) + + lzfindex = _format_index(lindex, lbase, lwidth) + rzfindex = _format_index(rindex, rbase, rwidth) + + name = lhs.replace(f"${lmod}", lzfindex) + rdata = rhs.replace(f"${rmod}", rzfindex) + + self.last_name = dns.name.from_text( + name, self.current_origin, self.tok.idna_codec + ) + name = self.last_name + assert self.zone_origin is not None + if not name.is_subdomain(self.zone_origin): + self._eat_line() + return + if self.relativize: + name = name.relativize(self.zone_origin) + + try: + rd = dns.rdata.from_text( + rdclass, + rdtype, + rdata, + self.current_origin, + self.relativize, + self.zone_origin, + ) + except dns.exception.SyntaxError: + # Catch and reraise. + raise + except Exception: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. + (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError( + f"caught exception {str(ty)}: {str(va)}" + ) + + self.txn.add(name, ttl, rd) + + def read(self) -> None: + """Read a DNS zone file and build a zone object. 
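+
+        Handles the $TTL, $ORIGIN, $INCLUDE, and $GENERATE directives when
+        they are allowed, and adds each record to the current transaction.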
+ + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + """ + + try: + while 1: + token = self.tok.get(True, True) + if token.is_eof(): + if self.current_file is not None: + self.current_file.close() + if len(self.saved_state) > 0: + ( + self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known, + ) = self.saved_state.pop(-1) + continue + break + elif token.is_eol(): + continue + elif token.is_comment(): + self.tok.get_eol() + continue + elif token.value[0] == "$" and len(self.allowed_directives) > 0: + # Note that we only run directive processing code if at least + # one directive is allowed in order to be backwards compatible + c = token.value.upper() + if c not in self.allowed_directives: + raise dns.exception.SyntaxError( + f"zone file directive '{c}' is not allowed" + ) + if c == "$TTL": + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError("bad $TTL") + self.default_ttl = dns.ttl.from_text(token.value) + self.default_ttl_known = True + self.tok.get_eol() + elif c == "$ORIGIN": + self.current_origin = self.tok.get_name() + self.tok.get_eol() + if self.zone_origin is None: + self.zone_origin = self.current_origin + self.txn._set_origin(self.current_origin) + elif c == "$INCLUDE": + token = self.tok.get() + filename = token.value + token = self.tok.get() + new_origin: dns.name.Name | None + if token.is_identifier(): + new_origin = dns.name.from_text( + token.value, self.current_origin, self.tok.idna_codec + ) + self.tok.get_eol() + elif not token.is_eol_or_eof(): + raise dns.exception.SyntaxError("bad origin in $INCLUDE") + else: + new_origin = self.current_origin + self.saved_state.append( + ( + self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known, + ) + ) + self.current_file = open(filename, encoding="utf-8") + self.tok = dns.tokenizer.Tokenizer(self.current_file, filename) + self.current_origin = new_origin + elif c == "$GENERATE": + self._generate_line() + else: + raise dns.exception.SyntaxError( + f"Unknown zone file directive '{c}'" + ) + continue + self.tok.unget(token) + self._rr_line() + except dns.exception.SyntaxError as detail: + (filename, line_number) = self.tok.where() + if detail is None: + detail = "syntax error" + ex = dns.exception.SyntaxError(f"{filename}:{line_number}: {detail}") + tb = sys.exc_info()[2] + raise ex.with_traceback(tb) from None + + +class RRsetsReaderTransaction(dns.transaction.Transaction): + def __init__(self, manager, replacement, read_only): + assert not read_only + super().__init__(manager, replacement, read_only) + self.rdatasets = {} + + def _get_rdataset(self, name, rdtype, covers): + return self.rdatasets.get((name, rdtype, covers)) + + def _get_node(self, name): + rdatasets = [] + for (rdataset_name, _, _), rdataset in self.rdatasets.items(): + if name == rdataset_name: + rdatasets.append(rdataset) + if len(rdatasets) == 0: + return None + node = dns.node.Node() + node.rdatasets = rdatasets + return node + + def _put_rdataset(self, name, rdataset): + self.rdatasets[(name, rdataset.rdtype, rdataset.covers)] = rdataset + + def _delete_name(self, name): + # First remove any changes involving the name + remove = [] + for key in self.rdatasets: + if key[0] == name: + remove.append(key) + if len(remove) > 0: + for key in remove: + 
del self.rdatasets[key]
+
+    def _delete_rdataset(self, name, rdtype, covers):
+        try:
+            del self.rdatasets[(name, rdtype, covers)]
+        except KeyError:
+            pass
+
+    def _name_exists(self, name):
+        for n, _, _ in self.rdatasets:
+            if n == name:
+                return True
+        return False
+
+    def _changed(self):
+        return len(self.rdatasets) > 0
+
+    def _end_transaction(self, commit):
+        if commit and self._changed():
+            rrsets = []
+            for (name, _, _), rdataset in self.rdatasets.items():
+                rrset = dns.rrset.RRset(
+                    name, rdataset.rdclass, rdataset.rdtype, rdataset.covers
+                )
+                rrset.update(rdataset)
+                rrsets.append(rrset)
+            self.manager.set_rrsets(rrsets)  # pyright: ignore
+
+    def _set_origin(self, origin):
+        pass
+
+    def _iterate_rdatasets(self):
+        raise NotImplementedError  # pragma: no cover
+
+    def _iterate_names(self):
+        raise NotImplementedError  # pragma: no cover
+
+
+class RRSetsReaderManager(dns.transaction.TransactionManager):
+    def __init__(
+        self,
+        origin: dns.name.Name | None = dns.name.root,
+        relativize: bool = False,
+        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+    ):
+        self.origin = origin
+        self.relativize = relativize
+        self.rdclass = rdclass
+        self.rrsets: List[dns.rrset.RRset] = []
+
+    def reader(self):  # pragma: no cover
+        raise NotImplementedError
+
+    def writer(self, replacement=False):
+        assert replacement is True
+        return RRsetsReaderTransaction(self, True, False)
+
+    def get_class(self):
+        return self.rdclass
+
+    def origin_information(self):
+        if self.relativize:
+            effective = dns.name.empty
+        else:
+            effective = self.origin
+        return (self.origin, self.relativize, effective)
+
+    def set_rrsets(self, rrsets: List[dns.rrset.RRset]) -> None:
+        self.rrsets = rrsets
+
+
+def read_rrsets(
+    text: Any,
+    name: dns.name.Name | str | None = None,
+    ttl: int | None = None,
+    rdclass: dns.rdataclass.RdataClass | str | None = dns.rdataclass.IN,
+    default_rdclass: dns.rdataclass.RdataClass | str = dns.rdataclass.IN,
+    rdtype: dns.rdatatype.RdataType | str | None = None,
+    default_ttl: int | str | None = None,
+    idna_codec: dns.name.IDNACodec | None = None,
+    origin: dns.name.Name | str | None = dns.name.root,
+    relativize: bool = False,
+) -> List[dns.rrset.RRset]:
+    """Read one or more rrsets from the specified text, possibly subject
+    to restrictions.
+
+    *text*, a file object or a string, is the input to process.
+
+    *name*, a string, ``dns.name.Name``, or ``None``, is the owner name of
+    the rrset. If not ``None``, then the owner name is "forced", and the
+    input must not specify an owner name. If ``None``, then any owner names
+    are allowed and must be present in the input.
+
+    *ttl*, an ``int``, string, or None. If not ``None``, then the TTL is
+    forced to be the specified value and the input must not specify a TTL.
+    If ``None``, then a TTL may be specified in the input. If it is not
+    specified, then the *default_ttl* will be used.
+
+    *rdclass*, a ``dns.rdataclass.RdataClass``, string, or ``None``. If
+    not ``None``, then the class is forced to the specified value, and the
+    input must not specify a class. If ``None``, then the input may specify
+    a class that matches *default_rdclass*. Note that it is not possible to
+    return rrsets with differing classes; specifying ``None`` for the class
+    simply allows the user to optionally type a class as that may be convenient
+    when cutting and pasting.
+
+    *default_rdclass*, a ``dns.rdataclass.RdataClass`` or string. The class
+    of the returned rrsets.
+
+    *rdtype*, a ``dns.rdatatype.RdataType``, string, or ``None``.
If not + ``None``, then the type is forced to the specified value, and the + input must not specify a type. If ``None``, then a type must be present + for each RR. + + *default_ttl*, an ``int``, string, or ``None``. If not ``None``, then if + the TTL is not forced and is not specified, then this value will be used. + if ``None``, then if the TTL is not forced an error will occur if the TTL + is not specified. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. Note that codecs only apply to the owner name; dnspython does + not do IDNA for names in rdata, as there is no IDNA zonefile format. + + *origin*, a string, ``dns.name.Name``, or ``None``, is the origin for any + relative names in the input, and also the origin to relativize to if + *relativize* is ``True``. + + *relativize*, a bool. If ``True``, names are relativized to the *origin*; + if ``False`` then any relative names in the input are made absolute by + appending the *origin*. + """ + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root, idna_codec) + if isinstance(name, str): + name = dns.name.from_text(name, origin, idna_codec) + if isinstance(ttl, str): + ttl = dns.ttl.from_text(ttl) + if isinstance(default_ttl, str): + default_ttl = dns.ttl.from_text(default_ttl) + if rdclass is not None: + rdclass = dns.rdataclass.RdataClass.make(rdclass) + else: + rdclass = None + default_rdclass = dns.rdataclass.RdataClass.make(default_rdclass) + if rdtype is not None: + rdtype = dns.rdatatype.RdataType.make(rdtype) + else: + rdtype = None + manager = RRSetsReaderManager(origin, relativize, default_rdclass) + with manager.writer(True) as txn: + tok = dns.tokenizer.Tokenizer(text, "", idna_codec=idna_codec) + reader = Reader( + tok, + default_rdclass, + txn, + allow_directives=False, + force_name=name, + force_ttl=ttl, + force_rdclass=rdclass, + force_rdtype=rdtype, + default_ttl=default_ttl, + ) + reader.read() + return manager.rrsets diff --git a/venv/lib/python3.12/site-packages/dns/zonetypes.py b/venv/lib/python3.12/site-packages/dns/zonetypes.py new file mode 100644 index 0000000..195ee2e --- /dev/null +++ b/venv/lib/python3.12/site-packages/dns/zonetypes.py @@ -0,0 +1,37 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""Common zone-related types.""" + +# This is a separate file to avoid import circularity between dns.zone and +# the implementation of the ZONEMD type. 
+ +import hashlib + +import dns.enum + + +class DigestScheme(dns.enum.IntEnum): + """ZONEMD Scheme""" + + SIMPLE = 1 + + @classmethod + def _maximum(cls): + return 255 + + +class DigestHashAlgorithm(dns.enum.IntEnum): + """ZONEMD Hash Algorithm""" + + SHA384 = 1 + SHA512 = 2 + + @classmethod + def _maximum(cls): + return 255 + + +_digest_hashers = { + DigestHashAlgorithm.SHA384: hashlib.sha384, + DigestHashAlgorithm.SHA512: hashlib.sha512, +} diff --git a/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/METADATA new file mode 100644 index 0000000..eaaf09b --- /dev/null +++ b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/METADATA @@ -0,0 +1,149 @@ +Metadata-Version: 2.4 +Name: dnspython +Version: 2.8.0 +Summary: DNS toolkit +Project-URL: homepage, https://www.dnspython.org +Project-URL: repository, https://github.com/rthalley/dnspython.git +Project-URL: documentation, https://dnspython.readthedocs.io/en/stable/ +Project-URL: issues, https://github.com/rthalley/dnspython/issues +Author-email: Bob Halley +License: ISC +License-File: LICENSE +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: ISC License (ISCL) +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.10 +Provides-Extra: dev +Requires-Dist: black>=25.1.0; extra == 'dev' +Requires-Dist: coverage>=7.0; extra == 'dev' +Requires-Dist: flake8>=7; extra == 'dev' +Requires-Dist: hypercorn>=0.17.0; extra == 'dev' +Requires-Dist: mypy>=1.17; extra == 'dev' +Requires-Dist: pylint>=3; extra == 'dev' +Requires-Dist: pytest-cov>=6.2.0; extra == 'dev' +Requires-Dist: pytest>=8.4; extra == 'dev' +Requires-Dist: quart-trio>=0.12.0; extra == 'dev' +Requires-Dist: sphinx-rtd-theme>=3.0.0; extra == 'dev' +Requires-Dist: sphinx>=8.2.0; extra == 'dev' +Requires-Dist: twine>=6.1.0; extra == 'dev' +Requires-Dist: wheel>=0.45.0; extra == 'dev' +Provides-Extra: dnssec +Requires-Dist: cryptography>=45; extra == 'dnssec' +Provides-Extra: doh +Requires-Dist: h2>=4.2.0; extra == 'doh' +Requires-Dist: httpcore>=1.0.0; extra == 'doh' +Requires-Dist: httpx>=0.28.0; extra == 'doh' +Provides-Extra: doq +Requires-Dist: aioquic>=1.2.0; extra == 'doq' +Provides-Extra: idna +Requires-Dist: idna>=3.10; extra == 'idna' +Provides-Extra: trio +Requires-Dist: trio>=0.30; extra == 'trio' +Provides-Extra: wmi +Requires-Dist: wmi>=1.5.1; (platform_system == 'Windows') and extra == 'wmi' +Description-Content-Type: text/markdown + +# dnspython + +[![Build 
Status](https://github.com/rthalley/dnspython/actions/workflows/ci.yml/badge.svg)](https://github.com/rthalley/dnspython/actions/)
+[![Documentation Status](https://readthedocs.org/projects/dnspython/badge/?version=latest)](https://dnspython.readthedocs.io/en/latest/?badge=latest)
+[![PyPI version](https://badge.fury.io/py/dnspython.svg)](https://badge.fury.io/py/dnspython)
+[![License: ISC](https://img.shields.io/badge/License-ISC-brightgreen.svg)](https://opensource.org/licenses/ISC)
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+
+## INTRODUCTION
+
+`dnspython` is a DNS toolkit for Python. It supports almost all record types. It
+can be used for queries, zone transfers, and dynamic updates. It supports
+TSIG-authenticated messages and EDNS0.
+
+`dnspython` provides both high- and low-level access to DNS. The high-level
+classes perform queries for data of a given name, type, and class, and return an
+answer set. The low-level classes allow direct manipulation of DNS zones,
+messages, names, and records.
+
+To see a few of the ways `dnspython` can be used, look in the `examples/`
+directory.
+
+`dnspython` is a utility to work with DNS; it does not use `/etc/hosts`. For
+simple forward DNS lookups, it's better to use `socket.getaddrinfo()` or
+`socket.gethostbyname()`.
+
+`dnspython` originated at Nominum where it was developed to facilitate the
+testing of DNS software.
+
+## ABOUT THIS RELEASE
+
+This is `dnspython` 2.8.0.
+Please read
+[What's New](https://dnspython.readthedocs.io/en/stable/whatsnew.html) for
+information about the changes in this release.
+
+## INSTALLATION
+
+* Many distributions have dnspython packaged for you, so you should check there
+  first.
+* To use a wheel downloaded from PyPI, run:
+
+```
+    pip install dnspython
+```
+
+* To install from the source code, go into the top-level of the source code
+  and run:
+
+```
+    pip install --upgrade pip build
+    python -m build
+    pip install dist/*.whl
+```
+
+* To install the latest from the main branch, run
+`pip install git+https://github.com/rthalley/dnspython.git`
+
+`dnspython`'s default installation does not depend on any modules other than
+those in the Python standard library. To use some features, additional modules
+must be installed. For convenience, `pip` options are defined for the
+requirements.
+
+If you want to use DNS-over-HTTPS, run
+`pip install dnspython[doh]`.
+
+If you want to use DNSSEC functionality, run
+`pip install dnspython[dnssec]`.
+
+If you want to use internationalized domain names (IDNA)
+functionality, you must run
+`pip install dnspython[idna]`.
+
+If you want to use the Trio asynchronous I/O package, run
+`pip install dnspython[trio]`.
+
+If you want to use WMI on Windows to determine the active DNS settings
+instead of the default registry scanning method, run
+`pip install dnspython[wmi]`.
+
+If you want to try the experimental DNS-over-QUIC code, run
+`pip install dnspython[doq]`.
+
+Note that you can install any combination of the above, e.g.:
+`pip install dnspython[doh,dnssec,idna]`
+
+### Notices
+
+Python 2.x support ended with the release of 1.16.0. `dnspython` supports Python 3.10
+and later. Future support is aligned with the lifetime of the Python 3 versions.
+
+Documentation has moved to
+[dnspython.readthedocs.io](https://dnspython.readthedocs.io).
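+
+As a quick taste of the high-level API, a minimal lookup looks like this (a
+sketch only; the queried name is just an example):
+
+```
+import dns.resolver
+
+# Resolve the A records for a name and print each address.
+answer = dns.resolver.resolve("www.dnspython.org", "A")
+for rdata in answer:
+    print(rdata.address)
+```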
diff --git a/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/RECORD new file mode 100644 index 0000000..1ae89b0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/RECORD @@ -0,0 +1,304 @@ +dns/__init__.py,sha256=2TTaN3FRnBIkYhrrkDUs7XYnu4h9zTlfOWdQ4nLuxnA,1693 +dns/__pycache__/__init__.cpython-312.pyc,, +dns/__pycache__/_asyncbackend.cpython-312.pyc,, +dns/__pycache__/_asyncio_backend.cpython-312.pyc,, +dns/__pycache__/_ddr.cpython-312.pyc,, +dns/__pycache__/_features.cpython-312.pyc,, +dns/__pycache__/_immutable_ctx.cpython-312.pyc,, +dns/__pycache__/_no_ssl.cpython-312.pyc,, +dns/__pycache__/_tls_util.cpython-312.pyc,, +dns/__pycache__/_trio_backend.cpython-312.pyc,, +dns/__pycache__/asyncbackend.cpython-312.pyc,, +dns/__pycache__/asyncquery.cpython-312.pyc,, +dns/__pycache__/asyncresolver.cpython-312.pyc,, +dns/__pycache__/btree.cpython-312.pyc,, +dns/__pycache__/btreezone.cpython-312.pyc,, +dns/__pycache__/dnssec.cpython-312.pyc,, +dns/__pycache__/dnssectypes.cpython-312.pyc,, +dns/__pycache__/e164.cpython-312.pyc,, +dns/__pycache__/edns.cpython-312.pyc,, +dns/__pycache__/entropy.cpython-312.pyc,, +dns/__pycache__/enum.cpython-312.pyc,, +dns/__pycache__/exception.cpython-312.pyc,, +dns/__pycache__/flags.cpython-312.pyc,, +dns/__pycache__/grange.cpython-312.pyc,, +dns/__pycache__/immutable.cpython-312.pyc,, +dns/__pycache__/inet.cpython-312.pyc,, +dns/__pycache__/ipv4.cpython-312.pyc,, +dns/__pycache__/ipv6.cpython-312.pyc,, +dns/__pycache__/message.cpython-312.pyc,, +dns/__pycache__/name.cpython-312.pyc,, +dns/__pycache__/namedict.cpython-312.pyc,, +dns/__pycache__/nameserver.cpython-312.pyc,, +dns/__pycache__/node.cpython-312.pyc,, +dns/__pycache__/opcode.cpython-312.pyc,, +dns/__pycache__/query.cpython-312.pyc,, +dns/__pycache__/rcode.cpython-312.pyc,, +dns/__pycache__/rdata.cpython-312.pyc,, +dns/__pycache__/rdataclass.cpython-312.pyc,, +dns/__pycache__/rdataset.cpython-312.pyc,, +dns/__pycache__/rdatatype.cpython-312.pyc,, +dns/__pycache__/renderer.cpython-312.pyc,, +dns/__pycache__/resolver.cpython-312.pyc,, +dns/__pycache__/reversename.cpython-312.pyc,, +dns/__pycache__/rrset.cpython-312.pyc,, +dns/__pycache__/serial.cpython-312.pyc,, +dns/__pycache__/set.cpython-312.pyc,, +dns/__pycache__/tokenizer.cpython-312.pyc,, +dns/__pycache__/transaction.cpython-312.pyc,, +dns/__pycache__/tsig.cpython-312.pyc,, +dns/__pycache__/tsigkeyring.cpython-312.pyc,, +dns/__pycache__/ttl.cpython-312.pyc,, +dns/__pycache__/update.cpython-312.pyc,, +dns/__pycache__/version.cpython-312.pyc,, +dns/__pycache__/versioned.cpython-312.pyc,, +dns/__pycache__/win32util.cpython-312.pyc,, +dns/__pycache__/wire.cpython-312.pyc,, +dns/__pycache__/xfr.cpython-312.pyc,, +dns/__pycache__/zone.cpython-312.pyc,, +dns/__pycache__/zonefile.cpython-312.pyc,, +dns/__pycache__/zonetypes.cpython-312.pyc,, +dns/_asyncbackend.py,sha256=bv-2iaDTEDH4Esx2tc2GeVCnaqHtsQqb3WWqoYZngzA,2403 +dns/_asyncio_backend.py,sha256=08Ezq3L8G190Sdr8qMgjwnWNhbyMa1MFB3pWYkGQ0a0,9147 +dns/_ddr.py,sha256=rHXKC8kncCTT9N4KBh1flicl79nyDjQ-DDvq30MJ3B8,5247 +dns/_features.py,sha256=VYTUetGL5x8IEtxMUQk9_ftat2cvyYJw8HfIfpMM8D8,2493 +dns/_immutable_ctx.py,sha256=Schj9tuGUAQ_QMh612H7Uq6XcvPo5AkVwoBxZJJ8liA,2478 +dns/_no_ssl.py,sha256=M8mj_xYkpsuhny_vHaTWCjI1pNvekYG6V52kdqFkUYY,1502 +dns/_tls_util.py,sha256=kcvrPdGnSGP1fP9sNKekBZ3j-599HwZkmAk6ybyCebM,528 +dns/_trio_backend.py,sha256=Tqzm46FuRSYkUJDYL8qp6Qk8hbc6ZxiLBc8z-NsTULg,8597 
+dns/asyncbackend.py,sha256=82fXTFls_m7F_ekQbgUGOkoBbs4BI-GBLDZAWNGUvJ0,2796 +dns/asyncquery.py,sha256=34B1EIekX3oSg0jF8ZSqEiUbNZTsJa3r2oqC01OIY7U,32329 +dns/asyncresolver.py,sha256=TncJ7UukzA0vF79AwNa2gel0y9UO02tCdQf3zUHbygg,17728 +dns/btree.py,sha256=QPz4IzW_yTtSmz_DC6LKvZdJvTs50CQRKbAa0UAFMTs,30757 +dns/btreezone.py,sha256=H9orKjQaMhnPjtAhHpRZlV5wd91N17iuqOmTUVzv6sU,13082 +dns/dnssec.py,sha256=zXqhmUM4k6M-9YVR49crEI6Jc0zhZSk7NX9BWDafhTQ,41356 +dns/dnssecalgs/__init__.py,sha256=B4hebjElugf8zhCauhH6kvACqI50iYLSKxEqUfL6970,4350 +dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc,, +dns/dnssecalgs/__pycache__/base.cpython-312.pyc,, +dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc,, +dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc,, +dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc,, +dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc,, +dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc,, +dns/dnssecalgs/base.py,sha256=4Oq9EhKBEYupojZ3hENBiuq2Js3Spimy_NeDb9Rl1a8,2497 +dns/dnssecalgs/cryptography.py,sha256=utsBa_s8OOOKUeudvFullBNMRMjHmeoa66RNA6UiJMw,2428 +dns/dnssecalgs/dsa.py,sha256=ONilkD8Hhartj3Mwe7LKBT0vXS4E0KgfvTtV2ysZLhM,3605 +dns/dnssecalgs/ecdsa.py,sha256=TK8PclMAt7xVQTv6FIse9jZwXVCv_B-_AAgfhK0rTWQ,3283 +dns/dnssecalgs/eddsa.py,sha256=Yc0L9O2A_ySOSSalJiq5h7TU1LWtJgW1JIJWsGx96FI,2000 +dns/dnssecalgs/rsa.py,sha256=YOPPtpfOKdgBfBJvOcDofYTiC4mGmwCfqdYUvEbdHf8,3663 +dns/dnssectypes.py,sha256=CyeuGTS_rM3zXr8wD9qMT9jkzvVfTY2JWckUcogG83E,1799 +dns/e164.py,sha256=Sc-Ctv8lXpaDot_Su02wLFxLpxLReVW7_23YiGrnMC4,3937 +dns/edns.py,sha256=E5HRHMJNGGOyNvkR4iKY2jkaoQasa4K61Feuko9uY5s,17436 +dns/entropy.py,sha256=dSbsNoNVoypURvOu-clqMiD-dFQ-fsKOPYSHwoTjaec,4247 +dns/enum.py,sha256=PBphGzrIWOi8l3MgvkEMpsJapKIejkaQUqFuMWUcZXc,3685 +dns/exception.py,sha256=zEdlBUUsjb3dqk0etKxbFXUng0lLB7TPj7JFsNN7HzQ,5936 +dns/flags.py,sha256=cQ3kTFyvcKiWHAxI5AwchNqxVOrsIrgJ6brgrH42Wq8,2750 +dns/grange.py,sha256=ZqjNVDtb7i6E9D3ai6mcWR_nFNHyCXPp7j3dLFidtvY,2154 +dns/immutable.py,sha256=InrtpKvPxl-74oYbzsyneZwAuX78hUqeG22f2aniZbk,2017 +dns/inet.py,sha256=DbkUeb4PNLmxgUVPXX1GeWQH6e7a5WZ2AP_-befdg-o,5753 +dns/ipv4.py,sha256=dRiZRfyZAOlwlj3YlfbvZChRQAKstYh9k0ibNZwHu5U,2487 +dns/ipv6.py,sha256=GccOccOFZGFlwNFgV79GffZJv6u1GW28jM_amdiLqeM,6517 +dns/message.py,sha256=YVNQjYYFDSY6ttuwz_zvJnsCGuY1t11DdchsNlcBHG0,69152 +dns/name.py,sha256=rHvrUjhkCoR0_ANOH3fHJcY1swefx62SfBTDRvoGTsI,42910 +dns/namedict.py,sha256=hJRYpKeQv6Bd2LaUOPV0L_a0eXEIuqgggPXaH4c3Tow,4000 +dns/nameserver.py,sha256=LLOUGTjdAcj4cs-zAXeaH7Pf90IW0P64MQOrAb9PAPE,10007 +dns/node.py,sha256=Z2lzeqvPjqoR-Pbevp0OJqI_bGxwYzJIIevUccTElaM,12627 +dns/opcode.py,sha256=2EgPHQaGBRXN5q4C0KslagWbmWAbyT9Cw_cBj_sMXeA,2774 +dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +dns/query.py,sha256=85EWlMD1hDJO5xozZ7tFazMbZldpQ04L0sQFoQfBZiI,61686 +dns/quic/__init__.py,sha256=eqHPKj8SUk5rdeQxJSS-x3XSVqwcUPZlzTUio8mOpSg,2575 +dns/quic/__pycache__/__init__.cpython-312.pyc,, +dns/quic/__pycache__/_asyncio.cpython-312.pyc,, +dns/quic/__pycache__/_common.cpython-312.pyc,, +dns/quic/__pycache__/_sync.cpython-312.pyc,, +dns/quic/__pycache__/_trio.cpython-312.pyc,, +dns/quic/_asyncio.py,sha256=YgoU65THKtpHfV8UPAnNr-HkpbkR7XY01E7R3oh5apg,10314 +dns/quic/_common.py,sha256=M7lfxwUfr07fHkefo9BbRogQmwB_lEbittc7ZAQ_ulI,11087 +dns/quic/_sync.py,sha256=Ixj0BR6ngRWaKqTUiTrYbLw0rWVsUE6uJuNJB5oUlI0,10982 +dns/quic/_trio.py,sha256=NdClJJ80TY4kg8wM34JCfzX75fhhDb0vLy-WZkSyW6E,9452 +dns/rcode.py,sha256=A7UyvwbaFDz1PZaoYcAmXcerpZV-bRC2Zv3uJepiXa4,4181 
+dns/rdata.py,sha256=7OAmPoSVEysCF84bjvaGXrfB1K69bpswaKtM1X89tXQ,31977 +dns/rdataclass.py,sha256=TK4W4ywB1L_X7EZqk2Gmwnu7vdQpolQF5DtQWyNk5xo,2984 +dns/rdataset.py,sha256=aoOatp7pbWhs2JieS0vcHnNc4dfwA0SBuvXAoqe3vxE,16627 +dns/rdatatype.py,sha256=W7r_B43ja4ZTHIJgqbb2eR99lXOYntf3ngGj396AvKg,7487 +dns/rdtypes/ANY/AFSDB.py,sha256=k75wMwreF1DAfDymu4lHh16BUx7ulVP3PLeQBZnkurY,1661 +dns/rdtypes/ANY/AMTRELAY.py,sha256=zE5xls02_NvbQwXUy-MnpV-uVVSJJuaKtZ86H8_X4ic,3355 +dns/rdtypes/ANY/AVC.py,sha256=SpsXYzlBirRWN0mGnQe0MdN6H8fvlgXPJX5PjOHnEak,1024 +dns/rdtypes/ANY/CAA.py,sha256=Hq1tHBrFW-BdxkjrGCq9u6ezaUHj6nFspBD5ClpkRYc,2456 +dns/rdtypes/ANY/CDNSKEY.py,sha256=bJAdrBMsFHIJz8TF1AxZoNbdxVWBCRTG-bR_uR_r_G4,1225 +dns/rdtypes/ANY/CDS.py,sha256=Y9nIRUCAabztVLbxm2SXAdYapFemCOUuGh5JqroCDUs,1163 +dns/rdtypes/ANY/CERT.py,sha256=OAYbtDdcwRhW8w_lbxHbgyWUHxYkTHV2zbiQff00X74,3547 +dns/rdtypes/ANY/CNAME.py,sha256=IHGGq2BDpeKUahTr1pvyBQgm0NGBI_vQ3Vs5mKTXO4w,1206 +dns/rdtypes/ANY/CSYNC.py,sha256=TnO2TjHfc9Cccfsz8dSsuH9Y53o-HllMVeU2DSAglrc,2431 +dns/rdtypes/ANY/DLV.py,sha256=J-pOrw5xXsDoaB9G0r6znlYXJtqtcqhsl1OXs6CPRU4,986 +dns/rdtypes/ANY/DNAME.py,sha256=yqXRtx4dAWwB4YCCv-qW6uaxeGhg2LPQ2uyKwWaMdXs,1150 +dns/rdtypes/ANY/DNSKEY.py,sha256=MD8HUVH5XXeAGOnFWg5aVz_w-2tXYwCeVXmzExhiIeQ,1223 +dns/rdtypes/ANY/DS.py,sha256=_gf8vk1O_uY8QXFjsfUw-bny-fm6e-QpCk3PT0JCyoM,995 +dns/rdtypes/ANY/DSYNC.py,sha256=q-26ceC4f2A2A6OmVaiOwDwAe_LAHvRsra1PZ4GyotA,2154 +dns/rdtypes/ANY/EUI48.py,sha256=x0BkK0sY_tgzuCwfDYpw6tyuChHjjtbRpAgYhO0Y44o,1151 +dns/rdtypes/ANY/EUI64.py,sha256=1jCff2-SXHJLDnNDnMW8Cd_o-ok0P3x6zKy_bcCU5h4,1161 +dns/rdtypes/ANY/GPOS.py,sha256=u4qwiDBVoC7bsKfxDKGbPjnOKddpdjy2p1AhziDWcPw,4439 +dns/rdtypes/ANY/HINFO.py,sha256=D2WvjTsvD_XqT8BepBIyjPL2iYGMgYqb1VQa9ApO0qE,2217 +dns/rdtypes/ANY/HIP.py,sha256=WSw31w96y1JM6ufasx7gRHUPTQuI5ejtyLxpD7vcINE,3216 +dns/rdtypes/ANY/ISDN.py,sha256=L4C2Rxrr4JJN17lmJRbZN8RhM_ujjwIskY_4V4Gd3r4,2723 +dns/rdtypes/ANY/L32.py,sha256=I0HcPHmvRUz2_yeDd0c5uueNKwcxmbz6V-7upNOc1GA,1302 +dns/rdtypes/ANY/L64.py,sha256=rbdYukNdezhQGH6vowKu1VbUWwi5cYSg_VbWEDWyYGA,1609 +dns/rdtypes/ANY/LOC.py,sha256=jxbB0bmbnMW8AVrElmoSW0SOmLPoEf5AwQLwUeAyMsY,11962 +dns/rdtypes/ANY/LP.py,sha256=X0xGo9vr1b3AQ8J8LPMyn_ooKRuEmjwdi7TGE2mqK_k,1332 +dns/rdtypes/ANY/MX.py,sha256=qQk83idY0-SbRMDmB15JOpJi7cSyiheF-ALUD0Ev19E,995 +dns/rdtypes/ANY/NID.py,sha256=8D8RDttb0BPObs0dXbFKajAhA05iZlqAq-51b6wusEI,1561 +dns/rdtypes/ANY/NINFO.py,sha256=bdL_-6Bejb2EH-xwR1rfSr_9E3SDXLTAnov7x2924FI,1041 +dns/rdtypes/ANY/NS.py,sha256=ThfaPalUlhbyZyNyvBM3k-7onl3eJKq5wCORrOGtkMM,995 +dns/rdtypes/ANY/NSEC.py,sha256=kicEYxcKaLBpV6C_M8cHdDaqBoiYl6EYtPvjyR6kExI,2465 +dns/rdtypes/ANY/NSEC3.py,sha256=NUG3AT626zu3My8QeNMiPVfpn3PRK9AGBkKW3cIZDzM,4250 +dns/rdtypes/ANY/NSEC3PARAM.py,sha256=-r5rBTMezSh7J9Wb7bWng_TXPKIETs2AXY4WFdhz7tM,2625 +dns/rdtypes/ANY/OPENPGPKEY.py,sha256=3LHryx1g0g-WrOI19PhGzGZG0anIJw2CCn93P4aT-Lk,1870 +dns/rdtypes/ANY/OPT.py,sha256=W36RslT_Psp95OPUC70knumOYjKpaRHvGT27I-NV2qc,2561 +dns/rdtypes/ANY/PTR.py,sha256=5HcR1D77Otyk91vVY4tmqrfZfSxSXWyWvwIW-rIH5gc,997 +dns/rdtypes/ANY/RESINFO.py,sha256=Kf2NcKbkeI5gFE1bJfQNqQCaitYyXfV_9nQYl1luUZ0,1008 +dns/rdtypes/ANY/RP.py,sha256=8doJlhjYDYiAT6KNF1mAaemJ20YJFUPvit8LOx4-I-U,2174 +dns/rdtypes/ANY/RRSIG.py,sha256=_ohbap8Dp_3VMU4w7ozVWGyFCtpm8A-l1F1wQiFZogA,4941 +dns/rdtypes/ANY/RT.py,sha256=2t9q3FZQ28iEyceeU25KU2Ur0T5JxELAu8BTwfOUgVw,1013 +dns/rdtypes/ANY/SMIMEA.py,sha256=6yjHuVDfIEodBU9wxbCGCDZ5cWYwyY6FCk-aq2VNU0s,222 +dns/rdtypes/ANY/SOA.py,sha256=tbbpP7RK2kpTTYCgdAWGCxlIMcX9U5MTOhz7vLP4p0I,3034 
+dns/rdtypes/ANY/SPF.py,sha256=rA3Srs9ECQx-37lqm7Zf7aYmMpp_asv4tGS8_fSQ-CU,1022 +dns/rdtypes/ANY/SSHFP.py,sha256=F5vrZB-MAmeGJFAgEwRjXxgxerhoAd6kT9AcNNmkcF4,2550 +dns/rdtypes/ANY/TKEY.py,sha256=qvMJd0HGQF1wHGk1eWdITBVnAkj1oTHHbP5zSzV4cTc,4848 +dns/rdtypes/ANY/TLSA.py,sha256=cytzebS3W7FFr9qeJ9gFSHq_bOwUk9aRVlXWHfnVrRs,218 +dns/rdtypes/ANY/TSIG.py,sha256=4fNQJSNWZXUKZejCciwQuUJtTw2g-YbPmqHrEj_pitg,4750 +dns/rdtypes/ANY/TXT.py,sha256=F1U9gIAhwXIV4UVT7CwOCEn_su6G1nJIdgWJsLktk20,1000 +dns/rdtypes/ANY/URI.py,sha256=JyPYKh2RXzI34oABDiJ2oDh3TE_l-zmut4jBNA-ONt4,2913 +dns/rdtypes/ANY/WALLET.py,sha256=IaP2g7Nq26jWGKa8MVxvJjWXLQ0wrNR1IWJVyyMG8oU,219 +dns/rdtypes/ANY/X25.py,sha256=BzEM7uOY7CMAm7QN-dSLj-_LvgnnohwJDUjMstzwqYo,1942 +dns/rdtypes/ANY/ZONEMD.py,sha256=DjBYvHY13nF70uxTM77zf3R9n0Uy8Frbj1LuBXbC7jU,2389 +dns/rdtypes/ANY/__init__.py,sha256=2UKaYp81SLH6ofE021on9pR7jzmB47D1iXjQ3M7FXrw,1539 +dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/CSYNC.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/DLV.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/DNAME.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/DS.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/DSYNC.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/EUI48.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/EUI64.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/GPOS.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/HINFO.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/HIP.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/ISDN.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/L32.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/L64.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/LOC.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/LP.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/MX.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/NID.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/NINFO.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/NS.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/OPT.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/PTR.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/RESINFO.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/RP.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/RRSIG.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/RT.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/SOA.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/SPF.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/SSHFP.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/TKEY.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/TLSA.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/TSIG.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/TXT.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/URI.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/WALLET.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/X25.cpython-312.pyc,, +dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-312.pyc,, 
+dns/rdtypes/ANY/__pycache__/__init__.cpython-312.pyc,, +dns/rdtypes/CH/A.py,sha256=Iq82L3RLM-OwB5hyvtX1Das9oToiZMzNgs979cAkDz8,2229 +dns/rdtypes/CH/__init__.py,sha256=GD9YeDKb9VBDo-J5rrChX1MWEGyQXuR9Htnbhg_iYLc,923 +dns/rdtypes/CH/__pycache__/A.cpython-312.pyc,, +dns/rdtypes/CH/__pycache__/__init__.cpython-312.pyc,, +dns/rdtypes/IN/A.py,sha256=FfFn3SqbpneL9Ky63COP50V2ZFxqS1ldCKJh39Enwug,1814 +dns/rdtypes/IN/AAAA.py,sha256=AxrOlYy-1TTTWeQypDKeXrDCrdHGor0EKCE4fxzSQGo,1820 +dns/rdtypes/IN/APL.py,sha256=4Kz56antsRGu-cfV2MCHN8rmVo90wnZXnLWA6uQpnk4,5081 +dns/rdtypes/IN/DHCID.py,sha256=x9vedfzJ3vvxPC1ihWTTcxXBMYL0Q24Wmj6O67aY5og,1875 +dns/rdtypes/IN/HTTPS.py,sha256=P-IjwcvDQMmtoBgsDHglXF7KgLX73G6jEDqCKsnaGpQ,220 +dns/rdtypes/IN/IPSECKEY.py,sha256=jMO-aGl1eglWDqMxAkM2BvKDjfe9O1X0avBoWCtWi30,3261 +dns/rdtypes/IN/KX.py,sha256=K1JwItL0n5G-YGFCjWeh0C9DyDD8G8VzicsBeQiNAv0,1013 +dns/rdtypes/IN/NAPTR.py,sha256=JhGpvtCn_qlNWWlW9ilrWh9PNElBgNq1SWJPqD3LRzA,3741 +dns/rdtypes/IN/NSAP.py,sha256=6YfWCVSIPTTBmRAzG8nVBj3LnohncXUhSFJHgp-TRdc,2163 +dns/rdtypes/IN/NSAP_PTR.py,sha256=iTxlV6fr_Y9lqivLLncSHxEhmFqz5UEElDW3HMBtuCU,1015 +dns/rdtypes/IN/PX.py,sha256=zRg_5eGQdpzCRUsXIccxJOs7xoTAn7i4PIrj0Zwv-1A,2748 +dns/rdtypes/IN/SRV.py,sha256=TVai6Rtfx0_73wH999uPGuz-p2m6BTVIleXy1Tlm5Dc,2759 +dns/rdtypes/IN/SVCB.py,sha256=HeFmi2v01F00Hott8FlvQ4R7aPxFmT7RF-gt45R5K_M,218 +dns/rdtypes/IN/WKS.py,sha256=4_dLY3Bh6ePkfgku11QzLJv74iSyoSpt8EflIp_AMNc,3644 +dns/rdtypes/IN/__init__.py,sha256=HbI8aw9HWroI6SgEvl8Sx6FdkDswCCXMbSRuJy5o8LQ,1083 +dns/rdtypes/IN/__pycache__/A.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc,, +dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc,, +dns/rdtypes/__init__.py,sha256=NYizfGglJfhqt_GMtSSXf7YQXIEHHCiJ_Y_qaLVeiOI,1073 +dns/rdtypes/__pycache__/__init__.cpython-312.pyc,, +dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc,, +dns/rdtypes/__pycache__/dsbase.cpython-312.pyc,, +dns/rdtypes/__pycache__/euibase.cpython-312.pyc,, +dns/rdtypes/__pycache__/mxbase.cpython-312.pyc,, +dns/rdtypes/__pycache__/nsbase.cpython-312.pyc,, +dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc,, +dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc,, +dns/rdtypes/__pycache__/txtbase.cpython-312.pyc,, +dns/rdtypes/__pycache__/util.cpython-312.pyc,, +dns/rdtypes/dnskeybase.py,sha256=GXSOvGtiRjY3fhqlI_T-4ukF4JQvvh3sk7UF0vipmPc,2824 +dns/rdtypes/dsbase.py,sha256=elOLkRb45vYzyh36_1FSJWWO9AI2wnK3GpddmQNdj3Y,3423 +dns/rdtypes/euibase.py,sha256=2DluC_kTi2io2ICgzFEdSxKGPFx3ib3ZXnA6YaAhAp0,2675 +dns/rdtypes/mxbase.py,sha256=N_3EX_2BgY0wMdGADL6_5nxBRUdx4ZcdNIYfGg5rMP8,3190 +dns/rdtypes/nsbase.py,sha256=tueXVV6E8lelebOmrmoOPq47eeRvOpsxHVXH4cOFxcs,2323 +dns/rdtypes/svcbbase.py,sha256=0VnPpt7fSCNt_MtGnWOiYtkY-6jQRWIli8JTRROakys,17717 +dns/rdtypes/tlsabase.py,sha256=hHuRO_MQ5g_tWBIDyTNArAWwbUc-MdZlXcjQxy5defA,2588 +dns/rdtypes/txtbase.py,sha256=lEzlKS6dx6UnhgoBPGIzqC3G0e8iWBetrkDtkwM16Ic,3723 
+dns/rdtypes/util.py,sha256=WjiRlxsu_sq40XpSdR6wN54WWavKe7PLh-V9UaNhk7A,9680 +dns/renderer.py,sha256=sj_m9NRJoY8gdQ9zOhSVu0pTAUyBtM5AGpfea83jGpQ,11500 +dns/resolver.py,sha256=FRa-pJApeV_DFgLEwiwZP-2g7RHAg0kVCbg9EdNYLnc,73967 +dns/reversename.py,sha256=pPDGRfg7iq09cjEhKLKEcahdoyViS0y0ORip--r5vk8,3845 +dns/rrset.py,sha256=f8avzbtBb-y93jdyhhTJ8EJx1zOTcNTK3DtiK84eGNY,9129 +dns/serial.py,sha256=-t5rPW-TcJwzBMfIJo7Tl-uDtaYtpqOfCVYx9dMaDCY,3606 +dns/set.py,sha256=hublMKCIhd9zp5Hz_fvQTwF-Ze28jn7mjqei6vTGWfs,9213 +dns/tokenizer.py,sha256=dqQvBF3oUjP7URC7ZzBuQVLMVXhvf1gJusIpkV-IQ6U,23490 +dns/transaction.py,sha256=HnHa4nKL_ddtuWH4FaiKPEt81ImELL1fumZb3ll4KbI,22579 +dns/tsig.py,sha256=mWjZGZL75atl-jf3va1FhP9LfLGWT5g9Y9DgsSan4Mo,11576 +dns/tsigkeyring.py,sha256=1xSBgaV1KLR_9FQGsGWbkBD3XJjK8IFQx-H_olH1qyQ,2650 +dns/ttl.py,sha256=Rl8UOKV0_QyZzOdQ-JoB7nSHvBFehZXe_M0cxIBVc3Y,2937 +dns/update.py,sha256=iqZEO-_U0ooAqLlIRo1OhAKI8d-jpwPhBy-vC8v1dtY,12236 +dns/version.py,sha256=d7ViavUC8gYfrWbeyH8WMAldyGk_WVF5_zkCmCJv0ZQ,1763 +dns/versioned.py,sha256=yJ76QfKdIEKBtKX_DLA_IZGUZoFB1id1mMKzIj2eRm8,11841 +dns/win32util.py,sha256=iz5Gw0CTHAIqumdE25xdYUbhhSFiaZTRM-HXskglB2o,16799 +dns/wire.py,sha256=hylnQ30yjA3UcJSElhSAqYKMt5HICYqQ_N5b71K2smA,3155 +dns/xfr.py,sha256=UE4xAyfRDNH14x4os8yC-4Tl8brc_kCpBLxT0h6x-AM,13637 +dns/zone.py,sha256=ZferSA6wMN46uuBNkrgbRcSM8FSCCxMrNiLT3WoISbw,53098 +dns/zonefile.py,sha256=Xz24A8wH97NoA_iTbastSzUZ-S-DmLFG0SgIfVzQinY,28517 +dns/zonetypes.py,sha256=HrQNZxZ_gWLWI9dskix71msi9wkYK5pgrBBbPb1T74Y,690 +dnspython-2.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +dnspython-2.8.0.dist-info/METADATA,sha256=dPdZU5uJ4pkVGy1pfGEjBzRbdm27fpQ1z4Y6Bpgf04U,5680 +dnspython-2.8.0.dist-info/RECORD,, +dnspython-2.8.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +dnspython-2.8.0.dist-info/licenses/LICENSE,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 diff --git a/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/WHEEL new file mode 100644 index 0000000..12228d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..390a726 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dnspython-2.8.0.dist-info/licenses/LICENSE @@ -0,0 +1,35 @@ +ISC License + +Copyright (C) Dnspython Contributors + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all +copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR +PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + + +Copyright (C) 2001-2017 Nominum, Inc. +Copyright (C) Google Inc. 
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose with or without fee is hereby granted,
+provided that the above copyright notice and this permission notice
+appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/venv/lib/python3.12/site-packages/dotenv/__init__.py b/venv/lib/python3.12/site-packages/dotenv/__init__.py
new file mode 100644
index 0000000..7f4c631
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dotenv/__init__.py
@@ -0,0 +1,49 @@
+from typing import Any, Optional
+
+from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key,
+                   unset_key)
+
+
+def load_ipython_extension(ipython: Any) -> None:
+    from .ipython import load_ipython_extension
+    load_ipython_extension(ipython)
+
+
+def get_cli_string(
+    path: Optional[str] = None,
+    action: Optional[str] = None,
+    key: Optional[str] = None,
+    value: Optional[str] = None,
+    quote: Optional[str] = None,
+):
+    """Returns a string suitable for running as a shell script.
+
+    Useful for converting arguments passed to a fabric task
+    to be passed to a `local` or `run` command.
+    """
+    command = ['dotenv']
+    if quote:
+        command.append(f'-q {quote}')
+    if path:
+        command.append(f'-f {path}')
+    if action:
+        command.append(action)
+        if key:
+            command.append(key)
+            if value:
+                if ' ' in value:
+                    command.append(f'"{value}"')
+                else:
+                    command.append(value)
+
+    return ' '.join(command).strip()
+
+
+__all__ = ['get_cli_string',
+           'load_dotenv',
+           'dotenv_values',
+           'get_key',
+           'set_key',
+           'unset_key',
+           'find_dotenv',
+           'load_ipython_extension']
diff --git a/venv/lib/python3.12/site-packages/dotenv/__main__.py b/venv/lib/python3.12/site-packages/dotenv/__main__.py
new file mode 100644
index 0000000..3977f55
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dotenv/__main__.py
@@ -0,0 +1,6 @@
+"""Entry point for cli, enables execution with `python -m dotenv`"""
+
+from .cli import cli
+
+if __name__ == "__main__":
+    cli()
diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..61a012a
Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/__main__.cpython-312.pyc
new file mode 100644
index 0000000..81acdc3
Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/__main__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/cli.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/cli.cpython-312.pyc
new file mode 100644
index 0000000..b5dc76e
Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/cli.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/ipython.cpython-312.pyc
b/venv/lib/python3.12/site-packages/dotenv/__pycache__/ipython.cpython-312.pyc new file mode 100644 index 0000000..27eae24 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/ipython.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/main.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000..cb99474 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/main.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/parser.cpython-312.pyc new file mode 100644 index 0000000..7a58bb9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/variables.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/variables.cpython-312.pyc new file mode 100644 index 0000000..936076e Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/variables.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..ec3962f Binary files /dev/null and b/venv/lib/python3.12/site-packages/dotenv/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/dotenv/cli.py b/venv/lib/python3.12/site-packages/dotenv/cli.py new file mode 100644 index 0000000..65ead46 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dotenv/cli.py @@ -0,0 +1,199 @@ +import json +import os +import shlex +import sys +from contextlib import contextmanager +from subprocess import Popen +from typing import Any, Dict, IO, Iterator, List + +try: + import click +except ImportError: + sys.stderr.write('It seems python-dotenv is not installed with cli option. \n' + 'Run pip install "python-dotenv[cli]" to fix this.') + sys.exit(1) + +from .main import dotenv_values, set_key, unset_key +from .version import __version__ + + +def enumerate_env(): + """ + Return a path for the ${pwd}/.env file. + + If pwd does not exist, return None. + """ + try: + cwd = os.getcwd() + except FileNotFoundError: + return None + path = os.path.join(cwd, '.env') + return path + + +@click.group() +@click.option('-f', '--file', default=enumerate_env(), + type=click.Path(file_okay=True), + help="Location of the .env file, defaults to .env file in current working directory.") +@click.option('-q', '--quote', default='always', + type=click.Choice(['always', 'never', 'auto']), + help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.") +@click.option('-e', '--export', default=False, + type=click.BOOL, + help="Whether to write the dot file as an executable bash script.") +@click.version_option(version=__version__) +@click.pass_context +def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None: + """This script is used to set, get or unset values from a .env file.""" + ctx.obj = {'QUOTE': quote, 'EXPORT': export, 'FILE': file} + + +@contextmanager +def stream_file(path: os.PathLike) -> Iterator[IO[str]]: + """ + Open a file and yield the corresponding (decoded) stream. + + Exits with error code 2 if the file cannot be opened. 
+ """ + + try: + with open(path) as stream: + yield stream + except OSError as exc: + print(f"Error opening env file: {exc}", file=sys.stderr) + exit(2) + + +@cli.command() +@click.pass_context +@click.option('--format', default='simple', + type=click.Choice(['simple', 'json', 'shell', 'export']), + help="The format in which to display the list. Default format is simple, " + "which displays name=value without quotes.") +def list(ctx: click.Context, format: bool) -> None: + """Display all the stored key/value.""" + file = ctx.obj['FILE'] + + with stream_file(file) as stream: + values = dotenv_values(stream=stream) + + if format == 'json': + click.echo(json.dumps(values, indent=2, sort_keys=True)) + else: + prefix = 'export ' if format == 'export' else '' + for k in sorted(values): + v = values[k] + if v is not None: + if format in ('export', 'shell'): + v = shlex.quote(v) + click.echo(f'{prefix}{k}={v}') + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +@click.argument('value', required=True) +def set(ctx: click.Context, key: Any, value: Any) -> None: + """Store the given key/value.""" + file = ctx.obj['FILE'] + quote = ctx.obj['QUOTE'] + export = ctx.obj['EXPORT'] + success, key, value = set_key(file, key, value, quote, export) + if success: + click.echo(f'{key}={value}') + else: + exit(1) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +def get(ctx: click.Context, key: Any) -> None: + """Retrieve the value for the given key.""" + file = ctx.obj['FILE'] + + with stream_file(file) as stream: + values = dotenv_values(stream=stream) + + stored_value = values.get(key) + if stored_value: + click.echo(stored_value) + else: + exit(1) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +def unset(ctx: click.Context, key: Any) -> None: + """Removes the given key.""" + file = ctx.obj['FILE'] + quote = ctx.obj['QUOTE'] + success, key = unset_key(file, key, quote) + if success: + click.echo(f"Successfully removed {key}") + else: + exit(1) + + +@cli.command(context_settings={'ignore_unknown_options': True}) +@click.pass_context +@click.option( + "--override/--no-override", + default=True, + help="Override variables from the environment file with those from the .env file.", +) +@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) +def run(ctx: click.Context, override: bool, commandline: List[str]) -> None: + """Run command with environment variables present.""" + file = ctx.obj['FILE'] + if not os.path.isfile(file): + raise click.BadParameter( + f'Invalid value for \'-f\' "{file}" does not exist.', + ctx=ctx + ) + dotenv_as_dict = { + k: v + for (k, v) in dotenv_values(file).items() + if v is not None and (override or k not in os.environ) + } + + if not commandline: + click.echo('No command given.') + exit(1) + ret = run_command(commandline, dotenv_as_dict) + exit(ret) + + +def run_command(command: List[str], env: Dict[str, str]) -> int: + """Run command in sub process. + + Runs the command in a sub process with the variables from `env` + added in the current environment variables. 
+
+    Parameters
+    ----------
+    command: List[str]
+        The command and its parameters
+    env: Dict
+        The additional environment variables
+
+    Returns
+    -------
+    int
+        The return code of the command
+
+    """
+    # copy the current environment variables and add the values from
+    # `env`
+    cmd_env = os.environ.copy()
+    cmd_env.update(env)
+
+    p = Popen(command,
+              universal_newlines=True,
+              bufsize=0,
+              shell=False,
+              env=cmd_env)
+    _, _ = p.communicate()
+
+    return p.returncode
diff --git a/venv/lib/python3.12/site-packages/dotenv/ipython.py b/venv/lib/python3.12/site-packages/dotenv/ipython.py
new file mode 100644
index 0000000..7df727c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dotenv/ipython.py
@@ -0,0 +1,39 @@
+from IPython.core.magic import Magics, line_magic, magics_class  # type: ignore
+from IPython.core.magic_arguments import (argument, magic_arguments,  # type: ignore
+                                          parse_argstring)  # type: ignore
+
+from .main import find_dotenv, load_dotenv
+
+
+@magics_class
+class IPythonDotEnv(Magics):
+
+    @magic_arguments()
+    @argument(
+        '-o', '--override', action='store_true',
+        help="Indicate to override existing variables"
+    )
+    @argument(
+        '-v', '--verbose', action='store_true',
+        help="Indicate function calls to be verbose"
+    )
+    @argument('dotenv_path', nargs='?', type=str, default='.env',
+              help='Search in increasingly higher folders for the `dotenv_path`')
+    @line_magic
+    def dotenv(self, line):
+        args = parse_argstring(self.dotenv, line)
+        # Locate the .env file
+        dotenv_path = args.dotenv_path
+        try:
+            dotenv_path = find_dotenv(dotenv_path, True, True)
+        except IOError:
+            print("cannot find .env file")
+            return
+
+        # Load the .env file
+        load_dotenv(dotenv_path, verbose=args.verbose, override=args.override)
+
+
+def load_ipython_extension(ipython):
+    """Register the %dotenv magic."""
+    ipython.register_magics(IPythonDotEnv)
diff --git a/venv/lib/python3.12/site-packages/dotenv/main.py b/venv/lib/python3.12/site-packages/dotenv/main.py
new file mode 100644
index 0000000..f40c20e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dotenv/main.py
@@ -0,0 +1,382 @@
+import io
+import logging
+import os
+import shutil
+import sys
+import tempfile
+from collections import OrderedDict
+from contextlib import contextmanager
+from typing import (IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple,
+                    Union)
+
+from .parser import Binding, parse_stream
+from .variables import parse_variables
+
+# A type alias for a string path to be used for the paths in this file.
+# These paths may flow to `open()` and `shutil.move()`; `shutil.move()`
+# only accepts string paths, not byte paths or file descriptors. See
+# https://github.com/python/typeshed/pull/6832.
+StrPath = Union[str, 'os.PathLike[str]'] + +logger = logging.getLogger(__name__) + + +def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]: + for mapping in mappings: + if mapping.error: + logger.warning( + "Python-dotenv could not parse statement starting at line %s", + mapping.original.line, + ) + yield mapping + + +class DotEnv: + def __init__( + self, + dotenv_path: Optional[StrPath], + stream: Optional[IO[str]] = None, + verbose: bool = False, + encoding: Optional[str] = None, + interpolate: bool = True, + override: bool = True, + ) -> None: + self.dotenv_path: Optional[StrPath] = dotenv_path + self.stream: Optional[IO[str]] = stream + self._dict: Optional[Dict[str, Optional[str]]] = None + self.verbose: bool = verbose + self.encoding: Optional[str] = encoding + self.interpolate: bool = interpolate + self.override: bool = override + + @contextmanager + def _get_stream(self) -> Iterator[IO[str]]: + if self.dotenv_path and os.path.isfile(self.dotenv_path): + with open(self.dotenv_path, encoding=self.encoding) as stream: + yield stream + elif self.stream is not None: + yield self.stream + else: + if self.verbose: + logger.info( + "Python-dotenv could not find configuration file %s.", + self.dotenv_path or '.env', + ) + yield io.StringIO('') + + def dict(self) -> Dict[str, Optional[str]]: + """Return dotenv as dict""" + if self._dict: + return self._dict + + raw_values = self.parse() + + if self.interpolate: + self._dict = OrderedDict(resolve_variables(raw_values, override=self.override)) + else: + self._dict = OrderedDict(raw_values) + + return self._dict + + def parse(self) -> Iterator[Tuple[str, Optional[str]]]: + with self._get_stream() as stream: + for mapping in with_warn_for_invalid_lines(parse_stream(stream)): + if mapping.key is not None: + yield mapping.key, mapping.value + + def set_as_environment_variables(self) -> bool: + """ + Load the current dotenv as system environment variable. + """ + if not self.dict(): + return False + + for k, v in self.dict().items(): + if k in os.environ and not self.override: + continue + if v is not None: + os.environ[k] = v + + return True + + def get(self, key: str) -> Optional[str]: + """ + """ + data = self.dict() + + if key in data: + return data[key] + + if self.verbose: + logger.warning("Key %s not found in %s.", key, self.dotenv_path) + + return None + + +def get_key( + dotenv_path: StrPath, + key_to_get: str, + encoding: Optional[str] = "utf-8", +) -> Optional[str]: + """ + Get the value of a given key from the given .env. + + Returns `None` if the key isn't found or doesn't have a value. 
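+
+    For example, get_key(".env", "DATABASE_URL") returns the value of
+    DATABASE_URL from ./.env, or None if the key is missing or has no value.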
+ """ + return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get) + + +@contextmanager +def rewrite( + path: StrPath, + encoding: Optional[str], +) -> Iterator[Tuple[IO[str], IO[str]]]: + if not os.path.isfile(path): + with open(path, mode="w", encoding=encoding) as source: + source.write("") + with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest: + try: + with open(path, encoding=encoding) as source: + yield (source, dest) + except BaseException: + os.unlink(dest.name) + raise + shutil.move(dest.name, path) + + +def set_key( + dotenv_path: StrPath, + key_to_set: str, + value_to_set: str, + quote_mode: str = "always", + export: bool = False, + encoding: Optional[str] = "utf-8", +) -> Tuple[Optional[bool], str, str]: + """ + Adds or Updates a key/value to the given .env + + If the .env path given doesn't exist, fails instead of risking creating + an orphan .env somewhere in the filesystem + """ + if quote_mode not in ("always", "auto", "never"): + raise ValueError(f"Unknown quote_mode: {quote_mode}") + + quote = ( + quote_mode == "always" + or (quote_mode == "auto" and not value_to_set.isalnum()) + ) + + if quote: + value_out = "'{}'".format(value_to_set.replace("'", "\\'")) + else: + value_out = value_to_set + if export: + line_out = f'export {key_to_set}={value_out}\n' + else: + line_out = f"{key_to_set}={value_out}\n" + + with rewrite(dotenv_path, encoding=encoding) as (source, dest): + replaced = False + missing_newline = False + for mapping in with_warn_for_invalid_lines(parse_stream(source)): + if mapping.key == key_to_set: + dest.write(line_out) + replaced = True + else: + dest.write(mapping.original.string) + missing_newline = not mapping.original.string.endswith("\n") + if not replaced: + if missing_newline: + dest.write("\n") + dest.write(line_out) + + return True, key_to_set, value_to_set + + +def unset_key( + dotenv_path: StrPath, + key_to_unset: str, + quote_mode: str = "always", + encoding: Optional[str] = "utf-8", +) -> Tuple[Optional[bool], str]: + """ + Removes a given key from the given `.env` file. + + If the .env path given doesn't exist, fails. + If the given key doesn't exist in the .env, fails. 
+    """
+    if not os.path.exists(dotenv_path):
+        logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path)
+        return None, key_to_unset
+
+    removed = False
+    with rewrite(dotenv_path, encoding=encoding) as (source, dest):
+        for mapping in with_warn_for_invalid_lines(parse_stream(source)):
+            if mapping.key == key_to_unset:
+                removed = True
+            else:
+                dest.write(mapping.original.string)
+
+    if not removed:
+        logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path)
+        return None, key_to_unset
+
+    return removed, key_to_unset
+
+
+def resolve_variables(
+    values: Iterable[Tuple[str, Optional[str]]],
+    override: bool,
+) -> Mapping[str, Optional[str]]:
+    new_values: Dict[str, Optional[str]] = {}
+
+    for (name, value) in values:
+        if value is None:
+            result = None
+        else:
+            atoms = parse_variables(value)
+            env: Dict[str, Optional[str]] = {}
+            if override:
+                env.update(os.environ)  # type: ignore
+                env.update(new_values)
+            else:
+                env.update(new_values)
+                env.update(os.environ)  # type: ignore
+            result = "".join(atom.resolve(env) for atom in atoms)
+
+        new_values[name] = result
+
+    return new_values
+
+
+def _walk_to_root(path: str) -> Iterator[str]:
+    """
+    Yield directories starting from the given directory up to the root
+    """
+    if not os.path.exists(path):
+        raise IOError('Starting path not found')
+
+    if os.path.isfile(path):
+        path = os.path.dirname(path)
+
+    last_dir = None
+    current_dir = os.path.abspath(path)
+    while last_dir != current_dir:
+        yield current_dir
+        parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
+        last_dir, current_dir = current_dir, parent_dir
+
+
+def find_dotenv(
+    filename: str = '.env',
+    raise_error_if_not_found: bool = False,
+    usecwd: bool = False,
+) -> str:
+    """
+    Search in increasingly higher folders for the given file
+
+    Returns path to the file if found, or an empty string otherwise
+    """
+
+    def _is_interactive():
+        """ Decide whether this is running in a REPL or IPython notebook """
+        main = __import__('__main__', None, None, fromlist=['__file__'])
+        return not hasattr(main, '__file__')
+
+    if usecwd or _is_interactive() or getattr(sys, 'frozen', False):
+        # Should work without __file__, e.g. in REPL or IPython notebook.
+        path = os.getcwd()
+    else:
+        # will work for .py files
+        frame = sys._getframe()
+        current_file = __file__
+
+        while frame.f_code.co_filename == current_file:
+            assert frame.f_back is not None
+            frame = frame.f_back
+        frame_filename = frame.f_code.co_filename
+        path = os.path.dirname(os.path.abspath(frame_filename))
+
+    for dirname in _walk_to_root(path):
+        check_path = os.path.join(dirname, filename)
+        if os.path.isfile(check_path):
+            return check_path
+
+    if raise_error_if_not_found:
+        raise IOError('File not found')
+
+    return ''
+
+
+def load_dotenv(
+    dotenv_path: Optional[StrPath] = None,
+    stream: Optional[IO[str]] = None,
+    verbose: bool = False,
+    override: bool = False,
+    interpolate: bool = True,
+    encoding: Optional[str] = "utf-8",
+) -> bool:
+    """Parse a .env file and then load all the variables found as environment variables.
+
+    Parameters:
+        dotenv_path: Absolute or relative path to .env file.
+        stream: Text stream (such as `io.StringIO`) with .env content, used if
+            `dotenv_path` is `None`.
+        verbose: Whether to output a warning if the .env file is missing.
+        override: Whether to override the system environment variables with the variables
+            from the `.env` file.
+        encoding: Encoding to be used to read the file.
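+        interpolate: Whether to expand `${VAR}`-style references in values,
+            using previously parsed keys and the current environment.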
+ Returns: + Bool: True if at least one environment variable is set else False + + If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the + .env file. + """ + if dotenv_path is None and stream is None: + dotenv_path = find_dotenv() + + dotenv = DotEnv( + dotenv_path=dotenv_path, + stream=stream, + verbose=verbose, + interpolate=interpolate, + override=override, + encoding=encoding, + ) + return dotenv.set_as_environment_variables() + + +def dotenv_values( + dotenv_path: Optional[StrPath] = None, + stream: Optional[IO[str]] = None, + verbose: bool = False, + interpolate: bool = True, + encoding: Optional[str] = "utf-8", +) -> Dict[str, Optional[str]]: + """ + Parse a .env file and return its content as a dict. + + The returned dict will have `None` values for keys without values in the .env file. + For example, `foo=bar` results in `{"foo": "bar"}` whereas `foo` alone results in + `{"foo": None}` + + Parameters: + dotenv_path: Absolute or relative path to the .env file. + stream: `StringIO` object with .env content, used if `dotenv_path` is `None`. + verbose: Whether to output a warning if the .env file is missing. + encoding: Encoding to be used to read the file. + + If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the + .env file. + """ + if dotenv_path is None and stream is None: + dotenv_path = find_dotenv() + + return DotEnv( + dotenv_path=dotenv_path, + stream=stream, + verbose=verbose, + interpolate=interpolate, + override=True, + encoding=encoding, + ).dict() diff --git a/venv/lib/python3.12/site-packages/dotenv/parser.py b/venv/lib/python3.12/site-packages/dotenv/parser.py new file mode 100644 index 0000000..735f14a --- /dev/null +++ b/venv/lib/python3.12/site-packages/dotenv/parser.py @@ -0,0 +1,175 @@ +import codecs +import re +from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401 + Pattern, Sequence, Tuple) + + +def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]: + return re.compile(string, re.UNICODE | extra_flags) + + +_newline = make_regex(r"(\r\n|\n|\r)") +_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) +_whitespace = make_regex(r"[^\S\r\n]*") +_export = make_regex(r"(?:export[^\S\r\n]+)?") +_single_quoted_key = make_regex(r"'([^']+)'") +_unquoted_key = make_regex(r"([^=\#\s]+)") +_equal_sign = make_regex(r"(=[^\S\r\n]*)") +_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") +_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') +_unquoted_value = make_regex(r"([^\r\n]*)") +_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") +_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") +_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") +_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") +_single_quote_escapes = make_regex(r"\\[\\']") + + +class Original(NamedTuple): + string: str + line: int + + +class Binding(NamedTuple): + key: Optional[str] + value: Optional[str] + original: Original + error: bool + + +class Position: + def __init__(self, chars: int, line: int) -> None: + self.chars = chars + self.line = line + + @classmethod + def start(cls) -> "Position": + return cls(chars=0, line=1) + + def set(self, other: "Position") -> None: + self.chars = other.chars + self.line = other.line + + def advance(self, string: str) -> None: + self.chars += len(string) + self.line += len(re.findall(_newline, string)) + + +class Error(Exception): + pass + + +class Reader: + def __init__(self, stream: IO[str]) -> None: + self.string = stream.read() + self.position = 
Position.start() + self.mark = Position.start() + + def has_next(self) -> bool: + return self.position.chars < len(self.string) + + def set_mark(self) -> None: + self.mark.set(self.position) + + def get_marked(self) -> Original: + return Original( + string=self.string[self.mark.chars:self.position.chars], + line=self.mark.line, + ) + + def peek(self, count: int) -> str: + return self.string[self.position.chars:self.position.chars + count] + + def read(self, count: int) -> str: + result = self.string[self.position.chars:self.position.chars + count] + if len(result) < count: + raise Error("read: End of string") + self.position.advance(result) + return result + + def read_regex(self, regex: Pattern[str]) -> Sequence[str]: + match = regex.match(self.string, self.position.chars) + if match is None: + raise Error("read_regex: Pattern not found") + self.position.advance(self.string[match.start():match.end()]) + return match.groups() + + +def decode_escapes(regex: Pattern[str], string: str) -> str: + def decode_match(match: Match[str]) -> str: + return codecs.decode(match.group(0), 'unicode-escape') # type: ignore + + return regex.sub(decode_match, string) + + +def parse_key(reader: Reader) -> Optional[str]: + char = reader.peek(1) + if char == "#": + return None + elif char == "'": + (key,) = reader.read_regex(_single_quoted_key) + else: + (key,) = reader.read_regex(_unquoted_key) + return key + + +def parse_unquoted_value(reader: Reader) -> str: + (part,) = reader.read_regex(_unquoted_value) + return re.sub(r"\s+#.*", "", part).rstrip() + + +def parse_value(reader: Reader) -> str: + char = reader.peek(1) + if char == u"'": + (value,) = reader.read_regex(_single_quoted_value) + return decode_escapes(_single_quote_escapes, value) + elif char == u'"': + (value,) = reader.read_regex(_double_quoted_value) + return decode_escapes(_double_quote_escapes, value) + elif char in (u"", u"\n", u"\r"): + return u"" + else: + return parse_unquoted_value(reader) + + +def parse_binding(reader: Reader) -> Binding: + reader.set_mark() + try: + reader.read_regex(_multiline_whitespace) + if not reader.has_next(): + return Binding( + key=None, + value=None, + original=reader.get_marked(), + error=False, + ) + reader.read_regex(_export) + key = parse_key(reader) + reader.read_regex(_whitespace) + if reader.peek(1) == "=": + reader.read_regex(_equal_sign) + value: Optional[str] = parse_value(reader) + else: + value = None + reader.read_regex(_comment) + reader.read_regex(_end_of_line) + return Binding( + key=key, + value=value, + original=reader.get_marked(), + error=False, + ) + except Error: + reader.read_regex(_rest_of_line) + return Binding( + key=None, + value=None, + original=reader.get_marked(), + error=True, + ) + + +def parse_stream(stream: IO[str]) -> Iterator[Binding]: + reader = Reader(stream) + while reader.has_next(): + yield parse_binding(reader) diff --git a/venv/lib/python3.12/site-packages/dotenv/py.typed b/venv/lib/python3.12/site-packages/dotenv/py.typed new file mode 100644 index 0000000..7632ecf --- /dev/null +++ b/venv/lib/python3.12/site-packages/dotenv/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/venv/lib/python3.12/site-packages/dotenv/variables.py b/venv/lib/python3.12/site-packages/dotenv/variables.py new file mode 100644 index 0000000..667f2f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/dotenv/variables.py @@ -0,0 +1,86 @@ +import re +from abc import ABCMeta, abstractmethod +from typing import Iterator, Mapping, Optional, Pattern + +_posix_variable: Pattern[str] 
= re.compile(
+    r"""
+        \$\{
+            (?P<name>[^\}:]*)
+            (?::-
+                (?P<default>[^\}]*)
+            )?
+        \}
+    """,
+    re.VERBOSE,
+)
+
+
+class Atom(metaclass=ABCMeta):
+    def __ne__(self, other: object) -> bool:
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return NotImplemented
+        return not result
+
+    @abstractmethod
+    def resolve(self, env: Mapping[str, Optional[str]]) -> str: ...
+
+
+class Literal(Atom):
+    def __init__(self, value: str) -> None:
+        self.value = value
+
+    def __repr__(self) -> str:
+        return f"Literal(value={self.value})"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return self.value == other.value
+
+    def __hash__(self) -> int:
+        return hash((self.__class__, self.value))
+
+    def resolve(self, env: Mapping[str, Optional[str]]) -> str:
+        return self.value
+
+
+class Variable(Atom):
+    def __init__(self, name: str, default: Optional[str]) -> None:
+        self.name = name
+        self.default = default
+
+    def __repr__(self) -> str:
+        return f"Variable(name={self.name}, default={self.default})"
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return (self.name, self.default) == (other.name, other.default)
+
+    def __hash__(self) -> int:
+        return hash((self.__class__, self.name, self.default))
+
+    def resolve(self, env: Mapping[str, Optional[str]]) -> str:
+        default = self.default if self.default is not None else ""
+        result = env.get(self.name, default)
+        return result if result is not None else ""
+
+
+def parse_variables(value: str) -> Iterator[Atom]:
+    cursor = 0
+
+    for match in _posix_variable.finditer(value):
+        (start, end) = match.span()
+        name = match["name"]
+        default = match["default"]
+
+        if start > cursor:
+            yield Literal(value=value[cursor:start])
+
+        yield Variable(name=name, default=default)
+        cursor = end
+
+    length = len(value)
+    if cursor < length:
+        yield Literal(value=value[cursor:length])
diff --git a/venv/lib/python3.12/site-packages/dotenv/version.py b/venv/lib/python3.12/site-packages/dotenv/version.py
new file mode 100644
index 0000000..5becc17
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/dotenv/version.py
@@ -0,0 +1 @@
+__version__ = "1.0.0"
diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/LICENSE b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/LICENSE
new file mode 100644
index 0000000..474479a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/LICENSE
@@ -0,0 +1,24 @@
+"python-ecdsa" Copyright (c) 2010 Brian Warner
+
+Portions written in 2005 by Peter Pearson and placed in the public domain.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/METADATA new file mode 100644 index 0000000..3e63190 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/METADATA @@ -0,0 +1,671 @@ +Metadata-Version: 2.1 +Name: ecdsa +Version: 0.19.1 +Summary: ECDSA cryptographic signature library (pure python) +Home-page: http://github.com/tlsfuzzer/python-ecdsa +Author: Brian Warner +Author-email: warner@lothar.com +License: MIT +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.* +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: six >=1.9.0 +Provides-Extra: gmpy +Requires-Dist: gmpy ; extra == 'gmpy' +Provides-Extra: gmpy2 +Requires-Dist: gmpy2 ; extra == 'gmpy2' + +# Pure-Python ECDSA and ECDH + +[![GitHub CI](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/ci.yml/badge.svg)](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/ci.yml) +[![Documentation Status](https://readthedocs.org/projects/ecdsa/badge/?version=latest)](https://ecdsa.readthedocs.io/en/latest/?badge=latest) +[![Coverage Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) +![condition coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-condition-coverage.json) +![mutation score](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-mutation-score.json) +[![CodeQL](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/codeql.yml/badge.svg)](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/codeql.yml) +[![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/) +![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat) + + +This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography) +with support for ECDSA (Elliptic Curve Digital Signature Algorithm), +EdDSA (Edwards-curve Digital Signature Algorithm) and ECDH +(Elliptic Curve Diffie-Hellman), implemented purely in Python, released under +the MIT license. 
With this library, you can quickly create key pairs (signing
+key and verifying key), sign messages, and verify the signatures. You can
+also agree on a shared secret key based on exchanged public keys.
+The keys and signatures are very short, making them easy to handle and
+incorporate into other protocols.
+
+**NOTE: This library should not be used in production settings; see [Security](#Security) for more details.**
+
+## Features
+
+This library provides key generation, signing, verifying, and shared secret
+derivation for five
+popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192,
+224, 256, 384, and 521 bits. The "short names" for these curves, as known by
+the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`,
+`secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It includes the
+256-bit curve `secp256k1` used by Bitcoin. There is also support for the
+regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The
+"short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`,
+`brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`,
+`brainpoolP512r1`. A few of the small curves from the SEC standard are also
+included (mainly to speed up testing of the library); those are:
+`secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`.
+Key generation, signing, and verifying are also supported for the Ed25519 and
+Ed448 curves.
+No other curves are included, but it is not too hard to add support for more
+curves over prime fields.
+
+## Dependencies
+
+This library uses only Python and the 'six' package. It is compatible with
+Python 2.6, 2.7, and 3.6+. It also supports execution on alternative
+implementations like pypy and pypy3.
+
+If `gmpy2` or `gmpy` is installed, it will be used for faster arithmetic.
+Either of them can be installed after this library is installed;
+`python-ecdsa` will detect their presence on start-up and use them
+automatically.
+You should prefer `gmpy2` on Python 3 for optimal performance.
+
+To run the OpenSSL compatibility tests, the 'openssl' tool must be in your
+`PATH`. This release has been tested successfully against OpenSSL 0.9.8o,
+1.0.0a, 1.0.2f, 1.1.1d and 3.0.1 (among others).
+
+
+## Installation
+
+This library is available on PyPI; it's recommended to install it using `pip`:
+
+```
+pip install ecdsa
+```
+
+In case higher performance is wanted and using native code is not a problem,
+it's possible to specify installation together with `gmpy2`:
+
+```
+pip install ecdsa[gmpy2]
+```
+
+or (slower, legacy option):
+```
+pip install ecdsa[gmpy]
+```
+
+## Speed
+
+The following table shows how long this library takes to generate key pairs
+(`keygen`), to sign data (`sign`), to verify those signatures (`verify`),
+to derive a shared secret (`ecdh`), and
+to verify the signatures with no key-specific precomputation (`no PC verify`).
+All those values are in seconds.
+For convenience, the inverses of those values are also provided:
+how many keys per second can be generated (`keygen/s`), how many signatures
+can be made per second (`sign/s`), how many signatures can be verified
+per second (`verify/s`), how many shared secrets can be derived per second
+(`ecdh/s`), and how many signatures with no key-specific
+precomputation can be verified per second (`no PC verify/s`). The size of a raw
+signature (generally the smallest
+way a signature can be encoded) is also provided in the `siglen` column.
+Use `tox -e speed` to generate this table on your own computer.
+On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: + +``` + siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s + NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 + NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 + NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 + NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 + NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 + SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 + BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 + BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 + BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 + BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 + BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 + BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 + BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 + SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 + SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 + SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 + SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 + Ed25519: 64 0.00076s 1324.48 0.00042s 2405.01 0.00109s 918.05 0.00344s 290.50 + Ed448: 114 0.00176s 569.53 0.00115s 870.94 0.00282s 355.04 0.01024s 97.69 + + ecdh ecdh/s + NIST192p: 0.00104s 964.89 + NIST224p: 0.00134s 748.63 + NIST256p: 0.00170s 587.08 + NIST384p: 0.00352s 283.90 + NIST521p: 0.00717s 139.51 + SECP256k1: 0.00154s 648.40 + BRAINPOOLP160r1: 0.00082s 1220.70 + BRAINPOOLP192r1: 0.00105s 956.75 + BRAINPOOLP224r1: 0.00136s 734.52 + BRAINPOOLP256r1: 0.00178s 563.32 + BRAINPOOLP320r1: 0.00252s 397.23 + BRAINPOOLP384r1: 0.00376s 266.27 + BRAINPOOLP512r1: 0.00733s 136.35 + SECP112r1: 0.00046s 2180.40 + SECP112r2: 0.00045s 2229.14 + SECP128r1: 0.00054s 1868.15 + SECP160r1: 0.00080s 1243.98 +``` + +To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`. 
+On the same machine I'm getting the following performance with `gmpy2`: +``` + siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s + NIST192p: 48 0.00017s 5933.40 0.00017s 5751.70 0.00032s 3125.28 0.00067s 1502.41 + NIST224p: 56 0.00021s 4782.87 0.00022s 4610.05 0.00040s 2487.04 0.00089s 1126.90 + NIST256p: 64 0.00023s 4263.98 0.00024s 4125.16 0.00045s 2200.88 0.00098s 1016.82 + NIST384p: 96 0.00041s 2449.54 0.00042s 2399.96 0.00083s 1210.57 0.00172s 581.43 + NIST521p: 132 0.00071s 1416.07 0.00072s 1389.81 0.00144s 692.93 0.00312s 320.40 + SECP256k1: 64 0.00024s 4245.05 0.00024s 4122.09 0.00045s 2206.40 0.00094s 1068.32 + BRAINPOOLP160r1: 40 0.00014s 6939.17 0.00015s 6681.55 0.00029s 3452.43 0.00057s 1769.81 + BRAINPOOLP192r1: 48 0.00017s 5920.05 0.00017s 5774.36 0.00034s 2979.00 0.00069s 1453.19 + BRAINPOOLP224r1: 56 0.00021s 4732.12 0.00022s 4622.65 0.00041s 2422.47 0.00087s 1149.87 + BRAINPOOLP256r1: 64 0.00024s 4233.02 0.00024s 4115.20 0.00047s 2143.27 0.00098s 1015.60 + BRAINPOOLP320r1: 80 0.00032s 3162.38 0.00032s 3077.62 0.00063s 1598.83 0.00136s 737.34 + BRAINPOOLP384r1: 96 0.00041s 2436.88 0.00042s 2395.62 0.00083s 1202.68 0.00178s 562.85 + BRAINPOOLP512r1: 128 0.00063s 1587.60 0.00064s 1558.83 0.00125s 799.96 0.00281s 355.83 + SECP112r1: 28 0.00009s 11118.66 0.00009s 10775.48 0.00018s 5456.00 0.00033s 3020.83 + SECP112r2: 28 0.00009s 11322.97 0.00009s 10857.71 0.00017s 5748.77 0.00032s 3094.28 + SECP128r1: 32 0.00010s 10078.39 0.00010s 9665.27 0.00019s 5200.58 0.00036s 2760.88 + SECP160r1: 42 0.00015s 6875.51 0.00015s 6647.35 0.00029s 3422.41 0.00057s 1768.35 + Ed25519: 64 0.00030s 3322.56 0.00018s 5568.63 0.00046s 2165.35 0.00153s 654.02 + Ed448: 114 0.00060s 1680.53 0.00039s 2567.40 0.00096s 1036.67 0.00350s 285.62 + + ecdh ecdh/s + NIST192p: 0.00050s 1985.70 + NIST224p: 0.00066s 1524.16 + NIST256p: 0.00071s 1413.07 + NIST384p: 0.00127s 788.89 + NIST521p: 0.00230s 434.85 + SECP256k1: 0.00071s 1409.95 + BRAINPOOLP160r1: 0.00042s 2374.65 + BRAINPOOLP192r1: 0.00051s 1960.01 + BRAINPOOLP224r1: 0.00066s 1518.37 + BRAINPOOLP256r1: 0.00071s 1399.90 + BRAINPOOLP320r1: 0.00100s 997.21 + BRAINPOOLP384r1: 0.00129s 777.51 + BRAINPOOLP512r1: 0.00210s 475.99 + SECP112r1: 0.00022s 4457.70 + SECP112r2: 0.00024s 4252.33 + SECP128r1: 0.00028s 3589.31 + SECP160r1: 0.00043s 2305.02 +``` + +(there's also `gmpy` version, execute it using `tox -e speedgmpy`) + +For comparison, a highly optimised implementation (including curve-specific +assembly for some curves), like the one in OpenSSL 1.1.1d, provides the +following performance numbers on the same machine. 
+Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it:
+```
+                                     sign     verify    sign/s  verify/s
+ 192 bits ecdsa (nistp192)          0.0002s   0.0002s   4785.6    5380.7
+ 224 bits ecdsa (nistp224)          0.0000s   0.0001s  22475.6    9822.0
+ 256 bits ecdsa (nistp256)          0.0000s   0.0001s  45069.6   14166.6
+ 384 bits ecdsa (nistp384)          0.0008s   0.0006s   1265.6    1648.1
+ 521 bits ecdsa (nistp521)          0.0003s   0.0005s   3753.1    1819.5
+ 256 bits ecdsa (brainpoolP256r1)   0.0003s   0.0003s   2983.5    3333.2
+ 384 bits ecdsa (brainpoolP384r1)   0.0008s   0.0007s   1258.8    1528.1
+ 512 bits ecdsa (brainpoolP512r1)   0.0015s   0.0012s    675.1     860.1
+
+                                     sign     verify    sign/s  verify/s
+ 253 bits EdDSA (Ed25519)           0.0000s   0.0001s  28217.9   10897.7
+ 456 bits EdDSA (Ed448)             0.0003s   0.0005s   3926.5    2147.7
+
+                                     op        op/s
+ 192 bits ecdh (nistp192)           0.0002s   4853.4
+ 224 bits ecdh (nistp224)           0.0001s  15252.1
+ 256 bits ecdh (nistp256)           0.0001s  18436.3
+ 384 bits ecdh (nistp384)           0.0008s   1292.7
+ 521 bits ecdh (nistp521)           0.0003s   2884.7
+ 256 bits ecdh (brainpoolP256r1)    0.0003s   3066.5
+ 384 bits ecdh (brainpoolP384r1)    0.0008s   1298.0
+ 512 bits ecdh (brainpoolP512r1)    0.0014s    694.8
+```
+
+Keys and signatures can be serialized in different ways (see Usage, below).
+For a NIST192p key, the three basic representations require strings of the
+following lengths (in bytes):
+
+    to_string:  signkey= 24, verifykey= 48, signature=48
+    compressed: signkey=n/a, verifykey= 25, signature=n/a
+    DER:        signkey=106, verifykey= 80, signature=55
+    PEM:        signkey=278, verifykey=162, (no support for PEM signatures)
+
+## History
+
+In 2006, Peter Pearson announced his pure-Python implementation of ECDSA in a
+[message to sci.crypt][1], available from his [download site][2]. In 2010,
+Brian Warner wrote a wrapper around this code, to make it a bit easier and
+safer to use. In 2020, Hubert Kario included an implementation of elliptic
+curve cryptography that uses Jacobian coordinates internally, improving
+performance about 20-fold. You are looking at the README for this wrapper.
+
+[1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html
+[2]: http://webpages.charter.net/curryfans/peter/downloads.html
+
+## Testing
+
+To run the full test suite, do this:
+
+    tox -e coverage
+
+On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute.
+The test suite uses
+[`hypothesis`](https://github.com/HypothesisWorks/hypothesis) so there is some
+inherent variability in the test suite execution time.
+
+One part of `test_pyecdsa.py` and `test_ecdh.py` checks compatibility with
+OpenSSL by running the "openssl" CLI tool; make sure it's in your `PATH` if
+you want to test compatibility with it (if OpenSSL is missing, too old, or
+doesn't support all the curves supported in upstream releases, you will see
+skipped tests in the above `coverage` run).
+
+## Security
+
+This library was not designed with security in mind. If you are processing
+data that needs to be protected, we suggest you use a quality wrapper around
+OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such
+a wrapper. The primary use-case of this library is as a portable library for
+interoperability testing and as a teaching tool.
+
+**This library does not protect against side-channel attacks.**
+
+Do not allow attackers to measure how long it takes you to generate a key pair
+or sign a message. Do not allow attackers to run code on the same physical
+machine when key pair generation or signing is taking place (this includes
+virtual machines). 
Do not allow attackers to measure how much power your
+computer uses while generating the key pair or signing a message. Do not allow
+attackers to measure RF interference coming from your computer while generating
+a key pair or signing a message. Note: just loading the private key will cause
+key pair generation. Other operations or attack vectors may also be
+vulnerable to attacks. **For a sophisticated attacker, observing just one
+operation with a private key will be sufficient to completely
+reconstruct the private key**.
+
+Please also note that any pure-Python cryptographic library will be vulnerable
+to the same side-channel attacks. This is because Python does not provide
+side-channel secure primitives (with the exception of
+[`hmac.compare_digest()`][3]), making side-channel secure programming
+impossible.
+
+This library depends upon a strong source of random numbers. Do not use it on
+a system where `os.urandom()` does not provide cryptographically secure
+random numbers.
+
+[3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest
+
+## Usage
+
+You start by creating a `SigningKey`. You can use this to sign data, by passing
+in data as a byte string and getting back the signature (also a byte string).
+You can also ask a `SigningKey` to give you the corresponding `VerifyingKey`.
+The `VerifyingKey` can be used to verify a signature, by passing it both the
+data string and the signature byte string: it either returns `True` or raises
+`BadSignatureError`.
+
+```python
+from ecdsa import SigningKey
+sk = SigningKey.generate() # uses NIST192p
+vk = sk.verifying_key
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like
+NIST192p (the default one). Longer curves are more secure, but take longer to
+use, and result in longer keys and signatures.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+The `SigningKey` can be serialized into several different formats: the shortest
+is to call `s=sk.to_string()`, and then re-create it with
+`SigningKey.from_string(s, curve)`. This short form does not record the
+curve, so you must be sure to pass to `from_string()` the same curve you used
+for the original key. The short form of a NIST192p-based signing key is just 24
+bytes long. If a point encoding is invalid or it does not lie on the specified
+curve, `from_string()` will raise `MalformedPointError`.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+sk_string = sk.to_string()
+sk2 = SigningKey.from_string(sk_string, curve=NIST384p)
+print(sk_string.hex())
+print(sk2.to_string().hex())
+```
+
+Note: while the methods are called `to_string()`, the type they return is
+actually `bytes`; the "string" part is leftover from Python 2.
+
+`sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same
+formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored
+`"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format
+is a shorter binary form of the same data.
+`SigningKey.from_pem()/.from_der()` will undo this serialization. These
+formats include the curve name, so you do not need to pass in a curve
+identifier to the deserializer. 
In case the file is malformed, `from_der()`
+and `from_pem()` will raise `UnexpectedDER` or `MalformedPointError`.
+
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+sk_pem = sk.to_pem()
+sk2 = SigningKey.from_pem(sk_pem)
+# sk and sk2 are the same key
+```
+
+Likewise, the `VerifyingKey` can be serialized in the same way:
+`vk.to_string()/VerifyingKey.from_string()`, `to_pem()/from_pem()`, and
+`to_der()/from_der()`. The same `curve=` argument is needed for
+`VerifyingKey.from_string()`.
+
+```python
+from ecdsa import SigningKey, VerifyingKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk_string = vk.to_string()
+vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p)
+# vk and vk2 are the same key
+
+from ecdsa import SigningKey, VerifyingKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk_pem = vk.to_pem()
+vk2 = VerifyingKey.from_pem(vk_pem)
+# vk and vk2 are the same key
+```
+
+There are a couple of different ways to compute a signature. Fundamentally,
+ECDSA takes a number that represents the data being signed, and returns a
+pair of numbers that represent the signature. The `hashfunc=` argument to
+`sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a
+fixed-length digest, which is then turned into a number that ECDSA can sign,
+and both sign and verify must use the same approach. The default value is
+`hashlib.sha1`, but if you use NIST256p or a longer curve, you can use
+`hashlib.sha256` instead.
+
+There are also multiple ways to represent a signature. The default
+`sk.sign()` and `vk.verify()` methods present it as a short string, for
+simplicity and minimal overhead. To use a different scheme, use the
+`sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper
+functions in the `ecdsa.util` module that can be useful here (a short sketch
+combining `hashfunc=` and `sigencode=` appears at the end of this section).
+
+It is also possible to create a `SigningKey` from a "seed", which is
+deterministic. This can be used in protocols where you want to derive
+consistent signing keys from some other secret, for example when you want
+three separate keys and only want to store a single master secret. You should
+start with a uniformly-distributed unguessable seed with about `curve.baselen`
+bytes of entropy, and then use one of the helper functions in `ecdsa.util` to
+convert it into an integer in the correct range, and then finally pass it
+into `SigningKey.from_secret_exponent()`, like this:
+
+```python
+import os
+from ecdsa import NIST384p, SigningKey
+from ecdsa.util import randrange_from_seed__trytryagain
+
+def make_key(seed):
+    secexp = randrange_from_seed__trytryagain(seed, NIST384p.order)
+    return SigningKey.from_secret_exponent(secexp, curve=NIST384p)
+
+seed = os.urandom(NIST384p.baselen) # or other starting point
+sk1a = make_key(seed)
+sk1b = make_key(seed)
+# note: sk1a and sk1b are the same key
+assert sk1a.to_string() == sk1b.to_string()
+sk2 = make_key(b"2-"+seed)  # different key
+assert sk1a.to_string() != sk2.to_string()
+```
+
+In case the application will verify a lot of signatures made with a single
+key, it's possible to precompute some of the internal values to make
+signature verification significantly faster. The break-even point occurs at
+about 100 signatures verified.
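+
+As a minimal sketch of the two signing options mentioned above combined (the
+curve, hash, and message are arbitrary illustrative choices; the encoding
+helpers come from `ecdsa.util` as described earlier):
+
+```python
+import hashlib
+from ecdsa import SigningKey, NIST256p
+from ecdsa.util import sigencode_der, sigdecode_der
+
+sk = SigningKey.generate(curve=NIST256p)
+vk = sk.verifying_key
+# sign with SHA-256 and encode the signature as DER
+signature = sk.sign(b"message", hashfunc=hashlib.sha256, sigencode=sigencode_der)
+# verification must use the same hash function and decoding scheme
+assert vk.verify(signature, b"message", hashlib.sha256, sigdecode=sigdecode_der)
+```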
+
+To perform precomputation, you can call the `precompute()` method
+on a `VerifyingKey` instance:
+```python
+from ecdsa import SigningKey, NIST384p
+sk = SigningKey.generate(curve=NIST384p)
+vk = sk.verifying_key
+vk.precompute()
+signature = sk.sign(b"message")
+assert vk.verify(signature, b"message")
+```
+
+Once `precompute()` has been called, all signature verifications with this key
+will be faster to execute.
+
+## OpenSSL Compatibility
+
+To produce signatures that can be verified by OpenSSL tools, or to verify
+signatures that were produced by those tools, use:
+
+```python
+# openssl ecparam -name prime256v1 -genkey -out sk.pem
+# openssl ec -in sk.pem -pubout -out vk.pem
+# echo "data for signing" > data
+# openssl dgst -sha256 -sign sk.pem -out data.sig data
+# openssl dgst -sha256 -verify vk.pem -signature data.sig data
+# openssl dgst -sha256 -prverify sk.pem -signature data.sig data
+
+import hashlib
+from ecdsa import SigningKey, VerifyingKey
+from ecdsa.util import sigencode_der, sigdecode_der
+
+with open("vk.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+
+with open("data", "rb") as f:
+    data = f.read()
+
+with open("data.sig", "rb") as f:
+    signature = f.read()
+
+assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der)
+
+with open("sk.pem") as f:
+    sk = SigningKey.from_pem(f.read(), hashlib.sha256)
+
+new_signature = sk.sign_deterministic(data, sigencode=sigencode_der)
+
+with open("data.sig2", "wb") as f:
+    f.write(new_signature)
+
+# openssl dgst -sha256 -verify vk.pem -signature data.sig2 data
+```
+
+Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, the
+`sigencode_string` and `sigdecode_string` from `ecdsa.util` can be used for
+writing and reading the signatures, respectively.
+
+The keys can also be written in a format that OpenSSL can handle:
+
+```python
+from ecdsa import SigningKey, VerifyingKey
+
+with open("sk.pem") as f:
+    sk = SigningKey.from_pem(f.read())
+with open("sk.pem", "wb") as f:
+    f.write(sk.to_pem())
+
+with open("vk.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+with open("vk.pem", "wb") as f:
+    f.write(vk.to_pem())
+```
+
+## Entropy
+
+Creating a signing key with `SigningKey.generate()` requires some form of
+entropy (as opposed to
+`from_secret_exponent`/`from_string`/`from_der`/`from_pem`,
+which are deterministic and do not require an entropy source). The default
+source is `os.urandom()`, but you can pass any other function that behaves
+like `os.urandom` as the `entropy=` argument to do something different. This
+may be useful in unit tests, where you want to achieve repeatable results. The
+`ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a strong
+pseudo-random stream from it:
+
+```python
+from ecdsa.util import PRNG
+from ecdsa import SigningKey
+rng1 = PRNG(b"seed")
+sk1 = SigningKey.generate(entropy=rng1)
+rng2 = PRNG(b"seed")
+sk2 = SigningKey.generate(entropy=rng2)
+# sk1 and sk2 are the same key
+```
+
+Likewise, ECDSA signature generation requires a random number, and each
+signature must use a different one (using the same number twice will
+immediately reveal the private signing key). The `sk.sign()` method takes an
+`entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`.
+
+## Deterministic Signatures
+
+If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`,
+the code will generate a deterministic signature instead of a random one.
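+For example (a minimal sketch; the curve, hash, and message are arbitrary
+illustrative choices):
+
+```python
+import hashlib
+from ecdsa import SigningKey, NIST256p
+
+sk = SigningKey.generate(curve=NIST256p)
+sig1 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+sig2 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
+# the same key and message always produce the same signature
+assert sig1 == sig2
+```
+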
+Deterministic signing uses the algorithm from RFC6979 to safely generate a
+unique `k` value, derived from the private key and the message being signed.
+Each time you sign the same message with the same key, you will get the same
+signature (using the same `k`).
+
+This may become the default in a future version, as it is not vulnerable to
+failures of the entropy source.
+
+## Examples
+
+Create a NIST192p key pair and immediately save both to disk:
+
+```python
+from ecdsa import SigningKey
+sk = SigningKey.generate()
+vk = sk.verifying_key
+with open("private.pem", "wb") as f:
+    f.write(sk.to_pem())
+with open("public.pem", "wb") as f:
+    f.write(vk.to_pem())
+```
+
+Load a signing key from disk, use it to sign a message (using SHA-1), and write
+the signature to disk:
+
+```python
+from ecdsa import SigningKey
+with open("private.pem") as f:
+    sk = SigningKey.from_pem(f.read())
+with open("message", "rb") as f:
+    message = f.read()
+sig = sk.sign(message)
+with open("signature", "wb") as f:
+    f.write(sig)
+```
+
+Load the verifying key, message, and signature from disk, and verify the
+signature (assume SHA-1 hash):
+
+```python
+from ecdsa import VerifyingKey, BadSignatureError
+with open("public.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+with open("message", "rb") as f:
+    message = f.read()
+with open("signature", "rb") as f:
+    sig = f.read()
+try:
+    vk.verify(sig, message)
+    print("good signature")
+except BadSignatureError:
+    print("BAD SIGNATURE")
+```
+
+Create a NIST521p key pair:
+
+```python
+from ecdsa import SigningKey, NIST521p
+sk = SigningKey.generate(curve=NIST521p)
+vk = sk.verifying_key
+```
+
+Create three independent signing keys from a master seed:
+
+```python
+import os
+from ecdsa import NIST192p, SigningKey
+from ecdsa.util import randrange_from_seed__trytryagain
+
+def make_key_from_seed(seed, curve=NIST192p):
+    secexp = randrange_from_seed__trytryagain(seed, curve.order)
+    return SigningKey.from_secret_exponent(secexp, curve)
+
+seed = os.urandom(NIST192p.baselen)  # a single unguessable master secret
+sk1 = make_key_from_seed(b"1:" + seed)
+sk2 = make_key_from_seed(b"2:" + seed)
+sk3 = make_key_from_seed(b"3:" + seed)
+```
+
+Load a verifying key from disk and print it using hex encoding in
+uncompressed and compressed format (defined in X9.62 and SEC1 standards):
+
+```python
+from ecdsa import VerifyingKey
+
+with open("public.pem") as f:
+    vk = VerifyingKey.from_pem(f.read())
+
+print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
+print("compressed: {0}".format(vk.to_string("compressed").hex()))
+```
+
+Load a verifying key from a hex string in compressed format and output it
+uncompressed:
+
+```python
+from ecdsa import VerifyingKey, NIST256p
+
+comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
+vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
+print(vk.to_string("uncompressed").hex())
+```
+
+ECDH key exchange with a remote party:
+
+```python
+from ecdsa import ECDH, NIST256p
+
+ecdh = ECDH(curve=NIST256p)
+ecdh.generate_private_key()
+local_public_key = ecdh.get_public_key()
+# send `local_public_key` to the remote party and receive `remote_public_key` from it
+with open("remote_public_key.pem") as e:
+    remote_public_key = e.read()
+ecdh.load_received_public_key_pem(remote_public_key)
+secret = ecdh.generate_sharedsecret_bytes()
+```
diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/RECORD
new file mode 100644
index 0000000..9409b4c
--- /dev/null
+++ 
b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/RECORD @@ -0,0 +1,66 @@ +ecdsa-0.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ecdsa-0.19.1.dist-info/LICENSE,sha256=PsqYRXc9LluMydjBGdNF8ApIBuS9Zg1KPWzfnA6di7I,1147 +ecdsa-0.19.1.dist-info/METADATA,sha256=pgYAeXZAWaSlZnvx6ZgK6RpQAbgo7nhRuptSt4T71hY,29641 +ecdsa-0.19.1.dist-info/RECORD,, +ecdsa-0.19.1.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110 +ecdsa-0.19.1.dist-info/top_level.txt,sha256=7ovPHfAPyTou19f8gOSbHm6B9dGjTibWolcCB7Zjovs,6 +ecdsa/__init__.py,sha256=wRUnU3g01MM-mLNemnov-_8B81DlbSrqoFtOO4bZzQQ,1931 +ecdsa/__pycache__/__init__.cpython-312.pyc,, +ecdsa/__pycache__/_compat.cpython-312.pyc,, +ecdsa/__pycache__/_rwlock.cpython-312.pyc,, +ecdsa/__pycache__/_sha3.cpython-312.pyc,, +ecdsa/__pycache__/_version.cpython-312.pyc,, +ecdsa/__pycache__/curves.cpython-312.pyc,, +ecdsa/__pycache__/der.cpython-312.pyc,, +ecdsa/__pycache__/ecdh.cpython-312.pyc,, +ecdsa/__pycache__/ecdsa.cpython-312.pyc,, +ecdsa/__pycache__/eddsa.cpython-312.pyc,, +ecdsa/__pycache__/ellipticcurve.cpython-312.pyc,, +ecdsa/__pycache__/errors.cpython-312.pyc,, +ecdsa/__pycache__/keys.cpython-312.pyc,, +ecdsa/__pycache__/numbertheory.cpython-312.pyc,, +ecdsa/__pycache__/rfc6979.cpython-312.pyc,, +ecdsa/__pycache__/ssh.cpython-312.pyc,, +ecdsa/__pycache__/test_curves.cpython-312.pyc,, +ecdsa/__pycache__/test_der.cpython-312.pyc,, +ecdsa/__pycache__/test_ecdh.cpython-312.pyc,, +ecdsa/__pycache__/test_ecdsa.cpython-312.pyc,, +ecdsa/__pycache__/test_eddsa.cpython-312.pyc,, +ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc,, +ecdsa/__pycache__/test_jacobi.cpython-312.pyc,, +ecdsa/__pycache__/test_keys.cpython-312.pyc,, +ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc,, +ecdsa/__pycache__/test_numbertheory.cpython-312.pyc,, +ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc,, +ecdsa/__pycache__/test_rw_lock.cpython-312.pyc,, +ecdsa/__pycache__/test_sha3.cpython-312.pyc,, +ecdsa/__pycache__/util.cpython-312.pyc,, +ecdsa/_compat.py,sha256=5EP735DlmaSb8Slk4BMnh9Z7MvYpTnGS3S_DBmHETwo,4047 +ecdsa/_rwlock.py,sha256=CAwHp2V65ksI8B1UqY7EccK9LaUToiv6pDLVzm44eag,2849 +ecdsa/_sha3.py,sha256=DJs7QLmdkQMU35llyD8HQeAXNvf5sMcujO6oFdScIqI,4747 +ecdsa/_version.py,sha256=6Q6w07SB50LWxDw7_U4KvjfFSQLiAZWHvYT_iDehYqw,498 +ecdsa/curves.py,sha256=EcPE0WRFkjpPMZfVM0-hRQ63CdW5GKTkDRpK-VOy1Zo,15975 +ecdsa/der.py,sha256=wXf8ftHJLdJZ2MjpQjeT5ji_qTj05dJtqCsvmwjvbuo,16444 +ecdsa/ecdh.py,sha256=Tiirawt5xegVDrY9eS-ATvvfmTIznUyv5fy2k7VnzTk,11011 +ecdsa/ecdsa.py,sha256=82ECyve36rL4_y8Zk8jPB-Uyb67Ppx6CYdqIeFniv7w,31699 +ecdsa/eddsa.py,sha256=IzsGzoGAefcoYjF7DVjFkX5ZJqiK2LTOmAMe6wyf4UU,7170 +ecdsa/ellipticcurve.py,sha256=OhoxJsJJ_7Dp5e10AblD_ui9Uo2WxozRby37gxqcuOk,54296 +ecdsa/errors.py,sha256=b4mhnmIpRnEdHzbectHAA5F7O9MtSaI-fYoc13_vBxQ,130 +ecdsa/keys.py,sha256=FkpubL9MGCfkCJ6aVvV5c0bve4ZuBtU3P1dRRBTth0k,65503 +ecdsa/numbertheory.py,sha256=Ad2-mVFaOytMomBC-7d0cJao4tpkVLh4x7vyqj73G6A,17831 +ecdsa/rfc6979.py,sha256=zwzo33lsZJA9r2dSf7HCliI_yIbw5cJ0Ek9tLdRRO40,2850 +ecdsa/ssh.py,sha256=360JY0dbYeZaqx9k_OhlHzPZYZTw5UB5xtK2XLBqb0M,1916 +ecdsa/test_curves.py,sha256=l5N-m4Yo5IAy4a8aJMsBamaSlLAfSoYjYqCj1HDEVpU,13081 +ecdsa/test_der.py,sha256=Bqbu3QiESyl8X9bjR2IBzDW35fDHTVB-mhzk5qXgGQQ,18915 +ecdsa/test_ecdh.py,sha256=20TEYyGcnynAPS9nlr_k2ed6S7lJ1Z0bbohNU-pEGco,15380 +ecdsa/test_ecdsa.py,sha256=V5Q4Q7uUFfStVdeFtwhXVpFlFi0Nx5J5uru9T3j_3DQ,25037 +ecdsa/test_eddsa.py,sha256=1jfHF_ZSyillv6DLKJZ0SeYThauTCHsyRNAweS_4MEQ,33720 
+ecdsa/test_ellipticcurve.py,sha256=mvIBD_KKrOY4Vpd4KcR3J8RCBxINJtqWls1WMEsFgnE,8798 +ecdsa/test_jacobi.py,sha256=SVrS--PSwDakCYit216khmzF8eQryWtiI3sYWd_6890,25811 +ecdsa/test_keys.py,sha256=reQ2KtfOsANa59CqvvRWjZm-SaWfscJZVJaE9RFyL94,39415 +ecdsa/test_malformed_sigs.py,sha256=Dg1Dkvgz1tO-KhZ6f9ZRljykz8FBYtld3hq1W2hLLM8,11289 +ecdsa/test_numbertheory.py,sha256=YTtclt_50n_24U__r2JyV3LEqR2BsBmA6IJghwBDz8c,13265 +ecdsa/test_pyecdsa.py,sha256=2zoWfR_xAqagKOFHVJt4dTZZacu1zT4JxXH_HJUABW8,91272 +ecdsa/test_rw_lock.py,sha256=byv0_FTM90cbuHPCI6__LeQJkHL_zYEeVYIBO8e2LLc,7021 +ecdsa/test_sha3.py,sha256=0PkWi7AnTJ10YNfDVqj2SB7adeTbV6DCvMg4n9ULad8,3042 +ecdsa/util.py,sha256=UixXH6PR9poMrWQJ93rKpUSVZHxP1AmLwlkrkI3FWFE,18006 diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/WHEEL new file mode 100644 index 0000000..c34f116 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/top_level.txt new file mode 100644 index 0000000..aa5efdb --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/top_level.txt @@ -0,0 +1 @@ +ecdsa diff --git a/venv/lib/python3.12/site-packages/ecdsa/__init__.py b/venv/lib/python3.12/site-packages/ecdsa/__init__.py new file mode 100644 index 0000000..342538e --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/__init__.py @@ -0,0 +1,104 @@ +# while we don't use six in this file, we did bundle it for a long time, so +# keep as part of module in a virtual way (through __all__) +import six +from .keys import ( + SigningKey, + VerifyingKey, + BadSignatureError, + BadDigestError, + MalformedPointError, +) +from .curves import ( + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + Ed25519, + Ed448, + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, +) +from .ecdh import ( + ECDH, + NoKeyError, + NoCurveError, + InvalidCurveError, + InvalidSharedSecretError, +) +from .der import UnexpectedDER +from . 
import _version + +# This code comes from http://github.com/tlsfuzzer/python-ecdsa +__all__ = [ + "curves", + "der", + "ecdsa", + "ellipticcurve", + "keys", + "numbertheory", + "test_pyecdsa", + "util", + "six", +] + +_hush_pyflakes = [ + SigningKey, + VerifyingKey, + BadSignatureError, + BadDigestError, + MalformedPointError, + UnexpectedDER, + InvalidCurveError, + NoKeyError, + InvalidSharedSecretError, + ECDH, + NoCurveError, + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + Ed25519, + Ed448, + six.b(""), + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, +] +del _hush_pyflakes + +__version__ = _version.get_versions()["version"] diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c86d218 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..f4860d8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_rwlock.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_rwlock.cpython-312.pyc new file mode 100644 index 0000000..b86f1c6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_rwlock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_sha3.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_sha3.cpython-312.pyc new file mode 100644 index 0000000..290f61a Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_sha3.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000..734684c Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/curves.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/curves.cpython-312.pyc new file mode 100644 index 0000000..5406c68 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/curves.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/der.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/der.cpython-312.pyc new file mode 100644 index 0000000..f4c7e04 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/der.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdh.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdh.cpython-312.pyc new file mode 100644 index 0000000..d03f320 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdh.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdsa.cpython-312.pyc new file mode 100644 index 0000000..b5628dd Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/eddsa.cpython-312.pyc new file mode 100644 index 0000000..07a11e3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/eddsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-312.pyc new file mode 100644 index 0000000..df7b558 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..2d44a08 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/errors.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/keys.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/keys.cpython-312.pyc new file mode 100644 index 0000000..33dfeda Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/keys.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/numbertheory.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/numbertheory.cpython-312.pyc new file mode 100644 index 0000000..3fddc79 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/numbertheory.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/rfc6979.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/rfc6979.cpython-312.pyc new file mode 100644 index 0000000..66f54fb Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/rfc6979.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ssh.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ssh.cpython-312.pyc new file mode 100644 index 0000000..2be2780 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ssh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_curves.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_curves.cpython-312.pyc new file mode 100644 index 0000000..e8fc8eb Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_curves.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_der.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_der.cpython-312.pyc new file mode 100644 index 0000000..ef7959b Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_der.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdh.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdh.cpython-312.pyc new file mode 100644 index 0000000..38f78df Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdh.cpython-312.pyc 
differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-312.pyc new file mode 100644 index 0000000..d90d4ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_eddsa.cpython-312.pyc new file mode 100644 index 0000000..ec98c85 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_eddsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc new file mode 100644 index 0000000..a48853c Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_jacobi.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_jacobi.cpython-312.pyc new file mode 100644 index 0000000..77bdc52 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_jacobi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_keys.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_keys.cpython-312.pyc new file mode 100644 index 0000000..d1eb72f Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_keys.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc new file mode 100644 index 0000000..aa29a40 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-312.pyc new file mode 100644 index 0000000..272dbf7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc new file mode 100644 index 0000000..64620d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-312.pyc new file mode 100644 index 0000000..f5c6b0e Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_sha3.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_sha3.cpython-312.pyc new file mode 100644 index 0000000..f3f6e0b Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_sha3.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/util.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..3c52664 Binary files /dev/null and b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/_compat.py b/venv/lib/python3.12/site-packages/ecdsa/_compat.py new file mode 100644 index 0000000..4558e33 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/_compat.py @@ -0,0 +1,138 @@ +""" +Common functions for providing cross-python version compatibility. +""" +import sys +import re +import binascii +from six import integer_types + + +def str_idx_as_int(string, index): + """Take index'th byte from string, return as integer""" + val = string[index] + if isinstance(val, integer_types): + return val + return ord(val) + + +if sys.version_info < (3, 0): # pragma: no branch + import platform + + def normalise_bytes(buffer_object): + """Cast the input into array of bytes.""" + # flake8 runs on py3 where `buffer` indeed doesn't exist... + return buffer(buffer_object) # noqa: F821 + + def hmac_compat(ret): + return ret + + if ( + sys.version_info < (2, 7) + or sys.version_info < (2, 7, 4) + or platform.system() == "Java" + ): # pragma: no branch + + def remove_whitespace(text): + """Removes all whitespace from passed in string""" + return re.sub(r"\s+", "", text) + + def compat26_str(val): + return str(val) + + def bit_length(val): + if val == 0: + return 0 + return len(bin(val)) - 2 + + else: + + def remove_whitespace(text): + """Removes all whitespace from passed in string""" + return re.sub(r"\s+", "", text, flags=re.UNICODE) + + def compat26_str(val): + return val + + def bit_length(val): + """Return number of bits necessary to represent an integer.""" + return val.bit_length() + + def b2a_hex(val): + return binascii.b2a_hex(compat26_str(val)) + + def a2b_hex(val): + try: + return bytearray(binascii.a2b_hex(val)) + except Exception as e: + raise ValueError("base16 error: %s" % e) + + def bytes_to_int(val, byteorder): + """Convert bytes to an int.""" + if not val: + return 0 + if byteorder == "big": + return int(b2a_hex(val), 16) + if byteorder == "little": + return int(b2a_hex(val[::-1]), 16) + raise ValueError("Only 'big' and 'little' endian supported") + + def int_to_bytes(val, length=None, byteorder="big"): + """Return number converted to bytes""" + if length is None: + length = byte_length(val) + if byteorder == "big": + return bytearray( + (val >> i) & 0xFF for i in reversed(range(0, length * 8, 8)) + ) + if byteorder == "little": + return bytearray( + (val >> i) & 0xFF for i in range(0, length * 8, 8) + ) + raise ValueError("Only 'big' or 'little' endian supported") + +else: + + def hmac_compat(data): + return data + + def normalise_bytes(buffer_object): + """Cast the input into array of bytes.""" + return memoryview(buffer_object).cast("B") + + def compat26_str(val): + return val + + def remove_whitespace(text): + """Removes all whitespace from passed in string""" + return re.sub(r"\s+", "", text, flags=re.UNICODE) + + def a2b_hex(val): + try: + return bytearray(binascii.a2b_hex(bytearray(val, "ascii"))) + except Exception as e: + raise ValueError("base16 error: %s" % e) + + # pylint: disable=invalid-name + # pylint is stupid here and doesn't notice it's a function, not + # constant + bytes_to_int = int.from_bytes + # pylint: enable=invalid-name + + def bit_length(val): + """Return number of bits necessary to represent an integer.""" + return val.bit_length() + + def int_to_bytes(val, length=None, 
byteorder="big"): + """Convert integer to bytes.""" + if length is None: + length = byte_length(val) + # for gmpy we need to convert back to native int + if not isinstance(val, int): + val = int(val) + return bytearray(val.to_bytes(length=length, byteorder=byteorder)) + + +def byte_length(val): + """Return number of bytes necessary to represent an integer.""" + length = bit_length(val) + return (length + 7) // 8 diff --git a/venv/lib/python3.12/site-packages/ecdsa/_rwlock.py b/venv/lib/python3.12/site-packages/ecdsa/_rwlock.py new file mode 100644 index 0000000..010e498 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/_rwlock.py @@ -0,0 +1,86 @@ +# Copyright Mateusz Kobos, (c) 2011 +# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ +# released under the MIT licence + +import threading + + +__author__ = "Mateusz Kobos" + + +class RWLock: + """ + Read-Write locking primitive + + Synchronization object used in a solution of so-called second + readers-writers problem. In this problem, many readers can simultaneously + access a share, and a writer has an exclusive access to this share. + Additionally, the following constraints should be met: + 1) no reader should be kept waiting if the share is currently opened for + reading unless a writer is also waiting for the share, + 2) no writer should be kept waiting for the share longer than absolutely + necessary. + + The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7] + with a modification -- adding an additional lock (C{self.__readers_queue}) + -- in accordance with [2]. + + Sources: + [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008 + [2] P.J. Courtois, F. Heymans, D.L. Parnas: + "Concurrent Control with 'Readers' and 'Writers'", + Communications of the ACM, 1971 (via [3]) + [3] http://en.wikipedia.org/wiki/Readers-writers_problem + """ + + def __init__(self): + """ + A lock giving an even higher priority to the writer in certain + cases (see [2] for a discussion). + """ + self.__read_switch = _LightSwitch() + self.__write_switch = _LightSwitch() + self.__no_readers = threading.Lock() + self.__no_writers = threading.Lock() + self.__readers_queue = threading.Lock() + + def reader_acquire(self): + self.__readers_queue.acquire() + self.__no_readers.acquire() + self.__read_switch.acquire(self.__no_writers) + self.__no_readers.release() + self.__readers_queue.release() + + def reader_release(self): + self.__read_switch.release(self.__no_writers) + + def writer_acquire(self): + self.__write_switch.acquire(self.__no_readers) + self.__no_writers.acquire() + + def writer_release(self): + self.__no_writers.release() + self.__write_switch.release(self.__no_readers) + + +class _LightSwitch: + """An auxiliary "light switch"-like object. The first thread turns on the + "switch", the last one turns it off (see [1, sec. 
4.2.2] for details).""" + + def __init__(self): + self.__counter = 0 + self.__mutex = threading.Lock() + + def acquire(self, lock): + self.__mutex.acquire() + self.__counter += 1 + if self.__counter == 1: + lock.acquire() + self.__mutex.release() + + def release(self, lock): + self.__mutex.acquire() + self.__counter -= 1 + if self.__counter == 0: + lock.release() + self.__mutex.release() diff --git a/venv/lib/python3.12/site-packages/ecdsa/_sha3.py b/venv/lib/python3.12/site-packages/ecdsa/_sha3.py new file mode 100644 index 0000000..2db0058 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/_sha3.py @@ -0,0 +1,181 @@ +""" +Implementation of the SHAKE-256 algorithm for Ed448 +""" + +try: + import hashlib + + hashlib.new("shake256").digest(64) + + def shake_256(msg, outlen): + return hashlib.new("shake256", msg).digest(outlen) + +except (TypeError, ValueError): + + from ._compat import bytes_to_int, int_to_bytes + + # From little endian. + def _from_le(s): + return bytes_to_int(s, byteorder="little") + + # Rotate a word x by b places to the left. + def _rol(x, b): + return ((x << b) | (x >> (64 - b))) & (2**64 - 1) + + # Do the SHA-3 state transform on state s. + def _sha3_transform(s): + ROTATIONS = [ + 0, + 1, + 62, + 28, + 27, + 36, + 44, + 6, + 55, + 20, + 3, + 10, + 43, + 25, + 39, + 41, + 45, + 15, + 21, + 8, + 18, + 2, + 61, + 56, + 14, + ] + PERMUTATION = [ + 1, + 6, + 9, + 22, + 14, + 20, + 2, + 12, + 13, + 19, + 23, + 15, + 4, + 24, + 21, + 8, + 16, + 5, + 3, + 18, + 17, + 11, + 7, + 10, + ] + RC = [ + 0x0000000000000001, + 0x0000000000008082, + 0x800000000000808A, + 0x8000000080008000, + 0x000000000000808B, + 0x0000000080000001, + 0x8000000080008081, + 0x8000000000008009, + 0x000000000000008A, + 0x0000000000000088, + 0x0000000080008009, + 0x000000008000000A, + 0x000000008000808B, + 0x800000000000008B, + 0x8000000000008089, + 0x8000000000008003, + 0x8000000000008002, + 0x8000000000000080, + 0x000000000000800A, + 0x800000008000000A, + 0x8000000080008081, + 0x8000000000008080, + 0x0000000080000001, + 0x8000000080008008, + ] + + for rnd in range(0, 24): + # AddColumnParity (Theta) + c = [0] * 5 + d = [0] * 5 + for i in range(0, 25): + c[i % 5] ^= s[i] + for i in range(0, 5): + d[i] = c[(i + 4) % 5] ^ _rol(c[(i + 1) % 5], 1) + for i in range(0, 25): + s[i] ^= d[i % 5] + # RotateWords (Rho) + for i in range(0, 25): + s[i] = _rol(s[i], ROTATIONS[i]) + # PermuteWords (Pi) + t = s[PERMUTATION[0]] + for i in range(0, len(PERMUTATION) - 1): + s[PERMUTATION[i]] = s[PERMUTATION[i + 1]] + s[PERMUTATION[-1]] = t + # NonlinearMixRows (Chi) + for i in range(0, 25, 5): + t = [ + s[i], + s[i + 1], + s[i + 2], + s[i + 3], + s[i + 4], + s[i], + s[i + 1], + ] + for j in range(0, 5): + s[i + j] = t[j] ^ ((~t[j + 1]) & (t[j + 2])) + # AddRoundConstant (Iota) + s[0] ^= RC[rnd] + + # Reinterpret octet array b to word array and XOR it to state s. + def _reinterpret_to_words_and_xor(s, b): + for j in range(0, len(b) // 8): + s[j] ^= _from_le(b[8 * j : 8 * j + 8]) + + # Reinterpret word array w to octet array and return it. + def _reinterpret_to_octets(w): + mp = bytearray() + for j in range(0, len(w)): + mp += int_to_bytes(w[j], 8, byteorder="little") + return mp + + def _sha3_raw(msg, r_w, o_p, e_b): + """Semi-generic SHA-3 implementation""" + r_b = 8 * r_w + s = [0] * 25 + # Handle whole blocks. + idx = 0 + blocks = len(msg) // r_b + for i in range(0, blocks): + _reinterpret_to_words_and_xor(s, msg[idx : idx + r_b]) + idx += r_b + _sha3_transform(s) + # Handle last block padding. 
+ m = bytearray(msg[idx:]) + m.append(o_p) + while len(m) < r_b: + m.append(0) + m[len(m) - 1] |= 128 + # Handle padded last block. + _reinterpret_to_words_and_xor(s, m) + _sha3_transform(s) + # Output. + out = bytearray() + while len(out) < e_b: + out += _reinterpret_to_octets(s[:r_w]) + _sha3_transform(s) + return out[:e_b] + + def shake_256(msg, outlen): + return _sha3_raw(msg, 17, 31, outlen) diff --git a/venv/lib/python3.12/site-packages/ecdsa/_version.py b/venv/lib/python3.12/site-packages/ecdsa/_version.py new file mode 100644 index 0000000..d910ff9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.21) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +{ + "date": "2025-03-13T12:48:15+0100", + "dirty": false, + "error": null, + "full-revisionid": "2a6593d840ad153a16ebdd4f9b772b290494f3e3", + "version": "0.19.1" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/venv/lib/python3.12/site-packages/ecdsa/curves.py b/venv/lib/python3.12/site-packages/ecdsa/curves.py new file mode 100644 index 0000000..38e3a75 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/curves.py @@ -0,0 +1,590 @@ +from __future__ import division + +from six import PY2 +from . import der, ecdsa, ellipticcurve, eddsa +from .util import orderlen, number_to_string, string_to_number +from ._compat import normalise_bytes, bit_length + + +# orderlen was defined in this module previously, so keep it in __all__, +# will need to mark it as deprecated later +__all__ = [ + "UnknownCurveError", + "orderlen", + "Curve", + "SECP112r1", + "SECP112r2", + "SECP128r1", + "SECP160r1", + "NIST192p", + "NIST224p", + "NIST256p", + "NIST384p", + "NIST521p", + "curves", + "find_curve", + "curve_by_name", + "SECP256k1", + "BRAINPOOLP160r1", + "BRAINPOOLP160t1", + "BRAINPOOLP192r1", + "BRAINPOOLP192t1", + "BRAINPOOLP224r1", + "BRAINPOOLP224t1", + "BRAINPOOLP256r1", + "BRAINPOOLP256t1", + "BRAINPOOLP320r1", + "BRAINPOOLP320t1", + "BRAINPOOLP384r1", + "BRAINPOOLP384t1", + "BRAINPOOLP512r1", + "BRAINPOOLP512t1", + "PRIME_FIELD_OID", + "CHARACTERISTIC_TWO_FIELD_OID", + "Ed25519", + "Ed448", +] + + +PRIME_FIELD_OID = (1, 2, 840, 10045, 1, 1) +CHARACTERISTIC_TWO_FIELD_OID = (1, 2, 840, 10045, 1, 2) + + +class UnknownCurveError(Exception): + pass + + +class Curve: + def __init__(self, name, curve, generator, oid, openssl_name=None): + self.name = name + self.openssl_name = openssl_name # maybe None + self.curve = curve + self.generator = generator + self.order = generator.order() + if isinstance(curve, ellipticcurve.CurveEdTw): + # EdDSA keys are special in that both private and public + # are the same size (as it's defined only with compressed points) + + # +1 for the sign bit and then round up + self.baselen = (bit_length(curve.p()) + 1 + 7) // 8 + self.verifying_key_length = self.baselen + else: + self.baselen = orderlen(self.order) + self.verifying_key_length = 2 * orderlen(curve.p()) + self.signature_length = 2 * self.baselen + self.oid = oid + if oid: + self.encoded_oid = der.encode_oid(*oid) + + def __eq__(self, other): + if isinstance(other, Curve): + return ( + self.curve == other.curve and self.generator == other.generator + ) + return NotImplemented + + def __ne__(self, other): + return not self == other + + def __repr__(self): + 
return self.name + + def to_der(self, encoding=None, point_encoding="uncompressed"): + """Serialise the curve parameters to binary string. + + :param str encoding: the format to save the curve parameters in. + Default is ``named_curve``, with fallback being the ``explicit`` + if the OID is not set for the curve. + :param str point_encoding: the point encoding of the generator when + explicit curve encoding is used. Ignored for ``named_curve`` + format. + + :return: DER encoded ECParameters structure + :rtype: bytes + """ + if encoding is None: + if self.oid: + encoding = "named_curve" + else: + encoding = "explicit" + + if encoding not in ("named_curve", "explicit"): + raise ValueError( + "Only 'named_curve' and 'explicit' encodings supported" + ) + + if encoding == "named_curve": + if not self.oid: + raise UnknownCurveError( + "Can't encode curve using named_curve encoding without " + "associated curve OID" + ) + return der.encode_oid(*self.oid) + elif isinstance(self.curve, ellipticcurve.CurveEdTw): + assert encoding == "explicit" + raise UnknownCurveError( + "Twisted Edwards curves don't support explicit encoding" + ) + + # encode the ECParameters sequence + curve_p = self.curve.p() + version = der.encode_integer(1) + field_id = der.encode_sequence( + der.encode_oid(*PRIME_FIELD_OID), der.encode_integer(curve_p) + ) + curve = der.encode_sequence( + der.encode_octet_string( + number_to_string(self.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(self.curve.b() % curve_p, curve_p) + ), + ) + base = der.encode_octet_string(self.generator.to_bytes(point_encoding)) + order = der.encode_integer(self.generator.order()) + seq_elements = [version, field_id, curve, base, order] + if self.curve.cofactor(): + cofactor = der.encode_integer(self.curve.cofactor()) + seq_elements.append(cofactor) + + return der.encode_sequence(*seq_elements) + + def to_pem(self, encoding=None, point_encoding="uncompressed"): + """ + Serialise the curve parameters to the :term:`PEM` format. + + :param str encoding: the format to save the curve parameters in. + Default is ``named_curve``, with fallback being the ``explicit`` + if the OID is not set for the curve. + :param str point_encoding: the point encoding of the generator when + explicit curve encoding is used. Ignored for ``named_curve`` + format. + + :return: PEM encoded ECParameters structure + :rtype: str + """ + return der.topem( + self.to_der(encoding, point_encoding), "EC PARAMETERS" + ) + + @staticmethod + def from_der(data, valid_encodings=None): + """Decode the curve parameters from DER file. 
+ + :param data: the binary string to decode the parameters from + :type data: :term:`bytes-like object` + :param valid_encodings: set of names of allowed encodings, by default + all (set by passing ``None``), supported ones are ``named_curve`` + and ``explicit`` + :type valid_encodings: :term:`set-like object` + """ + if not valid_encodings: + valid_encodings = set(("named_curve", "explicit")) + if not all(i in ["named_curve", "explicit"] for i in valid_encodings): + raise ValueError( + "Only named_curve and explicit encodings supported" + ) + data = normalise_bytes(data) + if not der.is_sequence(data): + if "named_curve" not in valid_encodings: + raise der.UnexpectedDER( + "named_curve curve parameters not allowed" + ) + oid, empty = der.remove_object(data) + if empty: + raise der.UnexpectedDER("Unexpected data after OID") + return find_curve(oid) + + if "explicit" not in valid_encodings: + raise der.UnexpectedDER("explicit curve parameters not allowed") + + seq, empty = der.remove_sequence(data) + if empty: + raise der.UnexpectedDER( + "Unexpected data after ECParameters structure" + ) + # decode the ECParameters sequence + version, rest = der.remove_integer(seq) + if version != 1: + raise der.UnexpectedDER("Unknown parameter encoding format") + field_id, rest = der.remove_sequence(rest) + curve, rest = der.remove_sequence(rest) + base_bytes, rest = der.remove_octet_string(rest) + order, rest = der.remove_integer(rest) + cofactor = None + if rest: + # the ASN.1 specification of ECParameters allows for future + # extensions of the sequence, so ignore the remaining bytes + cofactor, _ = der.remove_integer(rest) + + # decode the ECParameters.fieldID sequence + field_type, rest = der.remove_object(field_id) + if field_type == CHARACTERISTIC_TWO_FIELD_OID: + raise UnknownCurveError("Characteristic 2 curves unsupported") + if field_type != PRIME_FIELD_OID: + raise UnknownCurveError( + "Unknown field type: {0}".format(field_type) + ) + prime, empty = der.remove_integer(rest) + if empty: + raise der.UnexpectedDER( + "Unexpected data after ECParameters.fieldID.Prime-p element" + ) + + # decode the ECParameters.curve sequence + curve_a_bytes, rest = der.remove_octet_string(curve) + curve_b_bytes, rest = der.remove_octet_string(rest) + # seed can be defined here, but we don't parse it, so ignore `rest` + + curve_a = string_to_number(curve_a_bytes) + curve_b = string_to_number(curve_b_bytes) + + curve_fp = ellipticcurve.CurveFp(prime, curve_a, curve_b, cofactor) + + # decode the ECParameters.base point + + base = ellipticcurve.PointJacobi.from_bytes( + curve_fp, + base_bytes, + valid_encodings=("uncompressed", "compressed", "hybrid"), + order=order, + generator=True, + ) + tmp_curve = Curve("unknown", curve_fp, base, None) + + # if the curve matches one of the well-known ones, use the well-known + # one in preference, as it will have the OID and name associated + for i in curves: + if tmp_curve == i: + return i + return tmp_curve + + @classmethod + def from_pem(cls, string, valid_encodings=None): + """Decode the curve parameters from PEM file. 
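A quick round trip through the serialisation helpers above, as an illustrative sketch (not part of the commit; it assumes the vendored ecdsa package and its six dependency are importable from this venv):

from ecdsa.curves import NIST256p, Curve

named = NIST256p.to_der()                        # named_curve: just the OID
explicit = NIST256p.to_der(encoding="explicit")  # full ECParameters structure
assert len(explicit) > len(named)

# from_der/from_pem map a matching encoding back onto the well-known object
assert Curve.from_der(named) is NIST256p
assert Curve.from_der(explicit) is NIST256p
assert Curve.from_pem(NIST256p.to_pem()) is NIST256p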
+ + :param str string: the text string to decode the parameters from + :param valid_encodings: set of names of allowed encodings, by default + all (set by passing ``None``), supported ones are ``named_curve`` + and ``explicit`` + :type valid_encodings: :term:`set-like object` + """ + if not PY2 and isinstance(string, str): # pragma: no branch + string = string.encode() + + ec_param_index = string.find(b"-----BEGIN EC PARAMETERS-----") + if ec_param_index == -1: + raise der.UnexpectedDER("EC PARAMETERS PEM header not found") + + return cls.from_der( + der.unpem(string[ec_param_index:]), valid_encodings + ) + + +# the SEC curves +SECP112r1 = Curve( + "SECP112r1", + ecdsa.curve_112r1, + ecdsa.generator_112r1, + (1, 3, 132, 0, 6), + "secp112r1", +) + + +SECP112r2 = Curve( + "SECP112r2", + ecdsa.curve_112r2, + ecdsa.generator_112r2, + (1, 3, 132, 0, 7), + "secp112r2", +) + + +SECP128r1 = Curve( + "SECP128r1", + ecdsa.curve_128r1, + ecdsa.generator_128r1, + (1, 3, 132, 0, 28), + "secp128r1", +) + + +SECP160r1 = Curve( + "SECP160r1", + ecdsa.curve_160r1, + ecdsa.generator_160r1, + (1, 3, 132, 0, 8), + "secp160r1", +) + + +# the NIST curves +NIST192p = Curve( + "NIST192p", + ecdsa.curve_192, + ecdsa.generator_192, + (1, 2, 840, 10045, 3, 1, 1), + "prime192v1", +) + + +NIST224p = Curve( + "NIST224p", + ecdsa.curve_224, + ecdsa.generator_224, + (1, 3, 132, 0, 33), + "secp224r1", +) + + +NIST256p = Curve( + "NIST256p", + ecdsa.curve_256, + ecdsa.generator_256, + (1, 2, 840, 10045, 3, 1, 7), + "prime256v1", +) + + +NIST384p = Curve( + "NIST384p", + ecdsa.curve_384, + ecdsa.generator_384, + (1, 3, 132, 0, 34), + "secp384r1", +) + + +NIST521p = Curve( + "NIST521p", + ecdsa.curve_521, + ecdsa.generator_521, + (1, 3, 132, 0, 35), + "secp521r1", +) + + +SECP256k1 = Curve( + "SECP256k1", + ecdsa.curve_secp256k1, + ecdsa.generator_secp256k1, + (1, 3, 132, 0, 10), + "secp256k1", +) + + +BRAINPOOLP160r1 = Curve( + "BRAINPOOLP160r1", + ecdsa.curve_brainpoolp160r1, + ecdsa.generator_brainpoolp160r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), + "brainpoolP160r1", +) + + +BRAINPOOLP160t1 = Curve( + "BRAINPOOLP160t1", + ecdsa.curve_brainpoolp160t1, + ecdsa.generator_brainpoolp160t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 2), + "brainpoolP160t1", +) + + +BRAINPOOLP192r1 = Curve( + "BRAINPOOLP192r1", + ecdsa.curve_brainpoolp192r1, + ecdsa.generator_brainpoolp192r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 3), + "brainpoolP192r1", +) + + +BRAINPOOLP192t1 = Curve( + "BRAINPOOLP192t1", + ecdsa.curve_brainpoolp192t1, + ecdsa.generator_brainpoolp192t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 4), + "brainpoolP192t1", +) + + +BRAINPOOLP224r1 = Curve( + "BRAINPOOLP224r1", + ecdsa.curve_brainpoolp224r1, + ecdsa.generator_brainpoolp224r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 5), + "brainpoolP224r1", +) + + +BRAINPOOLP224t1 = Curve( + "BRAINPOOLP224t1", + ecdsa.curve_brainpoolp224t1, + ecdsa.generator_brainpoolp224t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 6), + "brainpoolP224t1", +) + + +BRAINPOOLP256r1 = Curve( + "BRAINPOOLP256r1", + ecdsa.curve_brainpoolp256r1, + ecdsa.generator_brainpoolp256r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 7), + "brainpoolP256r1", +) + + +BRAINPOOLP256t1 = Curve( + "BRAINPOOLP256t1", + ecdsa.curve_brainpoolp256t1, + ecdsa.generator_brainpoolp256t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 8), + "brainpoolP256t1", +) + + +BRAINPOOLP320r1 = Curve( + "BRAINPOOLP320r1", + ecdsa.curve_brainpoolp320r1, + ecdsa.generator_brainpoolp320r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 9), + "brainpoolP320r1", +) + + +BRAINPOOLP320t1 = Curve( + "BRAINPOOLP320t1", + 
ecdsa.curve_brainpoolp320t1, + ecdsa.generator_brainpoolp320t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 10), + "brainpoolP320t1", +) + + +BRAINPOOLP384r1 = Curve( + "BRAINPOOLP384r1", + ecdsa.curve_brainpoolp384r1, + ecdsa.generator_brainpoolp384r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 11), + "brainpoolP384r1", +) + + +BRAINPOOLP384t1 = Curve( + "BRAINPOOLP384t1", + ecdsa.curve_brainpoolp384t1, + ecdsa.generator_brainpoolp384t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 12), + "brainpoolP384t1", +) + + +BRAINPOOLP512r1 = Curve( + "BRAINPOOLP512r1", + ecdsa.curve_brainpoolp512r1, + ecdsa.generator_brainpoolp512r1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 13), + "brainpoolP512r1", +) + + +BRAINPOOLP512t1 = Curve( + "BRAINPOOLP512t1", + ecdsa.curve_brainpoolp512t1, + ecdsa.generator_brainpoolp512t1, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 14), + "brainpoolP512t1", +) + + +Ed25519 = Curve( + "Ed25519", + eddsa.curve_ed25519, + eddsa.generator_ed25519, + (1, 3, 101, 112), +) + + +Ed448 = Curve( + "Ed448", + eddsa.curve_ed448, + eddsa.generator_ed448, + (1, 3, 101, 113), +) + + +# no order in particular, but keep previously added curves first +curves = [ + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + Ed25519, + Ed448, + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, +] + + +def find_curve(oid_curve): + """Select a curve based on its OID. + + :param tuple[int,...] oid_curve: ASN.1 Object Identifier of the + curve to return, like ``(1, 2, 840, 10045, 3, 1, 7)`` for ``NIST256p``. + + :raises UnknownCurveError: When the oid doesn't match any of the supported + curves + + :rtype: ~ecdsa.curves.Curve + """ + for c in curves: + if c.oid == oid_curve: + return c + raise UnknownCurveError( + "I don't know about the curve with oid %s. " + "I only know about these: %s" % (oid_curve, [c.name for c in curves]) + ) + + +def curve_by_name(name): + """Select a curve based on its name. + + Returns the :py:class:`~ecdsa.curves.Curve` object with the provided + ``name``. Note that ``name`` is case-sensitive. + + :param str name: Name of the curve to return, like ``NIST256p`` or + ``prime256v1`` + + :raises UnknownCurveError: When the name doesn't match any of the supported + curves + + :rtype: ~ecdsa.curves.Curve + """ + for c in curves: + if name == c.name or (c.openssl_name and name == c.openssl_name): + return c + raise UnknownCurveError( + "Curve with name {0!r} unknown, only curves supported: {1}".format( + name, [c.name for c in curves] + ) + ) diff --git a/venv/lib/python3.12/site-packages/ecdsa/der.py b/venv/lib/python3.12/site-packages/ecdsa/der.py new file mode 100644 index 0000000..7a06b68 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/der.py @@ -0,0 +1,478 @@ +from __future__ import division + +import binascii +import base64 +import warnings +from itertools import chain +from six import int2byte, text_type +from ._compat import compat26_str, str_idx_as_int + + +class UnexpectedDER(Exception): + pass + + +def encode_constructed(tag, value): + return int2byte(0xA0 + tag) + encode_length(len(value)) + value + + +def encode_implicit(tag, value, cls="context-specific"): + """ + Encode an IMPLICIT value using :term:`DER`.
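The lookup helpers defined above resolve both python-ecdsa names and OpenSSL aliases, as well as OIDs. A minimal sketch (not part of the commit):

from ecdsa.curves import curve_by_name, find_curve

# OpenSSL aliases resolve to the same Curve objects as the native names
assert curve_by_name("prime256v1") is curve_by_name("NIST256p")
# find_curve does the same for ASN.1 object identifiers
assert find_curve((1, 2, 840, 10045, 3, 1, 7)).name == "NIST256p"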
+ + :param int tag: the tag value to encode, must be between 0 and 31 inclusive + :param bytes value: the data to encode + :param str cls: the class of the tag to encode: "application", + "context-specific", or "private" + :rtype: bytes + """ + if cls not in ("application", "context-specific", "private"): + raise ValueError("invalid tag class") + if tag > 31: + raise ValueError("Long tags not supported") + + if cls == "application": + tag_class = 0b01000000 + elif cls == "context-specific": + tag_class = 0b10000000 + else: + assert cls == "private" + tag_class = 0b11000000 + + return int2byte(tag_class + tag) + encode_length(len(value)) + value + + +def encode_integer(r): + assert r >= 0 # can't support negative numbers yet + h = ("%x" % r).encode() + if len(h) % 2: + h = b"0" + h + s = binascii.unhexlify(h) + num = str_idx_as_int(s, 0) + if num <= 0x7F: + return b"\x02" + encode_length(len(s)) + s + else: + # DER integers are two's complement, so if the first byte is + # 0x80-0xff then we need an extra 0x00 byte to prevent it from + # looking negative. + return b"\x02" + encode_length(len(s) + 1) + b"\x00" + s + + +# sentry object to check if an argument was specified (used to detect +# deprecated calling convention) +_sentry = object() + + +def encode_bitstring(s, unused=_sentry): + """ + Encode a binary string as a BIT STRING using :term:`DER` encoding. + + Note, because there is no native Python object that can encode an actual + bit string, this function only accepts byte strings as the `s` argument. + The byte string is the actual bit string that will be encoded, padded + on the right (least significant bits, looking from the big endian + perspective) to the first full byte. If the bit string has a bit length + that is a multiple of 8, then the padding should not be included. For + correct DER encoding the padding bits MUST be set to 0. + + The number of padding bits needs to be provided as the `unused` parameter. + In case it is specified as None, it means the number of unused bits + is already encoded in the string as the first byte. + + The deprecated call convention specifies just the `s` parameter and + encodes the number of unused bits as the first byte of `s` (the same + convention as with None). + + An empty string must be encoded with `unused` specified as 0. + + A future version of python-ecdsa will make specifying the `unused` + argument mandatory.
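For example (an illustrative sketch, not part of the commit): a 12-bit string is passed as two bytes with four unused bits, and the unused-bit count becomes the first content octet:

from ecdsa import der

# 12 bits -> two bytes, low 4 bits of the last byte unused and zeroed
encoded = der.encode_bitstring(b"\xff\xf0", unused=4)
assert encoded == b"\x03\x03\x04\xff\xf0"  # tag, length, unused count, bits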
+ + :param s: bytes to encode + :type s: bytes like object + :param unused: number of bits at the end of `s` that are unused, must be + between 0 and 7 (inclusive) + :type unused: int or None + + :raises ValueError: when `unused` is too large or too small + + :return: `s` encoded using DER + :rtype: bytes + """ + encoded_unused = b"" + len_extra = 0 + if unused is _sentry: + warnings.warn( + "Legacy call convention used, unused= needs to be specified", + DeprecationWarning, + ) + elif unused is not None: + if not 0 <= unused <= 7: + raise ValueError("unused must be integer between 0 and 7") + if unused: + if not s: + raise ValueError("unused is non-zero but s is empty") + last = str_idx_as_int(s, -1) + if last & (2**unused - 1): + raise ValueError("unused bits must be zeros in DER") + encoded_unused = int2byte(unused) + len_extra = 1 + return b"\x03" + encode_length(len(s) + len_extra) + encoded_unused + s + + +def encode_octet_string(s): + return b"\x04" + encode_length(len(s)) + s + + +def encode_oid(first, second, *pieces): + assert 0 <= first < 2 and 0 <= second <= 39 or first == 2 and 0 <= second + body = b"".join( + chain( + [encode_number(40 * first + second)], + (encode_number(p) for p in pieces), + ) + ) + return b"\x06" + encode_length(len(body)) + body + + +def encode_sequence(*encoded_pieces): + total_len = sum([len(p) for p in encoded_pieces]) + return b"\x30" + encode_length(total_len) + b"".join(encoded_pieces) + + +def encode_number(n): + b128_digits = [] + while n: + b128_digits.insert(0, (n & 0x7F) | 0x80) + n = n >> 7 + if not b128_digits: + b128_digits.append(0) + b128_digits[-1] &= 0x7F + return b"".join([int2byte(d) for d in b128_digits]) + + +def is_sequence(string): + return string and string[:1] == b"\x30" + + +def remove_constructed(string): + s0 = str_idx_as_int(string, 0) + if (s0 & 0xE0) != 0xA0: + raise UnexpectedDER( + "wanted type 'constructed tag' (0xa0-0xbf), got 0x%02x" % s0 + ) + tag = s0 & 0x1F + length, llen = read_length(string[1:]) + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + return tag, body, rest + + +def remove_implicit(string, exp_class="context-specific"): + """ + Removes an IMPLICIT tagged value from ``string`` following :term:`DER`. + + :param bytes string: a byte string that can have one or more + DER elements. + :param str exp_class: the expected tag class of the implicitly + encoded value. Possible values are: "context-specific", "application", + and "private". 
+ :return: a tuple with first value being the tag without indicator bits, + second being the raw bytes of the value and the third one being + remaining bytes (or an empty string if there are none) + :rtype: tuple(int,bytes,bytes) + """ + if exp_class not in ("context-specific", "application", "private"): + raise ValueError("invalid `exp_class` value") + if exp_class == "application": + tag_class = 0b01000000 + elif exp_class == "context-specific": + tag_class = 0b10000000 + else: + assert exp_class == "private" + tag_class = 0b11000000 + tag_mask = 0b11000000 + + s0 = str_idx_as_int(string, 0) + + if (s0 & tag_mask) != tag_class: + raise UnexpectedDER( + "wanted class {0}, got 0x{1:02x} tag".format(exp_class, s0) + ) + if s0 & 0b00100000 != 0: + raise UnexpectedDER( + "wanted type primitive, got 0x{0:02x} tag".format(s0) + ) + + tag = s0 & 0x1F + length, llen = read_length(string[1:]) + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + return tag, body, rest + + +def remove_sequence(string): + if not string: + raise UnexpectedDER("Empty string does not encode a sequence") + if string[:1] != b"\x30": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n) + length, lengthlength = read_length(string[1:]) + if length > len(string) - 1 - lengthlength: + raise UnexpectedDER("Length longer than the provided buffer") + endseq = 1 + lengthlength + length + return string[1 + lengthlength : endseq], string[endseq:] + + +def remove_octet_string(string): + if string[:1] != b"\x04": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'octetstring' (0x04), got 0x%02x" % n) + length, llen = read_length(string[1:]) + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + return body, rest + + +def remove_object(string): + if not string: + raise UnexpectedDER( + "Empty string does not encode an object identifier" + ) + if string[:1] != b"\x06": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'object' (0x06), got 0x%02x" % n) + length, lengthlength = read_length(string[1:]) + body = string[1 + lengthlength : 1 + lengthlength + length] + rest = string[1 + lengthlength + length :] + if not body: + raise UnexpectedDER("Empty object identifier") + if len(body) != length: + raise UnexpectedDER( + "Length of object identifier longer than the provided buffer" + ) + numbers = [] + while body: + n, ll = read_number(body) + numbers.append(n) + body = body[ll:] + n0 = numbers.pop(0) + if n0 < 80: + first = n0 // 40 + else: + first = 2 + second = n0 - (40 * first) + numbers.insert(0, first) + numbers.insert(1, second) + return tuple(numbers), rest + + +def remove_integer(string): + if not string: + raise UnexpectedDER( + "Empty string is an invalid encoding of an integer" + ) + if string[:1] != b"\x02": + n = str_idx_as_int(string, 0) + raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n) + length, llen = read_length(string[1:]) + if length > len(string) - 1 - llen: + raise UnexpectedDER("Length longer than provided buffer") + if length == 0: + raise UnexpectedDER("0-byte long encoding of integer") + numberbytes = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + msb = str_idx_as_int(numberbytes, 0) + if not msb < 0x80: + raise UnexpectedDER("Negative integers are not supported") + # check if the encoding is the minimal one (DER requirement) + if length > 1 and not msb: + # leading zero byte is allowed if the integer would have been + # 
considered a negative number otherwise + smsb = str_idx_as_int(numberbytes, 1) + if smsb < 0x80: + raise UnexpectedDER( + "Invalid encoding of integer, unnecessary " + "zero padding bytes" + ) + return int(binascii.hexlify(numberbytes), 16), rest + + +def read_number(string): + number = 0 + llen = 0 + if str_idx_as_int(string, 0) == 0x80: + raise UnexpectedDER("Non minimal encoding of OID subidentifier") + # base-128 big endian, with most significant bit set in all but the last + # byte + while True: + if llen >= len(string): + raise UnexpectedDER("ran out of length bytes") + number = number << 7 + d = str_idx_as_int(string, llen) + number += d & 0x7F + llen += 1 + if not d & 0x80: + break + return number, llen + + +def encode_length(l): + assert l >= 0 + if l < 0x80: + return int2byte(l) + s = ("%x" % l).encode() + if len(s) % 2: + s = b"0" + s + s = binascii.unhexlify(s) + llen = len(s) + return int2byte(0x80 | llen) + s + + +def read_length(string): + if not string: + raise UnexpectedDER("Empty string can't encode valid length value") + num = str_idx_as_int(string, 0) + if not (num & 0x80): + # short form + return (num & 0x7F), 1 + # else long-form: b0&0x7f is number of additional base256 length bytes, + # big-endian + llen = num & 0x7F + if not llen: + raise UnexpectedDER("Invalid length encoding, length of length is 0") + if llen > len(string) - 1: + raise UnexpectedDER("Length of length longer than provided buffer") + # verify that the encoding is minimal possible (DER requirement) + msb = str_idx_as_int(string, 1) + if not msb or llen == 1 and msb < 0x80: + raise UnexpectedDER("Not minimal encoding of length") + return int(binascii.hexlify(string[1 : 1 + llen]), 16), 1 + llen + + +def remove_bitstring(string, expect_unused=_sentry): + """ + Remove a BIT STRING object from `string` following :term:`DER`. + + The `expect_unused` can be used to specify if the bit string should + have the amount of unused bits decoded or not. If it's an integer, any + read BIT STRING that has number of unused bits different from specified + value will cause UnexpectedDER exception to be raised (this is especially + useful when decoding BIT STRINGS that have DER encoded object in them; + DER encoding is byte oriented, so the unused bits will always equal 0). + + If the `expect_unused` is specified as None, the first element returned + will be a tuple, with the first value being the extracted bit string + while the second value will be the decoded number of unused bits. + + If the `expect_unused` is unspecified, the decoding of byte with + number of unused bits will not be attempted and the bit string will be + returned as-is, the callee will be required to decode it and verify its + correctness. + + Future version of python will require the `expected_unused` parameter + to be specified. + + :param string: string of bytes to extract the BIT STRING from + :type string: bytes like object + :param expect_unused: number of bits that should be unused in the BIT + STRING, or None, to return it to caller + :type expect_unused: int or None + + :raises UnexpectedDER: when the encoding does not follow DER. 
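The short and long length forms handled by encode_length/read_length above can be exercised directly (a sketch, not part of the commit):

from ecdsa import der

assert der.encode_length(0x45) == b"\x45"            # short form, one byte
assert der.encode_length(0x1234) == b"\x82\x12\x34"  # long form, 2 length bytes
assert der.read_length(b"\x82\x12\x34") == (0x1234, 3)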
+ + :return: a tuple with first element being the extracted bit string and + the second being the remaining bytes in the string (if any); if the + `expect_unused` is specified as None, the first element of the returned + tuple will be a tuple itself, with first element being the bit string + as bytes and the second element being the number of unused bits at the + end of the byte array as an integer + :rtype: tuple + """ + if not string: + raise UnexpectedDER("Empty string does not encode a bitstring") + if expect_unused is _sentry: + warnings.warn( + "Legacy call convention used, expect_unused= needs to be" + " specified", + DeprecationWarning, + ) + num = str_idx_as_int(string, 0) + if string[:1] != b"\x03": + raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num) + length, llen = read_length(string[1:]) + if not length: + raise UnexpectedDER("Invalid length of bit string, can't be 0") + body = string[1 + llen : 1 + llen + length] + rest = string[1 + llen + length :] + if expect_unused is not _sentry: + unused = str_idx_as_int(body, 0) + if not 0 <= unused <= 7: + raise UnexpectedDER("Invalid encoding of unused bits") + if expect_unused is not None and expect_unused != unused: + raise UnexpectedDER("Unexpected number of unused bits") + body = body[1:] + if unused: + if not body: + raise UnexpectedDER("Invalid encoding of empty bit string") + last = str_idx_as_int(body, -1) + # verify that all the unused bits are set to zero (DER requirement) + if last & (2**unused - 1): + raise UnexpectedDER("Non zero padding bits in bit string") + if expect_unused is None: + body = (body, unused) + return body, rest + + +# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING) + + +# signatures: (from RFC3279) +# ansi-X9-62 OBJECT IDENTIFIER ::= { +# iso(1) member-body(2) us(840) 10045 } +# +# id-ecSigType OBJECT IDENTIFIER ::= { +# ansi-X9-62 signatures(4) } +# ecdsa-with-SHA1 OBJECT IDENTIFIER ::= { +# id-ecSigType 1 } +# so 1,2,840,10045,4,1 +# so 0x42, .. .. + +# Ecdsa-Sig-Value ::= SEQUENCE { +# r INTEGER, +# s INTEGER } + +# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 } +# +# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 } + +# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021) +# secp224r1 OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) curve(0) 33 } +# and the secp384r1 is (t=06,l=05,v=2b81040022) +# secp384r1 OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) curve(0) 34 } + + +def unpem(pem): + if isinstance(pem, text_type): # pragma: no branch + pem = pem.encode() + + d = b"".join( + [ + l.strip() + for l in pem.split(b"\n") + if l and not l.startswith(b"-----") + ] + ) + return base64.b64decode(d) + + +def topem(der, name): + b64 = base64.b64encode(compat26_str(der)) + lines = [("-----BEGIN %s-----\n" % name).encode()] + lines.extend( + [b64[start : start + 76] + b"\n" for start in range(0, len(b64), 76)] + ) + lines.append(("-----END %s-----\n" % name).encode()) + return b"".join(lines) diff --git a/venv/lib/python3.12/site-packages/ecdsa/ecdh.py b/venv/lib/python3.12/site-packages/ecdsa/ecdh.py new file mode 100644 index 0000000..7f697d9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/ecdh.py @@ -0,0 +1,336 @@ +""" +Class for performing Elliptic-curve Diffie-Hellman (ECDH) operations. 
+""" + +from .util import number_to_string +from .ellipticcurve import INFINITY +from .keys import SigningKey, VerifyingKey + + +__all__ = [ + "ECDH", + "NoKeyError", + "NoCurveError", + "InvalidCurveError", + "InvalidSharedSecretError", +] + + +class NoKeyError(Exception): + """ECDH. Key not found but it is needed for operation.""" + + pass + + +class NoCurveError(Exception): + """ECDH. Curve not set but it is needed for operation.""" + + pass + + +class InvalidCurveError(Exception): + """ + ECDH. Raised in case the public and private keys use different curves. + """ + + pass + + +class InvalidSharedSecretError(Exception): + """ECDH. Raised in case the shared secret we obtained is an INFINITY.""" + + pass + + +class ECDH(object): + """ + Elliptic-curve Diffie-Hellman (ECDH). A key agreement protocol. + + Allows two parties, each having an elliptic-curve public-private key + pair, to establish a shared secret over an insecure channel + """ + + def __init__(self, curve=None, private_key=None, public_key=None): + """ + ECDH init. + + Call can be initialised without parameters, then the first operation + (loading either key) will set the used curve. + All parameters must be ultimately set before shared secret + calculation will be allowed. + + :param curve: curve for operations + :type curve: Curve + :param private_key: `my` private key for ECDH + :type private_key: SigningKey + :param public_key: `their` public key for ECDH + :type public_key: VerifyingKey + """ + self.curve = curve + self.private_key = None + self.public_key = None + if private_key: + self.load_private_key(private_key) + if public_key: + self.load_received_public_key(public_key) + + def _get_shared_secret(self, remote_public_key): + if not self.private_key: + raise NoKeyError( + "Private key needs to be set to create shared secret" + ) + if not self.public_key: + raise NoKeyError( + "Public key needs to be set to create shared secret" + ) + if not ( + self.private_key.curve == self.curve == remote_public_key.curve + ): + raise InvalidCurveError( + "Curves for public key and private key is not equal." + ) + + # shared secret = PUBKEYtheirs * PRIVATEKEYours + result = ( + remote_public_key.pubkey.point + * self.private_key.privkey.secret_multiplier + ) + if result == INFINITY: + raise InvalidSharedSecretError("Invalid shared secret (INFINITY).") + + return result.x() + + def set_curve(self, key_curve): + """ + Set the working curve for ecdh operations. + + :param key_curve: curve from `curves` module + :type key_curve: Curve + """ + self.curve = key_curve + + def generate_private_key(self): + """ + Generate local private key for ecdh operation with curve that was set. + + :raises NoCurveError: Curve must be set before key generation. + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey + """ + if not self.curve: + raise NoCurveError("Curve must be set prior to key generation.") + return self.load_private_key(SigningKey.generate(curve=self.curve)) + + def load_private_key(self, private_key): + """ + Load private key from SigningKey (keys.py) object. + + Needs to have the same curve as was set with set_curve method. + If curve is not set - it sets from this SigningKey + + :param private_key: Initialised SigningKey class + :type private_key: SigningKey + + :raises InvalidCurveError: private_key curve not the same as self.curve + + :return: public (verifying) key from this private key. 
+ :rtype: VerifyingKey + """ + if not self.curve: + self.curve = private_key.curve + if self.curve != private_key.curve: + raise InvalidCurveError("Curve mismatch.") + self.private_key = private_key + return self.private_key.get_verifying_key() + + def load_private_key_bytes(self, private_key): + """ + Load private key from byte string. + + Uses current curve and checks if the provided key matches + the curve of ECDH key agreement. + Key loads via from_string method of SigningKey class + + :param private_key: private key in bytes string format + :type private_key: :term:`bytes-like object` + + :raises NoCurveError: Curve must be set before loading. + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey + """ + if not self.curve: + raise NoCurveError("Curve must be set prior to key load.") + return self.load_private_key( + SigningKey.from_string(private_key, curve=self.curve) + ) + + def load_private_key_der(self, private_key_der): + """ + Load private key from DER byte string. + + Compares the curve of the DER-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only DER format supported is the RFC5915 + Look at keys.py:SigningKey.from_der() + + :param private_key_der: string with the DER encoding of private ECDSA + key + :type private_key_der: string + + :raises InvalidCurveError: private_key curve not the same as self.curve + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey + """ + return self.load_private_key(SigningKey.from_der(private_key_der)) + + def load_private_key_pem(self, private_key_pem): + """ + Load private key from PEM string. + + Compares the curve of the DER-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only PEM format supported is the RFC5915 + Look at keys.py:SigningKey.from_pem() + it needs to have `EC PRIVATE KEY` section + + :param private_key_pem: string with PEM-encoded private ECDSA key + :type private_key_pem: string + + :raises InvalidCurveError: private_key curve not the same as self.curve + + :return: public (verifying) key from this private key. + :rtype: VerifyingKey + """ + return self.load_private_key(SigningKey.from_pem(private_key_pem)) + + def get_public_key(self): + """ + Provides a public key that matches the local private key. + + Needs to be sent to the remote party. + + :return: public (verifying) key from local private key. + :rtype: VerifyingKey + """ + return self.private_key.get_verifying_key() + + def load_received_public_key(self, public_key): + """ + Load public key from VerifyingKey (keys.py) object. + + Needs to have the same curve as set as current for ecdh operation. + If curve is not set - it sets it from VerifyingKey. + + :param public_key: Initialised VerifyingKey class + :type public_key: VerifyingKey + + :raises InvalidCurveError: public_key curve not the same as self.curve + """ + if not self.curve: + self.curve = public_key.curve + if self.curve != public_key.curve: + raise InvalidCurveError("Curve mismatch.") + self.public_key = public_key + + def load_received_public_key_bytes( + self, public_key_str, valid_encodings=None + ): + """ + Load public key from byte string. + + Uses current curve and checks if key length corresponds to + the current curve. 
+ The key is loaded via the from_string method of the VerifyingKey class. + + :param public_key_str: public key in bytes string format + :type public_key_str: :term:`bytes-like object` + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + """ + return self.load_received_public_key( + VerifyingKey.from_string( + public_key_str, self.curve, valid_encodings + ) + ) + + def load_received_public_key_der(self, public_key_der): + """ + Load public key from DER byte string. + + Compares the curve of the DER-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only DER format supported is RFC5912. + Look at keys.py:VerifyingKey.from_der() + + :param public_key_der: string with the DER encoding of public ECDSA key + :type public_key_der: string + + :raises InvalidCurveError: public_key curve not the same as self.curve + """ + return self.load_received_public_key( + VerifyingKey.from_der(public_key_der) + ) + + def load_received_public_key_pem(self, public_key_pem): + """ + Load public key from PEM string. + + Compares the curve of the PEM-encoded key with the ECDH set curve, + uses the former if unset. + + Note, the only PEM format supported is RFC5912. + Look at keys.py:VerifyingKey.from_pem() + + :param public_key_pem: string with PEM-encoded public ECDSA key + :type public_key_pem: string + + :raises InvalidCurveError: public_key curve not the same as self.curve + """ + return self.load_received_public_key( + VerifyingKey.from_pem(public_key_pem) + ) + + def generate_sharedsecret_bytes(self): + """ + Generate shared secret from local private key and remote public key. + + The object needs to have both the private key and the received public + key before generation is allowed. + + :raises InvalidCurveError: public_key curve not the same as self.curve + :raises NoKeyError: public_key or private_key is not set + + :return: shared secret + :rtype: bytes + """ + return number_to_string( + self.generate_sharedsecret(), self.private_key.curve.curve.p() + ) + + def generate_sharedsecret(self): + """ + Generate shared secret from local private key and remote public key. + + The object needs to have both the private key and the received public + key before generation is allowed. + + It is the same for the local and the remote party: + shared secret(local private key, remote public key) == + shared secret(local public key, remote private key) + + :raises InvalidCurveError: public_key curve not the same as self.curve + :raises NoKeyError: public_key or private_key is not set + + :return: shared secret + :rtype: int + """ + return self._get_shared_secret(self.public_key) diff --git a/venv/lib/python3.12/site-packages/ecdsa/ecdsa.py b/venv/lib/python3.12/site-packages/ecdsa/ecdsa.py new file mode 100644 index 0000000..f710965 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/ecdsa.py @@ -0,0 +1,1094 @@ +#! /usr/bin/env python + +""" +Low level implementation of Elliptic-Curve Digital Signatures. + +.. note :: + You're most likely looking for the :py:class:`~ecdsa.keys` module. + This is a low-level implementation of the ECDSA that operates on + integers, not byte strings. + +NOTE: This is a low-level implementation of ECDSA; for normal applications +you should be looking at the keys.py module.
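A minimal two-party flow through the ECDH class above, as an illustrative sketch (not part of the commit):

from ecdsa import ECDH, NIST256p

alice = ECDH(curve=NIST256p)
bob = ECDH(curve=NIST256p)

# generate_private_key() returns the VerifyingKey to hand to the peer
alice_pub = alice.generate_private_key()
bob_pub = bob.generate_private_key()

alice.load_received_public_key(bob_pub)
bob.load_received_public_key(alice_pub)

# both sides derive the same shared secret
assert alice.generate_sharedsecret_bytes() == bob.generate_sharedsecret_bytes()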
+ +Classes and methods for elliptic-curve signatures: +private keys, public keys, signatures, +and definitions of prime-modulus curves. + +Example: + +.. code-block:: python + + # (In real-life applications, you would probably want to + # protect against defects in SystemRandom.) + from random import SystemRandom + randrange = SystemRandom().randrange + + # Generate a public/private key pair using the NIST Curve P-192: + + g = generator_192 + n = g.order() + secret = randrange( 1, n ) + pubkey = Public_key( g, g * secret ) + privkey = Private_key( pubkey, secret ) + + # Signing a hash value: + + hash = randrange( 1, n ) + signature = privkey.sign( hash, randrange( 1, n ) ) + + # Verifying a signature for a hash value: + + if pubkey.verifies( hash, signature ): + print("Demo verification succeeded.") + else: + print("*** Demo verification failed.") + + # Verification fails if the hash value is modified: + + if pubkey.verifies( hash-1, signature ): + print("**** Demo verification failed to reject tampered hash.") + else: + print("Demo verification correctly rejected tampered hash.") + +Revision history: + 2005.12.31 - Initial version. + + 2008.11.25 - Substantial revisions introducing new classes. + + 2009.05.16 - Warn against using random.randrange in real applications. + + 2009.05.17 - Use random.SystemRandom by default. + +Originally written in 2005 by Peter Pearson and placed in the public domain, +modified as part of the python-ecdsa package. +""" + +import warnings +from six import int2byte +from . import ellipticcurve +from . import numbertheory +from .util import bit_length +from ._compat import remove_whitespace + + +class RSZeroError(RuntimeError): + pass + + +class InvalidPointError(RuntimeError): + pass + + +class Signature(object): + """ + ECDSA signature. + + :ivar int r: the ``r`` element of the ECDSA signature + :ivar int s: the ``s`` element of the ECDSA signature + """ + + def __init__(self, r, s): + self.r = r + self.s = s + + def recover_public_keys(self, hash, generator): + """ + Returns two public keys for which the signature is valid + + :param int hash: signed hash + :param AbstractPoint generator: is the generator used in creation + of the signature + :rtype: tuple(Public_key, Public_key) + :return: a pair of public keys that can validate the signature + """ + curve = generator.curve() + n = generator.order() + r = self.r + s = self.s + e = hash + x = r + + # Compute the curve point with x as x-coordinate + alpha = ( + pow(x, 3, curve.p()) + (curve.a() * x) + curve.b() + ) % curve.p() + beta = numbertheory.square_root_mod_prime(alpha, curve.p()) + y = beta if beta % 2 == 0 else curve.p() - beta + + # Compute the public key + R1 = ellipticcurve.PointJacobi(curve, x, y, 1, n) + Q1 = numbertheory.inverse_mod(r, n) * (s * R1 + (-e % n) * generator) + Pk1 = Public_key(generator, Q1) + + # And the second solution + R2 = ellipticcurve.PointJacobi(curve, x, -y, 1, n) + Q2 = numbertheory.inverse_mod(r, n) * (s * R2 + (-e % n) * generator) + Pk2 = Public_key(generator, Q2) + + return [Pk1, Pk2] + + +class Public_key(object): + """Public key for ECDSA.""" + + def __init__(self, generator, point, verify=True): + """Low level ECDSA public key object. 
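Signature.recover_public_keys, defined above, returns the two candidate keys for a given digest; one of them matches the signer. A sketch (not part of the commit):

from random import SystemRandom
from ecdsa.ecdsa import Public_key, Private_key, generator_256

randrange = SystemRandom().randrange
n = generator_256.order()
secret = randrange(1, n)
pubkey = Public_key(generator_256, generator_256 * secret)
privkey = Private_key(pubkey, secret)

digest = randrange(1, n)
sig = privkey.sign(digest, randrange(1, n))

# one of the two recovered keys is the signer's public key
recovered = sig.recover_public_keys(digest, generator_256)
assert any(pk.point == pubkey.point for pk in recovered)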
+ + :param generator: the Point that generates the group (the base point) + :param point: the Point that defines the public key + :param bool verify: if True check if point is valid point on curve + + :raises InvalidPointError: if the point parameters are invalid or + point does not lay on the curve + """ + + self.curve = generator.curve() + self.generator = generator + self.point = point + n = generator.order() + p = self.curve.p() + if not (0 <= point.x() < p) or not (0 <= point.y() < p): + raise InvalidPointError( + "The public point has x or y out of range." + ) + if verify and not self.curve.contains_point(point.x(), point.y()): + raise InvalidPointError("Point does not lay on the curve") + if not n: + raise InvalidPointError("Generator point must have order.") + # for curve parameters with base point with cofactor 1, all points + # that are on the curve are scalar multiples of the base point, so + # verifying that is not necessary. See Section 3.2.2.1 of SEC 1 v2 + if ( + verify + and self.curve.cofactor() != 1 + and not n * point == ellipticcurve.INFINITY + ): + raise InvalidPointError("Generator point order is bad.") + + def __eq__(self, other): + """Return True if the keys are identical, False otherwise. + + Note: for comparison, only placement on the same curve and point + equality is considered, use of the same generator point is not + considered. + """ + if isinstance(other, Public_key): + return self.curve == other.curve and self.point == other.point + return NotImplemented + + def __ne__(self, other): + """Return False if the keys are identical, True otherwise.""" + return not self == other + + def verifies(self, hash, signature): + """Verify that signature is a valid signature of hash. + Return True if the signature is valid. + """ + + # From X9.62 J.3.1. + + G = self.generator + n = G.order() + r = signature.r + s = signature.s + if r < 1 or r > n - 1: + return False + if s < 1 or s > n - 1: + return False + c = numbertheory.inverse_mod(s, n) + u1 = (hash * c) % n + u2 = (r * c) % n + if hasattr(G, "mul_add"): + xy = G.mul_add(u1, self.point, u2) + else: + xy = u1 * G + u2 * self.point + v = xy.x() % n + return v == r + + +class Private_key(object): + """Private key for ECDSA.""" + + def __init__(self, public_key, secret_multiplier): + """public_key is of class Public_key; + secret_multiplier is a large integer. + """ + + self.public_key = public_key + self.secret_multiplier = secret_multiplier + + def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" + if isinstance(other, Private_key): + return ( + self.public_key == other.public_key + and self.secret_multiplier == other.secret_multiplier + ) + return NotImplemented + + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + def sign(self, hash, random_k): + """Return a signature for the provided hash, using the provided + random nonce. It is absolutely vital that random_k be an unpredictable + number in the range [1, self.public_key.point.order()-1]. If + an attacker can guess random_k, he can compute our private key from a + single signature. Also, if an attacker knows a few high-order + bits (or a few low-order bits) of random_k, he can compute our private + key from many signatures. The generation of nonces with adequate + cryptographic strength is very difficult and far beyond the scope + of this comment. + + May raise RuntimeError, in which case retrying with a new + random value k is in order. 
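The warning above is easy to demonstrate: reusing random_k across two signatures lets anyone solve for the private key. An illustrative sketch (not part of the commit; assumes the two hash values differ):

from random import SystemRandom
from ecdsa.ecdsa import Public_key, Private_key, generator_256
from ecdsa.numbertheory import inverse_mod

randrange = SystemRandom().randrange
n = generator_256.order()
secret = randrange(1, n)
pubkey = Public_key(generator_256, generator_256 * secret)
privkey = Private_key(pubkey, secret)

k = randrange(1, n)                  # the nonce, reused by mistake
h1, h2 = randrange(1, n), randrange(1, n)
sig1, sig2 = privkey.sign(h1, k), privkey.sign(h2, k)

# identical r betrays the reuse; two equations in (k, secret) solve it
k_rec = (h1 - h2) * inverse_mod(sig1.s - sig2.s, n) % n
d_rec = (sig1.s * k_rec - h1) * inverse_mod(sig1.r, n) % n
assert d_rec == secret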
+ """ + + G = self.public_key.generator + n = G.order() + k = random_k % n + # Fix the bit-length of the random nonce, + # so that it doesn't leak via timing. + # This does not change that ks = k mod n + ks = k + n + kt = ks + n + if bit_length(ks) == bit_length(n): + p1 = kt * G + else: + p1 = ks * G + r = p1.x() % n + if r == 0: + raise RSZeroError("amazingly unlucky random number r") + s = ( + numbertheory.inverse_mod(k, n) + * (hash + (self.secret_multiplier * r) % n) + ) % n + if s == 0: + raise RSZeroError("amazingly unlucky random number s") + return Signature(r, s) + + +def int_to_string(x): # pragma: no cover + """Convert integer x into a string of bytes, as per X9.62.""" + # deprecated in 0.19 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to util.number_to_string.", + DeprecationWarning, + ) + assert x >= 0 + if x == 0: + return b"\0" + result = [] + while x: + ordinal = x & 0xFF + result.append(int2byte(ordinal)) + x >>= 8 + + result.reverse() + return b"".join(result) + + +def string_to_int(s): # pragma: no cover + """Convert a string of bytes into an integer, as per X9.62.""" + # deprecated in 0.19 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to util.string_to_number.", + DeprecationWarning, + ) + result = 0 + for c in s: + if not isinstance(c, int): + c = ord(c) + result = 256 * result + c + return result + + +def digest_integer(m): # pragma: no cover + """Convert an integer into a string of bytes, compute + its SHA-1 hash, and convert the result to an integer.""" + # deprecated in 0.19 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to a one-liner with util.number_to_string and " + "util.string_to_number methods.", + DeprecationWarning, + ) + # + # I don't expect this function to be used much. I wrote + # it in order to be able to duplicate the examples + # in ECDSAVS. + # + from hashlib import sha1 + + return string_to_int(sha1(int_to_string(m)).digest()) + + +def point_is_valid(generator, x, y): + """Is (x,y) a valid public key based on the specified generator?""" + + # These are the tests specified in X9.62. 
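+    # In order, the code below checks that:
+    #  1. both coordinates are in the interval [0, p-1],
+    #  2. the point satisfies the curve equation, and
+    #  3. for curves with cofactor != 1, that n*Q is the point at
+    #     infinity (i.e. Q lies in the prime-order subgroup).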
+ + n = generator.order() + curve = generator.curve() + p = curve.p() + if not (0 <= x < p) or not (0 <= y < p): + return False + if not curve.contains_point(x, y): + return False + if ( + curve.cofactor() != 1 + and not n * ellipticcurve.PointJacobi(curve, x, y, 1) + == ellipticcurve.INFINITY + ): + return False + return True + + +# secp112r1 curve +_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) +# s = 00F50B02 8E4D696E 67687561 51752904 72783FB1 +_a = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD2088"), 16) +_b = int(remove_whitespace("659E F8BA0439 16EEDE89 11702B22"), 16) +_Gx = int(remove_whitespace("09487239 995A5EE7 6B55F9C2 F098"), 16) +_Gy = int(remove_whitespace("A89C E5AF8724 C0A23E0E 0FF77500"), 16) +_r = int(remove_whitespace("DB7C 2ABF62E3 5E7628DF AC6561C5"), 16) +_h = 1 +curve_112r1 = ellipticcurve.CurveFp(_p, _a, _b, _h) +generator_112r1 = ellipticcurve.PointJacobi( + curve_112r1, _Gx, _Gy, 1, _r, generator=True +) + + +# secp112r2 curve +_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) +# s = 022757A1 114D69E 67687561 51755316 C05E0BD4 +_a = int(remove_whitespace("6127 C24C05F3 8A0AAAF6 5C0EF02C"), 16) +_b = int(remove_whitespace("51DE F1815DB5 ED74FCC3 4C85D709"), 16) +_Gx = int(remove_whitespace("4BA30AB5 E892B4E1 649DD092 8643"), 16) +_Gy = int(remove_whitespace("ADCD 46F5882E 3747DEF3 6E956E97"), 16) +_r = int(remove_whitespace("36DF 0AAFD8B8 D7597CA1 0520D04B"), 16) +_h = 4 +curve_112r2 = ellipticcurve.CurveFp(_p, _a, _b, _h) +generator_112r2 = ellipticcurve.PointJacobi( + curve_112r2, _Gx, _Gy, 1, _r, generator=True +) + + +# secp128r1 curve +_p = int(remove_whitespace("FFFFFFFD FFFFFFFF FFFFFFFF FFFFFFFF"), 16) +# S = 000E0D4D 69E6768 75615175 0CC03A44 73D03679 +# a and b are mod p, so a is equal to p-3, or simply -3 +# _a = -3 +_b = int(remove_whitespace("E87579C1 1079F43D D824993C 2CEE5ED3"), 16) +_Gx = int(remove_whitespace("161FF752 8B899B2D 0C28607C A52C5B86"), 16) +_Gy = int(remove_whitespace("CF5AC839 5BAFEB13 C02DA292 DDED7A83"), 16) +_r = int(remove_whitespace("FFFFFFFE 00000000 75A30D1B 9038A115"), 16) +_h = 1 +curve_128r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) +generator_128r1 = ellipticcurve.PointJacobi( + curve_128r1, _Gx, _Gy, 1, _r, generator=True +) + + +# secp160r1 +_p = int(remove_whitespace("FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF 7FFFFFFF"), 16) +# S = 1053CDE4 2C14D696 E6768756 1517533B F3F83345 +# a and b are mod p, so a is equal to p-3, or simply -3 +# _a = -3 +_b = int(remove_whitespace("1C97BEFC 54BD7A8B 65ACF89F 81D4D4AD C565FA45"), 16) +_Gx = int( + remove_whitespace("4A96B568 8EF57328 46646989 68C38BB9 13CBFC82"), + 16, +) +_Gy = int( + remove_whitespace("23A62855 3168947D 59DCC912 04235137 7AC5FB32"), + 16, +) +_r = int( + remove_whitespace("01 00000000 00000000 0001F4C8 F927AED3 CA752257"), + 16, +) +_h = 1 +curve_160r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) +generator_160r1 = ellipticcurve.PointJacobi( + curve_160r1, _Gx, _Gy, 1, _r, generator=True +) + + +# NIST Curve P-192: +_p = 6277101735386680763835789423207666416083908700390324961279 +_r = 6277101735386680763835789423176059013767194773182842284081 +# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L +# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L +_b = int( + remove_whitespace( + """ + 64210519 E59C80E7 0FA7E9AB 72243049 FEB8DEEC C146B9B1""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 188DA80E B03090F6 7CBF20EB 43A18800 F4FF0AFD 82FF1012""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 07192B95 FFC8DA78 
631011ED 6B24CDD5 73F977A1 1E794811""" + ), + 16, +) + +curve_192 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_192 = ellipticcurve.PointJacobi( + curve_192, _Gx, _Gy, 1, _r, generator=True +) + + +# NIST Curve P-224: +_p = int( + remove_whitespace( + """ + 2695994666715063979466701508701963067355791626002630814351 + 0066298881""" + ) +) +_r = int( + remove_whitespace( + """ + 2695994666715063979466701508701962594045780771442439172168 + 2722368061""" + ) +) +# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L +# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL +_b = int( + remove_whitespace( + """ + B4050A85 0C04B3AB F5413256 5044B0B7 D7BFD8BA 270B3943 + 2355FFB4""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + B70E0CBD 6BB4BF7F 321390B9 4A03C1D3 56C21122 343280D6 + 115C1D21""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + BD376388 B5F723FB 4C22DFE6 CD4375A0 5A074764 44D58199 + 85007E34""" + ), + 16, +) + +curve_224 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_224 = ellipticcurve.PointJacobi( + curve_224, _Gx, _Gy, 1, _r, generator=True +) + +# NIST Curve P-256: +_p = int( + remove_whitespace( + """ + 1157920892103562487626974469494075735300861434152903141955 + 33631308867097853951""" + ) +) +_r = int( + remove_whitespace( + """ + 115792089210356248762697446949407573529996955224135760342 + 422259061068512044369""" + ) +) +# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L +# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL +_b = int( + remove_whitespace( + """ + 5AC635D8 AA3A93E7 B3EBBD55 769886BC 651D06B0 CC53B0F6 + 3BCE3C3E 27D2604B""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 6B17D1F2 E12C4247 F8BCE6E5 63A440F2 77037D81 2DEB33A0 + F4A13945 D898C296""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 4FE342E2 FE1A7F9B 8EE7EB4A 7C0F9E16 2BCE3357 6B315ECE + CBB64068 37BF51F5""" + ), + 16, +) + +curve_256 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_256 = ellipticcurve.PointJacobi( + curve_256, _Gx, _Gy, 1, _r, generator=True +) + +# NIST Curve P-384: +_p = int( + remove_whitespace( + """ + 3940200619639447921227904010014361380507973927046544666794 + 8293404245721771496870329047266088258938001861606973112319""" + ) +) +_r = int( + remove_whitespace( + """ + 3940200619639447921227904010014361380507973927046544666794 + 6905279627659399113263569398956308152294913554433653942643""" + ) +) +# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L +# c = int(remove_whitespace( +# """ +# 79d1e655 f868f02f ff48dcde e14151dd b80643c1 406d0ca1 +# 0dfe6fc5 2009540a 495e8042 ea5f744f 6e184667 cc722483""" +# ), 16) +_b = int( + remove_whitespace( + """ + B3312FA7 E23EE7E4 988E056B E3F82D19 181D9C6E FE814112 + 0314088F 5013875A C656398D 8A2ED19D 2A85C8ED D3EC2AEF""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + AA87CA22 BE8B0537 8EB1C71E F320AD74 6E1D3B62 8BA79B98 + 59F741E0 82542A38 5502F25D BF55296C 3A545E38 72760AB7""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 3617DE4A 96262C6F 5D9E98BF 9292DC29 F8F41DBD 289A147C + E9DA3113 B5F0B8C0 0A60B1CE 1D7E819D 7A431D7C 90EA0E5F""" + ), + 16, +) + +curve_384 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_384 = ellipticcurve.PointJacobi( + curve_384, _Gx, _Gy, 1, _r, generator=True +) + +# NIST Curve P-521: +_p = int( + "686479766013060971498190079908139321726943530014330540939" + "446345918554318339765605212255964066145455497729631139148" + "0858037121987999716643812574028291115057151" +) +_r = int( + "686479766013060971498190079908139321726943530014330540939" 
+ "446345918554318339765539424505774633321719753296399637136" + "3321113864768612440380340372808892707005449" +) +# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL +# c = int(remove_whitespace( +# """ +# 0b4 8bfa5f42 0a349495 39d2bdfc 264eeeeb 077688e4 +# 4fbf0ad8 f6d0edb3 7bd6b533 28100051 8e19f1b9 ffbe0fe9 +# ed8a3c22 00b8f875 e523868c 70c1e5bf 55bad637""" +# ), 16) +_b = int( + remove_whitespace( + """ + 051 953EB961 8E1C9A1F 929A21A0 B68540EE A2DA725B + 99B315F3 B8B48991 8EF109E1 56193951 EC7E937B 1652C0BD + 3BB1BF07 3573DF88 3D2C34F1 EF451FD4 6B503F00""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + C6 858E06B7 0404E9CD 9E3ECB66 2395B442 9C648139 + 053FB521 F828AF60 6B4D3DBA A14B5E77 EFE75928 FE1DC127 + A2FFA8DE 3348B3C1 856A429B F97E7E31 C2E5BD66""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 118 39296A78 9A3BC004 5C8A5FB4 2C7D1BD9 98F54449 + 579B4468 17AFBD17 273E662C 97EE7299 5EF42640 C550B901 + 3FAD0761 353C7086 A272C240 88BE9476 9FD16650""" + ), + 16, +) + +curve_521 = ellipticcurve.CurveFp(_p, -3, _b, 1) +generator_521 = ellipticcurve.PointJacobi( + curve_521, _Gx, _Gy, 1, _r, generator=True +) + +# Certicom secp256-k1 +_a = 0x0000000000000000000000000000000000000000000000000000000000000000 +_b = 0x0000000000000000000000000000000000000000000000000000000000000007 +_p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F +_Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 +_Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 +_r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + +curve_secp256k1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_secp256k1 = ellipticcurve.PointJacobi( + curve_secp256k1, _Gx, _Gy, 1, _r, generator=True +) + +# Brainpool P-160-r1 +_a = 0x340E7BE2A280EB74E2BE61BADA745D97E8F7C300 +_b = 0x1E589A8595423412134FAA2DBDEC95C8D8675E58 +_p = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620F +_Gx = 0xBED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3 +_Gy = 0x1667CB477A1A8EC338F94741669C976316DA6321 +_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 + +curve_brainpoolp160r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp160r1 = ellipticcurve.PointJacobi( + curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-160-t1 +_a = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620C +_b = 0x7A556B6DAE535B7B51ED2C4D7DAA7A0B5C55F380 +# _z = 0x24DBFF5DEC9B986BBFE5295A29BFBAE45E0F5D0B +_Gx = 0xB199B13B9B34EFC1397E64BAEB05ACC265FF2378 +_Gy = 0xADD6718B7C7C1961F0991B842443772152C9E0AD +_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 +curve_brainpoolp160t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp160t1 = ellipticcurve.PointJacobi( + curve_brainpoolp160t1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-192-r1 +_a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF +_b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9 +_p = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297 +_Gx = 0xC0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6 +_Gy = 0x14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F +_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 + +curve_brainpoolp192r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp192r1 = ellipticcurve.PointJacobi( + curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-192-t1 +_a = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86294 +_b = 0x13D56FFAEC78681E68F9DEB43B35BEC2FB68542E27897B79 +# _z = 0x1B6F5CC8DB4DC7AF19458A9CB80DC2295E5EB9C3732104CB +_Gx = 
0x3AE9E58C82F63C30282E1FE7BBF43FA72C446AF6F4618129 +_Gy = 0x097E2C5667C2223A902AB5CA449D0084B7E5B3DE7CCC01C9 +_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 + +curve_brainpoolp192t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp192t1 = ellipticcurve.PointJacobi( + curve_brainpoolp192t1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-224-r1 +_a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43 +_b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B +_p = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF +_Gx = 0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D +_Gy = 0x58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD +_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F + +curve_brainpoolp224r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp224r1 = ellipticcurve.PointJacobi( + curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-224-t1 +_a = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FC +_b = 0x4B337D934104CD7BEF271BF60CED1ED20DA14C08B3BB64F18A60888D +# _z = 0x2DF271E14427A346910CF7A2E6CFA7B3F484E5C2CCE1C8B730E28B3F +_Gx = 0x6AB1E344CE25FF3896424E7FFE14762ECB49F8928AC0C76029B4D580 +_Gy = 0x0374E9F5143E568CD23F3F4D7C0D4B1E41C8CC0D1C6ABD5F1A46DB4C +_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F + +curve_brainpoolp224t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp224t1 = ellipticcurve.PointJacobi( + curve_brainpoolp224t1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-256-r1 +_a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9 +_b = 0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6 +_p = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377 +_Gx = 0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262 +_Gy = 0x547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997 +_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + +curve_brainpoolp256r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp256r1 = ellipticcurve.PointJacobi( + curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-256-t1 +_a = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5374 +_b = 0x662C61C430D84EA4FE66A7733D0B76B7BF93EBC4AF2F49256AE58101FEE92B04 +# _z = 0x3E2D4BD9597B58639AE7AA669CAB9837CF5CF20A2C852D10F655668DFC150EF0 +_Gx = 0xA3E8EB3CC1CFE7B7732213B23A656149AFA142C47AAFBC2B79A191562E1305F4 +_Gy = 0x2D996C823439C56D7F7B22E14644417E69BCB6DE39D027001DABE8F35B25C9BE +_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 + +curve_brainpoolp256t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp256t1 = ellipticcurve.PointJacobi( + curve_brainpoolp256t1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-320-r1 +_a = int( + remove_whitespace( + """ + 3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9 + F492F375A97D860EB4""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD884539 + 816F5EB4AC8FB1F1A6""" + ), + 16, +) +_p = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC + 28FCD412B1F1B32E27""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599 + C710AF8D0D39E20611""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6A + C7D35245D1692E8EE1""" + ), + 
16, +) +_q = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 + E98691555B44C59311""" + ), + 16, +) + +curve_brainpoolp320r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp320r1 = ellipticcurve.PointJacobi( + curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-320-t1 +_a = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC + 28FCD412B1F1B32E24""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + A7F561E038EB1ED560B3D147DB782013064C19F27ED27C6780AAF77FB8A547 + CEB5B4FEF422340353""" + ), + 16, +) +# _z = int( +# remove_whitespace( +# """ +# 15F75CAF668077F7E85B42EB01F0A81FF56ECD6191D55CB82B7D861458A18F +# EFC3E5AB7496F3C7B1""" +# ), +# 16, +# ) +_Gx = int( + remove_whitespace( + """ + 925BE9FB01AFC6FB4D3E7D4990010F813408AB106C4F09CB7EE07868CC136F + FF3357F624A21BED52""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 63BA3A7A27483EBF6671DBEF7ABB30EBEE084E58A0B077AD42A5A0989D1EE7 + 1B1B9BC0455FB0D2C3""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 + E98691555B44C59311""" + ), + 16, +) + +curve_brainpoolp320t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp320t1 = ellipticcurve.PointJacobi( + curve_brainpoolp320t1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-384-r1 +_a = int( + remove_whitespace( + """ + 7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F9 + 0F8AA5814A503AD4EB04A8C7DD22CE2826""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62 + D57CB4390295DBC9943AB78696FA504C11""" + ), + 16, +) +_p = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 + 23ACD3A729901D1A71874700133107EC53""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10 + E8E826E03436D646AAEF87B2E247D4AF1E""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF991292 + 80E4646217791811142820341263C5315""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 + A7CF3AB6AF6B7FC3103B883202E9046565""" + ), + 16, +) + +curve_brainpoolp384r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp384r1 = ellipticcurve.PointJacobi( + curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True +) + +_a = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 + 23ACD3A729901D1A71874700133107EC50""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 7F519EADA7BDA81BD826DBA647910F8C4B9346ED8CCDC64E4B1ABD11756DCE + 1D2074AA263B88805CED70355A33B471EE""" + ), + 16, +) +# _z = int( +# remove_whitespace( +# """ +# 41DFE8DD399331F7166A66076734A89CD0D2BCDB7D068E44E1F378F41ECBAE +# 97D2D63DBC87BCCDDCCC5DA39E8589291C""" +# ), +# 16, +# ) +_Gx = int( + remove_whitespace( + """ + 18DE98B02DB9A306F2AFCD7235F72A819B80AB12EBD653172476FECD462AAB + FFC4FF191B946A5F54D8D0AA2F418808CC""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 25AB056962D30651A114AFD2755AD336747F93475B7A1FCA3B88F2B6A208CC + FE469408584DC2B2912675BF5B9E582928""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 + A7CF3AB6AF6B7FC3103B883202E9046565""" + ), + 16, +) + +curve_brainpoolp384t1 = ellipticcurve.CurveFp(_p, _a, _b, 
1) +generator_brainpoolp384t1 = ellipticcurve.PointJacobi( + curve_brainpoolp384t1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-512-r1 +_a = int( + remove_whitespace( + """ + 7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863 + BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117 + A72BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723""" + ), + 16, +) +_p = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3""" + ), + 16, +) +_Gx = int( + remove_whitespace( + """ + 81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D009 + 8EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F81 + 11B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" + ), + 16, +) + +curve_brainpoolp512r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp512r1 = ellipticcurve.PointJacobi( + curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True +) + +# Brainpool P-512-t1 +_a = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F0""" + ), + 16, +) +_b = int( + remove_whitespace( + """ + 7CBBBCF9441CFAB76E1890E46884EAE321F70C0BCB4981527897504BEC3E36 + A62BCDFA2304976540F6450085F2DAE145C22553B465763689180EA2571867423E""" + ), + 16, +) +# _z = int( +# remove_whitespace( +# """ +# 12EE58E6764838B69782136F0F2D3BA06E27695716054092E60A80BEDB212B +# 64E585D90BCE13761F85C3F1D2A64E3BE8FEA2220F01EBA5EEB0F35DBD29D922AB""" +# ), +# 16, +# ) +_Gx = int( + remove_whitespace( + """ + 640ECE5C12788717B9C1BA06CBC2A6FEBA85842458C56DDE9DB1758D39C031 + 3D82BA51735CDB3EA499AA77A7D6943A64F7A3F25FE26F06B51BAA2696FA9035DA""" + ), + 16, +) +_Gy = int( + remove_whitespace( + """ + 5B534BD595F5AF0FA2C892376C84ACE1BB4E3019B71634C01131159CAE03CE + E9D9932184BEEF216BD71DF2DADF86A627306ECFF96DBB8BACE198B61E00F8B332""" + ), + 16, +) +_q = int( + remove_whitespace( + """ + AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 + 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" + ), + 16, +) + +curve_brainpoolp512t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) +generator_brainpoolp512t1 = ellipticcurve.PointJacobi( + curve_brainpoolp512t1, _Gx, _Gy, 1, _q, generator=True +) diff --git a/venv/lib/python3.12/site-packages/ecdsa/eddsa.py b/venv/lib/python3.12/site-packages/ecdsa/eddsa.py new file mode 100644 index 0000000..9769cfd --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/eddsa.py @@ -0,0 +1,252 @@ +"""Implementation of Edwards Digital Signature Algorithm.""" + +import hashlib +from ._sha3 import shake_256 +from . 
import ellipticcurve +from ._compat import ( + remove_whitespace, + bit_length, + bytes_to_int, + int_to_bytes, + compat26_str, +) + +# edwards25519, defined in RFC7748 +_p = 2**255 - 19 +_a = -1 +_d = int( + remove_whitespace( + "370957059346694393431380835087545651895421138798432190163887855330" + "85940283555" + ) +) +_h = 8 + +_Gx = int( + remove_whitespace( + "151122213495354007725011514095885315114540126930418572060461132" + "83949847762202" + ) +) +_Gy = int( + remove_whitespace( + "463168356949264781694283940034751631413079938662562256157830336" + "03165251855960" + ) +) +_r = 2**252 + 0x14DEF9DEA2F79CD65812631A5CF5D3ED + + +def _sha512(data): + return hashlib.new("sha512", compat26_str(data)).digest() + + +curve_ed25519 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _sha512) +generator_ed25519 = ellipticcurve.PointEdwards( + curve_ed25519, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True +) + + +# edwards448, defined in RFC7748 +_p = 2**448 - 2**224 - 1 +_a = 1 +_d = -39081 % _p +_h = 4 + +_Gx = int( + remove_whitespace( + "224580040295924300187604334099896036246789641632564134246125461" + "686950415467406032909029192869357953282578032075146446173674602635" + "247710" + ) +) +_Gy = int( + remove_whitespace( + "298819210078481492676017930443930673437544040154080242095928241" + "372331506189835876003536878655418784733982303233503462500531545062" + "832660" + ) +) +_r = 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D + + +def _shake256(data): + return shake_256(data, 114) + + +curve_ed448 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _shake256) +generator_ed448 = ellipticcurve.PointEdwards( + curve_ed448, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True +) + + +class PublicKey(object): + """Public key for the Edwards Digital Signature Algorithm.""" + + def __init__(self, generator, public_key, public_point=None): + self.generator = generator + self.curve = generator.curve() + self.__encoded = public_key + # plus one for the sign bit and round up + self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 + if len(public_key) != self.baselen: + raise ValueError( + "Incorrect size of the public key, expected: {0} bytes".format( + self.baselen + ) + ) + if public_point: + self.__point = public_point + else: + self.__point = ellipticcurve.PointEdwards.from_bytes( + self.curve, public_key + ) + + def __eq__(self, other): + if isinstance(other, PublicKey): + return ( + self.curve == other.curve and self.__encoded == other.__encoded + ) + return NotImplemented + + def __ne__(self, other): + return not self == other + + @property + def point(self): + return self.__point + + @point.setter + def point(self, other): + if self.__point != other: + raise ValueError("Can't change the coordinates of the point") + self.__point = other + + def public_point(self): + return self.__point + + def public_key(self): + return self.__encoded + + def verify(self, data, signature): + """Verify a Pure EdDSA signature over data.""" + data = compat26_str(data) + if len(signature) != 2 * self.baselen: + raise ValueError( + "Invalid signature length, expected: {0} bytes".format( + 2 * self.baselen + ) + ) + R = ellipticcurve.PointEdwards.from_bytes( + self.curve, signature[: self.baselen] + ) + S = bytes_to_int(signature[self.baselen :], "little") + if S >= self.generator.order(): + raise ValueError("Invalid signature") + + dom = bytearray() + if self.curve == curve_ed448: + dom = bytearray(b"SigEd448" + b"\x00\x00") + + k = bytes_to_int( + self.curve.hash_func(dom + R.to_bytes() + self.__encoded + data), + 
"little", + ) + + if self.generator * S != self.__point * k + R: + raise ValueError("Invalid signature") + + return True + + +class PrivateKey(object): + """Private key for the Edwards Digital Signature Algorithm.""" + + def __init__(self, generator, private_key): + self.generator = generator + self.curve = generator.curve() + # plus one for the sign bit and round up + self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 + if len(private_key) != self.baselen: + raise ValueError( + "Incorrect size of private key, expected: {0} bytes".format( + self.baselen + ) + ) + self.__private_key = bytes(private_key) + self.__h = bytearray(self.curve.hash_func(private_key)) + self.__public_key = None + + a = self.__h[: self.baselen] + a = self._key_prune(a) + scalar = bytes_to_int(a, "little") + self.__s = scalar + + @property + def private_key(self): + return self.__private_key + + def __eq__(self, other): + if isinstance(other, PrivateKey): + return ( + self.curve == other.curve + and self.__private_key == other.__private_key + ) + return NotImplemented + + def __ne__(self, other): + return not self == other + + def _key_prune(self, key): + # make sure the key is not in a small subgroup + h = self.curve.cofactor() + if h == 4: + h_log = 2 + elif h == 8: + h_log = 3 + else: + raise ValueError("Only cofactor 4 and 8 curves supported") + key[0] &= ~((1 << h_log) - 1) + + # ensure the highest bit is set but no higher + l = bit_length(self.curve.p()) + if l % 8 == 0: + key[-1] = 0 + key[-2] |= 0x80 + else: + key[-1] = key[-1] & (1 << (l % 8)) - 1 | 1 << (l % 8) - 1 + return key + + def public_key(self): + """Generate the public key based on the included private key""" + if self.__public_key: + return self.__public_key + + public_point = self.generator * self.__s + + self.__public_key = PublicKey( + self.generator, public_point.to_bytes(), public_point + ) + + return self.__public_key + + def sign(self, data): + """Perform a Pure EdDSA signature over data.""" + data = compat26_str(data) + A = self.public_key().public_key() + + prefix = self.__h[self.baselen :] + + dom = bytearray() + if self.curve == curve_ed448: + dom = bytearray(b"SigEd448" + b"\x00\x00") + + r = bytes_to_int(self.curve.hash_func(dom + prefix + data), "little") + R = (self.generator * r).to_bytes() + + k = bytes_to_int(self.curve.hash_func(dom + R + A + data), "little") + k %= self.generator.order() + + S = (r + k * self.__s) % self.generator.order() + + return R + int_to_bytes(S, self.baselen, "little") diff --git a/venv/lib/python3.12/site-packages/ecdsa/ellipticcurve.py b/venv/lib/python3.12/site-packages/ecdsa/ellipticcurve.py new file mode 100644 index 0000000..a982c1e --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/ellipticcurve.py @@ -0,0 +1,1609 @@ +#! /usr/bin/env python +# -*- coding: utf-8 -*- +# +# Implementation of elliptic curves, for cryptographic applications. +# +# This module doesn't provide any way to choose a random elliptic +# curve, nor to verify that an elliptic curve was chosen randomly, +# because one can simply use NIST's standard curves. +# +# Notes from X9.62-1998 (draft): +# Nomenclature: +# - Q is a public key. +# The "Elliptic Curve Domain Parameters" include: +# - q is the "field size", which in our case equals p. +# - p is a big prime. +# - G is a point of prime order (5.1.1.1). +# - n is the order of G (5.1.1.1). +# Public-key validation (5.2.2): +# - Verify that Q is not the point at infinity. +# - Verify that X_Q and Y_Q are in [0,p-1]. +# - Verify that Q is on the curve. 
+# - Verify that nQ is the point at infinity. +# Signature generation (5.3): +# - Pick random k from [1,n-1]. +# Signature checking (5.4.2): +# - Verify that r and s are in [1,n-1]. +# +# Revision history: +# 2005.12.31 - Initial version. +# 2008.11.25 - Change CurveFp.is_on to contains_point. +# +# Written in 2005 by Peter Pearson and placed in the public domain. +# Modified extensively as part of python-ecdsa. + +from __future__ import division + +try: + from gmpy2 import mpz + + GMPY = True +except ImportError: # pragma: no branch + try: + from gmpy import mpz + + GMPY = True + except ImportError: + GMPY = False + + +from six import python_2_unicode_compatible +from . import numbertheory +from ._compat import normalise_bytes, int_to_bytes, bit_length, bytes_to_int +from .errors import MalformedPointError +from .util import orderlen, string_to_number, number_to_string + + +@python_2_unicode_compatible +class CurveFp(object): + """ + :term:`Short Weierstrass Elliptic Curve ` over a + prime field. + """ + + if GMPY: # pragma: no branch + + def __init__(self, p, a, b, h=None): + """ + The curve of points satisfying y^2 = x^3 + a*x + b (mod p). + + h is an integer that is the cofactor of the elliptic curve domain + parameters; it is the number of points satisfying the elliptic + curve equation divided by the order of the base point. It is used + for selection of efficient algorithm for public point verification. + """ + self.__p = mpz(p) + self.__a = mpz(a) + self.__b = mpz(b) + # h is not used in calculations and it can be None, so don't use + # gmpy with it + self.__h = h + + else: # pragma: no branch + + def __init__(self, p, a, b, h=None): + """ + The curve of points satisfying y^2 = x^3 + a*x + b (mod p). + + h is an integer that is the cofactor of the elliptic curve domain + parameters; it is the number of points satisfying the elliptic + curve equation divided by the order of the base point. It is used + for selection of efficient algorithm for public point verification. + """ + self.__p = p + self.__a = a + self.__b = b + self.__h = h + + def __eq__(self, other): + """Return True if other is an identical curve, False otherwise. + + Note: the value of the cofactor of the curve is not taken into account + when comparing curves, as it's derived from the base point and + intrinsic curve characteristic (but it's complex to compute), + only the prime and curve parameters are considered. 
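+
+        For example, two ``CurveFp`` objects built with the same ``p``,
+        ``a`` and ``b`` compare equal even when their cofactors differ:
+
+        .. code-block:: python
+
+            CurveFp(23, 1, 1, 4) == CurveFp(23, 1, 1)  # True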
+ """ + if isinstance(other, CurveFp): + p = self.__p + return ( + self.__p == other.__p + and self.__a % p == other.__a % p + and self.__b % p == other.__b % p + ) + return NotImplemented + + def __ne__(self, other): + """Return False if other is an identical curve, True otherwise.""" + return not self == other + + def __hash__(self): + return hash((self.__p, self.__a, self.__b)) + + def p(self): + return self.__p + + def a(self): + return self.__a + + def b(self): + return self.__b + + def cofactor(self): + return self.__h + + def contains_point(self, x, y): + """Is the point (x,y) on this curve?""" + return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0 + + def __str__(self): + if self.__h is not None: + return "CurveFp(p={0}, a={1}, b={2}, h={3})".format( + self.__p, + self.__a, + self.__b, + self.__h, + ) + return "CurveFp(p={0}, a={1}, b={2})".format( + self.__p, + self.__a, + self.__b, + ) + + +class CurveEdTw(object): + """Parameters for a Twisted Edwards Elliptic Curve""" + + if GMPY: # pragma: no branch + + def __init__(self, p, a, d, h=None, hash_func=None): + """ + The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). + + h is the cofactor of the curve. + hash_func is the hash function associated with the curve + (like SHA-512 for Ed25519) + """ + self.__p = mpz(p) + self.__a = mpz(a) + self.__d = mpz(d) + self.__h = h + self.__hash_func = hash_func + + else: + + def __init__(self, p, a, d, h=None, hash_func=None): + """ + The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). + + h is the cofactor of the curve. + hash_func is the hash function associated with the curve + (like SHA-512 for Ed25519) + """ + self.__p = p + self.__a = a + self.__d = d + self.__h = h + self.__hash_func = hash_func + + def __eq__(self, other): + """Returns True if other is an identical curve.""" + if isinstance(other, CurveEdTw): + p = self.__p + return ( + self.__p == other.__p + and self.__a % p == other.__a % p + and self.__d % p == other.__d % p + ) + return NotImplemented + + def __ne__(self, other): + """Return False if the other is an identical curve, True otherwise.""" + return not self == other + + def __hash__(self): + return hash((self.__p, self.__a, self.__d)) + + def contains_point(self, x, y): + """Is the point (x, y) on this curve?""" + return ( + self.__a * x * x + y * y - 1 - self.__d * x * x * y * y + ) % self.__p == 0 + + def p(self): + return self.__p + + def a(self): + return self.__a + + def d(self): + return self.__d + + def hash_func(self, data): + return self.__hash_func(data) + + def cofactor(self): + return self.__h + + def __str__(self): + if self.__h is not None: + return "CurveEdTw(p={0}, a={1}, d={2}, h={3})".format( + self.__p, + self.__a, + self.__d, + self.__h, + ) + return "CurveEdTw(p={0}, a={1}, d={2})".format( + self.__p, + self.__a, + self.__d, + ) + + +class AbstractPoint(object): + """Class for common methods of elliptic curve points.""" + + @staticmethod + def _from_raw_encoding(data, raw_encoding_length): + """ + Decode public point from :term:`raw encoding`. + + :term:`raw encoding` is the same as the :term:`uncompressed` encoding, + but without the 0x04 byte at the beginning. 
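+
+        The layout is the fixed-width big-endian x coordinate followed by
+        the fixed-width big-endian y coordinate; a toy sketch with
+        hypothetical 1-byte coordinates:
+
+        .. code-block:: python
+
+            AbstractPoint._from_raw_encoding(b"\x12\x34", 2)  # (0x12, 0x34)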
+ """ + # real assert, from_bytes() should not call us with different length + assert len(data) == raw_encoding_length + xs = data[: raw_encoding_length // 2] + ys = data[raw_encoding_length // 2 :] + # real assert, raw_encoding_length is calculated by multiplying an + # integer by two so it will always be even + assert len(xs) == raw_encoding_length // 2 + assert len(ys) == raw_encoding_length // 2 + coord_x = string_to_number(xs) + coord_y = string_to_number(ys) + + return coord_x, coord_y + + @staticmethod + def _from_compressed(data, curve): + """Decode public point from compressed encoding.""" + if data[:1] not in (b"\x02", b"\x03"): + raise MalformedPointError("Malformed compressed point encoding") + + is_even = data[:1] == b"\x02" + x = string_to_number(data[1:]) + p = curve.p() + alpha = (pow(x, 3, p) + (curve.a() * x) + curve.b()) % p + try: + beta = numbertheory.square_root_mod_prime(alpha, p) + except numbertheory.Error as e: + raise MalformedPointError( + "Encoding does not correspond to a point on curve", e + ) + if is_even == bool(beta & 1): + y = p - beta + else: + y = beta + return x, y + + @classmethod + def _from_hybrid(cls, data, raw_encoding_length, validate_encoding): + """Decode public point from hybrid encoding.""" + # real assert, from_bytes() should not call us with different types + assert data[:1] in (b"\x06", b"\x07") + + # primarily use the uncompressed as it's easiest to handle + x, y = cls._from_raw_encoding(data[1:], raw_encoding_length) + + # but validate if it's self-consistent if we're asked to do that + if validate_encoding and ( + y & 1 + and data[:1] != b"\x07" + or (not y & 1) + and data[:1] != b"\x06" + ): + raise MalformedPointError("Inconsistent hybrid point encoding") + + return x, y + + @classmethod + def _from_edwards(cls, curve, data): + """Decode a point on an Edwards curve.""" + data = bytearray(data) + p = curve.p() + # add 1 for the sign bit and then round up + exp_len = (bit_length(p) + 1 + 7) // 8 + if len(data) != exp_len: + raise MalformedPointError("Point length doesn't match the curve.") + x_0 = (data[-1] & 0x80) >> 7 + + data[-1] &= 0x80 - 1 + + y = bytes_to_int(data, "little") + if GMPY: + y = mpz(y) + + x2 = ( + (y * y - 1) + * numbertheory.inverse_mod(curve.d() * y * y - curve.a(), p) + % p + ) + + try: + x = numbertheory.square_root_mod_prime(x2, p) + except numbertheory.Error as e: + raise MalformedPointError( + "Encoding does not correspond to a point on curve", e + ) + + if x % 2 != x_0: + x = -x % p + + return x, y + + @classmethod + def from_bytes( + cls, curve, data, validate_encoding=True, valid_encodings=None + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + Note: generally you will want to call the ``from_bytes()`` method of + either a child class, PointJacobi or Point. 
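+
+        A decoding sketch for an uncompressed NIST P-256 point, assuming
+        ``data`` is a :term:`bytes-like object` holding a valid 65-byte
+        encoding (``curve_256`` comes from ``ecdsa.ecdsa``):
+
+        .. code-block:: python
+
+            point = PointJacobi.from_bytes(
+                curve_256, data, valid_encodings={"uncompressed"}
+            )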
+ + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: x and y coordinates of the encoded point + :rtype: tuple(int, int) + """ + if not valid_encodings: + valid_encodings = set( + ["uncompressed", "compressed", "hybrid", "raw"] + ) + if not all( + i in set(("uncompressed", "compressed", "hybrid", "raw")) + for i in valid_encodings + ): + raise ValueError( + "Only uncompressed, compressed, hybrid or raw encoding " + "supported." + ) + data = normalise_bytes(data) + + if isinstance(curve, CurveEdTw): + return cls._from_edwards(curve, data) + + key_len = len(data) + raw_encoding_length = 2 * orderlen(curve.p()) + if key_len == raw_encoding_length and "raw" in valid_encodings: + coord_x, coord_y = cls._from_raw_encoding( + data, raw_encoding_length + ) + elif key_len == raw_encoding_length + 1 and ( + "hybrid" in valid_encodings or "uncompressed" in valid_encodings + ): + if data[:1] in (b"\x06", b"\x07") and "hybrid" in valid_encodings: + coord_x, coord_y = cls._from_hybrid( + data, raw_encoding_length, validate_encoding + ) + elif data[:1] == b"\x04" and "uncompressed" in valid_encodings: + coord_x, coord_y = cls._from_raw_encoding( + data[1:], raw_encoding_length + ) + else: + raise MalformedPointError( + "Invalid X9.62 encoding of the public point" + ) + elif ( + key_len == raw_encoding_length // 2 + 1 + and "compressed" in valid_encodings + ): + coord_x, coord_y = cls._from_compressed(data, curve) + else: + raise MalformedPointError( + "Length of string does not match lengths of " + "any of the enabled ({0}) encodings of the " + "curve.".format(", ".join(valid_encodings)) + ) + return coord_x, coord_y + + def _raw_encode(self): + """Convert the point to the :term:`raw encoding`.""" + prime = self.curve().p() + x_str = number_to_string(self.x(), prime) + y_str = number_to_string(self.y(), prime) + return x_str + y_str + + def _compressed_encode(self): + """Encode the point into the compressed form.""" + prime = self.curve().p() + x_str = number_to_string(self.x(), prime) + if self.y() & 1: + return b"\x03" + x_str + return b"\x02" + x_str + + def _hybrid_encode(self): + """Encode the point into the hybrid form.""" + raw_enc = self._raw_encode() + if self.y() & 1: + return b"\x07" + raw_enc + return b"\x06" + raw_enc + + def _edwards_encode(self): + """Encode the point according to RFC8032 encoding.""" + self.scale() + x, y, p = self.x(), self.y(), self.curve().p() + + # add 1 for the sign bit and then round up + enc_len = (bit_length(p) + 1 + 7) // 8 + y_str = int_to_bytes(y, enc_len, "little") + if x % 2: + y_str[-1] |= 0x80 + return y_str + + def to_bytes(self, encoding="raw"): + """ + Convert the point to a byte string. + + The method by default uses the :term:`raw encoding` (specified + by `encoding="raw"`. 
It can also output points in :term:`uncompressed`, + :term:`compressed`, and :term:`hybrid` formats. + + For points on Edwards curves `encoding` is ignored and only the + encoding defined in RFC 8032 is supported. + + :return: :term:`raw encoding` of a public on the curve + :rtype: bytes + """ + assert encoding in ("raw", "uncompressed", "compressed", "hybrid") + curve = self.curve() + if isinstance(curve, CurveEdTw): + return self._edwards_encode() + elif encoding == "raw": + return self._raw_encode() + elif encoding == "uncompressed": + return b"\x04" + self._raw_encode() + elif encoding == "hybrid": + return self._hybrid_encode() + else: + return self._compressed_encode() + + @staticmethod + def _naf(mult): + """Calculate non-adjacent form of number.""" + ret = [] + while mult: + if mult % 2: + nd = mult % 4 + if nd >= 2: + nd -= 4 + ret.append(nd) + mult -= nd + else: + ret.append(0) + mult //= 2 + return ret + + +class PointJacobi(AbstractPoint): + """ + Point on a short Weierstrass elliptic curve. Uses Jacobi coordinates. + + In Jacobian coordinates, there are three parameters, X, Y and Z. + They correspond to affine parameters 'x' and 'y' like so: + + x = X / Z² + y = Y / Z³ + """ + + def __init__(self, curve, x, y, z, order=None, generator=False): + """ + Initialise a point that uses Jacobi representation internally. + + :param CurveFp curve: curve on which the point resides + :param int x: the X parameter of Jacobi representation (equal to x when + converting from affine coordinates + :param int y: the Y parameter of Jacobi representation (equal to y when + converting from affine coordinates + :param int z: the Z parameter of Jacobi representation (equal to 1 when + converting from affine coordinates + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: the point provided is a curve generator, as + such, it will be commonly used with scalar multiplication. This will + cause to precompute multiplication table generation for it + """ + super(PointJacobi, self).__init__() + self.__curve = curve + if GMPY: # pragma: no branch + self.__coords = (mpz(x), mpz(y), mpz(z)) + self.__order = order and mpz(order) + else: # pragma: no branch + self.__coords = (x, y, z) + self.__order = order + self.__generator = generator + self.__precompute = [] + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=True, + valid_encodings=None, + order=None, + generator=False, + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). 
+ :type valid_encodings: :term:`set-like object` + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: the point provided is a curve generator, as + such, it will be commonly used with scalar multiplication. This + will cause to precompute multiplication table generation for it + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: Point on curve + :rtype: PointJacobi + """ + coord_x, coord_y = super(PointJacobi, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return PointJacobi(curve, coord_x, coord_y, 1, order, generator) + + def _maybe_precompute(self): + if not self.__generator or self.__precompute: + return + + # since this code will execute just once, and it's fully deterministic, + # depend on atomicity of the last assignment to switch from empty + # self.__precompute to filled one and just ignore the unlikely + # situation when two threads execute it at the same time (as it won't + # lead to inconsistent __precompute) + order = self.__order + assert order + precompute = [] + i = 1 + order *= 2 + coord_x, coord_y, coord_z = self.__coords + doubler = PointJacobi(self.__curve, coord_x, coord_y, coord_z, order) + order *= 2 + precompute.append((doubler.x(), doubler.y())) + + while i < order: + i *= 2 + doubler = doubler.double().scale() + precompute.append((doubler.x(), doubler.y())) + + self.__precompute = precompute + + def __getstate__(self): + # while this code can execute at the same time as _maybe_precompute() + # is updating the __precompute or scale() is updating the __coords, + # there is no requirement for consistency between __coords and + # __precompute + state = self.__dict__.copy() + return state + + def __setstate__(self, state): + self.__dict__.update(state) + + def __eq__(self, other): + """Compare for equality two points with each-other. + + Note: only points that lay on the same curve can be equal. + """ + x1, y1, z1 = self.__coords + if other is INFINITY: + return not z1 + if isinstance(other, Point): + x2, y2, z2 = other.x(), other.y(), 1 + elif isinstance(other, PointJacobi): + x2, y2, z2 = other.__coords + else: + return NotImplemented + if self.__curve != other.curve(): + return False + p = self.__curve.p() + + zz1 = z1 * z1 % p + zz2 = z2 * z2 % p + + # compare the fractions by bringing them to the same denominator + # depend on short-circuit to save 4 multiplications in case of + # inequality + return (x1 * zz2 - x2 * zz1) % p == 0 and ( + y1 * zz2 * z2 - y2 * zz1 * z1 + ) % p == 0 + + def __ne__(self, other): + """Compare for inequality two points with each-other.""" + return not self == other + + def order(self): + """Return the order of the point. + + None if it is undefined. + """ + return self.__order + + def curve(self): + """Return curve over which the point is defined.""" + return self.__curve + + def x(self): + """ + Return affine x coordinate. + + This method should be used only when the 'y' coordinate is not needed. + It's computationally more efficient to use `to_affine()` and then + call x() and y() on the returned instance. Or call `scale()` + and then x() and y() on the returned instance. + """ + x, _, z = self.__coords + if z == 1: + return x + p = self.__curve.p() + z = numbertheory.inverse_mod(z, p) + return x * z**2 % p + + def y(self): + """ + Return affine y coordinate. + + This method should be used only when the 'x' coordinate is not needed. 
+ It's computationally more efficient to use `to_affine()` and then + call x() and y() on the returned instance. Or call `scale()` + and then x() and y() on the returned instance. + """ + _, y, z = self.__coords + if z == 1: + return y + p = self.__curve.p() + z = numbertheory.inverse_mod(z, p) + return y * z**3 % p + + def scale(self): + """ + Return point scaled so that z == 1. + + Modifies point in place, returns self. + """ + x, y, z = self.__coords + if z == 1: + return self + + # scaling is deterministic, so even if two threads execute the below + # code at the same time, they will set __coords to the same value + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(z, p) + zz_inv = z_inv * z_inv % p + x = x * zz_inv % p + y = y * zz_inv * z_inv % p + self.__coords = (x, y, 1) + return self + + def to_affine(self): + """Return point in affine form.""" + _, _, z = self.__coords + p = self.__curve.p() + if not (z % p): + return INFINITY + self.scale() + x, y, z = self.__coords + assert z == 1 + return Point(self.__curve, x, y, self.__order) + + @staticmethod + def from_affine(point, generator=False): + """Create from an affine point. + + :param bool generator: set to True to make the point to precalculate + multiplication table - useful for public point when verifying many + signatures (around 100 or so) or for generator points of a curve. + """ + return PointJacobi( + point.curve(), point.x(), point.y(), 1, point.order(), generator + ) + + # please note that all the methods that use the equations from + # hyperelliptic + # are formatted in a way to maximise performance. + # Things that make code faster: multiplying instead of taking to the power + # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and + # `pow(x, 4, p)`), + # multiple assignments at the same time (`x1, x2 = self.x1, self.x2` is + # faster than `x1 = self.x1; x2 = self.x2`), + # similarly, sometimes the `% p` is skipped if it makes the calculation + # faster and the result of calculation is later reduced modulo `p` + + def _double_with_z_1(self, X1, Y1, p, a): + """Add a point to itself with z == 1.""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl + XX, YY = X1 * X1 % p, Y1 * Y1 % p + if not YY: + return 0, 0, 0 + YYYY = YY * YY % p + S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p + M = 3 * XX + a + T = (M * M - 2 * S) % p + # X3 = T + Y3 = (M * (S - T) - 8 * YYYY) % p + Z3 = 2 * Y1 % p + return T, Y3, Z3 + + def _double(self, X1, Y1, Z1, p, a): + """Add a point to itself, arbitrary z.""" + if Z1 == 1: + return self._double_with_z_1(X1, Y1, p, a) + if not Z1: + return 0, 0, 0 + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl + XX, YY = X1 * X1 % p, Y1 * Y1 % p + if not YY: + return 0, 0, 0 + YYYY = YY * YY % p + ZZ = Z1 * Z1 % p + S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p + M = (3 * XX + a * ZZ * ZZ) % p + T = (M * M - 2 * S) % p + # X3 = T + Y3 = (M * (S - T) - 8 * YYYY) % p + Z3 = ((Y1 + Z1) ** 2 - YY - ZZ) % p + + return T, Y3, Z3 + + def double(self): + """Add a point to itself.""" + X1, Y1, Z1 = self.__coords + + if not Z1: + return INFINITY + + p, a = self.__curve.p(), self.__curve.a() + + X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a) + + if not Z3: + return INFINITY + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def _add_with_z_1(self, X1, Y1, X2, Y2, p): + """add points when both Z1 and Z2 equal 1""" + # after: + # 
http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-mmadd-2007-bl + H = X2 - X1 + HH = H * H + I = 4 * HH % p + J = H * I + r = 2 * (Y2 - Y1) + if not H and not r: + return self._double_with_z_1(X1, Y1, p, self.__curve.a()) + V = X1 * I + X3 = (r**2 - J - 2 * V) % p + Y3 = (r * (V - X3) - 2 * Y1 * J) % p + Z3 = 2 * H % p + return X3, Y3, Z3 + + def _add_with_z_eq(self, X1, Y1, Z1, X2, Y2, p): + """add points when Z1 == Z2""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-zadd-2007-m + A = (X2 - X1) ** 2 % p + B = X1 * A % p + C = X2 * A + D = (Y2 - Y1) ** 2 % p + if not A and not D: + return self._double(X1, Y1, Z1, p, self.__curve.a()) + X3 = (D - B - C) % p + Y3 = ((Y2 - Y1) * (B - X3) - Y1 * (C - B)) % p + Z3 = Z1 * (X2 - X1) % p + return X3, Y3, Z3 + + def _add_with_z2_1(self, X1, Y1, Z1, X2, Y2, p): + """add points when Z2 == 1""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-madd-2007-bl + Z1Z1 = Z1 * Z1 % p + U2, S2 = X2 * Z1Z1 % p, Y2 * Z1 * Z1Z1 % p + H = (U2 - X1) % p + HH = H * H % p + I = 4 * HH % p + J = H * I + r = 2 * (S2 - Y1) % p + if not r and not H: + return self._double_with_z_1(X2, Y2, p, self.__curve.a()) + V = X1 * I + X3 = (r * r - J - 2 * V) % p + Y3 = (r * (V - X3) - 2 * Y1 * J) % p + Z3 = ((Z1 + H) ** 2 - Z1Z1 - HH) % p + return X3, Y3, Z3 + + def _add_with_z_ne(self, X1, Y1, Z1, X2, Y2, Z2, p): + """add points with arbitrary z""" + # after: + # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-add-2007-bl + Z1Z1 = Z1 * Z1 % p + Z2Z2 = Z2 * Z2 % p + U1 = X1 * Z2Z2 % p + U2 = X2 * Z1Z1 % p + S1 = Y1 * Z2 * Z2Z2 % p + S2 = Y2 * Z1 * Z1Z1 % p + H = U2 - U1 + I = 4 * H * H % p + J = H * I % p + r = 2 * (S2 - S1) % p + if not H and not r: + return self._double(X1, Y1, Z1, p, self.__curve.a()) + V = U1 * I + X3 = (r * r - J - 2 * V) % p + Y3 = (r * (V - X3) - 2 * S1 * J) % p + Z3 = ((Z1 + Z2) ** 2 - Z1Z1 - Z2Z2) * H % p + + return X3, Y3, Z3 + + def __radd__(self, other): + """Add other to self.""" + return self + other + + def _add(self, X1, Y1, Z1, X2, Y2, Z2, p): + """add two points, select fastest method.""" + if not Z1: + return X2 % p, Y2 % p, Z2 % p + if not Z2: + return X1 % p, Y1 % p, Z1 % p + if Z1 == Z2: + if Z1 == 1: + return self._add_with_z_1(X1, Y1, X2, Y2, p) + return self._add_with_z_eq(X1, Y1, Z1, X2, Y2, p) + if Z1 == 1: + return self._add_with_z2_1(X2, Y2, Z2, X1, Y1, p) + if Z2 == 1: + return self._add_with_z2_1(X1, Y1, Z1, X2, Y2, p) + return self._add_with_z_ne(X1, Y1, Z1, X2, Y2, Z2, p) + + def __add__(self, other): + """Add two points on elliptic curve.""" + if self == INFINITY: + return other + if other == INFINITY: + return self + if isinstance(other, Point): + other = PointJacobi.from_affine(other) + if self.__curve != other.__curve: + raise ValueError("The other point is on different curve") + + p = self.__curve.p() + X1, Y1, Z1 = self.__coords + X2, Y2, Z2 = other.__coords + + X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p) + + if not Z3: + return INFINITY + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def __rmul__(self, other): + """Multiply point by an integer.""" + return self * other + + def _mul_precompute(self, other): + """Multiply point by integer with precomputation table.""" + X3, Y3, Z3, p = 0, 0, 0, self.__curve.p() + _add = self._add + for X2, Y2 in self.__precompute: + if other % 2: + if other % 4 >= 2: + other = (other + 1) // 2 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) + else: + other = (other 
- 1) // 2 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) + else: + other //= 2 + + if not Z3: + return INFINITY + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def __mul__(self, other): + """Multiply point by an integer.""" + if not self.__coords[1] or not other: + return INFINITY + if other == 1: + return self + if self.__order: + # order*2 as a protection for Minerva + other = other % (self.__order * 2) + self._maybe_precompute() + if self.__precompute: + return self._mul_precompute(other) + + self = self.scale() + X2, Y2, _ = self.__coords + X3, Y3, Z3 = 0, 0, 0 + p, a = self.__curve.p(), self.__curve.a() + _double = self._double + _add = self._add + # since adding points when at least one of them is scaled + # is quicker, reverse the NAF order + for i in reversed(self._naf(other)): + X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) + if i < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) + elif i > 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) + + if not Z3: + return INFINITY + + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def mul_add(self, self_mul, other, other_mul): + """ + Do two multiplications at the same time, add results. + + calculates self*self_mul + other*other_mul + """ + if other == INFINITY or other_mul == 0: + return self * self_mul + if self_mul == 0: + return other * other_mul + if not isinstance(other, PointJacobi): + other = PointJacobi.from_affine(other) + # when the points have precomputed answers, then multiplying them alone + # is faster (as it uses NAF and no point doublings) + self._maybe_precompute() + other._maybe_precompute() + if self.__precompute and other.__precompute: + return self * self_mul + other * other_mul + + if self.__order: + self_mul = self_mul % self.__order + other_mul = other_mul % self.__order + + # (X3, Y3, Z3) is the accumulator + X3, Y3, Z3 = 0, 0, 0 + p, a = self.__curve.p(), self.__curve.a() + + # as we have 6 unique points to work with, we can't scale all of them, + # but do scale the ones that are used most often + self.scale() + X1, Y1, Z1 = self.__coords + other.scale() + X2, Y2, Z2 = other.__coords + + _double = self._double + _add = self._add + + # with NAF we have 3 options: no add, subtract, add + # so with 2 points, we have 9 combinations: + # 0, -A, +A, -B, -A-B, +A-B, +B, -A+B, +A+B + # so we need 4 combined points: + mAmB_X, mAmB_Y, mAmB_Z = _add(X1, -Y1, Z1, X2, -Y2, Z2, p) + pAmB_X, pAmB_Y, pAmB_Z = _add(X1, Y1, Z1, X2, -Y2, Z2, p) + mApB_X, mApB_Y, mApB_Z = pAmB_X, -pAmB_Y, pAmB_Z + pApB_X, pApB_Y, pApB_Z = mAmB_X, -mAmB_Y, mAmB_Z + # when the self and other sum to infinity, we need to add them + # one by one to get correct result but as that's very unlikely to + # happen in regular operation, we don't need to optimise this case + if not pApB_Z: + return self * self_mul + other * other_mul + + # gmp object creation has cumulatively higher overhead than the + # speedup we get from calculating the NAF using gmp so ensure use + # of int() + self_naf = list(reversed(self._naf(int(self_mul)))) + other_naf = list(reversed(self._naf(int(other_mul)))) + # ensure that the lists are the same length (zip() will truncate + # longer one otherwise) + if len(self_naf) < len(other_naf): + self_naf = [0] * (len(other_naf) - len(self_naf)) + self_naf + elif len(self_naf) > len(other_naf): + other_naf = [0] * (len(self_naf) - len(other_naf)) + other_naf + + for A, B in zip(self_naf, other_naf): + X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) + + # conditions ordered from most to least likely + if A == 0: + if B == 0: 
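+                    # both NAF digits are zero: the doubling above is all
+                    # the work needed in this iteration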
+ pass + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, Z2, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, Z2, p) + elif A < 0: + if B == 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X1, -Y1, Z1, p) + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, mAmB_X, mAmB_Y, mAmB_Z, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, mApB_X, mApB_Y, mApB_Z, p) + else: + assert A > 0 + if B == 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, Z1, p) + elif B < 0: + X3, Y3, Z3 = _add(X3, Y3, Z3, pAmB_X, pAmB_Y, pAmB_Z, p) + else: + assert B > 0 + X3, Y3, Z3 = _add(X3, Y3, Z3, pApB_X, pApB_Y, pApB_Z, p) + + if not Z3: + return INFINITY + + return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) + + def __neg__(self): + """Return negated point.""" + x, y, z = self.__coords + return PointJacobi(self.__curve, x, -y, z, self.__order) + + +class Point(AbstractPoint): + """A point on a short Weierstrass elliptic curve. Altering x and y is + forbidden, but they can be read by the x() and y() methods.""" + + def __init__(self, curve, x, y, order=None): + """curve, x, y, order; order (optional) is the order of this point.""" + super(Point, self).__init__() + self.__curve = curve + if GMPY: + self.__x = x and mpz(x) + self.__y = y and mpz(y) + self.__order = order and mpz(order) + else: + self.__x = x + self.__y = y + self.__order = order + # self.curve is allowed to be None only for INFINITY: + if self.__curve: + assert self.__curve.contains_point(x, y) + # for curves with cofactor 1, all points that are on the curve are + # scalar multiples of the base point, so performing multiplication is + # not necessary to verify that. See Section 3.2.2.1 of SEC 1 v2 + if curve and curve.cofactor() != 1 and order: + assert self * order == INFINITY + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=True, + valid_encodings=None, + order=None, + ): + """ + Initialise the object from byte encoding of a point. + + The method does accept and automatically detect the type of point + encoding used. It supports the :term:`raw encoding`, + :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.ellipticcurve.CurveFp + :param validate_encoding: whether to verify that the encoding of the + point is self-consistent, defaults to True, has effect only + on ``hybrid`` encoding + :type validate_encoding: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + :type valid_encodings: :term:`set-like object` + :param int order: the point order, must be non zero when using + generator=True + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: Point on curve + :rtype: Point + """ + coord_x, coord_y = super(Point, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return Point(curve, coord_x, coord_y, order) + + def __eq__(self, other): + """Return True if the points are identical, False otherwise. + + Note: only points that lay on the same curve can be equal. 
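+
+        A minimal sketch of the equality this enables (illustrative;
+        assumes the ``curve_192``/``generator_192`` constants from
+        :py:mod:`ecdsa.ecdsa`)::
+
+            from ecdsa.ecdsa import curve_192, generator_192
+            from ecdsa.ellipticcurve import Point
+
+            # build an affine Point from the generator's coordinates
+            base = Point(curve_192, generator_192.x(), generator_192.y())
+            assert base + base == base.double()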
+ """ + if other is INFINITY: + return self.__x is None or self.__y is None + if isinstance(other, Point): + return ( + self.__curve == other.__curve + and self.__x == other.__x + and self.__y == other.__y + ) + return NotImplemented + + def __ne__(self, other): + """Returns False if points are identical, True otherwise.""" + return not self == other + + def __neg__(self): + return Point(self.__curve, self.__x, self.__curve.p() - self.__y) + + def __add__(self, other): + """Add one point to another point.""" + + # X9.62 B.3: + + if not isinstance(other, Point): + return NotImplemented + if other == INFINITY: + return self + if self == INFINITY: + return other + assert self.__curve == other.__curve + if self.__x == other.__x: + if (self.__y + other.__y) % self.__curve.p() == 0: + return INFINITY + else: + return self.double() + + p = self.__curve.p() + + l = ( + (other.__y - self.__y) + * numbertheory.inverse_mod(other.__x - self.__x, p) + ) % p + + x3 = (l * l - self.__x - other.__x) % p + y3 = (l * (self.__x - x3) - self.__y) % p + + return Point(self.__curve, x3, y3) + + def __mul__(self, other): + """Multiply a point by an integer.""" + + def leftmost_bit(x): + assert x > 0 + result = 1 + while result <= x: + result = 2 * result + return result // 2 + + e = other + if e == 0 or (self.__order and e % self.__order == 0): + return INFINITY + if self == INFINITY: + return INFINITY + if e < 0: + return (-self) * (-e) + + # From X9.62 D.3.2: + + e3 = 3 * e + negative_self = Point( + self.__curve, + self.__x, + (-self.__y) % self.__curve.p(), + self.__order, + ) + i = leftmost_bit(e3) // 2 + result = self + # print("Multiplying %s by %d (e3 = %d):" % (self, other, e3)) + while i > 1: + result = result.double() + if (e3 & i) != 0 and (e & i) == 0: + result = result + self + if (e3 & i) == 0 and (e & i) != 0: + result = result + negative_self + # print(". . . i = %d, result = %s" % ( i, result )) + i = i // 2 + + return result + + def __rmul__(self, other): + """Multiply a point by an integer.""" + + return self * other + + def __str__(self): + if self == INFINITY: + return "infinity" + return "(%d,%d)" % (self.__x, self.__y) + + def double(self): + """Return a new point that is twice the old.""" + if self == INFINITY: + return INFINITY + + # X9.62 B.3: + + p = self.__curve.p() + a = self.__curve.a() + + l = ( + (3 * self.__x * self.__x + a) + * numbertheory.inverse_mod(2 * self.__y, p) + ) % p + + if not l: + return INFINITY + + x3 = (l * l - 2 * self.__x) % p + y3 = (l * (self.__x - x3) - self.__y) % p + + return Point(self.__curve, x3, y3) + + def x(self): + return self.__x + + def y(self): + return self.__y + + def curve(self): + return self.__curve + + def order(self): + return self.__order + + +class PointEdwards(AbstractPoint): + """Point on Twisted Edwards curve. + + Internally represents the coordinates on the curve using four parameters, + X, Y, Z, T. They correspond to affine parameters 'x' and 'y' like so: + + x = X / Z + y = Y / Z + x*y = T / Z + """ + + def __init__(self, curve, x, y, z, t, order=None, generator=False): + """ + Initialise a point that uses the extended coordinates internally. 
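+
+        A minimal sketch of the expected inputs (illustrative; for an
+        affine point ``(x, y)`` on a curve with prime ``p``)::
+
+            X, Y, Z, T = x, y, 1, x * y % p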
+ """ + super(PointEdwards, self).__init__() + self.__curve = curve + if GMPY: # pragma: no branch + self.__coords = (mpz(x), mpz(y), mpz(z), mpz(t)) + self.__order = order and mpz(order) + else: # pragma: no branch + self.__coords = (x, y, z, t) + self.__order = order + self.__generator = generator + self.__precompute = [] + + @classmethod + def from_bytes( + cls, + curve, + data, + validate_encoding=None, + valid_encodings=None, + order=None, + generator=False, + ): + """ + Initialise the object from byte encoding of a point. + + `validate_encoding` and `valid_encodings` are provided for + compatibility with Weierstrass curves, they are ignored for Edwards + points. + + :param data: single point encoding of the public key + :type data: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ecdsa.ellipticcurve.CurveEdTw + :param None validate_encoding: Ignored, encoding is always validated + :param None valid_encodings: Ignored, there is just one encoding + supported + :param int order: the point order, must be non zero when using + generator=True + :param bool generator: Flag to mark the point as a curve generator, + this will cause the library to pre-compute some values to + make repeated usages of the point much faster + + :raises `~ecdsa.errors.MalformedPointError`: if the public point does + not lay on the curve or the encoding is invalid + + :return: Initialised point on an Edwards curve + :rtype: PointEdwards + """ + coord_x, coord_y = super(PointEdwards, cls).from_bytes( + curve, data, validate_encoding, valid_encodings + ) + return PointEdwards( + curve, coord_x, coord_y, 1, coord_x * coord_y, order, generator + ) + + def _maybe_precompute(self): + if not self.__generator or self.__precompute: + return self.__precompute + + # since this code will execute just once, and it's fully deterministic, + # depend on atomicity of the last assignment to switch from empty + # self.__precompute to filled one and just ignore the unlikely + # situation when two threads execute it at the same time (as it won't + # lead to inconsistent __precompute) + order = self.__order + assert order + precompute = [] + i = 1 + order *= 2 + coord_x, coord_y, coord_z, coord_t = self.__coords + prime = self.__curve.p() + + doubler = PointEdwards( + self.__curve, coord_x, coord_y, coord_z, coord_t, order + ) + # for "protection" against Minerva we need 1 or 2 more bits depending + # on order bit size, but it's easier to just calculate one + # point more always + order *= 4 + + while i < order: + doubler = doubler.scale() + coord_x, coord_y = doubler.x(), doubler.y() + coord_t = coord_x * coord_y % prime + precompute.append((coord_x, coord_y, coord_t)) + + i *= 2 + doubler = doubler.double() + + self.__precompute = precompute + return self.__precompute + + def x(self): + """Return affine x coordinate.""" + X1, _, Z1, _ = self.__coords + if Z1 == 1: + return X1 + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(Z1, p) + return X1 * z_inv % p + + def y(self): + """Return affine y coordinate.""" + _, Y1, Z1, _ = self.__coords + if Z1 == 1: + return Y1 + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(Z1, p) + return Y1 * z_inv % p + + def curve(self): + """Return the curve of the point.""" + return self.__curve + + def order(self): + return self.__order + + def scale(self): + """ + Return point scaled so that z == 1. + + Modifies point in place, returns self. 
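+
+        A minimal usage sketch (illustrative; assumes ``pt`` is a
+        :py:class:`PointEdwards` instance)::
+
+            pt.scale()               # divides out Z in place
+            x, y = pt.x(), pt.y()    # no modular inversions needed now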
+ """ + X1, Y1, Z1, _ = self.__coords + if Z1 == 1: + return self + + p = self.__curve.p() + z_inv = numbertheory.inverse_mod(Z1, p) + x = X1 * z_inv % p + y = Y1 * z_inv % p + t = x * y % p + self.__coords = (x, y, 1, t) + return self + + def __eq__(self, other): + """Compare for equality two points with each-other. + + Note: only points on the same curve can be equal. + """ + x1, y1, z1, t1 = self.__coords + if other is INFINITY: + return not x1 or not t1 + if isinstance(other, PointEdwards): + x2, y2, z2, t2 = other.__coords + else: + return NotImplemented + if self.__curve != other.curve(): + return False + p = self.__curve.p() + + # cross multiply to eliminate divisions + xn1 = x1 * z2 % p + xn2 = x2 * z1 % p + yn1 = y1 * z2 % p + yn2 = y2 * z1 % p + return xn1 == xn2 and yn1 == yn2 + + def __ne__(self, other): + """Compare for inequality two points with each-other.""" + return not self == other + + def _add(self, X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a): + """add two points, assume sane parameters.""" + # after add-2008-hwcd-2 + # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html + # NOTE: there are more efficient formulas for Z1 or Z2 == 1 + A = X1 * X2 % p + B = Y1 * Y2 % p + C = Z1 * T2 % p + D = T1 * Z2 % p + E = D + C + F = ((X1 - Y1) * (X2 + Y2) + B - A) % p + G = B + a * A + H = D - C + if not H: + return self._double(X1, Y1, Z1, T1, p, a) + X3 = E * F % p + Y3 = G * H % p + T3 = E * H % p + Z3 = F * G % p + + return X3, Y3, Z3, T3 + + def __add__(self, other): + """Add point to another.""" + if other == INFINITY: + return self + if ( + not isinstance(other, PointEdwards) + or self.__curve != other.__curve + ): + raise ValueError("The other point is on a different curve.") + + p, a = self.__curve.p(), self.__curve.a() + X1, Y1, Z1, T1 = self.__coords + X2, Y2, Z2, T2 = other.__coords + + X3, Y3, Z3, T3 = self._add(X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a) + + if not X3 or not T3: + return INFINITY + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + def __radd__(self, other): + """Add other to self.""" + return self + other + + def _double(self, X1, Y1, Z1, T1, p, a): + """Double the point, assume sane parameters.""" + # after "dbl-2008-hwcd" + # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html + # NOTE: there are more efficient formulas for Z1 == 1 + A = X1 * X1 % p + B = Y1 * Y1 % p + C = 2 * Z1 * Z1 % p + D = a * A % p + E = ((X1 + Y1) * (X1 + Y1) - A - B) % p + G = D + B + F = G - C + H = D - B + X3 = E * F % p + Y3 = G * H % p + T3 = E * H % p + Z3 = F * G % p + + return X3, Y3, Z3, T3 + + def double(self): + """Return point added to itself.""" + X1, Y1, Z1, T1 = self.__coords + + if not X1 or not T1: + return INFINITY + + p, a = self.__curve.p(), self.__curve.a() + + X3, Y3, Z3, T3 = self._double(X1, Y1, Z1, T1, p, a) + + # both Ed25519 and Ed448 have prime order, so no point added to + # itself will equal zero + if not X3 or not T3: # pragma: no branch + return INFINITY + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + def __rmul__(self, other): + """Multiply point by an integer.""" + return self * other + + def _mul_precompute(self, other): + """Multiply point by integer with precomputation table.""" + X3, Y3, Z3, T3, p, a = 0, 1, 1, 0, self.__curve.p(), self.__curve.a() + _add = self._add + for X2, Y2, T2 in self.__precompute: + rem = other % 4 + if rem == 0 or rem == 2: + other //= 2 + elif rem == 3: + other = (other + 1) // 2 + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, 1, -T2, p, a) + else: + assert 
rem == 1 + other = (other - 1) // 2 + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, 1, T2, p, a) + + if not X3 or not T3: + return INFINITY + + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + def __mul__(self, other): + """Multiply point by an integer.""" + X2, Y2, Z2, T2 = self.__coords + if not X2 or not T2 or not other: + return INFINITY + if other == 1: + return self + if self.__order: + # order*2 as a "protection" for Minerva + other = other % (self.__order * 2) + if self._maybe_precompute(): + return self._mul_precompute(other) + + X3, Y3, Z3, T3 = 0, 1, 1, 0 # INFINITY in extended coordinates + p, a = self.__curve.p(), self.__curve.a() + _double = self._double + _add = self._add + + for i in reversed(self._naf(other)): + X3, Y3, Z3, T3 = _double(X3, Y3, Z3, T3, p, a) + if i < 0: + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, Z2, -T2, p, a) + elif i > 0: + X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, Z2, T2, p, a) + + if not X3 or not T3: + return INFINITY + + return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) + + +# This one point is the Point At Infinity for all purposes: +INFINITY = Point(None, None, None) diff --git a/venv/lib/python3.12/site-packages/ecdsa/errors.py b/venv/lib/python3.12/site-packages/ecdsa/errors.py new file mode 100644 index 0000000..0184c05 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/errors.py @@ -0,0 +1,4 @@ +class MalformedPointError(AssertionError): + """Raised in case the encoding of private or public key is malformed.""" + + pass diff --git a/venv/lib/python3.12/site-packages/ecdsa/keys.py b/venv/lib/python3.12/site-packages/ecdsa/keys.py new file mode 100644 index 0000000..f74252c --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/keys.py @@ -0,0 +1,1631 @@ +""" +Primary classes for performing signing and verification operations. +""" + +import binascii +from hashlib import sha1 +import os +from six import PY2 +from . import ecdsa, eddsa +from . import der, ssh +from . import rfc6979 +from . import ellipticcurve +from .curves import NIST192p, Curve, Ed25519, Ed448 +from .ecdsa import RSZeroError +from .util import string_to_number, number_to_string, randrange +from .util import sigencode_string, sigdecode_string, bit_length +from .util import ( + oid_ecPublicKey, + encoded_oid_ecPublicKey, + oid_ecDH, + oid_ecMQV, + MalformedSignature, +) +from ._compat import normalise_bytes +from .errors import MalformedPointError +from .ellipticcurve import PointJacobi, CurveEdTw + + +__all__ = [ + "BadSignatureError", + "BadDigestError", + "VerifyingKey", + "SigningKey", + "MalformedPointError", +] + + +class BadSignatureError(Exception): + """ + Raised when verification of signature failed. 
+ + Will be raised irrespective of reason of the failure: + + * the calculated or provided hash does not match the signature + * the signature does not match the curve/public key + * the encoding of the signature is malformed + * the size of the signature does not match the curve of the VerifyingKey + """ + + pass + + +class BadDigestError(Exception): + """Raised in case the selected hash is too large for the curve.""" + + pass + + +def _truncate_and_convert_digest(digest, curve, allow_truncate): + """Truncates and converts digest to an integer.""" + if not allow_truncate: + if len(digest) > curve.baselen: + raise BadDigestError( + "this curve ({0}) is too short " + "for the length of your digest ({1})".format( + curve.name, 8 * len(digest) + ) + ) + else: + digest = digest[: curve.baselen] + number = string_to_number(digest) + if allow_truncate: + max_length = bit_length(curve.order) + # we don't use bit_length(number) as that truncates leading zeros + length = len(digest) * 8 + + # See NIST FIPS 186-4: + # + # When the length of the output of the hash function is greater + # than N (i.e., the bit length of q), then the leftmost N bits of + # the hash function output block shall be used in any calculation + # using the hash function output during the generation or + # verification of a digital signature. + # + # as such, we need to shift-out the low-order bits: + number >>= max(0, length - max_length) + + return number + + +class VerifyingKey(object): + """ + Class for handling keys that can verify signatures (public keys). + + :ivar `~ecdsa.curves.Curve` ~.curve: The Curve over which all the + cryptographic operations will take place + :ivar default_hashfunc: the function that will be used for hashing the + data. Should implement the same API as hashlib.sha1 + :vartype default_hashfunc: callable + :ivar pubkey: the actual public key + :vartype pubkey: ~ecdsa.ecdsa.Public_key + """ + + def __init__(self, _error__please_use_generate=None): + """Unsupported, please use one of the classmethods to initialise.""" + if not _error__please_use_generate: + raise TypeError( + "Please use VerifyingKey.generate() to construct me" + ) + self.curve = None + self.default_hashfunc = None + self.pubkey = None + + def __repr__(self): + pub_key = self.to_string("compressed") + if self.default_hashfunc: + hash_name = self.default_hashfunc().name + else: + hash_name = "None" + return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format( + pub_key, self.curve, hash_name + ) + + def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" + if isinstance(other, VerifyingKey): + return self.curve == other.curve and self.pubkey == other.pubkey + return NotImplemented + + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + @classmethod + def from_public_point( + cls, point, curve=NIST192p, hashfunc=sha1, validate_point=True + ): + """ + Initialise the object from a Point object. + + This is a low-level method, generally you will not want to use it. 
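+
+        A minimal usage sketch (illustrative; assumes ``point`` is an
+        already-computed public point on the NIST192p curve)::
+
+            from ecdsa import NIST192p, VerifyingKey
+
+            vk = VerifyingKey.from_public_point(point, curve=NIST192p)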
+
+        :param point: The point to wrap around, the actual public key
+        :type point: ~ecdsa.ellipticcurve.AbstractPoint
+        :param curve: The curve on which the point needs to reside, defaults
+            to NIST192p
+        :type curve: ~ecdsa.curves.Curve
+        :param hashfunc: The default hash function that will be used for
+            verification, needs to implement the same interface
+            as :py:class:`hashlib.sha1`
+        :type hashfunc: callable
+        :param bool validate_point: whether to check that the point lies on
+            the curve; should always be used if the public point is not a
+            result of our own calculation
+
+        :raises MalformedPointError: if the public point does not lie on the
+            curve
+
+        :return: Initialised VerifyingKey object
+        :rtype: VerifyingKey
+        """
+        self = cls(_error__please_use_generate=True)
+        if isinstance(curve.curve, CurveEdTw):
+            raise ValueError("Method incompatible with Edwards curves")
+        if not isinstance(point, ellipticcurve.PointJacobi):
+            point = ellipticcurve.PointJacobi.from_affine(point)
+        self.curve = curve
+        self.default_hashfunc = hashfunc
+        try:
+            self.pubkey = ecdsa.Public_key(
+                curve.generator, point, validate_point
+            )
+        except ecdsa.InvalidPointError:
+            raise MalformedPointError("Point does not lie on the curve")
+        self.pubkey.order = curve.order
+        return self
+
+    def precompute(self, lazy=False):
+        """
+        Precompute multiplication tables for faster signature verification.
+
+        Calling this method will cause the library to precompute the
+        scalar multiplication tables, used in signature verification.
+        While it's an expensive operation (comparable to performing
+        as many signatures as the bit size of the curve, i.e. 256 for
+        NIST256p), it speeds up verification about 2 times. You should call
+        this method if you expect to verify hundreds of signatures (or more)
+        using the same VerifyingKey object.
+
+        Note: you should call this method only once; it generates a new
+        precomputation table every time it's called.
+
+        :param bool lazy: whether to calculate the precomputation table now
+            (if set to False) or to delay it to the time of first use
+            (when set to True)
+        """
+        if isinstance(self.curve.curve, CurveEdTw):
+            pt = self.pubkey.point
+            self.pubkey.point = ellipticcurve.PointEdwards(
+                pt.curve(),
+                pt.x(),
+                pt.y(),
+                1,
+                pt.x() * pt.y(),
+                self.curve.order,
+                generator=True,
+            )
+        else:
+            self.pubkey.point = ellipticcurve.PointJacobi.from_affine(
+                self.pubkey.point, True
+            )
+        # as precomputation is now delayed to the time of first use of the
+        # point, and we were asked specifically to precompute now, make
+        # sure the precomputation is performed now to preserve the behaviour
+        if not lazy:
+            self.pubkey.point * 2
+
+    @classmethod
+    def from_string(
+        cls,
+        string,
+        curve=NIST192p,
+        hashfunc=sha1,
+        validate_point=True,
+        valid_encodings=None,
+    ):
+        """
+        Initialise the object from byte encoding of a public key.
+
+        The method accepts and automatically detects the type of point
+        encoding used. It supports the :term:`raw encoding`,
+        :term:`uncompressed`, :term:`compressed`, and :term:`hybrid`
+        encodings. It also works with the native encoding of Ed25519 and
+        Ed448 public keys (technically those are compressed, but encoded
+        differently than in other signature systems).
+
+        Note: while the method is named "from_string", the name is a misnomer
+        from Python 2 days, when there were no binary strings. In Python 3
+        the input needs to be a bytes-like object.
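+
+        A minimal usage sketch (illustrative; assumes ``raw_pub`` holds a
+        65-byte uncompressed NIST256p point, ``b"\x04" + X + Y``)::
+
+            from ecdsa import NIST256p, VerifyingKey
+
+            vk = VerifyingKey.from_string(raw_pub, curve=NIST256p)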
+ + :param string: single point encoding of the public key + :type string: :term:`bytes-like object` + :param curve: the curve on which the public key is expected to lay + :type curve: ~ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as + hashlib.sha1. Ignored for EdDSA. + :type hashfunc: callable + :param validate_point: whether to verify that the point lays on the + provided curve or not, defaults to True. Ignored for EdDSA. + :type validate_point: bool + :param valid_encodings: list of acceptable point encoding formats, + supported ones are: :term:`uncompressed`, :term:`compressed`, + :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` + name). All formats by default (specified with ``None``). + Ignored for EdDSA. + :type valid_encodings: :term:`set-like object` + + :raises MalformedPointError: if the public point does not lay on the + curve or the encoding is invalid + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + if isinstance(curve.curve, CurveEdTw): + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = None # ignored for EdDSA + try: + self.pubkey = eddsa.PublicKey(curve.generator, string) + except ValueError: + raise MalformedPointError("Malformed point for the curve") + return self + + point = PointJacobi.from_bytes( + curve.curve, + string, + validate_encoding=validate_point, + valid_encodings=valid_encodings, + ) + return cls.from_public_point(point, curve, hashfunc, validate_point) + + @classmethod + def from_pem( + cls, + string, + hashfunc=sha1, + valid_encodings=None, + valid_curve_encodings=None, + ): + """ + Initialise from public key stored in :term:`PEM` format. + + The PEM header of the key should be ``BEGIN PUBLIC KEY``. + + See the :func:`~VerifyingKey.from_der()` method for details of the + format supported. + + Note: only a single PEM object decoding is supported in provided + string. + + :param string: text with PEM-encoded public ECDSA key + :type string: str + :param valid_encodings: list of allowed point encodings. + By default :term:`uncompressed`, :term:`compressed`, and + :term:`hybrid`. To read malformed files, include + :term:`raw encoding` with ``raw`` in the list. + :type valid_encodings: :term:`set-like object` + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + return cls.from_der( + der.unpem(string), + hashfunc=hashfunc, + valid_encodings=valid_encodings, + valid_curve_encodings=valid_curve_encodings, + ) + + @classmethod + def from_der( + cls, + string, + hashfunc=sha1, + valid_encodings=None, + valid_curve_encodings=None, + ): + """ + Initialise the key stored in :term:`DER` format. + + The expected format of the key is the SubjectPublicKeyInfo structure + from RFC5912 (for RSA keys, it's known as the PKCS#1 format):: + + SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE { + algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}}, + subjectPublicKey BIT STRING + } + + Note: only public EC keys are supported by this method. The + SubjectPublicKeyInfo.algorithm.algorithm field must specify + id-ecPublicKey (see RFC3279). 
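+
+        A minimal usage sketch (illustrative; assumes ``der_pub`` holds
+        such a SubjectPublicKeyInfo structure)::
+
+            from ecdsa import VerifyingKey
+
+            vk = VerifyingKey.from_der(der_pub)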
+ + Only the named curve encoding is supported, thus the + SubjectPublicKeyInfo.algorithm.parameters field needs to be an + object identifier. A sequence in that field indicates an explicit + parameter curve encoding, this format is not supported. A NULL object + in that field indicates an "implicitlyCA" encoding, where the curve + parameters come from CA certificate, those, again, are not supported. + + :param string: binary string with the DER encoding of public ECDSA key + :type string: bytes-like object + :param valid_encodings: list of allowed point encodings. + By default :term:`uncompressed`, :term:`compressed`, and + :term:`hybrid`. To read malformed files, include + :term:`raw encoding` with ``raw`` in the list. + :type valid_encodings: :term:`set-like object` + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + if valid_encodings is None: + valid_encodings = set(["uncompressed", "compressed", "hybrid"]) + string = normalise_bytes(string) + # [[oid_ecPublicKey,oid_curve], point_str_bitstring] + s1, empty = der.remove_sequence(string) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after DER pubkey: %s" % binascii.hexlify(empty) + ) + s2, point_str_bitstring = der.remove_sequence(s1) + # s2 = oid_ecPublicKey,oid_curve + oid_pk, rest = der.remove_object(s2) + if oid_pk in (Ed25519.oid, Ed448.oid): + if oid_pk == Ed25519.oid: + curve = Ed25519 + else: + assert oid_pk == Ed448.oid + curve = Ed448 + point_str, empty = der.remove_bitstring(point_str_bitstring, 0) + if empty: + raise der.UnexpectedDER("trailing junk after public key") + return cls.from_string(point_str, curve, None) + if not oid_pk == oid_ecPublicKey: + raise der.UnexpectedDER( + "Unexpected object identifier in DER " + "encoding: {0!r}".format(oid_pk) + ) + curve = Curve.from_der(rest, valid_curve_encodings) + point_str, empty = der.remove_bitstring(point_str_bitstring, 0) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after pubkey pointstring: %s" + % binascii.hexlify(empty) + ) + # raw encoding of point is invalid in DER files + if len(point_str) == curve.verifying_key_length: + raise der.UnexpectedDER("Malformed encoding of public point") + return cls.from_string( + point_str, + curve, + hashfunc=hashfunc, + valid_encodings=valid_encodings, + ) + + @classmethod + def from_public_key_recovery( + cls, + signature, + data, + curve, + hashfunc=sha1, + sigdecode=sigdecode_string, + allow_truncate=True, + ): + """ + Return keys that can be used as verifiers of the provided signature. + + Tries to recover the public key that can be used to verify the + signature, usually returns two keys like that. 
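+
+        A minimal usage sketch (illustrative; assumes ``sig`` is a raw
+        encoded signature over ``data`` made with a NIST256p key)::
+
+            from ecdsa import NIST256p, VerifyingKey
+
+            candidates = VerifyingKey.from_public_key_recovery(
+                sig, data, NIST256p
+            )
+            # typically two keys come back; one of them verifies ``sig``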
+ + :param signature: the byte string with the encoded signature + :type signature: bytes-like object + :param data: the data to be hashed for signature verification + :type data: bytes-like object + :param curve: the curve over which the signature was performed + :type curve: ~ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :param bool allow_truncate: if True, the provided hashfunc can generate + values larger than the bit size of the order of the curve, the + extra bits (at the end of the digest) will be truncated. + :type sigdecode: callable + + :return: Initialised VerifyingKey objects + :rtype: list of VerifyingKey + """ + if isinstance(curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") + data = normalise_bytes(data) + digest = hashfunc(data).digest() + return cls.from_public_key_recovery_with_digest( + signature, + digest, + curve, + hashfunc=hashfunc, + sigdecode=sigdecode, + allow_truncate=allow_truncate, + ) + + @classmethod + def from_public_key_recovery_with_digest( + cls, + signature, + digest, + curve, + hashfunc=sha1, + sigdecode=sigdecode_string, + allow_truncate=False, + ): + """ + Return keys that can be used as verifiers of the provided signature. + + Tries to recover the public key that can be used to verify the + signature, usually returns two keys like that. + + :param signature: the byte string with the encoded signature + :type signature: bytes-like object + :param digest: the hash value of the message signed by the signature + :type digest: bytes-like object + :param curve: the curve over which the signature was performed + :type curve: ~ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :type sigdecode: callable + :param bool allow_truncate: if True, the provided hashfunc can generate + values larger than the bit size of the order of the curve (and + the length of provided `digest`), the extra bits (at the end of the + digest) will be truncated. 
+ + :return: Initialised VerifyingKey object + :rtype: VerifyingKey + """ + if isinstance(curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") + generator = curve.generator + r, s = sigdecode(signature, generator.order()) + sig = ecdsa.Signature(r, s) + + digest = normalise_bytes(digest) + digest_as_number = _truncate_and_convert_digest( + digest, curve, allow_truncate + ) + pks = sig.recover_public_keys(digest_as_number, generator) + + # Transforms the ecdsa.Public_key object into a VerifyingKey + verifying_keys = [ + cls.from_public_point(pk.point, curve, hashfunc) for pk in pks + ] + return verifying_keys + + def to_string(self, encoding="raw"): + """ + Convert the public key to a byte string. + + The method by default uses the :term:`raw encoding` (specified + by `encoding="raw"`. It can also output keys in :term:`uncompressed`, + :term:`compressed` and :term:`hybrid` formats. + + Remember that the curve identification is not part of the encoding + so to decode the point using :func:`~VerifyingKey.from_string`, curve + needs to be specified. + + Note: while the method is called "to_string", it's a misnomer from + Python 2 days when character strings and byte strings shared type. + On Python 3 the returned type will be `bytes`. + + :return: :term:`raw encoding` of the public key (public point) on the + curve + :rtype: bytes + """ + assert encoding in ("raw", "uncompressed", "compressed", "hybrid") + return self.pubkey.point.to_bytes(encoding) + + def to_pem( + self, point_encoding="uncompressed", curve_parameters_encoding=None + ): + """ + Convert the public key to the :term:`PEM` format. + + The PEM header of the key will be ``BEGIN PUBLIC KEY``. + + The format of the key is described in the + :func:`~VerifyingKey.from_der()` method. + This method supports only "named curve" encoding of keys. + + :param str point_encoding: specification of the encoding format + of public keys. "uncompressed" is most portable, "compressed" is + smallest. "hybrid" is uncommon and unsupported by most + implementations, it is as big as "uncompressed". + :param str curve_parameters_encoding: the encoding for curve parameters + to use, by default tries to use ``named_curve`` encoding, + if that is not possible, falls back to ``explicit`` encoding. + + :return: portable encoding of the public key + :rtype: bytes + + .. warning:: The PEM is encoded to US-ASCII, it needs to be + re-encoded if the system is incompatible (e.g. uses UTF-16) + """ + return der.topem( + self.to_der(point_encoding, curve_parameters_encoding), + "PUBLIC KEY", + ) + + def to_der( + self, point_encoding="uncompressed", curve_parameters_encoding=None + ): + """ + Convert the public key to the :term:`DER` format. + + The format of the key is described in the + :func:`~VerifyingKey.from_der()` method. + This method supports only "named curve" encoding of keys. + + :param str point_encoding: specification of the encoding format + of public keys. "uncompressed" is most portable, "compressed" is + smallest. "hybrid" is uncommon and unsupported by most + implementations, it is as big as "uncompressed". + :param str curve_parameters_encoding: the encoding for curve parameters + to use, by default tries to use ``named_curve`` encoding, + if that is not possible, falls back to ``explicit`` encoding. 
+ + :return: DER encoding of the public key + :rtype: bytes + """ + if point_encoding == "raw": + raise ValueError("raw point_encoding not allowed in DER") + point_str = self.to_string(point_encoding) + if isinstance(self.curve.curve, CurveEdTw): + return der.encode_sequence( + der.encode_sequence(der.encode_oid(*self.curve.oid)), + der.encode_bitstring(bytes(point_str), 0), + ) + return der.encode_sequence( + der.encode_sequence( + encoded_oid_ecPublicKey, + self.curve.to_der(curve_parameters_encoding, point_encoding), + ), + # 0 is the number of unused bits in the + # bit string + der.encode_bitstring(point_str, 0), + ) + + def to_ssh(self): + """ + Convert the public key to the SSH format. + + :return: SSH encoding of the public key + :rtype: bytes + """ + return ssh.serialize_public( + self.curve.name, + self.to_string(), + ) + + def verify( + self, + signature, + data, + hashfunc=None, + sigdecode=sigdecode_string, + allow_truncate=True, + ): + """ + Verify a signature made over provided data. + + Will hash `data` to verify the signature. + + By default expects signature in :term:`raw encoding`. Can also be used + to verify signatures in ASN.1 DER encoding by using + :func:`ecdsa.util.sigdecode_der` + as the `sigdecode` parameter. + + :param signature: encoding of the signature + :type signature: sigdecode method dependent + :param data: data signed by the `signature`, will be hashed using + `hashfunc`, if specified, or default hash function + :type data: :term:`bytes-like object` + :param hashfunc: The default hash function that will be used for + verification, needs to implement the same interface as hashlib.sha1 + :type hashfunc: callable + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :type sigdecode: callable + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when verifying + SHA-384 output using NIST256p or in similar situations. Defaults to + True. + + :raises BadSignatureError: if the signature is invalid or malformed + + :return: True if the verification was successful + :rtype: bool + """ + # signature doesn't have to be a bytes-like-object so don't normalise + # it, the decoders will do that + data = normalise_bytes(data) + if isinstance(self.curve.curve, CurveEdTw): + signature = normalise_bytes(signature) + try: + return self.pubkey.verify(data, signature) + except (ValueError, MalformedPointError) as e: + raise BadSignatureError("Signature verification failed", e) + + hashfunc = hashfunc or self.default_hashfunc + digest = hashfunc(data).digest() + return self.verify_digest(signature, digest, sigdecode, allow_truncate) + + def verify_digest( + self, + signature, + digest, + sigdecode=sigdecode_string, + allow_truncate=False, + ): + """ + Verify a signature made over provided hash value. + + By default expects signature in :term:`raw encoding`. Can also be used + to verify signatures in ASN.1 DER encoding by using + :func:`ecdsa.util.sigdecode_der` + as the `sigdecode` parameter. 
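+
+        A minimal usage sketch (illustrative; assumes ``vk`` is a NIST256p
+        key and ``sig`` a raw signature over ``data``)::
+
+            from hashlib import sha256
+
+            # raises BadSignatureError if the signature doesn't match
+            assert vk.verify_digest(sig, sha256(data).digest())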
+ + :param signature: encoding of the signature + :type signature: sigdecode method dependent + :param digest: raw hash value that the signature authenticates. + :type digest: :term:`bytes-like object` + :param sigdecode: Callable to define the way the signature needs to + be decoded to an object, needs to handle `signature` as the + first parameter, the curve order (an int) as the second and return + a tuple with two integers, "r" as the first one and "s" as the + second one. See :func:`ecdsa.util.sigdecode_string` and + :func:`ecdsa.util.sigdecode_der` for examples. + :type sigdecode: callable + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when verifying + SHA-384 output using NIST256p or in similar situations. + + :raises BadSignatureError: if the signature is invalid or malformed + :raises BadDigestError: if the provided digest is too big for the curve + associated with this VerifyingKey and allow_truncate was not set + + :return: True if the verification was successful + :rtype: bool + """ + # signature doesn't have to be a bytes-like-object so don't normalise + # it, the decoders will do that + digest = normalise_bytes(digest) + number = _truncate_and_convert_digest( + digest, + self.curve, + allow_truncate, + ) + + try: + r, s = sigdecode(signature, self.pubkey.order) + except (der.UnexpectedDER, MalformedSignature) as e: + raise BadSignatureError("Malformed formatting of signature", e) + sig = ecdsa.Signature(r, s) + if self.pubkey.verifies(number, sig): + return True + raise BadSignatureError("Signature verification failed") + + +class SigningKey(object): + """ + Class for handling keys that can create signatures (private keys). + + :ivar `~ecdsa.curves.Curve` curve: The Curve over which all the + cryptographic operations will take place + :ivar default_hashfunc: the function that will be used for hashing the + data. 
Should implement the same API as :py:class:`hashlib.sha1` + :ivar int baselen: the length of a :term:`raw encoding` of private key + :ivar `~ecdsa.keys.VerifyingKey` verifying_key: the public key + associated with this private key + :ivar `~ecdsa.ecdsa.Private_key` privkey: the actual private key + """ + + def __init__(self, _error__please_use_generate=None): + """Unsupported, please use one of the classmethods to initialise.""" + if not _error__please_use_generate: + raise TypeError("Please use SigningKey.generate() to construct me") + self.curve = None + self.default_hashfunc = None + self.baselen = None + self.verifying_key = None + self.privkey = None + + def __eq__(self, other): + """Return True if the points are identical, False otherwise.""" + if isinstance(other, SigningKey): + return ( + self.curve == other.curve + and self.verifying_key == other.verifying_key + and self.privkey == other.privkey + ) + return NotImplemented + + def __ne__(self, other): + """Return False if the points are identical, True otherwise.""" + return not self == other + + @classmethod + def _twisted_edwards_keygen(cls, curve, entropy): + """Generate a private key on a Twisted Edwards curve.""" + if not entropy: + entropy = os.urandom + random = entropy(curve.baselen) + private_key = eddsa.PrivateKey(curve.generator, random) + public_key = private_key.public_key() + + verifying_key = VerifyingKey.from_string( + public_key.public_key(), curve + ) + + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = None + self.baselen = curve.baselen + self.privkey = private_key + self.verifying_key = verifying_key + return self + + @classmethod + def _weierstrass_keygen(cls, curve, entropy, hashfunc): + """Generate a private key on a Weierstrass curve.""" + secexp = randrange(curve.order, entropy) + return cls.from_secret_exponent(secexp, curve, hashfunc) + + @classmethod + def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): + """ + Generate a random private key. + + :param curve: The curve on which the point needs to reside, defaults + to NIST192p + :type curve: ~ecdsa.curves.Curve + :param entropy: Source of randomness for generating the private keys, + should provide cryptographically secure random numbers if the keys + need to be secure. Uses os.urandom() by default. + :type entropy: callable + :param hashfunc: The default hash function that will be used for + signing, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + if isinstance(curve.curve, CurveEdTw): + return cls._twisted_edwards_keygen(curve, entropy) + return cls._weierstrass_keygen(curve, entropy, hashfunc) + + @classmethod + def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): + """ + Create a private key from a random integer. + + Note: it's a low level method, it's recommended to use the + :func:`~SigningKey.generate` method to create private keys. + + :param int secexp: secret multiplier (the actual private key in ECDSA). + Needs to be an integer between 1 and the curve order. 
+ :param curve: The curve on which the point needs to reside + :type curve: ~ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + signing, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + + :raises MalformedPointError: when the provided secexp is too large + or too small for the curve selected + :raises RuntimeError: if the generation of public key from private + key failed + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + if isinstance(curve.curve, CurveEdTw): + raise ValueError( + "Edwards keys don't support setting the secret scalar " + "(exponent) directly" + ) + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = hashfunc + self.baselen = curve.baselen + n = curve.order + if not 1 <= secexp < n: + raise MalformedPointError( + "Invalid value for secexp, expected integer " + "between 1 and {0}".format(n) + ) + pubkey_point = curve.generator * secexp + if hasattr(pubkey_point, "scale"): + pubkey_point = pubkey_point.scale() + self.verifying_key = VerifyingKey.from_public_point( + pubkey_point, curve, hashfunc, False + ) + pubkey = self.verifying_key.pubkey + self.privkey = ecdsa.Private_key(pubkey, secexp) + self.privkey.order = n + return self + + @classmethod + def from_string(cls, string, curve=NIST192p, hashfunc=sha1): + """ + Decode the private key from :term:`raw encoding`. + + Note: the name of this method is a misnomer coming from days of + Python 2, when binary strings and character strings shared a type. + In Python 3, the expected type is `bytes`. + + :param string: the raw encoding of the private key + :type string: :term:`bytes-like object` + :param curve: The curve on which the point needs to reside + :type curve: ~ecdsa.curves.Curve + :param hashfunc: The default hash function that will be used for + signing, needs to implement the same interface + as hashlib.sha1 + :type hashfunc: callable + + :raises MalformedPointError: if the length of encoding doesn't match + the provided curve or the encoded values is too large + :raises RuntimeError: if the generation of public key from private + key failed + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + string = normalise_bytes(string) + + if len(string) != curve.baselen: + raise MalformedPointError( + "Invalid length of private key, received {0}, " + "expected {1}".format(len(string), curve.baselen) + ) + if isinstance(curve.curve, CurveEdTw): + self = cls(_error__please_use_generate=True) + self.curve = curve + self.default_hashfunc = None # Ignored for EdDSA + self.baselen = curve.baselen + self.privkey = eddsa.PrivateKey(curve.generator, string) + self.verifying_key = VerifyingKey.from_string( + self.privkey.public_key().public_key(), curve + ) + return self + secexp = string_to_number(string) + return cls.from_secret_exponent(secexp, curve, hashfunc) + + @classmethod + def from_pem(cls, string, hashfunc=sha1, valid_curve_encodings=None): + """ + Initialise from key stored in :term:`PEM` format. + + The PEM formats supported are the un-encrypted RFC5915 + (the ssleay format) supported by OpenSSL, and the more common + un-encrypted RFC5958 (the PKCS #8 format). + + The legacy format files have the header with the string + ``BEGIN EC PRIVATE KEY``. + PKCS#8 files have the header ``BEGIN PRIVATE KEY``. + Encrypted files (ones that include the string + ``Proc-Type: 4,ENCRYPTED`` + right after the PEM header) are not supported. 
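+
+        A minimal usage sketch (illustrative; assumes ``key.pem`` holds an
+        unencrypted RFC5915 or PKCS #8 private key)::
+
+            from ecdsa import SigningKey
+
+            with open("key.pem") as pem_file:
+                sk = SigningKey.from_pem(pem_file.read())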
+ + See :func:`~SigningKey.from_der` for ASN.1 syntax of the objects in + this files. + + :param string: text with PEM-encoded private ECDSA key + :type string: str + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + :type valid_curve_encodings: :term:`set-like object` + + + :raises MalformedPointError: if the length of encoding doesn't match + the provided curve or the encoded values is too large + :raises RuntimeError: if the generation of public key from private + key failed + :raises UnexpectedDER: if the encoding of the PEM file is incorrect + + :return: Initialised SigningKey object + :rtype: SigningKey + """ + if not PY2 and isinstance(string, str): # pragma: no branch + string = string.encode() + + # The privkey pem may have multiple sections, commonly it also has + # "EC PARAMETERS", we need just "EC PRIVATE KEY". PKCS#8 should not + # have the "EC PARAMETERS" section; it's just "PRIVATE KEY". + private_key_index = string.find(b"-----BEGIN EC PRIVATE KEY-----") + if private_key_index == -1: + private_key_index = string.index(b"-----BEGIN PRIVATE KEY-----") + + return cls.from_der( + der.unpem(string[private_key_index:]), + hashfunc, + valid_curve_encodings, + ) + + @classmethod + def from_der(cls, string, hashfunc=sha1, valid_curve_encodings=None): + """ + Initialise from key stored in :term:`DER` format. + + The DER formats supported are the un-encrypted RFC5915 + (the ssleay format) supported by OpenSSL, and the more common + un-encrypted RFC5958 (the PKCS #8 format). + + Both formats contain an ASN.1 object following the syntax specified + in RFC5915:: + + ECPrivateKey ::= SEQUENCE { + version INTEGER { ecPrivkeyVer1(1) }} (ecPrivkeyVer1), + privateKey OCTET STRING, + parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, + publicKey [1] BIT STRING OPTIONAL + } + + `publicKey` field is ignored completely (errors, if any, in it will + be undetected). + + Two formats are supported for the `parameters` field: the named + curve and the explicit encoding of curve parameters. + In the legacy ssleay format, this implementation requires the optional + `parameters` field to get the curve name. In PKCS #8 format, the curve + is part of the PrivateKeyAlgorithmIdentifier. + + The PKCS #8 format includes an ECPrivateKey object as the `privateKey` + field within a larger structure:: + + OneAsymmetricKey ::= SEQUENCE { + version Version, + privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, + privateKey PrivateKey, + attributes [0] Attributes OPTIONAL, + ..., + [[2: publicKey [1] PublicKey OPTIONAL ]], + ... + } + + The `attributes` and `publicKey` fields are completely ignored; errors + in them will not be detected. + + :param string: binary string with DER-encoded private ECDSA key + :type string: :term:`bytes-like object` + :param valid_curve_encodings: list of allowed encoding formats + for curve parameters. By default (``None``) all are supported: + ``named_curve`` and ``explicit``. + Ignored for EdDSA. 
+        :type valid_curve_encodings: :term:`set-like object`
+
+        :raises MalformedPointError: if the length of the encoding doesn't
+            match the provided curve or the encoded value is too large
+        :raises RuntimeError: if the generation of the public key from the
+            private key failed
+        :raises UnexpectedDER: if the encoding of the DER file is incorrect
+
+        :return: Initialised SigningKey object
+        :rtype: SigningKey
+        """
+        s = normalise_bytes(string)
+        curve = None
+
+        s, empty = der.remove_sequence(s)
+        if empty != b"":
+            raise der.UnexpectedDER(
+                "trailing junk after DER privkey: %s" % binascii.hexlify(empty)
+            )
+
+        version, s = der.remove_integer(s)
+
+        # At this point, PKCS #8 has a sequence containing the algorithm
+        # identifier and the curve identifier. The ssleay format instead has
+        # an octet string containing the key data, so this is how we can
+        # distinguish the two formats.
+        if der.is_sequence(s):
+            if version not in (0, 1):
+                raise der.UnexpectedDER(
+                    "expected version '0' or '1' at start of privkey, got %d"
+                    % version
+                )
+
+            sequence, s = der.remove_sequence(s)
+            algorithm_oid, algorithm_identifier = der.remove_object(sequence)
+
+            if algorithm_oid in (Ed25519.oid, Ed448.oid):
+                if algorithm_identifier:
+                    raise der.UnexpectedDER(
+                        "Non-NULL parameters for an EdDSA key"
+                    )
+                key_str_der, s = der.remove_octet_string(s)
+
+                # As RFC5958 describes, optional Attributes and PublicKey
+                # fields may follow the PrivateKey, so don't raise an error
+                # if something comes after it.
+
+                # TODO parse attributes or validate publickey
+                # if s:
+                #     raise der.UnexpectedDER(
+                #         "trailing junk inside the privateKey"
+                #     )
+                key_str, s = der.remove_octet_string(key_str_der)
+                if s:
+                    raise der.UnexpectedDER(
+                        "trailing junk after the encoded private key"
+                    )
+
+                if algorithm_oid == Ed25519.oid:
+                    curve = Ed25519
+                else:
+                    assert algorithm_oid == Ed448.oid
+                    curve = Ed448
+
+                return cls.from_string(key_str, curve, None)
+
+            if algorithm_oid not in (oid_ecPublicKey, oid_ecDH, oid_ecMQV):
+                raise der.UnexpectedDER(
+                    "unexpected algorithm identifier '%s'" % (algorithm_oid,)
+                )
+
+            curve = Curve.from_der(algorithm_identifier, valid_curve_encodings)
+
+            # Up next is an octet string containing an ECPrivateKey. Ignore
+            # the optional "attributes" and "publicKey" fields that come
+            # after.
+            s, _ = der.remove_octet_string(s)
+
+            # Unpack the ECPrivateKey to get to the key data octet string,
+            # and rejoin the ssleay parsing path.
+            s, empty = der.remove_sequence(s)
+            if empty != b"":
+                raise der.UnexpectedDER(
+                    "trailing junk after DER privkey: %s"
+                    % binascii.hexlify(empty)
+                )
+
+            version, s = der.remove_integer(s)
+
+            # The version of the ECPrivateKey must be 1.
+ if version != 1: + raise der.UnexpectedDER( + "expected version '1' at start of DER privkey, got %d" + % version + ) + + privkey_str, s = der.remove_octet_string(s) + + if not curve: + tag, curve_oid_str, s = der.remove_constructed(s) + if tag != 0: + raise der.UnexpectedDER( + "expected tag 0 in DER privkey, got %d" % tag + ) + curve = Curve.from_der(curve_oid_str, valid_curve_encodings) + + # we don't actually care about the following fields + # + # tag, pubkey_bitstring, s = der.remove_constructed(s) + # if tag != 1: + # raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d" + # % tag) + # pubkey_str = der.remove_bitstring(pubkey_bitstring, 0) + # if empty != "": + # raise der.UnexpectedDER("trailing junk after DER privkey " + # "pubkeystr: %s" + # % binascii.hexlify(empty)) + + # our from_string method likes fixed-length privkey strings + if len(privkey_str) < curve.baselen: + privkey_str = ( + b"\x00" * (curve.baselen - len(privkey_str)) + privkey_str + ) + return cls.from_string(privkey_str, curve, hashfunc) + + def to_string(self): + """ + Convert the private key to :term:`raw encoding`. + + Note: while the method is named "to_string", its name comes from + Python 2 days, when binary and character strings used the same type. + The type used in Python 3 is `bytes`. + + :return: raw encoding of private key + :rtype: bytes + """ + if isinstance(self.curve.curve, CurveEdTw): + return bytes(self.privkey.private_key) + secexp = self.privkey.secret_multiplier + s = number_to_string(secexp, self.privkey.order) + return s + + def to_pem( + self, + point_encoding="uncompressed", + format="ssleay", + curve_parameters_encoding=None, + ): + """ + Convert the private key to the :term:`PEM` format. + + See :func:`~SigningKey.from_pem` method for format description. + + Only the named curve format is supported. + The public key will be included in generated string. + + The PEM header will specify ``BEGIN EC PRIVATE KEY`` or + ``BEGIN PRIVATE KEY``, depending on the desired format. + + :param str point_encoding: format to use for encoding public point + :param str format: either ``ssleay`` (default) or ``pkcs8`` + :param str curve_parameters_encoding: format of encoded curve + parameters, default depends on the curve, if the curve has + an associated OID, ``named_curve`` format will be used, + if no OID is associated with the curve, the fallback of + ``explicit`` parameters will be used. + + :return: PEM encoded private key + :rtype: bytes + + .. warning:: The PEM is encoded to US-ASCII, it needs to be + re-encoded if the system is incompatible (e.g. uses UTF-16) + """ + # TODO: "BEGIN ECPARAMETERS" + assert format in ("ssleay", "pkcs8") + header = "EC PRIVATE KEY" if format == "ssleay" else "PRIVATE KEY" + return der.topem( + self.to_der(point_encoding, format, curve_parameters_encoding), + header, + ) + + def _encode_eddsa(self): + """Create a PKCS#8 encoding of EdDSA keys.""" + ec_private_key = der.encode_octet_string(self.to_string()) + return der.encode_sequence( + der.encode_integer(0), + der.encode_sequence(der.encode_oid(*self.curve.oid)), + der.encode_octet_string(ec_private_key), + ) + + def to_der( + self, + point_encoding="uncompressed", + format="ssleay", + curve_parameters_encoding=None, + ): + """ + Convert the private key to the :term:`DER` format. + + See :func:`~SigningKey.from_der` method for format specification. + + Only the named curve format is supported. + The public key will be included in the generated string. 
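+
+        A minimal round-trip sketch (illustrative; uses the default
+        NIST192p curve and ``ssleay`` format)::
+
+            from ecdsa import SigningKey
+
+            sk = SigningKey.generate()
+            assert SigningKey.from_der(sk.to_der()) == sk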
+ + :param str point_encoding: format to use for encoding public point + Ignored for EdDSA + :param str format: either ``ssleay`` (default) or ``pkcs8``. + EdDSA keys require ``pkcs8``. + :param str curve_parameters_encoding: format of encoded curve + parameters, default depends on the curve, if the curve has + an associated OID, ``named_curve`` format will be used, + if no OID is associated with the curve, the fallback of + ``explicit`` parameters will be used. + Ignored for EdDSA. + + :return: DER encoded private key + :rtype: bytes + """ + # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1), + # cont[1],bitstring]) + if point_encoding == "raw": + raise ValueError("raw encoding not allowed in DER") + assert format in ("ssleay", "pkcs8") + if isinstance(self.curve.curve, CurveEdTw): + if format != "pkcs8": + raise ValueError("Only PKCS#8 format supported for EdDSA keys") + return self._encode_eddsa() + encoded_vk = self.get_verifying_key().to_string(point_encoding) + priv_key_elems = [ + der.encode_integer(1), + der.encode_octet_string(self.to_string()), + ] + if format == "ssleay": + priv_key_elems.append( + der.encode_constructed( + 0, self.curve.to_der(curve_parameters_encoding) + ) + ) + # the 0 in encode_bitstring specifies the number of unused bits + # in the `encoded_vk` string + priv_key_elems.append( + der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)) + ) + ec_private_key = der.encode_sequence(*priv_key_elems) + + if format == "ssleay": + return ec_private_key + else: + return der.encode_sequence( + # version = 1 means the public key is not present in the + # top-level structure. + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(*oid_ecPublicKey), + self.curve.to_der(curve_parameters_encoding), + ), + der.encode_octet_string(ec_private_key), + ) + + def to_ssh(self): + """ + Convert the private key to the SSH format. + + :return: SSH encoded private key + :rtype: bytes + """ + return ssh.serialize_private( + self.curve.name, + self.verifying_key.to_string(), + self.to_string(), + ) + + def get_verifying_key(self): + """ + Return the VerifyingKey associated with this private key. + + Equivalent to reading the `verifying_key` field of an instance. + + :return: a public key that can be used to verify the signatures made + with this SigningKey + :rtype: VerifyingKey + """ + return self.verifying_key + + def sign_deterministic( + self, + data, + hashfunc=None, + sigencode=sigencode_string, + extra_entropy=b"", + ): + """ + Create signature over data. + + For Weierstrass curves it uses the deterministic RFC6979 algorithm. + For Edwards curves it uses the standard EdDSA algorithm. + + For ECDSA the data will be hashed using the `hashfunc` function before + signing. + For EdDSA the data will be hashed with the hash associated with the + curve (SHA-512 for Ed25519 and SHAKE-256 for Ed448). + + This is the recommended method for performing signatures when hashing + of data is necessary. + + :param data: data to be hashed and computed signature over + :type data: :term:`bytes-like object` + :param hashfunc: hash function to use for computing the signature, + if unspecified, the default hash function selected during + object initialisation will be used (see + `VerifyingKey.default_hashfunc`). The object needs to implement + the same interface as hashlib.sha1. + Ignored with EdDSA. + :type hashfunc: callable + :param sigencode: function used to encode the signature. 
+ The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + as examples of such functions. + Ignored with EdDSA. + :type sigencode: callable + :param extra_entropy: additional data that will be fed into the random + number generator used in the RFC6979 process. Entirely optional. + Ignored with EdDSA. + :type extra_entropy: :term:`bytes-like object` + + :return: encoded signature over `data` + :rtype: bytes or sigencode function dependent type + """ + hashfunc = hashfunc or self.default_hashfunc + data = normalise_bytes(data) + + if isinstance(self.curve.curve, CurveEdTw): + return self.privkey.sign(data) + + extra_entropy = normalise_bytes(extra_entropy) + digest = hashfunc(data).digest() + + return self.sign_digest_deterministic( + digest, + hashfunc=hashfunc, + sigencode=sigencode, + extra_entropy=extra_entropy, + allow_truncate=True, + ) + + def sign_digest_deterministic( + self, + digest, + hashfunc=None, + sigencode=sigencode_string, + extra_entropy=b"", + allow_truncate=False, + ): + """ + Create signature for digest using the deterministic RFC6979 algorithm. + + `digest` should be the output of cryptographically secure hash function + like SHA256 or SHA-3-256. + + This is the recommended method for performing signatures when no + hashing of data is necessary. + + :param digest: hash of data that will be signed + :type digest: :term:`bytes-like object` + :param hashfunc: hash function to use for computing the random "k" + value from RFC6979 process, + if unspecified, the default hash function selected during + object initialisation will be used (see + :attr:`.VerifyingKey.default_hashfunc`). The object needs to + implement + the same interface as :func:`~hashlib.sha1` from :py:mod:`hashlib`. + :type hashfunc: callable + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See :func:`~ecdsa.util.sigencode_string` and + :func:`~ecdsa.util.sigencode_der` + as examples of such functions. + :type sigencode: callable + :param extra_entropy: additional data that will be fed into the random + number generator used in the RFC6979 process. Entirely optional. + :type extra_entropy: :term:`bytes-like object` + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when signing + SHA-384 output using NIST256p or in similar situations. 
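# Sketch of the pre-hashed flow this method supports: the caller hashes once,
# then signs the digest directly; verification mirrors it via verify_digest().
# Assumes the vendored `ecdsa` package is importable; names are illustrative.
import hashlib
from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
digest = hashlib.sha256(b"payload").digest()
sig = sk.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
assert sk.get_verifying_key().verify_digest(sig, digest)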
+ + :return: encoded signature for the `digest` hash + :rtype: bytes or sigencode function dependent type + """ + if isinstance(self.curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") + secexp = self.privkey.secret_multiplier + hashfunc = hashfunc or self.default_hashfunc + digest = normalise_bytes(digest) + extra_entropy = normalise_bytes(extra_entropy) + + def simple_r_s(r, s, order): + return r, s, order + + retry_gen = 0 + while True: + k = rfc6979.generate_k( + self.curve.generator.order(), + secexp, + hashfunc, + digest, + retry_gen=retry_gen, + extra_entropy=extra_entropy, + ) + try: + r, s, order = self.sign_digest( + digest, + sigencode=simple_r_s, + k=k, + allow_truncate=allow_truncate, + ) + break + except RSZeroError: + retry_gen += 1 + + return sigencode(r, s, order) + + def sign( + self, + data, + entropy=None, + hashfunc=None, + sigencode=sigencode_string, + k=None, + allow_truncate=True, + ): + """ + Create signature over data. + + Uses the probabilistic ECDSA algorithm for Weierstrass curves + (NIST256p, etc.) and the deterministic EdDSA algorithm for the + Edwards curves (Ed25519, Ed448). + + This method uses the standard ECDSA algorithm that requires a + cryptographically secure random number generator. + + It's recommended to use the :func:`~SigningKey.sign_deterministic` + method instead of this one. + + :param data: data that will be hashed for signing + :type data: :term:`bytes-like object` + :param callable entropy: randomness source, :func:`os.urandom` by + default. Ignored with EdDSA. + :param hashfunc: hash function to use for hashing the provided + ``data``. + If unspecified the default hash function selected during + object initialisation will be used (see + :attr:`.VerifyingKey.default_hashfunc`). + Should behave like :func:`~hashlib.sha1` from :py:mod:`hashlib`. + The output length of the + hash (in bytes) must not be longer than the length of the curve + order (rounded up to the nearest byte), so using SHA256 with + NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish + unlikely event of a hash output larger than the curve order, the + hash will effectively be wrapped mod n). + If you want to explicitly allow use of large hashes with small + curves set the ``allow_truncate`` to ``True``. + Use ``hashfunc=hashlib.sha1`` to match openssl's + ``-ecdsa-with-SHA1`` mode, + or ``hashfunc=hashlib.sha256`` for openssl-1.0.0's + ``-ecdsa-with-SHA256``. + Ignored for EdDSA + :type hashfunc: callable + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See :func:`~ecdsa.util.sigencode_string` and + :func:`~ecdsa.util.sigencode_der` + as examples of such functions. + Ignored for EdDSA + :type sigencode: callable + :param int k: a pre-selected nonce for calculating the signature. + In typical use cases, it should be set to None (the default) to + allow its generation from an entropy source. + Ignored for EdDSA. + :param bool allow_truncate: if ``True``, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when signing + SHA-384 output using NIST256p or in similar situations. True by + default. + Ignored for EdDSA. 
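# Illustrative contrast (a sketch, not part of the diff): plain sign() draws
# k from the entropy source, so two signatures over the same data differ
# byte-for-byte yet both verify against the same public key.
import hashlib
from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
vk = sk.verifying_key
s1 = sk.sign(b"data", hashfunc=hashlib.sha256)
s2 = sk.sign(b"data", hashfunc=hashlib.sha256)
assert vk.verify(s1, b"data", hashfunc=hashlib.sha256)
assert vk.verify(s2, b"data", hashfunc=hashlib.sha256)
# s1 == s2 only with negligible probability (k comes from os.urandom)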
+ + :raises RSZeroError: in the unlikely event when *r* parameter or + *s* parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different ``k``, or use the + :func:`~SigningKey.sign_deterministic` in such case. + + :return: encoded signature of the hash of `data` + :rtype: bytes or sigencode function dependent type + """ + hashfunc = hashfunc or self.default_hashfunc + data = normalise_bytes(data) + if isinstance(self.curve.curve, CurveEdTw): + return self.sign_deterministic(data) + h = hashfunc(data).digest() + return self.sign_digest(h, entropy, sigencode, k, allow_truncate) + + def sign_digest( + self, + digest, + entropy=None, + sigencode=sigencode_string, + k=None, + allow_truncate=False, + ): + """ + Create signature over digest using the probabilistic ECDSA algorithm. + + This method uses the standard ECDSA algorithm that requires a + cryptographically secure random number generator. + + This method does not hash the input. + + It's recommended to use the + :func:`~SigningKey.sign_digest_deterministic` method + instead of this one. + + :param digest: hash value that will be signed + :type digest: :term:`bytes-like object` + :param callable entropy: randomness source, os.urandom by default + :param sigencode: function used to encode the signature. + The function needs to accept three parameters: the two integers + that are the signature and the order of the curve over which the + signature was computed. It needs to return an encoded signature. + See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` + as examples of such functions. + :type sigencode: callable + :param int k: a pre-selected nonce for calculating the signature. + In typical use cases, it should be set to None (the default) to + allow its generation from an entropy source. + :param bool allow_truncate: if True, the provided digest can have + bigger bit-size than the order of the curve, the extra bits (at + the end of the digest) will be truncated. Use it when signing + SHA-384 output using NIST256p or in similar situations. + + :raises RSZeroError: in the unlikely event when "r" parameter or + "s" parameter of the created signature is equal 0, as that would + leak the key. Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_digest_deterministic` in such case. + + :return: encoded signature for the `digest` hash + :rtype: bytes or sigencode function dependent type + """ + if isinstance(self.curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") + digest = normalise_bytes(digest) + number = _truncate_and_convert_digest( + digest, + self.curve, + allow_truncate, + ) + r, s = self.sign_number(number, entropy, k) + return sigencode(r, s, self.privkey.order) + + def sign_number(self, number, entropy=None, k=None): + """ + Sign an integer directly. + + Note, this is a low level method, usually you will want to use + :func:`~SigningKey.sign_deterministic` or + :func:`~SigningKey.sign_digest_deterministic`. + + :param int number: number to sign using the probabilistic ECDSA + algorithm. + :param callable entropy: entropy source, os.urandom by default + :param int k: pre-selected nonce for signature operation. If unset + it will be selected at random using the entropy source. + + :raises RSZeroError: in the unlikely event when "r" parameter or + "s" parameter of the created signature is equal 0, as that would + leak the key. 
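# Low-level sketch for sign_number(): it returns the raw (r, s) pair and
# leaves encoding to the caller. The digest-to-integer conversion below is a
# simplification of what sign_digest() does (which also handles truncation).
# Assumes the vendored `ecdsa` package is importable.
import hashlib
from ecdsa import SigningKey, NIST256p
from ecdsa.util import sigencode_string

sk = SigningKey.generate(curve=NIST256p)
number = int.from_bytes(hashlib.sha256(b"data").digest(), "big")
r, s = sk.sign_number(number)
sig = sigencode_string(r, s, sk.curve.order)  # fixed-width r||s (64 bytes here)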
Caller should try a better entropy source, retry with + different 'k', or use the + :func:`~SigningKey.sign_digest_deterministic` in such case. + + :return: the "r" and "s" parameters of the signature + :rtype: tuple of ints + """ + if isinstance(self.curve.curve, CurveEdTw): + raise ValueError("Method unsupported for Edwards curves") + order = self.privkey.order + + if k is not None: + _k = k + else: + _k = randrange(order, entropy) + + assert 1 <= _k < order + sig = self.privkey.sign(number, _k) + return sig.r, sig.s diff --git a/venv/lib/python3.12/site-packages/ecdsa/numbertheory.py b/venv/lib/python3.12/site-packages/ecdsa/numbertheory.py new file mode 100644 index 0000000..fe974f8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/numbertheory.py @@ -0,0 +1,835 @@ +#! /usr/bin/env python +# +# Provide some simple capabilities from number theory. +# +# Version of 2008.11.14. +# +# Written in 2005 and 2006 by Peter Pearson and placed in the public domain. +# Revision history: +# 2008.11.14: Use pow(base, exponent, modulus) for modular_exp. +# Make gcd and lcm accept arbitrarily many arguments. + +from __future__ import division + +import sys +from six import integer_types, PY2 +from six.moves import reduce + +try: + xrange +except NameError: + xrange = range +try: + from gmpy2 import powmod, mpz + + GMPY2 = True + GMPY = False +except ImportError: # pragma: no branch + GMPY2 = False + try: + from gmpy import mpz + + GMPY = True + except ImportError: + GMPY = False + + +if GMPY2 or GMPY: # pragma: no branch + integer_types = tuple(integer_types + (type(mpz(1)),)) + + +import math +import warnings +import random +from .util import bit_length + + +class Error(Exception): + """Base class for exceptions in this module.""" + + pass + + +class JacobiError(Error): + pass + + +class SquareRootError(Error): + pass + + +class NegativeExponentError(Error): + pass + + +def modular_exp(base, exponent, modulus): # pragma: no cover + """Raise base to exponent, reducing by modulus""" + # deprecated in 0.14 + warnings.warn( + "Function is unused in library code. If you use this code, " + "change to pow() builtin.", + DeprecationWarning, + ) + if exponent < 0: + raise NegativeExponentError( + "Negative exponents (%d) not allowed" % exponent + ) + return pow(base, exponent, modulus) + + +def polynomial_reduce_mod(poly, polymod, p): + """Reduce poly by polymod, integer arithmetic modulo p. + + Polynomials are represented as lists of coefficients + of increasing powers of x.""" + + # This module has been tested only by extensive use + # in calculating modular square roots. + + # Just to make this easy, require a monic polynomial: + assert polymod[-1] == 1 + + assert len(polymod) > 1 + + while len(poly) >= len(polymod): + if poly[-1] != 0: + for i in xrange(2, len(polymod) + 1): + poly[-i] = (poly[-i] - poly[-1] * polymod[-i]) % p + poly = poly[0:-1] + + return poly + + +def polynomial_multiply_mod(m1, m2, polymod, p): + """Polynomial multiplication modulo a polynomial over ints mod p. + + Polynomials are represented as lists of coefficients + of increasing powers of x.""" + + # This is just a seat-of-the-pants implementation. + + # This module has been tested only by extensive use + # in calculating modular square roots. 
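# A worked micro-example (assuming this module is importable as
# ecdsa.numbertheory) of the coefficient-list convention used by these
# helpers: [c0, c1, c2] stands for c0 + c1*x + c2*x**2.
# (1 + x)**2 = 1 + 2x + x**2, and reducing mod (x**2 + 1) over GF(7) leaves 2x:
from ecdsa.numbertheory import polynomial_multiply_mod

assert polynomial_multiply_mod([1, 1], [1, 1], [1, 0, 1], 7) == [0, 2]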
+ + # Initialize the product to zero: + + prod = (len(m1) + len(m2) - 1) * [0] + + # Add together all the cross-terms: + + for i in xrange(len(m1)): + for j in xrange(len(m2)): + prod[i + j] = (prod[i + j] + m1[i] * m2[j]) % p + + return polynomial_reduce_mod(prod, polymod, p) + + +def polynomial_exp_mod(base, exponent, polymod, p): + """Polynomial exponentiation modulo a polynomial over ints mod p. + + Polynomials are represented as lists of coefficients + of increasing powers of x.""" + + # Based on the Handbook of Applied Cryptography, algorithm 2.227. + + # This module has been tested only by extensive use + # in calculating modular square roots. + + assert exponent < p + + if exponent == 0: + return [1] + + G = base + k = exponent + if k % 2 == 1: + s = G + else: + s = [1] + + while k > 1: + k = k // 2 + G = polynomial_multiply_mod(G, G, polymod, p) + if k % 2 == 1: + s = polynomial_multiply_mod(G, s, polymod, p) + + return s + + +def jacobi(a, n): + """Jacobi symbol""" + + # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149. + + # This function has been tested by comparison with a small + # table printed in HAC, and by extensive use in calculating + # modular square roots. + + if not n >= 3: + raise JacobiError("n must be larger than 2") + if not n % 2 == 1: + raise JacobiError("n must be odd") + a = a % n + if a == 0: + return 0 + if a == 1: + return 1 + a1, e = a, 0 + while a1 % 2 == 0: + a1, e = a1 // 2, e + 1 + if e % 2 == 0 or n % 8 == 1 or n % 8 == 7: + s = 1 + else: + s = -1 + if a1 == 1: + return s + if n % 4 == 3 and a1 % 4 == 3: + s = -s + return s * jacobi(n % a1, a1) + + +def square_root_mod_prime(a, p): + """Modular square root of a, mod p, p prime.""" + + # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39. + + # This module has been tested for all values in [0,p-1] for + # every prime p from 3 to 1229. 
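# Hedged sanity check for square_root_mod_prime(): 23 % 4 == 3, so the root
# of 4 mod 23 comes from the cheap a**((p+1)//4) branch below, and it squares
# back to 4. Assumes the vendored module is importable.
from ecdsa.numbertheory import square_root_mod_prime

root = square_root_mod_prime(4, 23)
assert root * root % 23 == 4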
+ + assert 0 <= a < p + assert 1 < p + + if a == 0: + return 0 + if p == 2: + return a + + jac = jacobi(a, p) + if jac == -1: + raise SquareRootError("%d has no square root modulo %d" % (a, p)) + + if p % 4 == 3: + return pow(a, (p + 1) // 4, p) + + if p % 8 == 5: + d = pow(a, (p - 1) // 4, p) + if d == 1: + return pow(a, (p + 3) // 8, p) + assert d == p - 1 + return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p + + if PY2: + # xrange on python2 can take integers representable as C long only + range_top = min(0x7FFFFFFF, p) + else: + range_top = p + for b in xrange(2, range_top): # pragma: no branch + if jacobi(b * b - 4 * a, p) == -1: + f = (a, -b, 1) + ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p) + if ff[1]: + raise SquareRootError("p is not prime") + return ff[0] + # just an assertion + raise RuntimeError("No b found.") # pragma: no cover + + +# because all the inverse_mod code is arch/environment specific, and coveralls +# expects it to execute equal number of times, we need to waive it by +# adding the "no branch" pragma to all branches +if GMPY2: # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + if a == 0: # pragma: no branch + return 0 + return powmod(a, -1, m) + +elif GMPY: # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + # while libgmp does support inverses modulo, it is accessible + # only using the native `pow()` function, and `pow()` in gmpy sanity + # checks the parameters before passing them on to underlying + # implementation + if a == 0: # pragma: no branch + return 0 + a = mpz(a) + m = mpz(m) + + lm, hm = mpz(1), mpz(0) + low, high = a % m, m + while low > 1: # pragma: no branch + r = high // low + lm, low, hm, high = hm - lm * r, high - low * r, lm, low + + return lm % m + +elif sys.version_info >= (3, 8): # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + if a == 0: # pragma: no branch + return 0 + return pow(a, -1, m) + +else: # pragma: no branch + + def inverse_mod(a, m): + """Inverse of a mod m.""" + + if a == 0: # pragma: no branch + return 0 + + lm, hm = 1, 0 + low, high = a % m, m + while low > 1: # pragma: no branch + r = high // low + lm, low, hm, high = hm - lm * r, high - low * r, lm, low + + return lm % m + + +try: + gcd2 = math.gcd +except AttributeError: + + def gcd2(a, b): + """Greatest common divisor using Euclid's algorithm.""" + while a: + a, b = b % a, a + return b + + +def gcd(*a): + """Greatest common divisor. + + Usage: gcd([ 2, 4, 6 ]) + or: gcd(2, 4, 6) + """ + + if len(a) > 1: + return reduce(gcd2, a) + if hasattr(a[0], "__iter__"): + return reduce(gcd2, a[0]) + return a[0] + + +def lcm2(a, b): + """Least common multiple of two integers.""" + + return (a * b) // gcd(a, b) + + +def lcm(*a): + """Least common multiple. 
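# Quick sketch of the calling conventions the gcd()/lcm() docstrings describe:
# both accept either a single iterable or several integer arguments.
# Assumes the vendored module is importable.
from ecdsa.numbertheory import gcd, lcm

assert gcd(2, 4, 6) == gcd([2, 4, 6]) == 2
assert lcm(3, 4, 5) == lcm([3, 4, 5]) == 60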
+ + Usage: lcm([ 3, 4, 5 ]) + or: lcm(3, 4, 5) + """ + + if len(a) > 1: + return reduce(lcm2, a) + if hasattr(a[0], "__iter__"): + return reduce(lcm2, a[0]) + return a[0] + + +def factorization(n): + """Decompose n into a list of (prime,exponent) pairs.""" + + assert isinstance(n, integer_types) + + if n < 2: + return [] + + result = [] + + # Test the small primes: + + for d in smallprimes: + if d > n: + break + q, r = divmod(n, d) + if r == 0: + count = 1 + while d <= n: # pragma: no branch + n = q + q, r = divmod(n, d) + if r != 0: + break + count = count + 1 + result.append((d, count)) + + # If n is still greater than the last of our small primes, + # it may require further work: + + if n > smallprimes[-1]: + if is_prime(n): # If what's left is prime, it's easy: + result.append((n, 1)) + else: # Ugh. Search stupidly for a divisor: + d = smallprimes[-1] + while 1: + d = d + 2 # Try the next divisor. + q, r = divmod(n, d) + if q < d: # n < d*d means we're done, n = 1 or prime. + break + if r == 0: # d divides n. How many times? + count = 1 + n = q + # As long as d might still divide n, + while d <= n: # pragma: no branch + q, r = divmod(n, d) # see if it does. + if r != 0: + break + n = q # It does. Reduce n, increase count. + count = count + 1 + result.append((d, count)) + if n > 1: + result.append((n, 1)) + + return result + + +def phi(n): # pragma: no cover + """Return the Euler totient function of n.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + assert isinstance(n, integer_types) + + if n < 3: + return 1 + + result = 1 + ff = factorization(n) + for f in ff: + e = f[1] + if e > 1: + result = result * f[0] ** (e - 1) * (f[0] - 1) + else: + result = result * (f[0] - 1) + return result + + +def carmichael(n): # pragma: no cover + """Return Carmichael function of n. + + Carmichael(n) is the smallest integer x such that + m**x = 1 mod n for all m relatively prime to n. + """ + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + return carmichael_of_factorized(factorization(n)) + + +def carmichael_of_factorized(f_list): # pragma: no cover + """Return the Carmichael function of a number that is + represented as a list of (prime,exponent) pairs. + """ + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + if len(f_list) < 1: + return 1 + + result = carmichael_of_ppower(f_list[0]) + for i in xrange(1, len(f_list)): + result = lcm(result, carmichael_of_ppower(f_list[i])) + + return result + + +def carmichael_of_ppower(pp): # pragma: no cover + """Carmichael function of the given power of the given prime.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + p, a = pp + if p == 2 and a > 2: + return 2 ** (a - 2) + else: + return (p - 1) * p ** (a - 1) + + +def order_mod(x, m): # pragma: no cover + """Return the order of x in the multiplicative group mod m.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. 
If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + # Warning: this implementation is not very clever, and will + # take a long time if m is very large. + + if m <= 1: + return 0 + + assert gcd(x, m) == 1 + + z = x + result = 1 + while z != 1: + z = (z * x) % m + result = result + 1 + return result + + +def largest_factor_relatively_prime(a, b): # pragma: no cover + """Return the largest factor of a relatively prime to b.""" + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + while 1: + d = gcd(a, b) + if d <= 1: + break + b = d + while 1: + q, r = divmod(a, d) + if r > 0: + break + a = q + return a + + +def kinda_order_mod(x, m): # pragma: no cover + """Return the order of x in the multiplicative group mod m', + where m' is the largest factor of m relatively prime to x. + """ + # deprecated in 0.14 + warnings.warn( + "Function is unused by library code. If you use this code, " + "please open an issue in " + "https://github.com/tlsfuzzer/python-ecdsa", + DeprecationWarning, + ) + + return order_mod(x, largest_factor_relatively_prime(m, x)) + + +def is_prime(n): + """Return True if x is prime, False otherwise. + + We use the Miller-Rabin test, as given in Menezes et al. p. 138. + This test is not exact: there are composite values n for which + it returns True. + + In testing the odd numbers from 10000001 to 19999999, + about 66 composites got past the first test, + 5 got past the second test, and none got past the third. + Since factors of 2, 3, 5, 7, and 11 were detected during + preliminary screening, the number of numbers tested by + Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7) + = 4.57 million. + """ + + # (This is used to study the risk of false positives:) + global miller_rabin_test_count + + miller_rabin_test_count = 0 + + if n <= smallprimes[-1]: + if n in smallprimes: + return True + else: + return False + # 2310 = 2 * 3 * 5 * 7 * 11 + if gcd(n, 2310) != 1: + return False + + # Choose a number of iterations sufficient to reduce the + # probability of accepting a composite below 2**-80 + # (from Menezes et al. 
Table 4.4): + + t = 40 + n_bits = 1 + bit_length(n) + assert 11 <= n_bits <= 16384 + for k, tt in ( + (100, 27), + (150, 18), + (200, 15), + (250, 12), + (300, 9), + (350, 8), + (400, 7), + (450, 6), + (550, 5), + (650, 4), + (850, 3), + (1300, 2), + ): + if n_bits < k: + break + t = tt + + # Run the test t times: + + s = 0 + r = n - 1 + while (r % 2) == 0: + s = s + 1 + r = r // 2 + for i in xrange(t): + a = random.choice(smallprimes) + y = pow(a, r, n) + if y != 1 and y != n - 1: + j = 1 + while j <= s - 1 and y != n - 1: + y = pow(y, 2, n) + if y == 1: + miller_rabin_test_count = i + 1 + return False + j = j + 1 + if y != n - 1: + miller_rabin_test_count = i + 1 + return False + return True + + +def next_prime(starting_value): + """Return the smallest prime larger than the starting value.""" + + if starting_value < 2: + return 2 + result = (starting_value + 1) | 1 + while not is_prime(result): + result = result + 2 + return result + + +smallprimes = [ + 2, + 3, + 5, + 7, + 11, + 13, + 17, + 19, + 23, + 29, + 31, + 37, + 41, + 43, + 47, + 53, + 59, + 61, + 67, + 71, + 73, + 79, + 83, + 89, + 97, + 101, + 103, + 107, + 109, + 113, + 127, + 131, + 137, + 139, + 149, + 151, + 157, + 163, + 167, + 173, + 179, + 181, + 191, + 193, + 197, + 199, + 211, + 223, + 227, + 229, + 233, + 239, + 241, + 251, + 257, + 263, + 269, + 271, + 277, + 281, + 283, + 293, + 307, + 311, + 313, + 317, + 331, + 337, + 347, + 349, + 353, + 359, + 367, + 373, + 379, + 383, + 389, + 397, + 401, + 409, + 419, + 421, + 431, + 433, + 439, + 443, + 449, + 457, + 461, + 463, + 467, + 479, + 487, + 491, + 499, + 503, + 509, + 521, + 523, + 541, + 547, + 557, + 563, + 569, + 571, + 577, + 587, + 593, + 599, + 601, + 607, + 613, + 617, + 619, + 631, + 641, + 643, + 647, + 653, + 659, + 661, + 673, + 677, + 683, + 691, + 701, + 709, + 719, + 727, + 733, + 739, + 743, + 751, + 757, + 761, + 769, + 773, + 787, + 797, + 809, + 811, + 821, + 823, + 827, + 829, + 839, + 853, + 857, + 859, + 863, + 877, + 881, + 883, + 887, + 907, + 911, + 919, + 929, + 937, + 941, + 947, + 953, + 967, + 971, + 977, + 983, + 991, + 997, + 1009, + 1013, + 1019, + 1021, + 1031, + 1033, + 1039, + 1049, + 1051, + 1061, + 1063, + 1069, + 1087, + 1091, + 1093, + 1097, + 1103, + 1109, + 1117, + 1123, + 1129, + 1151, + 1153, + 1163, + 1171, + 1181, + 1187, + 1193, + 1201, + 1213, + 1217, + 1223, + 1229, +] + +miller_rabin_test_count = 0 diff --git a/venv/lib/python3.12/site-packages/ecdsa/rfc6979.py b/venv/lib/python3.12/site-packages/ecdsa/rfc6979.py new file mode 100644 index 0000000..0728b5a --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/rfc6979.py @@ -0,0 +1,113 @@ +""" +RFC 6979: + Deterministic Usage of the Digital Signature Algorithm (DSA) and + Elliptic Curve Digital Signature Algorithm (ECDSA) + + http://tools.ietf.org/html/rfc6979 + +Many thanks to Coda Hale for his implementation in Go language: + https://github.com/codahale/rfc6979 +""" + +import hmac +from binascii import hexlify +from .util import number_to_string, number_to_string_crop, bit_length +from ._compat import hmac_compat + + +# bit_length was defined in this module previously so keep it for backwards +# compatibility, will need to deprecate and remove it later +__all__ = ["bit_length", "bits2int", "bits2octets", "generate_k"] + + +def bits2int(data, qlen): + x = int(hexlify(data), 16) + l = len(data) * 8 + + if l > qlen: + return x >> (l - qlen) + return x + + +def bits2octets(data, order): + z1 = bits2int(data, bit_length(order)) + z2 = z1 - order + + if z2 < 0: + z2 
= z1 + + return number_to_string_crop(z2, order) + + +# https://tools.ietf.org/html/rfc6979#section-3.2 +def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b""): + """ + Generate the ``k`` value - the nonce for DSA. + + :param int order: order of the DSA generator used in the signature + :param int secexp: secure exponent (private key) in numeric form + :param hash_func: reference to the same hash function used for generating + hash, like :py:class:`hashlib.sha1` + :param bytes data: hash in binary form of the signing data + :param int retry_gen: how many good 'k' values to skip before returning + :param bytes extra_entropy: additional added data in binary form as per + section-3.6 of rfc6979 + :rtype: int + """ + + qlen = bit_length(order) + holen = hash_func().digest_size + rolen = (qlen + 7) // 8 + bx = ( + hmac_compat(number_to_string(secexp, order)), + hmac_compat(bits2octets(data, order)), + hmac_compat(extra_entropy), + ) + + # Step B + v = b"\x01" * holen + + # Step C + k = b"\x00" * holen + + # Step D + + k = hmac.new(k, digestmod=hash_func) + k.update(v + b"\x00") + for i in bx: + k.update(i) + k = k.digest() + + # Step E + v = hmac.new(k, v, hash_func).digest() + + # Step F + k = hmac.new(k, digestmod=hash_func) + k.update(v + b"\x01") + for i in bx: + k.update(i) + k = k.digest() + + # Step G + v = hmac.new(k, v, hash_func).digest() + + # Step H + while True: + # Step H1 + t = b"" + + # Step H2 + while len(t) < rolen: + v = hmac.new(k, v, hash_func).digest() + t += v + + # Step H3 + secret = bits2int(t, qlen) + + if 1 <= secret < order: + if retry_gen <= 0: + return secret + retry_gen -= 1 + + k = hmac.new(k, v + b"\x00", hash_func).digest() + v = hmac.new(k, v, hash_func).digest() diff --git a/venv/lib/python3.12/site-packages/ecdsa/ssh.py b/venv/lib/python3.12/site-packages/ecdsa/ssh.py new file mode 100644 index 0000000..64e9403 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/ssh.py @@ -0,0 +1,83 @@ +import binascii +from . 
import der +from ._compat import compat26_str, int_to_bytes + +_SSH_ED25519 = b"ssh-ed25519" +_SK_MAGIC = b"openssh-key-v1\0" +_NONE = b"none" + + +def _get_key_type(name): + if name == "Ed25519": + return _SSH_ED25519 + else: + raise ValueError("Unsupported key type") + + +class _Serializer: + def __init__(self): + self.bytes = b"" + + def put_raw(self, val): + self.bytes += val + + def put_u32(self, val): + self.bytes += int_to_bytes(val, length=4, byteorder="big") + + def put_str(self, val): + self.put_u32(len(val)) + self.bytes += val + + def put_pad(self, blklen=8): + padlen = blklen - (len(self.bytes) % blklen) + self.put_raw(bytearray(range(1, 1 + padlen))) + + def encode(self): + return binascii.b2a_base64(compat26_str(self.bytes)) + + def tobytes(self): + return self.bytes + + def topem(self): + return der.topem(self.bytes, "OPENSSH PRIVATE KEY") + + +def serialize_public(name, pub): + serial = _Serializer() + ktype = _get_key_type(name) + serial.put_str(ktype) + serial.put_str(pub) + return b" ".join([ktype, serial.encode()]) + + +def serialize_private(name, pub, priv): + # encode public part + spub = _Serializer() + ktype = _get_key_type(name) + spub.put_str(ktype) + spub.put_str(pub) + + # encode private part + spriv = _Serializer() + checksum = 0 + spriv.put_u32(checksum) + spriv.put_u32(checksum) + spriv.put_raw(spub.tobytes()) + spriv.put_str(priv + pub) + comment = b"" + spriv.put_str(comment) + spriv.put_pad() + + # top-level structure + main = _Serializer() + main.put_raw(_SK_MAGIC) + ciphername = kdfname = _NONE + main.put_str(ciphername) + main.put_str(kdfname) + nokdf = 0 + main.put_u32(nokdf) + nkeys = 1 + main.put_u32(nkeys) + main.put_str(spub.tobytes()) + main.put_str(spriv.tobytes()) + return main.topem() diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_curves.py b/venv/lib/python3.12/site-packages/ecdsa/test_curves.py new file mode 100644 index 0000000..93b6c9b --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_curves.py @@ -0,0 +1,361 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest + +import base64 +import pytest +from .curves import ( + Curve, + NIST256p, + curves, + UnknownCurveError, + PRIME_FIELD_OID, + curve_by_name, +) +from .ellipticcurve import CurveFp, PointJacobi, CurveEdTw +from . 
import der +from .util import number_to_string + + +class TestParameterEncoding(unittest.TestCase): + @classmethod + def setUpClass(cls): + # minimal, but with cofactor (excludes seed when compared to + # OpenSSL output) + cls.base64_params = ( + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=" + ) + + def test_from_pem(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END EC PARAMETERS-----\n" + ) + curve = Curve.from_pem(pem_params) + + self.assertIs(curve, NIST256p) + + def test_from_pem_with_explicit_when_explicit_disabled(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END EC PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params, ["named_curve"]) + + self.assertIn("explicit curve parameters not", str(e.exception)) + + def test_from_pem_with_named_curve_with_named_curve_disabled(self): + pem_params = ( + "-----BEGIN EC PARAMETERS-----\n" + "BggqhkjOPQMBBw==\n" + "-----END EC PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params, ["explicit"]) + + self.assertIn("named_curve curve parameters not", str(e.exception)) + + def test_from_pem_with_wrong_header(self): + pem_params = ( + "-----BEGIN PARAMETERS-----\n" + "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" + "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" + "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" + "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" + "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" + "-----END PARAMETERS-----\n" + ) + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_pem(pem_params) + + self.assertIn("PARAMETERS PEM header", str(e.exception)) + + def test_to_pem(self): + pem_params = ( + b"-----BEGIN EC PARAMETERS-----\n" + b"BggqhkjOPQMBBw==\n" + b"-----END EC PARAMETERS-----\n" + ) + encoding = NIST256p.to_pem() + + self.assertEqual(pem_params, encoding) + + def test_compare_with_different_object(self): + self.assertNotEqual(NIST256p, 256) + + def test_named_curve_params_der(self): + encoded = NIST256p.to_der() + + # just the encoding of the NIST256p OID (prime256v1) + self.assertEqual(b"\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07", encoded) + + def test_verify_that_default_is_named_curve_der(self): + encoded_default = NIST256p.to_der() + encoded_named = NIST256p.to_der("named_curve") + + self.assertEqual(encoded_default, encoded_named) + + def test_encoding_to_explicit_params(self): + encoded = NIST256p.to_der("explicit") + + 
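# Context sketch for these encoding tests (assumes the vendored `ecdsa` is
# importable): "named_curve" emits only the curve OID -- 10 bytes for
# prime256v1, as the OID test above shows -- while "explicit" serializes the
# prime, coefficients, base point, order and cofactor, hence the much longer
# base64 blobs used in this class.
from ecdsa.curves import NIST256p

named = NIST256p.to_der("named_curve")
explicit = NIST256p.to_der("explicit")
assert len(named) == 10
assert len(explicit) > len(named)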
self.assertEqual(encoded, bytes(base64.b64decode(self.base64_params))) + + def test_encoding_to_unsupported_type(self): + with self.assertRaises(ValueError) as e: + NIST256p.to_der("unsupported") + + self.assertIn("Only 'named_curve'", str(e.exception)) + + def test_encoding_to_explicit_compressed_params(self): + encoded = NIST256p.to_der("explicit", "compressed") + + compressed_base_point = ( + "MIHAAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" + "/////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" + "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEIQNrF9Hy4SxCR/i85uVjpEDydwN9" + "gS3rM6D0oTlF2JjClgIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8YyVR" + "AgEB" + ) + + self.assertEqual( + encoded, bytes(base64.b64decode(compressed_base_point)) + ) + + def test_decoding_explicit_from_openssl(self): + # generated with openssl 1.1.1k using + # openssl ecparam -name P-256 -param_enc explicit -out /tmp/file.pem + p256_explicit = ( + "MIH3AgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" + "/////zBbBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" + "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsDFQDEnTYIhucEk2pmeOETnSa3gZ9+" + "kARBBGsX0fLhLEJH+Lzm5WOkQPJ3A32BLeszoPShOUXYmMKWT+NC4v4af5uO5+tK" + "fA+eFivOM1drMV7Oy7ZAaDe/UfUCIQD/////AAAAAP//////////vOb6racXnoTz" + "ucrC/GMlUQIBAQ==" + ) + + decoded = Curve.from_der(bytes(base64.b64decode(p256_explicit))) + + self.assertEqual(NIST256p, decoded) + + def test_decoding_well_known_from_explicit_params(self): + curve = Curve.from_der(bytes(base64.b64decode(self.base64_params))) + + self.assertIs(curve, NIST256p) + + def test_decoding_with_incorrect_valid_encodings(self): + with self.assertRaises(ValueError) as e: + Curve.from_der(b"", ["explicitCA"]) + + self.assertIn("Only named_curve", str(e.exception)) + + def test_compare_curves_with_different_generators(self): + curve_fp = CurveFp(23, 1, 7) + base_a = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + base_b = PointJacobi(curve_fp, 1, 20, 1, 9, generator=True) + + curve_a = Curve("unknown", curve_fp, base_a, None) + curve_b = Curve("unknown", curve_fp, base_b, None) + + self.assertNotEqual(curve_a, curve_b) + + def test_default_encode_for_custom_curve(self): + curve_fp = CurveFp(23, 1, 7) + base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + + curve = Curve("unknown", curve_fp, base_point, None) + + encoded = curve.to_der() + + decoded = Curve.from_der(encoded) + + self.assertEqual(curve, decoded) + + expected = "MCECAQEwDAYHKoZIzj0BAQIBFzAGBAEBBAEHBAMEDQMCAQk=" + + self.assertEqual(encoded, bytes(base64.b64decode(expected))) + + def test_named_curve_encode_for_custom_curve(self): + curve_fp = CurveFp(23, 1, 7) + base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) + + curve = Curve("unknown", curve_fp, base_point, None) + + with self.assertRaises(UnknownCurveError) as e: + curve.to_der("named_curve") + + self.assertIn("Can't encode curve", str(e.exception)) + + def test_try_decoding_binary_explicit(self): + sect113r1_explicit = ( + "MIGRAgEBMBwGByqGSM49AQIwEQIBcQYJKoZIzj0BAgMCAgEJMDkEDwAwiCUMpufH" + "/mSc6Fgg9wQPAOi+5NPiJgdEGIvg6ccjAxUAEOcjqxTWluZ2h1YVF1b+v4/LSakE" + "HwQAnXNhbzX0qxQH1zViwQ8ApSgwJ3lY7oTRMV7TGIYCDwEAAAAAAAAA2czsijnl" + "bwIBAg==" + ) + + with self.assertRaises(UnknownCurveError) as e: + Curve.from_der(base64.b64decode(sect113r1_explicit)) + + self.assertIn("Characteristic 2 curves unsupported", str(e.exception)) + + def test_decode_malformed_named_curve(self): + bad_der = der.encode_oid(*NIST256p.oid) + der.encode_integer(1) + + with 
self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unexpected data after OID", str(e.exception)) + + def test_decode_malformed_explicit_garbage_after_ECParam(self): + bad_der = bytes( + base64.b64decode(self.base64_params) + ) + der.encode_integer(1) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unexpected data after ECParameters", str(e.exception)) + + def test_decode_malformed_unknown_version_number(self): + bad_der = der.encode_sequence(der.encode_integer(2)) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Unknown parameter encoding format", str(e.exception)) + + def test_decode_malformed_unknown_field_type(self): + curve_p = NIST256p.curve.p() + bad_der = der.encode_sequence( + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(1, 2, 3), der.encode_integer(curve_p) + ), + der.encode_sequence( + der.encode_octet_string( + number_to_string(NIST256p.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(NIST256p.curve.b(), curve_p) + ), + ), + der.encode_octet_string( + NIST256p.generator.to_bytes("uncompressed") + ), + der.encode_integer(NIST256p.generator.order()), + ) + + with self.assertRaises(UnknownCurveError) as e: + Curve.from_der(bad_der) + + self.assertIn("Unknown field type: (1, 2, 3)", str(e.exception)) + + def test_decode_malformed_garbage_after_prime(self): + curve_p = NIST256p.curve.p() + bad_der = der.encode_sequence( + der.encode_integer(1), + der.encode_sequence( + der.encode_oid(*PRIME_FIELD_OID), + der.encode_integer(curve_p), + der.encode_integer(1), + ), + der.encode_sequence( + der.encode_octet_string( + number_to_string(NIST256p.curve.a() % curve_p, curve_p) + ), + der.encode_octet_string( + number_to_string(NIST256p.curve.b(), curve_p) + ), + ), + der.encode_octet_string( + NIST256p.generator.to_bytes("uncompressed") + ), + der.encode_integer(NIST256p.generator.order()), + ) + + with self.assertRaises(der.UnexpectedDER) as e: + Curve.from_der(bad_der) + + self.assertIn("Prime-p element", str(e.exception)) + + +class TestCurveSearching(unittest.TestCase): + def test_correct_name(self): + c = curve_by_name("NIST256p") + self.assertIs(c, NIST256p) + + def test_openssl_name(self): + c = curve_by_name("prime256v1") + self.assertIs(c, NIST256p) + + def test_unknown_curve(self): + with self.assertRaises(UnknownCurveError) as e: + curve_by_name("foo bar") + + self.assertIn( + "name 'foo bar' unknown, only curves supported: " + "['NIST192p', 'NIST224p'", + str(e.exception), + ) + + def test_with_None_as_parameter(self): + with self.assertRaises(UnknownCurveError) as e: + curve_by_name(None) + + self.assertIn( + "name None unknown, only curves supported: " + "['NIST192p', 'NIST224p'", + str(e.exception), + ) + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_named(curve): + ret = Curve.from_der(curve.to_der("named_curve")) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_explicit(curve): + if isinstance(curve.curve, CurveEdTw): + with pytest.raises(UnknownCurveError): + curve.to_der("explicit") + else: + ret = Curve.from_der(curve.to_der("explicit")) + + assert curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_default(curve): + ret = Curve.from_der(curve.to_der()) + + assert 
curve == ret + + +@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) +def test_curve_params_encode_decode_explicit_compressed(curve): + if isinstance(curve.curve, CurveEdTw): + with pytest.raises(UnknownCurveError): + curve.to_der("explicit", "compressed") + else: + ret = Curve.from_der(curve.to_der("explicit", "compressed")) + + assert curve == ret diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_der.py b/venv/lib/python3.12/site-packages/ecdsa/test_der.py new file mode 100644 index 0000000..b095543 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_der.py @@ -0,0 +1,602 @@ +# compatibility with Python 2.6, for that we need unittest2 package, +# which is not available on 3.3 or 3.4 +import warnings +from binascii import hexlify + +try: + import unittest2 as unittest +except ImportError: + import unittest +import sys +import hypothesis.strategies as st +from hypothesis import given, settings +import pytest +from ._compat import str_idx_as_int +from .curves import NIST256p, NIST224p +from .der import ( + remove_integer, + UnexpectedDER, + read_length, + encode_bitstring, + remove_bitstring, + remove_object, + encode_oid, + remove_constructed, + remove_implicit, + remove_octet_string, + remove_sequence, + encode_implicit, +) + + +class TestRemoveInteger(unittest.TestCase): + # DER requires the integers to be 0-padded only if they would be + # interpreted as negative, check if those errors are detected + def test_non_minimal_encoding(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b"\x02\x02\x00\x01") + + def test_negative_with_high_bit_set(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b"\x02\x01\x80") + + def test_minimal_with_high_bit_set(self): + val, rem = remove_integer(b"\x02\x02\x00\x80") + + self.assertEqual(val, 0x80) + self.assertEqual(rem, b"") + + def test_two_zero_bytes_with_high_bit_set(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b"\x02\x03\x00\x00\xff") + + def test_zero_length_integer(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b"\x02\x00") + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_integer(b"") + + def test_encoding_of_zero(self): + val, rem = remove_integer(b"\x02\x01\x00") + + self.assertEqual(val, 0) + self.assertEqual(rem, b"") + + def test_encoding_of_127(self): + val, rem = remove_integer(b"\x02\x01\x7f") + + self.assertEqual(val, 127) + self.assertEqual(rem, b"") + + def test_encoding_of_128(self): + val, rem = remove_integer(b"\x02\x02\x00\x80") + + self.assertEqual(val, 128) + self.assertEqual(rem, b"") + + def test_wrong_tag(self): + with self.assertRaises(UnexpectedDER) as e: + remove_integer(b"\x01\x02\x00\x80") + + self.assertIn("wanted type 'integer'", str(e.exception)) + + def test_wrong_length(self): + with self.assertRaises(UnexpectedDER) as e: + remove_integer(b"\x02\x03\x00\x80") + + self.assertIn("Length longer", str(e.exception)) + + +class TestReadLength(unittest.TestCase): + # DER requires the lengths between 0 and 127 to be encoded using the short + # form and lengths above that encoded with minimal number of bytes + # necessary + def test_zero_length(self): + self.assertEqual((0, 1), read_length(b"\x00")) + + def test_two_byte_zero_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b"\x81\x00") + + def test_two_byte_small_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b"\x81\x7f") + + def test_long_form_with_zero_length(self): + with 
self.assertRaises(UnexpectedDER): + read_length(b"\x80") + + def test_smallest_two_byte_length(self): + self.assertEqual((128, 2), read_length(b"\x81\x80")) + + def test_zero_padded_length(self): + with self.assertRaises(UnexpectedDER): + read_length(b"\x82\x00\x80") + + def test_two_three_byte_length(self): + self.assertEqual((256, 3), read_length(b"\x82\x01\x00")) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + read_length(b"") + + def test_length_overflow(self): + with self.assertRaises(UnexpectedDER): + read_length(b"\x83\x01\x00") + + +class TestEncodeBitstring(unittest.TestCase): + # DER requires BIT STRINGS to include a number of padding bits in the + # encoded byte string, that padding must be between 0 and 7 + + def test_old_call_convention(self): + """This is the old way to use the function.""" + warnings.simplefilter("always") + with pytest.warns(DeprecationWarning) as warns: + der = encode_bitstring(b"\x00\xff") + + self.assertEqual(len(warns), 1) + self.assertIn( + "unused= needs to be specified", warns[0].message.args[0] + ) + + self.assertEqual(der, b"\x03\x02\x00\xff") + + def test_new_call_convention(self): + """This is how it should be called now.""" + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") + der = encode_bitstring(b"\xff", 0) + + self.assertEqual(der, b"\x03\x02\x00\xff") + + def test_implicit_unused_bits(self): + """ + Writing bit string with already included the number of unused bits. + """ + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") + der = encode_bitstring(b"\x00\xff", None) + + self.assertEqual(der, b"\x03\x02\x00\xff") + + def test_explicit_unused_bits(self): + der = encode_bitstring(b"\xff\xf0", 4) + + self.assertEqual(der, b"\x03\x03\x04\xff\xf0") + + def test_empty_string(self): + self.assertEqual(encode_bitstring(b"", 0), b"\x03\x01\x00") + + def test_invalid_unused_count(self): + with self.assertRaises(ValueError): + encode_bitstring(b"\xff\x00", 8) + + def test_invalid_unused_with_empty_string(self): + with self.assertRaises(ValueError): + encode_bitstring(b"", 1) + + def test_non_zero_padding_bits(self): + with self.assertRaises(ValueError): + encode_bitstring(b"\xff", 2) + + +class TestRemoveBitstring(unittest.TestCase): + def test_old_call_convention(self): + """This is the old way to call the function.""" + warnings.simplefilter("always") + with pytest.warns(DeprecationWarning) as warns: + bits, rest = remove_bitstring(b"\x03\x02\x00\xff") + + self.assertEqual(len(warns), 1) + self.assertIn( + "expect_unused= needs to be specified", warns[0].message.args[0] + ) + + self.assertEqual(bits, b"\x00\xff") + self.assertEqual(rest, b"") + + def test_new_call_convention(self): + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") + bits, rest = remove_bitstring(b"\x03\x02\x00\xff", 0) + + self.assertEqual(bits, b"\xff") + self.assertEqual(rest, b"") + + def test_implicit_unexpected_unused(self): + # make sure no warnings are raised + with warnings.catch_warnings(): + warnings.simplefilter("error") + bits, rest = remove_bitstring(b"\x03\x02\x00\xff", None) + + self.assertEqual(bits, (b"\xff", 0)) + self.assertEqual(rest, b"") + + def test_with_padding(self): + ret, rest = remove_bitstring(b"\x03\x02\x04\xf0", None) + + self.assertEqual(ret, (b"\xf0", 4)) + self.assertEqual(rest, b"") + + def test_not_a_bitstring(self): + with self.assertRaises(UnexpectedDER): + 
remove_bitstring(b"\x02\x02\x00\xff", None) + + def test_empty_encoding(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x00", None) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"", None) + + def test_no_length(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03", None) + + def test_unexpected_number_of_unused_bits(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x02\x00\xff", 1) + + def test_invalid_encoding_of_unused_bits(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x03\x08\xff\x00", None) + + def test_invalid_encoding_of_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x01\x01", None) + + def test_invalid_padding_bits(self): + with self.assertRaises(UnexpectedDER): + remove_bitstring(b"\x03\x02\x01\xff", None) + + +class TestStrIdxAsInt(unittest.TestCase): + def test_str(self): + self.assertEqual(115, str_idx_as_int("str", 0)) + + def test_bytes(self): + self.assertEqual(115, str_idx_as_int(b"str", 0)) + + def test_bytearray(self): + self.assertEqual(115, str_idx_as_int(bytearray(b"str"), 0)) + + +class TestEncodeOid(unittest.TestCase): + def test_pub_key_oid(self): + oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) + self.assertEqual(hexlify(oid_ecPublicKey), b"06072a8648ce3d0201") + + def test_nist224p_oid(self): + self.assertEqual(hexlify(NIST224p.encoded_oid), b"06052b81040021") + + def test_nist256p_oid(self): + self.assertEqual( + hexlify(NIST256p.encoded_oid), b"06082a8648ce3d030107" + ) + + def test_large_second_subid(self): + # from X.690, section 8.19.5 + oid = encode_oid(2, 999, 3) + self.assertEqual(oid, b"\x06\x03\x88\x37\x03") + + def test_with_two_subids(self): + oid = encode_oid(2, 999) + self.assertEqual(oid, b"\x06\x02\x88\x37") + + def test_zero_zero(self): + oid = encode_oid(0, 0) + self.assertEqual(oid, b"\x06\x01\x00") + + def test_with_wrong_types(self): + with self.assertRaises((TypeError, AssertionError)): + encode_oid(0, None) + + def test_with_small_first_large_second(self): + with self.assertRaises(AssertionError): + encode_oid(1, 40) + + def test_small_first_max_second(self): + oid = encode_oid(1, 39) + self.assertEqual(oid, b"\x06\x01\x4f") + + def test_with_invalid_first(self): + with self.assertRaises(AssertionError): + encode_oid(3, 39) + + +class TestRemoveObject(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) + + def test_pub_key_oid(self): + oid, rest = remove_object(self.oid_ecPublicKey) + self.assertEqual(rest, b"") + self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) + + def test_with_extra_bytes(self): + oid, rest = remove_object(self.oid_ecPublicKey + b"more") + self.assertEqual(rest, b"more") + self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) + + def test_with_large_second_subid(self): + # from X.690, section 8.19.5 + oid, rest = remove_object(b"\x06\x03\x88\x37\x03") + self.assertEqual(rest, b"") + self.assertEqual(oid, (2, 999, 3)) + + def test_with_padded_first_subid(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x02\x80\x00") + + def test_with_padded_second_subid(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x04\x88\x37\x80\x01") + + def test_with_missing_last_byte_of_multi_byte(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x03\x88\x37\x83") + + def test_with_two_subids(self): + oid, rest = 
remove_object(b"\x06\x02\x88\x37") + self.assertEqual(rest, b"") + self.assertEqual(oid, (2, 999)) + + def test_zero_zero(self): + oid, rest = remove_object(b"\x06\x01\x00") + self.assertEqual(rest, b"") + self.assertEqual(oid, (0, 0)) + + def test_empty_string(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"") + + def test_missing_length(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06") + + def test_empty_oid(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x00") + + def test_empty_oid_overflow(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x01") + + def test_with_wrong_type(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x04\x02\x88\x37") + + def test_with_too_long_length(self): + with self.assertRaises(UnexpectedDER): + remove_object(b"\x06\x03\x88\x37") + + +class TestRemoveConstructed(unittest.TestCase): + def test_simple(self): + data = b"\xa1\x02\xff\xaa" + + tag, body, rest = remove_constructed(data) + + self.assertEqual(tag, 0x01) + self.assertEqual(body, b"\xff\xaa") + self.assertEqual(rest, b"") + + def test_with_malformed_tag(self): + data = b"\x01\x02\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_constructed(data) + + self.assertIn("constructed tag", str(e.exception)) + + +class TestRemoveImplicit(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.exp_tag = 6 + cls.exp_data = b"\x0a\x0b" + # data with application tag class + cls.data_application = b"\x46\x02\x0a\x0b" + # data with context-specific tag class + cls.data_context_specific = b"\x86\x02\x0a\x0b" + # data with private tag class + cls.data_private = b"\xc6\x02\x0a\x0b" + + def test_simple(self): + tag, body, rest = remove_implicit(self.data_context_specific) + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, b"") + + def test_wrong_expected_class(self): + with self.assertRaises(ValueError) as e: + remove_implicit(self.data_context_specific, "foobar") + + self.assertIn("invalid `exp_class` value", str(e.exception)) + + def test_with_wrong_class(self): + with self.assertRaises(UnexpectedDER) as e: + remove_implicit(self.data_application) + + self.assertIn( + "wanted class context-specific, got 0x46 tag", str(e.exception) + ) + + def test_with_application_class(self): + tag, body, rest = remove_implicit(self.data_application, "application") + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, b"") + + def test_with_private_class(self): + tag, body, rest = remove_implicit(self.data_private, "private") + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, b"") + + def test_with_data_following(self): + extra_data = b"\x00\x01" + + tag, body, rest = remove_implicit( + self.data_context_specific + extra_data + ) + + self.assertEqual(tag, self.exp_tag) + self.assertEqual(body, self.exp_data) + self.assertEqual(rest, extra_data) + + def test_with_constructed(self): + data = b"\xa6\x02\x0a\x0b" + + with self.assertRaises(UnexpectedDER) as e: + remove_implicit(data) + + self.assertIn("wanted type primitive, got 0xa6 tag", str(e.exception)) + + def test_encode_decode(self): + data = b"some longish string" + + tag, body, rest = remove_implicit( + encode_implicit(6, data, "application"), "application" + ) + + self.assertEqual(tag, 6) + self.assertEqual(body, data) + self.assertEqual(rest, b"") + + +class TestEncodeImplicit(unittest.TestCase): 
+ @classmethod + def setUpClass(cls): + cls.data = b"\x0a\x0b" + # data with application tag class + cls.data_application = b"\x46\x02\x0a\x0b" + # data with context-specific tag class + cls.data_context_specific = b"\x86\x02\x0a\x0b" + # data with private tag class + cls.data_private = b"\xc6\x02\x0a\x0b" + + def test_encode_with_default_class(self): + ret = encode_implicit(6, self.data) + + self.assertEqual(ret, self.data_context_specific) + + def test_encode_with_application_class(self): + ret = encode_implicit(6, self.data, "application") + + self.assertEqual(ret, self.data_application) + + def test_encode_with_context_specific_class(self): + ret = encode_implicit(6, self.data, "context-specific") + + self.assertEqual(ret, self.data_context_specific) + + def test_encode_with_private_class(self): + ret = encode_implicit(6, self.data, "private") + + self.assertEqual(ret, self.data_private) + + def test_encode_with_invalid_class(self): + with self.assertRaises(ValueError) as e: + encode_implicit(6, self.data, "foobar") + + self.assertIn("invalid tag class", str(e.exception)) + + def test_encode_with_too_large_tag(self): + with self.assertRaises(ValueError) as e: + encode_implicit(32, self.data) + + self.assertIn("Long tags not supported", str(e.exception)) + + +class TestRemoveOctetString(unittest.TestCase): + def test_simple(self): + data = b"\x04\x03\xaa\xbb\xcc" + body, rest = remove_octet_string(data) + self.assertEqual(body, b"\xaa\xbb\xcc") + self.assertEqual(rest, b"") + + def test_with_malformed_tag(self): + data = b"\x03\x03\xaa\xbb\xcc" + with self.assertRaises(UnexpectedDER) as e: + remove_octet_string(data) + + self.assertIn("octetstring", str(e.exception)) + + +class TestRemoveSequence(unittest.TestCase): + def test_simple(self): + data = b"\x30\x02\xff\xaa" + body, rest = remove_sequence(data) + self.assertEqual(body, b"\xff\xaa") + self.assertEqual(rest, b"") + + def test_with_empty_string(self): + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(b"") + + self.assertIn("Empty string", str(e.exception)) + + def test_with_wrong_tag(self): + data = b"\x20\x02\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(data) + + self.assertIn("wanted type 'sequence'", str(e.exception)) + + def test_with_wrong_length(self): + data = b"\x30\x03\xff\xaa" + + with self.assertRaises(UnexpectedDER) as e: + remove_sequence(data) + + self.assertIn("Length longer", str(e.exception)) + + +@st.composite +def st_oid(draw, max_value=2**512, max_size=50): + """ + Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples + + :param max_value: maximum value of any single sub-identifier + :param max_size: maximum length of the generated OID + """ + first = draw(st.integers(min_value=0, max_value=2)) + if first < 2: + second = draw(st.integers(min_value=0, max_value=39)) + else: + second = draw(st.integers(min_value=0, max_value=max_value)) + rest = draw( + st.lists( + st.integers(min_value=0, max_value=max_value), max_size=max_size + ) + ) + return (first, second) + tuple(rest) + + +HYP_SETTINGS = {} + + +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 2 + + +@settings(**HYP_SETTINGS) +@given(st_oid()) +def test_oids(ids): + encoded_oid = encode_oid(*ids) + decoded_oid, rest = remove_object(encoded_oid) + assert rest == b"" + assert decoded_oid == ids diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_ecdh.py b/venv/lib/python3.12/site-packages/ecdsa/test_ecdh.py new file mode 100644 index 0000000..cb22580 --- /dev/null 
+++ b/venv/lib/python3.12/site-packages/ecdsa/test_ecdh.py @@ -0,0 +1,449 @@ +import os +import sys +import shutil +import subprocess +import pytest +from binascii import unhexlify + +try: + import unittest2 as unittest +except ImportError: + import unittest + +from .curves import ( + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + BRAINPOOLP160r1, + SECP112r2, + SECP128r1, +) +from .curves import curves +from .ecdh import ( + ECDH, + InvalidCurveError, + InvalidSharedSecretError, + NoKeyError, + NoCurveError, +) +from .keys import SigningKey, VerifyingKey +from .ellipticcurve import CurveEdTw + + +if "--fast" in sys.argv: # pragma: no cover + curves = [SECP112r2, SECP128r1] + + +@pytest.mark.parametrize( + "vcurve", + curves, + ids=[curve.name for curve in curves], +) +def test_ecdh_each(vcurve): + if isinstance(vcurve.curve, CurveEdTw): + pytest.skip("ECDH is not supported for Edwards curves") + ecdh1 = ECDH(curve=vcurve) + ecdh2 = ECDH(curve=vcurve) + + ecdh2.generate_private_key() + ecdh1.load_received_public_key(ecdh2.get_public_key()) + ecdh2.load_received_public_key(ecdh1.generate_private_key()) + + secret1 = ecdh1.generate_sharedsecret_bytes() + secret2 = ecdh2.generate_sharedsecret_bytes() + assert secret1 == secret2 + + +def test_ecdh_both_keys_present(): + key1 = SigningKey.generate(BRAINPOOLP160r1) + key2 = SigningKey.generate(BRAINPOOLP160r1) + + ecdh1 = ECDH(BRAINPOOLP160r1, key1, key2.verifying_key) + ecdh2 = ECDH(private_key=key2, public_key=key1.verifying_key) + + secret1 = ecdh1.generate_sharedsecret_bytes() + secret2 = ecdh2.generate_sharedsecret_bytes() + + assert secret1 == secret2 + + +def test_ecdh_no_public_key(): + ecdh1 = ECDH(curve=NIST192p) + + with pytest.raises(NoKeyError): + ecdh1.generate_sharedsecret_bytes() + + ecdh1.generate_private_key() + + with pytest.raises(NoKeyError): + ecdh1.generate_sharedsecret_bytes() + + +class TestECDH(unittest.TestCase): + def test_load_key_from_wrong_curve(self): + ecdh1 = ECDH() + ecdh1.set_curve(NIST192p) + + key1 = SigningKey.generate(BRAINPOOLP160r1) + + with self.assertRaises(InvalidCurveError) as e: + ecdh1.load_private_key(key1) + + self.assertIn("Curve mismatch", str(e.exception)) + + def test_generate_without_curve(self): + ecdh1 = ECDH() + + with self.assertRaises(NoCurveError) as e: + ecdh1.generate_private_key() + + self.assertIn("Curve must be set", str(e.exception)) + + def test_load_bytes_without_curve_set(self): + ecdh1 = ECDH() + + with self.assertRaises(NoCurveError) as e: + ecdh1.load_private_key_bytes(b"\x01" * 32) + + self.assertIn("Curve must be set", str(e.exception)) + + def test_set_curve_from_received_public_key(self): + ecdh1 = ECDH() + + key1 = SigningKey.generate(BRAINPOOLP160r1) + + ecdh1.load_received_public_key(key1.verifying_key) + + self.assertEqual(ecdh1.curve, BRAINPOOLP160r1) + + +def test_ecdh_wrong_public_key_curve(): + ecdh1 = ECDH(curve=NIST192p) + ecdh1.generate_private_key() + ecdh2 = ECDH(curve=NIST256p) + ecdh2.generate_private_key() + + with pytest.raises(InvalidCurveError): + ecdh1.load_received_public_key(ecdh2.get_public_key()) + + with pytest.raises(InvalidCurveError): + ecdh2.load_received_public_key(ecdh1.get_public_key()) + + ecdh1.public_key = ecdh2.get_public_key() + ecdh2.public_key = ecdh1.get_public_key() + + with pytest.raises(InvalidCurveError): + ecdh1.generate_sharedsecret_bytes() + + with pytest.raises(InvalidCurveError): + ecdh2.generate_sharedsecret_bytes() + + +def test_ecdh_invalid_shared_secret_curve(): + ecdh1 = ECDH(curve=NIST256p) + 
ecdh1.generate_private_key() + + ecdh1.load_received_public_key( + SigningKey.generate(NIST256p).get_verifying_key() + ) + + ecdh1.private_key.privkey.secret_multiplier = ecdh1.private_key.curve.order + + with pytest.raises(InvalidSharedSecretError): + ecdh1.generate_sharedsecret_bytes() + + +# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp192r1.txt +# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp256r1.txt +# https://github.com/coruus/nist-testvectors/blob/master/csrc.nist.gov/groups/STM/cavp/documents/components/ecccdhtestvectors/KAS_ECC_CDH_PrimitiveTest.txt +@pytest.mark.parametrize( + "curve,privatekey,pubkey,secret", + [ + pytest.param( + NIST192p, + "f17d3fea367b74d340851ca4270dcb24c271f445bed9d527", + "42ea6dd9969dd2a61fea1aac7f8e98edcc896c6e55857cc0" + "dfbe5d7c61fac88b11811bde328e8a0d12bf01a9d204b523", + "803d8ab2e5b6e6fca715737c3a82f7ce3c783124f6d51cd0", + id="NIST192p-1", + ), + pytest.param( + NIST192p, + "56e853349d96fe4c442448dacb7cf92bb7a95dcf574a9bd5", + "deb5712fa027ac8d2f22c455ccb73a91e17b6512b5e030e7" + "7e2690a02cc9b28708431a29fb54b87b1f0c14e011ac2125", + "c208847568b98835d7312cef1f97f7aa298283152313c29d", + id="NIST192p-2", + ), + pytest.param( + NIST192p, + "c6ef61fe12e80bf56f2d3f7d0bb757394519906d55500949", + "4edaa8efc5a0f40f843663ec5815e7762dddc008e663c20f" + "0a9f8dc67a3e60ef6d64b522185d03df1fc0adfd42478279", + "87229107047a3b611920d6e3b2c0c89bea4f49412260b8dd", + id="NIST192p-3", + ), + pytest.param( + NIST192p, + "e6747b9c23ba7044f38ff7e62c35e4038920f5a0163d3cda", + "8887c276edeed3e9e866b46d58d895c73fbd80b63e382e88" + "04c5097ba6645e16206cfb70f7052655947dd44a17f1f9d5", + "eec0bed8fc55e1feddc82158fd6dc0d48a4d796aaf47d46c", + id="NIST192p-4", + ), + pytest.param( + NIST192p, + "beabedd0154a1afcfc85d52181c10f5eb47adc51f655047d", + "0d045f30254adc1fcefa8a5b1f31bf4e739dd327cd18d594" + "542c314e41427c08278a08ce8d7305f3b5b849c72d8aff73", + "716e743b1b37a2cd8479f0a3d5a74c10ba2599be18d7e2f4", + id="NIST192p-5", + ), + pytest.param( + NIST192p, + "cf70354226667321d6e2baf40999e2fd74c7a0f793fa8699", + "fb35ca20d2e96665c51b98e8f6eb3d79113508d8bccd4516" + "368eec0d5bfb847721df6aaff0e5d48c444f74bf9cd8a5a7", + "f67053b934459985a315cb017bf0302891798d45d0e19508", + id="NIST192p-6", + ), + pytest.param( + NIST224p, + "8346a60fc6f293ca5a0d2af68ba71d1dd389e5e40837942df3e43cbd", + "af33cd0629bc7e996320a3f40368f74de8704fa37b8fab69abaae280" + "882092ccbba7930f419a8a4f9bb16978bbc3838729992559a6f2e2d7", + "7d96f9a3bd3c05cf5cc37feb8b9d5209d5c2597464dec3e9983743e8", + id="NIST224p", + ), + pytest.param( + NIST256p, + "7d7dc5f71eb29ddaf80d6214632eeae03d9058af1fb6d22ed80badb62bc1a534", + "700c48f77f56584c5cc632ca65640db91b6bacce3a4df6b42ce7cc838833d287" + "db71e509e3fd9b060ddb20ba5c51dcc5948d46fbf640dfe0441782cab85fa4ac", + "46fc62106420ff012e54a434fbdd2d25ccc5852060561e68040dd7778997bd7b", + id="NIST256p-1", + ), + pytest.param( + NIST256p, + "38f65d6dce47676044d58ce5139582d568f64bb16098d179dbab07741dd5caf5", + "809f04289c64348c01515eb03d5ce7ac1a8cb9498f5caa50197e58d43a86a7ae" + "b29d84e811197f25eba8f5194092cb6ff440e26d4421011372461f579271cda3", + "057d636096cb80b67a8c038c890e887d1adfa4195e9b3ce241c8a778c59cda67", + id="NIST256p-2", + ), + pytest.param( + NIST256p, + "1accfaf1b97712b85a6f54b148985a1bdc4c9bec0bd258cad4b3d603f49f32c8", + "a2339c12d4a03c33546de533268b4ad667debf458b464d77443636440ee7fec3" + "ef48a3ab26e20220bcda2c1851076839dae88eae962869a497bf73cb66faf536", + "2d457b78b4614132477618a5b077965ec90730a8c81a1c75d6d4ec68005d67ec", + 
id="NIST256p-3", + ), + pytest.param( + NIST256p, + "207c43a79bfee03db6f4b944f53d2fb76cc49ef1c9c4d34d51b6c65c4db6932d", + "df3989b9fa55495719b3cf46dccd28b5153f7808191dd518eff0c3cff2b705ed" + "422294ff46003429d739a33206c8752552c8ba54a270defc06e221e0feaf6ac4", + "96441259534b80f6aee3d287a6bb17b5094dd4277d9e294f8fe73e48bf2a0024", + id="NIST256p-4", + ), + pytest.param( + NIST256p, + "59137e38152350b195c9718d39673d519838055ad908dd4757152fd8255c09bf", + "41192d2813e79561e6a1d6f53c8bc1a433a199c835e141b05a74a97b0faeb922" + "1af98cc45e98a7e041b01cf35f462b7562281351c8ebf3ffa02e33a0722a1328", + "19d44c8d63e8e8dd12c22a87b8cd4ece27acdde04dbf47f7f27537a6999a8e62", + id="NIST256p-5", + ), + pytest.param( + NIST256p, + "f5f8e0174610a661277979b58ce5c90fee6c9b3bb346a90a7196255e40b132ef", + "33e82092a0f1fb38f5649d5867fba28b503172b7035574bf8e5b7100a3052792" + "f2cf6b601e0a05945e335550bf648d782f46186c772c0f20d3cd0d6b8ca14b2f", + "664e45d5bba4ac931cd65d52017e4be9b19a515f669bea4703542a2c525cd3d3", + id="NIST256p-6", + ), + pytest.param( + NIST384p, + "3cc3122a68f0d95027ad38c067916ba0eb8c38894d22e1b1" + "5618b6818a661774ad463b205da88cf699ab4d43c9cf98a1", + "a7c76b970c3b5fe8b05d2838ae04ab47697b9eaf52e76459" + "2efda27fe7513272734466b400091adbf2d68c58e0c50066" + "ac68f19f2e1cb879aed43a9969b91a0839c4c38a49749b66" + "1efedf243451915ed0905a32b060992b468c64766fc8437a", + "5f9d29dc5e31a163060356213669c8ce132e22f57c9a04f4" + "0ba7fcead493b457e5621e766c40a2e3d4d6a04b25e533f1", + id="NIST384p", + ), + pytest.param( + NIST521p, + "017eecc07ab4b329068fba65e56a1f8890aa935e57134ae0ffcce802735151f4ea" + "c6564f6ee9974c5e6887a1fefee5743ae2241bfeb95d5ce31ddcb6f9edb4d6fc47", + "00685a48e86c79f0f0875f7bc18d25eb5fc8c0b07e5da4f4370f3a949034085433" + "4b1e1b87fa395464c60626124a4e70d0f785601d37c09870ebf176666877a2046d" + "01ba52c56fc8776d9e8f5db4f0cc27636d0b741bbe05400697942e80b739884a83" + "bde99e0f6716939e632bc8986fa18dccd443a348b6c3e522497955a4f3c302f676", + "005fc70477c3e63bc3954bd0df3ea0d1f41ee21746ed95fc5e1fdf90930d5e1366" + "72d72cc770742d1711c3c3a4c334a0ad9759436a4d3c5bf6e74b9578fac148c831", + id="NIST521p", + ), + ], +) +def test_ecdh_NIST(curve, privatekey, pubkey, secret): + ecdh = ECDH(curve=curve) + ecdh.load_private_key_bytes(unhexlify(privatekey)) + ecdh.load_received_public_key_bytes(unhexlify(pubkey)) + + sharedsecret = ecdh.generate_sharedsecret_bytes() + + assert sharedsecret == unhexlify(secret) + + +pem_local_private_key = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" +) +der_local_private_key = ( + "305f02010104185ec8420bd6ef9252a942e989043ca29f561fa525770eb1c5a00a06082a864" + "8ce3d030101a13403320004b88177d084ef17f5e45639408028360f9f59b4a4d7264e62da06" + "51dce47a35a4c5b45cf51593423a8b557b9c2099f36c" +) +pem_remote_public_key = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" + "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" + "-----END PUBLIC KEY-----\n" +) +der_remote_public_key = ( + "3049301306072a8648ce3d020106082a8648ce3d03010103320004b88177d084ef17f5e4563" + "9408028360f9f59b4a4d7264e62da0651dce47a35a4c5b45cf51593423a8b557b9c2099f36c" +) +gshared_secret = "8f457e34982478d1c34b9cd2d0c15911b72dd60d869e2cea" + + +def test_ecdh_pem(): + ecdh = ECDH() + ecdh.load_private_key_pem(pem_local_private_key) + ecdh.load_received_public_key_pem(pem_remote_public_key) + + sharedsecret = 
ecdh.generate_sharedsecret_bytes() + + assert sharedsecret == unhexlify(gshared_secret) + + +def test_ecdh_der(): + ecdh = ECDH() + ecdh.load_private_key_der(unhexlify(der_local_private_key)) + ecdh.load_received_public_key_der(unhexlify(der_remote_public_key)) + + sharedsecret = ecdh.generate_sharedsecret_bytes() + + assert sharedsecret == unhexlify(gshared_secret) + + +# Exception class used by run_openssl. +class RunOpenSslError(Exception): + pass + + +def run_openssl(cmd): + OPENSSL = "openssl" + p = subprocess.Popen( + [OPENSSL] + cmd.split(), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + stdout, ignored = p.communicate() + if p.returncode != 0: + raise RunOpenSslError( + "cmd '%s %s' failed: rc=%s, stdout/err was %s" + % (OPENSSL, cmd, p.returncode, stdout) + ) + return stdout.decode() + + +OPENSSL_SUPPORTED_CURVES = set( + c.split(":")[0].strip() + for c in run_openssl("ecparam -list_curves").split("\n") +) + + +@pytest.mark.slow +@pytest.mark.parametrize( + "vcurve", + curves, + ids=[curve.name for curve in curves], +) +def test_ecdh_with_openssl(vcurve): + if isinstance(vcurve.curve, CurveEdTw): + pytest.skip("Edwards curves are not supported for ECDH") + + assert vcurve.openssl_name + + if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES: + pytest.skip("system openssl does not support " + vcurve.openssl_name) + + try: + hlp = run_openssl("pkeyutl -help") + if hlp.find("-derive") == -1: # pragma: no cover + pytest.skip("system openssl does not support `pkeyutl -derive`") + except RunOpenSslError: # pragma: no cover + pytest.skip("system openssl could not be executed") + + if os.path.isdir("t"): # pragma: no branch + shutil.rmtree("t") + os.mkdir("t") + run_openssl( + "ecparam -name %s -genkey -out t/privkey1.pem" % vcurve.openssl_name + ) + run_openssl( + "ecparam -name %s -genkey -out t/privkey2.pem" % vcurve.openssl_name + ) + run_openssl("ec -in t/privkey1.pem -pubout -out t/pubkey1.pem") + + ecdh1 = ECDH(curve=vcurve) + ecdh2 = ECDH(curve=vcurve) + with open("t/privkey1.pem") as e: + key = e.read() + ecdh1.load_private_key_pem(key) + with open("t/privkey2.pem") as e: + key = e.read() + ecdh2.load_private_key_pem(key) + + with open("t/pubkey1.pem") as e: + key = e.read() + vk1 = VerifyingKey.from_pem(key) + assert vk1.to_string() == ecdh1.get_public_key().to_string() + vk2 = ecdh2.get_public_key() + with open("t/pubkey2.pem", "wb") as e: + e.write(vk2.to_pem()) + + ecdh1.load_received_public_key(vk2) + ecdh2.load_received_public_key(vk1) + secret1 = ecdh1.generate_sharedsecret_bytes() + secret2 = ecdh2.generate_sharedsecret_bytes() + + assert secret1 == secret2 + + run_openssl( + "pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1" + ) + run_openssl( + "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2" + ) + + with open("t/secret1", "rb") as e: + ssl_secret1 = e.read() + with open("t/secret2", "rb") as e: + ssl_secret2 = e.read() + + assert len(ssl_secret1) == vk1.curve.verifying_key_length // 2 + assert len(secret1) == vk1.curve.verifying_key_length // 2 + + assert ssl_secret1 == ssl_secret2 + assert secret1 == ssl_secret1 diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_ecdsa.py b/venv/lib/python3.12/site-packages/ecdsa/test_ecdsa.py new file mode 100644 index 0000000..c1e2582 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_ecdsa.py @@ -0,0 +1,694 @@ +from __future__ import print_function +import sys +import hypothesis.strategies as st +from hypothesis import given, settings, note, 
example + +try: + import unittest2 as unittest +except ImportError: + import unittest +import pytest +from .ecdsa import ( + Private_key, + Public_key, + Signature, + generator_192, + digest_integer, + ellipticcurve, + point_is_valid, + generator_224, + generator_256, + generator_384, + generator_521, + generator_secp256k1, + curve_192, + InvalidPointError, + curve_112r2, + generator_112r2, + int_to_string, +) +from .ellipticcurve import Point + + +HYP_SETTINGS = {} +# old hypothesis doesn't have the "deadline" setting +if sys.version_info > (2, 7): # pragma: no branch + # SEC521p is slow, allow long execution for it + HYP_SETTINGS["deadline"] = 5000 + + +class TestP192FromX9_62(unittest.TestCase): + """Check test vectors from X9.62""" + + @classmethod + def setUpClass(cls): + cls.d = 651056770906015076056810763456358567190100156695615665659 + cls.Q = cls.d * generator_192 + cls.k = 6140507067065001063065065565667405560006161556565665656654 + cls.R = cls.k * generator_192 + + cls.msg = 968236873715988614170569073515315707566766479517 + cls.pubk = Public_key(generator_192, generator_192 * cls.d) + cls.privk = Private_key(cls.pubk, cls.d) + cls.sig = cls.privk.sign(cls.msg, cls.k) + + def test_point_multiplication(self): + assert self.Q.x() == 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 + + def test_point_multiplication_2(self): + assert self.R.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD + assert self.R.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 + + def test_mult_and_addition(self): + u1 = 2563697409189434185194736134579731015366492496392189760599 + u2 = 6266643813348617967186477710235785849136406323338782220568 + temp = u1 * generator_192 + u2 * self.Q + assert temp.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD + assert temp.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 + + def test_signature(self): + r, s = self.sig.r, self.sig.s + assert r == 3342403536405981729393488334694600415596881826869351677613 + assert s == 5735822328888155254683894997897571951568553642892029982342 + + def test_verification(self): + assert self.pubk.verifies(self.msg, self.sig) + + def test_rejection(self): + assert not self.pubk.verifies(self.msg - 1, self.sig) + + def test_verification_with_regular_point(self): + pubk = Public_key( + Point( + generator_192.curve(), + generator_192.x(), + generator_192.y(), + generator_192.order(), + ), + self.pubk.point, + ) + + assert pubk.verifies(self.msg, self.sig) + + +class TestPublicKey(unittest.TestCase): + def test_equality_public_keys(self): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + pub_key1 = Public_key(gen, point) + pub_key2 = Public_key(gen, point) + self.assertEqual(pub_key1, pub_key2) + + def test_inequality_public_key(self): + gen = generator_192 + x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point1 = ellipticcurve.Point(gen.curve(), x1, y1) + + x2 = 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15 + y2 = 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF + point2 = ellipticcurve.Point(gen.curve(), x2, y2) + + pub_key1 = Public_key(gen, point1) + pub_key2 = Public_key(gen, point2) + self.assertNotEqual(pub_key1, pub_key2) + + def test_inequality_different_curves(self): + gen = generator_192 + x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y1 = 
0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point1 = ellipticcurve.Point(gen.curve(), x1, y1) + + x2 = 0x722BA0FB6B8FC8898A4C6AB49E66 + y2 = 0x2B7344BB57A7ABC8CA0F1A398C7D + point2 = ellipticcurve.Point(generator_112r2.curve(), x2, y2) + + pub_key1 = Public_key(gen, point1) + pub_key2 = Public_key(generator_112r2, point2) + self.assertNotEqual(pub_key1, pub_key2) + + def test_inequality_public_key_not_implemented(self): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + pub_key = Public_key(gen, point) + self.assertNotEqual(pub_key, None) + + def test_public_key_with_generator_without_order(self): + gen = ellipticcurve.PointJacobi( + generator_192.curve(), generator_192.x(), generator_192.y(), 1 + ) + + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + + with self.assertRaises(InvalidPointError) as e: + Public_key(gen, point) + + self.assertIn("Generator point must have order", str(e.exception)) + + def test_public_point_on_curve_not_scalar_multiple_of_base_point(self): + x = 2 + y = 0xBE6AA4938EF7CFE6FE29595B6B00 + # we need a curve with cofactor != 1 + point = ellipticcurve.PointJacobi(curve_112r2, x, y, 1) + + self.assertTrue(curve_112r2.contains_point(x, y)) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_112r2, point) + + self.assertIn("Generator point order", str(e.exception)) + + def test_point_is_valid_with_not_scalar_multiple_of_base_point(self): + x = 2 + y = 0xBE6AA4938EF7CFE6FE29595B6B00 + + self.assertFalse(point_is_valid(generator_112r2, x, y)) + + # the tests to verify the extensiveness of tests in ecdsa.ecdsa + # if PointJacobi gets modified to calculate the x and y mod p the tests + # below will need to use a fake/mock object + def test_invalid_point_x_negative(self): + pt = ellipticcurve.PointJacobi(curve_192, -1, 0, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_x_equal_p(self): + pt = ellipticcurve.PointJacobi(curve_192, curve_192.p(), 0, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_y_negative(self): + pt = ellipticcurve.PointJacobi(curve_192, 0, -1, 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + def test_invalid_point_y_equal_p(self): + pt = ellipticcurve.PointJacobi(curve_192, 0, curve_192.p(), 1) + + with self.assertRaises(InvalidPointError) as e: + Public_key(generator_192, pt) + + self.assertIn("The public point has x or y", str(e.exception)) + + +class TestPublicKeyVerifies(unittest.TestCase): + # test all the different ways that a signature can be publicly invalid + @classmethod + def setUpClass(cls): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + + cls.pub_key = Public_key(gen, point) + + def test_sig_with_r_zero(self): + sig = Signature(0, 1) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_r_order(self): + sig = Signature(generator_192.order(), 
1) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_s_zero(self): + sig = Signature(1, 0) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + def test_sig_with_s_order(self): + sig = Signature(1, generator_192.order()) + + self.assertFalse(self.pub_key.verifies(1, sig)) + + +class TestPrivateKey(unittest.TestCase): + @classmethod + def setUpClass(cls): + gen = generator_192 + x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 + y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F + point = ellipticcurve.Point(gen.curve(), x, y) + cls.pub_key = Public_key(gen, point) + + def test_equality_private_keys(self): + pr_key1 = Private_key(self.pub_key, 100) + pr_key2 = Private_key(self.pub_key, 100) + self.assertEqual(pr_key1, pr_key2) + + def test_inequality_private_keys(self): + pr_key1 = Private_key(self.pub_key, 100) + pr_key2 = Private_key(self.pub_key, 200) + self.assertNotEqual(pr_key1, pr_key2) + + def test_inequality_private_keys_not_implemented(self): + pr_key = Private_key(self.pub_key, 100) + self.assertNotEqual(pr_key, None) + + +# Testing point validity, as per ECDSAVS.pdf B.2.2: +P192_POINTS = [ + ( + generator_192, + 0xCD6D0F029A023E9AACA429615B8F577ABEE685D8257CC83A, + 0x00019C410987680E9FB6C0B6ECC01D9A2647C8BAE27721BACDFC, + False, + ), + ( + generator_192, + 0x00017F2FCE203639E9EAF9FB50B81FC32776B30E3B02AF16C73B, + 0x95DA95C5E72DD48E229D4748D4EEE658A9A54111B23B2ADB, + False, + ), + ( + generator_192, + 0x4F77F8BC7FCCBADD5760F4938746D5F253EE2168C1CF2792, + 0x000147156FF824D131629739817EDB197717C41AAB5C2A70F0F6, + False, + ), + ( + generator_192, + 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6, + 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F, + True, + ), + ( + generator_192, + 0xCDF56C1AA3D8AFC53C521ADF3FFB96734A6A630A4A5B5A70, + 0x97C1C44A5FB229007B5EC5D25F7413D170068FFD023CAA4E, + True, + ), + ( + generator_192, + 0x89009C0DC361C81E99280C8E91DF578DF88CDF4B0CDEDCED, + 0x27BE44A529B7513E727251F128B34262A0FD4D8EC82377B9, + True, + ), + ( + generator_192, + 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15, + 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF, + True, + ), + ( + generator_192, + 0x6DCCBDE75C0948C98DAB32EA0BC59FE125CF0FB1A3798EDA, + 0x0001171A3E0FA60CF3096F4E116B556198DE430E1FBD330C8835, + False, + ), + ( + generator_192, + 0xD266B39E1F491FC4ACBBBC7D098430931CFA66D55015AF12, + 0x193782EB909E391A3148B7764E6B234AA94E48D30A16DBB2, + False, + ), + ( + generator_192, + 0x9D6DDBCD439BAA0C6B80A654091680E462A7D1D3F1FFEB43, + 0x6AD8EFC4D133CCF167C44EB4691C80ABFFB9F82B932B8CAA, + False, + ), + ( + generator_192, + 0x146479D944E6BDA87E5B35818AA666A4C998A71F4E95EDBC, + 0xA86D6FE62BC8FBD88139693F842635F687F132255858E7F6, + False, + ), + ( + generator_192, + 0xE594D4A598046F3598243F50FD2C7BD7D380EDB055802253, + 0x509014C0C4D6B536E3CA750EC09066AF39B4C8616A53A923, + False, + ), +] + + +@pytest.mark.parametrize("generator,x,y,expected", P192_POINTS) +def test_point_validity(generator, x, y, expected): + """ + `generator` defines the curve; is `(x, y)` a point on + this curve? `expected` is True if the right answer is Yes. 
+ """ + assert point_is_valid(generator, x, y) == expected + + +# Trying signature-verification tests from ECDSAVS.pdf B.2.4: +CURVE_192_KATS = [ + ( + generator_192, + int( + "0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee" + "425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30" + "d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff79" + "8cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d1" + "58", + 16, + ), + 0xD9DBFB332AA8E5FF091E8CE535857C37C73F6250FFB2E7AC, + 0x282102E364FEDED3AD15DDF968F88D8321AA268DD483EBC4, + 0x64DCA58A20787C488D11D6DD96313F1B766F2D8EFE122916, + 0x1ECBA28141E84AB4ECAD92F56720E2CC83EB3D22DEC72479, + True, + ), + ( + generator_192, + int( + "0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db1" + "2e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a" + "91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db3" + "26ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63" + "f4", + 16, + ), + 0x3E53EF8D3112AF3285C0E74842090712CD324832D4277AE7, + 0xCC75F8952D30AEC2CBB719FC6AA9934590B5D0FF5A83ADB7, + 0x8285261607283BA18F335026130BAB31840DCFD9C3E555AF, + 0x356D89E1B04541AFC9704A45E9C535CE4A50929E33D7E06C, + True, + ), + ( + generator_192, + int( + "0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911" + "b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cd" + "d41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d30" + "3f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42" + "dd", + 16, + ), + 0x16335DBE95F8E8254A4E04575D736BEFB258B8657F773CB7, + 0x421B13379C59BC9DCE38A1099CA79BBD06D647C7F6242336, + 0x4141BD5D64EA36C5B0BD21EF28C02DA216ED9D04522B1E91, + 0x159A6AA852BCC579E821B7BB0994C0861FB08280C38DAA09, + False, + ), + ( + generator_192, + int( + "0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b56309" + "7ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8" + "bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447" + "bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd" + "8a", + 16, + ), + 0xFD14CDF1607F5EFB7B1793037B15BDF4BAA6F7C16341AB0B, + 0x83FA0795CC6C4795B9016DAC928FD6BAC32F3229A96312C4, + 0x8DFDB832951E0167C5D762A473C0416C5C15BC1195667DC1, + 0x1720288A2DC13FA1EC78F763F8FE2FF7354A7E6FDDE44520, + False, + ), + ( + generator_192, + int( + "0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d3919" + "2e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196" + "683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bc" + "eae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072" + "fb", + 16, + ), + 0x674F941DC1A1F8B763C9334D726172D527B90CA324DB8828, + 0x65ADFA32E8B236CB33A3E84CF59BFB9417AE7E8EDE57A7FF, + 0x9508B9FDD7DAF0D8126F9E2BC5A35E4C6D800B5B804D7796, + 0x36F2BF6B21B987C77B53BB801B3435A577E3D493744BFAB0, + False, + ), + ( + generator_192, + int( + "0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397c" + "e15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aa" + "e98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc" + "55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca" + "6d", + 16, + ), + 0x10ECCA1AAD7220B56A62008B35170BFD5E35885C4014A19F, + 0x04EB61984C6C12ADE3BC47F3C629ECE7AA0A033B9948D686, + 0x82BFA4E82C0DFE9274169B86694E76CE993FD83B5C60F325, + 0xA97685676C59A65DBDE002FE9D613431FB183E8006D05633, + False, + ), + ( + generator_192, + int( + 
"0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f" + "698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98" + "f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a2" + "78461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76" + "e1", + 16, + ), + 0x6636653CB5B894CA65C448277B29DA3AD101C4C2300F7C04, + 0xFDF1CBB3FC3FD6A4F890B59E554544175FA77DBDBEB656C1, + 0xEAC2DDECDDFB79931A9C3D49C08DE0645C783A24CB365E1C, + 0x3549FEE3CFA7E5F93BC47D92D8BA100E881A2A93C22F8D50, + False, + ), + ( + generator_192, + int( + "0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6" + "c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7" + "a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b" + "9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6b" + "a2", + 16, + ), + 0xA82BD718D01D354001148CD5F69B9EBF38FF6F21898F8AAA, + 0xE67CEEDE07FC2EBFAFD62462A51E4B6C6B3D5B537B7CAF3E, + 0x4D292486C620C3DE20856E57D3BB72FCDE4A73AD26376955, + 0xA85289591A6081D5728825520E62FF1C64F94235C04C7F95, + False, + ), + ( + generator_192, + int( + "0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a" + "961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc91" + "0250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53" + "808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb6" + "58", + 16, + ), + 0x7D3B016B57758B160C4FCA73D48DF07AE3B6B30225126C2F, + 0x4AF3790D9775742BDE46F8DA876711BE1B65244B2B39E7EC, + 0x95F778F5F656511A5AB49A5D69DDD0929563C29CBC3A9E62, + 0x75C87FC358C251B4C83D2DD979FAAD496B539F9F2EE7A289, + False, + ), + ( + generator_192, + int( + "0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e102" + "88acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c9" + "0a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9e" + "a387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c9" + "7a", + 16, + ), + 0x9362F28C4EF96453D8A2F849F21E881CD7566887DA8BEB4A, + 0xE64D26D8D74C48A024AE85D982EE74CD16046F4EE5333905, + 0xF3923476A296C88287E8DE914B0B324AD5A963319A4FE73B, + 0xF0BAEED7624ED00D15244D8BA2AEDE085517DBDEC8AC65F5, + True, + ), + ( + generator_192, + int( + "0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f645" + "0d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d90" + "64e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8c" + "e1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd045" + "6d", + 16, + ), + 0xCC6FC032A846AAAC25533EB033522824F94E670FA997ECEF, + 0xE25463EF77A029ECCDA8B294FD63DD694E38D223D30862F1, + 0x066B1D07F3A40E679B620EDA7F550842A35C18B80C5EBE06, + 0xA0B0FB201E8F2DF65E2C4508EF303BDC90D934016F16B2DC, + False, + ), + ( + generator_192, + int( + "0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae" + "5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214e" + "ed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c4" + "40341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839" + "d7", + 16, + ), + 0x955C908FE900A996F7E2089BEE2F6376830F76A19135E753, + 0xBA0C42A91D3847DE4A592A46DC3FDAF45A7CC709B90DE520, + 0x1F58AD77FC04C782815A1405B0925E72095D906CBF52A668, + 0xF2E93758B3AF75EDF784F05A6761C9B9A6043C66B845B599, + False, + ), + ( + generator_192, + int( + "0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf99866" + "70a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b412" + "69bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52" + 
"e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160ce" + "f3", + 16, + ), + 0x31F7FA05576D78A949B24812D4383107A9A45BB5FCCDD835, + 0x8DC0EB65994A90F02B5E19BD18B32D61150746C09107E76B, + 0xBE26D59E4E883DDE7C286614A767B31E49AD88789D3A78FF, + 0x8762CA831C1CE42DF77893C9B03119428E7A9B819B619068, + False, + ), + ( + generator_192, + int( + "0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f" + "387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add502357" + "2720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670" + "716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1", + 16, + ), + 0x66AA8EDBBDB5CF8E28CEB51B5BDA891CAE2DF84819FE25C0, + 0x0C6BC2F69030A7CE58D4A00E3B3349844784A13B8936F8DA, + 0xA4661E69B1734F4A71B788410A464B71E7FFE42334484F23, + 0x738421CF5E049159D69C57A915143E226CAC8355E149AFE9, + False, + ), + ( + generator_192, + int( + "0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5af" + "a261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461" + "184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6d" + "b377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb", + 16, + ), + 0xBCFACF45139B6F5F690A4C35A5FFFA498794136A2353FC77, + 0x6F4A6C906316A6AFC6D98FE1F0399D056F128FE0270B0F22, + 0x9DB679A3DAFE48F7CCAD122933ACFE9DA0970B71C94C21C1, + 0x984C2DB99827576C0A41A5DA41E07D8CC768BC82F18C9DA9, + False, + ), +] + + +@pytest.mark.parametrize("gen,msg,qx,qy,r,s,expected", CURVE_192_KATS) +def test_signature_validity(gen, msg, qx, qy, r, s, expected): + """ + `msg` = message, `qx` and `qy` represent the base point on + elliptic curve of `gen`, `r` and `s` are the signature, and + `expected` is True iff the signature is expected to be valid.""" + pubk = Public_key(gen, ellipticcurve.Point(gen.curve(), qx, qy)) + with pytest.warns(DeprecationWarning) as warns: + msg_dgst = digest_integer(msg) + assert len(warns) == 3 + assert "unused" in warns[0].message.args[0] + assert "unused" in warns[1].message.args[0] + assert "unused" in warns[2].message.args[0] + assert expected == pubk.verifies(msg_dgst, Signature(r, s)) + + +@pytest.mark.parametrize( + "gen,msg,qx,qy,r,s,expected", [x for x in CURVE_192_KATS if x[6]] +) +def test_pk_recovery(gen, msg, r, s, qx, qy, expected): + del expected + sign = Signature(r, s) + with pytest.warns(DeprecationWarning) as warns: + msg_dgst = digest_integer(msg) + assert len(warns) == 3 + assert "unused" in warns[0].message.args[0] + assert "unused" in warns[1].message.args[0] + assert "unused" in warns[2].message.args[0] + pks = sign.recover_public_keys(msg_dgst, gen) + + assert pks + + # Test if the signature is valid for all found public keys + for pk in pks: + q = pk.point + test_signature_validity(gen, msg, q.x(), q.y(), r, s, True) + + # Test if the original public key is in the set of found keys + original_q = ellipticcurve.Point(gen.curve(), qx, qy) + points = [pk.point for pk in pks] + assert original_q in points + + +@st.composite +def st_random_gen_key_msg_nonce(draw): + """Hypothesis strategy for test_sig_verify().""" + name_gen = { + "generator_192": generator_192, + "generator_224": generator_224, + "generator_256": generator_256, + "generator_secp256k1": generator_secp256k1, + "generator_384": generator_384, + "generator_521": generator_521, + } + name = draw(st.sampled_from(sorted(name_gen.keys()))) + note("Generator used: {0}".format(name)) + generator = name_gen[name] + order = int(generator.order()) - 1 + + key = draw(st.integers(min_value=1, max_value=order)) + msg = 
draw(st.integers(min_value=1, max_value=order)) + nonce = draw( + st.integers(min_value=1, max_value=order) + | st.integers(min_value=order >> 1, max_value=order) + ) + return generator, key, msg, nonce + + +SIG_VER_SETTINGS = dict(HYP_SETTINGS) +if "--fast" in sys.argv: # pragma: no cover + SIG_VER_SETTINGS["max_examples"] = 1 +else: + SIG_VER_SETTINGS["max_examples"] = 10 + + +@settings(**SIG_VER_SETTINGS) +@example((generator_224, 4, 1, 1)) +@given(st_random_gen_key_msg_nonce()) +def test_sig_verify(args): + """ + Check if signing and verification works for arbitrary messages and + that signatures for other messages are rejected. + """ + generator, sec_mult, msg, nonce = args + + pubkey = Public_key(generator, generator * sec_mult) + privkey = Private_key(pubkey, sec_mult) + + signature = privkey.sign(msg, nonce) + + assert pubkey.verifies(msg, signature) + + assert not pubkey.verifies(msg - 1, signature) + + +def test_int_to_string_with_zero(): + with pytest.warns(DeprecationWarning) as warns: + assert int_to_string(0) == b"\x00" + + assert len(warns) == 1 + assert "unused" in warns[0].message.args[0] diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_eddsa.py b/venv/lib/python3.12/site-packages/ecdsa/test_eddsa.py new file mode 100644 index 0000000..6821b3b --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_eddsa.py @@ -0,0 +1,1124 @@ +import sys +import pickle +import hashlib +import pytest + +try: + import unittest2 as unittest +except ImportError: + import unittest +from hypothesis import given, settings, example +import hypothesis.strategies as st +from .ellipticcurve import PointEdwards, INFINITY, CurveEdTw +from .eddsa import ( + generator_ed25519, + curve_ed25519, + generator_ed448, + curve_ed448, + PrivateKey, + PublicKey, +) +from .ecdsa import generator_256, curve_256 +from .errors import MalformedPointError +from ._compat import a2b_hex, compat26_str + + +class TestA2B_Hex(unittest.TestCase): + def test_invalid_input(self): + with self.assertRaises(ValueError): + a2b_hex("abcdefghi") + + +def test_ed25519_curve_compare(): + assert curve_ed25519 != curve_256 + + +def test_ed25519_and_ed448_compare(): + assert curve_ed448 != curve_ed25519 + + +def test_ed25519_and_custom_curve_compare(): + a = CurveEdTw(curve_ed25519.p(), -curve_ed25519.a(), 1) + + assert curve_ed25519 != a + + +def test_ed25519_and_almost_exact_curve_compare(): + a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), 1) + + assert curve_ed25519 != a + + +def test_ed25519_and_same_curve_params(): + a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), curve_ed25519.d()) + + assert curve_ed25519 == a + assert not (curve_ed25519 != a) + + +def test_ed25519_contains_point(): + g = generator_ed25519 + assert curve_ed25519.contains_point(g.x(), g.y()) + + +def test_ed25519_contains_point_bad(): + assert not curve_ed25519.contains_point(1, 1) + + +def test_ed25519_double(): + a = generator_ed25519 + + z = a.double() + + assert isinstance(z, PointEdwards) + + x2 = int( + "24727413235106541002554574571675588834622768167397638456726423" + "682521233608206" + ) + y2 = int( + "15549675580280190176352668710449542251549572066445060580507079" + "593062643049417" + ) + + b = PointEdwards(curve_ed25519, x2, y2, 1, x2 * y2) + + assert z == b + assert a != b + + +def test_ed25519_add_as_double(): + a = generator_ed25519 + + z = a + a + + assert isinstance(z, PointEdwards) + + b = generator_ed25519.double() + + assert z == b + + +def test_ed25519_double_infinity(): + a = PointEdwards(curve_ed25519, 0, 1, 1, 0) + 
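+ # (x, y) = (0, 1) is the neutral element of a twisted Edwards curve, so doubling it is expected to yield the point at infinity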
+ z = a.double() + + assert z is INFINITY + + +def test_ed25519_double_badly_encoded_infinity(): + # invalid point, mostly to make instrumental happy + a = PointEdwards(curve_ed25519, 1, 1, 1, 0) + + z = a.double() + + assert z is INFINITY + + +def test_ed25519_eq_with_different_z(): + x = generator_ed25519.x() + y = generator_ed25519.y() + p = curve_ed25519.p() + + a = PointEdwards(curve_ed25519, x * 2 % p, y * 2 % p, 2, x * y * 2 % p) + b = PointEdwards(curve_ed25519, x * 3 % p, y * 3 % p, 3, x * y * 3 % p) + + assert a == b + + assert not (a != b) + + +def test_ed25519_eq_against_infinity(): + assert generator_ed25519 != INFINITY + + +def test_ed25519_eq_encoded_infinity_against_infinity(): + a = PointEdwards(curve_ed25519, 0, 1, 1, 0) + assert a == INFINITY + + +def test_ed25519_eq_bad_encode_of_infinity_against_infinity(): + # technically incorrect encoding of the point at infinity, but we check + # both X and T, so verify that just T==0 works + a = PointEdwards(curve_ed25519, 1, 1, 1, 0) + assert a == INFINITY + + +def test_ed25519_eq_against_non_Edwards_point(): + assert generator_ed25519 != generator_256 + + +def test_ed25519_eq_against_negated_point(): + g = generator_ed25519 + neg = PointEdwards(curve_ed25519, -g.x(), g.y(), 1, -g.x() * g.y()) + assert g != neg + + +def test_ed25519_eq_x_different_y(): + # not points on the curve, but __eq__ doesn't care + a = PointEdwards(curve_ed25519, 1, 1, 1, 1) + b = PointEdwards(curve_ed25519, 1, 2, 1, 2) + + assert a != b + + +def test_ed25519_mul_by_order(): + g = PointEdwards( + curve_ed25519, + generator_ed25519.x(), + generator_ed25519.y(), + 1, + generator_ed25519.x() * generator_ed25519.y(), + ) + + assert g * generator_ed25519.order() == INFINITY + + +def test_radd(): + + a = PointEdwards(curve_ed25519, 1, 1, 1, 1) + + p = INFINITY + a + + assert p == a + + +def test_ed25519_test_normalisation_and_scaling(): + x = generator_ed25519.x() + y = generator_ed25519.y() + p = curve_ed25519.p() + + a = PointEdwards(curve_ed25519, x * 11 % p, y * 11 % p, 11, x * y * 11 % p) + + assert a.x() == x + assert a.y() == y + + a.scale() + + assert a.x() == x + assert a.y() == y + + a.scale() # second execution should be a noop + + assert a.x() == x + assert a.y() == y + + +def test_ed25519_add_three_times(): + a = generator_ed25519 + + z = a + a + a + + x3 = int( + "468967334644549386571235445953867877890461982801326656862413" + "21779790909858396" + ) + y3 = int( + "832484377853344397649037712036920113830141722629755531674120" + "2210403726505172" + ) + + b = PointEdwards(curve_ed25519, x3, y3, 1, x3 * y3) + + assert z == b + + +def test_ed25519_add_to_infinity(): + # generator * (order-1) + x1 = int( + "427838232691226969392843410947554224151809796397784248136826" + "78720006717057747" + ) + y1 = int( + "463168356949264781694283940034751631413079938662562256157830" + "33603165251855960" + ) + inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) + + inf = inf_m_1 + generator_ed25519 + + assert inf is INFINITY + + +def test_ed25519_add_and_mul_equivalence(): + g = generator_ed25519 + + assert g + g == g * 2 + assert g + g + g == g * 3 + + +def test_ed25519_add_literal_infinity(): + g = generator_ed25519 + z = g + INFINITY + + assert z == g + + +def test_ed25519_add_infinity(): + inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) + g = generator_ed25519 + z = g + inf + + assert z == g + + z = inf + g + + assert z == g + + +class TestEd25519(unittest.TestCase): + def test_add_wrong_curves(self): + with self.assertRaises(ValueError) as e: + 
generator_ed25519 + generator_ed448 + + self.assertIn("different curve", str(e.exception)) + + def test_add_wrong_point_type(self): + with self.assertRaises(ValueError) as e: + generator_ed25519 + generator_256 + + self.assertIn("different curve", str(e.exception)) + + +def test_generate_with_point(): + x1 = int( + "427838232691226969392843410947554224151809796397784248136826" + "78720006717057747" + ) + y1 = int( + "463168356949264781694283940034751631413079938662562256157830" + "33603165251855960" + ) + p = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) + + pk = PublicKey(generator_ed25519, b"0" * 32, public_point=p) + + assert pk.public_point() == p + + +def test_ed25519_mul_to_order_min_1(): + x1 = int( + "427838232691226969392843410947554224151809796397784248136826" + "78720006717057747" + ) + y1 = int( + "463168356949264781694283940034751631413079938662562256157830" + "33603165251855960" + ) + inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) + + assert generator_ed25519 * (generator_ed25519.order() - 1) == inf_m_1 + + +def test_ed25519_mul_to_infinity(): + assert generator_ed25519 * generator_ed25519.order() == INFINITY + + +def test_ed25519_mul_to_infinity_plus_1(): + g = generator_ed25519 + assert g * (g.order() + 1) == g + + +def test_ed25519_mul_and_add(): + g = generator_ed25519 + a = g * 128 + b = g * 64 + g * 64 + + assert a == b + + +def test_ed25519_mul_and_add_2(): + g = generator_ed25519 + + a = g * 123 + b = g * 120 + g * 3 + + assert a == b + + +def test_ed25519_mul_infinity(): + inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) + + z = inf * 11 + + assert z == INFINITY + + +def test_ed25519_mul_by_zero(): + z = generator_ed25519 * 0 + + assert z == INFINITY + + +def test_ed25519_mul_by_one(): + z = generator_ed25519 * 1 + + assert z == generator_ed25519 + + +def test_ed25519_mul_custom_point(): + # verify that multiplication without order set works + + g = generator_ed25519 + + a = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) + + z = a * 11 + + assert z == g * 11 + + +def test_ed25519_pickle(): + g = generator_ed25519 + assert pickle.loads(pickle.dumps(g)) == g + + +def test_ed448_eq_against_different_curve(): + assert generator_ed25519 != generator_ed448 + + +def test_ed448_double(): + g = generator_ed448 + z = g.double() + + assert isinstance(z, PointEdwards) + + x2 = int( + "4845591495304045936995492052586696895690942404582120401876" + "6013278705691214670908136440114445572635086627683154494739" + "7859048262938744149" + ) + y2 = int( + "4940887598674337276743026725267350893505445523037277237461" + "2648447308771911703729389009346215770388834286503647778745" + "3078312060500281069" + ) + + b = PointEdwards(curve_ed448, x2, y2, 1, x2 * y2) + + assert z == b + assert g != b + + +def test_ed448_add_as_double(): + g = generator_ed448 + z = g + g + + b = g.double() + + assert z == b + + +def test_ed448_mul_as_double(): + g = generator_ed448 + z = g * 2 + b = g.double() + + assert z == b + + +def test_ed448_add_to_infinity(): + # generator * (order - 1) + x1 = int( + "5022586839996825903617194737881084981068517190547539260353" + "6473749366191269932473977736719082931859264751085238669719" + "1187378895383117729" + ) + y1 = int( + "2988192100784814926760179304439306734375440401540802420959" + "2824137233150618983587600353687865541878473398230323350346" + "2500531545062832660" + ) + inf_m_1 = PointEdwards(curve_ed448, x1, y1, 1, x1 * y1) + + inf = inf_m_1 + generator_ed448 + + assert inf is INFINITY + + +def test_ed448_mul_to_infinity(): + g = 
generator_ed448 + inf = g * g.order() + + assert inf is INFINITY + + +def test_ed448_mul_to_infinity_plus_1(): + g = generator_ed448 + + z = g * (g.order() + 1) + + assert z == g + + +def test_ed448_add_and_mul_equivalence(): + g = generator_ed448 + + assert g + g == g * 2 + assert g + g + g == g * 3 + + +def test_ed25519_encode(): + g = generator_ed25519 + g_bytes = g.to_bytes() + assert len(g_bytes) == 32 + exp_bytes = ( + b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + ) + assert g_bytes == exp_bytes + + +def test_ed25519_decode(): + exp_bytes = ( + b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + ) + a = PointEdwards.from_bytes(curve_ed25519, exp_bytes) + + assert a == generator_ed25519 + + +class TestEdwardsMalformed(unittest.TestCase): + def test_invalid_point(self): + exp_bytes = ( + b"\x78\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + ) + with self.assertRaises(MalformedPointError): + PointEdwards.from_bytes(curve_ed25519, exp_bytes) + + def test_invalid_length(self): + exp_bytes = ( + b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" + b"\x66" + ) + with self.assertRaises(MalformedPointError) as e: + PointEdwards.from_bytes(curve_ed25519, exp_bytes) + + self.assertIn("length", str(e.exception)) + + def test_ed448_invalid(self): + exp_bytes = b"\xff" * 57 + with self.assertRaises(MalformedPointError): + PointEdwards.from_bytes(curve_ed448, exp_bytes) + + +def test_ed448_encode(): + g = generator_ed448 + g_bytes = g.to_bytes() + assert len(g_bytes) == 57 + exp_bytes = ( + b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" + b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" + b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" + b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" + ) + assert g_bytes == exp_bytes + + +def test_ed448_decode(): + exp_bytes = ( + b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" + b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" + b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" + b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" + ) + + a = PointEdwards.from_bytes(curve_ed448, exp_bytes) + + assert a == generator_ed448 + + +class TestEdDSAEquality(unittest.TestCase): + def test_equal_public_points(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = PublicKey(generator_ed25519, b"\x01" * 32) + + self.assertEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertFalse(key1 != key2) + + def test_unequal_public_points(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = PublicKey(generator_ed25519, b"\x03" * 32) + + self.assertNotEqual(key1, key2) + + def test_unequal_to_string(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = b"\x01" * 32 + + self.assertNotEqual(key1, key2) + + def test_unequal_publickey_curves(self): + key1 = PublicKey(generator_ed25519, b"\x01" * 32) + key2 = PublicKey(generator_ed448, b"\x03" * 56 + b"\x00") + + self.assertNotEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertTrue(key1 != key2) + + def test_equal_private_keys(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = 
PrivateKey(generator_ed25519, b"\x01" * 32) + + self.assertEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertFalse(key1 != key2) + + def test_unequal_private_keys(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = PrivateKey(generator_ed25519, b"\x02" * 32) + + self.assertNotEqual(key1, key2) + # verify that `__ne__` works as expected + self.assertTrue(key1 != key2) + + def test_unequal_privatekey_to_string(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = b"\x01" * 32 + + self.assertNotEqual(key1, key2) + + def test_unequal_privatekey_curves(self): + key1 = PrivateKey(generator_ed25519, b"\x01" * 32) + key2 = PrivateKey(generator_ed448, b"\x01" * 57) + + self.assertNotEqual(key1, key2) + + +class TestInvalidEdDSAInputs(unittest.TestCase): + def test_wrong_length_of_private_key(self): + with self.assertRaises(ValueError): + PrivateKey(generator_ed25519, b"\x01" * 31) + + def test_wrong_length_of_public_key(self): + with self.assertRaises(ValueError): + PublicKey(generator_ed25519, b"\x01" * 33) + + def test_wrong_cofactor_curve(self): + ed_c = curve_ed25519 + + def _hash(data): + return hashlib.new("sha512", compat26_str(data)).digest() + + curve = CurveEdTw(ed_c.p(), ed_c.a(), ed_c.d(), 1, _hash) + g = generator_ed25519 + fake_gen = PointEdwards(curve, g.x(), g.y(), 1, g.x() * g.y()) + + with self.assertRaises(ValueError) as e: + PrivateKey(fake_gen, g.to_bytes()) + + self.assertIn("cofactor", str(e.exception)) + + def test_invalid_signature_length(self): + key = PublicKey(generator_ed25519, b"\x01" * 32) + + with self.assertRaises(ValueError) as e: + key.verify(b"", b"\x01" * 65) + + self.assertIn("length", str(e.exception)) + + def test_changing_public_key(self): + key = PublicKey(generator_ed25519, b"\x01" * 32) + + g = key.point + + new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) + + key.point = new_g + + self.assertEqual(g, key.point) + + def test_changing_public_key_to_different_point(self): + key = PublicKey(generator_ed25519, b"\x01" * 32) + + with self.assertRaises(ValueError) as e: + key.point = generator_ed25519 + + self.assertIn("coordinates", str(e.exception)) + + def test_invalid_s_value(self): + key = PublicKey( + generator_ed25519, + b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" + b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", + ) + sig_valid = bytearray( + b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" + b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" + b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" + b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" + ) + + self.assertTrue(key.verify(b"", sig_valid)) + + sig_invalid = bytearray(sig_valid) + sig_invalid[-1] = 0xFF + + with self.assertRaises(ValueError): + key.verify(b"", sig_invalid) + + def test_invalid_r_value(self): + key = PublicKey( + generator_ed25519, + b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" + b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", + ) + sig_valid = bytearray( + b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" + b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" + b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" + b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" + ) + + self.assertTrue(key.verify(b"", sig_valid)) + + sig_invalid = bytearray(sig_valid) + sig_invalid[0] = 0xE0 + + with 
self.assertRaises(ValueError): + key.verify(b"", sig_invalid) + + +HYP_SETTINGS = dict() +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 2 +else: + HYP_SETTINGS["max_examples"] = 10 + + +@settings(**HYP_SETTINGS) +@example(1) +@example(5) # smallest multiple that requires changing sign of x +@given(st.integers(min_value=1, max_value=int(generator_ed25519.order() - 1))) +def test_ed25519_encode_decode(multiple): + a = generator_ed25519 * multiple + + b = PointEdwards.from_bytes(curve_ed25519, a.to_bytes()) + + assert a == b + + +@settings(**HYP_SETTINGS) +@example(1) +@example(2) # smallest multiple that requires changing the sign of x +@given(st.integers(min_value=1, max_value=int(generator_ed448.order() - 1))) +def test_ed448_encode_decode(multiple): + a = generator_ed448 * multiple + + b = PointEdwards.from_bytes(curve_ed448, a.to_bytes()) + + assert a == b + + +@settings(**HYP_SETTINGS) +@example(1) +@example(2) +@given(st.integers(min_value=1, max_value=int(generator_ed25519.order()) - 1)) +def test_ed25519_mul_precompute_vs_naf(multiple): + """Compare multiplication with and without precomputation.""" + g = generator_ed25519 + new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) + + assert g * multiple == multiple * new_g + + +# Test vectors from RFC 8032 +TEST_VECTORS = [ + # TEST 1 + ( + generator_ed25519, + "9d61b19deffd5a60ba844af492ec2cc4" "4449c5697b326919703bac031cae7f60", + "d75a980182b10ab7d54bfed3c964073a" "0ee172f3daa62325af021a68f707511a", + "", + "e5564300c360ac729086e2cc806e828a" + "84877f1eb8e5d974d873e06522490155" + "5fb8821590a33bacc61e39701cf9b46b" + "d25bf5f0595bbe24655141438e7a100b", + ), + # TEST 2 + ( + generator_ed25519, + "4ccd089b28ff96da9db6c346ec114e0f" "5b8a319f35aba624da8cf6ed4fb8a6fb", + "3d4017c3e843895a92b70aa74d1b7ebc" "9c982ccf2ec4968cc0cd55f12af4660c", + "72", + "92a009a9f0d4cab8720e820b5f642540" + "a2b27b5416503f8fb3762223ebdb69da" + "085ac1e43e15996e458f3613d0f11d8c" + "387b2eaeb4302aeeb00d291612bb0c00", + ), + # TEST 3 + ( + generator_ed25519, + "c5aa8df43f9f837bedb7442f31dcb7b1" "66d38535076f094b85ce3a2e0b4458f7", + "fc51cd8e6218a1a38da47ed00230f058" "0816ed13ba3303ac5deb911548908025", + "af82", + "6291d657deec24024827e69c3abe01a3" + "0ce548a284743a445e3680d7db5ac3ac" + "18ff9b538d16f290ae67f760984dc659" + "4a7c15e9716ed28dc027beceea1ec40a", + ), + # TEST 1024 + ( + generator_ed25519, + "f5e5767cf153319517630f226876b86c" "8160cc583bc013744c6bf255f5cc0ee5", + "278117fc144c72340f67d0f2316e8386" "ceffbf2b2428c9c51fef7c597f1d426e", + "08b8b2b733424243760fe426a4b54908" + "632110a66c2f6591eabd3345e3e4eb98" + "fa6e264bf09efe12ee50f8f54e9f77b1" + "e355f6c50544e23fb1433ddf73be84d8" + "79de7c0046dc4996d9e773f4bc9efe57" + "38829adb26c81b37c93a1b270b20329d" + "658675fc6ea534e0810a4432826bf58c" + "941efb65d57a338bbd2e26640f89ffbc" + "1a858efcb8550ee3a5e1998bd177e93a" + "7363c344fe6b199ee5d02e82d522c4fe" + "ba15452f80288a821a579116ec6dad2b" + "3b310da903401aa62100ab5d1a36553e" + "06203b33890cc9b832f79ef80560ccb9" + "a39ce767967ed628c6ad573cb116dbef" + "efd75499da96bd68a8a97b928a8bbc10" + "3b6621fcde2beca1231d206be6cd9ec7" + "aff6f6c94fcd7204ed3455c68c83f4a4" + "1da4af2b74ef5c53f1d8ac70bdcb7ed1" + "85ce81bd84359d44254d95629e9855a9" + "4a7c1958d1f8ada5d0532ed8a5aa3fb2" + "d17ba70eb6248e594e1a2297acbbb39d" + "502f1a8c6eb6f1ce22b3de1a1f40cc24" + "554119a831a9aad6079cad88425de6bd" + "e1a9187ebb6092cf67bf2b13fd65f270" + "88d78b7e883c8759d2c4f5c65adb7553" + "878ad575f9fad878e80a0c9ba63bcbcc" + 
"2732e69485bbc9c90bfbd62481d9089b" + "eccf80cfe2df16a2cf65bd92dd597b07" + "07e0917af48bbb75fed413d238f5555a" + "7a569d80c3414a8d0859dc65a46128ba" + "b27af87a71314f318c782b23ebfe808b" + "82b0ce26401d2e22f04d83d1255dc51a" + "ddd3b75a2b1ae0784504df543af8969b" + "e3ea7082ff7fc9888c144da2af58429e" + "c96031dbcad3dad9af0dcbaaaf268cb8" + "fcffead94f3c7ca495e056a9b47acdb7" + "51fb73e666c6c655ade8297297d07ad1" + "ba5e43f1bca32301651339e22904cc8c" + "42f58c30c04aafdb038dda0847dd988d" + "cda6f3bfd15c4b4c4525004aa06eeff8" + "ca61783aacec57fb3d1f92b0fe2fd1a8" + "5f6724517b65e614ad6808d6f6ee34df" + "f7310fdc82aebfd904b01e1dc54b2927" + "094b2db68d6f903b68401adebf5a7e08" + "d78ff4ef5d63653a65040cf9bfd4aca7" + "984a74d37145986780fc0b16ac451649" + "de6188a7dbdf191f64b5fc5e2ab47b57" + "f7f7276cd419c17a3ca8e1b939ae49e4" + "88acba6b965610b5480109c8b17b80e1" + "b7b750dfc7598d5d5011fd2dcc5600a3" + "2ef5b52a1ecc820e308aa342721aac09" + "43bf6686b64b2579376504ccc493d97e" + "6aed3fb0f9cd71a43dd497f01f17c0e2" + "cb3797aa2a2f256656168e6c496afc5f" + "b93246f6b1116398a346f1a641f3b041" + "e989f7914f90cc2c7fff357876e506b5" + "0d334ba77c225bc307ba537152f3f161" + "0e4eafe595f6d9d90d11faa933a15ef1" + "369546868a7f3a45a96768d40fd9d034" + "12c091c6315cf4fde7cb68606937380d" + "b2eaaa707b4c4185c32eddcdd306705e" + "4dc1ffc872eeee475a64dfac86aba41c" + "0618983f8741c5ef68d3a101e8a3b8ca" + "c60c905c15fc910840b94c00a0b9d0", + "0aab4c900501b3e24d7cdf4663326a3a" + "87df5e4843b2cbdb67cbf6e460fec350" + "aa5371b1508f9f4528ecea23c436d94b" + "5e8fcd4f681e30a6ac00a9704a188a03", + ), + # TEST SHA(abc) + ( + generator_ed25519, + "833fe62409237b9d62ec77587520911e" "9a759cec1d19755b7da901b96dca3d42", + "ec172b93ad5e563bf4932c70e1245034" "c35467ef2efd4d64ebf819683467e2bf", + "ddaf35a193617abacc417349ae204131" + "12e6fa4e89a97ea20a9eeee64b55d39a" + "2192992a274fc1a836ba3c23a3feebbd" + "454d4423643ce80e2a9ac94fa54ca49f", + "dc2a4459e7369633a52b1bf277839a00" + "201009a3efbf3ecb69bea2186c26b589" + "09351fc9ac90b3ecfdfbc7c66431e030" + "3dca179c138ac17ad9bef1177331a704", + ), + # Blank + ( + generator_ed448, + "6c82a562cb808d10d632be89c8513ebf" + "6c929f34ddfa8c9f63c9960ef6e348a3" + "528c8a3fcc2f044e39a3fc5b94492f8f" + "032e7549a20098f95b", + "5fd7449b59b461fd2ce787ec616ad46a" + "1da1342485a70e1f8a0ea75d80e96778" + "edf124769b46c7061bd6783df1e50f6c" + "d1fa1abeafe8256180", + "", + "533a37f6bbe457251f023c0d88f976ae" + "2dfb504a843e34d2074fd823d41a591f" + "2b233f034f628281f2fd7a22ddd47d78" + "28c59bd0a21bfd3980ff0d2028d4b18a" + "9df63e006c5d1c2d345b925d8dc00b41" + "04852db99ac5c7cdda8530a113a0f4db" + "b61149f05a7363268c71d95808ff2e65" + "2600", + ), + # 1 octet + ( + generator_ed448, + "c4eab05d357007c632f3dbb48489924d" + "552b08fe0c353a0d4a1f00acda2c463a" + "fbea67c5e8d2877c5e3bc397a659949e" + "f8021e954e0a12274e", + "43ba28f430cdff456ae531545f7ecd0a" + "c834a55d9358c0372bfa0c6c6798c086" + "6aea01eb00742802b8438ea4cb82169c" + "235160627b4c3a9480", + "03", + "26b8f91727bd62897af15e41eb43c377" + "efb9c610d48f2335cb0bd0087810f435" + "2541b143c4b981b7e18f62de8ccdf633" + "fc1bf037ab7cd779805e0dbcc0aae1cb" + "cee1afb2e027df36bc04dcecbf154336" + "c19f0af7e0a6472905e799f1953d2a0f" + "f3348ab21aa4adafd1d234441cf807c0" + "3a00", + ), + # 11 octets + ( + generator_ed448, + "cd23d24f714274e744343237b93290f5" + "11f6425f98e64459ff203e8985083ffd" + "f60500553abc0e05cd02184bdb89c4cc" + "d67e187951267eb328", + "dcea9e78f35a1bf3499a831b10b86c90" + "aac01cd84b67a0109b55a36e9328b1e3" + "65fce161d71ce7131a543ea4cb5f7e9f" + "1d8b00696447001400", + "0c3e544074ec63b0265e0c", 
+ "1f0a8888ce25e8d458a21130879b840a" + "9089d999aaba039eaf3e3afa090a09d3" + "89dba82c4ff2ae8ac5cdfb7c55e94d5d" + "961a29fe0109941e00b8dbdeea6d3b05" + "1068df7254c0cdc129cbe62db2dc957d" + "bb47b51fd3f213fb8698f064774250a5" + "028961c9bf8ffd973fe5d5c206492b14" + "0e00", + ), + # 12 octets + ( + generator_ed448, + "258cdd4ada32ed9c9ff54e63756ae582" + "fb8fab2ac721f2c8e676a72768513d93" + "9f63dddb55609133f29adf86ec9929dc" + "cb52c1c5fd2ff7e21b", + "3ba16da0c6f2cc1f30187740756f5e79" + "8d6bc5fc015d7c63cc9510ee3fd44adc" + "24d8e968b6e46e6f94d19b945361726b" + "d75e149ef09817f580", + "64a65f3cdedcdd66811e2915", + "7eeeab7c4e50fb799b418ee5e3197ff6" + "bf15d43a14c34389b59dd1a7b1b85b4a" + "e90438aca634bea45e3a2695f1270f07" + "fdcdf7c62b8efeaf00b45c2c96ba457e" + "b1a8bf075a3db28e5c24f6b923ed4ad7" + "47c3c9e03c7079efb87cb110d3a99861" + "e72003cbae6d6b8b827e4e6c143064ff" + "3c00", + ), + # 13 octets + ( + generator_ed448, + "7ef4e84544236752fbb56b8f31a23a10" + "e42814f5f55ca037cdcc11c64c9a3b29" + "49c1bb60700314611732a6c2fea98eeb" + "c0266a11a93970100e", + "b3da079b0aa493a5772029f0467baebe" + "e5a8112d9d3a22532361da294f7bb381" + "5c5dc59e176b4d9f381ca0938e13c6c0" + "7b174be65dfa578e80", + "64a65f3cdedcdd66811e2915e7", + "6a12066f55331b6c22acd5d5bfc5d712" + "28fbda80ae8dec26bdd306743c5027cb" + "4890810c162c027468675ecf645a8317" + "6c0d7323a2ccde2d80efe5a1268e8aca" + "1d6fbc194d3f77c44986eb4ab4177919" + "ad8bec33eb47bbb5fc6e28196fd1caf5" + "6b4e7e0ba5519234d047155ac727a105" + "3100", + ), + # 64 octets + ( + generator_ed448, + "d65df341ad13e008567688baedda8e9d" + "cdc17dc024974ea5b4227b6530e339bf" + "f21f99e68ca6968f3cca6dfe0fb9f4fa" + "b4fa135d5542ea3f01", + "df9705f58edbab802c7f8363cfe5560a" + "b1c6132c20a9f1dd163483a26f8ac53a" + "39d6808bf4a1dfbd261b099bb03b3fb5" + "0906cb28bd8a081f00", + "bd0f6a3747cd561bdddf4640a332461a" + "4a30a12a434cd0bf40d766d9c6d458e5" + "512204a30c17d1f50b5079631f64eb31" + "12182da3005835461113718d1a5ef944", + "554bc2480860b49eab8532d2a533b7d5" + "78ef473eeb58c98bb2d0e1ce488a98b1" + "8dfde9b9b90775e67f47d4a1c3482058" + "efc9f40d2ca033a0801b63d45b3b722e" + "f552bad3b4ccb667da350192b61c508c" + "f7b6b5adadc2c8d9a446ef003fb05cba" + "5f30e88e36ec2703b349ca229c267083" + "3900", + ), + # 256 octets + ( + generator_ed448, + "2ec5fe3c17045abdb136a5e6a913e32a" + "b75ae68b53d2fc149b77e504132d3756" + "9b7e766ba74a19bd6162343a21c8590a" + "a9cebca9014c636df5", + "79756f014dcfe2079f5dd9e718be4171" + "e2ef2486a08f25186f6bff43a9936b9b" + "fe12402b08ae65798a3d81e22e9ec80e" + "7690862ef3d4ed3a00", + "15777532b0bdd0d1389f636c5f6b9ba7" + "34c90af572877e2d272dd078aa1e567c" + "fa80e12928bb542330e8409f31745041" + "07ecd5efac61ae7504dabe2a602ede89" + "e5cca6257a7c77e27a702b3ae39fc769" + "fc54f2395ae6a1178cab4738e543072f" + "c1c177fe71e92e25bf03e4ecb72f47b6" + "4d0465aaea4c7fad372536c8ba516a60" + "39c3c2a39f0e4d832be432dfa9a706a6" + "e5c7e19f397964ca4258002f7c0541b5" + "90316dbc5622b6b2a6fe7a4abffd9610" + "5eca76ea7b98816af0748c10df048ce0" + "12d901015a51f189f3888145c03650aa" + "23ce894c3bd889e030d565071c59f409" + "a9981b51878fd6fc110624dcbcde0bf7" + "a69ccce38fabdf86f3bef6044819de11", + "c650ddbb0601c19ca11439e1640dd931" + "f43c518ea5bea70d3dcde5f4191fe53f" + "00cf966546b72bcc7d58be2b9badef28" + "743954e3a44a23f880e8d4f1cfce2d7a" + "61452d26da05896f0a50da66a239a8a1" + "88b6d825b3305ad77b73fbac0836ecc6" + "0987fd08527c1a8e80d5823e65cafe2a" + "3d00", + ), + # 1023 octets + ( + generator_ed448, + "872d093780f5d3730df7c212664b37b8" + "a0f24f56810daa8382cd4fa3f77634ec" + "44dc54f1c2ed9bea86fafb7632d8be19" + 
"9ea165f5ad55dd9ce8", + "a81b2e8a70a5ac94ffdbcc9badfc3feb" + "0801f258578bb114ad44ece1ec0e799d" + "a08effb81c5d685c0c56f64eecaef8cd" + "f11cc38737838cf400", + "6ddf802e1aae4986935f7f981ba3f035" + "1d6273c0a0c22c9c0e8339168e675412" + "a3debfaf435ed651558007db4384b650" + "fcc07e3b586a27a4f7a00ac8a6fec2cd" + "86ae4bf1570c41e6a40c931db27b2faa" + "15a8cedd52cff7362c4e6e23daec0fbc" + "3a79b6806e316efcc7b68119bf46bc76" + "a26067a53f296dafdbdc11c77f7777e9" + "72660cf4b6a9b369a6665f02e0cc9b6e" + "dfad136b4fabe723d2813db3136cfde9" + "b6d044322fee2947952e031b73ab5c60" + "3349b307bdc27bc6cb8b8bbd7bd32321" + "9b8033a581b59eadebb09b3c4f3d2277" + "d4f0343624acc817804728b25ab79717" + "2b4c5c21a22f9c7839d64300232eb66e" + "53f31c723fa37fe387c7d3e50bdf9813" + "a30e5bb12cf4cd930c40cfb4e1fc6225" + "92a49588794494d56d24ea4b40c89fc0" + "596cc9ebb961c8cb10adde976a5d602b" + "1c3f85b9b9a001ed3c6a4d3b1437f520" + "96cd1956d042a597d561a596ecd3d173" + "5a8d570ea0ec27225a2c4aaff26306d1" + "526c1af3ca6d9cf5a2c98f47e1c46db9" + "a33234cfd4d81f2c98538a09ebe76998" + "d0d8fd25997c7d255c6d66ece6fa56f1" + "1144950f027795e653008f4bd7ca2dee" + "85d8e90f3dc315130ce2a00375a318c7" + "c3d97be2c8ce5b6db41a6254ff264fa6" + "155baee3b0773c0f497c573f19bb4f42" + "40281f0b1f4f7be857a4e59d416c06b4" + "c50fa09e1810ddc6b1467baeac5a3668" + "d11b6ecaa901440016f389f80acc4db9" + "77025e7f5924388c7e340a732e554440" + "e76570f8dd71b7d640b3450d1fd5f041" + "0a18f9a3494f707c717b79b4bf75c984" + "00b096b21653b5d217cf3565c9597456" + "f70703497a078763829bc01bb1cbc8fa" + "04eadc9a6e3f6699587a9e75c94e5bab" + "0036e0b2e711392cff0047d0d6b05bd2" + "a588bc109718954259f1d86678a579a3" + "120f19cfb2963f177aeb70f2d4844826" + "262e51b80271272068ef5b3856fa8535" + "aa2a88b2d41f2a0e2fda7624c2850272" + "ac4a2f561f8f2f7a318bfd5caf969614" + "9e4ac824ad3460538fdc25421beec2cc" + "6818162d06bbed0c40a387192349db67" + "a118bada6cd5ab0140ee273204f628aa" + "d1c135f770279a651e24d8c14d75a605" + "9d76b96a6fd857def5e0b354b27ab937" + "a5815d16b5fae407ff18222c6d1ed263" + "be68c95f32d908bd895cd76207ae7264" + "87567f9a67dad79abec316f683b17f2d" + "02bf07e0ac8b5bc6162cf94697b3c27c" + "d1fea49b27f23ba2901871962506520c" + "392da8b6ad0d99f7013fbc06c2c17a56" + "9500c8a7696481c1cd33e9b14e40b82e" + "79a5f5db82571ba97bae3ad3e0479515" + "bb0e2b0f3bfcd1fd33034efc6245eddd" + "7ee2086ddae2600d8ca73e214e8c2b0b" + "db2b047c6a464a562ed77b73d2d841c4" + "b34973551257713b753632efba348169" + "abc90a68f42611a40126d7cb21b58695" + "568186f7e569d2ff0f9e745d0487dd2e" + "b997cafc5abf9dd102e62ff66cba87", + "e301345a41a39a4d72fff8df69c98075" + "a0cc082b802fc9b2b6bc503f926b65bd" + "df7f4c8f1cb49f6396afc8a70abe6d8a" + "ef0db478d4c6b2970076c6a0484fe76d" + "76b3a97625d79f1ce240e7c576750d29" + "5528286f719b413de9ada3e8eb78ed57" + "3603ce30d8bb761785dc30dbc320869e" + "1a00", + ), +] + + +@pytest.mark.parametrize( + "generator,private_key,public_key,message,signature", + TEST_VECTORS, +) +def test_vectors(generator, private_key, public_key, message, signature): + private_key = a2b_hex(private_key) + public_key = a2b_hex(public_key) + message = a2b_hex(message) + signature = a2b_hex(signature) + + sig_key = PrivateKey(generator, private_key) + ver_key = PublicKey(generator, public_key) + + assert sig_key.public_key().public_key() == ver_key.public_key() + + gen_sig = sig_key.sign(message) + + assert gen_sig == signature + + assert ver_key.verify(message, signature) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_ellipticcurve.py b/venv/lib/python3.12/site-packages/ecdsa/test_ellipticcurve.py new file mode 100644 
index 0000000..864cf10 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_ellipticcurve.py @@ -0,0 +1,294 @@ +import pytest + +try: + import unittest2 as unittest +except ImportError: + import unittest +from hypothesis import given, settings +import hypothesis.strategies as st + +try: + from hypothesis import HealthCheck + + HC_PRESENT = True +except ImportError: # pragma: no cover + HC_PRESENT = False +from .numbertheory import inverse_mod +from .ellipticcurve import CurveFp, INFINITY, Point, CurveEdTw + + +HYP_SETTINGS = {} +if HC_PRESENT: # pragma: no branch + HYP_SETTINGS["suppress_health_check"] = [HealthCheck.too_slow] + HYP_SETTINGS["deadline"] = 5000 + + +# NIST Curve P-192: +p = 6277101735386680763835789423207666416083908700390324961279 +r = 6277101735386680763835789423176059013767194773182842284081 +# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 +# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 +b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 +Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 +Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 + +c192 = CurveFp(p, -3, b) +p192 = Point(c192, Gx, Gy, r) + +c_23 = CurveFp(23, 1, 1) +g_23 = Point(c_23, 13, 7, 7) + + +HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) +HYP_SLOW_SETTINGS["max_examples"] = 2 + + +@settings(**HYP_SLOW_SETTINGS) +@given(st.integers(min_value=1, max_value=r - 1)) +def test_p192_mult_tests(multiple): + inv_m = inverse_mod(multiple, r) + + p1 = p192 * multiple + assert p1 * inv_m == p192 + + +def add_n_times(point, n): + ret = INFINITY + i = 0 + while i <= n: + yield ret + ret = ret + point + i += 1 + + +# From X9.62 I.1 (p. 96): +@pytest.mark.parametrize( + "p, m, check", + [(g_23, n, exp) for n, exp in enumerate(add_n_times(g_23, 8))], + ids=["g_23 test with mult {0}".format(i) for i in range(9)], +) +def test_add_and_mult_equivalence(p, m, check): + assert p * m == check + + +class TestCurve(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.c_23 = CurveFp(23, 1, 1) + + def test_equality_curves(self): + self.assertEqual(self.c_23, CurveFp(23, 1, 1)) + + def test_inequality_curves(self): + c192 = CurveFp(p, -3, b) + self.assertNotEqual(self.c_23, c192) + + def test_inequality_curves_by_b_only(self): + a = CurveFp(23, 1, 0) + b = CurveFp(23, 1, 1) + self.assertNotEqual(a, b) + + def test_usability_in_a_hashed_collection_curves(self): + {self.c_23: None} + + def test_hashability_curves(self): + hash(self.c_23) + + def test_conflation_curves(self): + ne1, ne2, ne3 = CurveFp(24, 1, 1), CurveFp(23, 2, 1), CurveFp(23, 1, 2) + eq1, eq2, eq3 = CurveFp(23, 1, 1), CurveFp(23, 1, 1), self.c_23 + self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1) + self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4) + self.assertDictEqual({c_23: None}, {eq1: None}) + self.assertIn(eq2, {eq3: None}) + + def test___str__(self): + self.assertEqual(str(self.c_23), "CurveFp(p=23, a=1, b=1)") + + def test___str___with_cofactor(self): + c = CurveFp(23, 1, 1, 4) + self.assertEqual(str(c), "CurveFp(p=23, a=1, b=1, h=4)") + + +class TestCurveEdTw(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.c_23 = CurveEdTw(23, 1, 1) + + def test___str__(self): + self.assertEqual(str(self.c_23), "CurveEdTw(p=23, a=1, d=1)") + + def test___str___with_cofactor(self): + c = CurveEdTw(23, 1, 1, 4) + self.assertEqual(str(c), "CurveEdTw(p=23, a=1, d=1, h=4)") + + def test_usability_in_a_hashed_collection_curves(self): + {self.c_23: None} + + def test_hashability_curves(self): + hash(self.c_23) + + 
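+# --- editor's note (not part of the upstream test suite) -----------------
+# A minimal sketch of the affine point arithmetic exercised by the test
+# classes in this file, using only the CurveFp/Point/INFINITY API imported
+# at the top of the module; the values come from the c_23/g_23 fixtures
+# defined above:
+#
+#     c = CurveFp(23, 1, 1)       # y**2 = x**3 + 1*x + 1 over GF(23)
+#     g = Point(c, 13, 7, 7)      # base point with group order 7
+#     assert g.double() == g + g  # doubling is adding a point to itself
+#     assert g * 3 == g + g + g   # scalar multiply is repeated addition
+#     assert g * 7 == INFINITY    # order * point gives the neutral element
+# --------------------------------------------------------------------------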
+class TestPoint(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.c_23 = CurveFp(23, 1, 1) + cls.g_23 = Point(cls.c_23, 13, 7, 7) + + p = 6277101735386680763835789423207666416083908700390324961279 + r = 6277101735386680763835789423176059013767194773182842284081 + # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 + # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 + b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 + Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 + Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 + + cls.c192 = CurveFp(p, -3, b) + cls.p192 = Point(cls.c192, Gx, Gy, r) + + def test_p192(self): + # Checking against some sample computations presented + # in X9.62: + d = 651056770906015076056810763456358567190100156695615665659 + Q = d * self.p192 + self.assertEqual( + Q.x(), 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 + ) + + k = 6140507067065001063065065565667405560006161556565665656654 + R = k * self.p192 + self.assertEqual( + R.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD + ) + self.assertEqual( + R.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 + ) + + u1 = 2563697409189434185194736134579731015366492496392189760599 + u2 = 6266643813348617967186477710235785849136406323338782220568 + temp = u1 * self.p192 + u2 * Q + self.assertEqual( + temp.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD + ) + self.assertEqual( + temp.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 + ) + + def test_double_infinity(self): + p1 = INFINITY + p3 = p1.double() + self.assertEqual(p1, p3) + self.assertEqual(p3.x(), p1.x()) + self.assertEqual(p3.y(), p1.y()) + + def test_double(self): + x1, y1, x3, y3 = (3, 10, 7, 12) + + p1 = Point(self.c_23, x1, y1) + p3 = p1.double() + self.assertEqual(p3.x(), x3) + self.assertEqual(p3.y(), y3) + + def test_double_to_infinity(self): + p1 = Point(self.c_23, 11, 20) + p2 = p1.double() + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2.double() + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_add_self_to_infinity(self): + p1 = Point(self.c_23, 11, 20) + p2 = p1 + p1 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 + p2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_mul_to_infinity(self): + p1 = Point(self.c_23, 11, 20) + p2 = p1 * 2 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 * 2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_multiply(self): + x1, y1, m, x3, y3 = (3, 10, 2, 7, 12) + p1 = Point(self.c_23, x1, y1) + p3 = p1 * m + self.assertEqual(p3.x(), x3) + self.assertEqual(p3.y(), y3) + + # Trivial tests from X9.62 B.3: + def test_add(self): + """We expect that on curve c, (x1, y1) + (x2, y2) = (x3, y3).""" + + x1, y1, x2, y2, x3, y3 = (3, 10, 9, 7, 17, 20) + p1 = Point(self.c_23, x1, y1) + p2 = Point(self.c_23, x2, y2) + p3 = p1 + p2 + self.assertEqual(p3.x(), x3) + self.assertEqual(p3.y(), y3) + + def test_add_as_double(self): + """We expect that on curve c, (x1, y1) + (x2, y2) = (x3, y3).""" + + x1, y1, x2, y2, x3, y3 = (3, 10, 3, 10, 7, 12) + p1 = Point(self.c_23, x1, y1) + p2 = Point(self.c_23, x2, y2) + p3 = p1 + p2 + self.assertEqual(p3.x(), x3) + self.assertEqual(p3.y(), y3) + + def test_equality_points(self): + self.assertEqual(self.g_23, Point(self.c_23, 13, 7, 7)) + + def test_inequality_points(self): + c = CurveFp(100, -3, 100) + p = Point(c, 100, 100, 100) + 
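+ # editor's note: Point equality takes the curve into account, so a
+ # point with different coordinates on a different curve must compare
+ # unequal to g_23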
self.assertNotEqual(self.g_23, p) + + def test_inequality_points_diff_types(self): + c = CurveFp(100, -3, 100) + self.assertNotEqual(self.g_23, c) + + def test_inequality_diff_y(self): + p1 = Point(self.c_23, 6, 4) + p2 = Point(self.c_23, 6, 19) + + self.assertNotEqual(p1, p2) + + def test_to_bytes_from_bytes(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(p, Point.from_bytes(self.c_23, p.to_bytes())) + + def test_add_to_neg_self(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(INFINITY, p + (-p)) + + def test_add_to_infinity(self): + p = Point(self.c_23, 3, 10) + + self.assertIs(p, p + INFINITY) + + def test_mul_infinity_by_scalar(self): + self.assertIs(INFINITY, INFINITY * 10) + + def test_mul_by_negative(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(p * -5, (-p) * 5) + + def test_str_infinity(self): + self.assertEqual(str(INFINITY), "infinity") + + def test_str_point(self): + p = Point(self.c_23, 3, 10) + + self.assertEqual(str(p), "(3,10)") diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_jacobi.py b/venv/lib/python3.12/site-packages/ecdsa/test_jacobi.py new file mode 100644 index 0000000..f811b92 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_jacobi.py @@ -0,0 +1,934 @@ +import pickle +import sys + +try: + import unittest2 as unittest +except ImportError: + import unittest + +import os +import signal +import pytest +import threading +import platform +import hypothesis.strategies as st +from hypothesis import given, assume, settings, example + +from .ellipticcurve import CurveFp, PointJacobi, INFINITY, Point +from .ecdsa import ( + generator_256, + curve_256, + generator_224, + generator_brainpoolp160r1, + curve_brainpoolp160r1, + generator_112r2, + curve_112r2, +) +from .numbertheory import inverse_mod +from .util import randrange + + +NO_OLD_SETTINGS = {} +if sys.version_info > (2, 7): # pragma: no branch + NO_OLD_SETTINGS["deadline"] = 5000 + + +SLOW_SETTINGS = {} +if "--fast" in sys.argv: # pragma: no cover + SLOW_SETTINGS["max_examples"] = 2 +else: + SLOW_SETTINGS["max_examples"] = 10 + + +class TestJacobi(unittest.TestCase): + def test___init__(self): + curve = object() + x = 2 + y = 3 + z = 1 + order = 4 + pj = PointJacobi(curve, x, y, z, order) + + self.assertEqual(pj.order(), order) + self.assertIs(pj.curve(), curve) + self.assertEqual(pj.x(), x) + self.assertEqual(pj.y(), y) + + def test_add_with_different_curves(self): + p_a = PointJacobi.from_affine(generator_256) + p_b = PointJacobi.from_affine(generator_224) + + with self.assertRaises(ValueError): # pragma: no branch + p_a + p_b + + def test_compare_different_curves(self): + self.assertNotEqual(generator_256, generator_224) + + def test_equality_with_non_point(self): + pj = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(pj, "value") + + def test_conversion(self): + pj = PointJacobi.from_affine(generator_256) + pw = pj.to_affine() + + self.assertEqual(generator_256, pw) + + def test_single_double(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256.double() + + pj = pj.double() + + self.assertEqual(pj.x(), pw.x()) + self.assertEqual(pj.y(), pw.y()) + + def test_double_with_zero_point(self): + pj = PointJacobi(curve_256, 0, 0, 1) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_double_with_zero_equivalent_point(self): + pj = PointJacobi(curve_256, 0, 0, 0) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_double_with_zero_equivalent_point_non_zero_z_non_zero_y(self): + pj = PointJacobi(curve_256, 
0, 1, curve_256.p()) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_double_with_zero_equivalent_point_non_zero_z(self): + pj = PointJacobi(curve_256, 0, 0, curve_256.p()) + + pj = pj.double() + + self.assertIs(pj, INFINITY) + + def test_compare_with_affine_point(self): + pj = PointJacobi.from_affine(generator_256) + pa = pj.to_affine() + + self.assertEqual(pj, pa) + self.assertEqual(pa, pj) + + def test_to_affine_with_zero_point(self): + pj = PointJacobi(curve_256, 0, 0, 0) + + pa = pj.to_affine() + + self.assertIs(pa, INFINITY) + + def test_add_with_affine_point(self): + pj = PointJacobi.from_affine(generator_256) + pa = pj.to_affine() + + s = pj + pa + + self.assertEqual(s, pj.double()) + + def test_radd_with_affine_point(self): + pj = PointJacobi.from_affine(generator_256) + pa = pj.to_affine() + + s = pa + pj + + self.assertEqual(s, pj.double()) + + def test_add_with_infinity(self): + pj = PointJacobi.from_affine(generator_256) + + s = pj + INFINITY + + self.assertEqual(s, pj) + + def test_add_zero_point_to_affine(self): + pa = PointJacobi.from_affine(generator_256).to_affine() + pj = PointJacobi(curve_256, 0, 0, 0) + + s = pj + pa + + self.assertIs(s, pa) + + def test_multiply_by_zero(self): + pj = PointJacobi.from_affine(generator_256) + + pj = pj * 0 + + self.assertIs(pj, INFINITY) + + def test_zero_point_multiply_by_one(self): + pj = PointJacobi(curve_256, 0, 0, 1) + + pj = pj * 1 + + self.assertIs(pj, INFINITY) + + def test_multiply_by_one(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256 * 1 + + pj = pj * 1 + + self.assertEqual(pj.x(), pw.x()) + self.assertEqual(pj.y(), pw.y()) + + def test_multiply_by_two(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256 * 2 + + pj = pj * 2 + + self.assertEqual(pj.x(), pw.x()) + self.assertEqual(pj.y(), pw.y()) + + def test_rmul_by_two(self): + pj = PointJacobi.from_affine(generator_256) + pw = generator_256 * 2 + + pj = 2 * pj + + self.assertEqual(pj, pw) + + def test_compare_non_zero_with_infinity(self): + pj = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(pj, INFINITY) + + def test_compare_non_zero_bad_scale_with_infinity(self): + pj = PointJacobi(curve_256, 1, 1, 0) + self.assertEqual(pj, INFINITY) + + def test_eq_x_0_on_curve_with_infinity(self): + c_23 = CurveFp(23, 1, 1) + pj = PointJacobi(c_23, 0, 1, 1) + + self.assertTrue(c_23.contains_point(0, 1)) + + self.assertNotEqual(pj, INFINITY) + + def test_eq_y_0_on_curve_with_infinity(self): + c_23 = CurveFp(23, 1, 1) + pj = PointJacobi(c_23, 4, 0, 1) + + self.assertTrue(c_23.contains_point(4, 0)) + + self.assertNotEqual(pj, INFINITY) + + def test_eq_with_same_x_different_y(self): + c_23 = CurveFp(23, 1, 1) + p_a = PointJacobi(c_23, 0, 22, 1) + p_b = PointJacobi(c_23, 0, 1, 1) + + self.assertNotEqual(p_a, p_b) + + def test_compare_zero_point_with_infinity(self): + pj = PointJacobi(curve_256, 0, 0, 0) + + self.assertEqual(pj, INFINITY) + + def test_compare_double_with_multiply(self): + pj = PointJacobi.from_affine(generator_256) + dbl = pj.double() + mlpl = pj * 2 + + self.assertEqual(dbl, mlpl) + + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=0, max_value=int(generator_brainpoolp160r1.order() - 1) + ) + ) + def test_multiplications(self, mul): + pj = PointJacobi.from_affine(generator_brainpoolp160r1) + pw = pj.to_affine() * mul + + pj = pj * mul + + self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y())) + self.assertEqual(pj, pw) + + @settings(**SLOW_SETTINGS) + @given( + st.integers( + 
min_value=0, max_value=int(generator_brainpoolp160r1.order() - 1) + ) + ) + @example(0) + @example(int(generator_brainpoolp160r1.order())) + def test_precompute(self, mul): + precomp = generator_brainpoolp160r1 + self.assertTrue(precomp._PointJacobi__precompute) + pj = PointJacobi.from_affine(generator_brainpoolp160r1) + + a = precomp * mul + b = pj * mul + + self.assertEqual(a, b) + + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + ) + @example(3, 3) + def test_add_scaled_points(self, a_mul, b_mul): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), + ) + def test_add_one_scaled_point(self, a_mul, b_mul, new_z): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + p = curve_brainpoolp160r1.p() + + assume(inverse_mod(new_z, p)) + + new_zz = new_z * new_z % p + + b = PointJacobi( + curve_brainpoolp160r1, + b.x() * new_zz % p, + b.y() * new_zz * new_z % p, + new_z, + ) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + @pytest.mark.slow + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), + ) + @example(1, 1, 1) + @example(3, 3, 3) + @example(2, int(generator_brainpoolp160r1.order() - 2), 1) + @example(2, int(generator_brainpoolp160r1.order() - 2), 3) + def test_add_same_scale_points(self, a_mul, b_mul, new_z): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + p = curve_brainpoolp160r1.p() + + assume(inverse_mod(new_z, p)) + + new_zz = new_z * new_z % p + + a = PointJacobi( + curve_brainpoolp160r1, + a.x() * new_zz % p, + a.y() * new_zz * new_z % p, + new_z, + ) + b = PointJacobi( + curve_brainpoolp160r1, + b.x() * new_zz % p, + b.y() * new_zz * new_z % p, + new_z, + ) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + def test_add_same_scale_points_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + z1 = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1**2 % p, + a.y() * z1**3 % p, + z1, + ) + y = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1**2 % p, + a.y() * z1**3 % p, + z1, + ) + + c = a + a + + self.assertEqual(c, x + y) + + @pytest.mark.slow + @settings(**SLOW_SETTINGS) + @given( + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.integers( + min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) + ), + st.lists( + st.integers( + min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1) + ), + min_size=2, + max_size=2, + unique=True, + ), + ) + @example(2, 2, [2, 1]) + @example(2, 2, [2, 3]) + @example(2, 
int(generator_brainpoolp160r1.order() - 2), [2, 3]) + @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 1]) + def test_add_different_scale_points(self, a_mul, b_mul, new_z): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1) + a = PointJacobi.from_affine(j_g * a_mul) + b = PointJacobi.from_affine(j_g * b_mul) + + p = curve_brainpoolp160r1.p() + + assume(inverse_mod(new_z[0], p)) + assume(inverse_mod(new_z[1], p)) + + new_zz0 = new_z[0] * new_z[0] % p + new_zz1 = new_z[1] * new_z[1] % p + + a = PointJacobi( + curve_brainpoolp160r1, + a.x() * new_zz0 % p, + a.y() * new_zz0 * new_z[0] % p, + new_z[0], + ) + b = PointJacobi( + curve_brainpoolp160r1, + b.x() * new_zz1 % p, + b.y() * new_zz1 * new_z[1] % p, + new_z[1], + ) + + c = a + b + + self.assertEqual(c, j_g * (a_mul + b_mul)) + + def test_add_different_scale_points_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + z1 = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z1**2 % p, + a.y() * z1**3 % p, + z1, + ) + z2 = 29 + y = PointJacobi( + curve_brainpoolp160r1, + a.x() * z2**2 % p, + a.y() * z2**3 % p, + z2, + ) + + c = a + a + + self.assertEqual(c, x + y) + + def test_add_different_points_same_scale_static(self): + j_g = generator_brainpoolp160r1 + p = curve_brainpoolp160r1.p() + a = j_g * 11 + a.scale() + b = j_g * 12 + z = 13 + x = PointJacobi( + curve_brainpoolp160r1, + a.x() * z**2 % p, + a.y() * z**3 % p, + z, + ) + y = PointJacobi( + curve_brainpoolp160r1, + b.x() * z**2 % p, + b.y() * z**3 % p, + z, + ) + + c = a + b + + self.assertEqual(c, x + y) + + def test_add_same_point_different_scale_second_z_1_static(self): + j_g = generator_112r2 + p = curve_112r2.p() + z = 11 + a = j_g * z + a.scale() + + x = PointJacobi( + curve_112r2, + a.x() * z**2 % p, + a.y() * z**3 % p, + z, + ) + y = PointJacobi( + curve_112r2, + a.x(), + a.y(), + 1, + ) + + c = a + a + + self.assertEqual(c, x + y) + + def test_add_to_infinity_static(self): + j_g = generator_112r2 + + z = 11 + a = j_g * z + a.scale() + + b = -a + + x = PointJacobi( + curve_112r2, + a.x(), + a.y(), + 1, + ) + y = PointJacobi( + curve_112r2, + b.x(), + b.y(), + 1, + ) + + self.assertEqual(INFINITY, x + y) + + def test_add_point_3_times(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertEqual(j_g * 3, j_g + j_g + j_g) + + def test_mul_without_order(self): + j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) + + self.assertEqual(j_g * generator_256.order(), INFINITY) + + def test_mul_add_inf(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertEqual(j_g, j_g.mul_add(1, INFINITY, 1)) + + def test_mul_add_same(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1)) + + def test_mul_add_precompute(self): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) + b = PointJacobi.from_affine(j_g * 255, True) + + self.assertEqual(j_g * 256, j_g + b) + self.assertEqual(j_g * (5 + 255 * 7), j_g * 5 + b * 7) + self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7)) + + def test_mul_add_precompute_large(self): + j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) + b = PointJacobi.from_affine(j_g * 255, True) + + self.assertEqual(j_g * 256, j_g + b) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 + ) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) + ) + + def test_mul_add_to_mul(self): + j_g = 
PointJacobi.from_affine(generator_256) + + a = j_g * 3 + b = j_g.mul_add(2, j_g, 1) + + self.assertEqual(a, b) + + def test_mul_add_differnt(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = j_g * 2 + + self.assertEqual(j_g.mul_add(1, w_a, 1), j_g * 3) + + def test_mul_add_slightly_different(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = j_g * 2 + w_b = j_g * 3 + + self.assertEqual(w_a.mul_add(1, w_b, 3), w_a * 1 + w_b * 3) + + def test_mul_add(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = generator_256 * 255 + w_b = generator_256 * (0xA8 * 0xF0) + j_b = j_g * 0xA8 + + ret = j_g.mul_add(255, j_b, 0xF0) + + self.assertEqual(ret.to_affine(), w_a + w_b) + + def test_mul_add_zero(self): + j_g = PointJacobi.from_affine(generator_256) + + w_a = generator_256 * 255 + w_b = generator_256 * (0 * 0xA8) + + j_b = j_g * 0xA8 + + ret = j_g.mul_add(255, j_b, 0) + + self.assertEqual(ret.to_affine(), w_a + w_b) + + def test_mul_add_large(self): + j_g = PointJacobi.from_affine(generator_256) + b = PointJacobi.from_affine(j_g * 255) + + self.assertEqual(j_g * 256, j_g + b) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 + ) + self.assertEqual( + j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) + ) + + def test_mul_add_with_infinity_as_result(self): + j_g = PointJacobi.from_affine(generator_256) + + order = generator_256.order() + + b = PointJacobi.from_affine(generator_256 * 256) + + self.assertEqual(j_g.mul_add(order % 256, b, order // 256), INFINITY) + + def test_mul_add_without_order(self): + j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) + + order = generator_256.order() + + w_b = generator_256 * 34 + w_b.scale() + + b = PointJacobi(curve_256, w_b.x(), w_b.y(), 1) + + self.assertEqual(j_g.mul_add(order % 34, b, order // 34), INFINITY) + + def test_mul_add_with_doubled_negation_of_itself(self): + j_g = PointJacobi.from_affine(generator_256 * 17) + + dbl_neg = 2 * (-j_g) + + self.assertEqual(j_g.mul_add(4, dbl_neg, 2), INFINITY) + + @given( + st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), + st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), + st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), + ) + @example(693, 2, 3293) # values that will hit all the conditions for NAF + def test_mul_add_random(self, mul1, mul2, mul3): + p_a = PointJacobi.from_affine(generator_112r2) + p_b = generator_112r2 * mul2 + + res = p_a.mul_add(mul1, p_b, mul3) + + self.assertEqual(res, p_a * mul1 + p_b * mul3) + + def test_equality(self): + pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + pj2 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + self.assertEqual(pj1, pj2) + + def test_equality_with_invalid_object(self): + j_g = PointJacobi.from_affine(generator_256) + + self.assertNotEqual(j_g, 12) + + def test_equality_with_wrong_curves(self): + p_a = PointJacobi.from_affine(generator_256) + p_b = PointJacobi.from_affine(generator_224) + + self.assertNotEqual(p_a, p_b) + + def test_add_with_point_at_infinity(self): + pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + x, y, z = pj1._add(2, 3, 1, 5, 5, 0, 23) + + self.assertEqual((x, y, z), (2, 3, 1)) + + def test_double_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 11, 20, 1) + p2 = p.double() + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2.double() + self.assertEqual(p3, INFINITY) 
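+ # editor's note: (4, 0) has y == 0, so its tangent is vertical and
+ # doubling it lands on the point at infinity; the assertIs below also
+ # checks that the INFINITY singleton itself is returned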
+ self.assertIs(p3, INFINITY) + + def test_double_to_x_0(self): + c_23_2 = CurveFp(23, 1, 2) + p = PointJacobi(c_23_2, 9, 2, 1) + p2 = p.double() + + self.assertEqual((p2.x(), p2.y()), (0, 18)) + + def test_mul_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 11, 20, 1) + p2 = p * 2 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 * 2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_add_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 11, 20, 1) + p2 = p + p + self.assertEqual((p2.x(), p2.y()), (4, 0)) + self.assertNotEqual(p2, INFINITY) + p3 = p2 + p2 + self.assertEqual(p3, INFINITY) + self.assertIs(p3, INFINITY) + + def test_mul_to_x_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 13 + self.assertEqual((p2.x(), p2.y()), (0, 22)) + + def test_mul_to_y_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 14 + self.assertEqual((p2.x(), p2.y()), (4, 0)) + + def test_add_to_x_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 12 + p + self.assertEqual((p2.x(), p2.y()), (0, 22)) + + def test_add_to_y_0(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + p2 = p * 13 + p + self.assertEqual((p2.x(), p2.y()), (4, 0)) + + def test_add_diff_z_to_infinity(self): + c_23 = CurveFp(23, 1, 1) + p = PointJacobi(c_23, 9, 7, 1) + + c = p * 20 + p * 8 + self.assertIs(c, INFINITY) + + def test_pickle(self): + pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) + self.assertEqual(pickle.loads(pickle.dumps(pj)), pj) + + @pytest.mark.slow + @settings(**NO_OLD_SETTINGS) + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", + reason="threading on PyPy breaks coverage", + ) + @given(st.integers(min_value=1, max_value=10)) + def test_multithreading(self, thread_num): # pragma: no cover + # ensure that generator's precomputation table is filled + generator_112r2 * 2 + + # create a fresh point that doesn't have a filled precomputation table + gen = generator_112r2 + gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) + + self.assertEqual(gen._PointJacobi__precompute, []) + + def runner(generator): + order = generator.order() + for _ in range(10): + generator * randrange(order) + + threads = [] + for _ in range(thread_num): + threads.append(threading.Thread(target=runner, args=(gen,))) + + for t in threads: + t.start() + + runner(gen) + + for t in threads: + t.join() + + self.assertEqual( + gen._PointJacobi__precompute, + generator_112r2._PointJacobi__precompute, + ) + + @pytest.mark.slow + @pytest.mark.skipif( + platform.system() == "Windows" + or platform.python_implementation() == "PyPy", + reason="there are no signals on Windows, and threading breaks coverage" + " on PyPy", + ) + def test_multithreading_with_interrupts(self): # pragma: no cover + thread_num = 10 + # ensure that generator's precomputation table is filled + generator_112r2 * 2 + + # create a fresh point that doesn't have a filled precomputation table + gen = generator_112r2 + gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) + + self.assertEqual(gen._PointJacobi__precompute, []) + + def runner(generator): + order = generator.order() + for _ in range(50): + generator * randrange(order) + + def interrupter(barrier_start, barrier_end, lock_exit): + # wait until MainThread can handle KeyboardInterrupt + barrier_start.release() + barrier_end.acquire() + 
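+ # editor's note: SIGINT sent to our own PID is delivered to the main
+ # thread as KeyboardInterrupt while the worker threads keep computing,
+ # which is exactly the interruption scenario this test exercises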
os.kill(os.getpid(), signal.SIGINT) + lock_exit.release() + + threads = [] + for _ in range(thread_num): + threads.append(threading.Thread(target=runner, args=(gen,))) + + barrier_start = threading.Lock() + barrier_start.acquire() + barrier_end = threading.Lock() + barrier_end.acquire() + lock_exit = threading.Lock() + lock_exit.acquire() + + threads.append( + threading.Thread( + target=interrupter, + args=(barrier_start, barrier_end, lock_exit), + ) + ) + + for t in threads: + t.start() + + with self.assertRaises(KeyboardInterrupt): + # signal to interrupter that we can now handle the signal + barrier_start.acquire() + barrier_end.release() + runner(gen) + # use the lock to ensure we never go past the scope of + # assertRaises before the os.kill is called + lock_exit.acquire() + + for t in threads: + t.join() + + self.assertEqual( + gen._PointJacobi__precompute, + generator_112r2._PointJacobi__precompute, + ) + + +class TestZeroCurve(unittest.TestCase): + """Tests with curve that has (0, 0) on the curve.""" + + def setUp(self): + self.curve = CurveFp(23, 1, 0) + + def test_zero_point_on_curve(self): + self.assertTrue(self.curve.contains_point(0, 0)) + + def test_double_to_0_0_point(self): + p = PointJacobi(self.curve, 1, 18, 1) + + d = p.double() + + self.assertNotEqual(d, INFINITY) + self.assertEqual((0, 0), (d.x(), d.y())) + + def test_double_to_0_0_point_with_non_one_z(self): + z = 2 + p = PointJacobi(self.curve, 1 * z**2, 18 * z**3, z) + + d = p.double() + + self.assertNotEqual(d, INFINITY) + self.assertEqual((0, 0), (d.x(), d.y())) + + def test_mul_to_0_0_point(self): + p = PointJacobi(self.curve, 11, 13, 1) + + d = p * 12 + + self.assertNotEqual(d, INFINITY) + self.assertEqual((0, 0), (d.x(), d.y())) + + def test_double_of_0_0_point(self): + p = PointJacobi(self.curve, 0, 0, 1) + + d = p.double() + + self.assertIs(d, INFINITY) + + def test_compare_to_old_implementation(self): + p = PointJacobi(self.curve, 11, 13, 1) + p_c = Point(self.curve, 11, 13) + + for i in range(24): + self.assertEqual(p * i, p_c * i) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_keys.py b/venv/lib/python3.12/site-packages/ecdsa/test_keys.py new file mode 100644 index 0000000..348475e --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_keys.py @@ -0,0 +1,1138 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest + +try: + buffer +except NameError: + buffer = memoryview + +import os +import array +import pytest +import hashlib + +from .keys import ( + VerifyingKey, + SigningKey, + MalformedPointError, + BadSignatureError, +) +from .der import ( + unpem, + UnexpectedDER, + encode_sequence, + encode_oid, + encode_bitstring, + encode_integer, + encode_octet_string, +) +from .util import ( + sigencode_string, + sigencode_der, + sigencode_strings, + sigdecode_string, + sigdecode_der, + sigdecode_strings, +) +from .curves import NIST256p, Curve, BRAINPOOLP160r1, Ed25519, Ed448 +from .ellipticcurve import Point, PointJacobi, CurveFp, INFINITY +from .ecdsa import generator_brainpoolp160r1 + + +class TestVerifyingKeyFromString(unittest.TestCase): + """ + Verify that ecdsa.keys.VerifyingKey.from_string() can be used with + bytes-like objects + """ + + @classmethod + def setUpClass(cls): + cls.key_bytes = ( + b"\x04L\xa2\x95\xdb\xc7Z\xd7\x1f\x93\nz\xcf\x97\xcf" + b"\xd7\xc2\xd9o\xfe8}X!\xae\xd4\xfah\xfa^\rpI\xba\xd1" + b"Y\xfb\x92xa\xebo+\x9cG\xfav\xca" + ) + cls.vk = VerifyingKey.from_string(cls.key_bytes) + + def test_bytes(self): + self.assertIsNotNone(self.vk) + 
self.assertIsInstance(self.vk, VerifyingKey) + self.assertEqual( + self.vk.pubkey.point.x(), + 105419898848891948935835657980914000059957975659675736097, + ) + self.assertEqual( + self.vk.pubkey.point.y(), + 4286866841217412202667522375431381222214611213481632495306, + ) + + def test_bytes_memoryview(self): + vk = VerifyingKey.from_string(buffer(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray(self): + vk = VerifyingKey.from_string(bytearray(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytesarray_memoryview(self): + vk = VerifyingKey.from_string(buffer(bytearray(self.key_bytes))) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_string(arr) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes_memoryview(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_string(buffer(arr)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_ints(self): + arr = array.array("I", self.key_bytes) + vk = VerifyingKey.from_string(arr) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_ints_memoryview(self): + arr = array.array("I", self.key_bytes) + vk = VerifyingKey.from_string(buffer(arr)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytes_uncompressed(self): + vk = VerifyingKey.from_string(b"\x04" + self.key_bytes) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray_uncompressed(self): + vk = VerifyingKey.from_string(bytearray(b"\x04" + self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytes_compressed(self): + vk = VerifyingKey.from_string(b"\x02" + self.key_bytes[:24]) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray_compressed(self): + vk = VerifyingKey.from_string(bytearray(b"\x02" + self.key_bytes[:24])) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_ed25519_VerifyingKey_from_string_imported(self): + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"AAA", Ed25519) + + +class TestVerifyingKeyFromDer(unittest.TestCase): + """ + Verify that ecdsa.keys.VerifyingKey.from_der() can be used with + bytes-like objects. 
+ """ + + @classmethod + def setUpClass(cls): + prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" + ) + key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" + "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" + "-----END PUBLIC KEY-----\n" + ) + cls.key_pem = key_str + + cls.key_bytes = unpem(key_str) + assert isinstance(cls.key_bytes, bytes) + cls.vk = VerifyingKey.from_pem(key_str) + cls.sk = SigningKey.from_pem(prv_key_str) + + key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8\n" + "Poqzgjau4kfxBPyZimeRfuY/9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" + "-----END PUBLIC KEY-----\n" + ) + cls.vk2 = VerifyingKey.from_pem(key_str) + + cls.sk2 = SigningKey.generate(vk.curve) + + def test_load_key_with_explicit_parameters(self): + pub_key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" + "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" + "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" + "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" + "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" + "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" + "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" + "-----END PUBLIC KEY-----" + ) + pk = VerifyingKey.from_pem(pub_key_str) + + pk_exp = VerifyingKey.from_string( + b"\x04\x8a\xf5\x52\x48\x18\xb3\x98\xe6\x6c\x57\x3b\x8a\xdd\x7f" + b"\x60\x8d\x97\x4f\xff\xc8\x95\xa1\x23\x30\xd6\x5f\xb7\x5a\x30" + b"\x8e\xaa\x41\x60\x7d\x84\xac\x91\xe4\xe1\x4e\x4f\xa7\x85\xaf" + b"\x5a\xad\x71\x98\x7c\x08\x66\x5b\x07\xec\x88\x38\x2a\x67\x9f" + b"\x09\xf4\x7a\x4a\x65", + curve=NIST256p, + ) + self.assertEqual(pk, pk_exp) + + def test_load_key_with_explicit_with_explicit_disabled(self): + pub_key_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" + "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" + "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" + "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" + "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" + "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" + "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" + "-----END PUBLIC KEY-----" + ) + with self.assertRaises(UnexpectedDER): + VerifyingKey.from_pem( + pub_key_str, valid_curve_encodings=["named_curve"] + ) + + def test_load_key_with_disabled_format(self): + with self.assertRaises(MalformedPointError) as e: + VerifyingKey.from_der(self.key_bytes, valid_encodings=["raw"]) + + self.assertIn("enabled (raw) encodings", str(e.exception)) + + def test_custom_hashfunc(self): + vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256) + + self.assertIs(vk.default_hashfunc, hashlib.sha256) + + def test_from_pem_with_custom_hashfunc(self): + vk = VerifyingKey.from_pem(self.key_pem, hashlib.sha256) + + self.assertIs(vk.default_hashfunc, hashlib.sha256) + + def test_bytes(self): + vk = VerifyingKey.from_der(self.key_bytes) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytes_memoryview(self): + vk = 
VerifyingKey.from_der(buffer(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytearray(self): + vk = VerifyingKey.from_der(bytearray(self.key_bytes)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_bytesarray_memoryview(self): + vk = VerifyingKey.from_der(buffer(bytearray(self.key_bytes))) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_der(arr) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_array_array_of_bytes_memoryview(self): + arr = array.array("B", self.key_bytes) + vk = VerifyingKey.from_der(buffer(arr)) + + self.assertEqual(self.vk.to_string(), vk.to_string()) + + def test_equality_on_verifying_keys(self): + self.assertTrue(self.vk == self.sk.get_verifying_key()) + + def test_inequality_on_verifying_keys(self): + self.assertFalse(self.vk == self.vk2) + + def test_inequality_on_verifying_keys_not_implemented(self): + self.assertFalse(self.vk == None) + + def test_VerifyingKey_inequality_on_same_curve(self): + self.assertNotEqual(self.vk, self.sk2.verifying_key) + + def test_SigningKey_inequality_on_same_curve(self): + self.assertNotEqual(self.sk, self.sk2) + + def test_inequality_on_wrong_types(self): + self.assertFalse(self.vk == self.sk) + + def test_from_public_point_old(self): + pj = self.vk.pubkey.point + point = Point(pj.curve(), pj.x(), pj.y()) + + vk = VerifyingKey.from_public_point(point, self.vk.curve) + + self.assertTrue(vk == self.vk) + + def test_ed25519_VerifyingKey_repr__(self): + sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) + string = repr(sk.verifying_key) + + self.assertEqual( + "VerifyingKey.from_string(" + "bytearray(b'K\\x0c\\xfbZH\\x8e\\x8c\\x8c\\x07\\xee\\xda\\xfb" + "\\xe1\\x97\\xcd\\x90\\x18\\x02\\x15h]\\xfe\\xbe\\xcbB\\xba\\xe6r" + "\\x10\\xae\\xf1P'), Ed25519, None)", + string, + ) + + def test_edwards_from_public_point(self): + point = Ed25519.generator + with self.assertRaises(ValueError) as e: + VerifyingKey.from_public_point(point, Ed25519) + + self.assertIn("incompatible with Edwards", str(e.exception)) + + def test_edwards_precompute_no_side_effect(self): + sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) + vk = sk.verifying_key + vk2 = VerifyingKey.from_string(vk.to_string(), Ed25519) + vk.precompute() + + self.assertEqual(vk, vk2) + + def test_parse_malfomed_eddsa_der_pubkey(self): + der_str = encode_sequence( + encode_sequence(encode_oid(*Ed25519.oid)), + encode_bitstring(bytes(Ed25519.generator.to_bytes()), 0), + encode_bitstring(b"\x00", 0), + ) + + with self.assertRaises(UnexpectedDER) as e: + VerifyingKey.from_der(der_str) + + self.assertIn("trailing junk after public key", str(e.exception)) + + def test_edwards_from_public_key_recovery(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_public_key_recovery(b"", b"", Ed25519) + + self.assertIn("unsupported for Edwards", str(e.exception)) + + def test_edwards_from_public_key_recovery_with_digest(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_public_key_recovery_with_digest( + b"", b"", Ed25519 + ) + + self.assertIn("unsupported for Edwards", str(e.exception)) + + def test_load_ed25519_from_pem(self): + vk_pem = ( + "-----BEGIN PUBLIC KEY-----\n" + "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(vk_pem) + + 
self.assertIsInstance(vk.curve, Curve) + self.assertIs(vk.curve, Ed25519) + + vk_str = ( + b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" + b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" + ) + + vk_2 = VerifyingKey.from_string(vk_str, Ed25519) + + self.assertEqual(vk, vk_2) + + def test_export_ed255_to_pem(self): + vk_str = ( + b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" + b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" + ) + + vk = VerifyingKey.from_string(vk_str, Ed25519) + + vk_pem = ( + b"-----BEGIN PUBLIC KEY-----\n" + b"MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + b"-----END PUBLIC KEY-----\n" + ) + + self.assertEqual(vk_pem, vk.to_pem()) + + def test_export_ed255_to_ssh(self): + vk_str = ( + b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" + b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" + ) + + vk = VerifyingKey.from_string(vk_str, Ed25519) + + vk_ssh = b"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICMAUNDWZCIojuNViX5uQVeNrt5EJu5WJ7yF5gsvKstl\n" + + self.assertEqual(vk_ssh, vk.to_ssh()) + + def test_ed25519_export_import(self): + sk = SigningKey.generate(Ed25519) + vk = sk.verifying_key + + vk2 = VerifyingKey.from_pem(vk.to_pem()) + + self.assertEqual(vk, vk2) + + def test_ed25519_sig_verify(self): + vk_pem = ( + "-----BEGIN PUBLIC KEY-----\n" + "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(vk_pem) + + data = b"data\n" + + # signature created by OpenSSL 3.0.0 beta1 + sig = ( + b"\x64\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" + b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" + b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" + b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" + ) + + self.assertTrue(vk.verify(sig, data)) + + def test_ed25519_sig_verify_malformed(self): + vk_pem = ( + "-----BEGIN PUBLIC KEY-----\n" + "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(vk_pem) + + data = b"data\n" + + # modified signature from test_ed25519_sig_verify + sig = ( + b"\xAA\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" + b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" + b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" + b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" + ) + + with self.assertRaises(BadSignatureError): + vk.verify(sig, data) + + def test_ed448_from_pem(self): + pem_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" + "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(pem_str) + + self.assertIsInstance(vk.curve, Curve) + self.assertIs(vk.curve, Ed448) + + vk_str = ( + b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" + b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" + b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" + b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" + ) + + vk2 = VerifyingKey.from_string(vk_str, Ed448) + + self.assertEqual(vk, vk2) + + def test_ed448_to_pem(self): + vk_str = ( + b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" + b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" + 
b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" + b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" + ) + vk = VerifyingKey.from_string(vk_str, Ed448) + + vk_pem = ( + b"-----BEGIN PUBLIC KEY-----\n" + b"MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0dTdYD2ll94g5\n" + b"8MhSnBiBQB9A1MMA\n" + b"-----END PUBLIC KEY-----\n" + ) + + self.assertEqual(vk_pem, vk.to_pem()) + + def test_ed448_export_import(self): + sk = SigningKey.generate(Ed448) + vk = sk.verifying_key + + vk2 = VerifyingKey.from_pem(vk.to_pem()) + + self.assertEqual(vk, vk2) + + def test_ed448_sig_verify(self): + pem_str = ( + "-----BEGIN PUBLIC KEY-----\n" + "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" + "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" + "-----END PUBLIC KEY-----\n" + ) + + vk = VerifyingKey.from_pem(pem_str) + + data = b"data\n" + + # signature created by OpenSSL 3.0.0 beta1 + sig = ( + b"\x68\xed\x2c\x70\x35\x22\xca\x1c\x35\x03\xf3\xaa\x51\x33\x3d\x00" + b"\xc0\xae\xb0\x54\xc5\xdc\x7f\x6f\x30\x57\xb4\x1d\xcb\xe9\xec\xfa" + b"\xc8\x45\x3e\x51\xc1\xcb\x60\x02\x6a\xd0\x43\x11\x0b\x5f\x9b\xfa" + b"\x32\x88\xb2\x38\x6b\xed\xac\x09\x00\x78\xb1\x7b\x5d\x7e\xf8\x16" + b"\x31\xdd\x1b\x3f\x98\xa0\xce\x19\xe7\xd8\x1c\x9f\x30\xac\x2f\xd4" + b"\x1e\x55\xbf\x21\x98\xf6\x4c\x8c\xbe\x81\xa5\x2d\x80\x4c\x62\x53" + b"\x91\xd5\xee\x03\x30\xc6\x17\x66\x4b\x9e\x0c\x8d\x40\xd0\xad\xae" + b"\x0a\x00" + ) + + self.assertTrue(vk.verify(sig, data)) + + +class TestSigningKey(unittest.TestCase): + """ + Verify that ecdsa.keys.SigningKey.from_der() can be used with + bytes-like objects. + """ + + @classmethod + def setUpClass(cls): + prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" + ) + cls.sk1 = SigningKey.from_pem(prv_key_str) + + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MG8CAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQEEVTBTAgEBBBheyEIL1u+SUqlC6YkE\n" + "PKKfVh+lJXcOscWhNAMyAAS4gXfQhO8X9eRWOUCAKDYPn1m0pNcmTmLaBlHc5Ho1\n" + "pMW0XPUVk0I6i1V7nCCZ82w=\n" + "-----END PRIVATE KEY-----\n" + ) + cls.sk1_pkcs8 = SigningKey.from_pem(prv_key_str) + + prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MHcCAQEEIKlL2EAm5NPPZuXwxRf4nXMk0A80y6UUbiQ17be/qFhRoAoGCCqGSM49\n" + "AwEHoUQDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8Poqzgjau4kfxBPyZimeRfuY/\n" + "9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" + "-----END EC PRIVATE KEY-----\n" + ) + cls.sk2 = SigningKey.from_pem(prv_key_str) + + def test_to_der_pkcs8(self): + self.assertEqual( + self.sk1.to_der(format="pkcs8"), + b"0o\x02\x01\x010\x13\x06\x07*\x86H\xce=\x02\x01\x06\x08*\x86H" + b"\xce=\x03\x01\x01\x04U0S\x02\x01\x01\x04\x18^\xc8B\x0b\xd6\xef" + b"\x92R\xa9B\xe9\x89\x04<\xa2\x9fV\x1f\xa5%w\x0e\xb1\xc5\xa14\x03" + b"2\x00\x04\xb8\x81w\xd0\x84\xef\x17\xf5\xe4V9@\x80(6\x0f\x9fY" + b"\xb4\xa4\xd7&Nb\xda\x06Q\xdc\xe4z5\xa4\xc5\xb4\\\xf5\x15\x93B:" + b"\x8bU{\x9c \x99\xf3l", + ) + + def test_decoding_explicit_curve_parameters(self): + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" + "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" + "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" + "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" + "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" + 
"AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" + "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" + "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(prv_key_str) + + sk2 = SigningKey.from_string( + b"\x21\x7b\x51\x11\xf5\x26\x47\x5e\xab\x65\xb9\xaf\x0c\x60\xf6" + b"\x94\x01\x05\x3d\x96\xb6\x0c\xb3\xf2\xcf\x47\x33\x4a\x36\xb9" + b"\xf3\x20", + curve=NIST256p, + ) + + self.assertEqual(sk, sk2) + + def test_decoding_explicit_curve_parameters_with_explicit_disabled(self): + prv_key_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" + "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" + "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" + "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" + "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" + "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" + "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" + "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" + "-----END PRIVATE KEY-----\n" + ) + + with self.assertRaises(UnexpectedDER): + SigningKey.from_pem( + prv_key_str, valid_curve_encodings=["named_curve"] + ) + + def test_equality_on_signing_keys(self): + sk = SigningKey.from_secret_exponent( + self.sk1.privkey.secret_multiplier, self.sk1.curve + ) + self.assertEqual(self.sk1, sk) + self.assertEqual(self.sk1_pkcs8, sk) + + def test_verify_with_empty_message(self): + sig = self.sk1.sign(b"") + + self.assertTrue(sig) + + vk = self.sk1.verifying_key + + self.assertTrue(vk.verify(sig, b"")) + + def test_verify_with_precompute(self): + sig = self.sk1.sign(b"message") + + vk = self.sk1.verifying_key + + vk.precompute() + + self.assertTrue(vk.verify(sig, b"message")) + + def test_compare_verifying_key_with_precompute(self): + vk1 = self.sk1.verifying_key + vk1.precompute() + + vk2 = self.sk1_pkcs8.verifying_key + + self.assertEqual(vk1, vk2) + + def test_verify_with_lazy_precompute(self): + sig = self.sk2.sign(b"other message") + + vk = self.sk2.verifying_key + + vk.precompute(lazy=True) + + self.assertTrue(vk.verify(sig, b"other message")) + + def test_inequality_on_signing_keys(self): + self.assertNotEqual(self.sk1, self.sk2) + + def test_inequality_on_signing_keys_not_implemented(self): + self.assertNotEqual(self.sk1, None) + + def test_ed25519_from_pem(self): + pem_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(pem_str) + + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + self.assertEqual(sk, sk_str) + + def test_ed25519_from_der_bad_alg_id_params(self): + der_str = encode_sequence( + encode_integer(1), + encode_sequence(encode_oid(*Ed25519.oid), encode_integer(1)), + encode_octet_string(encode_octet_string(b"A" * 32)), + ) + + with self.assertRaises(UnexpectedDER) as e: + SigningKey.from_der(der_str) + + self.assertIn("Non NULL parameters", str(e.exception)) + + def test_ed25519_from_der_junk_after_priv_key(self): + der_str = encode_sequence( + encode_integer(1), + encode_sequence( + encode_oid(*Ed25519.oid), + ), + encode_octet_string(encode_octet_string(b"A" * 32) + b"B"), + ) + + 
with self.assertRaises(UnexpectedDER) as e: + SigningKey.from_der(der_str) + + self.assertIn( + "trailing junk after the encoded private key", str(e.exception) + ) + + def test_ed25519_sign(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + msg = b"message" + + sig = sk_str.sign(msg, sigencode=sigencode_der) + + self.assertEqual( + sig, + b"\xe1,v\xc9>%\xda\xd2~>\xc3&\na\xf4@|\x9e`X\x11\x13@<\x987\xd4" + b"\r\xb1\xf5\xb3\x15\x7f%i{\xdf}\xdd\xb1\xf3\x02\x7f\x80\x02\xc2" + b'|\xe5\xd6\x06\xc4\n\xa3\xb0\xf6}\xc0\xed)"+E\xaf\x00', + ) + + def test_ed25519_sign_digest_deterministic(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + with self.assertRaises(ValueError) as e: + sk_str.sign_digest_deterministic(b"a" * 20) + + self.assertIn("Method unsupported for Edwards", str(e.exception)) + + def test_ed25519_sign_digest(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + with self.assertRaises(ValueError) as e: + sk_str.sign_digest(b"a" * 20) + + self.assertIn("Method unsupported for Edwards", str(e.exception)) + + def test_ed25519_sign_number(self): + sk_str = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + with self.assertRaises(ValueError) as e: + sk_str.sign_number(20) + + self.assertIn("Method unsupported for Edwards", str(e.exception)) + + def test_ed25519_to_der_ssleay(self): + pem_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" + "-----END PRIVATE KEY-----\n" + ) + + sk = SigningKey.from_pem(pem_str) + + with self.assertRaises(ValueError) as e: + sk.to_der(format="ssleay") + + self.assertIn("Only PKCS#8 format", str(e.exception)) + + def test_ed25519_to_pem(self): + sk = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + pem_str = ( + b"-----BEGIN PRIVATE KEY-----\n" + b"MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" + b"-----END PRIVATE KEY-----\n" + ) + + self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) + + def test_ed25519_to_ssh(self): + sk = SigningKey.from_string( + b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" + b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", + Ed25519, + ) + + ssh_str = ( + b"-----BEGIN OPENSSH PRIVATE KEY-----\n" + b"b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZWQyNTUx\n" + b"OQAAACAjAFDQ1mQiKI7jVYl+bkFXja7eRCbuVie8heYLLyrLZQAAAIgAAAAAAAAAAAAAAAtzc2gt\n" + b"ZWQyNTUxOQAAACAjAFDQ1mQiKI7jVYl+bkFXja7eRCbuVie8heYLLyrLZQAAAEA0usfRTtTxvE+M\n" + b"SD4PGXdM/Li+rFRmRRGa19e4Bwv11CMAUNDWZCIojuNViX5uQVeNrt5EJu5WJ7yF5gsvKstlAAAA\n" + b"AAECAwQF\n" + b"-----END OPENSSH PRIVATE KEY-----\n" + ) + + self.assertEqual(sk.to_ssh(), ssh_str) + + def test_ed25519_to_and_from_pem(self): + sk = SigningKey.generate(Ed25519) + + decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) + + self.assertEqual(sk, decoded) + + def 
test_ed25519_custom_entropy(self): + sk = SigningKey.generate(Ed25519, entropy=os.urandom) + + self.assertIsNotNone(sk) + + def test_ed25519_from_secret_exponent(self): + with self.assertRaises(ValueError) as e: + SigningKey.from_secret_exponent(1234567890, curve=Ed25519) + + self.assertIn("don't support setting the secret", str(e.exception)) + + def test_ed448_from_pem(self): + pem_str = ( + "-----BEGIN PRIVATE KEY-----\n" + "MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmP\n" + "OP0JMYaLGlTzwovmvCDJ2zLaezu9NLz9aQ==\n" + "-----END PRIVATE KEY-----\n" + ) + sk = SigningKey.from_pem(pem_str) + + sk_str = SigningKey.from_string( + b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" + b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" + b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" + b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", + Ed448, + ) + + self.assertEqual(sk, sk_str) + + def test_ed448_to_pem(self): + sk = SigningKey.from_string( + b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" + b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" + b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" + b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", + Ed448, + ) + pem_str = ( + b"-----BEGIN PRIVATE KEY-----\n" + b"MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmPOP0JMYaLGlTz\n" + b"wovmvCDJ2zLaezu9NLz9aQ==\n" + b"-----END PRIVATE KEY-----\n" + ) + + self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) + + def test_ed448_encode_decode(self): + sk = SigningKey.generate(Ed448) + + decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) + + self.assertEqual(decoded, sk) + + +class TestTrivialCurve(unittest.TestCase): + @classmethod + def setUpClass(cls): + # To test what happens when r or s in signing happens to be zero we + # need to find a scalar that creates one of the points on a curve that + # has x coordinate equal to zero.
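+        # (Reasoning, for context: in ECDSA r = (k*G).x mod n, so a point
+        # whose x coordinate reduces to zero yields r == 0, and
+        # s = k^-1 * (h + r*d) mod n can likewise vanish for a suitable
+        # message hash h.)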
+ # Even for secp112r2 curve that's non trivial so use this toy + # curve, for which we can iterate over all points quickly + curve = CurveFp(163, 84, 58) + gen = PointJacobi(curve, 2, 87, 1, 167, generator=True) + + cls.toy_curve = Curve("toy_p8", curve, gen, (1, 2, 0)) + + cls.sk = SigningKey.from_secret_exponent( + 140, + cls.toy_curve, + hashfunc=hashlib.sha1, + ) + + def test_generator_sanity(self): + gen = self.toy_curve.generator + + self.assertEqual(gen * gen.order(), INFINITY) + + def test_public_key_sanity(self): + self.assertEqual(self.sk.verifying_key.to_string(), b"\x98\x1e") + + def test_deterministic_sign(self): + sig = self.sk.sign_deterministic(b"message") + + self.assertEqual(sig, b"-.") + + self.assertTrue(self.sk.verifying_key.verify(sig, b"message")) + + def test_deterministic_sign_random_message(self): + msg = os.urandom(32) + sig = self.sk.sign_deterministic(msg) + self.assertEqual(len(sig), 2) + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + def test_deterministic_sign_that_rises_R_zero_error(self): + # the raised RSZeroError is caught and handled internally by + # sign_deterministic methods + msg = b"\x00\x4f" + sig = self.sk.sign_deterministic(msg) + self.assertEqual(sig, b"\x36\x9e") + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + def test_deterministic_sign_that_rises_S_zero_error(self): + msg = b"\x01\x6d" + sig = self.sk.sign_deterministic(msg) + self.assertEqual(sig, b"\x49\x6c") + self.assertTrue(self.sk.verifying_key.verify(sig, msg)) + + +# test VerifyingKey.verify() +prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" +) +key_bytes = unpem(prv_key_str) +assert isinstance(key_bytes, bytes) +sk = SigningKey.from_der(key_bytes) +vk = sk.verifying_key + +data = ( + b"some string for signing" + b"contents don't really matter" + b"but do include also some crazy values: " + b"\x00\x01\t\r\n\x00\x00\x00\xff\xf0" +) +assert len(data) % 4 == 0 +sha1 = hashlib.sha1() +sha1.update(data) +data_hash = sha1.digest() +assert isinstance(data_hash, bytes) +sig_raw = sk.sign(data, sigencode=sigencode_string) +assert isinstance(sig_raw, bytes) +sig_der = sk.sign(data, sigencode=sigencode_der) +assert isinstance(sig_der, bytes) +sig_strings = sk.sign(data, sigencode=sigencode_strings) +assert isinstance(sig_strings[0], bytes) + +verifiers = [] +for modifier, fun in [ + ("bytes", lambda x: x), + ("bytes memoryview", buffer), + ("bytearray", bytearray), + ("bytearray memoryview", lambda x: buffer(bytearray(x))), + ("array.array of bytes", lambda x: array.array("B", x)), + ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), + ("array.array of ints", lambda x: array.array("I", x)), + ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), +]: + if "ints" in modifier: + conv = lambda x: x + else: + conv = fun + for sig_format, signature, decoder, mod_apply in [ + ("raw", sig_raw, sigdecode_string, lambda x: conv(x)), + ("der", sig_der, sigdecode_der, lambda x: conv(x)), + ( + "strings", + sig_strings, + sigdecode_strings, + lambda x: tuple(conv(i) for i in x), + ), + ]: + for method_name, vrf_mthd, vrf_data in [ + ("verify", vk.verify, data), + ("verify_digest", vk.verify_digest, data_hash), + ]: + verifiers.append( + pytest.param( + signature, + decoder, + mod_apply, + fun, + vrf_mthd, + vrf_data, + 
id="{2}-{0}-{1}".format(modifier, sig_format, method_name), + ) + ) + + +@pytest.mark.parametrize( + "signature,decoder,mod_apply,fun,vrf_mthd,vrf_data", verifiers +) +def test_VerifyingKey_verify( + signature, decoder, mod_apply, fun, vrf_mthd, vrf_data +): + sig = mod_apply(signature) + + assert vrf_mthd(sig, fun(vrf_data), sigdecode=decoder) + + +# test SigningKey.from_string() +prv_key_bytes = ( + b"^\xc8B\x0b\xd6\xef\x92R\xa9B\xe9\x89\x04<\xa2" + b"\x9fV\x1f\xa5%w\x0e\xb1\xc5" +) +assert len(prv_key_bytes) == 24 +converters = [] +for modifier, convert in [ + ("bytes", lambda x: x), + ("bytes memoryview", buffer), + ("bytearray", bytearray), + ("bytearray memoryview", lambda x: buffer(bytearray(x))), + ("array.array of bytes", lambda x: array.array("B", x)), + ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), + ("array.array of ints", lambda x: array.array("I", x)), + ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), +]: + converters.append(pytest.param(convert, id=modifier)) + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_from_string(convert): + key = convert(prv_key_bytes) + sk = SigningKey.from_string(key) + + assert sk.to_string() == prv_key_bytes + + +# test SigningKey.from_der() +prv_key_str = ( + "-----BEGIN EC PRIVATE KEY-----\n" + "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" + "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" + "bA==\n" + "-----END EC PRIVATE KEY-----\n" +) +key_bytes = unpem(prv_key_str) +assert isinstance(key_bytes, bytes) + +# last two converters are for array.array of ints, those require input +# that's multiple of 4, which no curve we support produces +@pytest.mark.parametrize("convert", converters[:-2]) +def test_SigningKey_from_der(convert): + key = convert(key_bytes) + sk = SigningKey.from_der(key) + + assert sk.to_string() == prv_key_bytes + + +# test SigningKey.sign_deterministic() +extra_entropy = b"\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11" + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign_deterministic(convert): + sig = sk.sign_deterministic( + convert(data), extra_entropy=convert(extra_entropy) + ) + + vk.verify(sig, data) + + +# test SigningKey.sign_digest_deterministic() +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign_digest_deterministic(convert): + sig = sk.sign_digest_deterministic( + convert(data_hash), extra_entropy=convert(extra_entropy) + ) + + vk.verify(sig, data) + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign(convert): + sig = sk.sign(convert(data)) + + vk.verify(sig, data) + + +@pytest.mark.parametrize("convert", converters) +def test_SigningKey_sign_digest(convert): + sig = sk.sign_digest(convert(data_hash)) + + vk.verify(sig, data) + + +def test_SigningKey_with_unlikely_value(): + sk = SigningKey.from_secret_exponent(NIST256p.order - 1, curve=NIST256p) + vk = sk.verifying_key + sig = sk.sign(b"hello") + assert vk.verify(sig, b"hello") + + +def test_SigningKey_with_custom_curve_old_point(): + generator = generator_brainpoolp160r1 + generator = Point( + generator.curve(), + generator.x(), + generator.y(), + generator.order(), + ) + + curve = Curve( + "BRAINPOOLP160r1", + generator.curve(), + generator, + (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), + ) + + sk = SigningKey.from_secret_exponent(12, curve) + + sk2 = SigningKey.from_secret_exponent(12, BRAINPOOLP160r1) + + assert sk.privkey == sk2.privkey + + +def 
test_VerifyingKey_inequality_with_different_curves(): + sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) + sk2 = SigningKey.from_secret_exponent(2, NIST256p) + + assert not (sk1.verifying_key == sk2.verifying_key) + + +def test_VerifyingKey_inequality_with_different_secret_points(): + sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) + sk2 = SigningKey.from_secret_exponent(3, BRAINPOOLP160r1) + + assert not (sk1.verifying_key == sk2.verifying_key) + + +def test_SigningKey_from_pem_pkcs8v2_EdDSA(): + pem = """-----BEGIN PRIVATE KEY----- + MFMCAQEwBQYDK2VwBCIEICc2F2ag1n1QP0jY+g9qWx5sDkx0s/HdNi3cSRHw+zsI + oSMDIQA+HQ2xCif8a/LMWR2m5HaCm5I2pKe/cc8OiRANMHxjKQ== + -----END PRIVATE KEY-----""" + + sk = SigningKey.from_pem(pem) + assert sk.curve == Ed25519 diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_malformed_sigs.py b/venv/lib/python3.12/site-packages/ecdsa/test_malformed_sigs.py new file mode 100644 index 0000000..e5a87c2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_malformed_sigs.py @@ -0,0 +1,378 @@ +from __future__ import with_statement, division + +import hashlib + +try: + from hashlib import algorithms_available +except ImportError: # pragma: no cover + algorithms_available = [ + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + ] +# skip algorithms broken by change to OpenSSL 3.0 and early versions +# of hashlib that list algorithms that require the legacy provider to work +# https://bugs.python.org/issue38820 +algorithms_available = [ + i + for i in algorithms_available + if i not in ("mdc2", "md2", "md4", "whirlpool", "ripemd160") +] +from functools import partial +import pytest +import sys +import hypothesis.strategies as st +from hypothesis import note, assume, given, settings, example + +from .keys import SigningKey +from .keys import BadSignatureError +from .util import sigencode_der, sigencode_string +from .util import sigdecode_der, sigdecode_string +from .curves import curves, SECP112r2, SECP128r1 +from .der import ( + encode_integer, + encode_bitstring, + encode_octet_string, + encode_oid, + encode_sequence, + encode_constructed, +) +from .ellipticcurve import CurveEdTw + + +example_data = b"some data to sign" +"""Since the data is hashed for processing, really any string will do.""" + + +hash_and_size = [ + (name, hashlib.new(name).digest_size) for name in algorithms_available +] +"""Pairs of hash names and their output sizes. 
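+For example, a single entry might look like ("sha256", 32) (illustrative,
+using hashlib's algorithm names).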
+Needed for pairing with curves as we don't support hashes +bigger than order sizes of curves.""" + + +if "--fast" in sys.argv: # pragma: no cover + curves = [SECP112r2, SECP128r1] + + +keys_and_sigs = [] +"""Name of the curve+hash combination, VerifyingKey and DER signature.""" + + +# for hypothesis strategy shrinking we want smallest curves and hashes first +for curve in sorted(curves, key=lambda x: x.baselen): + for hash_alg in [ + name + for name, size in sorted(hash_and_size, key=lambda x: x[1]) + if 0 < size <= curve.baselen + ]: + sk = SigningKey.generate( + curve, hashfunc=partial(hashlib.new, hash_alg) + ) + + keys_and_sigs.append( + ( + "{0} {1}".format(curve, hash_alg), + sk.verifying_key, + sk.sign(example_data, sigencode=sigencode_der), + ) + ) + + +# first make sure that the signatures can be verified +@pytest.mark.parametrize( + "verifying_key,signature", + [pytest.param(vk, sig, id=name) for name, vk, sig in keys_and_sigs], +) +def test_signatures(verifying_key, signature): + assert verifying_key.verify( + signature, example_data, sigdecode=sigdecode_der + ) + + +@st.composite +def st_fuzzed_sig(draw, keys_and_sigs): # pragma: no cover + """ + Hypothesis strategy that generates pairs of VerifyingKey and malformed + signatures created by fuzzing of a valid signature. + """ + name, verifying_key, old_sig = draw(st.sampled_from(keys_and_sigs)) + note("Configuration: {0}".format(name)) + + sig = bytearray(old_sig) + + # decide which bytes should be removed + to_remove = draw( + st.lists(st.integers(min_value=0, max_value=len(sig) - 1), unique=True) + ) + to_remove.sort() + for i in reversed(to_remove): + del sig[i] + note("Remove bytes: {0}".format(to_remove)) + + # decide which bytes of the original signature should be changed + xors = None + if sig: # pragma: no branch + xors = draw( + st.dictionaries( + st.integers(min_value=0, max_value=len(sig) - 1), + st.integers(min_value=1, max_value=255), + ) + ) + for i, val in xors.items(): + sig[i] ^= val + note("xors: {0}".format(xors)) + + # decide where new data should be inserted + insert_pos = draw(st.integers(min_value=0, max_value=len(sig))) + # NIST521p signature is about 140 bytes long, test slightly longer + insert_data = draw(st.binary(max_size=256)) + + sig = sig[:insert_pos] + insert_data + sig[insert_pos:] + note( + "Inserted at position {0} bytes: {1!r}".format(insert_pos, insert_data) + ) + + sig = bytes(sig) + # make sure that there was performed at least one mutation on the data + assume(to_remove or xors or insert_data) + # and that the mutations didn't cancel each-other out + assume(sig != old_sig) + + return verifying_key, sig + + +params = {} +# not supported in hypothesis 2.0.0 +if sys.version_info >= (2, 7): # pragma: no branch + from hypothesis import HealthCheck + + # deadline=5s because NIST521p are slow to verify + params["deadline"] = 5000 + params["suppress_health_check"] = [ + HealthCheck.data_too_large, + HealthCheck.filter_too_much, + HealthCheck.too_slow, + ] +if "--fast" in sys.argv: # pragma: no cover + params["max_examples"] = 20 + +slow_params = dict(params) +if "--fast" in sys.argv: # pragma: no cover + slow_params["max_examples"] = 1 +else: + slow_params["max_examples"] = 10 + + +@settings(**slow_params) +@given(st_fuzzed_sig(keys_and_sigs)) +def test_fuzzed_der_signatures(args): + verifying_key, sig = args + + with pytest.raises(BadSignatureError): + verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) + + +@st.composite +def st_random_der_ecdsa_sig_value(draw): # pragma: no cover 
+ """ + Hypothesis strategy for selecting random values and encoding them + to ECDSA-Sig-Value object:: + + ECDSA-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + """ + name, verifying_key, _ = draw(st.sampled_from(keys_and_sigs)) + note("Configuration: {0}".format(name)) + order = int(verifying_key.curve.order) + + # the encode_integer doesn't support negative numbers, would be nice + # to generate them too, but we have coverage for remove_integer() + # verifying that it doesn't accept them, so meh. + # Test all numbers around the ones that can show up (around order) + # way smaller and slightly bigger + r = draw( + st.integers(min_value=0, max_value=order << 4) + | st.integers(min_value=order >> 2, max_value=order + 1) + ) + s = draw( + st.integers(min_value=0, max_value=order << 4) + | st.integers(min_value=order >> 2, max_value=order + 1) + ) + + sig = encode_sequence(encode_integer(r), encode_integer(s)) + + return verifying_key, sig + + +@settings(**slow_params) +@given(st_random_der_ecdsa_sig_value()) +def test_random_der_ecdsa_sig_value(params): + """ + Check if random values encoded in ECDSA-Sig-Value structure are rejected + as signature. + """ + verifying_key, sig = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) + + +def st_der_integer(*args, **kwargs): # pragma: no cover + """ + Hypothesis strategy that returns a random positive integer as DER + INTEGER. + Parameters are passed to hypothesis.strategy.integer. + """ + if "min_value" not in kwargs: # pragma: no branch + kwargs["min_value"] = 0 + return st.builds(encode_integer, st.integers(*args, **kwargs)) + + +@st.composite +def st_der_bit_string(draw, *args, **kwargs): # pragma: no cover + """ + Hypothesis strategy that returns a random DER BIT STRING. + Parameters are passed to hypothesis.strategy.binary. + """ + data = draw(st.binary(*args, **kwargs)) + if data: + unused = draw(st.integers(min_value=0, max_value=7)) + data = bytearray(data) + data[-1] &= -(2**unused) + data = bytes(data) + else: + unused = 0 + return encode_bitstring(data, unused) + + +def st_der_octet_string(*args, **kwargs): # pragma: no cover + """ + Hypothesis strategy that returns a random DER OCTET STRING object. + Parameters are passed to hypothesis.strategy.binary + """ + return st.builds(encode_octet_string, st.binary(*args, **kwargs)) + + +def st_der_null(): # pragma: no cover + """ + Hypothesis strategy that returns DER NULL object. + """ + return st.just(b"\x05\x00") + + +@st.composite +def st_der_oid(draw): # pragma: no cover + """ + Hypothesis strategy that returns DER OBJECT IDENTIFIER objects. + """ + first = draw(st.integers(min_value=0, max_value=2)) + if first < 2: + second = draw(st.integers(min_value=0, max_value=39)) + else: + second = draw(st.integers(min_value=0, max_value=2**512)) + rest = draw( + st.lists(st.integers(min_value=0, max_value=2**512), max_size=50) + ) + return encode_oid(first, second, *rest) + + +def st_der(): # pragma: no cover + """ + Hypothesis strategy that returns random DER structures. + + A valid DER structure is any primitive object, an octet encoding + of a valid DER structure, sequence of valid DER objects or a constructed + encoding of any of the above. 
+ """ + return st.recursive( # pragma: no branch + st.just(b"") + | st_der_integer(max_value=2**4096) + | st_der_bit_string(max_size=1024**2) + | st_der_octet_string(max_size=1024**2) + | st_der_null() + | st_der_oid(), + lambda children: st.builds(encode_octet_string, st.one_of(children)) + | st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children)) + | st.builds( + lambda x: encode_sequence(*x), st.lists(children, max_size=200) + ) + | st.builds( + encode_constructed, + st.integers(min_value=0, max_value=0x3F), + st.one_of(children), + ), + max_leaves=40, + ) + + +@settings(**slow_params) +@given(st.sampled_from(keys_and_sigs), st_der()) +def test_random_der_as_signature(params, der): + """Check if random DER structures are rejected as signature""" + name, verifying_key, _ = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(der, example_data, sigdecode=sigdecode_der) + + +@settings(**slow_params) +@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024**2)) +@example( + keys_and_sigs[0], encode_sequence(encode_integer(0), encode_integer(0)) +) +@example( + keys_and_sigs[0], + encode_sequence(encode_integer(1), encode_integer(1)) + b"\x00", +) +@example(keys_and_sigs[0], encode_sequence(*[encode_integer(1)] * 3)) +def test_random_bytes_as_signature(params, der): + """Check if random bytes are rejected as signature""" + name, verifying_key, _ = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(der, example_data, sigdecode=sigdecode_der) + + +keys_and_string_sigs = [ + ( + name, + verifying_key, + sigencode_string( + *sigdecode_der(sig, verifying_key.curve.order), + order=verifying_key.curve.order + ), + ) + for name, verifying_key, sig in keys_and_sigs + if not isinstance(verifying_key.curve.curve, CurveEdTw) +] +""" +Name of the curve+hash combination, VerifyingKey and signature as a +byte string. 
+""" + + +keys_and_string_sigs += [ + ( + name, + verifying_key, + sig, + ) + for name, verifying_key, sig in keys_and_sigs + if isinstance(verifying_key.curve.curve, CurveEdTw) +] + + +@settings(**slow_params) +@given(st_fuzzed_sig(keys_and_string_sigs)) +def test_fuzzed_string_signatures(params): + verifying_key, sig = params + + with pytest.raises(BadSignatureError): + verifying_key.verify(sig, example_data, sigdecode=sigdecode_string) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_numbertheory.py b/venv/lib/python3.12/site-packages/ecdsa/test_numbertheory.py new file mode 100644 index 0000000..966eca2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_numbertheory.py @@ -0,0 +1,483 @@ +import operator +from functools import reduce +import sys + +try: + import unittest2 as unittest +except ImportError: + import unittest +import hypothesis.strategies as st +import pytest +from hypothesis import given, settings, example + +try: + from hypothesis import HealthCheck + + HC_PRESENT = True +except ImportError: # pragma: no cover + HC_PRESENT = False +from .numbertheory import ( + SquareRootError, + JacobiError, + factorization, + gcd, + lcm, + jacobi, + inverse_mod, + is_prime, + next_prime, + smallprimes, + square_root_mod_prime, +) + +try: + from gmpy2 import mpz +except ImportError: + try: + from gmpy import mpz + except ImportError: + + def mpz(x): + return x + + +BIGPRIMES = ( + 999671, + 999683, + 999721, + 999727, + 999749, + 999763, + 999769, + 999773, + 999809, + 999853, + 999863, + 999883, + 999907, + 999917, + 999931, + 999953, + 999959, + 999961, + 999979, + 999983, +) + + +@pytest.mark.parametrize( + "prime, next_p", [(p, q) for p, q in zip(BIGPRIMES[:-1], BIGPRIMES[1:])] +) +def test_next_prime(prime, next_p): + assert next_prime(prime) == next_p + + +@pytest.mark.parametrize("val", [-1, 0, 1]) +def test_next_prime_with_nums_less_2(val): + assert next_prime(val) == 2 + + +@pytest.mark.slow +@pytest.mark.parametrize("prime", smallprimes) +def test_square_root_mod_prime_for_small_primes(prime): + squares = set() + for num in range(0, 1 + prime // 2): + sq = num * num % prime + squares.add(sq) + root = square_root_mod_prime(sq, prime) + # tested for real with TestNumbertheory.test_square_root_mod_prime + assert root * root % prime == sq + + for nonsquare in range(0, prime): + if nonsquare in squares: + continue + with pytest.raises(SquareRootError): + square_root_mod_prime(nonsquare, prime) + + +def test_square_root_mod_prime_for_2(): + a = square_root_mod_prime(1, 2) + assert a == 1 + + +def test_square_root_mod_prime_for_small_prime(): + root = square_root_mod_prime(98**2 % 101, 101) + assert root * root % 101 == 9 + + +def test_square_root_mod_prime_for_p_congruent_5(): + p = 13 + assert p % 8 == 5 + + root = square_root_mod_prime(3, p) + assert root * root % p == 3 + + +def test_square_root_mod_prime_for_p_congruent_5_large_d(): + p = 29 + assert p % 8 == 5 + + root = square_root_mod_prime(4, p) + assert root * root % p == 4 + + +class TestSquareRootModPrime(unittest.TestCase): + def test_power_of_2_p(self): + with self.assertRaises(JacobiError): + square_root_mod_prime(12, 32) + + def test_no_square(self): + with self.assertRaises(SquareRootError) as e: + square_root_mod_prime(12, 31) + + self.assertIn("no square root", str(e.exception)) + + def test_non_prime(self): + with self.assertRaises(SquareRootError) as e: + square_root_mod_prime(12, 33) + + self.assertIn("p is not prime", str(e.exception)) + + def test_non_prime_with_negative(self): + with 
self.assertRaises(SquareRootError) as e: + square_root_mod_prime(697 - 1, 697) + + self.assertIn("p is not prime", str(e.exception)) + + +@st.composite +def st_two_nums_rel_prime(draw): + # 521-bit is the biggest curve we operate on, use 1024 for a bit + # of breathing space + mod = draw(st.integers(min_value=2, max_value=2**1024)) + num = draw( + st.integers(min_value=1, max_value=mod - 1).filter( + lambda x: gcd(x, mod) == 1 + ) + ) + return num, mod + + +@st.composite +def st_primes(draw, *args, **kwargs): + if "min_value" not in kwargs: # pragma: no branch + kwargs["min_value"] = 1 + prime = draw( + st.sampled_from(smallprimes) + | st.integers(*args, **kwargs).filter(is_prime) + ) + return prime + + +@st.composite +def st_num_square_prime(draw): + prime = draw(st_primes(max_value=2**1024)) + num = draw(st.integers(min_value=0, max_value=1 + prime // 2)) + sq = num * num % prime + return sq, prime + + +@st.composite +def st_comp_with_com_fac(draw): + """ + Strategy that returns lists of numbers, all having a common factor. + """ + primes = draw( + st.lists(st_primes(max_value=2**512), min_size=1, max_size=10) + ) + # select random prime(s) that will make the common factor of composites + com_fac_primes = draw( + st.lists(st.sampled_from(primes), min_size=1, max_size=20) + ) + com_fac = reduce(operator.mul, com_fac_primes, 1) + + # select at most 20 lists (returned numbers), + # each having at most 30 primes (factors) including none (then the number + # will be 1) + comp_primes = draw( # pragma: no branch + st.integers(min_value=1, max_value=20).flatmap( + lambda n: st.lists( + st.lists(st.sampled_from(primes), max_size=30), + min_size=1, + max_size=n, + ) + ) + ) + + return [reduce(operator.mul, nums, 1) * com_fac for nums in comp_primes] + + +@st.composite +def st_comp_no_com_fac(draw): + """ + Strategy that returns lists of numbers that don't have a common factor. 
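+
+    (Illustrative example: a draw could produce [15, 49] -- the gcd of
+    the whole list is 1 even though no element equals 1.)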
+ """ + primes = draw( + st.lists( + st_primes(max_value=2**512), min_size=2, max_size=10, unique=True + ) + ) + # first select the primes that will create the uncommon factor + # between returned numbers + uncom_fac_primes = draw( + st.lists( + st.sampled_from(primes), + min_size=1, + max_size=len(primes) - 1, + unique=True, + ) + ) + uncom_fac = reduce(operator.mul, uncom_fac_primes, 1) + + # then build composites from leftover primes + leftover_primes = [i for i in primes if i not in uncom_fac_primes] + + assert leftover_primes + assert uncom_fac_primes + + # select at most 20 lists, each having at most 30 primes + # selected from the leftover_primes list + number_primes = draw( # pragma: no branch + st.integers(min_value=1, max_value=20).flatmap( + lambda n: st.lists( + st.lists(st.sampled_from(leftover_primes), max_size=30), + min_size=1, + max_size=n, + ) + ) + ) + + numbers = [reduce(operator.mul, nums, 1) for nums in number_primes] + + insert_at = draw(st.integers(min_value=0, max_value=len(numbers))) + numbers.insert(insert_at, uncom_fac) + return numbers + + +HYP_SETTINGS = {} +if HC_PRESENT: # pragma: no branch + HYP_SETTINGS["suppress_health_check"] = [ + HealthCheck.filter_too_much, + HealthCheck.too_slow, + ] + # the factorization() sometimes takes a long time to finish + HYP_SETTINGS["deadline"] = 5000 + +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 20 + + +HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) +if "--fast" in sys.argv: # pragma: no cover + HYP_SLOW_SETTINGS["max_examples"] = 1 +else: + HYP_SLOW_SETTINGS["max_examples"] = 20 + + +class TestIsPrime(unittest.TestCase): + def test_very_small_prime(self): + assert is_prime(23) + + def test_very_small_composite(self): + assert not is_prime(22) + + def test_small_prime(self): + assert is_prime(123456791) + + def test_special_composite(self): + assert not is_prime(10261) + + def test_medium_prime_1(self): + # nextPrime[2^256] + assert is_prime(2**256 + 0x129) + + def test_medium_prime_2(self): + # nextPrime(2^256+0x129) + assert is_prime(2**256 + 0x12D) + + def test_medium_trivial_composite(self): + assert not is_prime(2**256 + 0x130) + + def test_medium_non_trivial_composite(self): + assert not is_prime(2**256 + 0x12F) + + def test_large_prime(self): + # nextPrime[2^2048] + assert is_prime(mpz(2) ** 2048 + 0x3D5) + + def test_pseudoprime_base_19(self): + assert not is_prime(1543267864443420616877677640751301) + + def test_pseudoprime_base_300(self): + # F. Arnault "Constructing Carmichael Numbers Which Are Strong + # Pseudoprimes to Several Bases". Journal of Symbolic + # Computation. 20 (2): 151-161. doi:10.1006/jsco.1995.1042. 
+ # Section 4.4 Large Example (a pseudoprime to all bases up to + # 300) + p = int( + "29 674 495 668 685 510 550 154 174 642 905 332 730 " + "771 991 799 853 043 350 995 075 531 276 838 753 171 " + "770 199 594 238 596 428 121 188 033 664 754 218 345 " + "562 493 168 782 883".replace(" ", "") + ) + + assert is_prime(p) + for _ in range(10): + if not is_prime(p * (313 * (p - 1) + 1) * (353 * (p - 1) + 1)): + break + else: + assert False, "composite not detected" + + +class TestNumbertheory(unittest.TestCase): + def test_gcd(self): + assert gcd(3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13) == 3 * 5 + assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5 + assert gcd(3) == 3 + + @unittest.skipUnless( + HC_PRESENT, + "Hypothesis 2.0.0 can't be made tolerant of hard to " + "meet requirements (like `is_prime()`), the test " + "case times-out on it", + ) + @settings(**HYP_SLOW_SETTINGS) + @example([877 * 1151, 877 * 1009]) + @given(st_comp_with_com_fac()) + def test_gcd_with_com_factor(self, numbers): + n = gcd(numbers) + assert 1 in numbers or n != 1 + for i in numbers: + assert i % n == 0 + + @unittest.skipUnless( + HC_PRESENT, + "Hypothesis 2.0.0 can't be made tolerant of hard to " + "meet requirements (like `is_prime()`), the test " + "case times-out on it", + ) + @settings(**HYP_SLOW_SETTINGS) + @example([1151, 1069, 1009]) + @given(st_comp_no_com_fac()) + def test_gcd_with_uncom_factor(self, numbers): + n = gcd(numbers) + assert n == 1 + + @settings(**HYP_SLOW_SETTINGS) + @given( + st.lists( + st.integers(min_value=1, max_value=2**8192), + min_size=1, + max_size=20, + ) + ) + def test_gcd_with_random_numbers(self, numbers): + n = gcd(numbers) + for i in numbers: + # check that at least it's a divider + assert i % n == 0 + + def test_lcm(self): + assert lcm(3, 5 * 3, 7 * 3) == 3 * 5 * 7 + assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7 + assert lcm(3) == 3 + + @settings(**HYP_SLOW_SETTINGS) + @given( + st.lists( + st.integers(min_value=1, max_value=2**8192), + min_size=1, + max_size=20, + ) + ) + def test_lcm_with_random_numbers(self, numbers): + n = lcm(numbers) + for i in numbers: + assert n % i == 0 + + @unittest.skipUnless( + HC_PRESENT, + "Hypothesis 2.0.0 can't be made tolerant of hard to " + "meet requirements (like `is_prime()`), the test " + "case times-out on it", + ) + @settings(**HYP_SLOW_SETTINGS) + @given(st_num_square_prime()) + def test_square_root_mod_prime(self, vals): + square, prime = vals + + calc = square_root_mod_prime(square, prime) + assert calc * calc % prime == square + + @pytest.mark.slow + @settings(**HYP_SLOW_SETTINGS) + @given(st.integers(min_value=1, max_value=10**12)) + @example(265399 * 1526929) + @example(373297**2 * 553991) + def test_factorization(self, num): + factors = factorization(num) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == num + + def test_factorisation_smallprimes(self): + exp = 101 * 103 + assert 101 in smallprimes + assert 103 in smallprimes + factors = factorization(exp) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == exp + + def test_factorisation_not_smallprimes(self): + exp = 1231 * 1237 + assert 1231 not in smallprimes + assert 1237 not in smallprimes + factors = factorization(exp) + mult = 1 + for i in factors: + mult *= i[0] ** i[1] + assert mult == exp + + def test_jacobi_with_zero(self): + assert jacobi(0, 3) == 0 + + def test_jacobi_with_one(self): + assert jacobi(1, 3) == 1 + + @settings(**HYP_SLOW_SETTINGS) + @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2)) + def 
test_jacobi(self, mod): + mod = mpz(mod) + if is_prime(mod): + squares = set() + for root in range(1, mod): + root = mpz(root) + assert jacobi(root * root, mod) == 1 + squares.add(root * root % mod) + for i in range(1, mod): + if i not in squares: + i = mpz(i) + assert jacobi(i, mod) == -1 + else: + factors = factorization(mod) + for a in range(1, mod): + c = 1 + for i in factors: + c *= jacobi(a, i[0]) ** i[1] + assert c == jacobi(a, mod) + + @settings(**HYP_SLOW_SETTINGS) + @given(st_two_nums_rel_prime()) + def test_inverse_mod(self, nums): + num, mod = nums + + inv = inverse_mod(num, mod) + + assert 0 < inv < mod + assert num * inv % mod == 1 + + def test_inverse_mod_with_zero(self): + assert 0 == inverse_mod(0, 11) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_pyecdsa.py b/venv/lib/python3.12/site-packages/ecdsa/test_pyecdsa.py new file mode 100644 index 0000000..799e9b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_pyecdsa.py @@ -0,0 +1,2564 @@ +from __future__ import with_statement, division, print_function + +try: + import unittest2 as unittest +except ImportError: + import unittest +import os +import shutil +import subprocess +import pytest +import sys +from binascii import hexlify, unhexlify +import hashlib +from functools import partial + +from hypothesis import given, settings +import hypothesis.strategies as st + +from six import binary_type +from .keys import SigningKey, VerifyingKey +from .keys import BadSignatureError, MalformedPointError, BadDigestError +from . import util +from .util import ( + sigencode_der, + sigencode_strings, + sigencode_strings_canonize, + sigencode_string_canonize, + sigencode_der_canonize, +) +from .util import sigdecode_der, sigdecode_strings, sigdecode_string +from .util import number_to_string, encoded_oid_ecPublicKey, MalformedSignature +from .curves import Curve, UnknownCurveError +from .curves import ( + SECP112r1, + SECP112r2, + SECP128r1, + SECP160r1, + NIST192p, + NIST224p, + NIST256p, + NIST384p, + NIST521p, + SECP256k1, + BRAINPOOLP160r1, + BRAINPOOLP192r1, + BRAINPOOLP224r1, + BRAINPOOLP256r1, + BRAINPOOLP320r1, + BRAINPOOLP384r1, + BRAINPOOLP512r1, + BRAINPOOLP160t1, + BRAINPOOLP192t1, + BRAINPOOLP224t1, + BRAINPOOLP256t1, + BRAINPOOLP320t1, + BRAINPOOLP384t1, + BRAINPOOLP512t1, + Ed25519, + Ed448, + curves, +) +from .ecdsa import ( + curve_brainpoolp224r1, + curve_brainpoolp256r1, + curve_brainpoolp384r1, + curve_brainpoolp512r1, +) +from .ellipticcurve import Point +from . import der +from . import rfc6979 +from . 
import ecdsa + + +class SubprocessError(Exception): + pass + + +HYP_SETTINGS = {} + + +if "--fast" in sys.argv: # pragma: no cover + HYP_SETTINGS["max_examples"] = 2 + + +def run_openssl(cmd): + OPENSSL = "openssl" + p = subprocess.Popen( + [OPENSSL] + cmd.split(), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + stdout, ignored = p.communicate() + if p.returncode != 0: + raise SubprocessError( + "cmd '%s %s' failed: rc=%s, stdout/err was %s" + % (OPENSSL, cmd, p.returncode, stdout) + ) + return stdout.decode() + + +class ECDSA(unittest.TestCase): + def test_basic(self): + priv = SigningKey.generate() + pub = priv.get_verifying_key() + + data = b"blahblah" + sig = priv.sign(data) + + self.assertTrue(pub.verify(sig, data)) + self.assertRaises(BadSignatureError, pub.verify, sig, data + b"bad") + + pub2 = VerifyingKey.from_string(pub.to_string()) + self.assertTrue(pub2.verify(sig, data)) + + def test_deterministic(self): + data = b"blahblah" + secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16) + + priv = SigningKey.from_secret_exponent( + secexp, SECP256k1, hashlib.sha256 + ) + pub = priv.get_verifying_key() + + k = rfc6979.generate_k( + SECP256k1.generator.order(), + secexp, + hashlib.sha256, + hashlib.sha256(data).digest(), + ) + + sig1 = priv.sign(data, k=k) + self.assertTrue(pub.verify(sig1, data)) + + sig2 = priv.sign(data, k=k) + self.assertTrue(pub.verify(sig2, data)) + + sig3 = priv.sign_deterministic(data, hashlib.sha256) + self.assertTrue(pub.verify(sig3, data)) + + self.assertEqual(sig1, sig2) + self.assertEqual(sig1, sig3) + + def test_bad_usage(self): + # sk=SigningKey() is wrong + self.assertRaises(TypeError, SigningKey) + self.assertRaises(TypeError, VerifyingKey) + + def test_lengths_default(self): + default = NIST192p + priv = SigningKey.generate() + pub = priv.get_verifying_key() + self.assertEqual(len(pub.to_string()), default.verifying_key_length) + sig = priv.sign(b"data") + self.assertEqual(len(sig), default.signature_length) + + def test_serialize(self): + seed = b"secret" + curve = NIST192p + secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order) + secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order) + self.assertEqual(secexp1, secexp2) + priv1 = SigningKey.from_secret_exponent(secexp1, curve) + priv2 = SigningKey.from_secret_exponent(secexp2, curve) + self.assertEqual( + hexlify(priv1.to_string()), hexlify(priv2.to_string()) + ) + self.assertEqual(priv1.to_pem(), priv2.to_pem()) + pub1 = priv1.get_verifying_key() + pub2 = priv2.get_verifying_key() + data = b"data" + sig1 = priv1.sign(data) + sig2 = priv2.sign(data) + self.assertTrue(pub1.verify(sig1, data)) + self.assertTrue(pub2.verify(sig1, data)) + self.assertTrue(pub1.verify(sig2, data)) + self.assertTrue(pub2.verify(sig2, data)) + self.assertEqual(hexlify(pub1.to_string()), hexlify(pub2.to_string())) + + def test_nonrandom(self): + s = b"all the entropy in the entire world, compressed into one line" + + def not_much_entropy(numbytes): + return s[:numbytes] + + # we control the entropy source, these two keys should be identical: + priv1 = SigningKey.generate(entropy=not_much_entropy) + priv2 = SigningKey.generate(entropy=not_much_entropy) + self.assertEqual( + hexlify(priv1.get_verifying_key().to_string()), + hexlify(priv2.get_verifying_key().to_string()), + ) + # likewise, signatures should be identical. 
Obviously you'd never + # want to do this with keys you care about, because the secrecy of + # the private key depends upon using different random numbers for + # each signature + sig1 = priv1.sign(b"data", entropy=not_much_entropy) + sig2 = priv2.sign(b"data", entropy=not_much_entropy) + self.assertEqual(hexlify(sig1), hexlify(sig2)) + + def assertTruePrivkeysEqual(self, priv1, priv2): + self.assertEqual( + priv1.privkey.secret_multiplier, priv2.privkey.secret_multiplier + ) + self.assertEqual( + priv1.privkey.public_key.generator, + priv2.privkey.public_key.generator, + ) + + def test_privkey_creation(self): + s = b"all the entropy in the entire world, compressed into one line" + + def not_much_entropy(numbytes): + return s[:numbytes] + + priv1 = SigningKey.generate() + self.assertEqual(priv1.baselen, NIST192p.baselen) + + priv1 = SigningKey.generate(curve=NIST224p) + self.assertEqual(priv1.baselen, NIST224p.baselen) + + priv1 = SigningKey.generate(entropy=not_much_entropy) + self.assertEqual(priv1.baselen, NIST192p.baselen) + priv2 = SigningKey.generate(entropy=not_much_entropy) + self.assertEqual(priv2.baselen, NIST192p.baselen) + self.assertTruePrivkeysEqual(priv1, priv2) + + priv1 = SigningKey.from_secret_exponent(secexp=3) + self.assertEqual(priv1.baselen, NIST192p.baselen) + priv2 = SigningKey.from_secret_exponent(secexp=3) + self.assertTruePrivkeysEqual(priv1, priv2) + + priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p) + self.assertEqual(priv1.baselen, NIST224p.baselen) + + def test_privkey_strings(self): + priv1 = SigningKey.generate() + s1 = priv1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), NIST192p.baselen) + priv2 = SigningKey.from_string(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_pem() + self.assertEqual(type(s1), binary_type) + self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) + self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) + priv2 = SigningKey.from_pem(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_der() + self.assertEqual(type(s1), binary_type) + priv2 = SigningKey.from_der(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + priv1 = SigningKey.generate(curve=NIST256p) + s1 = priv1.to_pem() + self.assertEqual(type(s1), binary_type) + self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) + self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) + priv2 = SigningKey.from_pem(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_der() + self.assertEqual(type(s1), binary_type) + priv2 = SigningKey.from_der(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + def test_privkey_strings_brainpool(self): + priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) + s1 = priv1.to_pem() + self.assertEqual(type(s1), binary_type) + self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) + self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) + priv2 = SigningKey.from_pem(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + s1 = priv1.to_der() + self.assertEqual(type(s1), binary_type) + priv2 = SigningKey.from_der(s1) + self.assertTruePrivkeysEqual(priv1, priv2) + + def assertTruePubkeysEqual(self, pub1, pub2): + self.assertEqual(pub1.pubkey.point, pub2.pubkey.point) + self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator) + self.assertEqual(pub1.curve, pub2.curve) + + def test_pubkey_strings(self): + priv1 = SigningKey.generate() + pub1 = priv1.get_verifying_key() + s1 = 
pub1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), NIST192p.verifying_key_length) + pub2 = VerifyingKey.from_string(s1) + self.assertTruePubkeysEqual(pub1, pub2) + + priv1 = SigningKey.generate(curve=NIST256p) + pub1 = priv1.get_verifying_key() + s1 = pub1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), NIST256p.verifying_key_length) + pub2 = VerifyingKey.from_string(s1, curve=NIST256p) + self.assertTruePubkeysEqual(pub1, pub2) + + pub1_der = pub1.to_der() + self.assertEqual(type(pub1_der), binary_type) + pub2 = VerifyingKey.from_der(pub1_der) + self.assertTruePubkeysEqual(pub1, pub2) + + self.assertRaises( + der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b"junk" + ) + badpub = VerifyingKey.from_der(pub1_der) + + class FakeGenerator: + def order(self): + return 123456789 + + class FakeCurveFp: + def p(self): + return int( + "6525534529039240705020950546962731340" + "4541085228058844382513856749047873406763" + ) + + badcurve = Curve( + "unknown", FakeCurveFp(), FakeGenerator(), (1, 2, 3, 4, 5, 6), None + ) + badpub.curve = badcurve + badder = badpub.to_der() + self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder) + + pem = pub1.to_pem() + self.assertEqual(type(pem), binary_type) + self.assertTrue(pem.startswith(b"-----BEGIN PUBLIC KEY-----"), pem) + self.assertTrue(pem.strip().endswith(b"-----END PUBLIC KEY-----"), pem) + pub2 = VerifyingKey.from_pem(pem) + self.assertTruePubkeysEqual(pub1, pub2) + + def test_pubkey_strings_brainpool(self): + priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) + pub1 = priv1.get_verifying_key() + s1 = pub1.to_string() + self.assertEqual(type(s1), binary_type) + self.assertEqual(len(s1), BRAINPOOLP512r1.verifying_key_length) + pub2 = VerifyingKey.from_string(s1, curve=BRAINPOOLP512r1) + self.assertTruePubkeysEqual(pub1, pub2) + + pub1_der = pub1.to_der() + self.assertEqual(type(pub1_der), binary_type) + pub2 = VerifyingKey.from_der(pub1_der) + self.assertTruePubkeysEqual(pub1, pub2) + + def test_vk_to_der_with_invalid_point_encoding(self): + sk = SigningKey.generate() + vk = sk.verifying_key + + with self.assertRaises(ValueError): + vk.to_der("raw") + + def test_sk_to_der_with_invalid_point_encoding(self): + sk = SigningKey.generate() + + with self.assertRaises(ValueError): + sk.to_der("raw") + + def test_vk_from_der_garbage_after_curve_oid(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = ( + der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b"garbage" + ) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x00\xff", None) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_invalid_key_type(self): + type_oid_der = der.encode_oid(*(1, 2, 3)) + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x00\xff", None) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_garbage_after_point_string(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x00\xff", None) + b"garbage" + to_decode = 
der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_invalid_bitstring(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\x08\xff", None) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_with_invalid_length_of_encoding(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\xff" * 64, 0) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_der(to_decode) + + def test_vk_from_der_with_raw_encoding(self): + type_oid_der = encoded_oid_ecPublicKey + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) + point_der = der.encode_bitstring(b"\xff" * 48, 0) + to_decode = der.encode_sequence(enc_type_der, point_der) + + with self.assertRaises(der.UnexpectedDER): + VerifyingKey.from_der(to_decode) + + def test_signature_strings(self): + priv1 = SigningKey.generate() + pub1 = priv1.get_verifying_key() + data = b"data" + + sig = priv1.sign(data) + self.assertEqual(type(sig), binary_type) + self.assertEqual(len(sig), NIST192p.signature_length) + self.assertTrue(pub1.verify(sig, data)) + + sig = priv1.sign(data, sigencode=sigencode_strings) + self.assertEqual(type(sig), tuple) + self.assertEqual(len(sig), 2) + self.assertEqual(type(sig[0]), binary_type) + self.assertEqual(type(sig[1]), binary_type) + self.assertEqual(len(sig[0]), NIST192p.baselen) + self.assertEqual(len(sig[1]), NIST192p.baselen) + self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings)) + + sig_der = priv1.sign(data, sigencode=sigencode_der) + self.assertEqual(type(sig_der), binary_type) + self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der)) + + def test_sigencode_string_canonize_no_change(self): + r = 12 + s = 400 + order = SECP112r1.order + + new_r, new_s = sigdecode_string( + sigencode_string_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(s, new_s) + + def test_sigencode_string_canonize(self): + r = 12 + order = SECP112r1.order + s = order - 10 + + new_r, new_s = sigdecode_string( + sigencode_string_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sigencode_strings_canonize_no_change(self): + r = 12 + s = 400 + order = SECP112r1.order + + new_r, new_s = sigdecode_strings( + sigencode_strings_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(s, new_s) + + def test_sigencode_strings_canonize(self): + r = 12 + order = SECP112r1.order + s = order - 10 + + new_r, new_s = sigdecode_strings( + sigencode_strings_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sigencode_der_canonize_no_change(self): + r = 13 + s = 200 + order = SECP112r1.order + + new_r, new_s = sigdecode_der( + sigencode_der_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(s, new_s) + + def test_sigencode_der_canonize(self): + r = 13 + order = 
SECP112r1.order + s = order - 14 + + new_r, new_s = sigdecode_der( + sigencode_der_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sigencode_der_canonize_with_close_to_half_order(self): + r = 13 + order = SECP112r1.order + s = order // 2 + 1 + + regular_encode = sigencode_der(r, s, order) + canonical_encode = sigencode_der_canonize(r, s, order) + + self.assertNotEqual(regular_encode, canonical_encode) + + new_r, new_s = sigdecode_der( + sigencode_der_canonize(r, s, order), order + ) + + self.assertEqual(r, new_r) + self.assertEqual(order - s, new_s) + + def test_sig_decode_strings_with_invalid_count(self): + with self.assertRaises(MalformedSignature): + sigdecode_strings([b"one", b"two", b"three"], 0xFF) + + def test_sig_decode_strings_with_wrong_r_len(self): + with self.assertRaises(MalformedSignature): + sigdecode_strings([b"one", b"two"], 0xFF) + + def test_sig_decode_strings_with_wrong_s_len(self): + with self.assertRaises(MalformedSignature): + sigdecode_strings([b"\xa0", b"\xb0\xff"], 0xFF) + + def test_verify_with_too_long_input(self): + sk = SigningKey.generate() + vk = sk.verifying_key + + with self.assertRaises(BadDigestError): + vk.verify_digest(None, b"\x00" * 128) + + def test_sk_from_secret_exponent_with_wrong_sec_exponent(self): + with self.assertRaises(MalformedPointError): + SigningKey.from_secret_exponent(0) + + def test_sk_from_string_with_wrong_len_string(self): + with self.assertRaises(MalformedPointError): + SigningKey.from_string(b"\x01") + + def test_sk_from_der_with_junk_after_sequence(self): + ver_der = der.encode_integer(1) + to_decode = der.encode_sequence(ver_der) + b"garbage" + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_with_wrong_version(self): + ver_der = der.encode_integer(0) + to_decode = der.encode_sequence(ver_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_invalid_const_tag(self): + ver_der = der.encode_integer(1) + privkey_der = der.encode_octet_string(b"\x00\xff") + curve_oid_der = der.encode_oid(*(1, 2, 3)) + const_der = der.encode_constructed(1, curve_oid_der) + to_decode = der.encode_sequence( + ver_der, privkey_der, const_der, curve_oid_der + ) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_garbage_after_privkey_oid(self): + ver_der = der.encode_integer(1) + privkey_der = der.encode_octet_string(b"\x00\xff") + curve_oid_der = der.encode_oid(*(1, 2, 3)) + b"garbage" + const_der = der.encode_constructed(0, curve_oid_der) + to_decode = der.encode_sequence( + ver_der, privkey_der, const_der, curve_oid_der + ) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_der_with_short_privkey(self): + ver_der = der.encode_integer(1) + privkey_der = der.encode_octet_string(b"\x00\xff") + curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + const_der = der.encode_constructed(0, curve_oid_der) + to_decode = der.encode_sequence( + ver_der, privkey_der, const_der, curve_oid_der + ) + + sk = SigningKey.from_der(to_decode) + self.assertEqual(sk.privkey.secret_multiplier, 255) + + def test_sk_from_p8_der_with_wrong_version(self): + ver_der = der.encode_integer(2) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 840, 10045, 2, 1), + der.encode_oid(1, 2, 840, 10045, 3, 1, 1), + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + 
der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + ) + to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_p8_der_with_wrong_algorithm(self): + ver_der = der.encode_integer(1) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 3), der.encode_oid(1, 2, 840, 10045, 3, 1, 1) + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + ) + to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_p8_der_with_trailing_junk_after_algorithm(self): + ver_der = der.encode_integer(1) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 840, 10045, 2, 1), + der.encode_oid(1, 2, 840, 10045, 3, 1, 1), + der.encode_octet_string(b"junk"), + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + ) + to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sk_from_p8_der_with_trailing_junk_after_key(self): + ver_der = der.encode_integer(1) + algorithm_der = der.encode_sequence( + der.encode_oid(1, 2, 840, 10045, 2, 1), + der.encode_oid(1, 2, 840, 10045, 3, 1, 1), + ) + privkey_der = der.encode_octet_string( + der.encode_sequence( + der.encode_integer(1), der.encode_octet_string(b"\x00\xff") + ) + + der.encode_integer(999) + ) + to_decode = der.encode_sequence( + ver_der, + algorithm_der, + privkey_der, + der.encode_octet_string(b"junk"), + ) + + with self.assertRaises(der.UnexpectedDER): + SigningKey.from_der(to_decode) + + def test_sign_with_too_long_hash(self): + sk = SigningKey.from_secret_exponent(12) + + with self.assertRaises(BadDigestError): + sk.sign_digest(b"\xff" * 64) + + def test_hashfunc(self): + sk = SigningKey.generate(curve=NIST256p, hashfunc=hashlib.sha256) + data = b"security level is 128 bits" + sig = sk.sign(data) + vk = VerifyingKey.from_string( + sk.get_verifying_key().to_string(), + curve=NIST256p, + hashfunc=hashlib.sha256, + ) + self.assertTrue(vk.verify(sig, data)) + + sk2 = SigningKey.generate(curve=NIST256p) + sig2 = sk2.sign(data, hashfunc=hashlib.sha256) + vk2 = VerifyingKey.from_string( + sk2.get_verifying_key().to_string(), + curve=NIST256p, + hashfunc=hashlib.sha256, + ) + self.assertTrue(vk2.verify(sig2, data)) + + vk3 = VerifyingKey.from_string( + sk.get_verifying_key().to_string(), curve=NIST256p + ) + self.assertTrue(vk3.verify(sig, data, hashfunc=hashlib.sha256)) + + def test_public_key_recovery(self): + # Create keys + curve = BRAINPOOLP160r1 + + sk = SigningKey.generate(curve=curve) + vk = sk.get_verifying_key() + + # Sign a message + data = b"blahblah" + signature = sk.sign(data) + + # Recover verifying keys + recovered_vks = VerifyingKey.from_public_key_recovery( + signature, data, curve + ) + + # Test if each pk is valid + for recovered_vk in recovered_vks: + # Test if recovered vk is valid for the data + self.assertTrue(recovered_vk.verify(signature, data)) + + # Test if properties are equal + self.assertEqual(vk.curve, recovered_vk.curve) + self.assertEqual( + vk.default_hashfunc, recovered_vk.default_hashfunc + ) + + # Test if original vk is the list of recovered keys + self.assertIn( + vk.pubkey.point, + [recovered_vk.pubkey.point for 
recovered_vk in recovered_vks], + ) + + def test_public_key_recovery_with_custom_hash(self): + # Create keys + curve = BRAINPOOLP160r1 + + sk = SigningKey.generate(curve=curve, hashfunc=hashlib.sha256) + vk = sk.get_verifying_key() + + # Sign a message + data = b"blahblah" + signature = sk.sign(data) + + # Recover verifying keys + recovered_vks = VerifyingKey.from_public_key_recovery( + signature, + data, + curve, + hashfunc=hashlib.sha256, + allow_truncate=True, + ) + + # Test if each pk is valid + for recovered_vk in recovered_vks: + # Test if recovered vk is valid for the data + self.assertTrue(recovered_vk.verify(signature, data)) + + # Test if properties are equal + self.assertEqual(vk.curve, recovered_vk.curve) + self.assertEqual(hashlib.sha256, recovered_vk.default_hashfunc) + + # Test if original vk is the list of recovered keys + self.assertIn( + vk.pubkey.point, + [recovered_vk.pubkey.point for recovered_vk in recovered_vks], + ) + + def test_encoding(self): + sk = SigningKey.from_secret_exponent(123456789) + vk = sk.verifying_key + + exp = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + self.assertEqual(vk.to_string(), exp) + self.assertEqual(vk.to_string("raw"), exp) + self.assertEqual(vk.to_string("uncompressed"), b"\x04" + exp) + self.assertEqual(vk.to_string("compressed"), b"\x02" + exp[:24]) + self.assertEqual(vk.to_string("hybrid"), b"\x06" + exp) + + def test_decoding(self): + sk = SigningKey.from_secret_exponent(123456789) + vk = sk.verifying_key + + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + from_raw = VerifyingKey.from_string(enc) + self.assertEqual(from_raw.pubkey.point, vk.pubkey.point) + + from_uncompressed = VerifyingKey.from_string(b"\x04" + enc) + self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) + + from_compressed = VerifyingKey.from_string(b"\x02" + enc[:24]) + self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point) + + from_uncompressed = VerifyingKey.from_string(b"\x06" + enc) + self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) + + def test_uncompressed_decoding_as_only_alowed(self): + enc = ( + b"\x04" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + vk = VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) + sk = SigningKey.from_secret_exponent(123456789) + + self.assertEqual(vk, sk.verifying_key) + + def test_raw_decoding_with_blocked_format(self): + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + self.assertIn("hybrid", str(exp.exception)) + + def test_decoding_with_unknown_format(self): + with self.assertRaises(ValueError) as e: + VerifyingKey.from_string(b"", valid_encodings=("raw", "foobar")) + + self.assertIn("Only uncompressed, compressed", str(e.exception)) + + def test_uncompressed_decoding_with_blocked_format(self): + enc = ( + b"\x04" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + 
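+            # X9.62 point-encoding prefixes exercised throughout these tests:
+            # 0x02/0x03 = compressed (even/odd y), 0x04 = uncompressed,
+            # 0x06/0x07 = hybrid (uncompressed coordinates plus the
+            # compressed-style parity in the prefix)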
b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + self.assertIn("Invalid X9.62 encoding", str(exp.exception)) + + def test_hybrid_decoding_with_blocked_format(self): + enc = ( + b"\x06" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) + + self.assertIn("Invalid X9.62 encoding", str(exp.exception)) + + def test_hybrid_decoding_with_inconsistent_encoding_and_no_validation( + self, + ): + sk = SigningKey.from_secret_exponent(123456789) + vk = sk.verifying_key + + enc = vk.to_string("hybrid") + self.assertEqual(enc[:1], b"\x06") + enc = b"\x07" + enc[1:] + + b = VerifyingKey.from_string( + enc, valid_encodings=("hybrid",), validate_point=False + ) + + self.assertEqual(vk, b) + + def test_compressed_decoding_with_blocked_format(self): + enc = ( + b"\x02" + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + )[:25] + with self.assertRaises(MalformedPointError) as exp: + VerifyingKey.from_string(enc, valid_encodings=("hybrid", "raw")) + + self.assertIn("(hybrid, raw)", str(exp.exception)) + + def test_decoding_with_malformed_uncompressed(self): + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x02" + enc) + + def test_decoding_with_malformed_compressed(self): + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x01" + enc[:24]) + + def test_decoding_with_inconsistent_hybrid(self): + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x07" + enc) + + def test_decoding_with_inconsistent_hybrid_odd_point(self): + sk = SigningKey.from_secret_exponent(123456791) + vk = sk.verifying_key + + enc = vk.to_string("hybrid") + self.assertEqual(enc[:1], b"\x07") + enc = b"\x06" + enc[1:] + + with self.assertRaises(MalformedPointError): + b = VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) + + def test_decoding_with_point_not_on_curve(self): + enc = ( + b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" + b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" + b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" + ) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(enc[:47] + b"\x00") + + def test_decoding_with_point_at_infinity(self): + # decoding it is unsupported, as it's not necessary to encode it + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x00") + + def test_not_lying_on_curve(self): + enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p() + 1) + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x02" + enc) + + def 
test_from_string_with_invalid_curve_too_short_ver_key_len(self): + # both verifying_key_length and baselen are calculated internally + # by the Curve constructor, but since we depend on them verify + # that inconsistent values are detected + curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) + curve.verifying_key_length = 16 + curve.baselen = 32 + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x00" * 16, curve) + + def test_from_string_with_invalid_curve_too_long_ver_key_len(self): + # both verifying_key_length and baselen are calculated internally + # by the Curve constructor, but since we depend on them verify + # that inconsistent values are detected + curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) + curve.verifying_key_length = 16 + curve.baselen = 16 + + with self.assertRaises(MalformedPointError): + VerifyingKey.from_string(b"\x00" * 16, curve) + + +@pytest.mark.parametrize( + "val,even", [(i, j) for i in range(256) for j in [True, False]] +) +def test_VerifyingKey_decode_with_small_values(val, even): + enc = number_to_string(val, NIST192p.order) + + if even: + enc = b"\x02" + enc + else: + enc = b"\x03" + enc + + # small values can both be actual valid public keys and not, verify that + # only expected exceptions are raised if they are not + try: + vk = VerifyingKey.from_string(enc) + assert isinstance(vk, VerifyingKey) + except MalformedPointError: + assert True + + +params = [] +for curve in curves: + for enc in ["raw", "uncompressed", "compressed", "hybrid"]: + params.append( + pytest.param(curve, enc, id="{0}-{1}".format(curve.name, enc)) + ) + + +@pytest.mark.parametrize("curve,encoding", params) +def test_VerifyingKey_encode_decode(curve, encoding): + sk = SigningKey.generate(curve=curve) + vk = sk.verifying_key + + encoded = vk.to_string(encoding) + + from_enc = VerifyingKey.from_string(encoded, curve=curve) + + assert vk.pubkey.point == from_enc.pubkey.point + + +if "--fast" in sys.argv: # pragma: no cover + params = [NIST192p, BRAINPOOLP160r1] +else: + params = curves + + +@pytest.mark.parametrize("curve", params) +def test_lengths(curve): + priv = SigningKey.generate(curve=curve) + pub1 = priv.get_verifying_key() + pub2 = VerifyingKey.from_string(pub1.to_string(), curve) + assert pub1.to_string() == pub2.to_string() + assert len(pub1.to_string()) == curve.verifying_key_length + sig = priv.sign(b"data") + assert len(sig) == curve.signature_length + + +@pytest.mark.slow +class OpenSSL(unittest.TestCase): + # test interoperability with OpenSSL tools. Note that openssl's ECDSA + # sign/verify arguments changed between 0.9.8 and 1.0.0: the early + # versions require "-ecdsa-with-SHA1", the later versions want just + # "-SHA1" (or to leave out that argument entirely, which means the + # signature will use some default digest algorithm, probably determined + # by the key, probably always SHA1). + # + # openssl ecparam -name secp224r1 -genkey -out privkey.pem + # openssl ec -in privkey.pem -text -noout # get the priv/pub keys + # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt + # openssl asn1parse -in data.sig -inform DER + # data.sig is 64 bytes, probably 56b plus ASN1 overhead + # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $? 
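+    # The curves the system openssl supports can be listed with:
+    # openssl ecparam -list_curves
+    # (that command is what OPENSSL_SUPPORTED_CURVES below is populated from)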
+ # openssl ec -in privkey.pem -pubout -out pubkey.pem + # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der + + OPENSSL_SUPPORTED_CURVES = set( + c.split(":")[0].strip() + for c in run_openssl("ecparam -list_curves").split("\n") + ) + + def get_openssl_messagedigest_arg(self, hash_name): + v = run_openssl("version") + # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010", + # or "OpenSSL 0.9.8o 01 Jun 2010" + vs = v.split()[1].split(".") + if vs >= ["1", "0", "0"]: # pragma: no cover + return "-{0}".format(hash_name) + else: # pragma: no cover + return "-ecdsa-with-{0}".format(hash_name) + + # sk: 1:OpenSSL->python 2:python->OpenSSL + # vk: 3:OpenSSL->python 4:python->OpenSSL + # sig: 5:OpenSSL->python 6:python->OpenSSL + + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r1", + ) + def test_from_openssl_secp112r1(self): + return self.do_test_from_openssl(SECP112r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r2" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r2", + ) + def test_from_openssl_secp112r2(self): + return self.do_test_from_openssl(SECP112r2) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp128r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp128r1", + ) + def test_from_openssl_secp128r1(self): + return self.do_test_from_openssl(SECP128r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp160r1", + ) + def test_from_openssl_secp160r1(self): + return self.do_test_from_openssl(SECP160r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_from_openssl_nist192p(self): + return self.do_test_from_openssl(NIST192p) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_from_openssl_nist192p_sha256(self): + return self.do_test_from_openssl(NIST192p, "SHA256") + + @pytest.mark.slow + @pytest.mark.skipif( + "secp224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp224r1", + ) + def test_from_openssl_nist224p(self): + return self.do_test_from_openssl(NIST224p) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_from_openssl_nist256p(self): + return self.do_test_from_openssl(NIST256p) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_from_openssl_nist256p_sha384(self): + return self.do_test_from_openssl(NIST256p, "SHA384") + + @pytest.mark.slow + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_from_openssl_nist256p_sha512(self): + return self.do_test_from_openssl(NIST256p, "SHA512") + + @pytest.mark.slow + @pytest.mark.skipif( + "secp384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp384r1", + ) + def test_from_openssl_nist384p(self): + return self.do_test_from_openssl(NIST384p) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp521r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support 
secp521r1", + ) + def test_from_openssl_nist521p(self): + return self.do_test_from_openssl(NIST521p) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp256k1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp256k1", + ) + def test_from_openssl_secp256k1(self): + return self.do_test_from_openssl(SECP256k1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160r1", + ) + def test_from_openssl_brainpoolp160r1(self): + return self.do_test_from_openssl(BRAINPOOLP160r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192r1", + ) + def test_from_openssl_brainpoolp192r1(self): + return self.do_test_from_openssl(BRAINPOOLP192r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224r1", + ) + def test_from_openssl_brainpoolp224r1(self): + return self.do_test_from_openssl(BRAINPOOLP224r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256r1", + ) + def test_from_openssl_brainpoolp256r1(self): + return self.do_test_from_openssl(BRAINPOOLP256r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320r1", + ) + def test_from_openssl_brainpoolp320r1(self): + return self.do_test_from_openssl(BRAINPOOLP320r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384r1", + ) + def test_from_openssl_brainpoolp384r1(self): + return self.do_test_from_openssl(BRAINPOOLP384r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512r1", + ) + def test_from_openssl_brainpoolp512r1(self): + return self.do_test_from_openssl(BRAINPOOLP512r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP160t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160t1", + ) + def test_from_openssl_brainpoolp160t1(self): + return self.do_test_from_openssl(BRAINPOOLP160t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP192t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192t1", + ) + def test_from_openssl_brainpoolp192t1(self): + return self.do_test_from_openssl(BRAINPOOLP192t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP224t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224t1", + ) + def test_from_openssl_brainpoolp224t1(self): + return self.do_test_from_openssl(BRAINPOOLP224t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP256t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256t1", + ) + def test_from_openssl_brainpoolp256t1(self): + return self.do_test_from_openssl(BRAINPOOLP256t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP320t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320t1", + ) + def test_from_openssl_brainpoolp320t1(self): + return self.do_test_from_openssl(BRAINPOOLP320t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP384t1" not in 
OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384t1", + ) + def test_from_openssl_brainpoolp384t1(self): + return self.do_test_from_openssl(BRAINPOOLP384t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP512t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512t1", + ) + def test_from_openssl_brainpoolp512t1(self): + return self.do_test_from_openssl(BRAINPOOLP512t1) + + def do_test_from_openssl(self, curve, hash_name="SHA1"): + curvename = curve.openssl_name + assert curvename + # OpenSSL: create sk, vk, sign. + # Python: read vk(3), checksig(5), read sk(1), sign, check + mdarg = self.get_openssl_messagedigest_arg(hash_name) + if os.path.isdir("t"): # pragma: no cover + shutil.rmtree("t") + os.mkdir("t") + run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename) + run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem") + data = b"data" + with open("t/data.txt", "wb") as e: + e.write(data) + run_openssl( + "dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" + % mdarg + ) + with open("t/pubkey.pem", "rb") as e: + pubkey_pem = e.read() + vk = VerifyingKey.from_pem(pubkey_pem) # 3 + with open("t/data.sig", "rb") as e: + sig_der = e.read() + self.assertTrue( + vk.verify( + sig_der, + data, # 5 + hashfunc=partial(hashlib.new, hash_name), + sigdecode=sigdecode_der, + ) + ) + + with open("t/privkey.pem") as e: + fp = e.read() + sk = SigningKey.from_pem(fp) # 1 + sig = sk.sign(data, hashfunc=partial(hashlib.new, hash_name)) + self.assertTrue( + vk.verify(sig, data, hashfunc=partial(hashlib.new, hash_name)) + ) + + run_openssl( + "pkcs8 -topk8 -nocrypt " + "-in t/privkey.pem -outform pem -out t/privkey-p8.pem" + ) + with open("t/privkey-p8.pem", "rb") as e: + privkey_p8_pem = e.read() + sk_from_p8 = SigningKey.from_pem(privkey_p8_pem) + self.assertEqual(sk, sk_from_p8) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r1", + ) + def test_to_openssl_secp112r1(self): + self.do_test_to_openssl(SECP112r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp112r2" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp112r2", + ) + def test_to_openssl_secp112r2(self): + self.do_test_to_openssl(SECP112r2) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp128r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp128r1", + ) + def test_to_openssl_secp128r1(self): + self.do_test_to_openssl(SECP128r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp160r1", + ) + def test_to_openssl_secp160r1(self): + self.do_test_to_openssl(SECP160r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_to_openssl_nist192p(self): + self.do_test_to_openssl(NIST192p) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_to_openssl_nist192p_sha256(self): + self.do_test_to_openssl(NIST192p, "SHA256") + + @pytest.mark.slow + @pytest.mark.skipif( + "secp224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp224r1", + ) + def test_to_openssl_nist224p(self): + 
self.do_test_to_openssl(NIST224p) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_to_openssl_nist256p(self): + self.do_test_to_openssl(NIST256p) + + @pytest.mark.slow + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_to_openssl_nist256p_sha384(self): + self.do_test_to_openssl(NIST256p, "SHA384") + + @pytest.mark.slow + @pytest.mark.skipif( + "prime256v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime256v1", + ) + def test_to_openssl_nist256p_sha512(self): + self.do_test_to_openssl(NIST256p, "SHA512") + + @pytest.mark.slow + @pytest.mark.skipif( + "secp384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp384r1", + ) + def test_to_openssl_nist384p(self): + self.do_test_to_openssl(NIST384p) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp521r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp521r1", + ) + def test_to_openssl_nist521p(self): + self.do_test_to_openssl(NIST521p) + + @pytest.mark.slow + @pytest.mark.skipif( + "secp256k1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support secp256k1", + ) + def test_to_openssl_secp256k1(self): + self.do_test_to_openssl(SECP256k1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160r1", + ) + def test_to_openssl_brainpoolp160r1(self): + self.do_test_to_openssl(BRAINPOOLP160r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP192r1", + ) + def test_to_openssl_brainpoolp192r1(self): + self.do_test_to_openssl(BRAINPOOLP192r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224r1", + ) + def test_to_openssl_brainpoolp224r1(self): + self.do_test_to_openssl(BRAINPOOLP224r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256r1", + ) + def test_to_openssl_brainpoolp256r1(self): + self.do_test_to_openssl(BRAINPOOLP256r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320r1", + ) + def test_to_openssl_brainpoolp320r1(self): + self.do_test_to_openssl(BRAINPOOLP320r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384r1", + ) + def test_to_openssl_brainpoolp384r1(self): + self.do_test_to_openssl(BRAINPOOLP384r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512r1", + ) + def test_to_openssl_brainpoolp512r1(self): + self.do_test_to_openssl(BRAINPOOLP512r1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP160t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP160t1", + ) + def test_to_openssl_brainpoolp160t1(self): + self.do_test_to_openssl(BRAINPOOLP160t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP192t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not 
support brainpoolP192t1", + ) + def test_to_openssl_brainpoolp192t1(self): + self.do_test_to_openssl(BRAINPOOLP192t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP224t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP224t1", + ) + def test_to_openssl_brainpoolp224t1(self): + self.do_test_to_openssl(BRAINPOOLP224t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP256t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP256t1", + ) + def test_to_openssl_brainpoolp256t1(self): + self.do_test_to_openssl(BRAINPOOLP256t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP320t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP320t1", + ) + def test_to_openssl_brainpoolp320t1(self): + self.do_test_to_openssl(BRAINPOOLP320t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP384t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP384t1", + ) + def test_to_openssl_brainpoolp384t1(self): + self.do_test_to_openssl(BRAINPOOLP384t1) + + @pytest.mark.slow + @pytest.mark.skipif( + "brainpoolP512t1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support brainpoolP512t1", + ) + def test_to_openssl_brainpoolp512t1(self): + self.do_test_to_openssl(BRAINPOOLP512t1) + + def do_test_to_openssl(self, curve, hash_name="SHA1"): + curvename = curve.openssl_name + assert curvename + # Python: create sk, vk, sign. + # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check + mdarg = self.get_openssl_messagedigest_arg(hash_name) + if os.path.isdir("t"): # pragma: no cover + shutil.rmtree("t") + os.mkdir("t") + sk = SigningKey.generate(curve=curve) + vk = sk.get_verifying_key() + data = b"data" + with open("t/pubkey.der", "wb") as e: + e.write(vk.to_der()) # 4 + with open("t/pubkey.pem", "wb") as e: + e.write(vk.to_pem()) # 4 + sig_der = sk.sign( + data, + hashfunc=partial(hashlib.new, hash_name), + sigencode=sigencode_der, + ) + + with open("t/data.sig", "wb") as e: + e.write(sig_der) # 6 + with open("t/data.txt", "wb") as e: + e.write(data) + with open("t/baddata.txt", "wb") as e: + e.write(data + b"corrupt") + + self.assertRaises( + SubprocessError, + run_openssl, + "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" + % mdarg, + ) + run_openssl( + "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" + % mdarg + ) + + with open("t/privkey.pem", "wb") as e: + e.write(sk.to_pem()) # 2 + run_openssl( + "dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" + % mdarg + ) + + with open("t/privkey-explicit.pem", "wb") as e: + e.write(sk.to_pem(curve_parameters_encoding="explicit")) + run_openssl( + "dgst %s -sign t/privkey-explicit.pem -out t/data.sig2 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" + % mdarg + ) + + with open("t/privkey-p8.pem", "wb") as e: + e.write(sk.to_pem(format="pkcs8")) + run_openssl( + "dgst %s -sign t/privkey-p8.pem -out t/data.sig3 t/data.txt" + % mdarg + ) + run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" + % mdarg + ) + + with open("t/privkey-p8-explicit.pem", "wb") as e: + e.write( + sk.to_pem(format="pkcs8", curve_parameters_encoding="explicit") + ) + run_openssl( + "dgst %s -sign t/privkey-p8-explicit.pem -out t/data.sig3 t/data.txt" + % mdarg + ) + 
run_openssl( + "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" + % mdarg + ) + + OPENSSL_SUPPORTED_TYPES = set() + try: + if "-rawin" in run_openssl("pkeyutl -help"): + OPENSSL_SUPPORTED_TYPES = set( # pragma: no branch + c.lower() + for c in ("ED25519", "ED448") + if c in run_openssl("list -public-key-methods") + ) + except SubprocessError: # pragma: no cover + pass + + def do_eddsa_test_to_openssl(self, curve): + if os.path.isdir("t"): + shutil.rmtree("t") + os.mkdir("t") + + sk = SigningKey.generate(curve=curve) + vk = sk.get_verifying_key() + + data = b"data" + with open("t/pubkey.der", "wb") as e: + e.write(vk.to_der()) + with open("t/pubkey.pem", "wb") as e: + e.write(vk.to_pem()) + + sig = sk.sign(data) + + with open("t/data.sig", "wb") as e: + e.write(sig) + with open("t/data.txt", "wb") as e: + e.write(data) + with open("t/baddata.txt", "wb") as e: + e.write(data + b"corrupt") + + with self.assertRaises(SubprocessError): + run_openssl( + "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " + "-in t/baddata.txt -sigfile t/data.sig" + ) + run_openssl( + "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " + "-in t/data.txt -sigfile t/data.sig" + ) + + shutil.rmtree("t") + + # in practice at least OpenSSL 3.0.0 is needed to make EdDSA signatures + # earlier versions support EdDSA only in X.509 certificates + @pytest.mark.slow + @pytest.mark.skipif( + "ed25519" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed25519", + ) + def test_to_openssl_ed25519(self): + return self.do_eddsa_test_to_openssl(Ed25519) + + @pytest.mark.slow + @pytest.mark.skipif( + "ed448" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed448", + ) + def test_to_openssl_ed448(self): + return self.do_eddsa_test_to_openssl(Ed448) + + def do_eddsa_test_from_openssl(self, curve): + curvename = curve.name + + if os.path.isdir("t"): + shutil.rmtree("t") + os.mkdir("t") + + data = b"data" + + run_openssl( + "genpkey -algorithm {0} -outform PEM -out t/privkey.pem".format( + curvename + ) + ) + run_openssl( + "pkey -outform PEM -pubout -in t/privkey.pem -out t/pubkey.pem" + ) + + with open("t/data.txt", "wb") as e: + e.write(data) + run_openssl( + "pkeyutl -sign -inkey t/privkey.pem " + "-rawin -in t/data.txt -out t/data.sig" + ) + + with open("t/data.sig", "rb") as e: + sig = e.read() + with open("t/pubkey.pem", "rb") as e: + vk = VerifyingKey.from_pem(e.read()) + + self.assertIs(vk.curve, curve) + + vk.verify(sig, data) + + shutil.rmtree("t") + + @pytest.mark.slow + @pytest.mark.skipif( + "ed25519" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed25519", + ) + def test_from_openssl_ed25519(self): + return self.do_eddsa_test_from_openssl(Ed25519) + + @pytest.mark.slow + @pytest.mark.skipif( + "ed448" not in OPENSSL_SUPPORTED_TYPES, + reason="system openssl does not support signing with Ed448", + ) + def test_from_openssl_ed448(self): + return self.do_eddsa_test_from_openssl(Ed448) + + +class TooSmallCurve(unittest.TestCase): + OPENSSL_SUPPORTED_CURVES = set( + c.split(":")[0].strip() + for c in run_openssl("ecparam -list_curves").split("\n") + ) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_sign_too_small_curve_dont_allow_truncate_raises(self): + sk = SigningKey.generate(curve=NIST192p) + data = b"data" + with self.assertRaises(BadDigestError): + sk.sign( + data, + hashfunc=partial(hashlib.new, 
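+                    # SHA-256 yields a 32-byte digest, longer than the 24-byte
+                    # NIST192p order, so signing must fail with BadDigestError
+                    # unless allow_truncate=True permits truncating the digest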
"SHA256"), + sigencode=sigencode_der, + allow_truncate=False, + ) + + @pytest.mark.skipif( + "prime192v1" not in OPENSSL_SUPPORTED_CURVES, + reason="system openssl does not support prime192v1", + ) + def test_verify_too_small_curve_dont_allow_truncate_raises(self): + sk = SigningKey.generate(curve=NIST192p) + vk = sk.get_verifying_key() + data = b"data" + sig_der = sk.sign( + data, + hashfunc=partial(hashlib.new, "SHA256"), + sigencode=sigencode_der, + allow_truncate=True, + ) + with self.assertRaises(BadDigestError): + vk.verify( + sig_der, + data, + hashfunc=partial(hashlib.new, "SHA256"), + sigdecode=sigdecode_der, + allow_truncate=False, + ) + + +class DER(unittest.TestCase): + def test_integer(self): + self.assertEqual(der.encode_integer(0), b"\x02\x01\x00") + self.assertEqual(der.encode_integer(1), b"\x02\x01\x01") + self.assertEqual(der.encode_integer(127), b"\x02\x01\x7f") + self.assertEqual(der.encode_integer(128), b"\x02\x02\x00\x80") + self.assertEqual(der.encode_integer(256), b"\x02\x02\x01\x00") + # self.assertEqual(der.encode_integer(-1), b"\x02\x01\xff") + + def s(n): + return der.remove_integer(der.encode_integer(n) + b"junk") + + self.assertEqual(s(0), (0, b"junk")) + self.assertEqual(s(1), (1, b"junk")) + self.assertEqual(s(127), (127, b"junk")) + self.assertEqual(s(128), (128, b"junk")) + self.assertEqual(s(256), (256, b"junk")) + self.assertEqual( + s(1234567890123456789012345678901234567890), + (1234567890123456789012345678901234567890, b"junk"), + ) + + def test_number(self): + self.assertEqual(der.encode_number(0), b"\x00") + self.assertEqual(der.encode_number(127), b"\x7f") + self.assertEqual(der.encode_number(128), b"\x81\x00") + self.assertEqual(der.encode_number(3 * 128 + 7), b"\x83\x07") + # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2)) + # self.assertEqual(der.encode_number(155), b"\x81\x9b") + for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155): + x = der.encode_number(n) + b"more" + n1, llen = der.read_number(x) + self.assertEqual(n1, n) + self.assertEqual(x[llen:], b"more") + + def test_length(self): + self.assertEqual(der.encode_length(0), b"\x00") + self.assertEqual(der.encode_length(127), b"\x7f") + self.assertEqual(der.encode_length(128), b"\x81\x80") + self.assertEqual(der.encode_length(255), b"\x81\xff") + self.assertEqual(der.encode_length(256), b"\x82\x01\x00") + self.assertEqual(der.encode_length(3 * 256 + 7), b"\x82\x03\x07") + self.assertEqual(der.read_length(b"\x81\x9b" + b"more"), (155, 2)) + self.assertEqual(der.encode_length(155), b"\x81\x9b") + for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155): + x = der.encode_length(n) + b"more" + n1, llen = der.read_length(x) + self.assertEqual(n1, n) + self.assertEqual(x[llen:], b"more") + + def test_sequence(self): + x = der.encode_sequence(b"ABC", b"DEF") + b"GHI" + self.assertEqual(x, b"\x30\x06ABCDEFGHI") + x1, rest = der.remove_sequence(x) + self.assertEqual(x1, b"ABCDEF") + self.assertEqual(rest, b"GHI") + + def test_constructed(self): + x = der.encode_constructed(0, NIST224p.encoded_oid) + self.assertEqual(hexlify(x), b"a007" + b"06052b81040021") + x = der.encode_constructed(1, unhexlify(b"0102030a0b0c")) + self.assertEqual(hexlify(x), b"a106" + b"0102030a0b0c") + + +class Util(unittest.TestCase): + @pytest.mark.slow + def test_trytryagain(self): + tta = util.randrange_from_seed__trytryagain + for i in range(1000): + seed = "seed-%d" % i + for order in ( + 2**8 - 2, + 2**8 - 1, + 2**8, + 2**8 + 1, + 2**8 + 2, + 2**16 - 1, + 2**16 + 1, + ): + n = tta(seed, 
order) + self.assertTrue(1 <= n < order, (1, n, order)) + # this trytryagain *does* provide long-term stability + self.assertEqual( + ("%x" % (tta("seed", NIST224p.order))).encode(), + b"6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc", + ) + + def test_trytryagain_single(self): + tta = util.randrange_from_seed__trytryagain + order = 2**8 - 2 + seed = b"text" + n = tta(seed, order) + # known issue: https://github.com/warner/python-ecdsa/issues/221 + if sys.version_info < (3, 0): # pragma: no branch + self.assertEqual(n, 228) + else: # pragma: no branch + self.assertEqual(n, 18) + + @settings(**HYP_SETTINGS) + @given(st.integers(min_value=0, max_value=10**200)) + def test_randrange(self, i): + # util.randrange does not provide long-term stability: we might + # change the algorithm in the future. + entropy = util.PRNG("seed-%d" % i) + for order in ( + 2**8 - 2, + 2**8 - 1, + 2**8, + 2**16 - 1, + 2**16 + 1, + ): + # that oddball 2**16+1 takes half our runtime + n = util.randrange(order, entropy=entropy) + self.assertTrue(1 <= n < order, (1, n, order)) + + def OFF_test_prove_uniformity(self): # pragma: no cover + order = 2**8 - 2 + counts = dict([(i, 0) for i in range(1, order)]) + assert 0 not in counts + assert order not in counts + for i in range(1000000): + seed = "seed-%d" % i + n = util.randrange_from_seed__trytryagain(seed, order) + counts[n] += 1 + # this technique should use the full range + self.assertTrue(counts[order - 1]) + for i in range(1, order): + print("%3d: %s" % (i, "*" * (counts[i] // 100))) + + +class RFC6979(unittest.TestCase): + # https://tools.ietf.org/html/rfc6979#appendix-A.1 + def _do(self, generator, secexp, hsh, hash_func, expected): + actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh) + self.assertEqual(expected, actual) + + def test_SECP256k1(self): + """RFC doesn't contain test vectors for SECP256k1 used in bitcoin. + This vector has been computed by Golang reference implementation instead.""" + self._do( + generator=SECP256k1.generator, + secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16), + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, + expected=int( + "8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", + 16, + ), + ) + + def test_SECP256k1_2(self): + self._do( + generator=SECP256k1.generator, + secexp=int( + "cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", + 16, + ), + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, + expected=int( + "2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", + 16, + ), + ) + + def test_SECP256k1_3(self): + self._do( + generator=SECP256k1.generator, + secexp=0x1, + hsh=hashlib.sha256(b"Satoshi Nakamoto").digest(), + hash_func=hashlib.sha256, + expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15, + ) + + def test_SECP256k1_4(self): + self._do( + generator=SECP256k1.generator, + secexp=0x1, + hsh=hashlib.sha256( + b"All those moments will be lost in time, like tears in rain. Time to die..." 
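+                # RFC 6979 derives k deterministically (via an HMAC_DRBG-style
+                # construction) from the private key and the message hash, so
+                # these expected k values are stable and reproducible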
+ ).digest(), + hash_func=hashlib.sha256, + expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3, + ) + + def test_SECP256k1_5(self): + self._do( + generator=SECP256k1.generator, + secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140, + hsh=hashlib.sha256(b"Satoshi Nakamoto").digest(), + hash_func=hashlib.sha256, + expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90, + ) + + def test_SECP256k1_6(self): + self._do( + generator=SECP256k1.generator, + secexp=0xF8B8AF8CE3C7CCA5E300D33939540C10D45CE001B8F252BFBC57BA0342904181, + hsh=hashlib.sha256(b"Alan Turing").digest(), + hash_func=hashlib.sha256, + expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1, + ) + + def test_1(self): + # Basic example of the RFC, it also tests 'try-try-again' from Step H of rfc6979 + self._do( + generator=Point( + None, + 0, + 0, + int("4000000000000000000020108A2E0CC0D99F8A5EF", 16), + ), + secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16), + hsh=unhexlify( + b"AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF" + ), + hash_func=hashlib.sha256, + expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16), + ) + + def test_2(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=hashlib.sha1(b"sample").digest(), + hash_func=hashlib.sha1, + expected=int( + "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16 + ), + ) + + def test_3(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, + expected=int( + "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16 + ), + ) + + def test_4(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=hashlib.sha512(b"sample").digest(), + hash_func=hashlib.sha512, + expected=int( + "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16 + ), + ) + + def test_5(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=hashlib.sha1(b"test").digest(), + hash_func=hashlib.sha1, + expected=int( + "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16 + ), + ) + + def test_6(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=hashlib.sha256(b"test").digest(), + hash_func=hashlib.sha256, + expected=int( + "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16 + ), + ) + + def test_7(self): + self._do( + generator=NIST192p.generator, + secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), + hsh=hashlib.sha512(b"test").digest(), + hash_func=hashlib.sha512, + expected=int( + "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16 + ), + ) + + def test_8(self): + self._do( + generator=NIST521p.generator, + secexp=int( + "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", + 16, + ), + hsh=hashlib.sha1(b"sample").digest(), + hash_func=hashlib.sha1, + expected=int( + "089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", + 16, + ), + ) + + def test_9(self): + self._do( + generator=NIST521p.generator, + secexp=int( + 
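+                # tests 8-10 reuse the same NIST P-521 private key from the
+                # RFC's test vectors; only the hash function and message vary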
"0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", + 16, + ), + hsh=hashlib.sha256(b"sample").digest(), + hash_func=hashlib.sha256, + expected=int( + "0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", + 16, + ), + ) + + def test_10(self): + self._do( + generator=NIST521p.generator, + secexp=int( + "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", + 16, + ), + hsh=hashlib.sha512(b"test").digest(), + hash_func=hashlib.sha512, + expected=int( + "16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", + 16, + ), + ) + + +class ECDH(unittest.TestCase): + def _do(self, curve, generator, dA, x_qA, y_qA, dB, x_qB, y_qB, x_Z, y_Z): + qA = dA * generator + qB = dB * generator + Z = dA * qB + self.assertEqual(Point(curve, x_qA, y_qA), qA) + self.assertEqual(Point(curve, x_qB, y_qB), qB) + self.assertTrue( + (dA * qB) + == (dA * dB * generator) + == (dB * dA * generator) + == (dB * qA) + ) + self.assertEqual(Point(curve, x_Z, y_Z), Z) + + +class RFC6932(ECDH): + # https://tools.ietf.org/html/rfc6932#appendix-A.1 + + def test_brainpoolP224r1(self): + self._do( + curve=curve_brainpoolp224r1, + generator=BRAINPOOLP224r1.generator, + dA=int( + "7C4B7A2C8A4BAD1FBB7D79CC0955DB7C6A4660CA64CC4778159B495E", 16 + ), + x_qA=int( + "B104A67A6F6E85E14EC1825E1539E8ECDBBF584922367DD88C6BDCF2", 16 + ), + y_qA=int( + "46D782E7FDB5F60CD8404301AC5949C58EDB26BC68BA07695B750A94", 16 + ), + dB=int( + "63976D4AAE6CD0F6DD18DEFEF55D96569D0507C03E74D6486FFA28FB", 16 + ), + x_qB=int( + "2A97089A9296147B71B21A4B574E1278245B536F14D8C2B9D07A874E", 16 + ), + y_qB=int( + "9B900D7C77A709A797276B8CA1BA61BB95B546FC29F862E44D59D25B", 16 + ), + x_Z=int( + "312DFD98783F9FB77B9704945A73BEB6DCCBE3B65D0F967DCAB574EB", 16 + ), + y_Z=int( + "6F800811D64114B1C48C621AB3357CF93F496E4238696A2A012B3C98", 16 + ), + ) + + def test_brainpoolP256r1(self): + self._do( + curve=curve_brainpoolp256r1, + generator=BRAINPOOLP256r1.generator, + dA=int( + "041EB8B1E2BC681BCE8E39963B2E9FC415B05283313DD1A8BCC055F11AE" + "49699", + 16, + ), + x_qA=int( + "78028496B5ECAAB3C8B6C12E45DB1E02C9E4D26B4113BC4F015F60C5C" + "CC0D206", + 16, + ), + y_qA=int( + "A2AE1762A3831C1D20F03F8D1E3C0C39AFE6F09B4D44BBE80CD100987" + "B05F92B", + 16, + ), + dB=int( + "06F5240EACDB9837BC96D48274C8AA834B6C87BA9CC3EEDD81F99A16B8D" + "804D3", + 16, + ), + x_qB=int( + "8E07E219BA588916C5B06AA30A2F464C2F2ACFC1610A3BE2FB240B635" + "341F0DB", + 16, + ), + y_qB=int( + "148EA1D7D1E7E54B9555B6C9AC90629C18B63BEE5D7AA6949EBBF47B2" + "4FDE40D", + 16, + ), + x_Z=int( + "05E940915549E9F6A4A75693716E37466ABA79B4BF2919877A16DD2CC2" + "E23708", + 16, + ), + y_Z=int( + "6BC23B6702BC5A019438CEEA107DAAD8B94232FFBBC350F3B137628FE6" + "FD134C", + 16, + ), + ) + + @pytest.mark.slow + def test_brainpoolP384r1(self): + self._do( + curve=curve_brainpoolp384r1, + generator=BRAINPOOLP384r1.generator, + dA=int( + "014EC0755B78594BA47FB0A56F6173045B4331E74BA1A6F47322E70D79D" + "828D97E095884CA72B73FDABD5910DF0FA76A", + 16, + ), + x_qA=int( + "45CB26E4384DAF6FB776885307B9A38B7AD1B5C692E0C32F012533277" + "8F3B8D3F50CA358099B30DEB5EE69A95C058B4E", + 16, + ), + y_qA=int( + "8173A1C54AFFA7E781D0E1E1D12C0DC2B74F4DF58E4A4E3AF7026C5D3" + "2DC530A2CD89C859BB4B4B768497F49AB8CC859", + 16, + ), + dB=int( 
+ "6B461CB79BD0EA519A87D6828815D8CE7CD9B3CAA0B5A8262CBCD550A01" + "5C90095B976F3529957506E1224A861711D54", + 16, + ), + x_qB=int( + "01BF92A92EE4BE8DED1A911125C209B03F99E3161CFCC986DC7711383" + "FC30AF9CE28CA3386D59E2C8D72CE1E7B4666E8", + 16, + ), + y_qB=int( + "3289C4A3A4FEE035E39BDB885D509D224A142FF9FBCC5CFE5CCBB3026" + "8EE47487ED8044858D31D848F7A95C635A347AC", + 16, + ), + x_Z=int( + "04CC4FF3DCCCB07AF24E0ACC529955B36D7C807772B92FCBE48F3AFE9A" + "2F370A1F98D3FA73FD0C0747C632E12F1423EC", + 16, + ), + y_Z=int( + "7F465F90BD69AFB8F828A214EB9716D66ABC59F17AF7C75EE7F1DE22AB" + "5D05085F5A01A9382D05BF72D96698FE3FF64E", + 16, + ), + ) + + @pytest.mark.slow + def test_brainpoolP512r1(self): + self._do( + curve=curve_brainpoolp512r1, + generator=BRAINPOOLP512r1.generator, + dA=int( + "636B6BE0482A6C1C41AA7AE7B245E983392DB94CECEA2660A379CFE1595" + "59E357581825391175FC195D28BAC0CF03A7841A383B95C262B98378287" + "4CCE6FE333", + 16, + ), + x_qA=int( + "0562E68B9AF7CBFD5565C6B16883B777FF11C199161ECC427A39D17EC" + "2166499389571D6A994977C56AD8252658BA8A1B72AE42F4FB7532151" + "AFC3EF0971CCDA", + 16, + ), + y_qA=int( + "A7CA2D8191E21776A89860AFBC1F582FAA308D551C1DC6133AF9F9C3C" + "AD59998D70079548140B90B1F311AFB378AA81F51B275B2BE6B7DEE97" + "8EFC7343EA642E", + 16, + ), + dB=int( + "0AF4E7F6D52EDD52907BB8DBAB3992A0BB696EC10DF11892FF205B66D38" + "1ECE72314E6A6EA079CEA06961DBA5AE6422EF2E9EE803A1F236FB96A17" + "99B86E5C8B", + 16, + ), + x_qB=int( + "5A7954E32663DFF11AE24712D87419F26B708AC2B92877D6BFEE2BFC4" + "3714D89BBDB6D24D807BBD3AEB7F0C325F862E8BADE4F74636B97EAAC" + "E739E11720D323", + 16, + ), + y_qB=int( + "96D14621A9283A1BED84DE8DD64836B2C0758B11441179DC0C54C0D49" + "A47C03807D171DD544B72CAAEF7B7CE01C7753E2CAD1A861ECA55A719" + "54EE1BA35E04BE", + 16, + ), + x_Z=int( + "1EE8321A4BBF93B9CF8921AB209850EC9B7066D1984EF08C2BB7232362" + "08AC8F1A483E79461A00E0D5F6921CE9D360502F85C812BEDEE23AC5B2" + "10E5811B191E", + 16, + ), + y_Z=int( + "2632095B7B936174B41FD2FAF369B1D18DCADEED7E410A7E251F083109" + "7C50D02CFED02607B6A2D5ADB4C0006008562208631875B58B54ECDA5A" + "4F9FE9EAABA6", + 16, + ), + ) + + +class RFC7027(ECDH): + # https://tools.ietf.org/html/rfc7027#appendix-A + + def test_brainpoolP256r1(self): + self._do( + curve=curve_brainpoolp256r1, + generator=BRAINPOOLP256r1.generator, + dA=int( + "81DB1EE100150FF2EA338D708271BE38300CB54241D79950F77B0630398" + "04F1D", + 16, + ), + x_qA=int( + "44106E913F92BC02A1705D9953A8414DB95E1AAA49E81D9E85F929A8E" + "3100BE5", + 16, + ), + y_qA=int( + "8AB4846F11CACCB73CE49CBDD120F5A900A69FD32C272223F789EF10E" + "B089BDC", + 16, + ), + dB=int( + "55E40BC41E37E3E2AD25C3C6654511FFA8474A91A0032087593852D3E7D" + "76BD3", + 16, + ), + x_qB=int( + "8D2D688C6CF93E1160AD04CC4429117DC2C41825E1E9FCA0ADDD34E6F" + "1B39F7B", + 16, + ), + y_qB=int( + "990C57520812BE512641E47034832106BC7D3E8DD0E4C7F1136D70065" + "47CEC6A", + 16, + ), + x_Z=int( + "89AFC39D41D3B327814B80940B042590F96556EC91E6AE7939BCE31F3A" + "18BF2B", + 16, + ), + y_Z=int( + "49C27868F4ECA2179BFD7D59B1E3BF34C1DBDE61AE12931648F43E5963" + "2504DE", + 16, + ), + ) + + @pytest.mark.slow + def test_brainpoolP384r1(self): + self._do( + curve=curve_brainpoolp384r1, + generator=BRAINPOOLP384r1.generator, + dA=int( + "1E20F5E048A5886F1F157C74E91BDE2B98C8B52D58E5003D57053FC4B0B" + "D65D6F15EB5D1EE1610DF870795143627D042", + 16, + ), + x_qA=int( + "68B665DD91C195800650CDD363C625F4E742E8134667B767B1B476793" + "588F885AB698C852D4A6E77A252D6380FCAF068", + 16, + ), + y_qA=int( + 
"55BC91A39C9EC01DEE36017B7D673A931236D2F1F5C83942D049E3FA2" + "0607493E0D038FF2FD30C2AB67D15C85F7FAA59", + 16, + ), + dB=int( + "032640BC6003C59260F7250C3DB58CE647F98E1260ACCE4ACDA3DD869F7" + "4E01F8BA5E0324309DB6A9831497ABAC96670", + 16, + ), + x_qB=int( + "4D44326F269A597A5B58BBA565DA5556ED7FD9A8A9EB76C25F46DB69D" + "19DC8CE6AD18E404B15738B2086DF37E71D1EB4", + 16, + ), + y_qB=int( + "62D692136DE56CBE93BF5FA3188EF58BC8A3A0EC6C1E151A21038A42E" + "9185329B5B275903D192F8D4E1F32FE9CC78C48", + 16, + ), + x_Z=int( + "0BD9D3A7EA0B3D519D09D8E48D0785FB744A6B355E6304BC51C229FBBC" + "E239BBADF6403715C35D4FB2A5444F575D4F42", + 16, + ), + y_Z=int( + "0DF213417EBE4D8E40A5F76F66C56470C489A3478D146DECF6DF0D94BA" + "E9E598157290F8756066975F1DB34B2324B7BD", + 16, + ), + ) + + @pytest.mark.slow + def test_brainpoolP512r1(self): + self._do( + curve=curve_brainpoolp512r1, + generator=BRAINPOOLP512r1.generator, + dA=int( + "16302FF0DBBB5A8D733DAB7141C1B45ACBC8715939677F6A56850A38BD8" + "7BD59B09E80279609FF333EB9D4C061231FB26F92EEB04982A5F1D1764C" + "AD57665422", + 16, + ), + x_qA=int( + "0A420517E406AAC0ACDCE90FCD71487718D3B953EFD7FBEC5F7F27E28" + "C6149999397E91E029E06457DB2D3E640668B392C2A7E737A7F0BF044" + "36D11640FD09FD", + 16, + ), + y_qA=int( + "72E6882E8DB28AAD36237CD25D580DB23783961C8DC52DFA2EC138AD4" + "72A0FCEF3887CF62B623B2A87DE5C588301EA3E5FC269B373B60724F5" + "E82A6AD147FDE7", + 16, + ), + dB=int( + "230E18E1BCC88A362FA54E4EA3902009292F7F8033624FD471B5D8ACE49" + "D12CFABBC19963DAB8E2F1EBA00BFFB29E4D72D13F2224562F405CB8050" + "3666B25429", + 16, + ), + x_qB=int( + "9D45F66DE5D67E2E6DB6E93A59CE0BB48106097FF78A081DE781CDB31" + "FCE8CCBAAEA8DD4320C4119F1E9CD437A2EAB3731FA9668AB268D871D" + "EDA55A5473199F", + 16, + ), + y_qB=int( + "2FDC313095BCDD5FB3A91636F07A959C8E86B5636A1E930E8396049CB" + "481961D365CC11453A06C719835475B12CB52FC3C383BCE35E27EF194" + "512B71876285FA", + 16, + ), + x_Z=int( + "A7927098655F1F9976FA50A9D566865DC530331846381C87256BAF3226" + "244B76D36403C024D7BBF0AA0803EAFF405D3D24F11A9B5C0BEF679FE1" + "454B21C4CD1F", + 16, + ), + y_Z=int( + "7DB71C3DEF63212841C463E881BDCF055523BD368240E6C3143BD8DEF8" + "B3B3223B95E0F53082FF5E412F4222537A43DF1C6D25729DDB51620A83" + "2BE6A26680A2", + 16, + ), + ) + + +# https://tools.ietf.org/html/rfc4754#page-5 +@pytest.mark.parametrize( + "w, gwx, gwy, k, msg, md, r, s, curve", + [ + pytest.param( + "DC51D3866A15BACDE33D96F992FCA99DA7E6EF0934E7097559C27F1614C88A7F", + "2442A5CC0ECD015FA3CA31DC8E2BBC70BF42D60CBCA20085E0822CB04235E970", + "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D", + "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE", + b"abc", + hashlib.sha256, + "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C", + "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315", + NIST256p, + id="ECDSA-256", + ), + pytest.param( + "0BEB646634BA87735D77AE4809A0EBEA865535DE4C1E1DCB692E84708E81A5AF" + "62E528C38B2A81B35309668D73524D9F", + "96281BF8DD5E0525CA049C048D345D3082968D10FEDF5C5ACA0C64E6465A97EA" + "5CE10C9DFEC21797415710721F437922", + "447688BA94708EB6E2E4D59F6AB6D7EDFF9301D249FE49C33096655F5D502FAD" + "3D383B91C5E7EDAA2B714CC99D5743CA", + "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B" + "854D7FA992F934D927376285E63414FA", + b"abc", + hashlib.sha384, + "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E" + "08E07C9C63F2D21A07DCB56A6AF56EB3", + "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1" + "CBB9F516CE0FA7D2FF630863A00E8B9F", + 
NIST384p, + id="ECDSA-384", + ), + pytest.param( + "0065FDA3409451DCAB0A0EAD45495112A3D813C17BFD34BDF8C1209D7DF58491" + "20597779060A7FF9D704ADF78B570FFAD6F062E95C7E0C5D5481C5B153B48B37" + "5FA1", + "0151518F1AF0F563517EDD5485190DF95A4BF57B5CBA4CF2A9A3F6474725A35F" + "7AFE0A6DDEB8BEDBCD6A197E592D40188901CECD650699C9B5E456AEA5ADD190" + "52A8", + "006F3B142EA1BFFF7E2837AD44C9E4FF6D2D34C73184BBAD90026DD5E6E85317" + "D9DF45CAD7803C6C20035B2F3FF63AFF4E1BA64D1C077577DA3F4286C58F0AEA" + "E643", + "00C1C2B305419F5A41344D7E4359933D734096F556197A9B244342B8B62F46F9" + "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95" + "6C2F", + b"abc", + hashlib.sha512, + "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339" + "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055" + "2251", + "017705A7030290D1CEB605A9A1BB03FF9CDD521E87A696EC926C8C10C8362DF4" + "975367101F67D1CF9BCCBF2F3D239534FA509E70AAC851AE01AAC68D62F86647" + "2660", + NIST521p, + id="ECDSA-521", + ), + ], +) +def test_RFC4754_vectors(w, gwx, gwy, k, msg, md, r, s, curve): + sk = SigningKey.from_string(unhexlify(w), curve) + vk = VerifyingKey.from_string(unhexlify(gwx + gwy), curve) + assert sk.verifying_key == vk + sig = sk.sign(msg, hashfunc=md, sigencode=sigencode_strings, k=int(k, 16)) + + assert sig == (unhexlify(r), unhexlify(s)) + + assert vk.verify(sig, msg, md, sigdecode_strings) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_rw_lock.py b/venv/lib/python3.12/site-packages/ecdsa/test_rw_lock.py new file mode 100644 index 0000000..0a84b9c --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_rw_lock.py @@ -0,0 +1,180 @@ +# Copyright Mateusz Kobos, (c) 2011 +# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ +# released under the MIT licence + +try: + import unittest2 as unittest +except ImportError: + import unittest +import threading +import time +import copy +from ._rwlock import RWLock + + +class Writer(threading.Thread): + def __init__( + self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write + ): + """ + @param buffer_: common buffer_ shared by the readers and writers + @type buffer_: list + @type rw_lock: L{RWLock} + @param init_sleep_time: sleep time before doing any action + @type init_sleep_time: C{float} + @param sleep_time: sleep time while in critical section + @type sleep_time: C{float} + @param to_write: data that will be appended to the buffer + """ + threading.Thread.__init__(self) + self.__buffer = buffer_ + self.__rw_lock = rw_lock + self.__init_sleep_time = init_sleep_time + self.__sleep_time = sleep_time + self.__to_write = to_write + self.entry_time = None + """Time of entry to the critical section""" + self.exit_time = None + """Time of exit from the critical section""" + + def run(self): + time.sleep(self.__init_sleep_time) + self.__rw_lock.writer_acquire() + self.entry_time = time.time() + time.sleep(self.__sleep_time) + self.__buffer.append(self.__to_write) + self.exit_time = time.time() + self.__rw_lock.writer_release() + + +class Reader(threading.Thread): + def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time): + """ + @param buffer_: common buffer shared by the readers and writers + @type buffer_: list + @type rw_lock: L{RWLock} + @param init_sleep_time: sleep time before doing any action + @type init_sleep_time: C{float} + @param sleep_time: sleep time while in critical section + @type sleep_time: C{float} + """ + threading.Thread.__init__(self) + self.__buffer = buffer_ + self.__rw_lock = 
rw_lock
+        self.__init_sleep_time = init_sleep_time
+        self.__sleep_time = sleep_time
+        self.buffer_read = None
+        """a copy of the buffer read while in critical section"""
+        self.entry_time = None
+        """Time of entry to the critical section"""
+        self.exit_time = None
+        """Time of exit from the critical section"""
+
+    def run(self):
+        time.sleep(self.__init_sleep_time)
+        self.__rw_lock.reader_acquire()
+        self.entry_time = time.time()
+        time.sleep(self.__sleep_time)
+        self.buffer_read = copy.deepcopy(self.__buffer)
+        self.exit_time = time.time()
+        self.__rw_lock.reader_release()
+
+
+class RWLockTestCase(unittest.TestCase):
+    def test_readers_nonexclusive_access(self):
+        (buffer_, rw_lock, threads) = self.__init_variables()
+
+        threads.append(Reader(buffer_, rw_lock, 0, 0))
+        threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1))
+        threads.append(Reader(buffer_, rw_lock, 0.3, 0.3))
+        threads.append(Reader(buffer_, rw_lock, 0.5, 0))
+
+        self.__start_and_join_threads(threads)
+
+        ## The third reader should enter after the second one but it should
+        ## exit before the second one exits
+        ## (i.e. the readers should be in the critical section
+        ## at the same time)
+
+        self.assertEqual([], threads[0].buffer_read)
+        self.assertEqual([1], threads[2].buffer_read)
+        self.assertEqual([1], threads[3].buffer_read)
+        self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
+        self.assertTrue(threads[2].entry_time <= threads[3].entry_time)
+        self.assertTrue(threads[3].exit_time < threads[2].exit_time)
+
+    def test_writers_exclusive_access(self):
+        (buffer_, rw_lock, threads) = self.__init_variables()
+
+        threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1))
+        threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2))
+        threads.append(Reader(buffer_, rw_lock, 0.2, 0))
+
+        self.__start_and_join_threads(threads)
+
+        ## The second writer should wait for the first one to exit
+
+        self.assertEqual([1, 2], threads[2].buffer_read)
+        self.assertTrue(threads[0].exit_time <= threads[1].entry_time)
+        self.assertTrue(threads[1].exit_time <= threads[2].exit_time)
+
+    def test_writer_priority(self):
+        (buffer_, rw_lock, threads) = self.__init_variables()
+
+        threads.append(Writer(buffer_, rw_lock, 0, 0, 1))
+        threads.append(Reader(buffer_, rw_lock, 0.1, 0.4))
+        threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2))
+        threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+        threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+
+        self.__start_and_join_threads(threads)
+
+        ## The second writer should go before the second and the third reader
+
+        self.assertEqual([1], threads[1].buffer_read)
+        self.assertEqual([1, 2], threads[3].buffer_read)
+        self.assertEqual([1, 2], threads[4].buffer_read)
+        self.assertTrue(threads[0].exit_time < threads[1].entry_time)
+        self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
+        self.assertTrue(threads[2].exit_time <= threads[3].entry_time)
+        self.assertTrue(threads[2].exit_time <= threads[4].entry_time)
+
+    def test_many_writers_priority(self):
+        (buffer_, rw_lock, threads) = self.__init_variables()
+
+        threads.append(Writer(buffer_, rw_lock, 0, 0, 1))
+        threads.append(Reader(buffer_, rw_lock, 0.1, 0.6))
+        threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2))
+        threads.append(Reader(buffer_, rw_lock, 0.3, 0))
+        threads.append(Reader(buffer_, rw_lock, 0.4, 0))
+        threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3))
+
+        self.__start_and_join_threads(threads)
+
+        ## The last two writers should go first -- after the first reader and
+        ## before the second and the third reader
+
+        self.assertEqual([1],
threads[1].buffer_read) + self.assertEqual([1, 2, 3], threads[3].buffer_read) + self.assertEqual([1, 2, 3], threads[4].buffer_read) + self.assertTrue(threads[0].exit_time < threads[1].entry_time) + self.assertTrue(threads[1].exit_time <= threads[2].entry_time) + self.assertTrue(threads[1].exit_time <= threads[5].entry_time) + self.assertTrue(threads[2].exit_time <= threads[3].entry_time) + self.assertTrue(threads[2].exit_time <= threads[4].entry_time) + self.assertTrue(threads[5].exit_time <= threads[3].entry_time) + self.assertTrue(threads[5].exit_time <= threads[4].entry_time) + + @staticmethod + def __init_variables(): + buffer_ = [] + rw_lock = RWLock() + threads = [] + return (buffer_, rw_lock, threads) + + @staticmethod + def __start_and_join_threads(threads): + for t in threads: + t.start() + for t in threads: + t.join() diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_sha3.py b/venv/lib/python3.12/site-packages/ecdsa/test_sha3.py new file mode 100644 index 0000000..d30381d --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/test_sha3.py @@ -0,0 +1,111 @@ +try: + import unittest2 as unittest +except ImportError: + import unittest +import pytest + +try: + from gmpy2 import mpz + + GMPY = True +except ImportError: # pragma: no cover + try: + from gmpy import mpz + + GMPY = True + except ImportError: + GMPY = False + +from ._sha3 import shake_256 +from ._compat import bytes_to_int, int_to_bytes + +B2I_VECTORS = [ + (b"\x00\x01", "big", 1), + (b"\x00\x01", "little", 0x0100), + (b"", "big", 0), + (b"\x00", "little", 0), +] + + +@pytest.mark.parametrize("bytes_in,endian,int_out", B2I_VECTORS) +def test_bytes_to_int(bytes_in, endian, int_out): + out = bytes_to_int(bytes_in, endian) + assert out == int_out + + +class TestBytesToInt(unittest.TestCase): + def test_bytes_to_int_wrong_endian(self): + with self.assertRaises(ValueError): + bytes_to_int(b"\x00", "middle") + + def test_int_to_bytes_wrong_endian(self): + with self.assertRaises(ValueError): + int_to_bytes(0, byteorder="middle") + + +@pytest.mark.skipif(GMPY == False, reason="requires gmpy or gmpy2") +def test_int_to_bytes_with_gmpy(): + assert int_to_bytes(mpz(1)) == b"\x01" + + +I2B_VECTORS = [ + (0, None, "big", b""), + (0, 1, "big", b"\x00"), + (1, None, "big", b"\x01"), + (0x0100, None, "little", b"\x00\x01"), + (0x0100, 4, "little", b"\x00\x01\x00\x00"), + (1, 4, "big", b"\x00\x00\x00\x01"), +] + + +@pytest.mark.parametrize("int_in,length,endian,bytes_out", I2B_VECTORS) +def test_int_to_bytes(int_in, length, endian, bytes_out): + out = int_to_bytes(int_in, length, endian) + assert out == bytes_out + + +SHAKE_256_VECTORS = [ + ( + b"Message.", + 32, + b"\x78\xa1\x37\xbb\x33\xae\xe2\x72\xb1\x02\x4f\x39\x43\xe5\xcf\x0c" + b"\x4e\x9c\x72\x76\x2e\x34\x4c\xf8\xf9\xc3\x25\x9d\x4f\x91\x2c\x3a", + ), + ( + b"", + 32, + b"\x46\xb9\xdd\x2b\x0b\xa8\x8d\x13\x23\x3b\x3f\xeb\x74\x3e\xeb\x24" + b"\x3f\xcd\x52\xea\x62\xb8\x1b\x82\xb5\x0c\x27\x64\x6e\xd5\x76\x2f", + ), + ( + b"message", + 32, + b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" + b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75", + ), + ( + b"message", + 16, + b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51", + ), + ( + b"message", + 64, + b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" + b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75" + b"\x30\xd6\xba\x2a\x46\x65\xf1\x9d\xf0\x62\x25\xb1\x26\xd1\x3e\xed" + 
b"\x91\xd5\x0d\xe7\xb9\xcb\x65\xf3\x3a\x46\xae\xd3\x6c\x7d\xc5\xe8", + ), + ( + b"A" * 1024, + 32, + b"\xa5\xef\x7e\x30\x8b\xe8\x33\x64\xe5\x9c\xf3\xb5\xf3\xba\x20\xa3" + b"\x5a\xe7\x30\xfd\xbc\x33\x11\xbf\x83\x89\x50\x82\xb4\x41\xe9\xb3", + ), +] + + +@pytest.mark.parametrize("msg,olen,ohash", SHAKE_256_VECTORS) +def test_shake_256(msg, olen, ohash): + out = shake_256(msg, olen) + assert out == bytearray(ohash) diff --git a/venv/lib/python3.12/site-packages/ecdsa/util.py b/venv/lib/python3.12/site-packages/ecdsa/util.py new file mode 100644 index 0000000..1aff5bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/ecdsa/util.py @@ -0,0 +1,533 @@ +""" +This module includes some utility functions. + +The methods most typically used are the sigencode and sigdecode functions +to be used with :func:`~ecdsa.keys.SigningKey.sign` and +:func:`~ecdsa.keys.VerifyingKey.verify` +respectively. See the :func:`sigencode_strings`, :func:`sigdecode_string`, +:func:`sigencode_der`, :func:`sigencode_strings_canonize`, +:func:`sigencode_string_canonize`, :func:`sigencode_der_canonize`, +:func:`sigdecode_strings`, :func:`sigdecode_string`, and +:func:`sigdecode_der` functions. +""" + +from __future__ import division + +import os +import math +import binascii +import sys +from hashlib import sha256 +from six import PY2, int2byte, next +from . import der +from ._compat import normalise_bytes + + +# RFC5480: +# The "unrestricted" algorithm identifier is: +# id-ecPublicKey OBJECT IDENTIFIER ::= { +# iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 } + +oid_ecPublicKey = (1, 2, 840, 10045, 2, 1) +encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey) + +# RFC5480: +# The ECDH algorithm uses the following object identifier: +# id-ecDH OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) schemes(1) +# ecdh(12) } + +oid_ecDH = (1, 3, 132, 1, 12) + +# RFC5480: +# The ECMQV algorithm uses the following object identifier: +# id-ecMQV OBJECT IDENTIFIER ::= { +# iso(1) identified-organization(3) certicom(132) schemes(1) +# ecmqv(13) } + +oid_ecMQV = (1, 3, 132, 1, 13) + +if sys.version_info >= (3,): # pragma: no branch + + def entropy_to_bits(ent_256): + """Convert a bytestring to string of 0's and 1's""" + return bin(int.from_bytes(ent_256, "big"))[2:].zfill(len(ent_256) * 8) + +else: + + def entropy_to_bits(ent_256): + """Convert a bytestring to string of 0's and 1's""" + return "".join(bin(ord(x))[2:].zfill(8) for x in ent_256) + + +if sys.version_info < (2, 7): # pragma: no branch + # Can't add a method to a built-in type so we are stuck with this + def bit_length(x): + return len(bin(x)) - 2 + +else: + + def bit_length(x): + return x.bit_length() or 1 + + +def orderlen(order): + return (1 + len("%x" % order)) // 2 # bytes + + +def randrange(order, entropy=None): + """Return a random integer k such that 1 <= k < order, uniformly + distributed across that range. Worst case should be a mean of 2 loops at + (2**k)+2. + + Note that this function is not declared to be forwards-compatible: we may + change the behavior in future releases. The entropy= argument (which + should get a callable that behaves like os.urandom) can be used to + achieve stability within a given release (for repeatable unit tests), but + should not be used as a long-term-compatible key generation algorithm. 
+ """ + assert order > 1 + if entropy is None: + entropy = os.urandom + upper_2 = bit_length(order - 2) + upper_256 = upper_2 // 8 + 1 + while True: # I don't think this needs a counter with bit-wise randrange + ent_256 = entropy(upper_256) + ent_2 = entropy_to_bits(ent_256) + rand_num = int(ent_2[:upper_2], base=2) + 1 + if 0 < rand_num < order: + return rand_num + + +class PRNG: + # this returns a callable which, when invoked with an integer N, will + # return N pseudorandom bytes. Note: this is a short-term PRNG, meant + # primarily for the needs of randrange_from_seed__trytryagain(), which + # only needs to run it a few times per seed. It does not provide + # protection against state compromise (forward security). + def __init__(self, seed): + self.generator = self.block_generator(seed) + + def __call__(self, numbytes): + a = [next(self.generator) for i in range(numbytes)] + + if PY2: # pragma: no branch + return "".join(a) + else: + return bytes(a) + + def block_generator(self, seed): + counter = 0 + while True: + for byte in sha256( + ("prng-%d-%s" % (counter, seed)).encode() + ).digest(): + yield byte + counter += 1 + + +def randrange_from_seed__overshoot_modulo(seed, order): + # hash the data, then turn the digest into a number in [1,order). + # + # We use David-Sarah Hopwood's suggestion: turn it into a number that's + # sufficiently larger than the group order, then modulo it down to fit. + # This should give adequate (but not perfect) uniformity, and simple + # code. There are other choices: try-try-again is the main one. + base = PRNG(seed)(2 * orderlen(order)) + number = (int(binascii.hexlify(base), 16) % (order - 1)) + 1 + assert 1 <= number < order, (1, number, order) + return number + + +def lsb_of_ones(numbits): + return (1 << numbits) - 1 + + +def bits_and_bytes(order): + bits = int(math.log(order - 1, 2) + 1) + bytes = bits // 8 + extrabits = bits % 8 + return bits, bytes, extrabits + + +# the following randrange_from_seed__METHOD() functions take an +# arbitrarily-sized secret seed and turn it into a number that obeys the same +# range limits as randrange() above. They are meant for deriving consistent +# signing keys from a secret rather than generating them randomly, for +# example a protocol in which three signing keys are derived from a master +# secret. You should use a uniformly-distributed unguessable seed with about +# curve.baselen bytes of entropy. To use one, do this: +# seed = os.urandom(curve.baselen) # or other starting point +# secexp = ecdsa.util.randrange_from_seed__trytryagain(sed, curve.order) +# sk = SigningKey.from_secret_exponent(secexp, curve) + + +def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256): + # hash the seed, then turn the digest into a number in [1,order), but + # don't worry about trying to uniformly fill the range. This will lose, + # on average, four bits of entropy. 
+ bits, _bytes, extrabits = bits_and_bytes(order) + if extrabits: + _bytes += 1 + base = hashmod(seed).digest()[:_bytes] + base = "\x00" * (_bytes - len(base)) + base + number = 1 + int(binascii.hexlify(base), 16) + assert 1 <= number < order + return number + + +def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256): + # like string_to_randrange_truncate_bytes, but only lose an average of + # half a bit + bits = int(math.log(order - 1, 2) + 1) + maxbytes = (bits + 7) // 8 + base = hashmod(seed).digest()[:maxbytes] + base = "\x00" * (maxbytes - len(base)) + base + topbits = 8 * maxbytes - bits + if topbits: + base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:] + number = 1 + int(binascii.hexlify(base), 16) + assert 1 <= number < order + return number + + +def randrange_from_seed__trytryagain(seed, order): + # figure out exactly how many bits we need (rounded up to the nearest + # bit), so we can reduce the chance of looping to less than 0.5 . This is + # specified to feed from a byte-oriented PRNG, and discards the + # high-order bits of the first byte as necessary to get the right number + # of bits. The average number of loops will range from 1.0 (when + # order=2**k-1) to 2.0 (when order=2**k+1). + assert order > 1 + bits, bytes, extrabits = bits_and_bytes(order) + generate = PRNG(seed) + while True: + extrabyte = b"" + if extrabits: + extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits)) + guess = string_to_number(extrabyte + generate(bytes)) + 1 + if 1 <= guess < order: + return guess + + +def number_to_string(num, order): + l = orderlen(order) + fmt_str = "%0" + str(2 * l) + "x" + string = binascii.unhexlify((fmt_str % num).encode()) + assert len(string) == l, (len(string), l) + return string + + +def number_to_string_crop(num, order): + l = orderlen(order) + fmt_str = "%0" + str(2 * l) + "x" + string = binascii.unhexlify((fmt_str % num).encode()) + return string[:l] + + +def string_to_number(string): + return int(binascii.hexlify(string), 16) + + +def string_to_number_fixedlen(string, order): + l = orderlen(order) + assert len(string) == l, (len(string), l) + return int(binascii.hexlify(string), 16) + + +def sigencode_strings(r, s, order): + """ + Encode the signature to a pair of strings in a tuple + + Encodes signature into raw encoding (:term:`raw encoding`) with the + ``r`` and ``s`` parts of the signature encoded separately. + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: tuple(bytes, bytes) + """ + r_str = number_to_string(r, order) + s_str = number_to_string(s, order) + return (r_str, s_str) + + +def sigencode_string(r, s, order): + """ + Encode the signature to raw format (:term:`raw encoding`) + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. 
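+
+    A minimal sketch (assuming the NIST256p curve, where each of ``r`` and
+    ``s`` encodes to 32 bytes, giving a 64-byte signature)::
+
+        from ecdsa import SigningKey, NIST256p
+        from ecdsa.util import sigencode_string, sigdecode_string
+
+        sk = SigningKey.generate(curve=NIST256p)
+        sig = sk.sign(b"data", sigencode=sigencode_string)
+        assert len(sig) == 64  # r || s, fixed width per curve
+        assert sk.verifying_key.verify(sig, b"data", sigdecode=sigdecode_string)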
+ + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: bytes + """ + # for any given curve, the size of the signature numbers is + # fixed, so just use simple concatenation + r_str, s_str = sigencode_strings(r, s, order) + return r_str + s_str + + +def sigencode_der(r, s, order): + """ + Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. + + Encodes the signature to the following :term:`ASN.1` structure:: + + Ecdsa-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: DER encoding of ECDSA signature + :rtype: bytes + """ + return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) + + +def _canonize(s, order): + """ + Internal function for ensuring that the ``s`` value of a signature is in + the "canonical" format. + + :param int s: the second parameter of ECDSA signature + :param int order: the order of the curve over which the signatures was + computed + + :return: canonical value of s + :rtype: int + """ + if s > order // 2: + s = order - s + return s + + +def sigencode_strings_canonize(r, s, order): + """ + Encode the signature to a pair of strings in a tuple + + Encodes signature into raw encoding (:term:`raw encoding`) with the + ``r`` and ``s`` parts of the signature encoded separately. + + Makes sure that the signature is encoded in the canonical format, where + the ``s`` parameter is always smaller than ``order / 2``. + Most commonly used in bitcoin. + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: tuple(bytes, bytes) + """ + s = _canonize(s, order) + return sigencode_strings(r, s, order) + + +def sigencode_string_canonize(r, s, order): + """ + Encode the signature to raw format (:term:`raw encoding`) + + Makes sure that the signature is encoded in the canonical format, where + the ``s`` parameter is always smaller than ``order / 2``. + Most commonly used in bitcoin. + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: raw encoding of ECDSA signature + :rtype: bytes + """ + s = _canonize(s, order) + return sigencode_string(r, s, order) + + +def sigencode_der_canonize(r, s, order): + """ + Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. + + Makes sure that the signature is encoded in the canonical format, where + the ``s`` parameter is always smaller than ``order / 2``. + Most commonly used in bitcoin. 
+ + Encodes the signature to the following :term:`ASN.1` structure:: + + Ecdsa-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + It's expected that this function will be used as a ``sigencode=`` parameter + in :func:`ecdsa.keys.SigningKey.sign` method. + + :param int r: first parameter of the signature + :param int s: second parameter of the signature + :param int order: the order of the curve over which the signature was + computed + + :return: DER encoding of ECDSA signature + :rtype: bytes + """ + s = _canonize(s, order) + return sigencode_der(r, s, order) + + +class MalformedSignature(Exception): + """ + Raised by decoding functions when the signature is malformed. + + Malformed in this context means that the relevant strings or integers + do not match what a signature over provided curve would create. Either + because the byte strings have incorrect lengths or because the encoded + values are too large. + """ + + pass + + +def sigdecode_string(signature, order): + """ + Decoder for :term:`raw encoding` of ECDSA signatures. + + raw encoding is a simple concatenation of the two integers that comprise + the signature, with each encoded using the same amount of bytes depending + on curve size/order. + + It's expected that this function will be used as the ``sigdecode=`` + parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. + + :param signature: encoded signature + :type signature: bytes like object + :param order: order of the curve over which the signature was computed + :type order: int + + :raises MalformedSignature: when the encoding of the signature is invalid + + :return: tuple with decoded ``r`` and ``s`` values of signature + :rtype: tuple of ints + """ + signature = normalise_bytes(signature) + l = orderlen(order) + if not len(signature) == 2 * l: + raise MalformedSignature( + "Invalid length of signature, expected {0} bytes long, " + "provided string is {1} bytes long".format(2 * l, len(signature)) + ) + r = string_to_number_fixedlen(signature[:l], order) + s = string_to_number_fixedlen(signature[l:], order) + return r, s + + +def sigdecode_strings(rs_strings, order): + """ + Decode the signature from two strings. + + First string needs to be a big endian encoding of ``r``, second needs to + be a big endian encoding of the ``s`` parameter of an ECDSA signature. + + It's expected that this function will be used as the ``sigdecode=`` + parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. 
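+
+    A minimal sketch pairing this decoder with :func:`sigencode_strings`
+    from this module (assuming the NIST256p curve)::
+
+        from ecdsa import SigningKey, NIST256p
+        from ecdsa.util import sigencode_strings, sigdecode_strings
+
+        sk = SigningKey.generate(curve=NIST256p)
+        r_str, s_str = sk.sign(b"data", sigencode=sigencode_strings)
+        assert sk.verifying_key.verify(
+            (r_str, s_str), b"data", sigdecode=sigdecode_strings
+        )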
+ + :param list rs_strings: list of two bytes-like objects, each encoding one + parameter of signature + :param int order: order of the curve over which the signature was computed + + :raises MalformedSignature: when the encoding of the signature is invalid + + :return: tuple with decoded ``r`` and ``s`` values of signature + :rtype: tuple of ints + """ + if not len(rs_strings) == 2: + raise MalformedSignature( + "Invalid number of strings provided: {0}, expected 2".format( + len(rs_strings) + ) + ) + (r_str, s_str) = rs_strings + r_str = normalise_bytes(r_str) + s_str = normalise_bytes(s_str) + l = orderlen(order) + if not len(r_str) == l: + raise MalformedSignature( + "Invalid length of first string ('r' parameter), " + "expected {0} bytes long, provided string is {1} " + "bytes long".format(l, len(r_str)) + ) + if not len(s_str) == l: + raise MalformedSignature( + "Invalid length of second string ('s' parameter), " + "expected {0} bytes long, provided string is {1} " + "bytes long".format(l, len(s_str)) + ) + r = string_to_number_fixedlen(r_str, order) + s = string_to_number_fixedlen(s_str, order) + return r, s + + +def sigdecode_der(sig_der, order): + """ + Decoder for DER format of ECDSA signatures. + + DER format of signature is one that uses the :term:`ASN.1` :term:`DER` + rules to encode it as a sequence of two integers:: + + Ecdsa-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + It's expected that this function will be used as as the ``sigdecode=`` + parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. + + :param sig_der: encoded signature + :type sig_der: bytes like object + :param order: order of the curve over which the signature was computed + :type order: int + + :raises UnexpectedDER: when the encoding of signature is invalid + + :return: tuple with decoded ``r`` and ``s`` values of signature + :rtype: tuple of ints + """ + sig_der = normalise_bytes(sig_der) + # return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) + rs_strings, empty = der.remove_sequence(sig_der) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after DER sig: %s" % binascii.hexlify(empty) + ) + r, rest = der.remove_integer(rs_strings) + s, empty = der.remove_integer(rest) + if empty != b"": + raise der.UnexpectedDER( + "trailing junk after DER numbers: %s" % binascii.hexlify(empty) + ) + return r, s diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/METADATA new file mode 100644 index 0000000..04cbc2a --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/METADATA @@ -0,0 +1,466 @@ +Metadata-Version: 2.4 +Name: email-validator +Version: 2.3.0 +Summary: A robust email address syntax and deliverability validation library. 
+Home-page: https://github.com/JoshData/python-email-validator +Author: Joshua Tauberer +Author-email: jt@occams.info +License: Unlicense +Keywords: email address validator +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: The Unlicense (Unlicense) +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: dnspython>=2.0.0 +Requires-Dist: idna>=2.0.0 +Dynamic: license-file + +email-validator: Validate Email Addresses +========================================= + +A robust email address syntax and deliverability validation library for +Python 3.8+ by [Joshua Tauberer](https://joshdata.me). + +This library validates that a string is of the form `name@example.com` +and optionally checks that the domain name is set up to receive email. +This is the sort of validation you would want when you are identifying +users by their email address like on a registration form. + +Key features: + +* Checks that an email address has the correct syntax --- great for + email-based registration/login forms or validating data. +* Gives friendly English error messages when validation fails that you + can display to end-users. +* Checks deliverability (optional): Does the domain name resolve? + (You can override the default DNS resolver to add query caching.) +* Supports internationalized domain names (like `@ツ.life`), + internationalized local parts (like `ツ@example.com`), + and optionally parses display names (e.g. `"My Name" `). +* Rejects addresses with invalid or unsafe Unicode characters, + obsolete email address syntax that you'd find unexpected, + special use domain names like `@localhost`, + and domains without a dot by default. + This is an opinionated library! +* Normalizes email addresses (important for internationalized + and quoted-string addresses! see below). +* Python type annotations are used. + +This is an opinionated library. You should definitely also consider using +the less-opinionated [pyIsEmail](https://github.com/michaelherold/pyIsEmail) +if it works better for you. + +[![Build Status](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml/badge.svg)](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml) + +View the [CHANGELOG / Release Notes](CHANGELOG.md) for the version history of changes in the library. Occasionally this README is ahead of the latest published package --- see the CHANGELOG for details. + +--- + +Installation +------------ + +This package [is on PyPI](https://pypi.org/project/email-validator/), so: + +```sh +pip install email-validator +``` + +(You might need to use `pip3` depending on your local environment.) + +Quick Start +----------- + +If you're validating a user's email address before creating a user +account in your application, you might do this: + +```python +from email_validator import validate_email, EmailNotValidError + +email = "my+address@example.org" + +try: + + # Check that the email address is valid. 
Turn on check_deliverability + # for first-time validations like on account creation pages (but not + # login pages). + emailinfo = validate_email(email, check_deliverability=False) + + # After this point, use only the normalized form of the email address, + # especially before going to a database query. + email = emailinfo.normalized + +except EmailNotValidError as e: + + # The exception message is human-readable explanation of why it's + # not a valid (or deliverable) email address. + print(str(e)) +``` + +This validates the address and gives you its normalized form. You should +**put the normalized form in your database** and always normalize before +checking if an address is in your database. When using this in a login form, +set `check_deliverability` to `False` to avoid unnecessary DNS queries. + +Usage +----- + +### Overview + +The module provides a function `validate_email(email_address)` which +takes an email address and: + +- Raises a `EmailNotValidError` with a helpful, human-readable error + message explaining why the email address is not valid, or +- Returns an object with a normalized form of the email address (which + you should use!) and other information about it. + +When an email address is not valid, `validate_email` raises either an +`EmailSyntaxError` if the form of the address is invalid or an +`EmailUndeliverableError` if the domain name fails DNS checks. Both +exception classes are subclasses of `EmailNotValidError`, which in turn +is a subclass of `ValueError`. + +But when an email address is valid, an object is returned containing +a normalized form of the email address (which you should use!) and +other information. + +The validator doesn't, by default, permit obsoleted forms of email addresses +that no one uses anymore even though they are still valid and deliverable, since +they will probably give you grief if you're using email for login. (See +later in the document about how to allow some obsolete forms.) + +The validator optionally checks that the domain name in the email address has +a DNS MX record indicating that it can receive email. (Except a Null MX record. +If there is no MX record, a fallback A/AAAA-record is permitted, unless +a reject-all SPF record is present.) DNS is slow and sometimes unavailable or +unreliable, so consider whether these checks are useful for your use case and +turn them off if they aren't. +There is nothing to be gained by trying to actually contact an SMTP server, so +that's not done here. For privacy, security, and practicality reasons, servers +are good at not giving away whether an address is +deliverable or not: email addresses that appear to accept mail at first +can bounce mail after a delay, and bounced mail may indicate a temporary +failure of a good email address (sometimes an intentional failure, like +greylisting). + +### Options + +The `validate_email` function also accepts the following keyword arguments +(defaults are as shown below): + +`check_deliverability=True`: If true, DNS queries are made to check that the domain name in the email address (the part after the @-sign) can receive mail, as described above. Set to `False` to skip this DNS-based check. It is recommended to pass `False` when performing validation for login pages (but not account creation pages) since re-validation of a previously validated domain in your database by querying DNS at every login is probably undesirable. You can also set `email_validator.CHECK_DELIVERABILITY` to `False` to turn this off for all calls by default. 
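+
+For example, both toggles described above can be used like this (a minimal
+sketch; `normalized_or_none` is just an illustrative helper name):
+
+```python
+import email_validator
+from email_validator import validate_email, EmailNotValidError
+
+# Turn the DNS-based deliverability check off for every call by default...
+email_validator.CHECK_DELIVERABILITY = False
+
+# ...or skip it for a single call, e.g. on a login page.
+def normalized_or_none(email):
+    try:
+        return validate_email(email, check_deliverability=False).normalized
+    except EmailNotValidError:
+        return None
+```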
+ +`dns_resolver=None`: Pass an instance of [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to control the DNS resolver including setting a timeout and [a cache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html). The `caching_resolver` function shown below is a helper function to construct a dns.resolver.Resolver with a [LRUCache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html#dns.resolver.LRUCache). Reuse the same resolver instance across calls to `validate_email` to make use of the cache. + +`test_environment=False`: If `True`, DNS-based deliverability checks are disabled and `test` and `**.test` domain names are permitted (see below). You can also set `email_validator.TEST_ENVIRONMENT` to `True` to turn it on for all calls by default. + +`allow_smtputf8=True`: Set to `False` to prohibit internationalized addresses that would + require the + [SMTPUTF8](https://tools.ietf.org/html/rfc6531) extension. You can also set `email_validator.ALLOW_SMTPUTF8` to `False` to turn it off for all calls by default. + +`allow_quoted_local=False`: Set to `True` to allow obscure and potentially problematic email addresses in which the part of the address before the @-sign contains spaces, @-signs, or other surprising characters when the local part is surrounded in quotes (so-called quoted-string local parts). In the object returned by `validate_email`, the normalized local part removes any unnecessary backslash-escaping and even removes the surrounding quotes if the address would be valid without them. You can also set `email_validator.ALLOW_QUOTED_LOCAL` to `True` to turn this on for all calls by default. + +`allow_domain_literal=False`: Set to `True` to allow bracketed IPv4 and "IPv6:"-prefixed IPv6 addresses in the domain part of the email address. No deliverability checks are performed for these addresses. In the object returned by `validate_email`, the normalized domain will use the condensed IPv6 format, if applicable. The object's `domain_address` attribute will hold the parsed `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object if applicable. You can also set `email_validator.ALLOW_DOMAIN_LITERAL` to `True` to turn this on for all calls by default. + +`allow_display_name=False`: Set to `True` to allow a display name and bracketed address in the input string, like `My Name `. It's implemented in the spirit but not the letter of RFC 5322 3.4, so it may be stricter or more relaxed than what you want. The display name, if present, is provided in the returned object's `display_name` field after being unquoted and unescaped. You can also set `email_validator.ALLOW_DISPLAY_NAME` to `True` to turn this on for all calls by default. + +`allow_empty_local=False`: Set to `True` to allow an empty local part (i.e. + `@example.com`), e.g. for validating Postfix aliases. + +`strict=False`: Set to `True` to perform additional syntax checks (currently only a local part length check). This should be used by mail service providers at address creation to ensure email addresses meet broad compatibility requirements. + +### DNS timeout and cache + +When validating many email addresses or to control the timeout (the default is 15 seconds), create a caching [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to reuse in each call. 
The `caching_resolver` function returns one easily for you: + +```python +from email_validator import validate_email, caching_resolver + +resolver = caching_resolver(timeout=10) + +while True: + validate_email(email, dns_resolver=resolver) +``` + +### Test addresses + +This library rejects email addresses that use the [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) `invalid`, `localhost`, `test`, and some others by raising `EmailSyntaxError`. This is to protect your system from abuse: You probably don't want a user to be able to cause an email to be sent to `localhost` (although they might be able to still do so via a malicious MX record). However, in your non-production test environments you may want to use `@test` or `@myname.test` email addresses. There are three ways you can allow this: + +1. Add `test_environment=True` to the call to `validate_email` (see above). +2. Set `email_validator.TEST_ENVIRONMENT` to `True` globally. +3. Remove the special-use domain name that you want to use from `email_validator.SPECIAL_USE_DOMAIN_NAMES`, e.g.: + +```python +import email_validator +email_validator.SPECIAL_USE_DOMAIN_NAMES.remove("test") +``` + +It is tempting to use `@example.com/net/org` in tests. They are *not* in this library's `SPECIAL_USE_DOMAIN_NAMES` list so you can, but shouldn't, use them. These domains are reserved to IANA for use in documentation so there is no risk of accidentally emailing someone at those domains. But beware that this library will nevertheless reject these domain names if DNS-based deliverability checks are not disabled because these domains do not resolve to domains that accept email. In tests, consider using your own domain name or `@test` or `@myname.test` instead. + +Internationalized email addresses +--------------------------------- + +The email protocol SMTP and the domain name system DNS have historically +only allowed English (ASCII) characters in email addresses and domain names, +respectively. Each has adapted to internationalization in a separate +way, creating two separate aspects to email address internationalization. + +(If your mail submission library doesn't support Unicode at all, then +immediately prior to mail submission you must replace the email address with +its ASCII-ized form. This library gives you back the ASCII-ized form in the +`ascii_email` field in the returned object.) + +### Internationalized domain names (IDN) + +The first is [internationalized domain names (RFC +5891)](https://tools.ietf.org/html/rfc5891), a.k.a IDNA 2008. The DNS +system has not been updated with Unicode support. Instead, internationalized +domain names are converted into a special IDNA ASCII "[Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)" +form starting with `xn--`. When an email address has non-ASCII +characters in its domain part, the domain part is replaced with its IDNA +ASCII equivalent form in the process of mail transmission. Your mail +submission library probably does this for you transparently. ([Compliance +around the web is not very good though](http://archives.miloush.net/michkap/archive/2012/02/27/10273315.html).) This library conforms to IDNA 2008 +using the [idna](https://github.com/kjd/idna) module by Kim Davies. + +### Internationalized local parts + +The second sort of internationalization is internationalization in the +*local* part of the address (before the @-sign). 
In non-internationalized +email addresses, only English letters, numbers, and some punctuation +(`._!#$%&'^``*+-=~/?{|}`) are allowed. In internationalized email address +local parts, a wider range of Unicode characters are allowed. + +Email addresses with these non-ASCII characters require that your mail +submission library and all the mail servers along the route to the destination, +including your own outbound mail server, all support the +[SMTPUTF8 (RFC 6531)](https://tools.ietf.org/html/rfc6531) extension. +Support for SMTPUTF8 varies. If you know ahead of time that SMTPUTF8 is not +supported by your mail submission stack, then you must filter out addresses that +require SMTPUTF8 using the `allow_smtputf8=False` keyword argument (see above). +This will cause the validation function to raise a `EmailSyntaxError` if +delivery would require SMTPUTF8. If you do not set `allow_smtputf8=False`, +you can also check the value of the `smtputf8` field in the returned object. + +### Unsafe Unicode characters are rejected + +A surprisingly large number of Unicode characters are not safe to display, +especially when the email address is concatenated with other text, so this +library tries to protect you by not permitting reserved, non-, private use, +formatting (which can be used to alter the display order of characters), +whitespace, and control characters, and combining characters +as the first character of the local part and the domain name (so that they +cannot combine with something outside of the email address string or with +the @-sign). See https://qntm.org/safe and https://trojansource.codes/ +for relevant prior work. (Other than whitespace, these are checks that +you should be applying to nearly all user inputs in a security-sensitive +context.) This does not guard against the well known problem that many +Unicode characters look alike, which can be used to fool humans reading +displayed text. + + +Normalization +------------- + +### Unicode Normalization + +The use of Unicode in email addresses introduced a normalization +problem. Different Unicode strings can look identical and have the same +semantic meaning to the user. The `normalized` field returned on successful +validation provides the correctly normalized form of the given email +address. + +For example, the CJK fullwidth Latin letters are considered semantically +equivalent in domain names to their ASCII counterparts. This library +normalizes them to their ASCII counterparts (as required by IDNA): + +```python +emailinfo = validate_email("me@Domain.com") +print(emailinfo.normalized) +print(emailinfo.ascii_email) +# prints "me@domain.com" twice +``` + +Because an end-user might type their email address in different (but +equivalent) un-normalized forms at different times, you ought to +replace what they enter with the normalized form immediately prior to +going into your database (during account creation), querying your database +(during login), or sending outbound mail. 
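+
+A short sketch of that advice (the fullwidth spelling below is an
+illustrative, equivalent way a user might type the same domain):
+
+```python
+from email_validator import validate_email
+
+# Different user-typed spellings of one mailbox normalize to the same
+# canonical string, so store and query only the normalized form.
+a = validate_email("me@Domain.com", check_deliverability=False)
+b = validate_email("me@Ｄｏｍａｉｎ.com", check_deliverability=False)
+assert a.normalized == b.normalized == "me@domain.com"
+```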
+ +The normalizations include lowercasing the domain part of the email +address (domain names are case-insensitive), [Unicode "NFC" +normalization](https://en.wikipedia.org/wiki/Unicode_equivalence) of the +whole address (which turns characters plus [combining +characters](https://en.wikipedia.org/wiki/Combining_character) into +precomposed characters where possible, replacement of [fullwidth and +halfwidth +characters](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) +in the domain part, possibly other +[UTS46](http://unicode.org/reports/tr46) mappings on the domain part, +and conversion from Punycode to Unicode characters. + +Normalization may change the characters in the email address and the +length of the email address, such that a string might be a valid address +before normalization but invalid after, or vice versa. This library only +permits addresses that are valid both before and after normalization. + +(See [RFC 6532 (internationalized email) section +3.1](https://tools.ietf.org/html/rfc6532#section-3.1) and [RFC 5895 +(IDNA 2008) section 2](http://www.ietf.org/rfc/rfc5895.txt).) + +### Other Normalization + +Normalization is also applied to quoted-string local parts and domain +literal IPv6 addresses if you have allowed them by the `allow_quoted_local` +and `allow_domain_literal` options. In quoted-string local parts, unnecessary +backslash escaping is removed and even the surrounding quotes are removed if +they are unnecessary. For IPv6 domain literals, the IPv6 address is +normalized to condensed form. [RFC 2142](https://datatracker.ietf.org/doc/html/rfc2142) +also requires lowercase normalization for some specific mailbox names like `postmaster@`. + + +Examples +-------- + +For the email address `test@joshdata.me`, the returned object is: + +```python +ValidatedEmail( + normalized='test@joshdata.me', + local_part='test', + domain='joshdata.me', + ascii_email='test@joshdata.me', + ascii_local_part='test', + ascii_domain='joshdata.me', + smtputf8=False) +``` + +For the fictitious but valid address `example@ツ.ⓁⒾⒻⒺ`, which has an +internationalized domain but ASCII local part, the returned object is: + +```python +ValidatedEmail( + normalized='example@ツ.life', + local_part='example', + domain='ツ.life', + ascii_email='example@xn--bdk.life', + ascii_local_part='example', + ascii_domain='xn--bdk.life', + smtputf8=False) + +``` + +Note that `normalized` and other fields provide a normalized form of the +email address, domain name, and (in other cases) local part (see earlier +discussion of normalization), which you should use in your database. + +Calling `validate_email` with the ASCII form of the above email address, +`example@xn--bdk.life`, returns the exact same information (i.e., the +`normalized` field always will contain Unicode characters, not Punycode). + +For the fictitious address `ツ-test@joshdata.me`, which has an +internationalized local part, the returned object is: + +```python +ValidatedEmail( + normalized='ツ-test@joshdata.me', + local_part='ツ-test', + domain='joshdata.me', + ascii_email=None, + ascii_local_part=None, + ascii_domain='joshdata.me', + smtputf8=True) +``` + +Now `smtputf8` is `True` and `ascii_email` is `None` because the local +part of the address is internationalized. The `local_part` and `normalized` fields +return the normalized form of the address. 
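+
+If your mail submission stack lacks SMTPUTF8 support, these fields can gate
+submission (a sketch reusing the fictitious address from the example above):
+
+```python
+from email_validator import validate_email
+
+info = validate_email("example@ツ.life", check_deliverability=False)
+if info.smtputf8:
+    # Non-ASCII local part: no ASCII form exists (info.ascii_email is None)
+    # and every hop must support SMTPUTF8 (RFC 6531).
+    raise RuntimeError("address requires an SMTPUTF8-capable mail path")
+else:
+    recipient = info.ascii_email  # here: "example@xn--bdk.life"
+```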
+ +Return value +------------ + +When an email address passes validation, the fields in the returned object +are: + +| Field | Value | +| -----:|-------| +| `normalized` | The normalized form of the email address that you should put in your database. This combines the `local_part` and `domain` fields (see below). | +| `ascii_email` | If set, an ASCII-only form of the normalized email address by replacing the domain part with [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt). This field will be present when an ASCII-only form of the email address exists (including if the email address is already ASCII). If the local part of the email address contains internationalized characters, `ascii_email` will be `None`. If set, it merely combines `ascii_local_part` and `ascii_domain`. | +| `local_part` | The normalized local part of the given email address (before the @-sign). Normalization includes Unicode NFC normalization and removing unnecessary quoted-string quotes and backslashes. If `allow_quoted_local` is True and the surrounding quotes are necessary, the quotes _will_ be present in this field. | +| `ascii_local_part` | If set, the local part, which is composed of ASCII characters only. | +| `domain` | The canonical internationalized Unicode form of the domain part of the email address. If the returned string contains non-ASCII characters, either the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit the message or else the email address's domain part must be converted to IDNA ASCII first: Use `ascii_domain` field instead. | +| `ascii_domain` | The [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)-encoded form of the domain part of the given email address, as it would be transmitted on the wire. | +| `domain_address` | If domain literals are allowed and if the email address contains one, an `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object. | +| `display_name` | If no display name was present and angle brackets do not surround the address, this will be `None`; otherwise, it will be set to the display name, or the empty string if there were angle brackets but no display name. If the display name was quoted, it will be unquoted and unescaped. | +| `smtputf8` | A boolean indicating that the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit messages to this address because the local part of the address has non-ASCII characters (the local part cannot be IDNA-encoded). If `allow_smtputf8=False` is passed as an argument, this flag will always be false because an exception is raised if it would have been true. | +| `mx` | A list of (priority, domain) tuples of MX records specified in the DNS for the domain (see [RFC 5321 section 5](https://tools.ietf.org/html/rfc5321#section-5)). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | +| `mx_fallback_type` | `None` if an `MX` record is found. If no MX records are actually specified in DNS and instead are inferred, through an obsolete mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | +| `spf` | Any SPF record found while checking deliverability. Only set if the SPF record is queried. 
| + +Assumptions +----------- + +By design, this validator does not pass all email addresses that +strictly conform to the standards. Many email address forms are obsolete +or likely to cause trouble: + +* The validator assumes the email address is intended to be + usable on the public Internet. The domain part + of the email address must be a resolvable domain name + (see the deliverability checks described above). + Most [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) + and their subdomains, as well as + domain names without a `.`, are rejected as a syntax error + (except see the `test_environment` parameter above). +* Obsolete email syntaxes are rejected: + The unusual ["(comment)" syntax](https://github.com/JoshData/python-email-validator/issues/77) + is rejected. Extremely old obsolete syntaxes are + rejected. Quoted-string local parts and domain-literal addresses + are rejected by default, but there are options to allow them (see above). + No one uses these forms anymore, and I can't think of any reason why anyone + using this library would need to accept them. + +Testing +------- + +Tests can be run using + +```sh +pip install -r test_requirements.txt +make test +``` + +Tests run with mocked DNS responses. When adding or changing tests, temporarily turn on the `BUILD_MOCKED_DNS_RESPONSE_DATA` flag in `tests/mocked_dns_responses.py` to re-build the database of mocked responses from live queries. + +For Project Maintainers +----------------------- + +The package is distributed as a universal wheel and as a source package. + +To release: + +* Update CHANGELOG.md. +* Update the version number in `email_validator/version.py`. +* Make & push a commit with the new version number and make sure tests pass. +* Make a release at https://github.com/JoshData/python-email-validator/releases/new creating a new tag (or use command below). +* Publish a source and wheel distribution to pypi (see command below). + +```sh +git tag v$(cat email_validator/version.py | sed "s/.* = //" | sed 's/"//g') +git push --tags +./release_to_pypi.sh +``` + +License +------- + +This project is free of any copyright restrictions per the [Unlicense](https://unlicense.org/). (Prior to Feb. 4, 2024, the project was made available under the terms of the [CC0 1.0 Universal public domain dedication](http://creativecommons.org/publicdomain/zero/1.0/).) See [LICENSE](LICENSE) and [CONTRIBUTING.md](CONTRIBUTING.md). 
diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/RECORD new file mode 100644 index 0000000..9968dec --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/RECORD @@ -0,0 +1,27 @@ +../../../bin/email_validator,sha256=6oKEJmYvjSy_uJC9BdVII6Ng3ICUFK-76UgUKQea8is,215 +email_validator-2.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +email_validator-2.3.0.dist-info/METADATA,sha256=Kpe4Hu_NhWvICwNG9H-i2AC5pDi_j5IxrgD-kx1cn7w,26006 +email_validator-2.3.0.dist-info/RECORD,, +email_validator-2.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +email_validator-2.3.0.dist-info/entry_points.txt,sha256=zRM_6bNIUSHTbNx5u6M3nK1MAguvryrc9hICC6HyrBg,66 +email_validator-2.3.0.dist-info/licenses/LICENSE,sha256=ZyF5dS4QkTSj-yvdB4Cyn9t6A5dPD1hqE66tUSlWLUw,1212 +email_validator-2.3.0.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16 +email_validator/__init__.py,sha256=g3oVBGdXGJATgBnVqt5Q7pUhXM9QrmOl5qWSu_RtWmQ,4381 +email_validator/__main__.py,sha256=uc6i2EMCK67cCgcHr5ZFG5LqB3khljmR7lNAYZGSUKY,2302 +email_validator/__pycache__/__init__.cpython-312.pyc,, +email_validator/__pycache__/__main__.cpython-312.pyc,, +email_validator/__pycache__/deliverability.cpython-312.pyc,, +email_validator/__pycache__/exceptions.cpython-312.pyc,, +email_validator/__pycache__/rfc_constants.cpython-312.pyc,, +email_validator/__pycache__/syntax.cpython-312.pyc,, +email_validator/__pycache__/types.cpython-312.pyc,, +email_validator/__pycache__/validate_email.cpython-312.pyc,, +email_validator/__pycache__/version.cpython-312.pyc,, +email_validator/deliverability.py,sha256=ZIjFkgWMzxYexanwKhrRHLTnjWMqlR5b0ltOnlA0u-E,7216 +email_validator/exceptions.py,sha256=Ry2j5FMpEe9JthmTF3zF5pGgWer-QmWc1m0szXAZ7fo,434 +email_validator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +email_validator/rfc_constants.py,sha256=LhUiBZLBw_Nn-KHkH--nVwOWFlgz2aCuauj98ZSl-gk,3443 +email_validator/syntax.py,sha256=puufskeIG6_ORWb7fvRdV_Yczmk4bibNZPs9TjWE1K0,38971 +email_validator/types.py,sha256=mvmwN9R3lFx9Tv9wtWvDzxfit6mr_5wQmY2I0HjuqRk,5588 +email_validator/validate_email.py,sha256=bmrdQ9dGt1-Mk0rwDRrX-l6xbYhQ0US20Dz46Aatnkk,9928 +email_validator/version.py,sha256=CpK8IH_dCUAwg9tqv7zm9FxbBFkxCnED1JUiRe7cftU,22 diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/WHEEL new file mode 100644 index 0000000..e7fa31b --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/entry_points.txt new file mode 100644 index 0000000..03c6e23 --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +email_validator = email_validator.__main__:main diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..122e7a7 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/licenses/LICENSE @@ -0,0 +1,27 @@ +This is free and unencumbered software released into the public +domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a +compiled binary, for any purpose, commercial or non-commercial, +and by any means. + +In jurisdictions that recognize copyright laws, the author or +authors of this software dedicate any and all copyright +interest in the software to the public domain. We make this +dedication for the benefit of the public at large and to the +detriment of our heirs and successors. We intend this +dedication to be an overt act of relinquishment in perpetuity +of all present and future rights to this software under +copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +For more information, please refer to diff --git a/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..798fd5e --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator-2.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +email_validator diff --git a/venv/lib/python3.12/site-packages/email_validator/__init__.py b/venv/lib/python3.12/site-packages/email_validator/__init__.py new file mode 100644 index 0000000..38d0741 --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator/__init__.py @@ -0,0 +1,103 @@ +from typing import TYPE_CHECKING + +# Export the main method, helper methods, and the public data types. +from .exceptions import EmailNotValidError, EmailSyntaxError, EmailUndeliverableError +from .types import ValidatedEmail +from .validate_email import validate_email +from .version import __version__ + +__all__ = ["validate_email", + "ValidatedEmail", "EmailNotValidError", + "EmailSyntaxError", "EmailUndeliverableError", + "caching_resolver", "__version__"] + +if TYPE_CHECKING: + from .deliverability import caching_resolver +else: + def caching_resolver(*args, **kwargs): + # Lazy load `deliverability` as it is slow to import (due to dns.resolver) + from .deliverability import caching_resolver + + return caching_resolver(*args, **kwargs) + + +# These global attributes are a part of the library's API and can be +# changed by library users. + +# Default values for keyword arguments. + +ALLOW_SMTPUTF8 = True +ALLOW_EMPTY_LOCAL = False +ALLOW_QUOTED_LOCAL = False +ALLOW_DOMAIN_LITERAL = False +ALLOW_DISPLAY_NAME = False +STRICT = False +GLOBALLY_DELIVERABLE = True +CHECK_DELIVERABILITY = True +TEST_ENVIRONMENT = False +DEFAULT_TIMEOUT = 15 # secs + +# IANA Special Use Domain Names +# Last Updated 2021-09-21 +# https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.txt +# +# The domain names without dots would be caught by the check that the domain +# name in an email address must have a period, but this list will also catch +# subdomains of these domains, which are also reserved. 
+SPECIAL_USE_DOMAIN_NAMES = [
+    # The "arpa" entry here is consolidated from a lot of arpa subdomains
+    # for private address (i.e. non-routable IP addresses like 172.16.x.x)
+    # reverse mapping, plus some other subdomains. Although RFC 6761 says
+    # that application software should not treat these domains as special,
+    # they are private-use domains and so cannot have globally deliverable
+    # email addresses, which is an assumption of this library, and probably
+    # all of arpa is similarly special-use, so we reject it all.
+    "arpa",
+
+    # RFC 6761 says applications "SHOULD NOT" treat the "example" domains
+    # as special, i.e. applications should accept these domains.
+    #
+    # The domain "example" alone fails our syntax validation because it
+    # lacks a dot (we assume no one has an email address on a TLD directly).
+    # "@example.com/net/org" will currently fail DNS-based deliverability
+    # checks because IANA publishes a NULL MX for these domains, and
+    # "@mail.example[.com/net/org]" and other subdomains will fail DNS-
+    # based deliverability checks because IANA does not publish MX or A
+    # DNS records for these subdomains.
+    # "example", # i.e. "www.example"
+    # "example.com",
+    # "example.net",
+    # "example.org",
+
+    # RFC 6761 says that applications are permitted to treat this domain
+    # as special and that DNS should return an immediate negative response,
+    # so we also immediately reject this domain, which also follows the
+    # purpose of the domain.
+    "invalid",
+
+    # RFC 6762 says that applications "may" treat ".local" as special and
+    # that "name resolution APIs and libraries SHOULD recognize these names
+    # as special," and since ".local" has no global definition, we reject
+    # it, as we expect email addresses to be globally routable.
+    "local",
+
+    # RFC 6761 says that applications (like this library) are permitted
+    # to treat "localhost" as special, and since it cannot have a globally
+    # deliverable email address, we reject it.
+    "localhost",
+
+    # RFC 7686 says "applications that do not implement the Tor protocol
+    # SHOULD generate an error upon the use of .onion and SHOULD NOT
+    # perform a DNS lookup."
+    "onion",
+
+    # Although RFC 6761 says that application software should not treat
+    # these domains as special, it also warns users that the address may
+    # resolve differently in different systems, and therefore it cannot
+    # have a globally routable email address, which is an assumption of
+    # this library, so we reject "@test" and "@*.test" addresses, unless
+    # the test_environment keyword argument is given, to allow their use
+    # in application-level test environments. These domains will generally
+    # fail deliverability checks because "test" is not an actual TLD.
+    "test",
+]
diff --git a/venv/lib/python3.12/site-packages/email_validator/__main__.py b/venv/lib/python3.12/site-packages/email_validator/__main__.py
new file mode 100644
index 0000000..84d9fd4
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/email_validator/__main__.py
@@ -0,0 +1,61 @@
+# A command-line tool for testing.
+#
+# Usage:
+#
+# python -m email_validator test@example.org
+# python -m email_validator < LIST_OF_ADDRESSES.TXT
+#
+# Provide email addresses to validate either as a command-line argument
+# or in STDIN separated by newlines. Validation errors will be printed for
+# invalid email addresses. When passing an email address on the command
+# line, if the email address is valid, information about it will be printed.
+# When using STDIN, no output will be given for valid email addresses.
+# +# Keyword arguments to validate_email can be set in environment variables +# of the same name but uppercase (see below). + +import json +import os +import sys +from typing import Any, Dict, Optional + +from .validate_email import validate_email, _Resolver +from .deliverability import caching_resolver +from .exceptions import EmailNotValidError + + +def main(dns_resolver: Optional[_Resolver] = None) -> None: + # The dns_resolver argument is for tests. + + # Set options from environment variables. + options: Dict[str, Any] = {} + for varname in ('ALLOW_SMTPUTF8', 'ALLOW_EMPTY_LOCAL', 'ALLOW_QUOTED_LOCAL', 'ALLOW_DOMAIN_LITERAL', + 'ALLOW_DISPLAY_NAME', + 'GLOBALLY_DELIVERABLE', 'CHECK_DELIVERABILITY', 'TEST_ENVIRONMENT'): + if varname in os.environ: + options[varname.lower()] = bool(os.environ[varname]) + for varname in ('DEFAULT_TIMEOUT',): + if varname in os.environ: + options[varname.lower()] = float(os.environ[varname]) + + if len(sys.argv) == 1: + # Validate the email addresses passed line-by-line on STDIN. + dns_resolver = dns_resolver or caching_resolver() + for line in sys.stdin: + email = line.strip() + try: + validate_email(email, dns_resolver=dns_resolver, **options) + except EmailNotValidError as e: + print(f"{email} {e}") + else: + # Validate the email address passed on the command line. + email = sys.argv[1] + try: + result = validate_email(email, dns_resolver=dns_resolver, **options) + print(json.dumps(result.as_dict(), indent=2, sort_keys=True, ensure_ascii=False)) + except EmailNotValidError as e: + print(e) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..43c6b22 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..9d45e74 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/deliverability.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/deliverability.cpython-312.pyc new file mode 100644 index 0000000..b9b6344 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/deliverability.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..2d8477e Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/rfc_constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/rfc_constants.cpython-312.pyc new file mode 100644 index 0000000..6201193 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/rfc_constants.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/syntax.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/email_validator/__pycache__/syntax.cpython-312.pyc new file mode 100644 index 0000000..241f737 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/syntax.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000..1a7be0e Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/validate_email.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/validate_email.cpython-312.pyc new file mode 100644 index 0000000..6806957 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/validate_email.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..ed99e24 Binary files /dev/null and b/venv/lib/python3.12/site-packages/email_validator/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/email_validator/deliverability.py b/venv/lib/python3.12/site-packages/email_validator/deliverability.py new file mode 100644 index 0000000..6100a31 --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator/deliverability.py @@ -0,0 +1,159 @@ +from typing import Any, List, Optional, Tuple, TypedDict + +import ipaddress + +from .exceptions import EmailUndeliverableError + +import dns.resolver +import dns.exception + + +def caching_resolver(*, timeout: Optional[int] = None, cache: Any = None, dns_resolver: Optional[dns.resolver.Resolver] = None) -> dns.resolver.Resolver: + if timeout is None: + from . import DEFAULT_TIMEOUT + timeout = DEFAULT_TIMEOUT + resolver = dns_resolver or dns.resolver.Resolver() + resolver.cache = cache or dns.resolver.LRUCache() + resolver.lifetime = timeout # timeout, in seconds + return resolver + + +DeliverabilityInfo = TypedDict("DeliverabilityInfo", { + "mx": List[Tuple[int, str]], + "mx_fallback_type": Optional[str], + "unknown-deliverability": str, +}, total=False) + + +def validate_email_deliverability(domain: str, domain_i18n: str, timeout: Optional[int] = None, dns_resolver: Optional[dns.resolver.Resolver] = None) -> DeliverabilityInfo: + # Check that the domain resolves to an MX record. If there is no MX record, + # try an A or AAAA record which is a deprecated fallback for deliverability. + # Raises an EmailUndeliverableError on failure. On success, returns a dict + # with deliverability information. + + # If no dns.resolver.Resolver was given, get dnspython's default resolver. + # Override the default resolver's timeout. This may affect other uses of + # dnspython in this process. + if dns_resolver is None: + from . import DEFAULT_TIMEOUT + if timeout is None: + timeout = DEFAULT_TIMEOUT + dns_resolver = dns.resolver.get_default_resolver() + dns_resolver.lifetime = timeout + elif timeout is not None: + raise ValueError("It's not valid to pass both timeout and dns_resolver.") + + deliverability_info: DeliverabilityInfo = {} + + try: + try: + # Try resolving for MX records (RFC 5321 Section 5). 
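# (dnspython's resolve() raises dns.resolver.NXDOMAIN, NoAnswer, or
# NoNameservers, or dns.exception.Timeout on failure; each of these is
# handled by an except clause below.)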
+ response = dns_resolver.resolve(domain, "MX")
+
+ # For reporting, put them in priority order and remove the trailing dot in the qnames.
+ mtas = sorted([(r.preference, str(r.exchange).rstrip('.')) for r in response])
+
+ # RFC 7505: Null MX (0, ".") records signify the domain does not accept email.
+ # Remove null MX records from the mtas list (but we've stripped trailing dots,
+ # so the 'exchange' is just "") so we can check if there are no non-null MX
+ # records remaining.
+ mtas = [(preference, exchange) for preference, exchange in mtas
+ if exchange != ""]
+ if len(mtas) == 0: # null MX only, if there were no MX records originally a NoAnswer exception would have occurred
+ raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.")
+
+ deliverability_info["mx"] = mtas
+ deliverability_info["mx_fallback_type"] = None
+
+ except dns.resolver.NoAnswer:
+ # If there was no MX record, fall back to an A or AAAA record
+ # (RFC 5321 Section 5). Check A first since it's more common.
+
+ # If the A/AAAA response has no Globally Reachable IP address,
+ # treat the response as if it were NoAnswer, i.e., the following
+ # address types are not allowed fallbacks: Private-Use, Loopback,
+ # Link-Local, and some other obscure ranges. See
+ # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml
+ # https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml
+ # (Issue #134.)
+ def is_global_addr(address: Any) -> bool:
+ try:
+ ipaddr = ipaddress.ip_address(address)
+ except ValueError:
+ return False
+ return ipaddr.is_global
+
+ try:
+ response = dns_resolver.resolve(domain, "A")
+
+ if not any(is_global_addr(r.address) for r in response):
+ raise dns.resolver.NoAnswer # fall back to AAAA
+
+ deliverability_info["mx"] = [(0, domain)]
+ deliverability_info["mx_fallback_type"] = "A"
+
+ except dns.resolver.NoAnswer:
+
+ # If there was no A record, fall back to an AAAA record.
+ # (It's unclear if SMTP servers actually do this.)
+ try:
+ response = dns_resolver.resolve(domain, "AAAA")
+
+ if not any(is_global_addr(r.address) for r in response):
+ raise dns.resolver.NoAnswer
+
+ deliverability_info["mx"] = [(0, domain)]
+ deliverability_info["mx_fallback_type"] = "AAAA"
+
+ except dns.resolver.NoAnswer as e:
+ # If there was no MX, A, or AAAA record, then mail to
+ # this domain is not deliverable, although the domain
+ # name has other records (otherwise NXDOMAIN would
+ # have been raised).
+ raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.") from e
+
+ # Check for an SPF (RFC 7208) reject-all record ("v=spf1 -all") which indicates
+ # no emails are sent from this domain (similar to a Null MX record
+ # but for sending rather than receiving). In combination with the
+ # absence of an MX record, this is probably a good sign that the
+ # domain is not used for email.
+ try:
+ response = dns_resolver.resolve(domain, "TXT")
+ for rec in response:
+ value = b"".join(rec.strings)
+ if value.startswith(b"v=spf1 "):
+ if value == b"v=spf1 -all":
+ raise EmailUndeliverableError(f"The domain name {domain_i18n} does not send email.")
+ except dns.resolver.NoAnswer:
+ # No TXT records means there is no SPF policy, so we cannot take any action.
+ pass
+
+ except dns.resolver.NXDOMAIN as e:
+ # The domain name does not exist --- there are no records of any sort
+ # for the domain name.
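# (NXDOMAIN is distinct from the NoAnswer case above: NoAnswer means the
# domain exists but lacks the queried record type, while NXDOMAIN means
# the name itself is not registered in DNS at all.)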
+ raise EmailUndeliverableError(f"The domain name {domain_i18n} does not exist.") from e + + except dns.resolver.NoNameservers: + # All nameservers failed to answer the query. This might be a problem + # with local nameservers, maybe? We'll allow the domain to go through. + return { + "unknown-deliverability": "no_nameservers", + } + + except dns.exception.Timeout: + # A timeout could occur for various reasons, so don't treat it as a failure. + return { + "unknown-deliverability": "timeout", + } + + except EmailUndeliverableError: + # Don't let these get clobbered by the wider except block below. + raise + + except Exception as e: + # Unhandled conditions should not propagate. + raise EmailUndeliverableError( + "There was an error while checking if the domain name in the email address is deliverable: " + str(e) + ) from e + + return deliverability_info diff --git a/venv/lib/python3.12/site-packages/email_validator/exceptions.py b/venv/lib/python3.12/site-packages/email_validator/exceptions.py new file mode 100644 index 0000000..87ef13c --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator/exceptions.py @@ -0,0 +1,13 @@ +class EmailNotValidError(ValueError): + """Parent class of all exceptions raised by this module.""" + pass + + +class EmailSyntaxError(EmailNotValidError): + """Exception raised when an email address fails validation because of its form.""" + pass + + +class EmailUndeliverableError(EmailNotValidError): + """Exception raised when an email address fails validation because its domain name does not appear deliverable.""" + pass diff --git a/venv/lib/python3.12/site-packages/email_validator/py.typed b/venv/lib/python3.12/site-packages/email_validator/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/email_validator/rfc_constants.py b/venv/lib/python3.12/site-packages/email_validator/rfc_constants.py new file mode 100644 index 0000000..e93441b --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator/rfc_constants.py @@ -0,0 +1,63 @@ +# These constants are defined by the email specifications. + +import re + +# Based on RFC 5322 3.2.3, these characters are permitted in email +# addresses (not taking into account internationalization) separated by dots: +ATEXT = r'a-zA-Z0-9_!#\$%&\'\*\+\-/=\?\^`\{\|\}~' +ATEXT_RE = re.compile('[.' + ATEXT + ']') # ATEXT plus dots +DOT_ATOM_TEXT = re.compile('[' + ATEXT + ']+(?:\\.[' + ATEXT + r']+)*\Z') + +# RFC 6531 3.3 extends the allowed characters in internationalized +# addresses to also include three specific ranges of UTF8 defined in +# RFC 3629 section 4, which appear to be the Unicode code points from +# U+0080 to U+10FFFF. +ATEXT_INTL = ATEXT + "\u0080-\U0010FFFF" +ATEXT_INTL_DOT_RE = re.compile('[.' + ATEXT_INTL + ']') # ATEXT_INTL plus dots +DOT_ATOM_TEXT_INTL = re.compile('[' + ATEXT_INTL + ']+(?:\\.[' + ATEXT_INTL + r']+)*\Z') + +# The domain part of the email address, after IDNA (ASCII) encoding, +# must also satisfy the requirements of RFC 952/RFC 1123 2.1 which +# restrict the allowed characters of hostnames further. +ATEXT_HOSTNAME_INTL = re.compile(r"[a-zA-Z0-9\-\." + "\u0080-\U0010FFFF" + "]") +HOSTNAME_LABEL = r'(?:(?:[a-zA-Z0-9][a-zA-Z0-9\-]*)?[a-zA-Z0-9])' +DOT_ATOM_TEXT_HOSTNAME = re.compile(HOSTNAME_LABEL + r'(?:\.' 
+ HOSTNAME_LABEL + r')*\Z')
+DOMAIN_NAME_REGEX = re.compile(r"[A-Za-z]\Z") # all TLDs currently end with a letter
+
+# Domain literal (RFC 5322 3.4.1)
+DOMAIN_LITERAL_CHARS = re.compile(r"[\u0021-\u00FA\u005E-\u007E]")
+
+# Quoted-string local part (RFC 5321 4.1.2, internationalized by RFC 6531 3.3)
+# The permitted characters in a quoted string are the characters in the range
+# 32-126, except that quotes and (literal) backslashes can only appear when escaped
+# by a backslash. When internationalized, UTF-8 strings are also permitted except
+# the ASCII characters that are not previously permitted (see above).
+# QUOTED_LOCAL_PART_ADDR = re.compile(r"^\"((?:[\u0020-\u0021\u0023-\u005B\u005D-\u007E]|\\[\u0020-\u007E])*)\"@(.*)")
+QTEXT_INTL = re.compile(r"[\u0020-\u007E\u0080-\U0010FFFF]")
+
+# Length constants
+
+# RFC 3696 + errata 1003 + errata 1690 (https://www.rfc-editor.org/errata_search.php?rfc=3696&eid=1690)
+# explains the maximum length of an email address is 254 octets based on RFC 5321 4.5.3.1.3. A
+# maximum local part length is also given at RFC 5321 4.5.3.1.1.
+#
+# But RFC 5321 4.5.3.1 says that these (and other) limits are in a sense suggestions, and longer
+# local parts have been seen in the wild. Consequently, the local part length is only checked
+# in "strict" mode. Although the email address maximum length is also somewhat of a suggestion,
+# I don't like the idea of having no length checks performed, so I'm leaving that to always be
+# checked.
+EMAIL_MAX_LENGTH = 254
+LOCAL_PART_MAX_LENGTH = 64
+
+# Although RFC 5321 4.5.3.1.2 gives a (suggested, see above) limit of 255 octets, RFC 1035 2.3.4 also
+# imposes a length limit (255 octets). But per https://stackoverflow.com/questions/32290167/what-is-the-maximum-length-of-a-dns-name,
+# two of those octets are taken up by the optional final dot and null root label.
+DNS_LABEL_LENGTH_LIMIT = 63 # in "octets", RFC 1035 2.3.1
+DOMAIN_MAX_LENGTH = 253 # in "octets" as transmitted
+
+# RFC 2142
+CASE_INSENSITIVE_MAILBOX_NAMES = [
+ 'info', 'marketing', 'sales', 'support', # section 3
+ 'abuse', 'noc', 'security', # section 4
+ 'postmaster', 'hostmaster', 'usenet', 'news', 'webmaster', 'www', 'uucp', 'ftp', # section 5
+]
diff --git a/venv/lib/python3.12/site-packages/email_validator/syntax.py b/venv/lib/python3.12/site-packages/email_validator/syntax.py
new file mode 100644
index 0000000..0b1c7b0
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/email_validator/syntax.py
@@ -0,0 +1,822 @@
+from .exceptions import EmailSyntaxError
+from .types import ValidatedEmail
+from .rfc_constants import EMAIL_MAX_LENGTH, LOCAL_PART_MAX_LENGTH, DOMAIN_MAX_LENGTH, \
+ DOT_ATOM_TEXT, DOT_ATOM_TEXT_INTL, ATEXT_RE, ATEXT_INTL_DOT_RE, ATEXT_HOSTNAME_INTL, QTEXT_INTL, \
+ DNS_LABEL_LENGTH_LIMIT, DOT_ATOM_TEXT_HOSTNAME, DOMAIN_NAME_REGEX, DOMAIN_LITERAL_CHARS
+
+import re
+import unicodedata
+import idna # implements IDNA 2008; Python's codec is only IDNA 2003
+import ipaddress
+from typing import Optional, Tuple, TypedDict, Union
+
+
+def split_email(email: str) -> Tuple[Optional[str], str, str, bool]:
+ # Return the display name, unescaped local part, and domain part
+ # of the address, and whether the local part was quoted. If no
+ # display name was present and angle brackets do not surround
+ # the address, display name will be None; otherwise, it will be
+ # set to the display name or the empty string if there were
+ # angle brackets but no display name.
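# A rough sketch of the return values for a few typical inputs
# (illustrative only, assuming well-formed input):
#
#   split_email('John Smith <john@example.com>')
#       -> ("John Smith", "john", "example.com", False)
#   split_email('"john smith"@example.com')
#       -> (None, "john smith", "example.com", True)
#   split_email('john@example.com')
#       -> (None, "john", "example.com", False)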
+
+ # Typical email addresses have a single @-sign and no quote
+ # characters, but the awkward "quoted string" local part form
+ # (RFC 5321 4.1.2) allows @-signs and escaped quotes to appear
+ # in the local part if the local part is quoted.
+
+ # A `display name <addr-spec>` format is also present in MIME messages
+ # (RFC 5322 3.4) and this format is also often recognized in
+ # mail UIs. It's not allowed in SMTP commands or in typical web
+ # login forms, but parsing it has been requested, so it's done
+ # here as a convenience. It's implemented in the spirit but not
+ # the letter of RFC 5322 3.4 because MIME messages allow newlines
+ # and comments as a part of the CFWS rule, but this is typically
+ # not allowed in mail UIs (although comment syntax was requested
+ # once too).
+ #
+ # Display names are either basic characters (the same basic characters
+ # permitted in email addresses, but periods are not allowed and spaces
+ # are allowed; see RFC 5322 Appendix A.1.2), or a quoted string with
+ # the same rules as a quoted local part. (Multiple quoted strings might
+ # be allowed? Unclear.) Optional space (RFC 5322 3.4 CFWS) and then the
+ # email address follows in angle brackets.
+ #
+ # An initial quote is ambiguous between starting a display name or
+ # a quoted local part --- fun.
+ #
+ # We assume the input string is already stripped of leading and
+ # trailing CFWS.
+
+ def split_string_at_unquoted_special(text: str, specials: Tuple[str, ...]) -> Tuple[str, str]:
+ # Split the string at the first character in specials (an @-sign
+ # or left angle bracket) that does not occur within quotes and
+ # is not followed by a Unicode combining character.
+ # If no special character is found, raise an error.
+ inside_quote = False
+ escaped = False
+ left_part = ""
+ for i, c in enumerate(text):
+ # < plus U+0338 (Combining Long Solidus Overlay) normalizes to
+ # ≮ U+226E (Not Less-Than), and it would be confusing to treat
+ # the < as the start of "<addr-spec>" syntax in that case. Likewise,
+ # if anything combines with an @ or ", we should probably not
+ # treat it as a special character.
+ if unicodedata.normalize("NFC", text[i:])[0] != c:
+ left_part += c
+
+ elif inside_quote:
+ left_part += c
+ if c == '\\' and not escaped:
+ escaped = True
+ elif c == '"' and not escaped:
+ # The only way to exit the quote is an unescaped quote.
+ inside_quote = False
+ escaped = False
+ else:
+ escaped = False
+ elif c == '"':
+ left_part += c
+ inside_quote = True
+ elif c in specials:
+ # When unquoted, stop before a special character.
+ break
+ else:
+ left_part += c
+
+ # No special symbol found. The special symbols always
+ # include an at-sign, so this always indicates a missing
+ # at-sign. The other symbol is optional.
+ if len(left_part) == len(text):
+ # The full-width at-sign might occur in CJK contexts.
+ # We can't accept it because we only accept addresses
+ # that are actually valid. But if this is common we
+ # may want to consider accepting and normalizing full-
+ # width characters for the other special symbols (and
+ # full-width dot is already accepted in internationalized
+ # domains) with a new option.
+ # See https://news.ycombinator.com/item?id=42235268.
+ if "@" in text:
+ raise EmailSyntaxError("The email address has the \"full-width\" at-sign (@) character instead of a regular at-sign.")
+
+ # Check another near-homoglyph for good measure because
+ # homoglyphs in place of required characters could be
+ # very confusing. We may want to consider checking for
+ # homoglyphs anywhere we look for a special symbol.
+ if "﹫" in text:
+ raise EmailSyntaxError('The email address has the "small commercial at" character instead of a regular at-sign.')
+
+ raise EmailSyntaxError("An email address must have an @-sign.")
+
+ # The right part is whatever is left.
+ right_part = text[len(left_part):]
+
+ return left_part, right_part
+
+ def unquote_quoted_string(text: str) -> Tuple[str, bool]:
+ # Remove surrounding quotes and unescape escaped backslashes
+ # and quotes. Escapes are parsed liberally. I think only
+ # backslashes and quotes can be escaped but we'll allow anything
+ # to be.
+ quoted = False
+ escaped = False
+ value = ""
+ for i, c in enumerate(text):
+ if quoted:
+ if escaped:
+ value += c
+ escaped = False
+ elif c == '\\':
+ escaped = True
+ elif c == '"':
+ if i != len(text) - 1:
+ raise EmailSyntaxError("Extra character(s) found after close quote: "
+ + ", ".join(safe_character_display(c) for c in text[i + 1:]))
+ break
+ else:
+ value += c
+ elif i == 0 and c == '"':
+ quoted = True
+ else:
+ value += c
+
+ return value, quoted
+
+ # Split the string at the first unquoted @-sign or left angle bracket.
+ left_part, right_part = split_string_at_unquoted_special(email, ("@", "<"))
+
+ # If the right part starts with an angle bracket,
+ # then the left part is a display name and the rest
+ # of the right part up to the final right angle bracket
+ # is the email address, <addr-spec>.
+ if right_part.startswith("<"):
+ # Remove space between the display name and angle bracket.
+ left_part = left_part.rstrip()
+
+ # Unquote and unescape the display name.
+ display_name, display_name_quoted = unquote_quoted_string(left_part)
+
+ # Check that only basic characters are present in a
+ # non-quoted display name.
+ if not display_name_quoted:
+ bad_chars = {
+ safe_character_display(c)
+ for c in display_name
+ if (not ATEXT_RE.match(c) and c != ' ') or c == '.'
+ }
+ if bad_chars:
+ raise EmailSyntaxError("The display name contains invalid characters when not quoted: " + ", ".join(sorted(bad_chars)) + ".")
+
+ # Check for other unsafe characters.
+ check_unsafe_chars(display_name, allow_space=True)
+
+ # Check that the right part ends with an angle bracket
+ # but allow spaces after it, I guess.
+ if ">" not in right_part:
+ raise EmailSyntaxError("An open angle bracket at the start of the email address has to be followed by a close angle bracket at the end.")
+ right_part = right_part.rstrip(" ")
+ if right_part[-1] != ">":
+ raise EmailSyntaxError("There can't be anything after the email address.")
+
+ # Remove the initial and trailing angle brackets.
+ addr_spec = right_part[1:].rstrip(">")
+
+ # Split the email address at the first unquoted @-sign.
+ local_part, domain_part = split_string_at_unquoted_special(addr_spec, ("@",))
+
+ # Otherwise there is no display name. The left part is the local
+ # part and the right part is the domain.
+ else:
+ display_name = None
+ local_part, domain_part = left_part, right_part
+
+ if domain_part.startswith("@"):
+ domain_part = domain_part[1:]
+
+ # Unquote the local part if it is quoted.
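# (The unescaped value is used from here on; validate_email_local_part
# decides later whether quoting was actually required and, if so,
# re-quotes with minimal escaping.)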
+ local_part, is_quoted_local_part = unquote_quoted_string(local_part) + + return display_name, local_part, domain_part, is_quoted_local_part + + +def get_length_reason(addr: str, limit: int) -> str: + """Helper function to return an error message related to invalid length.""" + diff = len(addr) - limit + suffix = "s" if diff > 1 else "" + return f"({diff} character{suffix} too many)" + + +def safe_character_display(c: str) -> str: + # Return safely displayable characters in quotes. + if c == '\\': + return f"\"{c}\"" # can't use repr because it escapes it + if unicodedata.category(c)[0] in ("L", "N", "P", "S"): + return repr(c) + + # Construct a hex string in case the unicode name doesn't exist. + if ord(c) < 0xFFFF: + h = f"U+{ord(c):04x}".upper() + else: + h = f"U+{ord(c):08x}".upper() + + # Return the character name or, if it has no name, the hex string. + return unicodedata.name(c, h) + + +class LocalPartValidationResult(TypedDict): + local_part: str + ascii_local_part: Optional[str] + smtputf8: bool + + +def validate_email_local_part(local: str, allow_smtputf8: bool = True, allow_empty_local: bool = False, + quoted_local_part: bool = False, strict: bool = False) -> LocalPartValidationResult: + """Validates the syntax of the local part of an email address.""" + + if len(local) == 0: + if not allow_empty_local: + raise EmailSyntaxError("There must be something before the @-sign.") + + # The caller allows an empty local part. Useful for validating certain + # Postfix aliases. + return { + "local_part": local, + "ascii_local_part": local, + "smtputf8": False, + } + + # Check the length of the local part by counting characters. + # (RFC 5321 4.5.3.1.1) + # We're checking the number of characters here. If the local part + # is ASCII-only, then that's the same as bytes (octets). If it's + # internationalized, then the UTF-8 encoding may be longer, but + # that may not be relevant. We will check the total address length + # instead. + if strict and len(local) > LOCAL_PART_MAX_LENGTH: + reason = get_length_reason(local, limit=LOCAL_PART_MAX_LENGTH) + raise EmailSyntaxError(f"The email address is too long before the @-sign {reason}.") + + # Check the local part against the non-internationalized regular expression. + # Most email addresses match this regex so it's probably fastest to check this first. + # (RFC 5322 3.2.3) + # All local parts matching the dot-atom rule are also valid as a quoted string + # so if it was originally quoted (quoted_local_part is True) and this regex matches, + # it's ok. + # (RFC 5321 4.1.2 / RFC 5322 3.2.4). + if DOT_ATOM_TEXT.match(local): + # It's valid. And since it's just the permitted ASCII characters, + # it's normalized and safe. If the local part was originally quoted, + # the quoting was unnecessary and it'll be returned as normalized to + # non-quoted form. + + # Return the local part and flag that SMTPUTF8 is not needed. + return { + "local_part": local, + "ascii_local_part": local, + "smtputf8": False, + } + + # The local part failed the basic dot-atom check. Try the extended character set + # for internationalized addresses. It's the same pattern but with additional + # characters permitted. + # RFC 6531 section 3.3. + valid: Optional[str] = None + requires_smtputf8 = False + if DOT_ATOM_TEXT_INTL.match(local): + # But international characters in the local part may not be permitted. + if not allow_smtputf8: + # Check for invalid characters against the non-internationalized + # permitted character set. 
+ # (RFC 5322 3.2.3) + bad_chars = { + safe_character_display(c) + for c in local + if not ATEXT_RE.match(c) + } + if bad_chars: + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") + + # Although the check above should always find something, fall back to this just in case. + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported.") + + # It's valid. + valid = "dot-atom" + requires_smtputf8 = True + + # There are no dot-atom syntax restrictions on quoted local parts, so + # if it was originally quoted, it is probably valid. More characters + # are allowed, like @-signs, spaces, and quotes, and there are no + # restrictions on the placement of dots, as in dot-atom local parts. + elif quoted_local_part: + # Check for invalid characters in a quoted string local part. + # (RFC 5321 4.1.2. RFC 5322 lists additional permitted *obsolete* + # characters which are *not* allowed here. RFC 6531 section 3.3 + # extends the range to UTF8 strings.) + bad_chars = { + safe_character_display(c) + for c in local + if not QTEXT_INTL.match(c) + } + if bad_chars: + raise EmailSyntaxError("The email address contains invalid characters in quotes before the @-sign: " + ", ".join(sorted(bad_chars)) + ".") + + # See if any characters are outside of the ASCII range. + bad_chars = { + safe_character_display(c) + for c in local + if not (32 <= ord(c) <= 126) + } + if bad_chars: + requires_smtputf8 = True + + # International characters in the local part may not be permitted. + if not allow_smtputf8: + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") + + # It's valid. + valid = "quoted" + + # If the local part matches the internationalized dot-atom form or was quoted, + # perform additional checks for Unicode strings. + if valid: + # Check that the local part is a valid, safe, and sensible Unicode string. + # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked + # by DOT_ATOM_TEXT_INTL and QTEXT_INTL. Other characters may be permitted by the + # email specs, but they may not be valid, safe, or sensible Unicode strings. + # See the function for rationale. + check_unsafe_chars(local, allow_space=(valid == "quoted")) + + # Try encoding to UTF-8. Failure is possible with some characters like + # surrogate code points, but those are checked above. Still, we don't + # want to have an unhandled exception later. + try: + local.encode("utf8") + except ValueError as e: + raise EmailSyntaxError("The email address contains an invalid character.") from e + + # If this address passes only by the quoted string form, re-quote it + # and backslash-escape quotes and backslashes (removing any unnecessary + # escapes). Per RFC 5321 4.1.2, "all quoted forms MUST be treated as equivalent, + # and the sending system SHOULD transmit the form that uses the minimum quoting possible." + if valid == "quoted": + local = '"' + re.sub(r'(["\\])', r'\\\1', local) + '"' + + return { + "local_part": local, + "ascii_local_part": local if not requires_smtputf8 else None, + "smtputf8": requires_smtputf8, + } + + # It's not a valid local part. Let's find out why. + # (Since quoted local parts are all valid or handled above, these checks + # don't apply in those cases.) + + # Check for invalid characters. 
+ # (RFC 5322 3.2.3, plus RFC 6531 3.3)
+ bad_chars = {
+ safe_character_display(c)
+ for c in local
+ if not ATEXT_INTL_DOT_RE.match(c)
+ }
+ if bad_chars:
+ raise EmailSyntaxError("The email address contains invalid characters before the @-sign: " + ", ".join(sorted(bad_chars)) + ".")
+
+ # Check for dot errors imposed by the dot-atom rule.
+ # (RFC 5322 3.2.3)
+ check_dot_atom(local, 'An email address cannot start with a {}.', 'An email address cannot have a {} immediately before the @-sign.', is_hostname=False)
+
+ # All of the reasons should already have been checked, but just in case
+ # we have a fallback message.
+ raise EmailSyntaxError("The email address contains invalid characters before the @-sign.")
+
+
+def check_unsafe_chars(s: str, allow_space: bool = False) -> None:
+ # Check for unsafe characters or characters that would make the string
+ # invalid or non-sensible Unicode.
+ bad_chars = set()
+ for i, c in enumerate(s):
+ category = unicodedata.category(c)
+ if category[0] in ("L", "N", "P", "S"):
+ # Letters, numbers, punctuation, and symbols are permitted.
+ pass
+ elif category[0] == "M":
+ # Combining character in first position would combine with something
+ # outside of the email address if concatenated, so they are not safe.
+ # We also check if this occurs after the @-sign, which would not be
+ # sensible because it would modify the @-sign.
+ if i == 0:
+ bad_chars.add(c)
+ elif category == "Zs":
+ # Spaces outside of the ASCII range are not specifically disallowed in
+ # internationalized addresses as far as I can tell, but they violate
+ # the spirit of the non-internationalized specification that email
+ # addresses do not contain ASCII spaces when not quoted. Excluding
+ # ASCII spaces when not quoted is handled directly by the atom regex.
+ #
+ # In quoted-string local parts, spaces are explicitly permitted, and
+ # the ASCII space has category Zs, so we must allow it here, and we'll
+ # allow all Unicode spaces to be consistent.
+ if not allow_space:
+ bad_chars.add(c)
+ elif category[0] == "Z":
+ # The two line and paragraph separator characters (in categories Zl and Zp)
+ # are not specifically disallowed in internationalized addresses
+ # as far as I can tell, but they violate the spirit of the non-internationalized
+ # specification that email addresses do not contain line breaks when not quoted.
+ bad_chars.add(c)
+ elif category[0] == "C":
+ # Control, format, surrogate, private use, and unassigned code points (C)
+ # are all unsafe in various ways. Control and format characters can affect
+ # text rendering if the email address is concatenated with other text.
+ # Bidirectional format characters are unsafe, even if used properly, because
+ # they cause an email address to render as a different email address.
+ # Private use characters do not make sense for publicly deliverable
+ # email addresses.
+ bad_chars.add(c)
+ else:
+ # All categories should be handled above, but in case there is something new
+ # to the Unicode specification in the future, reject all other categories.
+ bad_chars.add(c)
+ if bad_chars:
+ raise EmailSyntaxError("The email address contains unsafe characters: "
+ + ", ".join(safe_character_display(c) for c in sorted(bad_chars)) + ".")
+
+
+def check_dot_atom(label: str, start_descr: str, end_descr: str, is_hostname: bool) -> None:
+ # RFC 5322 3.2.3
+ if label.endswith("."):
+ raise EmailSyntaxError(end_descr.format("period"))
+ if label.startswith("."):
+ raise EmailSyntaxError(start_descr.format("period"))
+ if ".."
in label: + raise EmailSyntaxError("An email address cannot have two periods in a row.") + + if is_hostname: + # RFC 952 + if label.endswith("-"): + raise EmailSyntaxError(end_descr.format("hyphen")) + if label.startswith("-"): + raise EmailSyntaxError(start_descr.format("hyphen")) + if ".-" in label or "-." in label: + raise EmailSyntaxError("An email address cannot have a period and a hyphen next to each other.") + + +def uts46_valid_char(char: str) -> bool: + # By exhaustively searching for characters rejected by + # for c in (chr(i) for i in range(0x110000)): + # idna.uts46_remap(c, std3_rules=False, transitional=False) + # I found the following rules are pretty close. + c = ord(char) + if 0x80 <= c <= 0x9f: + # 8-bit ASCII range. + return False + elif ((0x2010 <= c <= 0x2060 and not (0x2024 <= c <= 0x2026) and not (0x2028 <= c <= 0x202E)) + or c in (0x00AD, 0x2064, 0xFF0E) + or 0x200B <= c <= 0x200D + or 0x1BCA0 <= c <= 0x1BCA3): + # Characters that are permitted but fall into one of the + # tests below. + return True + elif unicodedata.category(chr(c)) in ("Cf", "Cn", "Co", "Cs", "Zs", "Zl", "Zp"): + # There are a bunch of Zs characters including regular space + # that are allowed by UTS46 but are not allowed in domain + # names anyway. + # + # There are some Cn (unassigned) characters that the idna + # package doesn't reject but we can, I think. + return False + elif "002E" in unicodedata.decomposition(chr(c)).split(" "): + # Characters that decompose into a sequence with a dot. + return False + return True + + +class DomainNameValidationResult(TypedDict): + ascii_domain: str + domain: str + + +def validate_email_domain_name(domain: str, test_environment: bool = False, globally_deliverable: bool = True) -> DomainNameValidationResult: + """Validates the syntax of the domain part of an email address.""" + + # Check for invalid characters. + # (RFC 952 plus RFC 6531 section 3.3 for internationalized addresses) + bad_chars = { + safe_character_display(c) + for c in domain + if not ATEXT_HOSTNAME_INTL.match(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") + + # Check for unsafe characters. + # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked + # by DOT_ATOM_TEXT_INTL. Other characters may be permitted by the email specs, but + # they may not be valid, safe, or sensible Unicode strings. + check_unsafe_chars(domain) + + # Reject characters that would be rejected by UTS-46 normalization next but + # with an error message under our control. + bad_chars = { + safe_character_display(c) for c in domain + if not uts46_valid_char(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") + + # Perform UTS-46 normalization, which includes casefolding, NFC normalization, + # and converting all label separators (the period/full stop, fullwidth full stop, + # ideographic full stop, and halfwidth ideographic full stop) to regular dots. + # It will also raise an exception if there is an invalid character in the input, + # such as "⒈" which is invalid because it would expand to include a dot and + # U+1FEF which normalizes to a backtick, which is not an allowed hostname character. + # Since several characters *are* normalized to a dot, this has to come before + # checks related to dots, like check_dot_atom which comes next. 
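# (Illustrative: idna.uts46_remap("Example。Com", std3_rules=False,
# transitional=False) should return "example.com" -- casefolded, with the
# ideographic full stop U+3002 mapped to a regular dot.)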
+ original_domain = domain
+ try:
+ domain = idna.uts46_remap(domain, std3_rules=False, transitional=False)
+ except idna.IDNAError as e:
+ raise EmailSyntaxError(f"The part after the @-sign contains invalid characters ({e}).") from e
+
+ # Check for invalid characters after Unicode normalization which are not caught
+ # by uts46_remap (see tests for examples).
+ bad_chars = {
+ safe_character_display(c)
+ for c in domain
+ if not ATEXT_HOSTNAME_INTL.match(c)
+ }
+ if bad_chars:
+ raise EmailSyntaxError("The part after the @-sign contains invalid characters after Unicode normalization: " + ", ".join(sorted(bad_chars)) + ".")
+
+ # The domain part is made up of dot-separated "labels." Each label must
+ # have at least one character and cannot start or end with dashes, which
+ # means there are some surprising restrictions on periods and dashes.
+ # Check that before we do IDNA encoding because the IDNA library gives
+ # unfriendly errors for these cases, but after UTS-46 normalization because
+ # it can insert periods and hyphens (from fullwidth characters).
+ # (RFC 952, RFC 1123 2.1, RFC 5322 3.2.3)
+ check_dot_atom(domain, 'An email address cannot have a {} immediately after the @-sign.', 'An email address cannot end with a {}.', is_hostname=True)
+
+ # Check for RFC 5890's invalid R-LDH labels, which are labels that start
+ # with two characters other than "xn" and two dashes.
+ for label in domain.split("."):
+ if re.match(r"(?!xn)..--", label, re.I):
+ raise EmailSyntaxError("An email address cannot have two letters followed by two dashes immediately after the @-sign or after a period, except Punycode.")
+
+ if DOT_ATOM_TEXT_HOSTNAME.match(domain):
+ # This is a valid non-internationalized domain.
+ ascii_domain = domain
+ else:
+ # If international characters are present in the domain name, convert
+ # the domain to IDNA ASCII. If internationalized characters are present,
+ # the MTA must either support SMTPUTF8 or the mail client must convert the
+ # domain name to IDNA before submission.
+ #
+ # For ASCII-only domains, the transformation does nothing and is safe to
+ # apply. However, to ensure we don't rely on the idna library for basic
+ # syntax checks, we don't use it if it's not needed.
+ #
+ # idna.encode also checks the domain name length after encoding but it
+ # doesn't give a nice error, so we call the underlying idna.alabel method
+ # directly. idna.alabel checks label length and doesn't give great messages,
+ # but we can't easily go to lower level methods.
+ try:
+ ascii_domain = ".".join(
+ idna.alabel(label).decode("ascii")
+ for label in domain.split(".")
+ )
+ except idna.IDNAError as e:
+ # Some errors would have already been raised by idna.uts46_remap.
+ raise EmailSyntaxError(f"The part after the @-sign is invalid ({e}).") from e
+
+ # Check the syntax of the string returned by idna.encode.
+ # It should never fail.
+ if not DOT_ATOM_TEXT_HOSTNAME.match(ascii_domain):
+ raise EmailSyntaxError("The email address contains invalid characters after the @-sign after IDNA encoding.")
+
+ # Check the length of the domain name in bytes.
+ # (RFC 1035 2.3.4 and RFC 5321 4.5.3.1.2)
+ # We're checking the number of bytes ("octets") here, which can be much
+ # higher than the number of characters in internationalized domains,
+ # on the assumption that the domain may be transmitted without SMTPUTF8
+ # as IDNA ASCII. (This is also checked by idna.encode, so this exception
+ # is never reached for internationalized domains.)
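# (Illustrative: "bücher.example" is 14 characters, but its IDNA ASCII
# form "xn--bcher-kva.example" is 21 octets, and the 21-octet figure is
# what is measured against DOMAIN_MAX_LENGTH here.)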
+ if len(ascii_domain) > DOMAIN_MAX_LENGTH: + if ascii_domain == original_domain: + reason = get_length_reason(ascii_domain, limit=DOMAIN_MAX_LENGTH) + raise EmailSyntaxError(f"The email address is too long after the @-sign {reason}.") + else: + diff = len(ascii_domain) - DOMAIN_MAX_LENGTH + s = "" if diff == 1 else "s" + raise EmailSyntaxError(f"The email address is too long after the @-sign ({diff} byte{s} too many after IDNA encoding).") + + # Also check the label length limit. + # (RFC 1035 2.3.1) + for label in ascii_domain.split("."): + if len(label) > DNS_LABEL_LENGTH_LIMIT: + reason = get_length_reason(label, limit=DNS_LABEL_LENGTH_LIMIT) + raise EmailSyntaxError(f"After the @-sign, periods cannot be separated by so many characters {reason}.") + + if globally_deliverable: + # All publicly deliverable addresses have domain names with at least + # one period, at least for gTLDs created since 2013 (per the ICANN Board + # New gTLD Program Committee, https://www.icann.org/en/announcements/details/new-gtld-dotless-domain-names-prohibited-30-8-2013-en). + # We'll consider the lack of a period a syntax error + # since that will match people's sense of what an email address looks + # like. We'll skip this in test environments to allow '@test' email + # addresses. + if "." not in ascii_domain and not (ascii_domain == "test" and test_environment): + raise EmailSyntaxError("The part after the @-sign is not valid. It should have a period.") + + # We also know that all TLDs currently end with a letter. + if not DOMAIN_NAME_REGEX.search(ascii_domain): + raise EmailSyntaxError("The part after the @-sign is not valid. It is not within a valid top-level domain.") + + # Check special-use and reserved domain names. + # Some might fail DNS-based deliverability checks, but that + # can be turned off, so we should fail them all sooner. + # See the references in __init__.py. + from . import SPECIAL_USE_DOMAIN_NAMES + for d in SPECIAL_USE_DOMAIN_NAMES: + # See the note near the definition of SPECIAL_USE_DOMAIN_NAMES. + if d == "test" and test_environment: + continue + + if ascii_domain == d or ascii_domain.endswith("." + d): + raise EmailSyntaxError("The part after the @-sign is a special-use or reserved name that cannot be used with email.") + + # We may have been given an IDNA ASCII domain to begin with. Check + # that the domain actually conforms to IDNA. It could look like IDNA + # but not be actual IDNA. For ASCII-only domains, the conversion out + # of IDNA just gives the same thing back. + # + # This gives us the canonical internationalized form of the domain, + # which we return to the caller as a part of the normalized email + # address. + try: + domain_i18n = idna.decode(ascii_domain.encode('ascii')) + except idna.IDNAError as e: + raise EmailSyntaxError(f"The part after the @-sign is not valid IDNA ({e}).") from e + + # Check that this normalized domain name has not somehow become + # an invalid domain name. All of the checks before this point + # using the idna package probably guarantee that we now have + # a valid international domain name in most respects. But it + # doesn't hurt to re-apply some tests to be sure. See the similar + # tests above. + + # Check for invalid and unsafe characters. We have no test + # case for this. 
+ bad_chars = {
+ safe_character_display(c)
+ for c in domain_i18n
+ if not ATEXT_HOSTNAME_INTL.match(c)
+ }
+ if bad_chars:
+ raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".")
+ check_unsafe_chars(domain_i18n)
+
+ # Check that it can be encoded back to IDNA ASCII. We have no test
+ # case for this.
+ try:
+ idna.encode(domain_i18n)
+ except idna.IDNAError as e:
+ raise EmailSyntaxError(f"The part after the @-sign became invalid after normalizing to international characters ({e}).") from e
+
+ # Return the IDNA ASCII-encoded form of the domain, which is how it
+ # would be transmitted on the wire (except when used with SMTPUTF8
+ # possibly), as well as the canonical Unicode form of the domain,
+ # which is better for display purposes. This should also take care
+ # of RFC 6532 section 3.1's suggestion to apply Unicode NFC
+ # normalization to addresses.
+ return {
+ "ascii_domain": ascii_domain,
+ "domain": domain_i18n,
+ }
+
+
+def validate_email_length(addrinfo: ValidatedEmail) -> None:
+ # There are three forms of the email address whose length must be checked:
+ #
+ # 1) The original email address string. Since callers may continue to use
+ # this string, even though we recommend using the normalized form, we
+ # should not pass validation when the original input is not valid. This
+ # form is checked first because it is the original input.
+ # 2) The normalized email address. We perform Unicode NFC normalization of
+ # the local part, we normalize the domain to internationalized characters
+ # (if originally IDNA ASCII) which also includes Unicode normalization,
+ # and we may remove quotes in quoted local parts. We recommend that
+ # callers use this string, so it must be valid.
+ # 3) The email address with the IDNA ASCII representation of the domain
+ # name, since this string may be used with email stacks that don't
+ # support UTF-8. Since this is the least likely to be used by callers,
+ # it is checked last. Note that ascii_email will only be set if the
+ # local part is ASCII, but conceivably the caller may combine an
+ # internationalized local part with an ASCII domain, so we check this
+ # on that combination also. Since we only return the normalized local
+ # part, we use that (and not the unnormalized local part).
+ #
+ # In all cases, the length is checked in UTF-8 because the SMTPUTF8
+ # extension to SMTP validates the length in bytes.
+
+ addresses_to_check = [
+ (addrinfo.original, None),
+ (addrinfo.normalized, "after normalization"),
+ ((addrinfo.ascii_local_part or addrinfo.local_part or "") + "@" + addrinfo.ascii_domain, "when the part after the @-sign is converted to IDNA ASCII"),
+ ]
+
+ for addr, reason in addresses_to_check:
+ addr_len = len(addr)
+ addr_utf8_len = len(addr.encode("utf8"))
+ diff = addr_utf8_len - EMAIL_MAX_LENGTH
+ if diff > 0:
+ if reason is None and addr_len == addr_utf8_len:
+ # If there is no normalization or transcoding,
+ # we can give a simple count of the number of
+ # characters over the limit.
+ reason = get_length_reason(addr, limit=EMAIL_MAX_LENGTH)
+ elif reason is None:
+ # If there is no normalization but there is
+ # some transcoding to UTF-8, we can compute
+ # the minimum number of characters over the
+ # limit by dividing the number of bytes over
+ # the limit by the maximum number of bytes
+ # per character.
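# (Illustrative: if the address is 6 bytes over the limit and its widest
# character encodes to 3 bytes, at least 6 // 3 == 2 characters must be
# removed, so the error below reports "2-6 characters too many".)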
+ mbpc = max(len(c.encode("utf8")) for c in addr) + mchars = max(1, diff // mbpc) + suffix = "s" if diff > 1 else "" + if mchars == diff: + reason = f"({diff} character{suffix} too many)" + else: + reason = f"({mchars}-{diff} character{suffix} too many)" + else: + # Since there is normalization, the number of + # characters in the input that need to change is + # impossible to know. + suffix = "s" if diff > 1 else "" + reason += f" ({diff} byte{suffix} too many)" + raise EmailSyntaxError(f"The email address is too long {reason}.") + + +class DomainLiteralValidationResult(TypedDict): + domain_address: Union[ipaddress.IPv4Address, ipaddress.IPv6Address] + domain: str + + +def validate_email_domain_literal(domain_literal: str) -> DomainLiteralValidationResult: + # This is obscure domain-literal syntax. Parse it and return + # a compressed/normalized address. + # RFC 5321 4.1.3 and RFC 5322 3.4.1. + + addr: Union[ipaddress.IPv4Address, ipaddress.IPv6Address] + + # Try to parse the domain literal as an IPv4 address. + # There is no tag for IPv4 addresses, so we can never + # be sure if the user intends an IPv4 address. + if re.match(r"^[0-9\.]+$", domain_literal): + try: + addr = ipaddress.IPv4Address(domain_literal) + except ValueError as e: + raise EmailSyntaxError(f"The address in brackets after the @-sign is not valid: It is not an IPv4 address ({e}) or is missing an address literal tag.") from e + + # Return the IPv4Address object and the domain back unchanged. + return { + "domain_address": addr, + "domain": f"[{addr}]", + } + + # If it begins with "IPv6:" it's an IPv6 address. + if domain_literal.startswith("IPv6:"): + try: + addr = ipaddress.IPv6Address(domain_literal[5:]) + except ValueError as e: + raise EmailSyntaxError(f"The IPv6 address in brackets after the @-sign is not valid ({e}).") from e + + # Return the IPv6Address object and construct a normalized + # domain literal. + return { + "domain_address": addr, + "domain": f"[IPv6:{addr.compressed}]", + } + + # Nothing else is valid. + + if ":" not in domain_literal: + raise EmailSyntaxError("The part after the @-sign in brackets is not an IPv4 address and has no address literal tag.") + + # The tag (the part before the colon) has character restrictions, + # but since it must come from a registry of tags (in which only "IPv6" is defined), + # there's no need to check the syntax of the tag. See RFC 5321 4.1.2. + + # Check for permitted ASCII characters. This actually doesn't matter + # since there will be an exception after anyway. + bad_chars = { + safe_character_display(c) + for c in domain_literal + if not DOMAIN_LITERAL_CHARS.match(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters in brackets: " + ", ".join(sorted(bad_chars)) + ".") + + # There are no other domain literal tags. 
+ # https://www.iana.org/assignments/address-literal-tags/address-literal-tags.xhtml
+ raise EmailSyntaxError("The part after the @-sign contains an invalid address literal tag in brackets.")
diff --git a/venv/lib/python3.12/site-packages/email_validator/types.py b/venv/lib/python3.12/site-packages/email_validator/types.py
new file mode 100644
index 0000000..1df60ff
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/email_validator/types.py
@@ -0,0 +1,126 @@
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+
+class ValidatedEmail:
+ """The validate_email function returns objects of this type holding the normalized form of the email address
+ and other information."""
+
+ """The email address that was passed to validate_email. (If passed as bytes, this will be a string.)"""
+ original: str
+
+ """The normalized email address, which should always be used in preference to the original address.
+ The normalized address converts an IDNA ASCII domain name to Unicode, if possible, and performs
+ Unicode normalization on the local part and on the domain (if originally Unicode). It is the
+ concatenation of the local_part and domain attributes, separated by an @-sign."""
+ normalized: str
+
+ """The local part of the email address after Unicode normalization."""
+ local_part: str
+
+ """The domain part of the email address after Unicode normalization or conversion to
+ Unicode from IDNA ascii."""
+ domain: str
+
+ """If the domain part is a domain literal, the IPv4Address or IPv6Address object."""
+ domain_address: object
+
+ """If not None, a form of the email address that uses 7-bit ASCII characters only."""
+ ascii_email: Optional[str]
+
+ """If not None, the local part of the email address using 7-bit ASCII characters only."""
+ ascii_local_part: Optional[str]
+
+ """A form of the domain name that uses 7-bit ASCII characters only."""
+ ascii_domain: str
+
+ """If True, the SMTPUTF8 feature of your mail relay will be required to transmit messages
+ to this address. This flag is True just when ascii_local_part is missing. Otherwise it
+ is False."""
+ smtputf8: bool
+
+ """If a deliverability check is performed and if it succeeds, a list of (priority, domain)
+ tuples of MX records specified in the DNS for the domain."""
+ mx: List[Tuple[int, str]]
+
+ """If no MX records are actually specified in DNS and instead are inferred, through an obsolete
+ mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`)."""
+ mx_fallback_type: Optional[str]
+
+ """The display name in the original input text, unquoted and unescaped, or None."""
+ display_name: Optional[str]
+
+ def __repr__(self) -> str:
+ return f"<ValidatedEmail {self.normalized}>"
+
+ """For backwards compatibility, support old field names."""
+ def __getattr__(self, key: str) -> str:
+ if key == "original_email":
+ return self.original
+ if key == "email":
+ return self.normalized
+ raise AttributeError(key)
+
+ @property
+ def email(self) -> str:
+ warnings.warn("ValidatedEmail.email is deprecated and will be removed, use ValidatedEmail.normalized instead", DeprecationWarning)
+ return self.normalized
+
+ """For backwards compatibility, some fields are also exposed through a dict-like interface. Note
Note + that some of the names changed when they became attributes.""" + def __getitem__(self, key: str) -> Union[Optional[str], bool, List[Tuple[int, str]]]: + warnings.warn("dict-like access to the return value of validate_email is deprecated and may not be supported in the future.", DeprecationWarning, stacklevel=2) + if key == "email": + return self.normalized + if key == "email_ascii": + return self.ascii_email + if key == "local": + return self.local_part + if key == "domain": + return self.ascii_domain + if key == "domain_i18n": + return self.domain + if key == "smtputf8": + return self.smtputf8 + if key == "mx": + return self.mx + if key == "mx-fallback": + return self.mx_fallback_type + raise KeyError() + + """Tests use this.""" + def __eq__(self, other: object) -> bool: + if not isinstance(other, ValidatedEmail): + return False + return ( + self.normalized == other.normalized + and self.local_part == other.local_part + and self.domain == other.domain + and getattr(self, 'ascii_email', None) == getattr(other, 'ascii_email', None) + and getattr(self, 'ascii_local_part', None) == getattr(other, 'ascii_local_part', None) + and getattr(self, 'ascii_domain', None) == getattr(other, 'ascii_domain', None) + and self.smtputf8 == other.smtputf8 + and repr(sorted(self.mx) if getattr(self, 'mx', None) else None) + == repr(sorted(other.mx) if getattr(other, 'mx', None) else None) + and getattr(self, 'mx_fallback_type', None) == getattr(other, 'mx_fallback_type', None) + and getattr(self, 'display_name', None) == getattr(other, 'display_name', None) + ) + + """This helps producing the README.""" + def as_constructor(self) -> str: + return "ValidatedEmail(" \ + + ",".join(f"\n {key}={repr(getattr(self, key))}" + for key in ('normalized', 'local_part', 'domain', + 'ascii_email', 'ascii_local_part', 'ascii_domain', + 'smtputf8', 'mx', 'mx_fallback_type', + 'display_name') + if hasattr(self, key) + ) \ + + ")" + + """Convenience method for accessing ValidatedEmail as a dict""" + def as_dict(self) -> Dict[str, Any]: + d = self.__dict__ + if d.get('domain_address'): + d['domain_address'] = repr(d['domain_address']) + return d diff --git a/venv/lib/python3.12/site-packages/email_validator/validate_email.py b/venv/lib/python3.12/site-packages/email_validator/validate_email.py new file mode 100644 index 0000000..ae5d963 --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator/validate_email.py @@ -0,0 +1,209 @@ +from typing import Optional, Union, TYPE_CHECKING +import unicodedata + +from .exceptions import EmailSyntaxError +from .types import ValidatedEmail +from .syntax import split_email, validate_email_local_part, validate_email_domain_name, validate_email_domain_literal, validate_email_length +from .rfc_constants import CASE_INSENSITIVE_MAILBOX_NAMES + +if TYPE_CHECKING: + import dns.resolver + _Resolver = dns.resolver.Resolver +else: + _Resolver = object + + +def validate_email( + email: Union[str, bytes], + /, # prior arguments are positional-only + *, # subsequent arguments are keyword-only + allow_smtputf8: Optional[bool] = None, + allow_empty_local: Optional[bool] = None, + allow_quoted_local: Optional[bool] = None, + allow_domain_literal: Optional[bool] = None, + allow_display_name: Optional[bool] = None, + strict: Optional[bool] = None, + check_deliverability: Optional[bool] = None, + test_environment: Optional[bool] = None, + globally_deliverable: Optional[bool] = None, + timeout: Optional[int] = None, + dns_resolver: Optional[_Resolver] = None +) -> ValidatedEmail: + """ + 
Given an email address, and some options, returns a ValidatedEmail instance + with information about the address if it is valid or, if the address is not + valid, raises an EmailNotValidError. This is the main function of the module. + """ + + # Fill in default values of arguments. + from . import ALLOW_SMTPUTF8, ALLOW_EMPTY_LOCAL, ALLOW_QUOTED_LOCAL, ALLOW_DOMAIN_LITERAL, ALLOW_DISPLAY_NAME, \ + STRICT, GLOBALLY_DELIVERABLE, CHECK_DELIVERABILITY, TEST_ENVIRONMENT, DEFAULT_TIMEOUT + if allow_smtputf8 is None: + allow_smtputf8 = ALLOW_SMTPUTF8 + if allow_empty_local is None: + allow_empty_local = ALLOW_EMPTY_LOCAL + if allow_quoted_local is None: + allow_quoted_local = ALLOW_QUOTED_LOCAL + if allow_domain_literal is None: + allow_domain_literal = ALLOW_DOMAIN_LITERAL + if allow_display_name is None: + allow_display_name = ALLOW_DISPLAY_NAME + if strict is None: + strict = STRICT + if check_deliverability is None: + check_deliverability = CHECK_DELIVERABILITY + if test_environment is None: + test_environment = TEST_ENVIRONMENT + if globally_deliverable is None: + globally_deliverable = GLOBALLY_DELIVERABLE + if timeout is None and dns_resolver is None: + timeout = DEFAULT_TIMEOUT + + if isinstance(email, str): + pass + elif isinstance(email, bytes): + # Allow email to be a bytes instance as if it is what + # will be transmitted on the wire. But assume SMTPUTF8 + # is unavailable, so it must be ASCII. + try: + email = email.decode("ascii") + except ValueError as e: + raise EmailSyntaxError("The email address is not valid ASCII.") from e + else: + raise TypeError("email must be str or bytes") + + # Split the address into the display name (or None), the local part + # (before the @-sign), and the domain part (after the @-sign). + # Normally, there is only one @-sign. But the awkward "quoted string" + # local part form (RFC 5321 4.1.2) allows @-signs in the local + # part if the local part is quoted. + display_name, local_part, domain_part, is_quoted_local_part \ + = split_email(email) + + if display_name: + # UTS #39 3.3 Email Security Profiles for Identifiers requires + # display names (incorrectly called "quoted-string-part" there) + # to be NFC normalized. Since these are not a part of what we + # are really validating, we won't check that the input was NFC + # normalized, but we'll normalize in output. + display_name = unicodedata.normalize("NFC", display_name) + + # Collect return values in this instance. + ret = ValidatedEmail() + ret.original = ((local_part if not is_quoted_local_part + else ('"' + local_part + '"')) + + "@" + domain_part) # drop the display name, if any, for email length tests at the end + ret.display_name = display_name + + # Validate the email address's local part syntax and get a normalized form. + # If the original address was quoted and the decoded local part is a valid + # unquoted local part, then we'll get back a normalized (unescaped) local + # part. + local_part_info = validate_email_local_part(local_part, + allow_smtputf8=allow_smtputf8, + allow_empty_local=allow_empty_local, + quoted_local_part=is_quoted_local_part, + strict=strict) + ret.local_part = local_part_info["local_part"] + ret.ascii_local_part = local_part_info["ascii_local_part"] + ret.smtputf8 = local_part_info["smtputf8"] + + # RFC 6532 section 3.1 says that Unicode NFC normalization should be applied, + # so we'll return the NFC-normalized local part. Since the caller may use that + # string in place of the original string, ensure it is also valid. 
+    #
+    # UTS #39 3.3 Email Security Profiles for Identifiers requires local parts
+    # to be NFKC normalized, which loses some information in characters that can
+    # be decomposed. We might want to consider applying NFKC normalization, but
+    # we can't make the change easily because it would break database lookups
+    # for any caller that put a normalized address from a previous version of
+    # this library. (UTS #39 seems to require that the *input* be NFKC normalized
+    # and has other requirements that are hard to check without additional Unicode
+    # data, and I don't know whether the rules really apply in the wild.)
+    normalized_local_part = unicodedata.normalize("NFC", ret.local_part)
+    if normalized_local_part != ret.local_part:
+        try:
+            validate_email_local_part(normalized_local_part,
+                                      allow_smtputf8=allow_smtputf8,
+                                      allow_empty_local=allow_empty_local,
+                                      quoted_local_part=is_quoted_local_part,
+                                      strict=strict)
+        except EmailSyntaxError as e:
+            raise EmailSyntaxError("After Unicode normalization: " + str(e)) from e
+        ret.local_part = normalized_local_part
+
+    # If a quoted local part isn't allowed but is present, now raise an exception.
+    # This is done after any exceptions raised by validate_email_local_part so
+    # that mandatory checks have highest precedence.
+    if is_quoted_local_part and not allow_quoted_local:
+        raise EmailSyntaxError("Quoting the part before the @-sign is not allowed here.")
+
+    # Some local parts are required to be case-insensitive, so we should normalize
+    # to lowercase.
+    # RFC 2142
+    if ret.ascii_local_part is not None \
+            and ret.ascii_local_part.lower() in CASE_INSENSITIVE_MAILBOX_NAMES \
+            and ret.local_part is not None:
+        ret.ascii_local_part = ret.ascii_local_part.lower()
+        ret.local_part = ret.local_part.lower()
+
+    # Validate the email address's domain part syntax and get a normalized form.
+    is_domain_literal = False
+    if len(domain_part) == 0:
+        raise EmailSyntaxError("There must be something after the @-sign.")
+
+    elif domain_part.startswith("[") and domain_part.endswith("]"):
+        # Parse the address in the domain literal and get back a normalized domain.
+        domain_literal_info = validate_email_domain_literal(domain_part[1:-1])
+        if not allow_domain_literal:
+            raise EmailSyntaxError("A bracketed IP address after the @-sign is not allowed here.")
+        ret.domain = domain_literal_info["domain"]
+        ret.ascii_domain = domain_literal_info["domain"]  # Domain literals are always ASCII.
+        ret.domain_address = domain_literal_info["domain_address"]
+        is_domain_literal = True  # Prevent deliverability checks.
+
+    else:
+        # Check the syntax of the domain and get back a normalized
+        # internationalized and ASCII form.
+        domain_name_info = validate_email_domain_name(domain_part, test_environment=test_environment, globally_deliverable=globally_deliverable)
+        ret.domain = domain_name_info["domain"]
+        ret.ascii_domain = domain_name_info["ascii_domain"]
+
+    # Construct the complete normalized form.
+    ret.normalized = ret.local_part + "@" + ret.domain
+
+    # If the email address has an ASCII form, add it.
+    if not ret.smtputf8:
+        if not ret.ascii_domain:
+            raise Exception("Missing ASCII domain.")
+        ret.ascii_email = (ret.ascii_local_part or "") + "@" + ret.ascii_domain
+    else:
+        ret.ascii_email = None
+
+    # Check the length of the address.
+    validate_email_length(ret)
+
+    # Check that a display name is permitted. It's the last syntax check
+    # because we always check against optional parsing features last.
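The NFC-versus-NFKC point in the comments above is easy to see concretely. A minimal standalone sketch (illustrative only, not part of the vendored file):

```Python
import unicodedata

# U+FB01 is the "fi" ligature. NFC leaves it intact, while NFKC folds it to
# the two plain letters "fi" -- the kind of information loss the comment
# above describes.
ligature = "\ufb01"
print(unicodedata.normalize("NFC", ligature))   # 'ﬁ' (unchanged)
print(unicodedata.normalize("NFKC", ligature))  # 'fi' (decomposed)
```

This is also why switching the library to NFKC would change the normalized form of some already-stored addresses and break lookups keyed on the old values.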
+ if display_name is not None and not allow_display_name: + raise EmailSyntaxError("A display name and angle brackets around the email address are not permitted here.") + + if check_deliverability and not test_environment: + # Validate the email address's deliverability using DNS + # and update the returned ValidatedEmail object with metadata. + + if is_domain_literal: + # There is nothing to check --- skip deliverability checks. + return ret + + # Lazy load `deliverability` as it is slow to import (due to dns.resolver) + from .deliverability import validate_email_deliverability + deliverability_info = validate_email_deliverability( + ret.ascii_domain, ret.domain, timeout, dns_resolver + ) + mx = deliverability_info.get("mx") + if mx is not None: + ret.mx = mx + ret.mx_fallback_type = deliverability_info.get("mx_fallback_type") + + return ret diff --git a/venv/lib/python3.12/site-packages/email_validator/version.py b/venv/lib/python3.12/site-packages/email_validator/version.py new file mode 100644 index 0000000..55e4709 --- /dev/null +++ b/venv/lib/python3.12/site-packages/email_validator/version.py @@ -0,0 +1 @@ +__version__ = "2.3.0" diff --git a/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/METADATA new file mode 100644 index 0000000..8320e67 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/METADATA @@ -0,0 +1,533 @@ +Metadata-Version: 2.1 +Name: fastapi +Version: 0.109.0 +Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production +Project-URL: Homepage, https://github.com/tiangolo/fastapi +Project-URL: Documentation, https://fastapi.tiangolo.com/ +Project-URL: Repository, https://github.com/tiangolo/fastapi +Author-email: Sebastián Ramírez +License-Expression: MIT +License-File: LICENSE +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: FastAPI +Classifier: Framework :: Pydantic +Classifier: Framework :: Pydantic :: 1 +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: 
pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4 +Requires-Dist: starlette<0.36.0,>=0.35.0 +Requires-Dist: typing-extensions>=4.8.0 +Provides-Extra: all +Requires-Dist: email-validator>=2.0.0; extra == 'all' +Requires-Dist: httpx>=0.23.0; extra == 'all' +Requires-Dist: itsdangerous>=1.1.0; extra == 'all' +Requires-Dist: jinja2>=2.11.2; extra == 'all' +Requires-Dist: orjson>=3.2.1; extra == 'all' +Requires-Dist: pydantic-extra-types>=2.0.0; extra == 'all' +Requires-Dist: pydantic-settings>=2.0.0; extra == 'all' +Requires-Dist: python-multipart>=0.0.5; extra == 'all' +Requires-Dist: pyyaml>=5.3.1; extra == 'all' +Requires-Dist: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1; extra == 'all' +Requires-Dist: uvicorn[standard]>=0.12.0; extra == 'all' +Description-Content-Type: text/markdown + +

+[FastAPI logo]
+
+FastAPI framework, high performance, easy to learn, fast to code, ready for production
+
+[badges: Test, Coverage, Package version, Supported Python versions]

+
+---
+
+**Documentation**: https://fastapi.tiangolo.com
+
+**Source Code**: https://github.com/tiangolo/fastapi
+
+---
+
+FastAPI is a modern, fast (high-performance), web framework for building APIs with Python 3.8+ based on standard Python type hints.
+
+The key features are:
+
+* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance).
+* **Fast to code**: Increase the speed to develop features by about 200% to 300%. *
+* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. *
+* **Intuitive**: Great editor support. Completion everywhere. Less time debugging.
+* **Easy**: Designed to be easy to use and learn. Less time reading docs.
+* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs.
+* **Robust**: Get production-ready code. With automatic interactive documentation.
+* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema.
+
+* estimation based on tests on an internal development team, building production applications.
+
+## Sponsors
+
+[sponsor logos]
+
+Other sponsors
+
+## Opinions
+
+"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._"
+
    Kabir Khan - Microsoft (ref)
    + +--- + +"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" + +
    Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
    + +--- + +"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" + +
    Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
    + +--- + +"_I’m over the moon excited about **FastAPI**. It’s so fun!_" + +
    Brian Okken - Python Bytes podcast host (ref)
    + +--- + +"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" + +
    Timothy Crosley - Hug creator (ref)
    + +--- + +"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" + +"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" + +
    Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
    + +--- + +"_If anyone is looking to build a production Python API, I would highly recommend **FastAPI**. It is **beautifully designed**, **simple to use** and **highly scalable**, it has become a **key component** in our API first development strategy and is driving many automations and services such as our Virtual TAC Engineer._" + +
    Deon Pillsbury - Cisco (ref)
    + +--- + +## **Typer**, the FastAPI of CLIs + + + +If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. + +**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 + +## Requirements + +Python 3.8+ + +FastAPI stands on the shoulders of giants: + +* Starlette for the web parts. +* Pydantic for the data parts. + +## Installation + +
    + +```console +$ pip install fastapi + +---> 100% +``` + +
+
+You will also need an ASGI server for production, such as Uvicorn or Hypercorn.
+
+
    + +```console +$ pip install "uvicorn[standard]" + +---> 100% +``` + +
    + +## Example + +### Create it + +* Create a file `main.py` with: + +```Python +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +
    +Or use async def... + +If your code uses `async` / `await`, use `async def`: + +```Python hl_lines="9 14" +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +async def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +async def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +**Note**: + +If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs. + +
    + +### Run it + +Run the server with: + +
    + +```console +$ uvicorn main:app --reload + +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +INFO: Started reloader process [28720] +INFO: Started server process [28722] +INFO: Waiting for application startup. +INFO: Application startup complete. +``` + +
    + +
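The same check can also be scripted. A minimal sketch using httpx (one of the optional dependencies listed further below), assuming the example server above is running locally on port 8000:

```Python
import httpx

# Equivalent to opening http://127.0.0.1:8000/items/5?q=somequery in a browser.
response = httpx.get("http://127.0.0.1:8000/items/5", params={"q": "somequery"})
print(response.status_code)  # 200
print(response.json())       # {"item_id": 5, "q": "somequery"}
```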
    +About the command uvicorn main:app --reload... + +The command `uvicorn main:app` refers to: + +* `main`: the file `main.py` (the Python "module"). +* `app`: the object created inside of `main.py` with the line `app = FastAPI()`. +* `--reload`: make the server restart after code changes. Only do this for development. + +
    + +### Check it + +Open your browser at http://127.0.0.1:8000/items/5?q=somequery. + +You will see the JSON response as: + +```JSON +{"item_id": 5, "q": "somequery"} +``` + +You already created an API that: + +* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. +* Both _paths_ take `GET` operations (also known as HTTP _methods_). +* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. +* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. + +### Interactive API docs + +Now go to http://127.0.0.1:8000/docs. + +You will see the automatic interactive API documentation (provided by Swagger UI): + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) + +### Alternative API docs + +And now, go to http://127.0.0.1:8000/redoc. + +You will see the alternative automatic documentation (provided by ReDoc): + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) + +## Example upgrade + +Now modify the file `main.py` to receive a body from a `PUT` request. + +Declare the body using standard Python types, thanks to Pydantic. + +```Python hl_lines="4 9-12 25-27" +from typing import Union + +from fastapi import FastAPI +from pydantic import BaseModel + +app = FastAPI() + + +class Item(BaseModel): + name: str + price: float + is_offer: Union[bool, None] = None + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} + + +@app.put("/items/{item_id}") +def update_item(item_id: int, item: Item): + return {"item_name": item.name, "item_id": item_id} +``` + +The server should reload automatically (because you added `--reload` to the `uvicorn` command above). + +### Interactive API docs upgrade + +Now go to http://127.0.0.1:8000/docs. + +* The interactive API documentation will be automatically updated, including the new body: + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) + +* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) + +* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) + +### Alternative API docs upgrade + +And now, go to http://127.0.0.1:8000/redoc. + +* The alternative documentation will also reflect the new query parameter and body: + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) + +### Recap + +In summary, you declare **once** the types of parameters, body, etc. as function parameters. + +You do that with standard modern Python types. + +You don't have to learn a new syntax, the methods or classes of a specific library, etc. + +Just standard **Python 3.8+**. + +For example, for an `int`: + +```Python +item_id: int +``` + +or for a more complex `Item` model: + +```Python +item: Item +``` + +...and with that single declaration you get: + +* Editor support, including: + * Completion. + * Type checks. +* Validation of data: + * Automatic and clear errors when the data is invalid. + * Validation even for deeply nested JSON objects. +* Conversion of input data: coming from the network to Python data and types. Reading from: + * JSON. 
+ * Path parameters. + * Query parameters. + * Cookies. + * Headers. + * Forms. + * Files. +* Conversion of output data: converting from Python data and types to network data (as JSON): + * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc). + * `datetime` objects. + * `UUID` objects. + * Database models. + * ...and many more. +* Automatic interactive API documentation, including 2 alternative user interfaces: + * Swagger UI. + * ReDoc. + +--- + +Coming back to the previous code example, **FastAPI** will: + +* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. +* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. + * If it is not, the client will see a useful, clear error. +* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. + * As the `q` parameter is declared with `= None`, it is optional. + * Without the `None` it would be required (as is the body in the case with `PUT`). +* For `PUT` requests to `/items/{item_id}`, Read the body as JSON: + * Check that it has a required attribute `name` that should be a `str`. + * Check that it has a required attribute `price` that has to be a `float`. + * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. + * All this would also work for deeply nested JSON objects. +* Convert from and to JSON automatically. +* Document everything with OpenAPI, that can be used by: + * Interactive documentation systems. + * Automatic client code generation systems, for many languages. +* Provide 2 interactive documentation web interfaces directly. + +--- + +We just scratched the surface, but you already get the idea of how it all works. + +Try changing the line with: + +```Python + return {"item_name": item.name, "item_id": item_id} +``` + +...from: + +```Python + ... "item_name": item.name ... +``` + +...to: + +```Python + ... "item_price": item.price ... +``` + +...and see how your editor will auto-complete the attributes and know their types: + +![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) + +For a more complete example including more features, see the Tutorial - User Guide. + +**Spoiler alert**: the tutorial - user guide includes: + +* Declaration of **parameters** from other different places as: **headers**, **cookies**, **form fields** and **files**. +* How to set **validation constraints** as `maximum_length` or `regex`. +* A very powerful and easy to use **Dependency Injection** system. +* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. +* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). +* **GraphQL** integration with Strawberry and other libraries. +* Many extra features (thanks to Starlette) as: + * **WebSockets** + * extremely easy tests based on HTTPX and `pytest` + * **CORS** + * **Cookie Sessions** + * ...and more. + +## Performance + +Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) + +To understand more about it, see the section Benchmarks. + +## Optional Dependencies + +Used by Pydantic: + +* email_validator - for email validation. +* pydantic-settings - for settings management. +* pydantic-extra-types - for extra types to be used with Pydantic. 
+ +Used by Starlette: + +* httpx - Required if you want to use the `TestClient`. +* jinja2 - Required if you want to use the default template configuration. +* python-multipart - Required if you want to support form "parsing", with `request.form()`. +* itsdangerous - Required for `SessionMiddleware` support. +* pyyaml - Required for Starlette's `SchemaGenerator` support (you probably don't need it with FastAPI). +* ujson - Required if you want to use `UJSONResponse`. + +Used by FastAPI / Starlette: + +* uvicorn - for the server that loads and serves your application. +* orjson - Required if you want to use `ORJSONResponse`. + +You can install all of these with `pip install "fastapi[all]"`. + +## License + +This project is licensed under the terms of the MIT license. diff --git a/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/RECORD new file mode 100644 index 0000000..f312c9a --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/RECORD @@ -0,0 +1,91 @@ +fastapi-0.109.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fastapi-0.109.0.dist-info/METADATA,sha256=WE-jq6OygG5PfaK-s4ibVRj00nhgkadlax2TYbq8GBM,24960 +fastapi-0.109.0.dist-info/RECORD,, +fastapi-0.109.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi-0.109.0.dist-info/WHEEL,sha256=mRYSEL3Ih6g5a_CVMIcwiF__0Ae4_gLYh01YFNwiq1k,87 +fastapi-0.109.0.dist-info/licenses/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 +fastapi/__init__.py,sha256=Xhg_TocZLhLOBARBFyRXnFut9fgxKCYkQEhdV0QIkMI,1081 +fastapi/__pycache__/__init__.cpython-312.pyc,, +fastapi/__pycache__/_compat.cpython-312.pyc,, +fastapi/__pycache__/applications.cpython-312.pyc,, +fastapi/__pycache__/background.cpython-312.pyc,, +fastapi/__pycache__/concurrency.cpython-312.pyc,, +fastapi/__pycache__/datastructures.cpython-312.pyc,, +fastapi/__pycache__/encoders.cpython-312.pyc,, +fastapi/__pycache__/exception_handlers.cpython-312.pyc,, +fastapi/__pycache__/exceptions.cpython-312.pyc,, +fastapi/__pycache__/logger.cpython-312.pyc,, +fastapi/__pycache__/param_functions.cpython-312.pyc,, +fastapi/__pycache__/params.cpython-312.pyc,, +fastapi/__pycache__/requests.cpython-312.pyc,, +fastapi/__pycache__/responses.cpython-312.pyc,, +fastapi/__pycache__/routing.cpython-312.pyc,, +fastapi/__pycache__/staticfiles.cpython-312.pyc,, +fastapi/__pycache__/templating.cpython-312.pyc,, +fastapi/__pycache__/testclient.cpython-312.pyc,, +fastapi/__pycache__/types.cpython-312.pyc,, +fastapi/__pycache__/utils.cpython-312.pyc,, +fastapi/__pycache__/websockets.cpython-312.pyc,, +fastapi/_compat.py,sha256=i_LwkpFVbNCuraFRbc1UmNNocX63BAwjjBXgF59JJVQ,23027 +fastapi/applications.py,sha256=nUoSz0Lhif39vAw9KATzFH2Xjz73kftOMCiwq6osNfg,176375 +fastapi/background.py,sha256=F1tsrJKfDZaRchNgF9ykB2PcRaPBJTbL4htN45TJAIc,1799 +fastapi/concurrency.py,sha256=AYLnS4judDUmXsNRICtoKSP0prfYDcS8ehBtYW9JhQQ,1403 +fastapi/datastructures.py,sha256=FF1s2g6cAQ5XxlNToB3scgV94Zf3DjdzcaI7ToaTrmg,5797 +fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/dependencies/__pycache__/__init__.cpython-312.pyc,, +fastapi/dependencies/__pycache__/models.cpython-312.pyc,, +fastapi/dependencies/__pycache__/utils.cpython-312.pyc,, +fastapi/dependencies/models.py,sha256=-n-YCxzgVBkurQi49qOTooT71v_oeAhHJ-qQFonxh5o,2494 +fastapi/dependencies/utils.py,sha256=IUY-uZmp56BUZuEiHtTQT0iGNF5ASIst7AzFMzA_As8,30158 
+fastapi/encoders.py,sha256=90lbmIW8NZjpPVzbgKhpY49B7TFqa7hrdQDQa70SM9U,11024 +fastapi/exception_handlers.py,sha256=MBrIOA-ugjJDivIi4rSsUJBdTsjuzN76q4yh0q1COKw,1332 +fastapi/exceptions.py,sha256=SQsPxq-QYBZUhq6L4K3B3W7gaSD3Gub2f17erStRagY,5000 +fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 +fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 +fastapi/middleware/__pycache__/__init__.cpython-312.pyc,, +fastapi/middleware/__pycache__/cors.cpython-312.pyc,, +fastapi/middleware/__pycache__/gzip.cpython-312.pyc,, +fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc,, +fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc,, +fastapi/middleware/__pycache__/wsgi.cpython-312.pyc,, +fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 +fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 +fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 +fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 +fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 +fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/openapi/__pycache__/__init__.cpython-312.pyc,, +fastapi/openapi/__pycache__/constants.cpython-312.pyc,, +fastapi/openapi/__pycache__/docs.cpython-312.pyc,, +fastapi/openapi/__pycache__/models.cpython-312.pyc,, +fastapi/openapi/__pycache__/utils.cpython-312.pyc,, +fastapi/openapi/constants.py,sha256=adGzmis1L1HJRTE3kJ5fmHS_Noq6tIY6pWv_SFzoFDU,153 +fastapi/openapi/docs.py,sha256=Fo_SGB0eEfGvlNLqP-w_jgYifmHTe-3LbO_qC-ncFVY,10387 +fastapi/openapi/models.py,sha256=DEmsWA-9sNqv2H4YneZUW86r1nMwD920EiTvan5kndI,17763 +fastapi/openapi/utils.py,sha256=PUuz_ISarHVPBRyIgfyHz8uwH0eEsDY3rJUfW__I9GI,22303 +fastapi/param_functions.py,sha256=VWEsJbkH8lJZgcJ6fI6uzquui1kgHrDv1i_wXM7cW3M,63896 +fastapi/params.py,sha256=LzjihAvODd3w7-GddraUyVtH1xfwR9smIoQn-Z_g4mg,27807 +fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 +fastapi/responses.py,sha256=QNQQlwpKhQoIPZTTWkpc9d_QGeGZ_aVQPaDV3nQ8m7c,1761 +fastapi/routing.py,sha256=XvVtyB5PEnDepB4uxGt1q8sFeh_2WkmvTlxWrw4Y1Ok,174485 +fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 +fastapi/security/__pycache__/__init__.cpython-312.pyc,, +fastapi/security/__pycache__/api_key.cpython-312.pyc,, +fastapi/security/__pycache__/base.cpython-312.pyc,, +fastapi/security/__pycache__/http.cpython-312.pyc,, +fastapi/security/__pycache__/oauth2.cpython-312.pyc,, +fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc,, +fastapi/security/__pycache__/utils.cpython-312.pyc,, +fastapi/security/api_key.py,sha256=bcZbUzTqeR_CI_LXuJdDq1qL322kmhgy5ApOCqgGDi4,9399 +fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 +fastapi/security/http.py,sha256=_YdhSRRUCGydVDUILygWg0VlkPA28t_gjcy_axD3eOk,13537 +fastapi/security/oauth2.py,sha256=QAUOE2f6KXbXjkrJIIYCOugI6-R0g9EECZ5t8eN9nA4,21612 +fastapi/security/open_id_connect_url.py,sha256=Mb8wFxrRh4CrsFW0RcjBEQLASPHGDtZRP6c2dCrspAg,2753 +fastapi/security/utils.py,sha256=bd8T0YM7UQD5ATKucr1bNtAvz_Y3__dVNAv5UebiPvc,293 +fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 +fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 
+fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 +fastapi/types.py,sha256=nFb36sK3DSoqoyo7Miwy3meKK5UdFBgkAgLSzQlUVyI,383 +fastapi/utils.py,sha256=rpSasHpgooPIfe67yU3HzOMDv7PtxiG9x6K-bhu6Z18,8193 +fastapi/websockets.py,sha256=419uncYObEKZG0YcrXscfQQYLSWoE10jqxVMetGdR98,222 diff --git a/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/WHEEL new file mode 100644 index 0000000..2860816 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..3e92463 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi-0.109.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
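The `fastapi/__init__.py` in the next diff pins `__version__` and re-exports the package's public names from the submodules. A minimal sketch of what those top-level re-exports enable, using only names that appear in that file (illustrative only, not part of this commit; `require_q` is a hypothetical dependency):

```Python
from fastapi import Depends, FastAPI, HTTPException, status

app = FastAPI()


def require_q(q: str = "") -> str:
    # Hypothetical dependency exercising Depends, HTTPException and status,
    # all imported straight from the top-level fastapi package.
    if not q:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="q is required")
    return q


@app.get("/echo")
def echo(q: str = Depends(require_q)) -> dict:
    return {"q": q}
```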
diff --git a/venv/lib/python3.12/site-packages/fastapi/__init__.py b/venv/lib/python3.12/site-packages/fastapi/__init__.py new file mode 100644 index 0000000..f457faf --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/__init__.py @@ -0,0 +1,25 @@ +"""FastAPI framework, high performance, easy to learn, fast to code, ready for production""" + +__version__ = "0.109.0" + +from starlette import status as status + +from .applications import FastAPI as FastAPI +from .background import BackgroundTasks as BackgroundTasks +from .datastructures import UploadFile as UploadFile +from .exceptions import HTTPException as HTTPException +from .exceptions import WebSocketException as WebSocketException +from .param_functions import Body as Body +from .param_functions import Cookie as Cookie +from .param_functions import Depends as Depends +from .param_functions import File as File +from .param_functions import Form as Form +from .param_functions import Header as Header +from .param_functions import Path as Path +from .param_functions import Query as Query +from .param_functions import Security as Security +from .requests import Request as Request +from .responses import Response as Response +from .routing import APIRouter as APIRouter +from .websockets import WebSocket as WebSocket +from .websockets import WebSocketDisconnect as WebSocketDisconnect diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..eac8b02 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..88b336d Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/applications.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/applications.cpython-312.pyc new file mode 100644 index 0000000..c65625c Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/applications.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/background.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/background.cpython-312.pyc new file mode 100644 index 0000000..da24fa3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/background.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/concurrency.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/concurrency.cpython-312.pyc new file mode 100644 index 0000000..b786994 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/concurrency.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/datastructures.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/datastructures.cpython-312.pyc new file mode 100644 index 0000000..a00bd8d Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/datastructures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/encoders.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/fastapi/__pycache__/encoders.cpython-312.pyc new file mode 100644 index 0000000..da15ed4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/encoders.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/exception_handlers.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/exception_handlers.cpython-312.pyc new file mode 100644 index 0000000..aa30315 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/exception_handlers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..dc4a504 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/logger.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/logger.cpython-312.pyc new file mode 100644 index 0000000..70740d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/logger.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/param_functions.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/param_functions.cpython-312.pyc new file mode 100644 index 0000000..caa8846 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/param_functions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/params.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/params.cpython-312.pyc new file mode 100644 index 0000000..ec37882 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/params.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/requests.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/requests.cpython-312.pyc new file mode 100644 index 0000000..782b968 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/requests.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/responses.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/responses.cpython-312.pyc new file mode 100644 index 0000000..38449e6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/responses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/routing.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/routing.cpython-312.pyc new file mode 100644 index 0000000..d7d3ec4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/routing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/staticfiles.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/staticfiles.cpython-312.pyc new file mode 100644 index 0000000..9c78dc0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/staticfiles.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/templating.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/templating.cpython-312.pyc new file mode 100644 index 0000000..53d8ea4 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/fastapi/__pycache__/templating.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc new file mode 100644 index 0000000..99e9e61 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000..ad2690d Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..eccecd7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/websockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/websockets.cpython-312.pyc new file mode 100644 index 0000000..5ada99f Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/__pycache__/websockets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/_compat.py b/venv/lib/python3.12/site-packages/fastapi/_compat.py new file mode 100644 index 0000000..35d4a87 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/_compat.py @@ -0,0 +1,634 @@ +from collections import deque +from copy import copy +from dataclasses import dataclass, is_dataclass +from enum import Enum +from typing import ( + Any, + Callable, + Deque, + Dict, + FrozenSet, + List, + Mapping, + Sequence, + Set, + Tuple, + Type, + Union, +) + +from fastapi.exceptions import RequestErrorModel +from fastapi.types import IncEx, ModelNameMap, UnionType +from pydantic import BaseModel, create_model +from pydantic.version import VERSION as PYDANTIC_VERSION +from starlette.datastructures import UploadFile +from typing_extensions import Annotated, Literal, get_args, get_origin + +PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.") + + +sequence_annotation_to_type = { + Sequence: list, + List: list, + list: list, + Tuple: tuple, + tuple: tuple, + Set: set, + set: set, + FrozenSet: frozenset, + frozenset: frozenset, + Deque: deque, + deque: deque, +} + +sequence_types = tuple(sequence_annotation_to_type.keys()) + +if PYDANTIC_V2: + from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError + from pydantic import TypeAdapter + from pydantic import ValidationError as ValidationError + from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined] + GetJsonSchemaHandler as GetJsonSchemaHandler, + ) + from pydantic._internal._typing_extra import eval_type_lenient + from pydantic._internal._utils import lenient_issubclass as lenient_issubclass + from pydantic.fields import FieldInfo + from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema + from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue + from pydantic_core import CoreSchema as CoreSchema + from pydantic_core import PydanticUndefined, PydanticUndefinedType + from pydantic_core import Url as Url + + try: + from pydantic_core.core_schema import ( + with_info_plain_validator_function as 
with_info_plain_validator_function, + ) + except ImportError: # pragma: no cover + from pydantic_core.core_schema import ( + general_plain_validator_function as with_info_plain_validator_function, # noqa: F401 + ) + + Required = PydanticUndefined + Undefined = PydanticUndefined + UndefinedType = PydanticUndefinedType + evaluate_forwardref = eval_type_lenient + Validator = Any + + class BaseConfig: + pass + + class ErrorWrapper(Exception): + pass + + @dataclass + class ModelField: + field_info: FieldInfo + name: str + mode: Literal["validation", "serialization"] = "validation" + + @property + def alias(self) -> str: + a = self.field_info.alias + return a if a is not None else self.name + + @property + def required(self) -> bool: + return self.field_info.is_required() + + @property + def default(self) -> Any: + return self.get_default() + + @property + def type_(self) -> Any: + return self.field_info.annotation + + def __post_init__(self) -> None: + self._type_adapter: TypeAdapter[Any] = TypeAdapter( + Annotated[self.field_info.annotation, self.field_info] + ) + + def get_default(self) -> Any: + if self.field_info.is_required(): + return Undefined + return self.field_info.get_default(call_default_factory=True) + + def validate( + self, + value: Any, + values: Dict[str, Any] = {}, # noqa: B006 + *, + loc: Tuple[Union[int, str], ...] = (), + ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]: + try: + return ( + self._type_adapter.validate_python(value, from_attributes=True), + None, + ) + except ValidationError as exc: + return None, _regenerate_error_with_loc( + errors=exc.errors(), loc_prefix=loc + ) + + def serialize( + self, + value: Any, + *, + mode: Literal["json", "python"] = "json", + include: Union[IncEx, None] = None, + exclude: Union[IncEx, None] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> Any: + # What calls this code passes a value that already called + # self._type_adapter.validate_python(value) + return self._type_adapter.dump_python( + value, + mode=mode, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + def __hash__(self) -> int: + # Each ModelField is unique for our purposes, to allow making a dict from + # ModelField to its JSON Schema. 
+ return id(self) + + def get_annotation_from_field_info( + annotation: Any, field_info: FieldInfo, field_name: str + ) -> Any: + return annotation + + def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]: + return errors # type: ignore[return-value] + + def _model_rebuild(model: Type[BaseModel]) -> None: + model.model_rebuild() + + def _model_dump( + model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any + ) -> Any: + return model.model_dump(mode=mode, **kwargs) + + def _get_model_config(model: BaseModel) -> Any: + return model.model_config + + def get_schema_from_model_field( + *, + field: ModelField, + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + field_mapping: Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + separate_input_output_schemas: bool = True, + ) -> Dict[str, Any]: + override_mode: Union[Literal["validation"], None] = ( + None if separate_input_output_schemas else "validation" + ) + # This expects that GenerateJsonSchema was already used to generate the definitions + json_schema = field_mapping[(field, override_mode or field.mode)] + if "$ref" not in json_schema: + # TODO remove when deprecating Pydantic v1 + # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207 + json_schema["title"] = ( + field.field_info.title or field.alias.title().replace("_", " ") + ) + return json_schema + + def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap: + return {} + + def get_definitions( + *, + fields: List[ModelField], + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + separate_input_output_schemas: bool = True, + ) -> Tuple[ + Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + Dict[str, Dict[str, Any]], + ]: + override_mode: Union[Literal["validation"], None] = ( + None if separate_input_output_schemas else "validation" + ) + inputs = [ + (field, override_mode or field.mode, field._type_adapter.core_schema) + for field in fields + ] + field_mapping, definitions = schema_generator.generate_definitions( + inputs=inputs + ) + return field_mapping, definitions # type: ignore[return-value] + + def is_scalar_field(field: ModelField) -> bool: + from fastapi import params + + return field_annotation_is_scalar( + field.field_info.annotation + ) and not isinstance(field.field_info, params.Body) + + def is_sequence_field(field: ModelField) -> bool: + return field_annotation_is_sequence(field.field_info.annotation) + + def is_scalar_sequence_field(field: ModelField) -> bool: + return field_annotation_is_scalar_sequence(field.field_info.annotation) + + def is_bytes_field(field: ModelField) -> bool: + return is_bytes_or_nonable_bytes_annotation(field.type_) + + def is_bytes_sequence_field(field: ModelField) -> bool: + return is_bytes_sequence_annotation(field.type_) + + def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: + cls = type(field_info) + merged_field_info = cls.from_annotation(annotation) + new_field_info = copy(field_info) + new_field_info.metadata = merged_field_info.metadata + new_field_info.annotation = merged_field_info.annotation + return new_field_info + + def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: + origin_type = ( + get_origin(field.field_info.annotation) or field.field_info.annotation + ) + assert issubclass(origin_type, sequence_types) # type: ignore[arg-type] + return 
sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return] + + def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]: + error = ValidationError.from_exception_data( + "Field required", [{"type": "missing", "loc": loc, "input": {}}] + ).errors()[0] + error["input"] = None + return error # type: ignore[return-value] + + def create_body_model( + *, fields: Sequence[ModelField], model_name: str + ) -> Type[BaseModel]: + field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields} + BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload] + return BodyModel + +else: + from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX + from pydantic import AnyUrl as Url # noqa: F401 + from pydantic import ( # type: ignore[assignment] + BaseConfig as BaseConfig, # noqa: F401 + ) + from pydantic import ValidationError as ValidationError # noqa: F401 + from pydantic.class_validators import ( # type: ignore[no-redef] + Validator as Validator, # noqa: F401 + ) + from pydantic.error_wrappers import ( # type: ignore[no-redef] + ErrorWrapper as ErrorWrapper, # noqa: F401 + ) + from pydantic.errors import MissingError + from pydantic.fields import ( # type: ignore[attr-defined] + SHAPE_FROZENSET, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + ) + from pydantic.fields import FieldInfo as FieldInfo + from pydantic.fields import ( # type: ignore[no-redef,attr-defined] + ModelField as ModelField, # noqa: F401 + ) + from pydantic.fields import ( # type: ignore[no-redef,attr-defined] + Required as Required, # noqa: F401 + ) + from pydantic.fields import ( # type: ignore[no-redef,attr-defined] + Undefined as Undefined, + ) + from pydantic.fields import ( # type: ignore[no-redef, attr-defined] + UndefinedType as UndefinedType, # noqa: F401 + ) + from pydantic.schema import ( + field_schema, + get_flat_models_from_fields, + get_model_name_map, + model_process_schema, + ) + from pydantic.schema import ( # type: ignore[no-redef] # noqa: F401 + get_annotation_from_field_info as get_annotation_from_field_info, + ) + from pydantic.typing import ( # type: ignore[no-redef] + evaluate_forwardref as evaluate_forwardref, # noqa: F401 + ) + from pydantic.utils import ( # type: ignore[no-redef] + lenient_issubclass as lenient_issubclass, # noqa: F401 + ) + + GetJsonSchemaHandler = Any # type: ignore[assignment,misc] + JsonSchemaValue = Dict[str, Any] # type: ignore[misc] + CoreSchema = Any # type: ignore[assignment,misc] + + sequence_shapes = { + SHAPE_LIST, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_TUPLE, + SHAPE_SEQUENCE, + SHAPE_TUPLE_ELLIPSIS, + } + sequence_shape_to_type = { + SHAPE_LIST: list, + SHAPE_SET: set, + SHAPE_TUPLE: tuple, + SHAPE_SEQUENCE: list, + SHAPE_TUPLE_ELLIPSIS: list, + } + + @dataclass + class GenerateJsonSchema: # type: ignore[no-redef] + ref_template: str + + class PydanticSchemaGenerationError(Exception): # type: ignore[no-redef] + pass + + def with_info_plain_validator_function( # type: ignore[misc] + function: Callable[..., Any], + *, + ref: Union[str, None] = None, + metadata: Any = None, + serialization: Any = None, + ) -> Any: + return {} + + def get_model_definitions( + *, + flat_models: Set[Union[Type[BaseModel], Type[Enum]]], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], + ) -> Dict[str, Any]: + definitions: Dict[str, Dict[str, Any]] = {} + for model in flat_models: + m_schema, m_definitions, m_nested_models = 
model_process_schema( + model, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + if "description" in m_schema: + m_schema["description"] = m_schema["description"].split("\f")[0] + definitions[model_name] = m_schema + return definitions + + def is_pv1_scalar_field(field: ModelField) -> bool: + from fastapi import params + + field_info = field.field_info + if not ( + field.shape == SHAPE_SINGLETON # type: ignore[attr-defined] + and not lenient_issubclass(field.type_, BaseModel) + and not lenient_issubclass(field.type_, dict) + and not field_annotation_is_sequence(field.type_) + and not is_dataclass(field.type_) + and not isinstance(field_info, params.Body) + ): + return False + if field.sub_fields: # type: ignore[attr-defined] + if not all( + is_pv1_scalar_field(f) + for f in field.sub_fields # type: ignore[attr-defined] + ): + return False + return True + + def is_pv1_scalar_sequence_field(field: ModelField) -> bool: + if (field.shape in sequence_shapes) and not lenient_issubclass( # type: ignore[attr-defined] + field.type_, BaseModel + ): + if field.sub_fields is not None: # type: ignore[attr-defined] + for sub_field in field.sub_fields: # type: ignore[attr-defined] + if not is_pv1_scalar_field(sub_field): + return False + return True + if _annotation_is_sequence(field.type_): + return True + return False + + def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]: + use_errors: List[Any] = [] + for error in errors: + if isinstance(error, ErrorWrapper): + new_errors = ValidationError( # type: ignore[call-arg] + errors=[error], model=RequestErrorModel + ).errors() + use_errors.extend(new_errors) + elif isinstance(error, list): + use_errors.extend(_normalize_errors(error)) + else: + use_errors.append(error) + return use_errors + + def _model_rebuild(model: Type[BaseModel]) -> None: + model.update_forward_refs() + + def _model_dump( + model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any + ) -> Any: + return model.dict(**kwargs) + + def _get_model_config(model: BaseModel) -> Any: + return model.__config__ # type: ignore[attr-defined] + + def get_schema_from_model_field( + *, + field: ModelField, + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + field_mapping: Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + separate_input_output_schemas: bool = True, + ) -> Dict[str, Any]: + # This expects that GenerateJsonSchema was already used to generate the definitions + return field_schema( # type: ignore[no-any-return] + field, model_name_map=model_name_map, ref_prefix=REF_PREFIX + )[0] + + def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap: + models = get_flat_models_from_fields(fields, known_models=set()) + return get_model_name_map(models) # type: ignore[no-any-return] + + def get_definitions( + *, + fields: List[ModelField], + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + separate_input_output_schemas: bool = True, + ) -> Tuple[ + Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + Dict[str, Dict[str, Any]], + ]: + models = get_flat_models_from_fields(fields, known_models=set()) + return {}, get_model_definitions( + flat_models=models, model_name_map=model_name_map + ) + + def is_scalar_field(field: ModelField) -> bool: + return is_pv1_scalar_field(field) + + def is_sequence_field(field: ModelField) -> bool: + return field.shape in 
sequence_shapes or _annotation_is_sequence(field.type_) # type: ignore[attr-defined] + + def is_scalar_sequence_field(field: ModelField) -> bool: + return is_pv1_scalar_sequence_field(field) + + def is_bytes_field(field: ModelField) -> bool: + return lenient_issubclass(field.type_, bytes) + + def is_bytes_sequence_field(field: ModelField) -> bool: + return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes) # type: ignore[attr-defined] + + def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: + return copy(field_info) + + def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: + return sequence_shape_to_type[field.shape](value) # type: ignore[no-any-return,attr-defined] + + def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]: + missing_field_error = ErrorWrapper(MissingError(), loc=loc) # type: ignore[call-arg] + new_error = ValidationError([missing_field_error], RequestErrorModel) + return new_error.errors()[0] # type: ignore[return-value] + + def create_body_model( + *, fields: Sequence[ModelField], model_name: str + ) -> Type[BaseModel]: + BodyModel = create_model(model_name) + for f in fields: + BodyModel.__fields__[f.name] = f # type: ignore[index] + return BodyModel + + +def _regenerate_error_with_loc( + *, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...] +) -> List[Dict[str, Any]]: + updated_loc_errors: List[Any] = [ + {**err, "loc": loc_prefix + err.get("loc", ())} + for err in _normalize_errors(errors) + ] + + return updated_loc_errors + + +def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool: + if lenient_issubclass(annotation, (str, bytes)): + return False + return lenient_issubclass(annotation, sequence_types) + + +def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool: + return _annotation_is_sequence(annotation) or _annotation_is_sequence( + get_origin(annotation) + ) + + +def value_is_sequence(value: Any) -> bool: + return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type] + + +def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool: + return ( + lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile)) + or _annotation_is_sequence(annotation) + or is_dataclass(annotation) + ) + + +def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + return any(field_annotation_is_complex(arg) for arg in get_args(annotation)) + + return ( + _annotation_is_complex(annotation) + or _annotation_is_complex(origin) + or hasattr(origin, "__pydantic_core_schema__") + or hasattr(origin, "__get_pydantic_core_schema__") + ) + + +def field_annotation_is_scalar(annotation: Any) -> bool: + # handle Ellipsis here to make tuple[int, ...] 
work nicely + return annotation is Ellipsis or not field_annotation_is_complex(annotation) + + +def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one_scalar_sequence = False + for arg in get_args(annotation): + if field_annotation_is_scalar_sequence(arg): + at_least_one_scalar_sequence = True + continue + elif not field_annotation_is_scalar(arg): + return False + return at_least_one_scalar_sequence + return field_annotation_is_sequence(annotation) and all( + field_annotation_is_scalar(sub_annotation) + for sub_annotation in get_args(annotation) + ) + + +def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool: + if lenient_issubclass(annotation, bytes): + return True + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + for arg in get_args(annotation): + if lenient_issubclass(arg, bytes): + return True + return False + + +def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool: + if lenient_issubclass(annotation, UploadFile): + return True + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + for arg in get_args(annotation): + if lenient_issubclass(arg, UploadFile): + return True + return False + + +def is_bytes_sequence_annotation(annotation: Any) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one = False + for arg in get_args(annotation): + if is_bytes_sequence_annotation(arg): + at_least_one = True + continue + return at_least_one + return field_annotation_is_sequence(annotation) and all( + is_bytes_or_nonable_bytes_annotation(sub_annotation) + for sub_annotation in get_args(annotation) + ) + + +def is_uploadfile_sequence_annotation(annotation: Any) -> bool: + origin = get_origin(annotation) + if origin is Union or origin is UnionType: + at_least_one = False + for arg in get_args(annotation): + if is_uploadfile_sequence_annotation(arg): + at_least_one = True + continue + return at_least_one + return field_annotation_is_sequence(annotation) and all( + is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation) + for sub_annotation in get_args(annotation) + ) diff --git a/venv/lib/python3.12/site-packages/fastapi/applications.py b/venv/lib/python3.12/site-packages/fastapi/applications.py new file mode 100644 index 0000000..597c60a --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/applications.py @@ -0,0 +1,4585 @@ +from enum import Enum +from typing import ( + Any, + Awaitable, + Callable, + Coroutine, + Dict, + List, + Optional, + Sequence, + Type, + TypeVar, + Union, +) + +from fastapi import routing +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.exception_handlers import ( + http_exception_handler, + request_validation_exception_handler, + websocket_request_validation_exception_handler, +) +from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError +from fastapi.logger import logger +from fastapi.openapi.docs import ( + get_redoc_html, + get_swagger_ui_html, + get_swagger_ui_oauth2_redirect_html, +) +from fastapi.openapi.utils import get_openapi +from fastapi.params import Depends +from fastapi.types import DecoratedCallable, IncEx +from fastapi.utils import generate_unique_id +from starlette.applications import Starlette +from starlette.datastructures import State +from starlette.exceptions import HTTPException +from starlette.middleware import Middleware 
+from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.responses import HTMLResponse, JSONResponse, Response +from starlette.routing import BaseRoute +from starlette.types import ASGIApp, Lifespan, Receive, Scope, Send +from typing_extensions import Annotated, Doc, deprecated # type: ignore [attr-defined] + +AppType = TypeVar("AppType", bound="FastAPI") + + +class FastAPI(Starlette): + """ + `FastAPI` app class, the main entrypoint to use FastAPI. + + Read more in the + [FastAPI docs for First Steps](https://fastapi.tiangolo.com/tutorial/first-steps/). + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + ``` + """ + + def __init__( + self: AppType, + *, + debug: Annotated[ + bool, + Doc( + """ + Boolean indicating if debug tracebacks should be returned on server + errors. + + Read more in the + [Starlette docs for Applications](https://www.starlette.io/applications/#instantiating-the-application). + """ + ), + ] = False, + routes: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Starlette and supported for compatibility. + + --- + + A list of routes to serve incoming HTTP and WebSocket requests. + """ + ), + deprecated( + """ + You normally wouldn't use this parameter with FastAPI, it is inherited + from Starlette and supported for compatibility. + + In FastAPI, you normally would use the *path operation methods*, + like `app.get()`, `app.post()`, etc. + """ + ), + ] = None, + title: Annotated[ + str, + Doc( + """ + The title of the API. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(title="ChimichangApp") + ``` + """ + ), + ] = "FastAPI", + summary: Annotated[ + Optional[str], + Doc( + """ + A short summary of the API. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(summary="Deadpond's favorite app. Nuff said.") + ``` + """ + ), + ] = None, + description: Annotated[ + str, + Doc( + ''' + A description of the API. Supports Markdown (using + [CommonMark syntax](https://commonmark.org/)). + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI( + description=""" + ChimichangApp API helps you do awesome stuff. 🚀 + + ## Items + + You can **read items**. + + ## Users + + You will be able to: + + * **Create users** (_not implemented_). + * **Read users** (_not implemented_). + + """ + ) + ``` + ''' + ), + ] = "", + version: Annotated[ + str, + Doc( + """ + The version of the API. + + **Note** This is the version of your application, not the version of + the OpenAPI specification nor the version of FastAPI being used. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). 
+ + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(version="0.0.1") + ``` + """ + ), + ] = "0.1.0", + openapi_url: Annotated[ + Optional[str], + Doc( + """ + The URL where the OpenAPI schema will be served from. + + If you set it to `None`, no OpenAPI schema will be served publicly, and + the default automatic endpoints `/docs` and `/redoc` will also be + disabled. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#openapi-url). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(openapi_url="/api/v1/openapi.json") + ``` + """ + ), + ] = "/openapi.json", + openapi_tags: Annotated[ + Optional[List[Dict[str, Any]]], + Doc( + """ + A list of tags used by OpenAPI; these are the same `tags` you can set + in the *path operations*, like: + + * `@app.get("/users/", tags=["users"])` + * `@app.get("/items/", tags=["items"])` + + The order of the tags can be used to specify the order shown in + tools like Swagger UI, used in the automatic path `/docs`. + + It's not required to specify all the tags used. + + The tags that are not declared MAY be organized randomly or based + on the tools' logic. Each tag name in the list MUST be unique. + + The value of each item is a `dict` containing: + + * `name`: The name of the tag. + * `description`: A short description of the tag. + [CommonMark syntax](https://commonmark.org/) MAY be used for rich + text representation. + * `externalDocs`: Additional external documentation for this tag. If + provided, it would contain a `dict` with: + * `description`: A short description of the target documentation. + [CommonMark syntax](https://commonmark.org/) MAY be used for + rich text representation. + * `url`: The URL for the target documentation. Value MUST be in + the form of a URL. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-tags). + + **Example** + + ```python + from fastapi import FastAPI + + tags_metadata = [ + { + "name": "users", + "description": "Operations with users. The **login** logic is also here.", + }, + { + "name": "items", + "description": "Manage items. So _fancy_ they have their own docs.", + "externalDocs": { + "description": "Items external docs", + "url": "https://fastapi.tiangolo.com/", + }, + }, + ] + + app = FastAPI(openapi_tags=tags_metadata) + ``` + """ + ), + ] = None, + servers: Annotated[ + Optional[List[Dict[str, Union[str, Any]]]], + Doc( + """ + A `list` of `dict`s with connectivity information to a target server. + + You would use it, for example, if your application is served from + different domains and you want to use the same Swagger UI in the + browser to interact with each of them (instead of having multiple + browser tabs open). Or if you want to leave the possible URLs fixed. + + If the servers `list` is not provided, or is an empty `list`, the + default value would be a `dict` with a `url` value of `/`. + + Each item in the `list` is a `dict` containing: + + * `url`: A URL to the target host. This URL supports Server Variables + and MAY be relative, to indicate that the host location is relative + to the location where the OpenAPI document is being served. Variable + substitutions will be made when a variable is named in `{`brackets`}`. + * `description`: An optional string describing the host designated by + the URL. [CommonMark syntax](https://commonmark.org/) MAY be used for + rich text representation.
+ * `variables`: A `dict` between a variable name and its value. The value + is used for substitution in the server's URL template. + + Read more in the + [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#additional-servers). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI( + servers=[ + {"url": "https://stag.example.com", "description": "Staging environment"}, + {"url": "https://prod.example.com", "description": "Production environment"}, + ] + ) + ``` + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of global dependencies; they will be applied to each + *path operation*, including in sub-routers. + + Read more about it in the + [FastAPI docs for Global Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/global-dependencies/). + + **Example** + + ```python + from fastapi import Depends, FastAPI + + from .dependencies import func_dep_1, func_dep_2 + + app = FastAPI(dependencies=[Depends(func_dep_1), Depends(func_dep_2)]) + ``` + """ + ), + ] = None, + default_response_class: Annotated[ + Type[Response], + Doc( + """ + The default response class to be used. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + + **Example** + + ```python + from fastapi import FastAPI + from fastapi.responses import ORJSONResponse + + app = FastAPI(default_response_class=ORJSONResponse) + ``` + """ + ), + ] = Default(JSONResponse), + redirect_slashes: Annotated[ + bool, + Doc( + """ + Whether to detect and redirect slashes in URLs when the client doesn't + use the same format. + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(redirect_slashes=True) # the default + + @app.get("/items/") + async def read_items(): + return [{"item_id": "Foo"}] + ``` + + With this app, if a client goes to `/items` (without a trailing slash), + they will be automatically redirected with an HTTP status code of 307 + to `/items/`. + """ + ), + ] = True, + docs_url: Annotated[ + Optional[str], + Doc( + """ + The path to the automatic interactive API documentation. + It is handled in the browser by Swagger UI. + + The default URL is `/docs`. You can disable it by setting it to `None`. + + If `openapi_url` is set to `None`, this will be automatically disabled. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(docs_url="/documentation", redoc_url=None) + ``` + """ + ), + ] = "/docs", + redoc_url: Annotated[ + Optional[str], + Doc( + """ + The path to the alternative automatic interactive API documentation + provided by ReDoc. + + The default URL is `/redoc`. You can disable it by setting it to `None`. + + If `openapi_url` is set to `None`, this will be automatically disabled. + + Read more in the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(docs_url="/documentation", redoc_url="/redocumentation") + ``` + """ + ), + ] = "/redoc", + swagger_ui_oauth2_redirect_url: Annotated[ + Optional[str], + Doc( + """ + The OAuth2 redirect endpoint for the Swagger UI. + + By default it is `/docs/oauth2-redirect`.
+ + This is only used if you use OAuth2 (with the "Authorize" button) + with Swagger UI. + """ + ), + ] = "/docs/oauth2-redirect", + swagger_ui_init_oauth: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + OAuth2 configuration for the Swagger UI, by default shown at `/docs`. + + Read more about the available configuration options in the + [Swagger UI docs](https://swagger.io/docs/open-source-tools/swagger-ui/usage/oauth2/). + """ + ), + ] = None, + middleware: Annotated[ + Optional[Sequence[Middleware]], + Doc( + """ + List of middleware to be added when creating the application. + + In FastAPI you would normally do this with `app.add_middleware()` + instead. + + Read more in the + [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). + """ + ), + ] = None, + exception_handlers: Annotated[ + Optional[ + Dict[ + Union[int, Type[Exception]], + Callable[[Request, Any], Coroutine[Any, Any, Response]], + ] + ], + Doc( + """ + A dictionary with handlers for exceptions. + + In FastAPI, you would normally use the decorator + `@app.exception_handler()`. + + Read more in the + [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). + """ + ), + ] = None, + on_startup: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of startup event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + on_shutdown: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of shutdown event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + lifespan: Annotated[ + Optional[Lifespan[AppType]], + Doc( + """ + A `Lifespan` context manager handler. This replaces `startup` and + `shutdown` functions with a single context manager. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + terms_of_service: Annotated[ + Optional[str], + Doc( + """ + A URL to the Terms of Service for your API. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more at the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + app = FastAPI(terms_of_service="http://example.com/terms/") + ``` + """ + ), + ] = None, + contact: Annotated[ + Optional[Dict[str, Union[str, Any]]], + Doc( + """ + A dictionary with the contact information for the exposed API. + + It can contain several fields. + + * `name`: (`str`) The name of the contact person/organization. + * `url`: (`str`) A URL pointing to the contact information. MUST be in + the format of a URL. + * `email`: (`str`) The email address of the contact person/organization. + MUST be in the format of an email address. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more at the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). 
+ + **Example** + + ```python + app = FastAPI( + contact={ + "name": "Deadpoolio the Amazing", + "url": "http://x-force.example.com/contact/", + "email": "dp@x-force.example.com", + } + ) + ``` + """ + ), + ] = None, + license_info: Annotated[ + Optional[Dict[str, Union[str, Any]]], + Doc( + """ + A dictionary with the license information for the exposed API. + + It can contain several fields. + + * `name`: (`str`) **REQUIRED** (if a `license_info` is set). The + license name used for the API. + * `identifier`: (`str`) An [SPDX](https://spdx.dev/) license expression + for the API. The `identifier` field is mutually exclusive of the `url` + field. Available since OpenAPI 3.1.0, FastAPI 0.99.0. + * `url`: (`str`) A URL to the license used for the API. This MUST be + in the format of a URL. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more at the + [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). + + **Example** + + ```python + app = FastAPI( + license_info={ + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html", + } + ) + ``` + """ + ), + ] = None, + openapi_prefix: Annotated[ + str, + Doc( + """ + A URL prefix for the OpenAPI URL. + """ + ), + deprecated( + """ + "openapi_prefix" has been deprecated in favor of "root_path", which + follows more closely the ASGI standard, is simpler, and more + automatic. + """ + ), + ] = "", + root_path: Annotated[ + str, + Doc( + """ + A path prefix handled by a proxy that is not seen by the application + but is seen by external clients, which affects things like Swagger UI. + + Read more about it at the + [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(root_path="/api/v1") + ``` + """ + ), + ] = "", + root_path_in_servers: Annotated[ + bool, + Doc( + """ + Set this to `False` to disable automatically generating the URLs in + the `servers` field in the autogenerated OpenAPI using the `root_path`. + + Read more about it in the + [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#disable-automatic-server-from-root_path). + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI(root_path_in_servers=False) + ``` + """ + ), + ] = True, + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + OpenAPI callbacks that should apply to all *path operations*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + webhooks: Annotated[ + Optional[routing.APIRouter], + Doc( + """ + Add OpenAPI webhooks. This is similar to `callbacks` but it doesn't + depend on specific *path operations*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+ + **Note**: This is available since OpenAPI 3.1.0, FastAPI 0.99.0. + + Read more about it in the + [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all *path operations* as deprecated. You probably don't need it, + but it's available. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) all the *path operations* in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + swagger_ui_parameters: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Parameters to configure Swagger UI, the autogenerated interactive API + documentation (by default at `/docs`). + + Read more about it in the + [FastAPI docs about how to Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + separate_input_output_schemas: Annotated[ + bool, + Doc( + """ + Whether to generate separate OpenAPI schemas for request body and + response body when the results would be more precise. + + This is particularly useful when automatically generating clients. + + For example, if you have a model like: + + ```python + from pydantic import BaseModel + + class Item(BaseModel): + name: str + tags: list[str] = [] + ``` + + When `Item` is used for input, a request body, `tags` is not required, + the client doesn't have to provide it. + + But when using `Item` for output, for a response body, `tags` is always + available because it has a default value, even if it's just an empty + list. So, the client should be able to always expect it. + + In this case, there would be two different schemas, one for input and + another one for output. + """ + ), + ] = True, + **extra: Annotated[ + Any, + Doc( + """ + Extra keyword arguments to be stored in the app, not used by FastAPI + anywhere. 
+ """ + ), + ], + ) -> None: + self.debug = debug + self.title = title + self.summary = summary + self.description = description + self.version = version + self.terms_of_service = terms_of_service + self.contact = contact + self.license_info = license_info + self.openapi_url = openapi_url + self.openapi_tags = openapi_tags + self.root_path_in_servers = root_path_in_servers + self.docs_url = docs_url + self.redoc_url = redoc_url + self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url + self.swagger_ui_init_oauth = swagger_ui_init_oauth + self.swagger_ui_parameters = swagger_ui_parameters + self.servers = servers or [] + self.separate_input_output_schemas = separate_input_output_schemas + self.extra = extra + self.openapi_version: Annotated[ + str, + Doc( + """ + The version string of OpenAPI. + + FastAPI will generate OpenAPI version 3.1.0, and will output that as + the OpenAPI version. But some tools, even though they might be + compatible with OpenAPI 3.1.0, might not recognize it as a valid. + + So you could override this value to trick those tools into using + the generated OpenAPI. Have in mind that this is a hack. But if you + avoid using features added in OpenAPI 3.1.0, it might work for your + use case. + + This is not passed as a parameter to the `FastAPI` class to avoid + giving the false idea that FastAPI would generate a different OpenAPI + schema. It is only available as an attribute. + + **Example** + + ```python + from fastapi import FastAPI + + app = FastAPI() + + app.openapi_version = "3.0.2" + ``` + """ + ), + ] = "3.1.0" + self.openapi_schema: Optional[Dict[str, Any]] = None + if self.openapi_url: + assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'" + assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'" + # TODO: remove when discarding the openapi_prefix parameter + if openapi_prefix: + logger.warning( + '"openapi_prefix" has been deprecated in favor of "root_path", which ' + "follows more closely the ASGI standard, is simpler, and more " + "automatic. Check the docs at " + "https://fastapi.tiangolo.com/advanced/sub-applications/" + ) + self.webhooks: Annotated[ + routing.APIRouter, + Doc( + """ + The `app.webhooks` attribute is an `APIRouter` with the *path + operations* that will be used just for documentation of webhooks. + + Read more about it in the + [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/). + """ + ), + ] = webhooks or routing.APIRouter() + self.root_path = root_path or openapi_prefix + self.state: Annotated[ + State, + Doc( + """ + A state object for the application. This is the same object for the + entire application, it doesn't change from request to request. + + You normally woudln't use this in FastAPI, for most of the cases you + would instead use FastAPI dependencies. + + This is simply inherited from Starlette. + + Read more about it in the + [Starlette docs for Applications](https://www.starlette.io/applications/#storing-state-on-the-app-instance). + """ + ), + ] = State() + self.dependency_overrides: Annotated[ + Dict[Callable[..., Any], Callable[..., Any]], + Doc( + """ + A dictionary with overrides for the dependencies. + + Each key is the original dependency callable, and the value is the + actual dependency that should be called. + + This is for testing, to replace expensive dependencies with testing + versions. 
+ + Read more about it in the + [FastAPI docs for Testing Dependencies with Overrides](https://fastapi.tiangolo.com/advanced/testing-dependencies/). + """ + ), + ] = {} + self.router: routing.APIRouter = routing.APIRouter( + routes=routes, + redirect_slashes=redirect_slashes, + dependency_overrides_provider=self, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, + default_response_class=default_response_class, + dependencies=dependencies, + callbacks=callbacks, + deprecated=deprecated, + include_in_schema=include_in_schema, + responses=responses, + generate_unique_id_function=generate_unique_id_function, + ) + self.exception_handlers: Dict[ + Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]] + ] = {} if exception_handlers is None else dict(exception_handlers) + self.exception_handlers.setdefault(HTTPException, http_exception_handler) + self.exception_handlers.setdefault( + RequestValidationError, request_validation_exception_handler + ) + self.exception_handlers.setdefault( + WebSocketRequestValidationError, + # Starlette still has incorrect type specification for the handlers + websocket_request_validation_exception_handler, # type: ignore + ) + + self.user_middleware: List[Middleware] = ( + [] if middleware is None else list(middleware) + ) + self.middleware_stack: Union[ASGIApp, None] = None + self.setup() + + def openapi(self) -> Dict[str, Any]: + """ + Generate the OpenAPI schema of the application. This is called by FastAPI + internally. + + The first time it is called it stores the result in the attribute + `app.openapi_schema`, and on subsequent calls it just returns that same + result, to avoid the cost of generating the schema every time. + + If you need to modify the generated OpenAPI schema, you can modify the + stored `app.openapi_schema` attribute. + + Read more in the + [FastAPI docs for OpenAPI](https://fastapi.tiangolo.com/how-to/extending-openapi/).
+ """ + if not self.openapi_schema: + self.openapi_schema = get_openapi( + title=self.title, + version=self.version, + openapi_version=self.openapi_version, + summary=self.summary, + description=self.description, + terms_of_service=self.terms_of_service, + contact=self.contact, + license_info=self.license_info, + routes=self.routes, + webhooks=self.webhooks.routes, + tags=self.openapi_tags, + servers=self.servers, + separate_input_output_schemas=self.separate_input_output_schemas, + ) + return self.openapi_schema + + def setup(self) -> None: + if self.openapi_url: + urls = (server_data.get("url") for server_data in self.servers) + server_urls = {url for url in urls if url} + + async def openapi(req: Request) -> JSONResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + if root_path not in server_urls: + if root_path and self.root_path_in_servers: + self.servers.insert(0, {"url": root_path}) + server_urls.add(root_path) + return JSONResponse(self.openapi()) + + self.add_route(self.openapi_url, openapi, include_in_schema=False) + if self.openapi_url and self.docs_url: + + async def swagger_ui_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url + if oauth2_redirect_url: + oauth2_redirect_url = root_path + oauth2_redirect_url + return get_swagger_ui_html( + openapi_url=openapi_url, + title=self.title + " - Swagger UI", + oauth2_redirect_url=oauth2_redirect_url, + init_oauth=self.swagger_ui_init_oauth, + swagger_ui_parameters=self.swagger_ui_parameters, + ) + + self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False) + + if self.swagger_ui_oauth2_redirect_url: + + async def swagger_ui_redirect(req: Request) -> HTMLResponse: + return get_swagger_ui_oauth2_redirect_html() + + self.add_route( + self.swagger_ui_oauth2_redirect_url, + swagger_ui_redirect, + include_in_schema=False, + ) + if self.openapi_url and self.redoc_url: + + async def redoc_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + return get_redoc_html( + openapi_url=openapi_url, title=self.title + " - ReDoc" + ) + + self.add_route(self.redoc_url, redoc_html, include_in_schema=False) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.root_path: + scope["root_path"] = self.root_path + await super().__call__(scope, receive, send) + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Coroutine[Any, Any, Response]], + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + 
openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + self.router.add_api_route( + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def api_route( + self, + path: str, + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.router.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, + path: str, + endpoint: Callable[..., Any], + name: Optional[str] = None, + *, + dependencies: Optional[Sequence[Depends]] = None, + ) -> None: + self.router.add_api_websocket_route( + path, + endpoint, + name=name, + dependencies=dependencies, + ) + + def websocket( + self, + path: Annotated[ + str, + Doc( + """ + WebSocket path. 
+ """ + ), + ], + name: Annotated[ + Optional[str], + Doc( + """ + A name for the WebSocket. Only used internally. + """ + ), + ] = None, + *, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be used for this + WebSocket. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + """ + ), + ] = None, + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Decorate a WebSocket function. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + + **Example** + + ```python + from fastapi import FastAPI, WebSocket + + app = FastAPI() + + @app.websocket("/ws") + async def websocket_endpoint(websocket: WebSocket): + await websocket.accept() + while True: + data = await websocket.receive_text() + await websocket.send_text(f"Message text was: {data}") + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route( + path, + func, + name=name, + dependencies=dependencies, + ) + return func + + return decorator + + def include_router( + self, + router: Annotated[routing.APIRouter, Doc("The `APIRouter` to include.")], + *, + prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to all the *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to all the + *path operations* in this router. + + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + + **Example** + + ```python + from fastapi import Depends, FastAPI + + from .dependencies import get_token_header + from .internal import admin + + app = FastAPI() + + app.include_router( + admin.router, + dependencies=[Depends(get_token_header)], + ) + ``` + """ + ), + ] = None, + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all the *path operations* in this router as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + **Example** + + ```python + from fastapi import FastAPI + + from .internal import old_api + + app = FastAPI() + + app.include_router( + old_api.router, + deprecated=True, + ) + ``` + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include (or not) all the *path operations* in this router in the + generated OpenAPI schema. 
+ + This affects the generated OpenAPI (e.g. visible at `/docs`). + + **Example** + + ```python + from fastapi import FastAPI + + from .internal import old_api + + app = FastAPI() + + app.include_router( + old_api.router, + include_in_schema=False, + ) + ``` + """ + ), + ] = True, + default_response_class: Annotated[ + Type[Response], + Doc( + """ + Default response class to be used for the *path operations* in this + router. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + + **Example** + + ```python + from fastapi import FastAPI + from fastapi.responses import ORJSONResponse + + from .internal import old_api + + app = FastAPI() + + app.include_router( + old_api.router, + default_response_class=ORJSONResponse, + ) + ``` + """ + ), + ] = Default(JSONResponse), + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> None: + """ + Include an `APIRouter` in the same app. + + Read more about it in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). + + ## Example + + ```python + from fastapi import FastAPI + + from .users import users_router + + app = FastAPI() + + app.include_router(users_router) + ``` + """ + self.router.include_router( + router, + prefix=prefix, + tags=tags, + dependencies=dependencies, + responses=responses, + deprecated=deprecated, + include_in_schema=include_in_schema, + default_response_class=default_response_class, + callbacks=callbacks, + generate_unique_id_function=generate_unique_id_function, + ) + + def get( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. 
+ * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. 
+ + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). 
+ + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP GET operation. + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.get("/items/") + def read_items(): + return [{"name": "Empanada"}, {"name": "Arepa"}] + ``` + """ + return self.router.get( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. 
So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. 
+ + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. 
+ + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PUT operation. 
+ + ## Example + + ```python + from fastapi import FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + + @app.put("/items/{item_id}") + def replace_item(item_id: str, item: Item): + return {"message": "Item replaced", "id": item_id} + ``` + """ + return self.router.put( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
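+
+                For example, a small sketch grouping the operation under an
+                `items` tag (the tag name is arbitrary):
+
+                ```python
+                from fastapi import FastAPI
+
+                app = FastAPI()
+
+                @app.post("/items/", tags=["items"])
+                def create_item(name: str):
+                    return {"name": name}
+                ```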
+ """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
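+
+                A sketch of the effect, assuming Pydantic v2 (the field name,
+                alias, and `populate_by_name` setting are illustrative):
+
+                ```python
+                from fastapi import FastAPI
+                from pydantic import BaseModel, Field
+
+                app = FastAPI()
+
+                class Item(BaseModel):
+                    model_config = {"populate_by_name": True}
+
+                    item_name: str = Field(alias="itemName")
+
+                @app.post("/items/", response_model=Item, response_model_by_alias=False)
+                def create_item(name: str):
+                    # With response_model_by_alias=False the JSON uses the field
+                    # name ("item_name") instead of the alias ("itemName").
+                    return Item(item_name=name)
+                ```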
+ """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. 
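+
+                For example (the `x-aperture-labs-portal` key is just an
+                illustration of a vendor extension):
+
+                ```python
+                from fastapi import FastAPI
+
+                app = FastAPI()
+
+                @app.post("/items/", openapi_extra={"x-aperture-labs-portal": "blue"})
+                def create_item(name: str):
+                    return {"name": name}
+                ```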
+ + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP POST operation. + + ## Example + + ```python + from fastapi import FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + + @app.post("/items/") + def create_item(item: Item): + return {"message": "Item created"} + ``` + """ + return self.router.post( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). 
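+
+                For instance, a minimal sketch for a DELETE operation (the
+                `Message` model is illustrative):
+
+                ```python
+                from fastapi import FastAPI
+                from pydantic import BaseModel
+
+                app = FastAPI()
+
+                class Message(BaseModel):
+                    detail: str
+
+                @app.delete("/items/{item_id}", response_model=Message)
+                def delete_item(item_id: str):
+                    return {"detail": f"Deleted item {item_id}"}
+                ```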
+ """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
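+
+                A sketch of the filtering effect (the model and field names are
+                illustrative):
+
+                ```python
+                from fastapi import FastAPI
+                from pydantic import BaseModel
+
+                app = FastAPI()
+
+                class Item(BaseModel):
+                    name: str
+                    price: float = 0.0
+
+                @app.delete(
+                    "/items/{item_id}",
+                    response_model=Item,
+                    response_model_include={"name"},
+                )
+                def delete_item(item_id: str):
+                    # Only "name" reaches the client; "price" is filtered out.
+                    return Item(name=item_id, price=9.99)
+                ```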
+ """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). 
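+
+                For example, returning plain text instead of JSON:
+
+                ```python
+                from fastapi import FastAPI
+                from fastapi.responses import PlainTextResponse
+
+                app = FastAPI()
+
+                @app.delete("/items/{item_id}", response_class=PlainTextResponse)
+                def delete_item(item_id: str):
+                    return f"deleted {item_id}"
+                ```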
+ """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP DELETE operation. + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.delete("/items/{item_id}") + def delete_item(item_id: str): + return {"message": "Item deleted"} + ``` + """ + return self.router.delete( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. 
If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. 
+ + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). 
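+
+                A sketch of the effect (the field names are illustrative):
+
+                ```python
+                from typing import Optional
+
+                from fastapi import FastAPI
+                from pydantic import BaseModel
+
+                app = FastAPI()
+
+                class Item(BaseModel):
+                    name: str
+                    description: Optional[str] = None
+
+                @app.options("/items/", response_model=Item, response_model_exclude_none=True)
+                def get_item_options():
+                    # "description" is None, so it is dropped from the JSON.
+                    return Item(name="Empanada")
+                ```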
+ """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP OPTIONS operation. + + ## Example + + ```python + from fastapi import FastAPI + + app = FastAPI() + + @app.options("/items/") + def get_item_options(): + return {"additions": ["Aji", "Guacamole"]} + ``` + """ + return self.router.options( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. 
+ + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
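+
+                For example (the wording is arbitrary):
+
+                ```python
+                from fastapi import FastAPI
+
+                app = FastAPI()
+
+                @app.head("/items/", response_description="Item headers only")
+                def head_items():
+                    return {}
+                ```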
+ """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). 
+ """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP HEAD operation. 
+ + ## Example + + ```python + from fastapi import FastAPI, Response + + app = FastAPI() + + @app.head("/items/", status_code=204) + def get_items_headers(response: Response): + response.headers["X-Cat-Dog"] = "Alone in the world" + ``` + """ + return self.router.head( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. 
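+
+                For example, a sketch guarding the operation with a hypothetical
+                header check (`verify_token` and the expected value are
+                illustrative):
+
+                ```python
+                from fastapi import Depends, FastAPI, Header, HTTPException
+
+                app = FastAPI()
+
+                def verify_token(x_token: str = Header(...)):
+                    if x_token != "expected-token":
+                        raise HTTPException(status_code=400, detail="Invalid X-Token")
+
+                @app.patch("/items/{item_id}", dependencies=[Depends(verify_token)])
+                def update_item(item_id: str):
+                    return {"item_id": item_id}
+                ```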
+ + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
+ """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. 
+ + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[routing.APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PATCH operation. + + ## Example + + ```python + from fastapi import FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + + @app.patch("/items/") + def update_item(item: Item): + return {"message": "Item updated in place"} + ``` + """ + return self.router.patch( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). 
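+
+                A sketch of the filtering behavior described above (model
+                names are illustrative):
+
+                ```python
+                from pydantic import BaseModel
+
+                class UserOut(BaseModel):
+                    username: str
+
+                class UserInDB(UserOut):
+                    password: str
+
+                @app.post("/users/", response_model=UserOut)
+                def create_user(user: UserInDB):
+                    # The returned object carries `password`, but the response
+                    # JSON won't, because `UserOut` does not declare that field.
+                    return user
+                ```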
+ + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. 
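+
+                For example (a sketch; `Item` and `items` are illustrative):
+
+                ```python
+                @app.get(
+                    "/items/{item_id}",
+                    response_model=Item,
+                    response_model_include={"name"},
+                )
+                def read_item(item_id: str):
+                    return items[item_id]
+                ```
+
+                Only `name` would appear in the response JSON, even if `Item`
+                declares more fields.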
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. 
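+
+                For example, returning plain text instead of JSON (a sketch):
+
+                ```python
+                from fastapi.responses import PlainTextResponse
+
+                @app.get("/ping", response_class=PlainTextResponse)
+                def ping():
+                    return "pong"
+                ```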
+
+                Read more about it in the
+                [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse).
+                """
+            ),
+        ] = Default(JSONResponse),
+        name: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Name for this *path operation*. Only used internally.
+                """
+            ),
+        ] = None,
+        callbacks: Annotated[
+            Optional[List[BaseRoute]],
+            Doc(
+                """
+                List of *path operations* that will be used as OpenAPI callbacks.
+
+                This is only for OpenAPI documentation, the callbacks won't be used
+                directly.
+
+                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+                Read more about it in the
+                [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
+                """
+            ),
+        ] = None,
+        openapi_extra: Annotated[
+            Optional[Dict[str, Any]],
+            Doc(
+                """
+                Extra metadata to be included in the OpenAPI schema for this *path
+                operation*.
+
+                Read more about it in the
+                [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema).
+                """
+            ),
+        ] = None,
+        generate_unique_id_function: Annotated[
+            Callable[[routing.APIRoute], str],
+            Doc(
+                """
+                Customize the function used to generate unique IDs for the *path
+                operations* shown in the generated OpenAPI.
+
+                This is particularly useful when automatically generating clients or
+                SDKs for your API.
+
+                Read more about it in the
+                [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
+                """
+            ),
+        ] = Default(generate_unique_id),
+    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
+        """
+        Add a *path operation* using an HTTP TRACE operation.
+
+        ## Example
+
+        ```python
+        from fastapi import FastAPI
+
+        app = FastAPI()
+
+        @app.trace("/items/{item_id}")
+        def trace_item(item_id: str):
+            return None
+        ```
+        """
+        return self.router.trace(
+            path,
+            response_model=response_model,
+            status_code=status_code,
+            tags=tags,
+            dependencies=dependencies,
+            summary=summary,
+            description=description,
+            response_description=response_description,
+            responses=responses,
+            deprecated=deprecated,
+            operation_id=operation_id,
+            response_model_include=response_model_include,
+            response_model_exclude=response_model_exclude,
+            response_model_by_alias=response_model_by_alias,
+            response_model_exclude_unset=response_model_exclude_unset,
+            response_model_exclude_defaults=response_model_exclude_defaults,
+            response_model_exclude_none=response_model_exclude_none,
+            include_in_schema=include_in_schema,
+            response_class=response_class,
+            name=name,
+            callbacks=callbacks,
+            openapi_extra=openapi_extra,
+            generate_unique_id_function=generate_unique_id_function,
+        )
+
+    def websocket_route(
+        self, path: str, name: Union[str, None] = None
+    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
+        def decorator(func: DecoratedCallable) -> DecoratedCallable:
+            self.router.add_websocket_route(path, func, name=name)
+            return func
+
+        return decorator
+
+    @deprecated(
+        """
+        on_event is deprecated, use lifespan event handlers instead.
+
+        Read more about it in the
+        [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/).
+        """
+    )
+    def on_event(
+        self,
+        event_type: Annotated[
+            str,
+            Doc(
+                """
+                The type of event. `startup` or `shutdown`.
+                """
+            ),
+        ],
+    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
+        """
+        Add an event handler for the application.
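+
+        Since `lifespan` handlers are the recommended replacement, here is a
+        minimal sketch for comparison (illustrative):
+
+        ```python
+        from contextlib import asynccontextmanager
+
+        from fastapi import FastAPI
+
+        @asynccontextmanager
+        async def lifespan(app: FastAPI):
+            # startup logic runs before the yield
+            yield
+            # shutdown logic runs after the yield
+
+        app = FastAPI(lifespan=lifespan)
+        ```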
+ + `on_event` is deprecated, use `lifespan` event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). + """ + return self.router.on_event(event_type) + + def middleware( + self, + middleware_type: Annotated[ + str, + Doc( + """ + The type of middleware. Currently only supports `http`. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a middleware to the application. + + Read more about it in the + [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). + + ## Example + + ```python + import time + + from fastapi import FastAPI, Request + + app = FastAPI() + + + @app.middleware("http") + async def add_process_time_header(request: Request, call_next): + start_time = time.time() + response = await call_next(request) + process_time = time.time() - start_time + response.headers["X-Process-Time"] = str(process_time) + return response + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_middleware(BaseHTTPMiddleware, dispatch=func) + return func + + return decorator + + def exception_handler( + self, + exc_class_or_status_code: Annotated[ + Union[int, Type[Exception]], + Doc( + """ + The Exception class this would handle, or a status code. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add an exception handler to the app. + + Read more about it in the + [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). + + ## Example + + ```python + from fastapi import FastAPI, Request + from fastapi.responses import JSONResponse + + + class UnicornException(Exception): + def __init__(self, name: str): + self.name = name + + + app = FastAPI() + + + @app.exception_handler(UnicornException) + async def unicorn_exception_handler(request: Request, exc: UnicornException): + return JSONResponse( + status_code=418, + content={"message": f"Oops! {exc.name} did something. There goes a rainbow..."}, + ) + ``` + """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_exception_handler(exc_class_or_status_code, func) + return func + + return decorator diff --git a/venv/lib/python3.12/site-packages/fastapi/background.py b/venv/lib/python3.12/site-packages/fastapi/background.py new file mode 100644 index 0000000..35ab1b2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/background.py @@ -0,0 +1,59 @@ +from typing import Any, Callable + +from starlette.background import BackgroundTasks as StarletteBackgroundTasks +from typing_extensions import Annotated, Doc, ParamSpec # type: ignore [attr-defined] + +P = ParamSpec("P") + + +class BackgroundTasks(StarletteBackgroundTasks): + """ + A collection of background tasks that will be called after a response has been + sent to the client. + + Read more about it in the + [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). 
+ + ## Example + + ```python + from fastapi import BackgroundTasks, FastAPI + + app = FastAPI() + + + def write_notification(email: str, message=""): + with open("log.txt", mode="w") as email_file: + content = f"notification for {email}: {message}" + email_file.write(content) + + + @app.post("/send-notification/{email}") + async def send_notification(email: str, background_tasks: BackgroundTasks): + background_tasks.add_task(write_notification, email, message="some notification") + return {"message": "Notification sent in the background"} + ``` + """ + + def add_task( + self, + func: Annotated[ + Callable[P, Any], + Doc( + """ + The function to call after the response is sent. + + It can be a regular `def` function or an `async def` function. + """ + ), + ], + *args: P.args, + **kwargs: P.kwargs, + ) -> None: + """ + Add a function to be called in the background after the response is sent. + + Read more about it in the + [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). + """ + return super().add_task(func, *args, **kwargs) diff --git a/venv/lib/python3.12/site-packages/fastapi/concurrency.py b/venv/lib/python3.12/site-packages/fastapi/concurrency.py new file mode 100644 index 0000000..894bd3e --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/concurrency.py @@ -0,0 +1,39 @@ +from contextlib import asynccontextmanager as asynccontextmanager +from typing import AsyncGenerator, ContextManager, TypeVar + +import anyio +from anyio import CapacityLimiter +from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool # noqa +from starlette.concurrency import run_in_threadpool as run_in_threadpool # noqa +from starlette.concurrency import ( # noqa + run_until_first_complete as run_until_first_complete, +) + +_T = TypeVar("_T") + + +@asynccontextmanager +async def contextmanager_in_threadpool( + cm: ContextManager[_T], +) -> AsyncGenerator[_T, None]: + # blocking __exit__ from running waiting on a free thread + # can create race conditions/deadlocks if the context manager itself + # has its own internal pool (e.g. 
a database connection pool) + # to avoid this we let __exit__ run without a capacity limit + # since we're creating a new limiter for each call, any non-zero limit + # works (1 is arbitrary) + exit_limiter = CapacityLimiter(1) + try: + yield await run_in_threadpool(cm.__enter__) + except Exception as e: + ok = bool( + await anyio.to_thread.run_sync( + cm.__exit__, type(e), e, None, limiter=exit_limiter + ) + ) + if not ok: + raise e + else: + await anyio.to_thread.run_sync( + cm.__exit__, None, None, None, limiter=exit_limiter + ) diff --git a/venv/lib/python3.12/site-packages/fastapi/datastructures.py b/venv/lib/python3.12/site-packages/fastapi/datastructures.py new file mode 100644 index 0000000..ce03e3c --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/datastructures.py @@ -0,0 +1,204 @@ +from typing import ( + Any, + BinaryIO, + Callable, + Dict, + Iterable, + Optional, + Type, + TypeVar, + cast, +) + +from fastapi._compat import ( + PYDANTIC_V2, + CoreSchema, + GetJsonSchemaHandler, + JsonSchemaValue, + with_info_plain_validator_function, +) +from starlette.datastructures import URL as URL # noqa: F401 +from starlette.datastructures import Address as Address # noqa: F401 +from starlette.datastructures import FormData as FormData # noqa: F401 +from starlette.datastructures import Headers as Headers # noqa: F401 +from starlette.datastructures import QueryParams as QueryParams # noqa: F401 +from starlette.datastructures import State as State # noqa: F401 +from starlette.datastructures import UploadFile as StarletteUploadFile +from typing_extensions import Annotated, Doc # type: ignore [attr-defined] + + +class UploadFile(StarletteUploadFile): + """ + A file uploaded in a request. + + Define it as a *path operation function* (or dependency) parameter. + + If you are using a regular `def` function, you can use the `upload_file.file` + attribute to access the raw standard Python file (blocking, not async), useful and + needed for non-async code. + + Read more about it in the + [FastAPI docs for Request Files](https://fastapi.tiangolo.com/tutorial/request-files/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import FastAPI, File, UploadFile + + app = FastAPI() + + + @app.post("/files/") + async def create_file(file: Annotated[bytes, File()]): + return {"file_size": len(file)} + + + @app.post("/uploadfile/") + async def create_upload_file(file: UploadFile): + return {"filename": file.filename} + ``` + """ + + file: Annotated[ + BinaryIO, + Doc("The standard Python file object (non-async)."), + ] + filename: Annotated[Optional[str], Doc("The original file name.")] + size: Annotated[Optional[int], Doc("The size of the file in bytes.")] + headers: Annotated[Headers, Doc("The headers of the request.")] + content_type: Annotated[ + Optional[str], Doc("The content type of the request, from the headers.") + ] + + async def write( + self, + data: Annotated[ + bytes, + Doc( + """ + The bytes to write to the file. + """ + ), + ], + ) -> None: + """ + Write some bytes to the file. + + You normally wouldn't use this from a file you read in a request. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().write(data) + + async def read( + self, + size: Annotated[ + int, + Doc( + """ + The number of bytes to read from the file. + """ + ), + ] = -1, + ) -> bytes: + """ + Read some bytes from the file. + + To be awaitable, compatible with async, this is run in threadpool. 
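+
+        For example, reading an upload in chunks (a sketch; the 1 MiB chunk
+        size is arbitrary):
+
+        ```python
+        @app.post("/upload/")
+        async def upload(file: UploadFile):
+            total = 0
+            while chunk := await file.read(1024 * 1024):
+                total += len(chunk)
+            return {"bytes_received": total}
+        ```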
+ """ + return await super().read(size) + + async def seek( + self, + offset: Annotated[ + int, + Doc( + """ + The position in bytes to seek to in the file. + """ + ), + ], + ) -> None: + """ + Move to a position in the file. + + Any next read or write will be done from that position. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().seek(offset) + + async def close(self) -> None: + """ + Close the file. + + To be awaitable, compatible with async, this is run in threadpool. + """ + return await super().close() + + @classmethod + def __get_validators__(cls: Type["UploadFile"]) -> Iterable[Callable[..., Any]]: + yield cls.validate + + @classmethod + def validate(cls: Type["UploadFile"], v: Any) -> Any: + if not isinstance(v, StarletteUploadFile): + raise ValueError(f"Expected UploadFile, received: {type(v)}") + return v + + @classmethod + def _validate(cls, __input_value: Any, _: Any) -> "UploadFile": + if not isinstance(__input_value, StarletteUploadFile): + raise ValueError(f"Expected UploadFile, received: {type(__input_value)}") + return cast(UploadFile, __input_value) + + if not PYDANTIC_V2: + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update({"type": "string", "format": "binary"}) + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + return {"type": "string", "format": "binary"} + + @classmethod + def __get_pydantic_core_schema__( + cls, source: Type[Any], handler: Callable[[Any], CoreSchema] + ) -> CoreSchema: + return with_info_plain_validator_function(cls._validate) + + +class DefaultPlaceholder: + """ + You shouldn't use this class directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. + """ + + def __init__(self, value: Any): + self.value = value + + def __bool__(self) -> bool: + return bool(self.value) + + def __eq__(self, o: object) -> bool: + return isinstance(o, DefaultPlaceholder) and o.value == self.value + + +DefaultType = TypeVar("DefaultType") + + +def Default(value: DefaultType) -> DefaultType: + """ + You shouldn't use this function directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. 
+ """ + return DefaultPlaceholder(value) # type: ignore diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__init__.py b/venv/lib/python3.12/site-packages/fastapi/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f71750b Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..d992445 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..c65124f Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/models.py b/venv/lib/python3.12/site-packages/fastapi/dependencies/models.py new file mode 100644 index 0000000..61ef006 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/dependencies/models.py @@ -0,0 +1,58 @@ +from typing import Any, Callable, List, Optional, Sequence + +from fastapi._compat import ModelField +from fastapi.security.base import SecurityBase + + +class SecurityRequirement: + def __init__( + self, security_scheme: SecurityBase, scopes: Optional[Sequence[str]] = None + ): + self.security_scheme = security_scheme + self.scopes = scopes + + +class Dependant: + def __init__( + self, + *, + path_params: Optional[List[ModelField]] = None, + query_params: Optional[List[ModelField]] = None, + header_params: Optional[List[ModelField]] = None, + cookie_params: Optional[List[ModelField]] = None, + body_params: Optional[List[ModelField]] = None, + dependencies: Optional[List["Dependant"]] = None, + security_schemes: Optional[List[SecurityRequirement]] = None, + name: Optional[str] = None, + call: Optional[Callable[..., Any]] = None, + request_param_name: Optional[str] = None, + websocket_param_name: Optional[str] = None, + http_connection_param_name: Optional[str] = None, + response_param_name: Optional[str] = None, + background_tasks_param_name: Optional[str] = None, + security_scopes_param_name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, + use_cache: bool = True, + path: Optional[str] = None, + ) -> None: + self.path_params = path_params or [] + self.query_params = query_params or [] + self.header_params = header_params or [] + self.cookie_params = cookie_params or [] + self.body_params = body_params or [] + self.dependencies = dependencies or [] + self.security_requirements = security_schemes or [] + self.request_param_name = request_param_name + self.websocket_param_name = websocket_param_name + self.http_connection_param_name = http_connection_param_name + self.response_param_name = response_param_name + self.background_tasks_param_name = background_tasks_param_name + self.security_scopes = security_scopes + self.security_scopes_param_name = 
security_scopes_param_name + self.name = name + self.call = call + self.use_cache = use_cache + # Store the path to be able to re-generate a dependable from it in overrides + self.path = path + # Save the cache key at creation to optimize performance + self.cache_key = (self.call, tuple(sorted(set(self.security_scopes or [])))) diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py b/venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py new file mode 100644 index 0000000..b734734 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py @@ -0,0 +1,816 @@ +import inspect +from contextlib import AsyncExitStack, contextmanager +from copy import deepcopy +from typing import ( + Any, + Callable, + Coroutine, + Dict, + ForwardRef, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import anyio +from fastapi import params +from fastapi._compat import ( + PYDANTIC_V2, + ErrorWrapper, + ModelField, + Required, + Undefined, + _regenerate_error_with_loc, + copy_field_info, + create_body_model, + evaluate_forwardref, + field_annotation_is_scalar, + get_annotation_from_field_info, + get_missing_field_error, + is_bytes_field, + is_bytes_sequence_field, + is_scalar_field, + is_scalar_sequence_field, + is_sequence_field, + is_uploadfile_or_nonable_uploadfile_annotation, + is_uploadfile_sequence_annotation, + lenient_issubclass, + sequence_types, + serialize_sequence_value, + value_is_sequence, +) +from fastapi.background import BackgroundTasks +from fastapi.concurrency import ( + asynccontextmanager, + contextmanager_in_threadpool, +) +from fastapi.dependencies.models import Dependant, SecurityRequirement +from fastapi.logger import logger +from fastapi.security.base import SecurityBase +from fastapi.security.oauth2 import OAuth2, SecurityScopes +from fastapi.security.open_id_connect_url import OpenIdConnect +from fastapi.utils import create_response_field, get_path_param_names +from pydantic.fields import FieldInfo +from starlette.background import BackgroundTasks as StarletteBackgroundTasks +from starlette.concurrency import run_in_threadpool +from starlette.datastructures import FormData, Headers, QueryParams, UploadFile +from starlette.requests import HTTPConnection, Request +from starlette.responses import Response +from starlette.websockets import WebSocket +from typing_extensions import Annotated, get_args, get_origin + +multipart_not_installed_error = ( + 'Form data requires "python-multipart" to be installed. \n' + 'You can install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) +multipart_incorrect_install_error = ( + 'Form data requires "python-multipart" to be installed. ' + 'It seems you installed "multipart" instead. 
\n' + 'You can remove "multipart" with: \n\n' + "pip uninstall multipart\n\n" + 'And then install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) + + +def check_file_field(field: ModelField) -> None: + field_info = field.field_info + if isinstance(field_info, params.Form): + try: + # __version__ is available in both multiparts, and can be mocked + from multipart import __version__ # type: ignore + + assert __version__ + try: + # parse_options_header is only available in the right multipart + from multipart.multipart import parse_options_header # type: ignore + + assert parse_options_header + except ImportError: + logger.error(multipart_incorrect_install_error) + raise RuntimeError(multipart_incorrect_install_error) from None + except ImportError: + logger.error(multipart_not_installed_error) + raise RuntimeError(multipart_not_installed_error) from None + + +def get_param_sub_dependant( + *, + param_name: str, + depends: params.Depends, + path: str, + security_scopes: Optional[List[str]] = None, +) -> Dependant: + assert depends.dependency + return get_sub_dependant( + depends=depends, + dependency=depends.dependency, + path=path, + name=param_name, + security_scopes=security_scopes, + ) + + +def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant: + assert callable( + depends.dependency + ), "A parameter-less dependency must have a callable dependency" + return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path) + + +def get_sub_dependant( + *, + depends: params.Depends, + dependency: Callable[..., Any], + path: str, + name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, +) -> Dependant: + security_requirement = None + security_scopes = security_scopes or [] + if isinstance(depends, params.Security): + dependency_scopes = depends.scopes + security_scopes.extend(dependency_scopes) + if isinstance(dependency, SecurityBase): + use_scopes: List[str] = [] + if isinstance(dependency, (OAuth2, OpenIdConnect)): + use_scopes = security_scopes + security_requirement = SecurityRequirement( + security_scheme=dependency, scopes=use_scopes + ) + sub_dependant = get_dependant( + path=path, + call=dependency, + name=name, + security_scopes=security_scopes, + use_cache=depends.use_cache, + ) + if security_requirement: + sub_dependant.security_requirements.append(security_requirement) + return sub_dependant + + +CacheKey = Tuple[Optional[Callable[..., Any]], Tuple[str, ...]] + + +def get_flat_dependant( + dependant: Dependant, + *, + skip_repeats: bool = False, + visited: Optional[List[CacheKey]] = None, +) -> Dependant: + if visited is None: + visited = [] + visited.append(dependant.cache_key) + + flat_dependant = Dependant( + path_params=dependant.path_params.copy(), + query_params=dependant.query_params.copy(), + header_params=dependant.header_params.copy(), + cookie_params=dependant.cookie_params.copy(), + body_params=dependant.body_params.copy(), + security_schemes=dependant.security_requirements.copy(), + use_cache=dependant.use_cache, + path=dependant.path, + ) + for sub_dependant in dependant.dependencies: + if skip_repeats and sub_dependant.cache_key in visited: + continue + flat_sub = get_flat_dependant( + sub_dependant, skip_repeats=skip_repeats, visited=visited + ) + flat_dependant.path_params.extend(flat_sub.path_params) + flat_dependant.query_params.extend(flat_sub.query_params) + flat_dependant.header_params.extend(flat_sub.header_params) + 
flat_dependant.cookie_params.extend(flat_sub.cookie_params) + flat_dependant.body_params.extend(flat_sub.body_params) + flat_dependant.security_requirements.extend(flat_sub.security_requirements) + return flat_dependant + + +def get_flat_params(dependant: Dependant) -> List[ModelField]: + flat_dependant = get_flat_dependant(dependant, skip_repeats=True) + return ( + flat_dependant.path_params + + flat_dependant.query_params + + flat_dependant.header_params + + flat_dependant.cookie_params + ) + + +def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: + signature = inspect.signature(call) + globalns = getattr(call, "__globals__", {}) + typed_params = [ + inspect.Parameter( + name=param.name, + kind=param.kind, + default=param.default, + annotation=get_typed_annotation(param.annotation, globalns), + ) + for param in signature.parameters.values() + ] + typed_signature = inspect.Signature(typed_params) + return typed_signature + + +def get_typed_annotation(annotation: Any, globalns: Dict[str, Any]) -> Any: + if isinstance(annotation, str): + annotation = ForwardRef(annotation) + annotation = evaluate_forwardref(annotation, globalns, globalns) + return annotation + + +def get_typed_return_annotation(call: Callable[..., Any]) -> Any: + signature = inspect.signature(call) + annotation = signature.return_annotation + + if annotation is inspect.Signature.empty: + return None + + globalns = getattr(call, "__globals__", {}) + return get_typed_annotation(annotation, globalns) + + +def get_dependant( + *, + path: str, + call: Callable[..., Any], + name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, + use_cache: bool = True, +) -> Dependant: + path_param_names = get_path_param_names(path) + endpoint_signature = get_typed_signature(call) + signature_params = endpoint_signature.parameters + dependant = Dependant( + call=call, + name=name, + path=path, + security_scopes=security_scopes, + use_cache=use_cache, + ) + for param_name, param in signature_params.items(): + is_path_param = param_name in path_param_names + type_annotation, depends, param_field = analyze_param( + param_name=param_name, + annotation=param.annotation, + value=param.default, + is_path_param=is_path_param, + ) + if depends is not None: + sub_dependant = get_param_sub_dependant( + param_name=param_name, + depends=depends, + path=path, + security_scopes=security_scopes, + ) + dependant.dependencies.append(sub_dependant) + continue + if add_non_field_param_to_dependency( + param_name=param_name, + type_annotation=type_annotation, + dependant=dependant, + ): + assert ( + param_field is None + ), f"Cannot specify multiple FastAPI annotations for {param_name!r}" + continue + assert param_field is not None + if is_body_param(param_field=param_field, is_path_param=is_path_param): + dependant.body_params.append(param_field) + else: + add_param_to_fields(field=param_field, dependant=dependant) + return dependant + + +def add_non_field_param_to_dependency( + *, param_name: str, type_annotation: Any, dependant: Dependant +) -> Optional[bool]: + if lenient_issubclass(type_annotation, Request): + dependant.request_param_name = param_name + return True + elif lenient_issubclass(type_annotation, WebSocket): + dependant.websocket_param_name = param_name + return True + elif lenient_issubclass(type_annotation, HTTPConnection): + dependant.http_connection_param_name = param_name + return True + elif lenient_issubclass(type_annotation, Response): + dependant.response_param_name = param_name + return True + elif 
lenient_issubclass(type_annotation, StarletteBackgroundTasks): + dependant.background_tasks_param_name = param_name + return True + elif lenient_issubclass(type_annotation, SecurityScopes): + dependant.security_scopes_param_name = param_name + return True + return None + + +def analyze_param( + *, + param_name: str, + annotation: Any, + value: Any, + is_path_param: bool, +) -> Tuple[Any, Optional[params.Depends], Optional[ModelField]]: + field_info = None + depends = None + type_annotation: Any = Any + use_annotation: Any = Any + if annotation is not inspect.Signature.empty: + use_annotation = annotation + type_annotation = annotation + if get_origin(use_annotation) is Annotated: + annotated_args = get_args(annotation) + type_annotation = annotated_args[0] + fastapi_annotations = [ + arg + for arg in annotated_args[1:] + if isinstance(arg, (FieldInfo, params.Depends)) + ] + fastapi_specific_annotations = [ + arg + for arg in fastapi_annotations + if isinstance(arg, (params.Param, params.Body, params.Depends)) + ] + if fastapi_specific_annotations: + fastapi_annotation: Union[ + FieldInfo, params.Depends, None + ] = fastapi_specific_annotations[-1] + else: + fastapi_annotation = None + if isinstance(fastapi_annotation, FieldInfo): + # Copy `field_info` because we mutate `field_info.default` below. + field_info = copy_field_info( + field_info=fastapi_annotation, annotation=use_annotation + ) + assert field_info.default is Undefined or field_info.default is Required, ( + f"`{field_info.__class__.__name__}` default value cannot be set in" + f" `Annotated` for {param_name!r}. Set the default value with `=` instead." + ) + if value is not inspect.Signature.empty: + assert not is_path_param, "Path parameters cannot have default values" + field_info.default = value + else: + field_info.default = Required + elif isinstance(fastapi_annotation, params.Depends): + depends = fastapi_annotation + + if isinstance(value, params.Depends): + assert depends is None, ( + "Cannot specify `Depends` in `Annotated` and default value" + f" together for {param_name!r}" + ) + assert field_info is None, ( + "Cannot specify a FastAPI annotation in `Annotated` and `Depends` as a" + f" default value together for {param_name!r}" + ) + depends = value + elif isinstance(value, FieldInfo): + assert field_info is None, ( + "Cannot specify FastAPI annotations in `Annotated` and default value" + f" together for {param_name!r}" + ) + field_info = value + if PYDANTIC_V2: + field_info.annotation = type_annotation + + if depends is not None and depends.dependency is None: + depends.dependency = type_annotation + + if lenient_issubclass( + type_annotation, + ( + Request, + WebSocket, + HTTPConnection, + Response, + StarletteBackgroundTasks, + SecurityScopes, + ), + ): + assert depends is None, f"Cannot specify `Depends` for type {type_annotation!r}" + assert ( + field_info is None + ), f"Cannot specify FastAPI annotation for type {type_annotation!r}" + elif field_info is None and depends is None: + default_value = value if value is not inspect.Signature.empty else Required + if is_path_param: + # We might check here that `default_value is Required`, but the fact is that the same + # parameter might sometimes be a path parameter and sometimes not. See + # `tests/test_infer_param_optionality.py` for an example. 
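+            # Illustration (hypothetical routes): the same dependency used by
+            #     @app.get("/users/{user_id}/items")  # `user_id` is a path param here
+            #     @app.get("/items")                  # ...but not here
+            # may give `user_id` a default, even though that default is never
+            # used when it actually is a path parameter.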
+ field_info = params.Path(annotation=use_annotation) + elif is_uploadfile_or_nonable_uploadfile_annotation( + type_annotation + ) or is_uploadfile_sequence_annotation(type_annotation): + field_info = params.File(annotation=use_annotation, default=default_value) + elif not field_annotation_is_scalar(annotation=type_annotation): + field_info = params.Body(annotation=use_annotation, default=default_value) + else: + field_info = params.Query(annotation=use_annotation, default=default_value) + + field = None + if field_info is not None: + if is_path_param: + assert isinstance(field_info, params.Path), ( + f"Cannot use `{field_info.__class__.__name__}` for path param" + f" {param_name!r}" + ) + elif ( + isinstance(field_info, params.Param) + and getattr(field_info, "in_", None) is None + ): + field_info.in_ = params.ParamTypes.query + use_annotation_from_field_info = get_annotation_from_field_info( + use_annotation, + field_info, + param_name, + ) + if not field_info.alias and getattr(field_info, "convert_underscores", None): + alias = param_name.replace("_", "-") + else: + alias = field_info.alias or param_name + field_info.alias = alias + field = create_response_field( + name=param_name, + type_=use_annotation_from_field_info, + default=field_info.default, + alias=alias, + required=field_info.default in (Required, Undefined), + field_info=field_info, + ) + + return type_annotation, depends, field + + +def is_body_param(*, param_field: ModelField, is_path_param: bool) -> bool: + if is_path_param: + assert is_scalar_field( + field=param_field + ), "Path params must be of one of the supported types" + return False + elif is_scalar_field(field=param_field): + return False + elif isinstance( + param_field.field_info, (params.Query, params.Header) + ) and is_scalar_sequence_field(param_field): + return False + else: + assert isinstance( + param_field.field_info, params.Body + ), f"Param: {param_field.name} can only be a request body, using Body()" + return True + + +def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: + field_info = field.field_info + field_info_in = getattr(field_info, "in_", None) + if field_info_in == params.ParamTypes.path: + dependant.path_params.append(field) + elif field_info_in == params.ParamTypes.query: + dependant.query_params.append(field) + elif field_info_in == params.ParamTypes.header: + dependant.header_params.append(field) + else: + assert ( + field_info_in == params.ParamTypes.cookie + ), f"non-body parameters must be in path, query, header or cookie: {field.name}" + dependant.cookie_params.append(field) + + +def is_coroutine_callable(call: Callable[..., Any]) -> bool: + if inspect.isroutine(call): + return inspect.iscoroutinefunction(call) + if inspect.isclass(call): + return False + dunder_call = getattr(call, "__call__", None) # noqa: B004 + return inspect.iscoroutinefunction(dunder_call) + + +def is_async_gen_callable(call: Callable[..., Any]) -> bool: + if inspect.isasyncgenfunction(call): + return True + dunder_call = getattr(call, "__call__", None) # noqa: B004 + return inspect.isasyncgenfunction(dunder_call) + + +def is_gen_callable(call: Callable[..., Any]) -> bool: + if inspect.isgeneratorfunction(call): + return True + dunder_call = getattr(call, "__call__", None) # noqa: B004 + return inspect.isgeneratorfunction(dunder_call) + + +async def solve_generator( + *, call: Callable[..., Any], stack: AsyncExitStack, sub_values: Dict[str, Any] +) -> Any: + if is_gen_callable(call): + cm = 
contextmanager_in_threadpool(contextmanager(call)(**sub_values)) + elif is_async_gen_callable(call): + cm = asynccontextmanager(call)(**sub_values) + return await stack.enter_async_context(cm) + + +async def solve_dependencies( + *, + request: Union[Request, WebSocket], + dependant: Dependant, + body: Optional[Union[Dict[str, Any], FormData]] = None, + background_tasks: Optional[StarletteBackgroundTasks] = None, + response: Optional[Response] = None, + dependency_overrides_provider: Optional[Any] = None, + dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None, + async_exit_stack: AsyncExitStack, +) -> Tuple[ + Dict[str, Any], + List[Any], + Optional[StarletteBackgroundTasks], + Response, + Dict[Tuple[Callable[..., Any], Tuple[str]], Any], +]: + values: Dict[str, Any] = {} + errors: List[Any] = [] + if response is None: + response = Response() + del response.headers["content-length"] + response.status_code = None # type: ignore + dependency_cache = dependency_cache or {} + sub_dependant: Dependant + for sub_dependant in dependant.dependencies: + sub_dependant.call = cast(Callable[..., Any], sub_dependant.call) + sub_dependant.cache_key = cast( + Tuple[Callable[..., Any], Tuple[str]], sub_dependant.cache_key + ) + call = sub_dependant.call + use_sub_dependant = sub_dependant + if ( + dependency_overrides_provider + and dependency_overrides_provider.dependency_overrides + ): + original_call = sub_dependant.call + call = getattr( + dependency_overrides_provider, "dependency_overrides", {} + ).get(original_call, original_call) + use_path: str = sub_dependant.path # type: ignore + use_sub_dependant = get_dependant( + path=use_path, + call=call, + name=sub_dependant.name, + security_scopes=sub_dependant.security_scopes, + ) + + solved_result = await solve_dependencies( + request=request, + dependant=use_sub_dependant, + body=body, + background_tasks=background_tasks, + response=response, + dependency_overrides_provider=dependency_overrides_provider, + dependency_cache=dependency_cache, + async_exit_stack=async_exit_stack, + ) + ( + sub_values, + sub_errors, + background_tasks, + _, # the subdependency returns the same response we have + sub_dependency_cache, + ) = solved_result + dependency_cache.update(sub_dependency_cache) + if sub_errors: + errors.extend(sub_errors) + continue + if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache: + solved = dependency_cache[sub_dependant.cache_key] + elif is_gen_callable(call) or is_async_gen_callable(call): + solved = await solve_generator( + call=call, stack=async_exit_stack, sub_values=sub_values + ) + elif is_coroutine_callable(call): + solved = await call(**sub_values) + else: + solved = await run_in_threadpool(call, **sub_values) + if sub_dependant.name is not None: + values[sub_dependant.name] = solved + if sub_dependant.cache_key not in dependency_cache: + dependency_cache[sub_dependant.cache_key] = solved + path_values, path_errors = request_params_to_args( + dependant.path_params, request.path_params + ) + query_values, query_errors = request_params_to_args( + dependant.query_params, request.query_params + ) + header_values, header_errors = request_params_to_args( + dependant.header_params, request.headers + ) + cookie_values, cookie_errors = request_params_to_args( + dependant.cookie_params, request.cookies + ) + values.update(path_values) + values.update(query_values) + values.update(header_values) + values.update(cookie_values) + errors += path_errors + query_errors + header_errors + 
cookie_errors + if dependant.body_params: + ( + body_values, + body_errors, + ) = await request_body_to_args( # body_params checked above + required_params=dependant.body_params, received_body=body + ) + values.update(body_values) + errors.extend(body_errors) + if dependant.http_connection_param_name: + values[dependant.http_connection_param_name] = request + if dependant.request_param_name and isinstance(request, Request): + values[dependant.request_param_name] = request + elif dependant.websocket_param_name and isinstance(request, WebSocket): + values[dependant.websocket_param_name] = request + if dependant.background_tasks_param_name: + if background_tasks is None: + background_tasks = BackgroundTasks() + values[dependant.background_tasks_param_name] = background_tasks + if dependant.response_param_name: + values[dependant.response_param_name] = response + if dependant.security_scopes_param_name: + values[dependant.security_scopes_param_name] = SecurityScopes( + scopes=dependant.security_scopes + ) + return values, errors, background_tasks, response, dependency_cache + + +def request_params_to_args( + required_params: Sequence[ModelField], + received_params: Union[Mapping[str, Any], QueryParams, Headers], +) -> Tuple[Dict[str, Any], List[Any]]: + values = {} + errors = [] + for field in required_params: + if is_scalar_sequence_field(field) and isinstance( + received_params, (QueryParams, Headers) + ): + value = received_params.getlist(field.alias) or field.default + else: + value = received_params.get(field.alias) + field_info = field.field_info + assert isinstance( + field_info, params.Param + ), "Params must be subclasses of Param" + loc = (field_info.in_.value, field.alias) + if value is None: + if field.required: + errors.append(get_missing_field_error(loc=loc)) + else: + values[field.name] = deepcopy(field.default) + continue + v_, errors_ = field.validate(value, values, loc=loc) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + new_errors = _regenerate_error_with_loc(errors=errors_, loc_prefix=()) + errors.extend(new_errors) + else: + values[field.name] = v_ + return values, errors + + +async def request_body_to_args( + required_params: List[ModelField], + received_body: Optional[Union[Dict[str, Any], FormData]], +) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + values = {} + errors: List[Dict[str, Any]] = [] + if required_params: + field = required_params[0] + field_info = field.field_info + embed = getattr(field_info, "embed", None) + field_alias_omitted = len(required_params) == 1 and not embed + if field_alias_omitted: + received_body = {field.alias: received_body} + + for field in required_params: + loc: Tuple[str, ...] 
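+            # loc mirrors where the value came from in the request:
+            # ("body",) when the single body param's alias is omitted,
+            # ("body", "<alias>") when the field is embedded under its alias.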
+ if field_alias_omitted: + loc = ("body",) + else: + loc = ("body", field.alias) + + value: Optional[Any] = None + if received_body is not None: + if (is_sequence_field(field)) and isinstance(received_body, FormData): + value = received_body.getlist(field.alias) + else: + try: + value = received_body.get(field.alias) + except AttributeError: + errors.append(get_missing_field_error(loc)) + continue + if ( + value is None + or (isinstance(field_info, params.Form) and value == "") + or ( + isinstance(field_info, params.Form) + and is_sequence_field(field) + and len(value) == 0 + ) + ): + if field.required: + errors.append(get_missing_field_error(loc)) + else: + values[field.name] = deepcopy(field.default) + continue + if ( + isinstance(field_info, params.File) + and is_bytes_field(field) + and isinstance(value, UploadFile) + ): + value = await value.read() + elif ( + is_bytes_sequence_field(field) + and isinstance(field_info, params.File) + and value_is_sequence(value) + ): + # For types + assert isinstance(value, sequence_types) # type: ignore[arg-type] + results: List[Union[bytes, str]] = [] + + async def process_fn( + fn: Callable[[], Coroutine[Any, Any, Any]] + ) -> None: + result = await fn() + results.append(result) # noqa: B023 + + async with anyio.create_task_group() as tg: + for sub_value in value: + tg.start_soon(process_fn, sub_value.read) + value = serialize_sequence_value(field=field, value=results) + + v_, errors_ = field.validate(value, values, loc=loc) + + if isinstance(errors_, list): + errors.extend(errors_) + elif errors_: + errors.append(errors_) + else: + values[field.name] = v_ + return values, errors + + +def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]: + flat_dependant = get_flat_dependant(dependant) + if not flat_dependant.body_params: + return None + first_param = flat_dependant.body_params[0] + field_info = first_param.field_info + embed = getattr(field_info, "embed", None) + body_param_names_set = {param.name for param in flat_dependant.body_params} + if len(body_param_names_set) == 1 and not embed: + check_file_field(first_param) + return first_param + # If one field requires to embed, all have to be embedded + # in case a sub-dependency is evaluated with a single unique body field + # That is combined (embedded) with other body fields + for param in flat_dependant.body_params: + setattr(param.field_info, "embed", True) # noqa: B010 + model_name = "Body_" + name + BodyModel = create_body_model( + fields=flat_dependant.body_params, model_name=model_name + ) + required = any(True for f in flat_dependant.body_params if f.required) + BodyFieldInfo_kwargs: Dict[str, Any] = { + "annotation": BodyModel, + "alias": "body", + } + if not required: + BodyFieldInfo_kwargs["default"] = None + if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params): + BodyFieldInfo: Type[params.Body] = params.File + elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params): + BodyFieldInfo = params.Form + else: + BodyFieldInfo = params.Body + + body_param_media_types = [ + f.field_info.media_type + for f in flat_dependant.body_params + if isinstance(f.field_info, params.Body) + ] + if len(set(body_param_media_types)) == 1: + BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0] + final_field = create_response_field( + name="body", + type_=BodyModel, + required=required, + alias="body", + field_info=BodyFieldInfo(**BodyFieldInfo_kwargs), + ) + check_file_field(final_field) + return final_field diff --git 
a/venv/lib/python3.12/site-packages/fastapi/encoders.py b/venv/lib/python3.12/site-packages/fastapi/encoders.py new file mode 100644 index 0000000..e501713 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/encoders.py @@ -0,0 +1,341 @@ +import dataclasses +import datetime +from collections import defaultdict, deque +from decimal import Decimal +from enum import Enum +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, +) +from pathlib import Path, PurePath +from re import Pattern +from types import GeneratorType +from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union +from uuid import UUID + +from fastapi.types import IncEx +from pydantic import BaseModel +from pydantic.color import Color +from pydantic.networks import AnyUrl, NameEmail +from pydantic.types import SecretBytes, SecretStr +from typing_extensions import Annotated, Doc # type: ignore [attr-defined] + +from ._compat import PYDANTIC_V2, Url, _model_dump + + +# Taken from Pydantic v1 as is +def isoformat(o: Union[datetime.date, datetime.time]) -> str: + return o.isoformat() + + +# Taken from Pydantic v1 as is +# TODO: pv2 should this return strings instead? +def decimal_encoder(dec_value: Decimal) -> Union[int, float]: + """ + Encodes a Decimal as int if there's no exponent, otherwise float + + This is useful when we use ConstrainedDecimal to represent Numeric(x,0) + where an integer (but not int typed) is used. Encoding this as a float + results in failed round-tripping between encode and parse. + Our Id type is a prime example of this. + + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + if dec_value.as_tuple().exponent >= 0: # type: ignore[operator] + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, + Url: str, + AnyUrl: str, +} + + +def generate_encoders_by_class_tuples( + type_encoder_map: Dict[Any, Callable[[Any], Any]] +) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( + tuple + ) + for type_, encoder in type_encoder_map.items(): + encoders_by_class_tuples[encoder] += (type_,) + return encoders_by_class_tuples + + +encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) + + +def jsonable_encoder( + obj: Annotated[ + Any, + Doc( + """ + The input object to convert to JSON. + """ + ), + ], + include: Annotated[ + Optional[IncEx], + Doc( + """ + Pydantic's `include` parameter, passed to Pydantic models to set the + fields to include. + """ + ), + ] = None, + exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Pydantic's `exclude` parameter, passed to Pydantic models to set the + fields to exclude.
+ """ + ), + ] = None, + by_alias: Annotated[ + bool, + Doc( + """ + Pydantic's `by_alias` parameter, passed to Pydantic models to define if + the output should use the alias names (when provided) or the Python + attribute names. In an API, if you set an alias, it's probably because you + want to use it in the result, so you probably want to leave this set to + `True`. + """ + ), + ] = True, + exclude_unset: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_unset` parameter, passed to Pydantic models to define + if it should exclude from the output the fields that were not explicitly + set (and that only had their default values). + """ + ), + ] = False, + exclude_defaults: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define + if it should exclude from the output the fields that had the same default + value, even when they were explicitly set. + """ + ), + ] = False, + exclude_none: Annotated[ + bool, + Doc( + """ + Pydantic's `exclude_none` parameter, passed to Pydantic models to define + if it should exclude from the output any fields that have a `None` value. + """ + ), + ] = False, + custom_encoder: Annotated[ + Optional[Dict[Any, Callable[[Any], Any]]], + Doc( + """ + Pydantic's `custom_encoder` parameter, passed to Pydantic models to define + a custom encoder. + """ + ), + ] = None, + sqlalchemy_safe: Annotated[ + bool, + Doc( + """ + Exclude from the output any fields that start with the name `_sa`. + + This is mainly a hack for compatibility with SQLAlchemy objects, they + store internal SQLAlchemy-specific state in attributes named with `_sa`, + and those objects can't (and shouldn't be) serialized to JSON. + """ + ), + ] = True, +) -> Any: + """ + Convert any object to something that can be encoded in JSON. + + This is used internally by FastAPI to make sure anything you return can be + encoded as JSON before it is sent to the client. + + You can also use it yourself, for example to convert objects before saving them + in a database that supports only JSON. + + Read more about it in the + [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/). 
+ """ + custom_encoder = custom_encoder or {} + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if include is not None and not isinstance(include, (set, dict)): + include = set(include) + if exclude is not None and not isinstance(exclude, (set, dict)): + exclude = set(exclude) + if isinstance(obj, BaseModel): + # TODO: remove when deprecating Pydantic v1 + encoders: Dict[Any, Any] = {} + if not PYDANTIC_V2: + encoders = getattr(obj.__config__, "json_encoders", {}) # type: ignore[attr-defined] + if custom_encoder: + encoders.update(custom_encoder) + obj_dict = _model_dump( + obj, + mode="json", + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + ) + if "__root__" in obj_dict: + obj_dict = obj_dict["__root__"] + return jsonable_encoder( + obj_dict, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + # TODO: remove when deprecating Pydantic v1 + custom_encoder=encoders, + sqlalchemy_safe=sqlalchemy_safe, + ) + if dataclasses.is_dataclass(obj): + obj_dict = dataclasses.asdict(obj) + return jsonable_encoder( + obj_dict, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + if isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, dict): + encoded_dict = {} + allowed_keys = set(obj.keys()) + if include is not None: + allowed_keys &= set(include) + if exclude is not None: + allowed_keys -= set(exclude) + for key, value in obj.items(): + if ( + ( + not sqlalchemy_safe + or (not isinstance(key, str)) + or (not key.startswith("_sa")) + ) + and (value is not None or not exclude_none) + and key in allowed_keys + ): + encoded_key = jsonable_encoder( + key, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_value = jsonable_encoder( + value, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)): + encoded_list = [] + for item in obj: + encoded_list.append( + jsonable_encoder( + item, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + ) + return encoded_list + + if type(obj) in ENCODERS_BY_TYPE: + return ENCODERS_BY_TYPE[type(obj)](obj) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(obj, classes_tuple): + return encoder(obj) + + try: + data = dict(obj) + except Exception as e: + errors: List[Exception] = [] + errors.append(e) + try: + data = vars(obj) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder( + data, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + 
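# Last-resort path: `obj` matched no registered encoder above, so it was
# coerced to a plain dict via dict() or vars(); re-encoding that dict with
# the same options lets its nested values hit the type-specific encoders.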
exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) diff --git a/venv/lib/python3.12/site-packages/fastapi/exception_handlers.py b/venv/lib/python3.12/site-packages/fastapi/exception_handlers.py new file mode 100644 index 0000000..6c2ba7f --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/exception_handlers.py @@ -0,0 +1,34 @@ +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError +from fastapi.utils import is_body_allowed_for_status_code +from fastapi.websockets import WebSocket +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY, WS_1008_POLICY_VIOLATION + + +async def http_exception_handler(request: Request, exc: HTTPException) -> Response: + headers = getattr(exc, "headers", None) + if not is_body_allowed_for_status_code(exc.status_code): + return Response(status_code=exc.status_code, headers=headers) + return JSONResponse( + {"detail": exc.detail}, status_code=exc.status_code, headers=headers + ) + + +async def request_validation_exception_handler( + request: Request, exc: RequestValidationError +) -> JSONResponse: + return JSONResponse( + status_code=HTTP_422_UNPROCESSABLE_ENTITY, + content={"detail": jsonable_encoder(exc.errors())}, + ) + + +async def websocket_request_validation_exception_handler( + websocket: WebSocket, exc: WebSocketRequestValidationError +) -> None: + await websocket.close( + code=WS_1008_POLICY_VIOLATION, reason=jsonable_encoder(exc.errors()) + ) diff --git a/venv/lib/python3.12/site-packages/fastapi/exceptions.py b/venv/lib/python3.12/site-packages/fastapi/exceptions.py new file mode 100644 index 0000000..680d288 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/exceptions.py @@ -0,0 +1,176 @@ +from typing import Any, Dict, Optional, Sequence, Type, Union + +from pydantic import BaseModel, create_model +from starlette.exceptions import HTTPException as StarletteHTTPException +from starlette.exceptions import WebSocketException as StarletteWebSocketException +from typing_extensions import Annotated, Doc # type: ignore [attr-defined] + + +class HTTPException(StarletteHTTPException): + """ + An HTTP exception you can raise in your own code to show errors to the client. + + This is for client errors, invalid authentication, invalid data, etc. Not for server + errors in your code. + + Read more about it in the + [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). + + ## Example + + ```python + from fastapi import FastAPI, HTTPException + + app = FastAPI() + + items = {"foo": "The Foo Wrestlers"} + + + @app.get("/items/{item_id}") + async def read_item(item_id: str): + if item_id not in items: + raise HTTPException(status_code=404, detail="Item not found") + return {"item": items[item_id]} + ``` + """ + + def __init__( + self, + status_code: Annotated[ + int, + Doc( + """ + HTTP status code to send to the client. + """ + ), + ], + detail: Annotated[ + Any, + Doc( + """ + Any data to be sent to the client in the `detail` key of the JSON + response. + """ + ), + ] = None, + headers: Annotated[ + Optional[Dict[str, str]], + Doc( + """ + Any headers to send to the client in the response. 
+ """ + ), + ] = None, + ) -> None: + super().__init__(status_code=status_code, detail=detail, headers=headers) + + +class WebSocketException(StarletteWebSocketException): + """ + A WebSocket exception you can raise in your own code to show errors to the client. + + This is for client errors, invalid authentication, invalid data, etc. Not for server + errors in your code. + + Read more about it in the + [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import ( + Cookie, + FastAPI, + WebSocket, + WebSocketException, + status, + ) + + app = FastAPI() + + @app.websocket("/items/{item_id}/ws") + async def websocket_endpoint( + *, + websocket: WebSocket, + session: Annotated[str | None, Cookie()] = None, + item_id: str, + ): + if session is None: + raise WebSocketException(code=status.WS_1008_POLICY_VIOLATION) + await websocket.accept() + while True: + data = await websocket.receive_text() + await websocket.send_text(f"Session cookie is: {session}") + await websocket.send_text(f"Message text was: {data}, for item ID: {item_id}") + ``` + """ + + def __init__( + self, + code: Annotated[ + int, + Doc( + """ + A closing code from the + [valid codes defined in the specification](https://datatracker.ietf.org/doc/html/rfc6455#section-7.4.1). + """ + ), + ], + reason: Annotated[ + Union[str, None], + Doc( + """ + The reason to close the WebSocket connection. + + It is UTF-8-encoded data. The interpretation of the reason is up to the + application, it is not specified by the WebSocket specification. + + It could contain text that could be human-readable or interpretable + by the client code, etc. + """ + ), + ] = None, + ) -> None: + super().__init__(code=code, reason=reason) + + +RequestErrorModel: Type[BaseModel] = create_model("Request") +WebSocketErrorModel: Type[BaseModel] = create_model("WebSocket") + + +class FastAPIError(RuntimeError): + """ + A generic, FastAPI-specific error. 
+ """ + + +class ValidationException(Exception): + def __init__(self, errors: Sequence[Any]) -> None: + self._errors = errors + + def errors(self) -> Sequence[Any]: + return self._errors + + +class RequestValidationError(ValidationException): + def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None: + super().__init__(errors) + self.body = body + + +class WebSocketRequestValidationError(ValidationException): + pass + + +class ResponseValidationError(ValidationException): + def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None: + super().__init__(errors) + self.body = body + + def __str__(self) -> str: + message = f"{len(self._errors)} validation errors:\n" + for err in self._errors: + message += f" {err}\n" + return message diff --git a/venv/lib/python3.12/site-packages/fastapi/logger.py b/venv/lib/python3.12/site-packages/fastapi/logger.py new file mode 100644 index 0000000..5b2c4ad --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/logger.py @@ -0,0 +1,3 @@ +import logging + +logger = logging.getLogger("fastapi") diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py b/venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py new file mode 100644 index 0000000..620296d --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py @@ -0,0 +1 @@ +from starlette.middleware import Middleware as Middleware diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c01c436 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/cors.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/cors.cpython-312.pyc new file mode 100644 index 0000000..eb85707 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/cors.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/gzip.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/gzip.cpython-312.pyc new file mode 100644 index 0000000..a4bcf61 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/gzip.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc new file mode 100644 index 0000000..45496db Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc new file mode 100644 index 0000000..eff3315 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-312.pyc new file mode 100644 index 0000000..4d17da4 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/cors.py b/venv/lib/python3.12/site-packages/fastapi/middleware/cors.py new file mode 100644 index 0000000..8dfaad0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/middleware/cors.py @@ -0,0 +1 @@ +from starlette.middleware.cors import CORSMiddleware as CORSMiddleware # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py b/venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py new file mode 100644 index 0000000..bbeb2cc --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py @@ -0,0 +1 @@ +from starlette.middleware.gzip import GZipMiddleware as GZipMiddleware # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py b/venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py new file mode 100644 index 0000000..b7a3d8e --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py @@ -0,0 +1,3 @@ +from starlette.middleware.httpsredirect import ( # noqa + HTTPSRedirectMiddleware as HTTPSRedirectMiddleware, +) diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py b/venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py new file mode 100644 index 0000000..08d7e03 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py @@ -0,0 +1,3 @@ +from starlette.middleware.trustedhost import ( # noqa + TrustedHostMiddleware as TrustedHostMiddleware, +) diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py b/venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py new file mode 100644 index 0000000..c4c6a79 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py @@ -0,0 +1 @@ +from starlette.middleware.wsgi import WSGIMiddleware as WSGIMiddleware # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__init__.py b/venv/lib/python3.12/site-packages/fastapi/openapi/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..20d08fa Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/constants.cpython-312.pyc new file mode 100644 index 0000000..41c85fd Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/constants.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/docs.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/docs.cpython-312.pyc new file mode 100644 index 0000000..717328c Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/docs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..0e40046 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..de8cd70 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/constants.py b/venv/lib/python3.12/site-packages/fastapi/openapi/constants.py new file mode 100644 index 0000000..d724ee3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/openapi/constants.py @@ -0,0 +1,3 @@ +METHODS_WITH_BODY = {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"} +REF_PREFIX = "#/components/schemas/" +REF_TEMPLATE = "#/components/schemas/{model}" diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/docs.py b/venv/lib/python3.12/site-packages/fastapi/openapi/docs.py new file mode 100644 index 0000000..69473d1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/openapi/docs.py @@ -0,0 +1,344 @@ +import json +from typing import Any, Dict, Optional + +from fastapi.encoders import jsonable_encoder +from starlette.responses import HTMLResponse +from typing_extensions import Annotated, Doc # type: ignore [attr-defined] + +swagger_ui_default_parameters: Annotated[ + Dict[str, Any], + Doc( + """ + Default configurations for Swagger UI. + + You can use it as a template to add any other configurations needed. + """ + ), +] = { + "dom_id": "#swagger-ui", + "layout": "BaseLayout", + "deepLinking": True, + "showExtensions": True, + "showCommonExtensions": True, +} + + +def get_swagger_ui_html( + *, + openapi_url: Annotated[ + str, + Doc( + """ + The OpenAPI URL that Swagger UI should load and use. + + This is normally done automatically by FastAPI using the default URL + `/openapi.json`. + """ + ), + ], + title: Annotated[ + str, + Doc( + """ + The HTML `` content, normally shown in the browser tab. + """ + ), + ], + swagger_js_url: Annotated[ + str, + Doc( + """ + The URL to use to load the Swagger UI JavaScript. + + It is normally set to a CDN URL. + """ + ), + ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5.9.0/swagger-ui-bundle.js", + swagger_css_url: Annotated[ + str, + Doc( + """ + The URL to use to load the Swagger UI CSS. + + It is normally set to a CDN URL. + """ + ), + ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5.9.0/swagger-ui.css", + swagger_favicon_url: Annotated[ + str, + Doc( + """ + The URL of the favicon to use. It is normally shown in the browser tab. + """ + ), + ] = "https://fastapi.tiangolo.com/img/favicon.png", + oauth2_redirect_url: Annotated[ + Optional[str], + Doc( + """ + The OAuth2 redirect URL, it is normally automatically handled by FastAPI. + """ + ), + ] = None, + init_oauth: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + A dictionary with Swagger UI OAuth2 initialization configurations. + """ + ), + ] = None, + swagger_ui_parameters: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Configuration parameters for Swagger UI. + + It defaults to [swagger_ui_default_parameters][fastapi.openapi.docs.swagger_ui_default_parameters]. + """ + ), + ] = None, +) -> HTMLResponse: + """ + Generate and return the HTML that loads Swagger UI for the interactive + API docs (normally served at `/docs`). 
+ + You would only call this function yourself if you needed to override some parts, + for example the URLs to use to load Swagger UI's JavaScript and CSS. + + Read more about it in the + [FastAPI docs for Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/) + and the [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). + """ + current_swagger_ui_parameters = swagger_ui_default_parameters.copy() + if swagger_ui_parameters: + current_swagger_ui_parameters.update(swagger_ui_parameters) + + html = f""" + <!DOCTYPE html> + <html> + <head> + <link type="text/css" rel="stylesheet" href="{swagger_css_url}"> + <link rel="shortcut icon" href="{swagger_favicon_url}"> + <title>{title} + + +
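    <!-- The script below loads the pinned Swagger UI bundle (the
         `swagger_js_url` / `swagger_css_url` CDN defaults above) and then
         initializes `SwaggerUIBundle` with the merged
         `current_swagger_ui_parameters`; per the docstring, pass custom URLs
         to `get_swagger_ui_html` to self-host these assets. -->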
    +
    + + + + + + """ + return HTMLResponse(html) + + +def get_redoc_html( + *, + openapi_url: Annotated[ + str, + Doc( + """ + The OpenAPI URL that ReDoc should load and use. + + This is normally done automatically by FastAPI using the default URL + `/openapi.json`. + """ + ), + ], + title: Annotated[ + str, + Doc( + """ + The HTML `` content, normally shown in the browser tab. + """ + ), + ], + redoc_js_url: Annotated[ + str, + Doc( + """ + The URL to use to load the ReDoc JavaScript. + + It is normally set to a CDN URL. + """ + ), + ] = "https://cdn.jsdelivr.net/npm/redoc@next/bundles/redoc.standalone.js", + redoc_favicon_url: Annotated[ + str, + Doc( + """ + The URL of the favicon to use. It is normally shown in the browser tab. + """ + ), + ] = "https://fastapi.tiangolo.com/img/favicon.png", + with_google_fonts: Annotated[ + bool, + Doc( + """ + Load and use Google Fonts. + """ + ), + ] = True, +) -> HTMLResponse: + """ + Generate and return the HTML response that loads ReDoc for the alternative + API docs (normally served at `/redoc`). + + You would only call this function yourself if you needed to override some parts, + for example the URLs to use to load ReDoc's JavaScript and CSS. + + Read more about it in the + [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). + """ + html = f""" + <!DOCTYPE html> + <html> + <head> + <title>{title} + + + + """ + if with_google_fonts: + html += """ + + """ + html += f""" + + + + + + + + + + + """ + return HTMLResponse(html) + + +def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse: + """ + Generate the HTML response with the OAuth2 redirection for Swagger UI. + + You normally don't need to use or change this. + """ + # copied from https://github.com/swagger-api/swagger-ui/blob/v4.14.0/dist/oauth2-redirect.html + html = """ + + + + Swagger UI: OAuth2 Redirect + + + + + + """ + return HTMLResponse(content=html) diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/models.py b/venv/lib/python3.12/site-packages/fastapi/openapi/models.py new file mode 100644 index 0000000..5f3bdbb --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/openapi/models.py @@ -0,0 +1,611 @@ +from enum import Enum +from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Type, Union + +from fastapi._compat import ( + PYDANTIC_V2, + CoreSchema, + GetJsonSchemaHandler, + JsonSchemaValue, + _model_rebuild, + with_info_plain_validator_function, +) +from fastapi.logger import logger +from pydantic import AnyUrl, BaseModel, Field +from typing_extensions import Annotated, Literal, TypedDict +from typing_extensions import deprecated as typing_deprecated + +try: + import email_validator + + assert email_validator # make autoflake ignore the unused import + from pydantic import EmailStr +except ImportError: # pragma: no cover + + class EmailStr(str): # type: ignore + @classmethod + def __get_validators__(cls) -> Iterable[Callable[..., Any]]: + yield cls.validate + + @classmethod + def validate(cls, v: Any) -> str: + logger.warning( + "email-validator not installed, email fields will be treated as str.\n" + "To install, run: pip install email-validator" + ) + return str(v) + + @classmethod + def _validate(cls, __input_value: Any, _: Any) -> str: + logger.warning( + "email-validator not installed, email fields will be treated as str.\n" + "To install, run: pip install email-validator" + ) + return str(__input_value) + + @classmethod + def __get_pydantic_json_schema__( + cls, 
core_schema: CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + return {"type": "string", "format": "email"} + + @classmethod + def __get_pydantic_core_schema__( + cls, source: Type[Any], handler: Callable[[Any], CoreSchema] + ) -> CoreSchema: + return with_info_plain_validator_function(cls._validate) + + +class Contact(BaseModel): + name: Optional[str] = None + url: Optional[AnyUrl] = None + email: Optional[EmailStr] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class License(BaseModel): + name: str + identifier: Optional[str] = None + url: Optional[AnyUrl] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Info(BaseModel): + title: str + summary: Optional[str] = None + description: Optional[str] = None + termsOfService: Optional[str] = None + contact: Optional[Contact] = None + license: Optional[License] = None + version: str + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class ServerVariable(BaseModel): + enum: Annotated[Optional[List[str]], Field(min_length=1)] = None + default: str + description: Optional[str] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Server(BaseModel): + url: Union[AnyUrl, str] + description: Optional[str] = None + variables: Optional[Dict[str, ServerVariable]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Reference(BaseModel): + ref: str = Field(alias="$ref") + + +class Discriminator(BaseModel): + propertyName: str + mapping: Optional[Dict[str, str]] = None + + +class XML(BaseModel): + name: Optional[str] = None + namespace: Optional[str] = None + prefix: Optional[str] = None + attribute: Optional[bool] = None + wrapped: Optional[bool] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class ExternalDocumentation(BaseModel): + description: Optional[str] = None + url: AnyUrl + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Schema(BaseModel): + # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-the-json-schema-core-vocabu + # Core Vocabulary + schema_: Optional[str] = Field(default=None, alias="$schema") + vocabulary: Optional[str] = Field(default=None, alias="$vocabulary") + id: Optional[str] = Field(default=None, alias="$id") + anchor: Optional[str] = Field(default=None, alias="$anchor") + dynamicAnchor: Optional[str] = Field(default=None, alias="$dynamicAnchor") + ref: Optional[str] = Field(default=None, alias="$ref") + dynamicRef: Optional[str] = Field(default=None, alias="$dynamicRef") + defs: Optional[Dict[str, "SchemaOrBool"]] = Field(default=None, alias="$defs") + comment: Optional[str] = Field(default=None, alias="$comment") + # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-a-vocabulary-for-applying-s + # A Vocabulary for Applying Subschemas + allOf: Optional[List["SchemaOrBool"]] = None + anyOf: Optional[List["SchemaOrBool"]] = None + oneOf: Optional[List["SchemaOrBool"]] = None + not_: Optional["SchemaOrBool"] = Field(default=None, alias="not") + if_: Optional["SchemaOrBool"] = Field(default=None, alias="if") + then: Optional["SchemaOrBool"] = None + else_: Optional["SchemaOrBool"] = 
Field(default=None, alias="else") + dependentSchemas: Optional[Dict[str, "SchemaOrBool"]] = None + prefixItems: Optional[List["SchemaOrBool"]] = None + # TODO: uncomment and remove below when deprecating Pydantic v1 + # It generates a list of schemas for tuples, before prefixItems was available + # items: Optional["SchemaOrBool"] = None + items: Optional[Union["SchemaOrBool", List["SchemaOrBool"]]] = None + contains: Optional["SchemaOrBool"] = None + properties: Optional[Dict[str, "SchemaOrBool"]] = None + patternProperties: Optional[Dict[str, "SchemaOrBool"]] = None + additionalProperties: Optional["SchemaOrBool"] = None + propertyNames: Optional["SchemaOrBool"] = None + unevaluatedItems: Optional["SchemaOrBool"] = None + unevaluatedProperties: Optional["SchemaOrBool"] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-structural + # A Vocabulary for Structural Validation + type: Optional[str] = None + enum: Optional[List[Any]] = None + const: Optional[Any] = None + multipleOf: Optional[float] = Field(default=None, gt=0) + maximum: Optional[float] = None + exclusiveMaximum: Optional[float] = None + minimum: Optional[float] = None + exclusiveMinimum: Optional[float] = None + maxLength: Optional[int] = Field(default=None, ge=0) + minLength: Optional[int] = Field(default=None, ge=0) + pattern: Optional[str] = None + maxItems: Optional[int] = Field(default=None, ge=0) + minItems: Optional[int] = Field(default=None, ge=0) + uniqueItems: Optional[bool] = None + maxContains: Optional[int] = Field(default=None, ge=0) + minContains: Optional[int] = Field(default=None, ge=0) + maxProperties: Optional[int] = Field(default=None, ge=0) + minProperties: Optional[int] = Field(default=None, ge=0) + required: Optional[List[str]] = None + dependentRequired: Optional[Dict[str, Set[str]]] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-vocabularies-for-semantic-c + # Vocabularies for Semantic Content With "format" + format: Optional[str] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-the-conten + # A Vocabulary for the Contents of String-Encoded Data + contentEncoding: Optional[str] = None + contentMediaType: Optional[str] = None + contentSchema: Optional["SchemaOrBool"] = None + # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-basic-meta + # A Vocabulary for Basic Meta-Data Annotations + title: Optional[str] = None + description: Optional[str] = None + default: Optional[Any] = None + deprecated: Optional[bool] = None + readOnly: Optional[bool] = None + writeOnly: Optional[bool] = None + examples: Optional[List[Any]] = None + # Ref: OpenAPI 3.1.0: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#schema-object + # Schema Object + discriminator: Optional[Discriminator] = None + xml: Optional[XML] = None + externalDocs: Optional[ExternalDocumentation] = None + example: Annotated[ + Optional[Any], + typing_deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead."
+ ), + ] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +# Ref: https://json-schema.org/draft/2020-12/json-schema-core.html#name-json-schema-documents +# A JSON Schema MUST be an object or a boolean. +SchemaOrBool = Union[Schema, bool] + + +class Example(TypedDict, total=False): + summary: Optional[str] + description: Optional[str] + value: Optional[Any] + externalValue: Optional[AnyUrl] + + if PYDANTIC_V2: # type: ignore [misc] + __pydantic_config__ = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class ParameterInType(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Encoding(BaseModel): + contentType: Optional[str] = None + headers: Optional[Dict[str, Union["Header", Reference]]] = None + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class MediaType(BaseModel): + schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") + example: Optional[Any] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + encoding: Optional[Dict[str, Encoding]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class ParameterBase(BaseModel): + description: Optional[str] = None + required: Optional[bool] = None + deprecated: Optional[bool] = None + # Serialization rules for simple scenarios + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") + example: Optional[Any] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + # Serialization rules for more complex scenarios + content: Optional[Dict[str, MediaType]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Parameter(ParameterBase): + name: str + in_: ParameterInType = Field(alias="in") + + +class Header(ParameterBase): + pass + + +class RequestBody(BaseModel): + description: Optional[str] = None + content: Dict[str, MediaType] + required: Optional[bool] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Link(BaseModel): + operationRef: Optional[str] = None + operationId: Optional[str] = None + parameters: Optional[Dict[str, Union[Any, str]]] = None + requestBody: Optional[Union[Any, str]] = None + description: Optional[str] = None + server: Optional[Server] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Response(BaseModel): + description: str + headers: Optional[Dict[str, Union[Header, Reference]]] = None + content: Optional[Dict[str, MediaType]] = None + links: Optional[Dict[str, Union[Link, Reference]]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Operation(BaseModel): + tags: Optional[List[str]] = None + summary: Optional[str] = None + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + operationId: Optional[str] = None + parameters: Optional[List[Union[Parameter, Reference]]] = None + requestBody: Optional[Union[RequestBody, Reference]] = None + # Using Any for Specification Extensions + 
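# Illustrative sketch (not part of the upstream file): a path operation such
# as `@app.get("/items/{item_id}")` typically serializes to an Operation
# along the lines of:
#     {"summary": "Read Item",
#      "operationId": "read_item_items__item_id__get",
#      "parameters": [{"name": "item_id", "in": "path", "required": true}],
#      "responses": {"200": {...}, "422": {...}}}
# The `Union[Response, Any]` on `responses` below keeps room for `x-*`
# specification extensions alongside regular Response objects.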
responses: Optional[Dict[str, Union[Response, Any]]] = None + callbacks: Optional[Dict[str, Union[Dict[str, "PathItem"], Reference]]] = None + deprecated: Optional[bool] = None + security: Optional[List[Dict[str, List[str]]]] = None + servers: Optional[List[Server]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class PathItem(BaseModel): + ref: Optional[str] = Field(default=None, alias="$ref") + summary: Optional[str] = None + description: Optional[str] = None + get: Optional[Operation] = None + put: Optional[Operation] = None + post: Optional[Operation] = None + delete: Optional[Operation] = None + options: Optional[Operation] = None + head: Optional[Operation] = None + patch: Optional[Operation] = None + trace: Optional[Operation] = None + servers: Optional[List[Server]] = None + parameters: Optional[List[Union[Parameter, Reference]]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class SecuritySchemeType(Enum): + apiKey = "apiKey" + http = "http" + oauth2 = "oauth2" + openIdConnect = "openIdConnect" + + +class SecurityBase(BaseModel): + type_: SecuritySchemeType = Field(alias="type") + description: Optional[str] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class APIKeyIn(Enum): + query = "query" + header = "header" + cookie = "cookie" + + +class APIKey(SecurityBase): + type_: SecuritySchemeType = Field(default=SecuritySchemeType.apiKey, alias="type") + in_: APIKeyIn = Field(alias="in") + name: str + + +class HTTPBase(SecurityBase): + type_: SecuritySchemeType = Field(default=SecuritySchemeType.http, alias="type") + scheme: str + + +class HTTPBearer(HTTPBase): + scheme: Literal["bearer"] = "bearer" + bearerFormat: Optional[str] = None + + +class OAuthFlow(BaseModel): + refreshUrl: Optional[str] = None + scopes: Dict[str, str] = {} + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class OAuthFlowImplicit(OAuthFlow): + authorizationUrl: str + + +class OAuthFlowPassword(OAuthFlow): + tokenUrl: str + + +class OAuthFlowClientCredentials(OAuthFlow): + tokenUrl: str + + +class OAuthFlowAuthorizationCode(OAuthFlow): + authorizationUrl: str + tokenUrl: str + + +class OAuthFlows(BaseModel): + implicit: Optional[OAuthFlowImplicit] = None + password: Optional[OAuthFlowPassword] = None + clientCredentials: Optional[OAuthFlowClientCredentials] = None + authorizationCode: Optional[OAuthFlowAuthorizationCode] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class OAuth2(SecurityBase): + type_: SecuritySchemeType = Field(default=SecuritySchemeType.oauth2, alias="type") + flows: OAuthFlows + + +class OpenIdConnect(SecurityBase): + type_: SecuritySchemeType = Field( + default=SecuritySchemeType.openIdConnect, alias="type" + ) + openIdConnectUrl: str + + +SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer] + + +class Components(BaseModel): + schemas: Optional[Dict[str, Union[Schema, Reference]]] = None + responses: Optional[Dict[str, Union[Response, Reference]]] = None + parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None + headers: Optional[Dict[str, Union[Header, Reference]]] = None + securitySchemes: 
Optional[Dict[str, Union[SecurityScheme, Reference]]] = None + links: Optional[Dict[str, Union[Link, Reference]]] = None + # Using Any for Specification Extensions + callbacks: Optional[Dict[str, Union[Dict[str, PathItem], Reference, Any]]] = None + pathItems: Optional[Dict[str, Union[PathItem, Reference]]] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class Tag(BaseModel): + name: str + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +class OpenAPI(BaseModel): + openapi: str + info: Info + jsonSchemaDialect: Optional[str] = None + servers: Optional[List[Server]] = None + # Using Any for Specification Extensions + paths: Optional[Dict[str, Union[PathItem, Any]]] = None + webhooks: Optional[Dict[str, Union[PathItem, Reference]]] = None + components: Optional[Components] = None + security: Optional[List[Dict[str, List[str]]]] = None + tags: Optional[List[Tag]] = None + externalDocs: Optional[ExternalDocumentation] = None + + if PYDANTIC_V2: + model_config = {"extra": "allow"} + + else: + + class Config: + extra = "allow" + + +_model_rebuild(Schema) +_model_rebuild(Operation) +_model_rebuild(Encoding) diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/utils.py b/venv/lib/python3.12/site-packages/fastapi/openapi/utils.py new file mode 100644 index 0000000..5bfb5ac --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/openapi/utils.py @@ -0,0 +1,530 @@ +import http.client +import inspect +import warnings +from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast + +from fastapi import routing +from fastapi._compat import ( + GenerateJsonSchema, + JsonSchemaValue, + ModelField, + Undefined, + get_compat_model_name_map, + get_definitions, + get_schema_from_model_field, + lenient_issubclass, +) +from fastapi.datastructures import DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import get_flat_dependant, get_flat_params +from fastapi.encoders import jsonable_encoder +from fastapi.openapi.constants import METHODS_WITH_BODY, REF_PREFIX, REF_TEMPLATE +from fastapi.openapi.models import OpenAPI +from fastapi.params import Body, Param +from fastapi.responses import Response +from fastapi.types import ModelNameMap +from fastapi.utils import ( + deep_dict_update, + generate_operation_id_for_path, + is_body_allowed_for_status_code, +) +from starlette.responses import JSONResponse +from starlette.routing import BaseRoute +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY +from typing_extensions import Literal + +validation_error_definition = { + "title": "ValidationError", + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "integer"}]}, + }, + "msg": {"title": "Message", "type": "string"}, + "type": {"title": "Error Type", "type": "string"}, + }, + "required": ["loc", "msg", "type"], +} + +validation_error_response_definition = { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "detail": { + "title": "Detail", + "type": "array", + "items": {"$ref": REF_PREFIX + "ValidationError"}, + } + }, +} + +status_code_ranges: Dict[str, str] = { + "1XX": "Information", + "2XX": "Success", + "3XX": "Redirection", + "4XX": "Client Error", + "5XX": "Server Error", + "DEFAULT": "Default 
Response", +} + + +def get_openapi_security_definitions( + flat_dependant: Dependant, +) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + security_definitions = {} + operation_security = [] + for security_requirement in flat_dependant.security_requirements: + security_definition = jsonable_encoder( + security_requirement.security_scheme.model, + by_alias=True, + exclude_none=True, + ) + security_name = security_requirement.security_scheme.scheme_name + security_definitions[security_name] = security_definition + operation_security.append({security_name: security_requirement.scopes}) + return security_definitions, operation_security + + +def get_openapi_operation_parameters( + *, + all_route_params: Sequence[ModelField], + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + field_mapping: Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + separate_input_output_schemas: bool = True, +) -> List[Dict[str, Any]]: + parameters = [] + for param in all_route_params: + field_info = param.field_info + field_info = cast(Param, field_info) + if not field_info.include_in_schema: + continue + param_schema = get_schema_from_model_field( + field=param, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + parameter = { + "name": param.alias, + "in": field_info.in_.value, + "required": param.required, + "schema": param_schema, + } + if field_info.description: + parameter["description"] = field_info.description + if field_info.openapi_examples: + parameter["examples"] = jsonable_encoder(field_info.openapi_examples) + elif field_info.example != Undefined: + parameter["example"] = jsonable_encoder(field_info.example) + if field_info.deprecated: + parameter["deprecated"] = field_info.deprecated + parameters.append(parameter) + return parameters + + +def get_openapi_operation_request_body( + *, + body_field: Optional[ModelField], + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + field_mapping: Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + separate_input_output_schemas: bool = True, +) -> Optional[Dict[str, Any]]: + if not body_field: + return None + assert isinstance(body_field, ModelField) + body_schema = get_schema_from_model_field( + field=body_field, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + field_info = cast(Body, body_field.field_info) + request_media_type = field_info.media_type + required = body_field.required + request_body_oai: Dict[str, Any] = {} + if required: + request_body_oai["required"] = required + request_media_content: Dict[str, Any] = {"schema": body_schema} + if field_info.openapi_examples: + request_media_content["examples"] = jsonable_encoder( + field_info.openapi_examples + ) + elif field_info.example != Undefined: + request_media_content["example"] = jsonable_encoder(field_info.example) + request_body_oai["content"] = {request_media_type: request_media_content} + return request_body_oai + + +def generate_operation_id( + *, route: routing.APIRoute, method: str +) -> str: # pragma: nocover + warnings.warn( + "fastapi.openapi.utils.generate_operation_id() was deprecated, " + "it is not used internally, and will be removed soon", + DeprecationWarning, + stacklevel=2, + ) + if route.operation_id: + return route.operation_id + 
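# When no explicit operation_id was set, fall back to deriving one from the
# route name, the path format, and the HTTP method via
# generate_operation_id_for_path, e.g. a `read_items` endpoint at
# `GET /items/` becomes "read_items_items__get".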
path: str = route.path_format + return generate_operation_id_for_path(name=route.name, path=path, method=method) + + +def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str: + if route.summary: + return route.summary + return route.name.replace("_", " ").title() + + +def get_openapi_operation_metadata( + *, route: routing.APIRoute, method: str, operation_ids: Set[str] +) -> Dict[str, Any]: + operation: Dict[str, Any] = {} + if route.tags: + operation["tags"] = route.tags + operation["summary"] = generate_operation_summary(route=route, method=method) + if route.description: + operation["description"] = route.description + operation_id = route.operation_id or route.unique_id + if operation_id in operation_ids: + message = ( + f"Duplicate Operation ID {operation_id} for function " + + f"{route.endpoint.__name__}" + ) + file_name = getattr(route.endpoint, "__globals__", {}).get("__file__") + if file_name: + message += f" at {file_name}" + warnings.warn(message, stacklevel=1) + operation_ids.add(operation_id) + operation["operationId"] = operation_id + if route.deprecated: + operation["deprecated"] = route.deprecated + return operation + + +def get_openapi_path( + *, + route: routing.APIRoute, + operation_ids: Set[str], + schema_generator: GenerateJsonSchema, + model_name_map: ModelNameMap, + field_mapping: Dict[ + Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue + ], + separate_input_output_schemas: bool = True, +) -> Tuple[Dict[str, Any], Dict[str, Any], Dict[str, Any]]: + path = {} + security_schemes: Dict[str, Any] = {} + definitions: Dict[str, Any] = {} + assert route.methods is not None, "Methods must be a list" + if isinstance(route.response_class, DefaultPlaceholder): + current_response_class: Type[Response] = route.response_class.value + else: + current_response_class = route.response_class + assert current_response_class, "A response class is needed to generate OpenAPI" + route_response_media_type: Optional[str] = current_response_class.media_type + if route.include_in_schema: + for method in route.methods: + operation = get_openapi_operation_metadata( + route=route, method=method, operation_ids=operation_ids + ) + parameters: List[Dict[str, Any]] = [] + flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True) + security_definitions, operation_security = get_openapi_security_definitions( + flat_dependant=flat_dependant + ) + if operation_security: + operation.setdefault("security", []).extend(operation_security) + if security_definitions: + security_schemes.update(security_definitions) + all_route_params = get_flat_params(route.dependant) + operation_parameters = get_openapi_operation_parameters( + all_route_params=all_route_params, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + parameters.extend(operation_parameters) + if parameters: + all_parameters = { + (param["in"], param["name"]): param for param in parameters + } + required_parameters = { + (param["in"], param["name"]): param + for param in parameters + if param.get("required") + } + # Make sure required definitions of the same parameter take precedence + # over non-required definitions + all_parameters.update(required_parameters) + operation["parameters"] = list(all_parameters.values()) + if method in METHODS_WITH_BODY: + request_body_oai = get_openapi_operation_request_body( + body_field=route.body_field, + schema_generator=schema_generator, 
+ model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + if request_body_oai: + operation["requestBody"] = request_body_oai + if route.callbacks: + callbacks = {} + for callback in route.callbacks: + if isinstance(callback, routing.APIRoute): + ( + cb_path, + cb_security_schemes, + cb_definitions, + ) = get_openapi_path( + route=callback, + operation_ids=operation_ids, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + callbacks[callback.name] = {callback.path: cb_path} + operation["callbacks"] = callbacks + if route.status_code is not None: + status_code = str(route.status_code) + else: + # It would probably make more sense for all response classes to have an + # explicit default status_code, and to extract it from them, instead of + # doing these inspection tricks; that would probably be done in the future + # TODO: probably make status_code a default class attribute for all + # responses in Starlette + response_signature = inspect.signature(current_response_class.__init__) + status_code_param = response_signature.parameters.get("status_code") + if status_code_param is not None: + if isinstance(status_code_param.default, int): + status_code = str(status_code_param.default) + operation.setdefault("responses", {}).setdefault(status_code, {})[ + "description" + ] = route.response_description + if route_response_media_type and is_body_allowed_for_status_code( + route.status_code + ): + response_schema = {"type": "string"} + if lenient_issubclass(current_response_class, JSONResponse): + if route.response_field: + response_schema = get_schema_from_model_field( + field=route.response_field, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + else: + response_schema = {} + operation.setdefault("responses", {}).setdefault( + status_code, {} + ).setdefault("content", {}).setdefault(route_response_media_type, {})[ + "schema" + ] = response_schema + if route.responses: + operation_responses = operation.setdefault("responses", {}) + for ( + additional_status_code, + additional_response, + ) in route.responses.items(): + process_response = additional_response.copy() + process_response.pop("model", None) + status_code_key = str(additional_status_code).upper() + if status_code_key == "DEFAULT": + status_code_key = "default" + openapi_response = operation_responses.setdefault( + status_code_key, {} + ) + assert isinstance( + process_response, dict + ), "An additional response must be a dict" + field = route.response_fields.get(additional_status_code) + additional_field_schema: Optional[Dict[str, Any]] = None + if field: + additional_field_schema = get_schema_from_model_field( + field=field, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + media_type = route_response_media_type or "application/json" + additional_schema = ( + process_response.setdefault("content", {}) + .setdefault(media_type, {}) + .setdefault("schema", {}) + ) + deep_dict_update(additional_schema, additional_field_schema) + status_text: Optional[str] = status_code_ranges.get( + str(additional_status_code).upper() + ) or http.client.responses.get(int(additional_status_code)) + description = (
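# Precedence for the response description: an explicit "description" on the
# additional response wins, then one already set on the OpenAPI response,
# then the standard HTTP status text, then the "Additional Response" fallback.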
process_response.get("description") + or openapi_response.get("description") + or status_text + or "Additional Response" + ) + deep_dict_update(openapi_response, process_response) + openapi_response["description"] = description + http422 = str(HTTP_422_UNPROCESSABLE_ENTITY) + if (all_route_params or route.body_field) and not any( + status in operation["responses"] + for status in [http422, "4XX", "default"] + ): + operation["responses"][http422] = { + "description": "Validation Error", + "content": { + "application/json": { + "schema": {"$ref": REF_PREFIX + "HTTPValidationError"} + } + }, + } + if "ValidationError" not in definitions: + definitions.update( + { + "ValidationError": validation_error_definition, + "HTTPValidationError": validation_error_response_definition, + } + ) + if route.openapi_extra: + deep_dict_update(operation, route.openapi_extra) + path[method.lower()] = operation + return path, security_schemes, definitions + + +def get_fields_from_routes( + routes: Sequence[BaseRoute], +) -> List[ModelField]: + body_fields_from_routes: List[ModelField] = [] + responses_from_routes: List[ModelField] = [] + request_fields_from_routes: List[ModelField] = [] + callback_flat_models: List[ModelField] = [] + for route in routes: + if getattr(route, "include_in_schema", None) and isinstance( + route, routing.APIRoute + ): + if route.body_field: + assert isinstance( + route.body_field, ModelField + ), "A request body must be a Pydantic Field" + body_fields_from_routes.append(route.body_field) + if route.response_field: + responses_from_routes.append(route.response_field) + if route.response_fields: + responses_from_routes.extend(route.response_fields.values()) + if route.callbacks: + callback_flat_models.extend(get_fields_from_routes(route.callbacks)) + params = get_flat_params(route.dependant) + request_fields_from_routes.extend(params) + + flat_models = callback_flat_models + list( + body_fields_from_routes + responses_from_routes + request_fields_from_routes + ) + return flat_models + + +def get_openapi( + *, + title: str, + version: str, + openapi_version: str = "3.1.0", + summary: Optional[str] = None, + description: Optional[str] = None, + routes: Sequence[BaseRoute], + webhooks: Optional[Sequence[BaseRoute]] = None, + tags: Optional[List[Dict[str, Any]]] = None, + servers: Optional[List[Dict[str, Union[str, Any]]]] = None, + terms_of_service: Optional[str] = None, + contact: Optional[Dict[str, Union[str, Any]]] = None, + license_info: Optional[Dict[str, Union[str, Any]]] = None, + separate_input_output_schemas: bool = True, +) -> Dict[str, Any]: + info: Dict[str, Any] = {"title": title, "version": version} + if summary: + info["summary"] = summary + if description: + info["description"] = description + if terms_of_service: + info["termsOfService"] = terms_of_service + if contact: + info["contact"] = contact + if license_info: + info["license"] = license_info + output: Dict[str, Any] = {"openapi": openapi_version, "info": info} + if servers: + output["servers"] = servers + components: Dict[str, Dict[str, Any]] = {} + paths: Dict[str, Dict[str, Any]] = {} + webhook_paths: Dict[str, Dict[str, Any]] = {} + operation_ids: Set[str] = set() + all_fields = get_fields_from_routes(list(routes or []) + list(webhooks or [])) + model_name_map = get_compat_model_name_map(all_fields) + schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE) + field_mapping, definitions = get_definitions( + fields=all_fields, + schema_generator=schema_generator, + model_name_map=model_name_map, + 
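+        # get_definitions returns both a (field, mode) -> JSON Schema mapping
+        # used when embedding parameter/body schemas, and the named model
+        # definitions that end up in components.schemas.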
separate_input_output_schemas=separate_input_output_schemas, + ) + for route in routes or []: + if isinstance(route, routing.APIRoute): + result = get_openapi_path( + route=route, + operation_ids=operation_ids, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + if result: + path, security_schemes, path_definitions = result + if path: + paths.setdefault(route.path_format, {}).update(path) + if security_schemes: + components.setdefault("securitySchemes", {}).update( + security_schemes + ) + if path_definitions: + definitions.update(path_definitions) + for webhook in webhooks or []: + if isinstance(webhook, routing.APIRoute): + result = get_openapi_path( + route=webhook, + operation_ids=operation_ids, + schema_generator=schema_generator, + model_name_map=model_name_map, + field_mapping=field_mapping, + separate_input_output_schemas=separate_input_output_schemas, + ) + if result: + path, security_schemes, path_definitions = result + if path: + webhook_paths.setdefault(webhook.path_format, {}).update(path) + if security_schemes: + components.setdefault("securitySchemes", {}).update( + security_schemes + ) + if path_definitions: + definitions.update(path_definitions) + if definitions: + components["schemas"] = {k: definitions[k] for k in sorted(definitions)} + if components: + output["components"] = components + output["paths"] = paths + if webhook_paths: + output["webhooks"] = webhook_paths + if tags: + output["tags"] = tags + return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore diff --git a/venv/lib/python3.12/site-packages/fastapi/param_functions.py b/venv/lib/python3.12/site-packages/fastapi/param_functions.py new file mode 100644 index 0000000..3f6dbc9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/param_functions.py @@ -0,0 +1,2360 @@ +from typing import Any, Callable, Dict, List, Optional, Sequence, Union + +from fastapi import params +from fastapi._compat import Undefined +from fastapi.openapi.models import Example +from typing_extensions import Annotated, Doc, deprecated # type: ignore [attr-defined] + +_Unset: Any = Undefined + + +def Path( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = ..., + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. 
The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). 
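+
+            A minimal sketch (hypothetical values) for an integer path
+            parameter:
+
+            ```python
+            openapi_examples={
+                "normal": {
+                    "summary": "A typical item ID",
+                    "value": 3,
+                },
+            }
+            ```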
+ """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + """ + Declare a path parameter for a *path operation*. + + Read more about it in the + [FastAPI docs for Path Parameters and Numeric Validations](https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/). + + ```python + from typing import Annotated + + from fastapi import FastAPI, Path + + app = FastAPI() + + + @app.get("/items/{item_id}") + async def read_items( + item_id: Annotated[int, Path(title="The ID of the item to get")], + ): + return {"item_id": item_id} + ``` + """ + return params.Path( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Query( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. 
+ """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. 
visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + """ + ), + ], +) -> Any: + return params.Query( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Header( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + convert_underscores: Annotated[ + bool, + Doc( + """ + Automatically convert underscores to hyphens in the parameter field name. + + Read more about it in the + [FastAPI docs for Header Parameters](https://fastapi.tiangolo.com/tutorial/header-params/#automatic-conversion) + """ + ), + ] = True, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. 
If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
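+
+            For example, `Header(foo="bar")` can become
+            `Header(json_schema_extra={"foo": "bar"})`.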
+ """ + ), + ], +) -> Any: + return params.Header( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + convert_underscores=convert_underscores, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Cookie( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. 
+ """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
+ """ + ), + ], +) -> Any: + return params.Cookie( + default=default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Body( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + embed: Annotated[ + bool, + Doc( + """ + When `embed` is `True`, the parameter will be expected in a JSON body as a + key instead of being the JSON body itself. + + This happens automatically when more than one `Body` parameter is declared. + + Read more about it in the + [FastAPI docs for Body - Multiple Parameters](https://fastapi.tiangolo.com/tutorial/body-multiple-params/#embed-a-single-body-parameter). + """ + ), + ] = False, + media_type: Annotated[ + str, + Doc( + """ + The media type of this parameter field. Changing it would affect the + generated OpenAPI, but currently it doesn't affect the parsing of the data. + """ + ), + ] = "application/json", + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. 
If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. + """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
+ """ + ), + ], +) -> Any: + return params.Body( + default=default, + default_factory=default_factory, + embed=embed, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Form( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + media_type: Annotated[ + str, + Doc( + """ + The media type of this parameter field. Changing it would affect the + generated OpenAPI, but currently it doesn't affect the parsing of the data. + """ + ), + ] = "application/x-www-form-urlencoded", + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. 
+ """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
+ """ + ), + ], +) -> Any: + return params.Form( + default=default, + default_factory=default_factory, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def File( # noqa: N802 + default: Annotated[ + Any, + Doc( + """ + Default value if the parameter field is not set. + """ + ), + ] = Undefined, + *, + default_factory: Annotated[ + Union[Callable[[], Any], None], + Doc( + """ + A callable to generate the default value. + + This doesn't affect `Path` parameters as the value is always required. + The parameter is available only for compatibility. + """ + ), + ] = _Unset, + media_type: Annotated[ + str, + Doc( + """ + The media type of this parameter field. Changing it would affect the + generated OpenAPI, but currently it doesn't affect the parsing of the data. + """ + ), + ] = "multipart/form-data", + alias: Annotated[ + Optional[str], + Doc( + """ + An alternative name for the parameter field. + + This will be used to extract the data and for the generated OpenAPI. + It is particularly useful when you can't use the name you want because it + is a Python reserved keyword or similar. + """ + ), + ] = None, + alias_priority: Annotated[ + Union[int, None], + Doc( + """ + Priority of the alias. This affects whether an alias generator is used. + """ + ), + ] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Whitelist' validation step. The parameter field will be the single one + allowed by the alias or set of aliases defined. + """ + ), + ] = None, + serialization_alias: Annotated[ + Union[str, None], + Doc( + """ + 'Blacklist' validation step. The vanilla parameter field will be the + single one of the alias' or set of aliases' fields and all the other + fields will be ignored at serialization time. + """ + ), + ] = None, + title: Annotated[ + Optional[str], + Doc( + """ + Human-readable title. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Human-readable description. + """ + ), + ] = None, + gt: Annotated[ + Optional[float], + Doc( + """ + Greater than. If set, value must be greater than this. Only applicable to + numbers. + """ + ), + ] = None, + ge: Annotated[ + Optional[float], + Doc( + """ + Greater than or equal. If set, value must be greater than or equal to + this. Only applicable to numbers. + """ + ), + ] = None, + lt: Annotated[ + Optional[float], + Doc( + """ + Less than. If set, value must be less than this. Only applicable to numbers. + """ + ), + ] = None, + le: Annotated[ + Optional[float], + Doc( + """ + Less than or equal. If set, value must be less than or equal to this. + Only applicable to numbers. + """ + ), + ] = None, + min_length: Annotated[ + Optional[int], + Doc( + """ + Minimum length for strings. 
+ """ + ), + ] = None, + max_length: Annotated[ + Optional[int], + Doc( + """ + Maximum length for strings. + """ + ), + ] = None, + pattern: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + ] = None, + regex: Annotated[ + Optional[str], + Doc( + """ + RegEx pattern for strings. + """ + ), + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Annotated[ + Union[str, None], + Doc( + """ + Parameter field name for discriminating the type in a tagged union. + """ + ), + ] = None, + strict: Annotated[ + Union[bool, None], + Doc( + """ + If `True`, strict validation is applied to the field. + """ + ), + ] = _Unset, + multiple_of: Annotated[ + Union[float, None], + Doc( + """ + Value must be a multiple of this. Only applicable to numbers. + """ + ), + ] = _Unset, + allow_inf_nan: Annotated[ + Union[bool, None], + Doc( + """ + Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + """ + ), + ] = _Unset, + max_digits: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of allow digits for strings. + """ + ), + ] = _Unset, + decimal_places: Annotated[ + Union[int, None], + Doc( + """ + Maximum number of decimal places allowed for numbers. + """ + ), + ] = _Unset, + examples: Annotated[ + Optional[List[Any]], + Doc( + """ + Example values for this field. + """ + ), + ] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." + ), + ] = _Unset, + openapi_examples: Annotated[ + Optional[Dict[str, Example]], + Doc( + """ + OpenAPI-specific examples. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Swagger UI (that provides the `/docs` interface) has better support for the + OpenAPI-specific examples than the JSON Schema `examples`, that's the main + use case for this. + + Read more about it in the + [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this parameter field as deprecated. + + It will affect the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) this parameter field in the generated OpenAPI. + You probably don't need it, but it's available. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + json_schema_extra: Annotated[ + Union[Dict[str, Any], None], + Doc( + """ + Any additional JSON schema data. + """ + ), + ] = None, + **extra: Annotated[ + Any, + Doc( + """ + Include extra fields used by the JSON Schema. + """ + ), + deprecated( + """ + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
+ """ + ), + ], +) -> Any: + return params.File( + default=default, + default_factory=default_factory, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + example=example, + examples=examples, + openapi_examples=openapi_examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +def Depends( # noqa: N802 + dependency: Annotated[ + Optional[Callable[..., Any]], + Doc( + """ + A "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you, just pass the object + directly. + """ + ), + ] = None, + *, + use_cache: Annotated[ + bool, + Doc( + """ + By default, after a dependency is called the first time in a request, if + the dependency is declared again for the rest of the request (for example + if the dependency is needed by several dependencies), the value will be + re-used for the rest of the request. + + Set `use_cache` to `False` to disable this behavior and ensure the + dependency is called again (if declared more than once) in the same request. + """ + ), + ] = True, +) -> Any: + """ + Declare a FastAPI dependency. + + It takes a single "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you. + + Read more about it in the + [FastAPI docs for Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/). + + **Example** + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + + app = FastAPI() + + + async def common_parameters(q: str | None = None, skip: int = 0, limit: int = 100): + return {"q": q, "skip": skip, "limit": limit} + + + @app.get("/items/") + async def read_items(commons: Annotated[dict, Depends(common_parameters)]): + return commons + ``` + """ + return params.Depends(dependency=dependency, use_cache=use_cache) + + +def Security( # noqa: N802 + dependency: Annotated[ + Optional[Callable[..., Any]], + Doc( + """ + A "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you, just pass the object + directly. + """ + ), + ] = None, + *, + scopes: Annotated[ + Optional[Sequence[str]], + Doc( + """ + OAuth2 scopes required for the *path operation* that uses this Security + dependency. + + The term "scope" comes from the OAuth2 specification, it seems to be + intentionaly vague and interpretable. It normally refers to permissions, + in cases to roles. + + These scopes are integrated with OpenAPI (and the API docs at `/docs`). + So they are visible in the OpenAPI specification. + ) + """ + ), + ] = None, + use_cache: Annotated[ + bool, + Doc( + """ + By default, after a dependency is called the first time in a request, if + the dependency is declared again for the rest of the request (for example + if the dependency is needed by several dependencies), the value will be + re-used for the rest of the request. + + Set `use_cache` to `False` to disable this behavior and ensure the + dependency is called again (if declared more than once) in the same request. + """ + ), + ] = True, +) -> Any: + """ + Declare a FastAPI Security dependency. 
+ + The only difference with a regular dependency is that it can declare OAuth2 + scopes that will be integrated with OpenAPI and the automatic UI docs (by default + at `/docs`). + + It takes a single "dependable" callable (like a function). + + Don't call it directly, FastAPI will call it for you. + + Read more about it in the + [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/) and + in the + [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). + + **Example** + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + + from .db import User + from .security import get_current_active_user + + app = FastAPI() + + @app.get("/users/me/items/") + async def read_own_items( + current_user: Annotated[User, Security(get_current_active_user, scopes=["items"])] + ): + return [{"item_id": "Foo", "owner": current_user.username}] + ``` + """ + return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) diff --git a/venv/lib/python3.12/site-packages/fastapi/params.py b/venv/lib/python3.12/site-packages/fastapi/params.py new file mode 100644 index 0000000..b40944d --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/params.py @@ -0,0 +1,777 @@ +import warnings +from enum import Enum +from typing import Any, Callable, Dict, List, Optional, Sequence, Union + +from fastapi.openapi.models import Example +from pydantic.fields import FieldInfo +from typing_extensions import Annotated, deprecated + +from ._compat import PYDANTIC_V2, Undefined + +_Unset: Any = Undefined + + +class ParamTypes(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Param(FieldInfo): + in_: ParamTypes + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
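+                # Kept only for backwards compatibility: setting `example`
+                # triggers a DeprecationWarning in __init__ below; prefer
+                # `examples` / `openapi_examples`.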
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + self.deprecated = deprecated + if example is not _Unset: + warnings.warn( + "`example` has been deprecated, please use `examples` instead", + category=DeprecationWarning, + stacklevel=4, + ) + self.example = example + self.include_in_schema = include_in_schema + self.openapi_examples = openapi_examples + kwargs = dict( + default=default, + default_factory=default_factory, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + discriminator=discriminator, + multiple_of=multiple_of, + allow_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + **extra, + ) + if examples is not None: + kwargs["examples"] = examples + if regex is not None: + warnings.warn( + "`regex` has been deprecated, please use `pattern` instead", + category=DeprecationWarning, + stacklevel=4, + ) + current_json_schema_extra = json_schema_extra or extra + if PYDANTIC_V2: + kwargs.update( + { + "annotation": annotation, + "alias_priority": alias_priority, + "validation_alias": validation_alias, + "serialization_alias": serialization_alias, + "strict": strict, + "json_schema_extra": current_json_schema_extra, + } + ) + kwargs["pattern"] = pattern or regex + else: + kwargs["regex"] = pattern or regex + kwargs.update(**current_json_schema_extra) + use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} + + super().__init__(**use_kwargs) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Path(Param): + in_ = ParamTypes.path + + def __init__( + self, + default: Any = ..., + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
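From the caller's side, the `regex`-to-`pattern` migration handled in `Param.__init__` above looks like this. The route and parameter names are illustrative:

```python
from typing import Annotated

from fastapi import FastAPI, Query

app = FastAPI()


@app.get("/items/")
async def read_items(
    # `pattern` is the current spelling; passing the deprecated `regex=`
    # still works but triggers the DeprecationWarning emitted above.
    q: Annotated[str | None, Query(pattern="^fixedquery$")] = None,
):
    return {"q": q}
```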
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + assert default is ..., "Path parameters cannot have a default value" + self.in_ = self.in_ + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Query(Param): + in_ = ParamTypes.query + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
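`Path.__init__` above asserts that path parameters never carry a default, because they are always part of the URL. A short usage sketch with an invented route:

```python
from typing import Annotated

from fastapi import FastAPI, Path

app = FastAPI()


@app.get("/items/{item_id}")
async def read_item(
    # No default is allowed here; the parameter is required because it is
    # embedded in the URL itself.
    item_id: Annotated[int, Path(title="The ID of the item", ge=1)],
):
    return {"item_id": item_id}
```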
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Header(Param): + in_ = ParamTypes.header + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + convert_underscores: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
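`Header` above adds one knob the other `Param` subclasses lack: `convert_underscores`. A sketch of what it does in practice:

```python
from typing import Annotated

from fastapi import FastAPI, Header

app = FastAPI()


@app.get("/agent")
async def read_agent(
    # convert_underscores=True (the default) maps the Python name
    # `user_agent` to the HTTP header `User-Agent`.
    user_agent: Annotated[str | None, Header()] = None,
):
    return {"User-Agent": user_agent}
```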
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + self.convert_underscores = convert_underscores + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Cookie(Param): + in_ = ParamTypes.cookie + + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
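`Cookie` is declared the same way as the other parameter types; a minimal sketch with an invented cookie name:

```python
from typing import Annotated

from fastapi import Cookie, FastAPI

app = FastAPI()


@app.get("/session")
async def read_session(
    session_id: Annotated[str | None, Cookie()] = None,
):
    return {"session_id": session_id}
```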
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Body(FieldInfo): + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + embed: bool = False, + media_type: str = "application/json", + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
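`Body` above introduces `embed` and `media_type`, which the `Param` subclasses don't have. A sketch of the effect of `embed=True`, with a hypothetical model and route:

```python
from typing import Annotated

from fastapi import Body, FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str


@app.put("/items/{item_id}")
async def update_item(
    item_id: int,
    # embed=True expects {"item": {"name": "..."}} in the JSON body
    # instead of the bare {"name": "..."}.
    item: Annotated[Item, Body(embed=True)],
):
    return {"item_id": item_id, "item": item}
```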
+        ),
+    ] = _Unset,
+    openapi_examples: Optional[Dict[str, Example]] = None,
+    deprecated: Optional[bool] = None,
+    include_in_schema: bool = True,
+    json_schema_extra: Union[Dict[str, Any], None] = None,
+    **extra: Any,
+):
+        self.embed = embed
+        self.media_type = media_type
+        self.deprecated = deprecated
+        if example is not _Unset:
+            warnings.warn(
+                "`example` has been deprecated, please use `examples` instead",
+                category=DeprecationWarning,
+                stacklevel=4,
+            )
+        self.example = example
+        self.include_in_schema = include_in_schema
+        self.openapi_examples = openapi_examples
+        kwargs = dict(
+            default=default,
+            default_factory=default_factory,
+            alias=alias,
+            title=title,
+            description=description,
+            gt=gt,
+            ge=ge,
+            lt=lt,
+            le=le,
+            min_length=min_length,
+            max_length=max_length,
+            discriminator=discriminator,
+            multiple_of=multiple_of,
+            allow_nan=allow_inf_nan,
+            max_digits=max_digits,
+            decimal_places=decimal_places,
+            **extra,
+        )
+        if examples is not None:
+            kwargs["examples"] = examples
+        if regex is not None:
+            warnings.warn(
+                "`regex` has been deprecated, please use `pattern` instead",
+                category=DeprecationWarning,
+                stacklevel=4,
+            )
+        current_json_schema_extra = json_schema_extra or extra
+        if PYDANTIC_V2:
+            kwargs.update(
+                {
+                    "annotation": annotation,
+                    "alias_priority": alias_priority,
+                    "validation_alias": validation_alias,
+                    "serialization_alias": serialization_alias,
+                    "strict": strict,
+                    "json_schema_extra": current_json_schema_extra,
+                }
+            )
+            kwargs["pattern"] = pattern or regex
+        else:
+            kwargs["regex"] = pattern or regex
+            kwargs.update(**current_json_schema_extra)
+
+        use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset}
+
+        super().__init__(**use_kwargs)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.default})"
+
+
+class Form(Body):
+    def __init__(
+        self,
+        default: Any = Undefined,
+        *,
+        default_factory: Union[Callable[[], Any], None] = _Unset,
+        annotation: Optional[Any] = None,
+        media_type: str = "application/x-www-form-urlencoded",
+        alias: Optional[str] = None,
+        alias_priority: Union[int, None] = _Unset,
+        # TODO: update when deprecating Pydantic v1, import these types
+        # validation_alias: str | AliasPath | AliasChoices | None
+        validation_alias: Union[str, None] = None,
+        serialization_alias: Union[str, None] = None,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        gt: Optional[float] = None,
+        ge: Optional[float] = None,
+        lt: Optional[float] = None,
+        le: Optional[float] = None,
+        min_length: Optional[int] = None,
+        max_length: Optional[int] = None,
+        pattern: Optional[str] = None,
+        regex: Annotated[
+            Optional[str],
+            deprecated(
+                "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead."
+            ),
+        ] = None,
+        discriminator: Union[str, None] = None,
+        strict: Union[bool, None] = _Unset,
+        multiple_of: Union[float, None] = _Unset,
+        allow_inf_nan: Union[bool, None] = _Unset,
+        max_digits: Union[int, None] = _Unset,
+        decimal_places: Union[int, None] = _Unset,
+        examples: Optional[List[Any]] = None,
+        example: Annotated[
+            Optional[Any],
+            deprecated(
+                "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, "
+                "although still supported. Use examples instead."
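`Form` (whose signature begins above) fixes `media_type` to `application/x-www-form-urlencoded` and, as its `super().__init__` call below shows, forces `embed=True`. Typical usage, sketched with invented field names:

```python
from typing import Annotated

from fastapi import FastAPI, Form

app = FastAPI()


@app.post("/login")
async def login(
    # These arrive as URL-encoded form fields, not as a JSON body.
    username: Annotated[str, Form()],
    password: Annotated[str, Form()],
):
    return {"username": username}
```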
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + embed=True, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class File(Form): + def __init__( + self, + default: Any = Undefined, + *, + default_factory: Union[Callable[[], Any], None] = _Unset, + annotation: Optional[Any] = None, + media_type: str = "multipart/form-data", + alias: Optional[str] = None, + alias_priority: Union[int, None] = _Unset, + # TODO: update when deprecating Pydantic v1, import these types + # validation_alias: str | AliasPath | AliasChoices | None + validation_alias: Union[str, None] = None, + serialization_alias: Union[str, None] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + pattern: Optional[str] = None, + regex: Annotated[ + Optional[str], + deprecated( + "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." + ), + ] = None, + discriminator: Union[str, None] = None, + strict: Union[bool, None] = _Unset, + multiple_of: Union[float, None] = _Unset, + allow_inf_nan: Union[bool, None] = _Unset, + max_digits: Union[int, None] = _Unset, + decimal_places: Union[int, None] = _Unset, + examples: Optional[List[Any]] = None, + example: Annotated[ + Optional[Any], + deprecated( + "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " + "although still supported. Use examples instead." 
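`File` above only swaps the media type to `multipart/form-data`. A sketch of the two common ways to receive an upload; the paths are illustrative:

```python
from typing import Annotated

from fastapi import FastAPI, File, UploadFile

app = FastAPI()


@app.post("/files/")
async def create_file(
    # bytes + File() loads the whole upload into memory.
    file: Annotated[bytes, File()],
):
    return {"file_size": len(file)}


@app.post("/uploadfile/")
async def create_upload_file(
    # UploadFile is a spooled file object, better suited to large uploads.
    file: UploadFile,
):
    return {"filename": file.filename}
```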
+ ), + ] = _Unset, + openapi_examples: Optional[Dict[str, Example]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + json_schema_extra: Union[Dict[str, Any], None] = None, + **extra: Any, + ): + super().__init__( + default=default, + default_factory=default_factory, + annotation=annotation, + media_type=media_type, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + pattern=pattern, + regex=regex, + discriminator=discriminator, + strict=strict, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + deprecated=deprecated, + example=example, + examples=examples, + openapi_examples=openapi_examples, + include_in_schema=include_in_schema, + json_schema_extra=json_schema_extra, + **extra, + ) + + +class Depends: + def __init__( + self, dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True + ): + self.dependency = dependency + self.use_cache = use_cache + + def __repr__(self) -> str: + attr = getattr(self.dependency, "__name__", type(self.dependency).__name__) + cache = "" if self.use_cache else ", use_cache=False" + return f"{self.__class__.__name__}({attr}{cache})" + + +class Security(Depends): + def __init__( + self, + dependency: Optional[Callable[..., Any]] = None, + *, + scopes: Optional[Sequence[str]] = None, + use_cache: bool = True, + ): + super().__init__(dependency=dependency, use_cache=use_cache) + self.scopes = scopes or [] diff --git a/venv/lib/python3.12/site-packages/fastapi/py.typed b/venv/lib/python3.12/site-packages/fastapi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/fastapi/requests.py b/venv/lib/python3.12/site-packages/fastapi/requests.py new file mode 100644 index 0000000..d16552c --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/requests.py @@ -0,0 +1,2 @@ +from starlette.requests import HTTPConnection as HTTPConnection # noqa: F401 +from starlette.requests import Request as Request # noqa: F401 diff --git a/venv/lib/python3.12/site-packages/fastapi/responses.py b/venv/lib/python3.12/site-packages/fastapi/responses.py new file mode 100644 index 0000000..6c8db6f --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/responses.py @@ -0,0 +1,48 @@ +from typing import Any + +from starlette.responses import FileResponse as FileResponse # noqa +from starlette.responses import HTMLResponse as HTMLResponse # noqa +from starlette.responses import JSONResponse as JSONResponse # noqa +from starlette.responses import PlainTextResponse as PlainTextResponse # noqa +from starlette.responses import RedirectResponse as RedirectResponse # noqa +from starlette.responses import Response as Response # noqa +from starlette.responses import StreamingResponse as StreamingResponse # noqa + +try: + import ujson +except ImportError: # pragma: nocover + ujson = None # type: ignore + + +try: + import orjson +except ImportError: # pragma: nocover + orjson = None # type: ignore + + +class UJSONResponse(JSONResponse): + """ + JSON response using the high-performance ujson library to serialize data to JSON. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). 
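The `UJSONResponse` and `ORJSONResponse` classes defined in `responses.py` here are opt-in; the assertion in `render()` fires if the backing library isn't installed. A usage sketch:

```python
from fastapi import FastAPI
from fastapi.responses import ORJSONResponse

# Requires `pip install orjson`; otherwise render() asserts at request time.
app = FastAPI(default_response_class=ORJSONResponse)


@app.get("/items/")
async def read_items():
    return [{"item_id": "Foo"}]
```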
+ """ + + def render(self, content: Any) -> bytes: + assert ujson is not None, "ujson must be installed to use UJSONResponse" + return ujson.dumps(content, ensure_ascii=False).encode("utf-8") + + +class ORJSONResponse(JSONResponse): + """ + JSON response using the high-performance orjson library to serialize data to JSON. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). + """ + + def render(self, content: Any) -> bytes: + assert orjson is not None, "orjson must be installed to use ORJSONResponse" + return orjson.dumps( + content, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SERIALIZE_NUMPY + ) diff --git a/venv/lib/python3.12/site-packages/fastapi/routing.py b/venv/lib/python3.12/site-packages/fastapi/routing.py new file mode 100644 index 0000000..acebabf --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/routing.py @@ -0,0 +1,4397 @@ +import asyncio +import dataclasses +import email.message +import inspect +import json +from contextlib import AsyncExitStack +from enum import Enum, IntEnum +from typing import ( + Any, + Callable, + Coroutine, + Dict, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, +) + +from fastapi import params +from fastapi._compat import ( + ModelField, + Undefined, + _get_model_config, + _model_dump, + _normalize_errors, + lenient_issubclass, +) +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import ( + get_body_field, + get_dependant, + get_parameterless_sub_dependant, + get_typed_return_annotation, + solve_dependencies, +) +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import ( + FastAPIError, + RequestValidationError, + ResponseValidationError, + WebSocketRequestValidationError, +) +from fastapi.types import DecoratedCallable, IncEx +from fastapi.utils import ( + create_cloned_field, + create_response_field, + generate_unique_id, + get_value_or_default, + is_body_allowed_for_status_code, +) +from pydantic import BaseModel +from starlette import routing +from starlette.concurrency import run_in_threadpool +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.routing import ( + BaseRoute, + Match, + compile_path, + get_name, + request_response, + websocket_session, +) +from starlette.routing import Mount as Mount # noqa +from starlette.types import ASGIApp, Lifespan, Scope +from starlette.websockets import WebSocket +from typing_extensions import Annotated, Doc, deprecated # type: ignore [attr-defined] + + +def _prepare_response_content( + res: Any, + *, + exclude_unset: bool, + exclude_defaults: bool = False, + exclude_none: bool = False, +) -> Any: + if isinstance(res, BaseModel): + read_with_orm_mode = getattr(_get_model_config(res), "read_with_orm_mode", None) + if read_with_orm_mode: + # Let from_orm extract the data from this model instead of converting + # it now to a dict. + # Otherwise, there's no way to extract lazy data that requires attribute + # access instead of dict iteration, e.g. lazy relationships. 
+ return res + return _model_dump( + res, + by_alias=True, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + elif isinstance(res, list): + return [ + _prepare_response_content( + item, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + for item in res + ] + elif isinstance(res, dict): + return { + k: _prepare_response_content( + v, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + for k, v in res.items() + } + elif dataclasses.is_dataclass(res): + return dataclasses.asdict(res) + return res + + +async def serialize_response( + *, + field: Optional[ModelField] = None, + response_content: Any, + include: Optional[IncEx] = None, + exclude: Optional[IncEx] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + is_coroutine: bool = True, +) -> Any: + if field: + errors = [] + if not hasattr(field, "serialize"): + # pydantic v1 + response_content = _prepare_response_content( + response_content, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + if is_coroutine: + value, errors_ = field.validate(response_content, {}, loc=("response",)) + else: + value, errors_ = await run_in_threadpool( + field.validate, response_content, {}, loc=("response",) + ) + if isinstance(errors_, list): + errors.extend(errors_) + elif errors_: + errors.append(errors_) + if errors: + raise ResponseValidationError( + errors=_normalize_errors(errors), body=response_content + ) + + if hasattr(field, "serialize"): + return field.serialize( + value, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + return jsonable_encoder( + value, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + else: + return jsonable_encoder(response_content) + + +async def run_endpoint_function( + *, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool +) -> Any: + # Only called by get_request_handler. Has been split into its own function to + # facilitate profiling endpoints, since inner functions are harder to profile. 
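`run_endpoint_function` (continued below) is where FastAPI decides how to run an endpoint: coroutines are awaited on the event loop, plain functions are pushed to a threadpool. Sketched from the caller's perspective, with invented routes:

```python
from fastapi import FastAPI

app = FastAPI()


@app.get("/sync")
def read_sync():
    # is_coroutine is False: executed via run_in_threadpool, so it cannot
    # block the event loop.
    return {"mode": "threadpool"}


@app.get("/async")
async def read_async():
    # is_coroutine is True: awaited directly on the event loop.
    return {"mode": "event loop"}
```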
+ assert dependant.call is not None, "dependant.call must be a function" + + if is_coroutine: + return await dependant.call(**values) + else: + return await run_in_threadpool(dependant.call, **values) + + +def get_request_handler( + dependant: Dependant, + body_field: Optional[ModelField] = None, + status_code: Optional[int] = None, + response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse), + response_field: Optional[ModelField] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + dependency_overrides_provider: Optional[Any] = None, +) -> Callable[[Request], Coroutine[Any, Any, Response]]: + assert dependant.call is not None, "dependant.call must be a function" + is_coroutine = asyncio.iscoroutinefunction(dependant.call) + is_body_form = body_field and isinstance(body_field.field_info, params.Form) + if isinstance(response_class, DefaultPlaceholder): + actual_response_class: Type[Response] = response_class.value + else: + actual_response_class = response_class + + async def app(request: Request) -> Response: + exception_to_reraise: Optional[Exception] = None + response: Union[Response, None] = None + async with AsyncExitStack() as async_exit_stack: + # TODO: remove this scope later, after a few releases + # This scope fastapi_astack is no longer used by FastAPI, kept for + # compatibility, just in case + request.scope["fastapi_astack"] = async_exit_stack + try: + body: Any = None + if body_field: + if is_body_form: + body = await request.form() + async_exit_stack.push_async_callback(body.close) + else: + body_bytes = await request.body() + if body_bytes: + json_body: Any = Undefined + content_type_value = request.headers.get("content-type") + if not content_type_value: + json_body = await request.json() + else: + message = email.message.Message() + message["content-type"] = content_type_value + if message.get_content_maintype() == "application": + subtype = message.get_content_subtype() + if subtype == "json" or subtype.endswith("+json"): + json_body = await request.json() + if json_body != Undefined: + body = json_body + else: + body = body_bytes + except json.JSONDecodeError as e: + validation_error = RequestValidationError( + [ + { + "type": "json_invalid", + "loc": ("body", e.pos), + "msg": "JSON decode error", + "input": {}, + "ctx": {"error": e.msg}, + } + ], + body=e.doc, + ) + exception_to_reraise = validation_error + raise validation_error from e + except HTTPException as e: + exception_to_reraise = e + raise + except Exception as e: + http_error = HTTPException( + status_code=400, detail="There was an error parsing the body" + ) + exception_to_reraise = http_error + raise http_error from e + try: + solved_result = await solve_dependencies( + request=request, + dependant=dependant, + body=body, + dependency_overrides_provider=dependency_overrides_provider, + async_exit_stack=async_exit_stack, + ) + values, errors, background_tasks, sub_response, _ = solved_result + except Exception as e: + exception_to_reraise = e + raise e + if errors: + validation_error = RequestValidationError( + _normalize_errors(errors), body=body + ) + exception_to_reraise = validation_error + raise validation_error + else: + try: + raw_response = await run_endpoint_function( + dependant=dependant, values=values, is_coroutine=is_coroutine + ) + 
except Exception as e: + exception_to_reraise = e + raise e + if isinstance(raw_response, Response): + if raw_response.background is None: + raw_response.background = background_tasks + response = raw_response + else: + response_args: Dict[str, Any] = {"background": background_tasks} + # If status_code was set, use it, otherwise use the default from the + # response class, in the case of redirect it's 307 + current_status_code = ( + status_code if status_code else sub_response.status_code + ) + if current_status_code is not None: + response_args["status_code"] = current_status_code + if sub_response.status_code: + response_args["status_code"] = sub_response.status_code + content = await serialize_response( + field=response_field, + response_content=raw_response, + include=response_model_include, + exclude=response_model_exclude, + by_alias=response_model_by_alias, + exclude_unset=response_model_exclude_unset, + exclude_defaults=response_model_exclude_defaults, + exclude_none=response_model_exclude_none, + is_coroutine=is_coroutine, + ) + response = actual_response_class(content, **response_args) + if not is_body_allowed_for_status_code(response.status_code): + response.body = b"" + response.headers.raw.extend(sub_response.headers.raw) + # This exception was possibly handled by the dependency but it should + # still bubble up so that the ServerErrorMiddleware can return a 500 + # or the ExceptionMiddleware can catch and handle any other exceptions + if exception_to_reraise: + raise exception_to_reraise + assert response is not None, "An error occurred while generating the request" + return response + + return app + + +def get_websocket_app( + dependant: Dependant, dependency_overrides_provider: Optional[Any] = None +) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]: + async def app(websocket: WebSocket) -> None: + async with AsyncExitStack() as async_exit_stack: + # TODO: remove this scope later, after a few releases + # This scope fastapi_astack is no longer used by FastAPI, kept for + # compatibility, just in case + websocket.scope["fastapi_astack"] = async_exit_stack + solved_result = await solve_dependencies( + request=websocket, + dependant=dependant, + dependency_overrides_provider=dependency_overrides_provider, + async_exit_stack=async_exit_stack, + ) + values, errors, _, _2, _3 = solved_result + if errors: + raise WebSocketRequestValidationError(_normalize_errors(errors)) + assert dependant.call is not None, "dependant.call must be a function" + await dependant.call(**values) + + return app + + +class APIWebSocketRoute(routing.WebSocketRoute): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + name: Optional[str] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + dependency_overrides_provider: Optional[Any] = None, + ) -> None: + self.path = path + self.endpoint = endpoint + self.name = get_name(endpoint) if name is None else name + self.dependencies = list(dependencies or []) + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + self.dependant = get_dependant(path=self.path_format, call=self.endpoint) + for depends in self.dependencies[::-1]: + self.dependant.dependencies.insert( + 0, + get_parameterless_sub_dependant(depends=depends, path=self.path_format), + ) + + self.app = websocket_session( + get_websocket_app( + dependant=self.dependant, + dependency_overrides_provider=dependency_overrides_provider, + ) + ) + + def matches(self, scope: Scope) -> Tuple[Match, Scope]: + match, child_scope = 
super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRoute(routing.Route): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + name: Optional[str] = None, + methods: Optional[Union[Set[str], List[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + dependency_overrides_provider: Optional[Any] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[["APIRoute"], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + self.path = path + self.endpoint = endpoint + if isinstance(response_model, DefaultPlaceholder): + return_annotation = get_typed_return_annotation(endpoint) + if lenient_issubclass(return_annotation, Response): + response_model = None + else: + response_model = return_annotation + self.response_model = response_model + self.summary = summary + self.response_description = response_description + self.deprecated = deprecated + self.operation_id = operation_id + self.response_model_include = response_model_include + self.response_model_exclude = response_model_exclude + self.response_model_by_alias = response_model_by_alias + self.response_model_exclude_unset = response_model_exclude_unset + self.response_model_exclude_defaults = response_model_exclude_defaults + self.response_model_exclude_none = response_model_exclude_none + self.include_in_schema = include_in_schema + self.response_class = response_class + self.dependency_overrides_provider = dependency_overrides_provider + self.callbacks = callbacks + self.openapi_extra = openapi_extra + self.generate_unique_id_function = generate_unique_id_function + self.tags = tags or [] + self.responses = responses or {} + self.name = get_name(endpoint) if name is None else name + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + if methods is None: + methods = ["GET"] + self.methods: Set[str] = {method.upper() for method in methods} + if isinstance(generate_unique_id_function, DefaultPlaceholder): + current_generate_unique_id: Callable[ + ["APIRoute"], str + ] = generate_unique_id_function.value + else: + current_generate_unique_id = generate_unique_id_function + self.unique_id = self.operation_id or current_generate_unique_id(self) + # normalize enums e.g. 
http.HTTPStatus + if isinstance(status_code, IntEnum): + status_code = int(status_code) + self.status_code = status_code + if self.response_model: + assert is_body_allowed_for_status_code( + status_code + ), f"Status code {status_code} must not have a response body" + response_name = "Response_" + self.unique_id + self.response_field = create_response_field( + name=response_name, + type_=self.response_model, + mode="serialization", + ) + # Create a clone of the field, so that a Pydantic submodel is not returned + # as is just because it's an instance of a subclass of a more limited class + # e.g. UserInDB (containing hashed_password) could be a subclass of User + # that doesn't have the hashed_password. But because it's a subclass, it + # would pass the validation and be returned as is. + # By being a new field, no inheritance will be passed as is. A new model + # will always be created. + # TODO: remove when deprecating Pydantic v1 + self.secure_cloned_response_field: Optional[ + ModelField + ] = create_cloned_field(self.response_field) + else: + self.response_field = None # type: ignore + self.secure_cloned_response_field = None + self.dependencies = list(dependencies or []) + self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "") + # if a "form feed" character (page break) is found in the description text, + # truncate description text to the content preceding the first "form feed" + self.description = self.description.split("\f")[0].strip() + response_fields = {} + for additional_status_code, response in self.responses.items(): + assert isinstance(response, dict), "An additional response must be a dict" + model = response.get("model") + if model: + assert is_body_allowed_for_status_code( + additional_status_code + ), f"Status code {additional_status_code} must not have a response body" + response_name = f"Response_{additional_status_code}_{self.unique_id}" + response_field = create_response_field(name=response_name, type_=model) + response_fields[additional_status_code] = response_field + if response_fields: + self.response_fields: Dict[Union[int, str], ModelField] = response_fields + else: + self.response_fields = {} + + assert callable(endpoint), "An endpoint must be a callable" + self.dependant = get_dependant(path=self.path_format, call=self.endpoint) + for depends in self.dependencies[::-1]: + self.dependant.dependencies.insert( + 0, + get_parameterless_sub_dependant(depends=depends, path=self.path_format), + ) + self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id) + self.app = request_response(self.get_route_handler()) + + def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]: + return get_request_handler( + dependant=self.dependant, + body_field=self.body_field, + status_code=self.status_code, + response_class=self.response_class, + response_field=self.secure_cloned_response_field, + response_model_include=self.response_model_include, + response_model_exclude=self.response_model_exclude, + response_model_by_alias=self.response_model_by_alias, + response_model_exclude_unset=self.response_model_exclude_unset, + response_model_exclude_defaults=self.response_model_exclude_defaults, + response_model_exclude_none=self.response_model_exclude_none, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + + def matches(self, scope: Scope) -> Tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + 
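`APIRoute.get_route_handler()` above is the hook point used by the FastAPI how-to on custom `APIRoute` classes. A minimal timing sketch; the class name and header name are invented for illustration:

```python
import time
from typing import Callable

from fastapi import FastAPI, Request, Response
from fastapi.routing import APIRoute


class TimedRoute(APIRoute):
    def get_route_handler(self) -> Callable:
        original_route_handler = super().get_route_handler()

        async def custom_route_handler(request: Request) -> Response:
            start = time.perf_counter()
            response = await original_route_handler(request)
            # Attach how long the handler took; purely illustrative.
            response.headers["X-Response-Time"] = str(time.perf_counter() - start)
            return response

        return custom_route_handler


app = FastAPI()
app.router.route_class = TimedRoute


@app.get("/ping")
async def ping():
    return {"ping": "pong"}
```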
+ +class APIRouter(routing.Router): + """ + `APIRouter` class, used to group *path operations*, for example to structure + an app in multiple files. It would then be included in the `FastAPI` app, or + in another `APIRouter` (ultimately included in the app). + + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/). + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + + @router.get("/users/", tags=["users"]) + async def read_users(): + return [{"username": "Rick"}, {"username": "Morty"}] + + + app.include_router(router) + ``` + """ + + def __init__( + self, + *, + prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to all the *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to all the + *path operations* in this router. + + Read more about it in the + [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + default_response_class: Annotated[ + Type[Response], + Doc( + """ + The default response class to be used. + + Read more in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). + """ + ), + ] = Default(JSONResponse), + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses to be shown in OpenAPI. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). + + And in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + OpenAPI callbacks that should apply to all *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + routes: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Starlette and supported for compatibility. + + --- + + A list of routes to serve incoming HTTP and WebSocket requests. + """ + ), + deprecated( + """ + You normally wouldn't use this parameter with FastAPI, it is inherited + from Starlette and supported for compatibility. + + In FastAPI, you normally would use the *path operation methods*, + like `router.get()`, `router.post()`, etc. 
+ """ + ), + ] = None, + redirect_slashes: Annotated[ + bool, + Doc( + """ + Whether to detect and redirect slashes in URLs when the client doesn't + use the same format. + """ + ), + ] = True, + default: Annotated[ + Optional[ASGIApp], + Doc( + """ + Default function handler for this router. Used to handle + 404 Not Found errors. + """ + ), + ] = None, + dependency_overrides_provider: Annotated[ + Optional[Any], + Doc( + """ + Only used internally by FastAPI to handle dependency overrides. + + You shouldn't need to use it. It normally points to the `FastAPI` app + object. + """ + ), + ] = None, + route_class: Annotated[ + Type[APIRoute], + Doc( + """ + Custom route (*path operation*) class to be used by this router. + + Read more about it in the + [FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router). + """ + ), + ] = APIRoute, + on_startup: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of startup event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + on_shutdown: Annotated[ + Optional[Sequence[Callable[[], Any]]], + Doc( + """ + A list of shutdown event handler functions. + + You should instead use the `lifespan` handlers. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + # the generic to Lifespan[AppType] is the type of the top level application + # which the router cannot know statically, so we use typing.Any + lifespan: Annotated[ + Optional[Lifespan[Any]], + Doc( + """ + A `Lifespan` context manager handler. This replaces `startup` and + `shutdown` functions with a single context manager. + + Read more in the + [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all *path operations* in this router as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + To include (or not) all the *path operations* in this router in the + generated OpenAPI. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
+ """ + ), + ] = Default(generate_unique_id), + ) -> None: + super().__init__( + routes=routes, + redirect_slashes=redirect_slashes, + default=default, + on_startup=on_startup, + on_shutdown=on_shutdown, + lifespan=lifespan, + ) + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith( + "/" + ), "A path prefix must not end with '/', as the routes will start with '/'" + self.prefix = prefix + self.tags: List[Union[str, Enum]] = tags or [] + self.dependencies = list(dependencies or []) + self.deprecated = deprecated + self.include_in_schema = include_in_schema + self.responses = responses or {} + self.callbacks = callbacks or [] + self.dependency_overrides_provider = dependency_overrides_provider + self.route_class = route_class + self.default_response_class = default_response_class + self.generate_unique_id_function = generate_unique_id_function + + def route( + self, + path: str, + methods: Optional[List[str]] = None, + name: Optional[str] = None, + include_in_schema: bool = True, + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_route( + path, + func, + methods=methods, + name=name, + include_in_schema=include_in_schema, + ) + return func + + return decorator + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[Union[Set[str], List[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + route_class_override: Optional[Type[APIRoute]] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[[APIRoute], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + route_class = route_class_override or self.route_class + responses = responses or {} + combined_responses = {**self.responses, **responses} + current_response_class = get_value_or_default( + response_class, self.default_response_class + ) + current_tags = self.tags.copy() + if tags: + current_tags.extend(tags) + current_dependencies = self.dependencies.copy() + if dependencies: + current_dependencies.extend(dependencies) + current_callbacks = self.callbacks.copy() + if callbacks: + current_callbacks.extend(callbacks) + current_generate_unique_id = get_value_or_default( + generate_unique_id_function, self.generate_unique_id_function + ) + route = route_class( + self.prefix + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=summary, + description=description, + response_description=response_description, + 
responses=combined_responses, + deprecated=deprecated or self.deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema and self.include_in_schema, + response_class=current_response_class, + name=name, + dependency_overrides_provider=self.dependency_overrides_provider, + callbacks=current_callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + self.routes.append(route) + + def api_route( + self, + path: str, + *, + response_model: Any = Default(None), + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[IncEx] = None, + response_model_exclude: Optional[IncEx] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, + path: str, + endpoint: Callable[..., Any], + name: Optional[str] = None, + *, + dependencies: Optional[Sequence[params.Depends]] = None, + ) -> None: + current_dependencies = self.dependencies.copy() + if dependencies: + current_dependencies.extend(dependencies) + + route = APIWebSocketRoute( + self.prefix + path, + endpoint=endpoint, + name=name, + dependencies=current_dependencies, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + self.routes.append(route) + + def websocket( + self, + path: Annotated[ + str, + Doc( + """ + WebSocket 
path.
+                """
+            ),
+        ],
+        name: Annotated[
+            Optional[str],
+            Doc(
+                """
+                A name for the WebSocket. Only used internally.
+                """
+            ),
+        ] = None,
+        *,
+        dependencies: Annotated[
+            Optional[Sequence[params.Depends]],
+            Doc(
+                """
+                A list of dependencies (using `Depends()`) to be used for this
+                WebSocket.
+
+                Read more about it in the
+                [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/).
+                """
+            ),
+        ] = None,
+    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
+        """
+        Decorate a WebSocket function.
+
+        Read more about it in the
+        [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/).
+
+        ## Example
+
+        ```python
+        from fastapi import APIRouter, FastAPI, WebSocket
+
+        app = FastAPI()
+        router = APIRouter()
+
+        @router.websocket("/ws")
+        async def websocket_endpoint(websocket: WebSocket):
+            await websocket.accept()
+            while True:
+                data = await websocket.receive_text()
+                await websocket.send_text(f"Message text was: {data}")
+
+        app.include_router(router)
+        ```
+        """
+
+        def decorator(func: DecoratedCallable) -> DecoratedCallable:
+            self.add_api_websocket_route(
+                path, func, name=name, dependencies=dependencies
+            )
+            return func
+
+        return decorator
+
+    def websocket_route(
+        self, path: str, name: Union[str, None] = None
+    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
+        def decorator(func: DecoratedCallable) -> DecoratedCallable:
+            self.add_websocket_route(path, func, name=name)
+            return func
+
+        return decorator
+
+    def include_router(
+        self,
+        router: Annotated["APIRouter", Doc("The `APIRouter` to include.")],
+        *,
+        prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "",
+        tags: Annotated[
+            Optional[List[Union[str, Enum]]],
+            Doc(
+                """
+                A list of tags to be applied to all the *path operations* in this
+                router.
+
+                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+                Read more about it in the
+                [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
+                """
+            ),
+        ] = None,
+        dependencies: Annotated[
+            Optional[Sequence[params.Depends]],
+            Doc(
+                """
+                A list of dependencies (using `Depends()`) to be applied to all the
+                *path operations* in this router.
+
+                Read more about it in the
+                [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
+                """
+            ),
+        ] = None,
+        default_response_class: Annotated[
+            Type[Response],
+            Doc(
+                """
+                The default response class to be used.
+
+                Read more in the
+                [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
+                """
+            ),
+        ] = Default(JSONResponse),
+        responses: Annotated[
+            Optional[Dict[Union[int, str], Dict[str, Any]]],
+            Doc(
+                """
+                Additional responses to be shown in OpenAPI.
+
+                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+                Read more about it in the
+                [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/).
+
+                And in the
+                [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
+ """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + OpenAPI callbacks that should apply to all *path operations* in this + router. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark all *path operations* in this router as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include (or not) all the *path operations* in this router in the + generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = True, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> None: + """ + Include another `APIRouter` in the same current `APIRouter`. + + Read more about it in the + [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + internal_router = APIRouter() + users_router = APIRouter() + + @users_router.get("/users/") + def read_users(): + return [{"name": "Rick"}, {"name": "Morty"}] + + internal_router.include_router(users_router) + app.include_router(internal_router) + ``` + """ + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith( + "/" + ), "A path prefix must not end with '/', as the routes will start with '/'" + else: + for r in router.routes: + path = getattr(r, "path") # noqa: B009 + name = getattr(r, "name", "unknown") + if path is not None and not path: + raise FastAPIError( + f"Prefix and path cannot be both empty (path operation: {name})" + ) + if responses is None: + responses = {} + for route in router.routes: + if isinstance(route, APIRoute): + combined_responses = {**responses, **route.responses} + use_response_class = get_value_or_default( + route.response_class, + router.default_response_class, + default_response_class, + self.default_response_class, + ) + current_tags = [] + if tags: + current_tags.extend(tags) + if route.tags: + current_tags.extend(route.tags) + current_dependencies: List[params.Depends] = [] + if dependencies: + current_dependencies.extend(dependencies) + if route.dependencies: + current_dependencies.extend(route.dependencies) + current_callbacks = [] + if callbacks: + current_callbacks.extend(callbacks) + if route.callbacks: + current_callbacks.extend(route.callbacks) + current_generate_unique_id = get_value_or_default( + route.generate_unique_id_function, + router.generate_unique_id_function, + generate_unique_id_function, + self.generate_unique_id_function, + ) + self.add_api_route( + prefix + route.path, + route.endpoint, + 
response_model=route.response_model, + status_code=route.status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=route.summary, + description=route.description, + response_description=route.response_description, + responses=combined_responses, + deprecated=route.deprecated or deprecated or self.deprecated, + methods=route.methods, + operation_id=route.operation_id, + response_model_include=route.response_model_include, + response_model_exclude=route.response_model_exclude, + response_model_by_alias=route.response_model_by_alias, + response_model_exclude_unset=route.response_model_exclude_unset, + response_model_exclude_defaults=route.response_model_exclude_defaults, + response_model_exclude_none=route.response_model_exclude_none, + include_in_schema=route.include_in_schema + and self.include_in_schema + and include_in_schema, + response_class=use_response_class, + name=route.name, + route_class_override=type(route), + callbacks=current_callbacks, + openapi_extra=route.openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + elif isinstance(route, routing.Route): + methods = list(route.methods or []) + self.add_route( + prefix + route.path, + route.endpoint, + methods=methods, + include_in_schema=route.include_in_schema, + name=route.name, + ) + elif isinstance(route, APIWebSocketRoute): + current_dependencies = [] + if dependencies: + current_dependencies.extend(dependencies) + if route.dependencies: + current_dependencies.extend(route.dependencies) + self.add_api_websocket_route( + prefix + route.path, + route.endpoint, + dependencies=current_dependencies, + name=route.name, + ) + elif isinstance(route, routing.WebSocketRoute): + self.add_websocket_route( + prefix + route.path, route.endpoint, name=route.name + ) + for handler in router.on_startup: + self.add_event_handler("startup", handler) + for handler in router.on_shutdown: + self.add_event_handler("shutdown", handler) + + def get( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). 
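+
+ A minimal sketch of the filtering behaviour (editor's addition;
+ `UserOut` and the endpoint are illustrative):
+
+ ```python
+ from pydantic import BaseModel
+
+ class UserOut(BaseModel):
+ username: str
+
+ @router.get("/me", response_model=UserOut)
+ def read_me():
+ # "password" is not a field of UserOut, so it is filtered
+ # out of the JSON sent to the client.
+ return {"username": "rick", "password": "secret"}
+ ```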
+ """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
+ """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). 
+ """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP GET operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + @router.get("/items/") + def read_items(): + return [{"name": "Empanada"}, {"name": "Arepa"}] + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["GET"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. 
+ * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. 
+ + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). 
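+
+ A short sketch (editor's addition; `Item` is the model from the
+ example below, whose `description` defaults to `None`):
+
+ ```python
+ @router.put(
+ "/items/{item_id}",
+ response_model=Item,
+ response_model_exclude_none=True,
+ )
+ def replace_item(item_id: str, item: Item):
+ # If item.description is None, the key is omitted from the
+ # JSON response entirely.
+ return item
+ ```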
+ """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PUT operation. 
+ + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.put("/items/{item_id}") + def replace_item(item_id: str, item: Item): + return {"message": "Item replaced", "id": item_id} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PUT"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
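+
+ Tags can be plain strings or `Enum` members, as in this editor's
+ sketch (`Tags` is an illustrative enum):
+
+ ```python
+ from enum import Enum
+
+ class Tags(Enum):
+ items = "items"
+
+ @router.post("/items/", tags=[Tags.items, "public"])
+ def create_item():
+ return {"message": "Item created"}
+ ```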
+ """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. 
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
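+
+ A minimal sketch of the documented pattern (editor's addition;
+ router names and the callback URL expression are illustrative):
+
+ ```python
+ from fastapi import APIRouter
+
+ callback_router = APIRouter()
+
+ @callback_router.post("{$callback_url}/events")
+ def event_notification():
+ pass # documentation only, never executed
+
+ @router.post("/invoices/", callbacks=callback_router.routes)
+ def create_invoice():
+ return {"msg": "invoice received"}
+ ```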
+ """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP POST operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.post("/items/") + def create_item(item: Item): + return {"message": "Item created"} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["POST"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. 
But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
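+
+ For example (editor's sketch):
+
+ ```python
+ @router.delete("/items/{item_id}", operation_id="delete_item")
+ def delete_item(item_id: str):
+ return {"message": "Item deleted"}
+ ```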
+ """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). 
+ """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP DELETE operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + @router.delete("/items/{item_id}") + def delete_item(item_id: str): + return {"message": "Item deleted"} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["DELETE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. 
So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. 
+ + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. 
+ + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP OPTIONS operation. 
+ + ## Example + + ```python + from fastapi import APIRouter, FastAPI + + app = FastAPI() + router = APIRouter() + + @router.options("/items/") + def get_item_options(): + return {"additions": ["Aji", "Guacamole"]} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["OPTIONS"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
+ """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. 
+ + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
+ """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP HEAD operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.head("/items/", status_code=204) + def get_items_headers(response: Response): + response.headers["X-Cat-Dog"] = "Alone in the world" + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["HEAD"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. + + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. 
But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
+ """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). 
+ """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP PATCH operation. + + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.patch("/items/") + def update_item(item: Item): + return {"message": "Item updated in place"} + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PATCH"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: Annotated[ + str, + Doc( + """ + The URL path to be used for this *path operation*. + + For example, in `http://example.com/items`, the path is `/items`. + """ + ), + ], + *, + response_model: Annotated[ + Any, + Doc( + """ + The type to use for the response. 
+ + It could be any valid Pydantic *field* type. So, it doesn't have to + be a Pydantic model, it could be other things, like a `list`, `dict`, + etc. + + It will be used for: + + * Documentation: the generated OpenAPI (and the UI at `/docs`) will + show it as the response (JSON Schema). + * Serialization: you could return an arbitrary object and the + `response_model` would be used to serialize that object into the + corresponding JSON. + * Filtering: the JSON sent to the client will only contain the data + (fields) defined in the `response_model`. If you returned an object + that contains an attribute `password` but the `response_model` does + not include that field, the JSON sent to the client would not have + that `password`. + * Validation: whatever you return will be serialized with the + `response_model`, converting any data as necessary to generate the + corresponding JSON. But if the data in the object returned is not + valid, that would mean a violation of the contract with the client, + so it's an error from the API developer. So, FastAPI will raise an + error and return a 500 error code (Internal Server Error). + + Read more about it in the + [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). + """ + ), + ] = Default(None), + status_code: Annotated[ + Optional[int], + Doc( + """ + The default status code to be used for the response. + + You could override the status code by returning a response directly. + + Read more about it in the + [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). + """ + ), + ] = None, + tags: Annotated[ + Optional[List[Union[str, Enum]]], + Doc( + """ + A list of tags to be applied to the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). + """ + ), + ] = None, + dependencies: Annotated[ + Optional[Sequence[params.Depends]], + Doc( + """ + A list of dependencies (using `Depends()`) to be applied to the + *path operation*. + + Read more about it in the + [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). + """ + ), + ] = None, + summary: Annotated[ + Optional[str], + Doc( + """ + A summary for the *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + A description for the *path operation*. + + If not provided, it will be extracted automatically from the docstring + of the *path operation function*. + + It can contain Markdown. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). + """ + ), + ] = None, + response_description: Annotated[ + str, + Doc( + """ + The description for the default response. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
+ """ + ), + ] = "Successful Response", + responses: Annotated[ + Optional[Dict[Union[int, str], Dict[str, Any]]], + Doc( + """ + Additional responses that could be returned by this *path operation*. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + deprecated: Annotated[ + Optional[bool], + Doc( + """ + Mark this *path operation* as deprecated. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + operation_id: Annotated[ + Optional[str], + Doc( + """ + Custom operation ID to be used by this *path operation*. + + By default, it is generated automatically. + + If you provide a custom operation ID, you need to make sure it is + unique for the whole API. + + You can customize the + operation ID generation with the parameter + `generate_unique_id_function` in the `FastAPI` class. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = None, + response_model_include: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to include only certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_exclude: Annotated[ + Optional[IncEx], + Doc( + """ + Configuration passed to Pydantic to exclude certain fields in the + response data. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = None, + response_model_by_alias: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response model + should be serialized by alias when an alias is used. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). + """ + ), + ] = True, + response_model_exclude_unset: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that were not set and + have their default values. This is different from + `response_model_exclude_defaults` in that if the fields are set, + they will be included in the response, even if the value is the same + as the default. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). + """ + ), + ] = False, + response_model_exclude_defaults: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data + should have all the fields, including the ones that have the same value + as the default. This is different from `response_model_exclude_unset` + in that if the fields are set but contain the same default values, + they will be excluded from the response. + + When `True`, default values are omitted from the response. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). 
+ """ + ), + ] = False, + response_model_exclude_none: Annotated[ + bool, + Doc( + """ + Configuration passed to Pydantic to define if the response data should + exclude fields set to `None`. + + This is much simpler (less smart) than `response_model_exclude_unset` + and `response_model_exclude_defaults`. You probably want to use one of + those two instead of this one, as those allow returning `None` values + when it makes sense. + + Read more about it in the + [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). + """ + ), + ] = False, + include_in_schema: Annotated[ + bool, + Doc( + """ + Include this *path operation* in the generated OpenAPI schema. + + This affects the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). + """ + ), + ] = True, + response_class: Annotated[ + Type[Response], + Doc( + """ + Response class to be used for this *path operation*. + + This will not be used if you return a response directly. + + Read more about it in the + [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). + """ + ), + ] = Default(JSONResponse), + name: Annotated[ + Optional[str], + Doc( + """ + Name for this *path operation*. Only used internally. + """ + ), + ] = None, + callbacks: Annotated[ + Optional[List[BaseRoute]], + Doc( + """ + List of *path operations* that will be used as OpenAPI callbacks. + + This is only for OpenAPI documentation, the callbacks won't be used + directly. + + It will be added to the generated OpenAPI (e.g. visible at `/docs`). + + Read more about it in the + [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). + """ + ), + ] = None, + openapi_extra: Annotated[ + Optional[Dict[str, Any]], + Doc( + """ + Extra metadata to be included in the OpenAPI schema for this *path + operation*. + + Read more about it in the + [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). + """ + ), + ] = None, + generate_unique_id_function: Annotated[ + Callable[[APIRoute], str], + Doc( + """ + Customize the function used to generate unique IDs for the *path + operations* shown in the generated OpenAPI. + + This is particularly useful when automatically generating clients or + SDKs for your API. + + Read more about it in the + [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). + """ + ), + ] = Default(generate_unique_id), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add a *path operation* using an HTTP TRACE operation. 
+ + ## Example + + ```python + from fastapi import APIRouter, FastAPI + from pydantic import BaseModel + + class Item(BaseModel): + name: str + description: str | None = None + + app = FastAPI() + router = APIRouter() + + @router.trace("/items/{item_id}") + def trace_item(item_id: str): + return None + + app.include_router(router) + ``` + """ + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["TRACE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + @deprecated( + """ + on_event is deprecated, use lifespan event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/). + """ + ) + def on_event( + self, + event_type: Annotated[ + str, + Doc( + """ + The type of event. `startup` or `shutdown`. + """ + ), + ], + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """ + Add an event handler for the router. + + `on_event` is deprecated, use `lifespan` event handlers instead. + + Read more about it in the + [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). 
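Since `on_event` is deprecated here in favor of lifespan handlers, a minimal sketch of the replacement the docstring points to (the startup/shutdown work is purely illustrative):

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup work goes before the yield (e.g. opening a DB pool).
    app.state.started = True
    yield
    # Shutdown work goes after the yield.
    app.state.started = False


app = FastAPI(lifespan=lifespan)
```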
+ """ + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_event_handler(event_type, func) + return func + + return decorator diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__init__.py b/venv/lib/python3.12/site-packages/fastapi/security/__init__.py new file mode 100644 index 0000000..3aa6bf2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/security/__init__.py @@ -0,0 +1,15 @@ +from .api_key import APIKeyCookie as APIKeyCookie +from .api_key import APIKeyHeader as APIKeyHeader +from .api_key import APIKeyQuery as APIKeyQuery +from .http import HTTPAuthorizationCredentials as HTTPAuthorizationCredentials +from .http import HTTPBasic as HTTPBasic +from .http import HTTPBasicCredentials as HTTPBasicCredentials +from .http import HTTPBearer as HTTPBearer +from .http import HTTPDigest as HTTPDigest +from .oauth2 import OAuth2 as OAuth2 +from .oauth2 import OAuth2AuthorizationCodeBearer as OAuth2AuthorizationCodeBearer +from .oauth2 import OAuth2PasswordBearer as OAuth2PasswordBearer +from .oauth2 import OAuth2PasswordRequestForm as OAuth2PasswordRequestForm +from .oauth2 import OAuth2PasswordRequestFormStrict as OAuth2PasswordRequestFormStrict +from .oauth2 import SecurityScopes as SecurityScopes +from .open_id_connect_url import OpenIdConnect as OpenIdConnect diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a23f42d Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/api_key.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/api_key.cpython-312.pyc new file mode 100644 index 0000000..059a914 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/api_key.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..362cee7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/http.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/http.cpython-312.pyc new file mode 100644 index 0000000..84e5c9f Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/http.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/oauth2.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/oauth2.cpython-312.pyc new file mode 100644 index 0000000..2eedd92 Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/oauth2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc new file mode 100644 index 0000000..9fa5d6f Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/utils.cpython-312.pyc
new file mode 100644
index 0000000..79e8ba3
Binary files /dev/null and b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/utils.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/fastapi/security/api_key.py b/venv/lib/python3.12/site-packages/fastapi/security/api_key.py
new file mode 100644
index 0000000..b1a6b4f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/fastapi/security/api_key.py
@@ -0,0 +1,301 @@
+from typing import Optional
+
+from fastapi.openapi.models import APIKey, APIKeyIn
+from fastapi.security.base import SecurityBase
+from starlette.exceptions import HTTPException
+from starlette.requests import Request
+from starlette.status import HTTP_403_FORBIDDEN
+from typing_extensions import Annotated, Doc  # type: ignore [attr-defined]
+
+
+class APIKeyBase(SecurityBase):
+    pass
+
+
+class APIKeyQuery(APIKeyBase):
+    """
+    API key authentication using a query parameter.
+
+    This defines the name of the query parameter that should be provided in the request
+    with the API key and integrates that into the OpenAPI documentation. It extracts
+    the key value sent in the query parameter automatically and provides it as the
+    dependency result. But it doesn't define how to send that API key to the client.
+
+    ## Usage
+
+    Create an instance object and use that object as the dependency in `Depends()`.
+
+    The dependency result will be a string containing the key value.
+
+    ## Example
+
+    ```python
+    from fastapi import Depends, FastAPI
+    from fastapi.security import APIKeyQuery
+
+    app = FastAPI()
+
+    query_scheme = APIKeyQuery(name="api_key")
+
+
+    @app.get("/items/")
+    async def read_items(api_key: str = Depends(query_scheme)):
+        return {"api_key": api_key}
+    ```
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Annotated[
+            str,
+            Doc("Query parameter name."),
+        ],
+        scheme_name: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Security scheme name.
+
+                It will be included in the generated OpenAPI (e.g. visible at `/docs`).
+                """
+            ),
+        ] = None,
+        description: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Security scheme description.
+
+                It will be included in the generated OpenAPI (e.g. visible at `/docs`).
+                """
+            ),
+        ] = None,
+        auto_error: Annotated[
+            bool,
+            Doc(
+                """
+                By default, if the query parameter is not provided, `APIKeyQuery` will
+                automatically cancel the request and send the client an error.
+
+                If `auto_error` is set to `False`, when the query parameter is not
+                available, instead of erroring out, the dependency result will be
+                `None`.
+
+                This is useful when you want to have optional authentication.
+
+                It is also useful when you want to have authentication that can be
+                provided in one of multiple optional ways (for example, in a query
+                parameter or in an HTTP Bearer token).
+ """ + ), + ] = True, + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.query}, # type: ignore[arg-type] + name=name, + description=description, + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.query_params.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key + + +class APIKeyHeader(APIKeyBase): + """ + API key authentication using a header. + + This defines the name of the header that should be provided in the request with + the API key and integrates that into the OpenAPI documentation. It extracts + the key value sent in the header automatically and provides it as the dependency + result. But it doesn't define how to send that key to the client. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be a string containing the key value. + + ## Example + + ```python + from fastapi import Depends, FastAPI + from fastapi.security import APIKeyHeader + + app = FastAPI() + + header_scheme = APIKeyHeader(name="x-key") + + + @app.get("/items/") + async def read_items(key: str = Depends(header_scheme)): + return {"key": key} + ``` + """ + + def __init__( + self, + *, + name: Annotated[str, Doc("Header name.")], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the header is not provided, `APIKeyHeader` will + automatically cancel the request and send the client an error. + + If `auto_error` is set to `False`, when the header is not available, + instead of erroring out, the dependency result will be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in a header or + in an HTTP Bearer token). + """ + ), + ] = True, + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.header}, # type: ignore[arg-type] + name=name, + description=description, + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.headers.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key + + +class APIKeyCookie(APIKeyBase): + """ + API key authentication using a cookie. + + This defines the name of the cookie that should be provided in the request with + the API key and integrates that into the OpenAPI documentation. It extracts + the key value sent in the cookie automatically and provides it as the dependency + result. But it doesn't define how to set that cookie. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be a string containing the key value. 
+
+    ## Example
+
+    ```python
+    from fastapi import Depends, FastAPI
+    from fastapi.security import APIKeyCookie
+
+    app = FastAPI()
+
+    cookie_scheme = APIKeyCookie(name="session")
+
+
+    @app.get("/items/")
+    async def read_items(session: str = Depends(cookie_scheme)):
+        return {"session": session}
+    ```
+    """
+
+    def __init__(
+        self,
+        *,
+        name: Annotated[str, Doc("Cookie name.")],
+        scheme_name: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Security scheme name.
+
+                It will be included in the generated OpenAPI (e.g. visible at `/docs`).
+                """
+            ),
+        ] = None,
+        description: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Security scheme description.
+
+                It will be included in the generated OpenAPI (e.g. visible at `/docs`).
+                """
+            ),
+        ] = None,
+        auto_error: Annotated[
+            bool,
+            Doc(
+                """
+                By default, if the cookie is not provided, `APIKeyCookie` will
+                automatically cancel the request and send the client an error.
+
+                If `auto_error` is set to `False`, when the cookie is not available,
+                instead of erroring out, the dependency result will be `None`.
+
+                This is useful when you want to have optional authentication.
+
+                It is also useful when you want to have authentication that can be
+                provided in one of multiple optional ways (for example, in a cookie or
+                in an HTTP Bearer token).
+                """
+            ),
+        ] = True,
+    ):
+        self.model: APIKey = APIKey(
+            **{"in": APIKeyIn.cookie},  # type: ignore[arg-type]
+            name=name,
+            description=description,
+        )
+        self.scheme_name = scheme_name or self.__class__.__name__
+        self.auto_error = auto_error
+
+    async def __call__(self, request: Request) -> Optional[str]:
+        api_key = request.cookies.get(self.model.name)
+        if not api_key:
+            if self.auto_error:
+                raise HTTPException(
+                    status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
+                )
+            else:
+                return None
+        return api_key
diff --git a/venv/lib/python3.12/site-packages/fastapi/security/base.py b/venv/lib/python3.12/site-packages/fastapi/security/base.py
new file mode 100644
index 0000000..c43555d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/fastapi/security/base.py
@@ -0,0 +1,6 @@
+from fastapi.openapi.models import SecurityBase as SecurityBaseModel
+
+
+class SecurityBase:
+    model: SecurityBaseModel
+    scheme_name: str
diff --git a/venv/lib/python3.12/site-packages/fastapi/security/http.py b/venv/lib/python3.12/site-packages/fastapi/security/http.py
new file mode 100644
index 0000000..738455d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/fastapi/security/http.py
@@ -0,0 +1,420 @@
+import binascii
+from base64 import b64decode
+from typing import Optional
+
+from fastapi.exceptions import HTTPException
+from fastapi.openapi.models import HTTPBase as HTTPBaseModel
+from fastapi.openapi.models import HTTPBearer as HTTPBearerModel
+from fastapi.security.base import SecurityBase
+from fastapi.security.utils import get_authorization_scheme_param
+from pydantic import BaseModel
+from starlette.requests import Request
+from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
+from typing_extensions import Annotated, Doc  # type: ignore [attr-defined]
+
+
+class HTTPBasicCredentials(BaseModel):
+    """
+    The HTTP Basic credentials given as the result of using `HTTPBasic` in a
+    dependency.
+
+    Read more about it in the
+    [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/).
+ """ + + username: Annotated[str, Doc("The HTTP Basic username.")] + password: Annotated[str, Doc("The HTTP Basic password.")] + + +class HTTPAuthorizationCredentials(BaseModel): + """ + The HTTP authorization credentials in the result of using `HTTPBearer` or + `HTTPDigest` in a dependency. + + The HTTP authorization header value is split by the first space. + + The first part is the `scheme`, the second part is the `credentials`. + + For example, in an HTTP Bearer token scheme, the client will send a header + like: + + ``` + Authorization: Bearer deadbeef12346 + ``` + + In this case: + + * `scheme` will have the value `"Bearer"` + * `credentials` will have the value `"deadbeef12346"` + """ + + scheme: Annotated[ + str, + Doc( + """ + The HTTP authorization scheme extracted from the header value. + """ + ), + ] + credentials: Annotated[ + str, + Doc( + """ + The HTTP authorization credentials extracted from the header value. + """ + ), + ] + + +class HTTPBase(SecurityBase): + def __init__( + self, + *, + scheme: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBaseModel(scheme=scheme, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPBasic(HTTPBase): + """ + HTTP Basic authentication. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be an `HTTPBasicCredentials` object containing the + `username` and the `password`. + + Read more about it in the + [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import HTTPBasic, HTTPBasicCredentials + + app = FastAPI() + + security = HTTPBasic() + + + @app.get("/users/me") + def read_current_user(credentials: Annotated[HTTPBasicCredentials, Depends(security)]): + return {"username": credentials.username, "password": credentials.password} + ``` + """ + + def __init__( + self, + *, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + realm: Annotated[ + Optional[str], + Doc( + """ + HTTP Basic authentication realm. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the HTTP Basic authentication is not provided (a + header), `HTTPBasic` will automatically cancel the request and send the + client an error. + + If `auto_error` is set to `False`, when the HTTP Basic authentication + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. 
+
+                It is also useful when you want to have authentication that can be
+                provided in one of multiple optional ways (for example, in HTTP Basic
+                authentication or in an HTTP Bearer token).
+                """
+            ),
+        ] = True,
+    ):
+        self.model = HTTPBaseModel(scheme="basic", description=description)
+        self.scheme_name = scheme_name or self.__class__.__name__
+        self.realm = realm
+        self.auto_error = auto_error
+
+    async def __call__(  # type: ignore
+        self, request: Request
+    ) -> Optional[HTTPBasicCredentials]:
+        authorization = request.headers.get("Authorization")
+        scheme, param = get_authorization_scheme_param(authorization)
+        if self.realm:
+            unauthorized_headers = {"WWW-Authenticate": f'Basic realm="{self.realm}"'}
+        else:
+            unauthorized_headers = {"WWW-Authenticate": "Basic"}
+        if not authorization or scheme.lower() != "basic":
+            if self.auto_error:
+                raise HTTPException(
+                    status_code=HTTP_401_UNAUTHORIZED,
+                    detail="Not authenticated",
+                    headers=unauthorized_headers,
+                )
+            else:
+                return None
+        invalid_user_credentials_exc = HTTPException(
+            status_code=HTTP_401_UNAUTHORIZED,
+            detail="Invalid authentication credentials",
+            headers=unauthorized_headers,
+        )
+        try:
+            data = b64decode(param).decode("ascii")
+        except (ValueError, UnicodeDecodeError, binascii.Error):
+            raise invalid_user_credentials_exc  # noqa: B904
+        username, separator, password = data.partition(":")
+        if not separator:
+            raise invalid_user_credentials_exc
+        return HTTPBasicCredentials(username=username, password=password)
+
+
+class HTTPBearer(HTTPBase):
+    """
+    HTTP Bearer token authentication.
+
+    ## Usage
+
+    Create an instance object and use that object as the dependency in `Depends()`.
+
+    The dependency result will be an `HTTPAuthorizationCredentials` object containing
+    the `scheme` and the `credentials`.
+
+    ## Example
+
+    ```python
+    from typing import Annotated
+
+    from fastapi import Depends, FastAPI
+    from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
+
+    app = FastAPI()
+
+    security = HTTPBearer()
+
+
+    @app.get("/users/me")
+    def read_current_user(
+        credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)]
+    ):
+        return {"scheme": credentials.scheme, "credentials": credentials.credentials}
+    ```
+    """
+
+    def __init__(
+        self,
+        *,
+        bearerFormat: Annotated[Optional[str], Doc("Bearer token format.")] = None,
+        scheme_name: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Security scheme name.
+
+                It will be included in the generated OpenAPI (e.g. visible at `/docs`).
+                """
+            ),
+        ] = None,
+        description: Annotated[
+            Optional[str],
+            Doc(
+                """
+                Security scheme description.
+
+                It will be included in the generated OpenAPI (e.g. visible at `/docs`).
+                """
+            ),
+        ] = None,
+        auto_error: Annotated[
+            bool,
+            Doc(
+                """
+                By default, if the HTTP Bearer token is not provided (in an
+                `Authorization` header), `HTTPBearer` will automatically cancel the
+                request and send the client an error.
+
+                If `auto_error` is set to `False`, when the HTTP Bearer token
+                is not available, instead of erroring out, the dependency result will
+                be `None`.
+
+                This is useful when you want to have optional authentication.
+
+                It is also useful when you want to have authentication that can be
+                provided in one of multiple optional ways (for example, in an HTTP
+                Bearer token or in a cookie).
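`HTTPBearer` only extracts the scheme and credentials; deciding whether the token is actually valid is left to the application. A sketch where `verify_token` is a hypothetical stand-in for real validation (e.g. decoding a JWT and checking its signature and expiry):

```python
from typing import Annotated

from fastapi import Depends, FastAPI, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer

app = FastAPI()
security = HTTPBearer()


def verify_token(token: str) -> bool:
    # Stand-in for real validation; accepts a single demo token here.
    return token == "demo-token"


@app.get("/protected")
def read_protected(
    credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)],
):
    if not verify_token(credentials.credentials):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        )
    return {"scheme": credentials.scheme}
```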
+ """ + ), + ] = True, + ): + self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + if scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, + detail="Invalid authentication credentials", + ) + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPDigest(HTTPBase): + """ + HTTP Digest authentication. + + ## Usage + + Create an instance object and use that object as the dependency in `Depends()`. + + The dependency result will be an `HTTPAuthorizationCredentials` object containing + the `scheme` and the `credentials`. + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import HTTPAuthorizationCredentials, HTTPDigest + + app = FastAPI() + + security = HTTPDigest() + + + @app.get("/users/me") + def read_current_user( + credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] + ): + return {"scheme": credentials.scheme, "credentials": credentials.credentials} + ``` + """ + + def __init__( + self, + *, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if the HTTP Digest not provided, `HTTPDigest` will + automatically cancel the request and send the client an error. + + If `auto_error` is set to `False`, when the HTTP Digest is not + available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, in HTTP + Digest or in a cookie). 
+ """ + ), + ] = True, + ): + self.model = HTTPBaseModel(scheme="digest", description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + if scheme.lower() != "digest": + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, + detail="Invalid authentication credentials", + ) + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) diff --git a/venv/lib/python3.12/site-packages/fastapi/security/oauth2.py b/venv/lib/python3.12/site-packages/fastapi/security/oauth2.py new file mode 100644 index 0000000..9281dfb --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/security/oauth2.py @@ -0,0 +1,638 @@ +from typing import Any, Dict, List, Optional, Union, cast + +from fastapi.exceptions import HTTPException +from fastapi.openapi.models import OAuth2 as OAuth2Model +from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel +from fastapi.param_functions import Form +from fastapi.security.base import SecurityBase +from fastapi.security.utils import get_authorization_scheme_param +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN + +# TODO: import from typing when deprecating Python 3.9 +from typing_extensions import Annotated, Doc # type: ignore [attr-defined] + + +class OAuth2PasswordRequestForm: + """ + This is a dependency class to collect the `username` and `password` as form data + for an OAuth2 password flow. + + The OAuth2 specification dictates that for a password flow the data should be + collected using form data (instead of JSON) and that it should have the specific + fields `username` and `password`. + + All the initialization parameters are extracted from the request. + + Read more about it in the + [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). + + ## Example + + ```python + from typing import Annotated + + from fastapi import Depends, FastAPI + from fastapi.security import OAuth2PasswordRequestForm + + app = FastAPI() + + + @app.post("/login") + def login(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]): + data = {} + data["scopes"] = [] + for scope in form_data.scopes: + data["scopes"].append(scope) + if form_data.client_id: + data["client_id"] = form_data.client_id + if form_data.client_secret: + data["client_secret"] = form_data.client_secret + return data + ``` + + Note that for OAuth2 the scope `items:read` is a single scope in an opaque string. + You could have custom internal logic to separate it by colon caracters (`:`) or + similar, and get the two parts `items` and `read`. Many applications do that to + group and organize permisions, you could do it as well in your application, just + know that that it is application specific, it's not part of the specification. + """ + + def __init__( + self, + *, + grant_type: Annotated[ + Union[str, None], + Form(pattern="password"), + Doc( + """ + The OAuth2 spec says it is required and MUST be the fixed string + "password". Nevertheless, this dependency class is permissive and + allows not passing it. 
If you want to enforce it, use instead the
+                `OAuth2PasswordRequestFormStrict` dependency.
+                """
+            ),
+        ] = None,
+        username: Annotated[
+            str,
+            Form(),
+            Doc(
+                """
+                `username` string. The OAuth2 spec requires the exact field name
+                `username`.
+                """
+            ),
+        ],
+        password: Annotated[
+            str,
+            Form(),
+            Doc(
+                """
+                `password` string. The OAuth2 spec requires the exact field name
+                `password`.
+                """
+            ),
+        ],
+        scope: Annotated[
+            str,
+            Form(),
+            Doc(
+                """
+                A single string that actually contains several scopes separated by
+                spaces. Each scope is also a string.
+
+                For example, a single string with:
+
+                ```python
+                "items:read items:write users:read profile openid"
+                ```
+
+                would represent the scopes:
+
+                * `items:read`
+                * `items:write`
+                * `users:read`
+                * `profile`
+                * `openid`
+                """
+            ),
+        ] = "",
+        client_id: Annotated[
+            Union[str, None],
+            Form(),
+            Doc(
+                """
+                If there's a `client_id`, it can be sent as part of the form fields.
+                But the OAuth2 specification recommends sending the `client_id` and
+                `client_secret` (if any) using HTTP Basic auth.
+                """
+            ),
+        ] = None,
+        client_secret: Annotated[
+            Union[str, None],
+            Form(),
+            Doc(
+                """
+                If there's a `client_secret` (and a `client_id`), they can be sent
+                as part of the form fields. But the OAuth2 specification recommends
+                sending the `client_id` and `client_secret` (if any) using HTTP Basic
+                auth.
+                """
+            ),
+        ] = None,
+    ):
+        self.grant_type = grant_type
+        self.username = username
+        self.password = password
+        self.scopes = scope.split()
+        self.client_id = client_id
+        self.client_secret = client_secret
+
+
+class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm):
+    """
+    This is a dependency class to collect the `username` and `password` as form data
+    for an OAuth2 password flow.
+
+    The OAuth2 specification dictates that for a password flow the data should be
+    collected using form data (instead of JSON) and that it should have the specific
+    fields `username` and `password`.
+
+    All the initialization parameters are extracted from the request.
+
+    The only difference between `OAuth2PasswordRequestFormStrict` and
+    `OAuth2PasswordRequestForm` is that `OAuth2PasswordRequestFormStrict` requires the
+    client to send the form field `grant_type` with the value `"password"`, which
+    is required in the OAuth2 specification (apparently for no particular reason),
+    while for `OAuth2PasswordRequestForm` `grant_type` is optional.
+
+    Read more about it in the
+    [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/).
+
+    ## Example
+
+    ```python
+    from typing import Annotated
+
+    from fastapi import Depends, FastAPI
+    from fastapi.security import OAuth2PasswordRequestFormStrict
+
+    app = FastAPI()
+
+
+    @app.post("/login")
+    def login(form_data: Annotated[OAuth2PasswordRequestFormStrict, Depends()]):
+        data = {}
+        data["scopes"] = []
+        for scope in form_data.scopes:
+            data["scopes"].append(scope)
+        if form_data.client_id:
+            data["client_id"] = form_data.client_id
+        if form_data.client_secret:
+            data["client_secret"] = form_data.client_secret
+        return data
+    ```
+
+    Note that for OAuth2 the scope `items:read` is a single scope in an opaque string.
+    You could have custom internal logic to separate it by colon characters (`:`) or
+    similar, and get the two parts `items` and `read`. Many applications do that to
+    group and organize permissions; you could do it as well in your application, just
+    know that it is application specific, it's not part of the specification.
+
+
+    grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password".
+        This dependency is strict about it. If you want to be permissive, use instead the
+        OAuth2PasswordRequestForm dependency class.
+    username: username string. The OAuth2 spec requires the exact field name "username".
+    password: password string. The OAuth2 spec requires the exact field name "password".
+    scope: Optional string. Several scopes (each one a string) separated by spaces. E.g.
+        "items:read items:write users:read profile openid"
+    client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
+        using HTTP Basic auth, as: client_id:client_secret
+    client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
+        using HTTP Basic auth, as: client_id:client_secret
+    """
+
+    def __init__(
+        self,
+        grant_type: Annotated[
+            str,
+            Form(pattern="password"),
+            Doc(
+                """
+                The OAuth2 spec says it is required and MUST be the fixed string
+                "password". This dependency is strict about it. If you want to be
+                permissive, use instead the `OAuth2PasswordRequestForm` dependency
+                class.
+                """
+            ),
+        ],
+        username: Annotated[
+            str,
+            Form(),
+            Doc(
+                """
+                `username` string. The OAuth2 spec requires the exact field name
+                `username`.
+                """
+            ),
+        ],
+        password: Annotated[
+            str,
+            Form(),
+            Doc(
+                """
+                `password` string. The OAuth2 spec requires the exact field name
+                `password`.
+                """
+            ),
+        ],
+        scope: Annotated[
+            str,
+            Form(),
+            Doc(
+                """
+                A single string that actually contains several scopes separated by
+                spaces. Each scope is also a string.
+
+                For example, a single string with:
+
+                ```python
+                "items:read items:write users:read profile openid"
+                ```
+
+                would represent the scopes:
+
+                * `items:read`
+                * `items:write`
+                * `users:read`
+                * `profile`
+                * `openid`
+                """
+            ),
+        ] = "",
+        client_id: Annotated[
+            Union[str, None],
+            Form(),
+            Doc(
+                """
+                If there's a `client_id`, it can be sent as part of the form fields.
+                But the OAuth2 specification recommends sending the `client_id` and
+                `client_secret` (if any) using HTTP Basic auth.
+                """
+            ),
+        ] = None,
+        client_secret: Annotated[
+            Union[str, None],
+            Form(),
+            Doc(
+                """
+                If there's a `client_secret` (and a `client_id`), they can be sent
+                as part of the form fields. But the OAuth2 specification recommends
+                sending the `client_id` and `client_secret` (if any) using HTTP Basic
+                auth.
+                """
+            ),
+        ] = None,
+    ):
+        super().__init__(
+            grant_type=grant_type,
+            username=username,
+            password=password,
+            scope=scope,
+            client_id=client_id,
+            client_secret=client_secret,
+        )
+
+
+class OAuth2(SecurityBase):
+    """
+    This is the base class for OAuth2 authentication, an instance of it would be used
+    as a dependency. All other OAuth2 classes inherit from it and customize it for
+    each OAuth2 flow.
+
+    You normally would not create a new class inheriting from it but use one of the
+    existing subclasses, and maybe compose them if you want to support multiple flows.
+
+    Read more about it in the
+    [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/).
+    """
+
+    def __init__(
+        self,
+        *,
+        flows: Annotated[
+            Union[OAuthFlowsModel, Dict[str, Dict[str, Any]]],
+            Doc(
+                """
+                The dictionary of OAuth2 flows.
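+
+                For illustration, a minimal sketch of passing the flows as a plain
+                dictionary (roughly what `OAuth2PasswordBearer` builds for you; the
+                `"token"` URL is an assumption for the example):
+
+                ```python
+                from fastapi.security import OAuth2
+
+                oauth2_scheme = OAuth2(
+                    flows={"password": {"tokenUrl": "token", "scopes": {}}}
+                )
+                ```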
+ """ + ), + ] = OAuthFlowsModel(), + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Auhtorization header is provided, required for + OAuth2 authentication, it will automatically cancel the request and + send the client an error. + + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OAuth2 + or in a cookie). + """ + ), + ] = True, + ): + self.model = OAuth2Model( + flows=cast(OAuthFlowsModel, flows), description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return authorization + + +class OAuth2PasswordBearer(OAuth2): + """ + OAuth2 flow for authentication using a bearer token obtained with a password. + An instance of it would be used as a dependency. + + Read more about it in the + [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). + """ + + def __init__( + self, + tokenUrl: Annotated[ + str, + Doc( + """ + The URL to obtain the OAuth2 token. This would be the *path operation* + that has `OAuth2PasswordRequestForm` as a dependency. + """ + ), + ], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + scopes: Annotated[ + Optional[Dict[str, str]], + Doc( + """ + The OAuth2 scopes that would be required by the *path operations* that + use this dependency. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Auhtorization header is provided, required for + OAuth2 authentication, it will automatically cancel the request and + send the client an error. + + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OAuth2 + or in a cookie). 
+ """ + ), + ] = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel( + password=cast(Any, {"tokenUrl": tokenUrl, "scopes": scopes}) + ) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + return None + return param + + +class OAuth2AuthorizationCodeBearer(OAuth2): + """ + OAuth2 flow for authentication using a bearer token obtained with an OAuth2 code + flow. An instance of it would be used as a dependency. + """ + + def __init__( + self, + authorizationUrl: str, + tokenUrl: Annotated[ + str, + Doc( + """ + The URL to obtain the OAuth2 token. + """ + ), + ], + refreshUrl: Annotated[ + Optional[str], + Doc( + """ + The URL to refresh the token and obtain a new one. + """ + ), + ] = None, + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + scopes: Annotated[ + Optional[Dict[str, str]], + Doc( + """ + The OAuth2 scopes that would be required by the *path operations* that + use this dependency. + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Auhtorization header is provided, required for + OAuth2 authentication, it will automatically cancel the request and + send the client an error. + + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OAuth2 + or in a cookie). + """ + ), + ] = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel( + authorizationCode=cast( + Any, + { + "authorizationUrl": authorizationUrl, + "tokenUrl": tokenUrl, + "refreshUrl": refreshUrl, + "scopes": scopes, + }, + ) + ) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + return None # pragma: nocover + return param + + +class SecurityScopes: + """ + This is a special class that you can define in a parameter in a dependency to + obtain the OAuth2 scopes required by all the dependencies in the same chain. + + This way, multiple dependencies can have different scopes, even when used in the + same *path operation*. And with this, you can access all the scopes required in + all those dependencies in a single place. 
+ + Read more about it in the + [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). + """ + + def __init__( + self, + scopes: Annotated[ + Optional[List[str]], + Doc( + """ + This will be filled by FastAPI. + """ + ), + ] = None, + ): + self.scopes: Annotated[ + List[str], + Doc( + """ + The list of all the scopes required by dependencies. + """ + ), + ] = scopes or [] + self.scope_str: Annotated[ + str, + Doc( + """ + All the scopes required by all the dependencies in a single string + separated by spaces, as defined in the OAuth2 specification. + """ + ), + ] = " ".join(self.scopes) diff --git a/venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py b/venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py new file mode 100644 index 0000000..c612b47 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py @@ -0,0 +1,84 @@ +from typing import Optional + +from fastapi.openapi.models import OpenIdConnect as OpenIdConnectModel +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_403_FORBIDDEN +from typing_extensions import Annotated, Doc # type: ignore [attr-defined] + + +class OpenIdConnect(SecurityBase): + """ + OpenID Connect authentication class. An instance of it would be used as a + dependency. + """ + + def __init__( + self, + *, + openIdConnectUrl: Annotated[ + str, + Doc( + """ + The OpenID Connect URL. + """ + ), + ], + scheme_name: Annotated[ + Optional[str], + Doc( + """ + Security scheme name. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + description: Annotated[ + Optional[str], + Doc( + """ + Security scheme description. + + It will be included in the generated OpenAPI (e.g. visible at `/docs`). + """ + ), + ] = None, + auto_error: Annotated[ + bool, + Doc( + """ + By default, if no HTTP Auhtorization header is provided, required for + OpenID Connect authentication, it will automatically cancel the request + and send the client an error. + + If `auto_error` is set to `False`, when the HTTP Authorization header + is not available, instead of erroring out, the dependency result will + be `None`. + + This is useful when you want to have optional authentication. + + It is also useful when you want to have authentication that can be + provided in one of multiple optional ways (for example, with OpenID + Connect or in a cookie). 
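+
+                For example, a minimal sketch with `auto_error=False` (the Google
+                discovery URL is only an illustration; the dependency returns the
+                raw `Authorization` header value, which your code must validate):
+
+                ```python
+                from typing import Annotated, Optional
+
+                from fastapi import Depends, FastAPI
+                from fastapi.security import OpenIdConnect
+
+                app = FastAPI()
+
+                oidc = OpenIdConnect(
+                    openIdConnectUrl=(
+                        "https://accounts.google.com/.well-known/openid-configuration"
+                    ),
+                    auto_error=False,
+                )
+
+
+                @app.get("/me")
+                def read_me(authorization: Annotated[Optional[str], Depends(oidc)]):
+                    # None means no Authorization header was sent.
+                    return {"authorization_present": authorization is not None}
+                ```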
+ """ + ), + ] = True, + ): + self.model = OpenIdConnectModel( + openIdConnectUrl=openIdConnectUrl, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + authorization = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return authorization diff --git a/venv/lib/python3.12/site-packages/fastapi/security/utils.py b/venv/lib/python3.12/site-packages/fastapi/security/utils.py new file mode 100644 index 0000000..fa7a450 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/security/utils.py @@ -0,0 +1,10 @@ +from typing import Optional, Tuple + + +def get_authorization_scheme_param( + authorization_header_value: Optional[str], +) -> Tuple[str, str]: + if not authorization_header_value: + return "", "" + scheme, _, param = authorization_header_value.partition(" ") + return scheme, param diff --git a/venv/lib/python3.12/site-packages/fastapi/staticfiles.py b/venv/lib/python3.12/site-packages/fastapi/staticfiles.py new file mode 100644 index 0000000..299015d --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/staticfiles.py @@ -0,0 +1 @@ +from starlette.staticfiles import StaticFiles as StaticFiles # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/templating.py b/venv/lib/python3.12/site-packages/fastapi/templating.py new file mode 100644 index 0000000..0cb8684 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/templating.py @@ -0,0 +1 @@ +from starlette.templating import Jinja2Templates as Jinja2Templates # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/testclient.py b/venv/lib/python3.12/site-packages/fastapi/testclient.py new file mode 100644 index 0000000..4012406 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/testclient.py @@ -0,0 +1 @@ +from starlette.testclient import TestClient as TestClient # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/types.py b/venv/lib/python3.12/site-packages/fastapi/types.py new file mode 100644 index 0000000..3205654 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/types.py @@ -0,0 +1,10 @@ +import types +from enum import Enum +from typing import Any, Callable, Dict, Set, Type, TypeVar, Union + +from pydantic import BaseModel + +DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) +UnionType = getattr(types, "UnionType", Union) +ModelNameMap = Dict[Union[Type[BaseModel], Type[Enum]], str] +IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]] diff --git a/venv/lib/python3.12/site-packages/fastapi/utils.py b/venv/lib/python3.12/site-packages/fastapi/utils.py new file mode 100644 index 0000000..f8463dd --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/utils.py @@ -0,0 +1,229 @@ +import re +import warnings +from dataclasses import is_dataclass +from typing import ( + TYPE_CHECKING, + Any, + Dict, + MutableMapping, + Optional, + Set, + Type, + Union, + cast, +) +from weakref import WeakKeyDictionary + +import fastapi +from fastapi._compat import ( + PYDANTIC_V2, + BaseConfig, + ModelField, + PydanticSchemaGenerationError, + Undefined, + UndefinedType, + Validator, + lenient_issubclass, +) +from fastapi.datastructures import DefaultPlaceholder, DefaultType +from pydantic import BaseModel, create_model +from pydantic.fields import FieldInfo +from 
typing_extensions import Literal + +if TYPE_CHECKING: # pragma: nocover + from .routing import APIRoute + +# Cache for `create_cloned_field` +_CLONED_TYPES_CACHE: MutableMapping[ + Type[BaseModel], Type[BaseModel] +] = WeakKeyDictionary() + + +def is_body_allowed_for_status_code(status_code: Union[int, str, None]) -> bool: + if status_code is None: + return True + # Ref: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#patterned-fields-1 + if status_code in { + "default", + "1XX", + "2XX", + "3XX", + "4XX", + "5XX", + }: + return True + current_status_code = int(status_code) + return not (current_status_code < 200 or current_status_code in {204, 304}) + + +def get_path_param_names(path: str) -> Set[str]: + return set(re.findall("{(.*?)}", path)) + + +def create_response_field( + name: str, + type_: Type[Any], + class_validators: Optional[Dict[str, Validator]] = None, + default: Optional[Any] = Undefined, + required: Union[bool, UndefinedType] = Undefined, + model_config: Type[BaseConfig] = BaseConfig, + field_info: Optional[FieldInfo] = None, + alias: Optional[str] = None, + mode: Literal["validation", "serialization"] = "validation", +) -> ModelField: + """ + Create a new response field. Raises if type_ is invalid. + """ + class_validators = class_validators or {} + if PYDANTIC_V2: + field_info = field_info or FieldInfo( + annotation=type_, default=default, alias=alias + ) + else: + field_info = field_info or FieldInfo() + kwargs = {"name": name, "field_info": field_info} + if PYDANTIC_V2: + kwargs.update({"mode": mode}) + else: + kwargs.update( + { + "type_": type_, + "class_validators": class_validators, + "default": default, + "required": required, + "model_config": model_config, + "alias": alias, + } + ) + try: + return ModelField(**kwargs) # type: ignore[arg-type] + except (RuntimeError, PydanticSchemaGenerationError): + raise fastapi.exceptions.FastAPIError( + "Invalid args for response field! Hint: " + f"check that {type_} is a valid Pydantic field type. " + "If you are using a return type annotation that is not a valid Pydantic " + "field (e.g. Union[Response, dict, None]) you can disable generating the " + "response model from the type annotation with the path operation decorator " + "parameter response_model=None. 
Read more: " + "https://fastapi.tiangolo.com/tutorial/response-model/" + ) from None + + +def create_cloned_field( + field: ModelField, + *, + cloned_types: Optional[MutableMapping[Type[BaseModel], Type[BaseModel]]] = None, +) -> ModelField: + if PYDANTIC_V2: + return field + # cloned_types caches already cloned types to support recursive models and improve + # performance by avoiding unnecessary cloning + if cloned_types is None: + cloned_types = _CLONED_TYPES_CACHE + + original_type = field.type_ + if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"): + original_type = original_type.__pydantic_model__ + use_type = original_type + if lenient_issubclass(original_type, BaseModel): + original_type = cast(Type[BaseModel], original_type) + use_type = cloned_types.get(original_type) + if use_type is None: + use_type = create_model(original_type.__name__, __base__=original_type) + cloned_types[original_type] = use_type + for f in original_type.__fields__.values(): + use_type.__fields__[f.name] = create_cloned_field( + f, cloned_types=cloned_types + ) + new_field = create_response_field(name=field.name, type_=use_type) + new_field.has_alias = field.has_alias # type: ignore[attr-defined] + new_field.alias = field.alias # type: ignore[misc] + new_field.class_validators = field.class_validators # type: ignore[attr-defined] + new_field.default = field.default # type: ignore[misc] + new_field.required = field.required # type: ignore[misc] + new_field.model_config = field.model_config # type: ignore[attr-defined] + new_field.field_info = field.field_info + new_field.allow_none = field.allow_none # type: ignore[attr-defined] + new_field.validate_always = field.validate_always # type: ignore[attr-defined] + if field.sub_fields: # type: ignore[attr-defined] + new_field.sub_fields = [ # type: ignore[attr-defined] + create_cloned_field(sub_field, cloned_types=cloned_types) + for sub_field in field.sub_fields # type: ignore[attr-defined] + ] + if field.key_field: # type: ignore[attr-defined] + new_field.key_field = create_cloned_field( # type: ignore[attr-defined] + field.key_field, # type: ignore[attr-defined] + cloned_types=cloned_types, + ) + new_field.validators = field.validators # type: ignore[attr-defined] + new_field.pre_validators = field.pre_validators # type: ignore[attr-defined] + new_field.post_validators = field.post_validators # type: ignore[attr-defined] + new_field.parse_json = field.parse_json # type: ignore[attr-defined] + new_field.shape = field.shape # type: ignore[attr-defined] + new_field.populate_validators() # type: ignore[attr-defined] + return new_field + + +def generate_operation_id_for_path( + *, name: str, path: str, method: str +) -> str: # pragma: nocover + warnings.warn( + "fastapi.utils.generate_operation_id_for_path() was deprecated, " + "it is not used internally, and will be removed soon", + DeprecationWarning, + stacklevel=2, + ) + operation_id = name + path + operation_id = re.sub(r"\W", "_", operation_id) + operation_id = operation_id + "_" + method.lower() + return operation_id + + +def generate_unique_id(route: "APIRoute") -> str: + operation_id = route.name + route.path_format + operation_id = re.sub(r"\W", "_", operation_id) + assert route.methods + operation_id = operation_id + "_" + list(route.methods)[0].lower() + return operation_id + + +def deep_dict_update(main_dict: Dict[Any, Any], update_dict: Dict[Any, Any]) -> None: + for key, value in update_dict.items(): + if ( + key in main_dict + and isinstance(main_dict[key], dict) + and 
isinstance(value, dict) + ): + deep_dict_update(main_dict[key], value) + elif ( + key in main_dict + and isinstance(main_dict[key], list) + and isinstance(update_dict[key], list) + ): + main_dict[key] = main_dict[key] + update_dict[key] + else: + main_dict[key] = value + + +def get_value_or_default( + first_item: Union[DefaultPlaceholder, DefaultType], + *extra_items: Union[DefaultPlaceholder, DefaultType], +) -> Union[DefaultPlaceholder, DefaultType]: + """ + Pass items or `DefaultPlaceholder`s by descending priority. + + The first one to _not_ be a `DefaultPlaceholder` will be returned. + + Otherwise, the first item (a `DefaultPlaceholder`) will be returned. + """ + items = (first_item,) + extra_items + for item in items: + if not isinstance(item, DefaultPlaceholder): + return item + return first_item + + +def match_pydantic_error_url(error_type: str) -> Any: + from dirty_equals import IsStr + + return IsStr(regex=rf"^https://errors\.pydantic\.dev/.*/v/{error_type}") diff --git a/venv/lib/python3.12/site-packages/fastapi/websockets.py b/venv/lib/python3.12/site-packages/fastapi/websockets.py new file mode 100644 index 0000000..55a4ac4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/fastapi/websockets.py @@ -0,0 +1,3 @@ +from starlette.websockets import WebSocket as WebSocket # noqa +from starlette.websockets import WebSocketDisconnect as WebSocketDisconnect # noqa +from starlette.websockets import WebSocketState as WebSocketState # noqa diff --git a/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/METADATA new file mode 100644 index 0000000..30d6653 --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/METADATA @@ -0,0 +1,672 @@ +Metadata-Version: 2.4 +Name: frozenlist +Version: 1.8.0 +Summary: A list-like structure which implements collections.abc.MutableSequence +Home-page: https://github.com/aio-libs/frozenlist +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache-2.0 +Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org +Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org +Project-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions +Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist +Project-URL: Docs: Changelog, https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst#changelog +Project-URL: Docs: RTD, https://frozenlist.aio-libs.org +Project-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/frozenlist +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 
3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Dynamic: license-file
+
+frozenlist
+==========
+
+.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg
+   :target: https://github.com/aio-libs/frozenlist/actions
+   :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg?flag=pytest
+   :target: https://codecov.io/gh/aio-libs/frozenlist?flags[]=pytest
+   :alt: codecov.io status for master branch
+
+.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white
+   :target: https://pypi.org/project/frozenlist
+   :alt: frozenlist @ PyPI
+
+.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest
+   :target: https://frozenlist.aio-libs.org
+   :alt: Read The Docs build status badge
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+   :target: https://matrix.to/#/%23aio-libs:matrix.org
+   :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+   :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+   :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+------------
+
+``frozenlist.FrozenList`` is a list-like structure which implements
+``collections.abc.MutableSequence``. The list is *mutable* until ``FrozenList.freeze``
+is called, after which list modifications raise ``RuntimeError``:
+
+
+>>> from frozenlist import FrozenList
+>>> fl = FrozenList([17, 42])
+>>> fl.append('spam')
+>>> fl.append('Vikings')
+>>> fl
+<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>
+>>> fl.freeze()
+>>> fl
+<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>
+>>> fl.frozen
+True
+>>> fl.append("Monty")
+Traceback (most recent call last):
+  File "<stdin>", line 1, in <module>
+  File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append
+    self._check_frozen()
+  File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen
+    raise RuntimeError("Cannot modify frozen list.")
+RuntimeError: Cannot modify frozen list.
+
+
+FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:
+
+
+>>> fl = FrozenList([17, 42, 'spam'])
+>>> hash(fl)
+Traceback (most recent call last):
+  File "<stdin>", line 1, in <module>
+  File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__
+    raise RuntimeError("Cannot hash unfrozen list.")
+RuntimeError: Cannot hash unfrozen list.
+>>> fl.freeze()
+>>> hash(fl)
+3713081631934410656
+>>> dictionary = {fl: 'Vikings'}  # frozen fl can be a dict key
+>>> dictionary
+{<FrozenList(frozen=True, [17, 42, 'spam'])>: 'Vikings'}
+
+
+Installation
+------------
+
+::
+
+   $ pip install frozenlist
+
+
+Documentation
+-------------
+
+https://frozenlist.aio-libs.org
+
+Communication channels
+----------------------
+
+We have a *Matrix Space* `#aio-libs-space:matrix.org
+<https://matrix.to/#/%23aio-libs-space:matrix.org>`_ which is
+also accessible via Gitter.
+
+License
+-------
+
+``frozenlist`` is offered under the Apache 2 license.
+ +Source code +----------- + +The project is hosted on GitHub_ + +Please file an issue in the `bug tracker +`_ if you have found a bug +or have some suggestions to improve the library. + +.. _GitHub: https://github.com/aio-libs/frozenlist + +========= +Changelog +========= + +.. + You should *NOT* be adding new change log entries to this file, this + file is managed by towncrier. You *may* edit previous change logs to + fix problems like typo corrections or such. + To add a new change log entry, please see + https://pip.pypa.io/en/latest/development/contributing/#news-entries + we named the news folder "changes". + + WARNING: Don't drop the next directive! + +.. towncrier release notes start + +v1.8.0 +====== + +*(2025-10-05)* + + +Contributor-facing changes +-------------------------- + +- The ``reusable-cibuildwheel.yml`` workflow has been refactored to + be more generic and ``ci-cd.yml`` now holds all the configuration + toggles -- by `@webknjaz `__. + + *Related issues and pull requests on GitHub:* + `#668 `__. + +- When building wheels, the source distribution is now passed directly + to the ``cibuildwheel`` invocation -- by `@webknjaz `__. + + *Related issues and pull requests on GitHub:* + `#669 `__. + +- Builds and tests have been added to + ``ci-cd.yml`` for arm64 Windows wheels -- by `@finnagin `__. + + *Related issues and pull requests on GitHub:* + `#677 `__. + +- Started building wheels for CPython 3.14 -- by `@kumaraditya303 `__. + + *Related issues and pull requests on GitHub:* + `#681 `__, `#682 `__. + +- Removed ``--config-settings=pure-python=false`` from ``requirements/dev.txt``. + Developers on CPython still get accelerated builds by default. To explicitly build + a pure Python wheel, use ``pip install -e . --config-settings=pure-python=true`` + -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#687 `__. + + +---- + + +v1.7.0 +====== + +*(2025-06-09)* + + +Features +-------- + +- Added deepcopy support to FrozenList -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#659 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Fixed an issue where ``frozenlist`` binary wheels would be built with debugging symbols and line tracing enabled, which significantly impacted performance. Line tracing is now disabled by default and can only be enabled explicitly -- by `@bdraco `__. + + This change ensures that production builds are optimized for performance. Developers who need line tracing for debugging purposes can still enable it by: + + 1. Setting the ``FROZENLIST_CYTHON_TRACING`` environment variable + 2. Using the ``--config-setting=with-cython-tracing=true`` option with pip + + *Related issues and pull requests on GitHub:* + `#660 `__. + +- Enabled ``PIP_CONSTRAINT`` environment variable in the build configuration to ensure the pinned Cython version from ``requirements/cython.txt`` is used during wheel builds. + + *Related issues and pull requests on GitHub:* + `#661 `__. + + +---- + + +v1.6.2 +====== + +*(2025-06-03)* + + +No significant changes. + + +---- + + +v1.6.1 +====== + +*(2025-06-02)* + + +Bug fixes +--------- + +- Correctly use ``cimport`` for including ``PyBool_FromLong`` -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#653 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Exclude ``_frozenlist.cpp`` from bdists/wheels -- by `@musicinmybrain `__. 
+ + *Related issues and pull requests on GitHub:* + `#649 `__. + +- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#654 `__. + + +---- + + +v1.6.0 +====== + +*(2025-04-17)* + + +Bug fixes +--------- + +- Stopped implicitly allowing the use of Cython pre-release versions when + building the distribution package -- by `@ajsanchezsanz `__ and + `@markgreene74 `__. + + *Related commits on GitHub:* + `41591f2 `__. + + +Features +-------- + +- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#618 `__. + +- Started building armv7l wheels -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#642 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Stopped implicitly allowing the use of Cython pre-release versions when + building the distribution package -- by `@ajsanchezsanz `__ and + `@markgreene74 `__. + + *Related commits on GitHub:* + `41591f2 `__. + +- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#618 `__. + +- The packaging metadata switched to including an SPDX license identifier introduced in `PEP 639 `__ -- by `@cdce8p `__. + + *Related issues and pull requests on GitHub:* + `#639 `__. + + +Contributor-facing changes +-------------------------- + +- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem + dependencies using `re-actors/cache-python-deps`_ -- an action by + `@webknjaz `__ that takes into account ABI stability and the exact + version of Python runtime. + + .. _`re-actors/cache-python-deps`: + https://github.com/marketplace/actions/cache-python-deps + + *Related issues and pull requests on GitHub:* + `#633 `__. + +- Organized dependencies into test and lint dependencies so that no + unnecessary ones are installed during CI runs -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#636 `__. + + +---- + + +1.5.0 (2024-10-22) +================== + +Bug fixes +--------- + +- An incorrect signature of the ``__class_getitem__`` class method + has been fixed, adding a missing ``class_item`` argument under + Python 3.8 and older. + + This change also improves the code coverage of this method that + was previously missing -- by `@webknjaz `__. + + + *Related issues and pull requests on GitHub:* + `#567 `__, `#571 `__. + + +Improved documentation +---------------------- + +- Rendered issue, PR, and commit links now lead to + ``frozenlist``'s repo instead of ``yarl``'s repo. + + + *Related issues and pull requests on GitHub:* + `#573 `__. + +- On the ``Contributing docs`` page, + a link to the ``Towncrier philosophy`` has been fixed. + + + *Related issues and pull requests on GitHub:* + `#574 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- A name of a temporary building directory now reflects + that it's related to ``frozenlist``, not ``yarl``. + + + *Related issues and pull requests on GitHub:* + `#573 `__. + +- Declared Python 3.13 supported officially in the distribution package metadata. + + + *Related issues and pull requests on GitHub:* + `#595 `__. 
+ + +---- + + +1.4.1 (2023-12-15) +================== + +Packaging updates and notes for downstreams +------------------------------------------- + +- Declared Python 3.12 and PyPy 3.8-3.10 supported officially + in the distribution package metadata. + + + *Related issues and pull requests on GitHub:* + `#553 `__. + +- Replaced the packaging is replaced from an old-fashioned ``setup.py`` to an + in-tree `PEP 517 `__ build backend -- by `@webknjaz `__. + + Whenever the end-users or downstream packagers need to build ``frozenlist`` + from source (a Git checkout or an sdist), they may pass a ``config_settings`` + flag ``pure-python``. If this flag is not set, a C-extension will be built + and included into the distribution. + + Here is how this can be done with ``pip``: + + .. code-block:: console + + $ python3 -m pip install . --config-settings=pure-python= + + This will also work with ``-e | --editable``. + + The same can be achieved via ``pypa/build``: + + .. code-block:: console + + $ python3 -m build --config-setting=pure-python= + + Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source + directly, as opposed to building an ``sdist`` and then building from it. + + + *Related issues and pull requests on GitHub:* + `#560 `__. + + +Contributor-facing changes +-------------------------- + +- It is now possible to request line tracing in Cython builds using the + ``with-cython-tracing`` `PEP 517 `__ config setting + -- `@webknjaz `__. + + This can be used in CI and development environment to measure coverage + on Cython modules, but is not normally useful to the end-users or + downstream packagers. + + Here's a usage example: + + .. code-block:: console + + $ python3 -Im pip install . --config-settings=with-cython-tracing=true + + For editable installs, this setting is on by default. Otherwise, it's + off unless requested explicitly. + + The following produces C-files required for the Cython coverage + plugin to map the measurements back to the PYX-files: + + .. code-block:: console + + $ python -Im pip install -e . + + Alternatively, the ``FROZENLIST_CYTHON_TRACING=1`` environment variable + can be set to do the same as the `PEP 517 `__ config setting. + + + *Related issues and pull requests on GitHub:* + `#560 `__. + +- Coverage collection has been implemented for the Cython modules + -- by `@webknjaz `__. + + It will also be reported to Codecov from any non-release CI jobs. + + + *Related issues and pull requests on GitHub:* + `#561 `__. + +- A step-by-step ``Release Guide`` guide has + been added, describing how to release *frozenlist* -- by `@webknjaz `__. + + This is primarily targeting the maintainers. + + + *Related issues and pull requests on GitHub:* + `#563 `__. + +- Detailed ``Contributing Guidelines`` on + authoring the changelog fragments have been published in the + documentation -- by `@webknjaz `__. + + + *Related issues and pull requests on GitHub:* + `#564 `__. + + +---- + + +1.4.0 (2023-07-12) +================== + +The published source distribution package became buildable +under Python 3.12. + + +---- + + +Bugfixes +-------- + +- Removed an unused ``typing.Tuple`` import + `#411 `_ + + +Deprecations and Removals +------------------------- + +- Dropped Python 3.7 support. + `#413 `_ + + +Misc +---- + +- `#410 `_, `#433 `_ + + +---- + + +1.3.3 (2022-11-08) +================== + +- Fixed CI runs when creating a new release, where new towncrier versions + fail when the current version section is already present. 
+ + +---- + + +1.3.2 (2022-11-08) +================== + +Misc +---- + +- Updated the CI runs to better check for test results and to avoid deprecated syntax. `#327 `_ + + +---- + + +1.3.1 (2022-08-02) +================== + +The published source distribution package became buildable +under Python 3.11. + + +---- + + +1.3.0 (2022-01-18) +================== + +Bugfixes +-------- + +- Do not install C sources with binary distributions. + `#250 `_ + + +Deprecations and Removals +------------------------- + +- Dropped Python 3.6 support + `#274 `_ + + +---- + + +1.2.0 (2021-10-16) +================== + +Features +-------- + +- ``FrozenList`` now supports being used as a generic type as per PEP 585, e.g. ``frozen_int_list: FrozenList[int]`` (requires Python 3.9 or newer). + `#172 `_ +- Added support for Python 3.10. + `#227 `_ +- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. + `#227 `_ +- Started shipping platform-specific arm64 wheels for Apple Silicon. + `#227 `_ + + +---- + + +1.1.1 (2020-11-14) +================== + +Bugfixes +-------- + +- Provide x86 Windows wheels. + `#169 `_ + + +---- + + +1.1.0 (2020-10-13) +================== + +Features +-------- + +- Add support for hashing of a frozen list. + `#136 `_ + +- Support Python 3.8 and 3.9. + +- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on + Linux as well as ``x86_64``. + + +---- + + +1.0.0 (2019-11-09) +================== + +Deprecations and Removals +------------------------- + +- Dropped support for Python 3.5; only 3.6, 3.7 and 3.8 are supported going forward. + `#24 `_ diff --git a/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/RECORD new file mode 100644 index 0000000..4ef1bed --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/RECORD @@ -0,0 +1,12 @@ +frozenlist-1.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +frozenlist-1.8.0.dist-info/METADATA,sha256=lGwi3J9-LHby1pjwCkMG2-Sagg4rPxoOIv5ShsvsNfE,20333 +frozenlist-1.8.0.dist-info/RECORD,, +frozenlist-1.8.0.dist-info/WHEEL,sha256=mX4U4odf6w47aVjwZUmTYd1MF9BbrhVLKlaWSvZwHEk,186 +frozenlist-1.8.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 +frozenlist-1.8.0.dist-info/top_level.txt,sha256=jivtxsPXA3nK3WBWW2LW5Mtu_GHt8UZA13NeCs2cKuA,11 +frozenlist/__init__.py,sha256=xAIE2u9ncAbjATGIPfno_OJfe8AQ-1h7z_uc73dYsEA,2108 +frozenlist/__init__.pyi,sha256=vMEoES1xGegPtVXoCi9XydEeHsyuIq-KdeXwP5PdsaA,1470 +frozenlist/__pycache__/__init__.cpython-312.pyc,, +frozenlist/_frozenlist.cpython-312-x86_64-linux-gnu.so,sha256=UzFjN2uN24Tz86SdErp07aJh4LOp8A0w9EhxkDP4i3E,786872 +frozenlist/_frozenlist.pyx,sha256=t-aGjuEiVt_MZPBJ0RnraavVmPBK6arz3i48ZvXuYsU,3708 +frozenlist/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/WHEEL new file mode 100644 index 0000000..9921a02 --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_5_x86_64 +Tag: cp312-cp312-manylinux1_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE 
b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..7082a2d --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2019 Nikolay Kim and Andrew Svetlov + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/top_level.txt new file mode 100644 index 0000000..52f13fc --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist-1.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +frozenlist diff --git a/venv/lib/python3.12/site-packages/frozenlist/__init__.py b/venv/lib/python3.12/site-packages/frozenlist/__init__.py new file mode 100644 index 0000000..41c8595 --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist/__init__.py @@ -0,0 +1,86 @@ +import os +import types +from collections.abc import MutableSequence +from functools import total_ordering + +__version__ = "1.8.0" + +__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...] + + +NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool + + +@total_ordering +class FrozenList(MutableSequence): + __slots__ = ("_frozen", "_items") + __class_getitem__ = classmethod(types.GenericAlias) + + def __init__(self, items=None): + self._frozen = False + if items is not None: + items = list(items) + else: + items = [] + self._items = items + + @property + def frozen(self): + return self._frozen + + def freeze(self): + self._frozen = True + + def __getitem__(self, index): + return self._items[index] + + def __setitem__(self, index, value): + if self._frozen: + raise RuntimeError("Cannot modify frozen list.") + self._items[index] = value + + def __delitem__(self, index): + if self._frozen: + raise RuntimeError("Cannot modify frozen list.") + del self._items[index] + + def __len__(self): + return self._items.__len__() + + def __iter__(self): + return self._items.__iter__() + + def __reversed__(self): + return self._items.__reversed__() + + def __eq__(self, other): + return list(self) == other + + def __le__(self, other): + return list(self) <= other + + def insert(self, pos, item): + if self._frozen: + raise RuntimeError("Cannot modify frozen list.") + self._items.insert(pos, item) + + def __repr__(self): + return f"<FrozenList(frozen={self._frozen}, {self._items!r})>" + + def __hash__(self): + if self._frozen: + return hash(tuple(self)) + else: + raise RuntimeError("Cannot hash unfrozen list.") + + +PyFrozenList = FrozenList + + +if not NO_EXTENSIONS: + try: + from ._frozenlist import FrozenList as CFrozenList # type: ignore + except ImportError: # pragma: no cover + pass + else: + FrozenList = CFrozenList # type: ignore diff --git a/venv/lib/python3.12/site-packages/frozenlist/__init__.pyi b/venv/lib/python3.12/site-packages/frozenlist/__init__.pyi new file mode 100644 index 0000000..ae803ef --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist/__init__.pyi @@ -0,0 +1,47 @@ +from typing import ( + Generic, + Iterable, + Iterator, + List, + MutableSequence, + Optional, + TypeVar, + Union, + overload, ) + +_T = TypeVar("_T") +_Arg = Union[List[_T], Iterable[_T]] + +class FrozenList(MutableSequence[_T], Generic[_T]): + def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ... + @property + def frozen(self) -> bool: ... + def freeze(self) -> None: ... + @overload + def __getitem__(self, i: int) -> _T: ...
+ @overload + def __getitem__(self, s: slice) -> FrozenList[_T]: ... + @overload + def __setitem__(self, i: int, o: _T) -> None: ... + @overload + def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... + @overload + def __delitem__(self, i: int) -> None: ... + @overload + def __delitem__(self, i: slice) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + def __reversed__(self) -> Iterator[_T]: ... + def __eq__(self, other: object) -> bool: ... + def __le__(self, other: FrozenList[_T]) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __lt__(self, other: FrozenList[_T]) -> bool: ... + def __ge__(self, other: FrozenList[_T]) -> bool: ... + def __gt__(self, other: FrozenList[_T]) -> bool: ... + def insert(self, pos: int, item: _T) -> None: ... + def __repr__(self) -> str: ... + def __hash__(self) -> int: ... + +# types for C accelerators are the same +CFrozenList = PyFrozenList = FrozenList diff --git a/venv/lib/python3.12/site-packages/frozenlist/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/frozenlist/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d2cdf21 Binary files /dev/null and b/venv/lib/python3.12/site-packages/frozenlist/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/frozenlist/_frozenlist.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/frozenlist/_frozenlist.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..915d941 Binary files /dev/null and b/venv/lib/python3.12/site-packages/frozenlist/_frozenlist.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/frozenlist/_frozenlist.pyx b/venv/lib/python3.12/site-packages/frozenlist/_frozenlist.pyx new file mode 100644 index 0000000..a82d8c8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist/_frozenlist.pyx @@ -0,0 +1,148 @@ +# cython: freethreading_compatible = True +# distutils: language = c++ + +from cpython.bool cimport PyBool_FromLong +from libcpp.atomic cimport atomic + +import copy +import types +from collections.abc import MutableSequence + + +cdef class FrozenList: + __class_getitem__ = classmethod(types.GenericAlias) + + cdef atomic[bint] _frozen + cdef list _items + + def __init__(self, items=None): + self._frozen.store(False) + if items is not None: + items = list(items) + else: + items = [] + self._items = items + + @property + def frozen(self): + return PyBool_FromLong(self._frozen.load()) + + cdef object _check_frozen(self): + if self._frozen.load(): + raise RuntimeError("Cannot modify frozen list.") + + cdef inline object _fast_len(self): + return len(self._items) + + def freeze(self): + self._frozen.store(True) + + def __getitem__(self, index): + return self._items[index] + + def __setitem__(self, index, value): + self._check_frozen() + self._items[index] = value + + def __delitem__(self, index): + self._check_frozen() + del self._items[index] + + def __len__(self): + return self._fast_len() + + def __iter__(self): + return self._items.__iter__() + + def __reversed__(self): + return self._items.__reversed__() + + def __richcmp__(self, other, op): + if op == 0: # < + return list(self) < other + if op == 1: # <= + return list(self) <= other + if op == 2: # == + return list(self) == other + if op == 3: # != + return list(self) != other + if op == 4: # > + return list(self) > other + if op == 5: # => + return list(self) >= other + + def insert(self, pos, item): + self._check_frozen() + 
self._items.insert(pos, item) + + def __contains__(self, item): + return item in self._items + + def __iadd__(self, items): + self._check_frozen() + self._items += list(items) + return self + + def index(self, item): + return self._items.index(item) + + def remove(self, item): + self._check_frozen() + self._items.remove(item) + + def clear(self): + self._check_frozen() + self._items.clear() + + def extend(self, items): + self._check_frozen() + self._items += list(items) + + def reverse(self): + self._check_frozen() + self._items.reverse() + + def pop(self, index=-1): + self._check_frozen() + return self._items.pop(index) + + def append(self, item): + self._check_frozen() + return self._items.append(item) + + def count(self, item): + return self._items.count(item) + + def __repr__(self): + return '<FrozenList(frozen={}, {!r})>'.format(self._frozen.load(), + self._items) + + def __hash__(self): + if self._frozen.load(): + return hash(tuple(self._items)) + else: + raise RuntimeError("Cannot hash unfrozen list.") + + def __deepcopy__(self, memo): + cdef FrozenList new_list + obj_id = id(self) + + # Return existing copy if already processed (circular reference) + if obj_id in memo: + return memo[obj_id] + + # Create new instance and register immediately + new_list = self.__class__([]) + memo[obj_id] = new_list + + # Deep copy items + new_list._items[:] = [copy.deepcopy(item, memo) for item in self._items] + + # Preserve frozen state + if self._frozen.load(): + new_list.freeze() + + return new_list + + +MutableSequence.register(FrozenList) diff --git a/venv/lib/python3.12/site-packages/frozenlist/py.typed b/venv/lib/python3.12/site-packages/frozenlist/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/frozenlist/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/METADATA b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/METADATA new file mode 100644 index 0000000..0e3a649 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/METADATA @@ -0,0 +1,117 @@ +Metadata-Version: 2.4 +Name: greenlet +Version: 3.2.4 +Summary: Lightweight in-process concurrent programming +Home-page: https://greenlet.readthedocs.io/ +Author: Alexey Borzenkov +Author-email: snaury@gmail.com +Maintainer: Jason Madden +Maintainer-email: jason@seecoresoftware.com +License: MIT AND Python-2.0 +Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues +Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ +Project-URL: Documentation, https://greenlet.readthedocs.io/ +Project-URL: Changes, https://greenlet.readthedocs.io/en/latest/changes.html +Keywords: greenlet coroutine concurrency threads cooperative +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: 
Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.PSF +Provides-Extra: docs +Requires-Dist: Sphinx; extra == "docs" +Requires-Dist: furo; extra == "docs" +Provides-Extra: test +Requires-Dist: objgraph; extra == "test" +Requires-Dist: psutil; extra == "test" +Requires-Dist: setuptools; extra == "test" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: maintainer +Dynamic: maintainer-email +Dynamic: platform +Dynamic: project-url +Dynamic: provides-extra +Dynamic: requires-python +Dynamic: summary + +.. This file is included into docs/history.rst + + +Greenlets are lightweight coroutines for in-process concurrent +programming. + +The "greenlet" package is a spin-off of `Stackless`_, a version of +CPython that supports micro-threads called "tasklets". Tasklets run +pseudo-concurrently (typically in a single or a few OS-level threads) +and are synchronized with data exchanges on "channels". + +A "greenlet", on the other hand, is a still more primitive notion of +micro-thread with no implicit scheduling; coroutines, in other words. +This is useful when you want to control exactly when your code runs. +You can build custom scheduled micro-threads on top of greenlet; +however, it seems that greenlets are useful on their own as a way to +make advanced control flow structures. For example, we can recreate +generators; the difference with Python's own generators is that our +generators can call nested functions and the nested functions can +yield values too. (Additionally, you don't need a "yield" keyword. See +the example in `test_generator.py +<https://github.com/python-greenlet/greenlet/blob/master/src/greenlet/tests/test_generator.py>`_). + +Greenlets are provided as a C extension module for the regular unmodified +interpreter. + +.. _`Stackless`: http://www.stackless.com + + +Who is using Greenlet? +====================== + +There are several libraries that use Greenlet as a more flexible +alternative to Python's built in coroutine support: + + - `Concurrence`_ + - `Eventlet`_ + - `Gevent`_ + +.. _Concurrence: http://opensource.hyves.org/concurrence/ +.. _Eventlet: http://eventlet.net/ +.. 
_Gevent: http://www.gevent.org/ + +Getting Greenlet +================ + +The easiest way to get Greenlet is to install it with pip:: + + pip install greenlet + + +Source code archives and binary distributions are available on the +python package index at https://pypi.org/project/greenlet + +The source code repository is hosted on github: +https://github.com/python-greenlet/greenlet + +Documentation is available on readthedocs.org: +https://greenlet.readthedocs.io diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/RECORD b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/RECORD new file mode 100644 index 0000000..11a4300 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/RECORD @@ -0,0 +1,121 @@ +../../../include/site/python3.12/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet-3.2.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +greenlet-3.2.4.dist-info/METADATA,sha256=ZwuiD2PER_KIrBSuuQdUPtK-VCLKtfY5RueYGQheX6o,4120 +greenlet-3.2.4.dist-info/RECORD,, +greenlet-3.2.4.dist-info/WHEEL,sha256=LpQoElFRmdjMbJKp2FHd8t88QuEtEHUH_crLCk0WHuI,152 +greenlet-3.2.4.dist-info/licenses/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434 +greenlet-3.2.4.dist-info/licenses/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424 +greenlet-3.2.4.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9 +greenlet/CObjects.cpp,sha256=OPej1bWBgc4sRrTRQ2aFFML9pzDYKlKhlJSjsI0X_eU,3508 +greenlet/PyGreenlet.cpp,sha256=dGal9uux_E0d6yMaZfVYpdD9x1XFVOrp4s_or_D_UEM,24199 +greenlet/PyGreenlet.hpp,sha256=2ZQlOxYNoy7QwD7mppFoOXe_At56NIsJ0eNsE_hoSsw,1463 +greenlet/PyGreenletUnswitchable.cpp,sha256=PQE0fSZa_IOyUM44IESHkJoD2KtGW3dkhkmZSYY3WHs,4375 +greenlet/PyModule.cpp,sha256=J2TH06dGcNEarioS6NbWXkdME8hJY05XVbdqLrfO5w4,8587 +greenlet/TBrokenGreenlet.cpp,sha256=smN26uC7ahAbNYiS10rtWPjCeTG4jevM8siA2sjJiXg,1021 +greenlet/TExceptionState.cpp,sha256=U7Ctw9fBdNraS0d174MoQW7bN-ae209Ta0JuiKpcpVI,1359 +greenlet/TGreenlet.cpp,sha256=IM4cHsv1drEl35d7n8YOA_wR-R7oRvx5XhOJOK2PBB8,25732 +greenlet/TGreenlet.hpp,sha256=DoN795i3vofgll-20GA-ylg3qCNw-nKprLA6r7CK5HY,28522 +greenlet/TGreenletGlobals.cpp,sha256=YyEmDjKf1g32bsL-unIUScFLnnA1fzLWf2gOMd-D0Zw,3264 +greenlet/TMainGreenlet.cpp,sha256=fvgb8HHB-FVTPEKjR1s_ifCZSpp5D5YQByik0CnIABg,3276 +greenlet/TPythonState.cpp,sha256=b12U09sNjQvKG0_agROFHuJkDDa7HDccWaFW55XViQA,15975 +greenlet/TStackState.cpp,sha256=V444I8Jj9DhQz-9leVW_9dtiSRjaE1NMlgDG02Xxq-Y,7381 +greenlet/TThreadState.hpp,sha256=2Jgg7DtGggMYR_x3CLAvAFf1mIdIDtQvSSItcdmX4ZQ,19131 +greenlet/TThreadStateCreator.hpp,sha256=uYTexDWooXSSgUc5uh-Mhm5BQi3-kR6CqpizvNynBFQ,2610 +greenlet/TThreadStateDestroy.cpp,sha256=36yBCAMq3beXTZd-XnFA7DwaHVSOx2vc28-nf0spysU,8169 +greenlet/TUserGreenlet.cpp,sha256=uemg0lwKXtYB0yzmvyYdIIAsKnNkifXM1OJ2OlrFP1A,23553 +greenlet/__init__.py,sha256=vSR8EU6Bi32-0MkAlx--fzCL-Eheh6EqJWa-7B9LTOk,1723 +greenlet/__pycache__/__init__.cpython-312.pyc,, +greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so,sha256=5JNZW-sTm4GrmgS5ACVmTnr03ssOpDX0OVtVrUleW8U,1446176 +greenlet/greenlet.cpp,sha256=WdItb1yWL9WNsTqJNf0Iw8ZwDHD49pkDP0rIRGBg2pw,10996 +greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet/greenlet_allocator.hpp,sha256=eC0S1AQuep1vnVRsag-r83xgfAtbpn0qQZ-oXzQXaso,2607 +greenlet/greenlet_compiler_compat.hpp,sha256=nRxpLN9iNbnLVyFDeVmOwyeeNm6scQrOed1l7JQYMCM,4346 
+greenlet/greenlet_cpython_compat.hpp,sha256=kJG6d_yDwwl3bSZOOFqM3ks1UzVIGcwbsTM2s8C6VYE,4149 +greenlet/greenlet_exceptions.hpp,sha256=06Bx81DtVaJTa6RtiMcV141b-XHv4ppEgVItkblcLWY,4503 +greenlet/greenlet_internal.hpp,sha256=Ajc-_09W4xWzm9XfyXHAeQAFUgKGKsnJwYsTCoNy3ns,2709 +greenlet/greenlet_msvc_compat.hpp,sha256=0MyaiyoCE_A6UROXZlMQRxRS17gfyh0d7NUppU3EVFc,2978 +greenlet/greenlet_refs.hpp,sha256=OnbA91yZf3QHH6-eJccvoNDAaN-pQBMMrclFU1Ot3J4,34436 +greenlet/greenlet_slp_switch.hpp,sha256=kM1QHA2iV-gH4cFyN6lfIagHQxvJZjWOVJdIxRE3TlQ,3198 +greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867 +greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet/platform/__pycache__/__init__.cpython-312.pyc,, +greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143 +greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307 +greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671 +greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748 +greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479 +greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892 +greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245 +greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746 +greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398 +greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331 +greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779 +greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928 +greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426 +greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860 +greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815 +greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941 +greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759 +greenlet/platform/switch_ppc_macosx.h,sha256=Z6KN_ud0n6nC3ltJrNz2qtvER6vnRAVRNH9mdIDpMxY,2624 +greenlet/platform/switch_ppc_unix.h,sha256=-ZG7MSSPEA5N4qO9PQChtyEJ-Fm6qInhyZm_ZBHTtMg,2652 +greenlet/platform/switch_riscv_unix.h,sha256=606V6ACDf79Fz_WGItnkgbjIJ0pGg_sHmPyDxQYKK58,949 +greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763 +greenlet/platform/switch_sh_gcc.h,sha256=mcRJBTu-2UBf4kZtX601qofwuDuy-Y-hnxJtrcaB7do,901 +greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797 +greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509 +greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 +greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 +greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 +greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838 +greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059 
+greenlet/slp_platformselect.h,sha256=hTb3GFdcPUYJTuu1MY93js7MZEax1_e5E-gflpi0RzI,3959 +greenlet/tests/__init__.py,sha256=EtTtQfpRDde0MhsdAM5Cm7LYIfS_HKUIFwquiH4Q7ac,9736 +greenlet/tests/__pycache__/__init__.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc,, +greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc,, +greenlet/tests/__pycache__/leakcheck.cpython-312.pyc,, +greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc,, +greenlet/tests/__pycache__/test_cpp.cpython-312.pyc,, +greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc,, +greenlet/tests/__pycache__/test_gc.cpython-312.pyc,, +greenlet/tests/__pycache__/test_generator.cpython-312.pyc,, +greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc,, +greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc,, +greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc,, +greenlet/tests/__pycache__/test_leaks.cpython-312.pyc,, +greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc,, +greenlet/tests/__pycache__/test_throw.cpython-312.pyc,, +greenlet/tests/__pycache__/test_tracing.cpython-312.pyc,, +greenlet/tests/__pycache__/test_version.cpython-312.pyc,, +greenlet/tests/__pycache__/test_weakref.cpython-312.pyc,, +greenlet/tests/_test_extension.c,sha256=vkeGA-6oeJcGILsD7oIrT1qZop2GaTOHXiNT7mcSl-0,5773 +greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so,sha256=J6t_AfqGHhCVKKwL3MMeVLA3-0fX4CIb85P8aLPbbBY,17256 +greenlet/tests/_test_extension_cpp.cpp,sha256=e0kVnaB8CCaEhE9yHtNyfqTjevsPDKKx-zgxk7PPK48,6565 +greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so,sha256=bxLQ3ukXYXuFVMFVNEdMaeYQk8zgfeMgYPgw621VQZc,58384 +greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263 +greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985 +greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961 +greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524 +greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956 +greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285 +greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817 +greenlet/tests/leakcheck.py,sha256=JHgc45bnTyVtn9MiprIlz2ygSXMFtcaCSp2eB9XIhQE,12612 +greenlet/tests/test_contextvars.py,sha256=xutO-qZgKTwKsA9lAqTjIcTBEiQV4RpNKM-vO2_YCVU,10541 +greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736 +greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829 +greenlet/tests/test_gc.py,sha256=PCOaRpIyjNnNlDogGL3FZU_lrdXuM-pv1rxeE5TP5mc,2923 +greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240 +greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718 +greenlet/tests/test_greenlet.py,sha256=gSG6hOjKYyRRe5ZzNUpskrUcMnBT3WU4yITTzaZfLH4,47995 
+greenlet/tests/test_greenlet_trash.py,sha256=n2dBlQfOoEO1ODatFi8QdhboH3fB86YtqzcYMYOXxbw,7947 +greenlet/tests/test_leaks.py,sha256=OFSE870Zyql85HukfC_XYa2c4gDQBU889RV1AlLum74,18076 +greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446 +greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712 +greenlet/tests/test_tracing.py,sha256=NFD6Vcww8grBnFQFhCNdswwGetjLeLQ7vL2Qqw3LWBM,8591 +greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339 +greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883 diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/WHEEL b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/WHEEL new file mode 100644 index 0000000..d9747de --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_24_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/licenses/LICENSE new file mode 100644 index 0000000..b73a4a1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/licenses/LICENSE @@ -0,0 +1,30 @@ +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. + +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/licenses/LICENSE.PSF b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/licenses/LICENSE.PSF new file mode 100644 index 0000000..d3b509a --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/licenses/LICENSE.PSF @@ -0,0 +1,47 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
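[Editorial sketch, not part of the committed files.] The vendored frozenlist/__init__.py shown earlier implements a MutableSequence that stays mutable until freeze() is called; after that, every mutation raises RuntimeError and the list becomes hashable (hashing an unfrozen list raises instead). A minimal usage sketch of that API, using only the package as committed above (the variable name fl is invented for the example):

    from frozenlist import FrozenList

    fl = FrozenList([1, 2, 3])
    fl.append(4)                 # mutations are allowed while unfrozen
    assert not fl.frozen

    fl.freeze()                  # one-way switch; there is no "thaw"
    assert fl.frozen
    try:
        fl.append(5)             # any mutation now raises RuntimeError
    except RuntimeError:
        pass

    assert hash(fl) == hash((1, 2, 3, 4))   # hashable only once frozen

The same semantics hold for the C-accelerated _frozenlist extension also committed here, which replaces the pure-Python class unless the FROZENLIST_NO_EXTENSIONS environment variable is set.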
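[Editorial sketch, not part of the committed files.] The greenlet METADATA above describes greenlets as micro-threads with no implicit scheduling: nothing runs until switch() is called, and a switched-to greenlet resumes exactly where it last switched away. A minimal ping-pong sketch of that explicit control transfer (the names gr1, gr2, ping, pong are invented for the example):

    from greenlet import greenlet

    def ping():
        print("ping")
        gr2.switch()          # suspend here; pong runs next
        print("ping again")   # resumed by pong's switch back

    def pong():
        print("pong")
        gr1.switch()          # resume ping right after its switch() call

    gr1 = greenlet(ping)
    gr2 = greenlet(pong)
    gr1.switch()              # prints: ping, pong, ping again

When ping's function returns, control falls back to its parent (the main greenlet), so the top-level gr1.switch() call returns; this explicit, cooperative scheduling is what the description contrasts with Stackless tasklets.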
diff --git a/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/top_level.txt new file mode 100644 index 0000000..46725be --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet-3.2.4.dist-info/top_level.txt @@ -0,0 +1 @@ +greenlet diff --git a/venv/lib/python3.12/site-packages/greenlet/CObjects.cpp b/venv/lib/python3.12/site-packages/greenlet/CObjects.cpp new file mode 100644 index 0000000..c135995 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/CObjects.cpp @@ -0,0 +1,157 @@ +#ifndef COBJECTS_CPP +#define COBJECTS_CPP +/***************************************************************************** + * C interface + * + * These are exported using the CObject API + */ +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +#include "greenlet_exceptions.hpp" + +#include "greenlet_internal.hpp" +#include "greenlet_refs.hpp" + + +#include "TThreadStateDestroy.cpp" + +#include "PyGreenlet.hpp" + +using greenlet::PyErrOccurred; +using greenlet::Require; + + + +extern "C" { +static PyGreenlet* +PyGreenlet_GetCurrent(void) +{ + return GET_THREAD_STATE().state().get_current().relinquish_ownership(); +} + +static int +PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) +{ + return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); +} + +static PyGreenlet* +PyGreenlet_New(PyObject* run, PyGreenlet* parent) +{ + using greenlet::refs::NewDictReference; + // In the past, we didn't use green_new and green_init, but that + // was a maintenance issue because we duplicated code. This way is + // much safer, but slightly slower. If that's a problem, we could + // refactor green_init to separate argument parsing from initialization. 
+ OwnedGreenlet g = OwnedGreenlet::consuming(green_new(&PyGreenlet_Type, nullptr, nullptr)); + if (!g) { + return NULL; + } + + try { + NewDictReference kwargs; + if (run) { + kwargs.SetItem(mod_globs->str_run, run); + } + if (parent) { + kwargs.SetItem("parent", (PyObject*)parent); + } + + Require(green_init(g.borrow(), mod_globs->empty_tuple, kwargs.borrow())); + } + catch (const PyErrOccurred&) { + return nullptr; + } + + return g.relinquish_ownership(); +} + +static PyObject* +PyGreenlet_Switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + + if (args == NULL) { + args = mod_globs->empty_tuple; + } + + if (kwargs == NULL || !PyDict_Check(kwargs)) { + kwargs = NULL; + } + + return green_switch(self, args, kwargs); +} + +static PyObject* +PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return nullptr; + } + try { + PyErrPieces err_pieces(typ, val, tb); + return internal_green_throw(self, err_pieces).relinquish_ownership(); + } + catch (const PyErrOccurred&) { + return nullptr; + } +} + + + +static int +Extern_PyGreenlet_MAIN(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->main(); +} + +static int +Extern_PyGreenlet_ACTIVE(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->active(); +} + +static int +Extern_PyGreenlet_STARTED(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->started(); +} + +static PyGreenlet* +Extern_PyGreenlet_GET_PARENT(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + // This can return NULL even if there is no exception + return self->pimpl->parent().acquire(); +} +} // extern C. + +/** End C API ****************************************************************/ +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/PyGreenlet.cpp b/venv/lib/python3.12/site-packages/greenlet/PyGreenlet.cpp new file mode 100644 index 0000000..6b118a5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/PyGreenlet.cpp @@ -0,0 +1,751 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef PYGREENLET_CPP +#define PYGREENLET_CPP +/***************** +The Python slot functions for TGreenlet. 
+ */ + + +#define PY_SSIZE_T_CLEAN +#include <Python.h> +#include "structmember.h" // PyMemberDef + +#include "greenlet_internal.hpp" +#include "TThreadStateDestroy.cpp" +#include "TGreenlet.hpp" +// #include "TUserGreenlet.cpp" +// #include "TMainGreenlet.cpp" +// #include "TBrokenGreenlet.cpp" + + +#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" +#include "PyGreenlet.hpp" +// #include "TGreenlet.cpp" + +// #include "TExceptionState.cpp" +// #include "TPythonState.cpp" +// #include "TStackState.cpp" + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + + +static PyGreenlet* +green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) +{ + PyGreenlet* o = + (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); + if (o) { + // Recall: borrowing or getting the current greenlet + // causes the "deleteme list" to get cleared. So constructing a greenlet + // can do things like cause other greenlets to get finalized. + UserGreenlet* c = new UserGreenlet(o, GET_THREAD_STATE().state().borrow_current()); + assert(Py_REFCNT(o) == 1); + // Also: This looks like a memory leak, but isn't. Constructing the + // C++ object assigns it to the pimpl pointer of the Python object (o); + // we'll need that later. + assert(c == o->pimpl); + } + return o; +} + + +// green_init is used in the tp_init slot. So it's important that +// it can be called directly from CPython. Thus, we don't use +// BorrowedGreenlet and BorrowedObject --- although in theory +// these should be binary layout compatible, that may not be +// guaranteed to be the case (32-bit linux ppc possibly). +static int +green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + PyArgParseParam run; + PyArgParseParam nparent; + static const char* kwlist[] = { + "run", + "parent", + NULL + }; + + // recall: The O specifier does NOT increase the reference count. + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, "|OO:green", (char**)kwlist, &run, &nparent)) { + return -1; + } + + if (run) { + if (green_setrun(self, run, NULL)) { + return -1; + } + } + if (nparent && !nparent.is_None()) { + return green_setparent(self, nparent, NULL); + } + return 0; +} + + + +static int +green_traverse(PyGreenlet* self, visitproc visit, void* arg) +{ + // We must only visit referenced objects, i.e. only objects + // Py_INCREF'ed by this greenlet (directly or indirectly): + // + // - stack_prev is not visited: holds previous stack pointer, but it's not + // referenced + // - frames are not visited as we don't strongly reference them; + // alive greenlets are not garbage collected + // anyway. This can be a problem, however, if this greenlet is + // never allowed to finish, and is referenced from the frame: we + // have an uncollectible cycle in that case. Note that the + // frame object itself is also frequently not even tracked by the GC + // starting with Python 3.7 (frames are allocated by the + // interpreter untracked, and only become tracked when their + // evaluation is finished if they have a refcount > 1). 
All of + // this is to say that we should probably strongly reference + // the frame object. Doing so, while always allowing GC on a + // greenlet, solves several leaks for us. + + Py_VISIT(self->dict); + if (!self->pimpl) { + // Hmm. I have seen this at interpreter shutdown time, + // I think. That's very odd because this doesn't go away until + // we're ``green_dealloc()``, at which point we shouldn't be + // traversed anymore. + return 0; + } + + return self->pimpl->tp_traverse(visit, arg); +} + +static int +green_is_gc(PyObject* _self) +{ + BorrowedGreenlet self(_self); + int result = 0; + /* Main greenlet can be garbage collected since it can only + become unreachable if the underlying thread exited. + Active greenlets --- including those that are suspended --- + cannot be garbage collected, however. + */ + if (self->main() || !self->active()) { + result = 1; + } + // The main greenlet pointer will eventually go away after the thread dies. + if (self->was_running_in_dead_thread()) { + // Our thread is dead! We can never run again. Might as well + // GC us. Note that if a tuple containing only us and other + // immutable objects had been scanned before this, when we + // would have returned 0, the tuple will take itself out of GC + // tracking and never be investigated again. So that could + // result in both us and the tuple leaking due to an + // unreachable/uncollectible reference. The same goes for + // dictionaries. + // + // It's not a great idea to be changing our GC state on the + // fly. + result = 1; + } + return result; +} + + +static int +green_clear(PyGreenlet* self) +{ + /* Greenlet is only cleared if it is about to be collected. + Since active greenlets are not garbage collectable, we can + be sure that, even if they are deallocated during clear, + nothing they reference is in unreachable or finalizers, + so even if it switches we are relatively safe. */ + // XXX: Are we responsible for clearing weakrefs here? + Py_CLEAR(self->dict); + return self->pimpl->tp_clear(); +} + +/** + * Returns 0 on failure (the object was resurrected) or 1 on success. + **/ +static int +_green_dealloc_kill_started_non_main_greenlet(BorrowedGreenlet self) +{ + /* Hacks hacks hacks copied from instance_dealloc() */ + /* Temporarily resurrect the greenlet. */ + assert(self.REFCNT() == 0); + Py_SET_REFCNT(self.borrow(), 1); + /* Save the current exception, if any. */ + PyErrPieces saved_err; + try { + // BY THE TIME WE GET HERE, the state may actually be going + // away + // if we're shutting down the interpreter and freeing thread + // entries, + // this could result in freeing greenlets that were leaked. So + // we can't try to read the state. + self->deallocing_greenlet_in_thread( + self->thread_state() + ? static_cast<ThreadState*>(GET_THREAD_STATE()) + : nullptr); + } + catch (const PyErrOccurred&) { + PyErr_WriteUnraisable(self.borrow_o()); + /* XXX what else should we do? */ + } + /* Check for no resurrection must be done while we keep + * our internal reference, otherwise PyFile_WriteObject + * causes recursion if using Py_INCREF/Py_DECREF + */ + if (self.REFCNT() == 1 && self->active()) { + /* Not resurrected, but still not dead! + XXX what else should we do? we complain. */ + PyObject* f = PySys_GetObject("stderr"); + Py_INCREF(self.borrow_o()); /* leak! */ + if (f != NULL) { + PyFile_WriteString("GreenletExit did not kill ", f); + PyFile_WriteObject(self.borrow_o(), f, 0); + PyFile_WriteString("\n", f); + } + } + /* Restore the saved exception. 
*/ + saved_err.PyErrRestore(); + /* Undo the temporary resurrection; can't use DECREF here, + * it would cause a recursive call. + */ + assert(self.REFCNT() > 0); + + Py_ssize_t refcnt = self.REFCNT() - 1; + Py_SET_REFCNT(self.borrow_o(), refcnt); + if (refcnt != 0) { + /* Resurrected! */ + _Py_NewReference(self.borrow_o()); + Py_SET_REFCNT(self.borrow_o(), refcnt); + /* Better to use tp_finalizer slot (PEP 442) + * and call ``PyObject_CallFinalizerFromDealloc``, + * but that's only supported in Python 3.4+; see + * Modules/_io/iobase.c for an example. + * TODO: We no longer run on anything that old, switch to finalizers. + * + * The following approach is copied from iobase.c in CPython 2.7. + * (along with much of this function in general). Here's their + * comment: + * + * When called from a heap type's dealloc, the type will be + * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). + * + * On free-threaded builds of CPython, the type is meant to be immortal + * so we probably shouldn't mess with this? See + * test_issue_245_reference_counting_subclass_no_threads + */ + if (PyType_HasFeature(self.TYPE(), Py_TPFLAGS_HEAPTYPE)) { + Py_INCREF(self.TYPE()); + } + + PyObject_GC_Track((PyObject*)self); + + GREENLET_Py_DEC_REFTOTAL; +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif /* COUNT_ALLOCS */ + return 0; + } + return 1; +} + + +static void +green_dealloc(PyGreenlet* self) +{ + PyObject_GC_UnTrack(self); + BorrowedGreenlet me(self); + if (me->active() + && me->started() + && !me->main()) { + if (!_green_dealloc_kill_started_non_main_greenlet(me)) { + return; + } + } + + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs((PyObject*)self); + } + Py_CLEAR(self->dict); + + if (self->pimpl) { + // In case deleting this, which frees some memory, + // somehow winds up calling back into us. That's usually a + //bug in our code. + Greenlet* p = self->pimpl; + self->pimpl = nullptr; + delete p; + } + // and finally we're done. self is now invalid. 
+ Py_TYPE(self)->tp_free((PyObject*)self); +} + + + +static OwnedObject +internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces) +{ + PyObject* result = nullptr; + err_pieces.PyErrRestore(); + assert(PyErr_Occurred()); + if (self->started() && !self->active()) { + /* dead greenlet: turn GreenletExit into a regular return */ + result = g_handle_exit(OwnedObject()).relinquish_ownership(); + } + self->args() <<= result; + + return single_result(self->g_switch()); +} + + + +PyDoc_STRVAR( + green_switch_doc, + "switch(*args, **kwargs)\n" + "\n" + "Switch execution to this greenlet.\n" + "\n" + "If this greenlet has never been run, then this greenlet\n" + "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" + "\n" + "If the greenlet is active (has been run, but was switch()'ed\n" + "out before leaving its run function), then this greenlet will\n" + "be resumed and the return value to its switch call will be\n" + "None if no arguments are given, the given argument if one\n" + "argument is given, or the args tuple and keyword args dict if\n" + "multiple arguments are given.\n" + "\n" + "If the greenlet is dead, or is the current greenlet then this\n" + "function will simply return the arguments using the same rules as\n" + "above.\n"); + +static PyObject* +green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + using greenlet::SwitchingArgs; + SwitchingArgs switch_args(OwnedObject::owning(args), OwnedObject::owning(kwargs)); + self->pimpl->may_switch_away(); + self->pimpl->args() <<= switch_args; + + // If we're switching out of a greenlet, and that switch is the + // last thing the greenlet does, the greenlet ought to be able to + // go ahead and die at that point. Currently, someone else must + // manually switch back to the greenlet so that we "fall off the + // end" and can perform cleanup. You'd think we'd be able to + // figure out that this is happening using the frame's ``f_lasti`` + // member, which is supposed to be an index into + // ``frame->f_code->co_code``, the bytecode string. However, in + // recent interpreters, ``f_lasti`` tends not to be updated thanks + // to things like the PREDICT() macros in ceval.c. So it doesn't + // really work to do that in many cases. For example, the Python + // code: + // def run(): + // greenlet.getcurrent().parent.switch() + // produces bytecode of len 16, with the actual call to switch() + // being at index 10 (in Python 3.10). However, the reported + // ``f_lasti`` we actually see is...5! (Which happens to be the + // second byte of the CALL_METHOD op for ``getcurrent()``). + + try { + //OwnedObject result = single_result(self->pimpl->g_switch()); + OwnedObject result(single_result(self->pimpl->g_switch())); +#ifndef NDEBUG + // Note that the current greenlet isn't necessarily self. If self + // finished, we went to one of its parents. + assert(!self->pimpl->args()); + + const BorrowedGreenlet& current = GET_THREAD_STATE().state().borrow_current(); + // It's possible it's never been switched to. + assert(!current->args()); +#endif + PyObject* p = result.relinquish_ownership(); + + if (!p && !PyErr_Occurred()) { + // This shouldn't be happening anymore, so the asserts + // are there for debug builds. Non-debug builds + // crash "gracefully" in this case, although there is an + // argument to be made for killing the process in all + // cases --- for this to be the case, our switches + // probably nested in an incorrect way, so the state is + // suspicious. 
Nothing should be corrupt though, just + // confused at the Python level. Letting this propagate is + // probably good enough. + assert(p || PyErr_Occurred()); + throw PyErrOccurred( + mod_globs->PyExc_GreenletError, + "Greenlet.switch() returned NULL without an exception set." + ); + } + return p; + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + +PyDoc_STRVAR( + green_throw_doc, + "Switches execution to this greenlet, but immediately raises the\n" + "given exception in this greenlet. If no argument is provided, the " + "exception\n" + "defaults to `greenlet.GreenletExit`. The normal exception\n" + "propagation rules apply, as described for `switch`. Note that calling " + "this\n" + "method is almost equivalent to the following::\n" + "\n" + " def raiser():\n" + " raise typ, val, tb\n" + " g_raiser = greenlet(raiser, parent=g)\n" + " g_raiser.switch()\n" + "\n" + "except that this trick does not work for the\n" + "`greenlet.GreenletExit` exception, which would not propagate\n" + "from ``g_raiser`` to ``g``.\n"); + +static PyObject* +green_throw(PyGreenlet* self, PyObject* args) +{ + PyArgParseParam typ(mod_globs->PyExc_GreenletExit); + PyArgParseParam val; + PyArgParseParam tb; + + if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { + return nullptr; + } + + assert(typ.borrow() || val.borrow()); + + self->pimpl->may_switch_away(); + try { + // Both normalizing the error and the actual throw_greenlet + // could throw PyErrOccurred. + PyErrPieces err_pieces(typ.borrow(), val.borrow(), tb.borrow()); + + return internal_green_throw(self, err_pieces).relinquish_ownership(); + } + catch (const PyErrOccurred&) { + return nullptr; + } +} + +static int +green_bool(PyGreenlet* self) +{ + return self->pimpl->active(); +} + +/** + * CAUTION: Allocates memory, may run GC and arbitrary Python code. + */ +static PyObject* +green_getdict(PyGreenlet* self, void* UNUSED(context)) +{ + if (self->dict == NULL) { + self->dict = PyDict_New(); + if (self->dict == NULL) { + return NULL; + } + } + Py_INCREF(self->dict); + return self->dict; +} + +static int +green_setdict(PyGreenlet* self, PyObject* val, void* UNUSED(context)) +{ + PyObject* tmp; + + if (val == NULL) { + PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted"); + return -1; + } + if (!PyDict_Check(val)) { + PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary"); + return -1; + } + tmp = self->dict; + Py_INCREF(val); + self->dict = val; + Py_XDECREF(tmp); + return 0; +} + +static bool +_green_not_dead(BorrowedGreenlet self) +{ + // XXX: Where else should we do this? + // Probably on entry to most Python-facing functions? 
+ if (self->was_running_in_dead_thread()) { + self->deactivate_and_free(); + return false; + } + return self->active() || !self->started(); +} + + +static PyObject* +green_getdead(PyGreenlet* self, void* UNUSED(context)) +{ + if (_green_not_dead(self)) { + Py_RETURN_FALSE; + } + else { + Py_RETURN_TRUE; + } +} + +static PyObject* +green_get_stack_saved(PyGreenlet* self, void* UNUSED(context)) +{ + return PyLong_FromSsize_t(self->pimpl->stack_saved()); +} + + +static PyObject* +green_getrun(PyGreenlet* self, void* UNUSED(context)) +{ + try { + OwnedObject result(BorrowedGreenlet(self)->run()); + return result.relinquish_ownership(); + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->run(nrun); + return 0; + } + catch(const PyErrOccurred&) { + return -1; + } +} + +static PyObject* +green_getparent(PyGreenlet* self, void* UNUSED(context)) +{ + return BorrowedGreenlet(self)->parent().acquire_or_None(); +} + + +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->parent(nparent); + } + catch(const PyErrOccurred&) { + return -1; + } + return 0; +} + + +static PyObject* +green_getcontext(const PyGreenlet* self, void* UNUSED(context)) +{ + const Greenlet *const g = self->pimpl; + try { + OwnedObject result(g->context()); + return result.relinquish_ownership(); + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + +static int +green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->context(nctx); + return 0; + } + catch(const PyErrOccurred&) { + return -1; + } +} + + +static PyObject* +green_getframe(PyGreenlet* self, void* UNUSED(context)) +{ + const PythonState::OwnedFrame& top_frame = BorrowedGreenlet(self)->top_frame(); + return top_frame.acquire_or_None(); +} + + +static PyObject* +green_getstate(PyGreenlet* self) +{ + PyErr_Format(PyExc_TypeError, + "cannot serialize '%s' object", + Py_TYPE(self)->tp_name); + return nullptr; +} + +static PyObject* +green_repr(PyGreenlet* _self) +{ + BorrowedGreenlet self(_self); + /* + Return a string like + <greenlet.greenlet object at X (otid=X) current active started main> + + The handling of greenlets across threads is not super good. + We mostly use the internal definitions of these terms, but they + generally should make sense to users as well. + */ + PyObject* result; + int never_started = !self->started() && !self->active(); + + const char* const tp_name = Py_TYPE(self)->tp_name; + + if (_green_not_dead(self)) { + /* XXX: The otid= is almost useless because you can't correlate it to + any thread identifier exposed to Python. We could use + PyThreadState_GET()->thread_id, but we'd need to save that in the + greenlet, or save the whole PyThreadState object itself. + + As it stands, its only useful for identifying greenlets from the same thread. + */ + const char* state_in_thread; + if (self->was_running_in_dead_thread()) { + // The thread it was running in is dead! + // This can happen, especially at interpreter shut down. + // It complicates debugging output because it may be + // impossible to access the current thread state at that + // time. Thus, don't access the current thread state. + state_in_thread = " (thread exited)"; + } + else { + state_in_thread = GET_THREAD_STATE().state().is_current(self) + ? " current" + : (self->started() ? 
" suspended" : ""); + } + result = PyUnicode_FromFormat( + "<%s object at %p (otid=%p)%s%s%s%s>", + tp_name, + self.borrow_o(), + self->thread_state(), + state_in_thread, + self->active() ? " active" : "", + never_started ? " pending" : " started", + self->main() ? " main" : "" + ); + } + else { + result = PyUnicode_FromFormat( + "<%s object at %p (otid=%p) %sdead>", + tp_name, + self.borrow_o(), + self->thread_state(), + self->was_running_in_dead_thread() + ? "(thread exited) " + : "" + ); + } + + return result; +} + + +static PyMethodDef green_methods[] = { + { + .ml_name="switch", + .ml_meth=reinterpret_cast(green_switch), + .ml_flags=METH_VARARGS | METH_KEYWORDS, + .ml_doc=green_switch_doc + }, + {.ml_name="throw", .ml_meth=(PyCFunction)green_throw, .ml_flags=METH_VARARGS, .ml_doc=green_throw_doc}, + {.ml_name="__getstate__", .ml_meth=(PyCFunction)green_getstate, .ml_flags=METH_NOARGS, .ml_doc=NULL}, + {.ml_name=NULL, .ml_meth=NULL} /* sentinel */ +}; + +static PyGetSetDef green_getsets[] = { + /* name, getter, setter, doc, context pointer */ + {.name="__dict__", .get=(getter)green_getdict, .set=(setter)green_setdict}, + {.name="run", .get=(getter)green_getrun, .set=(setter)green_setrun}, + {.name="parent", .get=(getter)green_getparent, .set=(setter)green_setparent}, + {.name="gr_frame", .get=(getter)green_getframe }, + { + .name="gr_context", + .get=(getter)green_getcontext, + .set=(setter)green_setcontext + }, + {.name="dead", .get=(getter)green_getdead}, + {.name="_stack_saved", .get=(getter)green_get_stack_saved}, + {.name=NULL} +}; + +static PyMemberDef green_members[] = { + {.name=NULL} +}; + +static PyNumberMethods green_as_number = { + .nb_bool=(inquiry)green_bool, +}; + + +PyTypeObject PyGreenlet_Type = { + .ob_base=PyVarObject_HEAD_INIT(NULL, 0) + .tp_name="greenlet.greenlet", /* tp_name */ + .tp_basicsize=sizeof(PyGreenlet), /* tp_basicsize */ + /* methods */ + .tp_dealloc=(destructor)green_dealloc, /* tp_dealloc */ + .tp_repr=(reprfunc)green_repr, /* tp_repr */ + .tp_as_number=&green_as_number, /* tp_as _number*/ + .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + .tp_doc="greenlet(run=None, parent=None) -> greenlet\n\n" + "Creates a new greenlet object (without running it).\n\n" + " - *run* -- The callable to invoke.\n" + " - *parent* -- The parent greenlet. 
The default is the current " +"greenlet.", /* tp_doc */ + .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ + .tp_clear=(inquiry)green_clear, /* tp_clear */ + .tp_weaklistoffset=offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */ + + .tp_methods=green_methods, /* tp_methods */ + .tp_members=green_members, /* tp_members */ + .tp_getset=green_getsets, /* tp_getset */ + .tp_dictoffset=offsetof(PyGreenlet, dict), /* tp_dictoffset */ + .tp_init=(initproc)green_init, /* tp_init */ + .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ + .tp_new=(newfunc)green_new, /* tp_new */ + .tp_free=PyObject_GC_Del, /* tp_free */ + .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ +}; + +#endif + +// Local Variables: +// flycheck-clang-include-path: ("/opt/local/Library/Frameworks/Python.framework/Versions/3.8/include/python3.8") +// End: diff --git a/venv/lib/python3.12/site-packages/greenlet/PyGreenlet.hpp b/venv/lib/python3.12/site-packages/greenlet/PyGreenlet.hpp new file mode 100644 index 0000000..df6cd80 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/PyGreenlet.hpp @@ -0,0 +1,35 @@ +#ifndef PYGREENLET_HPP +#define PYGREENLET_HPP + + +#include "greenlet.h" +#include "greenlet_compiler_compat.hpp" +#include "greenlet_refs.hpp" + + +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::BorrowedGreenlet; +using greenlet::refs::BorrowedObject; +using greenlet::refs::OwnedObject; +using greenlet::refs::PyErrPieces; + + +// XXX: This doesn't really belong here; it's not a Python slot. +static OwnedObject internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces); + +static PyGreenlet* green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)); +static int green_clear(PyGreenlet* self); +static int green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs); +static int green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)); +static int green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)); +static int green_traverse(PyGreenlet* self, visitproc visit, void* arg); +static void green_dealloc(PyGreenlet* self); +static PyObject* green_getparent(PyGreenlet* self, void* UNUSED(context)); + +static int green_is_gc(PyObject* self); +static PyObject* green_getdead(PyGreenlet* self, void* UNUSED(context)); +static PyObject* green_getrun(PyGreenlet* self, void* UNUSED(context)); +static int green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)); +static PyObject* green_getframe(PyGreenlet* self, void* UNUSED(context)); +static PyObject* green_repr(PyGreenlet* self); +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/PyGreenletUnswitchable.cpp b/venv/lib/python3.12/site-packages/greenlet/PyGreenletUnswitchable.cpp new file mode 100644 index 0000000..1b768ee --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/PyGreenletUnswitchable.cpp @@ -0,0 +1,147 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + Implementation of the Python slots for PyGreenletUnswitchable_Type +*/ +#ifndef PY_GREENLET_UNSWITCHABLE_CPP +#define PY_GREENLET_UNSWITCHABLE_CPP + + + +#define PY_SSIZE_T_CLEAN +#include <Python.h> +#include "structmember.h" // PyMemberDef + +#include "greenlet_internal.hpp" +// Code after this point can assume access to things declared in stdint.h, +// including the fixed-width types. This goes for the platform-specific switch functions +// as well.
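/*
  A hedged sketch of how this test-only type can be driven from Python,
  assuming it is importable as greenlet._greenlet.UnswitchableGreenlet
  (its tp_name below). Setting force_switch_error makes g_switchstack()
  report failure, which surfaces as the SystemError set in
  Greenlet::on_switchstack_or_initialstub_failure():

      # Python usage sketch; the import path is an assumption
      from greenlet._greenlet import UnswitchableGreenlet

      g = UnswitchableGreenlet(lambda: None)
      g.force_switch_error = True
      try:
          g.switch()
      except SystemError as e:
          print(e)  # "Failed to switch stacks into a greenlet for the first time."
*/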
+#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenlet.cpp" +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" + + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + +#include "PyGreenlet.hpp" + +static PyGreenlet* +green_unswitchable_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) +{ + PyGreenlet* o = + (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); + if (o) { + new BrokenGreenlet(o, GET_THREAD_STATE().state().borrow_current()); + assert(Py_REFCNT(o) == 1); + } + return o; +} + +static PyObject* +green_unswitchable_getforce(PyGreenlet* self, void* UNUSED(context)) +{ + BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl); + return PyBool_FromLong(broken->_force_switch_error); +} + +static int +green_unswitchable_setforce(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) +{ + if (!nforce) { + PyErr_SetString( + PyExc_AttributeError, + "Cannot delete force_switch_error" + ); + return -1; + } + BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl); + int is_true = PyObject_IsTrue(nforce); + if (is_true == -1) { + return -1; + } + broken->_force_switch_error = is_true; + return 0; +} + +static PyObject* +green_unswitchable_getforceslp(PyGreenlet* self, void* UNUSED(context)) +{ + BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl); + return PyBool_FromLong(broken->_force_slp_switch_error); +} + +static int +green_unswitchable_setforceslp(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) +{ + if (!nforce) { + PyErr_SetString( + PyExc_AttributeError, + "Cannot delete force_slp_switch_error" + ); + return -1; + } + BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl); + int is_true = PyObject_IsTrue(nforce); + if (is_true == -1) { + return -1; + } + broken->_force_slp_switch_error = is_true; + return 0; +} + +static PyGetSetDef green_unswitchable_getsets[] = { + /* name, getter, setter, doc, closure (context pointer) */ + { + .name="force_switch_error", + .get=(getter)green_unswitchable_getforce, + .set=(setter)green_unswitchable_setforce, + .doc=NULL + }, + { + .name="force_slp_switch_error", + .get=(getter)green_unswitchable_getforceslp, + .set=(setter)green_unswitchable_setforceslp, + .doc=nullptr + }, + {.name=nullptr} +}; + +PyTypeObject PyGreenletUnswitchable_Type = { + .ob_base=PyVarObject_HEAD_INIT(NULL, 0) + .tp_name="greenlet._greenlet.UnswitchableGreenlet", + .tp_dealloc= (destructor)green_dealloc, /* tp_dealloc */ + .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + .tp_doc="Undocumented internal class", /* tp_doc */ + .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ + .tp_clear=(inquiry)green_clear, /* tp_clear */ + + .tp_getset=green_unswitchable_getsets, /* tp_getset */ + .tp_base=&PyGreenlet_Type, /* tp_base */ + .tp_init=(initproc)green_init, /* tp_init */ + .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ + .tp_new=(newfunc)green_unswitchable_new, /* tp_new */ + .tp_free=PyObject_GC_Del, /* tp_free */ + .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ +}; + + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/PyModule.cpp
b/venv/lib/python3.12/site-packages/greenlet/PyModule.cpp new file mode 100644 index 0000000..6adcb5c --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/PyModule.cpp @@ -0,0 +1,292 @@ +#ifndef PY_MODULE_CPP +#define PY_MODULE_CPP + +#include "greenlet_internal.hpp" + + +#include "TGreenletGlobals.cpp" +#include "TMainGreenlet.cpp" +#include "TThreadStateDestroy.cpp" + +using greenlet::LockGuard; +using greenlet::ThreadState; + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +# pragma clang diagnostic ignored "-Wunused-variable" +#endif + +PyDoc_STRVAR(mod_getcurrent_doc, + "getcurrent() -> greenlet\n" + "\n" + "Returns the current greenlet (i.e. the one which called this " + "function).\n"); + +static PyObject* +mod_getcurrent(PyObject* UNUSED(module)) +{ + return GET_THREAD_STATE().state().get_current().relinquish_ownership_o(); +} + +PyDoc_STRVAR(mod_settrace_doc, + "settrace(callback) -> object\n" + "\n" + "Sets a new tracing function and returns the previous one.\n"); +static PyObject* +mod_settrace(PyObject* UNUSED(module), PyObject* args) +{ + PyArgParseParam tracefunc; + if (!PyArg_ParseTuple(args, "O", &tracefunc)) { + return NULL; + } + ThreadState& state = GET_THREAD_STATE(); + OwnedObject previous = state.get_tracefunc(); + if (!previous) { + previous = Py_None; + } + + state.set_tracefunc(tracefunc); + + return previous.relinquish_ownership(); +} + +PyDoc_STRVAR(mod_gettrace_doc, + "gettrace() -> object\n" + "\n" + "Returns the currently set tracing function, or None.\n"); + +static PyObject* +mod_gettrace(PyObject* UNUSED(module)) +{ + OwnedObject tracefunc = GET_THREAD_STATE().state().get_tracefunc(); + if (!tracefunc) { + tracefunc = Py_None; + } + return tracefunc.relinquish_ownership(); +} + + + +PyDoc_STRVAR(mod_set_thread_local_doc, + "set_thread_local(key, value) -> None\n" + "\n" + "Set a value in the current thread-local dictionary. Debugging only.\n"); + +static PyObject* +mod_set_thread_local(PyObject* UNUSED(module), PyObject* args) +{ + PyArgParseParam key; + PyArgParseParam value; + PyObject* result = NULL; + + if (PyArg_UnpackTuple(args, "set_thread_local", 2, 2, &key, &value)) { + if(PyDict_SetItem( + PyThreadState_GetDict(), // borrow + key, + value) == 0 ) { + // success + Py_INCREF(Py_None); + result = Py_None; + } + } + return result; +} + +PyDoc_STRVAR(mod_get_pending_cleanup_count_doc, + "get_pending_cleanup_count() -> Integer\n" + "\n" + "Get the number of greenlet cleanup operations pending. Testing only.\n"); + + +static PyObject* +mod_get_pending_cleanup_count(PyObject* UNUSED(module)) +{ + LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); + return PyLong_FromSize_t(mod_globs->thread_states_to_destroy.size()); +} + +PyDoc_STRVAR(mod_get_total_main_greenlets_doc, + "get_total_main_greenlets() -> Integer\n" + "\n" + "Quickly return the number of main greenlets that exist. Testing only.\n"); + +static PyObject* +mod_get_total_main_greenlets(PyObject* UNUSED(module)) +{ + return PyLong_FromSize_t(G_TOTAL_MAIN_GREENLETS); +} + + + +PyDoc_STRVAR(mod_get_clocks_used_doing_optional_cleanup_doc, + "get_clocks_used_doing_optional_cleanup() -> Integer\n" + "\n" + "Get the number of clock ticks the program has used doing optional " + "greenlet cleanup.\n" + "Beginning in greenlet 2.0, greenlet tries to find and dispose of greenlets\n" + "that leaked after a thread exited. 
This requires invoking Python's garbage collector,\n" + "which may have a performance cost proportional to the number of live objects.\n" + "This function returns the amount of processor time\n" + "greenlet has used to do this. In programs that run with very large amounts of live\n" + "objects, this metric can be used to decide whether the cost of doing this cleanup\n" + "is worth the memory leak being corrected. If not, you can disable the cleanup\n" + "using ``enable_optional_cleanup(False)``.\n" + "The units are arbitrary and can only be compared to themselves (similarly to ``time.clock()``);\n" + "for example, to see how it scales with your heap. You can attempt to convert them into seconds\n" + "by dividing by the value of CLOCKS_PER_SEC.\n" + "If cleanup has been disabled, returns None.\n" + "\n" + "This is an implementation specific, provisional API. It may be changed or removed\n" + "in the future.\n" + ".. versionadded:: 2.0" + ); +static PyObject* +mod_get_clocks_used_doing_optional_cleanup(PyObject* UNUSED(module)) +{ + std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); + + if (clocks == std::clock_t(-1)) { + Py_RETURN_NONE; + } + // This might not actually work on some implementations; clock_t + // is an opaque type. + return PyLong_FromSsize_t(clocks); +} + +PyDoc_STRVAR(mod_enable_optional_cleanup_doc, + "enable_optional_cleanup(bool) -> None\n" + "\n" + "Enable or disable optional cleanup operations.\n" + "See ``get_clocks_used_doing_optional_cleanup()`` for details.\n" + ); +static PyObject* +mod_enable_optional_cleanup(PyObject* UNUSED(module), PyObject* flag) +{ + int is_true = PyObject_IsTrue(flag); + if (is_true == -1) { + return nullptr; + } + + std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); + if (is_true) { + // If we already have a value, we don't want to lose it. + if (clocks == std::clock_t(-1)) { + clocks = 0; + } + } + else { + clocks = std::clock_t(-1); + } + Py_RETURN_NONE; +} + + + + +#if !GREENLET_PY313 +PyDoc_STRVAR(mod_get_tstate_trash_delete_nesting_doc, + "get_tstate_trash_delete_nesting() -> Integer\n" + "\n" + "Return the 'trash can' nesting level.
Testing only.\n"); +static PyObject* +mod_get_tstate_trash_delete_nesting(PyObject* UNUSED(module)) +{ + PyThreadState* tstate = PyThreadState_GET(); + +#if GREENLET_PY312 + return PyLong_FromLong(tstate->trash.delete_nesting); +#else + return PyLong_FromLong(tstate->trash_delete_nesting); +#endif +} +#endif + + + + +static PyMethodDef GreenMethods[] = { + { + .ml_name="getcurrent", + .ml_meth=(PyCFunction)mod_getcurrent, + .ml_flags=METH_NOARGS, + .ml_doc=mod_getcurrent_doc + }, + { + .ml_name="settrace", + .ml_meth=(PyCFunction)mod_settrace, + .ml_flags=METH_VARARGS, + .ml_doc=mod_settrace_doc + }, + { + .ml_name="gettrace", + .ml_meth=(PyCFunction)mod_gettrace, + .ml_flags=METH_NOARGS, + .ml_doc=mod_gettrace_doc + }, + { + .ml_name="set_thread_local", + .ml_meth=(PyCFunction)mod_set_thread_local, + .ml_flags=METH_VARARGS, + .ml_doc=mod_set_thread_local_doc + }, + { + .ml_name="get_pending_cleanup_count", + .ml_meth=(PyCFunction)mod_get_pending_cleanup_count, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_pending_cleanup_count_doc + }, + { + .ml_name="get_total_main_greenlets", + .ml_meth=(PyCFunction)mod_get_total_main_greenlets, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_total_main_greenlets_doc + }, + { + .ml_name="get_clocks_used_doing_optional_cleanup", + .ml_meth=(PyCFunction)mod_get_clocks_used_doing_optional_cleanup, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_clocks_used_doing_optional_cleanup_doc + }, + { + .ml_name="enable_optional_cleanup", + .ml_meth=(PyCFunction)mod_enable_optional_cleanup, + .ml_flags=METH_O, + .ml_doc=mod_enable_optional_cleanup_doc + }, +#if !GREENLET_PY313 + { + .ml_name="get_tstate_trash_delete_nesting", + .ml_meth=(PyCFunction)mod_get_tstate_trash_delete_nesting, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_tstate_trash_delete_nesting_doc + }, +#endif + {.ml_name=NULL, .ml_meth=NULL} /* Sentinel */ +}; + +static const char* const copy_on_greentype[] = { + "getcurrent", + "error", + "GreenletExit", + "settrace", + "gettrace", + NULL +}; + +static struct PyModuleDef greenlet_module_def = { + .m_base=PyModuleDef_HEAD_INIT, + .m_name="greenlet._greenlet", + .m_doc=NULL, + .m_size=-1, + .m_methods=GreenMethods, +}; + + +#endif + +#ifdef __clang__ +# pragma clang diagnostic pop +#elif defined(__GNUC__) +# pragma GCC diagnostic pop +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/TBrokenGreenlet.cpp b/venv/lib/python3.12/site-packages/greenlet/TBrokenGreenlet.cpp new file mode 100644 index 0000000..7e9ab5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TBrokenGreenlet.cpp @@ -0,0 +1,45 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::BrokenGreenlet.
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ + +#include "TGreenlet.hpp" + +namespace greenlet { + +void* BrokenGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void BrokenGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast<BrokenGreenlet*>(ptr), + 1); +} + +greenlet::PythonAllocator<greenlet::BrokenGreenlet> greenlet::BrokenGreenlet::allocator; + +bool +BrokenGreenlet::force_slp_switch_error() const noexcept +{ + return this->_force_slp_switch_error; +} + +UserGreenlet::switchstack_result_t BrokenGreenlet::g_switchstack(void) +{ + if (this->_force_switch_error) { + return switchstack_result_t(-1); + } + return UserGreenlet::g_switchstack(); +} + +}; //namespace greenlet diff --git a/venv/lib/python3.12/site-packages/greenlet/TExceptionState.cpp b/venv/lib/python3.12/site-packages/greenlet/TExceptionState.cpp new file mode 100644 index 0000000..08a94ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TExceptionState.cpp @@ -0,0 +1,62 @@ +#ifndef GREENLET_EXCEPTION_STATE_CPP +#define GREENLET_EXCEPTION_STATE_CPP + +#include <Python.h> +#include "TGreenlet.hpp" + +namespace greenlet { + + +ExceptionState::ExceptionState() +{ + this->clear(); +} + +void ExceptionState::operator<<(const PyThreadState *const tstate) noexcept +{ + this->exc_info = tstate->exc_info; + this->exc_state = tstate->exc_state; +} + +void ExceptionState::operator>>(PyThreadState *const tstate) noexcept +{ + tstate->exc_state = this->exc_state; + tstate->exc_info = + this->exc_info ? this->exc_info : &tstate->exc_state; + this->clear(); +} + +void ExceptionState::clear() noexcept +{ + this->exc_info = nullptr; + this->exc_state.exc_value = nullptr; +#if !GREENLET_PY311 + this->exc_state.exc_type = nullptr; + this->exc_state.exc_traceback = nullptr; +#endif + this->exc_state.previous_item = nullptr; +} + +int ExceptionState::tp_traverse(visitproc visit, void* arg) noexcept +{ + Py_VISIT(this->exc_state.exc_value); +#if !GREENLET_PY311 + Py_VISIT(this->exc_state.exc_type); + Py_VISIT(this->exc_state.exc_traceback); +#endif + return 0; +} + +void ExceptionState::tp_clear() noexcept +{ + Py_CLEAR(this->exc_state.exc_value); +#if !GREENLET_PY311 + Py_CLEAR(this->exc_state.exc_type); + Py_CLEAR(this->exc_state.exc_traceback); +#endif +} + + +}; // namespace greenlet + +#endif // GREENLET_EXCEPTION_STATE_CPP diff --git a/venv/lib/python3.12/site-packages/greenlet/TGreenlet.cpp b/venv/lib/python3.12/site-packages/greenlet/TGreenlet.cpp new file mode 100644 index 0000000..d12722b --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TGreenlet.cpp @@ -0,0 +1,719 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::Greenlet.
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef TGREENLET_CPP +#define TGREENLET_CPP +#include "greenlet_internal.hpp" +#include "TGreenlet.hpp" + + +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" + +namespace greenlet { + +Greenlet::Greenlet(PyGreenlet* p) + : Greenlet(p, StackState()) +{ +} + +Greenlet::Greenlet(PyGreenlet* p, const StackState& initial_stack) + : _self(p), stack_state(initial_stack) +{ + assert(p->pimpl == nullptr); + p->pimpl = this; +} + +Greenlet::~Greenlet() +{ + // XXX: Can't do this. tp_clear is a virtual function, and by the + // time we're here, we've sliced off our child classes. + //this->tp_clear(); + this->_self->pimpl = nullptr; +} + +bool +Greenlet::force_slp_switch_error() const noexcept +{ + return false; +} + +void +Greenlet::release_args() +{ + this->switch_args.CLEAR(); +} + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. + */ +OwnedObject +Greenlet::throw_GreenletExit_during_dealloc(const ThreadState& UNUSED(current_thread_state)) +{ + // If we're killed because we lost all references in the + // middle of a switch, that's ok. Don't reset the args/kwargs, + // we still want to pass them to the parent. + PyErr_SetString(mod_globs->PyExc_GreenletExit, + "Killing the greenlet because all references have vanished."); + // To get here it had to have run before + return this->g_switch(); +} + +inline void +Greenlet::slp_restore_state() noexcept +{ +#ifdef SLP_BEFORE_RESTORE_STATE + SLP_BEFORE_RESTORE_STATE(); +#endif + this->stack_state.copy_heap_to_stack( + this->thread_state()->borrow_current()->stack_state); +} + + +inline int +Greenlet::slp_save_state(char *const stackref) noexcept +{ + // XXX: This used to happen in the middle, before saving, but + // after finding the next owner. Does that matter? This is + // only defined for Sparc/GCC where it flushes register + // windows to the stack (I think) +#ifdef SLP_BEFORE_SAVE_STATE + SLP_BEFORE_SAVE_STATE(); +#endif + return this->stack_state.copy_stack_to_heap(stackref, + this->thread_state()->borrow_current()->stack_state); +} + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. + */ +OwnedObject +Greenlet::on_switchstack_or_initialstub_failure( + Greenlet* target, + const Greenlet::switchstack_result_t& err, + const bool target_was_me, + const bool was_initial_stub) +{ + // If we get here, either g_initialstub() + // failed, or g_switchstack() failed. Either one of those + // cases SHOULD leave us in the original greenlet with a valid stack. + if (!PyErr_Occurred()) { + PyErr_SetString( + PyExc_SystemError, + was_initial_stub + ? "Failed to switch stacks into a greenlet for the first time." + : "Failed to switch stacks into a running greenlet."); + } + this->release_args(); + + if (target && !target_was_me) { + target->murder_in_place(); + } + + assert(!err.the_new_current_greenlet); + assert(!err.origin_greenlet); + return OwnedObject(); + +} + +OwnedGreenlet +Greenlet::g_switchstack_success() noexcept +{ + PyThreadState* tstate = PyThreadState_GET(); + // restore the saved state + this->python_state >> tstate; + this->exception_state >> tstate; + + // The thread state hasn't been changed yet. 
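/*
  The exception_state save/restore above is observable from Python: each
  greenlet keeps its own sys.exc_info() view across switches. A hedged,
  illustrative sketch (the names main, g, and child are not part of any API):

      import sys
      import greenlet

      def child():
          try:
              raise ValueError("inside child")
          except ValueError:
              main.switch()              # suspend while handling it
              print(sys.exc_info()[0])   # <class 'ValueError'> again on resume

      main = greenlet.getcurrent()
      g = greenlet.greenlet(child)
      g.switch()
      print(sys.exc_info()[0])           # None: main has its own (empty) state
      g.switch()                         # resume child inside its handler
*/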
+ ThreadState* thread_state = this->thread_state(); + OwnedGreenlet result(thread_state->get_current()); + thread_state->set_current(this->self()); + //assert(thread_state->borrow_current().borrow() == this->_self); + return result; +} + +Greenlet::switchstack_result_t +Greenlet::g_switchstack(void) +{ + // if any of these assertions fail, it's likely because we + // switched away and tried to switch back to us. Early stages of + // switching are not reentrant because we re-use ``this->args()``. + // Switching away would happen if we trigger a garbage collection + // (by just using some Python APIs that happen to allocate Python + // objects) and some garbage had weakref callbacks or __del__ that + // switches (people don't write code like that by hand, but with + // gevent it's possible without realizing it) + assert(this->args() || PyErr_Occurred()); + { /* save state */ + if (this->thread_state()->is_current(this->self())) { + // Hmm, nothing to do. + // TODO: Does this bypass trace events that are + // important? + return switchstack_result_t(0, + this, this->thread_state()->borrow_current()); + } + BorrowedGreenlet current = this->thread_state()->borrow_current(); + PyThreadState* tstate = PyThreadState_GET(); + + current->python_state << tstate; + current->exception_state << tstate; + this->python_state.will_switch_from(tstate); + switching_thread_state = this; + current->expose_frames(); + } + assert(this->args() || PyErr_Occurred()); + // If this is the first switch into a greenlet, this will + // return twice, once with 1 in the new greenlet, once with 0 + // in the origin. + int err; + if (this->force_slp_switch_error()) { + err = -1; + } + else { + err = slp_switch(); + } + + if (err < 0) { /* error */ + // Tested by + // test_greenlet.TestBrokenGreenlets.test_failed_to_slp_switch_into_running + // + // It's not clear if it's worth trying to clean up and + // continue here. Failing to switch stacks is a big deal which + // may not be recoverable (who knows what state the stack is in). + // Also, we've stolen references in preparation for calling + // ``g_switchstack_success()`` and we don't have a clean + // mechanism for backing that all out. + Py_FatalError("greenlet: Failed low-level slp_switch(). The stack is probably corrupt."); + } + + // No stack-based variables are valid anymore. + + // But the global is volatile so we can reload it without the + // compiler caching it from earlier. + Greenlet* greenlet_that_switched_in = switching_thread_state; // aka this + switching_thread_state = nullptr; + // except that no stack variables are valid, we would: + // assert(this == greenlet_that_switched_in); + + // switchstack success is where we restore the exception state, + // etc. It returns the origin greenlet because it's convenient. + + OwnedGreenlet origin = greenlet_that_switched_in->g_switchstack_success(); + assert(greenlet_that_switched_in->args() || PyErr_Occurred()); + return switchstack_result_t(err, greenlet_that_switched_in, origin); +} + + +inline void +Greenlet::check_switch_allowed() const +{ + // TODO: Make this take a parameter of the current greenlet, + // or current main greenlet, to make the check for + // cross-thread switching cheaper. Surely somewhere up the + // call stack we've already accessed the thread local variable. + + // We expect to always have a main greenlet now; accessing the thread state + // created it.
However, if we get here and cleanup has already + // begun because we're a greenlet that was running in a + // (now dead) thread, these invariants will not hold true. In + // fact, accessing `this->thread_state` may not even be possible. + + // If the thread this greenlet was running in is dead, + // we'll still have a reference to a main greenlet, but the + // thread state pointer we have is bogus. + // TODO: Give the objects an API to determine if they belong + // to a dead thread. + + const BorrowedMainGreenlet main_greenlet = this->find_main_greenlet_in_lineage(); + + if (!main_greenlet) { + throw PyErrOccurred(mod_globs->PyExc_GreenletError, + "cannot switch to a garbage collected greenlet"); + } + + if (!main_greenlet->thread_state()) { + throw PyErrOccurred(mod_globs->PyExc_GreenletError, + "cannot switch to a different thread (which happens to have exited)"); + } + + // The main greenlet we found was from the .parent lineage. + // That may or may not have any relationship to the main + // greenlet of the running thread. We can't actually access + // our this->thread_state members to try to check that, + // because it could be in the process of getting destroyed, + // but setting the main_greenlet->thread_state member to NULL + // may not be visible yet. So we need to check against the + // current thread state (once the cheaper checks are out of + // the way) + const BorrowedMainGreenlet current_main_greenlet = GET_THREAD_STATE().state().borrow_main_greenlet(); + if ( + // lineage main greenlet is not this thread's greenlet + current_main_greenlet != main_greenlet + || ( + // attached to some thread + this->main_greenlet() + // XXX: Same condition as above. Was this supposed to be + // this->main_greenlet()? + && current_main_greenlet != main_greenlet) + // switching into a known dead thread (XXX: which, if we get here, + // is bad, because we just accessed the thread state, which is + // gone!) + || (!current_main_greenlet->thread_state())) { + // CAUTION: This may trigger memory allocations, gc, and + // arbitrary Python code. + throw PyErrOccurred( + mod_globs->PyExc_GreenletError, + "Cannot switch to a different thread\n\tCurrent: %R\n\tExpected: %R", + current_main_greenlet, main_greenlet); + } +} + +const OwnedObject +Greenlet::context() const +{ + using greenlet::PythonStateContext; + OwnedObject result; + + if (this->is_currently_running_in_some_thread()) { + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + if (GET_THREAD_STATE().state().is_current(this->self())) { + result = PythonStateContext::context(PyThreadState_GET()); + } + else { + throw ValueError( + "cannot get context of a " + "greenlet that is running in a different thread"); + } + } + else { + /* Greenlet is not running: just return context. */ + result = this->python_state.context(); + } + if (!result) { + result = OwnedObject::None(); + } + return result; +} + + +void +Greenlet::context(BorrowedObject given) +{ + using greenlet::PythonStateContext; + if (!given) { + throw AttributeError("can't delete context attribute"); + } + if (given.is_None()) { + /* "Empty context" is stored as NULL, not None.
*/ + given = nullptr; + } + + //checks type, incrs refcnt + greenlet::refs::OwnedContext context(given); + PyThreadState* tstate = PyThreadState_GET(); + + if (this->is_currently_running_in_some_thread()) { + if (!GET_THREAD_STATE().state().is_current(this->self())) { + throw ValueError("cannot set context of a greenlet" + " that is running in a different thread"); + } + + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + OwnedObject octx = OwnedObject::consuming(PythonStateContext::context(tstate)); + PythonStateContext::context(tstate, context.relinquish_ownership()); + } + else { + /* Greenlet is not running: just set context. Note that the + greenlet may be dead.*/ + this->python_state.context() = context; + } +} + +/** + * CAUTION: May invoke arbitrary Python code. + * + * Figure out what the result of ``greenlet.switch(arg, kwargs)`` + * should be and transfers ownership of it to the left-hand-side. + * + * If switch() was just passed an arg tuple, then we'll just return that. + * If only keyword arguments were passed, then we'll pass the keyword + * argument dict. Otherwise, we'll create a tuple of (args, kwargs) and + * return both. + * + * CAUTION: This may allocate a new tuple object, which may + * cause the Python garbage collector to run, which in turn may + * run arbitrary Python code that switches. + */ +OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept +{ + // Because this may invoke arbitrary Python code, which could + // result in switching back to us, we need to get the + // arguments locally on the stack. + assert(rhs); + OwnedObject args = rhs.args(); + OwnedObject kwargs = rhs.kwargs(); + rhs.CLEAR(); + // We shouldn't be called twice for the same switch. + assert(args || kwargs); + assert(!rhs); + + if (!kwargs) { + lhs = args; + } + else if (!PyDict_Size(kwargs.borrow())) { + lhs = args; + } + else if (!PySequence_Length(args.borrow())) { + lhs = kwargs; + } + else { + // PyTuple_Pack allocates memory, may GC, may run arbitrary + // Python code. + lhs = OwnedObject::consuming(PyTuple_Pack(2, args.borrow(), kwargs.borrow())); + } + return lhs; +} + +static OwnedObject +g_handle_exit(const OwnedObject& greenlet_result) +{ + if (!greenlet_result && mod_globs->PyExc_GreenletExit.PyExceptionMatches()) { + /* catch and ignore GreenletExit */ + PyErrFetchParam val; + PyErr_Fetch(PyErrFetchParam(), val, PyErrFetchParam()); + if (!val) { + return OwnedObject::None(); + } + return OwnedObject(val); + } + + if (greenlet_result) { + // package the result into a 1-tuple + // PyTuple_Pack increments the reference of its arguments, + // so we always need to decref the greenlet result; + // the owner will do that. + return OwnedObject::consuming(PyTuple_Pack(1, greenlet_result.borrow())); + } + + return OwnedObject(); +} + + + +/** + * May run arbitrary Python code. + */ +OwnedObject +Greenlet::g_switch_finish(const switchstack_result_t& err) +{ + assert(err.the_new_current_greenlet == this); + + ThreadState& state = *this->thread_state(); + // Because calling the trace function could do arbitrary things, + // including switching away from this greenlet and then maybe + // switching back, we need to capture the arguments now so that + // they don't change. 
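/*
  A hedged sketch of what these marshalling rules look like from Python:
  the value computed by operator<<= above is what the peer's pending
  switch() call returns (the args tuple alone, the kwargs dict alone,
  or an (args, kwargs) pair when both were passed):

      import greenlet

      def child():
          print(main.switch())   # (1, 2)            -- args only
          print(main.switch())   # {'a': 1}          -- kwargs only
          print(main.switch())   # ((1,), {'a': 2})  -- both

      main = greenlet.getcurrent()
      g = greenlet.greenlet(child)
      g.switch()                 # start child; it parks in main.switch()
      g.switch(1, 2)
      g.switch(a=1)
      g.switch(1, a=2)
*/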
+ OwnedObject result; + if (this->args()) { + result <<= this->args(); + } + else { + assert(PyErr_Occurred()); + } + assert(!this->args()); + try { + // Our only caller handles the bad error case + assert(err.status >= 0); + assert(state.borrow_current() == this->self()); + if (OwnedObject tracefunc = state.get_tracefunc()) { + assert(result || PyErr_Occurred()); + g_calltrace(tracefunc, + result ? mod_globs->event_switch : mod_globs->event_throw, + err.origin_greenlet, + this->self()); + } + // The above could have invoked arbitrary Python code, but + // it couldn't switch back to this object and *also* + // throw an exception, so the args won't have changed. + + if (PyErr_Occurred()) { + // We get here if we fell off the end of the run() function + // raising an exception. The switch itself was + // successful, but the function raised. + // valgrind reports that memory allocated here can still + // be reached after a test run. + throw PyErrOccurred::from_current(); + } + return result; + } + catch (const PyErrOccurred&) { + /* Turn switch errors into switch throws */ + /* Turn trace errors into switch throws */ + this->release_args(); + throw; + } +} + +void +Greenlet::g_calltrace(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const BorrowedGreenlet& origin, + const BorrowedGreenlet& target) +{ + PyErrPieces saved_exc; + try { + TracingGuard tracing_guard; + // TODO: We have saved the active exception (if any) that's + // about to be raised. In the 'throw' case, we could provide + // the exception to the tracefunction, which seems very helpful. + tracing_guard.CallTraceFunction(tracefunc, event, origin, target); + } + catch (const PyErrOccurred&) { + // In case of exceptions, the trace function is removed, + // and any existing exception is replaced with the tracing + // exception. + GET_THREAD_STATE().state().set_tracefunc(Py_None); + throw; + } + + saved_exc.PyErrRestore(); + assert( + (event == mod_globs->event_throw && PyErr_Occurred()) + || (event == mod_globs->event_switch && !PyErr_Occurred()) + ); +} + +void +Greenlet::murder_in_place() +{ + if (this->active()) { + assert(!this->is_currently_running_in_some_thread()); + this->deactivate_and_free(); + } +} + +inline void +Greenlet::deactivate_and_free() +{ + if (!this->active()) { + return; + } + // Throw away any saved stack. + this->stack_state = StackState(); + assert(!this->stack_state.active()); + // Throw away any Python references. + // We're holding a borrowed reference to the last + // frame we executed. Since we borrowed it, the + // normal traversal, clear, and dealloc functions + // ignore it, meaning it leaks. (The thread state + // object can't find it to clear it when that's + // deallocated either, because by definition if we + // got an object on this list, it wasn't + // running and the thread state doesn't have + // this frame.) + // So here, we *do* clear it. + this->python_state.tp_clear(true); +} + +bool +Greenlet::belongs_to_thread(const ThreadState* thread_state) const +{ + if (!this->thread_state() // not running anywhere, or thread + // exited + || !thread_state) { // same, or there is no thread state. + return false; + } + return true; +} + + +void +Greenlet::deallocing_greenlet_in_thread(const ThreadState* current_thread_state) +{ + /* Cannot raise an exception to kill the greenlet if + it is not running in the same thread!
*/ + if (this->belongs_to_thread(current_thread_state)) { + assert(current_thread_state); + // To get here it had to have run before + /* Send the greenlet a GreenletExit exception. */ + + // We don't care about the return value, only whether an + // exception happened. + this->throw_GreenletExit_during_dealloc(*current_thread_state); + return; + } + + // Not the same thread! Temporarily save the greenlet + // into its thread's deleteme list, *if* it exists. + // If that thread has already exited, and processed its pending + // cleanup, we'll never be able to clean everything up: we won't + // be able to raise an exception. + // That's mostly OK! Since we can't add it to a list, our refcount + // won't increase, and we'll go ahead with the DECREFs later. + + ThreadState *const thread_state = this->thread_state(); + if (thread_state) { + thread_state->delete_when_thread_running(this->self()); + } + else { + // The thread is dead, we can't raise an exception. + // We need to make it look non-active, though, so that dealloc + // finishes killing it. + this->deactivate_and_free(); + } + return; +} + + +int +Greenlet::tp_traverse(visitproc visit, void* arg) +{ + + int result; + if ((result = this->exception_state.tp_traverse(visit, arg)) != 0) { + return result; + } + //XXX: This is ugly. But so is handling everything having to do + //with the top frame. + bool visit_top_frame = this->was_running_in_dead_thread(); + // When true, the thread is dead. Our implicit weak reference to the + // frame is now all that's left; we consider ourselves to + // strongly own it now. + if ((result = this->python_state.tp_traverse(visit, arg, visit_top_frame)) != 0) { + return result; + } + return 0; +} + +int +Greenlet::tp_clear() +{ + bool own_top_frame = this->was_running_in_dead_thread(); + this->exception_state.tp_clear(); + this->python_state.tp_clear(own_top_frame); + return 0; +} + +bool Greenlet::is_currently_running_in_some_thread() const +{ + return this->stack_state.active() && !this->python_state.top_frame(); +} + +#if GREENLET_PY312 +void GREENLET_NOINLINE(Greenlet::expose_frames)() +{ + if (!this->python_state.top_frame()) { + return; + } + + _PyInterpreterFrame* last_complete_iframe = nullptr; + _PyInterpreterFrame* iframe = this->python_state.top_frame()->f_frame; + while (iframe) { + // We must make a copy before looking at the iframe contents, + // since iframe might point to a portion of the greenlet's C stack + // that was spilled when switching greenlets. + _PyInterpreterFrame iframe_copy; + this->stack_state.copy_from_stack(&iframe_copy, iframe, sizeof(*iframe)); + if (!_PyFrame_IsIncomplete(&iframe_copy)) { + // If the iframe were OWNED_BY_CSTACK then it would always be + // incomplete. Since it's not incomplete, it's not on the C stack + // and we can access it through the original `iframe` pointer + // directly. This is important since GetFrameObject might + // lazily _create_ the frame object and we don't want the + // interpreter to lose track of it. + assert(iframe_copy.owner != FRAME_OWNED_BY_CSTACK); + + // We really want to just write: + // PyFrameObject* frame = _PyFrame_GetFrameObject(iframe); + // but _PyFrame_GetFrameObject calls _PyFrame_MakeAndSetFrameObject + // which is not a visible symbol in libpython. 
The easiest + // way to get a public function to call it is using + // PyFrame_GetBack, which is defined as follows: + // assert(frame != NULL); + // assert(!_PyFrame_IsIncomplete(frame->f_frame)); + // PyFrameObject *back = frame->f_back; + // if (back == NULL) { + // _PyInterpreterFrame *prev = frame->f_frame->previous; + // prev = _PyFrame_GetFirstComplete(prev); + // if (prev) { + // back = _PyFrame_GetFrameObject(prev); + // } + // } + // return (PyFrameObject*)Py_XNewRef(back); + if (!iframe->frame_obj) { + PyFrameObject dummy_frame; + _PyInterpreterFrame dummy_iframe; + dummy_frame.f_back = nullptr; + dummy_frame.f_frame = &dummy_iframe; + // force the iframe to be considered complete without + // needing to check its code object: + dummy_iframe.owner = FRAME_OWNED_BY_GENERATOR; + dummy_iframe.previous = iframe; + assert(!_PyFrame_IsIncomplete(&dummy_iframe)); + // Drop the returned reference immediately; the iframe + // continues to hold a strong reference + Py_XDECREF(PyFrame_GetBack(&dummy_frame)); + assert(iframe->frame_obj); + } + + // This is a complete frame, so make the last one of those we saw + // point at it, bypassing any incomplete frames (which may have + // been on the C stack) in between the two. We're overwriting + // last_complete_iframe->previous and need that to be reversible, + // so we store the original previous ptr in the frame object + // (which we must have created on a previous iteration through + // this loop). The frame object has a bunch of storage that is + // only used when its iframe is OWNED_BY_FRAME_OBJECT, which only + // occurs when the frame object outlives the frame's execution, + // which can't have happened yet because the frame is currently + // executing as far as the interpreter is concerned. So, we can + // reuse it for our own purposes. + assert(iframe->owner == FRAME_OWNED_BY_THREAD + || iframe->owner == FRAME_OWNED_BY_GENERATOR); + if (last_complete_iframe) { + assert(last_complete_iframe->frame_obj); + memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], + &last_complete_iframe->previous, sizeof(void *)); + last_complete_iframe->previous = iframe; + } + last_complete_iframe = iframe; + } + // Frames that are OWNED_BY_FRAME_OBJECT are linked via the + // frame's f_back while all others are linked via the iframe's + // previous ptr. Since all the frames we traverse are running + // as far as the interpreter is concerned, we don't have to + // worry about the OWNED_BY_FRAME_OBJECT case. + iframe = iframe_copy.previous; + } + + // Give the outermost complete iframe a null previous pointer to + // account for any potential incomplete/C-stack iframes between it + // and the actual top-of-stack + if (last_complete_iframe) { + assert(last_complete_iframe->frame_obj); + memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], + &last_complete_iframe->previous, sizeof(void *)); + last_complete_iframe->previous = nullptr; + } +} +#else +void Greenlet::expose_frames() +{ + +} +#endif + +}; // namespace greenlet +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/TGreenlet.hpp b/venv/lib/python3.12/site-packages/greenlet/TGreenlet.hpp new file mode 100644 index 0000000..e152353 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TGreenlet.hpp @@ -0,0 +1,830 @@ +#ifndef GREENLET_GREENLET_HPP +#define GREENLET_GREENLET_HPP +/* + * Declarations of the core data structures. 
+*/ + +#define PY_SSIZE_T_CLEAN +#include <Python.h> + +#include "greenlet_compiler_compat.hpp" +#include "greenlet_refs.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_allocator.hpp" + +using greenlet::refs::OwnedObject; +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::OwnedMainGreenlet; +using greenlet::refs::BorrowedGreenlet; + +#if PY_VERSION_HEX < 0x30B00A6 +# define _PyCFrame CFrame +# define _PyInterpreterFrame _interpreter_frame +#endif + +#if GREENLET_PY312 +# define Py_BUILD_CORE +# include "internal/pycore_frame.h" +#endif + +#if GREENLET_PY314 +# include "internal/pycore_interpframe_structs.h" +#if defined(_MSC_VER) || defined(__MINGW64__) +# include "greenlet_msvc_compat.hpp" +#else +# include "internal/pycore_interpframe.h" +#endif +#endif + +// XXX: TODO: Work to remove all virtual functions +// for speed of calling and size of objects (no vtable). +// One pattern is the Curiously Recurring Template +namespace greenlet +{ + class ExceptionState + { + private: + G_NO_COPIES_OF_CLS(ExceptionState); + + // Even though these are borrowed objects, we actually own + // them, when they're not null. + // XXX: Express that in the API. + private: + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; + public: + ExceptionState(); + void operator<<(const PyThreadState *const tstate) noexcept; + void operator>>(PyThreadState* tstate) noexcept; + void clear() noexcept; + + int tp_traverse(visitproc visit, void* arg) noexcept; + void tp_clear() noexcept; + }; + + template <typename T> + void operator<<(const PyThreadState *const tstate, T& exc); + + class PythonStateContext + { + protected: + greenlet::refs::OwnedContext _context; + public: + inline const greenlet::refs::OwnedContext& context() const + { + return this->_context; + } + inline greenlet::refs::OwnedContext& context() + { + return this->_context; + } + + inline void tp_clear() + { + this->_context.CLEAR(); + } + + template <typename T> + inline static PyObject* context(T* tstate) + { + return tstate->context; + } + + template <typename T> + inline static void context(T* tstate, PyObject* new_context) + { + tstate->context = new_context; + tstate->context_ver++; + } + }; + class SwitchingArgs; + class PythonState : public PythonStateContext + { + public: + typedef greenlet::refs::OwnedReference<PyFrameObject> OwnedFrame; + private: + G_NO_COPIES_OF_CLS(PythonState); + // We own this if we're suspended (although currently we don't + // tp_traverse into it; that's a TODO). If we're running, it's + // empty. If we get deallocated and *still* have a frame, it + // won't be reachable from the place that normally decref's + // it, so we need to do it (hence owning it). + OwnedFrame _top_frame; +#if GREENLET_USE_CFRAME + _PyCFrame* cframe; + int use_tracing; +#endif +#if GREENLET_PY314 + int py_recursion_depth; + // I think this is only used by the JIT. At least, + // we only got errors not switching it when the JIT was enabled. + // Python/generated_cases.c.h:12469: _PyEval_EvalFrameDefault: + // Assertion `tstate->current_executor == NULL' failed.
+ // see https://github.com/python-greenlet/greenlet/issues/460 + PyObject* current_executor; +#elif GREENLET_PY312 + int py_recursion_depth; + int c_recursion_depth; +#else + int recursion_depth; +#endif +#if GREENLET_PY313 + PyObject *delete_later; +#else + int trash_delete_nesting; +#endif +#if GREENLET_PY311 + _PyInterpreterFrame* current_frame; + _PyStackChunk* datastack_chunk; + PyObject** datastack_top; + PyObject** datastack_limit; +#endif + // The PyInterpreterFrame list on 3.12+ contains some entries that are + // on the C stack, which can't be directly accessed while a greenlet is + // suspended. In order to keep greenlet gr_frame introspection working, + // we adjust stack switching to rewrite the interpreter frame list + // to skip these C-stack frames; we call this "exposing" the greenlet's + // frames because it makes them valid to work with in Python. Then when + // the greenlet is resumed we need to remember to reverse the operation + // we did. The C-stack frames are "entry frames" which are a low-level + // interpreter detail; they're not needed for introspection, but do + // need to be present for the eval loop to work. + void unexpose_frames(); + + public: + + PythonState(); + // You can use this for testing whether we have a frame + // or not. It returns const so they can't modify it. + const OwnedFrame& top_frame() const noexcept; + + inline void operator<<(const PyThreadState *const tstate) noexcept; + inline void operator>>(PyThreadState* tstate) noexcept; + void clear() noexcept; + + int tp_traverse(visitproc visit, void* arg, bool visit_top_frame) noexcept; + void tp_clear(bool own_top_frame) noexcept; + void set_initial_state(const PyThreadState* const tstate) noexcept; +#if GREENLET_USE_CFRAME + void set_new_cframe(_PyCFrame& frame) noexcept; +#endif + + void may_switch_away() noexcept; + inline void will_switch_from(PyThreadState *const origin_tstate) noexcept; + void did_finish(PyThreadState* tstate) noexcept; + }; + + class StackState + { + // By having only plain C (POD) members, no virtual functions + // or bases, we get a trivial assignment operator generated + // for us. However, that's not safe since we do manage memory. + // So we declare an assignment operator that only works if we + // don't have any memory allocated. (We don't use + // std::shared_ptr for reference counting just to keep this + // object small) + private: + char* _stack_start; + char* stack_stop; + char* stack_copy; + intptr_t _stack_saved; + StackState* stack_prev; + inline int copy_stack_to_heap_up_to(const char* const stop) noexcept; + inline void free_stack_copy() noexcept; + + public: + /** + * Creates a started, but inactive, state, using *current* + * as the previous. + */ + StackState(void* mark, StackState& current); + /** + * Creates an inactive, unstarted, state. 
+ */ + StackState(); + ~StackState(); + StackState(const StackState& other); + StackState& operator=(const StackState& other); + inline void copy_heap_to_stack(const StackState& current) noexcept; + inline int copy_stack_to_heap(char* const stackref, const StackState& current) noexcept; + inline bool started() const noexcept; + inline bool main() const noexcept; + inline bool active() const noexcept; + inline void set_active() noexcept; + inline void set_inactive() noexcept; + inline intptr_t stack_saved() const noexcept; + inline char* stack_start() const noexcept; + static inline StackState make_main() noexcept; +#ifdef GREENLET_USE_STDIO + friend std::ostream& operator<<(std::ostream& os, const StackState& s); +#endif + + // Fill in [dest, dest + n) with the values that would be at + // [src, src + n) while this greenlet is running. This is like memcpy + // except that if the greenlet is suspended it accounts for the portion + // of the greenlet's stack that was spilled to the heap. `src` may + // be on this greenlet's stack, or on the heap, but not on a different + // greenlet's stack. + void copy_from_stack(void* dest, const void* src, size_t n) const; + }; +#ifdef GREENLET_USE_STDIO + std::ostream& operator<<(std::ostream& os, const StackState& s); +#endif + + class SwitchingArgs + { + private: + G_NO_ASSIGNMENT_OF_CLS(SwitchingArgs); + // If args and kwargs are both false (NULL), this is a *throw*, not a + // switch. PyErr_... must have been called already. + OwnedObject _args; + OwnedObject _kwargs; + public: + + SwitchingArgs() + {} + + SwitchingArgs(const OwnedObject& args, const OwnedObject& kwargs) + : _args(args), + _kwargs(kwargs) + {} + + SwitchingArgs(const SwitchingArgs& other) + : _args(other._args), + _kwargs(other._kwargs) + {} + + const OwnedObject& args() + { + return this->_args; + } + + const OwnedObject& kwargs() + { + return this->_kwargs; + } + + /** + * Moves ownership from the argument to this object. + */ + SwitchingArgs& operator<<=(SwitchingArgs& other) + { + if (this != &other) { + this->_args = other._args; + this->_kwargs = other._kwargs; + other.CLEAR(); + } + return *this; + } + + /** + * Acquires ownership of the argument (consumes the reference). + */ + SwitchingArgs& operator<<=(PyObject* args) + { + this->_args = OwnedObject::consuming(args); + this->_kwargs.CLEAR(); + return *this; + } + + /** + * Acquires ownership of the argument. + * + * Sets the args to be the given value; clears the kwargs. + */ + SwitchingArgs& operator<<=(OwnedObject& args) + { + assert(&args != &this->_args); + this->_args = args; + this->_kwargs.CLEAR(); + args.CLEAR(); + + return *this; + } + + explicit operator bool() const noexcept + { + return this->_args || this->_kwargs; + } + + inline void CLEAR() + { + this->_args.CLEAR(); + this->_kwargs.CLEAR(); + } + + const std::string as_str() const noexcept + { + return PyUnicode_AsUTF8( + OwnedObject::consuming( + PyUnicode_FromFormat( + "SwitchingArgs(args=%R, kwargs=%R)", + this->_args.borrow(), + this->_kwargs.borrow() + ) + ).borrow() + ); + } + }; + + class ThreadState; + + class UserGreenlet; + class MainGreenlet; + + class Greenlet + { + private: + G_NO_COPIES_OF_CLS(Greenlet); + PyGreenlet* const _self; + private: + // XXX: Work to remove these. 
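/*
  A hedged sketch of the "args and kwargs both empty means throw"
  convention documented on SwitchingArgs above, as it appears from Python:

      import greenlet

      def child():
          try:
              main.switch()
          except RuntimeError as e:
              print("caught:", e)

      main = greenlet.getcurrent()
      g = greenlet.greenlet(child)
      g.switch()                          # park child inside main.switch()
      g.throw(RuntimeError("injected"))   # resume child with the exception raised
*/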
+ friend class ThreadState; + friend class UserGreenlet; + friend class MainGreenlet; + protected: + ExceptionState exception_state; + SwitchingArgs switch_args; + StackState stack_state; + PythonState python_state; + Greenlet(PyGreenlet* p, const StackState& initial_state); + public: + // This constructor takes ownership of the PyGreenlet, by + // setting ``p->pimpl = this;``. + Greenlet(PyGreenlet* p); + virtual ~Greenlet(); + + const OwnedObject context() const; + + // You MUST call this _very_ early in the switching process to + // prepare anything that may need to be prepared. This might perform + // garbage collections or otherwise run arbitrary Python code. + // + // One specific use of it is for Python 3.11+, preventing + // running arbitrary code at unsafe times. See + // PythonState::may_switch_away(). + inline void may_switch_away() + { + this->python_state.may_switch_away(); + } + + inline void context(refs::BorrowedObject new_context); + + inline SwitchingArgs& args() + { + return this->switch_args; + } + + virtual const refs::BorrowedMainGreenlet main_greenlet() const = 0; + + inline intptr_t stack_saved() const noexcept + { + return this->stack_state.stack_saved(); + } + + // This is used by the macro SLP_SAVE_STATE to compute the + // difference in stack sizes. It might be nice to handle the + // computation ourself, but the type of the result + // varies by platform, so doing it in the macro is the + // simplest way. + inline const char* stack_start() const noexcept + { + return this->stack_state.stack_start(); + } + + virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); + virtual OwnedObject g_switch() = 0; + /** + * Force the greenlet to appear dead. Used when it's not + * possible to throw an exception into a greenlet anymore. + * + * This loses access to the thread state and the main greenlet. + */ + virtual void murder_in_place(); + + /** + * Called when somebody notices we were running in a dead + * thread to allow cleaning up resources (because we can't + * raise GreenletExit into it anymore). + * This is very similar to ``murder_in_place()``, except that + * it DOES NOT lose the main greenlet or thread state. + */ + inline void deactivate_and_free(); + + + // Called when some thread wants to deallocate a greenlet + // object. + // The thread may or may not be the same thread the greenlet + // was running in. + // The thread state will be null if the thread the greenlet + // was running in was known to have exited. + void deallocing_greenlet_in_thread(const ThreadState* current_state); + + // Must be called on 3.12+ before exposing a suspended greenlet's + // frames to user code. This rewrites the linked list of interpreter + // frames to skip the ones that are being stored on the C stack (which + // can't be safely accessed while the greenlet is suspended because + // that stack space might be hosting a different greenlet), and + // sets PythonState::frames_were_exposed so we remember to restore + // the original list before resuming the greenlet. The C-stack frames + // are a low-level interpreter implementation detail; while they're + // important to the bytecode eval loop, they're superfluous for + // introspection purposes. + void expose_frames(); + + + // TODO: Figure out how to make these non-public.
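/*
  A hedged sketch of the introspection expose_frames() preserves: gr_frame
  of a suspended greenlet stays a walkable frame object even on 3.12+,
  where parts of the frame chain live on the C stack while switched out:

      import greenlet

      def child():
          main.switch()                    # suspend here

      main = greenlet.getcurrent()
      g = greenlet.greenlet(child)
      g.switch()
      print(g.gr_frame.f_code.co_name)     # 'child'
      g.switch()                           # let child finish
      print(g.gr_frame)                    # None once it is dead
*/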
+ inline void slp_restore_state() noexcept; + inline int slp_save_state(char *const stackref) noexcept; + + inline bool is_currently_running_in_some_thread() const; + virtual bool belongs_to_thread(const ThreadState* state) const; + + inline bool started() const + { + return this->stack_state.started(); + } + inline bool active() const + { + return this->stack_state.active(); + } + inline bool main() const + { + return this->stack_state.main(); + } + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const = 0; + + virtual const OwnedGreenlet parent() const = 0; + virtual void parent(const refs::BorrowedObject new_parent) = 0; + + inline const PythonState::OwnedFrame& top_frame() + { + return this->python_state.top_frame(); + } + + virtual const OwnedObject& run() const = 0; + virtual void run(const refs::BorrowedObject nrun) = 0; + + + virtual int tp_traverse(visitproc visit, void* arg); + virtual int tp_clear(); + + + // Return the thread state that the greenlet is running in, or + // null if the greenlet is not running or the thread is known + // to have exited. + virtual ThreadState* thread_state() const noexcept = 0; + + // Return true if the greenlet is known to have been running + // (active) in a thread that has now exited. + virtual bool was_running_in_dead_thread() const noexcept = 0; + + // Return a borrowed greenlet that is the Python object + // this object represents. + inline BorrowedGreenlet self() const noexcept + { + return BorrowedGreenlet(this->_self); + } + + // For testing. If this returns true, we should pretend that + // slp_switch() failed. + virtual bool force_slp_switch_error() const noexcept; + + protected: + inline void release_args(); + + // The functions that must not be inlined are declared virtual. + // We also mark them as protected, not private, so that the + // compiler is forced to call them through a function pointer. + // (A sufficiently smart compiler could directly call a private + // virtual function since it can never be overridden in a + // subclass). + + // Also TODO: Switch away from integer error codes and to enums, + // or throw exceptions when possible. + struct switchstack_result_t + { + int status; + Greenlet* the_new_current_greenlet; + OwnedGreenlet origin_greenlet; + + switchstack_result_t() + : status(0), + the_new_current_greenlet(nullptr) + {} + + switchstack_result_t(int err) + : status(err), + the_new_current_greenlet(nullptr) + {} + + switchstack_result_t(int err, Greenlet* state, OwnedGreenlet& origin) + : status(err), + the_new_current_greenlet(state), + origin_greenlet(origin) + { + } + + switchstack_result_t(int err, Greenlet* state, const BorrowedGreenlet& origin) + : status(err), + the_new_current_greenlet(state), + origin_greenlet(origin) + { + } + + switchstack_result_t(const switchstack_result_t& other) + : status(other.status), + the_new_current_greenlet(other.the_new_current_greenlet), + origin_greenlet(other.origin_greenlet) + {} + + switchstack_result_t& operator=(const switchstack_result_t& other) + { + this->status = other.status; + this->the_new_current_greenlet = other.the_new_current_greenlet; + this->origin_greenlet = other.origin_greenlet; + return *this; + } + }; + + OwnedObject on_switchstack_or_initialstub_failure( + Greenlet* target, + const switchstack_result_t& err, + const bool target_was_me=false, + const bool was_initial_stub=false); + + // Returns the previous greenlet we just switched away from. 
+        virtual OwnedGreenlet g_switchstack_success() noexcept;
+
+
+        // Check the preconditions for switching to this greenlet; if they
+        // aren't met, throws PyErrOccurred. Most callers will want to
+        // catch this and clear the arguments.
+        inline void check_switch_allowed() const;
+        class GreenletStartedWhileInPython : public std::runtime_error
+        {
+        public:
+            GreenletStartedWhileInPython() : std::runtime_error("")
+            {}
+        };
+
+    protected:
+
+
+        /**
+           Perform a stack switch into this greenlet.
+
+           This temporarily sets the global variable
+           ``switching_thread_state`` to this greenlet; as soon as the
+           call to ``slp_switch`` completes, this is reset to NULL.
+           Consequently, this depends on the GIL.
+
+           TODO: Adopt the stackman model and pass ``slp_switch`` a
+           callback function and context pointer; this eliminates the
+           need for global variables altogether.
+
+           Because the stack switch happens in this function, this
+           function can't use its own stack (local) variables, set
+           before the switch, and then accessed after the switch.
+
+           Further, you can't even access ``g_thread_state_global``
+           before and after the switch from the global variable.
+           Because it is thread local, some compilers cache it in a
+           register/on the stack, notably new versions of MSVC; this
+           breaks with strange crashes sometime later, because writing
+           to anything in ``g_thread_state_global`` after the switch
+           is actually writing to random memory. For this reason, we
+           call a non-inlined function to finish the operation. (XXX:
+           The ``/GT`` MSVC compiler argument probably fixes that.)
+
+           It is very important that the stack switch is 'atomic', i.e. no
+           calls into other Python code allowed (except very few that
+           are safe), because global variables are very fragile. (This
+           should no longer be the case with thread-local variables.)
+
+        */
+        // Made virtual to facilitate subclassing UserGreenlet for testing.
+        virtual switchstack_result_t g_switchstack(void);
+
+class TracingGuard
+{
+private:
+    PyThreadState* tstate;
+public:
+    TracingGuard()
+        : tstate(PyThreadState_GET())
+    {
+        PyThreadState_EnterTracing(this->tstate);
+    }
+
+    ~TracingGuard()
+    {
+        PyThreadState_LeaveTracing(this->tstate);
+        this->tstate = nullptr;
+    }
+
+    inline void CallTraceFunction(const OwnedObject& tracefunc,
+                                  const greenlet::refs::ImmortalEventName& event,
+                                  const BorrowedGreenlet& origin,
+                                  const BorrowedGreenlet& target)
+    {
+        // TODO: This calls tracefunc(event, (origin, target)). Add a shortcut
+        // function for that that's specialized to avoid the Py_BuildValue
+        // string parsing, or start with just using "ON" format with PyTuple_Pack(2,
+        // origin, target). That seems like what the N format is meant
+        // for.
+        // XXX: Why does event not automatically cast back to a PyObject?
+        // It tries to call the "deleted constructor ImmortalEventName
+        // const" instead.
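+        // A sketch of the PyTuple_Pack shortcut the TODO describes
+        // (illustrative only, not what runs below; PyTuple_Pack and
+        // PyObject_CallFunctionObjArgs are standard CPython APIs):
+#if 0
+        greenlet::refs::NewReference args(
+            PyTuple_Pack(2, origin.borrow(), target.borrow()));
+        if (!args) {
+            throw PyErrOccurred::from_current();
+        }
+        greenlet::refs::NewReference result(
+            PyObject_CallFunctionObjArgs(
+                tracefunc.borrow(), event.borrow(), args.borrow(), NULL));
+        if (!result) {
+            throw PyErrOccurred::from_current();
+        }
+#endif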
+ assert(tracefunc); + assert(event); + assert(origin); + assert(target); + greenlet::refs::NewReference retval( + PyObject_CallFunction( + tracefunc.borrow(), + "O(OO)", + event.borrow(), + origin.borrow(), + target.borrow() + )); + if (!retval) { + throw PyErrOccurred::from_current(); + } + } +}; + + static void + g_calltrace(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const greenlet::refs::BorrowedGreenlet& origin, + const BorrowedGreenlet& target); + private: + OwnedObject g_switch_finish(const switchstack_result_t& err); + + }; + + class UserGreenlet : public Greenlet + { + private: + static greenlet::PythonAllocator allocator; + OwnedMainGreenlet _main_greenlet; + OwnedObject _run_callable; + OwnedGreenlet _parent; + public: + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + + UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent); + virtual ~UserGreenlet(); + + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; + virtual bool was_running_in_dead_thread() const noexcept; + virtual ThreadState* thread_state() const noexcept; + virtual OwnedObject g_switch(); + virtual const OwnedObject& run() const + { + if (this->started() || !this->_run_callable) { + throw AttributeError("run"); + } + return this->_run_callable; + } + virtual void run(const refs::BorrowedObject nrun); + + virtual const OwnedGreenlet parent() const; + virtual void parent(const refs::BorrowedObject new_parent); + + virtual const refs::BorrowedMainGreenlet main_greenlet() const; + + virtual void murder_in_place(); + virtual bool belongs_to_thread(const ThreadState* state) const; + virtual int tp_traverse(visitproc visit, void* arg); + virtual int tp_clear(); + class ParentIsCurrentGuard + { + private: + OwnedGreenlet oldparent; + UserGreenlet* greenlet; + G_NO_COPIES_OF_CLS(ParentIsCurrentGuard); + public: + ParentIsCurrentGuard(UserGreenlet* p, const ThreadState& thread_state); + ~ParentIsCurrentGuard(); + }; + virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); + protected: + virtual switchstack_result_t g_initialstub(void* mark); + private: + // This function isn't meant to return. + // This accepts raw pointers and the ownership of them at the + // same time. The caller should use ``inner_bootstrap(origin.relinquish_ownership())``. 
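+        // Illustrative call shape only (hypothetical local names): both
+        // owned references are handed over as raw pointers, because
+        // inner_bootstrap never returns and so no destructors on this
+        // side of the stack switch would ever run.
+#if 0
+        this->inner_bootstrap(origin.relinquish_ownership(),
+                              run.relinquish_ownership());
+#endif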
+ void inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run); + }; + + class BrokenGreenlet : public UserGreenlet + { + private: + static greenlet::PythonAllocator allocator; + public: + bool _force_switch_error = false; + bool _force_slp_switch_error = false; + + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + BrokenGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) + : UserGreenlet(p, the_parent) + {} + virtual ~BrokenGreenlet() + {} + + virtual switchstack_result_t g_switchstack(void); + virtual bool force_slp_switch_error() const noexcept; + + }; + + class MainGreenlet : public Greenlet + { + private: + static greenlet::PythonAllocator allocator; + refs::BorrowedMainGreenlet _self; + ThreadState* _thread_state; + G_NO_COPIES_OF_CLS(MainGreenlet); + public: + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + + MainGreenlet(refs::BorrowedMainGreenlet::PyType*, ThreadState*); + virtual ~MainGreenlet(); + + + virtual const OwnedObject& run() const; + virtual void run(const refs::BorrowedObject nrun); + + virtual const OwnedGreenlet parent() const; + virtual void parent(const refs::BorrowedObject new_parent); + + virtual const refs::BorrowedMainGreenlet main_greenlet() const; + + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; + virtual bool was_running_in_dead_thread() const noexcept; + virtual ThreadState* thread_state() const noexcept; + void thread_state(ThreadState*) noexcept; + virtual OwnedObject g_switch(); + virtual int tp_traverse(visitproc visit, void* arg); + }; + + // Instantiate one on the stack to save the GC state, + // and then disable GC. When it goes out of scope, GC will be + // restored to its original state. Sadly, these APIs are only + // available on 3.10+; luckily, we only need them on 3.11+. +#if GREENLET_PY310 + class GCDisabledGuard + { + private: + int was_enabled = 0; + public: + GCDisabledGuard() + : was_enabled(PyGC_IsEnabled()) + { + PyGC_Disable(); + } + + ~GCDisabledGuard() + { + if (this->was_enabled) { + PyGC_Enable(); + } + } + }; +#endif + + OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept; + + //TODO: Greenlet::g_switch() should call this automatically on its + //return value. As it is, the module code is calling it. + static inline OwnedObject + single_result(const OwnedObject& results) + { + if (results + && PyTuple_Check(results.borrow()) + && PyTuple_GET_SIZE(results.borrow()) == 1) { + PyObject* result = PyTuple_GET_ITEM(results.borrow(), 0); + assert(result); + return OwnedObject::owning(result); + } + return results; + } + + + static OwnedObject + g_handle_exit(const OwnedObject& greenlet_result); + + + template + void operator<<(const PyThreadState *const lhs, T& rhs) + { + rhs.operator<<(lhs); + } + +} // namespace greenlet ; + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/TGreenletGlobals.cpp b/venv/lib/python3.12/site-packages/greenlet/TGreenletGlobals.cpp new file mode 100644 index 0000000..0087d2f --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TGreenletGlobals.cpp @@ -0,0 +1,94 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of GreenletGlobals. 
+ *
+ * Format with:
+ *  clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#ifndef T_GREENLET_GLOBALS
+#define T_GREENLET_GLOBALS
+
+#include "greenlet_refs.hpp"
+#include "greenlet_exceptions.hpp"
+#include "greenlet_thread_support.hpp"
+#include "greenlet_internal.hpp"
+
+namespace greenlet {
+
+// This encapsulates what were previously module global "constants"
+// established at init time.
+// This is a step towards Python3 style module state that allows
+// reloading.
+//
+// In an earlier iteration of this code, we used placement new to be
+// able to allocate this object statically still, so that references
+// to its members don't incur an extra pointer indirection.
+// But under some scenarios, that could result in crashes at
+// shutdown because apparently the destructor was getting run twice?
+class GreenletGlobals
+{
+
+public:
+    const greenlet::refs::ImmortalEventName event_switch;
+    const greenlet::refs::ImmortalEventName event_throw;
+    const greenlet::refs::ImmortalException PyExc_GreenletError;
+    const greenlet::refs::ImmortalException PyExc_GreenletExit;
+    const greenlet::refs::ImmortalObject empty_tuple;
+    const greenlet::refs::ImmortalObject empty_dict;
+    const greenlet::refs::ImmortalString str_run;
+    Mutex* const thread_states_to_destroy_lock;
+    greenlet::cleanup_queue_t thread_states_to_destroy;
+
+    GreenletGlobals() :
+        event_switch("switch"),
+        event_throw("throw"),
+        PyExc_GreenletError("greenlet.error"),
+        PyExc_GreenletExit("greenlet.GreenletExit", PyExc_BaseException),
+        empty_tuple(Require(PyTuple_New(0))),
+        empty_dict(Require(PyDict_New())),
+        str_run("run"),
+        thread_states_to_destroy_lock(new Mutex())
+    {}
+
+    ~GreenletGlobals()
+    {
+        // This object is (currently) effectively immortal, and not
+        // just because of those placement new tricks; if we try to
+        // deallocate the static object we allocated, and overwrote,
+        // we would be doing so at C++ teardown time, which is after
+        // the final Python GIL is released, and we can't use the API
+        // then.
+        // (The members will still be destructed, but they also don't
+        // do any deallocation.)
+    }
+
+    void queue_to_destroy(ThreadState* ts) const
+    {
+        // we're currently accessed through a static const object,
+        // implicitly marking our members as const, so code can't just
+        // call push_back (or pop_back) without casting away the
+        // const.
+        //
+        // Do that for callers.
+        greenlet::cleanup_queue_t& q = const_cast<greenlet::cleanup_queue_t&>(this->thread_states_to_destroy);
+        q.push_back(ts);
+    }
+
+    ThreadState* take_next_to_destroy() const
+    {
+        greenlet::cleanup_queue_t& q = const_cast<greenlet::cleanup_queue_t&>(this->thread_states_to_destroy);
+        ThreadState* result = q.back();
+        q.pop_back();
+        return result;
+    }
+};
+
+}; // namespace greenlet
+
+static const greenlet::GreenletGlobals* mod_globs;
+
+#endif // T_GREENLET_GLOBALS
diff --git a/venv/lib/python3.12/site-packages/greenlet/TMainGreenlet.cpp b/venv/lib/python3.12/site-packages/greenlet/TMainGreenlet.cpp
new file mode 100644
index 0000000..a2a9cfe
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/TMainGreenlet.cpp
@@ -0,0 +1,153 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::MainGreenlet.
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_MAIN_GREENLET_CPP +#define T_MAIN_GREENLET_CPP + +#include "TGreenlet.hpp" + + + +// Protected by the GIL. Incremented when we create a main greenlet, +// in a new thread, decremented when it is destroyed. +static Py_ssize_t G_TOTAL_MAIN_GREENLETS; + +namespace greenlet { +greenlet::PythonAllocator MainGreenlet::allocator; + +void* MainGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void MainGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast(ptr), + 1); +} + + +MainGreenlet::MainGreenlet(PyGreenlet* p, ThreadState* state) + : Greenlet(p, StackState::make_main()), + _self(p), + _thread_state(state) +{ + G_TOTAL_MAIN_GREENLETS++; +} + +MainGreenlet::~MainGreenlet() +{ + G_TOTAL_MAIN_GREENLETS--; + this->tp_clear(); +} + +ThreadState* +MainGreenlet::thread_state() const noexcept +{ + return this->_thread_state; +} + +void +MainGreenlet::thread_state(ThreadState* t) noexcept +{ + assert(!t); + this->_thread_state = t; +} + + +const BorrowedMainGreenlet +MainGreenlet::main_greenlet() const +{ + return this->_self; +} + +BorrowedMainGreenlet +MainGreenlet::find_main_greenlet_in_lineage() const +{ + return BorrowedMainGreenlet(this->_self); +} + +bool +MainGreenlet::was_running_in_dead_thread() const noexcept +{ + return !this->_thread_state; +} + +OwnedObject +MainGreenlet::g_switch() +{ + try { + this->check_switch_allowed(); + } + catch (const PyErrOccurred&) { + this->release_args(); + throw; + } + + switchstack_result_t err = this->g_switchstack(); + if (err.status < 0) { + // XXX: This code path is untested, but it is shared + // with the UserGreenlet path that is tested. + return this->on_switchstack_or_initialstub_failure( + this, + err, + true, // target was me + false // was initial stub + ); + } + + return err.the_new_current_greenlet->g_switch_finish(err); +} + +int +MainGreenlet::tp_traverse(visitproc visit, void* arg) +{ + if (this->_thread_state) { + // we've already traversed main, (self), don't do it again. 
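+        // (The ``false`` below is ThreadState::tp_traverse's
+        // ``traverse_main`` flag; turning it off keeps the thread state
+        // from visiting the main greenlet and recursing back into us.)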
+        int result = this->_thread_state->tp_traverse(visit, arg, false);
+        if (result) {
+            return result;
+        }
+    }
+    return Greenlet::tp_traverse(visit, arg);
+}
+
+const OwnedObject&
+MainGreenlet::run() const
+{
+    throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::run(const BorrowedObject UNUSED(nrun))
+{
+    throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::parent(const BorrowedObject raw_new_parent)
+{
+    if (!raw_new_parent) {
+        throw AttributeError("can't delete attribute");
+    }
+    throw AttributeError("cannot set the parent of a main greenlet");
+}
+
+const OwnedGreenlet
+MainGreenlet::parent() const
+{
+    return OwnedGreenlet(); // null becomes None
+}
+
+}; // namespace greenlet
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/greenlet/TPythonState.cpp b/venv/lib/python3.12/site-packages/greenlet/TPythonState.cpp
new file mode 100644
index 0000000..8833a80
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/TPythonState.cpp
@@ -0,0 +1,406 @@
+#ifndef GREENLET_PYTHON_STATE_CPP
+#define GREENLET_PYTHON_STATE_CPP
+
+#include <Python.h>
+#include "TGreenlet.hpp"
+
+namespace greenlet {
+
+PythonState::PythonState()
+    : _top_frame()
+#if GREENLET_USE_CFRAME
+    ,cframe(nullptr)
+    ,use_tracing(0)
+#endif
+#if GREENLET_PY314
+    ,py_recursion_depth(0)
+    ,current_executor(nullptr)
+#elif GREENLET_PY312
+    ,py_recursion_depth(0)
+    ,c_recursion_depth(0)
+#else
+    ,recursion_depth(0)
+#endif
+#if GREENLET_PY313
+    ,delete_later(nullptr)
+#else
+    ,trash_delete_nesting(0)
+#endif
+#if GREENLET_PY311
+    ,current_frame(nullptr)
+    ,datastack_chunk(nullptr)
+    ,datastack_top(nullptr)
+    ,datastack_limit(nullptr)
+#endif
+{
+#if GREENLET_USE_CFRAME
+    /*
+      The PyThreadState->cframe pointer usually points to memory on
+      the stack, allocated in a call into PyEval_EvalFrameDefault.
+
+      Initially, before any evaluation begins, it points to the
+      initial PyThreadState object's ``root_cframe`` object, which is
+      statically allocated for the lifetime of the thread.
+
+      A greenlet can last for longer than a call to
+      PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer
+      to be the current ``PyThreadState->cframe``; nor could we use
+      one from the greenlet parent for the same reason. Yet a further
+      no: we can't allocate one scoped to the greenlet and then
+      destroy it when the greenlet is deallocated, because inside the
+      interpreter the _PyCFrame objects form a linked list, and that too
+      can result in accessing memory beyond its dynamic lifetime (if
+      the greenlet doesn't actually finish before it dies, its entry
+      could still be in the list).
+
+      Using the ``root_cframe`` is problematic, though, because its
+      members are never modified by the interpreter and are set to 0,
+      meaning that its ``use_tracing`` flag is never updated. We don't
+      want to modify that value in the ``root_cframe`` ourself: it
+      *shouldn't* matter much because we should probably never get
+      back to the point where that's the only cframe on the stack;
+      even if it did matter, the major consequence of an incorrect
+      value for ``use_tracing`` is that if it's true the interpreter
+      does some extra work --- however, it's just good code hygiene.
+
+      Our solution: before a greenlet runs, after its initial
+      creation, it uses the ``root_cframe`` just to have something to
+      put there.
However, once the greenlet is actually switched to + for the first time, ``g_initialstub`` (which doesn't actually + "return" while the greenlet is running) stores a new _PyCFrame on + its local stack, and copies the appropriate values from the + currently running _PyCFrame; this is then made the _PyCFrame for the + newly-minted greenlet. ``g_initialstub`` then proceeds to call + ``glet.run()``, which results in ``PyEval_...`` adding the + _PyCFrame to the list. Switches continue as normal. Finally, when + the greenlet finishes, the call to ``glet.run()`` returns and + the _PyCFrame is taken out of the linked list and the stack value + is now unused and free to expire. + + XXX: I think we can do better. If we're deallocing in the same + thread, can't we traverse the list and unlink our frame? + Can we just keep a reference to the thread state in case we + dealloc in another thread? (Is that even possible if we're still + running and haven't returned from g_initialstub?) + */ + this->cframe = &PyThreadState_GET()->root_cframe; +#endif +} + + +inline void PythonState::may_switch_away() noexcept +{ +#if GREENLET_PY311 + // PyThreadState_GetFrame is probably going to have to allocate a + // new frame object. That may trigger garbage collection. Because + // we call this during the early phases of a switch (it doesn't + // matter to which greenlet, as this has a global effect), if a GC + // triggers a switch away, two things can happen, both bad: + // - We might not get switched back to, halting forward progress. + // this is pathological, but possible. + // - We might get switched back to with a different set of + // arguments or a throw instead of a switch. That would corrupt + // our state (specifically, PyErr_Occurred() and this->args() + // would no longer agree). + // + // Thus, when we call this API, we need to have GC disabled. + // This method serves as a bottleneck we call when maybe beginning + // a switch. In this way, it is always safe -- no risk of GC -- to + // use ``_GetFrame()`` whenever we need to, just as it was in + // <=3.10 (because subsequent calls will be cached and not + // allocate memory). + + GCDisabledGuard no_gc; + Py_XDECREF(PyThreadState_GetFrame(PyThreadState_GET())); +#endif +} + +void PythonState::operator<<(const PyThreadState *const tstate) noexcept +{ + this->_context.steal(tstate->context); +#if GREENLET_USE_CFRAME + /* + IMPORTANT: ``cframe`` is a pointer into the STACK. Thus, because + the call to ``slp_switch()`` changes the contents of the stack, + you cannot read from ``ts_current->cframe`` after that call and + necessarily get the same values you get from reading it here. + Anything you need to restore from now to then must be saved in a + global/threadlocal variable (because we can't use stack + variables here either). For things that need to persist across + the switch, use `will_switch_from`. 
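+      A rough sketch of the pairing as driven from g_switchstack()
+      (illustrative names; ``<<`` saves the thread state into this
+      object, ``>>`` restores it into a live PyThreadState):
+
+          origin.python_state << tstate;                // save
+          target.python_state.will_switch_from(tstate); // keep use_tracing
+          ... slp_switch() ...
+          target.python_state >> tstate;                // restore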
+ */ + this->cframe = tstate->cframe; + #if !GREENLET_PY312 + this->use_tracing = tstate->cframe->use_tracing; + #endif +#endif // GREENLET_USE_CFRAME +#if GREENLET_PY311 + #if GREENLET_PY314 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + this->current_executor = tstate->current_executor; + #elif GREENLET_PY312 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + this->c_recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; + #else // not 312 + this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; + #endif // GREENLET_PY312 + #if GREENLET_PY313 + this->current_frame = tstate->current_frame; + #elif GREENLET_USE_CFRAME + this->current_frame = tstate->cframe->current_frame; + #endif + this->datastack_chunk = tstate->datastack_chunk; + this->datastack_top = tstate->datastack_top; + this->datastack_limit = tstate->datastack_limit; + + PyFrameObject *frame = PyThreadState_GetFrame((PyThreadState *)tstate); + Py_XDECREF(frame); // PyThreadState_GetFrame gives us a new + // reference. + this->_top_frame.steal(frame); + #if GREENLET_PY313 + this->delete_later = Py_XNewRef(tstate->delete_later); + #elif GREENLET_PY312 + this->trash_delete_nesting = tstate->trash.delete_nesting; + #else // not 312 + this->trash_delete_nesting = tstate->trash_delete_nesting; + #endif // GREENLET_PY312 +#else // Not 311 + this->recursion_depth = tstate->recursion_depth; + this->_top_frame.steal(tstate->frame); + this->trash_delete_nesting = tstate->trash_delete_nesting; +#endif // GREENLET_PY311 +} + +#if GREENLET_PY312 +void GREENLET_NOINLINE(PythonState::unexpose_frames)() +{ + if (!this->top_frame()) { + return; + } + + // See GreenletState::expose_frames() and the comment on frames_were_exposed + // for more information about this logic. + _PyInterpreterFrame *iframe = this->_top_frame->f_frame; + while (iframe != nullptr) { + _PyInterpreterFrame *prev_exposed = iframe->previous; + assert(iframe->frame_obj); + memcpy(&iframe->previous, &iframe->frame_obj->_f_frame_data[0], + sizeof(void *)); + iframe = prev_exposed; + } +} +#else +void PythonState::unexpose_frames() +{} +#endif + +void PythonState::operator>>(PyThreadState *const tstate) noexcept +{ + tstate->context = this->_context.relinquish_ownership(); + /* Incrementing this value invalidates the contextvars cache, + which would otherwise remain valid across switches */ + tstate->context_ver++; +#if GREENLET_USE_CFRAME + tstate->cframe = this->cframe; + /* + If we were tracing, we need to keep tracing. + There should never be the possibility of hitting the + root_cframe here. See note above about why we can't + just copy this from ``origin->cframe->use_tracing``. 
+ */ + #if !GREENLET_PY312 + tstate->cframe->use_tracing = this->use_tracing; + #endif +#endif // GREENLET_USE_CFRAME +#if GREENLET_PY311 + #if GREENLET_PY314 + tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth; + tstate->current_executor = this->current_executor; + this->unexpose_frames(); + #elif GREENLET_PY312 + tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth; + tstate->c_recursion_remaining = Py_C_RECURSION_LIMIT - this->c_recursion_depth; + this->unexpose_frames(); + #else // \/ 3.11 + tstate->recursion_remaining = tstate->recursion_limit - this->recursion_depth; + #endif // GREENLET_PY312 + #if GREENLET_PY313 + tstate->current_frame = this->current_frame; + #elif GREENLET_USE_CFRAME + tstate->cframe->current_frame = this->current_frame; + #endif + tstate->datastack_chunk = this->datastack_chunk; + tstate->datastack_top = this->datastack_top; + tstate->datastack_limit = this->datastack_limit; + this->_top_frame.relinquish_ownership(); + #if GREENLET_PY313 + Py_XDECREF(tstate->delete_later); + tstate->delete_later = this->delete_later; + Py_CLEAR(this->delete_later); + #elif GREENLET_PY312 + tstate->trash.delete_nesting = this->trash_delete_nesting; + #else // not 3.12 + tstate->trash_delete_nesting = this->trash_delete_nesting; + #endif // GREENLET_PY312 +#else // not 3.11 + tstate->frame = this->_top_frame.relinquish_ownership(); + tstate->recursion_depth = this->recursion_depth; + tstate->trash_delete_nesting = this->trash_delete_nesting; +#endif // GREENLET_PY311 +} + +inline void PythonState::will_switch_from(PyThreadState *const origin_tstate) noexcept +{ +#if GREENLET_USE_CFRAME && !GREENLET_PY312 + // The weird thing is, we don't actually save this for an + // effect on the current greenlet, it's saved for an + // effect on the target greenlet. That is, we want + // continuity of this setting across the greenlet switch. + this->use_tracing = origin_tstate->cframe->use_tracing; +#endif +} + +void PythonState::set_initial_state(const PyThreadState* const tstate) noexcept +{ + this->_top_frame = nullptr; +#if GREENLET_PY314 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + this->current_executor = tstate->current_executor; +#elif GREENLET_PY312 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + // XXX: TODO: Comment from a reviewer: + // Should this be ``Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining``? + // But to me it looks more like that might not be the right + // initialization either? + this->c_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; +#elif GREENLET_PY311 + this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; +#else + this->recursion_depth = tstate->recursion_depth; +#endif +} +// TODO: Better state management about when we own the top frame. +int PythonState::tp_traverse(visitproc visit, void* arg, bool own_top_frame) noexcept +{ + Py_VISIT(this->_context.borrow()); + if (own_top_frame) { + Py_VISIT(this->_top_frame.borrow()); + } + return 0; +} + +void PythonState::tp_clear(bool own_top_frame) noexcept +{ + PythonStateContext::tp_clear(); + // If we get here owning a frame, + // we got dealloc'd without being finished. We may or may not be + // in the same thread. 
+ if (own_top_frame) { + this->_top_frame.CLEAR(); + } +} + +#if GREENLET_USE_CFRAME +void PythonState::set_new_cframe(_PyCFrame& frame) noexcept +{ + frame = *PyThreadState_GET()->cframe; + /* Make the target greenlet refer to the stack value. */ + this->cframe = &frame; + /* + And restore the link to the previous frame so this one gets + unliked appropriately. + */ + this->cframe->previous = &PyThreadState_GET()->root_cframe; +} +#endif + +const PythonState::OwnedFrame& PythonState::top_frame() const noexcept +{ + return this->_top_frame; +} + +void PythonState::did_finish(PyThreadState* tstate) noexcept +{ +#if GREENLET_PY311 + // See https://github.com/gevent/gevent/issues/1924 and + // https://github.com/python-greenlet/greenlet/issues/328. In + // short, Python 3.11 allocates memory for frames as a sort of + // linked list that's kept as part of PyThreadState in the + // ``datastack_chunk`` member and friends. These are saved and + // restored as part of switching greenlets. + // + // When we initially switch to a greenlet, we set those to NULL. + // That causes the frame management code to treat this like a + // brand new thread and start a fresh list of chunks, beginning + // with a new "root" chunk. As we make calls in this greenlet, + // those chunks get added, and as calls return, they get popped. + // But the frame code (pystate.c) is careful to make sure that the + // root chunk never gets popped. + // + // Thus, when a greenlet exits for the last time, there will be at + // least a single root chunk that we must be responsible for + // deallocating. + // + // The complex part is that these chunks are allocated and freed + // using ``_PyObject_VirtualAlloc``/``Free``. Those aren't public + // functions, and they aren't exported for linking. It so happens + // that we know they are just thin wrappers around the Arena + // allocator, so we can use that directly to deallocate in a + // compatible way. + // + // CAUTION: Check this implementation detail on every major version. + // + // It might be nice to be able to do this in our destructor, but + // can we be sure that no one else is using that memory? Plus, as + // described below, our pointers may not even be valid anymore. As + // a special case, there is one time that we know we can do this, + // and that's from the destructor of the associated UserGreenlet + // (NOT main greenlet) + PyObjectArenaAllocator alloc; + _PyStackChunk* chunk = nullptr; + if (tstate) { + // We really did finish, we can never be switched to again. + chunk = tstate->datastack_chunk; + // Unfortunately, we can't do much sanity checking. Our + // this->datastack_chunk pointer is out of date (evaluation may + // have popped down through it already) so we can't verify that + // we deallocate it. I don't think we can even check datastack_top + // for the same reason. + + PyObject_GetArenaAllocator(&alloc); + tstate->datastack_chunk = nullptr; + tstate->datastack_limit = nullptr; + tstate->datastack_top = nullptr; + + } + else if (this->datastack_chunk) { + // The UserGreenlet (NOT the main greenlet!) is being deallocated. If we're + // still holding a stack chunk, it's garbage because we know + // we can never switch back to let cPython clean it up. + // Because the last time we got switched away from, and we + // haven't run since then, we know our chain is valid and can + // be dealloced. + chunk = this->datastack_chunk; + PyObject_GetArenaAllocator(&alloc); + } + + if (alloc.free && chunk) { + // In case the arena mechanism has been torn down already. 
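+        // The loop below walks the chunk chain root-ward, clearing each
+        // ``previous`` link before freeing so a stale pointer can never
+        // lead back into memory already returned to the arena allocator.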
+        while (chunk) {
+            _PyStackChunk *prev = chunk->previous;
+            chunk->previous = nullptr;
+            alloc.free(alloc.ctx, chunk, chunk->size);
+            chunk = prev;
+        }
+    }
+
+    this->datastack_chunk = nullptr;
+    this->datastack_limit = nullptr;
+    this->datastack_top = nullptr;
+#endif
+}
+
+
+}; // namespace greenlet
+
+#endif // GREENLET_PYTHON_STATE_CPP
diff --git a/venv/lib/python3.12/site-packages/greenlet/TStackState.cpp b/venv/lib/python3.12/site-packages/greenlet/TStackState.cpp
new file mode 100644
index 0000000..9743ab5
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/TStackState.cpp
@@ -0,0 +1,265 @@
+#ifndef GREENLET_STACK_STATE_CPP
+#define GREENLET_STACK_STATE_CPP
+
+#include "TGreenlet.hpp"
+
+namespace greenlet {
+
+#ifdef GREENLET_USE_STDIO
+#include <iostream>
+using std::cerr;
+using std::endl;
+
+std::ostream& operator<<(std::ostream& os, const StackState& s)
+{
+    os << "StackState(stack_start=" << (void*)s._stack_start
+       << ", stack_stop=" << (void*)s.stack_stop
+       << ", stack_copy=" << (void*)s.stack_copy
+       << ", stack_saved=" << s._stack_saved
+       << ", stack_prev=" << s.stack_prev
+       << ", addr=" << &s
+       << ")";
+    return os;
+}
+#endif
+
+StackState::StackState(void* mark, StackState& current)
+    : _stack_start(nullptr),
+      stack_stop((char*)mark),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      /* Skip a dying greenlet */
+      stack_prev(current._stack_start
+                 ? &current
+                 : current.stack_prev)
+{
+}
+
+StackState::StackState()
+    : _stack_start(nullptr),
+      stack_stop(nullptr),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      stack_prev(nullptr)
+{
+}
+
+StackState::StackState(const StackState& other)
+// can't use a delegating constructor because of
+// MSVC for Python 2.7
+    : _stack_start(nullptr),
+      stack_stop(nullptr),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      stack_prev(nullptr)
+{
+    this->operator=(other);
+}
+
+StackState& StackState::operator=(const StackState& other)
+{
+    if (&other == this) {
+        return *this;
+    }
+    if (other._stack_saved) {
+        throw std::runtime_error("Refusing to steal memory.");
+    }
+
+    // If we have memory allocated, dispose of it
+    this->free_stack_copy();
+
+    this->_stack_start = other._stack_start;
+    this->stack_stop = other.stack_stop;
+    this->stack_copy = other.stack_copy;
+    this->_stack_saved = other._stack_saved;
+    this->stack_prev = other.stack_prev;
+    return *this;
+}
+
+inline void StackState::free_stack_copy() noexcept
+{
+    PyMem_Free(this->stack_copy);
+    this->stack_copy = nullptr;
+    this->_stack_saved = 0;
+}
+
+inline void StackState::copy_heap_to_stack(const StackState& current) noexcept
+{
+
+    /* Restore the heap copy back into the C stack */
+    if (this->_stack_saved != 0) {
+        memcpy(this->_stack_start, this->stack_copy, this->_stack_saved);
+        this->free_stack_copy();
+    }
+    StackState* owner = const_cast<StackState*>(&current);
+    if (!owner->_stack_start) {
+        owner = owner->stack_prev; /* greenlet is dying, skip it */
+    }
+    while (owner && owner->stack_stop <= this->stack_stop) {
+        // cerr << "\tOwner: " << owner << endl;
+        owner = owner->stack_prev; /* find greenlet with more stack */
+    }
+    this->stack_prev = owner;
+    // cerr << "\tFinished with: " << *this << endl;
+}
+
+inline int StackState::copy_stack_to_heap_up_to(const char* const stop) noexcept
+{
+    /* Save more of g's stack into the heap -- at least up to 'stop'
+       g->stack_stop |________|
+                     |        |
+                     |    __  stop       . . . . .
+                     |    |   ==>    .           .
+                     |________|      _______
+                     |        |     |       |
+                     |        |     |       |
+      g->stack_start |        |     |_______| g->stack_copy
+    */
+    intptr_t sz1 = this->_stack_saved;
+    intptr_t sz2 = stop - this->_stack_start;
+    assert(this->_stack_start);
+    if (sz2 > sz1) {
+        char* c = (char*)PyMem_Realloc(this->stack_copy, sz2);
+        if (!c) {
+            PyErr_NoMemory();
+            return -1;
+        }
+        memcpy(c + sz1, this->_stack_start + sz1, sz2 - sz1);
+        this->stack_copy = c;
+        this->_stack_saved = sz2;
+    }
+    return 0;
+}
+
+inline int StackState::copy_stack_to_heap(char* const stackref,
+                                          const StackState& current) noexcept
+{
+    /* must free all the C stack up to target_stop */
+    const char* const target_stop = this->stack_stop;
+
+    StackState* owner = const_cast<StackState*>(&current);
+    assert(owner->_stack_saved == 0); // everything is present on the stack
+    if (!owner->_stack_start) {
+        owner = owner->stack_prev; /* not saved if dying */
+    }
+    else {
+        owner->_stack_start = stackref;
+    }
+
+    while (owner->stack_stop < target_stop) {
+        /* ts_current is entirely within the area to free */
+        if (owner->copy_stack_to_heap_up_to(owner->stack_stop)) {
+            return -1; /* XXX */
+        }
+        owner = owner->stack_prev;
+    }
+    if (owner != this) {
+        if (owner->copy_stack_to_heap_up_to(target_stop)) {
+            return -1; /* XXX */
+        }
+    }
+    return 0;
+}
+
+inline bool StackState::started() const noexcept
+{
+    return this->stack_stop != nullptr;
+}
+
+inline bool StackState::main() const noexcept
+{
+    return this->stack_stop == (char*)-1;
+}
+
+inline bool StackState::active() const noexcept
+{
+    return this->_stack_start != nullptr;
+}
+
+inline void StackState::set_active() noexcept
+{
+    assert(this->_stack_start == nullptr);
+    this->_stack_start = (char*)1;
+}
+
+inline void StackState::set_inactive() noexcept
+{
+    this->_stack_start = nullptr;
+    // XXX: What if we still have memory out there?
+    // That case is actually triggered by
+    // test_issue251_issue252_explicit_reference_not_collectable (greenlet.tests.test_leaks.TestLeaks)
+    // and
+    // test_issue251_issue252_need_to_collect_in_background
+    // (greenlet.tests.test_leaks.TestLeaks)
+    //
+    // Those objects never get deallocated, so the destructor never
+    // runs.
+    // It *seems* safe to clean up the memory here?
+    if (this->_stack_saved) {
+        this->free_stack_copy();
+    }
+}
+
+inline intptr_t StackState::stack_saved() const noexcept
+{
+    return this->_stack_saved;
+}
+
+inline char* StackState::stack_start() const noexcept
+{
+    return this->_stack_start;
+}
+
+
+inline StackState StackState::make_main() noexcept
+{
+    StackState s;
+    s._stack_start = (char*)1;
+    s.stack_stop = (char*)-1;
+    return s;
+}
+
+StackState::~StackState()
+{
+    if (this->_stack_saved != 0) {
+        this->free_stack_copy();
+    }
+}
+
+void StackState::copy_from_stack(void* vdest, const void* vsrc, size_t n) const
+{
+    char* dest = static_cast<char*>(vdest);
+    const char* src = static_cast<const char*>(vsrc);
+    if (src + n <= this->_stack_start
+        || src >= this->_stack_start + this->_stack_saved
+        || this->_stack_saved == 0) {
+        // Nothing we're copying was spilled from the stack
+        memcpy(dest, src, n);
+        return;
+    }
+
+    if (src < this->_stack_start) {
+        // Copy the part before the saved stack.
+        // We know src + n > _stack_start due to the test above.
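+        // Worked example with hypothetical numbers: _stack_start == 1000,
+        // _stack_saved == 50, src == 980, n == 100:
+        //   nbefore  = 1000 - 980 = 20 bytes copied live from below the
+        //              saved region (leaving src == 1000, n == 80),
+        //   nspilled = min(80, 1050 - 1000) = 50 bytes served from
+        //              stack_copy,
+        //   and the final n == 30 bytes are copied live from above it.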
+ const size_t nbefore = this->_stack_start - src; + memcpy(dest, src, nbefore); + dest += nbefore; + src += nbefore; + n -= nbefore; + } + // We know src >= _stack_start after the before-copy, and + // src < _stack_start + _stack_saved due to the first if condition + size_t nspilled = std::min(n, this->_stack_start + this->_stack_saved - src); + memcpy(dest, this->stack_copy + (src - this->_stack_start), nspilled); + dest += nspilled; + src += nspilled; + n -= nspilled; + if (n > 0) { + // Copy the part after the saved stack + memcpy(dest, src, n); + } +} + +}; // namespace greenlet + +#endif // GREENLET_STACK_STATE_CPP diff --git a/venv/lib/python3.12/site-packages/greenlet/TThreadState.hpp b/venv/lib/python3.12/site-packages/greenlet/TThreadState.hpp new file mode 100644 index 0000000..e4e6f6c --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TThreadState.hpp @@ -0,0 +1,497 @@ +#ifndef GREENLET_THREAD_STATE_HPP +#define GREENLET_THREAD_STATE_HPP + +#include +#include + +#include "greenlet_internal.hpp" +#include "greenlet_refs.hpp" +#include "greenlet_thread_support.hpp" + +using greenlet::refs::BorrowedObject; +using greenlet::refs::BorrowedGreenlet; +using greenlet::refs::BorrowedMainGreenlet; +using greenlet::refs::OwnedMainGreenlet; +using greenlet::refs::OwnedObject; +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::OwnedList; +using greenlet::refs::PyErrFetchParam; +using greenlet::refs::PyArgParseParam; +using greenlet::refs::ImmortalString; +using greenlet::refs::CreatedModule; +using greenlet::refs::PyErrPieces; +using greenlet::refs::NewReference; + +namespace greenlet { +/** + * Thread-local state of greenlets. + * + * Each native thread will get exactly one of these objects, + * automatically accessed through the best available thread-local + * mechanism the compiler supports (``thread_local`` for C++11 + * compilers or ``__thread``/``declspec(thread)`` for older GCC/clang + * or MSVC, respectively.) + * + * Previously, we kept thread-local state mostly in a bunch of + * ``static volatile`` variables in the main greenlet file.. This had + * the problem of requiring extra checks, loops, and great care + * accessing these variables if we potentially invoked any Python code + * that could release the GIL, because the state could change out from + * under us. Making the variables thread-local solves this problem. + * + * When we detected that a greenlet API accessing the current greenlet + * was invoked from a different thread than the greenlet belonged to, + * we stored a reference to the greenlet in the Python thread + * dictionary for the thread the greenlet belonged to. This could lead + * to memory leaks if the thread then exited (because of a reference + * cycle, as greenlets referred to the thread dictionary, and deleting + * non-current greenlets leaked their frame plus perhaps arguments on + * the C stack). If a thread exited while still having running + * greenlet objects (perhaps that had just switched back to the main + * greenlet), and did not invoke one of the greenlet APIs *in that + * thread, immediately before it exited, without some other thread + * then being invoked*, such a leak was guaranteed. + * + * This can be partly solved by using compiler thread-local variables + * instead of the Python thread dictionary, thus avoiding a cycle. + * + * To fully solve this problem, we need a reliable way to know that a + * thread is done and we should clean up the main greenlet. 
On POSIX,
+ * we can use the destructor function of ``pthread_key_create``, but
+ * there's nothing similar on Windows; a C++11 thread local object
+ * reliably invokes its destructor when the thread it belongs to exits
+ * (non-C++11 compilers offer ``__thread`` or ``declspec(thread)`` to
+ * create thread-local variables, but they can't hold C++ objects that
+ * invoke destructors; the C++11 version is the most portable solution
+ * I found). When the thread exits, we can drop references and
+ * otherwise manipulate greenlets and frames that we know can no
+ * longer be switched to. For compilers that don't support C++11
+ * thread locals, we have a solution that uses the python thread
+ * dictionary, though it may not collect everything as promptly as
+ * other compilers do, if some other library is using the thread
+ * dictionary and has a cycle or extra reference.
+ *
+ * There are two small wrinkles. The first is that when the thread
+ * exits, it is too late to actually invoke Python APIs: the Python
+ * thread state is gone, and the GIL is released. To solve *this*
+ * problem, our destructor uses ``Py_AddPendingCall`` to transfer the
+ * destruction work to the main thread. (This is not an issue for the
+ * dictionary solution.)
+ *
+ * The second is that once the thread exits, the thread local object
+ * is invalid and we can't even access a pointer to it, so we can't
+ * pass it to ``Py_AddPendingCall``. This is handled by actually using
+ * a second object that's thread local (ThreadStateCreator) and having
+ * it dynamically allocate this object so it can live until the
+ * pending call runs.
+ */
+
+
+
+class ThreadState {
+private:
+    // As of commit 08ad1dd7012b101db953f492e0021fb08634afad
+    // this class needed 56 bytes in a Py_DEBUG build
+    // on 64-bit macOS 11.
+    // Adding the vector takes us up to 80 bytes.
+
+    /* Strong reference to the main greenlet */
+    OwnedMainGreenlet main_greenlet;
+
+    /* Strong reference to the current greenlet. */
+    OwnedGreenlet current_greenlet;
+
+    /* Strong reference to the trace function, if any. */
+    OwnedObject tracefunc;
+
+    typedef std::vector<PyGreenlet*, PythonAllocator<PyGreenlet*> > deleteme_t;
+    /* A vector of raw PyGreenlet pointers representing things that need
+       deleted when this thread is running. The vector owns the
+       references, but you need to manually INCREF/DECREF as you use
+       them. We don't use a vector<refs::OwnedGreenlet> because we
+       make a copy of this vector, and that would become O(n) as all the
+       refcounts are incremented in the copy.
+    */
+    deleteme_t deleteme;
+
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+    void* exception_state;
+#endif
+
+    static std::clock_t _clocks_used_doing_gc;
+    static ImmortalString get_referrers_name;
+    static PythonAllocator<ThreadState> allocator;
+
+    G_NO_COPIES_OF_CLS(ThreadState);
+
+
+    // Allocates a main greenlet for the thread state. If this fails,
+    // exits the process. Called only during constructing a ThreadState.
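+    // (The freshly allocated PyGreenlet comes back with a refcount of
+    // exactly 1, owned by the returned MainGreenlet; the ThreadState
+    // constructor then stores it in both ``main_greenlet`` and
+    // ``current_greenlet`` and asserts the resulting count of 2.)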
+ MainGreenlet* alloc_main() + { + PyGreenlet* gmain; + + /* create the main greenlet for this thread */ + gmain = reinterpret_cast(PyType_GenericAlloc(&PyGreenlet_Type, 0)); + if (gmain == NULL) { + throw PyFatalError("alloc_main failed to alloc"); //exits the process + } + + MainGreenlet* const main = new MainGreenlet(gmain, this); + + assert(Py_REFCNT(gmain) == 1); + assert(gmain->pimpl == main); + return main; + } + + +public: + static void* operator new(size_t UNUSED(count)) + { + return ThreadState::allocator.allocate(1); + } + + static void operator delete(void* ptr) + { + return ThreadState::allocator.deallocate(static_cast(ptr), + 1); + } + + static void init() + { + ThreadState::get_referrers_name = "get_referrers"; + ThreadState::_clocks_used_doing_gc = 0; + } + + ThreadState() + { + +#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED + this->exception_state = slp_get_exception_state(); +#endif + + // XXX: Potentially dangerous, exposing a not fully + // constructed object. + MainGreenlet* const main = this->alloc_main(); + this->main_greenlet = OwnedMainGreenlet::consuming( + main->self() + ); + assert(this->main_greenlet); + this->current_greenlet = main->self(); + // The main greenlet starts with 1 refs: The returned one. We + // then copied it to the current greenlet. + assert(this->main_greenlet.REFCNT() == 2); + } + + inline void restore_exception_state() + { +#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED + // It's probably important this be inlined and only call C + // functions to avoid adding an SEH frame. + slp_set_exception_state(this->exception_state); +#endif + } + + inline bool has_main_greenlet() const noexcept + { + return bool(this->main_greenlet); + } + + // Called from the ThreadStateCreator when we're in non-standard + // threading mode. In that case, there is an object in the Python + // thread state dictionary that points to us. The main greenlet + // also traverses into us, in which case it's crucial not to + // traverse back into the main greenlet. + int tp_traverse(visitproc visit, void* arg, bool traverse_main=true) + { + if (traverse_main) { + Py_VISIT(main_greenlet.borrow_o()); + } + if (traverse_main || current_greenlet != main_greenlet) { + Py_VISIT(current_greenlet.borrow_o()); + } + Py_VISIT(tracefunc.borrow()); + return 0; + } + + inline BorrowedMainGreenlet borrow_main_greenlet() const noexcept + { + assert(this->main_greenlet); + assert(this->main_greenlet.REFCNT() >= 2); + return this->main_greenlet; + }; + + inline OwnedMainGreenlet get_main_greenlet() const noexcept + { + return this->main_greenlet; + } + + /** + * In addition to returning a new reference to the currunt + * greenlet, this performs any maintenance needed. + */ + inline OwnedGreenlet get_current() + { + /* green_dealloc() cannot delete greenlets from other threads, so + it stores them in the thread dict; delete them now. */ + this->clear_deleteme_list(); + //assert(this->current_greenlet->main_greenlet == this->main_greenlet); + //assert(this->main_greenlet->main_greenlet == this->main_greenlet); + return this->current_greenlet; + } + + /** + * As for non-const get_current(); + */ + inline BorrowedGreenlet borrow_current() + { + this->clear_deleteme_list(); + return this->current_greenlet; + } + + /** + * Does no maintenance. 
+ */ + inline OwnedGreenlet get_current() const + { + return this->current_greenlet; + } + + template + inline bool is_current(const refs::PyObjectPointer& obj) const + { + return this->current_greenlet.borrow_o() == obj.borrow_o(); + } + + inline void set_current(const OwnedGreenlet& target) + { + this->current_greenlet = target; + } + +private: + /** + * Deref and remove the greenlets from the deleteme list. Must be + * holding the GIL. + * + * If *murder* is true, then we must be called from a different + * thread than the one that these greenlets were running in. + * In that case, if the greenlet was actually running, we destroy + * the frame reference and otherwise make it appear dead before + * proceeding; otherwise, we would try (and fail) to raise an + * exception in it and wind up right back in this list. + */ + inline void clear_deleteme_list(const bool murder=false) + { + if (!this->deleteme.empty()) { + // It's possible we could add items to this list while + // running Python code if there's a thread switch, so we + // need to defensively copy it before that can happen. + deleteme_t copy = this->deleteme; + this->deleteme.clear(); // in case things come back on the list + for(deleteme_t::iterator it = copy.begin(), end = copy.end(); + it != end; + ++it ) { + PyGreenlet* to_del = *it; + if (murder) { + // Force each greenlet to appear dead; we can't raise an + // exception into it anymore anyway. + to_del->pimpl->murder_in_place(); + } + + // The only reference to these greenlets should be in + // this list, decreffing them should let them be + // deleted again, triggering calls to green_dealloc() + // in the correct thread (if we're not murdering). + // This may run arbitrary Python code and switch + // threads or greenlets! + Py_DECREF(to_del); + if (PyErr_Occurred()) { + PyErr_WriteUnraisable(nullptr); + PyErr_Clear(); + } + } + } + } + +public: + + /** + * Returns a new reference, or a false object. + */ + inline OwnedObject get_tracefunc() const + { + return tracefunc; + }; + + + inline void set_tracefunc(BorrowedObject tracefunc) + { + assert(tracefunc); + if (tracefunc == BorrowedObject(Py_None)) { + this->tracefunc.CLEAR(); + } + else { + this->tracefunc = tracefunc; + } + } + + /** + * Given a reference to a greenlet that some other thread + * attempted to delete (has a refcount of 0) store it for later + * deletion when the thread this state belongs to is current. + */ + inline void delete_when_thread_running(PyGreenlet* to_del) + { + Py_INCREF(to_del); + this->deleteme.push_back(to_del); + } + + /** + * Set to std::clock_t(-1) to disable. + */ + inline static std::clock_t& clocks_used_doing_gc() + { + return ThreadState::_clocks_used_doing_gc; + } + + ~ThreadState() + { + if (!PyInterpreterState_Head()) { + // We shouldn't get here (our callers protect us) + // but if we do, all we can do is bail early. + return; + } + + // We should not have an "origin" greenlet; that only exists + // for the temporary time during a switch, which should not + // be in progress as the thread dies. + //assert(!this->switching_state.origin); + + this->tracefunc.CLEAR(); + + // Forcibly GC as much as we can. + this->clear_deleteme_list(true); + + // The pending call did this. + assert(this->main_greenlet->thread_state() == nullptr); + + // If the main greenlet is the current greenlet, + // then we "fell off the end" and the thread died. 
+        // It's possible that there is some other greenlet that
+        // switched to us, leaving a reference to the main greenlet
+        // on the stack, somewhere uncollectible. Try to detect that.
+        if (this->current_greenlet == this->main_greenlet && this->current_greenlet) {
+            assert(this->current_greenlet->is_currently_running_in_some_thread());
+            // Drop one reference we hold.
+            this->current_greenlet.CLEAR();
+            assert(!this->current_greenlet);
+            // Only our reference to the main greenlet should be left,
+            // but hold onto the pointer in case we need to do extra cleanup.
+            PyGreenlet* old_main_greenlet = this->main_greenlet.borrow();
+            Py_ssize_t cnt = this->main_greenlet.REFCNT();
+            this->main_greenlet.CLEAR();
+            if (ThreadState::_clocks_used_doing_gc != std::clock_t(-1)
+                && cnt == 2 && Py_REFCNT(old_main_greenlet) == 1) {
+                // Highly likely that the reference is somewhere on
+                // the stack, not reachable by GC. Verify.
+                // XXX: This is O(n) in the total number of objects.
+                // TODO: Add a way to disable this at runtime, and
+                // another way to report on it.
+                std::clock_t begin = std::clock();
+                NewReference gc(PyImport_ImportModule("gc"));
+                if (gc) {
+                    OwnedObject get_referrers = gc.PyRequireAttr(ThreadState::get_referrers_name);
+                    OwnedList refs(get_referrers.PyCall(old_main_greenlet));
+                    if (refs && refs.empty()) {
+                        assert(refs.REFCNT() == 1);
+                        // We found nothing! So we left a dangling
+                        // reference: Probably the last thing some
+                        // other greenlet did was call
+                        // 'getcurrent().parent.switch()' to switch
+                        // back to us. Clean it up. This will be the
+                        // case on CPython 3.7 and newer, as they use
+                        // an internal calling convention that avoids
+                        // creating method objects and storing them on
+                        // the stack.
+                        Py_DECREF(old_main_greenlet);
+                    }
+                    else if (refs
+                             && refs.size() == 1
+                             && PyCFunction_Check(refs.at(0))
+                             && Py_REFCNT(refs.at(0)) == 2) {
+                        assert(refs.REFCNT() == 1);
+                        // Ok, we found a C method that refers to the
+                        // main greenlet, and it's only referenced
+                        // twice, once in the list we just created,
+                        // once from...somewhere else. If we can't
+                        // find where else, then this is a leak.
+                        // This happens in older versions of CPython
+                        // that create a bound method object somewhere
+                        // on the stack that we'll never get back to.
+                        if (PyCFunction_GetFunction(refs.at(0).borrow()) == (PyCFunction)green_switch) {
+                            BorrowedObject function_w = refs.at(0);
+                            refs.clear(); // destroy the reference
+                                          // from the list.
+                            // back to one reference. Can *it* be
+                            // found?
+                            assert(function_w.REFCNT() == 1);
+                            refs = get_referrers.PyCall(function_w);
+                            if (refs && refs.empty()) {
+                                // Nope, it can't be found so it won't
+                                // ever be GC'd. Drop it.
+                                Py_CLEAR(function_w);
+                            }
+                        }
+                    }
+                    std::clock_t end = std::clock();
+                    ThreadState::_clocks_used_doing_gc += (end - begin);
+                }
+            }
+        }
+
+        // We need to make sure this greenlet appears to be dead,
+        // because otherwise deallocing it would fail to raise an
+        // exception in it (the thread is dead) and put it back in our
+        // deleteme list.
+        if (this->current_greenlet) {
+            this->current_greenlet->murder_in_place();
+            this->current_greenlet.CLEAR();
+        }
+
+        if (this->main_greenlet) {
+            // Couldn't have been the main greenlet that was running
+            // when the thread exited (because we already cleared this
+            // pointer if it was). This shouldn't be possible?
+
+            // If the main greenlet was current when the thread died (it
+            // should be, right?) then we cleared its self pointer above
+            // when we cleared the current greenlet's main greenlet pointer.
+            // assert(this->main_greenlet->main_greenlet == this->main_greenlet
+            //        || !this->main_greenlet->main_greenlet);
+            // // self reference, probably gone
+            // this->main_greenlet->main_greenlet.CLEAR();
+
+            // This will actually go away when the ivar is destructed.
+            this->main_greenlet.CLEAR();
+        }
+
+        if (PyErr_Occurred()) {
+            PyErr_WriteUnraisable(NULL);
+            PyErr_Clear();
+        }
+
+    }
+
+};
+
+ImmortalString ThreadState::get_referrers_name(nullptr);
+PythonAllocator<ThreadState> ThreadState::allocator;
+std::clock_t ThreadState::_clocks_used_doing_gc(0);
+
+
+
+
+
+}; // namespace greenlet
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/greenlet/TThreadStateCreator.hpp b/venv/lib/python3.12/site-packages/greenlet/TThreadStateCreator.hpp
new file mode 100644
index 0000000..2ec7ab5
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/TThreadStateCreator.hpp
@@ -0,0 +1,102 @@
+#ifndef GREENLET_THREAD_STATE_CREATOR_HPP
+#define GREENLET_THREAD_STATE_CREATOR_HPP
+
+#include <ctime>
+#include <stdexcept>
+
+#include "greenlet_internal.hpp"
+#include "greenlet_refs.hpp"
+#include "greenlet_thread_support.hpp"
+
+#include "TThreadState.hpp"
+
+namespace greenlet {
+
+
+typedef void (*ThreadStateDestructor)(ThreadState* const);
+
+template <ThreadStateDestructor Destructor>
+class ThreadStateCreator
+{
+private:
+    // Initialized to 1, and, if still 1, created on access.
+    // Set to 0 on destruction.
+    ThreadState* _state;
+    G_NO_COPIES_OF_CLS(ThreadStateCreator);
+
+    inline bool has_initialized_state() const noexcept
+    {
+        return this->_state != (ThreadState*)1;
+    }
+
+    inline bool has_state() const noexcept
+    {
+        return this->has_initialized_state() && this->_state != nullptr;
+    }
+
+public:
+
+    // Only one of these, auto created per thread.
+    // Constructing the state constructs the MainGreenlet.
+    ThreadStateCreator() :
+        _state((ThreadState*)1)
+    {
+    }
+
+    ~ThreadStateCreator()
+    {
+        if (this->has_state()) {
+            Destructor(this->_state);
+        }
+
+        this->_state = nullptr;
+    }
+
+    inline ThreadState& state()
+    {
+        // The main greenlet will own this pointer when it is created,
+        // which will be right after this. The plan is to give every
+        // greenlet a pointer to the main greenlet for the thread it
+        // runs in; if we are doing something cross-thread, we need to
+        // access the pointer from the main greenlet. Deleting the
+        // thread, and hence the thread-local storage, will delete the
+        // state pointer in the main greenlet.
+        if (!this->has_initialized_state()) {
+            // XXX: Assuming allocation never fails
+            this->_state = new ThreadState;
+            // For non-standard threading, we need to store an object
+            // in the Python thread state dictionary so that it can be
+            // DECREF'd when the thread ends (ideally; the dict could
+            // last longer) and clean this object up.
+ } + if (!this->_state) { + throw std::runtime_error("Accessing state after destruction."); + } + return *this->_state; + } + + operator ThreadState&() + { + return this->state(); + } + + operator ThreadState*() + { + return &this->state(); + } + + inline int tp_traverse(visitproc visit, void* arg) + { + if (this->has_state()) { + return this->_state->tp_traverse(visit, arg); + } + return 0; + } + +}; + + + +}; // namespace greenlet + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/TThreadStateDestroy.cpp b/venv/lib/python3.12/site-packages/greenlet/TThreadStateDestroy.cpp new file mode 100644 index 0000000..449b788 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TThreadStateDestroy.cpp @@ -0,0 +1,217 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of the ThreadState destructors. + * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_THREADSTATE_DESTROY +#define T_THREADSTATE_DESTROY + +#include "TGreenlet.hpp" + +#include "greenlet_thread_support.hpp" +#include "greenlet_compiler_compat.hpp" +#include "TGreenletGlobals.cpp" +#include "TThreadState.hpp" +#include "TThreadStateCreator.hpp" + +namespace greenlet { + +extern "C" { + +struct ThreadState_DestroyNoGIL +{ + /** + This function uses the same lock that the PendingCallback does + */ + static void + MarkGreenletDeadAndQueueCleanup(ThreadState* const state) + { +#if GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK + return; +#endif + // We are *NOT* holding the GIL. Our thread is in the middle + // of its death throes and the Python thread state is already + // gone so we can't use most Python APIs. One that is safe is + // ``Py_AddPendingCall``, unless the interpreter itself has + // been torn down. There is a limited number of calls that can + // be queued: 32 (NPENDINGCALLS) in CPython 3.10, so we + // coalesce these calls using our own queue. + + if (!MarkGreenletDeadIfNeeded(state)) { + // No state, or no greenlet + return; + } + + // XXX: Because we don't have the GIL, this is a race condition. + if (!PyInterpreterState_Head()) { + // We have to leak the thread state, if the + // interpreter has shut down when we're getting + // deallocated, we can't run the cleanup code that + // deleting it would imply. + return; + } + + AddToCleanupQueue(state); + + } + +private: + + // If the state has an allocated main greenlet: + // - mark the greenlet as dead by disassociating it from the state; + // - return 1 + // Otherwise, return 0. + static bool + MarkGreenletDeadIfNeeded(ThreadState* const state) + { + if (state && state->has_main_greenlet()) { + // mark the thread as dead ASAP. + // this is racy! If we try to throw or switch to a + // greenlet from this thread from some other thread before + // we clear the state pointer, it won't realize the state + // is dead which can crash the process. 
+ PyGreenlet* p(state->borrow_main_greenlet().borrow()); + assert(p->pimpl->thread_state() == state || p->pimpl->thread_state() == nullptr); + dynamic_cast(p->pimpl)->thread_state(nullptr); + return true; + } + return false; + } + + static void + AddToCleanupQueue(ThreadState* const state) + { + assert(state && state->has_main_greenlet()); + + // NOTE: Because we're not holding the GIL here, some other + // Python thread could run and call ``os.fork()``, which would + // be bad if that happened while we are holding the cleanup + // lock (it wouldn't function in the child process). + // Make a best effort to try to keep the duration we hold the + // lock short. + // TODO: On platforms that support it, use ``pthread_atfork`` to + // drop this lock. + LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); + + mod_globs->queue_to_destroy(state); + if (mod_globs->thread_states_to_destroy.size() == 1) { + // We added the first item to the queue. We need to schedule + // the cleanup. + + // A size greater than 1 means that we have already added the pending call, + // and in fact, it may be executing now. + // If it is executing, our lock makes sure that it will see the item we just added + // to the queue on its next iteration (after we release the lock) + // + // A size of 1 means there is no pending call, OR the pending call is + // currently executing, has dropped the lock, and is deleting the last item + // from the queue; its next iteration will go ahead and delete the item we just added. + // And the pending call we schedule here will have no work to do. + int result = AddPendingCall( + PendingCallback_DestroyQueueWithGIL, + nullptr); + if (result < 0) { + // Hmm, what can we do here? + fprintf(stderr, + "greenlet: WARNING: failed in call to Py_AddPendingCall; " + "expect a memory leak.\n"); + } + } + } + + static int + PendingCallback_DestroyQueueWithGIL(void* UNUSED(arg)) + { + // We're holding the GIL here, so no Python code should be able to + // run to call ``os.fork()``. + while (1) { + ThreadState* to_destroy; + { + LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); + if (mod_globs->thread_states_to_destroy.empty()) { + break; + } + to_destroy = mod_globs->take_next_to_destroy(); + } + assert(to_destroy); + assert(to_destroy->has_main_greenlet()); + // Drop the lock while we do the actual deletion. + // This allows other calls to MarkGreenletDeadAndQueueCleanup + // to enter and add to our queue. + DestroyOneWithGIL(to_destroy); + } + return 0; + } + + static void + DestroyOneWithGIL(const ThreadState* const state) + { + // Holding the GIL. + // Passed a non-shared pointer to the actual thread state. + // state -> main greenlet + assert(state->has_main_greenlet()); + PyGreenlet* main(state->borrow_main_greenlet()); + // When we need to do cross-thread operations, we check this. + // A NULL value means the thread died some time ago. + // We do this here, rather than in a Python dealloc function + // for the greenlet, in case there's still a reference out + // there. + dynamic_cast(main->pimpl)->thread_state(nullptr); + + delete state; // Deleting this runs the destructor, DECREFs the main greenlet. + } + + + static int AddPendingCall(int (*func)(void*), void* arg) + { + // If the interpreter is in the middle of finalizing, we can't add a + // pending call. Trying to do so will end up in a SIGSEGV, as + // Py_AddPendingCall will not be able to get the interpreter and will + // try to dereference a NULL pointer. 
It's possible this can still + // segfault if we happen to get context switched, and maybe we should + // just always implement our own AddPendingCall, but I'd like to see if + // this works first +#if GREENLET_PY313 + if (Py_IsFinalizing()) { +#else + if (_Py_IsFinalizing()) { +#endif +#ifdef GREENLET_DEBUG + // No need to log in the general case. Yes, we'll leak, + // but we're shutting down so it should be ok. + fprintf(stderr, + "greenlet: WARNING: Interpreter is finalizing. Ignoring " + "call to Py_AddPendingCall; \n"); +#endif + return 0; + } + return Py_AddPendingCall(func, arg); + } + + + + + +}; +}; + +}; // namespace greenlet + +// The intent when GET_THREAD_STATE() is needed multiple times in a +// function is to take a reference to its return value in a local +// variable, to avoid the thread-local indirection. On some platforms +// (macOS), accessing a thread-local involves a function call (plus an +// initial function call in each function that uses a thread local); +// in contrast, static volatile variables are at some pre-computed +// offset. +typedef greenlet::ThreadStateCreator ThreadStateCreator; +static thread_local ThreadStateCreator g_thread_state_global; +#define GET_THREAD_STATE() g_thread_state_global + +#endif //T_THREADSTATE_DESTROY diff --git a/venv/lib/python3.12/site-packages/greenlet/TUserGreenlet.cpp b/venv/lib/python3.12/site-packages/greenlet/TUserGreenlet.cpp new file mode 100644 index 0000000..73a8133 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/TUserGreenlet.cpp @@ -0,0 +1,662 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::UserGreenlet. + * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_USER_GREENLET_CPP +#define T_USER_GREENLET_CPP + +#include "greenlet_internal.hpp" +#include "TGreenlet.hpp" + +#include "TThreadStateDestroy.cpp" + + +namespace greenlet { +using greenlet::refs::BorrowedMainGreenlet; +greenlet::PythonAllocator UserGreenlet::allocator; + +void* UserGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void UserGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast(ptr), + 1); +} + + +UserGreenlet::UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) + : Greenlet(p), _parent(the_parent) +{ +} + +UserGreenlet::~UserGreenlet() +{ + // Python 3.11: If we don't clear out the raw frame datastack + // when deleting an unfinished greenlet, + // TestLeaks.test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main fails. + this->python_state.did_finish(nullptr); + this->tp_clear(); +} + + +const BorrowedMainGreenlet +UserGreenlet::main_greenlet() const +{ + return this->_main_greenlet; +} + + +BorrowedMainGreenlet +UserGreenlet::find_main_greenlet_in_lineage() const +{ + if (this->started()) { + assert(this->_main_greenlet); + return BorrowedMainGreenlet(this->_main_greenlet); + } + + if (!this->_parent) { + /* garbage collected greenlet in chain */ + // XXX: WHAT? + return BorrowedMainGreenlet(nullptr); + } + + return this->_parent->find_main_greenlet_in_lineage(); +} + + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. 
+ */ +OwnedObject +UserGreenlet::throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state) +{ + /* The dying greenlet cannot be a parent of ts_current + because the 'parent' field chain would hold a + reference */ + UserGreenlet::ParentIsCurrentGuard with_current_parent(this, current_thread_state); + + // We don't care about the return value, only whether an + // exception happened. Whether or not an exception happens, + // we need to restore the parent in case the greenlet gets + // resurrected. + return Greenlet::throw_GreenletExit_during_dealloc(current_thread_state); +} + +ThreadState* +UserGreenlet::thread_state() const noexcept +{ + // TODO: maybe make this throw, if the thread state isn't there? + // if (!this->main_greenlet) { + // throw std::runtime_error("No thread state"); // TODO: Better exception + // } + if (!this->_main_greenlet) { + return nullptr; + } + return this->_main_greenlet->thread_state(); +} + + +bool +UserGreenlet::was_running_in_dead_thread() const noexcept +{ + return this->_main_greenlet && !this->thread_state(); +} + +OwnedObject +UserGreenlet::g_switch() +{ + assert(this->args() || PyErr_Occurred()); + + try { + this->check_switch_allowed(); + } + catch (const PyErrOccurred&) { + this->release_args(); + throw; + } + + // Switching greenlets used to attempt to clean out ones that need + // deleted *if* we detected a thread switch. Should it still do + // that? + // An issue is that if we delete a greenlet from another thread, + // it gets queued to this thread, and ``kill_greenlet()`` switches + // back into the greenlet + + /* find the real target by ignoring dead greenlets, + and if necessary starting a greenlet. */ + switchstack_result_t err; + Greenlet* target = this; + // TODO: probably cleaner to handle the case where we do + // switch to ourself separately from the other cases. + // This can probably even further be simplified if we keep + // track of the switching_state we're going for and just call + // into g_switch() if it's not ourself. The main problem with that + // is that we would be using more stack space. + bool target_was_me = true; + bool was_initial_stub = false; + while (target) { + if (target->active()) { + if (!target_was_me) { + target->args() <<= this->args(); + assert(!this->args()); + } + err = target->g_switchstack(); + break; + } + if (!target->started()) { + // We never encounter a main greenlet that's not started. + assert(!target->main()); + UserGreenlet* real_target = static_cast(target); + assert(real_target); + void* dummymarker; + was_initial_stub = true; + if (!target_was_me) { + target->args() <<= this->args(); + assert(!this->args()); + } + try { + // This can only throw back to us while we're + // still in this greenlet. Once the new greenlet + // is bootstrapped, it has its own exception state. + err = real_target->g_initialstub(&dummymarker); + } + catch (const PyErrOccurred&) { + this->release_args(); + throw; + } + catch (const GreenletStartedWhileInPython&) { + // The greenlet was started sometime before this + // greenlet actually switched to it, i.e., + // "concurrent" calls to switch() or throw(). + // We need to retry the switch. + // Note that the current greenlet has been reset + // to this one (or we wouldn't be running!) + continue; + } + break; + } + + target = target->parent(); + target_was_me = false; + } + // The ``this`` pointer and all other stack or register based + // variables are invalid now, at least where things succeed + // above. + // But this one, probably not so much? 
It's not clear if it's + // safe to throw an exception at this point. + + if (err.status < 0) { + // If we get here, either g_initialstub() + // failed, or g_switchstack() failed. Either one of those + // cases SHOULD leave us in the original greenlet with a valid + // stack. + return this->on_switchstack_or_initialstub_failure(target, err, target_was_me, was_initial_stub); + } + + // err.the_new_current_greenlet would be the same as ``target``, + // if target wasn't probably corrupt. + return err.the_new_current_greenlet->g_switch_finish(err); +} + + + +Greenlet::switchstack_result_t +UserGreenlet::g_initialstub(void* mark) +{ + OwnedObject run; + + // We need to grab a reference to the current switch arguments + // in case we're entered concurrently during the call to + // GetAttr() and have to try again. + // We'll restore them when we return in that case. + // Scope them tightly to avoid ref leaks. + { + SwitchingArgs args(this->args()); + + /* save exception in case getattr clears it */ + PyErrPieces saved; + + /* + self.run is the object to call in the new greenlet. + This could run arbitrary python code and switch greenlets! + */ + run = this->self().PyRequireAttr(mod_globs->str_run); + /* restore saved exception */ + saved.PyErrRestore(); + + + /* recheck that it's safe to switch in case greenlet reparented anywhere above */ + this->check_switch_allowed(); + + /* by the time we got here another start could happen elsewhere, + * that means it should now be a regular switch. + * This can happen if the Python code is a subclass that implements + * __getattribute__ or __getattr__, or makes ``run`` a descriptor; + * all of those can run arbitrary code that switches back into + * this greenlet. + */ + if (this->stack_state.started()) { + // the successful switch cleared these out, we need to + // restore our version. They will be copied on up to the + // next target. + assert(!this->args()); + this->args() <<= args; + throw GreenletStartedWhileInPython(); + } + } + + // Sweet, if we got here, we have the go-ahead and will switch + // greenlets. + // Nothing we do from here on out should allow for a thread or + // greenlet switch: No arbitrary calls to Python, including + // decref'ing + +#if GREENLET_USE_CFRAME + /* OK, we need it, we're about to switch greenlets, save the state. */ + /* + See green_new(). This is a stack-allocated variable used + while *self* is in PyObject_Call(). + We want to defer copying the state info until we're sure + we need it and are in a stable place to do so. + */ + _PyCFrame trace_info; + + this->python_state.set_new_cframe(trace_info); +#endif + /* start the greenlet */ + ThreadState& thread_state = GET_THREAD_STATE().state(); + this->stack_state = StackState(mark, + thread_state.borrow_current()->stack_state); + this->python_state.set_initial_state(PyThreadState_GET()); + this->exception_state.clear(); + this->_main_greenlet = thread_state.get_main_greenlet(); + + /* perform the initial switch */ + switchstack_result_t err = this->g_switchstack(); + /* returns twice! + The 1st time with ``err == 1``: we are in the new greenlet. + This one owns a greenlet that used to be current. + The 2nd time with ``err <= 0``: back in the caller's + greenlet; this happens if the child finishes or switches + explicitly to us. Either way, the ``err`` variable is + created twice at the same memory location, but possibly + having different ``origin`` values. Note that it's not + constructed for the second time until the switch actually happens. 
+ */ + if (err.status == 1) { + // In the new greenlet. + + // This never returns! Calling inner_bootstrap steals + // the contents of our run object within this stack frame, so + // it is not valid to do anything with it. + try { + this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(), + run.relinquish_ownership()); + } + // Getting a C++ exception here isn't good. It's probably a + // bug in the underlying greenlet, meaning it's probably a + // C++ extension. We're going to abort anyway, but try to + // display some nice information *if* possible. Some obscure + // platforms don't properly support this (old 32-bit Arm, see see + // https://github.com/python-greenlet/greenlet/issues/385); that's not + // great, but should usually be OK because, as mentioned above, we're + // terminating anyway. + // + // The catching is tested by + // ``test_cpp.CPPTests.test_unhandled_exception_in_greenlet_aborts``. + // + // PyErrOccurred can theoretically be thrown by + // inner_bootstrap() -> g_switch_finish(), but that should + // never make it back to here. It is a std::exception and + // would be caught if it is. + catch (const std::exception& e) { + std::string base = "greenlet: Unhandled C++ exception: "; + base += e.what(); + Py_FatalError(base.c_str()); + } + catch (...) { + // Some compilers/runtimes use exceptions internally. + // It appears that GCC on Linux with libstdc++ throws an + // exception internally at process shutdown time to unwind + // stacks and clean up resources. Depending on exactly + // where we are when the process exits, that could result + // in an unknown exception getting here. If we + // Py_FatalError() or abort() here, we interfere with + // orderly process shutdown. Throwing the exception on up + // is the right thing to do. + // + // gevent's ``examples/dns_mass_resolve.py`` demonstrates this. +#ifndef NDEBUG + fprintf(stderr, + "greenlet: inner_bootstrap threw unknown exception; " + "is the process terminating?\n"); +#endif + throw; + } + Py_FatalError("greenlet: inner_bootstrap returned with no exception.\n"); + } + + + // In contrast, notice that we're keeping the origin greenlet + // around as an owned reference; we need it to call the trace + // function for the switch back into the parent. It was only + // captured at the time the switch actually happened, though, + // so we haven't been keeping an extra reference around this + // whole time. + + /* back in the parent */ + if (err.status < 0) { + /* start failed badly, restore greenlet state */ + this->stack_state = StackState(); + this->_main_greenlet.CLEAR(); + // CAUTION: This may run arbitrary Python code. + run.CLEAR(); // inner_bootstrap didn't run, we own the reference. + } + + // In the success case, the spawned code (inner_bootstrap) will + // take care of decrefing this, so we relinquish ownership so as + // to not double-decref. + + run.relinquish_ownership(); + + return err; +} + + +void +UserGreenlet::inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run) +{ + // The arguments here would be another great place for move. + // As it is, we take them as a reference so that when we clear + // them we clear what's on the stack above us. Do that NOW, and + // without using a C++ RAII object, + // so there's no way that exiting the parent frame can clear it, + // or we clear it unexpectedly. This arises in the context of the + // interpreter shutting down. 
See https://github.com/python-greenlet/greenlet/issues/325 + //PyObject* run = _run.relinquish_ownership(); + + /* in the new greenlet */ + assert(this->thread_state()->borrow_current() == BorrowedGreenlet(this->_self)); + // C++ exceptions cannot propagate to the parent greenlet from + // here. (TODO: Do we need a catch(...) clause, perhaps on the + // function itself? ALl we could do is terminate the program.) + // NOTE: On 32-bit Windows, the call chain is extremely + // important here in ways that are subtle, having to do with + // the depth of the SEH list. The call to restore it MUST NOT + // add a new SEH handler to the list, or we'll restore it to + // the wrong thing. + this->thread_state()->restore_exception_state(); + /* stack variables from above are no good and also will not unwind! */ + // EXCEPT: That can't be true, we access run, among others, here. + + this->stack_state.set_active(); /* running */ + + // We're about to possibly run Python code again, which + // could switch back/away to/from us, so we need to grab the + // arguments locally. + SwitchingArgs args; + args <<= this->args(); + assert(!this->args()); + + // XXX: We could clear this much earlier, right? + // Or would that introduce the possibility of running Python + // code when we don't want to? + // CAUTION: This may run arbitrary Python code. + this->_run_callable.CLEAR(); + + + // The first switch we need to manually call the trace + // function here instead of in g_switch_finish, because we + // never return there. + if (OwnedObject tracefunc = this->thread_state()->get_tracefunc()) { + OwnedGreenlet trace_origin; + trace_origin = origin_greenlet; + try { + g_calltrace(tracefunc, + args ? mod_globs->event_switch : mod_globs->event_throw, + trace_origin, + this->_self); + } + catch (const PyErrOccurred&) { + /* Turn trace errors into switch throws */ + args.CLEAR(); + } + } + + // We no longer need the origin, it was only here for + // tracing. + // We may never actually exit this stack frame so we need + // to explicitly clear it. + // This could run Python code and switch. + Py_CLEAR(origin_greenlet); + + OwnedObject result; + if (!args) { + /* pending exception */ + result = NULL; + } + else { + /* call g.run(*args, **kwargs) */ + // This could result in further switches + try { + //result = run.PyCall(args.args(), args.kwargs()); + // CAUTION: Just invoking this, before the function even + // runs, may cause memory allocations, which may trigger + // GC, which may run arbitrary Python code. + result = OwnedObject::consuming(PyObject_Call(run, args.args().borrow(), args.kwargs().borrow())); + } + catch (...) { + // Unhandled C++ exception! + + // If we declare ourselves as noexcept, if we don't catch + // this here, most platforms will just abort() the + // process. But on 64-bit Windows with older versions of + // the C runtime, this can actually corrupt memory and + // just return. We see this when compiling with the + // Windows 7.0 SDK targeting Windows Server 2008, but not + // when using the Appveyor Visual Studio 2019 image. So + // this currently only affects Python 2.7 on Windows 64. + // That is, the tests pass and the runtime aborts + // everywhere else. + // + // However, if we catch it and try to continue with a + // Python error, then all Windows 64 bit platforms corrupt + // memory. So all we can do is manually abort, hopefully + // with a good error message. 
(Note that the above was + // tested WITHOUT the `/EHr` switch being used at compile + // time, so MSVC may have "optimized" out important + // checking. Using that switch, we may be in a better + // place in terms of memory corruption.) But sometimes it + // can't be caught here at all, which is confusing but not + // terribly surprising; so again, the G_NOEXCEPT_WIN32 + // plus "/EHr". + // + // Hopefully the basic C stdlib is still functional enough + // for us to at least print an error. + // + // It gets more complicated than that, though, on some + // platforms, specifically at least Linux/gcc/libstdc++. They use + // an exception to unwind the stack when a background + // thread exits. (See comments about noexcept.) So this + // may not actually represent anything untoward. On those + // platforms we allow throws of this to propagate, or + // attempt to anyway. +# if defined(WIN32) || defined(_WIN32) + Py_FatalError( + "greenlet: Unhandled C++ exception from a greenlet run function. " + "Because memory is likely corrupted, terminating process."); + std::abort(); +#else + throw; +#endif + } + } + // These lines may run arbitrary code + args.CLEAR(); + Py_CLEAR(run); + + if (!result + && mod_globs->PyExc_GreenletExit.PyExceptionMatches() + && (this->args())) { + // This can happen, for example, if our only reference + // goes away after we switch back to the parent. + // See test_dealloc_switch_args_not_lost + PyErrPieces clear_error; + result <<= this->args(); + result = single_result(result); + } + this->release_args(); + this->python_state.did_finish(PyThreadState_GET()); + + result = g_handle_exit(result); + assert(this->thread_state()->borrow_current() == this->_self); + + /* jump back to parent */ + this->stack_state.set_inactive(); /* dead */ + + + // TODO: Can we decref some things here? Release our main greenlet + // and maybe parent? + for (Greenlet* parent = this->_parent; + parent; + parent = parent->parent()) { + // We need to somewhere consume a reference to + // the result; in most cases we'll never have control + // back in this stack frame again. Calling + // green_switch actually adds another reference! + // This would probably be clearer with a specific API + // to hand results to the parent. + parent->args() <<= result; + assert(!result); + // The parent greenlet now owns the result; in the + // typical case we'll never get back here to assign to + // result and thus release the reference. + try { + result = parent->g_switch(); + } + catch (const PyErrOccurred&) { + // Ignore, keep passing the error on up. + } + + /* Return here means switch to parent failed, + * in which case we throw *current* exception + * to the next parent in chain. + */ + assert(!result); + } + /* We ran out of parents, cannot continue */ + PyErr_WriteUnraisable(this->self().borrow_o()); + Py_FatalError("greenlet: ran out of parent greenlets while propagating exception; " + "cannot continue"); + std::abort(); +} + +void +UserGreenlet::run(const BorrowedObject nrun) +{ + if (this->started()) { + throw AttributeError( + "run cannot be set " + "after the start of the greenlet"); + } + this->_run_callable = nrun; +} + +const OwnedGreenlet +UserGreenlet::parent() const +{ + return this->_parent; +} + +void +UserGreenlet::parent(const BorrowedObject raw_new_parent) +{ + if (!raw_new_parent) { + throw AttributeError("can't delete attribute"); + } + + BorrowedMainGreenlet main_greenlet_of_new_parent; + BorrowedGreenlet new_parent(raw_new_parent.borrow()); // could + // throw + // TypeError! 
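The validation loop that follows rejects any reparenting that would create a cycle by walking the proposed parent chain before committing. A minimal standalone analogue, with a hypothetical Node type and std::invalid_argument in place of the Python ValueError raised here:

    #include <cstdio>
    #include <stdexcept>

    struct Node {
        Node* parent = nullptr;
    };

    void set_parent(Node* self, Node* new_parent) {
        // Walk the proposed ancestry; meeting ourselves means a cycle.
        for (Node* p = new_parent; p; p = p->parent) {
            if (p == self) {
                throw std::invalid_argument("cyclic parent chain");
            }
        }
        self->parent = new_parent;
    }

    int main() {
        Node a, b;
        set_parent(&b, &a);      // fine: b's parent is a
        try {
            set_parent(&a, &b);  // would create a -> b -> a
        }
        catch (const std::invalid_argument& e) {
            std::printf("rejected: %s\n", e.what());
        }
        return 0;
    }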
+ for (BorrowedGreenlet p = new_parent; p; p = p->parent()) { + if (p == this->self()) { + throw ValueError("cyclic parent chain"); + } + main_greenlet_of_new_parent = p->main_greenlet(); + } + + if (!main_greenlet_of_new_parent) { + throw ValueError("parent must not be garbage collected"); + } + + if (this->started() + && this->_main_greenlet != main_greenlet_of_new_parent) { + throw ValueError("parent cannot be on a different thread"); + } + + this->_parent = new_parent; +} + +void +UserGreenlet::murder_in_place() +{ + this->_main_greenlet.CLEAR(); + Greenlet::murder_in_place(); +} + +bool +UserGreenlet::belongs_to_thread(const ThreadState* thread_state) const +{ + return Greenlet::belongs_to_thread(thread_state) && this->_main_greenlet == thread_state->borrow_main_greenlet(); +} + + +int +UserGreenlet::tp_traverse(visitproc visit, void* arg) +{ + Py_VISIT(this->_parent.borrow_o()); + Py_VISIT(this->_main_greenlet.borrow_o()); + Py_VISIT(this->_run_callable.borrow_o()); + + return Greenlet::tp_traverse(visit, arg); +} + +int +UserGreenlet::tp_clear() +{ + Greenlet::tp_clear(); + this->_parent.CLEAR(); + this->_main_greenlet.CLEAR(); + this->_run_callable.CLEAR(); + return 0; +} + +UserGreenlet::ParentIsCurrentGuard::ParentIsCurrentGuard(UserGreenlet* p, + const ThreadState& thread_state) + : oldparent(p->_parent), + greenlet(p) +{ + p->_parent = thread_state.get_current(); +} + +UserGreenlet::ParentIsCurrentGuard::~ParentIsCurrentGuard() +{ + this->greenlet->_parent = oldparent; + oldparent.CLEAR(); +} + +}; //namespace greenlet +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/__init__.py b/venv/lib/python3.12/site-packages/greenlet/__init__.py new file mode 100644 index 0000000..6401497 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +The root of the greenlet package. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +__all__ = [ + '__version__', + '_C_API', + + 'GreenletExit', + 'error', + + 'getcurrent', + 'greenlet', + + 'gettrace', + 'settrace', +] + +# pylint:disable=no-name-in-module + +### +# Metadata +### +__version__ = '3.2.4' +from ._greenlet import _C_API # pylint:disable=no-name-in-module + +### +# Exceptions +### +from ._greenlet import GreenletExit +from ._greenlet import error + +### +# greenlets +### +from ._greenlet import getcurrent +from ._greenlet import greenlet + +### +# tracing +### +try: + from ._greenlet import gettrace + from ._greenlet import settrace +except ImportError: + # Tracing wasn't supported. + # XXX: The option to disable it was removed in 1.0, + # so this branch should be dead code. + pass + +### +# Constants +# These constants aren't documented and aren't recommended. +# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS +# is the same as ``sys.version_info[:2] >= 3.7`` +### +from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import + +# Controlling the use of the gc module. Provisional API for this greenlet +# implementation in 2.0. 
+from ._greenlet import CLOCKS_PER_SEC # pylint:disable=unused-import
+from ._greenlet import enable_optional_cleanup # pylint:disable=unused-import
+from ._greenlet import get_clocks_used_doing_optional_cleanup # pylint:disable=unused-import
+
+# Other APIs in the _greenlet module are for test support.
diff --git a/venv/lib/python3.12/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..7d650f5
Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so
new file mode 100644
index 0000000..2f6cc1e
Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/_greenlet.cpython-312-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet.cpp b/venv/lib/python3.12/site-packages/greenlet/greenlet.cpp
new file mode 100644
index 0000000..e8d92a0
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/greenlet.cpp
@@ -0,0 +1,320 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/* Format with:
+ *  clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#include
+#include
+#include
+#include
+
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "structmember.h" // PyMemberDef
+
+#include "greenlet_internal.hpp"
+// Code after this point can assume access to things declared in stdint.h,
+// including the fixed-width types. This goes for the platform-specific switch functions
+// as well.
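The block of includes that follows pulls entire .cpp files into this one file: the extension builds as a single translation unit, so internal-linkage helpers and template definitions are shared without any link-time coordination. A minimal sketch of that layout, with hypothetical file names (two files shown in one listing; compile only main.cpp):

    // helper_impl.cpp -- never listed in the build; only ever #included.
    static int helper() { return 40; }

    // main.cpp -- the single real translation unit, like greenlet.cpp here.
    #include "helper_impl.cpp"  // textual inclusion: helper() is visible,
    #include <cstdio>           // even though it has internal linkage.

    int main() {
        std::printf("%d\n", helper() + 2);  // prints 42
        return 0;
    }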
+#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenletGlobals.cpp" + +#include "TGreenlet.cpp" +#include "TMainGreenlet.cpp" +#include "TUserGreenlet.cpp" +#include "TBrokenGreenlet.cpp" +#include "TExceptionState.cpp" +#include "TPythonState.cpp" +#include "TStackState.cpp" + +#include "TThreadState.hpp" +#include "TThreadStateCreator.hpp" +#include "TThreadStateDestroy.cpp" + +#include "PyGreenlet.cpp" +#include "PyGreenletUnswitchable.cpp" +#include "CObjects.cpp" + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + + +// ******* Implementation of things from included files +template +greenlet::refs::_BorrowedGreenlet& greenlet::refs::_BorrowedGreenlet::operator=(const greenlet::refs::BorrowedObject& other) +{ + this->_set_raw_pointer(static_cast(other)); + return *this; +} + +template +inline greenlet::refs::_BorrowedGreenlet::operator Greenlet*() const noexcept +{ + if (!this->p) { + return nullptr; + } + return reinterpret_cast(this->p)->pimpl; +} + +template +greenlet::refs::_BorrowedGreenlet::_BorrowedGreenlet(const BorrowedObject& p) + : BorrowedReference(nullptr) +{ + + this->_set_raw_pointer(p.borrow()); +} + +template +inline greenlet::refs::_OwnedGreenlet::operator Greenlet*() const noexcept +{ + if (!this->p) { + return nullptr; + } + return reinterpret_cast(this->p)->pimpl; +} + + + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wmissing-field-initializers" +# pragma clang diagnostic ignored "-Wwritable-strings" +#elif defined(__GNUC__) +# pragma GCC diagnostic push +// warning: ISO C++ forbids converting a string constant to ‘char*’ +// (The python APIs aren't const correct and accept writable char*) +# pragma GCC diagnostic ignored "-Wwrite-strings" +#endif + + +/*********************************************************** + +A PyGreenlet is a range of C stack addresses that must be +saved and restored in such a way that the full range of the +stack contains valid data when we switch to it. + +Stack layout for a greenlet: + + | ^^^ | + | older data | + | | + stack_stop . |_______________| + . | | + . | greenlet data | + . | in stack | + . * |_______________| . . _____________ stack_copy + stack_saved + . | | | | + . | data | |greenlet data| + . | unrelated | | saved | + . | to | | in heap | + stack_start . | this | . . |_____________| stack_copy + | greenlet | + | | + | newer data | + | vvv | + + +Note that a greenlet's stack data is typically partly at its correct +place in the stack, and partly saved away in the heap, but always in +the above configuration: two blocks, the more recent one in the heap +and the older one still in the stack (either block may be empty). + +Greenlets are chained: each points to the previous greenlet, which is +the one that owns the data currently in the C stack above my +stack_stop. The currently running greenlet is the first element of +this chain. The main (initial) greenlet is the last one. Greenlets +whose stack is entirely in the heap can be skipped from the chain. 
+ +The chain is not related to execution order, but only to the order +in which bits of C stack happen to belong to greenlets at a particular +point in time. + +The main greenlet doesn't have a stack_stop: it is responsible for the +complete rest of the C stack, and we don't know where it begins. We +use (char*) -1, the largest possible address. + +States: + stack_stop == NULL && stack_start == NULL: did not start yet + stack_stop != NULL && stack_start == NULL: already finished + stack_stop != NULL && stack_start != NULL: active + +The running greenlet's stack_start is undefined but not NULL. + + ***********************************************************/ + + + + +/***********************************************************/ + +/* Some functions must not be inlined: + * slp_restore_state, when inlined into slp_switch might cause + it to restore stack over its own local variables + * slp_save_state, when inlined would add its own local + variables to the saved stack, wasting space + * slp_switch, cannot be inlined for obvious reasons + * g_initialstub, when inlined would receive a pointer into its + own stack frame, leading to incomplete stack save/restore + +g_initialstub is a member function and declared virtual so that the +compiler always calls it through a vtable. + +slp_save_state and slp_restore_state are also member functions. They +are called from trampoline functions that themselves are declared as +not eligible for inlining. +*/ + +extern "C" { +static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref) +{ + return switching_thread_state->slp_save_state(stackref); +} +static void GREENLET_NOINLINE(slp_restore_state_trampoline)() +{ + switching_thread_state->slp_restore_state(); +} +} + + +/***********************************************************/ + + +#include "PyModule.cpp" + + + +static PyObject* +greenlet_internal_mod_init() noexcept +{ + static void* _PyGreenlet_API[PyGreenlet_API_pointers]; + + try { + CreatedModule m(greenlet_module_def); + + Require(PyType_Ready(&PyGreenlet_Type)); + Require(PyType_Ready(&PyGreenletUnswitchable_Type)); + + mod_globs = new greenlet::GreenletGlobals; + ThreadState::init(); + + m.PyAddObject("greenlet", PyGreenlet_Type); + m.PyAddObject("UnswitchableGreenlet", PyGreenletUnswitchable_Type); + m.PyAddObject("error", mod_globs->PyExc_GreenletError); + m.PyAddObject("GreenletExit", mod_globs->PyExc_GreenletExit); + + m.PyAddObject("GREENLET_USE_GC", 1); + m.PyAddObject("GREENLET_USE_TRACING", 1); + m.PyAddObject("GREENLET_USE_CONTEXT_VARS", 1L); + m.PyAddObject("GREENLET_USE_STANDARD_THREADING", 1L); + + OwnedObject clocks_per_sec = OwnedObject::consuming(PyLong_FromSsize_t(CLOCKS_PER_SEC)); + m.PyAddObject("CLOCKS_PER_SEC", clocks_per_sec); + + /* also publish module-level data as attributes of the greentype. */ + // XXX: This is weird, and enables a strange pattern of + // confusing the class greenlet with the module greenlet; with + // the exception of (possibly) ``getcurrent()``, this + // shouldn't be encouraged so don't add new items here. 
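The loop right below implements the pattern this comment describes: fetch an attribute from the module, then publish it on the type's dictionary. The same pattern, reduced to a hedged standalone fragment (hypothetical names; assumes PyType_Ready already succeeded on the type, and compresses error handling to early returns):

    #define PY_SSIZE_T_CLEAN
    #include <Python.h>

    static const char* const copy_names[] = {"error", "GreenletExit", NULL};

    static int
    copy_module_attrs_to_type(PyObject* module, PyTypeObject* type)
    {
        for (const char* const* p = copy_names; *p; p++) {
            PyObject* value = PyObject_GetAttrString(module, *p);  // new reference
            if (!value) {
                return -1;
            }
            int rc = PyDict_SetItemString(type->tp_dict, *p, value);
            Py_DECREF(value);  // tp_dict holds its own reference now
            if (rc < 0) {
                return -1;
            }
        }
        return 0;
    }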
+ for (const char* const* p = copy_on_greentype; *p; p++) { + OwnedObject o = m.PyRequireAttr(*p); + PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o.borrow()); + } + + /* + * Expose C API + */ + + /* types */ + _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type; + + /* exceptions */ + _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)mod_globs->PyExc_GreenletError; + _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)mod_globs->PyExc_GreenletExit; + + /* methods */ + _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New; + _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent; + _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw; + _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch; + _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent; + + /* Previously macros, but now need to be functions externally. */ + _PyGreenlet_API[PyGreenlet_MAIN_NUM] = (void*)Extern_PyGreenlet_MAIN; + _PyGreenlet_API[PyGreenlet_STARTED_NUM] = (void*)Extern_PyGreenlet_STARTED; + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM] = (void*)Extern_PyGreenlet_ACTIVE; + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM] = (void*)Extern_PyGreenlet_GET_PARENT; + + /* XXX: Note that our module name is ``greenlet._greenlet``, but for + backwards compatibility with existing C code, we need the _C_API to + be directly in greenlet. + */ + const NewReference c_api_object(Require( + PyCapsule_New( + (void*)_PyGreenlet_API, + "greenlet._C_API", + NULL))); + m.PyAddObject("_C_API", c_api_object); + assert(c_api_object.REFCNT() == 2); + + // cerr << "Sizes:" + // << "\n\tGreenlet : " << sizeof(Greenlet) + // << "\n\tUserGreenlet : " << sizeof(UserGreenlet) + // << "\n\tMainGreenlet : " << sizeof(MainGreenlet) + // << "\n\tExceptionState : " << sizeof(greenlet::ExceptionState) + // << "\n\tPythonState : " << sizeof(greenlet::PythonState) + // << "\n\tStackState : " << sizeof(greenlet::StackState) + // << "\n\tSwitchingArgs : " << sizeof(greenlet::SwitchingArgs) + // << "\n\tOwnedObject : " << sizeof(greenlet::refs::OwnedObject) + // << "\n\tBorrowedObject : " << sizeof(greenlet::refs::BorrowedObject) + // << "\n\tPyGreenlet : " << sizeof(PyGreenlet) + // << endl; + + return m.borrow(); // But really it's the main reference. + } + catch (const LockInitError& e) { + PyErr_SetString(PyExc_MemoryError, e.what()); + return NULL; + } + catch (const PyErrOccurred&) { + return NULL; + } + +} + +extern "C" { + +PyMODINIT_FUNC +PyInit__greenlet(void) +{ + return greenlet_internal_mod_init(); +} + +}; // extern C + +#ifdef __clang__ +# pragma clang diagnostic pop +#elif defined(__GNUC__) +# pragma GCC diagnostic pop +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet.h b/venv/lib/python3.12/site-packages/greenlet/greenlet.h new file mode 100644 index 0000000..d02a16e --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet.h @@ -0,0 +1,164 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. 
*/ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_allocator.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_allocator.hpp new file mode 100644 index 0000000..dc2b969 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_allocator.hpp @@ -0,0 +1,89 @@ +#ifndef GREENLET_ALLOCATOR_HPP +#define GREENLET_ALLOCATOR_HPP + +#define PY_SSIZE_T_CLEAN +#include +#include +#include "greenlet_compiler_compat.hpp" +#include "greenlet_cpython_compat.hpp" + + +namespace greenlet +{ +#if defined(Py_GIL_DISABLED) +// Python on free threaded builds says this +// (https://docs.python.org/3/howto/free-threading-extensions.html#memory-allocation-apis): +// +// For thread-safety, the free-threaded build requires that only +// Python objects are allocated using the object domain, and that all +// Python object are allocated using that domain. +// +// This turns out to be important because the GC implementation on +// free threaded Python uses internal mimalloc APIs to find allocated +// objects. If we allocate non-PyObject objects using that API, then +// Bad Things could happen, including crashes and improper results. +// So in that case, we revert to standard C++ allocation. + + template + struct PythonAllocator : public std::allocator { + // This member is deprecated in C++17 and removed in C++20 + template< class U > + struct rebind { + typedef PythonAllocator other; + }; + }; + +#else + // This allocator is stateless; all instances are identical. + // It can *ONLY* be used when we're sure we're holding the GIL + // (Python's allocators require the GIL). + template + struct PythonAllocator : public std::allocator { + + PythonAllocator(const PythonAllocator& UNUSED(other)) + : std::allocator() + { + } + + PythonAllocator(const std::allocator other) + : std::allocator(other) + {} + + template + PythonAllocator(const std::allocator& other) + : std::allocator(other) + { + } + + PythonAllocator() : std::allocator() {} + + T* allocate(size_t number_objects, const void* UNUSED(hint)=0) + { + void* p; + if (number_objects == 1) + p = PyObject_Malloc(sizeof(T)); + else + p = PyMem_Malloc(sizeof(T) * number_objects); + return static_cast(p); + } + + void deallocate(T* t, size_t n) + { + void* p = t; + if (n == 1) { + PyObject_Free(p); + } + else + PyMem_Free(p); + } + // This member is deprecated in C++17 and removed in C++20 + template< class U > + struct rebind { + typedef PythonAllocator other; + }; + + }; +#endif // allocator type +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_compiler_compat.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_compiler_compat.hpp new file mode 100644 index 0000000..af24bd8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_compiler_compat.hpp @@ -0,0 +1,98 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_COMPILER_COMPAT_HPP +#define GREENLET_COMPILER_COMPAT_HPP + +/** + * Definitions to aid with compatibility with different compilers. + * + * .. caution:: Use extreme care with noexcept. + * Some compilers and runtimes, specifically gcc/libgcc/libstdc++ on + * Linux, implement stack unwinding by throwing an uncatchable + * exception, one that specifically does not appear to be an active + * exception to the rest of the runtime. 
If this happens while we're in a noexcept function, + * we have violated our dynamic exception contract, and so the runtime + * will call std::terminate(), which kills the process with the + * unhelpful message "terminate called without an active exception". + * + * This has happened in this scenario: A background thread is running + * a greenlet that has made a native call and released the GIL. + * Meanwhile, the main thread finishes and starts shutting down the + * interpreter. When the background thread is scheduled again and + * attempts to obtain the GIL, it notices that the interpreter is + * exiting and calls ``pthread_exit()``. This in turn starts to unwind + * the stack by throwing that exception. But we had the ``PyCall`` + * functions annotated as noexcept, so the runtime terminated us. + * + * #2 0x00007fab26fec2b7 in std::terminate() () from /lib/x86_64-linux-gnu/libstdc++.so.6 + * #3 0x00007fab26febb3c in __gxx_personality_v0 () from /lib/x86_64-linux-gnu/libstdc++.so.6 + * #4 0x00007fab26f34de6 in ?? () from /lib/x86_64-linux-gnu/libgcc_s.so.1 + * #6 0x00007fab276a34c6 in __GI___pthread_unwind at ./nptl/unwind.c:130 + * #7 0x00007fab2769bd3a in __do_cancel () at ../sysdeps/nptl/pthreadP.h:280 + * #8 __GI___pthread_exit (value=value@entry=0x0) at ./nptl/pthread_exit.c:36 + * #9 0x000000000052e567 in PyThread_exit_thread () at ../Python/thread_pthread.h:370 + * #10 0x00000000004d60b5 in take_gil at ../Python/ceval_gil.h:224 + * #11 0x00000000004d65f9 in PyEval_RestoreThread at ../Python/ceval.c:467 + * #12 0x000000000060cce3 in setipaddr at ../Modules/socketmodule.c:1203 + * #13 0x00000000006101cd in socket_gethostbyname + */ + +#include + +# define G_NO_COPIES_OF_CLS(Cls) private: \ + Cls(const Cls& other) = delete; \ + Cls& operator=(const Cls& other) = delete + +# define G_NO_ASSIGNMENT_OF_CLS(Cls) private: \ + Cls& operator=(const Cls& other) = delete + +# define G_NO_COPY_CONSTRUCTOR_OF_CLS(Cls) private: \ + Cls(const Cls& other) = delete; + + +// CAUTION: MSVC is stupidly picky: +// +// "The compiler ignores, without warning, any __declspec keywords +// placed after * or & and in front of the variable identifier in a +// declaration." +// (https://docs.microsoft.com/en-us/cpp/cpp/declspec?view=msvc-160) +// +// So pointer return types must be handled differently (because of the +// trailing *), or you get inscrutable compiler warnings like "error +// C2059: syntax error: ''" +// +// In C++ 11, there is a standard syntax for attributes, and +// GCC defines an attribute to use with this: [[gnu:noinline]]. +// In the future, this is expected to become standard. + +#if defined(__GNUC__) || defined(__clang__) +/* We used to check for GCC 4+ or 3.4+, but those compilers are + laughably out of date. Just assume they support it. */ +# define GREENLET_NOINLINE(name) __attribute__((noinline)) name +# define GREENLET_NOINLINE_P(rtype, name) rtype __attribute__((noinline)) name +# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) +#elif defined(_MSC_VER) +/* We used to check for && (_MSC_VER >= 1300) but that's also out of date. */ +# define GREENLET_NOINLINE(name) __declspec(noinline) name +# define GREENLET_NOINLINE_P(rtype, name) __declspec(noinline) rtype name +# define UNUSED(x) UNUSED_ ## x +#endif + +#if defined(_MSC_VER) +# define G_NOEXCEPT_WIN32 noexcept +#else +# define G_NOEXCEPT_WIN32 +#endif + +#if defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) +// 32-bit PPC/MacOSX. 
Only known to be tested on unreleased versions +// of macOS 10.6 using a macports build gcc 14. It appears that +// running C++ destructors of thread-local variables is broken. + +// See https://github.com/python-greenlet/greenlet/pull/419 +# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 1 +#else +# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 0 +#endif + + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_compat.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_compat.hpp new file mode 100644 index 0000000..a3b3850 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_cpython_compat.hpp @@ -0,0 +1,150 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_CPYTHON_COMPAT_H +#define GREENLET_CPYTHON_COMPAT_H + +/** + * Helpers for compatibility with multiple versions of CPython. + */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" + + +#if PY_VERSION_HEX >= 0x30A00B1 +# define GREENLET_PY310 1 +#else +# define GREENLET_PY310 0 +#endif + +/* +Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. +See https://github.com/python/cpython/pull/25276 +We have to save and restore this as well. + +Python 3.13 removed PyThreadState.cframe (GH-108035). +*/ +#if GREENLET_PY310 && PY_VERSION_HEX < 0x30D0000 +# define GREENLET_USE_CFRAME 1 +#else +# define GREENLET_USE_CFRAME 0 +#endif + + +#if PY_VERSION_HEX >= 0x30B00A4 +/* +Greenlet won't compile on anything older than Python 3.11 alpha 4 (see +https://bugs.python.org/issue46090). Summary of breaking internal changes: +- Python 3.11 alpha 1 changed how frame objects are represented internally. + - https://github.com/python/cpython/pull/30122 +- Python 3.11 alpha 3 changed how recursion limits are stored. + - https://github.com/python/cpython/pull/29524 +- Python 3.11 alpha 4 changed how exception state is stored. It also includes a + change to help greenlet save and restore the interpreter frame "data stack". + - https://github.com/python/cpython/pull/30122 + - https://github.com/python/cpython/pull/30234 +*/ +# define GREENLET_PY311 1 +#else +# define GREENLET_PY311 0 +#endif + + +#if PY_VERSION_HEX >= 0x30C0000 +# define GREENLET_PY312 1 +#else +# define GREENLET_PY312 0 +#endif + +#if PY_VERSION_HEX >= 0x30D0000 +# define GREENLET_PY313 1 +#else +# define GREENLET_PY313 0 +#endif + +#if PY_VERSION_HEX >= 0x30E0000 +# define GREENLET_PY314 1 +#else +# define GREENLET_PY314 0 +#endif + +#ifndef Py_SET_REFCNT +/* Py_REFCNT and Py_SIZE macros are converted to functions +https://bugs.python.org/issue39573 */ +# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) +#endif + +#ifdef _Py_DEC_REFTOTAL +# define GREENLET_Py_DEC_REFTOTAL _Py_DEC_REFTOTAL +#else +/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: + https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 + + The symbol we use to replace it was removed by at least 3.12. +*/ +# ifdef Py_REF_DEBUG +# if GREENLET_PY312 +# define GREENLET_Py_DEC_REFTOTAL +# else +# define GREENLET_Py_DEC_REFTOTAL _Py_RefTotal-- +# endif +# else +# define GREENLET_Py_DEC_REFTOTAL +# endif +#endif +// Define these flags like Cython does if we're on an old version. 
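The flag definitions that follow continue the idiom used for GREENLET_PY310 through GREENLET_PY314 above: compare PY_VERSION_HEX once, name the result, and branch on the readable macro everywhere else. A minimal sketch with a hypothetical MYEXT_PY312 macro:

    #define PY_SSIZE_T_CLEAN
    #include <Python.h>

    #if PY_VERSION_HEX >= 0x30C0000   /* any Python 3.12 */
    #  define MYEXT_PY312 1
    #else
    #  define MYEXT_PY312 0
    #endif

    const char*
    which_code_path(void)
    {
    #if MYEXT_PY312
        return "3.12+ code path";
    #else
        return "pre-3.12 code path";
    #endif
    }

Defining the macro to 0 or 1 (rather than leaving it undefined) keeps later #if checks uniform and avoids silent typos that an #ifdef test would not catch.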
+#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif + +#ifndef Py_TPFLAGS_HAVE_VERSION_TAG + #define Py_TPFLAGS_HAVE_VERSION_TAG 0 +#endif + +#define G_TPFLAGS_DEFAULT Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VERSION_TAG | Py_TPFLAGS_CHECKTYPES | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_HAVE_GC + + +#if PY_VERSION_HEX < 0x03090000 +// The official version only became available in 3.9 +# define PyObject_GC_IsTracked(o) _PyObject_GC_IS_TRACKED(o) +#endif + + +// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 +#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) +static inline void PyThreadState_EnterTracing(PyThreadState *tstate) +{ + tstate->tracing++; +#if PY_VERSION_HEX >= 0x030A00A1 + tstate->cframe->use_tracing = 0; +#else + tstate->use_tracing = 0; +#endif +} +#endif + +// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 +#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) +static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) +{ + tstate->tracing--; + int use_tracing = (tstate->c_tracefunc != NULL + || tstate->c_profilefunc != NULL); +#if PY_VERSION_HEX >= 0x030A00A1 + tstate->cframe->use_tracing = use_tracing; +#else + tstate->use_tracing = use_tracing; +#endif +} +#endif + +#if !defined(Py_C_RECURSION_LIMIT) && defined(C_RECURSION_LIMIT) +# define Py_C_RECURSION_LIMIT C_RECURSION_LIMIT +#endif + +#endif /* GREENLET_CPYTHON_COMPAT_H */ diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_exceptions.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_exceptions.hpp new file mode 100644 index 0000000..617f07c --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_exceptions.hpp @@ -0,0 +1,171 @@ +#ifndef GREENLET_EXCEPTIONS_HPP +#define GREENLET_EXCEPTIONS_HPP + +#define PY_SSIZE_T_CLEAN +#include +#include +#include + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +namespace greenlet { + + class PyErrOccurred : public std::runtime_error + { + public: + + // CAUTION: In debug builds, may run arbitrary Python code. + static const PyErrOccurred + from_current() + { + assert(PyErr_Occurred()); +#ifndef NDEBUG + // This is not exception safe, and + // not necessarily safe in general (what if it switches?) + // But we only do this in debug mode, where we are in + // tight control of what exceptions are getting raised and + // can prevent those issues. + + // You can't call PyObject_Str with a pending exception. + PyObject* typ; + PyObject* val; + PyObject* tb; + + PyErr_Fetch(&typ, &val, &tb); + PyObject* typs = PyObject_Str(typ); + PyObject* vals = PyObject_Str(val ? 
val : typ); + const char* typ_msg = PyUnicode_AsUTF8(typs); + const char* val_msg = PyUnicode_AsUTF8(vals); + PyErr_Restore(typ, val, tb); + + std::string msg(typ_msg); + msg += ": "; + msg += val_msg; + PyErrOccurred ex(msg); + Py_XDECREF(typs); + Py_XDECREF(vals); + + return ex; +#else + return PyErrOccurred(); +#endif + } + + PyErrOccurred() : std::runtime_error("") + { + assert(PyErr_Occurred()); + } + + PyErrOccurred(const std::string& msg) : std::runtime_error(msg) + { + assert(PyErr_Occurred()); + } + + PyErrOccurred(PyObject* exc_kind, const char* const msg) + : std::runtime_error(msg) + { + PyErr_SetString(exc_kind, msg); + } + + PyErrOccurred(PyObject* exc_kind, const std::string msg) + : std::runtime_error(msg) + { + // This copies the c_str, so we don't have any lifetime + // issues to worry about. + PyErr_SetString(exc_kind, msg.c_str()); + } + + PyErrOccurred(PyObject* exc_kind, + const std::string msg, //This is the format + //string; that's not + //usually safe! + + PyObject* borrowed_obj_one, PyObject* borrowed_obj_two) + : std::runtime_error(msg) + { + + //This is designed specifically for the + //``check_switch_allowed`` function. + + // PyObject_Str and PyObject_Repr are safe to call with + // NULL pointers; they return the string "" in that + // case. + // This function always returns null. + PyErr_Format(exc_kind, + msg.c_str(), + borrowed_obj_one, borrowed_obj_two); + } + }; + + class TypeError : public PyErrOccurred + { + public: + TypeError(const char* const what) + : PyErrOccurred(PyExc_TypeError, what) + { + } + TypeError(const std::string what) + : PyErrOccurred(PyExc_TypeError, what) + { + } + }; + + class ValueError : public PyErrOccurred + { + public: + ValueError(const char* const what) + : PyErrOccurred(PyExc_ValueError, what) + { + } + }; + + class AttributeError : public PyErrOccurred + { + public: + AttributeError(const char* const what) + : PyErrOccurred(PyExc_AttributeError, what) + { + } + }; + + /** + * Calls `Py_FatalError` when constructed, so you can't actually + * throw this. It just makes static analysis easier. + */ + class PyFatalError : public std::runtime_error + { + public: + PyFatalError(const char* const msg) + : std::runtime_error(msg) + { + Py_FatalError(msg); + } + }; + + static inline PyObject* + Require(PyObject* p, const std::string& msg="") + { + if (!p) { + throw PyErrOccurred(msg); + } + return p; + }; + + static inline void + Require(const int retval) + { + if (retval < 0) { + throw PyErrOccurred(); + } + }; + + +}; +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_internal.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_internal.hpp new file mode 100644 index 0000000..f2b15d5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_internal.hpp @@ -0,0 +1,107 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_INTERNAL_H +#define GREENLET_INTERNAL_H +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +/** + * Implementation helpers. + * + * C++ templates and inline functions should go here. 
+ */
+#define PY_SSIZE_T_CLEAN
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_cpython_compat.hpp"
+#include "greenlet_exceptions.hpp"
+#include "TGreenlet.hpp"
+#include "greenlet_allocator.hpp"
+
+#include <vector>
+#include <string>
+
+#define GREENLET_MODULE
+struct _greenlet;
+typedef struct _greenlet PyGreenlet;
+namespace greenlet {
+
+    class ThreadState;
+    // We can't use the PythonAllocator for this, because we push to it
+    // from the thread state destructor, which doesn't have the GIL,
+    // and Python's allocators can only be called with the GIL.
+    typedef std::vector<PyGreenlet*> cleanup_queue_t;
+
+};
+
+
+#define implementation_ptr_t greenlet::Greenlet*
+
+
+#include "greenlet.h"
+
+void
+greenlet::refs::MainGreenletExactChecker(void *p)
+{
+    if (!p) {
+        return;
+    }
+    // We control the class of the main greenlet exactly.
+    if (Py_TYPE(p) != &PyGreenlet_Type) {
+        std::string err("MainGreenlet: Expected exactly a greenlet, not a ");
+        err += Py_TYPE(p)->tp_name;
+        throw greenlet::TypeError(err);
+    }
+
+    // Greenlets from dead threads no longer respond to main() with a
+    // true value; so in that case we need to perform an additional
+    // check.
+    Greenlet* g = static_cast<PyGreenlet*>(p)->pimpl;
+    if (g->main()) {
+        return;
+    }
+    if (!dynamic_cast<MainGreenlet*>(g)) {
+        std::string err("MainGreenlet: Expected exactly a main greenlet, not a ");
+        err += Py_TYPE(p)->tp_name;
+        throw greenlet::TypeError(err);
+    }
+}
+
+
+
+template <typename T, greenlet::refs::TypeChecker TC>
+inline greenlet::Greenlet* greenlet::refs::_OwnedGreenlet<T, TC>::operator->() const noexcept
+{
+    return reinterpret_cast<PyGreenlet*>(this->p)->pimpl;
+}
+
+template <typename T, greenlet::refs::TypeChecker TC>
+inline greenlet::Greenlet* greenlet::refs::_BorrowedGreenlet<T, TC>::operator->() const noexcept
+{
+    return reinterpret_cast<PyGreenlet*>(this->p)->pimpl;
+}
+
+#include
+#include
+
+
+extern PyTypeObject PyGreenlet_Type;
+
+
+
+/**
+ * Forward declarations needed in multiple files.
+ */
+static PyObject* green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs);
+
+
+#ifdef __clang__
+#    pragma clang diagnostic pop
+#endif
+
+
+#endif
+
+// Local Variables:
+// flycheck-clang-include-path: ("../../include" "/opt/local/Library/Frameworks/Python.framework/Versions/3.10/include/python3.10")
+// End:
diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_msvc_compat.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_msvc_compat.hpp
new file mode 100644
index 0000000..c00245b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_msvc_compat.hpp
@@ -0,0 +1,91 @@
+#ifndef GREENLET_MSVC_COMPAT_HPP
+#define GREENLET_MSVC_COMPAT_HPP
+/*
+ * Support for MSVC on Windows.
+ *
+ * Beginning with Python 3.14, some of the internal
+ * include files we need are not compatible with MSVC
+ * in C++ mode:
+ *
+ *   internal\pycore_stackref.h(253): error C4576: a parenthesized type
+ *   followed by an initializer list is a non-standard explicit type conversion syntax
+ *
+ * That header is included from ``internal/pycore_interpframe.h``, which
+ * we need for the ``_PyFrame_IsIncomplete`` API.
+ *
+ * Unfortunately, that API is a ``static inline`` function, as are a
+ * bunch of the functions it calls. The only solution seems to be to
+ * copy those definitions and the supporting inline functions here.
+ *
+ * Now, this makes us VERY fragile to changes in those functions. Because
+ * they're internal and static, the CPython devs might feel free to change
+ * them in even minor versions, meaning that we could runtime link and load,
+ * but still crash. We have that problem on all platforms though.
It's just worse + * here because we have to keep copying the updated definitions. + */ +#include +#include "greenlet_cpython_compat.hpp" + +// This file is only included on 3.14+ + +extern "C" { + +// pycore_code.h ---------------- +#define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive) +// End pycore_code.h ---------- + +// pycore_interpframe.h ---------- +#if !defined(Py_GIL_DISABLED) && defined(Py_STACKREF_DEBUG) + +#define Py_TAG_BITS 0 +#else +#define Py_TAG_BITS ((uintptr_t)1) +#define Py_TAG_DEFERRED (1) +#endif + + +static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED}; +#define PyStackRef_IsNull(stackref) ((stackref).bits == PyStackRef_NULL.bits) + +static inline PyObject * +PyStackRef_AsPyObjectBorrow(_PyStackRef stackref) +{ + PyObject *cleared = ((PyObject *)((stackref).bits & (~Py_TAG_BITS))); + return cleared; +} + +static inline PyCodeObject *_PyFrame_GetCode(_PyInterpreterFrame *f) { + assert(!PyStackRef_IsNull(f->f_executable)); + PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable); + assert(PyCode_Check(executable)); + return (PyCodeObject *)executable; +} + + +static inline _Py_CODEUNIT * +_PyFrame_GetBytecode(_PyInterpreterFrame *f) +{ +#ifdef Py_GIL_DISABLED + PyCodeObject *co = _PyFrame_GetCode(f); + _PyCodeArray *tlbc = _PyCode_GetTLBCArray(co); + assert(f->tlbc_index >= 0 && f->tlbc_index < tlbc->size); + return (_Py_CODEUNIT *)tlbc->entries[f->tlbc_index]; +#else + return _PyCode_CODE(_PyFrame_GetCode(f)); +#endif +} + +static inline bool //_Py_NO_SANITIZE_THREAD +_PyFrame_IsIncomplete(_PyInterpreterFrame *frame) +{ + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { + return true; + } + return frame->owner != FRAME_OWNED_BY_GENERATOR && + frame->instr_ptr < _PyFrame_GetBytecode(frame) + + _PyFrame_GetCode(frame)->_co_firsttraceable; +} +// pycore_interpframe.h ---------- + +} +#endif // GREENLET_MSVC_COMPAT_HPP diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_refs.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_refs.hpp new file mode 100644 index 0000000..b7e5e3f --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_refs.hpp @@ -0,0 +1,1118 @@ +#ifndef GREENLET_REFS_HPP +#define GREENLET_REFS_HPP + +#define PY_SSIZE_T_CLEAN +#include + +#include + +//#include "greenlet_internal.hpp" +#include "greenlet_compiler_compat.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_exceptions.hpp" + +struct _greenlet; +struct _PyMainGreenlet; + +typedef struct _greenlet PyGreenlet; +extern PyTypeObject PyGreenlet_Type; + + +#ifdef GREENLET_USE_STDIO +#include +using std::cerr; +using std::endl; +#endif + +namespace greenlet +{ + class Greenlet; + + namespace refs + { + // Type checkers throw a TypeError if the argument is not + // null, and isn't of the required Python type. + // (We can't use most of the defined type checkers + // like PyList_Check, etc, directly, because they are + // implemented as macros.) + typedef void (*TypeChecker)(void*); + + void + NoOpChecker(void*) + { + return; + } + + void + GreenletChecker(void *p) + { + if (!p) { + return; + } + + PyTypeObject* typ = Py_TYPE(p); + // fast, common path. 
(PyObject_TypeCheck is a macro or + // static inline function, and it also does a + // direct comparison of the type pointers, but its fast + // path only handles one type) + if (typ == &PyGreenlet_Type) { + return; + } + + if (!PyObject_TypeCheck(p, &PyGreenlet_Type)) { + std::string err("GreenletChecker: Expected any type of greenlet, not "); + err += Py_TYPE(p)->tp_name; + throw TypeError(err); + } + } + + void + MainGreenletExactChecker(void *p); + + template + class PyObjectPointer; + + template + class OwnedReference; + + + template + class BorrowedReference; + + typedef BorrowedReference BorrowedObject; + typedef OwnedReference OwnedObject; + + class ImmortalObject; + class ImmortalString; + + template + class _OwnedGreenlet; + + typedef _OwnedGreenlet OwnedGreenlet; + typedef _OwnedGreenlet OwnedMainGreenlet; + + template + class _BorrowedGreenlet; + + typedef _BorrowedGreenlet BorrowedGreenlet; + + void + ContextExactChecker(void *p) + { + if (!p) { + return; + } + if (!PyContext_CheckExact(p)) { + throw TypeError( + "greenlet context must be a contextvars.Context or None" + ); + } + } + + typedef OwnedReference OwnedContext; + } +} + +namespace greenlet { + + + namespace refs { + // A set of classes to make reference counting rules in python + // code explicit. + // + // Rules of use: + // (1) Functions returning a new reference that the caller of the + // function is expected to dispose of should return a + // ``OwnedObject`` object. This object automatically releases its + // reference when it goes out of scope. It works like a ``std::shared_ptr`` + // and can be copied or used as a function parameter (but don't do + // that). Note that constructing a ``OwnedObject`` from a + // PyObject* steals the reference. + // (2) Parameters to functions should be either a + // ``OwnedObject&``, or, more generally, a ``PyObjectPointer&``. + // If the function needs to create its own new reference, it can + // do so by copying to a local ``OwnedObject``. + // (3) Functions returning an existing pointer that is NOT + // incref'd, and which the caller MUST NOT decref, + // should return a ``BorrowedObject``. + + // XXX: The following two paragraphs do not hold for all platforms. + // Notably, 32-bit PPC Linux passes structs by reference, not by + // value, so this actually doesn't work. (Although that's the only + // platform that doesn't work on.) DO NOT ATTEMPT IT. The + // unfortunate consequence of that is that the slots which we + // *know* are already type safe will wind up calling the type + // checker function (when we had the slots accepting + // BorrowedGreenlet, this was bypassed), so this slows us down. + // TODO: Optimize this again. + + // For a class with a single pointer member, whose constructor + // does nothing but copy a pointer parameter into the member, and + // which can then be converted back to the pointer type, compilers + // generate code that's the same as just passing the pointer. + // That is, func(BorrowedObject x) called like ``PyObject* p = + // ...; f(p)`` has 0 overhead. Similarly, they "unpack" to the + // pointer type with 0 overhead. + // + // If there are no virtual functions, no complex inheritance (maybe?) and + // no destructor, these can be directly used as parameters in + // Python callbacks like tp_init: the layout is the same as a + // single pointer. Only subclasses with trivial constructors that + // do nothing but set the single pointer member are safe to use + // that way. 
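+        // An illustrative sketch of rules (1)-(3) above (editorial
+        // example; ``make_name`` and ``greet`` are hypothetical helpers,
+        // not part of this file):
+        //
+        //     OwnedObject make_name() {                    // rule (1)
+        //         return OwnedObject::consuming(PyUnicode_FromString("hi"));
+        //     }
+        //     void greet(const PyObjectPointer<>& name) {  // rule (2)
+        //         OwnedObject mine(name); // new reference, auto-released
+        //     }
+        //     // rule (3): a lookup whose result the caller must not
+        //     // decref would declare ``BorrowedObject`` as its return type.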
+ + + // This is the base class for things that can be done with a + // PyObject pointer. It assumes nothing about memory management. + // NOTE: Nothing is virtual, so subclasses shouldn't add new + // storage fields or try to override these methods. + template + class PyObjectPointer + { + public: + typedef T PyType; + protected: + T* p; + public: + PyObjectPointer(T* it=nullptr) : p(it) + { + TC(p); + } + + // We don't allow automatic casting to PyObject* at this + // level, because then we could be passed to Py_DECREF/INCREF, + // but we want nothing to do with memory management. If you + // know better, then you can use the get() method, like on a + // std::shared_ptr. Except we name it borrow() to clarify that + // if this is a reference-tracked object, the pointer you get + // back will go away when the object does. + // TODO: This should probably not exist here, but be moved + // down to relevant sub-types. + + T* borrow() const noexcept + { + return this->p; + } + + PyObject* borrow_o() const noexcept + { + return reinterpret_cast(this->p); + } + + T* operator->() const noexcept + { + return this->p; + } + + bool is_None() const noexcept + { + return this->p == Py_None; + } + + PyObject* acquire_or_None() const noexcept + { + PyObject* result = this->p ? reinterpret_cast(this->p) : Py_None; + Py_INCREF(result); + return result; + } + + explicit operator bool() const noexcept + { + return this->p != nullptr; + } + + bool operator!() const noexcept + { + return this->p == nullptr; + } + + Py_ssize_t REFCNT() const noexcept + { + return p ? Py_REFCNT(p) : -42; + } + + PyTypeObject* TYPE() const noexcept + { + return p ? Py_TYPE(p) : nullptr; + } + + inline OwnedObject PyStr() const noexcept; + inline const std::string as_str() const noexcept; + inline OwnedObject PyGetAttr(const ImmortalObject& name) const noexcept; + inline OwnedObject PyRequireAttr(const char* const name) const; + inline OwnedObject PyRequireAttr(const ImmortalString& name) const; + inline OwnedObject PyCall(const BorrowedObject& arg) const; + inline OwnedObject PyCall(PyGreenlet* arg) const ; + inline OwnedObject PyCall(PyObject* arg) const ; + // PyObject_Call(this, args, kwargs); + inline OwnedObject PyCall(const BorrowedObject args, + const BorrowedObject kwargs) const; + inline OwnedObject PyCall(const OwnedObject& args, + const OwnedObject& kwargs) const; + + protected: + void _set_raw_pointer(void* t) + { + TC(t); + p = reinterpret_cast(t); + } + void* _get_raw_pointer() const + { + return p; + } + }; + +#ifdef GREENLET_USE_STDIO + template + std::ostream& operator<<(std::ostream& os, const PyObjectPointer& s) + { + const std::type_info& t = typeid(s); + os << t.name() + << "(addr=" << s.borrow() + << ", refcnt=" << s.REFCNT() + << ", value=" << s.as_str() + << ")"; + + return os; + } +#endif + + template + inline bool operator==(const PyObjectPointer& lhs, const PyObject* const rhs) noexcept + { + return static_cast(lhs.borrow_o()) == static_cast(rhs); + } + + template + inline bool operator==(const PyObjectPointer& lhs, const PyObjectPointer& rhs) noexcept + { + return lhs.borrow_o() == rhs.borrow_o(); + } + + template + inline bool operator!=(const PyObjectPointer& lhs, + const PyObjectPointer& rhs) noexcept + { + return lhs.borrow_o() != rhs.borrow_o(); + } + + template + class OwnedReference : public PyObjectPointer + { + private: + friend class OwnedList; + + protected: + explicit OwnedReference(T* it) : PyObjectPointer(it) + { + } + + public: + + // Constructors + + static OwnedReference 
consuming(PyObject* p) + { + return OwnedReference(reinterpret_cast(p)); + } + + static OwnedReference owning(T* p) + { + OwnedReference result(p); + Py_XINCREF(result.p); + return result; + } + + OwnedReference() : PyObjectPointer(nullptr) + {} + + explicit OwnedReference(const PyObjectPointer<>& other) + : PyObjectPointer(nullptr) + { + T* op = other.borrow(); + TC(op); + this->p = other.borrow(); + Py_XINCREF(this->p); + } + + // It would be good to make use of the C++11 distinction + // between move and copy operations, e.g., constructing from a + // pointer should be a move operation. + // In the common case of ``OwnedObject x = Py_SomeFunction()``, + // the call to the copy constructor will be elided completely. + OwnedReference(const OwnedReference& other) + : PyObjectPointer(other.p) + { + Py_XINCREF(this->p); + } + + static OwnedReference None() + { + Py_INCREF(Py_None); + return OwnedReference(Py_None); + } + + // We can assign from exactly our type without any extra checking + OwnedReference& operator=(const OwnedReference& other) + { + Py_XINCREF(other.p); + const T* tmp = this->p; + this->p = other.p; + Py_XDECREF(tmp); + return *this; + } + + OwnedReference& operator=(const BorrowedReference other) + { + return this->operator=(other.borrow()); + } + + OwnedReference& operator=(T* const other) + { + TC(other); + Py_XINCREF(other); + T* tmp = this->p; + this->p = other; + Py_XDECREF(tmp); + return *this; + } + + // We can assign from an arbitrary reference type + // if it passes our check. + template + OwnedReference& operator=(const OwnedReference& other) + { + X* op = other.borrow(); + TC(op); + return this->operator=(reinterpret_cast(op)); + } + + inline void steal(T* other) + { + assert(this->p == nullptr); + TC(other); + this->p = other; + } + + T* relinquish_ownership() + { + T* result = this->p; + this->p = nullptr; + return result; + } + + T* acquire() const + { + // Return a new reference. + // TODO: This may go away when we have reference objects + // throughout the code. + Py_XINCREF(this->p); + return this->p; + } + + // Nothing else declares a destructor, we're the leaf, so we + // should be able to get away without virtual. + ~OwnedReference() + { + Py_CLEAR(this->p); + } + + void CLEAR() + { + Py_CLEAR(this->p); + assert(this->p == nullptr); + } + }; + + static inline + void operator<<=(PyObject*& target, OwnedObject& o) + { + target = o.relinquish_ownership(); + } + + + class NewReference : public OwnedObject + { + private: + G_NO_COPIES_OF_CLS(NewReference); + public: + // Consumes the reference. Only use this + // for API return values. 
+ NewReference(PyObject* it) : OwnedObject(it) + { + } + }; + + class NewDictReference : public NewReference + { + private: + G_NO_COPIES_OF_CLS(NewDictReference); + public: + NewDictReference() : NewReference(PyDict_New()) + { + if (!this->p) { + throw PyErrOccurred(); + } + } + + void SetItem(const char* const key, PyObject* value) + { + Require(PyDict_SetItemString(this->p, key, value)); + } + + void SetItem(const PyObjectPointer<>& key, PyObject* value) + { + Require(PyDict_SetItem(this->p, key.borrow_o(), value)); + } + }; + + template + class _OwnedGreenlet: public OwnedReference + { + private: + protected: + _OwnedGreenlet(T* it) : OwnedReference(it) + {} + + public: + _OwnedGreenlet() : OwnedReference() + {} + + _OwnedGreenlet(const _OwnedGreenlet& other) : OwnedReference(other) + { + } + _OwnedGreenlet(OwnedMainGreenlet& other) : + OwnedReference(reinterpret_cast(other.acquire())) + { + } + _OwnedGreenlet(const BorrowedGreenlet& other); + // Steals a reference. + static _OwnedGreenlet consuming(PyGreenlet* it) + { + return _OwnedGreenlet(reinterpret_cast(it)); + } + + inline _OwnedGreenlet& operator=(const OwnedGreenlet& other) + { + return this->operator=(other.borrow()); + } + + inline _OwnedGreenlet& operator=(const BorrowedGreenlet& other); + + _OwnedGreenlet& operator=(const OwnedMainGreenlet& other) + { + PyGreenlet* owned = other.acquire(); + Py_XDECREF(this->p); + this->p = reinterpret_cast(owned); + return *this; + } + + _OwnedGreenlet& operator=(T* const other) + { + OwnedReference::operator=(other); + return *this; + } + + T* relinquish_ownership() + { + T* result = this->p; + this->p = nullptr; + return result; + } + + PyObject* relinquish_ownership_o() + { + return reinterpret_cast(relinquish_ownership()); + } + + inline Greenlet* operator->() const noexcept; + inline operator Greenlet*() const noexcept; + }; + + template + class BorrowedReference : public PyObjectPointer + { + public: + // Allow implicit creation from PyObject* pointers as we + // transition to using these classes. Also allow automatic + // conversion to PyObject* for passing to C API calls and even + // for Py_INCREF/DECREF, because we ourselves do no memory management. 
+ BorrowedReference(T* it) : PyObjectPointer(it) + {} + + BorrowedReference(const PyObjectPointer& ref) : PyObjectPointer(ref.borrow()) + {} + + BorrowedReference() : PyObjectPointer(nullptr) + {} + + operator T*() const + { + return this->p; + } + }; + + typedef BorrowedReference BorrowedObject; + //typedef BorrowedReference BorrowedGreenlet; + + template + class _BorrowedGreenlet : public BorrowedReference + { + public: + _BorrowedGreenlet() : + BorrowedReference(nullptr) + {} + + _BorrowedGreenlet(T* it) : + BorrowedReference(it) + {} + + _BorrowedGreenlet(const BorrowedObject& it); + + _BorrowedGreenlet(const OwnedGreenlet& it) : + BorrowedReference(it.borrow()) + {} + + _BorrowedGreenlet& operator=(const BorrowedObject& other); + + // We get one of these for PyGreenlet, but one for PyObject + // is handy as well + operator PyObject*() const + { + return reinterpret_cast(this->p); + } + Greenlet* operator->() const noexcept; + operator Greenlet*() const noexcept; + }; + + typedef _BorrowedGreenlet BorrowedGreenlet; + + template + _OwnedGreenlet::_OwnedGreenlet(const BorrowedGreenlet& other) + : OwnedReference(reinterpret_cast(other.borrow())) + { + Py_XINCREF(this->p); + } + + + class BorrowedMainGreenlet + : public _BorrowedGreenlet + { + public: + BorrowedMainGreenlet(const OwnedMainGreenlet& it) : + _BorrowedGreenlet(it.borrow()) + {} + BorrowedMainGreenlet(PyGreenlet* it=nullptr) + : _BorrowedGreenlet(it) + {} + }; + + template + _OwnedGreenlet& _OwnedGreenlet::operator=(const BorrowedGreenlet& other) + { + return this->operator=(other.borrow()); + } + + + class ImmortalObject : public PyObjectPointer<> + { + private: + G_NO_ASSIGNMENT_OF_CLS(ImmortalObject); + public: + explicit ImmortalObject(PyObject* it) : PyObjectPointer<>(it) + { + } + + ImmortalObject(const ImmortalObject& other) + : PyObjectPointer<>(other.p) + { + + } + + /** + * Become the new owner of the object. Does not change the + * reference count. + */ + ImmortalObject& operator=(PyObject* it) + { + assert(this->p == nullptr); + this->p = it; + return *this; + } + + static ImmortalObject consuming(PyObject* it) + { + return ImmortalObject(it); + } + + inline operator PyObject*() const + { + return this->p; + } + }; + + class ImmortalString : public ImmortalObject + { + private: + G_NO_COPIES_OF_CLS(ImmortalString); + const char* str; + public: + ImmortalString(const char* const str) : + ImmortalObject(str ? Require(PyUnicode_InternFromString(str)) : nullptr) + { + this->str = str; + } + + inline ImmortalString& operator=(const char* const str) + { + if (!this->p) { + this->p = Require(PyUnicode_InternFromString(str)); + this->str = str; + } + else { + assert(this->str == str); + } + return *this; + } + + inline operator std::string() const + { + return this->str; + } + + }; + + class ImmortalEventName : public ImmortalString + { + private: + G_NO_COPIES_OF_CLS(ImmortalEventName); + public: + ImmortalEventName(const char* const str) : ImmortalString(str) + {} + }; + + class ImmortalException : public ImmortalObject + { + private: + G_NO_COPIES_OF_CLS(ImmortalException); + public: + ImmortalException(const char* const name, PyObject* base=nullptr) : + ImmortalObject(name + // Python 2.7 isn't const correct + ? 
Require(PyErr_NewException((char*)name, base, nullptr)) + : nullptr) + {} + + inline bool PyExceptionMatches() const + { + return PyErr_ExceptionMatches(this->p) > 0; + } + + }; + + template + inline OwnedObject PyObjectPointer::PyStr() const noexcept + { + if (!this->p) { + return OwnedObject(); + } + return OwnedObject::consuming(PyObject_Str(reinterpret_cast(this->p))); + } + + template + inline const std::string PyObjectPointer::as_str() const noexcept + { + // NOTE: This is not Python exception safe. + if (this->p) { + // The Python APIs return a cached char* value that's only valid + // as long as the original object stays around, and we're + // about to (probably) toss it. Hence the copy to std::string. + OwnedObject py_str = this->PyStr(); + if (!py_str) { + return "(nil)"; + } + return PyUnicode_AsUTF8(py_str.borrow()); + } + return "(nil)"; + } + + template + inline OwnedObject PyObjectPointer::PyGetAttr(const ImmortalObject& name) const noexcept + { + assert(this->p); + return OwnedObject::consuming(PyObject_GetAttr(reinterpret_cast(this->p), name)); + } + + template + inline OwnedObject PyObjectPointer::PyRequireAttr(const char* const name) const + { + assert(this->p); + return OwnedObject::consuming(Require(PyObject_GetAttrString(this->p, name), name)); + } + + template + inline OwnedObject PyObjectPointer::PyRequireAttr(const ImmortalString& name) const + { + assert(this->p); + return OwnedObject::consuming(Require( + PyObject_GetAttr( + reinterpret_cast(this->p), + name + ), + name + )); + } + + template + inline OwnedObject PyObjectPointer::PyCall(const BorrowedObject& arg) const + { + return this->PyCall(arg.borrow()); + } + + template + inline OwnedObject PyObjectPointer::PyCall(PyGreenlet* arg) const + { + return this->PyCall(reinterpret_cast(arg)); + } + + template + inline OwnedObject PyObjectPointer::PyCall(PyObject* arg) const + { + assert(this->p); + return OwnedObject::consuming(PyObject_CallFunctionObjArgs(this->p, arg, NULL)); + } + + template + inline OwnedObject PyObjectPointer::PyCall(const BorrowedObject args, + const BorrowedObject kwargs) const + { + assert(this->p); + return OwnedObject::consuming(PyObject_Call(this->p, args, kwargs)); + } + + template + inline OwnedObject PyObjectPointer::PyCall(const OwnedObject& args, + const OwnedObject& kwargs) const + { + assert(this->p); + return OwnedObject::consuming(PyObject_Call(this->p, args.borrow(), kwargs.borrow())); + } + + inline void + ListChecker(void * p) + { + if (!p) { + return; + } + if (!PyList_Check(p)) { + throw TypeError("Expected a list"); + } + } + + class OwnedList : public OwnedReference + { + private: + G_NO_ASSIGNMENT_OF_CLS(OwnedList); + public: + // TODO: Would like to use move. + explicit OwnedList(const OwnedObject& other) + : OwnedReference(other) + { + } + + OwnedList& operator=(const OwnedObject& other) + { + if (other && PyList_Check(other.p)) { + // Valid list. Own a new reference to it, discard the + // reference to what we did own. + PyObject* new_ptr = other.p; + Py_INCREF(new_ptr); + Py_XDECREF(this->p); + this->p = new_ptr; + } + else { + // Either the other object was NULL (an error) or it + // wasn't a list. Either way, we're now invalidated. 
+ Py_XDECREF(this->p); + this->p = nullptr; + } + return *this; + } + + inline bool empty() const + { + return PyList_GET_SIZE(p) == 0; + } + + inline Py_ssize_t size() const + { + return PyList_GET_SIZE(p); + } + + inline BorrowedObject at(const Py_ssize_t index) const + { + return PyList_GET_ITEM(p, index); + } + + inline void clear() + { + PyList_SetSlice(p, 0, PyList_GET_SIZE(p), NULL); + } + }; + + // Use this to represent the module object used at module init + // time. + // This could either be a borrowed (Py2) or new (Py3) reference; + // either way, we don't want to do any memory management + // on it here, Python itself will handle that. + // XXX: Actually, that's not quite right. On Python 3, if an + // exception occurs before we return to the interpreter, this will + // leak; but all previous versions also had that problem. + class CreatedModule : public PyObjectPointer<> + { + private: + G_NO_COPIES_OF_CLS(CreatedModule); + public: + CreatedModule(PyModuleDef& mod_def) : PyObjectPointer<>( + Require(PyModule_Create(&mod_def))) + { + } + + // PyAddObject(): Add a reference to the object to the module. + // On return, the reference count of the object is unchanged. + // + // The docs warn that PyModule_AddObject only steals the + // reference on success, so if it fails after we've incref'd + // or allocated, we're responsible for the decref. + void PyAddObject(const char* name, const long new_bool) + { + OwnedObject p = OwnedObject::consuming(Require(PyBool_FromLong(new_bool))); + this->PyAddObject(name, p); + } + + void PyAddObject(const char* name, const OwnedObject& new_object) + { + // The caller already owns a reference they will decref + // when their variable goes out of scope, we still need to + // incref/decref. + this->PyAddObject(name, new_object.borrow()); + } + + void PyAddObject(const char* name, const ImmortalObject& new_object) + { + this->PyAddObject(name, new_object.borrow()); + } + + void PyAddObject(const char* name, PyTypeObject& type) + { + this->PyAddObject(name, reinterpret_cast(&type)); + } + + void PyAddObject(const char* name, PyObject* new_object) + { + Py_INCREF(new_object); + try { + Require(PyModule_AddObject(this->p, name, new_object)); + } + catch (const PyErrOccurred&) { + Py_DECREF(p); + throw; + } + } + }; + + class PyErrFetchParam : public PyObjectPointer<> + { + // Not an owned object, because we can't be initialized with + // one, and we only sometimes acquire ownership. + private: + G_NO_COPIES_OF_CLS(PyErrFetchParam); + public: + // To allow declaring these and passing them to + // PyErr_Fetch we implement the empty constructor, + // and the address operator. + PyErrFetchParam() : PyObjectPointer<>(nullptr) + { + } + + PyObject** operator&() + { + return &this->p; + } + + // This allows us to pass one directly without the &, + // BUT it has higher precedence than the bool operator + // if it's not explicit. + operator PyObject**() + { + return &this->p; + } + + // We don't want to be able to pass these to Py_DECREF and + // such so we don't have the implicit PyObject* conversion. + + inline PyObject* relinquish_ownership() + { + PyObject* result = this->p; + this->p = nullptr; + return result; + } + + ~PyErrFetchParam() + { + Py_XDECREF(p); + } + }; + + class OwnedErrPiece : public OwnedObject + { + private: + + public: + // Unlike OwnedObject, this increments the refcount. 
+ OwnedErrPiece(PyObject* p=nullptr) : OwnedObject(p) + { + this->acquire(); + } + + PyObject** operator&() + { + return &this->p; + } + + inline operator PyObject*() const + { + return this->p; + } + + operator PyTypeObject*() const + { + return reinterpret_cast(this->p); + } + }; + + class PyErrPieces + { + private: + OwnedErrPiece type; + OwnedErrPiece instance; + OwnedErrPiece traceback; + bool restored; + public: + // Takes new references; if we're destroyed before + // restoring the error, we drop the references. + PyErrPieces(PyObject* t, PyObject* v, PyObject* tb) : + type(t), + instance(v), + traceback(tb), + restored(0) + { + this->normalize(); + } + + PyErrPieces() : + restored(0) + { + // PyErr_Fetch transfers ownership to us, so + // we don't actually need to INCREF; but we *do* + // need to DECREF if we're not restored. + PyErrFetchParam t, v, tb; + PyErr_Fetch(&t, &v, &tb); + type.steal(t.relinquish_ownership()); + instance.steal(v.relinquish_ownership()); + traceback.steal(tb.relinquish_ownership()); + } + + void PyErrRestore() + { + // can only do this once + assert(!this->restored); + this->restored = true; + PyErr_Restore( + this->type.relinquish_ownership(), + this->instance.relinquish_ownership(), + this->traceback.relinquish_ownership()); + assert(!this->type && !this->instance && !this->traceback); + } + + private: + void normalize() + { + // First, check the traceback argument, replacing None, + // with NULL + if (traceback.is_None()) { + traceback = nullptr; + } + + if (traceback && !PyTraceBack_Check(traceback.borrow())) { + throw PyErrOccurred(PyExc_TypeError, + "throw() third argument must be a traceback object"); + } + + if (PyExceptionClass_Check(type)) { + // If we just had a type, we'll now have a type and + // instance. + // The type's refcount will have gone up by one + // because of the instance and the instance will have + // a refcount of one. Either way, we owned, and still + // do own, exactly one reference. + PyErr_NormalizeException(&type, &instance, &traceback); + + } + else if (PyExceptionInstance_Check(type)) { + /* Raising an instance --- usually that means an + object that is a subclass of BaseException, but on + Python 2, that can also mean an arbitrary old-style + object. The value should be a dummy. */ + if (instance && !instance.is_None()) { + throw PyErrOccurred( + PyExc_TypeError, + "instance exception may not have a separate value"); + } + /* Normalize to raise , */ + this->instance = this->type; + this->type = PyExceptionInstance_Class(instance.borrow()); + + /* + It would be tempting to do this: + + Py_ssize_t type_count = Py_REFCNT(Py_TYPE(instance.borrow())); + this->type = PyExceptionInstance_Class(instance.borrow()); + assert(this->type.REFCNT() == type_count + 1); + + But that doesn't work on Python 2 in the case of + old-style instances: The result of Py_TYPE is going to + be the global shared that all + old-style classes have, while the return of Instance_Class() + will be the Python-level class object. The two are unrelated. + */ + } + else { + /* Not something you can raise. throw() fails. */ + PyErr_Format(PyExc_TypeError, + "exceptions must be classes, or instances, not %s", + Py_TYPE(type.borrow())->tp_name); + throw PyErrOccurred(); + } + } + }; + + // PyArg_Parse's O argument returns a borrowed reference. 
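+        // An illustrative use (editorial example; ``args`` stands for
+        // whatever argument tuple the caller received):
+        //
+        //     PyArgParseParam obj;
+        //     if (!PyArg_ParseTuple(args, "O", &obj)) {
+        //         return nullptr;
+        //     }
+        //     // ``obj`` now borrows the reference; nothing to decref.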
+        class PyArgParseParam : public BorrowedObject
+        {
+        private:
+            G_NO_COPIES_OF_CLS(PyArgParseParam);
+        public:
+            explicit PyArgParseParam(PyObject* p=nullptr) : BorrowedObject(p)
+            {
+            }
+
+            inline PyObject** operator&()
+            {
+                return &this->p;
+            }
+        };
+
+};};
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_slp_switch.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_slp_switch.hpp
new file mode 100644
index 0000000..bd4b7ae
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_slp_switch.hpp
@@ -0,0 +1,99 @@
+#ifndef GREENLET_SLP_SWITCH_HPP
+#define GREENLET_SLP_SWITCH_HPP
+
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_refs.hpp"
+
+/*
+ * the following macros are spliced into the OS/compiler
+ * specific code, in order to simplify maintenance.
+ */
+// We can save about 10% of the time it takes to switch greenlets if
+// we thread the thread state through the slp_save_state() and the
+// following slp_restore_state() calls from
+// slp_switch()->g_switchstack() (which already needs to access it).
+//
+// However:
+//
+// that requires changing the prototypes and implementations of the
+// switching functions. If we just change the prototype of
+// slp_switch() to accept the argument and update the macros, without
+// changing the implementation of slp_switch(), we get crashes on
+// 64-bit Linux and 32-bit x86 (for reasons that aren't 100% clear);
+// on the other hand, 64-bit macOS seems to be fine. Also, 64-bit
+// windows is an issue because slp_switch is written fully in assembly
+// and currently ignores its argument so some code would have to be
+// adjusted there to pass the argument on to the
+// ``slp_save_state_asm()`` function (but interestingly, because of
+// the calling convention, the extra argument is just ignored and
+// things function fine, albeit slower, if we just modify
+// ``slp_save_state_asm()`` to fetch the pointer to pass to the
+// macro.)
+//
+// Our compromise is to use a *global*, untracked, weak, pointer
+// to the necessary thread state during the process of switching only.
+// This is safe because we're protected by the GIL, and if we're
+// running this code, the thread isn't exiting. This also nets us a
+// 10-12% speed improvement.
+
+static greenlet::Greenlet* volatile switching_thread_state = nullptr;
+
+
+extern "C" {
+static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref);
+static void GREENLET_NOINLINE(slp_restore_state_trampoline)();
+}
+
+
+#define SLP_SAVE_STATE(stackref, stsizediff) \
+do { \
+    assert(switching_thread_state); \
+    stackref += STACK_MAGIC; \
+    if (slp_save_state_trampoline((char*)stackref)) \
+        return -1; \
+    if (!switching_thread_state->active()) \
+        return 1; \
+    stsizediff = switching_thread_state->stack_start() - (char*)stackref; \
+} while (0)
+
+#define SLP_RESTORE_STATE() slp_restore_state_trampoline()
+
+#define SLP_EVAL
+extern "C" {
+#define slp_switch GREENLET_NOINLINE(slp_switch)
+#include "slp_platformselect.h"
+}
+#undef slp_switch
+
+#ifndef STACK_MAGIC
+# error \
+    "greenlet needs to be ported to this platform, or taught how to detect your compiler properly."
+#endif /* !STACK_MAGIC */
+
+
+
+#ifdef EXTERNAL_ASM
+/* CCP addition: Make these functions, to be called from assembler.
+ * The token include file for the given platform should enable the
+ * EXTERNAL_ASM define so that this is included.
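+ *
+ * Sketch of the expected calling sequence, added here for clarity (the
+ * actual assembly lives in the per-platform switch_*_masm.asm files):
+ *
+ *     slp_switch (assembly)
+ *         -> slp_save_state_asm(sp)    ; wraps SLP_SAVE_STATE above
+ *         <- adjust sp/fp by the returned difference
+ *         -> slp_restore_state_asm()   ; wraps SLP_RESTORE_STATE above
+ *         <- return 0 on success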
+ */ +extern "C" { +intptr_t +slp_save_state_asm(intptr_t* ref) +{ + intptr_t diff; + SLP_SAVE_STATE(ref, diff); + return diff; +} + +void +slp_restore_state_asm(void) +{ + SLP_RESTORE_STATE(); +} + +extern int slp_switch(void); +}; +#endif + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/greenlet_thread_support.hpp b/venv/lib/python3.12/site-packages/greenlet/greenlet_thread_support.hpp new file mode 100644 index 0000000..3ded7d2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/greenlet_thread_support.hpp @@ -0,0 +1,31 @@ +#ifndef GREENLET_THREAD_SUPPORT_HPP +#define GREENLET_THREAD_SUPPORT_HPP + +/** + * Defines various utility functions to help greenlet integrate well + * with threads. This used to be needed when we supported Python + * 2.7 on Windows, which used a very old compiler. We wrote an + * alternative implementation using Python APIs and POSIX or Windows + * APIs, but that's no longer needed. So this file is a shadow of its + * former self --- but may be needed in the future. + */ + +#include +#include +#include + +#include "greenlet_compiler_compat.hpp" + +namespace greenlet { + typedef std::mutex Mutex; + typedef std::lock_guard LockGuard; + class LockInitError : public std::runtime_error + { + public: + LockInitError(const char* what) : std::runtime_error(what) + {}; + }; +}; + + +#endif /* GREENLET_THREAD_SUPPORT_HPP */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/__init__.py b/venv/lib/python3.12/site-packages/greenlet/platform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/platform/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d92a935 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/platform/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/setup_switch_x64_masm.cmd b/venv/lib/python3.12/site-packages/greenlet/platform/setup_switch_x64_masm.cmd new file mode 100644 index 0000000..038ced2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/setup_switch_x64_masm.cmd @@ -0,0 +1,2 @@ +call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64 +ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_aarch64_gcc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_aarch64_gcc.h new file mode 100644 index 0000000..058617c --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_aarch64_gcc.h @@ -0,0 +1,124 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall + * 13-Apr-13 Add support for strange GCC caller-save decisions + * 08-Apr-13 File creation. 
Michael Matz
+ *
+ * NOTES
+ *
+ * Simply save all callee-saved registers
+ *
+ */
+
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+#define STACK_MAGIC 0
+#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \
+                     "x27", "x28", "x30" /* aka lr */, \
+                     "v8", "v9", "v10", "v11", \
+                     "v12", "v13", "v14", "v15"
+
+/*
+ * Recall:
+   asm asm-qualifiers ( AssemblerTemplate
+                      : OutputOperands
+                      [ : InputOperands
+                      [ : Clobbers ] ])
+
+   or (if asm-qualifiers contains 'goto')
+
+   asm asm-qualifiers ( AssemblerTemplate
+                      : OutputOperands
+                      : InputOperands
+                      : Clobbers
+                      : GotoLabels)
+
+   and OutputOperands are
+
+   [ [asmSymbolicName] ] constraint (cvariablename)
+
+   When a name is given, refer to it as ``%[the name]``.
+   When not given, ``%i`` where ``i`` is the zero-based index.
+
+   constraints starting with ``=`` means only writing; ``+`` means
+   reading and writing.
+
+   This is followed by ``r`` (must be register) or ``m`` (must be memory)
+   and these can be combined.
+
+   The ``cvariablename`` is actually an lvalue expression.
+
+   In AArch64, there are 31 general purpose registers. If named X0...
+   they are 64-bit. If named W0... they are the bottom 32 bits of the
+   corresponding 64 bit register.
+
+   XZR and WZR are hardcoded to 0, and ignore writes.
+
+   Arguments are in X0..X7. C++ uses X0 for ``this``. X0 holds simple return
+   values (?)
+
+   Whenever a W register is written, the top half of the X register is zeroed.
+ */
+
+static int
+slp_switch(void)
+{
+    int err;
+    void *fp;
+    /* Windows uses a 32-bit long on a 64-bit platform, unlike the rest of
+       the world, and in theory we can be compiled with GCC/llvm on 64-bit
+       windows. So we need a fixed-width type.
+    */
+    int64_t *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("str x29, %0" : "=m"(fp) : : );
+    __asm__ ("mov %0, sp" : "=r" (stackref));
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "add sp,sp,%0\n"
+            "add x29,x29,%0\n"
+            :
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+        /* SLP_SAVE_STATE macro contains some return statements
+           (of -1 and 1). It falls through only when
+           the return value of slp_save_state() is zero, which
+           is placed in x0.
+           In that case we (slp_switch) also want to return zero
+           (also in x0 of course).
+           Now, some GCC versions (seen with 4.8) think it's a
+           good idea to save/restore x0 around the call to
+           slp_restore_state(), instead of simply zeroing it
+           at the return below. But slp_restore_state
+           writes random values to the stack slot used for this
+           save/restore (from when it once was saved above in
+           SLP_SAVE_STATE, when it was still uninitialized), so
+           "restoring" that precious zero actually makes us
+           return random values. There are some ways to make
+           GCC not use that zero value in the normal return path
+           (e.g. making err volatile, but that costs a little
+           stack space), and the simplest is to call a function
+           that returns an unknown value (which happens to be zero),
+           so the saved/restored value is unused.
+
+           Thus, this line stores a 0 into the ``err`` variable
+           (which must be held in a register for this instruction,
+           of course). The ``w`` qualifier causes the instruction
+           to use W0 instead of X0, otherwise we get a warning
+           about a value size mismatch (because err is an int,
+           and aarch64 platforms are LP64: 32-bit int, 64 bit long
+           and pointer).
+        */
+        __asm__ volatile ("mov %w0, #0" : "=r" (err));
+    }
+    __asm__ volatile ("ldr x29, %0" : : "m" (fp) :);
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    return err;
+}
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_alpha_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_alpha_unix.h
new file mode 100644
index 0000000..7e07abf
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_alpha_unix.h
@@ -0,0 +1,30 @@
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+#define STACK_MAGIC 0
+
+#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \
+                     "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9"
+
+static int
+slp_switch(void)
+{
+    int ret;
+    long *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("mov $30, %0" : "=r" (stackref) : );
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "addq $30, %0, $30\n\t"
+            : /* no outputs */
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+    }
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("mov $31, %0" : "=r" (ret) : );
+    return ret;
+}
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_amd64_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_amd64_unix.h
new file mode 100644
index 0000000..d470110
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_amd64_unix.h
@@ -0,0 +1,87 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 3-May-13 Ralf Schmitt
+ *      Add support for strange GCC caller-save decisions
+ *      (ported from switch_aarch64_gcc.h)
+ * 18-Aug-11 Alexey Borzenkov
+ *      Correctly save rbp, csr and cw
+ * 01-Apr-04 Hye-Shik Chang
+ *      Ported from i386 to amd64.
+ * 24-Nov-02 Christian Tismer
+ *      needed to add another magic constant to ensure
+ *      that f in slp_eval_frame(PyFrameObject *f)
+ *      gets included into the saved stack area.
+ *      STACK_REFPLUS will probably be 1 in most cases.
+ * 17-Sep-02 Christian Tismer
+ *      after virtualizing stack save/restore, the
+ *      stack size shrunk a bit. Needed to introduce
+ *      an adjustment STACK_MAGIC per platform.
+ * 15-Sep-02 Gerd Woetzel
+ *      slightly changed framework for sparc
+ * 31-Avr-02 Armin Rigo
+ *      Added ebx, esi and edi register-saves.
+ * 01-Mar-02 Samual M. Rushing
+ *      Ported from i386.
+ */
+
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+
+/* #define STACK_MAGIC 3 */
+/* the above works fine with gcc 2.96, but 2.95.3 wants this */
+#define STACK_MAGIC 0
+
+#define REGS_TO_SAVE "r12", "r13", "r14", "r15"
+
+static int
+slp_switch(void)
+{
+    int err;
+    void* rbp;
+    void* rbx;
+    unsigned int csr;
+    unsigned short cw;
+    /* This used to be declared 'register', but that does nothing in
+       modern compilers and is explicitly forbidden in some new
+       standards.
*/
+    long *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("fstcw %0" : "=m" (cw));
+    __asm__ volatile ("stmxcsr %0" : "=m" (csr));
+    __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp));
+    __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx));
+    __asm__ ("movq %%rsp, %0" : "=g" (stackref));
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "addq %0, %%rsp\n"
+            "addq %0, %%rbp\n"
+            :
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+        __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err));
+    }
+    __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx));
+    __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp));
+    __asm__ volatile ("ldmxcsr %0" : : "m" (csr));
+    __asm__ volatile ("fldcw %0" : : "m" (cw));
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    return err;
+}
+
+#endif
+
+/*
+ * further self-processing support
+ */
+
+/*
+ * if you want to add self-inspection tools, place them
+ * here. See the x86_msvc for the necessary defines.
+ * These features are highly experimental and not
+ * essential yet.
+ */
diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_gcc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_gcc.h
new file mode 100644
index 0000000..655003a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_gcc.h
@@ -0,0 +1,79 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro
+ * 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I
+ *    read that these do not need to be saved. Also added notes and
+ *    errors related to the frame pointer. Richard Tew.
+ *
+ * NOTES
+ *
+ * It is not possible to detect if fp is used or not, so the supplied
+ * switch function needs to support it, so that you can remove it if
+ * it does not apply to you.
+ *
+ * POSSIBLE ERRORS
+ *
+ * "fp cannot be used in asm here"
+ *
+ * - Try commenting out "fp" in REGS_TO_SAVE.
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#ifdef __thumb__ +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr" +#else +#define REG_FP "fp" +#define REG_FPFP "fp,fp" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr" +#endif +#if defined(__SOFTFP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#elif defined(__VFP_FP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" +#elif defined(__MAVERICK__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \ + "mvf8", "mvf9", "mvf10", "mvf11", \ + "mvf12", "mvf13", "mvf14", "mvf15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7" +#endif + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + int *stackref, stsizediff; + int result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0"); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0"); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return result; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_ios.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_ios.h new file mode 100644 index 0000000..9e640e1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm32_ios.h @@ -0,0 +1,67 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 31-May-15 iOS support. Ported from arm32. Proton + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr" +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + int *stackref, stsizediff, result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str " REG_FP ",%0" : "=m" (fp)); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + : REGS_TO_SAVE /* Clobber registers, force compiler to + * recalculate address of void *fp from REG_SP or REG_FP */ + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ( + "ldr " REG_FP ", %1\n\t" + "mov %0, #0" + : "=r" (result) + : "m" (fp) + : REGS_TO_SAVE /* Force compiler to restore saved registers after this */ + ); + return result; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.asm b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.asm new file mode 100644 index 0000000..29f9c22 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.asm @@ -0,0 +1,53 @@ + AREA switch_arm64_masm, CODE, READONLY; + GLOBAL slp_switch [FUNC] + EXTERN slp_save_state_asm + EXTERN slp_restore_state_asm + +slp_switch + ; push callee saved registers to stack + stp x19, x20, [sp, #-16]! + stp x21, x22, [sp, #-16]! + stp x23, x24, [sp, #-16]! + stp x25, x26, [sp, #-16]! + stp x27, x28, [sp, #-16]! + stp x29, x30, [sp, #-16]! + stp d8, d9, [sp, #-16]! + stp d10, d11, [sp, #-16]! + stp d12, d13, [sp, #-16]! + stp d14, d15, [sp, #-16]! + + ; call slp_save_state_asm with stack pointer + mov x0, sp + bl slp_save_state_asm + + ; early return for return value of 1 and -1 + cmp x0, #-1 + b.eq RETURN + cmp x0, #1 + b.eq RETURN + + ; increment stack and frame pointer + add sp, sp, x0 + add x29, x29, x0 + + bl slp_restore_state_asm + + ; store return value for successful completion of routine + mov x0, #0 + +RETURN + ; pop registers from stack + ldp d14, d15, [sp], #16 + ldp d12, d13, [sp], #16 + ldp d10, d11, [sp], #16 + ldp d8, d9, [sp], #16 + ldp x29, x30, [sp], #16 + ldp x27, x28, [sp], #16 + ldp x25, x26, [sp], #16 + ldp x23, x24, [sp], #16 + ldp x21, x22, [sp], #16 + ldp x19, x20, [sp], #16 + + ret + + END diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.obj b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.obj new file mode 100644 index 0000000..f6f220e Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_masm.obj differ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_msvc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_msvc.h new file mode 100644 index 0000000..7ab7f45 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_arm64_msvc.h @@ -0,0 +1,17 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 21-Oct-21 Niyas Sait + * First version to enable win/arm64 support. + */ + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. 
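+ * (For reference: the matching implementation is in
+ * switch_arm64_masm.asm, whose slp_switch calls back into the
+ * slp_save_state_asm()/slp_restore_state_asm() wrappers that
+ * greenlet_slp_switch.hpp defines when EXTERNAL_ASM is set.)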
*/ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_csky_gcc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_csky_gcc.h new file mode 100644 index 0000000..ac469d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_csky_gcc.h @@ -0,0 +1,48 @@ +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_FP "r8" +#ifdef __CSKYABIV2__ +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\ + "r16", "r17", "r18", "r19", "r20", "r21", "r22",\ + "r23", "r24", "r25" + +#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\ + "vr13", "vr14", "vr15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#endif +#else +#define REGS_TO_SAVE "r9", "r10", "r11", "r12", "r13", "r15" +#endif + + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + int *stackref, stsizediff; + int result; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addu sp,%0\n" + "addu "REG_FP",%0\n" + : + : "r" (stsizediff) + ); + + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movi %0, 0" : "=r" (result)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + + return result; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_loongarch64_linux.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_loongarch64_linux.h new file mode 100644 index 0000000..9eaf34e --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_loongarch64_linux.h @@ -0,0 +1,31 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "fp", \ + "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move %0, $sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add.d $sp, $sp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move %0, $zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_m68k_gcc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_m68k_gcc.h new file mode 100644 index 0000000..da761c2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_m68k_gcc.h @@ -0,0 +1,38 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 2014-01-06 Andreas Schwab + * File created. 
+ */
+
+#ifdef SLP_EVAL
+
+#define STACK_MAGIC 0
+
+#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \
+                     "%a2", "%a3", "%a4"
+
+static int
+slp_switch(void)
+{
+    int err;
+    int *stackref, stsizediff;
+    void *fp, *a5;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("move.l %%fp, %0" : "=m"(fp));
+    __asm__ volatile ("move.l %%a5, %0" : "=m"(a5));
+    __asm__ ("move.l %%sp, %0" : "=r"(stackref));
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff));
+        SLP_RESTORE_STATE();
+        __asm__ volatile ("clr.l %0" : "=g" (err));
+    }
+    __asm__ volatile ("move.l %0, %%a5" : : "m"(a5));
+    __asm__ volatile ("move.l %0, %%fp" : : "m"(fp));
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    return err;
+}
+
+#endif
diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_mips_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_mips_unix.h
new file mode 100644
index 0000000..b9003e9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_mips_unix.h
@@ -0,0 +1,64 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 20-Sep-14 Matt Madison
+ *     Re-code the saving of the gp register for MIPS64.
+ * 05-Jan-08 Thiemo Seufer
+ *     Ported from ppc.
+ */
+
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+
+#define STACK_MAGIC 0
+
+#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \
+                     "$23", "$30"
+static int
+slp_switch(void)
+{
+    int err;
+    int *stackref, stsizediff;
+#ifdef __mips64
+    uint64_t gpsave;
+#endif
+    __asm__ __volatile__ ("" : : : REGS_TO_SAVE);
+#ifdef __mips64
+    __asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : );
+#endif
+    __asm__ ("move %0, $29" : "=r" (stackref) : );
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ __volatile__ (
+#ifdef __mips64
+            "daddu $29, %0\n"
+#else
+            "addu $29, %0\n"
+#endif
+            : /* no outputs */
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+    }
+#ifdef __mips64
+    __asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : );
+#endif
+    __asm__ __volatile__ ("" : : : REGS_TO_SAVE);
+    __asm__ __volatile__ ("move %0, $0" : "=r" (err));
+    return err;
+}
+
+#endif
+
+/*
+ * further self-processing support
+ */
+
+/*
+ * if you want to add self-inspection tools, place them
+ * here. See the x86_msvc for the necessary defines.
+ * These features are highly experimental and not
+ * essential yet.
+ */
diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_aix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_aix.h
new file mode 100644
index 0000000..e7e0b87
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_aix.h
@@ -0,0 +1,103 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 16-Oct-20 Jesse Gorzinski
+ *     Copied from Linux PPC64 implementation
+ * 04-Sep-18 Alexey Borzenkov
+ *     Workaround a gcc bug using manual save/restore of r30
+ * 21-Mar-18 Tulio Magno Quites Machado Filho
+ *     Added r30 to the list of saved registers in order to fully comply with
+ *     both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this
+ *     register as a nonvolatile register used for local variables.
+ * 21-Mar-18 Laszlo Boszormenyi
+ *     Save r2 (TOC pointer) manually.
+ * 10-Dec-13 Ulrich Weigand
+ *     Support ELFv2 ABI. Save float/vector registers.
+ * 09-Mar-12 Michael Ellerman
+ *     64-bit implementation, copied from 32-bit.
+ * 07-Sep-05 (py-dev mailing list discussion)
+ *     removed 'r31' from the registers saved. !!!! WARNING !!!!
+ * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 6 + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_linux.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_linux.h new file mode 100644 index 0000000..3c324d0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc64_linux.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! 
+ * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#if _CALL_ELF == 2 +#define STACK_MAGIC 4 +#else +#define STACK_MAGIC 6 +#endif + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_aix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_aix.h new file mode 100644 index 0000000..6d93c13 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_aix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Mar-11 Floris Bruynooghe + * Do not add stsizediff to general purpose + * register (GPR) 30 as this is a non-volatile and + * unused by the PowerOpen Environment, therefore + * this was modifying a user register instead of the + * frame pointer (which does not seem to exist). + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? 
+ * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_linux.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_linux.h new file mode 100644 index 0000000..e83ad70 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_linux.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. 
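These PowerPC variants all reach the same slp_switch contract through slightly different register conventions. Worth noting: the value-passing that greenlet users see is not implemented in these headers at all; it is layered on top by greenlet's C core, which stashes the arguments of switch() and delivers them as the result of the switch() call the target was suspended in. A short sketch of that round trip (public API only; the doubler function and values are illustrative):

```python
# What you pass to g.switch(...) becomes the return value of the switch
# call g was suspended in (or the run function's arguments on the very
# first switch). Names here are illustrative.
from greenlet import greenlet, getcurrent

main = getcurrent()

def doubler(first):
    value = first                       # delivered by the first switch
    while True:
        value = main.switch(value * 2)  # reply, then wait for the next value

d = greenlet(doubler)
print(d.switch(2))    # starts doubler(first=2)         -> prints 4
print(d.switch(10))   # resumes inside main.switch(...) -> prints 20
```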
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_macosx.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_macosx.h new file mode 100644 index 0000000..bd414c6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_macosx.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! 
+ */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("; asm block 2\n\tmr %0, r1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "; asm block 3\n" + "\tmr r11, %0\n" + "\tadd r1, r1, r11\n" + "\tadd r30, r30, r11\n" + : /* no outputs */ + : "r" (stsizediff) + : "r11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_unix.h new file mode 100644 index 0000000..bb18808 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_ppc_unix.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. 
+ * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_riscv_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_riscv_unix.h new file mode 100644 index 0000000..8761122 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_riscv_unix.h @@ -0,0 +1,41 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ + "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ + "fs10", "fs11" + +static int +slp_switch(void) +{ + int ret; + long fp; + long *stackref, stsizediff; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, fp" : "=r" (fp) : ); + __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp, sp, %0\n\t" + "add fp, fp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); +#if __riscv_xlen == 32 + __asm__ volatile ("lw fp, %0" : : "m" (fp)); +#else + __asm__ volatile ("ld fp, %0" : : "m" (fp)); +#endif + __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_s390_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_s390_unix.h new file mode 100644 index 0000000..9199367 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_s390_unix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 25-Jan-12 Alexey Borzenkov + * Fixed Linux/S390 port to work correctly with + * different optimization options both on 31-bit + * and 64-bit. Thanks to Stefan Raabe for lots + * of testing. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 06-Oct-02 Gustavo Niemeyer + * Ported to Linux/S390. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#ifdef __s390x__ +#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ +#else +#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ +#endif + +/* Technically, r11-r13 also need saving, but function prolog starts + with stm(g) and since there are so many saved registers already + it won't be optimized, resulting in all r6-r15 being saved */ +#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ + "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); +#ifdef __s390x__ + __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); +#else + __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); +#endif + { + SLP_SAVE_STATE(stackref, stsizediff); +/* N.B. + r11 may be used as the frame pointer, and in that case it cannot be + clobbered and needs offsetting just like the stack pointer (but in cases + where frame pointer isn't used we might clobber it accidentally). What's + scary is that r11 is 2nd (and even 1st when GOT is used) callee saved + register that gcc would chose for surviving function calls. 
However, + since r6-r10 are clobbered above, their cost for reuse is reduced, so + gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), + making it relatively safe to offset in all cases. :) */ + __asm__ volatile ( +#ifdef __s390x__ + "agr 15, %0\n\t" + "agr 11, %0" +#else + "ar 15, %0\n\t" + "ar 11, %0" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); + return ret; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_sh_gcc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_sh_gcc.h new file mode 100644 index 0000000..5ecc3b3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_sh_gcc.h @@ -0,0 +1,36 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REGS_TO_SAVE "r8", "r9", "r10", "r11", "r13", \ + "fr12", "fr13", "fr14", "fr15" + +// r12 Global context pointer, GP +// r14 Frame pointer, FP +// r15 Stack pointer, SP + +static int +slp_switch(void) +{ + int err; + void* fp; + int *stackref, stsizediff; + __asm__ volatile("" : : : REGS_TO_SAVE); + __asm__ volatile("mov.l r14, %0" : "=m"(fp) : :); + __asm__("mov r15, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile( + "add %0, r15\n" + "add %0, r14\n" + : /* no outputs */ + : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile("mov r0, %0" : "=r"(err) : :); + } + __asm__ volatile("mov.l %0, r14" : : "m"(fp) :); + __asm__ volatile("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_sparc_sun_gcc.h new file mode 100644 index 0000000..96990c3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_sparc_sun_gcc.h @@ -0,0 +1,92 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-May-15 Alexey Borzenkov + * Move stack spilling code inside save/restore functions + * 30-Aug-13 Floris Bruynooghe + Clean the register windows again before returning. + This does not clobber the PIC register as it leaves + the current window intact and is required for multi- + threaded code to work correctly. + * 08-Mar-11 Floris Bruynooghe + * No need to set return value register explicitly + * before the stack and framepointer are adjusted + * as none of the other registers are influenced by + * this. Also don't needlessly clean the windows + * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that + * clobbers the gcc PIC register (%l7). + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. 
+ * 15-Sep-02 Gerd Woetzel + * added support for SunOS sparc with gcc + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + + +#define STACK_MAGIC 0 + + +#if defined(__sparcv9) +#define SLP_FLUSHW __asm__ volatile ("flushw") +#else +#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ +#endif + +/* On sparc we need to spill register windows inside save/restore functions */ +#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW +#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW + + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + + /* Put current stack pointer into stackref. + * Register spilling is done in save/restore. + */ + __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); + + { + /* Thou shalt put SLP_SAVE_STATE into a local block */ + /* Copy the current stack onto the heap */ + SLP_SAVE_STATE(stackref, stsizediff); + + /* Increment stack and frame pointer by stsizediff */ + __asm__ volatile ( + "add %0, %%sp, %%sp\n\t" + "add %0, %%fp, %%fp" + : : "r" (stsizediff)); + + /* Copy new stack from it's save store on the heap */ + SLP_RESTORE_STATE(); + + __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); + return err; + } +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_x32_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x32_unix.h new file mode 100644 index 0000000..893369c --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x32_unix.h @@ -0,0 +1,63 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 17-Aug-12 Fantix King + * Ported from amd64. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + + +static int +slp_switch(void) +{ + void* ebp; + void* ebx; + unsigned int csr; + unsigned short cw; + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
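Whatever the architecture, slp_switch returns 0 once the target stack is in place, and a nonzero error only when saving or restoring fails; everything else about control transfer is policy in greenlet's C core. The piece of that policy most worth remembering at the Python level: when a greenlet's run function returns, control and the return value fall back to its parent, and the greenlet is dead. A sketch (public API; names illustrative):

```python
# Finished greenlets hand control, and run()'s return value, to their
# parent. The spawning greenlet is the default parent.
from greenlet import greenlet, getcurrent

main = getcurrent()

def child():
    assert getcurrent().parent is main  # parented to the spawner
    return "done"

g = greenlet(child)
result = g.switch()    # runs child(); its return value arrives here
print(result, g.dead)  # -> done True
```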
+ */ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.asm b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.asm new file mode 100644 index 0000000..f5c72a2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.asm @@ -0,0 +1,111 @@ +; +; stack switching code for MASM on x641 +; Kristjan Valur Jonsson, sept 2005 +; + + +;prototypes for our calls +slp_save_state_asm PROTO +slp_restore_state_asm PROTO + + +pushxmm MACRO reg + sub rsp, 16 + .allocstack 16 + movaps [rsp], reg ; faster than movups, but we must be aligned + ; .savexmm128 reg, offset (don't know what offset is, no documentation) +ENDM +popxmm MACRO reg + movaps reg, [rsp] ; faster than movups, but we must be aligned + add rsp, 16 +ENDM + +pushreg MACRO reg + push reg + .pushreg reg +ENDM +popreg MACRO reg + pop reg +ENDM + + +.code +slp_switch PROC FRAME + ;realign stack to 16 bytes after return address push, makes the following faster + sub rsp,8 + .allocstack 8 + + pushxmm xmm15 + pushxmm xmm14 + pushxmm xmm13 + pushxmm xmm12 + pushxmm xmm11 + pushxmm xmm10 + pushxmm xmm9 + pushxmm xmm8 + pushxmm xmm7 + pushxmm xmm6 + + pushreg r15 + pushreg r14 + pushreg r13 + pushreg r12 + + pushreg rbp + pushreg rbx + pushreg rdi + pushreg rsi + + sub rsp, 10h ;allocate the singlefunction argument (must be multiple of 16) + .allocstack 10h +.endprolog + + lea rcx, [rsp+10h] ;load stack base that we are saving + call slp_save_state_asm ;pass stackpointer, return offset in eax + cmp rax, 1 + je EXIT1 + cmp rax, -1 + je EXIT2 + ;actual stack switch: + add rsp, rax + call slp_restore_state_asm + xor rax, rax ;return 0 + +EXIT: + + add rsp, 10h + popreg rsi + popreg rdi + popreg rbx + popreg rbp + + popreg r12 + popreg r13 + popreg r14 + popreg r15 + + popxmm xmm6 + popxmm xmm7 + popxmm xmm8 + popxmm xmm9 + popxmm xmm10 + popxmm xmm11 + popxmm xmm12 + popxmm xmm13 + popxmm xmm14 + popxmm xmm15 + + add rsp, 8 + ret + +EXIT1: + mov rax, 1 + jmp EXIT + +EXIT2: + sar rax, 1 + jmp EXIT + +slp_switch ENDP + +END \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.obj b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.obj new file mode 100644 index 0000000..64e3e6b Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_masm.obj differ diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_msvc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_msvc.h new file mode 100644 index 0000000..601ea56 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x64_msvc.h @@ -0,0 +1,60 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. 
* 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +/* Avoid alloca redefined warning on mingw64 */ +#ifndef alloca +#define alloca _alloca +#endif + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. */ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +/* +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; + return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; +} + +#endif +*/ \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_msvc.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_msvc.h new file mode 100644 index 0000000..0f3a59f --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_msvc.h @@ -0,0 +1,326 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to ensure + * that f in slp_eval_frame(PyFrameObject *f) + * gets included into the saved stack area. + * STACK_REFPLUS will probably be 1 in most cases. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used fewer registers. Needed to + * explicitly mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +#define alloca _alloca + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +/* Some magic to quell warnings and keep slp_switch() from crashing when built + with VC90. Disable global optimizations, and the warning: frame pointer + register 'ebp' modified by inline assembly code. + + We used to just disable global optimizations ("g") but upstream stackless + Python, as well as stackman, turn off all optimizations. + +References: +https://github.com/stackless-dev/stackman/blob/dbc72fe5207a2055e658c819fdeab9731dee78b9/stackman/platforms/switch_x86_msvc.h +https://github.com/stackless-dev/stackless/blob/main-slp/Stackless/platf/switch_x86_msvc.h +*/ +#define WIN32_LEAN_AND_MEAN +#include <windows.h> + +#pragma optimize("", off) /* so that autos are stored on the stack */ +#pragma warning(disable:4731) +#pragma warning(disable:4733) /* disable warning about modifying FS[0] */ + +/** + * Most modern compilers and environments handle C++ exceptions without any + * special help from us. MSVC on 32-bit windows is an exception. There, C++ + * exceptions are dealt with using Windows' Structured Exception Handling + * (SEH). + * + * SEH is implemented as a singly linked list of nodes. The + * head of this list is stored in the Thread Information Block, which itself + * is pointed to from the FS register.
It's the first field in the structure, + * or offset 0, so we can access it using assembly FS:[0], or the compiler + * intrinsics and field offset information from the headers (as we do below). + * Somewhat unusually, the tail of the list doesn't have prev == NULL, it has + * prev == 0xFFFFFFFF. + * + * SEH was designed for C, and traditionally uses the MSVC compiler + * intrinsics __try{}/__except{}. It is also utilized for C++ exceptions by + * MSVC; there, every throw of a C++ exception raises a SEH error with the + * ExceptionCode 0xE06D7363; the SEH handler list is then traversed to + * deal with the exception. + * + * If the SEH list is corrupt, then when a C++ exception is thrown the program + * will abruptly exit with exit code 1. This does not use std::terminate(), so + * std::set_terminate() is useless to debug this. + * + * The SEH list is closely tied to the call stack; entering a function that + * uses __try{} or most C++ functions will push a new handler onto the front + * of the list. Returning from the function will remove the handler. Saving + * and restoring the head node of the SEH list (FS:[0]) per-greenlet is NOT + * ENOUGH to make SEH or exceptions work. + * + * Stack switching breaks SEH because the call stack no longer necessarily + * matches the SEH list. For example, given greenlet A that switches to + * greenlet B, at the moment of entering greenlet B, we will have any SEH + * handlers from greenlet A on the SEH list; greenlet B can then add its own + * handlers to the SEH list. When greenlet B switches back to greenlet A, + * greenlet B's handlers would still be on the SEH stack, but when switch() + * returns control to greenlet A, we have replaced the contents of the stack + * in memory, so all the addresses that greenlet B added to the SEH list are now + * invalid: part of the call stack has been unwound, but the SEH list was out + * of sync with the call stack. The net effect is that exception handling + * stops working. + * + * Thus, when switching greenlets, we need to be sure that the SEH list + * matches the effective call stack, "cutting out" any handlers that were + * pushed by the greenlet that switched out and which are no longer valid. + * + * The easiest way to do this is to capture the SEH list at the time the main + * greenlet for a thread is created, and, when initially starting a greenlet, + * start a new SEH list for it, which contains nothing but the handler + * established for the new greenlet itself, with the tail being the handlers + * for the main greenlet. If we then save and restore the SEH per-greenlet, + * they won't interfere with each other's SEH lists. (No greenlet can unwind + * the call stack past the handlers established by the main greenlet). + * + * By observation, a new thread starts with three SEH handlers on the list. By + * the time we get around to creating the main greenlet, though, there can be + * many more, established by transient calls that lead to the creation of the + * main greenlet. Therefore, 3 is a magic constant telling us when to perform + * the initial slice. + * + * All of this can be debugged using a vectored exception handler, which + * operates independently of the SEH handler list, and is called first. + * Walking the SEH list at key points can also be helpful.
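The practical stake of all this SEH bookkeeping is an ordinary-looking Python guarantee: exceptions keep flowing across stack switches, so an exception that escapes a greenlet propagates to its parent instead of corrupting the process. A minimal sketch of that contract (public API only):

```python
# An exception raised on a child greenlet's stack crosses the switch
# machinery and surfaces in the parent, like any other Python exception.
from greenlet import greenlet

def boom():
    raise ValueError("raised on the child's stack")

child = greenlet(boom)
try:
    child.switch()                 # exception crosses the stack switch
except ValueError as e:
    print("caught in parent:", e)
print(child.dead)                  # -> True; the greenlet is finished
```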
+ * + * References: + * https://en.wikipedia.org/wiki/Win32_Thread_Information_Block + * https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 + * https://docs.microsoft.com/en-us/cpp/cpp/try-except-statement?view=msvc-160 + * https://docs.microsoft.com/en-us/cpp/cpp/structured-exception-handling-c-cpp?view=msvc-160 + * https://docs.microsoft.com/en-us/windows/win32/debug/structured-exception-handling + * https://docs.microsoft.com/en-us/windows/win32/debug/using-a-vectored-exception-handler + * https://bytepointer.com/resources/pietrek_crash_course_depths_of_win32_seh.htm + */ +#define GREENLET_NEEDS_EXCEPTION_STATE_SAVED + + +typedef struct _GExceptionRegistration { + struct _GExceptionRegistration* prev; + void* handler_f; +} GExceptionRegistration; + +static void +slp_set_exception_state(const void *const seh_state) +{ + // Because the stack from which we do this is ALSO a handler, and + // that one we want to keep, we need to relink the current SEH handler + // frame to point to this one, cutting out the middle men, as it were. + // + // Entering a try block doesn't change the SEH frame, but entering a + // function containing a try block does. + GExceptionRegistration* current_seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + current_seh_state->prev = (GExceptionRegistration*)seh_state; +} + + +static GExceptionRegistration* +x86_slp_get_third_oldest_handler() +{ + GExceptionRegistration* a = NULL; /* Closest to the top */ + GExceptionRegistration* b = NULL; /* second */ + GExceptionRegistration* c = NULL; + GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + a = b = c = seh_state; + + while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { + if ((void*)seh_state->prev < (void*)100) { + fprintf(stderr, "\tERROR: Broken SEH chain.\n"); + return NULL; + } + a = b; + b = c; + c = seh_state; + + seh_state = seh_state->prev; + } + return a ? a : (b ? b : c); +} + + +static void* +slp_get_exception_state() +{ + // XXX: There appear to be three SEH handlers on the stack already at the + // start of the thread. Is that a guarantee? Almost certainly not. Yet in + // all observed cases it has been three. This is consistent with + // faulthandler off or on, and optimizations off or on. It may not be + // consistent with other operating system versions, though: we only have + // CI on one or two versions (don't ask what they are). + // In theory we could capture the number of handlers on the chain when + // PyInit__greenlet is called: there are probably only the default + // handlers at that point (unless we're embedded and people have used + // __try/__except or a C++ handler)? + return x86_slp_get_third_oldest_handler(); +} + +static int +slp_switch(void) +{ + /* MASM syntax is typically reversed from other assemblers: + it is usually <instruction> <destination>, <source>. + */ + int *stackref, stsizediff; + /* store the structured exception state for this stack */ + DWORD seh_state = __readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + __asm mov stackref, esp; + /* modify EBX, ESI and EDI in order to get them preserved */ + __asm mov ebx, ebx; + __asm xchg esi, edi; + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm { + mov eax, stsizediff + add esp, eax + add ebp, eax + } + SLP_RESTORE_STATE(); + } + __writefsdword(FIELD_OFFSET(NT_TIB, ExceptionList), seh_state); + return 0; +} + +/* re-enable ebp warning and global optimizations.
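The debug helpers that follow (x86_slp_show_seh_chain and the vectored exception handler) dump switching state from the C side. If matching visibility is needed from Python while chasing one of these problems, the public hook is greenlet.settrace, which reports every switch and throw; the trace format below is illustrative:

```python
# greenlet.settrace installs a callback invoked on every "switch" and
# "throw"; args is the (origin, target) pair of greenlets involved.
import greenlet

def trace(event, args):
    if event in ("switch", "throw"):
        origin, target = args
        print(f"{event}: {origin!r} -> {target!r}")

previous = greenlet.settrace(trace)    # returns the handler it replaces
try:
    g = greenlet.greenlet(lambda: None)
    g.switch()                         # logs the switch out and back
finally:
    greenlet.settrace(previous)        # restore the prior tracer
```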
*/ +#pragma optimize("", on) +#pragma warning(default:4731) +#pragma warning(default:4733) /* disable warning about modifying FS[0] */ + + +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +#define STACKLESS_SPY + +#ifdef GREENLET_DEBUG + +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + int stackbase = ((int)&stackref) & 0xfffff000; + return (int)p >= stackbase && (int)p < stackbase + 0x00100000; +} + +static void +x86_slp_show_seh_chain() +{ + GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + fprintf(stderr, "====== SEH Chain ======\n"); + while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { + fprintf(stderr, "\tSEH_chain addr: %p handler: %p prev: %p\n", + seh_state, + seh_state->handler_f, seh_state->prev); + if ((void*)seh_state->prev < (void*)100) { + fprintf(stderr, "\tERROR: Broken chain.\n"); + break; + } + seh_state = seh_state->prev; + } + fprintf(stderr, "====== End SEH Chain ======\n"); + fflush(NULL); + return; +} + +//addVectoredExceptionHandler constants: +//CALL_FIRST means call this exception handler first; +//CALL_LAST means call this exception handler last +#define CALL_FIRST 1 +#define CALL_LAST 0 + +LONG WINAPI +GreenletVectorHandler(PEXCEPTION_POINTERS ExceptionInfo) +{ + // We get one of these for every C++ exception, with code + // E06D7363 + // This is a special value that means "C++ exception from MSVC" + // https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 + // + // Install in the module init function with: + // AddVectoredExceptionHandler(CALL_FIRST, GreenletVectorHandler); + PEXCEPTION_RECORD ExceptionRecord = ExceptionInfo->ExceptionRecord; + + fprintf(stderr, + "GOT VECTORED EXCEPTION:\n" + "\tExceptionCode : %p\n" + "\tExceptionFlags : %p\n" + "\tExceptionAddr : %p\n" + "\tNumberparams : %ld\n", + ExceptionRecord->ExceptionCode, + ExceptionRecord->ExceptionFlags, + ExceptionRecord->ExceptionAddress, + ExceptionRecord->NumberParameters + ); + if (ExceptionRecord->ExceptionFlags & 1) { + fprintf(stderr, "\t\tEH_NONCONTINUABLE\n" ); + } + if (ExceptionRecord->ExceptionFlags & 2) { + fprintf(stderr, "\t\tEH_UNWINDING\n" ); + } + if (ExceptionRecord->ExceptionFlags & 4) { + fprintf(stderr, "\t\tEH_EXIT_UNWIND\n" ); + } + if (ExceptionRecord->ExceptionFlags & 8) { + fprintf(stderr, "\t\tEH_STACK_INVALID\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x10) { + fprintf(stderr, "\t\tEH_NESTED_CALL\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x20) { + fprintf(stderr, "\t\tEH_TARGET_UNWIND\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x40) { + fprintf(stderr, "\t\tEH_COLLIDED_UNWIND\n" ); + } + fprintf(stderr, "\n"); + fflush(NULL); + for(DWORD i = 0; i < ExceptionRecord->NumberParameters; i++) { + fprintf(stderr, "\t\t\tParam %ld: %lX\n", i, ExceptionRecord->ExceptionInformation[i]); + } + + if (ExceptionRecord->NumberParameters == 3) { + fprintf(stderr, "\tAbout to traverse SEH chain\n"); + // C++ Exception records have 3 params. 
+ x86_slp_show_seh_chain(); + } + + return EXCEPTION_CONTINUE_SEARCH; +} + + + + +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_unix.h b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_unix.h new file mode 100644 index 0000000..493fa6b --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/platform/switch_x86_unix.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 19-Aug-11 Alexey Borzenkov + * Correctly save ebp, ebx and cw + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'ebx' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for spark + * 31-Avr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samual M. Rushing + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) +# define ATTR_NOCLONE __attribute__((noclone)) +#else +# define ATTR_NOCLONE +#endif + +static int +slp_switch(void) +{ + int err; +#ifdef _WIN32 + void *seh; +#endif + void *ebp, *ebx; + unsigned short cw; + int *stackref, stsizediff; + __asm__ volatile ("" : : : "esi", "edi"); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); +#ifdef _WIN32 + __asm__ volatile ( + "movl %%fs:0x0, %%eax\n" + "movl %%eax, %0\n" + : "=m" (seh) + : + : "eax"); +#endif + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + } +#ifdef _WIN32 + __asm__ volatile ( + "movl %0, %%eax\n" + "movl %%eax, %%fs:0x0\n" + : + : "m" (seh) + : "eax"); +#endif + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : "esi", "edi"); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
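These "self-inspection" trailers were never fleshed out on the C side, but the test package added later in this same diff does its inspection from Python instead, counting live greenlets by walking the garbage collector's tracked objects. A standalone sketch of the same technique (it mirrors count_objects()/count_greenlets() in greenlet/tests/__init__.py below):

```python
# Count every live greenlet (including subclasses) the GC tracks --
# the same approach the vendored test suite uses for leak checking.
import gc
from greenlet import greenlet

def count_live_greenlets():
    gc.collect()  # drop garbage first so only live objects are counted
    return sum(1 for obj in gc.get_objects() if isinstance(obj, greenlet))

g = greenlet(lambda: None)
print(count_live_greenlets())  # counts g plus any main greenlets alive
```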
+ */ diff --git a/venv/lib/python3.12/site-packages/greenlet/slp_platformselect.h b/venv/lib/python3.12/site-packages/greenlet/slp_platformselect.h new file mode 100644 index 0000000..d9b7d0a --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/slp_platformselect.h @@ -0,0 +1,77 @@ +/* + * Platform Selection for Stackless Python + */ +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER) +# include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */ +#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__) +# include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */ +#elif defined(MS_WIN64) && defined(_M_ARM64) +# include "platform/switch_arm64_msvc.h" /* MS Visual Studio on ARM64 */ +#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__) +# include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */ +#elif defined(__GNUC__) && defined(__amd64__) +# include "platform/switch_amd64_unix.h" /* gcc on amd64 */ +#elif defined(__GNUC__) && defined(__i386__) +# include "platform/switch_x86_unix.h" /* gcc on X86 */ +#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__)) +# include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__)) +# include "platform/switch_ppc_linux.h" /* gcc on PowerPC */ +#elif defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) +# include "platform/switch_ppc_macosx.h" /* Apple MacOS X on 32-bit PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX) +# include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX) +# include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc__) && defined(__NetBSD__) +# include "platform/switch_ppc_unix.h" /* gcc on NetBSD/powerpc */ +#elif defined(__GNUC__) && defined(sparc) +# include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */ +#elif defined(__GNUC__) && defined(__sparc__) +# include "platform/switch_sparc_sun_gcc.h" /* NetBSD sparc with gcc */ +#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun) +# include "platform/switch_sparc_sun_gcc.h" /* SunStudio on sparc */ +#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun) +# include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun) +# include "platform/switch_x86_unix.h" /* SunStudio on x86 */ +#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__) +# include "platform/switch_s390_unix.h" /* Linux/S390 */ +#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__) +# include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */ +#elif defined(__GNUC__) && defined(__arm__) +# ifdef __APPLE__ +# include <TargetConditionals.h> +# endif +# if TARGET_OS_IPHONE +# include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */ +# else +# include "platform/switch_arm32_gcc.h" /* gcc using arm32 */ +# endif +#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__) +# include "platform/switch_mips_unix.h" /* Linux/MIPS */ +#elif defined(__GNUC__) && defined(__aarch64__) +# include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */ +#elif defined(__GNUC__) && defined(__mc68000__) +# include "platform/switch_m68k_gcc.h" /* gcc on m68k */
+#elif defined(__GNUC__) && defined(__csky__) +# include "platform/switch_csky_gcc.h" /* gcc on csky */ +#elif defined(__GNUC__) && defined(__riscv) +# include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ +#elif defined(__GNUC__) && defined(__alpha__) +# include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ +#elif defined(MS_WIN32) && defined(__llvm__) && defined(__aarch64__) +# include "platform/switch_aarch64_gcc.h" /* LLVM Aarch64 ABI for Windows */ +#elif defined(__GNUC__) && defined(__loongarch64) && defined(__linux__) +# include "platform/switch_loongarch64_linux.h" /* LoongArch64 */ +#elif defined(__GNUC__) && defined(__sh__) +# include "platform/switch_sh_gcc.h" /* SuperH */ +#endif + +#ifdef __cplusplus +}; +#endif diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__init__.py b/venv/lib/python3.12/site-packages/greenlet/tests/__init__.py new file mode 100644 index 0000000..1861360 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/__init__.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +""" +Tests for greenlet. + +""" +import os +import sys +import sysconfig +import unittest + +from gc import collect +from gc import get_objects +from threading import active_count as active_thread_count +from time import sleep +from time import time + +import psutil + +from greenlet import greenlet as RawGreenlet +from greenlet import getcurrent + +from greenlet._greenlet import get_pending_cleanup_count +from greenlet._greenlet import get_total_main_greenlets + +from . import leakcheck + +PY312 = sys.version_info[:2] >= (3, 12) +PY313 = sys.version_info[:2] >= (3, 13) +# XXX: First tested on 3.14a7. Revisit all uses of this on later versions to ensure they +# are still valid. +PY314 = sys.version_info[:2] >= (3, 14) +
+WIN = sys.platform.startswith("win") +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') + +# Is the current interpreter free-threaded? Note that this +# isn't the same as whether the GIL is enabled, this is the build-time +# value. Certain CPython details, like the garbage collector, +# work very differently on potentially-free-threaded builds than +# standard builds. +RUNNING_ON_FREETHREAD_BUILD = bool(sysconfig.get_config_var("Py_GIL_DISABLED")) + +class TestCaseMetaClass(type): + # wrap each test method with + # a) leak checks + def __new__(cls, classname, bases, classDict): + # pylint and pep8 fight over what this should be called (mcs or cls). + # pylint gets it right, but we can't scope disable pep8, so we go with + # its convention. + # pylint: disable=bad-mcs-classmethod-argument + check_totalrefcount = True + + # Python 3: must copy, we mutate the classDict. Interestingly enough, + # it doesn't actually error out, but under 3.6 we wind up wrapping + # and re-wrapping the same items over and over and over.
+ for key, value in list(classDict.items()): + if key.startswith('test') and callable(value): + classDict.pop(key) + if check_totalrefcount: + value = leakcheck.wrap_refcount(value) + classDict[key] = value + return type.__new__(cls, classname, bases, classDict) + + +class TestCase(unittest.TestCase, metaclass=TestCaseMetaClass): + + cleanup_attempt_sleep_duration = 0.001 + cleanup_max_sleep_seconds = 1 + + def wait_for_pending_cleanups(self, + initial_active_threads=None, + initial_main_greenlets=None): + initial_active_threads = initial_active_threads or self.threads_before_test + initial_main_greenlets = initial_main_greenlets or self.main_greenlets_before_test + sleep_time = self.cleanup_attempt_sleep_duration + # NOTE: This is racy! A Python-level thread object may be dead + # and gone, but the C thread may not yet have fired its + # destructors and added to the queue. There's no particular + # way to know that's about to happen. We try to watch the + # Python threads to make sure they, at least, have gone away. + # Counting the main greenlets, which we can easily do deterministically, + # also helps. + + # Always sleep at least once to let other threads run + sleep(sleep_time) + quit_after = time() + self.cleanup_max_sleep_seconds + # TODO: We could add an API that calls us back when a particular main greenlet is deleted? + # It would have to drop the GIL + while ( + get_pending_cleanup_count() + or active_thread_count() > initial_active_threads + or (not self.expect_greenlet_leak + and get_total_main_greenlets() > initial_main_greenlets)): + sleep(sleep_time) + if time() > quit_after: + print("Time limit exceeded.") + print("Threads: Waiting for only", initial_active_threads, + "-->", active_thread_count()) + print("MGlets : Waiting for only", initial_main_greenlets, + "-->", get_total_main_greenlets()) + break + collect() + + def count_objects(self, kind=list, exact_kind=True): + # pylint:disable=unidiomatic-typecheck + # Collect the garbage. + for _ in range(3): + collect() + if exact_kind: + return sum( + 1 + for x in get_objects() + if type(x) is kind + ) + # instances + return sum( + 1 + for x in get_objects() + if isinstance(x, kind) + ) + + greenlets_before_test = 0 + threads_before_test = 0 + main_greenlets_before_test = 0 + expect_greenlet_leak = False + + def count_greenlets(self): + """ + Find all the greenlets and subclasses tracked by the GC. + """ + return self.count_objects(RawGreenlet, False) + + def setUp(self): + # Ensure the main greenlet exists, otherwise the first test + # gets a false positive leak + super().setUp() + getcurrent() + self.threads_before_test = active_thread_count() + self.main_greenlets_before_test = get_total_main_greenlets() + self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) + self.greenlets_before_test = self.count_greenlets() + + def tearDown(self): + if getattr(self, 'skipTearDown', False): + return + + self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) + super().tearDown() + + def get_expected_returncodes_for_aborted_process(self): + import signal + # The child should be aborted in an unusual way. 
On POSIX + # platforms, this is done with abort() and signal.SIGABRT, + # which is reflected in a negative return value; however, on + # Windows, even though we observe the child print "Fatal + # Python error: Aborted" and in older versions of the C + # runtime "This application has requested the Runtime to + # terminate it in an unusual way," it always has an exit code + # of 3. This is interesting because 3 is the error code for + # ERROR_PATH_NOT_FOUND; BUT: the C runtime abort() function + # also uses this code. + # + # If we link to the static C library on Windows, the error + # code changes to '0xc0000409' (hex(3221226505)), which + # apparently is STATUS_STACK_BUFFER_OVERRUN; but "What this + # means is that nowadays when you get a + # STATUS_STACK_BUFFER_OVERRUN, it doesn’t actually mean that + # there is a stack buffer overrun. It just means that the + # application decided to terminate itself with great haste." + # + # + # On windows, we've also seen '0xc0000005' (hex(3221225477)). + # That's "Access Violation" + # + # See + # https://devblogs.microsoft.com/oldnewthing/20110519-00/?p=10623 + # and + # https://docs.microsoft.com/en-us/previous-versions/k089yyh0(v=vs.140)?redirectedfrom=MSDN + # and + # https://devblogs.microsoft.com/oldnewthing/20190108-00/?p=100655 + expected_exit = ( + -signal.SIGABRT, + # But beginning on Python 3.11, the faulthandler + # that prints the C backtraces sometimes segfaults after + # reporting the exception but before printing the stack. + # This has only been seen on linux/gcc. + -signal.SIGSEGV, + ) if not WIN else ( + 3, + 0xc0000409, + 0xc0000005, + ) + return expected_exit + + def get_process_uss(self): + """ + Return the current process's USS in bytes. + + uss is available on Linux, macOS, Windows. Also known as + "Unique Set Size", this is the memory which is unique to a + process and which would be freed if the process was terminated + right now. + + If this is not supported by ``psutil``, this raises the + :exc:`unittest.SkipTest` exception. + """ + try: + return psutil.Process().memory_full_info().uss + except AttributeError as e: + raise unittest.SkipTest("uss not supported") from e + + def run_script(self, script_name, show_output=True): + import subprocess + script = os.path.join( + os.path.dirname(__file__), + script_name, + ) + + try: + return subprocess.check_output([sys.executable, script], + encoding='utf-8', + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as ex: + if show_output: + print('-----') + print('Failed to run script', script) + print('~~~~~') + print(ex.output) + print('------') + raise + + + def assertScriptRaises(self, script_name, exitcodes=None): + import subprocess + with self.assertRaises(subprocess.CalledProcessError) as exc: + output = self.run_script(script_name, show_output=False) + __traceback_info__ = output + # We're going to fail the assertion if we get here, at least + # preserve the output in the traceback. 
+ + if exitcodes is None: + exitcodes = self.get_expected_returncodes_for_aborted_process() + self.assertIn(exc.exception.returncode, exitcodes) + return exc.exception diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4768374 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc new file mode 100644 index 0000000..82b2f89 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc new file mode 100644 index 0000000..5af60cd Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc new file mode 100644 index 0000000..a887339 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc new file mode 100644 index 0000000..08afbef Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc new file mode 100644 index 0000000..9379c2d Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc new file mode 100644 index 0000000..b799b39 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc new file mode 100644 index 0000000..dbbf975 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc new file mode 100644 index 
0000000..d020dc9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc new file mode 100644 index 0000000..3e094ae Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-312.pyc new file mode 100644 index 0000000..5ebe672 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc new file mode 100644 index 0000000..edef46e Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_gc.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_gc.cpython-312.pyc new file mode 100644 index 0000000..696ad50 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_gc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator.cpython-312.pyc new file mode 100644 index 0000000..8ac45ea Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc new file mode 100644 index 0000000..4a698ee Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc new file mode 100644 index 0000000..e7083d9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc new file mode 100644 index 0000000..d7ad69f Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-312.pyc new file mode 100644 index 0000000..4efbeb4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc new file mode 100644 index 0000000..3e47f86 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc new file mode 100644 index 0000000..9d20af6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_throw.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-312.pyc new file mode 100644 index 0000000..15bc89b Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_version.cpython-312.pyc new file mode 100644 index 0000000..210354d Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-312.pyc b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-312.pyc new file mode 100644 index 0000000..f98a13f Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.c b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.c new file mode 100644 index 0000000..05e81c0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.c @@ -0,0 +1,231 @@ +/* This is a set of functions used by test_extension_interface.py to test the + * Greenlet C API. 
+ */ + +#include "../greenlet.h" + +#ifndef Py_RETURN_NONE +# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None +#endif + +#define TEST_MODULE_NAME "_test_extension" + +static PyObject* +test_switch(PyObject* self, PyObject* greenlet) +{ + PyObject* result = NULL; + + if (greenlet == NULL || !PyGreenlet_Check(greenlet)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_INCREF(result); + return result; +} + +static PyObject* +test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* g = NULL; + PyObject* result = NULL; + + PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g); + + if (g == NULL || !PyGreenlet_Check(g)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch(g, NULL, kwargs); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_XINCREF(result); + return result; +} + +static PyObject* +test_getcurrent(PyObject* self) +{ + PyGreenlet* g = PyGreenlet_GetCurrent(); + if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) { + PyErr_SetString(PyExc_AssertionError, + "getcurrent() returned an invalid greenlet"); + Py_XDECREF(g); + return NULL; + } + Py_DECREF(g); + Py_RETURN_NONE; +} + +static PyObject* +test_setparent(PyObject* self, PyObject* arg) +{ + PyGreenlet* current; + PyGreenlet* greenlet = NULL; + + if (arg == NULL || !PyGreenlet_Check(arg)) { + PyErr_BadArgument(); + return NULL; + } + if ((current = PyGreenlet_GetCurrent()) == NULL) { + return NULL; + } + greenlet = (PyGreenlet*)arg; + if (PyGreenlet_SetParent(greenlet, current)) { + Py_DECREF(current); + return NULL; + } + Py_DECREF(current); + if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_new_greenlet(PyObject* self, PyObject* callable) +{ + PyObject* result = NULL; + PyGreenlet* greenlet = PyGreenlet_New(callable, NULL); + + if (!greenlet) { + return NULL; + } + + result = PyGreenlet_Switch(greenlet, NULL, NULL); + Py_CLEAR(greenlet); + if (result == NULL) { + return NULL; + } + + Py_INCREF(result); + return result; +} + +static PyObject* +test_raise_dead_greenlet(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception."); + return NULL; +} + +static PyObject* +test_raise_greenlet_error(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception"); + return NULL; +} + +static PyObject* +test_throw(PyObject* self, PyGreenlet* g) +{ + const char msg[] = "take that sucka!"; + PyObject* msg_obj = Py_BuildValue("s", msg); + PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL); + Py_DECREF(msg_obj); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_throw_exact(PyObject* self, PyObject* args) +{ + PyGreenlet* g = NULL; + PyObject* typ = NULL; + PyObject* val = NULL; + PyObject* tb = NULL; + + if (!PyArg_ParseTuple(args, "OOOO:throw", &g, &typ, &val, &tb)) { + return NULL; + } + + PyGreenlet_Throw(g, typ, val, tb); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_switch", + (PyCFunction)test_switch, + METH_O, + "Switch to the provided greenlet sending provided arguments, and \n" + "return the 
results."}, + {"test_switch_kwargs", + (PyCFunction)test_switch_kwargs, + METH_VARARGS | METH_KEYWORDS, + "Switch to the provided greenlet sending the provided keyword args."}, + {"test_getcurrent", + (PyCFunction)test_getcurrent, + METH_NOARGS, + "Test PyGreenlet_GetCurrent()"}, + {"test_setparent", + (PyCFunction)test_setparent, + METH_O, + "Se the parent of the provided greenlet and switch to it."}, + {"test_new_greenlet", + (PyCFunction)test_new_greenlet, + METH_O, + "Test PyGreenlet_New()"}, + {"test_raise_dead_greenlet", + (PyCFunction)test_raise_dead_greenlet, + METH_NOARGS, + "Just raise greenlet.GreenletExit"}, + {"test_raise_greenlet_error", + (PyCFunction)test_raise_greenlet_error, + METH_NOARGS, + "Just raise greenlet.error"}, + {"test_throw", + (PyCFunction)test_throw, + METH_O, + "Throw a ValueError at the provided greenlet"}, + {"test_throw_exact", + (PyCFunction)test_throw_exact, + METH_VARARGS, + "Throw exactly the arguments given at the provided greenlet"}, + {NULL, NULL, 0, NULL} +}; + + +#define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + TEST_MODULE_NAME, + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension(void) +{ + PyObject* module = NULL; + module = PyModule_Create(&moduledef); + + if (module == NULL) { + return NULL; + } + + PyGreenlet_Import(); + return module; +} diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..da523f4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpp b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpp new file mode 100644 index 0000000..5cbe6a7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpp @@ -0,0 +1,226 @@ +/* This is a set of functions used to test C++ exceptions are not + * broken during greenlet switches + */ + +#include "../greenlet.h" +#include "../greenlet_compiler_compat.hpp" +#include +#include + +struct exception_t { + int depth; + exception_t(int depth) : depth(depth) {} +}; + +/* Functions are called via pointers to prevent inlining */ +static void (*p_test_exception_throw_nonstd)(int depth); +static void (*p_test_exception_throw_std)(); +static PyObject* (*p_test_exception_switch_recurse)(int depth, int left); + +static void +test_exception_throw_nonstd(int depth) +{ + throw exception_t(depth); +} + +static void +test_exception_throw_std() +{ + throw std::runtime_error("Thrown from an extension."); +} + +static PyObject* +test_exception_switch_recurse(int depth, int left) +{ + if (left > 0) { + return p_test_exception_switch_recurse(depth, left - 1); + } + + PyObject* result = NULL; + PyGreenlet* self = PyGreenlet_GetCurrent(); + if (self == NULL) + return NULL; + + try { + if (PyGreenlet_Switch(PyGreenlet_GET_PARENT(self), NULL, NULL) == NULL) { + Py_DECREF(self); + return NULL; + } + p_test_exception_throw_nonstd(depth); + PyErr_SetString(PyExc_RuntimeError, + "throwing C++ exception didn't work"); + } + catch (const exception_t& e) { + if (e.depth != depth) + PyErr_SetString(PyExc_AssertionError, "depth mismatch"); + else + result = PyLong_FromLong(depth); + } + catch (...) 
{ + PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception"); + } + + Py_DECREF(self); + return result; +} + +/* test_exception_switch(int depth) + * - recurses depth times + * - switches to parent inside try/catch block + * - throws an exception that (expected to be caught in the same function) + * - verifies depth matches (exceptions shouldn't be caught in other greenlets) + */ +static PyObject* +test_exception_switch(PyObject* UNUSED(self), PyObject* args) +{ + int depth; + if (!PyArg_ParseTuple(args, "i", &depth)) + return NULL; + return p_test_exception_switch_recurse(depth, depth); +} + + +static PyObject* +py_test_exception_throw_nonstd(PyObject* self, PyObject* args) +{ + if (!PyArg_ParseTuple(args, "")) + return NULL; + p_test_exception_throw_nonstd(0); + PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); + return NULL; +} + +static PyObject* +py_test_exception_throw_std(PyObject* self, PyObject* args) +{ + if (!PyArg_ParseTuple(args, "")) + return NULL; + p_test_exception_throw_std(); + PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); + return NULL; +} + +static PyObject* +py_test_call(PyObject* self, PyObject* arg) +{ + PyObject* noargs = PyTuple_New(0); + PyObject* ret = PyObject_Call(arg, noargs, nullptr); + Py_DECREF(noargs); + return ret; +} + + + +/* test_exception_switch_and_do_in_g2(g2func) + * - creates new greenlet g2 to run g2func + * - switches to g2 inside try/catch block + * - verifies that no exception has been caught + * + * it is used together with test_exception_throw to verify that unhandled + * exceptions thrown in one greenlet do not propagate to other greenlet nor + * segfault the process. + */ +static PyObject* +test_exception_switch_and_do_in_g2(PyObject* self, PyObject* args) +{ + PyObject* g2func = NULL; + PyObject* result = NULL; + + if (!PyArg_ParseTuple(args, "O", &g2func)) + return NULL; + PyGreenlet* g2 = PyGreenlet_New(g2func, NULL); + if (!g2) { + return NULL; + } + + try { + result = PyGreenlet_Switch(g2, NULL, NULL); + if (!result) { + return NULL; + } + } + catch (const exception_t& e) { + /* if we are here the memory can be already corrupted and the program + * might crash before below py-level exception might become printed. + * -> print something to stderr to make it clear that we had entered + * this catch block. + * See comments in inner_bootstrap() + */ +#if defined(WIN32) || defined(_WIN32) + fprintf(stderr, "C++ exception unexpectedly caught in g1\n"); + PyErr_SetString(PyExc_AssertionError, "C++ exception unexpectedly caught in g1"); + Py_XDECREF(result); + return NULL; +#else + throw; +#endif + } + + Py_XDECREF(result); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_exception_switch", + (PyCFunction)&test_exception_switch, + METH_VARARGS, + "Switches to parent twice, to test exception handling and greenlet " + "switching."}, + {"test_exception_switch_and_do_in_g2", + (PyCFunction)&test_exception_switch_and_do_in_g2, + METH_VARARGS, + "Creates new greenlet g2 to run g2func and switches to it inside try/catch " + "block. Used together with test_exception_throw to verify that unhandled " + "C++ exceptions thrown in a greenlet doe not corrupt memory."}, + {"test_exception_throw_nonstd", + (PyCFunction)&py_test_exception_throw_nonstd, + METH_VARARGS, + "Throws non-standard C++ exception. Calling this function directly should abort the process." 
+ }, + {"test_exception_throw_std", + (PyCFunction)&py_test_exception_throw_std, + METH_VARARGS, + "Throws standard C++ exception. Calling this function directly should abort the process." + }, + {"test_call", + (PyCFunction)&py_test_call, + METH_O, + "Call the given callable. Unlike calling it directly, this creates a " + "new C-level stack frame, which may be helpful in testing." + }, + {NULL, NULL, 0, NULL} +}; + + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + "greenlet.tests._test_extension_cpp", + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension_cpp(void) +{ + PyObject* module = NULL; + + module = PyModule_Create(&moduledef); + + if (module == NULL) { + return NULL; + } + + PyGreenlet_Import(); + if (_PyGreenlet_API == NULL) { + return NULL; + } + + p_test_exception_throw_nonstd = test_exception_throw_nonstd; + p_test_exception_throw_std = test_exception_throw_std; + p_test_exception_switch_recurse = test_exception_switch_recurse; + + return module; +} diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..d5513cc Binary files /dev/null and b/venv/lib/python3.12/site-packages/greenlet/tests/_test_extension_cpp.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_clearing_run_switches.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_clearing_run_switches.py new file mode 100644 index 0000000..6dd1492 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_clearing_run_switches.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +If we have a run callable passed to the constructor or set as an +attribute, but we don't actually use that (because ``__getattribute__`` +or the like interferes), then when we clear callable before beginning +to run, there's an opportunity for Python code to run. + +""" +import greenlet + +g = None +main = greenlet.getcurrent() + +results = [] + +class RunCallable: + + def __del__(self): + results.append(('RunCallable', '__del__')) + main.switch('from RunCallable') + + +class G(greenlet.greenlet): + + def __getattribute__(self, name): + if name == 'run': + results.append(('G.__getattribute__', 'run')) + return run_func + return object.__getattribute__(self, name) + + +def run_func(): + results.append(('run_func', 'enter')) + + +g = G(RunCallable()) +# Try to start G. It will get to the point where it deletes +# its run callable C++ variable in inner_bootstrap. That triggers +# the __del__ method, which switches back to main before g +# actually even starts running. +x = g.switch() +results.append(('main: g.switch()', x)) +# In the C++ code, this results in g->g_switch() appearing to return, even though +# it has yet to run. +print('In main with', x, flush=True) +g.switch() +print('RESULTS', results) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_cpp_exception.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_cpp_exception.py new file mode 100644 index 0000000..fa4dc2e --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_cpp_exception.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Helper for testing a C++ exception throw aborts the process. + +Takes one argument, the name of the function in :mod:`_test_extension_cpp` to call. 
+""" +import sys +import greenlet +from greenlet.tests import _test_extension_cpp +print('fail_cpp_exception is running') + +def run_unhandled_exception_in_greenlet_aborts(): + def _(): + _test_extension_cpp.test_exception_switch_and_do_in_g2( + _test_extension_cpp.test_exception_throw_nonstd + ) + g1 = greenlet.greenlet(_) + g1.switch() + + +func_name = sys.argv[1] +try: + func = getattr(_test_extension_cpp, func_name) +except AttributeError: + if func_name == run_unhandled_exception_in_greenlet_aborts.__name__: + func = run_unhandled_exception_in_greenlet_aborts + elif func_name == 'run_as_greenlet_target': + g = greenlet.greenlet(_test_extension_cpp.test_exception_throw_std) + func = g.switch + else: + raise +print('raising', func, flush=True) +func() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_initialstub_already_started.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_initialstub_already_started.py new file mode 100644 index 0000000..c1a44ef --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_initialstub_already_started.py @@ -0,0 +1,78 @@ +""" +Testing initialstub throwing an already started exception. +""" + +import greenlet + +a = None +b = None +c = None +main = greenlet.getcurrent() + +# If we switch into a dead greenlet, +# we go looking for its parents. +# if a parent is not yet started, we start it. + +results = [] + +def a_run(*args): + #results.append('A') + results.append(('Begin A', args)) + + +def c_run(): + results.append('Begin C') + b.switch('From C') + results.append('C done') + +class A(greenlet.greenlet): pass + +class B(greenlet.greenlet): + doing_it = False + def __getattribute__(self, name): + if name == 'run' and not self.doing_it: + assert greenlet.getcurrent() is c + self.doing_it = True + results.append('Switch to b from B.__getattribute__ in ' + + type(greenlet.getcurrent()).__name__) + b.switch() + results.append('B.__getattribute__ back from main in ' + + type(greenlet.getcurrent()).__name__) + if name == 'run': + name = '_B_run' + return object.__getattribute__(self, name) + + def _B_run(self, *arg): + results.append(('Begin B', arg)) + results.append('_B_run switching to main') + main.switch('From B') + +class C(greenlet.greenlet): + pass +a = A(a_run) +b = B(parent=a) +c = C(c_run, b) + +# Start a child; while running, it will start B, +# but starting B will ALSO start B. +result = c.switch() +results.append(('main from c', result)) + +# Switch back to C, which was in the middle of switching +# already. This will throw the ``GreenletStartedWhileInPython`` +# exception, which results in parent A getting started (B is finished) +c.switch() + +results.append(('A dead?', a.dead, 'B dead?', b.dead, 'C dead?', c.dead)) + +# A and B should both be dead now. +assert a.dead +assert b.dead +assert not c.dead + +result = c.switch() +results.append(('main from c.2', result)) +# Now C is dead +assert c.dead + +print("RESULTS:", results) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_slp_switch.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_slp_switch.py new file mode 100644 index 0000000..0990526 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_slp_switch.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +A test helper for seeing what happens when slp_switch() +fails. 
+""" +# pragma: no cover + +import greenlet + + +print('fail_slp_switch is running', flush=True) + +runs = [] +def func(): + runs.append(1) + greenlet.getcurrent().parent.switch() + runs.append(2) + greenlet.getcurrent().parent.switch() + runs.append(3) + +g = greenlet._greenlet.UnswitchableGreenlet(func) +g.switch() +assert runs == [1] +g.switch() +assert runs == [1, 2] +g.force_slp_switch_error = True + +# This should crash. +g.switch() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets.py new file mode 100644 index 0000000..e151b19 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets.py @@ -0,0 +1,44 @@ +""" +Uses a trace function to switch greenlets at unexpected times. + +In the trace function, we switch from the current greenlet to another +greenlet, which switches +""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = False + +def tracefunc(*args): + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch() + print('\tLEAVE TRACE', *args) + +def g1_run(): + print('In g1_run') + global switch_to_g2 + switch_to_g2 = True + from_parent = greenlet.getcurrent().parent.switch() + print('Return to g1_run') + print('From parent', from_parent) + +def g2_run(): + #g1.switch() + greenlet.getcurrent().parent.switch() + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +# This switch didn't actually finish! +# And if it did, it would raise TypeError +# because g1_run() doesn't take any arguments. +g1.switch(1) +print('Back in main') +g1.switch(2) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets2.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets2.py new file mode 100644 index 0000000..1f6b66b --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_three_greenlets2.py @@ -0,0 +1,55 @@ +""" +Like fail_switch_three_greenlets, but the call into g1_run would actually be +valid. 
+""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = True + +results = [] + +def tracefunc(*args): + results.append(('trace', args[0])) + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch('g2 from tracefunc') + print('\tLEAVE TRACE', *args) + +def g1_run(arg): + results.append(('g1 arg', arg)) + print('In g1_run') + from_parent = greenlet.getcurrent().parent.switch('from g1_run') + results.append(('g1 from parent', from_parent)) + return 'g1 done' + +def g2_run(arg): + #g1.switch() + results.append(('g2 arg', arg)) + parent = greenlet.getcurrent().parent.switch('from g2_run') + global switch_to_g2 + switch_to_g2 = False + results.append(('g2 from parent', parent)) + return 'g2 done' + + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +x = g1.switch('g1 from main') +results.append(('main g1', x)) +print('Back in main', x) +x = g1.switch('g2 from main') +results.append(('main g2', x)) +print('back in amain again', x) +x = g1.switch('g1 from main 2') +results.append(('main g1.2', x)) +x = g2.switch() +results.append(('main g2.2', x)) +print("RESULTS:", results) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_two_greenlets.py b/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_two_greenlets.py new file mode 100644 index 0000000..3e52345 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/fail_switch_two_greenlets.py @@ -0,0 +1,41 @@ +""" +Uses a trace function to switch greenlets at unexpected times. + +In the trace function, we switch from the current greenlet to another +greenlet, which switches +""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = False + +def tracefunc(*args): + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch() + print('\tLEAVE TRACE', *args) + +def g1_run(): + print('In g1_run') + global switch_to_g2 + switch_to_g2 = True + greenlet.getcurrent().parent.switch() + print('Return to g1_run') + print('Falling off end of g1_run') + +def g2_run(): + g1.switch() + print('Falling off end of g2') + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +g1.switch() +print('Falling off end of main') +g2.switch() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/leakcheck.py b/venv/lib/python3.12/site-packages/greenlet/tests/leakcheck.py new file mode 100644 index 0000000..7046e41 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/leakcheck.py @@ -0,0 +1,336 @@ +# Copyright (c) 2018 gevent community +# Copyright (c) 2021 greenlet community +# +# This was originally part of gevent's test suite. The main author +# (Jason Madden) vendored a copy of it into greenlet. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import print_function + +import os +import sys +import gc + +from functools import wraps +import unittest + + +import objgraph + +# graphviz 0.18 (Nov 7 2021), available only on Python 3.6 and newer, +# has added type hints (sigh). It wants to use ``typing.Literal`` for +# some stuff, but that's only available on Python 3.9+. If that's not +# found, it creates a ``unittest.mock.MagicMock`` object and annotates +# with that. These are GC'able objects, and doing almost *anything* +# with them results in an explosion of objects. For example, trying to +# compare them for equality creates new objects. This causes our +# leakchecks to fail, with reports like: +# +# greenlet.tests.leakcheck.LeakCheckError: refcount increased by [337, 1333, 343, 430, 530, 643, 769] +# _Call 1820 +546 +# dict 4094 +76 +# MagicProxy 585 +73 +# tuple 2693 +66 +# _CallList 24 +3 +# weakref 1441 +1 +# function 5996 +1 +# type 736 +1 +# cell 592 +1 +# MagicMock 8 +1 +# +# To avoid this, we *could* filter this type of object out early. In +# principle it could leak, but we don't use mocks in greenlet, so it +# doesn't leak from us. However, a further issue is that ``MagicMock`` +# objects have subobjects that are also GC'able, like ``_Call``, and +# those create new mocks of their own too. So we'd have to filter them +# as well, and they're not public. That's OK, we can workaround the +# problem by being very careful to never compare by equality or other +# user-defined operators, only using object identity or other builtin +# functions. + +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') +SKIP_LEAKCHECKS = RUNNING_ON_MANYLINUX or os.environ.get('GREENLET_SKIP_LEAKCHECKS') +SKIP_FAILING_LEAKCHECKS = os.environ.get('GREENLET_SKIP_FAILING_LEAKCHECKS') +ONLY_FAILING_LEAKCHECKS = os.environ.get('GREENLET_ONLY_FAILING_LEAKCHECKS') + +def ignores_leakcheck(func): + """ + Ignore the given object during leakchecks. + + Can be applied to a method, in which case the method will run, but + will not be subject to leak checks. + + If applied to a class, the entire class will be skipped during leakchecks. This + is intended to be used for classes that are very slow and cause problems such as + test timeouts; typically it will be used for classes that are subclasses of a base + class and specify variants of behaviour (such as pool sizes). + """ + func.ignore_leakcheck = True + return func + +def fails_leakcheck(func): + """ + Mark that the function is known to leak. 
+ """ + func.fails_leakcheck = True + if SKIP_FAILING_LEAKCHECKS: + func = unittest.skip("Skipping known failures")(func) + return func + +class LeakCheckError(AssertionError): + pass + +if hasattr(sys, 'getobjects'): + # In a Python build with ``--with-trace-refs``, make objgraph + # trace *all* the objects, not just those that are tracked by the + # GC + class _MockGC(object): + def get_objects(self): + return sys.getobjects(0) # pylint:disable=no-member + def __getattr__(self, name): + return getattr(gc, name) + objgraph.gc = _MockGC() + fails_strict_leakcheck = fails_leakcheck +else: + def fails_strict_leakcheck(func): + """ + Decorator for a function that is known to fail when running + strict (``sys.getobjects()``) leakchecks. + + This type of leakcheck finds all objects, even those, such as + strings, which are not tracked by the garbage collector. + """ + return func + +class ignores_types_in_strict_leakcheck(object): + def __init__(self, types): + self.types = types + def __call__(self, func): + func.leakcheck_ignore_types = self.types + return func + +class _RefCountChecker(object): + + # Some builtin things that we ignore + # XXX: Those things were ignored by gevent, but they're important here, + # presumably. + IGNORED_TYPES = () #(tuple, dict, types.FrameType, types.TracebackType) + + # Names of types that should be ignored. Use this when we cannot + # or don't want to import the class directly. + IGNORED_TYPE_NAMES = ( + # This appears in Python3.14 with the JIT enabled. It + # doesn't seem to be directly exposed to Python; the only way to get + # one is to cause code to get jitted and then look for all objects + # and find one with this name. But they multiply as code + # executes and gets jitted, in ways we don't want to rely on. + # So just ignore it. + 'uop_executor', + ) + + def __init__(self, testcase, function): + self.testcase = testcase + self.function = function + self.deltas = [] + self.peak_stats = {} + self.ignored_types = () + + # The very first time we are called, we have already been + # self.setUp() by the test runner, so we don't need to do it again. + self.needs_setUp = False + + def _include_object_p(self, obj): + # pylint:disable=too-many-return-statements + # + # See the comment block at the top. We must be careful to + # avoid invoking user-defined operations. + if obj is self: + return False + kind = type(obj) + # ``self._include_object_p == obj`` returns NotImplemented + # for non-function objects, which causes the interpreter + # to try to reverse the order of arguments...which leads + # to the explosion of mock objects. We don't want that, so we implement + # the check manually. + if kind == type(self._include_object_p): + try: + # pylint:disable=not-callable + exact_method_equals = self._include_object_p.__eq__(obj) + except AttributeError: + # Python 2.7 methods may only have __cmp__, and that raises a + # TypeError for non-method arguments + # pylint:disable=no-member + exact_method_equals = self._include_object_p.__cmp__(obj) == 0 + + if exact_method_equals is not NotImplemented and exact_method_equals: + return False + + # Similarly, we need to check identity in our __dict__ to avoid mock explosions. 
+ for x in self.__dict__.values(): + if obj is x: + return False + + + if ( + kind in self.ignored_types + or kind in self.IGNORED_TYPES + or kind.__name__ in self.IGNORED_TYPE_NAMES + ): + return False + + + return True + + def _growth(self): + return objgraph.growth(limit=None, peak_stats=self.peak_stats, + filter=self._include_object_p) + + def _report_diff(self, growth): + if not growth: + return "" + + lines = [] + width = max(len(name) for name, _, _ in growth) + for name, count, delta in growth: + lines.append('%-*s%9d %+9d' % (width, name, count, delta)) + + diff = '\n'.join(lines) + return diff + + + def _run_test(self, args, kwargs): + gc_enabled = gc.isenabled() + gc.disable() + + if self.needs_setUp: + self.testcase.setUp() + self.testcase.skipTearDown = False + try: + self.function(self.testcase, *args, **kwargs) + finally: + self.testcase.tearDown() + self.testcase.doCleanups() + self.testcase.skipTearDown = True + self.needs_setUp = True + if gc_enabled: + gc.enable() + + def _growth_after(self): + # Grab post snapshot + # pylint:disable=no-member + if 'urlparse' in sys.modules: + sys.modules['urlparse'].clear_cache() + if 'urllib.parse' in sys.modules: + sys.modules['urllib.parse'].clear_cache() + + return self._growth() + + def _check_deltas(self, growth): + # Return false when we have decided there is no leak, + # true if we should keep looping, raises an assertion + # if we have decided there is a leak. + + deltas = self.deltas + if not deltas: + # We haven't run yet, no data, keep looping + return True + + if gc.garbage: + raise LeakCheckError("Generated uncollectable garbage %r" % (gc.garbage,)) + + + # the following configurations are classified as "no leak" + # [0, 0] + # [x, 0, 0] + # [... a, b, c, d] where a+b+c+d = 0 + # + # the following configurations are classified as "leak" + # [... z, z, z] where z > 0 + + if deltas[-2:] == [0, 0] and len(deltas) in (2, 3): + return False + + if deltas[-3:] == [0, 0, 0]: + return False + + if len(deltas) >= 4 and sum(deltas[-4:]) == 0: + return False + + if len(deltas) >= 3 and deltas[-1] > 0 and deltas[-1] == deltas[-2] and deltas[-2] == deltas[-3]: + diff = self._report_diff(growth) + raise LeakCheckError('refcount increased by %r\n%s' % (deltas, diff)) + + # OK, we don't know for sure yet. Let's search for more + if sum(deltas[-3:]) <= 0 or sum(deltas[-4:]) <= 0 or deltas[-4:].count(0) >= 2: + # this is suspicious, so give a few more runs + limit = 11 + else: + limit = 7 + if len(deltas) >= limit: + raise LeakCheckError('refcount increased by %r\n%s' + % (deltas, + self._report_diff(growth))) + + # We couldn't decide yet, keep going + return True + + def __call__(self, args, kwargs): + for _ in range(3): + gc.collect() + + expect_failure = getattr(self.function, 'fails_leakcheck', False) + if expect_failure: + self.testcase.expect_greenlet_leak = True + self.ignored_types = getattr(self.function, "leakcheck_ignore_types", ()) + + # Capture state before; the incremental will be + # updated by each call to _growth_after + growth = self._growth() + + try: + while self._check_deltas(growth): + self._run_test(args, kwargs) + + growth = self._growth_after() + + self.deltas.append(sum((stat[2] for stat in growth))) + except LeakCheckError: + if not expect_failure: + raise + else: + if expect_failure: + raise LeakCheckError("Expected %s to leak but it did not." 
% (self.function,)) + +def wrap_refcount(method): + if getattr(method, 'ignore_leakcheck', False) or SKIP_LEAKCHECKS: + return method + + @wraps(method) + def wrapper(self, *args, **kwargs): # pylint:disable=too-many-branches + if getattr(self, 'ignore_leakcheck', False): + raise unittest.SkipTest("This class ignored during leakchecks") + if ONLY_FAILING_LEAKCHECKS and not getattr(method, 'fails_leakcheck', False): + raise unittest.SkipTest("Only running tests that fail leakchecks.") + return _RefCountChecker(self, method)(args, kwargs) + + return wrapper diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_contextvars.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_contextvars.py new file mode 100644 index 0000000..b0d1ccf --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_contextvars.py @@ -0,0 +1,312 @@ +from __future__ import print_function + +import gc +import sys +import unittest + +from functools import partial +from unittest import skipUnless +from unittest import skipIf + +from greenlet import greenlet +from greenlet import getcurrent +from . import TestCase +from . import PY314 + +try: + from contextvars import Context + from contextvars import ContextVar + from contextvars import copy_context + # From the documentation: + # + # Important: Context Variables should be created at the top module + # level and never in closures. Context objects hold strong + # references to context variables which prevents context variables + # from being properly garbage collected. + ID_VAR = ContextVar("id", default=None) + VAR_VAR = ContextVar("var", default=None) + ContextVar = None +except ImportError: + Context = ContextVar = copy_context = None + +# We don't support testing if greenlet's built-in context var support is disabled. +@skipUnless(Context is not None, "ContextVar not supported") +class ContextVarsTests(TestCase): + def _new_ctx_run(self, *args, **kwargs): + return copy_context().run(*args, **kwargs) + + def _increment(self, greenlet_id, callback, counts, expect): + ctx_var = ID_VAR + if expect is None: + self.assertIsNone(ctx_var.get()) + else: + self.assertEqual(ctx_var.get(), expect) + ctx_var.set(greenlet_id) + for _ in range(2): + counts[ctx_var.get()] += 1 + callback() + + def _test_context(self, propagate_by): + # pylint:disable=too-many-branches + ID_VAR.set(0) + + callback = getcurrent().switch + counts = dict((i, 0) for i in range(5)) + + lets = [ + greenlet(partial( + partial( + copy_context().run, + self._increment + ) if propagate_by == "run" else self._increment, + greenlet_id=i, + callback=callback, + counts=counts, + expect=( + i - 1 if propagate_by == "share" else + 0 if propagate_by in ("set", "run") else None + ) + )) + for i in range(1, 5) + ] + + for let in lets: + if propagate_by == "set": + let.gr_context = copy_context() + elif propagate_by == "share": + let.gr_context = getcurrent().gr_context + + for i in range(2): + counts[ID_VAR.get()] += 1 + for let in lets: + let.switch() + + if propagate_by == "run": + # Must leave each context.run() in reverse order of entry + for let in reversed(lets): + let.switch() + else: + # No context.run(), so fine to exit in any order. + for let in lets: + let.switch() + + for let in lets: + self.assertTrue(let.dead) + # When using run(), we leave the run() as the greenlet dies, + # and there's no context "underneath". When not using run(), + # gr_context still reflects the context the greenlet was + # running in. 
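# (Illustrative aside, not part of the vendored file: concretely, a
# greenlet started as ``g = greenlet(ctx.run); g.switch(fn)`` pops
# ``ctx`` as ``fn`` returns, so once ``g`` is dead, ``g.gr_context``
# is None; a greenlet that was instead given ``g.gr_context = ctx``
# and never called ``ctx.run()`` still reports ``ctx`` after death.)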
+ if propagate_by == 'run': + self.assertIsNone(let.gr_context) + else: + self.assertIsNotNone(let.gr_context) + + + if propagate_by == "share": + self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6}) + else: + self.assertEqual(set(counts.values()), set([2])) + + def test_context_propagated_by_context_run(self): + self._new_ctx_run(self._test_context, "run") + + def test_context_propagated_by_setting_attribute(self): + self._new_ctx_run(self._test_context, "set") + + def test_context_not_propagated(self): + self._new_ctx_run(self._test_context, None) + + def test_context_shared(self): + self._new_ctx_run(self._test_context, "share") + + def test_break_ctxvars(self): + let1 = greenlet(copy_context().run) + let2 = greenlet(copy_context().run) + let1.switch(getcurrent().switch) + let2.switch(getcurrent().switch) + # Since let2 entered the current context and let1 exits its own, the + # interpreter emits: + # RuntimeError: cannot exit context: thread state references a different context object + let1.switch() + + def test_not_broken_if_using_attribute_instead_of_context_run(self): + let1 = greenlet(getcurrent().switch) + let2 = greenlet(getcurrent().switch) + let1.gr_context = copy_context() + let2.gr_context = copy_context() + let1.switch() + let2.switch() + let1.switch() + let2.switch() + + def test_context_assignment_while_running(self): + # pylint:disable=too-many-statements + ID_VAR.set(None) + + def target(): + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + + # Context is created on first use + ID_VAR.set(1) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(ID_VAR.get(), 1) + self.assertEqual(gr.gr_context[ID_VAR], 1) + + # Clearing the context makes it get re-created as another + # empty context when next used + old_context = gr.gr_context + gr.gr_context = None # assign None while running + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + ID_VAR.set(2) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(ID_VAR.get(), 2) + self.assertEqual(gr.gr_context[ID_VAR], 2) + + new_context = gr.gr_context + getcurrent().parent.switch((old_context, new_context)) + # parent switches us back to old_context + + self.assertEqual(ID_VAR.get(), 1) + gr.gr_context = new_context # assign non-None while running + self.assertEqual(ID_VAR.get(), 2) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + gr.gr_context = old_context + self.assertEqual(ID_VAR.get(), 1) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + + gr = greenlet(target) + + with self.assertRaisesRegex(AttributeError, "can't delete context attribute"): + del gr.gr_context + + self.assertIsNone(gr.gr_context) + old_context, new_context = gr.switch() + self.assertIs(new_context, gr.gr_context) + self.assertEqual(old_context[ID_VAR], 1) + self.assertEqual(new_context[ID_VAR], 2) + self.assertEqual(new_context.run(ID_VAR.get), 2) + gr.gr_context = old_context # assign non-None while suspended + gr.switch() + self.assertIs(gr.gr_context, new_context) + gr.gr_context = None # assign None while suspended + gr.switch() + self.assertIs(gr.gr_context, old_context) + gr.gr_context = None + gr.switch() + self.assertIsNone(gr.gr_context) + + # Make sure there are no reference leaks + gr = None + gc.collect() + # Python 3.14 elides reference counting operations + # in some cases. 
See https://github.com/python/cpython/pull/130708 + self.assertEqual(sys.getrefcount(old_context), 2 if not PY314 else 1) + self.assertEqual(sys.getrefcount(new_context), 2 if not PY314 else 1) + + def test_context_assignment_different_thread(self): + import threading + VAR_VAR.set(None) + ctx = Context() + + is_running = threading.Event() + should_suspend = threading.Event() + did_suspend = threading.Event() + should_exit = threading.Event() + holder = [] + + def greenlet_in_thread_fn(): + VAR_VAR.set(1) + is_running.set() + should_suspend.wait(10) + VAR_VAR.set(2) + getcurrent().parent.switch() + holder.append(VAR_VAR.get()) + + def thread_fn(): + gr = greenlet(greenlet_in_thread_fn) + gr.gr_context = ctx + holder.append(gr) + gr.switch() + did_suspend.set() + should_exit.wait(10) + gr.switch() + del gr + greenlet() # trigger cleanup + + thread = threading.Thread(target=thread_fn, daemon=True) + thread.start() + is_running.wait(10) + gr = holder[0] + + # Can't access or modify context if the greenlet is running + # in a different thread + with self.assertRaisesRegex(ValueError, "running in a different"): + getattr(gr, 'gr_context') + with self.assertRaisesRegex(ValueError, "running in a different"): + gr.gr_context = None + + should_suspend.set() + did_suspend.wait(10) + + # OK to access and modify context if greenlet is suspended + self.assertIs(gr.gr_context, ctx) + self.assertEqual(gr.gr_context[VAR_VAR], 2) + gr.gr_context = None + + should_exit.set() + thread.join(10) + + self.assertEqual(holder, [gr, None]) + + # Context can still be accessed/modified when greenlet is dead: + self.assertIsNone(gr.gr_context) + gr.gr_context = ctx + self.assertIs(gr.gr_context, ctx) + + # Otherwise we leak greenlets on some platforms. + # XXX: Should be able to do this automatically + del holder[:] + gr = None + thread = None + + def test_context_assignment_wrong_type(self): + g = greenlet() + with self.assertRaisesRegex(TypeError, + "greenlet context must be a contextvars.Context or None"): + g.gr_context = self + + +@skipIf(Context is not None, "ContextVar supported") +class NoContextVarsTests(TestCase): + def test_contextvars_errors(self): + let1 = greenlet(getcurrent().switch) + self.assertFalse(hasattr(let1, 'gr_context')) + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + + with self.assertRaises(AttributeError): + let1.gr_context = None + + let1.switch() + + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + + with self.assertRaises(AttributeError): + let1.gr_context = None + + del let1 + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_cpp.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_cpp.py new file mode 100644 index 0000000..2d0cc9c --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_cpp.py @@ -0,0 +1,73 @@ +from __future__ import print_function +from __future__ import absolute_import + +import subprocess +import unittest + +import greenlet +from . import _test_extension_cpp +from . import TestCase +from . 
import WIN + +class CPPTests(TestCase): + def test_exception_switch(self): + greenlets = [] + for i in range(4): + g = greenlet.greenlet(_test_extension_cpp.test_exception_switch) + g.switch(i) + greenlets.append(g) + for i, g in enumerate(greenlets): + self.assertEqual(g.switch(), i) + + def _do_test_unhandled_exception(self, target): + import os + import sys + script = os.path.join( + os.path.dirname(__file__), + 'fail_cpp_exception.py', + ) + args = [sys.executable, script, target.__name__ if not isinstance(target, str) else target] + __traceback_info__ = args + with self.assertRaises(subprocess.CalledProcessError) as exc: + subprocess.check_output( + args, + encoding='utf-8', + stderr=subprocess.STDOUT + ) + + ex = exc.exception + expected_exit = self.get_expected_returncodes_for_aborted_process() + self.assertIn(ex.returncode, expected_exit) + self.assertIn('fail_cpp_exception is running', ex.output) + return ex.output + + + def test_unhandled_nonstd_exception_aborts(self): + # verify that plain unhandled throw aborts + self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_nonstd) + + def test_unhandled_std_exception_aborts(self): + # verify that plain unhandled throw aborts + self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_std) + + @unittest.skipIf(WIN, "XXX: This does not crash on Windows") + # Meaning the exception is getting lost somewhere... + def test_unhandled_std_exception_as_greenlet_function_aborts(self): + # verify that plain unhandled throw aborts + output = self._do_test_unhandled_exception('run_as_greenlet_target') + self.assertIn( + # We really expect this to be prefixed with "greenlet: Unhandled C++ exception:" + # as added by our handler for std::exception (see TUserGreenlet.cpp), but + # that's not correct everywhere --- our handler never runs before std::terminate + # gets called (for example, on arm32). + 'Thrown from an extension.', + output + ) + + def test_unhandled_exception_in_greenlet_aborts(self): + # verify that unhandled throw called in greenlet aborts too + self._do_test_unhandled_exception('run_unhandled_exception_in_greenlet_aborts') + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_extension_interface.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_extension_interface.py new file mode 100644 index 0000000..34b6656 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_extension_interface.py @@ -0,0 +1,115 @@ +from __future__ import print_function +from __future__ import absolute_import + +import sys + +import greenlet +from . import _test_extension +from . import TestCase + +# pylint:disable=c-extension-no-member + +class CAPITests(TestCase): + def test_switch(self): + self.assertEqual( + 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) + + def test_switch_kwargs(self): + def adder(x, y): + return x * y + g = greenlet.greenlet(adder) + self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) + + def test_setparent(self): + # pylint:disable=disallowed-name + def foo(): + def bar(): + greenlet.getcurrent().parent.switch() + + # This final switch should go back to the main greenlet, since + # the test_setparent() function in the C extension should have + # reparented this greenlet. 
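# (Illustrative trace, not part of the vendored file: main starts a
# greenlet running foo(); foo() starts ``child`` running bar(); bar()
# switches back to the foo-greenlet, which hands ``child`` up to main
# and suspends. ``_test_extension.test_setparent(child)`` then runs in
# main, reparents ``child`` from the foo-greenlet to main, and resumes
# it, so the switch below lands back inside that C call in main.)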
+ greenlet.getcurrent().parent.switch() + raise AssertionError("Should never have reached this code") + child = greenlet.greenlet(bar) + child.switch() + greenlet.getcurrent().parent.switch(child) + greenlet.getcurrent().parent.throw( + AssertionError("Should never reach this code")) + foo_child = greenlet.greenlet(foo).switch() + self.assertEqual(None, _test_extension.test_setparent(foo_child)) + + def test_getcurrent(self): + _test_extension.test_getcurrent() + + def test_new_greenlet(self): + self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15)) + + def test_raise_greenlet_dead(self): + self.assertRaises( + greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) + + def test_raise_greenlet_error(self): + self.assertRaises( + greenlet.error, _test_extension.test_raise_greenlet_error) + + def test_throw(self): + seen = [] + + def foo(): # pylint:disable=disallowed-name + try: + greenlet.getcurrent().parent.switch() + except ValueError: + seen.append(sys.exc_info()[1]) + except greenlet.GreenletExit: + raise AssertionError + g = greenlet.greenlet(foo) + g.switch() + _test_extension.test_throw(g) + self.assertEqual(len(seen), 1) + self.assertTrue( + isinstance(seen[0], ValueError), + "ValueError was not raised in foo()") + self.assertEqual( + str(seen[0]), + 'take that sucka!', + "message doesn't match") + + def test_non_traceback_param(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + Exception, + Exception(), + self + ) + self.assertEqual(str(exc.exception), + "throw() third argument must be a traceback object") + + def test_instance_of_wrong_type(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + Exception(), + BaseException(), + None, + ) + + self.assertEqual(str(exc.exception), + "instance exception may not have a separate value") + + def test_not_throwable(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + "abc", + None, + None, + ) + self.assertEqual(str(exc.exception), + "exceptions must be classes, or instances, not str") + + +if __name__ == '__main__': + import unittest + unittest.main() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_gc.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_gc.py new file mode 100644 index 0000000..994addb --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_gc.py @@ -0,0 +1,86 @@ +import gc + +import weakref + +import greenlet + + +from . import TestCase +from .leakcheck import fails_leakcheck +# These only work with greenlet gc support +# which is no longer optional. 
+assert greenlet.GREENLET_USE_GC + +class GCTests(TestCase): + def test_dead_circular_ref(self): + o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch()) + gc.collect() + if o() is not None: + import sys + print("O IS NOT NONE.", sys.getrefcount(o())) + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + def test_circular_greenlet(self): + class circular_greenlet(greenlet.greenlet): + self = None + o = circular_greenlet() + o.self = o + o = weakref.ref(o) + gc.collect() + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + def test_inactive_ref(self): + class inactive_greenlet(greenlet.greenlet): + def __init__(self): + greenlet.greenlet.__init__(self, run=self.run) + + def run(self): + pass + o = inactive_greenlet() + o = weakref.ref(o) + gc.collect() + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + @fails_leakcheck + def test_finalizer_crash(self): + # This test is designed to crash when active greenlets + # are made garbage collectable, until the underlying + # problem is resolved. How does it work: + # - order of object creation is important + # - array is created first, so it is moved to unreachable first + # - we create a cycle between a greenlet and this array + # - we create an object that participates in gc, is only + # referenced by a greenlet, and would corrupt gc lists + # on destruction, the easiest is to use an object with + # a finalizer + # - because array is the first object in unreachable it is + # cleared first, which causes all references to greenlet + # to disappear and causes greenlet to be destroyed, but since + # it is still live it causes a switch during gc, which causes + # an object with finalizer to be destroyed, which causes stack + # corruption and then a crash + + class object_with_finalizer(object): + def __del__(self): + pass + array = [] + parent = greenlet.getcurrent() + def greenlet_body(): + greenlet.getcurrent().object = object_with_finalizer() + try: + parent.switch() + except greenlet.GreenletExit: + print("Got greenlet exit!") + finally: + del greenlet.getcurrent().object + g = greenlet.greenlet(greenlet_body) + g.array = array + array.append(g) + g.switch() + del array + del g + greenlet.getcurrent() + gc.collect() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_generator.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_generator.py new file mode 100644 index 0000000..ca4a644 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_generator.py @@ -0,0 +1,59 @@ + +from greenlet import greenlet + +from . 
import TestCase + +class genlet(greenlet): + parent = None + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def __next__(self): + self.parent = greenlet.getcurrent() + result = self.switch() + if self: + return result + + raise StopIteration + + next = __next__ + + +def Yield(value): + g = greenlet.getcurrent() + while not isinstance(g, genlet): + if g is None: + raise RuntimeError('yield outside a genlet') + g = g.parent + g.parent.switch(value) + + +def generator(func): + class Generator(genlet): + fn = (func,) + return Generator + +# ____________________________________________________________ + + +class GeneratorTests(TestCase): + def test_generator(self): + seen = [] + + def g(n): + for i in range(n): + seen.append(i) + Yield(i) + g = generator(g) + for _ in range(3): + for j in g(5): + seen.append(j) + self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_generator_nested.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_generator_nested.py new file mode 100644 index 0000000..8d752a6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_generator_nested.py @@ -0,0 +1,168 @@ + +from greenlet import greenlet +from . import TestCase +from .leakcheck import fails_leakcheck + +class genlet(greenlet): + parent = None + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + self.child = None + + def run(self): + # Note the function is packed in a tuple + # to avoid creating a bound method for it. + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def set_child(self, child): + self.child = child + + def __next__(self): + if self.child: + child = self.child + while child.child: + tmp = child + child = child.child + tmp.child = None + + result = child.switch() + else: + self.parent = greenlet.getcurrent() + result = self.switch() + + if self: + return result + + raise StopIteration + + next = __next__ + +def Yield(value, level=1): + g = greenlet.getcurrent() + + while level != 0: + if not isinstance(g, genlet): + raise RuntimeError('yield outside a genlet') + if level > 1: + g.parent.set_child(g) + g = g.parent + level -= 1 + + g.switch(value) + + +def Genlet(func): + class TheGenlet(genlet): + fn = (func,) + return TheGenlet + +# ____________________________________________________________ + + +def g1(n, seen): + for i in range(n): + seen.append(i + 1) + yield i + + +def g2(n, seen): + for i in range(n): + seen.append(i + 1) + Yield(i) + +g2 = Genlet(g2) + + +def nested(i): + Yield(i) + + +def g3(n, seen): + for i in range(n): + seen.append(i + 1) + nested(i) +g3 = Genlet(g3) + + +def a(n): + if n == 0: + return + for ii in ax(n - 1): + Yield(ii) + Yield(n) +ax = Genlet(a) + + +def perms(l): + if len(l) > 1: + for e in l: + # No syntactical sugar for generator expressions + x = [Yield([e] + p) for p in perms([x for x in l if x != e])] + assert x + else: + Yield(l) +perms = Genlet(perms) + + +def gr1(n): + for ii in range(1, n): + Yield(ii) + Yield(ii * ii, 2) + +gr1 = Genlet(gr1) + + +def gr2(n, seen): + for ii in gr1(n): + seen.append(ii) + +gr2 = Genlet(gr2) + + +class NestedGeneratorTests(TestCase): + def test_layered_genlets(self): + seen = [] + for ii in gr2(5, seen): + seen.append(ii) + self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16]) + + @fails_leakcheck + def test_permutations(self): + gen_perms = 
perms(list(range(4))) + permutations = list(gen_perms) + self.assertEqual(len(permutations), 4 * 3 * 2 * 1) + self.assertIn([0, 1, 2, 3], permutations) + self.assertIn([3, 2, 1, 0], permutations) + res = [] + for ii in zip(perms(list(range(4))), perms(list(range(3)))): + res.append(ii) + self.assertEqual( + res, + [([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]), + ([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]), + ([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])]) + # XXX Test to make sure we are working as a generator expression + + def test_genlet_simple(self): + for g in g1, g2, g3: + seen = [] + for _ in range(3): + for j in g(5, seen): + seen.append(j) + self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4]) + + def test_genlet_bad(self): + try: + Yield(10) + except RuntimeError: + pass + + def test_nested_genlets(self): + seen = [] + for ii in ax(5): + seen.append(ii) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet.py new file mode 100644 index 0000000..1fa4bd1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet.py @@ -0,0 +1,1353 @@ +import gc +import sys +import time +import threading +import unittest + +from abc import ABCMeta +from abc import abstractmethod + +import greenlet +from greenlet import greenlet as RawGreenlet +from . import TestCase +from . import RUNNING_ON_MANYLINUX +from . import PY313 +from . import PY314 +from . import RUNNING_ON_FREETHREAD_BUILD +from .leakcheck import fails_leakcheck + + +# We manually manage locks in many tests +# pylint:disable=consider-using-with +# pylint:disable=too-many-public-methods +# This module is quite large. +# TODO: Refactor into separate test files. For example, +# put all the regression tests that used to produce +# crashes in test_greenlet_no_crash; put tests that DO deliberately crash +# the interpreter into test_greenlet_crash. +# pylint:disable=too-many-lines + +class SomeError(Exception): + pass + + +def fmain(seen): + try: + greenlet.getcurrent().parent.switch() + except: + seen.append(sys.exc_info()[0]) + raise + raise SomeError + + +def send_exception(g, exc): + # note: send_exception(g, exc) can be now done with g.throw(exc). + # the purpose of this test is to explicitly check the propagation rules. 
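# Aside: the propagation rule these helpers pin down, in its simplest form:
# an exception escaping run() is re-raised in the parent greenlet, not in
# whichever greenlet created the failing one. A minimal sketch (illustrative
# only, not part of this test file):
import greenlet as _greenlet

def _sketch_propagates_to_parent():
    def boom():
        raise KeyError('raised in the child, seen by the parent')
    g = _greenlet.greenlet(boom)
    try:
        g.switch()
    except KeyError:
        pass                        # it lands here, in the parent
    assert g.dead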
+ def crasher(exc): + raise exc + g1 = RawGreenlet(crasher, parent=g) + g1.switch(exc) + + +class TestGreenlet(TestCase): + + def _do_simple_test(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.append(3) + g = RawGreenlet(f) + lst.append(0) + g.switch() + lst.append(2) + g.switch() + lst.append(4) + self.assertEqual(lst, list(range(5))) + + def test_simple(self): + self._do_simple_test() + + def test_switch_no_run_raises_AttributeError(self): + g = RawGreenlet() + with self.assertRaises(AttributeError) as exc: + g.switch() + + self.assertIn("run", str(exc.exception)) + + def test_throw_no_run_raises_AttributeError(self): + g = RawGreenlet() + with self.assertRaises(AttributeError) as exc: + g.throw(SomeError) + + self.assertIn("run", str(exc.exception)) + + def test_parent_equals_None(self): + g = RawGreenlet(parent=None) + self.assertIsNotNone(g) + self.assertIs(g.parent, greenlet.getcurrent()) + + def test_run_equals_None(self): + g = RawGreenlet(run=None) + self.assertIsNotNone(g) + self.assertIsNone(g.run) + + def test_two_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.extend([1, 1]) + g = RawGreenlet(f) + h = RawGreenlet(f) + g.switch() + self.assertEqual(len(lst), 1) + h.switch() + self.assertEqual(len(lst), 2) + h.switch() + self.assertEqual(len(lst), 4) + self.assertEqual(h.dead, True) + g.switch() + self.assertEqual(len(lst), 6) + self.assertEqual(g.dead, True) + + def test_two_recursive_children(self): + lst = [] + + def f(): + lst.append('b') + greenlet.getcurrent().parent.switch() + + def g(): + lst.append('a') + g = RawGreenlet(f) + g.switch() + lst.append('c') + self.assertEqual(sys.getrefcount(g), 2 if not PY314 else 1) + g = RawGreenlet(g) + # Python 3.14 elides reference counting operations + # in some cases. See https://github.com/python/cpython/pull/130708 + self.assertEqual(sys.getrefcount(g), 2 if not PY314 else 1) + g.switch() + self.assertEqual(lst, ['a', 'b', 'c']) + # Just the one in this frame, plus the one on the stack we pass to the function + self.assertEqual(sys.getrefcount(g), 2 if not PY314 else 1) + + def test_threads(self): + success = [] + + def f(): + self._do_simple_test() + success.append(True) + ths = [threading.Thread(target=f) for i in range(10)] + for th in ths: + th.start() + for th in ths: + th.join(10) + self.assertEqual(len(success), len(ths)) + + def test_exception(self): + seen = [] + g1 = RawGreenlet(fmain) + g2 = RawGreenlet(fmain) + g1.switch(seen) + g2.switch(seen) + g2.parent = g1 + + self.assertEqual(seen, []) + #with self.assertRaises(SomeError): + # p("***Switching back") + # g2.switch() + # Creating this as a bound method can reveal bugs that + # are hidden on newer versions of Python that avoid creating + # bound methods for direct expressions; IOW, don't use the `with` + # form! 
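# Aside: the behavior the tail of test_exception (below) relies on, shown in
# isolation: switching to a dead greenlet simply hands the arguments back to
# the resumed greenlet. A sketch, not part of this module:
import greenlet as _greenlet

def _sketch_switch_to_dead_greenlet():
    g = _greenlet.greenlet(lambda: None)
    g.switch()                      # runs to completion
    assert g.dead
    assert g.switch() == ()         # no args: an empty tuple comes back
    assert g.switch(25) == 25       # one arg: the bare value comes back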
+ self.assertRaises(SomeError, g2.switch) + self.assertEqual(seen, [SomeError]) + + value = g2.switch() + self.assertEqual(value, ()) + self.assertEqual(seen, [SomeError]) + + value = g2.switch(25) + self.assertEqual(value, 25) + self.assertEqual(seen, [SomeError]) + + + def test_send_exception(self): + seen = [] + g1 = RawGreenlet(fmain) + g1.switch(seen) + self.assertRaises(KeyError, send_exception, g1, KeyError) + self.assertEqual(seen, [KeyError]) + + def test_dealloc(self): + seen = [] + g1 = RawGreenlet(fmain) + g2 = RawGreenlet(fmain) + g1.switch(seen) + g2.switch(seen) + self.assertEqual(seen, []) + del g1 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit]) + del g2 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) + + def test_dealloc_catches_GreenletExit_throws_other(self): + def run(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + raise SomeError from None + + g = RawGreenlet(run) + g.switch() + # Destroying the only reference to the greenlet causes it + # to get GreenletExit; when it in turn raises, even though we're the parent + # we don't get the exception, it just gets printed. + # When we run on 3.8 only, we can use sys.unraisablehook + oldstderr = sys.stderr + from io import StringIO + stderr = sys.stderr = StringIO() + try: + del g + finally: + sys.stderr = oldstderr + + v = stderr.getvalue() + self.assertIn("Exception", v) + self.assertIn('ignored', v) + self.assertIn("SomeError", v) + + + @unittest.skipIf( + PY313 and RUNNING_ON_MANYLINUX, + "Sometimes flaky (getting one GreenletExit in the second list)" + # Probably due to funky timing interactions? + # TODO: FIXME Make that work. + ) + + def test_dealloc_other_thread(self): + seen = [] + someref = [] + + bg_glet_created_running_and_no_longer_ref_in_bg = threading.Event() + fg_ref_released = threading.Event() + bg_should_be_clear = threading.Event() + ok_to_exit_bg_thread = threading.Event() + + def f(): + g1 = RawGreenlet(fmain) + g1.switch(seen) + someref.append(g1) + del g1 + gc.collect() + bg_glet_created_running_and_no_longer_ref_in_bg.set() + fg_ref_released.wait(3) + + RawGreenlet() # trigger release + bg_should_be_clear.set() + ok_to_exit_bg_thread.wait(3) + RawGreenlet() # One more time + + t = threading.Thread(target=f) + t.start() + bg_glet_created_running_and_no_longer_ref_in_bg.wait(10) + + self.assertEqual(seen, []) + self.assertEqual(len(someref), 1) + del someref[:] + if not RUNNING_ON_FREETHREAD_BUILD: + # The free-threaded GC is very different. In 3.14rc1, + # the free-threaded GC traverses ``g1``, realizes it is + # not referenced from anywhere else IT cares about, + # calls ``tp_clear`` and then ``green_dealloc``. This causes + # the greenlet to lose its reference to the main greenlet and thread + # in which it was running, which means we can no longer throw an + # exception into it, preventing the rest of this test from working. + # Standard 3.14 traverses the object but doesn't ``tp_clear`` or + # ``green_dealloc`` it. + gc.collect() + # g1 is not released immediately because it's from another thread; + # switching back to that thread will allocate a greenlet and thus + # trigger deletion actions. 
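# Aside: test_dealloc (above) in miniature. Dropping the last reference to a
# started-but-suspended greenlet throws GreenletExit into it so it can
# unwind; the surrounding test adds the cross-thread twist. Single-thread
# sketch (CPython refcounting assumed; illustrative only):
import greenlet as _greenlet

def _sketch_greenlet_exit_on_del():
    seen = []
    def waiter():
        try:
            _greenlet.getcurrent().parent.switch()   # suspend here
        except _greenlet.GreenletExit:
            seen.append('unwound')
    g = _greenlet.greenlet(waiter)
    g.switch()          # start it; it suspends in parent.switch()
    del g               # last reference dies: GreenletExit is thrown in
    assert seen == ['unwound']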
+ self.assertEqual(seen, []) + fg_ref_released.set() + bg_should_be_clear.wait(3) + try: + self.assertEqual(seen, [greenlet.GreenletExit]) + finally: + ok_to_exit_bg_thread.set() + t.join(10) + del seen[:] + del someref[:] + + def test_frame(self): + def f1(): + f = sys._getframe(0) # pylint:disable=protected-access + self.assertEqual(f.f_back, None) + greenlet.getcurrent().parent.switch(f) + return "meaning of life" + g = RawGreenlet(f1) + frame = g.switch() + self.assertTrue(frame is g.gr_frame) + self.assertTrue(g) + + from_g = g.switch() + self.assertFalse(g) + self.assertEqual(from_g, 'meaning of life') + self.assertEqual(g.gr_frame, None) + + def test_thread_bug(self): + def runner(x): + g = RawGreenlet(lambda: time.sleep(x)) + g.switch() + t1 = threading.Thread(target=runner, args=(0.2,)) + t2 = threading.Thread(target=runner, args=(0.3,)) + t1.start() + t2.start() + t1.join(10) + t2.join(10) + + def test_switch_kwargs(self): + def run(a, b): + self.assertEqual(a, 4) + self.assertEqual(b, 2) + return 42 + x = RawGreenlet(run).switch(a=4, b=2) + self.assertEqual(x, 42) + + def test_switch_kwargs_to_parent(self): + def run(x): + greenlet.getcurrent().parent.switch(x=x) + greenlet.getcurrent().parent.switch(2, x=3) + return x, x ** 2 + g = RawGreenlet(run) + self.assertEqual({'x': 3}, g.switch(3)) + self.assertEqual(((2,), {'x': 3}), g.switch()) + self.assertEqual((3, 9), g.switch()) + + def test_switch_to_another_thread(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = RawGreenlet(lambda: None) + created_event.set() + done_event.wait(10) + thread = threading.Thread(target=run) + thread.start() + created_event.wait(10) + with self.assertRaises(greenlet.error): + data['g'].switch() + done_event.set() + thread.join(10) + # XXX: Should handle this automatically + data.clear() + + def test_exc_state(self): + def f(): + try: + raise ValueError('fun') + except: # pylint:disable=bare-except + exc_info = sys.exc_info() + RawGreenlet(h).switch() + self.assertEqual(exc_info, sys.exc_info()) + + def h(): + self.assertEqual(sys.exc_info(), (None, None, None)) + + RawGreenlet(f).switch() + + def test_instance_dict(self): + def f(): + greenlet.getcurrent().test = 42 + def deldict(g): + del g.__dict__ + def setdict(g, value): + g.__dict__ = value + g = RawGreenlet(f) + self.assertEqual(g.__dict__, {}) + g.switch() + self.assertEqual(g.test, 42) + self.assertEqual(g.__dict__, {'test': 42}) + g.__dict__ = g.__dict__ + self.assertEqual(g.__dict__, {'test': 42}) + self.assertRaises(TypeError, deldict, g) + self.assertRaises(TypeError, setdict, g, 42) + + def test_running_greenlet_has_no_run(self): + has_run = [] + def func(): + has_run.append( + hasattr(greenlet.getcurrent(), 'run') + ) + + g = RawGreenlet(func) + g.switch() + self.assertEqual(has_run, [False]) + + def test_deepcopy(self): + import copy + self.assertRaises(TypeError, copy.copy, RawGreenlet()) + self.assertRaises(TypeError, copy.deepcopy, RawGreenlet()) + + def test_parent_restored_on_kill(self): + hub = RawGreenlet(lambda: None) + main = greenlet.getcurrent() + result = [] + def worker(): + try: + # Wait to be killed by going back to the test. 
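# Aside: the value-packing rules from the test_switch_kwargs* tests above,
# collected in one sketch (not part of this module): one positional arrives
# bare, several as a tuple, keywords alone as a dict, a mix as (args, kwargs).
import greenlet as _greenlet

def _sketch_switch_value_packing():
    def echo():
        value = _greenlet.getcurrent().parent.switch('ready')
        while True:
            value = _greenlet.getcurrent().parent.switch(value)
    g = _greenlet.greenlet(echo)
    assert g.switch() == 'ready'
    assert g.switch(1) == 1
    assert g.switch(1, 2) == (1, 2)
    assert g.switch(x=3) == {'x': 3}
    assert g.switch(1, x=3) == ((1,), {'x': 3})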
+ main.switch() + except greenlet.GreenletExit: + # Resurrect and switch to parent + result.append(greenlet.getcurrent().parent) + result.append(greenlet.getcurrent()) + hub.switch() + g = RawGreenlet(worker, parent=hub) + g.switch() + # delete the only reference, thereby raising GreenletExit + del g + self.assertTrue(result) + self.assertIs(result[0], main) + self.assertIs(result[1].parent, hub) + # Delete them, thereby breaking the cycle between the greenlet + # and the frame, which otherwise would never be collectable + # XXX: We should be able to automatically fix this. + del result[:] + hub = None + main = None + + def test_parent_return_failure(self): + # No run causes AttributeError on switch + g1 = RawGreenlet() + # Greenlet that implicitly switches to parent + g2 = RawGreenlet(lambda: None, parent=g1) + # AttributeError should propagate to us, no fatal errors + with self.assertRaises(AttributeError): + g2.switch() + + def test_throw_exception_not_lost(self): + class mygreenlet(RawGreenlet): + def __getattribute__(self, name): + try: + raise Exception # pylint:disable=broad-exception-raised + except: # pylint:disable=bare-except + pass + return RawGreenlet.__getattribute__(self, name) + g = mygreenlet(lambda: None) + self.assertRaises(SomeError, g.throw, SomeError()) + + @fails_leakcheck + def _do_test_throw_to_dead_thread_doesnt_crash(self, wait_for_cleanup=False): + result = [] + def worker(): + greenlet.getcurrent().parent.switch() + + def creator(): + g = RawGreenlet(worker) + g.switch() + result.append(g) + if wait_for_cleanup: + # Let this greenlet eventually be cleaned up. + g.switch() + greenlet.getcurrent() + t = threading.Thread(target=creator) + t.start() + t.join(10) + del t + # But, depending on the operating system, the thread + # deallocator may not actually have run yet! So we can't be + # sure about the error message unless we wait. + if wait_for_cleanup: + self.wait_for_pending_cleanups() + with self.assertRaises(greenlet.error) as exc: + result[0].throw(SomeError) + + if not wait_for_cleanup: + s = str(exc.exception) + self.assertTrue( + s == "cannot switch to a different thread (which happens to have exited)" + or 'Cannot switch' in s + ) + else: + self.assertEqual( + str(exc.exception), + "cannot switch to a different thread (which happens to have exited)", + ) + + if hasattr(result[0].gr_frame, 'clear'): + # The frame is actually executing (it thinks), we can't clear it. + with self.assertRaises(RuntimeError): + result[0].gr_frame.clear() + # Unfortunately, this doesn't actually clear the references, they're in the + # fast local array. + if not wait_for_cleanup: + # f_locals has no clear method in Python 3.13 + if hasattr(result[0].gr_frame.f_locals, 'clear'): + result[0].gr_frame.f_locals.clear() + else: + self.assertIsNone(result[0].gr_frame) + + del creator + worker = None + del result[:] + # XXX: we ought to be able to automatically fix this. 
+ # See issue 252 + self.expect_greenlet_leak = True # direct us not to wait for it to go away + + @fails_leakcheck + def test_throw_to_dead_thread_doesnt_crash(self): + self._do_test_throw_to_dead_thread_doesnt_crash() + + def test_throw_to_dead_thread_doesnt_crash_wait(self): + self._do_test_throw_to_dead_thread_doesnt_crash(True) + + @fails_leakcheck + def test_recursive_startup(self): + class convoluted(RawGreenlet): + def __init__(self): + RawGreenlet.__init__(self) + self.count = 0 + def __getattribute__(self, name): + if name == 'run' and self.count == 0: + self.count = 1 + self.switch(43) + return RawGreenlet.__getattribute__(self, name) + def run(self, value): + while True: + self.parent.switch(value) + g = convoluted() + self.assertEqual(g.switch(42), 43) + # Exits the running greenlet, otherwise it leaks + # XXX: We should be able to automatically fix this + #g.throw(greenlet.GreenletExit) + #del g + self.expect_greenlet_leak = True + + def test_threaded_updatecurrent(self): + # released when main thread should execute + lock1 = threading.Lock() + lock1.acquire() + # released when another thread should execute + lock2 = threading.Lock() + lock2.acquire() + class finalized(object): + def __del__(self): + # happens while in green_updatecurrent() in main greenlet + # should be very careful not to accidentally call it again + # at the same time we must make sure another thread executes + lock2.release() + lock1.acquire() + # now ts_current belongs to another thread + def deallocator(): + greenlet.getcurrent().parent.switch() + def fthread(): + lock2.acquire() + greenlet.getcurrent() + del g[0] + lock1.release() + lock2.acquire() + greenlet.getcurrent() + lock1.release() + main = greenlet.getcurrent() + g = [RawGreenlet(deallocator)] + g[0].bomb = finalized() + g[0].switch() + t = threading.Thread(target=fthread) + t.start() + # let another thread grab ts_current and deallocate g[0] + lock2.release() + lock1.acquire() + # this is the corner stone + # getcurrent() will notice that ts_current belongs to another thread + # and start the update process, which would notice that g[0] should + # be deallocated, and that will execute an object's finalizer. Now, + # that object will let another thread run so it can grab ts_current + # again, which would likely crash the interpreter if there's no + # check for this case at the end of green_updatecurrent(). This test + # passes if getcurrent() returns correct result, but it's likely + # to randomly crash if it's not anyway. + self.assertEqual(greenlet.getcurrent(), main) + # wait for another thread to complete, just in case + t.join(10) + + def test_dealloc_switch_args_not_lost(self): + seen = [] + def worker(): + # wait for the value + value = greenlet.getcurrent().parent.switch() + # delete all references to ourself + del worker[0] + initiator.parent = greenlet.getcurrent().parent + # switch to main with the value, but because + # ts_current is the last reference to us we + # return here immediately, where we resurrect ourself. 
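# Aside: the "implicitly falls thru to parent" rule used by initiator()
# just below, in isolation: when run() returns, the greenlet dies and its
# return value is delivered to the parent's pending switch(). Sketch:
import greenlet as _greenlet

def _sketch_fall_off_the_end():
    g = _greenlet.greenlet(lambda: 42)
    assert g.switch() == 42
    assert g.dead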
+ try: + greenlet.getcurrent().parent.switch(value) + finally: + seen.append(greenlet.getcurrent()) + def initiator(): + return 42 # implicitly falls thru to parent + + worker = [RawGreenlet(worker)] + + worker[0].switch() # prime worker + initiator = RawGreenlet(initiator, worker[0]) + value = initiator.switch() + self.assertTrue(seen) + self.assertEqual(value, 42) + + def test_tuple_subclass(self): + # The point of this test is to see what happens when a custom + # tuple subclass is used as an object passed directly to the C + # function ``green_switch``; part of ``green_switch`` checks + # the ``len()`` of the ``args`` tuple, and that can call back + # into Python. Here, when it calls back into Python, we + # recursively enter ``green_switch`` again. + + # This test is really only relevant on Python 2. The builtin + # `apply` function directly passes the given args tuple object + # to the underlying function, whereas the Python 3 version + # unpacks and repacks into an actual tuple. This could still + # happen using the C API on Python 3 though. We should write a + # builtin version of apply() ourself. + def _apply(func, a, k): + func(*a, **k) + + class mytuple(tuple): + def __len__(self): + greenlet.getcurrent().switch() + return tuple.__len__(self) + args = mytuple() + kwargs = dict(a=42) + def switchapply(): + _apply(greenlet.getcurrent().parent.switch, args, kwargs) + g = RawGreenlet(switchapply) + self.assertEqual(g.switch(), kwargs) + + def test_abstract_subclasses(self): + AbstractSubclass = ABCMeta( + 'AbstractSubclass', + (RawGreenlet,), + {'run': abstractmethod(lambda self: None)}) + + class BadSubclass(AbstractSubclass): + pass + + class GoodSubclass(AbstractSubclass): + def run(self): + pass + + GoodSubclass() # should not raise + self.assertRaises(TypeError, BadSubclass) + + def test_implicit_parent_with_threads(self): + if not gc.isenabled(): + return # cannot test with disabled gc + N = gc.get_threshold()[0] + if N < 50: + return # cannot test with such a small N + def attempt(): + lock1 = threading.Lock() + lock1.acquire() + lock2 = threading.Lock() + lock2.acquire() + recycled = [False] + def another_thread(): + lock1.acquire() # wait for gc + greenlet.getcurrent() # update ts_current + lock2.release() # release gc + t = threading.Thread(target=another_thread) + t.start() + class gc_callback(object): + def __del__(self): + lock1.release() + lock2.acquire() + recycled[0] = True + class garbage(object): + def __init__(self): + self.cycle = self + self.callback = gc_callback() + l = [] + x = range(N*2) + current = greenlet.getcurrent() + g = garbage() + for _ in x: + g = None # lose reference to garbage + if recycled[0]: + # gc callback called prematurely + t.join(10) + return False + last = RawGreenlet() + if recycled[0]: + break # yes! gc called in green_new + l.append(last) # increase allocation counter + else: + # gc callback not called when expected + gc.collect() + if recycled[0]: + t.join(10) + return False + self.assertEqual(last.parent, current) + for g in l: + self.assertEqual(g.parent, current) + return True + for _ in range(5): + if attempt(): + break + + def test_issue_245_reference_counting_subclass_no_threads(self): + # https://github.com/python-greenlet/greenlet/issues/245 + # Before the fix, this crashed pretty reliably on + # Python 3.10, at least on macOS; but much less reliably on other + # interpreters (memory layout must have changed). + # The threaded test crashed more reliably on more interpreters. 
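# Aside: the shape of this regression test, reduced to a sketch (not from
# this module): snapshot sys.getrefcount() of the subclass, exercise the
# code path, collect, and expect the original count. The counts can differ
# on free-threaded builds with immortal types, as the real test notes.
import gc as _gc
import sys as _sys
import greenlet as _greenlet

def _sketch_type_refcount_stable():
    class MyGreenlet(_greenlet.greenlet):
        pass
    before = _sys.getrefcount(MyGreenlet)
    for _ in range(10):
        MyGreenlet(lambda: None).switch()   # each instance lives and dies
    _gc.collect()
    assert _sys.getrefcount(MyGreenlet) == before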
+ from greenlet import getcurrent + from greenlet import GreenletExit + + class Greenlet(RawGreenlet): + pass + + initial_refs = sys.getrefcount(Greenlet) + # This has to be an instance variable because + # Python 2 raises a SyntaxError if we delete a local + # variable referenced in an inner scope. + self.glets = [] # pylint:disable=attribute-defined-outside-init + + def greenlet_main(): + try: + getcurrent().parent.switch() + except GreenletExit: + self.glets.append(getcurrent()) + + # Before the + for _ in range(10): + Greenlet(greenlet_main).switch() + + del self.glets + if RUNNING_ON_FREETHREAD_BUILD: + # Free-threaded builds make types immortal, which gives us + # weird numbers here, and we actually do APPEAR to end + # up with one more reference than we started with, at least on 3.14. + # If we change the code in green_dealloc to avoid increffing the type + # (which fixed this initial bug), then our leakchecks find other objects + # that have leaked, including a tuple, a dict, and a type. So that's not the + # right solution. Instead we change the test: + # XXX: FIXME: Is there a better way? + self.assertGreaterEqual(sys.getrefcount(Greenlet), initial_refs) + else: + self.assertEqual(sys.getrefcount(Greenlet), initial_refs) + + @unittest.skipIf( + PY313 and RUNNING_ON_MANYLINUX, + "The manylinux images appear to hang on this test on 3.13rc2" + # Or perhaps I just got tired of waiting for the 450s timeout. + # Still, it shouldn't take anywhere near that long. Does not reproduce in + # Ubuntu images, on macOS or Windows. + ) + def test_issue_245_reference_counting_subclass_threads(self): + # https://github.com/python-greenlet/greenlet/issues/245 + from threading import Thread + from threading import Event + + from greenlet import getcurrent + + class MyGreenlet(RawGreenlet): + pass + + glets = [] + ref_cleared = Event() + + def greenlet_main(): + getcurrent().parent.switch() + + def thread_main(greenlet_running_event): + mine = MyGreenlet(greenlet_main) + glets.append(mine) + # The greenlets being deleted must be active + mine.switch() + # Don't keep any reference to it in this thread + del mine + # Let main know we published our greenlet. + greenlet_running_event.set() + # Wait for main to let us know the references are + # gone and the greenlet objects no longer reachable + ref_cleared.wait(10) + # The creating thread must call getcurrent() (or a few other + # greenlet APIs) because that's when the thread-local list of dead + # greenlets gets cleared. + getcurrent() + + # We start with 3 references to the subclass: + # - This module + # - Its __mro__ + # - The __subclassess__ attribute of greenlet + # - (If we call gc.get_referents(), we find four entries, including + # some other tuple ``(greenlet)`` that I'm not sure about but must be part + # of the machinery.) + # + # On Python 3.10 it's often enough to just run 3 threads; on Python 2.7, + # more threads are needed, and the results are still + # non-deterministic. Presumably the memory layouts are different + initial_refs = sys.getrefcount(MyGreenlet) + thread_ready_events = [] + thread_count = initial_refs + 45 + if RUNNING_ON_FREETHREAD_BUILD: + # types are immortal, so this is a HUGE number most likely, + # and we can't create that many threads. 
+ thread_count = 50 + for _ in range(thread_count): + event = Event() + thread = Thread(target=thread_main, args=(event,)) + thread_ready_events.append(event) + thread.start() + + + for done_event in thread_ready_events: + done_event.wait(10) + + + del glets[:] + ref_cleared.set() + # Let any other thread run; it will crash the interpreter + # if not fixed (or silently corrupt memory and we possibly crash + # later). + self.wait_for_pending_cleanups() + self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs) + + def test_falling_off_end_switches_to_unstarted_parent_raises_error(self): + def no_args(): + return 13 + + parent_never_started = RawGreenlet(no_args) + + def leaf(): + return 42 + + child = RawGreenlet(leaf, parent_never_started) + + # Because the run function takes to arguments + with self.assertRaises(TypeError): + child.switch() + + def test_falling_off_end_switches_to_unstarted_parent_works(self): + def one_arg(x): + return (x, 24) + + parent_never_started = RawGreenlet(one_arg) + + def leaf(): + return 42 + + child = RawGreenlet(leaf, parent_never_started) + + result = child.switch() + self.assertEqual(result, (42, 24)) + + def test_switch_to_dead_greenlet_with_unstarted_perverse_parent(self): + class Parent(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + raise SomeError + + + parent_never_started = Parent() + seen = [] + child = RawGreenlet(lambda: seen.append(42), parent_never_started) + # Because we automatically start the parent when the child is + # finished + with self.assertRaises(SomeError): + child.switch() + + self.assertEqual(seen, [42]) + + with self.assertRaises(SomeError): + child.switch() + self.assertEqual(seen, [42]) + + def test_switch_to_dead_greenlet_reparent(self): + seen = [] + parent_never_started = RawGreenlet(lambda: seen.append(24)) + child = RawGreenlet(lambda: seen.append(42)) + + child.switch() + self.assertEqual(seen, [42]) + + child.parent = parent_never_started + # This actually is the same as switching to the parent. + result = child.switch() + self.assertIsNone(result) + self.assertEqual(seen, [42, 24]) + + def test_can_access_f_back_of_suspended_greenlet(self): + # This tests our frame rewriting to work around Python 3.12+ having + # some interpreter frames on the C stack. It will crash in the absence + # of that logic. + main = greenlet.getcurrent() + + def outer(): + inner() + + def inner(): + main.switch(sys._getframe(0)) + + hub = RawGreenlet(outer) + # start it + hub.switch() + + # start another greenlet to make sure we aren't relying on + # anything in `hub` still being on the C stack + unrelated = RawGreenlet(lambda: None) + unrelated.switch() + + # now it is suspended + self.assertIsNotNone(hub.gr_frame) + self.assertEqual(hub.gr_frame.f_code.co_name, "inner") + self.assertIsNotNone(hub.gr_frame.f_back) + self.assertEqual(hub.gr_frame.f_back.f_code.co_name, "outer") + # The next line is what would crash + self.assertIsNone(hub.gr_frame.f_back.f_back) + + def test_get_stack_with_nested_c_calls(self): + from functools import partial + from . import _test_extension_cpp + + def recurse(v): + if v > 0: + return v * _test_extension_cpp.test_call(partial(recurse, v - 1)) + return greenlet.getcurrent().parent.switch() + + gr = RawGreenlet(recurse) + gr.switch(5) + frame = gr.gr_frame + for i in range(5): + self.assertEqual(frame.f_locals["v"], i) + frame = frame.f_back + self.assertEqual(frame.f_locals["v"], 5) + self.assertIsNone(frame.f_back) + self.assertEqual(gr.switch(10), 1200) # 1200 = 5! 
* 10 + + def test_frames_always_exposed(self): + # On Python 3.12 this will crash if we don't set the + # gr_frames_always_exposed attribute. More background: + # https://github.com/python-greenlet/greenlet/issues/388 + main = greenlet.getcurrent() + + def outer(): + inner(sys._getframe(0)) + + def inner(frame): + main.switch(frame) + + gr = RawGreenlet(outer) + frame = gr.switch() + + # Do something else to clobber the part of the C stack used by `gr`, + # so we can't skate by on "it just happened to still be there" + unrelated = RawGreenlet(lambda: None) + unrelated.switch() + + self.assertEqual(frame.f_code.co_name, "outer") + # The next line crashes on 3.12 if we haven't exposed the frames. + self.assertIsNone(frame.f_back) + + +class TestGreenletSetParentErrors(TestCase): + def test_threaded_reparent(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = RawGreenlet(lambda: None) + created_event.set() + done_event.wait(10) + + def blank(): + greenlet.getcurrent().parent.switch() + + thread = threading.Thread(target=run) + thread.start() + created_event.wait(10) + g = RawGreenlet(blank) + g.switch() + with self.assertRaises(ValueError) as exc: + g.parent = data['g'] + done_event.set() + thread.join(10) + + self.assertEqual(str(exc.exception), "parent cannot be on a different thread") + + def test_unexpected_reparenting(self): + another = [] + def worker(): + g = RawGreenlet(lambda: None) + another.append(g) + g.switch() + t = threading.Thread(target=worker) + t.start() + t.join(10) + # The first time we switch (running g_initialstub(), which is + # when we look up the run attribute) we attempt to change the + # parent to one from another thread (which also happens to be + # dead). ``g_initialstub()`` should detect this and raise a + # greenlet error. + # + # EXCEPT: With the fix for #252, this is actually detected + # sooner, when setting the parent itself. Prior to that fix, + # the main greenlet from the background thread kept a valid + # value for ``run_info``, and appeared to be a valid parent + # until we actually started the greenlet. But now that it's + # cleared, this test is catching whether ``green_setparent`` + # can detect the dead thread. + # + # Further refactoring once again changes this back to a greenlet.error + # + # We need to wait for the cleanup to happen, but we're + # deliberately leaking a main greenlet here. + self.wait_for_pending_cleanups(initial_main_greenlets=self.main_greenlets_before_test + 1) + + class convoluted(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return RawGreenlet.__getattribute__(self, name) + g = convoluted(lambda: None) + with self.assertRaises(greenlet.error) as exc: + g.switch() + self.assertEqual(str(exc.exception), + "cannot switch to a different thread (which happens to have exited)") + del another[:] + + def test_unexpected_reparenting_thread_running(self): + # Like ``test_unexpected_reparenting``, except the background thread is + # actually still alive. 
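# Aside: the same-thread / no-cycle rules that these reparenting tests build
# on, shown directly (a sketch with illustrative names):
import greenlet as _greenlet

def _sketch_reparent_rules():
    hub = _greenlet.greenlet(lambda: None)
    child = _greenlet.greenlet(lambda: None)
    child.parent = hub              # allowed: same thread, acyclic
    assert child.parent is hub
    try:
        hub.parent = hub            # refused: would create a cycle
    except ValueError as ex:
        assert 'cyclic' in str(ex)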
+ another = [] + switched_to_greenlet = threading.Event() + keep_main_alive = threading.Event() + def worker(): + g = RawGreenlet(lambda: None) + another.append(g) + g.switch() + switched_to_greenlet.set() + keep_main_alive.wait(10) + class convoluted(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return RawGreenlet.__getattribute__(self, name) + + t = threading.Thread(target=worker) + t.start() + + switched_to_greenlet.wait(10) + try: + g = convoluted(lambda: None) + + with self.assertRaises(greenlet.error) as exc: + g.switch() + self.assertIn("Cannot switch to a different thread", str(exc.exception)) + self.assertIn("Expected", str(exc.exception)) + self.assertIn("Current", str(exc.exception)) + finally: + keep_main_alive.set() + t.join(10) + # XXX: Should handle this automatically. + del another[:] + + def test_cannot_delete_parent(self): + worker = RawGreenlet(lambda: None) + self.assertIs(worker.parent, greenlet.getcurrent()) + + with self.assertRaises(AttributeError) as exc: + del worker.parent + self.assertEqual(str(exc.exception), "can't delete attribute") + + def test_cannot_delete_parent_of_main(self): + with self.assertRaises(AttributeError) as exc: + del greenlet.getcurrent().parent + self.assertEqual(str(exc.exception), "can't delete attribute") + + + def test_main_greenlet_parent_is_none(self): + # assuming we're in a main greenlet here. + self.assertIsNone(greenlet.getcurrent().parent) + + def test_set_parent_wrong_types(self): + def bg(): + # Go back to main. + greenlet.getcurrent().parent.switch() + + def check(glet): + for p in None, 1, self, "42": + with self.assertRaises(TypeError) as exc: + glet.parent = p + + self.assertEqual( + str(exc.exception), + "GreenletChecker: Expected any type of greenlet, not " + type(p).__name__) + + # First, not running + g = RawGreenlet(bg) + self.assertFalse(g) + check(g) + + # Then when running. + g.switch() + self.assertTrue(g) + check(g) + + # Let it finish + g.switch() + + + def test_trivial_cycle(self): + glet = RawGreenlet(lambda: None) + with self.assertRaises(ValueError) as exc: + glet.parent = glet + self.assertEqual(str(exc.exception), "cyclic parent chain") + + def test_trivial_cycle_main(self): + # This used to produce a ValueError, but we catch it earlier than that now. 
+ with self.assertRaises(AttributeError) as exc: + greenlet.getcurrent().parent = greenlet.getcurrent() + self.assertEqual(str(exc.exception), "cannot set the parent of a main greenlet") + + def test_deeper_cycle(self): + g1 = RawGreenlet(lambda: None) + g2 = RawGreenlet(lambda: None) + g3 = RawGreenlet(lambda: None) + + g1.parent = g2 + g2.parent = g3 + with self.assertRaises(ValueError) as exc: + g3.parent = g1 + self.assertEqual(str(exc.exception), "cyclic parent chain") + + +class TestRepr(TestCase): + + def assertEndsWith(self, got, suffix): + self.assertTrue(got.endswith(suffix), (got, suffix)) + + def test_main_while_running(self): + r = repr(greenlet.getcurrent()) + self.assertEndsWith(r, " current active started main>") + + def test_main_in_background(self): + main = greenlet.getcurrent() + def run(): + return repr(main) + + g = RawGreenlet(run) + r = g.switch() + self.assertEndsWith(r, ' suspended active started main>') + + def test_initial(self): + r = repr(RawGreenlet()) + self.assertEndsWith(r, ' pending>') + + def test_main_from_other_thread(self): + main = greenlet.getcurrent() + + class T(threading.Thread): + original_main = thread_main = None + main_glet = None + def run(self): + self.original_main = repr(main) + self.main_glet = greenlet.getcurrent() + self.thread_main = repr(self.main_glet) + + t = T() + t.start() + t.join(10) + + self.assertEndsWith(t.original_main, ' suspended active started main>') + self.assertEndsWith(t.thread_main, ' current active started main>') + # give the machinery time to notice the death of the thread, + # and clean it up. Note that we don't use + # ``expect_greenlet_leak`` or wait_for_pending_cleanups, + # because at this point we know we have an extra greenlet + # still reachable. + for _ in range(3): + time.sleep(0.001) + + # In the past, main greenlets, even from dead threads, never + # really appear dead. We have fixed that, and we also report + # that the thread is dead in the repr. (Do this multiple times + # to make sure that we don't self-modify and forget our state + # in the C++ code). + for _ in range(3): + self.assertTrue(t.main_glet.dead) + r = repr(t.main_glet) + self.assertEndsWith(r, ' (thread exited) dead>') + + def test_dead(self): + g = RawGreenlet(lambda: None) + g.switch() + self.assertEndsWith(repr(g), ' dead>') + self.assertNotIn('suspended', repr(g)) + self.assertNotIn('started', repr(g)) + self.assertNotIn('active', repr(g)) + + def test_formatting_produces_native_str(self): + # https://github.com/python-greenlet/greenlet/issues/218 + # %s formatting on Python 2 was producing unicode, not str. + + g_dead = RawGreenlet(lambda: None) + g_not_started = RawGreenlet(lambda: None) + g_cur = greenlet.getcurrent() + + for g in g_dead, g_not_started, g_cur: + + self.assertIsInstance( + '%s' % (g,), + str + ) + self.assertIsInstance( + '%r' % (g,), + str, + ) + + +class TestMainGreenlet(TestCase): + # Tests some implementation details, and relies on some + # implementation details. 
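# Aside: the lifecycle strings TestRepr (above) checks, recapped as one
# runnable sketch: a fresh greenlet is 'pending', a started-and-suspended
# one reports 'started', and a finished one is 'dead'.
import greenlet as _greenlet

def _sketch_repr_lifecycle():
    g = _greenlet.greenlet(lambda: _greenlet.getcurrent().parent.switch())
    assert repr(g).endswith(' pending>')
    g.switch()                      # now parked inside its run()
    assert 'started' in repr(g)
    g.switch()                      # run() returns; the greenlet dies
    assert repr(g).endswith(' dead>')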
+ + def _check_current_is_main(self): + # implementation detail + assert 'main' in repr(greenlet.getcurrent()) + + t = type(greenlet.getcurrent()) + assert 'main' not in repr(t) + return t + + def test_main_greenlet_type_can_be_subclassed(self): + main_type = self._check_current_is_main() + subclass = type('subclass', (main_type,), {}) + self.assertIsNotNone(subclass) + + def test_main_greenlet_is_greenlet(self): + self._check_current_is_main() + self.assertIsInstance(greenlet.getcurrent(), RawGreenlet) + + + +class TestBrokenGreenlets(TestCase): + # Tests for things that used to, or still do, terminate the interpreter. + # This often means doing unsavory things. + + def test_failed_to_initialstub(self): + def func(): + raise AssertionError("Never get here") + + + g = greenlet._greenlet.UnswitchableGreenlet(func) + g.force_switch_error = True + + with self.assertRaisesRegex(SystemError, + "Failed to switch stacks into a greenlet for the first time."): + g.switch() + + def test_failed_to_switch_into_running(self): + runs = [] + def func(): + runs.append(1) + greenlet.getcurrent().parent.switch() + runs.append(2) + greenlet.getcurrent().parent.switch() + runs.append(3) # pragma: no cover + + g = greenlet._greenlet.UnswitchableGreenlet(func) + g.switch() + self.assertEqual(runs, [1]) + g.switch() + self.assertEqual(runs, [1, 2]) + g.force_switch_error = True + + with self.assertRaisesRegex(SystemError, + "Failed to switch stacks into a running greenlet."): + g.switch() + + # If we stopped here, we would fail the leakcheck, because we've left + # the ``inner_bootstrap()`` C frame and its descendents hanging around, + # which have a bunch of Python references. They'll never get cleaned up + # if we don't let the greenlet finish. + g.force_switch_error = False + g.switch() + self.assertEqual(runs, [1, 2, 3]) + + def test_failed_to_slp_switch_into_running(self): + ex = self.assertScriptRaises('fail_slp_switch.py') + + self.assertIn('fail_slp_switch is running', ex.output) + self.assertIn(ex.returncode, self.get_expected_returncodes_for_aborted_process()) + + def test_reentrant_switch_two_greenlets(self): + # Before we started capturing the arguments in g_switch_finish, this could crash. + output = self.run_script('fail_switch_two_greenlets.py') + self.assertIn('In g1_run', output) + self.assertIn('TRACE', output) + self.assertIn('LEAVE TRACE', output) + self.assertIn('Falling off end of main', output) + self.assertIn('Falling off end of g1_run', output) + self.assertIn('Falling off end of g2', output) + + def test_reentrant_switch_three_greenlets(self): + # On debug builds of greenlet, this used to crash with an assertion error; + # on non-debug versions, it ran fine (which it should not do!). + # Now it always crashes correctly with a TypeError + ex = self.assertScriptRaises('fail_switch_three_greenlets.py', exitcodes=(1,)) + + self.assertIn('TypeError', ex.output) + self.assertIn('positional arguments', ex.output) + + def test_reentrant_switch_three_greenlets2(self): + # This actually passed on debug and non-debug builds. It + # should probably have been triggering some debug assertions + # but it didn't. + # + # I think the fixes for the above test also kicked in here. 
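# Aside: the fail_switch_* scripts referenced here install greenlet trace
# functions; for orientation, the public hook is greenlet.settrace. Sketch
# (not part of this module):
import greenlet as _greenlet

def _sketch_trace_switches():
    events = []
    def tracer(event, args):
        origin, target = args       # the greenlet we left, the one entered
        events.append(event)        # event is 'switch' or 'throw'
    old = _greenlet.settrace(tracer)
    try:
        _greenlet.greenlet(lambda: None).switch()
    finally:
        _greenlet.settrace(old)     # always restore the previous tracer
    assert events == ['switch', 'switch']   # into the child and back out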
+ output = self.run_script('fail_switch_three_greenlets2.py') + self.assertIn( + "RESULTS: [('trace', 'switch'), " + "('trace', 'switch'), ('g2 arg', 'g2 from tracefunc'), " + "('trace', 'switch'), ('main g1', 'from g2_run'), ('trace', 'switch'), " + "('g1 arg', 'g1 from main'), ('trace', 'switch'), ('main g2', 'from g1_run'), " + "('trace', 'switch'), ('g1 from parent', 'g1 from main 2'), ('trace', 'switch'), " + "('main g1.2', 'g1 done'), ('trace', 'switch'), ('g2 from parent', ()), " + "('trace', 'switch'), ('main g2.2', 'g2 done')]", + output + ) + + def test_reentrant_switch_GreenletAlreadyStartedInPython(self): + output = self.run_script('fail_initialstub_already_started.py') + + self.assertIn( + "RESULTS: ['Begin C', 'Switch to b from B.__getattribute__ in C', " + "('Begin B', ()), '_B_run switching to main', ('main from c', 'From B'), " + "'B.__getattribute__ back from main in C', ('Begin A', (None,)), " + "('A dead?', True, 'B dead?', True, 'C dead?', False), " + "'C done', ('main from c.2', None)]", + output + ) + + def test_reentrant_switch_run_callable_has_del(self): + output = self.run_script('fail_clearing_run_switches.py') + self.assertIn( + "RESULTS [" + "('G.__getattribute__', 'run'), ('RunCallable', '__del__'), " + "('main: g.switch()', 'from RunCallable'), ('run_func', 'enter')" + "]", + output + ) + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet_trash.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet_trash.py new file mode 100644 index 0000000..c1fc137 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_greenlet_trash.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +""" +Tests for greenlets interacting with the CPython trash can API. + +The CPython trash can API is not designed to be re-entered from a +single thread. But this can happen using greenlets, if something +during the object deallocation process switches greenlets, and this second +greenlet then causes the trash can to get entered again. Here, we do this +very explicitly, but in other cases (like gevent) it could be arbitrarily more +complicated: for example, a weakref callback might try to acquire a lock that's +already held by another greenlet; that would allow a greenlet switch to occur. + +See https://github.com/gevent/gevent/issues/1909 + +This test is fragile and relies on details of the CPython +implementation (like most of the rest of this package): + + - We enter the trashcan and deferred deallocation after + ``_PyTrash_UNWIND_LEVEL`` calls. This constant, defined in + CPython's object.c, is generally 50. That's basically how many objects are required to + get us into the deferred deallocation situation. + + - The test fails by hitting an ``assert()`` in object.c; if the + build didn't enable assert, then we don't catch this. + + - If the test fails in that way, the interpreter crashes. +""" +from __future__ import print_function, absolute_import, division + +import unittest + + +class TestTrashCanReEnter(unittest.TestCase): + + def test_it(self): + try: + # pylint:disable-next=no-name-in-module + from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=unused-import + except ImportError: + import sys + # Python 3.13 has not "trash delete nesting" anymore (but "delete later") + assert sys.version_info[:2] >= (3, 13) + self.skipTest("get_tstate_trash_delete_nesting is not available.") + + # Try several times to trigger it, because it isn't 100% + # reliable. 
+ for _ in range(10): + self.check_it() + + def check_it(self): # pylint:disable=too-many-statements + import greenlet + from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=no-name-in-module + main = greenlet.getcurrent() + + assert get_tstate_trash_delete_nesting() == 0 + + # We expect to be in deferred deallocation after this many + # deallocations have occurred. TODO: I wish we had a better way to do + # this --- that was before get_tstate_trash_delete_nesting; perhaps + # we can use that API to do better? + TRASH_UNWIND_LEVEL = 50 + # How many objects to put in a container; it's the container that + # queues objects for deferred deallocation. + OBJECTS_PER_CONTAINER = 500 + + class Dealloc: # define the class here because we alter class variables each time we run. + """ + An object with a ``__del__`` method. When it starts getting deallocated + from a deferred trash can run, it switches greenlets, allocates more objects + which then also go in the trash can. If we don't save state appropriately, + nesting gets out of order and we can crash the interpreter. + """ + + #: Has our deallocation actually run and switched greenlets? + #: When it does, this will be set to the current greenlet. This should + #: be happening in the main greenlet, so we check that down below. + SPAWNED = False + + #: Has the background greenlet run? + BG_RAN = False + + BG_GLET = None + + #: How many of these things have ever been allocated. + CREATED = 0 + + #: How many of these things have ever been deallocated. + DESTROYED = 0 + + #: How many were destroyed not in the main greenlet. There should always + #: be some. + #: If the test is broken or things change in the trashcan implementation, + #: this may not be correct. + DESTROYED_BG = 0 + + def __init__(self, sequence_number): + """ + :param sequence_number: The ordinal of this object during + one particular creation run. This is used to detect (guess, really) + when we have entered the trash can's deferred deallocation. + """ + self.i = sequence_number + Dealloc.CREATED += 1 + + def __del__(self): + if self.i == TRASH_UNWIND_LEVEL and not self.SPAWNED: + Dealloc.SPAWNED = greenlet.getcurrent() + other = Dealloc.BG_GLET = greenlet.greenlet(background_greenlet) + x = other.switch() + assert x == 42 + # It's important that we don't switch back to the greenlet, + # we leave it hanging there in an incomplete state. But we don't let it + # get collected, either. If we complete it now, while we're still + # in the scope of the initial trash can, things work out and we + # don't see the problem. We need this greenlet to complete + # at some point in the future, after we've exited this trash can invocation. + del other + elif self.i == 40 and greenlet.getcurrent() is not main: + Dealloc.BG_RAN = True + try: + main.switch(42) + except greenlet.GreenletExit as ex: + # We expect this; all references to us go away + # while we're still running, and we need to finish deleting + # ourself. + Dealloc.BG_RAN = type(ex) + del ex + + # Record the fact that we're dead last of all. This ensures that + # we actually get returned too. + Dealloc.DESTROYED += 1 + if greenlet.getcurrent() is not main: + Dealloc.DESTROYED_BG += 1 + + + def background_greenlet(): + # We direct through a second function, instead of + # directly calling ``make_some()``, so that we have complete + # control over when these objects are destroyed: we need them + # to be destroyed in the context of the background greenlet + t = make_some() + del t # Triggere deletion. 
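# Aside: the premise the Dealloc machinery above leans on, in miniature: on
# CPython, __del__ runs in whatever greenlet drops the last reference, so
# finalizers can execute off the main greenlet. Sketch (illustrative only):
import greenlet as _greenlet

def _sketch_del_runs_in_dropping_greenlet():
    where = []
    class Tracked:
        def __del__(self):
            where.append(_greenlet.getcurrent())
    def bg():
        t = Tracked()               # allocated in this greenlet...
        del t                       # ...and its refcount hits zero here too
    g = _greenlet.greenlet(bg)
    g.switch()
    assert where == [g]             # the finalizer ran inside bg's greenlet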
+        def make_some():
+            t = ()
+            i = OBJECTS_PER_CONTAINER
+            while i:
+                # Nest the tuples; it's the recursion that gets us
+                # into trash.
+                t = (Dealloc(i), t)
+                i -= 1
+            return t
+
+
+        some = make_some()
+        self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER)
+        self.assertEqual(Dealloc.DESTROYED, 0)
+
+        # If we're going to crash, it should be on the following line.
+        # We only crash if ``assert()`` is enabled, of course.
+        del some
+
+        # For non-debug builds of CPython, we won't crash. The best we can do is check
+        # the nesting level explicitly.
+        self.assertEqual(0, get_tstate_trash_delete_nesting())
+
+        # Discard this, raising GreenletExit into where it is waiting.
+        Dealloc.BG_GLET = None
+        # The same nesting level maintains.
+        self.assertEqual(0, get_tstate_trash_delete_nesting())
+
+        # We definitely cleaned some up in the background
+        self.assertGreater(Dealloc.DESTROYED_BG, 0)
+
+        # Make sure all the cleanups happened.
+        self.assertIs(Dealloc.SPAWNED, main)
+        self.assertTrue(Dealloc.BG_RAN)
+        self.assertEqual(Dealloc.BG_RAN, greenlet.GreenletExit)
+        self.assertEqual(Dealloc.CREATED, Dealloc.DESTROYED)
+        self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER * 2)
+
+        import gc
+        gc.collect()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_leaks.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_leaks.py
new file mode 100644
index 0000000..e09da7d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_leaks.py
@@ -0,0 +1,457 @@
+# -*- coding: utf-8 -*-
+"""
+Testing scenarios that may have leaked.
+"""
+from __future__ import print_function, absolute_import, division
+
+import sys
+import gc
+
+import time
+import weakref
+import threading
+
+
+import greenlet
+from . import TestCase
+from . import PY314
+from . import RUNNING_ON_FREETHREAD_BUILD
+from .leakcheck import fails_leakcheck
+from .leakcheck import ignores_leakcheck
+from .leakcheck import RUNNING_ON_MANYLINUX
+
+
+# pylint:disable=protected-access
+
+assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0
+
+class HasFinalizerTracksInstances(object):
+    EXTANT_INSTANCES = set()
+    def __init__(self, msg):
+        self.msg = sys.intern(msg)
+        self.EXTANT_INSTANCES.add(id(self))
+    def __del__(self):
+        self.EXTANT_INSTANCES.remove(id(self))
+    def __repr__(self):
+        return "<HasFinalizerTracksInstances at 0x%x %r>" % (
+            id(self), self.msg
+        )
+    @classmethod
+    def reset(cls):
+        cls.EXTANT_INSTANCES.clear()
+
+
+def fails_leakcheck_except_on_free_thraded(func):
+    if RUNNING_ON_FREETHREAD_BUILD:
+        # These all seem to pass on free threading because
+        # of the changes to the garbage collector
+        return func
+    return fails_leakcheck(func)
+
+
+class TestLeaks(TestCase):
+
+    def test_arg_refs(self):
+        args = ('a', 'b', 'c')
+        refcount_before = sys.getrefcount(args)
+        # pylint:disable=unnecessary-lambda
+        g = greenlet.greenlet(
+            lambda *args: greenlet.getcurrent().parent.switch(*args))
+        for _ in range(100):
+            g.switch(*args)
+        self.assertEqual(sys.getrefcount(args), refcount_before)
+
+    def test_kwarg_refs(self):
+        kwargs = {}
+        self.assertEqual(sys.getrefcount(kwargs), 2 if not PY314 else 1)
+        # pylint:disable=unnecessary-lambda
+        g = greenlet.greenlet(
+            lambda **gkwargs: greenlet.getcurrent().parent.switch(**gkwargs))
+        for _ in range(100):
+            g.switch(**kwargs)
+        # Python 3.14 elides reference counting operations
+        # in some cases.
See https://github.com/python/cpython/pull/130708 + self.assertEqual(sys.getrefcount(kwargs), 2 if not PY314 else 1) + + + @staticmethod + def __recycle_threads(): + # By introducing a thread that does sleep we allow other threads, + # that have triggered their __block condition, but did not have a + # chance to deallocate their thread state yet, to finally do so. + # The way it works is by requiring a GIL switch (different thread), + # which does a GIL release (sleep), which might do a GIL switch + # to finished threads and allow them to clean up. + def worker(): + time.sleep(0.001) + t = threading.Thread(target=worker) + t.start() + time.sleep(0.001) + t.join(10) + + def test_threaded_leak(self): + gg = [] + def worker(): + # only main greenlet present + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join(10) + del t + greenlet.getcurrent() # update ts_current + self.__recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def test_threaded_adv_leak(self): + gg = [] + def worker(): + # main and additional *finished* greenlets + ll = greenlet.getcurrent().ll = [] + def additional(): + ll.append(greenlet.getcurrent()) + for _ in range(2): + greenlet.greenlet(additional).switch() + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join(10) + del t + greenlet.getcurrent() # update ts_current + self.__recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def assertClocksUsed(self): + used = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() + self.assertGreaterEqual(used, 0) + # we don't lose the value + greenlet._greenlet.enable_optional_cleanup(True) + used2 = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() + self.assertEqual(used, used2) + self.assertGreater(greenlet._greenlet.CLOCKS_PER_SEC, 1) + + def _check_issue251(self, + manually_collect_background=True, + explicit_reference_to_switch=False): + # See https://github.com/python-greenlet/greenlet/issues/251 + # Killing a greenlet (probably not the main one) + # in one thread from another thread would + # result in leaking a list (the ts_delkey list). + # We no longer use lists to hold that stuff, though. + + # For the test to be valid, even empty lists have to be tracked by the + # GC + + assert gc.is_tracked([]) + HasFinalizerTracksInstances.reset() + greenlet.getcurrent() + greenlets_before = self.count_objects(greenlet.greenlet, exact_kind=False) + + background_glet_running = threading.Event() + background_glet_killed = threading.Event() + background_greenlets = [] + + # XXX: Switching this to a greenlet subclass that overrides + # run results in all callers failing the leaktest; that + # greenlet instance is leaked. There's a bound method for + # run() living on the stack of the greenlet in g_initialstub, + # and since we don't manually switch back to the background + # greenlet to let it "fall off the end" and exit the + # g_initialstub function, it never gets cleaned up. Making the + # garbage collector aware of this bound method (making it an + # attribute of the greenlet structure and traversing into it) + # doesn't help, for some reason. + def background_greenlet(): + # Throw control back to the main greenlet. 
+ jd = HasFinalizerTracksInstances("DELETING STACK OBJECT") + greenlet._greenlet.set_thread_local( + 'test_leaks_key', + HasFinalizerTracksInstances("DELETING THREAD STATE")) + # Explicitly keeping 'switch' in a local variable + # breaks this test in all versions + if explicit_reference_to_switch: + s = greenlet.getcurrent().parent.switch + s([jd]) + else: + greenlet.getcurrent().parent.switch([jd]) + + bg_main_wrefs = [] + + def background_thread(): + glet = greenlet.greenlet(background_greenlet) + bg_main_wrefs.append(weakref.ref(glet.parent)) + + background_greenlets.append(glet) + glet.switch() # Be sure it's active. + # Control is ours again. + del glet # Delete one reference from the thread it runs in. + background_glet_running.set() + background_glet_killed.wait(10) + + # To trigger the background collection of the dead + # greenlet, thus clearing out the contents of the list, we + # need to run some APIs. See issue 252. + if manually_collect_background: + greenlet.getcurrent() + + + t = threading.Thread(target=background_thread) + t.start() + background_glet_running.wait(10) + greenlet.getcurrent() + lists_before = self.count_objects(list, exact_kind=True) + + assert len(background_greenlets) == 1 + self.assertFalse(background_greenlets[0].dead) + # Delete the last reference to the background greenlet + # from a different thread. This puts it in the background thread's + # ts_delkey list. + del background_greenlets[:] + background_glet_killed.set() + + # Now wait for the background thread to die. + t.join(10) + del t + # As part of the fix for 252, we need to cycle the ceval.c + # interpreter loop to be sure it has had a chance to process + # the pending call. + self.wait_for_pending_cleanups() + + lists_after = self.count_objects(list, exact_kind=True) + greenlets_after = self.count_objects(greenlet.greenlet, exact_kind=False) + + # On 2.7, we observe that lists_after is smaller than + # lists_before. No idea what lists got cleaned up. All the + # Python 3 versions match exactly. + self.assertLessEqual(lists_after, lists_before) + # On versions after 3.6, we've successfully cleaned up the + # greenlet references thanks to the internal "vectorcall" + # protocol; prior to that, there is a reference path through + # the ``greenlet.switch`` method still on the stack that we + # can't reach to clean up. The C code goes through terrific + # lengths to clean that up. + if not explicit_reference_to_switch \ + and greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: + # If cleanup was disabled, though, we may not find it. + self.assertEqual(greenlets_after, greenlets_before) + if manually_collect_background: + # TODO: Figure out how to make this work! + # The one on the stack is still leaking somehow + # in the non-manually-collect state. + self.assertEqual(HasFinalizerTracksInstances.EXTANT_INSTANCES, set()) + else: + # The explicit reference prevents us from collecting it + # and it isn't always found by the GC either for some + # reason. The entire frame is leaked somehow, on some + # platforms (e.g., MacPorts builds of Python (all + # versions!)), but not on other platforms (the linux and + # windows builds on GitHub actions and Appveyor). So we'd + # like to write a test that proves that the main greenlet + # sticks around, and we can on my machine (macOS 11.6, + # MacPorts builds of everything) but we can't write that + # same test on other platforms. However, hopefully iteration + # done by leakcheck will find it. 
+ pass + + if greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: + self.assertClocksUsed() + + def test_issue251_killing_cross_thread_leaks_list(self): + self._check_issue251() + + def test_issue251_with_cleanup_disabled(self): + greenlet._greenlet.enable_optional_cleanup(False) + try: + self._check_issue251() + finally: + greenlet._greenlet.enable_optional_cleanup(True) + + @fails_leakcheck_except_on_free_thraded + def test_issue251_issue252_need_to_collect_in_background(self): + # Between greenlet 1.1.2 and the next version, this was still + # failing because the leak of the list still exists when we + # don't call a greenlet API before exiting the thread. The + # proximate cause is that neither of the two greenlets from + # the background thread are actually being destroyed, even + # though the GC is in fact visiting both objects. It's not + # clear where that leak is? For some reason the thread-local + # dict holding it isn't being cleaned up. + # + # The leak, I think, is in the CPYthon internal function that + # calls into green_switch(). The argument tuple is still on + # the C stack somewhere and can't be reached? That doesn't + # make sense, because the tuple should be collectable when + # this object goes away. + # + # Note that this test sometimes spuriously passes on Linux, + # for some reason, but I've never seen it pass on macOS. + self._check_issue251(manually_collect_background=False) + + @fails_leakcheck_except_on_free_thraded + def test_issue251_issue252_need_to_collect_in_background_cleanup_disabled(self): + self.expect_greenlet_leak = True + greenlet._greenlet.enable_optional_cleanup(False) + try: + self._check_issue251(manually_collect_background=False) + finally: + greenlet._greenlet.enable_optional_cleanup(True) + + @fails_leakcheck_except_on_free_thraded + def test_issue251_issue252_explicit_reference_not_collectable(self): + self._check_issue251( + manually_collect_background=False, + explicit_reference_to_switch=True) + + UNTRACK_ATTEMPTS = 100 + + def _only_test_some_versions(self): + # We're only looking for this problem specifically on 3.11, + # and this set of tests is relatively fragile, depending on + # OS and memory management details. So we want to run it on 3.11+ + # (obviously) but not every older 3.x version in order to reduce + # false negatives. At the moment, those false results seem to have + # resolved, so we are actually running this on 3.8+ + assert sys.version_info[0] >= 3 + if sys.version_info[:2] < (3, 8): + self.skipTest('Only observed on 3.11') + if RUNNING_ON_MANYLINUX: + self.skipTest("Slow and not worth repeating here") + + @ignores_leakcheck + # Because we're just trying to track raw memory, not objects, and running + # the leakcheck makes an already slow test slower. + def test_untracked_memory_doesnt_increase(self): + # See https://github.com/gevent/gevent/issues/1924 + # and https://github.com/python-greenlet/greenlet/issues/328 + self._only_test_some_versions() + def f(): + return 1 + + ITER = 10000 + def run_it(): + for _ in range(ITER): + greenlet.greenlet(f).switch() + + # Establish baseline + for _ in range(3): + run_it() + + # uss: (Linux, macOS, Windows): aka "Unique Set Size", this is + # the memory which is unique to a process and which would be + # freed if the process was terminated right now. 
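+        # One plausible way to read USS from Python -- and, presumably, roughly
+        # what the get_process_uss() helper wraps (an assumption, not a quote
+        # of that helper) -- is:
+        #     import psutil
+        #     psutil.Process().memory_full_info().uss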
+ uss_before = self.get_process_uss() + + for count in range(self.UNTRACK_ATTEMPTS): + uss_before = max(uss_before, self.get_process_uss()) + run_it() + + uss_after = self.get_process_uss() + if uss_after <= uss_before and count > 1: + break + + self.assertLessEqual(uss_after, uss_before) + + def _check_untracked_memory_thread(self, deallocate_in_thread=True): + self._only_test_some_versions() + # Like the above test, but what if there are a bunch of + # unfinished greenlets in a thread that dies? + # Does it matter if we deallocate in the thread or not? + EXIT_COUNT = [0] + + def f(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + EXIT_COUNT[0] += 1 + raise + return 1 + + ITER = 10000 + def run_it(): + glets = [] + for _ in range(ITER): + # Greenlet starts, switches back to us. + # We keep a strong reference to the greenlet though so it doesn't + # get a GreenletExit exception. + g = greenlet.greenlet(f) + glets.append(g) + g.switch() + + return glets + + test = self + + class ThreadFunc: + uss_before = uss_after = 0 + glets = () + ITER = 2 + def __call__(self): + self.uss_before = test.get_process_uss() + + for _ in range(self.ITER): + self.glets += tuple(run_it()) + + for g in self.glets: + test.assertIn('suspended active', str(g)) + # Drop them. + if deallocate_in_thread: + self.glets = () + self.uss_after = test.get_process_uss() + + # Establish baseline + uss_before = uss_after = None + for count in range(self.UNTRACK_ATTEMPTS): + EXIT_COUNT[0] = 0 + thread_func = ThreadFunc() + t = threading.Thread(target=thread_func) + t.start() + t.join(30) + self.assertFalse(t.is_alive()) + + if uss_before is None: + uss_before = thread_func.uss_before + + uss_before = max(uss_before, thread_func.uss_before) + if deallocate_in_thread: + self.assertEqual(thread_func.glets, ()) + self.assertEqual(EXIT_COUNT[0], ITER * thread_func.ITER) + + del thread_func # Deallocate the greenlets; but this won't raise into them + del t + if not deallocate_in_thread: + self.assertEqual(EXIT_COUNT[0], 0) + if deallocate_in_thread: + self.wait_for_pending_cleanups() + + uss_after = self.get_process_uss() + # See if we achieve a non-growth state at some point. Break when we do. + if uss_after <= uss_before and count > 1: + break + + self.wait_for_pending_cleanups() + uss_after = self.get_process_uss() + self.assertLessEqual(uss_after, uss_before, "after attempts %d" % (count,)) + + @ignores_leakcheck + # Because we're just trying to track raw memory, not objects, and running + # the leakcheck makes an already slow test slower. + def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_thread(self): + self._check_untracked_memory_thread(deallocate_in_thread=True) + + @ignores_leakcheck + # Because the main greenlets from the background threads do not exit in a timely fashion, + # we fail the object-based leakchecks. + def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main(self): + self._check_untracked_memory_thread(deallocate_in_thread=False) + +if __name__ == '__main__': + __import__('unittest').main() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_stack_saved.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_stack_saved.py new file mode 100644 index 0000000..b362bf9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_stack_saved.py @@ -0,0 +1,19 @@ +import greenlet +from . 
import TestCase + + +class Test(TestCase): + + def test_stack_saved(self): + main = greenlet.getcurrent() + self.assertEqual(main._stack_saved, 0) + + def func(): + main.switch(main._stack_saved) + + g = greenlet.greenlet(func) + x = g.switch() + self.assertGreater(x, 0) + self.assertGreater(g._stack_saved, 0) + g.switch() + self.assertEqual(g._stack_saved, 0) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_throw.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_throw.py new file mode 100644 index 0000000..f4f9a14 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_throw.py @@ -0,0 +1,128 @@ +import sys + + +from greenlet import greenlet +from . import TestCase + +def switch(*args): + return greenlet.getcurrent().parent.switch(*args) + + +class ThrowTests(TestCase): + def test_class(self): + def f(): + try: + switch("ok") + except RuntimeError: + switch("ok") + return + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError) + self.assertEqual(res, "ok") + + def test_val(self): + def f(): + try: + switch("ok") + except RuntimeError: + val = sys.exc_info()[1] + if str(val) == "ciao": + switch("ok") + return + switch("fail") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError("ciao")) + self.assertEqual(res, "ok") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError, "ciao") + self.assertEqual(res, "ok") + + def test_kill(self): + def f(): + switch("ok") + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw() + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + self.assertTrue(g.dead) + res = g.throw() # immediately eaten by the already-dead greenlet + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + + def test_throw_goes_to_original_parent(self): + main = greenlet.getcurrent() + + def f1(): + try: + main.switch("f1 ready to catch") + except IndexError: + return "caught" + return "normal exit" + + def f2(): + main.switch("from f2") + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + with self.assertRaises(IndexError): + g2.throw(IndexError) + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.switch() + self.assertEqual(res, "from f2") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + def test_non_traceback_param(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + Exception, + Exception(), + self + ) + self.assertEqual(str(exc.exception), + "throw() third argument must be a traceback object") + + def test_instance_of_wrong_type(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + Exception(), + BaseException() + ) + + self.assertEqual(str(exc.exception), + "instance exception may not have a separate value") + + def test_not_throwable(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + "abc" + ) + self.assertEqual(str(exc.exception), + "exceptions must be classes, or instances, not str") diff --git 
a/venv/lib/python3.12/site-packages/greenlet/tests/test_tracing.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_tracing.py new file mode 100644 index 0000000..235fbcd --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_tracing.py @@ -0,0 +1,299 @@ +from __future__ import print_function +import sys +import sysconfig +import greenlet +import unittest + +from . import TestCase +from . import PY312 + +# https://discuss.python.org/t/cpython-3-12-greenlet-and-tracing-profiling-how-to-not-crash-and-get-correct-results/33144/2 +# When build variables are available, OPT is the best way of detecting +# the build with assertions enabled. Otherwise, fallback to detecting PyDEBUG +# build. +ASSERTION_BUILD_PY312 = ( + PY312 and ( + "-DNDEBUG" not in sysconfig.get_config_var("OPT").split() + if sysconfig.get_config_var("OPT") is not None + else hasattr(sys, 'gettotalrefcount') + ), + "Broken on assertion-enabled builds of Python 3.12" +) + +class SomeError(Exception): + pass + +class GreenletTracer(object): + oldtrace = None + + def __init__(self, error_on_trace=False): + self.actions = [] + self.error_on_trace = error_on_trace + + def __call__(self, *args): + self.actions.append(args) + if self.error_on_trace: + raise SomeError + + def __enter__(self): + self.oldtrace = greenlet.settrace(self) + return self.actions + + def __exit__(self, *args): + greenlet.settrace(self.oldtrace) + + +class TestGreenletTracing(TestCase): + """ + Tests of ``greenlet.settrace()`` + """ + + def test_a_greenlet_tracing(self): + main = greenlet.getcurrent() + def dummy(): + pass + def dummyexc(): + raise SomeError() + + with GreenletTracer() as actions: + g1 = greenlet.greenlet(dummy) + g1.switch() + g2 = greenlet.greenlet(dummyexc) + self.assertRaises(SomeError, g2.switch) + + self.assertEqual(actions, [ + ('switch', (main, g1)), + ('switch', (g1, main)), + ('switch', (main, g2)), + ('throw', (g2, main)), + ]) + + def test_b_exception_disables_tracing(self): + main = greenlet.getcurrent() + def dummy(): + main.switch() + g = greenlet.greenlet(dummy) + g.switch() + with GreenletTracer(error_on_trace=True) as actions: + self.assertRaises(SomeError, g.switch) + self.assertEqual(greenlet.gettrace(), None) + + self.assertEqual(actions, [ + ('switch', (main, g)), + ]) + + def test_set_same_tracer_twice(self): + # https://github.com/python-greenlet/greenlet/issues/332 + # Our logic in asserting that the tracefunction should + # gain a reference was incorrect if the same tracefunction was set + # twice. + tracer = GreenletTracer() + with tracer: + greenlet.settrace(tracer) + + +class PythonTracer(object): + oldtrace = None + + def __init__(self): + self.actions = [] + + def __call__(self, frame, event, arg): + # Record the co_name so we have an idea what function we're in. + self.actions.append((event, frame.f_code.co_name)) + + def __enter__(self): + self.oldtrace = sys.setprofile(self) + return self.actions + + def __exit__(self, *args): + sys.setprofile(self.oldtrace) + +def tpt_callback(): + return 42 + +class TestPythonTracing(TestCase): + """ + Tests of the interaction of ``sys.settrace()`` + with greenlet facilities. + + NOTE: Most of this is probably CPython specific. 
+ """ + + maxDiff = None + + def test_trace_events_trivial(self): + with PythonTracer() as actions: + tpt_callback() + # If we use the sys.settrace instead of setprofile, we get + # this: + + # self.assertEqual(actions, [ + # ('call', 'tpt_callback'), + # ('call', '__exit__'), + # ]) + + self.assertEqual(actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def _trace_switch(self, glet): + with PythonTracer() as actions: + glet.switch() + return actions + + def _check_trace_events_func_already_set(self, glet): + actions = self._trace_switch(glet) + self.assertEqual(actions, [ + ('return', '__enter__'), + ('c_call', '_trace_switch'), + ('call', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('c_return', '_trace_switch'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def test_trace_events_into_greenlet_func_already_set(self): + def run(): + return tpt_callback() + + self._check_trace_events_func_already_set(greenlet.greenlet(run)) + + def test_trace_events_into_greenlet_subclass_already_set(self): + class X(greenlet.greenlet): + def run(self): + return tpt_callback() + self._check_trace_events_func_already_set(X()) + + def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer): + g.switch() + tpt_callback() + tracer.__exit__() + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + + def test_trace_events_from_greenlet_func_sets_profiler(self): + tracer = PythonTracer() + def run(): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run), + tracer) + + def test_trace_events_from_greenlet_subclass_sets_profiler(self): + tracer = PythonTracer() + class X(greenlet.greenlet): + def run(self): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(X(), tracer) + + @unittest.skipIf(*ASSERTION_BUILD_PY312) + def test_trace_events_multiple_greenlets_switching(self): + tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + x = g1.switch() + self.assertEqual(x, 42) + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'g2_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + @unittest.skipIf(*ASSERTION_BUILD_PY312) + def test_trace_events_multiple_greenlets_switching_siblings(self): + # Like the first version, but get both greenlets running first + # as "siblings" and then establish the tracing. 
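+        # (``switch`` is a C method, so the profiler installed via
+        # sys.setprofile reports entering it as a ``c_call`` event; and since
+        # this tracer records frame.f_code.co_name, that event is attributed to
+        # the *calling* function -- hence entries like ('c_call', 'g1_run') in
+        # the expected actions below.)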
+ tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + greenlet.getcurrent().parent.switch() + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + greenlet.getcurrent().parent.switch() + + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + # Start g1 + g1.switch() + # And it immediately returns control to us. + # Start g2 + g2.switch() + # Which also returns. Now kick of the real part of the + # test. + x = g1.switch() + self.assertEqual(x, 42) + + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_version.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_version.py new file mode 100644 index 0000000..96c17cf --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_version.py @@ -0,0 +1,41 @@ +#! /usr/bin/env python +from __future__ import absolute_import +from __future__ import print_function + +import sys +import os +from unittest import TestCase as NonLeakingTestCase + +import greenlet + +# No reason to run this multiple times under leakchecks, +# it doesn't do anything. +class VersionTests(NonLeakingTestCase): + def test_version(self): + def find_dominating_file(name): + if os.path.exists(name): + return name + + tried = [] + here = os.path.abspath(os.path.dirname(__file__)) + for i in range(10): + up = ['..'] * i + path = [here] + up + [name] + fname = os.path.join(*path) + fname = os.path.abspath(fname) + tried.append(fname) + if os.path.exists(fname): + return fname + raise AssertionError("Could not find file " + name + "; checked " + str(tried)) + + try: + setup_py = find_dominating_file('setup.py') + except AssertionError as e: + self.skipTest("Unable to find setup.py; must be out of tree. " + str(e)) + + + invoke_setup = "%s %s --version" % (sys.executable, setup_py) + with os.popen(invoke_setup) as f: + sversion = f.read().strip() + + self.assertEqual(sversion, greenlet.__version__) diff --git a/venv/lib/python3.12/site-packages/greenlet/tests/test_weakref.py b/venv/lib/python3.12/site-packages/greenlet/tests/test_weakref.py new file mode 100644 index 0000000..05a38a7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/greenlet/tests/test_weakref.py @@ -0,0 +1,35 @@ +import gc +import weakref + + +import greenlet +from . 
import TestCase + +class WeakRefTests(TestCase): + def test_dead_weakref(self): + def _dead_greenlet(): + g = greenlet.greenlet(lambda: None) + g.switch() + return g + o = weakref.ref(_dead_greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_inactive_weakref(self): + o = weakref.ref(greenlet.greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_dealloc_weakref(self): + seen = [] + def worker(): + try: + greenlet.getcurrent().parent.switch() + finally: + seen.append(g()) + g = greenlet.greenlet(worker) + g.switch() + g2 = greenlet.greenlet(lambda: None, g) + g = weakref.ref(g2) + g2 = None + self.assertEqual(seen, [None]) diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA new file mode 100644 index 0000000..8a2f639 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA @@ -0,0 +1,202 @@ +Metadata-Version: 2.4 +Name: h11 +Version: 0.16.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. Smith +Author-email: njs@pobox.com +License: MIT +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.8 +License-File: LICENSE.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: requires-python +Dynamic: summary + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) 
Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. + +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. 
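+
+To make the three-step flow described above concrete, here is a minimal
+client-side sketch (an illustrative addition rather than upstream README
+text; the host name and the 4096-byte read size are arbitrary):
+
+.. code-block:: python
+
+    import socket
+
+    import h11
+
+    conn = h11.Connection(our_role=h11.CLIENT)
+    sock = socket.create_connection(("example.com", 80))
+
+    # send() turns events into bytes; doing the actual I/O is up to us.
+    sock.sendall(conn.send(h11.Request(
+        method="GET", target="/", headers=[("Host", "example.com")])))
+    sock.sendall(conn.send(h11.EndOfMessage()))
+
+    # receive_data() only buffers; next_event() parses events out of the buffer.
+    while True:
+        event = conn.next_event()
+        if event is h11.NEED_DATA:
+            conn.receive_data(sock.recv(4096))
+        elif isinstance(event, h11.EndOfMessage):
+            break
+        else:
+            print(event)  # an h11.Response, then zero or more h11.Data events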
+ +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. 
diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD new file mode 100644 index 0000000..fa60234 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD @@ -0,0 +1,29 @@ +h11-0.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348 +h11-0.16.0.dist-info/RECORD,, +h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 +h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-312.pyc,, +h11/__pycache__/_abnf.cpython-312.pyc,, +h11/__pycache__/_connection.cpython-312.pyc,, +h11/__pycache__/_events.cpython-312.pyc,, +h11/__pycache__/_headers.cpython-312.pyc,, +h11/__pycache__/_readers.cpython-312.pyc,, +h11/__pycache__/_receivebuffer.cpython-312.pyc,, +h11/__pycache__/_state.cpython-312.pyc,, +h11/__pycache__/_util.cpython-312.pyc,, +h11/__pycache__/_version.cpython-312.pyc,, +h11/__pycache__/_writers.cpython-312.pyc,, +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863 +h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792 +h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412 +h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686 +h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL new file mode 100644 index 0000000..1eb3c49 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000..8f080ea --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/venv/lib/python3.12/site-packages/h11/__init__.py b/venv/lib/python3.12/site-packages/h11/__init__.py new file mode 100644 index 0000000..989e92c --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. + +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b823dac Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc new file mode 100644 index 0000000..032a74b Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc new file mode 100644 index 0000000..9ea107c Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc new file mode 100644 index 0000000..95dac02 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc new file mode 100644 index 0000000..fbb0935 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc new file mode 100644 index 0000000..7ef7ad9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc new file mode 100644 index 0000000..6abec23 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc new file mode 100644 index 0000000..636f3fa Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc new file mode 100644 index 0000000..0169c12 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000..fcc8a8b Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc new file mode 100644 index 0000000..b84618f Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/_abnf.py b/venv/lib/python3.12/site-packages/h11/_abnf.py new file mode 100644 index 0000000..933587f --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~"
+#                / DIGIT / ALPHA
+#                ; any VCHAR, except delimiters
+token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
+# field-name = token
+field_name = token
+
+# The standard says:
+#
+#  field-value    = *( field-content / obs-fold )
+#  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
+#  field-vchar    = VCHAR / obs-text
+#  obs-fold       = CRLF 1*( SP / HTAB )
+#                 ; obsolete line folding
+#                 ; see Section 3.2.4
+#
+# https://tools.ietf.org/html/rfc5234#appendix-B.1
+#
+#  VCHAR          = %x21-7E
+#                 ; visible (printing) characters
+#
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
+#  obs-text       = %x80-FF
+#
+# However, the standard definition of field-content is WRONG! It disallows
+# fields containing a single visible character surrounded by whitespace,
+# e.g. "foo a bar".
+#
+# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
+#
+# So our definition of field_content attempts to fix it up...
+#
+# Also, we allow lots of control characters, because apparently people assume
+# that they're legal in practice (e.g., google analytics makes cookies with
+# \x01 in them!):
+#   https://github.com/python-hyper/h11/issues/57
+# We still don't allow NUL or whitespace, because those are often treated as
+# meta-characters and letting them through can lead to nasty issues like SSRF.
+vchar = r"[\x21-\x7e]"
+vchar_or_obs_text = r"[^\x00\s]"
+field_vchar = vchar_or_obs_text
+field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())
+
+# We handle obs-fold at a different level, and our fixed-up field_content
+# already grows to swallow the whole value, so ? instead of *
+field_value = r"({field_content})?".format(**globals())
+
+#  header-field   = field-name ":" OWS field-value OWS
+header_field = (
+    r"(?P<field_name>{field_name})"
+    r":"
+    r"{OWS}"
+    r"(?P<field_value>{field_value})"
+    r"{OWS}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
+#
+#  request-line   = method SP request-target SP HTTP-version CRLF
+#  method         = token
+#  HTTP-version   = HTTP-name "/" DIGIT "." DIGIT
+#  HTTP-name      = %x48.54.54.50 ; "HTTP", case-sensitive
+#
+# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
+# URL, host+port (for connect), or even "*", but in any case we are guaranteed
+# that it consists of the visible printing characters.
+method = token
+request_target = r"{vchar}+".format(**globals())
+http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
+request_line = (
+    r"(?P<method>{method})"
+    r" "
+    r"(?P<target>{request_target})"
+    r" "
+    r"{http_version}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
+#
+#  status-line    = HTTP-version SP status-code SP reason-phrase CRLF
+#  status-code    = 3DIGIT
+#  reason-phrase  = *( HTAB / SP / VCHAR / obs-text )
+status_code = r"[0-9]{3}"
+reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
+status_line = (
+    r"{http_version}"
+    r" "
+    r"(?P<status_code>{status_code})"
+    # However, there are apparently a few too many servers out there that just
+    # leave out the reason phrase:
+    #   https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
+    #   https://github.com/seanmonstar/httparse/issues/29
+    # so make it optional. ?: is a non-capturing group.
+    r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
)

+HEXDIG = r"[0-9A-Fa-f]"
+# Actually
+#
+#  chunk-size     = 1*HEXDIG
+#
+# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
+chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
+# Actually
+#
+#  chunk-ext      = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+#
+# but we aren't parsing the things so we don't really care.
+chunk_ext = r";.*"
+chunk_header = (
+    r"(?P<chunk_size>{chunk_size})"
+    r"(?P<chunk_ext>{chunk_ext})?"
+    r"{OWS}\r\n".format(
+        **globals()
+    )  # Even though the specification does not allow for extra whitespaces,
+    # we are lenient with trailing whitespaces because some servers in the wild use it.
+)
diff --git a/venv/lib/python3.12/site-packages/h11/_connection.py b/venv/lib/python3.12/site-packages/h11/_connection.py
new file mode 100644
index 0000000..e37d82a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/h11/_connection.py
@@ -0,0 +1,659 @@
+# This contains the main Connection class. Everything in h11 revolves around
+# this.
+from typing import (
+    Any,
+    Callable,
+    cast,
+    Dict,
+    List,
+    Optional,
+    overload,
+    Tuple,
+    Type,
+    Union,
+)
+
+from ._events import (
+    ConnectionClosed,
+    Data,
+    EndOfMessage,
+    Event,
+    InformationalResponse,
+    Request,
+    Response,
+)
+from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
+from ._readers import READERS, ReadersType
+from ._receivebuffer import ReceiveBuffer
+from ._state import (
+    _SWITCH_CONNECT,
+    _SWITCH_UPGRADE,
+    CLIENT,
+    ConnectionState,
+    DONE,
+    ERROR,
+    MIGHT_SWITCH_PROTOCOL,
+    SEND_BODY,
+    SERVER,
+    SWITCHED_PROTOCOL,
+)
+from ._util import (  # Import the internal things we need
+    LocalProtocolError,
+    RemoteProtocolError,
+    Sentinel,
+)
+from ._writers import WRITERS, WritersType
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = ["Connection", "NEED_DATA", "PAUSED"]
+
+
+class NEED_DATA(Sentinel, metaclass=Sentinel):
+    pass
+
+
+class PAUSED(Sentinel, metaclass=Sentinel):
+    pass
+
+
+# If we ever have this much buffered without it making a complete parseable
+# event, we error out. The only time we really buffer is when reading the
+# request/response line + headers together, so this is effectively the limit on
+# the size of that.
+#
+# Some precedents for defaults:
+# - node.js: 80 * 1024
+# - tomcat: 8 * 1024
+# - IIS: 16 * 1024
+# - Apache: <8 KiB per line>
+DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
+
+
+# RFC 7230's rules for connection lifecycles:
+# - If either side says they want to close the connection, then the connection
+#   must close.
+# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
+# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
+#   (and even this is a mess -- e.g. if you're implementing a proxy then
+#   sending Connection: keep-alive is forbidden).
+#
+# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So
+# our rule is:
+# - If someone says Connection: close, we will close
+# - If someone uses HTTP/1.0, we will close.
+def _keep_alive(event: Union[Request, Response]) -> bool:
+    connection = get_comma_header(event.headers, b"connection")
+    if b"close" in connection:
+        return False
+    if getattr(event, "http_version", b"1.1") < b"1.1":
+        return False
+    return True
+
+
+def _body_framing(
+    request_method: bytes, event: Union[Request, Response]
+) -> Tuple[str, Union[Tuple[()], Tuple[int]]]:
+    # Called when we enter SEND_BODY to figure out framing information for
+    # this body.
+ # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. + + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}") + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. 
+        self.their_http_version: Optional[bytes] = None
+        self._request_method: Optional[bytes] = None
+        # This is pure flow-control and doesn't at all affect the set of legal
+        # transitions, so no need to bother ConnectionState with it:
+        self.client_is_waiting_for_100_continue = False
+
+    @property
+    def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
+        """A dictionary like::
+
+           {CLIENT: <client state>, SERVER: <server state>}
+
+        See :ref:`state-machine` for details.
+
+        """
+        return dict(self._cstate.states)
+
+    @property
+    def our_state(self) -> Type[Sentinel]:
+        """The current state of whichever role we are playing. See
+        :ref:`state-machine` for details.
+        """
+        return self._cstate.states[self.our_role]
+
+    @property
+    def their_state(self) -> Type[Sentinel]:
+        """The current state of whichever role we are NOT playing. See
+        :ref:`state-machine` for details.
+        """
+        return self._cstate.states[self.their_role]
+
+    @property
+    def they_are_waiting_for_100_continue(self) -> bool:
+        return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
+
+    def start_next_cycle(self) -> None:
+        """Attempt to reset our connection state for a new request/response
+        cycle.
+
+        If both client and server are in :data:`DONE` state, then resets them
+        both to :data:`IDLE` state in preparation for a new request/response
+        cycle on this same connection. Otherwise, raises a
+        :exc:`LocalProtocolError`.
+
+        See :ref:`keepalive-and-pipelining`.
+
+        """
+        old_states = dict(self._cstate.states)
+        self._cstate.start_next_cycle()
+        self._request_method = None
+        # self.their_http_version gets left alone, since it presumably lasts
+        # beyond a single request/response cycle
+        assert not self.client_is_waiting_for_100_continue
+        self._respond_to_state_changes(old_states)
+
+    def _process_error(self, role: Type[Sentinel]) -> None:
+        old_states = dict(self._cstate.states)
+        self._cstate.process_error(role)
+        self._respond_to_state_changes(old_states)
+
+    def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]:
+        if type(event) is InformationalResponse and event.status_code == 101:
+            return _SWITCH_UPGRADE
+        if type(event) is Response:
+            if (
+                _SWITCH_CONNECT in self._cstate.pending_switch_proposals
+                and 200 <= event.status_code < 300
+            ):
+                return _SWITCH_CONNECT
+        return None
+
+    # All events go through here
+    def _process_event(self, role: Type[Sentinel], event: Event) -> None:
+        # First, pass the event through the state machine to make sure it
+        # succeeds.
+        old_states = dict(self._cstate.states)
+        if role is CLIENT and type(event) is Request:
+            if event.method == b"CONNECT":
+                self._cstate.process_client_switch_proposal(_SWITCH_CONNECT)
+            if get_comma_header(event.headers, b"upgrade"):
+                self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE)
+        server_switch_event = None
+        if role is SERVER:
+            server_switch_event = self._server_switch_event(event)
+        self._cstate.process_event(role, type(event), server_switch_event)
+
+        # Then perform the updates triggered by it.
+
+        if type(event) is Request:
+            self._request_method = event.method
+
+        if role is self.their_role and type(event) in (
+            Request,
+            Response,
+            InformationalResponse,
+        ):
+            event = cast(Union[Request, Response, InformationalResponse], event)
+            self.their_http_version = event.http_version
+
+        # Keep alive handling
+        #
+        # RFC 7230 doesn't really say what one should do if Connection: close
+        # shows up on a 1xx InformationalResponse. I think the idea is that
+        # this is not supposed to happen.
In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. + def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. 
+ + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
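+
+        A sketch of the receive loop this implies (an illustration, not
+        part of the original docstring; ``sock`` is assumed to be a
+        connected socket, ``conn`` an :class:`h11.Connection`, and
+        ``handle`` a hypothetical application callback)::
+
+            while True:
+                event = conn.next_event()
+                if event is h11.NEED_DATA:
+                    # Feed more bytes in and try again.
+                    conn.receive_data(sock.recv(4096))
+                elif type(event) is h11.ConnectionClosed:
+                    break
+                elif event is h11.PAUSED:
+                    break  # handle the request, then start_next_cycle()
+                else:
+                    handle(event)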
+ + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + @overload + def send(self, event: ConnectionClosed) -> None: + ... + + @overload + def send( + self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage] + ) -> bytes: + ... + + @overload + def send(self, event: Event) -> Optional[bytes]: + ... + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. 
+ + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. (You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. + # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. 
But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/venv/lib/python3.12/site-packages/h11/_events.py b/venv/lib/python3.12/site-packages/h11/_events.py new file mode 100644 index 0000000..ca1c3ad --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass +from typing import List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. 
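+
+    A construction sketch (for illustration only; the values are made up)::
+
+        req = Request(
+            method="GET",
+            target="/index.html",
+            # A Host header is mandatory for HTTP/1.1 requests:
+            headers=[("Host", "example.org")],
+        )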
+ + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. 
See + :ref:`the header normalization rules ` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. 
+ __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/venv/lib/python3.12/site-packages/h11/_headers.py b/venv/lib/python3.12/site-packages/h11/_headers.py new file mode 100644 index 0000000..31da3e2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_headers.py @@ -0,0 +1,282 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + +CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1 + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." 
+# And it deprecates all non-ascii values
+#
+# Leading/trailing whitespace in header names is forbidden
+#
+# Values get leading/trailing whitespace stripped
+#
+# Content-Disposition actually needs to contain unicode semantically; to
+# accomplish this it has a terrifically weird way of encoding the filename
+# itself as ascii (and even this still has lots of cross-browser
+# incompatibilities)
+#
+# Order is important:
+# "a proxy MUST NOT change the order of these field values when forwarding a
+# message"
+# (and there are several headers where the order indicates a preference)
+#
+# Multiple occurrences of the same header:
+# "A sender MUST NOT generate multiple header fields with the same field name
+# in a message unless either the entire field value for that header field is
+# defined as a comma-separated list [or the header is Set-Cookie which gets a
+# special exception]" - RFC 7230. (cookies are in RFC 6265)
+#
+# So every header aside from Set-Cookie can be merged by b", ".join if it
+# occurs repeatedly. But, of course, they can't necessarily be split by
+# .split(b","), because quoting.
+#
+# Given all this mess (case insensitive, duplicates allowed, order is
+# important, ...), there doesn't appear to be any standard way to handle
+# headers in Python -- they're almost like dicts, but... actually just
+# aren't. For now we punt and just use a super simple representation: headers
+# are a list of pairs
+#
+# [(name1, value1), (name2, value2), ...]
+#
+# where all entries are bytestrings, names are lowercase and have no
+# leading/trailing whitespace, and values are bytestrings with no
+# leading/trailing whitespace. Searching and updating are done via naive O(n)
+# methods.
+#
+# Maybe a dict-of-lists would be better?
+
+_content_length_re = re.compile(rb"[0-9]+")
+_field_name_re = re.compile(field_name.encode("ascii"))
+_field_value_re = re.compile(field_value.encode("ascii"))
+
+
+class Headers(Sequence[Tuple[bytes, bytes]]):
+    """
+    A list-like interface that allows iterating over headers as byte-pairs
+    of (lowercased-name, value).
+
+    Internally we actually store the representation as three-tuples,
+    including both the raw original casing, in order to preserve casing
+    over-the-wire, and the lowercased name, for case-insensitive comparisons.
+
+        r = Request(
+            method="GET",
+            target="/",
+            headers=[("Host", "example.org"), ("Connection", "keep-alive")],
+            http_version="1.1",
+        )
+        assert r.headers == [
+            (b"host", b"example.org"),
+            (b"connection", b"keep-alive")
+        ]
+        assert r.headers.raw_items() == [
+            (b"Host", b"example.org"),
+            (b"Connection", b"keep-alive")
+        ]
+    """
+
+    __slots__ = "_full_items"
+
+    def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
+        self._full_items = full_items
+
+    def __bool__(self) -> bool:
+        return bool(self._full_items)
+
+    def __eq__(self, other: object) -> bool:
+        return list(self) == list(other)  # type: ignore
+
+    def __len__(self) -> int:
+        return len(self._full_items)
+
+    def __repr__(self) -> str:
+        return "<Headers(%s)>" % repr(list(self))
+
+    def __getitem__(self, idx: int) -> Tuple[bytes, bytes]:  # type: ignore[override]
+        _, name, value = self._full_items[idx]
+        return (name, value)
+
+    def raw_items(self) -> List[Tuple[bytes, bytes]]:
+        return [(raw_name, value) for raw_name, _, value in self._full_items]
+
+
+HeaderTypes = Union[
+    List[Tuple[bytes, bytes]],
+    List[Tuple[bytes, str]],
+    List[Tuple[str, bytes]],
+    List[Tuple[str, str]],
+]
+
+
+@overload
+def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    ...
+
+
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    new_headers = []
+    seen_content_length = None
+    saw_transfer_encoding = False
+    for name, value in headers:
+        # For headers coming out of the parser, we can safely skip some steps,
+        # because it always returns bytes and has already run these regexes
+        # over the data:
+        if not _parsed:
+            name = bytesify(name)
+            value = bytesify(value)
+            validate(_field_name_re, name, "Illegal header name {!r}", name)
+            validate(_field_value_re, value, "Illegal header value {!r}", value)
+        assert isinstance(name, bytes)
+        assert isinstance(value, bytes)
+
+        raw_name = name
+        name = name.lower()
+        if name == b"content-length":
+            lengths = {length.strip() for length in value.split(b",")}
+            if len(lengths) != 1:
+                raise LocalProtocolError("conflicting Content-Length headers")
+            value = lengths.pop()
+            validate(_content_length_re, value, "bad Content-Length")
+            if len(value) > CONTENT_LENGTH_MAX_DIGITS:
+                raise LocalProtocolError("bad Content-Length")
+            if seen_content_length is None:
+                seen_content_length = value
+                new_headers.append((raw_name, name, value))
+            elif seen_content_length != value:
+                raise LocalProtocolError("conflicting Content-Length headers")
+        elif name == b"transfer-encoding":
+            # "A server that receives a request message with a transfer coding
+            # it does not understand SHOULD respond with 501 (Not
+            # Implemented)."
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including cast insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." 
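+    # For illustration (a sketch, not upstream code): a request carrying
+    # "Expect: 100-continue" with http_version b"1.1" makes this function
+    # return True, while the identical request marked http_version b"1.0"
+    # returns False, per the RFC text quoted above.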
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/venv/lib/python3.12/site-packages/h11/_readers.py b/venv/lib/python3.12/site-packages/h11/_readers.py new file mode 100644 index 0000000..576804c --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_readers.py @@ -0,0 +1,250 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no 
response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n. + # This tracks the bytes that we need to match and throw away. + self._bytes_to_discard = b"" + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard: + data = buf.maybe_extract_at_most(len(self._bytes_to_discard)) + if data is None: + return None + if data != self._bytes_to_discard[: len(data)]: + raise LocalProtocolError( + f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})" + ) + self._bytes_to_discard = self._bytes_to_discard[len(data) :] + if self._bytes_to_discard: + return None + # else, fall through and read some more + assert self._bytes_to_discard == b"" + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = b"\r\n" + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/venv/lib/python3.12/site-packages/h11/_receivebuffer.py b/venv/lib/python3.12/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..e5c4e08 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. 
Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. + match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. 
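+    # For example (illustrative): a buffer that begins with 0x16 (the first
+    # byte of a TLS handshake record) or with a leading space is reported as
+    # obviously invalid by the check below, while b"GET / HTTP/1.1\r\n"
+    # begins with b"G" (0x47, above 0x20) and is not.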
+ def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/venv/lib/python3.12/site-packages/h11/_state.py b/venv/lib/python3.12/site-packages/h11/_state.py new file mode 100644 index 0000000..3ad444b --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_state.py @@ -0,0 +1,365 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. 
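+#
+# (A concrete instance, for illustration: per the tables below, a client
+# machine in IDLE that sees a Request moves to SEND_BODY, while that same
+# Request -- annotated as (Request, CLIENT) -- moves the server machine
+# from IDLE to SEND_RESPONSE.)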
+# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
+ self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server _SWITCH_UPGRADE event without a pending proposal" + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. + if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + f"not in a reusable state. 
self.states={self.states}" + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. + assert self.keep_alive + assert not self.pending_switch_proposals + self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/venv/lib/python3.12/site-packages/h11/_util.py b/venv/lib/python3.12/site-packages/h11/_util.py new file mode 100644 index 0000000..6718445 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/_util.py @@ -0,0 +1,135 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. + + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. + + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg: str, error_status_hint: int = 400) -> None: + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self) -> NoReturn: + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. This method must be called from inside an + # except: block. + # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. + self.__class__ = RemoteProtocolError # type: ignore + # But the re-raising is somewhat non-trivial -- you might think that + # now that we've modified the in-flight exception object, that just + # doing 'raise' to re-raise it would be enough. But it turns out that + # this doesn't work, because Python tracks the exception type + # (exc_info[0]) separately from the exception object (exc_info[1]), + # and we only modified the latter. So we really do need to re-raise + # the new type explicitly. 
+        # On py3, the traceback is part of the exception object, so our
+        # in-place modification preserved it and we can just re-raise:
+        raise self
+
+
+class RemoteProtocolError(ProtocolError):
+    pass
+
+
+def validate(
+    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
+) -> Dict[str, bytes]:
+    match = regex.fullmatch(data)
+    if not match:
+        if format_args:
+            msg = msg.format(*format_args)
+        raise LocalProtocolError(msg)
+    return match.groupdict()
+
+
+# Sentinel values
+#
+# - Inherit identity-based comparison and hashing from object
+# - Have a nice repr
+# - Have a *bonus property*: type(sentinel) is sentinel
+#
+# The bonus property is useful if you want to take the return value from
+# next_event() and do some sort of dispatch based on type(event).
+
+_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
+
+
+class Sentinel(type):
+    def __new__(
+        cls: Type[_T_Sentinel],
+        name: str,
+        bases: Tuple[type, ...],
+        namespace: Dict[str, Any],
+        **kwds: Any
+    ) -> _T_Sentinel:
+        assert bases == (Sentinel,)
+        v = super().__new__(cls, name, bases, namespace, **kwds)
+        v.__class__ = v  # type: ignore
+        return v
+
+    def __repr__(self) -> str:
+        return self.__name__
+
+
+# Used for methods, request targets, HTTP versions, header names, and header
+# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
+# returns bytes.
+def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
+    # Fast-path:
+    if type(s) is bytes:
+        return s
+    if isinstance(s, str):
+        s = s.encode("ascii")
+    if isinstance(s, int):
+        raise TypeError("expected bytes-like object, not int")
+    return bytes(s)
diff --git a/venv/lib/python3.12/site-packages/h11/_version.py b/venv/lib/python3.12/site-packages/h11/_version.py
new file mode 100644
index 0000000..76e7327
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/h11/_version.py
@@ -0,0 +1,16 @@
+# This file must be kept very simple, because it is consumed from several
+# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
+
+# We use a simple scheme:
+#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
+# where the +dev versions are never released into the wild, they're just what
+# we stick into the VCS in between releases.
+#
+# This is compatible with PEP 440:
+#   http://legacy.python.org/dev/peps/pep-0440/
+# via the use of the "local suffix" "+dev", which is disallowed on index
+# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
+# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
+# 1.0.0.)
+
+__version__ = "0.16.0"
diff --git a/venv/lib/python3.12/site-packages/h11/_writers.py b/venv/lib/python3.12/site-packages/h11/_writers.py
new file mode 100644
index 0000000..939cdb9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/h11/_writers.py
@@ -0,0 +1,145 @@
+# Code to write HTTP data
+#
+# Strategy: each writer takes an event + a write-some-bytes function, which it
+# calls.
+#
+# WRITERS is a dict describing how to pick a writer. It maps states to either:
+# - a writer
+# - or, for body writers, a dict of framing-dependent writer factories
+
+from typing import Any, Callable, Dict, List, Tuple, Type, Union
+
+from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
+from ._headers import Headers
+from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
+from ._util import LocalProtocolError, Sentinel
+
+__all__ = ["WRITERS"]
+
+Writer = Callable[[bytes], Any]
+
+
+def write_headers(headers: Headers, write: Writer) -> None:
+    # "Since the Host field-value is critical information for handling a
+    # request, a user agent SHOULD generate Host as the first header field
+    # following the request-line." - RFC 7230
+    raw_items = headers._full_items
+    for raw_name, name, value in raw_items:
+        if name == b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    for raw_name, name, value in raw_items:
+        if name != b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    write(b"\r\n")
+
+
+def write_request(request: Request, write: Writer) -> None:
+    if request.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
+    write_headers(request.headers, write)
+
+
+# Shared between InformationalResponse and Response
+def write_any_response(
+    response: Union[InformationalResponse, Response], write: Writer
+) -> None:
+    if response.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    status_bytes = str(response.status_code).encode("ascii")
+    # We don't bother sending ascii status messages like "OK"; they're
+    # optional and ignored by the protocol. (But the space after the numeric
+    # status code is mandatory.)
+    #
+    # XX FIXME: could at least make an effort to pull out the status message
+    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
+    # (either by import or copy/paste). We already accept them as status codes
+    # since they're of type IntEnum < int.
+    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
+    write_headers(response.headers, write)
+
+
+class BodyWriter:
+    def __call__(self, event: Event, write: Writer) -> None:
+        if type(event) is Data:
+            self.send_data(event.data, write)
+        elif type(event) is EndOfMessage:
+            self.send_eom(event.headers, write)
+        else:  # pragma: no cover
+            assert False
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        pass
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        pass
+
+
+#
+# These are all careful not to do anything to 'data' except call len(data) and
+# write(data). This allows us to transparently pass-through funny objects,
+# like placeholder objects referring to files on disk that will be sent via
+# sendfile(2).
+#
+class ContentLengthWriter(BodyWriter):
+    def __init__(self, length: int) -> None:
+        self._length = length
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        self._length -= len(data)
+        if self._length < 0:
+            raise LocalProtocolError("Too much data for declared Content-Length")
+        write(data)
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        if self._length != 0:
+            raise LocalProtocolError("Too little data for declared Content-Length")
+        if headers:
+            raise LocalProtocolError("Content-Length and trailers don't mix")
+
+
+class ChunkedWriter(BodyWriter):
+    def send_data(self, data: bytes, write: Writer) -> None:
+        # if we encoded 0-length data in the naive way, it would look like an
+        # end-of-message.
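+        #
+        # Concretely (an illustrative byte trace, not taken from the source):
+        # send_data(b"hello") emits b"5\r\nhello\r\n", while the terminal
+        # b"0\r\n" frame is reserved for send_eom() below -- so empty
+        # payloads must be skipped here rather than framed as b"0\r\n\r\n".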
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/venv/lib/python3.12/site-packages/h11/py.typed b/venv/lib/python3.12/site-packages/h11/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA new file mode 100644 index 0000000..8056834 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA @@ -0,0 +1,625 @@ +Metadata-Version: 2.4 +Name: httpcore +Version: 1.0.9 +Summary: A minimal low-level HTTP client. 
+Project-URL: Documentation, https://www.encode.io/httpcore +Project-URL: Homepage, https://www.encode.io/httpcore/ +Project-URL: Source, https://github.com/encode/httpcore +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: certifi +Requires-Dist: h11>=0.16 +Provides-Extra: asyncio +Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Provides-Extra: trio +Requires-Dist: trio<1.0,>=0.22.0; extra == 'trio' +Description-Content-Type: text/markdown + +# HTTP Core + +[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) +[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) + +> *Do one thing, and do it well.* + +The HTTP Core package provides a minimal low-level HTTP client, which does +one thing only. Sending HTTP requests. + +It does not provide any high level model abstractions over the API, +does not handle redirects, multipart uploads, building authentication headers, +transparent HTTP caching, URL parsing, session cookie handling, +content or charset decoding, handling JSON, environment based configuration +defaults, or any of that Jazz. + +Some things HTTP Core does do: + +* Sending HTTP requests. +* Thread-safe / task-safe connection pooling. +* HTTP(S) proxy & SOCKS proxy support. +* Supports HTTP/1.1 and HTTP/2. +* Provides both sync and async interfaces. +* Async backend support for `asyncio` and `trio`. + +## Requirements + +Python 3.8+ + +## Installation + +For HTTP/1.1 only support, install with: + +```shell +$ pip install httpcore +``` + +There are also a number of optional extras available... + +```shell +$ pip install httpcore['asyncio,trio,http2,socks'] +``` + +## Sending requests + +Send an HTTP request: + +```python +import httpcore + +response = httpcore.request("GET", "https://www.example.com/") + +print(response) +# +print(response.status) +# 200 +print(response.headers) +# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] +print(response.content) +# b'\n\n\nExample Domain\n\n\n ...' +``` + +The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides. + +```python +import httpcore + +http = httpcore.ConnectionPool() +response = http.request("GET", "https://www.example.com/") +``` + +Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/). + +## Motivation + +You *probably* don't want to be using HTTP Core directly. 
It might make sense if
+you're writing something like a proxy service in Python, and you just want
+something at the lowest possible level, but more typically you'll want to use
+a higher level client library, such as `httpx`.
+
+The motivation for `httpcore` is:
+
+* To provide a reusable low-level client library, that other packages can then build on top of.
+* To provide a *really clear interface split* between the networking code and client logic,
+  so that each is easier to understand and reason about in isolation.
+
+## Dependencies
+
+The `httpcore` package has the following dependencies...
+
+* `h11`
+* `certifi`
+
+And the following optional extras...
+
+* `anyio` - Required by `pip install httpcore['asyncio']`.
+* `trio` - Required by `pip install httpcore['trio']`.
+* `h2` - Required by `pip install httpcore['http2']`.
+* `socksio` - Required by `pip install httpcore['socks']`.
+
+## Versioning
+
+We use [SEMVER for our versioning policy](https://semver.org/).
+
+For changes between package versions please see our [project changelog](CHANGELOG.md).
+
+We recommend pinning your requirements to either the most current major version, or a more specific version range:
+
+```shell
+pip install 'httpcore==1.*'
+```
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## Version 1.0.9 (April 24th, 2025)
+
+- Resolve https://github.com/advisories/GHSA-vqfr-h8mv-ghfj with h11 dependency update. (#1008)
+
+## Version 1.0.8 (April 11th, 2025)
+
+- Fix `AttributeError` when importing on Python 3.14. (#1005)
+
+## Version 1.0.7 (November 15th, 2024)
+
+- Support `proxy=…` configuration on `ConnectionPool()`. (#974)
+
+## Version 1.0.6 (October 1st, 2024)
+
+- Relax `trio` dependency pinning. (#956)
+- Handle `trio` raising `NotImplementedError` on unsupported platforms. (#955)
+- Handle mapping `ssl.SSLError` to `httpcore.ConnectError`. (#918)
+
+## 1.0.5 (March 27th, 2024)
+
+- Handle `EndOfStream` exception for anyio backend. (#899)
+- Allow trio `0.25.*` series in package dependencies. (#903)
+
+## 1.0.4 (February 21st, 2024)
+
+- Add `target` request extension. (#888)
+- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882)
+
+## 1.0.3 (February 13th, 2024)
+
+- Fix support for async cancellations. (#880)
+- Fix trace extension when used with socks proxy. (#849)
+- Fix SSL context for connections using the "wss" scheme (#869)
+
+## 1.0.2 (November 10th, 2023)
+
+- Fix `float("inf")` timeouts in `Event.wait` function. (#846)
+
+## 1.0.1 (November 3rd, 2023)
+
+- Fix pool timeout to account for the total time spent retrying. (#823)
+- Raise a neater RuntimeError when the correct async deps are not installed. (#826)
+- Add support for synchronous TLS-in-TLS streams. (#840)
+
+## 1.0.0 (October 6th, 2023)
+
+From version 1.0 our async support is now optional, as the package has minimal dependencies by default.
+
+For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`.
+
+The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/.
+
+- Async support becomes fully optional. (#809)
+- Add support for Python 3.12. (#807)
+
+## 0.18.0 (September 8th, 2023)
+
+- Add support for HTTPS proxies. (#745, #786)
+- Drop Python 3.7 support. (#727)
+- Handle `sni_hostname` extension with SOCKS proxy. (#774)
+- Handle HTTP/1.1 half-closed connections gracefully.
(#641) +- Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#762) + +## 0.17.3 (July 5th, 2023) + +- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726) +- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation detail are now part of the top-level public API. (#699) +- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730) +- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717) +- Drop Python 3.7 support. (#727) + +## 0.17.2 (May 23th, 2023) + +- Add `socket_options` argument to `ConnectionPool` and `HTTProxy` classes. (#668) +- Improve logging with per-module logger names. (#690) +- Add `sni_hostname` request extension. (#696) +- Resolve race condition during import of `anyio` package. (#692) +- Enable TCP_NODELAY for all synchronous sockets. (#651) + +## 0.17.1 (May 17th, 2023) + +- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669) +- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678) +- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679) +- Fix edge-case exception when removing requests from the connection pool. (#680) +- Fix pool timeout edge-case. (#688) + +## 0.17.0 (March 16th, 2023) + +- Add DEBUG level logging. (#648) +- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652) +- Increase the allowable HTTP header size to 100kB. (#647) +- Add `retries` option to SOCKS proxy classes. (#643) + +## 0.16.3 (December 20th, 2022) + +- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625) +- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637) +- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631) +- Lazy import `anyio`, so that it's no longer a hard dependancy, and isn't imported if unused. (#639) + +## 0.16.2 (November 25th, 2022) + +- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627) +- Raise `RuntimeError` if attempting to us UNIX domain sockets on Windows. (#619) + +## 0.16.1 (November 17th, 2022) + +- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605) + +## 0.16.0 (October 11th, 2022) + +- Support HTTP/1.1 informational responses. (#581) +- Fix async cancellation behaviour. (#580) +- Support `h11` 0.14. (#579) + +## 0.15.0 (May 17th, 2022) + +- Drop Python 3.6 support (#535) +- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506) +- Switch to explicit `typing.Optional` for type hints. (#513) +- For `trio` map OSError exceptions to `ConnectError`. (#543) + +## 0.14.7 (February 4th, 2022) + +- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502) +- Fix AttributeError that happened when Socks5Connection were terminated. (#501) + +## 0.14.6 (February 1st, 2022) + +- Fix SOCKS support for `http://` URLs. (#492) +- Resolve race condition around exceptions during streaming a response. 
(#491) + +## 0.14.5 (January 18th, 2022) + +- SOCKS proxy support. (#478) +- Add proxy_auth argument to HTTPProxy. (#481) +- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479) + +## 0.14.4 (January 5th, 2022) + +- Support HTTP/2 on HTTPS tunnelling proxies. (#468) +- Fix proxy headers missing on HTTP forwarding. (#456) +- Only instantiate SSL context if required. (#457) +- More robust HTTP/2 handling. (#253, #439, #440, #441) + +## 0.14.3 (November 17th, 2021) + +- Fix race condition when removing closed connections from the pool. (#437) + +## 0.14.2 (November 16th, 2021) + +- Failed connections no longer remain in the pool. (Pull #433) + +## 0.14.1 (November 12th, 2021) + +- `max_connections` becomes optional. (Pull #429) +- `certifi` is now included in the install dependancies. (Pull #428) +- `h2` is now strictly optional. (Pull #428) + +## 0.14.0 (November 11th, 2021) + +The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly. + +Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage. + +See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background. + +There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to... + +* Log the point at which the connection is established, and the IP/port on which it is made. +* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.) +* Log SSL version info / certificate info. + +Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not. + +## 0.13.7 (September 13th, 2021) + +- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403) + +## 0.13.6 (June 15th, 2021) + +### Fixed + +- Close sockets when read or write timeouts occur. (Pull #365) + +## 0.13.5 (June 14th, 2021) + +### Fixed + +- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362) + +## 0.13.4 (June 9th, 2021) + +### Added + +- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354) + +### Fixed + +- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511). + +## 0.13.3 (May 6th, 2021) + +### Added + +- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333) + +### Fixed + +- Handle cases where environment does not provide `select.poll` support. (Pull #331) + +## 0.13.2 (April 29th, 2021) + +### Added + +- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313) + +## 0.13.1 (April 28th, 2021) + +### Fixed + +- More resiliant testing for closed connections. (Pull #311) +- Don't raise exceptions on ungraceful connection closes. 
(Pull #310)
+
+## 0.13.0 (April 21st, 2021)
+
+The 0.13 release updates the core API in order to match the HTTPX Transport API,
+introduced in HTTPX 0.18 onwards.
+
+An example of making requests with the new interface is:
+
+```python
+with httpcore.SyncConnectionPool() as http:
+    status_code, headers, stream, extensions = http.handle_request(
+        method=b'GET',
+        url=(b'https', b'example.org', 443, b'/'),
+        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
+        stream=httpcore.ByteStream(b''),
+        extensions={}
+    )
+    body = stream.read()
+    print(status_code, body)
+```
+
+### Changed
+
+- The `.request()` method is now `handle_request()`. (Pull #296)
+- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
+- The `headers` argument is no longer optional. (Pull #296)
+- The `stream` argument is no longer optional. (Pull #296)
+- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
+- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
+- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
+- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
+
+### Added
+
+- Streams now support a `.read()` interface. (Pull #296)
+
+### Fixed
+
+- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
+
+## 0.12.3 (December 7th, 2020)
+
+### Fixed
+
+- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
+- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
+- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
+- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
+- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
+
+## 0.12.2 (November 20th, 2020)
+
+### Fixed
+
+- Properly wrap connect errors on the asyncio backend. (Pull #235)
+- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
+
+## 0.12.1 (November 7th, 2020)
+
+### Added
+
+- Add connect retries. (Pull #221)
+
+### Fixed
+
+- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
+- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
+- Properly wrap OS errors when using `trio`. (Pull #225)
+
+## 0.12.0 (October 6th, 2020)
+
+### Changed
+
+- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
+
+### Added
+
+- Add Python 3.9 to officially supported versions.
+
+### Fixed
+
+- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
+
+## 0.11.1 (September 28th, 2020)
+
+### Fixed
+
+- Add await to async semaphore release() coroutine (#197)
+- Drop incorrect curio classifier (#192)
+
+## 0.11.0 (September 22nd, 2020)
+
+The Transport API with 0.11.0 has a couple of significant changes.
+
+Firstly we've changed the request interface in order to allow extensions, which will later enable us to support features
+such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
+ +The interface changes from: + +```python +def request(method, url, headers, stream, timeout): + return (http_version, status_code, reason, headers, stream) +``` + +To instead including an optional dictionary of extensions on the request and response: + +```python +def request(method, url, headers, stream, ext): + return (status_code, headers, stream, ext) +``` + +Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes. + +In particular: + +* Trailing headers support. +* HTTP/2 Server Push +* sendfile. +* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming. +* Exposing debug information out of the API, including template name, template context. + +Currently extensions are limited to: + +* request: `timeout` - Optional. Timeout dictionary. +* response: `http_version` - Optional. Include the HTTP version used on the response. +* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*. + +See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this. + +Secondly, the async version of `request` is now namespaced as `arequest`. + +This allows concrete transports to support both sync and async implementations on the same class. + +### Added + +- Add curio support. (Pull #168) +- Add anyio support, with `backend="anyio"`. (Pull #169) + +### Changed + +- Update the Transport API to use 'ext' for optional extensions. (Pull #190) +- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189) + +## 0.10.2 (August 20th, 2020) + +### Added + +- Added Unix Domain Socket support. (Pull #139) + +### Fixed + +- Always include the port on proxy CONNECT requests. (Pull #154) +- Fix `max_keepalive_connections` configuration. (Pull #153) +- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164) + +## 0.10.1 (August 7th, 2020) + +- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes. + +## 0.10.0 (August 7th, 2020) + +The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional. + +Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support. + +### Added + +- HTTP/2 support becomes optional. (Pull #121, #130) +- Add `local_address=...` support. (Pull #100, #134) +- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133) +- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129) +- Add `UnsupportedProtocol` exception. (Pull #128) +- Add `.get_connection_info()` method. (Pull #102, #137) +- Add better TRACE logs. (Pull #101) + +### Changed + +- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140) + +### Fixed + +- Improve handling of server disconnects. (Pull #112) + +## 0.9.1 (May 27th, 2020) + +### Fixed + +- Proper host resolution for sync case, including IPv6 support. (Pull #97) +- Close outstanding connections when connection pool is closed. (Pull #98) + +## 0.9.0 (May 21th, 2020) + +### Changed + +- URL port becomes an `Optional[int]` instead of `int`. (Pull #92) + +### Fixed + +- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90) +- Remove incorrect debug log. 
(Pull #83) + +## 0.8.4 (May 11th, 2020) + +### Added + +- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables +and TRACE level logging. (Pull #79) + +### Fixed + +- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) + +## 0.8.3 (May 6rd, 2020) + +### Fixed + +- Include `Host` and `Accept` headers on proxy "CONNECT" requests. +- De-duplicate any headers also contained in proxy_headers. +- HTTP/2 flag not being passed down to proxy connections. + +## 0.8.2 (May 3rd, 2020) + +### Fixed + +- Fix connections using proxy forwarding requests not being added to the +connection pool properly. (Pull #70) + +## 0.8.1 (April 30th, 2020) + +### Changed + +- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. + +## 0.8.0 (April 30th, 2020) + +### Fixed + +- Fixed tunnel proxy support. + +### Added + +- New `TimeoutException` base class. + +## 0.7.0 (March 5th, 2020) + +- First integration with HTTPX. diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD new file mode 100644 index 0000000..3f37ea0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD @@ -0,0 +1,68 @@ +httpcore-1.0.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpcore-1.0.9.dist-info/METADATA,sha256=_i1P2mGZEol4d54M8n88BFxTGGP83Zh-rMdPOhjUHCE,21529 +httpcore-1.0.9.dist-info/RECORD,, +httpcore-1.0.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +httpcore-1.0.9.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 +httpcore/__init__.py,sha256=9kT_kqChCCJUTHww24ZmR_ezcdbpRYWksD-gYNzkZP8,3445 +httpcore/__pycache__/__init__.cpython-312.pyc,, +httpcore/__pycache__/_api.cpython-312.pyc,, +httpcore/__pycache__/_exceptions.cpython-312.pyc,, +httpcore/__pycache__/_models.cpython-312.pyc,, +httpcore/__pycache__/_ssl.cpython-312.pyc,, +httpcore/__pycache__/_synchronization.cpython-312.pyc,, +httpcore/__pycache__/_trace.cpython-312.pyc,, +httpcore/__pycache__/_utils.cpython-312.pyc,, +httpcore/_api.py,sha256=unZmeDschBWCGCPCwkS3Wot9euK6bg_kKxLtGTxw214,3146 +httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 +httpcore/_async/__pycache__/__init__.cpython-312.pyc,, +httpcore/_async/__pycache__/connection.cpython-312.pyc,, +httpcore/_async/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_async/__pycache__/http11.cpython-312.pyc,, +httpcore/_async/__pycache__/http2.cpython-312.pyc,, +httpcore/_async/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_async/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_async/connection.py,sha256=6OcPXqMEfc0BU38_-iHUNDd1vKSTc2UVT09XqNb_BOk,8449 +httpcore/_async/connection_pool.py,sha256=DOIQ2s2ZCf9qfwxhzMprTPLqCL8OxGXiKF6qRHxvVyY,17307 +httpcore/_async/http11.py,sha256=-qM9bV7PjSQF5vxs37-eUXOIFwbIjPcZbNliuX9TtBw,13880 +httpcore/_async/http2.py,sha256=azX1fcmtXaIwjputFlZ4vd92J8xwjGOa9ax9QIv4394,23936 +httpcore/_async/http_proxy.py,sha256=2zVkrlv-Ds-rWGaqaXlrhEJiAQFPo23BT3Gq_sWoBXU,14701 +httpcore/_async/interfaces.py,sha256=jTiaWL83pgpGC9ziv90ZfwaKNMmHwmOalzaKiuTxATo,4455 +httpcore/_async/socks_proxy.py,sha256=lLKgLlggPfhFlqi0ODeBkOWvt9CghBBUyqsnsU1tx6Q,13841 +httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpcore/_backends/__pycache__/__init__.cpython-312.pyc,, 
+httpcore/_backends/__pycache__/anyio.cpython-312.pyc,, +httpcore/_backends/__pycache__/auto.cpython-312.pyc,, +httpcore/_backends/__pycache__/base.cpython-312.pyc,, +httpcore/_backends/__pycache__/mock.cpython-312.pyc,, +httpcore/_backends/__pycache__/sync.cpython-312.pyc,, +httpcore/_backends/__pycache__/trio.cpython-312.pyc,, +httpcore/_backends/anyio.py,sha256=x8PgEhXRC8bVqsdzk_YJx8Y6d9Tub06CuUSwnbmtqoY,5252 +httpcore/_backends/auto.py,sha256=zO136PKZmsaTDK-HRk84eA-MUg8_2wJf4NvmK432Aio,1662 +httpcore/_backends/base.py,sha256=aShgRdZnMmRhFWHetjumlM73f8Kz1YOAyCUP_4kHslA,3042 +httpcore/_backends/mock.py,sha256=er9T436uSe7NLrfiLa4x6Nuqg5ivQ693CxWYCWsgbH4,4077 +httpcore/_backends/sync.py,sha256=bhE4d9iK9Umxdsdsgm2EfKnXaBms2WggGYU-7jmUujU,7977 +httpcore/_backends/trio.py,sha256=LHu4_Mr5MswQmmT3yE4oLgf9b_JJfeVS4BjDxeJc7Ro,5996 +httpcore/_exceptions.py,sha256=looCKga3_YVYu3s-d3L9RMPRJyhsY7fiuuGxvkOD0c0,1184 +httpcore/_models.py,sha256=IO2CcXcdpovRcLTdGFGB6RyBZdEm2h_TOmoCc4rEKho,17623 +httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 +httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 +httpcore/_sync/__pycache__/__init__.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection.cpython-312.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc,, +httpcore/_sync/__pycache__/http11.cpython-312.pyc,, +httpcore/_sync/__pycache__/http2.cpython-312.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc,, +httpcore/_sync/__pycache__/interfaces.cpython-312.pyc,, +httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc,, +httpcore/_sync/connection.py,sha256=9exGOb3PB-Mp2T1-sckSeL2t-tJ_9-NXomV8ihmWCgU,8238 +httpcore/_sync/connection_pool.py,sha256=a-T8LTsUxc7r0Ww1atfHSDoWPjQ0fA8Ul7S3-F0Mj70,16955 +httpcore/_sync/http11.py,sha256=IFobD1Md5JFlJGKWnh1_Q3epikUryI8qo09v8MiJIEA,13476 +httpcore/_sync/http2.py,sha256=AxU4yhcq68Bn5vqdJYtiXKYUj7nvhYbxz3v4rT4xnvA,23400 +httpcore/_sync/http_proxy.py,sha256=_al_6crKuEZu2wyvu493RZImJdBJnj5oGKNjLOJL2Zo,14463 +httpcore/_sync/interfaces.py,sha256=snXON42vUDHO5JBJvo8D4VWk2Wat44z2OXXHDrjbl94,4344 +httpcore/_sync/socks_proxy.py,sha256=zegZW9Snqj2_992DFJa8_CppOVBkVL4AgwduRkStakQ,13614 +httpcore/_synchronization.py,sha256=zSi13mAColBnknjZBknUC6hKNDQT4C6ijnezZ-r0T2s,9434 +httpcore/_trace.py,sha256=ck6ZoIzYTkdNAIfq5MGeKqBXDtqjOX-qfYwmZFbrGco,3952 +httpcore/_utils.py,sha256=_RLgXYOAYC350ikALV59GZ68IJrdocRZxPs9PjmzdFY,1537 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL new file mode 100644 index 0000000..12228d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..311b2b5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/httpcore/__init__.py b/venv/lib/python3.12/site-packages/httpcore/__init__.py new file mode 100644 index 0000000..9a92dc4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/__init__.py @@ -0,0 +1,141 @@ +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend +from ._exceptions import ( + ConnectError, + ConnectionNotAvailable, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Origin, Proxy, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. +try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
+ raise RuntimeError(msg) + + +__all__ = [ + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + "Proxy", + # async + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", + "ProxyError", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "UnsupportedProtocol", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", + "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", +] + +__version__ = "1.0.9" + + +__locals = locals() +for __name in __all__: + # Exclude SOCKET_OPTION, it causes AttributeError on Python 3.14 + if not __name.startswith(("__", "SOCKET_OPTION")): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c27d329 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000..57cc133 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000..ddf87ac Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc new file mode 100644 index 0000000..ebe36bf Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc new file mode 100644 index 0000000..a2e8bbb Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc new file mode 100644 index 0000000..38d3127 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc
new file mode 100644
index 0000000..6e6fbdf
Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc
new file mode 100644
index 0000000..4f546e7
Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/httpcore/_api.py b/venv/lib/python3.12/site-packages/httpcore/_api.py
new file mode 100644
index 0000000..38b961d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_api.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import contextlib
+import typing
+
+from ._models import URL, Extensions, HeaderTypes, Response
+from ._sync.connection_pool import ConnectionPool
+
+
+def request(
+    method: bytes | str,
+    url: URL | bytes | str,
+    *,
+    headers: HeaderTypes = None,
+    content: bytes | typing.Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
+) -> Response:
+    """
+    Sends an HTTP request, returning the response.
+
+    ```
+    response = httpcore.request("GET", "https://www.example.com/")
+    ```
+
+    Arguments:
+        method: The HTTP method for the request. Typically one of `"GET"`,
+            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
+        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
+            or as str/bytes.
+        headers: The HTTP request headers. Either as a dictionary of str/bytes,
+            or as a list of two-tuples of str/bytes.
+        content: The content of the request body. Either as bytes,
+            or as a bytes iterator.
+        extensions: A dictionary of optional extra information included on the request.
+            Possible keys include `"timeout"`.
+
+    Returns:
+        An instance of `httpcore.Response`.
+    """
+    with ConnectionPool() as pool:
+        return pool.request(
+            method=method,
+            url=url,
+            headers=headers,
+            content=content,
+            extensions=extensions,
+        )
+
+
+@contextlib.contextmanager
+def stream(
+    method: bytes | str,
+    url: URL | bytes | str,
+    *,
+    headers: HeaderTypes = None,
+    content: bytes | typing.Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
+) -> typing.Iterator[Response]:
+    """
+    Sends an HTTP request, returning the response within a context manager.
+
+    ```
+    with httpcore.stream("GET", "https://www.example.com/") as response:
+        ...
+    ```
+
+    When using the `stream()` function, the body of the response will not be
+    automatically read. If you want to access the response body you should
+    either use `content = response.read()`, or `for chunk in response.iter_stream()`.
+
+    Arguments:
+        method: The HTTP method for the request. Typically one of `"GET"`,
+            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
+        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
+            or as str/bytes.
+        headers: The HTTP request headers. Either as a dictionary of str/bytes,
+            or as a list of two-tuples of str/bytes.
+        content: The content of the request body. Either as bytes,
+            or as a bytes iterator.
+        extensions: A dictionary of optional extra information included on the request.
+            Possible keys include `"timeout"`.
+
+    Returns:
+        An instance of `httpcore.Response`.
+ """ + with ConnectionPool() as pool: + with pool.stream( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) as response: + yield response diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py new file mode 100644 index 0000000..88dc7f0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e6afba5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000..449ebe8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 0000000..8d3f957 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc new file mode 100644 index 0000000..fba281d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc new file mode 100644 index 0000000..4ad86aa Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc 
new file mode 100644 index 0000000..d80cd66 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 0000000..3903729 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 0000000..52469a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/connection.py b/venv/lib/python3.12/site-packages/httpcore/_async/connection.py new file mode 100644 index 0000000..b42581d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. 
+ + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: AsyncConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + async with self._request_lock: + if self._connection is None: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or 
self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
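+    #
+    # A minimal usage sketch (hypothetical values; `request` is assumed to be
+    # a pre-built httpcore.Request, and this sketch is not part of the module
+    # itself):
+    #
+    #     origin = httpcore.Origin(b"https", b"example.com", 443)
+    #     async with AsyncHTTPConnection(origin=origin) as conn:
+    #         response = await conn.handle_async_request(request)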
+ + async def __aenter__(self) -> AsyncHTTPConnection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py b/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 0000000..96e973d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class AsyncPoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: AsyncConnectionInterface | None = None + self._connection_acquired = AsyncEvent() + + def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: float | None = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. 
Can also be used to connect
+                using a particular address family. Using `local_address="0.0.0.0"`
+                will connect using an `AF_INET` address (IPv4), while using
+                `local_address="::"` will connect using an `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+            socket_options: Socket options that have to be included
+                in the TCP socket when the connection is established.
+        """
+        self._ssl_context = ssl_context
+        self._proxy = proxy
+        self._max_connections = (
+            sys.maxsize if max_connections is None else max_connections
+        )
+        self._max_keepalive_connections = (
+            sys.maxsize
+            if max_keepalive_connections is None
+            else max_keepalive_connections
+        )
+        self._max_keepalive_connections = min(
+            self._max_connections, self._max_keepalive_connections
+        )
+
+        self._keepalive_expiry = keepalive_expiry
+        self._http1 = http1
+        self._http2 = http2
+        self._retries = retries
+        self._local_address = local_address
+        self._uds = uds
+
+        self._network_backend = (
+            AutoBackend() if network_backend is None else network_backend
+        )
+        self._socket_options = socket_options
+
+        # The mutable state on a connection pool is the queue of incoming requests,
+        # and the set of connections that are servicing those requests.
+        self._connections: list[AsyncConnectionInterface] = []
+        self._requests: list[AsyncPoolRequest] = []
+
+        # We only mutate the state of the connection pool within an 'optional_thread_lock'
+        # context. This holds a threading lock unless we're running in async mode,
+        # in which case it is a no-op.
+        self._optional_thread_lock = AsyncThreadLock()
+
+    def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
+        if self._proxy is not None:
+            if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+                from .socks_proxy import AsyncSocks5Connection
+
+                return AsyncSocks5Connection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_auth=self._proxy.auth,
+                    remote_origin=origin,
+                    ssl_context=self._ssl_context,
+                    keepalive_expiry=self._keepalive_expiry,
+                    http1=self._http1,
+                    http2=self._http2,
+                    network_backend=self._network_backend,
+                )
+            elif origin.scheme == b"http":
+                from .http_proxy import AsyncForwardHTTPConnection
+
+                return AsyncForwardHTTPConnection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_headers=self._proxy.headers,
+                    proxy_ssl_context=self._proxy.ssl_context,
+                    remote_origin=origin,
+                    keepalive_expiry=self._keepalive_expiry,
+                    network_backend=self._network_backend,
+                )
+            from .http_proxy import AsyncTunnelHTTPConnection
+
+            return AsyncTunnelHTTPConnection(
+                proxy_origin=self._proxy.url.origin,
+                proxy_headers=self._proxy.headers,
+                proxy_ssl_context=self._proxy.ssl_context,
+                remote_origin=origin,
+                ssl_context=self._ssl_context,
+                keepalive_expiry=self._keepalive_expiry,
+                http1=self._http1,
+                http2=self._http2,
+                network_backend=self._network_backend,
+            )
+
+        return AsyncHTTPConnection(
+            origin=origin,
+            ssl_context=self._ssl_context,
+            keepalive_expiry=self._keepalive_expiry,
+            http1=self._http1,
+            http2=self._http2,
+            retries=self._retries,
+            local_address=self._local_address,
+            uds=self._uds,
+            network_backend=self._network_backend,
+            socket_options=self._socket_options,
+        )
+
+    @property
+    def connections(self) -> list[AsyncConnectionInterface]:
+        """
+        Return a list of the connections currently in the pool.
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <AsyncHTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._connections)
+
+    async def handle_async_request(self, request: Request) -> Response:
+        """
+        Send an HTTP request, and return an HTTP response.
+
+        This is the core implementation that is called into by `.request()` or `.stream()`.
+        """
+        scheme = request.url.scheme.decode()
+        if scheme == "":
+            raise UnsupportedProtocol(
+                "Request URL is missing an 'http://' or 'https://' protocol."
+            )
+        if scheme not in ("http", "https", "ws", "wss"):
+            raise UnsupportedProtocol(
+                f"Request URL has an unsupported protocol '{scheme}://'."
+            )
+
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("pool", None)
+
+        with self._optional_thread_lock:
+            # Add the incoming request to our request queue.
+            pool_request = AsyncPoolRequest(request)
+            self._requests.append(pool_request)
+
+        try:
+            while True:
+                with self._optional_thread_lock:
+                    # Assign incoming requests to available connections,
+                    # closing or creating new connections as required.
+                    closing = self._assign_requests_to_connections()
+                await self._close_connections(closing)
+
+                # Wait until this request has an assigned connection.
+                connection = await pool_request.wait_for_connection(timeout=timeout)
+
+                try:
+                    # Send the request on the assigned connection.
+                    response = await connection.handle_async_request(
+                        pool_request.request
+                    )
+                except ConnectionNotAvailable:
+                    # In some cases a connection may initially be available to
+                    # handle a request, but then become unavailable.
+                    #
+                    # In this case we clear the connection and try again.
+                    pool_request.clear_connection()
+                else:
+                    break  # pragma: nocover
+
+        except BaseException as exc:
+            with self._optional_thread_lock:
+                # For any exception or cancellation we remove the request from
+                # the queue, and then re-assign requests to connections.
+                self._requests.remove(pool_request)
+                closing = self._assign_requests_to_connections()
+
+            await self._close_connections(closing)
+            raise exc from None
+
+        # Return the response. Note that in this case we still have to manage
+        # the point at which the response is closed.
+        assert isinstance(response.stream, typing.AsyncIterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=PoolByteStream(
+                stream=response.stream, pool_request=pool_request, pool=self
+            ),
+            extensions=response.extensions,
+        )
+
+    def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]:
+        """
+        Manage the state of the connection pool, assigning incoming
+        requests to connections as available.
+
+        Called whenever a new request is added or removed from the pool.
+
+        Any closing connections are returned, allowing the I/O for closing
+        those connections to be handled separately.
+        """
+        closing_connections = []
+
+        # First we handle cleaning up any connections that are closed,
+        # have expired their keep-alive, or surplus idle connections.
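+        # (Editorial note) Closed connections are simply dropped below, while
+        # expired and surplus-idle connections are also collected into
+        # `closing_connections` so the caller can perform the close I/O.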
+ for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. + queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with AsyncShieldCancellation(): + for connection in closing: + await connection.aclose() + + async def aclose(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
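+        # (Editorial note) The thread lock below only guards the mutation of
+        # `self._connections`; the close I/O itself runs afterwards, outside
+        # the lock, via `_close_connections()`.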
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + await self._close_connections(closing_connections) + + async def __aenter__(self) -> AsyncConnectionPool: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.AsyncIterable[bytes], + pool_request: AsyncPoolRequest, + pool: AsyncConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + try: + async for part in self._stream: + yield part + except BaseException as exc: + await self.aclose() + raise exc from None + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + with AsyncShieldCancellation(): + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + await self._pool._close_connections(closing) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http11.py b/venv/lib/python3.12/site-packages/httpcore/_async/http11.py new file mode 100644 index 0000000..e6d6d70 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = 
HTTPConnectionState.NEW
+        self._state_lock = AsyncLock()
+        self._request_count = 0
+        self._h11_state = h11.Connection(
+            our_role=h11.CLIENT,
+            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+        )
+
+    async def handle_async_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection "
+                f"to {self._origin}"
+            )
+
+        async with self._state_lock:
+            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+                self._request_count += 1
+                self._state = HTTPConnectionState.ACTIVE
+                self._expire_at = None
+            else:
+                raise ConnectionNotAvailable()
+
+        try:
+            kwargs = {"request": request}
+            try:
+                async with Trace(
+                    "send_request_headers", logger, request, kwargs
+                ) as trace:
+                    await self._send_request_headers(**kwargs)
+                async with Trace("send_request_body", logger, request, kwargs) as trace:
+                    await self._send_request_body(**kwargs)
+            except WriteError:
+                # If we get a write error while we're writing the request,
+                # then we suppress this error and move on to attempting to
+                # read the response. Servers can sometimes close the request
+                # pre-emptively and then respond with a well-formed HTTP
+                # error response.
+                pass
+
+            async with Trace(
+                "receive_response_headers", logger, request, kwargs
+            ) as trace:
+                (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                    trailing_data,
+                ) = await self._receive_response_headers(**kwargs)
+                trace.return_value = (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                )
+
+            network_stream = self._network_stream
+
+            # CONNECT or Upgrade request
+            if (status == 101) or (
+                (request.method == b"CONNECT") and (200 <= status < 300)
+            ):
+                network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data)
+
+            return Response(
+                status=status,
+                headers=headers,
+                content=HTTP11ConnectionByteStream(self, request),
+                extensions={
+                    "http_version": http_version,
+                    "reason_phrase": reason_phrase,
+                    "network_stream": network_stream,
+                },
+            )
+        except BaseException as exc:
+            with AsyncShieldCancellation():
+                async with Trace("response_closed", logger, request) as trace:
+                    await self._response_closed()
+            raise exc
+
+    # Sending the request...
+
+    async def _send_request_headers(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
+            event = h11.Request(
+                method=request.method,
+                target=request.url.target,
+                headers=request.headers,
+            )
+        await self._send_event(event, timeout=timeout)
+
+    async def _send_request_body(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        assert isinstance(request.stream, typing.AsyncIterable)
+        async for chunk in request.stream:
+            event = h11.Data(data=chunk)
+            await self._send_event(event, timeout=timeout)
+
+        await self._send_event(h11.EndOfMessage(), timeout=timeout)
+
+    async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
+        bytes_to_send = self._h11_state.send(event)
+        if bytes_to_send is not None:
+            await self._network_stream.write(bytes_to_send, timeout=timeout)
+
+    # Receiving the response...
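+    #
+    # (Editorial note) `trailing_data` returned below holds any bytes h11 has
+    # already buffered beyond the end of the response headers; for 101 and
+    # CONNECT responses those bytes belong to the upgraded/tunnelled stream
+    # and are replayed via `AsyncHTTP11UpgradeStream`.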
+
+    async def _receive_response_headers(
+        self, request: Request
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = await self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Response):
+                break
+            if (
+                isinstance(event, h11.InformationalResponse)
+                and event.status_code == 101
+            ):
+                break
+
+        http_version = b"HTTP/" + event.http_version
+
+        # h11 version 0.11+ supports a `raw_items` interface to get the
+        # raw header casing, rather than the enforced lowercase headers.
+        headers = event.headers.raw_items()
+
+        trailing_data, _ = self._h11_state.trailing_data
+
+        return http_version, event.status_code, event.reason, headers, trailing_data
+
+    async def _receive_response_body(
+        self, request: Request
+    ) -> typing.AsyncIterator[bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = await self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Data):
+                yield bytes(event.data)
+            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
+                break
+
+    async def _receive_event(
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
+        while True:
+            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
+                event = self._h11_state.next_event()
+
+            if event is h11.NEED_DATA:
+                data = await self._network_stream.read(
+                    self.READ_NUM_BYTES, timeout=timeout
+                )
+
+                # If we feed this case through h11 we'll raise an exception like:
+                #
+                #     httpcore.RemoteProtocolError: can't handle event type
+                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
+                #
+                # Which is accurate, but not very informative from an end-user
+                # perspective. Instead we handle this case distinctly and treat
+                # it as a ConnectError.
+                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
+                    msg = "Server disconnected without sending a response."
+                    raise RemoteProtocolError(msg)
+
+                self._h11_state.receive_data(data)
+            else:
+                # mypy fails to narrow the type in the if statement above
+                return event  # type: ignore[return-value]
+
+    async def _response_closed(self) -> None:
+        async with self._state_lock:
+            if (
+                self._h11_state.our_state is h11.DONE
+                and self._h11_state.their_state is h11.DONE
+            ):
+                self._state = HTTPConnectionState.IDLE
+                self._h11_state.start_next_cycle()
+                if self._keepalive_expiry is not None:
+                    now = time.monotonic()
+                    self._expire_at = now + self._keepalive_expiry
+            else:
+                await self.aclose()
+
+    # Once the connection is no longer required...
+
+    async def aclose(self) -> None:
+        # Note that this method unilaterally closes the connection, and does
+        # not have any kind of locking in place around it.
+        self._state = HTTPConnectionState.CLOSED
+        await self._network_stream.aclose()
+
+    # The AsyncConnectionInterface methods provide information about the state of
+    # the connection, allowing for a connection pooling implementation to
+    # determine when to reuse and when to close the connection...
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
+
+    def is_available(self) -> bool:
+        # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+        # being "available". The control flow which created the connection will
+        # be able to send an outgoing request, but the connection will not be
+        # acquired from the connection pool for any other request.
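+        # (Editorial note) Concretely: a NEW connection is reserved for the
+        # request that created it, and only becomes reusable once a full
+        # request/response cycle completes and the state returns to IDLE.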
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> AsyncHTTP11Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
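+            # (Editorial note) The cancellation shield ensures the close below
+            # still runs even if the surrounding task is cancelled mid-stream.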
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() + + +class AsyncHTTP11UpgradeStream(AsyncNetworkStream): + def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return await self._stream.read(max_bytes, timeout) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + await self._stream.write(buffer, timeout) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return await self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http2.py b/venv/lib/python3.12/site-packages/httpcore/_async/http2.py new file mode 100644 index 0000000..dbd0bee --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + async with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + await self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+ if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. 
+        """
+        while data:
+            max_flow = await self._wait_for_outgoing_flow(request, stream_id)
+            chunk_size = min(len(data), max_flow)
+            chunk, data = data[:chunk_size], data[chunk_size:]
+            self._h2_state.send_data(stream_id, chunk)
+            await self._write_outgoing_data(request)
+
+    async def _send_end_stream(self, request: Request, stream_id: int) -> None:
+        """
+        Send an empty data frame on a given stream ID with the END_STREAM flag set.
+        """
+        self._h2_state.end_stream(stream_id)
+        await self._write_outgoing_data(request)
+
+    # Receiving the response...
+
+    async def _receive_response(
+        self, request: Request, stream_id: int
+    ) -> tuple[int, list[tuple[bytes, bytes]]]:
+        """
+        Return the response status code and headers for a given stream ID.
+        """
+        while True:
+            event = await self._receive_stream_event(request, stream_id)
+            if isinstance(event, h2.events.ResponseReceived):
+                break
+
+        status_code = 200
+        headers = []
+        assert event.headers is not None
+        for k, v in event.headers:
+            if k == b":status":
+                status_code = int(v.decode("ascii", errors="ignore"))
+            elif not k.startswith(b":"):
+                headers.append((k, v))
+
+        return (status_code, headers)
+
+    async def _receive_response_body(
+        self, request: Request, stream_id: int
+    ) -> typing.AsyncIterator[bytes]:
+        """
+        Iterator that returns the bytes of the response body for a given stream ID.
+        """
+        while True:
+            event = await self._receive_stream_event(request, stream_id)
+            if isinstance(event, h2.events.DataReceived):
+                assert event.flow_controlled_length is not None
+                assert event.data is not None
+                amount = event.flow_controlled_length
+                self._h2_state.acknowledge_received_data(amount, stream_id)
+                await self._write_outgoing_data(request)
+                yield event.data
+            elif isinstance(event, h2.events.StreamEnded):
+                break
+
+    async def _receive_stream_event(
+        self, request: Request, stream_id: int
+    ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
+        """
+        Return the next available event for a given stream ID.
+
+        Will read more data from the network if required.
+        """
+        while not self._events.get(stream_id):
+            await self._receive_events(request, stream_id)
+        event = self._events[stream_id].pop(0)
+        if isinstance(event, h2.events.StreamReset):
+            raise RemoteProtocolError(event)
+        return event
+
+    async def _receive_events(
+        self, request: Request, stream_id: int | None = None
+    ) -> None:
+        """
+        Read some data from the network until we see one or more events
+        for a given stream ID.
+        """
+        async with self._read_lock:
+            if self._connection_terminated is not None:
+                last_stream_id = self._connection_terminated.last_stream_id
+                if stream_id and last_stream_id and stream_id > last_stream_id:
+                    self._request_count -= 1
+                    raise ConnectionNotAvailable()
+                raise RemoteProtocolError(self._connection_terminated)
+
+            # This conditional is a bit icky. We don't want to block reading if we've
+            # actually got an event to return for a given stream. We need to do that
+            # check *within* the atomic read lock. Though it also needs to be optional,
+            # because when we call it from `_wait_for_outgoing_flow` we *do* want to
+            # block until we have available flow control, even when we have events
+            # pending for the stream ID we're attempting to send on.
+ if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
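+    #
+    # An illustrative sketch (editorial note, not part of the library),
+    # mirroring the HTTP/1.1 class: the connection wraps an already-established
+    # network stream, e.g. one returned by a backend's `connect_tcp(...)`:
+    #
+    #     async with AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
+    #         response = await conn.request("GET", "https://example.com/")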
+ + async def __aenter__(self) -> AsyncHTTP2Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 0000000..cc9d920 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class AsyncHTTPProxy(AsyncConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
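+
+    (Editorial note) Plain `http://` requests are forwarded through the proxy
+    with an absolute-URI target, while other schemes are tunnelled with an
+    HTTP CONNECT request before TLS is negotiated end-to-end.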
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            proxy_auth: Any proxy authentication as a two-tuple of
+                (username, password). May be either bytes or ascii-only str.
+            proxy_headers: Any HTTP headers to use for the proxy requests.
+                For example `{"Proxy-Authorization": "Basic <username>:<password>"}`.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            local_address: Local address to connect from. Can also be used to
+                connect using a particular address family. Using
+                `local_address="0.0.0.0"` will connect using an `AF_INET` address
+                (IPv4), while using `local_address="::"` will connect using an
+                `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return 
self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + 
origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py b/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 0000000..361583b --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @contextlib.asynccontextmanager + async def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
+ headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that the server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not.
+ """ + raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 0000000..b363f55 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import logging +import ssl + +import socksio + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: tuple[bytes, bytes] | None = None, +) -> None: + conn = socksio.socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+ ) + + if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socksio.socks5.SOCKS5CommandRequest.from_address( + socksio.socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5Reply) + if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN") + raise ProxyError(f"Proxy server could not connect: {reply_code}.") + + +class AsyncSOCKSProxy(AsyncConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via a SOCKS5 proxy. + """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: AsyncNetworkBackend | None = None, + ) -> None: + """ + A connection pool for making HTTP requests via a SOCKS5 proxy. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"socks5://127.0.0.1:1080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + network_backend: A backend instance to use for handling network I/O.
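+
+        A minimal usage sketch (assumes the optional `socksio` dependency is
+        installed; the proxy address and target URL below are placeholders):
+
+        ```python
+        import httpcore
+
+        async def main() -> None:
+            async with httpcore.AsyncSOCKSProxy(
+                proxy_url="socks5://127.0.0.1:1080/"
+            ) as proxy:
+                response = await proxy.request("GET", "https://www.example.com/")
+                print(response.status)
+        ```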
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: AsyncConnectionInterface | None = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + async with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = 
stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..420509d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc new file mode 100644 index 0000000..14fd141 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc new file mode 100644 index 0000000..6b704b4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..e1d9ddb Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc new file mode 100644 index 0000000..ed5be8a Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc new file mode 100644 index 0000000..7b093f4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc new file mode 100644 index 0000000..ffbba5a Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py b/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 0000000..a140095 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + anyio.EndOfStream: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + anyio.EndOfStream: ConnectError, + ssl.SSLError: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + 
return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py b/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py new file mode 100644 index 0000000..49f0e69 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import typing + +from .._synchronization import current_async_library +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + 
socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/base.py b/venv/lib/python3.12/site-packages/httpcore/_backends/base.py new file mode 100644 index 0000000..cf55c8b --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/base.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py b/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py new 
file mode 100644 index 0000000..9b6edca --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import ssl +import typing + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "<httpcore.MockStream>" + + +class MockBackend(NetworkBackend): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def sleep(self, seconds: float) -> None: + pass + + +class AsyncMockStream(AsyncNetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + async def aclose(self) -> None: + self._closed = True + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "<httpcore.AsyncMockStream>" + + +class AsyncMockBackend(AsyncNetworkBackend): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options:
typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py b/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py new file mode 100644 index 0000000..4018a09 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,241 @@ +from __future__ import annotations + +import functools +import socket +import ssl +import sys +import typing + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream + + +class TLSinTLSStream(NetworkStream): # pragma: no cover + """ + Because the standard `SSLContext.wrap_socket` method does + not work for `SSLSocket` objects, we need this class + to implement TLS stream using an underlying `SSLObject` + instance in order to support TLS on top of TLS. + """ + + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 + + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None + + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno + + self._sock.sendall(self._outgoing.read()) + + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) + + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(functools.partial(self.ssl_obj.read, max_bytes)) + ) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(functools.partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: float | None = None) 
-> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. + return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. + if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." 
+ ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py b/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py new file mode 100644 index 0000000..6f53f5f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
+ # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. + # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_exceptions.py b/venv/lib/python3.12/site-packages/httpcore/_exceptions.py new file mode 100644 index 0000000..bc28d44 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_exceptions.py @@ -0,0 +1,81 @@ +import contextlib +import typing + +ExceptionMapping = typing.Mapping[typing.Type[Exception], typing.Type[Exception]] + + +@contextlib.contextmanager +def map_exceptions(map: ExceptionMapping) -> typing.Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + 
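+# A minimal sketch of how `map_exceptions` above is intended to be used
+# (the socket call is illustrative only; `ConnectTimeout` and `ConnectError`
+# are defined later in this module):
+#
+#     import socket
+#
+#     exc_map = {socket.timeout: ConnectTimeout, OSError: ConnectError}
+#     with map_exceptions(exc_map):
+#         socket.create_connection(("example.com", 80), timeout=0.01)
+#
+# An exception raised inside the block that matches a key is re-raised as
+# the mapped httpcore exception, chained from the original.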
+ +class LocalProtocolError(ProtocolError): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass diff --git a/venv/lib/python3.12/site-packages/httpcore/_models.py b/venv/lib/python3.12/site-packages/httpcore/_models.py new file mode 100644 index 0000000..8a65f13 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_models.py @@ -0,0 +1,516 @@ +from __future__ import annotations + +import base64 +import ssl +import typing +import urllib.parse + +# Functions for typechecking... + + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] +HeaderTypes = typing.Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = typing.MutableMapping[str, typing.Any] + + +def enforce_bytes(value: bytes | str, *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However, we enforce that any string arguments must only contain characters in + the plain ASCII range, chr(0)...chr(127). If you need to use characters + outside that range then be precise, and use a byte-wise argument. + """ + if isinstance(value, str): + try: + return value.encode("ascii") + except UnicodeEncodeError: + raise TypeError(f"{name} strings may not include unicode characters.") + elif isinstance(value, bytes): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") + + +def enforce_url(value: URL | bytes | str, *, name: str) -> URL: + """ + Type check for URL parameters. + """ + if isinstance(value, (bytes, str)): + return URL(value) + elif isinstance(value, URL): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") + + +def enforce_headers( + value: HeadersAsMapping | HeadersAsSequence | None = None, *, name: str +) -> list[tuple[bytes, bytes]]: + """ + Convenience function that ensures all items in request or response headers + are either bytes or strings in the plain ASCII range. + """ + if value is None: + return [] + elif isinstance(value, typing.Mapping): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value.items() + ] + elif isinstance(value, typing.Sequence): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value + ] + + seen_type = type(value).__name__ + raise TypeError( + f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}."
+ ) + + +def enforce_stream( + value: bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes] | None, + *, + name: str, +) -> typing.Iterable[bytes] | typing.AsyncIterable[bytes]: + if value is None: + return ByteStream(b"") + elif isinstance(value, bytes): + return ByteStream(value) + return value + + +# * https://tools.ietf.org/html/rfc3986#section-3.2.3 +# * https://url.spec.whatwg.org/#url-miscellaneous +# * https://url.spec.whatwg.org/#scheme-state +DEFAULT_PORTS = { + b"ftp": 21, + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, +} + + +def include_request_headers( + headers: list[tuple[bytes, bytes]], + *, + url: "URL", + content: None | bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes], +) -> list[tuple[bytes, bytes]]: + headers_set = set(k.lower() for k, v in headers) + + if b"host" not in headers_set: + default_port = DEFAULT_PORTS.get(url.scheme) + if url.port is None or url.port == default_port: + header_value = url.host + else: + header_value = b"%b:%d" % (url.host, url.port) + headers = [(b"Host", header_value)] + headers + + if ( + content is not None + and b"content-length" not in headers_set + and b"transfer-encoding" not in headers_set + ): + if isinstance(content, bytes): + content_length = str(len(content)).encode("ascii") + headers += [(b"Content-Length", content_length)] + else: + headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover + + return headers + + +# Interfaces for byte streams... + + +class ByteStream: + """ + A container for non-streaming content that supports both sync and async + stream iteration. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> typing.Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convenience: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitly pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second, more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`...
+ + ```python + # Constructs an HTTP request with a complete URL as the target: + # GET https://www.example.com/ HTTP/1.1 + url = httpcore.URL( + scheme=b'http', + host=b'localhost', + port=8080, + target=b'https://www.example.com/' + ) + request = httpcore.Request( + method="GET", + url=url + ) + ``` + + Another example is constructing an `OPTIONS *` request... + + ```python + # Constructs an 'OPTIONS *' HTTP request: + # OPTIONS * HTTP/1.1 + url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') + request = httpcore.Request(method="OPTIONS", url=url) + ``` + + This kind of request is not possible to formulate with a URL string, + because the `/` delimiter is always used to demarcate the target from the + host/port portion of the URL. + + For convenience, string-like arguments may be specified either as strings or + as bytes. However, once a request is being issued over the wire, the URL + components are always ultimately required to be a bytewise representation. + + In order to avoid any ambiguity over character encodings, when strings are used + as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. + If you require a bytewise representation that is outside this range you must + handle the character encoding directly, and pass a bytes instance. + """ + + def __init__( + self, + url: bytes | str = "", + *, + scheme: bytes | str = b"", + host: bytes | str = b"", + port: int | None = None, + target: bytes | str = b"", + ) -> None: + """ + Parameters: + url: The complete URL as a string or bytes. + scheme: The URL scheme as a string or bytes. + Typically either `"http"` or `"https"`. + host: The URL host as a string or bytes. Such as `"www.example.com"`. + port: The port to connect to. Either an integer or `None`. + target: The target of the HTTP request. Such as `"/items?search=red"`. + """ + if url: + parsed = urllib.parse.urlparse(enforce_bytes(url, name="url")) + self.scheme = parsed.scheme + self.host = parsed.hostname or b"" + self.port = parsed.port + self.target = (parsed.path or b"/") + ( + b"?" + parsed.query if parsed.query else b"" + ) + else: + self.scheme = enforce_bytes(scheme, name="scheme") + self.host = enforce_bytes(host, name="host") + self.port = port + self.target = enforce_bytes(target, name="target") + + @property + def origin(self) -> Origin: + default_port = { + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, + b"socks5": 1080, + b"socks5h": 1080, + }[self.scheme] + return Origin( + scheme=self.scheme, host=self.host, port=self.port or default_port + ) + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, URL) + and other.scheme == self.scheme + and other.host == self.host + and other.port == self.port + and other.target == self.target + ) + + def __bytes__(self) -> bytes: + if self.port is None: + return b"%b://%b%b" % (self.scheme, self.host, self.target) + return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) + + def __repr__(self) -> str: + return ( + f"{self.__class__.__name__}(scheme={self.scheme!r}, " + f"host={self.host!r}, port={self.port!r}, target={self.target!r})" + ) + + +class Request: + """ + An HTTP request. + """ + + def __init__( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes + | typing.Iterable[bytes] + | typing.AsyncIterable[bytes] + | None = None, + extensions: Extensions | None = None, + ) -> None: + """ + Parameters: + method: The HTTP request method, either as a string or bytes.
+ For example: `GET`. + url: The request URL, either as a `URL` instance, or as a string or bytes. + For example: `"https://www.example.com"`. + headers: The HTTP request headers. + content: The content of the request body. + extensions: A dictionary of optional extra information included on + the request. Possible keys include `"timeout"` and `"trace"`. + """ + self.method: bytes = enforce_bytes(method, name="method") + self.url: URL = enforce_url(url, name="url") + self.headers: list[tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = ( + enforce_stream(content, name="content") + ) + self.extensions = {} if extensions is None else extensions + + if "target" in self.extensions: + self.url = URL( + scheme=self.url.scheme, + host=self.url.host, + port=self.url.port, + target=self.extensions["target"], + ) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.method!r}]>" + + +class Response: + """ + An HTTP response. + """ + + def __init__( + self, + status: int, + *, + headers: HeaderTypes = None, + content: bytes + | typing.Iterable[bytes] + | typing.AsyncIterable[bytes] + | None = None, + extensions: Extensions | None = None, + ) -> None: + """ + Parameters: + status: The HTTP status code of the response. For example `200`. + headers: The HTTP response headers. + content: The content of the response body. + extensions: A dictionary of optional extra information included on + the response. Possible keys include `"http_version"`, + `"reason_phrase"`, and `"network_stream"`. + """ + self.status: int = status + self.headers: list[tuple[bytes, bytes]] = enforce_headers( + headers, name="headers" + ) + self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = ( + enforce_stream(content, name="content") + ) + self.extensions = {} if extensions is None else extensions + + self._stream_consumed = False + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + if isinstance(self.stream, typing.Iterable): + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'response.read()' first." + ) + else: + raise RuntimeError( + "Attempted to access 'response.content' on a streaming response. " + "Call 'await response.aread()' first." + ) + return self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.status}]>" + + # Sync interface... + + def read(self) -> bytes: + if not isinstance(self.stream, typing.Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to read an asynchronous response using 'response.read()'. " + "You should use 'await response.aread()' instead." + ) + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.iter_stream()]) + return self._content + + def iter_stream(self) -> typing.Iterator[bytes]: + if not isinstance(self.stream, typing.Iterable): # pragma: nocover + raise RuntimeError( + "Attempted to stream an asynchronous response using 'for ... in " + "response.iter_stream()'. " + "You should use 'async for ... in response.aiter_stream()' instead." + ) + if self._stream_consumed: + raise RuntimeError( + "Attempted to call 'for ... in response.iter_stream()' more than once."
+            )
+        self._stream_consumed = True
+        for chunk in self.stream:
+            yield chunk
+
+    def close(self) -> None:
+        if not isinstance(self.stream, typing.Iterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to close an asynchronous response using 'response.close()'. "
+                "You should use 'await response.aclose()' instead."
+            )
+        if hasattr(self.stream, "close"):
+            self.stream.close()
+
+    # Async interface...
+
+    async def aread(self) -> bytes:
+        if not isinstance(self.stream, typing.AsyncIterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to read a synchronous response using "
+                "'await response.aread()'. "
+                "You should use 'response.read()' instead."
+            )
+        if not hasattr(self, "_content"):
+            self._content = b"".join([part async for part in self.aiter_stream()])
+        return self._content
+
+    async def aiter_stream(self) -> typing.AsyncIterator[bytes]:
+        if not isinstance(self.stream, typing.AsyncIterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to stream a synchronous response using 'async for ... in "
+                "response.aiter_stream()'. "
+                "You should use 'for ... in response.iter_stream()' instead."
+            )
+        if self._stream_consumed:
+            raise RuntimeError(
+                "Attempted to call 'async for ... in response.aiter_stream()' "
+                "more than once."
+            )
+        self._stream_consumed = True
+        async for chunk in self.stream:
+            yield chunk
+
+    async def aclose(self) -> None:
+        if not isinstance(self.stream, typing.AsyncIterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to close a synchronous response using "
+                "'await response.aclose()'. "
+                "You should use 'response.close()' instead."
+            )
+        if hasattr(self.stream, "aclose"):
+            await self.stream.aclose()
+
+
+class Proxy:
+    def __init__(
+        self,
+        url: URL | bytes | str,
+        auth: tuple[bytes | str, bytes | str] | None = None,
+        headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+    ):
+        self.url = enforce_url(url, name="url")
+        self.headers = enforce_headers(headers, name="headers")
+        self.ssl_context = ssl_context
+
+        if auth is not None:
+            username = enforce_bytes(auth[0], name="auth")
+            password = enforce_bytes(auth[1], name="auth")
+            userpass = username + b":" + password
+            authorization = b"Basic " + base64.b64encode(userpass)
+            self.auth: tuple[bytes, bytes] | None = (username, password)
+            self.headers = [(b"Proxy-Authorization", authorization)] + self.headers
+        else:
+            self.auth = None
diff --git a/venv/lib/python3.12/site-packages/httpcore/_ssl.py b/venv/lib/python3.12/site-packages/httpcore/_ssl.py
new file mode 100644
index 0000000..c99c5a6
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_ssl.py
@@ -0,0 +1,9 @@
+import ssl
+
+import certifi
+
+
+def default_ssl_context() -> ssl.SSLContext:
+    context = ssl.create_default_context()
+    context.load_verify_locations(certifi.where())
+    return context
diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py
new file mode 100644
index 0000000..b476d76
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py
@@ -0,0 +1,39 @@
+from .connection import HTTPConnection
+from .connection_pool import ConnectionPool
+from .http11 import HTTP11Connection
+from .http_proxy import HTTPProxy
+from .interfaces import ConnectionInterface
+
+try:
+    from .http2 import HTTP2Connection
+except ImportError:  # pragma: nocover
+
+    class HTTP2Connection:  # type: ignore
+        def __init__(self, *args, **kwargs) -> None:  # type: ignore
+ raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import SOCKSProxy +except ImportError: # pragma: nocover + + class SOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", +] diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5c90845 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000..e83f61b Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 0000000..de8acd1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc new file mode 100644 index 0000000..d08a6e0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc new file mode 100644 index 0000000..8a2721b Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 0000000..98ab636 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 0000000..10a9a59 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 0000000..61da5a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py b/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py new file mode 100644 index 0000000..363f8be --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: ConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + with self._request_lock: + if self._connection is None: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return self._connection.handle_request(request) + + def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": 
self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
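+    #
+    # An illustrative sketch of direct usage (not part of the upstream
+    # module; the origin shown is hypothetical):
+    #
+    #     origin = Origin(scheme=b"https", host=b"www.example.com", port=443)
+    #     with HTTPConnection(origin=origin) as connection:
+    #         response = connection.request("GET", "https://www.example.com/")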
+ + def __enter__(self) -> HTTPConnection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py b/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 0000000..9ccfa53 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import Event, ShieldCancellation, ThreadLock +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class PoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: ConnectionInterface | None = None + self._connection_acquired = Event() + + def assign_to_connection(self, connection: ConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: float | None = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. 
Using `local_address="0.0.0.0"`
+                will connect using an `AF_INET` address (IPv4), while using
+                `local_address="::"` will connect using an `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+            socket_options: Socket options to set on the TCP socket when the
+                connection is established.
+        """
+        self._ssl_context = ssl_context
+        self._proxy = proxy
+        self._max_connections = (
+            sys.maxsize if max_connections is None else max_connections
+        )
+        self._max_keepalive_connections = (
+            sys.maxsize
+            if max_keepalive_connections is None
+            else max_keepalive_connections
+        )
+        self._max_keepalive_connections = min(
+            self._max_connections, self._max_keepalive_connections
+        )
+
+        self._keepalive_expiry = keepalive_expiry
+        self._http1 = http1
+        self._http2 = http2
+        self._retries = retries
+        self._local_address = local_address
+        self._uds = uds
+
+        self._network_backend = (
+            SyncBackend() if network_backend is None else network_backend
+        )
+        self._socket_options = socket_options
+
+        # The mutable state on a connection pool is the queue of incoming requests,
+        # and the set of connections that are servicing those requests.
+        self._connections: list[ConnectionInterface] = []
+        self._requests: list[PoolRequest] = []
+
+        # We only mutate the state of the connection pool within an 'optional_thread_lock'
+        # context. This holds a threading lock unless we're running in async mode,
+        # in which case it is a no-op.
+        self._optional_thread_lock = ThreadLock()
+
+    def create_connection(self, origin: Origin) -> ConnectionInterface:
+        if self._proxy is not None:
+            if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+                from .socks_proxy import Socks5Connection
+
+                return Socks5Connection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_auth=self._proxy.auth,
+                    remote_origin=origin,
+                    ssl_context=self._ssl_context,
+                    keepalive_expiry=self._keepalive_expiry,
+                    http1=self._http1,
+                    http2=self._http2,
+                    network_backend=self._network_backend,
+                )
+            elif origin.scheme == b"http":
+                from .http_proxy import ForwardHTTPConnection
+
+                return ForwardHTTPConnection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_headers=self._proxy.headers,
+                    proxy_ssl_context=self._proxy.ssl_context,
+                    remote_origin=origin,
+                    keepalive_expiry=self._keepalive_expiry,
+                    network_backend=self._network_backend,
+                )
+            from .http_proxy import TunnelHTTPConnection
+
+            return TunnelHTTPConnection(
+                proxy_origin=self._proxy.url.origin,
+                proxy_headers=self._proxy.headers,
+                proxy_ssl_context=self._proxy.ssl_context,
+                remote_origin=origin,
+                ssl_context=self._ssl_context,
+                keepalive_expiry=self._keepalive_expiry,
+                http1=self._http1,
+                http2=self._http2,
+                network_backend=self._network_backend,
+            )
+
+        return HTTPConnection(
+            origin=origin,
+            ssl_context=self._ssl_context,
+            keepalive_expiry=self._keepalive_expiry,
+            http1=self._http1,
+            http2=self._http2,
+            retries=self._retries,
+            local_address=self._local_address,
+            uds=self._uds,
+            network_backend=self._network_backend,
+            socket_options=self._socket_options,
+        )
+
+    @property
+    def connections(self) -> list[ConnectionInterface]:
+        """
+        Return a list of the connections currently in the pool.
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._connections)
+
+    def handle_request(self, request: Request) -> Response:
+        """
+        Send an HTTP request, and return an HTTP response.
+
+        This is the core implementation that is called into by `.request()` or `.stream()`.
+        """
+        scheme = request.url.scheme.decode()
+        if scheme == "":
+            raise UnsupportedProtocol(
+                "Request URL is missing an 'http://' or 'https://' protocol."
+            )
+        if scheme not in ("http", "https", "ws", "wss"):
+            raise UnsupportedProtocol(
+                f"Request URL has an unsupported protocol '{scheme}://'."
+            )
+
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("pool", None)
+
+        with self._optional_thread_lock:
+            # Add the incoming request to our request queue.
+            pool_request = PoolRequest(request)
+            self._requests.append(pool_request)
+
+        try:
+            while True:
+                with self._optional_thread_lock:
+                    # Assign incoming requests to available connections,
+                    # closing or creating new connections as required.
+                    closing = self._assign_requests_to_connections()
+                self._close_connections(closing)
+
+                # Wait until this request has an assigned connection.
+                connection = pool_request.wait_for_connection(timeout=timeout)
+
+                try:
+                    # Send the request on the assigned connection.
+                    response = connection.handle_request(
+                        pool_request.request
+                    )
+                except ConnectionNotAvailable:
+                    # In some cases a connection may initially be available to
+                    # handle a request, but then become unavailable.
+                    #
+                    # In this case we clear the connection and try again.
+                    pool_request.clear_connection()
+                else:
+                    break  # pragma: nocover
+
+        except BaseException as exc:
+            with self._optional_thread_lock:
+                # For any exception or cancellation we remove the request from
+                # the queue, and then re-assign requests to connections.
+                self._requests.remove(pool_request)
+                closing = self._assign_requests_to_connections()
+
+            self._close_connections(closing)
+            raise exc from None
+
+        # Return the response. Note that in this case we still have to manage
+        # the point at which the response is closed.
+        assert isinstance(response.stream, typing.Iterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=PoolByteStream(
+                stream=response.stream, pool_request=pool_request, pool=self
+            ),
+            extensions=response.extensions,
+        )
+
+    def _assign_requests_to_connections(self) -> list[ConnectionInterface]:
+        """
+        Manage the state of the connection pool, assigning incoming
+        requests to connections as available.
+
+        Called whenever a request is added to or removed from the pool.
+
+        Any closing connections are returned, allowing the I/O for closing
+        those connections to be handled separately.
+        """
+        closing_connections = []
+
+        # First we handle cleaning up any connections that are closed,
+        # have expired their keep-alive, or surplus idle connections.
+        for connection in list(self._connections):
+            if connection.is_closed():
+                # log: "removing closed connection"
+                self._connections.remove(connection)
+            elif connection.has_expired():
+                # log: "closing expired connection"
+                self._connections.remove(connection)
+                closing_connections.append(connection)
+            elif (
+                connection.is_idle()
+                and len([connection.is_idle() for connection in self._connections])
+                > self._max_keepalive_connections
+            ):
+                # log: "closing idle connection"
+                self._connections.remove(connection)
+                closing_connections.append(connection)
+
+        # Assign queued requests to connections.
+ queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + def _close_connections(self, closing: list[ConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with ShieldCancellation(): + for connection in closing: + connection.close() + + def close(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
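+        #
+        # Typically the pool is closed via its context manager rather than by
+        # calling close() directly, for example (an illustrative sketch):
+        #
+        #     with ConnectionPool() as pool:
+        #         response = pool.request("GET", "https://www.example.com/")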
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + self._close_connections(closing_connections) + + def __enter__(self) -> ConnectionPool: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.Iterable[bytes], + pool_request: PoolRequest, + pool: ConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + try: + for part in self._stream: + yield part + except BaseException as exc: + self.close() + raise exc from None + + def close(self) -> None: + if not self._closed: + self._closed = True + with ShieldCancellation(): + if hasattr(self._stream, "close"): + self._stream.close() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + self._pool._close_connections(closing) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py new file mode 100644 index 0000000..ebd3a97 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP11Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + 
max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+        )
+
+    def handle_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection "
+                f"to {self._origin}"
+            )
+
+        with self._state_lock:
+            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+                self._request_count += 1
+                self._state = HTTPConnectionState.ACTIVE
+                self._expire_at = None
+            else:
+                raise ConnectionNotAvailable()
+
+        try:
+            kwargs = {"request": request}
+            try:
+                with Trace(
+                    "send_request_headers", logger, request, kwargs
+                ) as trace:
+                    self._send_request_headers(**kwargs)
+                with Trace("send_request_body", logger, request, kwargs) as trace:
+                    self._send_request_body(**kwargs)
+            except WriteError:
+                # If we get a write error while we're writing the request,
+                # then we suppress this error and move on to attempting to
+                # read the response. Servers can sometimes close the request
+                # pre-emptively and then respond with a well-formed HTTP
+                # error response.
+                pass
+
+            with Trace(
+                "receive_response_headers", logger, request, kwargs
+            ) as trace:
+                (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                    trailing_data,
+                ) = self._receive_response_headers(**kwargs)
+                trace.return_value = (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                )
+
+            network_stream = self._network_stream
+
+            # CONNECT or Upgrade request
+            if (status == 101) or (
+                (request.method == b"CONNECT") and (200 <= status < 300)
+            ):
+                network_stream = HTTP11UpgradeStream(network_stream, trailing_data)
+
+            return Response(
+                status=status,
+                headers=headers,
+                content=HTTP11ConnectionByteStream(self, request),
+                extensions={
+                    "http_version": http_version,
+                    "reason_phrase": reason_phrase,
+                    "network_stream": network_stream,
+                },
+            )
+        except BaseException as exc:
+            with ShieldCancellation():
+                with Trace("response_closed", logger, request) as trace:
+                    self._response_closed()
+            raise exc
+
+    # Sending the request...
+
+    def _send_request_headers(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
+            event = h11.Request(
+                method=request.method,
+                target=request.url.target,
+                headers=request.headers,
+            )
+        self._send_event(event, timeout=timeout)
+
+    def _send_request_body(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        assert isinstance(request.stream, typing.Iterable)
+        for chunk in request.stream:
+            event = h11.Data(data=chunk)
+            self._send_event(event, timeout=timeout)
+
+        self._send_event(h11.EndOfMessage(), timeout=timeout)
+
+    def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
+        bytes_to_send = self._h11_state.send(event)
+        if bytes_to_send is not None:
+            self._network_stream.write(bytes_to_send, timeout=timeout)
+
+    # Receiving the response...
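+    #
+    # As a rough sketch, h11 surfaces a response as a sequence of events
+    # (the values shown here are illustrative only):
+    #
+    #     h11.Response(status_code=200, headers=[...])
+    #     h11.Data(data=b"...")        # zero or more data events
+    #     h11.EndOfMessage()
+    #
+    # The methods below pull these events out of the h11 state machine,
+    # reading more data from the network stream whenever h11 reports that
+    # it needs it.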
+
+    def _receive_response_headers(
+        self, request: Request
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Response):
+                break
+            if (
+                isinstance(event, h11.InformationalResponse)
+                and event.status_code == 101
+            ):
+                break
+
+        http_version = b"HTTP/" + event.http_version
+
+        # h11 version 0.11+ supports a `raw_items` interface to get the
+        # raw header casing, rather than the enforced lowercase headers.
+        headers = event.headers.raw_items()
+
+        trailing_data, _ = self._h11_state.trailing_data
+
+        return http_version, event.status_code, event.reason, headers, trailing_data
+
+    def _receive_response_body(
+        self, request: Request
+    ) -> typing.Iterator[bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Data):
+                yield bytes(event.data)
+            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
+                break
+
+    def _receive_event(
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
+        while True:
+            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
+                event = self._h11_state.next_event()
+
+            if event is h11.NEED_DATA:
+                data = self._network_stream.read(
+                    self.READ_NUM_BYTES, timeout=timeout
+                )
+
+                # If we feed this case through h11 we'll raise an exception like:
+                #
+                #     httpcore.RemoteProtocolError: can't handle event type
+                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
+                #
+                # Which is accurate, but not very informative from an end-user
+                # perspective. Instead we handle this case distinctly and treat
+                # it as a ConnectError.
+                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
+                    msg = "Server disconnected without sending a response."
+                    raise RemoteProtocolError(msg)
+
+                self._h11_state.receive_data(data)
+            else:
+                # mypy fails to narrow the type in the if statement above
+                return event  # type: ignore[return-value]
+
+    def _response_closed(self) -> None:
+        with self._state_lock:
+            if (
+                self._h11_state.our_state is h11.DONE
+                and self._h11_state.their_state is h11.DONE
+            ):
+                self._state = HTTPConnectionState.IDLE
+                self._h11_state.start_next_cycle()
+                if self._keepalive_expiry is not None:
+                    now = time.monotonic()
+                    self._expire_at = now + self._keepalive_expiry
+            else:
+                self.close()
+
+    # Once the connection is no longer required...
+
+    def close(self) -> None:
+        # Note that this method unilaterally closes the connection, and does
+        # not have any kind of locking in place around it.
+        self._state = HTTPConnectionState.CLOSED
+        self._network_stream.close()
+
+    # The ConnectionInterface methods provide information about the state of
+    # the connection, allowing for a connection pooling implementation to
+    # determine when to reuse and when to close the connection...
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
+
+    def is_available(self) -> bool:
+        # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+        # being "available". The control flow which created the connection will
+        # be able to send an outgoing request, but the connection will not be
+        # acquired from the connection pool for any other request.
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> HTTP11Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() + + +class HTTP11UpgradeStream(NetworkStream): + def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return self._stream.read(max_bytes, timeout) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + self._stream.write(buffer, timeout) + + def close(self) -> None: + self._stream.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py new file mode 100644 index 0000000..ddcc189 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = Lock() + self._state_lock = Lock() + self._read_lock = Lock() + self._write_lock = Lock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with ShieldCancellation(): + self.close() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() + + self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+                if self._connection_terminated:  # pragma: nocover
+                    raise RemoteProtocolError(self._connection_terminated)
+                # If h2 raises a protocol error in some other state then we
+                # must somehow have made a protocol violation.
+                raise LocalProtocolError(exc)  # pragma: nocover
+
+            raise exc
+
+    def _send_connection_init(self, request: Request) -> None:
+        """
+        The HTTP/2 connection requires some initial setup before we can start
+        using individual request/response streams on it.
+        """
+        # Need to set these manually here instead of manipulating via
+        # __setitem__() otherwise the H2Connection will emit SettingsUpdate
+        # frames in addition to sending the undesired defaults.
+        self._h2_state.local_settings = h2.settings.Settings(
+            client=True,
+            initial_values={
+                # Disable PUSH_PROMISE frames from the server since we don't do anything
+                # with them for now. Maybe when we support caching?
+                h2.settings.SettingCodes.ENABLE_PUSH: 0,
+                # These two are taken from h2 for safe defaults
+                h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
+                h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
+            },
+        )
+
+        # Some websites (*cough* Yahoo *cough*) balk at this setting being
+        # present in the initial handshake since it's not defined in the original
+        # RFC despite the RFC mandating ignoring settings you don't know about.
+        del self._h2_state.local_settings[
+            h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
+        ]
+
+        self._h2_state.initiate_connection()
+        self._h2_state.increment_flow_control_window(2**24)
+        self._write_outgoing_data(request)
+
+    # Sending the request...
+
+    def _send_request_headers(self, request: Request, stream_id: int) -> None:
+        """
+        Send the request headers to a given stream ID.
+        """
+        end_stream = not has_body_headers(request)
+
+        # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
+        # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
+        # HTTP/1.1 style headers, and map them appropriately if we end up on
+        # an HTTP/2 connection.
+        authority = [v for k, v in request.headers if k.lower() == b"host"][0]
+
+        headers = [
+            (b":method", request.method),
+            (b":authority", authority),
+            (b":scheme", request.url.scheme),
+            (b":path", request.url.target),
+        ] + [
+            (k.lower(), v)
+            for k, v in request.headers
+            if k.lower()
+            not in (
+                b"host",
+                b"transfer-encoding",
+            )
+        ]
+
+        self._h2_state.send_headers(stream_id, headers, end_stream=end_stream)
+        self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
+        self._write_outgoing_data(request)
+
+    def _send_request_body(self, request: Request, stream_id: int) -> None:
+        """
+        Iterate over the request body sending it to a given stream ID.
+        """
+        if not has_body_headers(request):
+            return
+
+        assert isinstance(request.stream, typing.Iterable)
+        for data in request.stream:
+            self._send_stream_data(request, stream_id, data)
+        self._send_end_stream(request, stream_id)
+
+    def _send_stream_data(
+        self, request: Request, stream_id: int, data: bytes
+    ) -> None:
+        """
+        Send a single chunk of data in one or more data frames.
+        """
+        while data:
+            max_flow = self._wait_for_outgoing_flow(request, stream_id)
+            chunk_size = min(len(data), max_flow)
+            chunk, data = data[:chunk_size], data[chunk_size:]
+            self._h2_state.send_data(stream_id, chunk)
+            self._write_outgoing_data(request)
+
+    def _send_end_stream(self, request: Request, stream_id: int) -> None:
+        """
+        Send an empty data frame on a given stream ID with the END_STREAM flag set.
+ """ + self._h2_state.end_stream(stream_id) + self._write_outgoing_data(request) + + # Receiving the response... + + def _receive_response( + self, request: Request, stream_id: int + ) -> tuple[int, list[tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + assert event.headers is not None + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.Iterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + assert event.flow_controlled_length is not None + assert event.data is not None + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + def _receive_stream_event( + self, request: Request, stream_id: int + ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + def _receive_events( + self, request: Request, stream_id: int | None = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
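+    #
+    # An illustrative sketch of direct usage (not part of the upstream
+    # module; assumes an `origin` and an already-established network
+    # `stream`):
+    #
+    #     with HTTP2Connection(origin=origin, stream=stream) as connection:
+    #         response = connection.request("GET", "https://www.example.com/")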
+ + def __enter__(self) -> HTTP2Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 0000000..ecca88f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class HTTPProxy(ConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
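+
+    For example, a sketch of typical usage (the proxy address shown is
+    purely illustrative):
+
+    ```python
+    proxy = httpcore.HTTPProxy(proxy_url="http://127.0.0.1:8080/")
+    response = proxy.request("GET", "https://www.example.com/")
+    ```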
+ """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() 
+ + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, 
origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py b/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 0000000..e673d4c --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextlib.contextmanager + def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. 
+
+ While the connection is being established we may not yet know if it is going
+ to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+ treated as being available, but might ultimately raise `NewConnectionRequired`
+ exceptions if multiple requests are attempted over a connection
+ that ends up being established as HTTP/1.1.
+ """
+ raise NotImplementedError() # pragma: nocover
+
+ def has_expired(self) -> bool:
+ """
+ Return `True` if the connection is in a state where it should be closed.
+
+ This either means that the connection is idle and it has passed the
+ expiry time on its keep-alive, or that the server has sent an EOF.
+ """
+ raise NotImplementedError() # pragma: nocover
+
+ def is_idle(self) -> bool:
+ """
+ Return `True` if the connection is currently idle.
+ """
+ raise NotImplementedError() # pragma: nocover
+
+ def is_closed(self) -> bool:
+ """
+ Return `True` if the connection has been closed.
+
+ Used when a response is closed to determine if the connection may be
+ returned to the connection pool or not.
+ """
+ raise NotImplementedError() # pragma: nocover
diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py
new file mode 100644
index 0000000..0ca96dd
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py
@@ -0,0 +1,341 @@
+from __future__ import annotations
+
+import logging
+import ssl
+
+import socksio
+
+from .._backends.sync import SyncBackend
+from .._backends.base import NetworkBackend, NetworkStream
+from .._exceptions import ConnectionNotAvailable, ProxyError
+from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
+from .._ssl import default_ssl_context
+from .._synchronization import Lock
+from .._trace import Trace
+from .connection_pool import ConnectionPool
+from .http11 import HTTP11Connection
+from .interfaces import ConnectionInterface
+
+logger = logging.getLogger("httpcore.socks")
+
+
+AUTH_METHODS = {
+ b"\x00": "NO AUTHENTICATION REQUIRED",
+ b"\x01": "GSSAPI",
+ b"\x02": "USERNAME/PASSWORD",
+ b"\xff": "NO ACCEPTABLE METHODS",
+}
+
+REPLY_CODES = {
+ b"\x00": "Succeeded",
+ b"\x01": "General SOCKS server failure",
+ b"\x02": "Connection not allowed by ruleset",
+ b"\x03": "Network unreachable",
+ b"\x04": "Host unreachable",
+ b"\x05": "Connection refused",
+ b"\x06": "TTL expired",
+ b"\x07": "Command not supported",
+ b"\x08": "Address type not supported",
+}
+
+
+def _init_socks5_connection(
+ stream: NetworkStream,
+ *,
+ host: bytes,
+ port: int,
+ auth: tuple[bytes, bytes] | None = None,
+) -> None:
+ conn = socksio.socks5.SOCKS5Connection()
+
+ # Auth method request
+ auth_method = (
+ socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+ if auth is None
+ else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+ )
+ conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method]))
+ outgoing_bytes = conn.data_to_send()
+ stream.write(outgoing_bytes)
+
+ # Auth method response
+ incoming_bytes = stream.read(max_bytes=4096)
+ response = conn.receive_data(incoming_bytes)
+ assert isinstance(response, socksio.socks5.SOCKS5AuthReply)
+ if response.method != auth_method:
+ requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
+ responded = AUTH_METHODS.get(response.method, "UNKNOWN")
+ raise ProxyError(
+ f"Requested {requested} from proxy server, but got {responded}."
+ )
+
+ if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+ # Username/password request
+ assert auth is not None
+ username, password = auth
+ conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
+ outgoing_bytes = conn.data_to_send()
+ stream.write(outgoing_bytes)
+
+ # Username/password response
+ incoming_bytes = stream.read(max_bytes=4096)
+ response = conn.receive_data(incoming_bytes)
+ assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
+ if not response.success:
+ raise ProxyError("Invalid username/password")
+
+ # Connect request
+ conn.send(
+ socksio.socks5.SOCKS5CommandRequest.from_address(
+ socksio.socks5.SOCKS5Command.CONNECT, (host, port)
+ )
+ )
+ outgoing_bytes = conn.data_to_send()
+ stream.write(outgoing_bytes)
+
+ # Connect response
+ incoming_bytes = stream.read(max_bytes=4096)
+ response = conn.receive_data(incoming_bytes)
+ assert isinstance(response, socksio.socks5.SOCKS5Reply)
+ if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
+ reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+ raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class SOCKSProxy(ConnectionPool): # pragma: nocover
+ """
+ A connection pool that sends requests via a SOCKS proxy.
+ """
+
+ def __init__(
+ self,
+ proxy_url: URL | bytes | str,
+ proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
+ http1: bool = True,
+ http2: bool = False,
+ retries: int = 0,
+ network_backend: NetworkBackend | None = None,
+ ) -> None:
+ """
+ A connection pool for making HTTP requests.
+
+ Parameters:
+ proxy_url: The URL to use when connecting to the proxy server.
+ For example `"socks5://127.0.0.1:1080/"`.
+ proxy_auth: Any proxy authentication as a two-tuple of
+ (username, password). May be either bytes or ascii-only str.
+ ssl_context: An SSL context to use for verifying connections.
+ If not specified, the default `httpcore.default_ssl_context()`
+ will be used.
+ max_connections: The maximum number of concurrent HTTP connections that
+ the pool should allow. Any attempt to send a request on a pool that
+ would exceed this amount will block until a connection is available.
+ max_keepalive_connections: The maximum number of idle HTTP connections
+ that will be maintained in the pool.
+ keepalive_expiry: The duration in seconds that an idle HTTP connection
+ may be maintained for before being expired from the pool.
+ http1: A boolean indicating if HTTP/1.1 requests should be supported
+ by the connection pool. Defaults to True.
+ http2: A boolean indicating if HTTP/2 requests should be supported by
+ the connection pool. Defaults to False.
+ retries: The maximum number of retries when trying to establish
+ a connection.
+ network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: ConnectionInterface | None = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and 
ssl_object.selected_alpn_protocol() == "h2"
+ )
+
+ # Create the HTTP/1.1 or HTTP/2 connection
+ if http2_negotiated or (
+ self._http2 and not self._http1
+ ): # pragma: nocover
+ from .http2 import HTTP2Connection
+
+ self._connection = HTTP2Connection(
+ origin=self._remote_origin,
+ stream=stream,
+ keepalive_expiry=self._keepalive_expiry,
+ )
+ else:
+ self._connection = HTTP11Connection(
+ origin=self._remote_origin,
+ stream=stream,
+ keepalive_expiry=self._keepalive_expiry,
+ )
+ except Exception as exc:
+ self._connect_failed = True
+ raise exc
+ elif not self._connection.is_available(): # pragma: nocover
+ raise ConnectionNotAvailable()
+
+ return self._connection.handle_request(request)
+
+ def can_handle_request(self, origin: Origin) -> bool:
+ return origin == self._remote_origin
+
+ def close(self) -> None:
+ if self._connection is not None:
+ self._connection.close()
+
+ def is_available(self) -> bool:
+ if self._connection is None: # pragma: nocover
+ # If HTTP/2 support is enabled, and the resulting connection could
+ # end up as HTTP/2 then we should indicate the connection as being
+ # available to service multiple requests.
+ return (
+ self._http2
+ and (self._remote_origin.scheme == b"https" or not self._http1)
+ and not self._connect_failed
+ )
+ return self._connection.is_available()
+
+ def has_expired(self) -> bool:
+ if self._connection is None: # pragma: nocover
+ return self._connect_failed
+ return self._connection.has_expired()
+
+ def is_idle(self) -> bool:
+ if self._connection is None: # pragma: nocover
+ return self._connect_failed
+ return self._connection.is_idle()
+
+ def is_closed(self) -> bool:
+ if self._connection is None: # pragma: nocover
+ return self._connect_failed
+ return self._connection.is_closed()
+
+ def info(self) -> str:
+ if self._connection is None: # pragma: nocover
+ return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
+ return self._connection.info()
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} [{self.info()}]>"
diff --git a/venv/lib/python3.12/site-packages/httpcore/_synchronization.py b/venv/lib/python3.12/site-packages/httpcore/_synchronization.py
new file mode 100644
index 0000000..2ecc9e9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_synchronization.py
@@ -0,0 +1,318 @@
+from __future__ import annotations
+
+import threading
+import types
+
+from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions
+
+# Our async synchronization primitives use either 'anyio' or 'trio' depending
+# on whether they're running under asyncio or trio.
+
+try:
+ import trio
+except (ImportError, NotImplementedError): # pragma: nocover
+ trio = None # type: ignore
+
+try:
+ import anyio
+except ImportError: # pragma: nocover
+ anyio = None # type: ignore
+
+
+def current_async_library() -> str:
+ # Determine if we're running under trio or asyncio.
+ # See https://sniffio.readthedocs.io/en/latest/
+ try:
+ import sniffio
+ except ImportError: # pragma: nocover
+ environment = "asyncio"
+ else:
+ environment = sniffio.current_async_library()
+
+ if environment not in ("asyncio", "trio"): # pragma: nocover
+ raise RuntimeError("Running under an unsupported async environment.")
+
+ if environment == "asyncio" and anyio is None: # pragma: nocover
+ raise RuntimeError(
+ "Running with asyncio requires installation of 'httpcore[asyncio]'."
+ ) + + if environment == "trio" and trio is None: # pragma: nocover + raise RuntimeError( + "Running with trio requires installation of 'httpcore[trio]'." + ) + + return environment + + +class AsyncLock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_lock = trio.Lock() + elif self._backend == "asyncio": + self._anyio_lock = anyio.Lock() + + async def __aenter__(self) -> AsyncLock: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_lock.acquire() + elif self._backend == "asyncio": + await self._anyio_lock.acquire() + + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self._backend == "trio": + self._trio_lock.release() + elif self._backend == "asyncio": + self._anyio_lock.release() + + +class AsyncThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. + """ + + def __enter__(self) -> AsyncThreadLock: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + pass + + +class AsyncEvent: + def __init__(self) -> None: + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a lock with the correct implementation. + """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_event = trio.Event() + elif self._backend == "asyncio": + self._anyio_event = anyio.Event() + + def set(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + self._trio_event.set() + elif self._backend == "asyncio": + self._anyio_event.set() + + async def wait(self, timeout: float | None = None) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} + timeout_or_inf = float("inf") if timeout is None else timeout + with map_exceptions(trio_exc_map): + with trio.fail_after(timeout_or_inf): + await self._trio_event.wait() + elif self._backend == "asyncio": + anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} + with map_exceptions(anyio_exc_map): + with anyio.fail_after(timeout): + await self._anyio_event.wait() + + +class AsyncSemaphore: + def __init__(self, bound: int) -> None: + self._bound = bound + self._backend = "" + + def setup(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a semaphore with the correct implementation. 
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + elif self._backend == "asyncio": + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + elif self._backend == "asyncio": + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + elif self._backend == "asyncio": + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled. + # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = current_async_library() + + if self._backend == "trio": + self._trio_shield = trio.CancelScope(shield=True) + elif self._backend == "asyncio": + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> AsyncShieldCancellation: + if self._backend == "trio": + self._trio_shield.__enter__() + elif self._backend == "asyncio": + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + elif self._backend == "asyncio": + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... + + +class Lock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> Lock: + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self._lock.release() + + +class ThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. 
+ """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> ThreadLock: + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: float | None = None) -> None: + if timeout == float("inf"): # pragma: no cover + timeout = None + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. + def __enter__(self) -> ShieldCancellation: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/httpcore/_trace.py b/venv/lib/python3.12/site-packages/httpcore/_trace.py new file mode 100644 index 0000000..5f1cd7c --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_trace.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import inspect +import logging +import types +import typing + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Request | None = None, + kwargs: dict[str, typing.Any] | None = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: typing.Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: dict[str, typing.Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." 
+ )
+
+ if self.debug:
+ if not info or "return_value" in info and info["return_value"] is None:
+ message = name
+ else:
+ args = " ".join([f"{key}={value!r}" for key, value in info.items()])
+ message = f"{name} {args}"
+ self.logger.debug(message)
+
+ def __enter__(self) -> Trace:
+ if self.should_trace:
+ info = self.kwargs
+ self.trace(f"{self.name}.started", info)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
+ ) -> None:
+ if self.should_trace:
+ if exc_value is None:
+ info = {"return_value": self.return_value}
+ self.trace(f"{self.name}.complete", info)
+ else:
+ info = {"exception": exc_value}
+ self.trace(f"{self.name}.failed", info)
+
+ async def atrace(self, name: str, info: dict[str, typing.Any]) -> None:
+ if self.trace_extension is not None:
+ prefix_and_name = f"{self.prefix}.{name}"
+ coro = self.trace_extension(prefix_and_name, info)
+ if not inspect.iscoroutine(coro): # pragma: no cover
+ raise TypeError(
+ "If you're using an asynchronous interface, "
+ "the callback of the `trace` extension should "
+ "be an asynchronous function rather than a normal function."
+ )
+ await coro
+
+ if self.debug:
+ if not info or "return_value" in info and info["return_value"] is None:
+ message = name
+ else:
+ args = " ".join([f"{key}={value!r}" for key, value in info.items()])
+ message = f"{name} {args}"
+ self.logger.debug(message)
+
+ async def __aenter__(self) -> Trace:
+ if self.should_trace:
+ info = self.kwargs
+ await self.atrace(f"{self.name}.started", info)
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
+ ) -> None:
+ if self.should_trace:
+ if exc_value is None:
+ info = {"return_value": self.return_value}
+ await self.atrace(f"{self.name}.complete", info)
+ else:
+ info = {"exception": exc_value}
+ await self.atrace(f"{self.name}.failed", info)
diff --git a/venv/lib/python3.12/site-packages/httpcore/_utils.py b/venv/lib/python3.12/site-packages/httpcore/_utils.py
new file mode 100644
index 0000000..c44ff93
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_utils.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+import select
+import socket
+import sys
+
+
+def is_socket_readable(sock: socket.socket | None) -> bool:
+ """
+ Return whether a socket, as identified by its file descriptor, is readable.
+ "A socket is readable" means that the read buffer isn't empty, i.e. that calling
+ .recv() on it would immediately return some data.
+ """
+ # NOTE: we want to check for readability without actually attempting to read, because
+ # we don't want to block forever if it's not readable.
+
+ # In the case that the socket no longer exists, or cannot return a file
+ # descriptor, we treat it as being readable, as if the next read operation
+ # on it is ready to return the terminating `b""`.
+ sock_fd = None if sock is None else sock.fileno()
+ if sock_fd is None or sock_fd < 0: # pragma: nocover
+ return True
+
+ # The implementation below was stolen from:
+ # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478
+ # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316
+
+ # Use select.select on Windows and when poll is unavailable, and select.poll
+ # everywhere else. (E.g. when eventlet is in use.
See #327) + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/venv/lib/python3.12/site-packages/httpcore/py.typed b/venv/lib/python3.12/site-packages/httpcore/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/METADATA new file mode 100644 index 0000000..6e7d27d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/METADATA @@ -0,0 +1,130 @@ +Metadata-Version: 2.4 +Name: httptools +Version: 0.7.1 +Summary: A collection of framework independent HTTP protocol utils. +Author-email: Yury Selivanov +License-Expression: MIT +Project-URL: Homepage, https://github.com/MagicStack/httptools +Platform: macOS +Platform: POSIX +Platform: Windows +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Environment :: Web Environment +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: test +Dynamic: license-file +Dynamic: platform + +![Tests](https://github.com/MagicStack/httptools/workflows/Tests/badge.svg) + +httptools is a Python binding for the nodejs HTTP parser. + +The package is available on PyPI: `pip install httptools`. + + +# APIs + +httptools contains two classes `httptools.HttpRequestParser`, +`httptools.HttpResponseParser` (fulfilled through +[llhttp](https://github.com/nodejs/llhttp)) and a function for +parsing URLs `httptools.parse_url` (through +[http-parse](https://github.com/nodejs/http-parser) for now). +See unittests for examples. + + +```python + +class HttpRequestParser: + + def __init__(self, protocol): + """HttpRequestParser + + protocol -- a Python object with the following methods + (all optional): + + - on_message_begin() + - on_url(url: bytes) + - on_header(name: bytes, value: bytes) + - on_headers_complete() + - on_body(body: bytes) + - on_message_complete() + - on_chunk_header() + - on_chunk_complete() + - on_status(status: bytes) + """ + + def get_http_version(self) -> str: + """Return an HTTP protocol version.""" + + def should_keep_alive(self) -> bool: + """Return ``True`` if keep-alive mode is preferred.""" + + def should_upgrade(self) -> bool: + """Return ``True`` if the parsed request is a valid Upgrade request. + The method exposes a flag set just before on_headers_complete. + Calling this method earlier will only yield `False`. + """ + + def feed_data(self, data: bytes): + """Feed data to the parser. + + Will eventually trigger callbacks on the ``protocol`` + object. + + On HTTP upgrade, this method will raise an + ``HttpParserUpgrade`` exception, with its sole argument + set to the offset of the non-HTTP data in ``data``. 
+ """ + + def get_method(self) -> bytes: + """Return HTTP request method (GET, HEAD, etc)""" + + +class HttpResponseParser: + + """Has all methods except ``get_method()`` that + HttpRequestParser has.""" + + def get_status_code(self) -> int: + """Return the status code of the HTTP response""" + + +def parse_url(url: bytes): + """Parse URL strings into a structured Python object. + + Returns an instance of ``httptools.URL`` class with the + following attributes: + + - schema: bytes + - host: bytes + - port: int + - path: bytes + - query: bytes + - fragment: bytes + - userinfo: bytes + """ +``` + + +# Development + +1. Clone this repository with + `git clone --recursive git@github.com:MagicStack/httptools.git` + +2. Create a virtual environment with Python 3: + `python3 -m venv envname` + +3. Activate the environment with `source envname/bin/activate` + +4. Run `make` and `make test`. + + +# License + +MIT. diff --git a/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/RECORD new file mode 100644 index 0000000..f04292f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/RECORD @@ -0,0 +1,25 @@ +httptools-0.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httptools-0.7.1.dist-info/METADATA,sha256=KCA-E3UzuXwawAKyZOvig0wFWhivdukfugpZB6j_PCE,3472 +httptools-0.7.1.dist-info/RECORD,, +httptools-0.7.1.dist-info/WHEEL,sha256=mX4U4odf6w47aVjwZUmTYd1MF9BbrhVLKlaWSvZwHEk,186 +httptools-0.7.1.dist-info/licenses/LICENSE,sha256=9Fc-fLdnZ0X7W402-lSKqT45HPtoct2s1lEwxF6mqS0,1093 +httptools-0.7.1.dist-info/top_level.txt,sha256=APjJKTbZcj0OQ4fdgf2eTCk82nK1n2BFXOD7ky41MPY,10 +httptools/__init__.py,sha256=plt3MIbueJdco9Dy7zoH3ksLNeyirqWagat5rwRmAjo,147 +httptools/__pycache__/__init__.cpython-312.pyc,, +httptools/__pycache__/_version.cpython-312.pyc,, +httptools/_version.py,sha256=Yk1-X6eDU4hz1WslzAhZspK9gGNybULdq2TPINdaWwA,575 +httptools/parser/__init__.py,sha256=j6BrDPUt_nQDIeHnlAQzlR7mIihef0FKEoUsZFjXU-U,201 +httptools/parser/__pycache__/__init__.cpython-312.pyc,, +httptools/parser/__pycache__/errors.cpython-312.pyc,, +httptools/parser/__pycache__/protocol.cpython-312.pyc,, +httptools/parser/cparser.pxd,sha256=4qBxnma83Vz86Z9sOZRxjqYj20A-aLSWVGXZgTVLJqE,4977 +httptools/parser/errors.py,sha256=ZVrtN1smPIb_opQ2Ud3uCbGlNLMlECYM2-6S7r5LnHs,566 +httptools/parser/parser.cpython-312-x86_64-linux-gnu.so,sha256=ykVrTqSJkh92eBUCq6LI2cJrHf4UzjV510UdfUa6kTQ,1264464 +httptools/parser/parser.pyi,sha256=KuVXCsQrOEVhATJ760pCv6MnC_jFoSjtqws6Chqve9I,1861 +httptools/parser/parser.pyx,sha256=x0BUY9EzHNKCDaw-U8bkZ1MaKGtrOQ8iVCm1IuOtEQI,15140 +httptools/parser/protocol.py,sha256=lMU6OZfKi59cFYDM_yId7YFOu-zflHGeGl2dI3iFm1Y,542 +httptools/parser/python.pxd,sha256=zWCdGZh34fyQNt3BUHIUjPqY8a5sodRUkfdABxqYHgQ,138 +httptools/parser/url_cparser.pxd,sha256=X5dDI8A7T0l5HL_Czt0mTs0l_d2lXnUDHx1TN8LeiCM,779 +httptools/parser/url_parser.cpython-312-x86_64-linux-gnu.so,sha256=pVGqGBnyBpbJ8b5c_ki2vntWSUz0uOg-_uLLJ3IH9rA,483328 +httptools/parser/url_parser.pyi,sha256=JxsXm4dlIH-OK1dz6_h3Kle63eJzh_AyAPOixbqlRmc,565 +httptools/parser/url_parser.pyx,sha256=ZJVUZqrIDdhzVodA7tTtoFb570av-SczIyh2oAZXKzM,3758 diff --git a/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/WHEEL new file mode 100644 index 0000000..9921a02 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 
+Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_5_x86_64 +Tag: cp312-cp312-manylinux1_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000..79a03ca --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2015 MagicStack Inc. http://magic.io + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/top_level.txt new file mode 100644 index 0000000..bef3b40 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools-0.7.1.dist-info/top_level.txt @@ -0,0 +1 @@ +httptools diff --git a/venv/lib/python3.12/site-packages/httptools/__init__.py b/venv/lib/python3.12/site-packages/httptools/__init__.py new file mode 100644 index 0000000..972053e --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/__init__.py @@ -0,0 +1,6 @@ +from . import parser +from .parser import * # NOQA + +from ._version import __version__ # NOQA + +__all__ = parser.__all__ + ('__version__',) # NOQA diff --git a/venv/lib/python3.12/site-packages/httptools/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httptools/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e985dbb Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httptools/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/httptools/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000..4d196d6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/__pycache__/_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httptools/_version.py b/venv/lib/python3.12/site-packages/httptools/_version.py new file mode 100644 index 0000000..9dd51c1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/_version.py @@ -0,0 +1,13 @@ +# This file MUST NOT contain anything but the __version__ assignment. +# +# When making a release, change the value of __version__ +# to an appropriate value, and open a pull request against +# the correct branch (master if making a new feature release). 
+# The commit message MUST contain a properly formatted release +# log, and the commit must be signed. +# +# The release automation will: build and test the packages for the +# supported platforms, publish the packages on PyPI, merge the PR +# to the target branch, create a Git tag pointing to the commit. + +__version__ = '0.7.1' diff --git a/venv/lib/python3.12/site-packages/httptools/parser/__init__.py b/venv/lib/python3.12/site-packages/httptools/parser/__init__.py new file mode 100644 index 0000000..1d8df43 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/parser/__init__.py @@ -0,0 +1,6 @@ +from .protocol import HTTPProtocol +from .parser import * # NoQA +from .errors import * # NoQA +from .url_parser import * # NoQA + +__all__ = parser.__all__ + errors.__all__ + url_parser.__all__ # NoQA diff --git a/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9e65430 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..e3ce8a1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/errors.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/protocol.cpython-312.pyc b/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/protocol.cpython-312.pyc new file mode 100644 index 0000000..4889eaf Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/parser/__pycache__/protocol.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httptools/parser/cparser.pxd b/venv/lib/python3.12/site-packages/httptools/parser/cparser.pxd new file mode 100644 index 0000000..3281864 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/parser/cparser.pxd @@ -0,0 +1,167 @@ +from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t + + +cdef extern from "llhttp.h": + struct llhttp__internal_s: + int32_t _index + void *_span_pos0 + void *_span_cb0 + int32_t error + const char *reason + const char *error_pos + void *data + void *_current + uint64_t content_length + uint8_t type + uint8_t method + uint8_t http_major + uint8_t http_minor + uint8_t header_state + uint16_t flags + uint8_t upgrade + uint16_t status_code + uint8_t finish + void *settings + ctypedef llhttp__internal_s llhttp__internal_t + ctypedef llhttp__internal_t llhttp_t + + ctypedef int (*llhttp_data_cb) (llhttp_t*, + const char *at, + size_t length) except -1 + + ctypedef int (*llhttp_cb) (llhttp_t*) except -1 + + struct llhttp_settings_s: + llhttp_cb on_message_begin + llhttp_data_cb on_url + llhttp_data_cb on_status + llhttp_data_cb on_header_field + llhttp_data_cb on_header_value + llhttp_cb on_headers_complete + llhttp_data_cb on_body + llhttp_cb on_message_complete + llhttp_cb on_chunk_header + llhttp_cb on_chunk_complete + ctypedef llhttp_settings_s llhttp_settings_t + + enum llhttp_type: + HTTP_BOTH, + HTTP_REQUEST, + HTTP_RESPONSE + ctypedef llhttp_type llhttp_type_t + + enum llhttp_errno: + HPE_OK, + HPE_INTERNAL, + HPE_STRICT, + HPE_LF_EXPECTED, + HPE_UNEXPECTED_CONTENT_LENGTH, + HPE_CLOSED_CONNECTION, + HPE_INVALID_METHOD, + HPE_INVALID_URL, + 
HPE_INVALID_CONSTANT, + HPE_INVALID_VERSION, + HPE_INVALID_HEADER_TOKEN, + HPE_INVALID_CONTENT_LENGTH, + HPE_INVALID_CHUNK_SIZE, + HPE_INVALID_STATUS, + HPE_INVALID_EOF_STATE, + HPE_INVALID_TRANSFER_ENCODING, + HPE_CB_MESSAGE_BEGIN, + HPE_CB_HEADERS_COMPLETE, + HPE_CB_MESSAGE_COMPLETE, + HPE_CB_CHUNK_HEADER, + HPE_CB_CHUNK_COMPLETE, + HPE_PAUSED, + HPE_PAUSED_UPGRADE, + HPE_USER + ctypedef llhttp_errno llhttp_errno_t + + enum llhttp_flags: + F_CONNECTION_KEEP_ALIVE, + F_CONNECTION_CLOSE, + F_CONNECTION_UPGRADE, + F_CHUNKED, + F_UPGRADE, + F_CONTENT_LENGTH, + F_SKIPBODY, + F_TRAILING, + F_LENIENT, + F_TRANSFER_ENCODING + ctypedef llhttp_flags llhttp_flags_t + + enum llhttp_method: + HTTP_DELETE, + HTTP_GET, + HTTP_HEAD, + HTTP_POST, + HTTP_PUT, + HTTP_CONNECT, + HTTP_OPTIONS, + HTTP_TRACE, + HTTP_COPY, + HTTP_LOCK, + HTTP_MKCOL, + HTTP_MOVE, + HTTP_PROPFIND, + HTTP_PROPPATCH, + HTTP_SEARCH, + HTTP_UNLOCK, + HTTP_BIND, + HTTP_REBIND, + HTTP_UNBIND, + HTTP_ACL, + HTTP_REPORT, + HTTP_MKACTIVITY, + HTTP_CHECKOUT, + HTTP_MERGE, + HTTP_MSEARCH, + HTTP_NOTIFY, + HTTP_SUBSCRIBE, + HTTP_UNSUBSCRIBE, + HTTP_PATCH, + HTTP_PURGE, + HTTP_MKCALENDAR, + HTTP_LINK, + HTTP_UNLINK, + HTTP_SOURCE, + HTTP_PRI, + HTTP_DESCRIBE, + HTTP_ANNOUNCE, + HTTP_SETUP, + HTTP_PLAY, + HTTP_PAUSE, + HTTP_TEARDOWN, + HTTP_GET_PARAMETER, + HTTP_SET_PARAMETER, + HTTP_REDIRECT, + HTTP_RECORD, + HTTP_FLUSH + ctypedef llhttp_method llhttp_method_t + + void llhttp_init(llhttp_t* parser, llhttp_type_t type, const llhttp_settings_t* settings) + + void llhttp_settings_init(llhttp_settings_t* settings) + + llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) + + void llhttp_resume_after_upgrade(llhttp_t* parser) + + int llhttp_should_keep_alive(const llhttp_t* parser) + + const char* llhttp_get_error_pos(const llhttp_t* parser) + const char* llhttp_get_error_reason(const llhttp_t* parser) + const char* llhttp_method_name(llhttp_method_t method) + + void llhttp_set_error_reason(llhttp_t* parser, const char* reason); + + void llhttp_set_lenient_headers(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_chunked_length(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_keep_alive(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_transfer_encoding(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_version(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_data_after_close(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_optional_lf_after_cr(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_optional_crlf_after_chunk(llhttp_t* parser, bint enabled); + void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, bint enabled); diff --git a/venv/lib/python3.12/site-packages/httptools/parser/errors.py b/venv/lib/python3.12/site-packages/httptools/parser/errors.py new file mode 100644 index 0000000..bc24c46 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/parser/errors.py @@ -0,0 +1,30 @@ +__all__ = ('HttpParserError', + 'HttpParserCallbackError', + 'HttpParserInvalidStatusError', + 'HttpParserInvalidMethodError', + 'HttpParserInvalidURLError', + 'HttpParserUpgrade') + + +class HttpParserError(Exception): + pass + + +class HttpParserCallbackError(HttpParserError): + pass + + +class HttpParserInvalidStatusError(HttpParserError): + pass + + +class HttpParserInvalidMethodError(HttpParserError): + pass + + +class HttpParserInvalidURLError(HttpParserError): + pass + + 
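`HttpParserUpgrade` below is the one exception in this module that does not signal a parse error: per the `feed_data` docstrings above, it is raised on an HTTP Upgrade and carries the offset of the first non-HTTP byte in the fed data. A sketch of splitting the buffer at that offset (the trailing WebSocket-style payload bytes are hypothetical, for illustration):

```python
import httptools


class Protocol:
    pass  # all callbacks are optional; only the upgrade signal matters here


data = (
    b"GET /chat HTTP/1.1\r\n"
    b"Host: example.com\r\n"
    b"Connection: Upgrade\r\n"
    b"Upgrade: websocket\r\n"
    b"\r\n"
    b"\x81\x05hello"  # hypothetical bytes belonging to the upgraded protocol
)

parser = httptools.HttpRequestParser(Protocol())
try:
    parser.feed_data(data)
except httptools.HttpParserUpgrade as exc:
    offset = exc.args[0]      # offset of the first non-HTTP byte
    leftover = data[offset:]  # hand these bytes to the WebSocket layer
    assert leftover == b"\x81\x05hello"
```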
+class HttpParserUpgrade(Exception): + pass diff --git a/venv/lib/python3.12/site-packages/httptools/parser/parser.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/httptools/parser/parser.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..498d9a7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/parser/parser.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/httptools/parser/parser.pyi b/venv/lib/python3.12/site-packages/httptools/parser/parser.pyi new file mode 100644 index 0000000..8a714bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/parser/parser.pyi @@ -0,0 +1,57 @@ +from typing import Union, Any +from array import array +from .protocol import HTTPProtocol + +class HttpParser: + def __init__(self, protocol: Union[HTTPProtocol, Any]) -> None: + """ + protocol -- a Python object with the following methods + (all optional): + + - on_message_begin() + - on_url(url: bytes) + - on_header(name: bytes, value: bytes) + - on_headers_complete() + - on_body(body: bytes) + - on_message_complete() + - on_chunk_header() + - on_chunk_complete() + - on_status(status: bytes) + """ + + def get_http_version(self) -> str: + """Return an HTTP protocol version.""" + ... + + def should_keep_alive(self) -> bool: + """Return ``True`` if keep-alive mode is preferred.""" + ... + + def should_upgrade(self) -> bool: + """Return ``True`` if the parsed request is a valid Upgrade request. + The method exposes a flag set just before on_headers_complete. + Calling this method earlier will only yield `False`.""" + ... + + def feed_data(self, data: Union[bytes, bytearray, memoryview, array]) -> None: + """Feed data to the parser. + + Will eventually trigger callbacks on the ``protocol`` + object. + + On HTTP upgrade, this method will raise an + ``HttpParserUpgrade`` exception, with its sole argument + set to the offset of the non-HTTP data in ``data``. + """ + +class HttpRequestParser(HttpParser): + """Used for parsing http requests from the server's side""" + + def get_method(self) -> bytes: + """Return HTTP request method (GET, HEAD, etc)""" + +class HttpResponseParser(HttpParser): + """Used for parsing http requests from the client's side""" + + def get_status_code(self) -> int: + """Return the status code of the HTTP response""" diff --git a/venv/lib/python3.12/site-packages/httptools/parser/parser.pyx b/venv/lib/python3.12/site-packages/httptools/parser/parser.pyx new file mode 100644 index 0000000..2fa5026 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httptools/parser/parser.pyx @@ -0,0 +1,436 @@ +#cython: language_level=3 + +from __future__ import print_function +from typing import Optional + +from cpython.mem cimport PyMem_Malloc, PyMem_Free +from cpython cimport PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE, \ + Py_buffer, PyBytes_AsString + +from .python cimport PyMemoryView_Check, PyMemoryView_GET_BUFFER + + +from .errors import (HttpParserError, + HttpParserCallbackError, + HttpParserInvalidStatusError, + HttpParserInvalidMethodError, + HttpParserInvalidURLError, + HttpParserUpgrade) + +cimport cython +from . 
cimport cparser
+
+
+__all__ = ('HttpRequestParser', 'HttpResponseParser')
+
+
+@cython.internal
+cdef class HttpParser:
+
+    cdef:
+        cparser.llhttp_t* _cparser
+        cparser.llhttp_settings_t* _csettings
+
+        bytes _current_header_name
+        bytes _current_header_value
+
+        _proto_on_url, _proto_on_status, _proto_on_body, \
+            _proto_on_header, _proto_on_headers_complete, \
+            _proto_on_message_complete, _proto_on_chunk_header, \
+            _proto_on_chunk_complete, _proto_on_message_begin
+
+        object _last_error
+
+        Py_buffer py_buf
+
+    def __cinit__(self):
+        self._cparser = <cparser.llhttp_t*> \
+            PyMem_Malloc(sizeof(cparser.llhttp_t))
+        if self._cparser is NULL:
+            raise MemoryError()
+
+        self._csettings = <cparser.llhttp_settings_t*> \
+            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
+        if self._csettings is NULL:
+            raise MemoryError()
+
+    def __dealloc__(self):
+        PyMem_Free(self._cparser)
+        PyMem_Free(self._csettings)
+
+    cdef _init(self, protocol, cparser.llhttp_type_t mode):
+        cparser.llhttp_settings_init(self._csettings)
+
+        cparser.llhttp_init(self._cparser, mode, self._csettings)
+        self._cparser.data = <void*>self
+
+        self._current_header_name = None
+        self._current_header_value = None
+
+        self._proto_on_header = getattr(protocol, 'on_header', None)
+        if self._proto_on_header is not None:
+            self._csettings.on_header_field = cb_on_header_field
+            self._csettings.on_header_value = cb_on_header_value
+        self._proto_on_headers_complete = getattr(
+            protocol, 'on_headers_complete', None)
+        self._csettings.on_headers_complete = cb_on_headers_complete
+
+        self._proto_on_body = getattr(protocol, 'on_body', None)
+        if self._proto_on_body is not None:
+            self._csettings.on_body = cb_on_body
+
+        self._proto_on_message_begin = getattr(
+            protocol, 'on_message_begin', None)
+        if self._proto_on_message_begin is not None:
+            self._csettings.on_message_begin = cb_on_message_begin
+
+        self._proto_on_message_complete = getattr(
+            protocol, 'on_message_complete', None)
+        if self._proto_on_message_complete is not None:
+            self._csettings.on_message_complete = cb_on_message_complete
+
+        self._proto_on_chunk_header = getattr(
+            protocol, 'on_chunk_header', None)
+        self._csettings.on_chunk_header = cb_on_chunk_header
+
+        self._proto_on_chunk_complete = getattr(
+            protocol, 'on_chunk_complete', None)
+        self._csettings.on_chunk_complete = cb_on_chunk_complete
+
+        self._last_error = None
+
+    cdef _maybe_call_on_header(self):
+        if self._current_header_value is not None:
+            current_header_name = self._current_header_name
+            current_header_value = self._current_header_value
+
+            self._current_header_name = self._current_header_value = None
+
+            if self._proto_on_header is not None:
+                self._proto_on_header(current_header_name,
+                                      current_header_value)
+
+    cdef _on_header_field(self, bytes field):
+        self._maybe_call_on_header()
+        if self._current_header_name is None:
+            self._current_header_name = field
+        else:
+            self._current_header_name += field
+
+    cdef _on_header_value(self, bytes val):
+        if self._current_header_value is None:
+            self._current_header_value = val
+        else:
+            # This is unlikely, as mostly HTTP headers are one-line
+            self._current_header_value += val
+
+    cdef _on_headers_complete(self):
+        self._maybe_call_on_header()
+
+        if self._proto_on_headers_complete is not None:
+            self._proto_on_headers_complete()
+
+    cdef _on_chunk_header(self):
+        if (self._current_header_value is not None or
+                self._current_header_name is not None):
+            raise HttpParserError('invalid headers state')
+
+        if self._proto_on_chunk_header is not None:
+            self._proto_on_chunk_header()
+
+    cdef _on_chunk_complete(self):
+        self._maybe_call_on_header()
+
+        if self._proto_on_chunk_complete is not None:
+            self._proto_on_chunk_complete()
+
+    ### Public API ###
+
+    def set_dangerous_leniencies(
+        self,
+        lenient_headers: Optional[bool] = None,
+        lenient_chunked_length: Optional[bool] = None,
+        lenient_keep_alive: Optional[bool] = None,
+        lenient_transfer_encoding: Optional[bool] = None,
+        lenient_version: Optional[bool] = None,
+        lenient_data_after_close: Optional[bool] = None,
+        lenient_optional_lf_after_cr: Optional[bool] = None,
+        lenient_optional_cr_before_lf: Optional[bool] = None,
+        lenient_optional_crlf_after_chunk: Optional[bool] = None,
+        lenient_spaces_after_chunk_size: Optional[bool] = None,
+    ):
+        cdef cparser.llhttp_t* parser = self._cparser
+        if lenient_headers is not None:
+            cparser.llhttp_set_lenient_headers(
+                parser, lenient_headers)
+        if lenient_chunked_length is not None:
+            cparser.llhttp_set_lenient_chunked_length(
+                parser, lenient_chunked_length)
+        if lenient_keep_alive is not None:
+            cparser.llhttp_set_lenient_keep_alive(
+                parser, lenient_keep_alive)
+        if lenient_transfer_encoding is not None:
+            cparser.llhttp_set_lenient_transfer_encoding(
+                parser, lenient_transfer_encoding)
+        if lenient_version is not None:
+            cparser.llhttp_set_lenient_version(
+                parser, lenient_version)
+        if lenient_data_after_close is not None:
+            cparser.llhttp_set_lenient_data_after_close(
+                parser, lenient_data_after_close)
+        if lenient_optional_lf_after_cr is not None:
+            cparser.llhttp_set_lenient_optional_lf_after_cr(
+                parser, lenient_optional_lf_after_cr)
+        if lenient_optional_cr_before_lf is not None:
+            cparser.llhttp_set_lenient_optional_cr_before_lf(
+                parser, lenient_optional_cr_before_lf)
+        if lenient_optional_crlf_after_chunk is not None:
+            cparser.llhttp_set_lenient_optional_crlf_after_chunk(
+                parser, lenient_optional_crlf_after_chunk)
+        if lenient_spaces_after_chunk_size is not None:
+            cparser.llhttp_set_lenient_spaces_after_chunk_size(
+                parser, lenient_spaces_after_chunk_size)
+
+    def get_http_version(self):
+        cdef cparser.llhttp_t* parser = self._cparser
+        return '{}.{}'.format(parser.http_major, parser.http_minor)
+
+    def should_keep_alive(self):
+        return bool(cparser.llhttp_should_keep_alive(self._cparser))
+
+    def should_upgrade(self):
+        cdef cparser.llhttp_t* parser = self._cparser
+        return bool(parser.upgrade)
+
+    def feed_data(self, data):
+        cdef:
+            size_t data_len
+            cparser.llhttp_errno_t err
+            Py_buffer *buf
+            bint owning_buf = False
+            const char* err_pos
+
+        if PyMemoryView_Check(data):
+            buf = PyMemoryView_GET_BUFFER(data)
+            data_len = <size_t>buf.len
+            err = cparser.llhttp_execute(
+                self._cparser,
+                <char*>buf.buf,
+                data_len)
+
+        else:
+            buf = &self.py_buf
+            PyObject_GetBuffer(data, buf, PyBUF_SIMPLE)
+            owning_buf = True
+            data_len = <size_t>buf.len
+
+            err = cparser.llhttp_execute(
+                self._cparser,
+                <char*>buf.buf,
+                data_len)
+
+        try:
+            if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE:
+                err_pos = cparser.llhttp_get_error_pos(self._cparser)
+
+                # Immediately free the parser from "error" state, simulating
+                # http-parser behavior here because 1) we never had the API to
+                # allow users manually "resume after upgrade", and 2) the use
+                # case for resuming parsing is very rare.
+                cparser.llhttp_resume_after_upgrade(self._cparser)
+
+                # The err_pos here is specific for the input buf. So if we ever
+                # switch to the llhttp behavior (re-raise HttpParserUpgrade for
+                # successive calls to feed_data() until resume_after_upgrade is
+                # called), we have to store the result and keep our own state.
+                raise HttpParserUpgrade(err_pos - <char*>buf.buf)
+        finally:
+            if owning_buf:
+                PyBuffer_Release(buf)
+
+        if err != cparser.HPE_OK:
+            ex = parser_error_from_errno(
+                self._cparser,
+                <cparser.llhttp_errno_t>self._cparser.error)
+            if isinstance(ex, HttpParserCallbackError):
+                if self._last_error is not None:
+                    ex.__context__ = self._last_error
+                    self._last_error = None
+            raise ex
+
+
+cdef class HttpRequestParser(HttpParser):
+
+    def __init__(self, protocol):
+        self._init(protocol, cparser.HTTP_REQUEST)
+
+        self._proto_on_url = getattr(protocol, 'on_url', None)
+        if self._proto_on_url is not None:
+            self._csettings.on_url = cb_on_url
+
+    def get_method(self):
+        cdef cparser.llhttp_t* parser = self._cparser
+        return cparser.llhttp_method_name(<cparser.llhttp_method_t> parser.method)
+
+
+cdef class HttpResponseParser(HttpParser):
+
+    def __init__(self, protocol):
+        self._init(protocol, cparser.HTTP_RESPONSE)
+
+        self._proto_on_status = getattr(protocol, 'on_status', None)
+        if self._proto_on_status is not None:
+            self._csettings.on_status = cb_on_status
+
+    def get_status_code(self):
+        cdef cparser.llhttp_t* parser = self._cparser
+        return parser.status_code
+
+
+cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._proto_on_message_begin()
+    except BaseException as ex:
+        pyparser._last_error = ex
+        return -1
+    else:
+        return 0
+
+
+cdef int cb_on_url(cparser.llhttp_t* parser,
+                   const char *at, size_t length) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._proto_on_url(at[:length])
+    except BaseException as ex:
+        cparser.llhttp_set_error_reason(parser, "`on_url` callback error")
+        pyparser._last_error = ex
+        return cparser.HPE_USER
+    else:
+        return 0
+
+
+cdef int cb_on_status(cparser.llhttp_t* parser,
+                      const char *at, size_t length) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._proto_on_status(at[:length])
+    except BaseException as ex:
+        cparser.llhttp_set_error_reason(parser, "`on_status` callback error")
+        pyparser._last_error = ex
+        return cparser.HPE_USER
+    else:
+        return 0
+
+
+cdef int cb_on_header_field(cparser.llhttp_t* parser,
+                            const char *at, size_t length) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._on_header_field(at[:length])
+    except BaseException as ex:
+        cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error")
+        pyparser._last_error = ex
+        return cparser.HPE_USER
+    else:
+        return 0
+
+
+cdef int cb_on_header_value(cparser.llhttp_t* parser,
+                            const char *at, size_t length) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._on_header_value(at[:length])
+    except BaseException as ex:
+        cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error")
+        pyparser._last_error = ex
+        return cparser.HPE_USER
+    else:
+        return 0
+
+
+cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._on_headers_complete()
+    except BaseException as ex:
+        pyparser._last_error = ex
+        return -1
+    else:
+        if pyparser._cparser.upgrade:
+            return 1
+        else:
+            return 0
+
+
+cdef int cb_on_body(cparser.llhttp_t* parser,
+                    const char *at, size_t length) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._proto_on_body(at[:length])
+    except BaseException as ex:
+        cparser.llhttp_set_error_reason(parser, "`on_body` callback error")
+        pyparser._last_error = ex
+        return cparser.HPE_USER
+    else:
+        return 0
+
+
+cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._proto_on_message_complete()
+    except BaseException as ex:
+        pyparser._last_error = ex
+        return -1
+    else:
+        return 0
+
+
+cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._on_chunk_header()
+    except BaseException as ex:
+        pyparser._last_error = ex
+        return -1
+    else:
+        return 0
+
+
+cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
+    cdef HttpParser pyparser = <HttpParser>parser.data
+    try:
+        pyparser._on_chunk_complete()
+    except BaseException as ex:
+        pyparser._last_error = ex
+        return -1
+    else:
+        return 0
+
+
+cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno):
+    cdef bytes reason = cparser.llhttp_get_error_reason(parser)
+
+    if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
+                 cparser.HPE_CB_HEADERS_COMPLETE,
+                 cparser.HPE_CB_MESSAGE_COMPLETE,
+                 cparser.HPE_CB_CHUNK_HEADER,
+                 cparser.HPE_CB_CHUNK_COMPLETE,
+                 cparser.HPE_USER):
+        cls = HttpParserCallbackError
+
+    elif errno == cparser.HPE_INVALID_STATUS:
+        cls = HttpParserInvalidStatusError
+
+    elif errno == cparser.HPE_INVALID_METHOD:
+        cls = HttpParserInvalidMethodError
+
+    elif errno == cparser.HPE_INVALID_URL:
+        cls = HttpParserInvalidURLError
+
+    else:
+        cls = HttpParserError
+
+    return cls(reason.decode('latin-1'))
diff --git a/venv/lib/python3.12/site-packages/httptools/parser/protocol.py b/venv/lib/python3.12/site-packages/httptools/parser/protocol.py
new file mode 100644
index 0000000..c3b4234
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httptools/parser/protocol.py
@@ -0,0 +1,15 @@
+from typing import Protocol
+
+
+class HTTPProtocol(Protocol):
+    """Used for providing static type-checking when parsing through the http protocol"""
+
+    def on_message_begin() -> None: ...
+    def on_url(url: bytes) -> None: ...
+    def on_header(name: bytes, value: bytes) -> None: ...
+    def on_headers_complete() -> None: ...
+    def on_body(body: bytes) -> None: ...
+    def on_message_complete() -> None: ...
+    def on_chunk_header() -> None: ...
+    def on_chunk_complete() -> None: ...
+    def on_status(status: bytes) -> None: ...
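Taken together, the vendored files above define httptools' event-driven parsing API: you hand `HttpRequestParser` a protocol object, feed it raw bytes, and it invokes whichever of the optional callbacks from `parser.pyi` the protocol defines. A minimal sketch of that flow (the `PrintingProtocol` class and the sample request are illustrative, not part of the vendored sources):

```python
import httptools

class PrintingProtocol:
    """Implements a few of the optional callbacks listed in parser.pyi."""

    def on_url(self, url: bytes) -> None:
        print("url:", url)

    def on_header(self, name: bytes, value: bytes) -> None:
        print("header:", name, value)

    def on_body(self, body: bytes) -> None:
        print("body:", body)

    def on_message_complete(self) -> None:
        print("message complete")

parser = httptools.HttpRequestParser(PrintingProtocol())
# feed_data() drives llhttp_execute() and fires the callbacks above.
parser.feed_data(
    b"POST /echo HTTP/1.1\r\n"
    b"Host: example.com\r\n"
    b"Content-Length: 2\r\n"
    b"\r\n"
    b"hi"
)
print(parser.get_method())  # b'POST'

# parse_url() (implemented in url_parser.pyx below) splits a raw URL:
u = httptools.parse_url(b"https://user:pass@example.com:8443/path?q=1#frag")
print(u.host, u.port, u.path)  # b'example.com' 8443 b'/path'
```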
diff --git a/venv/lib/python3.12/site-packages/httptools/parser/python.pxd b/venv/lib/python3.12/site-packages/httptools/parser/python.pxd
new file mode 100644
index 0000000..8e95925
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httptools/parser/python.pxd
@@ -0,0 +1,6 @@
+cimport cpython
+
+
+cdef extern from "Python.h":
+    cpython.Py_buffer* PyMemoryView_GET_BUFFER(object)
+    bint PyMemoryView_Check(object)
diff --git a/venv/lib/python3.12/site-packages/httptools/parser/url_cparser.pxd b/venv/lib/python3.12/site-packages/httptools/parser/url_cparser.pxd
new file mode 100644
index 0000000..ab9265a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httptools/parser/url_cparser.pxd
@@ -0,0 +1,31 @@
+from libc.stdint cimport uint16_t
+
+
+cdef extern from "http_parser.h":
+    # URL Parser
+
+    enum http_parser_url_fields:
+        UF_SCHEMA = 0,
+        UF_HOST = 1,
+        UF_PORT = 2,
+        UF_PATH = 3,
+        UF_QUERY = 4,
+        UF_FRAGMENT = 5,
+        UF_USERINFO = 6,
+        UF_MAX = 7
+
+    struct http_parser_url_field_data:
+        uint16_t off
+        uint16_t len
+
+    struct http_parser_url:
+        uint16_t field_set
+        uint16_t port
+        http_parser_url_field_data[UF_MAX] field_data
+
+    void http_parser_url_init(http_parser_url *u)
+
+    int http_parser_parse_url(const char *buf,
+                              size_t buflen,
+                              int is_connect,
+                              http_parser_url *u)
diff --git a/venv/lib/python3.12/site-packages/httptools/parser/url_parser.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/httptools/parser/url_parser.cpython-312-x86_64-linux-gnu.so
new file mode 100644
index 0000000..eec598d
Binary files /dev/null and b/venv/lib/python3.12/site-packages/httptools/parser/url_parser.cpython-312-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.12/site-packages/httptools/parser/url_parser.pyi b/venv/lib/python3.12/site-packages/httptools/parser/url_parser.pyi
new file mode 100644
index 0000000..f3d3488
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httptools/parser/url_parser.pyi
@@ -0,0 +1,27 @@
+from typing import Union
+from array import array
+
+class URL:
+    schema: bytes
+    host: bytes
+    port: int
+    path: bytes
+    query: bytes
+    fragment: bytes
+    userinfo: bytes
+
+def parse_url(url: Union[bytes, bytearray, memoryview, array]) -> URL:
+    """Parse URL strings into a structured Python object.
+
+    Returns an instance of ``httptools.URL`` class with the
+    following attributes:
+
+      - schema: bytes
+      - host: bytes
+      - port: int
+      - path: bytes
+      - query: bytes
+      - fragment: bytes
+      - userinfo: bytes
+    """
+    ...
diff --git a/venv/lib/python3.12/site-packages/httptools/parser/url_parser.pyx b/venv/lib/python3.12/site-packages/httptools/parser/url_parser.pyx
new file mode 100644
index 0000000..49908f3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httptools/parser/url_parser.pyx
@@ -0,0 +1,108 @@
+#cython: language_level=3
+
+from __future__ import print_function
+from cpython.mem cimport PyMem_Malloc, PyMem_Free
+from cpython cimport PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE, \
+    Py_buffer
+
+from .errors import HttpParserInvalidURLError
+
+cimport cython
+from . cimport url_cparser as uparser
+
+__all__ = ('parse_url',)
+
+@cython.freelist(250)
+cdef class URL:
+    cdef readonly bytes schema
+    cdef readonly bytes host
+    cdef readonly object port
+    cdef readonly bytes path
+    cdef readonly bytes query
+    cdef readonly bytes fragment
+    cdef readonly bytes userinfo
+
+    def __cinit__(self, bytes schema, bytes host, object port, bytes path,
+                  bytes query, bytes fragment, bytes userinfo):
+
+        self.schema = schema
+        self.host = host
+        self.port = port
+        self.path = path
+        self.query = query
+        self.fragment = fragment
+        self.userinfo = userinfo
+
+    def __repr__(self):
+        return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, '
+                'query: {!r}, fragment: {!r}, userinfo: {!r}>'
+                .format(self.schema, self.host, self.port, self.path,
+                        self.query, self.fragment, self.userinfo))
+
+
+def parse_url(url):
+    cdef:
+        Py_buffer py_buf
+        char* buf_data
+        uparser.http_parser_url* parsed
+        int res
+        bytes schema = None
+        bytes host = None
+        object port = None
+        bytes path = None
+        bytes query = None
+        bytes fragment = None
+        bytes userinfo = None
+        object result = None
+        int off
+        int ln
+
+    parsed = <uparser.http_parser_url*> \
+        PyMem_Malloc(sizeof(uparser.http_parser_url))
+    uparser.http_parser_url_init(parsed)
+
+    PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
+    try:
+        buf_data = <char*>py_buf.buf
+        res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed)
+
+        if res == 0:
+            if parsed.field_set & (1 << uparser.UF_SCHEMA):
+                off = parsed.field_data[uparser.UF_SCHEMA].off
+                ln = parsed.field_data[uparser.UF_SCHEMA].len
+                schema = buf_data[off:off+ln]
+
+            if parsed.field_set & (1 << uparser.UF_HOST):
+                off = parsed.field_data[uparser.UF_HOST].off
+                ln = parsed.field_data[uparser.UF_HOST].len
+                host = buf_data[off:off+ln]
+
+            if parsed.field_set & (1 << uparser.UF_PORT):
+                port = parsed.port
+
+            if parsed.field_set & (1 << uparser.UF_PATH):
+                off = parsed.field_data[uparser.UF_PATH].off
+                ln = parsed.field_data[uparser.UF_PATH].len
+                path = buf_data[off:off+ln]
+
+            if parsed.field_set & (1 << uparser.UF_QUERY):
+                off = parsed.field_data[uparser.UF_QUERY].off
+                ln = parsed.field_data[uparser.UF_QUERY].len
+                query = buf_data[off:off+ln]
+
+            if parsed.field_set & (1 << uparser.UF_FRAGMENT):
+                off = parsed.field_data[uparser.UF_FRAGMENT].off
+                ln = parsed.field_data[uparser.UF_FRAGMENT].len
+                fragment = buf_data[off:off+ln]
+
+            if parsed.field_set & (1 << uparser.UF_USERINFO):
+                off = parsed.field_data[uparser.UF_USERINFO].off
+                ln = parsed.field_data[uparser.UF_USERINFO].len
+                userinfo = buf_data[off:off+ln]
+
+            return URL(schema, host, port, path, query, fragment, userinfo)
+        else:
+            raise HttpParserInvalidURLError("invalid url {!r}".format(url))
+    finally:
+        PyBuffer_Release(&py_buf)
+        PyMem_Free(parsed)
diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/METADATA
new file mode 100644
index 0000000..ab2b707
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/METADATA
@@ -0,0 +1,216 @@
+Metadata-Version: 2.1
+Name: httpx
+Version: 0.26.0
+Summary: The next generation HTTP client.
+Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md
+Project-URL: Documentation, https://www.python-httpx.org
+Project-URL: Homepage, https://github.com/encode/httpx
+Project-URL: Source, https://github.com/encode/httpx
+Author-email: Tom Christie <tom@tomchristie.com>
+License-Expression: BSD-3-Clause
+License-File: LICENSE.md
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Web Environment
+Classifier: Framework :: AsyncIO
+Classifier: Framework :: Trio
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.8
+Requires-Dist: anyio
+Requires-Dist: certifi
+Requires-Dist: httpcore==1.*
+Requires-Dist: idna
+Requires-Dist: sniffio
+Provides-Extra: brotli
+Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli'
+Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli'
+Provides-Extra: cli
+Requires-Dist: click==8.*; extra == 'cli'
+Requires-Dist: pygments==2.*; extra == 'cli'
+Requires-Dist: rich<14,>=10; extra == 'cli'
+Provides-Extra: http2
+Requires-Dist: h2<5,>=3; extra == 'http2'
+Provides-Extra: socks
+Requires-Dist: socksio==1.*; extra == 'socks'
+Description-Content-Type: text/markdown
+
+[logo: HTTPX]
+
+HTTPX - A next-generation HTTP client for Python.
+
+[badges: Test Suite, Package version]
+
+HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated
+command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync
+and async APIs**.
+
+---
+
+Install HTTPX using pip:
+
+```shell
+$ pip install httpx
+```
+
+Now, let's get started:
+
+```pycon
+>>> import httpx
+>>> r = httpx.get('https://www.example.org/')
+>>> r
+<Response [200 OK]>
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'text/html; charset=UTF-8'
+>>> r.text
+'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>...'
+```
+
+Or, using the command-line client.
+
+```shell
+$ pip install 'httpx[cli]'  # The command line client is an optional dependency.
+```
+
+Which now allows us to use HTTPX directly from the command-line...
+
+[screenshot: output of `httpx --help`]
+
+Sending a request...
+
+[screenshot: output of `httpx http://httpbin.org/json`]
+
+## Features
+
+HTTPX builds on the well-established usability of `requests`, and gives you:
+
+* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/).
+* An integrated command-line client.
+* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/).
+* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/).
+* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps).
+* Strict timeouts everywhere.
+* Fully type annotated.
+* 100% test coverage.
+
+Plus all the standard features of `requests`...
+
+* International Domains and URLs
+* Keep-Alive & Connection Pooling
+* Sessions with Cookie Persistence
+* Browser-style SSL Verification
+* Basic/Digest Authentication
+* Elegant Key/Value Cookies
+* Automatic Decompression
+* Automatic Content Decoding
+* Unicode Response Bodies
+* Multipart File Uploads
+* HTTP(S) Proxy Support
+* Connection Timeouts
+* Streaming Downloads
+* .netrc Support
+* Chunked Requests
+
+## Installation
+
+Install with pip:
+
+```shell
+$ pip install httpx
+```
+
+Or, to include the optional HTTP/2 support, use:
+
+```shell
+$ pip install httpx[http2]
+```
+
+HTTPX requires Python 3.8+.
+
+## Documentation
+
+Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/).
+
+For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/).
+
+For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section.
+
+The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference.
+
+To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/).
+
+## Contribute
+
+If you want to contribute to HTTPX, check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start.
+
+## Dependencies
+
+The HTTPX project relies on these excellent libraries:
+
+* `httpcore` - The underlying transport implementation for `httpx`.
+  * `h11` - HTTP/1.1 support.
+* `certifi` - SSL certificates.
+* `idna` - Internationalized domain name support.
+* `sniffio` - Async library autodetection.
+
+As well as these optional installs:
+
+* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)*
+* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)*
+* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)*
+* `click` - Command line client support. *(Optional, with `httpx[cli]`)*
+* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)*
+
+A huge amount of credit is due to `requests` for the API layout that
+much of this work follows, as well as to `urllib3` for plenty of design
+inspiration around the lower-level networking details.
+
+---
+
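The feature list above highlights the dual sync/async interface; the quickstart example earlier used the synchronous shortcut functions. As a hedged sketch of the equivalent async call (the URL and printed fields are illustrative):

```python
import asyncio

import httpx

async def main() -> None:
    # One pooled AsyncClient per application is the usual pattern.
    async with httpx.AsyncClient() as client:
        r = await client.get("https://www.example.org/")
        print(r.status_code, r.headers.get("content-type"))

asyncio.run(main())
```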
+HTTPX is BSD licensed code.
+Designed & crafted with care.
+
+— 🦋 —
+
    + +## Release Information + +### Added + +* The `proxy` argument was added. You should use the `proxy` argument instead of the deprecated `proxies`, or use `mounts=` for more complex configurations. (#2879) + +### Deprecated + +* The `proxies` argument is now deprecated. It will still continue to work, but it will be removed in the future. (#2879) + +### Fixed + +* Fix cases of double escaping of URL path components. Allow / as a safe character in the query portion. (#2990) +* Handle `NO_PROXY` envvar cases when a fully qualified URL is supplied as the value. (#2741) +* Allow URLs where username or password contains unescaped '@'. (#2986) +* Ensure ASGI `raw_path` does not include URL query component. (#2999) +* Ensure `Response.iter_text()` cannot yield empty strings. (#2998) + + +--- + +[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/RECORD new file mode 100644 index 0000000..3dba9f6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/RECORD @@ -0,0 +1,57 @@ +../../../bin/httpx,sha256=mVkYo6Iyp7BQpWU_XtvWDhHeghgvKO91kPJMJdz991k,196 +httpx-0.26.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.26.0.dist-info/METADATA,sha256=Yjx1aywkmeqZ4B3Qp3qRML5utB4-TLCqNJvVkMvdIXo,7623 +httpx-0.26.0.dist-info/RECORD,, +httpx-0.26.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx-0.26.0.dist-info/WHEEL,sha256=mRYSEL3Ih6g5a_CVMIcwiF__0Ae4_gLYh01YFNwiq1k,87 +httpx-0.26.0.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 +httpx-0.26.0.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx/__init__.py,sha256=oCxVAsePEy5DE9eLhGAAq9H3RBGZUDaUROtGEyzbBRo,3210 +httpx/__pycache__/__init__.cpython-312.pyc,, +httpx/__pycache__/__version__.cpython-312.pyc,, +httpx/__pycache__/_api.cpython-312.pyc,, +httpx/__pycache__/_auth.cpython-312.pyc,, +httpx/__pycache__/_client.cpython-312.pyc,, +httpx/__pycache__/_compat.cpython-312.pyc,, +httpx/__pycache__/_config.cpython-312.pyc,, +httpx/__pycache__/_content.cpython-312.pyc,, +httpx/__pycache__/_decoders.cpython-312.pyc,, +httpx/__pycache__/_exceptions.cpython-312.pyc,, +httpx/__pycache__/_main.cpython-312.pyc,, +httpx/__pycache__/_models.cpython-312.pyc,, +httpx/__pycache__/_multipart.cpython-312.pyc,, +httpx/__pycache__/_status_codes.cpython-312.pyc,, +httpx/__pycache__/_types.cpython-312.pyc,, +httpx/__pycache__/_urlparse.cpython-312.pyc,, +httpx/__pycache__/_urls.cpython-312.pyc,, +httpx/__pycache__/_utils.cpython-312.pyc,, +httpx/__version__.py,sha256=-YQNBeT22C_ms9hS7Mi_iS0TewVSathpOoSBZY1omXU,108 +httpx/_api.py,sha256=Z_SNA7D3J66ZJGZAwDi0Hqn9YBB7JVLnraoNNpxUlfI,13722 +httpx/_auth.py,sha256=AfE6FgSxusanhWxs767QcIQ007zrcag0Zw3-7EFTdVw,12029 +httpx/_client.py,sha256=0IY3ZgB0n-DQmImIKAEcaTE0ATpFqW1GFQtmP9B-QQI,69273 +httpx/_compat.py,sha256=rJERfjHkRvvHFVfltbHyCVcAboNsfEeN6j_00Z2C4k8,1563 +httpx/_config.py,sha256=UdYiWEibNk4F93nn0-wHRtrgmQu7tzTb1AphKWZ5wYQ,12366 +httpx/_content.py,sha256=F243o1UsgN6BiP5B9J44FfwwfAYbbQdI2p4ruawP81s,8109 +httpx/_decoders.py,sha256=WAzI9jvalFzUlzWISrlMBKC8unja-DtnDqWJcLQOX6o,9937 +httpx/_exceptions.py,sha256=dHc3bdcNbD4Sc-neyTHPp1qn9rBdldR-cJUCTO-G2DA,7958 +httpx/_main.py,sha256=9X9CXI16ChsedePBwjm7BHzLWpq1TPBuIs50BsQXyss,15783 +httpx/_models.py,sha256=NY4j0tLBJ_peQBobF12g4KFpc0Zok1G9X6o1Q3WLShk,42974 
+httpx/_multipart.py,sha256=Rl-66BO8mNxciBpfuK7H1ZJNIjTaF704ATlKjGS4QNM,9005 +httpx/_status_codes.py,sha256=XKArMrSoo8oKBQCHdFGA-wsM2PcSTaHE8svDYOUcwWk,5584 +httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +httpx/_transports/__pycache__/__init__.cpython-312.pyc,, +httpx/_transports/__pycache__/asgi.cpython-312.pyc,, +httpx/_transports/__pycache__/base.cpython-312.pyc,, +httpx/_transports/__pycache__/default.cpython-312.pyc,, +httpx/_transports/__pycache__/mock.cpython-312.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-312.pyc,, +httpx/_transports/asgi.py,sha256=wnP48krkIkxJR9k0d0mdv4AD3RGnutAIyOFEyOk-qKg,5484 +httpx/_transports/base.py,sha256=0BM8yZZEkdFT4tXXSm0h0dK0cSYA4hLgInj_BljGEGw,2510 +httpx/_transports/default.py,sha256=Iq-URQ-YqHNtPsnRbETvbpzhtHiej7WAzWWHyV8_Bc8,13428 +httpx/_transports/mock.py,sha256=sDt3BDXbz8-W94kC8OXtGzF1PWH0y73h1De7Q-XkVtg,1179 +httpx/_transports/wsgi.py,sha256=Zt3EhTagyF3o-HC2oPMp-hTy3M3kQThL1ECJRc8eXEM,4797 +httpx/_types.py,sha256=lveH-nW6V3VLKeY-EffHVDkOxCe94Irg-ebo3LxQ13s,3391 +httpx/_urlparse.py,sha256=-sBdbF0kyfOtPSIRywoXsFUTtybBv-_l7uJJkcQs6pI,17826 +httpx/_urls.py,sha256=25aWsmj3cxE6dytuyD6AS7SgrGusdbX5bbZq0gi1_U4,21901 +httpx/_utils.py,sha256=n4XdckwhaDCIUvCSIjQ-T0DybCorMAu1VLL9jV8N-E0,14069 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/WHEEL new file mode 100644 index 0000000..2860816 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/entry_points.txt new file mode 100644 index 0000000..8ae9600 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +httpx = httpx:main diff --git a/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..ab79d16 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx-0.26.0.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/httpx/__init__.py b/venv/lib/python3.12/site-packages/httpx/__init__.py new file mode 100644 index 0000000..f61112f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/__init__.py @@ -0,0 +1,138 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import delete, get, head, options, patch, post, put, request, stream +from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth +from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client +from ._config import Limits, Proxy, Timeout, create_ssl_context +from ._content import ByteStream +from ._exceptions import ( + CloseError, + ConnectError, + ConnectTimeout, + CookieConflict, + DecodingError, + HTTPError, + HTTPStatusError, + InvalidURL, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + RequestError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + StreamError, + TimeoutException, + TooManyRedirects, + TransportError, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.mock import MockTransport +from ._transports.wsgi import WSGITransport +from ._types import AsyncByteStream, SyncByteStream +from ._urls import URL, QueryParams + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "main", + "MockTransport", + "NetRCAuth", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + 
"ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..caa6e0f Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc new file mode 100644 index 0000000..c9a953b Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000..c9cf2c6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc new file mode 100644 index 0000000..e8e8876 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc new file mode 100644 index 0000000..b9268dc Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..24a8d70 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc new file mode 100644 index 0000000..949e8de Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc new file mode 100644 index 0000000..dac40d4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc new file mode 100644 index 0000000..a4f37fa Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 
index 0000000..2157eb4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc new file mode 100644 index 0000000..fa1431d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc new file mode 100644 index 0000000..33ec27c Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc new file mode 100644 index 0000000..caa685b Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc new file mode 100644 index 0000000..08e48be Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc new file mode 100644 index 0000000..3dda7f3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc new file mode 100644 index 0000000..68f28d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc new file mode 100644 index 0000000..42da4e9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 0000000..4c3fca4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__version__.py b/venv/lib/python3.12/site-packages/httpx/__version__.py new file mode 100644 index 0000000..3edc842 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." 
+__version__ = "0.26.0" diff --git a/venv/lib/python3.12/site-packages/httpx/_api.py b/venv/lib/python3.12/site-packages/httpx/_api.py new file mode 100644 index 0000000..c7af947 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_api.py @@ -0,0 +1,465 @@ +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, + URLTypes, + VerifyTypes, +) + + +def request( + method: str, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. + * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. 
+ + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxy=proxy, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxy=proxy, + proxies=proxies, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) as response: + yield response + + +def get( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. 
+ + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. + """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Optional[AuthTypes] = None, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + follow_redirects: bool = False, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + proxies=proxies, + follow_redirects=follow_redirects, + cert=cert, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/venv/lib/python3.12/site-packages/httpx/_auth.py b/venv/lib/python3.12/site-packages/httpx/_auth.py new file mode 100644 index 0000000..6613250 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_auth.py @@ -0,0 +1,352 @@ +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Cookies, Request, Response +from ._utils import to_bytes, to_str, unquote + +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. 
+ """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. + + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> None: + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class NetRCAuth(Auth): + """ + Use a 'netrc' file to lookup basic auth credentials based on the url host. + """ + + def __init__(self, file: typing.Optional[str] = None) -> None: + # Lazily import 'netrc'. + # There's no need for us to load this module unless 'NetRCAuth' is being used. 
+ import netrc + + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. + yield request + else: + # Build a basic auth header with credentials from the netrc file. + request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable[[bytes], "_Hash"]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__( + self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] + ) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + self._last_challenge: typing.Optional[_DigestAuthChallenge] = None + self._nonce_count = 1 + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + if response.cookies: + Cookies(response.cookies).set_cookie_header(request=request) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> "_DigestAuthChallenge": + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. 
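The loop below splits those comma-separated challenge fields with the standard library's `parse_http_list`, which respects quoting. For illustration, with made-up values:

```python
from urllib.request import parse_http_list

challenge = 'realm="realm@host.com", qop="auth,auth-int", nonce="abc", opaque="xyz"'
for field in parse_http_list(challenge):
    print(field)
# realm="realm@host.com"
# qop="auth,auth-int"   <- the comma inside the quotes is not a separator
# nonce="abc"
# opaque="xyz"
```

End users never parse this themselves; they simply pass `auth=httpx.DigestAuth("user", "pass")` and the flow above handles the challenge.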
+ assert scheme.lower() == "digest" + + header_dict: typing.Dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: "_DigestAuthChallenge" + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nc_value = b"%08x" % self._nonce_count + cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + digest_data = [HA1, challenge.nonce, HA2] + else: + digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2] + key_digest = b":".join(digest_data) + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join((HA1, key_digest))), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop( + self, qop: typing.Optional[bytes], request: Request + ) -> typing.Optional[bytes]: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: typing.Optional[bytes] + qop: typing.Optional[bytes] diff --git a/venv/lib/python3.12/site-packages/httpx/_client.py 
b/venv/lib/python3.12/site-packages/httpx/_client.py new file mode 100644 index 0000000..2813a84 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_client.py @@ -0,0 +1,2037 @@ +import datetime +import enum +import logging +import typing +import warnings +from contextlib import asynccontextmanager, contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.asgi import ASGITransport +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._transports.wsgi import WSGITransport +from ._types import ( + AsyncByteStream, + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxiesTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + SyncByteStream, + TimeoutTypes, + URLTypes, + VerifyTypes, +) +from ._urls import URL, QueryParams +from ._utils import ( + Timer, + URLPattern, + get_environment_proxies, + is_https_redirect, + same_origin, +) + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. + """ + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = logging.getLogger("httpx") + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. 
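This is why `response.elapsed` only becomes available once the response has been read or closed. A quick check against a placeholder URL:

```python
import httpx

with httpx.Client() as client:
    response = client.get("https://example.org")
    # For a non-streamed request the body is read (and the bound stream
    # closed) inside send(), so the timer has already stamped elapsed.
    print(response.elapsed.total_seconds())
```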
+ """ + + def __init__( + self, stream: SyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + seconds = self._timer.sync_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, timer: Timer + ) -> None: + self._stream = stream + self._response = response + self._timer = timer + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + seconds = await self._timer.async_elapsed() + self._response.elapsed = datetime.timedelta(seconds=seconds) + await self._stream.aclose() + + +EventHook = typing.Callable[..., typing.Any] + + +class BaseClient: + def __init__( + self, + *, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[EventHook]] + ] = None, + base_url: URLTypes = "", + trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ) -> None: + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._default_encoding = default_encoding + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxies: typing.Optional[ProxiesTypes], allow_env_proxies: bool + ) -> typing.Dict[str, typing.Optional[Proxy]]: + if proxies is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + if isinstance(proxies, dict): + new_proxies = {} + for key, value in proxies.items(): + proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value + new_proxies[str(key)] = proxy + return new_proxies + else: + proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + 
@property + def event_hooks(self) -> typing.Dict[str, typing.List[EventHook]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks( + self, event_hooks: typing.Dict[str, typing.List[EventHook]] + ) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> typing.Optional[Auth]: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URLTypes) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. + + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + extensions=extensions, + ) + + def _merge_url(self, url: URLTypes) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. 
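The `event_hooks` property above expects a mapping with `"request"` and `"response"` lists of callables. A minimal sketch with one hook of each kind:

```python
import httpx

def log_request(request: httpx.Request) -> None:
    print(f"-> {request.method} {request.url}")

def raise_on_error(response: httpx.Response) -> None:
    response.raise_for_status()  # surface 4xx/5xx as HTTPStatusError

client = httpx.Client(
    event_hooks={"request": [log_request], "response": [raise_on_error]}
)
```

For `AsyncClient` the same shape applies, but the hooks must be async callables, since the redirect loop further down awaits them.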
+ """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. + # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies( + self, cookies: typing.Optional[CookieTypes] = None + ) -> typing.Optional[CookieTypes]: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers( + self, headers: typing.Optional[HeaderTypes] = None + ) -> typing.Optional[HeaderTypes]: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: typing.Optional[QueryParamTypes] = None + ) -> typing.Optional[QueryParamTypes]: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. + """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + return merged_queryparams.merge(params) + return params + + def _build_auth(self, auth: typing.Optional[AuthTypes]) -> typing.Optional[Auth]: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + extensions=request.extensions, + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. 
+ """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. + """ + headers = Headers(request.headers) + + if not same_origin(url, request.url): + if not is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + It can be shared between threads. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. 
+ * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An WSGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + mounts: typing.Optional[ + typing.Mapping[str, typing.Optional[BaseTransport]] + ] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[EventHook]] + ] = None, + base_url: URLTypes = "", + transport: typing.Optional[BaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + if proxies: + message = ( + "The 'proxies' argument is now deprecated." + " Use 'proxy' or 'mounts' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + if proxy: + raise RuntimeError("Use either `proxy` or 'proxies', not both.") + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: typing.Optional[BaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + ) -> BaseTransport: + if transport is not None: + return transport + + if app is not None: + return WSGITransport(app=app) + + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. 
Set " + "cookies directly on the client instance instead." + ) + warnings.warn(message, DeprecationWarning) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) + + @contextmanager + def stream( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
+ + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except BaseException as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + response.read() + else: + response.next_request = request + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + timer.sync_start() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." 
+ ) + + with request_context(request=request): + response = transport.handle_request(request) + + assert isinstance(response.stream, SyncByteStream) + + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + def get( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def options( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def head( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def post( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def put( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def patch( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def delete( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def close(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, an `ssl.SSLContext`, or `False` + (which will disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. 
+ * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: typing.Optional[AuthTypes] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + proxy: typing.Optional[ProxyTypes] = None, + proxies: typing.Optional[ProxiesTypes] = None, + mounts: typing.Optional[ + typing.Mapping[str, typing.Optional[AsyncBaseTransport]] + ] = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: typing.Optional[ + typing.Mapping[str, typing.List[typing.Callable[..., typing.Any]]] + ] = None, + base_url: URLTypes = "", + transport: typing.Optional[AsyncBaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + if proxies: + message = ( + "The 'proxies' argument is now deprecated." + " Use 'proxy' or 'mounts' instead." 
+ ) + warnings.warn(message, DeprecationWarning) + if proxy: + raise RuntimeError("Use either `proxy` or 'proxies', not both.") + + allow_env_proxies = trust_env and app is None and transport is None + proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + app=app, + trust_env=trust_env, + ) + + self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: typing.Optional[AsyncBaseTransport] = None, + app: typing.Optional[typing.Callable[..., typing.Any]] = None, + trust_env: bool = True, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + if app is not None: + return ASGITransport(app=app) + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http1=http1, + http2=http2, + limits=limits, + trust_env=trust_env, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + http2=http2, + limits=limits, + trust_env=trust_env, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. 
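A minimal end-to-end sketch of the async variant; `https://example.org` is a placeholder:

```python
import asyncio
import httpx

async def main() -> None:
    async with httpx.AsyncClient(base_url="https://example.org") as client:
        # get() builds the request, then awaits send() with client defaults.
        response = await client.get("/", params={"q": "demo"})
        print(response.status_code)

asyncio.run(main())
```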
+ + [0]: /advanced/#merging-of-configuration + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return await self.send(request, auth=auth, follow_redirects=follow_redirects) + + @asynccontextmanager + async def stream( + self, + method: str, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = await self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
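The `stream()` context manager defined above defers reading the body, which matters for large downloads. A sketch against a placeholder URL:

```python
import asyncio
import httpx

async def content_length(url: str) -> int:
    total = 0
    async with httpx.AsyncClient() as client:
        async with client.stream("GET", url) as response:
            # The body is pulled chunk by chunk rather than loaded at once.
            async for chunk in response.aiter_bytes():
                total += len(chunk)
    return total

print(asyncio.run(content_length("https://example.org")))
```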
+ + See also: [Request instances][0] + + [0]: /advanced/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + auth = self._build_request_auth(request, auth) + + response = await self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + await response.aread() + + return response + + except BaseException as exc: # pragma: no cover + await response.aclose() + raise exc + + async def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + auth_flow = auth.async_auth_flow(request) + try: + request = await auth_flow.__anext__() + + while True: + response = await self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = await auth_flow.asend(response) + except StopAsyncIteration: + return response + + response.history = list(history) + await response.aread() + request = next_request + history.append(response) + + except BaseException as exc: + await response.aclose() + raise exc + finally: + await auth_flow.aclose() + + async def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: typing.List[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + await hook(request) + + response = await self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + await hook(response) + + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + await response.aread() + else: + response.next_request = request + return response + + except BaseException as exc: + await response.aclose() + raise exc + + async def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + timer = Timer() + await timer.async_start() + + if not isinstance(request.stream, AsyncByteStream): + raise RuntimeError( + "Attempted to send an sync request with an AsyncClient instance." 
+ ) + + with request_context(request=request): + response = await transport.handle_async_request(request) + + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, timer=timer + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + async def get( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def options( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def head( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. 
+ """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def post( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def put( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def patch( + self, + url: URLTypes, + *, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. 
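Each verb helper in this block is a thin wrapper that forwards to `request(METHOD, ...)`, so the body-carrying verbs all accept the same `content`/`data`/`files`/`json` spread. A rough usage sketch; `httpbin.org` is an external echo service assumed purely for illustration:

    import asyncio
    import httpx

    async def demo():
        async with httpx.AsyncClient(base_url="https://httpbin.org") as client:
            posted = await client.post("/post", json={"name": "demo"})   # JSON body
            put = await client.put("/put", data={"field": "value"})      # form body
            patched = await client.patch("/patch", content=b"raw bytes") # raw body
            print(posted.status_code, put.status_code, patched.status_code)

    asyncio.run(demo())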
+ """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def delete( + self, + url: URLTypes, + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, + timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, + extensions: typing.Optional[RequestExtensions] = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + await self._transport.__aenter__() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aenter__() + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + self._state = ClientState.CLOSED + + await self._transport.__aexit__(exc_type, exc_value, traceback) + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/venv/lib/python3.12/site-packages/httpx/_compat.py b/venv/lib/python3.12/site-packages/httpx/_compat.py new file mode 100644 index 0000000..493e621 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_compat.py @@ -0,0 +1,41 @@ +""" +The _compat module is used for code which requires branching between different +Python environments. It is excluded from the code coverage checks. +""" +import ssl +import sys + +# Brotli support is optional +# The C bindings in `brotli` are recommended for CPython. +# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. +try: + import brotlicffi as brotli +except ImportError: # pragma: no cover + try: + import brotli + except ImportError: + brotli = None + +if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7): + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of + # 'SSLContext.minimum_version' from Python 3.7 onwards, however + # this attribute is not available unless the ssl module is compiled + # with OpenSSL 1.1.0g or newer. 
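The `__aenter__`/`aclose` state machine above is strict: a client moves UNOPENED -> OPENED -> CLOSED exactly once, which is why httpx favours one long-lived client over a client per request. A small self-contained sketch of the rule:

    import asyncio
    import httpx

    async def lifecycle_demo():
        client = httpx.AsyncClient()
        async with client:        # UNOPENED -> OPENED, then CLOSED on exit
            pass
        try:
            async with client:    # re-entering a CLOSED client is rejected
                pass
        except RuntimeError as exc:
            print(exc)  # "Cannot reopen a client instance, once it has been closed."

    asyncio.run(lifecycle_demo())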
+ # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version + # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version + context.minimum_version = ssl.TLSVersion.TLSv1_2 + +else: + + def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: + # If 'minimum_version' isn't available, we configure these options with + # the older deprecated variants. + context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.options |= ssl.OP_NO_TLSv1 + context.options |= ssl.OP_NO_TLSv1_1 + + +__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/venv/lib/python3.12/site-packages/httpx/_config.py b/venv/lib/python3.12/site-packages/httpx/_config.py new file mode 100644 index 0000000..05c096d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_config.py @@ -0,0 +1,368 @@ +import logging +import os +import ssl +import typing +from pathlib import Path + +import certifi + +from ._compat import set_minimum_tls_version_1_2 +from ._models import Headers +from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes +from ._urls import URL +from ._utils import get_ca_bundle_from_env + +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + + +logger = logging.getLogger("httpx") + + +class UnsetType: + pass # pragma: no cover + + +UNSET = UnsetType() + + +def create_ssl_context( + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, +) -> ssl.SSLContext: + return SSLConfig( + cert=cert, verify=verify, trust_env=trust_env, http2=http2 + ).ssl_context + + +class SSLConfig: + """ + SSL Configuration. + """ + + DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) + + def __init__( + self, + *, + cert: typing.Optional[CertTypes] = None, + verify: VerifyTypes = True, + trust_env: bool = True, + http2: bool = False, + ) -> None: + self.cert = cert + self.verify = verify + self.trust_env = trust_env + self.http2 = http2 + self.ssl_context = self.load_ssl_context() + + def load_ssl_context(self) -> ssl.SSLContext: + logger.debug( + "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", + self.verify, + self.cert, + self.trust_env, + self.http2, + ) + + if self.verify: + return self.load_ssl_context_verify() + return self.load_ssl_context_no_verify() + + def load_ssl_context_no_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for unverified connections. + """ + context = self._create_default_ssl_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + self._load_client_certs(context) + return context + + def load_ssl_context_verify(self) -> ssl.SSLContext: + """ + Return an SSL context for verified connections. + """ + if self.trust_env and self.verify is True: + ca_bundle = get_ca_bundle_from_env() + if ca_bundle is not None: + self.verify = ca_bundle + + if isinstance(self.verify, ssl.SSLContext): + # Allow passing in our own SSLContext object that's pre-configured. 
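`create_ssl_context()` is the public wrapper over the `SSLConfig` machinery here, and `set_minimum_tls_version_1_2()` from `_compat` pins the floor at TLS 1.2 on every context it builds. A short sketch of the common configurations, assuming no particular CA files on disk:

    import ssl
    import httpx

    default_ctx = httpx.create_ssl_context()            # verified, certifi CA bundle
    unverified = httpx.create_ssl_context(verify=False)
    assert unverified.verify_mode == ssl.CERT_NONE

    # A pre-built ssl.SSLContext can be passed straight through as `verify=`:
    client = httpx.Client(verify=default_ctx)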
+ context = self.verify + self._load_client_certs(context) + return context + elif isinstance(self.verify, bool): + ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH + elif Path(self.verify).exists(): + ca_bundle_path = Path(self.verify) + else: + raise IOError( + "Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(self.verify) + ) + + context = self._create_default_ssl_context() + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = True + + # Signal to server support for PHA in TLS 1.3. Raises an + # AttributeError if only read-only access is implemented. + try: + context.post_handshake_auth = True + except AttributeError: # pragma: no cover + pass + + # Disable using 'commonName' for SSLContext.check_hostname + # when the 'subjectAltName' extension isn't available. + try: + context.hostname_checks_common_name = False + except AttributeError: # pragma: no cover + pass + + if ca_bundle_path.is_file(): + cafile = str(ca_bundle_path) + logger.debug("load_verify_locations cafile=%r", cafile) + context.load_verify_locations(cafile=cafile) + elif ca_bundle_path.is_dir(): + capath = str(ca_bundle_path) + logger.debug("load_verify_locations capath=%r", capath) + context.load_verify_locations(capath=capath) + + self._load_client_certs(context) + + return context + + def _create_default_ssl_context(self) -> ssl.SSLContext: + """ + Creates the default SSLContext object that's used for both verified + and unverified connections. + """ + context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + set_minimum_tls_version_1_2(context) + context.options |= ssl.OP_NO_COMPRESSION + context.set_ciphers(DEFAULT_CIPHERS) + + if ssl.HAS_ALPN: + alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] + context.set_alpn_protocols(alpn_idents) + + keylogfile = os.environ.get("SSLKEYLOGFILE") + if keylogfile and self.trust_env: + context.keylog_filename = keylogfile + + return context + + def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: + """ + Loads client certificates into our SSLContext object + """ + if self.cert is not None: + if isinstance(self.cert, str): + ssl_context.load_cert_chain(certfile=self.cert) + elif isinstance(self.cert, tuple) and len(self.cert) == 2: + ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) + elif isinstance(self.cert, tuple) and len(self.cert) == 3: + ssl_context.load_cert_chain( + certfile=self.cert[0], + keyfile=self.cert[1], + password=self.cert[2], # type: ignore + ) + + +class Timeout: + """ + Timeout configuration. + + **Usage**: + + Timeout(None) # No timeouts. + Timeout(5.0) # 5s timeout on all operations. + Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. + Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. + Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. + # 5s timeout elsewhere. + """ + + def __init__( + self, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + *, + connect: typing.Union[None, float, UnsetType] = UNSET, + read: typing.Union[None, float, UnsetType] = UNSET, + write: typing.Union[None, float, UnsetType] = UNSET, + pool: typing.Union[None, float, UnsetType] = UNSET, + ) -> None: + if isinstance(timeout, Timeout): + # Passed as a single explicit Timeout. 
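`_load_client_certs()` above accepts three shapes for `cert=`: a single combined PEM path, a `(cert, key)` pair, or a `(cert, key, password)` triple. The file names below are placeholders, so the construction is guarded; with real files it would succeed:

    import httpx

    cert_combined = "client.pem"                          # cert + key in one file
    cert_pair = ("client.crt", "client.key")              # separate cert and key
    cert_locked = ("client.crt", "client.key", "s3cr3t")  # encrypted key + password

    try:
        client = httpx.Client(cert=cert_pair)
    except FileNotFoundError:
        pass  # expected here, since the placeholder files do not exist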
+ assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> typing.Dict[str, typing.Optional[float]]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. 
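The branches above give `Timeout` its slightly unusual construction rules, which are easiest to pin down with concrete values:

    import httpx

    t1 = httpx.Timeout(5.0)                # 5s for connect/read/write/pool alike
    t2 = httpx.Timeout(5.0, connect=10.0)  # override one phase, default the rest
    t3 = httpx.Timeout(None)               # disable every timeout

    assert t2.as_dict() == {"connect": 10.0, "read": 5.0, "write": 5.0, "pool": 5.0}

    # Omitting the default while leaving any phase unset is rejected:
    try:
        httpx.Timeout(read=10.0)
    except ValueError as exc:
        print(exc)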
+ """ + + def __init__( + self, + *, + max_connections: typing.Optional[int] = None, + max_keepalive_connections: typing.Optional[int] = None, + keepalive_expiry: typing.Optional[float] = 5.0, + ) -> None: + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, + url: URLTypes, + *, + ssl_context: typing.Optional[ssl.SSLContext] = None, + auth: typing.Optional[typing.Tuple[str, str]] = None, + headers: typing.Optional[HeaderTypes] = None, + ) -> None: + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https", "socks5"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + # Remove any auth credentials from the URL. + auth = (url.username, url.password) + url = url.copy_with(username=None, password=None) + + self.url = url + self.auth = auth + self.headers = headers + self.ssl_context = ssl_context + + @property + def raw_auth(self) -> typing.Optional[typing.Tuple[bytes, bytes]]: + # The proxy authentication as raw bytes. + return ( + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) + ) + + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. 
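`Limits` and `Proxy` are plain configuration holders. One detail worth noticing above is that credentials embedded in a proxy URL are split out into `.auth`, removed from the URL, and masked in the repr. A sketch; `proxy.internal` is a made-up host, and nothing connects at construction time:

    import httpx

    limits = httpx.Limits(max_connections=50, max_keepalive_connections=10,
                          keepalive_expiry=30.0)

    proxy = httpx.Proxy("http://user:pass@proxy.internal:3128")
    assert proxy.auth == ("user", "pass")   # stripped out of the URL itself
    print(proxy)                            # the password shows as '********'

    client = httpx.Client(limits=limits, proxy=proxy)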
+ url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/venv/lib/python3.12/site-packages/httpx/_content.py b/venv/lib/python3.12/site-packages/httpx/_content.py new file mode 100644 index 0000000..cd0d17f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_content.py @@ -0,0 +1,238 @@ +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Dict, + Iterable, + Iterator, + Mapping, + Optional, + Tuple, + Union, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) +from ._utils import peek_filelike_length, primitive_value_to_str + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: Iterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: AsyncIterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. + async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. 
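The stream classes here, together with the `encode_*` helpers just below, decide which framing headers a request body gets: raw iterables of unknown length go out chunked, while structured `data=`/`json=` bodies get a length and a content type. A small header-level sketch; no request is actually sent:

    import httpx

    def body():
        yield b"part one, "
        yield b"part two"

    r1 = httpx.Request("POST", "https://www.example.com", content=body())
    assert r1.headers["Transfer-Encoding"] == "chunked"      # length unknown

    r2 = httpx.Request("POST", "https://www.example.com", json={"a": 1})
    assert r2.headers["Content-Type"] == "application/json"

    r3 = httpx.Request("POST", "https://www.example.com", data={"k": "v"})
    assert r3.headers["Content-Type"] == "application/x-www-form-urlencoded"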
+ """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: no cover + + +def encode_content( + content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]], +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: RequestData, +) -> Tuple[Dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: RequestData, files: RequestFiles, boundary: Optional[bytes] +) -> Tuple[Dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: + body = json_dumps(json).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: Optional[RequestContent] = None, + data: Optional[RequestData] = None, + files: Optional[RequestFiles] = None, + json: Optional[Any] = None, + boundary: Optional[bytes] = None, +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + """ + Handles encoding the given `content`, `data`, `files`, and `json`, + returning a two-tuple of (, ). 
+    """
+    if data is not None and not isinstance(data, Mapping):
+        # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>`
+        # for raw request content, and `data=<form data>
    ` for url encoded or + # multipart form content. + # + # However for compat with requests, we *do* still support + # `data=` usages. We deal with that case here, treating it + # as if `content=<...>` had been supplied instead. + message = "Use 'content=<...>' to upload raw bytes/text content." + warnings.warn(message, DeprecationWarning) + return encode_content(data) + + if content is not None: + return encode_content(content) + elif files: + return encode_multipart_data(data or {}, files, boundary) + elif data: + return encode_urlencoded_data(data) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") + + +def encode_response( + content: Optional[ResponseContent] = None, + text: Optional[str] = None, + html: Optional[str] = None, + json: Optional[Any] = None, +) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: + """ + Handles encoding the given `content`, returning a two-tuple of + (, ). + """ + if content is not None: + return encode_content(content) + elif text is not None: + return encode_text(text) + elif html is not None: + return encode_html(html) + elif json is not None: + return encode_json(json) + + return {}, ByteStream(b"") diff --git a/venv/lib/python3.12/site-packages/httpx/_decoders.py b/venv/lib/python3.12/site-packages/httpx/_decoders.py new file mode 100644 index 0000000..3f507c8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_decoders.py @@ -0,0 +1,327 @@ +""" +Handlers for Content-Encoding. + +See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding +""" +import codecs +import io +import typing +import zlib + +from ._compat import brotli +from ._exceptions import DecodingError + + +class ContentDecoder: + def decode(self, data: bytes) -> bytes: + raise NotImplementedError() # pragma: no cover + + def flush(self) -> bytes: + raise NotImplementedError() # pragma: no cover + + +class IdentityDecoder(ContentDecoder): + """ + Handle unencoded data. + """ + + def decode(self, data: bytes) -> bytes: + return data + + def flush(self) -> bytes: + return b"" + + +class DeflateDecoder(ContentDecoder): + """ + Handle 'deflate' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.first_attempt = True + self.decompressor = zlib.decompressobj() + + def decode(self, data: bytes) -> bytes: + was_first_attempt = self.first_attempt + self.first_attempt = False + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + if was_first_attempt: + self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) + return self.decode(data) + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class GZipDecoder(ContentDecoder): + """ + Handle 'gzip' decoding. + + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) + + def decode(self, data: bytes) -> bytes: + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class BrotliDecoder(ContentDecoder): + """ + Handle 'brotli' decoding. + + Requires `pip install brotlipy`. 
See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: no cover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: no cover + return b"" + except brotli.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. + """ + + def __init__(self, chunk_size: typing.Optional[int] = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> typing.List[bytes]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. 
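`MultiDecoder` above reverses its decoder list because `Content-Encoding` names codings in the order they were applied, so they must be undone back-to-front. A self-contained sketch using these classes, imported from the private `httpx._decoders` module purely for illustration:

    import gzip
    import zlib
    from httpx._decoders import DeflateDecoder, GZipDecoder, MultiDecoder

    payload = gzip.compress(zlib.compress(b"hello"))  # deflate applied, then gzip

    # "Content-Encoding: deflate, gzip" -> children listed in application order;
    # MultiDecoder runs them reversed: gunzip first, then inflate.
    decoder = MultiDecoder([DeflateDecoder(), GZipDecoder()])
    assert decoder.decode(payload) + decoder.flush() == b"hello"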
+ """ + + def __init__(self, chunk_size: typing.Optional[int] = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> typing.List[str]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> typing.List[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: str = "utf-8") -> None: + self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") + + def decode(self, data: bytes) -> str: + return self.decoder.decode(data) + + def flush(self) -> str: + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Has the same behaviour as the stdllib splitlines, + but handling the input iteratively. + """ + + def __init__(self) -> None: + self.buffer: typing.List[str] = [] + self.trailing_cr: bool = False + + def decode(self, text: str) -> typing.List[str]: + # See https://docs.python.org/3/library/stdtypes.html#str.splitlines + NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" + + # We always push a trailing `\r` into the next decode iteration. + if self.trailing_cr: + text = "\r" + text + self.trailing_cr = False + if text.endswith("\r"): + self.trailing_cr = True + text = text[:-1] + + if not text: + # NOTE: the edge case input of empty text doesn't occur in practice, + # because other httpx internals filter out this value + return [] # pragma: no cover + + trailing_newline = text[-1] in NEWLINE_CHARS + lines = text.splitlines() + + if len(lines) == 1 and not trailing_newline: + # No new lines, buffer the input and continue. + self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. 
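The trailing-`\r` bookkeeping in `LineDecoder` exists so that a `\r\n` split across two chunks still counts as a single line break. Tracing the class with small inputs makes the buffering visible:

    from httpx._decoders import LineDecoder  # internal, imported for illustration

    decoder = LineDecoder()
    assert decoder.decode("first li") == []                  # no newline: buffered
    assert decoder.decode("ne\nsecond\r") == ["first line"]  # the "\r" is held back
    assert decoder.decode("\nthird") == ["second"]           # "\r"+"\n" is one break
    assert decoder.flush() == ["third"]                      # flush drains the buffer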
+ self.buffer = [lines.pop()] + + return lines + + def flush(self) -> typing.List[str]: + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover diff --git a/venv/lib/python3.12/site-packages/httpx/_exceptions.py b/venv/lib/python3.12/site-packages/httpx/_exceptions.py new file mode 100644 index 0000000..1236929 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_exceptions.py @@ -0,0 +1,349 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: no cover + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + self._request: typing.Optional["Request"] = None + + @property + def request(self) -> "Request": + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: "Request") -> None: + self._request = request + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__( + self, message: str, *, request: typing.Optional["Request"] = None + ) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. + """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... 
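In practice the hierarchy above lets a caller be as coarse or as fine as it likes: everything transport-level descends from `RequestError`, while bad status codes surface separately as `HTTPStatusError`. A sketch; `www.example.com` stands in for any host, and the tiny timeout is there only to provoke a failure:

    import httpx

    try:
        response = httpx.get("https://www.example.com", timeout=0.001)
        response.raise_for_status()
    except httpx.ConnectTimeout:
        print("connect phase timed out")        # most specific first
    except httpx.TimeoutException:
        print("another phase timed out")
    except httpx.RequestError as exc:
        print(f"transport failure for {exc.request.url}")
    except httpx.HTTPStatusError as exc:
        print(f"server answered {exc.response.status_code}")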
+ + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For example, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__( + self, message: str, *, request: "Request", response: "Response" + ) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. 
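The stream errors defined around here act as guard rails on streamed responses: the body is never implicitly loaded, so touching `.text` before reading raises rather than silently buffering. A sketch against a placeholder host:

    import httpx

    with httpx.Client() as client:
        with client.stream("GET", "https://www.example.com") as response:
            try:
                _ = response.text          # body not read yet
            except httpx.ResponseNotRead:
                response.read()            # load it explicitly, then retry
                print(len(response.text))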
+ """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." + ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming response content," + " without having called `read()`." + ) + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming request content," + " without having called `read()`." + ) + super().__init__(message) + + +@contextlib.contextmanager +def request_context( + request: typing.Optional["Request"] = None, +) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. + """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/venv/lib/python3.12/site-packages/httpx/_main.py b/venv/lib/python3.12/site-packages/httpx/_main.py new file mode 100644 index 0000000..adb57d5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_main.py @@ -0,0 +1,507 @@ +import functools +import json +import sys +import typing + +import click +import httpcore +import pygments.lexers +import pygments.util +import rich.console +import rich.markup +import rich.progress +import rich.syntax +import rich.table + +from ._client import Client +from ._exceptions import RequestError +from ._models import Response +from ._status_codes import codes + + +def print_help() -> None: + console = rich.console.Console() + + console.print("[bold]HTTPX :butterfly:", justify="center") + console.print() + console.print("A next generation HTTP client.", justify="center") + console.print() + console.print( + "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" + ) + console.print() + + table = rich.table.Table.grid(padding=1, pad_edge=True) + table.add_column("Parameter", no_wrap=True, justify="left", style="bold") + table.add_column("Description") + table.add_row( + "-m, --method [cyan]METHOD", + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" + "[Default: GET, or POST if a request body is included]", + ) + table.add_row( + "-p, --params [cyan] ...", + "Query parameters to include in the request URL.", + ) + table.add_row( + "-c, --content [cyan]TEXT", "Byte content to include in the request body." + ) + table.add_row( + "-d, --data [cyan] ...", "Form data to include in the request body." + ) + table.add_row( + "-f, --files [cyan] ...", + "Form files to include in the request body.", + ) + table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") + table.add_row( + "-h, --headers [cyan] ...", + "Include additional HTTP headers in the request.", + ) + table.add_row( + "--cookies [cyan] ...", "Cookies to include in the request." + ) + table.add_row( + "--auth [cyan]", + "Username and password to include in the request. Specify '-' for the password" + " to use a password prompt. Note that using --verbose/-v will expose" + " the Authorization header, including the password encoding" + " in a trivially reversible format.", + ) + + table.add_row( + "--proxy [cyan]URL", + "Send the request via a proxy. 
Should be the URL giving the proxy address.", + ) + + table.add_row( + "--timeout [cyan]FLOAT", + "Timeout value to use for network operations, such as establishing the" + " connection, reading some data, etc... [Default: 5.0]", + ) + + table.add_row("--follow-redirects", "Automatically follow redirects.") + table.add_row("--no-verify", "Disable SSL verification.") + table.add_row( + "--http2", "Send the request using HTTP/2, if the remote server supports it." + ) + + table.add_row( + "--download [cyan]FILE", + "Save the response content as a file, rather than displaying it.", + ) + + table.add_row("-v, --verbose", "Verbose output. Show request as well as response.") + table.add_row("--help", "Show this message and exit.") + console.print(table) + + +def get_lexer_for_response(response: Response) -> str: + content_type = response.headers.get("Content-Type") + if content_type is not None: + mime_type, _, _ = content_type.partition(";") + try: + return typing.cast( + str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name + ) + except pygments.util.ClassNotFound: # pragma: no cover + pass + return "" # pragma: no cover + + +def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: + version = "HTTP/2" if http2 else "HTTP/1.1" + headers = [ + (name.lower() if http2 else name, value) for name, value in request.headers + ] + method = request.method.decode("ascii") + target = request.url.target.decode("ascii") + lines = [f"{method} {target} {version}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def format_response_headers( + http_version: bytes, + status: int, + reason_phrase: typing.Optional[bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], +) -> str: + version = http_version.decode("ascii") + reason = ( + codes.get_reason_phrase(status) + if reason_phrase is None + else reason_phrase.decode("ascii") + ) + lines = [f"{version} {status} {reason}"] + [ + f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers + ] + return "\n".join(lines) + + +def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: + console = rich.console.Console() + http_text = format_request_headers(request, http2=http2) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response_headers( + http_version: bytes, + status: int, + reason_phrase: typing.Optional[bytes], + headers: typing.List[typing.Tuple[bytes, bytes]], +) -> None: + console = rich.console.Console() + http_text = format_response_headers(http_version, status, reason_phrase, headers) + syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = 
typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] +_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid 
JSON") + + +def validate_auth( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: typing.Union[click.Option, click.Parameter], + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxy", + "proxy", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... [Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. 
Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: typing.List[typing.Tuple[str, str]], + content: str, + data: typing.List[typing.Tuple[str, str]], + files: typing.List[typing.Tuple[str, click.File]], + json: str, + headers: typing.List[typing.Tuple[str, str]], + cookies: typing.List[typing.Tuple[str, str]], + auth: typing.Optional[typing.Tuple[str, str]], + proxy: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.Optional[typing.BinaryIO], + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. + """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client( + proxy=proxy, + timeout=timeout, + verify=verify, + http2=http2, + ) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/venv/lib/python3.12/site-packages/httpx/_models.py b/venv/lib/python3.12/site-packages/httpx/_models.py new file mode 100644 index 0000000..b8617cd --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_models.py @@ -0,0 +1,1215 @@ +import datetime +import email.message +import json as jsonlib +import typing +import urllib.request +from collections.abc import Mapping +from http.cookiejar import Cookie, CookieJar + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._multipart import get_multipart_boundary_from_content_type +from ._status_codes import codes +from ._types import ( + AsyncByteStream, + CookieTypes, + HeaderTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + ResponseContent, + ResponseExtensions, + SyncByteStream, +) +from ._urls import URL +from ._utils import ( + is_known_encoding, + normalize_header_key, + normalize_header_value, + obfuscate_sensitive_headers, + parse_content_type_charset, + parse_header_links, +) + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. 
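"Case-insensitive multi-dict" is doing real work in that sentence: lookups ignore case, duplicate keys are kept, and plain `[]` access joins duplicates with commas per RFC 7230. For example:

    import httpx

    headers = httpx.Headers([("set-cookie", "a=1"), ("Set-Cookie", "b=2")])
    assert headers["SET-COOKIE"] == "a=1, b=2"               # joined on plain access
    assert headers.get_list("set-cookie") == ["a=1", "b=2"]  # duplicates preserved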
+ """ + + def __init__( + self, + headers: typing.Optional[HeaderTypes] = None, + encoding: typing.Optional[str] = None, + ) -> None: + if headers is None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + elif isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, Mapping): + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers.items() + ] + else: + self._list = [ + ( + normalize_header_key(k, lower=False, encoding=encoding), + normalize_header_key(k, lower=True, encoding=encoding), + normalize_header_value(v, encoding), + ) + for k, v in headers + ] + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. + """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. + """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: typing.Dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma separated value when a key occurs multiple times. + """ + values_dict: typing.Dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurrences of the same key without concatenating into a single + comma separated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurrences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: + """ + Return a list of all header values for a given key. 
+ If `split_commas=True` is passed, then any comma separated header + values are split into multiple return strings. + """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: typing.Optional[HeaderTypes] = None) -> None: # type: ignore + headers = Headers(headers) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) + + def copy(self) -> "Headers": + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. 
+ """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: typing.Union[str, bytes], + url: typing.Union["URL", str], + *, + params: typing.Optional[QueryParamTypes] = None, + headers: typing.Optional[HeaderTypes] = None, + cookies: typing.Optional[CookieTypes] = None, + content: typing.Optional[RequestContent] = None, + data: typing.Optional[RequestData] = None, + files: typing.Optional[RequestFiles] = None, + json: typing.Optional[typing.Any] = None, + stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, + extensions: typing.Optional[RequestExtensions] = None, + ) -> None: + self.method = ( + method.decode("ascii").upper() + if isinstance(method, bytes) + else method.upper() + ) + self.url = URL(url) + if params is not None: + self.url = self.url.copy_merge_params(params=params) + self.headers = Headers(headers) + self.extensions = {} if extensions is None else extensions + + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + content_type: typing.Optional[str] = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. + if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* + # auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. 
+ self.stream = stream + + def _prepare(self, default_headers: typing.Dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: typing.List[typing.Tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> typing.Dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream"] + } + + def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.extensions = {} + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: typing.Optional[HeaderTypes] = None, + content: typing.Optional[ResponseContent] = None, + text: typing.Optional[str] = None, + html: typing.Optional[str] = None, + json: typing.Any = None, + stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, + request: typing.Optional[Request] = None, + extensions: typing.Optional[ResponseExtensions] = None, + history: typing.Optional[typing.List["Response"]] = None, + default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", + ) -> None: + self.status_code = status_code + self.headers = Headers(headers) + + self._request: typing.Optional[Request] = request + + # When follow_redirects=False and a redirect is received, + # the client will set `response.next_request`. 
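+        # (Editor's note, not upstream httpx) A hedged sketch of the manual
+        # redirect loop this enables; `client` and `request` are assumed to
+        # already exist:
+        #
+        #     response = client.send(request, follow_redirects=False)
+        #     while response.next_request is not None:
+        #         response = client.send(response.next_request)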
+        self.next_request: typing.Optional[Request] = None
+
+        self.extensions: ResponseExtensions = {} if extensions is None else extensions
+        self.history = [] if history is None else list(history)
+
+        self.is_closed = False
+        self.is_stream_consumed = False
+
+        self.default_encoding = default_encoding
+
+        if stream is None:
+            headers, stream = encode_response(content, text, html, json)
+            self._prepare(headers)
+            self.stream = stream
+            if isinstance(stream, ByteStream):
+                # Load the response body, except for streaming content.
+                self.read()
+        else:
+            # There's an important distinction between `Response(content=...)`,
+            # and `Response(stream=...)`.
+            #
+            # Using `content=...` implies automatically populated content headers,
+            # of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
+            #
+            # Using `stream=...` will not automatically include any content headers.
+            #
+            # As an end-user you don't really need `stream=...`. It's only
+            # useful when creating response instances having received a stream
+            # from the transport API.
+            self.stream = stream
+
+        self._num_bytes_downloaded = 0
+
+    def _prepare(self, default_headers: typing.Dict[str, str]) -> None:
+        for key, value in default_headers.items():
+            # Ignore Transfer-Encoding if the Content-Length has been set explicitly.
+            if key.lower() == "transfer-encoding" and "content-length" in self.headers:
+                continue
+            self.headers.setdefault(key, value)
+
+    @property
+    def elapsed(self) -> datetime.timedelta:
+        """
+        Returns the time taken for the complete request/response cycle.
+        """
+        if not hasattr(self, "_elapsed"):
+            raise RuntimeError(
+                "'.elapsed' may only be accessed after the response "
+                "has been read or closed."
+            )
+        return self._elapsed
+
+    @elapsed.setter
+    def elapsed(self, elapsed: datetime.timedelta) -> None:
+        self._elapsed = elapsed
+
+    @property
+    def request(self) -> Request:
+        """
+        Returns the request instance associated with the current response.
+        """
+        if self._request is None:
+            raise RuntimeError(
+                "The request instance has not been set on this response."
+            )
+        return self._request
+
+    @request.setter
+    def request(self, value: Request) -> None:
+        self._request = value
+
+    @property
+    def http_version(self) -> str:
+        try:
+            http_version: bytes = self.extensions["http_version"]
+        except KeyError:
+            return "HTTP/1.1"
+        else:
+            return http_version.decode("ascii", errors="ignore")
+
+    @property
+    def reason_phrase(self) -> str:
+        try:
+            reason_phrase: bytes = self.extensions["reason_phrase"]
+        except KeyError:
+            return codes.get_reason_phrase(self.status_code)
+        else:
+            return reason_phrase.decode("ascii", errors="ignore")
+
+    @property
+    def url(self) -> URL:
+        """
+        Returns the URL for which the request was made.
+        """
+        return self.request.url
+
+    @property
+    def content(self) -> bytes:
+        if not hasattr(self, "_content"):
+            raise ResponseNotRead()
+        return self._content
+
+    @property
+    def text(self) -> str:
+        if not hasattr(self, "_text"):
+            content = self.content
+            if not content:
+                self._text = ""
+            else:
+                decoder = TextDecoder(encoding=self.encoding or "utf-8")
+                self._text = "".join([decoder.decode(self.content), decoder.flush()])
+        return self._text
+
+    @property
+    def encoding(self) -> typing.Optional[str]:
+        """
+        Return an encoding to use for decoding the byte content into text.
+        The priority for determining this is given by...
+
+        * `.encoding = <>` has been set explicitly.
+        * The encoding as specified by the charset parameter in the Content-Type header.
+ * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. + """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not is_known_encoding(encoding): + if isinstance(self.default_encoding, str): + encoding = self.default_encoding + elif hasattr(self, "_content"): + encoding = self.default_encoding(self._content) + self._encoding = encoding or "utf-8" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + """ + Set the encoding to use for decoding the byte content into text. + + If the `text` attribute has been accessed, attempting to set the + encoding will throw a ValueError. + """ + if hasattr(self, "_text"): + raise ValueError( + "Setting encoding after `text` has been accessed is not allowed." + ) + self._encoding = value + + @property + def charset_encoding(self) -> typing.Optional[str]: + """ + Return the encoding, as specified by the Content-Type header. + """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + return parse_content_type_charset(content_type) + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: typing.List[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_informational(self) -> bool: + """ + A property which is `True` for 1xx status codes, `False` otherwise. + """ + return codes.is_informational(self.status_code) + + @property + def is_success(self) -> bool: + """ + A property which is `True` for 2xx status codes, `False` otherwise. + """ + return codes.is_success(self.status_code) + + @property + def is_redirect(self) -> bool: + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. + + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. + """ + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. + """ + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) 
+            codes.FOUND,
+            # 303 (Client should make a GET or HEAD request.)
+            codes.SEE_OTHER,
+            # 307 (Equiv. 302, but retain method)
+            codes.TEMPORARY_REDIRECT,
+            # 308 (Equiv. 301, but retain method)
+            codes.PERMANENT_REDIRECT,
+        )
+        and "Location" in self.headers
+    )
+
+    def raise_for_status(self) -> "Response":
+        """
+        Raise the `HTTPStatusError` if one occurred.
+        """
+        request = self._request
+        if request is None:
+            raise RuntimeError(
+                "Cannot call `raise_for_status` as the request "
+                "instance has not been set on this response."
+            )
+
+        if self.is_success:
+            return self
+
+        if self.has_redirect_location:
+            message = (
+                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
+                "Redirect location: '{0.headers[location]}'\n"
+                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
+            )
+        else:
+            message = (
+                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
+                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
+            )
+
+        status_class = self.status_code // 100
+        error_types = {
+            1: "Informational response",
+            3: "Redirect response",
+            4: "Client error",
+            5: "Server error",
+        }
+        error_type = error_types.get(status_class, "Invalid status code")
+        message = message.format(self, error_type=error_type)
+        raise HTTPStatusError(message, request=request, response=self)
+
+    def json(self, **kwargs: typing.Any) -> typing.Any:
+        return jsonlib.loads(self.content, **kwargs)
+
+    @property
+    def cookies(self) -> "Cookies":
+        if not hasattr(self, "_cookies"):
+            self._cookies = Cookies()
+            self._cookies.extract_cookies(self)
+        return self._cookies
+
+    @property
+    def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]:
+        """
+        Returns the parsed header links of the response, if any.
+        """
+        header = self.headers.get("link")
+        ldict = {}
+        if header:
+            links = parse_header_links(header)
+            for link in links:
+                key = link.get("rel") or link.get("url")
+                ldict[key] = link
+        return ldict
+
+    @property
+    def num_bytes_downloaded(self) -> int:
+        return self._num_bytes_downloaded
+
+    def __repr__(self) -> str:
+        return f"<Response [{self.status_code} {self.reason_phrase}]>"
+
+    def __getstate__(self) -> typing.Dict[str, typing.Any]:
+        return {
+            name: value
+            for name, value in self.__dict__.items()
+            if name not in ["extensions", "stream", "is_closed", "_decoder"]
+        }
+
+    def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None:
+        for name, value in state.items():
+            setattr(self, name, value)
+        self.is_closed = True
+        self.extensions = {}
+        self.stream = UnattachedStream()
+
+    def read(self) -> bytes:
+        """
+        Read and return the response content.
+        """
+        if not hasattr(self, "_content"):
+            self._content = b"".join(self.iter_bytes())
+        return self._content
+
+    def iter_bytes(
+        self, chunk_size: typing.Optional[int] = None
+    ) -> typing.Iterator[bytes]:
+        """
+        A byte-iterator over the decoded response content.
+        This allows us to handle gzip, deflate, and brotli encoded responses.
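+
+        (Editor's sketch, assuming the public streaming API:
+
+            with httpx.stream("GET", "https://example.com") as response:
+                for chunk in response.iter_bytes(chunk_size=8192):
+                    ...
+        )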
+ """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for raw_bytes in self.iter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_text( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw( + self, chunk_size: typing.Optional[int] = None + ) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. 
+ """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_text( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw( + self, chunk_size: typing.Optional[int] = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(typing.MutableMapping[str, str]): + """ + HTTP Cookies, as a mutable mapping. 
+ """ + + def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. + """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. + """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, + name: str, + default: typing.Optional[str] = None, + domain: typing.Optional[str] = None, + path: typing.Optional[str] = None, + ) -> typing.Optional[str]: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete( + self, + name: str, + domain: typing.Optional[str] = None, + path: typing.Optional[str] = None, + ) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. + """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear( + self, domain: typing.Optional[str] = None, path: typing.Optional[str] = None + ) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. 
+ """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: typing.Optional[CookieTypes] = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, response: Response) -> None: + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/venv/lib/python3.12/site-packages/httpx/_multipart.py b/venv/lib/python3.12/site-packages/httpx/_multipart.py new file mode 100644 index 0000000..5122d51 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_multipart.py @@ -0,0 +1,269 @@ +import io +import os +import typing +from pathlib import Path + +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) +from ._utils import ( + format_form_param, + guess_content_type, + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + + +def get_multipart_boundary_from_content_type( + content_type: typing.Optional[bytes], +) -> typing.Optional[bytes]: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__( + self, name: str, value: typing.Union[str, bytes, int, float, None] + ) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. 
+            )
+        if value is not None and not isinstance(value, (str, bytes, int, float)):
+            raise TypeError(
+                "Invalid type for value. Expected primitive type,"
+                f" got {type(value)}: {value!r}"
+            )
+        self.name = name
+        self.value: typing.Union[str, bytes] = (
+            value if isinstance(value, bytes) else primitive_value_to_str(value)
+        )
+
+    def render_headers(self) -> bytes:
+        if not hasattr(self, "_headers"):
+            name = format_form_param("name", self.name)
+            self._headers = b"".join(
+                [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"]
+            )
+
+        return self._headers
+
+    def render_data(self) -> bytes:
+        if not hasattr(self, "_data"):
+            self._data = to_bytes(self.value)
+
+        return self._data
+
+    def get_length(self) -> int:
+        headers = self.render_headers()
+        data = self.render_data()
+        return len(headers) + len(data)
+
+    def render(self) -> typing.Iterator[bytes]:
+        yield self.render_headers()
+        yield self.render_data()
+
+
+class FileField:
+    """
+    A single file field item, within a multipart form field.
+    """
+
+    CHUNK_SIZE = 64 * 1024
+
+    def __init__(self, name: str, value: FileTypes) -> None:
+        self.name = name
+
+        fileobj: FileContent
+
+        headers: typing.Dict[str, str] = {}
+        content_type: typing.Optional[str] = None
+
+        # This large tuple-based API largely mirrors requests' API.
+        # It would be good to think of better APIs for this that we could
+        # include in httpx 2.0, since variable length tuples (especially of
+        # 4 elements) are quite unwieldy.
+        if isinstance(value, tuple):
+            if len(value) == 2:
+                # neither the 3rd parameter (content_type) nor the 4th (headers)
+                # was included
+                filename, fileobj = value  # type: ignore
+            elif len(value) == 3:
+                filename, fileobj, content_type = value  # type: ignore
+            else:
+                # all 4 parameters included
+                filename, fileobj, content_type, headers = value  # type: ignore
+        else:
+            filename = Path(str(getattr(value, "name", "upload"))).name
+            fileobj = value
+
+        if content_type is None:
+            content_type = guess_content_type(filename)
+
+        has_content_type_header = any("content-type" in key.lower() for key in headers)
+        if content_type is not None and not has_content_type_header:
+            # Note that unlike requests, we ignore the content_type provided in the
+            # 3rd tuple element if it is also included in the headers; requests does
+            # the opposite (it overwrites the header with the 3rd tuple element).
+            headers["Content-Type"] = content_type
+
+        if isinstance(fileobj, io.StringIO):
+            raise TypeError(
+                "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'."
+            )
+        if isinstance(fileobj, io.TextIOBase):
+            raise TypeError(
+                "Multipart file uploads must be opened in binary mode, not text mode."
+            )
+
+        self.filename = filename
+        self.file = fileobj
+        self.headers = headers
+
+    def get_length(self) -> typing.Optional[int]:
+        headers = self.render_headers()
+
+        if isinstance(self.file, (str, bytes)):
+            return len(headers) + len(to_bytes(self.file))
+
+        file_length = peek_filelike_length(self.file)
+
+        # If we can't determine the filesize without reading it into memory,
+        # then return `None` here, to indicate an unknown file length.
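+        # (Editor's note, not upstream httpx) A `None` here propagates up
+        # through `MultipartStream.get_content_length()`, which then falls
+        # back to `Transfer-Encoding: chunked` instead of a `Content-Length`
+        # header; see `get_headers()` further down.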
+ if file_length is None: + return None + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + format_form_param("name", self.name), + ] + if self.filename: + filename = format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass + + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: + yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: typing.Optional[bytes] = None, + ) -> None: + if boundary is None: + boundary = os.urandom(16).hex().encode("ascii") + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: RequestData, files: RequestFiles + ) -> typing.Iterator[typing.Union[FileField, DataField]]: + for name, value in data.items(): + if isinstance(value, (tuple, list)): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def get_content_length(self) -> typing.Optional[int]: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ + boundary_length = len(self.boundary) + length = 0 + + for field in self.fields: + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" + + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length + + # Content stream interface. 
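+    # (Editor's note, not upstream httpx) A worked example of the length
+    # arithmetic in `get_content_length()` above, assuming a 32-character
+    # boundary and a single field whose headers plus payload render to
+    # 50 bytes:
+    #
+    #     (2 + 32 + 2) + 50 + 2 + (2 + 32 + 4) = 126 bytes
+    #
+    # matching b"--{boundary}\r\n", the rendered field, b"\r\n", and the
+    # closing b"--{boundary}--\r\n" respectively.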
+ + def get_headers(self) -> typing.Dict[str, str]: + content_length = self.get_content_length() + content_type = self.content_type + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/venv/lib/python3.12/site-packages/httpx/_status_codes.py b/venv/lib/python3.12/site-packages/httpx/_status_codes.py new file mode 100644 index 0000000..671c30e --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_status_codes.py @@ -0,0 +1,158 @@ +from enum import IntEnum + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> "codes": + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase # type: ignore[attr-defined] + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 + + @classmethod + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 + + @classmethod + def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ + return 500 <= value <= 599 + + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
+ """ + return 400 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py b/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5ff0c20 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc new file mode 100644 index 0000000..4cbc35d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..d3f1278 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc new file mode 100644 index 0000000..8c978cc Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc new file mode 100644 index 0000000..5effd66 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc new file mode 100644 index 0000000..8298cda Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py b/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py new file mode 100644 index 0000000..08cd392 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,179 @@ +import typing + +import sniffio + +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +_Message = typing.Dict[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.Dict[str, typing.Any]], typing.Coroutine[None, None, None] +] +_ASGIApp = typing.Callable[ + [typing.Dict[str, typing.Any], _Receive, _Send], typing.Coroutine[None, None, None] +] + + +def create_event() -> "Event": + if sniffio.current_async_library() == "trio": + import trio + + return trio.Event() + else: + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: typing.List[bytes]) -> None: + self._body = 
body
+
+    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+        yield b"".join(self._body)
+
+
+class ASGITransport(AsyncBaseTransport):
+    """
+    A custom AsyncTransport that handles sending requests directly to an ASGI app.
+    The simplest way to use this functionality is to use the `app` argument.
+
+    ```
+    client = httpx.AsyncClient(app=app)
+    ```
+
+    Alternatively, you can set up the transport instance explicitly.
+    This allows you to include any additional configuration arguments specific
+    to the ASGITransport class:
+
+    ```
+    transport = httpx.ASGITransport(
+        app=app,
+        root_path="/submount",
+        client=("1.2.3.4", 123)
+    )
+    client = httpx.AsyncClient(transport=transport)
+    ```
+
+    Arguments:
+
+    * `app` - The ASGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+    should be raised. Defaults to `True`. Can be set to `False` for use cases
+    such as testing the content of a client 500 response.
+    * `root_path` - The root path on which the ASGI application should be mounted.
+    * `client` - A two-tuple indicating the client IP and port of incoming requests.
+    """
+
+    def __init__(
+        self,
+        app: _ASGIApp,
+        raise_app_exceptions: bool = True,
+        root_path: str = "",
+        client: typing.Tuple[str, int] = ("127.0.0.1", 123),
+    ) -> None:
+        self.app = app
+        self.raise_app_exceptions = raise_app_exceptions
+        self.root_path = root_path
+        self.client = client
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        assert isinstance(request.stream, AsyncByteStream)
+
+        # ASGI scope.
+        scope = {
+            "type": "http",
+            "asgi": {"version": "3.0"},
+            "http_version": "1.1",
+            "method": request.method,
+            "headers": [(k.lower(), v) for (k, v) in request.headers.raw],
+            "scheme": request.url.scheme,
+            "path": request.url.path,
+            "raw_path": request.url.raw_path.split(b"?")[0],
+            "query_string": request.url.query,
+            "server": (request.url.host, request.url.port),
+            "client": self.client,
+            "root_path": self.root_path,
+        }
+
+        # Request.
+        request_body_chunks = request.stream.__aiter__()
+        request_complete = False
+
+        # Response.
+        status_code = None
+        response_headers = None
+        body_parts = []
+        response_started = False
+        response_complete = create_event()
+
+        # ASGI callables.
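+        # (Editor's note, not upstream httpx) The two callables below
+        # implement the ASGI messaging protocol; the message shapes
+        # exchanged with the app look like:
+        #
+        #     receive() -> {"type": "http.request", "body": b"...", "more_body": True}
+        #     send({"type": "http.response.start", "status": 200, "headers": [...]})
+        #     send({"type": "http.response.body", "body": b"...", "more_body": False})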
+
+        async def receive() -> typing.Dict[str, typing.Any]:
+            nonlocal request_complete
+
+            if request_complete:
+                await response_complete.wait()
+                return {"type": "http.disconnect"}
+
+            try:
+                body = await request_body_chunks.__anext__()
+            except StopAsyncIteration:
+                request_complete = True
+                return {"type": "http.request", "body": b"", "more_body": False}
+            return {"type": "http.request", "body": body, "more_body": True}
+
+        async def send(message: typing.Dict[str, typing.Any]) -> None:
+            nonlocal status_code, response_headers, response_started
+
+            if message["type"] == "http.response.start":
+                assert not response_started
+
+                status_code = message["status"]
+                response_headers = message.get("headers", [])
+                response_started = True
+
+            elif message["type"] == "http.response.body":
+                assert not response_complete.is_set()
+                body = message.get("body", b"")
+                more_body = message.get("more_body", False)
+
+                if body and request.method != "HEAD":
+                    body_parts.append(body)
+
+                if not more_body:
+                    response_complete.set()
+
+        try:
+            await self.app(scope, receive, send)
+        except Exception:  # noqa: PIE-786
+            if self.raise_app_exceptions:
+                raise
+
+            response_complete.set()
+            if status_code is None:
+                status_code = 500
+            if response_headers is None:
+                response_headers = {}
+
+        assert response_complete.is_set()
+        assert status_code is not None
+        assert response_headers is not None
+
+        stream = ASGIResponseStream(body_parts)
+
+        return Response(status_code, headers=response_headers, stream=stream)
diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/base.py b/venv/lib/python3.12/site-packages/httpx/_transports/base.py
new file mode 100644
index 0000000..f6fdfe6
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx/_transports/base.py
@@ -0,0 +1,82 @@
+import typing
+from types import TracebackType
+
+from .._models import Request, Response
+
+T = typing.TypeVar("T", bound="BaseTransport")
+A = typing.TypeVar("A", bound="AsyncBaseTransport")
+
+
+class BaseTransport:
+    def __enter__(self: T) -> T:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: typing.Optional[typing.Type[BaseException]] = None,
+        exc_value: typing.Optional[BaseException] = None,
+        traceback: typing.Optional[TracebackType] = None,
+    ) -> None:
+        self.close()
+
+    def handle_request(self, request: Request) -> Response:
+        """
+        Send a single HTTP request and return a response.
+
+        Developers shouldn't typically need to call into this API directly,
+        since the Client class provides all the higher level user-facing API
+        niceties.
+
+        In order to properly release any network resources, the response
+        stream should *either* be consumed immediately, with a call to
+        `response.stream.read()`, or else the `handle_request` call should
+        be followed with a try/finally block to ensure the stream is
+        always closed.
+
+        Example usage:
+
+            with httpx.HTTPTransport() as transport:
+                req = httpx.Request(
+                    method=b"GET",
+                    url=(b"https", b"www.example.com", 443, b"/"),
+                    headers=[(b"Host", b"www.example.com")],
+                )
+                resp = transport.handle_request(req)
+                body = resp.stream.read()
+                print(resp.status_code, resp.headers, body)
+
+        Takes a `Request` instance as the only argument.
+
+        Returns a `Response` instance.
+        """
+        raise NotImplementedError(
+            "The 'handle_request' method must be implemented."
+ ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/default.py b/venv/lib/python3.12/site-packages/httpx/_transports/default.py new file mode 100644 index 0000000..14a0873 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/default.py @@ -0,0 +1,383 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" +import contextlib +import typing +from types import TracebackType + +import httpcore + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream, VerifyTypes +from .._urls import URL +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
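+            #
+            # (Editor's note, not upstream httpx) Concretely: an
+            # `httpcore.ReadTimeout` is an instance of both
+            # `httpcore.TimeoutException` and `httpcore.ReadTimeout`, so both
+            # map entries match; the `issubclass()` check below keeps the
+            # more specific `httpx.ReadTimeout`.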
+ if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +HTTPCORE_EXC_MAP = { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, +} + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: typing.Optional[ProxyTypes] = None, + uds: typing.Optional[str] = None, + local_address: typing.Optional[str] = None, + retries: int = 0, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme == "socks5": + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+ ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', or 'socks5'," + f" but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. + self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]] = None, + exc_value: typing.Optional[BaseException] = None, + traceback: typing.Optional[TracebackType] = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: VerifyTypes = True, + cert: typing.Optional[CertTypes] = None, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + trust_env: bool = True, + proxy: typing.Optional[ProxyTypes] = None, + uds: typing.Optional[str] = None, + local_address: typing.Optional[str] = None, + retries: int = 0, + socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + ) -> None: + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + max_connections=limits.max_connections, + 
max_keepalive_connections=limits.max_keepalive_connections,
+                keepalive_expiry=limits.keepalive_expiry,
+                http1=http1,
+                http2=http2,
+                socket_options=socket_options,
+            )
+        elif proxy.url.scheme == "socks5":
+            try:
+                import socksio  # noqa
+            except ImportError:  # pragma: no cover
+                raise ImportError(
+                    "Using SOCKS proxy, but the 'socksio' package is not installed. "
+                    "Make sure to install httpx using `pip install httpx[socks]`."
+                ) from None
+
+            self._pool = httpcore.AsyncSOCKSProxy(
+                proxy_url=httpcore.URL(
+                    scheme=proxy.url.raw_scheme,
+                    host=proxy.url.raw_host,
+                    port=proxy.url.port,
+                    target=proxy.url.raw_path,
+                ),
+                proxy_auth=proxy.raw_auth,
+                ssl_context=ssl_context,
+                max_connections=limits.max_connections,
+                max_keepalive_connections=limits.max_keepalive_connections,
+                keepalive_expiry=limits.keepalive_expiry,
+                http1=http1,
+                http2=http2,
+            )
+        else:  # pragma: no cover
+            raise ValueError(
+                "Proxy protocol must be either 'http', 'https', or 'socks5',"
+                f" but got {proxy.url.scheme!r}."
+            )
+
+    async def __aenter__(self: A) -> A:  # Use generics for subclass support.
+        await self._pool.__aenter__()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: typing.Optional[typing.Type[BaseException]] = None,
+        exc_value: typing.Optional[BaseException] = None,
+        traceback: typing.Optional[TracebackType] = None,
+    ) -> None:
+        with map_httpcore_exceptions():
+            await self._pool.__aexit__(exc_type, exc_value, traceback)
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        assert isinstance(request.stream, AsyncByteStream)
+
+        req = httpcore.Request(
+            method=request.method,
+            url=httpcore.URL(
+                scheme=request.url.raw_scheme,
+                host=request.url.raw_host,
+                port=request.url.port,
+                target=request.url.raw_path,
+            ),
+            headers=request.headers.raw,
+            content=request.stream,
+            extensions=request.extensions,
+        )
+        with map_httpcore_exceptions():
+            resp = await self._pool.handle_async_request(req)
+
+        assert isinstance(resp.stream, typing.AsyncIterable)
+
+        return Response(
+            status_code=resp.status,
+            headers=resp.headers,
+            stream=AsyncResponseStream(resp.stream),
+            extensions=resp.extensions,
+        )
+
+    async def aclose(self) -> None:
+        await self._pool.aclose()
diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/mock.py b/venv/lib/python3.12/site-packages/httpx/_transports/mock.py
new file mode 100644
index 0000000..82043da
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx/_transports/mock.py
@@ -0,0 +1,38 @@
+import typing
+
+from .._models import Request, Response
+from .base import AsyncBaseTransport, BaseTransport
+
+SyncHandler = typing.Callable[[Request], Response]
+AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]]
+
+
+class MockTransport(AsyncBaseTransport, BaseTransport):
+    def __init__(self, handler: typing.Union[SyncHandler, AsyncHandler]) -> None:
+        self.handler = handler
+
+    def handle_request(
+        self,
+        request: Request,
+    ) -> Response:
+        request.read()
+        response = self.handler(request)
+        if not isinstance(response, Response):  # pragma: no cover
+            raise TypeError("Cannot use an async handler in a sync Client")
+        return response
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        await request.aread()
+        response = self.handler(request)
+
+        # Allow handler to *optionally* be an `async` function.
+        # If it is, then the `response` variable needs to be awaited to actually
+        # return the result.
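+        #
+        # A minimal illustrative sketch of using a handler with MockTransport
+        # (the handler name and URL here are hypothetical, not part of this module):
+        #
+        #     async def handler(request: Request) -> Response:
+        #         return Response(200, json={"hello": "world"})
+        #
+        #     transport = MockTransport(handler)
+        #     async with httpx.AsyncClient(transport=transport) as client:
+        #         r = await client.get("https://testserver/")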
+
+        if not isinstance(response, Response):
+            response = await response
+
+        return response
diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py b/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py
new file mode 100644
index 0000000..a23d42c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py
@@ -0,0 +1,144 @@
+import io
+import itertools
+import sys
+import typing
+
+from .._models import Request, Response
+from .._types import SyncByteStream
+from .base import BaseTransport
+
+if typing.TYPE_CHECKING:
+    from _typeshed import OptExcInfo  # pragma: no cover
+    from _typeshed.wsgi import WSGIApplication  # pragma: no cover
+
+_T = typing.TypeVar("_T")
+
+
+def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]:
+    body = iter(body)
+    for chunk in body:
+        if chunk:
+            return itertools.chain([chunk], body)
+    return []
+
+
+class WSGIByteStream(SyncByteStream):
+    def __init__(self, result: typing.Iterable[bytes]) -> None:
+        self._close = getattr(result, "close", None)
+        self._result = _skip_leading_empty_chunks(result)
+
+    def __iter__(self) -> typing.Iterator[bytes]:
+        for part in self._result:
+            yield part
+
+    def close(self) -> None:
+        if self._close is not None:
+            self._close()
+
+
+class WSGITransport(BaseTransport):
+    """
+    A custom transport that handles sending requests directly to a WSGI app.
+    The simplest way to use this functionality is to use the `app` argument.
+
+    ```
+    client = httpx.Client(app=app)
+    ```
+
+    Alternatively, you can set up the transport instance explicitly.
+    This allows you to include any additional configuration arguments specific
+    to the WSGITransport class:
+
+    ```
+    transport = httpx.WSGITransport(
+        app=app,
+        script_name="/submount",
+        remote_addr="1.2.3.4"
+    )
+    client = httpx.Client(transport=transport)
+    ```
+
+    Arguments:
+
+    * `app` - The WSGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+      should be raised. Defaults to `True`. Can be set to `False` for use cases
+      such as testing the content of a client 500 response.
+    * `script_name` - The root path on which the WSGI application should be mounted.
+    * `remote_addr` - A string indicating the client IP of incoming requests.
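+    * `wsgi_errors` - A text stream used for the application's error output
+      (exposed to the app as `wsgi.errors`). Defaults to `sys.stderr`.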
+ ``` + """ + + def __init__( + self, + app: "WSGIApplication", + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.Optional[typing.TextIO] = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: typing.List[typing.Tuple[str, str]], + exc_info: typing.Optional["OptExcInfo"] = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/venv/lib/python3.12/site-packages/httpx/_types.py b/venv/lib/python3.12/site-packages/httpx/_types.py new file mode 100644 index 0000000..649d101 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_types.py @@ -0,0 +1,134 @@ +""" +Type definitions for type checking purposes. 
+""" + +import ssl +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +RawURL = NamedTuple( + "RawURL", + [ + ("raw_scheme", bytes), + ("raw_host", bytes), + ("port", Optional[int]), + ("raw_path", bytes), + ], +) + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, +] + +HeaderTypes = Union[ + "Headers", + Mapping[str, str], + Mapping[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +CertTypes = Union[ + # certfile + str, + # (certfile, keyfile) + Tuple[str, Optional[str]], + # (certfile, keyfile, password) + Tuple[str, Optional[str], Optional[str]], +] +VerifyTypes = Union[str, bool, ssl.SSLContext] +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxyTypes = Union[URLTypes, "Proxy"] +ProxiesTypes = Union[ProxyTypes, Dict[URLTypes, Union[None, ProxyTypes]]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = MutableMapping[str, Any] + +RequestData = Mapping[str, Any] + +FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = MutableMapping[str, Any] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/httpx/_urlparse.py b/venv/lib/python3.12/site-packages/httpx/_urlparse.py new file mode 100644 index 0000000..07bbea9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_urlparse.py @@ -0,0 +1,502 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. 
+
+We rely on this implementation rather than the one in Python's stdlib, because:
+
+* It provides more complete URL validation.
+* It properly differentiates between an empty querystring and an absent querystring,
+  to distinguish URLs with a trailing '?'.
+* It handles scheme, hostname, port, and path normalization.
+* It supports IDNA hostnames, normalizing them to their encoded form.
+* The API supports passing individual components, as well as the complete URL string.
+
+Previously we relied on the excellent `rfc3986` package to handle URL parsing and
+validation, but this module provides a simpler alternative, with less indirection
+required.
+"""
+import ipaddress
+import re
+import typing
+
+import idna
+
+from ._exceptions import InvalidURL
+
+MAX_URL_LENGTH = 65536
+
+# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3
+UNRESERVED_CHARACTERS = (
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
+)
+SUB_DELIMS = "!$&'()*+,;="
+
+PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}")
+
+
+# {scheme}:      (optional)
+# //{authority}  (optional)
+# {path}
+# ?{query}       (optional)
+# #{fragment}    (optional)
+URL_REGEX = re.compile(
+    (
+        r"(?:(?P<scheme>{scheme}):)?"
+        r"(?://(?P<authority>{authority}))?"
+        r"(?P<path>{path})"
+        r"(?:\?(?P<query>{query}))?"
+        r"(?:#(?P<fragment>{fragment}))?"
+    ).format(
+        scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?",
+        authority="[^/?#]*",
+        path="[^?#]*",
+        query="[^#]*",
+        fragment=".*",
+    )
+)
+
+# {userinfo}@  (optional)
+# {host}
+# :{port}      (optional)
+AUTHORITY_REGEX = re.compile(
+    (
+        r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?"
+    ).format(
+        userinfo=".*",  # Any character sequence.
+        host="(\\[.*\\]|[^:@]*)",  # Either any character sequence excluding ':' or '@',
+        # or an IPv6 address enclosed within square brackets.
+        port=".*",  # Any character sequence.
+    )
+)
+
+
+# If we call urlparse with an individual component, then we need to regex
+# validate that component individually.
+# Note that we're duplicating the same strings as above. Shock! Horror!!
+COMPONENT_REGEX = {
+    "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"),
+    "authority": re.compile("[^/?#]*"),
+    "path": re.compile("[^?#]*"),
+    "query": re.compile("[^#]*"),
+    "fragment": re.compile(".*"),
+    "userinfo": re.compile("[^@]*"),
+    "host": re.compile("(\\[.*\\]|[^:]*)"),
+    "port": re.compile(".*"),
+}
+
+
+# We use these simple regexes as a first pass before handing off to
+# the stdlib 'ipaddress' module for IP address validation.
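+# For example, "127.0.0.1" is matched by the IPv4-style pattern below and then
+# validated with ipaddress.IPv4Address, while "[::1]" is matched by the
+# IPv6-style pattern and validated with ipaddress.IPv6Address.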
+IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
+IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$")
+
+
+class ParseResult(typing.NamedTuple):
+    scheme: str
+    userinfo: str
+    host: str
+    port: typing.Optional[int]
+    path: str
+    query: typing.Optional[str]
+    fragment: typing.Optional[str]
+
+    @property
+    def authority(self) -> str:
+        return "".join(
+            [
+                f"{self.userinfo}@" if self.userinfo else "",
+                f"[{self.host}]" if ":" in self.host else self.host,
+                f":{self.port}" if self.port is not None else "",
+            ]
+        )
+
+    @property
+    def netloc(self) -> str:
+        return "".join(
+            [
+                f"[{self.host}]" if ":" in self.host else self.host,
+                f":{self.port}" if self.port is not None else "",
+            ]
+        )
+
+    def copy_with(self, **kwargs: typing.Optional[str]) -> "ParseResult":
+        if not kwargs:
+            return self
+
+        defaults = {
+            "scheme": self.scheme,
+            "authority": self.authority,
+            "path": self.path,
+            "query": self.query,
+            "fragment": self.fragment,
+        }
+        defaults.update(kwargs)
+        return urlparse("", **defaults)
+
+    def __str__(self) -> str:
+        authority = self.authority
+        return "".join(
+            [
+                f"{self.scheme}:" if self.scheme else "",
+                f"//{authority}" if authority else "",
+                self.path,
+                f"?{self.query}" if self.query is not None else "",
+                f"#{self.fragment}" if self.fragment is not None else "",
+            ]
+        )
+
+
+def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult:
+    # Initial basic checks on allowable URLs.
+    # ---------------------------------------
+
+    # Hard limit the maximum allowable URL length.
+    if len(url) > MAX_URL_LENGTH:
+        raise InvalidURL("URL too long")
+
+    # If a URL includes any ASCII control characters including \t, \r, \n,
+    # then treat it as invalid.
+    if any(char.isascii() and not char.isprintable() for char in url):
+        raise InvalidURL("Invalid non-printable ASCII character in URL")
+
+    # Some keyword arguments require special handling.
+    # ------------------------------------------------
+
+    # Coerce "port" to a string, if it is provided as an integer.
+    if "port" in kwargs:
+        port = kwargs["port"]
+        kwargs["port"] = str(port) if isinstance(port, int) else port
+
+    # Replace "netloc" with "host" and "port".
+    if "netloc" in kwargs:
+        netloc = kwargs.pop("netloc") or ""
+        kwargs["host"], _, kwargs["port"] = netloc.partition(":")
+
+    # Replace "username" and/or "password" with "userinfo".
+    if "username" in kwargs or "password" in kwargs:
+        username = quote(kwargs.pop("username", "") or "")
+        password = quote(kwargs.pop("password", "") or "")
+        kwargs["userinfo"] = f"{username}:{password}" if password else username
+
+    # Replace "raw_path" with "path" and "query".
+    if "raw_path" in kwargs:
+        raw_path = kwargs.pop("raw_path") or ""
+        kwargs["path"], separator, kwargs["query"] = raw_path.partition("?")
+        if not separator:
+            kwargs["query"] = None
+
+    # Ensure that IPv6 "host" addresses are always escaped with "[...]".
+    if "host" in kwargs:
+        host = kwargs.get("host") or ""
+        if ":" in host and not (host.startswith("[") and host.endswith("]")):
+            kwargs["host"] = f"[{host}]"
+
+    # If any keyword arguments are provided, ensure they are valid.
+    # -------------------------------------------------------------
+
+    for key, value in kwargs.items():
+        if value is not None:
+            if len(value) > MAX_URL_LENGTH:
+                raise InvalidURL(f"URL component '{key}' too long")
+
+            # If a component includes any ASCII control characters including \t, \r, \n,
+            # then treat it as invalid.
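+            # e.g. urlparse("https://example.com", query="a=1\nb=2") is rejected
+            # here with InvalidURL, rather than the "\n" leaking into the URL.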
+ if any(char.isascii() and not char.isprintable() for char in value): + raise InvalidURL( + f"Invalid non-printable ASCII character in URL component '{key}'" + ) + + # Ensure that keyword arguments match as a valid regex. + if not COMPONENT_REGEX[key].fullmatch(value): + raise InvalidURL(f"Invalid URL component '{key}'") + + # The URL_REGEX will always match, but may have empty components. + url_match = URL_REGEX.match(url) + assert url_match is not None + url_dict = url_match.groupdict() + + # * 'scheme', 'authority', and 'path' may be empty strings. + # * 'query' may be 'None', indicating no trailing "?" portion. + # Any string including the empty string, indicates a trailing "?". + # * 'fragment' may be 'None', indicating no trailing "#" portion. + # Any string including the empty string, indicates a trailing "#". + scheme = kwargs.get("scheme", url_dict["scheme"]) or "" + authority = kwargs.get("authority", url_dict["authority"]) or "" + path = kwargs.get("path", url_dict["path"]) or "" + query = kwargs.get("query", url_dict["query"]) + fragment = kwargs.get("fragment", url_dict["fragment"]) + + # The AUTHORITY_REGEX will always match, but may have empty components. + authority_match = AUTHORITY_REGEX.match(authority) + assert authority_match is not None + authority_dict = authority_match.groupdict() + + # * 'userinfo' and 'host' may be empty strings. + # * 'port' may be 'None'. + userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" + host = kwargs.get("host", authority_dict["host"]) or "" + port = kwargs.get("port", authority_dict["port"]) + + # Normalize and validate each component. + # We end up with a parsed representation of the URL, + # with components that are plain ASCII bytestrings. + parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") + parsed_host: str = encode_host(host) + parsed_port: typing.Optional[int] = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_authority: + path = normalize_path(path) + + # The GEN_DELIMS set is... : / ? # [ ] @ + # These do not need to be percent-quoted unless they serve as delimiters for the + # specific component. + + # For 'path' we need to drop ? and # from the GEN_DELIMS set. + parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@") + # For 'query' we need to drop '#' from the GEN_DELIMS set. + parsed_query: typing.Optional[str] = ( + None if query is None else quote(query, safe=SUB_DELIMS + ":/?[]@") + ) + # For 'fragment' we can include all of the GEN_DELIMS set. + parsed_fragment: typing.Optional[str] = ( + None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@") + ) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. + return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_fragment, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." 
dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." + try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + return quote(host.lower(), safe=SUB_DELIMS) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port( + port: typing.Optional[typing.Union[str, int]], scheme: str +) -> typing.Optional[int]: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." + if port is None or port == "": + return None + + try: + port_as_int = int(port) + except ValueError: + raise InvalidURL(f"Invalid port: {port!r}") + + # See https://url.spec.whatwg.org/#url-miscellaneous + default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( + scheme + ) + if port_as_int == default_port: + return None + return port_as_int + + +def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: + """ + Path validation rules that depend on if the URL contains + a scheme or authority component. + + See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 + """ + if has_authority: + # If a URI contains an authority component, then the path component + # must either be empty or begin with a slash ("/") character." + if path and not path.startswith("/"): + raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") + else: + # If a URI does not contain an authority component, then the path cannot begin + # with two slash characters ("//"). + if path.startswith("//"): + raise InvalidURL( + "URLs with no authority component cannot have a path starting with '//'" + ) + # In addition, a URI reference (Section 4.1) may be a relative-path reference, + # in which case the first path segment cannot contain a colon (":") character. + if path.startswith(":") and not has_scheme: + raise InvalidURL( + "URLs with no scheme component cannot have a path starting with ':'" + ) + + +def normalize_path(path: str) -> str: + """ + Drop "." and ".." segments from a URL path. 
+
+    For example:
+
+        normalize_path("/path/./to/somewhere/..") == "/path/to"
+    """
+    # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4
+    components = path.split("/")
+    output: typing.List[str] = []
+    for component in components:
+        if component == ".":
+            pass
+        elif component == "..":
+            if output and output != [""]:
+                output.pop()
+        else:
+            output.append(component)
+    return "/".join(output)
+
+
+def percent_encode(char: str) -> str:
+    """
+    Replace a single character with the percent-encoded representation.
+
+    Characters outside the ASCII range are represented with the percent-encoded
+    representation of their UTF-8 byte sequence.
+
+    For example:
+
+        percent_encode(" ") == "%20"
+    """
+    return "".join([f"%{byte:02x}" for byte in char.encode("utf-8")]).upper()
+
+
+def is_safe(string: str, safe: str = "/") -> bool:
+    """
+    Determine if a given string is already quote-safe.
+    """
+    NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + "%"
+
+    # All characters must already be non-escaping or '%'
+    for char in string:
+        if char not in NON_ESCAPED_CHARS:
+            return False
+
+    return True
+
+
+def percent_encoded(string: str, safe: str = "/") -> str:
+    """
+    Use percent-encoding to quote a string.
+    """
+    if is_safe(string, safe=safe):
+        return string
+
+    NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe
+    return "".join(
+        [char if char in NON_ESCAPED_CHARS else percent_encode(char) for char in string]
+    )
+
+
+def quote(string: str, safe: str = "/") -> str:
+    """
+    Use percent-encoding to quote a string, omitting existing '%xx' escape sequences.
+
+    See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1
+
+    * `string`: The string to be percent-escaped.
+    * `safe`: A string containing characters that may be treated as safe, and do not
+      need to be escaped. Unreserved characters are always treated as safe.
+      See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3
+    """
+    parts = []
+    current_position = 0
+    for match in re.finditer(PERCENT_ENCODED_REGEX, string):
+        start_position, end_position = match.start(), match.end()
+        matched_text = match.group(0)
+        # Add any text up to the '%xx' escape sequence.
+        if start_position != current_position:
+            leading_text = string[current_position:start_position]
+            parts.append(percent_encoded(leading_text, safe=safe))
+
+        # Add the '%xx' escape sequence.
+        parts.append(matched_text)
+        current_position = end_position
+
+    # Add any text after the final '%xx' escape sequence.
+    if current_position != len(string):
+        trailing_text = string[current_position:]
+        parts.append(percent_encoded(trailing_text, safe=safe))
+
+    return "".join(parts)
+
+
+def urlencode(items: typing.List[typing.Tuple[str, str]]) -> str:
+    """
+    We can use a much simpler version of the stdlib urlencode here because
+    we don't need to handle a bunch of different typing cases, such as bytes vs str.
+
+    https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926
+
+    Note that we use '%20' encoding for spaces, and '%2F' for '/'.
+    This is slightly different than `requests`, but is the behaviour that browsers use.
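+
+    For example (illustrative):
+
+        urlencode([("a", "b c"), ("path", "/x/y")])
+        # -> "a=b%20c&path=%2Fx%2Fy"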
+
+    See
+    - https://github.com/encode/httpx/issues/2536
+    - https://github.com/encode/httpx/issues/2721
+    - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
+    """
+    return "&".join(
+        [
+            percent_encoded(k, safe="") + "=" + percent_encoded(v, safe="")
+            for k, v in items
+        ]
+    )
diff --git a/venv/lib/python3.12/site-packages/httpx/_urls.py b/venv/lib/python3.12/site-packages/httpx/_urls.py
new file mode 100644
index 0000000..26202e9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx/_urls.py
@@ -0,0 +1,648 @@
+import typing
+from urllib.parse import parse_qs, unquote
+
+import idna
+
+from ._types import QueryParamTypes, RawURL, URLTypes
+from ._urlparse import urlencode, urlparse
+from ._utils import primitive_value_to_str
+
+
+class URL:
+    """
+    url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink")
+
+    assert url.scheme == "https"
+    assert url.username == "jo@email.com"
+    assert url.password == "a secret"
+    assert url.userinfo == b"jo%40email.com:a%20secret"
+    assert url.host == "müller.de"
+    assert url.raw_host == b"xn--mller-kva.de"
+    assert url.port == 1234
+    assert url.netloc == b"xn--mller-kva.de:1234"
+    assert url.path == "/pa th"
+    assert url.query == b"search=ab"
+    assert url.raw_path == b"/pa%20th?search=ab"
+    assert url.fragment == "anchorlink"
+
+    The components of a URL are broken down like this:
+
+       https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink
+    [scheme]   [  username  ] [password] [ host ][port][ path ] [ query ] [fragment]
+            [       userinfo        ] [   netloc   ][    raw_path    ]
+
+    Note that:
+
+    * `url.scheme` is normalized to always be lowercased.
+
+    * `url.host` is normalized to always be lowercased. Internationalized domain
+      names are represented in unicode, without IDNA encoding applied. For instance:
+
+      url = httpx.URL("http://中国.icom.museum")
+      assert url.host == "中国.icom.museum"
+      url = httpx.URL("http://xn--fiqs8s.icom.museum")
+      assert url.host == "中国.icom.museum"
+
+    * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded.
+
+      url = httpx.URL("http://中国.icom.museum")
+      assert url.raw_host == b"xn--fiqs8s.icom.museum"
+      url = httpx.URL("http://xn--fiqs8s.icom.museum")
+      assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+    * `url.port` is either None or an integer. URLs that include the default port for
+      "http", "https", "ws", "wss", and "ftp" schemes have their port
+      normalized to `None`.
+
+      assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80")
+      assert httpx.URL("http://example.com").port is None
+      assert httpx.URL("http://example.com:80").port is None
+
+    * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work
+      with `url.username` and `url.password` instead, which handle the URL escaping.
+
+    * `url.raw_path` is raw bytes of both the path and query, without URL escaping.
+      This portion is used as the target when constructing HTTP requests. Usually you'll
+      want to work with `url.path` instead.
+
+    * `url.query` is raw bytes, without URL escaping. A URL query string portion can
+      only be properly URL escaped when decoding the parameter names and values
+      themselves.
+ """ + + def __init__( + self, url: typing.Union["URL", str] = "", **kwargs: typing.Any + ) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. + for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + "Invalid type for url. Expected str or httpx.URL," + f" got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. 
+
+        Examples:
+
+            url = httpx.URL("http://www.EXAMPLE.org")
+            assert url.raw_host == b"www.example.org"
+
+            url = httpx.URL("http://中国.icom.museum")
+            assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+            url = httpx.URL("http://xn--fiqs8s.icom.museum")
+            assert url.raw_host == b"xn--fiqs8s.icom.museum"
+
+            url = httpx.URL("https://[::ffff:192.168.0.1]")
+            assert url.raw_host == b"::ffff:192.168.0.1"
+        """
+        return self._uri_reference.host.encode("ascii")
+
+    @property
+    def port(self) -> typing.Optional[int]:
+        """
+        The URL port as an integer.
+
+        Note that the URL class performs port normalization as per the WHATWG spec.
+        Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always
+        treated as `None`.
+
+        For example:
+
+            assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
+            assert httpx.URL("http://www.example.com:80").port is None
+        """
+        return self._uri_reference.port
+
+    @property
+    def netloc(self) -> bytes:
+        """
+        Either `<host>` or `<host>:<port>` as bytes.
+        Always normalized to lowercase, and IDNA encoded.
+
+        This property may be used for generating the value of a request
+        "Host" header.
+        """
+        return self._uri_reference.netloc.encode("ascii")
+
+    @property
+    def path(self) -> str:
+        """
+        The URL path as a string. Excluding the query string, and URL decoded.
+
+        For example:
+
+            url = httpx.URL("https://example.com/pa%20th")
+            assert url.path == "/pa th"
+        """
+        path = self._uri_reference.path or "/"
+        return unquote(path)
+
+    @property
+    def query(self) -> bytes:
+        """
+        The URL query string, as raw bytes, excluding the leading b"?".
+
+        This is necessarily a bytewise interface, because we cannot
+        perform URL decoding of this representation until we've parsed
+        the keys and values into a QueryParams instance.
+
+        For example:
+
+            url = httpx.URL("https://example.com/?filter=some%20search%20terms")
+            assert url.query == b"filter=some%20search%20terms"
+        """
+        query = self._uri_reference.query or ""
+        return query.encode("ascii")
+
+    @property
+    def params(self) -> "QueryParams":
+        """
+        The URL query parameters, neatly parsed and packaged into an immutable
+        multidict representation.
+        """
+        return QueryParams(self._uri_reference.query)
+
+    @property
+    def raw_path(self) -> bytes:
+        """
+        The complete URL path and query string as raw bytes.
+        Used as the target when constructing HTTP requests.
+
+        For example:
+
+            GET /users?search=some%20text HTTP/1.1
+            Host: www.example.org
+            Connection: close
+        """
+        path = self._uri_reference.path or "/"
+        if self._uri_reference.query is not None:
+            path += "?" + self._uri_reference.query
+        return path.encode("ascii")
+
+    @property
+    def fragment(self) -> str:
+        """
+        The URL fragment, as used in HTML anchors.
+        As a string, without the leading '#'.
+        """
+        return unquote(self._uri_reference.fragment or "")
+
+    @property
+    def raw(self) -> RawURL:
+        """
+        Provides the (scheme, host, port, target) for the outgoing request.
+
+        In older versions of `httpx` this was used in the low-level transport API.
+        We no longer use `RawURL`, and this property will be deprecated
+        in a future release.
+        """
+        return RawURL(
+            self.raw_scheme,
+            self.raw_host,
+            self.port,
+            self.raw_path,
+        )
+
+    @property
+    def is_absolute_url(self) -> bool:
+        """
+        Return `True` for absolute URLs such as 'http://example.com/path',
+        and `False` for relative URLs such as '/path'.
+        """
+        # We don't use `.is_absolute` from `rfc3986` because it treats
+        # URLs with a fragment portion as not absolute.
+        # What we actually care about is if the URL provides
+        # a scheme and hostname to which connections should be made.
+        return bool(self._uri_reference.scheme and self._uri_reference.host)
+
+    @property
+    def is_relative_url(self) -> bool:
+        """
+        Return `False` for absolute URLs such as 'http://example.com/path',
+        and `True` for relative URLs such as '/path'.
+        """
+        return not self.is_absolute_url
+
+    def copy_with(self, **kwargs: typing.Any) -> "URL":
+        """
+        Copy this URL, returning a new URL with some components altered.
+        Accepts the same set of parameters as the components that are made
+        available via properties on the `URL` class.
+
+        For example:
+
+        url = httpx.URL("https://www.example.com").copy_with(
+            username="jo@email.com", password="a secret"
+        )
+        assert url == "https://jo%40email.com:a%20secret@www.example.com"
+        """
+        return URL(self, **kwargs)
+
+    def copy_set_param(self, key: str, value: typing.Any = None) -> "URL":
+        return self.copy_with(params=self.params.set(key, value))
+
+    def copy_add_param(self, key: str, value: typing.Any = None) -> "URL":
+        return self.copy_with(params=self.params.add(key, value))
+
+    def copy_remove_param(self, key: str) -> "URL":
+        return self.copy_with(params=self.params.remove(key))
+
+    def copy_merge_params(self, params: QueryParamTypes) -> "URL":
+        return self.copy_with(params=self.params.merge(params))
+
+    def join(self, url: URLTypes) -> "URL":
+        """
+        Return an absolute URL, using this URL as the base.
+
+        Eg.
+
+        url = httpx.URL("https://www.example.com/test")
+        url = url.join("/new/path")
+        assert url == "https://www.example.com/new/path"
+        """
+        from urllib.parse import urljoin
+
+        return URL(urljoin(str(self), str(URL(url))))
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: typing.Any) -> bool:
+        return isinstance(other, (URL, str)) and str(self) == str(URL(other))
+
+    def __str__(self) -> str:
+        return str(self._uri_reference)
+
+    def __repr__(self) -> str:
+        scheme, userinfo, host, port, path, query, fragment = self._uri_reference
+
+        if ":" in userinfo:
+            # Mask any password component.
+            userinfo = f'{userinfo.split(":")[0]}:[secure]'
+
+        authority = "".join(
+            [
+                f"{userinfo}@" if userinfo else "",
+                f"[{host}]" if ":" in host else host,
+                f":{port}" if port is not None else "",
+            ]
+        )
+        url = "".join(
+            [
+                f"{self.scheme}:" if scheme else "",
+                f"//{authority}" if authority else "",
+                path,
+                f"?{query}" if query is not None else "",
+                f"#{fragment}" if fragment is not None else "",
+            ]
+        )
+
+        return f"{self.__class__.__name__}({url!r})"
+
+
+class QueryParams(typing.Mapping[str, str]):
+    """
+    URL query parameters, as a multi-dict.
+    """
+
+    def __init__(
+        self, *args: typing.Optional[QueryParamTypes], **kwargs: typing.Any
+    ) -> None:
+        assert len(args) < 2, "Too many arguments."
+        assert not (args and kwargs), "Cannot mix named and unnamed arguments."
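+
+        # A single positional argument may be a str like "a=1&b=2", bytes, a
+        # dict, a list or tuple of two-item pairs, or another QueryParams.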
+ + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: typing.List[typing.Tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> typing.List[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> "QueryParams": + """ + Return a new QueryParams instance, setting the value of a key. 
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.set("a", "456")
+        assert q == httpx.QueryParams("a=456")
+        """
+        q = QueryParams()
+        q._dict = dict(self._dict)
+        q._dict[str(key)] = [primitive_value_to_str(value)]
+        return q
+
+    def add(self, key: str, value: typing.Any = None) -> "QueryParams":
+        """
+        Return a new QueryParams instance, setting or appending the value of a key.
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.add("a", "456")
+        assert q == httpx.QueryParams("a=123&a=456")
+        """
+        q = QueryParams()
+        q._dict = dict(self._dict)
+        q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)]
+        return q
+
+    def remove(self, key: str) -> "QueryParams":
+        """
+        Return a new QueryParams instance, removing the value of a key.
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.remove("a")
+        assert q == httpx.QueryParams("")
+        """
+        q = QueryParams()
+        q._dict = dict(self._dict)
+        q._dict.pop(str(key), None)
+        return q
+
+    def merge(self, params: typing.Optional[QueryParamTypes] = None) -> "QueryParams":
+        """
+        Return a new QueryParams instance, updated with the given params.
+
+        Usage:
+
+        q = httpx.QueryParams("a=123")
+        q = q.merge({"b": "456"})
+        assert q == httpx.QueryParams("a=123&b=456")
+
+        q = httpx.QueryParams("a=123")
+        q = q.merge({"a": "456", "b": "789"})
+        assert q == httpx.QueryParams("a=456&b=789")
+        """
+        q = QueryParams(params)
+        q._dict = {**self._dict, **q._dict}
+        return q
+
+    def __getitem__(self, key: typing.Any) -> str:
+        return self._dict[key][0]
+
+    def __contains__(self, key: typing.Any) -> bool:
+        return key in self._dict
+
+    def __iter__(self) -> typing.Iterator[typing.Any]:
+        return iter(self.keys())
+
+    def __len__(self) -> int:
+        return len(self._dict)
+
+    def __bool__(self) -> bool:
+        return bool(self._dict)
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __eq__(self, other: typing.Any) -> bool:
+        if not isinstance(other, self.__class__):
+            return False
+        return sorted(self.multi_items()) == sorted(other.multi_items())
+
+    def __str__(self) -> str:
+        """
+        Note that we use '%20' encoding for spaces, and '%2F' for '/'.
+
+        See https://github.com/encode/httpx/issues/2536 and
+        https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
+        """
+        return urlencode(self.multi_items())
+
+    def __repr__(self) -> str:
+        class_name = self.__class__.__name__
+        query_string = str(self)
+        return f"{class_name}({query_string!r})"
+
+    def update(self, params: typing.Optional[QueryParamTypes] = None) -> None:
+        raise RuntimeError(
+            "QueryParams are immutable since 0.18.0. "
+            "Use `q = q.merge(...)` to create an updated copy."
+        )
+
+    def __setitem__(self, key: str, value: str) -> None:
+        raise RuntimeError(
+            "QueryParams are immutable since 0.18.0. "
+            "Use `q = q.set(key, value)` to create an updated copy."
+ ) diff --git a/venv/lib/python3.12/site-packages/httpx/_utils.py b/venv/lib/python3.12/site-packages/httpx/_utils.py new file mode 100644 index 0000000..bc3cb00 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_utils.py @@ -0,0 +1,440 @@ +import codecs +import email.message +import ipaddress +import mimetypes +import os +import re +import time +import typing +from pathlib import Path +from urllib.request import getproxies + +import sniffio + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._urls import URL + + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def normalize_header_key( + value: typing.Union[str, bytes], + lower: bool, + encoding: typing.Optional[str] = None, +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + if isinstance(value, bytes): + bytes_value = value + else: + bytes_value = value.encode(encoding or "ascii") + + return bytes_value.lower() if lower else bytes_value + + +def normalize_header_value( + value: typing.Union[str, bytes], encoding: typing.Optional[str] = None +) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + return value.encode(encoding or "ascii") + + +def primitive_value_to_str(value: "PrimitiveData") -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here. + """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def format_form_param(name: str, value: str) -> bytes: + """ + Encode a name/value pair within a multipart form. 
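+
+    For example (illustrative):
+
+        format_form_param("name", 'a "quoted" value')
+        # -> b'name="a %22quoted%22 value"'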
+ """ + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +def get_ca_bundle_from_env() -> typing.Optional[str]: + if "SSL_CERT_FILE" in os.environ: + ssl_file = Path(os.environ["SSL_CERT_FILE"]) + if ssl_file.is_file(): + return str(ssl_file) + if "SSL_CERT_DIR" in os.environ: + ssl_path = Path(os.environ["SSL_CERT_DIR"]) + if ssl_path.is_dir(): + return str(ssl_path) + return None + + +def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '; type="image/jpeg",;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: typing.List[typing.Dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +def parse_content_type_charset(content_type: str) -> typing.Optional[str]: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. + # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def obfuscate_sensitive_headers( + items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]], +) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +def port_or_default(url: "URL") -> typing.Optional[int]: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def same_origin(url: "URL", other: "URL") -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and port_or_default(url) == port_or_default(other) + ) + + +def is_https_redirect(url: "URL", location: "URL") -> bool: + """ + Return 'True' if 'location' is a HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and port_or_default(url) == 80 + and location.scheme == "https" + and port_or_default(location) == 443 + ) + + +def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. 
+ proxy_info = getproxies() + mounts: typing.Dict[str, typing.Optional[str]] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # separated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com", + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com", + # which disables "www.google.com" and "google.com". + # (But not "wwwgoogle.com") + # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" + # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 + if "://" in hostname: + mounts[hostname] = None + elif is_ipv4_hostname(hostname): + mounts[f"all://{hostname}"] = None + elif is_ipv6_hostname(hostname): + mounts[f"all://[{hostname}]"] = None + elif hostname.lower() == "localhost": + mounts[f"all://{hostname}"] = None + else: + mounts[f"all://*{hostname}"] = None + + return mounts + + + def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + + def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + + def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + + def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + + def guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]: + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + + def peek_filelike_length(stream: typing.Any) -> typing.Optional[int]: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed...
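+ # (Editor's note: streams that support neither fileno() nor
+ # tell()/seek(), e.g. generator-backed bodies, end up here; the
+ # None return below means the length is unknown to the caller.)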
+ return None + + return length + + + class Timer: + async def _get_time(self) -> float: + library = sniffio.current_async_library() + if library == "trio": + import trio + + return trio.current_time() + else: + import asyncio + + return asyncio.get_event_loop().time() + + def sync_start(self) -> None: + self.started = time.perf_counter() + + async def async_start(self) -> None: + self.started = await self._get_time() + + def sync_elapsed(self) -> float: + now = time.perf_counter() + return now - self.started + + async def async_elapsed(self) -> float: + now = await self._get_time() + return now - self.started + + + class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all://") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # With scheme matching... + >>> pattern = URLPattern("https://") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... + >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... + >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._urls import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Optional[typing.Pattern[str]] = None + elif url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: "URL") -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> typing.Tuple[int, int, int]: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port.
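+ # Editor's note, not upstream code: under an ascending sort this makes
+ # URLPattern("https://example.com:1234") order before
+ # URLPattern("https://example.com"), which in turn orders before the
+ # catch-all URLPattern("all://").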
+ port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. + host_priority = -len(self.host) + # Longer schemes should match first. + scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: "URLPattern") -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/venv/lib/python3.12/site-packages/httpx/py.typed b/venv/lib/python3.12/site-packages/httpx/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/idna-3.11.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/idna-3.11.dist-info/METADATA b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/METADATA new file mode 100644 index 0000000..7a4a4b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/METADATA @@ -0,0 +1,209 @@ +Metadata-Version: 2.4 +Name: idna +Version: 3.11 +Summary: Internationalized Domain Names in Applications (IDNA) +Author-email: Kim Davies +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-Expression: BSD-3-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +License-File: LICENSE.md +Requires-Dist: ruff >= 0.6.2 ; extra == "all" +Requires-Dist: mypy >= 1.11.2 ; extra == "all" +Requires-Dist: pytest >= 8.3.2 ; extra == "all" +Requires-Dist: flake8 >= 7.1.1 ; extra == "all" +Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst +Project-URL: Issue tracker, https://github.com/kjd/idna/issues +Project-URL: Source, https://github.com/kjd/idna +Provides-Extra: all + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for `Internationalized Domain Names in +Applications (IDNA) `_ +and `Unicode IDNA Compatibility Processing +`_. 
+ +The latest versions of these standards supplied here provide +more comprehensive language coverage and reduce the potential of +allowing domains with known security vulnerabilities. This library +is a suitable replacement for the “encodings.idna” +module that comes with the Python standard library, but which +only supports an older superseded IDNA specification from 2003. + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +This package is available for installation from PyPI via the +typical mechanisms, such as: + +.. code-block:: bash + + $ python3 -m pip install idna + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a +domain name argument and perform a conversion to ASCII compatible encoding +(known as A-labels), or to Unicode strings (known as U-labels) +respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +Conversions can be applied at a per-label basis using the ``ulabel`` or +``alabel`` functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +This library provides support for `Unicode IDNA Compatibility +Processing `_ which normalizes input from +different potential ways a user may input a domain prior to performing the IDNA +conversion operations. This functionality, known as a +`mapping `_, is considered by the +specification to be a local user-interface issue distinct from IDNA +conversion functionality. + +For example, “Königsgäßchen” is not a permissible label as *LATIN +CAPITAL LETTER K* is not allowed (nor are capital letters in general). +UTS 46 will convert this into lower case prior to applying the IDNA +conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + + +Exceptions +---------- + +All errors raised during the conversion following the specification +should raise an exception derived from the ``idna.IDNAError`` base +class. + +More specific exceptions may be generated: ``idna.IDNABidiError`` when +the error reflects an illegal combination of left-to-right and +right-to-left characters in a label; ``idna.InvalidCodepoint`` when +a specific codepoint is an illegal character in an IDN label (i.e. +INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is +illegal based on its position in the string (i.e. it is CONTEXTO or CONTEXTJ +but the contextual requirements are not satisfied). + +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup +tables for performance. These tables are derived from computing against +eligibility criteria in the respective standards using the command-line +script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility. There are +three main modes: + +* ``idna-data make-libdata``.
Generates ``idnadata.py`` and + ``uts46data.py``, the pre-calculated lookup tables used for IDNA and + UTS 46 conversions. Implementers who wish to track this library against + a different Unicode version may use this tool to manually generate a + different version of the ``idnadata.py`` and ``uts46data.py`` files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix + B.1 of RFC 5892 and the pre-computed tables published by `IANA + `_. + +* ``idna-data U+0061``. Prints debugging output on the various + properties associated with an individual Unicode codepoint (in this + case, U+0061), that are used to assess the IDNA and UTS 46 status of a + codepoint. This is helpful in debugging or analysis. + +The tool accepts a number of arguments, described using ``idna-data +-h``. Most notably, the ``--version`` argument allows the specification +of the version of Unicode to be used in computing the table data. For +example, ``idna-data --version 9.0.0 make-libdata`` will generate +library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.8 and higher. + As this library serves as a low-level toolkit for a variety of + applications, many of which strive for broad compatibility with older + Python versions, there is no rush to remove older interpreter support. + Support for older versions is likely to be removed from new releases + as automated tests can no longer easily be run, i.e. once the Python + version is officially end-of-life. + +* **Testing**. The library has a test suite based on each rule of the + IDNA specification, as well as tests that are provided as part of the + Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing + `_. + +* **Emoji**. It is an occasional request to support emoji domains in + this library. Encoding of symbols like emoji is expressly prohibited by + the technical standard IDNA 2008 and emoji domains are broadly phased + out across the domain industry due to associated security risks. For + now, applications that need to support these non-compliant labels + may wish to consider trying the encode/decode operation in this library + first, and then falling back to using ``encodings.idna``. See `the Github + project `_ for more discussion. + +* **Transitional processing**. Unicode 16.0.0 removed transitional + processing so the ``transitional`` argument for the encode() method + no longer has any effect and will be removed at a later date.
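+
+Error Handling Example
+----------------------
+
+A minimal sketch (an editor's addition, not part of the upstream README):
+every conversion error described in the Exceptions section derives from
+``idna.IDNAError``, so a single handler is sufficient for defensive callers.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> try:
+    ...     idna.encode('x' * 64)
+    ... except idna.IDNAError as exc:
+    ...     print(type(exc).__name__, exc)
+    IDNAError Label too long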
+ diff --git a/venv/lib/python3.12/site-packages/idna-3.11.dist-info/RECORD b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/RECORD new file mode 100644 index 0000000..1203f10 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-3.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.11.dist-info/METADATA,sha256=fCwSww9SuiN8TIHllFSASUQCW55hAs8dzKnr9RaEEbA,8378 +idna-3.11.dist-info/RECORD,, +idna-3.11.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +idna-3.11.dist-info/licenses/LICENSE.md,sha256=t6M2q_OwThgOwGXN0W5wXQeeHMehT5EKpukYfza5zYc,1541 +idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 +idna/__pycache__/__init__.cpython-312.pyc,, +idna/__pycache__/codec.cpython-312.pyc,, +idna/__pycache__/compat.cpython-312.pyc,, +idna/__pycache__/core.cpython-312.pyc,, +idna/__pycache__/idnadata.cpython-312.pyc,, +idna/__pycache__/intranges.cpython-312.pyc,, +idna/__pycache__/package_data.cpython-312.pyc,, +idna/__pycache__/uts46data.cpython-312.pyc,, +idna/codec.py,sha256=M2SGWN7cs_6B32QmKTyTN6xQGZeYQgQ2wiX3_DR6loE,3438 +idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 +idna/core.py,sha256=P26_XVycuMTZ1R2mNK1ZREVzM5mvTzdabBXfyZVU1Lc,13246 +idna/idnadata.py,sha256=SG8jhaGE53iiD6B49pt2pwTv_UvClciWE-N54oR2p4U,79623 +idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 +idna/package_data.py,sha256=_CUavOxobnbyNG2FLyHoN8QHP3QM9W1tKuw7eq9QwBk,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=H9J35VkD0F9L9mKOqjeNGd2A-Va6FlPoz6Jz4K7h-ps,243725 diff --git a/venv/lib/python3.12/site-packages/idna-3.11.dist-info/WHEEL b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/WHEEL new file mode 100644 index 0000000..d8b9936 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/idna-3.11.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..256ba90 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna-3.11.dist-info/licenses/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2025, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/idna/__init__.py b/venv/lib/python3.12/site-packages/idna/__init__.py new file mode 100644 index 0000000..cfdc030 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/__init__.py @@ -0,0 +1,45 @@ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain +from .package_data import __version__ + +__all__ = [ + "__version__", + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..854be98 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc new file mode 100644 index 0000000..2c04219 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..e042856 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..5a372f0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc new file mode 100644 index 0000000..2097014 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc new file mode 100644 index 0000000..b1274c7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc new file mode 100644 index 0000000..bde824a Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc new file mode 100644 index 0000000..0481ea9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/codec.py b/venv/lib/python3.12/site-packages/idna/codec.py new file mode 100644 index 0000000..cbc2e4f --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/codec.py @@ -0,0 +1,122 @@ +import codecs +import re +from typing import Any, Optional, Tuple + +from .core import IDNAError, alabel, decode, encode, ulabel + +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return "", 0 + + return decode(data), len(data) + + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b"" + if labels: + if not labels[-1]: + trailing_dot = b"." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b"." + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b".".join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return ("", 0) + + if not isinstance(data, str): + data = str(data, "ascii") + + labels = _unicode_dots_re.split(data) + trailing_dot = "" + if labels: + if not labels[-1]: + trailing_dot = "." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = "." 
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != "idna2008": + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, # type: ignore + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + + +codecs.register(search_function) diff --git a/venv/lib/python3.12/site-packages/idna/compat.py b/venv/lib/python3.12/site-packages/idna/compat.py new file mode 100644 index 0000000..1df9f2a --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/compat.py @@ -0,0 +1,15 @@ +from typing import Any, Union + +from .core import decode, encode + + +def ToASCII(label: str) -> bytes: + return encode(label) + + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + + +def nameprep(s: Any) -> None: + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/lib/python3.12/site-packages/idna/core.py b/venv/lib/python3.12/site-packages/idna/core.py new file mode 100644 index 0000000..8177bf7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/core.py @@ -0,0 +1,437 @@ +import bisect +import re +import unicodedata +from typing import Optional, Union + +from . import idnadata +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b"xn--" +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class IDNAError(UnicodeError): + """Base exception for all IDNA-encoding related problems""" + + pass + + +class IDNABidiError(IDNAError): + """Exception when bidirectional requirements are not satisfied""" + + pass + + +class InvalidCodepoint(IDNAError): + """Exception when a disallowed or unallocated codepoint is used""" + + pass + + +class InvalidCodepointContext(IDNAError): + """Exception when the codepoint is not valid in the context it is used""" + + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + + +def _punycode(s: str) -> bytes: + return s.encode("punycode") + + +def _unot(s: int) -> str: + return "U+{:04X}".format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == "": + # String likely comes from a newer version of Unicode + raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) + if direction in ["R", "AL", "AN"]: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + 
direction = unicodedata.bidirectional(label[0]) + if direction in ["R", "AL"]: + rtl = True + elif direction == "L": + rtl = False + else: + raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) + + valid_ending = False + number_type: Optional[str] = None + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if direction not in [ + "R", + "AL", + "AN", + "EN", + "ES", + "CS", + "ET", + "ON", + "BN", + "NSM", + ]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) + # Bidi rule 3 + if direction in ["R", "AL", "EN", "AN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + # Bidi rule 4 + if direction in ["AN", "EN"]: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError("Can not mix numeral types in a right-to-left label") + else: + # Bidi rule 5 + if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) + # Bidi rule 6 + if direction in ["L", "EN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + + if not valid_ending: + raise IDNABidiError("Label ends with illegal codepoint directionality") + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == "M": + raise IDNAError("Label begins with an illegal combining character") + return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == "--": + raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") + if label[0] == "-" or label[-1] == "-": + raise IDNAError("Label must not start or end with a hyphen") + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize("NFC", label) != label: + raise IDNAError("Label must be in Normalization Form C") + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200C: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos - 1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("L"), ord("D")]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos + 1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("R"), ord("D")]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200D: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00B7: + if 0 < pos < len(label) - 1: + if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label) - 1 and len(label) > 1: + return _is_script(label[pos + 1], "Greek") + return False + + elif cp_value == 0x05F3 or cp_value == 0x05F4: + if pos > 0: + return _is_script(label[pos - 1], "Hebrew") + return False + + elif cp_value == 0x30FB: + for cp in label: + if cp == "\u30fb": + continue + if _is_script(cp, "Hiragana") or 
_is_script(cp, "Katakana") or _is_script(cp, "Han"): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6F0 <= ord(cp) <= 0x06F9: + return False + return True + + elif 0x6F0 <= cp_value <= 0x6F9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode("utf-8") + if len(label) == 0: + raise IDNAError("Empty Label") + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for pos, cp in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext( + "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + except ValueError: + raise IDNAError( + "Unknown codepoint adjacent to joiner {} at position {} in {}".format( + _unot(cp_value), pos + 1, repr(label) + ) + ) + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): + if not valid_contexto(label, pos): + raise InvalidCodepointContext( + "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + else: + raise InvalidCodepoint( + "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) + ) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode("ascii") + ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode("ascii") + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = bytes(label) + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix) :] + if not label_bytes: + raise IDNAError("Malformed A-label, no Punycode eligible content found") + if label_bytes.decode("ascii")[-1] == "-": + raise IDNAError("A-label must not end with a hyphen") + else: + check_label(label_bytes) + return label_bytes.decode("ascii") + + try: + label = label_bytes.decode("punycode") + except UnicodeError: + raise IDNAError("Invalid A-label") + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + + output = "" + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement: Optional[str] = None + if len(uts46row) == 3: + replacement = uts46row[2] + if ( + status == "V" + or (status == "D" and not transitional) + or (status == "3" and not std3_rules and replacement is None) + ): + output += char + elif replacement is not None and ( + status == "M" or (status == "3" and not std3_rules) or 
(status == "D" and transitional) + ): + output += replacement + elif status != "I": + raise IndexError() + except IndexError: + raise InvalidCodepoint( + "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) + ) + + return unicodedata.normalize("NFC", output) + + +def encode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, + transitional: bool = False, +) -> bytes: + if not isinstance(s, str): + try: + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("should pass a unicode string to the function rather than a byte string.") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split(".") + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if labels[-1] == "": + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append(b"") + s = b".".join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError("Domain too long") + return s + + +def decode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, +) -> str: + try: + if not isinstance(s, str): + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("Invalid ASCII in A-label") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(".") + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append("") + return ".".join(result) diff --git a/venv/lib/python3.12/site-packages/idna/idnadata.py b/venv/lib/python3.12/site-packages/idna/idnadata.py new file mode 100644 index 0000000..ded47ca --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/idnadata.py @@ -0,0 +1,4309 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "16.0.0" + +scripts = { + "Greek": ( + 0x37000000374, + 0x37500000378, + 0x37A0000037E, + 0x37F00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038B, + 0x38C0000038D, + 0x38E000003A2, + 0x3A3000003E2, + 0x3F000000400, + 0x1D2600001D2B, + 0x1D5D00001D62, + 0x1D6600001D6B, + 0x1DBF00001DC0, + 0x1F0000001F16, + 0x1F1800001F1E, + 0x1F2000001F46, + 0x1F4800001F4E, + 0x1F5000001F58, + 0x1F5900001F5A, + 0x1F5B00001F5C, + 0x1F5D00001F5E, + 0x1F5F00001F7E, + 0x1F8000001FB5, + 0x1FB600001FC5, + 0x1FC600001FD4, + 0x1FD600001FDC, + 0x1FDD00001FF0, + 0x1FF200001FF5, + 0x1FF600001FFF, + 0x212600002127, + 0xAB650000AB66, + 0x101400001018F, + 0x101A0000101A1, + 0x1D2000001D246, + ), + "Han": ( + 0x2E8000002E9A, + 0x2E9B00002EF4, + 0x2F0000002FD6, + 0x300500003006, + 0x300700003008, + 0x30210000302A, + 0x30380000303C, + 0x340000004DC0, + 0x4E000000A000, + 0xF9000000FA6E, + 0xFA700000FADA, + 0x16FE200016FE4, + 0x16FF000016FF2, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x2F8000002FA1E, + 0x300000003134B, + 0x31350000323B0, + ), + "Hebrew": ( + 0x591000005C8, + 0x5D0000005EB, + 0x5EF000005F5, + 0xFB1D0000FB37, + 0xFB380000FB3D, + 0xFB3E0000FB3F, + 0xFB400000FB42, + 
0xFB430000FB45, + 0xFB460000FB50, + ), + "Hiragana": ( + 0x304100003097, + 0x309D000030A0, + 0x1B0010001B120, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1F2000001F201, + ), + "Katakana": ( + 0x30A1000030FB, + 0x30FD00003100, + 0x31F000003200, + 0x32D0000032FF, + 0x330000003358, + 0xFF660000FF70, + 0xFF710000FF9E, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B001, + 0x1B1200001B123, + 0x1B1550001B156, + 0x1B1640001B168, + ), +} +joining_types = { + 0xAD: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30A: 84, + 0x30B: 84, + 0x30C: 84, + 0x30D: 84, + 0x30E: 84, + 0x30F: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31A: 84, + 0x31B: 84, + 0x31C: 84, + 0x31D: 84, + 0x31E: 84, + 0x31F: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32A: 84, + 0x32B: 84, + 0x32C: 84, + 0x32D: 84, + 0x32E: 84, + 0x32F: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33A: 84, + 0x33B: 84, + 0x33C: 84, + 0x33D: 84, + 0x33E: 84, + 0x33F: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34A: 84, + 0x34B: 84, + 0x34C: 84, + 0x34D: 84, + 0x34E: 84, + 0x34F: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 0x359: 84, + 0x35A: 84, + 0x35B: 84, + 0x35C: 84, + 0x35D: 84, + 0x35E: 84, + 0x35F: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36A: 84, + 0x36B: 84, + 0x36C: 84, + 0x36D: 84, + 0x36E: 84, + 0x36F: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59A: 84, + 0x59B: 84, + 0x59C: 84, + 0x59D: 84, + 0x59E: 84, + 0x59F: 84, + 0x5A0: 84, + 0x5A1: 84, + 0x5A2: 84, + 0x5A3: 84, + 0x5A4: 84, + 0x5A5: 84, + 0x5A6: 84, + 0x5A7: 84, + 0x5A8: 84, + 0x5A9: 84, + 0x5AA: 84, + 0x5AB: 84, + 0x5AC: 84, + 0x5AD: 84, + 0x5AE: 84, + 0x5AF: 84, + 0x5B0: 84, + 0x5B1: 84, + 0x5B2: 84, + 0x5B3: 84, + 0x5B4: 84, + 0x5B5: 84, + 0x5B6: 84, + 0x5B7: 84, + 0x5B8: 84, + 0x5B9: 84, + 0x5BA: 84, + 0x5BB: 84, + 0x5BC: 84, + 0x5BD: 84, + 0x5BF: 84, + 0x5C1: 84, + 0x5C2: 84, + 0x5C4: 84, + 0x5C5: 84, + 0x5C7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61A: 84, + 0x61C: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62A: 68, + 0x62B: 68, + 0x62C: 68, + 0x62D: 68, + 0x62E: 68, + 0x62F: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63A: 68, + 0x63B: 68, + 0x63C: 68, + 0x63D: 68, + 0x63E: 68, + 0x63F: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64A: 68, + 0x64B: 84, + 0x64C: 84, + 0x64D: 84, + 0x64E: 84, + 0x64F: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, 
+ 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65A: 84, + 0x65B: 84, + 0x65C: 84, + 0x65D: 84, + 0x65E: 84, + 0x65F: 84, + 0x66E: 68, + 0x66F: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67A: 68, + 0x67B: 68, + 0x67C: 68, + 0x67D: 68, + 0x67E: 68, + 0x67F: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68A: 82, + 0x68B: 82, + 0x68C: 82, + 0x68D: 82, + 0x68E: 82, + 0x68F: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69A: 68, + 0x69B: 68, + 0x69C: 68, + 0x69D: 68, + 0x69E: 68, + 0x69F: 68, + 0x6A0: 68, + 0x6A1: 68, + 0x6A2: 68, + 0x6A3: 68, + 0x6A4: 68, + 0x6A5: 68, + 0x6A6: 68, + 0x6A7: 68, + 0x6A8: 68, + 0x6A9: 68, + 0x6AA: 68, + 0x6AB: 68, + 0x6AC: 68, + 0x6AD: 68, + 0x6AE: 68, + 0x6AF: 68, + 0x6B0: 68, + 0x6B1: 68, + 0x6B2: 68, + 0x6B3: 68, + 0x6B4: 68, + 0x6B5: 68, + 0x6B6: 68, + 0x6B7: 68, + 0x6B8: 68, + 0x6B9: 68, + 0x6BA: 68, + 0x6BB: 68, + 0x6BC: 68, + 0x6BD: 68, + 0x6BE: 68, + 0x6BF: 68, + 0x6C0: 82, + 0x6C1: 68, + 0x6C2: 68, + 0x6C3: 82, + 0x6C4: 82, + 0x6C5: 82, + 0x6C6: 82, + 0x6C7: 82, + 0x6C8: 82, + 0x6C9: 82, + 0x6CA: 82, + 0x6CB: 82, + 0x6CC: 68, + 0x6CD: 82, + 0x6CE: 68, + 0x6CF: 82, + 0x6D0: 68, + 0x6D1: 68, + 0x6D2: 82, + 0x6D3: 82, + 0x6D5: 82, + 0x6D6: 84, + 0x6D7: 84, + 0x6D8: 84, + 0x6D9: 84, + 0x6DA: 84, + 0x6DB: 84, + 0x6DC: 84, + 0x6DF: 84, + 0x6E0: 84, + 0x6E1: 84, + 0x6E2: 84, + 0x6E3: 84, + 0x6E4: 84, + 0x6E7: 84, + 0x6E8: 84, + 0x6EA: 84, + 0x6EB: 84, + 0x6EC: 84, + 0x6ED: 84, + 0x6EE: 82, + 0x6EF: 82, + 0x6FA: 68, + 0x6FB: 68, + 0x6FC: 68, + 0x6FF: 68, + 0x70F: 84, + 0x710: 82, + 0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71A: 68, + 0x71B: 68, + 0x71C: 68, + 0x71D: 68, + 0x71E: 82, + 0x71F: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72A: 82, + 0x72B: 68, + 0x72C: 82, + 0x72D: 68, + 0x72E: 68, + 0x72F: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73A: 84, + 0x73B: 84, + 0x73C: 84, + 0x73D: 84, + 0x73E: 84, + 0x73F: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74A: 84, + 0x74D: 82, + 0x74E: 68, + 0x74F: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75A: 82, + 0x75B: 82, + 0x75C: 68, + 0x75D: 68, + 0x75E: 68, + 0x75F: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76A: 68, + 0x76B: 82, + 0x76C: 82, + 0x76D: 68, + 0x76E: 68, + 0x76F: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77A: 68, + 0x77B: 68, + 0x77C: 68, + 0x77D: 68, + 0x77E: 68, + 0x77F: 68, + 0x7A6: 84, + 0x7A7: 84, + 0x7A8: 84, + 0x7A9: 84, + 0x7AA: 84, + 0x7AB: 84, + 0x7AC: 84, + 0x7AD: 84, + 0x7AE: 84, + 0x7AF: 84, + 0x7B0: 84, + 0x7CA: 68, + 0x7CB: 68, + 0x7CC: 68, + 0x7CD: 68, + 0x7CE: 68, + 0x7CF: 68, + 0x7D0: 68, + 0x7D1: 68, + 0x7D2: 68, + 0x7D3: 68, + 0x7D4: 68, + 0x7D5: 68, + 0x7D6: 68, + 0x7D7: 68, + 0x7D8: 68, + 
0x7D9: 68, + 0x7DA: 68, + 0x7DB: 68, + 0x7DC: 68, + 0x7DD: 68, + 0x7DE: 68, + 0x7DF: 68, + 0x7E0: 68, + 0x7E1: 68, + 0x7E2: 68, + 0x7E3: 68, + 0x7E4: 68, + 0x7E5: 68, + 0x7E6: 68, + 0x7E7: 68, + 0x7E8: 68, + 0x7E9: 68, + 0x7EA: 68, + 0x7EB: 84, + 0x7EC: 84, + 0x7ED: 84, + 0x7EE: 84, + 0x7EF: 84, + 0x7F0: 84, + 0x7F1: 84, + 0x7F2: 84, + 0x7F3: 84, + 0x7FA: 67, + 0x7FD: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81B: 84, + 0x81C: 84, + 0x81D: 84, + 0x81E: 84, + 0x81F: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82A: 84, + 0x82B: 84, + 0x82C: 84, + 0x82D: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84A: 68, + 0x84B: 68, + 0x84C: 68, + 0x84D: 68, + 0x84E: 68, + 0x84F: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85A: 84, + 0x85B: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86A: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87A: 82, + 0x87B: 82, + 0x87C: 82, + 0x87D: 82, + 0x87E: 82, + 0x87F: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88A: 68, + 0x88B: 68, + 0x88C: 68, + 0x88D: 68, + 0x88E: 82, + 0x897: 84, + 0x898: 84, + 0x899: 84, + 0x89A: 84, + 0x89B: 84, + 0x89C: 84, + 0x89D: 84, + 0x89E: 84, + 0x89F: 84, + 0x8A0: 68, + 0x8A1: 68, + 0x8A2: 68, + 0x8A3: 68, + 0x8A4: 68, + 0x8A5: 68, + 0x8A6: 68, + 0x8A7: 68, + 0x8A8: 68, + 0x8A9: 68, + 0x8AA: 82, + 0x8AB: 82, + 0x8AC: 82, + 0x8AE: 82, + 0x8AF: 68, + 0x8B0: 68, + 0x8B1: 82, + 0x8B2: 82, + 0x8B3: 68, + 0x8B4: 68, + 0x8B5: 68, + 0x8B6: 68, + 0x8B7: 68, + 0x8B8: 68, + 0x8B9: 82, + 0x8BA: 68, + 0x8BB: 68, + 0x8BC: 68, + 0x8BD: 68, + 0x8BE: 68, + 0x8BF: 68, + 0x8C0: 68, + 0x8C1: 68, + 0x8C2: 68, + 0x8C3: 68, + 0x8C4: 68, + 0x8C5: 68, + 0x8C6: 68, + 0x8C7: 68, + 0x8C8: 68, + 0x8CA: 84, + 0x8CB: 84, + 0x8CC: 84, + 0x8CD: 84, + 0x8CE: 84, + 0x8CF: 84, + 0x8D0: 84, + 0x8D1: 84, + 0x8D2: 84, + 0x8D3: 84, + 0x8D4: 84, + 0x8D5: 84, + 0x8D6: 84, + 0x8D7: 84, + 0x8D8: 84, + 0x8D9: 84, + 0x8DA: 84, + 0x8DB: 84, + 0x8DC: 84, + 0x8DD: 84, + 0x8DE: 84, + 0x8DF: 84, + 0x8E0: 84, + 0x8E1: 84, + 0x8E3: 84, + 0x8E4: 84, + 0x8E5: 84, + 0x8E6: 84, + 0x8E7: 84, + 0x8E8: 84, + 0x8E9: 84, + 0x8EA: 84, + 0x8EB: 84, + 0x8EC: 84, + 0x8ED: 84, + 0x8EE: 84, + 0x8EF: 84, + 0x8F0: 84, + 0x8F1: 84, + 0x8F2: 84, + 0x8F3: 84, + 0x8F4: 84, + 0x8F5: 84, + 0x8F6: 84, + 0x8F7: 84, + 0x8F8: 84, + 0x8F9: 84, + 0x8FA: 84, + 0x8FB: 84, + 0x8FC: 84, + 0x8FD: 84, + 0x8FE: 84, + 0x8FF: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93A: 84, + 0x93C: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94D: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9BC: 84, + 0x9C1: 84, + 0x9C2: 84, + 0x9C3: 84, + 0x9C4: 84, + 0x9CD: 84, + 0x9E2: 84, + 0x9E3: 84, + 0x9FE: 84, + 0xA01: 84, + 0xA02: 84, + 0xA3C: 84, + 0xA41: 84, + 0xA42: 84, + 0xA47: 84, + 0xA48: 84, + 0xA4B: 84, + 0xA4C: 84, + 0xA4D: 84, + 0xA51: 84, + 0xA70: 84, + 0xA71: 84, + 0xA75: 84, + 0xA81: 84, + 0xA82: 84, + 0xABC: 84, + 0xAC1: 84, + 0xAC2: 84, + 0xAC3: 84, + 0xAC4: 84, + 0xAC5: 84, + 
0xAC7: 84, + 0xAC8: 84, + 0xACD: 84, + 0xAE2: 84, + 0xAE3: 84, + 0xAFA: 84, + 0xAFB: 84, + 0xAFC: 84, + 0xAFD: 84, + 0xAFE: 84, + 0xAFF: 84, + 0xB01: 84, + 0xB3C: 84, + 0xB3F: 84, + 0xB41: 84, + 0xB42: 84, + 0xB43: 84, + 0xB44: 84, + 0xB4D: 84, + 0xB55: 84, + 0xB56: 84, + 0xB62: 84, + 0xB63: 84, + 0xB82: 84, + 0xBC0: 84, + 0xBCD: 84, + 0xC00: 84, + 0xC04: 84, + 0xC3C: 84, + 0xC3E: 84, + 0xC3F: 84, + 0xC40: 84, + 0xC46: 84, + 0xC47: 84, + 0xC48: 84, + 0xC4A: 84, + 0xC4B: 84, + 0xC4C: 84, + 0xC4D: 84, + 0xC55: 84, + 0xC56: 84, + 0xC62: 84, + 0xC63: 84, + 0xC81: 84, + 0xCBC: 84, + 0xCBF: 84, + 0xCC6: 84, + 0xCCC: 84, + 0xCCD: 84, + 0xCE2: 84, + 0xCE3: 84, + 0xD00: 84, + 0xD01: 84, + 0xD3B: 84, + 0xD3C: 84, + 0xD41: 84, + 0xD42: 84, + 0xD43: 84, + 0xD44: 84, + 0xD4D: 84, + 0xD62: 84, + 0xD63: 84, + 0xD81: 84, + 0xDCA: 84, + 0xDD2: 84, + 0xDD3: 84, + 0xDD4: 84, + 0xDD6: 84, + 0xE31: 84, + 0xE34: 84, + 0xE35: 84, + 0xE36: 84, + 0xE37: 84, + 0xE38: 84, + 0xE39: 84, + 0xE3A: 84, + 0xE47: 84, + 0xE48: 84, + 0xE49: 84, + 0xE4A: 84, + 0xE4B: 84, + 0xE4C: 84, + 0xE4D: 84, + 0xE4E: 84, + 0xEB1: 84, + 0xEB4: 84, + 0xEB5: 84, + 0xEB6: 84, + 0xEB7: 84, + 0xEB8: 84, + 0xEB9: 84, + 0xEBA: 84, + 0xEBB: 84, + 0xEBC: 84, + 0xEC8: 84, + 0xEC9: 84, + 0xECA: 84, + 0xECB: 84, + 0xECC: 84, + 0xECD: 84, + 0xECE: 84, + 0xF18: 84, + 0xF19: 84, + 0xF35: 84, + 0xF37: 84, + 0xF39: 84, + 0xF71: 84, + 0xF72: 84, + 0xF73: 84, + 0xF74: 84, + 0xF75: 84, + 0xF76: 84, + 0xF77: 84, + 0xF78: 84, + 0xF79: 84, + 0xF7A: 84, + 0xF7B: 84, + 0xF7C: 84, + 0xF7D: 84, + 0xF7E: 84, + 0xF80: 84, + 0xF81: 84, + 0xF82: 84, + 0xF83: 84, + 0xF84: 84, + 0xF86: 84, + 0xF87: 84, + 0xF8D: 84, + 0xF8E: 84, + 0xF8F: 84, + 0xF90: 84, + 0xF91: 84, + 0xF92: 84, + 0xF93: 84, + 0xF94: 84, + 0xF95: 84, + 0xF96: 84, + 0xF97: 84, + 0xF99: 84, + 0xF9A: 84, + 0xF9B: 84, + 0xF9C: 84, + 0xF9D: 84, + 0xF9E: 84, + 0xF9F: 84, + 0xFA0: 84, + 0xFA1: 84, + 0xFA2: 84, + 0xFA3: 84, + 0xFA4: 84, + 0xFA5: 84, + 0xFA6: 84, + 0xFA7: 84, + 0xFA8: 84, + 0xFA9: 84, + 0xFAA: 84, + 0xFAB: 84, + 0xFAC: 84, + 0xFAD: 84, + 0xFAE: 84, + 0xFAF: 84, + 0xFB0: 84, + 0xFB1: 84, + 0xFB2: 84, + 0xFB3: 84, + 0xFB4: 84, + 0xFB5: 84, + 0xFB6: 84, + 0xFB7: 84, + 0xFB8: 84, + 0xFB9: 84, + 0xFBA: 84, + 0xFBB: 84, + 0xFBC: 84, + 0xFC6: 84, + 0x102D: 84, + 0x102E: 84, + 0x102F: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103A: 84, + 0x103D: 84, + 0x103E: 84, + 0x1058: 84, + 0x1059: 84, + 0x105E: 84, + 0x105F: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108D: 84, + 0x109D: 84, + 0x135D: 84, + 0x135E: 84, + 0x135F: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17B4: 84, + 0x17B5: 84, + 0x17B7: 84, + 0x17B8: 84, + 0x17B9: 84, + 0x17BA: 84, + 0x17BB: 84, + 0x17BC: 84, + 0x17BD: 84, + 0x17C6: 84, + 0x17C9: 84, + 0x17CA: 84, + 0x17CB: 84, + 0x17CC: 84, + 0x17CD: 84, + 0x17CE: 84, + 0x17CF: 84, + 0x17D0: 84, + 0x17D1: 84, + 0x17D2: 84, + 0x17D3: 84, + 0x17DD: 84, + 0x1807: 68, + 0x180A: 67, + 0x180B: 84, + 0x180C: 84, + 0x180D: 84, + 0x180F: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182A: 68, + 0x182B: 68, + 0x182C: 68, + 0x182D: 68, + 0x182E: 68, + 0x182F: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 
0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183A: 68, + 0x183B: 68, + 0x183C: 68, + 0x183D: 68, + 0x183E: 68, + 0x183F: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184A: 68, + 0x184B: 68, + 0x184C: 68, + 0x184D: 68, + 0x184E: 68, + 0x184F: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185A: 68, + 0x185B: 68, + 0x185C: 68, + 0x185D: 68, + 0x185E: 68, + 0x185F: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186A: 68, + 0x186B: 68, + 0x186C: 68, + 0x186D: 68, + 0x186E: 68, + 0x186F: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188A: 68, + 0x188B: 68, + 0x188C: 68, + 0x188D: 68, + 0x188E: 68, + 0x188F: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189A: 68, + 0x189B: 68, + 0x189C: 68, + 0x189D: 68, + 0x189E: 68, + 0x189F: 68, + 0x18A0: 68, + 0x18A1: 68, + 0x18A2: 68, + 0x18A3: 68, + 0x18A4: 68, + 0x18A5: 68, + 0x18A6: 68, + 0x18A7: 68, + 0x18A8: 68, + 0x18A9: 84, + 0x18AA: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193A: 84, + 0x193B: 84, + 0x1A17: 84, + 0x1A18: 84, + 0x1A1B: 84, + 0x1A56: 84, + 0x1A58: 84, + 0x1A59: 84, + 0x1A5A: 84, + 0x1A5B: 84, + 0x1A5C: 84, + 0x1A5D: 84, + 0x1A5E: 84, + 0x1A60: 84, + 0x1A62: 84, + 0x1A65: 84, + 0x1A66: 84, + 0x1A67: 84, + 0x1A68: 84, + 0x1A69: 84, + 0x1A6A: 84, + 0x1A6B: 84, + 0x1A6C: 84, + 0x1A73: 84, + 0x1A74: 84, + 0x1A75: 84, + 0x1A76: 84, + 0x1A77: 84, + 0x1A78: 84, + 0x1A79: 84, + 0x1A7A: 84, + 0x1A7B: 84, + 0x1A7C: 84, + 0x1A7F: 84, + 0x1AB0: 84, + 0x1AB1: 84, + 0x1AB2: 84, + 0x1AB3: 84, + 0x1AB4: 84, + 0x1AB5: 84, + 0x1AB6: 84, + 0x1AB7: 84, + 0x1AB8: 84, + 0x1AB9: 84, + 0x1ABA: 84, + 0x1ABB: 84, + 0x1ABC: 84, + 0x1ABD: 84, + 0x1ABE: 84, + 0x1ABF: 84, + 0x1AC0: 84, + 0x1AC1: 84, + 0x1AC2: 84, + 0x1AC3: 84, + 0x1AC4: 84, + 0x1AC5: 84, + 0x1AC6: 84, + 0x1AC7: 84, + 0x1AC8: 84, + 0x1AC9: 84, + 0x1ACA: 84, + 0x1ACB: 84, + 0x1ACC: 84, + 0x1ACD: 84, + 0x1ACE: 84, + 0x1B00: 84, + 0x1B01: 84, + 0x1B02: 84, + 0x1B03: 84, + 0x1B34: 84, + 0x1B36: 84, + 0x1B37: 84, + 0x1B38: 84, + 0x1B39: 84, + 0x1B3A: 84, + 0x1B3C: 84, + 0x1B42: 84, + 0x1B6B: 84, + 0x1B6C: 84, + 0x1B6D: 84, + 0x1B6E: 84, + 0x1B6F: 84, + 0x1B70: 84, + 0x1B71: 84, + 0x1B72: 84, + 0x1B73: 84, + 0x1B80: 84, + 0x1B81: 84, + 0x1BA2: 84, + 0x1BA3: 84, + 0x1BA4: 84, + 0x1BA5: 84, + 0x1BA8: 84, + 0x1BA9: 84, + 0x1BAB: 84, + 0x1BAC: 84, + 0x1BAD: 84, + 0x1BE6: 84, + 0x1BE8: 84, + 0x1BE9: 84, + 0x1BED: 84, + 0x1BEF: 84, + 0x1BF0: 84, + 0x1BF1: 84, + 0x1C2C: 84, + 0x1C2D: 84, + 0x1C2E: 84, + 0x1C2F: 84, + 0x1C30: 84, + 0x1C31: 84, + 0x1C32: 84, + 0x1C33: 84, + 0x1C36: 84, + 0x1C37: 84, + 0x1CD0: 84, + 0x1CD1: 84, + 0x1CD2: 84, + 0x1CD4: 84, + 0x1CD5: 84, + 0x1CD6: 84, + 0x1CD7: 84, + 0x1CD8: 84, + 0x1CD9: 84, + 0x1CDA: 84, + 0x1CDB: 84, + 0x1CDC: 84, + 0x1CDD: 84, + 0x1CDE: 84, + 0x1CDF: 84, + 0x1CE0: 84, + 0x1CE2: 84, + 0x1CE3: 84, + 0x1CE4: 84, + 0x1CE5: 84, + 0x1CE6: 84, + 0x1CE7: 84, + 0x1CE8: 84, + 0x1CED: 84, + 0x1CF4: 84, + 0x1CF8: 84, + 0x1CF9: 84, + 0x1DC0: 84, + 0x1DC1: 84, 
+ 0x1DC2: 84, + 0x1DC3: 84, + 0x1DC4: 84, + 0x1DC5: 84, + 0x1DC6: 84, + 0x1DC7: 84, + 0x1DC8: 84, + 0x1DC9: 84, + 0x1DCA: 84, + 0x1DCB: 84, + 0x1DCC: 84, + 0x1DCD: 84, + 0x1DCE: 84, + 0x1DCF: 84, + 0x1DD0: 84, + 0x1DD1: 84, + 0x1DD2: 84, + 0x1DD3: 84, + 0x1DD4: 84, + 0x1DD5: 84, + 0x1DD6: 84, + 0x1DD7: 84, + 0x1DD8: 84, + 0x1DD9: 84, + 0x1DDA: 84, + 0x1DDB: 84, + 0x1DDC: 84, + 0x1DDD: 84, + 0x1DDE: 84, + 0x1DDF: 84, + 0x1DE0: 84, + 0x1DE1: 84, + 0x1DE2: 84, + 0x1DE3: 84, + 0x1DE4: 84, + 0x1DE5: 84, + 0x1DE6: 84, + 0x1DE7: 84, + 0x1DE8: 84, + 0x1DE9: 84, + 0x1DEA: 84, + 0x1DEB: 84, + 0x1DEC: 84, + 0x1DED: 84, + 0x1DEE: 84, + 0x1DEF: 84, + 0x1DF0: 84, + 0x1DF1: 84, + 0x1DF2: 84, + 0x1DF3: 84, + 0x1DF4: 84, + 0x1DF5: 84, + 0x1DF6: 84, + 0x1DF7: 84, + 0x1DF8: 84, + 0x1DF9: 84, + 0x1DFA: 84, + 0x1DFB: 84, + 0x1DFC: 84, + 0x1DFD: 84, + 0x1DFE: 84, + 0x1DFF: 84, + 0x200B: 84, + 0x200D: 67, + 0x200E: 84, + 0x200F: 84, + 0x202A: 84, + 0x202B: 84, + 0x202C: 84, + 0x202D: 84, + 0x202E: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206A: 84, + 0x206B: 84, + 0x206C: 84, + 0x206D: 84, + 0x206E: 84, + 0x206F: 84, + 0x20D0: 84, + 0x20D1: 84, + 0x20D2: 84, + 0x20D3: 84, + 0x20D4: 84, + 0x20D5: 84, + 0x20D6: 84, + 0x20D7: 84, + 0x20D8: 84, + 0x20D9: 84, + 0x20DA: 84, + 0x20DB: 84, + 0x20DC: 84, + 0x20DD: 84, + 0x20DE: 84, + 0x20DF: 84, + 0x20E0: 84, + 0x20E1: 84, + 0x20E2: 84, + 0x20E3: 84, + 0x20E4: 84, + 0x20E5: 84, + 0x20E6: 84, + 0x20E7: 84, + 0x20E8: 84, + 0x20E9: 84, + 0x20EA: 84, + 0x20EB: 84, + 0x20EC: 84, + 0x20ED: 84, + 0x20EE: 84, + 0x20EF: 84, + 0x20F0: 84, + 0x2CEF: 84, + 0x2CF0: 84, + 0x2CF1: 84, + 0x2D7F: 84, + 0x2DE0: 84, + 0x2DE1: 84, + 0x2DE2: 84, + 0x2DE3: 84, + 0x2DE4: 84, + 0x2DE5: 84, + 0x2DE6: 84, + 0x2DE7: 84, + 0x2DE8: 84, + 0x2DE9: 84, + 0x2DEA: 84, + 0x2DEB: 84, + 0x2DEC: 84, + 0x2DED: 84, + 0x2DEE: 84, + 0x2DEF: 84, + 0x2DF0: 84, + 0x2DF1: 84, + 0x2DF2: 84, + 0x2DF3: 84, + 0x2DF4: 84, + 0x2DF5: 84, + 0x2DF6: 84, + 0x2DF7: 84, + 0x2DF8: 84, + 0x2DF9: 84, + 0x2DFA: 84, + 0x2DFB: 84, + 0x2DFC: 84, + 0x2DFD: 84, + 0x2DFE: 84, + 0x2DFF: 84, + 0x302A: 84, + 0x302B: 84, + 0x302C: 84, + 0x302D: 84, + 0x3099: 84, + 0x309A: 84, + 0xA66F: 84, + 0xA670: 84, + 0xA671: 84, + 0xA672: 84, + 0xA674: 84, + 0xA675: 84, + 0xA676: 84, + 0xA677: 84, + 0xA678: 84, + 0xA679: 84, + 0xA67A: 84, + 0xA67B: 84, + 0xA67C: 84, + 0xA67D: 84, + 0xA69E: 84, + 0xA69F: 84, + 0xA6F0: 84, + 0xA6F1: 84, + 0xA802: 84, + 0xA806: 84, + 0xA80B: 84, + 0xA825: 84, + 0xA826: 84, + 0xA82C: 84, + 0xA840: 68, + 0xA841: 68, + 0xA842: 68, + 0xA843: 68, + 0xA844: 68, + 0xA845: 68, + 0xA846: 68, + 0xA847: 68, + 0xA848: 68, + 0xA849: 68, + 0xA84A: 68, + 0xA84B: 68, + 0xA84C: 68, + 0xA84D: 68, + 0xA84E: 68, + 0xA84F: 68, + 0xA850: 68, + 0xA851: 68, + 0xA852: 68, + 0xA853: 68, + 0xA854: 68, + 0xA855: 68, + 0xA856: 68, + 0xA857: 68, + 0xA858: 68, + 0xA859: 68, + 0xA85A: 68, + 0xA85B: 68, + 0xA85C: 68, + 0xA85D: 68, + 0xA85E: 68, + 0xA85F: 68, + 0xA860: 68, + 0xA861: 68, + 0xA862: 68, + 0xA863: 68, + 0xA864: 68, + 0xA865: 68, + 0xA866: 68, + 0xA867: 68, + 0xA868: 68, + 0xA869: 68, + 0xA86A: 68, + 0xA86B: 68, + 0xA86C: 68, + 0xA86D: 68, + 0xA86E: 68, + 0xA86F: 68, + 0xA870: 68, + 0xA871: 68, + 0xA872: 76, + 0xA8C4: 84, + 0xA8C5: 84, + 0xA8E0: 84, + 0xA8E1: 84, + 0xA8E2: 84, + 0xA8E3: 84, + 0xA8E4: 84, + 0xA8E5: 84, + 0xA8E6: 84, + 0xA8E7: 84, + 0xA8E8: 84, + 0xA8E9: 84, + 0xA8EA: 84, + 0xA8EB: 84, + 0xA8EC: 84, + 0xA8ED: 84, + 0xA8EE: 84, + 0xA8EF: 84, + 0xA8F0: 84, + 0xA8F1: 84, + 0xA8FF: 84, + 0xA926: 
84, + 0xA927: 84, + 0xA928: 84, + 0xA929: 84, + 0xA92A: 84, + 0xA92B: 84, + 0xA92C: 84, + 0xA92D: 84, + 0xA947: 84, + 0xA948: 84, + 0xA949: 84, + 0xA94A: 84, + 0xA94B: 84, + 0xA94C: 84, + 0xA94D: 84, + 0xA94E: 84, + 0xA94F: 84, + 0xA950: 84, + 0xA951: 84, + 0xA980: 84, + 0xA981: 84, + 0xA982: 84, + 0xA9B3: 84, + 0xA9B6: 84, + 0xA9B7: 84, + 0xA9B8: 84, + 0xA9B9: 84, + 0xA9BC: 84, + 0xA9BD: 84, + 0xA9E5: 84, + 0xAA29: 84, + 0xAA2A: 84, + 0xAA2B: 84, + 0xAA2C: 84, + 0xAA2D: 84, + 0xAA2E: 84, + 0xAA31: 84, + 0xAA32: 84, + 0xAA35: 84, + 0xAA36: 84, + 0xAA43: 84, + 0xAA4C: 84, + 0xAA7C: 84, + 0xAAB0: 84, + 0xAAB2: 84, + 0xAAB3: 84, + 0xAAB4: 84, + 0xAAB7: 84, + 0xAAB8: 84, + 0xAABE: 84, + 0xAABF: 84, + 0xAAC1: 84, + 0xAAEC: 84, + 0xAAED: 84, + 0xAAF6: 84, + 0xABE5: 84, + 0xABE8: 84, + 0xABED: 84, + 0xFB1E: 84, + 0xFE00: 84, + 0xFE01: 84, + 0xFE02: 84, + 0xFE03: 84, + 0xFE04: 84, + 0xFE05: 84, + 0xFE06: 84, + 0xFE07: 84, + 0xFE08: 84, + 0xFE09: 84, + 0xFE0A: 84, + 0xFE0B: 84, + 0xFE0C: 84, + 0xFE0D: 84, + 0xFE0E: 84, + 0xFE0F: 84, + 0xFE20: 84, + 0xFE21: 84, + 0xFE22: 84, + 0xFE23: 84, + 0xFE24: 84, + 0xFE25: 84, + 0xFE26: 84, + 0xFE27: 84, + 0xFE28: 84, + 0xFE29: 84, + 0xFE2A: 84, + 0xFE2B: 84, + 0xFE2C: 84, + 0xFE2D: 84, + 0xFE2E: 84, + 0xFE2F: 84, + 0xFEFF: 84, + 0xFFF9: 84, + 0xFFFA: 84, + 0xFFFB: 84, + 0x101FD: 84, + 0x102E0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037A: 84, + 0x10A01: 84, + 0x10A02: 84, + 0x10A03: 84, + 0x10A05: 84, + 0x10A06: 84, + 0x10A0C: 84, + 0x10A0D: 84, + 0x10A0E: 84, + 0x10A0F: 84, + 0x10A38: 84, + 0x10A39: 84, + 0x10A3A: 84, + 0x10A3F: 84, + 0x10AC0: 68, + 0x10AC1: 68, + 0x10AC2: 68, + 0x10AC3: 68, + 0x10AC4: 68, + 0x10AC5: 82, + 0x10AC7: 82, + 0x10AC9: 82, + 0x10ACA: 82, + 0x10ACD: 76, + 0x10ACE: 82, + 0x10ACF: 82, + 0x10AD0: 82, + 0x10AD1: 82, + 0x10AD2: 82, + 0x10AD3: 68, + 0x10AD4: 68, + 0x10AD5: 68, + 0x10AD6: 68, + 0x10AD7: 76, + 0x10AD8: 68, + 0x10AD9: 68, + 0x10ADA: 68, + 0x10ADB: 68, + 0x10ADC: 68, + 0x10ADD: 82, + 0x10ADE: 68, + 0x10ADF: 68, + 0x10AE0: 68, + 0x10AE1: 82, + 0x10AE4: 82, + 0x10AE5: 84, + 0x10AE6: 84, + 0x10AEB: 68, + 0x10AEC: 68, + 0x10AED: 68, + 0x10AEE: 68, + 0x10AEF: 82, + 0x10B80: 68, + 0x10B81: 82, + 0x10B82: 68, + 0x10B83: 82, + 0x10B84: 82, + 0x10B85: 82, + 0x10B86: 68, + 0x10B87: 68, + 0x10B88: 68, + 0x10B89: 82, + 0x10B8A: 68, + 0x10B8B: 68, + 0x10B8C: 82, + 0x10B8D: 68, + 0x10B8E: 82, + 0x10B8F: 82, + 0x10B90: 68, + 0x10B91: 82, + 0x10BA9: 82, + 0x10BAA: 82, + 0x10BAB: 82, + 0x10BAC: 82, + 0x10BAD: 68, + 0x10BAE: 68, + 0x10D00: 76, + 0x10D01: 68, + 0x10D02: 68, + 0x10D03: 68, + 0x10D04: 68, + 0x10D05: 68, + 0x10D06: 68, + 0x10D07: 68, + 0x10D08: 68, + 0x10D09: 68, + 0x10D0A: 68, + 0x10D0B: 68, + 0x10D0C: 68, + 0x10D0D: 68, + 0x10D0E: 68, + 0x10D0F: 68, + 0x10D10: 68, + 0x10D11: 68, + 0x10D12: 68, + 0x10D13: 68, + 0x10D14: 68, + 0x10D15: 68, + 0x10D16: 68, + 0x10D17: 68, + 0x10D18: 68, + 0x10D19: 68, + 0x10D1A: 68, + 0x10D1B: 68, + 0x10D1C: 68, + 0x10D1D: 68, + 0x10D1E: 68, + 0x10D1F: 68, + 0x10D20: 68, + 0x10D21: 68, + 0x10D22: 82, + 0x10D23: 68, + 0x10D24: 84, + 0x10D25: 84, + 0x10D26: 84, + 0x10D27: 84, + 0x10D69: 84, + 0x10D6A: 84, + 0x10D6B: 84, + 0x10D6C: 84, + 0x10D6D: 84, + 0x10EAB: 84, + 0x10EAC: 84, + 0x10EC2: 82, + 0x10EC3: 68, + 0x10EC4: 68, + 0x10EFC: 84, + 0x10EFD: 84, + 0x10EFE: 84, + 0x10EFF: 84, + 0x10F30: 68, + 0x10F31: 68, + 0x10F32: 68, + 0x10F33: 82, + 0x10F34: 68, + 0x10F35: 68, + 0x10F36: 68, + 0x10F37: 68, + 0x10F38: 68, + 0x10F39: 68, + 0x10F3A: 68, + 0x10F3B: 68, + 0x10F3C: 68, 
+ 0x10F3D: 68, + 0x10F3E: 68, + 0x10F3F: 68, + 0x10F40: 68, + 0x10F41: 68, + 0x10F42: 68, + 0x10F43: 68, + 0x10F44: 68, + 0x10F46: 84, + 0x10F47: 84, + 0x10F48: 84, + 0x10F49: 84, + 0x10F4A: 84, + 0x10F4B: 84, + 0x10F4C: 84, + 0x10F4D: 84, + 0x10F4E: 84, + 0x10F4F: 84, + 0x10F50: 84, + 0x10F51: 68, + 0x10F52: 68, + 0x10F53: 68, + 0x10F54: 82, + 0x10F70: 68, + 0x10F71: 68, + 0x10F72: 68, + 0x10F73: 68, + 0x10F74: 82, + 0x10F75: 82, + 0x10F76: 68, + 0x10F77: 68, + 0x10F78: 68, + 0x10F79: 68, + 0x10F7A: 68, + 0x10F7B: 68, + 0x10F7C: 68, + 0x10F7D: 68, + 0x10F7E: 68, + 0x10F7F: 68, + 0x10F80: 68, + 0x10F81: 68, + 0x10F82: 84, + 0x10F83: 84, + 0x10F84: 84, + 0x10F85: 84, + 0x10FB0: 68, + 0x10FB2: 68, + 0x10FB3: 68, + 0x10FB4: 82, + 0x10FB5: 82, + 0x10FB6: 82, + 0x10FB8: 68, + 0x10FB9: 82, + 0x10FBA: 82, + 0x10FBB: 68, + 0x10FBC: 68, + 0x10FBD: 82, + 0x10FBE: 68, + 0x10FBF: 68, + 0x10FC1: 68, + 0x10FC2: 82, + 0x10FC3: 82, + 0x10FC4: 68, + 0x10FC9: 82, + 0x10FCA: 68, + 0x10FCB: 76, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103A: 84, + 0x1103B: 84, + 0x1103C: 84, + 0x1103D: 84, + 0x1103E: 84, + 0x1103F: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107F: 84, + 0x11080: 84, + 0x11081: 84, + 0x110B3: 84, + 0x110B4: 84, + 0x110B5: 84, + 0x110B6: 84, + 0x110B9: 84, + 0x110BA: 84, + 0x110C2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 84, + 0x1112A: 84, + 0x1112B: 84, + 0x1112D: 84, + 0x1112E: 84, + 0x1112F: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111B6: 84, + 0x111B7: 84, + 0x111B8: 84, + 0x111B9: 84, + 0x111BA: 84, + 0x111BB: 84, + 0x111BC: 84, + 0x111BD: 84, + 0x111BE: 84, + 0x111C9: 84, + 0x111CA: 84, + 0x111CB: 84, + 0x111CC: 84, + 0x111CF: 84, + 0x1122F: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123E: 84, + 0x11241: 84, + 0x112DF: 84, + 0x112E3: 84, + 0x112E4: 84, + 0x112E5: 84, + 0x112E6: 84, + 0x112E7: 84, + 0x112E8: 84, + 0x112E9: 84, + 0x112EA: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133B: 84, + 0x1133C: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136A: 84, + 0x1136B: 84, + 0x1136C: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x113BB: 84, + 0x113BC: 84, + 0x113BD: 84, + 0x113BE: 84, + 0x113BF: 84, + 0x113C0: 84, + 0x113CE: 84, + 0x113D0: 84, + 0x113D2: 84, + 0x113E1: 84, + 0x113E2: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143A: 84, + 0x1143B: 84, + 0x1143C: 84, + 0x1143D: 84, + 0x1143E: 84, + 0x1143F: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145E: 84, + 0x114B3: 84, + 0x114B4: 84, + 0x114B5: 84, + 0x114B6: 84, + 0x114B7: 84, + 0x114B8: 84, + 0x114BA: 84, + 0x114BF: 84, + 0x114C0: 84, + 0x114C2: 84, + 0x114C3: 84, + 0x115B2: 84, + 0x115B3: 84, + 0x115B4: 84, + 0x115B5: 84, + 0x115BC: 84, + 0x115BD: 84, + 0x115BF: 84, + 0x115C0: 84, + 0x115DC: 84, + 0x115DD: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163A: 84, + 0x1163D: 84, + 0x1163F: 84, + 0x11640: 84, + 0x116AB: 84, + 0x116AD: 84, + 0x116B0: 84, + 0x116B1: 84, + 0x116B2: 84, + 0x116B3: 84, + 0x116B4: 84, + 0x116B5: 84, + 0x116B7: 84, + 0x1171D: 84, + 0x1171F: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172A: 84, 
+ 0x1172B: 84, + 0x1182F: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 0x11837: 84, + 0x11839: 84, + 0x1183A: 84, + 0x1193B: 84, + 0x1193C: 84, + 0x1193E: 84, + 0x11943: 84, + 0x119D4: 84, + 0x119D5: 84, + 0x119D6: 84, + 0x119D7: 84, + 0x119DA: 84, + 0x119DB: 84, + 0x119E0: 84, + 0x11A01: 84, + 0x11A02: 84, + 0x11A03: 84, + 0x11A04: 84, + 0x11A05: 84, + 0x11A06: 84, + 0x11A07: 84, + 0x11A08: 84, + 0x11A09: 84, + 0x11A0A: 84, + 0x11A33: 84, + 0x11A34: 84, + 0x11A35: 84, + 0x11A36: 84, + 0x11A37: 84, + 0x11A38: 84, + 0x11A3B: 84, + 0x11A3C: 84, + 0x11A3D: 84, + 0x11A3E: 84, + 0x11A47: 84, + 0x11A51: 84, + 0x11A52: 84, + 0x11A53: 84, + 0x11A54: 84, + 0x11A55: 84, + 0x11A56: 84, + 0x11A59: 84, + 0x11A5A: 84, + 0x11A5B: 84, + 0x11A8A: 84, + 0x11A8B: 84, + 0x11A8C: 84, + 0x11A8D: 84, + 0x11A8E: 84, + 0x11A8F: 84, + 0x11A90: 84, + 0x11A91: 84, + 0x11A92: 84, + 0x11A93: 84, + 0x11A94: 84, + 0x11A95: 84, + 0x11A96: 84, + 0x11A98: 84, + 0x11A99: 84, + 0x11C30: 84, + 0x11C31: 84, + 0x11C32: 84, + 0x11C33: 84, + 0x11C34: 84, + 0x11C35: 84, + 0x11C36: 84, + 0x11C38: 84, + 0x11C39: 84, + 0x11C3A: 84, + 0x11C3B: 84, + 0x11C3C: 84, + 0x11C3D: 84, + 0x11C3F: 84, + 0x11C92: 84, + 0x11C93: 84, + 0x11C94: 84, + 0x11C95: 84, + 0x11C96: 84, + 0x11C97: 84, + 0x11C98: 84, + 0x11C99: 84, + 0x11C9A: 84, + 0x11C9B: 84, + 0x11C9C: 84, + 0x11C9D: 84, + 0x11C9E: 84, + 0x11C9F: 84, + 0x11CA0: 84, + 0x11CA1: 84, + 0x11CA2: 84, + 0x11CA3: 84, + 0x11CA4: 84, + 0x11CA5: 84, + 0x11CA6: 84, + 0x11CA7: 84, + 0x11CAA: 84, + 0x11CAB: 84, + 0x11CAC: 84, + 0x11CAD: 84, + 0x11CAE: 84, + 0x11CAF: 84, + 0x11CB0: 84, + 0x11CB2: 84, + 0x11CB3: 84, + 0x11CB5: 84, + 0x11CB6: 84, + 0x11D31: 84, + 0x11D32: 84, + 0x11D33: 84, + 0x11D34: 84, + 0x11D35: 84, + 0x11D36: 84, + 0x11D3A: 84, + 0x11D3C: 84, + 0x11D3D: 84, + 0x11D3F: 84, + 0x11D40: 84, + 0x11D41: 84, + 0x11D42: 84, + 0x11D43: 84, + 0x11D44: 84, + 0x11D45: 84, + 0x11D47: 84, + 0x11D90: 84, + 0x11D91: 84, + 0x11D95: 84, + 0x11D97: 84, + 0x11EF3: 84, + 0x11EF4: 84, + 0x11F00: 84, + 0x11F01: 84, + 0x11F36: 84, + 0x11F37: 84, + 0x11F38: 84, + 0x11F39: 84, + 0x11F3A: 84, + 0x11F40: 84, + 0x11F42: 84, + 0x11F5A: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343A: 84, + 0x1343B: 84, + 0x1343C: 84, + 0x1343D: 84, + 0x1343E: 84, + 0x1343F: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344A: 84, + 0x1344B: 84, + 0x1344C: 84, + 0x1344D: 84, + 0x1344E: 84, + 0x1344F: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x1611E: 84, + 0x1611F: 84, + 0x16120: 84, + 0x16121: 84, + 0x16122: 84, + 0x16123: 84, + 0x16124: 84, + 0x16125: 84, + 0x16126: 84, + 0x16127: 84, + 0x16128: 84, + 0x16129: 84, + 0x1612D: 84, + 0x1612E: 84, + 0x1612F: 84, + 0x16AF0: 84, + 0x16AF1: 84, + 0x16AF2: 84, + 0x16AF3: 84, + 0x16AF4: 84, + 0x16B30: 84, + 0x16B31: 84, + 0x16B32: 84, + 0x16B33: 84, + 0x16B34: 84, + 0x16B35: 84, + 0x16B36: 84, + 0x16F4F: 84, + 0x16F8F: 84, + 0x16F90: 84, + 0x16F91: 84, + 0x16F92: 84, + 0x16FE4: 84, + 0x1BC9D: 84, + 0x1BC9E: 84, + 0x1BCA0: 84, + 0x1BCA1: 84, + 0x1BCA2: 84, + 0x1BCA3: 84, + 0x1CF00: 84, + 0x1CF01: 84, + 0x1CF02: 84, + 0x1CF03: 84, + 0x1CF04: 84, + 0x1CF05: 84, + 0x1CF06: 84, + 0x1CF07: 84, + 0x1CF08: 84, + 0x1CF09: 84, + 0x1CF0A: 84, + 0x1CF0B: 84, + 0x1CF0C: 84, + 0x1CF0D: 84, + 0x1CF0E: 84, + 0x1CF0F: 84, + 0x1CF10: 84, + 0x1CF11: 84, 
+ 0x1CF12: 84, + 0x1CF13: 84, + 0x1CF14: 84, + 0x1CF15: 84, + 0x1CF16: 84, + 0x1CF17: 84, + 0x1CF18: 84, + 0x1CF19: 84, + 0x1CF1A: 84, + 0x1CF1B: 84, + 0x1CF1C: 84, + 0x1CF1D: 84, + 0x1CF1E: 84, + 0x1CF1F: 84, + 0x1CF20: 84, + 0x1CF21: 84, + 0x1CF22: 84, + 0x1CF23: 84, + 0x1CF24: 84, + 0x1CF25: 84, + 0x1CF26: 84, + 0x1CF27: 84, + 0x1CF28: 84, + 0x1CF29: 84, + 0x1CF2A: 84, + 0x1CF2B: 84, + 0x1CF2C: 84, + 0x1CF2D: 84, + 0x1CF30: 84, + 0x1CF31: 84, + 0x1CF32: 84, + 0x1CF33: 84, + 0x1CF34: 84, + 0x1CF35: 84, + 0x1CF36: 84, + 0x1CF37: 84, + 0x1CF38: 84, + 0x1CF39: 84, + 0x1CF3A: 84, + 0x1CF3B: 84, + 0x1CF3C: 84, + 0x1CF3D: 84, + 0x1CF3E: 84, + 0x1CF3F: 84, + 0x1CF40: 84, + 0x1CF41: 84, + 0x1CF42: 84, + 0x1CF43: 84, + 0x1CF44: 84, + 0x1CF45: 84, + 0x1CF46: 84, + 0x1D167: 84, + 0x1D168: 84, + 0x1D169: 84, + 0x1D173: 84, + 0x1D174: 84, + 0x1D175: 84, + 0x1D176: 84, + 0x1D177: 84, + 0x1D178: 84, + 0x1D179: 84, + 0x1D17A: 84, + 0x1D17B: 84, + 0x1D17C: 84, + 0x1D17D: 84, + 0x1D17E: 84, + 0x1D17F: 84, + 0x1D180: 84, + 0x1D181: 84, + 0x1D182: 84, + 0x1D185: 84, + 0x1D186: 84, + 0x1D187: 84, + 0x1D188: 84, + 0x1D189: 84, + 0x1D18A: 84, + 0x1D18B: 84, + 0x1D1AA: 84, + 0x1D1AB: 84, + 0x1D1AC: 84, + 0x1D1AD: 84, + 0x1D242: 84, + 0x1D243: 84, + 0x1D244: 84, + 0x1DA00: 84, + 0x1DA01: 84, + 0x1DA02: 84, + 0x1DA03: 84, + 0x1DA04: 84, + 0x1DA05: 84, + 0x1DA06: 84, + 0x1DA07: 84, + 0x1DA08: 84, + 0x1DA09: 84, + 0x1DA0A: 84, + 0x1DA0B: 84, + 0x1DA0C: 84, + 0x1DA0D: 84, + 0x1DA0E: 84, + 0x1DA0F: 84, + 0x1DA10: 84, + 0x1DA11: 84, + 0x1DA12: 84, + 0x1DA13: 84, + 0x1DA14: 84, + 0x1DA15: 84, + 0x1DA16: 84, + 0x1DA17: 84, + 0x1DA18: 84, + 0x1DA19: 84, + 0x1DA1A: 84, + 0x1DA1B: 84, + 0x1DA1C: 84, + 0x1DA1D: 84, + 0x1DA1E: 84, + 0x1DA1F: 84, + 0x1DA20: 84, + 0x1DA21: 84, + 0x1DA22: 84, + 0x1DA23: 84, + 0x1DA24: 84, + 0x1DA25: 84, + 0x1DA26: 84, + 0x1DA27: 84, + 0x1DA28: 84, + 0x1DA29: 84, + 0x1DA2A: 84, + 0x1DA2B: 84, + 0x1DA2C: 84, + 0x1DA2D: 84, + 0x1DA2E: 84, + 0x1DA2F: 84, + 0x1DA30: 84, + 0x1DA31: 84, + 0x1DA32: 84, + 0x1DA33: 84, + 0x1DA34: 84, + 0x1DA35: 84, + 0x1DA36: 84, + 0x1DA3B: 84, + 0x1DA3C: 84, + 0x1DA3D: 84, + 0x1DA3E: 84, + 0x1DA3F: 84, + 0x1DA40: 84, + 0x1DA41: 84, + 0x1DA42: 84, + 0x1DA43: 84, + 0x1DA44: 84, + 0x1DA45: 84, + 0x1DA46: 84, + 0x1DA47: 84, + 0x1DA48: 84, + 0x1DA49: 84, + 0x1DA4A: 84, + 0x1DA4B: 84, + 0x1DA4C: 84, + 0x1DA4D: 84, + 0x1DA4E: 84, + 0x1DA4F: 84, + 0x1DA50: 84, + 0x1DA51: 84, + 0x1DA52: 84, + 0x1DA53: 84, + 0x1DA54: 84, + 0x1DA55: 84, + 0x1DA56: 84, + 0x1DA57: 84, + 0x1DA58: 84, + 0x1DA59: 84, + 0x1DA5A: 84, + 0x1DA5B: 84, + 0x1DA5C: 84, + 0x1DA5D: 84, + 0x1DA5E: 84, + 0x1DA5F: 84, + 0x1DA60: 84, + 0x1DA61: 84, + 0x1DA62: 84, + 0x1DA63: 84, + 0x1DA64: 84, + 0x1DA65: 84, + 0x1DA66: 84, + 0x1DA67: 84, + 0x1DA68: 84, + 0x1DA69: 84, + 0x1DA6A: 84, + 0x1DA6B: 84, + 0x1DA6C: 84, + 0x1DA75: 84, + 0x1DA84: 84, + 0x1DA9B: 84, + 0x1DA9C: 84, + 0x1DA9D: 84, + 0x1DA9E: 84, + 0x1DA9F: 84, + 0x1DAA1: 84, + 0x1DAA2: 84, + 0x1DAA3: 84, + 0x1DAA4: 84, + 0x1DAA5: 84, + 0x1DAA6: 84, + 0x1DAA7: 84, + 0x1DAA8: 84, + 0x1DAA9: 84, + 0x1DAAA: 84, + 0x1DAAB: 84, + 0x1DAAC: 84, + 0x1DAAD: 84, + 0x1DAAE: 84, + 0x1DAAF: 84, + 0x1E000: 84, + 0x1E001: 84, + 0x1E002: 84, + 0x1E003: 84, + 0x1E004: 84, + 0x1E005: 84, + 0x1E006: 84, + 0x1E008: 84, + 0x1E009: 84, + 0x1E00A: 84, + 0x1E00B: 84, + 0x1E00C: 84, + 0x1E00D: 84, + 0x1E00E: 84, + 0x1E00F: 84, + 0x1E010: 84, + 0x1E011: 84, + 0x1E012: 84, + 0x1E013: 84, + 0x1E014: 84, + 0x1E015: 84, + 0x1E016: 84, + 0x1E017: 84, + 0x1E018: 84, + 0x1E01B: 84, + 0x1E01C: 84, 
+ 0x1E01D: 84, + 0x1E01E: 84, + 0x1E01F: 84, + 0x1E020: 84, + 0x1E021: 84, + 0x1E023: 84, + 0x1E024: 84, + 0x1E026: 84, + 0x1E027: 84, + 0x1E028: 84, + 0x1E029: 84, + 0x1E02A: 84, + 0x1E08F: 84, + 0x1E130: 84, + 0x1E131: 84, + 0x1E132: 84, + 0x1E133: 84, + 0x1E134: 84, + 0x1E135: 84, + 0x1E136: 84, + 0x1E2AE: 84, + 0x1E2EC: 84, + 0x1E2ED: 84, + 0x1E2EE: 84, + 0x1E2EF: 84, + 0x1E4EC: 84, + 0x1E4ED: 84, + 0x1E4EE: 84, + 0x1E4EF: 84, + 0x1E5EE: 84, + 0x1E5EF: 84, + 0x1E8D0: 84, + 0x1E8D1: 84, + 0x1E8D2: 84, + 0x1E8D3: 84, + 0x1E8D4: 84, + 0x1E8D5: 84, + 0x1E8D6: 84, + 0x1E900: 68, + 0x1E901: 68, + 0x1E902: 68, + 0x1E903: 68, + 0x1E904: 68, + 0x1E905: 68, + 0x1E906: 68, + 0x1E907: 68, + 0x1E908: 68, + 0x1E909: 68, + 0x1E90A: 68, + 0x1E90B: 68, + 0x1E90C: 68, + 0x1E90D: 68, + 0x1E90E: 68, + 0x1E90F: 68, + 0x1E910: 68, + 0x1E911: 68, + 0x1E912: 68, + 0x1E913: 68, + 0x1E914: 68, + 0x1E915: 68, + 0x1E916: 68, + 0x1E917: 68, + 0x1E918: 68, + 0x1E919: 68, + 0x1E91A: 68, + 0x1E91B: 68, + 0x1E91C: 68, + 0x1E91D: 68, + 0x1E91E: 68, + 0x1E91F: 68, + 0x1E920: 68, + 0x1E921: 68, + 0x1E922: 68, + 0x1E923: 68, + 0x1E924: 68, + 0x1E925: 68, + 0x1E926: 68, + 0x1E927: 68, + 0x1E928: 68, + 0x1E929: 68, + 0x1E92A: 68, + 0x1E92B: 68, + 0x1E92C: 68, + 0x1E92D: 68, + 0x1E92E: 68, + 0x1E92F: 68, + 0x1E930: 68, + 0x1E931: 68, + 0x1E932: 68, + 0x1E933: 68, + 0x1E934: 68, + 0x1E935: 68, + 0x1E936: 68, + 0x1E937: 68, + 0x1E938: 68, + 0x1E939: 68, + 0x1E93A: 68, + 0x1E93B: 68, + 0x1E93C: 68, + 0x1E93D: 68, + 0x1E93E: 68, + 0x1E93F: 68, + 0x1E940: 68, + 0x1E941: 68, + 0x1E942: 68, + 0x1E943: 68, + 0x1E944: 84, + 0x1E945: 84, + 0x1E946: 84, + 0x1E947: 84, + 0x1E948: 84, + 0x1E949: 84, + 0x1E94A: 84, + 0x1E94B: 84, + 0xE0001: 84, + 0xE0020: 84, + 0xE0021: 84, + 0xE0022: 84, + 0xE0023: 84, + 0xE0024: 84, + 0xE0025: 84, + 0xE0026: 84, + 0xE0027: 84, + 0xE0028: 84, + 0xE0029: 84, + 0xE002A: 84, + 0xE002B: 84, + 0xE002C: 84, + 0xE002D: 84, + 0xE002E: 84, + 0xE002F: 84, + 0xE0030: 84, + 0xE0031: 84, + 0xE0032: 84, + 0xE0033: 84, + 0xE0034: 84, + 0xE0035: 84, + 0xE0036: 84, + 0xE0037: 84, + 0xE0038: 84, + 0xE0039: 84, + 0xE003A: 84, + 0xE003B: 84, + 0xE003C: 84, + 0xE003D: 84, + 0xE003E: 84, + 0xE003F: 84, + 0xE0040: 84, + 0xE0041: 84, + 0xE0042: 84, + 0xE0043: 84, + 0xE0044: 84, + 0xE0045: 84, + 0xE0046: 84, + 0xE0047: 84, + 0xE0048: 84, + 0xE0049: 84, + 0xE004A: 84, + 0xE004B: 84, + 0xE004C: 84, + 0xE004D: 84, + 0xE004E: 84, + 0xE004F: 84, + 0xE0050: 84, + 0xE0051: 84, + 0xE0052: 84, + 0xE0053: 84, + 0xE0054: 84, + 0xE0055: 84, + 0xE0056: 84, + 0xE0057: 84, + 0xE0058: 84, + 0xE0059: 84, + 0xE005A: 84, + 0xE005B: 84, + 0xE005C: 84, + 0xE005D: 84, + 0xE005E: 84, + 0xE005F: 84, + 0xE0060: 84, + 0xE0061: 84, + 0xE0062: 84, + 0xE0063: 84, + 0xE0064: 84, + 0xE0065: 84, + 0xE0066: 84, + 0xE0067: 84, + 0xE0068: 84, + 0xE0069: 84, + 0xE006A: 84, + 0xE006B: 84, + 0xE006C: 84, + 0xE006D: 84, + 0xE006E: 84, + 0xE006F: 84, + 0xE0070: 84, + 0xE0071: 84, + 0xE0072: 84, + 0xE0073: 84, + 0xE0074: 84, + 0xE0075: 84, + 0xE0076: 84, + 0xE0077: 84, + 0xE0078: 84, + 0xE0079: 84, + 0xE007A: 84, + 0xE007B: 84, + 0xE007C: 84, + 0xE007D: 84, + 0xE007E: 84, + 0xE007F: 84, + 0xE0100: 84, + 0xE0101: 84, + 0xE0102: 84, + 0xE0103: 84, + 0xE0104: 84, + 0xE0105: 84, + 0xE0106: 84, + 0xE0107: 84, + 0xE0108: 84, + 0xE0109: 84, + 0xE010A: 84, + 0xE010B: 84, + 0xE010C: 84, + 0xE010D: 84, + 0xE010E: 84, + 0xE010F: 84, + 0xE0110: 84, + 0xE0111: 84, + 0xE0112: 84, + 0xE0113: 84, + 0xE0114: 84, + 0xE0115: 84, + 0xE0116: 84, + 0xE0117: 84, + 0xE0118: 84, + 0xE0119: 84, 
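+ # (editor's note, not in upstream idna: the integer values in joining_types are
+ # ord() of the Unicode joining-type letter -- 68 = "D" dual-joining,
+ # 82 = "R" right-joining, 76 = "L" left-joining, 84 = "T" transparent,
+ # 67 = "C" join-causing -- and each entry of the codepoint_classes table
+ # below packs a half-open codepoint range as start << 32 | end, e.g.
+ # 0x2D0000002E covers only U+002D and 0x610000007B covers a-z.)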
+ 0xE011A: 84, + 0xE011B: 84, + 0xE011C: 84, + 0xE011D: 84, + 0xE011E: 84, + 0xE011F: 84, + 0xE0120: 84, + 0xE0121: 84, + 0xE0122: 84, + 0xE0123: 84, + 0xE0124: 84, + 0xE0125: 84, + 0xE0126: 84, + 0xE0127: 84, + 0xE0128: 84, + 0xE0129: 84, + 0xE012A: 84, + 0xE012B: 84, + 0xE012C: 84, + 0xE012D: 84, + 0xE012E: 84, + 0xE012F: 84, + 0xE0130: 84, + 0xE0131: 84, + 0xE0132: 84, + 0xE0133: 84, + 0xE0134: 84, + 0xE0135: 84, + 0xE0136: 84, + 0xE0137: 84, + 0xE0138: 84, + 0xE0139: 84, + 0xE013A: 84, + 0xE013B: 84, + 0xE013C: 84, + 0xE013D: 84, + 0xE013E: 84, + 0xE013F: 84, + 0xE0140: 84, + 0xE0141: 84, + 0xE0142: 84, + 0xE0143: 84, + 0xE0144: 84, + 0xE0145: 84, + 0xE0146: 84, + 0xE0147: 84, + 0xE0148: 84, + 0xE0149: 84, + 0xE014A: 84, + 0xE014B: 84, + 0xE014C: 84, + 0xE014D: 84, + 0xE014E: 84, + 0xE014F: 84, + 0xE0150: 84, + 0xE0151: 84, + 0xE0152: 84, + 0xE0153: 84, + 0xE0154: 84, + 0xE0155: 84, + 0xE0156: 84, + 0xE0157: 84, + 0xE0158: 84, + 0xE0159: 84, + 0xE015A: 84, + 0xE015B: 84, + 0xE015C: 84, + 0xE015D: 84, + 0xE015E: 84, + 0xE015F: 84, + 0xE0160: 84, + 0xE0161: 84, + 0xE0162: 84, + 0xE0163: 84, + 0xE0164: 84, + 0xE0165: 84, + 0xE0166: 84, + 0xE0167: 84, + 0xE0168: 84, + 0xE0169: 84, + 0xE016A: 84, + 0xE016B: 84, + 0xE016C: 84, + 0xE016D: 84, + 0xE016E: 84, + 0xE016F: 84, + 0xE0170: 84, + 0xE0171: 84, + 0xE0172: 84, + 0xE0173: 84, + 0xE0174: 84, + 0xE0175: 84, + 0xE0176: 84, + 0xE0177: 84, + 0xE0178: 84, + 0xE0179: 84, + 0xE017A: 84, + 0xE017B: 84, + 0xE017C: 84, + 0xE017D: 84, + 0xE017E: 84, + 0xE017F: 84, + 0xE0180: 84, + 0xE0181: 84, + 0xE0182: 84, + 0xE0183: 84, + 0xE0184: 84, + 0xE0185: 84, + 0xE0186: 84, + 0xE0187: 84, + 0xE0188: 84, + 0xE0189: 84, + 0xE018A: 84, + 0xE018B: 84, + 0xE018C: 84, + 0xE018D: 84, + 0xE018E: 84, + 0xE018F: 84, + 0xE0190: 84, + 0xE0191: 84, + 0xE0192: 84, + 0xE0193: 84, + 0xE0194: 84, + 0xE0195: 84, + 0xE0196: 84, + 0xE0197: 84, + 0xE0198: 84, + 0xE0199: 84, + 0xE019A: 84, + 0xE019B: 84, + 0xE019C: 84, + 0xE019D: 84, + 0xE019E: 84, + 0xE019F: 84, + 0xE01A0: 84, + 0xE01A1: 84, + 0xE01A2: 84, + 0xE01A3: 84, + 0xE01A4: 84, + 0xE01A5: 84, + 0xE01A6: 84, + 0xE01A7: 84, + 0xE01A8: 84, + 0xE01A9: 84, + 0xE01AA: 84, + 0xE01AB: 84, + 0xE01AC: 84, + 0xE01AD: 84, + 0xE01AE: 84, + 0xE01AF: 84, + 0xE01B0: 84, + 0xE01B1: 84, + 0xE01B2: 84, + 0xE01B3: 84, + 0xE01B4: 84, + 0xE01B5: 84, + 0xE01B6: 84, + 0xE01B7: 84, + 0xE01B8: 84, + 0xE01B9: 84, + 0xE01BA: 84, + 0xE01BB: 84, + 0xE01BC: 84, + 0xE01BD: 84, + 0xE01BE: 84, + 0xE01BF: 84, + 0xE01C0: 84, + 0xE01C1: 84, + 0xE01C2: 84, + 0xE01C3: 84, + 0xE01C4: 84, + 0xE01C5: 84, + 0xE01C6: 84, + 0xE01C7: 84, + 0xE01C8: 84, + 0xE01C9: 84, + 0xE01CA: 84, + 0xE01CB: 84, + 0xE01CC: 84, + 0xE01CD: 84, + 0xE01CE: 84, + 0xE01CF: 84, + 0xE01D0: 84, + 0xE01D1: 84, + 0xE01D2: 84, + 0xE01D3: 84, + 0xE01D4: 84, + 0xE01D5: 84, + 0xE01D6: 84, + 0xE01D7: 84, + 0xE01D8: 84, + 0xE01D9: 84, + 0xE01DA: 84, + 0xE01DB: 84, + 0xE01DC: 84, + 0xE01DD: 84, + 0xE01DE: 84, + 0xE01DF: 84, + 0xE01E0: 84, + 0xE01E1: 84, + 0xE01E2: 84, + 0xE01E3: 84, + 0xE01E4: 84, + 0xE01E5: 84, + 0xE01E6: 84, + 0xE01E7: 84, + 0xE01E8: 84, + 0xE01E9: 84, + 0xE01EA: 84, + 0xE01EB: 84, + 0xE01EC: 84, + 0xE01ED: 84, + 0xE01EE: 84, + 0xE01EF: 84, +} +codepoint_classes = { + "PVALID": ( + 0x2D0000002E, + 0x300000003A, + 0x610000007B, + 0xDF000000F7, + 0xF800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010A, + 0x10B0000010C, + 0x10D0000010E, + 0x10F00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011A, + 
0x11B0000011C, + 0x11D0000011E, + 0x11F00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012A, + 0x12B0000012C, + 0x12D0000012E, + 0x12F00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13A0000013B, + 0x13C0000013D, + 0x13E0000013F, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14B0000014C, + 0x14D0000014E, + 0x14F00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015A, + 0x15B0000015C, + 0x15D0000015E, + 0x15F00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016A, + 0x16B0000016C, + 0x16D0000016E, + 0x16F00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17A0000017B, + 0x17C0000017D, + 0x17E0000017F, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18C0000018E, + 0x19200000193, + 0x19500000196, + 0x1990000019C, + 0x19E0000019F, + 0x1A1000001A2, + 0x1A3000001A4, + 0x1A5000001A6, + 0x1A8000001A9, + 0x1AA000001AC, + 0x1AD000001AE, + 0x1B0000001B1, + 0x1B4000001B5, + 0x1B6000001B7, + 0x1B9000001BC, + 0x1BD000001C4, + 0x1CE000001CF, + 0x1D0000001D1, + 0x1D2000001D3, + 0x1D4000001D5, + 0x1D6000001D7, + 0x1D8000001D9, + 0x1DA000001DB, + 0x1DC000001DE, + 0x1DF000001E0, + 0x1E1000001E2, + 0x1E3000001E4, + 0x1E5000001E6, + 0x1E7000001E8, + 0x1E9000001EA, + 0x1EB000001EC, + 0x1ED000001EE, + 0x1EF000001F1, + 0x1F5000001F6, + 0x1F9000001FA, + 0x1FB000001FC, + 0x1FD000001FE, + 0x1FF00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020A, + 0x20B0000020C, + 0x20D0000020E, + 0x20F00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021A, + 0x21B0000021C, + 0x21D0000021E, + 0x21F00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022A, + 0x22B0000022C, + 0x22D0000022E, + 0x22F00000230, + 0x23100000232, + 0x2330000023A, + 0x23C0000023D, + 0x23F00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024A, + 0x24B0000024C, + 0x24D0000024E, + 0x24F000002B0, + 0x2B9000002C2, + 0x2C6000002D2, + 0x2EC000002ED, + 0x2EE000002EF, + 0x30000000340, + 0x34200000343, + 0x3460000034F, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37B0000037E, + 0x39000000391, + 0x3AC000003CF, + 0x3D7000003D8, + 0x3D9000003DA, + 0x3DB000003DC, + 0x3DD000003DE, + 0x3DF000003E0, + 0x3E1000003E2, + 0x3E3000003E4, + 0x3E5000003E6, + 0x3E7000003E8, + 0x3E9000003EA, + 0x3EB000003EC, + 0x3ED000003EE, + 0x3EF000003F0, + 0x3F3000003F4, + 0x3F8000003F9, + 0x3FB000003FD, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046A, + 0x46B0000046C, + 0x46D0000046E, + 0x46F00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047A, + 0x47B0000047C, + 0x47D0000047E, + 0x47F00000480, + 0x48100000482, + 0x48300000488, + 0x48B0000048C, + 0x48D0000048E, + 0x48F00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049A, + 0x49B0000049C, + 0x49D0000049E, + 0x49F000004A0, + 0x4A1000004A2, + 0x4A3000004A4, + 0x4A5000004A6, + 0x4A7000004A8, + 0x4A9000004AA, + 0x4AB000004AC, + 0x4AD000004AE, + 0x4AF000004B0, + 0x4B1000004B2, + 0x4B3000004B4, + 0x4B5000004B6, + 0x4B7000004B8, + 0x4B9000004BA, + 0x4BB000004BC, + 0x4BD000004BE, + 0x4BF000004C0, + 0x4C2000004C3, + 0x4C4000004C5, + 0x4C6000004C7, + 0x4C8000004C9, + 0x4CA000004CB, + 0x4CC000004CD, + 0x4CE000004D0, + 0x4D1000004D2, + 0x4D3000004D4, + 0x4D5000004D6, + 
0x4D7000004D8, + 0x4D9000004DA, + 0x4DB000004DC, + 0x4DD000004DE, + 0x4DF000004E0, + 0x4E1000004E2, + 0x4E3000004E4, + 0x4E5000004E6, + 0x4E7000004E8, + 0x4E9000004EA, + 0x4EB000004EC, + 0x4ED000004EE, + 0x4EF000004F0, + 0x4F1000004F2, + 0x4F3000004F4, + 0x4F5000004F6, + 0x4F7000004F8, + 0x4F9000004FA, + 0x4FB000004FC, + 0x4FD000004FE, + 0x4FF00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050A, + 0x50B0000050C, + 0x50D0000050E, + 0x50F00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051A, + 0x51B0000051C, + 0x51D0000051E, + 0x51F00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052A, + 0x52B0000052C, + 0x52D0000052E, + 0x52F00000530, + 0x5590000055A, + 0x56000000587, + 0x58800000589, + 0x591000005BE, + 0x5BF000005C0, + 0x5C1000005C3, + 0x5C4000005C6, + 0x5C7000005C8, + 0x5D0000005EB, + 0x5EF000005F3, + 0x6100000061B, + 0x62000000640, + 0x64100000660, + 0x66E00000675, + 0x679000006D4, + 0x6D5000006DD, + 0x6DF000006E9, + 0x6EA000006F0, + 0x6FA00000700, + 0x7100000074B, + 0x74D000007B2, + 0x7C0000007F6, + 0x7FD000007FE, + 0x8000000082E, + 0x8400000085C, + 0x8600000086B, + 0x87000000888, + 0x8890000088F, + 0x897000008E2, + 0x8E300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098D, + 0x98F00000991, + 0x993000009A9, + 0x9AA000009B1, + 0x9B2000009B3, + 0x9B6000009BA, + 0x9BC000009C5, + 0x9C7000009C9, + 0x9CB000009CF, + 0x9D7000009D8, + 0x9E0000009E4, + 0x9E6000009F2, + 0x9FC000009FD, + 0x9FE000009FF, + 0xA0100000A04, + 0xA0500000A0B, + 0xA0F00000A11, + 0xA1300000A29, + 0xA2A00000A31, + 0xA3200000A33, + 0xA3500000A36, + 0xA3800000A3A, + 0xA3C00000A3D, + 0xA3E00000A43, + 0xA4700000A49, + 0xA4B00000A4E, + 0xA5100000A52, + 0xA5C00000A5D, + 0xA6600000A76, + 0xA8100000A84, + 0xA8500000A8E, + 0xA8F00000A92, + 0xA9300000AA9, + 0xAAA00000AB1, + 0xAB200000AB4, + 0xAB500000ABA, + 0xABC00000AC6, + 0xAC700000ACA, + 0xACB00000ACE, + 0xAD000000AD1, + 0xAE000000AE4, + 0xAE600000AF0, + 0xAF900000B00, + 0xB0100000B04, + 0xB0500000B0D, + 0xB0F00000B11, + 0xB1300000B29, + 0xB2A00000B31, + 0xB3200000B34, + 0xB3500000B3A, + 0xB3C00000B45, + 0xB4700000B49, + 0xB4B00000B4E, + 0xB5500000B58, + 0xB5F00000B64, + 0xB6600000B70, + 0xB7100000B72, + 0xB8200000B84, + 0xB8500000B8B, + 0xB8E00000B91, + 0xB9200000B96, + 0xB9900000B9B, + 0xB9C00000B9D, + 0xB9E00000BA0, + 0xBA300000BA5, + 0xBA800000BAB, + 0xBAE00000BBA, + 0xBBE00000BC3, + 0xBC600000BC9, + 0xBCA00000BCE, + 0xBD000000BD1, + 0xBD700000BD8, + 0xBE600000BF0, + 0xC0000000C0D, + 0xC0E00000C11, + 0xC1200000C29, + 0xC2A00000C3A, + 0xC3C00000C45, + 0xC4600000C49, + 0xC4A00000C4E, + 0xC5500000C57, + 0xC5800000C5B, + 0xC5D00000C5E, + 0xC6000000C64, + 0xC6600000C70, + 0xC8000000C84, + 0xC8500000C8D, + 0xC8E00000C91, + 0xC9200000CA9, + 0xCAA00000CB4, + 0xCB500000CBA, + 0xCBC00000CC5, + 0xCC600000CC9, + 0xCCA00000CCE, + 0xCD500000CD7, + 0xCDD00000CDF, + 0xCE000000CE4, + 0xCE600000CF0, + 0xCF100000CF4, + 0xD0000000D0D, + 0xD0E00000D11, + 0xD1200000D45, + 0xD4600000D49, + 0xD4A00000D4F, + 0xD5400000D58, + 0xD5F00000D64, + 0xD6600000D70, + 0xD7A00000D80, + 0xD8100000D84, + 0xD8500000D97, + 0xD9A00000DB2, + 0xDB300000DBC, + 0xDBD00000DBE, + 0xDC000000DC7, + 0xDCA00000DCB, + 0xDCF00000DD5, + 0xDD600000DD7, + 0xDD800000DE0, + 0xDE600000DF0, + 0xDF200000DF4, + 0xE0100000E33, + 0xE3400000E3B, + 0xE4000000E4F, + 0xE5000000E5A, + 0xE8100000E83, + 0xE8400000E85, + 0xE8600000E8B, + 0xE8C00000EA4, + 0xEA500000EA6, + 0xEA700000EB3, + 0xEB400000EBE, + 
0xEC000000EC5, + 0xEC600000EC7, + 0xEC800000ECF, + 0xED000000EDA, + 0xEDE00000EE0, + 0xF0000000F01, + 0xF0B00000F0C, + 0xF1800000F1A, + 0xF2000000F2A, + 0xF3500000F36, + 0xF3700000F38, + 0xF3900000F3A, + 0xF3E00000F43, + 0xF4400000F48, + 0xF4900000F4D, + 0xF4E00000F52, + 0xF5300000F57, + 0xF5800000F5C, + 0xF5D00000F69, + 0xF6A00000F6D, + 0xF7100000F73, + 0xF7400000F75, + 0xF7A00000F81, + 0xF8200000F85, + 0xF8600000F93, + 0xF9400000F98, + 0xF9900000F9D, + 0xF9E00000FA2, + 0xFA300000FA7, + 0xFA800000FAC, + 0xFAD00000FB9, + 0xFBA00000FBD, + 0xFC600000FC7, + 0x10000000104A, + 0x10500000109E, + 0x10D0000010FB, + 0x10FD00001100, + 0x120000001249, + 0x124A0000124E, + 0x125000001257, + 0x125800001259, + 0x125A0000125E, + 0x126000001289, + 0x128A0000128E, + 0x1290000012B1, + 0x12B2000012B6, + 0x12B8000012BF, + 0x12C0000012C1, + 0x12C2000012C6, + 0x12C8000012D7, + 0x12D800001311, + 0x131200001316, + 0x13180000135B, + 0x135D00001360, + 0x138000001390, + 0x13A0000013F6, + 0x14010000166D, + 0x166F00001680, + 0x16810000169B, + 0x16A0000016EB, + 0x16F1000016F9, + 0x170000001716, + 0x171F00001735, + 0x174000001754, + 0x17600000176D, + 0x176E00001771, + 0x177200001774, + 0x1780000017B4, + 0x17B6000017D4, + 0x17D7000017D8, + 0x17DC000017DE, + 0x17E0000017EA, + 0x18100000181A, + 0x182000001879, + 0x1880000018AB, + 0x18B0000018F6, + 0x19000000191F, + 0x19200000192C, + 0x19300000193C, + 0x19460000196E, + 0x197000001975, + 0x1980000019AC, + 0x19B0000019CA, + 0x19D0000019DA, + 0x1A0000001A1C, + 0x1A2000001A5F, + 0x1A6000001A7D, + 0x1A7F00001A8A, + 0x1A9000001A9A, + 0x1AA700001AA8, + 0x1AB000001ABE, + 0x1ABF00001ACF, + 0x1B0000001B4D, + 0x1B5000001B5A, + 0x1B6B00001B74, + 0x1B8000001BF4, + 0x1C0000001C38, + 0x1C4000001C4A, + 0x1C4D00001C7E, + 0x1C8A00001C8B, + 0x1CD000001CD3, + 0x1CD400001CFB, + 0x1D0000001D2C, + 0x1D2F00001D30, + 0x1D3B00001D3C, + 0x1D4E00001D4F, + 0x1D6B00001D78, + 0x1D7900001D9B, + 0x1DC000001E00, + 0x1E0100001E02, + 0x1E0300001E04, + 0x1E0500001E06, + 0x1E0700001E08, + 0x1E0900001E0A, + 0x1E0B00001E0C, + 0x1E0D00001E0E, + 0x1E0F00001E10, + 0x1E1100001E12, + 0x1E1300001E14, + 0x1E1500001E16, + 0x1E1700001E18, + 0x1E1900001E1A, + 0x1E1B00001E1C, + 0x1E1D00001E1E, + 0x1E1F00001E20, + 0x1E2100001E22, + 0x1E2300001E24, + 0x1E2500001E26, + 0x1E2700001E28, + 0x1E2900001E2A, + 0x1E2B00001E2C, + 0x1E2D00001E2E, + 0x1E2F00001E30, + 0x1E3100001E32, + 0x1E3300001E34, + 0x1E3500001E36, + 0x1E3700001E38, + 0x1E3900001E3A, + 0x1E3B00001E3C, + 0x1E3D00001E3E, + 0x1E3F00001E40, + 0x1E4100001E42, + 0x1E4300001E44, + 0x1E4500001E46, + 0x1E4700001E48, + 0x1E4900001E4A, + 0x1E4B00001E4C, + 0x1E4D00001E4E, + 0x1E4F00001E50, + 0x1E5100001E52, + 0x1E5300001E54, + 0x1E5500001E56, + 0x1E5700001E58, + 0x1E5900001E5A, + 0x1E5B00001E5C, + 0x1E5D00001E5E, + 0x1E5F00001E60, + 0x1E6100001E62, + 0x1E6300001E64, + 0x1E6500001E66, + 0x1E6700001E68, + 0x1E6900001E6A, + 0x1E6B00001E6C, + 0x1E6D00001E6E, + 0x1E6F00001E70, + 0x1E7100001E72, + 0x1E7300001E74, + 0x1E7500001E76, + 0x1E7700001E78, + 0x1E7900001E7A, + 0x1E7B00001E7C, + 0x1E7D00001E7E, + 0x1E7F00001E80, + 0x1E8100001E82, + 0x1E8300001E84, + 0x1E8500001E86, + 0x1E8700001E88, + 0x1E8900001E8A, + 0x1E8B00001E8C, + 0x1E8D00001E8E, + 0x1E8F00001E90, + 0x1E9100001E92, + 0x1E9300001E94, + 0x1E9500001E9A, + 0x1E9C00001E9E, + 0x1E9F00001EA0, + 0x1EA100001EA2, + 0x1EA300001EA4, + 0x1EA500001EA6, + 0x1EA700001EA8, + 0x1EA900001EAA, + 0x1EAB00001EAC, + 0x1EAD00001EAE, + 0x1EAF00001EB0, + 0x1EB100001EB2, + 0x1EB300001EB4, + 0x1EB500001EB6, + 0x1EB700001EB8, + 0x1EB900001EBA, + 
0x1EBB00001EBC, + 0x1EBD00001EBE, + 0x1EBF00001EC0, + 0x1EC100001EC2, + 0x1EC300001EC4, + 0x1EC500001EC6, + 0x1EC700001EC8, + 0x1EC900001ECA, + 0x1ECB00001ECC, + 0x1ECD00001ECE, + 0x1ECF00001ED0, + 0x1ED100001ED2, + 0x1ED300001ED4, + 0x1ED500001ED6, + 0x1ED700001ED8, + 0x1ED900001EDA, + 0x1EDB00001EDC, + 0x1EDD00001EDE, + 0x1EDF00001EE0, + 0x1EE100001EE2, + 0x1EE300001EE4, + 0x1EE500001EE6, + 0x1EE700001EE8, + 0x1EE900001EEA, + 0x1EEB00001EEC, + 0x1EED00001EEE, + 0x1EEF00001EF0, + 0x1EF100001EF2, + 0x1EF300001EF4, + 0x1EF500001EF6, + 0x1EF700001EF8, + 0x1EF900001EFA, + 0x1EFB00001EFC, + 0x1EFD00001EFE, + 0x1EFF00001F08, + 0x1F1000001F16, + 0x1F2000001F28, + 0x1F3000001F38, + 0x1F4000001F46, + 0x1F5000001F58, + 0x1F6000001F68, + 0x1F7000001F71, + 0x1F7200001F73, + 0x1F7400001F75, + 0x1F7600001F77, + 0x1F7800001F79, + 0x1F7A00001F7B, + 0x1F7C00001F7D, + 0x1FB000001FB2, + 0x1FB600001FB7, + 0x1FC600001FC7, + 0x1FD000001FD3, + 0x1FD600001FD8, + 0x1FE000001FE3, + 0x1FE400001FE8, + 0x1FF600001FF7, + 0x214E0000214F, + 0x218400002185, + 0x2C3000002C60, + 0x2C6100002C62, + 0x2C6500002C67, + 0x2C6800002C69, + 0x2C6A00002C6B, + 0x2C6C00002C6D, + 0x2C7100002C72, + 0x2C7300002C75, + 0x2C7600002C7C, + 0x2C8100002C82, + 0x2C8300002C84, + 0x2C8500002C86, + 0x2C8700002C88, + 0x2C8900002C8A, + 0x2C8B00002C8C, + 0x2C8D00002C8E, + 0x2C8F00002C90, + 0x2C9100002C92, + 0x2C9300002C94, + 0x2C9500002C96, + 0x2C9700002C98, + 0x2C9900002C9A, + 0x2C9B00002C9C, + 0x2C9D00002C9E, + 0x2C9F00002CA0, + 0x2CA100002CA2, + 0x2CA300002CA4, + 0x2CA500002CA6, + 0x2CA700002CA8, + 0x2CA900002CAA, + 0x2CAB00002CAC, + 0x2CAD00002CAE, + 0x2CAF00002CB0, + 0x2CB100002CB2, + 0x2CB300002CB4, + 0x2CB500002CB6, + 0x2CB700002CB8, + 0x2CB900002CBA, + 0x2CBB00002CBC, + 0x2CBD00002CBE, + 0x2CBF00002CC0, + 0x2CC100002CC2, + 0x2CC300002CC4, + 0x2CC500002CC6, + 0x2CC700002CC8, + 0x2CC900002CCA, + 0x2CCB00002CCC, + 0x2CCD00002CCE, + 0x2CCF00002CD0, + 0x2CD100002CD2, + 0x2CD300002CD4, + 0x2CD500002CD6, + 0x2CD700002CD8, + 0x2CD900002CDA, + 0x2CDB00002CDC, + 0x2CDD00002CDE, + 0x2CDF00002CE0, + 0x2CE100002CE2, + 0x2CE300002CE5, + 0x2CEC00002CED, + 0x2CEE00002CF2, + 0x2CF300002CF4, + 0x2D0000002D26, + 0x2D2700002D28, + 0x2D2D00002D2E, + 0x2D3000002D68, + 0x2D7F00002D97, + 0x2DA000002DA7, + 0x2DA800002DAF, + 0x2DB000002DB7, + 0x2DB800002DBF, + 0x2DC000002DC7, + 0x2DC800002DCF, + 0x2DD000002DD7, + 0x2DD800002DDF, + 0x2DE000002E00, + 0x2E2F00002E30, + 0x300500003008, + 0x302A0000302E, + 0x303C0000303D, + 0x304100003097, + 0x30990000309B, + 0x309D0000309F, + 0x30A1000030FB, + 0x30FC000030FF, + 0x310500003130, + 0x31A0000031C0, + 0x31F000003200, + 0x340000004DC0, + 0x4E000000A48D, + 0xA4D00000A4FE, + 0xA5000000A60D, + 0xA6100000A62C, + 0xA6410000A642, + 0xA6430000A644, + 0xA6450000A646, + 0xA6470000A648, + 0xA6490000A64A, + 0xA64B0000A64C, + 0xA64D0000A64E, + 0xA64F0000A650, + 0xA6510000A652, + 0xA6530000A654, + 0xA6550000A656, + 0xA6570000A658, + 0xA6590000A65A, + 0xA65B0000A65C, + 0xA65D0000A65E, + 0xA65F0000A660, + 0xA6610000A662, + 0xA6630000A664, + 0xA6650000A666, + 0xA6670000A668, + 0xA6690000A66A, + 0xA66B0000A66C, + 0xA66D0000A670, + 0xA6740000A67E, + 0xA67F0000A680, + 0xA6810000A682, + 0xA6830000A684, + 0xA6850000A686, + 0xA6870000A688, + 0xA6890000A68A, + 0xA68B0000A68C, + 0xA68D0000A68E, + 0xA68F0000A690, + 0xA6910000A692, + 0xA6930000A694, + 0xA6950000A696, + 0xA6970000A698, + 0xA6990000A69A, + 0xA69B0000A69C, + 0xA69E0000A6E6, + 0xA6F00000A6F2, + 0xA7170000A720, + 0xA7230000A724, + 0xA7250000A726, + 0xA7270000A728, + 0xA7290000A72A, + 
0xA72B0000A72C, + 0xA72D0000A72E, + 0xA72F0000A732, + 0xA7330000A734, + 0xA7350000A736, + 0xA7370000A738, + 0xA7390000A73A, + 0xA73B0000A73C, + 0xA73D0000A73E, + 0xA73F0000A740, + 0xA7410000A742, + 0xA7430000A744, + 0xA7450000A746, + 0xA7470000A748, + 0xA7490000A74A, + 0xA74B0000A74C, + 0xA74D0000A74E, + 0xA74F0000A750, + 0xA7510000A752, + 0xA7530000A754, + 0xA7550000A756, + 0xA7570000A758, + 0xA7590000A75A, + 0xA75B0000A75C, + 0xA75D0000A75E, + 0xA75F0000A760, + 0xA7610000A762, + 0xA7630000A764, + 0xA7650000A766, + 0xA7670000A768, + 0xA7690000A76A, + 0xA76B0000A76C, + 0xA76D0000A76E, + 0xA76F0000A770, + 0xA7710000A779, + 0xA77A0000A77B, + 0xA77C0000A77D, + 0xA77F0000A780, + 0xA7810000A782, + 0xA7830000A784, + 0xA7850000A786, + 0xA7870000A789, + 0xA78C0000A78D, + 0xA78E0000A790, + 0xA7910000A792, + 0xA7930000A796, + 0xA7970000A798, + 0xA7990000A79A, + 0xA79B0000A79C, + 0xA79D0000A79E, + 0xA79F0000A7A0, + 0xA7A10000A7A2, + 0xA7A30000A7A4, + 0xA7A50000A7A6, + 0xA7A70000A7A8, + 0xA7A90000A7AA, + 0xA7AF0000A7B0, + 0xA7B50000A7B6, + 0xA7B70000A7B8, + 0xA7B90000A7BA, + 0xA7BB0000A7BC, + 0xA7BD0000A7BE, + 0xA7BF0000A7C0, + 0xA7C10000A7C2, + 0xA7C30000A7C4, + 0xA7C80000A7C9, + 0xA7CA0000A7CB, + 0xA7CD0000A7CE, + 0xA7D10000A7D2, + 0xA7D30000A7D4, + 0xA7D50000A7D6, + 0xA7D70000A7D8, + 0xA7D90000A7DA, + 0xA7DB0000A7DC, + 0xA7F60000A7F8, + 0xA7FA0000A828, + 0xA82C0000A82D, + 0xA8400000A874, + 0xA8800000A8C6, + 0xA8D00000A8DA, + 0xA8E00000A8F8, + 0xA8FB0000A8FC, + 0xA8FD0000A92E, + 0xA9300000A954, + 0xA9800000A9C1, + 0xA9CF0000A9DA, + 0xA9E00000A9FF, + 0xAA000000AA37, + 0xAA400000AA4E, + 0xAA500000AA5A, + 0xAA600000AA77, + 0xAA7A0000AAC3, + 0xAADB0000AADE, + 0xAAE00000AAF0, + 0xAAF20000AAF7, + 0xAB010000AB07, + 0xAB090000AB0F, + 0xAB110000AB17, + 0xAB200000AB27, + 0xAB280000AB2F, + 0xAB300000AB5B, + 0xAB600000AB69, + 0xABC00000ABEB, + 0xABEC0000ABEE, + 0xABF00000ABFA, + 0xAC000000D7A4, + 0xFA0E0000FA10, + 0xFA110000FA12, + 0xFA130000FA15, + 0xFA1F0000FA20, + 0xFA210000FA22, + 0xFA230000FA25, + 0xFA270000FA2A, + 0xFB1E0000FB1F, + 0xFE200000FE30, + 0xFE730000FE74, + 0x100000001000C, + 0x1000D00010027, + 0x100280001003B, + 0x1003C0001003E, + 0x1003F0001004E, + 0x100500001005E, + 0x10080000100FB, + 0x101FD000101FE, + 0x102800001029D, + 0x102A0000102D1, + 0x102E0000102E1, + 0x1030000010320, + 0x1032D00010341, + 0x103420001034A, + 0x103500001037B, + 0x103800001039E, + 0x103A0000103C4, + 0x103C8000103D0, + 0x104280001049E, + 0x104A0000104AA, + 0x104D8000104FC, + 0x1050000010528, + 0x1053000010564, + 0x10597000105A2, + 0x105A3000105B2, + 0x105B3000105BA, + 0x105BB000105BD, + 0x105C0000105F4, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010781, + 0x1080000010806, + 0x1080800010809, + 0x1080A00010836, + 0x1083700010839, + 0x1083C0001083D, + 0x1083F00010856, + 0x1086000010877, + 0x108800001089F, + 0x108E0000108F3, + 0x108F4000108F6, + 0x1090000010916, + 0x109200001093A, + 0x10980000109B8, + 0x109BE000109C0, + 0x10A0000010A04, + 0x10A0500010A07, + 0x10A0C00010A14, + 0x10A1500010A18, + 0x10A1900010A36, + 0x10A3800010A3B, + 0x10A3F00010A40, + 0x10A6000010A7D, + 0x10A8000010A9D, + 0x10AC000010AC8, + 0x10AC900010AE7, + 0x10B0000010B36, + 0x10B4000010B56, + 0x10B6000010B73, + 0x10B8000010B92, + 0x10C0000010C49, + 0x10CC000010CF3, + 0x10D0000010D28, + 0x10D3000010D3A, + 0x10D4000010D50, + 0x10D6900010D6E, + 0x10D6F00010D86, + 0x10E8000010EAA, + 0x10EAB00010EAD, + 0x10EB000010EB2, + 0x10EC200010EC5, + 0x10EFC00010F1D, + 0x10F2700010F28, + 0x10F3000010F51, + 0x10F7000010F86, + 0x10FB000010FC5, + 
0x10FE000010FF7, + 0x1100000011047, + 0x1106600011076, + 0x1107F000110BB, + 0x110C2000110C3, + 0x110D0000110E9, + 0x110F0000110FA, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111C5, + 0x111C9000111CD, + 0x111CE000111DB, + 0x111DC000111DD, + 0x1120000011212, + 0x1121300011238, + 0x1123E00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128A0001128E, + 0x1128F0001129E, + 0x1129F000112A9, + 0x112B0000112EB, + 0x112F0000112FA, + 0x1130000011304, + 0x113050001130D, + 0x1130F00011311, + 0x1131300011329, + 0x1132A00011331, + 0x1133200011334, + 0x113350001133A, + 0x1133B00011345, + 0x1134700011349, + 0x1134B0001134E, + 0x1135000011351, + 0x1135700011358, + 0x1135D00011364, + 0x113660001136D, + 0x1137000011375, + 0x113800001138A, + 0x1138B0001138C, + 0x1138E0001138F, + 0x11390000113B6, + 0x113B7000113C1, + 0x113C2000113C3, + 0x113C5000113C6, + 0x113C7000113CB, + 0x113CC000113D4, + 0x113E1000113E3, + 0x114000001144B, + 0x114500001145A, + 0x1145E00011462, + 0x11480000114C6, + 0x114C7000114C8, + 0x114D0000114DA, + 0x11580000115B6, + 0x115B8000115C1, + 0x115D8000115DE, + 0x1160000011641, + 0x1164400011645, + 0x116500001165A, + 0x11680000116B9, + 0x116C0000116CA, + 0x116D0000116E4, + 0x117000001171B, + 0x1171D0001172C, + 0x117300001173A, + 0x1174000011747, + 0x118000001183B, + 0x118C0000118EA, + 0x118FF00011907, + 0x119090001190A, + 0x1190C00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193B00011944, + 0x119500001195A, + 0x119A0000119A8, + 0x119AA000119D8, + 0x119DA000119E2, + 0x119E3000119E5, + 0x11A0000011A3F, + 0x11A4700011A48, + 0x11A5000011A9A, + 0x11A9D00011A9E, + 0x11AB000011AF9, + 0x11BC000011BE1, + 0x11BF000011BFA, + 0x11C0000011C09, + 0x11C0A00011C37, + 0x11C3800011C41, + 0x11C5000011C5A, + 0x11C7200011C90, + 0x11C9200011CA8, + 0x11CA900011CB7, + 0x11D0000011D07, + 0x11D0800011D0A, + 0x11D0B00011D37, + 0x11D3A00011D3B, + 0x11D3C00011D3E, + 0x11D3F00011D48, + 0x11D5000011D5A, + 0x11D6000011D66, + 0x11D6700011D69, + 0x11D6A00011D8F, + 0x11D9000011D92, + 0x11D9300011D99, + 0x11DA000011DAA, + 0x11EE000011EF7, + 0x11F0000011F11, + 0x11F1200011F3B, + 0x11F3E00011F43, + 0x11F5000011F5B, + 0x11FB000011FB1, + 0x120000001239A, + 0x1248000012544, + 0x12F9000012FF1, + 0x1300000013430, + 0x1344000013456, + 0x13460000143FB, + 0x1440000014647, + 0x161000001613A, + 0x1680000016A39, + 0x16A4000016A5F, + 0x16A6000016A6A, + 0x16A7000016ABF, + 0x16AC000016ACA, + 0x16AD000016AEE, + 0x16AF000016AF5, + 0x16B0000016B37, + 0x16B4000016B44, + 0x16B5000016B5A, + 0x16B6300016B78, + 0x16B7D00016B90, + 0x16D4000016D6D, + 0x16D7000016D7A, + 0x16E6000016E80, + 0x16F0000016F4B, + 0x16F4F00016F88, + 0x16F8F00016FA0, + 0x16FE000016FE2, + 0x16FE300016FE5, + 0x16FF000016FF2, + 0x17000000187F8, + 0x1880000018CD6, + 0x18CFF00018D09, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B123, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1B1550001B156, + 0x1B1640001B168, + 0x1B1700001B2FC, + 0x1BC000001BC6B, + 0x1BC700001BC7D, + 0x1BC800001BC89, + 0x1BC900001BC9A, + 0x1BC9D0001BC9F, + 0x1CCF00001CCFA, + 0x1CF000001CF2E, + 0x1CF300001CF47, + 0x1DA000001DA37, + 0x1DA3B0001DA6D, + 0x1DA750001DA76, + 0x1DA840001DA85, + 0x1DA9B0001DAA0, + 0x1DAA10001DAB0, + 0x1DF000001DF1F, + 0x1DF250001DF2B, + 0x1E0000001E007, + 0x1E0080001E019, + 0x1E01B0001E022, + 0x1E0230001E025, + 0x1E0260001E02B, + 0x1E08F0001E090, + 0x1E1000001E12D, + 0x1E1300001E13E, + 0x1E1400001E14A, + 0x1E14E0001E14F, + 0x1E2900001E2AF, + 0x1E2C00001E2FA, + 0x1E4D00001E4FA, + 
0x1E5D00001E5FB, + 0x1E7E00001E7E7, + 0x1E7E80001E7EC, + 0x1E7ED0001E7EF, + 0x1E7F00001E7FF, + 0x1E8000001E8C5, + 0x1E8D00001E8D7, + 0x1E9220001E94C, + 0x1E9500001E95A, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x300000003134B, + 0x31350000323B0, + ), + "CONTEXTJ": (0x200C0000200E,), + "CONTEXTO": ( + 0xB7000000B8, + 0x37500000376, + 0x5F3000005F5, + 0x6600000066A, + 0x6F0000006FA, + 0x30FB000030FC, + ), +} diff --git a/venv/lib/python3.12/site-packages/idna/intranges.py b/venv/lib/python3.12/site-packages/idna/intranges.py new file mode 100644 index 0000000..7bfaa8d --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/intranges.py @@ -0,0 +1,57 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i + 1 < len(sorted_list): + if sorted_list[i] == sorted_list[i + 1] - 1: + continue + current_range = sorted_list[last_write + 1 : i + 1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos - 1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/venv/lib/python3.12/site-packages/idna/package_data.py b/venv/lib/python3.12/site-packages/idna/package_data.py new file mode 100644 index 0000000..7272c8d --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/package_data.py @@ -0,0 +1 @@ +__version__ = "3.11" diff --git a/venv/lib/python3.12/site-packages/idna/py.typed b/venv/lib/python3.12/site-packages/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/idna/uts46data.py b/venv/lib/python3.12/site-packages/idna/uts46data.py new file mode 100644 index 0000000..4610b71 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna/uts46data.py @@ -0,0 +1,8841 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "16.0.0" + + +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, "V"), + (0x1, "V"), + (0x2, "V"), + (0x3, "V"), 
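+ # (editor's sketch, not in upstream idna: how the intranges helpers defined
+ # above behave; the input values are illustrative and _decode_range is a
+ # private helper used here only for demonstration.)
+ # >>> from idna.intranges import intranges_from_list, intranges_contain, _decode_range
+ # >>> ranges = intranges_from_list([0x2D, 0x2E, 0x30, 0x31, 0x32])
+ # >>> [_decode_range(r) for r in ranges]  # two half-open runs, packed start << 32 | end
+ # [(45, 47), (48, 51)]
+ # >>> intranges_contain(0x2E, ranges), intranges_contain(0x2F, ranges)
+ # (True, False)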
+ (0x4, "V"), + (0x5, "V"), + (0x6, "V"), + (0x7, "V"), + (0x8, "V"), + (0x9, "V"), + (0xA, "V"), + (0xB, "V"), + (0xC, "V"), + (0xD, "V"), + (0xE, "V"), + (0xF, "V"), + (0x10, "V"), + (0x11, "V"), + (0x12, "V"), + (0x13, "V"), + (0x14, "V"), + (0x15, "V"), + (0x16, "V"), + (0x17, "V"), + (0x18, "V"), + (0x19, "V"), + (0x1A, "V"), + (0x1B, "V"), + (0x1C, "V"), + (0x1D, "V"), + (0x1E, "V"), + (0x1F, "V"), + (0x20, "V"), + (0x21, "V"), + (0x22, "V"), + (0x23, "V"), + (0x24, "V"), + (0x25, "V"), + (0x26, "V"), + (0x27, "V"), + (0x28, "V"), + (0x29, "V"), + (0x2A, "V"), + (0x2B, "V"), + (0x2C, "V"), + (0x2D, "V"), + (0x2E, "V"), + (0x2F, "V"), + (0x30, "V"), + (0x31, "V"), + (0x32, "V"), + (0x33, "V"), + (0x34, "V"), + (0x35, "V"), + (0x36, "V"), + (0x37, "V"), + (0x38, "V"), + (0x39, "V"), + (0x3A, "V"), + (0x3B, "V"), + (0x3C, "V"), + (0x3D, "V"), + (0x3E, "V"), + (0x3F, "V"), + (0x40, "V"), + (0x41, "M", "a"), + (0x42, "M", "b"), + (0x43, "M", "c"), + (0x44, "M", "d"), + (0x45, "M", "e"), + (0x46, "M", "f"), + (0x47, "M", "g"), + (0x48, "M", "h"), + (0x49, "M", "i"), + (0x4A, "M", "j"), + (0x4B, "M", "k"), + (0x4C, "M", "l"), + (0x4D, "M", "m"), + (0x4E, "M", "n"), + (0x4F, "M", "o"), + (0x50, "M", "p"), + (0x51, "M", "q"), + (0x52, "M", "r"), + (0x53, "M", "s"), + (0x54, "M", "t"), + (0x55, "M", "u"), + (0x56, "M", "v"), + (0x57, "M", "w"), + (0x58, "M", "x"), + (0x59, "M", "y"), + (0x5A, "M", "z"), + (0x5B, "V"), + (0x5C, "V"), + (0x5D, "V"), + (0x5E, "V"), + (0x5F, "V"), + (0x60, "V"), + (0x61, "V"), + (0x62, "V"), + (0x63, "V"), + ] + + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, "V"), + (0x65, "V"), + (0x66, "V"), + (0x67, "V"), + (0x68, "V"), + (0x69, "V"), + (0x6A, "V"), + (0x6B, "V"), + (0x6C, "V"), + (0x6D, "V"), + (0x6E, "V"), + (0x6F, "V"), + (0x70, "V"), + (0x71, "V"), + (0x72, "V"), + (0x73, "V"), + (0x74, "V"), + (0x75, "V"), + (0x76, "V"), + (0x77, "V"), + (0x78, "V"), + (0x79, "V"), + (0x7A, "V"), + (0x7B, "V"), + (0x7C, "V"), + (0x7D, "V"), + (0x7E, "V"), + (0x7F, "V"), + (0x80, "X"), + (0x81, "X"), + (0x82, "X"), + (0x83, "X"), + (0x84, "X"), + (0x85, "X"), + (0x86, "X"), + (0x87, "X"), + (0x88, "X"), + (0x89, "X"), + (0x8A, "X"), + (0x8B, "X"), + (0x8C, "X"), + (0x8D, "X"), + (0x8E, "X"), + (0x8F, "X"), + (0x90, "X"), + (0x91, "X"), + (0x92, "X"), + (0x93, "X"), + (0x94, "X"), + (0x95, "X"), + (0x96, "X"), + (0x97, "X"), + (0x98, "X"), + (0x99, "X"), + (0x9A, "X"), + (0x9B, "X"), + (0x9C, "X"), + (0x9D, "X"), + (0x9E, "X"), + (0x9F, "X"), + (0xA0, "M", " "), + (0xA1, "V"), + (0xA2, "V"), + (0xA3, "V"), + (0xA4, "V"), + (0xA5, "V"), + (0xA6, "V"), + (0xA7, "V"), + (0xA8, "M", " ̈"), + (0xA9, "V"), + (0xAA, "M", "a"), + (0xAB, "V"), + (0xAC, "V"), + (0xAD, "I"), + (0xAE, "V"), + (0xAF, "M", " ̄"), + (0xB0, "V"), + (0xB1, "V"), + (0xB2, "M", "2"), + (0xB3, "M", "3"), + (0xB4, "M", " ́"), + (0xB5, "M", "μ"), + (0xB6, "V"), + (0xB7, "V"), + (0xB8, "M", " ̧"), + (0xB9, "M", "1"), + (0xBA, "M", "o"), + (0xBB, "V"), + (0xBC, "M", "1⁄4"), + (0xBD, "M", "1⁄2"), + (0xBE, "M", "3⁄4"), + (0xBF, "V"), + (0xC0, "M", "à"), + (0xC1, "M", "á"), + (0xC2, "M", "â"), + (0xC3, "M", "ã"), + (0xC4, "M", "ä"), + (0xC5, "M", "å"), + (0xC6, "M", "æ"), + (0xC7, "M", "ç"), + ] + + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, "M", "è"), + (0xC9, "M", "é"), + (0xCA, "M", "ê"), + (0xCB, "M", "ë"), + (0xCC, "M", "ì"), + (0xCD, "M", "í"), + (0xCE, "M", "î"), + (0xCF, "M", "ï"), + (0xD0, "M", "ð"), + (0xD1, "M", "ñ"), 
+ (0xD2, "M", "ò"), + (0xD3, "M", "ó"), + (0xD4, "M", "ô"), + (0xD5, "M", "õ"), + (0xD6, "M", "ö"), + (0xD7, "V"), + (0xD8, "M", "ø"), + (0xD9, "M", "ù"), + (0xDA, "M", "ú"), + (0xDB, "M", "û"), + (0xDC, "M", "ü"), + (0xDD, "M", "ý"), + (0xDE, "M", "þ"), + (0xDF, "D", "ss"), + (0xE0, "V"), + (0xE1, "V"), + (0xE2, "V"), + (0xE3, "V"), + (0xE4, "V"), + (0xE5, "V"), + (0xE6, "V"), + (0xE7, "V"), + (0xE8, "V"), + (0xE9, "V"), + (0xEA, "V"), + (0xEB, "V"), + (0xEC, "V"), + (0xED, "V"), + (0xEE, "V"), + (0xEF, "V"), + (0xF0, "V"), + (0xF1, "V"), + (0xF2, "V"), + (0xF3, "V"), + (0xF4, "V"), + (0xF5, "V"), + (0xF6, "V"), + (0xF7, "V"), + (0xF8, "V"), + (0xF9, "V"), + (0xFA, "V"), + (0xFB, "V"), + (0xFC, "V"), + (0xFD, "V"), + (0xFE, "V"), + (0xFF, "V"), + (0x100, "M", "ā"), + (0x101, "V"), + (0x102, "M", "ă"), + (0x103, "V"), + (0x104, "M", "ą"), + (0x105, "V"), + (0x106, "M", "ć"), + (0x107, "V"), + (0x108, "M", "ĉ"), + (0x109, "V"), + (0x10A, "M", "ċ"), + (0x10B, "V"), + (0x10C, "M", "č"), + (0x10D, "V"), + (0x10E, "M", "ď"), + (0x10F, "V"), + (0x110, "M", "đ"), + (0x111, "V"), + (0x112, "M", "ē"), + (0x113, "V"), + (0x114, "M", "ĕ"), + (0x115, "V"), + (0x116, "M", "ė"), + (0x117, "V"), + (0x118, "M", "ę"), + (0x119, "V"), + (0x11A, "M", "ě"), + (0x11B, "V"), + (0x11C, "M", "ĝ"), + (0x11D, "V"), + (0x11E, "M", "ğ"), + (0x11F, "V"), + (0x120, "M", "ġ"), + (0x121, "V"), + (0x122, "M", "ģ"), + (0x123, "V"), + (0x124, "M", "ĥ"), + (0x125, "V"), + (0x126, "M", "ħ"), + (0x127, "V"), + (0x128, "M", "ĩ"), + (0x129, "V"), + (0x12A, "M", "ī"), + (0x12B, "V"), + ] + + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, "M", "ĭ"), + (0x12D, "V"), + (0x12E, "M", "į"), + (0x12F, "V"), + (0x130, "M", "i̇"), + (0x131, "V"), + (0x132, "M", "ij"), + (0x134, "M", "ĵ"), + (0x135, "V"), + (0x136, "M", "ķ"), + (0x137, "V"), + (0x139, "M", "ĺ"), + (0x13A, "V"), + (0x13B, "M", "ļ"), + (0x13C, "V"), + (0x13D, "M", "ľ"), + (0x13E, "V"), + (0x13F, "M", "l·"), + (0x141, "M", "ł"), + (0x142, "V"), + (0x143, "M", "ń"), + (0x144, "V"), + (0x145, "M", "ņ"), + (0x146, "V"), + (0x147, "M", "ň"), + (0x148, "V"), + (0x149, "M", "ʼn"), + (0x14A, "M", "ŋ"), + (0x14B, "V"), + (0x14C, "M", "ō"), + (0x14D, "V"), + (0x14E, "M", "ŏ"), + (0x14F, "V"), + (0x150, "M", "ő"), + (0x151, "V"), + (0x152, "M", "œ"), + (0x153, "V"), + (0x154, "M", "ŕ"), + (0x155, "V"), + (0x156, "M", "ŗ"), + (0x157, "V"), + (0x158, "M", "ř"), + (0x159, "V"), + (0x15A, "M", "ś"), + (0x15B, "V"), + (0x15C, "M", "ŝ"), + (0x15D, "V"), + (0x15E, "M", "ş"), + (0x15F, "V"), + (0x160, "M", "š"), + (0x161, "V"), + (0x162, "M", "ţ"), + (0x163, "V"), + (0x164, "M", "ť"), + (0x165, "V"), + (0x166, "M", "ŧ"), + (0x167, "V"), + (0x168, "M", "ũ"), + (0x169, "V"), + (0x16A, "M", "ū"), + (0x16B, "V"), + (0x16C, "M", "ŭ"), + (0x16D, "V"), + (0x16E, "M", "ů"), + (0x16F, "V"), + (0x170, "M", "ű"), + (0x171, "V"), + (0x172, "M", "ų"), + (0x173, "V"), + (0x174, "M", "ŵ"), + (0x175, "V"), + (0x176, "M", "ŷ"), + (0x177, "V"), + (0x178, "M", "ÿ"), + (0x179, "M", "ź"), + (0x17A, "V"), + (0x17B, "M", "ż"), + (0x17C, "V"), + (0x17D, "M", "ž"), + (0x17E, "V"), + (0x17F, "M", "s"), + (0x180, "V"), + (0x181, "M", "ɓ"), + (0x182, "M", "ƃ"), + (0x183, "V"), + (0x184, "M", "ƅ"), + (0x185, "V"), + (0x186, "M", "ɔ"), + (0x187, "M", "ƈ"), + (0x188, "V"), + (0x189, "M", "ɖ"), + (0x18A, "M", "ɗ"), + (0x18B, "M", "ƌ"), + (0x18C, "V"), + (0x18E, "M", "ǝ"), + (0x18F, "M", "ə"), + (0x190, "M", "ɛ"), + (0x191, "M", "ƒ"), + (0x192, "V"), + (0x193, "M", "ɠ"), + ] + + +def 
_seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, "M", "ɣ"), + (0x195, "V"), + (0x196, "M", "ɩ"), + (0x197, "M", "ɨ"), + (0x198, "M", "ƙ"), + (0x199, "V"), + (0x19C, "M", "ɯ"), + (0x19D, "M", "ɲ"), + (0x19E, "V"), + (0x19F, "M", "ɵ"), + (0x1A0, "M", "ơ"), + (0x1A1, "V"), + (0x1A2, "M", "ƣ"), + (0x1A3, "V"), + (0x1A4, "M", "ƥ"), + (0x1A5, "V"), + (0x1A6, "M", "ʀ"), + (0x1A7, "M", "ƨ"), + (0x1A8, "V"), + (0x1A9, "M", "ʃ"), + (0x1AA, "V"), + (0x1AC, "M", "ƭ"), + (0x1AD, "V"), + (0x1AE, "M", "ʈ"), + (0x1AF, "M", "ư"), + (0x1B0, "V"), + (0x1B1, "M", "ʊ"), + (0x1B2, "M", "ʋ"), + (0x1B3, "M", "ƴ"), + (0x1B4, "V"), + (0x1B5, "M", "ƶ"), + (0x1B6, "V"), + (0x1B7, "M", "ʒ"), + (0x1B8, "M", "ƹ"), + (0x1B9, "V"), + (0x1BC, "M", "ƽ"), + (0x1BD, "V"), + (0x1C4, "M", "dž"), + (0x1C7, "M", "lj"), + (0x1CA, "M", "nj"), + (0x1CD, "M", "ǎ"), + (0x1CE, "V"), + (0x1CF, "M", "ǐ"), + (0x1D0, "V"), + (0x1D1, "M", "ǒ"), + (0x1D2, "V"), + (0x1D3, "M", "ǔ"), + (0x1D4, "V"), + (0x1D5, "M", "ǖ"), + (0x1D6, "V"), + (0x1D7, "M", "ǘ"), + (0x1D8, "V"), + (0x1D9, "M", "ǚ"), + (0x1DA, "V"), + (0x1DB, "M", "ǜ"), + (0x1DC, "V"), + (0x1DE, "M", "ǟ"), + (0x1DF, "V"), + (0x1E0, "M", "ǡ"), + (0x1E1, "V"), + (0x1E2, "M", "ǣ"), + (0x1E3, "V"), + (0x1E4, "M", "ǥ"), + (0x1E5, "V"), + (0x1E6, "M", "ǧ"), + (0x1E7, "V"), + (0x1E8, "M", "ǩ"), + (0x1E9, "V"), + (0x1EA, "M", "ǫ"), + (0x1EB, "V"), + (0x1EC, "M", "ǭ"), + (0x1ED, "V"), + (0x1EE, "M", "ǯ"), + (0x1EF, "V"), + (0x1F1, "M", "dz"), + (0x1F4, "M", "ǵ"), + (0x1F5, "V"), + (0x1F6, "M", "ƕ"), + (0x1F7, "M", "ƿ"), + (0x1F8, "M", "ǹ"), + (0x1F9, "V"), + (0x1FA, "M", "ǻ"), + (0x1FB, "V"), + (0x1FC, "M", "ǽ"), + (0x1FD, "V"), + (0x1FE, "M", "ǿ"), + (0x1FF, "V"), + (0x200, "M", "ȁ"), + (0x201, "V"), + (0x202, "M", "ȃ"), + (0x203, "V"), + (0x204, "M", "ȅ"), + (0x205, "V"), + (0x206, "M", "ȇ"), + (0x207, "V"), + (0x208, "M", "ȉ"), + (0x209, "V"), + (0x20A, "M", "ȋ"), + (0x20B, "V"), + (0x20C, "M", "ȍ"), + ] + + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, "V"), + (0x20E, "M", "ȏ"), + (0x20F, "V"), + (0x210, "M", "ȑ"), + (0x211, "V"), + (0x212, "M", "ȓ"), + (0x213, "V"), + (0x214, "M", "ȕ"), + (0x215, "V"), + (0x216, "M", "ȗ"), + (0x217, "V"), + (0x218, "M", "ș"), + (0x219, "V"), + (0x21A, "M", "ț"), + (0x21B, "V"), + (0x21C, "M", "ȝ"), + (0x21D, "V"), + (0x21E, "M", "ȟ"), + (0x21F, "V"), + (0x220, "M", "ƞ"), + (0x221, "V"), + (0x222, "M", "ȣ"), + (0x223, "V"), + (0x224, "M", "ȥ"), + (0x225, "V"), + (0x226, "M", "ȧ"), + (0x227, "V"), + (0x228, "M", "ȩ"), + (0x229, "V"), + (0x22A, "M", "ȫ"), + (0x22B, "V"), + (0x22C, "M", "ȭ"), + (0x22D, "V"), + (0x22E, "M", "ȯ"), + (0x22F, "V"), + (0x230, "M", "ȱ"), + (0x231, "V"), + (0x232, "M", "ȳ"), + (0x233, "V"), + (0x23A, "M", "ⱥ"), + (0x23B, "M", "ȼ"), + (0x23C, "V"), + (0x23D, "M", "ƚ"), + (0x23E, "M", "ⱦ"), + (0x23F, "V"), + (0x241, "M", "ɂ"), + (0x242, "V"), + (0x243, "M", "ƀ"), + (0x244, "M", "ʉ"), + (0x245, "M", "ʌ"), + (0x246, "M", "ɇ"), + (0x247, "V"), + (0x248, "M", "ɉ"), + (0x249, "V"), + (0x24A, "M", "ɋ"), + (0x24B, "V"), + (0x24C, "M", "ɍ"), + (0x24D, "V"), + (0x24E, "M", "ɏ"), + (0x24F, "V"), + (0x2B0, "M", "h"), + (0x2B1, "M", "ɦ"), + (0x2B2, "M", "j"), + (0x2B3, "M", "r"), + (0x2B4, "M", "ɹ"), + (0x2B5, "M", "ɻ"), + (0x2B6, "M", "ʁ"), + (0x2B7, "M", "w"), + (0x2B8, "M", "y"), + (0x2B9, "V"), + (0x2D8, "M", " ̆"), + (0x2D9, "M", " ̇"), + (0x2DA, "M", " ̊"), + (0x2DB, "M", " ̨"), + (0x2DC, "M", " ̃"), + (0x2DD, "M", " ̋"), + (0x2DE, "V"), + (0x2E0, "M", "ɣ"), + 
(0x2E1, "M", "l"), + (0x2E2, "M", "s"), + (0x2E3, "M", "x"), + (0x2E4, "M", "ʕ"), + (0x2E5, "V"), + (0x340, "M", "̀"), + (0x341, "M", "́"), + (0x342, "V"), + (0x343, "M", "̓"), + (0x344, "M", "̈́"), + (0x345, "M", "ι"), + (0x346, "V"), + (0x34F, "I"), + (0x350, "V"), + (0x370, "M", "ͱ"), + (0x371, "V"), + (0x372, "M", "ͳ"), + (0x373, "V"), + (0x374, "M", "ʹ"), + (0x375, "V"), + (0x376, "M", "ͷ"), + (0x377, "V"), + ] + + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, "X"), + (0x37A, "M", " ι"), + (0x37B, "V"), + (0x37E, "M", ";"), + (0x37F, "M", "ϳ"), + (0x380, "X"), + (0x384, "M", " ́"), + (0x385, "M", " ̈́"), + (0x386, "M", "ά"), + (0x387, "M", "·"), + (0x388, "M", "έ"), + (0x389, "M", "ή"), + (0x38A, "M", "ί"), + (0x38B, "X"), + (0x38C, "M", "ό"), + (0x38D, "X"), + (0x38E, "M", "ύ"), + (0x38F, "M", "ώ"), + (0x390, "V"), + (0x391, "M", "α"), + (0x392, "M", "β"), + (0x393, "M", "γ"), + (0x394, "M", "δ"), + (0x395, "M", "ε"), + (0x396, "M", "ζ"), + (0x397, "M", "η"), + (0x398, "M", "θ"), + (0x399, "M", "ι"), + (0x39A, "M", "κ"), + (0x39B, "M", "λ"), + (0x39C, "M", "μ"), + (0x39D, "M", "ν"), + (0x39E, "M", "ξ"), + (0x39F, "M", "ο"), + (0x3A0, "M", "π"), + (0x3A1, "M", "ρ"), + (0x3A2, "X"), + (0x3A3, "M", "σ"), + (0x3A4, "M", "τ"), + (0x3A5, "M", "υ"), + (0x3A6, "M", "φ"), + (0x3A7, "M", "χ"), + (0x3A8, "M", "ψ"), + (0x3A9, "M", "ω"), + (0x3AA, "M", "ϊ"), + (0x3AB, "M", "ϋ"), + (0x3AC, "V"), + (0x3C2, "D", "σ"), + (0x3C3, "V"), + (0x3CF, "M", "ϗ"), + (0x3D0, "M", "β"), + (0x3D1, "M", "θ"), + (0x3D2, "M", "υ"), + (0x3D3, "M", "ύ"), + (0x3D4, "M", "ϋ"), + (0x3D5, "M", "φ"), + (0x3D6, "M", "π"), + (0x3D7, "V"), + (0x3D8, "M", "ϙ"), + (0x3D9, "V"), + (0x3DA, "M", "ϛ"), + (0x3DB, "V"), + (0x3DC, "M", "ϝ"), + (0x3DD, "V"), + (0x3DE, "M", "ϟ"), + (0x3DF, "V"), + (0x3E0, "M", "ϡ"), + (0x3E1, "V"), + (0x3E2, "M", "ϣ"), + (0x3E3, "V"), + (0x3E4, "M", "ϥ"), + (0x3E5, "V"), + (0x3E6, "M", "ϧ"), + (0x3E7, "V"), + (0x3E8, "M", "ϩ"), + (0x3E9, "V"), + (0x3EA, "M", "ϫ"), + (0x3EB, "V"), + (0x3EC, "M", "ϭ"), + (0x3ED, "V"), + (0x3EE, "M", "ϯ"), + (0x3EF, "V"), + (0x3F0, "M", "κ"), + (0x3F1, "M", "ρ"), + (0x3F2, "M", "σ"), + (0x3F3, "V"), + (0x3F4, "M", "θ"), + (0x3F5, "M", "ε"), + (0x3F6, "V"), + (0x3F7, "M", "ϸ"), + (0x3F8, "V"), + (0x3F9, "M", "σ"), + (0x3FA, "M", "ϻ"), + (0x3FB, "V"), + (0x3FD, "M", "ͻ"), + (0x3FE, "M", "ͼ"), + (0x3FF, "M", "ͽ"), + (0x400, "M", "ѐ"), + (0x401, "M", "ё"), + (0x402, "M", "ђ"), + ] + + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, "M", "ѓ"), + (0x404, "M", "є"), + (0x405, "M", "ѕ"), + (0x406, "M", "і"), + (0x407, "M", "ї"), + (0x408, "M", "ј"), + (0x409, "M", "љ"), + (0x40A, "M", "њ"), + (0x40B, "M", "ћ"), + (0x40C, "M", "ќ"), + (0x40D, "M", "ѝ"), + (0x40E, "M", "ў"), + (0x40F, "M", "џ"), + (0x410, "M", "а"), + (0x411, "M", "б"), + (0x412, "M", "в"), + (0x413, "M", "г"), + (0x414, "M", "д"), + (0x415, "M", "е"), + (0x416, "M", "ж"), + (0x417, "M", "з"), + (0x418, "M", "и"), + (0x419, "M", "й"), + (0x41A, "M", "к"), + (0x41B, "M", "л"), + (0x41C, "M", "м"), + (0x41D, "M", "н"), + (0x41E, "M", "о"), + (0x41F, "M", "п"), + (0x420, "M", "р"), + (0x421, "M", "с"), + (0x422, "M", "т"), + (0x423, "M", "у"), + (0x424, "M", "ф"), + (0x425, "M", "х"), + (0x426, "M", "ц"), + (0x427, "M", "ч"), + (0x428, "M", "ш"), + (0x429, "M", "щ"), + (0x42A, "M", "ъ"), + (0x42B, "M", "ы"), + (0x42C, "M", "ь"), + (0x42D, "M", "э"), + (0x42E, "M", "ю"), + (0x42F, "M", "я"), + (0x430, "V"), + (0x460, "M", "ѡ"), + 
(0x461, "V"), + (0x462, "M", "ѣ"), + (0x463, "V"), + (0x464, "M", "ѥ"), + (0x465, "V"), + (0x466, "M", "ѧ"), + (0x467, "V"), + (0x468, "M", "ѩ"), + (0x469, "V"), + (0x46A, "M", "ѫ"), + (0x46B, "V"), + (0x46C, "M", "ѭ"), + (0x46D, "V"), + (0x46E, "M", "ѯ"), + (0x46F, "V"), + (0x470, "M", "ѱ"), + (0x471, "V"), + (0x472, "M", "ѳ"), + (0x473, "V"), + (0x474, "M", "ѵ"), + (0x475, "V"), + (0x476, "M", "ѷ"), + (0x477, "V"), + (0x478, "M", "ѹ"), + (0x479, "V"), + (0x47A, "M", "ѻ"), + (0x47B, "V"), + (0x47C, "M", "ѽ"), + (0x47D, "V"), + (0x47E, "M", "ѿ"), + (0x47F, "V"), + (0x480, "M", "ҁ"), + (0x481, "V"), + (0x48A, "M", "ҋ"), + (0x48B, "V"), + (0x48C, "M", "ҍ"), + (0x48D, "V"), + (0x48E, "M", "ҏ"), + (0x48F, "V"), + (0x490, "M", "ґ"), + (0x491, "V"), + (0x492, "M", "ғ"), + (0x493, "V"), + (0x494, "M", "ҕ"), + (0x495, "V"), + (0x496, "M", "җ"), + (0x497, "V"), + (0x498, "M", "ҙ"), + (0x499, "V"), + (0x49A, "M", "қ"), + (0x49B, "V"), + (0x49C, "M", "ҝ"), + (0x49D, "V"), + ] + + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, "M", "ҟ"), + (0x49F, "V"), + (0x4A0, "M", "ҡ"), + (0x4A1, "V"), + (0x4A2, "M", "ң"), + (0x4A3, "V"), + (0x4A4, "M", "ҥ"), + (0x4A5, "V"), + (0x4A6, "M", "ҧ"), + (0x4A7, "V"), + (0x4A8, "M", "ҩ"), + (0x4A9, "V"), + (0x4AA, "M", "ҫ"), + (0x4AB, "V"), + (0x4AC, "M", "ҭ"), + (0x4AD, "V"), + (0x4AE, "M", "ү"), + (0x4AF, "V"), + (0x4B0, "M", "ұ"), + (0x4B1, "V"), + (0x4B2, "M", "ҳ"), + (0x4B3, "V"), + (0x4B4, "M", "ҵ"), + (0x4B5, "V"), + (0x4B6, "M", "ҷ"), + (0x4B7, "V"), + (0x4B8, "M", "ҹ"), + (0x4B9, "V"), + (0x4BA, "M", "һ"), + (0x4BB, "V"), + (0x4BC, "M", "ҽ"), + (0x4BD, "V"), + (0x4BE, "M", "ҿ"), + (0x4BF, "V"), + (0x4C0, "M", "ӏ"), + (0x4C1, "M", "ӂ"), + (0x4C2, "V"), + (0x4C3, "M", "ӄ"), + (0x4C4, "V"), + (0x4C5, "M", "ӆ"), + (0x4C6, "V"), + (0x4C7, "M", "ӈ"), + (0x4C8, "V"), + (0x4C9, "M", "ӊ"), + (0x4CA, "V"), + (0x4CB, "M", "ӌ"), + (0x4CC, "V"), + (0x4CD, "M", "ӎ"), + (0x4CE, "V"), + (0x4D0, "M", "ӑ"), + (0x4D1, "V"), + (0x4D2, "M", "ӓ"), + (0x4D3, "V"), + (0x4D4, "M", "ӕ"), + (0x4D5, "V"), + (0x4D6, "M", "ӗ"), + (0x4D7, "V"), + (0x4D8, "M", "ә"), + (0x4D9, "V"), + (0x4DA, "M", "ӛ"), + (0x4DB, "V"), + (0x4DC, "M", "ӝ"), + (0x4DD, "V"), + (0x4DE, "M", "ӟ"), + (0x4DF, "V"), + (0x4E0, "M", "ӡ"), + (0x4E1, "V"), + (0x4E2, "M", "ӣ"), + (0x4E3, "V"), + (0x4E4, "M", "ӥ"), + (0x4E5, "V"), + (0x4E6, "M", "ӧ"), + (0x4E7, "V"), + (0x4E8, "M", "ө"), + (0x4E9, "V"), + (0x4EA, "M", "ӫ"), + (0x4EB, "V"), + (0x4EC, "M", "ӭ"), + (0x4ED, "V"), + (0x4EE, "M", "ӯ"), + (0x4EF, "V"), + (0x4F0, "M", "ӱ"), + (0x4F1, "V"), + (0x4F2, "M", "ӳ"), + (0x4F3, "V"), + (0x4F4, "M", "ӵ"), + (0x4F5, "V"), + (0x4F6, "M", "ӷ"), + (0x4F7, "V"), + (0x4F8, "M", "ӹ"), + (0x4F9, "V"), + (0x4FA, "M", "ӻ"), + (0x4FB, "V"), + (0x4FC, "M", "ӽ"), + (0x4FD, "V"), + (0x4FE, "M", "ӿ"), + (0x4FF, "V"), + (0x500, "M", "ԁ"), + (0x501, "V"), + (0x502, "M", "ԃ"), + ] + + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, "V"), + (0x504, "M", "ԅ"), + (0x505, "V"), + (0x506, "M", "ԇ"), + (0x507, "V"), + (0x508, "M", "ԉ"), + (0x509, "V"), + (0x50A, "M", "ԋ"), + (0x50B, "V"), + (0x50C, "M", "ԍ"), + (0x50D, "V"), + (0x50E, "M", "ԏ"), + (0x50F, "V"), + (0x510, "M", "ԑ"), + (0x511, "V"), + (0x512, "M", "ԓ"), + (0x513, "V"), + (0x514, "M", "ԕ"), + (0x515, "V"), + (0x516, "M", "ԗ"), + (0x517, "V"), + (0x518, "M", "ԙ"), + (0x519, "V"), + (0x51A, "M", "ԛ"), + (0x51B, "V"), + (0x51C, "M", "ԝ"), + (0x51D, "V"), + (0x51E, "M", "ԟ"), + (0x51F, "V"), + (0x520, 
"M", "ԡ"), + (0x521, "V"), + (0x522, "M", "ԣ"), + (0x523, "V"), + (0x524, "M", "ԥ"), + (0x525, "V"), + (0x526, "M", "ԧ"), + (0x527, "V"), + (0x528, "M", "ԩ"), + (0x529, "V"), + (0x52A, "M", "ԫ"), + (0x52B, "V"), + (0x52C, "M", "ԭ"), + (0x52D, "V"), + (0x52E, "M", "ԯ"), + (0x52F, "V"), + (0x530, "X"), + (0x531, "M", "ա"), + (0x532, "M", "բ"), + (0x533, "M", "գ"), + (0x534, "M", "դ"), + (0x535, "M", "ե"), + (0x536, "M", "զ"), + (0x537, "M", "է"), + (0x538, "M", "ը"), + (0x539, "M", "թ"), + (0x53A, "M", "ժ"), + (0x53B, "M", "ի"), + (0x53C, "M", "լ"), + (0x53D, "M", "խ"), + (0x53E, "M", "ծ"), + (0x53F, "M", "կ"), + (0x540, "M", "հ"), + (0x541, "M", "ձ"), + (0x542, "M", "ղ"), + (0x543, "M", "ճ"), + (0x544, "M", "մ"), + (0x545, "M", "յ"), + (0x546, "M", "ն"), + (0x547, "M", "շ"), + (0x548, "M", "ո"), + (0x549, "M", "չ"), + (0x54A, "M", "պ"), + (0x54B, "M", "ջ"), + (0x54C, "M", "ռ"), + (0x54D, "M", "ս"), + (0x54E, "M", "վ"), + (0x54F, "M", "տ"), + (0x550, "M", "ր"), + (0x551, "M", "ց"), + (0x552, "M", "ւ"), + (0x553, "M", "փ"), + (0x554, "M", "ք"), + (0x555, "M", "օ"), + (0x556, "M", "ֆ"), + (0x557, "X"), + (0x559, "V"), + (0x587, "M", "եւ"), + (0x588, "V"), + (0x58B, "X"), + (0x58D, "V"), + (0x590, "X"), + (0x591, "V"), + (0x5C8, "X"), + (0x5D0, "V"), + (0x5EB, "X"), + (0x5EF, "V"), + (0x5F5, "X"), + (0x606, "V"), + (0x61C, "X"), + (0x61D, "V"), + ] + + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, "M", "اٴ"), + (0x676, "M", "وٴ"), + (0x677, "M", "ۇٴ"), + (0x678, "M", "يٴ"), + (0x679, "V"), + (0x6DD, "X"), + (0x6DE, "V"), + (0x70E, "X"), + (0x710, "V"), + (0x74B, "X"), + (0x74D, "V"), + (0x7B2, "X"), + (0x7C0, "V"), + (0x7FB, "X"), + (0x7FD, "V"), + (0x82E, "X"), + (0x830, "V"), + (0x83F, "X"), + (0x840, "V"), + (0x85C, "X"), + (0x85E, "V"), + (0x85F, "X"), + (0x860, "V"), + (0x86B, "X"), + (0x870, "V"), + (0x88F, "X"), + (0x897, "V"), + (0x8E2, "X"), + (0x8E3, "V"), + (0x958, "M", "क़"), + (0x959, "M", "ख़"), + (0x95A, "M", "ग़"), + (0x95B, "M", "ज़"), + (0x95C, "M", "ड़"), + (0x95D, "M", "ढ़"), + (0x95E, "M", "फ़"), + (0x95F, "M", "य़"), + (0x960, "V"), + (0x984, "X"), + (0x985, "V"), + (0x98D, "X"), + (0x98F, "V"), + (0x991, "X"), + (0x993, "V"), + (0x9A9, "X"), + (0x9AA, "V"), + (0x9B1, "X"), + (0x9B2, "V"), + (0x9B3, "X"), + (0x9B6, "V"), + (0x9BA, "X"), + (0x9BC, "V"), + (0x9C5, "X"), + (0x9C7, "V"), + (0x9C9, "X"), + (0x9CB, "V"), + (0x9CF, "X"), + (0x9D7, "V"), + (0x9D8, "X"), + (0x9DC, "M", "ড়"), + (0x9DD, "M", "ঢ়"), + (0x9DE, "X"), + (0x9DF, "M", "য়"), + (0x9E0, "V"), + (0x9E4, "X"), + (0x9E6, "V"), + (0x9FF, "X"), + (0xA01, "V"), + (0xA04, "X"), + (0xA05, "V"), + (0xA0B, "X"), + (0xA0F, "V"), + (0xA11, "X"), + (0xA13, "V"), + (0xA29, "X"), + (0xA2A, "V"), + (0xA31, "X"), + (0xA32, "V"), + (0xA33, "M", "ਲ਼"), + (0xA34, "X"), + (0xA35, "V"), + (0xA36, "M", "ਸ਼"), + (0xA37, "X"), + (0xA38, "V"), + (0xA3A, "X"), + (0xA3C, "V"), + (0xA3D, "X"), + (0xA3E, "V"), + (0xA43, "X"), + (0xA47, "V"), + (0xA49, "X"), + (0xA4B, "V"), + (0xA4E, "X"), + (0xA51, "V"), + (0xA52, "X"), + (0xA59, "M", "ਖ਼"), + (0xA5A, "M", "ਗ਼"), + (0xA5B, "M", "ਜ਼"), + (0xA5C, "V"), + (0xA5D, "X"), + ] + + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, "M", "ਫ਼"), + (0xA5F, "X"), + (0xA66, "V"), + (0xA77, "X"), + (0xA81, "V"), + (0xA84, "X"), + (0xA85, "V"), + (0xA8E, "X"), + (0xA8F, "V"), + (0xA92, "X"), + (0xA93, "V"), + (0xAA9, "X"), + (0xAAA, "V"), + (0xAB1, "X"), + (0xAB2, "V"), + (0xAB4, "X"), + (0xAB5, "V"), + (0xABA, "X"), 
+ (0xABC, "V"), + (0xAC6, "X"), + (0xAC7, "V"), + (0xACA, "X"), + (0xACB, "V"), + (0xACE, "X"), + (0xAD0, "V"), + (0xAD1, "X"), + (0xAE0, "V"), + (0xAE4, "X"), + (0xAE6, "V"), + (0xAF2, "X"), + (0xAF9, "V"), + (0xB00, "X"), + (0xB01, "V"), + (0xB04, "X"), + (0xB05, "V"), + (0xB0D, "X"), + (0xB0F, "V"), + (0xB11, "X"), + (0xB13, "V"), + (0xB29, "X"), + (0xB2A, "V"), + (0xB31, "X"), + (0xB32, "V"), + (0xB34, "X"), + (0xB35, "V"), + (0xB3A, "X"), + (0xB3C, "V"), + (0xB45, "X"), + (0xB47, "V"), + (0xB49, "X"), + (0xB4B, "V"), + (0xB4E, "X"), + (0xB55, "V"), + (0xB58, "X"), + (0xB5C, "M", "ଡ଼"), + (0xB5D, "M", "ଢ଼"), + (0xB5E, "X"), + (0xB5F, "V"), + (0xB64, "X"), + (0xB66, "V"), + (0xB78, "X"), + (0xB82, "V"), + (0xB84, "X"), + (0xB85, "V"), + (0xB8B, "X"), + (0xB8E, "V"), + (0xB91, "X"), + (0xB92, "V"), + (0xB96, "X"), + (0xB99, "V"), + (0xB9B, "X"), + (0xB9C, "V"), + (0xB9D, "X"), + (0xB9E, "V"), + (0xBA0, "X"), + (0xBA3, "V"), + (0xBA5, "X"), + (0xBA8, "V"), + (0xBAB, "X"), + (0xBAE, "V"), + (0xBBA, "X"), + (0xBBE, "V"), + (0xBC3, "X"), + (0xBC6, "V"), + (0xBC9, "X"), + (0xBCA, "V"), + (0xBCE, "X"), + (0xBD0, "V"), + (0xBD1, "X"), + (0xBD7, "V"), + (0xBD8, "X"), + (0xBE6, "V"), + (0xBFB, "X"), + (0xC00, "V"), + (0xC0D, "X"), + (0xC0E, "V"), + (0xC11, "X"), + (0xC12, "V"), + (0xC29, "X"), + (0xC2A, "V"), + ] + + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, "X"), + (0xC3C, "V"), + (0xC45, "X"), + (0xC46, "V"), + (0xC49, "X"), + (0xC4A, "V"), + (0xC4E, "X"), + (0xC55, "V"), + (0xC57, "X"), + (0xC58, "V"), + (0xC5B, "X"), + (0xC5D, "V"), + (0xC5E, "X"), + (0xC60, "V"), + (0xC64, "X"), + (0xC66, "V"), + (0xC70, "X"), + (0xC77, "V"), + (0xC8D, "X"), + (0xC8E, "V"), + (0xC91, "X"), + (0xC92, "V"), + (0xCA9, "X"), + (0xCAA, "V"), + (0xCB4, "X"), + (0xCB5, "V"), + (0xCBA, "X"), + (0xCBC, "V"), + (0xCC5, "X"), + (0xCC6, "V"), + (0xCC9, "X"), + (0xCCA, "V"), + (0xCCE, "X"), + (0xCD5, "V"), + (0xCD7, "X"), + (0xCDD, "V"), + (0xCDF, "X"), + (0xCE0, "V"), + (0xCE4, "X"), + (0xCE6, "V"), + (0xCF0, "X"), + (0xCF1, "V"), + (0xCF4, "X"), + (0xD00, "V"), + (0xD0D, "X"), + (0xD0E, "V"), + (0xD11, "X"), + (0xD12, "V"), + (0xD45, "X"), + (0xD46, "V"), + (0xD49, "X"), + (0xD4A, "V"), + (0xD50, "X"), + (0xD54, "V"), + (0xD64, "X"), + (0xD66, "V"), + (0xD80, "X"), + (0xD81, "V"), + (0xD84, "X"), + (0xD85, "V"), + (0xD97, "X"), + (0xD9A, "V"), + (0xDB2, "X"), + (0xDB3, "V"), + (0xDBC, "X"), + (0xDBD, "V"), + (0xDBE, "X"), + (0xDC0, "V"), + (0xDC7, "X"), + (0xDCA, "V"), + (0xDCB, "X"), + (0xDCF, "V"), + (0xDD5, "X"), + (0xDD6, "V"), + (0xDD7, "X"), + (0xDD8, "V"), + (0xDE0, "X"), + (0xDE6, "V"), + (0xDF0, "X"), + (0xDF2, "V"), + (0xDF5, "X"), + (0xE01, "V"), + (0xE33, "M", "ํา"), + (0xE34, "V"), + (0xE3B, "X"), + (0xE3F, "V"), + (0xE5C, "X"), + (0xE81, "V"), + (0xE83, "X"), + (0xE84, "V"), + (0xE85, "X"), + (0xE86, "V"), + (0xE8B, "X"), + (0xE8C, "V"), + (0xEA4, "X"), + (0xEA5, "V"), + (0xEA6, "X"), + (0xEA7, "V"), + (0xEB3, "M", "ໍາ"), + (0xEB4, "V"), + ] + + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, "X"), + (0xEC0, "V"), + (0xEC5, "X"), + (0xEC6, "V"), + (0xEC7, "X"), + (0xEC8, "V"), + (0xECF, "X"), + (0xED0, "V"), + (0xEDA, "X"), + (0xEDC, "M", "ຫນ"), + (0xEDD, "M", "ຫມ"), + (0xEDE, "V"), + (0xEE0, "X"), + (0xF00, "V"), + (0xF0C, "M", "་"), + (0xF0D, "V"), + (0xF43, "M", "གྷ"), + (0xF44, "V"), + (0xF48, "X"), + (0xF49, "V"), + (0xF4D, "M", "ཌྷ"), + (0xF4E, "V"), + (0xF52, "M", "དྷ"), + (0xF53, "V"), + (0xF57, "M", "བྷ"), 
+ (0xF58, "V"), + (0xF5C, "M", "ཛྷ"), + (0xF5D, "V"), + (0xF69, "M", "ཀྵ"), + (0xF6A, "V"), + (0xF6D, "X"), + (0xF71, "V"), + (0xF73, "M", "ཱི"), + (0xF74, "V"), + (0xF75, "M", "ཱུ"), + (0xF76, "M", "ྲྀ"), + (0xF77, "M", "ྲཱྀ"), + (0xF78, "M", "ླྀ"), + (0xF79, "M", "ླཱྀ"), + (0xF7A, "V"), + (0xF81, "M", "ཱྀ"), + (0xF82, "V"), + (0xF93, "M", "ྒྷ"), + (0xF94, "V"), + (0xF98, "X"), + (0xF99, "V"), + (0xF9D, "M", "ྜྷ"), + (0xF9E, "V"), + (0xFA2, "M", "ྡྷ"), + (0xFA3, "V"), + (0xFA7, "M", "ྦྷ"), + (0xFA8, "V"), + (0xFAC, "M", "ྫྷ"), + (0xFAD, "V"), + (0xFB9, "M", "ྐྵ"), + (0xFBA, "V"), + (0xFBD, "X"), + (0xFBE, "V"), + (0xFCD, "X"), + (0xFCE, "V"), + (0xFDB, "X"), + (0x1000, "V"), + (0x10A0, "M", "ⴀ"), + (0x10A1, "M", "ⴁ"), + (0x10A2, "M", "ⴂ"), + (0x10A3, "M", "ⴃ"), + (0x10A4, "M", "ⴄ"), + (0x10A5, "M", "ⴅ"), + (0x10A6, "M", "ⴆ"), + (0x10A7, "M", "ⴇ"), + (0x10A8, "M", "ⴈ"), + (0x10A9, "M", "ⴉ"), + (0x10AA, "M", "ⴊ"), + (0x10AB, "M", "ⴋ"), + (0x10AC, "M", "ⴌ"), + (0x10AD, "M", "ⴍ"), + (0x10AE, "M", "ⴎ"), + (0x10AF, "M", "ⴏ"), + (0x10B0, "M", "ⴐ"), + (0x10B1, "M", "ⴑ"), + (0x10B2, "M", "ⴒ"), + (0x10B3, "M", "ⴓ"), + (0x10B4, "M", "ⴔ"), + (0x10B5, "M", "ⴕ"), + (0x10B6, "M", "ⴖ"), + (0x10B7, "M", "ⴗ"), + (0x10B8, "M", "ⴘ"), + (0x10B9, "M", "ⴙ"), + (0x10BA, "M", "ⴚ"), + (0x10BB, "M", "ⴛ"), + (0x10BC, "M", "ⴜ"), + (0x10BD, "M", "ⴝ"), + (0x10BE, "M", "ⴞ"), + (0x10BF, "M", "ⴟ"), + (0x10C0, "M", "ⴠ"), + (0x10C1, "M", "ⴡ"), + (0x10C2, "M", "ⴢ"), + (0x10C3, "M", "ⴣ"), + (0x10C4, "M", "ⴤ"), + (0x10C5, "M", "ⴥ"), + ] + + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10C6, "X"), + (0x10C7, "M", "ⴧ"), + (0x10C8, "X"), + (0x10CD, "M", "ⴭ"), + (0x10CE, "X"), + (0x10D0, "V"), + (0x10FC, "M", "ნ"), + (0x10FD, "V"), + (0x115F, "I"), + (0x1161, "V"), + (0x1249, "X"), + (0x124A, "V"), + (0x124E, "X"), + (0x1250, "V"), + (0x1257, "X"), + (0x1258, "V"), + (0x1259, "X"), + (0x125A, "V"), + (0x125E, "X"), + (0x1260, "V"), + (0x1289, "X"), + (0x128A, "V"), + (0x128E, "X"), + (0x1290, "V"), + (0x12B1, "X"), + (0x12B2, "V"), + (0x12B6, "X"), + (0x12B8, "V"), + (0x12BF, "X"), + (0x12C0, "V"), + (0x12C1, "X"), + (0x12C2, "V"), + (0x12C6, "X"), + (0x12C8, "V"), + (0x12D7, "X"), + (0x12D8, "V"), + (0x1311, "X"), + (0x1312, "V"), + (0x1316, "X"), + (0x1318, "V"), + (0x135B, "X"), + (0x135D, "V"), + (0x137D, "X"), + (0x1380, "V"), + (0x139A, "X"), + (0x13A0, "V"), + (0x13F6, "X"), + (0x13F8, "M", "Ᏸ"), + (0x13F9, "M", "Ᏹ"), + (0x13FA, "M", "Ᏺ"), + (0x13FB, "M", "Ᏻ"), + (0x13FC, "M", "Ᏼ"), + (0x13FD, "M", "Ᏽ"), + (0x13FE, "X"), + (0x1400, "V"), + (0x1680, "X"), + (0x1681, "V"), + (0x169D, "X"), + (0x16A0, "V"), + (0x16F9, "X"), + (0x1700, "V"), + (0x1716, "X"), + (0x171F, "V"), + (0x1737, "X"), + (0x1740, "V"), + (0x1754, "X"), + (0x1760, "V"), + (0x176D, "X"), + (0x176E, "V"), + (0x1771, "X"), + (0x1772, "V"), + (0x1774, "X"), + (0x1780, "V"), + (0x17B4, "I"), + (0x17B6, "V"), + (0x17DE, "X"), + (0x17E0, "V"), + (0x17EA, "X"), + (0x17F0, "V"), + (0x17FA, "X"), + (0x1800, "V"), + (0x180B, "I"), + (0x1810, "V"), + (0x181A, "X"), + (0x1820, "V"), + (0x1879, "X"), + (0x1880, "V"), + (0x18AB, "X"), + (0x18B0, "V"), + (0x18F6, "X"), + (0x1900, "V"), + (0x191F, "X"), + (0x1920, "V"), + (0x192C, "X"), + (0x1930, "V"), + (0x193C, "X"), + (0x1940, "V"), + (0x1941, "X"), + (0x1944, "V"), + (0x196E, "X"), + ] + + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1970, "V"), + (0x1975, "X"), + (0x1980, "V"), + (0x19AC, "X"), + (0x19B0, "V"), + (0x19CA, "X"), + 
(0x19D0, "V"), + (0x19DB, "X"), + (0x19DE, "V"), + (0x1A1C, "X"), + (0x1A1E, "V"), + (0x1A5F, "X"), + (0x1A60, "V"), + (0x1A7D, "X"), + (0x1A7F, "V"), + (0x1A8A, "X"), + (0x1A90, "V"), + (0x1A9A, "X"), + (0x1AA0, "V"), + (0x1AAE, "X"), + (0x1AB0, "V"), + (0x1ACF, "X"), + (0x1B00, "V"), + (0x1B4D, "X"), + (0x1B4E, "V"), + (0x1BF4, "X"), + (0x1BFC, "V"), + (0x1C38, "X"), + (0x1C3B, "V"), + (0x1C4A, "X"), + (0x1C4D, "V"), + (0x1C80, "M", "в"), + (0x1C81, "M", "д"), + (0x1C82, "M", "о"), + (0x1C83, "M", "с"), + (0x1C84, "M", "т"), + (0x1C86, "M", "ъ"), + (0x1C87, "M", "ѣ"), + (0x1C88, "M", "ꙋ"), + (0x1C89, "M", "ᲊ"), + (0x1C8A, "V"), + (0x1C8B, "X"), + (0x1C90, "M", "ა"), + (0x1C91, "M", "ბ"), + (0x1C92, "M", "გ"), + (0x1C93, "M", "დ"), + (0x1C94, "M", "ე"), + (0x1C95, "M", "ვ"), + (0x1C96, "M", "ზ"), + (0x1C97, "M", "თ"), + (0x1C98, "M", "ი"), + (0x1C99, "M", "კ"), + (0x1C9A, "M", "ლ"), + (0x1C9B, "M", "მ"), + (0x1C9C, "M", "ნ"), + (0x1C9D, "M", "ო"), + (0x1C9E, "M", "პ"), + (0x1C9F, "M", "ჟ"), + (0x1CA0, "M", "რ"), + (0x1CA1, "M", "ს"), + (0x1CA2, "M", "ტ"), + (0x1CA3, "M", "უ"), + (0x1CA4, "M", "ფ"), + (0x1CA5, "M", "ქ"), + (0x1CA6, "M", "ღ"), + (0x1CA7, "M", "ყ"), + (0x1CA8, "M", "შ"), + (0x1CA9, "M", "ჩ"), + (0x1CAA, "M", "ც"), + (0x1CAB, "M", "ძ"), + (0x1CAC, "M", "წ"), + (0x1CAD, "M", "ჭ"), + (0x1CAE, "M", "ხ"), + (0x1CAF, "M", "ჯ"), + (0x1CB0, "M", "ჰ"), + (0x1CB1, "M", "ჱ"), + (0x1CB2, "M", "ჲ"), + (0x1CB3, "M", "ჳ"), + (0x1CB4, "M", "ჴ"), + (0x1CB5, "M", "ჵ"), + (0x1CB6, "M", "ჶ"), + (0x1CB7, "M", "ჷ"), + (0x1CB8, "M", "ჸ"), + (0x1CB9, "M", "ჹ"), + (0x1CBA, "M", "ჺ"), + (0x1CBB, "X"), + (0x1CBD, "M", "ჽ"), + (0x1CBE, "M", "ჾ"), + (0x1CBF, "M", "ჿ"), + (0x1CC0, "V"), + (0x1CC8, "X"), + (0x1CD0, "V"), + (0x1CFB, "X"), + (0x1D00, "V"), + (0x1D2C, "M", "a"), + (0x1D2D, "M", "æ"), + (0x1D2E, "M", "b"), + (0x1D2F, "V"), + (0x1D30, "M", "d"), + (0x1D31, "M", "e"), + ] + + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D32, "M", "ǝ"), + (0x1D33, "M", "g"), + (0x1D34, "M", "h"), + (0x1D35, "M", "i"), + (0x1D36, "M", "j"), + (0x1D37, "M", "k"), + (0x1D38, "M", "l"), + (0x1D39, "M", "m"), + (0x1D3A, "M", "n"), + (0x1D3B, "V"), + (0x1D3C, "M", "o"), + (0x1D3D, "M", "ȣ"), + (0x1D3E, "M", "p"), + (0x1D3F, "M", "r"), + (0x1D40, "M", "t"), + (0x1D41, "M", "u"), + (0x1D42, "M", "w"), + (0x1D43, "M", "a"), + (0x1D44, "M", "ɐ"), + (0x1D45, "M", "ɑ"), + (0x1D46, "M", "ᴂ"), + (0x1D47, "M", "b"), + (0x1D48, "M", "d"), + (0x1D49, "M", "e"), + (0x1D4A, "M", "ə"), + (0x1D4B, "M", "ɛ"), + (0x1D4C, "M", "ɜ"), + (0x1D4D, "M", "g"), + (0x1D4E, "V"), + (0x1D4F, "M", "k"), + (0x1D50, "M", "m"), + (0x1D51, "M", "ŋ"), + (0x1D52, "M", "o"), + (0x1D53, "M", "ɔ"), + (0x1D54, "M", "ᴖ"), + (0x1D55, "M", "ᴗ"), + (0x1D56, "M", "p"), + (0x1D57, "M", "t"), + (0x1D58, "M", "u"), + (0x1D59, "M", "ᴝ"), + (0x1D5A, "M", "ɯ"), + (0x1D5B, "M", "v"), + (0x1D5C, "M", "ᴥ"), + (0x1D5D, "M", "β"), + (0x1D5E, "M", "γ"), + (0x1D5F, "M", "δ"), + (0x1D60, "M", "φ"), + (0x1D61, "M", "χ"), + (0x1D62, "M", "i"), + (0x1D63, "M", "r"), + (0x1D64, "M", "u"), + (0x1D65, "M", "v"), + (0x1D66, "M", "β"), + (0x1D67, "M", "γ"), + (0x1D68, "M", "ρ"), + (0x1D69, "M", "φ"), + (0x1D6A, "M", "χ"), + (0x1D6B, "V"), + (0x1D78, "M", "н"), + (0x1D79, "V"), + (0x1D9B, "M", "ɒ"), + (0x1D9C, "M", "c"), + (0x1D9D, "M", "ɕ"), + (0x1D9E, "M", "ð"), + (0x1D9F, "M", "ɜ"), + (0x1DA0, "M", "f"), + (0x1DA1, "M", "ɟ"), + (0x1DA2, "M", "ɡ"), + (0x1DA3, "M", "ɥ"), + (0x1DA4, "M", "ɨ"), + (0x1DA5, "M", "ɩ"), + (0x1DA6, "M", "ɪ"), + 
(0x1DA7, "M", "ᵻ"), + (0x1DA8, "M", "ʝ"), + (0x1DA9, "M", "ɭ"), + (0x1DAA, "M", "ᶅ"), + (0x1DAB, "M", "ʟ"), + (0x1DAC, "M", "ɱ"), + (0x1DAD, "M", "ɰ"), + (0x1DAE, "M", "ɲ"), + (0x1DAF, "M", "ɳ"), + (0x1DB0, "M", "ɴ"), + (0x1DB1, "M", "ɵ"), + (0x1DB2, "M", "ɸ"), + (0x1DB3, "M", "ʂ"), + (0x1DB4, "M", "ʃ"), + (0x1DB5, "M", "ƫ"), + (0x1DB6, "M", "ʉ"), + (0x1DB7, "M", "ʊ"), + (0x1DB8, "M", "ᴜ"), + (0x1DB9, "M", "ʋ"), + (0x1DBA, "M", "ʌ"), + (0x1DBB, "M", "z"), + (0x1DBC, "M", "ʐ"), + (0x1DBD, "M", "ʑ"), + (0x1DBE, "M", "ʒ"), + (0x1DBF, "M", "θ"), + (0x1DC0, "V"), + (0x1E00, "M", "ḁ"), + (0x1E01, "V"), + ] + + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E02, "M", "ḃ"), + (0x1E03, "V"), + (0x1E04, "M", "ḅ"), + (0x1E05, "V"), + (0x1E06, "M", "ḇ"), + (0x1E07, "V"), + (0x1E08, "M", "ḉ"), + (0x1E09, "V"), + (0x1E0A, "M", "ḋ"), + (0x1E0B, "V"), + (0x1E0C, "M", "ḍ"), + (0x1E0D, "V"), + (0x1E0E, "M", "ḏ"), + (0x1E0F, "V"), + (0x1E10, "M", "ḑ"), + (0x1E11, "V"), + (0x1E12, "M", "ḓ"), + (0x1E13, "V"), + (0x1E14, "M", "ḕ"), + (0x1E15, "V"), + (0x1E16, "M", "ḗ"), + (0x1E17, "V"), + (0x1E18, "M", "ḙ"), + (0x1E19, "V"), + (0x1E1A, "M", "ḛ"), + (0x1E1B, "V"), + (0x1E1C, "M", "ḝ"), + (0x1E1D, "V"), + (0x1E1E, "M", "ḟ"), + (0x1E1F, "V"), + (0x1E20, "M", "ḡ"), + (0x1E21, "V"), + (0x1E22, "M", "ḣ"), + (0x1E23, "V"), + (0x1E24, "M", "ḥ"), + (0x1E25, "V"), + (0x1E26, "M", "ḧ"), + (0x1E27, "V"), + (0x1E28, "M", "ḩ"), + (0x1E29, "V"), + (0x1E2A, "M", "ḫ"), + (0x1E2B, "V"), + (0x1E2C, "M", "ḭ"), + (0x1E2D, "V"), + (0x1E2E, "M", "ḯ"), + (0x1E2F, "V"), + (0x1E30, "M", "ḱ"), + (0x1E31, "V"), + (0x1E32, "M", "ḳ"), + (0x1E33, "V"), + (0x1E34, "M", "ḵ"), + (0x1E35, "V"), + (0x1E36, "M", "ḷ"), + (0x1E37, "V"), + (0x1E38, "M", "ḹ"), + (0x1E39, "V"), + (0x1E3A, "M", "ḻ"), + (0x1E3B, "V"), + (0x1E3C, "M", "ḽ"), + (0x1E3D, "V"), + (0x1E3E, "M", "ḿ"), + (0x1E3F, "V"), + (0x1E40, "M", "ṁ"), + (0x1E41, "V"), + (0x1E42, "M", "ṃ"), + (0x1E43, "V"), + (0x1E44, "M", "ṅ"), + (0x1E45, "V"), + (0x1E46, "M", "ṇ"), + (0x1E47, "V"), + (0x1E48, "M", "ṉ"), + (0x1E49, "V"), + (0x1E4A, "M", "ṋ"), + (0x1E4B, "V"), + (0x1E4C, "M", "ṍ"), + (0x1E4D, "V"), + (0x1E4E, "M", "ṏ"), + (0x1E4F, "V"), + (0x1E50, "M", "ṑ"), + (0x1E51, "V"), + (0x1E52, "M", "ṓ"), + (0x1E53, "V"), + (0x1E54, "M", "ṕ"), + (0x1E55, "V"), + (0x1E56, "M", "ṗ"), + (0x1E57, "V"), + (0x1E58, "M", "ṙ"), + (0x1E59, "V"), + (0x1E5A, "M", "ṛ"), + (0x1E5B, "V"), + (0x1E5C, "M", "ṝ"), + (0x1E5D, "V"), + (0x1E5E, "M", "ṟ"), + (0x1E5F, "V"), + (0x1E60, "M", "ṡ"), + (0x1E61, "V"), + (0x1E62, "M", "ṣ"), + (0x1E63, "V"), + (0x1E64, "M", "ṥ"), + (0x1E65, "V"), + ] + + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E66, "M", "ṧ"), + (0x1E67, "V"), + (0x1E68, "M", "ṩ"), + (0x1E69, "V"), + (0x1E6A, "M", "ṫ"), + (0x1E6B, "V"), + (0x1E6C, "M", "ṭ"), + (0x1E6D, "V"), + (0x1E6E, "M", "ṯ"), + (0x1E6F, "V"), + (0x1E70, "M", "ṱ"), + (0x1E71, "V"), + (0x1E72, "M", "ṳ"), + (0x1E73, "V"), + (0x1E74, "M", "ṵ"), + (0x1E75, "V"), + (0x1E76, "M", "ṷ"), + (0x1E77, "V"), + (0x1E78, "M", "ṹ"), + (0x1E79, "V"), + (0x1E7A, "M", "ṻ"), + (0x1E7B, "V"), + (0x1E7C, "M", "ṽ"), + (0x1E7D, "V"), + (0x1E7E, "M", "ṿ"), + (0x1E7F, "V"), + (0x1E80, "M", "ẁ"), + (0x1E81, "V"), + (0x1E82, "M", "ẃ"), + (0x1E83, "V"), + (0x1E84, "M", "ẅ"), + (0x1E85, "V"), + (0x1E86, "M", "ẇ"), + (0x1E87, "V"), + (0x1E88, "M", "ẉ"), + (0x1E89, "V"), + (0x1E8A, "M", "ẋ"), + (0x1E8B, "V"), + (0x1E8C, "M", "ẍ"), + (0x1E8D, "V"), + (0x1E8E, "M", "ẏ"), + (0x1E8F, "V"), + 
(0x1E90, "M", "ẑ"), + (0x1E91, "V"), + (0x1E92, "M", "ẓ"), + (0x1E93, "V"), + (0x1E94, "M", "ẕ"), + (0x1E95, "V"), + (0x1E9A, "M", "aʾ"), + (0x1E9B, "M", "ṡ"), + (0x1E9C, "V"), + (0x1E9E, "M", "ß"), + (0x1E9F, "V"), + (0x1EA0, "M", "ạ"), + (0x1EA1, "V"), + (0x1EA2, "M", "ả"), + (0x1EA3, "V"), + (0x1EA4, "M", "ấ"), + (0x1EA5, "V"), + (0x1EA6, "M", "ầ"), + (0x1EA7, "V"), + (0x1EA8, "M", "ẩ"), + (0x1EA9, "V"), + (0x1EAA, "M", "ẫ"), + (0x1EAB, "V"), + (0x1EAC, "M", "ậ"), + (0x1EAD, "V"), + (0x1EAE, "M", "ắ"), + (0x1EAF, "V"), + (0x1EB0, "M", "ằ"), + (0x1EB1, "V"), + (0x1EB2, "M", "ẳ"), + (0x1EB3, "V"), + (0x1EB4, "M", "ẵ"), + (0x1EB5, "V"), + (0x1EB6, "M", "ặ"), + (0x1EB7, "V"), + (0x1EB8, "M", "ẹ"), + (0x1EB9, "V"), + (0x1EBA, "M", "ẻ"), + (0x1EBB, "V"), + (0x1EBC, "M", "ẽ"), + (0x1EBD, "V"), + (0x1EBE, "M", "ế"), + (0x1EBF, "V"), + (0x1EC0, "M", "ề"), + (0x1EC1, "V"), + (0x1EC2, "M", "ể"), + (0x1EC3, "V"), + (0x1EC4, "M", "ễ"), + (0x1EC5, "V"), + (0x1EC6, "M", "ệ"), + (0x1EC7, "V"), + (0x1EC8, "M", "ỉ"), + (0x1EC9, "V"), + (0x1ECA, "M", "ị"), + (0x1ECB, "V"), + (0x1ECC, "M", "ọ"), + (0x1ECD, "V"), + (0x1ECE, "M", "ỏ"), + ] + + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1ECF, "V"), + (0x1ED0, "M", "ố"), + (0x1ED1, "V"), + (0x1ED2, "M", "ồ"), + (0x1ED3, "V"), + (0x1ED4, "M", "ổ"), + (0x1ED5, "V"), + (0x1ED6, "M", "ỗ"), + (0x1ED7, "V"), + (0x1ED8, "M", "ộ"), + (0x1ED9, "V"), + (0x1EDA, "M", "ớ"), + (0x1EDB, "V"), + (0x1EDC, "M", "ờ"), + (0x1EDD, "V"), + (0x1EDE, "M", "ở"), + (0x1EDF, "V"), + (0x1EE0, "M", "ỡ"), + (0x1EE1, "V"), + (0x1EE2, "M", "ợ"), + (0x1EE3, "V"), + (0x1EE4, "M", "ụ"), + (0x1EE5, "V"), + (0x1EE6, "M", "ủ"), + (0x1EE7, "V"), + (0x1EE8, "M", "ứ"), + (0x1EE9, "V"), + (0x1EEA, "M", "ừ"), + (0x1EEB, "V"), + (0x1EEC, "M", "ử"), + (0x1EED, "V"), + (0x1EEE, "M", "ữ"), + (0x1EEF, "V"), + (0x1EF0, "M", "ự"), + (0x1EF1, "V"), + (0x1EF2, "M", "ỳ"), + (0x1EF3, "V"), + (0x1EF4, "M", "ỵ"), + (0x1EF5, "V"), + (0x1EF6, "M", "ỷ"), + (0x1EF7, "V"), + (0x1EF8, "M", "ỹ"), + (0x1EF9, "V"), + (0x1EFA, "M", "ỻ"), + (0x1EFB, "V"), + (0x1EFC, "M", "ỽ"), + (0x1EFD, "V"), + (0x1EFE, "M", "ỿ"), + (0x1EFF, "V"), + (0x1F08, "M", "ἀ"), + (0x1F09, "M", "ἁ"), + (0x1F0A, "M", "ἂ"), + (0x1F0B, "M", "ἃ"), + (0x1F0C, "M", "ἄ"), + (0x1F0D, "M", "ἅ"), + (0x1F0E, "M", "ἆ"), + (0x1F0F, "M", "ἇ"), + (0x1F10, "V"), + (0x1F16, "X"), + (0x1F18, "M", "ἐ"), + (0x1F19, "M", "ἑ"), + (0x1F1A, "M", "ἒ"), + (0x1F1B, "M", "ἓ"), + (0x1F1C, "M", "ἔ"), + (0x1F1D, "M", "ἕ"), + (0x1F1E, "X"), + (0x1F20, "V"), + (0x1F28, "M", "ἠ"), + (0x1F29, "M", "ἡ"), + (0x1F2A, "M", "ἢ"), + (0x1F2B, "M", "ἣ"), + (0x1F2C, "M", "ἤ"), + (0x1F2D, "M", "ἥ"), + (0x1F2E, "M", "ἦ"), + (0x1F2F, "M", "ἧ"), + (0x1F30, "V"), + (0x1F38, "M", "ἰ"), + (0x1F39, "M", "ἱ"), + (0x1F3A, "M", "ἲ"), + (0x1F3B, "M", "ἳ"), + (0x1F3C, "M", "ἴ"), + (0x1F3D, "M", "ἵ"), + (0x1F3E, "M", "ἶ"), + (0x1F3F, "M", "ἷ"), + (0x1F40, "V"), + (0x1F46, "X"), + (0x1F48, "M", "ὀ"), + (0x1F49, "M", "ὁ"), + (0x1F4A, "M", "ὂ"), + (0x1F4B, "M", "ὃ"), + (0x1F4C, "M", "ὄ"), + (0x1F4D, "M", "ὅ"), + (0x1F4E, "X"), + (0x1F50, "V"), + (0x1F58, "X"), + (0x1F59, "M", "ὑ"), + (0x1F5A, "X"), + (0x1F5B, "M", "ὓ"), + (0x1F5C, "X"), + (0x1F5D, "M", "ὕ"), + ] + + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F5E, "X"), + (0x1F5F, "M", "ὗ"), + (0x1F60, "V"), + (0x1F68, "M", "ὠ"), + (0x1F69, "M", "ὡ"), + (0x1F6A, "M", "ὢ"), + (0x1F6B, "M", "ὣ"), + (0x1F6C, "M", "ὤ"), + (0x1F6D, "M", "ὥ"), + (0x1F6E, "M", "ὦ"), + (0x1F6F, "M", 
"ὧ"), + (0x1F70, "V"), + (0x1F71, "M", "ά"), + (0x1F72, "V"), + (0x1F73, "M", "έ"), + (0x1F74, "V"), + (0x1F75, "M", "ή"), + (0x1F76, "V"), + (0x1F77, "M", "ί"), + (0x1F78, "V"), + (0x1F79, "M", "ό"), + (0x1F7A, "V"), + (0x1F7B, "M", "ύ"), + (0x1F7C, "V"), + (0x1F7D, "M", "ώ"), + (0x1F7E, "X"), + (0x1F80, "M", "ἀι"), + (0x1F81, "M", "ἁι"), + (0x1F82, "M", "ἂι"), + (0x1F83, "M", "ἃι"), + (0x1F84, "M", "ἄι"), + (0x1F85, "M", "ἅι"), + (0x1F86, "M", "ἆι"), + (0x1F87, "M", "ἇι"), + (0x1F88, "M", "ἀι"), + (0x1F89, "M", "ἁι"), + (0x1F8A, "M", "ἂι"), + (0x1F8B, "M", "ἃι"), + (0x1F8C, "M", "ἄι"), + (0x1F8D, "M", "ἅι"), + (0x1F8E, "M", "ἆι"), + (0x1F8F, "M", "ἇι"), + (0x1F90, "M", "ἠι"), + (0x1F91, "M", "ἡι"), + (0x1F92, "M", "ἢι"), + (0x1F93, "M", "ἣι"), + (0x1F94, "M", "ἤι"), + (0x1F95, "M", "ἥι"), + (0x1F96, "M", "ἦι"), + (0x1F97, "M", "ἧι"), + (0x1F98, "M", "ἠι"), + (0x1F99, "M", "ἡι"), + (0x1F9A, "M", "ἢι"), + (0x1F9B, "M", "ἣι"), + (0x1F9C, "M", "ἤι"), + (0x1F9D, "M", "ἥι"), + (0x1F9E, "M", "ἦι"), + (0x1F9F, "M", "ἧι"), + (0x1FA0, "M", "ὠι"), + (0x1FA1, "M", "ὡι"), + (0x1FA2, "M", "ὢι"), + (0x1FA3, "M", "ὣι"), + (0x1FA4, "M", "ὤι"), + (0x1FA5, "M", "ὥι"), + (0x1FA6, "M", "ὦι"), + (0x1FA7, "M", "ὧι"), + (0x1FA8, "M", "ὠι"), + (0x1FA9, "M", "ὡι"), + (0x1FAA, "M", "ὢι"), + (0x1FAB, "M", "ὣι"), + (0x1FAC, "M", "ὤι"), + (0x1FAD, "M", "ὥι"), + (0x1FAE, "M", "ὦι"), + (0x1FAF, "M", "ὧι"), + (0x1FB0, "V"), + (0x1FB2, "M", "ὰι"), + (0x1FB3, "M", "αι"), + (0x1FB4, "M", "άι"), + (0x1FB5, "X"), + (0x1FB6, "V"), + (0x1FB7, "M", "ᾶι"), + (0x1FB8, "M", "ᾰ"), + (0x1FB9, "M", "ᾱ"), + (0x1FBA, "M", "ὰ"), + (0x1FBB, "M", "ά"), + (0x1FBC, "M", "αι"), + (0x1FBD, "M", " ̓"), + (0x1FBE, "M", "ι"), + (0x1FBF, "M", " ̓"), + (0x1FC0, "M", " ͂"), + (0x1FC1, "M", " ̈͂"), + (0x1FC2, "M", "ὴι"), + (0x1FC3, "M", "ηι"), + (0x1FC4, "M", "ήι"), + (0x1FC5, "X"), + (0x1FC6, "V"), + (0x1FC7, "M", "ῆι"), + (0x1FC8, "M", "ὲ"), + (0x1FC9, "M", "έ"), + (0x1FCA, "M", "ὴ"), + ] + + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FCB, "M", "ή"), + (0x1FCC, "M", "ηι"), + (0x1FCD, "M", " ̓̀"), + (0x1FCE, "M", " ̓́"), + (0x1FCF, "M", " ̓͂"), + (0x1FD0, "V"), + (0x1FD3, "M", "ΐ"), + (0x1FD4, "X"), + (0x1FD6, "V"), + (0x1FD8, "M", "ῐ"), + (0x1FD9, "M", "ῑ"), + (0x1FDA, "M", "ὶ"), + (0x1FDB, "M", "ί"), + (0x1FDC, "X"), + (0x1FDD, "M", " ̔̀"), + (0x1FDE, "M", " ̔́"), + (0x1FDF, "M", " ̔͂"), + (0x1FE0, "V"), + (0x1FE3, "M", "ΰ"), + (0x1FE4, "V"), + (0x1FE8, "M", "ῠ"), + (0x1FE9, "M", "ῡ"), + (0x1FEA, "M", "ὺ"), + (0x1FEB, "M", "ύ"), + (0x1FEC, "M", "ῥ"), + (0x1FED, "M", " ̈̀"), + (0x1FEE, "M", " ̈́"), + (0x1FEF, "M", "`"), + (0x1FF0, "X"), + (0x1FF2, "M", "ὼι"), + (0x1FF3, "M", "ωι"), + (0x1FF4, "M", "ώι"), + (0x1FF5, "X"), + (0x1FF6, "V"), + (0x1FF7, "M", "ῶι"), + (0x1FF8, "M", "ὸ"), + (0x1FF9, "M", "ό"), + (0x1FFA, "M", "ὼ"), + (0x1FFB, "M", "ώ"), + (0x1FFC, "M", "ωι"), + (0x1FFD, "M", " ́"), + (0x1FFE, "M", " ̔"), + (0x1FFF, "X"), + (0x2000, "M", " "), + (0x200B, "I"), + (0x200C, "D", ""), + (0x200E, "X"), + (0x2010, "V"), + (0x2011, "M", "‐"), + (0x2012, "V"), + (0x2017, "M", " ̳"), + (0x2018, "V"), + (0x2024, "X"), + (0x2027, "V"), + (0x2028, "X"), + (0x202F, "M", " "), + (0x2030, "V"), + (0x2033, "M", "′′"), + (0x2034, "M", "′′′"), + (0x2035, "V"), + (0x2036, "M", "‵‵"), + (0x2037, "M", "‵‵‵"), + (0x2038, "V"), + (0x203C, "M", "!!"), + (0x203D, "V"), + (0x203E, "M", " ̅"), + (0x203F, "V"), + (0x2047, "M", "??"), + (0x2048, "M", "?!"), + (0x2049, "M", "!?"), + (0x204A, "V"), + (0x2057, "M", "′′′′"), + 
(0x2058, "V"), + (0x205F, "M", " "), + (0x2060, "I"), + (0x2065, "X"), + (0x206A, "I"), + (0x2070, "M", "0"), + (0x2071, "M", "i"), + (0x2072, "X"), + (0x2074, "M", "4"), + (0x2075, "M", "5"), + (0x2076, "M", "6"), + (0x2077, "M", "7"), + (0x2078, "M", "8"), + (0x2079, "M", "9"), + (0x207A, "M", "+"), + (0x207B, "M", "−"), + (0x207C, "M", "="), + (0x207D, "M", "("), + (0x207E, "M", ")"), + (0x207F, "M", "n"), + (0x2080, "M", "0"), + (0x2081, "M", "1"), + (0x2082, "M", "2"), + (0x2083, "M", "3"), + (0x2084, "M", "4"), + (0x2085, "M", "5"), + (0x2086, "M", "6"), + (0x2087, "M", "7"), + ] + + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2088, "M", "8"), + (0x2089, "M", "9"), + (0x208A, "M", "+"), + (0x208B, "M", "−"), + (0x208C, "M", "="), + (0x208D, "M", "("), + (0x208E, "M", ")"), + (0x208F, "X"), + (0x2090, "M", "a"), + (0x2091, "M", "e"), + (0x2092, "M", "o"), + (0x2093, "M", "x"), + (0x2094, "M", "ə"), + (0x2095, "M", "h"), + (0x2096, "M", "k"), + (0x2097, "M", "l"), + (0x2098, "M", "m"), + (0x2099, "M", "n"), + (0x209A, "M", "p"), + (0x209B, "M", "s"), + (0x209C, "M", "t"), + (0x209D, "X"), + (0x20A0, "V"), + (0x20A8, "M", "rs"), + (0x20A9, "V"), + (0x20C1, "X"), + (0x20D0, "V"), + (0x20F1, "X"), + (0x2100, "M", "a/c"), + (0x2101, "M", "a/s"), + (0x2102, "M", "c"), + (0x2103, "M", "°c"), + (0x2104, "V"), + (0x2105, "M", "c/o"), + (0x2106, "M", "c/u"), + (0x2107, "M", "ɛ"), + (0x2108, "V"), + (0x2109, "M", "°f"), + (0x210A, "M", "g"), + (0x210B, "M", "h"), + (0x210F, "M", "ħ"), + (0x2110, "M", "i"), + (0x2112, "M", "l"), + (0x2114, "V"), + (0x2115, "M", "n"), + (0x2116, "M", "no"), + (0x2117, "V"), + (0x2119, "M", "p"), + (0x211A, "M", "q"), + (0x211B, "M", "r"), + (0x211E, "V"), + (0x2120, "M", "sm"), + (0x2121, "M", "tel"), + (0x2122, "M", "tm"), + (0x2123, "V"), + (0x2124, "M", "z"), + (0x2125, "V"), + (0x2126, "M", "ω"), + (0x2127, "V"), + (0x2128, "M", "z"), + (0x2129, "V"), + (0x212A, "M", "k"), + (0x212B, "M", "å"), + (0x212C, "M", "b"), + (0x212D, "M", "c"), + (0x212E, "V"), + (0x212F, "M", "e"), + (0x2131, "M", "f"), + (0x2132, "M", "ⅎ"), + (0x2133, "M", "m"), + (0x2134, "M", "o"), + (0x2135, "M", "א"), + (0x2136, "M", "ב"), + (0x2137, "M", "ג"), + (0x2138, "M", "ד"), + (0x2139, "M", "i"), + (0x213A, "V"), + (0x213B, "M", "fax"), + (0x213C, "M", "π"), + (0x213D, "M", "γ"), + (0x213F, "M", "π"), + (0x2140, "M", "∑"), + (0x2141, "V"), + (0x2145, "M", "d"), + (0x2147, "M", "e"), + (0x2148, "M", "i"), + (0x2149, "M", "j"), + (0x214A, "V"), + (0x2150, "M", "1⁄7"), + (0x2151, "M", "1⁄9"), + (0x2152, "M", "1⁄10"), + (0x2153, "M", "1⁄3"), + (0x2154, "M", "2⁄3"), + (0x2155, "M", "1⁄5"), + (0x2156, "M", "2⁄5"), + (0x2157, "M", "3⁄5"), + (0x2158, "M", "4⁄5"), + (0x2159, "M", "1⁄6"), + (0x215A, "M", "5⁄6"), + (0x215B, "M", "1⁄8"), + ] + + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x215C, "M", "3⁄8"), + (0x215D, "M", "5⁄8"), + (0x215E, "M", "7⁄8"), + (0x215F, "M", "1⁄"), + (0x2160, "M", "i"), + (0x2161, "M", "ii"), + (0x2162, "M", "iii"), + (0x2163, "M", "iv"), + (0x2164, "M", "v"), + (0x2165, "M", "vi"), + (0x2166, "M", "vii"), + (0x2167, "M", "viii"), + (0x2168, "M", "ix"), + (0x2169, "M", "x"), + (0x216A, "M", "xi"), + (0x216B, "M", "xii"), + (0x216C, "M", "l"), + (0x216D, "M", "c"), + (0x216E, "M", "d"), + (0x216F, "M", "m"), + (0x2170, "M", "i"), + (0x2171, "M", "ii"), + (0x2172, "M", "iii"), + (0x2173, "M", "iv"), + (0x2174, "M", "v"), + (0x2175, "M", "vi"), + (0x2176, "M", "vii"), + (0x2177, "M", 
"viii"), + (0x2178, "M", "ix"), + (0x2179, "M", "x"), + (0x217A, "M", "xi"), + (0x217B, "M", "xii"), + (0x217C, "M", "l"), + (0x217D, "M", "c"), + (0x217E, "M", "d"), + (0x217F, "M", "m"), + (0x2180, "V"), + (0x2183, "M", "ↄ"), + (0x2184, "V"), + (0x2189, "M", "0⁄3"), + (0x218A, "V"), + (0x218C, "X"), + (0x2190, "V"), + (0x222C, "M", "∫∫"), + (0x222D, "M", "∫∫∫"), + (0x222E, "V"), + (0x222F, "M", "∮∮"), + (0x2230, "M", "∮∮∮"), + (0x2231, "V"), + (0x2329, "M", "〈"), + (0x232A, "M", "〉"), + (0x232B, "V"), + (0x242A, "X"), + (0x2440, "V"), + (0x244B, "X"), + (0x2460, "M", "1"), + (0x2461, "M", "2"), + (0x2462, "M", "3"), + (0x2463, "M", "4"), + (0x2464, "M", "5"), + (0x2465, "M", "6"), + (0x2466, "M", "7"), + (0x2467, "M", "8"), + (0x2468, "M", "9"), + (0x2469, "M", "10"), + (0x246A, "M", "11"), + (0x246B, "M", "12"), + (0x246C, "M", "13"), + (0x246D, "M", "14"), + (0x246E, "M", "15"), + (0x246F, "M", "16"), + (0x2470, "M", "17"), + (0x2471, "M", "18"), + (0x2472, "M", "19"), + (0x2473, "M", "20"), + (0x2474, "M", "(1)"), + (0x2475, "M", "(2)"), + (0x2476, "M", "(3)"), + (0x2477, "M", "(4)"), + (0x2478, "M", "(5)"), + (0x2479, "M", "(6)"), + (0x247A, "M", "(7)"), + (0x247B, "M", "(8)"), + (0x247C, "M", "(9)"), + (0x247D, "M", "(10)"), + (0x247E, "M", "(11)"), + (0x247F, "M", "(12)"), + (0x2480, "M", "(13)"), + (0x2481, "M", "(14)"), + (0x2482, "M", "(15)"), + (0x2483, "M", "(16)"), + (0x2484, "M", "(17)"), + (0x2485, "M", "(18)"), + (0x2486, "M", "(19)"), + (0x2487, "M", "(20)"), + (0x2488, "X"), + (0x249C, "M", "(a)"), + (0x249D, "M", "(b)"), + (0x249E, "M", "(c)"), + (0x249F, "M", "(d)"), + ] + + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24A0, "M", "(e)"), + (0x24A1, "M", "(f)"), + (0x24A2, "M", "(g)"), + (0x24A3, "M", "(h)"), + (0x24A4, "M", "(i)"), + (0x24A5, "M", "(j)"), + (0x24A6, "M", "(k)"), + (0x24A7, "M", "(l)"), + (0x24A8, "M", "(m)"), + (0x24A9, "M", "(n)"), + (0x24AA, "M", "(o)"), + (0x24AB, "M", "(p)"), + (0x24AC, "M", "(q)"), + (0x24AD, "M", "(r)"), + (0x24AE, "M", "(s)"), + (0x24AF, "M", "(t)"), + (0x24B0, "M", "(u)"), + (0x24B1, "M", "(v)"), + (0x24B2, "M", "(w)"), + (0x24B3, "M", "(x)"), + (0x24B4, "M", "(y)"), + (0x24B5, "M", "(z)"), + (0x24B6, "M", "a"), + (0x24B7, "M", "b"), + (0x24B8, "M", "c"), + (0x24B9, "M", "d"), + (0x24BA, "M", "e"), + (0x24BB, "M", "f"), + (0x24BC, "M", "g"), + (0x24BD, "M", "h"), + (0x24BE, "M", "i"), + (0x24BF, "M", "j"), + (0x24C0, "M", "k"), + (0x24C1, "M", "l"), + (0x24C2, "M", "m"), + (0x24C3, "M", "n"), + (0x24C4, "M", "o"), + (0x24C5, "M", "p"), + (0x24C6, "M", "q"), + (0x24C7, "M", "r"), + (0x24C8, "M", "s"), + (0x24C9, "M", "t"), + (0x24CA, "M", "u"), + (0x24CB, "M", "v"), + (0x24CC, "M", "w"), + (0x24CD, "M", "x"), + (0x24CE, "M", "y"), + (0x24CF, "M", "z"), + (0x24D0, "M", "a"), + (0x24D1, "M", "b"), + (0x24D2, "M", "c"), + (0x24D3, "M", "d"), + (0x24D4, "M", "e"), + (0x24D5, "M", "f"), + (0x24D6, "M", "g"), + (0x24D7, "M", "h"), + (0x24D8, "M", "i"), + (0x24D9, "M", "j"), + (0x24DA, "M", "k"), + (0x24DB, "M", "l"), + (0x24DC, "M", "m"), + (0x24DD, "M", "n"), + (0x24DE, "M", "o"), + (0x24DF, "M", "p"), + (0x24E0, "M", "q"), + (0x24E1, "M", "r"), + (0x24E2, "M", "s"), + (0x24E3, "M", "t"), + (0x24E4, "M", "u"), + (0x24E5, "M", "v"), + (0x24E6, "M", "w"), + (0x24E7, "M", "x"), + (0x24E8, "M", "y"), + (0x24E9, "M", "z"), + (0x24EA, "M", "0"), + (0x24EB, "V"), + (0x2A0C, "M", "∫∫∫∫"), + (0x2A0D, "V"), + (0x2A74, "M", "::="), + (0x2A75, "M", "=="), + (0x2A76, "M", "==="), + (0x2A77, "V"), + 
(0x2ADC, "M", "⫝̸"), + (0x2ADD, "V"), + (0x2B74, "X"), + (0x2B76, "V"), + (0x2B96, "X"), + (0x2B97, "V"), + (0x2C00, "M", "ⰰ"), + (0x2C01, "M", "ⰱ"), + (0x2C02, "M", "ⰲ"), + (0x2C03, "M", "ⰳ"), + (0x2C04, "M", "ⰴ"), + (0x2C05, "M", "ⰵ"), + (0x2C06, "M", "ⰶ"), + (0x2C07, "M", "ⰷ"), + (0x2C08, "M", "ⰸ"), + (0x2C09, "M", "ⰹ"), + (0x2C0A, "M", "ⰺ"), + (0x2C0B, "M", "ⰻ"), + ] + + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C0C, "M", "ⰼ"), + (0x2C0D, "M", "ⰽ"), + (0x2C0E, "M", "ⰾ"), + (0x2C0F, "M", "ⰿ"), + (0x2C10, "M", "ⱀ"), + (0x2C11, "M", "ⱁ"), + (0x2C12, "M", "ⱂ"), + (0x2C13, "M", "ⱃ"), + (0x2C14, "M", "ⱄ"), + (0x2C15, "M", "ⱅ"), + (0x2C16, "M", "ⱆ"), + (0x2C17, "M", "ⱇ"), + (0x2C18, "M", "ⱈ"), + (0x2C19, "M", "ⱉ"), + (0x2C1A, "M", "ⱊ"), + (0x2C1B, "M", "ⱋ"), + (0x2C1C, "M", "ⱌ"), + (0x2C1D, "M", "ⱍ"), + (0x2C1E, "M", "ⱎ"), + (0x2C1F, "M", "ⱏ"), + (0x2C20, "M", "ⱐ"), + (0x2C21, "M", "ⱑ"), + (0x2C22, "M", "ⱒ"), + (0x2C23, "M", "ⱓ"), + (0x2C24, "M", "ⱔ"), + (0x2C25, "M", "ⱕ"), + (0x2C26, "M", "ⱖ"), + (0x2C27, "M", "ⱗ"), + (0x2C28, "M", "ⱘ"), + (0x2C29, "M", "ⱙ"), + (0x2C2A, "M", "ⱚ"), + (0x2C2B, "M", "ⱛ"), + (0x2C2C, "M", "ⱜ"), + (0x2C2D, "M", "ⱝ"), + (0x2C2E, "M", "ⱞ"), + (0x2C2F, "M", "ⱟ"), + (0x2C30, "V"), + (0x2C60, "M", "ⱡ"), + (0x2C61, "V"), + (0x2C62, "M", "ɫ"), + (0x2C63, "M", "ᵽ"), + (0x2C64, "M", "ɽ"), + (0x2C65, "V"), + (0x2C67, "M", "ⱨ"), + (0x2C68, "V"), + (0x2C69, "M", "ⱪ"), + (0x2C6A, "V"), + (0x2C6B, "M", "ⱬ"), + (0x2C6C, "V"), + (0x2C6D, "M", "ɑ"), + (0x2C6E, "M", "ɱ"), + (0x2C6F, "M", "ɐ"), + (0x2C70, "M", "ɒ"), + (0x2C71, "V"), + (0x2C72, "M", "ⱳ"), + (0x2C73, "V"), + (0x2C75, "M", "ⱶ"), + (0x2C76, "V"), + (0x2C7C, "M", "j"), + (0x2C7D, "M", "v"), + (0x2C7E, "M", "ȿ"), + (0x2C7F, "M", "ɀ"), + (0x2C80, "M", "ⲁ"), + (0x2C81, "V"), + (0x2C82, "M", "ⲃ"), + (0x2C83, "V"), + (0x2C84, "M", "ⲅ"), + (0x2C85, "V"), + (0x2C86, "M", "ⲇ"), + (0x2C87, "V"), + (0x2C88, "M", "ⲉ"), + (0x2C89, "V"), + (0x2C8A, "M", "ⲋ"), + (0x2C8B, "V"), + (0x2C8C, "M", "ⲍ"), + (0x2C8D, "V"), + (0x2C8E, "M", "ⲏ"), + (0x2C8F, "V"), + (0x2C90, "M", "ⲑ"), + (0x2C91, "V"), + (0x2C92, "M", "ⲓ"), + (0x2C93, "V"), + (0x2C94, "M", "ⲕ"), + (0x2C95, "V"), + (0x2C96, "M", "ⲗ"), + (0x2C97, "V"), + (0x2C98, "M", "ⲙ"), + (0x2C99, "V"), + (0x2C9A, "M", "ⲛ"), + (0x2C9B, "V"), + (0x2C9C, "M", "ⲝ"), + (0x2C9D, "V"), + (0x2C9E, "M", "ⲟ"), + (0x2C9F, "V"), + (0x2CA0, "M", "ⲡ"), + (0x2CA1, "V"), + (0x2CA2, "M", "ⲣ"), + (0x2CA3, "V"), + (0x2CA4, "M", "ⲥ"), + (0x2CA5, "V"), + ] + + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CA6, "M", "ⲧ"), + (0x2CA7, "V"), + (0x2CA8, "M", "ⲩ"), + (0x2CA9, "V"), + (0x2CAA, "M", "ⲫ"), + (0x2CAB, "V"), + (0x2CAC, "M", "ⲭ"), + (0x2CAD, "V"), + (0x2CAE, "M", "ⲯ"), + (0x2CAF, "V"), + (0x2CB0, "M", "ⲱ"), + (0x2CB1, "V"), + (0x2CB2, "M", "ⲳ"), + (0x2CB3, "V"), + (0x2CB4, "M", "ⲵ"), + (0x2CB5, "V"), + (0x2CB6, "M", "ⲷ"), + (0x2CB7, "V"), + (0x2CB8, "M", "ⲹ"), + (0x2CB9, "V"), + (0x2CBA, "M", "ⲻ"), + (0x2CBB, "V"), + (0x2CBC, "M", "ⲽ"), + (0x2CBD, "V"), + (0x2CBE, "M", "ⲿ"), + (0x2CBF, "V"), + (0x2CC0, "M", "ⳁ"), + (0x2CC1, "V"), + (0x2CC2, "M", "ⳃ"), + (0x2CC3, "V"), + (0x2CC4, "M", "ⳅ"), + (0x2CC5, "V"), + (0x2CC6, "M", "ⳇ"), + (0x2CC7, "V"), + (0x2CC8, "M", "ⳉ"), + (0x2CC9, "V"), + (0x2CCA, "M", "ⳋ"), + (0x2CCB, "V"), + (0x2CCC, "M", "ⳍ"), + (0x2CCD, "V"), + (0x2CCE, "M", "ⳏ"), + (0x2CCF, "V"), + (0x2CD0, "M", "ⳑ"), + (0x2CD1, "V"), + (0x2CD2, "M", "ⳓ"), + (0x2CD3, "V"), + (0x2CD4, "M", "ⳕ"), + (0x2CD5, "V"), + (0x2CD6, 
"M", "ⳗ"), + (0x2CD7, "V"), + (0x2CD8, "M", "ⳙ"), + (0x2CD9, "V"), + (0x2CDA, "M", "ⳛ"), + (0x2CDB, "V"), + (0x2CDC, "M", "ⳝ"), + (0x2CDD, "V"), + (0x2CDE, "M", "ⳟ"), + (0x2CDF, "V"), + (0x2CE0, "M", "ⳡ"), + (0x2CE1, "V"), + (0x2CE2, "M", "ⳣ"), + (0x2CE3, "V"), + (0x2CEB, "M", "ⳬ"), + (0x2CEC, "V"), + (0x2CED, "M", "ⳮ"), + (0x2CEE, "V"), + (0x2CF2, "M", "ⳳ"), + (0x2CF3, "V"), + (0x2CF4, "X"), + (0x2CF9, "V"), + (0x2D26, "X"), + (0x2D27, "V"), + (0x2D28, "X"), + (0x2D2D, "V"), + (0x2D2E, "X"), + (0x2D30, "V"), + (0x2D68, "X"), + (0x2D6F, "M", "ⵡ"), + (0x2D70, "V"), + (0x2D71, "X"), + (0x2D7F, "V"), + (0x2D97, "X"), + (0x2DA0, "V"), + (0x2DA7, "X"), + (0x2DA8, "V"), + (0x2DAF, "X"), + (0x2DB0, "V"), + (0x2DB7, "X"), + (0x2DB8, "V"), + (0x2DBF, "X"), + (0x2DC0, "V"), + (0x2DC7, "X"), + (0x2DC8, "V"), + (0x2DCF, "X"), + (0x2DD0, "V"), + (0x2DD7, "X"), + (0x2DD8, "V"), + (0x2DDF, "X"), + (0x2DE0, "V"), + (0x2E5E, "X"), + ] + + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2E80, "V"), + (0x2E9A, "X"), + (0x2E9B, "V"), + (0x2E9F, "M", "母"), + (0x2EA0, "V"), + (0x2EF3, "M", "龟"), + (0x2EF4, "X"), + (0x2F00, "M", "一"), + (0x2F01, "M", "丨"), + (0x2F02, "M", "丶"), + (0x2F03, "M", "丿"), + (0x2F04, "M", "乙"), + (0x2F05, "M", "亅"), + (0x2F06, "M", "二"), + (0x2F07, "M", "亠"), + (0x2F08, "M", "人"), + (0x2F09, "M", "儿"), + (0x2F0A, "M", "入"), + (0x2F0B, "M", "八"), + (0x2F0C, "M", "冂"), + (0x2F0D, "M", "冖"), + (0x2F0E, "M", "冫"), + (0x2F0F, "M", "几"), + (0x2F10, "M", "凵"), + (0x2F11, "M", "刀"), + (0x2F12, "M", "力"), + (0x2F13, "M", "勹"), + (0x2F14, "M", "匕"), + (0x2F15, "M", "匚"), + (0x2F16, "M", "匸"), + (0x2F17, "M", "十"), + (0x2F18, "M", "卜"), + (0x2F19, "M", "卩"), + (0x2F1A, "M", "厂"), + (0x2F1B, "M", "厶"), + (0x2F1C, "M", "又"), + (0x2F1D, "M", "口"), + (0x2F1E, "M", "囗"), + (0x2F1F, "M", "土"), + (0x2F20, "M", "士"), + (0x2F21, "M", "夂"), + (0x2F22, "M", "夊"), + (0x2F23, "M", "夕"), + (0x2F24, "M", "大"), + (0x2F25, "M", "女"), + (0x2F26, "M", "子"), + (0x2F27, "M", "宀"), + (0x2F28, "M", "寸"), + (0x2F29, "M", "小"), + (0x2F2A, "M", "尢"), + (0x2F2B, "M", "尸"), + (0x2F2C, "M", "屮"), + (0x2F2D, "M", "山"), + (0x2F2E, "M", "巛"), + (0x2F2F, "M", "工"), + (0x2F30, "M", "己"), + (0x2F31, "M", "巾"), + (0x2F32, "M", "干"), + (0x2F33, "M", "幺"), + (0x2F34, "M", "广"), + (0x2F35, "M", "廴"), + (0x2F36, "M", "廾"), + (0x2F37, "M", "弋"), + (0x2F38, "M", "弓"), + (0x2F39, "M", "彐"), + (0x2F3A, "M", "彡"), + (0x2F3B, "M", "彳"), + (0x2F3C, "M", "心"), + (0x2F3D, "M", "戈"), + (0x2F3E, "M", "戶"), + (0x2F3F, "M", "手"), + (0x2F40, "M", "支"), + (0x2F41, "M", "攴"), + (0x2F42, "M", "文"), + (0x2F43, "M", "斗"), + (0x2F44, "M", "斤"), + (0x2F45, "M", "方"), + (0x2F46, "M", "无"), + (0x2F47, "M", "日"), + (0x2F48, "M", "曰"), + (0x2F49, "M", "月"), + (0x2F4A, "M", "木"), + (0x2F4B, "M", "欠"), + (0x2F4C, "M", "止"), + (0x2F4D, "M", "歹"), + (0x2F4E, "M", "殳"), + (0x2F4F, "M", "毋"), + (0x2F50, "M", "比"), + (0x2F51, "M", "毛"), + (0x2F52, "M", "氏"), + (0x2F53, "M", "气"), + (0x2F54, "M", "水"), + (0x2F55, "M", "火"), + (0x2F56, "M", "爪"), + (0x2F57, "M", "父"), + (0x2F58, "M", "爻"), + (0x2F59, "M", "爿"), + (0x2F5A, "M", "片"), + (0x2F5B, "M", "牙"), + (0x2F5C, "M", "牛"), + ] + + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F5D, "M", "犬"), + (0x2F5E, "M", "玄"), + (0x2F5F, "M", "玉"), + (0x2F60, "M", "瓜"), + (0x2F61, "M", "瓦"), + (0x2F62, "M", "甘"), + (0x2F63, "M", "生"), + (0x2F64, "M", "用"), + (0x2F65, "M", "田"), + (0x2F66, "M", "疋"), + (0x2F67, "M", "疒"), + (0x2F68, "M", "癶"), + 
(0x2F69, "M", "白"), + (0x2F6A, "M", "皮"), + (0x2F6B, "M", "皿"), + (0x2F6C, "M", "目"), + (0x2F6D, "M", "矛"), + (0x2F6E, "M", "矢"), + (0x2F6F, "M", "石"), + (0x2F70, "M", "示"), + (0x2F71, "M", "禸"), + (0x2F72, "M", "禾"), + (0x2F73, "M", "穴"), + (0x2F74, "M", "立"), + (0x2F75, "M", "竹"), + (0x2F76, "M", "米"), + (0x2F77, "M", "糸"), + (0x2F78, "M", "缶"), + (0x2F79, "M", "网"), + (0x2F7A, "M", "羊"), + (0x2F7B, "M", "羽"), + (0x2F7C, "M", "老"), + (0x2F7D, "M", "而"), + (0x2F7E, "M", "耒"), + (0x2F7F, "M", "耳"), + (0x2F80, "M", "聿"), + (0x2F81, "M", "肉"), + (0x2F82, "M", "臣"), + (0x2F83, "M", "自"), + (0x2F84, "M", "至"), + (0x2F85, "M", "臼"), + (0x2F86, "M", "舌"), + (0x2F87, "M", "舛"), + (0x2F88, "M", "舟"), + (0x2F89, "M", "艮"), + (0x2F8A, "M", "色"), + (0x2F8B, "M", "艸"), + (0x2F8C, "M", "虍"), + (0x2F8D, "M", "虫"), + (0x2F8E, "M", "血"), + (0x2F8F, "M", "行"), + (0x2F90, "M", "衣"), + (0x2F91, "M", "襾"), + (0x2F92, "M", "見"), + (0x2F93, "M", "角"), + (0x2F94, "M", "言"), + (0x2F95, "M", "谷"), + (0x2F96, "M", "豆"), + (0x2F97, "M", "豕"), + (0x2F98, "M", "豸"), + (0x2F99, "M", "貝"), + (0x2F9A, "M", "赤"), + (0x2F9B, "M", "走"), + (0x2F9C, "M", "足"), + (0x2F9D, "M", "身"), + (0x2F9E, "M", "車"), + (0x2F9F, "M", "辛"), + (0x2FA0, "M", "辰"), + (0x2FA1, "M", "辵"), + (0x2FA2, "M", "邑"), + (0x2FA3, "M", "酉"), + (0x2FA4, "M", "釆"), + (0x2FA5, "M", "里"), + (0x2FA6, "M", "金"), + (0x2FA7, "M", "長"), + (0x2FA8, "M", "門"), + (0x2FA9, "M", "阜"), + (0x2FAA, "M", "隶"), + (0x2FAB, "M", "隹"), + (0x2FAC, "M", "雨"), + (0x2FAD, "M", "靑"), + (0x2FAE, "M", "非"), + (0x2FAF, "M", "面"), + (0x2FB0, "M", "革"), + (0x2FB1, "M", "韋"), + (0x2FB2, "M", "韭"), + (0x2FB3, "M", "音"), + (0x2FB4, "M", "頁"), + (0x2FB5, "M", "風"), + (0x2FB6, "M", "飛"), + (0x2FB7, "M", "食"), + (0x2FB8, "M", "首"), + (0x2FB9, "M", "香"), + (0x2FBA, "M", "馬"), + (0x2FBB, "M", "骨"), + (0x2FBC, "M", "高"), + (0x2FBD, "M", "髟"), + (0x2FBE, "M", "鬥"), + (0x2FBF, "M", "鬯"), + (0x2FC0, "M", "鬲"), + ] + + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2FC1, "M", "鬼"), + (0x2FC2, "M", "魚"), + (0x2FC3, "M", "鳥"), + (0x2FC4, "M", "鹵"), + (0x2FC5, "M", "鹿"), + (0x2FC6, "M", "麥"), + (0x2FC7, "M", "麻"), + (0x2FC8, "M", "黃"), + (0x2FC9, "M", "黍"), + (0x2FCA, "M", "黑"), + (0x2FCB, "M", "黹"), + (0x2FCC, "M", "黽"), + (0x2FCD, "M", "鼎"), + (0x2FCE, "M", "鼓"), + (0x2FCF, "M", "鼠"), + (0x2FD0, "M", "鼻"), + (0x2FD1, "M", "齊"), + (0x2FD2, "M", "齒"), + (0x2FD3, "M", "龍"), + (0x2FD4, "M", "龜"), + (0x2FD5, "M", "龠"), + (0x2FD6, "X"), + (0x3000, "M", " "), + (0x3001, "V"), + (0x3002, "M", "."), + (0x3003, "V"), + (0x3036, "M", "〒"), + (0x3037, "V"), + (0x3038, "M", "十"), + (0x3039, "M", "卄"), + (0x303A, "M", "卅"), + (0x303B, "V"), + (0x3040, "X"), + (0x3041, "V"), + (0x3097, "X"), + (0x3099, "V"), + (0x309B, "M", " ゙"), + (0x309C, "M", " ゚"), + (0x309D, "V"), + (0x309F, "M", "より"), + (0x30A0, "V"), + (0x30FF, "M", "コト"), + (0x3100, "X"), + (0x3105, "V"), + (0x3130, "X"), + (0x3131, "M", "ᄀ"), + (0x3132, "M", "ᄁ"), + (0x3133, "M", "ᆪ"), + (0x3134, "M", "ᄂ"), + (0x3135, "M", "ᆬ"), + (0x3136, "M", "ᆭ"), + (0x3137, "M", "ᄃ"), + (0x3138, "M", "ᄄ"), + (0x3139, "M", "ᄅ"), + (0x313A, "M", "ᆰ"), + (0x313B, "M", "ᆱ"), + (0x313C, "M", "ᆲ"), + (0x313D, "M", "ᆳ"), + (0x313E, "M", "ᆴ"), + (0x313F, "M", "ᆵ"), + (0x3140, "M", "ᄚ"), + (0x3141, "M", "ᄆ"), + (0x3142, "M", "ᄇ"), + (0x3143, "M", "ᄈ"), + (0x3144, "M", "ᄡ"), + (0x3145, "M", "ᄉ"), + (0x3146, "M", "ᄊ"), + (0x3147, "M", "ᄋ"), + (0x3148, "M", "ᄌ"), + (0x3149, "M", "ᄍ"), + (0x314A, "M", "ᄎ"), + (0x314B, "M", "ᄏ"), + (0x314C, 
"M", "ᄐ"), + (0x314D, "M", "ᄑ"), + (0x314E, "M", "ᄒ"), + (0x314F, "M", "ᅡ"), + (0x3150, "M", "ᅢ"), + (0x3151, "M", "ᅣ"), + (0x3152, "M", "ᅤ"), + (0x3153, "M", "ᅥ"), + (0x3154, "M", "ᅦ"), + (0x3155, "M", "ᅧ"), + (0x3156, "M", "ᅨ"), + (0x3157, "M", "ᅩ"), + (0x3158, "M", "ᅪ"), + (0x3159, "M", "ᅫ"), + (0x315A, "M", "ᅬ"), + (0x315B, "M", "ᅭ"), + (0x315C, "M", "ᅮ"), + (0x315D, "M", "ᅯ"), + (0x315E, "M", "ᅰ"), + (0x315F, "M", "ᅱ"), + (0x3160, "M", "ᅲ"), + (0x3161, "M", "ᅳ"), + (0x3162, "M", "ᅴ"), + (0x3163, "M", "ᅵ"), + (0x3164, "I"), + (0x3165, "M", "ᄔ"), + (0x3166, "M", "ᄕ"), + (0x3167, "M", "ᇇ"), + ] + + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3168, "M", "ᇈ"), + (0x3169, "M", "ᇌ"), + (0x316A, "M", "ᇎ"), + (0x316B, "M", "ᇓ"), + (0x316C, "M", "ᇗ"), + (0x316D, "M", "ᇙ"), + (0x316E, "M", "ᄜ"), + (0x316F, "M", "ᇝ"), + (0x3170, "M", "ᇟ"), + (0x3171, "M", "ᄝ"), + (0x3172, "M", "ᄞ"), + (0x3173, "M", "ᄠ"), + (0x3174, "M", "ᄢ"), + (0x3175, "M", "ᄣ"), + (0x3176, "M", "ᄧ"), + (0x3177, "M", "ᄩ"), + (0x3178, "M", "ᄫ"), + (0x3179, "M", "ᄬ"), + (0x317A, "M", "ᄭ"), + (0x317B, "M", "ᄮ"), + (0x317C, "M", "ᄯ"), + (0x317D, "M", "ᄲ"), + (0x317E, "M", "ᄶ"), + (0x317F, "M", "ᅀ"), + (0x3180, "M", "ᅇ"), + (0x3181, "M", "ᅌ"), + (0x3182, "M", "ᇱ"), + (0x3183, "M", "ᇲ"), + (0x3184, "M", "ᅗ"), + (0x3185, "M", "ᅘ"), + (0x3186, "M", "ᅙ"), + (0x3187, "M", "ᆄ"), + (0x3188, "M", "ᆅ"), + (0x3189, "M", "ᆈ"), + (0x318A, "M", "ᆑ"), + (0x318B, "M", "ᆒ"), + (0x318C, "M", "ᆔ"), + (0x318D, "M", "ᆞ"), + (0x318E, "M", "ᆡ"), + (0x318F, "X"), + (0x3190, "V"), + (0x3192, "M", "一"), + (0x3193, "M", "二"), + (0x3194, "M", "三"), + (0x3195, "M", "四"), + (0x3196, "M", "上"), + (0x3197, "M", "中"), + (0x3198, "M", "下"), + (0x3199, "M", "甲"), + (0x319A, "M", "乙"), + (0x319B, "M", "丙"), + (0x319C, "M", "丁"), + (0x319D, "M", "天"), + (0x319E, "M", "地"), + (0x319F, "M", "人"), + (0x31A0, "V"), + (0x31E6, "X"), + (0x31F0, "V"), + (0x3200, "M", "(ᄀ)"), + (0x3201, "M", "(ᄂ)"), + (0x3202, "M", "(ᄃ)"), + (0x3203, "M", "(ᄅ)"), + (0x3204, "M", "(ᄆ)"), + (0x3205, "M", "(ᄇ)"), + (0x3206, "M", "(ᄉ)"), + (0x3207, "M", "(ᄋ)"), + (0x3208, "M", "(ᄌ)"), + (0x3209, "M", "(ᄎ)"), + (0x320A, "M", "(ᄏ)"), + (0x320B, "M", "(ᄐ)"), + (0x320C, "M", "(ᄑ)"), + (0x320D, "M", "(ᄒ)"), + (0x320E, "M", "(가)"), + (0x320F, "M", "(나)"), + (0x3210, "M", "(다)"), + (0x3211, "M", "(라)"), + (0x3212, "M", "(마)"), + (0x3213, "M", "(바)"), + (0x3214, "M", "(사)"), + (0x3215, "M", "(아)"), + (0x3216, "M", "(자)"), + (0x3217, "M", "(차)"), + (0x3218, "M", "(카)"), + (0x3219, "M", "(타)"), + (0x321A, "M", "(파)"), + (0x321B, "M", "(하)"), + (0x321C, "M", "(주)"), + (0x321D, "M", "(오전)"), + (0x321E, "M", "(오후)"), + (0x321F, "X"), + (0x3220, "M", "(一)"), + (0x3221, "M", "(二)"), + (0x3222, "M", "(三)"), + (0x3223, "M", "(四)"), + (0x3224, "M", "(五)"), + (0x3225, "M", "(六)"), + (0x3226, "M", "(七)"), + (0x3227, "M", "(八)"), + (0x3228, "M", "(九)"), + (0x3229, "M", "(十)"), + ] + + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x322A, "M", "(月)"), + (0x322B, "M", "(火)"), + (0x322C, "M", "(水)"), + (0x322D, "M", "(木)"), + (0x322E, "M", "(金)"), + (0x322F, "M", "(土)"), + (0x3230, "M", "(日)"), + (0x3231, "M", "(株)"), + (0x3232, "M", "(有)"), + (0x3233, "M", "(社)"), + (0x3234, "M", "(名)"), + (0x3235, "M", "(特)"), + (0x3236, "M", "(財)"), + (0x3237, "M", "(祝)"), + (0x3238, "M", "(労)"), + (0x3239, "M", "(代)"), + (0x323A, "M", "(呼)"), + (0x323B, "M", "(学)"), + (0x323C, "M", "(監)"), + (0x323D, "M", "(企)"), + (0x323E, "M", "(資)"), + (0x323F, "M", 
"(協)"), + (0x3240, "M", "(祭)"), + (0x3241, "M", "(休)"), + (0x3242, "M", "(自)"), + (0x3243, "M", "(至)"), + (0x3244, "M", "問"), + (0x3245, "M", "幼"), + (0x3246, "M", "文"), + (0x3247, "M", "箏"), + (0x3248, "V"), + (0x3250, "M", "pte"), + (0x3251, "M", "21"), + (0x3252, "M", "22"), + (0x3253, "M", "23"), + (0x3254, "M", "24"), + (0x3255, "M", "25"), + (0x3256, "M", "26"), + (0x3257, "M", "27"), + (0x3258, "M", "28"), + (0x3259, "M", "29"), + (0x325A, "M", "30"), + (0x325B, "M", "31"), + (0x325C, "M", "32"), + (0x325D, "M", "33"), + (0x325E, "M", "34"), + (0x325F, "M", "35"), + (0x3260, "M", "ᄀ"), + (0x3261, "M", "ᄂ"), + (0x3262, "M", "ᄃ"), + (0x3263, "M", "ᄅ"), + (0x3264, "M", "ᄆ"), + (0x3265, "M", "ᄇ"), + (0x3266, "M", "ᄉ"), + (0x3267, "M", "ᄋ"), + (0x3268, "M", "ᄌ"), + (0x3269, "M", "ᄎ"), + (0x326A, "M", "ᄏ"), + (0x326B, "M", "ᄐ"), + (0x326C, "M", "ᄑ"), + (0x326D, "M", "ᄒ"), + (0x326E, "M", "가"), + (0x326F, "M", "나"), + (0x3270, "M", "다"), + (0x3271, "M", "라"), + (0x3272, "M", "마"), + (0x3273, "M", "바"), + (0x3274, "M", "사"), + (0x3275, "M", "아"), + (0x3276, "M", "자"), + (0x3277, "M", "차"), + (0x3278, "M", "카"), + (0x3279, "M", "타"), + (0x327A, "M", "파"), + (0x327B, "M", "하"), + (0x327C, "M", "참고"), + (0x327D, "M", "주의"), + (0x327E, "M", "우"), + (0x327F, "V"), + (0x3280, "M", "一"), + (0x3281, "M", "二"), + (0x3282, "M", "三"), + (0x3283, "M", "四"), + (0x3284, "M", "五"), + (0x3285, "M", "六"), + (0x3286, "M", "七"), + (0x3287, "M", "八"), + (0x3288, "M", "九"), + (0x3289, "M", "十"), + (0x328A, "M", "月"), + (0x328B, "M", "火"), + (0x328C, "M", "水"), + (0x328D, "M", "木"), + (0x328E, "M", "金"), + (0x328F, "M", "土"), + (0x3290, "M", "日"), + (0x3291, "M", "株"), + (0x3292, "M", "有"), + (0x3293, "M", "社"), + (0x3294, "M", "名"), + ] + + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3295, "M", "特"), + (0x3296, "M", "財"), + (0x3297, "M", "祝"), + (0x3298, "M", "労"), + (0x3299, "M", "秘"), + (0x329A, "M", "男"), + (0x329B, "M", "女"), + (0x329C, "M", "適"), + (0x329D, "M", "優"), + (0x329E, "M", "印"), + (0x329F, "M", "注"), + (0x32A0, "M", "項"), + (0x32A1, "M", "休"), + (0x32A2, "M", "写"), + (0x32A3, "M", "正"), + (0x32A4, "M", "上"), + (0x32A5, "M", "中"), + (0x32A6, "M", "下"), + (0x32A7, "M", "左"), + (0x32A8, "M", "右"), + (0x32A9, "M", "医"), + (0x32AA, "M", "宗"), + (0x32AB, "M", "学"), + (0x32AC, "M", "監"), + (0x32AD, "M", "企"), + (0x32AE, "M", "資"), + (0x32AF, "M", "協"), + (0x32B0, "M", "夜"), + (0x32B1, "M", "36"), + (0x32B2, "M", "37"), + (0x32B3, "M", "38"), + (0x32B4, "M", "39"), + (0x32B5, "M", "40"), + (0x32B6, "M", "41"), + (0x32B7, "M", "42"), + (0x32B8, "M", "43"), + (0x32B9, "M", "44"), + (0x32BA, "M", "45"), + (0x32BB, "M", "46"), + (0x32BC, "M", "47"), + (0x32BD, "M", "48"), + (0x32BE, "M", "49"), + (0x32BF, "M", "50"), + (0x32C0, "M", "1月"), + (0x32C1, "M", "2月"), + (0x32C2, "M", "3月"), + (0x32C3, "M", "4月"), + (0x32C4, "M", "5月"), + (0x32C5, "M", "6月"), + (0x32C6, "M", "7月"), + (0x32C7, "M", "8月"), + (0x32C8, "M", "9月"), + (0x32C9, "M", "10月"), + (0x32CA, "M", "11月"), + (0x32CB, "M", "12月"), + (0x32CC, "M", "hg"), + (0x32CD, "M", "erg"), + (0x32CE, "M", "ev"), + (0x32CF, "M", "ltd"), + (0x32D0, "M", "ア"), + (0x32D1, "M", "イ"), + (0x32D2, "M", "ウ"), + (0x32D3, "M", "エ"), + (0x32D4, "M", "オ"), + (0x32D5, "M", "カ"), + (0x32D6, "M", "キ"), + (0x32D7, "M", "ク"), + (0x32D8, "M", "ケ"), + (0x32D9, "M", "コ"), + (0x32DA, "M", "サ"), + (0x32DB, "M", "シ"), + (0x32DC, "M", "ス"), + (0x32DD, "M", "セ"), + (0x32DE, "M", "ソ"), + (0x32DF, "M", "タ"), + (0x32E0, "M", "チ"), + (0x32E1, "M", 
"ツ"), + (0x32E2, "M", "テ"), + (0x32E3, "M", "ト"), + (0x32E4, "M", "ナ"), + (0x32E5, "M", "ニ"), + (0x32E6, "M", "ヌ"), + (0x32E7, "M", "ネ"), + (0x32E8, "M", "ノ"), + (0x32E9, "M", "ハ"), + (0x32EA, "M", "ヒ"), + (0x32EB, "M", "フ"), + (0x32EC, "M", "ヘ"), + (0x32ED, "M", "ホ"), + (0x32EE, "M", "マ"), + (0x32EF, "M", "ミ"), + (0x32F0, "M", "ム"), + (0x32F1, "M", "メ"), + (0x32F2, "M", "モ"), + (0x32F3, "M", "ヤ"), + (0x32F4, "M", "ユ"), + (0x32F5, "M", "ヨ"), + (0x32F6, "M", "ラ"), + (0x32F7, "M", "リ"), + (0x32F8, "M", "ル"), + ] + + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32F9, "M", "レ"), + (0x32FA, "M", "ロ"), + (0x32FB, "M", "ワ"), + (0x32FC, "M", "ヰ"), + (0x32FD, "M", "ヱ"), + (0x32FE, "M", "ヲ"), + (0x32FF, "M", "令和"), + (0x3300, "M", "アパート"), + (0x3301, "M", "アルファ"), + (0x3302, "M", "アンペア"), + (0x3303, "M", "アール"), + (0x3304, "M", "イニング"), + (0x3305, "M", "インチ"), + (0x3306, "M", "ウォン"), + (0x3307, "M", "エスクード"), + (0x3308, "M", "エーカー"), + (0x3309, "M", "オンス"), + (0x330A, "M", "オーム"), + (0x330B, "M", "カイリ"), + (0x330C, "M", "カラット"), + (0x330D, "M", "カロリー"), + (0x330E, "M", "ガロン"), + (0x330F, "M", "ガンマ"), + (0x3310, "M", "ギガ"), + (0x3311, "M", "ギニー"), + (0x3312, "M", "キュリー"), + (0x3313, "M", "ギルダー"), + (0x3314, "M", "キロ"), + (0x3315, "M", "キログラム"), + (0x3316, "M", "キロメートル"), + (0x3317, "M", "キロワット"), + (0x3318, "M", "グラム"), + (0x3319, "M", "グラムトン"), + (0x331A, "M", "クルゼイロ"), + (0x331B, "M", "クローネ"), + (0x331C, "M", "ケース"), + (0x331D, "M", "コルナ"), + (0x331E, "M", "コーポ"), + (0x331F, "M", "サイクル"), + (0x3320, "M", "サンチーム"), + (0x3321, "M", "シリング"), + (0x3322, "M", "センチ"), + (0x3323, "M", "セント"), + (0x3324, "M", "ダース"), + (0x3325, "M", "デシ"), + (0x3326, "M", "ドル"), + (0x3327, "M", "トン"), + (0x3328, "M", "ナノ"), + (0x3329, "M", "ノット"), + (0x332A, "M", "ハイツ"), + (0x332B, "M", "パーセント"), + (0x332C, "M", "パーツ"), + (0x332D, "M", "バーレル"), + (0x332E, "M", "ピアストル"), + (0x332F, "M", "ピクル"), + (0x3330, "M", "ピコ"), + (0x3331, "M", "ビル"), + (0x3332, "M", "ファラッド"), + (0x3333, "M", "フィート"), + (0x3334, "M", "ブッシェル"), + (0x3335, "M", "フラン"), + (0x3336, "M", "ヘクタール"), + (0x3337, "M", "ペソ"), + (0x3338, "M", "ペニヒ"), + (0x3339, "M", "ヘルツ"), + (0x333A, "M", "ペンス"), + (0x333B, "M", "ページ"), + (0x333C, "M", "ベータ"), + (0x333D, "M", "ポイント"), + (0x333E, "M", "ボルト"), + (0x333F, "M", "ホン"), + (0x3340, "M", "ポンド"), + (0x3341, "M", "ホール"), + (0x3342, "M", "ホーン"), + (0x3343, "M", "マイクロ"), + (0x3344, "M", "マイル"), + (0x3345, "M", "マッハ"), + (0x3346, "M", "マルク"), + (0x3347, "M", "マンション"), + (0x3348, "M", "ミクロン"), + (0x3349, "M", "ミリ"), + (0x334A, "M", "ミリバール"), + (0x334B, "M", "メガ"), + (0x334C, "M", "メガトン"), + (0x334D, "M", "メートル"), + (0x334E, "M", "ヤード"), + (0x334F, "M", "ヤール"), + (0x3350, "M", "ユアン"), + (0x3351, "M", "リットル"), + (0x3352, "M", "リラ"), + (0x3353, "M", "ルピー"), + (0x3354, "M", "ルーブル"), + (0x3355, "M", "レム"), + (0x3356, "M", "レントゲン"), + (0x3357, "M", "ワット"), + (0x3358, "M", "0点"), + (0x3359, "M", "1点"), + (0x335A, "M", "2点"), + (0x335B, "M", "3点"), + (0x335C, "M", "4点"), + ] + + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x335D, "M", "5点"), + (0x335E, "M", "6点"), + (0x335F, "M", "7点"), + (0x3360, "M", "8点"), + (0x3361, "M", "9点"), + (0x3362, "M", "10点"), + (0x3363, "M", "11点"), + (0x3364, "M", "12点"), + (0x3365, "M", "13点"), + (0x3366, "M", "14点"), + (0x3367, "M", "15点"), + (0x3368, "M", "16点"), + (0x3369, "M", "17点"), + (0x336A, "M", "18点"), + (0x336B, "M", "19点"), + (0x336C, "M", "20点"), + (0x336D, "M", "21点"), + (0x336E, "M", "22点"), + (0x336F, "M", 
"23点"), + (0x3370, "M", "24点"), + (0x3371, "M", "hpa"), + (0x3372, "M", "da"), + (0x3373, "M", "au"), + (0x3374, "M", "bar"), + (0x3375, "M", "ov"), + (0x3376, "M", "pc"), + (0x3377, "M", "dm"), + (0x3378, "M", "dm2"), + (0x3379, "M", "dm3"), + (0x337A, "M", "iu"), + (0x337B, "M", "平成"), + (0x337C, "M", "昭和"), + (0x337D, "M", "大正"), + (0x337E, "M", "明治"), + (0x337F, "M", "株式会社"), + (0x3380, "M", "pa"), + (0x3381, "M", "na"), + (0x3382, "M", "μa"), + (0x3383, "M", "ma"), + (0x3384, "M", "ka"), + (0x3385, "M", "kb"), + (0x3386, "M", "mb"), + (0x3387, "M", "gb"), + (0x3388, "M", "cal"), + (0x3389, "M", "kcal"), + (0x338A, "M", "pf"), + (0x338B, "M", "nf"), + (0x338C, "M", "μf"), + (0x338D, "M", "μg"), + (0x338E, "M", "mg"), + (0x338F, "M", "kg"), + (0x3390, "M", "hz"), + (0x3391, "M", "khz"), + (0x3392, "M", "mhz"), + (0x3393, "M", "ghz"), + (0x3394, "M", "thz"), + (0x3395, "M", "μl"), + (0x3396, "M", "ml"), + (0x3397, "M", "dl"), + (0x3398, "M", "kl"), + (0x3399, "M", "fm"), + (0x339A, "M", "nm"), + (0x339B, "M", "μm"), + (0x339C, "M", "mm"), + (0x339D, "M", "cm"), + (0x339E, "M", "km"), + (0x339F, "M", "mm2"), + (0x33A0, "M", "cm2"), + (0x33A1, "M", "m2"), + (0x33A2, "M", "km2"), + (0x33A3, "M", "mm3"), + (0x33A4, "M", "cm3"), + (0x33A5, "M", "m3"), + (0x33A6, "M", "km3"), + (0x33A7, "M", "m∕s"), + (0x33A8, "M", "m∕s2"), + (0x33A9, "M", "pa"), + (0x33AA, "M", "kpa"), + (0x33AB, "M", "mpa"), + (0x33AC, "M", "gpa"), + (0x33AD, "M", "rad"), + (0x33AE, "M", "rad∕s"), + (0x33AF, "M", "rad∕s2"), + (0x33B0, "M", "ps"), + (0x33B1, "M", "ns"), + (0x33B2, "M", "μs"), + (0x33B3, "M", "ms"), + (0x33B4, "M", "pv"), + (0x33B5, "M", "nv"), + (0x33B6, "M", "μv"), + (0x33B7, "M", "mv"), + (0x33B8, "M", "kv"), + (0x33B9, "M", "mv"), + (0x33BA, "M", "pw"), + (0x33BB, "M", "nw"), + (0x33BC, "M", "μw"), + (0x33BD, "M", "mw"), + (0x33BE, "M", "kw"), + (0x33BF, "M", "mw"), + (0x33C0, "M", "kω"), + ] + + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33C1, "M", "mω"), + (0x33C2, "X"), + (0x33C3, "M", "bq"), + (0x33C4, "M", "cc"), + (0x33C5, "M", "cd"), + (0x33C6, "M", "c∕kg"), + (0x33C7, "X"), + (0x33C8, "M", "db"), + (0x33C9, "M", "gy"), + (0x33CA, "M", "ha"), + (0x33CB, "M", "hp"), + (0x33CC, "M", "in"), + (0x33CD, "M", "kk"), + (0x33CE, "M", "km"), + (0x33CF, "M", "kt"), + (0x33D0, "M", "lm"), + (0x33D1, "M", "ln"), + (0x33D2, "M", "log"), + (0x33D3, "M", "lx"), + (0x33D4, "M", "mb"), + (0x33D5, "M", "mil"), + (0x33D6, "M", "mol"), + (0x33D7, "M", "ph"), + (0x33D8, "X"), + (0x33D9, "M", "ppm"), + (0x33DA, "M", "pr"), + (0x33DB, "M", "sr"), + (0x33DC, "M", "sv"), + (0x33DD, "M", "wb"), + (0x33DE, "M", "v∕m"), + (0x33DF, "M", "a∕m"), + (0x33E0, "M", "1日"), + (0x33E1, "M", "2日"), + (0x33E2, "M", "3日"), + (0x33E3, "M", "4日"), + (0x33E4, "M", "5日"), + (0x33E5, "M", "6日"), + (0x33E6, "M", "7日"), + (0x33E7, "M", "8日"), + (0x33E8, "M", "9日"), + (0x33E9, "M", "10日"), + (0x33EA, "M", "11日"), + (0x33EB, "M", "12日"), + (0x33EC, "M", "13日"), + (0x33ED, "M", "14日"), + (0x33EE, "M", "15日"), + (0x33EF, "M", "16日"), + (0x33F0, "M", "17日"), + (0x33F1, "M", "18日"), + (0x33F2, "M", "19日"), + (0x33F3, "M", "20日"), + (0x33F4, "M", "21日"), + (0x33F5, "M", "22日"), + (0x33F6, "M", "23日"), + (0x33F7, "M", "24日"), + (0x33F8, "M", "25日"), + (0x33F9, "M", "26日"), + (0x33FA, "M", "27日"), + (0x33FB, "M", "28日"), + (0x33FC, "M", "29日"), + (0x33FD, "M", "30日"), + (0x33FE, "M", "31日"), + (0x33FF, "M", "gal"), + (0x3400, "V"), + (0xA48D, "X"), + (0xA490, "V"), + (0xA4C7, "X"), + (0xA4D0, "V"), + (0xA62C, 
"X"), + (0xA640, "M", "ꙁ"), + (0xA641, "V"), + (0xA642, "M", "ꙃ"), + (0xA643, "V"), + (0xA644, "M", "ꙅ"), + (0xA645, "V"), + (0xA646, "M", "ꙇ"), + (0xA647, "V"), + (0xA648, "M", "ꙉ"), + (0xA649, "V"), + (0xA64A, "M", "ꙋ"), + (0xA64B, "V"), + (0xA64C, "M", "ꙍ"), + (0xA64D, "V"), + (0xA64E, "M", "ꙏ"), + (0xA64F, "V"), + (0xA650, "M", "ꙑ"), + (0xA651, "V"), + (0xA652, "M", "ꙓ"), + (0xA653, "V"), + (0xA654, "M", "ꙕ"), + (0xA655, "V"), + (0xA656, "M", "ꙗ"), + (0xA657, "V"), + (0xA658, "M", "ꙙ"), + (0xA659, "V"), + (0xA65A, "M", "ꙛ"), + (0xA65B, "V"), + (0xA65C, "M", "ꙝ"), + (0xA65D, "V"), + (0xA65E, "M", "ꙟ"), + ] + + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA65F, "V"), + (0xA660, "M", "ꙡ"), + (0xA661, "V"), + (0xA662, "M", "ꙣ"), + (0xA663, "V"), + (0xA664, "M", "ꙥ"), + (0xA665, "V"), + (0xA666, "M", "ꙧ"), + (0xA667, "V"), + (0xA668, "M", "ꙩ"), + (0xA669, "V"), + (0xA66A, "M", "ꙫ"), + (0xA66B, "V"), + (0xA66C, "M", "ꙭ"), + (0xA66D, "V"), + (0xA680, "M", "ꚁ"), + (0xA681, "V"), + (0xA682, "M", "ꚃ"), + (0xA683, "V"), + (0xA684, "M", "ꚅ"), + (0xA685, "V"), + (0xA686, "M", "ꚇ"), + (0xA687, "V"), + (0xA688, "M", "ꚉ"), + (0xA689, "V"), + (0xA68A, "M", "ꚋ"), + (0xA68B, "V"), + (0xA68C, "M", "ꚍ"), + (0xA68D, "V"), + (0xA68E, "M", "ꚏ"), + (0xA68F, "V"), + (0xA690, "M", "ꚑ"), + (0xA691, "V"), + (0xA692, "M", "ꚓ"), + (0xA693, "V"), + (0xA694, "M", "ꚕ"), + (0xA695, "V"), + (0xA696, "M", "ꚗ"), + (0xA697, "V"), + (0xA698, "M", "ꚙ"), + (0xA699, "V"), + (0xA69A, "M", "ꚛ"), + (0xA69B, "V"), + (0xA69C, "M", "ъ"), + (0xA69D, "M", "ь"), + (0xA69E, "V"), + (0xA6F8, "X"), + (0xA700, "V"), + (0xA722, "M", "ꜣ"), + (0xA723, "V"), + (0xA724, "M", "ꜥ"), + (0xA725, "V"), + (0xA726, "M", "ꜧ"), + (0xA727, "V"), + (0xA728, "M", "ꜩ"), + (0xA729, "V"), + (0xA72A, "M", "ꜫ"), + (0xA72B, "V"), + (0xA72C, "M", "ꜭ"), + (0xA72D, "V"), + (0xA72E, "M", "ꜯ"), + (0xA72F, "V"), + (0xA732, "M", "ꜳ"), + (0xA733, "V"), + (0xA734, "M", "ꜵ"), + (0xA735, "V"), + (0xA736, "M", "ꜷ"), + (0xA737, "V"), + (0xA738, "M", "ꜹ"), + (0xA739, "V"), + (0xA73A, "M", "ꜻ"), + (0xA73B, "V"), + (0xA73C, "M", "ꜽ"), + (0xA73D, "V"), + (0xA73E, "M", "ꜿ"), + (0xA73F, "V"), + (0xA740, "M", "ꝁ"), + (0xA741, "V"), + (0xA742, "M", "ꝃ"), + (0xA743, "V"), + (0xA744, "M", "ꝅ"), + (0xA745, "V"), + (0xA746, "M", "ꝇ"), + (0xA747, "V"), + (0xA748, "M", "ꝉ"), + (0xA749, "V"), + (0xA74A, "M", "ꝋ"), + (0xA74B, "V"), + (0xA74C, "M", "ꝍ"), + (0xA74D, "V"), + (0xA74E, "M", "ꝏ"), + (0xA74F, "V"), + (0xA750, "M", "ꝑ"), + (0xA751, "V"), + (0xA752, "M", "ꝓ"), + (0xA753, "V"), + (0xA754, "M", "ꝕ"), + (0xA755, "V"), + (0xA756, "M", "ꝗ"), + (0xA757, "V"), + ] + + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA758, "M", "ꝙ"), + (0xA759, "V"), + (0xA75A, "M", "ꝛ"), + (0xA75B, "V"), + (0xA75C, "M", "ꝝ"), + (0xA75D, "V"), + (0xA75E, "M", "ꝟ"), + (0xA75F, "V"), + (0xA760, "M", "ꝡ"), + (0xA761, "V"), + (0xA762, "M", "ꝣ"), + (0xA763, "V"), + (0xA764, "M", "ꝥ"), + (0xA765, "V"), + (0xA766, "M", "ꝧ"), + (0xA767, "V"), + (0xA768, "M", "ꝩ"), + (0xA769, "V"), + (0xA76A, "M", "ꝫ"), + (0xA76B, "V"), + (0xA76C, "M", "ꝭ"), + (0xA76D, "V"), + (0xA76E, "M", "ꝯ"), + (0xA76F, "V"), + (0xA770, "M", "ꝯ"), + (0xA771, "V"), + (0xA779, "M", "ꝺ"), + (0xA77A, "V"), + (0xA77B, "M", "ꝼ"), + (0xA77C, "V"), + (0xA77D, "M", "ᵹ"), + (0xA77E, "M", "ꝿ"), + (0xA77F, "V"), + (0xA780, "M", "ꞁ"), + (0xA781, "V"), + (0xA782, "M", "ꞃ"), + (0xA783, "V"), + (0xA784, "M", "ꞅ"), + (0xA785, "V"), + (0xA786, "M", "ꞇ"), + (0xA787, "V"), + (0xA78B, "M", 
"ꞌ"), + (0xA78C, "V"), + (0xA78D, "M", "ɥ"), + (0xA78E, "V"), + (0xA790, "M", "ꞑ"), + (0xA791, "V"), + (0xA792, "M", "ꞓ"), + (0xA793, "V"), + (0xA796, "M", "ꞗ"), + (0xA797, "V"), + (0xA798, "M", "ꞙ"), + (0xA799, "V"), + (0xA79A, "M", "ꞛ"), + (0xA79B, "V"), + (0xA79C, "M", "ꞝ"), + (0xA79D, "V"), + (0xA79E, "M", "ꞟ"), + (0xA79F, "V"), + (0xA7A0, "M", "ꞡ"), + (0xA7A1, "V"), + (0xA7A2, "M", "ꞣ"), + (0xA7A3, "V"), + (0xA7A4, "M", "ꞥ"), + (0xA7A5, "V"), + (0xA7A6, "M", "ꞧ"), + (0xA7A7, "V"), + (0xA7A8, "M", "ꞩ"), + (0xA7A9, "V"), + (0xA7AA, "M", "ɦ"), + (0xA7AB, "M", "ɜ"), + (0xA7AC, "M", "ɡ"), + (0xA7AD, "M", "ɬ"), + (0xA7AE, "M", "ɪ"), + (0xA7AF, "V"), + (0xA7B0, "M", "ʞ"), + (0xA7B1, "M", "ʇ"), + (0xA7B2, "M", "ʝ"), + (0xA7B3, "M", "ꭓ"), + (0xA7B4, "M", "ꞵ"), + (0xA7B5, "V"), + (0xA7B6, "M", "ꞷ"), + (0xA7B7, "V"), + (0xA7B8, "M", "ꞹ"), + (0xA7B9, "V"), + (0xA7BA, "M", "ꞻ"), + (0xA7BB, "V"), + (0xA7BC, "M", "ꞽ"), + (0xA7BD, "V"), + (0xA7BE, "M", "ꞿ"), + (0xA7BF, "V"), + (0xA7C0, "M", "ꟁ"), + (0xA7C1, "V"), + (0xA7C2, "M", "ꟃ"), + (0xA7C3, "V"), + (0xA7C4, "M", "ꞔ"), + (0xA7C5, "M", "ʂ"), + (0xA7C6, "M", "ᶎ"), + (0xA7C7, "M", "ꟈ"), + (0xA7C8, "V"), + ] + + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA7C9, "M", "ꟊ"), + (0xA7CA, "V"), + (0xA7CB, "M", "ɤ"), + (0xA7CC, "M", "ꟍ"), + (0xA7CD, "V"), + (0xA7CE, "X"), + (0xA7D0, "M", "ꟑ"), + (0xA7D1, "V"), + (0xA7D2, "X"), + (0xA7D3, "V"), + (0xA7D4, "X"), + (0xA7D5, "V"), + (0xA7D6, "M", "ꟗ"), + (0xA7D7, "V"), + (0xA7D8, "M", "ꟙ"), + (0xA7D9, "V"), + (0xA7DA, "M", "ꟛ"), + (0xA7DB, "V"), + (0xA7DC, "M", "ƛ"), + (0xA7DD, "X"), + (0xA7F2, "M", "c"), + (0xA7F3, "M", "f"), + (0xA7F4, "M", "q"), + (0xA7F5, "M", "ꟶ"), + (0xA7F6, "V"), + (0xA7F8, "M", "ħ"), + (0xA7F9, "M", "œ"), + (0xA7FA, "V"), + (0xA82D, "X"), + (0xA830, "V"), + (0xA83A, "X"), + (0xA840, "V"), + (0xA878, "X"), + (0xA880, "V"), + (0xA8C6, "X"), + (0xA8CE, "V"), + (0xA8DA, "X"), + (0xA8E0, "V"), + (0xA954, "X"), + (0xA95F, "V"), + (0xA97D, "X"), + (0xA980, "V"), + (0xA9CE, "X"), + (0xA9CF, "V"), + (0xA9DA, "X"), + (0xA9DE, "V"), + (0xA9FF, "X"), + (0xAA00, "V"), + (0xAA37, "X"), + (0xAA40, "V"), + (0xAA4E, "X"), + (0xAA50, "V"), + (0xAA5A, "X"), + (0xAA5C, "V"), + (0xAAC3, "X"), + (0xAADB, "V"), + (0xAAF7, "X"), + (0xAB01, "V"), + (0xAB07, "X"), + (0xAB09, "V"), + (0xAB0F, "X"), + (0xAB11, "V"), + (0xAB17, "X"), + (0xAB20, "V"), + (0xAB27, "X"), + (0xAB28, "V"), + (0xAB2F, "X"), + (0xAB30, "V"), + (0xAB5C, "M", "ꜧ"), + (0xAB5D, "M", "ꬷ"), + (0xAB5E, "M", "ɫ"), + (0xAB5F, "M", "ꭒ"), + (0xAB60, "V"), + (0xAB69, "M", "ʍ"), + (0xAB6A, "V"), + (0xAB6C, "X"), + (0xAB70, "M", "Ꭰ"), + (0xAB71, "M", "Ꭱ"), + (0xAB72, "M", "Ꭲ"), + (0xAB73, "M", "Ꭳ"), + (0xAB74, "M", "Ꭴ"), + (0xAB75, "M", "Ꭵ"), + (0xAB76, "M", "Ꭶ"), + (0xAB77, "M", "Ꭷ"), + (0xAB78, "M", "Ꭸ"), + (0xAB79, "M", "Ꭹ"), + (0xAB7A, "M", "Ꭺ"), + (0xAB7B, "M", "Ꭻ"), + (0xAB7C, "M", "Ꭼ"), + (0xAB7D, "M", "Ꭽ"), + (0xAB7E, "M", "Ꭾ"), + (0xAB7F, "M", "Ꭿ"), + (0xAB80, "M", "Ꮀ"), + (0xAB81, "M", "Ꮁ"), + (0xAB82, "M", "Ꮂ"), + (0xAB83, "M", "Ꮃ"), + (0xAB84, "M", "Ꮄ"), + (0xAB85, "M", "Ꮅ"), + (0xAB86, "M", "Ꮆ"), + (0xAB87, "M", "Ꮇ"), + ] + + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xAB88, "M", "Ꮈ"), + (0xAB89, "M", "Ꮉ"), + (0xAB8A, "M", "Ꮊ"), + (0xAB8B, "M", "Ꮋ"), + (0xAB8C, "M", "Ꮌ"), + (0xAB8D, "M", "Ꮍ"), + (0xAB8E, "M", "Ꮎ"), + (0xAB8F, "M", "Ꮏ"), + (0xAB90, "M", "Ꮐ"), + (0xAB91, "M", "Ꮑ"), + (0xAB92, "M", "Ꮒ"), + (0xAB93, "M", "Ꮓ"), + (0xAB94, "M", "Ꮔ"), + (0xAB95, 
"M", "Ꮕ"), + (0xAB96, "M", "Ꮖ"), + (0xAB97, "M", "Ꮗ"), + (0xAB98, "M", "Ꮘ"), + (0xAB99, "M", "Ꮙ"), + (0xAB9A, "M", "Ꮚ"), + (0xAB9B, "M", "Ꮛ"), + (0xAB9C, "M", "Ꮜ"), + (0xAB9D, "M", "Ꮝ"), + (0xAB9E, "M", "Ꮞ"), + (0xAB9F, "M", "Ꮟ"), + (0xABA0, "M", "Ꮠ"), + (0xABA1, "M", "Ꮡ"), + (0xABA2, "M", "Ꮢ"), + (0xABA3, "M", "Ꮣ"), + (0xABA4, "M", "Ꮤ"), + (0xABA5, "M", "Ꮥ"), + (0xABA6, "M", "Ꮦ"), + (0xABA7, "M", "Ꮧ"), + (0xABA8, "M", "Ꮨ"), + (0xABA9, "M", "Ꮩ"), + (0xABAA, "M", "Ꮪ"), + (0xABAB, "M", "Ꮫ"), + (0xABAC, "M", "Ꮬ"), + (0xABAD, "M", "Ꮭ"), + (0xABAE, "M", "Ꮮ"), + (0xABAF, "M", "Ꮯ"), + (0xABB0, "M", "Ꮰ"), + (0xABB1, "M", "Ꮱ"), + (0xABB2, "M", "Ꮲ"), + (0xABB3, "M", "Ꮳ"), + (0xABB4, "M", "Ꮴ"), + (0xABB5, "M", "Ꮵ"), + (0xABB6, "M", "Ꮶ"), + (0xABB7, "M", "Ꮷ"), + (0xABB8, "M", "Ꮸ"), + (0xABB9, "M", "Ꮹ"), + (0xABBA, "M", "Ꮺ"), + (0xABBB, "M", "Ꮻ"), + (0xABBC, "M", "Ꮼ"), + (0xABBD, "M", "Ꮽ"), + (0xABBE, "M", "Ꮾ"), + (0xABBF, "M", "Ꮿ"), + (0xABC0, "V"), + (0xABEE, "X"), + (0xABF0, "V"), + (0xABFA, "X"), + (0xAC00, "V"), + (0xD7A4, "X"), + (0xD7B0, "V"), + (0xD7C7, "X"), + (0xD7CB, "V"), + (0xD7FC, "X"), + (0xF900, "M", "豈"), + (0xF901, "M", "更"), + (0xF902, "M", "車"), + (0xF903, "M", "賈"), + (0xF904, "M", "滑"), + (0xF905, "M", "串"), + (0xF906, "M", "句"), + (0xF907, "M", "龜"), + (0xF909, "M", "契"), + (0xF90A, "M", "金"), + (0xF90B, "M", "喇"), + (0xF90C, "M", "奈"), + (0xF90D, "M", "懶"), + (0xF90E, "M", "癩"), + (0xF90F, "M", "羅"), + (0xF910, "M", "蘿"), + (0xF911, "M", "螺"), + (0xF912, "M", "裸"), + (0xF913, "M", "邏"), + (0xF914, "M", "樂"), + (0xF915, "M", "洛"), + (0xF916, "M", "烙"), + (0xF917, "M", "珞"), + (0xF918, "M", "落"), + (0xF919, "M", "酪"), + (0xF91A, "M", "駱"), + (0xF91B, "M", "亂"), + (0xF91C, "M", "卵"), + (0xF91D, "M", "欄"), + (0xF91E, "M", "爛"), + (0xF91F, "M", "蘭"), + (0xF920, "M", "鸞"), + (0xF921, "M", "嵐"), + (0xF922, "M", "濫"), + ] + + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF923, "M", "藍"), + (0xF924, "M", "襤"), + (0xF925, "M", "拉"), + (0xF926, "M", "臘"), + (0xF927, "M", "蠟"), + (0xF928, "M", "廊"), + (0xF929, "M", "朗"), + (0xF92A, "M", "浪"), + (0xF92B, "M", "狼"), + (0xF92C, "M", "郎"), + (0xF92D, "M", "來"), + (0xF92E, "M", "冷"), + (0xF92F, "M", "勞"), + (0xF930, "M", "擄"), + (0xF931, "M", "櫓"), + (0xF932, "M", "爐"), + (0xF933, "M", "盧"), + (0xF934, "M", "老"), + (0xF935, "M", "蘆"), + (0xF936, "M", "虜"), + (0xF937, "M", "路"), + (0xF938, "M", "露"), + (0xF939, "M", "魯"), + (0xF93A, "M", "鷺"), + (0xF93B, "M", "碌"), + (0xF93C, "M", "祿"), + (0xF93D, "M", "綠"), + (0xF93E, "M", "菉"), + (0xF93F, "M", "錄"), + (0xF940, "M", "鹿"), + (0xF941, "M", "論"), + (0xF942, "M", "壟"), + (0xF943, "M", "弄"), + (0xF944, "M", "籠"), + (0xF945, "M", "聾"), + (0xF946, "M", "牢"), + (0xF947, "M", "磊"), + (0xF948, "M", "賂"), + (0xF949, "M", "雷"), + (0xF94A, "M", "壘"), + (0xF94B, "M", "屢"), + (0xF94C, "M", "樓"), + (0xF94D, "M", "淚"), + (0xF94E, "M", "漏"), + (0xF94F, "M", "累"), + (0xF950, "M", "縷"), + (0xF951, "M", "陋"), + (0xF952, "M", "勒"), + (0xF953, "M", "肋"), + (0xF954, "M", "凜"), + (0xF955, "M", "凌"), + (0xF956, "M", "稜"), + (0xF957, "M", "綾"), + (0xF958, "M", "菱"), + (0xF959, "M", "陵"), + (0xF95A, "M", "讀"), + (0xF95B, "M", "拏"), + (0xF95C, "M", "樂"), + (0xF95D, "M", "諾"), + (0xF95E, "M", "丹"), + (0xF95F, "M", "寧"), + (0xF960, "M", "怒"), + (0xF961, "M", "率"), + (0xF962, "M", "異"), + (0xF963, "M", "北"), + (0xF964, "M", "磻"), + (0xF965, "M", "便"), + (0xF966, "M", "復"), + (0xF967, "M", "不"), + (0xF968, "M", "泌"), + (0xF969, "M", "數"), + (0xF96A, "M", "索"), + (0xF96B, "M", "參"), + 
(0xF96C, "M", "塞"), + (0xF96D, "M", "省"), + (0xF96E, "M", "葉"), + (0xF96F, "M", "說"), + (0xF970, "M", "殺"), + (0xF971, "M", "辰"), + (0xF972, "M", "沈"), + (0xF973, "M", "拾"), + (0xF974, "M", "若"), + (0xF975, "M", "掠"), + (0xF976, "M", "略"), + (0xF977, "M", "亮"), + (0xF978, "M", "兩"), + (0xF979, "M", "凉"), + (0xF97A, "M", "梁"), + (0xF97B, "M", "糧"), + (0xF97C, "M", "良"), + (0xF97D, "M", "諒"), + (0xF97E, "M", "量"), + (0xF97F, "M", "勵"), + (0xF980, "M", "呂"), + (0xF981, "M", "女"), + (0xF982, "M", "廬"), + (0xF983, "M", "旅"), + (0xF984, "M", "濾"), + (0xF985, "M", "礪"), + (0xF986, "M", "閭"), + ] + + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF987, "M", "驪"), + (0xF988, "M", "麗"), + (0xF989, "M", "黎"), + (0xF98A, "M", "力"), + (0xF98B, "M", "曆"), + (0xF98C, "M", "歷"), + (0xF98D, "M", "轢"), + (0xF98E, "M", "年"), + (0xF98F, "M", "憐"), + (0xF990, "M", "戀"), + (0xF991, "M", "撚"), + (0xF992, "M", "漣"), + (0xF993, "M", "煉"), + (0xF994, "M", "璉"), + (0xF995, "M", "秊"), + (0xF996, "M", "練"), + (0xF997, "M", "聯"), + (0xF998, "M", "輦"), + (0xF999, "M", "蓮"), + (0xF99A, "M", "連"), + (0xF99B, "M", "鍊"), + (0xF99C, "M", "列"), + (0xF99D, "M", "劣"), + (0xF99E, "M", "咽"), + (0xF99F, "M", "烈"), + (0xF9A0, "M", "裂"), + (0xF9A1, "M", "說"), + (0xF9A2, "M", "廉"), + (0xF9A3, "M", "念"), + (0xF9A4, "M", "捻"), + (0xF9A5, "M", "殮"), + (0xF9A6, "M", "簾"), + (0xF9A7, "M", "獵"), + (0xF9A8, "M", "令"), + (0xF9A9, "M", "囹"), + (0xF9AA, "M", "寧"), + (0xF9AB, "M", "嶺"), + (0xF9AC, "M", "怜"), + (0xF9AD, "M", "玲"), + (0xF9AE, "M", "瑩"), + (0xF9AF, "M", "羚"), + (0xF9B0, "M", "聆"), + (0xF9B1, "M", "鈴"), + (0xF9B2, "M", "零"), + (0xF9B3, "M", "靈"), + (0xF9B4, "M", "領"), + (0xF9B5, "M", "例"), + (0xF9B6, "M", "禮"), + (0xF9B7, "M", "醴"), + (0xF9B8, "M", "隸"), + (0xF9B9, "M", "惡"), + (0xF9BA, "M", "了"), + (0xF9BB, "M", "僚"), + (0xF9BC, "M", "寮"), + (0xF9BD, "M", "尿"), + (0xF9BE, "M", "料"), + (0xF9BF, "M", "樂"), + (0xF9C0, "M", "燎"), + (0xF9C1, "M", "療"), + (0xF9C2, "M", "蓼"), + (0xF9C3, "M", "遼"), + (0xF9C4, "M", "龍"), + (0xF9C5, "M", "暈"), + (0xF9C6, "M", "阮"), + (0xF9C7, "M", "劉"), + (0xF9C8, "M", "杻"), + (0xF9C9, "M", "柳"), + (0xF9CA, "M", "流"), + (0xF9CB, "M", "溜"), + (0xF9CC, "M", "琉"), + (0xF9CD, "M", "留"), + (0xF9CE, "M", "硫"), + (0xF9CF, "M", "紐"), + (0xF9D0, "M", "類"), + (0xF9D1, "M", "六"), + (0xF9D2, "M", "戮"), + (0xF9D3, "M", "陸"), + (0xF9D4, "M", "倫"), + (0xF9D5, "M", "崙"), + (0xF9D6, "M", "淪"), + (0xF9D7, "M", "輪"), + (0xF9D8, "M", "律"), + (0xF9D9, "M", "慄"), + (0xF9DA, "M", "栗"), + (0xF9DB, "M", "率"), + (0xF9DC, "M", "隆"), + (0xF9DD, "M", "利"), + (0xF9DE, "M", "吏"), + (0xF9DF, "M", "履"), + (0xF9E0, "M", "易"), + (0xF9E1, "M", "李"), + (0xF9E2, "M", "梨"), + (0xF9E3, "M", "泥"), + (0xF9E4, "M", "理"), + (0xF9E5, "M", "痢"), + (0xF9E6, "M", "罹"), + (0xF9E7, "M", "裏"), + (0xF9E8, "M", "裡"), + (0xF9E9, "M", "里"), + (0xF9EA, "M", "離"), + ] + + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9EB, "M", "匿"), + (0xF9EC, "M", "溺"), + (0xF9ED, "M", "吝"), + (0xF9EE, "M", "燐"), + (0xF9EF, "M", "璘"), + (0xF9F0, "M", "藺"), + (0xF9F1, "M", "隣"), + (0xF9F2, "M", "鱗"), + (0xF9F3, "M", "麟"), + (0xF9F4, "M", "林"), + (0xF9F5, "M", "淋"), + (0xF9F6, "M", "臨"), + (0xF9F7, "M", "立"), + (0xF9F8, "M", "笠"), + (0xF9F9, "M", "粒"), + (0xF9FA, "M", "狀"), + (0xF9FB, "M", "炙"), + (0xF9FC, "M", "識"), + (0xF9FD, "M", "什"), + (0xF9FE, "M", "茶"), + (0xF9FF, "M", "刺"), + (0xFA00, "M", "切"), + (0xFA01, "M", "度"), + (0xFA02, "M", "拓"), + (0xFA03, "M", "糖"), + (0xFA04, "M", "宅"), + (0xFA05, 
"M", "洞"), + (0xFA06, "M", "暴"), + (0xFA07, "M", "輻"), + (0xFA08, "M", "行"), + (0xFA09, "M", "降"), + (0xFA0A, "M", "見"), + (0xFA0B, "M", "廓"), + (0xFA0C, "M", "兀"), + (0xFA0D, "M", "嗀"), + (0xFA0E, "V"), + (0xFA10, "M", "塚"), + (0xFA11, "V"), + (0xFA12, "M", "晴"), + (0xFA13, "V"), + (0xFA15, "M", "凞"), + (0xFA16, "M", "猪"), + (0xFA17, "M", "益"), + (0xFA18, "M", "礼"), + (0xFA19, "M", "神"), + (0xFA1A, "M", "祥"), + (0xFA1B, "M", "福"), + (0xFA1C, "M", "靖"), + (0xFA1D, "M", "精"), + (0xFA1E, "M", "羽"), + (0xFA1F, "V"), + (0xFA20, "M", "蘒"), + (0xFA21, "V"), + (0xFA22, "M", "諸"), + (0xFA23, "V"), + (0xFA25, "M", "逸"), + (0xFA26, "M", "都"), + (0xFA27, "V"), + (0xFA2A, "M", "飯"), + (0xFA2B, "M", "飼"), + (0xFA2C, "M", "館"), + (0xFA2D, "M", "鶴"), + (0xFA2E, "M", "郞"), + (0xFA2F, "M", "隷"), + (0xFA30, "M", "侮"), + (0xFA31, "M", "僧"), + (0xFA32, "M", "免"), + (0xFA33, "M", "勉"), + (0xFA34, "M", "勤"), + (0xFA35, "M", "卑"), + (0xFA36, "M", "喝"), + (0xFA37, "M", "嘆"), + (0xFA38, "M", "器"), + (0xFA39, "M", "塀"), + (0xFA3A, "M", "墨"), + (0xFA3B, "M", "層"), + (0xFA3C, "M", "屮"), + (0xFA3D, "M", "悔"), + (0xFA3E, "M", "慨"), + (0xFA3F, "M", "憎"), + (0xFA40, "M", "懲"), + (0xFA41, "M", "敏"), + (0xFA42, "M", "既"), + (0xFA43, "M", "暑"), + (0xFA44, "M", "梅"), + (0xFA45, "M", "海"), + (0xFA46, "M", "渚"), + (0xFA47, "M", "漢"), + (0xFA48, "M", "煮"), + (0xFA49, "M", "爫"), + (0xFA4A, "M", "琢"), + (0xFA4B, "M", "碑"), + (0xFA4C, "M", "社"), + (0xFA4D, "M", "祉"), + (0xFA4E, "M", "祈"), + (0xFA4F, "M", "祐"), + (0xFA50, "M", "祖"), + (0xFA51, "M", "祝"), + (0xFA52, "M", "禍"), + (0xFA53, "M", "禎"), + ] + + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA54, "M", "穀"), + (0xFA55, "M", "突"), + (0xFA56, "M", "節"), + (0xFA57, "M", "練"), + (0xFA58, "M", "縉"), + (0xFA59, "M", "繁"), + (0xFA5A, "M", "署"), + (0xFA5B, "M", "者"), + (0xFA5C, "M", "臭"), + (0xFA5D, "M", "艹"), + (0xFA5F, "M", "著"), + (0xFA60, "M", "褐"), + (0xFA61, "M", "視"), + (0xFA62, "M", "謁"), + (0xFA63, "M", "謹"), + (0xFA64, "M", "賓"), + (0xFA65, "M", "贈"), + (0xFA66, "M", "辶"), + (0xFA67, "M", "逸"), + (0xFA68, "M", "難"), + (0xFA69, "M", "響"), + (0xFA6A, "M", "頻"), + (0xFA6B, "M", "恵"), + (0xFA6C, "M", "𤋮"), + (0xFA6D, "M", "舘"), + (0xFA6E, "X"), + (0xFA70, "M", "並"), + (0xFA71, "M", "况"), + (0xFA72, "M", "全"), + (0xFA73, "M", "侀"), + (0xFA74, "M", "充"), + (0xFA75, "M", "冀"), + (0xFA76, "M", "勇"), + (0xFA77, "M", "勺"), + (0xFA78, "M", "喝"), + (0xFA79, "M", "啕"), + (0xFA7A, "M", "喙"), + (0xFA7B, "M", "嗢"), + (0xFA7C, "M", "塚"), + (0xFA7D, "M", "墳"), + (0xFA7E, "M", "奄"), + (0xFA7F, "M", "奔"), + (0xFA80, "M", "婢"), + (0xFA81, "M", "嬨"), + (0xFA82, "M", "廒"), + (0xFA83, "M", "廙"), + (0xFA84, "M", "彩"), + (0xFA85, "M", "徭"), + (0xFA86, "M", "惘"), + (0xFA87, "M", "慎"), + (0xFA88, "M", "愈"), + (0xFA89, "M", "憎"), + (0xFA8A, "M", "慠"), + (0xFA8B, "M", "懲"), + (0xFA8C, "M", "戴"), + (0xFA8D, "M", "揄"), + (0xFA8E, "M", "搜"), + (0xFA8F, "M", "摒"), + (0xFA90, "M", "敖"), + (0xFA91, "M", "晴"), + (0xFA92, "M", "朗"), + (0xFA93, "M", "望"), + (0xFA94, "M", "杖"), + (0xFA95, "M", "歹"), + (0xFA96, "M", "殺"), + (0xFA97, "M", "流"), + (0xFA98, "M", "滛"), + (0xFA99, "M", "滋"), + (0xFA9A, "M", "漢"), + (0xFA9B, "M", "瀞"), + (0xFA9C, "M", "煮"), + (0xFA9D, "M", "瞧"), + (0xFA9E, "M", "爵"), + (0xFA9F, "M", "犯"), + (0xFAA0, "M", "猪"), + (0xFAA1, "M", "瑱"), + (0xFAA2, "M", "甆"), + (0xFAA3, "M", "画"), + (0xFAA4, "M", "瘝"), + (0xFAA5, "M", "瘟"), + (0xFAA6, "M", "益"), + (0xFAA7, "M", "盛"), + (0xFAA8, "M", "直"), + (0xFAA9, "M", "睊"), + (0xFAAA, "M", "着"), + (0xFAAB, "M", 
"磌"), + (0xFAAC, "M", "窱"), + (0xFAAD, "M", "節"), + (0xFAAE, "M", "类"), + (0xFAAF, "M", "絛"), + (0xFAB0, "M", "練"), + (0xFAB1, "M", "缾"), + (0xFAB2, "M", "者"), + (0xFAB3, "M", "荒"), + (0xFAB4, "M", "華"), + (0xFAB5, "M", "蝹"), + (0xFAB6, "M", "襁"), + (0xFAB7, "M", "覆"), + (0xFAB8, "M", "視"), + (0xFAB9, "M", "調"), + ] + + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFABA, "M", "諸"), + (0xFABB, "M", "請"), + (0xFABC, "M", "謁"), + (0xFABD, "M", "諾"), + (0xFABE, "M", "諭"), + (0xFABF, "M", "謹"), + (0xFAC0, "M", "變"), + (0xFAC1, "M", "贈"), + (0xFAC2, "M", "輸"), + (0xFAC3, "M", "遲"), + (0xFAC4, "M", "醙"), + (0xFAC5, "M", "鉶"), + (0xFAC6, "M", "陼"), + (0xFAC7, "M", "難"), + (0xFAC8, "M", "靖"), + (0xFAC9, "M", "韛"), + (0xFACA, "M", "響"), + (0xFACB, "M", "頋"), + (0xFACC, "M", "頻"), + (0xFACD, "M", "鬒"), + (0xFACE, "M", "龜"), + (0xFACF, "M", "𢡊"), + (0xFAD0, "M", "𢡄"), + (0xFAD1, "M", "𣏕"), + (0xFAD2, "M", "㮝"), + (0xFAD3, "M", "䀘"), + (0xFAD4, "M", "䀹"), + (0xFAD5, "M", "𥉉"), + (0xFAD6, "M", "𥳐"), + (0xFAD7, "M", "𧻓"), + (0xFAD8, "M", "齃"), + (0xFAD9, "M", "龎"), + (0xFADA, "X"), + (0xFB00, "M", "ff"), + (0xFB01, "M", "fi"), + (0xFB02, "M", "fl"), + (0xFB03, "M", "ffi"), + (0xFB04, "M", "ffl"), + (0xFB05, "M", "st"), + (0xFB07, "X"), + (0xFB13, "M", "մն"), + (0xFB14, "M", "մե"), + (0xFB15, "M", "մի"), + (0xFB16, "M", "վն"), + (0xFB17, "M", "մխ"), + (0xFB18, "X"), + (0xFB1D, "M", "יִ"), + (0xFB1E, "V"), + (0xFB1F, "M", "ײַ"), + (0xFB20, "M", "ע"), + (0xFB21, "M", "א"), + (0xFB22, "M", "ד"), + (0xFB23, "M", "ה"), + (0xFB24, "M", "כ"), + (0xFB25, "M", "ל"), + (0xFB26, "M", "ם"), + (0xFB27, "M", "ר"), + (0xFB28, "M", "ת"), + (0xFB29, "M", "+"), + (0xFB2A, "M", "שׁ"), + (0xFB2B, "M", "שׂ"), + (0xFB2C, "M", "שּׁ"), + (0xFB2D, "M", "שּׂ"), + (0xFB2E, "M", "אַ"), + (0xFB2F, "M", "אָ"), + (0xFB30, "M", "אּ"), + (0xFB31, "M", "בּ"), + (0xFB32, "M", "גּ"), + (0xFB33, "M", "דּ"), + (0xFB34, "M", "הּ"), + (0xFB35, "M", "וּ"), + (0xFB36, "M", "זּ"), + (0xFB37, "X"), + (0xFB38, "M", "טּ"), + (0xFB39, "M", "יּ"), + (0xFB3A, "M", "ךּ"), + (0xFB3B, "M", "כּ"), + (0xFB3C, "M", "לּ"), + (0xFB3D, "X"), + (0xFB3E, "M", "מּ"), + (0xFB3F, "X"), + (0xFB40, "M", "נּ"), + (0xFB41, "M", "סּ"), + (0xFB42, "X"), + (0xFB43, "M", "ףּ"), + (0xFB44, "M", "פּ"), + (0xFB45, "X"), + (0xFB46, "M", "צּ"), + (0xFB47, "M", "קּ"), + (0xFB48, "M", "רּ"), + (0xFB49, "M", "שּ"), + (0xFB4A, "M", "תּ"), + (0xFB4B, "M", "וֹ"), + (0xFB4C, "M", "בֿ"), + (0xFB4D, "M", "כֿ"), + (0xFB4E, "M", "פֿ"), + (0xFB4F, "M", "אל"), + (0xFB50, "M", "ٱ"), + (0xFB52, "M", "ٻ"), + (0xFB56, "M", "پ"), + ] + + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB5A, "M", "ڀ"), + (0xFB5E, "M", "ٺ"), + (0xFB62, "M", "ٿ"), + (0xFB66, "M", "ٹ"), + (0xFB6A, "M", "ڤ"), + (0xFB6E, "M", "ڦ"), + (0xFB72, "M", "ڄ"), + (0xFB76, "M", "ڃ"), + (0xFB7A, "M", "چ"), + (0xFB7E, "M", "ڇ"), + (0xFB82, "M", "ڍ"), + (0xFB84, "M", "ڌ"), + (0xFB86, "M", "ڎ"), + (0xFB88, "M", "ڈ"), + (0xFB8A, "M", "ژ"), + (0xFB8C, "M", "ڑ"), + (0xFB8E, "M", "ک"), + (0xFB92, "M", "گ"), + (0xFB96, "M", "ڳ"), + (0xFB9A, "M", "ڱ"), + (0xFB9E, "M", "ں"), + (0xFBA0, "M", "ڻ"), + (0xFBA4, "M", "ۀ"), + (0xFBA6, "M", "ہ"), + (0xFBAA, "M", "ھ"), + (0xFBAE, "M", "ے"), + (0xFBB0, "M", "ۓ"), + (0xFBB2, "V"), + (0xFBC3, "X"), + (0xFBD3, "M", "ڭ"), + (0xFBD7, "M", "ۇ"), + (0xFBD9, "M", "ۆ"), + (0xFBDB, "M", "ۈ"), + (0xFBDD, "M", "ۇٴ"), + (0xFBDE, "M", "ۋ"), + (0xFBE0, "M", "ۅ"), + (0xFBE2, "M", "ۉ"), + (0xFBE4, "M", "ې"), + (0xFBE8, "M", "ى"), + 
(0xFBEA, "M", "ئا"), + (0xFBEC, "M", "ئە"), + (0xFBEE, "M", "ئو"), + (0xFBF0, "M", "ئۇ"), + (0xFBF2, "M", "ئۆ"), + (0xFBF4, "M", "ئۈ"), + (0xFBF6, "M", "ئې"), + (0xFBF9, "M", "ئى"), + (0xFBFC, "M", "ی"), + (0xFC00, "M", "ئج"), + (0xFC01, "M", "ئح"), + (0xFC02, "M", "ئم"), + (0xFC03, "M", "ئى"), + (0xFC04, "M", "ئي"), + (0xFC05, "M", "بج"), + (0xFC06, "M", "بح"), + (0xFC07, "M", "بخ"), + (0xFC08, "M", "بم"), + (0xFC09, "M", "بى"), + (0xFC0A, "M", "بي"), + (0xFC0B, "M", "تج"), + (0xFC0C, "M", "تح"), + (0xFC0D, "M", "تخ"), + (0xFC0E, "M", "تم"), + (0xFC0F, "M", "تى"), + (0xFC10, "M", "تي"), + (0xFC11, "M", "ثج"), + (0xFC12, "M", "ثم"), + (0xFC13, "M", "ثى"), + (0xFC14, "M", "ثي"), + (0xFC15, "M", "جح"), + (0xFC16, "M", "جم"), + (0xFC17, "M", "حج"), + (0xFC18, "M", "حم"), + (0xFC19, "M", "خج"), + (0xFC1A, "M", "خح"), + (0xFC1B, "M", "خم"), + (0xFC1C, "M", "سج"), + (0xFC1D, "M", "سح"), + (0xFC1E, "M", "سخ"), + (0xFC1F, "M", "سم"), + (0xFC20, "M", "صح"), + (0xFC21, "M", "صم"), + (0xFC22, "M", "ضج"), + (0xFC23, "M", "ضح"), + (0xFC24, "M", "ضخ"), + (0xFC25, "M", "ضم"), + (0xFC26, "M", "طح"), + (0xFC27, "M", "طم"), + (0xFC28, "M", "ظم"), + (0xFC29, "M", "عج"), + (0xFC2A, "M", "عم"), + (0xFC2B, "M", "غج"), + (0xFC2C, "M", "غم"), + (0xFC2D, "M", "فج"), + (0xFC2E, "M", "فح"), + (0xFC2F, "M", "فخ"), + (0xFC30, "M", "فم"), + (0xFC31, "M", "فى"), + (0xFC32, "M", "في"), + (0xFC33, "M", "قح"), + ] + + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC34, "M", "قم"), + (0xFC35, "M", "قى"), + (0xFC36, "M", "قي"), + (0xFC37, "M", "كا"), + (0xFC38, "M", "كج"), + (0xFC39, "M", "كح"), + (0xFC3A, "M", "كخ"), + (0xFC3B, "M", "كل"), + (0xFC3C, "M", "كم"), + (0xFC3D, "M", "كى"), + (0xFC3E, "M", "كي"), + (0xFC3F, "M", "لج"), + (0xFC40, "M", "لح"), + (0xFC41, "M", "لخ"), + (0xFC42, "M", "لم"), + (0xFC43, "M", "لى"), + (0xFC44, "M", "لي"), + (0xFC45, "M", "مج"), + (0xFC46, "M", "مح"), + (0xFC47, "M", "مخ"), + (0xFC48, "M", "مم"), + (0xFC49, "M", "مى"), + (0xFC4A, "M", "مي"), + (0xFC4B, "M", "نج"), + (0xFC4C, "M", "نح"), + (0xFC4D, "M", "نخ"), + (0xFC4E, "M", "نم"), + (0xFC4F, "M", "نى"), + (0xFC50, "M", "ني"), + (0xFC51, "M", "هج"), + (0xFC52, "M", "هم"), + (0xFC53, "M", "هى"), + (0xFC54, "M", "هي"), + (0xFC55, "M", "يج"), + (0xFC56, "M", "يح"), + (0xFC57, "M", "يخ"), + (0xFC58, "M", "يم"), + (0xFC59, "M", "يى"), + (0xFC5A, "M", "يي"), + (0xFC5B, "M", "ذٰ"), + (0xFC5C, "M", "رٰ"), + (0xFC5D, "M", "ىٰ"), + (0xFC5E, "M", " ٌّ"), + (0xFC5F, "M", " ٍّ"), + (0xFC60, "M", " َّ"), + (0xFC61, "M", " ُّ"), + (0xFC62, "M", " ِّ"), + (0xFC63, "M", " ّٰ"), + (0xFC64, "M", "ئر"), + (0xFC65, "M", "ئز"), + (0xFC66, "M", "ئم"), + (0xFC67, "M", "ئن"), + (0xFC68, "M", "ئى"), + (0xFC69, "M", "ئي"), + (0xFC6A, "M", "بر"), + (0xFC6B, "M", "بز"), + (0xFC6C, "M", "بم"), + (0xFC6D, "M", "بن"), + (0xFC6E, "M", "بى"), + (0xFC6F, "M", "بي"), + (0xFC70, "M", "تر"), + (0xFC71, "M", "تز"), + (0xFC72, "M", "تم"), + (0xFC73, "M", "تن"), + (0xFC74, "M", "تى"), + (0xFC75, "M", "تي"), + (0xFC76, "M", "ثر"), + (0xFC77, "M", "ثز"), + (0xFC78, "M", "ثم"), + (0xFC79, "M", "ثن"), + (0xFC7A, "M", "ثى"), + (0xFC7B, "M", "ثي"), + (0xFC7C, "M", "فى"), + (0xFC7D, "M", "في"), + (0xFC7E, "M", "قى"), + (0xFC7F, "M", "قي"), + (0xFC80, "M", "كا"), + (0xFC81, "M", "كل"), + (0xFC82, "M", "كم"), + (0xFC83, "M", "كى"), + (0xFC84, "M", "كي"), + (0xFC85, "M", "لم"), + (0xFC86, "M", "لى"), + (0xFC87, "M", "لي"), + (0xFC88, "M", "ما"), + (0xFC89, "M", "مم"), + (0xFC8A, "M", "نر"), + (0xFC8B, "M", "نز"), + (0xFC8C, "M", "نم"), + (0xFC8D, 
"M", "نن"), + (0xFC8E, "M", "نى"), + (0xFC8F, "M", "ني"), + (0xFC90, "M", "ىٰ"), + (0xFC91, "M", "ير"), + (0xFC92, "M", "يز"), + (0xFC93, "M", "يم"), + (0xFC94, "M", "ين"), + (0xFC95, "M", "يى"), + (0xFC96, "M", "يي"), + (0xFC97, "M", "ئج"), + ] + + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC98, "M", "ئح"), + (0xFC99, "M", "ئخ"), + (0xFC9A, "M", "ئم"), + (0xFC9B, "M", "ئه"), + (0xFC9C, "M", "بج"), + (0xFC9D, "M", "بح"), + (0xFC9E, "M", "بخ"), + (0xFC9F, "M", "بم"), + (0xFCA0, "M", "به"), + (0xFCA1, "M", "تج"), + (0xFCA2, "M", "تح"), + (0xFCA3, "M", "تخ"), + (0xFCA4, "M", "تم"), + (0xFCA5, "M", "ته"), + (0xFCA6, "M", "ثم"), + (0xFCA7, "M", "جح"), + (0xFCA8, "M", "جم"), + (0xFCA9, "M", "حج"), + (0xFCAA, "M", "حم"), + (0xFCAB, "M", "خج"), + (0xFCAC, "M", "خم"), + (0xFCAD, "M", "سج"), + (0xFCAE, "M", "سح"), + (0xFCAF, "M", "سخ"), + (0xFCB0, "M", "سم"), + (0xFCB1, "M", "صح"), + (0xFCB2, "M", "صخ"), + (0xFCB3, "M", "صم"), + (0xFCB4, "M", "ضج"), + (0xFCB5, "M", "ضح"), + (0xFCB6, "M", "ضخ"), + (0xFCB7, "M", "ضم"), + (0xFCB8, "M", "طح"), + (0xFCB9, "M", "ظم"), + (0xFCBA, "M", "عج"), + (0xFCBB, "M", "عم"), + (0xFCBC, "M", "غج"), + (0xFCBD, "M", "غم"), + (0xFCBE, "M", "فج"), + (0xFCBF, "M", "فح"), + (0xFCC0, "M", "فخ"), + (0xFCC1, "M", "فم"), + (0xFCC2, "M", "قح"), + (0xFCC3, "M", "قم"), + (0xFCC4, "M", "كج"), + (0xFCC5, "M", "كح"), + (0xFCC6, "M", "كخ"), + (0xFCC7, "M", "كل"), + (0xFCC8, "M", "كم"), + (0xFCC9, "M", "لج"), + (0xFCCA, "M", "لح"), + (0xFCCB, "M", "لخ"), + (0xFCCC, "M", "لم"), + (0xFCCD, "M", "له"), + (0xFCCE, "M", "مج"), + (0xFCCF, "M", "مح"), + (0xFCD0, "M", "مخ"), + (0xFCD1, "M", "مم"), + (0xFCD2, "M", "نج"), + (0xFCD3, "M", "نح"), + (0xFCD4, "M", "نخ"), + (0xFCD5, "M", "نم"), + (0xFCD6, "M", "نه"), + (0xFCD7, "M", "هج"), + (0xFCD8, "M", "هم"), + (0xFCD9, "M", "هٰ"), + (0xFCDA, "M", "يج"), + (0xFCDB, "M", "يح"), + (0xFCDC, "M", "يخ"), + (0xFCDD, "M", "يم"), + (0xFCDE, "M", "يه"), + (0xFCDF, "M", "ئم"), + (0xFCE0, "M", "ئه"), + (0xFCE1, "M", "بم"), + (0xFCE2, "M", "به"), + (0xFCE3, "M", "تم"), + (0xFCE4, "M", "ته"), + (0xFCE5, "M", "ثم"), + (0xFCE6, "M", "ثه"), + (0xFCE7, "M", "سم"), + (0xFCE8, "M", "سه"), + (0xFCE9, "M", "شم"), + (0xFCEA, "M", "شه"), + (0xFCEB, "M", "كل"), + (0xFCEC, "M", "كم"), + (0xFCED, "M", "لم"), + (0xFCEE, "M", "نم"), + (0xFCEF, "M", "نه"), + (0xFCF0, "M", "يم"), + (0xFCF1, "M", "يه"), + (0xFCF2, "M", "ـَّ"), + (0xFCF3, "M", "ـُّ"), + (0xFCF4, "M", "ـِّ"), + (0xFCF5, "M", "طى"), + (0xFCF6, "M", "طي"), + (0xFCF7, "M", "عى"), + (0xFCF8, "M", "عي"), + (0xFCF9, "M", "غى"), + (0xFCFA, "M", "غي"), + (0xFCFB, "M", "سى"), + ] + + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCFC, "M", "سي"), + (0xFCFD, "M", "شى"), + (0xFCFE, "M", "شي"), + (0xFCFF, "M", "حى"), + (0xFD00, "M", "حي"), + (0xFD01, "M", "جى"), + (0xFD02, "M", "جي"), + (0xFD03, "M", "خى"), + (0xFD04, "M", "خي"), + (0xFD05, "M", "صى"), + (0xFD06, "M", "صي"), + (0xFD07, "M", "ضى"), + (0xFD08, "M", "ضي"), + (0xFD09, "M", "شج"), + (0xFD0A, "M", "شح"), + (0xFD0B, "M", "شخ"), + (0xFD0C, "M", "شم"), + (0xFD0D, "M", "شر"), + (0xFD0E, "M", "سر"), + (0xFD0F, "M", "صر"), + (0xFD10, "M", "ضر"), + (0xFD11, "M", "طى"), + (0xFD12, "M", "طي"), + (0xFD13, "M", "عى"), + (0xFD14, "M", "عي"), + (0xFD15, "M", "غى"), + (0xFD16, "M", "غي"), + (0xFD17, "M", "سى"), + (0xFD18, "M", "سي"), + (0xFD19, "M", "شى"), + (0xFD1A, "M", "شي"), + (0xFD1B, "M", "حى"), + (0xFD1C, "M", "حي"), + (0xFD1D, "M", "جى"), + (0xFD1E, "M", "جي"), + (0xFD1F, "M", "خى"), + 
(0xFD20, "M", "خي"), + (0xFD21, "M", "صى"), + (0xFD22, "M", "صي"), + (0xFD23, "M", "ضى"), + (0xFD24, "M", "ضي"), + (0xFD25, "M", "شج"), + (0xFD26, "M", "شح"), + (0xFD27, "M", "شخ"), + (0xFD28, "M", "شم"), + (0xFD29, "M", "شر"), + (0xFD2A, "M", "سر"), + (0xFD2B, "M", "صر"), + (0xFD2C, "M", "ضر"), + (0xFD2D, "M", "شج"), + (0xFD2E, "M", "شح"), + (0xFD2F, "M", "شخ"), + (0xFD30, "M", "شم"), + (0xFD31, "M", "سه"), + (0xFD32, "M", "شه"), + (0xFD33, "M", "طم"), + (0xFD34, "M", "سج"), + (0xFD35, "M", "سح"), + (0xFD36, "M", "سخ"), + (0xFD37, "M", "شج"), + (0xFD38, "M", "شح"), + (0xFD39, "M", "شخ"), + (0xFD3A, "M", "طم"), + (0xFD3B, "M", "ظم"), + (0xFD3C, "M", "اً"), + (0xFD3E, "V"), + (0xFD50, "M", "تجم"), + (0xFD51, "M", "تحج"), + (0xFD53, "M", "تحم"), + (0xFD54, "M", "تخم"), + (0xFD55, "M", "تمج"), + (0xFD56, "M", "تمح"), + (0xFD57, "M", "تمخ"), + (0xFD58, "M", "جمح"), + (0xFD5A, "M", "حمي"), + (0xFD5B, "M", "حمى"), + (0xFD5C, "M", "سحج"), + (0xFD5D, "M", "سجح"), + (0xFD5E, "M", "سجى"), + (0xFD5F, "M", "سمح"), + (0xFD61, "M", "سمج"), + (0xFD62, "M", "سمم"), + (0xFD64, "M", "صحح"), + (0xFD66, "M", "صمم"), + (0xFD67, "M", "شحم"), + (0xFD69, "M", "شجي"), + (0xFD6A, "M", "شمخ"), + (0xFD6C, "M", "شمم"), + (0xFD6E, "M", "ضحى"), + (0xFD6F, "M", "ضخم"), + (0xFD71, "M", "طمح"), + (0xFD73, "M", "طمم"), + (0xFD74, "M", "طمي"), + (0xFD75, "M", "عجم"), + (0xFD76, "M", "عمم"), + (0xFD78, "M", "عمى"), + (0xFD79, "M", "غمم"), + (0xFD7A, "M", "غمي"), + (0xFD7B, "M", "غمى"), + (0xFD7C, "M", "فخم"), + ] + + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD7E, "M", "قمح"), + (0xFD7F, "M", "قمم"), + (0xFD80, "M", "لحم"), + (0xFD81, "M", "لحي"), + (0xFD82, "M", "لحى"), + (0xFD83, "M", "لجج"), + (0xFD85, "M", "لخم"), + (0xFD87, "M", "لمح"), + (0xFD89, "M", "محج"), + (0xFD8A, "M", "محم"), + (0xFD8B, "M", "محي"), + (0xFD8C, "M", "مجح"), + (0xFD8D, "M", "مجم"), + (0xFD8E, "M", "مخج"), + (0xFD8F, "M", "مخم"), + (0xFD90, "X"), + (0xFD92, "M", "مجخ"), + (0xFD93, "M", "همج"), + (0xFD94, "M", "همم"), + (0xFD95, "M", "نحم"), + (0xFD96, "M", "نحى"), + (0xFD97, "M", "نجم"), + (0xFD99, "M", "نجى"), + (0xFD9A, "M", "نمي"), + (0xFD9B, "M", "نمى"), + (0xFD9C, "M", "يمم"), + (0xFD9E, "M", "بخي"), + (0xFD9F, "M", "تجي"), + (0xFDA0, "M", "تجى"), + (0xFDA1, "M", "تخي"), + (0xFDA2, "M", "تخى"), + (0xFDA3, "M", "تمي"), + (0xFDA4, "M", "تمى"), + (0xFDA5, "M", "جمي"), + (0xFDA6, "M", "جحى"), + (0xFDA7, "M", "جمى"), + (0xFDA8, "M", "سخى"), + (0xFDA9, "M", "صحي"), + (0xFDAA, "M", "شحي"), + (0xFDAB, "M", "ضحي"), + (0xFDAC, "M", "لجي"), + (0xFDAD, "M", "لمي"), + (0xFDAE, "M", "يحي"), + (0xFDAF, "M", "يجي"), + (0xFDB0, "M", "يمي"), + (0xFDB1, "M", "ممي"), + (0xFDB2, "M", "قمي"), + (0xFDB3, "M", "نحي"), + (0xFDB4, "M", "قمح"), + (0xFDB5, "M", "لحم"), + (0xFDB6, "M", "عمي"), + (0xFDB7, "M", "كمي"), + (0xFDB8, "M", "نجح"), + (0xFDB9, "M", "مخي"), + (0xFDBA, "M", "لجم"), + (0xFDBB, "M", "كمم"), + (0xFDBC, "M", "لجم"), + (0xFDBD, "M", "نجح"), + (0xFDBE, "M", "جحي"), + (0xFDBF, "M", "حجي"), + (0xFDC0, "M", "مجي"), + (0xFDC1, "M", "فمي"), + (0xFDC2, "M", "بحي"), + (0xFDC3, "M", "كمم"), + (0xFDC4, "M", "عجم"), + (0xFDC5, "M", "صمم"), + (0xFDC6, "M", "سخي"), + (0xFDC7, "M", "نجي"), + (0xFDC8, "X"), + (0xFDCF, "V"), + (0xFDD0, "X"), + (0xFDF0, "M", "صلے"), + (0xFDF1, "M", "قلے"), + (0xFDF2, "M", "الله"), + (0xFDF3, "M", "اكبر"), + (0xFDF4, "M", "محمد"), + (0xFDF5, "M", "صلعم"), + (0xFDF6, "M", "رسول"), + (0xFDF7, "M", "عليه"), + (0xFDF8, "M", "وسلم"), + (0xFDF9, "M", "صلى"), + (0xFDFA, "M", "صلى الله عليه وسلم"), + 
(0xFDFB, "M", "جل جلاله"), + (0xFDFC, "M", "ریال"), + (0xFDFD, "V"), + (0xFE00, "I"), + (0xFE10, "M", ","), + (0xFE11, "M", "、"), + (0xFE12, "X"), + (0xFE13, "M", ":"), + (0xFE14, "M", ";"), + (0xFE15, "M", "!"), + (0xFE16, "M", "?"), + (0xFE17, "M", "〖"), + (0xFE18, "M", "〗"), + (0xFE19, "X"), + (0xFE20, "V"), + (0xFE30, "X"), + (0xFE31, "M", "—"), + (0xFE32, "M", "–"), + ] + + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE33, "M", "_"), + (0xFE35, "M", "("), + (0xFE36, "M", ")"), + (0xFE37, "M", "{"), + (0xFE38, "M", "}"), + (0xFE39, "M", "〔"), + (0xFE3A, "M", "〕"), + (0xFE3B, "M", "【"), + (0xFE3C, "M", "】"), + (0xFE3D, "M", "《"), + (0xFE3E, "M", "》"), + (0xFE3F, "M", "〈"), + (0xFE40, "M", "〉"), + (0xFE41, "M", "「"), + (0xFE42, "M", "」"), + (0xFE43, "M", "『"), + (0xFE44, "M", "』"), + (0xFE45, "V"), + (0xFE47, "M", "["), + (0xFE48, "M", "]"), + (0xFE49, "M", " ̅"), + (0xFE4D, "M", "_"), + (0xFE50, "M", ","), + (0xFE51, "M", "、"), + (0xFE52, "X"), + (0xFE54, "M", ";"), + (0xFE55, "M", ":"), + (0xFE56, "M", "?"), + (0xFE57, "M", "!"), + (0xFE58, "M", "—"), + (0xFE59, "M", "("), + (0xFE5A, "M", ")"), + (0xFE5B, "M", "{"), + (0xFE5C, "M", "}"), + (0xFE5D, "M", "〔"), + (0xFE5E, "M", "〕"), + (0xFE5F, "M", "#"), + (0xFE60, "M", "&"), + (0xFE61, "M", "*"), + (0xFE62, "M", "+"), + (0xFE63, "M", "-"), + (0xFE64, "M", "<"), + (0xFE65, "M", ">"), + (0xFE66, "M", "="), + (0xFE67, "X"), + (0xFE68, "M", "\\"), + (0xFE69, "M", "$"), + (0xFE6A, "M", "%"), + (0xFE6B, "M", "@"), + (0xFE6C, "X"), + (0xFE70, "M", " ً"), + (0xFE71, "M", "ـً"), + (0xFE72, "M", " ٌ"), + (0xFE73, "V"), + (0xFE74, "M", " ٍ"), + (0xFE75, "X"), + (0xFE76, "M", " َ"), + (0xFE77, "M", "ـَ"), + (0xFE78, "M", " ُ"), + (0xFE79, "M", "ـُ"), + (0xFE7A, "M", " ِ"), + (0xFE7B, "M", "ـِ"), + (0xFE7C, "M", " ّ"), + (0xFE7D, "M", "ـّ"), + (0xFE7E, "M", " ْ"), + (0xFE7F, "M", "ـْ"), + (0xFE80, "M", "ء"), + (0xFE81, "M", "آ"), + (0xFE83, "M", "أ"), + (0xFE85, "M", "ؤ"), + (0xFE87, "M", "إ"), + (0xFE89, "M", "ئ"), + (0xFE8D, "M", "ا"), + (0xFE8F, "M", "ب"), + (0xFE93, "M", "ة"), + (0xFE95, "M", "ت"), + (0xFE99, "M", "ث"), + (0xFE9D, "M", "ج"), + (0xFEA1, "M", "ح"), + (0xFEA5, "M", "خ"), + (0xFEA9, "M", "د"), + (0xFEAB, "M", "ذ"), + (0xFEAD, "M", "ر"), + (0xFEAF, "M", "ز"), + (0xFEB1, "M", "س"), + (0xFEB5, "M", "ش"), + (0xFEB9, "M", "ص"), + (0xFEBD, "M", "ض"), + (0xFEC1, "M", "ط"), + (0xFEC5, "M", "ظ"), + (0xFEC9, "M", "ع"), + (0xFECD, "M", "غ"), + (0xFED1, "M", "ف"), + (0xFED5, "M", "ق"), + (0xFED9, "M", "ك"), + (0xFEDD, "M", "ل"), + (0xFEE1, "M", "م"), + (0xFEE5, "M", "ن"), + (0xFEE9, "M", "ه"), + (0xFEED, "M", "و"), + ] + + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFEEF, "M", "ى"), + (0xFEF1, "M", "ي"), + (0xFEF5, "M", "لآ"), + (0xFEF7, "M", "لأ"), + (0xFEF9, "M", "لإ"), + (0xFEFB, "M", "لا"), + (0xFEFD, "X"), + (0xFEFF, "I"), + (0xFF00, "X"), + (0xFF01, "M", "!"), + (0xFF02, "M", '"'), + (0xFF03, "M", "#"), + (0xFF04, "M", "$"), + (0xFF05, "M", "%"), + (0xFF06, "M", "&"), + (0xFF07, "M", "'"), + (0xFF08, "M", "("), + (0xFF09, "M", ")"), + (0xFF0A, "M", "*"), + (0xFF0B, "M", "+"), + (0xFF0C, "M", ","), + (0xFF0D, "M", "-"), + (0xFF0E, "M", "."), + (0xFF0F, "M", "/"), + (0xFF10, "M", "0"), + (0xFF11, "M", "1"), + (0xFF12, "M", "2"), + (0xFF13, "M", "3"), + (0xFF14, "M", "4"), + (0xFF15, "M", "5"), + (0xFF16, "M", "6"), + (0xFF17, "M", "7"), + (0xFF18, "M", "8"), + (0xFF19, "M", "9"), + (0xFF1A, "M", ":"), + (0xFF1B, "M", ";"), + (0xFF1C, "M", "<"), + (0xFF1D, 
"M", "="), + (0xFF1E, "M", ">"), + (0xFF1F, "M", "?"), + (0xFF20, "M", "@"), + (0xFF21, "M", "a"), + (0xFF22, "M", "b"), + (0xFF23, "M", "c"), + (0xFF24, "M", "d"), + (0xFF25, "M", "e"), + (0xFF26, "M", "f"), + (0xFF27, "M", "g"), + (0xFF28, "M", "h"), + (0xFF29, "M", "i"), + (0xFF2A, "M", "j"), + (0xFF2B, "M", "k"), + (0xFF2C, "M", "l"), + (0xFF2D, "M", "m"), + (0xFF2E, "M", "n"), + (0xFF2F, "M", "o"), + (0xFF30, "M", "p"), + (0xFF31, "M", "q"), + (0xFF32, "M", "r"), + (0xFF33, "M", "s"), + (0xFF34, "M", "t"), + (0xFF35, "M", "u"), + (0xFF36, "M", "v"), + (0xFF37, "M", "w"), + (0xFF38, "M", "x"), + (0xFF39, "M", "y"), + (0xFF3A, "M", "z"), + (0xFF3B, "M", "["), + (0xFF3C, "M", "\\"), + (0xFF3D, "M", "]"), + (0xFF3E, "M", "^"), + (0xFF3F, "M", "_"), + (0xFF40, "M", "`"), + (0xFF41, "M", "a"), + (0xFF42, "M", "b"), + (0xFF43, "M", "c"), + (0xFF44, "M", "d"), + (0xFF45, "M", "e"), + (0xFF46, "M", "f"), + (0xFF47, "M", "g"), + (0xFF48, "M", "h"), + (0xFF49, "M", "i"), + (0xFF4A, "M", "j"), + (0xFF4B, "M", "k"), + (0xFF4C, "M", "l"), + (0xFF4D, "M", "m"), + (0xFF4E, "M", "n"), + (0xFF4F, "M", "o"), + (0xFF50, "M", "p"), + (0xFF51, "M", "q"), + (0xFF52, "M", "r"), + (0xFF53, "M", "s"), + (0xFF54, "M", "t"), + (0xFF55, "M", "u"), + (0xFF56, "M", "v"), + (0xFF57, "M", "w"), + (0xFF58, "M", "x"), + (0xFF59, "M", "y"), + (0xFF5A, "M", "z"), + (0xFF5B, "M", "{"), + ] + + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF5C, "M", "|"), + (0xFF5D, "M", "}"), + (0xFF5E, "M", "~"), + (0xFF5F, "M", "⦅"), + (0xFF60, "M", "⦆"), + (0xFF61, "M", "."), + (0xFF62, "M", "「"), + (0xFF63, "M", "」"), + (0xFF64, "M", "、"), + (0xFF65, "M", "・"), + (0xFF66, "M", "ヲ"), + (0xFF67, "M", "ァ"), + (0xFF68, "M", "ィ"), + (0xFF69, "M", "ゥ"), + (0xFF6A, "M", "ェ"), + (0xFF6B, "M", "ォ"), + (0xFF6C, "M", "ャ"), + (0xFF6D, "M", "ュ"), + (0xFF6E, "M", "ョ"), + (0xFF6F, "M", "ッ"), + (0xFF70, "M", "ー"), + (0xFF71, "M", "ア"), + (0xFF72, "M", "イ"), + (0xFF73, "M", "ウ"), + (0xFF74, "M", "エ"), + (0xFF75, "M", "オ"), + (0xFF76, "M", "カ"), + (0xFF77, "M", "キ"), + (0xFF78, "M", "ク"), + (0xFF79, "M", "ケ"), + (0xFF7A, "M", "コ"), + (0xFF7B, "M", "サ"), + (0xFF7C, "M", "シ"), + (0xFF7D, "M", "ス"), + (0xFF7E, "M", "セ"), + (0xFF7F, "M", "ソ"), + (0xFF80, "M", "タ"), + (0xFF81, "M", "チ"), + (0xFF82, "M", "ツ"), + (0xFF83, "M", "テ"), + (0xFF84, "M", "ト"), + (0xFF85, "M", "ナ"), + (0xFF86, "M", "ニ"), + (0xFF87, "M", "ヌ"), + (0xFF88, "M", "ネ"), + (0xFF89, "M", "ノ"), + (0xFF8A, "M", "ハ"), + (0xFF8B, "M", "ヒ"), + (0xFF8C, "M", "フ"), + (0xFF8D, "M", "ヘ"), + (0xFF8E, "M", "ホ"), + (0xFF8F, "M", "マ"), + (0xFF90, "M", "ミ"), + (0xFF91, "M", "ム"), + (0xFF92, "M", "メ"), + (0xFF93, "M", "モ"), + (0xFF94, "M", "ヤ"), + (0xFF95, "M", "ユ"), + (0xFF96, "M", "ヨ"), + (0xFF97, "M", "ラ"), + (0xFF98, "M", "リ"), + (0xFF99, "M", "ル"), + (0xFF9A, "M", "レ"), + (0xFF9B, "M", "ロ"), + (0xFF9C, "M", "ワ"), + (0xFF9D, "M", "ン"), + (0xFF9E, "M", "゙"), + (0xFF9F, "M", "゚"), + (0xFFA0, "I"), + (0xFFA1, "M", "ᄀ"), + (0xFFA2, "M", "ᄁ"), + (0xFFA3, "M", "ᆪ"), + (0xFFA4, "M", "ᄂ"), + (0xFFA5, "M", "ᆬ"), + (0xFFA6, "M", "ᆭ"), + (0xFFA7, "M", "ᄃ"), + (0xFFA8, "M", "ᄄ"), + (0xFFA9, "M", "ᄅ"), + (0xFFAA, "M", "ᆰ"), + (0xFFAB, "M", "ᆱ"), + (0xFFAC, "M", "ᆲ"), + (0xFFAD, "M", "ᆳ"), + (0xFFAE, "M", "ᆴ"), + (0xFFAF, "M", "ᆵ"), + (0xFFB0, "M", "ᄚ"), + (0xFFB1, "M", "ᄆ"), + (0xFFB2, "M", "ᄇ"), + (0xFFB3, "M", "ᄈ"), + (0xFFB4, "M", "ᄡ"), + (0xFFB5, "M", "ᄉ"), + (0xFFB6, "M", "ᄊ"), + (0xFFB7, "M", "ᄋ"), + (0xFFB8, "M", "ᄌ"), + (0xFFB9, "M", "ᄍ"), + (0xFFBA, "M", "ᄎ"), + 
(0xFFBB, "M", "ᄏ"), + (0xFFBC, "M", "ᄐ"), + (0xFFBD, "M", "ᄑ"), + (0xFFBE, "M", "ᄒ"), + (0xFFBF, "X"), + ] + + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFC2, "M", "ᅡ"), + (0xFFC3, "M", "ᅢ"), + (0xFFC4, "M", "ᅣ"), + (0xFFC5, "M", "ᅤ"), + (0xFFC6, "M", "ᅥ"), + (0xFFC7, "M", "ᅦ"), + (0xFFC8, "X"), + (0xFFCA, "M", "ᅧ"), + (0xFFCB, "M", "ᅨ"), + (0xFFCC, "M", "ᅩ"), + (0xFFCD, "M", "ᅪ"), + (0xFFCE, "M", "ᅫ"), + (0xFFCF, "M", "ᅬ"), + (0xFFD0, "X"), + (0xFFD2, "M", "ᅭ"), + (0xFFD3, "M", "ᅮ"), + (0xFFD4, "M", "ᅯ"), + (0xFFD5, "M", "ᅰ"), + (0xFFD6, "M", "ᅱ"), + (0xFFD7, "M", "ᅲ"), + (0xFFD8, "X"), + (0xFFDA, "M", "ᅳ"), + (0xFFDB, "M", "ᅴ"), + (0xFFDC, "M", "ᅵ"), + (0xFFDD, "X"), + (0xFFE0, "M", "¢"), + (0xFFE1, "M", "£"), + (0xFFE2, "M", "¬"), + (0xFFE3, "M", " ̄"), + (0xFFE4, "M", "¦"), + (0xFFE5, "M", "¥"), + (0xFFE6, "M", "₩"), + (0xFFE7, "X"), + (0xFFE8, "M", "│"), + (0xFFE9, "M", "←"), + (0xFFEA, "M", "↑"), + (0xFFEB, "M", "→"), + (0xFFEC, "M", "↓"), + (0xFFED, "M", "■"), + (0xFFEE, "M", "○"), + (0xFFEF, "X"), + (0x10000, "V"), + (0x1000C, "X"), + (0x1000D, "V"), + (0x10027, "X"), + (0x10028, "V"), + (0x1003B, "X"), + (0x1003C, "V"), + (0x1003E, "X"), + (0x1003F, "V"), + (0x1004E, "X"), + (0x10050, "V"), + (0x1005E, "X"), + (0x10080, "V"), + (0x100FB, "X"), + (0x10100, "V"), + (0x10103, "X"), + (0x10107, "V"), + (0x10134, "X"), + (0x10137, "V"), + (0x1018F, "X"), + (0x10190, "V"), + (0x1019D, "X"), + (0x101A0, "V"), + (0x101A1, "X"), + (0x101D0, "V"), + (0x101FE, "X"), + (0x10280, "V"), + (0x1029D, "X"), + (0x102A0, "V"), + (0x102D1, "X"), + (0x102E0, "V"), + (0x102FC, "X"), + (0x10300, "V"), + (0x10324, "X"), + (0x1032D, "V"), + (0x1034B, "X"), + (0x10350, "V"), + (0x1037B, "X"), + (0x10380, "V"), + (0x1039E, "X"), + (0x1039F, "V"), + (0x103C4, "X"), + (0x103C8, "V"), + (0x103D6, "X"), + (0x10400, "M", "𐐨"), + (0x10401, "M", "𐐩"), + (0x10402, "M", "𐐪"), + (0x10403, "M", "𐐫"), + (0x10404, "M", "𐐬"), + (0x10405, "M", "𐐭"), + (0x10406, "M", "𐐮"), + (0x10407, "M", "𐐯"), + (0x10408, "M", "𐐰"), + (0x10409, "M", "𐐱"), + (0x1040A, "M", "𐐲"), + (0x1040B, "M", "𐐳"), + (0x1040C, "M", "𐐴"), + (0x1040D, "M", "𐐵"), + (0x1040E, "M", "𐐶"), + ] + + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1040F, "M", "𐐷"), + (0x10410, "M", "𐐸"), + (0x10411, "M", "𐐹"), + (0x10412, "M", "𐐺"), + (0x10413, "M", "𐐻"), + (0x10414, "M", "𐐼"), + (0x10415, "M", "𐐽"), + (0x10416, "M", "𐐾"), + (0x10417, "M", "𐐿"), + (0x10418, "M", "𐑀"), + (0x10419, "M", "𐑁"), + (0x1041A, "M", "𐑂"), + (0x1041B, "M", "𐑃"), + (0x1041C, "M", "𐑄"), + (0x1041D, "M", "𐑅"), + (0x1041E, "M", "𐑆"), + (0x1041F, "M", "𐑇"), + (0x10420, "M", "𐑈"), + (0x10421, "M", "𐑉"), + (0x10422, "M", "𐑊"), + (0x10423, "M", "𐑋"), + (0x10424, "M", "𐑌"), + (0x10425, "M", "𐑍"), + (0x10426, "M", "𐑎"), + (0x10427, "M", "𐑏"), + (0x10428, "V"), + (0x1049E, "X"), + (0x104A0, "V"), + (0x104AA, "X"), + (0x104B0, "M", "𐓘"), + (0x104B1, "M", "𐓙"), + (0x104B2, "M", "𐓚"), + (0x104B3, "M", "𐓛"), + (0x104B4, "M", "𐓜"), + (0x104B5, "M", "𐓝"), + (0x104B6, "M", "𐓞"), + (0x104B7, "M", "𐓟"), + (0x104B8, "M", "𐓠"), + (0x104B9, "M", "𐓡"), + (0x104BA, "M", "𐓢"), + (0x104BB, "M", "𐓣"), + (0x104BC, "M", "𐓤"), + (0x104BD, "M", "𐓥"), + (0x104BE, "M", "𐓦"), + (0x104BF, "M", "𐓧"), + (0x104C0, "M", "𐓨"), + (0x104C1, "M", "𐓩"), + (0x104C2, "M", "𐓪"), + (0x104C3, "M", "𐓫"), + (0x104C4, "M", "𐓬"), + (0x104C5, "M", "𐓭"), + (0x104C6, "M", "𐓮"), + (0x104C7, "M", "𐓯"), + (0x104C8, "M", "𐓰"), + (0x104C9, "M", "𐓱"), + (0x104CA, "M", 
"𐓲"), + (0x104CB, "M", "𐓳"), + (0x104CC, "M", "𐓴"), + (0x104CD, "M", "𐓵"), + (0x104CE, "M", "𐓶"), + (0x104CF, "M", "𐓷"), + (0x104D0, "M", "𐓸"), + (0x104D1, "M", "𐓹"), + (0x104D2, "M", "𐓺"), + (0x104D3, "M", "𐓻"), + (0x104D4, "X"), + (0x104D8, "V"), + (0x104FC, "X"), + (0x10500, "V"), + (0x10528, "X"), + (0x10530, "V"), + (0x10564, "X"), + (0x1056F, "V"), + (0x10570, "M", "𐖗"), + (0x10571, "M", "𐖘"), + (0x10572, "M", "𐖙"), + (0x10573, "M", "𐖚"), + (0x10574, "M", "𐖛"), + (0x10575, "M", "𐖜"), + (0x10576, "M", "𐖝"), + (0x10577, "M", "𐖞"), + (0x10578, "M", "𐖟"), + (0x10579, "M", "𐖠"), + (0x1057A, "M", "𐖡"), + (0x1057B, "X"), + (0x1057C, "M", "𐖣"), + (0x1057D, "M", "𐖤"), + (0x1057E, "M", "𐖥"), + (0x1057F, "M", "𐖦"), + (0x10580, "M", "𐖧"), + (0x10581, "M", "𐖨"), + (0x10582, "M", "𐖩"), + (0x10583, "M", "𐖪"), + (0x10584, "M", "𐖫"), + (0x10585, "M", "𐖬"), + (0x10586, "M", "𐖭"), + (0x10587, "M", "𐖮"), + (0x10588, "M", "𐖯"), + (0x10589, "M", "𐖰"), + (0x1058A, "M", "𐖱"), + ] + + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1058B, "X"), + (0x1058C, "M", "𐖳"), + (0x1058D, "M", "𐖴"), + (0x1058E, "M", "𐖵"), + (0x1058F, "M", "𐖶"), + (0x10590, "M", "𐖷"), + (0x10591, "M", "𐖸"), + (0x10592, "M", "𐖹"), + (0x10593, "X"), + (0x10594, "M", "𐖻"), + (0x10595, "M", "𐖼"), + (0x10596, "X"), + (0x10597, "V"), + (0x105A2, "X"), + (0x105A3, "V"), + (0x105B2, "X"), + (0x105B3, "V"), + (0x105BA, "X"), + (0x105BB, "V"), + (0x105BD, "X"), + (0x105C0, "V"), + (0x105F4, "X"), + (0x10600, "V"), + (0x10737, "X"), + (0x10740, "V"), + (0x10756, "X"), + (0x10760, "V"), + (0x10768, "X"), + (0x10780, "V"), + (0x10781, "M", "ː"), + (0x10782, "M", "ˑ"), + (0x10783, "M", "æ"), + (0x10784, "M", "ʙ"), + (0x10785, "M", "ɓ"), + (0x10786, "X"), + (0x10787, "M", "ʣ"), + (0x10788, "M", "ꭦ"), + (0x10789, "M", "ʥ"), + (0x1078A, "M", "ʤ"), + (0x1078B, "M", "ɖ"), + (0x1078C, "M", "ɗ"), + (0x1078D, "M", "ᶑ"), + (0x1078E, "M", "ɘ"), + (0x1078F, "M", "ɞ"), + (0x10790, "M", "ʩ"), + (0x10791, "M", "ɤ"), + (0x10792, "M", "ɢ"), + (0x10793, "M", "ɠ"), + (0x10794, "M", "ʛ"), + (0x10795, "M", "ħ"), + (0x10796, "M", "ʜ"), + (0x10797, "M", "ɧ"), + (0x10798, "M", "ʄ"), + (0x10799, "M", "ʪ"), + (0x1079A, "M", "ʫ"), + (0x1079B, "M", "ɬ"), + (0x1079C, "M", "𝼄"), + (0x1079D, "M", "ꞎ"), + (0x1079E, "M", "ɮ"), + (0x1079F, "M", "𝼅"), + (0x107A0, "M", "ʎ"), + (0x107A1, "M", "𝼆"), + (0x107A2, "M", "ø"), + (0x107A3, "M", "ɶ"), + (0x107A4, "M", "ɷ"), + (0x107A5, "M", "q"), + (0x107A6, "M", "ɺ"), + (0x107A7, "M", "𝼈"), + (0x107A8, "M", "ɽ"), + (0x107A9, "M", "ɾ"), + (0x107AA, "M", "ʀ"), + (0x107AB, "M", "ʨ"), + (0x107AC, "M", "ʦ"), + (0x107AD, "M", "ꭧ"), + (0x107AE, "M", "ʧ"), + (0x107AF, "M", "ʈ"), + (0x107B0, "M", "ⱱ"), + (0x107B1, "X"), + (0x107B2, "M", "ʏ"), + (0x107B3, "M", "ʡ"), + (0x107B4, "M", "ʢ"), + (0x107B5, "M", "ʘ"), + (0x107B6, "M", "ǀ"), + (0x107B7, "M", "ǁ"), + (0x107B8, "M", "ǂ"), + (0x107B9, "M", "𝼊"), + (0x107BA, "M", "𝼞"), + (0x107BB, "X"), + (0x10800, "V"), + (0x10806, "X"), + (0x10808, "V"), + (0x10809, "X"), + (0x1080A, "V"), + (0x10836, "X"), + (0x10837, "V"), + (0x10839, "X"), + (0x1083C, "V"), + (0x1083D, "X"), + (0x1083F, "V"), + (0x10856, "X"), + ] + + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10857, "V"), + (0x1089F, "X"), + (0x108A7, "V"), + (0x108B0, "X"), + (0x108E0, "V"), + (0x108F3, "X"), + (0x108F4, "V"), + (0x108F6, "X"), + (0x108FB, "V"), + (0x1091C, "X"), + (0x1091F, "V"), + (0x1093A, "X"), + (0x1093F, "V"), + (0x10940, "X"), + (0x10980, "V"), + 
(0x109B8, "X"), + (0x109BC, "V"), + (0x109D0, "X"), + (0x109D2, "V"), + (0x10A04, "X"), + (0x10A05, "V"), + (0x10A07, "X"), + (0x10A0C, "V"), + (0x10A14, "X"), + (0x10A15, "V"), + (0x10A18, "X"), + (0x10A19, "V"), + (0x10A36, "X"), + (0x10A38, "V"), + (0x10A3B, "X"), + (0x10A3F, "V"), + (0x10A49, "X"), + (0x10A50, "V"), + (0x10A59, "X"), + (0x10A60, "V"), + (0x10AA0, "X"), + (0x10AC0, "V"), + (0x10AE7, "X"), + (0x10AEB, "V"), + (0x10AF7, "X"), + (0x10B00, "V"), + (0x10B36, "X"), + (0x10B39, "V"), + (0x10B56, "X"), + (0x10B58, "V"), + (0x10B73, "X"), + (0x10B78, "V"), + (0x10B92, "X"), + (0x10B99, "V"), + (0x10B9D, "X"), + (0x10BA9, "V"), + (0x10BB0, "X"), + (0x10C00, "V"), + (0x10C49, "X"), + (0x10C80, "M", "𐳀"), + (0x10C81, "M", "𐳁"), + (0x10C82, "M", "𐳂"), + (0x10C83, "M", "𐳃"), + (0x10C84, "M", "𐳄"), + (0x10C85, "M", "𐳅"), + (0x10C86, "M", "𐳆"), + (0x10C87, "M", "𐳇"), + (0x10C88, "M", "𐳈"), + (0x10C89, "M", "𐳉"), + (0x10C8A, "M", "𐳊"), + (0x10C8B, "M", "𐳋"), + (0x10C8C, "M", "𐳌"), + (0x10C8D, "M", "𐳍"), + (0x10C8E, "M", "𐳎"), + (0x10C8F, "M", "𐳏"), + (0x10C90, "M", "𐳐"), + (0x10C91, "M", "𐳑"), + (0x10C92, "M", "𐳒"), + (0x10C93, "M", "𐳓"), + (0x10C94, "M", "𐳔"), + (0x10C95, "M", "𐳕"), + (0x10C96, "M", "𐳖"), + (0x10C97, "M", "𐳗"), + (0x10C98, "M", "𐳘"), + (0x10C99, "M", "𐳙"), + (0x10C9A, "M", "𐳚"), + (0x10C9B, "M", "𐳛"), + (0x10C9C, "M", "𐳜"), + (0x10C9D, "M", "𐳝"), + (0x10C9E, "M", "𐳞"), + (0x10C9F, "M", "𐳟"), + (0x10CA0, "M", "𐳠"), + (0x10CA1, "M", "𐳡"), + (0x10CA2, "M", "𐳢"), + (0x10CA3, "M", "𐳣"), + (0x10CA4, "M", "𐳤"), + (0x10CA5, "M", "𐳥"), + (0x10CA6, "M", "𐳦"), + (0x10CA7, "M", "𐳧"), + (0x10CA8, "M", "𐳨"), + (0x10CA9, "M", "𐳩"), + (0x10CAA, "M", "𐳪"), + (0x10CAB, "M", "𐳫"), + (0x10CAC, "M", "𐳬"), + (0x10CAD, "M", "𐳭"), + ] + + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10CAE, "M", "𐳮"), + (0x10CAF, "M", "𐳯"), + (0x10CB0, "M", "𐳰"), + (0x10CB1, "M", "𐳱"), + (0x10CB2, "M", "𐳲"), + (0x10CB3, "X"), + (0x10CC0, "V"), + (0x10CF3, "X"), + (0x10CFA, "V"), + (0x10D28, "X"), + (0x10D30, "V"), + (0x10D3A, "X"), + (0x10D40, "V"), + (0x10D50, "M", "𐵰"), + (0x10D51, "M", "𐵱"), + (0x10D52, "M", "𐵲"), + (0x10D53, "M", "𐵳"), + (0x10D54, "M", "𐵴"), + (0x10D55, "M", "𐵵"), + (0x10D56, "M", "𐵶"), + (0x10D57, "M", "𐵷"), + (0x10D58, "M", "𐵸"), + (0x10D59, "M", "𐵹"), + (0x10D5A, "M", "𐵺"), + (0x10D5B, "M", "𐵻"), + (0x10D5C, "M", "𐵼"), + (0x10D5D, "M", "𐵽"), + (0x10D5E, "M", "𐵾"), + (0x10D5F, "M", "𐵿"), + (0x10D60, "M", "𐶀"), + (0x10D61, "M", "𐶁"), + (0x10D62, "M", "𐶂"), + (0x10D63, "M", "𐶃"), + (0x10D64, "M", "𐶄"), + (0x10D65, "M", "𐶅"), + (0x10D66, "X"), + (0x10D69, "V"), + (0x10D86, "X"), + (0x10D8E, "V"), + (0x10D90, "X"), + (0x10E60, "V"), + (0x10E7F, "X"), + (0x10E80, "V"), + (0x10EAA, "X"), + (0x10EAB, "V"), + (0x10EAE, "X"), + (0x10EB0, "V"), + (0x10EB2, "X"), + (0x10EC2, "V"), + (0x10EC5, "X"), + (0x10EFC, "V"), + (0x10F28, "X"), + (0x10F30, "V"), + (0x10F5A, "X"), + (0x10F70, "V"), + (0x10F8A, "X"), + (0x10FB0, "V"), + (0x10FCC, "X"), + (0x10FE0, "V"), + (0x10FF7, "X"), + (0x11000, "V"), + (0x1104E, "X"), + (0x11052, "V"), + (0x11076, "X"), + (0x1107F, "V"), + (0x110BD, "X"), + (0x110BE, "V"), + (0x110C3, "X"), + (0x110D0, "V"), + (0x110E9, "X"), + (0x110F0, "V"), + (0x110FA, "X"), + (0x11100, "V"), + (0x11135, "X"), + (0x11136, "V"), + (0x11148, "X"), + (0x11150, "V"), + (0x11177, "X"), + (0x11180, "V"), + (0x111E0, "X"), + (0x111E1, "V"), + (0x111F5, "X"), + (0x11200, "V"), + (0x11212, "X"), + (0x11213, "V"), + (0x11242, "X"), + (0x11280, "V"), + 
(0x11287, "X"), + (0x11288, "V"), + (0x11289, "X"), + (0x1128A, "V"), + (0x1128E, "X"), + (0x1128F, "V"), + (0x1129E, "X"), + (0x1129F, "V"), + (0x112AA, "X"), + (0x112B0, "V"), + (0x112EB, "X"), + (0x112F0, "V"), + (0x112FA, "X"), + ] + + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11300, "V"), + (0x11304, "X"), + (0x11305, "V"), + (0x1130D, "X"), + (0x1130F, "V"), + (0x11311, "X"), + (0x11313, "V"), + (0x11329, "X"), + (0x1132A, "V"), + (0x11331, "X"), + (0x11332, "V"), + (0x11334, "X"), + (0x11335, "V"), + (0x1133A, "X"), + (0x1133B, "V"), + (0x11345, "X"), + (0x11347, "V"), + (0x11349, "X"), + (0x1134B, "V"), + (0x1134E, "X"), + (0x11350, "V"), + (0x11351, "X"), + (0x11357, "V"), + (0x11358, "X"), + (0x1135D, "V"), + (0x11364, "X"), + (0x11366, "V"), + (0x1136D, "X"), + (0x11370, "V"), + (0x11375, "X"), + (0x11380, "V"), + (0x1138A, "X"), + (0x1138B, "V"), + (0x1138C, "X"), + (0x1138E, "V"), + (0x1138F, "X"), + (0x11390, "V"), + (0x113B6, "X"), + (0x113B7, "V"), + (0x113C1, "X"), + (0x113C2, "V"), + (0x113C3, "X"), + (0x113C5, "V"), + (0x113C6, "X"), + (0x113C7, "V"), + (0x113CB, "X"), + (0x113CC, "V"), + (0x113D6, "X"), + (0x113D7, "V"), + (0x113D9, "X"), + (0x113E1, "V"), + (0x113E3, "X"), + (0x11400, "V"), + (0x1145C, "X"), + (0x1145D, "V"), + (0x11462, "X"), + (0x11480, "V"), + (0x114C8, "X"), + (0x114D0, "V"), + (0x114DA, "X"), + (0x11580, "V"), + (0x115B6, "X"), + (0x115B8, "V"), + (0x115DE, "X"), + (0x11600, "V"), + (0x11645, "X"), + (0x11650, "V"), + (0x1165A, "X"), + (0x11660, "V"), + (0x1166D, "X"), + (0x11680, "V"), + (0x116BA, "X"), + (0x116C0, "V"), + (0x116CA, "X"), + (0x116D0, "V"), + (0x116E4, "X"), + (0x11700, "V"), + (0x1171B, "X"), + (0x1171D, "V"), + (0x1172C, "X"), + (0x11730, "V"), + (0x11747, "X"), + (0x11800, "V"), + (0x1183C, "X"), + (0x118A0, "M", "𑣀"), + (0x118A1, "M", "𑣁"), + (0x118A2, "M", "𑣂"), + (0x118A3, "M", "𑣃"), + (0x118A4, "M", "𑣄"), + (0x118A5, "M", "𑣅"), + (0x118A6, "M", "𑣆"), + (0x118A7, "M", "𑣇"), + (0x118A8, "M", "𑣈"), + (0x118A9, "M", "𑣉"), + (0x118AA, "M", "𑣊"), + (0x118AB, "M", "𑣋"), + (0x118AC, "M", "𑣌"), + (0x118AD, "M", "𑣍"), + (0x118AE, "M", "𑣎"), + (0x118AF, "M", "𑣏"), + ] + + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118B0, "M", "𑣐"), + (0x118B1, "M", "𑣑"), + (0x118B2, "M", "𑣒"), + (0x118B3, "M", "𑣓"), + (0x118B4, "M", "𑣔"), + (0x118B5, "M", "𑣕"), + (0x118B6, "M", "𑣖"), + (0x118B7, "M", "𑣗"), + (0x118B8, "M", "𑣘"), + (0x118B9, "M", "𑣙"), + (0x118BA, "M", "𑣚"), + (0x118BB, "M", "𑣛"), + (0x118BC, "M", "𑣜"), + (0x118BD, "M", "𑣝"), + (0x118BE, "M", "𑣞"), + (0x118BF, "M", "𑣟"), + (0x118C0, "V"), + (0x118F3, "X"), + (0x118FF, "V"), + (0x11907, "X"), + (0x11909, "V"), + (0x1190A, "X"), + (0x1190C, "V"), + (0x11914, "X"), + (0x11915, "V"), + (0x11917, "X"), + (0x11918, "V"), + (0x11936, "X"), + (0x11937, "V"), + (0x11939, "X"), + (0x1193B, "V"), + (0x11947, "X"), + (0x11950, "V"), + (0x1195A, "X"), + (0x119A0, "V"), + (0x119A8, "X"), + (0x119AA, "V"), + (0x119D8, "X"), + (0x119DA, "V"), + (0x119E5, "X"), + (0x11A00, "V"), + (0x11A48, "X"), + (0x11A50, "V"), + (0x11AA3, "X"), + (0x11AB0, "V"), + (0x11AF9, "X"), + (0x11B00, "V"), + (0x11B0A, "X"), + (0x11BC0, "V"), + (0x11BE2, "X"), + (0x11BF0, "V"), + (0x11BFA, "X"), + (0x11C00, "V"), + (0x11C09, "X"), + (0x11C0A, "V"), + (0x11C37, "X"), + (0x11C38, "V"), + (0x11C46, "X"), + (0x11C50, "V"), + (0x11C6D, "X"), + (0x11C70, "V"), + (0x11C90, "X"), + (0x11C92, "V"), + (0x11CA8, "X"), + (0x11CA9, "V"), + (0x11CB7, 
"X"), + (0x11D00, "V"), + (0x11D07, "X"), + (0x11D08, "V"), + (0x11D0A, "X"), + (0x11D0B, "V"), + (0x11D37, "X"), + (0x11D3A, "V"), + (0x11D3B, "X"), + (0x11D3C, "V"), + (0x11D3E, "X"), + (0x11D3F, "V"), + (0x11D48, "X"), + (0x11D50, "V"), + (0x11D5A, "X"), + (0x11D60, "V"), + (0x11D66, "X"), + (0x11D67, "V"), + (0x11D69, "X"), + (0x11D6A, "V"), + (0x11D8F, "X"), + (0x11D90, "V"), + (0x11D92, "X"), + (0x11D93, "V"), + (0x11D99, "X"), + (0x11DA0, "V"), + (0x11DAA, "X"), + (0x11EE0, "V"), + (0x11EF9, "X"), + (0x11F00, "V"), + (0x11F11, "X"), + (0x11F12, "V"), + (0x11F3B, "X"), + (0x11F3E, "V"), + (0x11F5B, "X"), + ] + + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11FB0, "V"), + (0x11FB1, "X"), + (0x11FC0, "V"), + (0x11FF2, "X"), + (0x11FFF, "V"), + (0x1239A, "X"), + (0x12400, "V"), + (0x1246F, "X"), + (0x12470, "V"), + (0x12475, "X"), + (0x12480, "V"), + (0x12544, "X"), + (0x12F90, "V"), + (0x12FF3, "X"), + (0x13000, "V"), + (0x13430, "X"), + (0x13440, "V"), + (0x13456, "X"), + (0x13460, "V"), + (0x143FB, "X"), + (0x14400, "V"), + (0x14647, "X"), + (0x16100, "V"), + (0x1613A, "X"), + (0x16800, "V"), + (0x16A39, "X"), + (0x16A40, "V"), + (0x16A5F, "X"), + (0x16A60, "V"), + (0x16A6A, "X"), + (0x16A6E, "V"), + (0x16ABF, "X"), + (0x16AC0, "V"), + (0x16ACA, "X"), + (0x16AD0, "V"), + (0x16AEE, "X"), + (0x16AF0, "V"), + (0x16AF6, "X"), + (0x16B00, "V"), + (0x16B46, "X"), + (0x16B50, "V"), + (0x16B5A, "X"), + (0x16B5B, "V"), + (0x16B62, "X"), + (0x16B63, "V"), + (0x16B78, "X"), + (0x16B7D, "V"), + (0x16B90, "X"), + (0x16D40, "V"), + (0x16D7A, "X"), + (0x16E40, "M", "𖹠"), + (0x16E41, "M", "𖹡"), + (0x16E42, "M", "𖹢"), + (0x16E43, "M", "𖹣"), + (0x16E44, "M", "𖹤"), + (0x16E45, "M", "𖹥"), + (0x16E46, "M", "𖹦"), + (0x16E47, "M", "𖹧"), + (0x16E48, "M", "𖹨"), + (0x16E49, "M", "𖹩"), + (0x16E4A, "M", "𖹪"), + (0x16E4B, "M", "𖹫"), + (0x16E4C, "M", "𖹬"), + (0x16E4D, "M", "𖹭"), + (0x16E4E, "M", "𖹮"), + (0x16E4F, "M", "𖹯"), + (0x16E50, "M", "𖹰"), + (0x16E51, "M", "𖹱"), + (0x16E52, "M", "𖹲"), + (0x16E53, "M", "𖹳"), + (0x16E54, "M", "𖹴"), + (0x16E55, "M", "𖹵"), + (0x16E56, "M", "𖹶"), + (0x16E57, "M", "𖹷"), + (0x16E58, "M", "𖹸"), + (0x16E59, "M", "𖹹"), + (0x16E5A, "M", "𖹺"), + (0x16E5B, "M", "𖹻"), + (0x16E5C, "M", "𖹼"), + (0x16E5D, "M", "𖹽"), + (0x16E5E, "M", "𖹾"), + (0x16E5F, "M", "𖹿"), + (0x16E60, "V"), + (0x16E9B, "X"), + (0x16F00, "V"), + (0x16F4B, "X"), + (0x16F4F, "V"), + (0x16F88, "X"), + (0x16F8F, "V"), + (0x16FA0, "X"), + (0x16FE0, "V"), + (0x16FE5, "X"), + (0x16FF0, "V"), + (0x16FF2, "X"), + (0x17000, "V"), + (0x187F8, "X"), + (0x18800, "V"), + (0x18CD6, "X"), + (0x18CFF, "V"), + (0x18D09, "X"), + ] + + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFF0, "V"), + (0x1AFF4, "X"), + (0x1AFF5, "V"), + (0x1AFFC, "X"), + (0x1AFFD, "V"), + (0x1AFFF, "X"), + (0x1B000, "V"), + (0x1B123, "X"), + (0x1B132, "V"), + (0x1B133, "X"), + (0x1B150, "V"), + (0x1B153, "X"), + (0x1B155, "V"), + (0x1B156, "X"), + (0x1B164, "V"), + (0x1B168, "X"), + (0x1B170, "V"), + (0x1B2FC, "X"), + (0x1BC00, "V"), + (0x1BC6B, "X"), + (0x1BC70, "V"), + (0x1BC7D, "X"), + (0x1BC80, "V"), + (0x1BC89, "X"), + (0x1BC90, "V"), + (0x1BC9A, "X"), + (0x1BC9C, "V"), + (0x1BCA0, "I"), + (0x1BCA4, "X"), + (0x1CC00, "V"), + (0x1CCD6, "M", "a"), + (0x1CCD7, "M", "b"), + (0x1CCD8, "M", "c"), + (0x1CCD9, "M", "d"), + (0x1CCDA, "M", "e"), + (0x1CCDB, "M", "f"), + (0x1CCDC, "M", "g"), + (0x1CCDD, "M", "h"), + (0x1CCDE, "M", "i"), + (0x1CCDF, "M", "j"), + (0x1CCE0, "M", "k"), + 
(0x1CCE1, "M", "l"), + (0x1CCE2, "M", "m"), + (0x1CCE3, "M", "n"), + (0x1CCE4, "M", "o"), + (0x1CCE5, "M", "p"), + (0x1CCE6, "M", "q"), + (0x1CCE7, "M", "r"), + (0x1CCE8, "M", "s"), + (0x1CCE9, "M", "t"), + (0x1CCEA, "M", "u"), + (0x1CCEB, "M", "v"), + (0x1CCEC, "M", "w"), + (0x1CCED, "M", "x"), + (0x1CCEE, "M", "y"), + (0x1CCEF, "M", "z"), + (0x1CCF0, "M", "0"), + (0x1CCF1, "M", "1"), + (0x1CCF2, "M", "2"), + (0x1CCF3, "M", "3"), + (0x1CCF4, "M", "4"), + (0x1CCF5, "M", "5"), + (0x1CCF6, "M", "6"), + (0x1CCF7, "M", "7"), + (0x1CCF8, "M", "8"), + (0x1CCF9, "M", "9"), + (0x1CCFA, "X"), + (0x1CD00, "V"), + (0x1CEB4, "X"), + (0x1CF00, "V"), + (0x1CF2E, "X"), + (0x1CF30, "V"), + (0x1CF47, "X"), + (0x1CF50, "V"), + (0x1CFC4, "X"), + (0x1D000, "V"), + (0x1D0F6, "X"), + (0x1D100, "V"), + (0x1D127, "X"), + (0x1D129, "V"), + (0x1D15E, "M", "𝅗𝅥"), + (0x1D15F, "M", "𝅘𝅥"), + (0x1D160, "M", "𝅘𝅥𝅮"), + (0x1D161, "M", "𝅘𝅥𝅯"), + (0x1D162, "M", "𝅘𝅥𝅰"), + (0x1D163, "M", "𝅘𝅥𝅱"), + (0x1D164, "M", "𝅘𝅥𝅲"), + (0x1D165, "V"), + (0x1D173, "I"), + (0x1D17B, "V"), + (0x1D1BB, "M", "𝆹𝅥"), + (0x1D1BC, "M", "𝆺𝅥"), + (0x1D1BD, "M", "𝆹𝅥𝅮"), + (0x1D1BE, "M", "𝆺𝅥𝅮"), + (0x1D1BF, "M", "𝆹𝅥𝅯"), + (0x1D1C0, "M", "𝆺𝅥𝅯"), + (0x1D1C1, "V"), + (0x1D1EB, "X"), + (0x1D200, "V"), + (0x1D246, "X"), + ] + + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D2C0, "V"), + (0x1D2D4, "X"), + (0x1D2E0, "V"), + (0x1D2F4, "X"), + (0x1D300, "V"), + (0x1D357, "X"), + (0x1D360, "V"), + (0x1D379, "X"), + (0x1D400, "M", "a"), + (0x1D401, "M", "b"), + (0x1D402, "M", "c"), + (0x1D403, "M", "d"), + (0x1D404, "M", "e"), + (0x1D405, "M", "f"), + (0x1D406, "M", "g"), + (0x1D407, "M", "h"), + (0x1D408, "M", "i"), + (0x1D409, "M", "j"), + (0x1D40A, "M", "k"), + (0x1D40B, "M", "l"), + (0x1D40C, "M", "m"), + (0x1D40D, "M", "n"), + (0x1D40E, "M", "o"), + (0x1D40F, "M", "p"), + (0x1D410, "M", "q"), + (0x1D411, "M", "r"), + (0x1D412, "M", "s"), + (0x1D413, "M", "t"), + (0x1D414, "M", "u"), + (0x1D415, "M", "v"), + (0x1D416, "M", "w"), + (0x1D417, "M", "x"), + (0x1D418, "M", "y"), + (0x1D419, "M", "z"), + (0x1D41A, "M", "a"), + (0x1D41B, "M", "b"), + (0x1D41C, "M", "c"), + (0x1D41D, "M", "d"), + (0x1D41E, "M", "e"), + (0x1D41F, "M", "f"), + (0x1D420, "M", "g"), + (0x1D421, "M", "h"), + (0x1D422, "M", "i"), + (0x1D423, "M", "j"), + (0x1D424, "M", "k"), + (0x1D425, "M", "l"), + (0x1D426, "M", "m"), + (0x1D427, "M", "n"), + (0x1D428, "M", "o"), + (0x1D429, "M", "p"), + (0x1D42A, "M", "q"), + (0x1D42B, "M", "r"), + (0x1D42C, "M", "s"), + (0x1D42D, "M", "t"), + (0x1D42E, "M", "u"), + (0x1D42F, "M", "v"), + (0x1D430, "M", "w"), + (0x1D431, "M", "x"), + (0x1D432, "M", "y"), + (0x1D433, "M", "z"), + (0x1D434, "M", "a"), + (0x1D435, "M", "b"), + (0x1D436, "M", "c"), + (0x1D437, "M", "d"), + (0x1D438, "M", "e"), + (0x1D439, "M", "f"), + (0x1D43A, "M", "g"), + (0x1D43B, "M", "h"), + (0x1D43C, "M", "i"), + (0x1D43D, "M", "j"), + (0x1D43E, "M", "k"), + (0x1D43F, "M", "l"), + (0x1D440, "M", "m"), + (0x1D441, "M", "n"), + (0x1D442, "M", "o"), + (0x1D443, "M", "p"), + (0x1D444, "M", "q"), + (0x1D445, "M", "r"), + (0x1D446, "M", "s"), + (0x1D447, "M", "t"), + (0x1D448, "M", "u"), + (0x1D449, "M", "v"), + (0x1D44A, "M", "w"), + (0x1D44B, "M", "x"), + (0x1D44C, "M", "y"), + (0x1D44D, "M", "z"), + (0x1D44E, "M", "a"), + (0x1D44F, "M", "b"), + (0x1D450, "M", "c"), + (0x1D451, "M", "d"), + (0x1D452, "M", "e"), + (0x1D453, "M", "f"), + (0x1D454, "M", "g"), + (0x1D455, "X"), + (0x1D456, "M", "i"), + (0x1D457, "M", "j"), + (0x1D458, "M", "k"), + 
(0x1D459, "M", "l"), + (0x1D45A, "M", "m"), + (0x1D45B, "M", "n"), + ] + + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D45C, "M", "o"), + (0x1D45D, "M", "p"), + (0x1D45E, "M", "q"), + (0x1D45F, "M", "r"), + (0x1D460, "M", "s"), + (0x1D461, "M", "t"), + (0x1D462, "M", "u"), + (0x1D463, "M", "v"), + (0x1D464, "M", "w"), + (0x1D465, "M", "x"), + (0x1D466, "M", "y"), + (0x1D467, "M", "z"), + (0x1D468, "M", "a"), + (0x1D469, "M", "b"), + (0x1D46A, "M", "c"), + (0x1D46B, "M", "d"), + (0x1D46C, "M", "e"), + (0x1D46D, "M", "f"), + (0x1D46E, "M", "g"), + (0x1D46F, "M", "h"), + (0x1D470, "M", "i"), + (0x1D471, "M", "j"), + (0x1D472, "M", "k"), + (0x1D473, "M", "l"), + (0x1D474, "M", "m"), + (0x1D475, "M", "n"), + (0x1D476, "M", "o"), + (0x1D477, "M", "p"), + (0x1D478, "M", "q"), + (0x1D479, "M", "r"), + (0x1D47A, "M", "s"), + (0x1D47B, "M", "t"), + (0x1D47C, "M", "u"), + (0x1D47D, "M", "v"), + (0x1D47E, "M", "w"), + (0x1D47F, "M", "x"), + (0x1D480, "M", "y"), + (0x1D481, "M", "z"), + (0x1D482, "M", "a"), + (0x1D483, "M", "b"), + (0x1D484, "M", "c"), + (0x1D485, "M", "d"), + (0x1D486, "M", "e"), + (0x1D487, "M", "f"), + (0x1D488, "M", "g"), + (0x1D489, "M", "h"), + (0x1D48A, "M", "i"), + (0x1D48B, "M", "j"), + (0x1D48C, "M", "k"), + (0x1D48D, "M", "l"), + (0x1D48E, "M", "m"), + (0x1D48F, "M", "n"), + (0x1D490, "M", "o"), + (0x1D491, "M", "p"), + (0x1D492, "M", "q"), + (0x1D493, "M", "r"), + (0x1D494, "M", "s"), + (0x1D495, "M", "t"), + (0x1D496, "M", "u"), + (0x1D497, "M", "v"), + (0x1D498, "M", "w"), + (0x1D499, "M", "x"), + (0x1D49A, "M", "y"), + (0x1D49B, "M", "z"), + (0x1D49C, "M", "a"), + (0x1D49D, "X"), + (0x1D49E, "M", "c"), + (0x1D49F, "M", "d"), + (0x1D4A0, "X"), + (0x1D4A2, "M", "g"), + (0x1D4A3, "X"), + (0x1D4A5, "M", "j"), + (0x1D4A6, "M", "k"), + (0x1D4A7, "X"), + (0x1D4A9, "M", "n"), + (0x1D4AA, "M", "o"), + (0x1D4AB, "M", "p"), + (0x1D4AC, "M", "q"), + (0x1D4AD, "X"), + (0x1D4AE, "M", "s"), + (0x1D4AF, "M", "t"), + (0x1D4B0, "M", "u"), + (0x1D4B1, "M", "v"), + (0x1D4B2, "M", "w"), + (0x1D4B3, "M", "x"), + (0x1D4B4, "M", "y"), + (0x1D4B5, "M", "z"), + (0x1D4B6, "M", "a"), + (0x1D4B7, "M", "b"), + (0x1D4B8, "M", "c"), + (0x1D4B9, "M", "d"), + (0x1D4BA, "X"), + (0x1D4BB, "M", "f"), + (0x1D4BC, "X"), + (0x1D4BD, "M", "h"), + (0x1D4BE, "M", "i"), + (0x1D4BF, "M", "j"), + (0x1D4C0, "M", "k"), + (0x1D4C1, "M", "l"), + (0x1D4C2, "M", "m"), + ] + + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4C3, "M", "n"), + (0x1D4C4, "X"), + (0x1D4C5, "M", "p"), + (0x1D4C6, "M", "q"), + (0x1D4C7, "M", "r"), + (0x1D4C8, "M", "s"), + (0x1D4C9, "M", "t"), + (0x1D4CA, "M", "u"), + (0x1D4CB, "M", "v"), + (0x1D4CC, "M", "w"), + (0x1D4CD, "M", "x"), + (0x1D4CE, "M", "y"), + (0x1D4CF, "M", "z"), + (0x1D4D0, "M", "a"), + (0x1D4D1, "M", "b"), + (0x1D4D2, "M", "c"), + (0x1D4D3, "M", "d"), + (0x1D4D4, "M", "e"), + (0x1D4D5, "M", "f"), + (0x1D4D6, "M", "g"), + (0x1D4D7, "M", "h"), + (0x1D4D8, "M", "i"), + (0x1D4D9, "M", "j"), + (0x1D4DA, "M", "k"), + (0x1D4DB, "M", "l"), + (0x1D4DC, "M", "m"), + (0x1D4DD, "M", "n"), + (0x1D4DE, "M", "o"), + (0x1D4DF, "M", "p"), + (0x1D4E0, "M", "q"), + (0x1D4E1, "M", "r"), + (0x1D4E2, "M", "s"), + (0x1D4E3, "M", "t"), + (0x1D4E4, "M", "u"), + (0x1D4E5, "M", "v"), + (0x1D4E6, "M", "w"), + (0x1D4E7, "M", "x"), + (0x1D4E8, "M", "y"), + (0x1D4E9, "M", "z"), + (0x1D4EA, "M", "a"), + (0x1D4EB, "M", "b"), + (0x1D4EC, "M", "c"), + (0x1D4ED, "M", "d"), + (0x1D4EE, "M", "e"), + (0x1D4EF, "M", "f"), + (0x1D4F0, 
"M", "g"), + (0x1D4F1, "M", "h"), + (0x1D4F2, "M", "i"), + (0x1D4F3, "M", "j"), + (0x1D4F4, "M", "k"), + (0x1D4F5, "M", "l"), + (0x1D4F6, "M", "m"), + (0x1D4F7, "M", "n"), + (0x1D4F8, "M", "o"), + (0x1D4F9, "M", "p"), + (0x1D4FA, "M", "q"), + (0x1D4FB, "M", "r"), + (0x1D4FC, "M", "s"), + (0x1D4FD, "M", "t"), + (0x1D4FE, "M", "u"), + (0x1D4FF, "M", "v"), + (0x1D500, "M", "w"), + (0x1D501, "M", "x"), + (0x1D502, "M", "y"), + (0x1D503, "M", "z"), + (0x1D504, "M", "a"), + (0x1D505, "M", "b"), + (0x1D506, "X"), + (0x1D507, "M", "d"), + (0x1D508, "M", "e"), + (0x1D509, "M", "f"), + (0x1D50A, "M", "g"), + (0x1D50B, "X"), + (0x1D50D, "M", "j"), + (0x1D50E, "M", "k"), + (0x1D50F, "M", "l"), + (0x1D510, "M", "m"), + (0x1D511, "M", "n"), + (0x1D512, "M", "o"), + (0x1D513, "M", "p"), + (0x1D514, "M", "q"), + (0x1D515, "X"), + (0x1D516, "M", "s"), + (0x1D517, "M", "t"), + (0x1D518, "M", "u"), + (0x1D519, "M", "v"), + (0x1D51A, "M", "w"), + (0x1D51B, "M", "x"), + (0x1D51C, "M", "y"), + (0x1D51D, "X"), + (0x1D51E, "M", "a"), + (0x1D51F, "M", "b"), + (0x1D520, "M", "c"), + (0x1D521, "M", "d"), + (0x1D522, "M", "e"), + (0x1D523, "M", "f"), + (0x1D524, "M", "g"), + (0x1D525, "M", "h"), + (0x1D526, "M", "i"), + (0x1D527, "M", "j"), + ] + + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D528, "M", "k"), + (0x1D529, "M", "l"), + (0x1D52A, "M", "m"), + (0x1D52B, "M", "n"), + (0x1D52C, "M", "o"), + (0x1D52D, "M", "p"), + (0x1D52E, "M", "q"), + (0x1D52F, "M", "r"), + (0x1D530, "M", "s"), + (0x1D531, "M", "t"), + (0x1D532, "M", "u"), + (0x1D533, "M", "v"), + (0x1D534, "M", "w"), + (0x1D535, "M", "x"), + (0x1D536, "M", "y"), + (0x1D537, "M", "z"), + (0x1D538, "M", "a"), + (0x1D539, "M", "b"), + (0x1D53A, "X"), + (0x1D53B, "M", "d"), + (0x1D53C, "M", "e"), + (0x1D53D, "M", "f"), + (0x1D53E, "M", "g"), + (0x1D53F, "X"), + (0x1D540, "M", "i"), + (0x1D541, "M", "j"), + (0x1D542, "M", "k"), + (0x1D543, "M", "l"), + (0x1D544, "M", "m"), + (0x1D545, "X"), + (0x1D546, "M", "o"), + (0x1D547, "X"), + (0x1D54A, "M", "s"), + (0x1D54B, "M", "t"), + (0x1D54C, "M", "u"), + (0x1D54D, "M", "v"), + (0x1D54E, "M", "w"), + (0x1D54F, "M", "x"), + (0x1D550, "M", "y"), + (0x1D551, "X"), + (0x1D552, "M", "a"), + (0x1D553, "M", "b"), + (0x1D554, "M", "c"), + (0x1D555, "M", "d"), + (0x1D556, "M", "e"), + (0x1D557, "M", "f"), + (0x1D558, "M", "g"), + (0x1D559, "M", "h"), + (0x1D55A, "M", "i"), + (0x1D55B, "M", "j"), + (0x1D55C, "M", "k"), + (0x1D55D, "M", "l"), + (0x1D55E, "M", "m"), + (0x1D55F, "M", "n"), + (0x1D560, "M", "o"), + (0x1D561, "M", "p"), + (0x1D562, "M", "q"), + (0x1D563, "M", "r"), + (0x1D564, "M", "s"), + (0x1D565, "M", "t"), + (0x1D566, "M", "u"), + (0x1D567, "M", "v"), + (0x1D568, "M", "w"), + (0x1D569, "M", "x"), + (0x1D56A, "M", "y"), + (0x1D56B, "M", "z"), + (0x1D56C, "M", "a"), + (0x1D56D, "M", "b"), + (0x1D56E, "M", "c"), + (0x1D56F, "M", "d"), + (0x1D570, "M", "e"), + (0x1D571, "M", "f"), + (0x1D572, "M", "g"), + (0x1D573, "M", "h"), + (0x1D574, "M", "i"), + (0x1D575, "M", "j"), + (0x1D576, "M", "k"), + (0x1D577, "M", "l"), + (0x1D578, "M", "m"), + (0x1D579, "M", "n"), + (0x1D57A, "M", "o"), + (0x1D57B, "M", "p"), + (0x1D57C, "M", "q"), + (0x1D57D, "M", "r"), + (0x1D57E, "M", "s"), + (0x1D57F, "M", "t"), + (0x1D580, "M", "u"), + (0x1D581, "M", "v"), + (0x1D582, "M", "w"), + (0x1D583, "M", "x"), + (0x1D584, "M", "y"), + (0x1D585, "M", "z"), + (0x1D586, "M", "a"), + (0x1D587, "M", "b"), + (0x1D588, "M", "c"), + (0x1D589, "M", "d"), + (0x1D58A, "M", "e"), + (0x1D58B, "M", "f"), + 
(0x1D58C, "M", "g"), + (0x1D58D, "M", "h"), + ] + + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D58E, "M", "i"), + (0x1D58F, "M", "j"), + (0x1D590, "M", "k"), + (0x1D591, "M", "l"), + (0x1D592, "M", "m"), + (0x1D593, "M", "n"), + (0x1D594, "M", "o"), + (0x1D595, "M", "p"), + (0x1D596, "M", "q"), + (0x1D597, "M", "r"), + (0x1D598, "M", "s"), + (0x1D599, "M", "t"), + (0x1D59A, "M", "u"), + (0x1D59B, "M", "v"), + (0x1D59C, "M", "w"), + (0x1D59D, "M", "x"), + (0x1D59E, "M", "y"), + (0x1D59F, "M", "z"), + (0x1D5A0, "M", "a"), + (0x1D5A1, "M", "b"), + (0x1D5A2, "M", "c"), + (0x1D5A3, "M", "d"), + (0x1D5A4, "M", "e"), + (0x1D5A5, "M", "f"), + (0x1D5A6, "M", "g"), + (0x1D5A7, "M", "h"), + (0x1D5A8, "M", "i"), + (0x1D5A9, "M", "j"), + (0x1D5AA, "M", "k"), + (0x1D5AB, "M", "l"), + (0x1D5AC, "M", "m"), + (0x1D5AD, "M", "n"), + (0x1D5AE, "M", "o"), + (0x1D5AF, "M", "p"), + (0x1D5B0, "M", "q"), + (0x1D5B1, "M", "r"), + (0x1D5B2, "M", "s"), + (0x1D5B3, "M", "t"), + (0x1D5B4, "M", "u"), + (0x1D5B5, "M", "v"), + (0x1D5B6, "M", "w"), + (0x1D5B7, "M", "x"), + (0x1D5B8, "M", "y"), + (0x1D5B9, "M", "z"), + (0x1D5BA, "M", "a"), + (0x1D5BB, "M", "b"), + (0x1D5BC, "M", "c"), + (0x1D5BD, "M", "d"), + (0x1D5BE, "M", "e"), + (0x1D5BF, "M", "f"), + (0x1D5C0, "M", "g"), + (0x1D5C1, "M", "h"), + (0x1D5C2, "M", "i"), + (0x1D5C3, "M", "j"), + (0x1D5C4, "M", "k"), + (0x1D5C5, "M", "l"), + (0x1D5C6, "M", "m"), + (0x1D5C7, "M", "n"), + (0x1D5C8, "M", "o"), + (0x1D5C9, "M", "p"), + (0x1D5CA, "M", "q"), + (0x1D5CB, "M", "r"), + (0x1D5CC, "M", "s"), + (0x1D5CD, "M", "t"), + (0x1D5CE, "M", "u"), + (0x1D5CF, "M", "v"), + (0x1D5D0, "M", "w"), + (0x1D5D1, "M", "x"), + (0x1D5D2, "M", "y"), + (0x1D5D3, "M", "z"), + (0x1D5D4, "M", "a"), + (0x1D5D5, "M", "b"), + (0x1D5D6, "M", "c"), + (0x1D5D7, "M", "d"), + (0x1D5D8, "M", "e"), + (0x1D5D9, "M", "f"), + (0x1D5DA, "M", "g"), + (0x1D5DB, "M", "h"), + (0x1D5DC, "M", "i"), + (0x1D5DD, "M", "j"), + (0x1D5DE, "M", "k"), + (0x1D5DF, "M", "l"), + (0x1D5E0, "M", "m"), + (0x1D5E1, "M", "n"), + (0x1D5E2, "M", "o"), + (0x1D5E3, "M", "p"), + (0x1D5E4, "M", "q"), + (0x1D5E5, "M", "r"), + (0x1D5E6, "M", "s"), + (0x1D5E7, "M", "t"), + (0x1D5E8, "M", "u"), + (0x1D5E9, "M", "v"), + (0x1D5EA, "M", "w"), + (0x1D5EB, "M", "x"), + (0x1D5EC, "M", "y"), + (0x1D5ED, "M", "z"), + (0x1D5EE, "M", "a"), + (0x1D5EF, "M", "b"), + (0x1D5F0, "M", "c"), + (0x1D5F1, "M", "d"), + ] + + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5F2, "M", "e"), + (0x1D5F3, "M", "f"), + (0x1D5F4, "M", "g"), + (0x1D5F5, "M", "h"), + (0x1D5F6, "M", "i"), + (0x1D5F7, "M", "j"), + (0x1D5F8, "M", "k"), + (0x1D5F9, "M", "l"), + (0x1D5FA, "M", "m"), + (0x1D5FB, "M", "n"), + (0x1D5FC, "M", "o"), + (0x1D5FD, "M", "p"), + (0x1D5FE, "M", "q"), + (0x1D5FF, "M", "r"), + (0x1D600, "M", "s"), + (0x1D601, "M", "t"), + (0x1D602, "M", "u"), + (0x1D603, "M", "v"), + (0x1D604, "M", "w"), + (0x1D605, "M", "x"), + (0x1D606, "M", "y"), + (0x1D607, "M", "z"), + (0x1D608, "M", "a"), + (0x1D609, "M", "b"), + (0x1D60A, "M", "c"), + (0x1D60B, "M", "d"), + (0x1D60C, "M", "e"), + (0x1D60D, "M", "f"), + (0x1D60E, "M", "g"), + (0x1D60F, "M", "h"), + (0x1D610, "M", "i"), + (0x1D611, "M", "j"), + (0x1D612, "M", "k"), + (0x1D613, "M", "l"), + (0x1D614, "M", "m"), + (0x1D615, "M", "n"), + (0x1D616, "M", "o"), + (0x1D617, "M", "p"), + (0x1D618, "M", "q"), + (0x1D619, "M", "r"), + (0x1D61A, "M", "s"), + (0x1D61B, "M", "t"), + (0x1D61C, "M", "u"), + (0x1D61D, "M", "v"), + (0x1D61E, "M", 
"w"), + (0x1D61F, "M", "x"), + (0x1D620, "M", "y"), + (0x1D621, "M", "z"), + (0x1D622, "M", "a"), + (0x1D623, "M", "b"), + (0x1D624, "M", "c"), + (0x1D625, "M", "d"), + (0x1D626, "M", "e"), + (0x1D627, "M", "f"), + (0x1D628, "M", "g"), + (0x1D629, "M", "h"), + (0x1D62A, "M", "i"), + (0x1D62B, "M", "j"), + (0x1D62C, "M", "k"), + (0x1D62D, "M", "l"), + (0x1D62E, "M", "m"), + (0x1D62F, "M", "n"), + (0x1D630, "M", "o"), + (0x1D631, "M", "p"), + (0x1D632, "M", "q"), + (0x1D633, "M", "r"), + (0x1D634, "M", "s"), + (0x1D635, "M", "t"), + (0x1D636, "M", "u"), + (0x1D637, "M", "v"), + (0x1D638, "M", "w"), + (0x1D639, "M", "x"), + (0x1D63A, "M", "y"), + (0x1D63B, "M", "z"), + (0x1D63C, "M", "a"), + (0x1D63D, "M", "b"), + (0x1D63E, "M", "c"), + (0x1D63F, "M", "d"), + (0x1D640, "M", "e"), + (0x1D641, "M", "f"), + (0x1D642, "M", "g"), + (0x1D643, "M", "h"), + (0x1D644, "M", "i"), + (0x1D645, "M", "j"), + (0x1D646, "M", "k"), + (0x1D647, "M", "l"), + (0x1D648, "M", "m"), + (0x1D649, "M", "n"), + (0x1D64A, "M", "o"), + (0x1D64B, "M", "p"), + (0x1D64C, "M", "q"), + (0x1D64D, "M", "r"), + (0x1D64E, "M", "s"), + (0x1D64F, "M", "t"), + (0x1D650, "M", "u"), + (0x1D651, "M", "v"), + (0x1D652, "M", "w"), + (0x1D653, "M", "x"), + (0x1D654, "M", "y"), + (0x1D655, "M", "z"), + ] + + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D656, "M", "a"), + (0x1D657, "M", "b"), + (0x1D658, "M", "c"), + (0x1D659, "M", "d"), + (0x1D65A, "M", "e"), + (0x1D65B, "M", "f"), + (0x1D65C, "M", "g"), + (0x1D65D, "M", "h"), + (0x1D65E, "M", "i"), + (0x1D65F, "M", "j"), + (0x1D660, "M", "k"), + (0x1D661, "M", "l"), + (0x1D662, "M", "m"), + (0x1D663, "M", "n"), + (0x1D664, "M", "o"), + (0x1D665, "M", "p"), + (0x1D666, "M", "q"), + (0x1D667, "M", "r"), + (0x1D668, "M", "s"), + (0x1D669, "M", "t"), + (0x1D66A, "M", "u"), + (0x1D66B, "M", "v"), + (0x1D66C, "M", "w"), + (0x1D66D, "M", "x"), + (0x1D66E, "M", "y"), + (0x1D66F, "M", "z"), + (0x1D670, "M", "a"), + (0x1D671, "M", "b"), + (0x1D672, "M", "c"), + (0x1D673, "M", "d"), + (0x1D674, "M", "e"), + (0x1D675, "M", "f"), + (0x1D676, "M", "g"), + (0x1D677, "M", "h"), + (0x1D678, "M", "i"), + (0x1D679, "M", "j"), + (0x1D67A, "M", "k"), + (0x1D67B, "M", "l"), + (0x1D67C, "M", "m"), + (0x1D67D, "M", "n"), + (0x1D67E, "M", "o"), + (0x1D67F, "M", "p"), + (0x1D680, "M", "q"), + (0x1D681, "M", "r"), + (0x1D682, "M", "s"), + (0x1D683, "M", "t"), + (0x1D684, "M", "u"), + (0x1D685, "M", "v"), + (0x1D686, "M", "w"), + (0x1D687, "M", "x"), + (0x1D688, "M", "y"), + (0x1D689, "M", "z"), + (0x1D68A, "M", "a"), + (0x1D68B, "M", "b"), + (0x1D68C, "M", "c"), + (0x1D68D, "M", "d"), + (0x1D68E, "M", "e"), + (0x1D68F, "M", "f"), + (0x1D690, "M", "g"), + (0x1D691, "M", "h"), + (0x1D692, "M", "i"), + (0x1D693, "M", "j"), + (0x1D694, "M", "k"), + (0x1D695, "M", "l"), + (0x1D696, "M", "m"), + (0x1D697, "M", "n"), + (0x1D698, "M", "o"), + (0x1D699, "M", "p"), + (0x1D69A, "M", "q"), + (0x1D69B, "M", "r"), + (0x1D69C, "M", "s"), + (0x1D69D, "M", "t"), + (0x1D69E, "M", "u"), + (0x1D69F, "M", "v"), + (0x1D6A0, "M", "w"), + (0x1D6A1, "M", "x"), + (0x1D6A2, "M", "y"), + (0x1D6A3, "M", "z"), + (0x1D6A4, "M", "ı"), + (0x1D6A5, "M", "ȷ"), + (0x1D6A6, "X"), + (0x1D6A8, "M", "α"), + (0x1D6A9, "M", "β"), + (0x1D6AA, "M", "γ"), + (0x1D6AB, "M", "δ"), + (0x1D6AC, "M", "ε"), + (0x1D6AD, "M", "ζ"), + (0x1D6AE, "M", "η"), + (0x1D6AF, "M", "θ"), + (0x1D6B0, "M", "ι"), + (0x1D6B1, "M", "κ"), + (0x1D6B2, "M", "λ"), + (0x1D6B3, "M", "μ"), + (0x1D6B4, "M", "ν"), + (0x1D6B5, "M", "ξ"), + (0x1D6B6, 
"M", "ο"), + (0x1D6B7, "M", "π"), + (0x1D6B8, "M", "ρ"), + (0x1D6B9, "M", "θ"), + (0x1D6BA, "M", "σ"), + ] + + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6BB, "M", "τ"), + (0x1D6BC, "M", "υ"), + (0x1D6BD, "M", "φ"), + (0x1D6BE, "M", "χ"), + (0x1D6BF, "M", "ψ"), + (0x1D6C0, "M", "ω"), + (0x1D6C1, "M", "∇"), + (0x1D6C2, "M", "α"), + (0x1D6C3, "M", "β"), + (0x1D6C4, "M", "γ"), + (0x1D6C5, "M", "δ"), + (0x1D6C6, "M", "ε"), + (0x1D6C7, "M", "ζ"), + (0x1D6C8, "M", "η"), + (0x1D6C9, "M", "θ"), + (0x1D6CA, "M", "ι"), + (0x1D6CB, "M", "κ"), + (0x1D6CC, "M", "λ"), + (0x1D6CD, "M", "μ"), + (0x1D6CE, "M", "ν"), + (0x1D6CF, "M", "ξ"), + (0x1D6D0, "M", "ο"), + (0x1D6D1, "M", "π"), + (0x1D6D2, "M", "ρ"), + (0x1D6D3, "M", "σ"), + (0x1D6D5, "M", "τ"), + (0x1D6D6, "M", "υ"), + (0x1D6D7, "M", "φ"), + (0x1D6D8, "M", "χ"), + (0x1D6D9, "M", "ψ"), + (0x1D6DA, "M", "ω"), + (0x1D6DB, "M", "∂"), + (0x1D6DC, "M", "ε"), + (0x1D6DD, "M", "θ"), + (0x1D6DE, "M", "κ"), + (0x1D6DF, "M", "φ"), + (0x1D6E0, "M", "ρ"), + (0x1D6E1, "M", "π"), + (0x1D6E2, "M", "α"), + (0x1D6E3, "M", "β"), + (0x1D6E4, "M", "γ"), + (0x1D6E5, "M", "δ"), + (0x1D6E6, "M", "ε"), + (0x1D6E7, "M", "ζ"), + (0x1D6E8, "M", "η"), + (0x1D6E9, "M", "θ"), + (0x1D6EA, "M", "ι"), + (0x1D6EB, "M", "κ"), + (0x1D6EC, "M", "λ"), + (0x1D6ED, "M", "μ"), + (0x1D6EE, "M", "ν"), + (0x1D6EF, "M", "ξ"), + (0x1D6F0, "M", "ο"), + (0x1D6F1, "M", "π"), + (0x1D6F2, "M", "ρ"), + (0x1D6F3, "M", "θ"), + (0x1D6F4, "M", "σ"), + (0x1D6F5, "M", "τ"), + (0x1D6F6, "M", "υ"), + (0x1D6F7, "M", "φ"), + (0x1D6F8, "M", "χ"), + (0x1D6F9, "M", "ψ"), + (0x1D6FA, "M", "ω"), + (0x1D6FB, "M", "∇"), + (0x1D6FC, "M", "α"), + (0x1D6FD, "M", "β"), + (0x1D6FE, "M", "γ"), + (0x1D6FF, "M", "δ"), + (0x1D700, "M", "ε"), + (0x1D701, "M", "ζ"), + (0x1D702, "M", "η"), + (0x1D703, "M", "θ"), + (0x1D704, "M", "ι"), + (0x1D705, "M", "κ"), + (0x1D706, "M", "λ"), + (0x1D707, "M", "μ"), + (0x1D708, "M", "ν"), + (0x1D709, "M", "ξ"), + (0x1D70A, "M", "ο"), + (0x1D70B, "M", "π"), + (0x1D70C, "M", "ρ"), + (0x1D70D, "M", "σ"), + (0x1D70F, "M", "τ"), + (0x1D710, "M", "υ"), + (0x1D711, "M", "φ"), + (0x1D712, "M", "χ"), + (0x1D713, "M", "ψ"), + (0x1D714, "M", "ω"), + (0x1D715, "M", "∂"), + (0x1D716, "M", "ε"), + (0x1D717, "M", "θ"), + (0x1D718, "M", "κ"), + (0x1D719, "M", "φ"), + (0x1D71A, "M", "ρ"), + (0x1D71B, "M", "π"), + (0x1D71C, "M", "α"), + (0x1D71D, "M", "β"), + (0x1D71E, "M", "γ"), + (0x1D71F, "M", "δ"), + (0x1D720, "M", "ε"), + ] + + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D721, "M", "ζ"), + (0x1D722, "M", "η"), + (0x1D723, "M", "θ"), + (0x1D724, "M", "ι"), + (0x1D725, "M", "κ"), + (0x1D726, "M", "λ"), + (0x1D727, "M", "μ"), + (0x1D728, "M", "ν"), + (0x1D729, "M", "ξ"), + (0x1D72A, "M", "ο"), + (0x1D72B, "M", "π"), + (0x1D72C, "M", "ρ"), + (0x1D72D, "M", "θ"), + (0x1D72E, "M", "σ"), + (0x1D72F, "M", "τ"), + (0x1D730, "M", "υ"), + (0x1D731, "M", "φ"), + (0x1D732, "M", "χ"), + (0x1D733, "M", "ψ"), + (0x1D734, "M", "ω"), + (0x1D735, "M", "∇"), + (0x1D736, "M", "α"), + (0x1D737, "M", "β"), + (0x1D738, "M", "γ"), + (0x1D739, "M", "δ"), + (0x1D73A, "M", "ε"), + (0x1D73B, "M", "ζ"), + (0x1D73C, "M", "η"), + (0x1D73D, "M", "θ"), + (0x1D73E, "M", "ι"), + (0x1D73F, "M", "κ"), + (0x1D740, "M", "λ"), + (0x1D741, "M", "μ"), + (0x1D742, "M", "ν"), + (0x1D743, "M", "ξ"), + (0x1D744, "M", "ο"), + (0x1D745, "M", "π"), + (0x1D746, "M", "ρ"), + (0x1D747, "M", "σ"), + (0x1D749, "M", "τ"), + (0x1D74A, "M", "υ"), + (0x1D74B, "M", "φ"), + 
(0x1D74C, "M", "χ"), + (0x1D74D, "M", "ψ"), + (0x1D74E, "M", "ω"), + (0x1D74F, "M", "∂"), + (0x1D750, "M", "ε"), + (0x1D751, "M", "θ"), + (0x1D752, "M", "κ"), + (0x1D753, "M", "φ"), + (0x1D754, "M", "ρ"), + (0x1D755, "M", "π"), + (0x1D756, "M", "α"), + (0x1D757, "M", "β"), + (0x1D758, "M", "γ"), + (0x1D759, "M", "δ"), + (0x1D75A, "M", "ε"), + (0x1D75B, "M", "ζ"), + (0x1D75C, "M", "η"), + (0x1D75D, "M", "θ"), + (0x1D75E, "M", "ι"), + (0x1D75F, "M", "κ"), + (0x1D760, "M", "λ"), + (0x1D761, "M", "μ"), + (0x1D762, "M", "ν"), + (0x1D763, "M", "ξ"), + (0x1D764, "M", "ο"), + (0x1D765, "M", "π"), + (0x1D766, "M", "ρ"), + (0x1D767, "M", "θ"), + (0x1D768, "M", "σ"), + (0x1D769, "M", "τ"), + (0x1D76A, "M", "υ"), + (0x1D76B, "M", "φ"), + (0x1D76C, "M", "χ"), + (0x1D76D, "M", "ψ"), + (0x1D76E, "M", "ω"), + (0x1D76F, "M", "∇"), + (0x1D770, "M", "α"), + (0x1D771, "M", "β"), + (0x1D772, "M", "γ"), + (0x1D773, "M", "δ"), + (0x1D774, "M", "ε"), + (0x1D775, "M", "ζ"), + (0x1D776, "M", "η"), + (0x1D777, "M", "θ"), + (0x1D778, "M", "ι"), + (0x1D779, "M", "κ"), + (0x1D77A, "M", "λ"), + (0x1D77B, "M", "μ"), + (0x1D77C, "M", "ν"), + (0x1D77D, "M", "ξ"), + (0x1D77E, "M", "ο"), + (0x1D77F, "M", "π"), + (0x1D780, "M", "ρ"), + (0x1D781, "M", "σ"), + (0x1D783, "M", "τ"), + (0x1D784, "M", "υ"), + (0x1D785, "M", "φ"), + (0x1D786, "M", "χ"), + ] + + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D787, "M", "ψ"), + (0x1D788, "M", "ω"), + (0x1D789, "M", "∂"), + (0x1D78A, "M", "ε"), + (0x1D78B, "M", "θ"), + (0x1D78C, "M", "κ"), + (0x1D78D, "M", "φ"), + (0x1D78E, "M", "ρ"), + (0x1D78F, "M", "π"), + (0x1D790, "M", "α"), + (0x1D791, "M", "β"), + (0x1D792, "M", "γ"), + (0x1D793, "M", "δ"), + (0x1D794, "M", "ε"), + (0x1D795, "M", "ζ"), + (0x1D796, "M", "η"), + (0x1D797, "M", "θ"), + (0x1D798, "M", "ι"), + (0x1D799, "M", "κ"), + (0x1D79A, "M", "λ"), + (0x1D79B, "M", "μ"), + (0x1D79C, "M", "ν"), + (0x1D79D, "M", "ξ"), + (0x1D79E, "M", "ο"), + (0x1D79F, "M", "π"), + (0x1D7A0, "M", "ρ"), + (0x1D7A1, "M", "θ"), + (0x1D7A2, "M", "σ"), + (0x1D7A3, "M", "τ"), + (0x1D7A4, "M", "υ"), + (0x1D7A5, "M", "φ"), + (0x1D7A6, "M", "χ"), + (0x1D7A7, "M", "ψ"), + (0x1D7A8, "M", "ω"), + (0x1D7A9, "M", "∇"), + (0x1D7AA, "M", "α"), + (0x1D7AB, "M", "β"), + (0x1D7AC, "M", "γ"), + (0x1D7AD, "M", "δ"), + (0x1D7AE, "M", "ε"), + (0x1D7AF, "M", "ζ"), + (0x1D7B0, "M", "η"), + (0x1D7B1, "M", "θ"), + (0x1D7B2, "M", "ι"), + (0x1D7B3, "M", "κ"), + (0x1D7B4, "M", "λ"), + (0x1D7B5, "M", "μ"), + (0x1D7B6, "M", "ν"), + (0x1D7B7, "M", "ξ"), + (0x1D7B8, "M", "ο"), + (0x1D7B9, "M", "π"), + (0x1D7BA, "M", "ρ"), + (0x1D7BB, "M", "σ"), + (0x1D7BD, "M", "τ"), + (0x1D7BE, "M", "υ"), + (0x1D7BF, "M", "φ"), + (0x1D7C0, "M", "χ"), + (0x1D7C1, "M", "ψ"), + (0x1D7C2, "M", "ω"), + (0x1D7C3, "M", "∂"), + (0x1D7C4, "M", "ε"), + (0x1D7C5, "M", "θ"), + (0x1D7C6, "M", "κ"), + (0x1D7C7, "M", "φ"), + (0x1D7C8, "M", "ρ"), + (0x1D7C9, "M", "π"), + (0x1D7CA, "M", "ϝ"), + (0x1D7CC, "X"), + (0x1D7CE, "M", "0"), + (0x1D7CF, "M", "1"), + (0x1D7D0, "M", "2"), + (0x1D7D1, "M", "3"), + (0x1D7D2, "M", "4"), + (0x1D7D3, "M", "5"), + (0x1D7D4, "M", "6"), + (0x1D7D5, "M", "7"), + (0x1D7D6, "M", "8"), + (0x1D7D7, "M", "9"), + (0x1D7D8, "M", "0"), + (0x1D7D9, "M", "1"), + (0x1D7DA, "M", "2"), + (0x1D7DB, "M", "3"), + (0x1D7DC, "M", "4"), + (0x1D7DD, "M", "5"), + (0x1D7DE, "M", "6"), + (0x1D7DF, "M", "7"), + (0x1D7E0, "M", "8"), + (0x1D7E1, "M", "9"), + (0x1D7E2, "M", "0"), + (0x1D7E3, "M", "1"), + (0x1D7E4, "M", "2"), + (0x1D7E5, "M", "3"), + (0x1D7E6, "M", "4"), 
+ (0x1D7E7, "M", "5"), + (0x1D7E8, "M", "6"), + (0x1D7E9, "M", "7"), + (0x1D7EA, "M", "8"), + (0x1D7EB, "M", "9"), + (0x1D7EC, "M", "0"), + (0x1D7ED, "M", "1"), + ] + + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7EE, "M", "2"), + (0x1D7EF, "M", "3"), + (0x1D7F0, "M", "4"), + (0x1D7F1, "M", "5"), + (0x1D7F2, "M", "6"), + (0x1D7F3, "M", "7"), + (0x1D7F4, "M", "8"), + (0x1D7F5, "M", "9"), + (0x1D7F6, "M", "0"), + (0x1D7F7, "M", "1"), + (0x1D7F8, "M", "2"), + (0x1D7F9, "M", "3"), + (0x1D7FA, "M", "4"), + (0x1D7FB, "M", "5"), + (0x1D7FC, "M", "6"), + (0x1D7FD, "M", "7"), + (0x1D7FE, "M", "8"), + (0x1D7FF, "M", "9"), + (0x1D800, "V"), + (0x1DA8C, "X"), + (0x1DA9B, "V"), + (0x1DAA0, "X"), + (0x1DAA1, "V"), + (0x1DAB0, "X"), + (0x1DF00, "V"), + (0x1DF1F, "X"), + (0x1DF25, "V"), + (0x1DF2B, "X"), + (0x1E000, "V"), + (0x1E007, "X"), + (0x1E008, "V"), + (0x1E019, "X"), + (0x1E01B, "V"), + (0x1E022, "X"), + (0x1E023, "V"), + (0x1E025, "X"), + (0x1E026, "V"), + (0x1E02B, "X"), + (0x1E030, "M", "а"), + (0x1E031, "M", "б"), + (0x1E032, "M", "в"), + (0x1E033, "M", "г"), + (0x1E034, "M", "д"), + (0x1E035, "M", "е"), + (0x1E036, "M", "ж"), + (0x1E037, "M", "з"), + (0x1E038, "M", "и"), + (0x1E039, "M", "к"), + (0x1E03A, "M", "л"), + (0x1E03B, "M", "м"), + (0x1E03C, "M", "о"), + (0x1E03D, "M", "п"), + (0x1E03E, "M", "р"), + (0x1E03F, "M", "с"), + (0x1E040, "M", "т"), + (0x1E041, "M", "у"), + (0x1E042, "M", "ф"), + (0x1E043, "M", "х"), + (0x1E044, "M", "ц"), + (0x1E045, "M", "ч"), + (0x1E046, "M", "ш"), + (0x1E047, "M", "ы"), + (0x1E048, "M", "э"), + (0x1E049, "M", "ю"), + (0x1E04A, "M", "ꚉ"), + (0x1E04B, "M", "ә"), + (0x1E04C, "M", "і"), + (0x1E04D, "M", "ј"), + (0x1E04E, "M", "ө"), + (0x1E04F, "M", "ү"), + (0x1E050, "M", "ӏ"), + (0x1E051, "M", "а"), + (0x1E052, "M", "б"), + (0x1E053, "M", "в"), + (0x1E054, "M", "г"), + (0x1E055, "M", "д"), + (0x1E056, "M", "е"), + (0x1E057, "M", "ж"), + (0x1E058, "M", "з"), + (0x1E059, "M", "и"), + (0x1E05A, "M", "к"), + (0x1E05B, "M", "л"), + (0x1E05C, "M", "о"), + (0x1E05D, "M", "п"), + (0x1E05E, "M", "с"), + (0x1E05F, "M", "у"), + (0x1E060, "M", "ф"), + (0x1E061, "M", "х"), + (0x1E062, "M", "ц"), + (0x1E063, "M", "ч"), + (0x1E064, "M", "ш"), + (0x1E065, "M", "ъ"), + (0x1E066, "M", "ы"), + (0x1E067, "M", "ґ"), + (0x1E068, "M", "і"), + (0x1E069, "M", "ѕ"), + (0x1E06A, "M", "џ"), + (0x1E06B, "M", "ҫ"), + (0x1E06C, "M", "ꙑ"), + (0x1E06D, "M", "ұ"), + ] + + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E06E, "X"), + (0x1E08F, "V"), + (0x1E090, "X"), + (0x1E100, "V"), + (0x1E12D, "X"), + (0x1E130, "V"), + (0x1E13E, "X"), + (0x1E140, "V"), + (0x1E14A, "X"), + (0x1E14E, "V"), + (0x1E150, "X"), + (0x1E290, "V"), + (0x1E2AF, "X"), + (0x1E2C0, "V"), + (0x1E2FA, "X"), + (0x1E2FF, "V"), + (0x1E300, "X"), + (0x1E4D0, "V"), + (0x1E4FA, "X"), + (0x1E5D0, "V"), + (0x1E5FB, "X"), + (0x1E5FF, "V"), + (0x1E600, "X"), + (0x1E7E0, "V"), + (0x1E7E7, "X"), + (0x1E7E8, "V"), + (0x1E7EC, "X"), + (0x1E7ED, "V"), + (0x1E7EF, "X"), + (0x1E7F0, "V"), + (0x1E7FF, "X"), + (0x1E800, "V"), + (0x1E8C5, "X"), + (0x1E8C7, "V"), + (0x1E8D7, "X"), + (0x1E900, "M", "𞤢"), + (0x1E901, "M", "𞤣"), + (0x1E902, "M", "𞤤"), + (0x1E903, "M", "𞤥"), + (0x1E904, "M", "𞤦"), + (0x1E905, "M", "𞤧"), + (0x1E906, "M", "𞤨"), + (0x1E907, "M", "𞤩"), + (0x1E908, "M", "𞤪"), + (0x1E909, "M", "𞤫"), + (0x1E90A, "M", "𞤬"), + (0x1E90B, "M", "𞤭"), + (0x1E90C, "M", "𞤮"), + (0x1E90D, "M", "𞤯"), + (0x1E90E, "M", "𞤰"), + (0x1E90F, "M", "𞤱"), + (0x1E910, "M", 
"𞤲"), + (0x1E911, "M", "𞤳"), + (0x1E912, "M", "𞤴"), + (0x1E913, "M", "𞤵"), + (0x1E914, "M", "𞤶"), + (0x1E915, "M", "𞤷"), + (0x1E916, "M", "𞤸"), + (0x1E917, "M", "𞤹"), + (0x1E918, "M", "𞤺"), + (0x1E919, "M", "𞤻"), + (0x1E91A, "M", "𞤼"), + (0x1E91B, "M", "𞤽"), + (0x1E91C, "M", "𞤾"), + (0x1E91D, "M", "𞤿"), + (0x1E91E, "M", "𞥀"), + (0x1E91F, "M", "𞥁"), + (0x1E920, "M", "𞥂"), + (0x1E921, "M", "𞥃"), + (0x1E922, "V"), + (0x1E94C, "X"), + (0x1E950, "V"), + (0x1E95A, "X"), + (0x1E95E, "V"), + (0x1E960, "X"), + (0x1EC71, "V"), + (0x1ECB5, "X"), + (0x1ED01, "V"), + (0x1ED3E, "X"), + (0x1EE00, "M", "ا"), + (0x1EE01, "M", "ب"), + (0x1EE02, "M", "ج"), + (0x1EE03, "M", "د"), + (0x1EE04, "X"), + (0x1EE05, "M", "و"), + (0x1EE06, "M", "ز"), + (0x1EE07, "M", "ح"), + (0x1EE08, "M", "ط"), + (0x1EE09, "M", "ي"), + (0x1EE0A, "M", "ك"), + (0x1EE0B, "M", "ل"), + (0x1EE0C, "M", "م"), + (0x1EE0D, "M", "ن"), + (0x1EE0E, "M", "س"), + (0x1EE0F, "M", "ع"), + (0x1EE10, "M", "ف"), + (0x1EE11, "M", "ص"), + (0x1EE12, "M", "ق"), + (0x1EE13, "M", "ر"), + (0x1EE14, "M", "ش"), + ] + + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE15, "M", "ت"), + (0x1EE16, "M", "ث"), + (0x1EE17, "M", "خ"), + (0x1EE18, "M", "ذ"), + (0x1EE19, "M", "ض"), + (0x1EE1A, "M", "ظ"), + (0x1EE1B, "M", "غ"), + (0x1EE1C, "M", "ٮ"), + (0x1EE1D, "M", "ں"), + (0x1EE1E, "M", "ڡ"), + (0x1EE1F, "M", "ٯ"), + (0x1EE20, "X"), + (0x1EE21, "M", "ب"), + (0x1EE22, "M", "ج"), + (0x1EE23, "X"), + (0x1EE24, "M", "ه"), + (0x1EE25, "X"), + (0x1EE27, "M", "ح"), + (0x1EE28, "X"), + (0x1EE29, "M", "ي"), + (0x1EE2A, "M", "ك"), + (0x1EE2B, "M", "ل"), + (0x1EE2C, "M", "م"), + (0x1EE2D, "M", "ن"), + (0x1EE2E, "M", "س"), + (0x1EE2F, "M", "ع"), + (0x1EE30, "M", "ف"), + (0x1EE31, "M", "ص"), + (0x1EE32, "M", "ق"), + (0x1EE33, "X"), + (0x1EE34, "M", "ش"), + (0x1EE35, "M", "ت"), + (0x1EE36, "M", "ث"), + (0x1EE37, "M", "خ"), + (0x1EE38, "X"), + (0x1EE39, "M", "ض"), + (0x1EE3A, "X"), + (0x1EE3B, "M", "غ"), + (0x1EE3C, "X"), + (0x1EE42, "M", "ج"), + (0x1EE43, "X"), + (0x1EE47, "M", "ح"), + (0x1EE48, "X"), + (0x1EE49, "M", "ي"), + (0x1EE4A, "X"), + (0x1EE4B, "M", "ل"), + (0x1EE4C, "X"), + (0x1EE4D, "M", "ن"), + (0x1EE4E, "M", "س"), + (0x1EE4F, "M", "ع"), + (0x1EE50, "X"), + (0x1EE51, "M", "ص"), + (0x1EE52, "M", "ق"), + (0x1EE53, "X"), + (0x1EE54, "M", "ش"), + (0x1EE55, "X"), + (0x1EE57, "M", "خ"), + (0x1EE58, "X"), + (0x1EE59, "M", "ض"), + (0x1EE5A, "X"), + (0x1EE5B, "M", "غ"), + (0x1EE5C, "X"), + (0x1EE5D, "M", "ں"), + (0x1EE5E, "X"), + (0x1EE5F, "M", "ٯ"), + (0x1EE60, "X"), + (0x1EE61, "M", "ب"), + (0x1EE62, "M", "ج"), + (0x1EE63, "X"), + (0x1EE64, "M", "ه"), + (0x1EE65, "X"), + (0x1EE67, "M", "ح"), + (0x1EE68, "M", "ط"), + (0x1EE69, "M", "ي"), + (0x1EE6A, "M", "ك"), + (0x1EE6B, "X"), + (0x1EE6C, "M", "م"), + (0x1EE6D, "M", "ن"), + (0x1EE6E, "M", "س"), + (0x1EE6F, "M", "ع"), + (0x1EE70, "M", "ف"), + (0x1EE71, "M", "ص"), + (0x1EE72, "M", "ق"), + (0x1EE73, "X"), + (0x1EE74, "M", "ش"), + (0x1EE75, "M", "ت"), + (0x1EE76, "M", "ث"), + (0x1EE77, "M", "خ"), + (0x1EE78, "X"), + (0x1EE79, "M", "ض"), + (0x1EE7A, "M", "ظ"), + (0x1EE7B, "M", "غ"), + (0x1EE7C, "M", "ٮ"), + (0x1EE7D, "X"), + (0x1EE7E, "M", "ڡ"), + (0x1EE7F, "X"), + (0x1EE80, "M", "ا"), + (0x1EE81, "M", "ب"), + (0x1EE82, "M", "ج"), + (0x1EE83, "M", "د"), + ] + + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE84, "M", "ه"), + (0x1EE85, "M", "و"), + (0x1EE86, "M", "ز"), + (0x1EE87, "M", "ح"), + (0x1EE88, "M", "ط"), + (0x1EE89, "M", "ي"), + (0x1EE8A, "X"), 
+ (0x1EE8B, "M", "ل"), + (0x1EE8C, "M", "م"), + (0x1EE8D, "M", "ن"), + (0x1EE8E, "M", "س"), + (0x1EE8F, "M", "ع"), + (0x1EE90, "M", "ف"), + (0x1EE91, "M", "ص"), + (0x1EE92, "M", "ق"), + (0x1EE93, "M", "ر"), + (0x1EE94, "M", "ش"), + (0x1EE95, "M", "ت"), + (0x1EE96, "M", "ث"), + (0x1EE97, "M", "خ"), + (0x1EE98, "M", "ذ"), + (0x1EE99, "M", "ض"), + (0x1EE9A, "M", "ظ"), + (0x1EE9B, "M", "غ"), + (0x1EE9C, "X"), + (0x1EEA1, "M", "ب"), + (0x1EEA2, "M", "ج"), + (0x1EEA3, "M", "د"), + (0x1EEA4, "X"), + (0x1EEA5, "M", "و"), + (0x1EEA6, "M", "ز"), + (0x1EEA7, "M", "ح"), + (0x1EEA8, "M", "ط"), + (0x1EEA9, "M", "ي"), + (0x1EEAA, "X"), + (0x1EEAB, "M", "ل"), + (0x1EEAC, "M", "م"), + (0x1EEAD, "M", "ن"), + (0x1EEAE, "M", "س"), + (0x1EEAF, "M", "ع"), + (0x1EEB0, "M", "ف"), + (0x1EEB1, "M", "ص"), + (0x1EEB2, "M", "ق"), + (0x1EEB3, "M", "ر"), + (0x1EEB4, "M", "ش"), + (0x1EEB5, "M", "ت"), + (0x1EEB6, "M", "ث"), + (0x1EEB7, "M", "خ"), + (0x1EEB8, "M", "ذ"), + (0x1EEB9, "M", "ض"), + (0x1EEBA, "M", "ظ"), + (0x1EEBB, "M", "غ"), + (0x1EEBC, "X"), + (0x1EEF0, "V"), + (0x1EEF2, "X"), + (0x1F000, "V"), + (0x1F02C, "X"), + (0x1F030, "V"), + (0x1F094, "X"), + (0x1F0A0, "V"), + (0x1F0AF, "X"), + (0x1F0B1, "V"), + (0x1F0C0, "X"), + (0x1F0C1, "V"), + (0x1F0D0, "X"), + (0x1F0D1, "V"), + (0x1F0F6, "X"), + (0x1F101, "M", "0,"), + (0x1F102, "M", "1,"), + (0x1F103, "M", "2,"), + (0x1F104, "M", "3,"), + (0x1F105, "M", "4,"), + (0x1F106, "M", "5,"), + (0x1F107, "M", "6,"), + (0x1F108, "M", "7,"), + (0x1F109, "M", "8,"), + (0x1F10A, "M", "9,"), + (0x1F10B, "V"), + (0x1F110, "M", "(a)"), + (0x1F111, "M", "(b)"), + (0x1F112, "M", "(c)"), + (0x1F113, "M", "(d)"), + (0x1F114, "M", "(e)"), + (0x1F115, "M", "(f)"), + (0x1F116, "M", "(g)"), + (0x1F117, "M", "(h)"), + (0x1F118, "M", "(i)"), + (0x1F119, "M", "(j)"), + (0x1F11A, "M", "(k)"), + (0x1F11B, "M", "(l)"), + (0x1F11C, "M", "(m)"), + (0x1F11D, "M", "(n)"), + (0x1F11E, "M", "(o)"), + (0x1F11F, "M", "(p)"), + (0x1F120, "M", "(q)"), + (0x1F121, "M", "(r)"), + (0x1F122, "M", "(s)"), + (0x1F123, "M", "(t)"), + (0x1F124, "M", "(u)"), + (0x1F125, "M", "(v)"), + ] + + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F126, "M", "(w)"), + (0x1F127, "M", "(x)"), + (0x1F128, "M", "(y)"), + (0x1F129, "M", "(z)"), + (0x1F12A, "M", "〔s〕"), + (0x1F12B, "M", "c"), + (0x1F12C, "M", "r"), + (0x1F12D, "M", "cd"), + (0x1F12E, "M", "wz"), + (0x1F12F, "V"), + (0x1F130, "M", "a"), + (0x1F131, "M", "b"), + (0x1F132, "M", "c"), + (0x1F133, "M", "d"), + (0x1F134, "M", "e"), + (0x1F135, "M", "f"), + (0x1F136, "M", "g"), + (0x1F137, "M", "h"), + (0x1F138, "M", "i"), + (0x1F139, "M", "j"), + (0x1F13A, "M", "k"), + (0x1F13B, "M", "l"), + (0x1F13C, "M", "m"), + (0x1F13D, "M", "n"), + (0x1F13E, "M", "o"), + (0x1F13F, "M", "p"), + (0x1F140, "M", "q"), + (0x1F141, "M", "r"), + (0x1F142, "M", "s"), + (0x1F143, "M", "t"), + (0x1F144, "M", "u"), + (0x1F145, "M", "v"), + (0x1F146, "M", "w"), + (0x1F147, "M", "x"), + (0x1F148, "M", "y"), + (0x1F149, "M", "z"), + (0x1F14A, "M", "hv"), + (0x1F14B, "M", "mv"), + (0x1F14C, "M", "sd"), + (0x1F14D, "M", "ss"), + (0x1F14E, "M", "ppv"), + (0x1F14F, "M", "wc"), + (0x1F150, "V"), + (0x1F16A, "M", "mc"), + (0x1F16B, "M", "md"), + (0x1F16C, "M", "mr"), + (0x1F16D, "V"), + (0x1F190, "M", "dj"), + (0x1F191, "V"), + (0x1F1AE, "X"), + (0x1F1E6, "V"), + (0x1F200, "M", "ほか"), + (0x1F201, "M", "ココ"), + (0x1F202, "M", "サ"), + (0x1F203, "X"), + (0x1F210, "M", "手"), + (0x1F211, "M", "字"), + (0x1F212, "M", "双"), + (0x1F213, "M", "デ"), + (0x1F214, "M", 
"二"), + (0x1F215, "M", "多"), + (0x1F216, "M", "解"), + (0x1F217, "M", "天"), + (0x1F218, "M", "交"), + (0x1F219, "M", "映"), + (0x1F21A, "M", "無"), + (0x1F21B, "M", "料"), + (0x1F21C, "M", "前"), + (0x1F21D, "M", "後"), + (0x1F21E, "M", "再"), + (0x1F21F, "M", "新"), + (0x1F220, "M", "初"), + (0x1F221, "M", "終"), + (0x1F222, "M", "生"), + (0x1F223, "M", "販"), + (0x1F224, "M", "声"), + (0x1F225, "M", "吹"), + (0x1F226, "M", "演"), + (0x1F227, "M", "投"), + (0x1F228, "M", "捕"), + (0x1F229, "M", "一"), + (0x1F22A, "M", "三"), + (0x1F22B, "M", "遊"), + (0x1F22C, "M", "左"), + (0x1F22D, "M", "中"), + (0x1F22E, "M", "右"), + (0x1F22F, "M", "指"), + (0x1F230, "M", "走"), + (0x1F231, "M", "打"), + (0x1F232, "M", "禁"), + (0x1F233, "M", "空"), + (0x1F234, "M", "合"), + (0x1F235, "M", "満"), + (0x1F236, "M", "有"), + (0x1F237, "M", "月"), + (0x1F238, "M", "申"), + (0x1F239, "M", "割"), + (0x1F23A, "M", "営"), + (0x1F23B, "M", "配"), + (0x1F23C, "X"), + ] + + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F240, "M", "〔本〕"), + (0x1F241, "M", "〔三〕"), + (0x1F242, "M", "〔二〕"), + (0x1F243, "M", "〔安〕"), + (0x1F244, "M", "〔点〕"), + (0x1F245, "M", "〔打〕"), + (0x1F246, "M", "〔盗〕"), + (0x1F247, "M", "〔勝〕"), + (0x1F248, "M", "〔敗〕"), + (0x1F249, "X"), + (0x1F250, "M", "得"), + (0x1F251, "M", "可"), + (0x1F252, "X"), + (0x1F260, "V"), + (0x1F266, "X"), + (0x1F300, "V"), + (0x1F6D8, "X"), + (0x1F6DC, "V"), + (0x1F6ED, "X"), + (0x1F6F0, "V"), + (0x1F6FD, "X"), + (0x1F700, "V"), + (0x1F777, "X"), + (0x1F77B, "V"), + (0x1F7DA, "X"), + (0x1F7E0, "V"), + (0x1F7EC, "X"), + (0x1F7F0, "V"), + (0x1F7F1, "X"), + (0x1F800, "V"), + (0x1F80C, "X"), + (0x1F810, "V"), + (0x1F848, "X"), + (0x1F850, "V"), + (0x1F85A, "X"), + (0x1F860, "V"), + (0x1F888, "X"), + (0x1F890, "V"), + (0x1F8AE, "X"), + (0x1F8B0, "V"), + (0x1F8BC, "X"), + (0x1F8C0, "V"), + (0x1F8C2, "X"), + (0x1F900, "V"), + (0x1FA54, "X"), + (0x1FA60, "V"), + (0x1FA6E, "X"), + (0x1FA70, "V"), + (0x1FA7D, "X"), + (0x1FA80, "V"), + (0x1FA8A, "X"), + (0x1FA8F, "V"), + (0x1FAC7, "X"), + (0x1FACE, "V"), + (0x1FADD, "X"), + (0x1FADF, "V"), + (0x1FAEA, "X"), + (0x1FAF0, "V"), + (0x1FAF9, "X"), + (0x1FB00, "V"), + (0x1FB93, "X"), + (0x1FB94, "V"), + (0x1FBF0, "M", "0"), + (0x1FBF1, "M", "1"), + (0x1FBF2, "M", "2"), + (0x1FBF3, "M", "3"), + (0x1FBF4, "M", "4"), + (0x1FBF5, "M", "5"), + (0x1FBF6, "M", "6"), + (0x1FBF7, "M", "7"), + (0x1FBF8, "M", "8"), + (0x1FBF9, "M", "9"), + (0x1FBFA, "X"), + (0x20000, "V"), + (0x2A6E0, "X"), + (0x2A700, "V"), + (0x2B73A, "X"), + (0x2B740, "V"), + (0x2B81E, "X"), + (0x2B820, "V"), + (0x2CEA2, "X"), + (0x2CEB0, "V"), + (0x2EBE1, "X"), + (0x2EBF0, "V"), + (0x2EE5E, "X"), + (0x2F800, "M", "丽"), + (0x2F801, "M", "丸"), + (0x2F802, "M", "乁"), + (0x2F803, "M", "𠄢"), + (0x2F804, "M", "你"), + (0x2F805, "M", "侮"), + (0x2F806, "M", "侻"), + (0x2F807, "M", "倂"), + (0x2F808, "M", "偺"), + (0x2F809, "M", "備"), + (0x2F80A, "M", "僧"), + (0x2F80B, "M", "像"), + (0x2F80C, "M", "㒞"), + (0x2F80D, "M", "𠘺"), + (0x2F80E, "M", "免"), + ] + + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F80F, "M", "兔"), + (0x2F810, "M", "兤"), + (0x2F811, "M", "具"), + (0x2F812, "M", "𠔜"), + (0x2F813, "M", "㒹"), + (0x2F814, "M", "內"), + (0x2F815, "M", "再"), + (0x2F816, "M", "𠕋"), + (0x2F817, "M", "冗"), + (0x2F818, "M", "冤"), + (0x2F819, "M", "仌"), + (0x2F81A, "M", "冬"), + (0x2F81B, "M", "况"), + (0x2F81C, "M", "𩇟"), + (0x2F81D, "M", "凵"), + (0x2F81E, "M", "刃"), + (0x2F81F, "M", "㓟"), + (0x2F820, "M", "刻"), + (0x2F821, "M", "剆"), + (0x2F822, "M", 
"割"), + (0x2F823, "M", "剷"), + (0x2F824, "M", "㔕"), + (0x2F825, "M", "勇"), + (0x2F826, "M", "勉"), + (0x2F827, "M", "勤"), + (0x2F828, "M", "勺"), + (0x2F829, "M", "包"), + (0x2F82A, "M", "匆"), + (0x2F82B, "M", "北"), + (0x2F82C, "M", "卉"), + (0x2F82D, "M", "卑"), + (0x2F82E, "M", "博"), + (0x2F82F, "M", "即"), + (0x2F830, "M", "卽"), + (0x2F831, "M", "卿"), + (0x2F834, "M", "𠨬"), + (0x2F835, "M", "灰"), + (0x2F836, "M", "及"), + (0x2F837, "M", "叟"), + (0x2F838, "M", "𠭣"), + (0x2F839, "M", "叫"), + (0x2F83A, "M", "叱"), + (0x2F83B, "M", "吆"), + (0x2F83C, "M", "咞"), + (0x2F83D, "M", "吸"), + (0x2F83E, "M", "呈"), + (0x2F83F, "M", "周"), + (0x2F840, "M", "咢"), + (0x2F841, "M", "哶"), + (0x2F842, "M", "唐"), + (0x2F843, "M", "啓"), + (0x2F844, "M", "啣"), + (0x2F845, "M", "善"), + (0x2F847, "M", "喙"), + (0x2F848, "M", "喫"), + (0x2F849, "M", "喳"), + (0x2F84A, "M", "嗂"), + (0x2F84B, "M", "圖"), + (0x2F84C, "M", "嘆"), + (0x2F84D, "M", "圗"), + (0x2F84E, "M", "噑"), + (0x2F84F, "M", "噴"), + (0x2F850, "M", "切"), + (0x2F851, "M", "壮"), + (0x2F852, "M", "城"), + (0x2F853, "M", "埴"), + (0x2F854, "M", "堍"), + (0x2F855, "M", "型"), + (0x2F856, "M", "堲"), + (0x2F857, "M", "報"), + (0x2F858, "M", "墬"), + (0x2F859, "M", "𡓤"), + (0x2F85A, "M", "売"), + (0x2F85B, "M", "壷"), + (0x2F85C, "M", "夆"), + (0x2F85D, "M", "多"), + (0x2F85E, "M", "夢"), + (0x2F85F, "M", "奢"), + (0x2F860, "M", "𡚨"), + (0x2F861, "M", "𡛪"), + (0x2F862, "M", "姬"), + (0x2F863, "M", "娛"), + (0x2F864, "M", "娧"), + (0x2F865, "M", "姘"), + (0x2F866, "M", "婦"), + (0x2F867, "M", "㛮"), + (0x2F868, "M", "㛼"), + (0x2F869, "M", "嬈"), + (0x2F86A, "M", "嬾"), + (0x2F86C, "M", "𡧈"), + (0x2F86D, "M", "寃"), + (0x2F86E, "M", "寘"), + (0x2F86F, "M", "寧"), + (0x2F870, "M", "寳"), + (0x2F871, "M", "𡬘"), + (0x2F872, "M", "寿"), + (0x2F873, "M", "将"), + (0x2F874, "M", "当"), + (0x2F875, "M", "尢"), + (0x2F876, "M", "㞁"), + ] + + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F877, "M", "屠"), + (0x2F878, "M", "屮"), + (0x2F879, "M", "峀"), + (0x2F87A, "M", "岍"), + (0x2F87B, "M", "𡷤"), + (0x2F87C, "M", "嵃"), + (0x2F87D, "M", "𡷦"), + (0x2F87E, "M", "嵮"), + (0x2F87F, "M", "嵫"), + (0x2F880, "M", "嵼"), + (0x2F881, "M", "巡"), + (0x2F882, "M", "巢"), + (0x2F883, "M", "㠯"), + (0x2F884, "M", "巽"), + (0x2F885, "M", "帨"), + (0x2F886, "M", "帽"), + (0x2F887, "M", "幩"), + (0x2F888, "M", "㡢"), + (0x2F889, "M", "𢆃"), + (0x2F88A, "M", "㡼"), + (0x2F88B, "M", "庰"), + (0x2F88C, "M", "庳"), + (0x2F88D, "M", "庶"), + (0x2F88E, "M", "廊"), + (0x2F88F, "M", "𪎒"), + (0x2F890, "M", "廾"), + (0x2F891, "M", "𢌱"), + (0x2F893, "M", "舁"), + (0x2F894, "M", "弢"), + (0x2F896, "M", "㣇"), + (0x2F897, "M", "𣊸"), + (0x2F898, "M", "𦇚"), + (0x2F899, "M", "形"), + (0x2F89A, "M", "彫"), + (0x2F89B, "M", "㣣"), + (0x2F89C, "M", "徚"), + (0x2F89D, "M", "忍"), + (0x2F89E, "M", "志"), + (0x2F89F, "M", "忹"), + (0x2F8A0, "M", "悁"), + (0x2F8A1, "M", "㤺"), + (0x2F8A2, "M", "㤜"), + (0x2F8A3, "M", "悔"), + (0x2F8A4, "M", "𢛔"), + (0x2F8A5, "M", "惇"), + (0x2F8A6, "M", "慈"), + (0x2F8A7, "M", "慌"), + (0x2F8A8, "M", "慎"), + (0x2F8A9, "M", "慌"), + (0x2F8AA, "M", "慺"), + (0x2F8AB, "M", "憎"), + (0x2F8AC, "M", "憲"), + (0x2F8AD, "M", "憤"), + (0x2F8AE, "M", "憯"), + (0x2F8AF, "M", "懞"), + (0x2F8B0, "M", "懲"), + (0x2F8B1, "M", "懶"), + (0x2F8B2, "M", "成"), + (0x2F8B3, "M", "戛"), + (0x2F8B4, "M", "扝"), + (0x2F8B5, "M", "抱"), + (0x2F8B6, "M", "拔"), + (0x2F8B7, "M", "捐"), + (0x2F8B8, "M", "𢬌"), + (0x2F8B9, "M", "挽"), + (0x2F8BA, "M", "拼"), + (0x2F8BB, "M", "捨"), + (0x2F8BC, "M", "掃"), + (0x2F8BD, "M", "揤"), + (0x2F8BE, "M", "𢯱"), + 
(0x2F8BF, "M", "搢"), + (0x2F8C0, "M", "揅"), + (0x2F8C1, "M", "掩"), + (0x2F8C2, "M", "㨮"), + (0x2F8C3, "M", "摩"), + (0x2F8C4, "M", "摾"), + (0x2F8C5, "M", "撝"), + (0x2F8C6, "M", "摷"), + (0x2F8C7, "M", "㩬"), + (0x2F8C8, "M", "敏"), + (0x2F8C9, "M", "敬"), + (0x2F8CA, "M", "𣀊"), + (0x2F8CB, "M", "旣"), + (0x2F8CC, "M", "書"), + (0x2F8CD, "M", "晉"), + (0x2F8CE, "M", "㬙"), + (0x2F8CF, "M", "暑"), + (0x2F8D0, "M", "㬈"), + (0x2F8D1, "M", "㫤"), + (0x2F8D2, "M", "冒"), + (0x2F8D3, "M", "冕"), + (0x2F8D4, "M", "最"), + (0x2F8D5, "M", "暜"), + (0x2F8D6, "M", "肭"), + (0x2F8D7, "M", "䏙"), + (0x2F8D8, "M", "朗"), + (0x2F8D9, "M", "望"), + (0x2F8DA, "M", "朡"), + (0x2F8DB, "M", "杞"), + (0x2F8DC, "M", "杓"), + ] + + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8DD, "M", "𣏃"), + (0x2F8DE, "M", "㭉"), + (0x2F8DF, "M", "柺"), + (0x2F8E0, "M", "枅"), + (0x2F8E1, "M", "桒"), + (0x2F8E2, "M", "梅"), + (0x2F8E3, "M", "𣑭"), + (0x2F8E4, "M", "梎"), + (0x2F8E5, "M", "栟"), + (0x2F8E6, "M", "椔"), + (0x2F8E7, "M", "㮝"), + (0x2F8E8, "M", "楂"), + (0x2F8E9, "M", "榣"), + (0x2F8EA, "M", "槪"), + (0x2F8EB, "M", "檨"), + (0x2F8EC, "M", "𣚣"), + (0x2F8ED, "M", "櫛"), + (0x2F8EE, "M", "㰘"), + (0x2F8EF, "M", "次"), + (0x2F8F0, "M", "𣢧"), + (0x2F8F1, "M", "歔"), + (0x2F8F2, "M", "㱎"), + (0x2F8F3, "M", "歲"), + (0x2F8F4, "M", "殟"), + (0x2F8F5, "M", "殺"), + (0x2F8F6, "M", "殻"), + (0x2F8F7, "M", "𣪍"), + (0x2F8F8, "M", "𡴋"), + (0x2F8F9, "M", "𣫺"), + (0x2F8FA, "M", "汎"), + (0x2F8FB, "M", "𣲼"), + (0x2F8FC, "M", "沿"), + (0x2F8FD, "M", "泍"), + (0x2F8FE, "M", "汧"), + (0x2F8FF, "M", "洖"), + (0x2F900, "M", "派"), + (0x2F901, "M", "海"), + (0x2F902, "M", "流"), + (0x2F903, "M", "浩"), + (0x2F904, "M", "浸"), + (0x2F905, "M", "涅"), + (0x2F906, "M", "𣴞"), + (0x2F907, "M", "洴"), + (0x2F908, "M", "港"), + (0x2F909, "M", "湮"), + (0x2F90A, "M", "㴳"), + (0x2F90B, "M", "滋"), + (0x2F90C, "M", "滇"), + (0x2F90D, "M", "𣻑"), + (0x2F90E, "M", "淹"), + (0x2F90F, "M", "潮"), + (0x2F910, "M", "𣽞"), + (0x2F911, "M", "𣾎"), + (0x2F912, "M", "濆"), + (0x2F913, "M", "瀹"), + (0x2F914, "M", "瀞"), + (0x2F915, "M", "瀛"), + (0x2F916, "M", "㶖"), + (0x2F917, "M", "灊"), + (0x2F918, "M", "災"), + (0x2F919, "M", "灷"), + (0x2F91A, "M", "炭"), + (0x2F91B, "M", "𠔥"), + (0x2F91C, "M", "煅"), + (0x2F91D, "M", "𤉣"), + (0x2F91E, "M", "熜"), + (0x2F91F, "M", "𤎫"), + (0x2F920, "M", "爨"), + (0x2F921, "M", "爵"), + (0x2F922, "M", "牐"), + (0x2F923, "M", "𤘈"), + (0x2F924, "M", "犀"), + (0x2F925, "M", "犕"), + (0x2F926, "M", "𤜵"), + (0x2F927, "M", "𤠔"), + (0x2F928, "M", "獺"), + (0x2F929, "M", "王"), + (0x2F92A, "M", "㺬"), + (0x2F92B, "M", "玥"), + (0x2F92C, "M", "㺸"), + (0x2F92E, "M", "瑇"), + (0x2F92F, "M", "瑜"), + (0x2F930, "M", "瑱"), + (0x2F931, "M", "璅"), + (0x2F932, "M", "瓊"), + (0x2F933, "M", "㼛"), + (0x2F934, "M", "甤"), + (0x2F935, "M", "𤰶"), + (0x2F936, "M", "甾"), + (0x2F937, "M", "𤲒"), + (0x2F938, "M", "異"), + (0x2F939, "M", "𢆟"), + (0x2F93A, "M", "瘐"), + (0x2F93B, "M", "𤾡"), + (0x2F93C, "M", "𤾸"), + (0x2F93D, "M", "𥁄"), + (0x2F93E, "M", "㿼"), + (0x2F93F, "M", "䀈"), + (0x2F940, "M", "直"), + (0x2F941, "M", "𥃳"), + ] + + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F942, "M", "𥃲"), + (0x2F943, "M", "𥄙"), + (0x2F944, "M", "𥄳"), + (0x2F945, "M", "眞"), + (0x2F946, "M", "真"), + (0x2F948, "M", "睊"), + (0x2F949, "M", "䀹"), + (0x2F94A, "M", "瞋"), + (0x2F94B, "M", "䁆"), + (0x2F94C, "M", "䂖"), + (0x2F94D, "M", "𥐝"), + (0x2F94E, "M", "硎"), + (0x2F94F, "M", "碌"), + (0x2F950, "M", "磌"), + (0x2F951, "M", "䃣"), + (0x2F952, "M", "𥘦"), + (0x2F953, "M", 
"祖"), + (0x2F954, "M", "𥚚"), + (0x2F955, "M", "𥛅"), + (0x2F956, "M", "福"), + (0x2F957, "M", "秫"), + (0x2F958, "M", "䄯"), + (0x2F959, "M", "穀"), + (0x2F95A, "M", "穊"), + (0x2F95B, "M", "穏"), + (0x2F95C, "M", "𥥼"), + (0x2F95D, "M", "𥪧"), + (0x2F95F, "M", "竮"), + (0x2F960, "M", "䈂"), + (0x2F961, "M", "𥮫"), + (0x2F962, "M", "篆"), + (0x2F963, "M", "築"), + (0x2F964, "M", "䈧"), + (0x2F965, "M", "𥲀"), + (0x2F966, "M", "糒"), + (0x2F967, "M", "䊠"), + (0x2F968, "M", "糨"), + (0x2F969, "M", "糣"), + (0x2F96A, "M", "紀"), + (0x2F96B, "M", "𥾆"), + (0x2F96C, "M", "絣"), + (0x2F96D, "M", "䌁"), + (0x2F96E, "M", "緇"), + (0x2F96F, "M", "縂"), + (0x2F970, "M", "繅"), + (0x2F971, "M", "䌴"), + (0x2F972, "M", "𦈨"), + (0x2F973, "M", "𦉇"), + (0x2F974, "M", "䍙"), + (0x2F975, "M", "𦋙"), + (0x2F976, "M", "罺"), + (0x2F977, "M", "𦌾"), + (0x2F978, "M", "羕"), + (0x2F979, "M", "翺"), + (0x2F97A, "M", "者"), + (0x2F97B, "M", "𦓚"), + (0x2F97C, "M", "𦔣"), + (0x2F97D, "M", "聠"), + (0x2F97E, "M", "𦖨"), + (0x2F97F, "M", "聰"), + (0x2F980, "M", "𣍟"), + (0x2F981, "M", "䏕"), + (0x2F982, "M", "育"), + (0x2F983, "M", "脃"), + (0x2F984, "M", "䐋"), + (0x2F985, "M", "脾"), + (0x2F986, "M", "媵"), + (0x2F987, "M", "𦞧"), + (0x2F988, "M", "𦞵"), + (0x2F989, "M", "𣎓"), + (0x2F98A, "M", "𣎜"), + (0x2F98B, "M", "舁"), + (0x2F98C, "M", "舄"), + (0x2F98D, "M", "辞"), + (0x2F98E, "M", "䑫"), + (0x2F98F, "M", "芑"), + (0x2F990, "M", "芋"), + (0x2F991, "M", "芝"), + (0x2F992, "M", "劳"), + (0x2F993, "M", "花"), + (0x2F994, "M", "芳"), + (0x2F995, "M", "芽"), + (0x2F996, "M", "苦"), + (0x2F997, "M", "𦬼"), + (0x2F998, "M", "若"), + (0x2F999, "M", "茝"), + (0x2F99A, "M", "荣"), + (0x2F99B, "M", "莭"), + (0x2F99C, "M", "茣"), + (0x2F99D, "M", "莽"), + (0x2F99E, "M", "菧"), + (0x2F99F, "M", "著"), + (0x2F9A0, "M", "荓"), + (0x2F9A1, "M", "菊"), + (0x2F9A2, "M", "菌"), + (0x2F9A3, "M", "菜"), + (0x2F9A4, "M", "𦰶"), + (0x2F9A5, "M", "𦵫"), + (0x2F9A6, "M", "𦳕"), + (0x2F9A7, "M", "䔫"), + ] + + +def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9A8, "M", "蓱"), + (0x2F9A9, "M", "蓳"), + (0x2F9AA, "M", "蔖"), + (0x2F9AB, "M", "𧏊"), + (0x2F9AC, "M", "蕤"), + (0x2F9AD, "M", "𦼬"), + (0x2F9AE, "M", "䕝"), + (0x2F9AF, "M", "䕡"), + (0x2F9B0, "M", "𦾱"), + (0x2F9B1, "M", "𧃒"), + (0x2F9B2, "M", "䕫"), + (0x2F9B3, "M", "虐"), + (0x2F9B4, "M", "虜"), + (0x2F9B5, "M", "虧"), + (0x2F9B6, "M", "虩"), + (0x2F9B7, "M", "蚩"), + (0x2F9B8, "M", "蚈"), + (0x2F9B9, "M", "蜎"), + (0x2F9BA, "M", "蛢"), + (0x2F9BB, "M", "蝹"), + (0x2F9BC, "M", "蜨"), + (0x2F9BD, "M", "蝫"), + (0x2F9BE, "M", "螆"), + (0x2F9BF, "M", "䗗"), + (0x2F9C0, "M", "蟡"), + (0x2F9C1, "M", "蠁"), + (0x2F9C2, "M", "䗹"), + (0x2F9C3, "M", "衠"), + (0x2F9C4, "M", "衣"), + (0x2F9C5, "M", "𧙧"), + (0x2F9C6, "M", "裗"), + (0x2F9C7, "M", "裞"), + (0x2F9C8, "M", "䘵"), + (0x2F9C9, "M", "裺"), + (0x2F9CA, "M", "㒻"), + (0x2F9CB, "M", "𧢮"), + (0x2F9CC, "M", "𧥦"), + (0x2F9CD, "M", "䚾"), + (0x2F9CE, "M", "䛇"), + (0x2F9CF, "M", "誠"), + (0x2F9D0, "M", "諭"), + (0x2F9D1, "M", "變"), + (0x2F9D2, "M", "豕"), + (0x2F9D3, "M", "𧲨"), + (0x2F9D4, "M", "貫"), + (0x2F9D5, "M", "賁"), + (0x2F9D6, "M", "贛"), + (0x2F9D7, "M", "起"), + (0x2F9D8, "M", "𧼯"), + (0x2F9D9, "M", "𠠄"), + (0x2F9DA, "M", "跋"), + (0x2F9DB, "M", "趼"), + (0x2F9DC, "M", "跰"), + (0x2F9DD, "M", "𠣞"), + (0x2F9DE, "M", "軔"), + (0x2F9DF, "M", "輸"), + (0x2F9E0, "M", "𨗒"), + (0x2F9E1, "M", "𨗭"), + (0x2F9E2, "M", "邔"), + (0x2F9E3, "M", "郱"), + (0x2F9E4, "M", "鄑"), + (0x2F9E5, "M", "𨜮"), + (0x2F9E6, "M", "鄛"), + (0x2F9E7, "M", "鈸"), + (0x2F9E8, "M", "鋗"), + (0x2F9E9, "M", "鋘"), + (0x2F9EA, "M", "鉼"), + 
(0x2F9EB, "M", "鏹"), + (0x2F9EC, "M", "鐕"), + (0x2F9ED, "M", "𨯺"), + (0x2F9EE, "M", "開"), + (0x2F9EF, "M", "䦕"), + (0x2F9F0, "M", "閷"), + (0x2F9F1, "M", "𨵷"), + (0x2F9F2, "M", "䧦"), + (0x2F9F3, "M", "雃"), + (0x2F9F4, "M", "嶲"), + (0x2F9F5, "M", "霣"), + (0x2F9F6, "M", "𩅅"), + (0x2F9F7, "M", "𩈚"), + (0x2F9F8, "M", "䩮"), + (0x2F9F9, "M", "䩶"), + (0x2F9FA, "M", "韠"), + (0x2F9FB, "M", "𩐊"), + (0x2F9FC, "M", "䪲"), + (0x2F9FD, "M", "𩒖"), + (0x2F9FE, "M", "頋"), + (0x2FA00, "M", "頩"), + (0x2FA01, "M", "𩖶"), + (0x2FA02, "M", "飢"), + (0x2FA03, "M", "䬳"), + (0x2FA04, "M", "餩"), + (0x2FA05, "M", "馧"), + (0x2FA06, "M", "駂"), + (0x2FA07, "M", "駾"), + (0x2FA08, "M", "䯎"), + (0x2FA09, "M", "𩬰"), + (0x2FA0A, "M", "鬒"), + (0x2FA0B, "M", "鱀"), + (0x2FA0C, "M", "鳽"), + ] + + +def _seg_83() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2FA0D, "M", "䳎"), + (0x2FA0E, "M", "䳭"), + (0x2FA0F, "M", "鵧"), + (0x2FA10, "M", "𪃎"), + (0x2FA11, "M", "䳸"), + (0x2FA12, "M", "𪄅"), + (0x2FA13, "M", "𪈎"), + (0x2FA14, "M", "𪊑"), + (0x2FA15, "M", "麻"), + (0x2FA16, "M", "䵖"), + (0x2FA17, "M", "黹"), + (0x2FA18, "M", "黾"), + (0x2FA19, "M", "鼅"), + (0x2FA1A, "M", "鼏"), + (0x2FA1B, "M", "鼖"), + (0x2FA1C, "M", "鼻"), + (0x2FA1D, "M", "𪘀"), + (0x2FA1E, "X"), + (0x30000, "V"), + (0x3134B, "X"), + (0x31350, "V"), + (0x323B0, "X"), + (0xE0100, "I"), + (0xE01F0, "X"), + ] + + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() + + _seg_82() + + _seg_83() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] 
diff --git a/venv/lib/python3.12/site-packages/jose/__init__.py b/venv/lib/python3.12/site-packages/jose/__init__.py new file mode 100644 index 0000000..054baa7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/__init__.py @@ -0,0 +1,10 @@ +__version__ = "3.3.0" +__author__ = "Michael Davis" +__license__ = "MIT" +__copyright__ = "Copyright 2016 Michael Davis" + + +from .exceptions import ExpiredSignatureError # noqa: F401 +from .exceptions import JOSEError # noqa: F401 +from .exceptions import JWSError # noqa: F401 +from .exceptions import JWTError # noqa: F401 diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..0bbe1f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/constants.cpython-312.pyc new file mode 100644 index 0000000..1f6e904 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/constants.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..73d935b Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jwe.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jwe.cpython-312.pyc new file mode 100644 index 0000000..2bc1325 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/jwe.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jwk.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jwk.cpython-312.pyc new file mode 100644 index 0000000..816e991 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/jwk.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jws.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jws.cpython-312.pyc new file mode 100644 index 0000000..1dde6fa Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/jws.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jwt.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jwt.cpython-312.pyc new file mode 100644 index 0000000..f4a096b Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/jwt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..1c78971 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__init__.py b/venv/lib/python3.12/site-packages/jose/backends/__init__.py new file mode 100644 index 0000000..e7bba69 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/backends/__init__.py @@ -0,0 +1,32 @@ +try: + from jose.backends.cryptography_backend import get_random_bytes # noqa: F401 +except ImportError: + try: + from jose.backends.pycrypto_backend import get_random_bytes # 
noqa: F401 + except ImportError: + from jose.backends.native import get_random_bytes # noqa: F401 + +try: + from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey # noqa: F401 +except ImportError: + try: + from jose.backends.rsa_backend import RSAKey # noqa: F401 + except ImportError: + RSAKey = None + +try: + from jose.backends.cryptography_backend import CryptographyECKey as ECKey # noqa: F401 +except ImportError: + from jose.backends.ecdsa_backend import ECDSAECKey as ECKey # noqa: F401 + +try: + from jose.backends.cryptography_backend import CryptographyAESKey as AESKey # noqa: F401 +except ImportError: + AESKey = None + +try: + from jose.backends.cryptography_backend import CryptographyHMACKey as HMACKey # noqa: F401 +except ImportError: + from jose.backends.native import HMACKey # noqa: F401 + +from .base import DIRKey # noqa: F401 diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c2757d3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/_asn1.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/_asn1.cpython-312.pyc new file mode 100644 index 0000000..1a57328 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/_asn1.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..6cff9b6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-312.pyc new file mode 100644 index 0000000..a88a019 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-312.pyc new file mode 100644 index 0000000..93ed7e0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/native.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/native.cpython-312.pyc new file mode 100644 index 0000000..0d5fac2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/native.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/rsa_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/rsa_backend.cpython-312.pyc new file mode 100644 index 0000000..edcb62a Binary files /dev/null and b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/rsa_backend.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/_asn1.py b/venv/lib/python3.12/site-packages/jose/backends/_asn1.py new file mode 100644 index 0000000..af5fa8b --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/jose/backends/_asn1.py @@ -0,0 +1,83 @@ +"""ASN1 encoding helpers for converting between PKCS1 and PKCS8. + +Required by rsa_backend but not cryptography_backend. +""" +from pyasn1.codec.der import decoder, encoder +from pyasn1.type import namedtype, univ + +RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1" + + +class RsaAlgorithmIdentifier(univ.Sequence): + """ASN1 structure for recording RSA PrivateKeyAlgorithm identifiers.""" + + componentType = namedtype.NamedTypes( + namedtype.NamedType("rsaEncryption", univ.ObjectIdentifier()), namedtype.NamedType("parameters", univ.Null()) + ) + + +class PKCS8PrivateKey(univ.Sequence): + """ASN1 structure for recording PKCS8 private keys.""" + + componentType = namedtype.NamedTypes( + namedtype.NamedType("version", univ.Integer()), + namedtype.NamedType("privateKeyAlgorithm", RsaAlgorithmIdentifier()), + namedtype.NamedType("privateKey", univ.OctetString()), + ) + + +class PublicKeyInfo(univ.Sequence): + """ASN1 structure for recording PKCS8 public keys.""" + + componentType = namedtype.NamedTypes( + namedtype.NamedType("algorithm", RsaAlgorithmIdentifier()), namedtype.NamedType("publicKey", univ.BitString()) + ) + + +def rsa_private_key_pkcs8_to_pkcs1(pkcs8_key): + """Convert a PKCS8-encoded RSA private key to PKCS1.""" + decoded_values = decoder.decode(pkcs8_key, asn1Spec=PKCS8PrivateKey()) + + try: + decoded_key = decoded_values[0] + except IndexError: + raise ValueError("Invalid private key encoding") + + return decoded_key["privateKey"] + + +def rsa_private_key_pkcs1_to_pkcs8(pkcs1_key): + """Convert a PKCS1-encoded RSA private key to PKCS8.""" + algorithm = RsaAlgorithmIdentifier() + algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID + + pkcs8_key = PKCS8PrivateKey() + pkcs8_key["version"] = 0 + pkcs8_key["privateKeyAlgorithm"] = algorithm + pkcs8_key["privateKey"] = pkcs1_key + + return encoder.encode(pkcs8_key) + + +def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key): + """Convert a PKCS1-encoded RSA private key to PKCS8.""" + algorithm = RsaAlgorithmIdentifier() + algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID + + pkcs8_key = PublicKeyInfo() + pkcs8_key["algorithm"] = algorithm + pkcs8_key["publicKey"] = univ.BitString.fromOctetString(pkcs1_key) + + return encoder.encode(pkcs8_key) + + +def rsa_public_key_pkcs8_to_pkcs1(pkcs8_key): + """Convert a PKCS8-encoded RSA private key to PKCS1.""" + decoded_values = decoder.decode(pkcs8_key, asn1Spec=PublicKeyInfo()) + + try: + decoded_key = decoded_values[0] + except IndexError: + raise ValueError("Invalid public key encoding.") + + return decoded_key["publicKey"].asOctets() diff --git a/venv/lib/python3.12/site-packages/jose/backends/base.py b/venv/lib/python3.12/site-packages/jose/backends/base.py new file mode 100644 index 0000000..b000a52 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/backends/base.py @@ -0,0 +1,89 @@ +from ..utils import base64url_encode, ensure_binary + + +class Key: + """ + A simple interface for implementing JWK keys. 
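+ + Concrete backends implement only the operations their algorithm supports + (sign/verify, encrypt/decrypt, wrap_key/unwrap_key); every method below + raises NotImplementedError by default.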
+ """ + + def __init__(self, key, algorithm): + pass + + def sign(self, msg): + raise NotImplementedError() + + def verify(self, msg, sig): + raise NotImplementedError() + + def public_key(self): + raise NotImplementedError() + + def to_pem(self): + raise NotImplementedError() + + def to_dict(self): + raise NotImplementedError() + + def encrypt(self, plain_text, aad=None): + """ + Encrypt the plain text and generate an auth tag if appropriate + + Args: + plain_text (bytes): Data to encrypt + aad (bytes, optional): Authenticated Additional Data if key's algorithm supports auth mode + + Returns: + (bytes, bytes, bytes): IV, cipher text, and auth tag + """ + raise NotImplementedError() + + def decrypt(self, cipher_text, iv=None, aad=None, tag=None): + """ + Decrypt the cipher text and validate the auth tag if present + Args: + cipher_text (bytes): Cipher text to decrypt + iv (bytes): IV if block mode + aad (bytes): Additional Authenticated Data to verify if auth mode + tag (bytes): Authentication tag if auth mode + + Returns: + bytes: Decrypted value + """ + raise NotImplementedError() + + def wrap_key(self, key_data): + """ + Wrap the the plain text key data + + Args: + key_data (bytes): Key data to wrap + + Returns: + bytes: Wrapped key + """ + raise NotImplementedError() + + def unwrap_key(self, wrapped_key): + """ + Unwrap the the wrapped key data + + Args: + wrapped_key (bytes): Wrapped key data to unwrap + + Returns: + bytes: Unwrapped key + """ + raise NotImplementedError() + + +class DIRKey(Key): + def __init__(self, key_data, algorithm): + self._key = ensure_binary(key_data) + self._alg = algorithm + + def to_dict(self): + return { + "alg": self._alg, + "kty": "oct", + "k": base64url_encode(self._key), + } diff --git a/venv/lib/python3.12/site-packages/jose/backends/cryptography_backend.py b/venv/lib/python3.12/site-packages/jose/backends/cryptography_backend.py new file mode 100644 index 0000000..abd2426 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/backends/cryptography_backend.py @@ -0,0 +1,605 @@ +import math +import warnings + +from cryptography.exceptions import InvalidSignature, InvalidTag +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.bindings.openssl.binding import Binding +from cryptography.hazmat.primitives import hashes, hmac, serialization +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa +from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature, encode_dss_signature +from cryptography.hazmat.primitives.ciphers import Cipher, aead, algorithms, modes +from cryptography.hazmat.primitives.keywrap import InvalidUnwrap, aes_key_unwrap, aes_key_wrap +from cryptography.hazmat.primitives.padding import PKCS7 +from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key +from cryptography.utils import int_to_bytes +from cryptography.x509 import load_pem_x509_certificate + +from ..constants import ALGORITHMS +from ..exceptions import JWEError, JWKError +from ..utils import base64_to_long, base64url_decode, base64url_encode, ensure_binary, long_to_base64 +from .base import Key + +_binding = None + + +def get_random_bytes(num_bytes): + """ + Get random bytes + + Currently, Cryptography returns OS random bytes. 
If you want OpenSSL + generated random bytes, you'll have to switch the RAND engine after + initializing the OpenSSL backend + Args: + num_bytes (int): Number of random bytes to generate and return + Returns: + bytes: Random bytes + """ + global _binding + + if _binding is None: + _binding = Binding() + + buf = _binding.ffi.new("char[]", num_bytes) + _binding.lib.RAND_bytes(buf, num_bytes) + rand_bytes = _binding.ffi.buffer(buf, num_bytes)[:] + return rand_bytes + + +class CryptographyECKey(Key): + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, key, algorithm, cryptography_backend=default_backend): + if algorithm not in ALGORITHMS.EC: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + self.hash_alg = { + ALGORITHMS.ES256: self.SHA256, + ALGORITHMS.ES384: self.SHA384, + ALGORITHMS.ES512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + self.cryptography_backend = cryptography_backend + + if hasattr(key, "public_bytes") or hasattr(key, "private_bytes"): + self.prepared_key = key + return + + if hasattr(key, "to_pem"): + # convert to PEM and let cryptography below load it as PEM + key = key.to_pem().decode("utf-8") + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + # Attempt to load key. We don't know if it's + # a Public Key or a Private Key, so we try + # the Public Key first. + try: + try: + key = load_pem_public_key(key, self.cryptography_backend()) + except ValueError: + key = load_pem_private_key(key, password=None, backend=self.cryptography_backend()) + except Exception as e: + raise JWKError(e) + + self.prepared_key = key + return + + raise JWKError("Unable to parse an ECKey from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "EC": + raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get("kty")) + + if not all(k in jwk_dict for k in ["x", "y", "crv"]): + raise JWKError("Mandatory parameters are missing") + + x = base64_to_long(jwk_dict.get("x")) + y = base64_to_long(jwk_dict.get("y")) + curve = { + "P-256": ec.SECP256R1, + "P-384": ec.SECP384R1, + "P-521": ec.SECP521R1, + }[jwk_dict["crv"]] + + public = ec.EllipticCurvePublicNumbers(x, y, curve()) + + if "d" in jwk_dict: + d = base64_to_long(jwk_dict.get("d")) + private = ec.EllipticCurvePrivateNumbers(d, public) + + return private.private_key(self.cryptography_backend()) + else: + return public.public_key(self.cryptography_backend()) + + def _sig_component_length(self): + """Determine the correct serialization length for an encoded signature component. + + This is the number of bytes required to encode the maximum key value. 
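+ For example, ES512 uses curve P-521, so each component needs + ceil(521 / 8) = 66 bytes and a raw signature is 132 bytes.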
+ """ + return int(math.ceil(self.prepared_key.key_size / 8.0)) + + def _der_to_raw(self, der_signature): + """Convert signature from DER encoding to RAW encoding.""" + r, s = decode_dss_signature(der_signature) + component_length = self._sig_component_length() + return int_to_bytes(r, component_length) + int_to_bytes(s, component_length) + + def _raw_to_der(self, raw_signature): + """Convert signature from RAW encoding to DER encoding.""" + component_length = self._sig_component_length() + if len(raw_signature) != int(2 * component_length): + raise ValueError("Invalid signature") + + r_bytes = raw_signature[:component_length] + s_bytes = raw_signature[component_length:] + r = int.from_bytes(r_bytes, "big") + s = int.from_bytes(s_bytes, "big") + return encode_dss_signature(r, s) + + def sign(self, msg): + if self.hash_alg.digest_size * 8 > self.prepared_key.curve.key_size: + raise TypeError( + "this curve (%s) is too short " + "for your digest (%d)" % (self.prepared_key.curve.name, 8 * self.hash_alg.digest_size) + ) + signature = self.prepared_key.sign(msg, ec.ECDSA(self.hash_alg())) + return self._der_to_raw(signature) + + def verify(self, msg, sig): + try: + signature = self._raw_to_der(sig) + self.prepared_key.verify(signature, msg, ec.ECDSA(self.hash_alg())) + return True + except Exception: + return False + + def is_public(self): + return hasattr(self.prepared_key, "public_bytes") + + def public_key(self): + if self.is_public(): + return self + return self.__class__(self.prepared_key.public_key(), self._algorithm) + + def to_pem(self): + if self.is_public(): + pem = self.prepared_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + return pem + pem = self.prepared_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + return pem + + def to_dict(self): + if not self.is_public(): + public_key = self.prepared_key.public_key() + else: + public_key = self.prepared_key + + crv = { + "secp256r1": "P-256", + "secp384r1": "P-384", + "secp521r1": "P-521", + }[self.prepared_key.curve.name] + + # Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of + # RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve + # points must be encoded as octed-strings of this length. 
+ key_size = (self.prepared_key.curve.key_size + 7) // 8 + + data = { + "alg": self._algorithm, + "kty": "EC", + "crv": crv, + "x": long_to_base64(public_key.public_numbers().x, size=key_size).decode("ASCII"), + "y": long_to_base64(public_key.public_numbers().y, size=key_size).decode("ASCII"), + } + + if not self.is_public(): + private_value = self.prepared_key.private_numbers().private_value + data["d"] = long_to_base64(private_value, size=key_size).decode("ASCII") + + return data + + +class CryptographyRSAKey(Key): + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + RSA1_5 = padding.PKCS1v15() + RSA_OAEP = padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None) + RSA_OAEP_256 = padding.OAEP(padding.MGF1(hashes.SHA256()), hashes.SHA256(), None) + + def __init__(self, key, algorithm, cryptography_backend=default_backend): + if algorithm not in ALGORITHMS.RSA: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + self.hash_alg = { + ALGORITHMS.RS256: self.SHA256, + ALGORITHMS.RS384: self.SHA384, + ALGORITHMS.RS512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + self.padding = { + ALGORITHMS.RSA1_5: self.RSA1_5, + ALGORITHMS.RSA_OAEP: self.RSA_OAEP, + ALGORITHMS.RSA_OAEP_256: self.RSA_OAEP_256, + }.get(algorithm) + + self.cryptography_backend = cryptography_backend + + # if it conforms to RSAPublicKey interface + if hasattr(key, "public_bytes") and hasattr(key, "public_numbers"): + self.prepared_key = key + return + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + try: + if key.startswith(b"-----BEGIN CERTIFICATE-----"): + self._process_cert(key) + return + + try: + self.prepared_key = load_pem_public_key(key, self.cryptography_backend()) + except ValueError: + self.prepared_key = load_pem_private_key(key, password=None, backend=self.cryptography_backend()) + except Exception as e: + raise JWKError(e) + return + + raise JWKError("Unable to parse an RSA_JWK from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "RSA": + raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get("kty")) + + e = base64_to_long(jwk_dict.get("e", 256)) + n = base64_to_long(jwk_dict.get("n")) + public = rsa.RSAPublicNumbers(e, n) + + if "d" not in jwk_dict: + return public.public_key(self.cryptography_backend()) + else: + # This is a private key. + d = base64_to_long(jwk_dict.get("d")) + + extra_params = ["p", "q", "dp", "dq", "qi"] + + if any(k in jwk_dict for k in extra_params): + # Precomputed private key parameters are available. + if not all(k in jwk_dict for k in extra_params): + # These values must be present when 'p' is according to + # Section 6.3.2 of RFC7518, so if they are not we raise + # an error. + raise JWKError("Precomputed private key parameters are incomplete.") + + p = base64_to_long(jwk_dict["p"]) + q = base64_to_long(jwk_dict["q"]) + dp = base64_to_long(jwk_dict["dp"]) + dq = base64_to_long(jwk_dict["dq"]) + qi = base64_to_long(jwk_dict["qi"]) + else: + # The precomputed private key parameters are not available, + # so we use cryptography's API to fill them in. 
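+ # rsa_recover_prime_factors derives p and q from (n, e, d); the CRT + # helpers then compute dp = d mod (p - 1), dq = d mod (q - 1) and + # qi = q^-1 mod p, matching the JWK parameters defined in RFC 7518.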
+ p, q = rsa.rsa_recover_prime_factors(n, e, d) + dp = rsa.rsa_crt_dmp1(d, p) + dq = rsa.rsa_crt_dmq1(d, q) + qi = rsa.rsa_crt_iqmp(p, q) + + private = rsa.RSAPrivateNumbers(p, q, d, dp, dq, qi, public) + + return private.private_key(self.cryptography_backend()) + + def _process_cert(self, key): + key = load_pem_x509_certificate(key, self.cryptography_backend()) + self.prepared_key = key.public_key() + + def sign(self, msg): + try: + signature = self.prepared_key.sign(msg, padding.PKCS1v15(), self.hash_alg()) + except Exception as e: + raise JWKError(e) + return signature + + def verify(self, msg, sig): + if not self.is_public(): + warnings.warn("Attempting to verify a message with a private key. " "This is not recommended.") + + try: + self.public_key().prepared_key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg()) + return True + except InvalidSignature: + return False + + def is_public(self): + return hasattr(self.prepared_key, "public_bytes") + + def public_key(self): + if self.is_public(): + return self + return self.__class__(self.prepared_key.public_key(), self._algorithm) + + def to_pem(self, pem_format="PKCS8"): + if self.is_public(): + if pem_format == "PKCS8": + fmt = serialization.PublicFormat.SubjectPublicKeyInfo + elif pem_format == "PKCS1": + fmt = serialization.PublicFormat.PKCS1 + else: + raise ValueError("Invalid format specified: %r" % pem_format) + pem = self.prepared_key.public_bytes(encoding=serialization.Encoding.PEM, format=fmt) + return pem + + if pem_format == "PKCS8": + fmt = serialization.PrivateFormat.PKCS8 + elif pem_format == "PKCS1": + fmt = serialization.PrivateFormat.TraditionalOpenSSL + else: + raise ValueError("Invalid format specified: %r" % pem_format) + + return self.prepared_key.private_bytes( + encoding=serialization.Encoding.PEM, format=fmt, encryption_algorithm=serialization.NoEncryption() + ) + + def to_dict(self): + if not self.is_public(): + public_key = self.prepared_key.public_key() + else: + public_key = self.prepared_key + + data = { + "alg": self._algorithm, + "kty": "RSA", + "n": long_to_base64(public_key.public_numbers().n).decode("ASCII"), + "e": long_to_base64(public_key.public_numbers().e).decode("ASCII"), + } + + if not self.is_public(): + data.update( + { + "d": long_to_base64(self.prepared_key.private_numbers().d).decode("ASCII"), + "p": long_to_base64(self.prepared_key.private_numbers().p).decode("ASCII"), + "q": long_to_base64(self.prepared_key.private_numbers().q).decode("ASCII"), + "dp": long_to_base64(self.prepared_key.private_numbers().dmp1).decode("ASCII"), + "dq": long_to_base64(self.prepared_key.private_numbers().dmq1).decode("ASCII"), + "qi": long_to_base64(self.prepared_key.private_numbers().iqmp).decode("ASCII"), + } + ) + + return data + + def wrap_key(self, key_data): + try: + wrapped_key = self.prepared_key.encrypt(key_data, self.padding) + except Exception as e: + raise JWEError(e) + + return wrapped_key + + def unwrap_key(self, wrapped_key): + try: + unwrapped_key = self.prepared_key.decrypt(wrapped_key, self.padding) + return unwrapped_key + except Exception as e: + raise JWEError(e) + + +class CryptographyAESKey(Key): + KEY_128 = (ALGORITHMS.A128GCM, ALGORITHMS.A128GCMKW, ALGORITHMS.A128KW, ALGORITHMS.A128CBC) + KEY_192 = (ALGORITHMS.A192GCM, ALGORITHMS.A192GCMKW, ALGORITHMS.A192KW, ALGORITHMS.A192CBC) + KEY_256 = ( + ALGORITHMS.A256GCM, + ALGORITHMS.A256GCMKW, + ALGORITHMS.A256KW, + ALGORITHMS.A128CBC_HS256, + ALGORITHMS.A256CBC, + ) + KEY_384 = (ALGORITHMS.A192CBC_HS384,) + KEY_512 = 
(ALGORITHMS.A256CBC_HS512,) + + AES_KW_ALGS = (ALGORITHMS.A128KW, ALGORITHMS.A192KW, ALGORITHMS.A256KW) + + MODES = { + ALGORITHMS.A128GCM: modes.GCM, + ALGORITHMS.A192GCM: modes.GCM, + ALGORITHMS.A256GCM: modes.GCM, + ALGORITHMS.A128CBC_HS256: modes.CBC, + ALGORITHMS.A192CBC_HS384: modes.CBC, + ALGORITHMS.A256CBC_HS512: modes.CBC, + ALGORITHMS.A128CBC: modes.CBC, + ALGORITHMS.A192CBC: modes.CBC, + ALGORITHMS.A256CBC: modes.CBC, + ALGORITHMS.A128GCMKW: modes.GCM, + ALGORITHMS.A192GCMKW: modes.GCM, + ALGORITHMS.A256GCMKW: modes.GCM, + ALGORITHMS.A128KW: None, + ALGORITHMS.A192KW: None, + ALGORITHMS.A256KW: None, + } + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.AES: + raise JWKError("%s is not a valid AES algorithm" % algorithm) + if algorithm not in ALGORITHMS.SUPPORTED.union(ALGORITHMS.AES_PSEUDO): + raise JWKError("%s is not a supported algorithm" % algorithm) + + self._algorithm = algorithm + self._mode = self.MODES.get(self._algorithm) + + if algorithm in self.KEY_128 and len(key) != 16: + raise JWKError(f"Key must be 128 bit for alg {algorithm}") + elif algorithm in self.KEY_192 and len(key) != 24: + raise JWKError(f"Key must be 192 bit for alg {algorithm}") + elif algorithm in self.KEY_256 and len(key) != 32: + raise JWKError(f"Key must be 256 bit for alg {algorithm}") + elif algorithm in self.KEY_384 and len(key) != 48: + raise JWKError(f"Key must be 384 bit for alg {algorithm}") + elif algorithm in self.KEY_512 and len(key) != 64: + raise JWKError(f"Key must be 512 bit for alg {algorithm}") + + self._key = key + + def to_dict(self): + data = {"alg": self._algorithm, "kty": "oct", "k": base64url_encode(self._key)} + return data + + def encrypt(self, plain_text, aad=None): + plain_text = ensure_binary(plain_text) + try: + iv = get_random_bytes(algorithms.AES.block_size // 8) + mode = self._mode(iv) + if mode.name == "GCM": + cipher = aead.AESGCM(self._key) + cipher_text_and_tag = cipher.encrypt(iv, plain_text, aad) + cipher_text = cipher_text_and_tag[: len(cipher_text_and_tag) - 16] + auth_tag = cipher_text_and_tag[-16:] + else: + cipher = Cipher(algorithms.AES(self._key), mode, backend=default_backend()) + encryptor = cipher.encryptor() + padder = PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(plain_text) + padded_data += padder.finalize() + cipher_text = encryptor.update(padded_data) + encryptor.finalize() + auth_tag = None + return iv, cipher_text, auth_tag + except Exception as e: + raise JWEError(e) + + def decrypt(self, cipher_text, iv=None, aad=None, tag=None): + cipher_text = ensure_binary(cipher_text) + try: + iv = ensure_binary(iv) + mode = self._mode(iv) + if mode.name == "GCM": + if tag is None: + raise ValueError("tag cannot be None") + cipher = aead.AESGCM(self._key) + cipher_text_and_tag = cipher_text + tag + try: + plain_text = cipher.decrypt(iv, cipher_text_and_tag, aad) + except InvalidTag: + raise JWEError("Invalid JWE Auth Tag") + else: + cipher = Cipher(algorithms.AES(self._key), mode, backend=default_backend()) + decryptor = cipher.decryptor() + padded_plain_text = decryptor.update(cipher_text) + padded_plain_text += decryptor.finalize() + unpadder = PKCS7(algorithms.AES.block_size).unpadder() + plain_text = unpadder.update(padded_plain_text) + plain_text += unpadder.finalize() + + return plain_text + except Exception as e: + raise JWEError(e) + + def wrap_key(self, key_data): + key_data = ensure_binary(key_data) + cipher_text = aes_key_wrap(self._key, key_data, default_backend()) + return cipher_text # IV, 
cipher text, auth tag + + def unwrap_key(self, wrapped_key): + wrapped_key = ensure_binary(wrapped_key) + try: + plain_text = aes_key_unwrap(self._key, wrapped_key, default_backend()) + except InvalidUnwrap as cause: + raise JWEError(cause) + return plain_text + + +class CryptographyHMACKey(Key): + """ + Performs signing and verification operations using HMAC + and the specified hash function. + """ + + ALG_MAP = {ALGORITHMS.HS256: hashes.SHA256(), ALGORITHMS.HS384: hashes.SHA384(), ALGORITHMS.HS512: hashes.SHA512()} + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.HMAC: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + self._algorithm = algorithm + self._hash_alg = self.ALG_MAP.get(algorithm) + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if not isinstance(key, str) and not isinstance(key, bytes): + raise JWKError("Expecting a string- or bytes-formatted key.") + + if isinstance(key, str): + key = key.encode("utf-8") + + invalid_strings = [ + b"-----BEGIN PUBLIC KEY-----", + b"-----BEGIN RSA PUBLIC KEY-----", + b"-----BEGIN CERTIFICATE-----", + b"ssh-rsa", + ] + + if any(string_value in key for string_value in invalid_strings): + raise JWKError( + "The specified key is an asymmetric key or x509 certificate and" + " should not be used as an HMAC secret." + ) + + self.prepared_key = key + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "oct": + raise JWKError("Incorrect key type. Expected: 'oct', Received: %s" % jwk_dict.get("kty")) + + k = jwk_dict.get("k") + k = k.encode("utf-8") + k = bytes(k) + k = base64url_decode(k) + + return k + + def to_dict(self): + return { + "alg": self._algorithm, + "kty": "oct", + "k": base64url_encode(self.prepared_key).decode("ASCII"), + } + + def sign(self, msg): + msg = ensure_binary(msg) + h = hmac.HMAC(self.prepared_key, self._hash_alg, backend=default_backend()) + h.update(msg) + signature = h.finalize() + return signature + + def verify(self, msg, sig): + msg = ensure_binary(msg) + sig = ensure_binary(sig) + h = hmac.HMAC(self.prepared_key, self._hash_alg, backend=default_backend()) + h.update(msg) + try: + h.verify(sig) + verified = True + except InvalidSignature: + verified = False + return verified diff --git a/venv/lib/python3.12/site-packages/jose/backends/ecdsa_backend.py b/venv/lib/python3.12/site-packages/jose/backends/ecdsa_backend.py new file mode 100644 index 0000000..756c7ea --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/backends/ecdsa_backend.py @@ -0,0 +1,150 @@ +import hashlib + +import ecdsa + +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWKError +from jose.utils import base64_to_long, long_to_base64 + + +class ECDSAECKey(Key): + """ + Performs signing and verification operations using + ECDSA and the specified hash function + + This class requires the ecdsa package to be installed. 
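+ It is the pure-Python fallback that backends/__init__.py selects when + the cryptography backend cannot be imported.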
+ + This is based off of the implementation in PyJWT 0.3.2 + """ + + SHA256 = hashlib.sha256 + SHA384 = hashlib.sha384 + SHA512 = hashlib.sha512 + + CURVE_MAP = { + SHA256: ecdsa.curves.NIST256p, + SHA384: ecdsa.curves.NIST384p, + SHA512: ecdsa.curves.NIST521p, + } + CURVE_NAMES = ( + (ecdsa.curves.NIST256p, "P-256"), + (ecdsa.curves.NIST384p, "P-384"), + (ecdsa.curves.NIST521p, "P-521"), + ) + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.EC: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + self.hash_alg = { + ALGORITHMS.ES256: self.SHA256, + ALGORITHMS.ES384: self.SHA384, + ALGORITHMS.ES512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + self.curve = self.CURVE_MAP.get(self.hash_alg) + + if isinstance(key, (ecdsa.SigningKey, ecdsa.VerifyingKey)): + self.prepared_key = key + return + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + # Attempt to load key. We don't know if it's + # a Signing Key or a Verifying Key, so we try + # the Verifying Key first. + try: + key = ecdsa.VerifyingKey.from_pem(key) + except ecdsa.der.UnexpectedDER: + key = ecdsa.SigningKey.from_pem(key) + except Exception as e: + raise JWKError(e) + + self.prepared_key = key + return + + raise JWKError("Unable to parse an ECKey from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "EC": + raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get("kty")) + + if not all(k in jwk_dict for k in ["x", "y", "crv"]): + raise JWKError("Mandatory parameters are missing") + + if "d" in jwk_dict: + # We are dealing with a private key; the secret exponent is enough + # to create an ecdsa key. + d = base64_to_long(jwk_dict.get("d")) + return ecdsa.keys.SigningKey.from_secret_exponent(d, self.curve) + else: + x = base64_to_long(jwk_dict.get("x")) + y = base64_to_long(jwk_dict.get("y")) + + if not ecdsa.ecdsa.point_is_valid(self.curve.generator, x, y): + raise JWKError(f"Point: {x}, {y} is not a valid point") + + point = ecdsa.ellipticcurve.Point(self.curve.curve, x, y, self.curve.order) + return ecdsa.keys.VerifyingKey.from_public_point(point, self.curve) + + def sign(self, msg): + return self.prepared_key.sign( + msg, hashfunc=self.hash_alg, sigencode=ecdsa.util.sigencode_string, allow_truncate=False + ) + + def verify(self, msg, sig): + try: + return self.prepared_key.verify( + sig, msg, hashfunc=self.hash_alg, sigdecode=ecdsa.util.sigdecode_string, allow_truncate=False + ) + except Exception: + return False + + def is_public(self): + return isinstance(self.prepared_key, ecdsa.VerifyingKey) + + def public_key(self): + if self.is_public(): + return self + return self.__class__(self.prepared_key.get_verifying_key(), self._algorithm) + + def to_pem(self): + return self.prepared_key.to_pem() + + def to_dict(self): + if not self.is_public(): + public_key = self.prepared_key.get_verifying_key() + else: + public_key = self.prepared_key + crv = None + for key, value in self.CURVE_NAMES: + if key == self.prepared_key.curve: + crv = value + if not crv: + raise KeyError(f"Can't match {self.prepared_key.curve}") + + # Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of + # RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve + # points must be encoded as octed-strings of this length. 
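+ # The ecdsa package exposes this directly: curve.baselen is the byte + # length of the curve order (32 for NIST256p, 48 for NIST384p, 66 for + # NIST521p).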
+ key_size = self.prepared_key.curve.baselen + + data = { + "alg": self._algorithm, + "kty": "EC", + "crv": crv, + "x": long_to_base64(public_key.pubkey.point.x(), size=key_size).decode("ASCII"), + "y": long_to_base64(public_key.pubkey.point.y(), size=key_size).decode("ASCII"), + } + + if not self.is_public(): + data["d"] = long_to_base64(self.prepared_key.privkey.secret_multiplier, size=key_size).decode("ASCII") + + return data diff --git a/venv/lib/python3.12/site-packages/jose/backends/native.py b/venv/lib/python3.12/site-packages/jose/backends/native.py new file mode 100644 index 0000000..eb3a6ae --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/backends/native.py @@ -0,0 +1,76 @@ +import hashlib +import hmac +import os + +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWKError +from jose.utils import base64url_decode, base64url_encode + + +def get_random_bytes(num_bytes): + return bytes(os.urandom(num_bytes)) + + +class HMACKey(Key): + """ + Performs signing and verification operations using HMAC + and the specified hash function. + """ + + HASHES = {ALGORITHMS.HS256: hashlib.sha256, ALGORITHMS.HS384: hashlib.sha384, ALGORITHMS.HS512: hashlib.sha512} + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.HMAC: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + self._algorithm = algorithm + self._hash_alg = self.HASHES.get(algorithm) + + if isinstance(key, dict): + self.prepared_key = self._process_jwk(key) + return + + if not isinstance(key, str) and not isinstance(key, bytes): + raise JWKError("Expecting a string- or bytes-formatted key.") + + if isinstance(key, str): + key = key.encode("utf-8") + + invalid_strings = [ + b"-----BEGIN PUBLIC KEY-----", + b"-----BEGIN RSA PUBLIC KEY-----", + b"-----BEGIN CERTIFICATE-----", + b"ssh-rsa", + ] + + if any(string_value in key for string_value in invalid_strings): + raise JWKError( + "The specified key is an asymmetric key or x509 certificate and" + " should not be used as an HMAC secret." + ) + + self.prepared_key = key + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "oct": + raise JWKError("Incorrect key type. 
Expected: 'oct', Received: %s" % jwk_dict.get("kty")) + + k = jwk_dict.get("k") + k = k.encode("utf-8") + k = bytes(k) + k = base64url_decode(k) + + return k + + def sign(self, msg): + return hmac.new(self.prepared_key, msg, self._hash_alg).digest() + + def verify(self, msg, sig): + return hmac.compare_digest(sig, self.sign(msg)) + + def to_dict(self): + return { + "alg": self._algorithm, + "kty": "oct", + "k": base64url_encode(self.prepared_key).decode("ASCII"), + } diff --git a/venv/lib/python3.12/site-packages/jose/backends/rsa_backend.py b/venv/lib/python3.12/site-packages/jose/backends/rsa_backend.py new file mode 100644 index 0000000..4e8ccf1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/backends/rsa_backend.py @@ -0,0 +1,284 @@ +import binascii +import warnings + +import rsa as pyrsa +import rsa.pem as pyrsa_pem +from pyasn1.error import PyAsn1Error +from rsa import DecryptionError + +from jose.backends._asn1 import ( + rsa_private_key_pkcs1_to_pkcs8, + rsa_private_key_pkcs8_to_pkcs1, + rsa_public_key_pkcs1_to_pkcs8, +) +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWEError, JWKError +from jose.utils import base64_to_long, long_to_base64 + +ALGORITHMS.SUPPORTED.remove(ALGORITHMS.RSA_OAEP) # RSA OAEP not supported + +LEGACY_INVALID_PKCS8_RSA_HEADER = binascii.unhexlify( + "30" # sequence + "8204BD" # DER-encoded sequence contents length of 1213 bytes -- INCORRECT STATIC LENGTH + "020100" # integer: 0 -- Version + "30" # sequence + "0D" # DER-encoded sequence contents length of 13 bytes -- PrivateKeyAlgorithmIdentifier + "06092A864886F70D010101" # OID -- rsaEncryption + "0500" # NULL -- parameters +) +ASN1_SEQUENCE_ID = binascii.unhexlify("30") +RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1" + +# Functions gcd and rsa_recover_prime_factors were copied from cryptography 1.9 +# to enable pure python rsa module to be in compliance with section 6.3.1 of RFC7518 +# which requires only private exponent (d) for private key. + + +def _gcd(a, b): + """Calculate the Greatest Common Divisor of a and b. + + Unless b==0, the result will have the same sign as b (so that when + b is divided by it, the result comes out positive). + """ + while b: + a, b = b, (a % b) + return a + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def _rsa_recover_prime_factors(n, e, d): + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. 
Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = _gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + p, q = sorted((p, q), reverse=True) + return (p, q) + + +def pem_to_spki(pem, fmt="PKCS8"): + key = RSAKey(pem, ALGORITHMS.RS256) + return key.to_pem(fmt) + + +def _legacy_private_key_pkcs8_to_pkcs1(pkcs8_key): + """Legacy RSA private key PKCS8-to-PKCS1 conversion. + + .. warning:: + + This is incorrect parsing and only works because the legacy PKCS1-to-PKCS8 + encoding was also incorrect. + """ + # Only allow this processing if the prefix matches + # AND the following byte indicates an ASN1 sequence, + # as we would expect with the legacy encoding. + if not pkcs8_key.startswith(LEGACY_INVALID_PKCS8_RSA_HEADER + ASN1_SEQUENCE_ID): + raise ValueError("Invalid private key encoding") + + return pkcs8_key[len(LEGACY_INVALID_PKCS8_RSA_HEADER) :] + + +class RSAKey(Key): + SHA256 = "SHA-256" + SHA384 = "SHA-384" + SHA512 = "SHA-512" + + def __init__(self, key, algorithm): + if algorithm not in ALGORITHMS.RSA: + raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) + + if algorithm in ALGORITHMS.RSA_KW and algorithm != ALGORITHMS.RSA1_5: + raise JWKError("alg: %s is not supported by the RSA backend" % algorithm) + + self.hash_alg = { + ALGORITHMS.RS256: self.SHA256, + ALGORITHMS.RS384: self.SHA384, + ALGORITHMS.RS512: self.SHA512, + }.get(algorithm) + self._algorithm = algorithm + + if isinstance(key, dict): + self._prepared_key = self._process_jwk(key) + return + + if isinstance(key, (pyrsa.PublicKey, pyrsa.PrivateKey)): + self._prepared_key = key + return + + if isinstance(key, str): + key = key.encode("utf-8") + + if isinstance(key, bytes): + try: + self._prepared_key = pyrsa.PublicKey.load_pkcs1(key) + except ValueError: + try: + self._prepared_key = pyrsa.PublicKey.load_pkcs1_openssl_pem(key) + except ValueError: + try: + self._prepared_key = pyrsa.PrivateKey.load_pkcs1(key) + except ValueError: + try: + der = pyrsa_pem.load_pem(key, b"PRIVATE KEY") + try: + pkcs1_key = rsa_private_key_pkcs8_to_pkcs1(der) + except PyAsn1Error: + # If the key was encoded using the old, invalid, + # encoding then pyasn1 will throw an error attempting + # to parse the key. + pkcs1_key = _legacy_private_key_pkcs8_to_pkcs1(der) + self._prepared_key = pyrsa.PrivateKey.load_pkcs1(pkcs1_key, format="DER") + except ValueError as e: + raise JWKError(e) + return + raise JWKError("Unable to parse an RSA_JWK from key: %s" % key) + + def _process_jwk(self, jwk_dict): + if not jwk_dict.get("kty") == "RSA": + raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get("kty")) + + e = base64_to_long(jwk_dict.get("e")) + n = base64_to_long(jwk_dict.get("n")) + + if "d" not in jwk_dict: + return pyrsa.PublicKey(e=e, n=n) + else: + d = base64_to_long(jwk_dict.get("d")) + extra_params = ["p", "q", "dp", "dq", "qi"] + + if any(k in jwk_dict for k in extra_params): + # Precomputed private key parameters are available. 
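+ # Note that only p and q are passed to pyrsa below; PrivateKey.__init__ + # recomputes exp1, exp2 and coef from them, so dp, dq and qi are + # required here only to validate that the JWK is complete.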
+ if not all(k in jwk_dict for k in extra_params): + # These values must be present when 'p' is according to + # Section 6.3.2 of RFC7518, so if they are not we raise + # an error. + raise JWKError("Precomputed private key parameters are incomplete.") + + p = base64_to_long(jwk_dict["p"]) + q = base64_to_long(jwk_dict["q"]) + return pyrsa.PrivateKey(e=e, n=n, d=d, p=p, q=q) + else: + p, q = _rsa_recover_prime_factors(n, e, d) + return pyrsa.PrivateKey(n=n, e=e, d=d, p=p, q=q) + + def sign(self, msg): + return pyrsa.sign(msg, self._prepared_key, self.hash_alg) + + def verify(self, msg, sig): + if not self.is_public(): + warnings.warn("Attempting to verify a message with a private key. " "This is not recommended.") + try: + pyrsa.verify(msg, sig, self._prepared_key) + return True + except pyrsa.pkcs1.VerificationError: + return False + + def is_public(self): + return isinstance(self._prepared_key, pyrsa.PublicKey) + + def public_key(self): + if isinstance(self._prepared_key, pyrsa.PublicKey): + return self + return self.__class__(pyrsa.PublicKey(n=self._prepared_key.n, e=self._prepared_key.e), self._algorithm) + + def to_pem(self, pem_format="PKCS8"): + + if isinstance(self._prepared_key, pyrsa.PrivateKey): + der = self._prepared_key.save_pkcs1(format="DER") + if pem_format == "PKCS8": + pkcs8_der = rsa_private_key_pkcs1_to_pkcs8(der) + pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker="PRIVATE KEY") + elif pem_format == "PKCS1": + pem = pyrsa_pem.save_pem(der, pem_marker="RSA PRIVATE KEY") + else: + raise ValueError(f"Invalid pem format specified: {pem_format!r}") + else: + if pem_format == "PKCS8": + pkcs1_der = self._prepared_key.save_pkcs1(format="DER") + pkcs8_der = rsa_public_key_pkcs1_to_pkcs8(pkcs1_der) + pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker="PUBLIC KEY") + elif pem_format == "PKCS1": + der = self._prepared_key.save_pkcs1(format="DER") + pem = pyrsa_pem.save_pem(der, pem_marker="RSA PUBLIC KEY") + else: + raise ValueError(f"Invalid pem format specified: {pem_format!r}") + return pem + + def to_dict(self): + if not self.is_public(): + public_key = self.public_key()._prepared_key + else: + public_key = self._prepared_key + + data = { + "alg": self._algorithm, + "kty": "RSA", + "n": long_to_base64(public_key.n).decode("ASCII"), + "e": long_to_base64(public_key.e).decode("ASCII"), + } + + if not self.is_public(): + data.update( + { + "d": long_to_base64(self._prepared_key.d).decode("ASCII"), + "p": long_to_base64(self._prepared_key.p).decode("ASCII"), + "q": long_to_base64(self._prepared_key.q).decode("ASCII"), + "dp": long_to_base64(self._prepared_key.exp1).decode("ASCII"), + "dq": long_to_base64(self._prepared_key.exp2).decode("ASCII"), + "qi": long_to_base64(self._prepared_key.coef).decode("ASCII"), + } + ) + + return data + + def wrap_key(self, key_data): + if not self.is_public(): + warnings.warn("Attempting to encrypt a message with a private key." 
" This is not recommended.") + wrapped_key = pyrsa.encrypt(key_data, self._prepared_key) + return wrapped_key + + def unwrap_key(self, wrapped_key): + try: + unwrapped_key = pyrsa.decrypt(wrapped_key, self._prepared_key) + except DecryptionError as e: + raise JWEError(e) + return unwrapped_key diff --git a/venv/lib/python3.12/site-packages/jose/constants.py b/venv/lib/python3.12/site-packages/jose/constants.py new file mode 100644 index 0000000..ab4d74d --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/constants.py @@ -0,0 +1,98 @@ +import hashlib + + +class Algorithms: + # DS Algorithms + NONE = "none" + HS256 = "HS256" + HS384 = "HS384" + HS512 = "HS512" + RS256 = "RS256" + RS384 = "RS384" + RS512 = "RS512" + ES256 = "ES256" + ES384 = "ES384" + ES512 = "ES512" + + # Content Encryption Algorithms + A128CBC_HS256 = "A128CBC-HS256" + A192CBC_HS384 = "A192CBC-HS384" + A256CBC_HS512 = "A256CBC-HS512" + A128GCM = "A128GCM" + A192GCM = "A192GCM" + A256GCM = "A256GCM" + + # Pseudo algorithm for encryption + A128CBC = "A128CBC" + A192CBC = "A192CBC" + A256CBC = "A256CBC" + + # CEK Encryption Algorithms + DIR = "dir" + RSA1_5 = "RSA1_5" + RSA_OAEP = "RSA-OAEP" + RSA_OAEP_256 = "RSA-OAEP-256" + A128KW = "A128KW" + A192KW = "A192KW" + A256KW = "A256KW" + ECDH_ES = "ECDH-ES" + ECDH_ES_A128KW = "ECDH-ES+A128KW" + ECDH_ES_A192KW = "ECDH-ES+A192KW" + ECDH_ES_A256KW = "ECDH-ES+A256KW" + A128GCMKW = "A128GCMKW" + A192GCMKW = "A192GCMKW" + A256GCMKW = "A256GCMKW" + PBES2_HS256_A128KW = "PBES2-HS256+A128KW" + PBES2_HS384_A192KW = "PBES2-HS384+A192KW" + PBES2_HS512_A256KW = "PBES2-HS512+A256KW" + + # Compression Algorithms + DEF = "DEF" + + HMAC = {HS256, HS384, HS512} + RSA_DS = {RS256, RS384, RS512} + RSA_KW = {RSA1_5, RSA_OAEP, RSA_OAEP_256} + RSA = RSA_DS.union(RSA_KW) + EC_DS = {ES256, ES384, ES512} + EC_KW = {ECDH_ES, ECDH_ES_A128KW, ECDH_ES_A192KW, ECDH_ES_A256KW} + EC = EC_DS.union(EC_KW) + AES_PSEUDO = {A128CBC, A192CBC, A256CBC, A128GCM, A192GCM, A256GCM} + AES_JWE_ENC = {A128CBC_HS256, A192CBC_HS384, A256CBC_HS512, A128GCM, A192GCM, A256GCM} + AES_ENC = AES_JWE_ENC.union(AES_PSEUDO) + AES_KW = {A128KW, A192KW, A256KW} + AEC_GCM_KW = {A128GCMKW, A192GCMKW, A256GCMKW} + AES = AES_ENC.union(AES_KW) + PBES2_KW = {PBES2_HS256_A128KW, PBES2_HS384_A192KW, PBES2_HS512_A256KW} + + HMAC_AUTH_TAG = {A128CBC_HS256, A192CBC_HS384, A256CBC_HS512} + GCM = {A128GCM, A192GCM, A256GCM} + + SUPPORTED = HMAC.union(RSA_DS).union(EC_DS).union([DIR]).union(AES_JWE_ENC).union(RSA_KW).union(AES_KW) + + ALL = SUPPORTED.union([NONE]).union(AEC_GCM_KW).union(EC_KW).union(PBES2_KW) + + HASHES = { + HS256: hashlib.sha256, + HS384: hashlib.sha384, + HS512: hashlib.sha512, + RS256: hashlib.sha256, + RS384: hashlib.sha384, + RS512: hashlib.sha512, + ES256: hashlib.sha256, + ES384: hashlib.sha384, + ES512: hashlib.sha512, + } + + KEYS = {} + + +ALGORITHMS = Algorithms() + + +class Zips: + DEF = "DEF" + NONE = None + SUPPORTED = {DEF, NONE} + + +ZIPS = Zips() diff --git a/venv/lib/python3.12/site-packages/jose/exceptions.py b/venv/lib/python3.12/site-packages/jose/exceptions.py new file mode 100644 index 0000000..e8edc3b --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/exceptions.py @@ -0,0 +1,59 @@ +class JOSEError(Exception): + pass + + +class JWSError(JOSEError): + pass + + +class JWSSignatureError(JWSError): + pass + + +class JWSAlgorithmError(JWSError): + pass + + +class JWTError(JOSEError): + pass + + +class JWTClaimsError(JWTError): + pass + + +class ExpiredSignatureError(JWTError): + pass + + +class 
JWKError(JOSEError): + pass + + +class JWEError(JOSEError): + """Base error for all JWE errors""" + + pass + + +class JWEParseError(JWEError): + """Could not parse the JWE string provided""" + + pass + + +class JWEInvalidAuth(JWEError): + """ + The authentication tag did not match the protected sections of the + JWE string provided + """ + + pass + + +class JWEAlgorithmUnsupportedError(JWEError): + """ + The JWE algorithm is not supported by the backend + """ + + pass diff --git a/venv/lib/python3.12/site-packages/jose/jwe.py b/venv/lib/python3.12/site-packages/jose/jwe.py new file mode 100644 index 0000000..2c387ff --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/jwe.py @@ -0,0 +1,607 @@ +import binascii +import json +import zlib +from collections.abc import Mapping +from struct import pack + +from . import jwk +from .backends import get_random_bytes +from .constants import ALGORITHMS, ZIPS +from .exceptions import JWEError, JWEParseError +from .utils import base64url_decode, base64url_encode, ensure_binary + + +def encrypt(plaintext, key, encryption=ALGORITHMS.A256GCM, algorithm=ALGORITHMS.DIR, zip=None, cty=None, kid=None): + """Encrypts plaintext and returns a JWE cmpact serialization string. + + Args: + plaintext (bytes): A bytes object to encrypt + key (str or dict): The key(s) to use for encrypting the content. Can be + individual JWK or JWK set. + encryption (str, optional): The content encryption algorithm used to + perform authenticated encryption on the plaintext to produce the + ciphertext and the Authentication Tag. Defaults to A256GCM. + algorithm (str, optional): The cryptographic algorithm used + to encrypt or determine the value of the CEK. Defaults to dir. + zip (str, optional): The compression algorithm) applied to the + plaintext before encryption. Defaults to None. + cty (str, optional): The media type for the secured content. + See http://www.iana.org/assignments/media-types/media-types.xhtml + kid (str, optional): Key ID for the provided key + + Returns: + bytes: The string representation of the header, encrypted key, + initialization vector, ciphertext, and authentication tag. + + Raises: + JWEError: If there is an error signing the token. + + Examples: + >>> from jose import jwe + >>> jwe.encrypt('Hello, World!', 'asecret128bitkey', algorithm='dir', encryption='A128GCM') + 'eyJhbGciOiJkaXIiLCJlbmMiOiJBMTI4R0NNIn0..McILMB3dYsNJSuhcDzQshA.OfX9H_mcUpHDeRM4IA.CcnTWqaqxNsjT4eCaUABSg' + + """ + plaintext = ensure_binary(plaintext) # Make sure it's bytes + if algorithm not in ALGORITHMS.SUPPORTED: + raise JWEError("Algorithm %s not supported." % algorithm) + if encryption not in ALGORITHMS.SUPPORTED: + raise JWEError("Algorithm %s not supported." % encryption) + key = jwk.construct(key, algorithm) + encoded_header = _encoded_header(algorithm, encryption, zip, cty, kid) + + plaintext = _compress(zip, plaintext) + enc_cek, iv, cipher_text, auth_tag = _encrypt_and_auth(key, algorithm, encryption, zip, plaintext, encoded_header) + + jwe_string = _jwe_compact_serialize(encoded_header, enc_cek, iv, cipher_text, auth_tag) + return jwe_string + + +def decrypt(jwe_str, key): + """Decrypts a JWE compact serialized string and returns the plaintext. + + Args: + jwe_str (str): A JWE to be decrypt. + key (str or dict): A key to attempt to decrypt the payload with. Can be + individual JWK or JWK set. + + Returns: + bytes: The plaintext bytes, assuming the authentication tag is valid. + + Raises: + JWEError: If there is an exception verifying the token. 
+ + Examples: + >>> from jose import jwe + >>> jwe.decrypt(jwe_string, 'asecret128bitkey') + 'Hello, World!' + """ + header, encoded_header, encrypted_key, iv, cipher_text, auth_tag = _jwe_compact_deserialize(jwe_str) + + # Verify that the implementation understands and can process all + # fields that it is required to support, whether required by this + # specification, by the algorithms being used, or by the "crit" + # Header Parameter value, and that the values of those parameters + # are also understood and supported. + + try: + # Determine the Key Management Mode employed by the algorithm + # specified by the "alg" (algorithm) Header Parameter. + alg = header["alg"] + enc = header["enc"] + if alg not in ALGORITHMS.SUPPORTED: + raise JWEError("Algorithm %s not supported." % alg) + if enc not in ALGORITHMS.SUPPORTED: + raise JWEError("Algorithm %s not supported." % enc) + + except KeyError: + raise JWEParseError("alg and enc headers are required!") + + # Verify that the JWE uses a key known to the recipient. + key = jwk.construct(key, alg) + + # When Direct Key Agreement or Key Agreement with Key Wrapping are + # employed, use the key agreement algorithm to compute the value + # of the agreed upon key. When Direct Key Agreement is employed, + # let the CEK be the agreed upon key. When Key Agreement with Key + # Wrapping is employed, the agreed upon key will be used to + # decrypt the JWE Encrypted Key. + # + # When Key Wrapping, Key Encryption, or Key Agreement with Key + # Wrapping are employed, decrypt the JWE Encrypted Key to produce + # the CEK. The CEK MUST have a length equal to that required for + # the content encryption algorithm. Note that when there are + # multiple recipients, each recipient will only be able to decrypt + # JWE Encrypted Key values that were encrypted to a key in that + # recipient's possession. It is therefore normal to only be able + # to decrypt one of the per-recipient JWE Encrypted Key values to + # obtain the CEK value. Also, see Section 11.5 for security + # considerations on mitigating timing attacks. + if alg == ALGORITHMS.DIR: + # When Direct Key Agreement or Direct Encryption are employed, + # verify that the JWE Encrypted Key value is an empty octet + # sequence. + + # Record whether the CEK could be successfully determined for this + # recipient or not. + cek_valid = encrypted_key == b"" + + # When Direct Encryption is employed, let the CEK be the shared + # symmetric key. + cek_bytes = _get_key_bytes_from_key(key) + else: + try: + cek_bytes = key.unwrap_key(encrypted_key) + + # Record whether the CEK could be successfully determined for this + # recipient or not. + cek_valid = True + except NotImplementedError: + raise JWEError(f"alg {alg} is not implemented") + except Exception: + # Record whether the CEK could be successfully determined for this + # recipient or not. + cek_valid = False + + # To mitigate the attacks described in RFC 3218 [RFC3218], the + # recipient MUST NOT distinguish between format, padding, and length + # errors of encrypted keys. It is strongly recommended, in the event + # of receiving an improperly formatted key, that the recipient + # substitute a randomly generated CEK and proceed to the next step, to + # mitigate timing attacks. + cek_bytes = _get_random_cek_bytes_for_enc(enc) + + # Compute the Encoded Protected Header value BASE64URL(UTF8(JWE + # Protected Header)). 
If the JWE Protected Header is not present + # (which can only happen when using the JWE JSON Serialization and + # no "protected" member is present), let this value be the empty + # string. + protected_header = encoded_header + + # Let the Additional Authenticated Data encryption parameter be + # ASCII(Encoded Protected Header). However, if a JWE AAD value is + # present (which can only be the case when using the JWE JSON + # Serialization), instead let the Additional Authenticated Data + # encryption parameter be ASCII(Encoded Protected Header || '.' || + # BASE64URL(JWE AAD)). + aad = protected_header + + # Decrypt the JWE Ciphertext using the CEK, the JWE Initialization + # Vector, the Additional Authenticated Data value, and the JWE + # Authentication Tag (which is the Authentication Tag input to the + # calculation) using the specified content encryption algorithm, + # returning the decrypted plaintext and validating the JWE + # Authentication Tag in the manner specified for the algorithm, + # rejecting the input without emitting any decrypted output if the + # JWE Authentication Tag is incorrect. + try: + plain_text = _decrypt_and_auth(cek_bytes, enc, cipher_text, iv, aad, auth_tag) + except NotImplementedError: + raise JWEError(f"enc {enc} is not implemented") + except Exception as e: + raise JWEError(e) + + # If a "zip" parameter was included, uncompress the decrypted + # plaintext using the specified compression algorithm. + if plain_text is not None: + plain_text = _decompress(header.get("zip"), plain_text) + + return plain_text if cek_valid else None + + +def get_unverified_header(jwe_str): + """Returns the decoded headers without verification of any kind. + + Args: + jwe_str (str): A compact serialized JWE to decode the headers from. + + Returns: + dict: The dict representation of the JWE headers. + + Raises: + JWEError: If there is an exception decoding the JWE. 
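+ + Note that the header is read from an unauthenticated part of the + token; do not act on its contents until decryption succeeds.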
+ """ + header = _jwe_compact_deserialize(jwe_str)[0] + return header + + +def _decrypt_and_auth(cek_bytes, enc, cipher_text, iv, aad, auth_tag): + """ + Decrypt and verify the data + + Args: + cek_bytes (bytes): cek to derive encryption and possible auth key to + verify the auth tag + cipher_text (bytes): Encrypted data + iv (bytes): Initialization vector (iv) used to encrypt data + aad (bytes): Additional Authenticated Data used to verify the data + auth_tag (bytes): Authentication ntag to verify the data + + Returns: + (bytes): Decrypted data + """ + # Decrypt the JWE Ciphertext using the CEK, the JWE Initialization + # Vector, the Additional Authenticated Data value, and the JWE + # Authentication Tag (which is the Authentication Tag input to the + # calculation) using the specified content encryption algorithm, + # returning the decrypted plaintext + # and validating the JWE + # Authentication Tag in the manner specified for the algorithm, + if enc in ALGORITHMS.HMAC_AUTH_TAG: + encryption_key, mac_key, key_len = _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc) + auth_tag_check = _auth_tag(cipher_text, iv, aad, mac_key, key_len) + elif enc in ALGORITHMS.GCM: + encryption_key = jwk.construct(cek_bytes, enc) + auth_tag_check = auth_tag # GCM check auth on decrypt + else: + raise NotImplementedError(f"enc {enc} is not implemented!") + + plaintext = encryption_key.decrypt(cipher_text, iv, aad, auth_tag) + if auth_tag != auth_tag_check: + raise JWEError("Invalid JWE Auth Tag") + + return plaintext + + +def _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc): + derived_key_len = len(cek_bytes) // 2 + mac_key_bytes = cek_bytes[0:derived_key_len] + mac_key = _get_hmac_key(enc, mac_key_bytes) + encryption_key_bytes = cek_bytes[-derived_key_len:] + encryption_alg, _ = enc.split("-") + encryption_key = jwk.construct(encryption_key_bytes, encryption_alg) + return encryption_key, mac_key, derived_key_len + + +def _jwe_compact_deserialize(jwe_bytes): + """ + Deserialize and verify the header and segments are appropriate. + + Args: + jwe_bytes (bytes): The compact serialized JWE + Returns: + (dict, bytes, bytes, bytes, bytes, bytes) + """ + + # Base64url decode the encoded representations of the JWE + # Protected Header, the JWE Encrypted Key, the JWE Initialization + # Vector, the JWE Ciphertext, the JWE Authentication Tag, and the + # JWE AAD, following the restriction that no line breaks, + # whitespace, or other additional characters have been used. + jwe_bytes = ensure_binary(jwe_bytes) + try: + header_segment, encrypted_key_segment, iv_segment, cipher_text_segment, auth_tag_segment = jwe_bytes.split( + b".", 4 + ) + header_data = base64url_decode(header_segment) + except ValueError: + raise JWEParseError("Not enough segments") + except (TypeError, binascii.Error): + raise JWEParseError("Invalid header") + + # Verify that the octet sequence resulting from decoding the + # encoded JWE Protected Header is a UTF-8-encoded representation + # of a completely valid JSON object conforming to RFC 7159 + # [RFC7159]; let the JWE Protected Header be this JSON object. + # + # If using the JWE Compact Serialization, let the JOSE Header be + # the JWE Protected Header. Otherwise, when using the JWE JSON + # Serialization, let the JOSE Header be the union of the members + # of the JWE Protected Header, the JWE Shared Unprotected Header + # and the corresponding JWE Per-Recipient Unprotected Header, all + # of which must be completely valid JSON objects. 
During this + # step, verify that the resulting JOSE Header does not contain + # duplicate Header Parameter names. When using the JWE JSON + # Serialization, this restriction includes that the same Header + # Parameter name also MUST NOT occur in distinct JSON object + # values that together comprise the JOSE Header. + + try: + header = json.loads(header_data) + except ValueError as e: + raise JWEParseError(f"Invalid header string: {e}") + + if not isinstance(header, Mapping): + raise JWEParseError("Invalid header string: must be a json object") + + try: + encrypted_key = base64url_decode(encrypted_key_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid encrypted key") + + try: + iv = base64url_decode(iv_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid IV") + + try: + ciphertext = base64url_decode(cipher_text_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid cyphertext") + + try: + auth_tag = base64url_decode(auth_tag_segment) + except (TypeError, binascii.Error): + raise JWEParseError("Invalid auth tag") + + return header, header_segment, encrypted_key, iv, ciphertext, auth_tag + + +def _encoded_header(alg, enc, zip, cty, kid): + """ + Generate an appropriate JOSE header based on the values provided + Args: + alg (str): Key wrap/negotiation algorithm + enc (str): Encryption algorithm + zip (str): Compression method + cty (str): Content type of the encrypted data + kid (str): ID for the key used for the operation + + Returns: + bytes: JSON object of header based on input + """ + header = {"alg": alg, "enc": enc} + if zip: + header["zip"] = zip + if cty: + header["cty"] = cty + if kid: + header["kid"] = kid + json_header = json.dumps( + header, + separators=(",", ":"), + sort_keys=True, + ).encode("utf-8") + return base64url_encode(json_header) + + +def _big_endian(int_val): + return pack("!Q", int_val) + + +def _encrypt_and_auth(key, alg, enc, zip, plaintext, aad): + """ + Generate a content encryption key (cek) and initialization + vector (iv) based on enc and alg, compress the plaintext based on zip, + encrypt the compressed plaintext using the cek and iv based on enc + + Args: + key (Key): The key provided for encryption + alg (str): The algorithm use for key wrap/negotiation + enc (str): The encryption algorithm with which to encrypt the plaintext + zip (str): The compression algorithm with which to compress the plaintext + plaintext (bytes): The data to encrypt + aad (str): Additional authentication data utilized for generating an + auth tag + + Returns: + (bytes, bytes, bytes, bytes): A tuple of the following data + (key wrapped cek, iv, cipher text, auth tag) + """ + try: + cek_bytes, kw_cek = _get_cek(enc, alg, key) + except NotImplementedError: + raise JWEError(f"alg {alg} is not implemented") + + if enc in ALGORITHMS.HMAC_AUTH_TAG: + encryption_key, mac_key, key_len = _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc) + iv, ciphertext, tag = encryption_key.encrypt(plaintext, aad) + auth_tag = _auth_tag(ciphertext, iv, aad, mac_key, key_len) + elif enc in ALGORITHMS.GCM: + encryption_key = jwk.construct(cek_bytes, enc) + iv, ciphertext, auth_tag = encryption_key.encrypt(plaintext, aad) + else: + raise NotImplementedError(f"enc {enc} is not implemented!") + + return kw_cek, iv, ciphertext, auth_tag + + +def _get_hmac_key(enc, mac_key_bytes): + """ + Get an HMACKey for the provided encryption algorithm and key bytes + + Args: + enc (str): Encryption algorithm + mac_key_bytes (bytes): 
+def _get_hmac_key(enc, mac_key_bytes): + """ + Get an HMACKey for the provided encryption algorithm and key bytes + + Args: + enc (str): Encryption algorithm + mac_key_bytes (bytes): Bytes for the HMAC key + + Returns: + (HMACKey): The key to perform HMAC actions + """ + _, hash_alg = enc.split("-") + mac_key = jwk.construct(mac_key_bytes, hash_alg) + return mac_key + + +def _compress(zip, plaintext): + """ + Compress the plaintext based on the algorithm supplied + + Args: + zip (str): Compression Algorithm + plaintext (bytes): plaintext to compress + + Returns: + (bytes): Compressed plaintext + """ + if zip not in ZIPS.SUPPORTED: + raise NotImplementedError(f"ZIP {zip} is not supported!") + if zip is None: + compressed = plaintext + elif zip == ZIPS.DEF: + compressed = zlib.compress(plaintext) + else: + raise NotImplementedError(f"ZIP {zip} is not implemented!") + return compressed + + +def _decompress(zip, compressed): + """ + Decompress the plaintext based on the algorithm supplied + + Args: + zip (str): Compression Algorithm + compressed (bytes): Data to decompress + + Returns: + (bytes): Decompressed plaintext + """ + if zip not in ZIPS.SUPPORTED: + raise NotImplementedError(f"ZIP {zip} is not supported!") + if zip is None: + decompressed = compressed + elif zip == ZIPS.DEF: + decompressed = zlib.decompress(compressed) + else: + raise NotImplementedError(f"ZIP {zip} is not implemented!") + return decompressed + + +def _get_cek(enc, alg, key): + """ + Get the content encryption key + + Args: + enc (str): Encryption algorithm + alg (str): key wrap/negotiation algorithm + key (Key): Key provided to encryption method + + Return: + (bytes, bytes): Tuple of (cek bytes and wrapped cek) + """ + if alg == ALGORITHMS.DIR: + cek, wrapped_cek = _get_direct_key_wrap_cek(key) + else: + cek, wrapped_cek = _get_key_wrap_cek(enc, key) + + return cek, wrapped_cek + + +def _get_direct_key_wrap_cek(key): + """ + Get the cek and wrapped cek from the encryption key direct + + Args: + key (Key): Key provided to encryption method + + Return: + (bytes, bytes): Tuple of (cek bytes and wrapped cek) + """ + # Get the JWK data to determine how to derive the cek + jwk_data = key.to_dict() + if jwk_data["kty"] == "oct": + # Use the raw bytes of the symmetric ("oct") key directly as the cek + cek_bytes = _get_key_bytes_from_key(key) + wrapped_cek = b"" + else: + raise NotImplementedError("JWK type {} not supported!".format(jwk_data["kty"])) + return cek_bytes, wrapped_cek + + +def _get_key_bytes_from_key(key): + """ + Get the raw key bytes from a Key object + + Args: + key (Key): Key from which to extract the raw key bytes + Returns: + (bytes) key data + """ + jwk_data = key.to_dict() + encoded_key = jwk_data["k"] + cek_bytes = base64url_decode(encoded_key) + return cek_bytes + + +def _get_key_wrap_cek(enc, key): + """ + Generate a random content encryption key and wrap it with the provided key + + Args: + enc (str): Encryption algorithm + key (Key): Key provided to encryption method + + Returns: + (bytes, bytes): Tuple of (cek bytes and wrapped cek) + """ + cek_bytes = _get_random_cek_bytes_for_enc(enc) + wrapped_cek = key.wrap_key(cek_bytes) + return cek_bytes, wrapped_cek + + +def _get_random_cek_bytes_for_enc(enc): + """ + Get the random cek bytes based on the encryption algorithm + + Args: + enc (str): Encryption algorithm + + Returns: + (bytes) random bytes for cek key + """ + # The AES-CBC + HMAC composites need a double-length cek: the first half + # becomes the HMAC key and the second half the AES key (RFC 7518, 5.2.2.1). + if enc == ALGORITHMS.A128GCM: + num_bits = 128 + elif enc == ALGORITHMS.A192GCM: + num_bits = 192 + elif enc in (ALGORITHMS.A128CBC_HS256, ALGORITHMS.A256GCM): + num_bits = 256 + elif enc == ALGORITHMS.A192CBC_HS384: + num_bits = 384 + elif enc == ALGORITHMS.A256CBC_HS512: + num_bits = 512 + else: + raise NotImplementedError(f"{enc} not supported") + + cek_bytes = get_random_bytes(num_bits // 8) + return cek_bytes
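# Worked example for _auth_tag below (RFC 7518, section 5.2.2.1): with a
# 7-byte aad, al = _big_endian(7 * 8) == b"\x00\x00\x00\x00\x00\x00\x00\x38";
# the HMAC is computed over aad || iv || ciphertext || al, and the result is
# truncated to tag_length bytes (16 for A128CBC-HS256).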
+def _auth_tag(ciphertext, iv, aad, mac_key, tag_length): + """ + Get an auth tag from the provided data + + Args: + ciphertext (bytes): Encrypted value + iv (bytes): Initialization vector + aad (bytes): Additional Authenticated Data + mac_key (Key): HMAC key to use in generating the MAC + tag_length (int): How long the tag should be + + Returns: + (bytes) Auth tag + """ + al = _big_endian(len(aad) * 8) + auth_tag_input = aad + iv + ciphertext + al + signature = mac_key.sign(auth_tag_input) + auth_tag = signature[0:tag_length] + return auth_tag + + +def _jwe_compact_serialize(encoded_header, encrypted_cek, iv, cipher_text, auth_tag): + """ + Generate a compact serialized JWE + + Args: + encoded_header (bytes): Base64 URL Encoded JWE header JSON + encrypted_cek (bytes): Encrypted content encryption key (cek) + iv (bytes): Initialization vector (IV) + cipher_text (bytes): Cipher text + auth_tag (bytes): JWE Auth Tag + + Returns: + (str): JWE compact serialized string + """ + cipher_text = ensure_binary(cipher_text) + encoded_encrypted_cek = base64url_encode(encrypted_cek) + encoded_iv = base64url_encode(iv) + encoded_cipher_text = base64url_encode(cipher_text) + encoded_auth_tag = base64url_encode(auth_tag) + return ( + encoded_header + + b"." + + encoded_encrypted_cek + + b"." + + encoded_iv + + b"." + + encoded_cipher_text + + b"." + + encoded_auth_tag + ) diff --git a/venv/lib/python3.12/site-packages/jose/jwk.py b/venv/lib/python3.12/site-packages/jose/jwk.py new file mode 100644 index 0000000..7afc054 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/jwk.py @@ -0,0 +1,79 @@ +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWKError + +try: + from jose.backends import RSAKey # noqa: F401 +except ImportError: + pass + +try: + from jose.backends import ECKey # noqa: F401 +except ImportError: + pass + +try: + from jose.backends import AESKey # noqa: F401 +except ImportError: + pass + +try: + from jose.backends import DIRKey # noqa: F401 +except ImportError: + pass + +try: + from jose.backends import HMACKey # noqa: F401 +except ImportError: + pass + + +def get_key(algorithm): + if algorithm in ALGORITHMS.KEYS: + return ALGORITHMS.KEYS[algorithm] + elif algorithm in ALGORITHMS.HMAC: # noqa: F811 + return HMACKey + elif algorithm in ALGORITHMS.RSA: + from jose.backends import RSAKey # noqa: F811 + + return RSAKey + elif algorithm in ALGORITHMS.EC: + from jose.backends import ECKey # noqa: F811 + + return ECKey + elif algorithm in ALGORITHMS.AES: + from jose.backends import AESKey # noqa: F811 + + return AESKey + elif algorithm == ALGORITHMS.DIR: + from jose.backends import DIRKey # noqa: F811 + + return DIRKey + return None + + +def register_key(algorithm, key_class): + if not issubclass(key_class, Key): + raise TypeError("Key class is not a subclass of jwk.Key") + ALGORITHMS.KEYS[algorithm] = key_class + ALGORITHMS.SUPPORTED.add(algorithm) + return True
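# Illustrative sketch (hypothetical names): register_key() above lets callers
# plug a custom backend class into the registry that get_key() and construct()
# consult; real backends would also implement sign/verify and friends.
def _demo_register_custom_key():
    from jose import jwk
    from jose.backends.base import Key

    class NoopKey(Key):  # trivial stand-in, for illustration only
        def __init__(self, key, algorithm):
            self._key = key

    jwk.register_key("NOOP", NoopKey)  # get_key("NOOP") now resolves to NoopKey
    return jwk.get_key("NOOP") is NoopKey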
+def construct(key_data, algorithm=None): + """ + Construct a Key object for the given algorithm with the given + key_data. + """ + + # Allow for pulling the algorithm off of the passed in jwk. + if not algorithm and isinstance(key_data, dict): + algorithm = key_data.get("alg", None) + + if not algorithm: + raise JWKError("Unable to find an algorithm for key: %s" % key_data) + + key_class = get_key(algorithm) + if not key_class: + raise JWKError("Unable to find an algorithm for key: %s" % key_data) + return key_class(key_data, algorithm) diff --git a/venv/lib/python3.12/site-packages/jose/jws.py b/venv/lib/python3.12/site-packages/jose/jws.py new file mode 100644 index 0000000..bfaf6bd --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/jws.py @@ -0,0 +1,266 @@ +import binascii +import json +from collections.abc import Iterable, Mapping + +from jose import jwk +from jose.backends.base import Key +from jose.constants import ALGORITHMS +from jose.exceptions import JWSError, JWSSignatureError +from jose.utils import base64url_decode, base64url_encode + + +def sign(payload, key, headers=None, algorithm=ALGORITHMS.HS256): + """Signs a claims set and returns a JWS string. + + Args: + payload (str or dict): The payload to sign + key (str or dict): The key to use for signing the claim set. Can be + individual JWK or JWK set. + headers (dict, optional): A set of headers that will be added to + the default headers. Any headers that are added as additional + headers will override the default headers. + algorithm (str, optional): The algorithm to use for signing + the claims. Defaults to HS256. + + Returns: + str: The string representation of the header, claims, and signature. + + Raises: + JWSError: If there is an error signing the token. + + Examples: + + >>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256') + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' + + """ + + if algorithm not in ALGORITHMS.SUPPORTED: + raise JWSError("Algorithm %s not supported." % algorithm) + + encoded_header = _encode_header(algorithm, additional_headers=headers) + encoded_payload = _encode_payload(payload) + signed_output = _sign_header_and_claims(encoded_header, encoded_payload, algorithm, key) + + return signed_output + + +def verify(token, key, algorithms, verify=True): + """Verifies a JWS string's signature. + + Args: + token (str): A signed JWS to be verified. + key (str or dict): A key to attempt to verify the payload with. Can be + individual JWK or JWK set. + algorithms (str or list): Valid algorithms that should be used to verify the JWS. + verify (bool, optional): Whether to verify the signature. Defaults to True. + + Returns: + str: The str representation of the payload, assuming the signature is valid. + + Raises: + JWSError: If there is an exception verifying a token. + + Examples: + + >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' + >>> jws.verify(token, 'secret', algorithms='HS256') + + """ + + header, payload, signing_input, signature = _load(token) + + if verify: + _verify_signature(signing_input, header, signature, key, algorithms) + + return payload + + +def get_unverified_header(token): + """Returns the decoded headers without verification of any kind. + + Args: + token (str): A signed JWS to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWSError: If there is an exception decoding the token. + """ + header, claims, signing_input, signature = _load(token) + return header + + +def get_unverified_headers(token): + """Returns the decoded headers without verification of any kind. + + This is simply a wrapper of get_unverified_header() for backwards + compatibility.
+ + Args: + token (str): A signed JWS to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWSError: If there is an exception decoding the token. + """ + return get_unverified_header(token) + + +def get_unverified_claims(token): + """Returns the decoded claims without verification of any kind. + + Args: + token (str): A signed JWS to decode the headers from. + + Returns: + str: The str representation of the token claims. + + Raises: + JWSError: If there is an exception decoding the token. + """ + header, claims, signing_input, signature = _load(token) + return claims + + +def _encode_header(algorithm, additional_headers=None): + header = {"typ": "JWT", "alg": algorithm} + + if additional_headers: + header.update(additional_headers) + + json_header = json.dumps( + header, + separators=(",", ":"), + sort_keys=True, + ).encode("utf-8") + + return base64url_encode(json_header) + + +def _encode_payload(payload): + if isinstance(payload, Mapping): + try: + payload = json.dumps( + payload, + separators=(",", ":"), + ).encode("utf-8") + except ValueError: + pass + + return base64url_encode(payload) + + +def _sign_header_and_claims(encoded_header, encoded_claims, algorithm, key): + signing_input = b".".join([encoded_header, encoded_claims]) + try: + if not isinstance(key, Key): + key = jwk.construct(key, algorithm) + signature = key.sign(signing_input) + except Exception as e: + raise JWSError(e) + + encoded_signature = base64url_encode(signature) + + encoded_string = b".".join([encoded_header, encoded_claims, encoded_signature]) + + return encoded_string.decode("utf-8") + + +def _load(jwt): + if isinstance(jwt, str): + jwt = jwt.encode("utf-8") + try: + signing_input, crypto_segment = jwt.rsplit(b".", 1) + header_segment, claims_segment = signing_input.split(b".", 1) + header_data = base64url_decode(header_segment) + except ValueError: + raise JWSError("Not enough segments") + except (TypeError, binascii.Error): + raise JWSError("Invalid header padding") + + try: + header = json.loads(header_data.decode("utf-8")) + except ValueError as e: + raise JWSError("Invalid header string: %s" % e) + + if not isinstance(header, Mapping): + raise JWSError("Invalid header string: must be a json object") + + try: + payload = base64url_decode(claims_segment) + except (TypeError, binascii.Error): + raise JWSError("Invalid payload padding") + + try: + signature = base64url_decode(crypto_segment) + except (TypeError, binascii.Error): + raise JWSError("Invalid crypto padding") + + return (header, payload, signing_input, signature) + + +def _sig_matches_keys(keys, signing_input, signature, alg): + for key in keys: + if not isinstance(key, Key): + key = jwk.construct(key, alg) + try: + if key.verify(signing_input, signature): + return True + except Exception: + pass + return False + + +def _get_keys(key): + + if isinstance(key, Key): + return (key,) + + try: + key = json.loads(key, parse_int=str, parse_float=str) + except Exception: + pass + + if isinstance(key, Mapping): + if "keys" in key: + # JWK Set per RFC 7517 + return key["keys"] + elif "kty" in key: + # Individual JWK per RFC 7517 + return (key,) + else: + # Some other mapping. Firebase uses just dict of kid, cert pairs + values = key.values() + if values: + return values + return (key,) + + # Iterable but not text or mapping => list- or tuple-like + elif isinstance(key, Iterable) and not (isinstance(key, str) or isinstance(key, bytes)): + return key + + # Scalar value, wrap in tuple. 
+ else: + return (key,) + + +def _verify_signature(signing_input, header, signature, key="", algorithms=None): + + alg = header.get("alg") + if not alg: + raise JWSError("No algorithm was specified in the JWS header.") + + if algorithms is not None and alg not in algorithms: + raise JWSError("The specified alg value is not allowed") + + keys = _get_keys(key) + try: + if not _sig_matches_keys(keys, signing_input, signature, alg): + raise JWSSignatureError() + except JWSSignatureError: + raise JWSError("Signature verification failed.") + except JWSError: + raise JWSError("Invalid or unsupported algorithm: %s" % alg) diff --git a/venv/lib/python3.12/site-packages/jose/jwt.py b/venv/lib/python3.12/site-packages/jose/jwt.py new file mode 100644 index 0000000..3f2142e --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/jwt.py @@ -0,0 +1,496 @@ +import json +from calendar import timegm +from collections.abc import Mapping +from datetime import datetime, timedelta + +from jose import jws + +from .constants import ALGORITHMS +from .exceptions import ExpiredSignatureError, JWSError, JWTClaimsError, JWTError +from .utils import calculate_at_hash, timedelta_total_seconds + + +def encode(claims, key, algorithm=ALGORITHMS.HS256, headers=None, access_token=None): + """Encodes a claims set and returns a JWT string. + + JWTs are JWS signed objects with a few reserved claims. + + Args: + claims (dict): A claims set to sign + key (str or dict): The key to use for signing the claim set. Can be + individual JWK or JWK set. + algorithm (str, optional): The algorithm to use for signing + the claims. Defaults to HS256. + headers (dict, optional): A set of headers that will be added to + the default headers. Any headers that are added as additional + headers will override the default headers. + access_token (str, optional): If present, the 'at_hash' claim will + be calculated and added to the claims present in the 'claims' + parameter. + + Returns: + str: The string representation of the header, claims, and signature. + + Raises: + JWTError: If there is an error encoding the claims. + + Examples: + + >>> jwt.encode({'a': 'b'}, 'secret', algorithm='HS256') + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' + + """ + + for time_claim in ["exp", "iat", "nbf"]: + + # Convert datetime to an intDate value in known time-format claims + if isinstance(claims.get(time_claim), datetime): + claims[time_claim] = timegm(claims[time_claim].utctimetuple()) + + if access_token: + claims["at_hash"] = calculate_at_hash(access_token, ALGORITHMS.HASHES[algorithm]) + + return jws.sign(claims, key, headers=headers, algorithm=algorithm) + + +def decode(token, key, algorithms=None, options=None, audience=None, issuer=None, subject=None, access_token=None): + """Verifies a JWT string's signature and validates reserved claims. + + Args: + token (str): A signed JWS to be verified. + key (str or dict): A key to attempt to verify the payload with. Can be + individual JWK or JWK set. + algorithms (str or list): Valid algorithms that should be used to verify the JWS. + audience (str): The intended audience of the token. If the "aud" claim is + included in the claim set, then the audience must be included and must equal + the provided claim. + issuer (str or iterable): Acceptable value(s) for the issuer of the token. + If the "iss" claim is included in the claim set, then the issuer must be + given and the claim in the token must be among the acceptable values.
+ subject (str): The subject of the token. If the "sub" claim is + included in the claim set, then the subject must be included and must equal + the provided claim. + access_token (str): An access token string. If the "at_hash" claim is included in the + claim set, then the access_token must be included, and it must match + the "at_hash" claim. + options (dict): A dictionary of options for skipping validation steps. + + defaults = { + 'verify_signature': True, + 'verify_aud': True, + 'verify_iat': True, + 'verify_exp': True, + 'verify_nbf': True, + 'verify_iss': True, + 'verify_sub': True, + 'verify_jti': True, + 'verify_at_hash': True, + 'require_aud': False, + 'require_iat': False, + 'require_exp': False, + 'require_nbf': False, + 'require_iss': False, + 'require_sub': False, + 'require_jti': False, + 'require_at_hash': False, + 'leeway': 0, + } + + Returns: + dict: The dict representation of the claims set, assuming the signature is valid + and all requested data validation passes. + + Raises: + JWTError: If the signature is invalid in any way. + ExpiredSignatureError: If the signature has expired. + JWTClaimsError: If any claim is invalid in any way. + + Examples: + + >>> payload = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' + >>> jwt.decode(payload, 'secret', algorithms='HS256') + + """ + + defaults = { + "verify_signature": True, + "verify_aud": True, + "verify_iat": True, + "verify_exp": True, + "verify_nbf": True, + "verify_iss": True, + "verify_sub": True, + "verify_jti": True, + "verify_at_hash": True, + "require_aud": False, + "require_iat": False, + "require_exp": False, + "require_nbf": False, + "require_iss": False, + "require_sub": False, + "require_jti": False, + "require_at_hash": False, + "leeway": 0, + } + + if options: + defaults.update(options) + + verify_signature = defaults.get("verify_signature", True) + + try: + payload = jws.verify(token, key, algorithms, verify=verify_signature) + except JWSError as e: + raise JWTError(e) + + # Needed for at_hash verification + algorithm = jws.get_unverified_header(token)["alg"] + + try: + claims = json.loads(payload.decode("utf-8")) + except ValueError as e: + raise JWTError("Invalid payload string: %s" % e) + + if not isinstance(claims, Mapping): + raise JWTError("Invalid payload string: must be a json object") + + _validate_claims( + claims, + audience=audience, + issuer=issuer, + subject=subject, + algorithm=algorithm, + access_token=access_token, + options=defaults, + ) + + return claims + + +def get_unverified_header(token): + """Returns the decoded headers without verification of any kind. + + Args: + token (str): A signed JWT to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWTError: If there is an exception decoding the token. + """ + try: + headers = jws.get_unverified_headers(token) + except Exception: + raise JWTError("Error decoding token headers.") + + return headers + + +def get_unverified_headers(token): + """Returns the decoded headers without verification of any kind. + + This is simply a wrapper of get_unverified_header() for backwards + compatibility. + + Args: + token (str): A signed JWT to decode the headers from. + + Returns: + dict: The dict representation of the token headers. + + Raises: + JWTError: If there is an exception decoding the token. + """ + return get_unverified_header(token) + + +def get_unverified_claims(token): + """Returns the decoded claims without verification of any kind. 
+ + Args: + token (str): A signed JWT to decode the headers from. + + Returns: + dict: The dict representation of the token claims. + + Raises: + JWTError: If there is an exception decoding the token. + """ + try: + claims = jws.get_unverified_claims(token) + except Exception: + raise JWTError("Error decoding token claims.") + + try: + claims = json.loads(claims.decode("utf-8")) + except ValueError as e: + raise JWTError("Invalid claims string: %s" % e) + + if not isinstance(claims, Mapping): + raise JWTError("Invalid claims string: must be a json object") + + return claims + + +def _validate_iat(claims): + """Validates that the 'iat' claim is valid. + + The "iat" (issued at) claim identifies the time at which the JWT was + issued. This claim can be used to determine the age of the JWT. Its + value MUST be a number containing a NumericDate value. Use of this + claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + """ + + if "iat" not in claims: + return + + try: + int(claims["iat"]) + except ValueError: + raise JWTClaimsError("Issued At claim (iat) must be an integer.") + + +def _validate_nbf(claims, leeway=0): + """Validates that the 'nbf' claim is valid. + + The "nbf" (not before) claim identifies the time before which the JWT + MUST NOT be accepted for processing. The processing of the "nbf" + claim requires that the current date/time MUST be after or equal to + the not-before date/time listed in the "nbf" claim. Implementers MAY + provide for some small leeway, usually no more than a few minutes, to + account for clock skew. Its value MUST be a number containing a + NumericDate value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + leeway (int): The number of seconds of skew that is allowed. + """ + + if "nbf" not in claims: + return + + try: + nbf = int(claims["nbf"]) + except ValueError: + raise JWTClaimsError("Not Before claim (nbf) must be an integer.") + + now = timegm(datetime.utcnow().utctimetuple()) + + if nbf > (now + leeway): + raise JWTClaimsError("The token is not yet valid (nbf)") + + +def _validate_exp(claims, leeway=0): + """Validates that the 'exp' claim is valid. + + The "exp" (expiration time) claim identifies the expiration time on + or after which the JWT MUST NOT be accepted for processing. The + processing of the "exp" claim requires that the current date/time + MUST be before the expiration date/time listed in the "exp" claim. + Implementers MAY provide for some small leeway, usually no more than + a few minutes, to account for clock skew. Its value MUST be a number + containing a NumericDate value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + leeway (int): The number of seconds of skew that is allowed. + """ + + if "exp" not in claims: + return + + try: + exp = int(claims["exp"]) + except ValueError: + raise JWTClaimsError("Expiration Time claim (exp) must be an integer.") + + now = timegm(datetime.utcnow().utctimetuple()) + + if exp < (now - leeway): + raise ExpiredSignatureError("Signature has expired.") + + +def _validate_aud(claims, audience=None): + """Validates that the 'aud' claim is valid. + + The "aud" (audience) claim identifies the recipients that the JWT is + intended for. Each principal intended to process the JWT MUST + identify itself with a value in the audience claim. 
If the principal + processing the claim does not identify itself with a value in the + "aud" claim when this claim is present, then the JWT MUST be + rejected. In the general case, the "aud" value is an array of case- + sensitive strings, each containing a StringOrURI value. In the + special case when the JWT has one audience, the "aud" value MAY be a + single case-sensitive string containing a StringOrURI value. The + interpretation of audience values is generally application specific. + Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + audience (str): The audience that is verifying the token. + """ + + if "aud" not in claims: + # if audience: + # raise JWTError('Audience claim expected, but not in claims') + return + + audience_claims = claims["aud"] + if isinstance(audience_claims, str): + audience_claims = [audience_claims] + if not isinstance(audience_claims, list): + raise JWTClaimsError("Invalid claim format in token") + if any(not isinstance(c, str) for c in audience_claims): + raise JWTClaimsError("Invalid claim format in token") + if audience not in audience_claims: + raise JWTClaimsError("Invalid audience") + + +def _validate_iss(claims, issuer=None): + """Validates that the 'iss' claim is valid. + + The "iss" (issuer) claim identifies the principal that issued the + JWT. The processing of this claim is generally application specific. + The "iss" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + issuer (str or iterable): Acceptable value(s) for the issuer that + signed the token. + """ + + if issuer is not None: + if isinstance(issuer, str): + issuer = (issuer,) + if claims.get("iss") not in issuer: + raise JWTClaimsError("Invalid issuer") + + +def _validate_sub(claims, subject=None): + """Validates that the 'sub' claim is valid. + + The "sub" (subject) claim identifies the principal that is the + subject of the JWT. The claims in a JWT are normally statements + about the subject. The subject value MUST either be scoped to be + locally unique in the context of the issuer or be globally unique. + The processing of this claim is generally application specific. The + "sub" value is a case-sensitive string containing a StringOrURI + value. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + subject (str): The subject of the token. + """ + + if "sub" not in claims: + return + + if not isinstance(claims["sub"], str): + raise JWTClaimsError("Subject must be a string.") + + if subject is not None: + if claims.get("sub") != subject: + raise JWTClaimsError("Invalid subject") + + +def _validate_jti(claims): + """Validates that the 'jti' claim is valid. + + The "jti" (JWT ID) claim provides a unique identifier for the JWT. + The identifier value MUST be assigned in a manner that ensures that + there is a negligible probability that the same value will be + accidentally assigned to a different data object; if the application + uses multiple issuers, collisions MUST be prevented among values + produced by different issuers as well. The "jti" claim can be used + to prevent the JWT from being replayed. The "jti" value is a case- + sensitive string. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. 
+ """ + if "jti" not in claims: + return + + if not isinstance(claims["jti"], str): + raise JWTClaimsError("JWT ID must be a string.") + + +def _validate_at_hash(claims, access_token, algorithm): + """ + Validates that the 'at_hash' is valid. + + Its value is the base64url encoding of the left-most half of the hash + of the octets of the ASCII representation of the access_token value, + where the hash algorithm used is the hash algorithm used in the alg + Header Parameter of the ID Token's JOSE Header. For instance, if the + alg is RS256, hash the access_token value with SHA-256, then take the + left-most 128 bits and base64url encode them. The at_hash value is a + case sensitive string. Use of this claim is OPTIONAL. + + Args: + claims (dict): The claims dictionary to validate. + access_token (str): The access token returned by the OpenID Provider. + algorithm (str): The algorithm used to sign the JWT, as specified by + the token headers. + """ + if "at_hash" not in claims: + return + + if not access_token: + msg = "No access_token provided to compare against at_hash claim." + raise JWTClaimsError(msg) + + try: + expected_hash = calculate_at_hash(access_token, ALGORITHMS.HASHES[algorithm]) + except (TypeError, ValueError): + msg = "Unable to calculate at_hash to verify against token claims." + raise JWTClaimsError(msg) + + if claims["at_hash"] != expected_hash: + raise JWTClaimsError("at_hash claim does not match access_token.") + + +def _validate_claims(claims, audience=None, issuer=None, subject=None, algorithm=None, access_token=None, options=None): + + leeway = options.get("leeway", 0) + + if isinstance(leeway, timedelta): + leeway = timedelta_total_seconds(leeway) + required_claims = [e[len("require_") :] for e in options.keys() if e.startswith("require_") and options[e]] + for require_claim in required_claims: + if require_claim not in claims: + raise JWTError('missing required key "%s" among claims' % require_claim) + else: + options["verify_" + require_claim] = True # override verify when required + + if not isinstance(audience, ((str,), type(None))): + raise JWTError("audience must be a string or None") + + if options.get("verify_iat"): + _validate_iat(claims) + + if options.get("verify_nbf"): + _validate_nbf(claims, leeway=leeway) + + if options.get("verify_exp"): + _validate_exp(claims, leeway=leeway) + + if options.get("verify_aud"): + _validate_aud(claims, audience=audience) + + if options.get("verify_iss"): + _validate_iss(claims, issuer=issuer) + + if options.get("verify_sub"): + _validate_sub(claims, subject=subject) + + if options.get("verify_jti"): + _validate_jti(claims) + + if options.get("verify_at_hash"): + _validate_at_hash(claims, access_token, algorithm) diff --git a/venv/lib/python3.12/site-packages/jose/utils.py b/venv/lib/python3.12/site-packages/jose/utils.py new file mode 100644 index 0000000..fcef885 --- /dev/null +++ b/venv/lib/python3.12/site-packages/jose/utils.py @@ -0,0 +1,108 @@ +import base64 +import struct + +# Piggyback of the backends implementation of the function that converts a long +# to a bytes stream. Some plumbing is necessary to have the signatures match. 
+try: + from cryptography.utils import int_to_bytes as _long_to_bytes + + def long_to_bytes(n, blocksize=0): + return _long_to_bytes(n, blocksize or None) + + +except ImportError: + from ecdsa.ecdsa import int_to_string as _long_to_bytes + + def long_to_bytes(n, blocksize=0): + ret = _long_to_bytes(n) + if blocksize == 0: + return ret + else: + assert len(ret) <= blocksize + padding = blocksize - len(ret) + return b"\x00" * padding + ret + + +def long_to_base64(data, size=0): + return base64.urlsafe_b64encode(long_to_bytes(data, size)).strip(b"=") + + +def int_arr_to_long(arr): + return int("".join(["%02x" % byte for byte in arr]), 16) + + +def base64_to_long(data): + if isinstance(data, str): + data = data.encode("ascii") + + # urlsafe_b64decode will happily convert b64encoded data + _d = base64.urlsafe_b64decode(bytes(data) + b"==") + return int_arr_to_long(struct.unpack("%sB" % len(_d), _d)) + + +def calculate_at_hash(access_token, hash_alg): + """Helper method for calculating an access token + hash, as described in http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken + + Its value is the base64url encoding of the left-most half of the hash of the octets + of the ASCII representation of the access_token value, where the hash algorithm + used is the hash algorithm used in the alg Header Parameter of the ID Token's JOSE + Header. For instance, if the alg is RS256, hash the access_token value with SHA-256, + then take the left-most 128 bits and base64url encode them. The at_hash value is a + case sensitive string. + + Args: + access_token (str): An access token string. + hash_alg (callable): A callable returning a hash object, e.g. hashlib.sha256 + + """ + hash_digest = hash_alg(access_token.encode("utf-8")).digest() + cut_at = int(len(hash_digest) / 2) + truncated = hash_digest[:cut_at] + at_hash = base64url_encode(truncated) + return at_hash.decode("utf-8") + + +def base64url_decode(input): + """Helper method to base64url_decode a string. + + Args: + input (bytes): A base64url-encoded bytestring to decode; "=" padding + is restored before decoding. + + """ + rem = len(input) % 4 + + if rem > 0: + input += b"=" * (4 - rem) + + return base64.urlsafe_b64decode(input) + + +def base64url_encode(input): + """Helper method to base64url_encode a string. + + Args: + input (bytes): The data to encode; the result carries no "=" padding. + + """ + return base64.urlsafe_b64encode(input).replace(b"=", b"")
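# Illustrative round trip for the two helpers above: encoding strips the "="
# padding, and decoding restores it from the input length modulo 4.
assert base64url_encode(b"\xfb\xef") == b"--8"    # urlsafe alphabet, unpadded
assert base64url_decode(b"--8") == b"\xfb\xef"    # "=" re-added before decoding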
+ """ + return delta.days * 24 * 60 * 60 + delta.seconds + + +def ensure_binary(s): + """Coerce **s** to bytes.""" + + if isinstance(s, bytes): + return s + if isinstance(s, str): + return s.encode("utf-8", "strict") + raise TypeError(f"not expecting type '{type(s)}'") diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/METADATA b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/METADATA new file mode 100644 index 0000000..067e8fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/METADATA @@ -0,0 +1,88 @@ +Metadata-Version: 2.4 +Name: Mako +Version: 1.3.10 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: https://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.makotemplates.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: MarkupSafe>=0.9.2 +Provides-Extra: testing +Requires-Dist: pytest; extra == "testing" +Provides-Extra: babel +Requires-Dist: Babel; extra == "babel" +Provides-Extra: lingua +Requires-Dist: lingua; extra == "lingua" +Dynamic: license-file + +========================= +Mako Templates for Python +========================= + +Mako is a template library written in Python. It provides a familiar, non-XML +syntax which compiles into Python modules for maximum performance. Mako's +syntax and API borrows from the best ideas of many others, including Django +templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded +Python (i.e. Python Server Page) language, which refines the familiar ideas +of componentized layout and inheritance to produce one of the most +straightforward and flexible models available, while also maintaining close +ties to Python calling and scoping semantics. + +Nutshell +======== + +:: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
    + + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + +Philosophy +=========== + +Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + +Documentation +============== + +See documentation for Mako at https://docs.makotemplates.org/en/latest/ + +License +======== + +Mako is licensed under an MIT-style license (see LICENSE). +Other incorporated projects may be licensed under different licenses. +All licenses allow for non-commercial and commercial use. diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/RECORD b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/RECORD new file mode 100644 index 0000000..e8b0eb8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/RECORD @@ -0,0 +1,74 @@ +../../../bin/mako-render,sha256=fiExbcvlAt1R593jNX6c6RLWbQOEeSmNe7YMbKwlqo0,205 +mako-1.3.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +mako-1.3.10.dist-info/METADATA,sha256=VAbwPOfALVKbDJYv5l848ZceyGwynnE9s_TXP6_dr5o,2919 +mako-1.3.10.dist-info/RECORD,, +mako-1.3.10.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 +mako-1.3.10.dist-info/entry_points.txt,sha256=LsKkUsOsJQYbJ2M72hZCm968wi5K8Ywb5uFxCuN8Obk,512 +mako-1.3.10.dist-info/licenses/LICENSE,sha256=aNcGTlPj_6jp9CCp5gS9LiBZ2cMwSS-m69TrPUgRFok,1098 +mako-1.3.10.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5 +mako/__init__.py,sha256=n-XoSW8ChrO6NkJ2xUHElzvSnU6XUu5ruMZe86QldTI,243 +mako/__pycache__/__init__.cpython-312.pyc,, +mako/__pycache__/_ast_util.cpython-312.pyc,, +mako/__pycache__/ast.cpython-312.pyc,, +mako/__pycache__/cache.cpython-312.pyc,, +mako/__pycache__/cmd.cpython-312.pyc,, +mako/__pycache__/codegen.cpython-312.pyc,, +mako/__pycache__/compat.cpython-312.pyc,, +mako/__pycache__/exceptions.cpython-312.pyc,, +mako/__pycache__/filters.cpython-312.pyc,, +mako/__pycache__/lexer.cpython-312.pyc,, +mako/__pycache__/lookup.cpython-312.pyc,, +mako/__pycache__/parsetree.cpython-312.pyc,, +mako/__pycache__/pygen.cpython-312.pyc,, +mako/__pycache__/pyparser.cpython-312.pyc,, +mako/__pycache__/runtime.cpython-312.pyc,, +mako/__pycache__/template.cpython-312.pyc,, +mako/__pycache__/util.cpython-312.pyc,, +mako/_ast_util.py,sha256=hCbfnnizWEa3xRCA-uVyShC2HohSpmVvexz5as_lHc8,20247 +mako/ast.py,sha256=xYrdSiJFbf1CxJ9tU9pcPEWK0BYfwF2aDNDNLQG9PqQ,6642 +mako/cache.py,sha256=kA6FKGl5NeTBnSTcnhoPkSaeJ0JeYpF6GM8qzEZGSts,7680 +mako/cmd.py,sha256=Y2Y6VxNCYwO2Y8EXOdLTf5tpYgfdoondV6Ehlbh8a84,2813 +mako/codegen.py,sha256=N49EH57CcTUfKGzI8V7GFBrAEoFhRo9lkdXBzpFYzBY,47736 +mako/compat.py,sha256=owGbObdoF0C-6rCCs6Vnk3YGHv0bf0PpTja55Woxqb4,1820 +mako/exceptions.py,sha256=87Djuoi1IS5zyVFml1Z5zpCP1IoI7UMOH3h4ejt3x3g,12530 +mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mako/ext/__pycache__/__init__.cpython-312.pyc,, +mako/ext/__pycache__/autohandler.cpython-312.pyc,, +mako/ext/__pycache__/babelplugin.cpython-312.pyc,, +mako/ext/__pycache__/beaker_cache.cpython-312.pyc,, +mako/ext/__pycache__/extract.cpython-312.pyc,, +mako/ext/__pycache__/linguaplugin.cpython-312.pyc,, +mako/ext/__pycache__/preprocessors.cpython-312.pyc,, +mako/ext/__pycache__/pygmentplugin.cpython-312.pyc,, +mako/ext/__pycache__/turbogears.cpython-312.pyc,, +mako/ext/autohandler.py,sha256=QuhNiPSF1LZ53awQ1Qfpwu1Zi9RGOmrtCDnwCLgQzeE,1885 +mako/ext/babelplugin.py,sha256=mmt5fG3pcYII1QsrLEV3wH1ltj-C7uKl8vBIpAjxsKY,2091 
+mako/ext/beaker_cache.py,sha256=4quuJQuXRKKUyF6kM43LQhJT1J2z1KSglRZVDW-7c1I,2578 +mako/ext/extract.py,sha256=ZUeaRL2jWcUlrpnhXFXJB0CUJfvQVGBF9tJr5nKJAWI,4659 +mako/ext/linguaplugin.py,sha256=sUZalJSI_XeON9aRBb2hds-ilVQaxHKlfCg_nAl7EuU,1935 +mako/ext/preprocessors.py,sha256=HYG45idRJUwJkDpswEELL8lPFLisQzgDhW5EHpTDGkI,576 +mako/ext/pygmentplugin.py,sha256=TnpJDyQeWTTGHZraMDpw8FB3PNzbmXhaZcjyIBudyi0,4753 +mako/ext/turbogears.py,sha256=egv8hradAnnSJwxtmW8uXAsqPUzX8cZZCXclmO_8hMA,2141 +mako/filters.py,sha256=IBXyGOby4eFE3UGvNLhJlzbe1FfwJ2dcEr1-gKO_Ljc,4658 +mako/lexer.py,sha256=GMq8yf0dEn04-xw2EVDEzaOLXMSVVQz9HyUbfwKnZKg,16321 +mako/lookup.py,sha256=4ALORJiL0wIdDvK1okW8rbjq2jL5F_TNASekDFQSULY,12428 +mako/parsetree.py,sha256=-wmyX_mklAoKhc-7Psx0U15EKpNSd8oGRXFmvk2JQWo,19021 +mako/pygen.py,sha256=689_jR0GG_8Am62Dmlm5h59VY6eyZAU3GroodqEDnZ0,10416 +mako/pyparser.py,sha256=8ZqUbCDIlgEup4si2zP7-xJns14JvVzX_QMO9sCCGJ0,7558 +mako/runtime.py,sha256=ZsUEN22nX3d3dECQujF69mBKDQS6yVv2nvz_0eTvFGg,27804 +mako/template.py,sha256=Vn5nLoBY-YzSQJKvRPFGb4fiPyZryA5Q-8mWutv0b9A,23563 +mako/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mako/testing/__pycache__/__init__.cpython-312.pyc,, +mako/testing/__pycache__/_config.cpython-312.pyc,, +mako/testing/__pycache__/assertions.cpython-312.pyc,, +mako/testing/__pycache__/config.cpython-312.pyc,, +mako/testing/__pycache__/exclusions.cpython-312.pyc,, +mako/testing/__pycache__/fixtures.cpython-312.pyc,, +mako/testing/__pycache__/helpers.cpython-312.pyc,, +mako/testing/_config.py,sha256=k-qpnsnbXUoN-ykMN5BRpg84i1x0p6UsAddKQnrIytU,3566 +mako/testing/assertions.py,sha256=pfbGl84QlW7QWGg3_lo3wP8XnBAVo9AjzNp2ajmn7FA,5161 +mako/testing/config.py,sha256=wmYVZfzGvOK3mJUZpzmgO8-iIgvaCH41Woi4yDpxq6E,323 +mako/testing/exclusions.py,sha256=_t6ADKdatk3f18tOfHV_ZY6u_ZwQsKphZ2MXJVSAOcI,1553 +mako/testing/fixtures.py,sha256=nEp7wTusf7E0n3Q-BHJW2s_t1vx0KB9poadQ1BmIJzE,3044 +mako/testing/helpers.py,sha256=z4HAactwlht4ut1cbvxKt1QLb3yLPk1U7cnh5BwVUlc,1623 +mako/util.py,sha256=SNYeX2_PmajQJIR3-S1Yqxxylz8lwS65rC8YbCdTkUU,10638 diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/WHEEL b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/WHEEL new file mode 100644 index 0000000..1eb3c49 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/entry_points.txt new file mode 100644 index 0000000..30f31b2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/entry_points.txt @@ -0,0 +1,18 @@ +[babel.extractors] +mako = mako.ext.babelplugin:extract [babel] + +[console_scripts] +mako-render = mako.cmd:cmdline + +[lingua.extractors] +mako = mako.ext.linguaplugin:LinguaMakoExtractor [lingua] + +[pygments.lexers] +css+mako = mako.ext.pygmentplugin:MakoCssLexer +html+mako = mako.ext.pygmentplugin:MakoHtmlLexer +js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer +mako = mako.ext.pygmentplugin:MakoLexer +xml+mako = mako.ext.pygmentplugin:MakoXmlLexer + +[python.templating.engines] +mako = mako.ext.turbogears:TGPlugin diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/licenses/LICENSE new file mode 100644 index 0000000..4c18ade --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/licenses/LICENSE @@ -0,0 +1,19 @@ +Copyright 2006-2025 the Mako authors and contributors <see AUTHORS file>. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/top_level.txt new file mode 100644 index 0000000..2951cdd --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako-1.3.10.dist-info/top_level.txt @@ -0,0 +1 @@ +mako diff --git a/venv/lib/python3.12/site-packages/mako/__init__.py b/venv/lib/python3.12/site-packages/mako/__init__.py new file mode 100644 index 0000000..71f8f60 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/__init__.py @@ -0,0 +1,8 @@ +# mako/__init__.py +# Copyright 2006-2025 the Mako authors and contributors <see AUTHORS file> +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + + +__version__ = "1.3.10" diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4f30368 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/_ast_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/_ast_util.cpython-312.pyc new file mode 100644 index 0000000..04a61ad Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/_ast_util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/ast.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/ast.cpython-312.pyc new file mode 100644 index 0000000..3b7fef2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/ast.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..b4ca473 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/cache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/cmd.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/cmd.cpython-312.pyc new file mode 100644 index 0000000..6305cb0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/cmd.cpython-312.pyc
differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/codegen.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/codegen.cpython-312.pyc new file mode 100644 index 0000000..bb8ec36 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/codegen.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..a0510ad Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..3ff2e8a Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/filters.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/filters.cpython-312.pyc new file mode 100644 index 0000000..ba91648 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/filters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/lexer.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/lexer.cpython-312.pyc new file mode 100644 index 0000000..6d092af Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/lexer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/lookup.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/lookup.cpython-312.pyc new file mode 100644 index 0000000..a77e224 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/lookup.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/parsetree.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/parsetree.cpython-312.pyc new file mode 100644 index 0000000..1d9a396 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/parsetree.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/pygen.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/pygen.cpython-312.pyc new file mode 100644 index 0000000..64fc6c1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/pygen.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/pyparser.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/pyparser.cpython-312.pyc new file mode 100644 index 0000000..b1aa466 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/pyparser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/runtime.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/runtime.cpython-312.pyc new file mode 100644 index 0000000..70343f7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/runtime.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/__pycache__/template.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/template.cpython-312.pyc new file mode 100644 index 0000000..54ddc5c Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/template.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/mako/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..c971844 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/__pycache__/util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/_ast_util.py b/venv/lib/python3.12/site-packages/mako/_ast_util.py new file mode 100644 index 0000000..385d7a0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/_ast_util.py @@ -0,0 +1,713 @@ +# mako/_ast_util.py +# Copyright 2006-2025 the Mako authors and contributors <see AUTHORS file> +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +""" + ast + ~~~ + + This is a stripped down version of Armin Ronacher's ast module. + + :copyright: Copyright 2008 by Armin Ronacher. + :license: Python License. +""" + + +from _ast import Add +from _ast import And +from _ast import AST +from _ast import BitAnd +from _ast import BitOr +from _ast import BitXor +from _ast import Div +from _ast import Eq +from _ast import FloorDiv +from _ast import Gt +from _ast import GtE +from _ast import If +from _ast import In +from _ast import Invert +from _ast import Is +from _ast import IsNot +from _ast import LShift +from _ast import Lt +from _ast import LtE +from _ast import Mod +from _ast import Mult +from _ast import Name +from _ast import Not +from _ast import NotEq +from _ast import NotIn +from _ast import Or +from _ast import PyCF_ONLY_AST +from _ast import RShift +from _ast import Sub +from _ast import UAdd +from _ast import USub + + +BOOLOP_SYMBOLS = {And: "and", Or: "or"} + +BINOP_SYMBOLS = { + Add: "+", + Sub: "-", + Mult: "*", + Div: "/", + FloorDiv: "//", + Mod: "%", + LShift: "<<", + RShift: ">>", + BitOr: "|", + BitAnd: "&", + BitXor: "^", +} + +CMPOP_SYMBOLS = { + Eq: "==", + Gt: ">", + GtE: ">=", + In: "in", + Is: "is", + IsNot: "is not", + Lt: "<", + LtE: "<=", + NotEq: "!=", + NotIn: "not in", +} + +UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"} + +ALL_SYMBOLS = {} +ALL_SYMBOLS.update(BOOLOP_SYMBOLS) +ALL_SYMBOLS.update(BINOP_SYMBOLS) +ALL_SYMBOLS.update(CMPOP_SYMBOLS) +ALL_SYMBOLS.update(UNARYOP_SYMBOLS) + + +def parse(expr, filename="<unknown>", mode="exec"): + """Parse an expression into an AST node.""" + return compile(expr, filename, mode, PyCF_ONLY_AST) + + +def iter_fields(node): + """Iterate over all fields of a node, only yielding existing fields.""" + + for field in node._fields: + try: + yield field, getattr(node, field) + except AttributeError: + pass + + +class NodeVisitor: + + """ + Walks the abstract syntax tree and calls visitor functions for every node + found. The visitor functions may return values which will be forwarded + by the `visit` method. + + By default the visitor functions for the nodes are ``'visit_'`` + + class name of the node. So a `TryFinally` node visit function would + be `visit_TryFinally`. This behavior can be changed by overriding + the `get_visitor` function. If no visitor function exists for a node + (return value `None`) the `generic_visit` visitor is used instead. + + Don't use the `NodeVisitor` if you want to apply changes to nodes during + traversing. For this a special visitor exists (`NodeTransformer`) that + allows modifications. + """
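    # Illustrative subclass (hypothetical): with the visit_<ClassName>
    # dispatch described above, this collects the id of every Name node:
    #
    #   class NameCollector(NodeVisitor):
    #       def __init__(self):
    #           self.names = []
    #
    #       def visit_Name(self, node):
    #           self.names.append(node.id)
    #
    #   c = NameCollector()
    #   c.visit(parse("a + b"))  # c.names == ["a", "b"]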
+ """ + method = "visit_" + node.__class__.__name__ + return getattr(self, method, None) + + def visit(self, node): + """Visit a node.""" + f = self.get_visitor(node) + if f is not None: + return f(node) + return self.generic_visit(node) + + def generic_visit(self, node): + """Called if no explicit visitor function exists for a node.""" + for field, value in iter_fields(node): + if isinstance(value, list): + for item in value: + if isinstance(item, AST): + self.visit(item) + elif isinstance(value, AST): + self.visit(value) + + +class NodeTransformer(NodeVisitor): + + """ + Walks the abstract syntax tree and allows modifications of nodes. + + The `NodeTransformer` will walk the AST and use the return value of the + visitor functions to replace or remove the old node. If the return + value of the visitor function is `None` the node will be removed + from the previous location otherwise it's replaced with the return + value. The return value may be the original node in which case no + replacement takes place. + + Here an example transformer that rewrites all `foo` to `data['foo']`:: + + class RewriteName(NodeTransformer): + + def visit_Name(self, node): + return copy_location(Subscript( + value=Name(id='data', ctx=Load()), + slice=Index(value=Str(s=node.id)), + ctx=node.ctx + ), node) + + Keep in mind that if the node you're operating on has child nodes + you must either transform the child nodes yourself or call the generic + visit function for the node first. + + Nodes that were part of a collection of statements (that applies to + all statement nodes) may also return a list of nodes rather than just + a single node. + + Usually you use the transformer like this:: + + node = YourTransformer().visit(node) + """ + + def generic_visit(self, node): + for field, old_value in iter_fields(node): + old_value = getattr(node, field, None) + if isinstance(old_value, list): + new_values = [] + for value in old_value: + if isinstance(value, AST): + value = self.visit(value) + if value is None: + continue + elif not isinstance(value, AST): + new_values.extend(value) + continue + new_values.append(value) + old_value[:] = new_values + elif isinstance(old_value, AST): + new_node = self.visit(old_value) + if new_node is None: + delattr(node, field) + else: + setattr(node, field, new_node) + return node + + +class SourceGenerator(NodeVisitor): + + """ + This visitor is able to transform a well formed syntax tree into python + sourcecode. For more details have a look at the docstring of the + `node_to_source` function. 
+ """ + + def __init__(self, indent_with): + self.result = [] + self.indent_with = indent_with + self.indentation = 0 + self.new_lines = 0 + + def write(self, x): + if self.new_lines: + if self.result: + self.result.append("\n" * self.new_lines) + self.result.append(self.indent_with * self.indentation) + self.new_lines = 0 + self.result.append(x) + + def newline(self, n=1): + self.new_lines = max(self.new_lines, n) + + def body(self, statements): + self.new_line = True + self.indentation += 1 + for stmt in statements: + self.visit(stmt) + self.indentation -= 1 + + def body_or_else(self, node): + self.body(node.body) + if node.orelse: + self.newline() + self.write("else:") + self.body(node.orelse) + + def signature(self, node): + want_comma = [] + + def write_comma(): + if want_comma: + self.write(", ") + else: + want_comma.append(True) + + padding = [None] * (len(node.args) - len(node.defaults)) + for arg, default in zip(node.args, padding + node.defaults): + write_comma() + self.visit(arg) + if default is not None: + self.write("=") + self.visit(default) + if node.vararg is not None: + write_comma() + self.write("*" + node.vararg.arg) + if node.kwarg is not None: + write_comma() + self.write("**" + node.kwarg.arg) + + def decorators(self, node): + for decorator in node.decorator_list: + self.newline() + self.write("@") + self.visit(decorator) + + # Statements + + def visit_Assign(self, node): + self.newline() + for idx, target in enumerate(node.targets): + if idx: + self.write(", ") + self.visit(target) + self.write(" = ") + self.visit(node.value) + + def visit_AugAssign(self, node): + self.newline() + self.visit(node.target) + self.write(BINOP_SYMBOLS[type(node.op)] + "=") + self.visit(node.value) + + def visit_ImportFrom(self, node): + self.newline() + self.write("from %s%s import " % ("." * node.level, node.module)) + for idx, item in enumerate(node.names): + if idx: + self.write(", ") + self.write(item) + + def visit_Import(self, node): + self.newline() + for item in node.names: + self.write("import ") + self.visit(item) + + def visit_Expr(self, node): + self.newline() + self.generic_visit(node) + + def visit_FunctionDef(self, node): + self.newline(n=2) + self.decorators(node) + self.newline() + self.write("def %s(" % node.name) + self.signature(node.args) + self.write("):") + self.body(node.body) + + def visit_ClassDef(self, node): + have_args = [] + + def paren_or_comma(): + if have_args: + self.write(", ") + else: + have_args.append(True) + self.write("(") + + self.newline(n=3) + self.decorators(node) + self.newline() + self.write("class %s" % node.name) + for base in node.bases: + paren_or_comma() + self.visit(base) + # XXX: the if here is used to keep this module compatible + # with python 2.6. 
+ if hasattr(node, "keywords"): + for keyword in node.keywords: + paren_or_comma() + self.write(keyword.arg + "=") + self.visit(keyword.value) + if getattr(node, "starargs", None): + paren_or_comma() + self.write("*") + self.visit(node.starargs) + if getattr(node, "kwargs", None): + paren_or_comma() + self.write("**") + self.visit(node.kwargs) + self.write(have_args and "):" or ":") + self.body(node.body) + + def visit_If(self, node): + self.newline() + self.write("if ") + self.visit(node.test) + self.write(":") + self.body(node.body) + while True: + else_ = node.orelse + if len(else_) == 1 and isinstance(else_[0], If): + node = else_[0] + self.newline() + self.write("elif ") + self.visit(node.test) + self.write(":") + self.body(node.body) + else: + self.newline() + self.write("else:") + self.body(else_) + break + + def visit_For(self, node): + self.newline() + self.write("for ") + self.visit(node.target) + self.write(" in ") + self.visit(node.iter) + self.write(":") + self.body_or_else(node) + + def visit_While(self, node): + self.newline() + self.write("while ") + self.visit(node.test) + self.write(":") + self.body_or_else(node) + + def visit_With(self, node): + self.newline() + self.write("with ") + self.visit(node.context_expr) + if node.optional_vars is not None: + self.write(" as ") + self.visit(node.optional_vars) + self.write(":") + self.body(node.body) + + def visit_Pass(self, node): + self.newline() + self.write("pass") + + def visit_Print(self, node): + # XXX: python 2.6 only + self.newline() + self.write("print ") + want_comma = False + if node.dest is not None: + self.write(" >> ") + self.visit(node.dest) + want_comma = True + for value in node.values: + if want_comma: + self.write(", ") + self.visit(value) + want_comma = True + if not node.nl: + self.write(",") + + def visit_Delete(self, node): + self.newline() + self.write("del ") + for idx, target in enumerate(node): + if idx: + self.write(", ") + self.visit(target) + + def visit_TryExcept(self, node): + self.newline() + self.write("try:") + self.body(node.body) + for handler in node.handlers: + self.visit(handler) + + def visit_TryFinally(self, node): + self.newline() + self.write("try:") + self.body(node.body) + self.newline() + self.write("finally:") + self.body(node.finalbody) + + def visit_Global(self, node): + self.newline() + self.write("global " + ", ".join(node.names)) + + def visit_Nonlocal(self, node): + self.newline() + self.write("nonlocal " + ", ".join(node.names)) + + def visit_Return(self, node): + self.newline() + self.write("return ") + self.visit(node.value) + + def visit_Break(self, node): + self.newline() + self.write("break") + + def visit_Continue(self, node): + self.newline() + self.write("continue") + + def visit_Raise(self, node): + # XXX: Python 2.6 / 3.0 compatibility + self.newline() + self.write("raise") + if hasattr(node, "exc") and node.exc is not None: + self.write(" ") + self.visit(node.exc) + if node.cause is not None: + self.write(" from ") + self.visit(node.cause) + elif hasattr(node, "type") and node.type is not None: + self.visit(node.type) + if node.inst is not None: + self.write(", ") + self.visit(node.inst) + if node.tback is not None: + self.write(", ") + self.visit(node.tback) + + # Expressions + + def visit_Attribute(self, node): + self.visit(node.value) + self.write("." 
+ node.attr) + + def visit_Call(self, node): + want_comma = [] + + def write_comma(): + if want_comma: + self.write(", ") + else: + want_comma.append(True) + + self.visit(node.func) + self.write("(") + for arg in node.args: + write_comma() + self.visit(arg) + for keyword in node.keywords: + write_comma() + self.write(keyword.arg + "=") + self.visit(keyword.value) + if getattr(node, "starargs", None): + write_comma() + self.write("*") + self.visit(node.starargs) + if getattr(node, "kwargs", None): + write_comma() + self.write("**") + self.visit(node.kwargs) + self.write(")") + + def visit_Name(self, node): + self.write(node.id) + + def visit_NameConstant(self, node): + self.write(str(node.value)) + + def visit_arg(self, node): + self.write(node.arg) + + def visit_Str(self, node): + self.write(repr(node.s)) + + def visit_Bytes(self, node): + self.write(repr(node.s)) + + def visit_Num(self, node): + self.write(repr(node.n)) + + # newly needed in Python 3.8 + def visit_Constant(self, node): + self.write(repr(node.value)) + + def visit_Tuple(self, node): + self.write("(") + idx = -1 + for idx, item in enumerate(node.elts): + if idx: + self.write(", ") + self.visit(item) + self.write(idx and ")" or ",)") + + def sequence_visit(left, right): + def visit(self, node): + self.write(left) + for idx, item in enumerate(node.elts): + if idx: + self.write(", ") + self.visit(item) + self.write(right) + + return visit + + visit_List = sequence_visit("[", "]") + visit_Set = sequence_visit("{", "}") + del sequence_visit + + def visit_Dict(self, node): + self.write("{") + for idx, (key, value) in enumerate(zip(node.keys, node.values)): + if idx: + self.write(", ") + self.visit(key) + self.write(": ") + self.visit(value) + self.write("}") + + def visit_BinOp(self, node): + self.write("(") + self.visit(node.left) + self.write(" %s " % BINOP_SYMBOLS[type(node.op)]) + self.visit(node.right) + self.write(")") + + def visit_BoolOp(self, node): + self.write("(") + for idx, value in enumerate(node.values): + if idx: + self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)]) + self.visit(value) + self.write(")") + + def visit_Compare(self, node): + self.write("(") + self.visit(node.left) + for op, right in zip(node.ops, node.comparators): + self.write(" %s " % CMPOP_SYMBOLS[type(op)]) + self.visit(right) + self.write(")") + + def visit_UnaryOp(self, node): + self.write("(") + op = UNARYOP_SYMBOLS[type(node.op)] + self.write(op) + if op == "not": + self.write(" ") + self.visit(node.operand) + self.write(")") + + def visit_Subscript(self, node): + self.visit(node.value) + self.write("[") + self.visit(node.slice) + self.write("]") + + def visit_Slice(self, node): + if node.lower is not None: + self.visit(node.lower) + self.write(":") + if node.upper is not None: + self.visit(node.upper) + if node.step is not None: + self.write(":") + if not (isinstance(node.step, Name) and node.step.id == "None"): + self.visit(node.step) + + def visit_ExtSlice(self, node): + for idx, item in node.dims: + if idx: + self.write(", ") + self.visit(item) + + def visit_Yield(self, node): + self.write("yield ") + self.visit(node.value) + + def visit_Lambda(self, node): + self.write("lambda ") + self.signature(node.args) + self.write(": ") + self.visit(node.body) + + def visit_Ellipsis(self, node): + self.write("Ellipsis") + + def generator_visit(left, right): + def visit(self, node): + self.write(left) + self.visit(node.elt) + for comprehension in node.generators: + self.visit(comprehension) + self.write(right) + + return visit + + visit_ListComp = 
generator_visit("[", "]") + visit_GeneratorExp = generator_visit("(", ")") + visit_SetComp = generator_visit("{", "}") + del generator_visit + + def visit_DictComp(self, node): + self.write("{") + self.visit(node.key) + self.write(": ") + self.visit(node.value) + for comprehension in node.generators: + self.visit(comprehension) + self.write("}") + + def visit_IfExp(self, node): + self.visit(node.body) + self.write(" if ") + self.visit(node.test) + self.write(" else ") + self.visit(node.orelse) + + def visit_Starred(self, node): + self.write("*") + self.visit(node.value) + + def visit_Repr(self, node): + # XXX: python 2.6 only + self.write("`") + self.visit(node.value) + self.write("`") + + # Helper Nodes + + def visit_alias(self, node): + self.write(node.name) + if node.asname is not None: + self.write(" as " + node.asname) + + def visit_comprehension(self, node): + self.write(" for ") + self.visit(node.target) + self.write(" in ") + self.visit(node.iter) + if node.ifs: + for if_ in node.ifs: + self.write(" if ") + self.visit(if_) + + def visit_excepthandler(self, node): + self.newline() + self.write("except") + if node.type is not None: + self.write(" ") + self.visit(node.type) + if node.name is not None: + self.write(" as ") + self.visit(node.name) + self.write(":") + self.body(node.body) diff --git a/venv/lib/python3.12/site-packages/mako/ast.py b/venv/lib/python3.12/site-packages/mako/ast.py new file mode 100644 index 0000000..82cfa11 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ast.py @@ -0,0 +1,202 @@ +# mako/ast.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""utilities for analyzing expressions and blocks of Python +code, as well as generating Python from AST nodes""" + +import re + +from mako import exceptions +from mako import pyparser + + +class PythonCode: + + """represents information about a string containing Python code""" + + def __init__(self, code, **exception_kwargs): + self.code = code + + # represents all identifiers which are assigned to at some point in + # the code + self.declared_identifiers = set() + + # represents all identifiers which are referenced before their + # assignment, if any + self.undeclared_identifiers = set() + + # note that an identifier can be in both the undeclared and declared + # lists. 
+ + # using AST to parse instead of using code.co_varnames, + # code.co_names has several advantages: + # - we can locate an identifier as "undeclared" even if + # its declared later in the same block of code + # - AST is less likely to break with version changes + # (for example, the behavior of co_names changed a little bit + # in python version 2.5) + if isinstance(code, str): + expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs) + else: + expr = code + + f = pyparser.FindIdentifiers(self, **exception_kwargs) + f.visit(expr) + + +class ArgumentList: + + """parses a fragment of code as a comma-separated list of expressions""" + + def __init__(self, code, **exception_kwargs): + self.codeargs = [] + self.args = [] + self.declared_identifiers = set() + self.undeclared_identifiers = set() + if isinstance(code, str): + if re.match(r"\S", code) and not re.match(r",\s*$", code): + # if theres text and no trailing comma, insure its parsed + # as a tuple by adding a trailing comma + code += "," + expr = pyparser.parse(code, "exec", **exception_kwargs) + else: + expr = code + + f = pyparser.FindTuple(self, PythonCode, **exception_kwargs) + f.visit(expr) + + +class PythonFragment(PythonCode): + + """extends PythonCode to provide identifier lookups in partial control + statements + + e.g.:: + + for x in 5: + elif y==9: + except (MyException, e): + + """ + + def __init__(self, code, **exception_kwargs): + m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S) + if not m: + raise exceptions.CompileException( + "Fragment '%s' is not a partial control statement" % code, + **exception_kwargs, + ) + if m.group(3): + code = code[: m.start(3)] + (keyword, expr) = m.group(1, 2) + if keyword in ["for", "if", "while"]: + code = code + "pass" + elif keyword == "try": + code = code + "pass\nexcept:pass" + elif keyword in ["elif", "else"]: + code = "if False:pass\n" + code + "pass" + elif keyword == "except": + code = "try:pass\n" + code + "pass" + elif keyword == "with": + code = code + "pass" + else: + raise exceptions.CompileException( + "Unsupported control keyword: '%s'" % keyword, + **exception_kwargs, + ) + super().__init__(code, **exception_kwargs) + + +class FunctionDecl: + + """function declaration""" + + def __init__(self, code, allow_kwargs=True, **exception_kwargs): + self.code = code + expr = pyparser.parse(code, "exec", **exception_kwargs) + + f = pyparser.ParseFunc(self, **exception_kwargs) + f.visit(expr) + if not hasattr(self, "funcname"): + raise exceptions.CompileException( + "Code '%s' is not a function declaration" % code, + **exception_kwargs, + ) + if not allow_kwargs and self.kwargs: + raise exceptions.CompileException( + "'**%s' keyword argument not allowed here" + % self.kwargnames[-1], + **exception_kwargs, + ) + + def get_argument_expressions(self, as_call=False): + """Return the argument declarations of this FunctionDecl as a printable + list. + + By default the return value is appropriate for writing in a ``def``; + set `as_call` to true to build arguments to be passed to the function + instead (assuming locals with the same names as the arguments exist). 
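+
+        For example (a hypothetical declaration), given
+        ``def foo(a, *, b=1, **kw)`` the default form yields
+        ``['a', 'b=1', '**kw']``, while ``as_call=True`` yields
+        ``['a', 'b=b', '**kw']``.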
+ """ + + namedecls = [] + + # Build in reverse order, since defaults and slurpy args come last + argnames = self.argnames[::-1] + kwargnames = self.kwargnames[::-1] + defaults = self.defaults[::-1] + kwdefaults = self.kwdefaults[::-1] + + # Named arguments + if self.kwargs: + namedecls.append("**" + kwargnames.pop(0)) + + for name in kwargnames: + # Keyword-only arguments must always be used by name, so even if + # this is a call, print out `foo=foo` + if as_call: + namedecls.append("%s=%s" % (name, name)) + elif kwdefaults: + default = kwdefaults.pop(0) + if default is None: + # The AST always gives kwargs a default, since you can do + # `def foo(*, a=1, b, c=3)` + namedecls.append(name) + else: + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) + else: + namedecls.append(name) + + # Positional arguments + if self.varargs: + namedecls.append("*" + argnames.pop(0)) + + for name in argnames: + if as_call or not defaults: + namedecls.append(name) + else: + default = defaults.pop(0) + namedecls.append( + "%s=%s" + % (name, pyparser.ExpressionGenerator(default).value()) + ) + + namedecls.reverse() + return namedecls + + @property + def allargnames(self): + return tuple(self.argnames) + tuple(self.kwargnames) + + +class FunctionArgs(FunctionDecl): + + """the argument portion of a function declaration""" + + def __init__(self, code, **kwargs): + super().__init__("def ANON(%s):pass" % code, **kwargs) diff --git a/venv/lib/python3.12/site-packages/mako/cache.py b/venv/lib/python3.12/site-packages/mako/cache.py new file mode 100644 index 0000000..267411c --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/cache.py @@ -0,0 +1,239 @@ +# mako/cache.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from mako import util + +_cache_plugins = util.PluginLoader("mako.cache") + +register_plugin = _cache_plugins.register +register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl") + + +class Cache: + + """Represents a data content cache made available to the module + space of a specific :class:`.Template` object. + + .. versionadded:: 0.6 + :class:`.Cache` by itself is mostly a + container for a :class:`.CacheImpl` object, which implements + a fixed API to provide caching services; specific subclasses exist to + implement different + caching strategies. Mako includes a backend that works with + the Beaker caching system. Beaker itself then supports + a number of backends (i.e. file, memory, memcached, etc.) + + The construction of a :class:`.Cache` is part of the mechanics + of a :class:`.Template`, and programmatic access to this + cache is typically via the :attr:`.Template.cache` attribute. + + """ + + impl = None + """Provide the :class:`.CacheImpl` in use by this :class:`.Cache`. + + This accessor allows a :class:`.CacheImpl` with additional + methods beyond that of :class:`.Cache` to be used programmatically. + + """ + + id = None + """Return the 'id' that identifies this cache. + + This is a value that should be globally unique to the + :class:`.Template` associated with this cache, and can + be used by a caching system to name a local container + for data specific to this template. + + """ + + starttime = None + """Epochal time value for when the owning :class:`.Template` was + first compiled. 
+ + A cache implementation may wish to invalidate data earlier than + this timestamp; this has the effect of the cache for a specific + :class:`.Template` starting clean any time the :class:`.Template` + is recompiled, such as when the original template file changed on + the filesystem. + + """ + + def __init__(self, template, *args): + # check for a stale template calling the + # constructor + if isinstance(template, str) and args: + return + self.template = template + self.id = template.module.__name__ + self.starttime = template.module._modified_time + self._def_regions = {} + self.impl = self._load_impl(self.template.cache_impl) + + def _load_impl(self, name): + return _cache_plugins.load(name)(self) + + def get_or_create(self, key, creation_function, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value.""" + + return self._ctx_get_or_create(key, creation_function, None, **kw) + + def _ctx_get_or_create(self, key, creation_function, context, **kw): + """Retrieve a value from the cache, using the given creation function + to generate a new value.""" + + if not self.template.cache_enabled: + return creation_function() + + return self.impl.get_or_create( + key, creation_function, **self._get_cache_kw(kw, context) + ) + + def set(self, key, value, **kw): + r"""Place a value in the cache. + + :param key: the value's key. + :param value: the value. + :param \**kw: cache configuration arguments. + + """ + + self.impl.set(key, value, **self._get_cache_kw(kw, None)) + + put = set + """A synonym for :meth:`.Cache.set`. + + This is here for backwards compatibility. + + """ + + def get(self, key, **kw): + r"""Retrieve a value from the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. The + backend is configured using these arguments upon first request. + Subsequent requests that use the same series of configuration + values will use that same backend. + + """ + return self.impl.get(key, **self._get_cache_kw(kw, None)) + + def invalidate(self, key, **kw): + r"""Invalidate a value in the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. The + backend is configured using these arguments upon first request. + Subsequent requests that use the same series of configuration + values will use that same backend. + + """ + self.impl.invalidate(key, **self._get_cache_kw(kw, None)) + + def invalidate_body(self): + """Invalidate the cached content of the "body" method for this + template. + + """ + self.invalidate("render_body", __M_defname="render_body") + + def invalidate_def(self, name): + """Invalidate the cached content of a particular ``<%def>`` within this + template. + + """ + + self.invalidate("render_%s" % name, __M_defname="render_%s" % name) + + def invalidate_closure(self, name): + """Invalidate a nested ``<%def>`` within this template. + + Caching of nested defs is a blunt tool as there is no + management of scope -- nested defs that use cache tags + need to have names unique of all other nested defs in the + template, else their content will be overwritten by + each other. 
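+
+        A short usage sketch, given a compiled :class:`.Template` ``t``
+        with caching enabled::
+
+            t.cache.invalidate_body()         # drop the cached body()
+            t.cache.invalidate_def("header")  # drop a cached <%def>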
+ + """ + + self.invalidate(name, __M_defname=name) + + def _get_cache_kw(self, kw, context): + defname = kw.pop("__M_defname", None) + if not defname: + tmpl_kw = self.template.cache_args.copy() + tmpl_kw.update(kw) + elif defname in self._def_regions: + tmpl_kw = self._def_regions[defname] + else: + tmpl_kw = self.template.cache_args.copy() + tmpl_kw.update(kw) + self._def_regions[defname] = tmpl_kw + if context and self.impl.pass_context: + tmpl_kw = tmpl_kw.copy() + tmpl_kw.setdefault("context", context) + return tmpl_kw + + +class CacheImpl: + + """Provide a cache implementation for use by :class:`.Cache`.""" + + def __init__(self, cache): + self.cache = cache + + pass_context = False + """If ``True``, the :class:`.Context` will be passed to + :meth:`get_or_create <.CacheImpl.get_or_create>` as the name ``'context'``. + """ + + def get_or_create(self, key, creation_function, **kw): + r"""Retrieve a value from the cache, using the given creation function + to generate a new value. + + This function *must* return a value, either from + the cache, or via the given creation function. + If the creation function is called, the newly + created value should be populated into the cache + under the given key before being returned. + + :param key: the value's key. + :param creation_function: function that when called generates + a new value. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def set(self, key, value, **kw): + r"""Place a value in the cache. + + :param key: the value's key. + :param value: the value. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def get(self, key, **kw): + r"""Retrieve a value from the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() + + def invalidate(self, key, **kw): + r"""Invalidate a value in the cache. + + :param key: the value's key. + :param \**kw: cache configuration arguments. + + """ + raise NotImplementedError() diff --git a/venv/lib/python3.12/site-packages/mako/cmd.py b/venv/lib/python3.12/site-packages/mako/cmd.py new file mode 100644 index 0000000..3fff197 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/cmd.py @@ -0,0 +1,99 @@ +# mako/cmd.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from argparse import ArgumentParser +from os.path import dirname +from os.path import isfile +import sys + +from mako import exceptions +from mako.lookup import TemplateLookup +from mako.template import Template + + +def varsplit(var): + if "=" not in var: + return (var, "") + return var.split("=", 1) + + +def _exit(): + sys.stderr.write(exceptions.text_error_template().render()) + sys.exit(1) + + +def cmdline(argv=None): + parser = ArgumentParser() + parser.add_argument( + "--var", + default=[], + action="append", + help="variable (can be used multiple times, use name=value)", + ) + parser.add_argument( + "--template-dir", + default=[], + action="append", + help="Directory to use for template lookup (multiple " + "directories may be provided). 
If not given then if the " + "template is read from stdin, the value defaults to be " + "the current directory, otherwise it defaults to be the " + "parent directory of the file provided.", + ) + parser.add_argument( + "--output-encoding", default=None, help="force output encoding" + ) + parser.add_argument( + "--output-file", + default=None, + help="Write to file upon successful render instead of stdout", + ) + parser.add_argument("input", nargs="?", default="-") + + options = parser.parse_args(argv) + + output_encoding = options.output_encoding + output_file = options.output_file + + if options.input == "-": + lookup_dirs = options.template_dir or ["."] + lookup = TemplateLookup(lookup_dirs) + try: + template = Template( + sys.stdin.read(), + lookup=lookup, + output_encoding=output_encoding, + ) + except: + _exit() + else: + filename = options.input + if not isfile(filename): + raise SystemExit("error: can't find %s" % filename) + lookup_dirs = options.template_dir or [dirname(filename)] + lookup = TemplateLookup(lookup_dirs) + try: + template = Template( + filename=filename, + lookup=lookup, + output_encoding=output_encoding, + ) + except: + _exit() + + kw = dict(varsplit(var) for var in options.var) + try: + rendered = template.render(**kw) + except: + _exit() + else: + if output_file: + open(output_file, "wt", encoding=output_encoding).write(rendered) + else: + sys.stdout.write(rendered) + + +if __name__ == "__main__": + cmdline() diff --git a/venv/lib/python3.12/site-packages/mako/codegen.py b/venv/lib/python3.12/site-packages/mako/codegen.py new file mode 100644 index 0000000..b5293f4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/codegen.py @@ -0,0 +1,1319 @@ +# mako/codegen.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides functionality for rendering a parsetree constructing into module +source code.""" + +import json +import re +import time + +from mako import ast +from mako import exceptions +from mako import filters +from mako import parsetree +from mako import util +from mako.pygen import PythonPrinter + + +MAGIC_NUMBER = 10 + +# names which are hardwired into the +# template and are not accessed via the +# context itself +TOPLEVEL_DECLARED = {"UNDEFINED", "STOP_RENDERING"} +RESERVED_NAMES = {"context", "loop"}.union(TOPLEVEL_DECLARED) + + +def compile( # noqa + node, + uri, + filename=None, + default_filters=None, + buffer_filters=None, + imports=None, + future_imports=None, + source_encoding=None, + generate_magic_comment=True, + strict_undefined=False, + enable_loop=True, + reserved_names=frozenset(), +): + """Generate module source code given a parsetree node, + uri, and optional source filename""" + + buf = util.FastEncodingBuffer() + + printer = PythonPrinter(buf) + _GenerateRenderMethod( + printer, + _CompileContext( + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + strict_undefined, + enable_loop, + reserved_names, + ), + node, + ) + return buf.getvalue() + + +class _CompileContext: + def __init__( + self, + uri, + filename, + default_filters, + buffer_filters, + imports, + future_imports, + source_encoding, + generate_magic_comment, + strict_undefined, + enable_loop, + reserved_names, + ): + self.uri = uri + self.filename = filename + self.default_filters = default_filters + self.buffer_filters = buffer_filters + self.imports 
= imports + self.future_imports = future_imports + self.source_encoding = source_encoding + self.generate_magic_comment = generate_magic_comment + self.strict_undefined = strict_undefined + self.enable_loop = enable_loop + self.reserved_names = reserved_names + + +class _GenerateRenderMethod: + + """A template visitor object which generates the + full module source for a template. + + """ + + def __init__(self, printer, compiler, node): + self.printer = printer + self.compiler = compiler + self.node = node + self.identifier_stack = [None] + self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag)) + + if self.in_def: + name = "render_%s" % node.funcname + args = node.get_argument_expressions() + filtered = len(node.filter_args.args) > 0 + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) + defs = None + pagetag = None + if node.is_block and not node.is_anonymous: + args += ["**pageargs"] + else: + defs = self.write_toplevel() + pagetag = self.compiler.pagetag + name = "render_body" + if pagetag is not None: + args = pagetag.body_decl.get_argument_expressions() + if not pagetag.body_decl.kwargs: + args += ["**pageargs"] + cached = eval(pagetag.attributes.get("cached", "False")) + self.compiler.enable_loop = self.compiler.enable_loop or eval( + pagetag.attributes.get("enable_loop", "False") + ) + else: + args = ["**pageargs"] + cached = False + buffered = filtered = False + if args is None: + args = ["context"] + else: + args = [a for a in ["context"] + args] + + self.write_render_callable( + pagetag or node, name, args, buffered, filtered, cached + ) + + if defs is not None: + for node in defs: + _GenerateRenderMethod(printer, compiler, node) + + if not self.in_def: + self.write_metadata_struct() + + def write_metadata_struct(self): + self.printer.source_map[self.printer.lineno] = max( + self.printer.source_map + ) + struct = { + "filename": self.compiler.filename, + "uri": self.compiler.uri, + "source_encoding": self.compiler.source_encoding, + "line_map": self.printer.source_map, + } + self.printer.writelines( + '"""', + "__M_BEGIN_METADATA", + json.dumps(struct), + "__M_END_METADATA\n" '"""', + ) + + @property + def identifiers(self): + return self.identifier_stack[-1] + + def write_toplevel(self): + """Traverse a template structure for module-level directives and + generate the start of module-level code. 
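+
+        As a rough sketch (abridged; the exact lines are produced by the
+        ``writeline()`` calls below, with placeholder values here), the
+        generated module prologue looks like::
+
+            from mako import runtime, filters, cache
+            UNDEFINED = runtime.UNDEFINED
+            STOP_RENDERING = runtime.STOP_RENDERING
+            _magic_number = 10
+            _modified_time = 1733900000.0
+            _template_uri = 'some_template.mako'
+            _exports = ['body']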
+ + """ + inherit = [] + namespaces = {} + module_code = [] + + self.compiler.pagetag = None + + class FindTopLevel: + def visitInheritTag(s, node): + inherit.append(node) + + def visitNamespaceTag(s, node): + namespaces[node.name] = node + + def visitPageTag(s, node): + self.compiler.pagetag = node + + def visitCode(s, node): + if node.ismodule: + module_code.append(node) + + f = FindTopLevel() + for n in self.node.nodes: + n.accept_visitor(f) + + self.compiler.namespaces = namespaces + + module_ident = set() + for n in module_code: + module_ident = module_ident.union(n.declared_identifiers()) + + module_identifiers = _Identifiers(self.compiler) + module_identifiers.declared = module_ident + + # module-level names, python code + if ( + self.compiler.generate_magic_comment + and self.compiler.source_encoding + ): + self.printer.writeline( + "# -*- coding:%s -*-" % self.compiler.source_encoding + ) + + if self.compiler.future_imports: + self.printer.writeline( + "from __future__ import %s" + % (", ".join(self.compiler.future_imports),) + ) + self.printer.writeline("from mako import runtime, filters, cache") + self.printer.writeline("UNDEFINED = runtime.UNDEFINED") + self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING") + self.printer.writeline("__M_dict_builtin = dict") + self.printer.writeline("__M_locals_builtin = locals") + self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER) + self.printer.writeline("_modified_time = %r" % time.time()) + self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop) + self.printer.writeline( + "_template_filename = %r" % self.compiler.filename + ) + self.printer.writeline("_template_uri = %r" % self.compiler.uri) + self.printer.writeline( + "_source_encoding = %r" % self.compiler.source_encoding + ) + if self.compiler.imports: + buf = "" + for imp in self.compiler.imports: + buf += imp + "\n" + self.printer.writeline(imp) + impcode = ast.PythonCode( + buf, + source="", + lineno=0, + pos=0, + filename="template defined imports", + ) + else: + impcode = None + + main_identifiers = module_identifiers.branch(self.node) + mit = module_identifiers.topleveldefs + module_identifiers.topleveldefs = mit.union( + main_identifiers.topleveldefs + ) + module_identifiers.declared.update(TOPLEVEL_DECLARED) + if impcode: + module_identifiers.declared.update(impcode.declared_identifiers) + + self.compiler.identifiers = module_identifiers + self.printer.writeline( + "_exports = %r" + % [n.name for n in main_identifiers.topleveldefs.values()] + ) + self.printer.write_blanks(2) + + if len(module_code): + self.write_module_code(module_code) + + if len(inherit): + self.write_namespaces(namespaces) + self.write_inherit(inherit[-1]) + elif len(namespaces): + self.write_namespaces(namespaces) + + return list(main_identifiers.topleveldefs.values()) + + def write_render_callable( + self, node, name, args, buffered, filtered, cached + ): + """write a top-level render callable. 
+ + this could be the main render() method or that of a top-level def.""" + + if self.in_def: + decorator = node.decorator + if decorator: + self.printer.writeline( + "@runtime._decorate_toplevel(%s)" % decorator + ) + + self.printer.start_source(node.lineno) + self.printer.writelines( + "def %s(%s):" % (name, ",".join(args)), + # push new frame, assign current frame to __M_caller + "__M_caller = context.caller_stack._push_frame()", + "try:", + ) + if buffered or filtered or cached: + self.printer.writeline("context._push_buffer()") + + self.identifier_stack.append( + self.compiler.identifiers.branch(self.node) + ) + if (not self.in_def or self.node.is_block) and "**pageargs" in args: + self.identifier_stack[-1].argument_declared.add("pageargs") + + if not self.in_def and ( + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 + ): + self.printer.writeline( + "__M_locals = __M_dict_builtin(%s)" + % ",".join( + [ + "%s=%s" % (x, x) + for x in self.identifiers.argument_declared + ] + ) + ) + + self.write_variable_declares(self.identifiers, toplevel=True) + + for n in self.node.nodes: + n.accept_visitor(self) + + self.write_def_finish(self.node, buffered, filtered, cached) + self.printer.writeline(None) + self.printer.write_blanks(2) + if cached: + self.write_cache_decorator( + node, name, args, buffered, self.identifiers, toplevel=True + ) + + def write_module_code(self, module_code): + """write module-level template code, i.e. that which + is enclosed in <%! %> tags in the template.""" + for n in module_code: + self.printer.write_indented_block(n.text, starting_lineno=n.lineno) + + def write_inherit(self, node): + """write the module-level inheritance-determination callable.""" + + self.printer.writelines( + "def _mako_inherit(template, context):", + "_mako_generate_namespaces(context)", + "return runtime._inherit_from(context, %s, _template_uri)" + % (node.parsed_attributes["file"]), + None, + ) + + def write_namespaces(self, namespaces): + """write the module-level namespace-generating callable.""" + self.printer.writelines( + "def _mako_get_namespace(context, name):", + "try:", + "return context.namespaces[(__name__, name)]", + "except KeyError:", + "_mako_generate_namespaces(context)", + "return context.namespaces[(__name__, name)]", + None, + None, + ) + self.printer.writeline("def _mako_generate_namespaces(context):") + + for node in namespaces.values(): + if "import" in node.attributes: + self.compiler.has_ns_imports = True + self.printer.start_source(node.lineno) + if len(node.nodes): + self.printer.writeline("def make_namespace():") + export = [] + identifiers = self.compiler.identifiers.branch(node) + self.in_def = True + + class NSDefVisitor: + def visitDefTag(s, node): + s.visitDefOrBase(node) + + def visitBlockTag(s, node): + s.visitDefOrBase(node) + + def visitDefOrBase(s, node): + if node.is_anonymous: + raise exceptions.CompileException( + "Can't put anonymous blocks inside " + "<%namespace>", + **node.exception_kwargs, + ) + self.write_inline_def(node, identifiers, nested=False) + export.append(node.funcname) + + vis = NSDefVisitor() + for n in node.nodes: + n.accept_visitor(vis) + self.printer.writeline("return [%s]" % (",".join(export))) + self.printer.writeline(None) + self.in_def = False + callable_name = "make_namespace()" + else: + callable_name = "None" + + if "file" in node.parsed_attributes: + self.printer.writeline( + "ns = runtime.TemplateNamespace(%r," + " context._clean_inheritance_tokens()," + " templateuri=%s, 
callables=%s, " + " calling_uri=_template_uri)" + % ( + node.name, + node.parsed_attributes.get("file", "None"), + callable_name, + ) + ) + elif "module" in node.parsed_attributes: + self.printer.writeline( + "ns = runtime.ModuleNamespace(%r," + " context._clean_inheritance_tokens()," + " callables=%s, calling_uri=_template_uri," + " module=%s)" + % ( + node.name, + callable_name, + node.parsed_attributes.get("module", "None"), + ) + ) + else: + self.printer.writeline( + "ns = runtime.Namespace(%r," + " context._clean_inheritance_tokens()," + " callables=%s, calling_uri=_template_uri)" + % (node.name, callable_name) + ) + if eval(node.attributes.get("inheritable", "False")): + self.printer.writeline("context['self'].%s = ns" % (node.name)) + + self.printer.writeline( + "context.namespaces[(__name__, %s)] = ns" % repr(node.name) + ) + self.printer.write_blanks(1) + if not len(namespaces): + self.printer.writeline("pass") + self.printer.writeline(None) + + def write_variable_declares(self, identifiers, toplevel=False, limit=None): + """write variable declarations at the top of a function. + + the variable declarations are in the form of callable + definitions for defs and/or name lookup within the + function's context argument. the names declared are based + on the names that are referenced in the function body, + which don't otherwise have any explicit assignment + operation. names that are assigned within the body are + assumed to be locally-scoped variables and are not + separately declared. + + for def callable definitions, if the def is a top-level + callable then a 'stub' callable is generated which wraps + the current Context into a closure. if the def is not + top-level, it is fully rendered as a local closure. + + """ + + # collection of all defs available to us in this scope + comp_idents = {c.funcname: c for c in identifiers.defs} + to_write = set() + + # write "context.get()" for all variables we are going to + # need that arent in the namespace yet + to_write = to_write.union(identifiers.undeclared) + + # write closure functions for closures that we define + # right here + to_write = to_write.union( + [c.funcname for c in identifiers.closuredefs.values()] + ) + + # remove identifiers that are declared in the argument + # signature of the callable + to_write = to_write.difference(identifiers.argument_declared) + + # remove identifiers that we are going to assign to. + # in this way we mimic Python's behavior, + # i.e. assignment to a variable within a block + # means that variable is now a "locally declared" var, + # which cannot be referenced beforehand. + to_write = to_write.difference(identifiers.locally_declared) + + if self.compiler.enable_loop: + has_loop = "loop" in to_write + to_write.discard("loop") + else: + has_loop = False + + # if a limiting set was sent, constraint to those items in that list + # (this is used for the caching decorator) + if limit is not None: + to_write = to_write.intersection(limit) + + if toplevel and getattr(self.compiler, "has_ns_imports", False): + self.printer.writeline("_import_ns = {}") + self.compiler.has_imports = True + for ident, ns in self.compiler.namespaces.items(): + if "import" in ns.attributes: + self.printer.writeline( + "_mako_get_namespace(context, %r)." 
+ "_populate(_import_ns, %r)" + % ( + ident, + re.split(r"\s*,\s*", ns.attributes["import"]), + ) + ) + + if has_loop: + self.printer.writeline("loop = __M_loop = runtime.LoopStack()") + + for ident in to_write: + if ident in comp_idents: + comp = comp_idents[ident] + if comp.is_block: + if not comp.is_anonymous: + self.write_def_decl(comp, identifiers) + else: + self.write_inline_def(comp, identifiers, nested=True) + else: + if comp.is_root(): + self.write_def_decl(comp, identifiers) + else: + self.write_inline_def(comp, identifiers, nested=True) + + elif ident in self.compiler.namespaces: + self.printer.writeline( + "%s = _mako_get_namespace(context, %r)" % (ident, ident) + ) + else: + if getattr(self.compiler, "has_ns_imports", False): + if self.compiler.strict_undefined: + self.printer.writelines( + "%s = _import_ns.get(%r, UNDEFINED)" + % (ident, ident), + "if %s is UNDEFINED:" % ident, + "try:", + "%s = context[%r]" % (ident, ident), + "except KeyError:", + "raise NameError(\"'%s' is not defined\")" % ident, + None, + None, + ) + else: + self.printer.writeline( + "%s = _import_ns.get" + "(%r, context.get(%r, UNDEFINED))" + % (ident, ident, ident) + ) + else: + if self.compiler.strict_undefined: + self.printer.writelines( + "try:", + "%s = context[%r]" % (ident, ident), + "except KeyError:", + "raise NameError(\"'%s' is not defined\")" % ident, + None, + ) + else: + self.printer.writeline( + "%s = context.get(%r, UNDEFINED)" % (ident, ident) + ) + + self.printer.writeline("__M_writer = context.writer()") + + def write_def_decl(self, node, identifiers): + """write a locally-available callable referencing a top-level def""" + funcname = node.funcname + namedecls = node.get_argument_expressions() + nameargs = node.get_argument_expressions(as_call=True) + + if not self.in_def and ( + len(self.identifiers.locally_assigned) > 0 + or len(self.identifiers.argument_declared) > 0 + ): + nameargs.insert(0, "context._locals(__M_locals)") + else: + nameargs.insert(0, "context") + self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls))) + self.printer.writeline( + "return render_%s(%s)" % (funcname, ",".join(nameargs)) + ) + self.printer.writeline(None) + + def write_inline_def(self, node, identifiers, nested): + """write a locally-available def callable inside an enclosing def.""" + + namedecls = node.get_argument_expressions() + + decorator = node.decorator + if decorator: + self.printer.writeline( + "@runtime._decorate_inline(context, %s)" % decorator + ) + self.printer.writeline( + "def %s(%s):" % (node.funcname, ",".join(namedecls)) + ) + filtered = len(node.filter_args.args) > 0 + buffered = eval(node.attributes.get("buffered", "False")) + cached = eval(node.attributes.get("cached", "False")) + self.printer.writelines( + # push new frame, assign current frame to __M_caller + "__M_caller = context.caller_stack._push_frame()", + "try:", + ) + if buffered or filtered or cached: + self.printer.writelines("context._push_buffer()") + + identifiers = identifiers.branch(node, nested=nested) + + self.write_variable_declares(identifiers) + + self.identifier_stack.append(identifiers) + for n in node.nodes: + n.accept_visitor(self) + self.identifier_stack.pop() + + self.write_def_finish(node, buffered, filtered, cached) + self.printer.writeline(None) + if cached: + self.write_cache_decorator( + node, + node.funcname, + namedecls, + False, + identifiers, + inline=True, + toplevel=False, + ) + + def write_def_finish( + self, node, buffered, filtered, cached, callstack=True + ): + """write 
the end section of a rendering function, either outermost or + inline. + + this takes into account if the rendering function was filtered, + buffered, etc. and closes the corresponding try: block if any, and + writes code to retrieve captured content, apply filters, send proper + return value.""" + + if not buffered and not cached and not filtered: + self.printer.writeline("return ''") + if callstack: + self.printer.writelines( + "finally:", "context.caller_stack._pop_frame()", None + ) + + if buffered or filtered or cached: + if buffered or cached: + # in a caching scenario, don't try to get a writer + # from the context after popping; assume the caching + # implemenation might be using a context with no + # extra buffers + self.printer.writelines( + "finally:", "__M_buf = context._pop_buffer()" + ) + else: + self.printer.writelines( + "finally:", + "__M_buf, __M_writer = context._pop_buffer_and_writer()", + ) + + if callstack: + self.printer.writeline("context.caller_stack._pop_frame()") + + s = "__M_buf.getvalue()" + if filtered: + s = self.create_filter_callable( + node.filter_args.args, s, False + ) + self.printer.writeline(None) + if buffered and not cached: + s = self.create_filter_callable( + self.compiler.buffer_filters, s, False + ) + if buffered or cached: + self.printer.writeline("return %s" % s) + else: + self.printer.writelines("__M_writer(%s)" % s, "return ''") + + def write_cache_decorator( + self, + node_or_pagetag, + name, + args, + buffered, + identifiers, + inline=False, + toplevel=False, + ): + """write a post-function decorator to replace a rendering + callable with a cached version of itself.""" + + self.printer.writeline("__M_%s = %s" % (name, name)) + cachekey = node_or_pagetag.parsed_attributes.get( + "cache_key", repr(name) + ) + + cache_args = {} + if self.compiler.pagetag is not None: + cache_args.update( + (pa[6:], self.compiler.pagetag.parsed_attributes[pa]) + for pa in self.compiler.pagetag.parsed_attributes + if pa.startswith("cache_") and pa != "cache_key" + ) + cache_args.update( + (pa[6:], node_or_pagetag.parsed_attributes[pa]) + for pa in node_or_pagetag.parsed_attributes + if pa.startswith("cache_") and pa != "cache_key" + ) + if "timeout" in cache_args: + cache_args["timeout"] = int(eval(cache_args["timeout"])) + + self.printer.writeline("def %s(%s):" % (name, ",".join(args))) + + # form "arg1, arg2, arg3=arg3, arg4=arg4", etc. + pass_args = [ + "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args + ] + + self.write_variable_declares( + identifiers, + toplevel=toplevel, + limit=node_or_pagetag.undeclared_identifiers(), + ) + if buffered: + s = ( + "context.get('local')." + "cache._ctx_get_or_create(" + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), + name, + ) + ) + # apply buffer_filters + s = self.create_filter_callable( + self.compiler.buffer_filters, s, False + ) + self.printer.writelines("return " + s, None) + else: + self.printer.writelines( + "__M_writer(context.get('local')." 
+ "cache._ctx_get_or_create(" + "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))" + % ( + cachekey, + name, + ",".join(pass_args), + "".join( + ["%s=%s, " % (k, v) for k, v in cache_args.items()] + ), + name, + ), + "return ''", + None, + ) + + def create_filter_callable(self, args, target, is_expression): + """write a filter-applying expression based on the filters + present in the given filter names, adjusting for the global + 'default' filter aliases as needed.""" + + def locate_encode(name): + if re.match(r"decode\..+", name): + return "filters." + name + else: + return filters.DEFAULT_ESCAPES.get(name, name) + + if "n" not in args: + if is_expression: + if self.compiler.pagetag: + args = self.compiler.pagetag.filter_args.args + args + if self.compiler.default_filters and "n" not in args: + args = self.compiler.default_filters + args + for e in args: + # if filter given as a function, get just the identifier portion + if e == "n": + continue + m = re.match(r"(.+?)(\(.*\))", e) + if m: + ident, fargs = m.group(1, 2) + f = locate_encode(ident) + e = f + fargs + else: + e = locate_encode(e) + assert e is not None + target = "%s(%s)" % (e, target) + return target + + def visitExpression(self, node): + self.printer.start_source(node.lineno) + if ( + len(node.escapes) + or ( + self.compiler.pagetag is not None + and len(self.compiler.pagetag.filter_args.args) + ) + or len(self.compiler.default_filters) + ): + s = self.create_filter_callable( + node.escapes_code.args, "%s" % node.text, True + ) + self.printer.writeline("__M_writer(%s)" % s) + else: + self.printer.writeline("__M_writer(%s)" % node.text) + + def visitControlLine(self, node): + if node.isend: + self.printer.writeline(None) + if node.has_loop_context: + self.printer.writeline("finally:") + self.printer.writeline("loop = __M_loop._exit()") + self.printer.writeline(None) + else: + self.printer.start_source(node.lineno) + if self.compiler.enable_loop and node.keyword == "for": + text = mangle_mako_loop(node, self.printer) + else: + text = node.text + self.printer.writeline(text) + children = node.get_children() + + # this covers the four situations where we want to insert a pass: + # 1) a ternary control line with no children, + # 2) a primary control line with nothing but its own ternary + # and end control lines, and + # 3) any control line with no content other than comments + # 4) the first control block with no content other than comments + def _search_for_control_line(): + for c in children: + if isinstance(c, parsetree.Comment): + continue + elif isinstance(c, parsetree.ControlLine): + return True + return False + + if ( + not children + or all( + isinstance(c, (parsetree.Comment, parsetree.ControlLine)) + for c in children + ) + and all( + (node.is_ternary(c.keyword) or c.isend) + for c in children + if isinstance(c, parsetree.ControlLine) + ) + or _search_for_control_line() + ): + self.printer.writeline("pass") + + def visitText(self, node): + self.printer.start_source(node.lineno) + self.printer.writeline("__M_writer(%s)" % repr(node.content)) + + def visitTextTag(self, node): + filtered = len(node.filter_args.args) > 0 + if filtered: + self.printer.writelines( + "__M_writer = context._push_writer()", "try:" + ) + for n in node.nodes: + n.accept_visitor(self) + if filtered: + self.printer.writelines( + "finally:", + "__M_buf, __M_writer = context._pop_buffer_and_writer()", + "__M_writer(%s)" + % self.create_filter_callable( + node.filter_args.args, "__M_buf.getvalue()", False + ), + None, + ) + + def visitCode(self, 
node): + if not node.ismodule: + self.printer.write_indented_block( + node.text, starting_lineno=node.lineno + ) + + if not self.in_def and len(self.identifiers.locally_assigned) > 0: + # if we are the "template" def, fudge locally + # declared/modified variables into the "__M_locals" dictionary, + # which is used for def calls within the same template, + # to simulate "enclosing scope" + self.printer.writeline( + "__M_locals_builtin_stored = __M_locals_builtin()" + ) + self.printer.writeline( + "__M_locals.update(__M_dict_builtin([(__M_key," + " __M_locals_builtin_stored[__M_key]) for __M_key in" + " [%s] if __M_key in __M_locals_builtin_stored]))" + % ",".join([repr(x) for x in node.declared_identifiers()]) + ) + + def visitIncludeTag(self, node): + self.printer.start_source(node.lineno) + args = node.attributes.get("args") + if args: + self.printer.writeline( + "runtime._include_file(context, %s, _template_uri, %s)" + % (node.parsed_attributes["file"], args) + ) + else: + self.printer.writeline( + "runtime._include_file(context, %s, _template_uri)" + % (node.parsed_attributes["file"]) + ) + + def visitNamespaceTag(self, node): + pass + + def visitDefTag(self, node): + pass + + def visitBlockTag(self, node): + if node.is_anonymous: + self.printer.writeline("%s()" % node.funcname) + else: + nameargs = node.get_argument_expressions(as_call=True) + nameargs += ["**pageargs"] + self.printer.writeline( + "if 'parent' not in context._data or " + "not hasattr(context._data['parent'], '%s'):" % node.funcname + ) + self.printer.writeline( + "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs)) + ) + self.printer.writeline("\n") + + def visitCallNamespaceTag(self, node): + # TODO: we can put namespace-specific checks here, such + # as ensure the given namespace will be imported, + # pre-import the namespace, etc. 
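+        # for now a <%namespacename:defname/> call is compiled exactly
+        # like a plain <%call> tag, via visitCallTag() below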
+ self.visitCallTag(node) + + def visitCallTag(self, node): + self.printer.writeline("def ccall(caller):") + export = ["body"] + callable_identifiers = self.identifiers.branch(node, nested=True) + body_identifiers = callable_identifiers.branch(node, nested=False) + # we want the 'caller' passed to ccall to be used + # for the body() function, but for other non-body() + # <%def>s within <%call> we want the current caller + # off the call stack (if any) + body_identifiers.add_declared("caller") + + self.identifier_stack.append(body_identifiers) + + class DefVisitor: + def visitDefTag(s, node): + s.visitDefOrBase(node) + + def visitBlockTag(s, node): + s.visitDefOrBase(node) + + def visitDefOrBase(s, node): + self.write_inline_def(node, callable_identifiers, nested=False) + if not node.is_anonymous: + export.append(node.funcname) + # remove defs that are within the <%call> from the + # "closuredefs" defined in the body, so they dont render twice + if node.funcname in body_identifiers.closuredefs: + del body_identifiers.closuredefs[node.funcname] + + vis = DefVisitor() + for n in node.nodes: + n.accept_visitor(vis) + self.identifier_stack.pop() + + bodyargs = node.body_decl.get_argument_expressions() + self.printer.writeline("def body(%s):" % ",".join(bodyargs)) + + # TODO: figure out best way to specify + # buffering/nonbuffering (at call time would be better) + buffered = False + if buffered: + self.printer.writelines("context._push_buffer()", "try:") + self.write_variable_declares(body_identifiers) + self.identifier_stack.append(body_identifiers) + + for n in node.nodes: + n.accept_visitor(self) + self.identifier_stack.pop() + + self.write_def_finish(node, buffered, False, False, callstack=False) + self.printer.writelines(None, "return [%s]" % (",".join(export)), None) + + self.printer.writelines( + # push on caller for nested call + "context.caller_stack.nextcaller = " + "runtime.Namespace('caller', context, " + "callables=ccall(__M_caller))", + "try:", + ) + self.printer.start_source(node.lineno) + self.printer.writelines( + "__M_writer(%s)" + % self.create_filter_callable([], node.expression, True), + "finally:", + "context.caller_stack.nextcaller = None", + None, + ) + + +class _Identifiers: + + """tracks the status of identifier names as template code is rendered.""" + + def __init__(self, compiler, node=None, parent=None, nested=False): + if parent is not None: + # if we are the branch created in write_namespaces(), + # we don't share any context from the main body(). + if isinstance(node, parsetree.NamespaceTag): + self.declared = set() + self.topleveldefs = util.SetLikeDict() + else: + # things that have already been declared + # in an enclosing namespace (i.e. names we can just use) + self.declared = ( + set(parent.declared) + .union([c.name for c in parent.closuredefs.values()]) + .union(parent.locally_declared) + .union(parent.argument_declared) + ) + + # if these identifiers correspond to a "nested" + # scope, it means whatever the parent identifiers + # had as undeclared will have been declared by that parent, + # and therefore we have them in our scope. + if nested: + self.declared = self.declared.union(parent.undeclared) + + # top level defs that are available + self.topleveldefs = util.SetLikeDict(**parent.topleveldefs) + else: + self.declared = set() + self.topleveldefs = util.SetLikeDict() + + self.compiler = compiler + + # things within this level that are referenced before they + # are declared (e.g. 
assigned to) + self.undeclared = set() + + # things that are declared locally. some of these things + # could be in the "undeclared" list as well if they are + # referenced before declared + self.locally_declared = set() + + # assignments made in explicit python blocks. + # these will be propagated to + # the context of local def calls. + self.locally_assigned = set() + + # things that are declared in the argument + # signature of the def callable + self.argument_declared = set() + + # closure defs that are defined in this level + self.closuredefs = util.SetLikeDict() + + self.node = node + + if node is not None: + node.accept_visitor(self) + + illegal_names = self.compiler.reserved_names.intersection( + self.locally_declared + ) + if illegal_names: + raise exceptions.NameConflictError( + "Reserved words declared in template: %s" + % ", ".join(illegal_names) + ) + + def branch(self, node, **kwargs): + """create a new Identifiers for a new Node, with + this Identifiers as the parent.""" + + return _Identifiers(self.compiler, node, self, **kwargs) + + @property + def defs(self): + return set(self.topleveldefs.union(self.closuredefs).values()) + + def __repr__(self): + return ( + "Identifiers(declared=%r, locally_declared=%r, " + "undeclared=%r, topleveldefs=%r, closuredefs=%r, " + "argumentdeclared=%r)" + % ( + list(self.declared), + list(self.locally_declared), + list(self.undeclared), + [c.name for c in self.topleveldefs.values()], + [c.name for c in self.closuredefs.values()], + self.argument_declared, + ) + ) + + def check_declared(self, node): + """update the state of this Identifiers with the undeclared + and declared identifiers of the given node.""" + + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + for ident in node.declared_identifiers(): + self.locally_declared.add(ident) + + def add_declared(self, ident): + self.declared.add(ident) + if ident in self.undeclared: + self.undeclared.remove(ident) + + def visitExpression(self, node): + self.check_declared(node) + + def visitControlLine(self, node): + self.check_declared(node) + + def visitCode(self, node): + if not node.ismodule: + self.check_declared(node) + self.locally_assigned = self.locally_assigned.union( + node.declared_identifiers() + ) + + def visitNamespaceTag(self, node): + # only traverse into the sub-elements of a + # <%namespace> tag if we are the branch created in + # write_namespaces() + if self.node is node: + for n in node.nodes: + n.accept_visitor(self) + + def _check_name_exists(self, collection, node): + existing = collection.get(node.funcname) + collection[node.funcname] = node + if ( + existing is not None + and existing is not node + and (node.is_block or existing.is_block) + ): + raise exceptions.CompileException( + "%%def or %%block named '%s' already " + "exists in this template." 
% node.funcname, + **node.exception_kwargs, + ) + + def visitDefTag(self, node): + if node.is_root() and not node.is_anonymous: + self._check_name_exists(self.topleveldefs, node) + elif node is not self.node: + self._check_name_exists(self.closuredefs, node) + + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + # visit defs only one level deep + if node is self.node: + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + + for n in node.nodes: + n.accept_visitor(self) + + def visitBlockTag(self, node): + if node is not self.node and not node.is_anonymous: + if isinstance(self.node, parsetree.DefTag): + raise exceptions.CompileException( + "Named block '%s' not allowed inside of def '%s'" + % (node.name, self.node.name), + **node.exception_kwargs, + ) + elif isinstance( + self.node, (parsetree.CallTag, parsetree.CallNamespaceTag) + ): + raise exceptions.CompileException( + "Named block '%s' not allowed inside of <%%call> tag" + % (node.name,), + **node.exception_kwargs, + ) + + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + if not node.is_anonymous: + self._check_name_exists(self.topleveldefs, node) + self.undeclared.add(node.funcname) + elif node is not self.node: + self._check_name_exists(self.closuredefs, node) + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + for n in node.nodes: + n.accept_visitor(self) + + def visitTextTag(self, node): + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + def visitIncludeTag(self, node): + self.check_declared(node) + + def visitPageTag(self, node): + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + self.check_declared(node) + + def visitCallNamespaceTag(self, node): + self.visitCallTag(node) + + def visitCallTag(self, node): + if node is self.node: + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + for ident in node.declared_identifiers(): + self.argument_declared.add(ident) + for n in node.nodes: + n.accept_visitor(self) + else: + for ident in node.undeclared_identifiers(): + if ident != "context" and ident not in self.declared.union( + self.locally_declared + ): + self.undeclared.add(ident) + + +_FOR_LOOP = re.compile( + r"^for\s+((?:\(?)\s*" + r"(?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*" + r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z_0-9]*),??)*\s*(?:\)?)" + r"(?:\s*,\s*(?:" + r"(?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*" + r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z_0-9]*),??)*\s*(?:\)?)" + r"),??)*\s*(?:\)?))\s+in\s+(.*):" +) + + +def mangle_mako_loop(node, printer): + """converts a for loop into a context manager wrapped around a for loop + when access to the `loop` variable has been detected in the for loop body + """ + loop_variable = LoopVariable() + node.accept_visitor(loop_variable) + if loop_variable.detected: + node.nodes[-1].has_loop_context = True + match = _FOR_LOOP.match(node.text) + if match: + printer.writelines( + "loop = __M_loop._enter(%s)" % match.group(2), + "try:" + # 'with __M_loop(%s) as loop:' % match.group(2) + ) + text = "for %s in loop:" % match.group(1) + else: + raise SyntaxError("Couldn't apply loop context: 
%s" % node.text) + else: + text = node.text + return text + + +class LoopVariable: + + """A node visitor which looks for the name 'loop' within undeclared + identifiers.""" + + def __init__(self): + self.detected = False + + def _loop_reference_detected(self, node): + if "loop" in node.undeclared_identifiers(): + self.detected = True + else: + for n in node.get_children(): + n.accept_visitor(self) + + def visitControlLine(self, node): + self._loop_reference_detected(node) + + def visitCode(self, node): + self._loop_reference_detected(node) + + def visitExpression(self, node): + self._loop_reference_detected(node) diff --git a/venv/lib/python3.12/site-packages/mako/compat.py b/venv/lib/python3.12/site-packages/mako/compat.py new file mode 100644 index 0000000..1c17601 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/compat.py @@ -0,0 +1,70 @@ +# mako/compat.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import collections +from importlib import metadata as importlib_metadata +from importlib import util +import inspect +import sys + +win32 = sys.platform.startswith("win") +pypy = hasattr(sys, "pypy_version_info") + +ArgSpec = collections.namedtuple( + "ArgSpec", ["args", "varargs", "keywords", "defaults"] +) + + +def inspect_getargspec(func): + """getargspec based on fully vendored getfullargspec from Python 3.3.""" + + if inspect.ismethod(func): + func = func.__func__ + if not inspect.isfunction(func): + raise TypeError(f"{func!r} is not a Python function") + + co = func.__code__ + if not inspect.iscode(co): + raise TypeError(f"{co!r} is not a code object") + + nargs = co.co_argcount + names = co.co_varnames + nkwargs = co.co_kwonlyargcount + args = list(names[:nargs]) + + nargs += nkwargs + varargs = None + if co.co_flags & inspect.CO_VARARGS: + varargs = co.co_varnames[nargs] + nargs = nargs + 1 + varkw = None + if co.co_flags & inspect.CO_VARKEYWORDS: + varkw = co.co_varnames[nargs] + + return ArgSpec(args, varargs, varkw, func.__defaults__) + + +def load_module(module_id, path): + spec = util.spec_from_file_location(module_id, path) + module = util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def exception_as(): + return sys.exc_info()[1] + + +def exception_name(exc): + return exc.__class__.__name__ + + +def importlib_metadata_get(group): + ep = importlib_metadata.entry_points() + if hasattr(ep, "select"): + return ep.select(group=group) + else: + return ep.get(group, ()) diff --git a/venv/lib/python3.12/site-packages/mako/exceptions.py b/venv/lib/python3.12/site-packages/mako/exceptions.py new file mode 100644 index 0000000..6aeb2d2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/exceptions.py @@ -0,0 +1,417 @@ +# mako/exceptions.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""exception classes""" + +import sys +import traceback + +from mako import compat +from mako import util + + +class MakoException(Exception): + pass + + +class RuntimeException(MakoException): + pass + + +def _format_filepos(lineno, pos, filename): + if filename is None: + return " at line: %d char: %d" % (lineno, pos) + else: + return " in file '%s' at line: %d char: %d" % (filename, lineno, pos) + + +class CompileException(MakoException): + def __init__(self, message, source, 
lineno, pos, filename): + MakoException.__init__( + self, message + _format_filepos(lineno, pos, filename) + ) + self.lineno = lineno + self.pos = pos + self.filename = filename + self.source = source + + +class SyntaxException(MakoException): + def __init__(self, message, source, lineno, pos, filename): + MakoException.__init__( + self, message + _format_filepos(lineno, pos, filename) + ) + self.lineno = lineno + self.pos = pos + self.filename = filename + self.source = source + + +class UnsupportedError(MakoException): + + """raised when a retired feature is used.""" + + +class NameConflictError(MakoException): + + """raised when a reserved word is used inappropriately""" + + +class TemplateLookupException(MakoException): + pass + + +class TopLevelLookupException(TemplateLookupException): + pass + + +class RichTraceback: + + """Pull the current exception from the ``sys`` traceback and extracts + Mako-specific template information. + + See the usage examples in :ref:`handling_exceptions`. + + """ + + def __init__(self, error=None, traceback=None): + self.source, self.lineno = "", 0 + + if error is None or traceback is None: + t, value, tback = sys.exc_info() + + if error is None: + error = value or t + + if traceback is None: + traceback = tback + + self.error = error + self.records = self._init(traceback) + + if isinstance(self.error, (CompileException, SyntaxException)): + self.source = self.error.source + self.lineno = self.error.lineno + self._has_source = True + + self._init_message() + + @property + def errorname(self): + return compat.exception_name(self.error) + + def _init_message(self): + """Find a unicode representation of self.error""" + try: + self.message = str(self.error) + except UnicodeError: + try: + self.message = str(self.error) + except UnicodeEncodeError: + # Fallback to args as neither unicode nor + # str(Exception(u'\xe6')) work in Python < 2.6 + self.message = self.error.args[0] + if not isinstance(self.message, str): + self.message = str(self.message, "ascii", "replace") + + def _get_reformatted_records(self, records): + for rec in records: + if rec[6] is not None: + yield (rec[4], rec[5], rec[2], rec[6]) + else: + yield tuple(rec[0:4]) + + @property + def traceback(self): + """Return a list of 4-tuple traceback records (i.e. normal python + format) with template-corresponding lines remapped to the originating + template. 
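+
+        A minimal consumption sketch (the ``template`` object here is
+        hypothetical; see :ref:`handling_exceptions` for the documented
+        pattern)::
+
+            try:
+                template.render()
+            except Exception:
+                tback = RichTraceback()
+                for filename, lineno, function, line in tback.traceback:
+                    print(filename, lineno, "in", function)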
+ + """ + return list(self._get_reformatted_records(self.records)) + + @property + def reverse_records(self): + return reversed(self.records) + + @property + def reverse_traceback(self): + """Return the same data as traceback, except in reverse order.""" + + return list(self._get_reformatted_records(self.reverse_records)) + + def _init(self, trcback): + """format a traceback from sys.exc_info() into 7-item tuples, + containing the regular four traceback tuple items, plus the original + template filename, the line number adjusted relative to the template + source, and code line from that line number of the template.""" + + import mako.template + + mods = {} + rawrecords = traceback.extract_tb(trcback) + new_trcback = [] + for filename, lineno, function, line in rawrecords: + if not line: + line = "" + try: + (line_map, template_lines, template_filename) = mods[filename] + except KeyError: + try: + info = mako.template._get_module_info(filename) + module_source = info.code + template_source = info.source + template_filename = ( + info.template_filename or info.template_uri or filename + ) + except KeyError: + # A normal .py file (not a Template) + new_trcback.append( + ( + filename, + lineno, + function, + line, + None, + None, + None, + None, + ) + ) + continue + + template_ln = 1 + + mtm = mako.template.ModuleInfo + source_map = mtm.get_module_source_metadata( + module_source, full_line_map=True + ) + line_map = source_map["full_line_map"] + + template_lines = [ + line_ for line_ in template_source.split("\n") + ] + mods[filename] = (line_map, template_lines, template_filename) + + template_ln = line_map[lineno - 1] + + if template_ln <= len(template_lines): + template_line = template_lines[template_ln - 1] + else: + template_line = None + new_trcback.append( + ( + filename, + lineno, + function, + line, + template_filename, + template_ln, + template_line, + template_source, + ) + ) + if not self.source: + for l in range(len(new_trcback) - 1, 0, -1): + if new_trcback[l][5]: + self.source = new_trcback[l][7] + self.lineno = new_trcback[l][5] + break + else: + if new_trcback: + try: + # A normal .py file (not a Template) + with open(new_trcback[-1][0], "rb") as fp: + encoding = util.parse_encoding(fp) + if not encoding: + encoding = "utf-8" + fp.seek(0) + self.source = fp.read() + if encoding: + self.source = self.source.decode(encoding) + except IOError: + self.source = "" + self.lineno = new_trcback[-1][1] + return new_trcback + + +def text_error_template(lookup=None): + """Provides a template that renders a stack trace in a similar format to + the Python interpreter, substituting source template filenames, line + numbers and code for that of the originating source template, as + applicable. + + """ + import mako.template + + return mako.template.Template( + r""" +<%page args="error=None, traceback=None"/> +<%! 
+    from mako.exceptions import RichTraceback
+%>\
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+%>\
+Traceback (most recent call last):
+% for (filename, lineno, function, line) in tback.traceback:
+  File "${filename}", line ${lineno}, in ${function or '?'}
+    ${line | trim}
+% endfor
+${tback.errorname}: ${tback.message}
+"""
+    )
+
+
+def _install_pygments():
+    global syntax_highlight, pygments_html_formatter
+    from mako.ext.pygmentplugin import syntax_highlight  # noqa
+    from mako.ext.pygmentplugin import pygments_html_formatter  # noqa
+
+
+def _install_fallback():
+    global syntax_highlight, pygments_html_formatter
+    from mako.filters import html_escape
+
+    pygments_html_formatter = None
+
+    def syntax_highlight(filename="", language=None):
+        return html_escape
+
+
+def _install_highlighting():
+    try:
+        _install_pygments()
+    except ImportError:
+        _install_fallback()
+
+
+_install_highlighting()
+
+
+def html_error_template():
+    """Provides a template that renders a stack trace in an HTML format,
+    providing an excerpt of code as well as substituting source template
+    filenames, line numbers and code for that of the originating source
+    template, as applicable.
+
+    The template's default ``encoding_errors`` value is
+    ``'htmlentityreplace'``. The template has two options. With the
+    ``full`` option disabled, only a section of an HTML document is
+    returned. With the ``css`` option disabled, the default stylesheet
+    won't be included.
+
+    """
+    import mako.template
+
+    return mako.template.Template(
+        r"""
+<%!
+    from mako.exceptions import RichTraceback, syntax_highlight,\
+        pygments_html_formatter
+%>
+<%page args="full=True, css=True, error=None, traceback=None"/>
+% if full:
+<html>
+<head>
+    <title>Mako Runtime Error</title>
+% endif
+% if css:
+    <style>
+        body { font-family:verdana; margin:10px 30px 10px 30px;}
+        .stacktrace { margin:5px 5px 5px 5px; }
+        .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; }
+        .nonhighlight { padding:0px; background-color:#DFDFDF; }
+        .sample { padding:10px; margin:10px 10px 10px 10px;
+                  font-family:monospace; }
+        .sampleline { padding:0px 10px 0px 10px; }
+        .sourceline { margin:5px 5px 10px 5px; font-family:monospace;}
+        .location { font-size:80%; }
+        .highlight { white-space:pre; }
+        .sampleline { white-space:pre; }
+
+    % if pygments_html_formatter:
+        ${pygments_html_formatter.get_style_defs() | n}
+        .linenos { min-width: 2.5em; text-align: right; }
+        pre { margin: 0; }
+        .syntax-highlighted { padding: 0 10px; }
+        .syntax-highlightedtable { border-spacing: 1px; }
+        .nonhighlight { border-top: 1px solid #DFDFDF;
+                        border-bottom: 1px solid #DFDFDF; }
+        .stacktrace .nonhighlight { margin: 5px 15px 10px; }
+        .sourceline { margin: 0 0; font-family:monospace; }
+        .code { background-color: #F8F8F8; width: 100%; }
+        .error .code { background-color: #FFBDBD; }
+        .error .syntax-highlighted { background-color: #FFBDBD; }
+    % endif
+    </style>
+% endif
+% if full:
+</head>
+<body>
+% endif
+
+<h2>Error !</h2>
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+    src = tback.source
+    line = tback.lineno
+    if src:
+        lines = src.split('\n')
+    else:
+        lines = None
+%>
+<h3>${tback.errorname}: ${tback.message|h}</h3>
+
+% if lines:
+    <div class="sample">
+    <div class="nonhighlight">
+% for index in range(max(0, line-4),min(len(lines), line+5)):
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = index + 1
+    %>
+    % if index + 1 == line:
+    <%
+       if pygments_html_formatter:
+           old_cssclass = pygments_html_formatter.cssclass
+           pygments_html_formatter.cssclass = 'error ' + old_cssclass
+    %>
+        ${lines[index] | syntax_highlight(language='mako')}
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.cssclass = old_cssclass
+    %>
+    % else:
+        ${lines[index] | syntax_highlight(language='mako')}
+    % endif
+% endfor
+    </div>
+    </div>
+% endif
+
+<div class="stacktrace">
+% for (filename, lineno, function, line) in tback.reverse_traceback:
+    <div class="location">${filename}, line ${lineno}:</div>
+    <div class="nonhighlight">
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = lineno
+    %>
+      <div class="sourceline">${line | syntax_highlight(filename)}</div>
+    </div>
+% endfor
+</div>
+
+% if full:
+</body>
+</html>
+% endif
+""",
+        output_encoding=sys.getdefaultencoding(),
+        encoding_errors="htmlentityreplace",
+    )
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__init__.py b/venv/lib/python3.12/site-packages/mako/ext/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..9ee4224
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/autohandler.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/autohandler.cpython-312.pyc
new file mode 100644
index 0000000..7ce26f9
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/autohandler.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/babelplugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/babelplugin.cpython-312.pyc
new file mode 100644
index 0000000..77e9ba7
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/babelplugin.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/beaker_cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/beaker_cache.cpython-312.pyc
new file mode 100644
index 0000000..2e3fdce
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/beaker_cache.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/extract.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/extract.cpython-312.pyc
new file mode 100644
index 0000000..5c1f880
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/extract.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/linguaplugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/linguaplugin.cpython-312.pyc
new file mode 100644
index 0000000..9b04c83
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/linguaplugin.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/preprocessors.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/preprocessors.cpython-312.pyc
new file mode 100644
index 0000000..fb5fa61
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/preprocessors.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/pygmentplugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/pygmentplugin.cpython-312.pyc
new file mode 100644
index 0000000..caa65b0
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/pygmentplugin.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/__pycache__/turbogears.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/turbogears.cpython-312.pyc
new file mode 100644
index 0000000..136e4b3
Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/ext/__pycache__/turbogears.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/mako/ext/autohandler.py b/venv/lib/python3.12/site-packages/mako/ext/autohandler.py
new file mode 100644
index
0000000..829ed09 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ext/autohandler.py @@ -0,0 +1,70 @@ +# ext/autohandler.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""adds autohandler functionality to Mako templates. + +requires that the TemplateLookup class is used with templates. + +usage:: + + <%! + from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context)}"/> + + +or with custom autohandler filename:: + + <%! + from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context, name='somefilename')}"/> + +""" + +import os +import posixpath +import re + + +def autohandler(template, context, name="autohandler"): + lookup = context.lookup + _template_uri = template.module._template_uri + if not lookup.filesystem_checks: + try: + return lookup._uri_cache[(autohandler, _template_uri, name)] + except KeyError: + pass + + tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name] + while len(tokens): + path = "/" + "/".join(tokens) + if path != _template_uri and _file_exists(lookup, path): + if not lookup.filesystem_checks: + return lookup._uri_cache.setdefault( + (autohandler, _template_uri, name), path + ) + else: + return path + if len(tokens) == 1: + break + tokens[-2:] = [name] + + if not lookup.filesystem_checks: + return lookup._uri_cache.setdefault( + (autohandler, _template_uri, name), None + ) + else: + return None + + +def _file_exists(lookup, path): + psub = re.sub(r"^/", "", path) + for d in lookup.directories: + if os.path.exists(d + "/" + psub): + return True + else: + return False diff --git a/venv/lib/python3.12/site-packages/mako/ext/babelplugin.py b/venv/lib/python3.12/site-packages/mako/ext/babelplugin.py new file mode 100644 index 0000000..541ea54 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ext/babelplugin.py @@ -0,0 +1,57 @@ +# ext/babelplugin.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""gettext message extraction via Babel: https://pypi.org/project/Babel/""" +from babel.messages.extract import extract_python + +from mako.ext.extract import MessageExtractor + + +class BabelMakoExtractor(MessageExtractor): + def __init__(self, keywords, comment_tags, options): + self.keywords = keywords + self.options = options + self.config = { + "comment-tags": " ".join(comment_tags), + "encoding": options.get( + "input_encoding", options.get("encoding", None) + ), + } + super().__init__() + + def __call__(self, fileobj): + return self.process_file(fileobj) + + def process_python(self, code, code_lineno, translator_strings): + comment_tags = self.config["comment-tags"] + for ( + lineno, + funcname, + messages, + python_translator_comments, + ) in extract_python(code, self.keywords, comment_tags, self.options): + yield ( + code_lineno + (lineno - 1), + funcname, + messages, + translator_strings + python_translator_comments, + ) + + +def extract(fileobj, keywords, comment_tags, options): + """Extract messages from Mako templates. + + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a list of keywords (i.e. 
function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :return: an iterator over ``(lineno, funcname, message, comments)`` tuples + :rtype: ``iterator`` + """ + extractor = BabelMakoExtractor(keywords, comment_tags, options) + yield from extractor(fileobj) diff --git a/venv/lib/python3.12/site-packages/mako/ext/beaker_cache.py b/venv/lib/python3.12/site-packages/mako/ext/beaker_cache.py new file mode 100644 index 0000000..b1392f3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ext/beaker_cache.py @@ -0,0 +1,82 @@ +# ext/beaker_cache.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provide a :class:`.CacheImpl` for the Beaker caching system.""" + +from mako import exceptions +from mako.cache import CacheImpl + +try: + from beaker import cache as beaker_cache +except: + has_beaker = False +else: + has_beaker = True + +_beaker_cache = None + + +class BeakerCacheImpl(CacheImpl): + + """A :class:`.CacheImpl` provided for the Beaker caching system. + + This plugin is used by default, based on the default + value of ``'beaker'`` for the ``cache_impl`` parameter of the + :class:`.Template` or :class:`.TemplateLookup` classes. + + """ + + def __init__(self, cache): + if not has_beaker: + raise exceptions.RuntimeException( + "Can't initialize Beaker plugin; Beaker is not installed." + ) + global _beaker_cache + if _beaker_cache is None: + if "manager" in cache.template.cache_args: + _beaker_cache = cache.template.cache_args["manager"] + else: + _beaker_cache = beaker_cache.CacheManager() + super().__init__(cache) + + def _get_cache(self, **kw): + expiretime = kw.pop("timeout", None) + if "dir" in kw: + kw["data_dir"] = kw.pop("dir") + elif self.cache.template.module_directory: + kw["data_dir"] = self.cache.template.module_directory + + if "manager" in kw: + kw.pop("manager") + + if kw.get("type") == "memcached": + kw["type"] = "ext:memcached" + + if "region" in kw: + region = kw.pop("region") + cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) + else: + cache = _beaker_cache.get_cache(self.cache.id, **kw) + cache_args = {"starttime": self.cache.starttime} + if expiretime: + cache_args["expiretime"] = expiretime + return cache, cache_args + + def get_or_create(self, key, creation_function, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, createfunc=creation_function, **kw) + + def put(self, key, value, **kw): + cache, kw = self._get_cache(**kw) + cache.put(key, value, **kw) + + def get(self, key, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, **kw) + + def invalidate(self, key, **kw): + cache, kw = self._get_cache(**kw) + cache.remove_value(key, **kw) diff --git a/venv/lib/python3.12/site-packages/mako/ext/extract.py b/venv/lib/python3.12/site-packages/mako/ext/extract.py new file mode 100644 index 0000000..609c5c2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ext/extract.py @@ -0,0 +1,129 @@ +# ext/extract.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from io import BytesIO +from io import StringIO +import re + +from mako import lexer +from mako import parsetree + + 
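+# Usage sketch for the Babel entry point defined in babelplugin.py above
+# (illustrative only -- the inline template bytes and the "_" keyword are
+# hypothetical, not part of this module):
+#
+#     from io import BytesIO
+#     from mako.ext.babelplugin import extract
+#
+#     template = BytesIO(b'<p>${_("Hello, world")}</p>')
+#     for lineno, funcname, message, comments in extract(
+#         template, ["_"], [], {}
+#     ):
+#         print(lineno, funcname, message, comments)
+
+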
+class MessageExtractor: + use_bytes = True + + def process_file(self, fileobj): + template_node = lexer.Lexer( + fileobj.read(), input_encoding=self.config["encoding"] + ).parse() + yield from self.extract_nodes(template_node.get_children()) + + def extract_nodes(self, nodes): + translator_comments = [] + in_translator_comments = False + input_encoding = self.config["encoding"] or "ascii" + comment_tags = list( + filter(None, re.split(r"\s+", self.config["comment-tags"])) + ) + + for node in nodes: + child_nodes = None + if ( + in_translator_comments + and isinstance(node, parsetree.Text) + and not node.content.strip() + ): + # Ignore whitespace within translator comments + continue + + if isinstance(node, parsetree.Comment): + value = node.text.strip() + if in_translator_comments: + translator_comments.extend( + self._split_comment(node.lineno, value) + ) + continue + for comment_tag in comment_tags: + if value.startswith(comment_tag): + in_translator_comments = True + translator_comments.extend( + self._split_comment(node.lineno, value) + ) + continue + + if isinstance(node, parsetree.DefTag): + code = node.function_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.BlockTag): + code = node.body_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.CallTag): + code = node.code.code + child_nodes = node.nodes + elif isinstance(node, parsetree.PageTag): + code = node.body_decl.code + elif isinstance(node, parsetree.CallNamespaceTag): + code = node.expression + child_nodes = node.nodes + elif isinstance(node, parsetree.ControlLine): + if node.isend: + in_translator_comments = False + continue + code = node.text + elif isinstance(node, parsetree.Code): + in_translator_comments = False + code = node.code.code + elif isinstance(node, parsetree.Expression): + code = node.code.code + else: + continue + + # Comments don't apply unless they immediately precede the message + if ( + translator_comments + and translator_comments[-1][0] < node.lineno - 1 + ): + translator_comments = [] + + translator_strings = [ + comment[1] for comment in translator_comments + ] + + if isinstance(code, str) and self.use_bytes: + code = code.encode(input_encoding, "backslashreplace") + + used_translator_comments = False + # We add extra newline to work around a pybabel bug + # (see python-babel/babel#274, parse_encoding dies if the first + # input string of the input is non-ascii) + # Also, because we added it, we have to subtract one from + # node.lineno + if self.use_bytes: + code = BytesIO(b"\n" + code) + else: + code = StringIO("\n" + code) + + for message in self.process_python( + code, node.lineno - 1, translator_strings + ): + yield message + used_translator_comments = True + + if used_translator_comments: + translator_comments = [] + in_translator_comments = False + + if child_nodes: + yield from self.extract_nodes(child_nodes) + + @staticmethod + def _split_comment(lineno, comment): + """Return the multiline comment at lineno split into a list of + comment line numbers and the accompanying comment line""" + return [ + (lineno + index, line) + for index, line in enumerate(comment.splitlines()) + ] diff --git a/venv/lib/python3.12/site-packages/mako/ext/linguaplugin.py b/venv/lib/python3.12/site-packages/mako/ext/linguaplugin.py new file mode 100644 index 0000000..118668d --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ext/linguaplugin.py @@ -0,0 +1,57 @@ +# ext/linguaplugin.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of 
Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import contextlib +import io + +from lingua.extractors import Extractor +from lingua.extractors import get_extractor +from lingua.extractors import Message + +from mako.ext.extract import MessageExtractor + + +class LinguaMakoExtractor(Extractor, MessageExtractor): + """Mako templates""" + + use_bytes = False + extensions = [".mako"] + default_config = {"encoding": "utf-8", "comment-tags": ""} + + def __call__(self, filename, options, fileobj=None): + self.options = options + self.filename = filename + self.python_extractor = get_extractor("x.py") + if fileobj is None: + ctx = open(filename, "r") + else: + ctx = contextlib.nullcontext(fileobj) + with ctx as file_: + yield from self.process_file(file_) + + def process_python(self, code, code_lineno, translator_strings): + source = code.getvalue().strip() + if source.endswith(":"): + if source in ("try:", "else:") or source.startswith("except"): + source = "" # Ignore try/except and else + elif source.startswith("elif"): + source = source[2:] # Replace "elif" with "if" + source += "pass" + code = io.StringIO(source) + for msg in self.python_extractor( + self.filename, self.options, code, code_lineno - 1 + ): + if translator_strings: + msg = Message( + msg.msgctxt, + msg.msgid, + msg.msgid_plural, + msg.flags, + " ".join(translator_strings + [msg.comment]), + msg.tcomment, + msg.location, + ) + yield msg diff --git a/venv/lib/python3.12/site-packages/mako/ext/preprocessors.py b/venv/lib/python3.12/site-packages/mako/ext/preprocessors.py new file mode 100644 index 0000000..21f2db5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/ext/preprocessors.py @@ -0,0 +1,20 @@ +# ext/preprocessors.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""preprocessing functions, used with the 'preprocessor' +argument on Template, TemplateLookup""" + +import re + + +def convert_comments(text): + """preprocess old style comments. 
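+
+    Mako once accepted single-``#`` comment lines; current versions require
+    ``##``, so this preprocessor rewrites the old form to the new one.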
+
+    example:
+
+        from mako.ext.preprocessors import convert_comments
+        t = Template(..., preprocessor=convert_comments)"""
+    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
diff --git a/venv/lib/python3.12/site-packages/mako/ext/pygmentplugin.py b/venv/lib/python3.12/site-packages/mako/ext/pygmentplugin.py
new file mode 100644
index 0000000..74a8fb9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/mako/ext/pygmentplugin.py
@@ -0,0 +1,150 @@
+# ext/pygmentplugin.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexer import bygroups
+from pygments.lexer import DelegatingLexer
+from pygments.lexer import include
+from pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
+
+class MakoLexer(RegexLexer):
+    name = "Mako"
+    aliases = ["mako"]
+    filenames = ["*.mao"]
+
+    tokens = {
+        "root": [
+            (
+                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Keyword, Other),
+            ),
+            (
+                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+            ),
+            (
+                r"(\s*)(##[^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Other),
+            ),
+            (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+            (
+                r"(<%)([\w\.\:]+)",
+                bygroups(Comment.Preproc, Name.Builtin),
+                "tag",
+            ),
+            (
+                r"(</%)([\w\.\:]+)(>)",
+                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+            ),
+            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+            (
+                r"(?s)(<%(?:!?))(.*?)(%>)",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"(\$\{)(.*?)(\})",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"""(?sx)
+                (.+?)                # anything, followed by:
+                (?:
+                 (?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line
+                 (?=\#\*) |          # multiline comment
+                 (?=</?[%&]) |       # a python block
+                                     # call start or end
+                 (?=\$\{) |          # a substitution
+                 (?<=\n)(?=\s*%) |
+                                     # - don't consume
+                 (\\\n) |            # an escaped newline
+                 \Z                  # end of string
+                )
+            """,
+                bygroups(Other, Operator),
+            ),
+        ],
+        "ondeftags": [
+            (r"<%", Comment.Preproc),
+            (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
+            include("tag"),
+        ],
+        "tag": [
+            (
+                r'((?:\w+)\s*=)(\s*)(".*?")',
+                bygroups(Name.Attribute, Text, String),
+            ),
+            (r"/?\s*>", Comment.Preproc, "#pop"),
+            (r"\s+", Text),
+        ],
+        "attr": [
+            ('".*?"', String, "#pop"),
+            ("'.*?'", String, "#pop"),
+            (r"[^\s>]+", String, "#pop"),
+        ],
+    }
+
+
+class MakoHtmlLexer(DelegatingLexer):
+    name = "HTML+Mako"
+    aliases = ["html+mako"]
+
+    def __init__(self, **options):
+        super().__init__(HtmlLexer, MakoLexer, **options)
+
+
+class MakoXmlLexer(DelegatingLexer):
+    name = "XML+Mako"
+    aliases = ["xml+mako"]
+
+    def __init__(self, **options):
+        super().__init__(XmlLexer, MakoLexer, **options)
+
+
+class MakoJavascriptLexer(DelegatingLexer):
+    name = "JavaScript+Mako"
+    aliases = ["js+mako", "javascript+mako"]
+
+    def __init__(self, **options):
+        super().__init__(JavascriptLexer, MakoLexer, **options)
+
+
+class MakoCssLexer(DelegatingLexer):
+    name = "CSS+Mako"
+    aliases = ["css+mako"]
+
+    def __init__(self, **options):
+        super().__init__(CssLexer, MakoLexer, **options)
+
+
+pygments_html_formatter = HtmlFormatter(
+    cssclass="syntax-highlighted", linenos=True
+)
+
+
+def syntax_highlight(filename="", language=None):
+    mako_lexer = MakoLexer()
+    python_lexer = Python3Lexer()
+    if filename.startswith("memory:") or language == "mako":
+        return lambda string: highlight(
+            string, mako_lexer, pygments_html_formatter
+        )
+    return lambda string: highlight(
+        string, python_lexer, pygments_html_formatter
+    )
diff --git a/venv/lib/python3.12/site-packages/mako/ext/turbogears.py b/venv/lib/python3.12/site-packages/mako/ext/turbogears.py
new file mode 100644
index 0000000..25ad35e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/mako/ext/turbogears.py
@@ -0,0 +1,61 @@
+# ext/turbogears.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from mako import compat
+from mako.lookup import TemplateLookup
+from mako.template import Template
+
+
+class TGPlugin:
+
+    """TurboGears compatible Template Plugin."""
+
+    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
+        self.extra_vars_func = extra_vars_func
+        self.extension = extension
+        if not options:
+            options = {}
+
+        # Pull the options out and initialize the lookup
+        lookup_options = {}
+        for k, v in options.items():
+            if k.startswith("mako."):
+                lookup_options[k[5:]] = v
+            elif k in ["directories", "filesystem_checks", "module_directory"]:
+                lookup_options[k] = v
+        self.lookup = TemplateLookup(**lookup_options)
+
+        self.tmpl_options = {}
+        # transfer lookup args to template args, based on those available
+        # in getargspec
+        for kw in compat.inspect_getargspec(Template.__init__)[0]:
+            if kw in lookup_options:
+                self.tmpl_options[kw] = lookup_options[kw]
+
+    def load_template(self, templatename, template_string=None):
+        """Loads a template from a file or a string"""
+        if template_string is not None:
+            return Template(template_string, **self.tmpl_options)
+        # Translate TG dot notation to normal / template path
+        if "/" not in templatename:
+            templatename = (
+                "/" + templatename.replace(".", "/") + "." + self.extension
+            )
+
+        # Lookup template
+        return self.lookup.get_template(templatename)
+
+    def render(
+        self, info, format="html", fragment=False, template=None  # noqa
+    ):
+        if isinstance(template, str):
+            template = self.load_template(template)
+
+        # Load extra vars func if provided
+        if self.extra_vars_func:
+            info.update(self.extra_vars_func())
+
+        return template.render(**info)
diff --git a/venv/lib/python3.12/site-packages/mako/filters.py b/venv/lib/python3.12/site-packages/mako/filters.py
new file mode 100644
index 0000000..d5338bd
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/mako/filters.py
@@ -0,0 +1,163 @@
+# mako/filters.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+import codecs
+from html.entities import codepoint2name
+from html.entities import name2codepoint
+import re
+from urllib.parse import quote_plus
+
+import markupsafe
+
+html_escape = markupsafe.escape
+
+xml_escapes = {
+    "&": "&amp;",
+    ">": "&gt;",
+    "<": "&lt;",
+    '"': "&#34;",  # also &quot; in html-only
+    "'": "&#39;",  # also &apos; in html-only
+}
+
+
+def xml_escape(string):
+    return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
+
+
+def url_escape(string):
+    # convert into a list of octets
+    string = string.encode("utf8")
+    return quote_plus(string)
+
+
+def trim(string):
+    return string.strip()
+
+
+class Decode:
+    def __getattr__(self, key):
+        def decode(x):
+            if isinstance(x, str):
+                return x
+            elif not isinstance(x, bytes):
+                return decode(str(x))
+            else:
+                return str(x, encoding=key)
+
+        return decode
+
+
+decode = Decode()
+
+
+class XMLEntityEscaper:
+    def __init__(self, codepoint2name, name2codepoint):
+        self.codepoint2entity = {
+            c: str("&%s;" % n) for c, n in codepoint2name.items()
+        }
+        self.name2codepoint = name2codepoint
+
+    def escape_entities(self, text):
+        """Replace characters with their character entity references.
+
+        Only characters corresponding to a named entity are replaced.
+        """
+        return str(text).translate(self.codepoint2entity)
+
+    def __escape(self, m):
+        codepoint = ord(m.group())
+        try:
+            return self.codepoint2entity[codepoint]
+        except (KeyError, IndexError):
+            return "&#x%X;" % codepoint
+
+    __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
+
+    def escape(self, text):
+        """Replace characters with their character references.
+
+        Replace characters by their named entity references.
+        Non-ASCII characters, if they do not have a named entity reference,
+        are replaced by numerical character references.
+
+        The return value is guaranteed to be ASCII.
+        """
+        return self.__escapable.sub(self.__escape, str(text)).encode("ascii")
+
+    # XXX: This regexp will not match all valid XML entity names__.
+    # (It punts on details involving involving CombiningChars and Extenders.)
+    #
+    # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
+    __characterrefs = re.compile(
+        r"""& (?:
+            \#(\d+)
+            | \#x([\da-f]+)
+            | ( (?!\d) [:\w] [-.:\w]+ )
+            ) ;""",
+        re.X | re.UNICODE,
+    )
+
+    def __unescape(self, m):
+        dval, hval, name = m.groups()
+        if dval:
+            codepoint = int(dval)
+        elif hval:
+            codepoint = int(hval, 16)
+        else:
+            codepoint = self.name2codepoint.get(name, 0xFFFD)
+            # U+FFFD = "REPLACEMENT CHARACTER"
+        if codepoint < 128:
+            return chr(codepoint)
+        return chr(codepoint)
+
+    def unescape(self, text):
+        """Unescape character references.
+
+        All character references (both entity references and numerical
+        character references) are unescaped.
+        """
+        return self.__characterrefs.sub(self.__unescape, text)
+
+
+_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
+
+html_entities_escape = _html_entities_escaper.escape_entities
+html_entities_unescape = _html_entities_escaper.unescape
+
+
+def htmlentityreplace_errors(ex):
+    """An encoding error handler.
+
+    This python codecs error handler replaces unencodable
+    characters with HTML entities, or, if no HTML entity exists for
+    the character, XML character references::
+
+        >>> 'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+        'The cost was &euro;12.'
+    """
+    if isinstance(ex, UnicodeEncodeError):
+        # Handle encoding errors
+        bad_text = ex.object[ex.start : ex.end]
+        text = _html_entities_escaper.escape(bad_text)
+        return (str(text), ex.end)
+    raise ex
+
+
+codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
+
+
+DEFAULT_ESCAPES = {
+    "x": "filters.xml_escape",
+    "h": "filters.html_escape",
+    "u": "filters.url_escape",
+    "trim": "filters.trim",
+    "entity": "filters.html_entities_escape",
+    "unicode": "str",
+    "decode": "decode",
+    "str": "str",
+    "n": "n",
+}
diff --git a/venv/lib/python3.12/site-packages/mako/lexer.py b/venv/lib/python3.12/site-packages/mako/lexer.py
new file mode 100644
index 0000000..37afb4d
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/mako/lexer.py
@@ -0,0 +1,481 @@
+# mako/lexer.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides the Lexer class for parsing template strings into parse trees."""
+
+import codecs
+import re
+
+from mako import exceptions
+from mako import parsetree
+from mako.pygen import adjust_whitespace
+
+_regexp_cache = {}
+
+
+class Lexer:
+    def __init__(
+        self, text, filename=None, input_encoding=None, preprocessor=None
+    ):
+        self.text = text
+        self.filename = filename
+        self.template = parsetree.TemplateNode(self.filename)
+        self.matched_lineno = 1
+        self.matched_charpos = 0
+        self.lineno = 1
+        self.match_position = 0
+        self.tag = []
+        self.control_line = []
+        self.ternary_stack = []
+        self.encoding = input_encoding
+
+        if preprocessor is None:
+            self.preprocessor = []
+        elif not hasattr(preprocessor, "__iter__"):
+            self.preprocessor = [preprocessor]
+        else:
+            self.preprocessor = preprocessor
+
+    @property
+    def exception_kwargs(self):
+        return {
+            "source": self.text,
+            "lineno": self.matched_lineno,
+            "pos": self.matched_charpos,
+            "filename": self.filename,
+        }
+
+    def match(self, regexp, flags=None):
+        """compile the given regexp, cache the reg, and call match_reg()."""
+
+        try:
+            reg = _regexp_cache[(regexp, flags)]
+        except KeyError:
+            reg = re.compile(regexp, flags) if flags else re.compile(regexp)
+            _regexp_cache[(regexp, flags)] = reg
+
+        return self.match_reg(reg)
+
+    def match_reg(self, reg):
+        """match the given regular expression object to the current text
+        position.
+
+        if a match occurs, update the current text and line position.
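+
+        A minimal sketch of the caching wrapper above (the pattern and
+        template text are hypothetical)::
+
+            lexer = Lexer("${x}")
+            lexer.match(r"\$\{")  # compiled once, stored in _regexp_cache
+            lexer.match(r"\$\{")  # later calls reuse the cached pattern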
+ + """ + + mp = self.match_position + + match = reg.match(self.text, self.match_position) + if match: + (start, end) = match.span() + self.match_position = end + 1 if end == start else end + self.matched_lineno = self.lineno + cp = mp - 1 + if cp >= 0 and cp < self.textlength: + cp = self.text[: cp + 1].rfind("\n") + self.matched_charpos = mp - cp + self.lineno += self.text[mp : self.match_position].count("\n") + return match + + def parse_until_text(self, watch_nesting, *text): + startpos = self.match_position + text_re = r"|".join(text) + brace_level = 0 + paren_level = 0 + bracket_level = 0 + while True: + match = self.match(r"#.*\n") + if match: + continue + match = self.match( + r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S + ) + if match: + continue + match = self.match(r"(%s)" % text_re) + if match and not ( + watch_nesting + and (brace_level > 0 or paren_level > 0 or bracket_level > 0) + ): + return ( + self.text[ + startpos : self.match_position - len(match.group(1)) + ], + match.group(1), + ) + elif not match: + match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) + if match: + brace_level += match.group(1).count("{") + brace_level -= match.group(1).count("}") + paren_level += match.group(1).count("(") + paren_level -= match.group(1).count(")") + bracket_level += match.group(1).count("[") + bracket_level -= match.group(1).count("]") + continue + raise exceptions.SyntaxException( + "Expected: %s" % ",".join(text), **self.exception_kwargs + ) + + def append_node(self, nodecls, *args, **kwargs): + kwargs.setdefault("source", self.text) + kwargs.setdefault("lineno", self.matched_lineno) + kwargs.setdefault("pos", self.matched_charpos) + kwargs["filename"] = self.filename + node = nodecls(*args, **kwargs) + if len(self.tag): + self.tag[-1].nodes.append(node) + else: + self.template.nodes.append(node) + # build a set of child nodes for the control line + # (used for loop variable detection) + # also build a set of child nodes on ternary control lines + # (used for determining if a pass needs to be auto-inserted + if self.control_line: + control_frame = self.control_line[-1] + control_frame.nodes.append(node) + if ( + not ( + isinstance(node, parsetree.ControlLine) + and control_frame.is_ternary(node.keyword) + ) + and self.ternary_stack + and self.ternary_stack[-1] + ): + self.ternary_stack[-1][-1].nodes.append(node) + if isinstance(node, parsetree.Tag): + if len(self.tag): + node.parent = self.tag[-1] + self.tag.append(node) + elif isinstance(node, parsetree.ControlLine): + if node.isend: + self.control_line.pop() + self.ternary_stack.pop() + elif node.is_primary: + self.control_line.append(node) + self.ternary_stack.append([]) + elif self.control_line and self.control_line[-1].is_ternary( + node.keyword + ): + self.ternary_stack[-1].append(node) + elif self.control_line and not self.control_line[-1].is_ternary( + node.keyword + ): + raise exceptions.SyntaxException( + "Keyword '%s' not a legal ternary for keyword '%s'" + % (node.keyword, self.control_line[-1].keyword), + **self.exception_kwargs, + ) + + _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n") + + def decode_raw_stream(self, text, decode_raw, known_encoding, filename): + """given string/unicode or bytes/string, determine encoding + from magic encoding comment, return body as unicode + or raw if decode_raw=False + + """ + if isinstance(text, str): + m = self._coding_re.match(text) + encoding = m and m.group(1) or known_encoding or "utf-8" + return encoding, text + + if 
text.startswith(codecs.BOM_UTF8):
+            text = text[len(codecs.BOM_UTF8) :]
+            parsed_encoding = "utf-8"
+            m = self._coding_re.match(text.decode("utf-8", "ignore"))
+            if m is not None and m.group(1) != "utf-8":
+                raise exceptions.CompileException(
+                    "Found utf-8 BOM in file, with conflicting "
+                    "magic encoding comment of '%s'" % m.group(1),
+                    text.decode("utf-8", "ignore"),
+                    0,
+                    0,
+                    filename,
+                )
+        else:
+            m = self._coding_re.match(text.decode("utf-8", "ignore"))
+            parsed_encoding = m.group(1) if m else known_encoding or "utf-8"
+        if decode_raw:
+            try:
+                text = text.decode(parsed_encoding)
+            except UnicodeDecodeError:
+                raise exceptions.CompileException(
+                    "Unicode decode operation of encoding '%s' failed"
+                    % parsed_encoding,
+                    text.decode("utf-8", "ignore"),
+                    0,
+                    0,
+                    filename,
+                )
+
+        return parsed_encoding, text
+
+    def parse(self):
+        self.encoding, self.text = self.decode_raw_stream(
+            self.text, True, self.encoding, self.filename
+        )
+
+        for preproc in self.preprocessor:
+            self.text = preproc(self.text)
+
+        # push the match marker past the
+        # encoding comment.
+        self.match_reg(self._coding_re)
+
+        self.textlength = len(self.text)
+
+        while True:
+            if self.match_position > self.textlength:
+                break
+
+            if self.match_end():
+                break
+            if self.match_expression():
+                continue
+            if self.match_control_line():
+                continue
+            if self.match_comment():
+                continue
+            if self.match_tag_start():
+                continue
+            if self.match_tag_end():
+                continue
+            if self.match_python_block():
+                continue
+            if self.match_percent():
+                continue
+            if self.match_text():
+                continue
+
+            if self.match_position > self.textlength:
+                break
+            # TODO: no coverage here
+            raise exceptions.MakoException("assertion failed")
+
+        if len(self.tag):
+            raise exceptions.SyntaxException(
+                "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                **self.exception_kwargs,
+            )
+        if len(self.control_line):
+            raise exceptions.SyntaxException(
+                "Unterminated control keyword: '%s'"
+                % self.control_line[-1].keyword,
+                self.text,
+                self.control_line[-1].lineno,
+                self.control_line[-1].pos,
+                self.filename,
+            )
+        return self.template
+
+    def match_tag_start(self):
+        reg = r"""
+            \<%         # opening tag
+
+            ([\w\.\:]+) # keyword
+
+            ((?:\s+\w+|\s*=\s*|"[^"]*?"|'[^']*?'|\s*,\s*)*) # attrname, = \
+                        # sign, string expression
+                        # comma is for backwards compat
+                        # identified in #366
+
+            \s*         # more whitespace
+
+            (/)?>       # closing
+
+            """
+
+        match = self.match(
+            reg,
+            re.I | re.S | re.X,
+        )
+
+        if not match:
+            return False
+
+        keyword, attr, isend = match.groups()
+        self.keyword = keyword
+        attributes = {}
+        if attr:
+            for att in re.findall(
+                r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
+            ):
+                key, val1, val2 = att
+                text = val1 or val2
+                text = text.replace("\r\n", "\n")
+                attributes[key] = text
+        self.append_node(parsetree.Tag, keyword, attributes)
+        if isend:
+            self.tag.pop()
+        elif keyword == "text":
+            match = self.match(r"(.*?)(?=\</%text>)", re.S)
+            if not match:
+                raise exceptions.SyntaxException(
+                    "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                    **self.exception_kwargs,
+                )
+            self.append_node(parsetree.Text, match.group(1))
+            return self.match_tag_end()
+        return True
+
+    def match_tag_end(self):
+        match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
+        if match:
+            if not len(self.tag):
+                raise exceptions.SyntaxException(
+                    "Closing tag without opening tag: </%%%s>"
+                    % match.group(1),
+                    **self.exception_kwargs,
+                )
+            elif self.tag[-1].keyword != match.group(1):
+                raise exceptions.SyntaxException(
+                    "Closing tag </%%%s> does not match tag: <%%%s>"
+                    % (match.group(1), self.tag[-1].keyword),
+                    **self.exception_kwargs,
+                )
+            self.tag.pop()
+            return True
+        else:
+            return False
+
+    def match_end(self):
+        match = self.match(r"\Z", re.S)
+        if not match:
+            return False
+
+        string = match.group()
+        if string:
+            return string
+        else:
+            return True
+
+    def match_percent(self):
+        match = self.match(r"(?<=^)(\s*)%%(%*)", re.M)
+        if match:
+            self.append_node(
+                parsetree.Text, match.group(1) + "%" + match.group(2)
+            )
+            return True
+        else:
+            return False
+
+    def match_text(self):
+        match = self.match(
+            r"""
+                (.*?)         # anything, followed by:
+                (
+                 (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
+                                             # comment, preceded by a
+                                             # consumed newline and whitespace
+                 |
+                 (?=\${)      # an expression
+                 |
+                 (?=</?[%&])  # a substitution or block or call start or end
+                              # - don't consume
+                 |
+                 (\\\r?\n)    # an escaped newline - throw away
+                 |
+                 \Z           # end of string
+                )""",
+            re.X | re.S,
+        )
+
+        if match:
+            text = match.group(1)
+            if text:
+                self.append_node(parsetree.Text, text)
+            return True
+        else:
+            return False
+
+    def match_python_block(self):
+        match = self.match(r"<%(!)?")
+        if match:
+            line, pos = self.matched_lineno, self.matched_charpos
+            text, end = self.parse_until_text(False, r"%>")
+            # the trailing newline helps
+            # compiler.parse() not complain about indentation
+            text = adjust_whitespace(text) + "\n"
+            self.append_node(
+                parsetree.Code,
+                text,
+                match.group(1) == "!",
+                lineno=line,
+                pos=pos,
+            )
+            return True
+        else:
+            return False
+
+    def match_expression(self):
+        match = self.match(r"\${")
+        if not match:
+            return False
+
+        line, pos = self.matched_lineno, self.matched_charpos
+        text, end = self.parse_until_text(True, r"\|", r"}")
+        if end == "|":
+            escapes, end = self.parse_until_text(True, r"}")
+        else:
+            escapes = ""
+        text = text.replace("\r\n", "\n")
+        self.append_node(
+            parsetree.Expression,
+            text,
+            escapes.strip(),
+            lineno=line,
+            pos=pos,
+        )
+        return True
+
+    def match_control_line(self):
+        match = self.match(
+            r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\\r?\n)|[^\r\n])*)"
+            r"(?:\r?\n|\Z)",
+            re.M,
+        )
+        if not match:
+            return False
+
+        operator = match.group(1)
+        text = match.group(2)
+        if operator == "%":
+            m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
+            if not m2:
+                raise exceptions.SyntaxException(
+                    "Invalid control line: '%s'" % text,
+                    **self.exception_kwargs,
+                )
+            isend, keyword = m2.group(1, 2)
+            isend = isend is not None
+
+            if isend:
+                if not len(self.control_line):
+                    raise exceptions.SyntaxException(
+                        "No starting keyword '%s' for '%s'" % (keyword, text),
+                        **self.exception_kwargs,
+                    )
+                elif self.control_line[-1].keyword != keyword:
+                    raise exceptions.SyntaxException(
+                        "Keyword '%s' doesn't match keyword '%s'"
+                        % (text, self.control_line[-1].keyword),
+                        **self.exception_kwargs,
+                    )
+            self.append_node(parsetree.ControlLine, keyword, isend, text)
+        else:
+            self.append_node(parsetree.Comment, text)
+        return True
+
+    def match_comment(self):
+        """matches the multiline version of a comment"""
+        match = self.match(r"<%doc>(.*?)</%doc>", re.S)
+        if match:
+            self.append_node(parsetree.Comment, match.group(1))
+            return True
+        else:
+            return False
diff --git a/venv/lib/python3.12/site-packages/mako/lookup.py b/venv/lib/python3.12/site-packages/mako/lookup.py
new file mode 100644
index 0000000..1a431c8
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/mako/lookup.py
@@ -0,0 +1,361 @@
+# mako/lookup.py
+# Copyright 2006-2025 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import os
+import posixpath
+import re
+import stat
+import threading
+
+from mako import exceptions
+from mako import util
+from mako.template import Template
+
+
+class TemplateCollection:
+
+    """Represent a collection of :class:`.Template` objects,
+    identifiable via URI.
+
+    A :class:`.TemplateCollection` is linked to the usage of
+    all template tags that address other templates, such
+    as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
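+
+    A minimal sketch of that resolution flow, using the default
+    :class:`.TemplateLookup` implementation described below (the
+    directory and URI are illustrative only):
+
+    .. sourcecode:: python
+
+        lookup = TemplateLookup(directories=["/path/to/templates"])
+        template = lookup.get_template("/index.html")
+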
+ The ``file`` attribute of each of those tags refers + to a string URI that is passed to that :class:`.Template` + object's :class:`.TemplateCollection` for resolution. + + :class:`.TemplateCollection` is an abstract class, + with the usual default implementation being :class:`.TemplateLookup`. + + """ + + def has_template(self, uri): + """Return ``True`` if this :class:`.TemplateLookup` is + capable of returning a :class:`.Template` object for the + given ``uri``. + + :param uri: String URI of the template to be resolved. + + """ + try: + self.get_template(uri) + return True + except exceptions.TemplateLookupException: + return False + + def get_template(self, uri, relativeto=None): + """Return a :class:`.Template` object corresponding to the given + ``uri``. + + The default implementation raises + :class:`.NotImplementedError`. Implementations should + raise :class:`.TemplateLookupException` if the given ``uri`` + cannot be resolved. + + :param uri: String URI of the template to be resolved. + :param relativeto: if present, the given ``uri`` is assumed to + be relative to this URI. + + """ + raise NotImplementedError() + + def filename_to_uri(self, uri, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + return uri + + def adjust_uri(self, uri, filename): + """Adjust the given ``uri`` based on the calling ``filename``. + + When this method is called from the runtime, the + ``filename`` parameter is taken directly to the ``filename`` + attribute of the calling template. Therefore a custom + :class:`.TemplateCollection` subclass can place any string + identifier desired in the ``filename`` parameter of the + :class:`.Template` objects it constructs and have them come back + here. + + """ + return uri + + +class TemplateLookup(TemplateCollection): + + """Represent a collection of templates that locates template source files + from the local filesystem. + + The primary argument is the ``directories`` argument, the list of + directories to search: + + .. sourcecode:: python + + lookup = TemplateLookup(["/path/to/templates"]) + some_template = lookup.get_template("/index.html") + + The :class:`.TemplateLookup` can also be given :class:`.Template` objects + programatically using :meth:`.put_string` or :meth:`.put_template`: + + .. sourcecode:: python + + lookup = TemplateLookup() + lookup.put_string("base.html", ''' + ${self.next()} + ''') + lookup.put_string("hello.html", ''' + <%include file='base.html'/> + + Hello, world ! + ''') + + + :param directories: A list of directory names which will be + searched for a particular template URI. The URI is appended + to each directory and the filesystem checked. + + :param collection_size: Approximate size of the collection used + to store templates. If left at its default of ``-1``, the size + is unbounded, and a plain Python dictionary is used to + relate URI strings to :class:`.Template` instances. + Otherwise, a least-recently-used cache object is used which + will maintain the size of the collection approximately to + the number given. + + :param filesystem_checks: When at its default value of ``True``, + each call to :meth:`.TemplateLookup.get_template()` will + compare the filesystem last modified time to the time in + which an existing :class:`.Template` object was created. + This allows the :class:`.TemplateLookup` to regenerate a + new :class:`.Template` whenever the original source has + been updated. Set this to ``False`` for a very minor + performance increase. 
+ + :param modulename_callable: A callable which, when present, + is passed the path of the source file as well as the + requested URI, and then returns the full path of the + generated Python module file. This is used to inject + alternate schemes for Python module location. If left at + its default of ``None``, the built in system of generation + based on ``module_directory`` plus ``uri`` is used. + + All other keyword parameters available for + :class:`.Template` are mirrored here. When new + :class:`.Template` objects are created, the keywords + established with this :class:`.TemplateLookup` are passed on + to each new :class:`.Template`. + + """ + + def __init__( + self, + directories=None, + module_directory=None, + filesystem_checks=True, + collection_size=-1, + format_exceptions=False, + error_handler=None, + output_encoding=None, + encoding_errors="strict", + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + modulename_callable=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + input_encoding=None, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): + self.directories = [ + posixpath.normpath(d) for d in util.to_list(directories, ()) + ] + self.module_directory = module_directory + self.modulename_callable = modulename_callable + self.filesystem_checks = filesystem_checks + self.collection_size = collection_size + + if cache_args is None: + cache_args = {} + # transfer deprecated cache_* args + if cache_dir: + cache_args.setdefault("dir", cache_dir) + if cache_url: + cache_args.setdefault("url", cache_url) + if cache_type: + cache_args.setdefault("type", cache_type) + + self.template_args = { + "format_exceptions": format_exceptions, + "error_handler": error_handler, + "include_error_handler": include_error_handler, + "output_encoding": output_encoding, + "cache_impl": cache_impl, + "encoding_errors": encoding_errors, + "input_encoding": input_encoding, + "module_directory": module_directory, + "module_writer": module_writer, + "cache_args": cache_args, + "cache_enabled": cache_enabled, + "default_filters": default_filters, + "buffer_filters": buffer_filters, + "strict_undefined": strict_undefined, + "imports": imports, + "future_imports": future_imports, + "enable_loop": enable_loop, + "preprocessor": preprocessor, + "lexer_cls": lexer_cls, + } + + if collection_size == -1: + self._collection = {} + self._uri_cache = {} + else: + self._collection = util.LRUCache(collection_size) + self._uri_cache = util.LRUCache(collection_size) + self._mutex = threading.Lock() + + def get_template(self, uri): + """Return a :class:`.Template` object corresponding to the given + ``uri``. + + .. note:: The ``relativeto`` argument is not supported here at + the moment. + + """ + + try: + if self.filesystem_checks: + return self._check(uri, self._collection[uri]) + else: + return self._collection[uri] + except KeyError as e: + u = re.sub(r"^\/+", "", uri) + for dir_ in self.directories: + # make sure the path seperators are posix - os.altsep is empty + # on POSIX and cannot be used. 
+ dir_ = dir_.replace(os.path.sep, posixpath.sep) + srcfile = posixpath.normpath(posixpath.join(dir_, u)) + if os.path.isfile(srcfile): + return self._load(srcfile, uri) + else: + raise exceptions.TopLevelLookupException( + "Can't locate template for uri %r" % uri + ) from e + + def adjust_uri(self, uri, relativeto): + """Adjust the given ``uri`` based on the given relative URI.""" + + key = (uri, relativeto) + if key in self._uri_cache: + return self._uri_cache[key] + + if uri[0] == "/": + v = self._uri_cache[key] = uri + elif relativeto is not None: + v = self._uri_cache[key] = posixpath.join( + posixpath.dirname(relativeto), uri + ) + else: + v = self._uri_cache[key] = "/" + uri + return v + + def filename_to_uri(self, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + try: + return self._uri_cache[filename] + except KeyError: + value = self._relativeize(filename) + self._uri_cache[filename] = value + return value + + def _relativeize(self, filename): + """Return the portion of a filename that is 'relative' + to the directories in this lookup. + + """ + + filename = posixpath.normpath(filename) + for dir_ in self.directories: + if filename[0 : len(dir_)] == dir_: + return filename[len(dir_) :] + else: + return None + + def _load(self, filename, uri): + self._mutex.acquire() + try: + try: + # try returning from collection one + # more time in case concurrent thread already loaded + return self._collection[uri] + except KeyError: + pass + try: + if self.modulename_callable is not None: + module_filename = self.modulename_callable(filename, uri) + else: + module_filename = None + self._collection[uri] = template = Template( + uri=uri, + filename=posixpath.normpath(filename), + lookup=self, + module_filename=module_filename, + **self.template_args, + ) + return template + except: + # if compilation fails etc, ensure + # template is removed from collection, + # re-raise + self._collection.pop(uri, None) + raise + finally: + self._mutex.release() + + def _check(self, uri, template): + if template.filename is None: + return template + + try: + template_stat = os.stat(template.filename) + if template.module._modified_time >= template_stat[stat.ST_MTIME]: + return template + self._collection.pop(uri, None) + return self._load(template.filename, uri) + except OSError as e: + self._collection.pop(uri, None) + raise exceptions.TemplateLookupException( + "Can't locate template for uri %r" % uri + ) from e + + def put_string(self, uri, text): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given string of + ``text``. + + """ + self._collection[uri] = Template( + text, lookup=self, uri=uri, **self.template_args + ) + + def put_template(self, uri, template): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given + :class:`.Template` object. 
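The same resolution machinery applies to templates registered as strings: a relative ``file=`` URI in one template is adjusted against the calling template's URI by ``adjust_uri()`` above, then fetched back through ``get_template()``. A short sketch, assuming this venv's Mako; the template names are made up:

.. sourcecode:: python

    from mako.lookup import TemplateLookup

    lookup = TemplateLookup()
    lookup.put_string("base.html", "<html><body>${self.body()}</body></html>")
    lookup.put_string("hello.html", '<%inherit file="base.html"/>Hello, world!')

    # the inherit tag resolves "base.html" through this same lookup,
    # hitting the in-memory collection directly
    print(lookup.get_template("hello.html").render())
    # <html><body>Hello, world!</body></html>

    # relative uris are joined against the calling template's path
    print(lookup.adjust_uri("widget.html", "/admin/index.html"))
    # /admin/widget.html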
+ + """ + self._collection[uri] = template diff --git a/venv/lib/python3.12/site-packages/mako/parsetree.py b/venv/lib/python3.12/site-packages/mako/parsetree.py new file mode 100644 index 0000000..f7bdda8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/parsetree.py @@ -0,0 +1,656 @@ +# mako/parsetree.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""defines the parse tree components for Mako templates.""" + +import re + +from mako import ast +from mako import exceptions +from mako import filters +from mako import util + + +class Node: + + """base class for a Node in the parse tree.""" + + def __init__(self, source, lineno, pos, filename): + self.source = source + self.lineno = lineno + self.pos = pos + self.filename = filename + + @property + def exception_kwargs(self): + return { + "source": self.source, + "lineno": self.lineno, + "pos": self.pos, + "filename": self.filename, + } + + def get_children(self): + return [] + + def accept_visitor(self, visitor): + def traverse(node): + for n in node.get_children(): + n.accept_visitor(visitor) + + method = getattr(visitor, "visit" + self.__class__.__name__, traverse) + method(self) + + +class TemplateNode(Node): + + """a 'container' node that stores the overall collection of nodes.""" + + def __init__(self, filename): + super().__init__("", 0, 0, filename) + self.nodes = [] + self.page_attributes = {} + + def get_children(self): + return self.nodes + + def __repr__(self): + return "TemplateNode(%s, %r)" % ( + util.sorted_dict_repr(self.page_attributes), + self.nodes, + ) + + +class ControlLine(Node): + + """defines a control line, a line-oriented python line or end tag. + + e.g.:: + + % if foo: + (markup) + % endif + + """ + + has_loop_context = False + + def __init__(self, keyword, isend, text, **kwargs): + super().__init__(**kwargs) + self.text = text + self.keyword = keyword + self.isend = isend + self.is_primary = keyword in ["for", "if", "while", "try", "with"] + self.nodes = [] + if self.isend: + self._declared_identifiers = [] + self._undeclared_identifiers = [] + else: + code = ast.PythonFragment(text, **self.exception_kwargs) + self._declared_identifiers = code.declared_identifiers + self._undeclared_identifiers = code.undeclared_identifiers + + def get_children(self): + return self.nodes + + def declared_identifiers(self): + return self._declared_identifiers + + def undeclared_identifiers(self): + return self._undeclared_identifiers + + def is_ternary(self, keyword): + """return true if the given keyword is a ternary keyword + for this ControlLine""" + + cases = { + "if": {"else", "elif"}, + "try": {"except", "finally"}, + "for": {"else"}, + } + + return keyword in cases.get(self.keyword, set()) + + def __repr__(self): + return "ControlLine(%r, %r, %r, %r)" % ( + self.keyword, + self.text, + self.isend, + (self.lineno, self.pos), + ) + + +class Text(Node): + """defines plain text in the template.""" + + def __init__(self, content, **kwargs): + super().__init__(**kwargs) + self.content = content + + def __repr__(self): + return "Text(%r, %r)" % (self.content, (self.lineno, self.pos)) + + +class Code(Node): + """defines a Python code block, either inline or module level. + + e.g.:: + + inline: + <% + x = 12 + %> + + module level: + <%! 
+        import logger
+    %>
+
+    """
+
+    def __init__(self, text, ismodule, **kwargs):
+        super().__init__(**kwargs)
+        self.text = text
+        self.ismodule = ismodule
+        self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+    def declared_identifiers(self):
+        return self.code.declared_identifiers
+
+    def undeclared_identifiers(self):
+        return self.code.undeclared_identifiers
+
+    def __repr__(self):
+        return "Code(%r, %r, %r)" % (
+            self.text,
+            self.ismodule,
+            (self.lineno, self.pos),
+        )
+
+
+class Comment(Node):
+    """defines a comment line.
+
+    # this is a comment
+
+    """
+
+    def __init__(self, text, **kwargs):
+        super().__init__(**kwargs)
+        self.text = text
+
+    def __repr__(self):
+        return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
+
+
+class Expression(Node):
+    """defines an inline expression.
+
+    ${x+y}
+
+    """
+
+    def __init__(self, text, escapes, **kwargs):
+        super().__init__(**kwargs)
+        self.text = text
+        self.escapes = escapes
+        self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
+        self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+    def declared_identifiers(self):
+        return []
+
+    def undeclared_identifiers(self):
+        # TODO: make the "filter" shortcut list configurable at parse/gen time
+        return self.code.undeclared_identifiers.union(
+            self.escapes_code.undeclared_identifiers.difference(
+                filters.DEFAULT_ESCAPES
+            )
+        ).difference(self.code.declared_identifiers)
+
+    def __repr__(self):
+        return "Expression(%r, %r, %r)" % (
+            self.text,
+            self.escapes_code.args,
+            (self.lineno, self.pos),
+        )
+
+
+class _TagMeta(type):
+    """metaclass to allow Tag to produce a subclass according to
+    its keyword"""
+
+    _classmap = {}
+
+    def __init__(cls, clsname, bases, dict_):
+        if getattr(cls, "__keyword__", None) is not None:
+            cls._classmap[cls.__keyword__] = cls
+        super().__init__(clsname, bases, dict_)
+
+    def __call__(cls, keyword, attributes, **kwargs):
+        if ":" in keyword:
+            ns, defname = keyword.split(":")
+            return type.__call__(
+                CallNamespaceTag, ns, defname, attributes, **kwargs
+            )
+
+        try:
+            cls = _TagMeta._classmap[keyword]
+        except KeyError:
+            raise exceptions.CompileException(
+                "No such tag: '%s'" % keyword,
+                source=kwargs["source"],
+                lineno=kwargs["lineno"],
+                pos=kwargs["pos"],
+                filename=kwargs["filename"],
+            )
+        return type.__call__(cls, keyword, attributes, **kwargs)
+
+
+class Tag(Node, metaclass=_TagMeta):
+    """abstract base class for tags.
+
+    e.g.::
+
+        <%sometag/>
+
+        <%someothertag>
+            stuff
+        </%someothertag>
+
+    """
+
+    __keyword__ = None
+
+    def __init__(
+        self,
+        keyword,
+        attributes,
+        expressions,
+        nonexpressions,
+        required,
+        **kwargs,
+    ):
+        r"""construct a new Tag instance.
+
+        this constructor is not called directly, and is only called
+        by subclasses.
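The ``accept_visitor()`` protocol on ``Node`` and the keyword-to-class mapping installed by ``_TagMeta`` can be seen end to end by lexing a small template and walking the tree. A sketch, assuming this venv's Mako; the template text is illustrative:

.. sourcecode:: python

    from mako.lexer import Lexer

    tree = Lexer(
        "Hello ${name}!\n"
        "% for item in items:\n"
        "  * ${item}\n"
        "% endfor\n"
    ).parse()  # returns a TemplateNode

    class Dumper:
        # accept_visitor() dispatches on "visit" + class name and falls
        # back to traversing get_children(); the lexer records control
        # lines flat, with end lines as their own isend=True nodes
        def visitExpression(self, node):
            print("expression:", node.text, node.undeclared_identifiers())

        def visitControlLine(self, node):
            print("control:", node.keyword, "isend=%r" % node.isend)

    tree.accept_visitor(Dumper())

The ``undeclared_identifiers()`` call above reports, for example, ``{'name'}`` for the first expression, which is the analysis that drives automatic ``context.get()`` lookups in the generated module.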
+ + :param keyword: the tag keyword + + :param attributes: raw dictionary of attribute key/value pairs + + :param expressions: a set of identifiers that are legal attributes, + which can also contain embedded expressions + + :param nonexpressions: a set of identifiers that are legal + attributes, which cannot contain embedded expressions + + :param \**kwargs: + other arguments passed to the Node superclass (lineno, pos) + + """ + super().__init__(**kwargs) + self.keyword = keyword + self.attributes = attributes + self._parse_attributes(expressions, nonexpressions) + missing = [r for r in required if r not in self.parsed_attributes] + if len(missing): + raise exceptions.CompileException( + ( + "Missing attribute(s): %s" + % ",".join(repr(m) for m in missing) + ), + **self.exception_kwargs, + ) + + self.parent = None + self.nodes = [] + + def is_root(self): + return self.parent is None + + def get_children(self): + return self.nodes + + def _parse_attributes(self, expressions, nonexpressions): + undeclared_identifiers = set() + self.parsed_attributes = {} + for key in self.attributes: + if key in expressions: + expr = [] + for x in re.compile(r"(\${(?:[^$]*?{.+|.+?)})", re.S).split( + self.attributes[key] + ): + m = re.compile(r"^\${(.+?)}$", re.S).match(x) + if m: + code = ast.PythonCode( + m.group(1).rstrip(), **self.exception_kwargs + ) + # we aren't discarding "declared_identifiers" here, + # which we do so that list comprehension-declared + # variables aren't counted. As yet can't find a + # condition that requires it here. + undeclared_identifiers = undeclared_identifiers.union( + code.undeclared_identifiers + ) + expr.append("(%s)" % m.group(1)) + elif x: + expr.append(repr(x)) + self.parsed_attributes[key] = " + ".join(expr) or repr("") + elif key in nonexpressions: + if re.search(r"\${.+?}", self.attributes[key]): + raise exceptions.CompileException( + "Attribute '%s' in tag '%s' does not allow embedded " + "expressions" % (key, self.keyword), + **self.exception_kwargs, + ) + self.parsed_attributes[key] = repr(self.attributes[key]) + else: + raise exceptions.CompileException( + "Invalid attribute for tag '%s': '%s'" + % (self.keyword, key), + **self.exception_kwargs, + ) + self.expression_undeclared_identifiers = undeclared_identifiers + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + return self.expression_undeclared_identifiers + + def __repr__(self): + return "%s(%r, %s, %r, %r)" % ( + self.__class__.__name__, + self.keyword, + util.sorted_dict_repr(self.attributes), + (self.lineno, self.pos), + self.nodes, + ) + + +class IncludeTag(Tag): + __keyword__ = "include" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, + attributes, + ("file", "import", "args"), + (), + ("file",), + **kwargs, + ) + self.page_args = ast.PythonCode( + "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return [] + + def undeclared_identifiers(self): + identifiers = self.page_args.undeclared_identifiers.difference( + {"__DUMMY"} + ).difference(self.page_args.declared_identifiers) + return identifiers.union(super().undeclared_identifiers()) + + +class NamespaceTag(Tag): + __keyword__ = "namespace" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, + attributes, + ("file",), + ("name", "inheritable", "import", "module"), + (), + **kwargs, + ) + + self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self)))) + if "name" not in attributes 
and "import" not in attributes: + raise exceptions.CompileException( + "'name' and/or 'import' attributes are required " + "for <%namespace>", + **self.exception_kwargs, + ) + if "file" in attributes and "module" in attributes: + raise exceptions.CompileException( + "<%namespace> may only have one of 'file' or 'module'", + **self.exception_kwargs, + ) + + def declared_identifiers(self): + return [] + + +class TextTag(Tag): + __keyword__ = "text" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__(keyword, attributes, (), ("filter"), (), **kwargs) + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + def undeclared_identifiers(self): + return self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ).union(self.expression_undeclared_identifiers) + + +class DefTag(Tag): + __keyword__ = "def" + + def __init__(self, keyword, attributes, **kwargs): + expressions = ["buffered", "cached"] + [ + c for c in attributes if c.startswith("cache_") + ] + + super().__init__( + keyword, + attributes, + expressions, + ("name", "filter", "decorator"), + ("name",), + **kwargs, + ) + name = attributes["name"] + if re.match(r"^[\w_]+$", name): + raise exceptions.CompileException( + "Missing parenthesis in %def", **self.exception_kwargs + ) + self.function_decl = ast.FunctionDecl( + "def " + name + ":pass", **self.exception_kwargs + ) + self.name = self.function_decl.funcname + self.decorator = attributes.get("decorator", "") + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + is_anonymous = False + is_block = False + + @property + def funcname(self): + return self.function_decl.funcname + + def get_argument_expressions(self, **kw): + return self.function_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.function_decl.allargnames + + def undeclared_identifiers(self): + res = [] + for c in self.function_decl.defaults: + res += list( + ast.PythonCode( + c, **self.exception_kwargs + ).undeclared_identifiers + ) + return ( + set(res) + .union( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ) + .union(self.expression_undeclared_identifiers) + .difference(self.function_decl.allargnames) + ) + + +class BlockTag(Tag): + __keyword__ = "block" + + def __init__(self, keyword, attributes, **kwargs): + expressions = ["buffered", "cached", "args"] + [ + c for c in attributes if c.startswith("cache_") + ] + + super().__init__( + keyword, + attributes, + expressions, + ("name", "filter", "decorator"), + (), + **kwargs, + ) + name = attributes.get("name") + if name and not re.match(r"^[\w_]+$", name): + raise exceptions.CompileException( + "%block may not specify an argument signature", + **self.exception_kwargs, + ) + if not name and attributes.get("args", None): + raise exceptions.CompileException( + "Only named %blocks may specify args", **self.exception_kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + self.name = name + self.decorator = attributes.get("decorator", "") + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + is_block = True + + @property + def is_anonymous(self): + return self.name is None + + @property + def funcname(self): + return self.name or "__M_anon_%d" % (self.lineno,) + + def get_argument_expressions(self, **kw): + return 
self.body_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.body_decl.allargnames + + def undeclared_identifiers(self): + return ( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ).union(self.expression_undeclared_identifiers) + + +class CallTag(Tag): + __keyword__ = "call" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs + ) + self.expression = attributes["expr"] + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) + + +class CallNamespaceTag(Tag): + def __init__(self, namespace, defname, attributes, **kwargs): + super().__init__( + namespace + ":" + defname, + attributes, + tuple(attributes.keys()) + ("args",), + (), + (), + **kwargs, + ) + + self.expression = "%s.%s(%s)" % ( + namespace, + defname, + ",".join( + "%s=%s" % (k, v) + for k, v in self.parsed_attributes.items() + if k != "args" + ), + ) + + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) + + +class InheritTag(Tag): + __keyword__ = "inherit" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, attributes, ("file",), (), ("file",), **kwargs + ) + + +class PageTag(Tag): + __keyword__ = "page" + + def __init__(self, keyword, attributes, **kwargs): + expressions = [ + "cached", + "args", + "expression_filter", + "enable_loop", + ] + [c for c in attributes if c.startswith("cache_")] + + super().__init__(keyword, attributes, expressions, (), (), **kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + self.filter_args = ast.ArgumentList( + attributes.get("expression_filter", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.body_decl.allargnames diff --git a/venv/lib/python3.12/site-packages/mako/pygen.py b/venv/lib/python3.12/site-packages/mako/pygen.py new file mode 100644 index 0000000..41f9afd --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/pygen.py @@ -0,0 +1,309 @@ +# mako/pygen.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""utilities for generating and formatting literal Python code.""" + +import re + +from mako import exceptions + + +class PythonPrinter: + def __init__(self, stream): + # indentation counter + self.indent = 0 + + # a stack storing information about why we incremented + # the indentation counter, to help us determine if we + # should decrement it + self.indent_detail = [] + + # the string of whitespace multiplied by the indent + # counter to produce a line + self.indentstring = " " + + # the stream we are writing to + self.stream = stream + + # current line number + self.lineno = 
1 + + # a list of lines that represents a buffered "block" of code, + # which can be later printed relative to an indent level + self.line_buffer = [] + + self.in_indent_lines = False + + self._reset_multi_line_flags() + + # mapping of generated python lines to template + # source lines + self.source_map = {} + + self._re_space_comment = re.compile(r"^\s*#") + self._re_space = re.compile(r"^\s*$") + self._re_indent = re.compile(r":[ \t]*(?:#.*)?$") + self._re_compound = re.compile(r"^\s*(if|try|elif|while|for|with)") + self._re_indent_keyword = re.compile( + r"^\s*(def|class|else|elif|except|finally)" + ) + self._re_unindentor = re.compile(r"^\s*(else|elif|except|finally).*\:") + + def _update_lineno(self, num): + self.lineno += num + + def start_source(self, lineno): + if self.lineno not in self.source_map: + self.source_map[self.lineno] = lineno + + def write_blanks(self, num): + self.stream.write("\n" * num) + self._update_lineno(num) + + def write_indented_block(self, block, starting_lineno=None): + """print a line or lines of python which already contain indentation. + + The indentation of the total block of lines will be adjusted to that of + the current indent level.""" + self.in_indent_lines = False + for i, l in enumerate(re.split(r"\r?\n", block)): + self.line_buffer.append(l) + if starting_lineno is not None: + self.start_source(starting_lineno + i) + self._update_lineno(1) + + def writelines(self, *lines): + """print a series of lines of python.""" + for line in lines: + self.writeline(line) + + def writeline(self, line): + """print a line of python, indenting it according to the current + indent level. + + this also adjusts the indentation counter according to the + content of the line. + + """ + + if not self.in_indent_lines: + self._flush_adjusted_lines() + self.in_indent_lines = True + + if ( + line is None + or self._re_space_comment.match(line) + or self._re_space.match(line) + ): + hastext = False + else: + hastext = True + + is_comment = line and len(line) and line[0] == "#" + + # see if this line should decrease the indentation level + if ( + not is_comment + and (not hastext or self._is_unindentor(line)) + and self.indent > 0 + ): + self.indent -= 1 + # if the indent_detail stack is empty, the user + # probably put extra closures - the resulting + # module wont compile. + if len(self.indent_detail) == 0: + # TODO: no coverage here + raise exceptions.MakoException("Too many whitespace closures") + self.indent_detail.pop() + + if line is None: + return + + # write the line + self.stream.write(self._indent_line(line) + "\n") + self._update_lineno(len(line.split("\n"))) + + # see if this line should increase the indentation level. + # note that a line can both decrase (before printing) and + # then increase (after printing) the indentation level. + + if self._re_indent.search(line): + # increment indentation count, and also + # keep track of what the keyword was that indented us, + # if it is a python compound statement keyword + # where we might have to look for an "unindent" keyword + match = self._re_compound.match(line) + if match: + # its a "compound" keyword, so we will check for "unindentors" + indentor = match.group(1) + self.indent += 1 + self.indent_detail.append(indentor) + else: + indentor = None + # its not a "compound" keyword. 
but lets also + # test for valid Python keywords that might be indenting us, + # else assume its a non-indenting line + m2 = self._re_indent_keyword.match(line) + if m2: + self.indent += 1 + self.indent_detail.append(indentor) + + def close(self): + """close this printer, flushing any remaining lines.""" + self._flush_adjusted_lines() + + def _is_unindentor(self, line): + """return true if the given line is an 'unindentor', + relative to the last 'indent' event received. + + """ + + # no indentation detail has been pushed on; return False + if len(self.indent_detail) == 0: + return False + + indentor = self.indent_detail[-1] + + # the last indent keyword we grabbed is not a + # compound statement keyword; return False + if indentor is None: + return False + + # if the current line doesnt have one of the "unindentor" keywords, + # return False + match = self._re_unindentor.match(line) + # if True, whitespace matches up, we have a compound indentor, + # and this line has an unindentor, this + # is probably good enough + return bool(match) + + # should we decide that its not good enough, heres + # more stuff to check. + # keyword = match.group(1) + + # match the original indent keyword + # for crit in [ + # (r'if|elif', r'else|elif'), + # (r'try', r'except|finally|else'), + # (r'while|for', r'else'), + # ]: + # if re.match(crit[0], indentor) and re.match(crit[1], keyword): + # return True + + # return False + + def _indent_line(self, line, stripspace=""): + """indent the given line according to the current indent level. + + stripspace is a string of space that will be truncated from the + start of the line before indenting.""" + if stripspace == "": + # Fast path optimization. + return self.indentstring * self.indent + line + + return re.sub( + r"^%s" % stripspace, self.indentstring * self.indent, line + ) + + def _reset_multi_line_flags(self): + """reset the flags which would indicate we are in a backslashed + or triple-quoted section.""" + + self.backslashed, self.triplequoted = False, False + + def _in_multi_line(self, line): + """return true if the given line is part of a multi-line block, + via backslash or triple-quote.""" + + # we are only looking for explicitly joined lines here, not + # implicit ones (i.e. brackets, braces etc.). 
this is just to + # guard against the possibility of modifying the space inside of + # a literal multiline string with unfortunately placed + # whitespace + + current_state = self.backslashed or self.triplequoted + + self.backslashed = bool(re.search(r"\\$", line)) + triples = len(re.findall(r"\"\"\"|\'\'\'", line)) + if triples == 1 or triples % 2 != 0: + self.triplequoted = not self.triplequoted + + return current_state + + def _flush_adjusted_lines(self): + stripspace = None + self._reset_multi_line_flags() + + for entry in self.line_buffer: + if self._in_multi_line(entry): + self.stream.write(entry + "\n") + else: + entry = entry.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry): + stripspace = re.match(r"^([ \t]*)", entry).group(1) + self.stream.write(self._indent_line(entry, stripspace) + "\n") + + self.line_buffer = [] + self._reset_multi_line_flags() + + +def adjust_whitespace(text): + """remove the left-whitespace margin of a block of Python code.""" + + state = [False, False] + (backslashed, triplequoted) = (0, 1) + + def in_multi_line(line): + start_state = state[backslashed] or state[triplequoted] + + if re.search(r"\\$", line): + state[backslashed] = True + else: + state[backslashed] = False + + def match(reg, t): + m = re.match(reg, t) + if m: + return m, t[len(m.group(0)) :] + else: + return None, t + + while line: + if state[triplequoted]: + m, line = match(r"%s" % state[triplequoted], line) + if m: + state[triplequoted] = False + else: + m, line = match(r".*?(?=%s|$)" % state[triplequoted], line) + else: + m, line = match(r"#", line) + if m: + return start_state + + m, line = match(r"\"\"\"|\'\'\'", line) + if m: + state[triplequoted] = m.group(0) + continue + + m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line) + + return start_state + + def _indent_line(line, stripspace=""): + return re.sub(r"^%s" % stripspace, "", line) + + lines = [] + stripspace = None + + for line in re.split(r"\r?\n", text): + if in_multi_line(line): + lines.append(line) + else: + line = line.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", line): + stripspace = re.match(r"^([ \t]*)", line).group(1) + lines.append(_indent_line(line, stripspace)) + return "\n".join(lines) diff --git a/venv/lib/python3.12/site-packages/mako/pyparser.py b/venv/lib/python3.12/site-packages/mako/pyparser.py new file mode 100644 index 0000000..1da9ddb --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/pyparser.py @@ -0,0 +1,235 @@ +# mako/pyparser.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Handles parsing of Python code. + +Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler +module is used. 
+""" + +import operator + +import _ast + +from mako import _ast_util +from mako import compat +from mako import exceptions +from mako import util + +# words that cannot be assigned to (notably +# smaller than the total keys in __builtins__) +reserved = {"True", "False", "None", "print"} + +# the "id" attribute on a function node +arg_id = operator.attrgetter("arg") + +util.restore__ast(_ast) + + +def parse(code, mode="exec", **exception_kwargs): + """Parse an expression into AST""" + + try: + return _ast_util.parse(code, "", mode) + except Exception as e: + raise exceptions.SyntaxException( + "(%s) %s (%r)" + % ( + compat.exception_as().__class__.__name__, + compat.exception_as(), + code[0:50], + ), + **exception_kwargs, + ) from e + + +class FindIdentifiers(_ast_util.NodeVisitor): + def __init__(self, listener, **exception_kwargs): + self.in_function = False + self.in_assign_targets = False + self.local_ident_stack = set() + self.listener = listener + self.exception_kwargs = exception_kwargs + + def _add_declared(self, name): + if not self.in_function: + self.listener.declared_identifiers.add(name) + else: + self.local_ident_stack.add(name) + + def visit_ClassDef(self, node): + self._add_declared(node.name) + + def visit_Assign(self, node): + # flip around the visiting of Assign so the expression gets + # evaluated first, in the case of a clause like "x=x+5" (x + # is undeclared) + + self.visit(node.value) + in_a = self.in_assign_targets + self.in_assign_targets = True + for n in node.targets: + self.visit(n) + self.in_assign_targets = in_a + + def visit_ExceptHandler(self, node): + if node.name is not None: + self._add_declared(node.name) + if node.type is not None: + self.visit(node.type) + for statement in node.body: + self.visit(statement) + + def visit_Lambda(self, node, *args): + self._visit_function(node, True) + + def visit_FunctionDef(self, node): + self._add_declared(node.name) + self._visit_function(node, False) + + def visit_ListComp(self, node): + if self.in_function: + for comp in node.generators: + self.visit(comp.target) + self.visit(comp.iter) + else: + self.generic_visit(node) + + visit_SetComp = visit_GeneratorExp = visit_ListComp + + def visit_DictComp(self, node): + if self.in_function: + for comp in node.generators: + self.visit(comp.target) + self.visit(comp.iter) + else: + self.generic_visit(node) + + def _expand_tuples(self, args): + for arg in args: + if isinstance(arg, _ast.Tuple): + yield from arg.elts + else: + yield arg + + def _visit_function(self, node, islambda): + # push function state onto stack. dont log any more + # identifiers as "declared" until outside of the function, + # but keep logging identifiers as "undeclared". 
+        # argument names in each function header so they aren't
+        # counted as "undeclared"
+
+        inf = self.in_function
+        self.in_function = True
+
+        local_ident_stack = self.local_ident_stack
+        self.local_ident_stack = local_ident_stack.union(
+            [arg_id(arg) for arg in self._expand_tuples(node.args.args)]
+        )
+        if islambda:
+            self.visit(node.body)
+        else:
+            for n in node.body:
+                self.visit(n)
+        self.in_function = inf
+        self.local_ident_stack = local_ident_stack
+
+    def visit_For(self, node):
+        # flip around visit
+
+        self.visit(node.iter)
+        self.visit(node.target)
+        for statement in node.body:
+            self.visit(statement)
+        for statement in node.orelse:
+            self.visit(statement)
+
+    def visit_Name(self, node):
+        if isinstance(node.ctx, _ast.Store):
+            # this is equivalent to visit_AssName in
+            # compiler
+            self._add_declared(node.id)
+        elif (
+            node.id not in reserved
+            and node.id not in self.listener.declared_identifiers
+            and node.id not in self.local_ident_stack
+        ):
+            self.listener.undeclared_identifiers.add(node.id)
+
+    def visit_Import(self, node):
+        for name in node.names:
+            if name.asname is not None:
+                self._add_declared(name.asname)
+            else:
+                self._add_declared(name.name.split(".")[0])
+
+    def visit_ImportFrom(self, node):
+        for name in node.names:
+            if name.asname is not None:
+                self._add_declared(name.asname)
+            elif name.name == "*":
+                raise exceptions.CompileException(
+                    "'import *' is not supported, since all identifier "
+                    "names must be explicitly declared. Please use the "
+                    "form 'from <modulename> import <name1>, <name2>, "
+                    "...' instead.",
+                    **self.exception_kwargs,
+                )
+            else:
+                self._add_declared(name.name)
+
+
+class FindTuple(_ast_util.NodeVisitor):
+    def __init__(self, listener, code_factory, **exception_kwargs):
+        self.listener = listener
+        self.exception_kwargs = exception_kwargs
+        self.code_factory = code_factory
+
+    def visit_Tuple(self, node):
+        for n in node.elts:
+            p = self.code_factory(n, **self.exception_kwargs)
+            self.listener.codeargs.append(p)
+            self.listener.args.append(ExpressionGenerator(n).value())
+            ldi = self.listener.declared_identifiers
+            self.listener.declared_identifiers = ldi.union(
+                p.declared_identifiers
+            )
+            lui = self.listener.undeclared_identifiers
+            self.listener.undeclared_identifiers = lui.union(
+                p.undeclared_identifiers
+            )
+
+
+class ParseFunc(_ast_util.NodeVisitor):
+    def __init__(self, listener, **exception_kwargs):
+        self.listener = listener
+        self.exception_kwargs = exception_kwargs
+
+    def visit_FunctionDef(self, node):
+        self.listener.funcname = node.name
+
+        argnames = [arg_id(arg) for arg in node.args.args]
+        if node.args.vararg:
+            argnames.append(node.args.vararg.arg)
+
+        kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
+        if node.args.kwarg:
+            kwargnames.append(node.args.kwarg.arg)
+        self.listener.argnames = argnames
+        self.listener.defaults = node.args.defaults  # ast
+        self.listener.kwargnames = kwargnames
+        self.listener.kwdefaults = node.args.kw_defaults
+        self.listener.varargs = node.args.vararg
+        self.listener.kwargs = node.args.kwarg
+
+
+class ExpressionGenerator:
+    def __init__(self, astnode):
+        self.generator = _ast_util.SourceGenerator(" " * 4)
+        self.generator.visit(astnode)
+
+    def value(self):
+        return "".join(self.generator.result)
diff --git a/venv/lib/python3.12/site-packages/mako/runtime.py b/venv/lib/python3.12/site-packages/mako/runtime.py
new file mode 100644
index 0000000..23401b7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/mako/runtime.py
@@ -0,0 +1,968 @@
+# mako/runtime.py
+# Copyright 2006-2020 the Mako
authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides runtime services for templates, including Context, +Namespace, and various helper functions.""" + +import builtins +import functools +import sys + +from mako import compat +from mako import exceptions +from mako import util + + +class Context: + + """Provides runtime namespace, output buffer, and various + callstacks for templates. + + See :ref:`runtime_toplevel` for detail on the usage of + :class:`.Context`. + + """ + + def __init__(self, buffer, **data): + self._buffer_stack = [buffer] + + self._data = data + + self._kwargs = data.copy() + self._with_template = None + self._outputting_as_unicode = None + self.namespaces = {} + + # "capture" function which proxies to the + # generic "capture" function + self._data["capture"] = functools.partial(capture, self) + + # "caller" stack used by def calls with content + self.caller_stack = self._data["caller"] = CallerStack() + + def _set_with_template(self, t): + self._with_template = t + illegal_names = t.reserved_names.intersection(self._data) + if illegal_names: + raise exceptions.NameConflictError( + "Reserved words passed to render(): %s" + % ", ".join(illegal_names) + ) + + @property + def lookup(self): + """Return the :class:`.TemplateLookup` associated + with this :class:`.Context`. + + """ + return self._with_template.lookup + + @property + def kwargs(self): + """Return the dictionary of top level keyword arguments associated + with this :class:`.Context`. + + This dictionary only includes the top-level arguments passed to + :meth:`.Template.render`. It does not include names produced within + the template execution such as local variable names or special names + such as ``self``, ``next``, etc. + + The purpose of this dictionary is primarily for the case that + a :class:`.Template` accepts arguments via its ``<%page>`` tag, + which are normally expected to be passed via :meth:`.Template.render`, + except the template is being called in an inheritance context, + using the ``body()`` method. :attr:`.Context.kwargs` can then be + used to propagate these arguments to the inheriting template:: + + ${next.body(**context.kwargs)} + + """ + return self._kwargs.copy() + + def push_caller(self, caller): + """Push a ``caller`` callable onto the callstack for + this :class:`.Context`.""" + + self.caller_stack.append(caller) + + def pop_caller(self): + """Pop a ``caller`` callable onto the callstack for this + :class:`.Context`.""" + + del self.caller_stack[-1] + + def keys(self): + """Return a list of all names established in this :class:`.Context`.""" + + return list(self._data.keys()) + + def __getitem__(self, key): + if key in self._data: + return self._data[key] + else: + return builtins.__dict__[key] + + def _push_writer(self): + """push a capturing buffer onto this Context and return + the new writer function.""" + + buf = util.FastEncodingBuffer() + self._buffer_stack.append(buf) + return buf.write + + def _pop_buffer_and_writer(self): + """pop the most recent capturing buffer from this Context + and return the current writer after the pop. 
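Contexts are normally constructed for you by ``Template.render()``, but the buffer/data split shown in ``__init__`` above can also be exercised directly through ``Template.render_context()``. A sketch, assuming this venv's Mako:

.. sourcecode:: python

    import io

    from mako.runtime import Context
    from mako.template import Template

    t = Template("Hello, ${name}!")

    buf = io.StringIO()
    ctx = Context(buf, name="world")  # output buffer plus **data
    t.render_context(ctx)
    print(buf.getvalue())  # Hello, world!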
+ + """ + + buf = self._buffer_stack.pop() + return buf, self._buffer_stack[-1].write + + def _push_buffer(self): + """push a capturing buffer onto this Context.""" + + self._push_writer() + + def _pop_buffer(self): + """pop the most recent capturing buffer from this Context.""" + + return self._buffer_stack.pop() + + def get(self, key, default=None): + """Return a value from this :class:`.Context`.""" + + return self._data.get(key, builtins.__dict__.get(key, default)) + + def write(self, string): + """Write a string to this :class:`.Context` object's + underlying output buffer.""" + + self._buffer_stack[-1].write(string) + + def writer(self): + """Return the current writer function.""" + + return self._buffer_stack[-1].write + + def _copy(self): + c = Context.__new__(Context) + c._buffer_stack = self._buffer_stack + c._data = self._data.copy() + c._kwargs = self._kwargs + c._with_template = self._with_template + c._outputting_as_unicode = self._outputting_as_unicode + c.namespaces = self.namespaces + c.caller_stack = self.caller_stack + return c + + def _locals(self, d): + """Create a new :class:`.Context` with a copy of this + :class:`.Context`'s current state, + updated with the given dictionary. + + The :attr:`.Context.kwargs` collection remains + unaffected. + + + """ + + if not d: + return self + c = self._copy() + c._data.update(d) + return c + + def _clean_inheritance_tokens(self): + """create a new copy of this :class:`.Context`. with + tokens related to inheritance state removed.""" + + c = self._copy() + x = c._data + x.pop("self", None) + x.pop("parent", None) + x.pop("next", None) + return c + + +class CallerStack(list): + def __init__(self): + self.nextcaller = None + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + return len(self) and self._get_caller() and True or False + + def _get_caller(self): + # this method can be removed once + # codegen MAGIC_NUMBER moves past 7 + return self[-1] + + def __getattr__(self, key): + return getattr(self._get_caller(), key) + + def _push_frame(self): + frame = self.nextcaller or None + self.append(frame) + self.nextcaller = None + return frame + + def _pop_frame(self): + self.nextcaller = self.pop() + + +class Undefined: + + """Represents an undefined value in a template. + + All template modules have a constant value + ``UNDEFINED`` present which is an instance of this + object. + + """ + + def __str__(self): + raise NameError("Undefined") + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + return False + + +UNDEFINED = Undefined() +STOP_RENDERING = "" + + +class LoopStack: + + """a stack for LoopContexts that implements the context manager protocol + to automatically pop off the top of the stack on context exit + """ + + def __init__(self): + self.stack = [] + + def _enter(self, iterable): + self._push(iterable) + return self._top + + def _exit(self): + self._pop() + return self._top + + @property + def _top(self): + if self.stack: + return self.stack[-1] + else: + return self + + def _pop(self): + return self.stack.pop() + + def _push(self, iterable): + new = LoopContext(iterable) + if self.stack: + new.parent = self.stack[-1] + return self.stack.append(new) + + def __getattr__(self, key): + raise exceptions.RuntimeException("No loop context is established") + + def __iter__(self): + return iter(self._top) + + +class LoopContext: + + """A magic loop variable. + Automatically accessible in any ``% for`` block. + + See the section :ref:`loop_context` for usage + notes. 
+ + :attr:`parent` -> :class:`.LoopContext` or ``None`` + The parent loop, if one exists. + :attr:`index` -> `int` + The 0-based iteration count. + :attr:`reverse_index` -> `int` + The number of iterations remaining. + :attr:`first` -> `bool` + ``True`` on the first iteration, ``False`` otherwise. + :attr:`last` -> `bool` + ``True`` on the last iteration, ``False`` otherwise. + :attr:`even` -> `bool` + ``True`` when ``index`` is even. + :attr:`odd` -> `bool` + ``True`` when ``index`` is odd. + """ + + def __init__(self, iterable): + self._iterable = iterable + self.index = 0 + self.parent = None + + def __iter__(self): + for i in self._iterable: + yield i + self.index += 1 + + @util.memoized_instancemethod + def __len__(self): + return len(self._iterable) + + @property + def reverse_index(self): + return len(self) - self.index - 1 + + @property + def first(self): + return self.index == 0 + + @property + def last(self): + return self.index == len(self) - 1 + + @property + def even(self): + return not self.odd + + @property + def odd(self): + return bool(self.index % 2) + + def cycle(self, *values): + """Cycle through values as the loop progresses.""" + if not values: + raise ValueError("You must provide values to cycle through") + return values[self.index % len(values)] + + +class _NSAttr: + def __init__(self, parent): + self.__parent = parent + + def __getattr__(self, key): + ns = self.__parent + while ns: + if hasattr(ns.module, key): + return getattr(ns.module, key) + else: + ns = ns.inherits + raise AttributeError(key) + + +class Namespace: + + """Provides access to collections of rendering methods, which + can be local, from other templates, or from imported modules. + + To access a particular rendering method referenced by a + :class:`.Namespace`, use plain attribute access: + + .. sourcecode:: mako + + ${some_namespace.foo(x, y, z)} + + :class:`.Namespace` also contains several built-in attributes + described here. + + """ + + def __init__( + self, + name, + context, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + callables = () + + module = None + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + + template = None + """The :class:`.Template` object referenced by this + :class:`.Namespace`, if any. + + """ + + context = None + """The :class:`.Context` object for this :class:`.Namespace`. + + Namespaces are often created with copies of contexts that + contain slightly different data, particularly in inheritance + scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one + can traverse an entire chain of templates that inherit from + one-another. + + """ + + filename = None + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + + If this is a pure module-based + :class:`.Namespace`, this evaluates to ``module.__file__``. If a + template-based namespace, it evaluates to the original + template file location. + + """ + + uri = None + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. + + This is the equivalent of :attr:`.Template.uri`. 
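The ``LoopContext`` attributes listed above are available inside any ``% for`` block through the ``loop`` name. A short sketch, assuming this venv's Mako:

.. sourcecode:: python

    from mako.template import Template

    t = Template(
        "% for name in names:\n"
        "${loop.index} ${name} first=${loop.first} ${loop.cycle('x', 'y')}\n"
        "% endfor\n"
    )
    print(t.render(names=["a", "b", "c"]))
    # 0 a first=True x
    # 1 b first=False y
    # 2 c first=False x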
+ + """ + + _templateuri = None + + @util.memoized_property + def attr(self): + """Access module level attributes by name. + + This accessor allows templates to supply "scalar" + attributes which are particularly handy in inheritance + relationships. + + .. seealso:: + + :ref:`inheritance_attr` + + :ref:`namespace_attr_for_includes` + + """ + return _NSAttr(self) + + def get_namespace(self, uri): + """Return a :class:`.Namespace` corresponding to the given ``uri``. + + If the given ``uri`` is a relative URI (i.e. it does not + contain a leading slash ``/``), the ``uri`` is adjusted to + be relative to the ``uri`` of the namespace itself. This + method is therefore mostly useful off of the built-in + ``local`` namespace, described in :ref:`namespace_local`. + + In + most cases, a template wouldn't need this function, and + should instead use the ``<%namespace>`` tag to load + namespaces. However, since all ``<%namespace>`` tags are + evaluated before the body of a template ever runs, + this method can be used to locate namespaces using + expressions that were generated within the body code of + the template, or to conditionally use a particular + namespace. + + """ + key = (self, uri) + if key in self.context.namespaces: + return self.context.namespaces[key] + ns = TemplateNamespace( + uri, + self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri, + ) + self.context.namespaces[key] = ns + return ns + + def get_template(self, uri): + """Return a :class:`.Template` from the given ``uri``. + + The ``uri`` resolution is relative to the ``uri`` of this + :class:`.Namespace` object's :class:`.Template`. + + """ + return _lookup_template(self.context, uri, self._templateuri) + + def get_cached(self, key, **kwargs): + """Return a value from the :class:`.Cache` referenced by this + :class:`.Namespace` object's :class:`.Template`. + + The advantage to this method versus direct access to the + :class:`.Cache` is that the configuration parameters + declared in ``<%page>`` take effect here, thereby calling + up the same configured backend as that configured + by ``<%page>``. + + """ + + return self.cache.get(key, **kwargs) + + @property + def cache(self): + """Return the :class:`.Cache` object referenced + by this :class:`.Namespace` object's + :class:`.Template`. 
+ + """ + return self.template.cache + + def include_file(self, uri, **kwargs): + """Include a file at the given ``uri``.""" + + _include_file(self.context, uri, self._templateuri, **kwargs) + + def _populate(self, d, l): + for ident in l: + if ident == "*": + for k, v in self._get_star(): + d[k] = v + else: + d[ident] = getattr(self, ident) + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +class TemplateNamespace(Namespace): + + """A :class:`.Namespace` specific to a :class:`.Template` instance.""" + + def __init__( + self, + name, + context, + template=None, + templateuri=None, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + if templateuri is not None: + self.template = _lookup_template(context, templateuri, calling_uri) + self._templateuri = self.template.module._template_uri + elif template is not None: + self.template = template + self._templateuri = template.module._template_uri + else: + raise TypeError("'template' argument is required.") + + if populate_self: + lclcallable, lclcontext = _populate_self_namespace( + context, self.template, self_ns=self + ) + + @property + def module(self): + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + return self.template.module + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.template.filename + + @property + def uri(self): + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. + + This is the equivalent of :attr:`.Template.uri`. 
+ + """ + return self.template.uri + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def get(key): + callable_ = self.template._get_def_callable(key) + return functools.partial(callable_, self.context) + + for k in self.template.module._exports: + yield (k, get(k)) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.template.has_def(key): + callable_ = self.template._get_def_callable(key) + val = functools.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +class ModuleNamespace(Namespace): + + """A :class:`.Namespace` specific to a Python module instance.""" + + def __init__( + self, + name, + context, + module, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + mod = __import__(module) + for token in module.split(".")[1:]: + mod = getattr(mod, token) + self.module = mod + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.module.__file__ + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + for key in dir(self.module): + if key[0] != "_": + callable_ = getattr(self.module, key) + if callable(callable_): + yield key, functools.partial(callable_, self.context) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif hasattr(self.module, key): + callable_ = getattr(self.module, key) + val = functools.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +def supports_caller(func): + """Apply a caller_stack compatibility decorator to a plain + Python function. + + See the example in :ref:`namespaces_python_modules`. + + """ + + def wrap_stackframe(context, *args, **kwargs): + context.caller_stack._push_frame() + try: + return func(context, *args, **kwargs) + finally: + context.caller_stack._pop_frame() + + return wrap_stackframe + + +def capture(context, callable_, *args, **kwargs): + """Execute the given template def, capturing the output into + a buffer. + + See the example in :ref:`namespaces_python_modules`. + + """ + + if not callable(callable_): + raise exceptions.RuntimeException( + "capture() function expects a callable as " + "its argument (i.e. 
capture(func, *args, **kwargs))" + ) + context._push_buffer() + try: + callable_(*args, **kwargs) + finally: + buf = context._pop_buffer() + return buf.getvalue() + + +def _decorate_toplevel(fn): + def decorate_render(render_fn): + def go(context, *args, **kw): + def y(*args, **kw): + return render_fn(context, *args, **kw) + + try: + y.__name__ = render_fn.__name__[7:] + except TypeError: + # < Python 2.4 + pass + return fn(y)(context, *args, **kw) + + return go + + return decorate_render + + +def _decorate_inline(context, fn): + def decorate_render(render_fn): + dec = fn(render_fn) + + def go(*args, **kw): + return dec(context, *args, **kw) + + return go + + return decorate_render + + +def _include_file(context, uri, calling_uri, **kwargs): + """locate the template from the given uri and include it in + the current output.""" + + template = _lookup_template(context, uri, calling_uri) + (callable_, ctx) = _populate_self_namespace( + context._clean_inheritance_tokens(), template + ) + kwargs = _kwargs_for_include(callable_, context._data, **kwargs) + if template.include_error_handler: + try: + callable_(ctx, **kwargs) + except Exception: + result = template.include_error_handler(ctx, compat.exception_as()) + if not result: + raise + else: + callable_(ctx, **kwargs) + + +def _inherit_from(context, uri, calling_uri): + """called by the _inherit method in template modules to set + up the inheritance chain at the start of a template's + execution.""" + + if uri is None: + return None + template = _lookup_template(context, uri, calling_uri) + self_ns = context["self"] + ih = self_ns + while ih.inherits is not None: + ih = ih.inherits + lclcontext = context._locals({"next": ih}) + ih.inherits = TemplateNamespace( + "self:%s" % template.uri, + lclcontext, + template=template, + populate_self=False, + ) + context._data["parent"] = lclcontext._data["local"] = ih.inherits + callable_ = getattr(template.module, "_mako_inherit", None) + if callable_ is not None: + ret = callable_(template, lclcontext) + if ret: + return ret + + gen_ns = getattr(template.module, "_mako_generate_namespaces", None) + if gen_ns is not None: + gen_ns(context) + return (template.callable_, lclcontext) + + +def _lookup_template(context, uri, relativeto): + lookup = context._with_template.lookup + if lookup is None: + raise exceptions.TemplateLookupException( + "Template '%s' has no TemplateLookup associated" + % context._with_template.uri + ) + uri = lookup.adjust_uri(uri, relativeto) + try: + return lookup.get_template(uri) + except exceptions.TopLevelLookupException as e: + raise exceptions.TemplateLookupException( + str(compat.exception_as()) + ) from e + + +def _populate_self_namespace(context, template, self_ns=None): + if self_ns is None: + self_ns = TemplateNamespace( + "self:%s" % template.uri, + context, + template=template, + populate_self=False, + ) + context._data["self"] = context._data["local"] = self_ns + if hasattr(template.module, "_mako_inherit"): + ret = template.module._mako_inherit(template, context) + if ret: + return ret + return (template.callable_, context) + + +def _render(template, callable_, args, data, as_unicode=False): + """create a Context and return the string + output of the given template and template callable.""" + + if as_unicode: + buf = util.FastEncodingBuffer() + else: + buf = util.FastEncodingBuffer( + encoding=template.output_encoding, errors=template.encoding_errors + ) + context = Context(buf, **data) + context._outputting_as_unicode = as_unicode + 
context._set_with_template(template) + + _render_context( + template, + callable_, + context, + *args, + **_kwargs_for_callable(callable_, data), + ) + return context._pop_buffer().getvalue() + + +def _kwargs_for_callable(callable_, data): + argspec = compat.inspect_getargspec(callable_) + # for normal pages, **pageargs is usually present + if argspec[2]: + return data + + # for rendering defs from the top level, figure out the args + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + kwargs = {} + for arg in namedargs: + if arg != "context" and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _kwargs_for_include(callable_, data, **kwargs): + argspec = compat.inspect_getargspec(callable_) + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + for arg in namedargs: + if arg != "context" and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _render_context(tmpl, callable_, context, *args, **kwargs): + import mako.template as template + + # create polymorphic 'self' namespace for this + # template with possibly updated context + if not isinstance(tmpl, template.DefTemplate): + # if main render method, call from the base of the inheritance stack + (inherit, lclcontext) = _populate_self_namespace(context, tmpl) + _exec_template(inherit, lclcontext, args=args, kwargs=kwargs) + else: + # otherwise, call the actual rendering method specified + (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent) + _exec_template(callable_, context, args=args, kwargs=kwargs) + + +def _exec_template(callable_, context, args=None, kwargs=None): + """execute a rendering callable given the callable, a + Context, and optional explicit arguments + + the contextual Template will be located if it exists, and + the error handling options specified on that Template will + be interpreted here. 
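``supports_caller()`` above, together with the caller stack managed during execution, lets a plain Python function receive a template-defined body; this is the documented pattern for namespaces built from modules. In the sketch below the module is registered in ``sys.modules`` purely so ``<%namespace module=.../>`` can import it; the ``helpers``/``box`` names are illustrative:

.. sourcecode:: python

    import sys
    import types

    from mako.runtime import supports_caller
    from mako.template import Template

    helpers = types.ModuleType("helpers")  # hypothetical helper module

    @supports_caller
    def box(context, title):
        context.write("[%s: " % title)
        context["caller"].body()  # render the calling tag's body
        context.write("]")
        return ""  # the call tag writes the return value, so keep it empty

    helpers.box = box
    sys.modules["helpers"] = helpers

    t = Template(
        '<%namespace name="h" module="helpers"/>'
        '<%h:box title="note">hello</%h:box>'
    )
    print(t.render())  # [note: hello]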
+ """ + template = context._with_template + if template is not None and ( + template.format_exceptions or template.error_handler + ): + try: + callable_(context, *args, **kwargs) + except Exception: + _render_error(template, context, compat.exception_as()) + except: + e = sys.exc_info()[0] + _render_error(template, context, e) + else: + callable_(context, *args, **kwargs) + + +def _render_error(template, context, error): + if template.error_handler: + result = template.error_handler(context, error) + if not result: + tp, value, tb = sys.exc_info() + if value and tb: + raise value.with_traceback(tb) + else: + raise error + else: + error_template = exceptions.html_error_template() + if context._outputting_as_unicode: + context._buffer_stack[:] = [util.FastEncodingBuffer()] + else: + context._buffer_stack[:] = [ + util.FastEncodingBuffer( + error_template.output_encoding, + error_template.encoding_errors, + ) + ] + + context._set_with_template(error_template) + error_template.render_context(context, error=error) diff --git a/venv/lib/python3.12/site-packages/mako/template.py b/venv/lib/python3.12/site-packages/mako/template.py new file mode 100644 index 0000000..82c7cba --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/template.py @@ -0,0 +1,711 @@ +# mako/template.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provides the Template class, a facade for parsing, generating and executing +template strings, as well as template runtime operations.""" + +import json +import os +import re +import shutil +import stat +import tempfile +import types +import weakref + +from mako import cache +from mako import codegen +from mako import compat +from mako import exceptions +from mako import runtime +from mako import util +from mako.lexer import Lexer + + +class Template: + r"""Represents a compiled template. + + :class:`.Template` includes a reference to the original + template source (via the :attr:`.source` attribute) + as well as the source code of the + generated Python module (i.e. the :attr:`.code` attribute), + as well as a reference to an actual Python module. + + :class:`.Template` is constructed using either a literal string + representing the template text, or a filename representing a filesystem + path to a source file. + + :param text: textual template source. This argument is mutually + exclusive versus the ``filename`` parameter. + + :param filename: filename of the source template. This argument is + mutually exclusive versus the ``text`` parameter. + + :param buffer_filters: string list of filters to be applied + to the output of ``%def``\ s which are buffered, cached, or otherwise + filtered, after all filters + defined with the ``%def`` itself have been applied. Allows the + creation of default expression filters that let the output + of return-valued ``%def``\ s "opt out" of that filtering via + passing special attributes or objects. + + :param cache_args: Dictionary of cache configuration arguments that + will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`. + + :param cache_dir: + + .. deprecated:: 0.6 + Use the ``'dir'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param cache_enabled: Boolean flag which enables caching of this + template. See :ref:`caching_toplevel`. + + :param cache_impl: String name of a :class:`.CacheImpl` caching + implementation to use. 
Defaults to ``'beaker'``. + + :param cache_type: + + .. deprecated:: 0.6 + Use the ``'type'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param cache_url: + + .. deprecated:: 0.6 + Use the ``'url'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param default_filters: List of string filter names that will + be applied to all expressions. See :ref:`filtering_default_filters`. + + :param enable_loop: When ``True``, enable the ``loop`` context variable. + This can be set to ``False`` to support templates that may + be making usage of the name "``loop``". Individual templates can + re-enable the "loop" context by placing the directive + ``enable_loop="True"`` inside the ``<%page>`` tag -- see + :ref:`migrating_loop`. + + :param encoding_errors: Error parameter passed to ``encode()`` when + string encoding is performed. See :ref:`usage_unicode`. + + :param error_handler: Python callable which is called whenever + compile or runtime exceptions occur. The callable is passed + the current context as well as the exception. If the + callable returns ``True``, the exception is considered to + be handled, else it is re-raised after the function + completes. Is used to provide custom error-rendering + functions. + + .. seealso:: + + :paramref:`.Template.include_error_handler` - include-specific + error handler function + + :param format_exceptions: if ``True``, exceptions which occur during + the render phase of this template will be caught and + formatted into an HTML error page, which then becomes the + rendered result of the :meth:`.render` call. Otherwise, + runtime exceptions are propagated outwards. + + :param imports: String list of Python statements, typically individual + "import" lines, which will be placed into the module level + preamble of all generated Python modules. See the example + in :ref:`filtering_default_filters`. + + :param future_imports: String list of names to import from `__future__`. + These will be concatenated into a comma-separated string and inserted + into the beginning of the template, e.g. ``futures_imports=['FOO', + 'BAR']`` results in ``from __future__ import FOO, BAR``. + + :param include_error_handler: An error handler that runs when this template + is included within another one via the ``<%include>`` tag, and raises an + error. Compare to the :paramref:`.Template.error_handler` option. + + .. versionadded:: 1.0.6 + + .. seealso:: + + :paramref:`.Template.error_handler` - top-level error handler function + + :param input_encoding: Encoding of the template's source code. Can + be used in lieu of the coding comment. See + :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for + details on source encoding. + + :param lookup: a :class:`.TemplateLookup` instance that will be used + for all file lookups via the ``<%namespace>``, + ``<%include>``, and ``<%inherit>`` tags. See + :ref:`usage_templatelookup`. + + :param module_directory: Filesystem location where generated + Python module files will be placed. + + :param module_filename: Overrides the filename of the generated + Python module file. For advanced usage only. + + :param module_writer: A callable which overrides how the Python + module is written entirely. The callable is passed the + encoded source content of the module and the destination + path to be written to. The default behavior of module writing + uses a tempfile in conjunction with a file move in order + to make the operation atomic. 
So a user-defined module + writing function that mimics the default behavior would be: + + .. sourcecode:: python + + import tempfile + import os + import shutil + + def module_writer(source, outputpath): + (dest, name) = \\ + tempfile.mkstemp( + dir=os.path.dirname(outputpath) + ) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + from mako.template import Template + mytemplate = Template( + filename="index.html", + module_directory="/path/to/modules", + module_writer=module_writer + ) + + The function is provided for unusual configurations where + certain platform-specific permissions or other special + steps are needed. + + :param output_encoding: The encoding to use when :meth:`.render` + is called. + See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`. + + :param preprocessor: Python callable which will be passed + the full template source before it is parsed. The return + result of the callable will be used as the template source + code. + + :param lexer_cls: A :class:`.Lexer` class used to parse + the template. The :class:`.Lexer` class is used by + default. + + .. versionadded:: 0.7.4 + + :param strict_undefined: Replaces the automatic usage of + ``UNDEFINED`` for any undeclared variables not located in + the :class:`.Context` with an immediate raise of + ``NameError``. The advantage is immediate reporting of + missing variables which include the name. + + .. versionadded:: 0.3.6 + + :param uri: string URI or other identifier for this template. + If not provided, the ``uri`` is generated from the filesystem + path, or from the in-memory identity of a non-file-based + template. The primary usage of the ``uri`` is to provide a key + within :class:`.TemplateLookup`, as well as to generate the + file path of the generated Python module file, if + ``module_directory`` is specified. + + """ + + lexer_cls = Lexer + + def __init__( + self, + text=None, + filename=None, + uri=None, + format_exceptions=False, + error_handler=None, + lookup=None, + output_encoding=None, + encoding_errors="strict", + module_directory=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + module_filename=None, + input_encoding=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): + if uri: + self.module_id = re.sub(r"\W", "_", uri) + self.uri = uri + elif filename: + self.module_id = re.sub(r"\W", "_", filename) + drive, path = os.path.splitdrive(filename) + path = os.path.normpath(path).replace(os.path.sep, "/") + self.uri = path + else: + self.module_id = "memory:" + hex(id(self)) + self.uri = self.module_id + + u_norm = self.uri + if u_norm.startswith("/"): + u_norm = u_norm[1:] + u_norm = os.path.normpath(u_norm) + if u_norm.startswith(".."): + raise exceptions.TemplateLookupException( + 'Template uri "%s" is invalid - ' + "it cannot be relative outside " + "of the root path." 
% self.uri + ) + + self.input_encoding = input_encoding + self.output_encoding = output_encoding + self.encoding_errors = encoding_errors + self.enable_loop = enable_loop + self.strict_undefined = strict_undefined + self.module_writer = module_writer + + if default_filters is None: + self.default_filters = ["str"] + else: + self.default_filters = default_filters + self.buffer_filters = buffer_filters + + self.imports = imports + self.future_imports = future_imports + self.preprocessor = preprocessor + + if lexer_cls is not None: + self.lexer_cls = lexer_cls + + # if plain text, compile code in memory only + if text is not None: + (code, module) = _compile_text(self, text, filename) + self._code = code + self._source = text + ModuleInfo(module, None, self, filename, code, text, uri) + elif filename is not None: + # if template filename and a module directory, load + # a filesystem-based module file, generating if needed + if module_filename is not None: + path = module_filename + elif module_directory is not None: + path = os.path.abspath( + os.path.join( + os.path.normpath(module_directory), u_norm + ".py" + ) + ) + else: + path = None + module = self._compile_from_file(path, filename) + else: + raise exceptions.RuntimeException( + "Template requires text or filename" + ) + + self.module = module + self.filename = filename + self.callable_ = self.module.render_body + self.format_exceptions = format_exceptions + self.error_handler = error_handler + self.include_error_handler = include_error_handler + self.lookup = lookup + + self.module_directory = module_directory + + self._setup_cache_args( + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ) + + @util.memoized_property + def reserved_names(self): + if self.enable_loop: + return codegen.RESERVED_NAMES + else: + return codegen.RESERVED_NAMES.difference(["loop"]) + + def _setup_cache_args( + self, + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ): + self.cache_impl = cache_impl + self.cache_enabled = cache_enabled + self.cache_args = cache_args or {} + # transfer deprecated cache_* args + if cache_type: + self.cache_args["type"] = cache_type + if cache_dir: + self.cache_args["dir"] = cache_dir + if cache_url: + self.cache_args["url"] = cache_url + + def _compile_from_file(self, path, filename): + if path is not None: + util.verify_directory(os.path.dirname(path)) + filemtime = os.stat(filename)[stat.ST_MTIME] + if ( + not os.path.exists(path) + or os.stat(path)[stat.ST_MTIME] < filemtime + ): + data = util.read_file(filename) + _compile_module_file( + self, data, filename, path, self.module_writer + ) + module = compat.load_module(self.module_id, path) + if module._magic_number != codegen.MAGIC_NUMBER: + data = util.read_file(filename) + _compile_module_file( + self, data, filename, path, self.module_writer + ) + module = compat.load_module(self.module_id, path) + ModuleInfo(module, path, self, filename, None, None, None) + else: + # template filename and no module directory, compile code + # in memory + data = util.read_file(filename) + code, module = _compile_text(self, data, filename) + self._source = None + self._code = code + ModuleInfo(module, None, self, filename, code, None, None) + return module + + @property + def source(self): + """Return the template source code for this :class:`.Template`.""" + + return _get_module_info_from_callable(self.callable_).source + + @property + def code(self): + """Return the module source code for this :class:`.Template`.""" 
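+        # resolved via the ModuleInfo that was registered when this
+        # template's module was compiled or loaded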
+ + return _get_module_info_from_callable(self.callable_).code + + @util.memoized_property + def cache(self): + return cache.Cache(self) + + @property + def cache_dir(self): + return self.cache_args["dir"] + + @property + def cache_url(self): + return self.cache_args["url"] + + @property + def cache_type(self): + return self.cache_args["type"] + + def render(self, *args, **data): + """Render the output of this template as a string. + + If the template specifies an output encoding, the string + will be encoded accordingly, else the output is raw (raw + output uses `StringIO` and can't handle multibyte + characters). A :class:`.Context` object is created corresponding + to the given data. Arguments that are explicitly declared + by this template's internal rendering method are also + pulled from the given ``*args``, ``**data`` members. + + """ + return runtime._render(self, self.callable_, args, data) + + def render_unicode(self, *args, **data): + """Render the output of this template as a unicode object.""" + + return runtime._render( + self, self.callable_, args, data, as_unicode=True + ) + + def render_context(self, context, *args, **kwargs): + """Render this :class:`.Template` with the given context. + + The data is written to the context's buffer. + + """ + if getattr(context, "_with_template", None) is None: + context._set_with_template(self) + runtime._render_context(self, self.callable_, context, *args, **kwargs) + + def has_def(self, name): + return hasattr(self.module, "render_%s" % name) + + def get_def(self, name): + """Return a def of this template as a :class:`.DefTemplate`.""" + + return DefTemplate(self, getattr(self.module, "render_%s" % name)) + + def list_defs(self): + """return a list of defs in the template. + + .. versionadded:: 1.0.4 + + """ + return [i[7:] for i in dir(self.module) if i[:7] == "render_"] + + def _get_def_callable(self, name): + return getattr(self.module, "render_%s" % name) + + @property + def last_modified(self): + return self.module._modified_time + + +class ModuleTemplate(Template): + + """A Template which is constructed given an existing Python module. 
+ + e.g.:: + + t = Template("this is a template") + f = file("mymodule.py", "w") + f.write(t.code) + f.close() + + import mymodule + + t = ModuleTemplate(mymodule) + print(t.render()) + + """ + + def __init__( + self, + module, + module_filename=None, + template=None, + template_filename=None, + module_source=None, + template_source=None, + output_encoding=None, + encoding_errors="strict", + format_exceptions=False, + error_handler=None, + lookup=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + include_error_handler=None, + ): + self.module_id = re.sub(r"\W", "_", module._template_uri) + self.uri = module._template_uri + self.input_encoding = module._source_encoding + self.output_encoding = output_encoding + self.encoding_errors = encoding_errors + self.enable_loop = module._enable_loop + + self.module = module + self.filename = template_filename + ModuleInfo( + module, + module_filename, + self, + template_filename, + module_source, + template_source, + module._template_uri, + ) + + self.callable_ = self.module.render_body + self.format_exceptions = format_exceptions + self.error_handler = error_handler + self.include_error_handler = include_error_handler + self.lookup = lookup + self._setup_cache_args( + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ) + + +class DefTemplate(Template): + + """A :class:`.Template` which represents a callable def in a parent + template.""" + + def __init__(self, parent, callable_): + self.parent = parent + self.callable_ = callable_ + self.output_encoding = parent.output_encoding + self.module = parent.module + self.encoding_errors = parent.encoding_errors + self.format_exceptions = parent.format_exceptions + self.error_handler = parent.error_handler + self.include_error_handler = parent.include_error_handler + self.enable_loop = parent.enable_loop + self.lookup = parent.lookup + + def get_def(self, name): + return self.parent.get_def(name) + + +class ModuleInfo: + + """Stores information about a module currently loaded into + memory, provides reverse lookups of template source, module + source code based on a module's identifier. 
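+
+    instances register themselves in a ``weakref.WeakValueDictionary``
+    keyed by module name (and module filename, when present); the
+    ``source`` and ``code`` properties of :class:`.Template` resolve
+    through this registry.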
+ + """ + + _modules = weakref.WeakValueDictionary() + + def __init__( + self, + module, + module_filename, + template, + template_filename, + module_source, + template_source, + template_uri, + ): + self.module = module + self.module_filename = module_filename + self.template_filename = template_filename + self.module_source = module_source + self.template_source = template_source + self.template_uri = template_uri + self._modules[module.__name__] = template._mmarker = self + if module_filename: + self._modules[module_filename] = self + + @classmethod + def get_module_source_metadata(cls, module_source, full_line_map=False): + source_map = re.search( + r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S + ).group(1) + source_map = json.loads(source_map) + source_map["line_map"] = { + int(k): int(v) for k, v in source_map["line_map"].items() + } + if full_line_map: + f_line_map = source_map["full_line_map"] = [] + line_map = source_map["line_map"] + + curr_templ_line = 1 + for mod_line in range(1, max(line_map)): + if mod_line in line_map: + curr_templ_line = line_map[mod_line] + f_line_map.append(curr_templ_line) + return source_map + + @property + def code(self): + if self.module_source is not None: + return self.module_source + else: + return util.read_python_file(self.module_filename) + + @property + def source(self): + if self.template_source is None: + data = util.read_file(self.template_filename) + if self.module._source_encoding: + return data.decode(self.module._source_encoding) + else: + return data + + elif self.module._source_encoding and not isinstance( + self.template_source, str + ): + return self.template_source.decode(self.module._source_encoding) + else: + return self.template_source + + +def _compile(template, text, filename, generate_magic_comment): + lexer = template.lexer_cls( + text, + filename, + input_encoding=template.input_encoding, + preprocessor=template.preprocessor, + ) + node = lexer.parse() + source = codegen.compile( + node, + template.uri, + filename, + default_filters=template.default_filters, + buffer_filters=template.buffer_filters, + imports=template.imports, + future_imports=template.future_imports, + source_encoding=lexer.encoding, + generate_magic_comment=generate_magic_comment, + strict_undefined=template.strict_undefined, + enable_loop=template.enable_loop, + reserved_names=template.reserved_names, + ) + return source, lexer + + +def _compile_text(template, text, filename): + identifier = template.module_id + source, lexer = _compile( + template, text, filename, generate_magic_comment=False + ) + + cid = identifier + module = types.ModuleType(cid) + code = compile(source, cid, "exec") + + # this exec() works for 2.4->3.3. + exec(code, module.__dict__, module.__dict__) + return (source, module) + + +def _compile_module_file(template, text, filename, outputpath, module_writer): + source, lexer = _compile( + template, text, filename, generate_magic_comment=True + ) + + if isinstance(source, str): + source = source.encode(lexer.encoding or "ascii") + + if module_writer: + module_writer(source, outputpath) + else: + # make tempfiles in the same location as the ultimate + # location. this ensures they're on the same filesystem, + # avoiding synchronization issues. 
+ (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath)) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + +def _get_module_info_from_callable(callable_): + return _get_module_info(callable_.__globals__["__name__"]) + + +def _get_module_info(filename): + return ModuleInfo._modules[filename] diff --git a/venv/lib/python3.12/site-packages/mako/testing/__init__.py b/venv/lib/python3.12/site-packages/mako/testing/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..dae4c04 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/_config.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/_config.cpython-312.pyc new file mode 100644 index 0000000..09cb1d7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/_config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/assertions.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/assertions.cpython-312.pyc new file mode 100644 index 0000000..553bc27 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/assertions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..f4d4a72 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/exclusions.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/exclusions.cpython-312.pyc new file mode 100644 index 0000000..1a388bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/exclusions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/fixtures.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/fixtures.cpython-312.pyc new file mode 100644 index 0000000..e7b15a8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/fixtures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/__pycache__/helpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/helpers.cpython-312.pyc new file mode 100644 index 0000000..89f79eb Binary files /dev/null and b/venv/lib/python3.12/site-packages/mako/testing/__pycache__/helpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/mako/testing/_config.py b/venv/lib/python3.12/site-packages/mako/testing/_config.py new file mode 100644 index 0000000..4ee3d0a --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/testing/_config.py @@ -0,0 +1,128 @@ +import configparser +import dataclasses +from dataclasses import dataclass +from pathlib import Path +from typing import Callable +from typing import ClassVar +from typing import Optional +from typing import Union + +from .helpers import make_path + + +class ConfigError(BaseException): + pass + + +class MissingConfig(ConfigError): + pass + + 
+class MissingConfigSection(ConfigError): + pass + + +class MissingConfigItem(ConfigError): + pass + + +class ConfigValueTypeError(ConfigError): + pass + + +class _GetterDispatch: + def __init__(self, initialdata, default_getter: Callable): + self.default_getter = default_getter + self.data = initialdata + + def get_fn_for_type(self, type_): + return self.data.get(type_, self.default_getter) + + def get_typed_value(self, type_, name): + get_fn = self.get_fn_for_type(type_) + return get_fn(name) + + +def _parse_cfg_file(filespec: Union[Path, str]): + cfg = configparser.ConfigParser() + try: + filepath = make_path(filespec, check_exists=True) + except FileNotFoundError as e: + raise MissingConfig(f"No config file found at {filespec}") from e + else: + with open(filepath, encoding="utf-8") as f: + cfg.read_file(f) + return cfg + + +def _build_getter(cfg_obj, cfg_section, method, converter=None): + def caller(option, **kwargs): + try: + rv = getattr(cfg_obj, method)(cfg_section, option, **kwargs) + except configparser.NoSectionError as nse: + raise MissingConfigSection( + f"No config section named {cfg_section}" + ) from nse + except configparser.NoOptionError as noe: + raise MissingConfigItem(f"No config item for {option}") from noe + except ValueError as ve: + # ConfigParser.getboolean, .getint, .getfloat raise ValueError + # on bad types + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from ve + else: + if converter: + try: + rv = converter(rv) + except Exception as e: + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from e + return rv + + return caller + + +def _build_getter_dispatch(cfg_obj, cfg_section, converters=None): + converters = converters or {} + + default_getter = _build_getter(cfg_obj, cfg_section, "get") + + # support ConfigParser builtins + getters = { + int: _build_getter(cfg_obj, cfg_section, "getint"), + bool: _build_getter(cfg_obj, cfg_section, "getboolean"), + float: _build_getter(cfg_obj, cfg_section, "getfloat"), + str: default_getter, + } + + # use ConfigParser.get and convert value + getters.update( + { + type_: _build_getter( + cfg_obj, cfg_section, "get", converter=converter_fn + ) + for type_, converter_fn in converters.items() + } + ) + + return _GetterDispatch(getters, default_getter) + + +@dataclass +class ReadsCfg: + section_header: ClassVar[str] + converters: ClassVar[Optional[dict]] = None + + @classmethod + def from_cfg_file(cls, filespec: Union[Path, str]): + cfg = _parse_cfg_file(filespec) + dispatch = _build_getter_dispatch( + cfg, cls.section_header, converters=cls.converters + ) + kwargs = { + field.name: dispatch.get_typed_value(field.type, field.name) + for field in dataclasses.fields(cls) + } + return cls(**kwargs) diff --git a/venv/lib/python3.12/site-packages/mako/testing/assertions.py b/venv/lib/python3.12/site-packages/mako/testing/assertions.py new file mode 100644 index 0000000..22221cd --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/testing/assertions.py @@ -0,0 +1,166 @@ +import contextlib +import re +import sys + + +def eq_(a, b, msg=None): + """Assert a == b, with repr messaging on failure.""" + assert a == b, msg or "%r != %r" % (a, b) + + +def ne_(a, b, msg=None): + """Assert a != b, with repr messaging on failure.""" + assert a != b, msg or "%r == %r" % (a, b) + + +def in_(a, b, msg=None): + """Assert a in b, with repr messaging on failure.""" + assert a in b, msg or "%r not in %r" % (a, b) + + +def not_in(a, b, msg=None): + """Assert a in not b, with repr messaging on failure.""" + 
assert a not in b, msg or "%r is in %r" % (a, b) + + +def _assert_proper_exception_context(exception): + """assert that any exception we're catching does not have a __context__ + without a __cause__, and that __suppress_context__ is never set. + + Python 3 will report nested as exceptions as "during the handling of + error X, error Y occurred". That's not what we want to do. We want + these exceptions in a cause chain. + + """ + + if ( + exception.__context__ is not exception.__cause__ + and not exception.__suppress_context__ + ): + assert False, ( + "Exception %r was correctly raised but did not set a cause, " + "within context %r as its cause." + % (exception, exception.__context__) + ) + + +def _assert_proper_cause_cls(exception, cause_cls): + """assert that any exception we're catching does not have a __context__ + without a __cause__, and that __suppress_context__ is never set. + + Python 3 will report nested as exceptions as "during the handling of + error X, error Y occurred". That's not what we want to do. We want + these exceptions in a cause chain. + + """ + assert isinstance(exception.__cause__, cause_cls), ( + "Exception %r was correctly raised but has cause %r, which does not " + "have the expected cause type %r." + % (exception, exception.__cause__, cause_cls) + ) + + +def assert_raises(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw) + + +def assert_raises_with_proper_context(except_cls, callable_, *args, **kw): + return _assert_raises(except_cls, callable_, args, kw, check_context=True) + + +def assert_raises_with_given_cause( + except_cls, cause_cls, callable_, *args, **kw +): + return _assert_raises(except_cls, callable_, args, kw, cause_cls=cause_cls) + + +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): + return _assert_raises(except_cls, callable_, args, kwargs, msg=msg) + + +def assert_raises_message_with_proper_context( + except_cls, msg, callable_, *args, **kwargs +): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, check_context=True + ) + + +def assert_raises_message_with_given_cause( + except_cls, msg, cause_cls, callable_, *args, **kwargs +): + return _assert_raises( + except_cls, callable_, args, kwargs, msg=msg, cause_cls=cause_cls + ) + + +def _assert_raises( + except_cls, + callable_, + args, + kwargs, + msg=None, + check_context=False, + cause_cls=None, +): + with _expect_raises(except_cls, msg, check_context, cause_cls) as ec: + callable_(*args, **kwargs) + return ec.error + + +class _ErrorContainer: + error = None + + +@contextlib.contextmanager +def _expect_raises(except_cls, msg=None, check_context=False, cause_cls=None): + ec = _ErrorContainer() + if check_context: + are_we_already_in_a_traceback = sys.exc_info()[0] + try: + yield ec + success = False + except except_cls as err: + ec.error = err + success = True + if msg is not None: + # I'm often pdbing here, and "err" above isn't + # in scope, so assign the string explicitly + error_as_string = str(err) + assert re.search(msg, error_as_string, re.UNICODE), "%r !~ %s" % ( + msg, + error_as_string, + ) + if cause_cls is not None: + _assert_proper_cause_cls(err, cause_cls) + if check_context and not are_we_already_in_a_traceback: + _assert_proper_exception_context(err) + print(str(err).encode("utf-8")) + + # it's generally a good idea to not carry traceback objects outside + # of the except: block, but in this case especially we seem to have + # hit some bug in either python 3.10.0b2 or greenlet or both which + # 
this seems to fix: + # https://github.com/python-greenlet/greenlet/issues/242 + del ec + + # assert outside the block so it works for AssertionError too ! + assert success, "Callable did not raise an exception" + + +def expect_raises(except_cls, check_context=False): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message(except_cls, msg, check_context=False): + return _expect_raises(except_cls, msg=msg, check_context=check_context) + + +def expect_raises_with_proper_context(except_cls, check_context=True): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message_with_proper_context( + except_cls, msg, check_context=True +): + return _expect_raises(except_cls, msg=msg, check_context=check_context) diff --git a/venv/lib/python3.12/site-packages/mako/testing/config.py b/venv/lib/python3.12/site-packages/mako/testing/config.py new file mode 100644 index 0000000..b77d0c0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/testing/config.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from pathlib import Path + +from ._config import ReadsCfg +from .helpers import make_path + + +@dataclass +class Config(ReadsCfg): + module_base: Path + template_base: Path + + section_header = "mako_testing" + converters = {Path: make_path} + + +config = Config.from_cfg_file("./setup.cfg") diff --git a/venv/lib/python3.12/site-packages/mako/testing/exclusions.py b/venv/lib/python3.12/site-packages/mako/testing/exclusions.py new file mode 100644 index 0000000..37b2d14 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/testing/exclusions.py @@ -0,0 +1,80 @@ +import pytest + +from mako.ext.beaker_cache import has_beaker +from mako.util import update_wrapper + + +try: + import babel.messages.extract as babel +except ImportError: + babel = None + + +try: + import lingua +except ImportError: + lingua = None + + +try: + import dogpile.cache # noqa +except ImportError: + has_dogpile_cache = False +else: + has_dogpile_cache = True + + +requires_beaker = pytest.mark.skipif( + not has_beaker, reason="Beaker is required for these tests." 
+) + + +requires_babel = pytest.mark.skipif( + babel is None, reason="babel not installed: skipping babelplugin test" +) + + +requires_lingua = pytest.mark.skipif( + lingua is None, reason="lingua not installed: skipping linguaplugin test" +) + + +requires_dogpile_cache = pytest.mark.skipif( + not has_dogpile_cache, + reason="dogpile.cache is required to run these tests", +) + + +def _pygments_version(): + try: + import pygments + + version = pygments.__version__ + except: + version = "0" + return version + + +requires_pygments_14 = pytest.mark.skipif( + _pygments_version() < "1.4", reason="Requires pygments 1.4 or greater" +) + + +# def requires_pygments_14(fn): + +# return skip_if( +# lambda: version < "1.4", "Requires pygments 1.4 or greater" +# )(fn) + + +def requires_no_pygments_exceptions(fn): + def go(*arg, **kw): + from mako import exceptions + + exceptions._install_fallback() + try: + return fn(*arg, **kw) + finally: + exceptions._install_highlighting() + + return update_wrapper(go, fn) diff --git a/venv/lib/python3.12/site-packages/mako/testing/fixtures.py b/venv/lib/python3.12/site-packages/mako/testing/fixtures.py new file mode 100644 index 0000000..01e9961 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/testing/fixtures.py @@ -0,0 +1,119 @@ +import os + +from mako.cache import CacheImpl +from mako.cache import register_plugin +from mako.template import Template +from .assertions import eq_ +from .config import config + + +class TemplateTest: + def _file_template(self, filename, **kw): + filepath = self._file_path(filename) + return Template( + uri=filename, + filename=filepath, + module_directory=config.module_base, + **kw, + ) + + def _file_path(self, filename): + name, ext = os.path.splitext(filename) + py3k_path = os.path.join(config.template_base, name + "_py3k" + ext) + if os.path.exists(py3k_path): + return py3k_path + + return os.path.join(config.template_base, filename) + + def _do_file_test( + self, + filename, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = self._file_template(filename, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_memory_test( + self, + source, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = Template(text=source, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_test( + self, + template, + expected, + filters=None, + template_args=None, + unicode_=True, + ): + if template_args is None: + template_args = {} + if unicode_: + output = template.render_unicode(**template_args) + else: + output = template.render(**template_args) + + if filters: + output = filters(output) + eq_(output, expected) + + def indicates_unbound_local_error(self, rendered_output, unbound_var): + var = f"'{unbound_var}'" + error_msgs = ( + # < 3.11 + f"local variable {var} referenced before assignment", + # >= 3.11 + f"cannot access local variable {var} where it is not associated", + ) + return any((msg in rendered_output) for msg in error_msgs) + + +class PlainCacheImpl(CacheImpl): + """Simple memory cache impl so that tests which + use caching can run without beaker.""" + + def __init__(self, cache): + self.cache = cache + self.data = {} + + def get_or_create(self, key, creation_function, **kw): + if key in self.data: + return self.data[key] + else: + self.data[key] = data = creation_function(**kw) + return data + + def put(self, key, 
value, **kw): + self.data[key] = value + + def get(self, key, **kw): + return self.data[key] + + def invalidate(self, key, **kw): + del self.data[key] + + +register_plugin("plain", __name__, "PlainCacheImpl") diff --git a/venv/lib/python3.12/site-packages/mako/testing/helpers.py b/venv/lib/python3.12/site-packages/mako/testing/helpers.py new file mode 100644 index 0000000..5ae9d38 --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/testing/helpers.py @@ -0,0 +1,71 @@ +import contextlib +import pathlib +from pathlib import Path +import re +import time +from typing import Union +from unittest import mock + + +def flatten_result(result): + return re.sub(r"[\s\r\n]+", " ", result).strip() + + +def result_lines(result): + return [ + x.strip() + for x in re.split(r"\r?\n", re.sub(r" +", " ", result)) + if x.strip() != "" + ] + + +def result_raw_lines(result): + return [x for x in re.split(r"\r?\n", result) if x.strip() != ""] + + +def make_path( + filespec: Union[Path, str], + make_absolute: bool = True, + check_exists: bool = False, +) -> Path: + path = Path(filespec) + if make_absolute: + path = path.resolve(strict=check_exists) + if check_exists and (not path.exists()): + raise FileNotFoundError(f"No file or directory at {filespec}") + return path + + +def _unlink_path(path, missing_ok=False): + # Replicate 3.8+ functionality in 3.7 + cm = contextlib.nullcontext() + if missing_ok: + cm = contextlib.suppress(FileNotFoundError) + + with cm: + path.unlink() + + +def replace_file_with_dir(pathspec): + path = pathlib.Path(pathspec) + _unlink_path(path, missing_ok=True) + path.mkdir(exist_ok=True) + return path + + +def file_with_template_code(filespec): + with open(filespec, "w") as f: + f.write( + """ +i am an artificial template just for you +""" + ) + return filespec + + +@contextlib.contextmanager +def rewind_compile_time(hours=1): + rewound = time.time() - (hours * 3_600) + with mock.patch("mako.codegen.time") as codegen_time: + codegen_time.time.return_value = rewound + yield diff --git a/venv/lib/python3.12/site-packages/mako/util.py b/venv/lib/python3.12/site-packages/mako/util.py new file mode 100644 index 0000000..470713a --- /dev/null +++ b/venv/lib/python3.12/site-packages/mako/util.py @@ -0,0 +1,388 @@ +# mako/util.py +# Copyright 2006-2025 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from ast import parse +import codecs +import collections +import operator +import os +import re +import timeit + +from .compat import importlib_metadata_get + + +def update_wrapper(decorated, fn): + decorated.__wrapped__ = fn + decorated.__name__ = fn.__name__ + return decorated + + +class PluginLoader: + def __init__(self, group): + self.group = group + self.impls = {} + + def load(self, name): + if name in self.impls: + return self.impls[name]() + + for impl in importlib_metadata_get(self.group): + if impl.name == name: + self.impls[name] = impl.load + return impl.load() + + from mako import exceptions + + raise exceptions.RuntimeException( + "Can't load plugin %s %s" % (self.group, name) + ) + + def register(self, name, modulepath, objname): + def load(): + mod = __import__(modulepath) + for token in modulepath.split(".")[1:]: + mod = getattr(mod, token) + return getattr(mod, objname) + + self.impls[name] = load + + +def verify_directory(dir_): + """create and/or verify a filesystem directory.""" + + tries = 0 + + while not os.path.exists(dir_): + try: + tries += 1 + 
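+            # makedirs may race with another process creating the same
+            # path; the bare except below swallows the failure and retries,
+            # giving up after five attempts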
os.makedirs(dir_, 0o755) + except: + if tries > 5: + raise + + +def to_list(x, default=None): + if x is None: + return default + if not isinstance(x, (list, tuple)): + return [x] + else: + return x + + +class memoized_property: + + """A read-only @property that is only evaluated once.""" + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + +class memoized_instancemethod: + + """Decorate a method memoize its return value. + + Best applied to no-arg methods: memoization is not sensitive to + argument values, and will always return the same value even when + called with different arguments. + + """ + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return self + + def oneshot(*args, **kw): + result = self.fget(obj, *args, **kw) + + def memo(*a, **kw): + return result + + memo.__name__ = self.__name__ + memo.__doc__ = self.__doc__ + obj.__dict__[self.__name__] = memo + return result + + oneshot.__name__ = self.__name__ + oneshot.__doc__ = self.__doc__ + return oneshot + + +class SetLikeDict(dict): + + """a dictionary that has some setlike methods on it""" + + def union(self, other): + """produce a 'union' of this dict and another (at the key level). + + values in the second dict take precedence over that of the first""" + x = SetLikeDict(**self) + x.update(other) + return x + + +class FastEncodingBuffer: + + """a very rudimentary buffer that is faster than StringIO, + and supports unicode data.""" + + def __init__(self, encoding=None, errors="strict"): + self.data = collections.deque() + self.encoding = encoding + self.delim = "" + self.errors = errors + self.write = self.data.append + + def truncate(self): + self.data = collections.deque() + self.write = self.data.append + + def getvalue(self): + if self.encoding: + return self.delim.join(self.data).encode( + self.encoding, self.errors + ) + else: + return self.delim.join(self.data) + + +class LRUCache(dict): + + """A dictionary-like object that stores a limited number of items, + discarding lesser used items periodically. + + this is a rewrite of LRUCache from Myghty to use a periodic timestamp-based + paradigm so that synchronization is not really needed. the size management + is inexact. 
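+
+    reads refresh an item's timestamp; once the dict grows past
+    ``capacity * (1 + threshold)`` entries, the least recently used items
+    are pruned back down to ``capacity``, e.g.::
+
+        cache = LRUCache(capacity=100)
+        cache["key"] = "value"   # stored with a timestamp
+        cache["key"]             # reading refreshes the timestamp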
+ """ + + class _Item: + def __init__(self, key, value): + self.key = key + self.value = value + self.timestamp = timeit.default_timer() + + def __repr__(self): + return repr(self.value) + + def __init__(self, capacity, threshold=0.5): + self.capacity = capacity + self.threshold = threshold + + def __getitem__(self, key): + item = dict.__getitem__(self, key) + item.timestamp = timeit.default_timer() + return item.value + + def values(self): + return [i.value for i in dict.values(self)] + + def setdefault(self, key, value): + if key in self: + return self[key] + self[key] = value + return value + + def __setitem__(self, key, value): + item = dict.get(self, key) + if item is None: + item = self._Item(key, value) + dict.__setitem__(self, key, item) + else: + item.value = value + self._manage_size() + + def _manage_size(self): + while len(self) > self.capacity + self.capacity * self.threshold: + bytime = sorted( + dict.values(self), + key=operator.attrgetter("timestamp"), + reverse=True, + ) + for item in bytime[self.capacity :]: + try: + del self[item.key] + except KeyError: + # if we couldn't find a key, most likely some other thread + # broke in on us. loop around and try again + break + + +# Regexp to match python magic encoding line +_PYTHON_MAGIC_COMMENT_re = re.compile( + r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE +) + + +def parse_encoding(fp): + """Deduce the encoding of a Python source file (binary mode) from magic + comment. + + It does this in the same way as the `Python interpreter`__ + + .. __: http://docs.python.org/ref/encodings.html + + The ``fp`` argument should be a seekable file object in binary mode. + """ + pos = fp.tell() + fp.seek(0) + try: + line1 = fp.readline() + has_bom = line1.startswith(codecs.BOM_UTF8) + if has_bom: + line1 = line1[len(codecs.BOM_UTF8) :] + + m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore")) + if not m: + try: + parse(line1.decode("ascii", "ignore")) + except (ImportError, SyntaxError): + # Either it's a real syntax error, in which case the source + # is not valid python source, or line2 is a continuation of + # line1, in which case we don't want to scan line2 for a magic + # comment. + pass + else: + line2 = fp.readline() + m = _PYTHON_MAGIC_COMMENT_re.match( + line2.decode("ascii", "ignore") + ) + + if has_bom: + if m: + raise SyntaxError( + "python refuses to compile code with both a UTF8" + " byte-order-mark and a magic encoding comment" + ) + return "utf_8" + elif m: + return m.group(1) + else: + return None + finally: + fp.seek(pos) + + +def sorted_dict_repr(d): + """repr() a dictionary with the keys in order. + + Used by the lexer unit test to compare parse trees based on strings. 
+ + """ + keys = list(d.keys()) + keys.sort() + return "{" + ", ".join("%r: %r" % (k, d[k]) for k in keys) + "}" + + +def restore__ast(_ast): + """Attempt to restore the required classes to the _ast module if it + appears to be missing them + """ + if hasattr(_ast, "AST"): + return + _ast.PyCF_ONLY_AST = 2 << 9 + m = compile( + """\ +def foo(): pass +class Bar: pass +if False: pass +baz = 'mako' +1 + 2 - 3 * 4 / 5 +6 // 7 % 8 << 9 >> 10 +11 & 12 ^ 13 | 14 +15 and 16 or 17 +-baz + (not +18) - ~17 +baz and 'foo' or 'bar' +(mako is baz == baz) is not baz != mako +mako > baz < mako >= baz <= mako +mako in baz not in mako""", + "", + "exec", + _ast.PyCF_ONLY_AST, + ) + _ast.Module = type(m) + + for cls in _ast.Module.__mro__: + if cls.__name__ == "mod": + _ast.mod = cls + elif cls.__name__ == "AST": + _ast.AST = cls + + _ast.FunctionDef = type(m.body[0]) + _ast.ClassDef = type(m.body[1]) + _ast.If = type(m.body[2]) + + _ast.Name = type(m.body[3].targets[0]) + _ast.Store = type(m.body[3].targets[0].ctx) + _ast.Str = type(m.body[3].value) + + _ast.Sub = type(m.body[4].value.op) + _ast.Add = type(m.body[4].value.left.op) + _ast.Div = type(m.body[4].value.right.op) + _ast.Mult = type(m.body[4].value.right.left.op) + + _ast.RShift = type(m.body[5].value.op) + _ast.LShift = type(m.body[5].value.left.op) + _ast.Mod = type(m.body[5].value.left.left.op) + _ast.FloorDiv = type(m.body[5].value.left.left.left.op) + + _ast.BitOr = type(m.body[6].value.op) + _ast.BitXor = type(m.body[6].value.left.op) + _ast.BitAnd = type(m.body[6].value.left.left.op) + + _ast.Or = type(m.body[7].value.op) + _ast.And = type(m.body[7].value.values[0].op) + + _ast.Invert = type(m.body[8].value.right.op) + _ast.Not = type(m.body[8].value.left.right.op) + _ast.UAdd = type(m.body[8].value.left.right.operand.op) + _ast.USub = type(m.body[8].value.left.left.op) + + _ast.Or = type(m.body[9].value.op) + _ast.And = type(m.body[9].value.values[0].op) + + _ast.IsNot = type(m.body[10].value.ops[0]) + _ast.NotEq = type(m.body[10].value.ops[1]) + _ast.Is = type(m.body[10].value.left.ops[0]) + _ast.Eq = type(m.body[10].value.left.ops[1]) + + _ast.Gt = type(m.body[11].value.ops[0]) + _ast.Lt = type(m.body[11].value.ops[1]) + _ast.GtE = type(m.body[11].value.ops[2]) + _ast.LtE = type(m.body[11].value.ops[3]) + + _ast.In = type(m.body[12].value.ops[0]) + _ast.NotIn = type(m.body[12].value.ops[1]) + + +def read_file(path, mode="rb"): + with open(path, mode) as fp: + return fp.read() + + +def read_python_file(path): + fp = open(path, "rb") + try: + encoding = parse_encoding(fp) + data = fp.read() + if encoding: + data = data.decode(encoding) + return data + finally: + fp.close() diff --git a/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/METADATA b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/METADATA new file mode 100644 index 0000000..0282933 --- /dev/null +++ b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/METADATA @@ -0,0 +1,74 @@ +Metadata-Version: 2.4 +Name: MarkupSafe +Version: 3.0.3 +Summary: Safely add untrusted strings to HTML/XML markup. 
+Maintainer-email: Pallets <contact@palletsprojects.com>
+License-Expression: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/page/changes/
+Project-URL: Source, https://github.com/pallets/markupsafe/
+Project-URL: Chat, https://discord.gg/pallets
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Classifier: Typing :: Typed
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE.txt
+Dynamic: license-file
+
+# MarkupSafe
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+## Examples
+
+```pycon
+>>> from markupsafe import Markup, escape
+
+>>> # escape replaces special characters and wraps in Markup
+>>> escape("<script>alert(document.cookie);</script>")
+Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+>>> # wrap in Markup to mark text "safe" and prevent escaping
+>>> Markup("<strong>Hello</strong>")
+Markup('<strong>hello</strong>')
+
+>>> escape(Markup("<strong>Hello</strong>"))
+Markup('<strong>hello</strong>')
+
+>>> # Markup is a str subclass
+>>> # methods and operators escape their arguments
+>>> template = Markup("Hello <em>{name}</em>")
+>>> template.format(name='"World"')
+Markup('Hello <em>&#34;World&#34;</em>')
+```
+
+## Donate
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+[please donate today][].
+
+[please donate today]: https://palletsprojects.com/donate
+
+## Contributing
+
+See our [detailed contributing documentation][contrib] for many ways to
+contribute, including reporting issues, requesting features, asking or answering
+questions, and making PRs.
+
+[contrib]: https://palletsprojects.com/contributing/
diff --git a/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/RECORD b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/RECORD
new file mode 100644
index 0000000..3903e1a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/RECORD
@@ -0,0 +1,14 @@
+markupsafe-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+markupsafe-3.0.3.dist-info/METADATA,sha256=ErTMYaf6KIz3Zn7j8hN4bZYZ21f7M_9vk0r7y1wS7IE,2690
+markupsafe-3.0.3.dist-info/RECORD,,
+markupsafe-3.0.3.dist-info/WHEEL,sha256=DxRnWQz-Kp9-4a4hdDHsSv0KUC3H7sN9Nbef3-8RjXU,190
+markupsafe-3.0.3.dist-info/licenses/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+markupsafe-3.0.3.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=u1fLcNCx0P8E7LT6z3OXf6ipnSmgm6aefne5y-d7o40,13248
+markupsafe/__pycache__/__init__.cpython-312.pyc,,
+markupsafe/__pycache__/_native.cpython-312.pyc,,
+markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210
+markupsafe/_speedups.c,sha256=t3tC6oVV7-bmKUqvCO5pVSky-G8ACIXpWMaJwkNtJjg,4327
+markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so,sha256=9KMd4HHFZZWQAtspvzbG-NNa55KfZHAK3uukb6T5Da8,44072
+markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41
+markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/WHEEL b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/WHEEL
new file mode 100644
index 0000000..f3e8a97
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/WHEEL
@@ -0,0 +1,7 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-manylinux_2_17_x86_64
+Tag: cp312-cp312-manylinux2014_x86_64
+Tag: cp312-cp312-manylinux_2_28_x86_64
+
diff --git a/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/licenses/LICENSE.txt
new file mode 100644
index
0000000..9d227a0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/licenses/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/venv/lib/python3.12/site-packages/markupsafe-3.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/venv/lib/python3.12/site-packages/markupsafe/__init__.py b/venv/lib/python3.12/site-packages/markupsafe/__init__.py new file mode 100644 index 0000000..4c395d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/markupsafe/__init__.py @@ -0,0 +1,396 @@ +from __future__ import annotations + +import collections.abc as cabc +import string +import typing as t + +try: + from ._speedups import _escape_inner +except ImportError: + from ._native import _escape_inner + +if t.TYPE_CHECKING: + import typing_extensions as te + + +class _HasHTML(t.Protocol): + def __html__(self, /) -> str: ... + + +class _TPEscape(t.Protocol): + def __call__(self, s: t.Any, /) -> Markup: ... + + +def escape(s: t.Any, /) -> Markup: + """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in + the string with HTML-safe sequences. Use this if you need to display + text that might contain such characters in HTML. + + If the object has an ``__html__`` method, it is called and the + return value is assumed to already be safe for HTML. + + :param s: An object to be converted to a string and escaped. + :return: A :class:`Markup` string with the escaped text. + """ + # If the object is already a plain string, skip __html__ check and string + # conversion. This is the most common use case. + # Use type(s) instead of s.__class__ because a proxy object may be reporting + # the __class__ of the proxied value. 
+    if type(s) is str:
+        return Markup(_escape_inner(s))
+
+    if hasattr(s, "__html__"):
+        return Markup(s.__html__())
+
+    return Markup(_escape_inner(str(s)))
+
+
+def escape_silent(s: t.Any | None, /) -> Markup:
+    """Like :func:`escape` but treats ``None`` as the empty string.
+    Useful with optional values, as otherwise you get the string
+    ``'None'`` when the value is ``None``.
+
+    >>> escape(None)
+    Markup('None')
+    >>> escape_silent(None)
+    Markup('')
+    """
+    if s is None:
+        return Markup()
+
+    return escape(s)
+
+
+def soft_str(s: t.Any, /) -> str:
+    """Convert an object to a string if it isn't already. This preserves
+    a :class:`Markup` string rather than converting it back to a basic
+    string, so it will still be marked as safe and won't be escaped
+    again.
+
+    >>> value = escape("<User 1>")
+    >>> value
+    Markup('&lt;User 1&gt;')
+    >>> escape(str(value))
+    Markup('&amp;lt;User 1&amp;gt;')
+    >>> escape(soft_str(value))
+    Markup('&lt;User 1&gt;')
+    """
+    if not isinstance(s, str):
+        return str(s)
+
+    return s
+
+
+class Markup(str):
+    """A string that is ready to be safely inserted into an HTML or XML
+    document, either because it was escaped or because it was marked
+    safe.
+
+    Passing an object to the constructor converts it to text and wraps
+    it to mark it safe without escaping. To escape the text, use the
+    :meth:`escape` class method instead.
+
+    >>> Markup("Hello, <em>World</em>!")
+    Markup('Hello, <em>World</em>!')
+    >>> Markup(42)
+    Markup('42')
+    >>> Markup.escape("Hello, <em>World</em>!")
+    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')
+
+    This implements the ``__html__()`` interface that some frameworks
+    use. Passing an object that implements ``__html__()`` will wrap the
+    output of that method, marking it safe.
+
+    >>> class Foo:
+    ...     def __html__(self):
+    ...         return '<a href="/foo">foo</a>'
+    ...
+    >>> Markup(Foo())
+    Markup('<a href="/foo">foo</a>')
+
+    This is a subclass of :class:`str`. It has the same methods, but
+    escapes their arguments and returns a ``Markup`` instance.
+
+    >>> Markup("<em>%s</em>") % ("foo & bar",)
+    Markup('<em>foo &amp; bar</em>')
+    >>> Markup("<em>Hello</em> ") + "<foo>"
+    Markup('<em>Hello</em> &lt;foo&gt;')
+    """
+
+    __slots__ = ()
+
+    def __new__(
+        cls, object: t.Any = "", encoding: str | None = None, errors: str = "strict"
+    ) -> te.Self:
+        if hasattr(object, "__html__"):
+            object = object.__html__()
+
+        if encoding is None:
+            return super().__new__(cls, object)
+
+        return super().__new__(cls, object, encoding, errors)
+
+    def __html__(self, /) -> te.Self:
+        return self
+
+    def __add__(self, value: str | _HasHTML, /) -> te.Self:
+        if isinstance(value, str) or hasattr(value, "__html__"):
+            return self.__class__(super().__add__(self.escape(value)))
+
+        return NotImplemented
+
+    def __radd__(self, value: str | _HasHTML, /) -> te.Self:
+        if isinstance(value, str) or hasattr(value, "__html__"):
+            return self.escape(value).__add__(self)
+
+        return NotImplemented
+
+    def __mul__(self, value: t.SupportsIndex, /) -> te.Self:
+        return self.__class__(super().__mul__(value))
+
+    def __rmul__(self, value: t.SupportsIndex, /) -> te.Self:
+        return self.__class__(super().__mul__(value))
+
+    def __mod__(self, value: t.Any, /) -> te.Self:
+        if isinstance(value, tuple):
+            # a tuple of arguments, each wrapped
+            value = tuple(_MarkupEscapeHelper(x, self.escape) for x in value)
+        elif hasattr(type(value), "__getitem__") and not isinstance(value, str):
+            # a mapping of arguments, wrapped
+            value = _MarkupEscapeHelper(value, self.escape)
+        else:
+            # a single argument, wrapped with the helper and a tuple
+            value = (_MarkupEscapeHelper(value, self.escape),)
+
+        return self.__class__(super().__mod__(value))
+
+    def __repr__(self, /) -> str:
+        return f"{self.__class__.__name__}({super().__repr__()})"
+
+    def join(self, iterable: cabc.Iterable[str | _HasHTML], /) -> te.Self:
+        return self.__class__(super().join(map(self.escape, iterable)))
+
+    def split(  # type: ignore[override]
+        self, /, sep: str | None = None, maxsplit: t.SupportsIndex = -1
+    ) -> list[te.Self]:
+        return [self.__class__(v) for v in super().split(sep, maxsplit)]
+
+    def rsplit(  # type: ignore[override]
+        self, /, sep: str | None = None, maxsplit: t.SupportsIndex = -1
+    ) -> list[te.Self]:
+        return [self.__class__(v) for v in super().rsplit(sep, maxsplit)]
+
+    def splitlines(  # type: ignore[override]
+        self, /, keepends: bool = False
+    ) -> list[te.Self]:
+        return [self.__class__(v) for v in super().splitlines(keepends)]
+
+    def unescape(self, /) -> str:
+        """Convert escaped markup back into a text string. This replaces
+        HTML entities with the characters they represent.
+
+        >>> Markup("Main &raquo; <em>About</em>").unescape()
+        'Main » <em>About</em>'
+        """
+        from html import unescape
+
+        return unescape(str(self))
+
+    def striptags(self, /) -> str:
+        """:meth:`unescape` the markup, remove tags, and normalize
+        whitespace to single spaces.
+
+        >>> Markup("Main &raquo;\t<em>About</em>").striptags()
+        'Main » About'
+        """
+        value = str(self)
+
+        # Look for comments then tags separately. Otherwise, a comment that
+        # contains a tag would end early, leaving some of the comment behind.
+
+        # keep finding comment start marks
+        while (start := value.find("<!--")) != -1:
+            if (end := value.find("-->", start)) == -1:
+                break
+
+            value = f"{value[:start]}{value[end + 3 :]}"
+
+        # remove tags using the same method
+        while (start := value.find("<")) != -1:
+            if (end := value.find(">", start)) == -1:
+                break
+
+            value = f"{value[:start]}{value[end + 1 :]}"
+
+        # collapse spaces
+        value = " ".join(value.split())
+        return self.__class__(value).unescape()
+
+    @classmethod
+    def escape(cls, s: t.Any, /) -> te.Self:
+        """Escape a string.
Calls :func:`escape` and ensures that for + subclasses the correct type is returned. + """ + rv = escape(s) + + if rv.__class__ is not cls: + return cls(rv) + + return rv # type: ignore[return-value] + + def __getitem__(self, key: t.SupportsIndex | slice, /) -> te.Self: + return self.__class__(super().__getitem__(key)) + + def capitalize(self, /) -> te.Self: + return self.__class__(super().capitalize()) + + def title(self, /) -> te.Self: + return self.__class__(super().title()) + + def lower(self, /) -> te.Self: + return self.__class__(super().lower()) + + def upper(self, /) -> te.Self: + return self.__class__(super().upper()) + + def replace(self, old: str, new: str, count: t.SupportsIndex = -1, /) -> te.Self: + return self.__class__(super().replace(old, self.escape(new), count)) + + def ljust(self, width: t.SupportsIndex, fillchar: str = " ", /) -> te.Self: + return self.__class__(super().ljust(width, self.escape(fillchar))) + + def rjust(self, width: t.SupportsIndex, fillchar: str = " ", /) -> te.Self: + return self.__class__(super().rjust(width, self.escape(fillchar))) + + def lstrip(self, chars: str | None = None, /) -> te.Self: + return self.__class__(super().lstrip(chars)) + + def rstrip(self, chars: str | None = None, /) -> te.Self: + return self.__class__(super().rstrip(chars)) + + def center(self, width: t.SupportsIndex, fillchar: str = " ", /) -> te.Self: + return self.__class__(super().center(width, self.escape(fillchar))) + + def strip(self, chars: str | None = None, /) -> te.Self: + return self.__class__(super().strip(chars)) + + def translate( + self, + table: cabc.Mapping[int, str | int | None], # type: ignore[override] + /, + ) -> str: + return self.__class__(super().translate(table)) + + def expandtabs(self, /, tabsize: t.SupportsIndex = 8) -> te.Self: + return self.__class__(super().expandtabs(tabsize)) + + def swapcase(self, /) -> te.Self: + return self.__class__(super().swapcase()) + + def zfill(self, width: t.SupportsIndex, /) -> te.Self: + return self.__class__(super().zfill(width)) + + def casefold(self, /) -> te.Self: + return self.__class__(super().casefold()) + + def removeprefix(self, prefix: str, /) -> te.Self: + return self.__class__(super().removeprefix(prefix)) + + def removesuffix(self, suffix: str) -> te.Self: + return self.__class__(super().removesuffix(suffix)) + + def partition(self, sep: str, /) -> tuple[te.Self, te.Self, te.Self]: + left, sep, right = super().partition(sep) + cls = self.__class__ + return cls(left), cls(sep), cls(right) + + def rpartition(self, sep: str, /) -> tuple[te.Self, te.Self, te.Self]: + left, sep, right = super().rpartition(sep) + cls = self.__class__ + return cls(left), cls(sep), cls(right) + + def format(self, *args: t.Any, **kwargs: t.Any) -> te.Self: + formatter = EscapeFormatter(self.escape) + return self.__class__(formatter.vformat(self, args, kwargs)) + + def format_map( + self, + mapping: cabc.Mapping[str, t.Any], # type: ignore[override] + /, + ) -> te.Self: + formatter = EscapeFormatter(self.escape) + return self.__class__(formatter.vformat(self, (), mapping)) + + def __html_format__(self, format_spec: str, /) -> te.Self: + if format_spec: + raise ValueError("Unsupported format specification for Markup.") + + return self + + +class EscapeFormatter(string.Formatter): + __slots__ = ("escape",) + + def __init__(self, escape: _TPEscape) -> None: + self.escape: _TPEscape = escape + super().__init__() + + def format_field(self, value: t.Any, format_spec: str) -> str: + if hasattr(value, "__html_format__"): + rv = 
value.__html_format__(format_spec)
+        elif hasattr(value, "__html__"):
+            if format_spec:
+                raise ValueError(
+                    f"Format specifier {format_spec} given, but {type(value)} does not"
+                    " define __html_format__. A class that defines __html__ must define"
+                    " __html_format__ to work with format specifiers."
+                )
+            rv = value.__html__()
+        else:
+            # We need to make sure the format spec is str here as
+            # otherwise the wrong callback methods are invoked.
+            rv = super().format_field(value, str(format_spec))
+        return str(self.escape(rv))
+
+
+class _MarkupEscapeHelper:
+    """Helper for :meth:`Markup.__mod__`."""
+
+    __slots__ = ("obj", "escape")
+
+    def __init__(self, obj: t.Any, escape: _TPEscape) -> None:
+        self.obj: t.Any = obj
+        self.escape: _TPEscape = escape
+
+    def __getitem__(self, key: t.Any, /) -> te.Self:
+        return self.__class__(self.obj[key], self.escape)
+
+    def __str__(self, /) -> str:
+        return str(self.escape(self.obj))
+
+    def __repr__(self, /) -> str:
+        return str(self.escape(repr(self.obj)))
+
+    def __int__(self, /) -> int:
+        return int(self.obj)
+
+    def __float__(self, /) -> float:
+        return float(self.obj)
+
+
+def __getattr__(name: str) -> t.Any:
+    if name == "__version__":
+        import importlib.metadata
+        import warnings
+
+        warnings.warn(
+            "The '__version__' attribute is deprecated and will be removed in"
+            " MarkupSafe 3.1. Use feature detection, or"
+            ' `importlib.metadata.version("markupsafe")`, instead.',
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return importlib.metadata.version("markupsafe")
+
+    raise AttributeError(name)
diff --git a/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..57afa09
Binary files /dev/null and b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc
new file mode 100644
index 0000000..6cf7cee
Binary files /dev/null and b/venv/lib/python3.12/site-packages/markupsafe/__pycache__/_native.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/markupsafe/_native.py b/venv/lib/python3.12/site-packages/markupsafe/_native.py
new file mode 100644
index 0000000..088b3bc
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/markupsafe/_native.py
@@ -0,0 +1,8 @@
+def _escape_inner(s: str, /) -> str:
+    return (
+        s.replace("&", "&amp;")
+        .replace(">", "&gt;")
+        .replace("<", "&lt;")
+        .replace("'", "&#39;")
+        .replace('"', "&#34;")
+    )
diff --git a/venv/lib/python3.12/site-packages/markupsafe/_speedups.c b/venv/lib/python3.12/site-packages/markupsafe/_speedups.c
new file mode 100644
index 0000000..8a315f2
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/markupsafe/_speedups.c
@@ -0,0 +1,200 @@
+#include <Python.h>
+
+#define GET_DELTA(inp, inp_end, delta) \
+    while (inp < inp_end) { \
+        switch (*inp++) { \
+        case '"': \
+        case '\'': \
+        case '&': \
+            delta += 4; \
+            break; \
+        case '<': \
+        case '>': \
+            delta += 3; \
+            break; \
+        } \
+    }
+
+#define DO_ESCAPE(inp, inp_end, outp) \
+    { \
+        Py_ssize_t ncopy = 0; \
+        while (inp < inp_end) { \
+            switch (*inp) { \
+            case '"': \
+                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+                outp += ncopy; ncopy = 0; \
+                *outp++ = '&'; \
+                *outp++ = '#'; \
+                *outp++ = '3'; \
+                *outp++ = '4'; \
+                *outp++ = ';'; \
+                break; \
+            case '\'': \
+                memcpy(outp, inp-ncopy, 
sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '9'; \ + *outp++ = ';'; \ + break; \ + case '&': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'a'; \ + *outp++ = 'm'; \ + *outp++ = 'p'; \ + *outp++ = ';'; \ + break; \ + case '<': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'l'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + case '>': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'g'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + default: \ + ncopy++; \ + } \ + inp++; \ + } \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + } + +static PyObject* +escape_unicode_kind1(PyUnicodeObject *in) +{ + Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in); + Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS1 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, + PyUnicode_IS_ASCII(in) ? 127 : 255); + if (!out) + return NULL; + + inp = PyUnicode_1BYTE_DATA(in); + outp = PyUnicode_1BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode_kind2(PyUnicodeObject *in) +{ + Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in); + Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS2 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535); + if (!out) + return NULL; + + inp = PyUnicode_2BYTE_DATA(in); + outp = PyUnicode_2BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + + +static PyObject* +escape_unicode_kind4(PyUnicodeObject *in) +{ + Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in); + Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS4 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111); + if (!out) + return NULL; + + inp = PyUnicode_4BYTE_DATA(in); + outp = PyUnicode_4BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode(PyObject *self, PyObject *s) +{ + if (!PyUnicode_Check(s)) + return NULL; + + // This check is no longer needed in Python 3.12. 
+ if (PyUnicode_READY(s)) + return NULL; + + switch (PyUnicode_KIND(s)) { + case PyUnicode_1BYTE_KIND: + return escape_unicode_kind1((PyUnicodeObject*) s); + case PyUnicode_2BYTE_KIND: + return escape_unicode_kind2((PyUnicodeObject*) s); + case PyUnicode_4BYTE_KIND: + return escape_unicode_kind4((PyUnicodeObject*) s); + } + assert(0); /* shouldn't happen */ + return NULL; +} + +static PyMethodDef module_methods[] = { + {"_escape_inner", (PyCFunction)escape_unicode, METH_O, NULL}, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static PyModuleDef_Slot module_slots[] = { +#ifdef Py_mod_multiple_interpreters // Python 3.12+ + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, +#endif +#ifdef Py_mod_gil // Python 3.13+ + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, +#endif + {0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef module_definition = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "markupsafe._speedups", + .m_size = 0, + .m_methods = module_methods, + .m_slots = module_slots, +}; + +PyMODINIT_FUNC +PyInit__speedups(void) +{ + return PyModuleDef_Init(&module_definition); +} diff --git a/venv/lib/python3.12/site-packages/markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..6d4b8f9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/markupsafe/_speedups.pyi b/venv/lib/python3.12/site-packages/markupsafe/_speedups.pyi new file mode 100644 index 0000000..8c88858 --- /dev/null +++ b/venv/lib/python3.12/site-packages/markupsafe/_speedups.pyi @@ -0,0 +1 @@ +def _escape_inner(s: str, /) -> str: ... 
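Taken together, the markupsafe hunks above vendor the whole escaping pipeline: `__init__.py` prefers the compiled `_speedups` extension and falls back to the pure-Python `_native` implementation, so the two must escape identically. A minimal sanity-check sketch of that contract, using only names added in the hunks above (illustrative only, not part of the commit):

```python
from markupsafe import Markup, escape, soft_str

# escape() wraps untrusted text in Markup with special characters replaced.
user_input = '<img src=x onerror="alert(1)">'
safe = escape(user_input)
assert isinstance(safe, Markup)
assert "<img" not in str(safe)  # the tag is now &lt;img ...

# Markup's str methods escape their arguments; already-safe values pass through.
row = Markup("<td>{cell}</td>").format(cell=user_input)
assert str(row).startswith("<td>") and "<img" not in str(row)
assert escape(soft_str(safe)) == safe  # soft_str() preserves the "safe" marker

# The pure-Python fallback should agree with the C speedup when it is present.
from markupsafe._native import _escape_inner as py_escape

try:
    from markupsafe._speedups import _escape_inner as c_escape
except ImportError:  # extension not built for this platform
    c_escape = None

if c_escape is not None:
    assert c_escape(user_input) == py_escape(user_input)
```

Shipping both the prebuilt `.so` and `_native.py` mirrors the `try: from ._speedups import _escape_inner` / `except ImportError` block at the top of the vendored `__init__.py`: the package stays usable where the binary does not match the platform, at the cost of slower escaping.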
diff --git a/venv/lib/python3.12/site-packages/markupsafe/py.typed b/venv/lib/python3.12/site-packages/markupsafe/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/METADATA new file mode 100644 index 0000000..1ab8dd6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/METADATA @@ -0,0 +1,149 @@ +Metadata-Version: 2.4 +Name: multidict +Version: 6.7.0 +Summary: multidict implementation +Home-page: https://github.com/aio-libs/multidict +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +License: Apache License 2.0 +Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org +Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org +Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions +Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict +Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/ +Project-URL: Docs: RTD, https://multidict.aio-libs.org +Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/multidict +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11" +Dynamic: license-file + +========= +multidict +========= + +.. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg + :target: https://github.com/aio-libs/multidict/actions + :alt: GitHub status for master branch + +.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg?flag=pytest + :target: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest + :alt: Coverage metrics + +.. image:: https://img.shields.io/pypi/v/multidict.svg + :target: https://pypi.org/project/multidict + :alt: PyPI + +.. image:: https://readthedocs.org/projects/multidict/badge/?version=latest + :target: https://multidict.aio-libs.org + :alt: Read The Docs build status badge + +.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json + :target: https://codspeed.io/aio-libs/multidict + :alt: CodSpeed + +.. image:: https://img.shields.io/pypi/pyversions/multidict.svg + :target: https://pypi.org/project/multidict + :alt: Python versions + +.. 
image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+   :target: https://matrix.to/#/%23aio-libs:matrix.org
+   :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+   :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+   :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Multidict is dict-like collection of *key-value pairs* where key
+might occur more than once in the container.
+
+Introduction
+------------
+
+*HTTP Headers* and *URL query string* require specific data structure:
+*multidict*. It behaves mostly like a regular ``dict`` but it may have
+several *values* for the same *key* and *preserves insertion ordering*.
+
+The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries).
+
+``multidict`` has four multidict classes:
+``MultiDict``, ``MultiDictProxy``, ``CIMultiDict``
+and ``CIMultiDictProxy``.
+
+Immutable proxies (``MultiDictProxy`` and
+``CIMultiDictProxy``) provide a dynamic view for the
+proxied multidict, the view reflects underlying collection changes. They
+implement the ``collections.abc.Mapping`` interface.
+
+Regular mutable (``MultiDict`` and ``CIMultiDict``) classes
+implement ``collections.abc.MutableMapping`` and allows them to change
+their own content.
+
+
+*Case insensitive* (``CIMultiDict`` and
+``CIMultiDictProxy``) assume the *keys* are case
+insensitive, e.g.::
+
+   >>> dct = CIMultiDict(key='val')
+   >>> 'Key' in dct
+   True
+   >>> dct['Key']
+   'val'
+
+*Keys* should be ``str`` or ``istr`` instances.
+
+The library has optional C Extensions for speed.
+
+
+License
+-------
+
+Apache 2
+
+Library Installation
+--------------------
+
+.. code-block:: bash
+
+   $ pip install multidict
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the
+tarball will be used to compile the library from source. It requires a C compiler and
+Python headers to be installed.
+
+To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable,
+e.g.:
+
+.. code-block:: bash
+
+   $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict
+
+Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on
+the usage scenario!!!
+
+For extension development, set the ``MULTIDICT_DEBUG_BUILD`` environment variable to compile
+the extensions in debug mode:
+
+.. code-block:: console
+
+   $ MULTIDICT_DEBUG_BUILD=1 pip install multidict
+
+Changelog
+---------
+See `RTD page <https://multidict.aio-libs.org/en/latest/changes/>`_.
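The semantics this README describes (duplicate keys, preserved insertion order, case-insensitive variants, live read-only proxies) can be exercised in a few lines. A minimal sketch using only the API the README names; the snippet is illustrative and not part of the vendored files:

.. code-block:: python

   from multidict import CIMultiDict, MultiDict, MultiDictProxy

   # A key may occur more than once; insertion order is preserved.
   md = MultiDict([("set-cookie", "a=1"), ("set-cookie", "b=2")])
   md.add("content-type", "text/html")
   assert md.getall("set-cookie") == ["a=1", "b=2"]
   assert md["set-cookie"] == "a=1"  # [] / getone() return the first value

   # CIMultiDict treats keys case-insensitively.
   headers = CIMultiDict(Host="example.com")
   assert headers["host"] == headers["HOST"] == "example.com"

   # A proxy is a read-only, dynamic view of the underlying multidict.
   proxy = MultiDictProxy(md)
   md.add("set-cookie", "c=3")
   assert proxy.getall("set-cookie") == ["a=1", "b=2", "c=3"]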
diff --git a/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/RECORD new file mode 100644 index 0000000..4eb4cf8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/RECORD @@ -0,0 +1,16 @@ +multidict-6.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +multidict-6.7.0.dist-info/METADATA,sha256=q_zO5zLDpLVGLHpjw8BHBRhNX7yuB6_RlehDOZPYrZQ,5321 +multidict-6.7.0.dist-info/RECORD,, +multidict-6.7.0.dist-info/WHEEL,sha256=DxRnWQz-Kp9-4a4hdDHsSv0KUC3H7sN9Nbef3-8RjXU,190 +multidict-6.7.0.dist-info/licenses/LICENSE,sha256=k9Ealo4vDzY3PECBH_bSDhc_WMPKtYhM1mF7v9eVSSo,611 +multidict-6.7.0.dist-info/top_level.txt,sha256=-euDElkk5_qkmfIJ7WiqCab02ZlSFZWynejKg59qZQQ,10 +multidict/__init__.py,sha256=vrqM7ruZH18zqUQumAaWtGekJFYb_oWvThnAdNuAxg4,1228 +multidict/__pycache__/__init__.cpython-312.pyc,, +multidict/__pycache__/_abc.cpython-312.pyc,, +multidict/__pycache__/_compat.cpython-312.pyc,, +multidict/__pycache__/_multidict_py.cpython-312.pyc,, +multidict/_abc.py,sha256=e_0JDJi7E6LWS0A3gUJ17SkgDLlmg8ffjfylTu_vboc,2402 +multidict/_compat.py,sha256=TcRjCStk2iIY1_DwDNj8kNpJRQ9rtLj92Xvk1z2G_ak,422 +multidict/_multidict.cpython-312-x86_64-linux-gnu.so,sha256=xnz-7eI31DiuDjqpB2WfX4zbC-ht1BwpL7gTvIUrrXo,848984 +multidict/_multidict_py.py,sha256=VGQ58P7VOd6lRf3WVAinb62aD16DPdAWRt68qmiJMXE,39955 +multidict/py.typed,sha256=e9bmbH3UFxsabQrnNFPG9qxIXztwbcM6IKDYnvZwprY,15 diff --git a/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/WHEEL new file mode 100644 index 0000000..f3e8a97 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/WHEEL @@ -0,0 +1,7 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp312-cp312-manylinux_2_17_x86_64 +Tag: cp312-cp312-manylinux2014_x86_64 +Tag: cp312-cp312-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..8727172 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE @@ -0,0 +1,13 @@ + Copyright 2016 Andrew Svetlov and aio-libs contributors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/top_level.txt new file mode 100644 index 0000000..afcecdf --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict-6.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +multidict diff --git a/venv/lib/python3.12/site-packages/multidict/__init__.py b/venv/lib/python3.12/site-packages/multidict/__init__.py new file mode 100644 index 0000000..a688932 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict/__init__.py @@ -0,0 +1,60 @@ +""" +Multidict implementation. 
+ +HTTP Headers and URL query string require specific data structure: +multidict. It behaves mostly like a dict but it can have +several values for the same key. +""" + +from typing import TYPE_CHECKING + +from ._abc import MultiMapping, MutableMultiMapping +from ._compat import USE_EXTENSIONS + +__all__ = ( + "CIMultiDict", + "CIMultiDictProxy", + "MultiDict", + "MultiDictProxy", + "MultiMapping", + "MutableMultiMapping", + "getversion", + "istr", + "upstr", +) + +__version__ = "6.7.0" + + +if TYPE_CHECKING or not USE_EXTENSIONS: + from ._multidict_py import ( + CIMultiDict, + CIMultiDictProxy, + MultiDict, + MultiDictProxy, + getversion, + istr, + ) +else: + from collections.abc import ItemsView, KeysView, ValuesView + + from ._multidict import ( + CIMultiDict, + CIMultiDictProxy, + MultiDict, + MultiDictProxy, + _ItemsView, + _KeysView, + _ValuesView, + getversion, + istr, + ) + + MultiMapping.register(MultiDictProxy) + MutableMultiMapping.register(MultiDict) + KeysView.register(_KeysView) + ItemsView.register(_ItemsView) + ValuesView.register(_ValuesView) + + +upstr = istr diff --git a/venv/lib/python3.12/site-packages/multidict/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/multidict/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6afe6c6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multidict/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multidict/__pycache__/_abc.cpython-312.pyc b/venv/lib/python3.12/site-packages/multidict/__pycache__/_abc.cpython-312.pyc new file mode 100644 index 0000000..efdb67d Binary files /dev/null and b/venv/lib/python3.12/site-packages/multidict/__pycache__/_abc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multidict/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/multidict/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..23499a6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multidict/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multidict/__pycache__/_multidict_py.cpython-312.pyc b/venv/lib/python3.12/site-packages/multidict/__pycache__/_multidict_py.cpython-312.pyc new file mode 100644 index 0000000..3910d7e Binary files /dev/null and b/venv/lib/python3.12/site-packages/multidict/__pycache__/_multidict_py.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multidict/_abc.py b/venv/lib/python3.12/site-packages/multidict/_abc.py new file mode 100644 index 0000000..54253e9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict/_abc.py @@ -0,0 +1,73 @@ +import abc +from collections.abc import Iterable, Mapping, MutableMapping +from typing import TYPE_CHECKING, Protocol, TypeVar, Union, overload + +if TYPE_CHECKING: + from ._multidict_py import istr +else: + istr = str + +_V = TypeVar("_V") +_V_co = TypeVar("_V_co", covariant=True) +_T = TypeVar("_T") + + +class SupportsKeys(Protocol[_V_co]): + def keys(self) -> Iterable[str]: ... + def __getitem__(self, key: str, /) -> _V_co: ... + + +class SupportsIKeys(Protocol[_V_co]): + def keys(self) -> Iterable[istr]: ... + def __getitem__(self, key: istr, /) -> _V_co: ... + + +MDArg = Union[SupportsKeys[_V], SupportsIKeys[_V], Iterable[tuple[str, _V]], None] + + +class MultiMapping(Mapping[str, _V_co]): + @overload + def getall(self, key: str) -> list[_V_co]: ... + @overload + def getall(self, key: str, default: _T) -> Union[list[_V_co], _T]: ... 
+ @abc.abstractmethod + def getall(self, key: str, default: _T = ...) -> Union[list[_V_co], _T]: + """Return all values for key.""" + + @overload + def getone(self, key: str) -> _V_co: ... + @overload + def getone(self, key: str, default: _T) -> Union[_V_co, _T]: ... + @abc.abstractmethod + def getone(self, key: str, default: _T = ...) -> Union[_V_co, _T]: + """Return first value for key.""" + + +class MutableMultiMapping(MultiMapping[_V], MutableMapping[str, _V]): + @abc.abstractmethod + def add(self, key: str, value: _V) -> None: + """Add value to list.""" + + @abc.abstractmethod + def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: + """Add everything from arg and kwargs to the mapping.""" + + @abc.abstractmethod + def merge(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: + """Merge into the mapping, adding non-existing keys.""" + + @overload + def popone(self, key: str) -> _V: ... + @overload + def popone(self, key: str, default: _T) -> Union[_V, _T]: ... + @abc.abstractmethod + def popone(self, key: str, default: _T = ...) -> Union[_V, _T]: + """Remove specified key and return the corresponding value.""" + + @overload + def popall(self, key: str) -> list[_V]: ... + @overload + def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ... + @abc.abstractmethod + def popall(self, key: str, default: _T = ...) -> Union[list[_V], _T]: + """Remove all occurrences of key and return the list of corresponding values.""" diff --git a/venv/lib/python3.12/site-packages/multidict/_compat.py b/venv/lib/python3.12/site-packages/multidict/_compat.py new file mode 100644 index 0000000..264d327 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict/_compat.py @@ -0,0 +1,15 @@ +import os +import platform + +NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS")) + +PYPY = platform.python_implementation() == "PyPy" + +USE_EXTENSIONS = not NO_EXTENSIONS and not PYPY + +if USE_EXTENSIONS: + try: + from . import _multidict # type: ignore[attr-defined] # noqa: F401 + except ImportError: # pragma: no cover + # FIXME: Refactor for coverage. See #837. 
+ USE_EXTENSIONS = False diff --git a/venv/lib/python3.12/site-packages/multidict/_multidict.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/multidict/_multidict.cpython-312-x86_64-linux-gnu.so new file mode 100644 index 0000000..2c5eec4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multidict/_multidict.cpython-312-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.12/site-packages/multidict/_multidict_py.py b/venv/lib/python3.12/site-packages/multidict/_multidict_py.py new file mode 100644 index 0000000..6b68d52 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multidict/_multidict_py.py @@ -0,0 +1,1242 @@ +import enum +import functools +import reprlib +import sys +from array import array +from collections.abc import ( + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + ValuesView, +) +from dataclasses import dataclass +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Generic, + NoReturn, + Optional, + TypeVar, + Union, + cast, + overload, +) + +from ._abc import MDArg, MultiMapping, MutableMultiMapping, SupportsKeys + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + + +class istr(str): + """Case insensitive str.""" + + __is_istr__ = True + __istr_identity__: Optional[str] = None + + +_V = TypeVar("_V") +_T = TypeVar("_T") + +_SENTINEL = enum.Enum("_SENTINEL", "sentinel") +sentinel = _SENTINEL.sentinel + +_version = array("Q", [0]) + + +class _Iter(Generic[_T]): + __slots__ = ("_size", "_iter") + + def __init__(self, size: int, iterator: Iterator[_T]): + self._size = size + self._iter = iterator + + def __iter__(self) -> Self: + return self + + def __next__(self) -> _T: + return next(self._iter) + + def __length_hint__(self) -> int: + return self._size + + +class _ViewBase(Generic[_V]): + def __init__( + self, + md: "MultiDict[_V]", + ): + self._md = md + + def __len__(self) -> int: + return len(self._md) + + +class _ItemsView(_ViewBase[_V], ItemsView[str, _V]): + def __contains__(self, item: object) -> bool: + if not isinstance(item, (tuple, list)) or len(item) != 2: + return False + key, value = item + try: + identity = self._md._identity(key) + except TypeError: + return False + hash_ = hash(identity) + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity and value == e.value: + return True + return False + + def __iter__(self) -> _Iter[tuple[str, _V]]: + return _Iter(len(self), self._iter(self._md._version)) + + def _iter(self, version: int) -> Iterator[tuple[str, _V]]: + for e in self._md._keys.iter_entries(): + if version != self._md._version: + raise RuntimeError("Dictionary changed during iteration") + yield self._md._key(e.key), e.value + + @reprlib.recursive_repr() + def __repr__(self) -> str: + lst = [] + for e in self._md._keys.iter_entries(): + lst.append(f"'{e.key}': {e.value!r}") + body = ", ".join(lst) + return f"<{self.__class__.__name__}({body})>" + + def _parse_item( + self, arg: Union[tuple[str, _V], _T] + ) -> Optional[tuple[int, str, str, _V]]: + if not isinstance(arg, tuple): + return None + if len(arg) != 2: + return None + try: + identity = self._md._identity(arg[0]) + return (hash(identity), identity, arg[0], arg[1]) + except TypeError: + return None + + def _tmp_set(self, it: Iterable[_T]) -> set[tuple[str, _V]]: + tmp = set() + for arg in it: + item = self._parse_item(arg) + if item is None: + continue + else: + tmp.add((item[1], item[3])) + return tmp + + def __and__(self, other: Iterable[Any]) -> set[tuple[str, 
_V]]: + ret = set() + try: + it = iter(other) + except TypeError: + return NotImplemented + for arg in it: + item = self._parse_item(arg) + if item is None: + continue + hash_, identity, key, value = item + for slot, idx, e in self._md._keys.iter_hash(hash_): + e.hash = -1 + if e.identity == identity and e.value == value: + ret.add((e.key, e.value)) + self._md._keys.restore_hash(hash_) + return ret + + def __rand__(self, other: Iterable[_T]) -> set[_T]: + ret = set() + try: + it = iter(other) + except TypeError: + return NotImplemented + for arg in it: + item = self._parse_item(arg) + if item is None: + continue + hash_, identity, key, value = item + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity and e.value == value: + ret.add(arg) + break + return ret + + def __or__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: + ret: set[Union[tuple[str, _V], _T]] = set(self) + try: + it = iter(other) + except TypeError: + return NotImplemented + for arg in it: + item: Optional[tuple[int, str, str, _V]] = self._parse_item(arg) + if item is None: + ret.add(arg) + continue + hash_, identity, key, value = item + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity and e.value == value: # pragma: no branch + break + else: + ret.add(arg) + return ret + + def __ror__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: + try: + ret: set[Union[tuple[str, _V], _T]] = set(other) + except TypeError: + return NotImplemented + tmp = self._tmp_set(ret) + + for e in self._md._keys.iter_entries(): + if (e.identity, e.value) not in tmp: + ret.add((e.key, e.value)) + return ret + + def __sub__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: + ret: set[Union[tuple[str, _V], _T]] = set() + try: + it = iter(other) + except TypeError: + return NotImplemented + tmp = self._tmp_set(it) + + for e in self._md._keys.iter_entries(): + if (e.identity, e.value) not in tmp: + ret.add((e.key, e.value)) + + return ret + + def __rsub__(self, other: Iterable[_T]) -> set[_T]: + ret: set[_T] = set() + try: + it = iter(other) + except TypeError: + return NotImplemented + for arg in it: + item = self._parse_item(arg) + if item is None: + ret.add(arg) + continue + + hash_, identity, key, value = item + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity and e.value == value: # pragma: no branch + break + else: + ret.add(arg) + return ret + + def __xor__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]: + try: + rgt = set(other) + except TypeError: + return NotImplemented + ret: set[Union[tuple[str, _V], _T]] = self - rgt + ret |= rgt - self + return ret + + __rxor__ = __xor__ + + def isdisjoint(self, other: Iterable[tuple[str, _V]]) -> bool: + for arg in other: + item = self._parse_item(arg) + if item is None: + continue + + hash_, identity, key, value = item + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity and e.value == value: # pragma: no branch + return False + return True + + +class _ValuesView(_ViewBase[_V], ValuesView[_V]): + def __contains__(self, value: object) -> bool: + for e in self._md._keys.iter_entries(): + if e.value == value: + return True + return False + + def __iter__(self) -> _Iter[_V]: + return _Iter(len(self), self._iter(self._md._version)) + + def _iter(self, version: int) -> Iterator[_V]: + for e in self._md._keys.iter_entries(): + if version != self._md._version: + raise RuntimeError("Dictionary changed during iteration") + yield e.value + + 
@reprlib.recursive_repr() + def __repr__(self) -> str: + lst = [] + for e in self._md._keys.iter_entries(): + lst.append(repr(e.value)) + body = ", ".join(lst) + return f"<{self.__class__.__name__}({body})>" + + +class _KeysView(_ViewBase[_V], KeysView[str]): + def __contains__(self, key: object) -> bool: + if not isinstance(key, str): + return False + identity = self._md._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + return True + return False + + def __iter__(self) -> _Iter[str]: + return _Iter(len(self), self._iter(self._md._version)) + + def _iter(self, version: int) -> Iterator[str]: + for e in self._md._keys.iter_entries(): + if version != self._md._version: + raise RuntimeError("Dictionary changed during iteration") + yield self._md._key(e.key) + + def __repr__(self) -> str: + lst = [] + for e in self._md._keys.iter_entries(): + lst.append(f"'{e.key}'") + body = ", ".join(lst) + return f"<{self.__class__.__name__}({body})>" + + def __and__(self, other: Iterable[object]) -> set[str]: + ret = set() + try: + it = iter(other) + except TypeError: + return NotImplemented + for key in it: + if not isinstance(key, str): + continue + identity = self._md._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + ret.add(e.key) + break + return ret + + def __rand__(self, other: Iterable[_T]) -> set[_T]: + ret = set() + try: + it = iter(other) + except TypeError: + return NotImplemented + for key in it: + if not isinstance(key, str): + continue + if key in self._md: + ret.add(key) + return cast(set[_T], ret) + + def __or__(self, other: Iterable[_T]) -> set[Union[str, _T]]: + ret: set[Union[str, _T]] = set(self) + try: + it = iter(other) + except TypeError: + return NotImplemented + for key in it: + if not isinstance(key, str): + ret.add(key) + continue + if key not in self._md: + ret.add(key) + return ret + + def __ror__(self, other: Iterable[_T]) -> set[Union[str, _T]]: + try: + ret: set[Union[str, _T]] = set(other) + except TypeError: + return NotImplemented + + tmp = set() + for key in ret: + if not isinstance(key, str): + continue + identity = self._md._identity(key) + tmp.add(identity) + + for e in self._md._keys.iter_entries(): + if e.identity not in tmp: + ret.add(e.key) + return ret + + def __sub__(self, other: Iterable[object]) -> set[str]: + ret = set(self) + try: + it = iter(other) + except TypeError: + return NotImplemented + for key in it: + if not isinstance(key, str): + continue + identity = self._md._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._md._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + ret.discard(e.key) + break + return ret + + def __rsub__(self, other: Iterable[_T]) -> set[_T]: + try: + ret: set[_T] = set(other) + except TypeError: + return NotImplemented + for key in other: + if not isinstance(key, str): + continue + if key in self._md: + ret.discard(key) # type: ignore[arg-type] + return ret + + def __xor__(self, other: Iterable[_T]) -> set[Union[str, _T]]: + try: + rgt = set(other) + except TypeError: + return NotImplemented + ret: set[Union[str, _T]] = self - rgt # type: ignore[assignment] + ret |= rgt - self + return ret + + __rxor__ = __xor__ + + def isdisjoint(self, other: Iterable[object]) -> bool: + for key in other: + if not isinstance(key, str): + continue + if key in self._md: + return False + return True + + +class _CSMixin: 
+ _ci: ClassVar[bool] = False + + def _key(self, key: str) -> str: + return key + + def _identity(self, key: str) -> str: + if isinstance(key, str): + return key + else: + raise TypeError("MultiDict keys should be either str or subclasses of str") + + +class _CIMixin: + _ci: ClassVar[bool] = True + + def _key(self, key: str) -> str: + if type(key) is istr: + return key + else: + return istr(key) + + def _identity(self, key: str) -> str: + if isinstance(key, istr): + ret = key.__istr_identity__ + if ret is None: + ret = key.lower() + key.__istr_identity__ = ret + return ret + if isinstance(key, str): + return key.lower() + else: + raise TypeError("MultiDict keys should be either str or subclasses of str") + + +def estimate_log2_keysize(n: int) -> int: + # 7 == HT_MINSIZE - 1 + return (((n * 3 + 1) // 2) | 7).bit_length() + + +@dataclass +class _Entry(Generic[_V]): + hash: int + identity: str + key: str + value: _V + + +@dataclass +class _HtKeys(Generic[_V]): # type: ignore[misc] + LOG_MINSIZE: ClassVar[int] = 3 + MINSIZE: ClassVar[int] = 8 + PREALLOCATED_INDICES: ClassVar[dict[int, array]] = { # type: ignore[type-arg] + log2_size: array( + "b" if log2_size < 8 else "h", (-1 for i in range(1 << log2_size)) + ) + for log2_size in range(3, 10) + } + + log2_size: int + usable: int + + indices: array # type: ignore[type-arg] # in py3.9 array is not generic + entries: list[Optional[_Entry[_V]]] + + @functools.cached_property + def nslots(self) -> int: + return 1 << self.log2_size + + @functools.cached_property + def mask(self) -> int: + return self.nslots - 1 + + if sys.implementation.name != "pypy": + + def __sizeof__(self) -> int: + return ( + object.__sizeof__(self) + + sys.getsizeof(self.indices) + + sys.getsizeof(self.entries) + ) + + @classmethod + def new(cls, log2_size: int, entries: list[Optional[_Entry[_V]]]) -> Self: + size = 1 << log2_size + usable = (size << 1) // 3 + if log2_size < 10: + indices = cls.PREALLOCATED_INDICES[log2_size].__copy__() + elif log2_size < 16: + indices = array("h", (-1 for i in range(size))) + elif log2_size < 32: + indices = array("l", (-1 for i in range(size))) + else: # pragma: no cover # don't test huge multidicts + indices = array("q", (-1 for i in range(size))) + ret = cls( + log2_size=log2_size, + usable=usable, + indices=indices, + entries=entries, + ) + return ret + + def clone(self) -> "_HtKeys[_V]": + entries = [ + _Entry(e.hash, e.identity, e.key, e.value) if e is not None else None + for e in self.entries + ] + + return _HtKeys( + log2_size=self.log2_size, + usable=self.usable, + indices=self.indices.__copy__(), + entries=entries, + ) + + def build_indices(self, update: bool) -> None: + mask = self.mask + indices = self.indices + for idx, e in enumerate(self.entries): + assert e is not None + hash_ = e.hash + if update: + if hash_ == -1: + hash_ = hash(e.identity) + else: + assert hash_ != -1 + i = hash_ & mask + perturb = hash_ & sys.maxsize + while indices[i] != -1: + perturb >>= 5 + i = mask & (i * 5 + perturb + 1) + indices[i] = idx + + def find_empty_slot(self, hash_: int) -> int: + mask = self.mask + indices = self.indices + i = hash_ & mask + perturb = hash_ & sys.maxsize + ix = indices[i] + while ix != -1: + perturb >>= 5 + i = (i * 5 + perturb + 1) & mask + ix = indices[i] + return i + + def iter_hash(self, hash_: int) -> Iterator[tuple[int, int, _Entry[_V]]]: + mask = self.mask + indices = self.indices + entries = self.entries + i = hash_ & mask + perturb = hash_ & sys.maxsize + ix = indices[i] + while ix != -1: + if ix != -2: + e = 
entries[ix] + if e.hash == hash_: + yield i, ix, e + perturb >>= 5 + i = (i * 5 + perturb + 1) & mask + ix = indices[i] + + def del_idx(self, hash_: int, idx: int) -> None: + mask = self.mask + indices = self.indices + i = hash_ & mask + perturb = hash_ & sys.maxsize + ix = indices[i] + while ix != idx: + perturb >>= 5 + i = (i * 5 + perturb + 1) & mask + ix = indices[i] + indices[i] = -2 + + def iter_entries(self) -> Iterator[_Entry[_V]]: + return filter(None, self.entries) + + def restore_hash(self, hash_: int) -> None: + mask = self.mask + indices = self.indices + entries = self.entries + i = hash_ & mask + perturb = hash_ & sys.maxsize + ix = indices[i] + while ix != -1: + if ix != -2: + entry = entries[ix] + if entry.hash == -1: + entry.hash = hash_ + perturb >>= 5 + i = (i * 5 + perturb + 1) & mask + ix = indices[i] + + +class MultiDict(_CSMixin, MutableMultiMapping[_V]): + """Dictionary with the support for duplicate keys.""" + + __slots__ = ("_keys", "_used", "_version") + + def __init__(self, arg: MDArg[_V] = None, /, **kwargs: _V): + self._used = 0 + v = _version + v[0] += 1 + self._version = v[0] + if not kwargs: + md = None + if isinstance(arg, MultiDictProxy): + md = arg._md + elif isinstance(arg, MultiDict): + md = arg + if md is not None and md._ci is self._ci: + self._from_md(md) + return + + it = self._parse_args(arg, kwargs) + log2_size = estimate_log2_keysize(cast(int, next(it))) + if log2_size > 17: # pragma: no cover + # Don't overallocate really huge keys space in init + log2_size = 17 + self._keys: _HtKeys[_V] = _HtKeys.new(log2_size, []) + self._extend_items(cast(Iterator[_Entry[_V]], it)) + + def _from_md(self, md: "MultiDict[_V]") -> None: + # Copy everything as-is without compacting the new multidict, + # otherwise it requires reindexing + self._keys = md._keys.clone() + self._used = md._used + + @overload + def getall(self, key: str) -> list[_V]: ... + @overload + def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ... + def getall( + self, key: str, default: Union[_T, _SENTINEL] = sentinel + ) -> Union[list[_V], _T]: + """Return a list of all values matching the key.""" + identity = self._identity(key) + hash_ = hash(identity) + res = [] + restore = [] + for slot, idx, e in self._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + res.append(e.value) + e.hash = -1 + restore.append(idx) + + if res: + entries = self._keys.entries + for idx in restore: + entries[idx].hash = hash_ # type: ignore[union-attr] + return res + if not res and default is not sentinel: + return default + raise KeyError("Key not found: %r" % key) + + @overload + def getone(self, key: str) -> _V: ... + @overload + def getone(self, key: str, default: _T) -> Union[_V, _T]: ... + def getone( + self, key: str, default: Union[_T, _SENTINEL] = sentinel + ) -> Union[_V, _T]: + """Get first value matching the key. + + Raises KeyError if the key is not found and no default is provided. + """ + identity = self._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + return e.value + if default is not sentinel: + return default + raise KeyError("Key not found: %r" % key) + + # Mapping interface # + + def __getitem__(self, key: str) -> _V: + return self.getone(key) + + @overload + def get(self, key: str, /) -> Union[_V, None]: ... + @overload + def get(self, key: str, /, default: _T) -> Union[_V, _T]: ... 
+ def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]: + """Get first value matching the key. + + If the key is not found, returns the default (or None if no default is provided) + """ + return self.getone(key, default) + + def __iter__(self) -> Iterator[str]: + return iter(self.keys()) + + def __len__(self) -> int: + return self._used + + def keys(self) -> KeysView[str]: + """Return a new view of the dictionary's keys.""" + return _KeysView(self) + + def items(self) -> ItemsView[str, _V]: + """Return a new view of the dictionary's items *(key, value) pairs).""" + return _ItemsView(self) + + def values(self) -> _ValuesView[_V]: + """Return a new view of the dictionary's values.""" + return _ValuesView(self) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Mapping): + return NotImplemented + if isinstance(other, MultiDictProxy): + return self == other._md + if isinstance(other, MultiDict): + lft = self._keys + rht = other._keys + if self._used != other._used: + return False + for e1, e2 in zip(lft.iter_entries(), rht.iter_entries()): + if e1.identity != e2.identity or e1.value != e2.value: + return False + return True + if self._used != len(other): + return False + for k, v in self.items(): + nv = other.get(k, sentinel) + if v != nv: + return False + return True + + def __contains__(self, key: object) -> bool: + if not isinstance(key, str): + return False + identity = self._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + return True + return False + + @reprlib.recursive_repr() + def __repr__(self) -> str: + body = ", ".join(f"'{e.key}': {e.value!r}" for e in self._keys.iter_entries()) + return f"<{self.__class__.__name__}({body})>" + + if sys.implementation.name != "pypy": + + def __sizeof__(self) -> int: + return object.__sizeof__(self) + sys.getsizeof(self._keys) + + def __reduce__(self) -> tuple[type[Self], tuple[list[tuple[str, _V]]]]: + return (self.__class__, (list(self.items()),)) + + def add(self, key: str, value: _V) -> None: + identity = self._identity(key) + hash_ = hash(identity) + self._add_with_hash(_Entry(hash_, identity, key, value)) + self._incr_version() + + def copy(self) -> Self: + """Return a copy of itself.""" + cls = self.__class__ + return cls(self) + + __copy__ = copy + + def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None: + """Extend current MultiDict with more values. + + This method must be used instead of update. 
+ """ + it = self._parse_args(arg, kwargs) + newsize = self._used + cast(int, next(it)) + self._resize(estimate_log2_keysize(newsize), False) + self._extend_items(cast(Iterator[_Entry[_V]], it)) + + def _parse_args( + self, + arg: MDArg[_V], + kwargs: Mapping[str, _V], + ) -> Iterator[Union[int, _Entry[_V]]]: + identity_func = self._identity + if arg: + if isinstance(arg, MultiDictProxy): + arg = arg._md + if isinstance(arg, MultiDict): + yield len(arg) + len(kwargs) + if self._ci is not arg._ci: + for e in arg._keys.iter_entries(): + identity = identity_func(e.key) + yield _Entry(hash(identity), identity, e.key, e.value) + else: + for e in arg._keys.iter_entries(): + yield _Entry(e.hash, e.identity, e.key, e.value) + if kwargs: + for key, value in kwargs.items(): + identity = identity_func(key) + yield _Entry(hash(identity), identity, key, value) + else: + if hasattr(arg, "keys"): + arg = cast(SupportsKeys[_V], arg) + arg = [(k, arg[k]) for k in arg.keys()] + if kwargs: + arg = list(arg) + arg.extend(list(kwargs.items())) + try: + yield len(arg) + len(kwargs) # type: ignore[arg-type] + except TypeError: + yield 0 + for pos, item in enumerate(arg): + if not len(item) == 2: + raise ValueError( + f"multidict update sequence element #{pos}" + f"has length {len(item)}; 2 is required" + ) + identity = identity_func(item[0]) + yield _Entry(hash(identity), identity, item[0], item[1]) + else: + yield len(kwargs) + for key, value in kwargs.items(): + identity = identity_func(key) + yield _Entry(hash(identity), identity, key, value) + + def _extend_items(self, items: Iterable[_Entry[_V]]) -> None: + for e in items: + self._add_with_hash(e) + self._incr_version() + + def clear(self) -> None: + """Remove all items from MultiDict.""" + self._used = 0 + self._keys = _HtKeys.new(_HtKeys.LOG_MINSIZE, []) + self._incr_version() + + # Mapping interface # + + def __setitem__(self, key: str, value: _V) -> None: + identity = self._identity(key) + hash_ = hash(identity) + found = False + + for slot, idx, e in self._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + if not found: + e.key = key + e.value = value + e.hash = -1 + found = True + self._incr_version() + elif e.hash != -1: # pragma: no branch + self._del_at(slot, idx) + + if not found: + self._add_with_hash(_Entry(hash_, identity, key, value)) + else: + self._keys.restore_hash(hash_) + + def __delitem__(self, key: str) -> None: + found = False + identity = self._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + self._del_at(slot, idx) + found = True + if not found: + raise KeyError(key) + else: + self._incr_version() + + @overload + def setdefault( + self: "MultiDict[Union[_T, None]]", key: str, default: None = None + ) -> Union[_T, None]: ... + @overload + def setdefault(self, key: str, default: _V) -> _V: ... + def setdefault(self, key: str, default: Union[_V, None] = None) -> Union[_V, None]: # type: ignore[misc] + """Return value for key, set value to default if key is not present.""" + identity = self._identity(key) + hash_ = hash(identity) + for slot, idx, e in self._keys.iter_hash(hash_): + if e.identity == identity: # pragma: no branch + return e.value + self.add(key, default) # type: ignore[arg-type] + return default + + @overload + def popone(self, key: str) -> _V: ... + @overload + def popone(self, key: str, default: _T) -> Union[_V, _T]: ... 
+    def popone(
+        self, key: str, default: Union[_T, _SENTINEL] = sentinel
+    ) -> Union[_V, _T]:
+        """Remove specified key and return the corresponding value.
+
+        If key is not found, default is returned if given, otherwise
+        KeyError is raised.
+
+        """
+        identity = self._identity(key)
+        hash_ = hash(identity)
+        for slot, idx, e in self._keys.iter_hash(hash_):
+            if e.identity == identity:  # pragma: no branch
+                value = e.value
+                self._del_at(slot, idx)
+                self._incr_version()
+                return value
+        if default is sentinel:
+            raise KeyError(key)
+        else:
+            return default
+
+    # Type checking will inherit signature for pop() if we don't confuse it here.
+    if not TYPE_CHECKING:
+        pop = popone
+
+    @overload
+    def popall(self, key: str) -> list[_V]: ...
+    @overload
+    def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
+    def popall(
+        self, key: str, default: Union[_T, _SENTINEL] = sentinel
+    ) -> Union[list[_V], _T]:
+        """Remove all occurrences of key and return the list of corresponding
+        values.
+
+        If key is not found, default is returned if given, otherwise
+        KeyError is raised.
+
+        """
+        found = False
+        identity = self._identity(key)
+        hash_ = hash(identity)
+        ret = []
+        for slot, idx, e in self._keys.iter_hash(hash_):
+            if e.identity == identity:  # pragma: no branch
+                found = True
+                ret.append(e.value)
+                self._del_at(slot, idx)
+                self._incr_version()
+
+        if not found:
+            if default is sentinel:
+                raise KeyError(key)
+            else:
+                return default
+        else:
+            return ret
+
+    def popitem(self) -> tuple[str, _V]:
+        """Remove and return an arbitrary (key, value) pair."""
+        if self._used <= 0:
+            raise KeyError("empty multidict")
+
+        pos = len(self._keys.entries) - 1
+        entry = self._keys.entries.pop()
+
+        while entry is None:
+            pos -= 1
+            entry = self._keys.entries.pop()
+
+        ret = self._key(entry.key), entry.value
+        self._keys.del_idx(entry.hash, pos)
+        self._used -= 1
+        self._incr_version()
+        return ret
+
+    def update(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
+        """Update the dictionary, overwriting existing keys."""
+        it = self._parse_args(arg, kwargs)
+        newsize = self._used + cast(int, next(it))
+        log2_size = estimate_log2_keysize(newsize)
+        if log2_size > 17:  # pragma: no cover
+            # Don't overallocate really huge keys space in update;
+            # duplicate keys could reduce the resulting amount of entries
+            log2_size = 17
+        if log2_size > self._keys.log2_size:
+            self._resize(log2_size, False)
+        try:
+            self._update_items(cast(Iterator[_Entry[_V]], it))
+        finally:
+            self._post_update()
+
+    def _update_items(self, items: Iterator[_Entry[_V]]) -> None:
+        for entry in items:
+            found = False
+            hash_ = entry.hash
+            identity = entry.identity
+            for slot, idx, e in self._keys.iter_hash(hash_):
+                if e.identity == identity:  # pragma: no branch
+                    if not found:
+                        found = True
+                        e.key = entry.key
+                        e.value = entry.value
+                        e.hash = -1
+                    else:
+                        self._del_at_for_upd(e)
+            if not found:
+                self._add_with_hash_for_upd(entry)
+
+    def _post_update(self) -> None:
+        keys = self._keys
+        indices = keys.indices
+        entries = keys.entries
+        for slot in range(keys.nslots):
+            idx = indices[slot]
+            if idx >= 0:
+                e2 = entries[idx]
+                assert e2 is not None
+                if e2.key is None:
+                    entries[idx] = None
+                    indices[slot] = -2
+                    self._used -= 1
+                if e2.hash == -1:
+                    e2.hash = hash(e2.identity)
+
+        self._incr_version()
+
+    def merge(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
+        """Merge into the dictionary, adding non-existing keys."""
+        it = self._parse_args(arg, kwargs)
+        newsize = self._used + cast(int, next(it))
+        log2_size = estimate_log2_keysize(newsize)
+        if log2_size > 17:  # pragma: no cover
+            # Don't overallocate really huge keys space in merge;
+            # duplicate keys could reduce the resulting amount of entries
+            log2_size = 17
+        if log2_size > self._keys.log2_size:
+            self._resize(log2_size, False)
+        try:
+            self._merge_items(cast(Iterator[_Entry[_V]], it))
+        finally:
+            self._post_update()
+
+    def _merge_items(self, items: Iterator[_Entry[_V]]) -> None:
+        for entry in items:
+            hash_ = entry.hash
+            identity = entry.identity
+            for slot, idx, e in self._keys.iter_hash(hash_):
+                if e.identity == identity:  # pragma: no branch
+                    break
+            else:
+                self._add_with_hash_for_upd(entry)
+
+    def _incr_version(self) -> None:
+        v = _version
+        v[0] += 1
+        self._version = v[0]
+
+    def _resize(self, log2_newsize: int, update: bool) -> None:
+        oldkeys = self._keys
+        newentries = self._used
+
+        if len(oldkeys.entries) == newentries:
+            entries = oldkeys.entries
+        else:
+            entries = [e for e in oldkeys.entries if e is not None]
+        newkeys: _HtKeys[_V] = _HtKeys.new(log2_newsize, entries)
+        newkeys.usable -= newentries
+        newkeys.build_indices(update)
+        self._keys = newkeys
+
+    def _add_with_hash(self, entry: _Entry[_V]) -> None:
+        if self._keys.usable <= 0:
+            self._resize((self._used * 3 | _HtKeys.MINSIZE - 1).bit_length(), False)
+        keys = self._keys
+        slot = keys.find_empty_slot(entry.hash)
+        keys.indices[slot] = len(keys.entries)
+        keys.entries.append(entry)
+        self._incr_version()
+        self._used += 1
+        keys.usable -= 1
+
+    def _add_with_hash_for_upd(self, entry: _Entry[_V]) -> None:
+        if self._keys.usable <= 0:
+            self._resize((self._used * 3 | _HtKeys.MINSIZE - 1).bit_length(), True)
+        keys = self._keys
+        slot = keys.find_empty_slot(entry.hash)
+        keys.indices[slot] = len(keys.entries)
+        entry.hash = -1
+        keys.entries.append(entry)
+        self._incr_version()
+        self._used += 1
+        keys.usable -= 1
+
+    def _del_at(self, slot: int, idx: int) -> None:
+        self._keys.entries[idx] = None
+        self._keys.indices[slot] = -2
+        self._used -= 1
+
+    def _del_at_for_upd(self, entry: _Entry[_V]) -> None:
+        entry.key = None  # type: ignore[assignment]
+        entry.value = None  # type: ignore[assignment]
+
+
+class CIMultiDict(_CIMixin, MultiDict[_V]):
+    """Dictionary with support for duplicate case-insensitive keys."""
+
+
+class MultiDictProxy(_CSMixin, MultiMapping[_V]):
+    """Read-only proxy for MultiDict instance."""
+
+    __slots__ = ("_md",)
+
+    _md: MultiDict[_V]
+
+    def __init__(self, arg: Union[MultiDict[_V], "MultiDictProxy[_V]"]):
+        if not isinstance(arg, (MultiDict, MultiDictProxy)):
+            raise TypeError(
+                f"ctor requires MultiDict or MultiDictProxy instance, not {type(arg)}"
+            )
+        if isinstance(arg, MultiDictProxy):
+            self._md = arg._md
+        else:
+            self._md = arg
+
+    def __reduce__(self) -> NoReturn:
+        raise TypeError(f"can't pickle {self.__class__.__name__} objects")
+
+    @overload
+    def getall(self, key: str) -> list[_V]: ...
+    @overload
+    def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
+    def getall(
+        self, key: str, default: Union[_T, _SENTINEL] = sentinel
+    ) -> Union[list[_V], _T]:
+        """Return a list of all values matching the key."""
+        if default is not sentinel:
+            return self._md.getall(key, default)
+        else:
+            return self._md.getall(key)
+
+    @overload
+    def getone(self, key: str) -> _V: ...
+    @overload
+    def getone(self, key: str, default: _T) -> Union[_V, _T]: ...
+    def getone(
+        self, key: str, default: Union[_T, _SENTINEL] = sentinel
+    ) -> Union[_V, _T]:
+        """Get first value matching the key.
+
+        Raises KeyError if the key is not found and no default is provided.
+        """
+        if default is not sentinel:
+            return self._md.getone(key, default)
+        else:
+            return self._md.getone(key)
+
+    # Mapping interface #
+
+    def __getitem__(self, key: str) -> _V:
+        return self.getone(key)
+
+    @overload
+    def get(self, key: str, /) -> Union[_V, None]: ...
+    @overload
+    def get(self, key: str, /, default: _T) -> Union[_V, _T]: ...
+    def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]:
+        """Get first value matching the key.
+
+        If the key is not found, returns the default (or None if no default
+        is provided).
+        """
+        return self._md.getone(key, default)
+
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._md.keys())
+
+    def __len__(self) -> int:
+        return len(self._md)
+
+    def keys(self) -> KeysView[str]:
+        """Return a new view of the dictionary's keys."""
+        return self._md.keys()
+
+    def items(self) -> ItemsView[str, _V]:
+        """Return a new view of the dictionary's items ((key, value) pairs)."""
+        return self._md.items()
+
+    def values(self) -> _ValuesView[_V]:
+        """Return a new view of the dictionary's values."""
+        return self._md.values()
+
+    def __eq__(self, other: object) -> bool:
+        return self._md == other
+
+    def __contains__(self, key: object) -> bool:
+        return key in self._md
+
+    @reprlib.recursive_repr()
+    def __repr__(self) -> str:
+        body = ", ".join(f"'{k}': {v!r}" for k, v in self.items())
+        return f"<{self.__class__.__name__}({body})>"
+
+    def copy(self) -> MultiDict[_V]:
+        """Return a copy of itself."""
+        return MultiDict(self._md)
+
+
+class CIMultiDictProxy(_CIMixin, MultiDictProxy[_V]):
+    """Read-only proxy for CIMultiDict instance."""
+
+    def __init__(self, arg: Union[MultiDict[_V], MultiDictProxy[_V]]):
+        if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)):
+            raise TypeError(
+                "ctor requires CIMultiDict or CIMultiDictProxy instance"
+                f", not {type(arg)}"
+            )
+
+        super().__init__(arg)
+
+    def copy(self) -> CIMultiDict[_V]:
+        """Return a copy of itself."""
+        return CIMultiDict(self._md)
+
+
+def getversion(md: Union[MultiDict[object], MultiDictProxy[object]]) -> int:
+    if isinstance(md, MultiDictProxy):
+        md = md._md
+    elif not isinstance(md, MultiDict):
+        raise TypeError("Parameter should be multidict or proxy")
+    return md._version
diff --git a/venv/lib/python3.12/site-packages/multidict/py.typed b/venv/lib/python3.12/site-packages/multidict/py.typed
new file mode 100644
index 0000000..dfe8cc0
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/multidict/py.typed
@@ -0,0 +1 @@
+PEP-561 marker.
\ No newline at end of file
diff --git a/venv/lib/python3.12/site-packages/multipart/__init__.py b/venv/lib/python3.12/site-packages/multipart/__init__.py
new file mode 100644
index 0000000..309d698
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/multipart/__init__.py
@@ -0,0 +1,15 @@
+# This is the canonical package information.
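+#
+# Usage sketch (illustrative, not part of the upstream file): parse_form()
+# re-exported below is the highest-level entry point; assuming a dict-like
+# `headers` carrying Content-Type and a readable `input_stream`, a caller
+# might do something like:
+#
+#     from multipart import parse_form
+#     parse_form(headers, input_stream, on_field=handle_field,
+#                on_file=handle_file)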
+__author__ = 'Andrew Dunham' +__license__ = 'Apache' +__copyright__ = "Copyright (c) 2012-2013, Andrew Dunham" +__version__ = "0.0.6" + + +from .multipart import ( + FormParser, + MultipartParser, + QuerystringParser, + OctetStreamParser, + create_form_parser, + parse_form, +) diff --git a/venv/lib/python3.12/site-packages/multipart/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f343a25 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/__pycache__/decoders.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/__pycache__/decoders.cpython-312.pyc new file mode 100644 index 0000000..3d47860 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/__pycache__/decoders.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..163aa5e Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/__pycache__/multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/__pycache__/multipart.cpython-312.pyc new file mode 100644 index 0000000..bf91e2a Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/__pycache__/multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/decoders.py b/venv/lib/python3.12/site-packages/multipart/decoders.py new file mode 100644 index 0000000..0d7ab32 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/decoders.py @@ -0,0 +1,171 @@ +import base64 +import binascii + +from .exceptions import DecodeError + + +class Base64Decoder: + """This object provides an interface to decode a stream of Base64 data. It + is instantiated with an "underlying object", and whenever a write() + operation is performed, it will decode the incoming data as Base64, and + call write() on the underlying object. This is primarily used for decoding + form data encoded as Base64, but can be used for other purposes:: + + from multipart.decoders import Base64Decoder + fd = open("notb64.txt", "wb") + decoder = Base64Decoder(fd) + try: + decoder.write("Zm9vYmFy") # "foobar" in Base64 + decoder.finalize() + finally: + decoder.close() + + # The contents of "notb64.txt" should be "foobar". + + This object will also pass all finalize() and close() calls to the + underlying object, if the underlying object supports them. + + Note that this class maintains a cache of base64 chunks, so that a write of + arbitrary size can be performed. You must call :meth:`finalize` on this + object after all writes are completed to ensure that all data is flushed + to the underlying object. + + :param underlying: the underlying object to pass writes to + """ + + def __init__(self, underlying): + self.cache = bytearray() + self.underlying = underlying + + def write(self, data): + """Takes any input data provided, decodes it as base64, and passes it + on to the underlying object. If the data provided is invalid base64 + data, then this method will raise + a :class:`multipart.exceptions.DecodeError` + + :param data: base64 data to decode + """ + + # Prepend any cache info to our data. 
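+        # (base64 decodes in 4-byte groups, so any tail that is not a
+        # multiple of 4 bytes is held back in self.cache below and either
+        # decoded on a later write() or rejected in finalize().)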
+ if len(self.cache) > 0: + data = self.cache + data + + # Slice off a string that's a multiple of 4. + decode_len = (len(data) // 4) * 4 + val = data[:decode_len] + + # Decode and write, if we have any. + if len(val) > 0: + try: + decoded = base64.b64decode(val) + except binascii.Error: + raise DecodeError('There was an error raised while decoding ' + 'base64-encoded data.') + + self.underlying.write(decoded) + + # Get the remaining bytes and save in our cache. + remaining_len = len(data) % 4 + if remaining_len > 0: + self.cache = data[-remaining_len:] + else: + self.cache = b'' + + # Return the length of the data to indicate no error. + return len(data) + + def close(self): + """Close this decoder. If the underlying object has a `close()` + method, this function will call it. + """ + if hasattr(self.underlying, 'close'): + self.underlying.close() + + def finalize(self): + """Finalize this object. This should be called when no more data + should be written to the stream. This function can raise a + :class:`multipart.exceptions.DecodeError` if there is some remaining + data in the cache. + + If the underlying object has a `finalize()` method, this function will + call it. + """ + if len(self.cache) > 0: + raise DecodeError('There are %d bytes remaining in the ' + 'Base64Decoder cache when finalize() is called' + % len(self.cache)) + + if hasattr(self.underlying, 'finalize'): + self.underlying.finalize() + + def __repr__(self): + return f"{self.__class__.__name__}(underlying={self.underlying!r})" + + +class QuotedPrintableDecoder: + """This object provides an interface to decode a stream of quoted-printable + data. It is instantiated with an "underlying object", in the same manner + as the :class:`multipart.decoders.Base64Decoder` class. This class behaves + in exactly the same way, including maintaining a cache of quoted-printable + chunks. + + :param underlying: the underlying object to pass writes to + """ + def __init__(self, underlying): + self.cache = b'' + self.underlying = underlying + + def write(self, data): + """Takes any input data provided, decodes it as quoted-printable, and + passes it on to the underlying object. + + :param data: quoted-printable data to decode + """ + # Prepend any cache info to our data. + if len(self.cache) > 0: + data = self.cache + data + + # If the last 2 characters have an '=' sign in it, then we won't be + # able to decode the encoded value and we'll need to save it for the + # next decoding step. + if data[-2:].find(b'=') != -1: + enc, rest = data[:-2], data[-2:] + else: + enc = data + rest = b'' + + # Encode and write, if we have data. + if len(enc) > 0: + self.underlying.write(binascii.a2b_qp(enc)) + + # Save remaining in cache. + self.cache = rest + return len(data) + + def close(self): + """Close this decoder. If the underlying object has a `close()` + method, this function will call it. + """ + if hasattr(self.underlying, 'close'): + self.underlying.close() + + def finalize(self): + """Finalize this object. This should be called when no more data + should be written to the stream. This function will not raise any + exceptions, but it may write more data to the underlying object if + there is data remaining in the cache. + + If the underlying object has a `finalize()` method, this function will + call it. + """ + # If we have a cache, write and then remove it. + if len(self.cache) > 0: + self.underlying.write(binascii.a2b_qp(self.cache)) + self.cache = b'' + + # Finalize our underlying stream. 
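+        # (note: unlike Base64Decoder.finalize(), leftover quoted-printable
+        # bytes are flushed to the underlying object here rather than being
+        # treated as an error.)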
+        if hasattr(self.underlying, 'finalize'):
+            self.underlying.finalize()
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}(underlying={self.underlying!r})"
diff --git a/venv/lib/python3.12/site-packages/multipart/exceptions.py b/venv/lib/python3.12/site-packages/multipart/exceptions.py
new file mode 100644
index 0000000..016e7f7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/multipart/exceptions.py
@@ -0,0 +1,46 @@
+class FormParserError(ValueError):
+    """Base error class for our form parser."""
+    pass
+
+
+class ParseError(FormParserError):
+    """This exception (or a subclass) is raised when there is an error while
+    parsing something.
+    """
+
+    #: This is the offset in the input data chunk (*NOT* the overall stream) in
+    #: which the parse error occurred. It will be -1 if not specified.
+    offset = -1
+
+
+class MultipartParseError(ParseError):
+    """This is a specific error that is raised when the MultipartParser detects
+    an error while parsing.
+    """
+    pass
+
+
+class QuerystringParseError(ParseError):
+    """This is a specific error that is raised when the QuerystringParser
+    detects an error while parsing.
+    """
+    pass
+
+
+class DecodeError(ParseError):
+    """This exception is raised when there is a decoding error - for example
+    with the Base64Decoder or QuotedPrintableDecoder.
+    """
+    pass
+
+
+# On Python 3.3, IOError is the same as OSError, so we don't want to inherit
+# from both of them. We handle this case below.
+if IOError is not OSError:  # pragma: no cover
+    class FileError(FormParserError, IOError, OSError):
+        """Exception class for problems with the File class."""
+        pass
+else:  # pragma: no cover
+    class FileError(FormParserError, OSError):
+        """Exception class for problems with the File class."""
+        pass
diff --git a/venv/lib/python3.12/site-packages/multipart/multipart.py b/venv/lib/python3.12/site-packages/multipart/multipart.py
new file mode 100644
index 0000000..a9f1f9f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/multipart/multipart.py
@@ -0,0 +1,1893 @@
+from .decoders import *
+from .exceptions import *
+
+import os
+import re
+import sys
+import shutil
+import logging
+import tempfile
+from io import BytesIO
+from numbers import Number
+
+# Unique missing object.
+_missing = object()
+
+# States for the querystring parser.
+STATE_BEFORE_FIELD = 0
+STATE_FIELD_NAME = 1
+STATE_FIELD_DATA = 2
+
+# States for the multipart parser
+STATE_START = 0
+STATE_START_BOUNDARY = 1
+STATE_HEADER_FIELD_START = 2
+STATE_HEADER_FIELD = 3
+STATE_HEADER_VALUE_START = 4
+STATE_HEADER_VALUE = 5
+STATE_HEADER_VALUE_ALMOST_DONE = 6
+STATE_HEADERS_ALMOST_DONE = 7
+STATE_PART_DATA_START = 8
+STATE_PART_DATA = 9
+STATE_PART_DATA_END = 10
+STATE_END = 11
+
+STATES = [
+    "START",
+    "START_BOUNDARY", "HEADER_FIELD_START", "HEADER_FIELD", "HEADER_VALUE_START", "HEADER_VALUE",
+    "HEADER_VALUE_ALMOST_DONE", "HEADERS_ALMOST_DONE", "PART_DATA_START", "PART_DATA", "PART_DATA_END", "END"
+]
+
+
+# Flags for the multipart parser.
+FLAG_PART_BOUNDARY = 1
+FLAG_LAST_BOUNDARY = 2
+
+# Get constants. Since iterating over a str on Python 2 gives you a 1-length
+# string, but iterating over a bytes object on Python 3 gives you an integer,
+# we need to save these constants.
+CR = b'\r'[0]
+LF = b'\n'[0]
+COLON = b':'[0]
+SPACE = b' '[0]
+HYPHEN = b'-'[0]
+AMPERSAND = b'&'[0]
+SEMICOLON = b';'[0]
+LOWER_A = b'a'[0]
+LOWER_Z = b'z'[0]
+NULL = b'\x00'[0]
+
+# Lower-casing a character is different, because of the difference between
+# str on Py2, and bytes on Py3.
Same with getting the ordinal value of a byte, +# and joining a list of bytes together. +# These functions abstract that. +lower_char = lambda c: c | 0x20 +ord_char = lambda c: c +join_bytes = lambda b: bytes(list(b)) + +# These are regexes for parsing header values. +SPECIAL_CHARS = re.escape(b'()<>@,;:\\"/[]?={} \t') +QUOTED_STR = br'"(?:\\.|[^"])*"' +VALUE_STR = br'(?:[^' + SPECIAL_CHARS + br']+|' + QUOTED_STR + br')' +OPTION_RE_STR = ( + br'(?:;|^)\s*([^' + SPECIAL_CHARS + br']+)\s*=\s*(' + VALUE_STR + br')' +) +OPTION_RE = re.compile(OPTION_RE_STR) +QUOTE = b'"'[0] + + +def parse_options_header(value): + """ + Parses a Content-Type header into a value in the following format: + (content_type, {parameters}) + """ + if not value: + return (b'', {}) + + # If we are passed a string, we assume that it conforms to WSGI and does + # not contain any code point that's not in latin-1. + if isinstance(value, str): # pragma: no cover + value = value.encode('latin-1') + + # If we have no options, return the string as-is. + if b';' not in value: + return (value.lower().strip(), {}) + + # Split at the first semicolon, to get our value and then options. + ctype, rest = value.split(b';', 1) + options = {} + + # Parse the options. + for match in OPTION_RE.finditer(rest): + key = match.group(1).lower() + value = match.group(2) + if value[0] == QUOTE and value[-1] == QUOTE: + # Unquote the value. + value = value[1:-1] + value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"') + + # If the value is a filename, we need to fix a bug on IE6 that sends + # the full file path instead of the filename. + if key == b'filename': + if value[1:3] == b':\\' or value[:2] == b'\\\\': + value = value.split(b'\\')[-1] + + options[key] = value + + return ctype, options + + +class Field: + """A Field object represents a (parsed) form field. It represents a single + field with a corresponding name and value. + + The name that a :class:`Field` will be instantiated with is the same name + that would be found in the following HTML:: + + + + This class defines two methods, :meth:`on_data` and :meth:`on_end`, that + will be called when data is written to the Field, and when the Field is + finalized, respectively. + + :param name: the name of the form field + """ + def __init__(self, name): + self._name = name + self._value = [] + + # We cache the joined version of _value for speed. + self._cache = _missing + + @classmethod + def from_value(klass, name, value): + """Create an instance of a :class:`Field`, and set the corresponding + value - either None or an actual value. This method will also + finalize the Field itself. + + :param name: the name of the form field + :param value: the value of the form field - either a bytestring or + None + """ + + f = klass(name) + if value is None: + f.set_none() + else: + f.write(value) + f.finalize() + return f + + def write(self, data): + """Write some data into the form field. + + :param data: a bytestring + """ + return self.on_data(data) + + def on_data(self, data): + """This method is a callback that will be called whenever data is + written to the Field. + + :param data: a bytestring + """ + self._value.append(data) + self._cache = _missing + return len(data) + + def on_end(self): + """This method is called whenever the Field is finalized. + """ + if self._cache is _missing: + self._cache = b''.join(self._value) + + def finalize(self): + """Finalize the form field. + """ + self.on_end() + + def close(self): + """Close the Field object. This will free any underlying cache. 
+ """ + # Free our value array. + if self._cache is _missing: + self._cache = b''.join(self._value) + + del self._value + + def set_none(self): + """Some fields in a querystring can possibly have a value of None - for + example, the string "foo&bar=&baz=asdf" will have a field with the + name "foo" and value None, one with name "bar" and value "", and one + with name "baz" and value "asdf". Since the write() interface doesn't + support writing None, this function will set the field value to None. + """ + self._cache = None + + @property + def field_name(self): + """This property returns the name of the field.""" + return self._name + + @property + def value(self): + """This property returns the value of the form field.""" + if self._cache is _missing: + self._cache = b''.join(self._value) + + return self._cache + + def __eq__(self, other): + if isinstance(other, Field): + return ( + self.field_name == other.field_name and + self.value == other.value + ) + else: + return NotImplemented + + def __repr__(self): + if len(self.value) > 97: + # We get the repr, and then insert three dots before the final + # quote. + v = repr(self.value[:97])[:-1] + "...'" + else: + v = repr(self.value) + + return "{}(field_name={!r}, value={})".format( + self.__class__.__name__, + self.field_name, + v + ) + + +class File: + """This class represents an uploaded file. It handles writing file data to + either an in-memory file or a temporary file on-disk, if the optional + threshold is passed. + + There are some options that can be passed to the File to change behavior + of the class. Valid options are as follows: + + .. list-table:: + :widths: 15 5 5 30 + :header-rows: 1 + + * - Name + - Type + - Default + - Description + * - UPLOAD_DIR + - `str` + - None + - The directory to store uploaded files in. If this is None, a + temporary file will be created in the system's standard location. + * - UPLOAD_DELETE_TMP + - `bool` + - True + - Delete automatically created TMP file + * - UPLOAD_KEEP_FILENAME + - `bool` + - False + - Whether or not to keep the filename of the uploaded file. If True, + then the filename will be converted to a safe representation (e.g. + by removing any invalid path segments), and then saved with the + same name). Otherwise, a temporary name will be used. + * - UPLOAD_KEEP_EXTENSIONS + - `bool` + - False + - Whether or not to keep the uploaded file's extension. If False, the + file will be saved with the default temporary extension (usually + ".tmp"). Otherwise, the file's extension will be maintained. Note + that this will properly combine with the UPLOAD_KEEP_FILENAME + setting. + * - MAX_MEMORY_FILE_SIZE + - `int` + - 1 MiB + - The maximum number of bytes of a File to keep in memory. By + default, the contents of a File are kept into memory until a certain + limit is reached, after which the contents of the File are written + to a temporary file. This behavior can be disabled by setting this + value to an appropriately large value (or, for example, infinity, + such as `float('inf')`. + + :param file_name: The name of the file that this :class:`File` represents + + :param field_name: The field name that uploaded this file. Note that this + can be None, if, for example, the file was uploaded + with Content-Type application/octet-stream + + :param config: The configuration for this File. See above for valid + configuration keys and their corresponding values. + """ + def __init__(self, file_name, field_name=None, config={}): + # Save configuration, set other variables default. 
+ self.logger = logging.getLogger(__name__) + self._config = config + self._in_memory = True + self._bytes_written = 0 + self._fileobj = BytesIO() + + # Save the provided field/file name. + self._field_name = field_name + self._file_name = file_name + + # Our actual file name is None by default, since, depending on our + # config, we may not actually use the provided name. + self._actual_file_name = None + + # Split the extension from the filename. + if file_name is not None: + base, ext = os.path.splitext(file_name) + self._file_base = base + self._ext = ext + + @property + def field_name(self): + """The form field associated with this file. May be None if there isn't + one, for example when we have an application/octet-stream upload. + """ + return self._field_name + + @property + def file_name(self): + """The file name given in the upload request. + """ + return self._file_name + + @property + def actual_file_name(self): + """The file name that this file is saved as. Will be None if it's not + currently saved on disk. + """ + return self._actual_file_name + + @property + def file_object(self): + """The file object that we're currently writing to. Note that this + will either be an instance of a :class:`io.BytesIO`, or a regular file + object. + """ + return self._fileobj + + @property + def size(self): + """The total size of this file, counted as the number of bytes that + currently have been written to the file. + """ + return self._bytes_written + + @property + def in_memory(self): + """A boolean representing whether or not this file object is currently + stored in-memory or on-disk. + """ + return self._in_memory + + def flush_to_disk(self): + """If the file is already on-disk, do nothing. Otherwise, copy from + the in-memory buffer to a disk file, and then reassign our internal + file object to this new disk file. + + Note that if you attempt to flush a file that is already on-disk, a + warning will be logged to this module's logger. + """ + if not self._in_memory: + self.logger.warning( + "Trying to flush to disk when we're not in memory" + ) + return + + # Go back to the start of our file. + self._fileobj.seek(0) + + # Open a new file. + new_file = self._get_disk_file() + + # Copy the file objects. + shutil.copyfileobj(self._fileobj, new_file) + + # Seek to the new position in our new file. + new_file.seek(self._bytes_written) + + # Reassign the fileobject. + old_fileobj = self._fileobj + self._fileobj = new_file + + # We're no longer in memory. + self._in_memory = False + + # Close the old file object. + old_fileobj.close() + + def _get_disk_file(self): + """This function is responsible for getting a file object on-disk for us. + """ + self.logger.info("Opening a file on disk") + + file_dir = self._config.get('UPLOAD_DIR') + keep_filename = self._config.get('UPLOAD_KEEP_FILENAME', False) + keep_extensions = self._config.get('UPLOAD_KEEP_EXTENSIONS', False) + delete_tmp = self._config.get('UPLOAD_DELETE_TMP', True) + + # If we have a directory and are to keep the filename... + if file_dir is not None and keep_filename: + self.logger.info("Saving with filename in: %r", file_dir) + + # Build our filename. + # TODO: what happens if we don't have a filename? 
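+            # (self._file_base and self._ext are only assigned in __init__
+            # when file_name is not None, so this branch implicitly assumes
+            # a filename was supplied.)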
+ fname = self._file_base + if keep_extensions: + fname = fname + self._ext + + path = os.path.join(file_dir, fname) + try: + self.logger.info("Opening file: %r", path) + tmp_file = open(path, 'w+b') + except OSError as e: + tmp_file = None + + self.logger.exception("Error opening temporary file") + raise FileError("Error opening temporary file: %r" % path) + else: + # Build options array. + # Note that on Python 3, tempfile doesn't support byte names. We + # encode our paths using the default filesystem encoding. + options = {} + if keep_extensions: + ext = self._ext + if isinstance(ext, bytes): + ext = ext.decode(sys.getfilesystemencoding()) + + options['suffix'] = ext + if file_dir is not None: + d = file_dir + if isinstance(d, bytes): + d = d.decode(sys.getfilesystemencoding()) + + options['dir'] = d + options['delete'] = delete_tmp + + # Create a temporary (named) file with the appropriate settings. + self.logger.info("Creating a temporary file with options: %r", + options) + try: + tmp_file = tempfile.NamedTemporaryFile(**options) + except OSError: + self.logger.exception("Error creating named temporary file") + raise FileError("Error creating named temporary file") + + fname = tmp_file.name + + # Encode filename as bytes. + if isinstance(fname, str): + fname = fname.encode(sys.getfilesystemencoding()) + + self._actual_file_name = fname + return tmp_file + + def write(self, data): + """Write some data to the File. + + :param data: a bytestring + """ + return self.on_data(data) + + def on_data(self, data): + """This method is a callback that will be called whenever data is + written to the File. + + :param data: a bytestring + """ + pos = self._fileobj.tell() + bwritten = self._fileobj.write(data) + # true file objects write returns None + if bwritten is None: + bwritten = self._fileobj.tell() - pos + + # If the bytes written isn't the same as the length, just return. + if bwritten != len(data): + self.logger.warning("bwritten != len(data) (%d != %d)", bwritten, + len(data)) + return bwritten + + # Keep track of how many bytes we've written. + self._bytes_written += bwritten + + # If we're in-memory and are over our limit, we create a file. + if (self._in_memory and + self._config.get('MAX_MEMORY_FILE_SIZE') is not None and + (self._bytes_written > + self._config.get('MAX_MEMORY_FILE_SIZE'))): + self.logger.info("Flushing to disk") + self.flush_to_disk() + + # Return the number of bytes written. + return bwritten + + def on_end(self): + """This method is called whenever the Field is finalized. + """ + # Flush the underlying file object + self._fileobj.flush() + + def finalize(self): + """Finalize the form file. This will not close the underlying file, + but simply signal that we are finished writing to the File. + """ + self.on_end() + + def close(self): + """Close the File object. This will actually close the underlying + file object (whether it's a :class:`io.BytesIO` or an actual file + object). + """ + self._fileobj.close() + + def __repr__(self): + return "{}(file_name={!r}, field_name={!r})".format( + self.__class__.__name__, + self.file_name, + self.field_name + ) + + +class BaseParser: + """This class is the base class for all parsers. It contains the logic for + calling and adding callbacks. + + A callback can be one of two different forms. "Notification callbacks" are + callbacks that are called when something happens - for example, when a new + part of a multipart message is encountered by the parser. 
"Data callbacks" + are called when we get some sort of data - for example, part of the body of + a multipart chunk. Notification callbacks are called with no parameters, + whereas data callbacks are called with three, as follows:: + + data_callback(data, start, end) + + The "data" parameter is a bytestring (i.e. "foo" on Python 2, or b"foo" on + Python 3). "start" and "end" are integer indexes into the "data" string + that represent the data of interest. Thus, in a data callback, the slice + `data[start:end]` represents the data that the callback is "interested in". + The callback is not passed a copy of the data, since copying severely hurts + performance. + """ + def __init__(self): + self.logger = logging.getLogger(__name__) + + def callback(self, name, data=None, start=None, end=None): + """This function calls a provided callback with some data. If the + callback is not set, will do nothing. + + :param name: The name of the callback to call (as a string). + + :param data: Data to pass to the callback. If None, then it is + assumed that the callback is a notification callback, + and no parameters are given. + + :param end: An integer that is passed to the data callback. + + :param start: An integer that is passed to the data callback. + """ + name = "on_" + name + func = self.callbacks.get(name) + if func is None: + return + + # Depending on whether we're given a buffer... + if data is not None: + # Don't do anything if we have start == end. + if start is not None and start == end: + return + + self.logger.debug("Calling %s with data[%d:%d]", name, start, end) + func(data, start, end) + else: + self.logger.debug("Calling %s with no data", name) + func() + + def set_callback(self, name, new_func): + """Update the function for a callback. Removes from the callbacks dict + if new_func is None. + + :param name: The name of the callback to call (as a string). + + :param new_func: The new function for the callback. If None, then the + callback will be removed (with no error if it does not + exist). + """ + if new_func is None: + self.callbacks.pop('on_' + name, None) + else: + self.callbacks['on_' + name] = new_func + + def close(self): + pass # pragma: no cover + + def finalize(self): + pass # pragma: no cover + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + +class OctetStreamParser(BaseParser): + """This parser parses an octet-stream request body and calls callbacks when + incoming data is received. Callbacks are as follows: + + .. list-table:: + :widths: 15 10 30 + :header-rows: 1 + + * - Callback Name + - Parameters + - Description + * - on_start + - None + - Called when the first data is parsed. + * - on_data + - data, start, end + - Called for each data chunk that is parsed. + * - on_end + - None + - Called when the parser is finished parsing all data. + + :param callbacks: A dictionary of callbacks. See the documentation for + :class:`BaseParser`. + + :param max_size: The maximum size of body to parse. Defaults to infinity - + i.e. unbounded. + """ + def __init__(self, callbacks={}, max_size=float('inf')): + super().__init__() + self.callbacks = callbacks + self._started = False + + if not isinstance(max_size, Number) or max_size < 1: + raise ValueError("max_size must be a positive number, not %r" % + max_size) + self.max_size = max_size + self._current_size = 0 + + def write(self, data): + """Write some data to the parser, which will perform size verification, + and then pass the data to the underlying callback. 
+
+        :param data: a bytestring
+        """
+        if not self._started:
+            self.callback('start')
+            self._started = True
+
+        # Truncate data length.
+        data_len = len(data)
+        if (self._current_size + data_len) > self.max_size:
+            # We truncate the length of data that we are to process.
+            new_size = int(self.max_size - self._current_size)
+            self.logger.warning("Current size is %d (max %d), so truncating "
+                                "data length from %d to %d",
+                                self._current_size, self.max_size, data_len,
+                                new_size)
+            data_len = new_size
+
+        # Increment size, then callback, in case there's an exception.
+        self._current_size += data_len
+        self.callback('data', data, 0, data_len)
+        return data_len
+
+    def finalize(self):
+        """Finalize this parser, which signals that we are finished parsing,
+        and sends the on_end callback.
+        """
+        self.callback('end')
+
+    def __repr__(self):
+        return "%s()" % self.__class__.__name__
+
+
+class QuerystringParser(BaseParser):
+    """This is a streaming querystring parser. It will consume data, and call
+    the callbacks given when it has data.
+
+    .. list-table::
+       :widths: 15 10 30
+       :header-rows: 1
+
+       * - Callback Name
+         - Parameters
+         - Description
+       * - on_field_start
+         - None
+         - Called when a new field is encountered.
+       * - on_field_name
+         - data, start, end
+         - Called when a portion of a field's name is encountered.
+       * - on_field_data
+         - data, start, end
+         - Called when a portion of a field's data is encountered.
+       * - on_field_end
+         - None
+         - Called when the end of a field is encountered.
+       * - on_end
+         - None
+         - Called when the parser is finished parsing all data.
+
+    :param callbacks: A dictionary of callbacks. See the documentation for
+                      :class:`BaseParser`.
+
+    :param strict_parsing: Whether or not to parse the body strictly. Defaults
+                           to False. If this is set to True, then the behavior
+                           of the parser changes as follows: if a field
+                           has a value with an equal sign (e.g. "foo=bar", or
+                           "foo="), it is always included. If a field has no
+                           equals sign (e.g. "...&name&..."), it will be
+                           treated as an error if 'strict_parsing' is True,
+                           otherwise included. If an error is encountered,
+                           then a
+                           :class:`multipart.exceptions.QuerystringParseError`
+                           will be raised.
+
+    :param max_size: The maximum size of body to parse. Defaults to infinity -
+                     i.e. unbounded.
+    """
+    def __init__(self, callbacks={}, strict_parsing=False,
+                 max_size=float('inf')):
+        super().__init__()
+        self.state = STATE_BEFORE_FIELD
+        self._found_sep = False
+
+        self.callbacks = callbacks
+
+        # Max-size stuff
+        if not isinstance(max_size, Number) or max_size < 1:
+            raise ValueError("max_size must be a positive number, not %r" %
+                             max_size)
+        self.max_size = max_size
+        self._current_size = 0
+
+        # Should parsing be strict?
+        self.strict_parsing = strict_parsing
+
+    def write(self, data):
+        """Write some data to the parser, which will perform size verification,
+        parse into either a field name or value, and then pass the
+        corresponding data to the underlying callback. If an error is
+        encountered while parsing, a QuerystringParseError will be raised. The
+        "offset" attribute of the raised exception will be set to the offset in
+        the input data chunk (NOT the overall stream) that caused the error.
+
+        :param data: a bytestring
+        """
+        # Handle sizing.
+        data_len = len(data)
+        if (self._current_size + data_len) > self.max_size:
+            # We truncate the length of data that we are to process.
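+            # (as with the other parsers, bytes beyond max_size are silently
+            # dropped after the logged warning below.)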
+ new_size = int(self.max_size - self._current_size) + self.logger.warning("Current size is %d (max %d), so truncating " + "data length from %d to %d", + self._current_size, self.max_size, data_len, + new_size) + data_len = new_size + + l = 0 + try: + l = self._internal_write(data, data_len) + finally: + self._current_size += l + + return l + + def _internal_write(self, data, length): + state = self.state + strict_parsing = self.strict_parsing + found_sep = self._found_sep + + i = 0 + while i < length: + ch = data[i] + + # Depending on our state... + if state == STATE_BEFORE_FIELD: + # If the 'found_sep' flag is set, we've already encountered + # and skipped a single separator. If so, we check our strict + # parsing flag and decide what to do. Otherwise, we haven't + # yet reached a separator, and thus, if we do, we need to skip + # it as it will be the boundary between fields that's supposed + # to be there. + if ch == AMPERSAND or ch == SEMICOLON: + if found_sep: + # If we're parsing strictly, we disallow blank chunks. + if strict_parsing: + e = QuerystringParseError( + "Skipping duplicate ampersand/semicolon at " + "%d" % i + ) + e.offset = i + raise e + else: + self.logger.debug("Skipping duplicate ampersand/" + "semicolon at %d", i) + else: + # This case is when we're skipping the (first) + # separator between fields, so we just set our flag + # and continue on. + found_sep = True + else: + # Emit a field-start event, and go to that state. Also, + # reset the "found_sep" flag, for the next time we get to + # this state. + self.callback('field_start') + i -= 1 + state = STATE_FIELD_NAME + found_sep = False + + elif state == STATE_FIELD_NAME: + # Try and find a separator - we ensure that, if we do, we only + # look for the equal sign before it. + sep_pos = data.find(b'&', i) + if sep_pos == -1: + sep_pos = data.find(b';', i) + + # See if we can find an equals sign in the remaining data. If + # so, we can immediately emit the field name and jump to the + # data state. + if sep_pos != -1: + equals_pos = data.find(b'=', i, sep_pos) + else: + equals_pos = data.find(b'=', i) + + if equals_pos != -1: + # Emit this name. + self.callback('field_name', data, i, equals_pos) + + # Jump i to this position. Note that it will then have 1 + # added to it below, which means the next iteration of this + # loop will inspect the character after the equals sign. + i = equals_pos + state = STATE_FIELD_DATA + else: + # No equals sign found. + if not strict_parsing: + # See also comments in the STATE_FIELD_DATA case below. + # If we found the separator, we emit the name and just + # end - there's no data callback at all (not even with + # a blank value). + if sep_pos != -1: + self.callback('field_name', data, i, sep_pos) + self.callback('field_end') + + i = sep_pos - 1 + state = STATE_BEFORE_FIELD + else: + # Otherwise, no separator in this block, so the + # rest of this chunk must be a name. + self.callback('field_name', data, i, length) + i = length + + else: + # We're parsing strictly. If we find a separator, + # this is an error - we require an equals sign. + if sep_pos != -1: + e = QuerystringParseError( + "When strict_parsing is True, we require an " + "equals sign in all field chunks. Did not " + "find one in the chunk that starts at %d" % + (i,) + ) + e.offset = i + raise e + + # No separator in the rest of this chunk, so it's just + # a field name. 
+                        self.callback('field_name', data, i, length)
+                        i = length
+
+            elif state == STATE_FIELD_DATA:
+                # Try finding either an ampersand or a semicolon after this
+                # position.
+                sep_pos = data.find(b'&', i)
+                if sep_pos == -1:
+                    sep_pos = data.find(b';', i)
+
+                # If we found it, callback this bit as data and then go back
+                # to expecting to find a field.
+                if sep_pos != -1:
+                    self.callback('field_data', data, i, sep_pos)
+                    self.callback('field_end')
+
+                    # Note that we go to the separator, which brings us to the
+                    # "before field" state. This allows us to properly emit
+                    # "field_start" events only when we actually have data for
+                    # a field of some sort.
+                    i = sep_pos - 1
+                    state = STATE_BEFORE_FIELD
+
+                # Otherwise, emit the rest as data and finish.
+                else:
+                    self.callback('field_data', data, i, length)
+                    i = length
+
+            else:  # pragma: no cover (error case)
+                msg = "Reached an unknown state %d at %d" % (state, i)
+                self.logger.warning(msg)
+                e = QuerystringParseError(msg)
+                e.offset = i
+                raise e
+
+            i += 1
+
+        self.state = state
+        self._found_sep = found_sep
+        return len(data)
+
+    def finalize(self):
+        """Finalize this parser, which signals that we are finished parsing:
+        if we're still in the middle of a field, this sends an on_field_end
+        callback, and then the on_end callback.
+        """
+        # If we're currently in the middle of a field, we finish it.
+        if self.state == STATE_FIELD_DATA:
+            self.callback('field_end')
+        self.callback('end')
+
+    def __repr__(self):
+        return "{}(strict_parsing={!r}, max_size={!r})".format(
+            self.__class__.__name__,
+            self.strict_parsing, self.max_size
+        )
+
+
+class MultipartParser(BaseParser):
+    """This class is a streaming multipart/form-data parser.
+
+    .. list-table::
+       :widths: 15 10 30
+       :header-rows: 1
+
+       * - Callback Name
+         - Parameters
+         - Description
+       * - on_part_begin
+         - None
+         - Called when a new part of the multipart message is encountered.
+       * - on_part_data
+         - data, start, end
+         - Called when a portion of a part's data is encountered.
+       * - on_part_end
+         - None
+         - Called when the end of a part is reached.
+       * - on_header_begin
+         - None
+         - Called when we've found a new header in a part of a multipart
+           message.
+       * - on_header_field
+         - data, start, end
+         - Called each time an additional portion of a header is read (i.e. the
+           part of the header that is before the colon; the "Foo" in
+           "Foo: Bar").
+       * - on_header_value
+         - data, start, end
+         - Called when we get data for a header.
+       * - on_header_end
+         - None
+         - Called when the current header is finished - i.e. we've reached the
+           newline at the end of the header.
+       * - on_headers_finished
+         - None
+         - Called when all headers are finished, and before the part data
+           starts.
+       * - on_end
+         - None
+         - Called when the parser is finished parsing all data.
+
+
+    :param boundary: The multipart boundary. This is required, and must match
+                     what is given in the HTTP request - usually in the
+                     Content-Type header.
+
+    :param callbacks: A dictionary of callbacks. See the documentation for
+                      :class:`BaseParser`.
+
+    :param max_size: The maximum size of body to parse. Defaults to infinity -
+                     i.e. unbounded.
+    """
+
+    def __init__(self, boundary, callbacks={}, max_size=float('inf')):
+        # Initialize parser state.
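+        # Minimal driving loop, as a sketch (callback names abbreviated,
+        # boundary value hypothetical):
+        #
+        #     parser = MultipartParser(b"boundary123",
+        #                              callbacks={"on_part_begin": ...,
+        #                                         "on_part_data": ...})
+        #     for chunk in chunks:
+        #         parser.write(chunk)
+        #     parser.finalize()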
+ super().__init__() + self.state = STATE_START + self.index = self.flags = 0 + + self.callbacks = callbacks + + if not isinstance(max_size, Number) or max_size < 1: + raise ValueError("max_size must be a positive number, not %r" % + max_size) + self.max_size = max_size + self._current_size = 0 + + # Setup marks. These are used to track the state of data received. + self.marks = {} + + # TODO: Actually use this rather than the dumb version we currently use + # # Precompute the skip table for the Boyer-Moore-Horspool algorithm. + # skip = [len(boundary) for x in range(256)] + # for i in range(len(boundary) - 1): + # skip[ord_char(boundary[i])] = len(boundary) - i - 1 + # + # # We use a tuple since it's a constant, and marginally faster. + # self.skip = tuple(skip) + + # Save our boundary. + if isinstance(boundary, str): # pragma: no cover + boundary = boundary.encode('latin-1') + self.boundary = b'\r\n--' + boundary + + # Get a set of characters that belong to our boundary. + self.boundary_chars = frozenset(self.boundary) + + # We also create a lookbehind list. + # Note: the +8 is since we can have, at maximum, "\r\n--" + boundary + + # "--\r\n" at the final boundary, and the length of '\r\n--' and + # '--\r\n' is 8 bytes. + self.lookbehind = [NULL for x in range(len(boundary) + 8)] + + def write(self, data): + """Write some data to the parser, which will perform size verification, + and then parse the data into the appropriate location (e.g. header, + data, etc.), and pass this on to the underlying callback. If an error + is encountered, a MultipartParseError will be raised. The "offset" + attribute on the raised exception will be set to the offset of the byte + in the input chunk that caused the error. + + :param data: a bytestring + """ + # Handle sizing. + data_len = len(data) + if (self._current_size + data_len) > self.max_size: + # We truncate the length of data that we are to process. + new_size = int(self.max_size - self._current_size) + self.logger.warning("Current size is %d (max %d), so truncating " + "data length from %d to %d", + self._current_size, self.max_size, data_len, + new_size) + data_len = new_size + + l = 0 + try: + l = self._internal_write(data, data_len) + finally: + self._current_size += l + + return l + + def _internal_write(self, data, length): + # Get values from locals. + boundary = self.boundary + + # Get our state, flags and index. These are persisted between calls to + # this function. + state = self.state + index = self.index + flags = self.flags + + # Our index defaults to 0. + i = 0 + + # Set a mark. + def set_mark(name): + self.marks[name] = i + + # Remove a mark. + def delete_mark(name, reset=False): + self.marks.pop(name, None) + + # Helper function that makes calling a callback with data easier. The + # 'remaining' parameter will callback from the marked value until the + # end of the buffer, and reset the mark, instead of deleting it. This + # is used at the end of the function to call our callbacks with any + # remaining data in this chunk. + def data_callback(name, remaining=False): + marked_index = self.marks.get(name) + if marked_index is None: + return + + # If we're getting remaining data, we ignore the current i value + # and just call with the remaining data. + if remaining: + self.callback(name, data, marked_index, length) + self.marks[name] = 0 + + # Otherwise, we call it from the mark to the current byte we're + # processing. + else: + self.callback(name, data, marked_index, i) + self.marks.pop(name, None) + + # For each byte... 
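+        # (the loop below is a hand-rolled state machine: `state` picks the
+        # branch for each byte, and `i` is occasionally decremented so the
+        # same byte is re-processed after a state transition.)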
+ while i < length: + c = data[i] + + if state == STATE_START: + # Skip leading newlines + if c == CR or c == LF: + i += 1 + self.logger.debug("Skipping leading CR/LF at %d", i) + continue + + # index is used as in index into our boundary. Set to 0. + index = 0 + + # Move to the next state, but decrement i so that we re-process + # this character. + state = STATE_START_BOUNDARY + i -= 1 + + elif state == STATE_START_BOUNDARY: + # Check to ensure that the last 2 characters in our boundary + # are CRLF. + if index == len(boundary) - 2: + if c != CR: + # Error! + msg = "Did not find CR at end of boundary (%d)" % (i,) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + index += 1 + + elif index == len(boundary) - 2 + 1: + if c != LF: + msg = "Did not find LF at end of boundary (%d)" % (i,) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + # The index is now used for indexing into our boundary. + index = 0 + + # Callback for the start of a part. + self.callback('part_begin') + + # Move to the next character and state. + state = STATE_HEADER_FIELD_START + + else: + # Check to ensure our boundary matches + if c != boundary[index + 2]: + msg = "Did not find boundary character %r at index " \ + "%d" % (c, index + 2) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + # Increment index into boundary and continue. + index += 1 + + elif state == STATE_HEADER_FIELD_START: + # Mark the start of a header field here, reset the index, and + # continue parsing our header field. + index = 0 + + # Set a mark of our header field. + set_mark('header_field') + + # Move to parsing header fields. + state = STATE_HEADER_FIELD + i -= 1 + + elif state == STATE_HEADER_FIELD: + # If we've reached a CR at the beginning of a header, it means + # that we've reached the second of 2 newlines, and so there are + # no more headers to parse. + if c == CR: + delete_mark('header_field') + state = STATE_HEADERS_ALMOST_DONE + i += 1 + continue + + # Increment our index in the header. + index += 1 + + # Do nothing if we encounter a hyphen. + if c == HYPHEN: + pass + + # If we've reached a colon, we're done with this header. + elif c == COLON: + # A 0-length header is an error. + if index == 1: + msg = "Found 0-length header at %d" % (i,) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + # Call our callback with the header field. + data_callback('header_field') + + # Move to parsing the header value. + state = STATE_HEADER_VALUE_START + + else: + # Lower-case this character, and ensure that it is in fact + # a valid letter. If not, it's an error. + cl = lower_char(c) + if cl < LOWER_A or cl > LOWER_Z: + msg = "Found non-alphanumeric character %r in " \ + "header at %d" % (c, i) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + elif state == STATE_HEADER_VALUE_START: + # Skip leading spaces. + if c == SPACE: + i += 1 + continue + + # Mark the start of the header value. + set_mark('header_value') + + # Move to the header-value state, reprocessing this character. + state = STATE_HEADER_VALUE + i -= 1 + + elif state == STATE_HEADER_VALUE: + # If we've got a CR, we're nearly done our headers. Otherwise, + # we do nothing and just move past this character. + if c == CR: + data_callback('header_value') + self.callback('header_end') + state = STATE_HEADER_VALUE_ALMOST_DONE + + elif state == STATE_HEADER_VALUE_ALMOST_DONE: + # The last character should be a LF. 
If not, it's an error. + if c != LF: + msg = "Did not find LF character at end of header " \ + "(found %r)" % (c,) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + # Move back to the start of another header. Note that if that + # state detects ANOTHER newline, it'll trigger the end of our + # headers. + state = STATE_HEADER_FIELD_START + + elif state == STATE_HEADERS_ALMOST_DONE: + # We're almost done our headers. This is reached when we parse + # a CR at the beginning of a header, so our next character + # should be a LF, or it's an error. + if c != LF: + msg = f"Did not find LF at end of headers (found {c!r})" + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + self.callback('headers_finished') + state = STATE_PART_DATA_START + + elif state == STATE_PART_DATA_START: + # Mark the start of our part data. + set_mark('part_data') + + # Start processing part data, including this character. + state = STATE_PART_DATA + i -= 1 + + elif state == STATE_PART_DATA: + # We're processing our part data right now. During this, we + # need to efficiently search for our boundary, since any data + # on any number of lines can be a part of the current data. + # We use the Boyer-Moore-Horspool algorithm to efficiently + # search through the remainder of the buffer looking for our + # boundary. + + # Save the current value of our index. We use this in case we + # find part of a boundary, but it doesn't match fully. + prev_index = index + + # Set up variables. + boundary_length = len(boundary) + boundary_end = boundary_length - 1 + data_length = length + boundary_chars = self.boundary_chars + + # If our index is 0, we're starting a new part, so start our + # search. + if index == 0: + # Search forward until we either hit the end of our buffer, + # or reach a character that's in our boundary. + i += boundary_end + while i < data_length - 1 and data[i] not in boundary_chars: + i += boundary_length + + # Reset i back the length of our boundary, which is the + # earliest possible location that could be our match (i.e. + # if we've just broken out of our loop since we saw the + # last character in our boundary) + i -= boundary_end + c = data[i] + + # Now, we have a couple of cases here. If our index is before + # the end of the boundary... + if index < boundary_length: + # If the character matches... + if boundary[index] == c: + # If we found a match for our boundary, we send the + # existing data. + if index == 0: + data_callback('part_data') + + # The current character matches, so continue! + index += 1 + else: + index = 0 + + # Our index is equal to the length of our boundary! + elif index == boundary_length: + # First we increment it. + index += 1 + + # Now, if we've reached a newline, we need to set this as + # the potential end of our boundary. + if c == CR: + flags |= FLAG_PART_BOUNDARY + + # Otherwise, if this is a hyphen, we might be at the last + # of all boundaries. + elif c == HYPHEN: + flags |= FLAG_LAST_BOUNDARY + + # Otherwise, we reset our index, since this isn't either a + # newline or a hyphen. + else: + index = 0 + + # Our index is right after the part boundary, which should be + # a LF. + elif index == boundary_length + 1: + # If we're at a part boundary (i.e. we've seen a CR + # character already)... + if flags & FLAG_PART_BOUNDARY: + # We need a LF character next. + if c == LF: + # Unset the part boundary flag. 
+ flags &= (~FLAG_PART_BOUNDARY) + + # Callback indicating that we've reached the end of + # a part, and are starting a new one. + self.callback('part_end') + self.callback('part_begin') + + # Move to parsing new headers. + index = 0 + state = STATE_HEADER_FIELD_START + i += 1 + continue + + # We didn't find an LF character, so no match. Reset + # our index and clear our flag. + index = 0 + flags &= (~FLAG_PART_BOUNDARY) + + # Otherwise, if we're at the last boundary (i.e. we've + # seen a hyphen already)... + elif flags & FLAG_LAST_BOUNDARY: + # We need a second hyphen here. + if c == HYPHEN: + # Callback to end the current part, and then the + # message. + self.callback('part_end') + self.callback('end') + state = STATE_END + else: + # No match, so reset index. + index = 0 + + # If we have an index, we need to keep this byte for later, in + # case we can't match the full boundary. + if index > 0: + self.lookbehind[index - 1] = c + + # Otherwise, our index is 0. If the previous index is not, it + # means we reset something, and we need to take the data we + # thought was part of our boundary and send it along as actual + # data. + elif prev_index > 0: + # Callback to write the saved data. + lb_data = join_bytes(self.lookbehind) + self.callback('part_data', lb_data, 0, prev_index) + + # Overwrite our previous index. + prev_index = 0 + + # Re-set our mark for part data. + set_mark('part_data') + + # Re-consider the current character, since this could be + # the start of the boundary itself. + i -= 1 + + elif state == STATE_END: + # Do nothing and just consume a byte in the end state. + if c not in (CR, LF): + self.logger.warning("Consuming a byte '0x%x' in the end state", c) + + else: # pragma: no cover (error case) + # We got into a strange state somehow! Just stop processing. + msg = "Reached an unknown state %d at %d" % (state, i) + self.logger.warning(msg) + e = MultipartParseError(msg) + e.offset = i + raise e + + # Move to the next byte. + i += 1 + + # We call our callbacks with any remaining data. Note that we pass + # the 'remaining' flag, which sets the mark back to 0 instead of + # deleting it, if it's found. This is because, if the mark is found + # at this point, we assume that there's data for one of these things + # that has been parsed, but not yet emitted. And, as such, it implies + # that we haven't yet reached the end of this 'thing'. So, by setting + # the mark to 0, we cause any data callbacks that take place in future + # calls to this function to start from the beginning of that buffer. + data_callback('header_field', True) + data_callback('header_value', True) + data_callback('part_data', True) + + # Save values to locals. + self.state = state + self.index = index + self.flags = flags + + # Return our data length to indicate no errors, and that we processed + # all of it. + return length + + def finalize(self): + """Finalize this parser, which signals to that we are finished parsing. + + Note: It does not currently, but in the future, it will verify that we + are in the final state of the parser (i.e. the end of the multipart + message is well-formed), and, if not, throw an error. + """ + # TODO: verify that we're in the state STATE_END, otherwise throw an + # error or otherwise state that we're not finished parsing. + pass + + def __repr__(self): + return f"{self.__class__.__name__}(boundary={self.boundary!r})" + + +class FormParser: + """This class is the all-in-one form parser. 
Given all the information
+    necessary to parse a form, it will instantiate the correct parser, create
+    the proper :class:`Field` and :class:`File` classes to store the data that
+    is parsed, and call the two given callbacks with each field and file as
+    they become available.
+
+    :param content_type: The Content-Type of the incoming request.  This is
+                         used to select the appropriate parser.
+
+    :param on_field: The callback to call when a field has been parsed and is
+                     ready for usage.  See above for parameters.
+
+    :param on_file: The callback to call when a file has been parsed and is
+                    ready for usage.  See above for parameters.
+
+    :param on_end: An optional callback to call when all fields and files in a
+                   request have been parsed.  Can be None.
+
+    :param boundary: If the request is a multipart/form-data request, this
+                     should be the boundary of the request, as given in the
+                     Content-Type header, as a bytestring.
+
+    :param file_name: If the request is of type application/octet-stream, then
+                      the body of the request will not contain any information
+                      about the uploaded file.  In such cases, you can provide
+                      the file name of the uploaded file manually.
+
+    :param FileClass: The class to use for uploaded files.  Defaults to
+                      :class:`File`, but you can provide your own class if you
+                      wish to customize behaviour.  The class will be
+                      instantiated as FileClass(file_name, field_name), and it
+                      must provide the following functions::
+                          file_instance.write(data)
+                          file_instance.finalize()
+                          file_instance.close()
+
+    :param FieldClass: The class to use for uploaded fields.  Defaults to
+                       :class:`Field`, but you can provide your own class if
+                       you wish to customize behaviour.  The class will be
+                       instantiated as FieldClass(field_name), and it must
+                       provide the following functions::
+                           field_instance.write(data)
+                           field_instance.finalize()
+                           field_instance.close()
+
+    :param config: Configuration to use for this FormParser.  The default
+                   values are taken from the DEFAULT_CONFIG value, and then
+                   any keys present in this dictionary will overwrite the
+                   default values.
+
+    """
+    #: This is the default configuration for our form parser.
+    #: Note: all file sizes should be in bytes.
+    DEFAULT_CONFIG = {
+        'MAX_BODY_SIZE': float('inf'),
+        'MAX_MEMORY_FILE_SIZE': 1 * 1024 * 1024,
+        'UPLOAD_DIR': None,
+        'UPLOAD_KEEP_FILENAME': False,
+        'UPLOAD_KEEP_EXTENSIONS': False,
+
+        # Error on invalid Content-Transfer-Encoding?
+        'UPLOAD_ERROR_ON_BAD_CTE': False,
+    }
+
+    def __init__(self, content_type, on_field, on_file, on_end=None,
+                 boundary=None, file_name=None, FileClass=File,
+                 FieldClass=Field, config={}):
+
+        self.logger = logging.getLogger(__name__)
+
+        # Save variables.
+        self.content_type = content_type
+        self.boundary = boundary
+        self.bytes_received = 0
+        self.parser = None
+
+        # Save callbacks.
+        self.on_field = on_field
+        self.on_file = on_file
+        self.on_end = on_end
+
+        # Save the classes we were actually given, not the defaults.
+        self.FileClass = FileClass
+        self.FieldClass = FieldClass
+
+        # Set configuration options.
+        self.config = self.DEFAULT_CONFIG.copy()
+        self.config.update(config)
+
+        # Depending on the Content-Type, we instantiate the correct parser.
+        if content_type == 'application/octet-stream':
+            # Work around the lack of 'nonlocal' in Py2
+            class vars:
+                f = None
+
+            def on_start():
+                vars.f = FileClass(file_name, None, config=self.config)
+
+            def on_data(data, start, end):
+                vars.f.write(data[start:end])
+
+            def on_end():
+                # Finalize the file itself.
+                vars.f.finalize()
+
+                # Call our callback.
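+                # (The File handed to on_file is already finalized; a
+                # consumer callback might, for example, rewind and read it:
+                #     f.file_object.seek(0)
+                #     data = f.file_object.read()
+                # before closing it.)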
+ on_file(vars.f) + + # Call the on-end callback. + if self.on_end is not None: + self.on_end() + + callbacks = { + 'on_start': on_start, + 'on_data': on_data, + 'on_end': on_end, + } + + # Instantiate an octet-stream parser + parser = OctetStreamParser(callbacks, + max_size=self.config['MAX_BODY_SIZE']) + + elif (content_type == 'application/x-www-form-urlencoded' or + content_type == 'application/x-url-encoded'): + + name_buffer = [] + + class vars: + f = None + + def on_field_start(): + pass + + def on_field_name(data, start, end): + name_buffer.append(data[start:end]) + + def on_field_data(data, start, end): + if vars.f is None: + vars.f = FieldClass(b''.join(name_buffer)) + del name_buffer[:] + vars.f.write(data[start:end]) + + def on_field_end(): + # Finalize and call callback. + if vars.f is None: + # If we get here, it's because there was no field data. + # We create a field, set it to None, and then continue. + vars.f = FieldClass(b''.join(name_buffer)) + del name_buffer[:] + vars.f.set_none() + + vars.f.finalize() + on_field(vars.f) + vars.f = None + + def on_end(): + if self.on_end is not None: + self.on_end() + + # Setup callbacks. + callbacks = { + 'on_field_start': on_field_start, + 'on_field_name': on_field_name, + 'on_field_data': on_field_data, + 'on_field_end': on_field_end, + 'on_end': on_end, + } + + # Instantiate parser. + parser = QuerystringParser( + callbacks=callbacks, + max_size=self.config['MAX_BODY_SIZE'] + ) + + elif content_type == 'multipart/form-data': + if boundary is None: + self.logger.error("No boundary given") + raise FormParserError("No boundary given") + + header_name = [] + header_value = [] + headers = {} + + # No 'nonlocal' on Python 2 :-( + class vars: + f = None + writer = None + is_file = False + + def on_part_begin(): + pass + + def on_part_data(data, start, end): + bytes_processed = vars.writer.write(data[start:end]) + # TODO: check for error here. + return bytes_processed + + def on_part_end(): + vars.f.finalize() + if vars.is_file: + on_file(vars.f) + else: + on_field(vars.f) + + def on_header_field(data, start, end): + header_name.append(data[start:end]) + + def on_header_value(data, start, end): + header_value.append(data[start:end]) + + def on_header_end(): + headers[b''.join(header_name)] = b''.join(header_value) + del header_name[:] + del header_value[:] + + def on_headers_finished(): + # Reset the 'is file' flag. + vars.is_file = False + + # Parse the content-disposition header. + # TODO: handle mixed case + content_disp = headers.get(b'Content-Disposition') + disp, options = parse_options_header(content_disp) + + # Get the field and filename. + field_name = options.get(b'name') + file_name = options.get(b'filename') + # TODO: check for errors + + # Create the proper class. + if file_name is None: + vars.f = FieldClass(field_name) + else: + vars.f = FileClass(file_name, field_name, config=self.config) + vars.is_file = True + + # Parse the given Content-Transfer-Encoding to determine what + # we need to do with the incoming data. + # TODO: check that we properly handle 8bit / 7bit encoding. 
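+            # (Per RFC 7578 the Content-Transfer-Encoding header is
+            # deprecated for multipart/form-data, but older clients may still
+            # send it.  The code below maps each value to a writer:
+            #     7bit / 8bit / binary  -> write data through unchanged
+            #     base64                -> wrap the target in Base64Decoder
+            #     quoted-printable      -> wrap in QuotedPrintableDecoder
+            # Anything else either raises FormParserError or is passed
+            # through unchanged, depending on UPLOAD_ERROR_ON_BAD_CTE.)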
+ transfer_encoding = headers.get(b'Content-Transfer-Encoding', + b'7bit') + + if (transfer_encoding == b'binary' or + transfer_encoding == b'8bit' or + transfer_encoding == b'7bit'): + vars.writer = vars.f + + elif transfer_encoding == b'base64': + vars.writer = Base64Decoder(vars.f) + + elif transfer_encoding == b'quoted-printable': + vars.writer = QuotedPrintableDecoder(vars.f) + + else: + self.logger.warning("Unknown Content-Transfer-Encoding: " + "%r", transfer_encoding) + if self.config['UPLOAD_ERROR_ON_BAD_CTE']: + raise FormParserError( + 'Unknown Content-Transfer-Encoding "{}"'.format( + transfer_encoding + ) + ) + else: + # If we aren't erroring, then we just treat this as an + # unencoded Content-Transfer-Encoding. + vars.writer = vars.f + + def on_end(): + vars.writer.finalize() + if self.on_end is not None: + self.on_end() + + # These are our callbacks for the parser. + callbacks = { + 'on_part_begin': on_part_begin, + 'on_part_data': on_part_data, + 'on_part_end': on_part_end, + 'on_header_field': on_header_field, + 'on_header_value': on_header_value, + 'on_header_end': on_header_end, + 'on_headers_finished': on_headers_finished, + 'on_end': on_end, + } + + # Instantiate a multipart parser. + parser = MultipartParser(boundary, callbacks, + max_size=self.config['MAX_BODY_SIZE']) + + else: + self.logger.warning("Unknown Content-Type: %r", content_type) + raise FormParserError("Unknown Content-Type: {}".format( + content_type + )) + + self.parser = parser + + def write(self, data): + """Write some data. The parser will forward this to the appropriate + underlying parser. + + :param data: a bytestring + """ + self.bytes_received += len(data) + # TODO: check the parser's return value for errors? + return self.parser.write(data) + + def finalize(self): + """Finalize the parser.""" + if self.parser is not None and hasattr(self.parser, 'finalize'): + self.parser.finalize() + + def close(self): + """Close the parser.""" + if self.parser is not None and hasattr(self.parser, 'close'): + self.parser.close() + + def __repr__(self): + return "{}(content_type={!r}, parser={!r})".format( + self.__class__.__name__, + self.content_type, + self.parser, + ) + + +def create_form_parser(headers, on_field, on_file, trust_x_headers=False, + config={}): + """This function is a helper function to aid in creating a FormParser + instances. Given a dictionary-like headers object, it will determine + the correct information needed, instantiate a FormParser with the + appropriate values and given callbacks, and then return the corresponding + parser. + + :param headers: A dictionary-like object of HTTP headers. The only + required header is Content-Type. + + :param on_field: Callback to call with each parsed field. + + :param on_file: Callback to call with each parsed file. + + :param trust_x_headers: Whether or not to trust information received from + certain X-Headers - for example, the file name from + X-File-Name. + + :param config: Configuration variables to pass to the FormParser. + """ + content_type = headers.get('Content-Type') + if content_type is None: + logging.getLogger(__name__).warning("No Content-Type header given") + raise ValueError("No Content-Type header given!") + + # Boundaries are optional (the FormParser will raise if one is needed + # but not given). + content_type, params = parse_options_header(content_type) + boundary = params.get(b'boundary') + + # We need content_type to be a string, not a bytes object. 
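+    # (latin-1 is used because it maps every byte value 0-255 to a code
+    # point one-to-one, so the decode below can never raise; header tokens
+    # are plain ASCII in practice anyway.)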
+    content_type = content_type.decode('latin-1')
+
+    # File names are optional.
+    file_name = headers.get('X-File-Name')
+
+    # Instantiate a form parser.
+    form_parser = FormParser(content_type,
+                             on_field,
+                             on_file,
+                             boundary=boundary,
+                             file_name=file_name,
+                             config=config)
+
+    # Return our parser.
+    return form_parser
+
+
+def parse_form(headers, input_stream, on_field, on_file, chunk_size=1048576,
+               **kwargs):
+    """This function is useful if you just want to parse a request body,
+    without too much work.  Pass it a dictionary-like object of the request's
+    headers, and a file-like object for the input stream, along with two
+    callbacks that will get called whenever a field or file is parsed.
+
+    :param headers: A dictionary-like object of HTTP headers.  The only
+                    required header is Content-Type.
+
+    :param input_stream: A file-like object that represents the request body.
+                         The read() method must return bytestrings.
+
+    :param on_field: Callback to call with each parsed field.
+
+    :param on_file: Callback to call with each parsed file.
+
+    :param chunk_size: The maximum size to read from the input stream and write
+                       to the parser at one time.  Defaults to 1 MiB.
+    """
+
+    # Create our form parser, forwarding any extra keyword arguments
+    # (e.g. config) on to it.
+    parser = create_form_parser(headers, on_field, on_file, **kwargs)
+
+    # Read chunks of up to chunk_size bytes and write them to the parser, but
+    # never read more than the given Content-Length, if any.
+    content_length = headers.get('Content-Length')
+    if content_length is not None:
+        content_length = int(content_length)
+    else:
+        content_length = float('inf')
+    bytes_read = 0
+
+    while True:
+        # Read only up to the Content-Length given.
+        max_readable = min(content_length - bytes_read, chunk_size)
+        buff = input_stream.read(max_readable)
+
+        # Write to the parser and update our length.
+        parser.write(buff)
+        bytes_read += len(buff)
+
+        # If we get a buffer that's smaller than the size requested, or if we
+        # have read up to our content length, we're done.
+        if len(buff) != max_readable or bytes_read == content_length:
+            break
+
+    # Tell our parser that we're done writing data.
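+    # (A hypothetical usage sketch, assuming a WSGI-style environ is
+    # available:
+    #     headers = {'Content-Type': environ['CONTENT_TYPE'],
+    #                'Content-Length': environ.get('CONTENT_LENGTH')}
+    #     parse_form(headers, environ['wsgi.input'],
+    #                on_field=lambda f: print(f.field_name, f.value),
+    #                on_file=lambda f: f.close())
+    # The finalize() call below then flushes anything still buffered.)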
+ parser.finalize() diff --git a/venv/lib/python3.12/site-packages/multipart/tests/__init__.py b/venv/lib/python3.12/site-packages/multipart/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5b9e162 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..7e4da7d Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/test_multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/test_multipart.cpython-312.pyc new file mode 100644 index 0000000..94f8553 Binary files /dev/null and b/venv/lib/python3.12/site-packages/multipart/tests/__pycache__/test_multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/multipart/tests/compat.py b/venv/lib/python3.12/site-packages/multipart/tests/compat.py new file mode 100644 index 0000000..897188d --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/compat.py @@ -0,0 +1,133 @@ +import os +import re +import sys +import types +import functools + + +def ensure_in_path(path): + """ + Ensure that a given path is in the sys.path array + """ + if not os.path.isdir(path): + raise RuntimeError('Tried to add nonexisting path') + + def _samefile(x, y): + try: + return os.path.samefile(x, y) + except OSError: + return False + except AttributeError: + # Probably on Windows. + path1 = os.path.abspath(x).lower() + path2 = os.path.abspath(y).lower() + return path1 == path2 + + # Remove existing copies of it. + for pth in sys.path: + if _samefile(pth, path): + sys.path.remove(pth) + + # Add it at the beginning. + sys.path.insert(0, path) + + +# Check if pytest is imported. If so, we use it to create marking decorators. +# If not, we just create a function that does nothing. +try: + import pytest +except ImportError: + pytest = None + +if pytest is not None: + slow_test = pytest.mark.slow_test + xfail = pytest.mark.xfail + +else: + slow_test = lambda x: x + + def xfail(*args, **kwargs): + if len(args) > 0 and isinstance(args[0], types.FunctionType): + return args[0] + + return lambda x: x + + +# We don't use the pytest parametrizing function, since it seems to break +# with unittest.TestCase subclasses. +def parametrize(field_names, field_values): + # If we're not given a list of field names, we make it. + if not isinstance(field_names, (tuple, list)): + field_names = (field_names,) + field_values = [(val,) for val in field_values] + + # Create a decorator that saves this list of field names and values on the + # function for later parametrizing. + def decorator(func): + func.__dict__['param_names'] = field_names + func.__dict__['param_values'] = field_values + return func + + return decorator + + +# This is a metaclass that actually performs the parametrization. 
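+# (For example, under the metaclass below a method declared as
+#
+#     @parametrize('val', [1, 2])
+#     def test_foo(self, val): ...
+#
+# is replaced by two copies, roughly test_foo__1 and test_foo__2, each of
+# which calls the original with the corresponding keyword argument bound.)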
+class ParametrizingMetaclass(type): + IDENTIFIER_RE = re.compile('[^A-Za-z0-9]') + + def __new__(klass, name, bases, attrs): + new_attrs = attrs.copy() + for attr_name, attr in attrs.items(): + # We only care about functions + if not isinstance(attr, types.FunctionType): + continue + + param_names = attr.__dict__.pop('param_names', None) + param_values = attr.__dict__.pop('param_values', None) + if param_names is None or param_values is None: + continue + + # Create multiple copies of the function. + for i, values in enumerate(param_values): + assert len(param_names) == len(values) + + # Get a repr of the values, and fix it to be a valid identifier + human = '_'.join( + [klass.IDENTIFIER_RE.sub('', repr(x)) for x in values] + ) + + # Create a new name. + # new_name = attr.__name__ + "_%d" % i + new_name = attr.__name__ + "__" + human + + # Create a replacement function. + def create_new_func(func, names, values): + # Create a kwargs dictionary. + kwargs = dict(zip(names, values)) + + @functools.wraps(func) + def new_func(self): + return func(self, **kwargs) + + # Manually set the name and return the new function. + new_func.__name__ = new_name + return new_func + + # Actually create the new function. + new_func = create_new_func(attr, param_names, values) + + # Save this new function in our attrs dict. + new_attrs[new_name] = new_func + + # Remove the old attribute from our new dictionary. + del new_attrs[attr_name] + + # We create the class as normal, except we use our new attributes. + return type.__new__(klass, name, bases, new_attrs) + + +# This is a class decorator that actually applies the above metaclass. +def parametrize_class(klass): + return ParametrizingMetaclass(klass.__name__, + klass.__bases__, + klass.__dict__) diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header.http new file mode 100644 index 0000000..0c81dae --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header.http @@ -0,0 +1,5 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content- isposition: form-data; name="field" + +This is a test. +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header.yaml new file mode 100644 index 0000000..c9b55f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header.yaml @@ -0,0 +1,3 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + error: 51 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header_value.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header_value.http new file mode 100644 index 0000000..f3dc834 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header_value.http @@ -0,0 +1,5 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; n me="field" + +This is a test. 
+------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header_value.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header_value.yaml new file mode 100644 index 0000000..a6efa7d --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/CR_in_header_value.yaml @@ -0,0 +1,3 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + error: 76 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary.http new file mode 100644 index 0000000..7d97e51 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary.http @@ -0,0 +1,13 @@ +----boundary +Content-Disposition: form-data; name="file"; filename="test.txt" +Content-Type: text/plain + +--boundari +--boundaryq--boundary q--boundarq +--bounaryd-- +--notbound-- +--mismatch +--mismatch-- +--boundary-Q +--boundary Q--boundaryQ +----boundary-- diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary.yaml new file mode 100644 index 0000000..235493e --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary.yaml @@ -0,0 +1,8 @@ +boundary: --boundary +expected: + - name: file + type: file + file_name: test.txt + data: !!binary | + LS1ib3VuZGFyaQ0KLS1ib3VuZGFyeXEtLWJvdW5kYXJ5DXEtLWJvdW5kYXJxDQotLWJvdW5hcnlkLS0NCi0tbm90Ym91bmQtLQ0KLS1taXNtYXRjaA0KLS1taXNtYXRjaC0tDQotLWJvdW5kYXJ5LVENCi0tYm91bmRhcnkNUS0tYm91bmRhcnlR + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_CR.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_CR.http new file mode 100644 index 0000000..327cc9b --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_CR.http @@ -0,0 +1,6 @@ +----boundary +Content-Disposition: form-data; name="field" + +QQQQQQQQQQQQQQQQQQQQ +----boundaryQQQQQQQQQQQQQQQQQQQQ +----boundary-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_CR.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_CR.yaml new file mode 100644 index 0000000..921637f --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_CR.yaml @@ -0,0 +1,8 @@ +boundary: --boundary +expected: + - name: field + type: field + data: !!binary | + UVFRUVFRUVFRUVFRUVFRUVFRUVENCi0tLS1ib3VuZGFyeVFRUVFRUVFRUVFRUVFRUVFRUVFR + + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_LF.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_LF.http new file mode 100644 index 0000000..e9a5a6c --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_LF.http @@ -0,0 +1,6 @@ +----boundary +Content-Disposition: form-data; name="field" + +QQQQQQQQQQQQQQQQQQQQ +----boundary QQQQQQQQQQQQQQQQQQQQ +----boundary-- \ No newline at end of file diff --git 
a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_LF.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_LF.yaml new file mode 100644 index 0000000..7346e03 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_LF.yaml @@ -0,0 +1,8 @@ +boundary: --boundary +expected: + - name: field + type: field + data: !!binary | + UVFRUVFRUVFRUVFRUVFRUVFRUVENCi0tLS1ib3VuZGFyeQ1RUVFRUVFRUVFRUVFRUVFRUVFRUQ== + + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_final_hyphen.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_final_hyphen.http new file mode 100644 index 0000000..9261f1b --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_final_hyphen.http @@ -0,0 +1,6 @@ +----boundary +Content-Disposition: form-data; name="field" + +QQQQQQQQQQQQQQQQQQQQ +----boundary-QQQQQQQQQQQQQQQQQQQQ +----boundary-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_final_hyphen.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_final_hyphen.yaml new file mode 100644 index 0000000..17133c9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/almost_match_boundary_without_final_hyphen.yaml @@ -0,0 +1,8 @@ +boundary: --boundary +expected: + - name: field + type: field + data: !!binary | + UVFRUVFRUVFRUVFRUVFRUVFRUVENCi0tLS1ib3VuZGFyeS1RUVFRUVFRUVFRUVFRUVFRUVFRUQ== + + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_end_of_headers.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_end_of_headers.http new file mode 100644 index 0000000..de14ae1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_end_of_headers.http @@ -0,0 +1,4 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field" + QThis is a test. +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_end_of_headers.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_end_of_headers.yaml new file mode 100644 index 0000000..5fc1ec0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_end_of_headers.yaml @@ -0,0 +1,3 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + error: 89 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_header_char.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_header_char.http new file mode 100644 index 0000000..b90d00d --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_header_char.http @@ -0,0 +1,5 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-999position: form-data; name="field" + +This is a test. 
+------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_header_char.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_header_char.yaml new file mode 100644 index 0000000..9d5f62a --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_header_char.yaml @@ -0,0 +1,3 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + error: 50 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_initial_boundary.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_initial_boundary.http new file mode 100644 index 0000000..6aab9da --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_initial_boundary.http @@ -0,0 +1,5 @@ +------WebQitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field" + +This is a test. +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_initial_boundary.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_initial_boundary.yaml new file mode 100644 index 0000000..ffa4eb7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/bad_initial_boundary.yaml @@ -0,0 +1,3 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + error: 9 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/base64_encoding.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/base64_encoding.http new file mode 100644 index 0000000..3d2980f --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/base64_encoding.http @@ -0,0 +1,7 @@ +----boundary +Content-Disposition: form-data; name="file"; filename="test.txt" +Content-Type: text/plain +Content-Transfer-Encoding: base64 + +VGVzdCAxMjM= +----boundary-- diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/base64_encoding.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/base64_encoding.yaml new file mode 100644 index 0000000..1033150 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/base64_encoding.yaml @@ -0,0 +1,7 @@ +boundary: --boundary +expected: + - name: file + type: file + file_name: test.txt + data: !!binary | + VGVzdCAxMjM= diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/empty_header.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/empty_header.http new file mode 100644 index 0000000..bd593f4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/empty_header.http @@ -0,0 +1,5 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +: form-data; name="field" + +This is a test. 
+------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/empty_header.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/empty_header.yaml new file mode 100644 index 0000000..574ed4c --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/empty_header.yaml @@ -0,0 +1,3 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + error: 42 diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_fields.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_fields.http new file mode 100644 index 0000000..4f13037 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_fields.http @@ -0,0 +1,9 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field1" + +field1 +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field2" + +field2 +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_fields.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_fields.yaml new file mode 100644 index 0000000..cb2c2d6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_fields.yaml @@ -0,0 +1,10 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + - name: field1 + type: field + data: !!binary | + ZmllbGQx + - name: field2 + type: field + data: !!binary | + ZmllbGQy diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_files.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_files.http new file mode 100644 index 0000000..fd2e468 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_files.http @@ -0,0 +1,11 @@ +------WebKitFormBoundarygbACTUR58IyeurVf +Content-Disposition: form-data; name="file1"; filename="test1.txt" +Content-Type: text/plain + +Test file #1 +------WebKitFormBoundarygbACTUR58IyeurVf +Content-Disposition: form-data; name="file2"; filename="test2.txt" +Content-Type: text/plain + +Test file #2 +------WebKitFormBoundarygbACTUR58IyeurVf-- diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_files.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_files.yaml new file mode 100644 index 0000000..3bf70e2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/multiple_files.yaml @@ -0,0 +1,13 @@ +boundary: ----WebKitFormBoundarygbACTUR58IyeurVf +expected: + - name: file1 + type: file + file_name: test1.txt + data: !!binary | + VGVzdCBmaWxlICMx + - name: file2 + type: file + file_name: test2.txt + data: !!binary | + VGVzdCBmaWxlICMy + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/quoted_printable_encoding.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/quoted_printable_encoding.http new file mode 100644 index 0000000..09e555a --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/quoted_printable_encoding.http @@ -0,0 +1,7 @@ +----boundary +Content-Disposition: form-data; name="file"; filename="test.txt" +Content-Type: text/plain +Content-Transfer-Encoding: quoted-printable + +foo=3Dbar +----boundary-- diff --git 
a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/quoted_printable_encoding.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/quoted_printable_encoding.yaml new file mode 100644 index 0000000..2c6bbfb --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/quoted_printable_encoding.yaml @@ -0,0 +1,7 @@ +boundary: --boundary +expected: + - name: file + type: file + file_name: test.txt + data: !!binary | + Zm9vPWJhcg== diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field.http new file mode 100644 index 0000000..8b90b73 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field.http @@ -0,0 +1,5 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field" + +This is a test. +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field.yaml new file mode 100644 index 0000000..7690f08 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field.yaml @@ -0,0 +1,6 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + - name: field + type: field + data: !!binary | + VGhpcyBpcyBhIHRlc3Qu diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_blocks.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_blocks.http new file mode 100644 index 0000000..5a61d83 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_blocks.http @@ -0,0 +1,5 @@ +--boundary +Content-Disposition: form-data; name="field" + +0123456789ABCDEFGHIJ0123456789ABCDEFGHIJ +--boundary-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_blocks.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_blocks.yaml new file mode 100644 index 0000000..efb1b32 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_blocks.yaml @@ -0,0 +1,6 @@ +boundary: --boundary +expected: + - name: field + type: field + data: !!binary | + MDEyMzQ1Njc4OUFCQ0RFRkdISUowMTIzNDU2Nzg5QUJDREVGR0hJSg== diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_longer.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_longer.http new file mode 100644 index 0000000..46bd7e1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_longer.http @@ -0,0 +1,5 @@ +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field" + +qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_longer.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_longer.yaml new file mode 100644 index 0000000..5a11840 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_longer.yaml @@ -0,0 +1,6 @@ +boundary: 
----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + - name: field + type: field + data: !!binary | + cXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXFxcXE= diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_single_file.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_single_file.http new file mode 100644 index 0000000..34a822b --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_single_file.http @@ -0,0 +1,10 @@ +--boundary +Content-Disposition: form-data; name="field" + +test1 +--boundary +Content-Disposition: form-data; name="file"; filename="file.txt" +Content-Type: text/plain + +test2 +--boundary-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_single_file.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_single_file.yaml new file mode 100644 index 0000000..47c8d6e --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_single_file.yaml @@ -0,0 +1,13 @@ +boundary: boundary +expected: + - name: field + type: field + data: !!binary | + dGVzdDE= + - name: file + type: file + file_name: file.txt + data: !!binary | + dGVzdDI= + + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_with_leading_newlines.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_with_leading_newlines.http new file mode 100644 index 0000000..10ebc2e --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_with_leading_newlines.http @@ -0,0 +1,7 @@ + + +------WebKitFormBoundaryTkr3kCBQlBe1nrhc +Content-Disposition: form-data; name="field" + +This is a test. +------WebKitFormBoundaryTkr3kCBQlBe1nrhc-- \ No newline at end of file diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_with_leading_newlines.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_with_leading_newlines.yaml new file mode 100644 index 0000000..7690f08 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_field_with_leading_newlines.yaml @@ -0,0 +1,6 @@ +boundary: ----WebKitFormBoundaryTkr3kCBQlBe1nrhc +expected: + - name: field + type: field + data: !!binary | + VGhpcyBpcyBhIHRlc3Qu diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_file.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_file.http new file mode 100644 index 0000000..104bfd0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_file.http @@ -0,0 +1,6 @@ +------WebKitFormBoundary5BZGOJCWtXGYC9HW +Content-Disposition: form-data; name="file"; filename="test.txt" +Content-Type: text/plain + +This is a test file. 
+------WebKitFormBoundary5BZGOJCWtXGYC9HW-- diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_file.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_file.yaml new file mode 100644 index 0000000..2a8e005 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/single_file.yaml @@ -0,0 +1,8 @@ +boundary: ----WebKitFormBoundary5BZGOJCWtXGYC9HW +expected: + - name: file + type: file + file_name: test.txt + data: !!binary | + VGhpcyBpcyBhIHRlc3QgZmlsZS4= + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/utf8_filename.http b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/utf8_filename.http new file mode 100644 index 0000000..c26df08 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/utf8_filename.http @@ -0,0 +1,6 @@ +------WebKitFormBoundaryI9SCEFp2lpx5DR2K +Content-Disposition: form-data; name="file"; filename="???.txt" +Content-Type: text/plain + +これはテストです。 +------WebKitFormBoundaryI9SCEFp2lpx5DR2K-- diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/utf8_filename.yaml b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/utf8_filename.yaml new file mode 100644 index 0000000..507ba2c --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_data/http/utf8_filename.yaml @@ -0,0 +1,8 @@ +boundary: ----WebKitFormBoundaryI9SCEFp2lpx5DR2K +expected: + - name: file + type: file + file_name: ???.txt + data: !!binary | + 44GT44KM44Gv44OG44K544OI44Gn44GZ44CC + diff --git a/venv/lib/python3.12/site-packages/multipart/tests/test_multipart.py b/venv/lib/python3.12/site-packages/multipart/tests/test_multipart.py new file mode 100644 index 0000000..089f451 --- /dev/null +++ b/venv/lib/python3.12/site-packages/multipart/tests/test_multipart.py @@ -0,0 +1,1305 @@ +import os +import sys +import glob +import yaml +import base64 +import random +import tempfile +import unittest +from .compat import ( + parametrize, + parametrize_class, + slow_test, +) +from io import BytesIO +from unittest.mock import MagicMock, Mock, patch + +from ..multipart import * + + +# Get the current directory for our later test cases. 
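+# (curr_dir anchors the test_data/http fixture directory that the HTTP test
+# cases further below are loaded from.)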
+curr_dir = os.path.abspath(os.path.dirname(__file__)) + + +def force_bytes(val): + if isinstance(val, str): + val = val.encode(sys.getfilesystemencoding()) + + return val + + +class TestField(unittest.TestCase): + def setUp(self): + self.f = Field('foo') + + def test_name(self): + self.assertEqual(self.f.field_name, 'foo') + + def test_data(self): + self.f.write(b'test123') + self.assertEqual(self.f.value, b'test123') + + def test_cache_expiration(self): + self.f.write(b'test') + self.assertEqual(self.f.value, b'test') + self.f.write(b'123') + self.assertEqual(self.f.value, b'test123') + + def test_finalize(self): + self.f.write(b'test123') + self.f.finalize() + self.assertEqual(self.f.value, b'test123') + + def test_close(self): + self.f.write(b'test123') + self.f.close() + self.assertEqual(self.f.value, b'test123') + + def test_from_value(self): + f = Field.from_value(b'name', b'value') + self.assertEqual(f.field_name, b'name') + self.assertEqual(f.value, b'value') + + f2 = Field.from_value(b'name', None) + self.assertEqual(f2.value, None) + + def test_equality(self): + f1 = Field.from_value(b'name', b'value') + f2 = Field.from_value(b'name', b'value') + + self.assertEqual(f1, f2) + + def test_equality_with_other(self): + f = Field.from_value(b'foo', b'bar') + self.assertFalse(f == b'foo') + self.assertFalse(b'foo' == f) + + def test_set_none(self): + f = Field(b'foo') + self.assertEqual(f.value, b'') + + f.set_none() + self.assertEqual(f.value, None) + + +class TestFile(unittest.TestCase): + def setUp(self): + self.c = {} + self.d = force_bytes(tempfile.mkdtemp()) + self.f = File(b'foo.txt', config=self.c) + + def assert_data(self, data): + f = self.f.file_object + f.seek(0) + self.assertEqual(f.read(), data) + f.seek(0) + f.truncate() + + def assert_exists(self): + full_path = os.path.join(self.d, self.f.actual_file_name) + self.assertTrue(os.path.exists(full_path)) + + def test_simple(self): + self.f.write(b'foobar') + self.assert_data(b'foobar') + + def test_invalid_write(self): + m = Mock() + m.write.return_value = 5 + self.f._fileobj = m + v = self.f.write(b'foobar') + self.assertEqual(v, 5) + + def test_file_fallback(self): + self.c['MAX_MEMORY_FILE_SIZE'] = 1 + + self.f.write(b'1') + self.assertTrue(self.f.in_memory) + self.assert_data(b'1') + + self.f.write(b'123') + self.assertFalse(self.f.in_memory) + self.assert_data(b'123') + + # Test flushing too. + old_obj = self.f.file_object + self.f.flush_to_disk() + self.assertFalse(self.f.in_memory) + self.assertIs(self.f.file_object, old_obj) + + def test_file_fallback_with_data(self): + self.c['MAX_MEMORY_FILE_SIZE'] = 10 + + self.f.write(b'1' * 10) + self.assertTrue(self.f.in_memory) + + self.f.write(b'2' * 10) + self.assertFalse(self.f.in_memory) + + self.assert_data(b'11111111112222222222') + + def test_file_name(self): + # Write to this dir. + self.c['UPLOAD_DIR'] = self.d + self.c['MAX_MEMORY_FILE_SIZE'] = 10 + + # Write. + self.f.write(b'12345678901') + self.assertFalse(self.f.in_memory) + + # Assert that the file exists + self.assertIsNotNone(self.f.actual_file_name) + self.assert_exists() + + def test_file_full_name(self): + # Write to this dir. + self.c['UPLOAD_DIR'] = self.d + self.c['UPLOAD_KEEP_FILENAME'] = True + self.c['MAX_MEMORY_FILE_SIZE'] = 10 + + # Write. 
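+        # (11 bytes exceeds the MAX_MEMORY_FILE_SIZE of 10 set above, so
+        # this write forces the File to roll over from in-memory storage to
+        # an on-disk file inside UPLOAD_DIR.)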
+        self.f.write(b'12345678901')
+        self.assertFalse(self.f.in_memory)
+
+        # Assert that the file exists
+        self.assertEqual(self.f.actual_file_name, b'foo')
+        self.assert_exists()
+
+    def test_file_full_name_with_ext(self):
+        self.c['UPLOAD_DIR'] = self.d
+        self.c['UPLOAD_KEEP_FILENAME'] = True
+        self.c['UPLOAD_KEEP_EXTENSIONS'] = True
+        self.c['MAX_MEMORY_FILE_SIZE'] = 10
+
+        # Write.
+        self.f.write(b'12345678901')
+        self.assertFalse(self.f.in_memory)
+
+        # Assert that the file exists
+        self.assertEqual(self.f.actual_file_name, b'foo.txt')
+        self.assert_exists()
+
+    def test_no_dir_with_extension(self):
+        self.c['UPLOAD_KEEP_EXTENSIONS'] = True
+        self.c['MAX_MEMORY_FILE_SIZE'] = 10
+
+        # Write.
+        self.f.write(b'12345678901')
+        self.assertFalse(self.f.in_memory)
+
+        # Assert that the file exists
+        ext = os.path.splitext(self.f.actual_file_name)[1]
+        self.assertEqual(ext, b'.txt')
+        self.assert_exists()
+
+    def test_invalid_dir_with_name(self):
+        # Write to this dir.
+        self.c['UPLOAD_DIR'] = force_bytes(os.path.join('/', 'tmp', 'notexisting'))
+        self.c['UPLOAD_KEEP_FILENAME'] = True
+        self.c['MAX_MEMORY_FILE_SIZE'] = 5
+
+        # Write.
+        with self.assertRaises(FileError):
+            self.f.write(b'1234567890')
+
+    def test_invalid_dir_no_name(self):
+        # Write to this dir.
+        self.c['UPLOAD_DIR'] = force_bytes(os.path.join('/', 'tmp', 'notexisting'))
+        self.c['UPLOAD_KEEP_FILENAME'] = False
+        self.c['MAX_MEMORY_FILE_SIZE'] = 5
+
+        # Write.
+        with self.assertRaises(FileError):
+            self.f.write(b'1234567890')
+
+    # TODO: test uploading two files with the same name.
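+    # (A hypothetical sketch of that missing test, under the assumption
+    # that colliding names should yield distinct on-disk files rather than
+    # clobbering each other:
+    #     def test_duplicate_file_names(self):
+    #         self.c['UPLOAD_DIR'] = self.d
+    #         self.c['UPLOAD_KEEP_FILENAME'] = True
+    #         self.c['MAX_MEMORY_FILE_SIZE'] = 5
+    #         f1 = File(b'dup.txt', config=self.c)
+    #         f2 = File(b'dup.txt', config=self.c)
+    #         f1.write(b'1' * 10)
+    #         f2.write(b'2' * 10)
+    #         # behaviour to pin down: the second upload should not
+    #         # overwrite the first file's contents
+    #         self.assertNotEqual(f1.actual_file_name, f2.actual_file_name)
+    # )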
+ + +class TestParseOptionsHeader(unittest.TestCase): + def test_simple(self): + t, p = parse_options_header('application/json') + self.assertEqual(t, b'application/json') + self.assertEqual(p, {}) + + def test_blank(self): + t, p = parse_options_header('') + self.assertEqual(t, b'') + self.assertEqual(p, {}) + + def test_single_param(self): + t, p = parse_options_header('application/json;par=val') + self.assertEqual(t, b'application/json') + self.assertEqual(p, {b'par': b'val'}) + + def test_single_param_with_spaces(self): + t, p = parse_options_header(b'application/json; par=val') + self.assertEqual(t, b'application/json') + self.assertEqual(p, {b'par': b'val'}) + + def test_multiple_params(self): + t, p = parse_options_header(b'application/json;par=val;asdf=foo') + self.assertEqual(t, b'application/json') + self.assertEqual(p, {b'par': b'val', b'asdf': b'foo'}) + + def test_quoted_param(self): + t, p = parse_options_header(b'application/json;param="quoted"') + self.assertEqual(t, b'application/json') + self.assertEqual(p, {b'param': b'quoted'}) + + def test_quoted_param_with_semicolon(self): + t, p = parse_options_header(b'application/json;param="quoted;with;semicolons"') + self.assertEqual(p[b'param'], b'quoted;with;semicolons') + + def test_quoted_param_with_escapes(self): + t, p = parse_options_header(b'application/json;param="This \\" is \\" a \\" quote"') + self.assertEqual(p[b'param'], b'This " is " a " quote') + + def test_handles_ie6_bug(self): + t, p = parse_options_header(b'text/plain; filename="C:\\this\\is\\a\\path\\file.txt"') + + self.assertEqual(p[b'filename'], b'file.txt') + + +class TestBaseParser(unittest.TestCase): + def setUp(self): + self.b = BaseParser() + self.b.callbacks = {} + + def test_callbacks(self): + # The stupid list-ness is to get around lack of nonlocal on py2 + l = [0] + def on_foo(): + l[0] += 1 + + self.b.set_callback('foo', on_foo) + self.b.callback('foo') + self.assertEqual(l[0], 1) + + self.b.set_callback('foo', None) + self.b.callback('foo') + self.assertEqual(l[0], 1) + + +class TestQuerystringParser(unittest.TestCase): + def assert_fields(self, *args, **kwargs): + if kwargs.pop('finalize', True): + self.p.finalize() + + self.assertEqual(self.f, list(args)) + if kwargs.get('reset', True): + self.f = [] + + def setUp(self): + self.reset() + + def reset(self): + self.f = [] + + name_buffer = [] + data_buffer = [] + + def on_field_name(data, start, end): + name_buffer.append(data[start:end]) + + def on_field_data(data, start, end): + data_buffer.append(data[start:end]) + + def on_field_end(): + self.f.append(( + b''.join(name_buffer), + b''.join(data_buffer) + )) + + del name_buffer[:] + del data_buffer[:] + + callbacks = { + 'on_field_name': on_field_name, + 'on_field_data': on_field_data, + 'on_field_end': on_field_end + } + + self.p = QuerystringParser(callbacks) + + def test_simple_querystring(self): + self.p.write(b'foo=bar') + + self.assert_fields((b'foo', b'bar')) + + def test_querystring_blank_beginning(self): + self.p.write(b'&foo=bar') + + self.assert_fields((b'foo', b'bar')) + + def test_querystring_blank_end(self): + self.p.write(b'foo=bar&') + + self.assert_fields((b'foo', b'bar')) + + def test_multiple_querystring(self): + self.p.write(b'foo=bar&asdf=baz') + + self.assert_fields( + (b'foo', b'bar'), + (b'asdf', b'baz') + ) + + def test_streaming_simple(self): + self.p.write(b'foo=bar&') + self.assert_fields( + (b'foo', b'bar'), + finalize=False + ) + + self.p.write(b'asdf=baz') + self.assert_fields( + (b'asdf', b'baz') + ) + + def 
test_streaming_break(self): + self.p.write(b'foo=one') + self.assert_fields(finalize=False) + + self.p.write(b'two') + self.assert_fields(finalize=False) + + self.p.write(b'three') + self.assert_fields(finalize=False) + + self.p.write(b'&asd') + self.assert_fields( + (b'foo', b'onetwothree'), + finalize=False + ) + + self.p.write(b'f=baz') + self.assert_fields( + (b'asdf', b'baz') + ) + + def test_semicolon_separator(self): + self.p.write(b'foo=bar;asdf=baz') + + self.assert_fields( + (b'foo', b'bar'), + (b'asdf', b'baz') + ) + + def test_too_large_field(self): + self.p.max_size = 15 + + # Note: len = 8 + self.p.write(b"foo=bar&") + self.assert_fields((b'foo', b'bar'), finalize=False) + + # Note: len = 8, only 7 bytes processed + self.p.write(b'a=123456') + self.assert_fields((b'a', b'12345')) + + def test_invalid_max_size(self): + with self.assertRaises(ValueError): + p = QuerystringParser(max_size=-100) + + def test_strict_parsing_pass(self): + data = b'foo=bar&another=asdf' + for first, last in split_all(data): + self.reset() + self.p.strict_parsing = True + + print(f"{first!r} / {last!r}") + + self.p.write(first) + self.p.write(last) + self.assert_fields((b'foo', b'bar'), (b'another', b'asdf')) + + def test_strict_parsing_fail_double_sep(self): + data = b'foo=bar&&another=asdf' + for first, last in split_all(data): + self.reset() + self.p.strict_parsing = True + + cnt = 0 + with self.assertRaises(QuerystringParseError) as cm: + cnt += self.p.write(first) + cnt += self.p.write(last) + self.p.finalize() + + # The offset should occur at 8 bytes into the data (as a whole), + # so we calculate the offset into the chunk. + if cm is not None: + self.assertEqual(cm.exception.offset, 8 - cnt) + + def test_double_sep(self): + data = b'foo=bar&&another=asdf' + for first, last in split_all(data): + print(f" {first!r} / {last!r} ") + self.reset() + + cnt = 0 + cnt += self.p.write(first) + cnt += self.p.write(last) + + self.assert_fields((b'foo', b'bar'), (b'another', b'asdf')) + + def test_strict_parsing_fail_no_value(self): + self.p.strict_parsing = True + with self.assertRaises(QuerystringParseError) as cm: + self.p.write(b'foo=bar&blank&another=asdf') + + if cm is not None: + self.assertEqual(cm.exception.offset, 8) + + def test_success_no_value(self): + self.p.write(b'foo=bar&blank&another=asdf') + self.assert_fields( + (b'foo', b'bar'), + (b'blank', b''), + (b'another', b'asdf') + ) + + def test_repr(self): + # Issue #29; verify we don't assert on repr() + _ignored = repr(self.p) + + +class TestOctetStreamParser(unittest.TestCase): + def setUp(self): + self.d = [] + self.started = 0 + self.finished = 0 + + def on_start(): + self.started += 1 + + def on_data(data, start, end): + self.d.append(data[start:end]) + + def on_end(): + self.finished += 1 + + callbacks = { + 'on_start': on_start, + 'on_data': on_data, + 'on_end': on_end + } + + self.p = OctetStreamParser(callbacks) + + def assert_data(self, data, finalize=True): + self.assertEqual(b''.join(self.d), data) + self.d = [] + + def assert_started(self, val=True): + if val: + self.assertEqual(self.started, 1) + else: + self.assertEqual(self.started, 0) + + def assert_finished(self, val=True): + if val: + self.assertEqual(self.finished, 1) + else: + self.assertEqual(self.finished, 0) + + def test_simple(self): + # Assert is not started + self.assert_started(False) + + # Write something, it should then be started + have data + self.p.write(b'foobar') + self.assert_started() + self.assert_data(b'foobar') + + # Finalize, and check + 
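+        # (OctetStreamParser only invokes the on_end callback from
+        # finalize(), which is why 'finished' must still be false here.)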
self.assert_finished(False) + self.p.finalize() + self.assert_finished() + + def test_multiple_chunks(self): + self.p.write(b'foo') + self.p.write(b'bar') + self.p.write(b'baz') + self.p.finalize() + + self.assert_data(b'foobarbaz') + self.assert_finished() + + def test_max_size(self): + self.p.max_size = 5 + + self.p.write(b'0123456789') + self.p.finalize() + + self.assert_data(b'01234') + self.assert_finished() + + def test_invalid_max_size(self): + with self.assertRaises(ValueError): + q = OctetStreamParser(max_size='foo') + + +class TestBase64Decoder(unittest.TestCase): + # Note: base64('foobar') == 'Zm9vYmFy' + def setUp(self): + self.f = BytesIO() + self.d = Base64Decoder(self.f) + + def assert_data(self, data, finalize=True): + if finalize: + self.d.finalize() + + self.f.seek(0) + self.assertEqual(self.f.read(), data) + self.f.seek(0) + self.f.truncate() + + def test_simple(self): + self.d.write(b'Zm9vYmFy') + self.assert_data(b'foobar') + + def test_bad(self): + with self.assertRaises(DecodeError): + self.d.write(b'Zm9v!mFy') + + def test_split_properly(self): + self.d.write(b'Zm9v') + self.d.write(b'YmFy') + self.assert_data(b'foobar') + + def test_bad_split(self): + buff = b'Zm9v' + for i in range(1, 4): + first, second = buff[:i], buff[i:] + + self.setUp() + self.d.write(first) + self.d.write(second) + self.assert_data(b'foo') + + def test_long_bad_split(self): + buff = b'Zm9vYmFy' + for i in range(5, 8): + first, second = buff[:i], buff[i:] + + self.setUp() + self.d.write(first) + self.d.write(second) + self.assert_data(b'foobar') + + def test_close_and_finalize(self): + parser = Mock() + f = Base64Decoder(parser) + + f.finalize() + parser.finalize.assert_called_once_with() + + f.close() + parser.close.assert_called_once_with() + + def test_bad_length(self): + self.d.write(b'Zm9vYmF') # missing ending 'y' + + with self.assertRaises(DecodeError): + self.d.finalize() + + +class TestQuotedPrintableDecoder(unittest.TestCase): + def setUp(self): + self.f = BytesIO() + self.d = QuotedPrintableDecoder(self.f) + + def assert_data(self, data, finalize=True): + if finalize: + self.d.finalize() + + self.f.seek(0) + self.assertEqual(self.f.read(), data) + self.f.seek(0) + self.f.truncate() + + def test_simple(self): + self.d.write(b'foobar') + self.assert_data(b'foobar') + + def test_with_escape(self): + self.d.write(b'foo=3Dbar') + self.assert_data(b'foo=bar') + + def test_with_newline_escape(self): + self.d.write(b'foo=\r\nbar') + self.assert_data(b'foobar') + + def test_with_only_newline_escape(self): + self.d.write(b'foo=\nbar') + self.assert_data(b'foobar') + + def test_with_split_escape(self): + self.d.write(b'foo=3') + self.d.write(b'Dbar') + self.assert_data(b'foo=bar') + + def test_with_split_newline_escape_1(self): + self.d.write(b'foo=\r') + self.d.write(b'\nbar') + self.assert_data(b'foobar') + + def test_with_split_newline_escape_2(self): + self.d.write(b'foo=') + self.d.write(b'\r\nbar') + self.assert_data(b'foobar') + + def test_close_and_finalize(self): + parser = Mock() + f = QuotedPrintableDecoder(parser) + + f.finalize() + parser.finalize.assert_called_once_with() + + f.close() + parser.close.assert_called_once_with() + + def test_not_aligned(self): + """ + https://github.com/andrew-d/python-multipart/issues/6 + """ + self.d.write(b'=3AX') + self.assert_data(b':X') + + # Additional offset tests + self.d.write(b'=3') + self.d.write(b'AX') + self.assert_data(b':X') + + self.d.write(b'q=3AX') + self.assert_data(b'q:X') + + +# Load our list of HTTP test cases. 
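+# (Each <name>.http file under test_data/http holds a raw request body, and
+# the matching <name>.yaml describes the boundary plus either the expected
+# fields/files or an expected parse-error offset, e.g.:
+#     boundary: --boundary
+#     expected:
+#       - name: field
+#         type: field
+#         data: !!binary |
+#           dGVzdDE=
+# )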
+http_tests_dir = os.path.join(curr_dir, 'test_data', 'http') + +# Read in all test cases and load them. +NON_PARAMETRIZED_TESTS = {'single_field_blocks'} +http_tests = [] +for f in os.listdir(http_tests_dir): + # Only load the HTTP test cases. + fname, ext = os.path.splitext(f) + if fname in NON_PARAMETRIZED_TESTS: + continue + + if ext == '.http': + # Get the YAML file and load it too. + yaml_file = os.path.join(http_tests_dir, fname + '.yaml') + + # Load both. + with open(os.path.join(http_tests_dir, f), 'rb') as f: + test_data = f.read() + + with open(yaml_file, 'rb') as f: + yaml_data = yaml.safe_load(f) + + http_tests.append({ + 'name': fname, + 'test': test_data, + 'result': yaml_data + }) + + +def split_all(val): + """ + This function will split an array all possible ways. For example: + split_all([1,2,3,4]) + will give: + ([1], [2,3,4]), ([1,2], [3,4]), ([1,2,3], [4]) + """ + for i in range(1, len(val) - 1): + yield (val[:i], val[i:]) + + +@parametrize_class +class TestFormParser(unittest.TestCase): + def make(self, boundary, config={}): + self.ended = False + self.files = [] + self.fields = [] + + def on_field(f): + self.fields.append(f) + + def on_file(f): + self.files.append(f) + + def on_end(): + self.ended = True + + # Get a form-parser instance. + self.f = FormParser('multipart/form-data', on_field, on_file, on_end, + boundary=boundary, config=config) + + def assert_file_data(self, f, data): + o = f.file_object + o.seek(0) + file_data = o.read() + self.assertEqual(file_data, data) + + def assert_file(self, field_name, file_name, data): + # Find this file. + found = None + for f in self.files: + if f.field_name == field_name: + found = f + break + + # Assert that we found it. + self.assertIsNotNone(found) + + try: + # Assert about this file. + self.assert_file_data(found, data) + self.assertEqual(found.file_name, file_name) + + # Remove it from our list. + self.files.remove(found) + finally: + # Close our file + found.close() + + def assert_field(self, name, value): + # Find this field in our fields list. + found = None + for f in self.fields: + if f.field_name == name: + found = f + break + + # Assert that it exists and matches. + self.assertIsNotNone(found) + self.assertEqual(value, found.value) + + # Remove it for future iterations. + self.fields.remove(found) + + @parametrize('param', http_tests) + def test_http(self, param): + # Firstly, create our parser with the given boundary. + boundary = param['result']['boundary'] + if isinstance(boundary, str): + boundary = boundary.encode('latin-1') + self.make(boundary) + + # Now, we feed the parser with data. + exc = None + try: + processed = self.f.write(param['test']) + self.f.finalize() + except MultipartParseError as e: + processed = 0 + exc = e + + # print(repr(param)) + # print("") + # print(repr(self.fields)) + # print(repr(self.files)) + + # Do we expect an error? + if 'error' in param['result']['expected']: + self.assertIsNotNone(exc) + self.assertEqual(param['result']['expected']['error'], exc.offset) + return + + # No error! + self.assertEqual(processed, len(param['test'])) + + # Assert that the parser gave us the appropriate fields/files. + for e in param['result']['expected']: + # Get our type and name. 
+ type = e['type'] + name = e['name'].encode('latin-1') + + if type == 'field': + self.assert_field(name, e['data']) + + elif type == 'file': + self.assert_file( + name, + e['file_name'].encode('latin-1'), + e['data'] + ) + + else: + assert False + + def test_random_splitting(self): + """ + This test runs a simple multipart body with one field and one file + through every possible split. + """ + # Load test data. + test_file = 'single_field_single_file.http' + with open(os.path.join(http_tests_dir, test_file), 'rb') as f: + test_data = f.read() + + # We split the file through all cases. + for first, last in split_all(test_data): + # Create form parser. + self.make('boundary') + + # Feed with data in 2 chunks. + i = 0 + i += self.f.write(first) + i += self.f.write(last) + self.f.finalize() + + # Assert we processed everything. + self.assertEqual(i, len(test_data)) + + # Assert that our file and field are here. + self.assert_field(b'field', b'test1') + self.assert_file(b'file', b'file.txt', b'test2') + + def test_feed_single_bytes(self): + """ + This test parses a simple multipart body 1 byte at a time. + """ + # Load test data. + test_file = 'single_field_single_file.http' + with open(os.path.join(http_tests_dir, test_file), 'rb') as f: + test_data = f.read() + + # Create form parser. + self.make('boundary') + + # Write all bytes. + # NOTE: Can't simply do `for b in test_data`, since that gives + # an integer when iterating over a bytes object on Python 3. + i = 0 + for x in range(len(test_data)): + b = test_data[x:x + 1] + i += self.f.write(b) + + self.f.finalize() + + # Assert we processed everything. + self.assertEqual(i, len(test_data)) + + # Assert that our file and field are here. + self.assert_field(b'field', b'test1') + self.assert_file(b'file', b'file.txt', b'test2') + + def test_feed_blocks(self): + """ + This test parses a simple multipart body in fixed-size blocks, + for every block size and starting offset. + """ + # Load test data. + test_file = 'single_field_blocks.http' + with open(os.path.join(http_tests_dir, test_file), 'rb') as f: + test_data = f.read() + + for c in range(1, len(test_data) + 1): + # Skip first `d` bytes - not interesting + for d in range(c): + + # Create form parser. + self.make('boundary') + # Skip + i = 0 + self.f.write(test_data[:d]) + i += d + for x in range(d, len(test_data), c): + # Write a chunk to achieve condition + # `i == data_length - 1` + # in boundary search loop (multipart.py:1302) + b = test_data[x:x + c] + i += self.f.write(b) + + self.f.finalize() + + # Assert we processed everything. + self.assertEqual(i, len(test_data)) + + # Assert that our field is here. + self.assert_field(b'field', + b'0123456789ABCDEFGHIJ0123456789ABCDEFGHIJ') + + @slow_test + def test_request_body_fuzz(self): + """ + This test randomly fuzzes the request body to ensure that no strange + exceptions are raised and we don't end up in a strange state. The + fuzzing consists of randomly doing one of the following: + - Adding a random byte at a random offset + - Randomly deleting a single byte + - Randomly swapping two bytes + """ + # Load test data. + test_file = 'single_field_single_file.http' + with open(os.path.join(http_tests_dir, test_file), 'rb') as f: + test_data = f.read() + + iterations = 1000 + successes = 0 + failures = 0 + exceptions = 0 + + print("Running %d iterations of fuzz testing:" % (iterations,)) + for i in range(iterations): + # Create a bytearray to mutate. + fuzz_data = bytearray(test_data) + + # Pick what we're supposed to do.
+ choice = random.choice([1, 2, 3]) + if choice == 1: + # Add a random byte. + i = random.randrange(len(test_data)) + b = random.randrange(256) + + fuzz_data.insert(i, b) + msg = "Inserting byte %r at offset %d" % (b, i) + + elif choice == 2: + # Remove a random byte. + i = random.randrange(len(test_data)) + del fuzz_data[i] + + msg = "Deleting byte at offset %d" % (i,) + + elif choice == 3: + # Swap two bytes. + i = random.randrange(len(test_data) - 1) + fuzz_data[i], fuzz_data[i + 1] = fuzz_data[i + 1], fuzz_data[i] + + msg = "Swapping bytes %d and %d" % (i, i + 1) + + # Print message, so if this crashes, we can inspect the output. + print(" " + msg) + + # Create form parser. + self.make('boundary') + + # Feed with data, and ignore form parser exceptions. + i = 0 + try: + i = self.f.write(bytes(fuzz_data)) + self.f.finalize() + except FormParserError: + exceptions += 1 + else: + if i == len(fuzz_data): + successes += 1 + else: + failures += 1 + + print("--------------------------------------------------") + print("Successes: %d" % (successes,)) + print("Failures: %d" % (failures,)) + print("Exceptions: %d" % (exceptions,)) + + @slow_test + def test_request_body_fuzz_random_data(self): + """ + This test will fuzz the multipart parser with some number of iterations + of randomly-generated data. + """ + iterations = 1000 + successes = 0 + failures = 0 + exceptions = 0 + + print("Running %d iterations of fuzz testing:" % (iterations,)) + for i in range(iterations): + data_size = random.randrange(100, 4096) + data = os.urandom(data_size) + print(" Testing with %d random bytes..." % (data_size,)) + + # Create form parser. + self.make('boundary') + + # Feed with data, and ignore form parser exceptions. + i = 0 + try: + i = self.f.write(bytes(data)) + self.f.finalize() + except FormParserError: + exceptions += 1 + else: + if i == len(data): + successes += 1 + else: + failures += 1 + + print("--------------------------------------------------") + print("Successes: %d" % (successes,)) + print("Failures: %d" % (failures,)) + print("Exceptions: %d" % (exceptions,)) + + def test_bad_start_boundary(self): + self.make('boundary') + data = b'--boundary\rfoobar' + with self.assertRaises(MultipartParseError): + self.f.write(data) + + self.make('boundary') + data = b'--boundaryfoobar' + with self.assertRaises(MultipartParseError): + i = self.f.write(data) + + def test_octet_stream(self): + files = [] + def on_file(f): + files.append(f) + on_field = Mock() + on_end = Mock() + + f = FormParser('application/octet-stream', on_field, on_file, on_end=on_end, file_name=b'foo.txt') + self.assertTrue(isinstance(f.parser, OctetStreamParser)) + + f.write(b'test') + f.write(b'1234') + f.finalize() + + # Assert that we only received a single file, with the right data, and that we're done. + self.assertFalse(on_field.called) + self.assertEqual(len(files), 1) + self.assert_file_data(files[0], b'test1234') + self.assertTrue(on_end.called) + + def test_querystring(self): + fields = [] + def on_field(f): + fields.append(f) + on_file = Mock() + on_end = Mock() + + def simple_test(f): + # Reset tracking. + del fields[:] + on_file.reset_mock() + on_end.reset_mock() + + # Write test data. + f.write(b'foo=bar') + f.write(b'&test=asdf') + f.finalize() + + # Assert we only received 2 fields... + self.assertFalse(on_file.called) + self.assertEqual(len(fields), 2) + + # ...assert that we have the correct data... 
+ self.assertEqual(fields[0].field_name, b'foo') + self.assertEqual(fields[0].value, b'bar') + + self.assertEqual(fields[1].field_name, b'test') + self.assertEqual(fields[1].value, b'asdf') + + # ... and assert that we've finished. + self.assertTrue(on_end.called) + + f = FormParser('application/x-www-form-urlencoded', on_field, on_file, on_end=on_end) + self.assertTrue(isinstance(f.parser, QuerystringParser)) + simple_test(f) + + f = FormParser('application/x-url-encoded', on_field, on_file, on_end=on_end) + self.assertTrue(isinstance(f.parser, QuerystringParser)) + simple_test(f) + + def test_close_methods(self): + parser = Mock() + f = FormParser('application/x-url-encoded', None, None) + f.parser = parser + + f.finalize() + parser.finalize.assert_called_once_with() + + f.close() + parser.close.assert_called_once_with() + + def test_bad_content_type(self): + # We should raise a ValueError for a bad Content-Type + with self.assertRaises(ValueError): + f = FormParser('application/bad', None, None) + + def test_no_boundary_given(self): + # We should raise a FormParserError when parsing a multipart message + # without a boundary. + with self.assertRaises(FormParserError): + f = FormParser('multipart/form-data', None, None) + + def test_bad_content_transfer_encoding(self): + data = b'----boundary\r\nContent-Disposition: form-data; name="file"; filename="test.txt"\r\nContent-Type: text/plain\r\nContent-Transfer-Encoding: badstuff\r\n\r\nTest\r\n----boundary--\r\n' + + files = [] + def on_file(f): + files.append(f) + on_field = Mock() + on_end = Mock() + + # Test with erroring. + config = {'UPLOAD_ERROR_ON_BAD_CTE': True} + f = FormParser('multipart/form-data', on_field, on_file, + on_end=on_end, boundary='--boundary', config=config) + + with self.assertRaises(FormParserError): + f.write(data) + f.finalize() + + # Test without erroring. + config = {'UPLOAD_ERROR_ON_BAD_CTE': False} + f = FormParser('multipart/form-data', on_field, on_file, + on_end=on_end, boundary='--boundary', config=config) + + f.write(data) + f.finalize() + self.assert_file_data(files[0], b'Test') + + def test_handles_None_fields(self): + fields = [] + def on_field(f): + fields.append(f) + on_file = Mock() + on_end = Mock() + + f = FormParser('application/x-www-form-urlencoded', on_field, on_file, on_end=on_end) + f.write(b'foo=bar&another&baz=asdf') + f.finalize() + + self.assertEqual(fields[0].field_name, b'foo') + self.assertEqual(fields[0].value, b'bar') + + self.assertEqual(fields[1].field_name, b'another') + self.assertEqual(fields[1].value, None) + + self.assertEqual(fields[2].field_name, b'baz') + self.assertEqual(fields[2].value, b'asdf') + + def test_max_size_multipart(self): + # Load test data. + test_file = 'single_field_single_file.http' + with open(os.path.join(http_tests_dir, test_file), 'rb') as f: + test_data = f.read() + + # Create form parser. + self.make('boundary') + + # Set the maximum length that we can process to be halfway through the + # given data. + self.f.parser.max_size = len(test_data) / 2 + + i = self.f.write(test_data) + self.f.finalize() + + # Assert we processed the correct amount. + self.assertEqual(i, len(test_data) / 2) + + def test_max_size_form_parser(self): + # Load test data. + test_file = 'single_field_single_file.http' + with open(os.path.join(http_tests_dir, test_file), 'rb') as f: + test_data = f.read() + + # Create form parser setting the maximum length that we can process to + # be halfway through the given data. 
+ size = len(test_data) / 2 + self.make('boundary', config={'MAX_BODY_SIZE': size}) + + i = self.f.write(test_data) + self.f.finalize() + + # Assert we processed the correct amount. + self.assertEqual(i, len(test_data) / 2) + + def test_octet_stream_max_size(self): + files = [] + def on_file(f): + files.append(f) + on_field = Mock() + on_end = Mock() + + f = FormParser('application/octet-stream', on_field, on_file, + on_end=on_end, file_name=b'foo.txt', + config={'MAX_BODY_SIZE': 10}) + + f.write(b'0123456789012345689') + f.finalize() + + self.assert_file_data(files[0], b'0123456789') + + def test_invalid_max_size_multipart(self): + with self.assertRaises(ValueError): + q = MultipartParser(b'bound', max_size='foo') + + +class TestHelperFunctions(unittest.TestCase): + def test_create_form_parser(self): + r = create_form_parser({'Content-Type': 'application/octet-stream'}, + None, None) + self.assertTrue(isinstance(r, FormParser)) + + def test_create_form_parser_error(self): + headers = {} + with self.assertRaises(ValueError): + create_form_parser(headers, None, None) + + def test_parse_form(self): + on_field = Mock() + on_file = Mock() + + parse_form( + {'Content-Type': 'application/octet-stream', + }, + BytesIO(b'123456789012345'), + on_field, + on_file + ) + + assert on_file.call_count == 1 + + # Assert that the first argument of the call (a File object) has size + # 15 - i.e. all data is written. + self.assertEqual(on_file.call_args[0][0].size, 15) + + def test_parse_form_content_length(self): + files = [] + def on_file(file): + files.append(file) + + parse_form( + {'Content-Type': 'application/octet-stream', + 'Content-Length': '10' + }, + BytesIO(b'123456789012345'), + None, + on_file + ) + + self.assertEqual(len(files), 1) + self.assertEqual(files[0].size, 10) + + + +def suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestFile)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestParseOptionsHeader)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestBaseParser)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestQuerystringParser)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestOctetStreamParser)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestBase64Decoder)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestQuotedPrintableDecoder)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestFormParser)) + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestHelperFunctions)) + + return suite diff --git a/venv/lib/python3.12/site-packages/nacl/__init__.py b/venv/lib/python3.12/site-packages/nacl/__init__.py new file mode 100644 index 0000000..6ab2708 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +__all__ = [ + "__uri__", + "__version__", + "__email__", +] + +__uri__ = "https://github.com/pyca/pynacl/" + +# Must be kept in sync with `pyproject.toml` +__version__ = "1.6.1" diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..61d6eed Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/encoding.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/encoding.cpython-312.pyc new file mode 100644 index 0000000..5bbcd0c Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/encoding.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..8df61a1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/hash.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/hash.cpython-312.pyc new file mode 100644 index 0000000..4df9632 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/hash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/hashlib.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/hashlib.cpython-312.pyc new file mode 100644 index 0000000..20cb69a Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/hashlib.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/public.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/public.cpython-312.pyc new file mode 100644 index 0000000..75c6842 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/public.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/secret.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/secret.cpython-312.pyc new file mode 100644 index 0000000..f9992b2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/secret.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/signing.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/signing.cpython-312.pyc new file mode 100644 index 0000000..7ca69da Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/signing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..d9c00d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/_sodium.abi3.so b/venv/lib/python3.12/site-packages/nacl/_sodium.abi3.so new file mode 100644 index 0000000..01d04e8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/_sodium.abi3.so differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__init__.py b/venv/lib/python3.12/site-packages/nacl/bindings/__init__.py new file mode 100644 index 0000000..2e07ba1 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/nacl/bindings/__init__.py @@ -0,0 +1,508 @@ +# Copyright 2013-2019 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl.bindings.crypto_aead import ( + crypto_aead_aegis128l_ABYTES, + crypto_aead_aegis128l_KEYBYTES, + crypto_aead_aegis128l_MESSAGEBYTES_MAX, + crypto_aead_aegis128l_NPUBBYTES, + crypto_aead_aegis128l_NSECBYTES, + crypto_aead_aegis128l_decrypt, + crypto_aead_aegis128l_encrypt, + crypto_aead_aegis256_ABYTES, + crypto_aead_aegis256_KEYBYTES, + crypto_aead_aegis256_MESSAGEBYTES_MAX, + crypto_aead_aegis256_NPUBBYTES, + crypto_aead_aegis256_NSECBYTES, + crypto_aead_aegis256_decrypt, + crypto_aead_aegis256_encrypt, + crypto_aead_aes256gcm_ABYTES, + crypto_aead_aes256gcm_KEYBYTES, + crypto_aead_aes256gcm_MESSAGEBYTES_MAX, + crypto_aead_aes256gcm_NPUBBYTES, + crypto_aead_aes256gcm_NSECBYTES, + crypto_aead_aes256gcm_decrypt, + crypto_aead_aes256gcm_encrypt, + crypto_aead_chacha20poly1305_ABYTES, + crypto_aead_chacha20poly1305_KEYBYTES, + crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX, + crypto_aead_chacha20poly1305_NPUBBYTES, + crypto_aead_chacha20poly1305_NSECBYTES, + crypto_aead_chacha20poly1305_decrypt, + crypto_aead_chacha20poly1305_encrypt, + crypto_aead_chacha20poly1305_ietf_ABYTES, + crypto_aead_chacha20poly1305_ietf_KEYBYTES, + crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX, + crypto_aead_chacha20poly1305_ietf_NPUBBYTES, + crypto_aead_chacha20poly1305_ietf_NSECBYTES, + crypto_aead_chacha20poly1305_ietf_decrypt, + crypto_aead_chacha20poly1305_ietf_encrypt, + crypto_aead_xchacha20poly1305_ietf_ABYTES, + crypto_aead_xchacha20poly1305_ietf_KEYBYTES, + crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX, + crypto_aead_xchacha20poly1305_ietf_NPUBBYTES, + crypto_aead_xchacha20poly1305_ietf_NSECBYTES, + crypto_aead_xchacha20poly1305_ietf_decrypt, + crypto_aead_xchacha20poly1305_ietf_encrypt, +) +from nacl.bindings.crypto_box import ( + crypto_box, + crypto_box_BEFORENMBYTES, + crypto_box_BOXZEROBYTES, + crypto_box_NONCEBYTES, + crypto_box_PUBLICKEYBYTES, + crypto_box_SEALBYTES, + crypto_box_SECRETKEYBYTES, + crypto_box_SEEDBYTES, + crypto_box_ZEROBYTES, + crypto_box_afternm, + crypto_box_beforenm, + crypto_box_easy, + crypto_box_easy_afternm, + crypto_box_keypair, + crypto_box_open, + crypto_box_open_afternm, + crypto_box_open_easy, + crypto_box_open_easy_afternm, + crypto_box_seal, + crypto_box_seal_open, + crypto_box_seed_keypair, +) +from nacl.bindings.crypto_core import ( + crypto_core_ed25519_BYTES, + crypto_core_ed25519_NONREDUCEDSCALARBYTES, + crypto_core_ed25519_SCALARBYTES, + crypto_core_ed25519_add, + crypto_core_ed25519_from_uniform, + crypto_core_ed25519_is_valid_point, + crypto_core_ed25519_scalar_add, + crypto_core_ed25519_scalar_complement, + crypto_core_ed25519_scalar_invert, + crypto_core_ed25519_scalar_mul, + crypto_core_ed25519_scalar_negate, + crypto_core_ed25519_scalar_reduce, + crypto_core_ed25519_scalar_sub, + crypto_core_ed25519_sub, + has_crypto_core_ed25519, +) +from 
nacl.bindings.crypto_generichash import ( + crypto_generichash_BYTES, + crypto_generichash_BYTES_MAX, + crypto_generichash_BYTES_MIN, + crypto_generichash_KEYBYTES, + crypto_generichash_KEYBYTES_MAX, + crypto_generichash_KEYBYTES_MIN, + crypto_generichash_PERSONALBYTES, + crypto_generichash_SALTBYTES, + crypto_generichash_STATEBYTES, + generichash_blake2b_final as crypto_generichash_blake2b_final, + generichash_blake2b_init as crypto_generichash_blake2b_init, + generichash_blake2b_salt_personal as crypto_generichash_blake2b_salt_personal, + generichash_blake2b_update as crypto_generichash_blake2b_update, +) +from nacl.bindings.crypto_hash import ( + crypto_hash, + crypto_hash_BYTES, + crypto_hash_sha256, + crypto_hash_sha256_BYTES, + crypto_hash_sha512, + crypto_hash_sha512_BYTES, +) +from nacl.bindings.crypto_kx import ( + crypto_kx_PUBLIC_KEY_BYTES, + crypto_kx_SECRET_KEY_BYTES, + crypto_kx_SEED_BYTES, + crypto_kx_SESSION_KEY_BYTES, + crypto_kx_client_session_keys, + crypto_kx_keypair, + crypto_kx_seed_keypair, + crypto_kx_server_session_keys, +) +from nacl.bindings.crypto_pwhash import ( + crypto_pwhash_ALG_ARGON2I13, + crypto_pwhash_ALG_ARGON2ID13, + crypto_pwhash_ALG_DEFAULT, + crypto_pwhash_BYTES_MAX, + crypto_pwhash_BYTES_MIN, + crypto_pwhash_PASSWD_MAX, + crypto_pwhash_PASSWD_MIN, + crypto_pwhash_SALTBYTES, + crypto_pwhash_STRBYTES, + crypto_pwhash_alg, + crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE, + crypto_pwhash_argon2i_MEMLIMIT_MAX, + crypto_pwhash_argon2i_MEMLIMIT_MIN, + crypto_pwhash_argon2i_MEMLIMIT_MODERATE, + crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE, + crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE, + crypto_pwhash_argon2i_OPSLIMIT_MAX, + crypto_pwhash_argon2i_OPSLIMIT_MIN, + crypto_pwhash_argon2i_OPSLIMIT_MODERATE, + crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE, + crypto_pwhash_argon2i_STRPREFIX, + crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE, + crypto_pwhash_argon2id_MEMLIMIT_MAX, + crypto_pwhash_argon2id_MEMLIMIT_MIN, + crypto_pwhash_argon2id_MEMLIMIT_MODERATE, + crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE, + crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE, + crypto_pwhash_argon2id_OPSLIMIT_MAX, + crypto_pwhash_argon2id_OPSLIMIT_MIN, + crypto_pwhash_argon2id_OPSLIMIT_MODERATE, + crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE, + crypto_pwhash_argon2id_STRPREFIX, + crypto_pwhash_scryptsalsa208sha256_BYTES_MAX, + crypto_pwhash_scryptsalsa208sha256_BYTES_MIN, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN, + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN, + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE, + crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX, + crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN, + crypto_pwhash_scryptsalsa208sha256_SALTBYTES, + crypto_pwhash_scryptsalsa208sha256_STRBYTES, + crypto_pwhash_scryptsalsa208sha256_STRPREFIX, + crypto_pwhash_scryptsalsa208sha256_ll, + crypto_pwhash_scryptsalsa208sha256_str, + crypto_pwhash_scryptsalsa208sha256_str_verify, + crypto_pwhash_str_alg, + crypto_pwhash_str_verify, + has_crypto_pwhash_scryptsalsa208sha256, + nacl_bindings_pick_scrypt_params, +) +from nacl.bindings.crypto_scalarmult import ( + crypto_scalarmult, + crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES, + crypto_scalarmult_base, + crypto_scalarmult_ed25519, + 
crypto_scalarmult_ed25519_BYTES, + crypto_scalarmult_ed25519_SCALARBYTES, + crypto_scalarmult_ed25519_base, + crypto_scalarmult_ed25519_base_noclamp, + crypto_scalarmult_ed25519_noclamp, + has_crypto_scalarmult_ed25519, +) +from nacl.bindings.crypto_secretbox import ( + crypto_secretbox, + crypto_secretbox_BOXZEROBYTES, + crypto_secretbox_KEYBYTES, + crypto_secretbox_MACBYTES, + crypto_secretbox_MESSAGEBYTES_MAX, + crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES, + crypto_secretbox_easy, + crypto_secretbox_open, + crypto_secretbox_open_easy, +) +from nacl.bindings.crypto_secretstream import ( + crypto_secretstream_xchacha20poly1305_ABYTES, + crypto_secretstream_xchacha20poly1305_HEADERBYTES, + crypto_secretstream_xchacha20poly1305_KEYBYTES, + crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX, + crypto_secretstream_xchacha20poly1305_STATEBYTES, + crypto_secretstream_xchacha20poly1305_TAG_FINAL, + crypto_secretstream_xchacha20poly1305_TAG_MESSAGE, + crypto_secretstream_xchacha20poly1305_TAG_PUSH, + crypto_secretstream_xchacha20poly1305_TAG_REKEY, + crypto_secretstream_xchacha20poly1305_init_pull, + crypto_secretstream_xchacha20poly1305_init_push, + crypto_secretstream_xchacha20poly1305_keygen, + crypto_secretstream_xchacha20poly1305_pull, + crypto_secretstream_xchacha20poly1305_push, + crypto_secretstream_xchacha20poly1305_rekey, + crypto_secretstream_xchacha20poly1305_state, +) +from nacl.bindings.crypto_shorthash import ( + BYTES as crypto_shorthash_siphash24_BYTES, + KEYBYTES as crypto_shorthash_siphash24_KEYBYTES, + XBYTES as crypto_shorthash_siphashx24_BYTES, + XKEYBYTES as crypto_shorthash_siphashx24_KEYBYTES, + crypto_shorthash_siphash24, + crypto_shorthash_siphashx24, + has_crypto_shorthash_siphashx24, +) +from nacl.bindings.crypto_sign import ( + crypto_sign, + crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES, + crypto_sign_ed25519_pk_to_curve25519, + crypto_sign_ed25519_sk_to_curve25519, + crypto_sign_ed25519_sk_to_pk, + crypto_sign_ed25519_sk_to_seed, + crypto_sign_ed25519ph_STATEBYTES, + crypto_sign_ed25519ph_final_create, + crypto_sign_ed25519ph_final_verify, + crypto_sign_ed25519ph_state, + crypto_sign_ed25519ph_update, + crypto_sign_keypair, + crypto_sign_open, + crypto_sign_seed_keypair, +) +from nacl.bindings.randombytes import ( + randombytes, + randombytes_buf_deterministic, +) +from nacl.bindings.sodium_core import sodium_init +from nacl.bindings.utils import ( + sodium_add, + sodium_increment, + sodium_memcmp, + sodium_pad, + sodium_unpad, +) + + +__all__ = [ + "crypto_aead_aegis128l_ABYTES", + "crypto_aead_aegis128l_KEYBYTES", + "crypto_aead_aegis128l_MESSAGEBYTES_MAX", + "crypto_aead_aegis128l_NPUBBYTES", + "crypto_aead_aegis128l_NSECBYTES", + "crypto_aead_aegis128l_decrypt", + "crypto_aead_aegis128l_encrypt", + "crypto_aead_aegis256_ABYTES", + "crypto_aead_aegis256_KEYBYTES", + "crypto_aead_aegis256_MESSAGEBYTES_MAX", + "crypto_aead_aegis256_NPUBBYTES", + "crypto_aead_aegis256_NSECBYTES", + "crypto_aead_aegis256_decrypt", + "crypto_aead_aegis256_encrypt", + "crypto_aead_aes256gcm_ABYTES", + "crypto_aead_aes256gcm_KEYBYTES", + "crypto_aead_aes256gcm_MESSAGEBYTES_MAX", + "crypto_aead_aes256gcm_NPUBBYTES", + "crypto_aead_aes256gcm_NSECBYTES", + "crypto_aead_aes256gcm_decrypt", + "crypto_aead_aes256gcm_encrypt", + "crypto_aead_chacha20poly1305_ABYTES", + "crypto_aead_chacha20poly1305_KEYBYTES", + "crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX", + "crypto_aead_chacha20poly1305_NPUBBYTES", + 
"crypto_aead_chacha20poly1305_NSECBYTES", + "crypto_aead_chacha20poly1305_decrypt", + "crypto_aead_chacha20poly1305_encrypt", + "crypto_aead_chacha20poly1305_ietf_ABYTES", + "crypto_aead_chacha20poly1305_ietf_KEYBYTES", + "crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX", + "crypto_aead_chacha20poly1305_ietf_NPUBBYTES", + "crypto_aead_chacha20poly1305_ietf_NSECBYTES", + "crypto_aead_chacha20poly1305_ietf_decrypt", + "crypto_aead_chacha20poly1305_ietf_encrypt", + "crypto_aead_xchacha20poly1305_ietf_ABYTES", + "crypto_aead_xchacha20poly1305_ietf_KEYBYTES", + "crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX", + "crypto_aead_xchacha20poly1305_ietf_NPUBBYTES", + "crypto_aead_xchacha20poly1305_ietf_NSECBYTES", + "crypto_aead_xchacha20poly1305_ietf_decrypt", + "crypto_aead_xchacha20poly1305_ietf_encrypt", + "crypto_box_SECRETKEYBYTES", + "crypto_box_PUBLICKEYBYTES", + "crypto_box_SEEDBYTES", + "crypto_box_NONCEBYTES", + "crypto_box_ZEROBYTES", + "crypto_box_BOXZEROBYTES", + "crypto_box_BEFORENMBYTES", + "crypto_box_SEALBYTES", + "crypto_box_keypair", + "crypto_box", + "crypto_box_open", + "crypto_box_beforenm", + "crypto_box_afternm", + "crypto_box_open_afternm", + "crypto_box_easy", + "crypto_box_easy_afternm", + "crypto_box_open_easy", + "crypto_box_open_easy_afternm", + "crypto_box_seal", + "crypto_box_seal_open", + "crypto_box_seed_keypair", + "has_crypto_core_ed25519", + "crypto_core_ed25519_BYTES", + "crypto_core_ed25519_UNIFORMBYTES", + "crypto_core_ed25519_SCALARBYTES", + "crypto_core_ed25519_NONREDUCEDSCALARBYTES", + "crypto_core_ed25519_add", + "crypto_core_ed25519_from_uniform", + "crypto_core_ed25519_is_valid_point", + "crypto_core_ed25519_sub", + "crypto_core_ed25519_scalar_invert", + "crypto_core_ed25519_scalar_negate", + "crypto_core_ed25519_scalar_complement", + "crypto_core_ed25519_scalar_add", + "crypto_core_ed25519_scalar_sub", + "crypto_core_ed25519_scalar_mul", + "crypto_core_ed25519_scalar_reduce", + "crypto_hash_BYTES", + "crypto_hash_sha256_BYTES", + "crypto_hash_sha512_BYTES", + "crypto_hash", + "crypto_hash_sha256", + "crypto_hash_sha512", + "crypto_generichash_BYTES", + "crypto_generichash_BYTES_MIN", + "crypto_generichash_BYTES_MAX", + "crypto_generichash_KEYBYTES", + "crypto_generichash_KEYBYTES_MIN", + "crypto_generichash_KEYBYTES_MAX", + "crypto_generichash_SALTBYTES", + "crypto_generichash_PERSONALBYTES", + "crypto_generichash_STATEBYTES", + "crypto_generichash_blake2b_salt_personal", + "crypto_generichash_blake2b_init", + "crypto_generichash_blake2b_update", + "crypto_generichash_blake2b_final", + "crypto_kx_keypair", + "crypto_kx_seed_keypair", + "crypto_kx_client_session_keys", + "crypto_kx_server_session_keys", + "crypto_kx_PUBLIC_KEY_BYTES", + "crypto_kx_SECRET_KEY_BYTES", + "crypto_kx_SEED_BYTES", + "crypto_kx_SESSION_KEY_BYTES", + "has_crypto_scalarmult_ed25519", + "crypto_scalarmult_BYTES", + "crypto_scalarmult_SCALARBYTES", + "crypto_scalarmult", + "crypto_scalarmult_base", + "crypto_scalarmult_ed25519_BYTES", + "crypto_scalarmult_ed25519_SCALARBYTES", + "crypto_scalarmult_ed25519", + "crypto_scalarmult_ed25519_base", + "crypto_scalarmult_ed25519_noclamp", + "crypto_scalarmult_ed25519_base_noclamp", + "crypto_secretbox_KEYBYTES", + "crypto_secretbox_NONCEBYTES", + "crypto_secretbox_ZEROBYTES", + "crypto_secretbox_BOXZEROBYTES", + "crypto_secretbox_MACBYTES", + "crypto_secretbox_MESSAGEBYTES_MAX", + "crypto_secretbox", + "crypto_secretbox_easy", + "crypto_secretbox_open", + "crypto_secretbox_open_easy", + 
"crypto_secretstream_xchacha20poly1305_ABYTES", + "crypto_secretstream_xchacha20poly1305_HEADERBYTES", + "crypto_secretstream_xchacha20poly1305_KEYBYTES", + "crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX", + "crypto_secretstream_xchacha20poly1305_STATEBYTES", + "crypto_secretstream_xchacha20poly1305_TAG_FINAL", + "crypto_secretstream_xchacha20poly1305_TAG_MESSAGE", + "crypto_secretstream_xchacha20poly1305_TAG_PUSH", + "crypto_secretstream_xchacha20poly1305_TAG_REKEY", + "crypto_secretstream_xchacha20poly1305_init_pull", + "crypto_secretstream_xchacha20poly1305_init_push", + "crypto_secretstream_xchacha20poly1305_keygen", + "crypto_secretstream_xchacha20poly1305_pull", + "crypto_secretstream_xchacha20poly1305_push", + "crypto_secretstream_xchacha20poly1305_rekey", + "crypto_secretstream_xchacha20poly1305_state", + "has_crypto_shorthash_siphashx24", + "crypto_shorthash_siphash24_BYTES", + "crypto_shorthash_siphash24_KEYBYTES", + "crypto_shorthash_siphash24", + "crypto_shorthash_siphashx24_BYTES", + "crypto_shorthash_siphashx24_KEYBYTES", + "crypto_shorthash_siphashx24", + "crypto_sign_BYTES", + "crypto_sign_SEEDBYTES", + "crypto_sign_PUBLICKEYBYTES", + "crypto_sign_SECRETKEYBYTES", + "crypto_sign_keypair", + "crypto_sign_seed_keypair", + "crypto_sign", + "crypto_sign_open", + "crypto_sign_ed25519_pk_to_curve25519", + "crypto_sign_ed25519_sk_to_curve25519", + "crypto_sign_ed25519_sk_to_pk", + "crypto_sign_ed25519_sk_to_seed", + "crypto_sign_ed25519ph_STATEBYTES", + "crypto_sign_ed25519ph_final_create", + "crypto_sign_ed25519ph_final_verify", + "crypto_sign_ed25519ph_state", + "crypto_sign_ed25519ph_update", + "crypto_pwhash_ALG_ARGON2I13", + "crypto_pwhash_ALG_ARGON2ID13", + "crypto_pwhash_ALG_DEFAULT", + "crypto_pwhash_BYTES_MAX", + "crypto_pwhash_BYTES_MIN", + "crypto_pwhash_PASSWD_MAX", + "crypto_pwhash_PASSWD_MIN", + "crypto_pwhash_SALTBYTES", + "crypto_pwhash_STRBYTES", + "crypto_pwhash_alg", + "crypto_pwhash_argon2i_MEMLIMIT_MIN", + "crypto_pwhash_argon2i_MEMLIMIT_MAX", + "crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE", + "crypto_pwhash_argon2i_MEMLIMIT_MODERATE", + "crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE", + "crypto_pwhash_argon2i_OPSLIMIT_MIN", + "crypto_pwhash_argon2i_OPSLIMIT_MAX", + "crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE", + "crypto_pwhash_argon2i_OPSLIMIT_MODERATE", + "crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE", + "crypto_pwhash_argon2i_STRPREFIX", + "crypto_pwhash_argon2id_MEMLIMIT_MIN", + "crypto_pwhash_argon2id_MEMLIMIT_MAX", + "crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE", + "crypto_pwhash_argon2id_MEMLIMIT_MODERATE", + "crypto_pwhash_argon2id_OPSLIMIT_MIN", + "crypto_pwhash_argon2id_OPSLIMIT_MAX", + "crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE", + "crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE", + "crypto_pwhash_argon2id_OPSLIMIT_MODERATE", + "crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE", + "crypto_pwhash_argon2id_STRPREFIX", + "crypto_pwhash_str_alg", + "crypto_pwhash_str_verify", + "has_crypto_pwhash_scryptsalsa208sha256", + "crypto_pwhash_scryptsalsa208sha256_BYTES_MAX", + "crypto_pwhash_scryptsalsa208sha256_BYTES_MIN", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN", + "crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN", + "crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE", + 
"crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX", + "crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN", + "crypto_pwhash_scryptsalsa208sha256_SALTBYTES", + "crypto_pwhash_scryptsalsa208sha256_STRBYTES", + "crypto_pwhash_scryptsalsa208sha256_STRPREFIX", + "crypto_pwhash_scryptsalsa208sha256_ll", + "crypto_pwhash_scryptsalsa208sha256_str", + "crypto_pwhash_scryptsalsa208sha256_str_verify", + "nacl_bindings_pick_scrypt_params", + "randombytes", + "randombytes_buf_deterministic", + "sodium_init", + "sodium_add", + "sodium_increment", + "sodium_memcmp", + "sodium_pad", + "sodium_unpad", +] + + +# Initialize Sodium +sodium_init() diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..44f6273 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_aead.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_aead.cpython-312.pyc new file mode 100644 index 0000000..8abc8f6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_aead.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_box.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_box.cpython-312.pyc new file mode 100644 index 0000000..bd69b67 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_box.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_core.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_core.cpython-312.pyc new file mode 100644 index 0000000..88f45f1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_generichash.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_generichash.cpython-312.pyc new file mode 100644 index 0000000..3d412ec Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_generichash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_hash.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_hash.cpython-312.pyc new file mode 100644 index 0000000..4adebec Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_hash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_kx.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_kx.cpython-312.pyc new file mode 100644 index 0000000..ef7f68e Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_kx.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_pwhash.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_pwhash.cpython-312.pyc new file mode 100644 index 0000000..1481b06 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_pwhash.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_scalarmult.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_scalarmult.cpython-312.pyc new file mode 100644 index 0000000..6d43774 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_scalarmult.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_secretbox.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_secretbox.cpython-312.pyc new file mode 100644 index 0000000..3fa06d1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_secretbox.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_secretstream.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_secretstream.cpython-312.pyc new file mode 100644 index 0000000..5cac2ea Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_secretstream.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_shorthash.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_shorthash.cpython-312.pyc new file mode 100644 index 0000000..d6ae940 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_shorthash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_sign.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_sign.cpython-312.pyc new file mode 100644 index 0000000..615ec72 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/crypto_sign.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/randombytes.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/randombytes.cpython-312.pyc new file mode 100644 index 0000000..9dc8f50 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/randombytes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/sodium_core.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/sodium_core.cpython-312.pyc new file mode 100644 index 0000000..2e07ac8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/sodium_core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..20d3119 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/bindings/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_aead.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_aead.py new file mode 100644 index 0000000..2f7da78 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_aead.py @@ -0,0 +1,1069 @@ +# Copyright 2017 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + +""" +Implementations of authenticated encryption with associated data (*AEAD*) +constructions building on the chacha20 stream cipher and the poly1305 +authenticator, as well as the AEGIS-128L, AEGIS-256 and AES256-GCM +constructions. +""" + +crypto_aead_chacha20poly1305_ietf_KEYBYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_keybytes() +) +crypto_aead_chacha20poly1305_ietf_NSECBYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_nsecbytes() +) +crypto_aead_chacha20poly1305_ietf_NPUBBYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_npubbytes() +) +crypto_aead_chacha20poly1305_ietf_ABYTES: int = ( + lib.crypto_aead_chacha20poly1305_ietf_abytes() +) +crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_chacha20poly1305_ietf_messagebytes_max() +) +_aead_chacha20poly1305_ietf_CRYPTBYTES_MAX = ( + crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX + + crypto_aead_chacha20poly1305_ietf_ABYTES +) + +crypto_aead_chacha20poly1305_KEYBYTES: int = ( + lib.crypto_aead_chacha20poly1305_keybytes() +) +crypto_aead_chacha20poly1305_NSECBYTES: int = ( + lib.crypto_aead_chacha20poly1305_nsecbytes() +) +crypto_aead_chacha20poly1305_NPUBBYTES: int = ( + lib.crypto_aead_chacha20poly1305_npubbytes() +) +crypto_aead_chacha20poly1305_ABYTES: int = ( + lib.crypto_aead_chacha20poly1305_abytes() +) +crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_chacha20poly1305_messagebytes_max() +) +_aead_chacha20poly1305_CRYPTBYTES_MAX = ( + crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX + + crypto_aead_chacha20poly1305_ABYTES +) + +crypto_aead_xchacha20poly1305_ietf_KEYBYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_keybytes() +) +crypto_aead_xchacha20poly1305_ietf_NSECBYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_nsecbytes() +) +crypto_aead_xchacha20poly1305_ietf_NPUBBYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_npubbytes() +) +crypto_aead_xchacha20poly1305_ietf_ABYTES: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_abytes() +) +crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_xchacha20poly1305_ietf_messagebytes_max() +) +_aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX = ( + crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX + + crypto_aead_xchacha20poly1305_ietf_ABYTES +) + +crypto_aead_aegis256_KEYBYTES: int = lib.crypto_aead_aegis256_keybytes() +crypto_aead_aegis256_NSECBYTES: int = lib.crypto_aead_aegis256_nsecbytes() +crypto_aead_aegis256_NPUBBYTES: int = lib.crypto_aead_aegis256_npubbytes() +crypto_aead_aegis256_ABYTES: int = lib.crypto_aead_aegis256_abytes() +crypto_aead_aegis256_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_aegis256_messagebytes_max() +) +_aead_aegis256_CRYPTBYTES_MAX = ( + crypto_aead_aegis256_MESSAGEBYTES_MAX + crypto_aead_aegis256_ABYTES +) + +crypto_aead_aegis128l_KEYBYTES: int = lib.crypto_aead_aegis128l_keybytes() +crypto_aead_aegis128l_NSECBYTES: int = lib.crypto_aead_aegis128l_nsecbytes() +crypto_aead_aegis128l_NPUBBYTES: int = lib.crypto_aead_aegis128l_npubbytes() +crypto_aead_aegis128l_ABYTES: int =
lib.crypto_aead_aegis128l_abytes() +crypto_aead_aegis128l_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_aegis128l_messagebytes_max() +) +_aead_aegis128l_CRYPTBYTES_MAX = ( + crypto_aead_aegis128l_MESSAGEBYTES_MAX + crypto_aead_aegis128l_ABYTES +) + +crypto_aead_aes256gcm_KEYBYTES: int = lib.crypto_aead_aes256gcm_keybytes() +crypto_aead_aes256gcm_NSECBYTES: int = lib.crypto_aead_aes256gcm_nsecbytes() +crypto_aead_aes256gcm_NPUBBYTES: int = lib.crypto_aead_aes256gcm_npubbytes() +crypto_aead_aes256gcm_ABYTES: int = lib.crypto_aead_aes256gcm_abytes() +crypto_aead_aes256gcm_MESSAGEBYTES_MAX: int = ( + lib.crypto_aead_aes256gcm_messagebytes_max() +) +_aead_aes256gcm_CRYPTBYTES_MAX = ( + crypto_aead_aes256gcm_MESSAGEBYTES_MAX + crypto_aead_aes256gcm_ABYTES +) + + +def crypto_aead_chacha20poly1305_ietf_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the IETF ratified chacha20poly1305 + construction described in RFC7539. + + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_chacha20poly1305_ietf_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_chacha20poly1305_ietf_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_chacha20poly1305_ietf_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_chacha20poly1305_ietf_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the IETF ratified chacha20poly1305 + construction described in RFC7539.
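
    A minimal round-trip sketch (illustrative only; ``randombytes`` and the
    size constants below are the ones re-exported from ``nacl.bindings``;
    remember that a nonce must never be reused under the same key)::

        from nacl.bindings import (
            randombytes,
            crypto_aead_chacha20poly1305_ietf_encrypt,
            crypto_aead_chacha20poly1305_ietf_decrypt,
            crypto_aead_chacha20poly1305_ietf_KEYBYTES,
            crypto_aead_chacha20poly1305_ietf_NPUBBYTES,
        )

        key = randombytes(crypto_aead_chacha20poly1305_ietf_KEYBYTES)
        nonce = randombytes(crypto_aead_chacha20poly1305_ietf_NPUBBYTES)
        ct = crypto_aead_chacha20poly1305_ietf_encrypt(b"msg", b"aad", nonce, key)
        assert crypto_aead_chacha20poly1305_ietf_decrypt(ct, b"aad", nonce, key) == b"msg"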
+ + :param ciphertext: + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_chacha20poly1305_ietf_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_chacha20poly1305_ietf_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_chacha20poly1305_ietf_ABYTES + + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_chacha20poly1305_ietf_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_chacha20poly1305_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the "legacy" construction + described in draft-agl-tls-chacha20poly1305. 
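
    .. note:: This pre-RFC7539 variant uses an 8-byte nonce
        (``crypto_aead_chacha20poly1305_NPUBBYTES``), which is too short to
        choose at random safely at scale; it is retained for compatibility,
        and new code should generally prefer the IETF or XChaCha20 variants
        in this module.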
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_chacha20poly1305_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_chacha20poly1305_ietf_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_chacha20poly1305_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_chacha20poly1305_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the "legacy" construction + described in draft-agl-tls-chacha20poly1305. 
+ + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_chacha20poly1305_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_chacha20poly1305_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_chacha20poly1305_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_chacha20poly1305_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_chacha20poly1305_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_chacha20poly1305_ABYTES + + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_chacha20poly1305_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_xchacha20poly1305_ietf_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the long-nonces xchacha20poly1305 + construction. 
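
    The 24-byte nonce used here
    (``crypto_aead_xchacha20poly1305_ietf_NPUBBYTES``) is large enough that
    generating it at random for every message is safe in practice, e.g. (a
    sketch using the ``nacl.bindings`` re-exports)::

        from nacl.bindings import (
            randombytes,
            crypto_aead_xchacha20poly1305_ietf_encrypt,
            crypto_aead_xchacha20poly1305_ietf_KEYBYTES,
            crypto_aead_xchacha20poly1305_ietf_NPUBBYTES,
        )

        key = randombytes(crypto_aead_xchacha20poly1305_ietf_KEYBYTES)
        nonce = randombytes(crypto_aead_xchacha20poly1305_ietf_NPUBBYTES)
        ct = crypto_aead_xchacha20poly1305_ietf_encrypt(b"msg", None, nonce, key)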
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_xchacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_xchacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_xchacha20poly1305_ietf_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_xchacha20poly1305_ietf_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_xchacha20poly1305_ietf_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the long-nonces xchacha20poly1305 + construction. 
+ + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_xchacha20poly1305_ietf_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_xchacha20poly1305_ietf_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) + and len(key) == crypto_aead_xchacha20poly1305_ietf_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_xchacha20poly1305_ietf_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_xchacha20poly1305_ietf_ABYTES + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_xchacha20poly1305_ietf_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_aegis256_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the AEGIS-256 + construction. 
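
    AEGIS-256 uses a 32-byte key and, unusually, a 32-byte nonce
    (``crypto_aead_aegis256_KEYBYTES`` / ``crypto_aead_aegis256_NPUBBYTES``),
    so random per-message nonces are safe here as well. The construction is
    only usable when the linked libsodium build exposes the AEGIS family.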
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_aegis256_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_aegis256_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aegis256_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aegis256_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_aegis256_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_aegis256_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_aegis256_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the AEGIS-256 + construction. + + :param ciphertext: authenticated ciphertext + :type ciphertext: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: message + :rtype: bytes + """ + ensure( + isinstance(ciphertext, bytes), + "Input ciphertext type must be bytes", + raising=exc.TypeError, + ) + + clen = len(ciphertext) + + ensure( + clen <= _aead_aegis256_CRYPTBYTES_MAX, + "Ciphertext must be at most {} bytes long".format( + _aead_aegis256_CRYPTBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aegis256_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aegis256_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aegis256_KEYBYTES + ), + raising=exc.TypeError, + ) + + mxout = clen - crypto_aead_aegis256_ABYTES + mlen = ffi.new("unsigned long long *") + message = ffi.new("unsigned char[]", mxout) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + res = lib.crypto_aead_aegis256_decrypt( + message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key + ) + + ensure(res == 0, "Decryption failed.", raising=exc.CryptoError) + + return ffi.buffer(message, mlen[0])[:] + + +def crypto_aead_aegis128l_encrypt( + message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Encrypt the given ``message`` using the AEGIS-128L + construction. 
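+
+    Usage mirrors the other AEAD bindings in this module; a sketch, calling
+    the sibling functions defined here directly::
+
+        import nacl.utils
+
+        key = nacl.utils.random(crypto_aead_aegis128l_KEYBYTES)
+        nonce = nacl.utils.random(crypto_aead_aegis128l_NPUBBYTES)
+        ct = crypto_aead_aegis128l_encrypt(b"msg", b"hdr", nonce, key)
+        assert crypto_aead_aegis128l_decrypt(ct, b"hdr", nonce, key) == b"msg"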
+
+    :param message:
+    :type message: bytes
+    :param aad:
+    :type aad: Optional[bytes]
+    :param nonce:
+    :type nonce: bytes
+    :param key:
+    :type key: bytes
+    :return: authenticated ciphertext
+    :rtype: bytes
+    """
+    ensure(
+        isinstance(message, bytes),
+        "Input message type must be bytes",
+        raising=exc.TypeError,
+    )
+
+    mlen = len(message)
+
+    ensure(
+        mlen <= crypto_aead_aegis128l_MESSAGEBYTES_MAX,
+        "Message must be at most {} bytes long".format(
+            crypto_aead_aegis128l_MESSAGEBYTES_MAX
+        ),
+        raising=exc.ValueError,
+    )
+
+    ensure(
+        isinstance(aad, bytes) or (aad is None),
+        "Additional data must be bytes or None",
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(nonce, bytes)
+        and len(nonce) == crypto_aead_aegis128l_NPUBBYTES,
+        "Nonce must be a {} bytes long bytes sequence".format(
+            crypto_aead_aegis128l_NPUBBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(key, bytes) and len(key) == crypto_aead_aegis128l_KEYBYTES,
+        "Key must be a {} bytes long bytes sequence".format(
+            crypto_aead_aegis128l_KEYBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    if aad:
+        _aad = aad
+        aalen = len(aad)
+    else:
+        _aad = ffi.NULL
+        aalen = 0
+
+    mxout = mlen + crypto_aead_aegis128l_ABYTES
+
+    clen = ffi.new("unsigned long long *")
+
+    ciphertext = ffi.new("unsigned char[]", mxout)
+
+    res = lib.crypto_aead_aegis128l_encrypt(
+        ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key
+    )
+
+    ensure(res == 0, "Encryption failed.", raising=exc.CryptoError)
+    return ffi.buffer(ciphertext, clen[0])[:]
+
+
+def crypto_aead_aegis128l_decrypt(
+    ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes
+) -> bytes:
+    """
+    Decrypt the given ``ciphertext`` using the AEGIS-128L
+    construction.
+
+    :param ciphertext: authenticated ciphertext
+    :type ciphertext: bytes
+    :param aad:
+    :type aad: Optional[bytes]
+    :param nonce:
+    :type nonce: bytes
+    :param key:
+    :type key: bytes
+    :return: message
+    :rtype: bytes
+    """
+    ensure(
+        isinstance(ciphertext, bytes),
+        "Input ciphertext type must be bytes",
+        raising=exc.TypeError,
+    )
+
+    clen = len(ciphertext)
+
+    ensure(
+        clen <= _aead_aegis128l_CRYPTBYTES_MAX,
+        "Ciphertext must be at most {} bytes long".format(
+            _aead_aegis128l_CRYPTBYTES_MAX
+        ),
+        raising=exc.ValueError,
+    )
+
+    ensure(
+        isinstance(aad, bytes) or (aad is None),
+        "Additional data must be bytes or None",
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(nonce, bytes)
+        and len(nonce) == crypto_aead_aegis128l_NPUBBYTES,
+        "Nonce must be a {} bytes long bytes sequence".format(
+            crypto_aead_aegis128l_NPUBBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(key, bytes) and len(key) == crypto_aead_aegis128l_KEYBYTES,
+        "Key must be a {} bytes long bytes sequence".format(
+            crypto_aead_aegis128l_KEYBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    mxout = clen - crypto_aead_aegis128l_ABYTES
+    mlen = ffi.new("unsigned long long *")
+    message = ffi.new("unsigned char[]", mxout)
+
+    if aad:
+        _aad = aad
+        aalen = len(aad)
+    else:
+        _aad = ffi.NULL
+        aalen = 0
+
+    res = lib.crypto_aead_aegis128l_decrypt(
+        message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key
+    )
+
+    ensure(res == 0, "Decryption failed.", raising=exc.CryptoError)
+
+    return ffi.buffer(message, mlen[0])[:]
+
+
+def crypto_aead_aes256gcm_encrypt(
+    message: bytes, aad: Optional[bytes], nonce: bytes, key: bytes
+) -> bytes:
+    """
+    Encrypt the given ``message`` using the AES-256-GCM
+    construction. Requires the Intel AES-NI extensions,
+    or the ARM Crypto extensions.
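+
+    A guarded sketch: hardware support is probed at run time, so callers
+    may want to catch :class:`nacl.exceptions.UnavailableError`::
+
+        import nacl.utils
+        from nacl import exceptions as exc
+        from nacl.bindings import (
+            crypto_aead_aes256gcm_KEYBYTES,
+            crypto_aead_aes256gcm_NPUBBYTES,
+            crypto_aead_aes256gcm_encrypt,
+        )
+
+        key = nacl.utils.random(crypto_aead_aes256gcm_KEYBYTES)
+        nonce = nacl.utils.random(crypto_aead_aes256gcm_NPUBBYTES)
+        try:
+            ct = crypto_aead_aes256gcm_encrypt(b"msg", None, nonce, key)
+        except exc.UnavailableError:
+            ct = None  # this CPU lacks AES acceleration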
+ + :param message: + :type message: bytes + :param aad: + :type aad: Optional[bytes] + :param nonce: + :type nonce: bytes + :param key: + :type key: bytes + :return: authenticated ciphertext + :rtype: bytes + """ + ensure( + lib.crypto_aead_aes256gcm_is_available() == 1, + "Construction requires hardware acceleration", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(message, bytes), + "Input message type must be bytes", + raising=exc.TypeError, + ) + + mlen = len(message) + + ensure( + mlen <= crypto_aead_aes256gcm_MESSAGEBYTES_MAX, + "Message must be at most {} bytes long".format( + crypto_aead_aes256gcm_MESSAGEBYTES_MAX + ), + raising=exc.ValueError, + ) + + ensure( + isinstance(aad, bytes) or (aad is None), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + ensure( + isinstance(nonce, bytes) + and len(nonce) == crypto_aead_aes256gcm_NPUBBYTES, + "Nonce must be a {} bytes long bytes sequence".format( + crypto_aead_aes256gcm_NPUBBYTES + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(key, bytes) and len(key) == crypto_aead_aes256gcm_KEYBYTES, + "Key must be a {} bytes long bytes sequence".format( + crypto_aead_aes256gcm_KEYBYTES + ), + raising=exc.TypeError, + ) + + if aad: + _aad = aad + aalen = len(aad) + else: + _aad = ffi.NULL + aalen = 0 + + mxout = mlen + crypto_aead_aes256gcm_ABYTES + + clen = ffi.new("unsigned long long *") + + ciphertext = ffi.new("unsigned char[]", mxout) + + res = lib.crypto_aead_aes256gcm_encrypt( + ciphertext, clen, message, mlen, _aad, aalen, ffi.NULL, nonce, key + ) + + ensure(res == 0, "Encryption failed.", raising=exc.CryptoError) + return ffi.buffer(ciphertext, clen[0])[:] + + +def crypto_aead_aes256gcm_decrypt( + ciphertext: bytes, aad: Optional[bytes], nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt the given ``ciphertext`` using the AES-256-GCM + construction. Requires the Intel AES-NI extensions, + or the ARM Crypto extensions. 
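+
+    Tampering is caught by the authentication tag; flipping any ciphertext
+    bit makes decryption raise (sketch, with hypothetical ``ct``, ``nonce``
+    and ``key`` from a prior encrypt call)::
+
+        from nacl import exceptions as exc
+
+        forged = ct[:-1] + bytes([ct[-1] ^ 1])
+        try:
+            crypto_aead_aes256gcm_decrypt(forged, None, nonce, key)
+        except exc.CryptoError:
+            pass  # authentication failed, as expected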
+
+    :param ciphertext: authenticated ciphertext
+    :type ciphertext: bytes
+    :param aad:
+    :type aad: Optional[bytes]
+    :param nonce:
+    :type nonce: bytes
+    :param key:
+    :type key: bytes
+    :return: message
+    :rtype: bytes
+    """
+    ensure(
+        lib.crypto_aead_aes256gcm_is_available() == 1,
+        "Construction requires hardware acceleration",
+        raising=exc.UnavailableError,
+    )
+
+    ensure(
+        isinstance(ciphertext, bytes),
+        "Input ciphertext type must be bytes",
+        raising=exc.TypeError,
+    )
+
+    clen = len(ciphertext)
+
+    ensure(
+        clen <= _aead_aes256gcm_CRYPTBYTES_MAX,
+        "Ciphertext must be at most {} bytes long".format(
+            _aead_aes256gcm_CRYPTBYTES_MAX
+        ),
+        raising=exc.ValueError,
+    )
+
+    ensure(
+        isinstance(aad, bytes) or (aad is None),
+        "Additional data must be bytes or None",
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(nonce, bytes)
+        and len(nonce) == crypto_aead_aes256gcm_NPUBBYTES,
+        "Nonce must be a {} bytes long bytes sequence".format(
+            crypto_aead_aes256gcm_NPUBBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    ensure(
+        isinstance(key, bytes) and len(key) == crypto_aead_aes256gcm_KEYBYTES,
+        "Key must be a {} bytes long bytes sequence".format(
+            crypto_aead_aes256gcm_KEYBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    mxout = clen - crypto_aead_aes256gcm_ABYTES
+    mlen = ffi.new("unsigned long long *")
+    message = ffi.new("unsigned char[]", mxout)
+
+    if aad:
+        _aad = aad
+        aalen = len(aad)
+    else:
+        _aad = ffi.NULL
+        aalen = 0
+
+    res = lib.crypto_aead_aes256gcm_decrypt(
+        message, mlen, ffi.NULL, ciphertext, clen, _aad, aalen, nonce, key
+    )
+
+    ensure(res == 0, "Decryption failed.", raising=exc.CryptoError)
+
+    return ffi.buffer(message, mlen[0])[:]
diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_box.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_box.py
new file mode 100644
index 0000000..da6e4cb
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_box.py
@@ -0,0 +1,475 @@
+# Copyright 2013 Donald Stufft and individual contributors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Tuple
+
+from nacl import exceptions as exc
+from nacl._sodium import ffi, lib
+from nacl.exceptions import ensure
+
+
+__all__ = ["crypto_box_keypair", "crypto_box"]
+
+
+crypto_box_SECRETKEYBYTES: int = lib.crypto_box_secretkeybytes()
+crypto_box_PUBLICKEYBYTES: int = lib.crypto_box_publickeybytes()
+crypto_box_SEEDBYTES: int = lib.crypto_box_seedbytes()
+crypto_box_NONCEBYTES: int = lib.crypto_box_noncebytes()
+crypto_box_ZEROBYTES: int = lib.crypto_box_zerobytes()
+crypto_box_BOXZEROBYTES: int = lib.crypto_box_boxzerobytes()
+crypto_box_BEFORENMBYTES: int = lib.crypto_box_beforenmbytes()
+crypto_box_SEALBYTES: int = lib.crypto_box_sealbytes()
+crypto_box_MACBYTES: int = lib.crypto_box_macbytes()
+
+
+def crypto_box_keypair() -> Tuple[bytes, bytes]:
+    """
+    Returns a randomly generated public and secret key.
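+
+    A minimal sketch::
+
+        from nacl.bindings import (
+            crypto_box_PUBLICKEYBYTES,
+            crypto_box_SECRETKEYBYTES,
+            crypto_box_keypair,
+        )
+
+        pk, sk = crypto_box_keypair()
+        assert len(pk) == crypto_box_PUBLICKEYBYTES
+        assert len(sk) == crypto_box_SECRETKEYBYTES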
+ + :rtype: (bytes(public_key), bytes(secret_key)) + """ + pk = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES) + + rc = lib.crypto_box_keypair(pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_box_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_box_SECRETKEYBYTES)[:], + ) + + +def crypto_box_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]: + """ + Returns a (public, secret) key pair deterministically generated + from an input ``seed``. + + .. warning:: The seed **must** be high-entropy; therefore, + its generator **must** be a cryptographic quality + random function like, for example, :func:`~nacl.utils.random`. + + .. warning:: The seed **must** be protected and remain secret. + Anyone who knows the seed is really in possession of + the corresponding PrivateKey. + + + :param seed: bytes + :rtype: (bytes(public_key), bytes(secret_key)) + """ + ensure(isinstance(seed, bytes), "seed must be bytes", raising=TypeError) + + if len(seed) != crypto_box_SEEDBYTES: + raise exc.ValueError("Invalid seed") + + pk = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES) + + rc = lib.crypto_box_seed_keypair(pk, sk, seed) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_box_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_box_SECRETKEYBYTES)[:], + ) + + +def crypto_box(message: bytes, nonce: bytes, pk: bytes, sk: bytes) -> bytes: + """ + Encrypts and returns a message ``message`` using the secret key ``sk``, + public key ``pk``, and the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + padded = (b"\x00" * crypto_box_ZEROBYTES) + message + ciphertext = ffi.new("unsigned char[]", len(padded)) + + rc = lib.crypto_box(ciphertext, padded, len(padded), nonce, pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, len(padded))[crypto_box_BOXZEROBYTES:] + + +def crypto_box_open( + ciphertext: bytes, nonce: bytes, pk: bytes, sk: bytes +) -> bytes: + """ + Decrypts and returns an encrypted message ``ciphertext``, using the secret + key ``sk``, public key ``pk``, and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + padded = (b"\x00" * crypto_box_BOXZEROBYTES) + ciphertext + plaintext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_box_open(plaintext, padded, len(padded), nonce, pk, sk) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, len(padded))[crypto_box_ZEROBYTES:] + + +def crypto_box_beforenm(pk: bytes, sk: bytes) -> bytes: + """ + Computes and returns the shared key for the public key ``pk`` and the + secret key ``sk``. 
This can be used to speed up operations where the same + set of keys is going to be used multiple times. + + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + k = ffi.new("unsigned char[]", crypto_box_BEFORENMBYTES) + + rc = lib.crypto_box_beforenm(k, pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(k, crypto_box_BEFORENMBYTES)[:] + + +def crypto_box_afternm(message: bytes, nonce: bytes, k: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` using the shared key ``k`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + padded = b"\x00" * crypto_box_ZEROBYTES + message + ciphertext = ffi.new("unsigned char[]", len(padded)) + + rc = lib.crypto_box_afternm(ciphertext, padded, len(padded), nonce, k) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, len(padded))[crypto_box_BOXZEROBYTES:] + + +def crypto_box_open_afternm( + ciphertext: bytes, nonce: bytes, k: bytes +) -> bytes: + """ + Decrypts and returns the encrypted message ``ciphertext``, using the shared + key ``k`` and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + padded = (b"\x00" * crypto_box_BOXZEROBYTES) + ciphertext + plaintext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_box_open_afternm(plaintext, padded, len(padded), nonce, k) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, len(padded))[crypto_box_ZEROBYTES:] + + +def crypto_box_easy( + message: bytes, nonce: bytes, pk: bytes, sk: bytes +) -> bytes: + """ + Encrypts and returns a message ``message`` using the secret key ``sk``, + public key ``pk``, and the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + _mlen = len(message) + _clen = crypto_box_MACBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + rc = lib.crypto_box_easy(ciphertext, message, _mlen, nonce, pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, _clen)[:] + + +def crypto_box_open_easy( + ciphertext: bytes, nonce: bytes, pk: bytes, sk: bytes +) -> bytes: + """ + Decrypts and returns an encrypted message ``ciphertext``, using the secret + key ``sk``, public key ``pk``, and the nonce ``nonce``. 
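+
+    A round-trip sketch with the ``easy`` interface (assuming
+    :func:`nacl.utils.random` for the nonce; here a party encrypts to its
+    own key pair for brevity)::
+
+        import nacl.utils
+        from nacl.bindings import (
+            crypto_box_NONCEBYTES,
+            crypto_box_easy,
+            crypto_box_keypair,
+            crypto_box_open_easy,
+        )
+
+        pk, sk = crypto_box_keypair()
+        nonce = nacl.utils.random(crypto_box_NONCEBYTES)
+        ct = crypto_box_easy(b"hello", nonce, pk, sk)
+        assert crypto_box_open_easy(ct, nonce, pk, sk) == b"hello"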
+ + :param ciphertext: bytes + :param nonce: bytes + :param pk: bytes + :param sk: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce size") + + if len(pk) != crypto_box_PUBLICKEYBYTES: + raise exc.ValueError("Invalid public key") + + if len(sk) != crypto_box_SECRETKEYBYTES: + raise exc.ValueError("Invalid secret key") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_box_MACBYTES, + "Input ciphertext must be at least {} long".format( + crypto_box_MACBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_box_MACBYTES + + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_box_open_easy(plaintext, ciphertext, _clen, nonce, pk, sk) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, _mlen)[:] + + +def crypto_box_easy_afternm(message: bytes, nonce: bytes, k: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` using the shared key ``k`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + _mlen = len(message) + _clen = crypto_box_MACBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + rc = lib.crypto_box_easy_afternm(ciphertext, message, _mlen, nonce, k) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(ciphertext, _clen)[:] + + +def crypto_box_open_easy_afternm( + ciphertext: bytes, nonce: bytes, k: bytes +) -> bytes: + """ + Decrypts and returns the encrypted message ``ciphertext``, using the shared + key ``k`` and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param k: bytes + :rtype: bytes + """ + if len(nonce) != crypto_box_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + if len(k) != crypto_box_BEFORENMBYTES: + raise exc.ValueError("Invalid shared key") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_box_MACBYTES, + "Input ciphertext must be at least {} long".format( + crypto_box_MACBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_box_MACBYTES + + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_box_open_easy_afternm( + plaintext, ciphertext, _clen, nonce, k + ) + ensure( + res == 0, + "An error occurred trying to decrypt the message", + raising=exc.CryptoError, + ) + + return ffi.buffer(plaintext, _mlen)[:] + + +def crypto_box_seal(message: bytes, pk: bytes) -> bytes: + """ + Encrypts and returns a message ``message`` using an ephemeral secret key + and the public key ``pk``. + The ephemeral public key, which is embedded in the sealed box, is also + used, in combination with ``pk``, to derive the nonce needed for the + underlying box construct. + + :param message: bytes + :param pk: bytes + :rtype: bytes + + .. 
versionadded:: 1.2
+    """
+    ensure(
+        isinstance(message, bytes),
+        "input message must be bytes",
+        raising=TypeError,
+    )
+
+    ensure(
+        isinstance(pk, bytes), "public key must be bytes", raising=TypeError
+    )
+
+    if len(pk) != crypto_box_PUBLICKEYBYTES:
+        raise exc.ValueError("Invalid public key")
+
+    _mlen = len(message)
+    _clen = crypto_box_SEALBYTES + _mlen
+
+    ciphertext = ffi.new("unsigned char[]", _clen)
+
+    rc = lib.crypto_box_seal(ciphertext, message, _mlen, pk)
+    ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError)
+
+    return ffi.buffer(ciphertext, _clen)[:]
+
+
+def crypto_box_seal_open(ciphertext: bytes, pk: bytes, sk: bytes) -> bytes:
+    """
+    Decrypts and returns an encrypted message ``ciphertext``, using the
+    recipient's secret key ``sk`` and the sender's ephemeral public key
+    embedded in the sealed box. The box construct nonce is derived from
+    the recipient's public key ``pk`` and the sender's public key.
+
+    :param ciphertext: bytes
+    :param pk: bytes
+    :param sk: bytes
+    :rtype: bytes
+
+    .. versionadded:: 1.2
+    """
+    ensure(
+        isinstance(ciphertext, bytes),
+        "input ciphertext must be bytes",
+        raising=TypeError,
+    )
+
+    ensure(
+        isinstance(pk, bytes), "public key must be bytes", raising=TypeError
+    )
+
+    ensure(
+        isinstance(sk, bytes), "secret key must be bytes", raising=TypeError
+    )
+
+    if len(pk) != crypto_box_PUBLICKEYBYTES:
+        raise exc.ValueError("Invalid public key")
+
+    if len(sk) != crypto_box_SECRETKEYBYTES:
+        raise exc.ValueError("Invalid secret key")
+
+    _clen = len(ciphertext)
+
+    ensure(
+        _clen >= crypto_box_SEALBYTES,
+        ("Input ciphertext must be at least {} long").format(
+            crypto_box_SEALBYTES
+        ),
+        raising=exc.TypeError,
+    )
+
+    _mlen = _clen - crypto_box_SEALBYTES
+
+    # zero-length malloc results are implementation-dependent
+    plaintext = ffi.new("unsigned char[]", max(1, _mlen))
+
+    res = lib.crypto_box_seal_open(plaintext, ciphertext, _clen, pk, sk)
+    ensure(
+        res == 0,
+        "An error occurred trying to decrypt the message",
+        raising=exc.CryptoError,
+    )
+
+    return ffi.buffer(plaintext, _mlen)[:]
diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_core.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_core.py
new file mode 100644
index 0000000..e64a064
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_core.py
@@ -0,0 +1,449 @@
+# Copyright 2018 Donald Stufft and individual contributors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_core_ed25519 = bool(lib.PYNACL_HAS_CRYPTO_CORE_ED25519) + +crypto_core_ed25519_BYTES = 0 +crypto_core_ed25519_SCALARBYTES = 0 +crypto_core_ed25519_NONREDUCEDSCALARBYTES = 0 + +if has_crypto_core_ed25519: + crypto_core_ed25519_BYTES = lib.crypto_core_ed25519_bytes() + crypto_core_ed25519_SCALARBYTES = lib.crypto_core_ed25519_scalarbytes() + crypto_core_ed25519_NONREDUCEDSCALARBYTES = ( + lib.crypto_core_ed25519_nonreducedscalarbytes() + ) + + +def crypto_core_ed25519_is_valid_point(p: bytes) -> bool: + """ + Check if ``p`` represents a point on the edwards25519 curve, in canonical + form, on the main subgroup, and that the point doesn't have a small order. + + :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :return: point validity + :rtype: bool + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) and len(p) == crypto_core_ed25519_BYTES, + "Point must be a crypto_core_ed25519_BYTES long bytes sequence", + raising=exc.TypeError, + ) + + rc = lib.crypto_core_ed25519_is_valid_point(p) + return rc == 1 + + +def crypto_core_ed25519_from_uniform(r: bytes) -> bytes: + """ + Maps a 32 bytes vector ``r`` to a point. The point is guaranteed to be on the main subgroup. + This function directly exposes the Elligator 2 map, uses the high bit to set + the sign of the X coordinate, and the resulting point is multiplied by the cofactor. + + :param r: a :py:data:`.crypto_core_ed25519_BYTES` long bytes + sequence representing arbitrary data + :type r: bytes + :return: a point on the edwards25519 curve main order subgroup, represented as a + :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(r, bytes) and len(r) == crypto_core_ed25519_BYTES, + "Integer r must be a {} long bytes sequence".format( + "crypto_core_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + p = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES) + + rc = lib.crypto_core_ed25519_from_uniform(p, r) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(p, crypto_core_ed25519_BYTES)[:] + + +def crypto_core_ed25519_add(p: bytes, q: bytes) -> bytes: + """ + Add two points on the edwards25519 curve. + + :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type q: bytes + :return: a point on the edwards25519 curve represented as + a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
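+
+    A small consistency sketch (``p`` and ``q`` are hypothetical valid
+    points; guarded for minimal builds)::
+
+        from nacl.bindings import (
+            crypto_core_ed25519_add,
+            crypto_core_ed25519_sub,
+            has_crypto_core_ed25519,
+        )
+
+        if has_crypto_core_ed25519:
+            r = crypto_core_ed25519_add(p, q)
+            assert crypto_core_ed25519_sub(r, q) == p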
+ """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_BYTES + and len(q) == crypto_core_ed25519_BYTES, + "Each point must be a {} long bytes sequence".format( + "crypto_core_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES) + + rc = lib.crypto_core_ed25519_add(r, p, q) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(r, crypto_core_ed25519_BYTES)[:] + + +def crypto_core_ed25519_sub(p: bytes, q: bytes) -> bytes: + """ + Subtract a point from another on the edwards25519 curve. + + :param p: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type q: bytes + :return: a point on the edwards25519 curve represented as + a :py:data:`.crypto_core_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_BYTES + and len(q) == crypto_core_ed25519_BYTES, + "Each point must be a {} long bytes sequence".format( + "crypto_core_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_BYTES) + + rc = lib.crypto_core_ed25519_sub(r, p, q) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(r, crypto_core_ed25519_BYTES)[:] + + +def crypto_core_ed25519_scalar_invert(s: bytes) -> bytes: + """ + Return the multiplicative inverse of integer ``s`` modulo ``L``, + i.e an integer ``i`` such that ``s * i = 1 (mod L)``, where ``L`` + is the order of the main subgroup. + + Raises a ``exc.RuntimeError`` if ``s`` is the integer zero. + + :param s: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) and len(s) == crypto_core_ed25519_SCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + rc = lib.crypto_core_ed25519_scalar_invert(r, s) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_negate(s: bytes) -> bytes: + """ + Return the integer ``n`` such that ``s + n = 0 (mod L)``, where ``L`` + is the order of the main subgroup. 
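+
+    For example, adding a scalar to its negation yields zero (sketch, with
+    ``s`` a hypothetical scalar, calling the sibling functions defined in
+    this module)::
+
+        n = crypto_core_ed25519_scalar_negate(s)
+        zero = bytes(crypto_core_ed25519_SCALARBYTES)
+        assert crypto_core_ed25519_scalar_add(s, n) == zero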
+ + :param s: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) and len(s) == crypto_core_ed25519_SCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_negate(r, s) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_complement(s: bytes) -> bytes: + """ + Return the complement of integer ``s`` modulo ``L``, i.e. an integer + ``c`` such that ``s + c = 1 (mod L)``, where ``L`` is the order of + the main subgroup. + + :param s: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) and len(s) == crypto_core_ed25519_SCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_complement(r, s) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_add(p: bytes, q: bytes) -> bytes: + """ + Add integers ``p`` and ``q`` modulo ``L``, where ``L`` is the order of + the main subgroup. + + :param p: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type q: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_SCALARBYTES + and len(q) == crypto_core_ed25519_SCALARBYTES, + "Each integer must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_add(r, p, q) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_sub(p: bytes, q: bytes) -> bytes: + """ + Subtract integers ``p`` and ``q`` modulo ``L``, where ``L`` is the + order of the main subgroup. 
+ + :param p: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type q: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_SCALARBYTES + and len(q) == crypto_core_ed25519_SCALARBYTES, + "Each integer must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_sub(r, p, q) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_mul(p: bytes, q: bytes) -> bytes: + """ + Multiply integers ``p`` and ``q`` modulo ``L``, where ``L`` is the + order of the main subgroup. + + :param p: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type p: bytes + :param q: a :py:data:`.crypto_core_ed25519_SCALARBYTES` + long bytes sequence representing an integer + :type q: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(p, bytes) + and isinstance(q, bytes) + and len(p) == crypto_core_ed25519_SCALARBYTES + and len(q) == crypto_core_ed25519_SCALARBYTES, + "Each integer must be a {} long bytes sequence".format( + "crypto_core_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_mul(r, p, q) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] + + +def crypto_core_ed25519_scalar_reduce(s: bytes) -> bytes: + """ + Reduce integer ``s`` to ``s`` modulo ``L``, where ``L`` is the order + of the main subgroup. + + :param s: a :py:data:`.crypto_core_ed25519_NONREDUCEDSCALARBYTES` + long bytes sequence representing an integer + :type s: bytes + :return: an integer represented as a + :py:data:`.crypto_core_ed25519_SCALARBYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
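+
+    A typical use is mapping wider uniform input (for example, a 64-byte
+    hash output) onto the scalar field; a sketch using
+    :func:`nacl.utils.random` as the byte source::
+
+        import nacl.utils
+
+        wide = nacl.utils.random(
+            crypto_core_ed25519_NONREDUCEDSCALARBYTES
+        )
+        s = crypto_core_ed25519_scalar_reduce(wide)
+        assert len(s) == crypto_core_ed25519_SCALARBYTES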
+ """ + ensure( + has_crypto_core_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(s, bytes) + and len(s) == crypto_core_ed25519_NONREDUCEDSCALARBYTES, + "Integer s must be a {} long bytes sequence".format( + "crypto_core_ed25519_NONREDUCEDSCALARBYTES" + ), + raising=exc.TypeError, + ) + + r = ffi.new("unsigned char[]", crypto_core_ed25519_SCALARBYTES) + + lib.crypto_core_ed25519_scalar_reduce(r, s) + + return ffi.buffer(r, crypto_core_ed25519_SCALARBYTES)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_generichash.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_generichash.py new file mode 100644 index 0000000..6ab385a --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_generichash.py @@ -0,0 +1,281 @@ +# Copyright 2013-2019 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import NoReturn, TypeVar + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_generichash_BYTES: int = lib.crypto_generichash_blake2b_bytes() +crypto_generichash_BYTES_MIN: int = lib.crypto_generichash_blake2b_bytes_min() +crypto_generichash_BYTES_MAX: int = lib.crypto_generichash_blake2b_bytes_max() +crypto_generichash_KEYBYTES: int = lib.crypto_generichash_blake2b_keybytes() +crypto_generichash_KEYBYTES_MIN: int = ( + lib.crypto_generichash_blake2b_keybytes_min() +) +crypto_generichash_KEYBYTES_MAX: int = ( + lib.crypto_generichash_blake2b_keybytes_max() +) +crypto_generichash_SALTBYTES: int = lib.crypto_generichash_blake2b_saltbytes() +crypto_generichash_PERSONALBYTES: int = ( + lib.crypto_generichash_blake2b_personalbytes() +) +crypto_generichash_STATEBYTES: int = lib.crypto_generichash_statebytes() + +_OVERLONG = "{0} length greater than {1} bytes" +_TOOBIG = "{0} greater than {1}" + + +def _checkparams( + digest_size: int, key: bytes, salt: bytes, person: bytes +) -> None: + """Check hash parameters""" + ensure( + isinstance(key, bytes), + "Key must be a bytes sequence", + raising=exc.TypeError, + ) + + ensure( + isinstance(salt, bytes), + "Salt must be a bytes sequence", + raising=exc.TypeError, + ) + + ensure( + isinstance(person, bytes), + "Person must be a bytes sequence", + raising=exc.TypeError, + ) + + ensure( + isinstance(digest_size, int), + "Digest size must be an integer number", + raising=exc.TypeError, + ) + + ensure( + digest_size <= crypto_generichash_BYTES_MAX, + _TOOBIG.format("Digest_size", crypto_generichash_BYTES_MAX), + raising=exc.ValueError, + ) + + ensure( + len(key) <= crypto_generichash_KEYBYTES_MAX, + _OVERLONG.format("Key", crypto_generichash_KEYBYTES_MAX), + raising=exc.ValueError, + ) + + ensure( + len(salt) <= crypto_generichash_SALTBYTES, + _OVERLONG.format("Salt", crypto_generichash_SALTBYTES), + raising=exc.ValueError, + ) + + ensure( + len(person) <= crypto_generichash_PERSONALBYTES, + _OVERLONG.format("Person", crypto_generichash_PERSONALBYTES), + 
raising=exc.ValueError, + ) + + +def generichash_blake2b_salt_personal( + data: bytes, + digest_size: int = crypto_generichash_BYTES, + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", +) -> bytes: + """One shot hash interface + + :param data: the input data to the hash function + :type data: bytes + :param digest_size: must be at most + :py:data:`.crypto_generichash_BYTES_MAX`; + the default digest size is + :py:data:`.crypto_generichash_BYTES` + :type digest_size: int + :param key: must be at most + :py:data:`.crypto_generichash_KEYBYTES_MAX` long + :type key: bytes + :param salt: must be at most + :py:data:`.crypto_generichash_SALTBYTES` long; + will be zero-padded if needed + :type salt: bytes + :param person: must be at most + :py:data:`.crypto_generichash_PERSONALBYTES` long: + will be zero-padded if needed + :type person: bytes + :return: digest_size long digest + :rtype: bytes + """ + + _checkparams(digest_size, key, salt, person) + + ensure( + isinstance(data, bytes), + "Input data must be a bytes sequence", + raising=exc.TypeError, + ) + + digest = ffi.new("unsigned char[]", digest_size) + + # both _salt and _personal must be zero-padded to the correct length + _salt = ffi.new("unsigned char []", crypto_generichash_SALTBYTES) + _person = ffi.new("unsigned char []", crypto_generichash_PERSONALBYTES) + + ffi.memmove(_salt, salt, len(salt)) + ffi.memmove(_person, person, len(person)) + + rc = lib.crypto_generichash_blake2b_salt_personal( + digest, digest_size, data, len(data), key, len(key), _salt, _person + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return ffi.buffer(digest, digest_size)[:] + + +_Blake2State = TypeVar("_Blake2State", bound="Blake2State") + + +class Blake2State: + """ + Python-level wrapper for the crypto_generichash_blake2b state buffer + """ + + __slots__ = ["_statebuf", "digest_size"] + + def __init__(self, digest_size: int): + self._statebuf = ffi.new( + "unsigned char[]", crypto_generichash_STATEBYTES + ) + self.digest_size = digest_size + + def __reduce__(self) -> NoReturn: + """ + Raise the same exception as hashlib's blake implementation + on copy.copy() + """ + raise TypeError( + "can't pickle {} objects".format(self.__class__.__name__) + ) + + def copy(self: _Blake2State) -> _Blake2State: + _st = self.__class__(self.digest_size) + ffi.memmove( + _st._statebuf, self._statebuf, crypto_generichash_STATEBYTES + ) + return _st + + +def generichash_blake2b_init( + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", + digest_size: int = crypto_generichash_BYTES, +) -> Blake2State: + """ + Create a new initialized blake2b hash state + + :param key: must be at most + :py:data:`.crypto_generichash_KEYBYTES_MAX` long + :type key: bytes + :param salt: must be at most + :py:data:`.crypto_generichash_SALTBYTES` long; + will be zero-padded if needed + :type salt: bytes + :param person: must be at most + :py:data:`.crypto_generichash_PERSONALBYTES` long: + will be zero-padded if needed + :type person: bytes + :param digest_size: must be at most + :py:data:`.crypto_generichash_BYTES_MAX`; + the default digest size is + :py:data:`.crypto_generichash_BYTES` + :type digest_size: int + :return: a initialized :py:class:`.Blake2State` + :rtype: object + """ + + _checkparams(digest_size, key, salt, person) + + state = Blake2State(digest_size) + + # both _salt and _personal must be zero-padded to the correct length + _salt = ffi.new("unsigned char []", crypto_generichash_SALTBYTES) + _person = ffi.new("unsigned char []", 
crypto_generichash_PERSONALBYTES) + + ffi.memmove(_salt, salt, len(salt)) + ffi.memmove(_person, person, len(person)) + + rc = lib.crypto_generichash_blake2b_init_salt_personal( + state._statebuf, key, len(key), digest_size, _salt, _person + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return state + + +def generichash_blake2b_update(state: Blake2State, data: bytes) -> None: + """Update the blake2b hash state + + :param state: a initialized Blake2bState object as returned from + :py:func:`.crypto_generichash_blake2b_init` + :type state: :py:class:`.Blake2State` + :param data: + :type data: bytes + """ + + ensure( + isinstance(state, Blake2State), + "State must be a Blake2State object", + raising=exc.TypeError, + ) + + ensure( + isinstance(data, bytes), + "Input data must be a bytes sequence", + raising=exc.TypeError, + ) + + rc = lib.crypto_generichash_blake2b_update( + state._statebuf, data, len(data) + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + +def generichash_blake2b_final(state: Blake2State) -> bytes: + """Finalize the blake2b hash state and return the digest. + + :param state: a initialized Blake2bState object as returned from + :py:func:`.crypto_generichash_blake2b_init` + :type state: :py:class:`.Blake2State` + :return: the blake2 digest of the passed-in data stream + :rtype: bytes + """ + + ensure( + isinstance(state, Blake2State), + "State must be a Blake2State object", + raising=exc.TypeError, + ) + + _digest = ffi.new("unsigned char[]", crypto_generichash_BYTES_MAX) + rc = lib.crypto_generichash_blake2b_final( + state._statebuf, _digest, state.digest_size + ) + + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + return ffi.buffer(_digest, state.digest_size)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_hash.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_hash.py new file mode 100644 index 0000000..2bab399 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_hash.py @@ -0,0 +1,63 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +# crypto_hash_BYTES = lib.crypto_hash_bytes() +crypto_hash_BYTES: int = lib.crypto_hash_sha512_bytes() +crypto_hash_sha256_BYTES: int = lib.crypto_hash_sha256_bytes() +crypto_hash_sha512_BYTES: int = lib.crypto_hash_sha512_bytes() + + +def crypto_hash(message: bytes) -> bytes: + """ + Hashes and returns the message ``message``. + + :param message: bytes + :rtype: bytes + """ + digest = ffi.new("unsigned char[]", crypto_hash_BYTES) + rc = lib.crypto_hash(digest, message, len(message)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + return ffi.buffer(digest, crypto_hash_BYTES)[:] + + +def crypto_hash_sha256(message: bytes) -> bytes: + """ + Hashes and returns the message ``message``. 
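+
+    This is standard SHA-256, so it should agree with :mod:`hashlib`
+    (sketch)::
+
+        import hashlib
+        from nacl.bindings import crypto_hash_sha256
+
+        assert (
+            crypto_hash_sha256(b"data")
+            == hashlib.sha256(b"data").digest()
+        )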
+ + :param message: bytes + :rtype: bytes + """ + digest = ffi.new("unsigned char[]", crypto_hash_sha256_BYTES) + rc = lib.crypto_hash_sha256(digest, message, len(message)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + return ffi.buffer(digest, crypto_hash_sha256_BYTES)[:] + + +def crypto_hash_sha512(message: bytes) -> bytes: + """ + Hashes and returns the message ``message``. + + :param message: bytes + :rtype: bytes + """ + digest = ffi.new("unsigned char[]", crypto_hash_sha512_BYTES) + rc = lib.crypto_hash_sha512(digest, message, len(message)) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + return ffi.buffer(digest, crypto_hash_sha512_BYTES)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_kx.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_kx.py new file mode 100644 index 0000000..3c649e4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_kx.py @@ -0,0 +1,200 @@ +# Copyright 2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Tuple + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + +__all__ = [ + "crypto_kx_keypair", + "crypto_kx_client_session_keys", + "crypto_kx_server_session_keys", + "crypto_kx_PUBLIC_KEY_BYTES", + "crypto_kx_SECRET_KEY_BYTES", + "crypto_kx_SEED_BYTES", + "crypto_kx_SESSION_KEY_BYTES", +] + +""" +Implementations of client, server key exchange +""" +crypto_kx_PUBLIC_KEY_BYTES: int = lib.crypto_kx_publickeybytes() +crypto_kx_SECRET_KEY_BYTES: int = lib.crypto_kx_secretkeybytes() +crypto_kx_SEED_BYTES: int = lib.crypto_kx_seedbytes() +crypto_kx_SESSION_KEY_BYTES: int = lib.crypto_kx_sessionkeybytes() + + +def crypto_kx_keypair() -> Tuple[bytes, bytes]: + """ + Generate a key pair. + This is a duplicate crypto_box_keypair, but + is included for api consistency. + :return: (public_key, secret_key) + :rtype: (bytes, bytes) + """ + public_key = ffi.new("unsigned char[]", crypto_kx_PUBLIC_KEY_BYTES) + secret_key = ffi.new("unsigned char[]", crypto_kx_SECRET_KEY_BYTES) + res = lib.crypto_kx_keypair(public_key, secret_key) + ensure(res == 0, "Key generation failed.", raising=exc.CryptoError) + + return ( + ffi.buffer(public_key, crypto_kx_PUBLIC_KEY_BYTES)[:], + ffi.buffer(secret_key, crypto_kx_SECRET_KEY_BYTES)[:], + ) + + +def crypto_kx_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]: + """ + Generate a key pair with a given seed. + This is functionally the same as crypto_box_seed_keypair, however + it uses the blake2b hash primitive instead of sha512. + It is included mainly for api consistency when using crypto_kx. 
+ :param seed: random seed + :type seed: bytes + :return: (public_key, secret_key) + :rtype: (bytes, bytes) + """ + public_key = ffi.new("unsigned char[]", crypto_kx_PUBLIC_KEY_BYTES) + secret_key = ffi.new("unsigned char[]", crypto_kx_SECRET_KEY_BYTES) + ensure( + isinstance(seed, bytes) and len(seed) == crypto_kx_SEED_BYTES, + "Seed must be a {} byte long bytes sequence".format( + crypto_kx_SEED_BYTES + ), + raising=exc.TypeError, + ) + res = lib.crypto_kx_seed_keypair(public_key, secret_key, seed) + ensure(res == 0, "Key generation failed.", raising=exc.CryptoError) + + return ( + ffi.buffer(public_key, crypto_kx_PUBLIC_KEY_BYTES)[:], + ffi.buffer(secret_key, crypto_kx_SECRET_KEY_BYTES)[:], + ) + + +def crypto_kx_client_session_keys( + client_public_key: bytes, + client_secret_key: bytes, + server_public_key: bytes, +) -> Tuple[bytes, bytes]: + """ + Generate session keys for the client. + :param client_public_key: + :type client_public_key: bytes + :param client_secret_key: + :type client_secret_key: bytes + :param server_public_key: + :type server_public_key: bytes + :return: (rx_key, tx_key) + :rtype: (bytes, bytes) + """ + ensure( + isinstance(client_public_key, bytes) + and len(client_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Client public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(client_secret_key, bytes) + and len(client_secret_key) == crypto_kx_SECRET_KEY_BYTES, + "Client secret key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(server_public_key, bytes) + and len(server_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Server public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + + rx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + tx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + res = lib.crypto_kx_client_session_keys( + rx_key, tx_key, client_public_key, client_secret_key, server_public_key + ) + ensure( + res == 0, + "Client session key generation failed.", + raising=exc.CryptoError, + ) + + return ( + ffi.buffer(rx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ffi.buffer(tx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ) + + +def crypto_kx_server_session_keys( + server_public_key: bytes, + server_secret_key: bytes, + client_public_key: bytes, +) -> Tuple[bytes, bytes]: + """ + Generate session keys for the server. 
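+    A full handshake sketch: the server's ``rx`` key matches the client's
+    ``tx`` key and vice versa::
+
+        from nacl.bindings import (
+            crypto_kx_client_session_keys,
+            crypto_kx_keypair,
+            crypto_kx_server_session_keys,
+        )
+
+        c_pk, c_sk = crypto_kx_keypair()
+        s_pk, s_sk = crypto_kx_keypair()
+        c_rx, c_tx = crypto_kx_client_session_keys(c_pk, c_sk, s_pk)
+        s_rx, s_tx = crypto_kx_server_session_keys(s_pk, s_sk, c_pk)
+        assert c_rx == s_tx and c_tx == s_rx
+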
+ :param server_public_key: + :type server_public_key: bytes + :param server_secret_key: + :type server_secret_key: bytes + :param client_public_key: + :type client_public_key: bytes + :return: (rx_key, tx_key) + :rtype: (bytes, bytes) + """ + ensure( + isinstance(server_public_key, bytes) + and len(server_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Server public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(server_secret_key, bytes) + and len(server_secret_key) == crypto_kx_SECRET_KEY_BYTES, + "Server secret key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + ensure( + isinstance(client_public_key, bytes) + and len(client_public_key) == crypto_kx_PUBLIC_KEY_BYTES, + "Client public key must be a {} bytes long bytes sequence".format( + crypto_kx_PUBLIC_KEY_BYTES + ), + raising=exc.TypeError, + ) + + rx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + tx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES) + res = lib.crypto_kx_server_session_keys( + rx_key, tx_key, server_public_key, server_secret_key, client_public_key + ) + ensure( + res == 0, + "Server session key generation failed.", + raising=exc.CryptoError, + ) + + return ( + ffi.buffer(rx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ffi.buffer(tx_key, crypto_kx_SESSION_KEY_BYTES)[:], + ) diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_pwhash.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_pwhash.py new file mode 100644 index 0000000..7f62360 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_pwhash.py @@ -0,0 +1,599 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +from typing import Tuple + +import nacl.exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_pwhash_scryptsalsa208sha256 = bool( + lib.PYNACL_HAS_CRYPTO_PWHASH_SCRYPTSALSA208SHA256 +) + +crypto_pwhash_scryptsalsa208sha256_STRPREFIX = b"" +crypto_pwhash_scryptsalsa208sha256_SALTBYTES = 0 +crypto_pwhash_scryptsalsa208sha256_STRBYTES = 0 +crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_BYTES_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_BYTES_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE = 0 +crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE = 0 +crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE = 0 + +if has_crypto_pwhash_scryptsalsa208sha256: + crypto_pwhash_scryptsalsa208sha256_STRPREFIX = ffi.string( + ffi.cast("char *", lib.crypto_pwhash_scryptsalsa208sha256_strprefix()) + )[:] + crypto_pwhash_scryptsalsa208sha256_SALTBYTES = ( + lib.crypto_pwhash_scryptsalsa208sha256_saltbytes() + ) + crypto_pwhash_scryptsalsa208sha256_STRBYTES = ( + lib.crypto_pwhash_scryptsalsa208sha256_strbytes() + ) + crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_passwd_min() + ) + crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_passwd_max() + ) + crypto_pwhash_scryptsalsa208sha256_BYTES_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_bytes_min() + ) + crypto_pwhash_scryptsalsa208sha256_BYTES_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_bytes_max() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_min() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_max() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_min() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_max() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_interactive() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_interactive() + ) + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_opslimit_sensitive() + ) + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE = ( + lib.crypto_pwhash_scryptsalsa208sha256_memlimit_sensitive() + ) + +crypto_pwhash_ALG_ARGON2I13: int = lib.crypto_pwhash_alg_argon2i13() +crypto_pwhash_ALG_ARGON2ID13: int = lib.crypto_pwhash_alg_argon2id13() +crypto_pwhash_ALG_DEFAULT: int = lib.crypto_pwhash_alg_default() + +crypto_pwhash_SALTBYTES: int = lib.crypto_pwhash_saltbytes() +crypto_pwhash_STRBYTES: int = lib.crypto_pwhash_strbytes() + +crypto_pwhash_PASSWD_MIN: int = lib.crypto_pwhash_passwd_min() +crypto_pwhash_PASSWD_MAX: int = lib.crypto_pwhash_passwd_max() +crypto_pwhash_BYTES_MIN: int = lib.crypto_pwhash_bytes_min() +crypto_pwhash_BYTES_MAX: int = lib.crypto_pwhash_bytes_max() + +crypto_pwhash_argon2i_STRPREFIX: bytes = ffi.string( + ffi.cast("char *", 
lib.crypto_pwhash_argon2i_strprefix()) +)[:] +crypto_pwhash_argon2i_MEMLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2i_memlimit_min() +) +crypto_pwhash_argon2i_MEMLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2i_memlimit_max() +) +crypto_pwhash_argon2i_OPSLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2i_opslimit_min() +) +crypto_pwhash_argon2i_OPSLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2i_opslimit_max() +) +crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2i_opslimit_interactive() +) +crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2i_memlimit_interactive() +) +crypto_pwhash_argon2i_OPSLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2i_opslimit_moderate() +) +crypto_pwhash_argon2i_MEMLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2i_memlimit_moderate() +) +crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2i_opslimit_sensitive() +) +crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2i_memlimit_sensitive() +) + +crypto_pwhash_argon2id_STRPREFIX: bytes = ffi.string( + ffi.cast("char *", lib.crypto_pwhash_argon2id_strprefix()) +)[:] +crypto_pwhash_argon2id_MEMLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2id_memlimit_min() +) +crypto_pwhash_argon2id_MEMLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2id_memlimit_max() +) +crypto_pwhash_argon2id_OPSLIMIT_MIN: int = ( + lib.crypto_pwhash_argon2id_opslimit_min() +) +crypto_pwhash_argon2id_OPSLIMIT_MAX: int = ( + lib.crypto_pwhash_argon2id_opslimit_max() +) +crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2id_opslimit_interactive() +) +crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE: int = ( + lib.crypto_pwhash_argon2id_memlimit_interactive() +) +crypto_pwhash_argon2id_OPSLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2id_opslimit_moderate() +) +crypto_pwhash_argon2id_MEMLIMIT_MODERATE: int = ( + lib.crypto_pwhash_argon2id_memlimit_moderate() +) +crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2id_opslimit_sensitive() +) +crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE: int = ( + lib.crypto_pwhash_argon2id_memlimit_sensitive() +) + +SCRYPT_OPSLIMIT_INTERACTIVE = ( + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE +) +SCRYPT_MEMLIMIT_INTERACTIVE = ( + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE +) +SCRYPT_OPSLIMIT_SENSITIVE = ( + crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE +) +SCRYPT_MEMLIMIT_SENSITIVE = ( + crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE +) +SCRYPT_SALTBYTES = crypto_pwhash_scryptsalsa208sha256_SALTBYTES +SCRYPT_STRBYTES = crypto_pwhash_scryptsalsa208sha256_STRBYTES + +SCRYPT_PR_MAX = (1 << 30) - 1 +LOG2_UINT64_MAX = 63 +UINT64_MAX = (1 << 64) - 1 +SCRYPT_MAX_MEM = 32 * (1024 * 1024) + + +def _check_memory_occupation( + n: int, r: int, p: int, maxmem: int = SCRYPT_MAX_MEM +) -> None: + ensure(r != 0, "Invalid block size", raising=exc.ValueError) + + ensure(p != 0, "Invalid parallelization factor", raising=exc.ValueError) + + ensure( + (n & (n - 1)) == 0, + "Cost factor must be a power of 2", + raising=exc.ValueError, + ) + + ensure(n > 1, "Cost factor must be at least 2", raising=exc.ValueError) + + ensure( + p <= SCRYPT_PR_MAX / r, + "p*r is greater than {}".format(SCRYPT_PR_MAX), + raising=exc.ValueError, + ) + + ensure(n < (1 << (16 * r)), raising=exc.ValueError) + + Blen = p * 128 * r + + i = UINT64_MAX / 128 + + ensure(n + 2 <= i / r, raising=exc.ValueError) + + Vlen = 32 * r * (n + 2) * 4 + + ensure(Blen <= UINT64_MAX - Vlen, 
raising=exc.ValueError)
+
+    ensure(Blen <= sys.maxsize - Vlen, raising=exc.ValueError)
+
+    ensure(
+        Blen + Vlen <= maxmem,
+        "Memory limit would be exceeded with the chosen n, r, p",
+        raising=exc.ValueError,
+    )
+
+
+def nacl_bindings_pick_scrypt_params(
+    opslimit: int, memlimit: int
+) -> Tuple[int, int, int]:
+    """Python implementation of libsodium's pickparams"""
+
+    if opslimit < 32768:
+        opslimit = 32768
+
+    r = 8
+
+    if opslimit < (memlimit // 32):
+        p = 1
+        maxn = opslimit // (4 * r)
+        for n_log2 in range(1, 63):  # pragma: no branch
+            if (2**n_log2) > (maxn // 2):
+                break
+    else:
+        maxn = memlimit // (r * 128)
+        for n_log2 in range(1, 63):  # pragma: no branch
+            if (2**n_log2) > maxn // 2:
+                break
+
+        maxrp = (opslimit // 4) // (2**n_log2)
+
+        if maxrp > 0x3FFFFFFF:  # pragma: no cover
+            maxrp = 0x3FFFFFFF
+
+        p = maxrp // r
+
+    return n_log2, r, p
+
+
+def crypto_pwhash_scryptsalsa208sha256_ll(
+    passwd: bytes,
+    salt: bytes,
+    n: int,
+    r: int,
+    p: int,
+    dklen: int = 64,
+    maxmem: int = SCRYPT_MAX_MEM,
+) -> bytes:
+    """
+    Derive a cryptographic key using the ``passwd`` and ``salt``
+    given as input.
+
+    The work factor can be tuned by picking different
+    values for the parameters ``n``, ``r`` and ``p``.
+
+    :param bytes passwd:
+    :param bytes salt: *must* be *exactly* :py:const:`.SALTBYTES` long
+    :param int n: the CPU/memory cost factor
+    :param int r: the block size
+    :param int p: the parallelism factor
+    :param int dklen: the length of the derived key
+    :param int maxmem: the maximum memory available for scrypt's
+        operations
+    :rtype: bytes
+    :raises nacl.exceptions.UnavailableError: If called when using a
+        minimal build of libsodium.
+    """
+    ensure(
+        has_crypto_pwhash_scryptsalsa208sha256,
+        "Not available in minimal build",
+        raising=exc.UnavailableError,
+    )
+
+    ensure(isinstance(n, int), raising=TypeError)
+    ensure(isinstance(r, int), raising=TypeError)
+    ensure(isinstance(p, int), raising=TypeError)
+
+    ensure(isinstance(passwd, bytes), raising=TypeError)
+    ensure(isinstance(salt, bytes), raising=TypeError)
+
+    _check_memory_occupation(n, r, p, maxmem)
+
+    buf = ffi.new("uint8_t[]", dklen)
+
+    ret = lib.crypto_pwhash_scryptsalsa208sha256_ll(
+        passwd, len(passwd), salt, len(salt), n, r, p, buf, dklen
+    )
+
+    ensure(
+        ret == 0,
+        "Unexpected failure in key derivation",
+        raising=exc.RuntimeError,
+    )
+
+    return ffi.buffer(ffi.cast("char *", buf), dklen)[:]
+
+
+def crypto_pwhash_scryptsalsa208sha256_str(
+    passwd: bytes,
+    opslimit: int = SCRYPT_OPSLIMIT_INTERACTIVE,
+    memlimit: int = SCRYPT_MEMLIMIT_INTERACTIVE,
+) -> bytes:
+    """
+    Derive a cryptographic key using the ``passwd`` given as input
+    and a random salt, returning a string representation which includes
+    the salt and the tuning parameters.
+
+    The returned string can be directly stored as a password hash.
+
+    See :py:func:`.crypto_pwhash_scryptsalsa208sha256` for a short
+    discussion about ``opslimit`` and ``memlimit`` values.
+
+    :param bytes passwd:
+    :param int opslimit:
+    :param int memlimit:
+    :return: serialized key hash, including salt and tuning parameters
+    :rtype: bytes
+    :raises nacl.exceptions.UnavailableError: If called when using a
+        minimal build of libsodium.
+ """ + ensure( + has_crypto_pwhash_scryptsalsa208sha256, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + buf = ffi.new("char[]", SCRYPT_STRBYTES) + + ret = lib.crypto_pwhash_scryptsalsa208sha256_str( + buf, passwd, len(passwd), opslimit, memlimit + ) + + ensure( + ret == 0, + "Unexpected failure in password hashing", + raising=exc.RuntimeError, + ) + + return ffi.string(buf) + + +def crypto_pwhash_scryptsalsa208sha256_str_verify( + passwd_hash: bytes, passwd: bytes +) -> bool: + """ + Verifies the ``passwd`` against the ``passwd_hash`` that was generated. + Returns True or False depending on the success + + :param passwd_hash: bytes + :param passwd: bytes + :rtype: boolean + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_pwhash_scryptsalsa208sha256, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + len(passwd_hash) == SCRYPT_STRBYTES - 1, + "Invalid password hash", + raising=exc.ValueError, + ) + + ret = lib.crypto_pwhash_scryptsalsa208sha256_str_verify( + passwd_hash, passwd, len(passwd) + ) + ensure(ret == 0, "Wrong password", raising=exc.InvalidkeyError) + # all went well, therefore: + return True + + +def _check_argon2_limits_alg(opslimit: int, memlimit: int, alg: int) -> None: + if alg == crypto_pwhash_ALG_ARGON2I13: + if memlimit < crypto_pwhash_argon2i_MEMLIMIT_MIN: + raise exc.ValueError( + "memlimit must be at least {} bytes".format( + crypto_pwhash_argon2i_MEMLIMIT_MIN + ) + ) + elif memlimit > crypto_pwhash_argon2i_MEMLIMIT_MAX: + raise exc.ValueError( + "memlimit must be at most {} bytes".format( + crypto_pwhash_argon2i_MEMLIMIT_MAX + ) + ) + if opslimit < crypto_pwhash_argon2i_OPSLIMIT_MIN: + raise exc.ValueError( + "opslimit must be at least {}".format( + crypto_pwhash_argon2i_OPSLIMIT_MIN + ) + ) + elif opslimit > crypto_pwhash_argon2i_OPSLIMIT_MAX: + raise exc.ValueError( + "opslimit must be at most {}".format( + crypto_pwhash_argon2i_OPSLIMIT_MAX + ) + ) + + elif alg == crypto_pwhash_ALG_ARGON2ID13: + if memlimit < crypto_pwhash_argon2id_MEMLIMIT_MIN: + raise exc.ValueError( + "memlimit must be at least {} bytes".format( + crypto_pwhash_argon2id_MEMLIMIT_MIN + ) + ) + elif memlimit > crypto_pwhash_argon2id_MEMLIMIT_MAX: + raise exc.ValueError( + "memlimit must be at most {} bytes".format( + crypto_pwhash_argon2id_MEMLIMIT_MAX + ) + ) + if opslimit < crypto_pwhash_argon2id_OPSLIMIT_MIN: + raise exc.ValueError( + "opslimit must be at least {}".format( + crypto_pwhash_argon2id_OPSLIMIT_MIN + ) + ) + elif opslimit > crypto_pwhash_argon2id_OPSLIMIT_MAX: + raise exc.ValueError( + "opslimit must be at most {}".format( + crypto_pwhash_argon2id_OPSLIMIT_MAX + ) + ) + else: + raise exc.TypeError("Unsupported algorithm") + + +def crypto_pwhash_alg( + outlen: int, + passwd: bytes, + salt: bytes, + opslimit: int, + memlimit: int, + alg: int, +) -> bytes: + """ + Derive a raw cryptographic key using the ``passwd`` and the ``salt`` + given as input to the ``alg`` algorithm. 
+ + :param outlen: the length of the derived key + :type outlen: int + :param passwd: The input password + :type passwd: bytes + :param salt: + :type salt: bytes + :param opslimit: computational cost + :type opslimit: int + :param memlimit: memory cost + :type memlimit: int + :param alg: algorithm identifier + :type alg: int + :return: derived key + :rtype: bytes + """ + ensure(isinstance(outlen, int), raising=exc.TypeError) + ensure(isinstance(opslimit, int), raising=exc.TypeError) + ensure(isinstance(memlimit, int), raising=exc.TypeError) + ensure(isinstance(alg, int), raising=exc.TypeError) + ensure(isinstance(passwd, bytes), raising=exc.TypeError) + + if len(salt) != crypto_pwhash_SALTBYTES: + raise exc.ValueError( + "salt must be exactly {} bytes long".format( + crypto_pwhash_SALTBYTES + ) + ) + + if outlen < crypto_pwhash_BYTES_MIN: + raise exc.ValueError( + "derived key must be at least {} bytes long".format( + crypto_pwhash_BYTES_MIN + ) + ) + + elif outlen > crypto_pwhash_BYTES_MAX: + raise exc.ValueError( + "derived key must be at most {} bytes long".format( + crypto_pwhash_BYTES_MAX + ) + ) + + _check_argon2_limits_alg(opslimit, memlimit, alg) + + outbuf = ffi.new("unsigned char[]", outlen) + + ret = lib.crypto_pwhash( + outbuf, outlen, passwd, len(passwd), salt, opslimit, memlimit, alg + ) + + ensure( + ret == 0, + "Unexpected failure in key derivation", + raising=exc.RuntimeError, + ) + + return ffi.buffer(outbuf, outlen)[:] + + +def crypto_pwhash_str_alg( + passwd: bytes, + opslimit: int, + memlimit: int, + alg: int, +) -> bytes: + """ + Derive a cryptographic key using the ``passwd`` given as input + and a random salt, returning a string representation which + includes the salt, the tuning parameters and the used algorithm. + + :param passwd: The input password + :type passwd: bytes + :param opslimit: computational cost + :type opslimit: int + :param memlimit: memory cost + :type memlimit: int + :param alg: The algorithm to use + :type alg: int + :return: serialized derived key and parameters + :rtype: bytes + """ + ensure(isinstance(opslimit, int), raising=TypeError) + ensure(isinstance(memlimit, int), raising=TypeError) + ensure(isinstance(passwd, bytes), raising=TypeError) + + _check_argon2_limits_alg(opslimit, memlimit, alg) + + outbuf = ffi.new("char[]", 128) + + ret = lib.crypto_pwhash_str_alg( + outbuf, passwd, len(passwd), opslimit, memlimit, alg + ) + + ensure( + ret == 0, + "Unexpected failure in key derivation", + raising=exc.RuntimeError, + ) + + return ffi.string(outbuf) + + +def crypto_pwhash_str_verify(passwd_hash: bytes, passwd: bytes) -> bool: + """ + Verifies the ``passwd`` against a given password hash. 
+ + Returns True on success, raises InvalidkeyError on failure + :param passwd_hash: saved password hash + :type passwd_hash: bytes + :param passwd: password to be checked + :type passwd: bytes + :return: success + :rtype: boolean + """ + ensure(isinstance(passwd_hash, bytes), raising=TypeError) + ensure(isinstance(passwd, bytes), raising=TypeError) + ensure( + len(passwd_hash) <= 127, + "Hash must be at most 127 bytes long", + raising=exc.ValueError, + ) + + ret = lib.crypto_pwhash_str_verify(passwd_hash, passwd, len(passwd)) + + ensure(ret == 0, "Wrong password", raising=exc.InvalidkeyError) + # all went well, therefore: + return True + + +crypto_pwhash_argon2i_str_verify = crypto_pwhash_str_verify diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_scalarmult.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_scalarmult.py new file mode 100644 index 0000000..ca4a281 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_scalarmult.py @@ -0,0 +1,240 @@ +# Copyright 2013-2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_scalarmult_ed25519 = bool(lib.PYNACL_HAS_CRYPTO_SCALARMULT_ED25519) + +crypto_scalarmult_BYTES: int = lib.crypto_scalarmult_bytes() +crypto_scalarmult_SCALARBYTES: int = lib.crypto_scalarmult_scalarbytes() + +crypto_scalarmult_ed25519_BYTES = 0 +crypto_scalarmult_ed25519_SCALARBYTES = 0 + +if has_crypto_scalarmult_ed25519: + crypto_scalarmult_ed25519_BYTES = lib.crypto_scalarmult_ed25519_bytes() + crypto_scalarmult_ed25519_SCALARBYTES = ( + lib.crypto_scalarmult_ed25519_scalarbytes() + ) + + +def crypto_scalarmult_base(n: bytes) -> bytes: + """ + Computes and returns the scalar product of a standard group element and an + integer ``n``. + + :param n: bytes + :rtype: bytes + """ + q = ffi.new("unsigned char[]", crypto_scalarmult_BYTES) + + rc = lib.crypto_scalarmult_base(q, n) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_SCALARBYTES)[:] + + +def crypto_scalarmult(n: bytes, p: bytes) -> bytes: + """ + Computes and returns the scalar product of the given group element and an + integer ``n``. + + :param p: bytes + :param n: bytes + :rtype: bytes + """ + q = ffi.new("unsigned char[]", crypto_scalarmult_BYTES) + + rc = lib.crypto_scalarmult(q, n, p) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_SCALARBYTES)[:] + + +def crypto_scalarmult_ed25519_base(n: bytes) -> bytes: + """ + Computes and returns the scalar product of a standard group element and an + integer ``n`` on the edwards25519 curve. 
+ + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519_base(q, n) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] + + +def crypto_scalarmult_ed25519_base_noclamp(n: bytes) -> bytes: + """ + Computes and returns the scalar product of a standard group element and an + integer ``n`` on the edwards25519 curve. The integer ``n`` is not clamped. + + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519_base_noclamp(q, n) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] + + +def crypto_scalarmult_ed25519(n: bytes, p: bytes) -> bytes: + """ + Computes and returns the scalar product of a *clamped* integer ``n`` + and the given group element on the edwards25519 curve. + The scalar is clamped, as done in the public key generation case, + by setting to zero the bits in position [0, 1, 2, 255] and setting + to one the bit in position 254. + + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :param p: a :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. 
+ """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(p, bytes) and len(p) == crypto_scalarmult_ed25519_BYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519(q, n, p) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] + + +def crypto_scalarmult_ed25519_noclamp(n: bytes, p: bytes) -> bytes: + """ + Computes and returns the scalar product of an integer ``n`` + and the given group element on the edwards25519 curve. The integer + ``n`` is not clamped. + + :param n: a :py:data:`.crypto_scalarmult_ed25519_SCALARBYTES` long bytes + sequence representing a scalar + :type n: bytes + :param p: a :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + representing a point on the edwards25519 curve + :type p: bytes + :return: a point on the edwards25519 curve, represented as a + :py:data:`.crypto_scalarmult_ed25519_BYTES` long bytes sequence + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_scalarmult_ed25519, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + isinstance(n, bytes) + and len(n) == crypto_scalarmult_ed25519_SCALARBYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_SCALARBYTES" + ), + raising=exc.TypeError, + ) + + ensure( + isinstance(p, bytes) and len(p) == crypto_scalarmult_ed25519_BYTES, + "Input must be a {} long bytes sequence".format( + "crypto_scalarmult_ed25519_BYTES" + ), + raising=exc.TypeError, + ) + + q = ffi.new("unsigned char[]", crypto_scalarmult_ed25519_BYTES) + + rc = lib.crypto_scalarmult_ed25519_noclamp(q, n, p) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(q, crypto_scalarmult_ed25519_BYTES)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_secretbox.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_secretbox.py new file mode 100644 index 0000000..d1ad113 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_secretbox.py @@ -0,0 +1,159 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_secretbox_KEYBYTES: int = lib.crypto_secretbox_keybytes() +crypto_secretbox_NONCEBYTES: int = lib.crypto_secretbox_noncebytes() +crypto_secretbox_ZEROBYTES: int = lib.crypto_secretbox_zerobytes() +crypto_secretbox_BOXZEROBYTES: int = lib.crypto_secretbox_boxzerobytes() +crypto_secretbox_MACBYTES: int = lib.crypto_secretbox_macbytes() +crypto_secretbox_MESSAGEBYTES_MAX: int = ( + lib.crypto_secretbox_messagebytes_max() +) + + +def crypto_secretbox(message: bytes, nonce: bytes, key: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` with the secret ``key`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + padded = b"\x00" * crypto_secretbox_ZEROBYTES + message + ciphertext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_secretbox(ciphertext, padded, len(padded), nonce, key) + ensure(res == 0, "Encryption failed", raising=exc.CryptoError) + + ciphertext = ffi.buffer(ciphertext, len(padded)) + return ciphertext[crypto_secretbox_BOXZEROBYTES:] + + +def crypto_secretbox_open( + ciphertext: bytes, nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt and returns the encrypted message ``ciphertext`` with the secret + ``key`` and the nonce ``nonce``. + + :param ciphertext: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + padded = b"\x00" * crypto_secretbox_BOXZEROBYTES + ciphertext + plaintext = ffi.new("unsigned char[]", len(padded)) + + res = lib.crypto_secretbox_open(plaintext, padded, len(padded), nonce, key) + ensure( + res == 0, + "Decryption failed. Ciphertext failed verification", + raising=exc.CryptoError, + ) + + plaintext = ffi.buffer(plaintext, len(padded)) + return plaintext[crypto_secretbox_ZEROBYTES:] + + +def crypto_secretbox_easy(message: bytes, nonce: bytes, key: bytes) -> bytes: + """ + Encrypts and returns the message ``message`` with the secret ``key`` and + the nonce ``nonce``. + + :param message: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + _mlen = len(message) + _clen = crypto_secretbox_MACBYTES + _mlen + + ciphertext = ffi.new("unsigned char[]", _clen) + + res = lib.crypto_secretbox_easy(ciphertext, message, _mlen, nonce, key) + ensure(res == 0, "Encryption failed", raising=exc.CryptoError) + + ciphertext = ffi.buffer(ciphertext, _clen) + return ciphertext[:] + + +def crypto_secretbox_open_easy( + ciphertext: bytes, nonce: bytes, key: bytes +) -> bytes: + """ + Decrypt and returns the encrypted message ``ciphertext`` with the secret + ``key`` and the nonce ``nonce``. 
+ + :param ciphertext: bytes + :param nonce: bytes + :param key: bytes + :rtype: bytes + """ + if len(key) != crypto_secretbox_KEYBYTES: + raise exc.ValueError("Invalid key") + + if len(nonce) != crypto_secretbox_NONCEBYTES: + raise exc.ValueError("Invalid nonce") + + _clen = len(ciphertext) + + ensure( + _clen >= crypto_secretbox_MACBYTES, + "Input ciphertext must be at least {} long".format( + crypto_secretbox_MACBYTES + ), + raising=exc.TypeError, + ) + + _mlen = _clen - crypto_secretbox_MACBYTES + + plaintext = ffi.new("unsigned char[]", max(1, _mlen)) + + res = lib.crypto_secretbox_open_easy( + plaintext, ciphertext, _clen, nonce, key + ) + ensure( + res == 0, + "Decryption failed. Ciphertext failed verification", + raising=exc.CryptoError, + ) + + plaintext = ffi.buffer(plaintext, _mlen) + return plaintext[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_secretstream.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_secretstream.py new file mode 100644 index 0000000..59b074c --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_secretstream.py @@ -0,0 +1,358 @@ +# Copyright 2013-2018 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional, Tuple, Union, cast + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_secretstream_xchacha20poly1305_ABYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_abytes() +) +crypto_secretstream_xchacha20poly1305_HEADERBYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_headerbytes() +) +crypto_secretstream_xchacha20poly1305_KEYBYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_keybytes() +) +crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX: int = ( + lib.crypto_secretstream_xchacha20poly1305_messagebytes_max() +) +crypto_secretstream_xchacha20poly1305_STATEBYTES: int = ( + lib.crypto_secretstream_xchacha20poly1305_statebytes() +) + + +crypto_secretstream_xchacha20poly1305_TAG_MESSAGE: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_message() +) +crypto_secretstream_xchacha20poly1305_TAG_PUSH: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_push() +) +crypto_secretstream_xchacha20poly1305_TAG_REKEY: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_rekey() +) +crypto_secretstream_xchacha20poly1305_TAG_FINAL: int = ( + lib.crypto_secretstream_xchacha20poly1305_tag_final() +) + + +def crypto_secretstream_xchacha20poly1305_keygen() -> bytes: + """ + Generate a key for use with + :func:`.crypto_secretstream_xchacha20poly1305_init_push`. + + """ + keybuf = ffi.new( + "unsigned char[]", + crypto_secretstream_xchacha20poly1305_KEYBYTES, + ) + lib.crypto_secretstream_xchacha20poly1305_keygen(keybuf) + return ffi.buffer(keybuf)[:] + + +class crypto_secretstream_xchacha20poly1305_state: + """ + An object wrapping the crypto_secretstream_xchacha20poly1305 state. 
+ + """ + + __slots__ = ["statebuf", "rawbuf", "tagbuf"] + + def __init__(self) -> None: + """Initialize a clean state object.""" + ByteString = Union[bytes, bytearray, memoryview] + self.statebuf: ByteString = ffi.new( + "unsigned char[]", + crypto_secretstream_xchacha20poly1305_STATEBYTES, + ) + + self.rawbuf: Optional[ByteString] = None + self.tagbuf: Optional[ByteString] = None + + +def crypto_secretstream_xchacha20poly1305_init_push( + state: crypto_secretstream_xchacha20poly1305_state, key: bytes +) -> bytes: + """ + Initialize a crypto_secretstream_xchacha20poly1305 encryption buffer. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param key: must be + :data:`.crypto_secretstream_xchacha20poly1305_KEYBYTES` long + :type key: bytes + :return: header + :rtype: bytes + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(key, bytes), + "Key must be a bytes sequence", + raising=exc.TypeError, + ) + ensure( + len(key) == crypto_secretstream_xchacha20poly1305_KEYBYTES, + "Invalid key length", + raising=exc.ValueError, + ) + + headerbuf = ffi.new( + "unsigned char []", + crypto_secretstream_xchacha20poly1305_HEADERBYTES, + ) + + rc = lib.crypto_secretstream_xchacha20poly1305_init_push( + state.statebuf, headerbuf, key + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return ffi.buffer(headerbuf)[:] + + +def crypto_secretstream_xchacha20poly1305_push( + state: crypto_secretstream_xchacha20poly1305_state, + m: bytes, + ad: Optional[bytes] = None, + tag: int = crypto_secretstream_xchacha20poly1305_TAG_MESSAGE, +) -> bytes: + """ + Add an encrypted message to the secret stream. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param m: the message to encrypt, the maximum length of an individual + message is + :data:`.crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX`. + :type m: bytes + :param ad: additional data to include in the authentication tag + :type ad: bytes or None + :param tag: the message tag, usually + :data:`.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE` or + :data:`.crypto_secretstream_xchacha20poly1305_TAG_FINAL`. 
+ :type tag: int + :return: ciphertext + :rtype: bytes + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure(isinstance(m, bytes), "Message is not bytes", raising=exc.TypeError) + ensure( + len(m) <= crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX, + "Message is too long", + raising=exc.ValueError, + ) + ensure( + ad is None or isinstance(ad, bytes), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + clen = len(m) + crypto_secretstream_xchacha20poly1305_ABYTES + if state.rawbuf is None or len(state.rawbuf) < clen: + state.rawbuf = ffi.new("unsigned char[]", clen) + + if ad is None: + ad = ffi.NULL + adlen = 0 + else: + adlen = len(ad) + + rc = lib.crypto_secretstream_xchacha20poly1305_push( + state.statebuf, + state.rawbuf, + ffi.NULL, + m, + len(m), + ad, + adlen, + tag, + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + return ffi.buffer(state.rawbuf, clen)[:] + + +def crypto_secretstream_xchacha20poly1305_init_pull( + state: crypto_secretstream_xchacha20poly1305_state, + header: bytes, + key: bytes, +) -> None: + """ + Initialize a crypto_secretstream_xchacha20poly1305 decryption buffer. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param header: must be + :data:`.crypto_secretstream_xchacha20poly1305_HEADERBYTES` long + :type header: bytes + :param key: must be + :data:`.crypto_secretstream_xchacha20poly1305_KEYBYTES` long + :type key: bytes + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(header, bytes), + "Header must be a bytes sequence", + raising=exc.TypeError, + ) + ensure( + len(header) == crypto_secretstream_xchacha20poly1305_HEADERBYTES, + "Invalid header length", + raising=exc.ValueError, + ) + ensure( + isinstance(key, bytes), + "Key must be a bytes sequence", + raising=exc.TypeError, + ) + ensure( + len(key) == crypto_secretstream_xchacha20poly1305_KEYBYTES, + "Invalid key length", + raising=exc.ValueError, + ) + + if state.tagbuf is None: + state.tagbuf = ffi.new("unsigned char *") + + rc = lib.crypto_secretstream_xchacha20poly1305_init_pull( + state.statebuf, header, key + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + +def crypto_secretstream_xchacha20poly1305_pull( + state: crypto_secretstream_xchacha20poly1305_state, + c: bytes, + ad: Optional[bytes] = None, +) -> Tuple[bytes, int]: + """ + Read a decrypted message from the secret stream. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + :param c: the ciphertext to decrypt, the maximum length of an individual + ciphertext is + :data:`.crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX` + + :data:`.crypto_secretstream_xchacha20poly1305_ABYTES`. 
+ :type c: bytes + :param ad: additional data to include in the authentication tag + :type ad: bytes or None + :return: (message, tag) + :rtype: (bytes, int) + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + ensure( + state.tagbuf is not None, + ( + "State must be initialized using " + "crypto_secretstream_xchacha20poly1305_init_pull" + ), + raising=exc.ValueError, + ) + ensure( + isinstance(c, bytes), + "Ciphertext is not bytes", + raising=exc.TypeError, + ) + ensure( + len(c) >= crypto_secretstream_xchacha20poly1305_ABYTES, + "Ciphertext is too short", + raising=exc.ValueError, + ) + ensure( + len(c) + <= ( + crypto_secretstream_xchacha20poly1305_MESSAGEBYTES_MAX + + crypto_secretstream_xchacha20poly1305_ABYTES + ), + "Ciphertext is too long", + raising=exc.ValueError, + ) + ensure( + ad is None or isinstance(ad, bytes), + "Additional data must be bytes or None", + raising=exc.TypeError, + ) + + mlen = len(c) - crypto_secretstream_xchacha20poly1305_ABYTES + if state.rawbuf is None or len(state.rawbuf) < mlen: + state.rawbuf = ffi.new("unsigned char[]", mlen) + + if ad is None: + ad = ffi.NULL + adlen = 0 + else: + adlen = len(ad) + + rc = lib.crypto_secretstream_xchacha20poly1305_pull( + state.statebuf, + state.rawbuf, + ffi.NULL, + state.tagbuf, + c, + len(c), + ad, + adlen, + ) + ensure(rc == 0, "Unexpected failure", raising=exc.RuntimeError) + + # Cast safety: we `ensure` above that `state.tagbuf is not None`. + return ( + ffi.buffer(state.rawbuf, mlen)[:], + int(cast(bytes, state.tagbuf)[0]), + ) + + +def crypto_secretstream_xchacha20poly1305_rekey( + state: crypto_secretstream_xchacha20poly1305_state, +) -> None: + """ + Explicitly change the encryption key in the stream. + + Normally the stream is re-keyed as needed or an explicit ``tag`` of + :data:`.crypto_secretstream_xchacha20poly1305_TAG_REKEY` is added to a + message to ensure forward secrecy, but this method can be used instead + if the re-keying is controlled without adding the tag. + + :param state: a secretstream state object + :type state: crypto_secretstream_xchacha20poly1305_state + + """ + ensure( + isinstance(state, crypto_secretstream_xchacha20poly1305_state), + "State must be a crypto_secretstream_xchacha20poly1305_state object", + raising=exc.TypeError, + ) + lib.crypto_secretstream_xchacha20poly1305_rekey(state.statebuf) diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_shorthash.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_shorthash.py new file mode 100644 index 0000000..8f7d209 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_shorthash.py @@ -0,0 +1,81 @@ +# Copyright 2016 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
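The secretstream functions above are meant to be used as a push/pull pipeline: one state per direction, with the header transmitted alongside the ciphertext and chunks pulled in the order they were pushed. A minimal round-trip sketch, assuming nacl.bindings re-exports these names (it does in current PyNaCl releases):

```python
from nacl.bindings import (
    crypto_secretstream_xchacha20poly1305_keygen,
    crypto_secretstream_xchacha20poly1305_state,
    crypto_secretstream_xchacha20poly1305_init_push,
    crypto_secretstream_xchacha20poly1305_push,
    crypto_secretstream_xchacha20poly1305_init_pull,
    crypto_secretstream_xchacha20poly1305_pull,
    crypto_secretstream_xchacha20poly1305_TAG_FINAL,
)

key = crypto_secretstream_xchacha20poly1305_keygen()

# Sender: one state per stream; the header must travel with the ciphertext.
tx = crypto_secretstream_xchacha20poly1305_state()
header = crypto_secretstream_xchacha20poly1305_init_push(tx, key)
c1 = crypto_secretstream_xchacha20poly1305_push(tx, b"chunk 1")
c2 = crypto_secretstream_xchacha20poly1305_push(
    tx, b"chunk 2", tag=crypto_secretstream_xchacha20poly1305_TAG_FINAL
)

# Receiver: pull chunks in order; each pull returns (message, tag).
rx = crypto_secretstream_xchacha20poly1305_state()
crypto_secretstream_xchacha20poly1305_init_pull(rx, header, key)
m1, t1 = crypto_secretstream_xchacha20poly1305_pull(rx, c1)
m2, t2 = crypto_secretstream_xchacha20poly1305_pull(rx, c2)

assert (m1, m2) == (b"chunk 1", b"chunk 2")
assert t2 == crypto_secretstream_xchacha20poly1305_TAG_FINAL  # end of stream
```

The TAG_FINAL marker is what lets the receiver detect truncation: a stream that ends without it should be treated as incomplete.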
+ + +import nacl.exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +has_crypto_shorthash_siphashx24 = bool( + lib.PYNACL_HAS_CRYPTO_SHORTHASH_SIPHASHX24 +) + +BYTES: int = lib.crypto_shorthash_siphash24_bytes() +KEYBYTES: int = lib.crypto_shorthash_siphash24_keybytes() + +XBYTES = 0 +XKEYBYTES = 0 + +if has_crypto_shorthash_siphashx24: + XBYTES = lib.crypto_shorthash_siphashx24_bytes() + XKEYBYTES = lib.crypto_shorthash_siphashx24_keybytes() + + +def crypto_shorthash_siphash24(data: bytes, key: bytes) -> bytes: + """Compute a fast, cryptographic quality, keyed hash of the input data + + :param data: + :type data: bytes + :param key: len(key) must be equal to + :py:data:`.KEYBYTES` (16) + :type key: bytes + """ + if len(key) != KEYBYTES: + raise exc.ValueError( + "Key length must be exactly {} bytes".format(KEYBYTES) + ) + digest = ffi.new("unsigned char[]", BYTES) + rc = lib.crypto_shorthash_siphash24(digest, data, len(data), key) + + ensure(rc == 0, raising=exc.RuntimeError) + return ffi.buffer(digest, BYTES)[:] + + +def crypto_shorthash_siphashx24(data: bytes, key: bytes) -> bytes: + """Compute a fast, cryptographic quality, keyed hash of the input data + + :param data: + :type data: bytes + :param key: len(key) must be equal to + :py:data:`.XKEYBYTES` (16) + :type key: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + """ + ensure( + has_crypto_shorthash_siphashx24, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + if len(key) != XKEYBYTES: + raise exc.ValueError( + "Key length must be exactly {} bytes".format(XKEYBYTES) + ) + digest = ffi.new("unsigned char[]", XBYTES) + rc = lib.crypto_shorthash_siphashx24(digest, data, len(data), key) + + ensure(rc == 0, raising=exc.RuntimeError) + return ffi.buffer(digest, XBYTES)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/crypto_sign.py b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_sign.py new file mode 100644 index 0000000..f459f6a --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/crypto_sign.py @@ -0,0 +1,327 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Tuple + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +crypto_sign_BYTES: int = lib.crypto_sign_bytes() +# crypto_sign_SEEDBYTES = lib.crypto_sign_seedbytes() +crypto_sign_SEEDBYTES: int = lib.crypto_sign_secretkeybytes() // 2 +crypto_sign_PUBLICKEYBYTES: int = lib.crypto_sign_publickeybytes() +crypto_sign_SECRETKEYBYTES: int = lib.crypto_sign_secretkeybytes() + +crypto_sign_curve25519_BYTES: int = lib.crypto_box_secretkeybytes() + +crypto_sign_ed25519ph_STATEBYTES: int = lib.crypto_sign_ed25519ph_statebytes() + + +def crypto_sign_keypair() -> Tuple[bytes, bytes]: + """ + Returns a randomly generated public key and secret key. 
+ + :rtype: (bytes(public_key), bytes(secret_key)) + """ + pk = ffi.new("unsigned char[]", crypto_sign_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_sign_SECRETKEYBYTES) + + rc = lib.crypto_sign_keypair(pk, sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_sign_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_sign_SECRETKEYBYTES)[:], + ) + + +def crypto_sign_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]: + """ + Computes and returns the public key and secret key using the seed ``seed``. + + :param seed: bytes + :rtype: (bytes(public_key), bytes(secret_key)) + """ + if len(seed) != crypto_sign_SEEDBYTES: + raise exc.ValueError("Invalid seed") + + pk = ffi.new("unsigned char[]", crypto_sign_PUBLICKEYBYTES) + sk = ffi.new("unsigned char[]", crypto_sign_SECRETKEYBYTES) + + rc = lib.crypto_sign_seed_keypair(pk, sk, seed) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ( + ffi.buffer(pk, crypto_sign_PUBLICKEYBYTES)[:], + ffi.buffer(sk, crypto_sign_SECRETKEYBYTES)[:], + ) + + +def crypto_sign(message: bytes, sk: bytes) -> bytes: + """ + Signs the message ``message`` using the secret key ``sk`` and returns the + signed message. + + :param message: bytes + :param sk: bytes + :rtype: bytes + """ + signed = ffi.new("unsigned char[]", len(message) + crypto_sign_BYTES) + signed_len = ffi.new("unsigned long long *") + + rc = lib.crypto_sign(signed, signed_len, message, len(message), sk) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(signed, signed_len[0])[:] + + +def crypto_sign_open(signed: bytes, pk: bytes) -> bytes: + """ + Verifies the signature of the signed message ``signed`` using the public + key ``pk`` and returns the unsigned message. + + :param signed: bytes + :param pk: bytes + :rtype: bytes + """ + message = ffi.new("unsigned char[]", len(signed)) + message_len = ffi.new("unsigned long long *") + + if ( + lib.crypto_sign_open(message, message_len, signed, len(signed), pk) + != 0 + ): + raise exc.BadSignatureError("Signature was forged or corrupt") + + return ffi.buffer(message, message_len[0])[:] + + +def crypto_sign_ed25519_pk_to_curve25519(public_key_bytes: bytes) -> bytes: + """ + Converts a public Ed25519 key (encoded as bytes ``public_key_bytes``) to + a public Curve25519 key as bytes. + + Raises a ValueError if ``public_key_bytes`` is not of length + ``crypto_sign_PUBLICKEYBYTES`` + + :param public_key_bytes: bytes + :rtype: bytes + """ + if len(public_key_bytes) != crypto_sign_PUBLICKEYBYTES: + raise exc.ValueError("Invalid curve public key") + + curve_public_key_len = crypto_sign_curve25519_BYTES + curve_public_key = ffi.new("unsigned char[]", curve_public_key_len) + + rc = lib.crypto_sign_ed25519_pk_to_curve25519( + curve_public_key, public_key_bytes + ) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(curve_public_key, curve_public_key_len)[:] + + +def crypto_sign_ed25519_sk_to_curve25519(secret_key_bytes: bytes) -> bytes: + """ + Converts a secret Ed25519 key (encoded as bytes ``secret_key_bytes``) to + a secret Curve25519 key as bytes. 
+
+    Raises a ValueError if ``secret_key_bytes`` is not of length
+    ``crypto_sign_SECRETKEYBYTES``
+
+    :param secret_key_bytes: bytes
+    :rtype: bytes
+    """
+    if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES:
+        raise exc.ValueError("Invalid curve secret key")
+
+    curve_secret_key_len = crypto_sign_curve25519_BYTES
+    curve_secret_key = ffi.new("unsigned char[]", curve_secret_key_len)
+
+    rc = lib.crypto_sign_ed25519_sk_to_curve25519(
+        curve_secret_key, secret_key_bytes
+    )
+    ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError)
+
+    return ffi.buffer(curve_secret_key, curve_secret_key_len)[:]
+
+
+def crypto_sign_ed25519_sk_to_pk(secret_key_bytes: bytes) -> bytes:
+    """
+    Extract the public Ed25519 key from a secret Ed25519 key (encoded
+    as bytes ``secret_key_bytes``).
+
+    Raises a ValueError if ``secret_key_bytes`` is not of length
+    ``crypto_sign_SECRETKEYBYTES``
+
+    :param secret_key_bytes: bytes
+    :rtype: bytes
+    """
+    if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES:
+        raise exc.ValueError("Invalid secret key")
+
+    return secret_key_bytes[crypto_sign_SEEDBYTES:]
+
+
+def crypto_sign_ed25519_sk_to_seed(secret_key_bytes: bytes) -> bytes:
+    """
+    Extract the seed from a secret Ed25519 key (encoded
+    as bytes ``secret_key_bytes``).
+
+    Raises a ValueError if ``secret_key_bytes`` is not of length
+    ``crypto_sign_SECRETKEYBYTES``
+
+    :param secret_key_bytes: bytes
+    :rtype: bytes
+    """
+    if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES:
+        raise exc.ValueError("Invalid secret key")
+
+    return secret_key_bytes[:crypto_sign_SEEDBYTES]
+
+
+class crypto_sign_ed25519ph_state:
+    """
+    State object wrapping the sha-512 state used in ed25519ph computation
+    """
+
+    __slots__ = ["state"]
+
+    def __init__(self) -> None:
+        self.state: bytes = ffi.new(
+            "unsigned char[]", crypto_sign_ed25519ph_STATEBYTES
+        )
+
+        rc = lib.crypto_sign_ed25519ph_init(self.state)
+
+        ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError)
+
+
+def crypto_sign_ed25519ph_update(
+    edph: crypto_sign_ed25519ph_state, pmsg: bytes
+) -> None:
+    """
+    Update the hash state wrapped in edph
+
+    :param edph: the ed25519ph state being updated
+    :type edph: crypto_sign_ed25519ph_state
+    :param pmsg: the partial message
+    :type pmsg: bytes
+    :rtype: None
+    """
+    ensure(
+        isinstance(edph, crypto_sign_ed25519ph_state),
+        "edph parameter must be a ed25519ph_state object",
+        raising=exc.TypeError,
+    )
+    ensure(
+        isinstance(pmsg, bytes),
+        "pmsg parameter must be a bytes object",
+        raising=exc.TypeError,
+    )
+    rc = lib.crypto_sign_ed25519ph_update(edph.state, pmsg, len(pmsg))
+    ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError)
+
+
+def crypto_sign_ed25519ph_final_create(
+    edph: crypto_sign_ed25519ph_state, sk: bytes
+) -> bytes:
+    """
+    Create a signature for the data hashed in edph
+    using the secret key sk
+
+    :param edph: the ed25519ph state for the data
+        being signed
+    :type edph: crypto_sign_ed25519ph_state
+    :param sk: the ed25519 secret key (secret and public part)
+    :type sk: bytes
+    :return: ed25519ph signature
+    :rtype: bytes
+    """
+    ensure(
+        isinstance(edph, crypto_sign_ed25519ph_state),
+        "edph parameter must be a ed25519ph_state object",
+        raising=exc.TypeError,
+    )
+    ensure(
+        isinstance(sk, bytes),
+        "secret key parameter must be a bytes object",
+        raising=exc.TypeError,
+    )
+    ensure(
+        len(sk) == crypto_sign_SECRETKEYBYTES,
+        ("secret key must be {} bytes long").format(
+            crypto_sign_SECRETKEYBYTES
+        ),
+        raising=exc.TypeError,
+    )
+    signature = 
ffi.new("unsigned char[]", crypto_sign_BYTES) + rc = lib.crypto_sign_ed25519ph_final_create( + edph.state, signature, ffi.NULL, sk + ) + ensure(rc == 0, "Unexpected library error", raising=exc.RuntimeError) + + return ffi.buffer(signature, crypto_sign_BYTES)[:] + + +def crypto_sign_ed25519ph_final_verify( + edph: crypto_sign_ed25519ph_state, signature: bytes, pk: bytes +) -> bool: + """ + Verify a prehashed signature using the public key pk + + :param edph: the ed25519ph state for the data + being verified + :type edph: crypto_sign_ed25519ph_state + :param signature: the signature being verified + :type signature: bytes + :param pk: the ed25519 public part of the signing key + :type pk: bytes + :return: True if the signature is valid + :rtype: boolean + :raises exc.BadSignatureError: if the signature is not valid + """ + ensure( + isinstance(edph, crypto_sign_ed25519ph_state), + "edph parameter must be a ed25519ph_state object", + raising=exc.TypeError, + ) + ensure( + isinstance(signature, bytes), + "signature parameter must be a bytes object", + raising=exc.TypeError, + ) + ensure( + len(signature) == crypto_sign_BYTES, + ("signature must be {} bytes long").format(crypto_sign_BYTES), + raising=exc.TypeError, + ) + ensure( + isinstance(pk, bytes), + "public key parameter must be a bytes object", + raising=exc.TypeError, + ) + ensure( + len(pk) == crypto_sign_PUBLICKEYBYTES, + ("public key must be {} bytes long").format( + crypto_sign_PUBLICKEYBYTES + ), + raising=exc.TypeError, + ) + rc = lib.crypto_sign_ed25519ph_final_verify(edph.state, signature, pk) + if rc != 0: + raise exc.BadSignatureError("Signature was forged or corrupt") + + return True diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/randombytes.py b/venv/lib/python3.12/site-packages/nacl/bindings/randombytes.py new file mode 100644 index 0000000..ed76deb --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/randombytes.py @@ -0,0 +1,51 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib + +randombytes_SEEDBYTES: int = lib.randombytes_seedbytes() + + +def randombytes(size: int) -> bytes: + """ + Returns ``size`` number of random bytes from a cryptographically secure + random source. 
+ + :param size: int + :rtype: bytes + """ + buf = ffi.new("unsigned char[]", size) + lib.randombytes(buf, size) + return ffi.buffer(buf, size)[:] + + +def randombytes_buf_deterministic(size: int, seed: bytes) -> bytes: + """ + Returns ``size`` number of deterministically generated pseudorandom bytes + from a seed + + :param size: int + :param seed: bytes + :rtype: bytes + """ + if len(seed) != randombytes_SEEDBYTES: + raise exc.TypeError( + "Deterministic random bytes must be generated from 32 bytes" + ) + + buf = ffi.new("unsigned char[]", size) + lib.randombytes_buf_deterministic(buf, size, seed) + return ffi.buffer(buf, size)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/sodium_core.py b/venv/lib/python3.12/site-packages/nacl/bindings/sodium_core.py new file mode 100644 index 0000000..7ebb84c --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/sodium_core.py @@ -0,0 +1,33 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from nacl import exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +def _sodium_init() -> None: + ensure( + lib.sodium_init() != -1, + "Could not initialize sodium", + raising=exc.RuntimeError, + ) + + +def sodium_init() -> None: + """ + Initializes sodium, picking the best implementations available for this + machine. + """ + ffi.init_once(_sodium_init, "libsodium") diff --git a/venv/lib/python3.12/site-packages/nacl/bindings/utils.py b/venv/lib/python3.12/site-packages/nacl/bindings/utils.py new file mode 100644 index 0000000..0ff22e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/bindings/utils.py @@ -0,0 +1,141 @@ +# Copyright 2013-2017 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
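The two randombytes bindings above serve different purposes: randombytes draws from the OS CSPRNG, while randombytes_buf_deterministic expands a fixed 32-byte seed into a reproducible stream, which is useful for tests but never for production keys. A minimal sketch, importing from the submodule shown in this diff to avoid assumptions about the package-level re-exports:

```python
from nacl.bindings.randombytes import (
    randombytes,
    randombytes_buf_deterministic,
    randombytes_SEEDBYTES,
)

nonce = randombytes(24)  # e.g. an XSalsa20 nonce; fresh output on every call
assert len(nonce) == 24

seed = b"\x42" * randombytes_SEEDBYTES  # seed must be exactly 32 bytes
a = randombytes_buf_deterministic(64, seed)
b = randombytes_buf_deterministic(64, seed)
assert a == b  # same seed, same stream -- deterministic by design
```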
+ +import nacl.exceptions as exc +from nacl._sodium import ffi, lib +from nacl.exceptions import ensure + + +def sodium_memcmp(inp1: bytes, inp2: bytes) -> bool: + """ + Compare contents of two memory regions in constant time + """ + ensure(isinstance(inp1, bytes), raising=exc.TypeError) + ensure(isinstance(inp2, bytes), raising=exc.TypeError) + + ln = max(len(inp1), len(inp2)) + + buf1 = ffi.new("char []", ln) + buf2 = ffi.new("char []", ln) + + ffi.memmove(buf1, inp1, len(inp1)) + ffi.memmove(buf2, inp2, len(inp2)) + + eqL = len(inp1) == len(inp2) + eqC = lib.sodium_memcmp(buf1, buf2, ln) == 0 + + return eqL and eqC + + +def sodium_pad(s: bytes, blocksize: int) -> bytes: + """ + Pad the input bytearray ``s`` to a multiple of ``blocksize`` + using the ISO/IEC 7816-4 algorithm + + :param s: input bytes string + :type s: bytes + :param blocksize: + :type blocksize: int + :return: padded string + :rtype: bytes + """ + ensure(isinstance(s, bytes), raising=exc.TypeError) + ensure(isinstance(blocksize, int), raising=exc.TypeError) + if blocksize <= 0: + raise exc.ValueError + s_len = len(s) + m_len = s_len + blocksize + buf = ffi.new("unsigned char []", m_len) + p_len = ffi.new("size_t []", 1) + ffi.memmove(buf, s, s_len) + rc = lib.sodium_pad(p_len, buf, s_len, blocksize, m_len) + ensure(rc == 0, "Padding failure", raising=exc.CryptoError) + return ffi.buffer(buf, p_len[0])[:] + + +def sodium_unpad(s: bytes, blocksize: int) -> bytes: + """ + Remove ISO/IEC 7816-4 padding from the input byte array ``s`` + + :param s: input bytes string + :type s: bytes + :param blocksize: + :type blocksize: int + :return: unpadded string + :rtype: bytes + """ + ensure(isinstance(s, bytes), raising=exc.TypeError) + ensure(isinstance(blocksize, int), raising=exc.TypeError) + s_len = len(s) + u_len = ffi.new("size_t []", 1) + rc = lib.sodium_unpad(u_len, s, s_len, blocksize) + if rc != 0: + raise exc.CryptoError("Unpadding failure") + return s[: u_len[0]] + + +def sodium_increment(inp: bytes) -> bytes: + """ + Increment the value of a byte-sequence interpreted + as the little-endian representation of a unsigned big integer. + + :param inp: input bytes buffer + :type inp: bytes + :return: a byte-sequence representing, as a little-endian + unsigned big integer, the value ``to_int(inp)`` + incremented by one. + :rtype: bytes + + """ + ensure(isinstance(inp, bytes), raising=exc.TypeError) + + ln = len(inp) + buf = ffi.new("unsigned char []", ln) + + ffi.memmove(buf, inp, ln) + + lib.sodium_increment(buf, ln) + + return ffi.buffer(buf, ln)[:] + + +def sodium_add(a: bytes, b: bytes) -> bytes: + """ + Given a couple of *same-sized* byte sequences, interpreted as the + little-endian representation of two unsigned integers, compute + the modular addition of the represented values, in constant time for + a given common length of the byte sequences. 
+ + :param a: input bytes buffer + :type a: bytes + :param b: input bytes buffer + :type b: bytes + :return: a byte-sequence representing, as a little-endian big integer, + the integer value of ``(to_int(a) + to_int(b)) mod 2^(8*len(a))`` + :rtype: bytes + """ + ensure(isinstance(a, bytes), raising=exc.TypeError) + ensure(isinstance(b, bytes), raising=exc.TypeError) + ln = len(a) + ensure(len(b) == ln, raising=exc.TypeError) + + buf_a = ffi.new("unsigned char []", ln) + buf_b = ffi.new("unsigned char []", ln) + + ffi.memmove(buf_a, a, ln) + ffi.memmove(buf_b, b, ln) + + lib.sodium_add(buf_a, buf_b, ln) + + return ffi.buffer(buf_a, ln)[:] diff --git a/venv/lib/python3.12/site-packages/nacl/encoding.py b/venv/lib/python3.12/site-packages/nacl/encoding.py new file mode 100644 index 0000000..6740cfb --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/encoding.py @@ -0,0 +1,105 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import base64 +import binascii +from abc import ABCMeta, abstractmethod +from typing import SupportsBytes, Type + + +# TODO: when the minimum supported version of Python is 3.8, we can import +# Protocol from typing, and replace Encoder with a Protocol instead. +class _Encoder(metaclass=ABCMeta): + @staticmethod + @abstractmethod + def encode(data: bytes) -> bytes: + """Transform raw data to encoded data.""" + + @staticmethod + @abstractmethod + def decode(data: bytes) -> bytes: + """Transform encoded data back to raw data. + + Decoding after encoding should be a no-op, i.e. `decode(encode(x)) == x`. + """ + + +# Functions that use encoders are passed a subclass of _Encoder, not an instance +# (because the methods are all static). Let's gloss over that detail by defining +# an alias for Type[_Encoder]. 
+Encoder = Type[_Encoder] + + +class RawEncoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return data + + @staticmethod + def decode(data: bytes) -> bytes: + return data + + +class HexEncoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return binascii.hexlify(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return binascii.unhexlify(data) + + +class Base16Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.b16encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.b16decode(data) + + +class Base32Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.b32encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.b32decode(data) + + +class Base64Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.b64encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.b64decode(data) + + +class URLSafeBase64Encoder(_Encoder): + @staticmethod + def encode(data: bytes) -> bytes: + return base64.urlsafe_b64encode(data) + + @staticmethod + def decode(data: bytes) -> bytes: + return base64.urlsafe_b64decode(data) + + +class Encodable: + def encode(self: SupportsBytes, encoder: Encoder = RawEncoder) -> bytes: + return encoder.encode(bytes(self)) diff --git a/venv/lib/python3.12/site-packages/nacl/exceptions.py b/venv/lib/python3.12/site-packages/nacl/exceptions.py new file mode 100644 index 0000000..40b1635 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/exceptions.py @@ -0,0 +1,88 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# We create a clone of various builtin Exception types which additionally +# inherit from CryptoError. Below, we refer to the parent types via the +# `builtins` namespace, so mypy can distinguish between (e.g.) +# `nacl.exceptions.RuntimeError` and `builtins.RuntimeError`. +import builtins +from typing import Type + + +class CryptoError(Exception): + """ + Base exception for all nacl related errors + """ + + +class BadSignatureError(CryptoError): + """ + Raised when the signature was forged or otherwise corrupt. + """ + + +class RuntimeError(builtins.RuntimeError, CryptoError): + pass + + +class AssertionError(builtins.AssertionError, CryptoError): + pass + + +class TypeError(builtins.TypeError, CryptoError): + pass + + +class ValueError(builtins.ValueError, CryptoError): + pass + + +class InvalidkeyError(CryptoError): + pass + + +class CryptPrefixError(InvalidkeyError): + pass + + +class UnavailableError(RuntimeError): + """ + is a subclass of :class:`~nacl.exceptions.RuntimeError`, raised when + trying to call functions not available in a minimal build of + libsodium or due to hardware limitations. 
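+
+    Code that may run against a minimal build can guard optional calls
+    with ``try: ... except nacl.exceptions.UnavailableError: ...``.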
+ """ + + pass + + +def ensure(cond: bool, *args: object, **kwds: Type[Exception]) -> None: + """ + Return if a condition is true, otherwise raise a caller-configurable + :py:class:`Exception` + :param bool cond: the condition to be checked + :param sequence args: the arguments to be passed to the exception's + constructor + The only accepted named parameter is `raising` used to configure the + exception to be raised if `cond` is not `True` + """ + _CHK_UNEXP = "check_condition() got an unexpected keyword argument {0}" + + raising = kwds.pop("raising", AssertionError) + if kwds: + raise TypeError(_CHK_UNEXP.format(repr(kwds.popitem()[0]))) + + if cond is True: + return + raise raising(*args) diff --git a/venv/lib/python3.12/site-packages/nacl/hash.py b/venv/lib/python3.12/site-packages/nacl/hash.py new file mode 100644 index 0000000..9f81590 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/hash.py @@ -0,0 +1,181 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +The :mod:`nacl.hash` module exposes one-shot interfaces +for libsodium selected hash primitives and the constants needed +for their usage. +""" + +import nacl.bindings +import nacl.encoding + + +BLAKE2B_BYTES = nacl.bindings.crypto_generichash_BYTES +"""Default digest size for :func:`blake2b` hash""" +BLAKE2B_BYTES_MIN = nacl.bindings.crypto_generichash_BYTES_MIN +"""Minimum allowed digest size for :func:`blake2b` hash""" +BLAKE2B_BYTES_MAX = nacl.bindings.crypto_generichash_BYTES_MAX +"""Maximum allowed digest size for :func:`blake2b` hash""" +BLAKE2B_KEYBYTES = nacl.bindings.crypto_generichash_KEYBYTES +"""Default size of the ``key`` byte array for :func:`blake2b` hash""" +BLAKE2B_KEYBYTES_MIN = nacl.bindings.crypto_generichash_KEYBYTES_MIN +"""Minimum allowed size of the ``key`` byte array for :func:`blake2b` hash""" +BLAKE2B_KEYBYTES_MAX = nacl.bindings.crypto_generichash_KEYBYTES_MAX +"""Maximum allowed size of the ``key`` byte array for :func:`blake2b` hash""" +BLAKE2B_SALTBYTES = nacl.bindings.crypto_generichash_SALTBYTES +"""Maximum allowed length of the ``salt`` byte array for +:func:`blake2b` hash""" +BLAKE2B_PERSONALBYTES = nacl.bindings.crypto_generichash_PERSONALBYTES +"""Maximum allowed length of the ``personalization`` +byte array for :func:`blake2b` hash""" + +SIPHASH_BYTES = nacl.bindings.crypto_shorthash_siphash24_BYTES +"""Size of the :func:`siphash24` digest""" +SIPHASH_KEYBYTES = nacl.bindings.crypto_shorthash_siphash24_KEYBYTES +"""Size of the secret ``key`` used by the :func:`siphash24` MAC""" + +SIPHASHX_AVAILABLE = nacl.bindings.has_crypto_shorthash_siphashx24 +"""``True`` if :func:`siphashx24` is available to be called""" + +SIPHASHX_BYTES = nacl.bindings.crypto_shorthash_siphashx24_BYTES +"""Size of the :func:`siphashx24` digest""" +SIPHASHX_KEYBYTES = nacl.bindings.crypto_shorthash_siphashx24_KEYBYTES +"""Size of the secret ``key`` used by the :func:`siphashx24` MAC""" + +_b2b_hash = 
nacl.bindings.crypto_generichash_blake2b_salt_personal +_sip_hash = nacl.bindings.crypto_shorthash_siphash24 +_sip_hashx = nacl.bindings.crypto_shorthash_siphashx24 + + +def sha256( + message: bytes, encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder +) -> bytes: + """ + Hashes ``message`` with SHA256. + + :param message: The message to hash. + :type message: bytes + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes + """ + return encoder.encode(nacl.bindings.crypto_hash_sha256(message)) + + +def sha512( + message: bytes, encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder +) -> bytes: + """ + Hashes ``message`` with SHA512. + + :param message: The message to hash. + :type message: bytes + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes + """ + return encoder.encode(nacl.bindings.crypto_hash_sha512(message)) + + +def blake2b( + data: bytes, + digest_size: int = BLAKE2B_BYTES, + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", + encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder, +) -> bytes: + """ + Hashes ``data`` with blake2b. + + :param data: the digest input byte sequence + :type data: bytes + :param digest_size: the requested digest size; must be at most + :const:`BLAKE2B_BYTES_MAX`; + the default digest size is + :const:`BLAKE2B_BYTES` + :type digest_size: int + :param key: the key to be set for keyed MAC/PRF usage; if set, the key + must be at most :data:`~nacl.hash.BLAKE2B_KEYBYTES_MAX` long + :type key: bytes + :param salt: an initialization salt at most + :const:`BLAKE2B_SALTBYTES` long; + it will be zero-padded if needed + :type salt: bytes + :param person: a personalization string at most + :const:`BLAKE2B_PERSONALBYTES` long; + it will be zero-padded if needed + :type person: bytes + :param encoder: the encoder to use on returned digest + :type encoder: class + :returns: The hashed message. + :rtype: bytes + """ + + digest = _b2b_hash( + data, digest_size=digest_size, key=key, salt=salt, person=person + ) + return encoder.encode(digest) + + +generichash = blake2b + + +def siphash24( + message: bytes, + key: bytes = b"", + encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder, +) -> bytes: + """ + Computes a keyed MAC of ``message`` using the short-input-optimized + siphash-2-4 construction. + + :param message: The message to hash. + :type message: bytes + :param key: the message authentication key for the siphash MAC construct + :type key: bytes(:const:`SIPHASH_KEYBYTES`) + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes(:const:`SIPHASH_BYTES`) + """ + digest = _sip_hash(message, key) + return encoder.encode(digest) + + +shorthash = siphash24 + + +def siphashx24( + message: bytes, + key: bytes = b"", + encoder: nacl.encoding.Encoder = nacl.encoding.HexEncoder, +) -> bytes: + """ + Computes a keyed MAC of ``message`` using the 128 bit variant of the + siphash-2-4 construction. + + :param message: The message to hash. + :type message: bytes + :param key: the message authentication key for the siphash MAC construct + :type key: bytes(:const:`SIPHASHX_KEYBYTES`) + :param encoder: A class that is able to encode the hashed message. + :returns: The hashed message. + :rtype: bytes(:const:`SIPHASHX_BYTES`) + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. 
versionadded:: 1.2 + """ + digest = _sip_hashx(message, key) + return encoder.encode(digest) diff --git a/venv/lib/python3.12/site-packages/nacl/hashlib.py b/venv/lib/python3.12/site-packages/nacl/hashlib.py new file mode 100644 index 0000000..9d289da --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/hashlib.py @@ -0,0 +1,143 @@ +# Copyright 2016-2019 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import binascii +from typing import NoReturn + +import nacl.bindings +from nacl.utils import bytes_as_string + +BYTES = nacl.bindings.crypto_generichash_BYTES +BYTES_MIN = nacl.bindings.crypto_generichash_BYTES_MIN +BYTES_MAX = nacl.bindings.crypto_generichash_BYTES_MAX +KEYBYTES = nacl.bindings.crypto_generichash_KEYBYTES +KEYBYTES_MIN = nacl.bindings.crypto_generichash_KEYBYTES_MIN +KEYBYTES_MAX = nacl.bindings.crypto_generichash_KEYBYTES_MAX +SALTBYTES = nacl.bindings.crypto_generichash_SALTBYTES +PERSONALBYTES = nacl.bindings.crypto_generichash_PERSONALBYTES + +SCRYPT_AVAILABLE = nacl.bindings.has_crypto_pwhash_scryptsalsa208sha256 + +_b2b_init = nacl.bindings.crypto_generichash_blake2b_init +_b2b_final = nacl.bindings.crypto_generichash_blake2b_final +_b2b_update = nacl.bindings.crypto_generichash_blake2b_update + + +class blake2b: + """ + :py:mod:`hashlib` API compatible blake2b algorithm implementation + """ + + MAX_DIGEST_SIZE = BYTES + MAX_KEY_SIZE = KEYBYTES_MAX + PERSON_SIZE = PERSONALBYTES + SALT_SIZE = SALTBYTES + + def __init__( + self, + data: bytes = b"", + digest_size: int = BYTES, + key: bytes = b"", + salt: bytes = b"", + person: bytes = b"", + ): + """ + :py:class:`.blake2b` algorithm initializer + + :param data: + :type data: bytes + :param int digest_size: the requested digest size; must be + at most :py:attr:`.MAX_DIGEST_SIZE`; + the default digest size is :py:data:`.BYTES` + :param key: the key to be set for keyed MAC/PRF usage; if set, + the key must be at most :py:data:`.KEYBYTES_MAX` long + :type key: bytes + :param salt: a initialization salt at most + :py:attr:`.SALT_SIZE` long; it will be zero-padded + if needed + :type salt: bytes + :param person: a personalization string at most + :py:attr:`.PERSONAL_SIZE` long; it will be zero-padded + if needed + :type person: bytes + """ + + self._state = _b2b_init( + key=key, salt=salt, person=person, digest_size=digest_size + ) + self._digest_size = digest_size + + if data: + self.update(data) + + @property + def digest_size(self) -> int: + return self._digest_size + + @property + def block_size(self) -> int: + return 128 + + @property + def name(self) -> str: + return "blake2b" + + def update(self, data: bytes) -> None: + _b2b_update(self._state, data) + + def digest(self) -> bytes: + _st = self._state.copy() + return _b2b_final(_st) + + def hexdigest(self) -> str: + return bytes_as_string(binascii.hexlify(self.digest())) + + def copy(self) -> "blake2b": + _cp = type(self)(digest_size=self.digest_size) + _st = self._state.copy() + _cp._state = _st + return _cp + + def 
__reduce__(self) -> NoReturn: + """ + Raise the same exception as hashlib's blake implementation + on copy.copy() + """ + raise TypeError( + "can't pickle {} objects".format(self.__class__.__name__) + ) + + +def scrypt( + password: bytes, + salt: bytes = b"", + n: int = 2**20, + r: int = 8, + p: int = 1, + maxmem: int = 2**25, + dklen: int = 64, +) -> bytes: + """ + Derive a cryptographic key using the scrypt KDF. + + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + Implements the same signature as the ``hashlib.scrypt`` implemented + in cpython version 3.6 + """ + return nacl.bindings.crypto_pwhash_scryptsalsa208sha256_ll( + password, salt, n, r, p, maxmem=maxmem, dklen=dklen + ) diff --git a/venv/lib/python3.12/site-packages/nacl/public.py b/venv/lib/python3.12/site-packages/nacl/public.py new file mode 100644 index 0000000..a6fc958 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/public.py @@ -0,0 +1,421 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import ClassVar, Generic, Optional, Type, TypeVar + +import nacl.bindings +from nacl import encoding +from nacl import exceptions as exc +from nacl.encoding import Encoder +from nacl.utils import EncryptedMessage, StringFixer, random + + +class PublicKey(encoding.Encodable, StringFixer): + """ + The public key counterpart to an Curve25519 :class:`nacl.public.PrivateKey` + for encrypting messages. + + :param public_key: [:class:`bytes`] Encoded Curve25519 public key + :param encoder: A class that is able to decode the `public_key` + + :cvar SIZE: The size that the public key is required to be + """ + + SIZE: ClassVar[int] = nacl.bindings.crypto_box_PUBLICKEYBYTES + + def __init__( + self, + public_key: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + self._public_key = encoder.decode(public_key) + if not isinstance(self._public_key, bytes): + raise exc.TypeError("PublicKey must be created from 32 bytes") + + if len(self._public_key) != self.SIZE: + raise exc.ValueError( + "The public key must be exactly {} bytes long".format( + self.SIZE + ) + ) + + def __bytes__(self) -> bytes: + return self._public_key + + def __hash__(self) -> int: + return hash(bytes(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return nacl.bindings.sodium_memcmp(bytes(self), bytes(other)) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + +class PrivateKey(encoding.Encodable, StringFixer): + """ + Private key for decrypting messages using the Curve25519 algorithm. + + .. warning:: This **must** be protected and remain secret. 
Anyone who + knows the value of your :class:`~nacl.public.PrivateKey` can decrypt + any message encrypted by the corresponding + :class:`~nacl.public.PublicKey` + + :param private_key: The private key used to decrypt messages + :param encoder: The encoder class used to decode the given keys + + :cvar SIZE: The size that the private key is required to be + :cvar SEED_SIZE: The size that the seed used to generate the + private key is required to be + """ + + SIZE: ClassVar[int] = nacl.bindings.crypto_box_SECRETKEYBYTES + SEED_SIZE: ClassVar[int] = nacl.bindings.crypto_box_SEEDBYTES + + def __init__( + self, + private_key: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + # Decode the secret_key + private_key = encoder.decode(private_key) + # verify the given secret key type and size are correct + if not ( + isinstance(private_key, bytes) and len(private_key) == self.SIZE + ): + raise exc.TypeError( + ( + "PrivateKey must be created from a {} bytes long raw secret key" + ).format(self.SIZE) + ) + + raw_public_key = nacl.bindings.crypto_scalarmult_base(private_key) + + self._private_key = private_key + self.public_key = PublicKey(raw_public_key) + + @classmethod + def from_seed( + cls, + seed: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> "PrivateKey": + """ + Generate a PrivateKey using a deterministic construction + starting from a caller-provided seed + + .. warning:: The seed **must** be high-entropy; therefore, + its generator **must** be a cryptographic quality + random function like, for example, :func:`~nacl.utils.random`. + + .. warning:: The seed **must** be protected and remain secret. + Anyone who knows the seed is really in possession of + the corresponding PrivateKey. + + :param seed: The seed used to generate the private key + :rtype: :class:`~nacl.public.PrivateKey` + """ + # decode the seed + seed = encoder.decode(seed) + # Verify the given seed type and size are correct + if not (isinstance(seed, bytes) and len(seed) == cls.SEED_SIZE): + raise exc.TypeError( + ( + "PrivateKey seed must be a {} bytes long binary sequence" + ).format(cls.SEED_SIZE) + ) + # generate a raw key pair from the given seed + raw_pk, raw_sk = nacl.bindings.crypto_box_seed_keypair(seed) + # construct a instance from the raw secret key + return cls(raw_sk) + + def __bytes__(self) -> bytes: + return self._private_key + + def __hash__(self) -> int: + return hash((type(self), bytes(self.public_key))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return self.public_key == other.public_key + + def __ne__(self, other: object) -> bool: + return not (self == other) + + @classmethod + def generate(cls) -> "PrivateKey": + """ + Generates a random :class:`~nacl.public.PrivateKey` object + + :rtype: :class:`~nacl.public.PrivateKey` + """ + return cls(random(PrivateKey.SIZE), encoder=encoding.RawEncoder) + + +_Box = TypeVar("_Box", bound="Box") + + +class Box(encoding.Encodable, StringFixer): + """ + The Box class boxes and unboxes messages between a pair of keys + + The ciphertexts generated by :class:`~nacl.public.Box` include a 16 + byte authenticator which is checked as part of the decryption. An invalid + authenticator will cause the decrypt function to raise an exception. The + authenticator is not a signature. Once you've decrypted the message you've + demonstrated the ability to create arbitrary valid message, so messages you + send are repudiable. For non-repudiable messages, sign them after + encryption. 
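+
+    A minimal usage sketch (names are illustrative)::
+
+        from nacl.public import PrivateKey, Box
+
+        sk_alice = PrivateKey.generate()
+        sk_bob = PrivateKey.generate()
+
+        # Alice encrypts to Bob; Bob opens it with the mirror-image Box.
+        ciphertext = Box(sk_alice, sk_bob.public_key).encrypt(b"hello")
+        assert Box(sk_bob, sk_alice.public_key).decrypt(ciphertext) == b"hello"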
+ + :param private_key: :class:`~nacl.public.PrivateKey` used to encrypt and + decrypt messages + :param public_key: :class:`~nacl.public.PublicKey` used to encrypt and + decrypt messages + + :cvar NONCE_SIZE: The size that the nonce is required to be. + """ + + NONCE_SIZE: ClassVar[int] = nacl.bindings.crypto_box_NONCEBYTES + _shared_key: bytes + + def __init__(self, private_key: PrivateKey, public_key: PublicKey): + if not isinstance(private_key, PrivateKey) or not isinstance( + public_key, PublicKey + ): + raise exc.TypeError( + "Box must be created from a PrivateKey and a PublicKey" + ) + self._shared_key = nacl.bindings.crypto_box_beforenm( + public_key.encode(encoder=encoding.RawEncoder), + private_key.encode(encoder=encoding.RawEncoder), + ) + + def __bytes__(self) -> bytes: + return self._shared_key + + @classmethod + def decode( + cls: Type[_Box], encoded: bytes, encoder: Encoder = encoding.RawEncoder + ) -> _Box: + """ + Alternative constructor. Creates a Box from an existing Box's shared key. + """ + # Create an empty box + box: _Box = cls.__new__(cls) + + # Assign our decoded value to the shared key of the box + box._shared_key = encoder.decode(encoded) + + return box + + def encrypt( + self, + plaintext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> EncryptedMessage: + """ + Encrypts the plaintext message using the given `nonce` (or generates + one randomly if omitted) and returns the ciphertext encoded with the + encoder. + + .. warning:: It is **VITALLY** important that the nonce is a nonce, + i.e. it is a number used only once for any given key. If you fail + to do this, you compromise the privacy of the messages encrypted. + + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param nonce: [:class:`bytes`] The nonce to use in the encryption + :param encoder: The encoder to use to encode the ciphertext + :rtype: [:class:`nacl.utils.EncryptedMessage`] + """ + if nonce is None: + nonce = random(self.NONCE_SIZE) + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE + ) + + ciphertext = nacl.bindings.crypto_box_easy_afternm( + plaintext, + nonce, + self._shared_key, + ) + + encoded_nonce = encoder.encode(nonce) + encoded_ciphertext = encoder.encode(ciphertext) + + return EncryptedMessage._from_parts( + encoded_nonce, + encoded_ciphertext, + encoder.encode(nonce + ciphertext), + ) + + def decrypt( + self, + ciphertext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the `nonce` (explicitly, when passed as a + parameter or implicitly, when omitted, as part of the ciphertext) and + returns the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param nonce: [:class:`bytes`] The nonce used when encrypting the + ciphertext + :param encoder: The encoder used to decode the ciphertext. + :rtype: [:class:`bytes`] + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if nonce is None: + # If we were given the nonce and ciphertext combined, split them. 
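+            # (EncryptedMessage values produced by encrypt() are laid out
+            # as nonce || ciphertext, so this slicing recovers both parts.)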
+ nonce = ciphertext[: self.NONCE_SIZE] + ciphertext = ciphertext[self.NONCE_SIZE :] + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE + ) + + plaintext = nacl.bindings.crypto_box_open_easy_afternm( + ciphertext, + nonce, + self._shared_key, + ) + + return plaintext + + def shared_key(self) -> bytes: + """ + Returns the Curve25519 shared secret, that can then be used as a key in + other symmetric ciphers. + + .. warning:: It is **VITALLY** important that you use a nonce with your + symmetric cipher. If you fail to do this, you compromise the + privacy of the messages encrypted. Ensure that the key length of + your cipher is 32 bytes. + :rtype: [:class:`bytes`] + """ + + return self._shared_key + + +_Key = TypeVar("_Key", PublicKey, PrivateKey) + + +class SealedBox(Generic[_Key], encoding.Encodable, StringFixer): + """ + The SealedBox class boxes and unboxes messages addressed to + a specified key-pair by using ephemeral sender's key pairs, + whose private part will be discarded just after encrypting + a single plaintext message. + + The ciphertexts generated by :class:`~nacl.public.SecretBox` include + the public part of the ephemeral key before the :class:`~nacl.public.Box` + ciphertext. + + :param recipient_key: a :class:`~nacl.public.PublicKey` used to encrypt + messages and derive nonces, or a :class:`~nacl.public.PrivateKey` used + to decrypt messages. + + .. versionadded:: 1.2 + """ + + _public_key: bytes + _private_key: Optional[bytes] + + def __init__(self, recipient_key: _Key): + if isinstance(recipient_key, PublicKey): + self._public_key = recipient_key.encode( + encoder=encoding.RawEncoder + ) + self._private_key = None + elif isinstance(recipient_key, PrivateKey): + self._private_key = recipient_key.encode( + encoder=encoding.RawEncoder + ) + self._public_key = recipient_key.public_key.encode( + encoder=encoding.RawEncoder + ) + else: + raise exc.TypeError( + "SealedBox must be created from a PublicKey or a PrivateKey" + ) + + def __bytes__(self) -> bytes: + return self._public_key + + def encrypt( + self, + plaintext: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Encrypts the plaintext message using a random-generated ephemeral + key pair and returns a "composed ciphertext", containing both + the public part of the key pair and the ciphertext proper, + encoded with the encoder. + + The private part of the ephemeral key-pair will be scrubbed before + returning the ciphertext, therefore, the sender will not be able to + decrypt the generated ciphertext. + + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param encoder: The encoder to use to encode the ciphertext + :return bytes: encoded ciphertext + """ + + ciphertext = nacl.bindings.crypto_box_seal(plaintext, self._public_key) + + encoded_ciphertext = encoder.encode(ciphertext) + + return encoded_ciphertext + + def decrypt( + self: "SealedBox[PrivateKey]", + ciphertext: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the ephemeral public key enclosed + in the ciphertext and the SealedBox private key, returning + the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param encoder: The encoder used to decode the ciphertext. 
+ :return bytes: The original plaintext + :raises TypeError: if this SealedBox was created with a + :class:`~nacl.public.PublicKey` rather than a + :class:`~nacl.public.PrivateKey`. + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if self._private_key is None: + raise TypeError( + "SealedBoxes created with a public key cannot decrypt" + ) + plaintext = nacl.bindings.crypto_box_seal_open( + ciphertext, + self._public_key, + self._private_key, + ) + + return plaintext diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/__init__.py b/venv/lib/python3.12/site-packages/nacl/pwhash/__init__.py new file mode 100644 index 0000000..ffd76a6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/pwhash/__init__.py @@ -0,0 +1,75 @@ +# Copyright 2017 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from nacl.exceptions import CryptPrefixError + +from . import _argon2, argon2i, argon2id, scrypt + +STRPREFIX = argon2id.STRPREFIX + +PWHASH_SIZE = argon2id.PWHASH_SIZE + +assert _argon2.ALG_ARGON2_DEFAULT == _argon2.ALG_ARGON2ID13 +# since version 1.0.15 of libsodium + +PASSWD_MIN = argon2id.PASSWD_MIN +PASSWD_MAX = argon2id.PASSWD_MAX +MEMLIMIT_MAX = argon2id.MEMLIMIT_MAX +MEMLIMIT_MIN = argon2id.MEMLIMIT_MIN +OPSLIMIT_MAX = argon2id.OPSLIMIT_MAX +OPSLIMIT_MIN = argon2id.OPSLIMIT_MIN +OPSLIMIT_INTERACTIVE = argon2id.OPSLIMIT_INTERACTIVE +MEMLIMIT_INTERACTIVE = argon2id.MEMLIMIT_INTERACTIVE +OPSLIMIT_MODERATE = argon2id.OPSLIMIT_MODERATE +MEMLIMIT_MODERATE = argon2id.MEMLIMIT_MODERATE +OPSLIMIT_SENSITIVE = argon2id.OPSLIMIT_SENSITIVE +MEMLIMIT_SENSITIVE = argon2id.MEMLIMIT_SENSITIVE + +str = argon2id.str + +assert argon2i.ALG != argon2id.ALG + +SCRYPT_SALTBYTES = scrypt.SALTBYTES +SCRYPT_PWHASH_SIZE = scrypt.PWHASH_SIZE +SCRYPT_OPSLIMIT_INTERACTIVE = scrypt.OPSLIMIT_INTERACTIVE +SCRYPT_MEMLIMIT_INTERACTIVE = scrypt.MEMLIMIT_INTERACTIVE +SCRYPT_OPSLIMIT_SENSITIVE = scrypt.OPSLIMIT_SENSITIVE +SCRYPT_MEMLIMIT_SENSITIVE = scrypt.MEMLIMIT_SENSITIVE + + +kdf_scryptsalsa208sha256 = scrypt.kdf +scryptsalsa208sha256_str = scrypt.str +verify_scryptsalsa208sha256 = scrypt.verify + + +def verify(password_hash: bytes, password: bytes) -> bool: + """ + Takes a modular crypt encoded stored password hash derived using one + of the algorithms supported by `libsodium` and checks if the user provided + password will hash to the same string when using the parameters saved + in the stored hash + """ + if password_hash.startswith(argon2id.STRPREFIX): + return argon2id.verify(password_hash, password) + elif password_hash.startswith(argon2i.STRPREFIX): + return argon2id.verify(password_hash, password) + elif scrypt.AVAILABLE and password_hash.startswith(scrypt.STRPREFIX): + return scrypt.verify(password_hash, password) + else: + raise ( + CryptPrefixError( + "given password_hash is not in a supported format" + ) + ) diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/__init__.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8c7f771 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/_argon2.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/_argon2.cpython-312.pyc new file mode 100644 index 0000000..b88e62b Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/_argon2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/argon2i.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/argon2i.cpython-312.pyc new file mode 100644 index 0000000..e56a4a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/argon2i.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/argon2id.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/argon2id.cpython-312.pyc new file mode 100644 index 0000000..bca7b57 Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/argon2id.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/scrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/scrypt.cpython-312.pyc new file mode 100644 index 0000000..eee7fad Binary files /dev/null and b/venv/lib/python3.12/site-packages/nacl/pwhash/__pycache__/scrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/_argon2.py b/venv/lib/python3.12/site-packages/nacl/pwhash/_argon2.py new file mode 100644 index 0000000..856eda0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/pwhash/_argon2.py @@ -0,0 +1,49 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
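+
+# Usage sketch (illustrative; the wrappers below back nacl.pwhash.verify):
+# a modular-crypt hash stored via nacl.pwhash.str() can be checked later
+# with nacl.pwhash.verify(), which dispatches on the hash's prefix:
+#
+#     import nacl.pwhash
+#     stored = nacl.pwhash.str(b"correct horse battery staple")
+#     assert nacl.pwhash.verify(stored, b"correct horse battery staple")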
+ +import nacl.bindings + +_argon2_strbytes_plus_one = nacl.bindings.crypto_pwhash_STRBYTES + +PWHASH_SIZE = _argon2_strbytes_plus_one - 1 +SALTBYTES = nacl.bindings.crypto_pwhash_SALTBYTES + +PASSWD_MIN = nacl.bindings.crypto_pwhash_PASSWD_MIN +PASSWD_MAX = nacl.bindings.crypto_pwhash_PASSWD_MAX + +PWHASH_SIZE = _argon2_strbytes_plus_one - 1 + +BYTES_MAX = nacl.bindings.crypto_pwhash_BYTES_MAX +BYTES_MIN = nacl.bindings.crypto_pwhash_BYTES_MIN + +ALG_ARGON2I13 = nacl.bindings.crypto_pwhash_ALG_ARGON2I13 +ALG_ARGON2ID13 = nacl.bindings.crypto_pwhash_ALG_ARGON2ID13 +ALG_ARGON2_DEFAULT = nacl.bindings.crypto_pwhash_ALG_DEFAULT + + +def verify(password_hash: bytes, password: bytes) -> bool: + """ + Takes a modular crypt encoded argon2i or argon2id stored password hash + and checks if the user provided password will hash to the same string + when using the stored parameters + + :param password_hash: password hash serialized in modular crypt() format + :type password_hash: bytes + :param password: user provided password + :type password: bytes + :rtype: boolean + + .. versionadded:: 1.2 + """ + return nacl.bindings.crypto_pwhash_str_verify(password_hash, password) diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/argon2i.py b/venv/lib/python3.12/site-packages/nacl/pwhash/argon2i.py new file mode 100644 index 0000000..f9b3af7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/pwhash/argon2i.py @@ -0,0 +1,132 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import nacl.bindings +import nacl.encoding + +from . import _argon2 + +ALG = _argon2.ALG_ARGON2I13 +STRPREFIX = nacl.bindings.crypto_pwhash_argon2i_STRPREFIX + +SALTBYTES = _argon2.SALTBYTES + +PASSWD_MIN = _argon2.PASSWD_MIN +PASSWD_MAX = _argon2.PASSWD_MAX + +PWHASH_SIZE = _argon2.PWHASH_SIZE + +BYTES_MIN = _argon2.BYTES_MIN +BYTES_MAX = _argon2.BYTES_MAX + +verify = _argon2.verify + +MEMLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_MAX +MEMLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_MIN +OPSLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_MAX +OPSLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_MIN + +OPSLIMIT_INTERACTIVE = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_INTERACTIVE +MEMLIMIT_INTERACTIVE = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_INTERACTIVE +OPSLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_SENSITIVE +MEMLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_SENSITIVE + +OPSLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2i_OPSLIMIT_MODERATE +MEMLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2i_MEMLIMIT_MODERATE + + +def kdf( + size: int, + password: bytes, + salt: bytes, + opslimit: int = OPSLIMIT_SENSITIVE, + memlimit: int = MEMLIMIT_SENSITIVE, + encoder: nacl.encoding.Encoder = nacl.encoding.RawEncoder, +) -> bytes: + """ + Derive a ``size`` bytes long key from a caller-supplied + ``password`` and ``salt`` pair using the argon2i + memory-hard construct. 
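+
+    A usage sketch (values are illustrative)::
+
+        import nacl.utils
+        from nacl.pwhash import argon2i
+
+        salt = nacl.utils.random(argon2i.SALTBYTES)
+        key = argon2i.kdf(32, b"my password", salt,
+                          opslimit=argon2i.OPSLIMIT_INTERACTIVE,
+                          memlimit=argon2i.MEMLIMIT_INTERACTIVE)
+        assert len(key) == 32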
+ + the enclosing module provides the constants + + - :py:const:`.OPSLIMIT_INTERACTIVE` + - :py:const:`.MEMLIMIT_INTERACTIVE` + - :py:const:`.OPSLIMIT_MODERATE` + - :py:const:`.MEMLIMIT_MODERATE` + - :py:const:`.OPSLIMIT_SENSITIVE` + - :py:const:`.MEMLIMIT_SENSITIVE` + + as a guidance for correct settings. + + :param size: derived key size, must be between + :py:const:`.BYTES_MIN` and + :py:const:`.BYTES_MAX` + :type size: int + :param password: password used to seed the key derivation procedure; + it length must be between + :py:const:`.PASSWD_MIN` and + :py:const:`.PASSWD_MAX` + :type password: bytes + :param salt: **RANDOM** salt used in the key derivation procedure; + its length must be exactly :py:const:`.SALTBYTES` + :type salt: bytes + :param opslimit: the time component (operation count) + of the key derivation procedure's computational cost; + it must be between + :py:const:`.OPSLIMIT_MIN` and + :py:const:`.OPSLIMIT_MAX` + :type opslimit: int + :param memlimit: the memory occupation component + of the key derivation procedure's computational cost; + it must be between + :py:const:`.MEMLIMIT_MIN` and + :py:const:`.MEMLIMIT_MAX` + :type memlimit: int + :rtype: bytes + + .. versionadded:: 1.2 + """ + + return encoder.encode( + nacl.bindings.crypto_pwhash_alg( + size, password, salt, opslimit, memlimit, ALG + ) + ) + + +def str( + password: bytes, + opslimit: int = OPSLIMIT_INTERACTIVE, + memlimit: int = MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Hashes a password with a random salt, using the memory-hard + argon2i construct and returning an ascii string that has all + the needed info to check against a future password + + + The default settings for opslimit and memlimit are those deemed + correct for the interactive user login case. + + :param bytes password: + :param int opslimit: + :param int memlimit: + :rtype: bytes + + .. versionadded:: 1.2 + """ + return nacl.bindings.crypto_pwhash_str_alg( + password, opslimit, memlimit, ALG + ) diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/argon2id.py b/venv/lib/python3.12/site-packages/nacl/pwhash/argon2id.py new file mode 100644 index 0000000..f3aa3f7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/pwhash/argon2id.py @@ -0,0 +1,135 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import nacl.bindings +import nacl.encoding + +from . 
import _argon2 + +ALG = _argon2.ALG_ARGON2ID13 +STRPREFIX = nacl.bindings.crypto_pwhash_argon2id_STRPREFIX + +SALTBYTES = _argon2.SALTBYTES + +PASSWD_MIN = _argon2.PASSWD_MIN +PASSWD_MAX = _argon2.PASSWD_MAX + +PWHASH_SIZE = _argon2.PWHASH_SIZE + +BYTES_MIN = _argon2.BYTES_MIN +BYTES_MAX = _argon2.BYTES_MAX + +verify = _argon2.verify + +MEMLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_MIN +MEMLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_MAX +OPSLIMIT_MIN = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_MIN +OPSLIMIT_MAX = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_MAX + +OPSLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE +) +MEMLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE +) +OPSLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE +MEMLIMIT_SENSITIVE = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE + +OPSLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2id_OPSLIMIT_MODERATE +MEMLIMIT_MODERATE = nacl.bindings.crypto_pwhash_argon2id_MEMLIMIT_MODERATE + + +def kdf( + size: int, + password: bytes, + salt: bytes, + opslimit: int = OPSLIMIT_SENSITIVE, + memlimit: int = MEMLIMIT_SENSITIVE, + encoder: nacl.encoding.Encoder = nacl.encoding.RawEncoder, +) -> bytes: + """ + Derive a ``size`` bytes long key from a caller-supplied + ``password`` and ``salt`` pair using the argon2id + memory-hard construct. + + the enclosing module provides the constants + + - :py:const:`.OPSLIMIT_INTERACTIVE` + - :py:const:`.MEMLIMIT_INTERACTIVE` + - :py:const:`.OPSLIMIT_MODERATE` + - :py:const:`.MEMLIMIT_MODERATE` + - :py:const:`.OPSLIMIT_SENSITIVE` + - :py:const:`.MEMLIMIT_SENSITIVE` + + as a guidance for correct settings. + + :param size: derived key size, must be between + :py:const:`.BYTES_MIN` and + :py:const:`.BYTES_MAX` + :type size: int + :param password: password used to seed the key derivation procedure; + it length must be between + :py:const:`.PASSWD_MIN` and + :py:const:`.PASSWD_MAX` + :type password: bytes + :param salt: **RANDOM** salt used in the key derivation procedure; + its length must be exactly :py:const:`.SALTBYTES` + :type salt: bytes + :param opslimit: the time component (operation count) + of the key derivation procedure's computational cost; + it must be between + :py:const:`.OPSLIMIT_MIN` and + :py:const:`.OPSLIMIT_MAX` + :type opslimit: int + :param memlimit: the memory occupation component + of the key derivation procedure's computational cost; + it must be between + :py:const:`.MEMLIMIT_MIN` and + :py:const:`.MEMLIMIT_MAX` + :type memlimit: int + :rtype: bytes + + .. versionadded:: 1.2 + """ + + return encoder.encode( + nacl.bindings.crypto_pwhash_alg( + size, password, salt, opslimit, memlimit, ALG + ) + ) + + +def str( + password: bytes, + opslimit: int = OPSLIMIT_INTERACTIVE, + memlimit: int = MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Hashes a password with a random salt, using the memory-hard + argon2id construct and returning an ascii string that has all + the needed info to check against a future password + + The default settings for opslimit and memlimit are those deemed + correct for the interactive user login case. + + :param bytes password: + :param int opslimit: + :param int memlimit: + :rtype: bytes + + .. 
versionadded:: 1.2 + """ + return nacl.bindings.crypto_pwhash_str_alg( + password, opslimit, memlimit, ALG + ) diff --git a/venv/lib/python3.12/site-packages/nacl/pwhash/scrypt.py b/venv/lib/python3.12/site-packages/nacl/pwhash/scrypt.py new file mode 100644 index 0000000..b9fc9d8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/pwhash/scrypt.py @@ -0,0 +1,211 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import cast + +import nacl.bindings +import nacl.encoding +from nacl import exceptions as exc +from nacl.exceptions import ensure + +_strbytes_plus_one = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_STRBYTES + +AVAILABLE = nacl.bindings.has_crypto_pwhash_scryptsalsa208sha256 + +STRPREFIX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_STRPREFIX + +SALTBYTES = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_SALTBYTES + +PASSWD_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_PASSWD_MIN +PASSWD_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_PASSWD_MAX + +PWHASH_SIZE = _strbytes_plus_one - 1 + +BYTES_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_BYTES_MIN +BYTES_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_BYTES_MAX + +MEMLIMIT_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN +MEMLIMIT_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MAX +OPSLIMIT_MIN = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN +OPSLIMIT_MAX = nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MAX + +OPSLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE +) +MEMLIMIT_INTERACTIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE +) +OPSLIMIT_SENSITIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE +) +MEMLIMIT_SENSITIVE = ( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE +) + +OPSLIMIT_MODERATE = 8 * OPSLIMIT_INTERACTIVE +MEMLIMIT_MODERATE = 8 * MEMLIMIT_INTERACTIVE + + +def kdf( + size: int, + password: bytes, + salt: bytes, + opslimit: int = OPSLIMIT_SENSITIVE, + memlimit: int = MEMLIMIT_SENSITIVE, + encoder: nacl.encoding.Encoder = nacl.encoding.RawEncoder, +) -> bytes: + """ + Derive a ``size`` bytes long key from a caller-supplied + ``password`` and ``salt`` pair using the scryptsalsa208sha256 + memory-hard construct. + + + the enclosing module provides the constants + + - :py:const:`.OPSLIMIT_INTERACTIVE` + - :py:const:`.MEMLIMIT_INTERACTIVE` + - :py:const:`.OPSLIMIT_SENSITIVE` + - :py:const:`.MEMLIMIT_SENSITIVE` + - :py:const:`.OPSLIMIT_MODERATE` + - :py:const:`.MEMLIMIT_MODERATE` + + as a guidance for correct settings respectively for the + interactive login and the long term key protecting sensitive data + use cases. 
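+
+    A usage sketch (illustrative; needs a non-minimal libsodium build)::
+
+        import nacl.utils
+        from nacl.pwhash import scrypt
+
+        salt = nacl.utils.random(scrypt.SALTBYTES)
+        key = scrypt.kdf(32, b"my password", salt,
+                         opslimit=scrypt.OPSLIMIT_INTERACTIVE,
+                         memlimit=scrypt.MEMLIMIT_INTERACTIVE)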
+ + :param size: derived key size, must be between + :py:const:`.BYTES_MIN` and + :py:const:`.BYTES_MAX` + :type size: int + :param password: password used to seed the key derivation procedure; + it length must be between + :py:const:`.PASSWD_MIN` and + :py:const:`.PASSWD_MAX` + :type password: bytes + :param salt: **RANDOM** salt used in the key derivation procedure; + its length must be exactly :py:const:`.SALTBYTES` + :type salt: bytes + :param opslimit: the time component (operation count) + of the key derivation procedure's computational cost; + it must be between + :py:const:`.OPSLIMIT_MIN` and + :py:const:`.OPSLIMIT_MAX` + :type opslimit: int + :param memlimit: the memory occupation component + of the key derivation procedure's computational cost; + it must be between + :py:const:`.MEMLIMIT_MIN` and + :py:const:`.MEMLIMIT_MAX` + :type memlimit: int + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. versionadded:: 1.2 + """ + ensure( + AVAILABLE, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + len(salt) == SALTBYTES, + "The salt must be exactly %s, not %s bytes long" + % (SALTBYTES, len(salt)), + raising=exc.ValueError, + ) + + n_log2, r, p = nacl.bindings.nacl_bindings_pick_scrypt_params( + opslimit, memlimit + ) + maxmem = memlimit + (2**16) + + return encoder.encode( + nacl.bindings.crypto_pwhash_scryptsalsa208sha256_ll( + password, + salt, + # Cast safety: n_log2 is a positive integer, and so 2 ** n_log2 is also + # a positive integer. Mypy+typeshed can't deduce this, because there's no + # way to for them to know that n_log2: int is positive. + cast(int, 2**n_log2), + r, + p, + maxmem=maxmem, + dklen=size, + ) + ) + + +def str( + password: bytes, + opslimit: int = OPSLIMIT_INTERACTIVE, + memlimit: int = MEMLIMIT_INTERACTIVE, +) -> bytes: + """ + Hashes a password with a random salt, using the memory-hard + scryptsalsa208sha256 construct and returning an ascii string + that has all the needed info to check against a future password + + The default settings for opslimit and memlimit are those deemed + correct for the interactive user login case. + + :param bytes password: + :param int opslimit: + :param int memlimit: + :rtype: bytes + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. versionadded:: 1.2 + """ + ensure( + AVAILABLE, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + return nacl.bindings.crypto_pwhash_scryptsalsa208sha256_str( + password, opslimit, memlimit + ) + + +def verify(password_hash: bytes, password: bytes) -> bool: + """ + Takes the output of scryptsalsa208sha256 and compares it against + a user provided password to see if they are the same + + :param password_hash: bytes + :param password: bytes + :rtype: boolean + :raises nacl.exceptions.UnavailableError: If called when using a + minimal build of libsodium. + + .. 
versionadded:: 1.2 + """ + ensure( + AVAILABLE, + "Not available in minimal build", + raising=exc.UnavailableError, + ) + + ensure( + len(password_hash) == PWHASH_SIZE, + "The password hash must be exactly %s bytes long" + % nacl.bindings.crypto_pwhash_scryptsalsa208sha256_STRBYTES, + raising=exc.ValueError, + ) + + return nacl.bindings.crypto_pwhash_scryptsalsa208sha256_str_verify( + password_hash, password + ) diff --git a/venv/lib/python3.12/site-packages/nacl/py.typed b/venv/lib/python3.12/site-packages/nacl/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/nacl/secret.py b/venv/lib/python3.12/site-packages/nacl/secret.py new file mode 100644 index 0000000..5c3064f --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/secret.py @@ -0,0 +1,305 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import ClassVar, Optional + +import nacl.bindings +from nacl import encoding +from nacl import exceptions as exc +from nacl.utils import EncryptedMessage, StringFixer, random + + +class SecretBox(encoding.Encodable, StringFixer): + """ + The SecretBox class encrypts and decrypts messages using the given secret + key. + + The ciphertexts generated by :class:`~nacl.secret.Secretbox` include a 16 + byte authenticator which is checked as part of the decryption. An invalid + authenticator will cause the decrypt function to raise an exception. The + authenticator is not a signature. Once you've decrypted the message you've + demonstrated the ability to create arbitrary valid message, so messages you + send are repudiable. For non-repudiable messages, sign them after + encryption. + + Encryption is done using `XSalsa20-Poly1305`_, and there are no practical + limits on the number or size of messages (up to 2⁶⁴ messages, each up to 2⁶⁴ + bytes). + + .. _XSalsa20-Poly1305: https://doc.libsodium.org/secret-key_cryptography/secretbox#algorithm-details + + :param key: The secret key used to encrypt and decrypt messages + :param encoder: The encoder class used to decode the given key + + :cvar KEY_SIZE: The size that the key is required to be. + :cvar NONCE_SIZE: The size that the nonce is required to be. + :cvar MACBYTES: The size of the authentication MAC tag in bytes. + :cvar MESSAGEBYTES_MAX: The maximum size of a message which can be + safely encrypted with a single key/nonce + pair. 
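+
+    A minimal usage sketch (key handling is illustrative)::
+
+        import nacl.utils
+        from nacl.secret import SecretBox
+
+        key = nacl.utils.random(SecretBox.KEY_SIZE)
+        box = SecretBox(key)
+        encrypted = box.encrypt(b"top secret")  # random nonce, prepended
+        assert box.decrypt(encrypted) == b"top secret"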
+ """ + + KEY_SIZE: ClassVar[int] = nacl.bindings.crypto_secretbox_KEYBYTES + NONCE_SIZE: ClassVar[int] = nacl.bindings.crypto_secretbox_NONCEBYTES + MACBYTES: ClassVar[int] = nacl.bindings.crypto_secretbox_MACBYTES + MESSAGEBYTES_MAX: ClassVar[int] = ( + nacl.bindings.crypto_secretbox_MESSAGEBYTES_MAX + ) + + def __init__( + self, key: bytes, encoder: encoding.Encoder = encoding.RawEncoder + ): + key = encoder.decode(key) + if not isinstance(key, bytes): + raise exc.TypeError("SecretBox must be created from 32 bytes") + + if len(key) != self.KEY_SIZE: + raise exc.ValueError( + "The key must be exactly %s bytes long" % self.KEY_SIZE, + ) + + self._key = key + + def __bytes__(self) -> bytes: + return self._key + + def encrypt( + self, + plaintext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> EncryptedMessage: + """ + Encrypts the plaintext message using the given `nonce` (or generates + one randomly if omitted) and returns the ciphertext encoded with the + encoder. + + .. warning:: It is **VITALLY** important that the nonce is a nonce, + i.e. it is a number used only once for any given key. If you fail + to do this, you compromise the privacy of the messages encrypted. + Give your nonces a different prefix, or have one side use an odd + counter and one an even counter. Just make sure they are different. + + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param nonce: [:class:`bytes`] The nonce to use in the encryption + :param encoder: The encoder to use to encode the ciphertext + :rtype: [:class:`nacl.utils.EncryptedMessage`] + """ + if nonce is None: + nonce = random(self.NONCE_SIZE) + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + ciphertext = nacl.bindings.crypto_secretbox_easy( + plaintext, nonce, self._key + ) + + encoded_nonce = encoder.encode(nonce) + encoded_ciphertext = encoder.encode(ciphertext) + + return EncryptedMessage._from_parts( + encoded_nonce, + encoded_ciphertext, + encoder.encode(nonce + ciphertext), + ) + + def decrypt( + self, + ciphertext: bytes, + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the `nonce` (explicitly, when passed as a + parameter or implicitly, when omitted, as part of the ciphertext) and + returns the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param nonce: [:class:`bytes`] The nonce used when encrypting the + ciphertext + :param encoder: The encoder used to decode the ciphertext. + :rtype: [:class:`bytes`] + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if nonce is None: + # If we were given the nonce and ciphertext combined, split them. + nonce = ciphertext[: self.NONCE_SIZE] + ciphertext = ciphertext[self.NONCE_SIZE :] + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + plaintext = nacl.bindings.crypto_secretbox_open_easy( + ciphertext, nonce, self._key + ) + + return plaintext + + +class Aead(encoding.Encodable, StringFixer): + """ + The AEAD class encrypts and decrypts messages using the given secret key. + + Unlike :class:`~nacl.secret.SecretBox`, AEAD supports authenticating + non-confidential data received alongside the message, such as a length + or type tag. 
+ + Like :class:`~nacl.secret.Secretbox`, this class provides authenticated + encryption. An inauthentic message will cause the decrypt function to raise + an exception. + + Likewise, the authenticator should not be mistaken for a (public-key) + signature: recipients (with the ability to decrypt messages) are capable of + creating arbitrary valid message; in particular, this means AEAD messages + are repudiable. For non-repudiable messages, sign them after encryption. + + The cryptosystem used is `XChacha20-Poly1305`_ as specified for + `standardization`_. There are `no practical limits`_ to how much can safely + be encrypted under a given key (up to 2⁶⁴ messages each containing up + to 2⁶⁴ bytes). + + .. _standardization: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-xchacha + .. _XChacha20-Poly1305: https://doc.libsodium.org/secret-key_cryptography/aead#xchacha-20-poly1305 + .. _no practical limits: https://doc.libsodium.org/secret-key_cryptography/aead#limitations + + :param key: The secret key used to encrypt and decrypt messages + :param encoder: The encoder class used to decode the given key + + :cvar KEY_SIZE: The size that the key is required to be. + :cvar NONCE_SIZE: The size that the nonce is required to be. + :cvar MACBYTES: The size of the authentication MAC tag in bytes. + :cvar MESSAGEBYTES_MAX: The maximum size of a message which can be + safely encrypted with a single key/nonce + pair. + """ + + KEY_SIZE = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_KEYBYTES + NONCE_SIZE = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_NPUBBYTES + MACBYTES = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_ABYTES + MESSAGEBYTES_MAX = ( + nacl.bindings.crypto_aead_xchacha20poly1305_ietf_MESSAGEBYTES_MAX + ) + + def __init__( + self, + key: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + key = encoder.decode(key) + if not isinstance(key, bytes): + raise exc.TypeError("AEAD must be created from 32 bytes") + + if len(key) != self.KEY_SIZE: + raise exc.ValueError( + "The key must be exactly %s bytes long" % self.KEY_SIZE, + ) + + self._key = key + + def __bytes__(self) -> bytes: + return self._key + + def encrypt( + self, + plaintext: bytes, + aad: bytes = b"", + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> EncryptedMessage: + """ + Encrypts the plaintext message using the given `nonce` (or generates + one randomly if omitted) and returns the ciphertext encoded with the + encoder. + + .. warning:: It is vitally important for :param nonce: to be unique. + By default, it is generated randomly; [:class:`Aead`] uses XChacha20 + for extended (192b) nonce size, so the risk of reusing random nonces + is negligible. It is *strongly recommended* to keep this behaviour, + as nonce reuse will compromise the privacy of encrypted messages. + Should implicit nonces be inadequate for your application, the + second best option is using split counters; e.g. if sending messages + encrypted under a shared key between 2 users, each user can use the + number of messages it sent so far, prefixed or suffixed with a 1bit + user id. Note that the counter must **never** be rolled back (due + to overflow, on-disk state being rolled back to an earlier backup, + ...) 
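+
+        A split-counter nonce might be built like this (sketch only;
+        ``user_id`` is 0 or 1 and ``counter`` is a per-sender count)::
+
+            nonce = bytes([user_id]) + counter.to_bytes(
+                Aead.NONCE_SIZE - 1, "little"
+            )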
+ + :param plaintext: [:class:`bytes`] The plaintext message to encrypt + :param nonce: [:class:`bytes`] The nonce to use in the encryption + :param encoder: The encoder to use to encode the ciphertext + :rtype: [:class:`nacl.utils.EncryptedMessage`] + """ + if nonce is None: + nonce = random(self.NONCE_SIZE) + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + ciphertext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_encrypt( + plaintext, aad, nonce, self._key + ) + + encoded_nonce = encoder.encode(nonce) + encoded_ciphertext = encoder.encode(ciphertext) + + return EncryptedMessage._from_parts( + encoded_nonce, + encoded_ciphertext, + encoder.encode(nonce + ciphertext), + ) + + def decrypt( + self, + ciphertext: bytes, + aad: bytes = b"", + nonce: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Decrypts the ciphertext using the `nonce` (explicitly, when passed as a + parameter or implicitly, when omitted, as part of the ciphertext) and + returns the plaintext message. + + :param ciphertext: [:class:`bytes`] The encrypted message to decrypt + :param nonce: [:class:`bytes`] The nonce used when encrypting the + ciphertext + :param encoder: The encoder used to decode the ciphertext. + :rtype: [:class:`bytes`] + """ + # Decode our ciphertext + ciphertext = encoder.decode(ciphertext) + + if nonce is None: + # If we were given the nonce and ciphertext combined, split them. + nonce = ciphertext[: self.NONCE_SIZE] + ciphertext = ciphertext[self.NONCE_SIZE :] + + if len(nonce) != self.NONCE_SIZE: + raise exc.ValueError( + "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, + ) + + plaintext = nacl.bindings.crypto_aead_xchacha20poly1305_ietf_decrypt( + ciphertext, aad, nonce, self._key + ) + + return plaintext diff --git a/venv/lib/python3.12/site-packages/nacl/signing.py b/venv/lib/python3.12/site-packages/nacl/signing.py new file mode 100644 index 0000000..536b369 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/signing.py @@ -0,0 +1,250 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional + +import nacl.bindings +from nacl import encoding +from nacl import exceptions as exc +from nacl.public import ( + PrivateKey as _Curve25519_PrivateKey, + PublicKey as _Curve25519_PublicKey, +) +from nacl.utils import StringFixer, random + + +class SignedMessage(bytes): + """ + A bytes subclass that holds a message that has been signed by a + :class:`SigningKey`. + """ + + _signature: bytes + _message: bytes + + @classmethod + def _from_parts( + cls, signature: bytes, message: bytes, combined: bytes + ) -> "SignedMessage": + obj = cls(combined) + obj._signature = signature + obj._message = message + return obj + + @property + def signature(self) -> bytes: + """ + The signature contained within the :class:`SignedMessage`. 
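Returning to the Aead class completed above: the aad argument is authenticated but not encrypted, so decryption succeeds only when the exact same bytes are presented again. A minimal sketch (the key and tag are illustrative):

    import nacl.secret
    import nacl.utils

    key = nacl.utils.random(nacl.secret.Aead.KEY_SIZE)
    aead = nacl.secret.Aead(key)

    # The aad travels in the clear but is covered by the Poly1305 tag.
    ct = aead.encrypt(b"payload", aad=b"msg-type:1")
    assert aead.decrypt(ct, aad=b"msg-type:1") == b"payload"

    # Different aad (or a tampered ciphertext) raises
    # nacl.exceptions.CryptoError instead of returning a plaintext.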
+ """ + return self._signature + + @property + def message(self) -> bytes: + """ + The message contained within the :class:`SignedMessage`. + """ + return self._message + + +class VerifyKey(encoding.Encodable, StringFixer): + """ + The public key counterpart to an Ed25519 SigningKey for producing digital + signatures. + + :param key: [:class:`bytes`] Serialized Ed25519 public key + :param encoder: A class that is able to decode the `key` + """ + + def __init__( + self, key: bytes, encoder: encoding.Encoder = encoding.RawEncoder + ): + # Decode the key + key = encoder.decode(key) + if not isinstance(key, bytes): + raise exc.TypeError("VerifyKey must be created from 32 bytes") + + if len(key) != nacl.bindings.crypto_sign_PUBLICKEYBYTES: + raise exc.ValueError( + "The key must be exactly %s bytes long" + % nacl.bindings.crypto_sign_PUBLICKEYBYTES, + ) + + self._key = key + + def __bytes__(self) -> bytes: + return self._key + + def __hash__(self) -> int: + return hash(bytes(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return nacl.bindings.sodium_memcmp(bytes(self), bytes(other)) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + def verify( + self, + smessage: bytes, + signature: Optional[bytes] = None, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> bytes: + """ + Verifies the signature of a signed message, returning the message + if it has not been tampered with else raising + :class:`~nacl.exceptions.BadSignatureError`. + + :param smessage: [:class:`bytes`] Either the original messaged or a + signature and message concated together. + :param signature: [:class:`bytes`] If an unsigned message is given for + smessage then the detached signature must be provided. + :param encoder: A class that is able to decode the secret message and + signature. + :rtype: :class:`bytes` + """ + if signature is not None: + # If we were given the message and signature separately, validate + # signature size and combine them. + if not isinstance(signature, bytes): + raise exc.TypeError( + "Verification signature must be created from %d bytes" + % nacl.bindings.crypto_sign_BYTES, + ) + + if len(signature) != nacl.bindings.crypto_sign_BYTES: + raise exc.ValueError( + "The signature must be exactly %d bytes long" + % nacl.bindings.crypto_sign_BYTES, + ) + + smessage = signature + encoder.decode(smessage) + else: + # Decode the signed message + smessage = encoder.decode(smessage) + + return nacl.bindings.crypto_sign_open(smessage, self._key) + + def to_curve25519_public_key(self) -> _Curve25519_PublicKey: + """ + Converts a :class:`~nacl.signing.VerifyKey` to a + :class:`~nacl.public.PublicKey` + + :rtype: :class:`~nacl.public.PublicKey` + """ + raw_pk = nacl.bindings.crypto_sign_ed25519_pk_to_curve25519(self._key) + return _Curve25519_PublicKey(raw_pk) + + +class SigningKey(encoding.Encodable, StringFixer): + """ + Private key for producing digital signatures using the Ed25519 algorithm. + + Signing keys are produced from a 32-byte (256-bit) random seed value. This + value can be passed into the :class:`~nacl.signing.SigningKey` as a + :func:`bytes` whose length is 32. + + .. warning:: This **must** be protected and remain secret. Anyone who knows + the value of your :class:`~nacl.signing.SigningKey` or it's seed can + masquerade as you. + + :param seed: [:class:`bytes`] Random 32-byte value (i.e. 
private key) + :param encoder: A class that is able to decode the seed + + :ivar verify_key: [:class:`~nacl.signing.VerifyKey`] The verify + (i.e. public) key that corresponds with this signing key. + """ + + def __init__( + self, + seed: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ): + # Decode the seed + seed = encoder.decode(seed) + if not isinstance(seed, bytes): + raise exc.TypeError( + "SigningKey must be created from a 32-byte seed" + ) + + # Verify that our seed is the proper size + if len(seed) != nacl.bindings.crypto_sign_SEEDBYTES: + raise exc.ValueError( + "The seed must be exactly %d bytes long" + % nacl.bindings.crypto_sign_SEEDBYTES + ) + + public_key, secret_key = nacl.bindings.crypto_sign_seed_keypair(seed) + + self._seed = seed + self._signing_key = secret_key + self.verify_key = VerifyKey(public_key) + + def __bytes__(self) -> bytes: + return self._seed + + def __hash__(self) -> int: + return hash(bytes(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return False + return nacl.bindings.sodium_memcmp(bytes(self), bytes(other)) + + def __ne__(self, other: object) -> bool: + return not (self == other) + + @classmethod + def generate(cls) -> "SigningKey": + """ + Generates a random :class:`~nacl.signing.SigningKey` object. + + :rtype: :class:`~nacl.signing.SigningKey` + """ + return cls( + random(nacl.bindings.crypto_sign_SEEDBYTES), + encoder=encoding.RawEncoder, + ) + + def sign( + self, + message: bytes, + encoder: encoding.Encoder = encoding.RawEncoder, + ) -> SignedMessage: + """ + Sign a message using this key. + + :param message: [:class:`bytes`] The data to be signed. + :param encoder: A class that is used to encode the signed message. + :rtype: :class:`~nacl.signing.SignedMessage` + """ + raw_signed = nacl.bindings.crypto_sign(message, self._signing_key) + + crypto_sign_BYTES = nacl.bindings.crypto_sign_BYTES + signature = encoder.encode(raw_signed[:crypto_sign_BYTES]) + message = encoder.encode(raw_signed[crypto_sign_BYTES:]) + signed = encoder.encode(raw_signed) + + return SignedMessage._from_parts(signature, message, signed) + + def to_curve25519_private_key(self) -> _Curve25519_PrivateKey: + """ + Converts a :class:`~nacl.signing.SigningKey` to a + :class:`~nacl.public.PrivateKey` + + :rtype: :class:`~nacl.public.PrivateKey` + """ + sk = self._signing_key + raw_private = nacl.bindings.crypto_sign_ed25519_sk_to_curve25519(sk) + return _Curve25519_PrivateKey(raw_private) diff --git a/venv/lib/python3.12/site-packages/nacl/utils.py b/venv/lib/python3.12/site-packages/nacl/utils.py new file mode 100644 index 0000000..d19d236 --- /dev/null +++ b/venv/lib/python3.12/site-packages/nacl/utils.py @@ -0,0 +1,88 @@ +# Copyright 2013 Donald Stufft and individual contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
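Taken together, the SigningKey/VerifyKey pair defined above supports both combined and detached signatures; a minimal sketch:

    from nacl.signing import SigningKey

    sk = SigningKey.generate()           # fresh random 32-byte seed
    signed = sk.sign(b"attack at dawn")  # SignedMessage: signature || message

    # verify() returns the message, or raises BadSignatureError if either
    # the signature or the message has been tampered with.
    vk = sk.verify_key
    assert vk.verify(signed) == b"attack at dawn"

    # Detached form: pass the message and the signature separately.
    assert vk.verify(signed.message, signature=signed.signature) == signed.message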
+ + import os + from typing import SupportsBytes, Type, TypeVar + + import nacl.bindings + from nacl import encoding + + _EncryptedMessage = TypeVar("_EncryptedMessage", bound="EncryptedMessage") + + + class EncryptedMessage(bytes): + """ + A bytes subclass that holds a message that has been encrypted by a + :class:`SecretBox`. + """ + + _nonce: bytes + _ciphertext: bytes + + @classmethod + def _from_parts( + cls: Type[_EncryptedMessage], + nonce: bytes, + ciphertext: bytes, + combined: bytes, + ) -> _EncryptedMessage: + obj = cls(combined) + obj._nonce = nonce + obj._ciphertext = ciphertext + return obj + + @property + def nonce(self) -> bytes: + """ + The nonce used during the encryption of the :class:`EncryptedMessage`. + """ + return self._nonce + + @property + def ciphertext(self) -> bytes: + """ + The ciphertext contained within the :class:`EncryptedMessage`. + """ + return self._ciphertext + + + class StringFixer: + def __str__(self: SupportsBytes) -> str: + return str(self.__bytes__()) + + + def bytes_as_string(bytes_in: bytes) -> str: + return bytes_in.decode("ascii") + + + def random(size: int = 32) -> bytes: + return os.urandom(size) + + + def randombytes_deterministic( + size: int, seed: bytes, encoder: encoding.Encoder = encoding.RawEncoder + ) -> bytes: + """ + Returns ``size`` number of deterministically generated pseudorandom bytes + from a seed + + :param size: int + :param seed: bytes + :param encoder: The encoder class used to encode the produced bytes + :rtype: bytes + """ + raw_data = nacl.bindings.randombytes_buf_deterministic(size, seed) + + return encoder.encode(raw_data) diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/LICENSE b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/LICENSE new file mode 100644 index 0000000..d12bef0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price.
Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. 
To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. 
+ + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. 
Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. 
Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. 
In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. 
+ + <one line to give the library's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + <signature of Ty Coon>, 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + + diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/METADATA new file mode 100644 index 0000000..408416c --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/METADATA @@ -0,0 +1,99 @@ +Metadata-Version: 2.1 +Name: paramiko +Version: 3.4.0 +Summary: SSH2 protocol library +Home-page: https://paramiko.org +Author: Jeff Forcier +Author-email: jeff@bitprophet.org +License: LGPL +Project-URL: Docs, https://docs.paramiko.org +Project-URL: Source, https://github.com/paramiko/paramiko +Project-URL: Issues, https://github.com/paramiko/paramiko/issues +Project-URL: Changelog, https://www.paramiko.org/changelog.html +Project-URL: CI, https://app.circleci.com/pipelines/github/paramiko/paramiko +Platform: Posix; MacOS X; Windows +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet +Classifier: Topic :: Security :: Cryptography +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +Requires-Dist: bcrypt (>=3.2) +Requires-Dist: cryptography (>=3.3) +Requires-Dist: pynacl (>=1.5) +Provides-Extra: all +Requires-Dist: pyasn1 (>=0.1.7) ; extra == 'all' +Requires-Dist: invoke (>=2.0) ; extra == 'all' +Requires-Dist: gssapi (>=1.4.1) ; (platform_system != "Windows") and extra == 'all' +Requires-Dist: pywin32 (>=2.1.8) ; (platform_system == "Windows") and extra == 'all' +Provides-Extra: ed25519 +Provides-Extra: gssapi +Requires-Dist: pyasn1 (>=0.1.7) ; extra == 'gssapi' +Requires-Dist: gssapi (>=1.4.1) ; (platform_system != "Windows") and extra == 'gssapi' +Requires-Dist: pywin32 (>=2.1.8) ; (platform_system == "Windows") and extra == 'gssapi'
+Provides-Extra: invoke +Requires-Dist: invoke (>=2.0) ; extra == 'invoke' + +|version| |python| |license| |ci| |coverage| + +.. |version| image:: https://img.shields.io/pypi/v/paramiko + :target: https://pypi.org/project/paramiko/ + :alt: PyPI - Package Version +.. |python| image:: https://img.shields.io/pypi/pyversions/paramiko + :target: https://pypi.org/project/paramiko/ + :alt: PyPI - Python Version +.. |license| image:: https://img.shields.io/pypi/l/paramiko + :target: https://github.com/paramiko/paramiko/blob/main/LICENSE + :alt: PyPI - License +.. |ci| image:: https://img.shields.io/circleci/build/github/paramiko/paramiko/main + :target: https://app.circleci.com/pipelines/github/paramiko/paramiko + :alt: CircleCI +.. |coverage| image:: https://img.shields.io/codecov/c/gh/paramiko/paramiko + :target: https://app.codecov.io/gh/paramiko/paramiko + :alt: Codecov + +Welcome to Paramiko! +==================== + +Paramiko is a pure-Python [#]_ (3.6+) implementation of the SSHv2 protocol +[#]_, providing both client and server functionality. It provides the +foundation for the high-level SSH library `Fabric <https://www.fabfile.org>`_, +which is what we recommend you use for common client use-cases such as running +remote shell commands or transferring files. + +Direct use of Paramiko itself is only intended for users who need +advanced/low-level primitives or want to run an in-Python sshd. + +For installation information, changelogs, FAQs and similar, please visit `our +main project website <https://paramiko.org>`_; for API details, see `the +versioned docs <https://docs.paramiko.org>`_. Additionally, the project +maintainer keeps a `roadmap <http://bitprophet.org/projects#roadmap>`_ on his +personal site. + +.. [#] + Paramiko relies on `cryptography <https://cryptography.io>`_ for crypto + functionality, which makes use of C and Rust extensions but has many + precompiled options available. See `our installation page + <https://www.paramiko.org/installing.html>`_ for details. + +.. [#] + OpenSSH's RFC specification page is a fantastic resource and collection of + links that we won't bother replicating here: + https://www.openssh.com/specs.html + + OpenSSH itself also happens to be our primary reference implementation: + when in doubt, we consult how they do things, unless there are good reasons + not to. There are always some gaps, but we do our best to reconcile them + when possible.
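The "common client use-cases" this README points at look roughly like the sketch below; the host, user, and key path are placeholders, and AutoAddPolicy silently trusts unknown host keys, which is acceptable in a lab but risky in production:

    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(
        "ssh.example.com",            # placeholder gateway host
        port=22,
        username="tunneluser",        # placeholder account
        key_filename="/path/to/ssh/private_key",
    )
    stdin, stdout, stderr = client.exec_command("uptime")
    print(stdout.read().decode())
    client.close()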
+ + diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/RECORD new file mode 100644 index 0000000..0c11b17 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/RECORD @@ -0,0 +1,99 @@ +paramiko-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +paramiko-3.4.0.dist-info/LICENSE,sha256=X6Jb9fOV_SbnAcLh3kyn0WKBaYbceRwi-PQiaFetG7I,26436 +paramiko-3.4.0.dist-info/METADATA,sha256=FFzjUcnnCqiy3pYJymf5e1YtI-zJZxayIdqNQPd4E0E,4404 +paramiko-3.4.0.dist-info/RECORD,, +paramiko-3.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +paramiko-3.4.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +paramiko-3.4.0.dist-info/top_level.txt,sha256=R9n-eCc_1kx1DnijF7Glmm-H67k9jUz5rm2YoPL8n54,9 +paramiko/__init__.py,sha256=3wgH5SDq89MGbiIIdCWg-w48eVu7lgP8bUPfSC4Jtuc,4446 +paramiko/__pycache__/__init__.cpython-312.pyc,, +paramiko/__pycache__/_version.cpython-312.pyc,, +paramiko/__pycache__/_winapi.cpython-312.pyc,, +paramiko/__pycache__/agent.cpython-312.pyc,, +paramiko/__pycache__/auth_handler.cpython-312.pyc,, +paramiko/__pycache__/auth_strategy.cpython-312.pyc,, +paramiko/__pycache__/ber.cpython-312.pyc,, +paramiko/__pycache__/buffered_pipe.cpython-312.pyc,, +paramiko/__pycache__/channel.cpython-312.pyc,, +paramiko/__pycache__/client.cpython-312.pyc,, +paramiko/__pycache__/common.cpython-312.pyc,, +paramiko/__pycache__/compress.cpython-312.pyc,, +paramiko/__pycache__/config.cpython-312.pyc,, +paramiko/__pycache__/dsskey.cpython-312.pyc,, +paramiko/__pycache__/ecdsakey.cpython-312.pyc,, +paramiko/__pycache__/ed25519key.cpython-312.pyc,, +paramiko/__pycache__/file.cpython-312.pyc,, +paramiko/__pycache__/hostkeys.cpython-312.pyc,, +paramiko/__pycache__/kex_curve25519.cpython-312.pyc,, +paramiko/__pycache__/kex_ecdh_nist.cpython-312.pyc,, +paramiko/__pycache__/kex_gex.cpython-312.pyc,, +paramiko/__pycache__/kex_group1.cpython-312.pyc,, +paramiko/__pycache__/kex_group14.cpython-312.pyc,, +paramiko/__pycache__/kex_group16.cpython-312.pyc,, +paramiko/__pycache__/kex_gss.cpython-312.pyc,, +paramiko/__pycache__/message.cpython-312.pyc,, +paramiko/__pycache__/packet.cpython-312.pyc,, +paramiko/__pycache__/pipe.cpython-312.pyc,, +paramiko/__pycache__/pkey.cpython-312.pyc,, +paramiko/__pycache__/primes.cpython-312.pyc,, +paramiko/__pycache__/proxy.cpython-312.pyc,, +paramiko/__pycache__/rsakey.cpython-312.pyc,, +paramiko/__pycache__/server.cpython-312.pyc,, +paramiko/__pycache__/sftp.cpython-312.pyc,, +paramiko/__pycache__/sftp_attr.cpython-312.pyc,, +paramiko/__pycache__/sftp_client.cpython-312.pyc,, +paramiko/__pycache__/sftp_file.cpython-312.pyc,, +paramiko/__pycache__/sftp_handle.cpython-312.pyc,, +paramiko/__pycache__/sftp_server.cpython-312.pyc,, +paramiko/__pycache__/sftp_si.cpython-312.pyc,, +paramiko/__pycache__/ssh_exception.cpython-312.pyc,, +paramiko/__pycache__/ssh_gss.cpython-312.pyc,, +paramiko/__pycache__/transport.cpython-312.pyc,, +paramiko/__pycache__/util.cpython-312.pyc,, +paramiko/__pycache__/win_openssh.cpython-312.pyc,, +paramiko/__pycache__/win_pageant.cpython-312.pyc,, +paramiko/_version.py,sha256=4IasKKZG4lBGKgdHYEL1__tcACeAyfW65UMDrxM-B6M,80 +paramiko/_winapi.py,sha256=e4PyDmHmyLcAkZo4WAX7ah_I6fq4ex7A8FhxOPYAoA8,11204 +paramiko/agent.py,sha256=4vP4knAAzZiSblzSM_srbTYK2hVnUUT561vTBdCe2i4,15877 +paramiko/auth_handler.py,sha256=kMY00x5sUkrcR9uRHIIakQw4E6649oW1tMtIQPrFMFo,43006 
+paramiko/auth_strategy.py,sha256=Pjcp8q64gUwk4CneGOnOhW0WBeKBRFURieWqC9AN0Ec,11437 +paramiko/ber.py,sha256=uFb-YokU4Rg2fKjyX8VMAu05STVk37YRgghlNHmdoYo,4369 +paramiko/buffered_pipe.py,sha256=AlkTLHYWbj4W-ZD7ORQZFjEFv7kC7QSvEYypfiHpwxw,7225 +paramiko/channel.py,sha256=q8sCj7wmDgoPyp_7_TABFsJ5ZoXbQAssxi2aS_Vhfog,49191 +paramiko/client.py,sha256=BjTberxDuu43FHdP273lGRfZI-kf183tQ3CIyB1z4Io,34492 +paramiko/common.py,sha256=sBJW8KJz_EE8TsT7wLWTPuUiL2nNsLa_cfrTCe9Fyio,7756 +paramiko/compress.py,sha256=RCHTino0cHz1dy1pLbOhFhdWfGl4u50VmBcbT7qBWNc,1282 +paramiko/config.py,sha256=QPzwsk4Vem-Ecg2NhjRu78O9SU5ZO6DmfxZTA6cHWco,27362 +paramiko/dsskey.py,sha256=jX9Q5gsKR1hUrvQKSHBZsa96At7NvEO_liT_8G64EP8,8248 +paramiko/ecdsakey.py,sha256=nK8oxORGgLP-zoC2REG46bAchVrlr35jfuxTn_Ac8sM,11653 +paramiko/ed25519key.py,sha256=FYurG0gqxmhNKh_22Hp3XEON5zuvzv-r5w8y9yJQgqY,7457 +paramiko/file.py,sha256=NgbhUjYgrLh-HQtsdYlPZ3CyvS0jhXqePk45GhHPMSo,19063 +paramiko/hostkeys.py,sha256=ErKfOnfzmp8MvonOqkgcySt_Ci9Mp6zDM2QF9aOT4Ms,13208 +paramiko/kex_curve25519.py,sha256=voEFDs_zkgEdWOqDakU-5DLYO3qotWcXYiqOCUP4GDo,4436 +paramiko/kex_ecdh_nist.py,sha256=RbHPwv8Gu5iR9LwMf-N0yUjXEQgRKKBLaAT3dacv44Q,5012 +paramiko/kex_gex.py,sha256=j5fPexu48CGObvpPKn0kZTjdn1onfz0iYhh8p8kIgM0,10320 +paramiko/kex_group1.py,sha256=HfzkLH1SKaIavnN-LGuF-lAMaAECB6Izj_TELhg4Omc,5740 +paramiko/kex_group14.py,sha256=AX7xrTCqMROrMQ_3Dp8WmLkNN8dTovhPjtWgaLLpRxs,1833 +paramiko/kex_group16.py,sha256=s7qB7tSDFkG5ztlg3mV958UVWnKgn1LIA-B2t-h1eX4,2288 +paramiko/kex_gss.py,sha256=BadM1nNN-ORDRuJmb93v0xBGQlce1n29lT4ihsnmY-4,24562 +paramiko/message.py,sha256=wHTWVU_Xgfq-djOOPVF5jAsE-XgADoH47G0iI5N69gY,9349 +paramiko/packet.py,sha256=vslnBfmZALqhQ4xWWWCHgq4kjcVPS2MpcUA8lYY6q28,22229 +paramiko/pipe.py,sha256=cmWwOyMdys62IGLC9lDznwTu11xLg6wB9mV-60lr86A,3902 +paramiko/pkey.py,sha256=bGFA-Zbk1xjsNaY6E_PtQQ7WVqW2zPmtWtnXnSySS-M,36007 +paramiko/primes.py,sha256=6Uv0fFsTmIJxInMqeNhryw9jrzvgNksKbA7ecBI0g5E,5107 +paramiko/proxy.py,sha256=I5XxN1aDren3Fw1f3SOoQLP4O9O7jeyey9meG6Og0q4,4648 +paramiko/rsakey.py,sha256=7xoDJvfcaZVVYRGlv8xamhO3zYvE-wI_Nd814L8TxzQ,7546 +paramiko/server.py,sha256=oNkI7t2gSMYIwLov5vl_BbHU-AwFC5LxP78YIXw7mq4,30457 +paramiko/sftp.py,sha256=pyZPnR0fv94YopfPDpslloTiYelu5GuM70cXUGOaKHM,6471 +paramiko/sftp_attr.py,sha256=AX-cG_FiPinftQQq8Ndo1Mc_bZz-AhXFQQpac-oV0wg,8258 +paramiko/sftp_client.py,sha256=e_zi6V233tjx3DH9TH7rRDKRO-TCZ_zyOkBw4sSRIjo,35855 +paramiko/sftp_file.py,sha256=NgVfDhxxURhFrEqniIJQgKQ6wlgCTgOVu5GwQczW_hk,21820 +paramiko/sftp_handle.py,sha256=ho-eyiEvhYHt-_VytznNzNeGktfaIsQX5l4bespWZAk,7424 +paramiko/sftp_server.py,sha256=yH-BgsYj7BuZNGn_EHpnLRPmoNGoYB9g_XxOlK4IcYA,19492 +paramiko/sftp_si.py,sha256=Uf90bFme6Jy6yl7k4jJ28IJboq6KiyPWLjXgP9DR6gk,12544 +paramiko/ssh_exception.py,sha256=F82_vTnKr3UF7ai8dTEv6PnqwVoREyk2c9_Bo3smsrg,7494 +paramiko/ssh_gss.py,sha256=eq_Dcs8Ga1R3xlP2SneSgHOlboY_WgaPJN503OeYCaA,28887 +paramiko/transport.py,sha256=HrnWq9Lz8dWbjSSY_6G7_t6AbcY_5mBFnvuDEPp46ns,133429 +paramiko/util.py,sha256=v6fIp589sPWNFTaq58X6OhbJY7eOHuzjFz8M7p8PFNE,9550 +paramiko/win_openssh.py,sha256=DbWJT0hiE6UImAbMqehcGuVLDWIl-2rObe-AhaGuWpk,1918 +paramiko/win_pageant.py,sha256=i5TG472VzJKVnK08oxM4hK_qb9IzL_Fo96B8ouaxXHo,4177 diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/WHEEL 
b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..8608c1b --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko-3.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +paramiko diff --git a/venv/lib/python3.12/site-packages/paramiko/__init__.py b/venv/lib/python3.12/site-packages/paramiko/__init__.py new file mode 100644 index 0000000..65148d2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/__init__.py @@ -0,0 +1,165 @@ +# Copyright (C) 2003-2011 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +# flake8: noqa +import sys +from paramiko._version import __version__, __version_info__ +from paramiko.transport import ( + SecurityOptions, + ServiceRequestingTransport, + Transport, +) +from paramiko.client import ( + AutoAddPolicy, + MissingHostKeyPolicy, + RejectPolicy, + SSHClient, + WarningPolicy, +) +from paramiko.auth_handler import AuthHandler +from paramiko.auth_strategy import ( + AuthFailure, + AuthStrategy, + AuthResult, + AuthSource, + InMemoryPrivateKey, + NoneAuth, + OnDiskPrivateKey, + Password, + PrivateKey, + SourceResult, +) +from paramiko.ssh_gss import GSSAuth, GSS_AUTH_AVAILABLE, GSS_EXCEPTIONS +from paramiko.channel import ( + Channel, + ChannelFile, + ChannelStderrFile, + ChannelStdinFile, +) +from paramiko.ssh_exception import ( + AuthenticationException, + BadAuthenticationType, + BadHostKeyException, + ChannelException, + ConfigParseError, + CouldNotCanonicalize, + IncompatiblePeer, + MessageOrderError, + PasswordRequiredException, + ProxyCommandFailure, + SSHException, +) +from paramiko.server import ServerInterface, SubsystemHandler, InteractiveQuery +from paramiko.rsakey import RSAKey +from paramiko.dsskey import DSSKey +from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key +from paramiko.sftp import SFTPError, BaseSFTP +from paramiko.sftp_client import SFTP, SFTPClient +from paramiko.sftp_server import SFTPServer +from paramiko.sftp_attr import SFTPAttributes +from paramiko.sftp_handle import SFTPHandle +from paramiko.sftp_si import SFTPServerInterface +from paramiko.sftp_file import SFTPFile +from paramiko.message import Message +from paramiko.packet import Packetizer +from paramiko.file import BufferedFile +from paramiko.agent import Agent, AgentKey +from paramiko.pkey import PKey, PublicBlob, UnknownKeyType +from paramiko.hostkeys import HostKeys +from 
paramiko.config import SSHConfig, SSHConfigDict +from paramiko.proxy import ProxyCommand + +from paramiko.common import ( + AUTH_SUCCESSFUL, + AUTH_PARTIALLY_SUCCESSFUL, + AUTH_FAILED, + OPEN_SUCCEEDED, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_FAILED_CONNECT_FAILED, + OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, + OPEN_FAILED_RESOURCE_SHORTAGE, +) + +from paramiko.sftp import ( + SFTP_OK, + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + SFTP_FAILURE, + SFTP_BAD_MESSAGE, + SFTP_NO_CONNECTION, + SFTP_CONNECTION_LOST, + SFTP_OP_UNSUPPORTED, +) + +from paramiko.common import io_sleep + + +# TODO: I guess a real plugin system might be nice for future expansion... +key_classes = [DSSKey, RSAKey, Ed25519Key, ECDSAKey] + + +__author__ = "Jeff Forcier " +__license__ = "GNU Lesser General Public License (LGPL)" + +# TODO 4.0: remove this, jeez +__all__ = [ + "Agent", + "AgentKey", + "AuthenticationException", + "AutoAddPolicy", + "BadAuthenticationType", + "BadHostKeyException", + "BufferedFile", + "Channel", + "ChannelException", + "ConfigParseError", + "CouldNotCanonicalize", + "DSSKey", + "ECDSAKey", + "Ed25519Key", + "HostKeys", + "Message", + "MissingHostKeyPolicy", + "PKey", + "PasswordRequiredException", + "ProxyCommand", + "ProxyCommandFailure", + "RSAKey", + "RejectPolicy", + "SFTP", + "SFTPAttributes", + "SFTPClient", + "SFTPError", + "SFTPFile", + "SFTPHandle", + "SFTPServer", + "SFTPServerInterface", + "SSHClient", + "SSHConfig", + "SSHConfigDict", + "SSHException", + "SecurityOptions", + "ServerInterface", + "SubsystemHandler", + "Transport", + "WarningPolicy", + "io_sleep", + "util", +] diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1fe17b3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000..48b7257 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/_winapi.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/_winapi.cpython-312.pyc new file mode 100644 index 0000000..e1bda20 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/_winapi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/agent.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/agent.cpython-312.pyc new file mode 100644 index 0000000..c0f0d1d Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/agent.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/auth_handler.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/auth_handler.cpython-312.pyc new file mode 100644 index 0000000..cb83737 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/auth_handler.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/auth_strategy.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/auth_strategy.cpython-312.pyc new file mode 100644 index 0000000..31d4831 Binary files /dev/null 
and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/auth_strategy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/ber.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ber.cpython-312.pyc new file mode 100644 index 0000000..3d426c6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ber.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/buffered_pipe.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/buffered_pipe.cpython-312.pyc new file mode 100644 index 0000000..8094bfc Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/buffered_pipe.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/channel.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/channel.cpython-312.pyc new file mode 100644 index 0000000..50e0b94 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/channel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/client.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..0869d70 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/common.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/common.cpython-312.pyc new file mode 100644 index 0000000..219901d Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/common.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/compress.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/compress.cpython-312.pyc new file mode 100644 index 0000000..9bde01c Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/compress.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..115a669 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/dsskey.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/dsskey.cpython-312.pyc new file mode 100644 index 0000000..ef7a7be Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/dsskey.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/ecdsakey.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ecdsakey.cpython-312.pyc new file mode 100644 index 0000000..5954724 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ecdsakey.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/ed25519key.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ed25519key.cpython-312.pyc new file mode 100644 index 0000000..5dc2b96 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ed25519key.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/file.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/paramiko/__pycache__/file.cpython-312.pyc new file mode 100644 index 0000000..5bfdbe0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/file.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/hostkeys.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/hostkeys.cpython-312.pyc new file mode 100644 index 0000000..3c652e1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/hostkeys.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_curve25519.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_curve25519.cpython-312.pyc new file mode 100644 index 0000000..3ec3c6f Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_curve25519.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_ecdh_nist.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_ecdh_nist.cpython-312.pyc new file mode 100644 index 0000000..7793b9d Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_ecdh_nist.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_gex.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_gex.cpython-312.pyc new file mode 100644 index 0000000..013435c Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_gex.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group1.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group1.cpython-312.pyc new file mode 100644 index 0000000..39619fe Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group1.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group14.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group14.cpython-312.pyc new file mode 100644 index 0000000..3d056c7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group14.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group16.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group16.cpython-312.pyc new file mode 100644 index 0000000..06e07c8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_group16.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_gss.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_gss.cpython-312.pyc new file mode 100644 index 0000000..361fe36 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/kex_gss.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/message.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/message.cpython-312.pyc new file mode 100644 index 0000000..fb99755 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/packet.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/packet.cpython-312.pyc new file mode 100644 index 0000000..fec0d12 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/paramiko/__pycache__/packet.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/pipe.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/pipe.cpython-312.pyc new file mode 100644 index 0000000..6b5c75e Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/pipe.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/pkey.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/pkey.cpython-312.pyc new file mode 100644 index 0000000..3e10a4d Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/pkey.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/primes.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/primes.cpython-312.pyc new file mode 100644 index 0000000..8048448 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/primes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/proxy.cpython-312.pyc new file mode 100644 index 0000000..954a078 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/rsakey.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/rsakey.cpython-312.pyc new file mode 100644 index 0000000..a5a84a0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/rsakey.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/server.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/server.cpython-312.pyc new file mode 100644 index 0000000..2a52fcb Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/server.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp.cpython-312.pyc new file mode 100644 index 0000000..21d8a9d Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_attr.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_attr.cpython-312.pyc new file mode 100644 index 0000000..eab4e25 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_attr.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_client.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_client.cpython-312.pyc new file mode 100644 index 0000000..63f127b Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_file.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_file.cpython-312.pyc new file mode 100644 index 0000000..2dabaf6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_file.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_handle.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_handle.cpython-312.pyc new file mode 100644 index 0000000..8bfac1d Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_handle.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_server.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_server.cpython-312.pyc new file mode 100644 index 0000000..14e964e Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_server.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_si.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_si.cpython-312.pyc new file mode 100644 index 0000000..25bbf7f Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/sftp_si.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/ssh_exception.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ssh_exception.cpython-312.pyc new file mode 100644 index 0000000..ffa9a02 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ssh_exception.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/ssh_gss.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ssh_gss.cpython-312.pyc new file mode 100644 index 0000000..656e6ad Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/ssh_gss.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/transport.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/transport.cpython-312.pyc new file mode 100644 index 0000000..1eb9744 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/transport.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..8b46292 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/win_openssh.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/win_openssh.cpython-312.pyc new file mode 100644 index 0000000..df7cb1c Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/win_openssh.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/__pycache__/win_pageant.cpython-312.pyc b/venv/lib/python3.12/site-packages/paramiko/__pycache__/win_pageant.cpython-312.pyc new file mode 100644 index 0000000..214fcb4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/paramiko/__pycache__/win_pageant.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/paramiko/_version.py b/venv/lib/python3.12/site-packages/paramiko/_version.py new file mode 100644 index 0000000..640ef72 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/_version.py @@ -0,0 +1,2 @@ +__version_info__ = (3, 4, 0) +__version__ = ".".join(map(str, __version_info__)) diff --git a/venv/lib/python3.12/site-packages/paramiko/_winapi.py b/venv/lib/python3.12/site-packages/paramiko/_winapi.py new file mode 100644 index 0000000..4295457 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/_winapi.py @@ -0,0 +1,413 
@@ +""" +Windows API functions implemented as ctypes functions and classes as found +in jaraco.windows (3.4.1). + +If you encounter issues with this module, please consider reporting the issues +in jaraco.windows and asking the author to port the fixes back here. +""" + +import builtins +import ctypes.wintypes + +from paramiko.util import u + + +###################### +# jaraco.windows.error + + +def format_system_message(errno): + """ + Call FormatMessage with a system error number to retrieve + the descriptive error message. + """ + # first some flags used by FormatMessageW + ALLOCATE_BUFFER = 0x100 + FROM_SYSTEM = 0x1000 + + # Let FormatMessageW allocate the buffer (we'll free it below) + # Also, let it know we want a system error message. + flags = ALLOCATE_BUFFER | FROM_SYSTEM + source = None + message_id = errno + language_id = 0 + result_buffer = ctypes.wintypes.LPWSTR() + buffer_size = 0 + arguments = None + bytes = ctypes.windll.kernel32.FormatMessageW( + flags, + source, + message_id, + language_id, + ctypes.byref(result_buffer), + buffer_size, + arguments, + ) + # note the following will cause an infinite loop if GetLastError + # repeatedly returns an error that cannot be formatted, although + # this should not happen. + handle_nonzero_success(bytes) + message = result_buffer.value + ctypes.windll.kernel32.LocalFree(result_buffer) + return message + + +class WindowsError(builtins.WindowsError): + """more info about errors at + http://msdn.microsoft.com/en-us/library/ms681381(VS.85).aspx""" + + def __init__(self, value=None): + if value is None: + value = ctypes.windll.kernel32.GetLastError() + strerror = format_system_message(value) + args = 0, strerror, None, value + super().__init__(*args) + + @property + def message(self): + return self.strerror + + @property + def code(self): + return self.winerror + + def __str__(self): + return self.message + + def __repr__(self): + return "{self.__class__.__name__}({self.winerror})".format(**vars()) + + +def handle_nonzero_success(result): + if result == 0: + raise WindowsError() + + +########################### +# jaraco.windows.api.memory + +GMEM_MOVEABLE = 0x2 + +GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc +GlobalAlloc.argtypes = ctypes.wintypes.UINT, ctypes.c_size_t +GlobalAlloc.restype = ctypes.wintypes.HANDLE + +GlobalLock = ctypes.windll.kernel32.GlobalLock +GlobalLock.argtypes = (ctypes.wintypes.HGLOBAL,) +GlobalLock.restype = ctypes.wintypes.LPVOID + +GlobalUnlock = ctypes.windll.kernel32.GlobalUnlock +GlobalUnlock.argtypes = (ctypes.wintypes.HGLOBAL,) +GlobalUnlock.restype = ctypes.wintypes.BOOL + +GlobalSize = ctypes.windll.kernel32.GlobalSize +GlobalSize.argtypes = (ctypes.wintypes.HGLOBAL,) +GlobalSize.restype = ctypes.c_size_t + +CreateFileMapping = ctypes.windll.kernel32.CreateFileMappingW +CreateFileMapping.argtypes = [ + ctypes.wintypes.HANDLE, + ctypes.c_void_p, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.LPWSTR, +] +CreateFileMapping.restype = ctypes.wintypes.HANDLE + +MapViewOfFile = ctypes.windll.kernel32.MapViewOfFile +MapViewOfFile.restype = ctypes.wintypes.HANDLE + +UnmapViewOfFile = ctypes.windll.kernel32.UnmapViewOfFile +UnmapViewOfFile.argtypes = (ctypes.wintypes.HANDLE,) + +RtlMoveMemory = ctypes.windll.kernel32.RtlMoveMemory +RtlMoveMemory.argtypes = (ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t) + +ctypes.windll.kernel32.LocalFree.argtypes = (ctypes.wintypes.HLOCAL,) + +##################### +# jaraco.windows.mmap + + +class MemoryMap: + """ + A 
memory map object which can have security attributes overridden. + """ + + def __init__(self, name, length, security_attributes=None): + self.name = name + self.length = length + self.security_attributes = security_attributes + self.pos = 0 + + def __enter__(self): + p_SA = ( + ctypes.byref(self.security_attributes) + if self.security_attributes + else None + ) + INVALID_HANDLE_VALUE = -1 + PAGE_READWRITE = 0x4 + FILE_MAP_WRITE = 0x2 + filemap = ctypes.windll.kernel32.CreateFileMappingW( + INVALID_HANDLE_VALUE, + p_SA, + PAGE_READWRITE, + 0, + self.length, + u(self.name), + ) + handle_nonzero_success(filemap) + if filemap == INVALID_HANDLE_VALUE: + raise Exception("Failed to create file mapping") + self.filemap = filemap + self.view = MapViewOfFile(filemap, FILE_MAP_WRITE, 0, 0, 0) + return self + + def seek(self, pos): + self.pos = pos + + def write(self, msg): + assert isinstance(msg, bytes) + n = len(msg) + if self.pos + n >= self.length: # A little safety. + raise ValueError(f"Refusing to write {n} bytes") + dest = self.view + self.pos + length = ctypes.c_size_t(n) + ctypes.windll.kernel32.RtlMoveMemory(dest, msg, length) + self.pos += n + + def read(self, n): + """ + Read n bytes from mapped view. + """ + out = ctypes.create_string_buffer(n) + source = self.view + self.pos + length = ctypes.c_size_t(n) + ctypes.windll.kernel32.RtlMoveMemory(out, source, length) + self.pos += n + return out.raw + + def __exit__(self, exc_type, exc_val, tb): + ctypes.windll.kernel32.UnmapViewOfFile(self.view) + ctypes.windll.kernel32.CloseHandle(self.filemap) + + +############################# +# jaraco.windows.api.security + +# from WinNT.h +READ_CONTROL = 0x00020000 +STANDARD_RIGHTS_REQUIRED = 0x000F0000 +STANDARD_RIGHTS_READ = READ_CONTROL +STANDARD_RIGHTS_WRITE = READ_CONTROL +STANDARD_RIGHTS_EXECUTE = READ_CONTROL +STANDARD_RIGHTS_ALL = 0x001F0000 + +# from NTSecAPI.h +POLICY_VIEW_LOCAL_INFORMATION = 0x00000001 +POLICY_VIEW_AUDIT_INFORMATION = 0x00000002 +POLICY_GET_PRIVATE_INFORMATION = 0x00000004 +POLICY_TRUST_ADMIN = 0x00000008 +POLICY_CREATE_ACCOUNT = 0x00000010 +POLICY_CREATE_SECRET = 0x00000020 +POLICY_CREATE_PRIVILEGE = 0x00000040 +POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080 +POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100 +POLICY_AUDIT_LOG_ADMIN = 0x00000200 +POLICY_SERVER_ADMIN = 0x00000400 +POLICY_LOOKUP_NAMES = 0x00000800 +POLICY_NOTIFICATION = 0x00001000 + +POLICY_ALL_ACCESS = ( + STANDARD_RIGHTS_REQUIRED + | POLICY_VIEW_LOCAL_INFORMATION + | POLICY_VIEW_AUDIT_INFORMATION + | POLICY_GET_PRIVATE_INFORMATION + | POLICY_TRUST_ADMIN + | POLICY_CREATE_ACCOUNT + | POLICY_CREATE_SECRET + | POLICY_CREATE_PRIVILEGE + | POLICY_SET_DEFAULT_QUOTA_LIMITS + | POLICY_SET_AUDIT_REQUIREMENTS + | POLICY_AUDIT_LOG_ADMIN + | POLICY_SERVER_ADMIN + | POLICY_LOOKUP_NAMES +) + + +POLICY_READ = ( + STANDARD_RIGHTS_READ + | POLICY_VIEW_AUDIT_INFORMATION + | POLICY_GET_PRIVATE_INFORMATION +) + +POLICY_WRITE = ( + STANDARD_RIGHTS_WRITE + | POLICY_TRUST_ADMIN + | POLICY_CREATE_ACCOUNT + | POLICY_CREATE_SECRET + | POLICY_CREATE_PRIVILEGE + | POLICY_SET_DEFAULT_QUOTA_LIMITS + | POLICY_SET_AUDIT_REQUIREMENTS + | POLICY_AUDIT_LOG_ADMIN + | POLICY_SERVER_ADMIN +) + +POLICY_EXECUTE = ( + STANDARD_RIGHTS_EXECUTE + | POLICY_VIEW_LOCAL_INFORMATION + | POLICY_LOOKUP_NAMES +) + + +class TokenAccess: + TOKEN_QUERY = 0x8 + + +class TokenInformationClass: + TokenUser = 1 + + +class TOKEN_USER(ctypes.Structure): + num = 1 + _fields_ = [ + ("SID", ctypes.c_void_p), + ("ATTRIBUTES", ctypes.wintypes.DWORD), + ] + + +class 
SECURITY_DESCRIPTOR(ctypes.Structure): + """ + typedef struct _SECURITY_DESCRIPTOR + { + UCHAR Revision; + UCHAR Sbz1; + SECURITY_DESCRIPTOR_CONTROL Control; + PSID Owner; + PSID Group; + PACL Sacl; + PACL Dacl; + } SECURITY_DESCRIPTOR; + """ + + SECURITY_DESCRIPTOR_CONTROL = ctypes.wintypes.USHORT + REVISION = 1 + + _fields_ = [ + ("Revision", ctypes.c_ubyte), + ("Sbz1", ctypes.c_ubyte), + ("Control", SECURITY_DESCRIPTOR_CONTROL), + ("Owner", ctypes.c_void_p), + ("Group", ctypes.c_void_p), + ("Sacl", ctypes.c_void_p), + ("Dacl", ctypes.c_void_p), + ] + + +class SECURITY_ATTRIBUTES(ctypes.Structure): + """ + typedef struct _SECURITY_ATTRIBUTES { + DWORD nLength; + LPVOID lpSecurityDescriptor; + BOOL bInheritHandle; + } SECURITY_ATTRIBUTES; + """ + + _fields_ = [ + ("nLength", ctypes.wintypes.DWORD), + ("lpSecurityDescriptor", ctypes.c_void_p), + ("bInheritHandle", ctypes.wintypes.BOOL), + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES) + + @property + def descriptor(self): + return self._descriptor + + @descriptor.setter + def descriptor(self, value): + self._descriptor = value + self.lpSecurityDescriptor = ctypes.addressof(value) + + +ctypes.windll.advapi32.SetSecurityDescriptorOwner.argtypes = ( + ctypes.POINTER(SECURITY_DESCRIPTOR), + ctypes.c_void_p, + ctypes.wintypes.BOOL, +) + +######################### +# jaraco.windows.security + + +def GetTokenInformation(token, information_class): + """ + Given a token, get the token information for it. + """ + data_size = ctypes.wintypes.DWORD() + ctypes.windll.advapi32.GetTokenInformation( + token, information_class.num, 0, 0, ctypes.byref(data_size) + ) + data = ctypes.create_string_buffer(data_size.value) + handle_nonzero_success( + ctypes.windll.advapi32.GetTokenInformation( + token, + information_class.num, + ctypes.byref(data), + ctypes.sizeof(data), + ctypes.byref(data_size), + ) + ) + return ctypes.cast(data, ctypes.POINTER(TOKEN_USER)).contents + + +def OpenProcessToken(proc_handle, access): + result = ctypes.wintypes.HANDLE() + proc_handle = ctypes.wintypes.HANDLE(proc_handle) + handle_nonzero_success( + ctypes.windll.advapi32.OpenProcessToken( + proc_handle, access, ctypes.byref(result) + ) + ) + return result + + +def get_current_user(): + """ + Return a TOKEN_USER for the owner of this process. + """ + process = OpenProcessToken( + ctypes.windll.kernel32.GetCurrentProcess(), TokenAccess.TOKEN_QUERY + ) + return GetTokenInformation(process, TOKEN_USER) + + +def get_security_attributes_for_user(user=None): + """ + Return a SECURITY_ATTRIBUTES structure with the SID set to the + specified user (uses current user if none is specified). 
+ """ + if user is None: + user = get_current_user() + + assert isinstance(user, TOKEN_USER), "user must be TOKEN_USER instance" + + SD = SECURITY_DESCRIPTOR() + SA = SECURITY_ATTRIBUTES() + # by attaching the actual security descriptor, it will be garbage- + # collected with the security attributes + SA.descriptor = SD + SA.bInheritHandle = 1 + + ctypes.windll.advapi32.InitializeSecurityDescriptor( + ctypes.byref(SD), SECURITY_DESCRIPTOR.REVISION + ) + ctypes.windll.advapi32.SetSecurityDescriptorOwner( + ctypes.byref(SD), user.SID, 0 + ) + return SA diff --git a/venv/lib/python3.12/site-packages/paramiko/agent.py b/venv/lib/python3.12/site-packages/paramiko/agent.py new file mode 100644 index 0000000..b29a0d1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/agent.py @@ -0,0 +1,497 @@ +# Copyright (C) 2003-2007 John Rochester +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +SSH Agent interface +""" + +import os +import socket +import struct +import sys +import threading +import time +import tempfile +import stat +from logging import DEBUG +from select import select +from paramiko.common import io_sleep, byte_chr + +from paramiko.ssh_exception import SSHException, AuthenticationException +from paramiko.message import Message +from paramiko.pkey import PKey, UnknownKeyType +from paramiko.util import asbytes, get_logger + +cSSH2_AGENTC_REQUEST_IDENTITIES = byte_chr(11) +SSH2_AGENT_IDENTITIES_ANSWER = 12 +cSSH2_AGENTC_SIGN_REQUEST = byte_chr(13) +SSH2_AGENT_SIGN_RESPONSE = 14 + +SSH_AGENT_RSA_SHA2_256 = 2 +SSH_AGENT_RSA_SHA2_512 = 4 +# NOTE: RFC mildly confusing; while these flags are OR'd together, OpenSSH at +# least really treats them like "AND"s, in the sense that if it finds the +# SHA256 flag set it won't continue looking at the SHA512 one; it +# short-circuits right away. +# Thus, we never want to eg submit 6 to say "either's good". +ALGORITHM_FLAG_MAP = { + "rsa-sha2-256": SSH_AGENT_RSA_SHA2_256, + "rsa-sha2-512": SSH_AGENT_RSA_SHA2_512, +} +for key, value in list(ALGORITHM_FLAG_MAP.items()): + ALGORITHM_FLAG_MAP[f"{key}-cert-v01@openssh.com"] = value + + +# TODO 4.0: rename all these - including making some of their methods public? +class AgentSSH: + def __init__(self): + self._conn = None + self._keys = () + + def get_keys(self): + """ + Return the list of keys available through the SSH agent, if any. If + no SSH agent was running (or it couldn't be contacted), an empty list + will be returned. + + This method performs no IO, just returns the list of keys retrieved + when the connection was made. 
+ + :return: + a tuple of `.AgentKey` objects representing keys available on the + SSH agent + """ + return self._keys + + def _connect(self, conn): + self._conn = conn + ptype, result = self._send_message(cSSH2_AGENTC_REQUEST_IDENTITIES) + if ptype != SSH2_AGENT_IDENTITIES_ANSWER: + raise SSHException("could not get keys from ssh-agent") + keys = [] + for i in range(result.get_int()): + keys.append( + AgentKey( + agent=self, + blob=result.get_binary(), + comment=result.get_text(), + ) + ) + self._keys = tuple(keys) + + def _close(self): + if self._conn is not None: + self._conn.close() + self._conn = None + self._keys = () + + def _send_message(self, msg): + msg = asbytes(msg) + self._conn.send(struct.pack(">I", len(msg)) + msg) + data = self._read_all(4) + msg = Message(self._read_all(struct.unpack(">I", data)[0])) + return ord(msg.get_byte()), msg + + def _read_all(self, wanted): + result = self._conn.recv(wanted) + while len(result) < wanted: + if len(result) == 0: + raise SSHException("lost ssh-agent") + extra = self._conn.recv(wanted - len(result)) + if len(extra) == 0: + raise SSHException("lost ssh-agent") + result += extra + return result + + +class AgentProxyThread(threading.Thread): + """ + Class in charge of communication between two channels. + """ + + def __init__(self, agent): + threading.Thread.__init__(self, target=self.run) + self._agent = agent + self._exit = False + + def run(self): + try: + (r, addr) = self.get_connection() + # r should be either a socket from the socket library, or None + self.__inr = r + # addr should be an IP address as a string, or None + self.__addr = addr + self._agent.connect() + if not isinstance(self._agent, int) and ( + self._agent._conn is None + or not hasattr(self._agent._conn, "fileno") + ): + raise AuthenticationException("Unable to connect to SSH agent") + self._communicate() + except: + # XXX Not sure what to do here ... raise or pass ? + raise + + def _communicate(self): + import fcntl + + oldflags = fcntl.fcntl(self.__inr, fcntl.F_GETFL) + fcntl.fcntl(self.__inr, fcntl.F_SETFL, oldflags | os.O_NONBLOCK) + while not self._exit: + events = select([self._agent._conn, self.__inr], [], [], 0.5) + for fd in events[0]: + if self._agent._conn == fd: + data = self._agent._conn.recv(512) + if len(data) != 0: + self.__inr.send(data) + else: + self._close() + break + elif self.__inr == fd: + data = self.__inr.recv(512) + if len(data) != 0: + self._agent._conn.send(data) + else: + self._close() + break + time.sleep(io_sleep) + + def _close(self): + self._exit = True + self.__inr.close() + self._agent._conn.close() + + +class AgentLocalProxy(AgentProxyThread): + """ + Class to use when a local SSH agent is being queried + by a remote fake agent (e.g. over a unix socket). + """ + + def __init__(self, agent): + AgentProxyThread.__init__(self, agent) + + def get_connection(self): + """ + Return a pair of socket object and string address. + + May block!
+ """ + conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + conn.bind(self._agent._get_filename()) + conn.listen(1) + (r, addr) = conn.accept() + return r, addr + except: + raise + + +class AgentRemoteProxy(AgentProxyThread): + """ + Class to be used when wanting to ask a remote SSH Agent + """ + + def __init__(self, agent, chan): + AgentProxyThread.__init__(self, agent) + self.__chan = chan + + def get_connection(self): + return self.__chan, None + + +def get_agent_connection(): + """ + Returns some SSH agent object, or None if none were found/supported. + + .. versionadded:: 2.10 + """ + if ("SSH_AUTH_SOCK" in os.environ) and (sys.platform != "win32"): + conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + conn.connect(os.environ["SSH_AUTH_SOCK"]) + return conn + except: + # probably a dangling env var: the ssh agent is gone + return + elif sys.platform == "win32": + from . import win_pageant, win_openssh + + conn = None + if win_pageant.can_talk_to_agent(): + conn = win_pageant.PageantConnection() + elif win_openssh.can_talk_to_agent(): + conn = win_openssh.OpenSSHAgentConnection() + return conn + else: + # no agent support + return + + +class AgentClientProxy: + """ + Class proxying request as a client: + + #. client ask for a request_forward_agent() + #. server creates a proxy and a fake SSH Agent + #. server ask for establishing a connection when needed, + calling the forward_agent_handler at client side. + #. the forward_agent_handler launch a thread for connecting + the remote fake agent and the local agent + #. Communication occurs ... + """ + + def __init__(self, chanRemote): + self._conn = None + self.__chanR = chanRemote + self.thread = AgentRemoteProxy(self, chanRemote) + self.thread.start() + + def __del__(self): + self.close() + + def connect(self): + """ + Method automatically called by ``AgentProxyThread.run``. + """ + conn = get_agent_connection() + if not conn: + return + self._conn = conn + + def close(self): + """ + Close the current connection and terminate the agent + Should be called manually + """ + if hasattr(self, "thread"): + self.thread._exit = True + self.thread.join(1000) + if self._conn is not None: + self._conn.close() + + +class AgentServerProxy(AgentSSH): + """ + Allows an SSH server to access a forwarded agent. + + This also creates a unix domain socket on the system to allow external + programs to also access the agent. For this reason, you probably only want + to create one of these. + + :meth:`connect` must be called before it is usable. This will also load the + list of keys the agent contains. You must also call :meth:`close` in + order to clean up the unix socket and the thread that maintains it. + (:class:`contextlib.closing` might be helpful to you.) 
+ + :param .Transport t: Transport used for SSH Agent communication forwarding + + :raises: `.SSHException` -- mostly if we lost the agent + """ + + def __init__(self, t): + AgentSSH.__init__(self) + self.__t = t + self._dir = tempfile.mkdtemp("sshproxy") + os.chmod(self._dir, stat.S_IRWXU) + self._file = self._dir + "/sshproxy.ssh" + self.thread = AgentLocalProxy(self) + self.thread.start() + + def __del__(self): + self.close() + + def connect(self): + conn_sock = self.__t.open_forward_agent_channel() + if conn_sock is None: + raise SSHException("lost ssh-agent") + conn_sock.set_name("auth-agent") + self._connect(conn_sock) + + def close(self): + """ + Terminate the agent, clean up the files, and close connections. + Should be called manually. + """ + os.remove(self._file) + os.rmdir(self._dir) + self.thread._exit = True + self.thread.join(1000) + self._close() + + def get_env(self): + """ + Helper for setting up the environment under unix. + + :return: + a dict containing the ``SSH_AUTH_SOCK`` environment variable + """ + return {"SSH_AUTH_SOCK": self._get_filename()} + + def _get_filename(self): + return self._file + + +class AgentRequestHandler: + """ + Primary/default implementation of SSH agent forwarding functionality. + + Simply instantiate this class, handing it a live command-executing session + object, and it will handle forwarding any local SSH agent processes it + finds. + + For example:: + + # Connect + client = SSHClient() + client.connect(host, port, username) + # Obtain session + session = client.get_transport().open_session() + # Forward local agent + AgentRequestHandler(session) + # Commands executed after this point will see the forwarded agent on + # the remote end. + session.exec_command("git clone https://my.git.repository/") + """ + + def __init__(self, chanClient): + self._conn = None + self.__chanC = chanClient + chanClient.request_forward_agent(self._forward_agent_handler) + self.__clientProxys = [] + + def _forward_agent_handler(self, chanRemote): + self.__clientProxys.append(AgentClientProxy(chanRemote)) + + def __del__(self): + self.close() + + def close(self): + for p in self.__clientProxys: + p.close() + + +class Agent(AgentSSH): + """ + Client interface for using private keys from an SSH agent running on the + local machine. If an SSH agent is running, this class can be used to + connect to it and retrieve `.PKey` objects which can be used when + attempting to authenticate to remote SSH servers. + + Upon initialization, a session with the local machine's SSH agent is + opened, if one is running. If no agent is running, initialization will + succeed, but `get_keys` will return an empty tuple. + + :raises: `.SSHException` -- + if an SSH agent is found, but speaks an incompatible protocol + + .. versionchanged:: 2.10 + Added support for native openssh agent on windows (extending previous + putty pageant support) + """ + + def __init__(self): + AgentSSH.__init__(self) + + conn = get_agent_connection() + if not conn: + return + self._connect(conn) + + def close(self): + """ + Close the SSH agent connection. + """ + self._close() + + +class AgentKey(PKey): + """ + Private key held in a local SSH agent. This type of key can be used for + authenticating to a remote server (signing). Most other key operations + work as expected. + + .. versionchanged:: 3.2 + Added the ``comment`` kwarg and attribute. + + .. versionchanged:: 3.2 + Added the ``.inner_key`` attribute holding a reference to the 'real' + key instance this key is a proxy for, if one was obtainable, else None.
+ """ + + def __init__(self, agent, blob, comment=""): + self.agent = agent + self.blob = blob + self.comment = comment + msg = Message(blob) + self.name = msg.get_text() + self._logger = get_logger(__file__) + self.inner_key = None + try: + self.inner_key = PKey.from_type_string( + key_type=self.name, key_bytes=blob + ) + except UnknownKeyType: + # Log, but don't explode, since inner_key is a best-effort thing. + err = "Unable to derive inner_key for agent key of type {!r}" + self.log(DEBUG, err.format(self.name)) + + def log(self, *args, **kwargs): + return self._logger.log(*args, **kwargs) + + def asbytes(self): + # Prefer inner_key.asbytes, since that will differ for eg RSA-CERT + return self.inner_key.asbytes() if self.inner_key else self.blob + + def get_name(self): + return self.name + + def get_bits(self): + # Have to work around PKey's default get_bits being crap + if self.inner_key is not None: + return self.inner_key.get_bits() + return super().get_bits() + + def __getattr__(self, name): + """ + Proxy any un-implemented methods/properties to the inner_key. + """ + if self.inner_key is None: # nothing to proxy to + raise AttributeError(name) + return getattr(self.inner_key, name) + + @property + def _fields(self): + fallback = [self.get_name(), self.blob] + return self.inner_key._fields if self.inner_key else fallback + + def sign_ssh_data(self, data, algorithm=None): + msg = Message() + msg.add_byte(cSSH2_AGENTC_SIGN_REQUEST) + # NOTE: this used to be just self.blob, which is not entirely right for + # RSA-CERT 'keys' - those end up always degrading to ssh-rsa type + # signatures, for reasons probably internal to OpenSSH's agent code, + # even if everything else wants SHA2 (including our flag map). + msg.add_string(self.asbytes()) + msg.add_string(data) + msg.add_int(ALGORITHM_FLAG_MAP.get(algorithm, 0)) + ptype, result = self.agent._send_message(msg) + if ptype != SSH2_AGENT_SIGN_RESPONSE: + raise SSHException("key cannot be used for signing") + return result.get_binary() diff --git a/venv/lib/python3.12/site-packages/paramiko/auth_handler.py b/venv/lib/python3.12/site-packages/paramiko/auth_handler.py new file mode 100644 index 0000000..bc7f298 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/auth_handler.py @@ -0,0 +1,1092 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +""" +`.AuthHandler` +""" + +import weakref +import threading +import time +import re + +from paramiko.common import ( + cMSG_SERVICE_REQUEST, + cMSG_DISCONNECT, + DISCONNECT_SERVICE_NOT_AVAILABLE, + DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, + cMSG_USERAUTH_REQUEST, + cMSG_SERVICE_ACCEPT, + DEBUG, + AUTH_SUCCESSFUL, + INFO, + cMSG_USERAUTH_SUCCESS, + cMSG_USERAUTH_FAILURE, + AUTH_PARTIALLY_SUCCESSFUL, + cMSG_USERAUTH_INFO_REQUEST, + WARNING, + AUTH_FAILED, + cMSG_USERAUTH_PK_OK, + cMSG_USERAUTH_INFO_RESPONSE, + MSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_USERAUTH_REQUEST, + MSG_USERAUTH_SUCCESS, + MSG_USERAUTH_FAILURE, + MSG_USERAUTH_BANNER, + MSG_USERAUTH_INFO_REQUEST, + MSG_USERAUTH_INFO_RESPONSE, + cMSG_USERAUTH_GSSAPI_RESPONSE, + cMSG_USERAUTH_GSSAPI_TOKEN, + cMSG_USERAUTH_GSSAPI_MIC, + MSG_USERAUTH_GSSAPI_RESPONSE, + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_GSSAPI_ERROR, + MSG_USERAUTH_GSSAPI_ERRTOK, + MSG_USERAUTH_GSSAPI_MIC, + MSG_NAMES, + cMSG_USERAUTH_BANNER, +) +from paramiko.message import Message +from paramiko.util import b, u +from paramiko.ssh_exception import ( + SSHException, + AuthenticationException, + BadAuthenticationType, + PartialAuthentication, +) +from paramiko.server import InteractiveQuery +from paramiko.ssh_gss import GSSAuth, GSS_EXCEPTIONS + + +class AuthHandler: + """ + Internal class to handle the mechanics of authentication. + """ + + def __init__(self, transport): + self.transport = weakref.proxy(transport) + self.username = None + self.authenticated = False + self.auth_event = None + self.auth_method = "" + self.banner = None + self.password = None + self.private_key = None + self.interactive_handler = None + self.submethods = None + # for server mode: + self.auth_username = None + self.auth_fail_count = 0 + # for GSSAPI + self.gss_host = None + self.gss_deleg_creds = True + + def _log(self, *args): + return self.transport._log(*args) + + def is_authenticated(self): + return self.authenticated + + def get_username(self): + if self.transport.server_mode: + return self.auth_username + else: + return self.username + + def auth_none(self, username, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "none" + self.username = username + self._request_auth() + finally: + self.transport.lock.release() + + def auth_publickey(self, username, key, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "publickey" + self.username = username + self.private_key = key + self._request_auth() + finally: + self.transport.lock.release() + + def auth_password(self, username, password, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "password" + self.username = username + self.password = password + self._request_auth() + finally: + self.transport.lock.release() + + def auth_interactive(self, username, handler, event, submethods=""): + """ + response_list = handler(title, instructions, prompt_list) + """ + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "keyboard-interactive" + self.username = username + self.interactive_handler = handler + self.submethods = submethods + self._request_auth() + finally: + self.transport.lock.release() + + def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "gssapi-with-mic" + self.username = username + self.gss_host = gss_host + self.gss_deleg_creds = gss_deleg_creds 
+ self._request_auth() + finally: + self.transport.lock.release() + + def auth_gssapi_keyex(self, username, event): + self.transport.lock.acquire() + try: + self.auth_event = event + self.auth_method = "gssapi-keyex" + self.username = username + self._request_auth() + finally: + self.transport.lock.release() + + def abort(self): + if self.auth_event is not None: + self.auth_event.set() + + # ...internals... + + def _request_auth(self): + m = Message() + m.add_byte(cMSG_SERVICE_REQUEST) + m.add_string("ssh-userauth") + self.transport._send_message(m) + + def _disconnect_service_not_available(self): + m = Message() + m.add_byte(cMSG_DISCONNECT) + m.add_int(DISCONNECT_SERVICE_NOT_AVAILABLE) + m.add_string("Service not available") + m.add_string("en") + self.transport._send_message(m) + self.transport.close() + + def _disconnect_no_more_auth(self): + m = Message() + m.add_byte(cMSG_DISCONNECT) + m.add_int(DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) + m.add_string("No more auth methods available") + m.add_string("en") + self.transport._send_message(m) + self.transport.close() + + def _get_key_type_and_bits(self, key): + """ + Given any key, return its type/algorithm & bits-to-sign. + + Intended for input to or verification of, key signatures. + """ + # Use certificate contents, if available, plain pubkey otherwise + if key.public_blob: + return key.public_blob.key_type, key.public_blob.key_blob + else: + return key.get_name(), key + + def _get_session_blob(self, key, service, username, algorithm): + m = Message() + m.add_string(self.transport.session_id) + m.add_byte(cMSG_USERAUTH_REQUEST) + m.add_string(username) + m.add_string(service) + m.add_string("publickey") + m.add_boolean(True) + _, bits = self._get_key_type_and_bits(key) + m.add_string(algorithm) + m.add_string(bits) + return m.asbytes() + + def wait_for_response(self, event): + max_ts = None + if self.transport.auth_timeout is not None: + max_ts = time.time() + self.transport.auth_timeout + while True: + event.wait(0.1) + if not self.transport.is_active(): + e = self.transport.get_exception() + if (e is None) or issubclass(e.__class__, EOFError): + e = AuthenticationException( + "Authentication failed: transport shut down or saw EOF" + ) + raise e + if event.is_set(): + break + if max_ts is not None and max_ts <= time.time(): + raise AuthenticationException("Authentication timeout.") + + if not self.is_authenticated(): + e = self.transport.get_exception() + if e is None: + e = AuthenticationException("Authentication failed.") + # this is horrible. Python Exception isn't yet descended from + # object, so type(e) won't work. :( + # TODO 4.0: lol. just lmao. + if issubclass(e.__class__, PartialAuthentication): + return e.allowed_types + raise e + return [] + + def _parse_service_request(self, m): + service = m.get_text() + if self.transport.server_mode and (service == "ssh-userauth"): + # accepted + m = Message() + m.add_byte(cMSG_SERVICE_ACCEPT) + m.add_string(service) + self.transport._send_message(m) + banner, language = self.transport.server_object.get_banner() + if banner: + m = Message() + m.add_byte(cMSG_USERAUTH_BANNER) + m.add_string(banner) + m.add_string(language) + self.transport._send_message(m) + return + # dunno this one + self._disconnect_service_not_available() + + def _generate_key_from_request(self, algorithm, keyblob): + # For use in server mode. 
+ options = self.transport.preferred_pubkeys + if algorithm.replace("-cert-v01@openssh.com", "") not in options: + err = ( + "Auth rejected: pubkey algorithm '{}' unsupported or disabled" + ) + self._log(INFO, err.format(algorithm)) + return None + return self.transport._key_info[algorithm](Message(keyblob)) + + def _choose_fallback_pubkey_algorithm(self, key_type, my_algos): + # Fallback: first one in our (possibly tweaked by caller) list + pubkey_algo = my_algos[0] + msg = "Server did not send a server-sig-algs list; defaulting to our first preferred algo ({!r})" # noqa + self._log(DEBUG, msg.format(pubkey_algo)) + self._log( + DEBUG, + "NOTE: you may use the 'disabled_algorithms' SSHClient/Transport init kwarg to disable that or other algorithms if your server does not support them!", # noqa + ) + return pubkey_algo + + def _finalize_pubkey_algorithm(self, key_type): + # Short-circuit for non-RSA keys + if "rsa" not in key_type: + return key_type + self._log( + DEBUG, + "Finalizing pubkey algorithm for key of type {!r}".format( + key_type + ), + ) + # NOTE re #2017: When the key is an RSA cert and the remote server is + # OpenSSH 7.7 or earlier, always use ssh-rsa-cert-v01@openssh.com. + # Those versions of the server won't support rsa-sha2 family sig algos + # for certs specifically, and in tandem with various server bugs + # regarding server-sig-algs, it's impossible to fit this into the rest + # of the logic here. + if key_type.endswith("-cert-v01@openssh.com") and re.search( + r"-OpenSSH_(?:[1-6]|7\.[0-7])", self.transport.remote_version + ): + pubkey_algo = "ssh-rsa-cert-v01@openssh.com" + self.transport._agreed_pubkey_algorithm = pubkey_algo + self._log(DEBUG, "OpenSSH<7.8 + RSA cert = forcing ssh-rsa!") + self._log( + DEBUG, "Agreed upon {!r} pubkey algorithm".format(pubkey_algo) + ) + return pubkey_algo + # Normal attempts to handshake follow from here. + # Only consider RSA algos from our list, lest we agree on another! + my_algos = [x for x in self.transport.preferred_pubkeys if "rsa" in x] + self._log(DEBUG, "Our pubkey algorithm list: {}".format(my_algos)) + # Short-circuit negatively if user disabled all RSA algos (heh) + if not my_algos: + raise SSHException( + "An RSA key was specified, but no RSA pubkey algorithms are configured!" # noqa + ) + # Check for server-sig-algs if supported & sent + server_algo_str = u( + self.transport.server_extensions.get("server-sig-algs", b("")) + ) + pubkey_algo = None + # Prefer to match against server-sig-algs + if server_algo_str: + server_algos = server_algo_str.split(",") + self._log( + DEBUG, "Server-side algorithm list: {}".format(server_algos) + ) + # Only use algos from our list that the server likes, in our own + # preference order. (NOTE: purposefully using same style as in + # Transport...expect to refactor later) + agreement = list(filter(server_algos.__contains__, my_algos)) + if agreement: + pubkey_algo = agreement[0] + self._log( + DEBUG, + "Agreed upon {!r} pubkey algorithm".format(pubkey_algo), + ) + else: + self._log(DEBUG, "No common pubkey algorithms exist! Dying.") + # TODO: MAY want to use IncompatiblePeer again here but that's + # technically for initial key exchange, not pubkey auth. + err = "Unable to agree on a pubkey algorithm for signing a {!r} key!" 
# noqa + raise AuthenticationException(err.format(key_type)) + # Fallback to something based purely on the key & our configuration + else: + pubkey_algo = self._choose_fallback_pubkey_algorithm( + key_type, my_algos + ) + if key_type.endswith("-cert-v01@openssh.com"): + pubkey_algo += "-cert-v01@openssh.com" + self.transport._agreed_pubkey_algorithm = pubkey_algo + return pubkey_algo + + def _parse_service_accept(self, m): + service = m.get_text() + if service == "ssh-userauth": + self._log(DEBUG, "userauth is OK") + m = Message() + m.add_byte(cMSG_USERAUTH_REQUEST) + m.add_string(self.username) + m.add_string("ssh-connection") + m.add_string(self.auth_method) + if self.auth_method == "password": + m.add_boolean(False) + password = b(self.password) + m.add_string(password) + elif self.auth_method == "publickey": + m.add_boolean(True) + key_type, bits = self._get_key_type_and_bits(self.private_key) + algorithm = self._finalize_pubkey_algorithm(key_type) + m.add_string(algorithm) + m.add_string(bits) + blob = self._get_session_blob( + self.private_key, + "ssh-connection", + self.username, + algorithm, + ) + sig = self.private_key.sign_ssh_data(blob, algorithm) + m.add_string(sig) + elif self.auth_method == "keyboard-interactive": + m.add_string("") + m.add_string(self.submethods) + elif self.auth_method == "gssapi-with-mic": + sshgss = GSSAuth(self.auth_method, self.gss_deleg_creds) + m.add_bytes(sshgss.ssh_gss_oids()) + # send the supported GSSAPI OIDs to the server + self.transport._send_message(m) + ptype, m = self.transport.packetizer.read_message() + if ptype == MSG_USERAUTH_BANNER: + self._parse_userauth_banner(m) + ptype, m = self.transport.packetizer.read_message() + if ptype == MSG_USERAUTH_GSSAPI_RESPONSE: + # Read the mechanism selected by the server. We send just + # the Kerberos V5 OID, so the server can only respond with + # this OID. + mech = m.get_string() + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + try: + m.add_string( + sshgss.ssh_init_sec_context( + self.gss_host, mech, self.username + ) + ) + except GSS_EXCEPTIONS as e: + return self._handle_local_gss_failure(e) + self.transport._send_message(m) + while True: + ptype, m = self.transport.packetizer.read_message() + if ptype == MSG_USERAUTH_GSSAPI_TOKEN: + srv_token = m.get_string() + try: + next_token = sshgss.ssh_init_sec_context( + self.gss_host, + mech, + self.username, + srv_token, + ) + except GSS_EXCEPTIONS as e: + return self._handle_local_gss_failure(e) + # After this step the GSSAPI should not return any + # token. If it does, we keep sending the token to + # the server until no more token is returned. + if next_token is None: + break + else: + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + m.add_string(next_token) + self.transport.send_message(m) + else: + raise SSHException( + "Received Package: {}".format(MSG_NAMES[ptype]) + ) + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_MIC) + # send the MIC to the server + m.add_string(sshgss.ssh_get_mic(self.transport.session_id)) + elif ptype == MSG_USERAUTH_GSSAPI_ERRTOK: + # RFC 4462 says we are not required to implement GSS-API + # error messages. 
+ # See RFC 4462 Section 3.8 in + # http://www.ietf.org/rfc/rfc4462.txt + raise SSHException("Server returned an error token") + elif ptype == MSG_USERAUTH_GSSAPI_ERROR: + maj_status = m.get_int() + min_status = m.get_int() + err_msg = m.get_string() + m.get_string() # Lang tag - discarded + raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) + elif ptype == MSG_USERAUTH_FAILURE: + self._parse_userauth_failure(m) + return + else: + raise SSHException( + "Received Package: {}".format(MSG_NAMES[ptype]) + ) + elif ( + self.auth_method == "gssapi-keyex" + and self.transport.gss_kex_used + ): + kexgss = self.transport.kexgss_ctxt + kexgss.set_username(self.username) + mic_token = kexgss.ssh_get_mic(self.transport.session_id) + m.add_string(mic_token) + elif self.auth_method == "none": + pass + else: + raise SSHException( + 'Unknown auth method "{}"'.format(self.auth_method) + ) + self.transport._send_message(m) + else: + self._log( + DEBUG, 'Service request "{}" accepted (?)'.format(service) + ) + + def _send_auth_result(self, username, method, result): + # okay, send result + m = Message() + if result == AUTH_SUCCESSFUL: + self._log(INFO, "Auth granted ({}).".format(method)) + m.add_byte(cMSG_USERAUTH_SUCCESS) + self.authenticated = True + else: + self._log(INFO, "Auth rejected ({}).".format(method)) + m.add_byte(cMSG_USERAUTH_FAILURE) + m.add_string( + self.transport.server_object.get_allowed_auths(username) + ) + if result == AUTH_PARTIALLY_SUCCESSFUL: + m.add_boolean(True) + else: + m.add_boolean(False) + self.auth_fail_count += 1 + self.transport._send_message(m) + if self.auth_fail_count >= 10: + self._disconnect_no_more_auth() + if result == AUTH_SUCCESSFUL: + self.transport._auth_trigger() + + def _interactive_query(self, q): + # make interactive query instead of response + m = Message() + m.add_byte(cMSG_USERAUTH_INFO_REQUEST) + m.add_string(q.name) + m.add_string(q.instructions) + m.add_string(bytes()) + m.add_int(len(q.prompts)) + for p in q.prompts: + m.add_string(p[0]) + m.add_boolean(p[1]) + self.transport._send_message(m) + + def _parse_userauth_request(self, m): + if not self.transport.server_mode: + # er, uh... what? + m = Message() + m.add_byte(cMSG_USERAUTH_FAILURE) + m.add_string("none") + m.add_boolean(False) + self.transport._send_message(m) + return + if self.authenticated: + # ignore + return + username = m.get_text() + service = m.get_text() + method = m.get_text() + self._log( + DEBUG, + "Auth request (type={}) service={}, username={}".format( + method, service, username + ), + ) + if service != "ssh-connection": + self._disconnect_service_not_available() + return + if (self.auth_username is not None) and ( + self.auth_username != username + ): + self._log( + WARNING, + "Auth rejected because the client attempted to change username in mid-flight", # noqa + ) + self._disconnect_no_more_auth() + return + self.auth_username = username + # check if GSS-API authentication is enabled + gss_auth = self.transport.server_object.enable_auth_gssapi() + + if method == "none": + result = self.transport.server_object.check_auth_none(username) + elif method == "password": + changereq = m.get_boolean() + password = m.get_binary() + try: + password = password.decode("UTF-8") + except UnicodeError: + # some clients/servers expect non-utf-8 passwords! + # in this case, just return the raw byte string. 
+ pass + if changereq: + # always treated as failure, since we don't support changing + # passwords, but collect the list of valid auth types from + # the callback anyway + self._log(DEBUG, "Auth request to change passwords (rejected)") + newpassword = m.get_binary() + try: + newpassword = newpassword.decode("UTF-8", "replace") + except UnicodeError: + pass + result = AUTH_FAILED + else: + result = self.transport.server_object.check_auth_password( + username, password + ) + elif method == "publickey": + sig_attached = m.get_boolean() + # NOTE: server never wants to guess a client's algo, they're + # telling us directly. No need for _finalize_pubkey_algorithm + # anywhere in this flow. + algorithm = m.get_text() + keyblob = m.get_binary() + try: + key = self._generate_key_from_request(algorithm, keyblob) + except SSHException as e: + self._log(INFO, "Auth rejected: public key: {}".format(str(e))) + key = None + except Exception as e: + msg = "Auth rejected: unsupported or mangled public key ({}: {})" # noqa + self._log(INFO, msg.format(e.__class__.__name__, e)) + key = None + if key is None: + self._disconnect_no_more_auth() + return + # first check if this key is okay... if not, we can skip the verify + result = self.transport.server_object.check_auth_publickey( + username, key + ) + if result != AUTH_FAILED: + # key is okay, verify it + if not sig_attached: + # client wants to know if this key is acceptable, before it + # signs anything... send special "ok" message + m = Message() + m.add_byte(cMSG_USERAUTH_PK_OK) + m.add_string(algorithm) + m.add_string(keyblob) + self.transport._send_message(m) + return + sig = Message(m.get_binary()) + blob = self._get_session_blob( + key, service, username, algorithm + ) + if not key.verify_ssh_sig(blob, sig): + self._log(INFO, "Auth rejected: invalid signature") + result = AUTH_FAILED + elif method == "keyboard-interactive": + submethods = m.get_string() + result = self.transport.server_object.check_auth_interactive( + username, submethods + ) + if isinstance(result, InteractiveQuery): + # make interactive query instead of response + self._interactive_query(result) + return + elif method == "gssapi-with-mic" and gss_auth: + sshgss = GSSAuth(method) + # Read the number of OID mechanisms supported by the client. + # OpenSSH sends just one OID. It's the Kerberos V5 OID and that's + # the only OID we support. + mechs = m.get_int() + # We can't accept more than one OID, so if the SSH client sends + # more than one, disconnect. + if mechs > 1: + self._log( + INFO, + "Disconnect: Received more than one GSS-API OID mechanism", + ) + self._disconnect_no_more_auth() + desired_mech = m.get_string() + mech_ok = sshgss.ssh_check_mech(desired_mech) + # if we don't support the mechanism, disconnect. + if not mech_ok: + self._log( + INFO, + "Disconnect: Received an invalid GSS-API OID mechanism", + ) + self._disconnect_no_more_auth() + # send the Kerberos V5 GSSAPI OID to the client + supported_mech = sshgss.ssh_gss_oids("server") + # RFC 4462 says we are not required to implement GSS-API error + # messages.
See section 3.8 in http://www.ietf.org/rfc/rfc4462.txt + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_RESPONSE) + m.add_bytes(supported_mech) + self.transport.auth_handler = GssapiWithMicAuthHandler( + self, sshgss + ) + self.transport._expected_packet = ( + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_REQUEST, + MSG_SERVICE_REQUEST, + ) + self.transport._send_message(m) + return + elif method == "gssapi-keyex" and gss_auth: + mic_token = m.get_string() + sshgss = self.transport.kexgss_ctxt + if sshgss is None: + # If there is no valid context, we reject the authentication + result = AUTH_FAILED + self._send_auth_result(username, method, result) + try: + sshgss.ssh_check_mic( + mic_token, self.transport.session_id, self.auth_username + ) + except Exception: + result = AUTH_FAILED + self._send_auth_result(username, method, result) + raise + result = AUTH_SUCCESSFUL + self.transport.server_object.check_auth_gssapi_keyex( + username, result + ) + else: + result = self.transport.server_object.check_auth_none(username) + # okay, send result + self._send_auth_result(username, method, result) + + def _parse_userauth_success(self, m): + self._log( + INFO, "Authentication ({}) successful!".format(self.auth_method) + ) + self.authenticated = True + self.transport._auth_trigger() + if self.auth_event is not None: + self.auth_event.set() + + def _parse_userauth_failure(self, m): + authlist = m.get_list() + # TODO 4.0: we aren't giving callers access to authlist _unless_ it's + # partial authentication, so eg authtype=none can't work unless we + # tweak this. + partial = m.get_boolean() + if partial: + self._log(INFO, "Authentication continues...") + self._log(DEBUG, "Methods: " + str(authlist)) + self.transport.saved_exception = PartialAuthentication(authlist) + elif self.auth_method not in authlist: + for msg in ( + "Authentication type ({}) not permitted.".format( + self.auth_method + ), + "Allowed methods: {}".format(authlist), + ): + self._log(DEBUG, msg) + self.transport.saved_exception = BadAuthenticationType( + "Bad authentication type", authlist + ) + else: + self._log( + INFO, "Authentication ({}) failed.".format(self.auth_method) + ) + self.authenticated = False + self.username = None + if self.auth_event is not None: + self.auth_event.set() + + def _parse_userauth_banner(self, m): + banner = m.get_string() + self.banner = banner + self._log(INFO, "Auth banner: {}".format(banner)) + # who cares. 
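+
+    # Illustrative client-side sketch of the keyboard-interactive flow that
+    # the next handler participates in. The handler, username, and prompts
+    # below are hypothetical, not part of this module:
+    #
+    #   import getpass
+    #
+    #   def handler(title, instructions, prompt_list):
+    #       # each prompt is a (text, echo) tuple, per RFC 4256
+    #       return [
+    #           input(text) if echo else getpass.getpass(text)
+    #           for text, echo in prompt_list
+    #       ]
+    #
+    #   transport.auth_interactive("someuser", handler)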
+ + def _parse_userauth_info_request(self, m): + if self.auth_method != "keyboard-interactive": + raise SSHException("Illegal info request from server") + title = m.get_text() + instructions = m.get_text() + m.get_binary() # lang + prompts = m.get_int() + prompt_list = [] + for i in range(prompts): + prompt_list.append((m.get_text(), m.get_boolean())) + response_list = self.interactive_handler( + title, instructions, prompt_list + ) + + m = Message() + m.add_byte(cMSG_USERAUTH_INFO_RESPONSE) + m.add_int(len(response_list)) + for r in response_list: + m.add_string(r) + self.transport._send_message(m) + + def _parse_userauth_info_response(self, m): + if not self.transport.server_mode: + raise SSHException("Illegal info response from server") + n = m.get_int() + responses = [] + for i in range(n): + responses.append(m.get_text()) + result = self.transport.server_object.check_auth_interactive_response( + responses + ) + if isinstance(result, InteractiveQuery): + # make interactive query instead of response + self._interactive_query(result) + return + self._send_auth_result( + self.auth_username, "keyboard-interactive", result + ) + + def _handle_local_gss_failure(self, e): + self.transport.saved_exception = e + self._log(DEBUG, "GSSAPI failure: {}".format(e)) + self._log(INFO, "Authentication ({}) failed.".format(self.auth_method)) + self.authenticated = False + self.username = None + if self.auth_event is not None: + self.auth_event.set() + return + + # TODO 4.0: MAY make sense to make these tables into actual + # classes/instances that can be fed a mode bool or whatever. Or, + # alternately (both?) make the message types small classes or enums that + # embed this info within themselves (which could also then tidy up the + # current 'integer -> human readable short string' stuff in common.py). + # TODO: if we do that, also expose 'em publicly. + + # Messages which should be handled _by_ servers (sent by clients) + @property + def _server_handler_table(self): + return { + # TODO 4.0: MSG_SERVICE_REQUEST ought to eventually move into + # Transport's server mode like the client side did, just for + # consistency. + MSG_SERVICE_REQUEST: self._parse_service_request, + MSG_USERAUTH_REQUEST: self._parse_userauth_request, + MSG_USERAUTH_INFO_RESPONSE: self._parse_userauth_info_response, + } + + # Messages which should be handled _by_ clients (sent by servers) + @property + def _client_handler_table(self): + return { + MSG_SERVICE_ACCEPT: self._parse_service_accept, + MSG_USERAUTH_SUCCESS: self._parse_userauth_success, + MSG_USERAUTH_FAILURE: self._parse_userauth_failure, + MSG_USERAUTH_BANNER: self._parse_userauth_banner, + MSG_USERAUTH_INFO_REQUEST: self._parse_userauth_info_request, + } + + # NOTE: prior to the fix for #1283, this was a static dict instead of a + # property. Should be backwards compatible in most/all cases. + @property + def _handler_table(self): + if self.transport.server_mode: + return self._server_handler_table + else: + return self._client_handler_table + + +class GssapiWithMicAuthHandler: + """A specialized Auth handler for gssapi-with-mic + + During the GSSAPI token exchange we need a modified dispatch table, + because the packet type numbers are not unique. 
+ """ + + method = "gssapi-with-mic" + + def __init__(self, delegate, sshgss): + self._delegate = delegate + self.sshgss = sshgss + + def abort(self): + self._restore_delegate_auth_handler() + return self._delegate.abort() + + @property + def transport(self): + return self._delegate.transport + + @property + def _send_auth_result(self): + return self._delegate._send_auth_result + + @property + def auth_username(self): + return self._delegate.auth_username + + @property + def gss_host(self): + return self._delegate.gss_host + + def _restore_delegate_auth_handler(self): + self.transport.auth_handler = self._delegate + + def _parse_userauth_gssapi_token(self, m): + client_token = m.get_string() + # use the client token as input to establish a secure + # context. + sshgss = self.sshgss + try: + token = sshgss.ssh_accept_sec_context( + self.gss_host, client_token, self.auth_username + ) + except Exception as e: + self.transport.saved_exception = e + result = AUTH_FAILED + self._restore_delegate_auth_handler() + self._send_auth_result(self.auth_username, self.method, result) + raise + if token is not None: + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + m.add_string(token) + self.transport._expected_packet = ( + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_GSSAPI_MIC, + MSG_USERAUTH_REQUEST, + ) + self.transport._send_message(m) + + def _parse_userauth_gssapi_mic(self, m): + mic_token = m.get_string() + sshgss = self.sshgss + username = self.auth_username + self._restore_delegate_auth_handler() + try: + sshgss.ssh_check_mic( + mic_token, self.transport.session_id, username + ) + except Exception as e: + self.transport.saved_exception = e + result = AUTH_FAILED + self._send_auth_result(username, self.method, result) + raise + # TODO: Implement client credential saving. + # The OpenSSH server is able to create a TGT with the delegated + # client credentials, but this is not supported by GSS-API. + result = AUTH_SUCCESSFUL + self.transport.server_object.check_auth_gssapi_with_mic( + username, result + ) + # okay, send result + self._send_auth_result(username, self.method, result) + + def _parse_service_request(self, m): + self._restore_delegate_auth_handler() + return self._delegate._parse_service_request(m) + + def _parse_userauth_request(self, m): + self._restore_delegate_auth_handler() + return self._delegate._parse_userauth_request(m) + + __handler_table = { + MSG_SERVICE_REQUEST: _parse_service_request, + MSG_USERAUTH_REQUEST: _parse_userauth_request, + MSG_USERAUTH_GSSAPI_TOKEN: _parse_userauth_gssapi_token, + MSG_USERAUTH_GSSAPI_MIC: _parse_userauth_gssapi_mic, + } + + @property + def _handler_table(self): + # TODO: determine if we can cut this up like we did for the primary + # AuthHandler class. + return self.__handler_table + + +class AuthOnlyHandler(AuthHandler): + """ + AuthHandler, and just auth, no service requests! + + .. versionadded:: 3.2 + """ + + # NOTE: this purposefully duplicates some of the parent class in order to + # modernize, refactor, etc. The intent is that eventually we will collapse + # this one onto the parent in a backwards incompatible release. + + @property + def _client_handler_table(self): + my_table = super()._client_handler_table.copy() + del my_table[MSG_SERVICE_ACCEPT] + return my_table + + def send_auth_request(self, username, method, finish_message=None): + """ + Submit a userauth request message & wait for response. 
+ + Performs the transport message send call, sets self.auth_event, and + will lock-n-block as necessary to both send, and wait for response to, + the USERAUTH_REQUEST. + + Most callers will want to supply a callback to ``finish_message``, + which accepts a Message ``m`` and may call mutator methods on it to add + more fields. + """ + # Store a few things for reference in handlers, including auth failure + # handler (which needs to know if we were using a bad method, etc) + self.auth_method = method + self.username = username + # Generic userauth request fields + m = Message() + m.add_byte(cMSG_USERAUTH_REQUEST) + m.add_string(username) + m.add_string("ssh-connection") + m.add_string(method) + # Caller usually has more to say, such as injecting password, key etc + finish_message(m) + # TODO 4.0: seems odd to have the client handle the lock and not + # Transport; that _may_ have been an artifact of allowing user + # threading event injection? Regardless, we don't want to move _this_ + # locking into Transport._send_message now, because lots of other + # untouched code also uses that method and we might end up + # double-locking (?) but 4.0 would be a good time to revisit. + with self.transport.lock: + self.transport._send_message(m) + # We have cut out the higher level event args, but self.auth_event is + # still required for self.wait_for_response to function correctly (it's + # the mechanism used by the auth success/failure handlers, the abort + # handler, and a few other spots like in gssapi. + # TODO: interestingly, wait_for_response itself doesn't actually + # enforce that its event argument and self.auth_event are the same... + self.auth_event = threading.Event() + return self.wait_for_response(self.auth_event) + + def auth_none(self, username): + return self.send_auth_request(username, "none") + + def auth_publickey(self, username, key): + key_type, bits = self._get_key_type_and_bits(key) + algorithm = self._finalize_pubkey_algorithm(key_type) + blob = self._get_session_blob( + key, + "ssh-connection", + username, + algorithm, + ) + + def finish(m): + # This field doesn't appear to be named, but is False when querying + # for permission (ie knowing whether to even prompt a user for + # passphrase, etc) or True when just going for it. Paramiko has + # never bothered with the former type of message, apparently. + m.add_boolean(True) + m.add_string(algorithm) + m.add_string(bits) + m.add_string(key.sign_ssh_data(blob, algorithm)) + + return self.send_auth_request(username, "publickey", finish) + + def auth_password(self, username, password): + def finish(m): + # Unnamed field that equates to "I am changing my password", which + # Paramiko clientside never supported and serverside only sort of + # supported. + m.add_boolean(False) + m.add_string(b(password)) + + return self.send_auth_request(username, "password", finish) + + def auth_interactive(self, username, handler, submethods=""): + """ + response_list = handler(title, instructions, prompt_list) + """ + # Unlike most siblings, this auth method _does_ require other + # superclass handlers (eg userauth info request) to understand + # what's going on, so we still set some self attributes. 
+ self.auth_method = "keyboard_interactive" + self.interactive_handler = handler + + def finish(m): + # Empty string for deprecated language tag field, per RFC 4256: + # https://www.rfc-editor.org/rfc/rfc4256#section-3.1 + m.add_string("") + m.add_string(submethods) + + return self.send_auth_request(username, "keyboard-interactive", finish) + + # NOTE: not strictly 'auth only' related, but allows users to opt-in. + def _choose_fallback_pubkey_algorithm(self, key_type, my_algos): + msg = "Server did not send a server-sig-algs list; defaulting to something in our preferred algorithms list" # noqa + self._log(DEBUG, msg) + noncert_key_type = key_type.replace("-cert-v01@openssh.com", "") + if key_type in my_algos or noncert_key_type in my_algos: + actual = key_type if key_type in my_algos else noncert_key_type + msg = f"Current key type, {actual!r}, is in our preferred list; using that" # noqa + algo = actual + else: + algo = my_algos[0] + msg = f"{key_type!r} not in our list - trying first list item instead, {algo!r}" # noqa + self._log(DEBUG, msg) + return algo diff --git a/venv/lib/python3.12/site-packages/paramiko/auth_strategy.py b/venv/lib/python3.12/site-packages/paramiko/auth_strategy.py new file mode 100644 index 0000000..03c1d87 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/auth_strategy.py @@ -0,0 +1,306 @@ +""" +Modern, adaptable authentication machinery. + +Replaces certain parts of `.SSHClient`. For a concrete implementation, see the +``OpenSSHAuthStrategy`` class in `Fabric `_. +""" + +from collections import namedtuple + +from .agent import AgentKey +from .util import get_logger +from .ssh_exception import AuthenticationException + + +class AuthSource: + """ + Some SSH authentication source, such as a password, private key, or agent. + + See subclasses in this module for concrete implementations. + + All implementations must accept at least a ``username`` (``str``) kwarg. + """ + + def __init__(self, username): + self.username = username + + def _repr(self, **kwargs): + # TODO: are there any good libs for this? maybe some helper from + # structlog? + pairs = [f"{k}={v!r}" for k, v in kwargs.items()] + joined = ", ".join(pairs) + return f"{self.__class__.__name__}({joined})" + + def __repr__(self): + return self._repr() + + def authenticate(self, transport): + """ + Perform authentication. + """ + raise NotImplementedError + + +class NoneAuth(AuthSource): + """ + Auth type "none", ie https://www.rfc-editor.org/rfc/rfc4252#section-5.2 . + """ + + def authenticate(self, transport): + return transport.auth_none(self.username) + + +class Password(AuthSource): + """ + Password authentication. + + :param callable password_getter: + A lazy callable that should return a `str` password value at + authentication time, such as a `functools.partial` wrapping + `getpass.getpass`, an API call to a secrets store, or similar. + + If you already know the password at instantiation time, you should + simply use something like ``lambda: "my literal"`` (for a literal, but + also, shame on you!) or ``lambda: variable_name`` (for something stored + in a variable). + """ + + def __init__(self, username, password_getter): + super().__init__(username=username) + self.password_getter = password_getter + + def __repr__(self): + # Password auth is marginally more 'username-caring' than pkeys, so may + # as well log that info here. 
+ return super()._repr(user=self.username) + + def authenticate(self, transport): + # Lazily get the password, in case it's prompting a user + # TODO: be nice to log source _of_ the password? + password = self.password_getter() + return transport.auth_password(self.username, password) + + +# TODO 4.0: twiddle this, or PKey, or both, so they're more obviously distinct. +# TODO 4.0: the obvious is to make this more wordy (PrivateKeyAuth), the +# minimalist approach might be to rename PKey to just Key (esp given all the +# subclasses are WhateverKey and not WhateverPKey) +class PrivateKey(AuthSource): + """ + Essentially a mixin for private keys. + + Knows how to auth, but leaves key material discovery/loading/decryption to + subclasses. + + Subclasses **must** ensure that they've set ``self.pkey`` to a decrypted + `.PKey` instance before calling ``super().authenticate``; typically + either in their ``__init__``, or in an overridden ``authenticate`` prior to + its `super` call. + """ + + def authenticate(self, transport): + return transport.auth_publickey(self.username, self.pkey) + + +class InMemoryPrivateKey(PrivateKey): + """ + An in-memory, decrypted `.PKey` object. + """ + + def __init__(self, username, pkey): + super().__init__(username=username) + # No decryption (presumably) necessary! + self.pkey = pkey + + def __repr__(self): + # NOTE: most of interesting repr-bits for private keys is in PKey. + # TODO: tacking on agent-ness like this is a bit awkward, but, eh? + rep = super()._repr(pkey=self.pkey) + if isinstance(self.pkey, AgentKey): + rep += " [agent]" + return rep + + +class OnDiskPrivateKey(PrivateKey): + """ + Some on-disk private key that needs opening and possibly decrypting. + + :param str source: + String tracking where this key's path was specified; should be one of + ``"ssh-config"``, ``"python-config"``, or ``"implicit-home"``. + :param Path path: + The filesystem path this key was loaded from. + :param PKey pkey: + The `PKey` object this auth source uses/represents. + """ + + def __init__(self, username, source, path, pkey): + super().__init__(username=username) + self.source = source + allowed = ("ssh-config", "python-config", "implicit-home") + if source not in allowed: + raise ValueError(f"source argument must be one of: {allowed!r}") + self.path = path + # Superclass wants .pkey, other two are mostly for display/debugging. + self.pkey = pkey + + def __repr__(self): + return self._repr( + key=self.pkey, source=self.source, path=str(self.path) + ) + + +# TODO re sources: is there anything in an OpenSSH config file that doesn't fit +# into what Paramiko already had kwargs for? + + +SourceResult = namedtuple("SourceResult", ["source", "result"]) + +# TODO: tempting to make this an OrderedDict, except the keys essentially want +# to be rich objects (AuthSources) which do not make for useful user indexing? +# TODO: members being vanilla tuples is pretty old-school/expedient; they +# "really" want to be something that's type friendlier (unless the tuple's 2nd +# member being a Union of two types is "fine"?), which I assume means yet more +# classes, eg an abstract SourceResult with concrete AuthSuccess and +# AuthFailure children? +# TODO: arguably we want __init__ typechecking of the members (or to leverage +# mypy by classifying this literally as list-of-AuthSource?) +class AuthResult(list): + """ + Represents a partial or complete SSH authentication attempt. 
+ + This class conceptually extends `AuthStrategy` by pairing the former's + authentication **sources** with the **results** of trying to authenticate + with them. + + `AuthResult` is a (subclass of) `list` of `namedtuple`, which are of the + form ``namedtuple('SourceResult', 'source', 'result')`` (where the + ``source`` member is an `AuthSource` and the ``result`` member is either a + return value from the relevant `.Transport` method, or an exception + object). + + .. note:: + Transport auth method results are always themselves a ``list`` of "next + allowable authentication methods". + + In the simple case of "you just authenticated successfully", it's an + empty list; if your auth was rejected but you're allowed to try again, + it will be a list of string method names like ``pubkey`` or + ``password``. + + The ``__str__`` of this class represents the empty-list scenario as the + word ``success``, which should make reading the result of an + authentication session more obvious to humans. + + Instances also have a `strategy` attribute referencing the `AuthStrategy` + which was attempted. + """ + + def __init__(self, strategy, *args, **kwargs): + self.strategy = strategy + super().__init__(*args, **kwargs) + + def __str__(self): + # NOTE: meaningfully distinct from __repr__, which still wants to use + # superclass' implementation. + # TODO: go hog wild, use rich.Table? how is that on degraded term's? + # TODO: test this lol + return "\n".join( + f"{x.source} -> {x.result or 'success'}" for x in self + ) + + +# TODO 4.0: descend from SSHException or even just Exception +class AuthFailure(AuthenticationException): + """ + Basic exception wrapping an `AuthResult` indicating overall auth failure. + + Note that `AuthFailure` descends from `AuthenticationException` but is + generally "higher level"; the latter is now only raised by individual + `AuthSource` attempts and should typically only be seen by users when + encapsulated in this class. It subclasses `AuthenticationException` + primarily for backwards compatibility reasons. + """ + + def __init__(self, result): + self.result = result + + def __str__(self): + return "\n" + str(self.result) + + +class AuthStrategy: + """ + This class represents one or more attempts to auth with an SSH server. + + By default, subclasses must at least accept an ``ssh_config`` + (`.SSHConfig`) keyword argument, but may opt to accept more as needed for + their particular strategy. + """ + + def __init__( + self, + ssh_config, + ): + self.ssh_config = ssh_config + self.log = get_logger(__name__) + + def get_sources(self): + """ + Generator yielding `AuthSource` instances, in the order to try. + + This is the primary override point for subclasses: you figure out what + sources you need, and ``yield`` them. + + Subclasses _of_ subclasses may find themselves wanting to do things + like filtering or discarding around a call to `super`. + """ + raise NotImplementedError + + def authenticate(self, transport): + """ + Handles attempting `AuthSource` instances yielded from `get_sources`. + + You *normally* won't need to override this, but it's an option for + advanced users. + """ + succeeded = False + overall_result = AuthResult(strategy=self) + # TODO: arguably we could fit in a "send none auth, record allowed auth + # types sent back" thing here as OpenSSH-client does, but that likely + # wants to live in fabric.OpenSSHAuthStrategy as not all target servers + # will implement it! + # TODO: needs better "server told us too many attempts" checking! 
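+        # Purely illustrative subclass sketch (not part of this module): a
+        # strategy that tries an in-memory key, then a prompted password.
+        # "someuser" and my_pkey are hypothetical stand-ins:
+        #
+        #   class KeyThenPassword(AuthStrategy):
+        #       def get_sources(self):
+        #           yield InMemoryPrivateKey(username="someuser", pkey=my_pkey)
+        #           yield Password(
+        #               username="someuser", password_getter=getpass.getpass
+        #           )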
+ for source in self.get_sources(): + self.log.debug(f"Trying {source}") + try: # NOTE: this really wants to _only_ wrap the authenticate()! + result = source.authenticate(transport) + succeeded = True + # TODO: 'except PartialAuthentication' is needed for 2FA and + # similar, as per old SSHClient.connect - it is the only way + # AuthHandler supplies access to the 'name-list' field from + # MSG_USERAUTH_FAILURE, at present. + except Exception as e: + result = e + # TODO: look at what this could possibly raise, we don't really + # want Exception here, right? just SSHException subclasses? or + # do we truly want to capture anything at all with assumption + # it's easy enough for users to look afterwards? + # NOTE: showing type, not message, for tersity & also most of + # the time it's basically just "Authentication failed." + source_class = e.__class__.__name__ + self.log.info( + f"Authentication via {source} failed with {source_class}" + ) + overall_result.append(SourceResult(source, result)) + if succeeded: + break + # Gotta die here if nothing worked, otherwise Transport's main loop + # just kinda hangs out until something times out! + if not succeeded: + raise AuthFailure(result=overall_result) + # Success: give back what was done, in case they care. + return overall_result + + # TODO: is there anything OpenSSH client does which _can't_ cleanly map to + # iterating a generator? diff --git a/venv/lib/python3.12/site-packages/paramiko/ber.py b/venv/lib/python3.12/site-packages/paramiko/ber.py new file mode 100644 index 0000000..b8287f5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/ber.py @@ -0,0 +1,139 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +from paramiko.common import max_byte, zero_byte, byte_ord, byte_chr + +import paramiko.util as util +from paramiko.util import b +from paramiko.sftp import int64 + + +class BERException(Exception): + pass + + +class BER: + """ + Robey's tiny little attempt at a BER decoder. 
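+
+    A round-trip sketch (encode a small list of ints, then decode it back)::
+
+        ber = BER()
+        ber.encode([1, 2, 3])
+        assert BER(ber.asbytes()).decode() == [1, 2, 3]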
+ """ + + def __init__(self, content=bytes()): + self.content = b(content) + self.idx = 0 + + def asbytes(self): + return self.content + + def __str__(self): + return self.asbytes() + + def __repr__(self): + return "BER('" + repr(self.content) + "')" + + def decode(self): + return self.decode_next() + + def decode_next(self): + if self.idx >= len(self.content): + return None + ident = byte_ord(self.content[self.idx]) + self.idx += 1 + if (ident & 31) == 31: + # identifier > 30 + ident = 0 + while self.idx < len(self.content): + t = byte_ord(self.content[self.idx]) + self.idx += 1 + ident = (ident << 7) | (t & 0x7F) + if not (t & 0x80): + break + if self.idx >= len(self.content): + return None + # now fetch length + size = byte_ord(self.content[self.idx]) + self.idx += 1 + if size & 0x80: + # more complimicated... + # FIXME: theoretically should handle indefinite-length (0x80) + t = size & 0x7F + if self.idx + t > len(self.content): + return None + size = util.inflate_long( + self.content[self.idx : self.idx + t], True + ) + self.idx += t + if self.idx + size > len(self.content): + # can't fit + return None + data = self.content[self.idx : self.idx + size] + self.idx += size + # now switch on id + if ident == 0x30: + # sequence + return self.decode_sequence(data) + elif ident == 2: + # int + return util.inflate_long(data) + else: + # 1: boolean (00 false, otherwise true) + msg = "Unknown ber encoding type {:d} (robey is lazy)" + raise BERException(msg.format(ident)) + + @staticmethod + def decode_sequence(data): + out = [] + ber = BER(data) + while True: + x = ber.decode_next() + if x is None: + break + out.append(x) + return out + + def encode_tlv(self, ident, val): + # no need to support ident > 31 here + self.content += byte_chr(ident) + if len(val) > 0x7F: + lenstr = util.deflate_long(len(val)) + self.content += byte_chr(0x80 + len(lenstr)) + lenstr + else: + self.content += byte_chr(len(val)) + self.content += val + + def encode(self, x): + if type(x) is bool: + if x: + self.encode_tlv(1, max_byte) + else: + self.encode_tlv(1, zero_byte) + elif (type(x) is int) or (type(x) is int64): + self.encode_tlv(2, util.deflate_long(x)) + elif type(x) is str: + self.encode_tlv(4, x) + elif (type(x) is list) or (type(x) is tuple): + self.encode_tlv(0x30, self.encode_sequence(x)) + else: + raise BERException( + "Unknown type for encoding: {!r}".format(type(x)) + ) + + @staticmethod + def encode_sequence(data): + ber = BER() + for item in data: + ber.encode(item) + return ber.asbytes() diff --git a/venv/lib/python3.12/site-packages/paramiko/buffered_pipe.py b/venv/lib/python3.12/site-packages/paramiko/buffered_pipe.py new file mode 100644 index 0000000..c19279c --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/buffered_pipe.py @@ -0,0 +1,212 @@ +# Copyright (C) 2006-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Attempt to generalize the "feeder" part of a `.Channel`: an object which can be +read from and closed, but is reading from a buffer fed by another thread. The +read operations are blocking and can have a timeout set. +""" + +import array +import threading +import time +from paramiko.util import b + + +class PipeTimeout(IOError): + """ + Indicates that a timeout was reached on a read from a `.BufferedPipe`. + """ + + pass + + +class BufferedPipe: + """ + A buffer that obeys normal read (with timeout) & close semantics for a + file or socket, but is fed data from another thread. This is used by + `.Channel`. + """ + + def __init__(self): + self._lock = threading.Lock() + self._cv = threading.Condition(self._lock) + self._event = None + self._buffer = array.array("B") + self._closed = False + + def _buffer_frombytes(self, data): + self._buffer.frombytes(data) + + def _buffer_tobytes(self, limit=None): + return self._buffer[:limit].tobytes() + + def set_event(self, event): + """ + Set an event on this buffer. When data is ready to be read (or the + buffer has been closed), the event will be set. When no data is + ready, the event will be cleared. + + :param threading.Event event: the event to set/clear + """ + self._lock.acquire() + try: + self._event = event + # Make sure the event starts in `set` state if we appear to already + # be closed; otherwise, if we start in `clear` state & are closed, + # nothing will ever call `.feed` and the event (& OS pipe, if we're + # wrapping one - see `Channel.fileno`) will permanently stay in + # `clear`, causing deadlock if e.g. `select`ed upon. + if self._closed or len(self._buffer) > 0: + event.set() + else: + event.clear() + finally: + self._lock.release() + + def feed(self, data): + """ + Feed new data into this pipe. This method is assumed to be called + from a separate thread, so synchronization is done. + + :param data: the data to add, as a ``str`` or ``bytes`` + """ + self._lock.acquire() + try: + if self._event is not None: + self._event.set() + self._buffer_frombytes(b(data)) + self._cv.notify_all() + finally: + self._lock.release() + + def read_ready(self): + """ + Returns true if data is buffered and ready to be read from this + feeder. A ``False`` result does not mean that the feeder has closed; + it means you may need to wait before more data arrives. + + :return: + ``True`` if a `read` call would immediately return at least one + byte; ``False`` otherwise. + """ + self._lock.acquire() + try: + if len(self._buffer) == 0: + return False + return True + finally: + self._lock.release() + + def read(self, nbytes, timeout=None): + """ + Read data from the pipe. The return value is a string representing + the data received. The maximum amount of data to be received at once + is specified by ``nbytes``. If a string of length zero is returned, + the pipe has been closed. + + The optional ``timeout`` argument can be a nonnegative float expressing + seconds, or ``None`` for no timeout. If a float is given, a + `.PipeTimeout` will be raised if the timeout period value has elapsed + before any data arrives. 
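+
+        A minimal single-threaded sketch (``feed`` is normally invoked by a
+        separate reader thread)::
+
+            p = BufferedPipe()
+            p.feed(b"hello")
+            assert p.read(5, timeout=1.0) == b"hello"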
+ + :param int nbytes: maximum number of bytes to read + :param float timeout: + maximum seconds to wait (or ``None``, the default, to wait forever) + :return: the read data, as a ``str`` or ``bytes`` + + :raises: + `.PipeTimeout` -- if a timeout was specified and no data was ready + before that timeout + """ + out = bytes() + self._lock.acquire() + try: + if len(self._buffer) == 0: + if self._closed: + return out + # should we block? + if timeout == 0.0: + raise PipeTimeout() + # loop here in case we get woken up but a different thread has + # grabbed everything in the buffer. + while (len(self._buffer) == 0) and not self._closed: + then = time.time() + self._cv.wait(timeout) + if timeout is not None: + timeout -= time.time() - then + if timeout <= 0.0: + raise PipeTimeout() + + # something's in the buffer and we have the lock! + if len(self._buffer) <= nbytes: + out = self._buffer_tobytes() + del self._buffer[:] + if (self._event is not None) and not self._closed: + self._event.clear() + else: + out = self._buffer_tobytes(nbytes) + del self._buffer[:nbytes] + finally: + self._lock.release() + + return out + + def empty(self): + """ + Clear out the buffer and return all data that was in it. + + :return: + any data that was in the buffer prior to clearing it out, as a + `str` + """ + self._lock.acquire() + try: + out = self._buffer_tobytes() + del self._buffer[:] + if (self._event is not None) and not self._closed: + self._event.clear() + return out + finally: + self._lock.release() + + def close(self): + """ + Close this pipe object. Future calls to `read` after the buffer + has been emptied will return immediately with an empty string. + """ + self._lock.acquire() + try: + self._closed = True + self._cv.notify_all() + if self._event is not None: + self._event.set() + finally: + self._lock.release() + + def __len__(self): + """ + Return the number of bytes buffered. + + :return: number (`int`) of bytes buffered + """ + self._lock.acquire() + try: + return len(self._buffer) + finally: + self._lock.release() diff --git a/venv/lib/python3.12/site-packages/paramiko/channel.py b/venv/lib/python3.12/site-packages/paramiko/channel.py new file mode 100644 index 0000000..2757450 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/channel.py @@ -0,0 +1,1390 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Abstraction for an SSH2 channel. 
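+
+Typical client-side usage (a sketch; host, credentials, and command are
+illustrative, with auth assumed to come from local keys or an agent)::
+
+    from paramiko import SSHClient
+
+    client = SSHClient()
+    client.load_system_host_keys()
+    client.connect("host.example.com")
+    chan = client.get_transport().open_session()
+    chan.exec_command("uptime")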
+""" + +import binascii +import os +import socket +import time +import threading + +from functools import wraps + +from paramiko import util +from paramiko.common import ( + cMSG_CHANNEL_REQUEST, + cMSG_CHANNEL_WINDOW_ADJUST, + cMSG_CHANNEL_DATA, + cMSG_CHANNEL_EXTENDED_DATA, + DEBUG, + ERROR, + cMSG_CHANNEL_SUCCESS, + cMSG_CHANNEL_FAILURE, + cMSG_CHANNEL_EOF, + cMSG_CHANNEL_CLOSE, +) +from paramiko.message import Message +from paramiko.ssh_exception import SSHException +from paramiko.file import BufferedFile +from paramiko.buffered_pipe import BufferedPipe, PipeTimeout +from paramiko import pipe +from paramiko.util import ClosingContextManager + + +def open_only(func): + """ + Decorator for `.Channel` methods which performs an openness check. + + :raises: + `.SSHException` -- If the wrapped method is called on an unopened + `.Channel`. + """ + + @wraps(func) + def _check(self, *args, **kwds): + if ( + self.closed + or self.eof_received + or self.eof_sent + or not self.active + ): + raise SSHException("Channel is not open") + return func(self, *args, **kwds) + + return _check + + +class Channel(ClosingContextManager): + """ + A secure tunnel across an SSH `.Transport`. A Channel is meant to behave + like a socket, and has an API that should be indistinguishable from the + Python socket API. + + Because SSH2 has a windowing kind of flow control, if you stop reading data + from a Channel and its buffer fills up, the server will be unable to send + you any more data until you read some of it. (This won't affect other + channels on the same transport -- all channels on a single transport are + flow-controlled independently.) Similarly, if the server isn't reading + data you send, calls to `send` may block, unless you set a timeout. This + is exactly like a normal network socket, so it shouldn't be too surprising. + + Instances of this class may be used as context managers. + """ + + def __init__(self, chanid): + """ + Create a new channel. The channel is not associated with any + particular session or `.Transport` until the Transport attaches it. + Normally you would only call this method from the constructor of a + subclass of `.Channel`. + + :param int chanid: + the ID of this channel, as passed by an existing `.Transport`. + """ + #: Channel ID + self.chanid = chanid + #: Remote channel ID + self.remote_chanid = 0 + #: `.Transport` managing this channel + self.transport = None + #: Whether the connection is presently active + self.active = False + self.eof_received = 0 + self.eof_sent = 0 + self.in_buffer = BufferedPipe() + self.in_stderr_buffer = BufferedPipe() + self.timeout = None + #: Whether the connection has been closed + self.closed = False + self.ultra_debug = False + self.lock = threading.Lock() + self.out_buffer_cv = threading.Condition(self.lock) + self.in_window_size = 0 + self.out_window_size = 0 + self.in_max_packet_size = 0 + self.out_max_packet_size = 0 + self.in_window_threshold = 0 + self.in_window_sofar = 0 + self.status_event = threading.Event() + self._name = str(chanid) + self.logger = util.get_logger("paramiko.transport") + self._pipe = None + self.event = threading.Event() + self.event_ready = False + self.combine_stderr = False + self.exit_status = -1 + self.origin_addr = None + + def __del__(self): + try: + self.close() + except: + pass + + def __repr__(self): + """ + Return a string representation of this object, for debugging. 
+ """ + out = " 0: + out += " in-buffer={}".format(len(self.in_buffer)) + out += " -> " + repr(self.transport) + out += ">" + return out + + @open_only + def get_pty( + self, + term="vt100", + width=80, + height=24, + width_pixels=0, + height_pixels=0, + ): + """ + Request a pseudo-terminal from the server. This is usually used right + after creating a client channel, to ask the server to provide some + basic terminal semantics for a shell invoked with `invoke_shell`. + It isn't necessary (or desirable) to call this method if you're going + to execute a single command with `exec_command`. + + :param str term: the terminal type to emulate + (for example, ``'vt100'``) + :param int width: width (in characters) of the terminal screen + :param int height: height (in characters) of the terminal screen + :param int width_pixels: width (in pixels) of the terminal screen + :param int height_pixels: height (in pixels) of the terminal screen + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("pty-req") + m.add_boolean(True) + m.add_string(term) + m.add_int(width) + m.add_int(height) + m.add_int(width_pixels) + m.add_int(height_pixels) + m.add_string(bytes()) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def invoke_shell(self): + """ + Request an interactive shell session on this channel. If the server + allows it, the channel will then be directly connected to the stdin, + stdout, and stderr of the shell. + + Normally you would call `get_pty` before this, in which case the + shell will operate through the pty, and the channel will be connected + to the stdin and stdout of the pty. + + When the shell exits, the channel will be closed and can't be reused. + You must open a new channel if you wish to open another shell. + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("shell") + m.add_boolean(True) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def exec_command(self, command): + """ + Execute a command on the server. If the server allows it, the channel + will then be directly connected to the stdin, stdout, and stderr of + the command being executed. + + When the command finishes executing, the channel will be closed and + can't be reused. You must open a new channel if you wish to execute + another command. + + :param str command: a shell command to execute. + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("exec") + m.add_boolean(True) + m.add_string(command) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def invoke_subsystem(self, subsystem): + """ + Request a subsystem on the server (for example, ``sftp``). If the + server allows it, the channel will then be directly connected to the + requested subsystem. + + When the subsystem finishes, the channel will be closed and can't be + reused. + + :param str subsystem: name of the subsystem being requested. 
+ + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("subsystem") + m.add_boolean(True) + m.add_string(subsystem) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + + @open_only + def resize_pty(self, width=80, height=24, width_pixels=0, height_pixels=0): + """ + Resize the pseudo-terminal. This can be used to change the width and + height of the terminal emulation created in a previous `get_pty` call. + + :param int width: new width (in characters) of the terminal screen + :param int height: new height (in characters) of the terminal screen + :param int width_pixels: new width (in pixels) of the terminal screen + :param int height_pixels: new height (in pixels) of the terminal screen + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("window-change") + m.add_boolean(False) + m.add_int(width) + m.add_int(height) + m.add_int(width_pixels) + m.add_int(height_pixels) + self.transport._send_user_message(m) + + @open_only + def update_environment(self, environment): + """ + Updates this channel's remote shell environment. + + .. note:: + This operation is additive - i.e. the current environment is not + reset before the given environment variables are set. + + .. warning:: + Servers may silently reject some environment variables; see the + warning in `set_environment_variable` for details. + + :param dict environment: + a dictionary containing the name and respective values to set + :raises: + `.SSHException` -- if any of the environment variables was rejected + by the server or the channel was closed + """ + for name, value in environment.items(): + try: + self.set_environment_variable(name, value) + except SSHException as e: + err = 'Failed to set environment variable "{}".' + raise SSHException(err.format(name), e) + + @open_only + def set_environment_variable(self, name, value): + """ + Set the value of an environment variable. + + .. warning:: + The server may reject this request depending on its ``AcceptEnv`` + setting; such rejections will fail silently (which is common client + practice for this particular request type). Make sure you + understand your server's configuration before using! + + :param str name: name of the environment variable + :param str value: value of the environment variable + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("env") + m.add_boolean(False) + m.add_string(name) + m.add_string(value) + self.transport._send_user_message(m) + + def exit_status_ready(self): + """ + Return true if the remote process has exited and returned an exit + status. You may use this to poll the process status if you don't + want to block in `recv_exit_status`. Note that the server may not + return an exit status in some cases (like bad servers). + + :return: + ``True`` if `recv_exit_status` will return immediately, else + ``False``. + + .. versionadded:: 1.7.3 + """ + return self.closed or self.status_event.is_set() + + def recv_exit_status(self): + """ + Return the exit status from the process on the server. This is + mostly useful for retrieving the results of an `exec_command`. 
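+
+        For example (a sketch; ``chan`` is an open session channel)::
+
+            chan.exec_command("true")
+            status = chan.recv_exit_status()
+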
+ If the command hasn't finished yet, this method will wait until + it does, or until the channel is closed. If no exit status is + provided by the server, -1 is returned. + + .. warning:: + In some situations, receiving remote output larger than the current + `.Transport` or session's ``window_size`` (e.g. that set by the + ``default_window_size`` kwarg for `.Transport.__init__`) will cause + `.recv_exit_status` to hang indefinitely if it is called prior to a + sufficiently large `.Channel.recv` (or if there are no threads + calling `.Channel.recv` in the background). + + In these cases, ensuring that `.recv_exit_status` is called *after* + `.Channel.recv` (or, again, using threads) can avoid the hang. + + :return: the exit code (as an `int`) of the process on the server. + + .. versionadded:: 1.2 + """ + self.status_event.wait() + assert self.status_event.is_set() + return self.exit_status + + def send_exit_status(self, status): + """ + Send the exit status of an executed command to the client. (This + really only makes sense in server mode.) Many clients expect to + get some sort of status code back from an executed command after + it completes. + + :param int status: the exit code of the process + + .. versionadded:: 1.2 + """ + # in many cases, the channel will not still be open here. + # that's fine. + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("exit-status") + m.add_boolean(False) + m.add_int(status) + self.transport._send_user_message(m) + + @open_only + def request_x11( + self, + screen_number=0, + auth_protocol=None, + auth_cookie=None, + single_connection=False, + handler=None, + ): + """ + Request an x11 session on this channel. If the server allows it, + further x11 requests can be made from the server to the client, + when an x11 application is run in a shell session. + + From :rfc:`4254`:: + + It is RECOMMENDED that the 'x11 authentication cookie' that is + sent be a fake, random cookie, and that the cookie be checked and + replaced by the real cookie when a connection request is received. + + If you omit the auth_cookie, a new secure random 128-bit value will be + generated, used, and returned. You will need to use this value to + verify incoming x11 requests and replace them with the actual local + x11 cookie (which requires some knowledge of the x11 protocol). + + If a handler is passed in, the handler is called from another thread + whenever a new x11 connection arrives. The default handler queues up + incoming x11 connections, which may be retrieved using + `.Transport.accept`. The handler's calling signature is:: + + handler(channel: Channel, (address: str, port: int)) + + :param int screen_number: the x11 screen number (0, 10, etc.) 
+ :param str auth_protocol: + the name of the X11 authentication method used; if none is given, + ``"MIT-MAGIC-COOKIE-1"`` is used + :param str auth_cookie: + hexadecimal string containing the x11 auth cookie; if none is + given, a secure random 128-bit value is generated + :param bool single_connection: + if True, only a single x11 connection will be forwarded (by + default, any number of x11 connections can arrive over this + session) + :param handler: + an optional callable handler to use for incoming X11 connections + :return: the auth_cookie used + """ + if auth_protocol is None: + auth_protocol = "MIT-MAGIC-COOKIE-1" + if auth_cookie is None: + auth_cookie = binascii.hexlify(os.urandom(16)) + + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("x11-req") + m.add_boolean(True) + m.add_boolean(single_connection) + m.add_string(auth_protocol) + m.add_string(auth_cookie) + m.add_int(screen_number) + self._event_pending() + self.transport._send_user_message(m) + self._wait_for_event() + self.transport._set_x11_handler(handler) + return auth_cookie + + @open_only + def request_forward_agent(self, handler): + """ + Request for a forward SSH Agent on this channel. + This is only valid for an ssh-agent from OpenSSH !!! + + :param handler: + a required callable handler to use for incoming SSH Agent + connections + + :return: True if we are ok, else False + (at that time we always return ok) + + :raises: SSHException in case of channel problem. + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("auth-agent-req@openssh.com") + m.add_boolean(False) + self.transport._send_user_message(m) + self.transport._set_forward_agent_handler(handler) + return True + + def get_transport(self): + """ + Return the `.Transport` associated with this channel. + """ + return self.transport + + def set_name(self, name): + """ + Set a name for this channel. Currently it's only used to set the name + of the channel in logfile entries. The name can be fetched with the + `get_name` method. + + :param str name: new channel name + """ + self._name = name + + def get_name(self): + """ + Get the name of this channel that was previously set by `set_name`. + """ + return self._name + + def get_id(self): + """ + Return the `int` ID # for this channel. + + The channel ID is unique across a `.Transport` and usually a small + number. It's also the number passed to + `.ServerInterface.check_channel_request` when determining whether to + accept a channel request in server mode. + """ + return self.chanid + + def set_combine_stderr(self, combine): + """ + Set whether stderr should be combined into stdout on this channel. + The default is ``False``, but in some cases it may be convenient to + have both streams combined. + + If this is ``False``, and `exec_command` is called (or ``invoke_shell`` + with no pty), output to stderr will not show up through the `recv` + and `recv_ready` calls. You will have to use `recv_stderr` and + `recv_stderr_ready` to get stderr output. + + If this is ``True``, data will never show up via `recv_stderr` or + `recv_stderr_ready`. + + :param bool combine: + ``True`` if stderr output should be combined into stdout on this + channel. + :return: the previous setting (a `bool`). + + .. 
versionadded:: 1.1 + """ + data = bytes() + self.lock.acquire() + try: + old = self.combine_stderr + self.combine_stderr = combine + if combine and not old: + # copy old stderr buffer into primary buffer + data = self.in_stderr_buffer.empty() + finally: + self.lock.release() + if len(data) > 0: + self._feed(data) + return old + + # ...socket API... + + def settimeout(self, timeout): + """ + Set a timeout on blocking read/write operations. The ``timeout`` + argument can be a nonnegative float expressing seconds, or ``None``. + If a float is given, subsequent channel read/write operations will + raise a timeout exception if the timeout period value has elapsed + before the operation has completed. Setting a timeout of ``None`` + disables timeouts on socket operations. + + ``chan.settimeout(0.0)`` is equivalent to ``chan.setblocking(0)``; + ``chan.settimeout(None)`` is equivalent to ``chan.setblocking(1)``. + + :param float timeout: + seconds to wait for a pending read/write operation before raising + ``socket.timeout``, or ``None`` for no timeout. + """ + self.timeout = timeout + + def gettimeout(self): + """ + Returns the timeout in seconds (as a float) associated with socket + operations, or ``None`` if no timeout is set. This reflects the last + call to `setblocking` or `settimeout`. + """ + return self.timeout + + def setblocking(self, blocking): + """ + Set blocking or non-blocking mode of the channel: if ``blocking`` is 0, + the channel is set to non-blocking mode; otherwise it's set to blocking + mode. Initially all channels are in blocking mode. + + In non-blocking mode, if a `recv` call doesn't find any data, or if a + `send` call can't immediately dispose of the data, an error exception + is raised. In blocking mode, the calls block until they can proceed. An + EOF condition is considered "immediate data" for `recv`, so if the + channel is closed in the read direction, it will never block. + + ``chan.setblocking(0)`` is equivalent to ``chan.settimeout(0)``; + ``chan.setblocking(1)`` is equivalent to ``chan.settimeout(None)``. + + :param int blocking: + 0 to set non-blocking mode; non-0 to set blocking mode. + """ + if blocking: + self.settimeout(None) + else: + self.settimeout(0.0) + + def getpeername(self): + """ + Return the address of the remote side of this Channel, if possible. + + This simply wraps `.Transport.getpeername`, used to provide enough of a + socket-like interface to allow asyncore to work. (asyncore likes to + call ``'getpeername'``.) + """ + return self.transport.getpeername() + + def close(self): + """ + Close the channel. All future read/write operations on the channel + will fail. The remote end will receive no more data (after queued data + is flushed). Channels are automatically closed when their `.Transport` + is closed or when they are garbage collected. + """ + self.lock.acquire() + try: + # only close the pipe when the user explicitly closes the channel. + # otherwise they will get unpleasant surprises. (and do it before + # checking self.closed, since the remote host may have already + # closed the connection.) + if self._pipe is not None: + self._pipe.close() + self._pipe = None + + if not self.active or self.closed: + return + msgs = self._close_internal() + finally: + self.lock.release() + for m in msgs: + if m is not None: + self.transport._send_user_message(m) + + def recv_ready(self): + """ + Returns true if data is buffered and ready to be read from this + channel. 
A ``False`` result does not mean that the channel has closed; + it means you may need to wait before more data arrives. + + :return: + ``True`` if a `recv` call on this channel would immediately return + at least one byte; ``False`` otherwise. + """ + return self.in_buffer.read_ready() + + def recv(self, nbytes): + """ + Receive data from the channel. The return value is a string + representing the data received. The maximum amount of data to be + received at once is specified by ``nbytes``. If a string of + length zero is returned, the channel stream has closed. + + :param int nbytes: maximum number of bytes to read. + :return: received data, as a `bytes`. + + :raises socket.timeout: + if no data is ready before the timeout set by `settimeout`. + """ + try: + out = self.in_buffer.read(nbytes, self.timeout) + except PipeTimeout: + raise socket.timeout() + + ack = self._check_add_window(len(out)) + # no need to hold the channel lock when sending this + if ack > 0: + m = Message() + m.add_byte(cMSG_CHANNEL_WINDOW_ADJUST) + m.add_int(self.remote_chanid) + m.add_int(ack) + self.transport._send_user_message(m) + + return out + + def recv_stderr_ready(self): + """ + Returns true if data is buffered and ready to be read from this + channel's stderr stream. Only channels using `exec_command` or + `invoke_shell` without a pty will ever have data on the stderr + stream. + + :return: + ``True`` if a `recv_stderr` call on this channel would immediately + return at least one byte; ``False`` otherwise. + + .. versionadded:: 1.1 + """ + return self.in_stderr_buffer.read_ready() + + def recv_stderr(self, nbytes): + """ + Receive data from the channel's stderr stream. Only channels using + `exec_command` or `invoke_shell` without a pty will ever have data + on the stderr stream. The return value is a string representing the + data received. The maximum amount of data to be received at once is + specified by ``nbytes``. If a string of length zero is returned, the + channel stream has closed. + + :param int nbytes: maximum number of bytes to read. + :return: received data as a `bytes` + + :raises socket.timeout: if no data is ready before the timeout set by + `settimeout`. + + .. versionadded:: 1.1 + """ + try: + out = self.in_stderr_buffer.read(nbytes, self.timeout) + except PipeTimeout: + raise socket.timeout() + + ack = self._check_add_window(len(out)) + # no need to hold the channel lock when sending this + if ack > 0: + m = Message() + m.add_byte(cMSG_CHANNEL_WINDOW_ADJUST) + m.add_int(self.remote_chanid) + m.add_int(ack) + self.transport._send_user_message(m) + + return out + + def send_ready(self): + """ + Returns true if data can be written to this channel without blocking. + This means the channel is either closed (so any write attempt would + return immediately) or there is at least one byte of space in the + outbound buffer. If there is at least one byte of space in the + outbound buffer, a `send` call will succeed immediately and return + the number of bytes actually written. + + :return: + ``True`` if a `send` call on this channel would immediately succeed + or fail + """ + self.lock.acquire() + try: + if self.closed or self.eof_sent: + return True + return self.out_window_size > 0 + finally: + self.lock.release() + + def send(self, s): + """ + Send data to the channel. Returns the number of bytes sent, or 0 if + the channel stream is closed. 
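+
+        A resend-loop sketch (``chan`` is an open channel; the payload is
+        illustrative)::
+
+            data = b"hello"
+            while data:
+                n = chan.send(data)
+                if n == 0:
+                    break  # stream closed
+                data = data[n:]
+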
Applications are responsible for + checking that all data has been sent: if only some of the data was + transmitted, the application needs to attempt delivery of the remaining + data. + + :param bytes s: data to send + :return: number of bytes actually sent, as an `int` + + :raises socket.timeout: if no data could be sent before the timeout set + by `settimeout`. + """ + + m = Message() + m.add_byte(cMSG_CHANNEL_DATA) + m.add_int(self.remote_chanid) + return self._send(s, m) + + def send_stderr(self, s): + """ + Send data to the channel on the "stderr" stream. This is normally + only used by servers to send output from shell commands -- clients + won't use this. Returns the number of bytes sent, or 0 if the channel + stream is closed. Applications are responsible for checking that all + data has been sent: if only some of the data was transmitted, the + application needs to attempt delivery of the remaining data. + + :param bytes s: data to send. + :return: number of bytes actually sent, as an `int`. + + :raises socket.timeout: + if no data could be sent before the timeout set by `settimeout`. + + .. versionadded:: 1.1 + """ + + m = Message() + m.add_byte(cMSG_CHANNEL_EXTENDED_DATA) + m.add_int(self.remote_chanid) + m.add_int(1) + return self._send(s, m) + + def sendall(self, s): + """ + Send data to the channel, without allowing partial results. Unlike + `send`, this method continues to send data from the given string until + either all data has been sent or an error occurs. Nothing is returned. + + :param bytes s: data to send. + + :raises socket.timeout: + if sending stalled for longer than the timeout set by `settimeout`. + :raises socket.error: + if an error occurred before the entire string was sent. + + .. note:: + If the channel is closed while only part of the data has been + sent, there is no way to determine how much data (if any) was sent. + This is irritating, but identically follows Python's API. + """ + while s: + sent = self.send(s) + s = s[sent:] + return None + + def sendall_stderr(self, s): + """ + Send data to the channel's "stderr" stream, without allowing partial + results. Unlike `send_stderr`, this method continues to send data + from the given bytestring until all data has been sent or an error + occurs. Nothing is returned. + + :param bytes s: data to send to the client as "stderr" output. + + :raises socket.timeout: + if sending stalled for longer than the timeout set by `settimeout`. + :raises socket.error: + if an error occurred before the entire string was sent. + + .. versionadded:: 1.1 + """ + while s: + sent = self.send_stderr(s) + s = s[sent:] + return None + + def makefile(self, *params): + """ + Return a file-like object associated with this channel. The optional + ``mode`` and ``bufsize`` arguments are interpreted the same way as by + the built-in ``file()`` function in Python. + + :return: `.ChannelFile` object which can be used for Python file I/O. + """ + return ChannelFile(*([self] + list(params))) + + def makefile_stderr(self, *params): + """ + Return a file-like object associated with this channel's stderr + stream. Only channels using `exec_command` or `invoke_shell` + without a pty will ever have data on the stderr stream. + + The optional ``mode`` and ``bufsize`` arguments are interpreted the + same way as by the built-in ``file()`` function in Python. For a + client, it only makes sense to open this file for reading. For a + server, it only makes sense to open this file for writing. 
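+        For example, a client might collect a finished command's output and
+        errors through the file-like wrappers (a sketch; ``chan`` is assumed
+        to have had `exec_command` called on it already)::
+
+            stdout = chan.makefile("r")
+            stderr = chan.makefile_stderr("r")
+            out, err = stdout.read(), stderr.read()
+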
+ + :returns: + `.ChannelStderrFile` object which can be used for Python file I/O. + + .. versionadded:: 1.1 + """ + return ChannelStderrFile(*([self] + list(params))) + + def makefile_stdin(self, *params): + """ + Return a file-like object associated with this channel's stdin + stream. + + The optional ``mode`` and ``bufsize`` arguments are interpreted the + same way as by the built-in ``file()`` function in Python. For a + client, it only makes sense to open this file for writing. For a + server, it only makes sense to open this file for reading. + + :returns: + `.ChannelStdinFile` object which can be used for Python file I/O. + + .. versionadded:: 2.6 + """ + return ChannelStdinFile(*([self] + list(params))) + + def fileno(self): + """ + Returns an OS-level file descriptor which can be used for polling, + but not for reading or writing. This is primarily to allow Python's + ``select`` module to work. + + The first time ``fileno`` is called on a channel, a pipe is created to + simulate real OS-level file descriptor (FD) behavior. Because of this, + two OS-level FDs are created, which will use up FDs faster than normal. + (You won't notice this effect unless you have hundreds of channels + open at the same time.) + + :return: an OS-level file descriptor (`int`) + + .. warning:: + This method causes channel reads to be slightly less efficient. + """ + self.lock.acquire() + try: + if self._pipe is not None: + return self._pipe.fileno() + # create the pipe and feed in any existing data + self._pipe = pipe.make_pipe() + p1, p2 = pipe.make_or_pipe(self._pipe) + self.in_buffer.set_event(p1) + self.in_stderr_buffer.set_event(p2) + return self._pipe.fileno() + finally: + self.lock.release() + + def shutdown(self, how): + """ + Shut down one or both halves of the connection. If ``how`` is 0, + further receives are disallowed. If ``how`` is 1, further sends + are disallowed. If ``how`` is 2, further sends and receives are + disallowed. This closes the stream in one or both directions. + + :param int how: + 0 (stop receiving), 1 (stop sending), or 2 (stop receiving and + sending). + """ + if (how == 0) or (how == 2): + # feign "read" shutdown + self.eof_received = 1 + if (how == 1) or (how == 2): + self.lock.acquire() + try: + m = self._send_eof() + finally: + self.lock.release() + if m is not None: + self.transport._send_user_message(m) + + def shutdown_read(self): + """ + Shutdown the receiving side of this socket, closing the stream in + the incoming direction. After this call, future reads on this + channel will fail instantly. This is a convenience method, equivalent + to ``shutdown(0)``, for people who don't make it a habit to + memorize unix constants from the 1970s. + + .. versionadded:: 1.2 + """ + self.shutdown(0) + + def shutdown_write(self): + """ + Shutdown the sending side of this socket, closing the stream in + the outgoing direction. After this call, future writes on this + channel will fail instantly. This is a convenience method, equivalent + to ``shutdown(1)``, for people who don't make it a habit to + memorize unix constants from the 1970s. + + .. versionadded:: 1.2 + """ + self.shutdown(1) + + @property + def _closed(self): + # Concession to Python 3's socket API, which has a private ._closed + # attribute instead of a semipublic .closed attribute.
+ return self.closed + + # ...calls from Transport + + def _set_transport(self, transport): + self.transport = transport + self.logger = util.get_logger(self.transport.get_log_channel()) + + def _set_window(self, window_size, max_packet_size): + self.in_window_size = window_size + self.in_max_packet_size = max_packet_size + # threshold of bytes we receive before we bother to send + # a window update + self.in_window_threshold = window_size // 10 + self.in_window_sofar = 0 + self._log(DEBUG, "Max packet in: {} bytes".format(max_packet_size)) + + def _set_remote_channel(self, chanid, window_size, max_packet_size): + self.remote_chanid = chanid + self.out_window_size = window_size + self.out_max_packet_size = self.transport._sanitize_packet_size( + max_packet_size + ) + self.active = 1 + self._log( + DEBUG, "Max packet out: {} bytes".format(self.out_max_packet_size) + ) + + def _request_success(self, m): + self._log(DEBUG, "Sesch channel {} request ok".format(self.chanid)) + self.event_ready = True + self.event.set() + return + + def _request_failed(self, m): + self.lock.acquire() + try: + msgs = self._close_internal() + finally: + self.lock.release() + for m in msgs: + if m is not None: + self.transport._send_user_message(m) + + def _feed(self, m): + if isinstance(m, bytes): + # passed from _feed_extended + s = m + else: + s = m.get_binary() + self.in_buffer.feed(s) + + def _feed_extended(self, m): + code = m.get_int() + s = m.get_binary() + if code != 1: + self._log( + ERROR, "unknown extended_data type {}; discarding".format(code) + ) + return + if self.combine_stderr: + self._feed(s) + else: + self.in_stderr_buffer.feed(s) + + def _window_adjust(self, m): + nbytes = m.get_int() + self.lock.acquire() + try: + if self.ultra_debug: + self._log(DEBUG, "window up {}".format(nbytes)) + self.out_window_size += nbytes + self.out_buffer_cv.notify_all() + finally: + self.lock.release() + + def _handle_request(self, m): + key = m.get_text() + want_reply = m.get_boolean() + server = self.transport.server_object + ok = False + if key == "exit-status": + self.exit_status = m.get_int() + self.status_event.set() + ok = True + elif key == "xon-xoff": + # ignore + ok = True + elif key == "pty-req": + term = m.get_string() + width = m.get_int() + height = m.get_int() + pixelwidth = m.get_int() + pixelheight = m.get_int() + modes = m.get_string() + if server is None: + ok = False + else: + ok = server.check_channel_pty_request( + self, term, width, height, pixelwidth, pixelheight, modes + ) + elif key == "shell": + if server is None: + ok = False + else: + ok = server.check_channel_shell_request(self) + elif key == "env": + name = m.get_string() + value = m.get_string() + if server is None: + ok = False + else: + ok = server.check_channel_env_request(self, name, value) + elif key == "exec": + cmd = m.get_string() + if server is None: + ok = False + else: + ok = server.check_channel_exec_request(self, cmd) + elif key == "subsystem": + name = m.get_text() + if server is None: + ok = False + else: + ok = server.check_channel_subsystem_request(self, name) + elif key == "window-change": + width = m.get_int() + height = m.get_int() + pixelwidth = m.get_int() + pixelheight = m.get_int() + if server is None: + ok = False + else: + ok = server.check_channel_window_change_request( + self, width, height, pixelwidth, pixelheight + ) + elif key == "x11-req": + single_connection = m.get_boolean() + auth_proto = m.get_text() + auth_cookie = m.get_binary() + screen_number = m.get_int() + if server is None: + ok = False + 
else: + ok = server.check_channel_x11_request( + self, + single_connection, + auth_proto, + auth_cookie, + screen_number, + ) + elif key == "auth-agent-req@openssh.com": + if server is None: + ok = False + else: + ok = server.check_channel_forward_agent_request(self) + else: + self._log(DEBUG, 'Unhandled channel request "{}"'.format(key)) + ok = False + if want_reply: + m = Message() + if ok: + m.add_byte(cMSG_CHANNEL_SUCCESS) + else: + m.add_byte(cMSG_CHANNEL_FAILURE) + m.add_int(self.remote_chanid) + self.transport._send_user_message(m) + + def _handle_eof(self, m): + self.lock.acquire() + try: + if not self.eof_received: + self.eof_received = True + self.in_buffer.close() + self.in_stderr_buffer.close() + if self._pipe is not None: + self._pipe.set_forever() + finally: + self.lock.release() + self._log(DEBUG, "EOF received ({})".format(self._name)) + + def _handle_close(self, m): + self.lock.acquire() + try: + msgs = self._close_internal() + self.transport._unlink_channel(self.chanid) + finally: + self.lock.release() + for m in msgs: + if m is not None: + self.transport._send_user_message(m) + + # ...internals... + + def _send(self, s, m): + size = len(s) + self.lock.acquire() + try: + if self.closed: + # this doesn't seem useful, but it is the documented behavior + # of Socket + raise socket.error("Socket is closed") + size = self._wait_for_send_window(size) + if size == 0: + # eof or similar + return 0 + m.add_string(s[:size]) + finally: + self.lock.release() + # Note: We release self.lock before calling _send_user_message. + # Otherwise, we can deadlock during re-keying. + self.transport._send_user_message(m) + return size + + def _log(self, level, msg, *args): + self.logger.log(level, "[chan " + self._name + "] " + msg, *args) + + def _event_pending(self): + self.event.clear() + self.event_ready = False + + def _wait_for_event(self): + self.event.wait() + assert self.event.is_set() + if self.event_ready: + return + e = self.transport.get_exception() + if e is None: + e = SSHException("Channel closed.") + raise e + + def _set_closed(self): + # you are holding the lock. + self.closed = True + self.in_buffer.close() + self.in_stderr_buffer.close() + self.out_buffer_cv.notify_all() + # Notify any waiters that we are closed + self.event.set() + self.status_event.set() + if self._pipe is not None: + self._pipe.set_forever() + + def _send_eof(self): + # you are holding the lock. + if self.eof_sent: + return None + m = Message() + m.add_byte(cMSG_CHANNEL_EOF) + m.add_int(self.remote_chanid) + self.eof_sent = True + self._log(DEBUG, "EOF sent ({})".format(self._name)) + return m + + def _close_internal(self): + # you are holding the lock. + if not self.active or self.closed: + return None, None + m1 = self._send_eof() + m2 = Message() + m2.add_byte(cMSG_CHANNEL_CLOSE) + m2.add_int(self.remote_chanid) + self._set_closed() + # can't unlink from the Transport yet -- the remote side may still + # try to send meta-data (exit-status, etc) + return m1, m2 + + def _unlink(self): + # server connection could die before we become active: + # still signal the close! 
+ if self.closed: + return + self.lock.acquire() + try: + self._set_closed() + self.transport._unlink_channel(self.chanid) + finally: + self.lock.release() + + def _check_add_window(self, n): + self.lock.acquire() + try: + if self.closed or self.eof_received or not self.active: + return 0 + if self.ultra_debug: + self._log(DEBUG, "addwindow {}".format(n)) + self.in_window_sofar += n + if self.in_window_sofar <= self.in_window_threshold: + return 0 + if self.ultra_debug: + self._log( + DEBUG, "addwindow send {}".format(self.in_window_sofar) + ) + out = self.in_window_sofar + self.in_window_sofar = 0 + return out + finally: + self.lock.release() + + def _wait_for_send_window(self, size): + """ + (You are already holding the lock.) + Wait for the send window to open up, and allocate up to ``size`` bytes + for transmission. If no space opens up before the timeout, a timeout + exception is raised. Returns the number of bytes available to send + (may be less than requested). + """ + # you are already holding the lock + if self.closed or self.eof_sent: + return 0 + if self.out_window_size == 0: + # should we block? + if self.timeout == 0.0: + raise socket.timeout() + # loop here in case we get woken up but a different thread has + # filled the buffer + timeout = self.timeout + while self.out_window_size == 0: + if self.closed or self.eof_sent: + return 0 + then = time.time() + self.out_buffer_cv.wait(timeout) + if timeout is not None: + timeout -= time.time() - then + if timeout <= 0.0: + raise socket.timeout() + # we have some window to squeeze into + if self.closed or self.eof_sent: + return 0 + if self.out_window_size < size: + size = self.out_window_size + if self.out_max_packet_size - 64 < size: + size = self.out_max_packet_size - 64 + self.out_window_size -= size + if self.ultra_debug: + self._log(DEBUG, "window down to {}".format(self.out_window_size)) + return size + + +class ChannelFile(BufferedFile): + """ + A file-like wrapper around `.Channel`. A ChannelFile is created by calling + `Channel.makefile`. + + .. warning:: + To correctly emulate the file object created from a socket's `makefile + <socket.socket.makefile>` method, a `.Channel` and its + `.ChannelFile` should be able to be closed or garbage-collected + independently. Currently, closing the `ChannelFile` does nothing but + flush the buffer. + """ + + def __init__(self, channel, mode="r", bufsize=-1): + self.channel = channel + BufferedFile.__init__(self) + self._set_mode(mode, bufsize) + + def __repr__(self): + """ + Returns a string representation of this object, for debugging. + """ + return "<paramiko.ChannelFile from " + repr(self.channel) + ">" + + def _read(self, size): + return self.channel.recv(size) + + def _write(self, data): + self.channel.sendall(data) + return len(data) + + +class ChannelStderrFile(ChannelFile): + """ + A file-like wrapper around `.Channel` stderr. + + See `Channel.makefile_stderr` for details. + """ + + def _read(self, size): + return self.channel.recv_stderr(size) + + def _write(self, data): + self.channel.sendall_stderr(data) + return len(data) + + +class ChannelStdinFile(ChannelFile): + """ + A file-like wrapper around `.Channel` stdin. + + See `Channel.makefile_stdin` for details.
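+
+    Closing this file also shuts down the write side of the underlying
+    channel (see ``close`` below), which is how a client signals EOF to
+    the remote process. A minimal sketch, assuming ``chan`` is a session
+    channel with a command already running::
+
+        stdin = chan.makefile_stdin("wb")
+        stdin.write(b"some input\n")
+        stdin.close()  # flushes, then Channel.shutdown_write()
+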
+ """ + + def close(self): + super().close() + self.channel.shutdown_write() diff --git a/venv/lib/python3.12/site-packages/paramiko/client.py b/venv/lib/python3.12/site-packages/paramiko/client.py new file mode 100644 index 0000000..d8be910 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/client.py @@ -0,0 +1,893 @@ +# Copyright (C) 2006-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +SSH client & key policies +""" + +from binascii import hexlify +import getpass +import inspect +import os +import socket +import warnings +from errno import ECONNREFUSED, EHOSTUNREACH + +from paramiko.agent import Agent +from paramiko.common import DEBUG +from paramiko.config import SSH_PORT +from paramiko.dsskey import DSSKey +from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key +from paramiko.hostkeys import HostKeys +from paramiko.rsakey import RSAKey +from paramiko.ssh_exception import ( + SSHException, + BadHostKeyException, + NoValidConnectionsError, +) +from paramiko.transport import Transport +from paramiko.util import ClosingContextManager + + +class SSHClient(ClosingContextManager): + """ + A high-level representation of a session with an SSH server. This class + wraps `.Transport`, `.Channel`, and `.SFTPClient` to take care of most + aspects of authenticating and opening channels. A typical use case is:: + + client = SSHClient() + client.load_system_host_keys() + client.connect('ssh.example.com') + stdin, stdout, stderr = client.exec_command('ls -l') + + You may pass in explicit overrides for authentication and server host key + checking. The default mechanism is to try to use local key files or an + SSH agent (if one is running). + + Instances of this class may be used as context managers. + + .. versionadded:: 1.6 + """ + + def __init__(self): + """ + Create a new SSHClient. + """ + self._system_host_keys = HostKeys() + self._host_keys = HostKeys() + self._host_keys_filename = None + self._log_channel = None + self._policy = RejectPolicy() + self._transport = None + self._agent = None + + def load_system_host_keys(self, filename=None): + """ + Load host keys from a system (read-only) file. Host keys read with + this method will not be saved back by `save_host_keys`. + + This method can be called multiple times. Each new set of host keys + will be merged with the existing set (new replacing old if there are + conflicts). + + If ``filename`` is left as ``None``, an attempt will be made to read + keys from the user's local "known hosts" file, as used by OpenSSH, + and no exception will be raised if the file can't be read. This is + probably only useful on posix. 
+ + :param str filename: the filename to read, or ``None`` + + :raises: ``IOError`` -- + if a filename was provided and the file could not be read + """ + if filename is None: + # try the user's .ssh key file, and mask exceptions + filename = os.path.expanduser("~/.ssh/known_hosts") + try: + self._system_host_keys.load(filename) + except IOError: + pass + return + self._system_host_keys.load(filename) + + def load_host_keys(self, filename): + """ + Load host keys from a local host-key file. Host keys read with this + method will be checked after keys loaded via `load_system_host_keys`, + but will be saved back by `save_host_keys` (so they can be modified). + The missing host key policy `.AutoAddPolicy` adds keys to this set and + saves them, when connecting to a previously-unknown server. + + This method can be called multiple times. Each new set of host keys + will be merged with the existing set (new replacing old if there are + conflicts). When automatically saving, the last hostname is used. + + :param str filename: the filename to read + + :raises: ``IOError`` -- if the filename could not be read + """ + self._host_keys_filename = filename + self._host_keys.load(filename) + + def save_host_keys(self, filename): + """ + Save the host keys back to a file. Only the host keys loaded with + `load_host_keys` (plus any added directly) will be saved -- not any + host keys loaded with `load_system_host_keys`. + + :param str filename: the filename to save to + + :raises: ``IOError`` -- if the file could not be written + """ + + # update local host keys from file (in case other SSH clients + # have written to the known_hosts file meanwhile. + if self._host_keys_filename is not None: + self.load_host_keys(self._host_keys_filename) + + with open(filename, "w") as f: + for hostname, keys in self._host_keys.items(): + for keytype, key in keys.items(): + f.write( + "{} {} {}\n".format( + hostname, keytype, key.get_base64() + ) + ) + + def get_host_keys(self): + """ + Get the local `.HostKeys` object. This can be used to examine the + local host keys or change them. + + :return: the local host keys as a `.HostKeys` object. + """ + return self._host_keys + + def set_log_channel(self, name): + """ + Set the channel for logging. The default is ``"paramiko.transport"`` + but it can be set to anything you want. + + :param str name: new channel name for logging + """ + self._log_channel = name + + def set_missing_host_key_policy(self, policy): + """ + Set policy to use when connecting to servers without a known host key. + + Specifically: + + * A **policy** is a "policy class" (or instance thereof), namely some + subclass of `.MissingHostKeyPolicy` such as `.RejectPolicy` (the + default), `.AutoAddPolicy`, `.WarningPolicy`, or a user-created + subclass. + * A host key is **known** when it appears in the client object's cached + host keys structures (those manipulated by `load_system_host_keys` + and/or `load_host_keys`). + + :param .MissingHostKeyPolicy policy: + the policy to use when receiving a host key from a + previously-unknown server + """ + if inspect.isclass(policy): + policy = policy() + self._policy = policy + + def _families_and_addresses(self, hostname, port): + """ + Yield pairs of address families and addresses to try for connecting. 
+ + :param str hostname: the server to connect to + :param int port: the server port to connect to + :returns: Yields an iterable of ``(family, address)`` tuples + """ + guess = True + addrinfos = socket.getaddrinfo( + hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM + ) + for (family, socktype, proto, canonname, sockaddr) in addrinfos: + if socktype == socket.SOCK_STREAM: + yield family, sockaddr + guess = False + + # some OS like AIX don't indicate SOCK_STREAM support, so just + # guess. :( We only do this if we did not get a single result marked + # as socktype == SOCK_STREAM. + if guess: + for family, _, _, _, sockaddr in addrinfos: + yield family, sockaddr + + def connect( + self, + hostname, + port=SSH_PORT, + username=None, + password=None, + pkey=None, + key_filename=None, + timeout=None, + allow_agent=True, + look_for_keys=True, + compress=False, + sock=None, + gss_auth=False, + gss_kex=False, + gss_deleg_creds=True, + gss_host=None, + banner_timeout=None, + auth_timeout=None, + channel_timeout=None, + gss_trust_dns=True, + passphrase=None, + disabled_algorithms=None, + transport_factory=None, + auth_strategy=None, + ): + """ + Connect to an SSH server and authenticate to it. The server's host key + is checked against the system host keys (see `load_system_host_keys`) + and any local host keys (`load_host_keys`). If the server's hostname + is not found in either set of host keys, the missing host key policy + is used (see `set_missing_host_key_policy`). The default policy is + to reject the key and raise an `.SSHException`. + + Authentication is attempted in the following order of priority: + + - The ``pkey`` or ``key_filename`` passed in (if any) + + - ``key_filename`` may contain OpenSSH public certificate paths + as well as regular private-key paths; when files ending in + ``-cert.pub`` are found, they are assumed to match a private + key, and both components will be loaded. (The private key + itself does *not* need to be listed in ``key_filename`` for + this to occur - *just* the certificate.) + + - Any key we can find through an SSH agent + - Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in + ``~/.ssh/`` + + - When OpenSSH-style public certificates exist that match an + existing such private key (so e.g. one has ``id_rsa`` and + ``id_rsa-cert.pub``) the certificate will be loaded alongside + the private key and used for authentication. + + - Plain username/password auth, if a password was given + + If a private key requires a password to unlock it, and a password is + passed in, that password will be used to attempt to unlock the key. + + :param str hostname: the server to connect to + :param int port: the server port to connect to + :param str username: + the username to authenticate as (defaults to the current local + username) + :param str password: + Used for password authentication; is also used for private key + decryption if ``passphrase`` is not given. + :param str passphrase: + Used for decrypting private keys. 
+ :param .PKey pkey: an optional private key to use for authentication + :param str key_filename: + the filename, or list of filenames, of optional private key(s) + and/or certs to try for authentication + :param float timeout: + an optional timeout (in seconds) for the TCP connect + :param bool allow_agent: + set to False to disable connecting to the SSH agent + :param bool look_for_keys: + set to False to disable searching for discoverable private key + files in ``~/.ssh/`` + :param bool compress: set to True to turn on compression + :param socket sock: + an open socket or socket-like object (such as a `.Channel`) to use + for communication to the target host + :param bool gss_auth: + ``True`` if you want to use GSS-API authentication + :param bool gss_kex: + Perform GSS-API Key Exchange and user authentication + :param bool gss_deleg_creds: Delegate GSS-API client credentials or not + :param str gss_host: + The targets name in the kerberos database. default: hostname + :param bool gss_trust_dns: + Indicates whether or not the DNS is trusted to securely + canonicalize the name of the host being connected to (default + ``True``). + :param float banner_timeout: an optional timeout (in seconds) to wait + for the SSH banner to be presented. + :param float auth_timeout: an optional timeout (in seconds) to wait for + an authentication response. + :param float channel_timeout: an optional timeout (in seconds) to wait + for a channel open response. + :param dict disabled_algorithms: + an optional dict passed directly to `.Transport` and its keyword + argument of the same name. + :param transport_factory: + an optional callable which is handed a subset of the constructor + arguments (primarily those related to the socket, GSS + functionality, and algorithm selection) and generates a + `.Transport` instance to be used by this client. Defaults to + `.Transport.__init__`. + :param auth_strategy: + an optional instance of `.AuthStrategy`, triggering use of this + newer authentication mechanism instead of SSHClient's legacy auth + method. + + .. warning:: + This parameter is **incompatible** with all other + authentication-related parameters (such as, but not limited to, + ``password``, ``key_filename`` and ``allow_agent``) and will + trigger an exception if given alongside them. + + :returns: + `.AuthResult` if ``auth_strategy`` is non-``None``; otherwise, + returns ``None``. + + :raises BadHostKeyException: + if the server's host key could not be verified. + :raises AuthenticationException: + if authentication failed. + :raises UnableToAuthenticate: + if authentication failed (when ``auth_strategy`` is non-``None``; + and note that this is a subclass of ``AuthenticationException``). + :raises socket.error: + if a socket error (other than connection-refused or + host-unreachable) occurred while connecting. + :raises NoValidConnectionsError: + if all valid connection targets for the requested hostname (eg IPv4 + and IPv6) yielded connection-refused or host-unreachable socket + errors. + :raises SSHException: + if there was any other error connecting or establishing an SSH + session. + + .. versionchanged:: 1.15 + Added the ``banner_timeout``, ``gss_auth``, ``gss_kex``, + ``gss_deleg_creds`` and ``gss_host`` arguments. + .. versionchanged:: 2.3 + Added the ``gss_trust_dns`` argument. + .. versionchanged:: 2.4 + Added the ``passphrase`` argument. + .. versionchanged:: 2.6 + Added the ``disabled_algorithms`` argument. + .. versionchanged:: 2.12 + Added the ``transport_factory`` argument. + .. 
versionchanged:: 3.2 + Added the ``auth_strategy`` argument. + """ + if not sock: + errors = {} + # Try multiple possible address families (e.g. IPv4 vs IPv6) + to_try = list(self._families_and_addresses(hostname, port)) + for af, addr in to_try: + try: + sock = socket.socket(af, socket.SOCK_STREAM) + if timeout is not None: + try: + sock.settimeout(timeout) + except: + pass + sock.connect(addr) + # Break out of the loop on success + break + except socket.error as e: + # As mentioned in socket docs it is better + # to close sockets explicitly + if sock: + sock.close() + # Raise anything that isn't a straight up connection error + # (such as a resolution error) + if e.errno not in (ECONNREFUSED, EHOSTUNREACH): + raise + # Capture anything else so we know how the run looks once + # iteration is complete. Retain info about which attempt + # this was. + errors[addr] = e + + # Make sure we explode usefully if no address family attempts + # succeeded. We've no way of knowing which error is the "right" + # one, so we construct a hybrid exception containing all the real + # ones, of a subclass that client code should still be watching for + # (socket.error) + if len(errors) == len(to_try): + raise NoValidConnectionsError(errors) + + if transport_factory is None: + transport_factory = Transport + t = self._transport = transport_factory( + sock, + gss_kex=gss_kex, + gss_deleg_creds=gss_deleg_creds, + disabled_algorithms=disabled_algorithms, + ) + t.use_compression(compress=compress) + t.set_gss_host( + # t.hostname may be None, but GSS-API requires a target name. + # Therefore use hostname as fallback. + gss_host=gss_host or hostname, + trust_dns=gss_trust_dns, + gssapi_requested=gss_auth or gss_kex, + ) + if self._log_channel is not None: + t.set_log_channel(self._log_channel) + if banner_timeout is not None: + t.banner_timeout = banner_timeout + if auth_timeout is not None: + t.auth_timeout = auth_timeout + if channel_timeout is not None: + t.channel_timeout = channel_timeout + + if port == SSH_PORT: + server_hostkey_name = hostname + else: + server_hostkey_name = "[{}]:{}".format(hostname, port) + our_server_keys = None + + our_server_keys = self._system_host_keys.get(server_hostkey_name) + if our_server_keys is None: + our_server_keys = self._host_keys.get(server_hostkey_name) + if our_server_keys is not None: + keytype = our_server_keys.keys()[0] + sec_opts = t.get_security_options() + other_types = [x for x in sec_opts.key_types if x != keytype] + sec_opts.key_types = [keytype] + other_types + + t.start_client(timeout=timeout) + + # If GSS-API Key Exchange is performed we are not required to check the + # host key, because the host is authenticated via GSS-API / SSPI as + # well as our client. + if not self._transport.gss_kex_used: + server_key = t.get_remote_server_key() + if our_server_keys is None: + # will raise exception if the key is rejected + self._policy.missing_host_key( + self, server_hostkey_name, server_key + ) + else: + our_key = our_server_keys.get(server_key.get_name()) + if our_key != server_key: + if our_key is None: + our_key = list(our_server_keys.values())[0] + raise BadHostKeyException(hostname, server_key, our_key) + + if username is None: + username = getpass.getuser() + + # New auth flow! + if auth_strategy is not None: + return auth_strategy.authenticate(transport=t) + + # Old auth flow! 
+ if key_filename is None: + key_filenames = [] + elif isinstance(key_filename, str): + key_filenames = [key_filename] + else: + key_filenames = key_filename + + self._auth( + username, + password, + pkey, + key_filenames, + allow_agent, + look_for_keys, + gss_auth, + gss_kex, + gss_deleg_creds, + t.gss_host, + passphrase, + ) + + def close(self): + """ + Close this SSHClient and its underlying `.Transport`. + + This should be called anytime you are done using the client object. + + .. warning:: + Paramiko registers garbage collection hooks that will try to + automatically close connections for you, but this is not presently + reliable. Failure to explicitly close your client after use may + lead to end-of-process hangs! + """ + if self._transport is None: + return + self._transport.close() + self._transport = None + + if self._agent is not None: + self._agent.close() + self._agent = None + + def exec_command( + self, + command, + bufsize=-1, + timeout=None, + get_pty=False, + environment=None, + ): + """ + Execute a command on the SSH server. A new `.Channel` is opened and + the requested command is executed. The command's input and output + streams are returned as Python ``file``-like objects representing + stdin, stdout, and stderr. + + :param str command: the command to execute + :param int bufsize: + interpreted the same way as by the built-in ``file()`` function in + Python + :param int timeout: + set command's channel timeout. See `.Channel.settimeout` + :param bool get_pty: + Request a pseudo-terminal from the server (default ``False``). + See `.Channel.get_pty` + :param dict environment: + a dict of shell environment variables, to be merged into the + default environment that the remote command executes within. + + .. warning:: + Servers may silently reject some environment variables; see the + warning in `.Channel.set_environment_variable` for details. + + :return: + the stdin, stdout, and stderr of the executing command, as a + 3-tuple + + :raises: `.SSHException` -- if the server fails to execute the command + + .. versionchanged:: 1.10 + Added the ``get_pty`` kwarg. + """ + chan = self._transport.open_session(timeout=timeout) + if get_pty: + chan.get_pty() + chan.settimeout(timeout) + if environment: + chan.update_environment(environment) + chan.exec_command(command) + stdin = chan.makefile_stdin("wb", bufsize) + stdout = chan.makefile("r", bufsize) + stderr = chan.makefile_stderr("r", bufsize) + return stdin, stdout, stderr + + def invoke_shell( + self, + term="vt100", + width=80, + height=24, + width_pixels=0, + height_pixels=0, + environment=None, + ): + """ + Start an interactive shell session on the SSH server. A new `.Channel` + is opened and connected to a pseudo-terminal using the requested + terminal type and size. + + :param str term: + the terminal type to emulate (for example, ``"vt100"``) + :param int width: the width (in characters) of the terminal window + :param int height: the height (in characters) of the terminal window + :param int width_pixels: the width (in pixels) of the terminal window + :param int height_pixels: the height (in pixels) of the terminal window + :param dict environment: the command's environment + :return: a new `.Channel` connected to the remote shell + + :raises: `.SSHException` -- if the server fails to invoke a shell + """ + chan = self._transport.open_session() + chan.get_pty(term, width, height, width_pixels, height_pixels) + chan.invoke_shell() + return chan + + def open_sftp(self): + """ + Open an SFTP session on the SSH server. 
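+
+        For example (a sketch; assumes ``client`` is already connected and
+        the remote/local paths are placeholders)::
+
+            sftp = client.open_sftp()
+            sftp.put("local.txt", "/tmp/remote.txt")
+            sftp.get("/tmp/remote.txt", "local-copy.txt")
+            sftp.close()
+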
+ + :return: a new `.SFTPClient` session object + """ + return self._transport.open_sftp_client() + + def get_transport(self): + """ + Return the underlying `.Transport` object for this SSH connection. + This can be used to perform lower-level tasks, like opening specific + kinds of channels. + + :return: the `.Transport` for this connection + """ + return self._transport + + def _key_from_filepath(self, filename, klass, password): + """ + Attempt to derive a `.PKey` from given string path ``filename``: + + - If ``filename`` appears to be a cert, the matching private key is + loaded. + - Otherwise, the filename is assumed to be a private key, and the + matching public cert will be loaded if it exists. + """ + cert_suffix = "-cert.pub" + # Assume privkey, not cert, by default + if filename.endswith(cert_suffix): + key_path = filename[: -len(cert_suffix)] + cert_path = filename + else: + key_path = filename + cert_path = filename + cert_suffix + # Blindly try the key path; if no private key, nothing will work. + key = klass.from_private_key_file(key_path, password) + # TODO: change this to 'Loading' instead of 'Trying' sometime; probably + # when #387 is released, since this is a critical log message users are + # likely testing/filtering for (bah.) + msg = "Trying discovered key {} in {}".format( + hexlify(key.get_fingerprint()), key_path + ) + self._log(DEBUG, msg) + # Attempt to load cert if it exists. + if os.path.isfile(cert_path): + key.load_certificate(cert_path) + self._log(DEBUG, "Adding public certificate {}".format(cert_path)) + return key + + def _auth( + self, + username, + password, + pkey, + key_filenames, + allow_agent, + look_for_keys, + gss_auth, + gss_kex, + gss_deleg_creds, + gss_host, + passphrase, + ): + """ + Try, in order: + + - The key(s) passed in, if one was passed in. + - Any key we can find through an SSH agent (if allowed). + - Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ~/.ssh/ + (if allowed). + - Plain username/password auth, if a password was given. + + (The password might be needed to unlock a private key [if 'passphrase' + isn't also given], or for two-factor authentication [for which it is + required].) + """ + saved_exception = None + two_factor = False + allowed_types = set() + two_factor_types = {"keyboard-interactive", "password"} + if passphrase is None and password is not None: + passphrase = password + + # If GSS-API support and GSS-API Key Exchange was performed, we attempt + # authentication with gssapi-keyex. + if gss_kex and self._transport.gss_kex_used: + try: + self._transport.auth_gssapi_keyex(username) + return + except Exception as e: + saved_exception = e + + # Try GSS-API authentication (gssapi-with-mic) only if GSS-API Key + # Exchange is not performed, because if we use GSS-API for the key + # exchange, there is already a fully established GSS-API context, so + # why should we do that again?
+ if gss_auth: + try: + return self._transport.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds + ) + except Exception as e: + saved_exception = e + + if pkey is not None: + try: + self._log( + DEBUG, + "Trying SSH key {}".format( + hexlify(pkey.get_fingerprint()) + ), + ) + allowed_types = set( + self._transport.auth_publickey(username, pkey) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + except SSHException as e: + saved_exception = e + + if not two_factor: + for key_filename in key_filenames: + # TODO 4.0: leverage PKey.from_path() if we don't end up just + # killing SSHClient entirely + for pkey_class in (RSAKey, DSSKey, ECDSAKey, Ed25519Key): + try: + key = self._key_from_filepath( + key_filename, pkey_class, passphrase + ) + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + break + except SSHException as e: + saved_exception = e + + if not two_factor and allow_agent: + if self._agent is None: + self._agent = Agent() + + for key in self._agent.get_keys(): + try: + id_ = hexlify(key.get_fingerprint()) + self._log(DEBUG, "Trying SSH agent key {}".format(id_)) + # for 2-factor auth a successfully auth'd key password + # will return an allowed 2fac auth method + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + break + except SSHException as e: + saved_exception = e + + if not two_factor: + keyfiles = [] + + for keytype, name in [ + (RSAKey, "rsa"), + (DSSKey, "dsa"), + (ECDSAKey, "ecdsa"), + (Ed25519Key, "ed25519"), + ]: + # ~/ssh/ is for windows + for directory in [".ssh", "ssh"]: + full_path = os.path.expanduser( + "~/{}/id_{}".format(directory, name) + ) + if os.path.isfile(full_path): + # TODO: only do this append if below did not run + keyfiles.append((keytype, full_path)) + if os.path.isfile(full_path + "-cert.pub"): + keyfiles.append((keytype, full_path + "-cert.pub")) + + if not look_for_keys: + keyfiles = [] + + for pkey_class, filename in keyfiles: + try: + key = self._key_from_filepath( + filename, pkey_class, passphrase + ) + # for 2-factor auth a successfully auth'd key will result + # in ['password'] + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types + if not two_factor: + return + break + except (SSHException, IOError) as e: + saved_exception = e + + if password is not None: + try: + self._transport.auth_password(username, password) + return + except SSHException as e: + saved_exception = e + elif two_factor: + try: + self._transport.auth_interactive_dumb(username) + return + except SSHException as e: + saved_exception = e + + # if we got an auth-failed exception earlier, re-raise it + if saved_exception is not None: + raise saved_exception + raise SSHException("No authentication methods available") + + def _log(self, level, msg): + self._transport._log(level, msg) + + +class MissingHostKeyPolicy: + """ + Interface for defining the policy that `.SSHClient` should use when the + SSH server's hostname is not in either the system host keys or the + application's keys. Pre-made classes implement policies for automatically + adding the key to the application's `.HostKeys` object (`.AutoAddPolicy`), + and for automatically rejecting the key (`.RejectPolicy`). + + This function may be used to ask the user to verify the key, for example. 
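+
+    For instance, a user-created policy might accept a key for the current
+    session only, without persisting it anywhere (a sketch; this is not one
+    of the shipped policies)::
+
+        class AcceptOncePolicy(MissingHostKeyPolicy):
+            def missing_host_key(self, client, hostname, key):
+                # returning normally accepts the key; nothing is saved
+                pass
+
+        client.set_missing_host_key_policy(AcceptOncePolicy())
+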
+ """ + + def missing_host_key(self, client, hostname, key): + """ + Called when an `.SSHClient` receives a server key for a server that + isn't in either the system or local `.HostKeys` object. To accept + the key, simply return. To reject, raised an exception (which will + be passed to the calling application). + """ + pass + + +class AutoAddPolicy(MissingHostKeyPolicy): + """ + Policy for automatically adding the hostname and new host key to the + local `.HostKeys` object, and saving it. This is used by `.SSHClient`. + """ + + def missing_host_key(self, client, hostname, key): + client._host_keys.add(hostname, key.get_name(), key) + if client._host_keys_filename is not None: + client.save_host_keys(client._host_keys_filename) + client._log( + DEBUG, + "Adding {} host key for {}: {}".format( + key.get_name(), hostname, hexlify(key.get_fingerprint()) + ), + ) + + +class RejectPolicy(MissingHostKeyPolicy): + """ + Policy for automatically rejecting the unknown hostname & key. This is + used by `.SSHClient`. + """ + + def missing_host_key(self, client, hostname, key): + client._log( + DEBUG, + "Rejecting {} host key for {}: {}".format( + key.get_name(), hostname, hexlify(key.get_fingerprint()) + ), + ) + raise SSHException( + "Server {!r} not found in known_hosts".format(hostname) + ) + + +class WarningPolicy(MissingHostKeyPolicy): + """ + Policy for logging a Python-style warning for an unknown host key, but + accepting it. This is used by `.SSHClient`. + """ + + def missing_host_key(self, client, hostname, key): + warnings.warn( + "Unknown {} host key for {}: {}".format( + key.get_name(), hostname, hexlify(key.get_fingerprint()) + ) + ) diff --git a/venv/lib/python3.12/site-packages/paramiko/common.py b/venv/lib/python3.12/site-packages/paramiko/common.py new file mode 100644 index 0000000..b57149b --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/common.py @@ -0,0 +1,245 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Common constants and global variables. +""" +import logging +import struct + +# +# Formerly of py3compat.py. May be fully delete'able with a deeper look? +# + + +def byte_chr(c): + assert isinstance(c, int) + return struct.pack("B", c) + + +def byte_mask(c, mask): + assert isinstance(c, int) + return struct.pack("B", c & mask) + + +def byte_ord(c): + # In case we're handed a string instead of an int. 
+ if not isinstance(c, int): + c = ord(c) + return c + + +( + MSG_DISCONNECT, + MSG_IGNORE, + MSG_UNIMPLEMENTED, + MSG_DEBUG, + MSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_EXT_INFO, +) = range(1, 8) +(MSG_KEXINIT, MSG_NEWKEYS) = range(20, 22) +( + MSG_USERAUTH_REQUEST, + MSG_USERAUTH_FAILURE, + MSG_USERAUTH_SUCCESS, + MSG_USERAUTH_BANNER, +) = range(50, 54) +MSG_USERAUTH_PK_OK = 60 +(MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE) = range(60, 62) +(MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN) = range(60, 62) +( + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, + MSG_USERAUTH_GSSAPI_ERROR, + MSG_USERAUTH_GSSAPI_ERRTOK, + MSG_USERAUTH_GSSAPI_MIC, +) = range(63, 67) +HIGHEST_USERAUTH_MESSAGE_ID = 79 +(MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE) = range(80, 83) +( + MSG_CHANNEL_OPEN, + MSG_CHANNEL_OPEN_SUCCESS, + MSG_CHANNEL_OPEN_FAILURE, + MSG_CHANNEL_WINDOW_ADJUST, + MSG_CHANNEL_DATA, + MSG_CHANNEL_EXTENDED_DATA, + MSG_CHANNEL_EOF, + MSG_CHANNEL_CLOSE, + MSG_CHANNEL_REQUEST, + MSG_CHANNEL_SUCCESS, + MSG_CHANNEL_FAILURE, +) = range(90, 101) + +cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT) +cMSG_IGNORE = byte_chr(MSG_IGNORE) +cMSG_UNIMPLEMENTED = byte_chr(MSG_UNIMPLEMENTED) +cMSG_DEBUG = byte_chr(MSG_DEBUG) +cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST) +cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT) +cMSG_EXT_INFO = byte_chr(MSG_EXT_INFO) +cMSG_KEXINIT = byte_chr(MSG_KEXINIT) +cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS) +cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST) +cMSG_USERAUTH_FAILURE = byte_chr(MSG_USERAUTH_FAILURE) +cMSG_USERAUTH_SUCCESS = byte_chr(MSG_USERAUTH_SUCCESS) +cMSG_USERAUTH_BANNER = byte_chr(MSG_USERAUTH_BANNER) +cMSG_USERAUTH_PK_OK = byte_chr(MSG_USERAUTH_PK_OK) +cMSG_USERAUTH_INFO_REQUEST = byte_chr(MSG_USERAUTH_INFO_REQUEST) +cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE) +cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE) +cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN) +cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = byte_chr( + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE +) +cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR) +cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK) +cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC) +cMSG_GLOBAL_REQUEST = byte_chr(MSG_GLOBAL_REQUEST) +cMSG_REQUEST_SUCCESS = byte_chr(MSG_REQUEST_SUCCESS) +cMSG_REQUEST_FAILURE = byte_chr(MSG_REQUEST_FAILURE) +cMSG_CHANNEL_OPEN = byte_chr(MSG_CHANNEL_OPEN) +cMSG_CHANNEL_OPEN_SUCCESS = byte_chr(MSG_CHANNEL_OPEN_SUCCESS) +cMSG_CHANNEL_OPEN_FAILURE = byte_chr(MSG_CHANNEL_OPEN_FAILURE) +cMSG_CHANNEL_WINDOW_ADJUST = byte_chr(MSG_CHANNEL_WINDOW_ADJUST) +cMSG_CHANNEL_DATA = byte_chr(MSG_CHANNEL_DATA) +cMSG_CHANNEL_EXTENDED_DATA = byte_chr(MSG_CHANNEL_EXTENDED_DATA) +cMSG_CHANNEL_EOF = byte_chr(MSG_CHANNEL_EOF) +cMSG_CHANNEL_CLOSE = byte_chr(MSG_CHANNEL_CLOSE) +cMSG_CHANNEL_REQUEST = byte_chr(MSG_CHANNEL_REQUEST) +cMSG_CHANNEL_SUCCESS = byte_chr(MSG_CHANNEL_SUCCESS) +cMSG_CHANNEL_FAILURE = byte_chr(MSG_CHANNEL_FAILURE) + +# for debugging: +MSG_NAMES = { + MSG_DISCONNECT: "disconnect", + MSG_IGNORE: "ignore", + MSG_UNIMPLEMENTED: "unimplemented", + MSG_DEBUG: "debug", + MSG_SERVICE_REQUEST: "service-request", + MSG_SERVICE_ACCEPT: "service-accept", + MSG_KEXINIT: "kexinit", + MSG_EXT_INFO: "ext-info", + MSG_NEWKEYS: "newkeys", + 30: "kex30", + 31: "kex31", + 32: "kex32", + 33: "kex33", + 34: "kex34", + 40: "kex40", + 41: "kex41", + MSG_USERAUTH_REQUEST: "userauth-request", + 
MSG_USERAUTH_FAILURE: "userauth-failure", + MSG_USERAUTH_SUCCESS: "userauth-success", + MSG_USERAUTH_BANNER: "userauth-banner", + MSG_USERAUTH_PK_OK: "userauth-60(pk-ok/info-request)", + MSG_USERAUTH_INFO_RESPONSE: "userauth-info-response", + MSG_GLOBAL_REQUEST: "global-request", + MSG_REQUEST_SUCCESS: "request-success", + MSG_REQUEST_FAILURE: "request-failure", + MSG_CHANNEL_OPEN: "channel-open", + MSG_CHANNEL_OPEN_SUCCESS: "channel-open-success", + MSG_CHANNEL_OPEN_FAILURE: "channel-open-failure", + MSG_CHANNEL_WINDOW_ADJUST: "channel-window-adjust", + MSG_CHANNEL_DATA: "channel-data", + MSG_CHANNEL_EXTENDED_DATA: "channel-extended-data", + MSG_CHANNEL_EOF: "channel-eof", + MSG_CHANNEL_CLOSE: "channel-close", + MSG_CHANNEL_REQUEST: "channel-request", + MSG_CHANNEL_SUCCESS: "channel-success", + MSG_CHANNEL_FAILURE: "channel-failure", + MSG_USERAUTH_GSSAPI_RESPONSE: "userauth-gssapi-response", + MSG_USERAUTH_GSSAPI_TOKEN: "userauth-gssapi-token", + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: "userauth-gssapi-exchange-complete", + MSG_USERAUTH_GSSAPI_ERROR: "userauth-gssapi-error", + MSG_USERAUTH_GSSAPI_ERRTOK: "userauth-gssapi-error-token", + MSG_USERAUTH_GSSAPI_MIC: "userauth-gssapi-mic", +} + + +# authentication request return codes: +AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED = range(3) + + +# channel request failed reasons: +( + OPEN_SUCCEEDED, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_FAILED_CONNECT_FAILED, + OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, + OPEN_FAILED_RESOURCE_SHORTAGE, +) = range(0, 5) + + +CONNECTION_FAILED_CODE = { + 1: "Administratively prohibited", + 2: "Connect failed", + 3: "Unknown channel type", + 4: "Resource shortage", +} + + +( + DISCONNECT_SERVICE_NOT_AVAILABLE, + DISCONNECT_AUTH_CANCELLED_BY_USER, + DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, +) = (7, 13, 14) + +zero_byte = byte_chr(0) +one_byte = byte_chr(1) +four_byte = byte_chr(4) +max_byte = byte_chr(0xFF) +cr_byte = byte_chr(13) +linefeed_byte = byte_chr(10) +crlf = cr_byte + linefeed_byte +cr_byte_value = 13 +linefeed_byte_value = 10 + + +xffffffff = 0xFFFFFFFF +x80000000 = 0x80000000 +o666 = 438 +o660 = 432 +o644 = 420 +o600 = 384 +o777 = 511 +o700 = 448 +o70 = 56 + +DEBUG = logging.DEBUG +INFO = logging.INFO +WARNING = logging.WARNING +ERROR = logging.ERROR +CRITICAL = logging.CRITICAL + +# Common IO/select/etc sleep period, in seconds +io_sleep = 0.01 + +DEFAULT_WINDOW_SIZE = 64 * 2**15 +DEFAULT_MAX_PACKET_SIZE = 2**15 + +# lower bound on the max packet size we'll accept from the remote host +# Minimum packet size is 32768 bytes according to +# http://www.ietf.org/rfc/rfc4254.txt +MIN_WINDOW_SIZE = 2**15 + +# However, according to http://www.ietf.org/rfc/rfc4253.txt it is perfectly +# legal to accept a size much smaller, as OpenSSH client does as size 16384. +MIN_PACKET_SIZE = 2**12 + +# Max window size according to http://www.ietf.org/rfc/rfc4254.txt +MAX_WINDOW_SIZE = 2**32 - 1 diff --git a/venv/lib/python3.12/site-packages/paramiko/compress.py b/venv/lib/python3.12/site-packages/paramiko/compress.py new file mode 100644 index 0000000..18ff484 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/compress.py @@ -0,0 +1,40 @@ +# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com> + # + # This file is part of paramiko. + # + # Paramiko is free software; you can redistribute it and/or modify it under the + # terms of the GNU Lesser General Public License as published by the Free + # Software Foundation; either version 2.1 of the License, or (at your option) + # any later version.
+# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Compression implementations for a Transport. +""" + +import zlib + + +class ZlibCompressor: + def __init__(self): + # Use the default level of zlib compression + self.z = zlib.compressobj() + + def __call__(self, data): + return self.z.compress(data) + self.z.flush(zlib.Z_FULL_FLUSH) + + +class ZlibDecompressor: + def __init__(self): + self.z = zlib.decompressobj() + + def __call__(self, data): + return self.z.decompress(data) diff --git a/venv/lib/python3.12/site-packages/paramiko/config.py b/venv/lib/python3.12/site-packages/paramiko/config.py new file mode 100644 index 0000000..8ab55c6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/config.py @@ -0,0 +1,696 @@ +# Copyright (C) 2006-2007 Robey Pointer +# Copyright (C) 2012 Olle Lundberg +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Configuration file (aka ``ssh_config``) support. +""" + +import fnmatch +import getpass +import os +import re +import shlex +import socket +from hashlib import sha1 +from io import StringIO +from functools import partial + +invoke, invoke_import_error = None, None +try: + import invoke +except ImportError as e: + invoke_import_error = e + +from .ssh_exception import CouldNotCanonicalize, ConfigParseError + + +SSH_PORT = 22 + + +class SSHConfig: + """ + Representation of config information as stored in the format used by + OpenSSH. Queries can be made via `lookup`. The format is described in + OpenSSH's ``ssh_config`` man page. This class is provided primarily as a + convenience to posix users (since the OpenSSH format is a de-facto + standard on posix) but should work fine on Windows too. + + .. versionadded:: 1.6 + """ + + SETTINGS_REGEX = re.compile(r"(\w+)(?:\s*=\s*|\s+)(.+)") + + # TODO: do a full scan of ssh.c & friends to make sure we're fully + # compatible across the board, e.g. OpenSSH 8.1 added %n to ProxyCommand. + TOKENS_BY_CONFIG_KEY = { + "controlpath": ["%C", "%h", "%l", "%L", "%n", "%p", "%r", "%u"], + "hostname": ["%h"], + "identityfile": ["%C", "~", "%d", "%h", "%l", "%u", "%r"], + "proxycommand": ["~", "%h", "%p", "%r"], + "proxyjump": ["%h", "%p", "%r"], + # Doesn't seem worth making this 'special' for now, it will fit well + # enough (no actual match-exec config key to be confused with). 
+ "match-exec": ["%C", "%d", "%h", "%L", "%l", "%n", "%p", "%r", "%u"], + } + + def __init__(self): + """ + Create a new OpenSSH config object. + + Note: the newer alternate constructors `from_path`, `from_file` and + `from_text` are simpler to use, as they parse on instantiation. For + example, instead of:: + + config = SSHConfig() + config.parse(open("some-path.config") + + you could:: + + config = SSHConfig.from_file(open("some-path.config")) + # Or more directly: + config = SSHConfig.from_path("some-path.config") + # Or if you have arbitrary ssh_config text from some other source: + config = SSHConfig.from_text("Host foo\\n\\tUser bar") + """ + self._config = [] + + @classmethod + def from_text(cls, text): + """ + Create a new, parsed `SSHConfig` from ``text`` string. + + .. versionadded:: 2.7 + """ + return cls.from_file(StringIO(text)) + + @classmethod + def from_path(cls, path): + """ + Create a new, parsed `SSHConfig` from the file found at ``path``. + + .. versionadded:: 2.7 + """ + with open(path) as flo: + return cls.from_file(flo) + + @classmethod + def from_file(cls, flo): + """ + Create a new, parsed `SSHConfig` from file-like object ``flo``. + + .. versionadded:: 2.7 + """ + obj = cls() + obj.parse(flo) + return obj + + def parse(self, file_obj): + """ + Read an OpenSSH config from the given file object. + + :param file_obj: a file-like object to read the config file from + """ + # Start out w/ implicit/anonymous global host-like block to hold + # anything not contained by an explicit one. + context = {"host": ["*"], "config": {}} + for line in file_obj: + # Strip any leading or trailing whitespace from the line. + # Refer to https://github.com/paramiko/paramiko/issues/499 + line = line.strip() + # Skip blanks, comments + if not line or line.startswith("#"): + continue + + # Parse line into key, value + match = re.match(self.SETTINGS_REGEX, line) + if not match: + raise ConfigParseError("Unparsable line {}".format(line)) + key = match.group(1).lower() + value = match.group(2) + + # Host keyword triggers switch to new block/context + if key in ("host", "match"): + self._config.append(context) + context = {"config": {}} + if key == "host": + # TODO 4.0: make these real objects or at least name this + # "hosts" to acknowledge it's an iterable. (Doing so prior + # to 3.0, despite it being a private API, feels bad - + # surely such an old codebase has folks actually relying on + # these keys.) + context["host"] = self._get_hosts(value) + else: + context["matches"] = self._get_matches(value) + # Special-case for noop ProxyCommands + elif key == "proxycommand" and value.lower() == "none": + # Store 'none' as None - not as a string implying that the + # proxycommand is the literal shell command "none"! + context["config"][key] = None + # All other keywords get stored, directly or via append + else: + if value.startswith('"') and value.endswith('"'): + value = value[1:-1] + + # identityfile, localforward, remoteforward keys are special + # cases, since they are allowed to be specified multiple times + # and they should be tried in order of specification. + if key in ["identityfile", "localforward", "remoteforward"]: + if key in context["config"]: + context["config"][key].append(value) + else: + context["config"][key] = [value] + elif key not in context["config"]: + context["config"][key] = value + # Store last 'open' block and we're done + self._config.append(context) + + def lookup(self, hostname): + """ + Return a dict (`SSHConfigDict`) of config options for a given hostname. 
+ + The host-matching rules of OpenSSH's ``ssh_config`` man page are used: + For each parameter, the first obtained value will be used. The + configuration files contain sections separated by ``Host`` and/or + ``Match`` specifications, and that section is only applied for hosts + which match the given patterns or keywords + + Since the first obtained value for each parameter is used, more host- + specific declarations should be given near the beginning of the file, + and general defaults at the end. + + The keys in the returned dict are all normalized to lowercase (look for + ``"port"``, not ``"Port"``. The values are processed according to the + rules for substitution variable expansion in ``ssh_config``. + + Finally, please see the docs for `SSHConfigDict` for deeper info on + features such as optional type conversion methods, e.g.:: + + conf = my_config.lookup('myhost') + assert conf['passwordauthentication'] == 'yes' + assert conf.as_bool('passwordauthentication') is True + + .. note:: + If there is no explicitly configured ``HostName`` value, it will be + set to the being-looked-up hostname, which is as close as we can + get to OpenSSH's behavior around that particular option. + + :param str hostname: the hostname to lookup + + .. versionchanged:: 2.5 + Returns `SSHConfigDict` objects instead of dict literals. + .. versionchanged:: 2.7 + Added canonicalization support. + .. versionchanged:: 2.7 + Added ``Match`` support. + .. versionchanged:: 3.3 + Added ``Match final`` support. + """ + # First pass + options = self._lookup(hostname=hostname) + # Inject HostName if it was not set (this used to be done incidentally + # during tokenization, for some reason). + if "hostname" not in options: + options["hostname"] = hostname + # Handle canonicalization + canon = options.get("canonicalizehostname", None) in ("yes", "always") + maxdots = int(options.get("canonicalizemaxdots", 1)) + if canon and hostname.count(".") <= maxdots: + # NOTE: OpenSSH manpage does not explicitly state this, but its + # implementation for CanonicalDomains is 'split on any whitespace'. + domains = options["canonicaldomains"].split() + hostname = self.canonicalize(hostname, options, domains) + # Overwrite HostName again here (this is also what OpenSSH does) + options["hostname"] = hostname + options = self._lookup( + hostname, options, canonical=True, final=True + ) + else: + options = self._lookup( + hostname, options, canonical=False, final=True + ) + return options + + def _lookup(self, hostname, options=None, canonical=False, final=False): + # Init + if options is None: + options = SSHConfigDict() + # Iterate all stanzas, applying any that match, in turn (so that things + # like Match can reference currently understood state) + for context in self._config: + if not ( + self._pattern_matches(context.get("host", []), hostname) + or self._does_match( + context.get("matches", []), + hostname, + canonical, + final, + options, + ) + ): + continue + for key, value in context["config"].items(): + if key not in options: + # Create a copy of the original value, + # else it will reference the original list + # in self._config and update that value too + # when the extend() is being called. 
+ options[key] = value[:] if value is not None else value + elif key == "identityfile": + options[key].extend( + x for x in value if x not in options[key] + ) + if final: + # Expand variables in resulting values + # (besides 'Match exec' which was already handled above) + options = self._expand_variables(options, hostname) + return options + + def canonicalize(self, hostname, options, domains): + """ + Return canonicalized version of ``hostname``. + + :param str hostname: Target hostname. + :param options: An `SSHConfigDict` from a previous lookup pass. + :param domains: List of domains (e.g. ``["paramiko.org"]``). + + :returns: A canonicalized hostname if one was found, else ``None``. + + .. versionadded:: 2.7 + """ + found = False + for domain in domains: + candidate = "{}.{}".format(hostname, domain) + family_specific = _addressfamily_host_lookup(candidate, options) + if family_specific is not None: + # TODO: would we want to dig deeper into other results? e.g. to + # find something that satisfies PermittedCNAMEs when that is + # implemented? + found = family_specific[0] + else: + # TODO: what does ssh use here and is there a reason to use + # that instead of gethostbyname? + try: + found = socket.gethostbyname(candidate) + except socket.gaierror: + pass + if found: + # TODO: follow CNAME (implied by found != candidate?) if + # CanonicalizePermittedCNAMEs allows it + return candidate + # If we got here, it means canonicalization failed. + # When CanonicalizeFallbackLocal is undefined or 'yes', we just spit + # back the original hostname. + if options.get("canonicalizefallbacklocal", "yes") == "yes": + return hostname + # And here, we failed AND fallback was set to a non-yes value, so we + # need to get mad. + raise CouldNotCanonicalize(hostname) + + def get_hostnames(self): + """ + Return the set of literal hostnames defined in the SSH config (both + explicit hostnames and wildcard entries). + """ + hosts = set() + for entry in self._config: + hosts.update(entry["host"]) + return hosts + + def _pattern_matches(self, patterns, target): + # Convenience auto-splitter if not already a list + if hasattr(patterns, "split"): + patterns = patterns.split(",") + match = False + for pattern in patterns: + # Short-circuit if target matches a negated pattern + if pattern.startswith("!") and fnmatch.fnmatch( + target, pattern[1:] + ): + return False + # Flag a match, but continue (in case of later negation) if regular + # match occurs + elif fnmatch.fnmatch(target, pattern): + match = True + return match + + def _does_match( + self, match_list, target_hostname, canonical, final, options + ): + matched = [] + candidates = match_list[:] + local_username = getpass.getuser() + while candidates: + candidate = candidates.pop(0) + passed = None + # Obtain latest host/user value every loop, so later Match may + # reference values assigned within a prior Match. + configured_host = options.get("hostname", None) + configured_user = options.get("user", None) + type_, param = candidate["type"], candidate["param"] + # Canonical is a hard pass/fail based on whether this is a + # canonicalized re-lookup. + if type_ == "canonical": + if self._should_fail(canonical, candidate): + return False + if type_ == "final": + passed = final + # The parse step ensures we only see this by itself or after + # canonical, so it's also an easy hard pass. (No negation here as + # that would be uh, pretty weird?) 
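+            # For example (illustrative), a config line of
+            #   Match !user root host *.example.com
+            # arrives here as candidates equivalent to:
+            #   {"type": "user", "param": "root", "negate": True},
+            #   {"type": "host", "param": "*.example.com", "negate": False}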
+ elif type_ == "all": + return True + # From here, we are testing various non-hard criteria, + # short-circuiting only on fail + elif type_ == "host": + hostval = configured_host or target_hostname + passed = self._pattern_matches(param, hostval) + elif type_ == "originalhost": + passed = self._pattern_matches(param, target_hostname) + elif type_ == "user": + user = configured_user or local_username + passed = self._pattern_matches(param, user) + elif type_ == "localuser": + passed = self._pattern_matches(param, local_username) + elif type_ == "exec": + exec_cmd = self._tokenize( + options, target_hostname, "match-exec", param + ) + # This is the laziest spot in which we can get mad about an + # inability to import Invoke. + if invoke is None: + raise invoke_import_error + # Like OpenSSH, we 'redirect' stdout but let stderr bubble up + passed = invoke.run(exec_cmd, hide="stdout", warn=True).ok + # Tackle any 'passed, but was negated' results from above + if passed is not None and self._should_fail(passed, candidate): + return False + # Made it all the way here? Everything matched! + matched.append(candidate) + # Did anything match? (To be treated as bool, usually.) + return matched + + def _should_fail(self, would_pass, candidate): + return would_pass if candidate["negate"] else not would_pass + + def _tokenize(self, config, target_hostname, key, value): + """ + Tokenize a string based on current config/hostname data. + + :param config: Current config data. + :param target_hostname: Original target connection hostname. + :param key: Config key being tokenized (used to filter token list). + :param value: Config value being tokenized. + + :returns: The tokenized version of the input ``value`` string. + """ + allowed_tokens = self._allowed_tokens(key) + # Short-circuit if no tokenization possible + if not allowed_tokens: + return value + # Obtain potentially configured hostname, for use with %h. + # Special-case where we are tokenizing the hostname itself, to avoid + # replacing %h with a %h-bearing value, etc. + configured_hostname = target_hostname + if key != "hostname": + configured_hostname = config.get("hostname", configured_hostname) + # Ditto the rest of the source values + if "port" in config: + port = config["port"] + else: + port = SSH_PORT + user = getpass.getuser() + if "user" in config: + remoteuser = config["user"] + else: + remoteuser = user + local_hostname = socket.gethostname().split(".")[0] + local_fqdn = LazyFqdn(config, local_hostname) + homedir = os.path.expanduser("~") + tohash = local_hostname + target_hostname + repr(port) + remoteuser + # The actual tokens! + replacements = { + # TODO: %%??? + "%C": sha1(tohash.encode()).hexdigest(), + "%d": homedir, + "%h": configured_hostname, + # TODO: %i? + "%L": local_hostname, + "%l": local_fqdn, + # also this is pseudo buggy when not in Match exec mode so document + # that. also WHY is that the case?? don't we do all of this late? + "%n": target_hostname, + "%p": port, + "%r": remoteuser, + # TODO: %T? don't believe this is possible however + "%u": user, + "~": homedir, + } + # Do the thing with the stuff + tokenized = value + for find, replace in replacements.items(): + if find not in allowed_tokens: + continue + tokenized = tokenized.replace(find, str(replace)) + # TODO: log? eg that value -> tokenized + return tokenized + + def _allowed_tokens(self, key): + """ + Given config ``key``, return list of token strings to tokenize. + + .. 
note:: + This feels like it wants to eventually go away, but is used to + preserve as-strict-as-possible compatibility with OpenSSH, which + for whatever reason only applies some tokens to some config keys. + """ + return self.TOKENS_BY_CONFIG_KEY.get(key, []) + + def _expand_variables(self, config, target_hostname): + """ + Return a dict of config options with expanded substitutions + for a given original & current target hostname. + + Please refer to :doc:`/api/config` for details. + + :param dict config: the currently parsed config + :param str hostname: the hostname whose config is being looked up + """ + for k in config: + if config[k] is None: + continue + tokenizer = partial(self._tokenize, config, target_hostname, k) + if isinstance(config[k], list): + for i, value in enumerate(config[k]): + config[k][i] = tokenizer(value) + else: + config[k] = tokenizer(config[k]) + return config + + def _get_hosts(self, host): + """ + Return a list of host_names from host value. + """ + try: + return shlex.split(host) + except ValueError: + raise ConfigParseError("Unparsable host {}".format(host)) + + def _get_matches(self, match): + """ + Parse a specific Match config line into a list-of-dicts for its values. + + Performs some parse-time validation as well. + """ + matches = [] + tokens = shlex.split(match) + while tokens: + match = {"type": None, "param": None, "negate": False} + type_ = tokens.pop(0) + # Handle per-keyword negation + if type_.startswith("!"): + match["negate"] = True + type_ = type_[1:] + match["type"] = type_ + # all/canonical have no params (everything else does) + if type_ in ("all", "canonical", "final"): + matches.append(match) + continue + if not tokens: + raise ConfigParseError( + "Missing parameter to Match '{}' keyword".format(type_) + ) + match["param"] = tokens.pop(0) + matches.append(match) + # Perform some (easier to do now than in the middle) validation that is + # better handled here than at lookup time. + keywords = [x["type"] for x in matches] + if "all" in keywords: + allowable = ("all", "canonical") + ok, bad = ( + list(filter(lambda x: x in allowable, keywords)), + list(filter(lambda x: x not in allowable, keywords)), + ) + err = None + if any(bad): + err = "Match does not allow 'all' mixed with anything but 'canonical'" # noqa + elif "canonical" in ok and ok.index("canonical") > ok.index("all"): + err = "Match does not allow 'all' before 'canonical'" + if err is not None: + raise ConfigParseError(err) + return matches + + +def _addressfamily_host_lookup(hostname, options): + """ + Try looking up ``hostname`` in an IPv4 or IPv6 specific manner. + + This is an odd duck due to needing use in two divergent use cases. It looks + up ``AddressFamily`` in ``options`` and if it is ``inet`` or ``inet6``, + this function uses `socket.getaddrinfo` to perform a family-specific + lookup, returning the result if successful. + + In any other situation -- lookup failure, or ``AddressFamily`` being + unspecified or ``any`` -- ``None`` is returned instead and the caller is + expected to do something situation-appropriate like calling + `socket.gethostbyname`. + + :param str hostname: Hostname to look up. + :param options: `SSHConfigDict` instance w/ parsed options. + :returns: ``getaddrinfo``-style tuples, or ``None``, depending. 
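+
+    A minimal sketch of the intended calling pattern (hostname and options
+    are placeholders)::
+
+        opts = SSHConfigDict({"addressfamily": "inet"})
+        results = _addressfamily_host_lookup("example.com", opts)
+        if results is None:
+            addr = socket.gethostbyname("example.com")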
+ """ + address_family = options.get("addressfamily", "any").lower() + if address_family == "any": + return + try: + family = socket.AF_INET6 + if address_family == "inet": + family = socket.AF_INET + return socket.getaddrinfo( + hostname, + None, + family, + socket.SOCK_DGRAM, + socket.IPPROTO_IP, + socket.AI_CANONNAME, + ) + except socket.gaierror: + pass + + +class LazyFqdn: + """ + Returns the host's fqdn on request as string. + """ + + def __init__(self, config, host=None): + self.fqdn = None + self.config = config + self.host = host + + def __str__(self): + if self.fqdn is None: + # + # If the SSH config contains AddressFamily, use that when + # determining the local host's FQDN. Using socket.getfqdn() from + # the standard library is the most general solution, but can + # result in noticeable delays on some platforms when IPv6 is + # misconfigured or not available, as it calls getaddrinfo with no + # address family specified, so both IPv4 and IPv6 are checked. + # + + # Handle specific option + fqdn = None + results = _addressfamily_host_lookup(self.host, self.config) + if results is not None: + for res in results: + af, socktype, proto, canonname, sa = res + if canonname and "." in canonname: + fqdn = canonname + break + # Handle 'any' / unspecified / lookup failure + if fqdn is None: + fqdn = socket.getfqdn() + # Cache + self.fqdn = fqdn + return self.fqdn + + +class SSHConfigDict(dict): + """ + A dictionary wrapper/subclass for per-host configuration structures. + + This class introduces some usage niceties for consumers of `SSHConfig`, + specifically around the issue of variable type conversions: normal value + access yields strings, but there are now methods such as `as_bool` and + `as_int` that yield casted values instead. + + For example, given the following ``ssh_config`` file snippet:: + + Host foo.example.com + PasswordAuthentication no + Compression yes + ServerAliveInterval 60 + + the following code highlights how you can access the raw strings as well as + usefully Python type-casted versions (recalling that keys are all + normalized to lowercase first):: + + my_config = SSHConfig() + my_config.parse(open('~/.ssh/config')) + conf = my_config.lookup('foo.example.com') + + assert conf['passwordauthentication'] == 'no' + assert conf.as_bool('passwordauthentication') is False + assert conf['compression'] == 'yes' + assert conf.as_bool('compression') is True + assert conf['serveraliveinterval'] == '60' + assert conf.as_int('serveraliveinterval') == 60 + + .. versionadded:: 2.5 + """ + + def as_bool(self, key): + """ + Express given key's value as a boolean type. + + Typically, this is used for ``ssh_config``'s pseudo-boolean values + which are either ``"yes"`` or ``"no"``. In such cases, ``"yes"`` yields + ``True`` and any other value becomes ``False``. + + .. note:: + If (for whatever reason) the stored value is already boolean in + nature, it's simply returned. + + .. versionadded:: 2.5 + """ + val = self[key] + if isinstance(val, bool): + return val + return val.lower() == "yes" + + def as_int(self, key): + """ + Express given key's value as an integer, if possible. + + This method will raise ``ValueError`` or similar if the value is not + int-appropriate, same as the builtin `int` type. + + .. 
versionadded:: 2.5
+        """
+        return int(self[key])
diff --git a/venv/lib/python3.12/site-packages/paramiko/dsskey.py b/venv/lib/python3.12/site-packages/paramiko/dsskey.py
new file mode 100644
index 0000000..5215d28
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/paramiko/dsskey.py
@@ -0,0 +1,258 @@
+# Copyright (C) 2003-2007 Robey Pointer
+#
+# This file is part of paramiko.
+#
+# Paramiko is free software; you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation; either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""
+DSS keys.
+"""
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import dsa
+from cryptography.hazmat.primitives.asymmetric.utils import (
+    decode_dss_signature,
+    encode_dss_signature,
+)
+
+from paramiko import util
+from paramiko.common import zero_byte
+from paramiko.ssh_exception import SSHException
+from paramiko.message import Message
+from paramiko.ber import BER, BERException
+from paramiko.pkey import PKey
+
+
+class DSSKey(PKey):
+    """
+    Representation of a DSS key which can be used to sign and verify SSH2
+    data.
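+
+    A round-trip sketch, assuming an existing OpenSSH-format DSS key file
+    (the path is a placeholder)::
+
+        key = DSSKey.from_private_key_file("/tmp/test_dss.key")
+        sig = key.sign_ssh_data(b"payload")
+        sig.rewind()
+        assert key.verify_ssh_sig(b"payload", sig)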
+ """ + + name = "ssh-dss" + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + vals=None, + file_obj=None, + ): + self.p = None + self.q = None + self.g = None + self.y = None + self.x = None + self.public_blob = None + if file_obj is not None: + self._from_private_key(file_obj, password) + return + if filename is not None: + self._from_private_key_file(filename, password) + return + if (msg is None) and (data is not None): + msg = Message(data) + if vals is not None: + self.p, self.q, self.g, self.y = vals + else: + self._check_type_and_load_cert( + msg=msg, + key_type=self.name, + cert_type=f"{self.name}-cert-v01@openssh.com", + ) + self.p = msg.get_mpint() + self.q = msg.get_mpint() + self.g = msg.get_mpint() + self.y = msg.get_mpint() + self.size = util.bit_length(self.p) + + def asbytes(self): + m = Message() + m.add_string(self.name) + m.add_mpint(self.p) + m.add_mpint(self.q) + m.add_mpint(self.g) + m.add_mpint(self.y) + return m.asbytes() + + def __str__(self): + return self.asbytes() + + @property + def _fields(self): + return (self.get_name(), self.p, self.q, self.g, self.y) + + # TODO 4.0: remove + def get_name(self): + return self.name + + def get_bits(self): + return self.size + + def can_sign(self): + return self.x is not None + + def sign_ssh_data(self, data, algorithm=None): + key = dsa.DSAPrivateNumbers( + x=self.x, + public_numbers=dsa.DSAPublicNumbers( + y=self.y, + parameter_numbers=dsa.DSAParameterNumbers( + p=self.p, q=self.q, g=self.g + ), + ), + ).private_key(backend=default_backend()) + sig = key.sign(data, hashes.SHA1()) + r, s = decode_dss_signature(sig) + + m = Message() + m.add_string(self.name) + # apparently, in rare cases, r or s may be shorter than 20 bytes! + rstr = util.deflate_long(r, 0) + sstr = util.deflate_long(s, 0) + if len(rstr) < 20: + rstr = zero_byte * (20 - len(rstr)) + rstr + if len(sstr) < 20: + sstr = zero_byte * (20 - len(sstr)) + sstr + m.add_string(rstr + sstr) + return m + + def verify_ssh_sig(self, data, msg): + if len(msg.asbytes()) == 40: + # spies.com bug: signature has no header + sig = msg.asbytes() + else: + kind = msg.get_text() + if kind != self.name: + return 0 + sig = msg.get_binary() + + # pull out (r, s) which are NOT encoded as mpints + sigR = util.inflate_long(sig[:20], 1) + sigS = util.inflate_long(sig[20:], 1) + + signature = encode_dss_signature(sigR, sigS) + + key = dsa.DSAPublicNumbers( + y=self.y, + parameter_numbers=dsa.DSAParameterNumbers( + p=self.p, q=self.q, g=self.g + ), + ).public_key(backend=default_backend()) + try: + key.verify(signature, data, hashes.SHA1()) + except InvalidSignature: + return False + else: + return True + + def write_private_key_file(self, filename, password=None): + key = dsa.DSAPrivateNumbers( + x=self.x, + public_numbers=dsa.DSAPublicNumbers( + y=self.y, + parameter_numbers=dsa.DSAParameterNumbers( + p=self.p, q=self.q, g=self.g + ), + ), + ).private_key(backend=default_backend()) + + self._write_private_key_file( + filename, + key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + def write_private_key(self, file_obj, password=None): + key = dsa.DSAPrivateNumbers( + x=self.x, + public_numbers=dsa.DSAPublicNumbers( + y=self.y, + parameter_numbers=dsa.DSAParameterNumbers( + p=self.p, q=self.q, g=self.g + ), + ), + ).private_key(backend=default_backend()) + + self._write_private_key( + file_obj, + key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + @staticmethod + def generate(bits=1024, 
progress_func=None): + """ + Generate a new private DSS key. This factory function can be used to + generate a new host key or authentication key. + + :param int bits: number of bits the generated key should be. + :param progress_func: Unused + :return: new `.DSSKey` private key + """ + numbers = dsa.generate_private_key( + bits, backend=default_backend() + ).private_numbers() + key = DSSKey( + vals=( + numbers.public_numbers.parameter_numbers.p, + numbers.public_numbers.parameter_numbers.q, + numbers.public_numbers.parameter_numbers.g, + numbers.public_numbers.y, + ) + ) + key.x = numbers.x + return key + + # ...internals... + + def _from_private_key_file(self, filename, password): + data = self._read_private_key_file("DSA", filename, password) + self._decode_key(data) + + def _from_private_key(self, file_obj, password): + data = self._read_private_key("DSA", file_obj, password) + self._decode_key(data) + + def _decode_key(self, data): + pkformat, data = data + # private key file contains: + # DSAPrivateKey = { version = 0, p, q, g, y, x } + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + keylist = BER(data).decode() + except BERException as e: + raise SSHException("Unable to parse key file: {}".format(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + keylist = self._uint32_cstruct_unpack(data, "iiiii") + keylist = [0] + list(keylist) + else: + self._got_bad_key_format_id(pkformat) + if type(keylist) is not list or len(keylist) < 6 or keylist[0] != 0: + raise SSHException( + "not a valid DSA private key file (bad ber encoding)" + ) + self.p = keylist[1] + self.q = keylist[2] + self.g = keylist[3] + self.y = keylist[4] + self.x = keylist[5] + self.size = util.bit_length(self.p) diff --git a/venv/lib/python3.12/site-packages/paramiko/ecdsakey.py b/venv/lib/python3.12/site-packages/paramiko/ecdsakey.py new file mode 100644 index 0000000..6fd95fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/ecdsakey.py @@ -0,0 +1,339 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +ECDSA keys +""" + +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.asymmetric.utils import ( + decode_dss_signature, + encode_dss_signature, +) + +from paramiko.common import four_byte +from paramiko.message import Message +from paramiko.pkey import PKey +from paramiko.ssh_exception import SSHException +from paramiko.util import deflate_long + + +class _ECDSACurve: + """ + Represents a specific ECDSA Curve (nistp256, nistp384, etc). 
+ + Handles the generation of the key format identifier and the selection of + the proper hash function. Also grabs the proper curve from the 'ecdsa' + package. + """ + + def __init__(self, curve_class, nist_name): + self.nist_name = nist_name + self.key_length = curve_class.key_size + + # Defined in RFC 5656 6.2 + self.key_format_identifier = "ecdsa-sha2-" + self.nist_name + + # Defined in RFC 5656 6.2.1 + if self.key_length <= 256: + self.hash_object = hashes.SHA256 + elif self.key_length <= 384: + self.hash_object = hashes.SHA384 + else: + self.hash_object = hashes.SHA512 + + self.curve_class = curve_class + + +class _ECDSACurveSet: + """ + A collection to hold the ECDSA curves. Allows querying by oid and by key + format identifier. The two ways in which ECDSAKey needs to be able to look + up curves. + """ + + def __init__(self, ecdsa_curves): + self.ecdsa_curves = ecdsa_curves + + def get_key_format_identifier_list(self): + return [curve.key_format_identifier for curve in self.ecdsa_curves] + + def get_by_curve_class(self, curve_class): + for curve in self.ecdsa_curves: + if curve.curve_class == curve_class: + return curve + + def get_by_key_format_identifier(self, key_format_identifier): + for curve in self.ecdsa_curves: + if curve.key_format_identifier == key_format_identifier: + return curve + + def get_by_key_length(self, key_length): + for curve in self.ecdsa_curves: + if curve.key_length == key_length: + return curve + + +class ECDSAKey(PKey): + """ + Representation of an ECDSA key which can be used to sign and verify SSH2 + data. + """ + + _ECDSA_CURVES = _ECDSACurveSet( + [ + _ECDSACurve(ec.SECP256R1, "nistp256"), + _ECDSACurve(ec.SECP384R1, "nistp384"), + _ECDSACurve(ec.SECP521R1, "nistp521"), + ] + ) + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + vals=None, + file_obj=None, + # TODO 4.0: remove; it does nothing since porting to cryptography.io + validate_point=True, + ): + self.verifying_key = None + self.signing_key = None + self.public_blob = None + if file_obj is not None: + self._from_private_key(file_obj, password) + return + if filename is not None: + self._from_private_key_file(filename, password) + return + if (msg is None) and (data is not None): + msg = Message(data) + if vals is not None: + self.signing_key, self.verifying_key = vals + c_class = self.signing_key.curve.__class__ + self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(c_class) + else: + # Must set ecdsa_curve first; subroutines called herein may need to + # spit out our get_name(), which relies on this. + key_type = msg.get_text() + # But this also means we need to hand it a real key/curve + # identifier, so strip out any cert business. (NOTE: could push + # that into _ECDSACurveSet.get_by_key_format_identifier(), but it + # feels more correct to do it here?) 
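+            # e.g. (illustrative) "ecdsa-sha2-nistp256-cert-v01@openssh.com"
+            # is trimmed to "ecdsa-sha2-nistp256" before the curve-set lookup.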
+ suffix = "-cert-v01@openssh.com" + if key_type.endswith(suffix): + key_type = key_type[: -len(suffix)] + self.ecdsa_curve = self._ECDSA_CURVES.get_by_key_format_identifier( + key_type + ) + key_types = self._ECDSA_CURVES.get_key_format_identifier_list() + cert_types = [ + "{}-cert-v01@openssh.com".format(x) for x in key_types + ] + self._check_type_and_load_cert( + msg=msg, key_type=key_types, cert_type=cert_types + ) + curvename = msg.get_text() + if curvename != self.ecdsa_curve.nist_name: + raise SSHException( + "Can't handle curve of type {}".format(curvename) + ) + + pointinfo = msg.get_binary() + try: + key = ec.EllipticCurvePublicKey.from_encoded_point( + self.ecdsa_curve.curve_class(), pointinfo + ) + self.verifying_key = key + except ValueError: + raise SSHException("Invalid public key") + + @classmethod + def identifiers(cls): + return cls._ECDSA_CURVES.get_key_format_identifier_list() + + # TODO 4.0: deprecate/remove + @classmethod + def supported_key_format_identifiers(cls): + return cls.identifiers() + + def asbytes(self): + key = self.verifying_key + m = Message() + m.add_string(self.ecdsa_curve.key_format_identifier) + m.add_string(self.ecdsa_curve.nist_name) + + numbers = key.public_numbers() + + key_size_bytes = (key.curve.key_size + 7) // 8 + + x_bytes = deflate_long(numbers.x, add_sign_padding=False) + x_bytes = b"\x00" * (key_size_bytes - len(x_bytes)) + x_bytes + + y_bytes = deflate_long(numbers.y, add_sign_padding=False) + y_bytes = b"\x00" * (key_size_bytes - len(y_bytes)) + y_bytes + + point_str = four_byte + x_bytes + y_bytes + m.add_string(point_str) + return m.asbytes() + + def __str__(self): + return self.asbytes() + + @property + def _fields(self): + return ( + self.get_name(), + self.verifying_key.public_numbers().x, + self.verifying_key.public_numbers().y, + ) + + def get_name(self): + return self.ecdsa_curve.key_format_identifier + + def get_bits(self): + return self.ecdsa_curve.key_length + + def can_sign(self): + return self.signing_key is not None + + def sign_ssh_data(self, data, algorithm=None): + ecdsa = ec.ECDSA(self.ecdsa_curve.hash_object()) + sig = self.signing_key.sign(data, ecdsa) + r, s = decode_dss_signature(sig) + + m = Message() + m.add_string(self.ecdsa_curve.key_format_identifier) + m.add_string(self._sigencode(r, s)) + return m + + def verify_ssh_sig(self, data, msg): + if msg.get_text() != self.ecdsa_curve.key_format_identifier: + return False + sig = msg.get_binary() + sigR, sigS = self._sigdecode(sig) + signature = encode_dss_signature(sigR, sigS) + + try: + self.verifying_key.verify( + signature, data, ec.ECDSA(self.ecdsa_curve.hash_object()) + ) + except InvalidSignature: + return False + else: + return True + + def write_private_key_file(self, filename, password=None): + self._write_private_key_file( + filename, + self.signing_key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + def write_private_key(self, file_obj, password=None): + self._write_private_key( + file_obj, + self.signing_key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + @classmethod + def generate(cls, curve=ec.SECP256R1(), progress_func=None, bits=None): + """ + Generate a new private ECDSA key. This factory function can be used to + generate a new host key or authentication key. + + :param progress_func: Not used for this type of key. 
+ :returns: A new private key (`.ECDSAKey`) object + """ + if bits is not None: + curve = cls._ECDSA_CURVES.get_by_key_length(bits) + if curve is None: + raise ValueError("Unsupported key length: {:d}".format(bits)) + curve = curve.curve_class() + + private_key = ec.generate_private_key(curve, backend=default_backend()) + return ECDSAKey(vals=(private_key, private_key.public_key())) + + # ...internals... + + def _from_private_key_file(self, filename, password): + data = self._read_private_key_file("EC", filename, password) + self._decode_key(data) + + def _from_private_key(self, file_obj, password): + data = self._read_private_key("EC", file_obj, password) + self._decode_key(data) + + def _decode_key(self, data): + pkformat, data = data + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + key = serialization.load_der_private_key( + data, password=None, backend=default_backend() + ) + except ( + ValueError, + AssertionError, + TypeError, + UnsupportedAlgorithm, + ) as e: + raise SSHException(str(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + try: + msg = Message(data) + curve_name = msg.get_text() + verkey = msg.get_binary() # noqa: F841 + sigkey = msg.get_mpint() + name = "ecdsa-sha2-" + curve_name + curve = self._ECDSA_CURVES.get_by_key_format_identifier(name) + if not curve: + raise SSHException("Invalid key curve identifier") + key = ec.derive_private_key( + sigkey, curve.curve_class(), default_backend() + ) + except Exception as e: + # PKey._read_private_key_openssh() should check or return + # keytype - parsing could fail for any reason due to wrong type + raise SSHException(str(e)) + else: + self._got_bad_key_format_id(pkformat) + + self.signing_key = key + self.verifying_key = key.public_key() + curve_class = key.curve.__class__ + self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(curve_class) + + def _sigencode(self, r, s): + msg = Message() + msg.add_mpint(r) + msg.add_mpint(s) + return msg.asbytes() + + def _sigdecode(self, sig): + msg = Message(sig) + r = msg.get_mpint() + s = msg.get_mpint() + return r, s diff --git a/venv/lib/python3.12/site-packages/paramiko/ed25519key.py b/venv/lib/python3.12/site-packages/paramiko/ed25519key.py new file mode 100644 index 0000000..e5e81ac --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/ed25519key.py @@ -0,0 +1,212 @@ +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+
+import bcrypt
+
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.ciphers import Cipher
+
+import nacl.signing
+
+from paramiko.message import Message
+from paramiko.pkey import PKey, OPENSSH_AUTH_MAGIC, _unpad_openssh
+from paramiko.util import b
+from paramiko.ssh_exception import SSHException, PasswordRequiredException
+
+
+class Ed25519Key(PKey):
+    """
+    Representation of an `Ed25519 <https://ed25519.cr.yp.to/>`_ key.
+
+    .. note::
+        Ed25519 key support was added to OpenSSH in version 6.5.
+
+    .. versionadded:: 2.2
+    .. versionchanged:: 2.3
+        Added a ``file_obj`` parameter to match other key classes.
+    """
+
+    name = "ssh-ed25519"
+
+    def __init__(
+        self, msg=None, data=None, filename=None, password=None, file_obj=None
+    ):
+        self.public_blob = None
+        verifying_key = signing_key = None
+        if msg is None and data is not None:
+            msg = Message(data)
+        if msg is not None:
+            self._check_type_and_load_cert(
+                msg=msg,
+                key_type=self.name,
+                cert_type="ssh-ed25519-cert-v01@openssh.com",
+            )
+            verifying_key = nacl.signing.VerifyKey(msg.get_binary())
+        elif filename is not None:
+            with open(filename, "r") as f:
+                pkformat, data = self._read_private_key("OPENSSH", f)
+        elif file_obj is not None:
+            pkformat, data = self._read_private_key("OPENSSH", file_obj)
+
+        if filename or file_obj:
+            signing_key = self._parse_signing_key_data(data, password)
+
+        if signing_key is None and verifying_key is None:
+            raise ValueError("need a key")
+
+        self._signing_key = signing_key
+        self._verifying_key = verifying_key
+
+    def _parse_signing_key_data(self, data, password):
+        from paramiko.transport import Transport
+
+        # We may eventually want this to be usable for other key types, as
+        # OpenSSH moves to it, but for now this is just for Ed25519 keys.
+        # This format is described here:
+        # https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key
+        # The description isn't totally complete, and I had to refer to the
+        # source for a full implementation.
+        message = Message(data)
+        if message.get_bytes(len(OPENSSH_AUTH_MAGIC)) != OPENSSH_AUTH_MAGIC:
+            raise SSHException("Invalid key")
+
+        ciphername = message.get_text()
+        kdfname = message.get_text()
+        kdfoptions = message.get_binary()
+        num_keys = message.get_int()
+
+        if kdfname == "none":
+            # kdfname of "none" must have an empty kdfoptions, the ciphername
+            # must be "none"
+            if kdfoptions or ciphername != "none":
+                raise SSHException("Invalid key")
+        elif kdfname == "bcrypt":
+            if not password:
+                raise PasswordRequiredException(
+                    "Private key file is encrypted"
+                )
+            kdf = Message(kdfoptions)
+            bcrypt_salt = kdf.get_binary()
+            bcrypt_rounds = kdf.get_int()
+        else:
+            raise SSHException("Invalid key")
+
+        if ciphername != "none" and ciphername not in Transport._cipher_info:
+            raise SSHException("Invalid key")
+
+        public_keys = []
+        for _ in range(num_keys):
+            pubkey = Message(message.get_binary())
+            if pubkey.get_text() != self.name:
+                raise SSHException("Invalid key")
+            public_keys.append(pubkey.get_binary())
+
+        private_ciphertext = message.get_binary()
+        if ciphername == "none":
+            private_data = private_ciphertext
+        else:
+            cipher = Transport._cipher_info[ciphername]
+            key = bcrypt.kdf(
+                password=b(password),
+                salt=bcrypt_salt,
+                desired_key_bytes=cipher["key-size"] + cipher["block-size"],
+                rounds=bcrypt_rounds,
+                # We can't control how many rounds are on disk, so no sense
+                # warning about it.
+ ignore_few_rounds=True, + ) + decryptor = Cipher( + cipher["class"](key[: cipher["key-size"]]), + cipher["mode"](key[cipher["key-size"] :]), + backend=default_backend(), + ).decryptor() + private_data = ( + decryptor.update(private_ciphertext) + decryptor.finalize() + ) + + message = Message(_unpad_openssh(private_data)) + if message.get_int() != message.get_int(): + raise SSHException("Invalid key") + + signing_keys = [] + for i in range(num_keys): + if message.get_text() != self.name: + raise SSHException("Invalid key") + # A copy of the public key, again, ignore. + public = message.get_binary() + key_data = message.get_binary() + # The second half of the key data is yet another copy of the public + # key... + signing_key = nacl.signing.SigningKey(key_data[:32]) + # Verify that all the public keys are the same... + assert ( + signing_key.verify_key.encode() + == public + == public_keys[i] + == key_data[32:] + ) + signing_keys.append(signing_key) + # Comment, ignore. + message.get_binary() + + if len(signing_keys) != 1: + raise SSHException("Invalid key") + return signing_keys[0] + + def asbytes(self): + if self.can_sign(): + v = self._signing_key.verify_key + else: + v = self._verifying_key + m = Message() + m.add_string(self.name) + m.add_string(v.encode()) + return m.asbytes() + + @property + def _fields(self): + if self.can_sign(): + v = self._signing_key.verify_key + else: + v = self._verifying_key + return (self.get_name(), v) + + # TODO 4.0: remove + def get_name(self): + return self.name + + def get_bits(self): + return 256 + + def can_sign(self): + return self._signing_key is not None + + def sign_ssh_data(self, data, algorithm=None): + m = Message() + m.add_string(self.name) + m.add_string(self._signing_key.sign(data).signature) + return m + + def verify_ssh_sig(self, data, msg): + if msg.get_text() != self.name: + return False + + try: + self._verifying_key.verify(data, msg.get_binary()) + except nacl.exceptions.BadSignatureError: + return False + else: + return True diff --git a/venv/lib/python3.12/site-packages/paramiko/file.py b/venv/lib/python3.12/site-packages/paramiko/file.py new file mode 100644 index 0000000..a36abb9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/file.py @@ -0,0 +1,528 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +from io import BytesIO + +from paramiko.common import ( + linefeed_byte_value, + crlf, + cr_byte, + linefeed_byte, + cr_byte_value, +) + +from paramiko.util import ClosingContextManager, u + + +class BufferedFile(ClosingContextManager): + """ + Reusable base class to implement Python-style file buffering around a + simpler stream. 
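+
+    An illustrative subclass need only override ``_read``/``_write``; e.g. a
+    read-only wrapper around an in-memory payload (a sketch, not part of this
+    module)::
+
+        class BytesBackedFile(BufferedFile):
+            def __init__(self, payload):
+                super().__init__()
+                self._payload = payload
+                self._set_mode("r")
+
+            def _read(self, size):
+                # Hand back up to `size` bytes; None signals EOF.
+                chunk = self._payload[:size]
+                self._payload = self._payload[size:]
+                return chunk or None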
+ """ + + _DEFAULT_BUFSIZE = 8192 + + SEEK_SET = 0 + SEEK_CUR = 1 + SEEK_END = 2 + + FLAG_READ = 0x1 + FLAG_WRITE = 0x2 + FLAG_APPEND = 0x4 + FLAG_BINARY = 0x10 + FLAG_BUFFERED = 0x20 + FLAG_LINE_BUFFERED = 0x40 + FLAG_UNIVERSAL_NEWLINE = 0x80 + + def __init__(self): + self.newlines = None + self._flags = 0 + self._bufsize = self._DEFAULT_BUFSIZE + self._wbuffer = BytesIO() + self._rbuffer = bytes() + self._at_trailing_cr = False + self._closed = False + # pos - position within the file, according to the user + # realpos - position according the OS + # (these may be different because we buffer for line reading) + self._pos = self._realpos = 0 + # size only matters for seekable files + self._size = 0 + + def __del__(self): + self.close() + + def __iter__(self): + """ + Returns an iterator that can be used to iterate over the lines in this + file. This iterator happens to return the file itself, since a file is + its own iterator. + + :raises: ``ValueError`` -- if the file is closed. + """ + if self._closed: + raise ValueError("I/O operation on closed file") + return self + + def close(self): + """ + Close the file. Future read and write operations will fail. + """ + self.flush() + self._closed = True + + def flush(self): + """ + Write out any data in the write buffer. This may do nothing if write + buffering is not turned on. + """ + self._write_all(self._wbuffer.getvalue()) + self._wbuffer = BytesIO() + return + + def __next__(self): + """ + Returns the next line from the input, or raises ``StopIteration`` + when EOF is hit. Unlike python file objects, it's okay to mix + calls to `.next` and `.readline`. + + :raises: ``StopIteration`` -- when the end of the file is reached. + + :returns: + a line (`str`, or `bytes` if the file was opened in binary mode) + read from the file. + """ + line = self.readline() + if not line: + raise StopIteration + return line + + def readable(self): + """ + Check if the file can be read from. + + :returns: + `True` if the file can be read from. If `False`, `read` will raise + an exception. + """ + return (self._flags & self.FLAG_READ) == self.FLAG_READ + + def writable(self): + """ + Check if the file can be written to. + + :returns: + `True` if the file can be written to. If `False`, `write` will + raise an exception. + """ + return (self._flags & self.FLAG_WRITE) == self.FLAG_WRITE + + def seekable(self): + """ + Check if the file supports random access. + + :returns: + `True` if the file supports random access. If `False`, `seek` will + raise an exception. + """ + return False + + def readinto(self, buff): + """ + Read up to ``len(buff)`` bytes into ``bytearray`` *buff* and return the + number of bytes read. + + :returns: + The number of bytes read. + """ + data = self.read(len(buff)) + buff[: len(data)] = data + return len(data) + + def read(self, size=None): + """ + Read at most ``size`` bytes from the file (less if we hit the end of + the file first). If the ``size`` argument is negative or omitted, + read all the remaining data in the file. + + .. note:: + ``'b'`` mode flag is ignored (``self.FLAG_BINARY`` in + ``self._flags``), because SSH treats all files as binary, since we + have no idea what encoding the file is in, or even if the file is + text data. 
+ + :param int size: maximum number of bytes to read + :returns: + data read from the file (as bytes), or an empty string if EOF was + encountered immediately + """ + if self._closed: + raise IOError("File is closed") + if not (self._flags & self.FLAG_READ): + raise IOError("File is not open for reading") + if (size is None) or (size < 0): + # go for broke + result = bytearray(self._rbuffer) + self._rbuffer = bytes() + self._pos += len(result) + while True: + try: + new_data = self._read(self._DEFAULT_BUFSIZE) + except EOFError: + new_data = None + if (new_data is None) or (len(new_data) == 0): + break + result.extend(new_data) + self._realpos += len(new_data) + self._pos += len(new_data) + return bytes(result) + if size <= len(self._rbuffer): + result = self._rbuffer[:size] + self._rbuffer = self._rbuffer[size:] + self._pos += len(result) + return result + while len(self._rbuffer) < size: + read_size = size - len(self._rbuffer) + if self._flags & self.FLAG_BUFFERED: + read_size = max(self._bufsize, read_size) + try: + new_data = self._read(read_size) + except EOFError: + new_data = None + if (new_data is None) or (len(new_data) == 0): + break + self._rbuffer += new_data + self._realpos += len(new_data) + result = self._rbuffer[:size] + self._rbuffer = self._rbuffer[size:] + self._pos += len(result) + return result + + def readline(self, size=None): + """ + Read one entire line from the file. A trailing newline character is + kept in the string (but may be absent when a file ends with an + incomplete line). If the size argument is present and non-negative, it + is a maximum byte count (including the trailing newline) and an + incomplete line may be returned. An empty string is returned only when + EOF is encountered immediately. + + .. note:: + Unlike stdio's ``fgets``, the returned string contains null + characters (``'\\0'``) if they occurred in the input. + + :param int size: maximum length of returned string. + :returns: + next line of the file, or an empty string if the end of the + file has been reached. + + If the file was opened in binary (``'b'``) mode: bytes are returned + Else: the encoding of the file is assumed to be UTF-8 and character + strings (`str`) are returned + """ + # it's almost silly how complex this function is. + if self._closed: + raise IOError("File is closed") + if not (self._flags & self.FLAG_READ): + raise IOError("File not open for reading") + line = self._rbuffer + truncated = False + while True: + if ( + self._at_trailing_cr + and self._flags & self.FLAG_UNIVERSAL_NEWLINE + and len(line) > 0 + ): + # edge case: the newline may be '\r\n' and we may have read + # only the first '\r' last time. + if line[0] == linefeed_byte_value: + line = line[1:] + self._record_newline(crlf) + else: + self._record_newline(cr_byte) + self._at_trailing_cr = False + # check size before looking for a linefeed, in case we already have + # enough. 
if (size is not None) and (size >= 0):
+                if len(line) >= size:
+                    # truncate line
+                    self._rbuffer = line[size:]
+                    line = line[:size]
+                    truncated = True
+                    break
+                n = size - len(line)
+            else:
+                n = self._bufsize
+            if linefeed_byte in line or (
+                self._flags & self.FLAG_UNIVERSAL_NEWLINE and cr_byte in line
+            ):
+                break
+            try:
+                new_data = self._read(n)
+            except EOFError:
+                new_data = None
+            if (new_data is None) or (len(new_data) == 0):
+                self._rbuffer = bytes()
+                self._pos += len(line)
+                return line if self._flags & self.FLAG_BINARY else u(line)
+            line += new_data
+            self._realpos += len(new_data)
+        # find the newline
+        pos = line.find(linefeed_byte)
+        if self._flags & self.FLAG_UNIVERSAL_NEWLINE:
+            rpos = line.find(cr_byte)
+            if (rpos >= 0) and (rpos < pos or pos < 0):
+                pos = rpos
+        if pos == -1:
+            # we couldn't find a newline in the truncated string, return it
+            self._pos += len(line)
+            return line if self._flags & self.FLAG_BINARY else u(line)
+        xpos = pos + 1
+        if (
+            line[pos] == cr_byte_value
+            and xpos < len(line)
+            and line[xpos] == linefeed_byte_value
+        ):
+            xpos += 1
+        # if the string was truncated, _rbuffer needs to have the string after
+        # the newline character plus the truncated part of the line we stored
+        # earlier in _rbuffer
+        if truncated:
+            self._rbuffer = line[xpos:] + self._rbuffer
+        else:
+            self._rbuffer = line[xpos:]
+
+        lf = line[pos:xpos]
+        line = line[:pos] + linefeed_byte
+        if (len(self._rbuffer) == 0) and (lf == cr_byte):
+            # we could read the line up to a '\r' and there could still be a
+            # '\n' following that we read next time. note that and eat it.
+            self._at_trailing_cr = True
+        else:
+            self._record_newline(lf)
+        self._pos += len(line)
+        return line if self._flags & self.FLAG_BINARY else u(line)
+
+    def readlines(self, sizehint=None):
+        """
+        Read all remaining lines using `readline` and return them as a list.
+        If the optional ``sizehint`` argument is present, instead of reading
+        up to EOF, whole lines totalling approximately sizehint bytes
+        (possibly after rounding up to an internal buffer size) are read.
+
+        :param int sizehint: desired maximum number of bytes to read.
+        :returns: list of lines read from the file.
+        """
+        lines = []
+        byte_count = 0
+        while True:
+            line = self.readline()
+            if len(line) == 0:
+                break
+            lines.append(line)
+            byte_count += len(line)
+            if (sizehint is not None) and (byte_count >= sizehint):
+                break
+        return lines
+
+    def seek(self, offset, whence=0):
+        """
+        Set the file's current position, like stdio's ``fseek``. Not all file
+        objects support seeking.
+
+        .. note::
+            If a file is opened in append mode (``'a'`` or ``'a+'``), any seek
+            operations will be undone at the next write (as the file position
+            will move back to the end of the file).
+
+        :param int offset:
+            position to move to within the file, relative to ``whence``.
+        :param int whence:
+            type of movement: 0 = absolute; 1 = relative to the current
+            position; 2 = relative to the end of the file.
+
+        :raises: ``IOError`` -- if the file doesn't support random access.
+        """
+        raise IOError("File does not support seeking.")
+
+    def tell(self):
+        """
+        Return the file's current position. This may not be accurate or
+        useful if the underlying file doesn't support random access, or was
+        opened in append mode.
+
+        :returns: file position (number of bytes).
+        """
+        return self._pos
+
+    def write(self, data):
+        """
+        Write data to the file. If write buffering is on (``bufsize`` was
+        specified and non-zero), some or all of the data may not actually be
+        written yet.
(Use `flush` or `close` to force buffered data to be + written out.) + + :param data: ``str``/``bytes`` data to write + """ + if isinstance(data, str): + # Accept text and encode as utf-8 for compatibility only. + data = data.encode("utf-8") + if self._closed: + raise IOError("File is closed") + if not (self._flags & self.FLAG_WRITE): + raise IOError("File not open for writing") + if not (self._flags & self.FLAG_BUFFERED): + self._write_all(data) + return + self._wbuffer.write(data) + if self._flags & self.FLAG_LINE_BUFFERED: + # only scan the new data for linefeed, to avoid wasting time. + last_newline_pos = data.rfind(linefeed_byte) + if last_newline_pos >= 0: + wbuf = self._wbuffer.getvalue() + last_newline_pos += len(wbuf) - len(data) + self._write_all(wbuf[: last_newline_pos + 1]) + self._wbuffer = BytesIO() + self._wbuffer.write(wbuf[last_newline_pos + 1 :]) + return + # even if we're line buffering, if the buffer has grown past the + # buffer size, force a flush. + if self._wbuffer.tell() >= self._bufsize: + self.flush() + return + + def writelines(self, sequence): + """ + Write a sequence of strings to the file. The sequence can be any + iterable object producing strings, typically a list of strings. (The + name is intended to match `readlines`; `writelines` does not add line + separators.) + + :param sequence: an iterable sequence of strings. + """ + for line in sequence: + self.write(line) + return + + def xreadlines(self): + """ + Identical to ``iter(f)``. This is a deprecated file interface that + predates Python iterator support. + """ + return self + + @property + def closed(self): + return self._closed + + # ...overrides... + + def _read(self, size): + """ + (subclass override) + Read data from the stream. Return ``None`` or raise ``EOFError`` to + indicate EOF. + """ + raise EOFError() + + def _write(self, data): + """ + (subclass override) + Write data into the stream. + """ + raise IOError("write not implemented") + + def _get_size(self): + """ + (subclass override) + Return the size of the file. This is called from within `_set_mode` + if the file is opened in append mode, so the file position can be + tracked and `seek` and `tell` will work correctly. If the file is + a stream that can't be randomly accessed, you don't need to override + this method, + """ + return 0 + + # ...internals... + + def _set_mode(self, mode="r", bufsize=-1): + """ + Subclasses call this method to initialize the BufferedFile. + """ + # set bufsize in any event, because it's used for readline(). + self._bufsize = self._DEFAULT_BUFSIZE + if bufsize < 0: + # do no buffering by default, because otherwise writes will get + # buffered in a way that will probably confuse people. + bufsize = 0 + if bufsize == 1: + # apparently, line buffering only affects writes. reads are only + # buffered if you call readline (directly or indirectly: iterating + # over a file will indirectly call readline). 
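+            # In short (illustrative): bufsize=1 -> line-buffered writes;
+            # bufsize>1 -> flush once the buffer reaches that many bytes;
+            # bufsize=0 -> unbuffered writes.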
+ self._flags |= self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED + elif bufsize > 1: + self._bufsize = bufsize + self._flags |= self.FLAG_BUFFERED + self._flags &= ~self.FLAG_LINE_BUFFERED + elif bufsize == 0: + # unbuffered + self._flags &= ~(self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED) + + if ("r" in mode) or ("+" in mode): + self._flags |= self.FLAG_READ + if ("w" in mode) or ("+" in mode): + self._flags |= self.FLAG_WRITE + if "a" in mode: + self._flags |= self.FLAG_WRITE | self.FLAG_APPEND + self._size = self._get_size() + self._pos = self._realpos = self._size + if "b" in mode: + self._flags |= self.FLAG_BINARY + if "U" in mode: + self._flags |= self.FLAG_UNIVERSAL_NEWLINE + # built-in file objects have this attribute to store which kinds of + # line terminations they've seen: + # + self.newlines = None + + def _write_all(self, raw_data): + # the underlying stream may be something that does partial writes (like + # a socket). + data = memoryview(raw_data) + while len(data) > 0: + count = self._write(data) + data = data[count:] + if self._flags & self.FLAG_APPEND: + self._size += count + self._pos = self._realpos = self._size + else: + self._pos += count + self._realpos += count + return None + + def _record_newline(self, newline): + # silliness about tracking what kinds of newlines we've seen. + # i don't understand why it can be None, a string, or a tuple, instead + # of just always being a tuple, but we'll emulate that behavior anyway. + if not (self._flags & self.FLAG_UNIVERSAL_NEWLINE): + return + if self.newlines is None: + self.newlines = newline + elif self.newlines != newline and isinstance(self.newlines, bytes): + self.newlines = (self.newlines, newline) + elif newline not in self.newlines: + self.newlines += (newline,) diff --git a/venv/lib/python3.12/site-packages/paramiko/hostkeys.py b/venv/lib/python3.12/site-packages/paramiko/hostkeys.py new file mode 100644 index 0000000..4d47e95 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/hostkeys.py @@ -0,0 +1,384 @@ +# Copyright (C) 2006-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +from base64 import encodebytes, decodebytes +import binascii +import os +import re + +from collections.abc import MutableMapping +from hashlib import sha1 +from hmac import HMAC + + +from paramiko.pkey import PKey, UnknownKeyType +from paramiko.util import get_logger, constant_time_bytes_eq, b, u +from paramiko.ssh_exception import SSHException + + +class HostKeys(MutableMapping): + """ + Representation of an OpenSSH-style "known hosts" file. Host keys can be + read from one or more files, and then individual hosts can be looked up to + verify server keys during SSH negotiation. + + A `.HostKeys` object can be treated like a dict; any dict lookup is + equivalent to calling `lookup`. + + .. 
versionadded:: 1.5.3 + """ + + def __init__(self, filename=None): + """ + Create a new HostKeys object, optionally loading keys from an OpenSSH + style host-key file. + + :param str filename: filename to load host keys from, or ``None`` + """ + # emulate a dict of { hostname: { keytype: PKey } } + self._entries = [] + if filename is not None: + self.load(filename) + + def add(self, hostname, keytype, key): + """ + Add a host key entry to the table. Any existing entry for a + ``(hostname, keytype)`` pair will be replaced. + + :param str hostname: the hostname (or IP) to add + :param str keytype: key type (``"ssh-rsa"`` or ``"ssh-dss"``) + :param .PKey key: the key to add + """ + for e in self._entries: + if (hostname in e.hostnames) and (e.key.get_name() == keytype): + e.key = key + return + self._entries.append(HostKeyEntry([hostname], key)) + + def load(self, filename): + """ + Read a file of known SSH host keys, in the format used by OpenSSH. + This type of file unfortunately doesn't exist on Windows, but on + posix, it will usually be stored in + ``os.path.expanduser("~/.ssh/known_hosts")``. + + If this method is called multiple times, the host keys are merged, + not cleared. So multiple calls to `load` will just call `add`, + replacing any existing entries and adding new ones. + + :param str filename: name of the file to read host keys from + + :raises: ``IOError`` -- if there was an error reading the file + """ + with open(filename, "r") as f: + for lineno, line in enumerate(f, 1): + line = line.strip() + if (len(line) == 0) or (line[0] == "#"): + continue + try: + entry = HostKeyEntry.from_line(line, lineno) + except SSHException: + continue + if entry is not None: + _hostnames = entry.hostnames + for h in _hostnames: + if self.check(h, entry.key): + entry.hostnames.remove(h) + if len(entry.hostnames): + self._entries.append(entry) + + def save(self, filename): + """ + Save host keys into a file, in the format used by OpenSSH. The order + of keys in the file will be preserved when possible (if these keys were + loaded from a file originally). The single exception is that combined + lines will be split into individual key lines, which is arguably a bug. + + :param str filename: name of the file to write + + :raises: ``IOError`` -- if there was an error writing the file + + .. versionadded:: 1.6.1 + """ + with open(filename, "w") as f: + for e in self._entries: + line = e.to_line() + if line: + f.write(line) + + def lookup(self, hostname): + """ + Find a hostkey entry for a given hostname or IP. If no entry is found, + ``None`` is returned. Otherwise a dictionary of keytype to key is + returned. The keytype will be either ``"ssh-rsa"`` or ``"ssh-dss"``. 
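+        For example, ``HostKeys(os.path.expanduser("~/.ssh/known_hosts"))``
+        followed by ``.lookup("example.com")`` might yield a dict-like
+        mapping such as ``{"ssh-rsa": <PKey>}``.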
+ + :param str hostname: the hostname (or IP) to lookup + :return: dict of `str` -> `.PKey` keys associated with this host + (or ``None``) + """ + + class SubDict(MutableMapping): + def __init__(self, hostname, entries, hostkeys): + self._hostname = hostname + self._entries = entries + self._hostkeys = hostkeys + + def __iter__(self): + for k in self.keys(): + yield k + + def __len__(self): + return len(self.keys()) + + def __delitem__(self, key): + for e in list(self._entries): + if e.key.get_name() == key: + self._entries.remove(e) + break + else: + raise KeyError(key) + + def __getitem__(self, key): + for e in self._entries: + if e.key.get_name() == key: + return e.key + raise KeyError(key) + + def __setitem__(self, key, val): + for e in self._entries: + if e.key is None: + continue + if e.key.get_name() == key: + # replace + e.key = val + break + else: + # add a new one + e = HostKeyEntry([hostname], val) + self._entries.append(e) + self._hostkeys._entries.append(e) + + def keys(self): + return [ + e.key.get_name() + for e in self._entries + if e.key is not None + ] + + entries = [] + for e in self._entries: + if self._hostname_matches(hostname, e): + entries.append(e) + if len(entries) == 0: + return None + return SubDict(hostname, entries, self) + + def _hostname_matches(self, hostname, entry): + """ + Tests whether ``hostname`` string matches given SubDict ``entry``. + + :returns bool: + """ + for h in entry.hostnames: + if ( + h == hostname + or h.startswith("|1|") + and not hostname.startswith("|1|") + and constant_time_bytes_eq(self.hash_host(hostname, h), h) + ): + return True + return False + + def check(self, hostname, key): + """ + Return True if the given key is associated with the given hostname + in this dictionary. + + :param str hostname: hostname (or IP) of the SSH server + :param .PKey key: the key to check + :return: + ``True`` if the key is associated with the hostname; else ``False`` + """ + k = self.lookup(hostname) + if k is None: + return False + host_key = k.get(key.get_name(), None) + if host_key is None: + return False + return host_key.asbytes() == key.asbytes() + + def clear(self): + """ + Remove all host keys from the dictionary. + """ + self._entries = [] + + def __iter__(self): + for k in self.keys(): + yield k + + def __len__(self): + return len(self.keys()) + + def __getitem__(self, key): + ret = self.lookup(key) + if ret is None: + raise KeyError(key) + return ret + + def __delitem__(self, key): + index = None + for i, entry in enumerate(self._entries): + if self._hostname_matches(key, entry): + index = i + break + if index is None: + raise KeyError(key) + self._entries.pop(index) + + def __setitem__(self, hostname, entry): + # don't use this please. + if len(entry) == 0: + self._entries.append(HostKeyEntry([hostname], None)) + return + for key_type in entry.keys(): + found = False + for e in self._entries: + if (hostname in e.hostnames) and e.key.get_name() == key_type: + # replace + e.key = entry[key_type] + found = True + if not found: + self._entries.append(HostKeyEntry([hostname], entry[key_type])) + + def keys(self): + ret = [] + for e in self._entries: + for h in e.hostnames: + if h not in ret: + ret.append(h) + return ret + + def values(self): + ret = [] + for k in self.keys(): + ret.append(self.lookup(k)) + return ret + + @staticmethod + def hash_host(hostname, salt=None): + """ + Return a "hashed" form of the hostname, as used by OpenSSH when storing + hashed hostnames in the known_hosts file. 
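+        The returned value has the form ``|1|<salt base64>|<HMAC-SHA1 base64>``,
+        the same format OpenSSH emits with ``HashKnownHosts yes``.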
+ + :param str hostname: the hostname to hash + :param str salt: optional salt to use when hashing + (must be 20 bytes long) + :return: the hashed hostname as a `str` + """ + if salt is None: + salt = os.urandom(sha1().digest_size) + else: + if salt.startswith("|1|"): + salt = salt.split("|")[2] + salt = decodebytes(b(salt)) + assert len(salt) == sha1().digest_size + hmac = HMAC(salt, b(hostname), sha1).digest() + hostkey = "|1|{}|{}".format(u(encodebytes(salt)), u(encodebytes(hmac))) + return hostkey.replace("\n", "") + + +class InvalidHostKey(Exception): + def __init__(self, line, exc): + self.line = line + self.exc = exc + self.args = (line, exc) + + +class HostKeyEntry: + """ + Representation of a line in an OpenSSH-style "known hosts" file. + """ + + def __init__(self, hostnames=None, key=None): + self.valid = (hostnames is not None) and (key is not None) + self.hostnames = hostnames + self.key = key + + @classmethod + def from_line(cls, line, lineno=None): + """ + Parses the given line of text to find the names for the host, + the type of key, and the key data. The line is expected to be in the + format used by the OpenSSH known_hosts file. Fields are separated by a + single space or tab. + + Lines are expected to not have leading or trailing whitespace. + We don't bother to check for comments or empty lines. All of + that should be taken care of before sending the line to us. + + :param str line: a line from an OpenSSH known_hosts file + """ + log = get_logger("paramiko.hostkeys") + fields = re.split(" |\t", line) + if len(fields) < 3: + # Bad number of fields + msg = "Not enough fields found in known_hosts in line {} ({!r})" + log.info(msg.format(lineno, line)) + return None + fields = fields[:3] + + names, key_type, key = fields + names = names.split(",") + + # Decide what kind of key we're looking at and create an object + # to hold it accordingly. + try: + # TODO: this grew organically and doesn't seem /wrong/ per se (file + # read -> unicode str -> bytes for base64 decode -> decoded bytes); + # but in Python 3 forever land, can we simply use + # `base64.b64decode(str-from-file)` here? + key_bytes = decodebytes(b(key)) + except binascii.Error as e: + raise InvalidHostKey(line, e) + + try: + return cls(names, PKey.from_type_string(key_type, key_bytes)) + except UnknownKeyType: + # TODO 4.0: consider changing HostKeys API so this just raises + # naturally and the exception is muted higher up in the stack? + log.info("Unable to handle key of type {}".format(key_type)) + return None + + def to_line(self): + """ + Returns a string in OpenSSH known_hosts file format, or None if + the object is not in a valid state. A trailing newline is + included. 
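+        The line follows the usual ``known_hosts`` layout, e.g.
+        ``host1,host2 ssh-rsa AAAAB3...`` followed by a newline.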
+ """ + if self.valid: + return "{} {} {}\n".format( + ",".join(self.hostnames), + self.key.get_name(), + self.key.get_base64(), + ) + return None + + def __repr__(self): + return "".format(self.hostnames, self.key) diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_curve25519.py b/venv/lib/python3.12/site-packages/paramiko/kex_curve25519.py new file mode 100644 index 0000000..20c23e4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_curve25519.py @@ -0,0 +1,131 @@ +import binascii +import hashlib + +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PrivateKey, + X25519PublicKey, +) + +from paramiko.message import Message +from paramiko.common import byte_chr +from paramiko.ssh_exception import SSHException + + +_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32) +c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)] + + +class KexCurve25519: + hash_algo = hashlib.sha256 + + def __init__(self, transport): + self.transport = transport + self.key = None + + @classmethod + def is_available(cls): + try: + X25519PrivateKey.generate() + except UnsupportedAlgorithm: + return False + else: + return True + + def _perform_exchange(self, peer_key): + secret = self.key.exchange(peer_key) + if constant_time.bytes_eq(secret, b"\x00" * 32): + raise SSHException( + "peer's curve25519 public value has wrong order" + ) + return secret + + def start_kex(self): + self.key = X25519PrivateKey.generate() + if self.transport.server_mode: + self.transport._expect_packet(_MSG_KEXECDH_INIT) + return + + m = Message() + m.add_byte(c_MSG_KEXECDH_INIT) + m.add_string( + self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + ) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXECDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT): + return self._parse_kexecdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY): + return self._parse_kexecdh_reply(m) + raise SSHException( + "KexCurve25519 asked to handle packet type {:d}".format(ptype) + ) + + def _parse_kexecdh_init(self, m): + peer_key_bytes = m.get_string() + peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes) + K = self._perform_exchange(peer_key) + K = int(binascii.hexlify(K), 16) + # compute exchange hash + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + server_key_bytes = self.transport.get_server_key().asbytes() + exchange_key_bytes = self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + hm.add_string(server_key_bytes) + hm.add_string(peer_key_bytes) + hm.add_string(exchange_key_bytes) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # construct reply + m = Message() + m.add_byte(c_MSG_KEXECDH_REPLY) + m.add_string(server_key_bytes) + m.add_string(exchange_key_bytes) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + + def _parse_kexecdh_reply(self, m): + peer_host_key_bytes = m.get_string() + peer_key_bytes = m.get_string() + sig = m.get_binary() + + 
peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes) + + K = self._perform_exchange(peer_key) + K = int(binascii.hexlify(K), 16) + # compute exchange hash and verify signature + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(peer_host_key_bytes) + hm.add_string( + self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + ) + hm.add_string(peer_key_bytes) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(peer_host_key_bytes, sig) + self.transport._activate_outbound() diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_ecdh_nist.py b/venv/lib/python3.12/site-packages/paramiko/kex_ecdh_nist.py new file mode 100644 index 0000000..41fab46 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_ecdh_nist.py @@ -0,0 +1,151 @@ +""" +Ephemeral Elliptic Curve Diffie-Hellman (ECDH) key exchange +RFC 5656, Section 4 +""" + +from hashlib import sha256, sha384, sha512 +from paramiko.common import byte_chr +from paramiko.message import Message +from paramiko.ssh_exception import SSHException +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives import serialization +from binascii import hexlify + +_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32) +c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)] + + +class KexNistp256: + + name = "ecdh-sha2-nistp256" + hash_algo = sha256 + curve = ec.SECP256R1() + + def __init__(self, transport): + self.transport = transport + # private key, client public and server public keys + self.P = 0 + self.Q_C = None + self.Q_S = None + + def start_kex(self): + self._generate_key_pair() + if self.transport.server_mode: + self.transport._expect_packet(_MSG_KEXECDH_INIT) + return + m = Message() + m.add_byte(c_MSG_KEXECDH_INIT) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + m.add_string( + self.Q_C.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXECDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT): + return self._parse_kexecdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY): + return self._parse_kexecdh_reply(m) + raise SSHException( + "KexECDH asked to handle packet type {:d}".format(ptype) + ) + + def _generate_key_pair(self): + self.P = ec.generate_private_key(self.curve, default_backend()) + if self.transport.server_mode: + self.Q_S = self.P.public_key() + return + self.Q_C = self.P.public_key() + + def _parse_kexecdh_init(self, m): + Q_C_bytes = m.get_string() + self.Q_C = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, Q_C_bytes + ) + K_S = self.transport.get_server_key().asbytes() + K = self.P.exchange(ec.ECDH(), self.Q_C) + K = int(hexlify(K), 16) + # compute exchange hash + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(K_S) + hm.add_string(Q_C_bytes) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + hm.add_string( + self.Q_S.public_bytes( + serialization.Encoding.X962, + 
serialization.PublicFormat.UncompressedPoint, + ) + ) + hm.add_mpint(int(K)) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # construct reply + m = Message() + m.add_byte(c_MSG_KEXECDH_REPLY) + m.add_string(K_S) + m.add_string( + self.Q_S.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + + def _parse_kexecdh_reply(self, m): + K_S = m.get_string() + Q_S_bytes = m.get_string() + self.Q_S = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, Q_S_bytes + ) + sig = m.get_binary() + K = self.P.exchange(ec.ECDH(), self.Q_S) + K = int(hexlify(K), 16) + # compute exchange hash and verify signature + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(K_S) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + hm.add_string( + self.Q_C.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + hm.add_string(Q_S_bytes) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(K_S, sig) + self.transport._activate_outbound() + + +class KexNistp384(KexNistp256): + name = "ecdh-sha2-nistp384" + hash_algo = sha384 + curve = ec.SECP384R1() + + +class KexNistp521(KexNistp256): + name = "ecdh-sha2-nistp521" + hash_algo = sha512 + curve = ec.SECP521R1() diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_gex.py b/venv/lib/python3.12/site-packages/paramiko/kex_gex.py new file mode 100644 index 0000000..baa0803 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_gex.py @@ -0,0 +1,288 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Variant on `KexGroup1 ` where the prime "p" and +generator "g" are provided by the server. A bit more work is required on the +client side, and a **lot** more on the server side. 
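+
+As implemented below, the client flow is: send KEXDH_GEX_REQUEST with
+(min, preferred, max) bit sizes, receive KEXDH_GEX_GROUP carrying p and g,
+send KEXDH_GEX_INIT with e = g^x mod p, then check the KEXDH_GEX_REPLY
+(host key, f, and a signature over the exchange hash).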
+""" + +import os +from hashlib import sha1, sha256 + +from paramiko import util +from paramiko.common import DEBUG, byte_chr, byte_ord, byte_mask +from paramiko.message import Message +from paramiko.ssh_exception import SSHException + + +( + _MSG_KEXDH_GEX_REQUEST_OLD, + _MSG_KEXDH_GEX_GROUP, + _MSG_KEXDH_GEX_INIT, + _MSG_KEXDH_GEX_REPLY, + _MSG_KEXDH_GEX_REQUEST, +) = range(30, 35) + +( + c_MSG_KEXDH_GEX_REQUEST_OLD, + c_MSG_KEXDH_GEX_GROUP, + c_MSG_KEXDH_GEX_INIT, + c_MSG_KEXDH_GEX_REPLY, + c_MSG_KEXDH_GEX_REQUEST, +) = [byte_chr(c) for c in range(30, 35)] + + +class KexGex: + + name = "diffie-hellman-group-exchange-sha1" + min_bits = 1024 + max_bits = 8192 + preferred_bits = 2048 + hash_algo = sha1 + + def __init__(self, transport): + self.transport = transport + self.p = None + self.q = None + self.g = None + self.x = None + self.e = None + self.f = None + self.old_style = False + + def start_kex(self, _test_old_style=False): + if self.transport.server_mode: + self.transport._expect_packet( + _MSG_KEXDH_GEX_REQUEST, _MSG_KEXDH_GEX_REQUEST_OLD + ) + return + # request a bit range: we accept (min_bits) to (max_bits), but prefer + # (preferred_bits). according to the spec, we shouldn't pull the + # minimum up above 1024. + m = Message() + if _test_old_style: + # only used for unit tests: we shouldn't ever send this + m.add_byte(c_MSG_KEXDH_GEX_REQUEST_OLD) + m.add_int(self.preferred_bits) + self.old_style = True + else: + m.add_byte(c_MSG_KEXDH_GEX_REQUEST) + m.add_int(self.min_bits) + m.add_int(self.preferred_bits) + m.add_int(self.max_bits) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_GROUP) + + def parse_next(self, ptype, m): + if ptype == _MSG_KEXDH_GEX_REQUEST: + return self._parse_kexdh_gex_request(m) + elif ptype == _MSG_KEXDH_GEX_GROUP: + return self._parse_kexdh_gex_group(m) + elif ptype == _MSG_KEXDH_GEX_INIT: + return self._parse_kexdh_gex_init(m) + elif ptype == _MSG_KEXDH_GEX_REPLY: + return self._parse_kexdh_gex_reply(m) + elif ptype == _MSG_KEXDH_GEX_REQUEST_OLD: + return self._parse_kexdh_gex_request_old(m) + msg = "KexGex {} asked to handle packet type {:d}" + raise SSHException(msg.format(self.name, ptype)) + + # ...internals... + + def _generate_x(self): + # generate an "x" (1 < x < (p-1)/2). + q = (self.p - 1) // 2 + qnorm = util.deflate_long(q, 0) + qhbyte = byte_ord(qnorm[0]) + byte_count = len(qnorm) + qmask = 0xFF + while not (qhbyte & 0x80): + qhbyte <<= 1 + qmask >>= 1 + while True: + x_bytes = os.urandom(byte_count) + x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:] + x = util.inflate_long(x_bytes, 1) + if (x > 1) and (x < q): + break + self.x = x + + def _parse_kexdh_gex_request(self, m): + minbits = m.get_int() + preferredbits = m.get_int() + maxbits = m.get_int() + # smoosh the user's preferred size into our own limits + if preferredbits > self.max_bits: + preferredbits = self.max_bits + if preferredbits < self.min_bits: + preferredbits = self.min_bits + # fix min/max if they're inconsistent. technically, we could just pout + # and hang up, but there's no harm in giving them the benefit of the + # doubt and just picking a bitsize for them. 
+ if minbits > preferredbits: + minbits = preferredbits + if maxbits < preferredbits: + maxbits = preferredbits + # now save a copy + self.min_bits = minbits + self.preferred_bits = preferredbits + self.max_bits = maxbits + # generate prime + pack = self.transport._get_modulus_pack() + if pack is None: + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, + "Picking p ({} <= {} <= {} bits)".format( + minbits, preferredbits, maxbits + ), + ) + self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits) + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_GROUP) + m.add_mpint(self.p) + m.add_mpint(self.g) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_INIT) + + def _parse_kexdh_gex_request_old(self, m): + # same as above, but without min_bits or max_bits (used by older + # clients like putty) + self.preferred_bits = m.get_int() + # smoosh the user's preferred size into our own limits + if self.preferred_bits > self.max_bits: + self.preferred_bits = self.max_bits + if self.preferred_bits < self.min_bits: + self.preferred_bits = self.min_bits + # generate prime + pack = self.transport._get_modulus_pack() + if pack is None: + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, "Picking p (~ {} bits)".format(self.preferred_bits) + ) + self.g, self.p = pack.get_modulus( + self.min_bits, self.preferred_bits, self.max_bits + ) + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_GROUP) + m.add_mpint(self.p) + m.add_mpint(self.g) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_INIT) + self.old_style = True + + def _parse_kexdh_gex_group(self, m): + self.p = m.get_mpint() + self.g = m.get_mpint() + # reject if p's bit length < 1024 or > 8192 + bitlen = util.bit_length(self.p) + if (bitlen < 1024) or (bitlen > 8192): + raise SSHException( + "Server-generated gex p (don't ask) is out of range " + "({} bits)".format(bitlen) + ) + self.transport._log(DEBUG, "Got server p ({} bits)".format(bitlen)) + self._generate_x() + # now compute e = g^x mod p + self.e = pow(self.g, self.x, self.p) + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_INIT) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_GEX_REPLY) + + def _parse_kexdh_gex_init(self, m): + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.p - 1): + raise SSHException('Client kex "e" is out of range') + self._generate_x() + self.f = pow(self.g, self.x, self.p) + K = pow(self.e, self.x, self.p) + key = self.transport.get_server_key().asbytes() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + key, + ) + if not self.old_style: + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + if not self.old_style: + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + # sign it + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # send reply + m = Message() + m.add_byte(c_MSG_KEXDH_GEX_REPLY) + m.add_string(key) + m.add_mpint(self.f) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + 
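+    # client side of the final step: range-check the server's f, derive K,
+    # rebuild the same exchange hash the server signed, and verify the host
+    # key signature before switching to the new keys.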
+ def _parse_kexdh_gex_reply(self, m): + host_key = m.get_string() + self.f = m.get_mpint() + sig = m.get_string() + if (self.f < 1) or (self.f > self.p - 1): + raise SSHException('Server kex "f" is out of range') + K = pow(self.f, self.x, self.p) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + host_key, + ) + if not self.old_style: + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + if not self.old_style: + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(host_key, sig) + self.transport._activate_outbound() + + +class KexGexSHA256(KexGex): + name = "diffie-hellman-group-exchange-sha256" + hash_algo = sha256 diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_group1.py b/venv/lib/python3.12/site-packages/paramiko/kex_group1.py new file mode 100644 index 0000000..f074256 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_group1.py @@ -0,0 +1,155 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +1024 bit key halves, using a known "p" prime and "g" generator. 
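+
+The resulting exchange hash H covers (V_C || V_S || I_C || I_S || K_S || e ||
+f || K), exactly the fields assembled in the handlers below.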
+""" + +import os +from hashlib import sha1 + +from paramiko import util +from paramiko.common import max_byte, zero_byte, byte_chr, byte_mask +from paramiko.message import Message +from paramiko.ssh_exception import SSHException + + +_MSG_KEXDH_INIT, _MSG_KEXDH_REPLY = range(30, 32) +c_MSG_KEXDH_INIT, c_MSG_KEXDH_REPLY = [byte_chr(c) for c in range(30, 32)] + +b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7 +b0000000000000000 = zero_byte * 8 + + +class KexGroup1: + + # draft-ietf-secsh-transport-09.txt, page 17 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group1-sha1" + hash_algo = sha1 + + def __init__(self, transport): + self.transport = transport + self.x = 0 + self.e = 0 + self.f = 0 + + def start_kex(self): + self._generate_x() + if self.transport.server_mode: + # compute f = g^x mod p, but don't send it yet + self.f = pow(self.G, self.x, self.P) + self.transport._expect_packet(_MSG_KEXDH_INIT) + return + # compute e = g^x mod p (where g=2), and send it + self.e = pow(self.G, self.x, self.P) + m = Message() + m.add_byte(c_MSG_KEXDH_INIT) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXDH_INIT): + return self._parse_kexdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXDH_REPLY): + return self._parse_kexdh_reply(m) + msg = "KexGroup1 asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) + + # ...internals... + + def _generate_x(self): + # generate an "x" (1 < x < q), where q is (p-1)/2. + # p is a 128-byte (1024-bit) number, where the first 64 bits are 1. + # therefore q can be approximated as a 2^1023. we drop the subset of + # potential x where the first 63 bits are 1, because some of those + # will be larger than q (but this is a tiny tiny subset of + # potential x). 
+ while 1: + x_bytes = os.urandom(128) + x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:] + if ( + x_bytes[:8] != b7fffffffffffffff + and x_bytes[:8] != b0000000000000000 + ): + break + self.x = util.inflate_long(x_bytes) + + def _parse_kexdh_reply(self, m): + # client mode + host_key = m.get_string() + self.f = m.get_mpint() + if (self.f < 1) or (self.f > self.P - 1): + raise SSHException('Server kex "f" is out of range') + sig = m.get_binary() + K = pow(self.f, self.x, self.P) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(host_key) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(host_key, sig) + self.transport._activate_outbound() + + def _parse_kexdh_init(self, m): + # server mode + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.P - 1): + raise SSHException('Client kex "e" is out of range') + K = pow(self.e, self.x, self.P) + key = self.transport.get_server_key().asbytes() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(key) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + # sign it + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # send reply + m = Message() + m.add_byte(c_MSG_KEXDH_REPLY) + m.add_string(key) + m.add_mpint(self.f) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_group14.py b/venv/lib/python3.12/site-packages/paramiko/kex_group14.py new file mode 100644 index 0000000..8dee551 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_group14.py @@ -0,0 +1,40 @@ +# Copyright (C) 2013 Torsten Landschoff +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +2048 bit key halves, using a known "p" prime and "g" generator. 
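+
+All of the exchange logic is inherited from `KexGroup1`; only the RFC 3526
+group14 prime and (for the SHA-256 variant) the hash algorithm change.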
+""" + +from paramiko.kex_group1 import KexGroup1 +from hashlib import sha1, sha256 + + +class KexGroup14(KexGroup1): + + # http://tools.ietf.org/html/rfc3526#section-3 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group14-sha1" + hash_algo = sha1 + + +class KexGroup14SHA256(KexGroup14): + name = "diffie-hellman-group14-sha256" + hash_algo = sha256 diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_group16.py b/venv/lib/python3.12/site-packages/paramiko/kex_group16.py new file mode 100644 index 0000000..c675f87 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_group16.py @@ -0,0 +1,35 @@ +# Copyright (C) 2019 Edgar Sousa +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +4096 bit key halves, using a known "p" prime and "g" generator. 
+""" + +from paramiko.kex_group1 import KexGroup1 +from hashlib import sha512 + + +class KexGroup16SHA512(KexGroup1): + name = "diffie-hellman-group16-sha512" + # http://tools.ietf.org/html/rfc3526#section-5 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group16-sha512" + hash_algo = sha512 diff --git a/venv/lib/python3.12/site-packages/paramiko/kex_gss.py b/venv/lib/python3.12/site-packages/paramiko/kex_gss.py new file mode 100644 index 0000000..2a5f29e --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/kex_gss.py @@ -0,0 +1,686 @@ +# Copyright (C) 2003-2007 Robey Pointer +# Copyright (C) 2013-2014 science + computing ag +# Author: Sebastian Deiss +# +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +""" +This module provides GSS-API / SSPI Key Exchange as defined in :rfc:`4462`. + +.. note:: Credential delegation is not supported in server mode. + +.. note:: + `RFC 4462 Section 2.2 + `_ says we are not + required to implement GSS-API error messages. Thus, in many methods within + this module, if an error occurs an exception will be thrown and the + connection will be terminated. + +.. seealso:: :doc:`/api/ssh_gss` + +.. 
versionadded:: 1.15 +""" + +import os +from hashlib import sha1 + +from paramiko.common import ( + DEBUG, + max_byte, + zero_byte, + byte_chr, + byte_mask, + byte_ord, +) +from paramiko import util +from paramiko.message import Message +from paramiko.ssh_exception import SSHException + + +( + MSG_KEXGSS_INIT, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_ERROR, +) = range(30, 35) +(MSG_KEXGSS_GROUPREQ, MSG_KEXGSS_GROUP) = range(40, 42) +( + c_MSG_KEXGSS_INIT, + c_MSG_KEXGSS_CONTINUE, + c_MSG_KEXGSS_COMPLETE, + c_MSG_KEXGSS_HOSTKEY, + c_MSG_KEXGSS_ERROR, +) = [byte_chr(c) for c in range(30, 35)] +(c_MSG_KEXGSS_GROUPREQ, c_MSG_KEXGSS_GROUP) = [ + byte_chr(c) for c in range(40, 42) +] + + +class KexGSSGroup1: + """ + GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange as defined in `RFC + 4462 Section 2 `_ + """ + + # draft-ietf-secsh-transport-09.txt, page 17 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa + G = 2 + b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7 # noqa + b0000000000000000 = zero_byte * 8 # noqa + NAME = "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==" + + def __init__(self, transport): + self.transport = transport + self.kexgss = self.transport.kexgss_ctxt + self.gss_host = None + self.x = 0 + self.e = 0 + self.f = 0 + + def start_kex(self): + """ + Start the GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange. + """ + self._generate_x() + if self.transport.server_mode: + # compute f = g^x mod p, but don't send it yet + self.f = pow(self.G, self.x, self.P) + self.transport._expect_packet(MSG_KEXGSS_INIT) + return + # compute e = g^x mod p (where g=2), and send it + self.e = pow(self.G, self.x, self.P) + # Initialize GSS-API Key Exchange + self.gss_host = self.transport.gss_host + m = Message() + m.add_byte(c_MSG_KEXGSS_INIT) + m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host)) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_ERROR, + ) + + def parse_next(self, ptype, m): + """ + Parse the next packet. + + :param ptype: The (string) type of the incoming packet + :param `.Message` m: The packet content + """ + if self.transport.server_mode and (ptype == MSG_KEXGSS_INIT): + return self._parse_kexgss_init(m) + elif not self.transport.server_mode and (ptype == MSG_KEXGSS_HOSTKEY): + return self._parse_kexgss_hostkey(m) + elif self.transport.server_mode and (ptype == MSG_KEXGSS_CONTINUE): + return self._parse_kexgss_continue(m) + elif not self.transport.server_mode and (ptype == MSG_KEXGSS_COMPLETE): + return self._parse_kexgss_complete(m) + elif ptype == MSG_KEXGSS_ERROR: + return self._parse_kexgss_error(m) + msg = "GSS KexGroup1 asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) + + # ## internals... + + def _generate_x(self): + """ + generate an "x" (1 < x < q), where q is (p-1)/2. + p is a 128-byte (1024-bit) number, where the first 64 bits are 1. + therefore q can be approximated as a 2^1023. we drop the subset of + potential x where the first 63 bits are 1, because some of those will + be larger than q (but this is a tiny tiny subset of potential x). 
+ """ + while 1: + x_bytes = os.urandom(128) + x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:] + first = x_bytes[:8] + if first not in (self.b7fffffffffffffff, self.b0000000000000000): + break + self.x = util.inflate_long(x_bytes) + + def _parse_kexgss_hostkey(self, m): + """ + Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode). + + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message + """ + # client mode + host_key = m.get_string() + self.transport.host_key = host_key + sig = m.get_string() + self.transport._verify_key(host_key, sig) + self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) + + def _parse_kexgss_continue(self, m): + """ + Parse the SSH2_MSG_KEXGSS_CONTINUE message. + + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE + message + """ + if not self.transport.server_mode: + srv_token = m.get_string() + m = Message() + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string( + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + ) + self.transport.send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + else: + pass + + def _parse_kexgss_complete(self, m): + """ + Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode). + + :param `.Message` m: The content of the + SSH2_MSG_KEXGSS_COMPLETE message + """ + # client mode + if self.transport.host_key is None: + self.transport.host_key = NullHostKey() + self.f = m.get_mpint() + if (self.f < 1) or (self.f > self.P - 1): + raise SSHException('Server kex "f" is out of range') + mic_token = m.get_string() + # This must be TRUE, if there is a GSS-API token in this message. + bool = m.get_boolean() + srv_token = None + if bool: + srv_token = m.get_string() + K = pow(self.f, self.x, self.P) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(self.transport.host_key.__str__()) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(str(hm)).digest() + self.transport._set_K_H(K, H) + if srv_token is not None: + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + self.kexgss.ssh_check_mic(mic_token, H) + else: + self.kexgss.ssh_check_mic(mic_token, H) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + + def _parse_kexgss_init(self, m): + """ + Parse the SSH2_MSG_KEXGSS_INIT message (server mode). 
+ + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_INIT message + """ + # server mode + client_token = m.get_string() + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.P - 1): + raise SSHException('Client kex "e" is out of range') + K = pow(self.e, self.x, self.P) + self.transport.host_key = NullHostKey() + key = self.transport.host_key.__str__() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(key) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + srv_token = self.kexgss.ssh_accept_sec_context( + self.gss_host, client_token + ) + m = Message() + if self.kexgss._gss_srv_ctxt_status: + mic_token = self.kexgss.ssh_get_mic( + self.transport.session_id, gss_kex=True + ) + m.add_byte(c_MSG_KEXGSS_COMPLETE) + m.add_mpint(self.f) + m.add_string(mic_token) + if srv_token is not None: + m.add_boolean(True) + m.add_string(srv_token) + else: + m.add_boolean(False) + self.transport._send_message(m) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + else: + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string(srv_token) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + + def _parse_kexgss_error(self, m): + """ + Parse the SSH2_MSG_KEXGSS_ERROR message (client mode). + The server may send a GSS-API error message. if it does, we display + the error by throwing an exception (client mode). + + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_ERROR message + :raise SSHException: Contains GSS-API major and minor status as well as + the error message and the language tag of the + message + """ + maj_status = m.get_int() + min_status = m.get_int() + err_msg = m.get_string() + m.get_string() # we don't care about the language! 
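+        # surface the failure as an SSHException carrying the GSS-API
+        # major/minor status codes and the server-supplied error text.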
+ raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) + + +class KexGSSGroup14(KexGSSGroup1): + """ + GSS-API / SSPI Authenticated Diffie-Hellman Group14 Key Exchange as defined + in `RFC 4462 Section 2 + `_ + """ + + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa + G = 2 + NAME = "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==" + + +class KexGSSGex: + """ + GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange as defined in + `RFC 4462 Section 2 `_ + """ + + NAME = "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==" + min_bits = 1024 + max_bits = 8192 + preferred_bits = 2048 + + def __init__(self, transport): + self.transport = transport + self.kexgss = self.transport.kexgss_ctxt + self.gss_host = None + self.p = None + self.q = None + self.g = None + self.x = None + self.e = None + self.f = None + self.old_style = False + + def start_kex(self): + """ + Start the GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange + """ + if self.transport.server_mode: + self.transport._expect_packet(MSG_KEXGSS_GROUPREQ) + return + # request a bit range: we accept (min_bits) to (max_bits), but prefer + # (preferred_bits). according to the spec, we shouldn't pull the + # minimum up above 1024. + self.gss_host = self.transport.gss_host + m = Message() + m.add_byte(c_MSG_KEXGSS_GROUPREQ) + m.add_int(self.min_bits) + m.add_int(self.preferred_bits) + m.add_int(self.max_bits) + self.transport._send_message(m) + self.transport._expect_packet(MSG_KEXGSS_GROUP) + + def parse_next(self, ptype, m): + """ + Parse the next packet. + + :param ptype: The (string) type of the incoming packet + :param `.Message` m: The packet content + """ + if ptype == MSG_KEXGSS_GROUPREQ: + return self._parse_kexgss_groupreq(m) + elif ptype == MSG_KEXGSS_GROUP: + return self._parse_kexgss_group(m) + elif ptype == MSG_KEXGSS_INIT: + return self._parse_kexgss_gex_init(m) + elif ptype == MSG_KEXGSS_HOSTKEY: + return self._parse_kexgss_hostkey(m) + elif ptype == MSG_KEXGSS_CONTINUE: + return self._parse_kexgss_continue(m) + elif ptype == MSG_KEXGSS_COMPLETE: + return self._parse_kexgss_complete(m) + elif ptype == MSG_KEXGSS_ERROR: + return self._parse_kexgss_error(m) + msg = "KexGex asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) + + # ## internals... + + def _generate_x(self): + # generate an "x" (1 < x < (p-1)/2). + q = (self.p - 1) // 2 + qnorm = util.deflate_long(q, 0) + qhbyte = byte_ord(qnorm[0]) + byte_count = len(qnorm) + qmask = 0xFF + while not (qhbyte & 0x80): + qhbyte <<= 1 + qmask >>= 1 + while True: + x_bytes = os.urandom(byte_count) + x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:] + x = util.inflate_long(x_bytes, 1) + if (x > 1) and (x < q): + break + self.x = x + + def _parse_kexgss_groupreq(self, m): + """ + Parse the SSH2_MSG_KEXGSS_GROUPREQ message (server mode). 
+ + :param `.Message` m: The content of the + SSH2_MSG_KEXGSS_GROUPREQ message + """ + minbits = m.get_int() + preferredbits = m.get_int() + maxbits = m.get_int() + # smoosh the user's preferred size into our own limits + if preferredbits > self.max_bits: + preferredbits = self.max_bits + if preferredbits < self.min_bits: + preferredbits = self.min_bits + # fix min/max if they're inconsistent. technically, we could just pout + # and hang up, but there's no harm in giving them the benefit of the + # doubt and just picking a bitsize for them. + if minbits > preferredbits: + minbits = preferredbits + if maxbits < preferredbits: + maxbits = preferredbits + # now save a copy + self.min_bits = minbits + self.preferred_bits = preferredbits + self.max_bits = maxbits + # generate prime + pack = self.transport._get_modulus_pack() + if pack is None: + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, # noqa + "Picking p ({} <= {} <= {} bits)".format( + minbits, preferredbits, maxbits + ), + ) + self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits) + m = Message() + m.add_byte(c_MSG_KEXGSS_GROUP) + m.add_mpint(self.p) + m.add_mpint(self.g) + self.transport._send_message(m) + self.transport._expect_packet(MSG_KEXGSS_INIT) + + def _parse_kexgss_group(self, m): + """ + Parse the SSH2_MSG_KEXGSS_GROUP message (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_GROUP message + """ + self.p = m.get_mpint() + self.g = m.get_mpint() + # reject if p's bit length < 1024 or > 8192 + bitlen = util.bit_length(self.p) + if (bitlen < 1024) or (bitlen > 8192): + raise SSHException( + "Server-generated gex p (don't ask) is out of range " + "({} bits)".format(bitlen) + ) + self.transport._log( + DEBUG, "Got server p ({} bits)".format(bitlen) + ) # noqa + self._generate_x() + # now compute e = g^x mod p + self.e = pow(self.g, self.x, self.p) + m = Message() + m.add_byte(c_MSG_KEXGSS_INIT) + m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host)) + m.add_mpint(self.e) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_ERROR, + ) + + def _parse_kexgss_gex_init(self, m): + """ + Parse the SSH2_MSG_KEXGSS_INIT message (server mode). 
+ + :param `Message` m: The content of the SSH2_MSG_KEXGSS_INIT message + """ + client_token = m.get_string() + self.e = m.get_mpint() + if (self.e < 1) or (self.e > self.p - 1): + raise SSHException('Client kex "e" is out of range') + self._generate_x() + self.f = pow(self.g, self.x, self.p) + K = pow(self.e, self.x, self.p) + self.transport.host_key = NullHostKey() + key = self.transport.host_key.__str__() + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + key, + ) + hm.add_int(self.min_bits) + hm.add_int(self.preferred_bits) + hm.add_int(self.max_bits) + hm.add_mpint(self.p) + hm.add_mpint(self.g) + hm.add_mpint(self.e) + hm.add_mpint(self.f) + hm.add_mpint(K) + H = sha1(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + srv_token = self.kexgss.ssh_accept_sec_context( + self.gss_host, client_token + ) + m = Message() + if self.kexgss._gss_srv_ctxt_status: + mic_token = self.kexgss.ssh_get_mic( + self.transport.session_id, gss_kex=True + ) + m.add_byte(c_MSG_KEXGSS_COMPLETE) + m.add_mpint(self.f) + m.add_string(mic_token) + if srv_token is not None: + m.add_boolean(True) + m.add_string(srv_token) + else: + m.add_boolean(False) + self.transport._send_message(m) + self.transport.gss_kex_used = True + self.transport._activate_outbound() + else: + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string(srv_token) + self.transport._send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + + def _parse_kexgss_hostkey(self, m): + """ + Parse the SSH2_MSG_KEXGSS_HOSTKEY message (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_HOSTKEY message + """ + # client mode + host_key = m.get_string() + self.transport.host_key = host_key + sig = m.get_string() + self.transport._verify_key(host_key, sig) + self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) + + def _parse_kexgss_continue(self, m): + """ + Parse the SSH2_MSG_KEXGSS_CONTINUE message. + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE message + """ + if not self.transport.server_mode: + srv_token = m.get_string() + m = Message() + m.add_byte(c_MSG_KEXGSS_CONTINUE) + m.add_string( + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + ) + self.transport.send_message(m) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) + else: + pass + + def _parse_kexgss_complete(self, m): + """ + Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode). + + :param `Message` m: The content of the SSH2_MSG_KEXGSS_COMPLETE message + """ + if self.transport.host_key is None: + self.transport.host_key = NullHostKey() + self.f = m.get_mpint() + mic_token = m.get_string() + # This must be TRUE, if there is a GSS-API token in this message. 
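+        # (per RFC 4462, the boolean flags whether a final GSS-API output
+        # token follows the MIC.)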
+ have_token = m.get_boolean()
+ srv_token = None
+ if have_token:
+ srv_token = m.get_string()
+ if (self.f < 1) or (self.f > self.p - 1):
+ raise SSHException('Server kex "f" is out of range')
+ K = pow(self.f, self.x, self.p)
+ # okay, build up the hash H of
+ # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
+ hm = Message()
+ hm.add(
+ self.transport.local_version,
+ self.transport.remote_version,
+ self.transport.local_kex_init,
+ self.transport.remote_kex_init,
+ self.transport.host_key.__str__(),
+ )
+ if not self.old_style:
+ hm.add_int(self.min_bits)
+ hm.add_int(self.preferred_bits)
+ if not self.old_style:
+ hm.add_int(self.max_bits)
+ hm.add_mpint(self.p)
+ hm.add_mpint(self.g)
+ hm.add_mpint(self.e)
+ hm.add_mpint(self.f)
+ hm.add_mpint(K)
+ H = sha1(hm.asbytes()).digest()
+ self.transport._set_K_H(K, H)
+ if srv_token is not None:
+ self.kexgss.ssh_init_sec_context(
+ target=self.gss_host, recv_token=srv_token
+ )
+ self.kexgss.ssh_check_mic(mic_token, H)
+ else:
+ self.kexgss.ssh_check_mic(mic_token, H)
+ self.transport.gss_kex_used = True
+ self.transport._activate_outbound()
+
+ def _parse_kexgss_error(self, m):
+ """
+ Parse the SSH2_MSG_KEXGSS_ERROR message (client mode).
+ The server may send a GSS-API error message. If it does, we display
+ the error by throwing an exception (client mode).
+
+ :param `.Message` m: The content of the SSH2_MSG_KEXGSS_ERROR message
+ :raise SSHException: Contains GSS-API major and minor status as well as
+ the error message and the language tag of the
+ message
+ """
+ maj_status = m.get_int()
+ min_status = m.get_int()
+ err_msg = m.get_string()
+ m.get_string() # we don't care about the language (lang_tag)!
+ raise SSHException(
+ """GSS-API Error:
+Major Status: {}
+Minor Status: {}
+Error Message: {}
+""".format(
+ maj_status, min_status, err_msg
+ )
+ )
+
+
+class NullHostKey:
+ """
+ This class represents the Null Host Key for GSS-API Key Exchange as defined
+ in `RFC 4462 Section 5
+ <https://tools.ietf.org/html/rfc4462.html#section-5>`_
+ """
+
+ def __init__(self):
+ self.key = ""
+
+ def __str__(self):
+ return self.key
+
+ def get_name(self):
+ return self.key
diff --git a/venv/lib/python3.12/site-packages/paramiko/message.py b/venv/lib/python3.12/site-packages/paramiko/message.py
new file mode 100644
index 0000000..8c2b3bd
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/paramiko/message.py
@@ -0,0 +1,318 @@
+# Copyright (C) 2003-2007 Robey Pointer
+#
+# This file is part of paramiko.
+#
+# Paramiko is free software; you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation; either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""
+Implementation of an SSH2 "message".
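+
+A minimal round-trip sketch (illustrative only; values are arbitrary)::
+
+ from paramiko.message import Message
+
+ m = Message()
+ m.add_int(42)
+ m.add_string("ssh-rsa")
+ parsed = Message(m.asbytes())
+ assert parsed.get_int() == 42
+ assert parsed.get_text() == "ssh-rsa"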
+""" + +import struct +from io import BytesIO + +from paramiko import util +from paramiko.common import zero_byte, max_byte, one_byte +from paramiko.util import u + + +class Message: + """ + An SSH2 message is a stream of bytes that encodes some combination of + strings, integers, bools, and infinite-precision integers. This class + builds or breaks down such a byte stream. + + Normally you don't need to deal with anything this low-level, but it's + exposed for people implementing custom extensions, or features that + paramiko doesn't support yet. + """ + + big_int = 0xFF000000 + + def __init__(self, content=None): + """ + Create a new SSH2 message. + + :param bytes content: + the byte stream to use as the message content (passed in only when + decomposing a message). + """ + if content is not None: + self.packet = BytesIO(content) + else: + self.packet = BytesIO() + + def __bytes__(self): + return self.asbytes() + + def __repr__(self): + """ + Returns a string representation of this object, for debugging. + """ + return "paramiko.Message(" + repr(self.packet.getvalue()) + ")" + + # TODO 4.0: just merge into __bytes__ (everywhere) + def asbytes(self): + """ + Return the byte stream content of this Message, as a `bytes`. + """ + return self.packet.getvalue() + + def rewind(self): + """ + Rewind the message to the beginning as if no items had been parsed + out of it yet. + """ + self.packet.seek(0) + + def get_remainder(self): + """ + Return the `bytes` of this message that haven't already been parsed and + returned. + """ + position = self.packet.tell() + remainder = self.packet.read() + self.packet.seek(position) + return remainder + + def get_so_far(self): + """ + Returns the `bytes` of this message that have been parsed and + returned. The string passed into a message's constructor can be + regenerated by concatenating ``get_so_far`` and `get_remainder`. + """ + position = self.packet.tell() + self.rewind() + return self.packet.read(position) + + def get_bytes(self, n): + """ + Return the next ``n`` bytes of the message, without decomposing into an + int, decoded string, etc. Just the raw bytes are returned. Returns a + string of ``n`` zero bytes if there weren't ``n`` bytes remaining in + the message. + """ + b = self.packet.read(n) + max_pad_size = 1 << 20 # Limit padding to 1 MB + if len(b) < n < max_pad_size: + return b + zero_byte * (n - len(b)) + return b + + def get_byte(self): + """ + Return the next byte of the message, without decomposing it. This + is equivalent to `get_bytes(1) `. + + :return: + the next (`bytes`) byte of the message, or ``b'\000'`` if there + aren't any bytes remaining. + """ + return self.get_bytes(1) + + def get_boolean(self): + """ + Fetch a boolean from the stream. + """ + b = self.get_bytes(1) + return b != zero_byte + + def get_adaptive_int(self): + """ + Fetch an int from the stream. + + :return: a 32-bit unsigned `int`. + """ + byte = self.get_bytes(1) + if byte == max_byte: + return util.inflate_long(self.get_binary()) + byte += self.get_bytes(3) + return struct.unpack(">I", byte)[0] + + def get_int(self): + """ + Fetch an int from the stream. + """ + return struct.unpack(">I", self.get_bytes(4))[0] + + def get_int64(self): + """ + Fetch a 64-bit int from the stream. + + :return: a 64-bit unsigned integer (`int`). + """ + return struct.unpack(">Q", self.get_bytes(8))[0] + + def get_mpint(self): + """ + Fetch a long int (mpint) from the stream. + + :return: an arbitrary-length integer (`int`). 
+ """ + return util.inflate_long(self.get_binary()) + + # TODO 4.0: depending on where this is used internally or downstream, force + # users to specify get_binary instead and delete this. + def get_string(self): + """ + Fetch a "string" from the stream. This will actually be a `bytes` + object, and may contain unprintable characters. (It's not unheard of + for a string to contain another byte-stream message.) + """ + return self.get_bytes(self.get_int()) + + # TODO 4.0: also consider having this take over the get_string name, and + # remove this name instead. + def get_text(self): + """ + Fetch a Unicode string from the stream. + + This currently operates by attempting to encode the next "string" as + ``utf-8``. + """ + return u(self.get_string()) + + def get_binary(self): + """ + Alias for `get_string` (obtains a bytestring). + """ + return self.get_bytes(self.get_int()) + + def get_list(self): + """ + Fetch a list of `strings ` from the stream. + + These are trivially encoded as comma-separated values in a string. + """ + return self.get_text().split(",") + + def add_bytes(self, b): + """ + Write bytes to the stream, without any formatting. + + :param bytes b: bytes to add + """ + self.packet.write(b) + return self + + def add_byte(self, b): + """ + Write a single byte to the stream, without any formatting. + + :param bytes b: byte to add + """ + self.packet.write(b) + return self + + def add_boolean(self, b): + """ + Add a boolean value to the stream. + + :param bool b: boolean value to add + """ + if b: + self.packet.write(one_byte) + else: + self.packet.write(zero_byte) + return self + + def add_int(self, n): + """ + Add an integer to the stream. + + :param int n: integer to add + """ + self.packet.write(struct.pack(">I", n)) + return self + + def add_adaptive_int(self, n): + """ + Add an integer to the stream. + + :param int n: integer to add + """ + if n >= Message.big_int: + self.packet.write(max_byte) + self.add_string(util.deflate_long(n)) + else: + self.packet.write(struct.pack(">I", n)) + return self + + def add_int64(self, n): + """ + Add a 64-bit int to the stream. + + :param int n: long int to add + """ + self.packet.write(struct.pack(">Q", n)) + return self + + def add_mpint(self, z): + """ + Add a long int to the stream, encoded as an infinite-precision + integer. This method only works on positive numbers. + + :param int z: long int to add + """ + self.add_string(util.deflate_long(z)) + return self + + # TODO: see the TODO for get_string/get_text/et al, this should change + # to match. + def add_string(self, s): + """ + Add a bytestring to the stream. + + :param byte s: bytestring to add + """ + s = util.asbytes(s) + self.add_int(len(s)) + self.packet.write(s) + return self + + def add_list(self, l): # noqa: E741 + """ + Add a list of strings to the stream. They are encoded identically to + a single string of values separated by commas. (Yes, really, that's + how SSH2 does it.) + + :param l: list of strings to add + """ + self.add_string(",".join(l)) + return self + + def _add(self, i): + if type(i) is bool: + return self.add_boolean(i) + elif isinstance(i, int): + return self.add_adaptive_int(i) + elif type(i) is list: + return self.add_list(i) + else: + return self.add_string(i) + + # TODO: this would never have worked for unicode strings under Python 3, + # guessing nobody/nothing ever used it for that purpose? + def add(self, *seq): + """ + Add a sequence of items to the stream. The values are encoded based + on their type: bytes, str, int, bool, or list. + + .. 
warning:: + Longs are encoded non-deterministically. Don't use this method. + + :param seq: the sequence of items + """ + for item in seq: + self._add(item) diff --git a/venv/lib/python3.12/site-packages/paramiko/packet.py b/venv/lib/python3.12/site-packages/paramiko/packet.py new file mode 100644 index 0000000..1274a23 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/packet.py @@ -0,0 +1,649 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Packet handling +""" + +import errno +import os +import socket +import struct +import threading +import time +from hmac import HMAC + +from paramiko import util +from paramiko.common import ( + linefeed_byte, + cr_byte_value, + MSG_NAMES, + DEBUG, + xffffffff, + zero_byte, + byte_ord, +) +from paramiko.util import u +from paramiko.ssh_exception import SSHException, ProxyCommandFailure +from paramiko.message import Message + + +def compute_hmac(key, message, digest_class): + return HMAC(key, message, digest_class).digest() + + +class NeedRekeyException(Exception): + """ + Exception indicating a rekey is needed. + """ + + pass + + +def first_arg(e): + arg = None + if type(e.args) is tuple and len(e.args) > 0: + arg = e.args[0] + return arg + + +class Packetizer: + """ + Implementation of the base SSH packet protocol. + """ + + # READ the secsh RFC's before raising these values. if anything, + # they should probably be lower. 
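+ # (2**29 bytes is 512 MiB; whichever of the packet or byte counter
+ # crosses its threshold first triggers a re-key request in
+ # send_message/read_message below.)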
+ REKEY_PACKETS = pow(2, 29) + REKEY_BYTES = pow(2, 29) + + # Allow receiving this many packets after a re-key request before + # terminating + REKEY_PACKETS_OVERFLOW_MAX = pow(2, 29) + # Allow receiving this many bytes after a re-key request before terminating + REKEY_BYTES_OVERFLOW_MAX = pow(2, 29) + + def __init__(self, socket): + self.__socket = socket + self.__logger = None + self.__closed = False + self.__dump_packets = False + self.__need_rekey = False + self.__init_count = 0 + self.__remainder = bytes() + self._initial_kex_done = False + + # used for noticing when to re-key: + self.__sent_bytes = 0 + self.__sent_packets = 0 + self.__received_bytes = 0 + self.__received_packets = 0 + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + + # current inbound/outbound ciphering: + self.__block_size_out = 8 + self.__block_size_in = 8 + self.__mac_size_out = 0 + self.__mac_size_in = 0 + self.__block_engine_out = None + self.__block_engine_in = None + self.__sdctr_out = False + self.__mac_engine_out = None + self.__mac_engine_in = None + self.__mac_key_out = bytes() + self.__mac_key_in = bytes() + self.__compress_engine_out = None + self.__compress_engine_in = None + self.__sequence_number_out = 0 + self.__sequence_number_in = 0 + self.__etm_out = False + self.__etm_in = False + + # lock around outbound writes (packet computation) + self.__write_lock = threading.RLock() + + # keepalives: + self.__keepalive_interval = 0 + self.__keepalive_last = time.time() + self.__keepalive_callback = None + + self.__timer = None + self.__handshake_complete = False + self.__timer_expired = False + + @property + def closed(self): + return self.__closed + + def reset_seqno_out(self): + self.__sequence_number_out = 0 + + def reset_seqno_in(self): + self.__sequence_number_in = 0 + + def set_log(self, log): + """ + Set the Python log object to use for logging. + """ + self.__logger = log + + def set_outbound_cipher( + self, + block_engine, + block_size, + mac_engine, + mac_size, + mac_key, + sdctr=False, + etm=False, + ): + """ + Switch outbound data cipher. + :param etm: Set encrypt-then-mac from OpenSSH + """ + self.__block_engine_out = block_engine + self.__sdctr_out = sdctr + self.__block_size_out = block_size + self.__mac_engine_out = mac_engine + self.__mac_size_out = mac_size + self.__mac_key_out = mac_key + self.__sent_bytes = 0 + self.__sent_packets = 0 + self.__etm_out = etm + # wait until the reset happens in both directions before clearing + # rekey flag + self.__init_count |= 1 + if self.__init_count == 3: + self.__init_count = 0 + self.__need_rekey = False + + def set_inbound_cipher( + self, + block_engine, + block_size, + mac_engine, + mac_size, + mac_key, + etm=False, + ): + """ + Switch inbound data cipher. 
+ :param etm: Set encrypt-then-mac from OpenSSH + """ + self.__block_engine_in = block_engine + self.__block_size_in = block_size + self.__mac_engine_in = mac_engine + self.__mac_size_in = mac_size + self.__mac_key_in = mac_key + self.__received_bytes = 0 + self.__received_packets = 0 + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + self.__etm_in = etm + # wait until the reset happens in both directions before clearing + # rekey flag + self.__init_count |= 2 + if self.__init_count == 3: + self.__init_count = 0 + self.__need_rekey = False + + def set_outbound_compressor(self, compressor): + self.__compress_engine_out = compressor + + def set_inbound_compressor(self, compressor): + self.__compress_engine_in = compressor + + def close(self): + self.__closed = True + self.__socket.close() + + def set_hexdump(self, hexdump): + self.__dump_packets = hexdump + + def get_hexdump(self): + return self.__dump_packets + + def get_mac_size_in(self): + return self.__mac_size_in + + def get_mac_size_out(self): + return self.__mac_size_out + + def need_rekey(self): + """ + Returns ``True`` if a new set of keys needs to be negotiated. This + will be triggered during a packet read or write, so it should be + checked after every read or write, or at least after every few. + """ + return self.__need_rekey + + def set_keepalive(self, interval, callback): + """ + Turn on/off the callback keepalive. If ``interval`` seconds pass with + no data read from or written to the socket, the callback will be + executed and the timer will be reset. + """ + self.__keepalive_interval = interval + self.__keepalive_callback = callback + self.__keepalive_last = time.time() + + def read_timer(self): + self.__timer_expired = True + + def start_handshake(self, timeout): + """ + Tells `Packetizer` that the handshake process started. + Starts a book keeping timer that can signal a timeout in the + handshake process. + + :param float timeout: amount of seconds to wait before timing out + """ + if not self.__timer: + self.__timer = threading.Timer(float(timeout), self.read_timer) + self.__timer.start() + + def handshake_timed_out(self): + """ + Checks if the handshake has timed out. + + If `start_handshake` wasn't called before the call to this function, + the return value will always be `False`. If the handshake completed + before a timeout was reached, the return value will be `False` + + :return: handshake time out status, as a `bool` + """ + if not self.__timer: + return False + if self.__handshake_complete: + return False + return self.__timer_expired + + def complete_handshake(self): + """ + Tells `Packetizer` that the handshake has completed. + """ + if self.__timer: + self.__timer.cancel() + self.__timer_expired = False + self.__handshake_complete = True + + def read_all(self, n, check_rekey=False): + """ + Read as close to N bytes as possible, blocking as long as necessary. 
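+
+ Bytes left over from an earlier over-read (e.g. `readline` on the
+ banner) are consumed from the internal remainder buffer before the
+ socket is touched.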
+
+ :param int n: number of bytes to read
+ :return: the data read, as `bytes`
+
+ :raises:
+ ``EOFError`` -- if the socket was closed before all the bytes could
+ be read
+ """
+ out = bytes()
+ # handle over-reading from reading the banner line
+ if len(self.__remainder) > 0:
+ out = self.__remainder[:n]
+ self.__remainder = self.__remainder[n:]
+ n -= len(out)
+ while n > 0:
+ got_timeout = False
+ if self.handshake_timed_out():
+ raise EOFError()
+ try:
+ x = self.__socket.recv(n)
+ if len(x) == 0:
+ raise EOFError()
+ out += x
+ n -= len(x)
+ except socket.timeout:
+ got_timeout = True
+ except socket.error as e:
+ # on Linux, sometimes instead of socket.timeout, we get
+ # EAGAIN. this is a bug in recent (> 2.6.9) kernels but
+ # we need to work around it.
+ arg = first_arg(e)
+ if arg == errno.EAGAIN:
+ got_timeout = True
+ elif self.__closed:
+ raise EOFError()
+ else:
+ raise
+ if got_timeout:
+ if self.__closed:
+ raise EOFError()
+ if check_rekey and (len(out) == 0) and self.__need_rekey:
+ raise NeedRekeyException()
+ self._check_keepalive()
+ return out
+
+ def write_all(self, out):
+ self.__keepalive_last = time.time()
+ iteration_with_zero_as_return_value = 0
+ while len(out) > 0:
+ retry_write = False
+ try:
+ n = self.__socket.send(out)
+ except socket.timeout:
+ retry_write = True
+ except socket.error as e:
+ arg = first_arg(e)
+ if arg == errno.EAGAIN:
+ retry_write = True
+ else:
+ n = -1
+ except ProxyCommandFailure:
+ raise # so it doesn't get swallowed by the below catchall
+ except Exception:
+ # could be: (32, 'Broken pipe')
+ n = -1
+ if retry_write:
+ n = 0
+ if self.__closed:
+ n = -1
+ else:
+ if n == 0 and iteration_with_zero_as_return_value > 10:
+ # We aren't retrying the write, yet we also didn't manage
+ # to send anything over the socket. This might indicate
+ # that we have lost contact with the remote side, but
+ # haven't yet received an EOFError or other socket error.
+ # Give it a few more iterations to catch up.
+ n = -1
+ iteration_with_zero_as_return_value += 1
+ if n < 0:
+ raise EOFError()
+ if n == len(out):
+ break
+ out = out[n:]
+ return
+
+ def readline(self, timeout):
+ """
+ Read a line from the socket. We assume no data is pending after the
+ line, so it's okay to attempt large reads.
+ """
+ buf = self.__remainder
+ while linefeed_byte not in buf:
+ buf += self._read_timeout(timeout)
+ n = buf.index(linefeed_byte)
+ self.__remainder = buf[n + 1 :]
+ buf = buf[:n]
+ if (len(buf) > 0) and (buf[-1] == cr_byte_value):
+ buf = buf[:-1]
+ return u(buf)
+
+ def send_message(self, data):
+ """
+ Write a block of data using the current cipher, as an SSH block.
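+
+ The payload is compressed (when enabled), padded and framed by
+ ``_build_packet``, encrypted, and followed by an HMAC over the
+ sequence number and the packet (in ETM mode, over the plaintext
+ length plus the ciphertext instead).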
+ """ + # encrypt this sucka + data = data.asbytes() + cmd = byte_ord(data[0]) + if cmd in MSG_NAMES: + cmd_name = MSG_NAMES[cmd] + else: + cmd_name = "${:x}".format(cmd) + orig_len = len(data) + self.__write_lock.acquire() + try: + if self.__compress_engine_out is not None: + data = self.__compress_engine_out(data) + packet = self._build_packet(data) + if self.__dump_packets: + self._log( + DEBUG, + "Write packet <{}>, length {}".format(cmd_name, orig_len), + ) + self._log(DEBUG, util.format_binary(packet, "OUT: ")) + if self.__block_engine_out is not None: + if self.__etm_out: + # packet length is not encrypted in EtM + out = packet[0:4] + self.__block_engine_out.update( + packet[4:] + ) + else: + out = self.__block_engine_out.update(packet) + else: + out = packet + # + mac + if self.__block_engine_out is not None: + packed = struct.pack(">I", self.__sequence_number_out) + payload = packed + (out if self.__etm_out else packet) + out += compute_hmac( + self.__mac_key_out, payload, self.__mac_engine_out + )[: self.__mac_size_out] + next_seq = (self.__sequence_number_out + 1) & xffffffff + if next_seq == 0 and not self._initial_kex_done: + raise SSHException( + "Sequence number rolled over during initial kex!" + ) + self.__sequence_number_out = next_seq + self.write_all(out) + + self.__sent_bytes += len(out) + self.__sent_packets += 1 + sent_too_much = ( + self.__sent_packets >= self.REKEY_PACKETS + or self.__sent_bytes >= self.REKEY_BYTES + ) + if sent_too_much and not self.__need_rekey: + # only ask once for rekeying + msg = "Rekeying (hit {} packets, {} bytes sent)" + self._log( + DEBUG, msg.format(self.__sent_packets, self.__sent_bytes) + ) + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + self._trigger_rekey() + finally: + self.__write_lock.release() + + def read_message(self): + """ + Only one thread should ever be in this function (no other locking is + done). + + :raises: `.SSHException` -- if the packet is mangled + :raises: `.NeedRekeyException` -- if the transport should rekey + """ + header = self.read_all(self.__block_size_in, check_rekey=True) + if self.__etm_in: + packet_size = struct.unpack(">I", header[:4])[0] + remaining = packet_size - self.__block_size_in + 4 + packet = header[4:] + self.read_all(remaining, check_rekey=False) + mac = self.read_all(self.__mac_size_in, check_rekey=False) + mac_payload = ( + struct.pack(">II", self.__sequence_number_in, packet_size) + + packet + ) + my_mac = compute_hmac( + self.__mac_key_in, mac_payload, self.__mac_engine_in + )[: self.__mac_size_in] + if not util.constant_time_bytes_eq(my_mac, mac): + raise SSHException("Mismatched MAC") + header = packet + + if self.__block_engine_in is not None: + header = self.__block_engine_in.update(header) + if self.__dump_packets: + self._log(DEBUG, util.format_binary(header, "IN: ")) + + # When ETM is in play, we've already read the packet size & decrypted + # everything, so just set the packet back to the header we obtained. 
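+ # (For reference, the classic non-ETM layout handled below is, per
+ # RFC 4253: uint32 packet_length, byte padding_length, payload,
+ # random padding, then MAC over seqno || plaintext packet.)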
+ if self.__etm_in: + packet = header + # Otherwise, use the older non-ETM logic + else: + packet_size = struct.unpack(">I", header[:4])[0] + + # leftover contains decrypted bytes from the first block (after the + # length field) + leftover = header[4:] + if (packet_size - len(leftover)) % self.__block_size_in != 0: + raise SSHException("Invalid packet blocking") + buf = self.read_all( + packet_size + self.__mac_size_in - len(leftover) + ) + packet = buf[: packet_size - len(leftover)] + post_packet = buf[packet_size - len(leftover) :] + + if self.__block_engine_in is not None: + packet = self.__block_engine_in.update(packet) + packet = leftover + packet + + if self.__dump_packets: + self._log(DEBUG, util.format_binary(packet, "IN: ")) + + if self.__mac_size_in > 0 and not self.__etm_in: + mac = post_packet[: self.__mac_size_in] + mac_payload = ( + struct.pack(">II", self.__sequence_number_in, packet_size) + + packet + ) + my_mac = compute_hmac( + self.__mac_key_in, mac_payload, self.__mac_engine_in + )[: self.__mac_size_in] + if not util.constant_time_bytes_eq(my_mac, mac): + raise SSHException("Mismatched MAC") + padding = byte_ord(packet[0]) + payload = packet[1 : packet_size - padding] + + if self.__dump_packets: + self._log( + DEBUG, + "Got payload ({} bytes, {} padding)".format( + packet_size, padding + ), + ) + + if self.__compress_engine_in is not None: + payload = self.__compress_engine_in(payload) + + msg = Message(payload[1:]) + msg.seqno = self.__sequence_number_in + next_seq = (self.__sequence_number_in + 1) & xffffffff + if next_seq == 0 and not self._initial_kex_done: + raise SSHException( + "Sequence number rolled over during initial kex!" + ) + self.__sequence_number_in = next_seq + + # check for rekey + raw_packet_size = packet_size + self.__mac_size_in + 4 + self.__received_bytes += raw_packet_size + self.__received_packets += 1 + if self.__need_rekey: + # we've asked to rekey -- give them some packets to comply before + # dropping the connection + self.__received_bytes_overflow += raw_packet_size + self.__received_packets_overflow += 1 + if ( + self.__received_packets_overflow + >= self.REKEY_PACKETS_OVERFLOW_MAX + ) or ( + self.__received_bytes_overflow >= self.REKEY_BYTES_OVERFLOW_MAX + ): + raise SSHException( + "Remote transport is ignoring rekey requests" + ) + elif (self.__received_packets >= self.REKEY_PACKETS) or ( + self.__received_bytes >= self.REKEY_BYTES + ): + # only ask once for rekeying + err = "Rekeying (hit {} packets, {} bytes received)" + self._log( + DEBUG, + err.format(self.__received_packets, self.__received_bytes), + ) + self.__received_bytes_overflow = 0 + self.__received_packets_overflow = 0 + self._trigger_rekey() + + cmd = byte_ord(payload[0]) + if cmd in MSG_NAMES: + cmd_name = MSG_NAMES[cmd] + else: + cmd_name = "${:x}".format(cmd) + if self.__dump_packets: + self._log( + DEBUG, + "Read packet <{}>, length {}".format(cmd_name, len(payload)), + ) + return cmd, msg + + # ...protected... 
+ + def _log(self, level, msg): + if self.__logger is None: + return + if issubclass(type(msg), list): + for m in msg: + self.__logger.log(level, m) + else: + self.__logger.log(level, msg) + + def _check_keepalive(self): + if ( + not self.__keepalive_interval + or not self.__block_engine_out + or self.__need_rekey + ): + # wait till we're encrypting, and not in the middle of rekeying + return + now = time.time() + if now > self.__keepalive_last + self.__keepalive_interval: + self.__keepalive_callback() + self.__keepalive_last = now + + def _read_timeout(self, timeout): + start = time.time() + while True: + try: + x = self.__socket.recv(128) + if len(x) == 0: + raise EOFError() + break + except socket.timeout: + pass + if self.__closed: + raise EOFError() + now = time.time() + if now - start >= timeout: + raise socket.timeout() + return x + + def _build_packet(self, payload): + # pad up at least 4 bytes, to nearest block-size (usually 8) + bsize = self.__block_size_out + # do not include payload length in computations for padding in EtM mode + # (payload length won't be encrypted) + addlen = 4 if self.__etm_out else 8 + padding = 3 + bsize - ((len(payload) + addlen) % bsize) + packet = struct.pack(">IB", len(payload) + padding + 1, padding) + packet += payload + if self.__sdctr_out or self.__block_engine_out is None: + # cute trick i caught openssh doing: if we're not encrypting or + # SDCTR mode (RFC4344), + # don't waste random bytes for the padding + packet += zero_byte * padding + else: + packet += os.urandom(padding) + return packet + + def _trigger_rekey(self): + # outside code should check for this flag + self.__need_rekey = True diff --git a/venv/lib/python3.12/site-packages/paramiko/pipe.py b/venv/lib/python3.12/site-packages/paramiko/pipe.py new file mode 100644 index 0000000..65944fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/pipe.py @@ -0,0 +1,148 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Abstraction of a one-way pipe where the read end can be used in +`select.select`. Normally this is trivial, but Windows makes it nearly +impossible. + +The pipe acts like an Event, which can be set or cleared. When set, the pipe +will trigger as readable in `select `. 
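+
+A minimal sketch of the intended use (names are local to the example)::
+
+ import select
+
+ p = make_pipe()
+ p.set() # the read end now polls readable
+ r, _, _ = select.select([p], [], [], 0)
+ assert r # the pipe appears in the ready list
+ p.clear() # drain it; subsequent selects block again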
+""" + +import sys +import os +import socket + + +def make_pipe(): + if sys.platform[:3] != "win": + p = PosixPipe() + else: + p = WindowsPipe() + return p + + +class PosixPipe: + def __init__(self): + self._rfd, self._wfd = os.pipe() + self._set = False + self._forever = False + self._closed = False + + def close(self): + os.close(self._rfd) + os.close(self._wfd) + # used for unit tests: + self._closed = True + + def fileno(self): + return self._rfd + + def clear(self): + if not self._set or self._forever: + return + os.read(self._rfd, 1) + self._set = False + + def set(self): + if self._set or self._closed: + return + self._set = True + os.write(self._wfd, b"*") + + def set_forever(self): + self._forever = True + self.set() + + +class WindowsPipe: + """ + On Windows, only an OS-level "WinSock" may be used in select(), but reads + and writes must be to the actual socket object. + """ + + def __init__(self): + serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + serv.bind(("127.0.0.1", 0)) + serv.listen(1) + + # need to save sockets in _rsock/_wsock so they don't get closed + self._rsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self._rsock.connect(("127.0.0.1", serv.getsockname()[1])) + + self._wsock, addr = serv.accept() + serv.close() + self._set = False + self._forever = False + self._closed = False + + def close(self): + self._rsock.close() + self._wsock.close() + # used for unit tests: + self._closed = True + + def fileno(self): + return self._rsock.fileno() + + def clear(self): + if not self._set or self._forever: + return + self._rsock.recv(1) + self._set = False + + def set(self): + if self._set or self._closed: + return + self._set = True + self._wsock.send(b"*") + + def set_forever(self): + self._forever = True + self.set() + + +class OrPipe: + def __init__(self, pipe): + self._set = False + self._partner = None + self._pipe = pipe + + def set(self): + self._set = True + if not self._partner._set: + self._pipe.set() + + def clear(self): + self._set = False + if not self._partner._set: + self._pipe.clear() + + +def make_or_pipe(pipe): + """ + wraps a pipe into two pipe-like objects which are "or"d together to + affect the real pipe. if either returned pipe is set, the wrapped pipe + is set. when both are cleared, the wrapped pipe is cleared. + """ + p1 = OrPipe(pipe) + p2 = OrPipe(pipe) + p1._partner = p2 + p2._partner = p1 + return p1, p2 diff --git a/venv/lib/python3.12/site-packages/paramiko/pkey.py b/venv/lib/python3.12/site-packages/paramiko/pkey.py new file mode 100644 index 0000000..ef37100 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/pkey.py @@ -0,0 +1,938 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Common API for all public keys. 
+""" + +import base64 +from base64 import encodebytes, decodebytes +from binascii import unhexlify +import os +from pathlib import Path +from hashlib import md5, sha256 +import re +import struct + +import bcrypt + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher +from cryptography.hazmat.primitives import asymmetric + +from paramiko import util +from paramiko.util import u, b +from paramiko.common import o600 +from paramiko.ssh_exception import SSHException, PasswordRequiredException +from paramiko.message import Message + + +OPENSSH_AUTH_MAGIC = b"openssh-key-v1\x00" + + +def _unpad_openssh(data): + # At the moment, this is only used for unpadding private keys on disk. This + # really ought to be made constant time (possibly by upstreaming this logic + # into pyca/cryptography). + padding_length = data[-1] + if 0x20 <= padding_length < 0x7F: + return data # no padding, last byte part comment (printable ascii) + if padding_length > 15: + raise SSHException("Invalid key") + for i in range(padding_length): + if data[i - padding_length] != i + 1: + raise SSHException("Invalid key") + return data[:-padding_length] + + +class UnknownKeyType(Exception): + """ + An unknown public/private key algorithm was attempted to be read. + """ + + def __init__(self, key_type=None, key_bytes=None): + self.key_type = key_type + self.key_bytes = key_bytes + + def __str__(self): + return f"UnknownKeyType(type={self.key_type!r}, bytes=<{len(self.key_bytes)}>)" # noqa + + +class PKey: + """ + Base class for public keys. + + Also includes some "meta" level convenience constructors such as + `.from_type_string`. + """ + + # known encryption types for private key files: + _CIPHER_TABLE = { + "AES-128-CBC": { + "cipher": algorithms.AES, + "keysize": 16, + "blocksize": 16, + "mode": modes.CBC, + }, + "AES-256-CBC": { + "cipher": algorithms.AES, + "keysize": 32, + "blocksize": 16, + "mode": modes.CBC, + }, + "DES-EDE3-CBC": { + "cipher": algorithms.TripleDES, + "keysize": 24, + "blocksize": 8, + "mode": modes.CBC, + }, + } + _PRIVATE_KEY_FORMAT_ORIGINAL = 1 + _PRIVATE_KEY_FORMAT_OPENSSH = 2 + BEGIN_TAG = re.compile( + r"^-{5}BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$" + ) + END_TAG = re.compile(r"^-{5}END (RSA|DSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$") + + @staticmethod + def from_path(path, passphrase=None): + """ + Attempt to instantiate appropriate key subclass from given file path. + + :param Path path: The path to load (may also be a `str`). + + :returns: + A `PKey` subclass instance. + + :raises: + `UnknownKeyType`, if our crypto backend doesn't know this key type. + + .. versionadded:: 3.2 + """ + # TODO: make sure sphinx is reading Path right in param list... + + # Lazy import to avoid circular import issues + from paramiko import DSSKey, RSAKey, Ed25519Key, ECDSAKey + + # Normalize to string, as cert suffix isn't quite an extension, so + # pathlib isn't useful for this. + path = str(path) + + # Sort out cert vs key, i.e. it is 'legal' to hand this kind of API + # /either/ the key /or/ the cert, when there is a key/cert pair. 
+ cert_suffix = "-cert.pub" + if str(path).endswith(cert_suffix): + key_path = path[: -len(cert_suffix)] + cert_path = path + else: + key_path = path + cert_path = path + cert_suffix + + key_path = Path(key_path).expanduser() + cert_path = Path(cert_path).expanduser() + + data = key_path.read_bytes() + # Like OpenSSH, try modern/OpenSSH-specific key load first + try: + loaded = serialization.load_ssh_private_key( + data=data, password=passphrase + ) + # Then fall back to assuming legacy PEM type + except ValueError: + loaded = serialization.load_pem_private_key( + data=data, password=passphrase + ) + # TODO Python 3.10: match statement? (NOTE: we cannot use a dict + # because the results from the loader are literal backend, eg openssl, + # private classes, so isinstance tests work but exact 'x class is y' + # tests will not work) + # TODO: leverage already-parsed/math'd obj to avoid duplicate cpu + # cycles? seemingly requires most of our key subclasses to be rewritten + # to be cryptography-object-forward. this is still likely faster than + # the old SSHClient code that just tried instantiating every class! + key_class = None + if isinstance(loaded, asymmetric.dsa.DSAPrivateKey): + key_class = DSSKey + elif isinstance(loaded, asymmetric.rsa.RSAPrivateKey): + key_class = RSAKey + elif isinstance(loaded, asymmetric.ed25519.Ed25519PrivateKey): + key_class = Ed25519Key + elif isinstance(loaded, asymmetric.ec.EllipticCurvePrivateKey): + key_class = ECDSAKey + else: + raise UnknownKeyType(key_bytes=data, key_type=loaded.__class__) + with key_path.open() as fd: + key = key_class.from_private_key(fd, password=passphrase) + if cert_path.exists(): + # load_certificate can take Message, path-str, or value-str + key.load_certificate(str(cert_path)) + return key + + @staticmethod + def from_type_string(key_type, key_bytes): + """ + Given type `str` & raw `bytes`, return a `PKey` subclass instance. + + For example, ``PKey.from_type_string("ssh-ed25519", )`` + will (if successful) return a new `.Ed25519Key`. + + :param str key_type: + The key type, eg ``"ssh-ed25519"``. + :param bytes key_bytes: + The raw byte data forming the key material, as expected by + subclasses' ``data`` parameter. + + :returns: + A `PKey` subclass instance. + + :raises: + `UnknownKeyType`, if no registered classes knew about this type. + + .. versionadded:: 3.2 + """ + from paramiko import key_classes + + for key_class in key_classes: + if key_type in key_class.identifiers(): + # TODO: needs to passthru things like passphrase + return key_class(data=key_bytes) + raise UnknownKeyType(key_type=key_type, key_bytes=key_bytes) + + @classmethod + def identifiers(cls): + """ + returns an iterable of key format/name strings this class can handle. + + Most classes only have a single identifier, and thus this default + implementation suffices; see `.ECDSAKey` for one example of an + override. + """ + return [cls.name] + + # TODO 4.0: make this and subclasses consistent, some of our own + # classmethods even assume kwargs we don't define! + # TODO 4.0: prob also raise NotImplementedError instead of pass'ing; the + # contract is pretty obviously that you need to handle msg/data/filename + # appropriately. (If 'pass' is a concession to testing, see about doing the + # work to fix the tests instead) + def __init__(self, msg=None, data=None): + """ + Create a new instance of this public key type. If ``msg`` is given, + the key's public part(s) will be filled in from the message. 
If + ``data`` is given, the key's public part(s) will be filled in from + the string. + + :param .Message msg: + an optional SSH `.Message` containing a public key of this type. + :param bytes data: + optional, the bytes of a public key of this type + + :raises: `.SSHException` -- + if a key cannot be created from the ``data`` or ``msg`` given, or + no key was passed in. + """ + pass + + # TODO: arguably this might want to be __str__ instead? ehh + # TODO: ditto the interplay between showing class name (currently we just + # say PKey writ large) and algorithm (usually == class name, but not + # always, also sometimes shows certificate-ness) + # TODO: if we do change it, we also want to tweak eg AgentKey, as it + # currently displays agent-ness with a suffix + def __repr__(self): + comment = "" + # Works for AgentKey, may work for others? + if hasattr(self, "comment") and self.comment: + comment = f", comment={self.comment!r}" + return f"PKey(alg={self.algorithm_name}, bits={self.get_bits()}, fp={self.fingerprint}{comment})" # noqa + + # TODO 4.0: just merge into __bytes__ (everywhere) + def asbytes(self): + """ + Return a string of an SSH `.Message` made up of the public part(s) of + this key. This string is suitable for passing to `__init__` to + re-create the key object later. + """ + return bytes() + + def __bytes__(self): + return self.asbytes() + + def __eq__(self, other): + return isinstance(other, PKey) and self._fields == other._fields + + def __hash__(self): + return hash(self._fields) + + @property + def _fields(self): + raise NotImplementedError + + def get_name(self): + """ + Return the name of this private key implementation. + + :return: + name of this private key type, in SSH terminology, as a `str` (for + example, ``"ssh-rsa"``). + """ + return "" + + @property + def algorithm_name(self): + """ + Return the key algorithm identifier for this key. + + Similar to `get_name`, but aimed at pure algorithm name instead of SSH + protocol field value. + """ + # Nuke the leading 'ssh-' + # TODO in Python 3.9: use .removeprefix() + name = self.get_name().replace("ssh-", "") + # Trim any cert suffix (but leave the -cert, as OpenSSH does) + cert_tail = "-cert-v01@openssh.com" + if cert_tail in name: + name = name.replace(cert_tail, "-cert") + # Nuke any eg ECDSA suffix, OpenSSH does basically this too. + else: + name = name.split("-")[0] + return name.upper() + + def get_bits(self): + """ + Return the number of significant bits in this key. This is useful + for judging the relative security of a key. + + :return: bits in the key (as an `int`) + """ + # TODO 4.0: raise NotImplementedError, 0 is unlikely to ever be + # _correct_ and nothing in the critical path seems to use this. + return 0 + + def can_sign(self): + """ + Return ``True`` if this key has the private part necessary for signing + data. + """ + return False + + def get_fingerprint(self): + """ + Return an MD5 fingerprint of the public part of this key. Nothing + secret is revealed. + + :return: + a 16-byte `string ` (binary) of the MD5 fingerprint, in SSH + format. + """ + return md5(self.asbytes()).digest() + + @property + def fingerprint(self): + """ + Modern fingerprint property designed to be comparable to OpenSSH. + + Currently only does SHA256 (the OpenSSH default). + + .. versionadded:: 3.2 + """ + hashy = sha256(bytes(self)) + hash_name = hashy.name.upper() + b64ed = encodebytes(hashy.digest()) + cleaned = u(b64ed).strip().rstrip("=") # yes, OpenSSH does this too! 
+ return f"{hash_name}:{cleaned}" + + def get_base64(self): + """ + Return a base64 string containing the public part of this key. Nothing + secret is revealed. This format is compatible with that used to store + public key files or recognized host keys. + + :return: a base64 `string ` containing the public part of the key. + """ + return u(encodebytes(self.asbytes())).replace("\n", "") + + def sign_ssh_data(self, data, algorithm=None): + """ + Sign a blob of data with this private key, and return a `.Message` + representing an SSH signature message. + + :param bytes data: + the data to sign. + :param str algorithm: + the signature algorithm to use, if different from the key's + internal name. Default: ``None``. + :return: an SSH signature `message <.Message>`. + + .. versionchanged:: 2.9 + Added the ``algorithm`` kwarg. + """ + return bytes() + + def verify_ssh_sig(self, data, msg): + """ + Given a blob of data, and an SSH message representing a signature of + that data, verify that it was signed with this key. + + :param bytes data: the data that was signed. + :param .Message msg: an SSH signature message + :return: + ``True`` if the signature verifies correctly; ``False`` otherwise. + """ + return False + + @classmethod + def from_private_key_file(cls, filename, password=None): + """ + Create a key object by reading a private key file. If the private + key is encrypted and ``password`` is not ``None``, the given password + will be used to decrypt the key (otherwise `.PasswordRequiredException` + is thrown). Through the magic of Python, this factory method will + exist in all subclasses of PKey (such as `.RSAKey` or `.DSSKey`), but + is useless on the abstract PKey class. + + :param str filename: name of the file to read + :param str password: + an optional password to use to decrypt the key file, if it's + encrypted + :return: a new `.PKey` based on the given private key + + :raises: ``IOError`` -- if there was an error reading the file + :raises: `.PasswordRequiredException` -- if the private key file is + encrypted, and ``password`` is ``None`` + :raises: `.SSHException` -- if the key file is invalid + """ + key = cls(filename=filename, password=password) + return key + + @classmethod + def from_private_key(cls, file_obj, password=None): + """ + Create a key object by reading a private key from a file (or file-like) + object. If the private key is encrypted and ``password`` is not + ``None``, the given password will be used to decrypt the key (otherwise + `.PasswordRequiredException` is thrown). + + :param file_obj: the file-like object to read from + :param str password: + an optional password to use to decrypt the key, if it's encrypted + :return: a new `.PKey` based on the given private key + + :raises: ``IOError`` -- if there was an error reading the key + :raises: `.PasswordRequiredException` -- + if the private key file is encrypted, and ``password`` is ``None`` + :raises: `.SSHException` -- if the key file is invalid + """ + key = cls(file_obj=file_obj, password=password) + return key + + def write_private_key_file(self, filename, password=None): + """ + Write private key contents into a file. If the password is not + ``None``, the key is encrypted before writing. 
+ + :param str filename: name of the file to write + :param str password: + an optional password to use to encrypt the key file + + :raises: ``IOError`` -- if there was an error writing the file + :raises: `.SSHException` -- if the key is invalid + """ + raise Exception("Not implemented in PKey") + + def write_private_key(self, file_obj, password=None): + """ + Write private key contents into a file (or file-like) object. If the + password is not ``None``, the key is encrypted before writing. + + :param file_obj: the file-like object to write into + :param str password: an optional password to use to encrypt the key + + :raises: ``IOError`` -- if there was an error writing to the file + :raises: `.SSHException` -- if the key is invalid + """ + # TODO 4.0: NotImplementedError (plus everywhere else in here) + raise Exception("Not implemented in PKey") + + def _read_private_key_file(self, tag, filename, password=None): + """ + Read an SSH2-format private key file, looking for a string of the type + ``"BEGIN xxx PRIVATE KEY"`` for some ``xxx``, base64-decode the text we + find, and return it as a string. If the private key is encrypted and + ``password`` is not ``None``, the given password will be used to + decrypt the key (otherwise `.PasswordRequiredException` is thrown). + + :param str tag: ``"RSA"`` or ``"DSA"``, the tag used to mark the + data block. + :param str filename: name of the file to read. + :param str password: + an optional password to use to decrypt the key file, if it's + encrypted. + :return: the `bytes` that make up the private key. + + :raises: ``IOError`` -- if there was an error reading the file. + :raises: `.PasswordRequiredException` -- if the private key file is + encrypted, and ``password`` is ``None``. + :raises: `.SSHException` -- if the key file is invalid. + """ + with open(filename, "r") as f: + data = self._read_private_key(tag, f, password) + return data + + def _read_private_key(self, tag, f, password=None): + lines = f.readlines() + if not lines: + raise SSHException("no lines in {} private key file".format(tag)) + + # find the BEGIN tag + start = 0 + m = self.BEGIN_TAG.match(lines[start]) + line_range = len(lines) - 1 + while start < line_range and not m: + start += 1 + m = self.BEGIN_TAG.match(lines[start]) + start += 1 + keytype = m.group(1) if m else None + if start >= len(lines) or keytype is None: + raise SSHException("not a valid {} private key file".format(tag)) + + # find the END tag + end = start + m = self.END_TAG.match(lines[end]) + while end < line_range and not m: + end += 1 + m = self.END_TAG.match(lines[end]) + + if keytype == tag: + data = self._read_private_key_pem(lines, end, password) + pkformat = self._PRIVATE_KEY_FORMAT_ORIGINAL + elif keytype == "OPENSSH": + data = self._read_private_key_openssh(lines[start:end], password) + pkformat = self._PRIVATE_KEY_FORMAT_OPENSSH + else: + raise SSHException( + "encountered {} key, expected {} key".format(keytype, tag) + ) + + return pkformat, data + + def _got_bad_key_format_id(self, id_): + err = "{}._read_private_key() spat out an unknown key format id '{}'" + raise SSHException(err.format(self.__class__.__name__, id_)) + + def _read_private_key_pem(self, lines, end, password): + start = 0 + # parse any headers first + headers = {} + start += 1 + while start < len(lines): + line = lines[start].split(": ") + if len(line) == 1: + break + headers[line[0].lower()] = line[1].strip() + start += 1 + # if we trudged to the end of the file, just try to cope. 
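+ # (A legacy PEM body is just base64 between the BEGIN/END tags;
+ # optional "Proc-Type"/"DEK-Info" headers mark an encrypted key.)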
+ try:
+ data = decodebytes(b("".join(lines[start:end])))
+ except base64.binascii.Error as e:
+ raise SSHException("base64 decoding error: {}".format(e))
+ if "proc-type" not in headers:
+ # unencrypted: done
+ return data
+ # encrypted keyfile: will need a password
+ proc_type = headers["proc-type"]
+ if proc_type != "4,ENCRYPTED":
+ raise SSHException(
+ 'Unknown private key structure "{}"'.format(proc_type)
+ )
+ try:
+ encryption_type, saltstr = headers["dek-info"].split(",")
+ except (KeyError, ValueError):
+ raise SSHException("Can't parse DEK-info in private key file")
+ if encryption_type not in self._CIPHER_TABLE:
+ raise SSHException(
+ 'Unknown private key cipher "{}"'.format(encryption_type)
+ )
+ # if no password was passed in,
+ # raise an exception pointing out that we need one
+ if password is None:
+ raise PasswordRequiredException("Private key file is encrypted")
+ cipher = self._CIPHER_TABLE[encryption_type]["cipher"]
+ keysize = self._CIPHER_TABLE[encryption_type]["keysize"]
+ mode = self._CIPHER_TABLE[encryption_type]["mode"]
+ salt = unhexlify(b(saltstr))
+ key = util.generate_key_bytes(md5, salt, password, keysize)
+ decryptor = Cipher(
+ cipher(key), mode(salt), backend=default_backend()
+ ).decryptor()
+ return decryptor.update(data) + decryptor.finalize()
+
+ def _read_private_key_openssh(self, lines, password):
+ """
+ Read the new OpenSSH SSH2 private key format available
+ since OpenSSH version 6.5.
+ Reference:
+ https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key
+ """
+ try:
+ data = decodebytes(b("".join(lines)))
+ except base64.binascii.Error as e:
+ raise SSHException("base64 decoding error: {}".format(e))
+
+ # read data struct
+ auth_magic = data[:15]
+ if auth_magic != OPENSSH_AUTH_MAGIC:
+ raise SSHException("unexpected OpenSSH key header encountered")
+
+ cstruct = self._uint32_cstruct_unpack(data[15:], "sssur")
+ cipher, kdfname, kdf_options, num_pubkeys, remainder = cstruct
+ # For now, just support 1 key.
+ if num_pubkeys > 1:
+ raise SSHException(
+ "unsupported: private keyfile has multiple keys"
+ )
+ pubkey, privkey_blob = self._uint32_cstruct_unpack(remainder, "ss")
+
+ if kdfname == b("bcrypt"):
+ if cipher == b("aes256-cbc"):
+ mode = modes.CBC
+ elif cipher == b("aes256-ctr"):
+ mode = modes.CTR
+ else:
+ raise SSHException(
+ "unknown cipher `{}` used in private key file".format(
+ cipher.decode("utf-8")
+ )
+ )
+ # Encrypted private key.
+ # If no password was passed in, raise an exception pointing
+ # out that we need one
+ if password is None:
+ raise PasswordRequiredException(
+ "private key file is encrypted"
+ )
+
+ # Unpack salt and rounds from kdfoptions
+ salt, rounds = self._uint32_cstruct_unpack(kdf_options, "su")
+
+ # run bcrypt kdf to derive key and iv/nonce (32 + 16 bytes)
+ key_iv = bcrypt.kdf(
+ b(password),
+ b(salt),
+ 48,
+ rounds,
+ # We can't control how many rounds are on disk, so no sense
+ # warning about it.
+ ignore_few_rounds=True, + ) + key = key_iv[:32] + iv = key_iv[32:] + + # decrypt private key blob + decryptor = Cipher( + algorithms.AES(key), mode(iv), default_backend() + ).decryptor() + decrypted_privkey = decryptor.update(privkey_blob) + decrypted_privkey += decryptor.finalize() + elif cipher == b("none") and kdfname == b("none"): + # Unencrypted private key + decrypted_privkey = privkey_blob + else: + raise SSHException( + "unknown cipher or kdf used in private key file" + ) + + # Unpack private key and verify checkints + cstruct = self._uint32_cstruct_unpack(decrypted_privkey, "uusr") + checkint1, checkint2, keytype, keydata = cstruct + + if checkint1 != checkint2: + raise SSHException( + "OpenSSH private key file checkints do not match" + ) + + return _unpad_openssh(keydata) + + def _uint32_cstruct_unpack(self, data, strformat): + """ + Used to read new OpenSSH private key format. + Unpacks a c data structure containing a mix of 32-bit uints and + variable length strings prefixed by 32-bit uint size field, + according to the specified format. Returns the unpacked vars + in a tuple. + Format strings: + s - denotes a string + i - denotes a long integer, encoded as a byte string + u - denotes a 32-bit unsigned integer + r - the remainder of the input string, returned as a string + """ + arr = [] + idx = 0 + try: + for f in strformat: + if f == "s": + # string + s_size = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + s = data[idx : idx + s_size] + idx += s_size + arr.append(s) + if f == "i": + # long integer + s_size = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + s = data[idx : idx + s_size] + idx += s_size + i = util.inflate_long(s, True) + arr.append(i) + elif f == "u": + # 32-bit unsigned int + u = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + arr.append(u) + elif f == "r": + # remainder as string + s = data[idx:] + arr.append(s) + break + except Exception as e: + # PKey-consuming code frequently wants to save-and-skip-over issues + # with loading keys, and uses SSHException as the (really friggin + # awful) signal for this. So for now...we do this. + raise SSHException(str(e)) + return tuple(arr) + + def _write_private_key_file(self, filename, key, format, password=None): + """ + Write an SSH2-format private key file in a form that can be read by + paramiko or openssh. If no password is given, the key is written in + a trivially-encoded format (base64) which is completely insecure. If + a password is given, DES-EDE3-CBC is used. + + :param str tag: + ``"RSA"`` or ``"DSA"``, the tag used to mark the data block. + :param filename: name of the file to write. + :param bytes data: data blob that makes up the private key. + :param str password: an optional password to use to encrypt the file. + + :raises: ``IOError`` -- if there was an error writing the file. + """ + # Ensure that we create new key files directly with a user-only mode, + # instead of opening, writing, then chmodding, which leaves us open to + # CVE-2022-24302. + with os.fdopen( + os.open( + filename, + # NOTE: O_TRUNC is a noop on new files, and O_CREAT is a noop + # on existing files, so using all 3 in both cases is fine. + flags=os.O_WRONLY | os.O_TRUNC | os.O_CREAT, + # Ditto the use of the 'mode' argument; it should be safe to + # give even for existing files (though it will not act like a + # chmod in that case). + mode=o600, + ), + # Yea, you still gotta inform the FLO that it is in "write" mode. 
+ "w", + ) as f: + self._write_private_key(f, key, format, password=password) + + def _write_private_key(self, f, key, format, password=None): + if password is None: + encryption = serialization.NoEncryption() + else: + encryption = serialization.BestAvailableEncryption(b(password)) + + f.write( + key.private_bytes( + serialization.Encoding.PEM, format, encryption + ).decode() + ) + + def _check_type_and_load_cert(self, msg, key_type, cert_type): + """ + Perform message type-checking & optional certificate loading. + + This includes fast-forwarding cert ``msg`` objects past the nonce, so + that the subsequent fields are the key numbers; thus the caller may + expect to treat the message as key material afterwards either way. + + The obtained key type is returned for classes which need to know what + it was (e.g. ECDSA.) + """ + # Normalization; most classes have a single key type and give a string, + # but eg ECDSA is a 1:N mapping. + key_types = key_type + cert_types = cert_type + if isinstance(key_type, str): + key_types = [key_types] + if isinstance(cert_types, str): + cert_types = [cert_types] + # Can't do much with no message, that should've been handled elsewhere + if msg is None: + raise SSHException("Key object may not be empty") + # First field is always key type, in either kind of object. (make sure + # we rewind before grabbing it - sometimes caller had to do their own + # introspection first!) + msg.rewind() + type_ = msg.get_text() + # Regular public key - nothing special to do besides the implicit + # type check. + if type_ in key_types: + pass + # OpenSSH-compatible certificate - store full copy as .public_blob + # (so signing works correctly) and then fast-forward past the + # nonce. + elif type_ in cert_types: + # This seems the cleanest way to 'clone' an already-being-read + # message; they're *IO objects at heart and their .getvalue() + # always returns the full value regardless of pointer position. + self.load_certificate(Message(msg.asbytes())) + # Read out nonce as it comes before the public numbers - our caller + # is likely going to use the (only borrowed by us, not owned) + # 'msg' object for loading those numbers right after this. + # TODO: usefully interpret it & other non-public-number fields + # (requires going back into per-type subclasses.) + msg.get_string() + else: + err = "Invalid key (class: {}, data type: {}" + raise SSHException(err.format(self.__class__.__name__, type_)) + + def load_certificate(self, value): + """ + Supplement the private key contents with data loaded from an OpenSSH + public key (``.pub``) or certificate (``-cert.pub``) file, a string + containing such a file, or a `.Message` object. + + The .pub contents adds no real value, since the private key + file includes sufficient information to derive the public + key info. For certificates, however, this can be used on + the client side to offer authentication requests to the server + based on certificate instead of raw public key. + + See: + https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.certkeys + + Note: very little effort is made to validate the certificate contents, + that is for the server to decide if it is good enough to authenticate + successfully. 
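+
+ Sketch (file names are hypothetical)::
+
+ key = RSAKey.from_private_key_file("id_rsa")
+ key.load_certificate("id_rsa-cert.pub")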
+        """
+        if isinstance(value, Message):
+            constructor = "from_message"
+        elif os.path.isfile(value):
+            constructor = "from_file"
+        else:
+            constructor = "from_string"
+        blob = getattr(PublicBlob, constructor)(value)
+        if not blob.key_type.startswith(self.get_name()):
+            err = "PublicBlob type {} incompatible with key type {}"
+            raise ValueError(err.format(blob.key_type, self.get_name()))
+        self.public_blob = blob
+
+
+# General construct for an OpenSSH style Public Key blob
+# readable from a one-line file of the format:
+#     <key type> <base64 blob> [<comment>]
+# Of little value in the case of standard public keys
+# {ssh-rsa, ssh-dss, ssh-ecdsa, ssh-ed25519}, but should
+# provide rudimentary support for {*-cert.v01}
+class PublicBlob:
+    """
+    OpenSSH plain public key or OpenSSH signed public key (certificate).
+
+    Tries to be as dumb as possible and barely cares about specific
+    per-key-type data.
+
+    .. note::
+
+        Most of the time you'll want to call `from_file`, `from_string` or
+        `from_message` for useful instantiation, the main constructor is
+        basically "I should be using ``attrs`` for this."
+    """
+
+    def __init__(self, type_, blob, comment=None):
+        """
+        Create a new public blob of given type and contents.
+
+        :param str type_: Type indicator, eg ``ssh-rsa``.
+        :param bytes blob: The blob bytes themselves.
+        :param str comment: A comment, if one was given (e.g. file-based.)
+        """
+        self.key_type = type_
+        self.key_blob = blob
+        self.comment = comment
+
+    @classmethod
+    def from_file(cls, filename):
+        """
+        Create a public blob from a ``-cert.pub``-style file on disk.
+        """
+        with open(filename) as f:
+            string = f.read()
+        return cls.from_string(string)
+
+    @classmethod
+    def from_string(cls, string):
+        """
+        Create a public blob from a ``-cert.pub``-style string.
+        """
+        fields = string.split(None, 2)
+        if len(fields) < 2:
+            msg = "Not enough fields for public blob: {}"
+            raise ValueError(msg.format(fields))
+        key_type = fields[0]
+        key_blob = decodebytes(b(fields[1]))
+        try:
+            comment = fields[2].strip()
+        except IndexError:
+            comment = None
+        # Verify that the blob message first (string) field matches the
+        # key_type
+        m = Message(key_blob)
+        blob_type = m.get_text()
+        if blob_type != key_type:
+            deets = "key type={!r}, but blob type={!r}".format(
+                key_type, blob_type
+            )
+            raise ValueError("Invalid PublicBlob contents: {}".format(deets))
+        # All good? All good.
+        return cls(type_=key_type, blob=key_blob, comment=comment)
+
+    @classmethod
+    def from_message(cls, message):
+        """
+        Create a public blob from a network `.Message`.
+
+        Specifically, a cert-bearing pubkey auth packet, because by definition
+        OpenSSH-style certificates 'are' their own network representation.
+        """
+        type_ = message.get_text()
+        return cls(type_=type_, blob=message.asbytes())
+
+    def __str__(self):
+        ret = "{} public key/certificate".format(self.key_type)
+        if self.comment:
+            ret += "- {}".format(self.comment)
+        return ret
+
+    def __eq__(self, other):
+        # Just piggyback on Message/BytesIO, since both of these should be one.
+        return self and other and self.key_blob == other.key_blob
+
+    def __ne__(self, other):
+        return not self == other
diff --git a/venv/lib/python3.12/site-packages/paramiko/primes.py b/venv/lib/python3.12/site-packages/paramiko/primes.py
new file mode 100644
index 0000000..663c58e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/paramiko/primes.py
@@ -0,0 +1,148 @@
+# Copyright (C) 2003-2007 Robey Pointer
+#
+# This file is part of paramiko.
+# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Utility functions for dealing with primes. +""" + +import os + +from paramiko import util +from paramiko.common import byte_mask +from paramiko.ssh_exception import SSHException + + +def _roll_random(n): + """returns a random # from 0 to N-1""" + bits = util.bit_length(n - 1) + byte_count = (bits + 7) // 8 + hbyte_mask = pow(2, bits % 8) - 1 + + # so here's the plan: + # we fetch as many random bits as we'd need to fit N-1, and if the + # generated number is >= N, we try again. in the worst case (N-1 is a + # power of 2), we have slightly better than 50% odds of getting one that + # fits, so i can't guarantee that this loop will ever finish, but the odds + # of it looping forever should be infinitesimal. + while True: + x = os.urandom(byte_count) + if hbyte_mask > 0: + x = byte_mask(x[0], hbyte_mask) + x[1:] + num = util.inflate_long(x, 1) + if num < n: + break + return num + + +class ModulusPack: + """ + convenience object for holding the contents of the /etc/ssh/moduli file, + on systems that have such a file. + """ + + def __init__(self): + # pack is a hash of: bits -> [ (generator, modulus) ... ] + self.pack = {} + self.discarded = [] + + def _parse_modulus(self, line): + ( + timestamp, + mod_type, + tests, + tries, + size, + generator, + modulus, + ) = line.split() + mod_type = int(mod_type) + tests = int(tests) + tries = int(tries) + size = int(size) + generator = int(generator) + modulus = int(modulus, 16) + + # weed out primes that aren't at least: + # type 2 (meets basic structural requirements) + # test 4 (more than just a small-prime sieve) + # tries < 100 if test & 4 (at least 100 tries of miller-rabin) + if ( + mod_type < 2 + or tests < 4 + or (tests & 4 and tests < 8 and tries < 100) + ): + self.discarded.append( + (modulus, "does not meet basic requirements") + ) + return + if generator == 0: + generator = 2 + + # there's a bug in the ssh "moduli" file (yeah, i know: shock! dismay! + # call cnn!) where it understates the bit lengths of these primes by 1. + # this is okay. + bl = util.bit_length(modulus) + if (bl != size) and (bl != size + 1): + self.discarded.append( + (modulus, "incorrectly reported bit length {}".format(size)) + ) + return + if bl not in self.pack: + self.pack[bl] = [] + self.pack[bl].append((generator, modulus)) + + def read_file(self, filename): + """ + :raises IOError: passed from any file operations that fail. 
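+
+        Example (a sketch; the path assumes a stock OpenSSH install)::
+
+            pack = ModulusPack()
+            pack.read_file("/etc/ssh/moduli")
+            g, p = pack.get_modulus(1024, 2048, 4096)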
+ """ + self.pack = {} + with open(filename, "r") as f: + for line in f: + line = line.strip() + if (len(line) == 0) or (line[0] == "#"): + continue + try: + self._parse_modulus(line) + except: + continue + + def get_modulus(self, min, prefer, max): + bitsizes = sorted(self.pack.keys()) + if len(bitsizes) == 0: + raise SSHException("no moduli available") + good = -1 + # find nearest bitsize >= preferred + for b in bitsizes: + if (b >= prefer) and (b <= max) and (b < good or good == -1): + good = b + # if that failed, find greatest bitsize >= min + if good == -1: + for b in bitsizes: + if (b >= min) and (b <= max) and (b > good): + good = b + if good == -1: + # their entire (min, max) range has no intersection with our range. + # if their range is below ours, pick the smallest. otherwise pick + # the largest. it'll be out of their range requirement either way, + # but we'll be sending them the closest one we have. + good = bitsizes[0] + if min > good: + good = bitsizes[-1] + # now pick a random modulus of this bitsize + n = _roll_random(len(self.pack[good])) + return self.pack[good][n] diff --git a/venv/lib/python3.12/site-packages/paramiko/proxy.py b/venv/lib/python3.12/site-packages/paramiko/proxy.py new file mode 100644 index 0000000..f7609c9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/proxy.py @@ -0,0 +1,134 @@ +# Copyright (C) 2012 Yipit, Inc +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +import os +import shlex +import signal +from select import select +import socket +import time + +# Try-and-ignore import so platforms w/o subprocess (eg Google App Engine) can +# still import paramiko. +subprocess, subprocess_import_error = None, None +try: + import subprocess +except ImportError as e: + subprocess_import_error = e + +from paramiko.ssh_exception import ProxyCommandFailure +from paramiko.util import ClosingContextManager + + +class ProxyCommand(ClosingContextManager): + """ + Wraps a subprocess running ProxyCommand-driven programs. + + This class implements a the socket-like interface needed by the + `.Transport` and `.Packetizer` classes. Using this class instead of a + regular socket makes it possible to talk with a Popen'd command that will + proxy traffic between the client and a server hosted in another machine. + + Instances of this class may be used as context managers. + """ + + def __init__(self, command_line): + """ + Create a new CommandProxy instance. The instance created by this + class can be passed as an argument to the `.Transport` class. + + :param str command_line: + the command that should be executed and used as the proxy. 
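+
+        Example (a sketch; the host names are placeholders)::
+
+            sock = ProxyCommand("ssh -q -W target.example.com:22 jumphost")
+            transport = paramiko.Transport(sock)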
+ """ + if subprocess is None: + raise subprocess_import_error + self.cmd = shlex.split(command_line) + self.process = subprocess.Popen( + self.cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=0, + ) + self.timeout = None + + def send(self, content): + """ + Write the content received from the SSH client to the standard + input of the forked command. + + :param str content: string to be sent to the forked command + """ + try: + self.process.stdin.write(content) + except IOError as e: + # There was a problem with the child process. It probably + # died and we can't proceed. The best option here is to + # raise an exception informing the user that the informed + # ProxyCommand is not working. + raise ProxyCommandFailure(" ".join(self.cmd), e.strerror) + return len(content) + + def recv(self, size): + """ + Read from the standard output of the forked program. + + :param int size: how many chars should be read + + :return: the string of bytes read, which may be shorter than requested + """ + try: + buffer = b"" + start = time.time() + while len(buffer) < size: + select_timeout = None + if self.timeout is not None: + elapsed = time.time() - start + if elapsed >= self.timeout: + raise socket.timeout() + select_timeout = self.timeout - elapsed + + r, w, x = select([self.process.stdout], [], [], select_timeout) + if r and r[0] == self.process.stdout: + buffer += os.read( + self.process.stdout.fileno(), size - len(buffer) + ) + return buffer + except socket.timeout: + if buffer: + # Don't raise socket.timeout, return partial result instead + return buffer + raise # socket.timeout is a subclass of IOError + except IOError as e: + raise ProxyCommandFailure(" ".join(self.cmd), e.strerror) + + def close(self): + os.kill(self.process.pid, signal.SIGTERM) + + @property + def closed(self): + return self.process.returncode is not None + + @property + def _closed(self): + # Concession to Python 3 socket-like API + return self.closed + + def settimeout(self, timeout): + self.timeout = timeout diff --git a/venv/lib/python3.12/site-packages/paramiko/rsakey.py b/venv/lib/python3.12/site-packages/paramiko/rsakey.py new file mode 100644 index 0000000..b7ad3ce --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/rsakey.py @@ -0,0 +1,227 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +RSA keys. 
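+
+For example (illustrative only)::
+
+    key = RSAKey.generate(2048)
+    key.write_private_key_file("demo_rsa.key", password="secret")
+    pub_line = "{} {}".format(key.get_name(), key.get_base64())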
+""" + +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa, padding + +from paramiko.message import Message +from paramiko.pkey import PKey +from paramiko.ssh_exception import SSHException + + +class RSAKey(PKey): + """ + Representation of an RSA key which can be used to sign and verify SSH2 + data. + """ + + name = "ssh-rsa" + HASHES = { + "ssh-rsa": hashes.SHA1, + "ssh-rsa-cert-v01@openssh.com": hashes.SHA1, + "rsa-sha2-256": hashes.SHA256, + "rsa-sha2-256-cert-v01@openssh.com": hashes.SHA256, + "rsa-sha2-512": hashes.SHA512, + "rsa-sha2-512-cert-v01@openssh.com": hashes.SHA512, + } + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + key=None, + file_obj=None, + ): + self.key = None + self.public_blob = None + if file_obj is not None: + self._from_private_key(file_obj, password) + return + if filename is not None: + self._from_private_key_file(filename, password) + return + if (msg is None) and (data is not None): + msg = Message(data) + if key is not None: + self.key = key + else: + self._check_type_and_load_cert( + msg=msg, + # NOTE: this does NOT change when using rsa2 signatures; it's + # purely about key loading, not exchange or verification + key_type=self.name, + cert_type="ssh-rsa-cert-v01@openssh.com", + ) + self.key = rsa.RSAPublicNumbers( + e=msg.get_mpint(), n=msg.get_mpint() + ).public_key(default_backend()) + + @classmethod + def identifiers(cls): + return list(cls.HASHES.keys()) + + @property + def size(self): + return self.key.key_size + + @property + def public_numbers(self): + if isinstance(self.key, rsa.RSAPrivateKey): + return self.key.private_numbers().public_numbers + else: + return self.key.public_numbers() + + def asbytes(self): + m = Message() + m.add_string(self.name) + m.add_mpint(self.public_numbers.e) + m.add_mpint(self.public_numbers.n) + return m.asbytes() + + def __str__(self): + # NOTE: see #853 to explain some legacy behavior. + # TODO 4.0: replace with a nice clean fingerprint display or something + return self.asbytes().decode("utf8", errors="ignore") + + @property + def _fields(self): + return (self.get_name(), self.public_numbers.e, self.public_numbers.n) + + def get_name(self): + return self.name + + def get_bits(self): + return self.size + + def can_sign(self): + return isinstance(self.key, rsa.RSAPrivateKey) + + def sign_ssh_data(self, data, algorithm=None): + if algorithm is None: + algorithm = self.name + sig = self.key.sign( + data, + padding=padding.PKCS1v15(), + # HASHES being just a map from long identifier to either SHA1 or + # SHA256 - cert'ness is not truly relevant. + algorithm=self.HASHES[algorithm](), + ) + m = Message() + # And here again, cert'ness is irrelevant, so it is stripped out. + m.add_string(algorithm.replace("-cert-v01@openssh.com", "")) + m.add_string(sig) + return m + + def verify_ssh_sig(self, data, msg): + sig_algorithm = msg.get_text() + if sig_algorithm not in self.HASHES: + return False + key = self.key + if isinstance(key, rsa.RSAPrivateKey): + key = key.public_key() + + # NOTE: pad received signature with leading zeros, key.verify() + # expects a signature of key size (e.g. 
PuTTY doesn't pad) + sign = msg.get_binary() + diff = key.key_size - len(sign) * 8 + if diff > 0: + sign = b"\x00" * ((diff + 7) // 8) + sign + + try: + key.verify( + sign, data, padding.PKCS1v15(), self.HASHES[sig_algorithm]() + ) + except InvalidSignature: + return False + else: + return True + + def write_private_key_file(self, filename, password=None): + self._write_private_key_file( + filename, + self.key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + def write_private_key(self, file_obj, password=None): + self._write_private_key( + file_obj, + self.key, + serialization.PrivateFormat.TraditionalOpenSSL, + password=password, + ) + + @staticmethod + def generate(bits, progress_func=None): + """ + Generate a new private RSA key. This factory function can be used to + generate a new host key or authentication key. + + :param int bits: number of bits the generated key should be. + :param progress_func: Unused + :return: new `.RSAKey` private key + """ + key = rsa.generate_private_key( + public_exponent=65537, key_size=bits, backend=default_backend() + ) + return RSAKey(key=key) + + # ...internals... + + def _from_private_key_file(self, filename, password): + data = self._read_private_key_file("RSA", filename, password) + self._decode_key(data) + + def _from_private_key(self, file_obj, password): + data = self._read_private_key("RSA", file_obj, password) + self._decode_key(data) + + def _decode_key(self, data): + pkformat, data = data + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + key = serialization.load_der_private_key( + data, password=None, backend=default_backend() + ) + except (ValueError, TypeError, UnsupportedAlgorithm) as e: + raise SSHException(str(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + n, e, d, iqmp, p, q = self._uint32_cstruct_unpack(data, "iiiiii") + public_numbers = rsa.RSAPublicNumbers(e=e, n=n) + key = rsa.RSAPrivateNumbers( + p=p, + q=q, + d=d, + dmp1=d % (p - 1), + dmq1=d % (q - 1), + iqmp=iqmp, + public_numbers=public_numbers, + ).private_key(default_backend()) + else: + self._got_bad_key_format_id(pkformat) + assert isinstance(key, rsa.RSAPrivateKey) + self.key = key diff --git a/venv/lib/python3.12/site-packages/paramiko/server.py b/venv/lib/python3.12/site-packages/paramiko/server.py new file mode 100644 index 0000000..6923bdf --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/server.py @@ -0,0 +1,732 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +`.ServerInterface` is an interface to override for server support. 
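+
+A minimal subclass might look like this (a sketch only; the hardcoded
+credentials are placeholders)::
+
+    class DemoServer(ServerInterface):
+        def get_allowed_auths(self, username):
+            return "password"
+
+        def check_auth_password(self, username, password):
+            if (username, password) == ("demo", "demo"):
+                return AUTH_SUCCESSFUL
+            return AUTH_FAILED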
+""" + +import threading +from paramiko import util +from paramiko.common import ( + DEBUG, + ERROR, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + AUTH_FAILED, + AUTH_SUCCESSFUL, +) + + +class ServerInterface: + """ + This class defines an interface for controlling the behavior of Paramiko + in server mode. + + Methods on this class are called from Paramiko's primary thread, so you + shouldn't do too much work in them. (Certainly nothing that blocks or + sleeps.) + """ + + def check_channel_request(self, kind, chanid): + """ + Determine if a channel request of a given type will be granted, and + return ``OPEN_SUCCEEDED`` or an error code. This method is + called in server mode when the client requests a channel, after + authentication is complete. + + If you allow channel requests (and an ssh server that didn't would be + useless), you should also override some of the channel request methods + below, which are used to determine which services will be allowed on + a given channel: + + - `check_channel_pty_request` + - `check_channel_shell_request` + - `check_channel_subsystem_request` + - `check_channel_window_change_request` + - `check_channel_x11_request` + - `check_channel_forward_agent_request` + + The ``chanid`` parameter is a small number that uniquely identifies the + channel within a `.Transport`. A `.Channel` object is not created + unless this method returns ``OPEN_SUCCEEDED`` -- once a + `.Channel` object is created, you can call `.Channel.get_id` to + retrieve the channel ID. + + The return value should either be ``OPEN_SUCCEEDED`` (or + ``0``) to allow the channel request, or one of the following error + codes to reject it: + + - ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED`` + - ``OPEN_FAILED_CONNECT_FAILED`` + - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE`` + - ``OPEN_FAILED_RESOURCE_SHORTAGE`` + + The default implementation always returns + ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``. + + :param str kind: + the kind of channel the client would like to open (usually + ``"session"``). + :param int chanid: ID of the channel + :return: an `int` success or failure code (listed above) + """ + return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + + def get_allowed_auths(self, username): + """ + Return a list of authentication methods supported by the server. + This list is sent to clients attempting to authenticate, to inform them + of authentication methods that might be successful. + + The "list" is actually a string of comma-separated names of types of + authentication. Possible values are ``"password"``, ``"publickey"``, + and ``"none"``. + + The default implementation always returns ``"password"``. + + :param str username: the username requesting authentication. + :return: a comma-separated `str` of authentication types + """ + return "password" + + def check_auth_none(self, username): + """ + Determine if a client may open channels with no (further) + authentication. + + Return ``AUTH_FAILED`` if the client must authenticate, or + ``AUTH_SUCCESSFUL`` if it's okay for the client to not + authenticate. + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the client. + :return: + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds. + :rtype: int + """ + return AUTH_FAILED + + def check_auth_password(self, username, password): + """ + Determine if a given username and password supplied by the client is + acceptable for use in authentication. 
+ + Return ``AUTH_FAILED`` if the password is not accepted, + ``AUTH_SUCCESSFUL`` if the password is accepted and completes + the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your + authentication is stateful, and this key is accepted for + authentication, but more authentication is required. (In this latter + case, `get_allowed_auths` will be called to report to the client what + options it has for continuing the authentication.) + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the authenticating client. + :param str password: the password given by the client. + :return: + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the password auth is + successful, but authentication must continue. + :rtype: int + """ + return AUTH_FAILED + + def check_auth_publickey(self, username, key): + """ + Determine if a given key supplied by the client is acceptable for use + in authentication. You should override this method in server mode to + check the username and key and decide if you would accept a signature + made using this key. + + Return ``AUTH_FAILED`` if the key is not accepted, + ``AUTH_SUCCESSFUL`` if the key is accepted and completes the + authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your + authentication is stateful, and this password is accepted for + authentication, but more authentication is required. (In this latter + case, `get_allowed_auths` will be called to report to the client what + options it has for continuing the authentication.) + + Note that you don't have to actually verify any key signtature here. + If you're willing to accept the key, Paramiko will do the work of + verifying the client's signature. + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the authenticating client + :param .PKey key: the key object provided by the client + :return: + ``AUTH_FAILED`` if the client can't authenticate with this key; + ``AUTH_SUCCESSFUL`` if it can; ``AUTH_PARTIALLY_SUCCESSFUL`` if it + can authenticate with this key but must continue with + authentication + :rtype: int + """ + return AUTH_FAILED + + def check_auth_interactive(self, username, submethods): + """ + Begin an interactive authentication challenge, if supported. You + should override this method in server mode if you want to support the + ``"keyboard-interactive"`` auth type, which requires you to send a + series of questions for the client to answer. + + Return ``AUTH_FAILED`` if this auth method isn't supported. Otherwise, + you should return an `.InteractiveQuery` object containing the prompts + and instructions for the user. The response will be sent via a call + to `check_auth_interactive_response`. + + The default implementation always returns ``AUTH_FAILED``. + + :param str username: the username of the authenticating client + :param str submethods: + a comma-separated list of methods preferred by the client (usually + empty) + :return: + ``AUTH_FAILED`` if this auth method isn't supported; otherwise an + object containing queries for the user + :rtype: int or `.InteractiveQuery` + """ + return AUTH_FAILED + + def check_auth_interactive_response(self, responses): + """ + Continue or finish an interactive authentication challenge, if + supported. You should override this method in server mode if you want + to support the ``"keyboard-interactive"`` auth type. 
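+
+        (For instance, if `check_auth_interactive` had returned
+        ``InteractiveQuery("", "", ("Password: ", False))``, the
+        ``responses`` list here would hold the single string the user
+        typed at that prompt.)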
+ + Return ``AUTH_FAILED`` if the responses are not accepted, + ``AUTH_SUCCESSFUL`` if the responses are accepted and complete + the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your + authentication is stateful, and this set of responses is accepted for + authentication, but more authentication is required. (In this latter + case, `get_allowed_auths` will be called to report to the client what + options it has for continuing the authentication.) + + If you wish to continue interactive authentication with more questions, + you may return an `.InteractiveQuery` object, which should cause the + client to respond with more answers, calling this method again. This + cycle can continue indefinitely. + + The default implementation always returns ``AUTH_FAILED``. + + :param responses: list of `str` responses from the client + :return: + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the interactive auth + is successful, but authentication must continue; otherwise an + object containing queries for the user + :rtype: int or `.InteractiveQuery` + """ + return AUTH_FAILED + + def check_auth_gssapi_with_mic( + self, username, gss_authenticated=AUTH_FAILED, cc_file=None + ): + """ + Authenticate the given user to the server if he is a valid krb5 + principal. + + :param str username: The username of the authenticating client + :param int gss_authenticated: The result of the krb5 authentication + :param str cc_filename: The krb5 client credentials cache filename + :return: ``AUTH_FAILED`` if the user is not authenticated otherwise + ``AUTH_SUCCESSFUL`` + :rtype: int + :note: Kerberos credential delegation is not supported. + :see: `.ssh_gss` + :note: : We are just checking in L{AuthHandler} that the given user is + a valid krb5 principal! + We don't check if the krb5 principal is allowed to log in on + the server, because there is no way to do that in python. So + if you develop your own SSH server with paramiko for a certain + platform like Linux, you should call C{krb5_kuserok()} in + your local kerberos library to make sure that the + krb5_principal has an account on the server and is allowed to + log in as a user. + :see: http://www.unix.com/man-page/all/3/krb5_kuserok/ + """ + if gss_authenticated == AUTH_SUCCESSFUL: + return AUTH_SUCCESSFUL + return AUTH_FAILED + + def check_auth_gssapi_keyex( + self, username, gss_authenticated=AUTH_FAILED, cc_file=None + ): + """ + Authenticate the given user to the server if he is a valid krb5 + principal and GSS-API Key Exchange was performed. + If GSS-API Key Exchange was not performed, this authentication method + won't be available. + + :param str username: The username of the authenticating client + :param int gss_authenticated: The result of the krb5 authentication + :param str cc_filename: The krb5 client credentials cache filename + :return: ``AUTH_FAILED`` if the user is not authenticated otherwise + ``AUTH_SUCCESSFUL`` + :rtype: int + :note: Kerberos credential delegation is not supported. + :see: `.ssh_gss` `.kex_gss` + :note: : We are just checking in L{AuthHandler} that the given user is + a valid krb5 principal! + We don't check if the krb5 principal is allowed to log in on + the server, because there is no way to do that in python. 
So + if you develop your own SSH server with paramiko for a certain + platform like Linux, you should call C{krb5_kuserok()} in + your local kerberos library to make sure that the + krb5_principal has an account on the server and is allowed + to log in as a user. + :see: http://www.unix.com/man-page/all/3/krb5_kuserok/ + """ + if gss_authenticated == AUTH_SUCCESSFUL: + return AUTH_SUCCESSFUL + return AUTH_FAILED + + def enable_auth_gssapi(self): + """ + Overwrite this function in your SSH server to enable GSSAPI + authentication. + The default implementation always returns false. + + :returns bool: Whether GSSAPI authentication is enabled. + :see: `.ssh_gss` + """ + UseGSSAPI = False + return UseGSSAPI + + def check_port_forward_request(self, address, port): + """ + Handle a request for port forwarding. The client is asking that + connections to the given address and port be forwarded back across + this ssh connection. An address of ``"0.0.0.0"`` indicates a global + address (any address associated with this server) and a port of ``0`` + indicates that no specific port is requested (usually the OS will pick + a port). + + The default implementation always returns ``False``, rejecting the + port forwarding request. If the request is accepted, you should return + the port opened for listening. + + :param str address: the requested address + :param int port: the requested port + :return: + the port number (`int`) that was opened for listening, or ``False`` + to reject + """ + return False + + def cancel_port_forward_request(self, address, port): + """ + The client would like to cancel a previous port-forwarding request. + If the given address and port is being forwarded across this ssh + connection, the port should be closed. + + :param str address: the forwarded address + :param int port: the forwarded port + """ + pass + + def check_global_request(self, kind, msg): + """ + Handle a global request of the given ``kind``. This method is called + in server mode and client mode, whenever the remote host makes a global + request. If there are any arguments to the request, they will be in + ``msg``. + + There aren't any useful global requests defined, aside from port + forwarding, so usually this type of request is an extension to the + protocol. + + If the request was successful and you would like to return contextual + data to the remote host, return a tuple. Items in the tuple will be + sent back with the successful result. (Note that the items in the + tuple can only be strings, ints, or bools.) + + The default implementation always returns ``False``, indicating that it + does not support any global requests. + + .. note:: Port forwarding requests are handled separately, in + `check_port_forward_request`. + + :param str kind: the kind of global request being made. + :param .Message msg: any extra arguments to the request. + :return: + ``True`` or a `tuple` of data if the request was granted; ``False`` + otherwise. + """ + return False + + # ...Channel requests... + + def check_channel_pty_request( + self, channel, term, width, height, pixelwidth, pixelheight, modes + ): + """ + Determine if a pseudo-terminal of the given dimensions (usually + requested for shell access) can be provided on the given channel. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the pty request arrived on. + :param str term: type of terminal requested (for example, ``"vt100"``). + :param int width: width of screen in characters. 
+ :param int height: height of screen in characters. + :param int pixelwidth: + width of screen in pixels, if known (may be ``0`` if unknown). + :param int pixelheight: + height of screen in pixels, if known (may be ``0`` if unknown). + :return: + ``True`` if the pseudo-terminal has been allocated; ``False`` + otherwise. + """ + return False + + def check_channel_shell_request(self, channel): + """ + Determine if a shell will be provided to the client on the given + channel. If this method returns ``True``, the channel should be + connected to the stdin/stdout of a shell (or something that acts like + a shell). + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the request arrived on. + :return: + ``True`` if this channel is now hooked up to a shell; ``False`` if + a shell can't or won't be provided. + """ + return False + + def check_channel_exec_request(self, channel, command): + """ + Determine if a shell command will be executed for the client. If this + method returns ``True``, the channel should be connected to the stdin, + stdout, and stderr of the shell command. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the request arrived on. + :param str command: the command to execute. + :return: + ``True`` if this channel is now hooked up to the stdin, stdout, and + stderr of the executing command; ``False`` if the command will not + be executed. + + .. versionadded:: 1.1 + """ + return False + + def check_channel_subsystem_request(self, channel, name): + """ + Determine if a requested subsystem will be provided to the client on + the given channel. If this method returns ``True``, all future I/O + through this channel will be assumed to be connected to the requested + subsystem. An example of a subsystem is ``sftp``. + + The default implementation checks for a subsystem handler assigned via + `.Transport.set_subsystem_handler`. + If one has been set, the handler is invoked and this method returns + ``True``. Otherwise it returns ``False``. + + .. note:: Because the default implementation uses the `.Transport` to + identify valid subsystems, you probably won't need to override this + method. + + :param .Channel channel: the `.Channel` the pty request arrived on. + :param str name: name of the requested subsystem. + :return: + ``True`` if this channel is now hooked up to the requested + subsystem; ``False`` if that subsystem can't or won't be provided. + """ + transport = channel.get_transport() + handler_class, args, kwargs = transport._get_subsystem_handler(name) + if handler_class is None: + return False + handler = handler_class(channel, name, self, *args, **kwargs) + handler.start() + return True + + def check_channel_window_change_request( + self, channel, width, height, pixelwidth, pixelheight + ): + """ + Determine if the pseudo-terminal on the given channel can be resized. + This only makes sense if a pty was previously allocated on it. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the pty request arrived on. + :param int width: width of screen in characters. + :param int height: height of screen in characters. + :param int pixelwidth: + width of screen in pixels, if known (may be ``0`` if unknown). + :param int pixelheight: + height of screen in pixels, if known (may be ``0`` if unknown). + :return: ``True`` if the terminal was resized; ``False`` if not. 
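+
+        An accepting override might look like this (a sketch;
+        ``resize_pty`` is a hypothetical helper on your own session
+        object)::
+
+            def check_channel_window_change_request(
+                self, channel, width, height, pixelwidth, pixelheight
+            ):
+                self.session.resize_pty(width, height)  # hypothetical
+                return True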
+ """ + return False + + def check_channel_x11_request( + self, + channel, + single_connection, + auth_protocol, + auth_cookie, + screen_number, + ): + """ + Determine if the client will be provided with an X11 session. If this + method returns ``True``, X11 applications should be routed through new + SSH channels, using `.Transport.open_x11_channel`. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the X11 request arrived on + :param bool single_connection: + ``True`` if only a single X11 channel should be opened, else + ``False``. + :param str auth_protocol: the protocol used for X11 authentication + :param str auth_cookie: the cookie used to authenticate to X11 + :param int screen_number: the number of the X11 screen to connect to + :return: ``True`` if the X11 session was opened; ``False`` if not + """ + return False + + def check_channel_forward_agent_request(self, channel): + """ + Determine if the client will be provided with an forward agent session. + If this method returns ``True``, the server will allow SSH Agent + forwarding. + + The default implementation always returns ``False``. + + :param .Channel channel: the `.Channel` the request arrived on + :return: ``True`` if the AgentForward was loaded; ``False`` if not + + If ``True`` is returned, the server should create an + :class:`AgentServerProxy` to access the agent. + """ + return False + + def check_channel_direct_tcpip_request(self, chanid, origin, destination): + """ + Determine if a local port forwarding channel will be granted, and + return ``OPEN_SUCCEEDED`` or an error code. This method is + called in server mode when the client requests a channel, after + authentication is complete. + + The ``chanid`` parameter is a small number that uniquely identifies the + channel within a `.Transport`. A `.Channel` object is not created + unless this method returns ``OPEN_SUCCEEDED`` -- once a + `.Channel` object is created, you can call `.Channel.get_id` to + retrieve the channel ID. + + The origin and destination parameters are (ip_address, port) tuples + that correspond to both ends of the TCP connection in the forwarding + tunnel. + + The return value should either be ``OPEN_SUCCEEDED`` (or + ``0``) to allow the channel request, or one of the following error + codes to reject it: + + - ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED`` + - ``OPEN_FAILED_CONNECT_FAILED`` + - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE`` + - ``OPEN_FAILED_RESOURCE_SHORTAGE`` + + The default implementation always returns + ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``. + + :param int chanid: ID of the channel + :param tuple origin: + 2-tuple containing the IP address and port of the originator + (client side) + :param tuple destination: + 2-tuple containing the IP address and port of the destination + (server side) + :return: an `int` success or failure code (listed above) + """ + return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + + def check_channel_env_request(self, channel, name, value): + """ + Check whether a given environment variable can be specified for the + given channel. This method should return ``True`` if the server + is willing to set the specified environment variable. Note that + some environment variables (e.g., PATH) can be exceedingly + dangerous, so blindly allowing the client to set the environment + is almost certainly not a good idea. + + The default implementation always returns ``False``. 
+ + :param channel: the `.Channel` the env request arrived on + :param str name: name + :param str value: Channel value + :returns: A boolean + """ + return False + + def get_banner(self): + """ + A pre-login banner to display to the user. The message may span + multiple lines separated by crlf pairs. The language should be in + rfc3066 style, for example: en-US + + The default implementation always returns ``(None, None)``. + + :returns: A tuple containing the banner and language code. + + .. versionadded:: 2.3 + """ + return (None, None) + + +class InteractiveQuery: + """ + A query (set of prompts) for a user during interactive authentication. + """ + + def __init__(self, name="", instructions="", *prompts): + """ + Create a new interactive query to send to the client. The name and + instructions are optional, but are generally displayed to the end + user. A list of prompts may be included, or they may be added via + the `add_prompt` method. + + :param str name: name of this query + :param str instructions: + user instructions (usually short) about this query + :param str prompts: one or more authentication prompts + """ + self.name = name + self.instructions = instructions + self.prompts = [] + for x in prompts: + if isinstance(x, str): + self.add_prompt(x) + else: + self.add_prompt(x[0], x[1]) + + def add_prompt(self, prompt, echo=True): + """ + Add a prompt to this query. The prompt should be a (reasonably short) + string. Multiple prompts can be added to the same query. + + :param str prompt: the user prompt + :param bool echo: + ``True`` (default) if the user's response should be echoed; + ``False`` if not (for a password or similar) + """ + self.prompts.append((prompt, echo)) + + +class SubsystemHandler(threading.Thread): + """ + Handler for a subsystem in server mode. If you create a subclass of this + class and pass it to `.Transport.set_subsystem_handler`, an object of this + class will be created for each request for this subsystem. Each new object + will be executed within its own new thread by calling `start_subsystem`. + When that method completes, the channel is closed. + + For example, if you made a subclass ``MP3Handler`` and registered it as the + handler for subsystem ``"mp3"``, then whenever a client has successfully + authenticated and requests subsystem ``"mp3"``, an object of class + ``MP3Handler`` will be created, and `start_subsystem` will be called on + it from a new thread. + """ + + def __init__(self, channel, name, server): + """ + Create a new handler for a channel. This is used by `.ServerInterface` + to start up a new handler when a channel requests this subsystem. You + don't need to override this method, but if you do, be sure to pass the + ``channel`` and ``name`` parameters through to the original + ``__init__`` method here. + + :param .Channel channel: the channel associated with this + subsystem request. + :param str name: name of the requested subsystem. + :param .ServerInterface server: + the server object for the session that started this subsystem + """ + threading.Thread.__init__(self, target=self._run) + self.__channel = channel + self.__transport = channel.get_transport() + self.__name = name + self.__server = server + + def get_server(self): + """ + Return the `.ServerInterface` object associated with this channel and + subsystem. 
+ """ + return self.__server + + def _run(self): + try: + self.__transport._log( + DEBUG, "Starting handler for subsystem {}".format(self.__name) + ) + self.start_subsystem(self.__name, self.__transport, self.__channel) + except Exception as e: + self.__transport._log( + ERROR, + 'Exception in subsystem handler for "{}": {}'.format( + self.__name, e + ), + ) + self.__transport._log(ERROR, util.tb_strings()) + try: + self.finish_subsystem() + except: + pass + + def start_subsystem(self, name, transport, channel): + """ + Process an ssh subsystem in server mode. This method is called on a + new object (and in a new thread) for each subsystem request. It is + assumed that all subsystem logic will take place here, and when the + subsystem is finished, this method will return. After this method + returns, the channel is closed. + + The combination of ``transport`` and ``channel`` are unique; this + handler corresponds to exactly one `.Channel` on one `.Transport`. + + .. note:: + It is the responsibility of this method to exit if the underlying + `.Transport` is closed. This can be done by checking + `.Transport.is_active` or noticing an EOF on the `.Channel`. If + this method loops forever without checking for this case, your + Python interpreter may refuse to exit because this thread will + still be running. + + :param str name: name of the requested subsystem. + :param .Transport transport: the server-mode `.Transport`. + :param .Channel channel: the channel associated with this subsystem + request. + """ + pass + + def finish_subsystem(self): + """ + Perform any cleanup at the end of a subsystem. The default + implementation just closes the channel. + + .. versionadded:: 1.1 + """ + self.__channel.close() diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp.py b/venv/lib/python3.12/site-packages/paramiko/sftp.py new file mode 100644 index 0000000..b3528d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp.py @@ -0,0 +1,224 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
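+
+# Wire framing note: every SFTP packet is uint32 length, byte type,
+# payload.  For example, a client's version-3 INIT packet is the nine
+# bytes 00 00 00 05 | 01 | 00 00 00 03 (length=5 covers the type byte
+# plus a uint32 version); see _send_packet()/_read_packet() below.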
+ +import select +import socket +import struct + +from paramiko import util +from paramiko.common import DEBUG, byte_chr, byte_ord +from paramiko.message import Message + + +( + CMD_INIT, + CMD_VERSION, + CMD_OPEN, + CMD_CLOSE, + CMD_READ, + CMD_WRITE, + CMD_LSTAT, + CMD_FSTAT, + CMD_SETSTAT, + CMD_FSETSTAT, + CMD_OPENDIR, + CMD_READDIR, + CMD_REMOVE, + CMD_MKDIR, + CMD_RMDIR, + CMD_REALPATH, + CMD_STAT, + CMD_RENAME, + CMD_READLINK, + CMD_SYMLINK, +) = range(1, 21) +(CMD_STATUS, CMD_HANDLE, CMD_DATA, CMD_NAME, CMD_ATTRS) = range(101, 106) +(CMD_EXTENDED, CMD_EXTENDED_REPLY) = range(200, 202) + +SFTP_OK = 0 +( + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + SFTP_FAILURE, + SFTP_BAD_MESSAGE, + SFTP_NO_CONNECTION, + SFTP_CONNECTION_LOST, + SFTP_OP_UNSUPPORTED, +) = range(1, 9) + +SFTP_DESC = [ + "Success", + "End of file", + "No such file", + "Permission denied", + "Failure", + "Bad message", + "No connection", + "Connection lost", + "Operation unsupported", +] + +SFTP_FLAG_READ = 0x1 +SFTP_FLAG_WRITE = 0x2 +SFTP_FLAG_APPEND = 0x4 +SFTP_FLAG_CREATE = 0x8 +SFTP_FLAG_TRUNC = 0x10 +SFTP_FLAG_EXCL = 0x20 + +_VERSION = 3 + + +# for debugging +CMD_NAMES = { + CMD_INIT: "init", + CMD_VERSION: "version", + CMD_OPEN: "open", + CMD_CLOSE: "close", + CMD_READ: "read", + CMD_WRITE: "write", + CMD_LSTAT: "lstat", + CMD_FSTAT: "fstat", + CMD_SETSTAT: "setstat", + CMD_FSETSTAT: "fsetstat", + CMD_OPENDIR: "opendir", + CMD_READDIR: "readdir", + CMD_REMOVE: "remove", + CMD_MKDIR: "mkdir", + CMD_RMDIR: "rmdir", + CMD_REALPATH: "realpath", + CMD_STAT: "stat", + CMD_RENAME: "rename", + CMD_READLINK: "readlink", + CMD_SYMLINK: "symlink", + CMD_STATUS: "status", + CMD_HANDLE: "handle", + CMD_DATA: "data", + CMD_NAME: "name", + CMD_ATTRS: "attrs", + CMD_EXTENDED: "extended", + CMD_EXTENDED_REPLY: "extended_reply", +} + + +# TODO: rewrite SFTP file/server modules' overly-flexible "make a request with +# xyz components" so we don't need this very silly method of signaling whether +# a given Python integer should be 32- or 64-bit. +# NOTE: this only became an issue when dropping Python 2 support; prior to +# doing so, we had to support actual-longs, which served as that signal. This +# is simply recreating that structure in a more tightly scoped fashion. +class int64(int): + pass + + +class SFTPError(Exception): + pass + + +class BaseSFTP: + def __init__(self): + self.logger = util.get_logger("paramiko.sftp") + self.sock = None + self.ultra_debug = False + + # ...internals... + + def _send_version(self): + m = Message() + m.add_int(_VERSION) + self._send_packet(CMD_INIT, m) + t, data = self._read_packet() + if t != CMD_VERSION: + raise SFTPError("Incompatible sftp protocol") + version = struct.unpack(">I", data[:4])[0] + # if version != _VERSION: + # raise SFTPError('Incompatible sftp protocol') + return version + + def _send_server_version(self): + # winscp will freak out if the server sends version info before the + # client finishes sending INIT. 
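+        # (Hence, unlike _send_version() above: read the client's INIT
+        # first, and only then reply with our own CMD_VERSION packet.)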
+ t, data = self._read_packet() + if t != CMD_INIT: + raise SFTPError("Incompatible sftp protocol") + version = struct.unpack(">I", data[:4])[0] + # advertise that we support "check-file" + extension_pairs = ["check-file", "md5,sha1"] + msg = Message() + msg.add_int(_VERSION) + msg.add(*extension_pairs) + self._send_packet(CMD_VERSION, msg) + return version + + def _log(self, level, msg, *args): + self.logger.log(level, msg, *args) + + def _write_all(self, out): + while len(out) > 0: + n = self.sock.send(out) + if n <= 0: + raise EOFError() + if n == len(out): + return + out = out[n:] + return + + def _read_all(self, n): + out = bytes() + while n > 0: + if isinstance(self.sock, socket.socket): + # sometimes sftp is used directly over a socket instead of + # through a paramiko channel. in this case, check periodically + # if the socket is closed. (for some reason, recv() won't ever + # return or raise an exception, but calling select on a closed + # socket will.) + while True: + read, write, err = select.select([self.sock], [], [], 0.1) + if len(read) > 0: + x = self.sock.recv(n) + break + else: + x = self.sock.recv(n) + + if len(x) == 0: + raise EOFError() + out += x + n -= len(x) + return out + + def _send_packet(self, t, packet): + packet = packet.asbytes() + out = struct.pack(">I", len(packet) + 1) + byte_chr(t) + packet + if self.ultra_debug: + self._log(DEBUG, util.format_binary(out, "OUT: ")) + self._write_all(out) + + def _read_packet(self): + x = self._read_all(4) + # most sftp servers won't accept packets larger than about 32k, so + # anything with the high byte set (> 16MB) is just garbage. + if byte_ord(x[0]): + raise SFTPError("Garbage packet received") + size = struct.unpack(">I", x)[0] + data = self._read_all(size) + if self.ultra_debug: + self._log(DEBUG, util.format_binary(data, "IN: ")) + if size > 0: + t = byte_ord(data[0]) + return t, data[1:] + return 0, bytes() diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp_attr.py b/venv/lib/python3.12/site-packages/paramiko/sftp_attr.py new file mode 100644 index 0000000..18ffbf8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp_attr.py @@ -0,0 +1,239 @@ +# Copyright (C) 2003-2006 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import stat +import time +from paramiko.common import x80000000, o700, o70, xffffffff + + +class SFTPAttributes: + """ + Representation of the attributes of a file (or proxied file) for SFTP in + client or server mode. 
It attempts to mirror the object returned by
+    `os.stat` as closely as possible, so it may have the following fields,
+    with the same meanings as those returned by an `os.stat` object:
+
+        - ``st_size``
+        - ``st_uid``
+        - ``st_gid``
+        - ``st_mode``
+        - ``st_atime``
+        - ``st_mtime``
+
+    Because SFTP allows flags to have other arbitrary named attributes, these
+    are stored in a dict named ``attr``.  Occasionally, the filename is also
+    stored, in ``filename``.
+    """
+
+    FLAG_SIZE = 1
+    FLAG_UIDGID = 2
+    FLAG_PERMISSIONS = 4
+    FLAG_AMTIME = 8
+    FLAG_EXTENDED = x80000000
+
+    def __init__(self):
+        """
+        Create a new (empty) SFTPAttributes object.  All fields will be empty.
+        """
+        self._flags = 0
+        self.st_size = None
+        self.st_uid = None
+        self.st_gid = None
+        self.st_mode = None
+        self.st_atime = None
+        self.st_mtime = None
+        self.attr = {}
+
+    @classmethod
+    def from_stat(cls, obj, filename=None):
+        """
+        Create an `.SFTPAttributes` object from an existing ``stat`` object (an
+        object returned by `os.stat`).
+
+        :param object obj: an object returned by `os.stat` (or equivalent).
+        :param str filename: the filename associated with this file.
+        :return: new `.SFTPAttributes` object with the same attribute fields.
+        """
+        attr = cls()
+        attr.st_size = obj.st_size
+        attr.st_uid = obj.st_uid
+        attr.st_gid = obj.st_gid
+        attr.st_mode = obj.st_mode
+        attr.st_atime = obj.st_atime
+        attr.st_mtime = obj.st_mtime
+        if filename is not None:
+            attr.filename = filename
+        return attr
+
+    def __repr__(self):
+        return "<SFTPAttributes: {}>".format(self._debug_str())
+
+    # ...internals...
+    @classmethod
+    def _from_msg(cls, msg, filename=None, longname=None):
+        attr = cls()
+        attr._unpack(msg)
+        if filename is not None:
+            attr.filename = filename
+        if longname is not None:
+            attr.longname = longname
+        return attr
+
+    def _unpack(self, msg):
+        self._flags = msg.get_int()
+        if self._flags & self.FLAG_SIZE:
+            self.st_size = msg.get_int64()
+        if self._flags & self.FLAG_UIDGID:
+            self.st_uid = msg.get_int()
+            self.st_gid = msg.get_int()
+        if self._flags & self.FLAG_PERMISSIONS:
+            self.st_mode = msg.get_int()
+        if self._flags & self.FLAG_AMTIME:
+            self.st_atime = msg.get_int()
+            self.st_mtime = msg.get_int()
+        if self._flags & self.FLAG_EXTENDED:
+            count = msg.get_int()
+            for i in range(count):
+                self.attr[msg.get_string()] = msg.get_string()
+
+    def _pack(self, msg):
+        self._flags = 0
+        if self.st_size is not None:
+            self._flags |= self.FLAG_SIZE
+        if (self.st_uid is not None) and (self.st_gid is not None):
+            self._flags |= self.FLAG_UIDGID
+        if self.st_mode is not None:
+            self._flags |= self.FLAG_PERMISSIONS
+        if (self.st_atime is not None) and (self.st_mtime is not None):
+            self._flags |= self.FLAG_AMTIME
+        if len(self.attr) > 0:
+            self._flags |= self.FLAG_EXTENDED
+        msg.add_int(self._flags)
+        if self._flags & self.FLAG_SIZE:
+            msg.add_int64(self.st_size)
+        if self._flags & self.FLAG_UIDGID:
+            msg.add_int(self.st_uid)
+            msg.add_int(self.st_gid)
+        if self._flags & self.FLAG_PERMISSIONS:
+            msg.add_int(self.st_mode)
+        if self._flags & self.FLAG_AMTIME:
+            # throw away any fractional seconds
+            msg.add_int(int(self.st_atime))
+            msg.add_int(int(self.st_mtime))
+        if self._flags & self.FLAG_EXTENDED:
+            msg.add_int(len(self.attr))
+            for key, val in self.attr.items():
+                msg.add_string(key)
+                msg.add_string(val)
+        return
+
+    def _debug_str(self):
+        out = "[ "
+        if self.st_size is not None:
+            out += "size={} ".format(self.st_size)
+        if (self.st_uid is not None) and (self.st_gid is not None):
+            out += "uid={} gid={} ".format(self.st_uid,
self.st_gid) + if self.st_mode is not None: + out += "mode=" + oct(self.st_mode) + " " + if (self.st_atime is not None) and (self.st_mtime is not None): + out += "atime={} mtime={} ".format(self.st_atime, self.st_mtime) + for k, v in self.attr.items(): + out += '"{}"={!r} '.format(str(k), v) + out += "]" + return out + + @staticmethod + def _rwx(n, suid, sticky=False): + if suid: + suid = 2 + out = "-r"[n >> 2] + "-w"[(n >> 1) & 1] + if sticky: + out += "-xTt"[suid + (n & 1)] + else: + out += "-xSs"[suid + (n & 1)] + return out + + def __str__(self): + """create a unix-style long description of the file (like ls -l)""" + if self.st_mode is not None: + kind = stat.S_IFMT(self.st_mode) + if kind == stat.S_IFIFO: + ks = "p" + elif kind == stat.S_IFCHR: + ks = "c" + elif kind == stat.S_IFDIR: + ks = "d" + elif kind == stat.S_IFBLK: + ks = "b" + elif kind == stat.S_IFREG: + ks = "-" + elif kind == stat.S_IFLNK: + ks = "l" + elif kind == stat.S_IFSOCK: + ks = "s" + else: + ks = "?" + ks += self._rwx( + (self.st_mode & o700) >> 6, self.st_mode & stat.S_ISUID + ) + ks += self._rwx( + (self.st_mode & o70) >> 3, self.st_mode & stat.S_ISGID + ) + ks += self._rwx( + self.st_mode & 7, self.st_mode & stat.S_ISVTX, True + ) + else: + ks = "?---------" + # compute display date + if (self.st_mtime is None) or (self.st_mtime == xffffffff): + # shouldn't really happen + datestr = "(unknown date)" + else: + time_tuple = time.localtime(self.st_mtime) + if abs(time.time() - self.st_mtime) > 15_552_000: + # (15,552,000s = 6 months) + datestr = time.strftime("%d %b %Y", time_tuple) + else: + datestr = time.strftime("%d %b %H:%M", time_tuple) + filename = getattr(self, "filename", "?") + + # not all servers support uid/gid + uid = self.st_uid + gid = self.st_gid + size = self.st_size + if uid is None: + uid = 0 + if gid is None: + gid = 0 + if size is None: + size = 0 + + # TODO: not sure this actually worked as expected beforehand, leaving + # it untouched for the time being, re: .format() upgrade, until someone + # has time to doublecheck + return "%s 1 %-8d %-8d %8d %-12s %s" % ( + ks, + uid, + gid, + size, + datestr, + filename, + ) + + def asbytes(self): + return str(self).encode() diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp_client.py b/venv/lib/python3.12/site-packages/paramiko/sftp_client.py new file mode 100644 index 0000000..066cd83 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp_client.py @@ -0,0 +1,965 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of Paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ + +from binascii import hexlify +import errno +import os +import stat +import threading +import time +import weakref +from paramiko import util +from paramiko.channel import Channel +from paramiko.message import Message +from paramiko.common import INFO, DEBUG, o777 +from paramiko.sftp import ( + BaseSFTP, + CMD_OPENDIR, + CMD_HANDLE, + SFTPError, + CMD_READDIR, + CMD_NAME, + CMD_CLOSE, + SFTP_FLAG_READ, + SFTP_FLAG_WRITE, + SFTP_FLAG_CREATE, + SFTP_FLAG_TRUNC, + SFTP_FLAG_APPEND, + SFTP_FLAG_EXCL, + CMD_OPEN, + CMD_REMOVE, + CMD_RENAME, + CMD_MKDIR, + CMD_RMDIR, + CMD_STAT, + CMD_ATTRS, + CMD_LSTAT, + CMD_SYMLINK, + CMD_SETSTAT, + CMD_READLINK, + CMD_REALPATH, + CMD_STATUS, + CMD_EXTENDED, + SFTP_OK, + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + int64, +) + +from paramiko.sftp_attr import SFTPAttributes +from paramiko.ssh_exception import SSHException +from paramiko.sftp_file import SFTPFile +from paramiko.util import ClosingContextManager, b, u + + +def _to_unicode(s): + """ + decode a string as ascii or utf8 if possible (as required by the sftp + protocol). if neither works, just return a byte string because the server + probably doesn't know the filename's encoding. + """ + try: + return s.encode("ascii") + except (UnicodeError, AttributeError): + try: + return s.decode("utf-8") + except UnicodeError: + return s + + +b_slash = b"/" + + +class SFTPClient(BaseSFTP, ClosingContextManager): + """ + SFTP client object. + + Used to open an SFTP session across an open SSH `.Transport` and perform + remote file operations. + + Instances of this class may be used as context managers. + """ + + def __init__(self, sock): + """ + Create an SFTP client from an existing `.Channel`. The channel + should already have requested the ``"sftp"`` subsystem. + + An alternate way to create an SFTP client context is by using + `from_transport`. + + :param .Channel sock: an open `.Channel` using the ``"sftp"`` subsystem + + :raises: + `.SSHException` -- if there's an exception while negotiating sftp + """ + BaseSFTP.__init__(self) + self.sock = sock + self.ultra_debug = False + self.request_number = 1 + # lock for request_number + self._lock = threading.Lock() + self._cwd = None + # request # -> SFTPFile + self._expecting = weakref.WeakValueDictionary() + if type(sock) is Channel: + # override default logger + transport = self.sock.get_transport() + self.logger = util.get_logger( + transport.get_log_channel() + ".sftp" + ) + self.ultra_debug = transport.get_hexdump() + try: + server_version = self._send_version() + except EOFError: + raise SSHException("EOF during negotiation") + self._log( + INFO, + "Opened sftp connection (server version {})".format( + server_version + ), + ) + + @classmethod + def from_transport(cls, t, window_size=None, max_packet_size=None): + """ + Create an SFTP client channel from an open `.Transport`. + + Setting the window and packet sizes might affect the transfer speed. + The default settings in the `.Transport` class are the same as in + OpenSSH and should work adequately for both files transfers and + interactive sessions. + + :param .Transport t: an open `.Transport` which is already + authenticated + :param int window_size: + optional window size for the `.SFTPClient` session. + :param int max_packet_size: + optional max packet size for the `.SFTPClient` session.. + + :return: + a new `.SFTPClient` object, referring to an sftp session (channel) + across the transport + + .. versionchanged:: 1.15 + Added the ``window_size`` and ``max_packet_size`` arguments. 
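
A minimal connection sketch for from_transport as documented above (host and credentials are hypothetical):

    import paramiko

    transport = paramiko.Transport(("sftp.example.com", 22))
    transport.connect(username="user", password="secret")
    sftp = paramiko.SFTPClient.from_transport(transport)
    try:
        print(sftp.listdir("."))
    finally:
        sftp.close()
        transport.close()
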
+ """ + chan = t.open_session( + window_size=window_size, max_packet_size=max_packet_size + ) + if chan is None: + return None + chan.invoke_subsystem("sftp") + return cls(chan) + + def _log(self, level, msg, *args): + if isinstance(msg, list): + for m in msg: + self._log(level, m, *args) + else: + # NOTE: these bits MUST continue using %-style format junk because + # logging.Logger.log() explicitly requires it. Grump. + # escape '%' in msg (they could come from file or directory names) + # before logging + msg = msg.replace("%", "%%") + super()._log( + level, + "[chan %s] " + msg, + *([self.sock.get_name()] + list(args)) + ) + + def close(self): + """ + Close the SFTP session and its underlying channel. + + .. versionadded:: 1.4 + """ + self._log(INFO, "sftp session closed.") + self.sock.close() + + def get_channel(self): + """ + Return the underlying `.Channel` object for this SFTP session. This + might be useful for doing things like setting a timeout on the channel. + + .. versionadded:: 1.7.1 + """ + return self.sock + + def listdir(self, path="."): + """ + Return a list containing the names of the entries in the given + ``path``. + + The list is in arbitrary order. It does not include the special + entries ``'.'`` and ``'..'`` even if they are present in the folder. + This method is meant to mirror ``os.listdir`` as closely as possible. + For a list of full `.SFTPAttributes` objects, see `listdir_attr`. + + :param str path: path to list (defaults to ``'.'``) + """ + return [f.filename for f in self.listdir_attr(path)] + + def listdir_attr(self, path="."): + """ + Return a list containing `.SFTPAttributes` objects corresponding to + files in the given ``path``. The list is in arbitrary order. It does + not include the special entries ``'.'`` and ``'..'`` even if they are + present in the folder. + + The returned `.SFTPAttributes` objects will each have an additional + field: ``longname``, which may contain a formatted string of the file's + attributes, in unix format. The content of this string will probably + depend on the SFTP server implementation. + + :param str path: path to list (defaults to ``'.'``) + :return: list of `.SFTPAttributes` objects + + .. versionadded:: 1.2 + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "listdir({!r})".format(path)) + t, msg = self._request(CMD_OPENDIR, path) + if t != CMD_HANDLE: + raise SFTPError("Expected handle") + handle = msg.get_binary() + filelist = [] + while True: + try: + t, msg = self._request(CMD_READDIR, handle) + except EOFError: + # done with handle + break + if t != CMD_NAME: + raise SFTPError("Expected name response") + count = msg.get_int() + for i in range(count): + filename = msg.get_text() + longname = msg.get_text() + attr = SFTPAttributes._from_msg(msg, filename, longname) + if (filename != ".") and (filename != ".."): + filelist.append(attr) + self._request(CMD_CLOSE, handle) + return filelist + + def listdir_iter(self, path=".", read_aheads=50): + """ + Generator version of `.listdir_attr`. + + See the API docs for `.listdir_attr` for overall details. + + This function adds one more kwarg on top of `.listdir_attr`: + ``read_aheads``, an integer controlling how many + ``SSH_FXP_READDIR`` requests are made to the server. The default of 50 + should suffice for most file listings as each request/response cycle + may contain multiple files (dependent on server implementation.) + + .. 
versionadded:: 1.15 + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "listdir({!r})".format(path)) + t, msg = self._request(CMD_OPENDIR, path) + + if t != CMD_HANDLE: + raise SFTPError("Expected handle") + + handle = msg.get_string() + + nums = list() + while True: + try: + # Send out a bunch of readdir requests so that we can read the + # responses later on Section 6.7 of the SSH file transfer RFC + # explains this + # http://filezilla-project.org/specs/draft-ietf-secsh-filexfer-02.txt + for i in range(read_aheads): + num = self._async_request(type(None), CMD_READDIR, handle) + nums.append(num) + + # For each of our sent requests + # Read and parse the corresponding packets + # If we're at the end of our queued requests, then fire off + # some more requests + # Exit the loop when we've reached the end of the directory + # handle + for num in nums: + t, pkt_data = self._read_packet() + msg = Message(pkt_data) + new_num = msg.get_int() + if num == new_num: + if t == CMD_STATUS: + self._convert_status(msg) + count = msg.get_int() + for i in range(count): + filename = msg.get_text() + longname = msg.get_text() + attr = SFTPAttributes._from_msg( + msg, filename, longname + ) + if (filename != ".") and (filename != ".."): + yield attr + + # If we've hit the end of our queued requests, reset nums. + nums = list() + + except EOFError: + self._request(CMD_CLOSE, handle) + return + + def open(self, filename, mode="r", bufsize=-1): + """ + Open a file on the remote server. The arguments are the same as for + Python's built-in `python:file` (aka `python:open`). A file-like + object is returned, which closely mimics the behavior of a normal + Python file object, including the ability to be used as a context + manager. + + The mode indicates how the file is to be opened: ``'r'`` for reading, + ``'w'`` for writing (truncating an existing file), ``'a'`` for + appending, ``'r+'`` for reading/writing, ``'w+'`` for reading/writing + (truncating an existing file), ``'a+'`` for reading/appending. The + Python ``'b'`` flag is ignored, since SSH treats all files as binary. + The ``'U'`` flag is supported in a compatible way. + + Since 1.5.2, an ``'x'`` flag indicates that the operation should only + succeed if the file was created and did not previously exist. This has + no direct mapping to Python's file flags, but is commonly known as the + ``O_EXCL`` flag in posix. + + The file will be buffered in standard Python style by default, but + can be altered with the ``bufsize`` parameter. ``<=0`` turns off + buffering, ``1`` uses line buffering, and any number greater than 1 + (``>1``) uses that specific buffer size. + + :param str filename: name of the file to open + :param str mode: mode (Python-style) to open in + :param int bufsize: desired buffering (default: ``-1``) + :return: an `.SFTPFile` object representing the open file + + :raises: ``IOError`` -- if the file could not be opened. 
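
A short sketch of the mode flags described above, assuming ``sftp`` is an already-connected SFTPClient (paths hypothetical):

    with sftp.open("remote.log", "a") as f:  # FLAG_CREATE | FLAG_APPEND
        f.write(b"one more line\n")

    try:
        sftp.open("lockfile", "x").close()  # FLAG_CREATE | FLAG_EXCL (O_EXCL)
    except IOError:
        print("lockfile already exists")
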
+ """ + filename = self._adjust_cwd(filename) + self._log(DEBUG, "open({!r}, {!r})".format(filename, mode)) + imode = 0 + if ("r" in mode) or ("+" in mode): + imode |= SFTP_FLAG_READ + if ("w" in mode) or ("+" in mode) or ("a" in mode): + imode |= SFTP_FLAG_WRITE + if "w" in mode: + imode |= SFTP_FLAG_CREATE | SFTP_FLAG_TRUNC + if "a" in mode: + imode |= SFTP_FLAG_CREATE | SFTP_FLAG_APPEND + if "x" in mode: + imode |= SFTP_FLAG_CREATE | SFTP_FLAG_EXCL + attrblock = SFTPAttributes() + t, msg = self._request(CMD_OPEN, filename, imode, attrblock) + if t != CMD_HANDLE: + raise SFTPError("Expected handle") + handle = msg.get_binary() + self._log( + DEBUG, + "open({!r}, {!r}) -> {}".format( + filename, mode, u(hexlify(handle)) + ), + ) + return SFTPFile(self, handle, mode, bufsize) + + # Python continues to vacillate about "open" vs "file"... + file = open + + def remove(self, path): + """ + Remove the file at the given path. This only works on files; for + removing folders (directories), use `rmdir`. + + :param str path: path (absolute or relative) of the file to remove + + :raises: ``IOError`` -- if the path refers to a folder (directory) + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "remove({!r})".format(path)) + self._request(CMD_REMOVE, path) + + unlink = remove + + def rename(self, oldpath, newpath): + """ + Rename a file or folder from ``oldpath`` to ``newpath``. + + .. note:: + This method implements 'standard' SFTP ``RENAME`` behavior; those + seeking the OpenSSH "POSIX rename" extension behavior should use + `posix_rename`. + + :param str oldpath: + existing name of the file or folder + :param str newpath: + new name for the file or folder, must not exist already + + :raises: + ``IOError`` -- if ``newpath`` is a folder, or something else goes + wrong + """ + oldpath = self._adjust_cwd(oldpath) + newpath = self._adjust_cwd(newpath) + self._log(DEBUG, "rename({!r}, {!r})".format(oldpath, newpath)) + self._request(CMD_RENAME, oldpath, newpath) + + def posix_rename(self, oldpath, newpath): + """ + Rename a file or folder from ``oldpath`` to ``newpath``, following + posix conventions. + + :param str oldpath: existing name of the file or folder + :param str newpath: new name for the file or folder, will be + overwritten if it already exists + + :raises: + ``IOError`` -- if ``newpath`` is a folder, posix-rename is not + supported by the server or something else goes wrong + + :versionadded: 2.2 + """ + oldpath = self._adjust_cwd(oldpath) + newpath = self._adjust_cwd(newpath) + self._log(DEBUG, "posix_rename({!r}, {!r})".format(oldpath, newpath)) + self._request( + CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath + ) + + def mkdir(self, path, mode=o777): + """ + Create a folder (directory) named ``path`` with numeric mode ``mode``. + The default mode is 0777 (octal). On some systems, mode is ignored. + Where it is used, the current umask value is first masked out. + + :param str path: name of the folder to create + :param int mode: permissions (posix-style) for the newly-created folder + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "mkdir({!r}, {!r})".format(path, mode)) + attr = SFTPAttributes() + attr.st_mode = mode + self._request(CMD_MKDIR, path, attr) + + def rmdir(self, path): + """ + Remove the folder named ``path``. 
+ + :param str path: name of the folder to remove + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "rmdir({!r})".format(path)) + self._request(CMD_RMDIR, path) + + def stat(self, path): + """ + Retrieve information about a file on the remote system. The return + value is an object whose attributes correspond to the attributes of + Python's ``stat`` structure as returned by ``os.stat``, except that it + contains fewer fields. An SFTP server may return as much or as little + info as it wants, so the results may vary from server to server. + + Unlike a Python `python:stat` object, the result may not be accessed as + a tuple. This is mostly due to the author's slack factor. + + The fields supported are: ``st_mode``, ``st_size``, ``st_uid``, + ``st_gid``, ``st_atime``, and ``st_mtime``. + + :param str path: the filename to stat + :return: + an `.SFTPAttributes` object containing attributes about the given + file + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "stat({!r})".format(path)) + t, msg = self._request(CMD_STAT, path) + if t != CMD_ATTRS: + raise SFTPError("Expected attributes") + return SFTPAttributes._from_msg(msg) + + def lstat(self, path): + """ + Retrieve information about a file on the remote system, without + following symbolic links (shortcuts). This otherwise behaves exactly + the same as `stat`. + + :param str path: the filename to stat + :return: + an `.SFTPAttributes` object containing attributes about the given + file + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "lstat({!r})".format(path)) + t, msg = self._request(CMD_LSTAT, path) + if t != CMD_ATTRS: + raise SFTPError("Expected attributes") + return SFTPAttributes._from_msg(msg) + + def symlink(self, source, dest): + """ + Create a symbolic link to the ``source`` path at ``destination``. + + :param str source: path of the original file + :param str dest: path of the newly created symlink + """ + dest = self._adjust_cwd(dest) + self._log(DEBUG, "symlink({!r}, {!r})".format(source, dest)) + source = b(source) + self._request(CMD_SYMLINK, source, dest) + + def chmod(self, path, mode): + """ + Change the mode (permissions) of a file. The permissions are + unix-style and identical to those used by Python's `os.chmod` + function. + + :param str path: path of the file to change the permissions of + :param int mode: new permissions + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "chmod({!r}, {!r})".format(path, mode)) + attr = SFTPAttributes() + attr.st_mode = mode + self._request(CMD_SETSTAT, path, attr) + + def chown(self, path, uid, gid): + """ + Change the owner (``uid``) and group (``gid``) of a file. As with + Python's `os.chown` function, you must pass both arguments, so if you + only want to change one, use `stat` first to retrieve the current + owner and group. + + :param str path: path of the file to change the owner and group of + :param int uid: new owner's uid + :param int gid: new group id + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "chown({!r}, {!r}, {!r})".format(path, uid, gid)) + attr = SFTPAttributes() + attr.st_uid, attr.st_gid = uid, gid + self._request(CMD_SETSTAT, path, attr) + + def utime(self, path, times): + """ + Set the access and modified times of the file specified by ``path``. + If ``times`` is ``None``, then the file's access and modified times + are set to the current time. Otherwise, ``times`` must be a 2-tuple + of numbers, of the form ``(atime, mtime)``, which is used to set the + access and modified times, respectively. 
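
The attribute setters above in one sketch (``sftp`` is a connected client; the path, uid, and gid are hypothetical). Each call travels as a single ``CMD_SETSTAT`` carrying only the relevant flag bits:

    sftp.chmod("data.bin", 0o644)       # FLAG_PERMISSIONS
    sftp.chown("data.bin", 1000, 1000)  # FLAG_UIDGID -- both ids required
    sftp.utime("data.bin", None)        # None sets atime/mtime to "now"
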
This bizarre API is mimicked + from Python for the sake of consistency -- I apologize. + + :param str path: path of the file to modify + :param tuple times: + ``None`` or a tuple of (access time, modified time) in standard + internet epoch time (seconds since 01 January 1970 GMT) + """ + path = self._adjust_cwd(path) + if times is None: + times = (time.time(), time.time()) + self._log(DEBUG, "utime({!r}, {!r})".format(path, times)) + attr = SFTPAttributes() + attr.st_atime, attr.st_mtime = times + self._request(CMD_SETSTAT, path, attr) + + def truncate(self, path, size): + """ + Change the size of the file specified by ``path``. This usually + extends or shrinks the size of the file, just like the `~file.truncate` + method on Python file objects. + + :param str path: path of the file to modify + :param int size: the new size of the file + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "truncate({!r}, {!r})".format(path, size)) + attr = SFTPAttributes() + attr.st_size = size + self._request(CMD_SETSTAT, path, attr) + + def readlink(self, path): + """ + Return the target of a symbolic link (shortcut). You can use + `symlink` to create these. The result may be either an absolute or + relative pathname. + + :param str path: path of the symbolic link file + :return: target path, as a `str` + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "readlink({!r})".format(path)) + t, msg = self._request(CMD_READLINK, path) + if t != CMD_NAME: + raise SFTPError("Expected name response") + count = msg.get_int() + if count == 0: + return None + if count != 1: + raise SFTPError("Readlink returned {} results".format(count)) + return _to_unicode(msg.get_string()) + + def normalize(self, path): + """ + Return the normalized path (on the server) of a given path. This + can be used to quickly resolve symbolic links or determine what the + server is considering to be the "current folder" (by passing ``'.'`` + as ``path``). + + :param str path: path to be normalized + :return: normalized form of the given path (as a `str`) + + :raises: ``IOError`` -- if the path can't be resolved on the server + """ + path = self._adjust_cwd(path) + self._log(DEBUG, "normalize({!r})".format(path)) + t, msg = self._request(CMD_REALPATH, path) + if t != CMD_NAME: + raise SFTPError("Expected name response") + count = msg.get_int() + if count != 1: + raise SFTPError("Realpath returned {} results".format(count)) + return msg.get_text() + + def chdir(self, path=None): + """ + Change the "current directory" of this SFTP session. Since SFTP + doesn't really have the concept of a current working directory, this is + emulated by Paramiko. Once you use this method to set a working + directory, all operations on this `.SFTPClient` object will be relative + to that path. You can pass in ``None`` to stop using a current working + directory. + + :param str path: new current working directory + + :raises: + ``IOError`` -- if the requested path doesn't exist on the server + + .. versionadded:: 1.4 + """ + if path is None: + self._cwd = None + return + if not stat.S_ISDIR(self.stat(path).st_mode): + code = errno.ENOTDIR + raise SFTPError(code, "{}: {}".format(os.strerror(code), path)) + self._cwd = b(self.normalize(path)) + + def getcwd(self): + """ + Return the "current working directory" for this SFTP session, as + emulated by Paramiko. If no directory has been set with `chdir`, + this method will return ``None``. + + .. 
versionadded:: 1.4 + """ + # TODO: make class initialize with self._cwd set to self.normalize('.') + return self._cwd and u(self._cwd) + + def _transfer_with_callback(self, reader, writer, file_size, callback): + size = 0 + while True: + data = reader.read(32768) + writer.write(data) + size += len(data) + if len(data) == 0: + break + if callback is not None: + callback(size, file_size) + return size + + def putfo(self, fl, remotepath, file_size=0, callback=None, confirm=True): + """ + Copy the contents of an open file object (``fl``) to the SFTP server as + ``remotepath``. Any exception raised by operations will be passed + through. + + The SFTP operations use pipelining for speed. + + :param fl: opened file or file-like object to copy + :param str remotepath: the destination path on the SFTP server + :param int file_size: + optional size parameter passed to callback. If none is specified, + size defaults to 0 + :param callable callback: + optional callback function (form: ``func(int, int)``) that accepts + the bytes transferred so far and the total bytes to be transferred + (since 1.7.4) + :param bool confirm: + whether to do a stat() on the file afterwards to confirm the file + size (since 1.7.7) + + :return: + an `.SFTPAttributes` object containing attributes about the given + file. + + .. versionadded:: 1.10 + """ + with self.file(remotepath, "wb") as fr: + fr.set_pipelined(True) + size = self._transfer_with_callback( + reader=fl, writer=fr, file_size=file_size, callback=callback + ) + if confirm: + s = self.stat(remotepath) + if s.st_size != size: + raise IOError( + "size mismatch in put! {} != {}".format(s.st_size, size) + ) + else: + s = SFTPAttributes() + return s + + def put(self, localpath, remotepath, callback=None, confirm=True): + """ + Copy a local file (``localpath``) to the SFTP server as ``remotepath``. + Any exception raised by operations will be passed through. This + method is primarily provided as a convenience. + + The SFTP operations use pipelining for speed. + + :param str localpath: the local file to copy + :param str remotepath: the destination path on the SFTP server. Note + that the filename should be included. Only specifying a directory + may result in an error. + :param callable callback: + optional callback function (form: ``func(int, int)``) that accepts + the bytes transferred so far and the total bytes to be transferred + :param bool confirm: + whether to do a stat() on the file afterwards to confirm the file + size + + :return: an `.SFTPAttributes` object containing attributes about the + given file + + .. versionadded:: 1.4 + .. versionchanged:: 1.7.4 + ``callback`` and rich attribute return value added. + .. versionchanged:: 1.7.7 + ``confirm`` param added. + """ + file_size = os.stat(localpath).st_size + with open(localpath, "rb") as fl: + return self.putfo(fl, remotepath, file_size, callback, confirm) + + def getfo( + self, + remotepath, + fl, + callback=None, + prefetch=True, + max_concurrent_prefetch_requests=None, + ): + """ + Copy a remote file (``remotepath``) from the SFTP server and write to + an open file or file-like object, ``fl``. Any exception raised by + operations will be passed through. This method is primarily provided + as a convenience. 
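
A transfer sketch for `put` with a progress callback, as documented above (``sftp`` is a connected client, paths hypothetical):

    def progress(transferred, total):
        print("{}/{} bytes".format(transferred, total))

    attrs = sftp.put("local.tar", "/upload/local.tar",
                     callback=progress, confirm=True)
    print("uploaded size:", attrs.st_size)  # stat()-confirmed by putfo
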
+
+        :param str remotepath: the remote file to copy
+        :param fl:
+            opened file or file-like object into which the remote file's
+            contents are written
+        :param callable callback:
+            optional callback function (form: ``func(int, int)``) that accepts
+            the bytes transferred so far and the total bytes to be transferred
+        :param bool prefetch:
+            controls whether prefetching is performed (default: True)
+        :param int max_concurrent_prefetch_requests:
+            The maximum number of concurrent read requests to prefetch. See
+            `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param)
+            for details.
+        :return: the number of bytes written to the opened file object
+
+        .. versionadded:: 1.10
+        .. versionchanged:: 2.8
+            Added the ``prefetch`` keyword argument.
+        .. versionchanged:: 3.3
+            Added ``max_concurrent_prefetch_requests``.
+        """
+        file_size = self.stat(remotepath).st_size
+        with self.open(remotepath, "rb") as fr:
+            if prefetch:
+                fr.prefetch(file_size, max_concurrent_prefetch_requests)
+            return self._transfer_with_callback(
+                reader=fr, writer=fl, file_size=file_size, callback=callback
+            )
+
+    def get(
+        self,
+        remotepath,
+        localpath,
+        callback=None,
+        prefetch=True,
+        max_concurrent_prefetch_requests=None,
+    ):
+        """
+        Copy a remote file (``remotepath``) from the SFTP server to the local
+        host as ``localpath``. Any exception raised by operations will be
+        passed through. This method is primarily provided as a convenience.
+
+        :param str remotepath: the remote file to copy
+        :param str localpath: the destination path on the local host
+        :param callable callback:
+            optional callback function (form: ``func(int, int)``) that accepts
+            the bytes transferred so far and the total bytes to be transferred
+        :param bool prefetch:
+            controls whether prefetching is performed (default: True)
+        :param int max_concurrent_prefetch_requests:
+            The maximum number of concurrent read requests to prefetch.
+            When this is ``None`` (the default), do not limit the number of
+            concurrent prefetch requests. Note: OpenSSH's sftp internally
+            imposes a limit of 64 concurrent requests, while Paramiko imposes
+            no limit by default; consider setting a limit if a file can be
+            successfully received with sftp but hangs with Paramiko.
+
+        .. versionadded:: 1.4
+        .. versionchanged:: 1.7.4
+            Added the ``callback`` param
+        .. versionchanged:: 2.8
+            Added the ``prefetch`` keyword argument.
+        .. versionchanged:: 3.3
+            Added ``max_concurrent_prefetch_requests``.
+        """
+        with open(localpath, "wb") as fl:
+            size = self.getfo(
+                remotepath,
+                fl,
+                callback,
+                prefetch,
+                max_concurrent_prefetch_requests,
+            )
+        s = os.stat(localpath)
+        if s.st_size != size:
+            raise IOError(
+                "size mismatch in get! {} != {}".format(s.st_size, size)
+            )
+
+    # ...internals...
+
+    def _request(self, t, *args):
+        num = self._async_request(type(None), t, *args)
+        return self._read_response(num)
+
+    def _async_request(self, fileobj, t, *args):
+        # this method may be called from other threads (prefetch)
+        self._lock.acquire()
+        try:
+            msg = Message()
+            msg.add_int(self.request_number)
+            for item in args:
+                if isinstance(item, int64):
+                    msg.add_int64(item)
+                elif isinstance(item, int):
+                    msg.add_int(item)
+                elif isinstance(item, SFTPAttributes):
+                    item._pack(msg)
+                else:
+                    # For all other types, rely on as_string() to either coerce
+                    # to bytes before writing or raise a suitable exception.
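+                    # Each request body is the uint32 request number followed
+                    # by these marshalled arguments; the server's reply echoes
+                    # that number, which _read_response (below) uses to match
+                    # possibly out-of-order replies back to their requests via
+                    # self._expecting.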
+ msg.add_string(item) + num = self.request_number + self._expecting[num] = fileobj + self.request_number += 1 + finally: + self._lock.release() + self._send_packet(t, msg) + return num + + def _read_response(self, waitfor=None): + while True: + try: + t, data = self._read_packet() + except EOFError as e: + raise SSHException("Server connection dropped: {}".format(e)) + msg = Message(data) + num = msg.get_int() + self._lock.acquire() + try: + if num not in self._expecting: + # might be response for a file that was closed before + # responses came back + self._log(DEBUG, "Unexpected response #{}".format(num)) + if waitfor is None: + # just doing a single check + break + continue + fileobj = self._expecting[num] + del self._expecting[num] + finally: + self._lock.release() + if num == waitfor: + # synchronous + if t == CMD_STATUS: + self._convert_status(msg) + return t, msg + + # can not rewrite this to deal with E721, either as a None check + # nor as not an instance of None or NoneType + if fileobj is not type(None): # noqa + fileobj._async_response(t, msg, num) + if waitfor is None: + # just doing a single check + break + return None, None + + def _finish_responses(self, fileobj): + while fileobj in self._expecting.values(): + self._read_response() + fileobj._check_exception() + + def _convert_status(self, msg): + """ + Raises EOFError or IOError on error status; otherwise does nothing. + """ + code = msg.get_int() + text = msg.get_text() + if code == SFTP_OK: + return + elif code == SFTP_EOF: + raise EOFError(text) + elif code == SFTP_NO_SUCH_FILE: + # clever idea from john a. meinel: map the error codes to errno + raise IOError(errno.ENOENT, text) + elif code == SFTP_PERMISSION_DENIED: + raise IOError(errno.EACCES, text) + else: + raise IOError(text) + + def _adjust_cwd(self, path): + """ + Return an adjusted path if we're emulating a "current working + directory" for the server. + """ + path = b(path) + if self._cwd is None: + return path + if len(path) and path[0:1] == b_slash: + # absolute path + return path + if self._cwd == b_slash: + return self._cwd + path + return self._cwd + b_slash + path + + +class SFTP(SFTPClient): + """ + An alias for `.SFTPClient` for backwards compatibility. + """ + + pass diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp_file.py b/venv/lib/python3.12/site-packages/paramiko/sftp_file.py new file mode 100644 index 0000000..c74695e --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp_file.py @@ -0,0 +1,594 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
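
Because `_convert_status` above maps SFTP status codes onto ``IOError`` with real ``errno`` values, client code can branch idiomatically; a sketch (``sftp`` is a connected client):

    import errno

    try:
        sftp.stat("/no/such/file")
    except IOError as e:
        if e.errno == errno.ENOENT:
            print("remote file does not exist")
        else:
            raise
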
+ +""" +SFTP file object +""" + + +from binascii import hexlify +from collections import deque +import socket +import threading +import time +from paramiko.common import DEBUG, io_sleep + +from paramiko.file import BufferedFile +from paramiko.util import u +from paramiko.sftp import ( + CMD_CLOSE, + CMD_READ, + CMD_DATA, + SFTPError, + CMD_WRITE, + CMD_STATUS, + CMD_FSTAT, + CMD_ATTRS, + CMD_FSETSTAT, + CMD_EXTENDED, + int64, +) +from paramiko.sftp_attr import SFTPAttributes + + +class SFTPFile(BufferedFile): + """ + Proxy object for a file on the remote server, in client mode SFTP. + + Instances of this class may be used as context managers in the same way + that built-in Python file objects are. + """ + + # Some sftp servers will choke if you send read/write requests larger than + # this size. + MAX_REQUEST_SIZE = 32768 + + def __init__(self, sftp, handle, mode="r", bufsize=-1): + BufferedFile.__init__(self) + self.sftp = sftp + self.handle = handle + BufferedFile._set_mode(self, mode, bufsize) + self.pipelined = False + self._prefetching = False + self._prefetch_done = False + self._prefetch_data = {} + self._prefetch_extents = {} + self._prefetch_lock = threading.Lock() + self._saved_exception = None + self._reqs = deque() + + def __del__(self): + self._close(async_=True) + + def close(self): + """ + Close the file. + """ + self._close(async_=False) + + def _close(self, async_=False): + # We allow double-close without signaling an error, because real + # Python file objects do. However, we must protect against actually + # sending multiple CMD_CLOSE packets, because after we close our + # handle, the same handle may be re-allocated by the server, and we + # may end up mysteriously closing some random other file. (This is + # especially important because we unconditionally call close() from + # __del__.) + if self._closed: + return + self.sftp._log(DEBUG, "close({})".format(u(hexlify(self.handle)))) + if self.pipelined: + self.sftp._finish_responses(self) + BufferedFile.close(self) + try: + if async_: + # GC'd file handle could be called from an arbitrary thread + # -- don't wait for a response + self.sftp._async_request(type(None), CMD_CLOSE, self.handle) + else: + self.sftp._request(CMD_CLOSE, self.handle) + except EOFError: + # may have outlived the Transport connection + pass + except (IOError, socket.error): + # may have outlived the Transport connection + pass + + def _data_in_prefetch_requests(self, offset, size): + k = [ + x for x in list(self._prefetch_extents.values()) if x[0] <= offset + ] + if len(k) == 0: + return False + k.sort(key=lambda x: x[0]) + buf_offset, buf_size = k[-1] + if buf_offset + buf_size <= offset: + # prefetch request ends before this one begins + return False + if buf_offset + buf_size >= offset + size: + # inclusive + return True + # well, we have part of the request. see if another chunk has + # the rest. + return self._data_in_prefetch_requests( + buf_offset + buf_size, offset + size - buf_offset - buf_size + ) + + def _data_in_prefetch_buffers(self, offset): + """ + if a block of data is present in the prefetch buffers, at the given + offset, return the offset of the relevant prefetch buffer. otherwise, + return None. this guarantees nothing about the number of bytes + collected in the prefetch buffer so far. 
+ """ + k = [i for i in self._prefetch_data.keys() if i <= offset] + if len(k) == 0: + return None + index = max(k) + buf_offset = offset - index + if buf_offset >= len(self._prefetch_data[index]): + # it's not here + return None + return index + + def _read_prefetch(self, size): + """ + read data out of the prefetch buffer, if possible. if the data isn't + in the buffer, return None. otherwise, behaves like a normal read. + """ + # while not closed, and haven't fetched past the current position, + # and haven't reached EOF... + while True: + offset = self._data_in_prefetch_buffers(self._realpos) + if offset is not None: + break + if self._prefetch_done or self._closed: + break + self.sftp._read_response() + self._check_exception() + if offset is None: + self._prefetching = False + return None + prefetch = self._prefetch_data[offset] + del self._prefetch_data[offset] + + buf_offset = self._realpos - offset + if buf_offset > 0: + self._prefetch_data[offset] = prefetch[:buf_offset] + prefetch = prefetch[buf_offset:] + if size < len(prefetch): + self._prefetch_data[self._realpos + size] = prefetch[size:] + prefetch = prefetch[:size] + return prefetch + + def _read(self, size): + size = min(size, self.MAX_REQUEST_SIZE) + if self._prefetching: + data = self._read_prefetch(size) + if data is not None: + return data + t, msg = self.sftp._request( + CMD_READ, self.handle, int64(self._realpos), int(size) + ) + if t != CMD_DATA: + raise SFTPError("Expected data") + return msg.get_string() + + def _write(self, data): + # may write less than requested if it would exceed max packet size + chunk = min(len(data), self.MAX_REQUEST_SIZE) + sftp_async_request = self.sftp._async_request( + type(None), + CMD_WRITE, + self.handle, + int64(self._realpos), + data[:chunk], + ) + self._reqs.append(sftp_async_request) + if not self.pipelined or ( + len(self._reqs) > 100 and self.sftp.sock.recv_ready() + ): + while len(self._reqs): + req = self._reqs.popleft() + t, msg = self.sftp._read_response(req) + if t != CMD_STATUS: + raise SFTPError("Expected status") + # convert_status already called + return chunk + + def settimeout(self, timeout): + """ + Set a timeout on read/write operations on the underlying socket or + ssh `.Channel`. + + :param float timeout: + seconds to wait for a pending read/write operation before raising + ``socket.timeout``, or ``None`` for no timeout + + .. seealso:: `.Channel.settimeout` + """ + self.sftp.sock.settimeout(timeout) + + def gettimeout(self): + """ + Returns the timeout in seconds (as a `float`) associated with the + socket or ssh `.Channel` used for this file. + + .. seealso:: `.Channel.gettimeout` + """ + return self.sftp.sock.gettimeout() + + def setblocking(self, blocking): + """ + Set blocking or non-blocking mode on the underiying socket or ssh + `.Channel`. + + :param int blocking: + 0 to set non-blocking mode; non-0 to set blocking mode. + + .. seealso:: `.Channel.setblocking` + """ + self.sftp.sock.setblocking(blocking) + + def seekable(self): + """ + Check if the file supports random access. + + :return: + `True` if the file supports random access. If `False`, + :meth:`seek` will raise an exception + """ + return True + + def seek(self, offset, whence=0): + """ + Set the file's current position. + + See `file.seek` for details. 
+ """ + self.flush() + if whence == self.SEEK_SET: + self._realpos = self._pos = offset + elif whence == self.SEEK_CUR: + self._pos += offset + self._realpos = self._pos + else: + self._realpos = self._pos = self._get_size() + offset + self._rbuffer = bytes() + + def stat(self): + """ + Retrieve information about this file from the remote system. This is + exactly like `.SFTPClient.stat`, except that it operates on an + already-open file. + + :returns: + an `.SFTPAttributes` object containing attributes about this file. + """ + t, msg = self.sftp._request(CMD_FSTAT, self.handle) + if t != CMD_ATTRS: + raise SFTPError("Expected attributes") + return SFTPAttributes._from_msg(msg) + + def chmod(self, mode): + """ + Change the mode (permissions) of this file. The permissions are + unix-style and identical to those used by Python's `os.chmod` + function. + + :param int mode: new permissions + """ + self.sftp._log( + DEBUG, "chmod({}, {!r})".format(hexlify(self.handle), mode) + ) + attr = SFTPAttributes() + attr.st_mode = mode + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def chown(self, uid, gid): + """ + Change the owner (``uid``) and group (``gid``) of this file. As with + Python's `os.chown` function, you must pass both arguments, so if you + only want to change one, use `stat` first to retrieve the current + owner and group. + + :param int uid: new owner's uid + :param int gid: new group id + """ + self.sftp._log( + DEBUG, + "chown({}, {!r}, {!r})".format(hexlify(self.handle), uid, gid), + ) + attr = SFTPAttributes() + attr.st_uid, attr.st_gid = uid, gid + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def utime(self, times): + """ + Set the access and modified times of this file. If + ``times`` is ``None``, then the file's access and modified times are + set to the current time. Otherwise, ``times`` must be a 2-tuple of + numbers, of the form ``(atime, mtime)``, which is used to set the + access and modified times, respectively. This bizarre API is mimicked + from Python for the sake of consistency -- I apologize. + + :param tuple times: + ``None`` or a tuple of (access time, modified time) in standard + internet epoch time (seconds since 01 January 1970 GMT) + """ + if times is None: + times = (time.time(), time.time()) + self.sftp._log( + DEBUG, "utime({}, {!r})".format(hexlify(self.handle), times) + ) + attr = SFTPAttributes() + attr.st_atime, attr.st_mtime = times + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def truncate(self, size): + """ + Change the size of this file. This usually extends + or shrinks the size of the file, just like the ``truncate()`` method on + Python file objects. + + :param size: the new size of the file + """ + self.sftp._log( + DEBUG, "truncate({}, {!r})".format(hexlify(self.handle), size) + ) + attr = SFTPAttributes() + attr.st_size = size + self.sftp._request(CMD_FSETSTAT, self.handle, attr) + + def check(self, hash_algorithm, offset=0, length=0, block_size=0): + """ + Ask the server for a hash of a section of this file. This can be used + to verify a successful upload or download, or for various rsync-like + operations. + + The file is hashed from ``offset``, for ``length`` bytes. + If ``length`` is 0, the remainder of the file is hashed. Thus, if both + ``offset`` and ``length`` are zero, the entire file is hashed. + + Normally, ``block_size`` will be 0 (the default), and this method will + return a byte string representing the requested hash (for example, a + string of length 16 for MD5, or 20 for SHA-1). 
If a non-zero
+        ``block_size`` is given, each chunk of the file (from ``offset`` to
+        ``offset + length``) of ``block_size`` bytes is computed as a separate
+        hash. The hash results are all concatenated and returned as a single
+        string.
+
+        For example, ``check('sha1', 0, 1024, 512)`` will return a string of
+        length 40. The first 20 bytes will be the SHA-1 of the first 512 bytes
+        of the file, and the last 20 bytes will be the SHA-1 of the next 512
+        bytes.
+
+        :param str hash_algorithm:
+            the name of the hash algorithm to use (normally ``"sha1"`` or
+            ``"md5"``)
+        :param offset:
+            offset into the file to begin hashing (0 means to start from the
+            beginning)
+        :param length:
+            number of bytes to hash (0 means continue to the end of the file)
+        :param int block_size:
+            number of bytes to hash per result (must not be less than 256; 0
+            means to compute only one hash of the entire segment)
+        :return:
+            `str` of bytes representing the hash of each block, concatenated
+            together
+
+        :raises:
+            ``IOError`` -- if the server doesn't support the "check-file"
+            extension, or possibly doesn't support the hash algorithm requested
+
+        .. note:: Many (most?) servers don't support this extension yet.
+
+        .. versionadded:: 1.4
+        """
+        t, msg = self.sftp._request(
+            CMD_EXTENDED,
+            "check-file",
+            self.handle,
+            hash_algorithm,
+            int64(offset),
+            int64(length),
+            block_size,
+        )
+        msg.get_text()  # ext
+        msg.get_text()  # alg
+        data = msg.get_remainder()
+        return data
+
+    def set_pipelined(self, pipelined=True):
+        """
+        Turn on/off the pipelining of write operations to this file. When
+        pipelining is on, paramiko won't wait for the server response after
+        each write operation. Instead, they're collected as they come in. At
+        the first non-write operation (including `.close`), all remaining
+        server responses are collected. This means that if there was an error
+        with one of your later writes, an exception might be thrown from within
+        `.close` instead of `.write`.
+
+        By default, files are not pipelined.
+
+        :param bool pipelined:
+            ``True`` if pipelining should be turned on for this file; ``False``
+            otherwise
+
+        .. versionadded:: 1.5
+        """
+        self.pipelined = pipelined
+
+    def prefetch(self, file_size=None, max_concurrent_requests=None):
+        """
+        Pre-fetch the remaining contents of this file in anticipation of future
+        `.read` calls. If reading the entire file, pre-fetching can
+        dramatically improve the download speed by avoiding roundtrip latency.
+        The file's contents are incrementally buffered in a background thread.
+
+        The prefetched data is stored in a buffer until read via the `.read`
+        method. Once data has been read, it's removed from the buffer. The
+        data may be read in a random order (using `.seek`); chunks of the
+        buffer that haven't been read will continue to be buffered.
+
+        :param int file_size:
+            When this is ``None`` (the default), this method calls `stat` to
+            determine the remote file size. In some situations, doing so can
+            cause exceptions or hangs (see `#562
+            <https://github.com/paramiko/paramiko/issues/562>`_); as a
+            workaround, one may call `stat` explicitly and pass its value in
+            via this parameter.
+        :param int max_concurrent_requests:
+            The maximum number of concurrent read requests to prefetch. See
+            `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param)
+            for details.
+
+        .. versionadded:: 1.5.1
+        .. versionchanged:: 1.16.0
+            The ``file_size`` parameter was added (with no default value).
+        .. versionchanged:: 1.16.1
+            The ``file_size`` parameter was made optional for backwards
+            compatibility.
+        ..
versionchanged:: 3.3 + Added ``max_concurrent_requests``. + """ + if file_size is None: + file_size = self.stat().st_size + + # queue up async reads for the rest of the file + chunks = [] + n = self._realpos + while n < file_size: + chunk = min(self.MAX_REQUEST_SIZE, file_size - n) + chunks.append((n, chunk)) + n += chunk + if len(chunks) > 0: + self._start_prefetch(chunks, max_concurrent_requests) + + def readv(self, chunks, max_concurrent_prefetch_requests=None): + """ + Read a set of blocks from the file by (offset, length). This is more + efficient than doing a series of `.seek` and `.read` calls, since the + prefetch machinery is used to retrieve all the requested blocks at + once. + + :param chunks: + a list of ``(offset, length)`` tuples indicating which sections of + the file to read + :param int max_concurrent_prefetch_requests: + The maximum number of concurrent read requests to prefetch. See + `.SFTPClient.get` (its ``max_concurrent_prefetch_requests`` param) + for details. + :return: a list of blocks read, in the same order as in ``chunks`` + + .. versionadded:: 1.5.4 + .. versionchanged:: 3.3 + Added ``max_concurrent_prefetch_requests``. + """ + self.sftp._log( + DEBUG, "readv({}, {!r})".format(hexlify(self.handle), chunks) + ) + + read_chunks = [] + for offset, size in chunks: + # don't fetch data that's already in the prefetch buffer + if self._data_in_prefetch_buffers( + offset + ) or self._data_in_prefetch_requests(offset, size): + continue + + # break up anything larger than the max read size + while size > 0: + chunk_size = min(size, self.MAX_REQUEST_SIZE) + read_chunks.append((offset, chunk_size)) + offset += chunk_size + size -= chunk_size + + self._start_prefetch(read_chunks, max_concurrent_prefetch_requests) + # now we can just devolve to a bunch of read()s :) + for x in chunks: + self.seek(x[0]) + yield self.read(x[1]) + + # ...internals... + + def _get_size(self): + try: + return self.stat().st_size + except: + return 0 + + def _start_prefetch(self, chunks, max_concurrent_requests=None): + self._prefetching = True + self._prefetch_done = False + + t = threading.Thread( + target=self._prefetch_thread, + args=(chunks, max_concurrent_requests), + ) + t.daemon = True + t.start() + + def _prefetch_thread(self, chunks, max_concurrent_requests): + # do these read requests in a temporary thread because there may be + # a lot of them, so it may block. 
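+        # Each iteration below issues one CMD_READ via _async_request; the
+        # replies arrive in _async_response, which files the data into
+        # self._prefetch_data keyed by offset. _prefetch_extents tracks the
+        # requests still in flight, and is what the optional
+        # max_concurrent_requests throttle polls against.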
+ for offset, length in chunks: + # Limit the number of concurrent requests in a busy-loop + if max_concurrent_requests is not None: + while True: + with self._prefetch_lock: + pf_len = len(self._prefetch_extents) + if pf_len < max_concurrent_requests: + break + time.sleep(io_sleep) + + num = self.sftp._async_request( + self, CMD_READ, self.handle, int64(offset), int(length) + ) + with self._prefetch_lock: + self._prefetch_extents[num] = (offset, length) + + def _async_response(self, t, msg, num): + if t == CMD_STATUS: + # save exception and re-raise it on next file operation + try: + self.sftp._convert_status(msg) + except Exception as e: + self._saved_exception = e + return + if t != CMD_DATA: + raise SFTPError("Expected data") + data = msg.get_string() + while True: + with self._prefetch_lock: + # spin if in race with _prefetch_thread + if num in self._prefetch_extents: + offset, length = self._prefetch_extents[num] + self._prefetch_data[offset] = data + del self._prefetch_extents[num] + if len(self._prefetch_extents) == 0: + self._prefetch_done = True + break + + def _check_exception(self): + """if there's a saved exception, raise & clear it""" + if self._saved_exception is not None: + x = self._saved_exception + self._saved_exception = None + raise x diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp_handle.py b/venv/lib/python3.12/site-packages/paramiko/sftp_handle.py new file mode 100644 index 0000000..b204652 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp_handle.py @@ -0,0 +1,196 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Abstraction of an SFTP file handle (for server mode). +""" + +import os +from paramiko.sftp import SFTP_OP_UNSUPPORTED, SFTP_OK +from paramiko.util import ClosingContextManager + + +class SFTPHandle(ClosingContextManager): + """ + Abstract object representing a handle to an open file (or folder) in an + SFTP server implementation. Each handle has a string representation used + by the client to refer to the underlying file. + + Server implementations can (and should) subclass SFTPHandle to implement + features of a file handle, like `stat` or `chattr`. + + Instances of this class may be used as context managers. + """ + + def __init__(self, flags=0): + """ + Create a new file handle representing a local file being served over + SFTP. If ``flags`` is passed in, it's used to determine if the file + is open in append mode. + + :param int flags: optional flags as passed to + `.SFTPServerInterface.open` + """ + self.__flags = flags + self.__name = None + # only for handles to folders: + self.__files = {} + self.__tell = None + + def close(self): + """ + When a client closes a file, this method is called on the handle. 
+ Normally you would use this method to close the underlying OS level + file object(s). + + The default implementation checks for attributes on ``self`` named + ``readfile`` and/or ``writefile``, and if either or both are present, + their ``close()`` methods are called. This means that if you are + using the default implementations of `read` and `write`, this + method's default implementation should be fine also. + """ + readfile = getattr(self, "readfile", None) + if readfile is not None: + readfile.close() + writefile = getattr(self, "writefile", None) + if writefile is not None: + writefile.close() + + def read(self, offset, length): + """ + Read up to ``length`` bytes from this file, starting at position + ``offset``. The offset may be a Python long, since SFTP allows it + to be 64 bits. + + If the end of the file has been reached, this method may return an + empty string to signify EOF, or it may also return ``SFTP_EOF``. + + The default implementation checks for an attribute on ``self`` named + ``readfile``, and if present, performs the read operation on the Python + file-like object found there. (This is meant as a time saver for the + common case where you are wrapping a Python file object.) + + :param offset: position in the file to start reading from. + :param int length: number of bytes to attempt to read. + :return: the `bytes` read, or an error code `int`. + """ + readfile = getattr(self, "readfile", None) + if readfile is None: + return SFTP_OP_UNSUPPORTED + try: + if self.__tell is None: + self.__tell = readfile.tell() + if offset != self.__tell: + readfile.seek(offset) + self.__tell = offset + data = readfile.read(length) + except IOError as e: + self.__tell = None + return SFTPServer.convert_errno(e.errno) + self.__tell += len(data) + return data + + def write(self, offset, data): + """ + Write ``data`` into this file at position ``offset``. Extending the + file past its original end is expected. Unlike Python's normal + ``write()`` methods, this method cannot do a partial write: it must + write all of ``data`` or else return an error. + + The default implementation checks for an attribute on ``self`` named + ``writefile``, and if present, performs the write operation on the + Python file-like object found there. The attribute is named + differently from ``readfile`` to make it easy to implement read-only + (or write-only) files, but if both attributes are present, they should + refer to the same file. + + :param offset: position in the file to start reading from. + :param bytes data: data to write into the file. + :return: an SFTP error code like ``SFTP_OK``. + """ + writefile = getattr(self, "writefile", None) + if writefile is None: + return SFTP_OP_UNSUPPORTED + try: + # in append mode, don't care about seeking + if (self.__flags & os.O_APPEND) == 0: + if self.__tell is None: + self.__tell = writefile.tell() + if offset != self.__tell: + writefile.seek(offset) + self.__tell = offset + writefile.write(data) + writefile.flush() + except IOError as e: + self.__tell = None + return SFTPServer.convert_errno(e.errno) + if self.__tell is not None: + self.__tell += len(data) + return SFTP_OK + + def stat(self): + """ + Return an `.SFTPAttributes` object referring to this open file, or an + error code. This is equivalent to `.SFTPServerInterface.stat`, except + it's called on an open file instead of a path. + + :return: + an attributes object for the given file, or an SFTP error code + (like ``SFTP_PERMISSION_DENIED``). 
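
A minimal server-side handle sketch relying on the default `read`/`write` implementations above, which look for ``readfile``/``writefile`` attributes (the local-path mapping here is hypothetical):

    import os
    from paramiko.sftp_handle import SFTPHandle

    class SimpleHandle(SFTPHandle):
        def __init__(self, path, flags):
            super().__init__(flags)
            fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o666)
            f = os.fdopen(fd, "rb+")
            # the base-class read()/write()/close() operate on these:
            self.readfile = f
            self.writefile = f
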
+ :rtype: `.SFTPAttributes` or error code + """ + return SFTP_OP_UNSUPPORTED + + def chattr(self, attr): + """ + Change the attributes of this file. The ``attr`` object will contain + only those fields provided by the client in its request, so you should + check for the presence of fields before using them. + + :param .SFTPAttributes attr: the attributes to change on this file. + :return: an `int` error code like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + # ...internals... + + def _set_files(self, files): + """ + Used by the SFTP server code to cache a directory listing. (In + the SFTP protocol, listing a directory is a multi-stage process + requiring a temporary handle.) + """ + self.__files = files + + def _get_next_files(self): + """ + Used by the SFTP server code to retrieve a cached directory + listing. + """ + fnlist = self.__files[:16] + self.__files = self.__files[16:] + return fnlist + + def _get_name(self): + return self.__name + + def _set_name(self, name): + self.__name = name + + +from paramiko.sftp_server import SFTPServer diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp_server.py b/venv/lib/python3.12/site-packages/paramiko/sftp_server.py new file mode 100644 index 0000000..cd3910d --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp_server.py @@ -0,0 +1,537 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Server-mode SFTP support. +""" + +import os +import errno +import sys +from hashlib import md5, sha1 + +from paramiko import util +from paramiko.sftp import ( + BaseSFTP, + Message, + SFTP_FAILURE, + SFTP_PERMISSION_DENIED, + SFTP_NO_SUCH_FILE, + int64, +) +from paramiko.sftp_si import SFTPServerInterface +from paramiko.sftp_attr import SFTPAttributes +from paramiko.common import DEBUG +from paramiko.server import SubsystemHandler +from paramiko.util import b + + +# known hash algorithms for the "check-file" extension +from paramiko.sftp import ( + CMD_HANDLE, + SFTP_DESC, + CMD_STATUS, + SFTP_EOF, + CMD_NAME, + SFTP_BAD_MESSAGE, + CMD_EXTENDED_REPLY, + SFTP_FLAG_READ, + SFTP_FLAG_WRITE, + SFTP_FLAG_APPEND, + SFTP_FLAG_CREATE, + SFTP_FLAG_TRUNC, + SFTP_FLAG_EXCL, + CMD_NAMES, + CMD_OPEN, + CMD_CLOSE, + SFTP_OK, + CMD_READ, + CMD_DATA, + CMD_WRITE, + CMD_REMOVE, + CMD_RENAME, + CMD_MKDIR, + CMD_RMDIR, + CMD_OPENDIR, + CMD_READDIR, + CMD_STAT, + CMD_ATTRS, + CMD_LSTAT, + CMD_FSTAT, + CMD_SETSTAT, + CMD_FSETSTAT, + CMD_READLINK, + CMD_SYMLINK, + CMD_REALPATH, + CMD_EXTENDED, + SFTP_OP_UNSUPPORTED, +) + +_hash_class = {"sha1": sha1, "md5": md5} + + +class SFTPServer(BaseSFTP, SubsystemHandler): + """ + Server-side SFTP subsystem support. Since this is a `.SubsystemHandler`, + it can be (and is meant to be) set as the handler for ``"sftp"`` requests. 
+ Use `.Transport.set_subsystem_handler` to activate this class. + """ + + def __init__( + self, + channel, + name, + server, + sftp_si=SFTPServerInterface, + *args, + **kwargs + ): + """ + The constructor for SFTPServer is meant to be called from within the + `.Transport` as a subsystem handler. ``server`` and any additional + parameters or keyword parameters are passed from the original call to + `.Transport.set_subsystem_handler`. + + :param .Channel channel: channel passed from the `.Transport`. + :param str name: name of the requested subsystem. + :param .ServerInterface server: + the server object associated with this channel and subsystem + :param sftp_si: + a subclass of `.SFTPServerInterface` to use for handling individual + requests. + """ + BaseSFTP.__init__(self) + SubsystemHandler.__init__(self, channel, name, server) + transport = channel.get_transport() + self.logger = util.get_logger(transport.get_log_channel() + ".sftp") + self.ultra_debug = transport.get_hexdump() + self.next_handle = 1 + # map of handle-string to SFTPHandle for files & folders: + self.file_table = {} + self.folder_table = {} + self.server = sftp_si(server, *args, **kwargs) + + def _log(self, level, msg): + if issubclass(type(msg), list): + for m in msg: + super()._log(level, "[chan " + self.sock.get_name() + "] " + m) + else: + super()._log(level, "[chan " + self.sock.get_name() + "] " + msg) + + def start_subsystem(self, name, transport, channel): + self.sock = channel + self._log(DEBUG, "Started sftp server on channel {!r}".format(channel)) + self._send_server_version() + self.server.session_started() + while True: + try: + t, data = self._read_packet() + except EOFError: + self._log(DEBUG, "EOF -- end of session") + return + except Exception as e: + self._log(DEBUG, "Exception on channel: " + str(e)) + self._log(DEBUG, util.tb_strings()) + return + msg = Message(data) + request_number = msg.get_int() + try: + self._process(t, request_number, msg) + except Exception as e: + self._log(DEBUG, "Exception in server processing: " + str(e)) + self._log(DEBUG, util.tb_strings()) + # send some kind of failure message, at least + try: + self._send_status(request_number, SFTP_FAILURE) + except: + pass + + def finish_subsystem(self): + self.server.session_ended() + super().finish_subsystem() + # close any file handles that were left open + # (so we can return them to the OS quickly) + for f in self.file_table.values(): + f.close() + for f in self.folder_table.values(): + f.close() + self.file_table = {} + self.folder_table = {} + + @staticmethod + def convert_errno(e): + """ + Convert an errno value (as from an ``OSError`` or ``IOError``) into a + standard SFTP result code. This is a convenience function for trapping + exceptions in server code and returning an appropriate result. + + :param int e: an errno code, as from ``OSError.errno``. + :return: an `int` SFTP error code like ``SFTP_NO_SUCH_FILE``. + """ + if e == errno.EACCES: + # permission denied + return SFTP_PERMISSION_DENIED + elif (e == errno.ENOENT) or (e == errno.ENOTDIR): + # no such file + return SFTP_NO_SUCH_FILE + else: + return SFTP_FAILURE + + @staticmethod + def set_file_attr(filename, attr): + """ + Change a file's attributes on the local filesystem. The contents of + ``attr`` are used to change the permissions, owner, group ownership, + and/or modification & access time of the file, depending on which + attributes are present in ``attr``. 
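
How `convert_errno` above is typically used from an `SFTPServerInterface` implementation; a sketch (the subclass and its direct local-path mapping are hypothetical):

    import os
    from paramiko.sftp import SFTP_OK
    from paramiko.sftp_server import SFTPServer
    from paramiko.sftp_si import SFTPServerInterface

    class MyServer(SFTPServerInterface):
        def remove(self, path):
            try:
                os.remove(path)
            except OSError as e:
                return SFTPServer.convert_errno(e.errno)
            return SFTP_OK
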
+ + This is meant to be a handy helper function for translating SFTP file + requests into local file operations. + + :param str filename: + name of the file to alter (should usually be an absolute path). + :param .SFTPAttributes attr: attributes to change. + """ + if sys.platform != "win32": + # mode operations are meaningless on win32 + if attr._flags & attr.FLAG_PERMISSIONS: + os.chmod(filename, attr.st_mode) + if attr._flags & attr.FLAG_UIDGID: + os.chown(filename, attr.st_uid, attr.st_gid) + if attr._flags & attr.FLAG_AMTIME: + os.utime(filename, (attr.st_atime, attr.st_mtime)) + if attr._flags & attr.FLAG_SIZE: + with open(filename, "w+") as f: + f.truncate(attr.st_size) + + # ...internals... + + def _response(self, request_number, t, *args): + msg = Message() + msg.add_int(request_number) + for item in args: + # NOTE: this is a very silly tiny class used for SFTPFile mostly + if isinstance(item, int64): + msg.add_int64(item) + elif isinstance(item, int): + msg.add_int(item) + elif isinstance(item, (str, bytes)): + msg.add_string(item) + elif type(item) is SFTPAttributes: + item._pack(msg) + else: + raise Exception( + "unknown type for {!r} type {!r}".format(item, type(item)) + ) + self._send_packet(t, msg) + + def _send_handle_response(self, request_number, handle, folder=False): + if not issubclass(type(handle), SFTPHandle): + # must be error code + self._send_status(request_number, handle) + return + handle._set_name(b("hx{:d}".format(self.next_handle))) + self.next_handle += 1 + if folder: + self.folder_table[handle._get_name()] = handle + else: + self.file_table[handle._get_name()] = handle + self._response(request_number, CMD_HANDLE, handle._get_name()) + + def _send_status(self, request_number, code, desc=None): + if desc is None: + try: + desc = SFTP_DESC[code] + except IndexError: + desc = "Unknown" + # some clients expect a "language" tag at the end + # (but don't mind it being blank) + self._response(request_number, CMD_STATUS, code, desc, "") + + def _open_folder(self, request_number, path): + resp = self.server.list_folder(path) + if issubclass(type(resp), list): + # got an actual list of filenames in the folder + folder = SFTPHandle() + folder._set_files(resp) + self._send_handle_response(request_number, folder, True) + return + # must be an error code + self._send_status(request_number, resp) + + def _read_folder(self, request_number, folder): + flist = folder._get_next_files() + if len(flist) == 0: + self._send_status(request_number, SFTP_EOF) + return + msg = Message() + msg.add_int(request_number) + msg.add_int(len(flist)) + for attr in flist: + msg.add_string(attr.filename) + msg.add_string(attr) + attr._pack(msg) + self._send_packet(CMD_NAME, msg) + + def _check_file(self, request_number, msg): + # this extension actually comes from v6 protocol, but since it's an + # extension, i feel like we can reasonably support it backported. + # it's very useful for verifying uploaded files or checking for + # rsync-like differences between local and remote files. 
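The client-side counterpart of this "check-file" handler is ``SFTPFile.check()``, which asks the server to hash a remote file so an upload can be verified without downloading it back. A hedged sketch (host, credentials, and paths are placeholders, and not all servers implement the extension, so expect an SFTP error where it is unsupported):

    import hashlib
    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect("sftp.example.com", username="user")
    sftp = client.open_sftp()

    with sftp.open("/upload/data.bin", "rb") as remote:
        # Defaults of offset=0, length=0, block_size=0 request one digest
        # over the whole file.
        remote_digest = remote.check("md5")
    with open("data.bin", "rb") as local:
        local_digest = hashlib.md5(local.read()).digest()
    assert remote_digest == local_digest
    client.close()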
+ handle = msg.get_binary() + alg_list = msg.get_list() + start = msg.get_int64() + length = msg.get_int64() + block_size = msg.get_int() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + f = self.file_table[handle] + for x in alg_list: + if x in _hash_class: + algname = x + alg = _hash_class[x] + break + else: + self._send_status( + request_number, SFTP_FAILURE, "No supported hash types found" + ) + return + if length == 0: + st = f.stat() + if not issubclass(type(st), SFTPAttributes): + self._send_status(request_number, st, "Unable to stat file") + return + length = st.st_size - start + if block_size == 0: + block_size = length + if block_size < 256: + self._send_status( + request_number, SFTP_FAILURE, "Block size too small" + ) + return + + sum_out = bytes() + offset = start + while offset < start + length: + blocklen = min(block_size, start + length - offset) + # don't try to read more than about 64KB at a time + chunklen = min(blocklen, 65536) + count = 0 + hash_obj = alg() + while count < blocklen: + data = f.read(offset, chunklen) + if not isinstance(data, bytes): + self._send_status( + request_number, data, "Unable to hash file" + ) + return + hash_obj.update(data) + count += len(data) + offset += count + sum_out += hash_obj.digest() + + msg = Message() + msg.add_int(request_number) + msg.add_string("check-file") + msg.add_string(algname) + msg.add_bytes(sum_out) + self._send_packet(CMD_EXTENDED_REPLY, msg) + + def _convert_pflags(self, pflags): + """convert SFTP-style open() flags to Python's os.open() flags""" + if (pflags & SFTP_FLAG_READ) and (pflags & SFTP_FLAG_WRITE): + flags = os.O_RDWR + elif pflags & SFTP_FLAG_WRITE: + flags = os.O_WRONLY + else: + flags = os.O_RDONLY + if pflags & SFTP_FLAG_APPEND: + flags |= os.O_APPEND + if pflags & SFTP_FLAG_CREATE: + flags |= os.O_CREAT + if pflags & SFTP_FLAG_TRUNC: + flags |= os.O_TRUNC + if pflags & SFTP_FLAG_EXCL: + flags |= os.O_EXCL + return flags + + def _process(self, t, request_number, msg): + self._log(DEBUG, "Request: {}".format(CMD_NAMES[t])) + if t == CMD_OPEN: + path = msg.get_text() + flags = self._convert_pflags(msg.get_int()) + attr = SFTPAttributes._from_msg(msg) + self._send_handle_response( + request_number, self.server.open(path, flags, attr) + ) + elif t == CMD_CLOSE: + handle = msg.get_binary() + if handle in self.folder_table: + del self.folder_table[handle] + self._send_status(request_number, SFTP_OK) + return + if handle in self.file_table: + self.file_table[handle].close() + del self.file_table[handle] + self._send_status(request_number, SFTP_OK) + return + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + elif t == CMD_READ: + handle = msg.get_binary() + offset = msg.get_int64() + length = msg.get_int() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + data = self.file_table[handle].read(offset, length) + if isinstance(data, (bytes, str)): + if len(data) == 0: + self._send_status(request_number, SFTP_EOF) + else: + self._response(request_number, CMD_DATA, data) + else: + self._send_status(request_number, data) + elif t == CMD_WRITE: + handle = msg.get_binary() + offset = msg.get_int64() + data = msg.get_binary() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + self._send_status( + request_number, self.file_table[handle].write(offset, data) + ) + elif t == 
CMD_REMOVE: + path = msg.get_text() + self._send_status(request_number, self.server.remove(path)) + elif t == CMD_RENAME: + oldpath = msg.get_text() + newpath = msg.get_text() + self._send_status( + request_number, self.server.rename(oldpath, newpath) + ) + elif t == CMD_MKDIR: + path = msg.get_text() + attr = SFTPAttributes._from_msg(msg) + self._send_status(request_number, self.server.mkdir(path, attr)) + elif t == CMD_RMDIR: + path = msg.get_text() + self._send_status(request_number, self.server.rmdir(path)) + elif t == CMD_OPENDIR: + path = msg.get_text() + self._open_folder(request_number, path) + return + elif t == CMD_READDIR: + handle = msg.get_binary() + if handle not in self.folder_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + folder = self.folder_table[handle] + self._read_folder(request_number, folder) + elif t == CMD_STAT: + path = msg.get_text() + resp = self.server.stat(path) + if issubclass(type(resp), SFTPAttributes): + self._response(request_number, CMD_ATTRS, resp) + else: + self._send_status(request_number, resp) + elif t == CMD_LSTAT: + path = msg.get_text() + resp = self.server.lstat(path) + if issubclass(type(resp), SFTPAttributes): + self._response(request_number, CMD_ATTRS, resp) + else: + self._send_status(request_number, resp) + elif t == CMD_FSTAT: + handle = msg.get_binary() + if handle not in self.file_table: + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + resp = self.file_table[handle].stat() + if issubclass(type(resp), SFTPAttributes): + self._response(request_number, CMD_ATTRS, resp) + else: + self._send_status(request_number, resp) + elif t == CMD_SETSTAT: + path = msg.get_text() + attr = SFTPAttributes._from_msg(msg) + self._send_status(request_number, self.server.chattr(path, attr)) + elif t == CMD_FSETSTAT: + handle = msg.get_binary() + attr = SFTPAttributes._from_msg(msg) + if handle not in self.file_table: + self._response( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) + return + self._send_status( + request_number, self.file_table[handle].chattr(attr) + ) + elif t == CMD_READLINK: + path = msg.get_text() + resp = self.server.readlink(path) + if isinstance(resp, (bytes, str)): + self._response( + request_number, CMD_NAME, 1, resp, "", SFTPAttributes() + ) + else: + self._send_status(request_number, resp) + elif t == CMD_SYMLINK: + # the sftp 2 draft is incorrect here! 
+ # path always follows target_path + target_path = msg.get_text() + path = msg.get_text() + self._send_status( + request_number, self.server.symlink(target_path, path) + ) + elif t == CMD_REALPATH: + path = msg.get_text() + rpath = self.server.canonicalize(path) + self._response( + request_number, CMD_NAME, 1, rpath, "", SFTPAttributes() + ) + elif t == CMD_EXTENDED: + tag = msg.get_text() + if tag == "check-file": + self._check_file(request_number, msg) + elif tag == "posix-rename@openssh.com": + oldpath = msg.get_text() + newpath = msg.get_text() + self._send_status( + request_number, self.server.posix_rename(oldpath, newpath) + ) + else: + self._send_status(request_number, SFTP_OP_UNSUPPORTED) + else: + self._send_status(request_number, SFTP_OP_UNSUPPORTED) + + +from paramiko.sftp_handle import SFTPHandle diff --git a/venv/lib/python3.12/site-packages/paramiko/sftp_si.py b/venv/lib/python3.12/site-packages/paramiko/sftp_si.py new file mode 100644 index 0000000..72b5db9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/sftp_si.py @@ -0,0 +1,316 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +An interface to override for SFTP server support. +""" + +import os +import sys +from paramiko.sftp import SFTP_OP_UNSUPPORTED + + +class SFTPServerInterface: + """ + This class defines an interface for controlling the behavior of paramiko + when using the `.SFTPServer` subsystem to provide an SFTP server. + + Methods on this class are called from the SFTP session's thread, so you can + block as long as necessary without affecting other sessions (even other + SFTP sessions). However, raising an exception will usually cause the SFTP + session to abruptly end, so you will usually want to catch exceptions and + return an appropriate error code. + + All paths are in string form instead of unicode because not all SFTP + clients & servers obey the requirement that paths be encoded in UTF-8. + """ + + def __init__(self, server, *args, **kwargs): + """ + Create a new SFTPServerInterface object. This method does nothing by + default and is meant to be overridden by subclasses. + + :param .ServerInterface server: + the server object associated with this channel and SFTP subsystem + """ + super().__init__(*args, **kwargs) + + def session_started(self): + """ + The SFTP server session has just started. This method is meant to be + overridden to perform any necessary setup before handling callbacks + from SFTP operations. + """ + pass + + def session_ended(self): + """ + The SFTP server session has just ended, either cleanly or via an + exception. This method is meant to be overridden to perform any + necessary cleanup before this `.SFTPServerInterface` object is + destroyed. 
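A sketch of wiring this interface into a running server, following the pattern of paramiko's demo SFTP server. The host key path is a placeholder, and ``MyServer`` (a ``ServerInterface`` that accepts the authentication) plus ``LocalSFTPServer`` (the interface sketch earlier) are assumed helpers, not part of this commit:

    import socket
    import paramiko

    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(("", 2200))
    listener.listen(1)
    conn, addr = listener.accept()

    t = paramiko.Transport(conn)
    t.add_server_key(paramiko.RSAKey.from_private_key_file("/path/to/host_key"))
    # Extra positional args are forwarded to SFTPServer's constructor,
    # which is how the SFTPServerInterface implementation gets plugged in.
    t.set_subsystem_handler("sftp", paramiko.SFTPServer, LocalSFTPServer)
    t.start_server(server=MyServer())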
+ """ + pass + + def open(self, path, flags, attr): + """ + Open a file on the server and create a handle for future operations + on that file. On success, a new object subclassed from `.SFTPHandle` + should be returned. This handle will be used for future operations + on the file (read, write, etc). On failure, an error code such as + ``SFTP_PERMISSION_DENIED`` should be returned. + + ``flags`` contains the requested mode for opening (read-only, + write-append, etc) as a bitset of flags from the ``os`` module: + + - ``os.O_RDONLY`` + - ``os.O_WRONLY`` + - ``os.O_RDWR`` + - ``os.O_APPEND`` + - ``os.O_CREAT`` + - ``os.O_TRUNC`` + - ``os.O_EXCL`` + + (One of ``os.O_RDONLY``, ``os.O_WRONLY``, or ``os.O_RDWR`` will always + be set.) + + The ``attr`` object contains requested attributes of the file if it + has to be created. Some or all attribute fields may be missing if + the client didn't specify them. + + .. note:: The SFTP protocol defines all files to be in "binary" mode. + There is no equivalent to Python's "text" mode. + + :param str path: + the requested path (relative or absolute) of the file to be opened. + :param int flags: + flags or'd together from the ``os`` module indicating the requested + mode for opening the file. + :param .SFTPAttributes attr: + requested attributes of the file if it is newly created. + :return: a new `.SFTPHandle` or error code. + """ + return SFTP_OP_UNSUPPORTED + + def list_folder(self, path): + """ + Return a list of files within a given folder. The ``path`` will use + posix notation (``"/"`` separates folder names) and may be an absolute + or relative path. + + The list of files is expected to be a list of `.SFTPAttributes` + objects, which are similar in structure to the objects returned by + ``os.stat``. In addition, each object should have its ``filename`` + field filled in, since this is important to a directory listing and + not normally present in ``os.stat`` results. The method + `.SFTPAttributes.from_stat` will usually do what you want. + + In case of an error, you should return one of the ``SFTP_*`` error + codes, such as ``SFTP_PERMISSION_DENIED``. + + :param str path: the requested path (relative or absolute) to be + listed. + :return: + a list of the files in the given folder, using `.SFTPAttributes` + objects. + + .. note:: + You should normalize the given ``path`` first (see the `os.path` + module) and check appropriate permissions before returning the list + of files. Be careful of malicious clients attempting to use + relative paths to escape restricted folders, if you're doing a + direct translation from the SFTP server path to your local + filesystem. + """ + return SFTP_OP_UNSUPPORTED + + def stat(self, path): + """ + Return an `.SFTPAttributes` object for a path on the server, or an + error code. If your server supports symbolic links (also known as + "aliases"), you should follow them. (`lstat` is the corresponding + call that doesn't follow symlinks/aliases.) + + :param str path: + the requested path (relative or absolute) to fetch file statistics + for. + :return: + an `.SFTPAttributes` object for the given file, or an SFTP error + code (like ``SFTP_PERMISSION_DENIED``). + """ + return SFTP_OP_UNSUPPORTED + + def lstat(self, path): + """ + Return an `.SFTPAttributes` object for a path on the server, or an + error code. If your server supports symbolic links (also known as + "aliases"), you should not follow them -- instead, you should + return data on the symlink or alias itself. 
(`stat` is the + corresponding call that follows symlinks/aliases.) + + :param str path: + the requested path (relative or absolute) to fetch file statistics + for. + :type path: str + :return: + an `.SFTPAttributes` object for the given file, or an SFTP error + code (like ``SFTP_PERMISSION_DENIED``). + """ + return SFTP_OP_UNSUPPORTED + + def remove(self, path): + """ + Delete a file, if possible. + + :param str path: + the requested path (relative or absolute) of the file to delete. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def rename(self, oldpath, newpath): + """ + Rename (or move) a file. The SFTP specification implies that this + method can be used to move an existing file into a different folder, + and since there's no other (easy) way to move files via SFTP, it's + probably a good idea to implement "move" in this method too, even for + files that cross disk partition boundaries, if at all possible. + + .. note:: You should return an error if a file with the same name as + ``newpath`` already exists. (The rename operation should be + non-desctructive.) + + .. note:: + This method implements 'standard' SFTP ``RENAME`` behavior; those + seeking the OpenSSH "POSIX rename" extension behavior should use + `posix_rename`. + + :param str oldpath: + the requested path (relative or absolute) of the existing file. + :param str newpath: the requested new path of the file. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def posix_rename(self, oldpath, newpath): + """ + Rename (or move) a file, following posix conventions. If newpath + already exists, it will be overwritten. + + :param str oldpath: + the requested path (relative or absolute) of the existing file. + :param str newpath: the requested new path of the file. + :return: an SFTP error code `int` like ``SFTP_OK``. + + :versionadded: 2.2 + """ + return SFTP_OP_UNSUPPORTED + + def mkdir(self, path, attr): + """ + Create a new directory with the given attributes. The ``attr`` + object may be considered a "hint" and ignored. + + The ``attr`` object will contain only those fields provided by the + client in its request, so you should use ``hasattr`` to check for + the presence of fields before using them. In some cases, the ``attr`` + object may be completely empty. + + :param str path: + requested path (relative or absolute) of the new folder. + :param .SFTPAttributes attr: requested attributes of the new folder. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def rmdir(self, path): + """ + Remove a directory if it exists. The ``path`` should refer to an + existing, empty folder -- otherwise this method should return an + error. + + :param str path: + requested path (relative or absolute) of the folder to remove. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def chattr(self, path, attr): + """ + Change the attributes of a file. The ``attr`` object will contain + only those fields provided by the client in its request, so you + should check for the presence of fields before using them. + + :param str path: + requested path (relative or absolute) of the file to change. + :param attr: + requested attributes to change on the file (an `.SFTPAttributes` + object) + :return: an error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def canonicalize(self, path): + """ + Return the canonical form of a path on the server. 
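``canonicalize`` is where path-escape bugs usually live. A hedged sketch of a jailed variant (``ROOT`` and ``to_local`` are illustrative helpers) that keeps the client-visible namespace rooted at ``"/"`` while never revealing, or escaping, the real filesystem layout:

    import posixpath

    ROOT = "/srv/sftp"  # assumed export root

    def canonicalize(path):
        # Anchor at "/" and normalize; posixpath.normpath("/..") is "/",
        # so a client cannot climb above the virtual root.
        return posixpath.normpath("/" + path)

    def to_local(path):
        # Map the canonical client-visible path onto the real export root.
        return ROOT + canonicalize(path)

    assert canonicalize("../../etc/passwd") == "/etc/passwd"
    assert to_local("../../etc/passwd") == "/srv/sftp/etc/passwd"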
For example, + if the server's home folder is ``/home/foo``, the path + ``"../betty"`` would be canonicalized to ``"/home/betty"``. Note + the obvious security issues: if you're serving files only from a + specific folder, you probably don't want this method to reveal path + names outside that folder. + + You may find the Python methods in ``os.path`` useful, especially + ``os.path.normpath`` and ``os.path.realpath``. + + The default implementation returns ``os.path.normpath('/' + path)``. + """ + if os.path.isabs(path): + out = os.path.normpath(path) + else: + out = os.path.normpath("/" + path) + if sys.platform == "win32": + # on windows, normalize backslashes to sftp/posix format + out = out.replace("\\", "/") + return out + + def readlink(self, path): + """ + Return the target of a symbolic link (or shortcut) on the server. + If the specified path doesn't refer to a symbolic link, an error + should be returned. + + :param str path: path (relative or absolute) of the symbolic link. + :return: + the target `str` path of the symbolic link, or an error code like + ``SFTP_NO_SUCH_FILE``. + """ + return SFTP_OP_UNSUPPORTED + + def symlink(self, target_path, path): + """ + Create a symbolic link on the server, as new pathname ``path``, + with ``target_path`` as the target of the link. + + :param str target_path: + path (relative or absolute) of the target for this new symbolic + link. + :param str path: + path (relative or absolute) of the symbolic link to create. + :return: an error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED diff --git a/venv/lib/python3.12/site-packages/paramiko/ssh_exception.py b/venv/lib/python3.12/site-packages/paramiko/ssh_exception.py new file mode 100644 index 0000000..2b68ebe --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/ssh_exception.py @@ -0,0 +1,250 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import socket + + +class SSHException(Exception): + """ + Exception raised by failures in SSH2 protocol negotiation or logic errors. + """ + + pass + + +class AuthenticationException(SSHException): + """ + Exception raised when authentication failed for some reason. It may be + possible to retry with different credentials. (Other classes specify more + specific reasons.) + + .. versionadded:: 1.6 + """ + + pass + + +class PasswordRequiredException(AuthenticationException): + """ + Exception raised when a password is needed to unlock a private key file. + """ + + pass + + +class BadAuthenticationType(AuthenticationException): + """ + Exception raised when an authentication type (like password) is used, but + the server isn't allowing that type. (It may only allow public-key, for + example.) + + .. 
versionadded:: 1.1 + """ + + allowed_types = [] + + # TODO 4.0: remove explanation kwarg + def __init__(self, explanation, types): + # TODO 4.0: remove this supercall unless it's actually required for + # pickling (after fixing pickling) + AuthenticationException.__init__(self, explanation, types) + self.explanation = explanation + self.allowed_types = types + + def __str__(self): + return "{}; allowed types: {!r}".format( + self.explanation, self.allowed_types + ) + + +class PartialAuthentication(AuthenticationException): + """ + An internal exception thrown in the case of partial authentication. + """ + + allowed_types = [] + + def __init__(self, types): + AuthenticationException.__init__(self, types) + self.allowed_types = types + + def __str__(self): + return "Partial authentication; allowed types: {!r}".format( + self.allowed_types + ) + + +# TODO 4.0: stop inheriting from SSHException, move to auth.py +class UnableToAuthenticate(AuthenticationException): + pass + + +class ChannelException(SSHException): + """ + Exception raised when an attempt to open a new `.Channel` fails. + + :param int code: the error code returned by the server + + .. versionadded:: 1.6 + """ + + def __init__(self, code, text): + SSHException.__init__(self, code, text) + self.code = code + self.text = text + + def __str__(self): + return "ChannelException({!r}, {!r})".format(self.code, self.text) + + +class BadHostKeyException(SSHException): + """ + The host key given by the SSH server did not match what we were expecting. + + :param str hostname: the hostname of the SSH server + :param PKey got_key: the host key presented by the server + :param PKey expected_key: the host key expected + + .. versionadded:: 1.6 + """ + + def __init__(self, hostname, got_key, expected_key): + SSHException.__init__(self, hostname, got_key, expected_key) + self.hostname = hostname + self.key = got_key + self.expected_key = expected_key + + def __str__(self): + msg = "Host key for server '{}' does not match: got '{}', expected '{}'" # noqa + return msg.format( + self.hostname, + self.key.get_base64(), + self.expected_key.get_base64(), + ) + + +class IncompatiblePeer(SSHException): + """ + A disagreement arose regarding an algorithm required for key exchange. + + .. versionadded:: 2.9 + """ + + # TODO 4.0: consider making this annotate w/ 1..N 'missing' algorithms, + # either just the first one that would halt kex, or even updating the + # Transport logic so we record /all/ that /could/ halt kex. + # TODO: update docstrings where this may end up raised so they are more + # specific. + pass + + +class ProxyCommandFailure(SSHException): + """ + The "ProxyCommand" found in the .ssh/config file returned an error. + + :param str command: The command line that is generating this exception. + :param str error: The error captured from the proxy command output. + """ + + def __init__(self, command, error): + SSHException.__init__(self, command, error) + self.command = command + self.error = error + + def __str__(self): + return 'ProxyCommand("{}") returned nonzero exit status: {}'.format( + self.command, self.error + ) + + +class NoValidConnectionsError(socket.error): + """ + Multiple connection attempts were made and no families succeeded. + + This exception class wraps multiple "real" underlying connection errors, + all of which represent failed connection attempts. Because these errors are + not guaranteed to all be of the same error type (i.e. 
different errno, + `socket.error` subclass, message, etc) we expose a single unified error + message and a ``None`` errno so that instances of this class match most + normal handling of `socket.error` objects. + + To see the wrapped exception objects, access the ``errors`` attribute. + ``errors`` is a dict whose keys are address tuples (e.g. ``('127.0.0.1', + 22)``) and whose values are the exception encountered trying to connect to + that address. + + It is implied/assumed that all the errors given to a single instance of + this class are from connecting to the same hostname + port (and thus that + the differences are in the resolution of the hostname - e.g. IPv4 vs v6). + + .. versionadded:: 1.16 + """ + + def __init__(self, errors): + """ + :param dict errors: + The errors dict to store, as described by class docstring. + """ + addrs = sorted(errors.keys()) + body = ", ".join([x[0] for x in addrs[:-1]]) + tail = addrs[-1][0] + if body: + msg = "Unable to connect to port {0} on {1} or {2}" + else: + msg = "Unable to connect to port {0} on {2}" + super().__init__( + None, msg.format(addrs[0][1], body, tail) # stand-in for errno + ) + self.errors = errors + + def __reduce__(self): + return (self.__class__, (self.errors,)) + + +class CouldNotCanonicalize(SSHException): + """ + Raised when hostname canonicalization fails & fallback is disabled. + + .. versionadded:: 2.7 + """ + + pass + + +class ConfigParseError(SSHException): + """ + A fatal error was encountered trying to parse SSH config data. + + Typically this means a config file violated the ``ssh_config`` + specification in a manner that requires exiting immediately, such as not + matching ``key = value`` syntax or misusing certain ``Match`` keywords. + + .. versionadded:: 2.7 + """ + + pass + + +class MessageOrderError(SSHException): + """ + Out-of-order protocol messages were received, violating "strict kex" mode. + + .. versionadded:: 3.4 + """ + + pass diff --git a/venv/lib/python3.12/site-packages/paramiko/ssh_gss.py b/venv/lib/python3.12/site-packages/paramiko/ssh_gss.py new file mode 100644 index 0000000..ee49c34 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/ssh_gss.py @@ -0,0 +1,778 @@ +# Copyright (C) 2013-2014 science + computing ag +# Author: Sebastian Deiss +# +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + + +""" +This module provides GSS-API / SSPI authentication as defined in :rfc:`4462`. + +.. note:: Credential delegation is not supported in server mode. + +.. seealso:: :doc:`/api/kex_gss` + +.. versionadded:: 1.15 +""" + +import struct +import os +import sys + + +#: A boolean constraint that indicates if GSS-API / SSPI is available. +GSS_AUTH_AVAILABLE = True + + +#: A tuple of the exception types used by the underlying GSSAPI implementation. 
+GSS_EXCEPTIONS = () + + +#: :var str _API: Constraint for the used API +_API = None + +try: + import gssapi + + if hasattr(gssapi, "__title__") and gssapi.__title__ == "python-gssapi": + # old, unmaintained python-gssapi package + _API = "MIT" # keep this for compatibility + GSS_EXCEPTIONS = (gssapi.GSSException,) + else: + _API = "PYTHON-GSSAPI-NEW" + GSS_EXCEPTIONS = ( + gssapi.exceptions.GeneralError, + gssapi.raw.misc.GSSError, + ) +except (ImportError, OSError): + try: + import pywintypes + import sspicon + import sspi + + _API = "SSPI" + GSS_EXCEPTIONS = (pywintypes.error,) + except ImportError: + GSS_AUTH_AVAILABLE = False + _API = None + +from paramiko.common import MSG_USERAUTH_REQUEST +from paramiko.ssh_exception import SSHException +from paramiko._version import __version_info__ + + +def GSSAuth(auth_method, gss_deleg_creds=True): + """ + Provide SSH2 GSS-API / SSPI authentication. + + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not. + We delegate credentials by default. + :return: Either an `._SSH_GSSAPI_OLD` or `._SSH_GSSAPI_NEW` (Unix) + object or an `_SSH_SSPI` (Windows) object + :rtype: object + + :raises: ``ImportError`` -- If no GSS-API / SSPI module could be imported. + + :see: `RFC 4462 `_ + :note: Check for the available API and return either an `._SSH_GSSAPI_OLD` + (MIT GSSAPI using python-gssapi package) object, an + `._SSH_GSSAPI_NEW` (MIT GSSAPI using gssapi package) object + or an `._SSH_SSPI` (MS SSPI) object. + If there is no supported API available, + ``None`` will be returned. + """ + if _API == "MIT": + return _SSH_GSSAPI_OLD(auth_method, gss_deleg_creds) + elif _API == "PYTHON-GSSAPI-NEW": + return _SSH_GSSAPI_NEW(auth_method, gss_deleg_creds) + elif _API == "SSPI" and os.name == "nt": + return _SSH_SSPI(auth_method, gss_deleg_creds) + else: + raise ImportError("Unable to import a GSS-API / SSPI module!") + + +class _SSH_GSSAuth: + """ + Contains the shared variables and methods of `._SSH_GSSAPI_OLD`, + `._SSH_GSSAPI_NEW` and `._SSH_SSPI`. + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + self._auth_method = auth_method + self._gss_deleg_creds = gss_deleg_creds + self._gss_host = None + self._username = None + self._session_id = None + self._service = "ssh-connection" + """ + OpenSSH supports Kerberos V5 mechanism only for GSS-API authentication, + so we also support the krb5 mechanism only. + """ + self._krb5_mech = "1.2.840.113554.1.2.2" + + # client mode + self._gss_ctxt = None + self._gss_ctxt_status = False + + # server mode + self._gss_srv_ctxt = None + self._gss_srv_ctxt_status = False + self.cc_file = None + + def set_service(self, service): + """ + This is just a setter to use a non default service. + I added this method, because RFC 4462 doesn't specify "ssh-connection" + as the only service value. + + :param str service: The desired SSH service + """ + if service.find("ssh-"): + self._service = service + + def set_username(self, username): + """ + Setter for C{username}. If GSS-API Key Exchange is performed, the + username is not set by C{ssh_init_sec_context}. 
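A hedged usage sketch for the ``GSSAuth`` factory above. This only runs in an environment with a working GSS-API/SSPI backend and a valid Kerberos ticket; the hostname and username are placeholders, and in normal use paramiko's auth handler drives this exchange internally rather than application code:

    from paramiko.ssh_gss import GSS_AUTH_AVAILABLE, GSSAuth

    if GSS_AUTH_AVAILABLE:
        gss = GSSAuth("gssapi-with-mic", gss_deleg_creds=False)
        gss.set_username("alice")
        token = gss.ssh_init_sec_context("server.example.com",
                                         username="alice")
        # `token` (and any follow-up tokens) travel to the server inside
        # SSH_MSG_USERAUTH packets until the context completes.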
+ + :param str username: The name of the user who attempts to login + """ + self._username = username + + def ssh_gss_oids(self, mode="client"): + """ + This method returns a single OID, because we only support the + Kerberos V5 mechanism. + + :param str mode: Client for client mode and server for server mode + :return: A byte sequence containing the number of supported + OIDs, the length of the OID and the actual OID encoded with + DER + :note: In server mode we just return the OID length and the DER encoded + OID. + """ + from pyasn1.type.univ import ObjectIdentifier + from pyasn1.codec.der import encoder + + OIDs = self._make_uint32(1) + krb5_OID = encoder.encode(ObjectIdentifier(self._krb5_mech)) + OID_len = self._make_uint32(len(krb5_OID)) + if mode == "server": + return OID_len + krb5_OID + return OIDs + OID_len + krb5_OID + + def ssh_check_mech(self, desired_mech): + """ + Check if the given OID is the Kerberos V5 OID (server mode). + + :param str desired_mech: The desired GSS-API mechanism of the client + :return: ``True`` if the given OID is supported, otherwise C{False} + """ + from pyasn1.codec.der import decoder + + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + return False + return True + + # Internals + # ------------------------------------------------------------------------- + def _make_uint32(self, integer): + """ + Create a 32 bit unsigned integer (The byte sequence of an integer). + + :param int integer: The integer value to convert + :return: The byte sequence of an 32 bit integer + """ + return struct.pack("!I", integer) + + def _ssh_build_mic(self, session_id, username, service, auth_method): + """ + Create the SSH2 MIC filed for gssapi-with-mic. + + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :param str service: The requested SSH service + :param str auth_method: The requested SSH authentication mechanism + :return: The MIC as defined in RFC 4462. The contents of the + MIC field are: + string session_identifier, + byte SSH_MSG_USERAUTH_REQUEST, + string user-name, + string service (ssh-connection), + string authentication-method + (gssapi-with-mic or gssapi-keyex) + """ + mic = self._make_uint32(len(session_id)) + mic += session_id + mic += struct.pack("B", MSG_USERAUTH_REQUEST) + mic += self._make_uint32(len(username)) + mic += username.encode() + mic += self._make_uint32(len(service)) + mic += service.encode() + mic += self._make_uint32(len(auth_method)) + mic += auth_method.encode() + return mic + + +class _SSH_GSSAPI_OLD(_SSH_GSSAuth): + """ + Implementation of the GSS-API MIT Kerberos Authentication for SSH2, + using the older (unmaintained) python-gssapi package. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + gssapi.C_DELEG_FLAG, + ) + else: + self._gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a GSS-API context. 
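The byte layout produced by ``_ssh_build_mic`` above can be reproduced standalone with nothing but ``struct``; this is a plain restatement of that method for clarity, not new protocol behavior:

    import struct

    MSG_USERAUTH_REQUEST = 50  # SSH2 message number, as in paramiko.common

    def u32(n):
        return struct.pack("!I", n)

    def build_mic_field(session_id, username, service, auth_method):
        mic = u32(len(session_id)) + session_id
        mic += struct.pack("B", MSG_USERAUTH_REQUEST)
        for s in (username, service, auth_method):
            data = s.encode()
            mic += u32(len(data)) + data
        return mic

    print(build_mic_field(b"\x01\x02", "alice", "ssh-connection",
                          "gssapi-with-mic").hex())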
+ + :param str username: The name of the user who attempts to login + :param str target: The hostname of the target to connect to + :param str desired_mech: The negotiated GSS-API mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param str recv_token: The GSS-API token received from the Server + :raises: + `.SSHException` -- Is raised if the desired mechanism of the client + is not supported + :return: A ``String`` if the GSS-API has returned a token or + ``None`` if no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + targ_name = gssapi.Name( + "host@" + self._gss_host, gssapi.C_NT_HOSTBASED_SERVICE + ) + ctx = gssapi.Context() + ctx.flags = self._gss_flags + if desired_mech is None: + krb5_mech = gssapi.OID.mech_from_string(self._krb5_mech) + else: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + else: + krb5_mech = gssapi.OID.mech_from_string(self._krb5_mech) + token = None + try: + if recv_token is None: + self._gss_ctxt = gssapi.InitContext( + peer_name=targ_name, + mech_type=krb5_mech, + req_flags=ctx.flags, + ) + token = self._gss_ctxt.step(token) + else: + token = self._gss_ctxt.step(recv_token) + except gssapi.GSSException: + message = "{} Target: {}".format(sys.exc_info()[1], self._gss_host) + raise gssapi.GSSException(message) + self._gss_ctxt_status = self._gss_ctxt.established + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not + :return: gssapi-with-mic: + Returns the MIC token from GSS-API for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from GSS-API with the SSH session ID as + message. + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.get_mic(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.get_mic(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, recv_token, username=None): + """ + Accept a GSS-API context (server mode). + + :param str hostname: The servers hostname + :param str username: The name of the user who attempts to login + :param str recv_token: The GSS-API Token received from the server, + if it's not the initial call. + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + # hostname and username are not required for GSSAPI, but for SSPI + self._gss_host = hostname + self._username = username + if self._gss_srv_ctxt is None: + self._gss_srv_ctxt = gssapi.AcceptContext() + token = self._gss_srv_ctxt.step(recv_token) + self._gss_srv_ctxt_status = self._gss_srv_ctxt.established + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. 
+ + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``gssapi.GSSException`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if self._username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + self._gss_srv_ctxt.verify_mic(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + self._gss_ctxt.verify_mic(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + """ + if self._gss_srv_ctxt.delegated_cred is not None: + return True + return False + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentials if credentials are delegated + (server mode). + + :param str client_token: The GSS-API token received form the client + :raises: + ``NotImplementedError`` -- Credential delegation is currently not + supported in server mode + """ + raise NotImplementedError + + +if __version_info__ < (2, 5): + # provide the old name for strict backward compatibility + _SSH_GSSAPI = _SSH_GSSAPI_OLD + + +class _SSH_GSSAPI_NEW(_SSH_GSSAuth): + """ + Implementation of the GSS-API MIT Kerberos Authentication for SSH2, + using the newer, currently maintained gssapi package. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + gssapi.RequirementFlag.delegate_to_peer, + ) + else: + self._gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a GSS-API context. 
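The mechanism negotiation in ``ssh_init_sec_context``/``ssh_check_mech`` boils down to comparing one DER-encoded OID. A standalone illustration using pyasn1, which these methods already import:

    from pyasn1.codec.der import decoder, encoder
    from pyasn1.type.univ import ObjectIdentifier

    KRB5_MECH = "1.2.840.113554.1.2.2"  # the only mechanism supported here

    der = encoder.encode(ObjectIdentifier(KRB5_MECH))
    print(der.hex())  # 06092a864886f712010202

    mech, _ = decoder.decode(der)
    assert str(mech) == KRB5_MECH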
+ + :param str username: The name of the user who attempts to login + :param str target: The hostname of the target to connect to + :param str desired_mech: The negotiated GSS-API mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param str recv_token: The GSS-API token received from the Server + :raises: `.SSHException` -- Is raised if the desired mechanism of the + client is not supported + :raises: ``gssapi.exceptions.GSSError`` if there is an error signaled + by the GSS-API implementation + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + targ_name = gssapi.Name( + "host@" + self._gss_host, + name_type=gssapi.NameType.hostbased_service, + ) + if desired_mech is not None: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + krb5_mech = gssapi.MechType.kerberos + token = None + if recv_token is None: + self._gss_ctxt = gssapi.SecurityContext( + name=targ_name, + flags=self._gss_flags, + mech=krb5_mech, + usage="initiate", + ) + token = self._gss_ctxt.step(token) + else: + token = self._gss_ctxt.step(recv_token) + self._gss_ctxt_status = self._gss_ctxt.complete + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not + :return: gssapi-with-mic: + Returns the MIC token from GSS-API for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from GSS-API with the SSH session ID as + message. + :rtype: str + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.get_signature(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.get_signature(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, recv_token, username=None): + """ + Accept a GSS-API context (server mode). + + :param str hostname: The servers hostname + :param str username: The name of the user who attempts to login + :param str recv_token: The GSS-API Token received from the server, + if it's not the initial call. + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + # hostname and username are not required for GSSAPI, but for SSPI + self._gss_host = hostname + self._username = username + if self._gss_srv_ctxt is None: + self._gss_srv_ctxt = gssapi.SecurityContext(usage="accept") + token = self._gss_srv_ctxt.step(recv_token) + self._gss_srv_ctxt_status = self._gss_srv_ctxt.complete + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. 
+ + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``gssapi.exceptions.GSSError`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if self._username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + self._gss_srv_ctxt.verify_signature(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + self._gss_ctxt.verify_signature(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + :rtype: bool + """ + if self._gss_srv_ctxt.delegated_creds is not None: + return True + return False + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentials if credentials are delegated + (server mode). + + :param str client_token: The GSS-API token received form the client + :raises: ``NotImplementedError`` -- Credential delegation is currently + not supported in server mode + """ + raise NotImplementedError + + +class _SSH_SSPI(_SSH_GSSAuth): + """ + Implementation of the Microsoft SSPI Kerberos Authentication for SSH2. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + sspicon.ISC_REQ_INTEGRITY + | sspicon.ISC_REQ_MUTUAL_AUTH + | sspicon.ISC_REQ_DELEGATE + ) + else: + self._gss_flags = ( + sspicon.ISC_REQ_INTEGRITY | sspicon.ISC_REQ_MUTUAL_AUTH + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a SSPI context. + + :param str username: The name of the user who attempts to login + :param str target: The FQDN of the target to connect to + :param str desired_mech: The negotiated SSPI mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param recv_token: The SSPI token received from the Server + :raises: + `.SSHException` -- Is raised if the desired mechanism of the client + is not supported + :return: A ``String`` if the SSPI has returned a token or ``None`` if + no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + error = 0 + targ_name = "host/" + self._gss_host + if desired_mech is not None: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + try: + if recv_token is None: + self._gss_ctxt = sspi.ClientAuth( + "Kerberos", scflags=self._gss_flags, targetspn=targ_name + ) + error, token = self._gss_ctxt.authorize(recv_token) + token = token[0].Buffer + except pywintypes.error as e: + e.strerror += ", Target: {}".format(self._gss_host) + raise + + if error == 0: + """ + if the status is GSS_COMPLETE (error = 0) the context is fully + established an we can set _gss_ctxt_status to True. 
+ """ + self._gss_ctxt_status = True + token = None + """ + You won't get another token if the context is fully established, + so i set token to None instead of "" + """ + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for Key Exchange with SSPI or not + :return: gssapi-with-mic: + Returns the MIC token from SSPI for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from SSPI with the SSH session ID as + message. + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.sign(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.sign(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, username, recv_token): + """ + Accept a SSPI context (server mode). + + :param str hostname: The servers FQDN + :param str username: The name of the user who attempts to login + :param str recv_token: The SSPI Token received from the server, + if it's not the initial call. + :return: A ``String`` if the SSPI has returned a token or ``None`` if + no token was returned + """ + self._gss_host = hostname + self._username = username + targ_name = "host/" + self._gss_host + self._gss_srv_ctxt = sspi.ServerAuth("Kerberos", spn=targ_name) + error, token = self._gss_srv_ctxt.authorize(recv_token) + token = token[0].Buffer + if error == 0: + self._gss_srv_ctxt_status = True + token = None + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. + + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``sspi.error`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + # Verifies data and its signature. If verification fails, an + # sspi.error will be raised. + self._gss_srv_ctxt.verify(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + # Verifies data and its signature. If verification fails, an + # sspi.error will be raised. + self._gss_ctxt.verify(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + """ + return self._gss_flags & sspicon.ISC_REQ_DELEGATE and ( + self._gss_srv_ctxt_status or self._gss_flags + ) + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentails if credentials are delegated + (server mode). 
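For completeness, a hedged end-to-end sketch of requesting GSS-API authentication from the client side via ``SSHClient.connect``, which accepts the same ``gss_*`` knobs these classes implement. It assumes a GSS-enabled sshd and a valid Kerberos TGT; the hostname and username are placeholders:

    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.WarningPolicy())
    client.connect(
        "server.example.com",
        username="alice",
        gss_auth=True,         # try gssapi-with-mic userauth
        gss_kex=False,         # keep the ordinary key-exchange list
        gss_deleg_creds=True,  # delegate credentials (the default)
    )
    _, stdout, _ = client.exec_command("hostname")
    print(stdout.read().decode())
    client.close()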
+ + :param str client_token: The SSPI token received form the client + :raises: + ``NotImplementedError`` -- Credential delegation is currently not + supported in server mode + """ + raise NotImplementedError diff --git a/venv/lib/python3.12/site-packages/paramiko/transport.py b/venv/lib/python3.12/site-packages/paramiko/transport.py new file mode 100644 index 0000000..8301917 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/transport.py @@ -0,0 +1,3389 @@ +# Copyright (C) 2003-2007 Robey Pointer +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Core protocol implementation +""" + +import os +import socket +import sys +import threading +import time +import weakref +from hashlib import md5, sha1, sha256, sha512 + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import algorithms, Cipher, modes + +import paramiko +from paramiko import util +from paramiko.auth_handler import AuthHandler, AuthOnlyHandler +from paramiko.ssh_gss import GSSAuth +from paramiko.channel import Channel +from paramiko.common import ( + xffffffff, + cMSG_CHANNEL_OPEN, + cMSG_IGNORE, + cMSG_GLOBAL_REQUEST, + DEBUG, + MSG_KEXINIT, + MSG_IGNORE, + MSG_DISCONNECT, + MSG_DEBUG, + ERROR, + WARNING, + cMSG_UNIMPLEMENTED, + INFO, + cMSG_KEXINIT, + cMSG_NEWKEYS, + MSG_NEWKEYS, + cMSG_REQUEST_SUCCESS, + cMSG_REQUEST_FAILURE, + CONNECTION_FAILED_CODE, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_SUCCEEDED, + cMSG_CHANNEL_OPEN_FAILURE, + cMSG_CHANNEL_OPEN_SUCCESS, + MSG_GLOBAL_REQUEST, + MSG_REQUEST_SUCCESS, + MSG_REQUEST_FAILURE, + cMSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_CHANNEL_OPEN_SUCCESS, + MSG_CHANNEL_OPEN_FAILURE, + MSG_CHANNEL_OPEN, + MSG_CHANNEL_SUCCESS, + MSG_CHANNEL_FAILURE, + MSG_CHANNEL_DATA, + MSG_CHANNEL_EXTENDED_DATA, + MSG_CHANNEL_WINDOW_ADJUST, + MSG_CHANNEL_REQUEST, + MSG_CHANNEL_EOF, + MSG_CHANNEL_CLOSE, + MIN_WINDOW_SIZE, + MIN_PACKET_SIZE, + MAX_WINDOW_SIZE, + DEFAULT_WINDOW_SIZE, + DEFAULT_MAX_PACKET_SIZE, + HIGHEST_USERAUTH_MESSAGE_ID, + MSG_UNIMPLEMENTED, + MSG_NAMES, + MSG_EXT_INFO, + cMSG_EXT_INFO, + byte_ord, +) +from paramiko.compress import ZlibCompressor, ZlibDecompressor +from paramiko.dsskey import DSSKey +from paramiko.ed25519key import Ed25519Key +from paramiko.kex_curve25519 import KexCurve25519 +from paramiko.kex_gex import KexGex, KexGexSHA256 +from paramiko.kex_group1 import KexGroup1 +from paramiko.kex_group14 import KexGroup14, KexGroup14SHA256 +from paramiko.kex_group16 import KexGroup16SHA512 +from paramiko.kex_ecdh_nist import KexNistp256, KexNistp384, KexNistp521 +from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14 +from paramiko.message import Message +from paramiko.packet import Packetizer, NeedRekeyException +from paramiko.primes 
import ModulusPack +from paramiko.rsakey import RSAKey +from paramiko.ecdsakey import ECDSAKey +from paramiko.server import ServerInterface +from paramiko.sftp_client import SFTPClient +from paramiko.ssh_exception import ( + BadAuthenticationType, + ChannelException, + IncompatiblePeer, + MessageOrderError, + ProxyCommandFailure, + SSHException, +) +from paramiko.util import ( + ClosingContextManager, + clamp_value, + b, +) + + +# for thread cleanup +_active_threads = [] + + +def _join_lingering_threads(): + for thr in _active_threads: + thr.stop_thread() + + +import atexit + +atexit.register(_join_lingering_threads) + + +class Transport(threading.Thread, ClosingContextManager): + """ + An SSH Transport attaches to a stream (usually a socket), negotiates an + encrypted session, authenticates, and then creates stream tunnels, called + `channels <.Channel>`, across the session. Multiple channels can be + multiplexed across a single session (and often are, in the case of port + forwardings). + + Instances of this class may be used as context managers. + """ + + _ENCRYPT = object() + _DECRYPT = object() + + _PROTO_ID = "2.0" + _CLIENT_ID = "paramiko_{}".format(paramiko.__version__) + + # These tuples of algorithm identifiers are in preference order; do not + # reorder without reason! + # NOTE: if you need to modify these, we suggest leveraging the + # `disabled_algorithms` constructor argument (also available in SSHClient) + # instead of monkeypatching or subclassing. + _preferred_ciphers = ( + "aes128-ctr", + "aes192-ctr", + "aes256-ctr", + "aes128-cbc", + "aes192-cbc", + "aes256-cbc", + "3des-cbc", + ) + _preferred_macs = ( + "hmac-sha2-256", + "hmac-sha2-512", + "hmac-sha2-256-etm@openssh.com", + "hmac-sha2-512-etm@openssh.com", + "hmac-sha1", + "hmac-md5", + "hmac-sha1-96", + "hmac-md5-96", + ) + # ~= HostKeyAlgorithms in OpenSSH land + _preferred_keys = ( + "ssh-ed25519", + "ecdsa-sha2-nistp256", + "ecdsa-sha2-nistp384", + "ecdsa-sha2-nistp521", + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + "ssh-dss", + ) + # ~= PubKeyAcceptedAlgorithms + _preferred_pubkeys = ( + "ssh-ed25519", + "ecdsa-sha2-nistp256", + "ecdsa-sha2-nistp384", + "ecdsa-sha2-nistp521", + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + "ssh-dss", + ) + _preferred_kex = ( + "ecdh-sha2-nistp256", + "ecdh-sha2-nistp384", + "ecdh-sha2-nistp521", + "diffie-hellman-group16-sha512", + "diffie-hellman-group-exchange-sha256", + "diffie-hellman-group14-sha256", + "diffie-hellman-group-exchange-sha1", + "diffie-hellman-group14-sha1", + "diffie-hellman-group1-sha1", + ) + if KexCurve25519.is_available(): + _preferred_kex = ("curve25519-sha256@libssh.org",) + _preferred_kex + _preferred_gsskex = ( + "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==", + "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==", + "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==", + ) + _preferred_compression = ("none",) + + _cipher_info = { + "aes128-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 16, + }, + "aes192-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 24, + }, + "aes256-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 32, + }, + "aes128-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 16, + }, + "aes192-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 24, + }, + "aes256-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 32, + 
}, + "3des-cbc": { + "class": algorithms.TripleDES, + "mode": modes.CBC, + "block-size": 8, + "key-size": 24, + }, + } + + _mac_info = { + "hmac-sha1": {"class": sha1, "size": 20}, + "hmac-sha1-96": {"class": sha1, "size": 12}, + "hmac-sha2-256": {"class": sha256, "size": 32}, + "hmac-sha2-256-etm@openssh.com": {"class": sha256, "size": 32}, + "hmac-sha2-512": {"class": sha512, "size": 64}, + "hmac-sha2-512-etm@openssh.com": {"class": sha512, "size": 64}, + "hmac-md5": {"class": md5, "size": 16}, + "hmac-md5-96": {"class": md5, "size": 12}, + } + + _key_info = { + # TODO: at some point we will want to drop this as it's no longer + # considered secure due to using SHA-1 for signatures. OpenSSH 8.8 no + # longer supports it. Question becomes at what point do we want to + # prevent users with older setups from using this? + "ssh-rsa": RSAKey, + "ssh-rsa-cert-v01@openssh.com": RSAKey, + "rsa-sha2-256": RSAKey, + "rsa-sha2-256-cert-v01@openssh.com": RSAKey, + "rsa-sha2-512": RSAKey, + "rsa-sha2-512-cert-v01@openssh.com": RSAKey, + "ssh-dss": DSSKey, + "ssh-dss-cert-v01@openssh.com": DSSKey, + "ecdsa-sha2-nistp256": ECDSAKey, + "ecdsa-sha2-nistp256-cert-v01@openssh.com": ECDSAKey, + "ecdsa-sha2-nistp384": ECDSAKey, + "ecdsa-sha2-nistp384-cert-v01@openssh.com": ECDSAKey, + "ecdsa-sha2-nistp521": ECDSAKey, + "ecdsa-sha2-nistp521-cert-v01@openssh.com": ECDSAKey, + "ssh-ed25519": Ed25519Key, + "ssh-ed25519-cert-v01@openssh.com": Ed25519Key, + } + + _kex_info = { + "diffie-hellman-group1-sha1": KexGroup1, + "diffie-hellman-group14-sha1": KexGroup14, + "diffie-hellman-group-exchange-sha1": KexGex, + "diffie-hellman-group-exchange-sha256": KexGexSHA256, + "diffie-hellman-group14-sha256": KexGroup14SHA256, + "diffie-hellman-group16-sha512": KexGroup16SHA512, + "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup1, + "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup14, + "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGex, + "ecdh-sha2-nistp256": KexNistp256, + "ecdh-sha2-nistp384": KexNistp384, + "ecdh-sha2-nistp521": KexNistp521, + } + if KexCurve25519.is_available(): + _kex_info["curve25519-sha256@libssh.org"] = KexCurve25519 + + _compression_info = { + # zlib@openssh.com is just zlib, but only turned on after a successful + # authentication. openssh servers may only offer this type because + # they've had troubles with security holes in zlib in the past. + "zlib@openssh.com": (ZlibCompressor, ZlibDecompressor), + "zlib": (ZlibCompressor, ZlibDecompressor), + "none": (None, None), + } + + _modulus_pack = None + _active_check_timeout = 0.1 + + def __init__( + self, + sock, + default_window_size=DEFAULT_WINDOW_SIZE, + default_max_packet_size=DEFAULT_MAX_PACKET_SIZE, + gss_kex=False, + gss_deleg_creds=True, + disabled_algorithms=None, + server_sig_algs=True, + strict_kex=True, + packetizer_class=None, + ): + """ + Create a new SSH session over an existing socket, or socket-like + object. This only creates the `.Transport` object; it doesn't begin + the SSH session yet. Use `connect` or `start_client` to begin a client + session, or `start_server` to begin a server session. + + If the object is not actually a socket, it must have the following + methods: + + - ``send(bytes)``: Writes from 1 to ``len(bytes)`` bytes, and returns + an int representing the number of bytes written. Returns + 0 or raises ``EOFError`` if the stream has been closed. + - ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a + string. Returns 0 or raises ``EOFError`` if the stream has been + closed. 
+        - ``close()``: Closes the socket.
+        - ``settimeout(n)``: Sets a (float) timeout on I/O operations.
+
+        For ease of use, you may also pass in an address (as a tuple) or a host
+        string as the ``sock`` argument.  (A host string is a hostname with an
+        optional port (separated by ``":"``) which will be converted into a
+        tuple of ``(hostname, port)``.)  A socket will be connected to this
+        address and used for communication.  Exceptions from the ``socket``
+        call may be thrown in this case.
+
+        .. note::
+            Modifying the window and packet sizes might have adverse
+            effects on your channels created from this transport. The default
+            values are the same as in the OpenSSH code base and have been
+            battle tested.
+
+        :param socket sock:
+            a socket or socket-like object to create the session over.
+        :param int default_window_size:
+            sets the default window size on the transport. (defaults to
+            2097152)
+        :param int default_max_packet_size:
+            sets the default max packet size on the transport. (defaults to
+            32768)
+        :param bool gss_kex:
+            Whether to enable GSSAPI key exchange when GSSAPI is in play.
+            Default: ``False``.
+        :param bool gss_deleg_creds:
+            Whether to enable GSSAPI credential delegation when GSSAPI is in
+            play. Default: ``True``.
+        :param dict disabled_algorithms:
+            If given, must be a dictionary mapping algorithm type to an
+            iterable of algorithm identifiers, which will be disabled for the
+            lifetime of the transport.
+
+            Keys should match the last word in the class' builtin algorithm
+            tuple attributes, such as ``"ciphers"`` to disable names within
+            ``_preferred_ciphers``; or ``"kex"`` to disable something defined
+            inside ``_preferred_kex``. Values should exactly match members of
+            the matching attribute.
+
+            For example, if you need to disable
+            ``diffie-hellman-group16-sha512`` key exchange (perhaps because
+            your code talks to a server which implements it differently from
+            Paramiko), specify ``disabled_algorithms={"kex":
+            ["diffie-hellman-group16-sha512"]}``.
+        :param bool server_sig_algs:
+            Whether to send an extra message to compatible clients, in server
+            mode, with a list of supported pubkey algorithms. Default:
+            ``True``.
+        :param bool strict_kex:
+            Whether to advertise (and implement, if client also advertises
+            support for) a "strict kex" mode for safer handshaking. Default:
+            ``True``.
+        :param packetizer_class:
+            Which class to use for instantiating the internal packet handler.
+            Default: ``None`` (i.e.: use `Packetizer` as normal).
+
+        .. versionchanged:: 1.15
+            Added the ``default_window_size`` and ``default_max_packet_size``
+            arguments.
+        .. versionchanged:: 1.15
+            Added the ``gss_kex`` and ``gss_deleg_creds`` kwargs.
+        .. versionchanged:: 2.6
+            Added the ``disabled_algorithms`` kwarg.
+        .. versionchanged:: 2.9
+            Added the ``server_sig_algs`` kwarg.
+        .. versionchanged:: 3.4
+            Added the ``strict_kex`` kwarg.
+        .. versionchanged:: 3.4
+            Added the ``packetizer_class`` kwarg.
+        """
+        self.active = False
+        self.hostname = None
+        self.server_extensions = {}
+        self.advertise_strict_kex = strict_kex
+        self.agreed_on_strict_kex = False
+
+        # TODO: these two overrides on sock's type should go away sometime,
+        # too many ways to do it!
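+        # For illustration, the three accepted ``sock`` forms (a sketch only;
+        # 203.0.113.5 is a placeholder address, not one from this repo):
+        #   Transport("203.0.113.5:22")               # "host:port" string
+        #   Transport(("203.0.113.5", 22))            # (host, port) tuple
+        #   Transport(socket.create_connection(("203.0.113.5", 22)))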
+ if isinstance(sock, str): + # convert "host:port" into (host, port) + hl = sock.split(":", 1) + self.hostname = hl[0] + if len(hl) == 1: + sock = (hl[0], 22) + else: + sock = (hl[0], int(hl[1])) + if type(sock) is tuple: + # connect to the given (host, port) + hostname, port = sock + self.hostname = hostname + reason = "No suitable address family" + addrinfos = socket.getaddrinfo( + hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM + ) + for family, socktype, proto, canonname, sockaddr in addrinfos: + if socktype == socket.SOCK_STREAM: + af = family + # addr = sockaddr + sock = socket.socket(af, socket.SOCK_STREAM) + try: + sock.connect((hostname, port)) + except socket.error as e: + reason = str(e) + else: + break + else: + raise SSHException( + "Unable to connect to {}: {}".format(hostname, reason) + ) + # okay, normal socket-ish flow here... + threading.Thread.__init__(self) + self.daemon = True + self.sock = sock + # we set the timeout so we can check self.active periodically to + # see if we should bail. socket.timeout exception is never propagated. + self.sock.settimeout(self._active_check_timeout) + + # negotiated crypto parameters + self.packetizer = (packetizer_class or Packetizer)(sock) + self.local_version = "SSH-" + self._PROTO_ID + "-" + self._CLIENT_ID + self.remote_version = "" + self.local_cipher = self.remote_cipher = "" + self.local_kex_init = self.remote_kex_init = None + self.local_mac = self.remote_mac = None + self.local_compression = self.remote_compression = None + self.session_id = None + self.host_key_type = None + self.host_key = None + + # GSS-API / SSPI Key Exchange + self.use_gss_kex = gss_kex + # This will be set to True if GSS-API Key Exchange was performed + self.gss_kex_used = False + self.kexgss_ctxt = None + self.gss_host = None + if self.use_gss_kex: + self.kexgss_ctxt = GSSAuth("gssapi-keyex", gss_deleg_creds) + self._preferred_kex = self._preferred_gsskex + self._preferred_kex + + # state used during negotiation + self.kex_engine = None + self.H = None + self.K = None + + self.initial_kex_done = False + self.in_kex = False + self.authenticated = False + self._expected_packet = tuple() + # synchronization (always higher level than write_lock) + self.lock = threading.Lock() + + # tracking open channels + self._channels = ChannelMap() + self.channel_events = {} # (id -> Event) + self.channels_seen = {} # (id -> True) + self._channel_counter = 0 + self.default_max_packet_size = default_max_packet_size + self.default_window_size = default_window_size + self._forward_agent_handler = None + self._x11_handler = None + self._tcp_handler = None + + self.saved_exception = None + self.clear_to_send = threading.Event() + self.clear_to_send_lock = threading.Lock() + self.clear_to_send_timeout = 30.0 + self.log_name = "paramiko.transport" + self.logger = util.get_logger(self.log_name) + self.packetizer.set_log(self.logger) + self.auth_handler = None + # response Message from an arbitrary global request + self.global_response = None + # user-defined event callbacks + self.completion_event = None + # how long (seconds) to wait for the SSH banner + self.banner_timeout = 15 + # how long (seconds) to wait for the handshake to finish after SSH + # banner sent. + self.handshake_timeout = 15 + # how long (seconds) to wait for the auth response. 
+        self.auth_timeout = 30
+        # how long (seconds) to wait for opening a channel
+        self.channel_timeout = 60 * 60
+        self.disabled_algorithms = disabled_algorithms or {}
+        self.server_sig_algs = server_sig_algs
+
+        # server mode:
+        self.server_mode = False
+        self.server_object = None
+        self.server_key_dict = {}
+        self.server_accepts = []
+        self.server_accept_cv = threading.Condition(self.lock)
+        self.subsystem_table = {}
+
+        # Handler table, now set at init time for easier per-instance
+        # manipulation and subclass twiddling.
+        self._handler_table = {
+            MSG_EXT_INFO: self._parse_ext_info,
+            MSG_NEWKEYS: self._parse_newkeys,
+            MSG_GLOBAL_REQUEST: self._parse_global_request,
+            MSG_REQUEST_SUCCESS: self._parse_request_success,
+            MSG_REQUEST_FAILURE: self._parse_request_failure,
+            MSG_CHANNEL_OPEN_SUCCESS: self._parse_channel_open_success,
+            MSG_CHANNEL_OPEN_FAILURE: self._parse_channel_open_failure,
+            MSG_CHANNEL_OPEN: self._parse_channel_open,
+            MSG_KEXINIT: self._negotiate_keys,
+        }
+
+    def _filter_algorithm(self, type_):
+        default = getattr(self, "_preferred_{}".format(type_))
+        return tuple(
+            x
+            for x in default
+            if x not in self.disabled_algorithms.get(type_, [])
+        )
+
+    @property
+    def preferred_ciphers(self):
+        return self._filter_algorithm("ciphers")
+
+    @property
+    def preferred_macs(self):
+        return self._filter_algorithm("macs")
+
+    @property
+    def preferred_keys(self):
+        # Interleave cert variants here; resistant to various background
+        # overwriting of _preferred_keys, and necessary as hostkeys can't use
+        # the logic pubkey auth does re: injecting/checking for certs at
+        # runtime
+        filtered = self._filter_algorithm("keys")
+        return tuple(
+            filtered
+            + tuple("{}-cert-v01@openssh.com".format(x) for x in filtered)
+        )
+
+    @property
+    def preferred_pubkeys(self):
+        return self._filter_algorithm("pubkeys")
+
+    @property
+    def preferred_kex(self):
+        return self._filter_algorithm("kex")
+
+    @property
+    def preferred_compression(self):
+        return self._filter_algorithm("compression")
+
+    def __repr__(self):
+        """
+        Returns a string representation of this object, for debugging.
+        """
+        id_ = hex(id(self) & xffffffff)
+        out = "<paramiko.Transport at {}".format(id_)
+        if not self.active:
+            out += " (unconnected)"
+        else:
+            if self.local_cipher != "":
+                out += " (cipher {}, {:d} bits)".format(
+                    self.local_cipher,
+                    self._cipher_info[self.local_cipher]["key-size"] * 8,
+                )
+            if self.is_authenticated():
+                out += " (active; {} open channel(s))".format(
+                    len(self._channels)
+                )
+            elif self.initial_kex_done:
+                out += " (connected; awaiting auth)"
+            else:
+                out += " (connecting)"
+        out += ">"
+        return out
+
+    def atfork(self):
+        """
+        Terminate this Transport without closing the session.  On posix
+        systems, if a Transport is forked, both parent and child will share
+        the underlying socket, but only one process can use the connection
+        (without corrupting the session).  Use this method to clean up a
+        Transport object without disrupting the other process.
+
+        .. versionadded:: 1.5.3
+        """
+        self.sock.close()
+        self.close()
+
+    def get_security_options(self):
+        """
+        Return a `.SecurityOptions` object which can be used to tweak this
+        transport's security, and the order of preference for the algorithms
+        it supports.
+        """
+        return SecurityOptions(self)
+
+    def set_gss_host(self, gss_host, trust_dns=True, gssapi_requested=True):
+        """
+        Normalize/canonicalize ``self.gss_host`` depending on various factors.
+
+        :param str gss_host:
+            The explicitly requested GSS-oriented hostname to connect to (i.e.
+            what the host's entry in the Kerberos database is.) Defaults to
+            ``self.hostname`` (which will be the 'real' target hostname and/or
+            host portion of given socket object.)
+        :param bool trust_dns:
+            Indicates whether or not DNS is trusted; if true, DNS will be used
+            to canonicalize the GSS hostname (which again will either be
+            ``gss_host`` or the transport's default hostname.)
+            (Defaults to True due to backwards compatibility.)
+        :param bool gssapi_requested:
+            Whether GSSAPI key exchange or authentication was even requested.
+            If not, this is a no-op and nothing happens
+            (and ``self.gss_host`` is not set.)
+            (Defaults to True due to backwards compatibility.)
+        :returns: ``None``.
+        """
+        # No GSSAPI in play == nothing to do
+        if not gssapi_requested:
+            return
+        # Obtain the correct host first - did user request a GSS-specific name
+        # to use that is distinct from the regular name?
+        if gss_host is None:
+            gss_host = self.hostname
+        # Finally, canonicalize via DNS if DNS is trusted.
+        if trust_dns and gss_host is not None:
+            gss_host = socket.getfqdn(gss_host)
+        # And set attribute for reference later.
+        self.gss_host = gss_host
+
+    def start_client(self, event=None, timeout=None):
+        """
+        Negotiate a new SSH2 session as a client.  This is the first step
+        after creating a new `.Transport`.  A separate thread is created for
+        protocol negotiation.
+
+        If an event is passed in, this method returns immediately.  When
+        negotiation is done (successful or not), the given ``Event`` will
+        be triggered.  On failure, `is_active` will return ``False``.
+
+        (Since 1.4) If ``event`` is ``None``, this method will not return
+        until negotiation is done.  On success, the method returns normally.
+        Otherwise an SSHException is raised.
+
+        After a successful negotiation, you will usually want to
+        authenticate, calling `auth_password <Transport.auth_password>` or
+        `auth_publickey <Transport.auth_publickey>`.
+
+        .. note:: `connect` is a simpler method for connecting as a client.
+
+        .. note::
+            After calling this method (or `start_server` or `connect`), you
+            should no longer directly read from or write to the original
+            socket object.
+
+        :param .threading.Event event:
+            an event to trigger when negotiation is complete (optional)
+
+        :param float timeout:
+            a timeout, in seconds, for SSH2 session negotiation (optional)
+
+        :raises:
+            `.SSHException` -- if negotiation fails (and no ``event`` was
+            passed in)
+        """
+        self.active = True
+        if event is not None:
+            # async, return immediately and let the app poll for completion
+            self.completion_event = event
+            self.start()
+            return
+
+        # synchronous, wait for a result
+        self.completion_event = event = threading.Event()
+        self.start()
+        max_time = time.time() + timeout if timeout is not None else None
+        while True:
+            event.wait(0.1)
+            if not self.active:
+                e = self.get_exception()
+                if e is not None:
+                    raise e
+                raise SSHException("Negotiation failed.")
+            if event.is_set() or (
+                timeout is not None and time.time() >= max_time
+            ):
+                break
+
+    def start_server(self, event=None, server=None):
+        """
+        Negotiate a new SSH2 session as a server. This is the first step after
+        creating a new `.Transport` and setting up your server host key(s). A
+        separate thread is created for protocol negotiation.
+ + If an event is passed in, this method returns immediately. When + negotiation is done (successful or not), the given ``Event`` will + be triggered. On failure, `is_active` will return ``False``. + + (Since 1.4) If ``event`` is ``None``, this method will not return until + negotiation is done. On success, the method returns normally. + Otherwise an SSHException is raised. + + After a successful negotiation, the client will need to authenticate. + Override the methods `get_allowed_auths + <.ServerInterface.get_allowed_auths>`, `check_auth_none + <.ServerInterface.check_auth_none>`, `check_auth_password + <.ServerInterface.check_auth_password>`, and `check_auth_publickey + <.ServerInterface.check_auth_publickey>` in the given ``server`` object + to control the authentication process. + + After a successful authentication, the client should request to open a + channel. Override `check_channel_request + <.ServerInterface.check_channel_request>` in the given ``server`` + object to allow channels to be opened. + + .. note:: + After calling this method (or `start_client` or `connect`), you + should no longer directly read from or write to the original socket + object. + + :param .threading.Event event: + an event to trigger when negotiation is complete. + :param .ServerInterface server: + an object used to perform authentication and create `channels + <.Channel>` + + :raises: + `.SSHException` -- if negotiation fails (and no ``event`` was + passed in) + """ + if server is None: + server = ServerInterface() + self.server_mode = True + self.server_object = server + self.active = True + if event is not None: + # async, return immediately and let the app poll for completion + self.completion_event = event + self.start() + return + + # synchronous, wait for a result + self.completion_event = event = threading.Event() + self.start() + while True: + event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is not None: + raise e + raise SSHException("Negotiation failed.") + if event.is_set(): + break + + def add_server_key(self, key): + """ + Add a host key to the list of keys used for server mode. When behaving + as a server, the host key is used to sign certain packets during the + SSH2 negotiation, so that the client can trust that we are who we say + we are. Because this is used for signing, the key must contain private + key info, not just the public half. Only one key of each type (RSA or + DSS) is kept. + + :param .PKey key: + the host key to add, usually an `.RSAKey` or `.DSSKey`. + """ + self.server_key_dict[key.get_name()] = key + # Handle SHA-2 extensions for RSA by ensuring that lookups into + # self.server_key_dict will yield this key for any of the algorithm + # names. + if isinstance(key, RSAKey): + self.server_key_dict["rsa-sha2-256"] = key + self.server_key_dict["rsa-sha2-512"] = key + + def get_server_key(self): + """ + Return the active host key, in server mode. After negotiating with the + client, this method will return the negotiated host key. If only one + type of host key was set with `add_server_key`, that's the only key + that will ever be returned. But in cases where you have set more than + one type of host key (for example, an RSA key and a DSS key), the key + type will be negotiated by the client, and this method will return the + key of the type agreed on. If the host key has not been negotiated + yet, ``None`` is returned. In client mode, the behavior is undefined. + + :return: + host key (`.PKey`) of the type negotiated by the client, or + ``None``. 
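+
+        Example of server-mode key setup (an illustrative sketch only; the
+        key path, ``client_sock``, and ``my_server_interface`` are
+        placeholders)::
+
+            host_key = RSAKey.from_private_key_file("/path/to/host_rsa_key")
+            t = Transport(client_sock)
+            t.add_server_key(host_key)
+            t.start_server(server=my_server_interface)
+            negotiated = t.get_server_key()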
+ """ + try: + return self.server_key_dict[self.host_key_type] + except KeyError: + pass + return None + + @staticmethod + def load_server_moduli(filename=None): + """ + (optional) + Load a file of prime moduli for use in doing group-exchange key + negotiation in server mode. It's a rather obscure option and can be + safely ignored. + + In server mode, the remote client may request "group-exchange" key + negotiation, which asks the server to send a random prime number that + fits certain criteria. These primes are pretty difficult to compute, + so they can't be generated on demand. But many systems contain a file + of suitable primes (usually named something like ``/etc/ssh/moduli``). + If you call `load_server_moduli` and it returns ``True``, then this + file of primes has been loaded and we will support "group-exchange" in + server mode. Otherwise server mode will just claim that it doesn't + support that method of key negotiation. + + :param str filename: + optional path to the moduli file, if you happen to know that it's + not in a standard location. + :return: + True if a moduli file was successfully loaded; False otherwise. + + .. note:: This has no effect when used in client mode. + """ + Transport._modulus_pack = ModulusPack() + # places to look for the openssh "moduli" file + file_list = ["/etc/ssh/moduli", "/usr/local/etc/moduli"] + if filename is not None: + file_list.insert(0, filename) + for fn in file_list: + try: + Transport._modulus_pack.read_file(fn) + return True + except IOError: + pass + # none succeeded + Transport._modulus_pack = None + return False + + def close(self): + """ + Close this session, and any open channels that are tied to it. + """ + if not self.active: + return + self.stop_thread() + for chan in list(self._channels.values()): + chan._unlink() + self.sock.close() + + def get_remote_server_key(self): + """ + Return the host key of the server (in client mode). + + .. note:: + Previously this call returned a tuple of ``(key type, key + string)``. You can get the same effect by calling `.PKey.get_name` + for the key type, and ``str(key)`` for the key string. + + :raises: `.SSHException` -- if no session is currently active. + + :return: public key (`.PKey`) of the remote server + """ + if (not self.active) or (not self.initial_kex_done): + raise SSHException("No existing session") + return self.host_key + + def is_active(self): + """ + Return true if this session is active (open). + + :return: + True if the session is still active (open); False if the session is + closed + """ + return self.active + + def open_session( + self, window_size=None, max_packet_size=None, timeout=None + ): + """ + Request a new channel to the server, of type ``"session"``. This is + just an alias for calling `open_channel` with an argument of + ``"session"``. + + .. note:: Modifying the the window and packet sizes might have adverse + effects on the session created. The default values are the same + as in the OpenSSH code base and have been battle tested. + + :param int window_size: + optional window size for this session. + :param int max_packet_size: + optional max packet size for this session. + + :return: a new `.Channel` + + :raises: + `.SSHException` -- if the request is rejected or the session ends + prematurely + + .. versionchanged:: 1.13.4/1.14.3/1.15.3 + Added the ``timeout`` argument. + .. versionchanged:: 1.15 + Added the ``window_size`` and ``max_packet_size`` arguments. 
+ """ + return self.open_channel( + "session", + window_size=window_size, + max_packet_size=max_packet_size, + timeout=timeout, + ) + + def open_x11_channel(self, src_addr=None): + """ + Request a new channel to the client, of type ``"x11"``. This + is just an alias for ``open_channel('x11', src_addr=src_addr)``. + + :param tuple src_addr: + the source address (``(str, int)``) of the x11 server (port is the + x11 port, ie. 6010) + :return: a new `.Channel` + + :raises: + `.SSHException` -- if the request is rejected or the session ends + prematurely + """ + return self.open_channel("x11", src_addr=src_addr) + + def open_forward_agent_channel(self): + """ + Request a new channel to the client, of type + ``"auth-agent@openssh.com"``. + + This is just an alias for ``open_channel('auth-agent@openssh.com')``. + + :return: a new `.Channel` + + :raises: `.SSHException` -- + if the request is rejected or the session ends prematurely + """ + return self.open_channel("auth-agent@openssh.com") + + def open_forwarded_tcpip_channel(self, src_addr, dest_addr): + """ + Request a new channel back to the client, of type ``forwarded-tcpip``. + + This is used after a client has requested port forwarding, for sending + incoming connections back to the client. + + :param src_addr: originator's address + :param dest_addr: local (server) connected address + """ + return self.open_channel("forwarded-tcpip", dest_addr, src_addr) + + def open_channel( + self, + kind, + dest_addr=None, + src_addr=None, + window_size=None, + max_packet_size=None, + timeout=None, + ): + """ + Request a new channel to the server. `Channels <.Channel>` are + socket-like objects used for the actual transfer of data across the + session. You may only request a channel after negotiating encryption + (using `connect` or `start_client`) and authenticating. + + .. note:: Modifying the the window and packet sizes might have adverse + effects on the channel created. The default values are the same + as in the OpenSSH code base and have been battle tested. + + :param str kind: + the kind of channel requested (usually ``"session"``, + ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``) + :param tuple dest_addr: + the destination address (address + port tuple) of this port + forwarding, if ``kind`` is ``"forwarded-tcpip"`` or + ``"direct-tcpip"`` (ignored for other channel types) + :param src_addr: the source address of this port forwarding, if + ``kind`` is ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"`` + :param int window_size: + optional window size for this session. + :param int max_packet_size: + optional max packet size for this session. + :param float timeout: + optional timeout opening a channel, default 3600s (1h) + + :return: a new `.Channel` on success + + :raises: + `.SSHException` -- if the request is rejected, the session ends + prematurely or there is a timeout opening a channel + + .. versionchanged:: 1.15 + Added the ``window_size`` and ``max_packet_size`` arguments. 
+ """ + if not self.active: + raise SSHException("SSH session not active") + timeout = self.channel_timeout if timeout is None else timeout + self.lock.acquire() + try: + window_size = self._sanitize_window_size(window_size) + max_packet_size = self._sanitize_packet_size(max_packet_size) + chanid = self._next_channel() + m = Message() + m.add_byte(cMSG_CHANNEL_OPEN) + m.add_string(kind) + m.add_int(chanid) + m.add_int(window_size) + m.add_int(max_packet_size) + if (kind == "forwarded-tcpip") or (kind == "direct-tcpip"): + m.add_string(dest_addr[0]) + m.add_int(dest_addr[1]) + m.add_string(src_addr[0]) + m.add_int(src_addr[1]) + elif kind == "x11": + m.add_string(src_addr[0]) + m.add_int(src_addr[1]) + chan = Channel(chanid) + self._channels.put(chanid, chan) + self.channel_events[chanid] = event = threading.Event() + self.channels_seen[chanid] = True + chan._set_transport(self) + chan._set_window(window_size, max_packet_size) + finally: + self.lock.release() + self._send_user_message(m) + start_ts = time.time() + while True: + event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is None: + e = SSHException("Unable to open channel.") + raise e + if event.is_set(): + break + elif start_ts + timeout < time.time(): + raise SSHException("Timeout opening channel.") + chan = self._channels.get(chanid) + if chan is not None: + return chan + e = self.get_exception() + if e is None: + e = SSHException("Unable to open channel.") + raise e + + def request_port_forward(self, address, port, handler=None): + """ + Ask the server to forward TCP connections from a listening port on + the server, across this SSH session. + + If a handler is given, that handler is called from a different thread + whenever a forwarded connection arrives. The handler parameters are:: + + handler( + channel, + (origin_addr, origin_port), + (server_addr, server_port), + ) + + where ``server_addr`` and ``server_port`` are the address and port that + the server was listening on. + + If no handler is set, the default behavior is to send new incoming + forwarded connections into the accept queue, to be picked up via + `accept`. + + :param str address: the address to bind when forwarding + :param int port: + the port to forward, or 0 to ask the server to allocate any port + :param callable handler: + optional handler for incoming forwarded connections, of the form + ``func(Channel, (str, int), (str, int))``. + + :return: the port number (`int`) allocated by the server + + :raises: + `.SSHException` -- if the server refused the TCP forward request + """ + if not self.active: + raise SSHException("SSH session not active") + port = int(port) + response = self.global_request( + "tcpip-forward", (address, port), wait=True + ) + if response is None: + raise SSHException("TCP forwarding request denied") + if port == 0: + port = response.get_int() + if handler is None: + + def default_handler(channel, src_addr, dest_addr_port): + # src_addr, src_port = src_addr_port + # dest_addr, dest_port = dest_addr_port + self._queue_incoming_channel(channel) + + handler = default_handler + self._tcp_handler = handler + return port + + def cancel_port_forward(self, address, port): + """ + Ask the server to cancel a previous port-forwarding request. No more + connections to the given address & port will be forwarded across this + ssh connection. 
+ + :param str address: the address to stop forwarding + :param int port: the port to stop forwarding + """ + if not self.active: + return + self._tcp_handler = None + self.global_request("cancel-tcpip-forward", (address, port), wait=True) + + def open_sftp_client(self): + """ + Create an SFTP client channel from an open transport. On success, an + SFTP session will be opened with the remote host, and a new + `.SFTPClient` object will be returned. + + :return: + a new `.SFTPClient` referring to an sftp session (channel) across + this transport + """ + return SFTPClient.from_transport(self) + + def send_ignore(self, byte_count=None): + """ + Send a junk packet across the encrypted link. This is sometimes used + to add "noise" to a connection to confuse would-be attackers. It can + also be used as a keep-alive for long lived connections traversing + firewalls. + + :param int byte_count: + the number of random bytes to send in the payload of the ignored + packet -- defaults to a random number from 10 to 41. + """ + m = Message() + m.add_byte(cMSG_IGNORE) + if byte_count is None: + byte_count = (byte_ord(os.urandom(1)) % 32) + 10 + m.add_bytes(os.urandom(byte_count)) + self._send_user_message(m) + + def renegotiate_keys(self): + """ + Force this session to switch to new keys. Normally this is done + automatically after the session hits a certain number of packets or + bytes sent or received, but this method gives you the option of forcing + new keys whenever you want. Negotiating new keys causes a pause in + traffic both ways as the two sides swap keys and do computations. This + method returns when the session has switched to new keys. + + :raises: + `.SSHException` -- if the key renegotiation failed (which causes + the session to end) + """ + self.completion_event = threading.Event() + self._send_kex_init() + while True: + self.completion_event.wait(0.1) + if not self.active: + e = self.get_exception() + if e is not None: + raise e + raise SSHException("Negotiation failed.") + if self.completion_event.is_set(): + break + return + + def set_keepalive(self, interval): + """ + Turn on/off keepalive packets (default is off). If this is set, after + ``interval`` seconds without sending any data over the connection, a + "keepalive" packet will be sent (and ignored by the remote host). This + can be useful to keep connections alive over a NAT, for example. + + :param int interval: + seconds to wait before sending a keepalive packet (or + 0 to disable keepalives). + """ + + def _request(x=weakref.proxy(self)): + return x.global_request("keepalive@lag.net", wait=False) + + self.packetizer.set_keepalive(interval, _request) + + def global_request(self, kind, data=None, wait=True): + """ + Make a global request to the remote host. These are normally + extensions to the SSH2 protocol. + + :param str kind: name of the request. + :param tuple data: + an optional tuple containing additional data to attach to the + request. + :param bool wait: + ``True`` if this method should not return until a response is + received; ``False`` otherwise. + :return: + a `.Message` containing possible additional data if the request was + successful (or an empty `.Message` if ``wait`` was ``False``); + ``None`` if the request was denied. 
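+
+        Example (an illustrative sketch; ``keepalive@openssh.com`` is a
+        common no-op request that most servers simply reject)::
+
+            reply = t.global_request("keepalive@openssh.com", wait=True)
+            if reply is None:
+                pass  # denied -- normal for keepalives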
+ """ + if wait: + self.completion_event = threading.Event() + m = Message() + m.add_byte(cMSG_GLOBAL_REQUEST) + m.add_string(kind) + m.add_boolean(wait) + if data is not None: + m.add(*data) + self._log(DEBUG, 'Sending global request "{}"'.format(kind)) + self._send_user_message(m) + if not wait: + return None + while True: + self.completion_event.wait(0.1) + if not self.active: + return None + if self.completion_event.is_set(): + break + return self.global_response + + def accept(self, timeout=None): + """ + Return the next channel opened by the client over this transport, in + server mode. If no channel is opened before the given timeout, + ``None`` is returned. + + :param int timeout: + seconds to wait for a channel, or ``None`` to wait forever + :return: a new `.Channel` opened by the client + """ + self.lock.acquire() + try: + if len(self.server_accepts) > 0: + chan = self.server_accepts.pop(0) + else: + self.server_accept_cv.wait(timeout) + if len(self.server_accepts) > 0: + chan = self.server_accepts.pop(0) + else: + # timeout + chan = None + finally: + self.lock.release() + return chan + + def connect( + self, + hostkey=None, + username="", + password=None, + pkey=None, + gss_host=None, + gss_auth=False, + gss_kex=False, + gss_deleg_creds=True, + gss_trust_dns=True, + ): + """ + Negotiate an SSH2 session, and optionally verify the server's host key + and authenticate using a password or private key. This is a shortcut + for `start_client`, `get_remote_server_key`, and + `Transport.auth_password` or `Transport.auth_publickey`. Use those + methods if you want more control. + + You can use this method immediately after creating a Transport to + negotiate encryption with a server. If it fails, an exception will be + thrown. On success, the method will return cleanly, and an encrypted + session exists. You may immediately call `open_channel` or + `open_session` to get a `.Channel` object, which is used for data + transfer. + + .. note:: + If you fail to supply a password or private key, this method may + succeed, but a subsequent `open_channel` or `open_session` call may + fail because you haven't authenticated yet. + + :param .PKey hostkey: + the host key expected from the server, or ``None`` if you don't + want to do host key verification. + :param str username: the username to authenticate as. + :param str password: + a password to use for authentication, if you want to use password + authentication; otherwise ``None``. + :param .PKey pkey: + a private key to use for authentication, if you want to use private + key authentication; otherwise ``None``. + :param str gss_host: + The target's name in the kerberos database. Default: hostname + :param bool gss_auth: + ``True`` if you want to use GSS-API authentication. + :param bool gss_kex: + Perform GSS-API Key Exchange and user authentication. + :param bool gss_deleg_creds: + Whether to delegate GSS-API client credentials. + :param gss_trust_dns: + Indicates whether or not the DNS is trusted to securely + canonicalize the name of the host being connected to (default + ``True``). + + :raises: `.SSHException` -- if the SSH2 negotiation fails, the host key + supplied by the server is incorrect, or authentication fails. + + .. versionchanged:: 2.3 + Added the ``gss_trust_dns`` argument. + """ + if hostkey is not None: + # TODO: a more robust implementation would be to ask each key class + # for its nameS plural, and just use that. 
+ # TODO: that could be used in a bunch of other spots too + if isinstance(hostkey, RSAKey): + self._preferred_keys = [ + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + ] + else: + self._preferred_keys = [hostkey.get_name()] + + self.set_gss_host( + gss_host=gss_host, + trust_dns=gss_trust_dns, + gssapi_requested=gss_kex or gss_auth, + ) + + self.start_client() + + # check host key if we were given one + # If GSS-API Key Exchange was performed, we are not required to check + # the host key. + if (hostkey is not None) and not gss_kex: + key = self.get_remote_server_key() + if ( + key.get_name() != hostkey.get_name() + or key.asbytes() != hostkey.asbytes() + ): + self._log(DEBUG, "Bad host key from server") + self._log( + DEBUG, + "Expected: {}: {}".format( + hostkey.get_name(), repr(hostkey.asbytes()) + ), + ) + self._log( + DEBUG, + "Got : {}: {}".format( + key.get_name(), repr(key.asbytes()) + ), + ) + raise SSHException("Bad host key from server") + self._log( + DEBUG, "Host key verified ({})".format(hostkey.get_name()) + ) + + if (pkey is not None) or (password is not None) or gss_auth or gss_kex: + if gss_auth: + self._log( + DEBUG, "Attempting GSS-API auth... (gssapi-with-mic)" + ) # noqa + self.auth_gssapi_with_mic( + username, self.gss_host, gss_deleg_creds + ) + elif gss_kex: + self._log(DEBUG, "Attempting GSS-API auth... (gssapi-keyex)") + self.auth_gssapi_keyex(username) + elif pkey is not None: + self._log(DEBUG, "Attempting public-key auth...") + self.auth_publickey(username, pkey) + else: + self._log(DEBUG, "Attempting password auth...") + self.auth_password(username, password) + + return + + def get_exception(self): + """ + Return any exception that happened during the last server request. + This can be used to fetch more specific error information after using + calls like `start_client`. The exception (if any) is cleared after + this call. + + :return: + an exception, or ``None`` if there is no stored exception. + + .. versionadded:: 1.1 + """ + self.lock.acquire() + try: + e = self.saved_exception + self.saved_exception = None + return e + finally: + self.lock.release() + + def set_subsystem_handler(self, name, handler, *args, **kwargs): + """ + Set the handler class for a subsystem in server mode. If a request + for this subsystem is made on an open ssh channel later, this handler + will be constructed and called -- see `.SubsystemHandler` for more + detailed documentation. + + Any extra parameters (including keyword arguments) are saved and + passed to the `.SubsystemHandler` constructor later. + + :param str name: name of the subsystem. + :param handler: + subclass of `.SubsystemHandler` that handles this subsystem. + """ + try: + self.lock.acquire() + self.subsystem_table[name] = (handler, args, kwargs) + finally: + self.lock.release() + + def is_authenticated(self): + """ + Return true if this session is active and authenticated. + + :return: + True if the session is still open and has been authenticated + successfully; False if authentication failed and/or the session is + closed. + """ + return ( + self.active + and self.auth_handler is not None + and self.auth_handler.is_authenticated() + ) + + def get_username(self): + """ + Return the username this connection is authenticated for. If the + session is not authenticated (or authentication failed), this method + returns ``None``. + + :return: username that was authenticated (a `str`), or ``None``. 
+ """ + if not self.active or (self.auth_handler is None): + return None + return self.auth_handler.get_username() + + def get_banner(self): + """ + Return the banner supplied by the server upon connect. If no banner is + supplied, this method returns ``None``. + + :returns: server supplied banner (`str`), or ``None``. + + .. versionadded:: 1.13 + """ + if not self.active or (self.auth_handler is None): + return None + return self.auth_handler.banner + + def auth_none(self, username): + """ + Try to authenticate to the server using no authentication at all. + This will almost always fail. It may be useful for determining the + list of authentication types supported by the server, by catching the + `.BadAuthenticationType` exception raised. + + :param str username: the username to authenticate as + :return: + list of auth types permissible for the next stage of + authentication (normally empty) + + :raises: + `.BadAuthenticationType` -- if "none" authentication isn't allowed + by the server for this user + :raises: + `.SSHException` -- if the authentication failed due to a network + error + + .. versionadded:: 1.5 + """ + if (not self.active) or (not self.initial_kex_done): + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_none(username, my_event) + return self.auth_handler.wait_for_response(my_event) + + def auth_password(self, username, password, event=None, fallback=True): + """ + Authenticate to the server using a password. The username and password + are sent over an encrypted link. + + If an ``event`` is passed in, this method will return immediately, and + the event will be triggered once authentication succeeds or fails. On + success, `is_authenticated` will return ``True``. On failure, you may + use `get_exception` to get more detailed error information. + + Since 1.1, if no event is passed, this method will block until the + authentication succeeds or fails. On failure, an exception is raised. + Otherwise, the method simply returns. + + Since 1.5, if no event is passed and ``fallback`` is ``True`` (the + default), if the server doesn't support plain password authentication + but does support so-called "keyboard-interactive" mode, an attempt + will be made to authenticate using this interactive mode. If it fails, + the normal exception will be thrown as if the attempt had never been + made. This is useful for some recent Gentoo and Debian distributions, + which turn off plain password authentication in a misguided belief + that interactive authentication is "more secure". (It's not.) + + If the server requires multi-step authentication (which is very rare), + this method will return a list of auth types permissible for the next + step. Otherwise, in the normal case, an empty list is returned. 
+ + :param str username: the username to authenticate as + :param basestring password: the password to authenticate with + :param .threading.Event event: + an event to trigger when the authentication attempt is complete + (whether it was successful or not) + :param bool fallback: + ``True`` if an attempt at an automated "interactive" password auth + should be made if the server doesn't support normal password auth + :return: + list of auth types permissible for the next stage of + authentication (normally empty) + + :raises: + `.BadAuthenticationType` -- if password authentication isn't + allowed by the server for this user (and no event was passed in) + :raises: + `.AuthenticationException` -- if the authentication failed (and no + event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to send the password unless we're on a secure + # link + raise SSHException("No existing session") + if event is None: + my_event = threading.Event() + else: + my_event = event + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_password(username, password, my_event) + if event is not None: + # caller wants to wait for event themselves + return [] + try: + return self.auth_handler.wait_for_response(my_event) + except BadAuthenticationType as e: + # if password auth isn't allowed, but keyboard-interactive *is*, + # try to fudge it + if not fallback or ("keyboard-interactive" not in e.allowed_types): + raise + try: + + def handler(title, instructions, fields): + if len(fields) > 1: + raise SSHException("Fallback authentication failed.") + if len(fields) == 0: + # for some reason, at least on os x, a 2nd request will + # be made with zero fields requested. maybe it's just + # to try to fake out automated scripting of the exact + # type we're doing here. *shrug* :) + return [] + return [password] + + return self.auth_interactive(username, handler) + except SSHException: + # attempt failed; just raise the original exception + raise e + + def auth_publickey(self, username, key, event=None): + """ + Authenticate to the server using a private key. The key is used to + sign data from the server, so it must include the private part. + + If an ``event`` is passed in, this method will return immediately, and + the event will be triggered once authentication succeeds or fails. On + success, `is_authenticated` will return ``True``. On failure, you may + use `get_exception` to get more detailed error information. + + Since 1.1, if no event is passed, this method will block until the + authentication succeeds or fails. On failure, an exception is raised. + Otherwise, the method simply returns. + + If the server requires multi-step authentication (which is very rare), + this method will return a list of auth types permissible for the next + step. Otherwise, in the normal case, an empty list is returned. 
+ + :param str username: the username to authenticate as + :param .PKey key: the private key to authenticate with + :param .threading.Event event: + an event to trigger when the authentication attempt is complete + (whether it was successful or not) + :return: + list of auth types permissible for the next stage of + authentication (normally empty) + + :raises: + `.BadAuthenticationType` -- if public-key authentication isn't + allowed by the server for this user (and no event was passed in) + :raises: + `.AuthenticationException` -- if the authentication failed (and no + event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + if event is None: + my_event = threading.Event() + else: + my_event = event + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_publickey(username, key, my_event) + if event is not None: + # caller wants to wait for event themselves + return [] + return self.auth_handler.wait_for_response(my_event) + + def auth_interactive(self, username, handler, submethods=""): + """ + Authenticate to the server interactively. A handler is used to answer + arbitrary questions from the server. On many servers, this is just a + dumb wrapper around PAM. + + This method will block until the authentication succeeds or fails, + periodically calling the handler asynchronously to get answers to + authentication questions. The handler may be called more than once + if the server continues to ask questions. + + The handler is expected to be a callable that will handle calls of the + form: ``handler(title, instructions, prompt_list)``. The ``title`` is + meant to be a dialog-window title, and the ``instructions`` are user + instructions (both are strings). ``prompt_list`` will be a list of + prompts, each prompt being a tuple of ``(str, bool)``. The string is + the prompt and the boolean indicates whether the user text should be + echoed. + + A sample call would thus be: + ``handler('title', 'instructions', [('Password:', False)])``. + + The handler should return a list or tuple of answers to the server's + questions. + + If the server requires multi-step authentication (which is very rare), + this method will return a list of auth types permissible for the next + step. Otherwise, in the normal case, an empty list is returned. + + :param str username: the username to authenticate as + :param callable handler: a handler for responding to server questions + :param str submethods: a string list of desired submethods (optional) + :return: + list of auth types permissible for the next stage of + authentication (normally empty). + + :raises: `.BadAuthenticationType` -- if public-key authentication isn't + allowed by the server for this user + :raises: `.AuthenticationException` -- if the authentication failed + :raises: `.SSHException` -- if there was a network error + + .. 
versionadded:: 1.5 + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_interactive( + username, handler, my_event, submethods + ) + return self.auth_handler.wait_for_response(my_event) + + def auth_interactive_dumb(self, username, handler=None, submethods=""): + """ + Authenticate to the server interactively but dumber. + Just print the prompt and / or instructions to stdout and send back + the response. This is good for situations where partial auth is + achieved by key and then the user has to enter a 2fac token. + """ + + if not handler: + + def handler(title, instructions, prompt_list): + answers = [] + if title: + print(title.strip()) + if instructions: + print(instructions.strip()) + for prompt, show_input in prompt_list: + print(prompt.strip(), end=" ") + answers.append(input()) + return answers + + return self.auth_interactive(username, handler, submethods) + + def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): + """ + Authenticate to the Server using GSS-API / SSPI. + + :param str username: The username to authenticate as + :param str gss_host: The target host + :param bool gss_deleg_creds: Delegate credentials or not + :return: list of auth types permissible for the next stage of + authentication (normally empty) + :raises: `.BadAuthenticationType` -- if gssapi-with-mic isn't + allowed by the server (and no event was passed in) + :raises: + `.AuthenticationException` -- if the authentication failed (and no + event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds, my_event + ) + return self.auth_handler.wait_for_response(my_event) + + def auth_gssapi_keyex(self, username): + """ + Authenticate to the server with GSS-API/SSPI if GSS-API kex is in use. + + :param str username: The username to authenticate as. + :returns: + a list of auth types permissible for the next stage of + authentication (normally empty) + :raises: `.BadAuthenticationType` -- + if GSS-API Key Exchange was not performed (and no event was passed + in) + :raises: `.AuthenticationException` -- + if the authentication failed (and no event was passed in) + :raises: `.SSHException` -- if there was a network error + """ + if (not self.active) or (not self.initial_kex_done): + # we should never try to authenticate unless we're on a secure link + raise SSHException("No existing session") + my_event = threading.Event() + self.auth_handler = AuthHandler(self) + self.auth_handler.auth_gssapi_keyex(username, my_event) + return self.auth_handler.wait_for_response(my_event) + + def set_log_channel(self, name): + """ + Set the channel for this transport's logging. The default is + ``"paramiko.transport"`` but it can be set to anything you want. (See + the `.logging` module for more info.) SSH Channels will log to a + sub-channel of the one specified. + + :param str name: new channel name for logging + + .. 
versionadded:: 1.1
+        """
+        self.log_name = name
+        self.logger = util.get_logger(name)
+        self.packetizer.set_log(self.logger)
+
+    def get_log_channel(self):
+        """
+        Return the channel name used for this transport's logging.
+
+        :return: channel name as a `str`
+
+        .. versionadded:: 1.2
+        """
+        return self.log_name
+
+    def set_hexdump(self, hexdump):
+        """
+        Turn on/off logging a hex dump of protocol traffic at DEBUG level in
+        the logs.  Normally you would want this off (which is the default),
+        but if you are debugging something, it may be useful.
+
+        :param bool hexdump:
+            ``True`` to log protocol traffic (in hex) to the log; ``False``
+            otherwise.
+        """
+        self.packetizer.set_hexdump(hexdump)
+
+    def get_hexdump(self):
+        """
+        Return ``True`` if the transport is currently logging hex dumps of
+        protocol traffic.
+
+        :return: ``True`` if hex dumps are being logged, else ``False``.
+
+        .. versionadded:: 1.4
+        """
+        return self.packetizer.get_hexdump()
+
+    def use_compression(self, compress=True):
+        """
+        Turn on/off compression.  This will only have an effect before
+        starting the transport (i.e. before calling `connect`, etc.).  By
+        default, compression is off since it negatively affects interactive
+        sessions.
+
+        :param bool compress:
+            ``True`` to ask the remote client/server to compress traffic;
+            ``False`` to refuse compression
+
+        .. versionadded:: 1.5.2
+        """
+        if compress:
+            self._preferred_compression = ("zlib@openssh.com", "zlib", "none")
+        else:
+            self._preferred_compression = ("none",)
+
+    def getpeername(self):
+        """
+        Return the address of the remote side of this Transport, if possible.
+
+        This is effectively a wrapper around ``getpeername`` on the underlying
+        socket.  If the socket-like object has no ``getpeername`` method, then
+        ``("unknown", 0)`` is returned.
+
+        :return:
+            the address of the remote host, if known, as a ``(str, int)``
+            tuple.
+        """
+        gp = getattr(self.sock, "getpeername", None)
+        if gp is None:
+            return "unknown", 0
+        return gp()
+
+    def stop_thread(self):
+        self.active = False
+        self.packetizer.close()
+        # Keep trying to join() our main thread, quickly, until:
+        # * We join()ed successfully (self.is_alive() == False)
+        # * Or it looks like we've hit issue #520 (socket.recv hitting some
+        #   race condition preventing it from timing out correctly), wherein
+        #   our socket and packetizer are both closed (but where we'd
+        #   otherwise be sitting forever on that recv()).
+        while (
+            self.is_alive()
+            and self is not threading.current_thread()
+            and not self.sock._closed
+            and not self.packetizer.closed
+        ):
+            self.join(0.1)
+
+    # internals...
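+    # For reference: a minimal end-to-end client sketch using the public API
+    # above. Illustrative only; the host, username, and key path are
+    # placeholders, not values from this codebase.
+    #   t = Transport(("203.0.113.5", 22))
+    #   t.connect(username="user",
+    #             pkey=RSAKey.from_private_key_file("/path/to/key"))
+    #   chan = t.open_session()
+    #   chan.exec_command("uptime")
+    #   sftp = t.open_sftp_client()
+    #   t.close()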
+
+    # TODO 4.0: make a public alias for this because multiple other classes
+    # already explicitly rely on it...or just rewrite logging :D
+    def _log(self, level, msg, *args):
+        if issubclass(type(msg), list):
+            for m in msg:
+                self.logger.log(level, m)
+        else:
+            self.logger.log(level, msg, *args)
+
+    def _get_modulus_pack(self):
+        """used by KexGex to find primes for group exchange"""
+        return self._modulus_pack
+
+    def _next_channel(self):
+        """you are holding the lock"""
+        chanid = self._channel_counter
+        while self._channels.get(chanid) is not None:
+            self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF
+            chanid = self._channel_counter
+        self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF
+        return chanid
+
+    def _unlink_channel(self, chanid):
+        """used by a Channel to remove itself from the active channel list"""
+        self._channels.delete(chanid)
+
+    def _send_message(self, data):
+        self.packetizer.send_message(data)
+
+    def _send_user_message(self, data):
+        """
+        send a message, but block if we're in key negotiation.  this is used
+        for user-initiated requests.
+        """
+        start = time.time()
+        while True:
+            self.clear_to_send.wait(0.1)
+            if not self.active:
+                self._log(
+                    DEBUG, "Dropping user packet because connection is dead."
+                )  # noqa
+                return
+            self.clear_to_send_lock.acquire()
+            if self.clear_to_send.is_set():
+                break
+            self.clear_to_send_lock.release()
+            if time.time() > start + self.clear_to_send_timeout:
+                raise SSHException(
+                    "Key-exchange timed out waiting for key negotiation"
+                )  # noqa
+        try:
+            self._send_message(data)
+        finally:
+            self.clear_to_send_lock.release()
+
+    def _set_K_H(self, k, h):
+        """
+        Used by a kex obj to set the K (root key) and H (exchange hash).
+        """
+        self.K = k
+        self.H = h
+        if self.session_id is None:
+            self.session_id = h
+
+    def _expect_packet(self, *ptypes):
+        """
+        Used by a kex obj to register the next packet type it expects to see.
+        """
+        self._expected_packet = tuple(ptypes)
+
+    def _verify_key(self, host_key, sig):
+        key = self._key_info[self.host_key_type](Message(host_key))
+        if key is None:
+            raise SSHException("Unknown host key type")
+        if not key.verify_ssh_sig(self.H, Message(sig)):
+            raise SSHException(
+                "Signature verification ({}) failed.".format(
+                    self.host_key_type
+                )
+            )  # noqa
+        self.host_key = key
+
+    def _compute_key(self, id, nbytes):
+        """id is 'A' - 'F' for the various keys used by ssh"""
+        # Session keys are derived per RFC 4253 sec. 7.2: the initial block is
+        # HASH(K || H || letter || session_id), then extended with
+        # HASH(K || H || output-so-far) until nbytes are available.
+        m = Message()
+        m.add_mpint(self.K)
+        m.add_bytes(self.H)
+        m.add_byte(b(id))
+        m.add_bytes(self.session_id)
+        # Fallback to SHA1 for kex engines that fail to specify a hash
+        # algorithm, or for e.g. transport tests that don't run kexinit.
+ hash_algo = getattr(self.kex_engine, "hash_algo", None) + hash_select_msg = "kex engine {} specified hash_algo {!r}".format( + self.kex_engine.__class__.__name__, hash_algo + ) + if hash_algo is None: + hash_algo = sha1 + hash_select_msg += ", falling back to sha1" + if not hasattr(self, "_logged_hash_selection"): + self._log(DEBUG, hash_select_msg) + setattr(self, "_logged_hash_selection", True) + out = sofar = hash_algo(m.asbytes()).digest() + while len(out) < nbytes: + m = Message() + m.add_mpint(self.K) + m.add_bytes(self.H) + m.add_bytes(sofar) + digest = hash_algo(m.asbytes()).digest() + out += digest + sofar += digest + return out[:nbytes] + + def _get_cipher(self, name, key, iv, operation): + if name not in self._cipher_info: + raise SSHException("Unknown client cipher " + name) + else: + cipher = Cipher( + self._cipher_info[name]["class"](key), + self._cipher_info[name]["mode"](iv), + backend=default_backend(), + ) + if operation is self._ENCRYPT: + return cipher.encryptor() + else: + return cipher.decryptor() + + def _set_forward_agent_handler(self, handler): + if handler is None: + + def default_handler(channel): + self._queue_incoming_channel(channel) + + self._forward_agent_handler = default_handler + else: + self._forward_agent_handler = handler + + def _set_x11_handler(self, handler): + # only called if a channel has turned on x11 forwarding + if handler is None: + # by default, use the same mechanism as accept() + def default_handler(channel, src_addr_port): + self._queue_incoming_channel(channel) + + self._x11_handler = default_handler + else: + self._x11_handler = handler + + def _queue_incoming_channel(self, channel): + self.lock.acquire() + try: + self.server_accepts.append(channel) + self.server_accept_cv.notify() + finally: + self.lock.release() + + def _sanitize_window_size(self, window_size): + if window_size is None: + window_size = self.default_window_size + return clamp_value(MIN_WINDOW_SIZE, window_size, MAX_WINDOW_SIZE) + + def _sanitize_packet_size(self, max_packet_size): + if max_packet_size is None: + max_packet_size = self.default_max_packet_size + return clamp_value(MIN_PACKET_SIZE, max_packet_size, MAX_WINDOW_SIZE) + + def _ensure_authed(self, ptype, message): + """ + Checks message type against current auth state. + + If server mode, and auth has not succeeded, and the message is of a + post-auth type (channel open or global request) an appropriate error + response Message is crafted and returned to caller for sending. + + Otherwise (client mode, authed, or pre-auth message) returns None. + """ + if ( + not self.server_mode + or ptype <= HIGHEST_USERAUTH_MESSAGE_ID + or self.is_authenticated() + ): + return None + # WELP. We must be dealing with someone trying to do non-auth things + # without being authed. Tell them off, based on message class. + reply = Message() + # Global requests have no details, just failure. + if ptype == MSG_GLOBAL_REQUEST: + reply.add_byte(cMSG_REQUEST_FAILURE) + # Channel opens let us reject w/ a specific type + message. 
+ elif ptype == MSG_CHANNEL_OPEN: + kind = message.get_text() # noqa + chanid = message.get_int() + reply.add_byte(cMSG_CHANNEL_OPEN_FAILURE) + reply.add_int(chanid) + reply.add_int(OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED) + reply.add_string("") + reply.add_string("en") + # NOTE: Post-open channel messages do not need checking; the above will + # reject attempts to open channels, meaning that even if a malicious + # user tries to send a MSG_CHANNEL_REQUEST, it will simply fall under + # the logic that handles unknown channel IDs (as the channel list will + # be empty.) + return reply + + def _enforce_strict_kex(self, ptype): + """ + Conditionally raise `MessageOrderError` during strict initial kex. + + This method should only be called inside code that handles non-KEXINIT + messages; it does not interrogate ``ptype`` besides using it to log + more accurately. + """ + if self.agreed_on_strict_kex and not self.initial_kex_done: + name = MSG_NAMES.get(ptype, f"msg {ptype}") + raise MessageOrderError( + f"In strict-kex mode, but was sent {name!r}!" + ) + + def run(self): + # (use the exposed "run" method, because if we specify a thread target + # of a private method, threading.Thread will keep a reference to it + # indefinitely, creating a GC cycle and not letting Transport ever be + # GC'd. it's a bug in Thread.) + + # Hold reference to 'sys' so we can test sys.modules to detect + # interpreter shutdown. + self.sys = sys + + # active=True occurs before the thread is launched, to avoid a race + _active_threads.append(self) + tid = hex(id(self) & xffffffff) + if self.server_mode: + self._log(DEBUG, "starting thread (server mode): {}".format(tid)) + else: + self._log(DEBUG, "starting thread (client mode): {}".format(tid)) + try: + try: + self.packetizer.write_all(b(self.local_version + "\r\n")) + self._log( + DEBUG, + "Local version/idstring: {}".format(self.local_version), + ) # noqa + self._check_banner() + # The above is actually very much part of the handshake, but + # sometimes the banner can be read but the machine is not + # responding, for example when the remote ssh daemon is loaded + # in to memory but we can not read from the disk/spawn a new + # shell. + # Make sure we can specify a timeout for the initial handshake. + # Re-use the banner timeout for now. 
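+            # (start_handshake() arms a read deadline inside the
+            # packetizer; the complete_handshake() call after the main
+            # loop below clears it, so only the initial negotiation is
+            # time-bounded.)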
+ self.packetizer.start_handshake(self.handshake_timeout) + self._send_kex_init() + self._expect_packet(MSG_KEXINIT) + + while self.active: + if self.packetizer.need_rekey() and not self.in_kex: + self._send_kex_init() + try: + ptype, m = self.packetizer.read_message() + except NeedRekeyException: + continue + if ptype == MSG_IGNORE: + self._enforce_strict_kex(ptype) + continue + elif ptype == MSG_DISCONNECT: + self._parse_disconnect(m) + break + elif ptype == MSG_DEBUG: + self._enforce_strict_kex(ptype) + self._parse_debug(m) + continue + if len(self._expected_packet) > 0: + if ptype not in self._expected_packet: + exc_class = SSHException + if self.agreed_on_strict_kex: + exc_class = MessageOrderError + raise exc_class( + "Expecting packet from {!r}, got {:d}".format( + self._expected_packet, ptype + ) + ) # noqa + self._expected_packet = tuple() + # These message IDs indicate key exchange & will differ + # depending on exact exchange algorithm + if (ptype >= 30) and (ptype <= 41): + self.kex_engine.parse_next(ptype, m) + continue + + if ptype in self._handler_table: + error_msg = self._ensure_authed(ptype, m) + if error_msg: + self._send_message(error_msg) + else: + self._handler_table[ptype](m) + elif ptype in self._channel_handler_table: + chanid = m.get_int() + chan = self._channels.get(chanid) + if chan is not None: + self._channel_handler_table[ptype](chan, m) + elif chanid in self.channels_seen: + self._log( + DEBUG, + "Ignoring message for dead channel {:d}".format( # noqa + chanid + ), + ) + else: + self._log( + ERROR, + "Channel request for unknown channel {:d}".format( # noqa + chanid + ), + ) + break + elif ( + self.auth_handler is not None + and ptype in self.auth_handler._handler_table + ): + handler = self.auth_handler._handler_table[ptype] + handler(m) + if len(self._expected_packet) > 0: + continue + else: + # Respond with "I don't implement this particular + # message type" message (unless the message type was + # itself literally MSG_UNIMPLEMENTED, in which case, we + # just shut up to avoid causing a useless loop). + name = MSG_NAMES[ptype] + warning = "Oops, unhandled type {} ({!r})".format( + ptype, name + ) + self._log(WARNING, warning) + if ptype != MSG_UNIMPLEMENTED: + msg = Message() + msg.add_byte(cMSG_UNIMPLEMENTED) + msg.add_int(m.seqno) + self._send_message(msg) + self.packetizer.complete_handshake() + except SSHException as e: + self._log( + ERROR, + "Exception ({}): {}".format( + "server" if self.server_mode else "client", e + ), + ) + self._log(ERROR, util.tb_strings()) + self.saved_exception = e + except EOFError as e: + self._log(DEBUG, "EOF in transport thread") + self.saved_exception = e + except socket.error as e: + if type(e.args) is tuple: + if e.args: + emsg = "{} ({:d})".format(e.args[1], e.args[0]) + else: # empty tuple, e.g. 
socket.timeout + emsg = str(e) or repr(e) + else: + emsg = e.args + self._log(ERROR, "Socket exception: " + emsg) + self.saved_exception = e + except Exception as e: + self._log(ERROR, "Unknown exception: " + str(e)) + self._log(ERROR, util.tb_strings()) + self.saved_exception = e + _active_threads.remove(self) + for chan in list(self._channels.values()): + chan._unlink() + if self.active: + self.active = False + self.packetizer.close() + if self.completion_event is not None: + self.completion_event.set() + if self.auth_handler is not None: + self.auth_handler.abort() + for event in self.channel_events.values(): + event.set() + try: + self.lock.acquire() + self.server_accept_cv.notify() + finally: + self.lock.release() + self.sock.close() + except: + # Don't raise spurious 'NoneType has no attribute X' errors when we + # wake up during interpreter shutdown. Or rather -- raise + # everything *if* sys.modules (used as a convenient sentinel) + # appears to still exist. + if self.sys.modules is not None: + raise + + def _log_agreement(self, which, local, remote): + # Log useful, non-duplicative line re: an agreed-upon algorithm. + # Old code implied algorithms could be asymmetrical (different for + # inbound vs outbound) so we preserve that possibility. + msg = "{}: ".format(which) + if local == remote: + msg += local + else: + msg += "local={}, remote={}".format(local, remote) + self._log(DEBUG, msg) + + # protocol stages + + def _negotiate_keys(self, m): + # throws SSHException on anything unusual + self.clear_to_send_lock.acquire() + try: + self.clear_to_send.clear() + finally: + self.clear_to_send_lock.release() + if self.local_kex_init is None: + # remote side wants to renegotiate + self._send_kex_init() + self._parse_kex_init(m) + self.kex_engine.start_kex() + + def _check_banner(self): + # this is slow, but we only have to do it once + for i in range(100): + # give them 15 seconds for the first line, then just 2 seconds + # each additional line. (some sites have very high latency.) + if i == 0: + timeout = self.banner_timeout + else: + timeout = 2 + try: + buf = self.packetizer.readline(timeout) + except ProxyCommandFailure: + raise + except Exception as e: + raise SSHException( + "Error reading SSH protocol banner" + str(e) + ) + if buf[:4] == "SSH-": + break + self._log(DEBUG, "Banner: " + buf) + if buf[:4] != "SSH-": + raise SSHException('Indecipherable protocol version "' + buf + '"') + # save this server version string for later + self.remote_version = buf + self._log(DEBUG, "Remote version/idstring: {}".format(buf)) + # pull off any attached comment + # NOTE: comment used to be stored in a variable and then...never used. + # since 2003. ca 877cd974b8182d26fa76d566072917ea67b64e67 + i = buf.find(" ") + if i >= 0: + buf = buf[:i] + # parse out version string and make sure it matches + segs = buf.split("-", 2) + if len(segs) < 3: + raise SSHException("Invalid SSH banner") + version = segs[1] + client = segs[2] + if version != "1.99" and version != "2.0": + msg = "Incompatible version ({} instead of 2.0)" + raise IncompatiblePeer(msg.format(version)) + msg = "Connected (version {}, client {})".format(version, client) + self._log(INFO, msg) + + def _send_kex_init(self): + """ + announce to the other side that we'd like to negotiate keys, and what + kind of key negotiation we support. 
+ """ + self.clear_to_send_lock.acquire() + try: + self.clear_to_send.clear() + finally: + self.clear_to_send_lock.release() + self.gss_kex_used = False + self.in_kex = True + kex_algos = list(self.preferred_kex) + if self.server_mode: + mp_required_prefix = "diffie-hellman-group-exchange-sha" + kex_mp = [k for k in kex_algos if k.startswith(mp_required_prefix)] + if (self._modulus_pack is None) and (len(kex_mp) > 0): + # can't do group-exchange if we don't have a pack of potential + # primes + pkex = [ + k + for k in self.get_security_options().kex + if not k.startswith(mp_required_prefix) + ] + self.get_security_options().kex = pkex + available_server_keys = list( + filter( + list(self.server_key_dict.keys()).__contains__, + # TODO: ensure tests will catch if somebody streamlines + # this by mistake - case is the admittedly silly one where + # the only calls to add_server_key() contain keys which + # were filtered out of the below via disabled_algorithms. + # If this is streamlined, we would then be allowing the + # disabled algorithm(s) for hostkey use + # TODO: honestly this prob just wants to get thrown out + # when we make kex configuration more straightforward + self.preferred_keys, + ) + ) + else: + available_server_keys = self.preferred_keys + # Signal support for MSG_EXT_INFO so server will send it to us. + # NOTE: doing this here handily means we don't even consider this + # value when agreeing on real kex algo to use (which is a common + # pitfall when adding this apparently). + kex_algos.append("ext-info-c") + + # Similar to ext-info, but used in both server modes, so done outside + # of above if/else. + if self.advertise_strict_kex: + which = "s" if self.server_mode else "c" + kex_algos.append(f"kex-strict-{which}-v00@openssh.com") + + m = Message() + m.add_byte(cMSG_KEXINIT) + m.add_bytes(os.urandom(16)) + m.add_list(kex_algos) + m.add_list(available_server_keys) + m.add_list(self.preferred_ciphers) + m.add_list(self.preferred_ciphers) + m.add_list(self.preferred_macs) + m.add_list(self.preferred_macs) + m.add_list(self.preferred_compression) + m.add_list(self.preferred_compression) + m.add_string(bytes()) + m.add_string(bytes()) + m.add_boolean(False) + m.add_int(0) + # save a copy for later (needed to compute a hash) + self.local_kex_init = self._latest_kex_init = m.asbytes() + self._send_message(m) + + def _really_parse_kex_init(self, m, ignore_first_byte=False): + parsed = {} + if ignore_first_byte: + m.get_byte() + m.get_bytes(16) # cookie, discarded + parsed["kex_algo_list"] = m.get_list() + parsed["server_key_algo_list"] = m.get_list() + parsed["client_encrypt_algo_list"] = m.get_list() + parsed["server_encrypt_algo_list"] = m.get_list() + parsed["client_mac_algo_list"] = m.get_list() + parsed["server_mac_algo_list"] = m.get_list() + parsed["client_compress_algo_list"] = m.get_list() + parsed["server_compress_algo_list"] = m.get_list() + parsed["client_lang_list"] = m.get_list() + parsed["server_lang_list"] = m.get_list() + parsed["kex_follows"] = m.get_boolean() + m.get_int() # unused + return parsed + + def _get_latest_kex_init(self): + return self._really_parse_kex_init( + Message(self._latest_kex_init), + ignore_first_byte=True, + ) + + def _parse_kex_init(self, m): + parsed = self._really_parse_kex_init(m) + kex_algo_list = parsed["kex_algo_list"] + server_key_algo_list = parsed["server_key_algo_list"] + client_encrypt_algo_list = parsed["client_encrypt_algo_list"] + server_encrypt_algo_list = parsed["server_encrypt_algo_list"] + client_mac_algo_list = 
parsed["client_mac_algo_list"] + server_mac_algo_list = parsed["server_mac_algo_list"] + client_compress_algo_list = parsed["client_compress_algo_list"] + server_compress_algo_list = parsed["server_compress_algo_list"] + client_lang_list = parsed["client_lang_list"] + server_lang_list = parsed["server_lang_list"] + kex_follows = parsed["kex_follows"] + + self._log(DEBUG, "=== Key exchange possibilities ===") + for prefix, value in ( + ("kex algos", kex_algo_list), + ("server key", server_key_algo_list), + # TODO: shouldn't these two lines say "cipher" to match usual + # terminology (including elsewhere in paramiko!)? + ("client encrypt", client_encrypt_algo_list), + ("server encrypt", server_encrypt_algo_list), + ("client mac", client_mac_algo_list), + ("server mac", server_mac_algo_list), + ("client compress", client_compress_algo_list), + ("server compress", server_compress_algo_list), + ("client lang", client_lang_list), + ("server lang", server_lang_list), + ): + if value == [""]: + value = [""] + value = ", ".join(value) + self._log(DEBUG, "{}: {}".format(prefix, value)) + self._log(DEBUG, "kex follows: {}".format(kex_follows)) + self._log(DEBUG, "=== Key exchange agreements ===") + + # Record, and strip out, ext-info and/or strict-kex non-algorithms + self._remote_ext_info = None + self._remote_strict_kex = None + to_pop = [] + for i, algo in enumerate(kex_algo_list): + if algo.startswith("ext-info-"): + self._remote_ext_info = algo + to_pop.insert(0, i) + elif algo.startswith("kex-strict-"): + # NOTE: this is what we are expecting from the /remote/ end. + which = "c" if self.server_mode else "s" + expected = f"kex-strict-{which}-v00@openssh.com" + # Set strict mode if agreed. + self.agreed_on_strict_kex = ( + algo == expected and self.advertise_strict_kex + ) + self._log( + DEBUG, f"Strict kex mode: {self.agreed_on_strict_kex}" + ) + to_pop.insert(0, i) + for i in to_pop: + kex_algo_list.pop(i) + + # CVE mitigation: expect zeroed-out seqno anytime we are performing kex + # init phase, if strict mode was negotiated. + if ( + self.agreed_on_strict_kex + and not self.initial_kex_done + and m.seqno != 0 + ): + raise MessageOrderError( + "In strict-kex mode, but KEXINIT was not the first packet!" + ) + + # as a server, we pick the first item in the client's list that we + # support. + # as a client, we pick the first item in our list that the server + # supports. + if self.server_mode: + agreed_kex = list( + filter(self.preferred_kex.__contains__, kex_algo_list) + ) + else: + agreed_kex = list( + filter(kex_algo_list.__contains__, self.preferred_kex) + ) + if len(agreed_kex) == 0: + # TODO: do an auth-overhaul style aggregate exception here? 
+ # TODO: would let us streamline log output & show all failures up + # front + raise IncompatiblePeer( + "Incompatible ssh peer (no acceptable kex algorithm)" + ) # noqa + self.kex_engine = self._kex_info[agreed_kex[0]](self) + self._log(DEBUG, "Kex: {}".format(agreed_kex[0])) + + if self.server_mode: + available_server_keys = list( + filter( + list(self.server_key_dict.keys()).__contains__, + self.preferred_keys, + ) + ) + agreed_keys = list( + filter( + available_server_keys.__contains__, server_key_algo_list + ) + ) + else: + agreed_keys = list( + filter(server_key_algo_list.__contains__, self.preferred_keys) + ) + if len(agreed_keys) == 0: + raise IncompatiblePeer( + "Incompatible ssh peer (no acceptable host key)" + ) # noqa + self.host_key_type = agreed_keys[0] + if self.server_mode and (self.get_server_key() is None): + raise IncompatiblePeer( + "Incompatible ssh peer (can't match requested host key type)" + ) # noqa + self._log_agreement("HostKey", agreed_keys[0], agreed_keys[0]) + + if self.server_mode: + agreed_local_ciphers = list( + filter( + self.preferred_ciphers.__contains__, + server_encrypt_algo_list, + ) + ) + agreed_remote_ciphers = list( + filter( + self.preferred_ciphers.__contains__, + client_encrypt_algo_list, + ) + ) + else: + agreed_local_ciphers = list( + filter( + client_encrypt_algo_list.__contains__, + self.preferred_ciphers, + ) + ) + agreed_remote_ciphers = list( + filter( + server_encrypt_algo_list.__contains__, + self.preferred_ciphers, + ) + ) + if len(agreed_local_ciphers) == 0 or len(agreed_remote_ciphers) == 0: + raise IncompatiblePeer( + "Incompatible ssh server (no acceptable ciphers)" + ) # noqa + self.local_cipher = agreed_local_ciphers[0] + self.remote_cipher = agreed_remote_ciphers[0] + self._log_agreement( + "Cipher", local=self.local_cipher, remote=self.remote_cipher + ) + + if self.server_mode: + agreed_remote_macs = list( + filter(self.preferred_macs.__contains__, client_mac_algo_list) + ) + agreed_local_macs = list( + filter(self.preferred_macs.__contains__, server_mac_algo_list) + ) + else: + agreed_local_macs = list( + filter(client_mac_algo_list.__contains__, self.preferred_macs) + ) + agreed_remote_macs = list( + filter(server_mac_algo_list.__contains__, self.preferred_macs) + ) + if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0): + raise IncompatiblePeer( + "Incompatible ssh server (no acceptable macs)" + ) + self.local_mac = agreed_local_macs[0] + self.remote_mac = agreed_remote_macs[0] + self._log_agreement( + "MAC", local=self.local_mac, remote=self.remote_mac + ) + + if self.server_mode: + agreed_remote_compression = list( + filter( + self.preferred_compression.__contains__, + client_compress_algo_list, + ) + ) + agreed_local_compression = list( + filter( + self.preferred_compression.__contains__, + server_compress_algo_list, + ) + ) + else: + agreed_local_compression = list( + filter( + client_compress_algo_list.__contains__, + self.preferred_compression, + ) + ) + agreed_remote_compression = list( + filter( + server_compress_algo_list.__contains__, + self.preferred_compression, + ) + ) + if ( + len(agreed_local_compression) == 0 + or len(agreed_remote_compression) == 0 + ): + msg = "Incompatible ssh server (no acceptable compression)" + msg += " {!r} {!r} {!r}" + raise IncompatiblePeer( + msg.format( + agreed_local_compression, + agreed_remote_compression, + self.preferred_compression, + ) + ) + self.local_compression = agreed_local_compression[0] + self.remote_compression = agreed_remote_compression[0] + 
self._log_agreement( + "Compression", + local=self.local_compression, + remote=self.remote_compression, + ) + self._log(DEBUG, "=== End of kex handshake ===") + + # save for computing hash later... + # now wait! openssh has a bug (and others might too) where there are + # actually some extra bytes (one NUL byte in openssh's case) added to + # the end of the packet but not parsed. turns out we need to throw + # away those bytes because they aren't part of the hash. + self.remote_kex_init = cMSG_KEXINIT + m.get_so_far() + + def _activate_inbound(self): + """switch on newly negotiated encryption parameters for + inbound traffic""" + block_size = self._cipher_info[self.remote_cipher]["block-size"] + if self.server_mode: + IV_in = self._compute_key("A", block_size) + key_in = self._compute_key( + "C", self._cipher_info[self.remote_cipher]["key-size"] + ) + else: + IV_in = self._compute_key("B", block_size) + key_in = self._compute_key( + "D", self._cipher_info[self.remote_cipher]["key-size"] + ) + engine = self._get_cipher( + self.remote_cipher, key_in, IV_in, self._DECRYPT + ) + etm = "etm@openssh.com" in self.remote_mac + mac_size = self._mac_info[self.remote_mac]["size"] + mac_engine = self._mac_info[self.remote_mac]["class"] + # initial mac keys are done in the hash's natural size (not the + # potentially truncated transmission size) + if self.server_mode: + mac_key = self._compute_key("E", mac_engine().digest_size) + else: + mac_key = self._compute_key("F", mac_engine().digest_size) + self.packetizer.set_inbound_cipher( + engine, block_size, mac_engine, mac_size, mac_key, etm=etm + ) + compress_in = self._compression_info[self.remote_compression][1] + if compress_in is not None and ( + self.remote_compression != "zlib@openssh.com" or self.authenticated + ): + self._log(DEBUG, "Switching on inbound compression ...") + self.packetizer.set_inbound_compressor(compress_in()) + # Reset inbound sequence number if strict mode. + if self.agreed_on_strict_kex: + self._log( + DEBUG, + "Resetting inbound seqno after NEWKEYS due to strict mode", + ) + self.packetizer.reset_seqno_in() + + def _activate_outbound(self): + """switch on newly negotiated encryption parameters for + outbound traffic""" + m = Message() + m.add_byte(cMSG_NEWKEYS) + self._send_message(m) + # Reset outbound sequence number if strict mode. 
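+        # ("Strict kex" is the countermeasure to the Terrapin
+        # prefix-truncation attack, CVE-2023-48795: both peers reset
+        # their packet sequence numbers to zero after NEWKEYS, so a
+        # packet inserted or dropped during the handshake leaves the
+        # MACs desynchronized and is detected.)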
+ if self.agreed_on_strict_kex: + self._log( + DEBUG, + "Resetting outbound seqno after NEWKEYS due to strict mode", + ) + self.packetizer.reset_seqno_out() + block_size = self._cipher_info[self.local_cipher]["block-size"] + if self.server_mode: + IV_out = self._compute_key("B", block_size) + key_out = self._compute_key( + "D", self._cipher_info[self.local_cipher]["key-size"] + ) + else: + IV_out = self._compute_key("A", block_size) + key_out = self._compute_key( + "C", self._cipher_info[self.local_cipher]["key-size"] + ) + engine = self._get_cipher( + self.local_cipher, key_out, IV_out, self._ENCRYPT + ) + etm = "etm@openssh.com" in self.local_mac + mac_size = self._mac_info[self.local_mac]["size"] + mac_engine = self._mac_info[self.local_mac]["class"] + # initial mac keys are done in the hash's natural size (not the + # potentially truncated transmission size) + if self.server_mode: + mac_key = self._compute_key("F", mac_engine().digest_size) + else: + mac_key = self._compute_key("E", mac_engine().digest_size) + sdctr = self.local_cipher.endswith("-ctr") + self.packetizer.set_outbound_cipher( + engine, block_size, mac_engine, mac_size, mac_key, sdctr, etm=etm + ) + compress_out = self._compression_info[self.local_compression][0] + if compress_out is not None and ( + self.local_compression != "zlib@openssh.com" or self.authenticated + ): + self._log(DEBUG, "Switching on outbound compression ...") + self.packetizer.set_outbound_compressor(compress_out()) + if not self.packetizer.need_rekey(): + self.in_kex = False + # If client indicated extension support, send that packet immediately + if ( + self.server_mode + and self.server_sig_algs + and self._remote_ext_info == "ext-info-c" + ): + extensions = {"server-sig-algs": ",".join(self.preferred_pubkeys)} + m = Message() + m.add_byte(cMSG_EXT_INFO) + m.add_int(len(extensions)) + for name, value in sorted(extensions.items()): + m.add_string(name) + m.add_string(value) + self._send_message(m) + # we always expect to receive NEWKEYS now + self._expect_packet(MSG_NEWKEYS) + + def _auth_trigger(self): + self.authenticated = True + # delayed initiation of compression + if self.local_compression == "zlib@openssh.com": + compress_out = self._compression_info[self.local_compression][0] + self._log(DEBUG, "Switching on outbound compression ...") + self.packetizer.set_outbound_compressor(compress_out()) + if self.remote_compression == "zlib@openssh.com": + compress_in = self._compression_info[self.remote_compression][1] + self._log(DEBUG, "Switching on inbound compression ...") + self.packetizer.set_inbound_compressor(compress_in()) + + def _parse_ext_info(self, msg): + # Packet is a count followed by that many key-string to possibly-bytes + # pairs. + extensions = {} + for _ in range(msg.get_int()): + name = msg.get_text() + value = msg.get_string() + extensions[name] = value + self._log(DEBUG, "Got EXT_INFO: {}".format(extensions)) + # NOTE: this should work ok in cases where a server sends /two/ such + # messages; the RFC explicitly states a 2nd one should overwrite the + # 1st. 
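+        # (The extension of practical interest is "server-sig-algs"
+        # from RFC 8308, which lists the public-key algorithms the
+        # server accepts for auth; the auth layer consults it when
+        # picking an rsa-sha2-* signature algorithm.)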
+ self.server_extensions = extensions + + def _parse_newkeys(self, m): + self._log(DEBUG, "Switch to new keys ...") + self._activate_inbound() + # can also free a bunch of stuff here + self.local_kex_init = self.remote_kex_init = None + self.K = None + self.kex_engine = None + if self.server_mode and (self.auth_handler is None): + # create auth handler for server mode + self.auth_handler = AuthHandler(self) + if not self.initial_kex_done: + # this was the first key exchange + # (also signal to packetizer as it sometimes wants to know this + # status as well, eg when seqnos rollover) + self.initial_kex_done = self.packetizer._initial_kex_done = True + # send an event? + if self.completion_event is not None: + self.completion_event.set() + # it's now okay to send data again (if this was a re-key) + if not self.packetizer.need_rekey(): + self.in_kex = False + self.clear_to_send_lock.acquire() + try: + self.clear_to_send.set() + finally: + self.clear_to_send_lock.release() + return + + def _parse_disconnect(self, m): + code = m.get_int() + desc = m.get_text() + self._log(INFO, "Disconnect (code {:d}): {}".format(code, desc)) + + def _parse_global_request(self, m): + kind = m.get_text() + self._log(DEBUG, 'Received global request "{}"'.format(kind)) + want_reply = m.get_boolean() + if not self.server_mode: + self._log( + DEBUG, + 'Rejecting "{}" global request from server.'.format(kind), + ) + ok = False + elif kind == "tcpip-forward": + address = m.get_text() + port = m.get_int() + ok = self.server_object.check_port_forward_request(address, port) + if ok: + ok = (ok,) + elif kind == "cancel-tcpip-forward": + address = m.get_text() + port = m.get_int() + self.server_object.cancel_port_forward_request(address, port) + ok = True + else: + ok = self.server_object.check_global_request(kind, m) + extra = () + if type(ok) is tuple: + extra = ok + ok = True + if want_reply: + msg = Message() + if ok: + msg.add_byte(cMSG_REQUEST_SUCCESS) + msg.add(*extra) + else: + msg.add_byte(cMSG_REQUEST_FAILURE) + self._send_message(msg) + + def _parse_request_success(self, m): + self._log(DEBUG, "Global request successful.") + self.global_response = m + if self.completion_event is not None: + self.completion_event.set() + + def _parse_request_failure(self, m): + self._log(DEBUG, "Global request denied.") + self.global_response = None + if self.completion_event is not None: + self.completion_event.set() + + def _parse_channel_open_success(self, m): + chanid = m.get_int() + server_chanid = m.get_int() + server_window_size = m.get_int() + server_max_packet_size = m.get_int() + chan = self._channels.get(chanid) + if chan is None: + self._log(WARNING, "Success for unrequested channel! 
[??]") + return + self.lock.acquire() + try: + chan._set_remote_channel( + server_chanid, server_window_size, server_max_packet_size + ) + self._log(DEBUG, "Secsh channel {:d} opened.".format(chanid)) + if chanid in self.channel_events: + self.channel_events[chanid].set() + del self.channel_events[chanid] + finally: + self.lock.release() + return + + def _parse_channel_open_failure(self, m): + chanid = m.get_int() + reason = m.get_int() + reason_str = m.get_text() + m.get_text() # ignored language + reason_text = CONNECTION_FAILED_CODE.get(reason, "(unknown code)") + self._log( + ERROR, + "Secsh channel {:d} open FAILED: {}: {}".format( + chanid, reason_str, reason_text + ), + ) + self.lock.acquire() + try: + self.saved_exception = ChannelException(reason, reason_text) + if chanid in self.channel_events: + self._channels.delete(chanid) + if chanid in self.channel_events: + self.channel_events[chanid].set() + del self.channel_events[chanid] + finally: + self.lock.release() + return + + def _parse_channel_open(self, m): + kind = m.get_text() + chanid = m.get_int() + initial_window_size = m.get_int() + max_packet_size = m.get_int() + reject = False + if ( + kind == "auth-agent@openssh.com" + and self._forward_agent_handler is not None + ): + self._log(DEBUG, "Incoming forward agent connection") + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + elif (kind == "x11") and (self._x11_handler is not None): + origin_addr = m.get_text() + origin_port = m.get_int() + self._log( + DEBUG, + "Incoming x11 connection from {}:{:d}".format( + origin_addr, origin_port + ), + ) + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + elif (kind == "forwarded-tcpip") and (self._tcp_handler is not None): + server_addr = m.get_text() + server_port = m.get_int() + origin_addr = m.get_text() + origin_port = m.get_int() + self._log( + DEBUG, + "Incoming tcp forwarded connection from {}:{:d}".format( + origin_addr, origin_port + ), + ) + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + elif not self.server_mode: + self._log( + DEBUG, + 'Rejecting "{}" channel request from server.'.format(kind), + ) + reject = True + reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + else: + self.lock.acquire() + try: + my_chanid = self._next_channel() + finally: + self.lock.release() + if kind == "direct-tcpip": + # handle direct-tcpip requests coming from the client + dest_addr = m.get_text() + dest_port = m.get_int() + origin_addr = m.get_text() + origin_port = m.get_int() + reason = self.server_object.check_channel_direct_tcpip_request( + my_chanid, + (origin_addr, origin_port), + (dest_addr, dest_port), + ) + else: + reason = self.server_object.check_channel_request( + kind, my_chanid + ) + if reason != OPEN_SUCCEEDED: + self._log( + DEBUG, + 'Rejecting "{}" channel request from client.'.format(kind), + ) + reject = True + if reject: + msg = Message() + msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE) + msg.add_int(chanid) + msg.add_int(reason) + msg.add_string("") + msg.add_string("en") + self._send_message(msg) + return + + chan = Channel(my_chanid) + self.lock.acquire() + try: + self._channels.put(my_chanid, chan) + self.channels_seen[my_chanid] = True + chan._set_transport(self) + chan._set_window( + self.default_window_size, self.default_max_packet_size + ) + chan._set_remote_channel( + chanid, initial_window_size, max_packet_size + ) + finally: + self.lock.release() + m = Message() + 
m.add_byte(cMSG_CHANNEL_OPEN_SUCCESS) + m.add_int(chanid) + m.add_int(my_chanid) + m.add_int(self.default_window_size) + m.add_int(self.default_max_packet_size) + self._send_message(m) + self._log( + DEBUG, "Secsh channel {:d} ({}) opened.".format(my_chanid, kind) + ) + if kind == "auth-agent@openssh.com": + self._forward_agent_handler(chan) + elif kind == "x11": + self._x11_handler(chan, (origin_addr, origin_port)) + elif kind == "forwarded-tcpip": + chan.origin_addr = (origin_addr, origin_port) + self._tcp_handler( + chan, (origin_addr, origin_port), (server_addr, server_port) + ) + else: + self._queue_incoming_channel(chan) + + def _parse_debug(self, m): + m.get_boolean() # always_display + msg = m.get_string() + m.get_string() # language + self._log(DEBUG, "Debug msg: {}".format(util.safe_string(msg))) + + def _get_subsystem_handler(self, name): + try: + self.lock.acquire() + if name not in self.subsystem_table: + return None, [], {} + return self.subsystem_table[name] + finally: + self.lock.release() + + _channel_handler_table = { + MSG_CHANNEL_SUCCESS: Channel._request_success, + MSG_CHANNEL_FAILURE: Channel._request_failed, + MSG_CHANNEL_DATA: Channel._feed, + MSG_CHANNEL_EXTENDED_DATA: Channel._feed_extended, + MSG_CHANNEL_WINDOW_ADJUST: Channel._window_adjust, + MSG_CHANNEL_REQUEST: Channel._handle_request, + MSG_CHANNEL_EOF: Channel._handle_eof, + MSG_CHANNEL_CLOSE: Channel._handle_close, + } + + +# TODO 4.0: drop this, we barely use it ourselves, it badly replicates the +# Transport-internal algorithm management, AND does so in a way which doesn't +# honor newer things like disabled_algorithms! +class SecurityOptions: + """ + Simple object containing the security preferences of an ssh transport. + These are tuples of acceptable ciphers, digests, key types, and key + exchange algorithms, listed in order of preference. + + Changing the contents and/or order of these fields affects the underlying + `.Transport` (but only if you change them before starting the session). + If you try to add an algorithm that paramiko doesn't recognize, + ``ValueError`` will be raised. If you try to assign something besides a + tuple to one of the fields, ``TypeError`` will be raised. + """ + + __slots__ = "_transport" + + def __init__(self, transport): + self._transport = transport + + def __repr__(self): + """ + Returns a string representation of this object, for debugging. 
+        """
+        return "<paramiko.SecurityOptions for {}>".format(self._transport)
+
+    def _set(self, name, orig, x):
+        if type(x) is list:
+            x = tuple(x)
+        if type(x) is not tuple:
+            raise TypeError("expected tuple or list")
+        possible = list(getattr(self._transport, orig).keys())
+        forbidden = [n for n in x if n not in possible]
+        if len(forbidden) > 0:
+            raise ValueError("unknown cipher")
+        setattr(self._transport, name, x)
+
+    @property
+    def ciphers(self):
+        """Symmetric encryption ciphers"""
+        return self._transport._preferred_ciphers
+
+    @ciphers.setter
+    def ciphers(self, x):
+        self._set("_preferred_ciphers", "_cipher_info", x)
+
+    @property
+    def digests(self):
+        """Digest (one-way hash) algorithms"""
+        return self._transport._preferred_macs
+
+    @digests.setter
+    def digests(self, x):
+        self._set("_preferred_macs", "_mac_info", x)
+
+    @property
+    def key_types(self):
+        """Public-key algorithms"""
+        return self._transport._preferred_keys
+
+    @key_types.setter
+    def key_types(self, x):
+        self._set("_preferred_keys", "_key_info", x)
+
+    @property
+    def kex(self):
+        """Key exchange algorithms"""
+        return self._transport._preferred_kex
+
+    @kex.setter
+    def kex(self, x):
+        self._set("_preferred_kex", "_kex_info", x)
+
+    @property
+    def compression(self):
+        """Compression algorithms"""
+        return self._transport._preferred_compression
+
+    @compression.setter
+    def compression(self, x):
+        self._set("_preferred_compression", "_compression_info", x)
+
+
+class ChannelMap:
+    def __init__(self):
+        # (id -> Channel)
+        self._map = weakref.WeakValueDictionary()
+        self._lock = threading.Lock()
+
+    def put(self, chanid, chan):
+        self._lock.acquire()
+        try:
+            self._map[chanid] = chan
+        finally:
+            self._lock.release()
+
+    def get(self, chanid):
+        self._lock.acquire()
+        try:
+            return self._map.get(chanid, None)
+        finally:
+            self._lock.release()
+
+    def delete(self, chanid):
+        self._lock.acquire()
+        try:
+            try:
+                del self._map[chanid]
+            except KeyError:
+                pass
+        finally:
+            self._lock.release()
+
+    def values(self):
+        self._lock.acquire()
+        try:
+            return list(self._map.values())
+        finally:
+            self._lock.release()
+
+    def __len__(self):
+        self._lock.acquire()
+        try:
+            return len(self._map)
+        finally:
+            self._lock.release()
+
+
+class ServiceRequestingTransport(Transport):
+    """
+    Transport, but also handling service requests, like it oughtta!
+
+    .. versionadded:: 3.2
+    """
+
+    # NOTE: this purposefully duplicates some of the parent class in order to
+    # modernize, refactor, etc. The intent is that eventually we will collapse
+    # this one onto the parent in a backwards incompatible release.
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._service_userauth_accepted = False
+        self._handler_table[MSG_SERVICE_ACCEPT] = self._parse_service_accept
+
+    def _parse_service_accept(self, m):
+        service = m.get_text()
+        # Short-circuit for any service name not ssh-userauth.
+        # NOTE: it's technically possible for 'service name' in
+        # SERVICE_REQUEST/ACCEPT messages to be "ssh-connection" --
+        # but I don't see evidence of Paramiko ever initiating or expecting to
+        # receive one of these. We /do/ see the 'service name' field in
+        # MSG_USERAUTH_REQUEST/ACCEPT/FAILURE set to this string, but that is a
+        # different set of handlers, so...!
+        if service != "ssh-userauth":
+            # TODO 4.0: consider erroring here (with an ability to opt out?)
+            # instead as it probably means something went Very Wrong.
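+            # (The only service this class ever requests is
+            # "ssh-userauth"; per the NOTE above, "ssh-connection" is
+            # driven through the auth-layer messages instead, so an
+            # unexpected name here is merely logged.)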
+ self._log( + DEBUG, 'Service request "{}" accepted (?)'.format(service) + ) + return + # Record that we saw a service-userauth acceptance, meaning we are free + # to submit auth requests. + self._service_userauth_accepted = True + self._log(DEBUG, "MSG_SERVICE_ACCEPT received; auth may begin") + + def ensure_session(self): + # Make sure we're not trying to auth on a not-yet-open or + # already-closed transport session; that's our responsibility, not that + # of AuthHandler. + if (not self.active) or (not self.initial_kex_done): + # TODO: better error message? this can happen in many places, eg + # user error (authing before connecting) or developer error (some + # improperly handled pre/mid auth shutdown didn't become fatal + # enough). The latter is much more common & should ideally be fixed + # by terminating things harder? + raise SSHException("No existing session") + # Also make sure we've actually been told we are allowed to auth. + if self._service_userauth_accepted: + return + # Or request to do so, otherwise. + m = Message() + m.add_byte(cMSG_SERVICE_REQUEST) + m.add_string("ssh-userauth") + self._log(DEBUG, "Sending MSG_SERVICE_REQUEST: ssh-userauth") + self._send_message(m) + # Now we wait to hear back; the user is expecting a blocking-style auth + # request so there's no point giving control back anywhere. + while not self._service_userauth_accepted: + # TODO: feels like we're missing an AuthHandler Event like + # 'self.auth_event' which is set when AuthHandler shuts down in + # ways good AND bad. Transport only seems to have completion_event + # which is unclear re: intent, eg it's set by newkeys which always + # happens on connection, so it'll always be set by the time we get + # here. + # NOTE: this copies the timing of event.wait() in + # AuthHandler.wait_for_response, re: 1/10 of a second. Could + # presumably be smaller, but seems unlikely this period is going to + # be "too long" for any code doing ssh networking... + time.sleep(0.1) + self.auth_handler = self.get_auth_handler() + + def get_auth_handler(self): + # NOTE: using new sibling subclass instead of classic AuthHandler + return AuthOnlyHandler(self) + + def auth_none(self, username): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + return self.auth_handler.auth_none(username) + + def auth_password(self, username, password, fallback=True): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + try: + return self.auth_handler.auth_password(username, password) + except BadAuthenticationType as e: + # if password auth isn't allowed, but keyboard-interactive *is*, + # try to fudge it + if not fallback or ("keyboard-interactive" not in e.allowed_types): + raise + try: + + def handler(title, instructions, fields): + if len(fields) > 1: + raise SSHException("Fallback authentication failed.") + if len(fields) == 0: + # for some reason, at least on os x, a 2nd request will + # be made with zero fields requested. maybe it's just + # to try to fake out automated scripting of the exact + # type we're doing here. 
*shrug* :) + return [] + return [password] + + return self.auth_interactive(username, handler) + except SSHException: + # attempt to fudge failed; just raise the original exception + raise e + + def auth_publickey(self, username, key): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + return self.auth_handler.auth_publickey(username, key) + + def auth_interactive(self, username, handler, submethods=""): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + return self.auth_handler.auth_interactive( + username, handler, submethods + ) + + def auth_interactive_dumb(self, username, handler=None, submethods=""): + # TODO 4.0: merge to parent, preserving (most of) docstring + # NOTE: legacy impl omitted equiv of ensure_session since it just wraps + # another call to an auth method. however we reinstate it for + # consistency reasons. + self.ensure_session() + if not handler: + + def handler(title, instructions, prompt_list): + answers = [] + if title: + print(title.strip()) + if instructions: + print(instructions.strip()) + for prompt, show_input in prompt_list: + print(prompt.strip(), end=" ") + answers.append(input()) + return answers + + return self.auth_interactive(username, handler, submethods) + + def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + self.auth_handler = self.get_auth_handler() + return self.auth_handler.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds + ) + + def auth_gssapi_keyex(self, username): + # TODO 4.0: merge to parent, preserving (most of) docstring + self.ensure_session() + self.auth_handler = self.get_auth_handler() + return self.auth_handler.auth_gssapi_keyex(username) diff --git a/venv/lib/python3.12/site-packages/paramiko/util.py b/venv/lib/python3.12/site-packages/paramiko/util.py new file mode 100644 index 0000000..f1e33a5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/util.py @@ -0,0 +1,337 @@ +# Copyright (C) 2003-2007 Robey Pointer +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Useful functions used by the rest of paramiko. 
+""" + + +import sys +import struct +import traceback +import threading +import logging + +from paramiko.common import ( + DEBUG, + zero_byte, + xffffffff, + max_byte, + byte_ord, + byte_chr, +) +from paramiko.config import SSHConfig + + +def inflate_long(s, always_positive=False): + """turns a normalized byte string into a long-int + (adapted from Crypto.Util.number)""" + out = 0 + negative = 0 + if not always_positive and (len(s) > 0) and (byte_ord(s[0]) >= 0x80): + negative = 1 + if len(s) % 4: + filler = zero_byte + if negative: + filler = max_byte + # never convert this to ``s +=`` because this is a string, not a number + # noinspection PyAugmentAssignment + s = filler * (4 - len(s) % 4) + s + for i in range(0, len(s), 4): + out = (out << 32) + struct.unpack(">I", s[i : i + 4])[0] + if negative: + out -= 1 << (8 * len(s)) + return out + + +def deflate_long(n, add_sign_padding=True): + """turns a long-int into a normalized byte string + (adapted from Crypto.Util.number)""" + # after much testing, this algorithm was deemed to be the fastest + s = bytes() + n = int(n) + while (n != 0) and (n != -1): + s = struct.pack(">I", n & xffffffff) + s + n >>= 32 + # strip off leading zeros, FFs + for i in enumerate(s): + if (n == 0) and (i[1] != 0): + break + if (n == -1) and (i[1] != 0xFF): + break + else: + # degenerate case, n was either 0 or -1 + i = (0,) + if n == 0: + s = zero_byte + else: + s = max_byte + s = s[i[0] :] + if add_sign_padding: + if (n == 0) and (byte_ord(s[0]) >= 0x80): + s = zero_byte + s + if (n == -1) and (byte_ord(s[0]) < 0x80): + s = max_byte + s + return s + + +def format_binary(data, prefix=""): + x = 0 + out = [] + while len(data) > x + 16: + out.append(format_binary_line(data[x : x + 16])) + x += 16 + if x < len(data): + out.append(format_binary_line(data[x:])) + return [prefix + line for line in out] + + +def format_binary_line(data): + left = " ".join(["{:02X}".format(byte_ord(c)) for c in data]) + right = "".join( + [".{:c}..".format(byte_ord(c))[(byte_ord(c) + 63) // 95] for c in data] + ) + return "{:50s} {}".format(left, right) + + +def safe_string(s): + out = b"" + for c in s: + i = byte_ord(c) + if 32 <= i <= 127: + out += byte_chr(i) + else: + out += b("%{:02X}".format(i)) + return out + + +def bit_length(n): + try: + return n.bit_length() + except AttributeError: + norm = deflate_long(n, False) + hbyte = byte_ord(norm[0]) + if hbyte == 0: + return 1 + bitlen = len(norm) * 8 + while not (hbyte & 0x80): + hbyte <<= 1 + bitlen -= 1 + return bitlen + + +def tb_strings(): + return "".join(traceback.format_exception(*sys.exc_info())).split("\n") + + +def generate_key_bytes(hash_alg, salt, key, nbytes): + """ + Given a password, passphrase, or other human-source key, scramble it + through a secure hash into some keyworthy bytes. This specific algorithm + is used for encrypting/decrypting private key files. + + :param function hash_alg: A function which creates a new hash object, such + as ``hashlib.sha256``. + :param salt: data to salt the hash with. + :type bytes salt: Hash salt bytes. + :param str key: human-entered password or passphrase. + :param int nbytes: number of bytes to generate. + :return: Key data, as `bytes`. 
+ """ + keydata = bytes() + digest = bytes() + if len(salt) > 8: + salt = salt[:8] + while nbytes > 0: + hash_obj = hash_alg() + if len(digest) > 0: + hash_obj.update(digest) + hash_obj.update(b(key)) + hash_obj.update(salt) + digest = hash_obj.digest() + size = min(nbytes, len(digest)) + keydata += digest[:size] + nbytes -= size + return keydata + + +def load_host_keys(filename): + """ + Read a file of known SSH host keys, in the format used by openssh, and + return a compound dict of ``hostname -> keytype ->`` `PKey + `. The hostname may be an IP address or DNS name. The + keytype will be either ``"ssh-rsa"`` or ``"ssh-dss"``. + + This type of file unfortunately doesn't exist on Windows, but on posix, + it will usually be stored in ``os.path.expanduser("~/.ssh/known_hosts")``. + + Since 1.5.3, this is just a wrapper around `.HostKeys`. + + :param str filename: name of the file to read host keys from + :return: + nested dict of `.PKey` objects, indexed by hostname and then keytype + """ + from paramiko.hostkeys import HostKeys + + return HostKeys(filename) + + +def parse_ssh_config(file_obj): + """ + Provided only as a backward-compatible wrapper around `.SSHConfig`. + + .. deprecated:: 2.7 + Use `SSHConfig.from_file` instead. + """ + config = SSHConfig() + config.parse(file_obj) + return config + + +def lookup_ssh_host_config(hostname, config): + """ + Provided only as a backward-compatible wrapper around `.SSHConfig`. + """ + return config.lookup(hostname) + + +def mod_inverse(x, m): + # it's crazy how small Python can make this function. + u1, u2, u3 = 1, 0, m + v1, v2, v3 = 0, 1, x + + while v3 > 0: + q = u3 // v3 + u1, v1 = v1, u1 - v1 * q + u2, v2 = v2, u2 - v2 * q + u3, v3 = v3, u3 - v3 * q + if u2 < 0: + u2 += m + return u2 + + +_g_thread_data = threading.local() +_g_thread_counter = 0 +_g_thread_lock = threading.Lock() + + +def get_thread_id(): + global _g_thread_data, _g_thread_counter, _g_thread_lock + try: + return _g_thread_data.id + except AttributeError: + with _g_thread_lock: + _g_thread_counter += 1 + _g_thread_data.id = _g_thread_counter + return _g_thread_data.id + + +def log_to_file(filename, level=DEBUG): + """send paramiko logs to a logfile, + if they're not already going somewhere""" + logger = logging.getLogger("paramiko") + if len(logger.handlers) > 0: + return + logger.setLevel(level) + f = open(filename, "a") + handler = logging.StreamHandler(f) + frm = "%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d" + frm += " %(name)s: %(message)s" + handler.setFormatter(logging.Formatter(frm, "%Y%m%d-%H:%M:%S")) + logger.addHandler(handler) + + +# make only one filter object, so it doesn't get applied more than once +class PFilter: + def filter(self, record): + record._threadid = get_thread_id() + return True + + +_pfilter = PFilter() + + +def get_logger(name): + logger = logging.getLogger(name) + logger.addFilter(_pfilter) + return logger + + +def constant_time_bytes_eq(a, b): + if len(a) != len(b): + return False + res = 0 + # noinspection PyUnresolvedReferences + for i in range(len(a)): # noqa: F821 + res |= byte_ord(a[i]) ^ byte_ord(b[i]) + return res == 0 + + +class ClosingContextManager: + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +def clamp_value(minimum, val, maximum): + return max(minimum, min(val, maximum)) + + +def asbytes(s): + """ + Coerce to bytes if possible or return unchanged. 
+ """ + try: + # Attempt to run through our version of b(), which does the Right Thing + # for unicode strings vs bytestrings, and raises TypeError if it's not + # one of those types. + return b(s) + except TypeError: + try: + # If it wasn't a string/byte/buffer-ish object, try calling an + # asbytes() method, which many of our internal classes implement. + return s.asbytes() + except AttributeError: + # Finally, just do nothing & assume this object is sufficiently + # byte-y or buffer-y that everything will work out (or that callers + # are capable of handling whatever it is.) + return s + + +# TODO: clean this up / force callers to assume bytes OR unicode +def b(s, encoding="utf8"): + """cast unicode or bytes to bytes""" + if isinstance(s, bytes): + return s + elif isinstance(s, str): + return s.encode(encoding) + else: + raise TypeError(f"Expected unicode or bytes, got {type(s)}") + + +# TODO: clean this up / force callers to assume bytes OR unicode +def u(s, encoding="utf8"): + """cast bytes or unicode to unicode""" + if isinstance(s, bytes): + return s.decode(encoding) + elif isinstance(s, str): + return s + else: + raise TypeError(f"Expected unicode or bytes, got {type(s)}") diff --git a/venv/lib/python3.12/site-packages/paramiko/win_openssh.py b/venv/lib/python3.12/site-packages/paramiko/win_openssh.py new file mode 100644 index 0000000..614b589 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/win_openssh.py @@ -0,0 +1,56 @@ +# Copyright (C) 2021 Lew Gordon +# Copyright (C) 2022 Patrick Spendrin +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os.path +import time + +PIPE_NAME = r"\\.\pipe\openssh-ssh-agent" + + +def can_talk_to_agent(): + # use os.listdir() instead of os.path.exists(), because os.path.exists() + # uses CreateFileW() API and the pipe cannot be reopen unless the server + # calls DisconnectNamedPipe(). + dir_, name = os.path.split(PIPE_NAME) + name = name.lower() + return any(name == n.lower() for n in os.listdir(dir_)) + + +class OpenSSHAgentConnection: + def __init__(self): + while True: + try: + self._pipe = os.open(PIPE_NAME, os.O_RDWR | os.O_BINARY) + except OSError as e: + # retry when errno 22 which means that the server has not + # called DisconnectNamedPipe() yet. 
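+                # (errno 22 is EINVAL; Windows reports it while the
+                # agent still holds the previous client's end of the
+                # pipe. The loop retries at 100ms intervals, below.)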
+ if e.errno != 22: + raise + else: + break + time.sleep(0.1) + + def send(self, data): + return os.write(self._pipe, data) + + def recv(self, n): + return os.read(self._pipe, n) + + def close(self): + return os.close(self._pipe) diff --git a/venv/lib/python3.12/site-packages/paramiko/win_pageant.py b/venv/lib/python3.12/site-packages/paramiko/win_pageant.py new file mode 100644 index 0000000..c927de6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/paramiko/win_pageant.py @@ -0,0 +1,138 @@ +# Copyright (C) 2005 John Arbash-Meinel +# Modified up by: Todd Whiteman +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +""" +Functions for communicating with Pageant, the basic windows ssh agent program. +""" + +import array +import ctypes.wintypes +import platform +import struct +from paramiko.common import zero_byte +from paramiko.util import b + +import _thread as thread + +from . import _winapi + + +_AGENT_COPYDATA_ID = 0x804E50BA +_AGENT_MAX_MSGLEN = 8192 +# Note: The WM_COPYDATA value is pulled from win32con, as a workaround +# so we do not have to import this huge library just for this one variable. +win32con_WM_COPYDATA = 74 + + +def _get_pageant_window_object(): + return ctypes.windll.user32.FindWindowA(b"Pageant", b"Pageant") + + +def can_talk_to_agent(): + """ + Check to see if there is a "Pageant" agent we can talk to. + + This checks both if we have the required libraries (win32all or ctypes) + and if there is a Pageant currently running. + """ + return bool(_get_pageant_window_object()) + + +if platform.architecture()[0] == "64bit": + ULONG_PTR = ctypes.c_uint64 +else: + ULONG_PTR = ctypes.c_uint32 + + +class COPYDATASTRUCT(ctypes.Structure): + """ + ctypes implementation of + http://msdn.microsoft.com/en-us/library/windows/desktop/ms649010%28v=vs.85%29.aspx + """ + + _fields_ = [ + ("num_data", ULONG_PTR), + ("data_size", ctypes.wintypes.DWORD), + ("data_loc", ctypes.c_void_p), + ] + + +def _query_pageant(msg): + """ + Communication with the Pageant process is done through a shared + memory-mapped file. + """ + hwnd = _get_pageant_window_object() + if not hwnd: + # Raise a failure to connect exception, pageant isn't running anymore! 
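+        # (NOTE: despite the comment above, nothing is raised here;
+        # callers receive None and treat the empty response as a
+        # failed query.)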
+ return None + + # create a name for the mmap + map_name = f"PageantRequest{thread.get_ident():08x}" + + pymap = _winapi.MemoryMap( + map_name, _AGENT_MAX_MSGLEN, _winapi.get_security_attributes_for_user() + ) + with pymap: + pymap.write(msg) + # Create an array buffer containing the mapped filename + char_buffer = array.array("b", b(map_name) + zero_byte) # noqa + char_buffer_address, char_buffer_size = char_buffer.buffer_info() + # Create a string to use for the SendMessage function call + cds = COPYDATASTRUCT( + _AGENT_COPYDATA_ID, char_buffer_size, char_buffer_address + ) + + response = ctypes.windll.user32.SendMessageA( + hwnd, win32con_WM_COPYDATA, ctypes.sizeof(cds), ctypes.byref(cds) + ) + + if response > 0: + pymap.seek(0) + datalen = pymap.read(4) + retlen = struct.unpack(">I", datalen)[0] + return datalen + pymap.read(retlen) + return None + + +class PageantConnection: + """ + Mock "connection" to an agent which roughly approximates the behavior of + a unix local-domain socket (as used by Agent). Requests are sent to the + pageant daemon via special Windows magick, and responses are buffered back + for subsequent reads. + """ + + def __init__(self): + self._response = None + + def send(self, data): + self._response = _query_pageant(data) + + def recv(self, n): + if self._response is None: + return "" + ret = self._response[:n] + self._response = self._response[n:] + if self._response == "": + self._response = None + return ret + + def close(self): + pass diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/LICENSE b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/LICENSE new file mode 100644 index 0000000..48c0f72 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/LICENSE @@ -0,0 +1,116 @@ +.. -*- restructuredtext -*- + +===================== +Copyrights & Licenses +===================== + +Credits +======= +Passlib is primarily developed by Eli Collins. + +Special thanks to Darin Gordon for testing and +feedback on the :mod:`passlib.totp` module. + +License for Passlib +=================== +Passlib is (c) `Assurance Technologies `_, +and is released under the `BSD license `_:: + + Passlib + Copyright (c) 2008-2020 Assurance Technologies, LLC. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of Assurance Technologies, nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Licenses for incorporated software +================================== +Passlib contains some code derived from the following sources: + +MD5-Crypt +--------- +The source file ``passlib/handlers/md5_crypt.py`` contains code derived from the original +`FreeBSD md5-crypt implementation `_, +which is available under the following license:: + + "THE BEER-WARE LICENSE" (Revision 42): + wrote this file. As long as you retain this notice you + can do whatever you want with this stuff. If we meet some day, and you think + this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp + + converted to python May 2008 + by Eli Collins + +DES +--- +The source file ``passlib/crypto/des.py`` contains code derived from +`UnixCrypt.java `_, +a pure-java implementation of the historic unix-crypt password hash algorithm. +It is available under the following license:: + + UnixCrypt.java 0.9 96/11/25 + Copyright (c) 1996 Aki Yoshida. All rights reserved. + Permission to use, copy, modify and distribute this software + for non-commercial or commercial purposes and without fee is + hereby granted provided that this copyright notice appears in + all copies. + + modified April 2001 + by Iris Van den Broeke, Daniel Deville + + modified Aug 2005 + by Greg Wilkins (gregw) + + converted to python Jun 2009 + by Eli Collins + +jBCrypt +------- +The source file ``passlib/crypto/_blowfish/base.py`` contains code derived +from `jBcrypt 0.2 `_, a Java +implementation of the BCrypt password hash algorithm. It is available under +a BSD/ISC license:: + + Copyright (c) 2006 Damien Miller + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTUOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Wordsets +-------- +The EFF wordsets in ``passlib/_data/wordsets`` are (c) 2016 the Electronic Freedom Foundation. +They were downloaded from ``_, +and are released under the `Creative Commons License `_. 
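Aside: the FreeBSD md5-crypt, UnixCrypt DES, and jBCrypt code credited above is what backs passlib's legacy md5_crypt, des_crypt, and bcrypt handlers. A minimal sketch of exercising the first two through the standard handler interface (illustrative only, not part of this commit; both handlers ship pure-Python builtin backends, so nothing beyond passlib itself is needed):

    # Illustrative only: exercises the handlers whose incorporated
    # implementations are credited in the license text above.
    from passlib.hash import md5_crypt, des_crypt

    md5_hash = md5_crypt.hash("s3cret")   # "$1$..." FreeBSD md5-crypt format
    des_hash = des_crypt.hash("s3cret")   # 13-char historic unix-crypt format

    assert md5_crypt.verify("s3cret", md5_hash)
    assert des_crypt.verify("s3cret", des_hash)

Both handlers are kept for verifying existing hashes; neither is a sensible choice for issuing new ones.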
diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/METADATA b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/METADATA new file mode 100644 index 0000000..0665d8a --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/METADATA @@ -0,0 +1,40 @@ +Metadata-Version: 2.1 +Name: passlib +Version: 1.7.4 +Summary: comprehensive password hashing framework supporting over 30 schemes +Home-page: https://passlib.readthedocs.io +Author: Eli Collins +Author-email: elic@assurancetechnologies.com +License: BSD +Download-URL: https://pypi.python.org/packages/source/p/passlib/passlib-1.7.4.tar.gz +Keywords: password secret hash security +Provides-Extra: argon2 +Requires-Dist: argon2-cffi (>=18.2.0) ; extra == 'argon2' +Provides-Extra: bcrypt +Requires-Dist: bcrypt (>=3.1.0) ; extra == 'bcrypt' +Provides-Extra: build_docs +Requires-Dist: sphinx (>=1.6) ; extra == 'build_docs' +Requires-Dist: sphinxcontrib-fulltoc (>=1.2.0) ; extra == 'build_docs' +Requires-Dist: cloud-sptheme (>=1.10.1) ; extra == 'build_docs' +Provides-Extra: totp +Requires-Dist: cryptography ; extra == 'totp' + +Passlib is a password hashing library for Python 2 & 3, which provides +cross-platform implementations of over 30 password hashing algorithms, as well +as a framework for managing existing password hashes. It's designed to be useful +for a wide range of tasks, from verifying a hash found in /etc/shadow, to +providing full-strength password hashing for multi-user applications. + +* See the `documentation `_ + for details, installation instructions, and examples. + +* See the `homepage `_ + for the latest news and more information. + +* See the `changelog `_ + for a description of what's new in Passlib. + +All releases are signed with the gpg key +`4D8592DF4CE1ED31 `_. 
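Aside: the "framework for managing existing password hashes" described in this metadata is exposed through passlib.context.CryptContext. A minimal sketch (illustrative only, not part of this commit; the scheme selection below is an assumption chosen for demonstration, not this project's actual configuration):

    from passlib.context import CryptContext

    # Accept legacy md5_crypt hashes but flag them as deprecated;
    # new hashes are issued with sha512_crypt (builtin backend, no deps).
    ctx = CryptContext(schemes=["sha512_crypt", "md5_crypt"],
                       deprecated=["md5_crypt"])

    stored = ctx.hash("s3cret")              # sha512_crypt by default
    assert ctx.verify("s3cret", stored)
    if ctx.needs_update(stored):             # True for any md5_crypt hash
        stored = ctx.hash("s3cret")          # re-hash transparently at login

This deprecate-and-rehash flow is the kind of migration the description above alludes to: old hashes (e.g. ones lifted from /etc/shadow) keep verifying, while new and re-hashed credentials get a full-strength scheme.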
+ + diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/RECORD b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/RECORD new file mode 100644 index 0000000..17528b6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/RECORD @@ -0,0 +1,202 @@ +passlib-1.7.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +passlib-1.7.4.dist-info/LICENSE,sha256=qVuo8a-I_41fDQwzUZ9DC3-diZK2nUvDaawEI6egWok,4954 +passlib-1.7.4.dist-info/METADATA,sha256=l-uRq14ie328RCoVsayT7AfMHaJqv34ICbpQtKG00jM,1688 +passlib-1.7.4.dist-info/RECORD,, +passlib-1.7.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +passlib-1.7.4.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +passlib-1.7.4.dist-info/top_level.txt,sha256=BA9xbJpLdaTxqvYbKigYnMQkzp8-UQr6S4m3lBTkxzw,8 +passlib-1.7.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +passlib/__init__.py,sha256=nSZrPEtlMQSKZqxERmYcWCDBD6pJ1P_DL5TdeSjIReU,87 +passlib/__pycache__/__init__.cpython-312.pyc,, +passlib/__pycache__/apache.cpython-312.pyc,, +passlib/__pycache__/apps.cpython-312.pyc,, +passlib/__pycache__/context.cpython-312.pyc,, +passlib/__pycache__/exc.cpython-312.pyc,, +passlib/__pycache__/hash.cpython-312.pyc,, +passlib/__pycache__/hosts.cpython-312.pyc,, +passlib/__pycache__/ifc.cpython-312.pyc,, +passlib/__pycache__/pwd.cpython-312.pyc,, +passlib/__pycache__/registry.cpython-312.pyc,, +passlib/__pycache__/totp.cpython-312.pyc,, +passlib/__pycache__/win32.cpython-312.pyc,, +passlib/_data/wordsets/bip39.txt,sha256=JM5Cwv1KlcG4a77pvOHhzyVb0AIuGbq2vVka_Wi379s,13117 +passlib/_data/wordsets/eff_long.txt,sha256=bVV_BpOVj7XmULaLW-5YXrgs9NoyllUFx4npJHQ7xSI,62144 +passlib/_data/wordsets/eff_prefixed.txt,sha256=eqV6TT7PZYFymZK62VdbrN6_fCg3ivKuxqUPEa7DJvU,10778 +passlib/_data/wordsets/eff_short.txt,sha256=NuzKSeT6IMqEsXbDLy6cgvmPRGWFGQ51-YealcCCR78,7180 +passlib/apache.py,sha256=TsHUCur5W8tK3Rsb9jYeeBCc7Ua_hP9e2tSxzoUVzwc,46661 +passlib/apps.py,sha256=AYqni3QIelR7HCiPj_hv2Mcr8bsfdcUkh07DwQqZxWs,8067 +passlib/context.py,sha256=aJeTjA-h7ke3KObvEM8aSJzKdN3wrOyu0hTt-MTbJt0,109195 +passlib/crypto/__init__.py,sha256=St6CGqhrfz3L5Da3aZvRK69le_FcLLE3gA2dEByOmC0,84 +passlib/crypto/__pycache__/__init__.cpython-312.pyc,, +passlib/crypto/__pycache__/_md4.cpython-312.pyc,, +passlib/crypto/__pycache__/des.cpython-312.pyc,, +passlib/crypto/__pycache__/digest.cpython-312.pyc,, +passlib/crypto/_blowfish/__init__.py,sha256=iZb7ft1vxBjCW7lpDtWwTxuMicgvi673M5F_1PKdVkg,6426 +passlib/crypto/_blowfish/__pycache__/__init__.cpython-312.pyc,, +passlib/crypto/_blowfish/__pycache__/_gen_files.cpython-312.pyc,, +passlib/crypto/_blowfish/__pycache__/base.cpython-312.pyc,, +passlib/crypto/_blowfish/__pycache__/unrolled.cpython-312.pyc,, +passlib/crypto/_blowfish/_gen_files.py,sha256=fUrNGWA5NX9CyvoJbNhJv7PJmptbp1uSR9iaWzKkb1I,6176 +passlib/crypto/_blowfish/base.py,sha256=_zF7x6XSbqCl2HH5Eya8KIhhJVbDYuYAWKfxbjOQZWg,20390 +passlib/crypto/_blowfish/unrolled.py,sha256=FOMhVo_jnGS3bMafXfjEffDPSP5vMogFvupnVKAa1lg,37153 +passlib/crypto/_md4.py,sha256=_5RXBX_gowtN0x05PnN0EF_csO4Q_NA5whm6e_vJx08,6905 +passlib/crypto/des.py,sha256=1EsvVd34Z82BYmGb8JIzfVWvTMN70fWhJGmIfmNrBAU,51878 +passlib/crypto/digest.py,sha256=WsfpcC8IM-gvZh56m6v8bjzG4nsNAsaoSv2LNY1_5go,36158 +passlib/crypto/scrypt/__init__.py,sha256=bXmeIerN6DKJSw8XsQEYcsUKCfRpXGb190e-gdHbbqU,9630 +passlib/crypto/scrypt/__pycache__/__init__.cpython-312.pyc,, +passlib/crypto/scrypt/__pycache__/_builtin.cpython-312.pyc,, 
+passlib/crypto/scrypt/__pycache__/_gen_files.cpython-312.pyc,, +passlib/crypto/scrypt/__pycache__/_salsa.cpython-312.pyc,, +passlib/crypto/scrypt/_builtin.py,sha256=82RZc_4LQv2JCL06bX70hCICBaK30Uy7PGzmZtiOjA0,8910 +passlib/crypto/scrypt/_gen_files.py,sha256=vRhjlIKqwvcILCo20sVf8dXr15tW636t5oojAZFssJE,4683 +passlib/crypto/scrypt/_salsa.py,sha256=b87_YEP3jJSmlU2BHSx-NKiJ4e_1eK-RlC4pWA4y71I,5719 +passlib/exc.py,sha256=MIjUTBLcOai52paDLM1nFh6lMTLBLPAn1PTdbCm-9Fo,14481 +passlib/ext/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +passlib/ext/__pycache__/__init__.cpython-312.pyc,, +passlib/ext/django/__init__.py,sha256=RvooHmuUwjLXuSuUJr-9URnY1CRVCzU2xdh-jW-mrN0,228 +passlib/ext/django/__pycache__/__init__.cpython-312.pyc,, +passlib/ext/django/__pycache__/models.cpython-312.pyc,, +passlib/ext/django/__pycache__/utils.cpython-312.pyc,, +passlib/ext/django/models.py,sha256=-XpQRLGG2kTuLWNoh-EhKOaeEV5aIfzavw8qTQ-p1fM,1314 +passlib/ext/django/utils.py,sha256=ObpILR1seOZyecYhuQ1G_R9_N6DMuS4kWZve_giRLiw,49409 +passlib/handlers/__init__.py,sha256=sIPjJgOGHpOIstAwDeHTfxKR8wLVqP4zSa4mvBhAZ_8,86 +passlib/handlers/__pycache__/__init__.cpython-312.pyc,, +passlib/handlers/__pycache__/argon2.cpython-312.pyc,, +passlib/handlers/__pycache__/bcrypt.cpython-312.pyc,, +passlib/handlers/__pycache__/cisco.cpython-312.pyc,, +passlib/handlers/__pycache__/des_crypt.cpython-312.pyc,, +passlib/handlers/__pycache__/digests.cpython-312.pyc,, +passlib/handlers/__pycache__/django.cpython-312.pyc,, +passlib/handlers/__pycache__/fshp.cpython-312.pyc,, +passlib/handlers/__pycache__/ldap_digests.cpython-312.pyc,, +passlib/handlers/__pycache__/md5_crypt.cpython-312.pyc,, +passlib/handlers/__pycache__/misc.cpython-312.pyc,, +passlib/handlers/__pycache__/mssql.cpython-312.pyc,, +passlib/handlers/__pycache__/mysql.cpython-312.pyc,, +passlib/handlers/__pycache__/oracle.cpython-312.pyc,, +passlib/handlers/__pycache__/pbkdf2.cpython-312.pyc,, +passlib/handlers/__pycache__/phpass.cpython-312.pyc,, +passlib/handlers/__pycache__/postgres.cpython-312.pyc,, +passlib/handlers/__pycache__/roundup.cpython-312.pyc,, +passlib/handlers/__pycache__/scram.cpython-312.pyc,, +passlib/handlers/__pycache__/scrypt.cpython-312.pyc,, +passlib/handlers/__pycache__/sha1_crypt.cpython-312.pyc,, +passlib/handlers/__pycache__/sha2_crypt.cpython-312.pyc,, +passlib/handlers/__pycache__/sun_md5_crypt.cpython-312.pyc,, +passlib/handlers/__pycache__/windows.cpython-312.pyc,, +passlib/handlers/argon2.py,sha256=XrMPknuG-16IAwrd7WUuTKdIkKOD-3UPlHHZOjXZe68,38934 +passlib/handlers/bcrypt.py,sha256=LF33HnoxOhjtr7aFtrKgU5SB4mtw3xGx7C4tqecosrk,53582 +passlib/handlers/cisco.py,sha256=Yz0KhmqVVAV_szNnuZq40WgYg6eomBRoAJBbRrSUkGg,16284 +passlib/handlers/des_crypt.py,sha256=W3srE5kIaRQdhfIObz237sm0vPgqR4p_9ZkSd-9UNPo,22367 +passlib/handlers/digests.py,sha256=AeuVSxas2793ILXX0s6xm1lA1u4RPpE9G8wZSaq0Bs4,6327 +passlib/handlers/django.py,sha256=MmoLua6kZWVItsjRrnDgfktzuEpqlvwlPaexvti6I9M,20185 +passlib/handlers/fshp.py,sha256=78sMdnAkW5YHCPC13bLptdElLFrWzZF7rm3bwUWHATo,7799 +passlib/handlers/ldap_digests.py,sha256=jgxtxcERep4xXgVVfKfSVe5JEE45b5tt87NKGvK9_Zk,13049 +passlib/handlers/md5_crypt.py,sha256=jLt3IP-l0HFfU1u2VEtGI1WBYVNjTqhjvwovfFREiwg,13740 +passlib/handlers/misc.py,sha256=o1tWKAdTp3EnCYJOERpdkQnRwrQfWWKeiJXSQurbVMo,10109 +passlib/handlers/mssql.py,sha256=BECU0VaVtc-RzhGx7E_LVu2moZpEI5GASChFJnzDVxA,8482 +passlib/handlers/mysql.py,sha256=8h83lpTHs5q8zflXP0TyMavrELgtlvgUbcLtFUHnbDY,4796 
+passlib/handlers/oracle.py,sha256=WDCqJEo2rDihcuUs4Ka48JBpSm4_JnNqXIVsCGUrkO8,6691 +passlib/handlers/pbkdf2.py,sha256=jVqdo1MSD3_7B5m-osqUTBTwTXnhedLan9lQaM-gysU,19010 +passlib/handlers/phpass.py,sha256=0c7maDUNGxIuyiG_O2hK6MJbxcTu7V2vx67xOq8d7ps,4785 +passlib/handlers/postgres.py,sha256=y9AzGpxjK-z1HLHElRQtLzCMqqtvBwd_xxJraHdGpN4,2274 +passlib/handlers/roundup.py,sha256=lvYArKerC702_MHZXMi3F-iZ9Y2jH10h2UXKDpgqoO8,1178 +passlib/handlers/scram.py,sha256=wBsoBg0qLW8HA5Nsgcnd1bM7ZDYEFbapAGoP0_44N58,22539 +passlib/handlers/scrypt.py,sha256=OYfF2Jjltydr5BswyZ-uFgl4yEjQZowGdIZpEyB7s5Q,14146 +passlib/handlers/sha1_crypt.py,sha256=DZOdKExzlucHCfpgszG1cFdareTqpGUGORNIEn4FJCs,5873 +passlib/handlers/sha2_crypt.py,sha256=kTZm-jmRVnKRhquetVBbiDWi9eY87NTJvUYkjGEm7MY,21800 +passlib/handlers/sun_md5_crypt.py,sha256=uWhoKxBITVwPlh9MIQ3WjVrYjlRMgLrBjLR1Ui2kmZw,13933 +passlib/handlers/windows.py,sha256=nviGebFjOiJO_cDJRo7RiccEhlN2UM7nAQL0pTso9MQ,12384 +passlib/hash.py,sha256=9lVasGFiXDGcL8VOWuEwAjzlATQbmEYF30wOIVotP-U,3750 +passlib/hosts.py,sha256=odRo2WnSfjMuktSIwfR50rzxbKGfzUwZ2CUkvcxvJoA,3302 +passlib/ifc.py,sha256=kL2svtkF99VQDOim_6TE6OGhmSf2EyHrzp0v_UQksqA,14196 +passlib/pwd.py,sha256=VeU_PVkZSvwXPI6AQA96cjqIKyuTvXtUoCK7eI5ab7w,28690 +passlib/registry.py,sha256=5qLDF72XHGSQVoEVqhvEngfZsO2fxVsBpWntX_D0YRs,20301 +passlib/tests/__init__.py,sha256=JIK29mBP8OKz3ChmaEbyr9vvml3weGe7YHMTHzBJcr0,20 +passlib/tests/__main__.py,sha256=iKv9ZuQe5jBzp4Gyp_G3wXhQBxSTJguMx1BCCVVZL6Y,82 +passlib/tests/__pycache__/__init__.cpython-312.pyc,, +passlib/tests/__pycache__/__main__.cpython-312.pyc,, +passlib/tests/__pycache__/_test_bad_register.cpython-312.pyc,, +passlib/tests/__pycache__/backports.cpython-312.pyc,, +passlib/tests/__pycache__/test_apache.cpython-312.pyc,, +passlib/tests/__pycache__/test_apps.cpython-312.pyc,, +passlib/tests/__pycache__/test_context.cpython-312.pyc,, +passlib/tests/__pycache__/test_context_deprecated.cpython-312.pyc,, +passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-312.pyc,, +passlib/tests/__pycache__/test_crypto_des.cpython-312.pyc,, +passlib/tests/__pycache__/test_crypto_digest.cpython-312.pyc,, +passlib/tests/__pycache__/test_crypto_scrypt.cpython-312.pyc,, +passlib/tests/__pycache__/test_ext_django.cpython-312.pyc,, +passlib/tests/__pycache__/test_ext_django_source.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers_argon2.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers_bcrypt.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers_cisco.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers_django.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-312.pyc,, +passlib/tests/__pycache__/test_handlers_scrypt.cpython-312.pyc,, +passlib/tests/__pycache__/test_hosts.cpython-312.pyc,, +passlib/tests/__pycache__/test_pwd.cpython-312.pyc,, +passlib/tests/__pycache__/test_registry.cpython-312.pyc,, +passlib/tests/__pycache__/test_totp.cpython-312.pyc,, +passlib/tests/__pycache__/test_utils.cpython-312.pyc,, +passlib/tests/__pycache__/test_utils_handlers.cpython-312.pyc,, +passlib/tests/__pycache__/test_utils_md4.cpython-312.pyc,, +passlib/tests/__pycache__/test_utils_pbkdf2.cpython-312.pyc,, +passlib/tests/__pycache__/test_win32.cpython-312.pyc,, +passlib/tests/__pycache__/tox_support.cpython-312.pyc,, +passlib/tests/__pycache__/utils.cpython-312.pyc,, +passlib/tests/_test_bad_register.py,sha256=yws8uO2HsUWg8GRQPlxKvE5HniP84QSQW6ncCPiZDpw,541 
+passlib/tests/backports.py,sha256=QTi9tD9DO_RlawkInpPDsFaol--5hsMI-cFvwLIE9B0,2593 +passlib/tests/sample1.cfg,sha256=lJsayArbi6FElINzcTQ1VbgTTGY5LKpMdbCJvK_6H8s,243 +passlib/tests/sample1b.cfg,sha256=2ZQnnpumQsEJpKFsTOHuv_ULhQY5PhQPnsa2rSZmTEU,252 +passlib/tests/sample1c.cfg,sha256=u-BGMklAN05efndzADJfFV9gP1Jbns1gDdwC__VfW-8,490 +passlib/tests/sample_config_1s.cfg,sha256=mMgYjX_UvxVVLFTfZ4m-vxVo31MbSNrZA0R7VY6DzTk,238 +passlib/tests/test_apache.py,sha256=_XhDKgV1nON4ddQQU3GdUfSXrwY_x2OoJQ6l7w2Gzbw,29432 +passlib/tests/test_apps.py,sha256=6MrGeFenjSACzbAtp6jf3PNHoITv_v5DbT_7nhrR-KA,5281 +passlib/tests/test_context.py,sha256=Vsl2hhouEi3yn4_J7J10E09OotLneRHzkAY_jS16F08,74546 +passlib/tests/test_context_deprecated.py,sha256=cVXqcPx_Xqlsh6QF2az34RY23wP3pv8SOBbJFQn65Jg,29282 +passlib/tests/test_crypto_builtin_md4.py,sha256=5PWKh1HoQKC4gI4BcgVDh89xw7lix0R1n9Jn0Y8t8mQ,5660 +passlib/tests/test_crypto_des.py,sha256=0xWgS74G6ygl7gIvF6uhjcoThVTt1TqIH4ZUeqXbVmA,8874 +passlib/tests/test_crypto_digest.py,sha256=b15XIFLDUsjsaxPEQUJkb-csM65IRz_9glwZz7qwN7U,20478 +passlib/tests/test_crypto_scrypt.py,sha256=xJDU3e4bt9N1X0fA9zBLBxESk3PsTR89qJeEWNX2Em4,26646 +passlib/tests/test_ext_django.py,sha256=QUKoa6rLn3hbCVNk7_0z9JW5aOFmyLbBwj0PiWhQJ7s,41364 +passlib/tests/test_ext_django_source.py,sha256=AW-PQRQeLz2cOpKGPeKPLSESC4o-ATbu3-Zd45Coi3k,11034 +passlib/tests/test_handlers.py,sha256=WxYhRTthTzDj-FIP2vS_mH0nlpjgrWOp2C-h3mN6DzE,68622 +passlib/tests/test_handlers_argon2.py,sha256=bSNARahGKPZTawLq-qhVdcuvprCDTNXGWPhSh8aRyaY,22837 +passlib/tests/test_handlers_bcrypt.py,sha256=izOVd0WthIi90YKkvskrW5DZPMMCvO2qtwRkefvgkdY,29549 +passlib/tests/test_handlers_cisco.py,sha256=TLvuGQZygEZbjA01t1hfGfBvx3THnv6ZwbNQCKUhsuI,20471 +passlib/tests/test_handlers_django.py,sha256=ADphUgbG9PwoXQPFbEAPeIDfqjK6DENl_wizP52wYSE,15538 +passlib/tests/test_handlers_pbkdf2.py,sha256=vDM9ipts9EYoauheNHtOOYq0Nl8-9ltTML4gnw2EB2g,18788 +passlib/tests/test_handlers_scrypt.py,sha256=wHsbgoV5xhY4SQtgWFCuit3lygkNvd0AQKZ0lmp72do,4188 +passlib/tests/test_hosts.py,sha256=n0gCywmbsw8q8p4WLp-AlQrQuPfe-29fYwUfWwXi4Co,3906 +passlib/tests/test_pwd.py,sha256=Si9qFDXwkbjTsJ9wQTYe-QhlprVoMQ2E79-eX11FPBk,7190 +passlib/tests/test_registry.py,sha256=9BgXvMhHKQQHBGdgV4WyDDZUboUh0tbHYdgPYr1upSo,9246 +passlib/tests/test_totp.py,sha256=T1o3B97SltvC1OKweXQpX1bBGf6KYQnMl8jcpBSg5DU,65746 +passlib/tests/test_utils.py,sha256=yMWrrnsMIg8b8guyzRK8lDJ243rul6ANhrIgImGlyVI,46118 +passlib/tests/test_utils_handlers.py,sha256=rVSuaNqRUb4Q520nVD4C5smzVs-LdFqQjFZMDRTz-zU,32134 +passlib/tests/test_utils_md4.py,sha256=CfQor3ZfV2JO_8x2RxY5Tl5ZsS0hDvIje46cLvLN5Ew,1474 +passlib/tests/test_utils_pbkdf2.py,sha256=gIhycQf4NUNd5yjUrtKfRm3eqqpklS9W2B7-8INp4Cg,12193 +passlib/tests/test_win32.py,sha256=BXVpHSm71ePXmmbBPTN4H38lUgGqG6-iZasbj_l1mVg,1920 +passlib/tests/tox_support.py,sha256=PDaO1ftDtOFzd299EXm0X5HWRzg37VsBiHsdiMOu5FA,2473 +passlib/tests/utils.py,sha256=mNbhjFNG16dmU13ChMyqOSY39OiR2d8LRUBi41dAMko,147541 +passlib/totp.py,sha256=Wryr57req8NFJnw1fI_eycCaTwmSY8WA7Z3OFjAwHOE,73033 +passlib/utils/__init__.py,sha256=VHkQHu7DcdVKyDjhPuyRG_2-25aI4Zwat3wr6K-rAlo,42925 +passlib/utils/__pycache__/__init__.cpython-312.pyc,, +passlib/utils/__pycache__/binary.cpython-312.pyc,, +passlib/utils/__pycache__/decor.cpython-312.pyc,, +passlib/utils/__pycache__/des.cpython-312.pyc,, +passlib/utils/__pycache__/handlers.cpython-312.pyc,, +passlib/utils/__pycache__/md4.cpython-312.pyc,, +passlib/utils/__pycache__/pbkdf2.cpython-312.pyc,, 
+passlib/utils/binary.py,sha256=dZe2ZjuGr0g6iQseO-ThkQ5XM6KnQFISGQr68vUOOhM,31422 +passlib/utils/compat/__init__.py,sha256=xuPP5PsmLJh_I5NrlaYa012zmWrdzfrYbL_oHqc4tCk,14235 +passlib/utils/compat/__pycache__/__init__.cpython-312.pyc,, +passlib/utils/compat/__pycache__/_ordered_dict.cpython-312.pyc,, +passlib/utils/compat/_ordered_dict.py,sha256=1nga6blaxokrrDdY3UrQgRXYdifZHCDgPYie1aCJkuI,8368 +passlib/utils/decor.py,sha256=svc2C-_DKfiCMmOBNhn_DK7IeS_WYNg26asjhx76LUA,7651 +passlib/utils/des.py,sha256=jFuvhUA3aaiR1xWX4NpXYm5XgcdewRT5Uas-7jLoSTE,2163 +passlib/utils/handlers.py,sha256=E3oRL908uudK_ZLZWeX5DoPxJL8uCfCGpmAkyfJoWQ8,105286 +passlib/utils/md4.py,sha256=pyxEpUe_t8E0u2ZDWOzYIJa0oXgTQBO7DQ8SMKGX8ag,1218 +passlib/utils/pbkdf2.py,sha256=foDGTAKeZywBAVlLZIRf4bX6fC3bzsoC1i_DtcdXr2I,6832 +passlib/win32.py,sha256=E6Ca-4Ki5ZlCSzd86N1CXjh-xQoJYjW-74-kJ6VsHUU,2591 diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/WHEEL b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/WHEEL new file mode 100644 index 0000000..6d38aa0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/top_level.txt new file mode 100644 index 0000000..419829d --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/top_level.txt @@ -0,0 +1 @@ +passlib diff --git a/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/zip-safe b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib-1.7.4.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/venv/lib/python3.12/site-packages/passlib/__init__.py b/venv/lib/python3.12/site-packages/passlib/__init__.py new file mode 100644 index 0000000..963bfcc --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/__init__.py @@ -0,0 +1,3 @@ +"""passlib - suite of password hashing & generation routines""" + +__version__ = '1.7.4' diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..be0be90 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/apache.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/apache.cpython-312.pyc new file mode 100644 index 0000000..37c3bcc Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/apache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/apps.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/apps.cpython-312.pyc new file mode 100644 index 0000000..42b96bd Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/apps.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/context.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/passlib/__pycache__/context.cpython-312.pyc new file mode 100644 index 0000000..8e4a53b Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/context.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/exc.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/exc.cpython-312.pyc new file mode 100644 index 0000000..18ab6ac Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/exc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/hash.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/hash.cpython-312.pyc new file mode 100644 index 0000000..9f18871 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/hash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/hosts.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/hosts.cpython-312.pyc new file mode 100644 index 0000000..907b2ce Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/hosts.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/ifc.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/ifc.cpython-312.pyc new file mode 100644 index 0000000..ac0934c Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/ifc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/pwd.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/pwd.cpython-312.pyc new file mode 100644 index 0000000..65572a8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/pwd.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/registry.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/registry.cpython-312.pyc new file mode 100644 index 0000000..b5b3e25 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/registry.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/totp.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/totp.cpython-312.pyc new file mode 100644 index 0000000..e822936 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/totp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/__pycache__/win32.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/__pycache__/win32.cpython-312.pyc new file mode 100644 index 0000000..9e26c99 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/__pycache__/win32.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/_data/wordsets/bip39.txt b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/bip39.txt new file mode 100644 index 0000000..e29842e --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/bip39.txt @@ -0,0 +1,2049 @@ +abandon +ability +able +about +above +absent +absorb +abstract +absurd +abuse +access +accident +account +accuse +achieve +acid +acoustic +acquire +across +act +action +actor +actress +actual +adapt +add +addict +address +adjust +admit +adult +advance +advice +aerobic +affair +afford +afraid +again +age +agent +agree +ahead +aim +air +airport +aisle +alarm +album +alcohol +alert +alien +all +alley +allow +almost +alone +alpha +already +also 
+alter +always +amateur +amazing +among +amount +amused +analyst +anchor +ancient +anger +angle +angry +animal +ankle +announce +annual +another +answer +antenna +antique +anxiety +any +apart +apology +appear +apple +approve +april +arch +arctic +area +arena +argue +arm +armed +armor +army +around +arrange +arrest +arrive +arrow +art +artefact +artist +artwork +ask +aspect +assault +asset +assist +assume +asthma +athlete +atom +attack +attend +attitude +attract +auction +audit +august +aunt +author +auto +autumn +average +avocado +avoid +awake +aware +away +awesome +awful +awkward +axis +baby +bachelor +bacon +badge +bag +balance +balcony +ball +bamboo +banana +banner +bar +barely +bargain +barrel +base +basic +basket +battle +beach +bean +beauty +because +become +beef +before +begin +behave +behind +believe +below +belt +bench +benefit +best +betray +better +between +beyond +bicycle +bid +bike +bind +biology +bird +birth +bitter +black +blade +blame +blanket +blast +bleak +bless +blind +blood +blossom +blouse +blue +blur +blush +board +boat +body +boil +bomb +bone +bonus +book +boost +border +boring +borrow +boss +bottom +bounce +box +boy +bracket +brain +brand +brass +brave +bread +breeze +brick +bridge +brief +bright +bring +brisk +broccoli +broken +bronze +broom +brother +brown +brush +bubble +buddy +budget +buffalo +build +bulb +bulk +bullet +bundle +bunker +burden +burger +burst +bus +business +busy +butter +buyer +buzz +cabbage +cabin +cable +cactus +cage +cake +call +calm +camera +camp +can +canal +cancel +candy +cannon +canoe +canvas +canyon +capable +capital +captain +car +carbon +card +cargo +carpet +carry +cart +case +cash +casino +castle +casual +cat +catalog +catch +category +cattle +caught +cause +caution +cave +ceiling +celery +cement +census +century +cereal +certain +chair +chalk +champion +change +chaos +chapter +charge +chase +chat +cheap +check +cheese +chef +cherry +chest +chicken +chief +child +chimney +choice +choose +chronic +chuckle +chunk +churn +cigar +cinnamon +circle +citizen +city +civil +claim +clap +clarify +claw +clay +clean +clerk +clever +click +client +cliff +climb +clinic +clip +clock +clog +close +cloth +cloud +clown +club +clump +cluster +clutch +coach +coast +coconut +code +coffee +coil +coin +collect +color +column +combine +come +comfort +comic +common +company +concert +conduct +confirm +congress +connect +consider +control +convince +cook +cool +copper +copy +coral +core +corn +correct +cost +cotton +couch +country +couple +course +cousin +cover +coyote +crack +cradle +craft +cram +crane +crash +crater +crawl +crazy +cream +credit +creek +crew +cricket +crime +crisp +critic +crop +cross +crouch +crowd +crucial +cruel +cruise +crumble +crunch +crush +cry +crystal +cube +culture +cup +cupboard +curious +current +curtain +curve +cushion +custom +cute +cycle +dad +damage +damp +dance +danger +daring +dash +daughter +dawn +day +deal +debate +debris +decade +december +decide +decline +decorate +decrease +deer +defense +define +defy +degree +delay +deliver +demand +demise +denial +dentist +deny +depart +depend +deposit +depth +deputy +derive +describe +desert +design +desk +despair +destroy +detail +detect +develop +device +devote +diagram +dial +diamond +diary +dice +diesel +diet +differ +digital +dignity +dilemma +dinner +dinosaur +direct +dirt +disagree +discover +disease +dish +dismiss +disorder +display +distance +divert +divide +divorce +dizzy +doctor +document +dog +doll +dolphin +domain +donate +donkey +donor +door +dose +double +dove +draft 
+dragon +drama +drastic +draw +dream +dress +drift +drill +drink +drip +drive +drop +drum +dry +duck +dumb +dune +during +dust +dutch +duty +dwarf +dynamic +eager +eagle +early +earn +earth +easily +east +easy +echo +ecology +economy +edge +edit +educate +effort +egg +eight +either +elbow +elder +electric +elegant +element +elephant +elevator +elite +else +embark +embody +embrace +emerge +emotion +employ +empower +empty +enable +enact +end +endless +endorse +enemy +energy +enforce +engage +engine +enhance +enjoy +enlist +enough +enrich +enroll +ensure +enter +entire +entry +envelope +episode +equal +equip +era +erase +erode +erosion +error +erupt +escape +essay +essence +estate +eternal +ethics +evidence +evil +evoke +evolve +exact +example +excess +exchange +excite +exclude +excuse +execute +exercise +exhaust +exhibit +exile +exist +exit +exotic +expand +expect +expire +explain +expose +express +extend +extra +eye +eyebrow +fabric +face +faculty +fade +faint +faith +fall +false +fame +family +famous +fan +fancy +fantasy +farm +fashion +fat +fatal +father +fatigue +fault +favorite +feature +february +federal +fee +feed +feel +female +fence +festival +fetch +fever +few +fiber +fiction +field +figure +file +film +filter +final +find +fine +finger +finish +fire +firm +first +fiscal +fish +fit +fitness +fix +flag +flame +flash +flat +flavor +flee +flight +flip +float +flock +floor +flower +fluid +flush +fly +foam +focus +fog +foil +fold +follow +food +foot +force +forest +forget +fork +fortune +forum +forward +fossil +foster +found +fox +fragile +frame +frequent +fresh +friend +fringe +frog +front +frost +frown +frozen +fruit +fuel +fun +funny +furnace +fury +future +gadget +gain +galaxy +gallery +game +gap +garage +garbage +garden +garlic +garment +gas +gasp +gate +gather +gauge +gaze +general +genius +genre +gentle +genuine +gesture +ghost +giant +gift +giggle +ginger +giraffe +girl +give +glad +glance +glare +glass +glide +glimpse +globe +gloom +glory +glove +glow +glue +goat +goddess +gold +good +goose +gorilla +gospel +gossip +govern +gown +grab +grace +grain +grant +grape +grass +gravity +great +green +grid +grief +grit +grocery +group +grow +grunt +guard +guess +guide +guilt +guitar +gun +gym +habit +hair +half +hammer +hamster +hand +happy +harbor +hard +harsh +harvest +hat +have +hawk +hazard +head +health +heart +heavy +hedgehog +height +hello +helmet +help +hen +hero +hidden +high +hill +hint +hip +hire +history +hobby +hockey +hold +hole +holiday +hollow +home +honey +hood +hope +horn +horror +horse +hospital +host +hotel +hour +hover +hub +huge +human +humble +humor +hundred +hungry +hunt +hurdle +hurry +hurt +husband +hybrid +ice +icon +idea +identify +idle +ignore +ill +illegal +illness +image +imitate +immense +immune +impact +impose +improve +impulse +inch +include +income +increase +index +indicate +indoor +industry +infant +inflict +inform +inhale +inherit +initial +inject +injury +inmate +inner +innocent +input +inquiry +insane +insect +inside +inspire +install +intact +interest +into +invest +invite +involve +iron +island +isolate +issue +item +ivory +jacket +jaguar +jar +jazz +jealous +jeans +jelly +jewel +job +join +joke +journey +joy +judge +juice +jump +jungle +junior +junk +just +kangaroo +keen +keep +ketchup +key +kick +kid +kidney +kind +kingdom +kiss +kit +kitchen +kite +kitten +kiwi +knee +knife +knock +know +lab +label +labor +ladder +lady +lake +lamp +language +laptop +large +later +latin +laugh +laundry +lava +law +lawn +lawsuit +layer +lazy +leader +leaf 
+learn +leave +lecture +left +leg +legal +legend +leisure +lemon +lend +length +lens +leopard +lesson +letter +level +liar +liberty +library +license +life +lift +light +like +limb +limit +link +lion +liquid +list +little +live +lizard +load +loan +lobster +local +lock +logic +lonely +long +loop +lottery +loud +lounge +love +loyal +lucky +luggage +lumber +lunar +lunch +luxury +lyrics +machine +mad +magic +magnet +maid +mail +main +major +make +mammal +man +manage +mandate +mango +mansion +manual +maple +marble +march +margin +marine +market +marriage +mask +mass +master +match +material +math +matrix +matter +maximum +maze +meadow +mean +measure +meat +mechanic +medal +media +melody +melt +member +memory +mention +menu +mercy +merge +merit +merry +mesh +message +metal +method +middle +midnight +milk +million +mimic +mind +minimum +minor +minute +miracle +mirror +misery +miss +mistake +mix +mixed +mixture +mobile +model +modify +mom +moment +monitor +monkey +monster +month +moon +moral +more +morning +mosquito +mother +motion +motor +mountain +mouse +move +movie +much +muffin +mule +multiply +muscle +museum +mushroom +music +must +mutual +myself +mystery +myth +naive +name +napkin +narrow +nasty +nation +nature +near +neck +need +negative +neglect +neither +nephew +nerve +nest +net +network +neutral +never +news +next +nice +night +noble +noise +nominee +noodle +normal +north +nose +notable +note +nothing +notice +novel +now +nuclear +number +nurse +nut +oak +obey +object +oblige +obscure +observe +obtain +obvious +occur +ocean +october +odor +off +offer +office +often +oil +okay +old +olive +olympic +omit +once +one +onion +online +only +open +opera +opinion +oppose +option +orange +orbit +orchard +order +ordinary +organ +orient +original +orphan +ostrich +other +outdoor +outer +output +outside +oval +oven +over +own +owner +oxygen +oyster +ozone +pact +paddle +page +pair +palace +palm +panda +panel +panic +panther +paper +parade +parent +park +parrot +party +pass +patch +path +patient +patrol +pattern +pause +pave +payment +peace +peanut +pear +peasant +pelican +pen +penalty +pencil +people +pepper +perfect +permit +person +pet +phone +photo +phrase +physical +piano +picnic +picture +piece +pig +pigeon +pill +pilot +pink +pioneer +pipe +pistol +pitch +pizza +place +planet +plastic +plate +play +please +pledge +pluck +plug +plunge +poem +poet +point +polar +pole +police +pond +pony +pool +popular +portion +position +possible +post +potato +pottery +poverty +powder +power +practice +praise +predict +prefer +prepare +present +pretty +prevent +price +pride +primary +print +priority +prison +private +prize +problem +process +produce +profit +program +project +promote +proof +property +prosper +protect +proud +provide +public +pudding +pull +pulp +pulse +pumpkin +punch +pupil +puppy +purchase +purity +purpose +purse +push +put +puzzle +pyramid +quality +quantum +quarter +question +quick +quit +quiz +quote +rabbit +raccoon +race +rack +radar +radio +rail +rain +raise +rally +ramp +ranch +random +range +rapid +rare +rate +rather +raven +raw +razor +ready +real +reason +rebel +rebuild +recall +receive +recipe +record +recycle +reduce +reflect +reform +refuse +region +regret +regular +reject +relax +release +relief +rely +remain +remember +remind +remove +render +renew +rent +reopen +repair +repeat +replace +report +require +rescue +resemble +resist +resource +response +result +retire +retreat +return +reunion +reveal +review +reward +rhythm +rib +ribbon +rice +rich +ride +ridge +rifle +right 
+rigid +ring +riot +ripple +risk +ritual +rival +river +road +roast +robot +robust +rocket +romance +roof +rookie +room +rose +rotate +rough +round +route +royal +rubber +rude +rug +rule +run +runway +rural +sad +saddle +sadness +safe +sail +salad +salmon +salon +salt +salute +same +sample +sand +satisfy +satoshi +sauce +sausage +save +say +scale +scan +scare +scatter +scene +scheme +school +science +scissors +scorpion +scout +scrap +screen +script +scrub +sea +search +season +seat +second +secret +section +security +seed +seek +segment +select +sell +seminar +senior +sense +sentence +series +service +session +settle +setup +seven +shadow +shaft +shallow +share +shed +shell +sheriff +shield +shift +shine +ship +shiver +shock +shoe +shoot +shop +short +shoulder +shove +shrimp +shrug +shuffle +shy +sibling +sick +side +siege +sight +sign +silent +silk +silly +silver +similar +simple +since +sing +siren +sister +situate +six +size +skate +sketch +ski +skill +skin +skirt +skull +slab +slam +sleep +slender +slice +slide +slight +slim +slogan +slot +slow +slush +small +smart +smile +smoke +smooth +snack +snake +snap +sniff +snow +soap +soccer +social +sock +soda +soft +solar +soldier +solid +solution +solve +someone +song +soon +sorry +sort +soul +sound +soup +source +south +space +spare +spatial +spawn +speak +special +speed +spell +spend +sphere +spice +spider +spike +spin +spirit +split +spoil +sponsor +spoon +sport +spot +spray +spread +spring +spy +square +squeeze +squirrel +stable +stadium +staff +stage +stairs +stamp +stand +start +state +stay +steak +steel +stem +step +stereo +stick +still +sting +stock +stomach +stone +stool +story +stove +strategy +street +strike +strong +struggle +student +stuff +stumble +style +subject +submit +subway +success +such +sudden +suffer +sugar +suggest +suit +summer +sun +sunny +sunset +super +supply +supreme +sure +surface +surge +surprise +surround +survey +suspect +sustain +swallow +swamp +swap +swarm +swear +sweet +swift +swim +swing +switch +sword +symbol +symptom +syrup +system +table +tackle +tag +tail +talent +talk +tank +tape +target +task +taste +tattoo +taxi +teach +team +tell +ten +tenant +tennis +tent +term +test +text +thank +that +theme +then +theory +there +they +thing +this +thought +three +thrive +throw +thumb +thunder +ticket +tide +tiger +tilt +timber +time +tiny +tip +tired +tissue +title +toast +tobacco +today +toddler +toe +together +toilet +token +tomato +tomorrow +tone +tongue +tonight +tool +tooth +top +topic +topple +torch +tornado +tortoise +toss +total +tourist +toward +tower +town +toy +track +trade +traffic +tragic +train +transfer +trap +trash +travel +tray +treat +tree +trend +trial +tribe +trick +trigger +trim +trip +trophy +trouble +truck +true +truly +trumpet +trust +truth +try +tube +tuition +tumble +tuna +tunnel +turkey +turn +turtle +twelve +twenty +twice +twin +twist +two +type +typical +ugly +umbrella +unable +unaware +uncle +uncover +under +undo +unfair +unfold +unhappy +uniform +unique +unit +universe +unknown +unlock +until +unusual +unveil +update +upgrade +uphold +upon +upper +upset +urban +urge +usage +use +used +useful +useless +usual +utility +vacant +vacuum +vague +valid +valley +valve +van +vanish +vapor +various +vast +vault +vehicle +velvet +vendor +venture +venue +verb +verify +version +very +vessel +veteran +viable +vibrant +vicious +victory +video +view +village +vintage +violin +virtual +virus +visa +visit +visual +vital +vivid +vocal +voice +void +volcano +volume +vote +voyage +wage +wagon +wait 
+walk +wall +walnut +want +warfare +warm +warrior +wash +wasp +waste +water +wave +way +wealth +weapon +wear +weasel +weather +web +wedding +weekend +weird +welcome +west +wet +whale +what +wheat +wheel +when +where +whip +whisper +wide +width +wife +wild +will +win +window +wine +wing +wink +winner +winter +wire +wisdom +wise +wish +witness +wolf +woman +wonder +wood +wool +word +work +world +worry +worth +wrap +wreck +wrestle +wrist +write +wrong +yard +year +yellow +you +young +youth +zebra +zero +zone +zoo + diff --git a/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_long.txt b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_long.txt new file mode 100644 index 0000000..caf71f5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_long.txt @@ -0,0 +1,7776 @@ +abacus +abdomen +abdominal +abide +abiding +ability +ablaze +able +abnormal +abrasion +abrasive +abreast +abridge +abroad +abruptly +absence +absentee +absently +absinthe +absolute +absolve +abstain +abstract +absurd +accent +acclaim +acclimate +accompany +account +accuracy +accurate +accustom +acetone +achiness +aching +acid +acorn +acquaint +acquire +acre +acrobat +acronym +acting +action +activate +activator +active +activism +activist +activity +actress +acts +acutely +acuteness +aeration +aerobics +aerosol +aerospace +afar +affair +affected +affecting +affection +affidavit +affiliate +affirm +affix +afflicted +affluent +afford +affront +aflame +afloat +aflutter +afoot +afraid +afterglow +afterlife +aftermath +aftermost +afternoon +aged +ageless +agency +agenda +agent +aggregate +aghast +agile +agility +aging +agnostic +agonize +agonizing +agony +agreeable +agreeably +agreed +agreeing +agreement +aground +ahead +ahoy +aide +aids +aim +ajar +alabaster +alarm +albatross +album +alfalfa +algebra +algorithm +alias +alibi +alienable +alienate +aliens +alike +alive +alkaline +alkalize +almanac +almighty +almost +aloe +aloft +aloha +alone +alongside +aloof +alphabet +alright +although +altitude +alto +aluminum +alumni +always +amaretto +amaze +amazingly +amber +ambiance +ambiguity +ambiguous +ambition +ambitious +ambulance +ambush +amendable +amendment +amends +amenity +amiable +amicably +amid +amigo +amino +amiss +ammonia +ammonium +amnesty +amniotic +among +amount +amperage +ample +amplifier +amplify +amply +amuck +amulet +amusable +amused +amusement +amuser +amusing +anaconda +anaerobic +anagram +anatomist +anatomy +anchor +anchovy +ancient +android +anemia +anemic +aneurism +anew +angelfish +angelic +anger +angled +angler +angles +angling +angrily +angriness +anguished +angular +animal +animate +animating +animation +animator +anime +animosity +ankle +annex +annotate +announcer +annoying +annually +annuity +anointer +another +answering +antacid +antarctic +anteater +antelope +antennae +anthem +anthill +anthology +antibody +antics +antidote +antihero +antiquely +antiques +antiquity +antirust +antitoxic +antitrust +antiviral +antivirus +antler +antonym +antsy +anvil +anybody +anyhow +anymore +anyone +anyplace +anything +anytime +anyway +anywhere +aorta +apache +apostle +appealing +appear +appease +appeasing +appendage +appendix +appetite +appetizer +applaud +applause +apple +appliance +applicant +applied +apply +appointee +appraisal +appraiser +apprehend +approach +approval +approve +apricot +april +apron +aptitude +aptly +aqua +aqueduct +arbitrary +arbitrate +ardently +area +arena +arguable +arguably +argue +arise +armadillo +armband +armchair +armed +armful +armhole +arming 
+armless +armoire +armored +armory +armrest +army +aroma +arose +around +arousal +arrange +array +arrest +arrival +arrive +arrogance +arrogant +arson +art +ascend +ascension +ascent +ascertain +ashamed +ashen +ashes +ashy +aside +askew +asleep +asparagus +aspect +aspirate +aspire +aspirin +astonish +astound +astride +astrology +astronaut +astronomy +astute +atlantic +atlas +atom +atonable +atop +atrium +atrocious +atrophy +attach +attain +attempt +attendant +attendee +attention +attentive +attest +attic +attire +attitude +attractor +attribute +atypical +auction +audacious +audacity +audible +audibly +audience +audio +audition +augmented +august +authentic +author +autism +autistic +autograph +automaker +automated +automatic +autopilot +available +avalanche +avatar +avenge +avenging +avenue +average +aversion +avert +aviation +aviator +avid +avoid +await +awaken +award +aware +awhile +awkward +awning +awoke +awry +axis +babble +babbling +babied +baboon +backache +backboard +backboned +backdrop +backed +backer +backfield +backfire +backhand +backing +backlands +backlash +backless +backlight +backlit +backlog +backpack +backpedal +backrest +backroom +backshift +backside +backslid +backspace +backspin +backstab +backstage +backtalk +backtrack +backup +backward +backwash +backwater +backyard +bacon +bacteria +bacterium +badass +badge +badland +badly +badness +baffle +baffling +bagel +bagful +baggage +bagged +baggie +bagginess +bagging +baggy +bagpipe +baguette +baked +bakery +bakeshop +baking +balance +balancing +balcony +balmy +balsamic +bamboo +banana +banish +banister +banjo +bankable +bankbook +banked +banker +banking +banknote +bankroll +banner +bannister +banshee +banter +barbecue +barbed +barbell +barber +barcode +barge +bargraph +barista +baritone +barley +barmaid +barman +barn +barometer +barrack +barracuda +barrel +barrette +barricade +barrier +barstool +bartender +barterer +bash +basically +basics +basil +basin +basis +basket +batboy +batch +bath +baton +bats +battalion +battered +battering +battery +batting +battle +bauble +bazooka +blabber +bladder +blade +blah +blame +blaming +blanching +blandness +blank +blaspheme +blasphemy +blast +blatancy +blatantly +blazer +blazing +bleach +bleak +bleep +blemish +blend +bless +blighted +blimp +bling +blinked +blinker +blinking +blinks +blip +blissful +blitz +blizzard +bloated +bloating +blob +blog +bloomers +blooming +blooper +blot +blouse +blubber +bluff +bluish +blunderer +blunt +blurb +blurred +blurry +blurt +blush +blustery +boaster +boastful +boasting +boat +bobbed +bobbing +bobble +bobcat +bobsled +bobtail +bodacious +body +bogged +boggle +bogus +boil +bok +bolster +bolt +bonanza +bonded +bonding +bondless +boned +bonehead +boneless +bonelike +boney +bonfire +bonnet +bonsai +bonus +bony +boogeyman +boogieman +book +boondocks +booted +booth +bootie +booting +bootlace +bootleg +boots +boozy +borax +boring +borough +borrower +borrowing +boss +botanical +botanist +botany +botch +both +bottle +bottling +bottom +bounce +bouncing +bouncy +bounding +boundless +bountiful +bovine +boxcar +boxer +boxing +boxlike +boxy +breach +breath +breeches +breeching +breeder +breeding +breeze +breezy +brethren +brewery +brewing +briar +bribe +brick +bride +bridged +brigade +bright +brilliant +brim +bring +brink +brisket +briskly +briskness +bristle +brittle +broadband +broadcast +broaden +broadly +broadness +broadside +broadways +broiler +broiling +broken +broker +bronchial +bronco +bronze +bronzing +brook +broom +brought +browbeat +brownnose +browse 
+browsing +bruising +brunch +brunette +brunt +brush +brussels +brute +brutishly +bubble +bubbling +bubbly +buccaneer +bucked +bucket +buckle +buckshot +buckskin +bucktooth +buckwheat +buddhism +buddhist +budding +buddy +budget +buffalo +buffed +buffer +buffing +buffoon +buggy +bulb +bulge +bulginess +bulgur +bulk +bulldog +bulldozer +bullfight +bullfrog +bullhorn +bullion +bullish +bullpen +bullring +bullseye +bullwhip +bully +bunch +bundle +bungee +bunion +bunkbed +bunkhouse +bunkmate +bunny +bunt +busboy +bush +busily +busload +bust +busybody +buzz +cabana +cabbage +cabbie +cabdriver +cable +caboose +cache +cackle +cacti +cactus +caddie +caddy +cadet +cadillac +cadmium +cage +cahoots +cake +calamari +calamity +calcium +calculate +calculus +caliber +calibrate +calm +caloric +calorie +calzone +camcorder +cameo +camera +camisole +camper +campfire +camping +campsite +campus +canal +canary +cancel +candied +candle +candy +cane +canine +canister +cannabis +canned +canning +cannon +cannot +canola +canon +canopener +canopy +canteen +canyon +capable +capably +capacity +cape +capillary +capital +capitol +capped +capricorn +capsize +capsule +caption +captivate +captive +captivity +capture +caramel +carat +caravan +carbon +cardboard +carded +cardiac +cardigan +cardinal +cardstock +carefully +caregiver +careless +caress +caretaker +cargo +caring +carless +carload +carmaker +carnage +carnation +carnival +carnivore +carol +carpenter +carpentry +carpool +carport +carried +carrot +carrousel +carry +cartel +cartload +carton +cartoon +cartridge +cartwheel +carve +carving +carwash +cascade +case +cash +casing +casino +casket +cassette +casually +casualty +catacomb +catalog +catalyst +catalyze +catapult +cataract +catatonic +catcall +catchable +catcher +catching +catchy +caterer +catering +catfight +catfish +cathedral +cathouse +catlike +catnap +catnip +catsup +cattail +cattishly +cattle +catty +catwalk +caucasian +caucus +causal +causation +cause +causing +cauterize +caution +cautious +cavalier +cavalry +caviar +cavity +cedar +celery +celestial +celibacy +celibate +celtic +cement +census +ceramics +ceremony +certainly +certainty +certified +certify +cesarean +cesspool +chafe +chaffing +chain +chair +chalice +challenge +chamber +chamomile +champion +chance +change +channel +chant +chaos +chaperone +chaplain +chapped +chaps +chapter +character +charbroil +charcoal +charger +charging +chariot +charity +charm +charred +charter +charting +chase +chasing +chaste +chastise +chastity +chatroom +chatter +chatting +chatty +cheating +cheddar +cheek +cheer +cheese +cheesy +chef +chemicals +chemist +chemo +cherisher +cherub +chess +chest +chevron +chevy +chewable +chewer +chewing +chewy +chief +chihuahua +childcare +childhood +childish +childless +childlike +chili +chill +chimp +chip +chirping +chirpy +chitchat +chivalry +chive +chloride +chlorine +choice +chokehold +choking +chomp +chooser +choosing +choosy +chop +chosen +chowder +chowtime +chrome +chubby +chuck +chug +chummy +chump +chunk +churn +chute +cider +cilantro +cinch +cinema +cinnamon +circle +circling +circular +circulate +circus +citable +citadel +citation +citizen +citric +citrus +city +civic +civil +clad +claim +clambake +clammy +clamor +clamp +clamshell +clang +clanking +clapped +clapper +clapping +clarify +clarinet +clarity +clash +clasp +class +clatter +clause +clavicle +claw +clay +clean +clear +cleat +cleaver +cleft +clench +clergyman +clerical +clerk +clever +clicker +client +climate +climatic +cling +clinic +clinking +clip +clique +cloak +clobber 
+clock +clone +cloning +closable +closure +clothes +clothing +cloud +clover +clubbed +clubbing +clubhouse +clump +clumsily +clumsy +clunky +clustered +clutch +clutter +coach +coagulant +coastal +coaster +coasting +coastland +coastline +coat +coauthor +cobalt +cobbler +cobweb +cocoa +coconut +cod +coeditor +coerce +coexist +coffee +cofounder +cognition +cognitive +cogwheel +coherence +coherent +cohesive +coil +coke +cola +cold +coleslaw +coliseum +collage +collapse +collar +collected +collector +collide +collie +collision +colonial +colonist +colonize +colony +colossal +colt +coma +come +comfort +comfy +comic +coming +comma +commence +commend +comment +commerce +commode +commodity +commodore +common +commotion +commute +commuting +compacted +compacter +compactly +compactor +companion +company +compare +compel +compile +comply +component +composed +composer +composite +compost +composure +compound +compress +comprised +computer +computing +comrade +concave +conceal +conceded +concept +concerned +concert +conch +concierge +concise +conclude +concrete +concur +condense +condiment +condition +condone +conducive +conductor +conduit +cone +confess +confetti +confidant +confident +confider +confiding +configure +confined +confining +confirm +conflict +conform +confound +confront +confused +confusing +confusion +congenial +congested +congrats +congress +conical +conjoined +conjure +conjuror +connected +connector +consensus +consent +console +consoling +consonant +constable +constant +constrain +constrict +construct +consult +consumer +consuming +contact +container +contempt +contend +contented +contently +contents +contest +context +contort +contour +contrite +control +contusion +convene +convent +copartner +cope +copied +copier +copilot +coping +copious +copper +copy +coral +cork +cornball +cornbread +corncob +cornea +corned +corner +cornfield +cornflake +cornhusk +cornmeal +cornstalk +corny +coronary +coroner +corporal +corporate +corral +correct +corridor +corrode +corroding +corrosive +corsage +corset +cortex +cosigner +cosmetics +cosmic +cosmos +cosponsor +cost +cottage +cotton +couch +cough +could +countable +countdown +counting +countless +country +county +courier +covenant +cover +coveted +coveting +coyness +cozily +coziness +cozy +crabbing +crabgrass +crablike +crabmeat +cradle +cradling +crafter +craftily +craftsman +craftwork +crafty +cramp +cranberry +crane +cranial +cranium +crank +crate +crave +craving +crawfish +crawlers +crawling +crayfish +crayon +crazed +crazily +craziness +crazy +creamed +creamer +creamlike +crease +creasing +creatable +create +creation +creative +creature +credible +credibly +credit +creed +creme +creole +crepe +crept +crescent +crested +cresting +crestless +crevice +crewless +crewman +crewmate +crib +cricket +cried +crier +crimp +crimson +cringe +cringing +crinkle +crinkly +crisped +crisping +crisply +crispness +crispy +criteria +critter +croak +crock +crook +croon +crop +cross +crouch +crouton +crowbar +crowd +crown +crucial +crudely +crudeness +cruelly +cruelness +cruelty +crumb +crummiest +crummy +crumpet +crumpled +cruncher +crunching +crunchy +crusader +crushable +crushed +crusher +crushing +crust +crux +crying +cryptic +crystal +cubbyhole +cube +cubical +cubicle +cucumber +cuddle +cuddly +cufflink +culinary +culminate +culpable +culprit +cultivate +cultural +culture +cupbearer +cupcake +cupid +cupped +cupping +curable +curator +curdle +cure +curfew +curing +curled +curler +curliness +curling +curly +curry +curse +cursive +cursor +curtain +curtly +curtsy 
+curvature +curve +curvy +cushy +cusp +cussed +custard +custodian +custody +customary +customer +customize +customs +cut +cycle +cyclic +cycling +cyclist +cylinder +cymbal +cytoplasm +cytoplast +dab +dad +daffodil +dagger +daily +daintily +dainty +dairy +daisy +dallying +dance +dancing +dandelion +dander +dandruff +dandy +danger +dangle +dangling +daredevil +dares +daringly +darkened +darkening +darkish +darkness +darkroom +darling +darn +dart +darwinism +dash +dastardly +data +datebook +dating +daughter +daunting +dawdler +dawn +daybed +daybreak +daycare +daydream +daylight +daylong +dayroom +daytime +dazzler +dazzling +deacon +deafening +deafness +dealer +dealing +dealmaker +dealt +dean +debatable +debate +debating +debit +debrief +debtless +debtor +debug +debunk +decade +decaf +decal +decathlon +decay +deceased +deceit +deceiver +deceiving +december +decency +decent +deception +deceptive +decibel +decidable +decimal +decimeter +decipher +deck +declared +decline +decode +decompose +decorated +decorator +decoy +decrease +decree +dedicate +dedicator +deduce +deduct +deed +deem +deepen +deeply +deepness +deface +defacing +defame +default +defeat +defection +defective +defendant +defender +defense +defensive +deferral +deferred +defiance +defiant +defile +defiling +define +definite +deflate +deflation +deflator +deflected +deflector +defog +deforest +defraud +defrost +deftly +defuse +defy +degraded +degrading +degrease +degree +dehydrate +deity +dejected +delay +delegate +delegator +delete +deletion +delicacy +delicate +delicious +delighted +delirious +delirium +deliverer +delivery +delouse +delta +deluge +delusion +deluxe +demanding +demeaning +demeanor +demise +democracy +democrat +demote +demotion +demystify +denatured +deniable +denial +denim +denote +dense +density +dental +dentist +denture +deny +deodorant +deodorize +departed +departure +depict +deplete +depletion +deplored +deploy +deport +depose +depraved +depravity +deprecate +depress +deprive +depth +deputize +deputy +derail +deranged +derby +derived +desecrate +deserve +deserving +designate +designed +designer +designing +deskbound +desktop +deskwork +desolate +despair +despise +despite +destiny +destitute +destruct +detached +detail +detection +detective +detector +detention +detergent +detest +detonate +detonator +detoxify +detract +deuce +devalue +deviancy +deviant +deviate +deviation +deviator +device +devious +devotedly +devotee +devotion +devourer +devouring +devoutly +dexterity +dexterous +diabetes +diabetic +diabolic +diagnoses +diagnosis +diagram +dial +diameter +diaper +diaphragm +diary +dice +dicing +dictate +dictation +dictator +difficult +diffused +diffuser +diffusion +diffusive +dig +dilation +diligence +diligent +dill +dilute +dime +diminish +dimly +dimmed +dimmer +dimness +dimple +diner +dingbat +dinghy +dinginess +dingo +dingy +dining +dinner +diocese +dioxide +diploma +dipped +dipper +dipping +directed +direction +directive +directly +directory +direness +dirtiness +disabled +disagree +disallow +disarm +disarray +disaster +disband +disbelief +disburse +discard +discern +discharge +disclose +discolor +discount +discourse +discover +discuss +disdain +disengage +disfigure +disgrace +dish +disinfect +disjoin +disk +dislike +disliking +dislocate +dislodge +disloyal +dismantle +dismay +dismiss +dismount +disobey +disorder +disown +disparate +disparity +dispatch +dispense +dispersal +dispersed +disperser +displace +display +displease +disposal +dispose +disprove +dispute +disregard +disrupt +dissuade +distance +distant 
+distaste +distill +distinct +distort +distract +distress +district +distrust +ditch +ditto +ditzy +dividable +divided +dividend +dividers +dividing +divinely +diving +divinity +divisible +divisibly +division +divisive +divorcee +dizziness +dizzy +doable +docile +dock +doctrine +document +dodge +dodgy +doily +doing +dole +dollar +dollhouse +dollop +dolly +dolphin +domain +domelike +domestic +dominion +dominoes +donated +donation +donator +donor +donut +doodle +doorbell +doorframe +doorknob +doorman +doormat +doornail +doorpost +doorstep +doorstop +doorway +doozy +dork +dormitory +dorsal +dosage +dose +dotted +doubling +douche +dove +down +dowry +doze +drab +dragging +dragonfly +dragonish +dragster +drainable +drainage +drained +drainer +drainpipe +dramatic +dramatize +drank +drapery +drastic +draw +dreaded +dreadful +dreadlock +dreamboat +dreamily +dreamland +dreamless +dreamlike +dreamt +dreamy +drearily +dreary +drench +dress +drew +dribble +dried +drier +drift +driller +drilling +drinkable +drinking +dripping +drippy +drivable +driven +driver +driveway +driving +drizzle +drizzly +drone +drool +droop +drop-down +dropbox +dropkick +droplet +dropout +dropper +drove +drown +drowsily +drudge +drum +dry +dubbed +dubiously +duchess +duckbill +ducking +duckling +ducktail +ducky +duct +dude +duffel +dugout +duh +duke +duller +dullness +duly +dumping +dumpling +dumpster +duo +dupe +duplex +duplicate +duplicity +durable +durably +duration +duress +during +dusk +dust +dutiful +duty +duvet +dwarf +dweeb +dwelled +dweller +dwelling +dwindle +dwindling +dynamic +dynamite +dynasty +dyslexia +dyslexic +each +eagle +earache +eardrum +earflap +earful +earlobe +early +earmark +earmuff +earphone +earpiece +earplugs +earring +earshot +earthen +earthlike +earthling +earthly +earthworm +earthy +earwig +easeful +easel +easiest +easily +easiness +easing +eastbound +eastcoast +easter +eastward +eatable +eaten +eatery +eating +eats +ebay +ebony +ebook +ecard +eccentric +echo +eclair +eclipse +ecologist +ecology +economic +economist +economy +ecosphere +ecosystem +edge +edginess +edging +edgy +edition +editor +educated +education +educator +eel +effective +effects +efficient +effort +eggbeater +egging +eggnog +eggplant +eggshell +egomaniac +egotism +egotistic +either +eject +elaborate +elastic +elated +elbow +eldercare +elderly +eldest +electable +election +elective +elephant +elevate +elevating +elevation +elevator +eleven +elf +eligible +eligibly +eliminate +elite +elitism +elixir +elk +ellipse +elliptic +elm +elongated +elope +eloquence +eloquent +elsewhere +elude +elusive +elves +email +embargo +embark +embassy +embattled +embellish +ember +embezzle +emblaze +emblem +embody +embolism +emboss +embroider +emcee +emerald +emergency +emission +emit +emote +emoticon +emotion +empathic +empathy +emperor +emphases +emphasis +emphasize +emphatic +empirical +employed +employee +employer +emporium +empower +emptier +emptiness +empty +emu +enable +enactment +enamel +enchanted +enchilada +encircle +enclose +enclosure +encode +encore +encounter +encourage +encroach +encrust +encrypt +endanger +endeared +endearing +ended +ending +endless +endnote +endocrine +endorphin +endorse +endowment +endpoint +endurable +endurance +enduring +energetic +energize +energy +enforced +enforcer +engaged +engaging +engine +engorge +engraved +engraver +engraving +engross +engulf +enhance +enigmatic +enjoyable +enjoyably +enjoyer +enjoying +enjoyment +enlarged +enlarging +enlighten +enlisted +enquirer +enrage +enrich +enroll +enslave +ensnare 
+ensure +entail +entangled +entering +entertain +enticing +entire +entitle +entity +entomb +entourage +entrap +entree +entrench +entrust +entryway +entwine +enunciate +envelope +enviable +enviably +envious +envision +envoy +envy +enzyme +epic +epidemic +epidermal +epidermis +epidural +epilepsy +epileptic +epilogue +epiphany +episode +equal +equate +equation +equator +equinox +equipment +equity +equivocal +eradicate +erasable +erased +eraser +erasure +ergonomic +errand +errant +erratic +error +erupt +escalate +escalator +escapable +escapade +escapist +escargot +eskimo +esophagus +espionage +espresso +esquire +essay +essence +essential +establish +estate +esteemed +estimate +estimator +estranged +estrogen +etching +eternal +eternity +ethanol +ether +ethically +ethics +euphemism +evacuate +evacuee +evade +evaluate +evaluator +evaporate +evasion +evasive +even +everglade +evergreen +everybody +everyday +everyone +evict +evidence +evident +evil +evoke +evolution +evolve +exact +exalted +example +excavate +excavator +exceeding +exception +excess +exchange +excitable +exciting +exclaim +exclude +excluding +exclusion +exclusive +excretion +excretory +excursion +excusable +excusably +excuse +exemplary +exemplify +exemption +exerciser +exert +exes +exfoliate +exhale +exhaust +exhume +exile +existing +exit +exodus +exonerate +exorcism +exorcist +expand +expanse +expansion +expansive +expectant +expedited +expediter +expel +expend +expenses +expensive +expert +expire +expiring +explain +expletive +explicit +explode +exploit +explore +exploring +exponent +exporter +exposable +expose +exposure +express +expulsion +exquisite +extended +extending +extent +extenuate +exterior +external +extinct +extortion +extradite +extras +extrovert +extrude +extruding +exuberant +fable +fabric +fabulous +facebook +facecloth +facedown +faceless +facelift +faceplate +faceted +facial +facility +facing +facsimile +faction +factoid +factor +factsheet +factual +faculty +fade +fading +failing +falcon +fall +false +falsify +fame +familiar +family +famine +famished +fanatic +fancied +fanciness +fancy +fanfare +fang +fanning +fantasize +fantastic +fantasy +fascism +fastball +faster +fasting +fastness +faucet +favorable +favorably +favored +favoring +favorite +fax +feast +federal +fedora +feeble +feed +feel +feisty +feline +felt-tip +feminine +feminism +feminist +feminize +femur +fence +fencing +fender +ferment +fernlike +ferocious +ferocity +ferret +ferris +ferry +fervor +fester +festival +festive +festivity +fetal +fetch +fever +fiber +fiction +fiddle +fiddling +fidelity +fidgeting +fidgety +fifteen +fifth +fiftieth +fifty +figment +figure +figurine +filing +filled +filler +filling +film +filter +filth +filtrate +finale +finalist +finalize +finally +finance +financial +finch +fineness +finer +finicky +finished +finisher +finishing +finite +finless +finlike +fiscally +fit +five +flaccid +flagman +flagpole +flagship +flagstick +flagstone +flail +flakily +flaky +flame +flammable +flanked +flanking +flannels +flap +flaring +flashback +flashbulb +flashcard +flashily +flashing +flashy +flask +flatbed +flatfoot +flatly +flatness +flatten +flattered +flatterer +flattery +flattop +flatware +flatworm +flavored +flavorful +flavoring +flaxseed +fled +fleshed +fleshy +flick +flier +flight +flinch +fling +flint +flip +flirt +float +flock +flogging +flop +floral +florist +floss +flounder +flyable +flyaway +flyer +flying +flyover +flypaper +foam +foe +fog +foil +folic +folk +follicle +follow +fondling +fondly +fondness +fondue +font +food +fool 
+footage +football +footbath +footboard +footer +footgear +foothill +foothold +footing +footless +footman +footnote +footpad +footpath +footprint +footrest +footsie +footsore +footwear +footwork +fossil +foster +founder +founding +fountain +fox +foyer +fraction +fracture +fragile +fragility +fragment +fragrance +fragrant +frail +frame +framing +frantic +fraternal +frayed +fraying +frays +freckled +freckles +freebase +freebee +freebie +freedom +freefall +freehand +freeing +freeload +freely +freemason +freeness +freestyle +freeware +freeway +freewill +freezable +freezing +freight +french +frenzied +frenzy +frequency +frequent +fresh +fretful +fretted +friction +friday +fridge +fried +friend +frighten +frightful +frigidity +frigidly +frill +fringe +frisbee +frisk +fritter +frivolous +frolic +from +front +frostbite +frosted +frostily +frosting +frostlike +frosty +froth +frown +frozen +fructose +frugality +frugally +fruit +frustrate +frying +gab +gaffe +gag +gainfully +gaining +gains +gala +gallantly +galleria +gallery +galley +gallon +gallows +gallstone +galore +galvanize +gambling +game +gaming +gamma +gander +gangly +gangrene +gangway +gap +garage +garbage +garden +gargle +garland +garlic +garment +garnet +garnish +garter +gas +gatherer +gathering +gating +gauging +gauntlet +gauze +gave +gawk +gazing +gear +gecko +geek +geiger +gem +gender +generic +generous +genetics +genre +gentile +gentleman +gently +gents +geography +geologic +geologist +geology +geometric +geometry +geranium +gerbil +geriatric +germicide +germinate +germless +germproof +gestate +gestation +gesture +getaway +getting +getup +giant +gibberish +giblet +giddily +giddiness +giddy +gift +gigabyte +gigahertz +gigantic +giggle +giggling +giggly +gigolo +gilled +gills +gimmick +girdle +giveaway +given +giver +giving +gizmo +gizzard +glacial +glacier +glade +gladiator +gladly +glamorous +glamour +glance +glancing +glandular +glare +glaring +glass +glaucoma +glazing +gleaming +gleeful +glider +gliding +glimmer +glimpse +glisten +glitch +glitter +glitzy +gloater +gloating +gloomily +gloomy +glorified +glorifier +glorify +glorious +glory +gloss +glove +glowing +glowworm +glucose +glue +gluten +glutinous +glutton +gnarly +gnat +goal +goatskin +goes +goggles +going +goldfish +goldmine +goldsmith +golf +goliath +gonad +gondola +gone +gong +good +gooey +goofball +goofiness +goofy +google +goon +gopher +gore +gorged +gorgeous +gory +gosling +gossip +gothic +gotten +gout +gown +grab +graceful +graceless +gracious +gradation +graded +grader +gradient +grading +gradually +graduate +graffiti +grafted +grafting +grain +granddad +grandkid +grandly +grandma +grandpa +grandson +granite +granny +granola +grant +granular +grape +graph +grapple +grappling +grasp +grass +gratified +gratify +grating +gratitude +gratuity +gravel +graveness +graves +graveyard +gravitate +gravity +gravy +gray +grazing +greasily +greedily +greedless +greedy +green +greeter +greeting +grew +greyhound +grid +grief +grievance +grieving +grievous +grill +grimace +grimacing +grime +griminess +grimy +grinch +grinning +grip +gristle +grit +groggily +groggy +groin +groom +groove +grooving +groovy +grope +ground +grouped +grout +grove +grower +growing +growl +grub +grudge +grudging +grueling +gruffly +grumble +grumbling +grumbly +grumpily +grunge +grunt +guacamole +guidable +guidance +guide +guiding +guileless +guise +gulf +gullible +gully +gulp +gumball +gumdrop +gumminess +gumming +gummy +gurgle +gurgling +guru +gush +gusto +gusty +gutless +guts +gutter +guy +guzzler +gyration 
+habitable +habitant +habitat +habitual +hacked +hacker +hacking +hacksaw +had +haggler +haiku +half +halogen +halt +halved +halves +hamburger +hamlet +hammock +hamper +hamster +hamstring +handbag +handball +handbook +handbrake +handcart +handclap +handclasp +handcraft +handcuff +handed +handful +handgrip +handgun +handheld +handiness +handiwork +handlebar +handled +handler +handling +handmade +handoff +handpick +handprint +handrail +handsaw +handset +handsfree +handshake +handstand +handwash +handwork +handwoven +handwrite +handyman +hangnail +hangout +hangover +hangup +hankering +hankie +hanky +haphazard +happening +happier +happiest +happily +happiness +happy +harbor +hardcopy +hardcore +hardcover +harddisk +hardened +hardener +hardening +hardhat +hardhead +hardiness +hardly +hardness +hardship +hardware +hardwired +hardwood +hardy +harmful +harmless +harmonica +harmonics +harmonize +harmony +harness +harpist +harsh +harvest +hash +hassle +haste +hastily +hastiness +hasty +hatbox +hatchback +hatchery +hatchet +hatching +hatchling +hate +hatless +hatred +haunt +haven +hazard +hazelnut +hazily +haziness +hazing +hazy +headache +headband +headboard +headcount +headdress +headed +header +headfirst +headgear +heading +headlamp +headless +headlock +headphone +headpiece +headrest +headroom +headscarf +headset +headsman +headstand +headstone +headway +headwear +heap +heat +heave +heavily +heaviness +heaving +hedge +hedging +heftiness +hefty +helium +helmet +helper +helpful +helping +helpless +helpline +hemlock +hemstitch +hence +henchman +henna +herald +herbal +herbicide +herbs +heritage +hermit +heroics +heroism +herring +herself +hertz +hesitancy +hesitant +hesitate +hexagon +hexagram +hubcap +huddle +huddling +huff +hug +hula +hulk +hull +human +humble +humbling +humbly +humid +humiliate +humility +humming +hummus +humongous +humorist +humorless +humorous +humpback +humped +humvee +hunchback +hundredth +hunger +hungrily +hungry +hunk +hunter +hunting +huntress +huntsman +hurdle +hurled +hurler +hurling +hurray +hurricane +hurried +hurry +hurt +husband +hush +husked +huskiness +hut +hybrid +hydrant +hydrated +hydration +hydrogen +hydroxide +hyperlink +hypertext +hyphen +hypnoses +hypnosis +hypnotic +hypnotism +hypnotist +hypnotize +hypocrisy +hypocrite +ibuprofen +ice +iciness +icing +icky +icon +icy +idealism +idealist +idealize +ideally +idealness +identical +identify +identity +ideology +idiocy +idiom +idly +igloo +ignition +ignore +iguana +illicitly +illusion +illusive +image +imaginary +imagines +imaging +imbecile +imitate +imitation +immature +immerse +immersion +imminent +immobile +immodest +immorally +immortal +immovable +immovably +immunity +immunize +impaired +impale +impart +impatient +impeach +impeding +impending +imperfect +imperial +impish +implant +implement +implicate +implicit +implode +implosion +implosive +imply +impolite +important +importer +impose +imposing +impotence +impotency +impotent +impound +imprecise +imprint +imprison +impromptu +improper +improve +improving +improvise +imprudent +impulse +impulsive +impure +impurity +iodine +iodize +ion +ipad +iphone +ipod +irate +irk +iron +irregular +irrigate +irritable +irritably +irritant +irritate +islamic +islamist +isolated +isolating +isolation +isotope +issue +issuing +italicize +italics +item +itinerary +itunes +ivory +ivy +jab +jackal +jacket +jackknife +jackpot +jailbird +jailbreak +jailer +jailhouse +jalapeno +jam +janitor +january +jargon +jarring +jasmine +jaundice +jaunt +java +jawed +jawless +jawline +jaws 
+jaybird +jaywalker +jazz +jeep +jeeringly +jellied +jelly +jersey +jester +jet +jiffy +jigsaw +jimmy +jingle +jingling +jinx +jitters +jittery +job +jockey +jockstrap +jogger +jogging +john +joining +jokester +jokingly +jolliness +jolly +jolt +jot +jovial +joyfully +joylessly +joyous +joyride +joystick +jubilance +jubilant +judge +judgingly +judicial +judiciary +judo +juggle +juggling +jugular +juice +juiciness +juicy +jujitsu +jukebox +july +jumble +jumbo +jump +junction +juncture +june +junior +juniper +junkie +junkman +junkyard +jurist +juror +jury +justice +justifier +justify +justly +justness +juvenile +kabob +kangaroo +karaoke +karate +karma +kebab +keenly +keenness +keep +keg +kelp +kennel +kept +kerchief +kerosene +kettle +kick +kiln +kilobyte +kilogram +kilometer +kilowatt +kilt +kimono +kindle +kindling +kindly +kindness +kindred +kinetic +kinfolk +king +kinship +kinsman +kinswoman +kissable +kisser +kissing +kitchen +kite +kitten +kitty +kiwi +kleenex +knapsack +knee +knelt +knickers +knoll +koala +kooky +kosher +krypton +kudos +kung +labored +laborer +laboring +laborious +labrador +ladder +ladies +ladle +ladybug +ladylike +lagged +lagging +lagoon +lair +lake +lance +landed +landfall +landfill +landing +landlady +landless +landline +landlord +landmark +landmass +landmine +landowner +landscape +landside +landslide +language +lankiness +lanky +lantern +lapdog +lapel +lapped +lapping +laptop +lard +large +lark +lash +lasso +last +latch +late +lather +latitude +latrine +latter +latticed +launch +launder +laundry +laurel +lavender +lavish +laxative +lazily +laziness +lazy +lecturer +left +legacy +legal +legend +legged +leggings +legible +legibly +legislate +lego +legroom +legume +legwarmer +legwork +lemon +lend +length +lens +lent +leotard +lesser +letdown +lethargic +lethargy +letter +lettuce +level +leverage +levers +levitate +levitator +liability +liable +liberty +librarian +library +licking +licorice +lid +life +lifter +lifting +liftoff +ligament +likely +likeness +likewise +liking +lilac +lilly +lily +limb +limeade +limelight +limes +limit +limping +limpness +line +lingo +linguini +linguist +lining +linked +linoleum +linseed +lint +lion +lip +liquefy +liqueur +liquid +lisp +list +litigate +litigator +litmus +litter +little +livable +lived +lively +liver +livestock +lividly +living +lizard +lubricant +lubricate +lucid +luckily +luckiness +luckless +lucrative +ludicrous +lugged +lukewarm +lullaby +lumber +luminance +luminous +lumpiness +lumping +lumpish +lunacy +lunar +lunchbox +luncheon +lunchroom +lunchtime +lung +lurch +lure +luridness +lurk +lushly +lushness +luster +lustfully +lustily +lustiness +lustrous +lusty +luxurious +luxury +lying +lyrically +lyricism +lyricist +lyrics +macarena +macaroni +macaw +mace +machine +machinist +magazine +magenta +maggot +magical +magician +magma +magnesium +magnetic +magnetism +magnetize +magnifier +magnify +magnitude +magnolia +mahogany +maimed +majestic +majesty +majorette +majority +makeover +maker +makeshift +making +malformed +malt +mama +mammal +mammary +mammogram +manager +managing +manatee +mandarin +mandate +mandatory +mandolin +manger +mangle +mango +mangy +manhandle +manhole +manhood +manhunt +manicotti +manicure +manifesto +manila +mankind +manlike +manliness +manly +manmade +manned +mannish +manor +manpower +mantis +mantra +manual +many +map +marathon +marauding +marbled +marbles +marbling +march +mardi +margarine +margarita +margin +marigold +marina +marine +marital +maritime +marlin +marmalade +maroon +married +marrow +marry 
+marshland +marshy +marsupial +marvelous +marxism +mascot +masculine +mashed +mashing +massager +masses +massive +mastiff +matador +matchbook +matchbox +matcher +matching +matchless +material +maternal +maternity +math +mating +matriarch +matrimony +matrix +matron +matted +matter +maturely +maturing +maturity +mauve +maverick +maximize +maximum +maybe +mayday +mayflower +moaner +moaning +mobile +mobility +mobilize +mobster +mocha +mocker +mockup +modified +modify +modular +modulator +module +moisten +moistness +moisture +molar +molasses +mold +molecular +molecule +molehill +mollusk +mom +monastery +monday +monetary +monetize +moneybags +moneyless +moneywise +mongoose +mongrel +monitor +monkhood +monogamy +monogram +monologue +monopoly +monorail +monotone +monotype +monoxide +monsieur +monsoon +monstrous +monthly +monument +moocher +moodiness +moody +mooing +moonbeam +mooned +moonlight +moonlike +moonlit +moonrise +moonscape +moonshine +moonstone +moonwalk +mop +morale +morality +morally +morbidity +morbidly +morphine +morphing +morse +mortality +mortally +mortician +mortified +mortify +mortuary +mosaic +mossy +most +mothball +mothproof +motion +motivate +motivator +motive +motocross +motor +motto +mountable +mountain +mounted +mounting +mourner +mournful +mouse +mousiness +moustache +mousy +mouth +movable +move +movie +moving +mower +mowing +much +muck +mud +mug +mulberry +mulch +mule +mulled +mullets +multiple +multiply +multitask +multitude +mumble +mumbling +mumbo +mummified +mummify +mummy +mumps +munchkin +mundane +municipal +muppet +mural +murkiness +murky +murmuring +muscular +museum +mushily +mushiness +mushroom +mushy +music +musket +muskiness +musky +mustang +mustard +muster +mustiness +musty +mutable +mutate +mutation +mute +mutilated +mutilator +mutiny +mutt +mutual +muzzle +myself +myspace +mystified +mystify +myth +nacho +nag +nail +name +naming +nanny +nanometer +nape +napkin +napped +napping +nappy +narrow +nastily +nastiness +national +native +nativity +natural +nature +naturist +nautical +navigate +navigator +navy +nearby +nearest +nearly +nearness +neatly +neatness +nebula +nebulizer +nectar +negate +negation +negative +neglector +negligee +negligent +negotiate +nemeses +nemesis +neon +nephew +nerd +nervous +nervy +nest +net +neurology +neuron +neurosis +neurotic +neuter +neutron +never +next +nibble +nickname +nicotine +niece +nifty +nimble +nimbly +nineteen +ninetieth +ninja +nintendo +ninth +nuclear +nuclei +nucleus +nugget +nullify +number +numbing +numbly +numbness +numeral +numerate +numerator +numeric +numerous +nuptials +nursery +nursing +nurture +nutcase +nutlike +nutmeg +nutrient +nutshell +nuttiness +nutty +nuzzle +nylon +oaf +oak +oasis +oat +obedience +obedient +obituary +object +obligate +obliged +oblivion +oblivious +oblong +obnoxious +oboe +obscure +obscurity +observant +observer +observing +obsessed +obsession +obsessive +obsolete +obstacle +obstinate +obstruct +obtain +obtrusive +obtuse +obvious +occultist +occupancy +occupant +occupier +occupy +ocean +ocelot +octagon +octane +october +octopus +ogle +oil +oink +ointment +okay +old +olive +olympics +omega +omen +ominous +omission +omit +omnivore +onboard +oncoming +ongoing +onion +online +onlooker +only +onscreen +onset +onshore +onslaught +onstage +onto +onward +onyx +oops +ooze +oozy +opacity +opal +open +operable +operate +operating +operation +operative +operator +opium +opossum +opponent +oppose +opposing +opposite +oppressed +oppressor +opt +opulently +osmosis +other +otter +ouch +ought +ounce 
+outage +outback +outbid +outboard +outbound +outbreak +outburst +outcast +outclass +outcome +outdated +outdoors +outer +outfield +outfit +outflank +outgoing +outgrow +outhouse +outing +outlast +outlet +outline +outlook +outlying +outmatch +outmost +outnumber +outplayed +outpost +outpour +output +outrage +outrank +outreach +outright +outscore +outsell +outshine +outshoot +outsider +outskirts +outsmart +outsource +outspoken +outtakes +outthink +outward +outweigh +outwit +oval +ovary +oven +overact +overall +overarch +overbid +overbill +overbite +overblown +overboard +overbook +overbuilt +overcast +overcoat +overcome +overcook +overcrowd +overdraft +overdrawn +overdress +overdrive +overdue +overeager +overeater +overexert +overfed +overfeed +overfill +overflow +overfull +overgrown +overhand +overhang +overhaul +overhead +overhear +overheat +overhung +overjoyed +overkill +overlabor +overlaid +overlap +overlay +overload +overlook +overlord +overlying +overnight +overpass +overpay +overplant +overplay +overpower +overprice +overrate +overreach +overreact +override +overripe +overrule +overrun +overshoot +overshot +oversight +oversized +oversleep +oversold +overspend +overstate +overstay +overstep +overstock +overstuff +oversweet +overtake +overthrow +overtime +overtly +overtone +overture +overturn +overuse +overvalue +overview +overwrite +owl +oxford +oxidant +oxidation +oxidize +oxidizing +oxygen +oxymoron +oyster +ozone +paced +pacemaker +pacific +pacifier +pacifism +pacifist +pacify +padded +padding +paddle +paddling +padlock +pagan +pager +paging +pajamas +palace +palatable +palm +palpable +palpitate +paltry +pampered +pamperer +pampers +pamphlet +panama +pancake +pancreas +panda +pandemic +pang +panhandle +panic +panning +panorama +panoramic +panther +pantomime +pantry +pants +pantyhose +paparazzi +papaya +paper +paprika +papyrus +parabola +parachute +parade +paradox +paragraph +parakeet +paralegal +paralyses +paralysis +paralyze +paramedic +parameter +paramount +parasail +parasite +parasitic +parcel +parched +parchment +pardon +parish +parka +parking +parkway +parlor +parmesan +parole +parrot +parsley +parsnip +partake +parted +parting +partition +partly +partner +partridge +party +passable +passably +passage +passcode +passenger +passerby +passing +passion +passive +passivism +passover +passport +password +pasta +pasted +pastel +pastime +pastor +pastrami +pasture +pasty +patchwork +patchy +paternal +paternity +path +patience +patient +patio +patriarch +patriot +patrol +patronage +patronize +pauper +pavement +paver +pavestone +pavilion +paving +pawing +payable +payback +paycheck +payday +payee +payer +paying +payment +payphone +payroll +pebble +pebbly +pecan +pectin +peculiar +peddling +pediatric +pedicure +pedigree +pedometer +pegboard +pelican +pellet +pelt +pelvis +penalize +penalty +pencil +pendant +pending +penholder +penknife +pennant +penniless +penny +penpal +pension +pentagon +pentagram +pep +perceive +percent +perch +percolate +perennial +perfected +perfectly +perfume +periscope +perish +perjurer +perjury +perkiness +perky +perm +peroxide +perpetual +perplexed +persecute +persevere +persuaded +persuader +pesky +peso +pessimism +pessimist +pester +pesticide +petal +petite +petition +petri +petroleum +petted +petticoat +pettiness +petty +petunia +phantom +phobia +phoenix +phonebook +phoney +phonics +phoniness +phony +phosphate +photo +phrase +phrasing +placard +placate +placidly +plank +planner +plant +plasma +plaster +plastic +plated +platform +plating +platinum +platonic 
+platter +platypus +plausible +plausibly +playable +playback +player +playful +playgroup +playhouse +playing +playlist +playmaker +playmate +playoff +playpen +playroom +playset +plaything +playtime +plaza +pleading +pleat +pledge +plentiful +plenty +plethora +plexiglas +pliable +plod +plop +plot +plow +ploy +pluck +plug +plunder +plunging +plural +plus +plutonium +plywood +poach +pod +poem +poet +pogo +pointed +pointer +pointing +pointless +pointy +poise +poison +poker +poking +polar +police +policy +polio +polish +politely +polka +polo +polyester +polygon +polygraph +polymer +poncho +pond +pony +popcorn +pope +poplar +popper +poppy +popsicle +populace +popular +populate +porcupine +pork +porous +porridge +portable +portal +portfolio +porthole +portion +portly +portside +poser +posh +posing +possible +possibly +possum +postage +postal +postbox +postcard +posted +poster +posting +postnasal +posture +postwar +pouch +pounce +pouncing +pound +pouring +pout +powdered +powdering +powdery +power +powwow +pox +praising +prance +prancing +pranker +prankish +prankster +prayer +praying +preacher +preaching +preachy +preamble +precinct +precise +precision +precook +precut +predator +predefine +predict +preface +prefix +preflight +preformed +pregame +pregnancy +pregnant +preheated +prelaunch +prelaw +prelude +premiere +premises +premium +prenatal +preoccupy +preorder +prepaid +prepay +preplan +preppy +preschool +prescribe +preseason +preset +preshow +president +presoak +press +presume +presuming +preteen +pretended +pretender +pretense +pretext +pretty +pretzel +prevail +prevalent +prevent +preview +previous +prewar +prewashed +prideful +pried +primal +primarily +primary +primate +primer +primp +princess +print +prior +prism +prison +prissy +pristine +privacy +private +privatize +prize +proactive +probable +probably +probation +probe +probing +probiotic +problem +procedure +process +proclaim +procreate +procurer +prodigal +prodigy +produce +product +profane +profanity +professed +professor +profile +profound +profusely +progeny +prognosis +program +progress +projector +prologue +prolonged +promenade +prominent +promoter +promotion +prompter +promptly +prone +prong +pronounce +pronto +proofing +proofread +proofs +propeller +properly +property +proponent +proposal +propose +props +prorate +protector +protegee +proton +prototype +protozoan +protract +protrude +proud +provable +proved +proven +provided +provider +providing +province +proving +provoke +provoking +provolone +prowess +prowler +prowling +proximity +proxy +prozac +prude +prudishly +prune +pruning +pry +psychic +public +publisher +pucker +pueblo +pug +pull +pulmonary +pulp +pulsate +pulse +pulverize +puma +pumice +pummel +punch +punctual +punctuate +punctured +pungent +punisher +punk +pupil +puppet +puppy +purchase +pureblood +purebred +purely +pureness +purgatory +purge +purging +purifier +purify +purist +puritan +purity +purple +purplish +purposely +purr +purse +pursuable +pursuant +pursuit +purveyor +pushcart +pushchair +pusher +pushiness +pushing +pushover +pushpin +pushup +pushy +putdown +putt +puzzle +puzzling +pyramid +pyromania +python +quack +quadrant +quail +quaintly +quake +quaking +qualified +qualifier +qualify +quality +qualm +quantum +quarrel +quarry +quartered +quarterly +quarters +quartet +quench +query +quicken +quickly +quickness +quicksand +quickstep +quiet +quill +quilt +quintet +quintuple +quirk +quit +quiver +quizzical +quotable +quotation +quote +rabid +race +racing +racism +rack +racoon +radar +radial +radiance +radiantly 
+radiated +radiation +radiator +radio +radish +raffle +raft +rage +ragged +raging +ragweed +raider +railcar +railing +railroad +railway +raisin +rake +raking +rally +ramble +rambling +ramp +ramrod +ranch +rancidity +random +ranged +ranger +ranging +ranked +ranking +ransack +ranting +rants +rare +rarity +rascal +rash +rasping +ravage +raven +ravine +raving +ravioli +ravishing +reabsorb +reach +reacquire +reaction +reactive +reactor +reaffirm +ream +reanalyze +reappear +reapply +reappoint +reapprove +rearrange +rearview +reason +reassign +reassure +reattach +reawake +rebalance +rebate +rebel +rebirth +reboot +reborn +rebound +rebuff +rebuild +rebuilt +reburial +rebuttal +recall +recant +recapture +recast +recede +recent +recess +recharger +recipient +recital +recite +reckless +reclaim +recliner +reclining +recluse +reclusive +recognize +recoil +recollect +recolor +reconcile +reconfirm +reconvene +recopy +record +recount +recoup +recovery +recreate +rectal +rectangle +rectified +rectify +recycled +recycler +recycling +reemerge +reenact +reenter +reentry +reexamine +referable +referee +reference +refill +refinance +refined +refinery +refining +refinish +reflected +reflector +reflex +reflux +refocus +refold +reforest +reformat +reformed +reformer +reformist +refract +refrain +refreeze +refresh +refried +refueling +refund +refurbish +refurnish +refusal +refuse +refusing +refutable +refute +regain +regalia +regally +reggae +regime +region +register +registrar +registry +regress +regretful +regroup +regular +regulate +regulator +rehab +reheat +rehire +rehydrate +reimburse +reissue +reiterate +rejoice +rejoicing +rejoin +rekindle +relapse +relapsing +relatable +related +relation +relative +relax +relay +relearn +release +relenting +reliable +reliably +reliance +reliant +relic +relieve +relieving +relight +relish +relive +reload +relocate +relock +reluctant +rely +remake +remark +remarry +rematch +remedial +remedy +remember +reminder +remindful +remission +remix +remnant +remodeler +remold +remorse +remote +removable +removal +removed +remover +removing +rename +renderer +rendering +rendition +renegade +renewable +renewably +renewal +renewed +renounce +renovate +renovator +rentable +rental +rented +renter +reoccupy +reoccur +reopen +reorder +repackage +repacking +repaint +repair +repave +repaying +repayment +repeal +repeated +repeater +repent +rephrase +replace +replay +replica +reply +reporter +repose +repossess +repost +repressed +reprimand +reprint +reprise +reproach +reprocess +reproduce +reprogram +reps +reptile +reptilian +repugnant +repulsion +repulsive +repurpose +reputable +reputably +request +require +requisite +reroute +rerun +resale +resample +rescuer +reseal +research +reselect +reseller +resemble +resend +resent +reset +reshape +reshoot +reshuffle +residence +residency +resident +residual +residue +resigned +resilient +resistant +resisting +resize +resolute +resolved +resonant +resonate +resort +resource +respect +resubmit +result +resume +resupply +resurface +resurrect +retail +retainer +retaining +retake +retaliate +retention +rethink +retinal +retired +retiree +retiring +retold +retool +retorted +retouch +retrace +retract +retrain +retread +retreat +retrial +retrieval +retriever +retry +return +retying +retype +reunion +reunite +reusable +reuse +reveal +reveler +revenge +revenue +reverb +revered +reverence +reverend +reversal +reverse +reversing +reversion +revert +revisable +revise +revision +revisit +revivable +revival +reviver +reviving +revocable +revoke +revolt +revolver 
+revolving +reward +rewash +rewind +rewire +reword +rework +rewrap +rewrite +rhyme +ribbon +ribcage +rice +riches +richly +richness +rickety +ricotta +riddance +ridden +ride +riding +rifling +rift +rigging +rigid +rigor +rimless +rimmed +rind +rink +rinse +rinsing +riot +ripcord +ripeness +ripening +ripping +ripple +rippling +riptide +rise +rising +risk +risotto +ritalin +ritzy +rival +riverbank +riverbed +riverboat +riverside +riveter +riveting +roamer +roaming +roast +robbing +robe +robin +robotics +robust +rockband +rocker +rocket +rockfish +rockiness +rocking +rocklike +rockslide +rockstar +rocky +rogue +roman +romp +rope +roping +roster +rosy +rotten +rotting +rotunda +roulette +rounding +roundish +roundness +roundup +roundworm +routine +routing +rover +roving +royal +rubbed +rubber +rubbing +rubble +rubdown +ruby +ruckus +rudder +rug +ruined +rule +rumble +rumbling +rummage +rumor +runaround +rundown +runner +running +runny +runt +runway +rupture +rural +ruse +rush +rust +rut +sabbath +sabotage +sacrament +sacred +sacrifice +sadden +saddlebag +saddled +saddling +sadly +sadness +safari +safeguard +safehouse +safely +safeness +saffron +saga +sage +sagging +saggy +said +saint +sake +salad +salami +salaried +salary +saline +salon +saloon +salsa +salt +salutary +salute +salvage +salvaging +salvation +same +sample +sampling +sanction +sanctity +sanctuary +sandal +sandbag +sandbank +sandbar +sandblast +sandbox +sanded +sandfish +sanding +sandlot +sandpaper +sandpit +sandstone +sandstorm +sandworm +sandy +sanitary +sanitizer +sank +santa +sapling +sappiness +sappy +sarcasm +sarcastic +sardine +sash +sasquatch +sassy +satchel +satiable +satin +satirical +satisfied +satisfy +saturate +saturday +sauciness +saucy +sauna +savage +savanna +saved +savings +savior +savor +saxophone +say +scabbed +scabby +scalded +scalding +scale +scaling +scallion +scallop +scalping +scam +scandal +scanner +scanning +scant +scapegoat +scarce +scarcity +scarecrow +scared +scarf +scarily +scariness +scarring +scary +scavenger +scenic +schedule +schematic +scheme +scheming +schilling +schnapps +scholar +science +scientist +scion +scoff +scolding +scone +scoop +scooter +scope +scorch +scorebook +scorecard +scored +scoreless +scorer +scoring +scorn +scorpion +scotch +scoundrel +scoured +scouring +scouting +scouts +scowling +scrabble +scraggly +scrambled +scrambler +scrap +scratch +scrawny +screen +scribble +scribe +scribing +scrimmage +script +scroll +scrooge +scrounger +scrubbed +scrubber +scruffy +scrunch +scrutiny +scuba +scuff +sculptor +sculpture +scurvy +scuttle +secluded +secluding +seclusion +second +secrecy +secret +sectional +sector +secular +securely +security +sedan +sedate +sedation +sedative +sediment +seduce +seducing +segment +seismic +seizing +seldom +selected +selection +selective +selector +self +seltzer +semantic +semester +semicolon +semifinal +seminar +semisoft +semisweet +senate +senator +send +senior +senorita +sensation +sensitive +sensitize +sensually +sensuous +sepia +september +septic +septum +sequel +sequence +sequester +series +sermon +serotonin +serpent +serrated +serve +service +serving +sesame +sessions +setback +setting +settle +settling +setup +sevenfold +seventeen +seventh +seventy +severity +shabby +shack +shaded +shadily +shadiness +shading +shadow +shady +shaft +shakable +shakily +shakiness +shaking +shaky +shale +shallot +shallow +shame +shampoo +shamrock +shank +shanty +shape +shaping +share +sharpener +sharper +sharpie +sharply +sharpness +shawl +sheath +shed +sheep +sheet +shelf 
+shell +shelter +shelve +shelving +sherry +shield +shifter +shifting +shiftless +shifty +shimmer +shimmy +shindig +shine +shingle +shininess +shining +shiny +ship +shirt +shivering +shock +shone +shoplift +shopper +shopping +shoptalk +shore +shortage +shortcake +shortcut +shorten +shorter +shorthand +shortlist +shortly +shortness +shorts +shortwave +shorty +shout +shove +showbiz +showcase +showdown +shower +showgirl +showing +showman +shown +showoff +showpiece +showplace +showroom +showy +shrank +shrapnel +shredder +shredding +shrewdly +shriek +shrill +shrimp +shrine +shrink +shrivel +shrouded +shrubbery +shrubs +shrug +shrunk +shucking +shudder +shuffle +shuffling +shun +shush +shut +shy +siamese +siberian +sibling +siding +sierra +siesta +sift +sighing +silenced +silencer +silent +silica +silicon +silk +silliness +silly +silo +silt +silver +similarly +simile +simmering +simple +simplify +simply +sincere +sincerity +singer +singing +single +singular +sinister +sinless +sinner +sinuous +sip +siren +sister +sitcom +sitter +sitting +situated +situation +sixfold +sixteen +sixth +sixties +sixtieth +sixtyfold +sizable +sizably +size +sizing +sizzle +sizzling +skater +skating +skedaddle +skeletal +skeleton +skeptic +sketch +skewed +skewer +skid +skied +skier +skies +skiing +skilled +skillet +skillful +skimmed +skimmer +skimming +skimpily +skincare +skinhead +skinless +skinning +skinny +skintight +skipper +skipping +skirmish +skirt +skittle +skydiver +skylight +skyline +skype +skyrocket +skyward +slab +slacked +slacker +slacking +slackness +slacks +slain +slam +slander +slang +slapping +slapstick +slashed +slashing +slate +slather +slaw +sled +sleek +sleep +sleet +sleeve +slept +sliceable +sliced +slicer +slicing +slick +slider +slideshow +sliding +slighted +slighting +slightly +slimness +slimy +slinging +slingshot +slinky +slip +slit +sliver +slobbery +slogan +sloped +sloping +sloppily +sloppy +slot +slouching +slouchy +sludge +slug +slum +slurp +slush +sly +small +smartly +smartness +smasher +smashing +smashup +smell +smelting +smile +smilingly +smirk +smite +smith +smitten +smock +smog +smoked +smokeless +smokiness +smoking +smoky +smolder +smooth +smother +smudge +smudgy +smuggler +smuggling +smugly +smugness +snack +snagged +snaking +snap +snare +snarl +snazzy +sneak +sneer +sneeze +sneezing +snide +sniff +snippet +snipping +snitch +snooper +snooze +snore +snoring +snorkel +snort +snout +snowbird +snowboard +snowbound +snowcap +snowdrift +snowdrop +snowfall +snowfield +snowflake +snowiness +snowless +snowman +snowplow +snowshoe +snowstorm +snowsuit +snowy +snub +snuff +snuggle +snugly +snugness +speak +spearfish +spearhead +spearman +spearmint +species +specimen +specked +speckled +specks +spectacle +spectator +spectrum +speculate +speech +speed +spellbind +speller +spelling +spendable +spender +spending +spent +spew +sphere +spherical +sphinx +spider +spied +spiffy +spill +spilt +spinach +spinal +spindle +spinner +spinning +spinout +spinster +spiny +spiral +spirited +spiritism +spirits +spiritual +splashed +splashing +splashy +splatter +spleen +splendid +splendor +splice +splicing +splinter +splotchy +splurge +spoilage +spoiled +spoiler +spoiling +spoils +spoken +spokesman +sponge +spongy +sponsor +spoof +spookily +spooky +spool +spoon +spore +sporting +sports +sporty +spotless +spotlight +spotted +spotter +spotting +spotty +spousal +spouse +spout +sprain +sprang +sprawl +spray +spree +sprig +spring +sprinkled +sprinkler +sprint +sprite +sprout +spruce +sprung +spry +spud +spur +sputter 
+spyglass +squabble +squad +squall +squander +squash +squatted +squatter +squatting +squeak +squealer +squealing +squeamish +squeegee +squeeze +squeezing +squid +squiggle +squiggly +squint +squire +squirt +squishier +squishy +stability +stabilize +stable +stack +stadium +staff +stage +staging +stagnant +stagnate +stainable +stained +staining +stainless +stalemate +staleness +stalling +stallion +stamina +stammer +stamp +stand +stank +staple +stapling +starboard +starch +stardom +stardust +starfish +stargazer +staring +stark +starless +starlet +starlight +starlit +starring +starry +starship +starter +starting +startle +startling +startup +starved +starving +stash +state +static +statistic +statue +stature +status +statute +statutory +staunch +stays +steadfast +steadier +steadily +steadying +steam +steed +steep +steerable +steering +steersman +stegosaur +stellar +stem +stench +stencil +step +stereo +sterile +sterility +sterilize +sterling +sternness +sternum +stew +stick +stiffen +stiffly +stiffness +stifle +stifling +stillness +stilt +stimulant +stimulate +stimuli +stimulus +stinger +stingily +stinging +stingray +stingy +stinking +stinky +stipend +stipulate +stir +stitch +stock +stoic +stoke +stole +stomp +stonewall +stoneware +stonework +stoning +stony +stood +stooge +stool +stoop +stoplight +stoppable +stoppage +stopped +stopper +stopping +stopwatch +storable +storage +storeroom +storewide +storm +stout +stove +stowaway +stowing +straddle +straggler +strained +strainer +straining +strangely +stranger +strangle +strategic +strategy +stratus +straw +stray +streak +stream +street +strength +strenuous +strep +stress +stretch +strewn +stricken +strict +stride +strife +strike +striking +strive +striving +strobe +strode +stroller +strongbox +strongly +strongman +struck +structure +strudel +struggle +strum +strung +strut +stubbed +stubble +stubbly +stubborn +stucco +stuck +student +studied +studio +study +stuffed +stuffing +stuffy +stumble +stumbling +stump +stung +stunned +stunner +stunning +stunt +stupor +sturdily +sturdy +styling +stylishly +stylist +stylized +stylus +suave +subarctic +subatomic +subdivide +subdued +subduing +subfloor +subgroup +subheader +subject +sublease +sublet +sublevel +sublime +submarine +submerge +submersed +submitter +subpanel +subpar +subplot +subprime +subscribe +subscript +subsector +subside +subsiding +subsidize +subsidy +subsoil +subsonic +substance +subsystem +subtext +subtitle +subtly +subtotal +subtract +subtype +suburb +subway +subwoofer +subzero +succulent +such +suction +sudden +sudoku +suds +sufferer +suffering +suffice +suffix +suffocate +suffrage +sugar +suggest +suing +suitable +suitably +suitcase +suitor +sulfate +sulfide +sulfite +sulfur +sulk +sullen +sulphate +sulphuric +sultry +superbowl +superglue +superhero +superior +superjet +superman +supermom +supernova +supervise +supper +supplier +supply +support +supremacy +supreme +surcharge +surely +sureness +surface +surfacing +surfboard +surfer +surgery +surgical +surging +surname +surpass +surplus +surprise +surreal +surrender +surrogate +surround +survey +survival +survive +surviving +survivor +sushi +suspect +suspend +suspense +sustained +sustainer +swab +swaddling +swagger +swampland +swan +swapping +swarm +sway +swear +sweat +sweep +swell +swept +swerve +swifter +swiftly +swiftness +swimmable +swimmer +swimming +swimsuit +swimwear +swinger +swinging +swipe +swirl +switch +swivel +swizzle +swooned +swoop +swoosh +swore +sworn +swung +sycamore +sympathy +symphonic +symphony +symptom +synapse 
+syndrome +synergy +synopses +synopsis +synthesis +synthetic +syrup +system +t-shirt +tabasco +tabby +tableful +tables +tablet +tableware +tabloid +tackiness +tacking +tackle +tackling +tacky +taco +tactful +tactical +tactics +tactile +tactless +tadpole +taekwondo +tag +tainted +take +taking +talcum +talisman +tall +talon +tamale +tameness +tamer +tamper +tank +tanned +tannery +tanning +tantrum +tapeless +tapered +tapering +tapestry +tapioca +tapping +taps +tarantula +target +tarmac +tarnish +tarot +tartar +tartly +tartness +task +tassel +taste +tastiness +tasting +tasty +tattered +tattle +tattling +tattoo +taunt +tavern +thank +that +thaw +theater +theatrics +thee +theft +theme +theology +theorize +thermal +thermos +thesaurus +these +thesis +thespian +thicken +thicket +thickness +thieving +thievish +thigh +thimble +thing +think +thinly +thinner +thinness +thinning +thirstily +thirsting +thirsty +thirteen +thirty +thong +thorn +those +thousand +thrash +thread +threaten +threefold +thrift +thrill +thrive +thriving +throat +throbbing +throng +throttle +throwaway +throwback +thrower +throwing +thud +thumb +thumping +thursday +thus +thwarting +thyself +tiara +tibia +tidal +tidbit +tidiness +tidings +tidy +tiger +tighten +tightly +tightness +tightrope +tightwad +tigress +tile +tiling +till +tilt +timid +timing +timothy +tinderbox +tinfoil +tingle +tingling +tingly +tinker +tinkling +tinsel +tinsmith +tint +tinwork +tiny +tipoff +tipped +tipper +tipping +tiptoeing +tiptop +tiring +tissue +trace +tracing +track +traction +tractor +trade +trading +tradition +traffic +tragedy +trailing +trailside +train +traitor +trance +tranquil +transfer +transform +translate +transpire +transport +transpose +trapdoor +trapeze +trapezoid +trapped +trapper +trapping +traps +trash +travel +traverse +travesty +tray +treachery +treading +treadmill +treason +treat +treble +tree +trekker +tremble +trembling +tremor +trench +trend +trespass +triage +trial +triangle +tribesman +tribunal +tribune +tributary +tribute +triceps +trickery +trickily +tricking +trickle +trickster +tricky +tricolor +tricycle +trident +tried +trifle +trifocals +trillion +trilogy +trimester +trimmer +trimming +trimness +trinity +trio +tripod +tripping +triumph +trivial +trodden +trolling +trombone +trophy +tropical +tropics +trouble +troubling +trough +trousers +trout +trowel +truce +truck +truffle +trump +trunks +trustable +trustee +trustful +trusting +trustless +truth +try +tubby +tubeless +tubular +tucking +tuesday +tug +tuition +tulip +tumble +tumbling +tummy +turban +turbine +turbofan +turbojet +turbulent +turf +turkey +turmoil +turret +turtle +tusk +tutor +tutu +tux +tweak +tweed +tweet +tweezers +twelve +twentieth +twenty +twerp +twice +twiddle +twiddling +twig +twilight +twine +twins +twirl +twistable +twisted +twister +twisting +twisty +twitch +twitter +tycoon +tying +tyke +udder +ultimate +ultimatum +ultra +umbilical +umbrella +umpire +unabashed +unable +unadorned +unadvised +unafraid +unaired +unaligned +unaltered +unarmored +unashamed +unaudited +unawake +unaware +unbaked +unbalance +unbeaten +unbend +unbent +unbiased +unbitten +unblended +unblessed +unblock +unbolted +unbounded +unboxed +unbraided +unbridle +unbroken +unbuckled +unbundle +unburned +unbutton +uncanny +uncapped +uncaring +uncertain +unchain +unchanged +uncharted +uncheck +uncivil +unclad +unclaimed +unclamped +unclasp +uncle +unclip +uncloak +unclog +unclothed +uncoated +uncoiled +uncolored +uncombed +uncommon +uncooked +uncork +uncorrupt +uncounted +uncouple +uncouth 
+uncover +uncross +uncrown +uncrushed +uncured +uncurious +uncurled +uncut +undamaged +undated +undaunted +undead +undecided +undefined +underage +underarm +undercoat +undercook +undercut +underdog +underdone +underfed +underfeed +underfoot +undergo +undergrad +underhand +underline +underling +undermine +undermost +underpaid +underpass +underpay +underrate +undertake +undertone +undertook +undertow +underuse +underwear +underwent +underwire +undesired +undiluted +undivided +undocked +undoing +undone +undrafted +undress +undrilled +undusted +undying +unearned +unearth +unease +uneasily +uneasy +uneatable +uneaten +unedited +unelected +unending +unengaged +unenvied +unequal +unethical +uneven +unexpired +unexposed +unfailing +unfair +unfasten +unfazed +unfeeling +unfiled +unfilled +unfitted +unfitting +unfixable +unfixed +unflawed +unfocused +unfold +unfounded +unframed +unfreeze +unfrosted +unfrozen +unfunded +unglazed +ungloved +unglue +ungodly +ungraded +ungreased +unguarded +unguided +unhappily +unhappy +unharmed +unhealthy +unheard +unhearing +unheated +unhelpful +unhidden +unhinge +unhitched +unholy +unhook +unicorn +unicycle +unified +unifier +uniformed +uniformly +unify +unimpeded +uninjured +uninstall +uninsured +uninvited +union +uniquely +unisexual +unison +unissued +unit +universal +universe +unjustly +unkempt +unkind +unknotted +unknowing +unknown +unlaced +unlatch +unlawful +unleaded +unlearned +unleash +unless +unleveled +unlighted +unlikable +unlimited +unlined +unlinked +unlisted +unlit +unlivable +unloaded +unloader +unlocked +unlocking +unlovable +unloved +unlovely +unloving +unluckily +unlucky +unmade +unmanaged +unmanned +unmapped +unmarked +unmasked +unmasking +unmatched +unmindful +unmixable +unmixed +unmolded +unmoral +unmovable +unmoved +unmoving +unnamable +unnamed +unnatural +unneeded +unnerve +unnerving +unnoticed +unopened +unopposed +unpack +unpadded +unpaid +unpainted +unpaired +unpaved +unpeeled +unpicked +unpiloted +unpinned +unplanned +unplanted +unpleased +unpledged +unplowed +unplug +unpopular +unproven +unquote +unranked +unrated +unraveled +unreached +unread +unreal +unreeling +unrefined +unrelated +unrented +unrest +unretired +unrevised +unrigged +unripe +unrivaled +unroasted +unrobed +unroll +unruffled +unruly +unrushed +unsaddle +unsafe +unsaid +unsalted +unsaved +unsavory +unscathed +unscented +unscrew +unsealed +unseated +unsecured +unseeing +unseemly +unseen +unselect +unselfish +unsent +unsettled +unshackle +unshaken +unshaved +unshaven +unsheathe +unshipped +unsightly +unsigned +unskilled +unsliced +unsmooth +unsnap +unsocial +unsoiled +unsold +unsolved +unsorted +unspoiled +unspoken +unstable +unstaffed +unstamped +unsteady +unsterile +unstirred +unstitch +unstopped +unstuck +unstuffed +unstylish +unsubtle +unsubtly +unsuited +unsure +unsworn +untagged +untainted +untaken +untamed +untangled +untapped +untaxed +unthawed +unthread +untidy +untie +until +untimed +untimely +untitled +untoasted +untold +untouched +untracked +untrained +untreated +untried +untrimmed +untrue +untruth +unturned +untwist +untying +unusable +unused +unusual +unvalued +unvaried +unvarying +unveiled +unveiling +unvented +unviable +unvisited +unvocal +unwanted +unwarlike +unwary +unwashed +unwatched +unweave +unwed +unwelcome +unwell +unwieldy +unwilling +unwind +unwired +unwitting +unwomanly +unworldly +unworn +unworried +unworthy +unwound +unwoven +unwrapped +unwritten +unzip +upbeat +upchuck +upcoming +upcountry +update +upfront +upgrade +upheaval +upheld +uphill +uphold 
+uplifted +uplifting +upload +upon +upper +upright +uprising +upriver +uproar +uproot +upscale +upside +upstage +upstairs +upstart +upstate +upstream +upstroke +upswing +uptake +uptight +uptown +upturned +upward +upwind +uranium +urban +urchin +urethane +urgency +urgent +urging +urologist +urology +usable +usage +useable +used +uselessly +user +usher +usual +utensil +utility +utilize +utmost +utopia +utter +vacancy +vacant +vacate +vacation +vagabond +vagrancy +vagrantly +vaguely +vagueness +valiant +valid +valium +valley +valuables +value +vanilla +vanish +vanity +vanquish +vantage +vaporizer +variable +variably +varied +variety +various +varmint +varnish +varsity +varying +vascular +vaseline +vastly +vastness +veal +vegan +veggie +vehicular +velcro +velocity +velvet +vendetta +vending +vendor +veneering +vengeful +venomous +ventricle +venture +venue +venus +verbalize +verbally +verbose +verdict +verify +verse +version +versus +vertebrae +vertical +vertigo +very +vessel +vest +veteran +veto +vexingly +viability +viable +vibes +vice +vicinity +victory +video +viewable +viewer +viewing +viewless +viewpoint +vigorous +village +villain +vindicate +vineyard +vintage +violate +violation +violator +violet +violin +viper +viral +virtual +virtuous +virus +visa +viscosity +viscous +viselike +visible +visibly +vision +visiting +visitor +visor +vista +vitality +vitalize +vitally +vitamins +vivacious +vividly +vividness +vixen +vocalist +vocalize +vocally +vocation +voice +voicing +void +volatile +volley +voltage +volumes +voter +voting +voucher +vowed +vowel +voyage +wackiness +wad +wafer +waffle +waged +wager +wages +waggle +wagon +wake +waking +walk +walmart +walnut +walrus +waltz +wand +wannabe +wanted +wanting +wasabi +washable +washbasin +washboard +washbowl +washcloth +washday +washed +washer +washhouse +washing +washout +washroom +washstand +washtub +wasp +wasting +watch +water +waviness +waving +wavy +whacking +whacky +wham +wharf +wheat +whenever +whiff +whimsical +whinny +whiny +whisking +whoever +whole +whomever +whoopee +whooping +whoops +why +wick +widely +widen +widget +widow +width +wieldable +wielder +wife +wifi +wikipedia +wildcard +wildcat +wilder +wildfire +wildfowl +wildland +wildlife +wildly +wildness +willed +willfully +willing +willow +willpower +wilt +wimp +wince +wincing +wind +wing +winking +winner +winnings +winter +wipe +wired +wireless +wiring +wiry +wisdom +wise +wish +wisplike +wispy +wistful +wizard +wobble +wobbling +wobbly +wok +wolf +wolverine +womanhood +womankind +womanless +womanlike +womanly +womb +woof +wooing +wool +woozy +word +work +worried +worrier +worrisome +worry +worsening +worshiper +worst +wound +woven +wow +wrangle +wrath +wreath +wreckage +wrecker +wrecking +wrench +wriggle +wriggly +wrinkle +wrinkly +wrist +writing +written +wrongdoer +wronged +wrongful +wrongly +wrongness +wrought +xbox +xerox +yahoo +yam +yanking +yapping +yard +yarn +yeah +yearbook +yearling +yearly +yearning +yeast +yelling +yelp +yen +yesterday +yiddish +yield +yin +yippee +yo-yo +yodel +yoga +yogurt +yonder +yoyo +yummy +zap +zealous +zebra +zen +zeppelin +zero +zestfully +zesty +zigzagged +zipfile +zipping +zippy +zips +zit +zodiac +zombie +zone +zoning +zookeeper +zoologist +zoology +zoom diff --git a/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_prefixed.txt b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_prefixed.txt new file mode 100644 index 0000000..9ac732f --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_prefixed.txt @@ -0,0 +1,1296 @@ +aardvark +abandoned +abbreviate +abdomen +abhorrence +abiding +abnormal +abrasion +absorbing +abundant +abyss +academy +accountant +acetone +achiness +acid +acoustics +acquire +acrobat +actress +acuteness +aerosol +aesthetic +affidavit +afloat +afraid +aftershave +again +agency +aggressor +aghast +agitate +agnostic +agonizing +agreeing +aidless +aimlessly +ajar +alarmclock +albatross +alchemy +alfalfa +algae +aliens +alkaline +almanac +alongside +alphabet +already +also +altitude +aluminum +always +amazingly +ambulance +amendment +amiable +ammunition +amnesty +amoeba +amplifier +amuser +anagram +anchor +android +anesthesia +angelfish +animal +anklet +announcer +anonymous +answer +antelope +anxiety +anyplace +aorta +apartment +apnea +apostrophe +apple +apricot +aquamarine +arachnid +arbitrate +ardently +arena +argument +aristocrat +armchair +aromatic +arrowhead +arsonist +artichoke +asbestos +ascend +aseptic +ashamed +asinine +asleep +asocial +asparagus +astronaut +asymmetric +atlas +atmosphere +atom +atrocious +attic +atypical +auctioneer +auditorium +augmented +auspicious +automobile +auxiliary +avalanche +avenue +aviator +avocado +awareness +awhile +awkward +awning +awoke +axially +azalea +babbling +backpack +badass +bagpipe +bakery +balancing +bamboo +banana +barracuda +basket +bathrobe +bazooka +blade +blender +blimp +blouse +blurred +boatyard +bobcat +body +bogusness +bohemian +boiler +bonnet +boots +borough +bossiness +bottle +bouquet +boxlike +breath +briefcase +broom +brushes +bubblegum +buckle +buddhist +buffalo +bullfrog +bunny +busboy +buzzard +cabin +cactus +cadillac +cafeteria +cage +cahoots +cajoling +cakewalk +calculator +camera +canister +capsule +carrot +cashew +cathedral +caucasian +caviar +ceasefire +cedar +celery +cement +census +ceramics +cesspool +chalkboard +cheesecake +chimney +chlorine +chopsticks +chrome +chute +cilantro +cinnamon +circle +cityscape +civilian +clay +clergyman +clipboard +clock +clubhouse +coathanger +cobweb +coconut +codeword +coexistent +coffeecake +cognitive +cohabitate +collarbone +computer +confetti +copier +cornea +cosmetics +cotton +couch +coverless +coyote +coziness +crawfish +crewmember +crib +croissant +crumble +crystal +cubical +cucumber +cuddly +cufflink +cuisine +culprit +cup +curry +cushion +cuticle +cybernetic +cyclist +cylinder +cymbal +cynicism +cypress +cytoplasm +dachshund +daffodil +dagger +dairy +dalmatian +dandelion +dartboard +dastardly +datebook +daughter +dawn +daytime +dazzler +dealer +debris +decal +dedicate +deepness +defrost +degree +dehydrator +deliverer +democrat +dentist +deodorant +depot +deranged +desktop +detergent +device +dexterity +diamond +dibs +dictionary +diffuser +digit +dilated +dimple +dinnerware +dioxide +diploma +directory +dishcloth +ditto +dividers +dizziness +doctor +dodge +doll +dominoes +donut +doorstep +dorsal +double +downstairs +dozed +drainpipe +dresser +driftwood +droppings +drum +dryer +dubiously +duckling +duffel +dugout +dumpster +duplex +durable +dustpan +dutiful +duvet +dwarfism +dwelling +dwindling +dynamite +dyslexia +eagerness +earlobe +easel +eavesdrop +ebook +eccentric +echoless +eclipse +ecosystem +ecstasy +edged +editor +educator +eelworm +eerie +effects +eggnog +egomaniac +ejection +elastic +elbow +elderly +elephant +elfishly +eliminator +elk +elliptical +elongated +elsewhere +elusive +elves +emancipate +embroidery +emcee +emerald +emission +emoticon +emperor +emulate +enactment 
+enchilada +endorphin +energy +enforcer +engine +enhance +enigmatic +enjoyably +enlarged +enormous +enquirer +enrollment +ensemble +entryway +enunciate +envoy +enzyme +epidemic +equipment +erasable +ergonomic +erratic +eruption +escalator +eskimo +esophagus +espresso +essay +estrogen +etching +eternal +ethics +etiquette +eucalyptus +eulogy +euphemism +euthanize +evacuation +evergreen +evidence +evolution +exam +excerpt +exerciser +exfoliate +exhale +exist +exorcist +explode +exquisite +exterior +exuberant +fabric +factory +faded +failsafe +falcon +family +fanfare +fasten +faucet +favorite +feasibly +february +federal +feedback +feigned +feline +femur +fence +ferret +festival +fettuccine +feudalist +feverish +fiberglass +fictitious +fiddle +figurine +fillet +finalist +fiscally +fixture +flashlight +fleshiness +flight +florist +flypaper +foamless +focus +foggy +folksong +fondue +footpath +fossil +fountain +fox +fragment +freeway +fridge +frosting +fruit +fryingpan +gadget +gainfully +gallstone +gamekeeper +gangway +garlic +gaslight +gathering +gauntlet +gearbox +gecko +gem +generator +geographer +gerbil +gesture +getaway +geyser +ghoulishly +gibberish +giddiness +giftshop +gigabyte +gimmick +giraffe +giveaway +gizmo +glasses +gleeful +glisten +glove +glucose +glycerin +gnarly +gnomish +goatskin +goggles +goldfish +gong +gooey +gorgeous +gosling +gothic +gourmet +governor +grape +greyhound +grill +groundhog +grumbling +guacamole +guerrilla +guitar +gullible +gumdrop +gurgling +gusto +gutless +gymnast +gynecology +gyration +habitat +hacking +haggard +haiku +halogen +hamburger +handgun +happiness +hardhat +hastily +hatchling +haughty +hazelnut +headband +hedgehog +hefty +heinously +helmet +hemoglobin +henceforth +herbs +hesitation +hexagon +hubcap +huddling +huff +hugeness +hullabaloo +human +hunter +hurricane +hushing +hyacinth +hybrid +hydrant +hygienist +hypnotist +ibuprofen +icepack +icing +iconic +identical +idiocy +idly +igloo +ignition +iguana +illuminate +imaging +imbecile +imitator +immigrant +imprint +iodine +ionosphere +ipad +iphone +iridescent +irksome +iron +irrigation +island +isotope +issueless +italicize +itemizer +itinerary +itunes +ivory +jabbering +jackrabbit +jaguar +jailhouse +jalapeno +jamboree +janitor +jarring +jasmine +jaundice +jawbreaker +jaywalker +jazz +jealous +jeep +jelly +jeopardize +jersey +jetski +jezebel +jiffy +jigsaw +jingling +jobholder +jockstrap +jogging +john +joinable +jokingly +journal +jovial +joystick +jubilant +judiciary +juggle +juice +jujitsu +jukebox +jumpiness +junkyard +juror +justifying +juvenile +kabob +kamikaze +kangaroo +karate +kayak +keepsake +kennel +kerosene +ketchup +khaki +kickstand +kilogram +kimono +kingdom +kiosk +kissing +kite +kleenex +knapsack +kneecap +knickers +koala +krypton +laboratory +ladder +lakefront +lantern +laptop +laryngitis +lasagna +latch +laundry +lavender +laxative +lazybones +lecturer +leftover +leggings +leisure +lemon +length +leopard +leprechaun +lettuce +leukemia +levers +lewdness +liability +library +licorice +lifeboat +lightbulb +likewise +lilac +limousine +lint +lioness +lipstick +liquid +listless +litter +liverwurst +lizard +llama +luau +lubricant +lucidity +ludicrous +luggage +lukewarm +lullaby +lumberjack +lunchbox +luridness +luscious +luxurious +lyrics +macaroni +maestro +magazine +mahogany +maimed +majority +makeover +malformed +mammal +mango +mapmaker +marbles +massager +matchstick +maverick +maximum +mayonnaise +moaning +mobilize +moccasin +modify +moisture +molecule +momentum +monastery 
+moonshine +mortuary +mosquito +motorcycle +mousetrap +movie +mower +mozzarella +muckiness +mudflow +mugshot +mule +mummy +mundane +muppet +mural +mustard +mutation +myriad +myspace +myth +nail +namesake +nanosecond +napkin +narrator +nastiness +natives +nautically +navigate +nearest +nebula +nectar +nefarious +negotiator +neither +nemesis +neoliberal +nephew +nervously +nest +netting +neuron +nevermore +nextdoor +nicotine +niece +nimbleness +nintendo +nirvana +nuclear +nugget +nuisance +nullify +numbing +nuptials +nursery +nutcracker +nylon +oasis +oat +obediently +obituary +object +obliterate +obnoxious +observer +obtain +obvious +occupation +oceanic +octopus +ocular +office +oftentimes +oiliness +ointment +older +olympics +omissible +omnivorous +oncoming +onion +onlooker +onstage +onward +onyx +oomph +opaquely +opera +opium +opossum +opponent +optical +opulently +oscillator +osmosis +ostrich +otherwise +ought +outhouse +ovation +oven +owlish +oxford +oxidize +oxygen +oyster +ozone +pacemaker +padlock +pageant +pajamas +palm +pamphlet +pantyhose +paprika +parakeet +passport +patio +pauper +pavement +payphone +pebble +peculiarly +pedometer +pegboard +pelican +penguin +peony +pepperoni +peroxide +pesticide +petroleum +pewter +pharmacy +pheasant +phonebook +phrasing +physician +plank +pledge +plotted +plug +plywood +pneumonia +podiatrist +poetic +pogo +poison +poking +policeman +poncho +popcorn +porcupine +postcard +poultry +powerboat +prairie +pretzel +princess +propeller +prune +pry +pseudo +psychopath +publisher +pucker +pueblo +pulley +pumpkin +punchbowl +puppy +purse +pushup +putt +puzzle +pyramid +python +quarters +quesadilla +quilt +quote +racoon +radish +ragweed +railroad +rampantly +rancidity +rarity +raspberry +ravishing +rearrange +rebuilt +receipt +reentry +refinery +register +rehydrate +reimburse +rejoicing +rekindle +relic +remote +renovator +reopen +reporter +request +rerun +reservoir +retriever +reunion +revolver +rewrite +rhapsody +rhetoric +rhino +rhubarb +rhyme +ribbon +riches +ridden +rigidness +rimmed +riptide +riskily +ritzy +riverboat +roamer +robe +rocket +romancer +ropelike +rotisserie +roundtable +royal +rubber +rudderless +rugby +ruined +rulebook +rummage +running +rupture +rustproof +sabotage +sacrifice +saddlebag +saffron +sainthood +saltshaker +samurai +sandworm +sapphire +sardine +sassy +satchel +sauna +savage +saxophone +scarf +scenario +schoolbook +scientist +scooter +scrapbook +sculpture +scythe +secretary +sedative +segregator +seismology +selected +semicolon +senator +septum +sequence +serpent +sesame +settler +severely +shack +shelf +shirt +shovel +shrimp +shuttle +shyness +siamese +sibling +siesta +silicon +simmering +singles +sisterhood +sitcom +sixfold +sizable +skateboard +skeleton +skies +skulk +skylight +slapping +sled +slingshot +sloth +slumbering +smartphone +smelliness +smitten +smokestack +smudge +snapshot +sneezing +sniff +snowsuit +snugness +speakers +sphinx +spider +splashing +sponge +sprout +spur +spyglass +squirrel +statue +steamboat +stingray +stopwatch +strawberry +student +stylus +suave +subway +suction +suds +suffocate +sugar +suitcase +sulphur +superstore +surfer +sushi +swan +sweatshirt +swimwear +sword +sycamore +syllable +symphony +synagogue +syringes +systemize +tablespoon +taco +tadpole +taekwondo +tagalong +takeout +tallness +tamale +tanned +tapestry +tarantula +tastebud +tattoo +tavern +thaw +theater +thimble +thorn +throat +thumb +thwarting +tiara +tidbit +tiebreaker +tiger +timid +tinsel +tiptoeing +tirade +tissue +tractor 
+tree +tripod +trousers +trucks +tryout +tubeless +tuesday +tugboat +tulip +tumbleweed +tupperware +turtle +tusk +tutorial +tuxedo +tweezers +twins +tyrannical +ultrasound +umbrella +umpire +unarmored +unbuttoned +uncle +underwear +unevenness +unflavored +ungloved +unhinge +unicycle +unjustly +unknown +unlocking +unmarked +unnoticed +unopened +unpaved +unquenched +unroll +unscrewing +untied +unusual +unveiled +unwrinkled +unyielding +unzip +upbeat +upcountry +update +upfront +upgrade +upholstery +upkeep +upload +uppercut +upright +upstairs +uptown +upwind +uranium +urban +urchin +urethane +urgent +urologist +username +usher +utensil +utility +utmost +utopia +utterance +vacuum +vagrancy +valuables +vanquished +vaporizer +varied +vaseline +vegetable +vehicle +velcro +vendor +vertebrae +vestibule +veteran +vexingly +vicinity +videogame +viewfinder +vigilante +village +vinegar +violin +viperfish +virus +visor +vitamins +vivacious +vixen +vocalist +vogue +voicemail +volleyball +voucher +voyage +vulnerable +waffle +wagon +wakeup +walrus +wanderer +wasp +water +waving +wheat +whisper +wholesaler +wick +widow +wielder +wifeless +wikipedia +wildcat +windmill +wipeout +wired +wishbone +wizardry +wobbliness +wolverine +womb +woolworker +workbasket +wound +wrangle +wreckage +wristwatch +wrongdoing +xerox +xylophone +yacht +yahoo +yard +yearbook +yesterday +yiddish +yield +yo-yo +yodel +yogurt +yuppie +zealot +zebra +zeppelin +zestfully +zigzagged +zillion +zipping +zirconium +zodiac +zombie +zookeeper +zucchini diff --git a/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_short.txt b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_short.txt new file mode 100644 index 0000000..4c8baa4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/_data/wordsets/eff_short.txt @@ -0,0 +1,1296 @@ +acid +acorn +acre +acts +afar +affix +aged +agent +agile +aging +agony +ahead +aide +aids +aim +ajar +alarm +alias +alibi +alien +alike +alive +aloe +aloft +aloha +alone +amend +amino +ample +amuse +angel +anger +angle +ankle +apple +april +apron +aqua +area +arena +argue +arise +armed +armor +army +aroma +array +arson +art +ashen +ashes +atlas +atom +attic +audio +avert +avoid +awake +award +awoke +axis +bacon +badge +bagel +baggy +baked +baker +balmy +banjo +barge +barn +bash +basil +bask +batch +bath +baton +bats +blade +blank +blast +blaze +bleak +blend +bless +blimp +blink +bloat +blob +blog +blot +blunt +blurt +blush +boast +boat +body +boil +bok +bolt +boned +boney +bonus +bony +book +booth +boots +boss +botch +both +boxer +breed +bribe +brick +bride +brim +bring +brink +brisk +broad +broil +broke +brook +broom +brush +buck +bud +buggy +bulge +bulk +bully +bunch +bunny +bunt +bush +bust +busy +buzz +cable +cache +cadet +cage +cake +calm +cameo +canal +candy +cane +canon +cape +card +cargo +carol +carry +carve +case +cash +cause +cedar +chain +chair +chant +chaos +charm +chase +cheek +cheer +chef +chess +chest +chew +chief +chili +chill +chip +chomp +chop +chow +chuck +chump +chunk +churn +chute +cider +cinch +city +civic +civil +clad +claim +clamp +clap +clash +clasp +class +claw +clay +clean +clear +cleat +cleft +clerk +click +cling +clink +clip +cloak +clock +clone +cloth +cloud +clump +coach +coast +coat +cod +coil +coke +cola +cold +colt +coma +come +comic +comma +cone +cope +copy +coral +cork +cost +cot +couch +cough +cover +cozy +craft +cramp +crane +crank +crate +crave +crawl +crazy +creme +crepe +crept +crib +cried +crisp +crook +crop +cross +crowd +crown +crumb +crush 
+crust +cub +cult +cupid +cure +curl +curry +curse +curve +curvy +cushy +cut +cycle +dab +dad +daily +dairy +daisy +dance +dandy +darn +dart +dash +data +date +dawn +deaf +deal +dean +debit +debt +debug +decaf +decal +decay +deck +decor +decoy +deed +delay +denim +dense +dent +depth +derby +desk +dial +diary +dice +dig +dill +dime +dimly +diner +dingy +disco +dish +disk +ditch +ditzy +dizzy +dock +dodge +doing +doll +dome +donor +donut +dose +dot +dove +down +dowry +doze +drab +drama +drank +draw +dress +dried +drift +drill +drive +drone +droop +drove +drown +drum +dry +duck +duct +dude +dug +duke +duo +dusk +dust +duty +dwarf +dwell +eagle +early +earth +easel +east +eaten +eats +ebay +ebony +ebook +echo +edge +eel +eject +elbow +elder +elf +elk +elm +elope +elude +elves +email +emit +empty +emu +enter +entry +envoy +equal +erase +error +erupt +essay +etch +evade +even +evict +evil +evoke +exact +exit +fable +faced +fact +fade +fall +false +fancy +fang +fax +feast +feed +femur +fence +fend +ferry +fetal +fetch +fever +fiber +fifth +fifty +film +filth +final +finch +fit +five +flag +flaky +flame +flap +flask +fled +flick +fling +flint +flip +flirt +float +flock +flop +floss +flyer +foam +foe +fog +foil +folic +folk +food +fool +found +fox +foyer +frail +frame +fray +fresh +fried +frill +frisk +from +front +frost +froth +frown +froze +fruit +gag +gains +gala +game +gap +gas +gave +gear +gecko +geek +gem +genre +gift +gig +gills +given +giver +glad +glass +glide +gloss +glove +glow +glue +goal +going +golf +gong +good +gooey +goofy +gore +gown +grab +grain +grant +grape +graph +grasp +grass +grave +gravy +gray +green +greet +grew +grid +grief +grill +grip +grit +groom +grope +growl +grub +grunt +guide +gulf +gulp +gummy +guru +gush +gut +guy +habit +half +halo +halt +happy +harm +hash +hasty +hatch +hate +haven +hazel +hazy +heap +heat +heave +hedge +hefty +help +herbs +hers +hub +hug +hula +hull +human +humid +hump +hung +hunk +hunt +hurry +hurt +hush +hut +ice +icing +icon +icy +igloo +image +ion +iron +islam +issue +item +ivory +ivy +jab +jam +jaws +jazz +jeep +jelly +jet +jiffy +job +jog +jolly +jolt +jot +joy +judge +juice +juicy +july +jumbo +jump +junky +juror +jury +keep +keg +kept +kick +kilt +king +kite +kitty +kiwi +knee +knelt +koala +kung +ladle +lady +lair +lake +lance +land +lapel +large +lash +lasso +last +latch +late +lazy +left +legal +lemon +lend +lens +lent +level +lever +lid +life +lift +lilac +lily +limb +limes +line +lint +lion +lip +list +lived +liver +lunar +lunch +lung +lurch +lure +lurk +lying +lyric +mace +maker +malt +mama +mango +manor +many +map +march +mardi +marry +mash +match +mate +math +moan +mocha +moist +mold +mom +moody +mop +morse +most +motor +motto +mount +mouse +mousy +mouth +move +movie +mower +mud +mug +mulch +mule +mull +mumbo +mummy +mural +muse +music +musky +mute +nacho +nag +nail +name +nanny +nap +navy +near +neat +neon +nerd +nest +net +next +niece +ninth +nutty +oak +oasis +oat +ocean +oil +old +olive +omen +onion +only +ooze +opal +open +opera +opt +otter +ouch +ounce +outer +oval +oven +owl +ozone +pace +pagan +pager +palm +panda +panic +pants +panty +paper +park +party +pasta +patch +path +patio +payer +pecan +penny +pep +perch +perky +perm +pest +petal +petri +petty +photo +plank +plant +plaza +plead +plot +plow +pluck +plug +plus +poach +pod +poem +poet +pogo +point +poise +poker +polar +polio +polka +polo +pond +pony +poppy +pork +poser +pouch +pound +pout +power +prank +press +print +prior +prism +prize +probe +prong +proof +props 
+prude +prune +pry +pug +pull +pulp +pulse +puma +punch +punk +pupil +puppy +purr +purse +push +putt +quack +quake +query +quiet +quill +quilt +quit +quota +quote +rabid +race +rack +radar +radio +raft +rage +raid +rail +rake +rally +ramp +ranch +range +rank +rant +rash +raven +reach +react +ream +rebel +recap +relax +relay +relic +remix +repay +repel +reply +rerun +reset +rhyme +rice +rich +ride +rigid +rigor +rinse +riot +ripen +rise +risk +ritzy +rival +river +roast +robe +robin +rock +rogue +roman +romp +rope +rover +royal +ruby +rug +ruin +rule +runny +rush +rust +rut +sadly +sage +said +saint +salad +salon +salsa +salt +same +sandy +santa +satin +sauna +saved +savor +sax +say +scale +scam +scan +scare +scarf +scary +scoff +scold +scoop +scoot +scope +score +scorn +scout +scowl +scrap +scrub +scuba +scuff +sect +sedan +self +send +sepia +serve +set +seven +shack +shade +shady +shaft +shaky +sham +shape +share +sharp +shed +sheep +sheet +shelf +shell +shine +shiny +ship +shirt +shock +shop +shore +shout +shove +shown +showy +shred +shrug +shun +shush +shut +shy +sift +silk +silly +silo +sip +siren +sixth +size +skate +skew +skid +skier +skies +skip +skirt +skit +sky +slab +slack +slain +slam +slang +slash +slate +slaw +sled +sleek +sleep +sleet +slept +slice +slick +slimy +sling +slip +slit +slob +slot +slug +slum +slurp +slush +small +smash +smell +smile +smirk +smog +snack +snap +snare +snarl +sneak +sneer +sniff +snore +snort +snout +snowy +snub +snuff +speak +speed +spend +spent +spew +spied +spill +spiny +spoil +spoke +spoof +spool +spoon +sport +spot +spout +spray +spree +spur +squad +squat +squid +stack +staff +stage +stain +stall +stamp +stand +stank +stark +start +stash +state +stays +steam +steep +stem +step +stew +stick +sting +stir +stock +stole +stomp +stony +stood +stool +stoop +stop +storm +stout +stove +straw +stray +strut +stuck +stud +stuff +stump +stung +stunt +suds +sugar +sulk +surf +sushi +swab +swan +swarm +sway +swear +sweat +sweep +swell +swept +swim +swing +swipe +swirl +swoop +swore +syrup +tacky +taco +tag +take +tall +talon +tamer +tank +taper +taps +tarot +tart +task +taste +tasty +taunt +thank +thaw +theft +theme +thigh +thing +think +thong +thorn +those +throb +thud +thumb +thump +thus +tiara +tidal +tidy +tiger +tile +tilt +tint +tiny +trace +track +trade +train +trait +trap +trash +tray +treat +tree +trek +trend +trial +tribe +trick +trio +trout +truce +truck +trump +trunk +try +tug +tulip +tummy +turf +tusk +tutor +tutu +tux +tweak +tweet +twice +twine +twins +twirl +twist +uncle +uncut +undo +unify +union +unit +untie +upon +upper +urban +used +user +usher +utter +value +vapor +vegan +venue +verse +vest +veto +vice +video +view +viral +virus +visa +visor +vixen +vocal +voice +void +volt +voter +vowel +wad +wafer +wager +wages +wagon +wake +walk +wand +wasp +watch +water +wavy +wheat +whiff +whole +whoop +wick +widen +widow +width +wife +wifi +wilt +wimp +wind +wing +wink +wipe +wired +wiry +wise +wish +wispy +wok +wolf +womb +wool +woozy +word +work +worry +wound +woven +wrath +wreck +wrist +xerox +yahoo +yam +yard +year +yeast +yelp +yield +yo-yo +yodel +yoga +yoyo +yummy +zebra +zero +zesty +zippy +zone +zoom diff --git a/venv/lib/python3.12/site-packages/passlib/apache.py b/venv/lib/python3.12/site-packages/passlib/apache.py new file mode 100644 index 0000000..a75f2cf --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/apache.py @@ -0,0 +1,1255 @@ +"""passlib.apache - apache password support""" +# XXX: relocate this to 
passlib.ext.apache? +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import os +from warnings import warn +# site +# pkg +from passlib import exc, registry +from passlib.context import CryptContext +from passlib.exc import ExpectedStringError +from passlib.hash import htdigest +from passlib.utils import render_bytes, to_bytes, is_ascii_codec +from passlib.utils.decor import deprecated_method +from passlib.utils.compat import join_bytes, unicode, BytesIO, PY3 +# local +__all__ = [ + 'HtpasswdFile', + 'HtdigestFile', +] + +#============================================================================= +# constants & support +#============================================================================= +_UNSET = object() + +_BCOLON = b":" +_BHASH = b"#" + +# byte values that aren't allowed in fields. +_INVALID_FIELD_CHARS = b":\n\r\t\x00" + +#: _CommonFile._source token types +_SKIPPED = "skipped" +_RECORD = "record" + +#============================================================================= +# common helpers +#============================================================================= +class _CommonFile(object): + """common framework for HtpasswdFile & HtdigestFile""" + #=================================================================== + # instance attrs + #=================================================================== + + # charset encoding used by file (defaults to utf-8) + encoding = None + + # whether users() and other public methods should return unicode or bytes? + # (defaults to False under PY2, True under PY3) + return_unicode = None + + # if bound to local file, these will be set. + _path = None # local file path + _mtime = None # mtime when last loaded, or 0 + + # if true, automatically save to local file after changes are made. + autosave = False + + # dict mapping key -> value for all records in database. + # (e.g. user => hash for Htpasswd) + _records = None + + #: list of tokens for recreating original file contents when saving. if present, + #: will be sequence of (_SKIPPED, b"whitespace/comments") and (_RECORD, <key>) tuples. + _source = None + + #=================================================================== + # alt constructors + #=================================================================== + @classmethod + def from_string(cls, data, **kwds): + """create new object from raw string. + + :type data: unicode or bytes + :arg data: + database to load, as single string. + + :param \\*\\*kwds: + all other keywords are the same as in the class constructor + """ + if 'path' in kwds: + raise TypeError("'path' not accepted by from_string()") + self = cls(**kwds) + self.load_string(data) + return self + + @classmethod + def from_path(cls, path, **kwds): + """create new object from file, without binding object to file.
+ + :type path: str + :arg path: + local filepath to load from + + :param \\*\\*kwds: + all other keywords are the same as in the class constructor + """ + self = cls(**kwds) + self.load(path) + return self + + #=================================================================== + # init + #=================================================================== + def __init__(self, path=None, new=False, autoload=True, autosave=False, + encoding="utf-8", return_unicode=PY3, + ): + # set encoding + if not encoding: + warn("``encoding=None`` is deprecated as of Passlib 1.6, " + "and will cause a ValueError in Passlib 1.8, " + "use ``return_unicode=False`` instead.", + DeprecationWarning, stacklevel=2) + encoding = "utf-8" + return_unicode = False + elif not is_ascii_codec(encoding): + # htpasswd/htdigest files assume 1-byte chars, and use ":" separator, + # so only ascii-compatible encodings are allowed. + raise ValueError("encoding must be 7-bit ascii compatible") + self.encoding = encoding + + # set other attrs + self.return_unicode = return_unicode + self.autosave = autosave + self._path = path + self._mtime = 0 + + # init db + if not autoload: + warn("``autoload=False`` is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8, use ``new=True`` instead", + DeprecationWarning, stacklevel=2) + new = True + if path and not new: + self.load() + else: + self._records = {} + self._source = [] + + def __repr__(self): + tail = '' + if self.autosave: + tail += ' autosave=True' + if self._path: + tail += ' path=%r' % self._path + if self.encoding != "utf-8": + tail += ' encoding=%r' % self.encoding + return "<%s 0x%0x%s>" % (self.__class__.__name__, id(self), tail) + + # NOTE: ``path`` is a property so that ``_mtime`` is wiped when it's set. + + @property + def path(self): + return self._path + + @path.setter + def path(self, value): + if value != self._path: + self._mtime = 0 + self._path = value + + @property + def mtime(self): + """modify time when last loaded (if bound to a local file)""" + return self._mtime + + #=================================================================== + # loading + #=================================================================== + def load_if_changed(self): + """Reload from ``self.path`` only if file has changed since last load""" + if not self._path: + raise RuntimeError("%r is not bound to a local file" % self) + if self._mtime and self._mtime == os.path.getmtime(self._path): + return False + self.load() + return True + + def load(self, path=None, force=True): + """Load state from local file. + If no path is specified, attempts to load from ``self.path``. + + :type path: str + :arg path: local file to load from + + :type force: bool + :param force: + if ``force=False``, only load from ``self.path`` if file + has changed since last load. + + .. deprecated:: 1.6 + This keyword will be removed in Passlib 1.8; + Applications should use :meth:`load_if_changed` instead. + """ + if path is not None: + with open(path, "rb") as fh: + self._mtime = 0 + self._load_lines(fh) + elif not force: + warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8; " + "use %(name)s.load_if_changed() instead."
% + dict(name=self.__class__.__name__), + DeprecationWarning, stacklevel=2) + return self.load_if_changed() + elif self._path: + with open(self._path, "rb") as fh: + self._mtime = os.path.getmtime(self._path) + self._load_lines(fh) + else: + raise RuntimeError("%s().path is not set, an explicit path is required" % + self.__class__.__name__) + return True + + def load_string(self, data): + """Load state from unicode or bytes string, replacing current state""" + data = to_bytes(data, self.encoding, "data") + self._mtime = 0 + self._load_lines(BytesIO(data)) + + def _load_lines(self, lines): + """load from sequence of lines""" + parse = self._parse_record + records = {} + source = [] + skipped = b'' + for idx, line in enumerate(lines): + # NOTE: per htpasswd source (https://github.com/apache/httpd/blob/trunk/support/htpasswd.c), + # lines with only whitespace, or with "#" as first non-whitespace char, + # are left alone / ignored. + tmp = line.lstrip() + if not tmp or tmp.startswith(_BHASH): + skipped += line + continue + + # parse valid line + key, value = parse(line, idx+1) + + # NOTE: if multiple entries for a key, we use the first one, + # which seems to match htpasswd source + if key in records: + log.warning("username occurs multiple times in source file: %r" % key) + skipped += line + continue + + # flush buffer of skipped whitespace lines + if skipped: + source.append((_SKIPPED, skipped)) + skipped = b'' + + # store new user line + records[key] = value + source.append((_RECORD, key)) + + # don't bother preserving trailing whitespace, but do preserve trailing comments + if skipped.rstrip(): + source.append((_SKIPPED, skipped)) + + # NOTE: not replacing ._records until parsing succeeds, so loading is atomic. + self._records = records + self._source = source + + def _parse_record(self, record, lineno): # pragma: no cover - abstract method + """parse line of file into (key, value) pair""" + raise NotImplementedError("should be implemented in subclass") + + def _set_record(self, key, value): + """ + helper for setting record which takes care of inserting source line if needed; + + :returns: + bool indicating whether key was already present + """ + records = self._records + existing = (key in records) + records[key] = value + if not existing: + self._source.append((_RECORD, key)) + return existing + + #=================================================================== + # saving + #=================================================================== + def _autosave(self): + """subclass helper to call save() after any changes""" + if self.autosave and self._path: + self.save() + + def save(self, path=None): + """Save current state to file. + If no path is specified, attempts to save to ``self.path``.
+ """ + if path is not None: + with open(path, "wb") as fh: + fh.writelines(self._iter_lines()) + elif self._path: + self.save(self._path) + self._mtime = os.path.getmtime(self._path) + else: + raise RuntimeError("%s().path is not set, cannot autosave" % + self.__class__.__name__) + + def to_string(self): + """Export current state as a string of bytes""" + return join_bytes(self._iter_lines()) + + # def clean(self): + # """ + # discard any comments or whitespace that were being preserved from the source file, + # and re-sort keys in alphabetical order + # """ + # self._source = [(_RECORD, key) for key in sorted(self._records)] + # self._autosave() + + def _iter_lines(self): + """iterator yielding lines of database""" + # NOTE: this relies on being an OrderedDict so that it outputs + # records in a deterministic order. + records = self._records + if __debug__: + pending = set(records) + for action, content in self._source: + if action == _SKIPPED: + # 'content' is whitespace/comments to write + yield content + else: + assert action == _RECORD + # 'content' is record key + if content not in records: + # record was deleted + # NOTE: doing it lazily like this so deleting & re-adding user + # preserves their original location in the file. + continue + yield self._render_record(content, records[content]) + if __debug__: + pending.remove(content) + if __debug__: + # sanity check that we actually wrote all the records + # (otherwise _source & _records are somehow out of sync) + assert not pending, "failed to write all records: missing=%r" % (pending,) + + def _render_record(self, key, value): # pragma: no cover - abstract method + """given key/value pair, encode as line of file""" + raise NotImplementedError("should be implemented in subclass") + + #=================================================================== + # field encoding + #=================================================================== + def _encode_user(self, user): + """user-specific wrapper for _encode_field()""" + return self._encode_field(user, "user") + + def _encode_realm(self, realm): # pragma: no cover - abstract method + """realm-specific wrapper for _encode_field()""" + return self._encode_field(realm, "realm") + + def _encode_field(self, value, param="field"): + """convert field to internal representation. + + internal representation is always bytes. byte strings are left as-is, + unicode strings encoding using file's default encoding (or ``utf-8`` + if no encoding has been specified). + + :raises UnicodeEncodeError: + if unicode value cannot be encoded using default encoding. + + :raises ValueError: + if resulting byte string contains a forbidden character, + or is too long (>255 bytes). + + :returns: + encoded identifer as bytes + """ + if isinstance(value, unicode): + value = value.encode(self.encoding) + elif not isinstance(value, bytes): + raise ExpectedStringError(value, param) + if len(value) > 255: + raise ValueError("%s must be at most 255 characters: %r" % + (param, value)) + if any(c in _INVALID_FIELD_CHARS for c in value): + raise ValueError("%s contains invalid characters: %r" % + (param, value,)) + return value + + def _decode_field(self, value): + """decode field from internal representation to format + returns by users() method, etc. + + :raises UnicodeDecodeError: + if unicode value cannot be decoded using default encoding. + (usually indicates wrong encoding set for file). + + :returns: + field as unicode or bytes, as appropriate. 
+ """ + assert isinstance(value, bytes), "expected value to be bytes" + if self.return_unicode: + return value.decode(self.encoding) + else: + return value + + # FIXME: htpasswd doc says passwords limited to 255 chars under Windows & MPE, + # and that longer ones are truncated. this may be side-effect of those + # platforms supporting the 'plaintext' scheme. these classes don't currently + # check for this. + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htpasswd context +# +# This section sets up a CryptContexts to mimic what schemes Apache +# (and the htpasswd tool) should support on the current system. +# +# Apache has long-time supported some basic builtin schemes (listed below), +# as well as the host's crypt() method -- though it's limited to being able +# to *verify* any scheme using that method, but can only generate "des_crypt" hashes. +# +# Apache 2.4 added builtin bcrypt support (even for platforms w/o native support). +# c.f. http://httpd.apache.org/docs/2.4/programs/htpasswd.html vs the 2.2 docs. +#============================================================================= + +#: set of default schemes that (if chosen) should be using bcrypt, +#: but can't due to lack of bcrypt. +_warn_no_bcrypt = set() + +def _init_default_schemes(): + + #: pick strongest one for host + host_best = None + for name in ["bcrypt", "sha256_crypt"]: + if registry.has_os_crypt_support(name): + host_best = name + break + + # check if we have a bcrypt backend -- otherwise issue warning + # XXX: would like to not spam this unless the user *requests* apache 24 + bcrypt = "bcrypt" if registry.has_backend("bcrypt") else None + _warn_no_bcrypt.clear() + if not bcrypt: + _warn_no_bcrypt.update(["portable_apache_24", "host_apache_24", + "linux_apache_24", "portable", "host"]) + + defaults = dict( + # strongest hash builtin to specific apache version + portable_apache_24=bcrypt or "apr_md5_crypt", + portable_apache_22="apr_md5_crypt", + + # strongest hash across current host & specific apache version + host_apache_24=bcrypt or host_best or "apr_md5_crypt", + host_apache_22=host_best or "apr_md5_crypt", + + # strongest hash on a linux host + linux_apache_24=bcrypt or "sha256_crypt", + linux_apache_22="sha256_crypt", + ) + + # set latest-apache version aliases + # XXX: could check for apache install, and pick correct host 22/24 default? + # could reuse _detect_htpasswd() helper in UTs + defaults.update( + portable=defaults['portable_apache_24'], + host=defaults['host_apache_24'], + ) + return defaults + +#: dict mapping default alias -> appropriate scheme +htpasswd_defaults = _init_default_schemes() + +def _init_htpasswd_context(): + + # start with schemes built into apache + schemes = [ + # builtin support added in apache 2.4 + # (https://bz.apache.org/bugzilla/show_bug.cgi?id=49288) + "bcrypt", + + # support not "builtin" to apache, instead it requires support through host's crypt(). + # adding them here to allow editing htpasswd under windows and then deploying under unix. 
+ "sha256_crypt", + "sha512_crypt", + "des_crypt", + + # apache default as of 2.2.18, and still default in 2.4 + "apr_md5_crypt", + + # NOTE: apache says ONLY intended for transitioning htpasswd <-> ldap + "ldap_sha1", + + # NOTE: apache says ONLY supported on Windows, Netware, TPF + "plaintext" + ] + + # apache can verify anything supported by the native crypt(), + # though htpasswd tool can only generate a limited set of hashes. + # (this list may overlap w/ builtin apache schemes) + schemes.extend(registry.get_supported_os_crypt_schemes()) + + # hack to remove dups and sort into preferred order + preferred = schemes[:3] + ["apr_md5_crypt"] + schemes + schemes = sorted(set(schemes), key=preferred.index) + + # create context object + return CryptContext( + schemes=schemes, + + # NOTE: default will change to "portable" in passlib 2.0 + default=htpasswd_defaults['portable_apache_22'], + + # NOTE: bcrypt "2y" is required, "2b" isn't recognized by libapr (issue 95) + bcrypt__ident="2y", + ) + +#: CryptContext configured to match htpasswd +htpasswd_context = _init_htpasswd_context() + +#============================================================================= +# htpasswd editing +#============================================================================= + +class HtpasswdFile(_CommonFile): + """class for reading & writing Htpasswd files. + + The class constructor accepts the following arguments: + + :type path: filepath + :param path: + + Specifies path to htpasswd file, use to implicitly load from and save to. + + This class has two modes of operation: + + 1. It can be "bound" to a local file by passing a ``path`` to the class + constructor. In this case it will load the contents of the file when + created, and the :meth:`load` and :meth:`save` methods will automatically + load from and save to that file if they are called without arguments. + + 2. Alternately, it can exist as an independant object, in which case + :meth:`load` and :meth:`save` will require an explicit path to be + provided whenever they are called. As well, ``autosave`` behavior + will not be available. + + This feature is new in Passlib 1.6, and is the default if no + ``path`` value is provided to the constructor. + + This is also exposed as a readonly instance attribute. + + :type new: bool + :param new: + + Normally, if *path* is specified, :class:`HtpasswdFile` will + immediately load the contents of the file. However, when creating + a new htpasswd file, applications can set ``new=True`` so that + the existing file (if any) will not be loaded. + + .. versionadded:: 1.6 + This feature was previously enabled by setting ``autoload=False``. + That alias has been deprecated, and will be removed in Passlib 1.8 + + :type autosave: bool + :param autosave: + + Normally, any changes made to an :class:`HtpasswdFile` instance + will not be saved until :meth:`save` is explicitly called. However, + if ``autosave=True`` is specified, any changes made will be + saved to disk immediately (assuming *path* has been set). + + This is also exposed as a writeable instance attribute. + + :type encoding: str + :param encoding: + + Optionally specify character encoding used to read/write file + and hash passwords. Defaults to ``utf-8``, though ``latin-1`` + is the only other commonly encountered encoding. + + This is also exposed as a readonly instance attribute. + + :type default_scheme: str + :param default_scheme: + Optionally specify default scheme to use when encoding new passwords. 
+ + This can be any of the schemes with builtin Apache support, + OR natively supported by the host OS's :func:`crypt.crypt` function. + + * Builtin schemes include ``"bcrypt"`` (apache 2.4+), ``"apr_md5_crypt"``, + and ``"des_crypt"``. + + * Schemes commonly supported by Unix hosts + include ``"bcrypt"``, ``"sha256_crypt"``, and ``"des_crypt"``. + + In order to not have to sort out what you should use, + passlib offers a number of aliases, that will resolve + to the most appropriate scheme based on your needs: + + * ``"portable"``, ``"portable_apache_24"`` -- pick scheme that's portable across hosts + running apache >= 2.4. **This will be the default as of Passlib 2.0**. + + * ``"portable_apache_22"`` -- pick scheme that's portable across hosts + running apache >= 2.2. **This is the default up to Passlib 1.9**. + + * ``"host"``, ``"host_apache_24"`` -- pick strongest scheme supported by + apache >= 2.4 and/or host OS. + + * ``"host_apache_22"`` -- pick strongest scheme supported by + apache >= 2.2 and/or host OS. + + .. versionadded:: 1.6 + This keyword was previously named ``default``. That alias + has been deprecated, and will be removed in Passlib 1.8. + + .. versionchanged:: 1.6.3 + + Added support for ``"bcrypt"``, ``"sha256_crypt"``, and ``"portable"`` alias. + + .. versionchanged:: 1.7 + + Added apache 2.4 semantics, and additional aliases. + + :type context: :class:`~passlib.context.CryptContext` + :param context: + :class:`!CryptContext` instance used to create + and verify the hashes found in the htpasswd file. + The default value is a pre-built context which supports all + of the hashes officially allowed in an htpasswd file. + + This is also exposed as a readonly instance attribute. + + .. warning:: + + This option may be used to add support for non-standard hash + formats to an htpasswd file. However, the resulting file + will probably not be usable by another application, + and particularly not by Apache. + + :param autoload: + Set to ``False`` to prevent the constructor from automatically + loading the file from disk. + + .. deprecated:: 1.6 + This has been replaced by the *new* keyword. + Instead of setting ``autoload=False``, you should use + ``new=True``. Support for this keyword will be removed + in Passlib 1.8. + + :param default: + Change the default algorithm used to hash new passwords. + + .. deprecated:: 1.6 + This has been renamed to *default_scheme* for clarity. + Support for this alias will be removed in Passlib 1.8. + + Loading & Saving + ================ + .. automethod:: load + .. automethod:: load_if_changed + .. automethod:: load_string + .. automethod:: save + .. automethod:: to_string + + Inspection + ================ + .. automethod:: users + .. automethod:: check_password + .. automethod:: get_hash + + Modification + ================ + .. automethod:: set_password + .. automethod:: delete + + Alternate Constructors + ====================== + .. automethod:: from_string + + Attributes + ========== + .. attribute:: path + + Path to local file that will be used as the default + for all :meth:`load` and :meth:`save` operations. + May be written to, initialized by the *path* constructor keyword. + + .. attribute:: autosave + + Writeable flag indicating whether changes will be automatically + written to *path*. + + Errors + ====== + :raises ValueError: + All of the methods in this class will raise a :exc:`ValueError` if + any user name contains a forbidden character (one of ``:\\r\\n\\t\\x00``), + or is longer than 255 characters.
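+
+ Example
+ =======
+ A minimal usage sketch (editor's illustration, not part of the upstream
+ passlib docstring; it uses only the constructor arguments and methods
+ documented above)::
+
+ from passlib.apache import HtpasswdFile
+
+ # create a new file, add a user, and write it to disk
+ ht = HtpasswdFile("test.htpasswd", new=True)
+ ht.set_password("alice", "correct horse battery staple")
+ ht.save()
+
+ # later: reload the file and verify a login attempt
+ ht = HtpasswdFile("test.htpasswd")
+ assert ht.check_password("alice", "correct horse battery staple") is True
+ assert ht.check_password("alice", "wrong-password") is False
+ assert ht.check_password("no-such-user", "x") is None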
+ """ + #=================================================================== + # instance attrs + #=================================================================== + + # NOTE: _records map stores for the key, and for the value, + # both in bytes which use self.encoding + + #=================================================================== + # init & serialization + #=================================================================== + def __init__(self, path=None, default_scheme=None, context=htpasswd_context, + **kwds): + if 'default' in kwds: + warn("``default`` is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8, it has been renamed " + "to ``default_scheem``.", + DeprecationWarning, stacklevel=2) + default_scheme = kwds.pop("default") + if default_scheme: + if default_scheme in _warn_no_bcrypt: + warn("HtpasswdFile: no bcrypt backends available, " + "using fallback for default scheme %r" % default_scheme, + exc.PasslibSecurityWarning) + default_scheme = htpasswd_defaults.get(default_scheme, default_scheme) + context = context.copy(default=default_scheme) + self.context = context + super(HtpasswdFile, self).__init__(path, **kwds) + + def _parse_record(self, record, lineno): + # NOTE: should return (user, hash) tuple + result = record.rstrip().split(_BCOLON) + if len(result) != 2: + raise ValueError("malformed htpasswd file (error reading line %d)" + % lineno) + return result + + def _render_record(self, user, hash): + return render_bytes("%s:%s\n", user, hash) + + #=================================================================== + # public methods + #=================================================================== + + def users(self): + """ + Return list of all users in database + """ + return [self._decode_field(user) for user in self._records] + + ##def has_user(self, user): + ## "check whether entry is present for user" + ## return self._encode_user(user) in self._records + + ##def rename(self, old, new): + ## """rename user account""" + ## old = self._encode_user(old) + ## new = self._encode_user(new) + ## hash = self._records.pop(old) + ## self._records[new] = hash + ## self._autosave() + + def set_password(self, user, password): + """Set password for user; adds user if needed. + + :returns: + * ``True`` if existing user was updated. + * ``False`` if user account was added. + + .. versionchanged:: 1.6 + This method was previously called ``update``, it was renamed + to prevent ambiguity with the dictionary method. + The old alias is deprecated, and will be removed in Passlib 1.8. + """ + hash = self.context.hash(password) + return self.set_hash(user, hash) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="set_password") + def update(self, user, password): + """set password for user""" + return self.set_password(user, password) + + def get_hash(self, user): + """Return hash stored for user, or ``None`` if user not found. + + .. versionchanged:: 1.6 + This method was previously named ``find``, it was renamed + for clarity. The old name is deprecated, and will be removed + in Passlib 1.8. + """ + try: + return self._records[self._encode_user(user)] + except KeyError: + return None + + def set_hash(self, user, hash): + """ + semi-private helper which allows writing a hash directly; + adds user if needed. + + .. warning:: + does not (currently) do any validation of the hash string + + .. 
versionadded:: 1.7 + """ + # assert self.context.identify(hash), "unrecognized hash format" + if PY3 and isinstance(hash, str): + hash = hash.encode(self.encoding) + user = self._encode_user(user) + existing = self._set_record(user, hash) + self._autosave() + return existing + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="get_hash") + def find(self, user): + """return hash for user""" + return self.get_hash(user) + + # XXX: rename to something more explicit, like delete_user()? + def delete(self, user): + """Delete user's entry. + + :returns: + * ``True`` if user deleted. + * ``False`` if user not found. + """ + try: + del self._records[self._encode_user(user)] + except KeyError: + return False + self._autosave() + return True + + def check_password(self, user, password): + """ + Verify password for specified user. + If the hash's algorithm is marked as deprecated by the CryptContext, it will automatically be re-hashed. + + :returns: + * ``None`` if user not found. + * ``False`` if user found, but password does not match. + * ``True`` if user found and password matches. + + .. versionchanged:: 1.6 + This method was previously called ``verify``, it was renamed + to prevent ambiguity with the :class:`!CryptContext` method. + The old alias is deprecated, and will be removed in Passlib 1.8. + """ + user = self._encode_user(user) + hash = self._records.get(user) + if hash is None: + return None + if isinstance(password, unicode): + # NOTE: encoding password to match file, making the assumption + # that server will use same encoding to hash the password. + password = password.encode(self.encoding) + ok, new_hash = self.context.verify_and_update(password, hash) + if ok and new_hash is not None: + # rehash user's password if old hash was deprecated + assert user in self._records # otherwise would have to use ._set_record() + self._records[user] = new_hash + self._autosave() + return ok + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="check_password") + def verify(self, user, password): + """verify password for user""" + return self.check_password(user, password) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htdigest editing +#============================================================================= +class HtdigestFile(_CommonFile): + """class for reading & writing Htdigest files. + + The class constructor accepts the following arguments: + + :type path: filepath + :param path: + + Specifies path to htdigest file, used to implicitly load from and save to. + + This class has two modes of operation: + + 1. It can be "bound" to a local file by passing a ``path`` to the class + constructor. In this case it will load the contents of the file when + created, and the :meth:`load` and :meth:`save` methods will automatically + load from and save to that file if they are called without arguments. + + 2. Alternately, it can exist as an independent object, in which case + :meth:`load` and :meth:`save` will require an explicit path to be + provided whenever they are called. As well, ``autosave`` behavior + will not be available. + + This feature is new in Passlib 1.6, and is the default if no + ``path`` value is provided to the constructor. + + This is also exposed as a readonly instance attribute.
+ + :type default_realm: str + :param default_realm: + + If ``default_realm`` is set, all the :class:`HtdigestFile` + methods that require a realm will use this value if one is not + provided explicitly. If unset, they will raise an error stating + that an explicit realm is required. + + This is also exposed as a writeable instance attribute. + + .. versionadded:: 1.6 + + :type new: bool + :param new: + + Normally, if *path* is specified, :class:`HtdigestFile` will + immediately load the contents of the file. However, when creating + a new htdigest file, applications can set ``new=True`` so that + the existing file (if any) will not be loaded. + + .. versionadded:: 1.6 + This feature was previously enabled by setting ``autoload=False``. + That alias has been deprecated, and will be removed in Passlib 1.8 + + :type autosave: bool + :param autosave: + + Normally, any changes made to an :class:`HtdigestFile` instance + will not be saved until :meth:`save` is explicitly called. However, + if ``autosave=True`` is specified, any changes made will be + saved to disk immediately (assuming *path* has been set). + + This is also exposed as a writeable instance attribute. + + :type encoding: str + :param encoding: + + Optionally specify character encoding used to read/write file + and hash passwords. Defaults to ``utf-8``, though ``latin-1`` + is the only other commonly encountered encoding. + + This is also exposed as a readonly instance attribute. + + :param autoload: + Set to ``False`` to prevent the constructor from automatically + loading the file from disk. + + .. deprecated:: 1.6 + This has been replaced by the *new* keyword. + Instead of setting ``autoload=False``, you should use + ``new=True``. Support for this keyword will be removed + in Passlib 1.8. + + Loading & Saving + ================ + .. automethod:: load + .. automethod:: load_if_changed + .. automethod:: load_string + .. automethod:: save + .. automethod:: to_string + + Inspection + ========== + .. automethod:: realms + .. automethod:: users + .. automethod:: check_password(user[, realm], password) + .. automethod:: get_hash + + Modification + ============ + .. automethod:: set_password(user[, realm], password) + .. automethod:: delete + .. automethod:: delete_realm + + Alternate Constructors + ====================== + .. automethod:: from_string + + Attributes + ========== + .. attribute:: default_realm + + The default realm that will be used if one is not provided + to methods that require it. By default this is ``None``, + in which case an explicit realm must be provided for every + method call. Can be written to. + + .. attribute:: path + + Path to local file that will be used as the default + for all :meth:`load` and :meth:`save` operations. + May be written to, initialized by the *path* constructor keyword. + + .. attribute:: autosave + + Writeable flag indicating whether changes will be automatically + written to *path*. + + Errors + ====== + :raises ValueError: + All of the methods in this class will raise a :exc:`ValueError` if + any user name or realm contains a forbidden character (one of ``:\\r\\n\\t\\x00``), + or is longer than 255 characters. + """ + #=================================================================== + # instance attrs + #=================================================================== + + # NOTE: _records map stores (<user>,<realm>) for the key, + # and <hash> as the value, all as bytes. + + # NOTE: unlike htpasswd, this class doesn't use a CryptContext, + # as only one hash format is supported: htdigest.
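+
+ # (editor's note: an illustrative usage sketch, not upstream passlib code --
+ # it exercises only the ``default_realm`` convenience documented above:
+ #
+ # from passlib.apache import HtdigestFile
+ #
+ # ht = HtdigestFile("test.htdigest", default_realm="members", new=True)
+ # ht.set_password("alice", "secret") # realm defaults to "members"
+ # assert ht.check_password("alice", "secret") is True
+ # ht.save()
+ # )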
+ + # optionally specify default realm that will be used if none + # is provided to a method call. otherwise realm is always required. + default_realm = None + + #=================================================================== + # init & serialization + #=================================================================== + def __init__(self, path=None, default_realm=None, **kwds): + self.default_realm = default_realm + super(HtdigestFile, self).__init__(path, **kwds) + + def _parse_record(self, record, lineno): + result = record.rstrip().split(_BCOLON) + if len(result) != 3: + raise ValueError("malformed htdigest file (error reading line %d)" + % lineno) + user, realm, hash = result + return (user, realm), hash + + def _render_record(self, key, hash): + user, realm = key + return render_bytes("%s:%s:%s\n", user, realm, hash) + + def _require_realm(self, realm): + if realm is None: + realm = self.default_realm + if realm is None: + raise TypeError("you must specify a realm explicitly, " + "or set the default_realm attribute") + return realm + + def _encode_realm(self, realm): + realm = self._require_realm(realm) + return self._encode_field(realm, "realm") + + def _encode_key(self, user, realm): + return self._encode_user(user), self._encode_realm(realm) + + #=================================================================== + # public methods + #=================================================================== + + def realms(self): + """Return list of all realms in database""" + realms = set(key[1] for key in self._records) + return [self._decode_field(realm) for realm in realms] + + def users(self, realm=None): + """Return list of all users in specified realm. + + * uses ``self.default_realm`` if no realm explicitly provided. + * returns empty list if realm not found. + """ + realm = self._encode_realm(realm) + return [self._decode_field(key[0]) for key in self._records + if key[1] == realm] + + ##def has_user(self, user, realm=None): + ## "check if user+realm combination exists" + ## return self._encode_key(user,realm) in self._records + + ##def rename_realm(self, old, new): + ## """rename all accounts in realm""" + ## old = self._encode_realm(old) + ## new = self._encode_realm(new) + ## keys = [key for key in self._records if key[1] == old] + ## for key in keys: + ## hash = self._records.pop(key) + ## self._set_record((key[0], new), hash) + ## self._autosave() + ## return len(keys) + + ##def rename(self, old, new, realm=None): + ## """rename user account""" + ## old = self._encode_user(old) + ## new = self._encode_user(new) + ## realm = self._encode_realm(realm) + ## hash = self._records.pop((old,realm)) + ## self._set_record((new, realm), hash) + ## self._autosave() + + def set_password(self, user, realm=None, password=_UNSET): + """Set password for user; adds user & realm if needed. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``set_password(user, password)``, + otherwise it must be called with all three arguments: + ``set_password(user, realm, password)``. + + :returns: + * ``True`` if existing user was updated + * ``False`` if user account added. 
+ """ + if password is _UNSET: + # called w/ two args - (user, password), use default realm + realm, password = None, realm + realm = self._require_realm(realm) + hash = htdigest.hash(password, user, realm, encoding=self.encoding) + return self.set_hash(user, realm, hash) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="set_password") + def update(self, user, realm, password): + """set password for user""" + return self.set_password(user, realm, password) + + def get_hash(self, user, realm=None): + """Return :class:`~passlib.hash.htdigest` hash stored for user. + + * uses ``self.default_realm`` if no realm explicitly provided. + * returns ``None`` if user or realm not found. + + .. versionchanged:: 1.6 + This method was previously named ``find``, it was renamed + for clarity. The old name is deprecated, and will be removed + in Passlib 1.8. + """ + key = self._encode_key(user, realm) + hash = self._records.get(key) + if hash is None: + return None + if PY3: + hash = hash.decode(self.encoding) + return hash + + def set_hash(self, user, realm=None, hash=_UNSET): + """ + semi-private helper which allows writing a hash directly; + adds user & realm if needed. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``set_hash(user, hash)``, + otherwise it must be called with all three arguments: + ``set_hash(user, realm, hash)``. + + .. warning:: + does not (currently) do any validation of the hash string + + .. versionadded:: 1.7 + """ + if hash is _UNSET: + # called w/ two args - (user, hash), use default realm + realm, hash = None, realm + # assert htdigest.identify(hash), "unrecognized hash format" + if PY3 and isinstance(hash, str): + hash = hash.encode(self.encoding) + key = self._encode_key(user, realm) + existing = self._set_record(key, hash) + self._autosave() + return existing + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="get_hash") + def find(self, user, realm): + """return hash for user""" + return self.get_hash(user, realm) + + # XXX: rename to something more explicit, like delete_user()? + def delete(self, user, realm=None): + """Delete user's entry for specified realm. + + if realm is not specified, uses ``self.default_realm``. + + :returns: + * ``True`` if user deleted, + * ``False`` if user not found in realm. + """ + key = self._encode_key(user, realm) + try: + del self._records[key] + except KeyError: + return False + self._autosave() + return True + + def delete_realm(self, realm): + """Delete all users for specified realm. + + if realm is not specified, uses ``self.default_realm``. + + :returns: number of users deleted (0 if realm not found) + """ + realm = self._encode_realm(realm) + records = self._records + keys = [key for key in records if key[1] == realm] + for key in keys: + del records[key] + self._autosave() + return len(keys) + + def check_password(self, user, realm=None, password=_UNSET): + """Verify password for specified user + realm. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``check_password(user, password)``, + otherwise it must be called with all three arguments: + ``check_password(user, realm, password)``. + + :returns: + * ``None`` if user or realm not found. + * ``False`` if user found, but password does not match. + * ``True`` if user found and password matches. + + .. versionchanged:: 1.6 + This method was previously called ``verify``, it was renamed + to prevent ambiguity with the :class:`!CryptContext` method. 
+ The old alias is deprecated, and will be removed in Passlib 1.8. + """ + if password is _UNSET: + # called w/ two args - (user, password), use default realm + realm, password = None, realm + user = self._encode_user(user) + realm = self._encode_realm(realm) + hash = self._records.get((user,realm)) + if hash is None: + return None + return htdigest.verify(password, hash, user, realm, + encoding=self.encoding) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="check_password") + def verify(self, user, realm, password): + """verify password for user""" + return self.check_password(user, realm, password) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/apps.py b/venv/lib/python3.12/site-packages/passlib/apps.py new file mode 100644 index 0000000..682bbff --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/apps.py @@ -0,0 +1,245 @@ +"""passlib.apps""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +from itertools import chain +# site +# pkg +from passlib import hash +from passlib.context import LazyCryptContext +from passlib.utils import sys_bits +# local +__all__ = [ + 'custom_app_context', + 'django_context', + 'ldap_context', 'ldap_nocrypt_context', + 'mysql_context', 'mysql4_context', 'mysql3_context', + 'phpass_context', + 'phpbb3_context', + 'postgres_context', +] + +#============================================================================= +# master containing all identifiable hashes +#============================================================================= +def _load_master_config(): + from passlib.registry import list_crypt_handlers + + # get master list + schemes = list_crypt_handlers() + + # exclude the ones we know have ambiguous or greedy identify() methods. 
+ excluded = [ + # frequently confused for each other + 'bigcrypt', + 'crypt16', + + # no good identifiers + 'cisco_pix', + 'cisco_type7', + 'htdigest', + 'mysql323', + 'oracle10', + + # all have same size + 'lmhash', + 'msdcc', + 'msdcc2', + 'nthash', + + # plaintext handlers + 'plaintext', + 'ldap_plaintext', + + # disabled handlers + 'django_disabled', + 'unix_disabled', + 'unix_fallback', + ] + for name in excluded: + schemes.remove(name) + + # return config + return dict(schemes=schemes, default="sha256_crypt") +master_context = LazyCryptContext(onload=_load_master_config) + +#============================================================================= +# for quickly bootstrapping new custom applications +#============================================================================= +custom_app_context = LazyCryptContext( + # choose some reasonably strong schemes + schemes=["sha512_crypt", "sha256_crypt"], + + # set some useful global options + default="sha256_crypt" if sys_bits < 64 else "sha512_crypt", + + # set a good starting point for rounds selection + sha512_crypt__min_rounds = 535000, + sha256_crypt__min_rounds = 535000, + + # if the admin user category is selected, make a much stronger hash, + admin__sha512_crypt__min_rounds = 1024000, + admin__sha256_crypt__min_rounds = 1024000, + ) + +#============================================================================= +# django +#============================================================================= + +#----------------------------------------------------------------------- +# 1.0 +#----------------------------------------------------------------------- + +_django10_schemes = [ + "django_salted_sha1", + "django_salted_md5", + "django_des_crypt", + "hex_md5", + "django_disabled", +] + +django10_context = LazyCryptContext( + schemes=_django10_schemes, + default="django_salted_sha1", + deprecated=["hex_md5"], +) + +#----------------------------------------------------------------------- +# 1.4 +#----------------------------------------------------------------------- + +_django14_schemes = [ + "django_pbkdf2_sha256", + "django_pbkdf2_sha1", + "django_bcrypt" +] + _django10_schemes + +django14_context = LazyCryptContext( + schemes=_django14_schemes, + deprecated=_django10_schemes, +) + +#----------------------------------------------------------------------- +# 1.6 +#----------------------------------------------------------------------- + +_django16_schemes = list(_django14_schemes) +_django16_schemes.insert(1, "django_bcrypt_sha256") +django16_context = LazyCryptContext( + schemes=_django16_schemes, + deprecated=_django10_schemes, +) + +#----------------------------------------------------------------------- +# 1.10 +#----------------------------------------------------------------------- + +_django_110_schemes = [ + "django_pbkdf2_sha256", + "django_pbkdf2_sha1", + "django_argon2", + "django_bcrypt", + "django_bcrypt_sha256", + "django_disabled", +] +django110_context = LazyCryptContext(schemes=_django_110_schemes) + +#----------------------------------------------------------------------- +# 2.1 +#----------------------------------------------------------------------- + +_django21_schemes = list(_django_110_schemes) +_django21_schemes.remove("django_bcrypt") +django21_context = LazyCryptContext(schemes=_django21_schemes) + +#----------------------------------------------------------------------- +# latest +#----------------------------------------------------------------------- + +# this will always point to latest version
in passlib +django_context = django21_context + +#============================================================================= +# ldap +#============================================================================= + +#: standard ldap schemes +std_ldap_schemes = [ + "ldap_salted_sha512", + "ldap_salted_sha256", + "ldap_salted_sha1", + "ldap_salted_md5", + "ldap_sha1", + "ldap_md5", + "ldap_plaintext", +] + +# create context with all std ldap schemes EXCEPT crypt +ldap_nocrypt_context = LazyCryptContext(std_ldap_schemes) + +# create context with all possible std ldap + ldap crypt schemes +def _iter_ldap_crypt_schemes(): + from passlib.utils import unix_crypt_schemes + return ('ldap_' + name for name in unix_crypt_schemes) + +def _iter_ldap_schemes(): + """helper which iterates over supported std ldap schemes""" + return chain(std_ldap_schemes, _iter_ldap_crypt_schemes()) +ldap_context = LazyCryptContext(_iter_ldap_schemes()) + +### create context with all std ldap schemes + crypt schemes for localhost +##def _iter_host_ldap_schemes(): +## "helper which iterates over supported std ldap schemes" +## from passlib.handlers.ldap_digests import get_host_ldap_crypt_schemes +## return chain(std_ldap_schemes, get_host_ldap_crypt_schemes()) +##ldap_host_context = LazyCryptContext(_iter_host_ldap_schemes()) + +#============================================================================= +# mysql +#============================================================================= +mysql3_context = LazyCryptContext(["mysql323"]) +mysql4_context = LazyCryptContext(["mysql41", "mysql323"], deprecated="mysql323") +mysql_context = mysql4_context # tracks latest mysql version supported + +#============================================================================= +# postgres +#============================================================================= +postgres_context = LazyCryptContext(["postgres_md5"]) + +#============================================================================= +# phpass & variants +#============================================================================= +def _create_phpass_policy(**kwds): + """helper to choose default alg based on bcrypt availability""" + kwds['default'] = 'bcrypt' if hash.bcrypt.has_backend() else 'phpass' + return kwds + +phpass_context = LazyCryptContext( + schemes=["bcrypt", "phpass", "bsdi_crypt"], + onload=_create_phpass_policy, + ) + +phpbb3_context = LazyCryptContext(["phpass"], phpass__ident="H") + +# TODO: support the drupal phpass variants (see phpass homepage) + +#============================================================================= +# roundup +#============================================================================= + +_std_roundup_schemes = [ "ldap_hex_sha1", "ldap_hex_md5", "ldap_des_crypt", "roundup_plaintext" ] +roundup10_context = LazyCryptContext(_std_roundup_schemes) + +# NOTE: 'roundup15' really applies to roundup 1.4.17+ +roundup_context = roundup15_context = LazyCryptContext( + schemes=_std_roundup_schemes + [ "ldap_pbkdf2_sha1" ], + deprecated=_std_roundup_schemes, + default = "ldap_pbkdf2_sha1", + ldap_pbkdf2_sha1__default_rounds = 10000, + ) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/context.py b/venv/lib/python3.12/site-packages/passlib/context.py new file mode 100644 index 0000000..bc3cbf5 --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/passlib/context.py @@ -0,0 +1,2637 @@ +"""passlib.context - CryptContext implementation""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import re +import logging; log = logging.getLogger(__name__) +import threading +import time +from warnings import warn +# site +# pkg +from passlib import exc +from passlib.exc import ExpectedStringError, ExpectedTypeError, PasslibConfigWarning +from passlib.registry import get_crypt_handler, _validate_handler_name +from passlib.utils import (handlers as uh, to_bytes, + to_unicode, splitcomma, + as_bool, timer, rng, getrandstr, + ) +from passlib.utils.binary import BASE64_CHARS +from passlib.utils.compat import (iteritems, num_types, irange, + PY2, PY3, unicode, SafeConfigParser, + NativeStringIO, BytesIO, + unicode_or_bytes_types, native_string_types, + ) +from passlib.utils.decor import deprecated_method, memoized_property +# local +__all__ = [ + 'CryptContext', + 'LazyCryptContext', + 'CryptPolicy', +] + +#============================================================================= +# support +#============================================================================= + +# private object to detect unset params +_UNSET = object() + +def _coerce_vary_rounds(value): + """parse vary_rounds string to percent as [0,1) float, or integer""" + if value.endswith("%"): + # XXX: deprecate this in favor of raw float? + return float(value.rstrip("%"))*.01 + try: + return int(value) + except ValueError: + return float(value) + +# set of options which aren't allowed to be set via policy +_forbidden_scheme_options = set(["salt"]) + # 'salt' - not allowed since a fixed salt would defeat the purpose. + +# dict containing funcs used to coerce strings to correct type for scheme option keys. +# NOTE: this isn't really needed any longer, since Handler.using() handles the actual parsing. +# keeping this around for now, though, since it makes context.to_dict() output cleaner. +_coerce_scheme_options = dict( + min_rounds=int, + max_rounds=int, + default_rounds=int, + vary_rounds=_coerce_vary_rounds, + salt_size=int, +) + +def _is_handler_registered(handler): + """detect if handler is registered or a custom handler""" + return get_crypt_handler(handler.name, None) is handler + +@staticmethod +def _always_needs_update(hash, secret=None): + """ + dummy function patched into handler.needs_update() by _CryptConfig + when hash alg has been deprecated for context. + """ + return True + +#: list of keys allowed under wildcard "all" scheme w/o a security warning. +_global_settings = set(["truncate_error", "vary_rounds"]) + +#============================================================================= +# crypt policy +#============================================================================= +_preamble = ("The CryptPolicy class has been deprecated as of " + "Passlib 1.6, and will be removed in Passlib 1.8. ") + +class CryptPolicy(object): + """ + .. deprecated:: 1.6 + This class has been deprecated, and will be removed in Passlib 1.8. + All of its functionality has been rolled into :class:`CryptContext`. + + This class previously stored the configuration options for the + CryptContext class. In the interest of interface simplification, + all of this class' functionality has been rolled into the CryptContext + class itself. 
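+    For example (a minimal sketch of the migration), code that previously
+    built a context via::
+
+        >>> from passlib.context import CryptContext, CryptPolicy
+        >>> context = CryptContext(policy=CryptPolicy(schemes=["sha256_crypt"]))
+
+    can simply pass the same keywords to the constructor instead::
+
+        >>> context = CryptContext(schemes=["sha256_crypt"])
+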
+    The documentation for this class is now focused on documenting how to
+    migrate to the new api. Additionally, where possible, the deprecation
+    warnings issued by the CryptPolicy methods will list the replacement call
+    that should be used.
+
+    Constructors
+    ============
+    CryptPolicy objects can be constructed directly using any of
+    the keywords accepted by :class:`CryptContext`. Direct uses of the
+    :class:`!CryptPolicy` constructor should either pass the keywords
+    directly into the CryptContext constructor, or pass them to
+    :meth:`CryptContext.update` if the policy object was being used to
+    update an existing context object.
+
+    In addition to passing in keywords directly,
+    CryptPolicy objects can be constructed by the following methods:
+
+    .. automethod:: from_path
+    .. automethod:: from_string
+    .. automethod:: from_source
+    .. automethod:: from_sources
+    .. automethod:: replace
+
+    Introspection
+    =============
+    All of the informational methods provided by this class have been
+    deprecated in favor of identical or similar methods on the
+    :class:`CryptContext` class:
+
+    .. automethod:: has_schemes
+    .. automethod:: schemes
+    .. automethod:: iter_handlers
+    .. automethod:: get_handler
+    .. automethod:: get_options
+    .. automethod:: handler_is_deprecated
+    .. automethod:: get_min_verify_time
+
+    Exporting
+    =========
+    .. automethod:: iter_config
+    .. automethod:: to_dict
+    .. automethod:: to_file
+    .. automethod:: to_string
+
+    .. note::
+        CryptPolicy instances are immutable.
+        Use the :meth:`replace` method to mutate existing instances.
+
+    .. deprecated:: 1.6
+    """
+    #===================================================================
+    # class methods
+    #===================================================================
+    @classmethod
+    def from_path(cls, path, section="passlib", encoding="utf-8"):
+        """create a CryptPolicy instance from a local file.
+
+        .. deprecated:: 1.6
+
+        Creating a new CryptContext from a file, which was previously done via
+        ``CryptContext(policy=CryptPolicy.from_path(path))``, can now be
+        done via ``CryptContext.from_path(path)``.
+        See :meth:`CryptContext.from_path` for details.
+
+        Updating an existing CryptContext from a file, which was previously
+        done via ``context.policy = CryptPolicy.from_path(path)``, can now be
+        done via ``context.load_path(path)``.
+        See :meth:`CryptContext.load_path` for details.
+        """
+        warn(_preamble +
+             "Instead of ``CryptPolicy.from_path(path)``, "
+             "use ``CryptContext.from_path(path)`` "
+             " or ``context.load_path(path)`` for an existing CryptContext.",
+             DeprecationWarning, stacklevel=2)
+        return cls(_internal_context=CryptContext.from_path(path, section,
+                                                            encoding))
+
+    @classmethod
+    def from_string(cls, source, section="passlib", encoding="utf-8"):
+        """create a CryptPolicy instance from a string.
+
+        .. deprecated:: 1.6
+
+        Creating a new CryptContext from a string, which was previously done
+        via ``CryptContext(policy=CryptPolicy.from_string(data))``, can now
+        be done via ``CryptContext.from_string(data)``.
+        See :meth:`CryptContext.from_string` for details.
+
+        Updating an existing CryptContext from a string, which was previously
+        done via ``context.policy = CryptPolicy.from_string(data)``, can now
+        be done via ``context.load(data)``.
+        See :meth:`CryptContext.load` for details.
+ """ + warn(_preamble + + "Instead of ``CryptPolicy.from_string(source)``, " + "use ``CryptContext.from_string(source)`` or " + "``context.load(source)`` for an existing CryptContext.", + DeprecationWarning, stacklevel=2) + return cls(_internal_context=CryptContext.from_string(source, section, + encoding)) + + @classmethod + def from_source(cls, source, _warn=True): + """create a CryptPolicy instance from some source. + + this method autodetects the source type, and invokes + the appropriate constructor automatically. it attempts + to detect whether the source is a configuration string, a filepath, + a dictionary, or an existing CryptPolicy instance. + + .. deprecated:: 1.6 + + Create a new CryptContext, which could previously be done via + ``CryptContext(policy=CryptPolicy.from_source(source))``, should + now be done using an explicit method: the :class:`CryptContext` + constructor itself, :meth:`CryptContext.from_path`, + or :meth:`CryptContext.from_string`. + + Updating an existing CryptContext, which could previously be done via + ``context.policy = CryptPolicy.from_source(source)``, should + now be done using an explicit method: :meth:`CryptContext.update`, + or :meth:`CryptContext.load`. + """ + if _warn: + warn(_preamble + + "Instead of ``CryptPolicy.from_source()``, " + "use ``CryptContext.from_string(path)`` " + " or ``CryptContext.from_path(source)``, as appropriate.", + DeprecationWarning, stacklevel=2) + if isinstance(source, CryptPolicy): + return source + elif isinstance(source, dict): + return cls(_internal_context=CryptContext(**source)) + elif not isinstance(source, (bytes,unicode)): + raise TypeError("source must be CryptPolicy, dict, config string, " + "or file path: %r" % (type(source),)) + elif any(c in source for c in "\n\r\t") or not source.strip(" \t./;:"): + return cls(_internal_context=CryptContext.from_string(source)) + else: + return cls(_internal_context=CryptContext.from_path(source)) + + @classmethod + def from_sources(cls, sources, _warn=True): + """create a CryptPolicy instance by merging multiple sources. + + each source is interpreted as by :meth:`from_source`, + and the results are merged together. + + .. deprecated:: 1.6 + Instead of using this method to merge multiple policies together, + a :class:`CryptContext` instance should be created, and then + the multiple sources merged together via :meth:`CryptContext.load`. + """ + if _warn: + warn(_preamble + + "Instead of ``CryptPolicy.from_sources()``, " + "use the various CryptContext constructors " + " followed by ``context.update()``.", + DeprecationWarning, stacklevel=2) + if len(sources) == 0: + raise ValueError("no sources specified") + if len(sources) == 1: + return cls.from_source(sources[0], _warn=False) + kwds = {} + for source in sources: + kwds.update(cls.from_source(source, _warn=False)._context.to_dict(resolve=True)) + return cls(_internal_context=CryptContext(**kwds)) + + def replace(self, *args, **kwds): + """create a new CryptPolicy, optionally updating parts of the + existing configuration. + + .. deprecated:: 1.6 + Callers of this method should :meth:`CryptContext.update` or + :meth:`CryptContext.copy` instead. 
+ """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.replace()``, " + "use ``context.update()`` or ``context.copy()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().replace()``, " + "create a CryptContext instance and " + "use ``context.update()`` or ``context.copy()``.", + DeprecationWarning, stacklevel=2) + sources = [ self ] + if args: + sources.extend(args) + if kwds: + sources.append(kwds) + return CryptPolicy.from_sources(sources, _warn=False) + + #=================================================================== + # instance attrs + #=================================================================== + + # internal CryptContext we're wrapping to handle everything + # until this class is removed. + _context = None + + # flag indicating this is wrapper generated by the CryptContext.policy + # attribute, rather than one created independantly by the application. + _stub_policy = False + + #=================================================================== + # init + #=================================================================== + def __init__(self, *args, **kwds): + context = kwds.pop("_internal_context", None) + if context: + assert isinstance(context, CryptContext) + self._context = context + self._stub_policy = kwds.pop("_stub_policy", False) + assert not (args or kwds), "unexpected args: %r %r" % (args,kwds) + else: + if args: + if len(args) != 1: + raise TypeError("only one positional argument accepted") + if kwds: + raise TypeError("cannot specify positional arg and kwds") + kwds = args[0] + warn(_preamble + + "Instead of constructing a CryptPolicy instance, " + "create a CryptContext directly, or use ``context.update()`` " + "and ``context.load()`` to reconfigure existing CryptContext " + "instances.", + DeprecationWarning, stacklevel=2) + self._context = CryptContext(**kwds) + + #=================================================================== + # public interface for examining options + #=================================================================== + def has_schemes(self): + """return True if policy defines *any* schemes for use. + + .. deprecated:: 1.6 + applications should use ``bool(context.schemes())`` instead. + see :meth:`CryptContext.schemes`. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.has_schemes()``, " + "use ``bool(context.schemes())``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().has_schemes()``, " + "create a CryptContext instance and " + "use ``bool(context.schemes())``.", + DeprecationWarning, stacklevel=2) + return bool(self._context.schemes()) + + def iter_handlers(self): + """return iterator over handlers defined in policy. + + .. deprecated:: 1.6 + applications should use ``context.schemes(resolve=True))`` instead. + see :meth:`CryptContext.schemes`. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.iter_handlers()``, " + "use ``context.schemes(resolve=True)``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().iter_handlers()``, " + "create a CryptContext instance and " + "use ``context.schemes(resolve=True)``.", + DeprecationWarning, stacklevel=2) + return self._context.schemes(resolve=True, unconfigured=True) + + def schemes(self, resolve=False): + """return list of schemes defined in policy. + + .. 
+            applications should use :meth:`CryptContext.schemes` instead.
+        """
+        if self._stub_policy:
+            warn(_preamble + # pragma: no cover -- deprecated & unused
+                 "Instead of ``context.policy.schemes()``, "
+                 "use ``context.schemes()``.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "Instead of ``CryptPolicy().schemes()``, "
+                 "create a CryptContext instance and "
+                 "use ``context.schemes()``.",
+                 DeprecationWarning, stacklevel=2)
+        return list(self._context.schemes(resolve=resolve, unconfigured=True))
+
+    def get_handler(self, name=None, category=None, required=False):
+        """return handler as specified by name, or default handler.
+
+        .. deprecated:: 1.6
+            applications should use :meth:`CryptContext.handler` instead,
+            though note that the ``required`` keyword has been removed,
+            and the new method will always act as if ``required=True``.
+        """
+        if self._stub_policy:
+            warn(_preamble +
+                 "Instead of ``context.policy.get_handler()``, "
+                 "use ``context.handler()``.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "Instead of ``CryptPolicy().get_handler()``, "
+                 "create a CryptContext instance and "
+                 "use ``context.handler()``.",
+                 DeprecationWarning, stacklevel=2)
+        # CryptContext.handler() doesn't support required=False,
+        # so wrapping it in try/except
+        try:
+            return self._context.handler(name, category, unconfigured=True)
+        except KeyError:
+            if required:
+                raise
+            else:
+                return None
+
+    def get_min_verify_time(self, category=None):
+        """get min_verify_time setting for policy.
+
+        .. deprecated:: 1.6
+            min_verify_time option will be removed entirely in passlib 1.8
+
+        .. versionchanged:: 1.7
+            this method now always returns the value automatically
+            calculated by :meth:`CryptContext.min_verify_time`,
+            any value specified by policy is ignored.
+        """
+        warn("get_min_verify_time() and the min_verify_time option are "
+             "deprecated and ignored, and will be removed in Passlib 1.8",
+             DeprecationWarning, stacklevel=2)
+        return 0
+
+    def get_options(self, name, category=None):
+        """return dictionary of options specific to a given handler.
+
+        .. deprecated:: 1.6
+            this method has no direct replacement in the 1.6 api, as there
+            is not a clearly defined use-case. however, examining the output of
+            :meth:`CryptContext.to_dict` should serve as the closest alternative.
+        """
+        # XXX: might make a public replacement, but need more study of the use cases.
+        if self._stub_policy:
+            warn(_preamble + # pragma: no cover -- deprecated & unused
+                 "``context.policy.get_options()`` will no longer be available.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "``CryptPolicy().get_options()`` will no longer be available.",
+                 DeprecationWarning, stacklevel=2)
+        if hasattr(name, "name"):
+            name = name.name
+        return self._context._config._get_record_options_with_flag(name, category)[0]
+
+    def handler_is_deprecated(self, name, category=None):
+        """check if handler has been deprecated by policy.
+
+        .. deprecated:: 1.6
+            this method has no direct replacement in the 1.6 api, as there
+            is not a clearly defined use-case. however, examining the output of
+            :meth:`CryptContext.to_dict` should serve as the closest alternative.
+        """
+        # XXX: might make a public replacement, but need more study of the use cases.
+        if self._stub_policy:
+            warn(_preamble +
+                 "``context.policy.handler_is_deprecated()`` will no longer be available.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "``CryptPolicy().handler_is_deprecated()`` will no longer be available.",
+                 DeprecationWarning, stacklevel=2)
+        if hasattr(name, "name"):
+            name = name.name
+        return self._context.handler(name, category).deprecated
+
+    #===================================================================
+    # serialization
+    #===================================================================
+
+    def iter_config(self, ini=False, resolve=False):
+        """iterate over key/value pairs representing the policy object.
+
+        .. deprecated:: 1.6
+            applications should use :meth:`CryptContext.to_dict` instead.
+        """
+        if self._stub_policy:
+            warn(_preamble + # pragma: no cover -- deprecated & unused
+                 "Instead of ``context.policy.iter_config()``, "
+                 "use ``context.to_dict().items()``.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "Instead of ``CryptPolicy().iter_config()``, "
+                 "create a CryptContext instance and "
+                 "use ``context.to_dict().items()``.",
+                 DeprecationWarning, stacklevel=2)
+        # hacked code that renders keys & values in a manner that approximates
+        # the old behavior. context.to_dict() is much cleaner.
+        context = self._context
+        if ini:
+            def render_key(key):
+                return context._render_config_key(key).replace("__", ".")
+            def render_value(value):
+                if isinstance(value, (list,tuple)):
+                    value = ", ".join(value)
+                return value
+            resolve = False
+        else:
+            render_key = context._render_config_key
+            render_value = lambda value: value
+        return (
+            (render_key(key), render_value(value))
+            for key, value in context._config.iter_config(resolve)
+        )
+
+    def to_dict(self, resolve=False):
+        """export policy object as dictionary of options.
+
+        .. deprecated:: 1.6
+            applications should use :meth:`CryptContext.to_dict` instead.
+        """
+        if self._stub_policy:
+            warn(_preamble +
+                 "Instead of ``context.policy.to_dict()``, "
+                 "use ``context.to_dict()``.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "Instead of ``CryptPolicy().to_dict()``, "
+                 "create a CryptContext instance and "
+                 "use ``context.to_dict()``.",
+                 DeprecationWarning, stacklevel=2)
+        return self._context.to_dict(resolve)
+
+    def to_file(self, stream, section="passlib"): # pragma: no cover -- deprecated & unused
+        """export policy to file.
+
+        .. deprecated:: 1.6
+            applications should use :meth:`CryptContext.to_string` instead,
+            and then write the output to a file as desired.
+        """
+        if self._stub_policy:
+            warn(_preamble +
+                 "Instead of ``context.policy.to_file(stream)``, "
+                 "use ``stream.write(context.to_string())``.",
+                 DeprecationWarning, stacklevel=2)
+        else:
+            warn(_preamble +
+                 "Instead of ``CryptPolicy().to_file(stream)``, "
+                 "create a CryptContext instance and "
+                 "use ``stream.write(context.to_string())``.",
+                 DeprecationWarning, stacklevel=2)
+        out = self._context.to_string(section=section)
+        if PY2:
+            out = out.encode("utf-8")
+        stream.write(out)
+
+    def to_string(self, section="passlib", encoding=None):
+        """export policy to a string.
+
+        .. deprecated:: 1.6
+            applications should use :meth:`CryptContext.to_string` instead.
+ """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.to_string()``, " + "use ``context.to_string()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_string()``, " + "create a CryptContext instance and " + "use ``context.to_string()``.", + DeprecationWarning, stacklevel=2) + out = self._context.to_string(section=section) + if encoding: + out = out.encode(encoding) + return out + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# _CryptConfig helper class +#============================================================================= +class _CryptConfig(object): + """parses, validates, and stores CryptContext config + + this is a helper used internally by CryptContext to handle + parsing, validation, and serialization of its config options. + split out from the main class, but not made public since + that just complicates interface too much (c.f. CryptPolicy) + + :arg source: config as dict mapping ``(cat,scheme,option) -> value`` + """ + #=================================================================== + # instance attrs + #=================================================================== + + # triple-nested dict which maps scheme -> category -> key -> value, + # storing all hash-specific options + _scheme_options = None + + # double-nested dict which maps key -> category -> value + # storing all CryptContext options + _context_options = None + + # tuple of handler objects + handlers = None + + # tuple of scheme objects in same order as handlers + schemes = None + + # tuple of categories in alphabetical order (not including None) + categories = None + + # set of all context keywords used by active schemes + context_kwds = None + + # dict mapping category -> default scheme + _default_schemes = None + + # dict mapping (scheme, category) -> custom handler + _records = None + + # dict mapping category -> list of custom handler instances for that category, + # in order of schemes(). 
populated on demand by _get_record_list() + _record_lists = None + + #=================================================================== + # constructor + #=================================================================== + def __init__(self, source): + self._init_scheme_list(source.get((None,None,"schemes"))) + self._init_options(source) + self._init_default_schemes() + self._init_records() + + def _init_scheme_list(self, data): + """initialize .handlers and .schemes attributes""" + handlers = [] + schemes = [] + if isinstance(data, native_string_types): + data = splitcomma(data) + for elem in data or (): + # resolve elem -> handler & scheme + if hasattr(elem, "name"): + handler = elem + scheme = handler.name + _validate_handler_name(scheme) + elif isinstance(elem, native_string_types): + handler = get_crypt_handler(elem) + scheme = handler.name + else: + raise TypeError("scheme must be name or CryptHandler, " + "not %r" % type(elem)) + + # check scheme name isn't already in use + if scheme in schemes: + raise KeyError("multiple handlers with same name: %r" % + (scheme,)) + + # add to handler list + handlers.append(handler) + schemes.append(scheme) + + self.handlers = tuple(handlers) + self.schemes = tuple(schemes) + + #=================================================================== + # lowlevel options + #=================================================================== + + #--------------------------------------------------------------- + # init lowlevel option storage + #--------------------------------------------------------------- + def _init_options(self, source): + """load config dict into internal representation, + and init .categories attr + """ + # prepare dicts & locals + norm_scheme_option = self._norm_scheme_option + norm_context_option = self._norm_context_option + self._scheme_options = scheme_options = {} + self._context_options = context_options = {} + categories = set() + + # load source config into internal storage + for (cat, scheme, key), value in iteritems(source): + categories.add(cat) + explicit_scheme = scheme + if not cat and not scheme and key in _global_settings: + # going forward, not using "__all__" format. instead... + # whitelisting set of keys which should be passed to (all) schemes, + # rather than passed to the CryptContext itself + scheme = "all" + if scheme: + # normalize scheme option + key, value = norm_scheme_option(key, value) + + # e.g. things like "min_rounds" should never be set cross-scheme + # this will be fatal under 2.0. + if scheme == "all" and key not in _global_settings: + warn("The '%s' option should be configured per-algorithm, and not set " + "globally in the context; This will be an error in Passlib 2.0" % + (key,), PasslibConfigWarning) + + # this scheme is going away in 2.0; + # but most keys deserve an extra warning since it impacts security. 
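+                # (e.g. a bare ``vary_rounds`` keyword is quietly routed to
+                # the "all" scheme above, while an explicit ``all__min_rounds``
+                # key triggers both the warning above and the deprecation below)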
+ if explicit_scheme == "all": + warn("The 'all' scheme is deprecated as of Passlib 1.7, " + "and will be removed in Passlib 2.0; Please configure " + "options on a per-algorithm basis.", DeprecationWarning) + + # store in scheme_options + # map structure: scheme_options[scheme][category][key] = value + try: + category_map = scheme_options[scheme] + except KeyError: + scheme_options[scheme] = {cat: {key: value}} + else: + try: + option_map = category_map[cat] + except KeyError: + category_map[cat] = {key: value} + else: + option_map[key] = value + else: + # normalize context option + if cat and key == "schemes": + raise KeyError("'schemes' context option is not allowed " + "per category") + key, value = norm_context_option(cat, key, value) + if key == "min_verify_time": # ignored in 1.7, to be removed in 1.8 + continue + + # store in context_options + # map structure: context_options[key][category] = value + try: + category_map = context_options[key] + except KeyError: + context_options[key] = {cat: value} + else: + category_map[cat] = value + + # store list of configured categories + categories.discard(None) + self.categories = tuple(sorted(categories)) + + def _norm_scheme_option(self, key, value): + # check for invalid options + if key in _forbidden_scheme_options: + raise KeyError("%r option not allowed in CryptContext " + "configuration" % (key,)) + # coerce strings for certain fields (e.g. min_rounds uses ints) + if isinstance(value, native_string_types): + func = _coerce_scheme_options.get(key) + if func: + value = func(value) + return key, value + + def _norm_context_option(self, cat, key, value): + schemes = self.schemes + if key == "default": + if hasattr(value, "name"): + value = value.name + elif not isinstance(value, native_string_types): + raise ExpectedTypeError(value, "str", "default") + if schemes and value not in schemes: + raise KeyError("default scheme not found in policy") + elif key == "deprecated": + if isinstance(value, native_string_types): + value = splitcomma(value) + elif not isinstance(value, (list,tuple)): + raise ExpectedTypeError(value, "str or seq", "deprecated") + if 'auto' in value: + # XXX: have any statements been made about when this is default? + # should do it in 1.8 at latest. + if len(value) > 1: + raise ValueError("cannot list other schemes if " + "``deprecated=['auto']`` is used") + elif schemes: + # make sure list of deprecated schemes is subset of configured schemes + for scheme in value: + if not isinstance(scheme, native_string_types): + raise ExpectedTypeError(value, "str", "deprecated element") + if scheme not in schemes: + raise KeyError("deprecated scheme not found " + "in policy: %r" % (scheme,)) + elif key == "min_verify_time": + warn("'min_verify_time' was deprecated in Passlib 1.6, is " + "ignored in 1.7, and will be removed in 1.8", + DeprecationWarning) + elif key == "harden_verify": + warn("'harden_verify' is deprecated & ignored as of Passlib 1.7.1, " + " and will be removed in 1.8", + DeprecationWarning) + elif key != "schemes": + raise KeyError("unknown CryptContext keyword: %r" % (key,)) + return key, value + + #--------------------------------------------------------------- + # reading context options + #--------------------------------------------------------------- + def get_context_optionmap(self, key, _default={}): + """return dict mapping category->value for specific context option. + + .. warning:: treat return value as readonly! 
+ """ + return self._context_options.get(key, _default) + + def get_context_option_with_flag(self, category, key): + """return value of specific option, handling category inheritance. + also returns flag indicating whether value is category-specific. + """ + try: + category_map = self._context_options[key] + except KeyError: + return None, False + value = category_map.get(None) + if category: + try: + alt = category_map[category] + except KeyError: + pass + else: + if value is None or alt != value: + return alt, True + return value, False + + #--------------------------------------------------------------- + # reading scheme options + #--------------------------------------------------------------- + def _get_scheme_optionmap(self, scheme, category, default={}): + """return all options for (scheme,category) combination + + .. warning:: treat return value as readonly! + """ + try: + return self._scheme_options[scheme][category] + except KeyError: + return default + + def get_base_handler(self, scheme): + return self.handlers[self.schemes.index(scheme)] + + @staticmethod + def expand_settings(handler): + setting_kwds = handler.setting_kwds + if 'rounds' in handler.setting_kwds: + # XXX: historically this extras won't be listed in setting_kwds + setting_kwds += uh.HasRounds.using_rounds_kwds + return setting_kwds + + # NOTE: this is only used by _get_record_options_with_flag()... + def get_scheme_options_with_flag(self, scheme, category): + """return composite dict of all options set for scheme. + includes options inherited from 'all' and from default category. + result can be modified. + returns (kwds, has_cat_specific_options) + """ + # start out with copy of global options + get_optionmap = self._get_scheme_optionmap + kwds = get_optionmap("all", None).copy() + has_cat_options = False + + # add in category-specific global options + if category: + defkwds = kwds.copy() # <-- used to detect category-specific options + kwds.update(get_optionmap("all", category)) + + # filter out global settings not supported by handler + allowed_settings = self.expand_settings(self.get_base_handler(scheme)) + for key in set(kwds).difference(allowed_settings): + kwds.pop(key) + if category: + for key in set(defkwds).difference(allowed_settings): + defkwds.pop(key) + + # add in default options for scheme + other = get_optionmap(scheme, None) + kwds.update(other) + + # load category-specific options for scheme + if category: + defkwds.update(other) + kwds.update(get_optionmap(scheme, category)) + + # compare default category options to see if there's anything + # category-specific + if kwds != defkwds: + has_cat_options = True + + return kwds, has_cat_options + + #=================================================================== + # deprecated & default schemes + #=================================================================== + def _init_default_schemes(self): + """initialize maps containing default scheme for each category. + + have to do this after _init_options(), since the default scheme + is affected by the list of deprecated schemes. 
+ """ + # init maps & locals + get_optionmap = self.get_context_optionmap + default_map = self._default_schemes = get_optionmap("default").copy() + dep_map = get_optionmap("deprecated") + schemes = self.schemes + if not schemes: + return + + # figure out default scheme + deps = dep_map.get(None) or () + default = default_map.get(None) + if not default: + for scheme in schemes: + if scheme not in deps: + default_map[None] = scheme + break + else: + raise ValueError("must have at least one non-deprecated scheme") + elif default in deps: + raise ValueError("default scheme cannot be deprecated") + + # figure out per-category default schemes, + for cat in self.categories: + cdeps = dep_map.get(cat, deps) + cdefault = default_map.get(cat, default) + if not cdefault: + for scheme in schemes: + if scheme not in cdeps: + default_map[cat] = scheme + break + else: + raise ValueError("must have at least one non-deprecated " + "scheme for %r category" % cat) + elif cdefault in cdeps: + raise ValueError("default scheme for %r category " + "cannot be deprecated" % cat) + + def default_scheme(self, category): + """return default scheme for specific category""" + defaults = self._default_schemes + try: + return defaults[category] + except KeyError: + pass + if not self.schemes: + raise KeyError("no hash schemes configured for this " + "CryptContext instance") + return defaults[None] + + def is_deprecated_with_flag(self, scheme, category): + """is scheme deprecated under particular category?""" + depmap = self.get_context_optionmap("deprecated") + def test(cat): + source = depmap.get(cat, depmap.get(None)) + if source is None: + return None + elif 'auto' in source: + return scheme != self.default_scheme(cat) + else: + return scheme in source + value = test(None) or False + if category: + alt = test(category) + if alt is not None and value != alt: + return alt, True + return value, False + + #=================================================================== + # CryptRecord objects + #=================================================================== + def _init_records(self): + # NOTE: this step handles final validation of settings, + # checking for violations against handler's internal invariants. + # this is why we create all the records now, + # so CryptContext throws error immediately rather than later. + self._record_lists = {} + records = self._records = {} + all_context_kwds = self.context_kwds = set() + get_options = self._get_record_options_with_flag + categories = (None,) + self.categories + for handler in self.handlers: + scheme = handler.name + all_context_kwds.update(handler.context_kwds) + for cat in categories: + kwds, has_cat_options = get_options(scheme, cat) + if cat is None or has_cat_options: + records[scheme, cat] = self._create_record(handler, cat, **kwds) + # NOTE: if handler has no category-specific opts, get_record() + # will automatically use the default category's record. + # NOTE: default records for specific category stored under the + # key (None,category); these are populated on-demand by get_record(). + + @staticmethod + def _create_record(handler, category=None, deprecated=False, **settings): + # create custom handler if needed. + try: + # XXX: relaxed=True is mostly here to retain backwards-compat behavior. + # could make this optional flag in future. 
+ subcls = handler.using(relaxed=True, **settings) + except TypeError as err: + m = re.match(r".* unexpected keyword argument '(.*)'$", str(err)) + if m and m.group(1) in settings: + # translate into KeyError, for backwards compat. + # XXX: push this down to GenericHandler.using() implementation? + key = m.group(1) + raise KeyError("keyword not supported by %s handler: %r" % + (handler.name, key)) + raise + + # using private attrs to store some extra metadata in custom handler + assert subcls is not handler, "expected unique variant of handler" + ##subcls._Context__category = category + subcls._Context__orig_handler = handler + subcls.deprecated = deprecated # attr reserved for this purpose + return subcls + + def _get_record_options_with_flag(self, scheme, category): + """return composite dict of options for given scheme + category. + + this is currently a private method, though some variant + of its output may eventually be made public. + + given a scheme & category, it returns two things: + a set of all the keyword options to pass to :meth:`_create_record`, + and a bool flag indicating whether any of these options + were specific to the named category. if this flag is false, + the options are identical to the options for the default category. + + the options dict includes all the scheme-specific settings, + as well as optional *deprecated* keyword. + """ + # get scheme options + kwds, has_cat_options = self.get_scheme_options_with_flag(scheme, category) + + # throw in deprecated flag + value, not_inherited = self.is_deprecated_with_flag(scheme, category) + if value: + kwds['deprecated'] = True + if not_inherited: + has_cat_options = True + + return kwds, has_cat_options + + def get_record(self, scheme, category): + """return record for specific scheme & category (cached)""" + # NOTE: this is part of the critical path shared by + # all of CryptContext's PasswordHash methods, + # hence all the caching and error checking. + + # quick lookup in cache + try: + return self._records[scheme, category] + except KeyError: + pass + + # type check + if category is not None and not isinstance(category, native_string_types): + if PY2 and isinstance(category, unicode): + # for compatibility with unicode-centric py2 apps + return self.get_record(scheme, category.encode("utf-8")) + raise ExpectedTypeError(category, "str or None", "category") + if scheme is not None and not isinstance(scheme, native_string_types): + raise ExpectedTypeError(scheme, "str or None", "scheme") + + # if scheme=None, + # use record for category's default scheme, and cache result. + if not scheme: + default = self.default_scheme(category) + assert default + record = self._records[None, category] = self.get_record(default, + category) + return record + + # if no record for (scheme, category), + # use record for (scheme, None), and cache result. 
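+        # (e.g. get_record("sha256_crypt", "admin") reuses the
+        # ("sha256_crypt", None) record when the "admin" category sets no
+        # sha256_crypt-specific options)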
+ if category: + try: + cache = self._records + record = cache[scheme, category] = cache[scheme, None] + return record + except KeyError: + pass + + # scheme not found in configuration for default category + raise KeyError("crypt algorithm not found in policy: %r" % (scheme,)) + + def _get_record_list(self, category=None): + """return list of records for category (cached) + + this is an internal helper used only by identify_record() + """ + # type check of category - handled by _get_record() + # quick lookup in cache + try: + return self._record_lists[category] + except KeyError: + pass + # cache miss - build list from scratch + value = self._record_lists[category] = [ + self.get_record(scheme, category) + for scheme in self.schemes + ] + return value + + def identify_record(self, hash, category, required=True): + """internal helper to identify appropriate custom handler for hash""" + # NOTE: this is part of the critical path shared by + # all of CryptContext's PasswordHash methods, + # hence all the caching and error checking. + # FIXME: if multiple hashes could match (e.g. lmhash vs nthash) + # this will only return first match. might want to do something + # about this in future, but for now only hashes with + # unique identifiers will work properly in a CryptContext. + # XXX: if all handlers have a unique prefix (e.g. all are MCF / LDAP), + # could use dict-lookup to speed up this search. + if not isinstance(hash, unicode_or_bytes_types): + raise ExpectedStringError(hash, "hash") + # type check of category - handled by _get_record_list() + for record in self._get_record_list(category): + if record.identify(hash): + return record + if not required: + return None + elif not self.schemes: + raise KeyError("no crypt algorithms supported") + else: + raise exc.UnknownHashError("hash could not be identified") + + @memoized_property + def disabled_record(self): + for record in self._get_record_list(None): + if record.is_disabled: + return record + raise RuntimeError("no disabled hasher present " + "(perhaps add 'unix_disabled' to list of schemes?)") + + #=================================================================== + # serialization + #=================================================================== + def iter_config(self, resolve=False): + """regenerate original config. + + this is an iterator which yields ``(cat,scheme,option),value`` items, + in the order they generally appear inside an INI file. + if interpreted as a dictionary, it should match the original + keywords passed to the CryptContext (aside from any canonization). + + it's mainly used as the internal backend for most of the public + serialization methods. + """ + # grab various bits of data + scheme_options = self._scheme_options + context_options = self._context_options + scheme_keys = sorted(scheme_options) + context_keys = sorted(context_options) + + # write loaded schemes (may differ from 'schemes' local var) + if 'schemes' in context_keys: + context_keys.remove("schemes") + value = self.handlers if resolve else self.schemes + if value: + yield (None, None, "schemes"), list(value) + + # then run through config for each user category + for cat in (None,) + self.categories: + + # write context options + for key in context_keys: + try: + value = context_options[key][cat] + except KeyError: + pass + else: + if isinstance(value, list): + value = list(value) + yield (cat, None, key), value + + # write per-scheme options for all schemes. 
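+            # (e.g. an ``admin__sha256_crypt__min_rounds = 2000`` setting is
+            # yielded here as (("admin", "sha256_crypt", "min_rounds"), 2000))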
+ for scheme in scheme_keys: + try: + kwds = scheme_options[scheme][cat] + except KeyError: + pass + else: + for key in sorted(kwds): + yield (cat, scheme, key), kwds[key] + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# main CryptContext class +#============================================================================= +class CryptContext(object): + """Helper for hashing & verifying passwords using multiple algorithms. + + Instances of this class allow applications to choose a specific + set of hash algorithms which they wish to support, set limits and defaults + for the rounds and salt sizes those algorithms should use, flag + which algorithms should be deprecated, and automatically handle + migrating users to stronger hashes when they log in. + + Basic usage:: + + >>> ctx = CryptContext(schemes=[...]) + + See the Passlib online documentation for details and full documentation. + """ + # FIXME: altering the configuration of this object isn't threadsafe, + # but is generally only done during application init, so not a major + # issue (just yet). + + # XXX: would like some way to restrict the categories that are allowed, + # to restrict what the app OR the config can use. + + # XXX: add wrap/unwrap callback hooks so app can mutate hash format? + + # XXX: add method for detecting and warning user about schemes + # which don't have any good distinguishing marks? + # or greedy ones (unix_disabled, plaintext) which are not listed at the end? + + #=================================================================== + # instance attrs + #=================================================================== + + # _CryptConfig instance holding current parsed config + _config = None + + # copy of _config methods, stored in CryptContext instance for speed. + _get_record = None + _identify_record = None + + #=================================================================== + # secondary constructors + #=================================================================== + @classmethod + def _norm_source(cls, source): + """internal helper - accepts string, dict, or context""" + if isinstance(source, dict): + return cls(**source) + elif isinstance(source, cls): + return source + else: + self = cls() + self.load(source) + return self + + @classmethod + def from_string(cls, source, section="passlib", encoding="utf-8"): + """create new CryptContext instance from an INI-formatted string. + + :type source: unicode or bytes + :arg source: + string containing INI-formatted content. + + :type section: str + :param section: + option name of section to read from, defaults to ``"passlib"``. + + :type encoding: str + :arg encoding: + optional encoding used when source is bytes, defaults to ``"utf-8"``. + + :returns: + new :class:`CryptContext` instance, configured based on the + parameters in the *source* string. + + Usage example:: + + >>> from passlib.context import CryptContext + >>> context = CryptContext.from_string(''' + ... [passlib] + ... schemes = sha256_crypt, des_crypt + ... sha256_crypt__default_rounds = 30000 + ... ''') + + .. versionadded:: 1.6 + + .. seealso:: :meth:`to_string`, the inverse of this constructor. 
+ """ + if not isinstance(source, unicode_or_bytes_types): + raise ExpectedTypeError(source, "unicode or bytes", "source") + self = cls(_autoload=False) + self.load(source, section=section, encoding=encoding) + return self + + @classmethod + def from_path(cls, path, section="passlib", encoding="utf-8"): + """create new CryptContext instance from an INI-formatted file. + + this functions exactly the same as :meth:`from_string`, + except that it loads from a local file. + + :type path: str + :arg path: + path to local file containing INI-formatted config. + + :type section: str + :param section: + option name of section to read from, defaults to ``"passlib"``. + + :type encoding: str + :arg encoding: + encoding used to load file, defaults to ``"utf-8"``. + + :returns: + new CryptContext instance, configured based on the parameters + stored in the file *path*. + + .. versionadded:: 1.6 + + .. seealso:: :meth:`from_string` for an equivalent usage example. + """ + self = cls(_autoload=False) + self.load_path(path, section=section, encoding=encoding) + return self + + def copy(self, **kwds): + """Return copy of existing CryptContext instance. + + This function returns a new CryptContext instance whose configuration + is exactly the same as the original, with the exception that any keywords + passed in will take precedence over the original settings. + As an example:: + + >>> from passlib.context import CryptContext + + >>> # given an existing context... + >>> ctx1 = CryptContext(["sha256_crypt", "md5_crypt"]) + + >>> # copy can be used to make a clone, and update + >>> # some of the settings at the same time... + >>> ctx2 = custom_app_context.copy(default="md5_crypt") + + >>> # and the original will be unaffected by the change + >>> ctx1.default_scheme() + "sha256_crypt" + >>> ctx2.default_scheme() + "md5_crypt" + + .. versionadded:: 1.6 + This method was previously named :meth:`!replace`. That alias + has been deprecated, and will be removed in Passlib 1.8. + + .. seealso:: :meth:`update` + """ + # XXX: it would be faster to store ref to self._config, + # but don't want to share config objects til sure + # can rely on them being immutable. + other = CryptContext(_autoload=False) + other.load(self) + if kwds: + other.load(kwds, update=True) + return other + + def using(self, **kwds): + """ + alias for :meth:`copy`, to match PasswordHash.using() + """ + return self.copy(**kwds) + + def replace(self, **kwds): + """deprecated alias of :meth:`copy`""" + warn("CryptContext().replace() has been deprecated in Passlib 1.6, " + "and will be removed in Passlib 1.8, " + "it has been renamed to CryptContext().copy()", + DeprecationWarning, stacklevel=2) + return self.copy(**kwds) + + #=================================================================== + # init + #=================================================================== + def __init__(self, schemes=None, + # keyword only... + policy=_UNSET, # <-- deprecated + _autoload=True, **kwds): + # XXX: add ability to make flag certain contexts as immutable, + # e.g. the builtin passlib ones? + # XXX: add a name or import path for the contexts, to help out repr? 
+        if schemes is not None:
+            kwds['schemes'] = schemes
+        if policy is not _UNSET:
+            warn("The CryptContext ``policy`` keyword has been deprecated as of "
+                 "Passlib 1.6, and will be removed in Passlib 1.8; please use "
+                 "``CryptContext.from_string()`` or "
+                 "``CryptContext.from_path()`` instead.",
+                 DeprecationWarning)
+            if policy is None:
+                self.load(kwds)
+            elif isinstance(policy, CryptPolicy):
+                self.load(policy._context)
+                self.update(kwds)
+            else:
+                raise TypeError("policy must be a CryptPolicy instance")
+        elif _autoload:
+            self.load(kwds)
+        else:
+            assert not kwds, "_autoload=False and kwds are mutually exclusive"
+
+    # XXX: would this be useful?
+    ##def __str__(self):
+    ##    if PY3:
+    ##        return self.to_string()
+    ##    else:
+    ##        return self.to_string().encode("utf-8")
+
+    def __repr__(self):
+        return "<CryptContext at 0x%0x>" % id(self)
+
+    #===================================================================
+    # deprecated policy object
+    #===================================================================
+    def _get_policy(self):
+        # The CryptPolicy class has been deprecated, so to support any
+        # legacy accesses, we create a stub policy object so .policy attr
+        # will continue to work.
+        #
+        # the code waits until app accesses a specific policy object attribute
+        # before issuing deprecation warning, so developer gets method-specific
+        # suggestion for how to upgrade.
+
+        # NOTE: making a copy of the context so the policy acts like a snapshot,
+        # to retain the pre-1.6 behavior.
+        return CryptPolicy(_internal_context=self.copy(), _stub_policy=True)
+
+    def _set_policy(self, policy):
+        warn("The CryptPolicy class and the ``context.policy`` attribute have "
+             "been deprecated as of Passlib 1.6, and will be removed in "
+             "Passlib 1.8; please use the ``context.load()`` and "
+             "``context.update()`` methods instead.",
+             DeprecationWarning, stacklevel=2)
+        if isinstance(policy, CryptPolicy):
+            self.load(policy._context)
+        else:
+            raise TypeError("expected CryptPolicy instance")
+
+    policy = property(_get_policy, _set_policy,
+                      doc="[deprecated] returns CryptPolicy instance "
+                          "tied to this CryptContext")
+
+    #===================================================================
+    # loading / updating configuration
+    #===================================================================
+    @staticmethod
+    def _parse_ini_stream(stream, section, filename):
+        """helper to read INI from stream, extract passlib section as dict"""
+        # NOTE: this expects a unicode stream under py3,
+        # and a utf-8 bytes stream under py2,
+        # allowing the resulting dict to always use native strings.
+        p = SafeConfigParser()
+        if PY3:
+            # python 3.2 deprecated readfp in favor of read_file
+            p.read_file(stream, filename)
+        else:
+            p.readfp(stream, filename)
+        # XXX: could change load() to accept list of items,
+        # and skip intermediate dict creation
+        return dict(p.items(section))
+
+    def load_path(self, path, update=False, section="passlib", encoding="utf-8"):
+        """Load new configuration into CryptContext from a local file.
+
+        This function is a wrapper for :meth:`load` which
+        loads a configuration string from the local file *path*,
+        instead of an in-memory source. Its behavior and options
+        are otherwise identical to :meth:`!load` when provided with
+        an INI-formatted string.
+
+        .. versionadded:: 1.6
+        """
+        def helper(stream):
+            kwds = self._parse_ini_stream(stream, section, path)
+            return self.load(kwds, update=update)
+        if PY3:
+            # decode to unicode, which load() expects under py3
+            with open(path, "rt", encoding=encoding) as stream:
+                return helper(stream)
+        elif encoding in ["utf-8", "ascii"]:
+            # keep as utf-8 bytes, which load() expects under py2
+            with open(path, "rb") as stream:
+                return helper(stream)
+        else:
+            # transcode to utf-8 bytes
+            with open(path, "rb") as fh:
+                tmp = fh.read().decode(encoding).encode("utf-8")
+                return helper(BytesIO(tmp))
+
+    def load(self, source, update=False, section="passlib", encoding="utf-8"):
+        """Load new configuration into CryptContext, replacing existing config.
+
+        :arg source:
+            source of new configuration to load.
+            this value can be a number of different types:
+
+            * a :class:`!dict` object, or compatible Mapping
+
+              the key/value pairs will be interpreted the same as keywords
+              for the :class:`CryptContext` class constructor.
+
+            * a :class:`!unicode` or :class:`!bytes` string
+
+              this will be interpreted as an INI-formatted file,
+              and appropriate key/value pairs will be loaded from
+              the specified *section*.
+
+            * another :class:`!CryptContext` object.
+
+              this will export a snapshot of its configuration
+              using :meth:`to_dict`.
+
+        :type update: bool
+        :param update:
+            By default, :meth:`load` will replace the existing configuration
+            entirely. If ``update=True``, it will preserve any existing
+            configuration options that are not overridden by the new source,
+            much like the :meth:`update` method.
+
+        :type section: str
+        :param section:
+            When parsing an INI-formatted string, :meth:`load` will look for
+            a section named ``"passlib"``. This option allows an alternate
+            section name to be used. Ignored when loading from a dictionary.
+
+        :type encoding: str
+        :param encoding:
+            Encoding to use when **source** is bytes.
+            Defaults to ``"utf-8"``. Ignored when loading from a dictionary.
+
+            .. deprecated:: 1.8
+
+                This keyword, and support for bytes input, will be dropped in Passlib 2.0
+
+        :raises TypeError:
+            * If the source cannot be identified.
+            * If an unknown / malformed keyword is encountered.
+
+        :raises ValueError:
+            If an invalid keyword value is encountered.
+
+        .. note::
+
+            If an error occurs during a :meth:`!load` call, the :class:`!CryptContext`
+            instance will be restored to the configuration it was in before
+            the :meth:`!load` call was made; this is to ensure it is
+            *never* left in an inconsistent state due to a load error.
+
+        .. versionadded:: 1.6
+        """
+        #-----------------------------------------------------------
+        # autodetect source type, convert to dict
+        #-----------------------------------------------------------
+        parse_keys = True
+        if isinstance(source, unicode_or_bytes_types):
+            if PY3:
+                source = to_unicode(source, encoding, param="source")
+            else:
+                source = to_bytes(source, "utf-8", source_encoding=encoding,
+                                  param="source")
+            source = self._parse_ini_stream(NativeStringIO(source), section,
+                                            "<string>")
+        elif isinstance(source, CryptContext):
+            # extract dict directly from config, so it can be merged later
+            source = dict(source._config.iter_config(resolve=True))
+            parse_keys = False
+        elif not hasattr(source, "items"):
+            # mappings are left alone, otherwise raise an error.
+            raise ExpectedTypeError(source, "string or dict", "source")
+
+        # XXX: add support for other iterable types, e.g. sequence of pairs?
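+
+        # (per the docstring above, each of these forms is accepted, e.g.:
+        #   ctx.load({"schemes": ["sha256_crypt"]})
+        #   ctx.load("[passlib]\nschemes = sha256_crypt")
+        #   ctx.load(other_context)   # imported as a to_dict() snapshot
+        # )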
+ + #----------------------------------------------------------- + # parse dict keys into (category, scheme, option) format, + # and merge with existing configuration if needed. + #----------------------------------------------------------- + if parse_keys: + parse = self._parse_config_key + source = dict((parse(key), value) + for key, value in iteritems(source)) + if update and self._config is not None: + # if updating, do nothing if source is empty, + if not source: + return + # otherwise overlay source on top of existing config + tmp = source + source = dict(self._config.iter_config(resolve=True)) + source.update(tmp) + + #----------------------------------------------------------- + # compile into _CryptConfig instance, and update state + #----------------------------------------------------------- + config = _CryptConfig(source) + self._config = config + self._reset_dummy_verify() + self._get_record = config.get_record + self._identify_record = config.identify_record + if config.context_kwds: + # (re-)enable method for this instance (in case ELSE clause below ran last load). + self.__dict__.pop("_strip_unused_context_kwds", None) + else: + # disable method for this instance, it's not needed. + self._strip_unused_context_kwds = None + + @staticmethod + def _parse_config_key(ckey): + """helper used to parse ``cat__scheme__option`` keys into a tuple""" + # split string into 1-3 parts + assert isinstance(ckey, native_string_types) + parts = ckey.replace(".", "__").split("__") + count = len(parts) + if count == 1: + cat, scheme, key = None, None, parts[0] + elif count == 2: + cat = None + scheme, key = parts + elif count == 3: + cat, scheme, key = parts + else: + raise TypeError("keys must have less than 3 separators: %r" % + (ckey,)) + # validate & normalize the parts + if cat == "default": + cat = None + elif not cat and cat is not None: + raise TypeError("empty category: %r" % ckey) + if scheme == "context": + scheme = None + elif not scheme and scheme is not None: + raise TypeError("empty scheme: %r" % ckey) + if not key: + raise TypeError("empty option: %r" % ckey) + return cat, scheme, key + + def update(self, *args, **kwds): + """Helper for quickly changing configuration. + + This acts much like the :meth:`!dict.update` method: + it updates the context's configuration, + replacing the original value(s) for the specified keys, + and preserving the rest. + It accepts any :ref:`keyword ` + accepted by the :class:`!CryptContext` constructor. + + .. versionadded:: 1.6 + + .. seealso:: :meth:`copy` + """ + if args: + if len(args) > 1: + raise TypeError("expected at most one positional argument") + if kwds: + raise TypeError("positional arg and keywords mutually exclusive") + self.load(args[0], update=True) + elif kwds: + self.load(kwds, update=True) + + # XXX: make this public? even just as flag to load? + # FIXME: this function suffered some bitrot in 1.6.1, + # will need to be updated before works again. + ##def _simplify(self): + ## "helper to remove redundant/unused options" + ## # don't do anything if no schemes are defined + ## if not self._schemes: + ## return + ## + ## def strip_items(target, filter): + ## keys = [key for key,value in iteritems(target) + ## if filter(key,value)] + ## for key in keys: + ## del target[key] + ## + ## # remove redundant default. + ## defaults = self._default_schemes + ## if defaults.get(None) == self._schemes[0]: + ## del defaults[None] + ## + ## # remove options for unused schemes. 
+    ##    scheme_options = self._scheme_options
+    ##    schemes = self._schemes + ("all",)
+    ##    strip_items(scheme_options, lambda k,v: k not in schemes)
+    ##
+    ##    # remove redundant cat defaults.
+    ##    cur = self.default_scheme()
+    ##    strip_items(defaults, lambda k,v: k and v==cur)
+    ##
+    ##    # remove redundant category deprecations.
+    ##    # TODO: this should work w/ 'auto', but needs closer inspection
+    ##    deprecated = self._deprecated_schemes
+    ##    cur = self._deprecated_schemes.get(None)
+    ##    strip_items(deprecated, lambda k,v: k and v==cur)
+    ##
+    ##    # remove redundant category options.
+    ##    for scheme, config in iteritems(scheme_options):
+    ##        if None in config:
+    ##            cur = config[None]
+    ##            strip_items(config, lambda k,v: k and v==cur)
+    ##
+    ##    # XXX: anything else?
+
+    #===================================================================
+    # reading configuration
+    #===================================================================
+    def schemes(self, resolve=False, category=None, unconfigured=False):
+        """return schemes loaded into this CryptContext instance.
+
+        :type resolve: bool
+        :arg resolve:
+            if ``True``, will return a tuple of :class:`~passlib.ifc.PasswordHash`
+            objects instead of their names.
+
+        :returns:
+            returns tuple of the schemes configured for this context
+            via the *schemes* option.
+
+        .. versionadded:: 1.6
+            This was previously available as ``CryptContext().policy.schemes()``
+
+        .. seealso:: the :ref:`schemes <context-schemes-option>` option for usage example.
+        """
+        # XXX: should resolve return records rather than handlers?
+        #      or deprecate resolve keyword completely?
+        #      offering up a .hashers Mapping in v1.8 would be great.
+        # NOTE: supporting 'category' and 'unconfigured' kwds as of 1.7
+        #       just to pass through to .handler(), but not documenting them...
+        #       may not need to put them to use.
+        schemes = self._config.schemes
+        if resolve:
+            return tuple(self.handler(scheme, category, unconfigured=unconfigured)
+                         for scheme in schemes)
+        else:
+            return schemes
+
+    def default_scheme(self, category=None, resolve=False, unconfigured=False):
+        """return name of scheme that :meth:`hash` will use by default.
+
+        :type resolve: bool
+        :arg resolve:
+            if ``True``, will return a :class:`~passlib.ifc.PasswordHash`
+            object instead of the name.
+
+        :type category: str or None
+        :param category:
+            Optional :ref:`user category <user-categories>`.
+            If specified, this will return the category-specific default scheme instead.
+
+        :returns:
+            name of the default scheme.
+
+        .. seealso:: the :ref:`default <context-default-option>` option for usage example.
+
+        .. versionadded:: 1.6
+
+        .. versionchanged:: 1.7
+
+            This now returns a hasher configured with any CryptContext-specific
+            options (custom rounds settings, etc). Previously this returned
+            the base hasher from :mod:`passlib.hash`.
+        """
+        # XXX: deprecate this in favor of .handler() or whatever it's replaced with?
+        # NOTE: supporting 'unconfigured' kwds as of 1.7
+        #       just to pass through to .handler(), but not documenting them...
+        #       may not need to put them to use.
+        hasher = self.handler(None, category, unconfigured=unconfigured)
+        return hasher if resolve else hasher.name
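+    # --- editor's note: illustrative sketch, not part of the passlib source ---
+    # assuming a hypothetical two-category setup, the two methods above behave
+    # roughly like this (the ``admin__context__default`` key follows the
+    # cat__scheme__option format parsed by _parse_config_key() earlier):
+    #
+    #     from passlib.context import CryptContext
+    #     ctx = CryptContext(schemes=["sha256_crypt", "md5_crypt"],
+    #                        admin__context__default="md5_crypt")
+    #     ctx.schemes()                   # -> ("sha256_crypt", "md5_crypt")
+    #     ctx.default_scheme()            # -> "sha256_crypt"
+    #     ctx.default_scheme("admin")     # -> "md5_crypt"
+    # --------------------------------------------------------------------------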
+ ## """ + ## return self._config.categories + + # XXX: need to decide if exposing this would be useful to applications + # in any meaningful way that isn't already served by to_dict() + ##def options(self, scheme, category=None): + ## kwds, percat = self._config.get_options(scheme, category) + ## return kwds + + def handler(self, scheme=None, category=None, unconfigured=False): + """helper to resolve name of scheme -> :class:`~passlib.ifc.PasswordHash` object used by scheme. + + :arg scheme: + This should identify the scheme to lookup. + If omitted or set to ``None``, this will return the handler + for the default scheme. + + :arg category: + If a user category is specified, and no scheme is provided, + it will use the default for that category. + Otherwise this parameter is ignored. + + :param unconfigured: + + By default, this returns a handler object whose .hash() + and .needs_update() methods will honor the configured + provided by CryptContext. See ``unconfigured=True`` + to get the underlying handler from before any context-specific + configuration was applied. + + :raises KeyError: + If the scheme does not exist OR is not being used within this context. + + :returns: + :class:`~passlib.ifc.PasswordHash` object used to implement + the named scheme within this context (this will usually + be one of the objects from :mod:`passlib.hash`) + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.get_handler()`` + + .. versionchanged:: 1.7 + + This now returns a hasher configured with any CryptContext-specific + options (custom rounds settings, etc). Previously this returned + the base hasher from :mod:`passlib.hash`. + """ + try: + hasher = self._get_record(scheme, category) + if unconfigured: + return hasher._Context__orig_handler + else: + return hasher + except KeyError: + pass + if self._config.handlers: + raise KeyError("crypt algorithm not found in this " + "CryptContext instance: %r" % (scheme,)) + else: + raise KeyError("no crypt algorithms loaded in this " + "CryptContext instance") + + def _get_unregistered_handlers(self): + """check if any handlers in this context aren't in the global registry""" + return tuple(handler for handler in self._config.handlers + if not _is_handler_registered(handler)) + + @property + def context_kwds(self): + """ + return :class:`!set` containing union of all :ref:`contextual keywords ` + supported by the handlers in this context. + + .. versionadded:: 1.6.6 + """ + return self._config.context_kwds + + #=================================================================== + # exporting config + #=================================================================== + @staticmethod + def _render_config_key(key): + """convert 3-part config key to single string""" + cat, scheme, option = key + if cat: + return "%s__%s__%s" % (cat, scheme or "context", option) + elif scheme: + return "%s__%s" % (scheme, option) + else: + return option + + @staticmethod + def _render_ini_value(key, value): + """render value to string suitable for INI file""" + # convert lists to comma separated lists + # (mainly 'schemes' & 'deprecated') + if isinstance(value, (list,tuple)): + value = ", ".join(value) + + # convert numbers to strings + elif isinstance(value, num_types): + if isinstance(value, float) and key[2] == "vary_rounds": + value = ("%.2f" % value).rstrip("0") if value else "0" + else: + value = str(value) + + assert isinstance(value, native_string_types), \ + "expected string for key: %r %r" % (key, value) + + # escape any percent signs. 
+ return value.replace("%", "%%") + + def to_dict(self, resolve=False): + """Return current configuration as a dictionary. + + :type resolve: bool + :arg resolve: + if ``True``, the ``schemes`` key will contain a list of + a :class:`~passlib.ifc.PasswordHash` objects instead of just + their names. + + This method dumps the current configuration of the CryptContext + instance. The key/value pairs should be in the format accepted + by the :class:`!CryptContext` class constructor, in fact + ``CryptContext(**myctx.to_dict())`` will create an exact copy of ``myctx``. + As an example:: + + >>> # you can dump the configuration of any crypt context... + >>> from passlib.apps import ldap_nocrypt_context + >>> ldap_nocrypt_context.to_dict() + {'schemes': ['ldap_salted_sha1', + 'ldap_salted_md5', + 'ldap_sha1', + 'ldap_md5', + 'ldap_plaintext']} + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.to_dict()`` + + .. seealso:: the :ref:`context-serialization-example` example in the tutorial. + """ + # XXX: should resolve default to conditional behavior + # based on presence of unregistered handlers? + render_key = self._render_config_key + return dict((render_key(key), value) + for key, value in self._config.iter_config(resolve)) + + def _write_to_parser(self, parser, section): + """helper to write to ConfigParser instance""" + render_key = self._render_config_key + render_value = self._render_ini_value + parser.add_section(section) + for k,v in self._config.iter_config(): + v = render_value(k, v) + k = render_key(k) + parser.set(section, k, v) + + def to_string(self, section="passlib"): + """serialize to INI format and return as unicode string. + + :param section: + name of INI section to output, defaults to ``"passlib"``. + + :returns: + CryptContext configuration, serialized to a INI unicode string. + + This function acts exactly like :meth:`to_dict`, except that it + serializes all the contents into a single human-readable string, + which can be hand edited, and/or stored in a file. The + output of this method is accepted by :meth:`from_string`, + :meth:`from_path`, and :meth:`load`. As an example:: + + >>> # you can dump the configuration of any crypt context... + >>> from passlib.apps import ldap_nocrypt_context + >>> print ldap_nocrypt_context.to_string() + [passlib] + schemes = ldap_salted_sha1, ldap_salted_md5, ldap_sha1, ldap_md5, ldap_plaintext + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.to_string()`` + + .. seealso:: the :ref:`context-serialization-example` example in the tutorial. + """ + parser = SafeConfigParser() + self._write_to_parser(parser, section) + buf = NativeStringIO() + parser.write(buf) + unregistered = self._get_unregistered_handlers() + if unregistered: + buf.write(( + "# NOTE: the %s handler(s) are not registered with Passlib,\n" + "# this string may not correctly reproduce the current configuration.\n\n" + ) % ", ".join(repr(handler.name) for handler in unregistered)) + out = buf.getvalue() + if not PY3: + out = out.decode("utf-8") + return out + + # XXX: is this useful enough to enable? 
+    ##def write_to_path(self, path, section="passlib", update=False):
+    ##    "write to INI file"
+    ##    parser = ConfigParser()
+    ##    if update and os.path.exists(path):
+    ##        if not parser.read([path]):
+    ##            raise EnvironmentError("failed to read existing file")
+    ##        parser.remove_section(section)
+    ##    self._write_to_parser(parser, section)
+    ##    fh = file(path, "w")
+    ##    parser.write(fh)
+    ##    fh.close()
+
+    #===================================================================
+    # verify() hardening
+    # NOTE: this entire feature has been disabled.
+    #       all contents of this section are NOOPs as of 1.7.1,
+    #       and will be removed in 1.8.
+    #===================================================================
+
+    mvt_estimate_max_samples = 20
+    mvt_estimate_min_samples = 10
+    mvt_estimate_max_time = 2
+    mvt_estimate_resolution = 0.01
+    harden_verify = None
+    min_verify_time = 0
+
+    def reset_min_verify_time(self):
+        self._reset_dummy_verify()
+
+    #===================================================================
+    # password hash api
+    #===================================================================
+
+    # NOTE: all that the following methods do is look up the appropriate
+    #       custom handler for a given (scheme,category) combination,
+    #       and hand off the real work to the handler itself,
+    #       which is optimized for the specific (scheme,category) configuration.
+    #
+    #       The custom handlers are cached inside the _CryptConfig
+    #       instance stored in self._config, and are retrieved
+    #       via get_record() and identify_record().
+    #
+    #       _get_record() and _identify_record() are references
+    #       to _config methods of the same name,
+    #       stored in CryptContext for speed.
+
+    def _get_or_identify_record(self, hash, scheme=None, category=None):
+        """return record based on scheme, or failing that, by identifying hash"""
+        if scheme:
+            if not isinstance(hash, unicode_or_bytes_types):
+                raise ExpectedStringError(hash, "hash")
+            return self._get_record(scheme, category)
+        else:
+            # hash typecheck handled by identify_record()
+            return self._identify_record(hash, category)
+
+    def _strip_unused_context_kwds(self, kwds, record):
+        """
+        helper which removes any context keywords from **kwds**
+        that are known to be used by another scheme in this context,
+        but are NOT supported by the handler specified by **record**.
+
+        .. note::
+            as an optimization, load() will set this method to None on a per-instance basis
+            if there are no context kwds.
+        """
+        if not kwds:
+            return
+        unused_kwds = self._config.context_kwds.difference(record.context_kwds)
+        for key in unused_kwds:
+            kwds.pop(key, None)
+
+    def needs_update(self, hash, scheme=None, category=None, secret=None):
+        """Check if hash needs to be replaced for some reason,
+        in which case the secret should be re-hashed.
+
+        This function is the core of CryptContext's support for hash migration:
+        it takes in a hash string, and checks the scheme,
+        number of rounds, and other properties against the current policy.
+        It returns ``True`` if the hash is using a deprecated scheme,
+        or is otherwise outside of the bounds specified by the policy
+        (e.g. the number of rounds is lower than the :ref:`min_rounds <context-min-rounds-option>`
+        configuration for that algorithm).
+        If so, the password should be re-hashed using :meth:`hash`.
+        Otherwise, it will return ``False``.
+
+        :type hash: unicode or bytes
+        :arg hash:
+            The hash string to examine.
+
+        :type scheme: str or None
+        :param scheme:
+
+            Optional scheme to use. Scheme must be one of the ones
+            configured for this context (see the
+            :ref:`schemes <context-schemes-option>` option).
+ If no scheme is specified, it will be identified + based on the value of *hash*. + + .. deprecated:: 1.7 + + Support for this keyword is deprecated, and will be removed in Passlib 2.0. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when determining if the hash needs to be updated + (e.g. is below the minimum rounds). + + :type secret: unicode, bytes, or None + :param secret: + Optional secret associated with the provided ``hash``. + This is not required, or even currently used for anything... + it's for forward-compatibility with any future + update checks that might need this information. + If provided, Passlib assumes the secret has already been + verified successfully against the hash. + + .. versionadded:: 1.6 + + :returns: ``True`` if hash should be replaced, otherwise ``False``. + + :raises ValueError: + If the hash did not match any of the configured :meth:`schemes`. + + .. versionadded:: 1.6 + This method was previously named :meth:`hash_needs_update`. + + .. seealso:: the :ref:`context-migration-example` example in the tutorial. + """ + if scheme is not None: + # TODO: offer replacement alternative. + # ``context.handler(scheme).needs_update()`` would work, + # but may deprecate .handler() in passlib 1.8. + warn("CryptContext.needs_update(): 'scheme' keyword is deprecated as of " + "Passlib 1.7, and will be removed in Passlib 2.0", + DeprecationWarning) + record = self._get_or_identify_record(hash, scheme, category) + return record.deprecated or record.needs_update(hash, secret=secret) + + @deprecated_method(deprecated="1.6", removed="2.0", replacement="CryptContext.needs_update()") + def hash_needs_update(self, hash, scheme=None, category=None): + """Legacy alias for :meth:`needs_update`. + + .. deprecated:: 1.6 + This method was renamed to :meth:`!needs_update` in version 1.6. + This alias will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.5. + """ + return self.needs_update(hash, scheme, category) + + @deprecated_method(deprecated="1.7", removed="2.0") + def genconfig(self, scheme=None, category=None, **settings): + """Generate a config string for specified scheme. + + .. deprecated:: 1.7 + + This method will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + record = self._get_record(scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(settings, record) + return record.genconfig(**settings) + + @deprecated_method(deprecated="1.7", removed="2.0") + def genhash(self, secret, config, scheme=None, category=None, **kwds): + """Generate hash for the specified secret using another hash. + + .. deprecated:: 1.7 + + This method will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + record = self._get_or_identify_record(config, scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(kwds, record) + return record.genhash(secret, config, **kwds) + + def identify(self, hash, category=None, resolve=False, required=False, + unconfigured=False): + """Attempt to identify which algorithm the hash belongs to. + + Note that this will only consider the algorithms + currently configured for this context + (see the :ref:`schemes ` option). 
+ All registered algorithms will be checked, from first to last, + and whichever one positively identifies the hash first will be returned. + + :type hash: unicode or bytes + :arg hash: + The hash string to test. + + :type category: str or None + :param category: + Optional :ref:`user category `. + Ignored by this function, this parameter + is provided for symmetry with the other methods. + + :type resolve: bool + :param resolve: + If ``True``, returns the hash handler itself, + instead of the name of the hash. + + :type required: bool + :param required: + If ``True``, this will raise a ValueError if the hash + cannot be identified, instead of returning ``None``. + + :returns: + The handler which first identifies the hash, + or ``None`` if none of the algorithms identify the hash. + """ + record = self._identify_record(hash, category, required) + if record is None: + return None + elif resolve: + if unconfigured: + return record._Context__orig_handler + else: + return record + else: + return record.name + + def hash(self, secret, scheme=None, category=None, **kwds): + """run secret through selected algorithm, returning resulting hash. + + :type secret: unicode or bytes + :arg secret: + the password to hash. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). + If no scheme is specified, the configured default + will be used. + + .. deprecated:: 1.7 + + Support for this keyword is deprecated, and will be removed in Passlib 2.0. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when hashing the password (e.g. different default scheme, + different default rounds values, etc). + + :param \\*\\*kwds: + All other keyword options are passed to the selected algorithm's + :meth:`PasswordHash.hash() ` method. + + :returns: + The secret as encoded by the specified algorithm and options. + The return value will always be a :class:`!str`. + + :raises TypeError, ValueError: + * If any of the arguments have an invalid type or value. + This includes any keywords passed to the underlying hash's + :meth:`PasswordHash.hash() ` method. + + .. seealso:: the :ref:`context-basic-example` example in the tutorial + """ + # XXX: could insert normalization to preferred unicode encoding here + if scheme is not None: + # TODO: offer replacement alternative. + # ``context.handler(scheme).hash()`` would work, + # but may deprecate .handler() in passlib 1.8. + warn("CryptContext.hash(): 'scheme' keyword is deprecated as of " + "Passlib 1.7, and will be removed in Passlib 2.0", + DeprecationWarning) + record = self._get_record(scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(kwds, record) + return record.hash(secret, **kwds) + + @deprecated_method(deprecated="1.7", removed="2.0", replacement="CryptContext.hash()") + def encrypt(self, *args, **kwds): + """ + Legacy alias for :meth:`hash`. + + .. deprecated:: 1.7 + This method was renamed to :meth:`!hash` in version 1.7. + This alias will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + return self.hash(*args, **kwds) + + def verify(self, secret, hash, scheme=None, category=None, **kwds): + """verify secret against an existing hash. 
+
+        If no scheme is specified, this will attempt to identify
+        the scheme based on the contents of the provided hash
+        (limited to the schemes configured for this context).
+        It will then check whether the password verifies against the hash.
+
+        :type secret: unicode or bytes
+        :arg secret:
+            the secret to verify
+
+        :type hash: unicode or bytes
+        :arg hash:
+            hash string to compare to
+
+            if ``None`` is passed in, this will be treated as "never verifying"
+
+        :type scheme: str
+        :param scheme:
+            Optionally force context to use specific scheme.
+            This is usually not needed, as most hashes can be unambiguously
+            identified. Scheme must be one of the ones configured
+            for this context
+            (see the :ref:`schemes <context-schemes-option>` option).
+
+            .. deprecated:: 1.7
+
+                Support for this keyword is deprecated, and will be removed in Passlib 2.0.
+
+        :type category: str or None
+        :param category:
+            Optional :ref:`user category <user-categories>` string.
+            This is mainly used when generating new hashes, it has little
+            effect when verifying; this keyword is mainly provided for symmetry.
+
+        :param \\*\\*kwds:
+            All additional keywords are passed to the appropriate handler,
+            and should match its :attr:`~passlib.ifc.PasswordHash.context_kwds`.
+
+        :returns:
+            ``True`` if the password matched the hash, else ``False``.
+
+        :raises ValueError:
+            * if the hash did not match any of the configured :meth:`schemes`.
+
+            * if any of the arguments have an invalid value (this includes
+              any keywords passed to the underlying hash's
+              :meth:`PasswordHash.verify() <passlib.ifc.PasswordHash.verify>` method).
+
+        :raises TypeError:
+            * if any of the arguments have an invalid type (this includes
+              any keywords passed to the underlying hash's
+              :meth:`PasswordHash.verify() <passlib.ifc.PasswordHash.verify>` method).
+
+        .. seealso:: the :ref:`context-basic-example` example in the tutorial
+        """
+        # XXX: could insert normalization to preferred unicode encoding here
+        # XXX: what about supporting a setter() callback ala django 1.4 ?
+        if scheme is not None:
+            # TODO: offer replacement alternative.
+            #       ``context.handler(scheme).verify()`` would work,
+            #       but may deprecate .handler() in passlib 1.8.
+            warn("CryptContext.verify(): 'scheme' keyword is deprecated as of "
+                 "Passlib 1.7, and will be removed in Passlib 2.0",
+                 DeprecationWarning)
+        if hash is None:
+            # convenience feature -- let apps pass in hash=None when user
+            # isn't found / has no hash; useful because it invokes dummy_verify()
+            self.dummy_verify()
+            return False
+        record = self._get_or_identify_record(hash, scheme, category)
+        strip_unused = self._strip_unused_context_kwds
+        if strip_unused:
+            strip_unused(kwds, record)
+        return record.verify(secret, hash, **kwds)
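+    # --- editor's note: illustrative sketch, not part of the passlib source ---
+    # typical verify() usage, with hash=None used for unknown accounts so the
+    # failure path costs roughly the same as a real verification:
+    #
+    #     from passlib.context import CryptContext
+    #     ctx = CryptContext(schemes=["pbkdf2_sha256"])
+    #     stored = ctx.hash("hunter2")
+    #     ctx.verify("hunter2", stored)     # -> True
+    #     ctx.verify("wrong", stored)       # -> False
+    #     ctx.verify("hunter2", None)       # -> False (runs dummy_verify())
+    # --------------------------------------------------------------------------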
+    def verify_and_update(self, secret, hash, scheme=None, category=None, **kwds):
+        """verify password and re-hash the password if needed, all in a single call.
+
+        This is a convenience method which takes care of all the following:
+        first it verifies the password (:meth:`~CryptContext.verify`); if this is successful
+        it checks if the hash needs updating (:meth:`~CryptContext.needs_update`); and if so,
+        it re-hashes the password (:meth:`~CryptContext.hash`), returning the replacement hash.
+        This series of steps is a very common task for applications
+        which wish to update deprecated hashes, and this call takes
+        care of all 3 steps efficiently.
+
+        :type secret: unicode or bytes
+        :arg secret:
+            the secret to verify
+
+        :type hash: unicode or bytes
+        :arg hash:
+            hash string to compare to.
+
+            if ``None`` is passed in, this will be treated as "never verifying"
+
+        :type scheme: str
+        :param scheme:
+            Optionally force context to use specific scheme.
+            This is usually not needed, as most hashes can be unambiguously
+            identified. Scheme must be one of the ones configured
+            for this context
+            (see the :ref:`schemes <context-schemes-option>` option).
+
+            .. deprecated:: 1.7
+
+                Support for this keyword is deprecated, and will be removed in Passlib 2.0.
+
+        :type category: str or None
+        :param category:
+            Optional :ref:`user category <user-categories>`.
+            If specified, this will cause any category-specific defaults to
+            be used if the password has to be re-hashed.
+
+        :param \\*\\*kwds:
+            all additional keywords are passed to the appropriate handler,
+            and should match that hash's
+            :attr:`PasswordHash.context_kwds <passlib.ifc.PasswordHash.context_kwds>`.
+
+        :returns:
+            This function returns a tuple containing two elements:
+            ``(verified, replacement_hash)``. The first is a boolean
+            flag indicating whether the password verified,
+            and the second an optional replacement hash.
+            The tuple will always match one of the following 3 cases:
+
+            * ``(False, None)`` indicates the secret failed to verify.
+            * ``(True, None)`` indicates the secret verified correctly,
+              and the hash does not need updating.
+            * ``(True, str)`` indicates the secret verified correctly,
+              but the current hash needs to be updated. The :class:`!str`
+              will be the freshly generated hash, to replace the old one.
+
+        :raises TypeError, ValueError:
+            For the same reasons as :meth:`verify`.
+
+        .. seealso:: the :ref:`context-migration-example` example in the tutorial.
+        """
+        # XXX: could insert normalization to preferred unicode encoding here.
+        if scheme is not None:
+            warn("CryptContext.verify_and_update(): 'scheme' keyword is deprecated as of "
+                 "Passlib 1.7, and will be removed in Passlib 2.0",
+                 DeprecationWarning)
+        if hash is None:
+            # convenience feature -- let apps pass in hash=None when user
+            # isn't found / has no hash; useful because it invokes dummy_verify()
+            self.dummy_verify()
+            return False, None
+        record = self._get_or_identify_record(hash, scheme, category)
+        strip_unused = self._strip_unused_context_kwds
+        if strip_unused and kwds:
+            clean_kwds = kwds.copy()
+            strip_unused(clean_kwds, record)
+        else:
+            clean_kwds = kwds
+        # XXX: if record is default scheme, could extend PasswordHash
+        #      api to combine verify & needs_update to single call,
+        #      potentially saving some round-trip parsing.
+        #      but might make these codepaths more complex...
+        if not record.verify(secret, hash, **clean_kwds):
+            return False, None
+        elif record.deprecated or record.needs_update(hash, secret=secret):
+            # NOTE: we re-hash with default scheme, not current one.
+            return True, self.hash(secret, category=category, **kwds)
+        else:
+            return True, None
+
+    #===================================================================
+    # missing-user helper
+    #===================================================================
+
+    #: secret used for dummy_verify()
+    _dummy_secret = "too many secrets"
+
+    @memoized_property
+    def _dummy_hash(self):
+        """
+        precalculated hash for dummy_verify() to use
+        """
+        return self.hash(self._dummy_secret)
+
+    def _reset_dummy_verify(self):
+        """
+        flush memoized values used by dummy_verify()
+        """
+        type(self)._dummy_hash.clear_cache(self)
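+    # --- editor's note: illustrative sketch, not part of the passlib source ---
+    # dummy_verify() (below) is intended for the user-not-found path of a
+    # login routine, so attackers can't distinguish "no such user" from
+    # "bad password" by response time:
+    #
+    #     user = db.lookup(username)      # hypothetical app-level lookup
+    #     if user is None:
+    #         ctx.dummy_verify()
+    #         return False
+    #     return ctx.verify(password, user.hash)
+    # --------------------------------------------------------------------------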
+    def dummy_verify(self, elapsed=0):
+        """
+        Helper that applications can call when a user wasn't found,
+        in order to simulate the time it would take to hash a password.
+
+        Runs verify() against a dummy hash, to simulate verification
+        of a real account password.
+
+        :param elapsed:
+
+            .. deprecated:: 1.7.1
+
+                this option is ignored, and will be removed in passlib 1.8.
+
+        .. versionadded:: 1.7
+        """
+        self.verify(self._dummy_secret, self._dummy_hash)
+        return False
+
+    #===================================================================
+    # disabled hash support
+    #===================================================================
+
+    def is_enabled(self, hash):
+        """
+        test if hash represents a usable password --
+        i.e. does not represent an unusable password such as ``"!"``,
+        which is recognized by the :class:`~passlib.hash.unix_disabled` hash.
+
+        :raises ValueError:
+            if the hash is not recognized
+            (typically solved by adding ``unix_disabled`` to the list of schemes).
+        """
+        return not self._identify_record(hash, None).is_disabled
+
+    def disable(self, hash=None):
+        """
+        return a string to disable logins for user,
+        usually by returning a non-verifying string such as ``"!"``.
+
+        :param hash:
+            Callers can optionally provide the account's existing hash.
+            Some disabled handlers (such as :class:`!unix_disabled`)
+            will encode this into the returned value,
+            so that it can be recovered via :meth:`enable`.
+
+        :raises RuntimeError:
+            if this function is called w/o a disabled hasher
+            (such as :class:`~passlib.hash.unix_disabled`) included
+            in the list of schemes.
+
+        :returns:
+            hash string which will be recognized as valid by the context,
+            but is guaranteed to not validate against *any* password.
+        """
+        record = self._config.disabled_record
+        assert record.is_disabled
+        return record.disable(hash)
+
+    def enable(self, hash):
+        """
+        inverse of :meth:`disable` --
+        attempts to recover the original hash which was converted
+        by a :meth:`!disable` call into a disabled hash --
+        thus restoring the user's original password.
+
+        :raises ValueError:
+            if the original hash is not present, or if the disabled handler doesn't
+            support encoding the original hash (e.g. ``django_disabled``)
+
+        :returns:
+            the original hash.
+        """
+        record = self._identify_record(hash, None)
+        if record.is_disabled:
+            # XXX: should we throw error if result can't be identified by context?
+            return record.enable(hash)
+        else:
+            # hash wasn't a disabled hash, so return unchanged
+            return hash
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+class LazyCryptContext(CryptContext):
+    """CryptContext subclass which doesn't load handlers until needed.
+
+    This is a subclass of CryptContext which takes in a set of arguments
+    exactly like CryptContext, but won't import any handlers
+    (or even parse its arguments) until
+    the first time one of its methods is accessed.
+
+    :arg schemes:
+        The first positional argument can be a list of schemes, or omitted,
+        just like CryptContext.
+
+    :param onload:
+
+        If a callable is passed in via this keyword,
+        it will be invoked at lazy-load time
+        with the following signature:
+        ``onload(**kwds) -> kwds``;
+        where ``kwds`` is all the additional kwds passed to LazyCryptContext.
+        It should perform any additional deferred initialization,
+        and return the final dict of options to be passed to CryptContext.
+
+        .. versionadded:: 1.6
+
+    :param create_policy:
+
+        .. deprecated:: 1.6
+            This option will be removed in Passlib 1.8,
+            applications should use ``onload`` instead.
+ + :param kwds: + + All additional keywords are passed to CryptContext; + or to the *onload* function (if provided). + + This is mainly used internally by modules such as :mod:`passlib.apps`, + which define a large number of contexts, but only a few of them will be needed + at any one time. Use of this class saves the memory needed to import + the specified handlers until the context instance is actually accessed. + As well, it allows constructing a context at *module-init* time, + but using :func:`!onload()` to provide dynamic configuration + at *application-run* time. + + .. note:: + This class is only useful if you're referencing handler objects by name, + and don't want them imported until runtime. If you want to have the config + validated before your application runs, or are passing in already-imported + handler instances, you should use :class:`CryptContext` instead. + + .. versionadded:: 1.4 + """ + _lazy_kwds = None + + # NOTE: the way this class works changed in 1.6. + # previously it just called _lazy_init() when ``.policy`` was + # first accessed. now that is done whenever any of the public + # attributes are accessed, and the class itself is changed + # to a regular CryptContext, to remove the overhead once it's unneeded. + + def __init__(self, schemes=None, **kwds): + if schemes is not None: + kwds['schemes'] = schemes + self._lazy_kwds = kwds + + def _lazy_init(self): + kwds = self._lazy_kwds + if 'create_policy' in kwds: + warn("The CryptPolicy class, and LazyCryptContext's " + "``create_policy`` keyword have been deprecated as of " + "Passlib 1.6, and will be removed in Passlib 1.8; " + "please use the ``onload`` keyword instead.", + DeprecationWarning) + create_policy = kwds.pop("create_policy") + result = create_policy(**kwds) + policy = CryptPolicy.from_source(result, _warn=False) + kwds = policy._context.to_dict() + elif 'onload' in kwds: + onload = kwds.pop("onload") + kwds = onload(**kwds) + del self._lazy_kwds + super(LazyCryptContext, self).__init__(**kwds) + self.__class__ = CryptContext + + def __getattribute__(self, attr): + if (not attr.startswith("_") or attr.startswith("__")) and \ + self._lazy_kwds is not None: + self._lazy_init() + return object.__getattribute__(self, attr) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/__init__.py b/venv/lib/python3.12/site-packages/passlib/crypto/__init__.py new file mode 100644 index 0000000..89f5484 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/crypto/__init__.py @@ -0,0 +1 @@ +"""passlib.crypto -- package containing cryptographic primitives used by passlib""" diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..770ddfa Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/_md4.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/_md4.cpython-312.pyc new file mode 100644 index 0000000..75b0a5f Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/_md4.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/des.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/des.cpython-312.pyc
new file mode 100644
index 0000000..b77a795
Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/des.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/digest.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/digest.cpython-312.pyc
new file mode 100644
index 0000000..f14e622
Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/__pycache__/digest.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/__init__.py b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/__init__.py
new file mode 100644
index 0000000..1aa1c85
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/__init__.py
@@ -0,0 +1,169 @@
+"""passlib.crypto._blowfish - pure-python eks-blowfish implementation for bcrypt
+
+This is a pure-python implementation of the EKS-Blowfish algorithm described by
+Provos and Mazieres in `A Future-Adaptable Password Scheme
+<http://www.openbsd.org/papers/bcrypt-paper.ps>`_.
+
+This package contains two submodules:
+
+* ``_blowfish/base.py`` contains a class implementing the eks-blowfish algorithm
+  using easy-to-examine code.
+
+* ``_blowfish/unrolled.py`` contains a subclass which replaces some methods
+  of the original class with sped-up versions, mainly using unrolled loops
+  and local variables. this is the class which is actually used by
+  Passlib to perform BCrypt in pure python.
+
+  This module is auto-generated by a script, ``_blowfish/_gen_files.py``.
+
+Status
+------
+This implementation is functional, but is an order of magnitude too slow to be
+usable with real security. For "ok" security, BCrypt hashes should have at
+least 2**11 rounds (as of 2011). Assuming a desired response time <= 100ms,
+this means a BCrypt implementation should get at least 20 rounds/ms
+(2**11 rounds / 100 ms) in order to be both usable *and* secure. On a 2 ghz
+cpu, this implementation gets roughly 0.09 rounds/ms under CPython (220x too
+slow), and 1.9 rounds/ms under PyPy (10x too slow).
+
+History
+-------
+While subsequently modified considerably for Passlib, this code was originally
+based on `jBcrypt 0.2 <http://www.mindrot.org/projects/jBCrypt/>`_, which was
+released under the BSD license::
+
+    Copyright (c) 2006 Damien Miller <djm@mindrot.org>
+
+    Permission to use, copy, modify, and distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ +""" +#============================================================================= +# imports +#============================================================================= +# core +from itertools import chain +import struct +# pkg +from passlib.utils import getrandbytes, rng +from passlib.utils.binary import bcrypt64 +from passlib.utils.compat import BytesIO, unicode, u, native_string_types +from passlib.crypto._blowfish.unrolled import BlowfishEngine +# local +__all__ = [ + 'BlowfishEngine', + 'raw_bcrypt', +] + +#============================================================================= +# bcrypt constants +#============================================================================= + +# bcrypt constant data "OrpheanBeholderScryDoubt" as 6 integers +BCRYPT_CDATA = [ + 0x4f727068, 0x65616e42, 0x65686f6c, + 0x64657253, 0x63727944, 0x6f756274 +] + +# struct used to encode ciphertext as digest (last output byte discarded) +digest_struct = struct.Struct(">6I") + +#============================================================================= +# base bcrypt helper +# +# interface designed only for use by passlib.handlers.bcrypt:BCrypt +# probably not suitable for other purposes +#============================================================================= +BNULL = b'\x00' + +def raw_bcrypt(password, ident, salt, log_rounds): + """perform central password hashing step in bcrypt scheme. + + :param password: the password to hash + :param ident: identifier w/ minor version (e.g. 2, 2a) + :param salt: the binary salt to use (encoded in bcrypt-base64) + :param log_rounds: the log2 of the number of rounds (as int) + :returns: bcrypt-base64 encoded checksum + """ + #=================================================================== + # parse inputs + #=================================================================== + + # parse ident + assert isinstance(ident, native_string_types) + add_null_padding = True + if ident == u('2a') or ident == u('2y') or ident == u('2b'): + pass + elif ident == u('2'): + add_null_padding = False + elif ident == u('2x'): + raise ValueError("crypt_blowfish's buggy '2x' hashes are not " + "currently supported") + else: + raise ValueError("unknown ident: %r" % (ident,)) + + # decode & validate salt + assert isinstance(salt, bytes) + salt = bcrypt64.decode_bytes(salt) + if len(salt) < 16: + raise ValueError("Missing salt bytes") + elif len(salt) > 16: + salt = salt[:16] + + # prepare password + assert isinstance(password, bytes) + if add_null_padding: + password += BNULL + + # validate rounds + if log_rounds < 4 or log_rounds > 31: + raise ValueError("Bad number of rounds") + + #=================================================================== + # + # run EKS-Blowfish algorithm + # + # This uses the "enhanced key schedule" step described by + # Provos and Mazieres in "A Future-Adaptable Password Scheme" + # http://www.openbsd.org/papers/bcrypt-paper.ps + # + #=================================================================== + + engine = BlowfishEngine() + + # convert password & salt into list of 18 32-bit integers (72 bytes total). + pass_words = engine.key_to_words(password) + salt_words = engine.key_to_words(salt) + + # truncate salt_words to original 16 byte salt, or loop won't wrap + # correctly when passed to .eks_salted_expand() + salt_words16 = salt_words[:4] + + # do EKS key schedule setup + engine.eks_salted_expand(pass_words, salt_words16) + + # apply password & salt keys to key schedule a bunch more times. 
+    rounds = 1<<log_rounds
+    engine.eks_repeated_expand(pass_words, salt_words, rounds)
+
+    # encipher constant data, and encode to bytes
+    data = list(BCRYPT_CDATA)
+    i = 0
+    while i < 6:
+        data[i], data[i+1] = engine.repeat_encipher(data[i], data[i+1], 64)
+        i += 2
+    raw = digest_struct.pack(*data)[:-1]
+    return bcrypt64.encode_bytes(raw)
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/_gen_files.py b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/_gen_files.py
new file mode 100644
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/_gen_files.py
+"""passlib.crypto._blowfish._gen_files - meta script that generates unrolled.py"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import os
+import textwrap
+# pkg
+from passlib.utils.compat import irange
+# local
+#=============================================================================
+# helpers
+#=============================================================================
+def indent_block(block, padding):
+    """indent block of text"""
+    lines = block.split("\n")
+    return "\n".join(
+        padding + line if line else ""
+        for line in lines
+    )
+
+BFSTR = """\
+                ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^
+                  S2[(l >> 8) & 0xff]) +
+                 S3[l & 0xff]) & 0xffffffff)
+""".strip()
+
+def render_encipher(write, indent=0):
+    for i in irange(0, 15, 2):
+        write(indent, """\
+            # Feistel substitution on left word (round %(i)d)
+            r ^= %(left)s ^ p%(i1)d
+
+            # Feistel substitution on right word (round %(i1)d)
+            l ^= %(right)s ^ p%(i2)d
+        """, i=i, i1=i+1, i2=i+2,
+             left=BFSTR, right=BFSTR.replace("l","r"),
+        )
+
+def write_encipher_function(write, indent=0):
+    write(indent, """\
+        def encipher(self, l, r):
+            \"""blowfish encipher a single 64-bit block encoded as two 32-bit ints\"""
+
+            (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9,
+             p10, p11, p12, p13, p14, p15, p16, p17) = self.P
+            S0, S1, S2, S3 = self.S
+
+            l ^= p0
+
+        """)
+    render_encipher(write, indent+1)
+
+    write(indent+1, """\
+
+        return r ^ p17, l
+
+    """)
+
+def write_expand_function(write, indent=0):
+    write(indent, """\
+        def expand(self, key_words):
+            \"""unrolled version of blowfish key expansion\"""
+            ##assert len(key_words) >= 18, "size of key_words must be >= 18"
+
+            P, S = self.P, self.S
+            S0, S1, S2, S3 = S
+
+            #=============================================================
+            # integrate key
+            #=============================================================
+        """)
+    for i in irange(18):
+        write(indent+1, """\
+            p%(i)d = P[%(i)d] ^ key_words[%(i)d]
+        """, i=i)
+    write(indent+1, """\
+
+        #=============================================================
+        # update P
+        #=============================================================
+
+        #------------------------------------------------
+        # update P[0] and P[1]
+        #------------------------------------------------
+        l, r = p0, 0
+
+        """)
+
+    render_encipher(write, indent+1)
+
+    write(indent+1, """\
+
+        p0, p1 = l, r = r ^ p17, l
+
+        """)
+
+    for i in irange(2, 18, 2):
+        write(indent+1, """\
+            #------------------------------------------------
+            # update P[%(i)d] and P[%(i1)d]
+            #------------------------------------------------
+            l ^= p0
+
+            """, i=i, i1=i+1)
+
+        render_encipher(write, indent+1)
+
+        write(indent+1, """\
+            p%(i)d, p%(i1)d = l, r = r ^ p17, l
+
+            """, i=i, i1=i+1)
+
+    write(indent+1, """\
+
+        #------------------------------------------------
+        # save changes to original P array
+        #------------------------------------------------
+        P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9,
+                p10, p11, p12, p13, p14, p15, p16, p17)
+
+        #=============================================================
+        # update S
+        #=============================================================
+
+        for box in S:
+            j = 0
+            while j < 256:
+                l ^= p0
+
+        """)
+
+    render_encipher(write, indent+3)
+
+    write(indent+3, """\
+
+        box[j], box[j+1] = l, r = r ^ p17, l
+        j += 2
+        """)
+
+#=============================================================================
+# main
+#=============================================================================
+
+def main():
+    target = os.path.join(os.path.dirname(__file__), "unrolled.py")
+    fh = file(target, "w")
+
+    def write(indent, msg, **kwds):
+        literal = kwds.pop("literal", False)
+        if kwds:
+            msg %= kwds
+        if not literal:
+            msg = textwrap.dedent(msg.rstrip(" "))
+        if indent:
+            msg = indent_block(msg, " " * (indent*4))
+        fh.write(msg)
+
+    write(0, """\
+        \"""passlib.crypto._blowfish.unrolled - unrolled loop implementation of bcrypt,
+        autogenerated by _gen_files.py
+
+        currently this overrides the encipher() and expand() methods
+        with optimized versions, and leaves the other base.py methods alone.
+ \""" + #================================================================= + # imports + #================================================================= + # pkg + from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine + # local + __all__ = [ + "BlowfishEngine", + ] + #================================================================= + # + #================================================================= + class BlowfishEngine(_BlowfishEngine): + + """) + + write_encipher_function(write, indent=1) + write_expand_function(write, indent=1) + + write(0, """\ + #================================================================= + # eoc + #================================================================= + + #================================================================= + # eof + #================================================================= + """) + +if __name__ == "__main__": + main() + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/base.py b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/base.py new file mode 100644 index 0000000..7b4f2cb --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/base.py @@ -0,0 +1,441 @@ +"""passlib.crypto._blowfish.base - unoptimized pure-python blowfish engine""" +#============================================================================= +# imports +#============================================================================= +# core +import struct +# pkg +from passlib.utils import repeat_string +# local +__all__ = [ + "BlowfishEngine", +] + +#============================================================================= +# blowfish constants +#============================================================================= +BLOWFISH_P = BLOWFISH_S = None + +def _init_constants(): + global BLOWFISH_P, BLOWFISH_S + + # NOTE: blowfish's spec states these numbers are the hex representation + # of the fractional portion of PI, in order. 
+ + # Initial contents of key schedule - 18 integers + BLOWFISH_P = [ + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b, + ] + + # all 4 blowfish S boxes in one array - 256 integers per S box + BLOWFISH_S = [ + # sbox 1 + [ + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 
0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, + ], + # sbox 2 + [ + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, + 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, + ], + # sbox 3 + [ + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 
0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, + ], + # sbox 4 + [ + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 
0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, + 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6, + ] + ] + +#============================================================================= +# engine +#============================================================================= +class BlowfishEngine(object): + + def __init__(self): + if BLOWFISH_P is None: + _init_constants() + self.P = list(BLOWFISH_P) + self.S = [ list(box) for box in BLOWFISH_S ] + + #=================================================================== + # common helpers + #=================================================================== + @staticmethod + def key_to_words(data, size=18): + """convert data to tuple of 4-byte integers, repeating or + truncating data as needed to reach specified size""" + assert isinstance(data, bytes) + dlen = len(data) + if not dlen: + # return all zeros - original C code would just read the NUL after + # the password, so mimicing that behavior for this edge case. 
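+            # (i.e. an empty password still yields a defined, all-zero key)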
+            return [0]*size
+
+        # repeat data until it fills up 4*size bytes
+        data = repeat_string(data, size<<2)
+
+        # unpack
+        return struct.unpack(">%dI" % (size,), data)
+
+    #===================================================================
+    # blowfish routines
+    #===================================================================
+    def encipher(self, l, r):
+        """loop version of blowfish encipher routine"""
+        P, S = self.P, self.S
+        l ^= P[0]
+        i = 1
+        while i < 17:
+            # Feistel substitution on left word
+            r = ((((S[0][l >> 24] + S[1][(l >> 16) & 0xff]) ^ S[2][(l >> 8) & 0xff]) +
+                  S[3][l & 0xff]) & 0xffffffff) ^ P[i] ^ r
+            # swap vars so even rounds do Feistel substitution on right word
+            l, r = r, l
+            i += 1
+        return r ^ P[17], l
+
+    # NOTE: decipher is same as above, just with reversed(P) instead.
+
+    def expand(self, key_words):
+        """perform stock Blowfish keyschedule setup"""
+        assert len(key_words) >= 18, "key_words must be at least as large as P"
+        P, S, encipher = self.P, self.S, self.encipher
+
+        i = 0
+        while i < 18:
+            P[i] ^= key_words[i]
+            i += 1
+
+        i = l = r = 0
+        while i < 18:
+            P[i], P[i+1] = l,r = encipher(l,r)
+            i += 2
+
+        for box in S:
+            i = 0
+            while i < 256:
+                box[i], box[i+1] = l,r = encipher(l,r)
+                i += 2
+
+    #===================================================================
+    # eks-blowfish routines
+    #===================================================================
+    def eks_salted_expand(self, key_words, salt_words):
+        """perform EKS' salted version of Blowfish keyschedule setup"""
+        # NOTE: this is the same as expand(), except for the addition
+        #       of the operations involving *salt_words*.
+
+        assert len(key_words) >= 18, "key_words must be at least as large as P"
+        salt_size = len(salt_words)
+        assert salt_size, "salt_words must not be empty"
+        assert not salt_size & 1, "salt_words must have even length"
+        P, S, encipher = self.P, self.S, self.encipher
+
+        i = 0
+        while i < 18:
+            P[i] ^= key_words[i]
+            i += 1
+
+        s = i = l = r = 0
+        while i < 18:
+            l ^= salt_words[s]
+            r ^= salt_words[s+1]
+            s += 2
+            if s == salt_size:
+                s = 0
+            P[i], P[i+1] = l,r = encipher(l,r) # next()
+            i += 2
+
+        for box in S:
+            i = 0
+            while i < 256:
+                l ^= salt_words[s]
+                r ^= salt_words[s+1]
+                s += 2
+                if s == salt_size:
+                    s = 0
+                box[i], box[i+1] = l,r = encipher(l,r) # next()
+                i += 2
+
+    def eks_repeated_expand(self, key_words, salt_words, rounds):
+        """perform rounds stage of EKS keyschedule setup"""
+        expand = self.expand
+        n = 0
+        while n < rounds:
+            expand(key_words)
+            expand(salt_words)
+            n += 1
+
+    def repeat_encipher(self, l, r, count):
+        """repeatedly apply encipher operation to a block"""
+        encipher = self.encipher
+        n = 0
+        while n < count:
+            l, r = encipher(l, r)
+            n += 1
+        return l, r
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/unrolled.py b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/unrolled.py
new file mode 100644
index 0000000..4acf6e1
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/crypto/_blowfish/unrolled.py
@@ -0,0 +1,771 @@
+"""passlib.crypto._blowfish.unrolled - unrolled loop implementation of bcrypt,
+autogenerated by _gen_files.py
+
+currently this overrides the encipher() and expand()
methods +with optimized versions, and leaves the other base.py methods alone. +""" +#============================================================================= +# imports +#============================================================================= +# pkg +from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine +# local +__all__ = [ + "BlowfishEngine", +] +#============================================================================= +# +#============================================================================= +class BlowfishEngine(_BlowfishEngine): + + def encipher(self, l, r): + """blowfish encipher a single 64-bit block encoded as two 32-bit ints""" + + (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) = self.P + S0, S1, S2, S3 = self.S + + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + return r ^ p17, l + + def expand(self, key_words): + """unrolled version of blowfish key expansion""" + ##assert len(key_words) >= 18, "size of key_words must be >= 18" + + P, S = self.P, self.S + S0, S1, 
S2, S3 = S + + #============================================================= + # integrate key + #============================================================= + p0 = P[0] ^ key_words[0] + p1 = P[1] ^ key_words[1] + p2 = P[2] ^ key_words[2] + p3 = P[3] ^ key_words[3] + p4 = P[4] ^ key_words[4] + p5 = P[5] ^ key_words[5] + p6 = P[6] ^ key_words[6] + p7 = P[7] ^ key_words[7] + p8 = P[8] ^ key_words[8] + p9 = P[9] ^ key_words[9] + p10 = P[10] ^ key_words[10] + p11 = P[11] ^ key_words[11] + p12 = P[12] ^ key_words[12] + p13 = P[13] ^ key_words[13] + p14 = P[14] ^ key_words[14] + p15 = P[15] ^ key_words[15] + p16 = P[16] ^ key_words[16] + p17 = P[17] ^ key_words[17] + + #============================================================= + # update P + #============================================================= + + #------------------------------------------------ + # update P[0] and P[1] + #------------------------------------------------ + l, r = p0, 0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + p0, p1 = l, r = r ^ p17, l + + 
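+        # NOTE: each stanza below repeats the same 16 unrolled rounds, feeding
+        # the running (l, r) block back in to produce the next two P entries,
+        # and further down each successive pair of S-box entries.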
#------------------------------------------------ + # update P[2] and P[3] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p2, p3 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[4] and P[5] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ 
S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p4, p5 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[6] and P[7] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel 
substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p6, p7 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[8] and P[9] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 
16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p8, p9 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[10] and P[11] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p10, p11 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[12] and P[13] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 
0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p12, p13 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[14] and P[15] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= 
((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p14, p15 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[16] and P[17] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + 
S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p16, p17 = l, r = r ^ p17, l + + + #------------------------------------------------ + # save changes to original P array + #------------------------------------------------ + P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) + + #============================================================= + # update S + #============================================================= + + for box in S: + j = 0 + while j < 256: + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + box[j], box[j+1] = l, r = r ^ p17, l + j += 2 + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff 
--git a/venv/lib/python3.12/site-packages/passlib/crypto/_md4.py b/venv/lib/python3.12/site-packages/passlib/crypto/_md4.py
new file mode 100644
index 0000000..bdc211f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/crypto/_md4.py
@@ -0,0 +1,244 @@
+"""
+passlib.crypto._md4 -- fallback implementation of MD4
+
+Helper implementing the insecure and obsolete md4 algorithm.
+used for NTHASH format, which is also insecure and broken,
+since it's just md4(password).
+
+Implemented based on the RFC at http://www.faqs.org/rfcs/rfc1320.html
+
+.. note::
+
+    This shouldn't be imported directly, it's merely used conditionally
+    by ``passlib.crypto.lookup_hash()`` when a native implementation can't be found.
+"""
+
+#=============================================================================
+# imports
+#=============================================================================
+# core
+from binascii import hexlify
+import struct
+# site
+from passlib.utils.compat import bascii_to_str, irange, PY3
+# local
+__all__ = ["md4"]
+
+#=============================================================================
+# utils
+#=============================================================================
+def F(x,y,z):
+    return (x&y) | ((~x) & z)
+
+def G(x,y,z):
+    return (x&y) | (x&z) | (y&z)
+
+##def H(x,y,z):
+##    return x ^ y ^ z
+
+MASK_32 = 2**32-1
+
+#=============================================================================
+# main class
+#=============================================================================
+class md4(object):
+    """pep-247 compatible implementation of MD4 hash algorithm
+
+    .. attribute:: digest_size
+
+        size of md4 digest in bytes (16 bytes)
+
+    .. method:: update
+
+        update digest by appending additional content
+
+    .. method:: copy
+
+        create clone of digest object, including current state
+
+    .. method:: digest
+
+        return bytes representing md4 digest of current content
+
+    .. method:: hexdigest
+
+        return hexadecimal version of digest
+    """
+    # FIXME: make this follow hash object PEP better.
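+    # usage sketch (doctest-style, using the "abc" test vector from RFC 1320):
+    #
+    #   >>> md4(b"abc").hexdigest()
+    #   'a448017aaf21d8525fc10ae87aa6729d'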
+    # FIXME: this isn't threadsafe
+
+    name = "md4"
+    digest_size = digestsize = 16
+    block_size = 64
+
+    _count = 0 # number of 64-byte blocks processed so far (not including _buf)
+    _state = None # list of [a,b,c,d] 32 bit ints used as internal register
+    _buf = None # data processed in 64 byte blocks, this holds leftover from last update
+
+    def __init__(self, content=None):
+        self._count = 0
+        self._state = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476]
+        self._buf = b''
+        if content:
+            self.update(content)
+
+    # round 1 table - [abcd k s]
+    _round1 = [
+        [0,1,2,3, 0,3],
+        [3,0,1,2, 1,7],
+        [2,3,0,1, 2,11],
+        [1,2,3,0, 3,19],
+
+        [0,1,2,3, 4,3],
+        [3,0,1,2, 5,7],
+        [2,3,0,1, 6,11],
+        [1,2,3,0, 7,19],
+
+        [0,1,2,3, 8,3],
+        [3,0,1,2, 9,7],
+        [2,3,0,1, 10,11],
+        [1,2,3,0, 11,19],
+
+        [0,1,2,3, 12,3],
+        [3,0,1,2, 13,7],
+        [2,3,0,1, 14,11],
+        [1,2,3,0, 15,19],
+    ]
+
+    # round 2 table - [abcd k s]
+    _round2 = [
+        [0,1,2,3, 0,3],
+        [3,0,1,2, 4,5],
+        [2,3,0,1, 8,9],
+        [1,2,3,0, 12,13],
+
+        [0,1,2,3, 1,3],
+        [3,0,1,2, 5,5],
+        [2,3,0,1, 9,9],
+        [1,2,3,0, 13,13],
+
+        [0,1,2,3, 2,3],
+        [3,0,1,2, 6,5],
+        [2,3,0,1, 10,9],
+        [1,2,3,0, 14,13],
+
+        [0,1,2,3, 3,3],
+        [3,0,1,2, 7,5],
+        [2,3,0,1, 11,9],
+        [1,2,3,0, 15,13],
+    ]
+
+    # round 3 table - [abcd k s]
+    _round3 = [
+        [0,1,2,3, 0,3],
+        [3,0,1,2, 8,9],
+        [2,3,0,1, 4,11],
+        [1,2,3,0, 12,15],
+
+        [0,1,2,3, 2,3],
+        [3,0,1,2, 10,9],
+        [2,3,0,1, 6,11],
+        [1,2,3,0, 14,15],
+
+        [0,1,2,3, 1,3],
+        [3,0,1,2, 9,9],
+        [2,3,0,1, 5,11],
+        [1,2,3,0, 13,15],
+
+        [0,1,2,3, 3,3],
+        [3,0,1,2, 11,9],
+        [2,3,0,1, 7,11],
+        [1,2,3,0, 15,15],
+    ]
+
+    def _process(self, block):
+        """process 64 byte block"""
+        # unpack block into 16 32-bit ints
+        X = struct.unpack("<16I", block)
+
+        # clone state
+        orig = self._state
+        state = list(orig)
+
+        # round 1 - F function - (x&y)|(~x & z)
+        # (each step is a 32-bit left-rotate of t by s bits)
+        for a,b,c,d,k,s in self._round1:
+            t = (state[a] + F(state[b],state[c],state[d]) + X[k]) & MASK_32
+            state[a] = ((t<<s) & MASK_32) + (t>>(32-s))
+
+        # round 2 - G function
+        for a,b,c,d,k,s in self._round2:
+            t = (state[a] + G(state[b],state[c],state[d]) + X[k] + 0x5a827999) & MASK_32
+            state[a] = ((t<<s) & MASK_32) + (t>>(32-s))
+
+        # round 3 - H function - x ^ y ^ z
+        for a,b,c,d,k,s in self._round3:
+            t = (state[a] + (state[b] ^ state[c] ^ state[d]) + X[k] + 0x6ed9eba1) & MASK_32
+            state[a] = ((t<<s) & MASK_32) + (t>>(32-s))
+
+        # add back into original state
+        for i in irange(4):
+            orig[i] = (orig[i]+state[i]) & MASK_32
+
+    def update(self, content):
+        if not isinstance(content, bytes):
+            if PY3:
+                raise TypeError("expected bytes")
+            else:
+                # replicate behavior of hashlib under py2
+                content = content.encode("ascii")
+        buf = self._buf
+        if buf:
+            content = buf + content
+        idx = 0
+        end = len(content)
+        while True:
+            next = idx + 64
+            if next <= end:
+                self._process(content[idx:next])
+                self._count += 1
+                idx = next
+            else:
+                self._buf = content[idx:]
+                return
+
+    def copy(self):
+        other = md4()
+        other._count = self._count
+        other._state = list(self._state)
+        other._buf = self._buf
+        return other
+
+    def digest(self):
+        # NOTE: backing up state so we can restore it after _process is called,
+        #       in case object is updated again (this is only attr altered by this method)
+        orig = list(self._state)
+
+        # final block: buf + 0x80,
+        # then 0x00 padding until congruent w/ 56 mod 64 bytes,
+        # then last 8 bytes = msg length in bits
+        buf = self._buf
+        msglen = self._count*512 + len(buf)*8
+        block = buf + b'\x80' + b'\x00' * ((119-len(buf)) % 64) + \
+                struct.pack("<2I", msglen & MASK_32, (msglen>>32) & MASK_32)
+        if len(block) == 128:
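+            # (leftover buffer was > 55 bytes, so the 0x80 marker and length
+            # padding spilled into a second 64-byte block - process both)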
self._process(block[:64]) + self._process(block[64:]) + else: + assert len(block) == 64 + self._process(block) + + # render digest & restore un-finalized state + out = struct.pack("<4I", *self._state) + self._state = orig + return out + + def hexdigest(self): + return bascii_to_str(hexlify(self.digest())) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/des.py b/venv/lib/python3.12/site-packages/passlib/crypto/des.py new file mode 100644 index 0000000..3f87aef --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/crypto/des.py @@ -0,0 +1,848 @@ +"""passlib.crypto.des -- DES block encryption routines + +History +======= +These routines (which have since been drastically modified for python) +are based on a Java implementation of the des-crypt algorithm, +found at ``_. + +The copyright & license for that source is as follows:: + + UnixCrypt.java 0.9 96/11/25 + Copyright (c) 1996 Aki Yoshida. All rights reserved. + Permission to use, copy, modify and distribute this software + for non-commercial or commercial purposes and without fee is + hereby granted provided that this copyright notice appears in + all copies. + + --- + + Unix crypt(3C) utility + @version 0.9, 11/25/96 + @author Aki Yoshida + + --- + + modified April 2001 + by Iris Van den Broeke, Daniel Deville + + --- + Unix Crypt. + Implements the one way cryptography used by Unix systems for + simple password protection. + @version $Id: UnixCrypt2.txt,v 1.1.1.1 2005/09/13 22:20:13 christos Exp $ + @author Greg Wilkins (gregw) + +The netbsd des-crypt implementation has some nice notes on how this all works - + http://fxr.googlebit.com/source/lib/libcrypt/crypt.c?v=NETBSD-CURRENT +""" + +# TODO: could use an accelerated C version of this module to speed up lmhash, +# des-crypt, and ext-des-crypt + +#============================================================================= +# imports +#============================================================================= +# core +import struct +# pkg +from passlib import exc +from passlib.utils.compat import join_byte_values, byte_elem_value, \ + irange, irange, int_types +# local +__all__ = [ + "expand_des_key", + "des_encrypt_block", +] + +#============================================================================= +# constants +#============================================================================= + +# masks/upper limits for various integer sizes +INT_24_MASK = 0xffffff +INT_56_MASK = 0xffffffffffffff +INT_64_MASK = 0xffffffffffffffff + +# mask to clear parity bits from 64-bit key +_KDATA_MASK = 0xfefefefefefefefe +_KPARITY_MASK = 0x0101010101010101 + +# mask used to setup key schedule +_KS_MASK = 0xfcfcfcfcffffffff + +#============================================================================= +# static DES tables +#============================================================================= + +# placeholders filled in by _load_tables() +PCXROT = IE3264 = SPE = CF6464 = None + +def _load_tables(): + """delay loading tables until they are actually needed""" + global PCXROT, IE3264, SPE, CF6464 + + #--------------------------------------------------------------- + # Initial key schedule permutation + # PC1ROT - bit reverse, then PC1, then 
Rotate, then PC2 + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC1ROT=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000002000, 0x0000000000002000, + 0x0000000000000020, 0x0000000000000020, 0x0000000000002020, 0x0000000000002020, + 0x0000000000000400, 0x0000000000000400, 0x0000000000002400, 0x0000000000002400, + 0x0000000000000420, 0x0000000000000420, 0x0000000000002420, 0x0000000000002420, ), + ( 0x0000000000000000, 0x2000000000000000, 0x0000000400000000, 0x2000000400000000, + 0x0000800000000000, 0x2000800000000000, 0x0000800400000000, 0x2000800400000000, + 0x0008000000000000, 0x2008000000000000, 0x0008000400000000, 0x2008000400000000, + 0x0008800000000000, 0x2008800000000000, 0x0008800400000000, 0x2008800400000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000040, 0x0000000000000040, + 0x0000000020000000, 0x0000000020000000, 0x0000000020000040, 0x0000000020000040, + 0x0000000000200000, 0x0000000000200000, 0x0000000000200040, 0x0000000000200040, + 0x0000000020200000, 0x0000000020200000, 0x0000000020200040, 0x0000000020200040, ), + ( 0x0000000000000000, 0x0002000000000000, 0x0800000000000000, 0x0802000000000000, + 0x0100000000000000, 0x0102000000000000, 0x0900000000000000, 0x0902000000000000, + 0x4000000000000000, 0x4002000000000000, 0x4800000000000000, 0x4802000000000000, + 0x4100000000000000, 0x4102000000000000, 0x4900000000000000, 0x4902000000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000040000, 0x0000000000040000, + 0x0000020000000000, 0x0000020000000000, 0x0000020000040000, 0x0000020000040000, + 0x0000000000000004, 0x0000000000000004, 0x0000000000040004, 0x0000000000040004, + 0x0000020000000004, 0x0000020000000004, 0x0000020000040004, 0x0000020000040004, ), + ( 0x0000000000000000, 0x0000400000000000, 0x0200000000000000, 0x0200400000000000, + 0x0080000000000000, 0x0080400000000000, 0x0280000000000000, 0x0280400000000000, + 0x0000008000000000, 0x0000408000000000, 0x0200008000000000, 0x0200408000000000, + 0x0080008000000000, 0x0080408000000000, 0x0280008000000000, 0x0280408000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000010000000, 0x0000000010000000, + 0x0000000000001000, 0x0000000000001000, 0x0000000010001000, 0x0000000010001000, + 0x0000000040000000, 0x0000000040000000, 0x0000000050000000, 0x0000000050000000, + 0x0000000040001000, 0x0000000040001000, 0x0000000050001000, 0x0000000050001000, ), + ( 0x0000000000000000, 0x0000001000000000, 0x0000080000000000, 0x0000081000000000, + 0x1000000000000000, 0x1000001000000000, 0x1000080000000000, 0x1000081000000000, + 0x0004000000000000, 0x0004001000000000, 0x0004080000000000, 0x0004081000000000, + 0x1004000000000000, 0x1004001000000000, 0x1004080000000000, 0x1004081000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000080, 0x0000000000000080, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080080, 0x0000000000080080, + 0x0000000000800000, 0x0000000000800000, 0x0000000000800080, 0x0000000000800080, + 0x0000000000880000, 0x0000000000880000, 0x0000000000880080, 0x0000000000880080, ), + ( 0x0000000000000000, 0x0000000008000000, 0x0000002000000000, 0x0000002008000000, + 0x0000100000000000, 0x0000100008000000, 0x0000102000000000, 0x0000102008000000, + 0x0000200000000000, 0x0000200008000000, 0x0000202000000000, 0x0000202008000000, + 0x0000300000000000, 0x0000300008000000, 0x0000302000000000, 0x0000302008000000, ), + ( 0x0000000000000000, 0x0000000000000000, 
0x0000000000400000, 0x0000000000400000, + 0x0000000004000000, 0x0000000004000000, 0x0000000004400000, 0x0000000004400000, + 0x0000000000000800, 0x0000000000000800, 0x0000000000400800, 0x0000000000400800, + 0x0000000004000800, 0x0000000004000800, 0x0000000004400800, 0x0000000004400800, ), + ( 0x0000000000000000, 0x0000000000008000, 0x0040000000000000, 0x0040000000008000, + 0x0000004000000000, 0x0000004000008000, 0x0040004000000000, 0x0040004000008000, + 0x8000000000000000, 0x8000000000008000, 0x8040000000000000, 0x8040000000008000, + 0x8000004000000000, 0x8000004000008000, 0x8040004000000000, 0x8040004000008000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000004000, 0x0000000000004000, + 0x0000000000000008, 0x0000000000000008, 0x0000000000004008, 0x0000000000004008, + 0x0000000000000010, 0x0000000000000010, 0x0000000000004010, 0x0000000000004010, + 0x0000000000000018, 0x0000000000000018, 0x0000000000004018, 0x0000000000004018, ), + ( 0x0000000000000000, 0x0000000200000000, 0x0001000000000000, 0x0001000200000000, + 0x0400000000000000, 0x0400000200000000, 0x0401000000000000, 0x0401000200000000, + 0x0020000000000000, 0x0020000200000000, 0x0021000000000000, 0x0021000200000000, + 0x0420000000000000, 0x0420000200000000, 0x0421000000000000, 0x0421000200000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000010000000000, 0x0000010000000000, + 0x0000000100000000, 0x0000000100000000, 0x0000010100000000, 0x0000010100000000, + 0x0000000000100000, 0x0000000000100000, 0x0000010000100000, 0x0000010000100000, + 0x0000000100100000, 0x0000000100100000, 0x0000010100100000, 0x0000010100100000, ), + ( 0x0000000000000000, 0x0000000080000000, 0x0000040000000000, 0x0000040080000000, + 0x0010000000000000, 0x0010000080000000, 0x0010040000000000, 0x0010040080000000, + 0x0000000800000000, 0x0000000880000000, 0x0000040800000000, 0x0000040880000000, + 0x0010000800000000, 0x0010000880000000, 0x0010040800000000, 0x0010040880000000, ), + ) + #--------------------------------------------------------------- + # Subsequent key schedule rotation permutations + # PC2ROT - PC2 inverse, then Rotate, then PC2 + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC2ROTA=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, + 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, + 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, ), + ( 0x0000000000000000, 0x0000000000000800, 0x0000010000000000, 0x0000010000000800, + 0x0000000000002000, 0x0000000000002800, 0x0000010000002000, 0x0000010000002800, + 0x0000000010000000, 0x0000000010000800, 0x0000010010000000, 0x0000010010000800, + 0x0000000010002000, 0x0000000010002800, 0x0000010010002000, 0x0000010010002800, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, + 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, + 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, ), + ( 0x0000000000000000, 0x0000020000000000, 0x0000000080000000, 0x0000020080000000, + 0x0000000000400000, 0x0000020000400000, 0x0000000080400000, 0x0000020080400000, + 0x0000000008000000, 0x0000020008000000, 0x0000000088000000, 0x0000020088000000, + 0x0000000008400000, 
0x0000020008400000, 0x0000000088400000, 0x0000020088400000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, + 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, + 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, ), + ( 0x0000000000000000, 0x0000000000000010, 0x0000000000000400, 0x0000000000000410, + 0x0000000000000080, 0x0000000000000090, 0x0000000000000480, 0x0000000000000490, + 0x0000000040000000, 0x0000000040000010, 0x0000000040000400, 0x0000000040000410, + 0x0000000040000080, 0x0000000040000090, 0x0000000040000480, 0x0000000040000490, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, + 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, + 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, ), + ( 0x0000000000000000, 0x0000000000040000, 0x0000000000000020, 0x0000000000040020, + 0x0000000000000004, 0x0000000000040004, 0x0000000000000024, 0x0000000000040024, + 0x0000000200000000, 0x0000000200040000, 0x0000000200000020, 0x0000000200040020, + 0x0000000200000004, 0x0000000200040004, 0x0000000200000024, 0x0000000200040024, ), + ( 0x0000000000000000, 0x0000000000000008, 0x0000000000008000, 0x0000000000008008, + 0x0010000000000000, 0x0010000000000008, 0x0010000000008000, 0x0010000000008008, + 0x0020000000000000, 0x0020000000000008, 0x0020000000008000, 0x0020000000008008, + 0x0030000000000000, 0x0030000000000008, 0x0030000000008000, 0x0030000000008008, ), + ( 0x0000000000000000, 0x0000400000000000, 0x0000080000000000, 0x0000480000000000, + 0x0000100000000000, 0x0000500000000000, 0x0000180000000000, 0x0000580000000000, + 0x4000000000000000, 0x4000400000000000, 0x4000080000000000, 0x4000480000000000, + 0x4000100000000000, 0x4000500000000000, 0x4000180000000000, 0x4000580000000000, ), + ( 0x0000000000000000, 0x0000000000004000, 0x0000000020000000, 0x0000000020004000, + 0x0001000000000000, 0x0001000000004000, 0x0001000020000000, 0x0001000020004000, + 0x0200000000000000, 0x0200000000004000, 0x0200000020000000, 0x0200000020004000, + 0x0201000000000000, 0x0201000000004000, 0x0201000020000000, 0x0201000020004000, ), + ( 0x0000000000000000, 0x1000000000000000, 0x0004000000000000, 0x1004000000000000, + 0x0002000000000000, 0x1002000000000000, 0x0006000000000000, 0x1006000000000000, + 0x0000000800000000, 0x1000000800000000, 0x0004000800000000, 0x1004000800000000, + 0x0002000800000000, 0x1002000800000000, 0x0006000800000000, 0x1006000800000000, ), + ( 0x0000000000000000, 0x0040000000000000, 0x2000000000000000, 0x2040000000000000, + 0x0000008000000000, 0x0040008000000000, 0x2000008000000000, 0x2040008000000000, + 0x0000001000000000, 0x0040001000000000, 0x2000001000000000, 0x2040001000000000, + 0x0000009000000000, 0x0040009000000000, 0x2000009000000000, 0x2040009000000000, ), + ( 0x0000000000000000, 0x0400000000000000, 0x8000000000000000, 0x8400000000000000, + 0x0000002000000000, 0x0400002000000000, 0x8000002000000000, 0x8400002000000000, + 0x0100000000000000, 0x0500000000000000, 0x8100000000000000, 0x8500000000000000, + 0x0100002000000000, 0x0500002000000000, 0x8100002000000000, 0x8500002000000000, ), + ( 0x0000000000000000, 0x0000800000000000, 0x0800000000000000, 0x0800800000000000, + 0x0000004000000000, 0x0000804000000000, 0x0800004000000000, 
0x0800804000000000, + 0x0000000400000000, 0x0000800400000000, 0x0800000400000000, 0x0800800400000000, + 0x0000004400000000, 0x0000804400000000, 0x0800004400000000, 0x0800804400000000, ), + ( 0x0000000000000000, 0x0080000000000000, 0x0000040000000000, 0x0080040000000000, + 0x0008000000000000, 0x0088000000000000, 0x0008040000000000, 0x0088040000000000, + 0x0000200000000000, 0x0080200000000000, 0x0000240000000000, 0x0080240000000000, + 0x0008200000000000, 0x0088200000000000, 0x0008240000000000, 0x0088240000000000, ), + ) + + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC2ROTB=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, + 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, ), + ( 0x0000000000000000, 0x0000000000800000, 0x0000000000004000, 0x0000000000804000, + 0x0000000080000000, 0x0000000080800000, 0x0000000080004000, 0x0000000080804000, + 0x0000000000040000, 0x0000000000840000, 0x0000000000044000, 0x0000000000844000, + 0x0000000080040000, 0x0000000080840000, 0x0000000080044000, 0x0000000080844000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, + 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, + 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, ), + ( 0x0000000000000000, 0x0000000020000000, 0x0000000200000000, 0x0000000220000000, + 0x0000000000000080, 0x0000000020000080, 0x0000000200000080, 0x0000000220000080, + 0x0000000000100000, 0x0000000020100000, 0x0000000200100000, 0x0000000220100000, + 0x0000000000100080, 0x0000000020100080, 0x0000000200100080, 0x0000000220100080, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, + 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, + 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, ), + ( 0x0000000000000000, 0x0000000000000800, 0x0000000100000000, 0x0000000100000800, + 0x0000000010000000, 0x0000000010000800, 0x0000000110000000, 0x0000000110000800, + 0x0000000000000004, 0x0000000000000804, 0x0000000100000004, 0x0000000100000804, + 0x0000000010000004, 0x0000000010000804, 0x0000000110000004, 0x0000000110000804, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, + 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, + 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, ), + ( 0x0000000000000000, 0x0000000000000040, 0x0000010000000000, 0x0000010000000040, + 0x0000000000200000, 0x0000000000200040, 0x0000010000200000, 0x0000010000200040, + 0x0000000000008000, 0x0000000000008040, 0x0000010000008000, 0x0000010000008040, + 0x0000000000208000, 0x0000000000208040, 0x0000010000208000, 0x0000010000208040, ), + ( 0x0000000000000000, 0x0000000004000000, 0x0000000008000000, 0x000000000c000000, + 0x0400000000000000, 0x0400000004000000, 0x0400000008000000, 0x040000000c000000, + 0x8000000000000000, 0x8000000004000000, 0x8000000008000000, 0x800000000c000000, + 0x8400000000000000, 
0x8400000004000000, 0x8400000008000000, 0x840000000c000000, ), + ( 0x0000000000000000, 0x0002000000000000, 0x0200000000000000, 0x0202000000000000, + 0x1000000000000000, 0x1002000000000000, 0x1200000000000000, 0x1202000000000000, + 0x0008000000000000, 0x000a000000000000, 0x0208000000000000, 0x020a000000000000, + 0x1008000000000000, 0x100a000000000000, 0x1208000000000000, 0x120a000000000000, ), + ( 0x0000000000000000, 0x0000000000400000, 0x0000000000000020, 0x0000000000400020, + 0x0040000000000000, 0x0040000000400000, 0x0040000000000020, 0x0040000000400020, + 0x0800000000000000, 0x0800000000400000, 0x0800000000000020, 0x0800000000400020, + 0x0840000000000000, 0x0840000000400000, 0x0840000000000020, 0x0840000000400020, ), + ( 0x0000000000000000, 0x0080000000000000, 0x0000008000000000, 0x0080008000000000, + 0x2000000000000000, 0x2080000000000000, 0x2000008000000000, 0x2080008000000000, + 0x0020000000000000, 0x00a0000000000000, 0x0020008000000000, 0x00a0008000000000, + 0x2020000000000000, 0x20a0000000000000, 0x2020008000000000, 0x20a0008000000000, ), + ( 0x0000000000000000, 0x0000002000000000, 0x0000040000000000, 0x0000042000000000, + 0x4000000000000000, 0x4000002000000000, 0x4000040000000000, 0x4000042000000000, + 0x0000400000000000, 0x0000402000000000, 0x0000440000000000, 0x0000442000000000, + 0x4000400000000000, 0x4000402000000000, 0x4000440000000000, 0x4000442000000000, ), + ( 0x0000000000000000, 0x0000004000000000, 0x0000200000000000, 0x0000204000000000, + 0x0000080000000000, 0x0000084000000000, 0x0000280000000000, 0x0000284000000000, + 0x0000800000000000, 0x0000804000000000, 0x0000a00000000000, 0x0000a04000000000, + 0x0000880000000000, 0x0000884000000000, 0x0000a80000000000, 0x0000a84000000000, ), + ( 0x0000000000000000, 0x0000000800000000, 0x0000000400000000, 0x0000000c00000000, + 0x0000100000000000, 0x0000100800000000, 0x0000100400000000, 0x0000100c00000000, + 0x0010000000000000, 0x0010000800000000, 0x0010000400000000, 0x0010000c00000000, + 0x0010100000000000, 0x0010100800000000, 0x0010100400000000, 0x0010100c00000000, ), + ( 0x0000000000000000, 0x0100000000000000, 0x0001000000000000, 0x0101000000000000, + 0x0000001000000000, 0x0100001000000000, 0x0001001000000000, 0x0101001000000000, + 0x0004000000000000, 0x0104000000000000, 0x0005000000000000, 0x0105000000000000, + 0x0004001000000000, 0x0104001000000000, 0x0005001000000000, 0x0105001000000000, ), + ) + #--------------------------------------------------------------- + # PCXROT - PC1ROT, PC2ROTA, PC2ROTB listed in order + # of the PC1 rotation schedule, as used by des_setkey + #--------------------------------------------------------------- + ##ROTATES = (1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1) + ##PCXROT = ( + ## PC1ROT, PC2ROTA, PC2ROTB, PC2ROTB, + ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTB, + ## PC2ROTA, PC2ROTB, PC2ROTB, PC2ROTB, + ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTA, + ## ) + + # NOTE: modified PCXROT to contain entrys broken into pairs, + # to help generate them in format best used by encoder. 
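+    # For reference, a minimal sketch of how these (even, odd) pairs get
+    # consumed (this mirrors _iter_key_schedule() inside des_encrypt_int_block()
+    # below; ks_odd starts out as the raw 64-bit key):
+    ##for p_even, p_odd in PCXROT:
+    ##    ks_even = _permute(ks_odd, p_even)
+    ##    ks_odd = _permute(ks_even, p_odd)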
+ PCXROT = ( + (PC1ROT, PC2ROTA), (PC2ROTB, PC2ROTB), + (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTB), + (PC2ROTA, PC2ROTB), (PC2ROTB, PC2ROTB), + (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTA), + ) + + #--------------------------------------------------------------- + # Bit reverse, intial permupation, expantion + # Initial permutation/expansion table + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + IE3264=( + ( 0x0000000000000000, 0x0000000000800800, 0x0000000000008008, 0x0000000000808808, + 0x0000008008000000, 0x0000008008800800, 0x0000008008008008, 0x0000008008808808, + 0x0000000080080000, 0x0000000080880800, 0x0000000080088008, 0x0000000080888808, + 0x0000008088080000, 0x0000008088880800, 0x0000008088088008, 0x0000008088888808, ), + ( 0x0000000000000000, 0x0080080000000000, 0x0000800800000000, 0x0080880800000000, + 0x0800000000000080, 0x0880080000000080, 0x0800800800000080, 0x0880880800000080, + 0x8008000000000000, 0x8088080000000000, 0x8008800800000000, 0x8088880800000000, + 0x8808000000000080, 0x8888080000000080, 0x8808800800000080, 0x8888880800000080, ), + ( 0x0000000000000000, 0x0000000000001000, 0x0000000000000010, 0x0000000000001010, + 0x0000000010000000, 0x0000000010001000, 0x0000000010000010, 0x0000000010001010, + 0x0000000000100000, 0x0000000000101000, 0x0000000000100010, 0x0000000000101010, + 0x0000000010100000, 0x0000000010101000, 0x0000000010100010, 0x0000000010101010, ), + ( 0x0000000000000000, 0x0000100000000000, 0x0000001000000000, 0x0000101000000000, + 0x1000000000000000, 0x1000100000000000, 0x1000001000000000, 0x1000101000000000, + 0x0010000000000000, 0x0010100000000000, 0x0010001000000000, 0x0010101000000000, + 0x1010000000000000, 0x1010100000000000, 0x1010001000000000, 0x1010101000000000, ), + ( 0x0000000000000000, 0x0000000000002000, 0x0000000000000020, 0x0000000000002020, + 0x0000000020000000, 0x0000000020002000, 0x0000000020000020, 0x0000000020002020, + 0x0000000000200000, 0x0000000000202000, 0x0000000000200020, 0x0000000000202020, + 0x0000000020200000, 0x0000000020202000, 0x0000000020200020, 0x0000000020202020, ), + ( 0x0000000000000000, 0x0000200000000000, 0x0000002000000000, 0x0000202000000000, + 0x2000000000000000, 0x2000200000000000, 0x2000002000000000, 0x2000202000000000, + 0x0020000000000000, 0x0020200000000000, 0x0020002000000000, 0x0020202000000000, + 0x2020000000000000, 0x2020200000000000, 0x2020002000000000, 0x2020202000000000, ), + ( 0x0000000000000000, 0x0000000000004004, 0x0400000000000040, 0x0400000000004044, + 0x0000000040040000, 0x0000000040044004, 0x0400000040040040, 0x0400000040044044, + 0x0000000000400400, 0x0000000000404404, 0x0400000000400440, 0x0400000000404444, + 0x0000000040440400, 0x0000000040444404, 0x0400000040440440, 0x0400000040444444, ), + ( 0x0000000000000000, 0x0000400400000000, 0x0000004004000000, 0x0000404404000000, + 0x4004000000000000, 0x4004400400000000, 0x4004004004000000, 0x4004404404000000, + 0x0040040000000000, 0x0040440400000000, 0x0040044004000000, 0x0040444404000000, + 0x4044040000000000, 0x4044440400000000, 0x4044044004000000, 0x4044444404000000, ), + ) + + #--------------------------------------------------------------- + # Table that combines the S, P, and E operations. 
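+    # Each of the 8 sub-tables below maps a 6-bit S-box input to its 64-bit
+    # output with the P permutation and E expansion already applied, so one
+    # DES round reduces to eight lookups XORed together.  A rough sketch of
+    # that round function (B is the 64-bit round input; the real loop lives
+    # in des_encrypt_int_block() below):
+    ##f_out = (SPE[0][(B>>58)&0x3f] ^ SPE[1][(B>>50)&0x3f] ^
+    ##         SPE[2][(B>>42)&0x3f] ^ SPE[3][(B>>34)&0x3f] ^
+    ##         SPE[4][(B>>26)&0x3f] ^ SPE[5][(B>>18)&0x3f] ^
+    ##         SPE[6][(B>>10)&0x3f] ^ SPE[7][(B>>2)&0x3f])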
+ #--------------------------------------------------------------- + SPE=( + ( 0x0080088008200000, 0x0000008008000000, 0x0000000000200020, 0x0080088008200020, + 0x0000000000200000, 0x0080088008000020, 0x0000008008000020, 0x0000000000200020, + 0x0080088008000020, 0x0080088008200000, 0x0000008008200000, 0x0080080000000020, + 0x0080080000200020, 0x0000000000200000, 0x0000000000000000, 0x0000008008000020, + 0x0000008008000000, 0x0000000000000020, 0x0080080000200000, 0x0080088008000000, + 0x0080088008200020, 0x0000008008200000, 0x0080080000000020, 0x0080080000200000, + 0x0000000000000020, 0x0080080000000000, 0x0080088008000000, 0x0000008008200020, + 0x0080080000000000, 0x0080080000200020, 0x0000008008200020, 0x0000000000000000, + 0x0000000000000000, 0x0080088008200020, 0x0080080000200000, 0x0000008008000020, + 0x0080088008200000, 0x0000008008000000, 0x0080080000000020, 0x0080080000200000, + 0x0000008008200020, 0x0080080000000000, 0x0080088008000000, 0x0000000000200020, + 0x0080088008000020, 0x0000000000000020, 0x0000000000200020, 0x0000008008200000, + 0x0080088008200020, 0x0080088008000000, 0x0000008008200000, 0x0080080000200020, + 0x0000000000200000, 0x0080080000000020, 0x0000008008000020, 0x0000000000000000, + 0x0000008008000000, 0x0000000000200000, 0x0080080000200020, 0x0080088008200000, + 0x0000000000000020, 0x0000008008200020, 0x0080080000000000, 0x0080088008000020, ), + ( 0x1000800810004004, 0x0000000000000000, 0x0000800810000000, 0x0000000010004004, + 0x1000000000004004, 0x1000800800000000, 0x0000800800004004, 0x0000800810000000, + 0x0000800800000000, 0x1000000010004004, 0x1000000000000000, 0x0000800800004004, + 0x1000000010000000, 0x0000800810004004, 0x0000000010004004, 0x1000000000000000, + 0x0000000010000000, 0x1000800800004004, 0x1000000010004004, 0x0000800800000000, + 0x1000800810000000, 0x0000000000004004, 0x0000000000000000, 0x1000000010000000, + 0x1000800800004004, 0x1000800810000000, 0x0000800810004004, 0x1000000000004004, + 0x0000000000004004, 0x0000000010000000, 0x1000800800000000, 0x1000800810004004, + 0x1000000010000000, 0x0000800810004004, 0x0000800800004004, 0x1000800810000000, + 0x1000800810004004, 0x1000000010000000, 0x1000000000004004, 0x0000000000000000, + 0x0000000000004004, 0x1000800800000000, 0x0000000010000000, 0x1000000010004004, + 0x0000800800000000, 0x0000000000004004, 0x1000800810000000, 0x1000800800004004, + 0x0000800810004004, 0x0000800800000000, 0x0000000000000000, 0x1000000000004004, + 0x1000000000000000, 0x1000800810004004, 0x0000800810000000, 0x0000000010004004, + 0x1000000010004004, 0x0000000010000000, 0x1000800800000000, 0x0000800800004004, + 0x1000800800004004, 0x1000000000000000, 0x0000000010004004, 0x0000800810000000, ), + ( 0x0000000000400410, 0x0010004004400400, 0x0010000000000000, 0x0010000000400410, + 0x0000004004000010, 0x0000000000400400, 0x0010000000400410, 0x0010004004000000, + 0x0010000000400400, 0x0000004004000000, 0x0000004004400400, 0x0000000000000010, + 0x0010004004400410, 0x0010000000000010, 0x0000000000000010, 0x0000004004400410, + 0x0000000000000000, 0x0000004004000010, 0x0010004004400400, 0x0010000000000000, + 0x0010000000000010, 0x0010004004400410, 0x0000004004000000, 0x0000000000400410, + 0x0000004004400410, 0x0010000000400400, 0x0010004004000010, 0x0000004004400400, + 0x0010004004000000, 0x0000000000000000, 0x0000000000400400, 0x0010004004000010, + 0x0010004004400400, 0x0010000000000000, 0x0000000000000010, 0x0000004004000000, + 0x0010000000000010, 0x0000004004000010, 0x0000004004400400, 0x0010000000400410, + 0x0000000000000000, 
0x0010004004400400, 0x0010004004000000, 0x0000004004400410, + 0x0000004004000010, 0x0000000000400400, 0x0010004004400410, 0x0000000000000010, + 0x0010004004000010, 0x0000000000400410, 0x0000000000400400, 0x0010004004400410, + 0x0000004004000000, 0x0010000000400400, 0x0010000000400410, 0x0010004004000000, + 0x0010000000400400, 0x0000000000000000, 0x0000004004400410, 0x0010000000000010, + 0x0000000000400410, 0x0010004004000010, 0x0010000000000000, 0x0000004004400400, ), + ( 0x0800100040040080, 0x0000100000001000, 0x0800000000000080, 0x0800100040041080, + 0x0000000000000000, 0x0000000040041000, 0x0800100000001080, 0x0800000040040080, + 0x0000100040041000, 0x0800000000001080, 0x0000000000001000, 0x0800100000000080, + 0x0800000000001080, 0x0800100040040080, 0x0000000040040000, 0x0000000000001000, + 0x0800000040041080, 0x0000100040040000, 0x0000100000000000, 0x0800000000000080, + 0x0000100040040000, 0x0800100000001080, 0x0000000040041000, 0x0000100000000000, + 0x0800100000000080, 0x0000000000000000, 0x0800000040040080, 0x0000100040041000, + 0x0000100000001000, 0x0800000040041080, 0x0800100040041080, 0x0000000040040000, + 0x0800000040041080, 0x0800100000000080, 0x0000000040040000, 0x0800000000001080, + 0x0000100040040000, 0x0000100000001000, 0x0800000000000080, 0x0000000040041000, + 0x0800100000001080, 0x0000000000000000, 0x0000100000000000, 0x0800000040040080, + 0x0000000000000000, 0x0800000040041080, 0x0000100040041000, 0x0000100000000000, + 0x0000000000001000, 0x0800100040041080, 0x0800100040040080, 0x0000000040040000, + 0x0800100040041080, 0x0800000000000080, 0x0000100000001000, 0x0800100040040080, + 0x0800000040040080, 0x0000100040040000, 0x0000000040041000, 0x0800100000001080, + 0x0800100000000080, 0x0000000000001000, 0x0800000000001080, 0x0000100040041000, ), + ( 0x0000000000800800, 0x0000001000000000, 0x0040040000000000, 0x2040041000800800, + 0x2000001000800800, 0x0040040000800800, 0x2040041000000000, 0x0000001000800800, + 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040041000000000, + 0x2040040000800800, 0x2000001000800800, 0x0040041000800800, 0x0000000000000000, + 0x0040041000000000, 0x0000000000800800, 0x2000001000000000, 0x2040040000000000, + 0x0040040000800800, 0x2040041000000000, 0x0000000000000000, 0x2000000000800800, + 0x2000000000000000, 0x2040040000800800, 0x2040041000800800, 0x2000001000000000, + 0x0000001000800800, 0x0040040000000000, 0x2040040000000000, 0x0040041000800800, + 0x0040041000800800, 0x2040040000800800, 0x2000001000000000, 0x0000001000800800, + 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040040000800800, + 0x0000000000800800, 0x0040041000000000, 0x2040041000800800, 0x0000000000000000, + 0x2040041000000000, 0x0000000000800800, 0x0040040000000000, 0x2000001000000000, + 0x2040040000800800, 0x0040040000000000, 0x0000000000000000, 0x2040041000800800, + 0x2000001000800800, 0x0040041000800800, 0x2040040000000000, 0x0000001000000000, + 0x0040041000000000, 0x2000001000800800, 0x0040040000800800, 0x2040040000000000, + 0x2000000000000000, 0x2040041000000000, 0x0000001000800800, 0x2000000000800800, ), + ( 0x4004000000008008, 0x4004000020000000, 0x0000000000000000, 0x0000200020008008, + 0x4004000020000000, 0x0000200000000000, 0x4004200000008008, 0x0000000020000000, + 0x4004200000000000, 0x4004200020008008, 0x0000200020000000, 0x0000000000008008, + 0x0000200000008008, 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, + 0x0000000020000000, 0x4004200000008008, 0x4004000020008008, 0x0000000000000000, + 0x0000200000000000, 
0x4004000000000000, 0x0000200020008008, 0x4004000020008008, + 0x4004200020008008, 0x0000000020008008, 0x0000000000008008, 0x4004200000000000, + 0x4004000000000000, 0x0000200020000000, 0x4004200020000000, 0x0000200000008008, + 0x4004200000000000, 0x0000000000008008, 0x0000200000008008, 0x4004200020000000, + 0x0000200020008008, 0x4004000020000000, 0x0000000000000000, 0x0000200000008008, + 0x0000000000008008, 0x0000200000000000, 0x4004000020008008, 0x0000000020000000, + 0x4004000020000000, 0x4004200020008008, 0x0000200020000000, 0x4004000000000000, + 0x4004200020008008, 0x0000200020000000, 0x0000000020000000, 0x4004200000008008, + 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, 0x0000000000000000, + 0x0000200000000000, 0x4004000000008008, 0x4004200000008008, 0x0000200020008008, + 0x0000000020008008, 0x4004200000000000, 0x4004000000000000, 0x4004000020008008, ), + ( 0x0000400400000000, 0x0020000000000000, 0x0020000000100000, 0x0400000000100040, + 0x0420400400100040, 0x0400400400000040, 0x0020400400000000, 0x0000000000000000, + 0x0000000000100000, 0x0420000000100040, 0x0420000000000040, 0x0000400400100000, + 0x0400000000000040, 0x0020400400100000, 0x0000400400100000, 0x0420000000000040, + 0x0420000000100040, 0x0000400400000000, 0x0400400400000040, 0x0420400400100040, + 0x0000000000000000, 0x0020000000100000, 0x0400000000100040, 0x0020400400000000, + 0x0400400400100040, 0x0420400400000040, 0x0020400400100000, 0x0400000000000040, + 0x0420400400000040, 0x0400400400100040, 0x0020000000000000, 0x0000000000100000, + 0x0420400400000040, 0x0000400400100000, 0x0400400400100040, 0x0420000000000040, + 0x0000400400000000, 0x0020000000000000, 0x0000000000100000, 0x0400400400100040, + 0x0420000000100040, 0x0420400400000040, 0x0020400400000000, 0x0000000000000000, + 0x0020000000000000, 0x0400000000100040, 0x0400000000000040, 0x0020000000100000, + 0x0000000000000000, 0x0420000000100040, 0x0020000000100000, 0x0020400400000000, + 0x0420000000000040, 0x0000400400000000, 0x0420400400100040, 0x0000000000100000, + 0x0020400400100000, 0x0400000000000040, 0x0400400400000040, 0x0420400400100040, + 0x0400000000100040, 0x0020400400100000, 0x0000400400100000, 0x0400400400000040, ), + ( 0x8008000080082000, 0x0000002080082000, 0x8008002000000000, 0x0000000000000000, + 0x0000002000002000, 0x8008000080080000, 0x0000000080082000, 0x8008002080082000, + 0x8008000000000000, 0x0000000000002000, 0x0000002080080000, 0x8008002000000000, + 0x8008002080080000, 0x8008002000002000, 0x8008000000002000, 0x0000000080082000, + 0x0000002000000000, 0x8008002080080000, 0x8008000080080000, 0x0000002000002000, + 0x8008002080082000, 0x8008000000002000, 0x0000000000000000, 0x0000002080080000, + 0x0000000000002000, 0x0000000080080000, 0x8008002000002000, 0x8008000080082000, + 0x0000000080080000, 0x0000002000000000, 0x0000002080082000, 0x8008000000000000, + 0x0000000080080000, 0x0000002000000000, 0x8008000000002000, 0x8008002080082000, + 0x8008002000000000, 0x0000000000002000, 0x0000000000000000, 0x0000002080080000, + 0x8008000080082000, 0x8008002000002000, 0x0000002000002000, 0x8008000080080000, + 0x0000002080082000, 0x8008000000000000, 0x8008000080080000, 0x0000002000002000, + 0x8008002080082000, 0x0000000080080000, 0x0000000080082000, 0x8008000000002000, + 0x0000002080080000, 0x8008002000000000, 0x8008002000002000, 0x0000000080082000, + 0x8008000000000000, 0x0000002080082000, 0x8008002080080000, 0x0000000000000000, + 0x0000000000002000, 0x8008000080082000, 0x0000002000000000, 0x8008002080080000, ), + ) + + 
#--------------------------------------------------------------- + # compressed/interleaved => final permutation table + # Compression, final permutation, bit reverse + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm6464 logic simpler + CF6464=( + ( 0x0000000000000000, 0x0000002000000000, 0x0000200000000000, 0x0000202000000000, + 0x0020000000000000, 0x0020002000000000, 0x0020200000000000, 0x0020202000000000, + 0x2000000000000000, 0x2000002000000000, 0x2000200000000000, 0x2000202000000000, + 0x2020000000000000, 0x2020002000000000, 0x2020200000000000, 0x2020202000000000, ), + ( 0x0000000000000000, 0x0000000200000000, 0x0000020000000000, 0x0000020200000000, + 0x0002000000000000, 0x0002000200000000, 0x0002020000000000, 0x0002020200000000, + 0x0200000000000000, 0x0200000200000000, 0x0200020000000000, 0x0200020200000000, + 0x0202000000000000, 0x0202000200000000, 0x0202020000000000, 0x0202020200000000, ), + ( 0x0000000000000000, 0x0000000000000020, 0x0000000000002000, 0x0000000000002020, + 0x0000000000200000, 0x0000000000200020, 0x0000000000202000, 0x0000000000202020, + 0x0000000020000000, 0x0000000020000020, 0x0000000020002000, 0x0000000020002020, + 0x0000000020200000, 0x0000000020200020, 0x0000000020202000, 0x0000000020202020, ), + ( 0x0000000000000000, 0x0000000000000002, 0x0000000000000200, 0x0000000000000202, + 0x0000000000020000, 0x0000000000020002, 0x0000000000020200, 0x0000000000020202, + 0x0000000002000000, 0x0000000002000002, 0x0000000002000200, 0x0000000002000202, + 0x0000000002020000, 0x0000000002020002, 0x0000000002020200, 0x0000000002020202, ), + ( 0x0000000000000000, 0x0000008000000000, 0x0000800000000000, 0x0000808000000000, + 0x0080000000000000, 0x0080008000000000, 0x0080800000000000, 0x0080808000000000, + 0x8000000000000000, 0x8000008000000000, 0x8000800000000000, 0x8000808000000000, + 0x8080000000000000, 0x8080008000000000, 0x8080800000000000, 0x8080808000000000, ), + ( 0x0000000000000000, 0x0000000800000000, 0x0000080000000000, 0x0000080800000000, + 0x0008000000000000, 0x0008000800000000, 0x0008080000000000, 0x0008080800000000, + 0x0800000000000000, 0x0800000800000000, 0x0800080000000000, 0x0800080800000000, + 0x0808000000000000, 0x0808000800000000, 0x0808080000000000, 0x0808080800000000, ), + ( 0x0000000000000000, 0x0000000000000080, 0x0000000000008000, 0x0000000000008080, + 0x0000000000800000, 0x0000000000800080, 0x0000000000808000, 0x0000000000808080, + 0x0000000080000000, 0x0000000080000080, 0x0000000080008000, 0x0000000080008080, + 0x0000000080800000, 0x0000000080800080, 0x0000000080808000, 0x0000000080808080, ), + ( 0x0000000000000000, 0x0000000000000008, 0x0000000000000800, 0x0000000000000808, + 0x0000000000080000, 0x0000000000080008, 0x0000000000080800, 0x0000000000080808, + 0x0000000008000000, 0x0000000008000008, 0x0000000008000800, 0x0000000008000808, + 0x0000000008080000, 0x0000000008080008, 0x0000000008080800, 0x0000000008080808, ), + ( 0x0000000000000000, 0x0000001000000000, 0x0000100000000000, 0x0000101000000000, + 0x0010000000000000, 0x0010001000000000, 0x0010100000000000, 0x0010101000000000, + 0x1000000000000000, 0x1000001000000000, 0x1000100000000000, 0x1000101000000000, + 0x1010000000000000, 0x1010001000000000, 0x1010100000000000, 0x1010101000000000, ), + ( 0x0000000000000000, 0x0000000100000000, 0x0000010000000000, 0x0000010100000000, + 0x0001000000000000, 0x0001000100000000, 0x0001010000000000, 0x0001010100000000, + 0x0100000000000000, 0x0100000100000000, 0x0100010000000000, 
0x0100010100000000, + 0x0101000000000000, 0x0101000100000000, 0x0101010000000000, 0x0101010100000000, ), + ( 0x0000000000000000, 0x0000000000000010, 0x0000000000001000, 0x0000000000001010, + 0x0000000000100000, 0x0000000000100010, 0x0000000000101000, 0x0000000000101010, + 0x0000000010000000, 0x0000000010000010, 0x0000000010001000, 0x0000000010001010, + 0x0000000010100000, 0x0000000010100010, 0x0000000010101000, 0x0000000010101010, ), + ( 0x0000000000000000, 0x0000000000000001, 0x0000000000000100, 0x0000000000000101, + 0x0000000000010000, 0x0000000000010001, 0x0000000000010100, 0x0000000000010101, + 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, 0x0000000001000101, + 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x0000000001010101, ), + ( 0x0000000000000000, 0x0000004000000000, 0x0000400000000000, 0x0000404000000000, + 0x0040000000000000, 0x0040004000000000, 0x0040400000000000, 0x0040404000000000, + 0x4000000000000000, 0x4000004000000000, 0x4000400000000000, 0x4000404000000000, + 0x4040000000000000, 0x4040004000000000, 0x4040400000000000, 0x4040404000000000, ), + ( 0x0000000000000000, 0x0000000400000000, 0x0000040000000000, 0x0000040400000000, + 0x0004000000000000, 0x0004000400000000, 0x0004040000000000, 0x0004040400000000, + 0x0400000000000000, 0x0400000400000000, 0x0400040000000000, 0x0400040400000000, + 0x0404000000000000, 0x0404000400000000, 0x0404040000000000, 0x0404040400000000, ), + ( 0x0000000000000000, 0x0000000000000040, 0x0000000000004000, 0x0000000000004040, + 0x0000000000400000, 0x0000000000400040, 0x0000000000404000, 0x0000000000404040, + 0x0000000040000000, 0x0000000040000040, 0x0000000040004000, 0x0000000040004040, + 0x0000000040400000, 0x0000000040400040, 0x0000000040404000, 0x0000000040404040, ), + ( 0x0000000000000000, 0x0000000000000004, 0x0000000000000400, 0x0000000000000404, + 0x0000000000040000, 0x0000000000040004, 0x0000000000040400, 0x0000000000040404, + 0x0000000004000000, 0x0000000004000004, 0x0000000004000400, 0x0000000004000404, + 0x0000000004040000, 0x0000000004040004, 0x0000000004040400, 0x0000000004040404, ), + ) + #=================================================================== + # eof _load_tables() + #=================================================================== + +#============================================================================= +# support +#============================================================================= + +def _permute(c, p): + """Returns the permutation of the given 32-bit or 64-bit code with + the specified permutation table.""" + # NOTE: only difference between 32 & 64 bit permutations + # is that len(p)==8 for 32 bit, and len(p)==16 for 64 bit. + out = 0 + for r in p: + out |= r[c&0xf] + c >>= 4 + return out + +#============================================================================= +# packing & unpacking +#============================================================================= +# FIXME: more properly named _uint8_struct... 
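+# A quick sanity sketch of the pack/unpack helpers defined below
+# (values here are arbitrary, chosen only for illustration):
+##assert _pack64(0x0123456789abcdef) == b'\x01\x23\x45\x67\x89\xab\xcd\xef'
+##assert _unpack56(b'\x00\x00\x00\x00\x00\x00\x7f') == 0x7f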
+_uint64_struct = struct.Struct(">Q")
+
+def _pack64(value):
+    return _uint64_struct.pack(value)
+
+def _unpack64(value):
+    return _uint64_struct.unpack(value)[0]
+
+def _pack56(value):
+    return _uint64_struct.pack(value)[1:]
+
+def _unpack56(value):
+    return _uint64_struct.unpack(b'\x00' + value)[0]
+
+#=============================================================================
+# 56->64 key manipulation
+#=============================================================================
+
+##def expand_7bit(value):
+##    "expand 7-bit integer => 7-bits + 1 odd-parity bit"
+##    # parity calc adapted from 32-bit even parity alg found at
+##    # http://graphics.stanford.edu/~seander/bithacks.html#ParityParallel
+##    assert 0 <= value < 0x80, "value out of range"
+##    return (value<<1) | (0x9669 >> ((value ^ (value >> 4)) & 0xf)) & 1
+
+_EXPAND_ITER = irange(49,-7,-7)
+
+def expand_des_key(key):
+    """convert DES key from 7 bytes to 8 bytes (by inserting empty parity bits)"""
+    if isinstance(key, bytes):
+        if len(key) != 7:
+            raise ValueError("key must be 7 bytes in size")
+    elif isinstance(key, int_types):
+        if key < 0 or key > INT_56_MASK:
+            raise ValueError("key must be 56-bit non-negative integer")
+        return _unpack64(expand_des_key(_pack56(key)))
+    else:
+        raise exc.ExpectedTypeError(key, "bytes or int", "key")
+    key = _unpack56(key)
+    # NOTE: the following would insert correctly-valued parity bits in each key,
+    # but the parity bit would just be ignored in des_encrypt_block(),
+    # so not bothering to use it.
+    # XXX: could make parity-restoring optionally available via flag
+    ##return join_byte_values(expand_7bit((key >> shift) & 0x7f)
+    ##                        for shift in _EXPAND_ITER)
+    return join_byte_values(((key>>shift) & 0x7f)<<1 for shift in _EXPAND_ITER)
+
+def shrink_des_key(key):
+    """convert DES key from 8 bytes to 7 bytes (by discarding the parity bits)"""
+    if isinstance(key, bytes):
+        if len(key) != 8:
+            raise ValueError("key must be 8 bytes in size")
+        return _pack56(shrink_des_key(_unpack64(key)))
+    elif isinstance(key, int_types):
+        if key < 0 or key > INT_64_MASK:
+            raise ValueError("key must be 64-bit non-negative integer")
+    else:
+        raise exc.ExpectedTypeError(key, "bytes or int", "key")
+    key >>= 1
+    result = 0
+    offset = 0
+    while offset < 56:
+        # copy the low 7 data bits of each byte, skipping the parity bit
+        result |= (key & 0x7f)<<offset
+        key >>= 8
+        offset += 7
+    assert not (result & ~INT_64_MASK)
+    return result
+
+#=============================================================================
+# des encryption
+#=============================================================================
+def des_encrypt_block(key, input, salt=0, rounds=1):
+    """encrypt single block of data using DES, operates on 8-byte strings.
+
+    :arg key:
+        DES key as 7 byte string, or 8 byte string with parity bits
+        (parity bit values are ignored).
+
+    :arg input:
+        plaintext block to encrypt, as 8 byte string.
+
+    :arg salt:
+        Optional 24-bit integer used to mutate the base DES algorithm in a
+        manner specific to :class:`~passlib.hash.des_crypt` and its variants.
+        The default value ``0`` provides the normal (unsalted) DES behavior.
+        The salt functions as follows:
+        if the ``i``'th bit of ``salt`` is set,
+        bits ``i`` and ``i+24`` are swapped in the DES E-box output.
+
+    :arg rounds:
+        Optional number of rounds to apply the DES key schedule.
+        The default (``rounds=1``) provides the normal DES behavior,
+        but :class:`~passlib.hash.des_crypt` and its variants use
+        alternate rounds values.
+
+    :raises TypeError: if any of the provided args are of the wrong type.
+    :raises ValueError:
+        if any of the input blocks are the wrong size,
+        or the salt/rounds values are out of range.
+
+    :returns:
+        resulting 8-byte ciphertext block.
+    """
+    # validate & unpack key
+    if isinstance(key, bytes):
+        if len(key) == 7:
+            key = expand_des_key(key)
+        elif len(key) != 8:
+            raise ValueError("key must be 7 or 8 bytes")
+        key = _unpack64(key)
+    else:
+        raise exc.ExpectedTypeError(key, "bytes", "key")
+
+    # validate & unpack input
+    if isinstance(input, bytes):
+        if len(input) != 8:
+            raise ValueError("input block must be 8 bytes")
+        input = _unpack64(input)
+    else:
+        raise exc.ExpectedTypeError(input, "bytes", "input")
+
+    # hand things off to other func
+    result = des_encrypt_int_block(key, input, salt, rounds)
+
+    # repack result
+    return _pack64(result)
+
+def des_encrypt_int_block(key, input, salt=0, rounds=1):
+    """encrypt single block of data using DES, operates on 64-bit integers.
+
+    this function is essentially the same as :func:`des_encrypt_block`,
+    except that it operates on integers, and will NOT automatically
+    expand 56-bit keys if provided (since there's no way to detect them).
+
+    :arg key:
+        DES key as 64-bit integer (the parity bits are ignored).
+
+    :arg input:
+        input block as 64-bit integer
+
+    :arg salt:
+        optional 24-bit integer used to mutate the base DES algorithm.
+        defaults to ``0`` (no mutation applied).
+
+    :arg rounds:
+        optional number of rounds to apply the DES key schedule.
+        defaults to ``1``.
+
+    :raises TypeError: if any of the provided args are of the wrong type.
+    :raises ValueError:
+        if any of the input blocks are the wrong size,
+        or the salt/rounds values are out of range.
+
+    :returns:
+        resulting ciphertext as 64-bit integer.
+    """
+    #---------------------------------------------------------------
+    # input validation
+    #---------------------------------------------------------------
+
+    # validate salt, rounds
+    if rounds < 1:
+        raise ValueError("rounds must be positive integer")
+    if salt < 0 or salt > INT_24_MASK:
+        raise ValueError("salt must be 24-bit non-negative integer")
+
+    # validate & unpack key
+    if not isinstance(key, int_types):
+        raise exc.ExpectedTypeError(key, "int", "key")
+    elif key < 0 or key > INT_64_MASK:
+        raise ValueError("key must be 64-bit non-negative integer")
+
+    # validate & unpack input
+    if not isinstance(input, int_types):
+        raise exc.ExpectedTypeError(input, "int", "input")
+    elif input < 0 or input > INT_64_MASK:
+        raise ValueError("input must be 64-bit non-negative integer")
+
+    #---------------------------------------------------------------
+    # DES setup
+    #---------------------------------------------------------------
+    # load tables if not already done
+    global SPE, PCXROT, IE3264, CF6464
+    if PCXROT is None:
+        _load_tables()
+
+    # load SPE into local vars to speed things up and remove an array access call
+    SPE0, SPE1, SPE2, SPE3, SPE4, SPE5, SPE6, SPE7 = SPE
+
+    # NOTE: parity bits are ignored completely
+    # (UTs do fuzz testing to ensure this)
+
+    # generate key schedule
+    # NOTE: generation was modified to output two elements at a time,
+    #       so that per-round loop could do two passes at once.
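+    #       (PCXROT holds 8 (even, odd) pairs, so one pass yields all 16 round keys)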
+    def _iter_key_schedule(ks_odd):
+        """given 64-bit key, iterates over the 8 (even,odd) key schedule pairs"""
+        for p_even, p_odd in PCXROT:
+            ks_even = _permute(ks_odd, p_even)
+            ks_odd = _permute(ks_even, p_odd)
+            yield ks_even & _KS_MASK, ks_odd & _KS_MASK
+    ks_list = list(_iter_key_schedule(key))
+
+    # expand 24 bit salt -> 32 bit per des_crypt & bsdi_crypt
+    salt = (
+        ((salt & 0x00003f) << 26) |
+        ((salt & 0x000fc0) << 12) |
+        ((salt & 0x03f000) >> 2) |
+        ((salt & 0xfc0000) >> 16)
+        )
+
+    # init L & R
+    if input == 0:
+        L = R = 0
+    else:
+        L = ((input >> 31) & 0xaaaaaaaa) | (input & 0x55555555)
+        L = _permute(L, IE3264)
+
+        R = ((input >> 32) & 0xaaaaaaaa) | ((input >> 1) & 0x55555555)
+        R = _permute(R, IE3264)
+
+    #---------------------------------------------------------------
+    # main DES loop - run for specified number of rounds
+    #---------------------------------------------------------------
+    while rounds:
+        rounds -= 1
+
+        # run over each part of the schedule, 2 parts at a time
+        for ks_even, ks_odd in ks_list:
+            k = ((R>>32) ^ R) & salt # use the salt to flip specific bits
+            B = (k<<32) ^ k ^ R ^ ks_even
+
+            L ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^
+                  SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^
+                  SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^
+                  SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f])
+
+            k = ((L>>32) ^ L) & salt # use the salt to flip specific bits
+            B = (k<<32) ^ k ^ L ^ ks_odd
+
+            R ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^
+                  SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^
+                  SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^
+                  SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f])
+
+        # swap L and R
+        L, R = R, L
+
+    #---------------------------------------------------------------
+    # return final result
+    #---------------------------------------------------------------
+    C = (
+            ((L>>3)  & 0x0f0f0f0f00000000)
+            |
+            ((L<<33) & 0xf0f0f0f000000000)
+            |
+            ((R>>35) & 0x000000000f0f0f0f)
+            |
+            ((R<<1)  & 0x00000000f0f0f0f0)
+        )
+    return _permute(C, CF6464)
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/digest.py b/venv/lib/python3.12/site-packages/passlib/crypto/digest.py
new file mode 100644
index 0000000..90e0cad
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/crypto/digest.py
@@ -0,0 +1,1057 @@
+"""passlib.crypto.digest -- cryptographic helpers used by the password hashes in passlib
+
+..
versionadded:: 1.7 +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import division +# core +import hashlib +import logging; log = logging.getLogger(__name__) +try: + # new in py3.4 + from hashlib import pbkdf2_hmac as _stdlib_pbkdf2_hmac + if _stdlib_pbkdf2_hmac.__module__ == "hashlib": + # builtin pure-python backends are slightly faster than stdlib's pure python fallback, + # so only using stdlib's version if it's backed by openssl's pbkdf2_hmac() + log.debug("ignoring pure-python hashlib.pbkdf2_hmac()") + _stdlib_pbkdf2_hmac = None +except ImportError: + _stdlib_pbkdf2_hmac = None +import re +import os +from struct import Struct +from warnings import warn +# site +try: + # https://pypi.python.org/pypi/fastpbkdf2/ + from fastpbkdf2 import pbkdf2_hmac as _fast_pbkdf2_hmac +except ImportError: + _fast_pbkdf2_hmac = None +# pkg +from passlib import exc +from passlib.utils import join_bytes, to_native_str, join_byte_values, to_bytes, \ + SequenceMixin, as_bool +from passlib.utils.compat import irange, int_types, unicode_or_bytes_types, PY3, error_from +from passlib.utils.decor import memoized_property +# local +__all__ = [ + # hash utils + "lookup_hash", + "HashInfo", + "norm_hash_name", + + # hmac utils + "compile_hmac", + + # kdfs + "pbkdf1", + "pbkdf2_hmac", +] + +#============================================================================= +# generic constants +#============================================================================= + +#: max 32-bit value +MAX_UINT32 = (1 << 32) - 1 + +#: max 64-bit value +MAX_UINT64 = (1 << 64) - 1 + +#============================================================================= +# hash utils +#============================================================================= + +#: list of known hash names, used by lookup_hash()'s _norm_hash_name() helper +_known_hash_names = [ + # format: (hashlib/ssl name, iana name or standin, other known aliases ...) + + #---------------------------------------------------- + # hashes with official IANA-assigned names + # (as of 2012-03 - http://www.iana.org/assignments/hash-function-text-names) + #---------------------------------------------------- + ("md2", "md2"), # NOTE: openssl dropped md2 support in v1.0.0 + ("md5", "md5"), + ("sha1", "sha-1"), + ("sha224", "sha-224", "sha2-224"), + ("sha256", "sha-256", "sha2-256"), + ("sha384", "sha-384", "sha2-384"), + ("sha512", "sha-512", "sha2-512"), + + # TODO: add sha3 to this table. + + #---------------------------------------------------- + # hashlib/ssl-supported hashes without official IANA names, + # (hopefully-) compatible stand-ins have been chosen. + #---------------------------------------------------- + + ("blake2b", "blake-2b"), + ("blake2s", "blake-2s"), + ("md4", "md4"), + # NOTE: there was an older "ripemd" and "ripemd-128", + # but python 2.7+ resolves "ripemd" -> "ripemd160", + # so treating "ripemd" as alias here. + ("ripemd160", "ripemd-160", "ripemd"), +] + + +#: dict mapping hashlib names to hardcoded digest info; +#: so this is available even when hashes aren't present. 
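+#: (regenerate via _gen_fallback_info() below if new digests are added)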
+_fallback_info = {
+    # name: (digest_size, block_size)
+    'blake2b': (64, 128),
+    'blake2s': (32, 64),
+    'md4': (16, 64),
+    'md5': (16, 64),
+    'sha1': (20, 64),
+    'sha224': (28, 64),
+    'sha256': (32, 64),
+    'sha384': (48, 128),
+    'sha3_224': (28, 144),
+    'sha3_256': (32, 136),
+    'sha3_384': (48, 104),
+    'sha3_512': (64, 72),
+    'sha512': (64, 128),
+    'shake128': (16, 168),
+    'shake256': (32, 136),
+}
+
+
+def _gen_fallback_info():
+    """
+    internal helper used to generate ``_fallback_info`` dict.
+    currently only run manually to update the above list;
+    not invoked at runtime.
+    """
+    out = {}
+    for alg in sorted(hashlib.algorithms_available | set(["md4"])):
+        info = lookup_hash(alg)
+        out[info.name] = (info.digest_size, info.block_size)
+    return out
+
+
+#: cache of hash info instances used by lookup_hash()
+_hash_info_cache = {}
+
+def _get_hash_aliases(name):
+    """
+    internal helper used by :func:`lookup_hash` --
+    normalize arbitrary hash name to hashlib format.
+    if name not recognized, returns dummy record and issues a warning.
+
+    :arg name:
+        unnormalized name
+
+    :returns:
+        tuple with 2+ elements: ``(hashlib_name, iana_name|None, ... 0+ aliases)``.
+    """
+
+    # normalize input
+    orig = name
+    if not isinstance(name, str):
+        name = to_native_str(name, 'utf-8', 'hash name')
+    name = re.sub("[_ /]", "-", name.strip().lower())
+    if name.startswith("scram-"):  # helper for SCRAM protocol (see passlib.handlers.scram)
+        name = name[6:]
+        if name.endswith("-plus"):
+            name = name[:-5]
+
+    # look through standard names and known aliases
+    def check_table(name):
+        for row in _known_hash_names:
+            if name in row:
+                return row
+    result = check_table(name)
+    if result:
+        return result
+
+    # try to clean name up some more
+    m = re.match(r"(?i)^(?P<name>[a-z]+)-?(?P<rev>\d)?-?(?P<size>\d{3,4})?$", name)
+    if m:
+        # roughly follows "SHA2-256" style format, normalize representation,
+        # and check table again.
+        iana_name, rev, size = m.group("name", "rev", "size")
+        if rev:
+            iana_name += rev
+        hashlib_name = iana_name
+        if size:
+            iana_name += "-" + size
+            if rev:
+                hashlib_name += "_"
+            hashlib_name += size
+        result = check_table(iana_name)
+        if result:
+            return result
+
+        # not found in table, but roughly recognize format. use names we built up as fallback.
+        log.info("normalizing unrecognized hash name %r => %r / %r",
+                 orig, hashlib_name, iana_name)
+
+    else:
+        # just can't make sense of it. return something
+        iana_name = name
+        hashlib_name = name.replace("-", "_")
+        log.warning("normalizing unrecognized hash name and format %r => %r / %r",
+                    orig, hashlib_name, iana_name)
+
+    return hashlib_name, iana_name
+
+
+def _get_hash_const(name):
+    """
+    internal helper used by :func:`lookup_hash` --
+    lookup hash constructor by name
+
+    :arg name:
+        name (normalized to hashlib format, e.g. ``"sha256"``)
+
+    :returns:
+        hash constructor, e.g. ``hashlib.sha256()``;
+        or None if hash can't be located.
+    """
+    # check hashlib.<name> for an efficient constructor
+    if not name.startswith("_") and name not in ("new", "algorithms"):
+        try:
+            return getattr(hashlib, name)
+        except AttributeError:
+            pass
+
+    # check hashlib.new() in case SSL supports the digest
+    new_ssl_hash = hashlib.new
+    try:
+        # new() should throw ValueError if alg is unknown
+        new_ssl_hash(name, b"")
+    except ValueError:
+        pass
+    else:
+        # create wrapper function
+        # XXX: is there a faster way to wrap this?
+ def const(msg=b""): + return new_ssl_hash(name, msg) + const.__name__ = name + const.__module__ = "hashlib" + const.__doc__ = ("wrapper for hashlib.new(%r),\n" + "generated by passlib.crypto.digest.lookup_hash()") % name + return const + + # use builtin md4 as fallback when not supported by hashlib + if name == "md4": + from passlib.crypto._md4 import md4 + return md4 + + # XXX: any other modules / registries we should check? + # TODO: add pysha3 support. + + return None + + +def lookup_hash(digest, # *, + return_unknown=False, required=True): + """ + Returns a :class:`HashInfo` record containing information about a given hash function. + Can be used to look up a hash constructor by name, normalize hash name representation, etc. + + :arg digest: + This can be any of: + + * A string containing a :mod:`!hashlib` digest name (e.g. ``"sha256"``), + * A string containing an IANA-assigned hash name, + * A digest constructor function (e.g. ``hashlib.sha256``). + + Case is ignored, underscores are converted to hyphens, + and various other cleanups are made. + + :param required: + By default (True), this function will throw an :exc:`~passlib.exc.UnknownHashError` if no hash constructor + can be found, or if the hash is not actually available. + + If this flag is False, it will instead return a dummy :class:`!HashInfo` record + which will defer throwing the error until it's constructor function is called. + This is mainly used by :func:`norm_hash_name`. + + :param return_unknown: + + .. deprecated:: 1.7.3 + + deprecated, and will be removed in passlib 2.0. + this acts like inverse of **required**. + + :returns HashInfo: + :class:`HashInfo` instance containing information about specified digest. + + Multiple calls resolving to the same hash should always + return the same :class:`!HashInfo` instance. + """ + # check for cached entry + cache = _hash_info_cache + try: + return cache[digest] + except (KeyError, TypeError): + # NOTE: TypeError is to catch 'TypeError: unhashable type' (e.g. HashInfo) + pass + + # legacy alias + if return_unknown: + required = False + + # resolve ``digest`` to ``const`` & ``name_record`` + cache_by_name = True + if isinstance(digest, unicode_or_bytes_types): + # normalize name + name_list = _get_hash_aliases(digest) + name = name_list[0] + assert name + + # if name wasn't normalized to hashlib format, + # get info for normalized name and reuse it. + if name != digest: + info = lookup_hash(name, required=required) + cache[digest] = info + return info + + # else look up constructor + # NOTE: may return None, which is handled by HashInfo constructor + const = _get_hash_const(name) + + # if mock fips mode is enabled, replace with dummy constructor + # (to replicate how it would behave on a real fips system). + if const and mock_fips_mode and name not in _fips_algorithms: + def const(source=b""): + raise ValueError("%r disabled for fips by passlib set_mock_fips_mode()" % name) + + elif isinstance(digest, HashInfo): + # handle border case where HashInfo is passed in. + return digest + + elif callable(digest): + # try to lookup digest based on it's self-reported name + # (which we trust to be the canonical "hashlib" name) + const = digest + name_list = _get_hash_aliases(const().name) + name = name_list[0] + other_const = _get_hash_const(name) + if other_const is None: + # this is probably a third-party digest we don't know about, + # so just pass it on through, and register reverse lookup for it's name. 
+ pass + + elif other_const is const: + # if we got back same constructor, this is just a known stdlib constructor, + # which was passed in before we had cached it by name. proceed normally. + pass + + else: + # if we got back different object, then ``const`` is something else + # (such as a mock object), in which case we want to skip caching it by name, + # as that would conflict with real hash. + cache_by_name = False + + else: + raise exc.ExpectedTypeError(digest, "digest name or constructor", "digest") + + # create new instance + info = HashInfo(const=const, names=name_list, required=required) + + # populate cache + if const is not None: + cache[const] = info + if cache_by_name: + for name in name_list: + if name: # (skips iana name if it's empty) + assert cache.get(name) in [None, info], "%r already in cache" % name + cache[name] = info + return info + +#: UT helper for clearing internal cache +lookup_hash.clear_cache = _hash_info_cache.clear + + +def norm_hash_name(name, format="hashlib"): + """Normalize hash function name (convenience wrapper for :func:`lookup_hash`). + + :arg name: + Original hash function name. + + This name can be a Python :mod:`~hashlib` digest name, + a SCRAM mechanism name, IANA assigned hash name, etc. + Case is ignored, and underscores are converted to hyphens. + + :param format: + Naming convention to normalize to. + Possible values are: + + * ``"hashlib"`` (the default) - normalizes name to be compatible + with Python's :mod:`!hashlib`. + + * ``"iana"`` - normalizes name to IANA-assigned hash function name. + For hashes which IANA hasn't assigned a name for, this issues a warning, + and then uses a heuristic to return a "best guess" name. + + :returns: + Hash name, returned as native :class:`!str`. + """ + info = lookup_hash(name, required=False) + if info.unknown: + warn("norm_hash_name(): " + info.error_text, exc.PasslibRuntimeWarning) + if format == "hashlib": + return info.name + elif format == "iana": + return info.iana_name + else: + raise ValueError("unknown format: %r" % (format,)) + + +class HashInfo(SequenceMixin): + """ + Record containing information about a given hash algorithm, as returned :func:`lookup_hash`. + + This class exposes the following attributes: + + .. autoattribute:: const + .. autoattribute:: digest_size + .. autoattribute:: block_size + .. autoattribute:: name + .. autoattribute:: iana_name + .. autoattribute:: aliases + .. autoattribute:: supported + + This object can also be treated a 3-element sequence + containing ``(const, digest_size, block_size)``. + """ + #========================================================================= + # instance attrs + #========================================================================= + + #: Canonical / hashlib-compatible name (e.g. ``"sha256"``). + name = None + + #: IANA assigned name (e.g. ``"sha-256"``), may be ``None`` if unknown. + iana_name = None + + #: Tuple of other known aliases (may be empty) + aliases = () + + #: Hash constructor function (e.g. :func:`hashlib.sha256`) + const = None + + #: Hash's digest size + digest_size = None + + #: Hash's block size + block_size = None + + #: set when hash isn't available, will be filled in with string containing error text + #: that const() will raise. 
+    error_text = None
+
+    #: set when error_text is due to hash algorithm being completely unknown
+    #: (not just unavailable on current system)
+    unknown = False
+
+    #=========================================================================
+    # init
+    #=========================================================================
+
+    def __init__(self, # *,
+                 const, names, required=True):
+        """
+        initialize new instance.
+        :arg const:
+            hash constructor
+        :arg names:
+            list of 2+ names. should be list of ``(name, iana_name, ... 0+ aliases)``.
+            names must be lower-case. only iana name may be None.
+        """
+        # init names
+        name = self.name = names[0]
+        self.iana_name = names[1]
+        self.aliases = names[2:]
+
+        def use_stub_const(msg):
+            """
+            helper that installs stub constructor which throws the specified error.
+            """
+            def const(source=b""):
+                raise exc.UnknownHashError(msg, name)
+            if required:
+                # if caller only wants supported digests returned,
+                # just throw error immediately...
+                const()
+                assert "shouldn't get here"
+            self.error_text = msg
+            self.const = const
+            try:
+                self.digest_size, self.block_size = _fallback_info[name]
+            except KeyError:
+                pass
+
+        # handle "constructor not available" case
+        if const is None:
+            if names in _known_hash_names:
+                msg = "unsupported hash: %r" % name
+            else:
+                msg = "unknown hash: %r" % name
+                self.unknown = True
+            use_stub_const(msg)
+            # TODO: load in preset digest size info for known hashes.
+            return
+
+        # create hash instance to inspect
+        try:
+            hash = const()
+        except ValueError as err:
+            # per issue 116, FIPS compliant systems will have a constructor;
+            # but it will throw a ValueError with this message. As of 1.7.3,
+            # translating this into DisabledHashError.
+            # "ValueError: error:060800A3:digital envelope routines:EVP_DigestInit_ex:disabled for fips"
+            if "disabled for fips" in str(err).lower():
+                msg = "%r hash disabled for fips" % name
+            else:
+                msg = "internal error in %r constructor\n(%s: %s)" % (name, type(err).__name__, err)
+            use_stub_const(msg)
+            return
+
+        # store stats about hash
+        self.const = const
+        self.digest_size = hash.digest_size
+        self.block_size = hash.block_size
+
+        # do sanity check on digest size
+        if len(hash.digest()) != hash.digest_size:
+            raise RuntimeError("%r constructor failed sanity check" % self.name)
+
+        # do sanity check on name.
+        if hash.name != self.name:
+            warn("inconsistent digest name: %r resolved to %r, which reports name as %r" %
+                 (self.name, const, hash.name), exc.PasslibRuntimeWarning)
+
+    #=========================================================================
+    # methods
+    #=========================================================================
+    def __repr__(self):
+        return "<lookup_hash(%r): digest_size=%r block_size=%r>" % \
+               (self.name, self.digest_size, self.block_size)
+
+    def _as_tuple(self):
+        return self.const, self.digest_size, self.block_size
+
+    @memoized_property
+    def supported(self):
+        """
+        whether hash is available for use
+        (if False, constructor will throw UnknownHashError if called)
+        """
+        return self.error_text is None
+
+    @memoized_property
+    def supported_by_fastpbkdf2(self):
+        """helper to detect if hash is supported by fastpbkdf2()"""
+        if not _fast_pbkdf2_hmac:
+            return None
+        try:
+            _fast_pbkdf2_hmac(self.name, b"p", b"s", 1)
+            return True
+        except ValueError:
+            # "unsupported hash type"
+            return False
+
+    @memoized_property
+    def supported_by_hashlib_pbkdf2(self):
+        """helper to detect if hash is supported by hashlib.pbkdf2_hmac()"""
+        if not _stdlib_pbkdf2_hmac:
+            return None
+        try:
+            _stdlib_pbkdf2_hmac(self.name, b"p", b"s", 1)
+            return True
+        except ValueError:
+            # "unsupported hash type"
+            return False
+
+    #=========================================================================
+    # eoc
+    #=========================================================================
+
+#---------------------------------------------------------------------
+# mock fips mode monkeypatch
+#---------------------------------------------------------------------
+
+#: flag for detecting if mock fips mode is enabled (UT helper)
+mock_fips_mode = False
+
+#: digests permitted under FIPS mode (used only by the mock-fips UT helper)
+_fips_algorithms = set(["sha1", "sha224", "sha256", "sha384", "sha512"])
+
+def set_mock_fips_mode(enable=True):
+    """UT helper which causes lookup_hash() to mimic a FIPS-enabled system"""
+    global mock_fips_mode
+    mock_fips_mode = enable
+    lookup_hash.clear_cache()
+
+# init mock fips mode from environment (UT helper)
+if as_bool(os.environ.get("PASSLIB_MOCK_FIPS_MODE")):
+    set_mock_fips_mode()
+
+#=============================================================================
+# hmac utils
+#=============================================================================
+
+#: translation tables used by compile_hmac() to xor the key
+#: with the hmac inner/outer padding bytes (0x36 / 0x5C)
+_TRANS_36 = join_byte_values((x ^ 0x36) for x in irange(256))
+_TRANS_5C = join_byte_values((x ^ 0x5C) for x in irange(256))
+
+def compile_hmac(digest, key, multipart=False):
+    """
+    This function returns an efficient HMAC function, hardcoded with a specific digest & key.
+    It can be used via ``hmac = compile_hmac(digest, key)``.
+
+    :arg digest:
+        digest name or constructor.
+
+    :arg key:
+        secret key as :class:`!bytes` or :class:`!unicode` (unicode will be encoded using utf-8).
+
+    :param multipart:
+        request a multipart constructor instead (see return description).
+
+    :returns:
+        A function implementing HMAC with the specified digest.
+        By default, this has the signature ``hmac(msg) -> digest output``.
+
+        However, if ``multipart=True``, the returned function has the signature
+        ``hmac() -> update, finalize``, where ``update(msg)`` may be called multiple times,
+        and ``finalize() -> digest_output`` may be repeatedly called at any point to
+        calculate the HMAC digest so far.
+
+        The returned object will also have a ``digest_info`` attribute, containing
+        a :class:`lookup_hash` instance for the specified digest.
+
+    This function exists, and has the weird signature it does, in order to squeeze out
+    as much efficiency as possible, by omitting much of the setup cost
+    and features of the stdlib :mod:`hmac` module.
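+
+    Example (illustrative; the key/message values here are arbitrary)::
+
+        hmac = compile_hmac("sha256", b"secret-key")
+        tag = hmac(b"message")                # one-shot digest
+
+        update, finalize = compile_hmac("sha256", b"secret-key", multipart=True)()
+        update(b"mess")
+        update(b"age")
+        assert finalize() == tag              # same digest, fed incrementally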
+ """ + # all the following was adapted from stdlib's hmac module + + # resolve digest (cached) + digest_info = lookup_hash(digest) + const, digest_size, block_size = digest_info + assert block_size >= 16, "block size too small" + + # prepare key + if not isinstance(key, bytes): + key = to_bytes(key, param="key") + klen = len(key) + if klen > block_size: + key = const(key).digest() + klen = digest_size + if klen < block_size: + key += b'\x00' * (block_size - klen) + + # create pre-initialized hash constructors + _inner_copy = const(key.translate(_TRANS_36)).copy + _outer_copy = const(key.translate(_TRANS_5C)).copy + + if multipart: + # create multi-part function + # NOTE: this is slightly slower than the single-shot version, + # and should only be used if needed. + def hmac(): + """generated by compile_hmac(multipart=True)""" + inner = _inner_copy() + def finalize(): + outer = _outer_copy() + outer.update(inner.digest()) + return outer.digest() + return inner.update, finalize + else: + + # single-shot function + def hmac(msg): + """generated by compile_hmac()""" + inner = _inner_copy() + inner.update(msg) + outer = _outer_copy() + outer.update(inner.digest()) + return outer.digest() + + # add info attr + hmac.digest_info = digest_info + return hmac + +#============================================================================= +# pbkdf1 +#============================================================================= +def pbkdf1(digest, secret, salt, rounds, keylen=None): + """pkcs#5 password-based key derivation v1.5 + + :arg digest: + digest name or constructor. + + :arg secret: + secret to use when generating the key. + may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). + + :arg salt: + salt string to use when generating key. + may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). + + :param rounds: + number of rounds to use to generate key. + + :arg keylen: + number of bytes to generate (if omitted / ``None``, uses digest's native size) + + :returns: + raw :class:`bytes` of generated key + + .. note:: + + This algorithm has been deprecated, new code should use PBKDF2. + Among other limitations, ``keylen`` cannot be larger + than the digest size of the specified hash. + """ + # resolve digest + const, digest_size, block_size = lookup_hash(digest) + + # validate secret & salt + secret = to_bytes(secret, param="secret") + salt = to_bytes(salt, param="salt") + + # validate rounds + if not isinstance(rounds, int_types): + raise exc.ExpectedTypeError(rounds, "int", "rounds") + if rounds < 1: + raise ValueError("rounds must be at least 1") + + # validate keylen + if keylen is None: + keylen = digest_size + elif not isinstance(keylen, int_types): + raise exc.ExpectedTypeError(keylen, "int or None", "keylen") + elif keylen < 0: + raise ValueError("keylen must be at least 0") + elif keylen > digest_size: + raise ValueError("keylength too large for digest: %r > %r" % + (keylen, digest_size)) + + # main pbkdf1 loop + block = secret + salt + for _ in irange(rounds): + block = const(block).digest() + return block[:keylen] + +#============================================================================= +# pbkdf2 +#============================================================================= + +_pack_uint32 = Struct(">L").pack + +def pbkdf2_hmac(digest, secret, salt, rounds, keylen=None): + """pkcs#5 password-based key derivation v2.0 using HMAC + arbitrary digest. + + :arg digest: + digest name or constructor. + + :arg secret: + passphrase to use to generate key. 
+        may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8).
+
+    :arg salt:
+        salt string to use when generating key.
+        may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8).
+
+    :param rounds:
+        number of rounds to use to generate key.
+
+    :arg keylen:
+        number of bytes to generate.
+        if omitted / ``None``, will use digest's native output size.
+
+    :returns:
+        raw bytes of generated key
+
+    .. versionchanged:: 1.7
+
+        This function will use the first available of the following backends:
+
+        * `fastpbkdf2`_
+        * :func:`hashlib.pbkdf2_hmac` (only available in py2 >= 2.7.8, and py3 >= 3.4)
+        * builtin pure-python backend
+
+        See :data:`passlib.crypto.digest.PBKDF2_BACKENDS` to determine
+        which backend(s) are in use.
+    """
+    # validate secret & salt
+    secret = to_bytes(secret, param="secret")
+    salt = to_bytes(salt, param="salt")
+
+    # resolve digest
+    digest_info = lookup_hash(digest)
+    digest_size = digest_info.digest_size
+
+    # validate rounds
+    if not isinstance(rounds, int_types):
+        raise exc.ExpectedTypeError(rounds, "int", "rounds")
+    if rounds < 1:
+        raise ValueError("rounds must be at least 1")
+
+    # validate keylen
+    if keylen is None:
+        keylen = digest_size
+    elif not isinstance(keylen, int_types):
+        raise exc.ExpectedTypeError(keylen, "int or None", "keylen")
+    elif keylen < 1:
+        # XXX: could allow keylen=0, but want to be compat w/ stdlib
+        raise ValueError("keylen must be at least 1")
+
+    # find smallest block count s.t. keylen <= block_count * digest_size;
+    # make sure block count won't overflow (per pbkdf2 spec)
+    # this corresponds to throwing error if keylen > digest_size * MAX_UINT32
+    # NOTE: stdlib will throw error at lower bound (keylen > MAX_SINT32)
+    # NOTE: have to do this before the other backends are checked, since fastpbkdf2
+    #       raises the wrong error (InvocationError, not OverflowError)
+    block_count = (keylen + digest_size - 1) // digest_size
+    if block_count > MAX_UINT32:
+        raise OverflowError("keylen too long for digest")
+
+    #
+    # check for various high-speed backends
+    #
+
+    # ~3x faster than pure-python backend
+    # NOTE: have to do this after above guards since fastpbkdf2 lacks bounds checks.
+    if digest_info.supported_by_fastpbkdf2:
+        return _fast_pbkdf2_hmac(digest_info.name, secret, salt, rounds, keylen)
+
+    # ~1.4x faster than pure-python backend
+    # NOTE: have to do this after fastpbkdf2 since hashlib-ssl is slower,
+    #       but supports a larger number of hashes.
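+    # Illustrative sketch of the call contract all three backends honor,
+    # regardless of which one (this branch or the fallbacks below) services
+    # the call (demo values are arbitrary):
+    #
+    #     key = pbkdf2_hmac("sha256", b"password", b"salt", rounds=29000)
+    #     assert len(key) == 32            # sha256's native digest size
+    #     key64 = pbkdf2_hmac("sha256", b"password", b"salt", rounds=29000,
+    #                         keylen=64)   # spans two pbkdf2 blocks
+    #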
+    if digest_info.supported_by_hashlib_pbkdf2:
+        return _stdlib_pbkdf2_hmac(digest_info.name, secret, salt, rounds, keylen)
+
+    #
+    # otherwise use our own implementation
+    #
+
+    # generate keyed hmac
+    keyed_hmac = compile_hmac(digest, secret)
+
+    # get helper to calculate pbkdf2 inner loop efficiently
+    calc_block = _get_pbkdf2_looper(digest_size)
+
+    # assemble & return result
+    return join_bytes(
+        calc_block(keyed_hmac, keyed_hmac(salt + _pack_uint32(i)), rounds)
+        for i in irange(1, block_count + 1)
+    )[:keylen]
+
+#-------------------------------------------------------------------------------------
+# pick best choice for pure-python helper
+# TODO: consider some alternatives, such as C-accelerated xor_bytes helper if available
+#-------------------------------------------------------------------------------------
+# NOTE: this env var is only present to support the admin/benchmark_pbkdf2 script
+_force_backend = os.environ.get("PASSLIB_PBKDF2_BACKEND") or "any"
+
+if PY3 and _force_backend in ["any", "from-bytes"]:
+    from functools import partial
+
+    def _get_pbkdf2_looper(digest_size):
+        return partial(_pbkdf2_looper, digest_size)
+
+    def _pbkdf2_looper(digest_size, keyed_hmac, digest, rounds):
+        """
+        py3-only implementation of pbkdf2 inner loop;
+        uses 'int.from_bytes' + integer XOR
+        """
+        from_bytes = int.from_bytes
+        BIG = "big"  # endianness doesn't matter, just has to be consistent
+        accum = from_bytes(digest, BIG)
+        for _ in irange(rounds - 1):
+            digest = keyed_hmac(digest)
+            accum ^= from_bytes(digest, BIG)
+        return accum.to_bytes(digest_size, BIG)
+
+    _builtin_backend = "from-bytes"
+
+elif _force_backend in ["any", "unpack", "from-bytes"]:
+    from struct import Struct
+    from passlib.utils import sys_bits
+
+    _have_64_bit = (sys_bits >= 64)
+
+    #: cache used by _get_pbkdf2_looper
+    _looper_cache = {}
+
+    def _get_pbkdf2_looper(digest_size):
+        """
+        We want a helper function which performs the equivalent of the following::
+
+            def helper(keyed_hmac, digest, rounds):
+                accum = digest
+                for _ in irange(rounds - 1):
+                    digest = keyed_hmac(digest)
+                    accum ^= digest
+                return accum
+
+        However, there is no efficient way to implement ``bytes ^ bytes`` in python.
+        Instead, we use an approach where we dynamically compile a helper function
+        based on the digest size.  Instead of a single ``accum`` var, this helper
+        breaks the digest into a series of integers.
+
+        It stores these in a series of ``accum_<i>`` vars, and performs
+        ``accum ^= digest`` by unpacking the digest and performing the xor for
+        each ``accum_<i> ^= digest_<i>``.  This keeps everything in locals,
+        avoiding excessive list creation, encoding or decoding, etc.
+
+        :param digest_size:
+            digest size to compile for, in bytes. (must be multiple of 4).
+
+        :return:
+            helper function with call signature outlined above.
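+
+        For instance, for a 32-byte digest on a 64-bit system, the compiled
+        helper unpacks the digest into four 64-bit integers; the generated
+        source looks roughly like this (illustrative reconstruction from the
+        template assembled below)::
+
+            def helper(keyed_hmac, digest, rounds):
+                '''pbkdf2 loop helper for digest_size=32'''
+                unpack_digest = struct.unpack
+                acc_0, acc_1, acc_2, acc_3 = unpack_digest(digest)
+                for _ in irange(1, rounds):
+                    digest = keyed_hmac(digest)
+                    dig_0, dig_1, dig_2, dig_3 = unpack_digest(digest)
+                    acc_0 ^= dig_0
+                    acc_1 ^= dig_1
+                    acc_2 ^= dig_2
+                    acc_3 ^= dig_3
+                return struct.pack(acc_0, acc_1, acc_2, acc_3)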
+ """ + # + # cache helpers + # + try: + return _looper_cache[digest_size] + except KeyError: + pass + + # + # figure out most efficient struct format to unpack digest into list of native ints + # + if _have_64_bit and not digest_size & 0x7: + # digest size multiple of 8, on a 64 bit system -- use array of UINT64 + count = (digest_size >> 3) + fmt = "=%dQ" % count + elif not digest_size & 0x3: + if _have_64_bit: + # digest size multiple of 4, on a 64 bit system -- use array of UINT64 + 1 UINT32 + count = (digest_size >> 3) + fmt = "=%dQI" % count + count += 1 + else: + # digest size multiple of 4, on a 32 bit system -- use array of UINT32 + count = (digest_size >> 2) + fmt = "=%dI" % count + else: + # stopping here, cause no known hashes have digest size that isn't multiple of 4 bytes. + # if needed, could go crazy w/ "H" & "B" + raise NotImplementedError("unsupported digest size: %d" % digest_size) + struct = Struct(fmt) + + # + # build helper source + # + tdict = dict( + digest_size=digest_size, + accum_vars=", ".join("acc_%d" % i for i in irange(count)), + digest_vars=", ".join("dig_%d" % i for i in irange(count)), + ) + + # head of function + source = ( + "def helper(keyed_hmac, digest, rounds):\n" + " '''pbkdf2 loop helper for digest_size={digest_size}'''\n" + " unpack_digest = struct.unpack\n" + " {accum_vars} = unpack_digest(digest)\n" + " for _ in irange(1, rounds):\n" + " digest = keyed_hmac(digest)\n" + " {digest_vars} = unpack_digest(digest)\n" + ).format(**tdict) + + # xor digest + for i in irange(count): + source += " acc_%d ^= dig_%d\n" % (i, i) + + # return result + source += " return struct.pack({accum_vars})\n".format(**tdict) + + # + # compile helper + # + code = compile(source, "", "exec") + gdict = dict(irange=irange, struct=struct) + ldict = dict() + eval(code, gdict, ldict) + helper = ldict['helper'] + if __debug__: + helper.__source__ = source + + # + # store in cache + # + _looper_cache[digest_size] = helper + return helper + + _builtin_backend = "unpack" + +else: + assert _force_backend in ["any", "hexlify"] + + # XXX: older & slower approach that used int(hexlify()), + # keeping it around for a little while just for benchmarking. 
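+
+    # Sketch of the idea (illustrative values): treat each digest as one big
+    # integer, xor the integers, then convert back to bytes at the end, e.g.
+    #
+    #     >>> a, b = b"\x0f\x00", b"\x01\x02"
+    #     >>> int(_hexlify(a), 16) ^ int(_hexlify(b), 16)
+    #     3586
+    #     >>> int_to_bytes(3586, 2)
+    #     b'\x0e\x02'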
+
+    from binascii import hexlify as _hexlify
+    from passlib.utils import int_to_bytes
+
+    def _get_pbkdf2_looper(digest_size):
+        return _pbkdf2_looper
+
+    def _pbkdf2_looper(keyed_hmac, digest, rounds):
+        hexlify = _hexlify
+        accum = int(hexlify(digest), 16)
+        for _ in irange(rounds - 1):
+            digest = keyed_hmac(digest)
+            accum ^= int(hexlify(digest), 16)
+        return int_to_bytes(accum, len(digest))
+
+    _builtin_backend = "hexlify"
+
+# helper for benchmark script -- disable hashlib, fastpbkdf2 support if builtin requested
+if _force_backend == _builtin_backend:
+    _fast_pbkdf2_hmac = _stdlib_pbkdf2_hmac = None
+
+# expose info about what backends are active
+PBKDF2_BACKENDS = [b for b in [
+    "fastpbkdf2" if _fast_pbkdf2_hmac else None,
+    "hashlib-ssl" if _stdlib_pbkdf2_hmac else None,
+    "builtin-" + _builtin_backend
+] if b]
+
+# *very* rough estimate of relative speed (compared to sha256 using 'unpack' backend on 64bit arch)
+if "fastpbkdf2" in PBKDF2_BACKENDS:
+    PBKDF2_SPEED_FACTOR = 3
+elif "hashlib-ssl" in PBKDF2_BACKENDS:
+    PBKDF2_SPEED_FACTOR = 1.4
+else:
+    # remaining backends have *some* difference in performance, but not enough to matter
+    PBKDF2_SPEED_FACTOR = 1
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__init__.py b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__init__.py
new file mode 100644
index 0000000..c71873a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__init__.py
@@ -0,0 +1,281 @@
+"""
+passlib.utils.scrypt -- scrypt hash frontend and helper utilities
+
+XXX: add this module to public docs?
+"""
+#==========================================================================
+# imports
+#==========================================================================
+from __future__ import absolute_import
+# core
+import logging; log = logging.getLogger(__name__)
+from warnings import warn
+# pkg
+from passlib import exc
+from passlib.utils import to_bytes
+from passlib.utils.compat import PYPY
+# local
+__all__ = [
+    "validate",
+    "scrypt",
+]
+
+#==========================================================================
+# config validation
+#==========================================================================
+
+#: internal global constant for setting stdlib scrypt's maxmem (int bytes).
+#: set to -1 to auto-calculate (see _load_stdlib_backend() below)
+#: set to 0 for openssl default (32mb according to python docs)
+#: TODO: standardize this across backends, and expose support via scrypt hash config;
+#:       currently not very configurable, and only applies to stdlib backend.
+SCRYPT_MAXMEM = -1
+
+#: max output length in bytes
+MAX_KEYLEN = ((1 << 32) - 1) * 32
+
+#: max ``r * p`` limit
+MAX_RP = (1 << 30) - 1
+
+# TODO: unittests for this function
+def validate(n, r, p):
+    """
+    helper which validates a set of scrypt config parameters.
+    scrypt will take ``O(n * r * p)`` time and ``O(n * r)`` memory.
+    limitations are that ``n`` must be a power of 2 (and > 1),
+    ``n < 2**(16*r)``, and ``r * p < 2**30``.
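+
+    Illustrative sketch (parameter values are arbitrary)::
+
+        >>> validate(n=16384, r=8, p=1)     # a common parameter combination
+        True
+        >>> validate(n=1000, r=8, p=1)      # rejected: n is not a power of 2
+        Traceback (most recent call last):
+            ...
+        ValueError: n must be > 1, and a power of 2: n=1000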
+
+    :param n: scrypt rounds
+    :param r: scrypt block size
+    :param p: scrypt parallel factor
+    """
+    if r < 1:
+        raise ValueError("r must be > 0: r=%r" % r)
+
+    if p < 1:
+        raise ValueError("p must be > 0: p=%r" % p)
+
+    if r * p > MAX_RP:
+        # pbkdf2-hmac-sha256 limitation - it will be requested to generate ``p*(2*r)*64`` bytes,
+        # but pbkdf2 can do max of (2**31-1) blocks, and sha-256 has 32 byte block size...
+        # so ``(2**31-1)*32 >= p*r*128`` -> ``r*p < 2**30``
+        raise ValueError("r * p must be < 2**30: r=%r, p=%r" % (r,p))
+
+    if n < 2 or n & (n - 1):
+        raise ValueError("n must be > 1, and a power of 2: n=%r" % n)
+
+    return True
+
+
+UINT32_SIZE = 4
+
+
+def estimate_maxmem(n, r, p, fudge=1.05):
+    """
+    calculate memory required for parameter combination.
+    assumes parameters have already been validated.
+
+    .. warning::
+        this is derived from OpenSSL's scrypt maxmem formula,
+        and may not be correct for other implementations
+        (additional buffers, different parallelism tradeoffs, etc).
+    """
+    # XXX: expand to provide upper bound for diff backends, or max across all of them?
+    # NOTE: openssl's scrypt() enforces its maxmem parameter based on calc located at
+    #       , ending in line containing "Blen + Vlen > maxmem"
+    #       using the following formula:
+    #           Blen = p * 128 * r
+    #           Vlen = 32 * r * (N + 2) * sizeof(uint32_t)
+    #           total_bytes = Blen + Vlen
+    maxmem = r * (128 * p + 32 * (n + 2) * UINT32_SIZE)
+    # add fudge factor so we don't have off-by-one mismatch w/ openssl
+    maxmem = int(maxmem * fudge)
+    return maxmem
+
+
+# TODO: configuration picker (may need psutil for full effect)
+
+#==========================================================================
+# hash frontend
+#==========================================================================
+
+#: backend function used by scrypt(), filled in by _set_backend()
+_scrypt = None
+
+#: name of backend currently in use, exposed for informational purposes.
+backend = None
+
+def scrypt(secret, salt, n, r, p=1, keylen=32):
+    """run SCrypt key derivation function using specified parameters.
+
+    :arg secret:
+        passphrase string (unicode is encoded to bytes using utf-8).
+
+    :arg salt:
+        salt string (unicode is encoded to bytes using utf-8).
+
+    :arg n:
+        integer 'N' parameter
+
+    :arg r:
+        integer 'r' parameter
+
+    :arg p:
+        integer 'p' parameter
+
+    :arg keylen:
+        number of bytes of key to generate.
+        defaults to 32 (the internal block size).
+
+    :returns:
+        a *keylen*-sized bytes instance
+
+    SCrypt imposes a number of constraints on its input parameters:
+
+    * ``r * p < 2**30`` -- due to a limitation of PBKDF2-HMAC-SHA256.
+    * ``keylen < (2**32 - 1) * 32`` -- due to a limitation of PBKDF2-HMAC-SHA256.
+    * ``n`` must be a power of 2, and > 1 -- internal limitation of scrypt() implementation
+
+    :raises ValueError: if the provided parameters are invalid (see constraints above).
+
+    .. warning::
+
+        Unless the third-party ``scrypt``_ package
+        is installed, passlib will use a builtin pure-python implementation of scrypt,
+        which is *considerably* slower (and thus requires a much lower / less secure
+        ``n`` value in order to be usable).  Installing the :mod:`!scrypt` package
+        is strongly recommended.
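+
+    Illustrative sketch (arbitrary demo values; real callers should derive
+    their parameters from the hash configuration)::
+
+        >>> from passlib.crypto.scrypt import scrypt
+        >>> key = scrypt(b"password", b"salt", n=16384, r=8, p=1, keylen=32)
+        >>> len(key)
+        32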
+    """
+    validate(n, r, p)
+    secret = to_bytes(secret, param="secret")
+    salt = to_bytes(salt, param="salt")
+    if keylen < 1:
+        raise ValueError("keylen must be at least 1")
+    if keylen > MAX_KEYLEN:
+        raise ValueError("keylen too large, must be <= %d" % MAX_KEYLEN)
+    return _scrypt(secret, salt, n, r, p, keylen)
+
+
+def _load_builtin_backend():
+    """
+    Load pure-python scrypt implementation built into passlib.
+    """
+    slowdown = 10 if PYPY else 100
+    warn("Using builtin scrypt backend, which is %dx slower than is required "
+         "for adequate security. Installing scrypt support (via 'pip install scrypt') "
+         "is strongly recommended" % slowdown, exc.PasslibSecurityWarning)
+    from ._builtin import ScryptEngine
+    return ScryptEngine.execute
+
+
+def _load_cffi_backend():
+    """
+    Try to import the ctypes-based scrypt hash function provided by the
+    ``scrypt``_ package.
+    """
+    try:
+        from scrypt import hash
+        return hash
+    except ImportError:
+        pass
+    # not available, but check to see if package is present but outdated / not installed right
+    try:
+        import scrypt
+    except ImportError as err:
+        if "scrypt" not in str(err):
+            # e.g. if cffi isn't set up right
+            # user should try importing scrypt explicitly to diagnose problem.
+            warn("'scrypt' package failed to import correctly (possible installation issue?)",
+                 exc.PasslibWarning)
+        # else: package just isn't installed
+    else:
+        warn("'scrypt' package is too old (lacks ``hash()`` method)", exc.PasslibWarning)
+    return None
+
+
+def _load_stdlib_backend():
+    """
+    Attempt to load the stdlib scrypt() implementation and return a wrapper.
+    Returns None if not found.
+    """
+    try:
+        # new in python 3.6, if compiled with openssl >= 1.1
+        from hashlib import scrypt as stdlib_scrypt
+    except ImportError:
+        return None
+
+    def stdlib_scrypt_wrapper(secret, salt, n, r, p, keylen):
+        # work out appropriate "maxmem" parameter
+        #
+        # TODO: would like to enforce a single "maxmem" policy across all backends;
+        #       and maybe expose this via scrypt hasher config.
+        #
+        # for now, since parameters should all be coming from internally-controlled sources
+        # (password hashes), using policy of "whatever memory the parameters need".
+        # furthermore, since stdlib scrypt is the only place that needs this,
+        # currently calculating exactly what maxmem needs to be for the stdlib call.
+        # as a hack, this can be overridden via SCRYPT_MAXMEM above;
+        # would like to formalize all of this.
+        maxmem = SCRYPT_MAXMEM
+        if maxmem < 0:
+            maxmem = estimate_maxmem(n, r, p)
+        return stdlib_scrypt(password=secret, salt=salt, n=n, r=r, p=p, dklen=keylen,
+                             maxmem=maxmem)
+
+    return stdlib_scrypt_wrapper
+
+
+#: list of potential backends
+backend_values = ("stdlib", "scrypt", "builtin")
+
+#: dict mapping backend name -> loader
+_backend_loaders = dict(
+    stdlib=_load_stdlib_backend,
+    scrypt=_load_cffi_backend,  # XXX: rename backend constant to "cffi"?
+    builtin=_load_builtin_backend,
+)
+
+
+def _set_backend(name, dryrun=False):
+    """
+    set backend for scrypt(). if name not specified, loads first available.
+
+    :raises ~passlib.exc.MissingBackendError: if backend can't be found
+
+    ..
note:: mainly intended to be called by unittests, and scrypt hash handler + """ + if name == "any": + return + elif name == "default": + for name in backend_values: + try: + return _set_backend(name, dryrun=dryrun) + except exc.MissingBackendError: + continue + raise exc.MissingBackendError("no scrypt backends available") + else: + loader = _backend_loaders.get(name) + if not loader: + raise ValueError("unknown scrypt backend: %r" % (name,)) + hash = loader() + if not hash: + raise exc.MissingBackendError("scrypt backend %r not available" % name) + if dryrun: + return + global _scrypt, backend + backend = name + _scrypt = hash + +# initialize backend +_set_backend("default") + + +def _has_backend(name): + try: + _set_backend(name, dryrun=True) + return True + except exc.MissingBackendError: + return False + +#========================================================================== +# eof +#========================================================================== diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5bcbf18 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_builtin.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_builtin.cpython-312.pyc new file mode 100644 index 0000000..e690021 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_builtin.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_gen_files.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_gen_files.cpython-312.pyc new file mode 100644 index 0000000..8360674 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_gen_files.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_salsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_salsa.cpython-312.pyc new file mode 100644 index 0000000..e643c38 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/__pycache__/_salsa.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_builtin.py b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_builtin.py new file mode 100644 index 0000000..e9bb305 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_builtin.py @@ -0,0 +1,244 @@ +"""passlib.utils.scrypt._builtin -- scrypt() kdf in pure-python""" +#========================================================================== +# imports +#========================================================================== +# core +import operator +import struct +# pkg +from passlib.utils.compat import izip +from passlib.crypto.digest import pbkdf2_hmac +from passlib.crypto.scrypt._salsa import salsa20 +# local +__all__ =[ + "ScryptEngine", +] + +#========================================================================== +# scrypt engine +#========================================================================== +class ScryptEngine(object): + """ + helper class used to run scrypt kdf, see scrypt() for frontend + + .. 
warning:: + this class does NO validation of the input ranges or types. + + it's not intended to be used directly, + but only as a backend for :func:`passlib.utils.scrypt.scrypt()`. + """ + #================================================================= + # instance attrs + #================================================================= + + # primary scrypt config parameters + n = 0 + r = 0 + p = 0 + + # derived values & objects + smix_bytes = 0 + iv_bytes = 0 + bmix_len = 0 + bmix_half_len = 0 + bmix_struct = None + integerify = None + + #================================================================= + # frontend + #================================================================= + @classmethod + def execute(cls, secret, salt, n, r, p, keylen): + """create engine & run scrypt() hash calculation""" + return cls(n, r, p).run(secret, salt, keylen) + + #================================================================= + # init + #================================================================= + def __init__(self, n, r, p): + # store config + self.n = n + self.r = r + self.p = p + self.smix_bytes = r << 7 # num bytes in smix input - 2*r*16*4 + self.iv_bytes = self.smix_bytes * p + self.bmix_len = bmix_len = r << 5 # length of bmix block list - 32*r integers + self.bmix_half_len = r << 4 + assert struct.calcsize("I") == 4 + self.bmix_struct = struct.Struct("<" + str(bmix_len) + "I") + + # use optimized bmix for certain cases + if r == 1: + self.bmix = self._bmix_1 + + # pick best integerify function - integerify(bmix_block) should + # take last 64 bytes of block and return a little-endian integer. + # since it's immediately converted % n, we only have to extract + # the first 32 bytes if n < 2**32 - which due to the current + # internal representation, is already unpacked as a 32-bit int. + if n <= 0xFFFFffff: + integerify = operator.itemgetter(-16) + else: + assert n <= 0xFFFFffffFFFFffff + ig1 = operator.itemgetter(-16) + ig2 = operator.itemgetter(-17) + def integerify(X): + return ig1(X) | (ig2(X)<<32) + self.integerify = integerify + + #================================================================= + # frontend + #================================================================= + def run(self, secret, salt, keylen): + """ + run scrypt kdf for specified secret, salt, and keylen + + .. note:: + + * time cost is ``O(n * r * p)`` + * mem cost is ``O(n * r)`` + """ + # stretch salt into initial byte array via pbkdf2 + iv_bytes = self.iv_bytes + input = pbkdf2_hmac("sha256", secret, salt, rounds=1, keylen=iv_bytes) + + # split initial byte array into 'p' mflen-sized chunks, + # and run each chunk through smix() to generate output chunk. + smix = self.smix + if self.p == 1: + output = smix(input) + else: + # XXX: *could* use threading here, if really high p values encountered, + # but would tradeoff for more memory usage. + smix_bytes = self.smix_bytes + output = b''.join( + smix(input[offset:offset+smix_bytes]) + for offset in range(0, iv_bytes, smix_bytes) + ) + + # stretch final byte array into output via pbkdf2 + return pbkdf2_hmac("sha256", secret, output, rounds=1, keylen=keylen) + + #================================================================= + # smix() helper + #================================================================= + def smix(self, input): + """run SCrypt smix function on a single input block + + :arg input: + byte string containing input data. + interpreted as 32*r little endian 4 byte integers. 
+ + :returns: + byte string containing output data + derived by mixing input using n & r parameters. + + .. note:: time & mem cost are both ``O(n * r)`` + """ + # gather locals + bmix = self.bmix + bmix_struct = self.bmix_struct + integerify = self.integerify + n = self.n + + # parse input into 32*r integers ('X' in scrypt source) + # mem cost -- O(r) + buffer = list(bmix_struct.unpack(input)) + + # starting with initial buffer contents, derive V s.t. + # V[0]=initial_buffer ... V[i] = bmix(V[i-1], V[i-1]) ... V[n-1] = bmix(V[n-2], V[n-2]) + # final buffer contents should equal bmix(V[n-1], V[n-1]) + # + # time cost -- O(n * r) -- n loops, bmix is O(r) + # mem cost -- O(n * r) -- V is n-element array of r-element tuples + # NOTE: could do time / memory tradeoff to shrink size of V + def vgen(): + i = 0 + while i < n: + last = tuple(buffer) + yield last + bmix(last, buffer) + i += 1 + V = list(vgen()) + + # generate result from X & V. + # + # time cost -- O(n * r) -- loops n times, calls bmix() which has O(r) time cost + # mem cost -- O(1) -- allocates nothing, calls bmix() which has O(1) mem cost + get_v_elem = V.__getitem__ + n_mask = n - 1 + i = 0 + while i < n: + j = integerify(buffer) & n_mask + result = tuple(a ^ b for a, b in izip(buffer, get_v_elem(j))) + bmix(result, buffer) + i += 1 + + # # NOTE: we could easily support arbitrary values of ``n``, not just powers of 2, + # # but very few implementations have that ability, so not enabling it for now... + # if not n_is_log_2: + # while i < n: + # j = integerify(buffer) % n + # tmp = tuple(a^b for a,b in izip(buffer, get_v_elem(j))) + # bmix(tmp,buffer) + # i += 1 + + # repack tmp + return bmix_struct.pack(*buffer) + + #================================================================= + # bmix() helper + #================================================================= + def bmix(self, source, target): + """ + block mixing function used by smix() + uses salsa20/8 core to mix block contents. + + :arg source: + source to read from. + should be list of 32*r 4-byte integers + (2*r salsa20 blocks). + + :arg target: + target to write to. + should be list with same size as source. + the existing value of this buffer is ignored. + + .. warning:: + + this operates *in place* on target, + so source & target should NOT be same list. + + .. note:: + + * time cost is ``O(r)`` -- loops 16*r times, salsa20() has ``O(1)`` cost. + + * memory cost is ``O(1)`` -- salsa20() uses 16 x uint4, + all other operations done in-place. + """ + ## assert source is not target + # Y[-1] = B[2r-1], Y[i] = hash( Y[i-1] xor B[i]) + # B' <-- (Y_0, Y_2 ... Y_{2r-2}, Y_1, Y_3 ... 
Y_{2r-1}) */ + half = self.bmix_half_len # 16*r out of 32*r - start of Y_1 + tmp = source[-16:] # 'X' in scrypt source + siter = iter(source) + j = 0 + while j < half: + jn = j+16 + target[j:jn] = tmp = salsa20(a ^ b for a, b in izip(tmp, siter)) + target[half+j:half+jn] = tmp = salsa20(a ^ b for a, b in izip(tmp, siter)) + j = jn + + def _bmix_1(self, source, target): + """special bmix() method optimized for ``r=1`` case""" + B = source[16:] + target[:16] = tmp = salsa20(a ^ b for a, b in izip(B, iter(source))) + target[16:] = salsa20(a ^ b for a, b in izip(tmp, B)) + + #================================================================= + # eoc + #================================================================= + +#========================================================================== +# eof +#========================================================================== diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_gen_files.py b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_gen_files.py new file mode 100644 index 0000000..55ddfae --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_gen_files.py @@ -0,0 +1,154 @@ +"""passlib.utils.scrypt._gen_files - meta script that generates _salsa.py""" +#========================================================================== +# imports +#========================================================================== +# core +import os +# pkg +# local +#========================================================================== +# constants +#========================================================================== + +_SALSA_OPS = [ + # row = (target idx, source idx 1, source idx 2, rotate) + # interpreted as salsa operation over uint32... + # target = (source1+source2)<> (32 - (b)))) + ##x[ 4] ^= R(x[ 0]+x[12], 7); x[ 8] ^= R(x[ 4]+x[ 0], 9); + ##x[12] ^= R(x[ 8]+x[ 4],13); x[ 0] ^= R(x[12]+x[ 8],18); + ( 4, 0, 12, 7), + ( 8, 4, 0, 9), + ( 12, 8, 4, 13), + ( 0, 12, 8, 18), + + ##x[ 9] ^= R(x[ 5]+x[ 1], 7); x[13] ^= R(x[ 9]+x[ 5], 9); + ##x[ 1] ^= R(x[13]+x[ 9],13); x[ 5] ^= R(x[ 1]+x[13],18); + ( 9, 5, 1, 7), + ( 13, 9, 5, 9), + ( 1, 13, 9, 13), + ( 5, 1, 13, 18), + + ##x[14] ^= R(x[10]+x[ 6], 7); x[ 2] ^= R(x[14]+x[10], 9); + ##x[ 6] ^= R(x[ 2]+x[14],13); x[10] ^= R(x[ 6]+x[ 2],18); + ( 14, 10, 6, 7), + ( 2, 14, 10, 9), + ( 6, 2, 14, 13), + ( 10, 6, 2, 18), + + ##x[ 3] ^= R(x[15]+x[11], 7); x[ 7] ^= R(x[ 3]+x[15], 9); + ##x[11] ^= R(x[ 7]+x[ 3],13); x[15] ^= R(x[11]+x[ 7],18); + ( 3, 15, 11, 7), + ( 7, 3, 15, 9), + ( 11, 7, 3, 13), + ( 15, 11, 7, 18), + + ##/* Operate on rows. 
*/ + ##x[ 1] ^= R(x[ 0]+x[ 3], 7); x[ 2] ^= R(x[ 1]+x[ 0], 9); + ##x[ 3] ^= R(x[ 2]+x[ 1],13); x[ 0] ^= R(x[ 3]+x[ 2],18); + ( 1, 0, 3, 7), + ( 2, 1, 0, 9), + ( 3, 2, 1, 13), + ( 0, 3, 2, 18), + + ##x[ 6] ^= R(x[ 5]+x[ 4], 7); x[ 7] ^= R(x[ 6]+x[ 5], 9); + ##x[ 4] ^= R(x[ 7]+x[ 6],13); x[ 5] ^= R(x[ 4]+x[ 7],18); + ( 6, 5, 4, 7), + ( 7, 6, 5, 9), + ( 4, 7, 6, 13), + ( 5, 4, 7, 18), + + ##x[11] ^= R(x[10]+x[ 9], 7); x[ 8] ^= R(x[11]+x[10], 9); + ##x[ 9] ^= R(x[ 8]+x[11],13); x[10] ^= R(x[ 9]+x[ 8],18); + ( 11, 10, 9, 7), + ( 8, 11, 10, 9), + ( 9, 8, 11, 13), + ( 10, 9, 8, 18), + + ##x[12] ^= R(x[15]+x[14], 7); x[13] ^= R(x[12]+x[15], 9); + ##x[14] ^= R(x[13]+x[12],13); x[15] ^= R(x[14]+x[13],18); + ( 12, 15, 14, 7), + ( 13, 12, 15, 9), + ( 14, 13, 12, 13), + ( 15, 14, 13, 18), +] + +def main(): + target = os.path.join(os.path.dirname(__file__), "_salsa.py") + fh = file(target, "w") + write = fh.write + + VNAMES = ["v%d" % i for i in range(16)] + + PAD = " " * 4 + PAD2 = " " * 8 + PAD3 = " " * 12 + TLIST = ", ".join("b%d" % i for i in range(16)) + VLIST = ", ".join(VNAMES) + kwds = dict( + VLIST=VLIST, + TLIST=TLIST, + ) + + write('''\ +"""passlib.utils.scrypt._salsa - salsa 20/8 core, autogenerated by _gen_salsa.py""" +#================================================================= +# salsa function +#================================================================= + +def salsa20(input): + \"""apply the salsa20/8 core to the provided input + + :args input: input list containing 16 32-bit integers + :returns: result list containing 16 32-bit integers + \""" + + %(TLIST)s = input + %(VLIST)s = \\ + %(TLIST)s + + i = 0 + while i < 4: +''' % kwds) + + for idx, (target, source1, source2, rotate) in enumerate(_SALSA_OPS): + write('''\ + # salsa op %(idx)d: [%(it)d] ^= ([%(is1)d]+[%(is2)d])<<<%(rot1)d + t = (%(src1)s + %(src2)s) & 0xffffffff + %(dst)s ^= ((t & 0x%(rmask)08x) << %(rot1)d) | (t >> %(rot2)d) + +''' % dict( + idx=idx, is1 = source1, is2=source2, it=target, + src1=VNAMES[source1], + src2=VNAMES[source2], + dst=VNAMES[target], + rmask=(1<<(32-rotate))-1, + rot1=rotate, + rot2=32-rotate, + )) + + write('''\ + i += 1 + +''') + + for idx in range(16): + write(PAD + "b%d = (b%d + v%d) & 0xffffffff\n" % (idx,idx,idx)) + + write('''\ + + return %(TLIST)s + +#================================================================= +# eof +#================================================================= +''' % kwds) + +if __name__ == "__main__": + main() + +#========================================================================== +# eof +#========================================================================== diff --git a/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_salsa.py b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_salsa.py new file mode 100644 index 0000000..9112732 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/crypto/scrypt/_salsa.py @@ -0,0 +1,170 @@ +"""passlib.utils.scrypt._salsa - salsa 20/8 core, autogenerated by _gen_salsa.py""" +#================================================================= +# salsa function +#================================================================= + +def salsa20(input): + """apply the salsa20/8 core to the provided input + + :args input: input list containing 16 32-bit integers + :returns: result list containing 16 32-bit integers + """ + + b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 = input + v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 = \ + b0, b1, b2, 
b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 + + i = 0 + while i < 4: + # salsa op 0: [4] ^= ([0]+[12])<<<7 + t = (v0 + v12) & 0xffffffff + v4 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 1: [8] ^= ([4]+[0])<<<9 + t = (v4 + v0) & 0xffffffff + v8 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 2: [12] ^= ([8]+[4])<<<13 + t = (v8 + v4) & 0xffffffff + v12 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 3: [0] ^= ([12]+[8])<<<18 + t = (v12 + v8) & 0xffffffff + v0 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 4: [9] ^= ([5]+[1])<<<7 + t = (v5 + v1) & 0xffffffff + v9 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 5: [13] ^= ([9]+[5])<<<9 + t = (v9 + v5) & 0xffffffff + v13 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 6: [1] ^= ([13]+[9])<<<13 + t = (v13 + v9) & 0xffffffff + v1 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 7: [5] ^= ([1]+[13])<<<18 + t = (v1 + v13) & 0xffffffff + v5 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 8: [14] ^= ([10]+[6])<<<7 + t = (v10 + v6) & 0xffffffff + v14 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 9: [2] ^= ([14]+[10])<<<9 + t = (v14 + v10) & 0xffffffff + v2 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 10: [6] ^= ([2]+[14])<<<13 + t = (v2 + v14) & 0xffffffff + v6 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 11: [10] ^= ([6]+[2])<<<18 + t = (v6 + v2) & 0xffffffff + v10 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 12: [3] ^= ([15]+[11])<<<7 + t = (v15 + v11) & 0xffffffff + v3 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 13: [7] ^= ([3]+[15])<<<9 + t = (v3 + v15) & 0xffffffff + v7 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 14: [11] ^= ([7]+[3])<<<13 + t = (v7 + v3) & 0xffffffff + v11 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 15: [15] ^= ([11]+[7])<<<18 + t = (v11 + v7) & 0xffffffff + v15 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 16: [1] ^= ([0]+[3])<<<7 + t = (v0 + v3) & 0xffffffff + v1 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 17: [2] ^= ([1]+[0])<<<9 + t = (v1 + v0) & 0xffffffff + v2 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 18: [3] ^= ([2]+[1])<<<13 + t = (v2 + v1) & 0xffffffff + v3 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 19: [0] ^= ([3]+[2])<<<18 + t = (v3 + v2) & 0xffffffff + v0 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 20: [6] ^= ([5]+[4])<<<7 + t = (v5 + v4) & 0xffffffff + v6 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 21: [7] ^= ([6]+[5])<<<9 + t = (v6 + v5) & 0xffffffff + v7 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 22: [4] ^= ([7]+[6])<<<13 + t = (v7 + v6) & 0xffffffff + v4 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 23: [5] ^= ([4]+[7])<<<18 + t = (v4 + v7) & 0xffffffff + v5 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 24: [11] ^= ([10]+[9])<<<7 + t = (v10 + v9) & 0xffffffff + v11 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 25: [8] ^= ([11]+[10])<<<9 + t = (v11 + v10) & 0xffffffff + v8 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 26: [9] ^= ([8]+[11])<<<13 + t = (v8 + v11) & 0xffffffff + v9 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 27: [10] ^= ([9]+[8])<<<18 + t = (v9 + v8) & 0xffffffff + v10 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 28: [12] ^= ([15]+[14])<<<7 + t = (v15 + v14) & 0xffffffff + v12 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 29: [13] ^= ([12]+[15])<<<9 + t = (v12 + v15) & 0xffffffff + v13 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # 
salsa op 30: [14] ^= ([13]+[12])<<<13 + t = (v13 + v12) & 0xffffffff + v14 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 31: [15] ^= ([14]+[13])<<<18 + t = (v14 + v13) & 0xffffffff + v15 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + i += 1 + + b0 = (b0 + v0) & 0xffffffff + b1 = (b1 + v1) & 0xffffffff + b2 = (b2 + v2) & 0xffffffff + b3 = (b3 + v3) & 0xffffffff + b4 = (b4 + v4) & 0xffffffff + b5 = (b5 + v5) & 0xffffffff + b6 = (b6 + v6) & 0xffffffff + b7 = (b7 + v7) & 0xffffffff + b8 = (b8 + v8) & 0xffffffff + b9 = (b9 + v9) & 0xffffffff + b10 = (b10 + v10) & 0xffffffff + b11 = (b11 + v11) & 0xffffffff + b12 = (b12 + v12) & 0xffffffff + b13 = (b13 + v13) & 0xffffffff + b14 = (b14 + v14) & 0xffffffff + b15 = (b15 + v15) & 0xffffffff + + return b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 + +#================================================================= +# eof +#================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/exc.py b/venv/lib/python3.12/site-packages/passlib/exc.py new file mode 100644 index 0000000..755c7dc --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/exc.py @@ -0,0 +1,397 @@ +"""passlib.exc -- exceptions & warnings raised by passlib""" +#============================================================================= +# exceptions +#============================================================================= +class UnknownBackendError(ValueError): + """ + Error raised if multi-backend handler doesn't recognize backend name. + Inherits from :exc:`ValueError`. + + .. versionadded:: 1.7 + """ + def __init__(self, hasher, backend): + self.hasher = hasher + self.backend = backend + message = "%s: unknown backend: %r" % (hasher.name, backend) + ValueError.__init__(self, message) + + +# XXX: add a PasslibRuntimeError as base for Missing/Internal/Security runtime errors? + + +class MissingBackendError(RuntimeError): + """Error raised if multi-backend handler has no available backends; + or if specifically requested backend is not available. + + :exc:`!MissingBackendError` derives + from :exc:`RuntimeError`, since it usually indicates + lack of an external library or OS feature. + This is primarily raised by handlers which depend on + external libraries (which is currently just + :class:`~passlib.hash.bcrypt`). + """ + + +class InternalBackendError(RuntimeError): + """ + Error raised if something unrecoverable goes wrong with backend call; + such as if ``crypt.crypt()`` returning a malformed hash. + + .. versionadded:: 1.7.3 + """ + + +class PasswordValueError(ValueError): + """ + Error raised if a password can't be hashed / verified for various reasons. + This exception derives from the builtin :exc:`!ValueError`. + + May be thrown directly when password violates internal invariants of hasher + (e.g. some don't support NULL characters). Hashers may also throw more specific subclasses, + such as :exc:`!PasswordSizeError`. + + .. versionadded:: 1.7.3 + """ + pass + + +class PasswordSizeError(PasswordValueError): + """ + Error raised if a password exceeds the maximum size allowed + by Passlib (by default, 4096 characters); or if password exceeds + a hash-specific size limitation. + + This exception derives from :exc:`PasswordValueError` (above). + + Many password hash algorithms take proportionately larger amounts of time and/or + memory depending on the size of the password provided. 
This could present
+    a potential denial of service (DOS) situation if a maliciously large
+    password is provided to an application. Because of this, Passlib enforces
+    a maximum size limit, but one which should be *much* larger
+    than any legitimate password. :exc:`PasswordSizeError` derives
+    from :exc:`!ValueError`.
+
+    .. note::
+        Applications wishing to use a different limit should set the
+        ``PASSLIB_MAX_PASSWORD_SIZE`` environmental variable before
+        Passlib is loaded. The value can be any large positive integer.
+
+    .. attribute:: max_size
+
+        indicates the maximum allowed size.
+
+    .. versionadded:: 1.6
+    """
+
+    max_size = None
+
+    def __init__(self, max_size, msg=None):
+        self.max_size = max_size
+        if msg is None:
+            msg = "password exceeds maximum allowed size"
+        PasswordValueError.__init__(self, msg)
+
+    # this also prevents a glibc crypt segfault issue, detailed here ...
+    # http://www.openwall.com/lists/oss-security/2011/11/15/1
+
+class PasswordTruncateError(PasswordSizeError):
+    """
+    Error raised if password would be truncated by hash.
+    This derives from :exc:`PasswordSizeError` (above).
+
+    Hashers such as :class:`~passlib.hash.bcrypt` can be configured to raise
+    this error by setting ``truncate_error=True``.
+
+    .. attribute:: max_size
+
+        indicates the maximum allowed size.
+
+    .. versionadded:: 1.7
+    """
+
+    def __init__(self, cls, msg=None):
+        if msg is None:
+            msg = ("Password too long (%s truncates to %d characters)" %
+                   (cls.name, cls.truncate_size))
+        PasswordSizeError.__init__(self, cls.truncate_size, msg)
+
+
+class PasslibSecurityError(RuntimeError):
+    """
+    Error raised if critical security issue is detected
+    (e.g. an attempt is made to use a vulnerable version of a bcrypt backend).
+
+    .. versionadded:: 1.6.3
+    """
+
+
+class TokenError(ValueError):
+    """
+    Base error raised by :mod:`passlib.totp` when
+    a token can't be parsed / isn't valid / etc.
+    Derives from :exc:`!ValueError`.
+
+    Usually one of the more specific subclasses below will be raised:
+
+    * :class:`MalformedTokenError` -- invalid chars, too few digits
+    * :class:`InvalidTokenError` -- no match found
+    * :class:`UsedTokenError` -- match found, but token already used
+
+    .. versionadded:: 1.7
+    """
+
+    #: default message to use if none provided -- subclasses may fill this in
+    _default_message = 'Token not acceptable'
+
+    def __init__(self, msg=None, *args, **kwds):
+        if msg is None:
+            msg = self._default_message
+        ValueError.__init__(self, msg, *args, **kwds)
+
+
+class MalformedTokenError(TokenError):
+    """
+    Error raised by :mod:`passlib.totp` when a token isn't formatted correctly
+    (contains invalid characters, wrong number of digits, etc)
+    """
+    _default_message = "Unrecognized token"
+
+
+class InvalidTokenError(TokenError):
+    """
+    Error raised by :mod:`passlib.totp` when a token is formatted correctly,
+    but doesn't match any tokens within valid range.
+    """
+    _default_message = "Token did not match"
+
+
+class UsedTokenError(TokenError):
+    """
+    Error raised by :mod:`passlib.totp` if a token is reused.
+    Derives from :exc:`TokenError`.
+
+    .. autoattribute:: expire_time
+
+    .. versionadded:: 1.7
+    """
+    _default_message = "Token has already been used, please wait for another."
+
+    #: optional value indicating when current counter period will end,
+    #: and a new token can be generated.
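+    #: e.g. (illustrative sketch of a hypothetical caller pattern):
+    #:
+    #:     try:
+    #:         match = TOTP.verify(token, source)
+    #:     except UsedTokenError as err:
+    #:         retry_after = err.expire_time   # may be None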
+ expire_time = None + + def __init__(self, *args, **kwds): + self.expire_time = kwds.pop("expire_time", None) + TokenError.__init__(self, *args, **kwds) + + +class UnknownHashError(ValueError): + """ + Error raised by :class:`~passlib.crypto.lookup_hash` if hash name is not recognized. + This exception derives from :exc:`!ValueError`. + + As of version 1.7.3, this may also be raised if hash algorithm is known, + but has been disabled due to FIPS mode (message will include phrase "disabled for fips"). + + As of version 1.7.4, this may be raised if a :class:`~passlib.context.CryptContext` + is unable to identify the algorithm used by a password hash. + + .. versionadded:: 1.7 + + .. versionchanged: 1.7.3 + added 'message' argument. + + .. versionchanged:: 1.7.4 + altered call signature. + """ + def __init__(self, message=None, value=None): + self.value = value + if message is None: + message = "unknown hash algorithm: %r" % value + self.message = message + ValueError.__init__(self, message, value) + + def __str__(self): + return self.message + + +#============================================================================= +# warnings +#============================================================================= +class PasslibWarning(UserWarning): + """base class for Passlib's user warnings, + derives from the builtin :exc:`UserWarning`. + + .. versionadded:: 1.6 + """ + +# XXX: there's only one reference to this class, and it will go away in 2.0; +# so can probably remove this along with this / roll this into PasslibHashWarning. +class PasslibConfigWarning(PasslibWarning): + """Warning issued when non-fatal issue is found related to the configuration + of a :class:`~passlib.context.CryptContext` instance. + + This occurs primarily in one of two cases: + + * The CryptContext contains rounds limits which exceed the hard limits + imposed by the underlying algorithm. + * An explicit rounds value was provided which exceeds the limits + imposed by the CryptContext. + + In both of these cases, the code will perform correctly & securely; + but the warning is issued as a sign the configuration may need updating. + + .. versionadded:: 1.6 + """ + +class PasslibHashWarning(PasslibWarning): + """Warning issued when non-fatal issue is found with parameters + or hash string passed to a passlib hash class. + + This occurs primarily in one of two cases: + + * A rounds value or other setting was explicitly provided which + exceeded the handler's limits (and has been clamped + by the :ref:`relaxed` flag). + + * A malformed hash string was encountered which (while parsable) + should be re-encoded. + + .. versionadded:: 1.6 + """ + +class PasslibRuntimeWarning(PasslibWarning): + """Warning issued when something unexpected happens during runtime. + + The fact that it's a warning instead of an error means Passlib + was able to correct for the issue, but that it's anomalous enough + that the developers would love to hear under what conditions it occurred. + + .. versionadded:: 1.6 + """ + +class PasslibSecurityWarning(PasslibWarning): + """Special warning issued when Passlib encounters something + that might affect security. + + .. versionadded:: 1.6 + """ + +#============================================================================= +# error constructors +# +# note: these functions are used by the hashes in Passlib to raise common +# error messages. 
They are currently just functions which return ValueError, +# rather than subclasses of ValueError, since the specificity isn't needed +# yet; and who wants to import a bunch of error classes when catching +# ValueError will do? +#============================================================================= + +def _get_name(handler): + return handler.name if handler else "" + +#------------------------------------------------------------------------ +# generic helpers +#------------------------------------------------------------------------ +def type_name(value): + """return pretty-printed string containing name of value's type""" + cls = value.__class__ + if cls.__module__ and cls.__module__ not in ["__builtin__", "builtins"]: + return "%s.%s" % (cls.__module__, cls.__name__) + elif value is None: + return 'None' + else: + return cls.__name__ + +def ExpectedTypeError(value, expected, param): + """error message when param was supposed to be one type, but found another""" + # NOTE: value is never displayed, since it may sometimes be a password. + name = type_name(value) + return TypeError("%s must be %s, not %s" % (param, expected, name)) + +def ExpectedStringError(value, param): + """error message when param was supposed to be unicode or bytes""" + return ExpectedTypeError(value, "unicode or bytes", param) + +#------------------------------------------------------------------------ +# hash/verify parameter errors +#------------------------------------------------------------------------ +def MissingDigestError(handler=None): + """raised when verify() method gets passed config string instead of hash""" + name = _get_name(handler) + return ValueError("expected %s hash, got %s config string instead" % + (name, name)) + +def NullPasswordError(handler=None): + """raised by OS crypt() supporting hashes, which forbid NULLs in password""" + name = _get_name(handler) + return PasswordValueError("%s does not allow NULL bytes in password" % name) + +#------------------------------------------------------------------------ +# errors when parsing hashes +#------------------------------------------------------------------------ +def InvalidHashError(handler=None): + """error raised if unrecognized hash provided to handler""" + return ValueError("not a valid %s hash" % _get_name(handler)) + +def MalformedHashError(handler=None, reason=None): + """error raised if recognized-but-malformed hash provided to handler""" + text = "malformed %s hash" % _get_name(handler) + if reason: + text = "%s (%s)" % (text, reason) + return ValueError(text) + +def ZeroPaddedRoundsError(handler=None): + """error raised if hash was recognized but contained zero-padded rounds field""" + return MalformedHashError(handler, "zero-padded rounds") + +#------------------------------------------------------------------------ +# settings / hash component errors +#------------------------------------------------------------------------ +def ChecksumSizeError(handler, raw=False): + """error raised if hash was recognized, but checksum was wrong size""" + # TODO: if handler.use_defaults is set, this came from app-provided value, + # not from parsing a hash string, might want different error msg. 
+ checksum_size = handler.checksum_size + unit = "bytes" if raw else "chars" + reason = "checksum must be exactly %d %s" % (checksum_size, unit) + return MalformedHashError(handler, reason) + +#============================================================================= +# sensitive info helpers +#============================================================================= + +#: global flag, set temporarily by UTs to allow debug_only_repr() to display sensitive values. +ENABLE_DEBUG_ONLY_REPR = False + + +def debug_only_repr(value, param="hash"): + """ + helper used to display sensitive data (hashes etc) within error messages. + currently returns placeholder test UNLESS unittests are running, + in which case the real value is displayed. + + mainly useful to prevent hashes / secrets from being exposed in production tracebacks; + while still being visible from test failures. + + NOTE: api subject to change, may formalize this more in the future. + """ + if ENABLE_DEBUG_ONLY_REPR or value is None or isinstance(value, bool): + return repr(value) + return "<%s %s value omitted>" % (param, type(value)) + + +def CryptBackendError(handler, config, hash, # * + source="crypt.crypt()"): + """ + helper to generate standard message when ``crypt.crypt()`` returns invalid result. + takes care of automatically masking contents of config & hash outside of UTs. + """ + name = _get_name(handler) + msg = "%s returned invalid %s hash: config=%s hash=%s" % \ + (source, name, debug_only_repr(config), debug_only_repr(hash)) + raise InternalBackendError(msg) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/ext/__init__.py b/venv/lib/python3.12/site-packages/passlib/ext/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/ext/__init__.py @@ -0,0 +1 @@ + diff --git a/venv/lib/python3.12/site-packages/passlib/ext/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/ext/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e898f0e Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/ext/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/ext/django/__init__.py b/venv/lib/python3.12/site-packages/passlib/ext/django/__init__.py new file mode 100644 index 0000000..2dc9b28 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/ext/django/__init__.py @@ -0,0 +1,6 @@ +"""passlib.ext.django.models -- monkeypatch django hashing framework + +this plugin monkeypatches django's hashing framework +so that it uses a passlib context object, allowing handling of arbitrary +hashes in Django databases. 
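+
+minimal activation sketch (illustrative; based on the behavior of the modules
+below, which read ``settings.PASSLIB_CONFIG`` and install the monkeypatch when
+``passlib.ext.django.models`` is imported)::
+
+    # somewhere in settings.py
+    PASSLIB_CONFIG = "django-default"
+
+    # importing the models module applies the patch
+    import passlib.ext.django.models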
+""" diff --git a/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..39562e9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..89ae377 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..49e35a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/ext/django/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/ext/django/models.py b/venv/lib/python3.12/site-packages/passlib/ext/django/models.py new file mode 100644 index 0000000..e766c2d --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/ext/django/models.py @@ -0,0 +1,36 @@ +"""passlib.ext.django.models -- monkeypatch django hashing framework""" +#============================================================================= +# imports +#============================================================================= +# core +# site +# pkg +from passlib.context import CryptContext +from passlib.ext.django.utils import DjangoContextAdapter +# local +__all__ = ["password_context"] + +#============================================================================= +# global attrs +#============================================================================= + +#: adapter instance used to drive most of this +adapter = DjangoContextAdapter() + +# the context object which this patches contrib.auth to use for password hashing. +# configuration controlled by ``settings.PASSLIB_CONFIG``. 
+password_context = adapter.context + +#: hook callers should use if context is changed +context_changed = adapter.reset_hashers + +#============================================================================= +# main code +#============================================================================= + +# load config & install monkeypatch +adapter.load_model() + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/ext/django/utils.py b/venv/lib/python3.12/site-packages/passlib/ext/django/utils.py new file mode 100644 index 0000000..2f8a2ef --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/ext/django/utils.py @@ -0,0 +1,1276 @@ +"""passlib.ext.django.utils - helper functions used by this plugin""" +#============================================================================= +# imports +#============================================================================= +# core +from functools import update_wrapper, wraps +import logging; log = logging.getLogger(__name__) +import sys +import weakref +from warnings import warn +# site +try: + from django import VERSION as DJANGO_VERSION + log.debug("found django %r installation", DJANGO_VERSION) +except ImportError: + log.debug("django installation not found") + DJANGO_VERSION = () +# pkg +from passlib import exc, registry +from passlib.context import CryptContext +from passlib.exc import PasslibRuntimeWarning +from passlib.utils.compat import get_method_function, iteritems, OrderedDict, unicode +from passlib.utils.decor import memoized_property +# local +__all__ = [ + "DJANGO_VERSION", + "MIN_DJANGO_VERSION", + "get_preset_config", + "quirks", +] + +#: minimum version supported by passlib.ext.django +MIN_DJANGO_VERSION = (1, 8) + +#============================================================================= +# quirk detection +#============================================================================= + +class quirks: + + #: django check_password() started throwing error on encoded=None + #: (really identify_hasher did) + none_causes_check_password_error = DJANGO_VERSION >= (2, 1) + + #: django is_usable_password() started returning True for password = {None, ""} values. + empty_is_usable_password = DJANGO_VERSION >= (2, 1) + + #: django is_usable_password() started returning True for non-hash strings in 2.1 + invalid_is_usable_password = DJANGO_VERSION >= (2, 1) + +#============================================================================= +# default policies +#============================================================================= + +# map preset names -> passlib.app attrs +_preset_map = { + "django-1.0": "django10_context", + "django-1.4": "django14_context", + "django-1.6": "django16_context", + "django-latest": "django_context", +} + +def get_preset_config(name): + """Returns configuration string for one of the preset strings + supported by the ``PASSLIB_CONFIG`` setting. + Currently supported presets: + + * ``"passlib-default"`` - default config used by this release of passlib. + * ``"django-default"`` - config matching currently installed django version. + * ``"django-latest"`` - config matching newest django version (currently same as ``"django-1.6"``). 
+ * ``"django-1.0"`` - config used by stock Django 1.0 - 1.3 installs + * ``"django-1.4"`` - config used by stock Django 1.4 installs + * ``"django-1.6"`` - config used by stock Django 1.6 installs + """ + # TODO: add preset which includes HASHERS + PREFERRED_HASHERS, + # after having imported any custom hashers. e.g. "django-current" + if name == "django-default": + if not DJANGO_VERSION: + raise ValueError("can't resolve django-default preset, " + "django not installed") + name = "django-1.6" + if name == "passlib-default": + return PASSLIB_DEFAULT + try: + attr = _preset_map[name] + except KeyError: + raise ValueError("unknown preset config name: %r" % name) + import passlib.apps + return getattr(passlib.apps, attr).to_string() + +# default context used by passlib 1.6 +PASSLIB_DEFAULT = """ +[passlib] + +; list of schemes supported by configuration +; currently all django 1.6, 1.4, and 1.0 hashes, +; and three common modular crypt format hashes. +schemes = + django_pbkdf2_sha256, django_pbkdf2_sha1, django_bcrypt, django_bcrypt_sha256, + django_salted_sha1, django_salted_md5, django_des_crypt, hex_md5, + sha512_crypt, bcrypt, phpass + +; default scheme to use for new hashes +default = django_pbkdf2_sha256 + +; hashes using these schemes will automatically be re-hashed +; when the user logs in (currently all django 1.0 hashes) +deprecated = + django_pbkdf2_sha1, django_salted_sha1, django_salted_md5, + django_des_crypt, hex_md5 + +; sets some common options, including minimum rounds for two primary hashes. +; if a hash has less than this number of rounds, it will be re-hashed. +sha512_crypt__min_rounds = 80000 +django_pbkdf2_sha256__min_rounds = 10000 + +; set somewhat stronger iteration counts for ``User.is_staff`` +staff__sha512_crypt__default_rounds = 100000 +staff__django_pbkdf2_sha256__default_rounds = 12500 + +; and even stronger ones for ``User.is_superuser`` +superuser__sha512_crypt__default_rounds = 120000 +superuser__django_pbkdf2_sha256__default_rounds = 15000 +""" + +#============================================================================= +# helpers +#============================================================================= + +#: prefix used to shoehorn passlib's handler names into django hasher namespace +PASSLIB_WRAPPER_PREFIX = "passlib_" + +#: prefix used by all the django-specific hash formats in passlib; +#: all of these hashes should have a ``.django_name`` attribute. +DJANGO_COMPAT_PREFIX = "django_" + +#: set of hashes w/o "django_" prefix, but which also expose ``.django_name``. +_other_django_hashes = set(["hex_md5"]) + +def _wrap_method(method): + """wrap method object in bare function""" + @wraps(method) + def wrapper(*args, **kwds): + return method(*args, **kwds) + return wrapper + +#============================================================================= +# translator +#============================================================================= +class DjangoTranslator(object): + """ + Object which helps translate passlib hasher objects / names + to and from django hasher objects / names. + + These methods are wrapped in a class so that results can be cached, + but with the ability to have independant caches, since django hasher + names may / may not correspond to the same instance (or even class). 
+ """ + #============================================================================= + # instance attrs + #============================================================================= + + #: CryptContext instance + #: (if any -- generally only set by DjangoContextAdapter subclass) + context = None + + #: internal cache of passlib hasher -> django hasher instance. + #: key stores weakref to passlib hasher. + _django_hasher_cache = None + + #: special case -- unsalted_sha1 + _django_unsalted_sha1 = None + + #: internal cache of django name -> passlib hasher + #: value stores weakrefs to passlib hasher. + _passlib_hasher_cache = None + + #============================================================================= + # init + #============================================================================= + + def __init__(self, context=None, **kwds): + super(DjangoTranslator, self).__init__(**kwds) + if context is not None: + self.context = context + + self._django_hasher_cache = weakref.WeakKeyDictionary() + self._passlib_hasher_cache = weakref.WeakValueDictionary() + + def reset_hashers(self): + self._django_hasher_cache.clear() + self._passlib_hasher_cache.clear() + self._django_unsalted_sha1 = None + + def _get_passlib_hasher(self, passlib_name): + """ + resolve passlib hasher by name, using context if available. + """ + context = self.context + if context is None: + return registry.get_crypt_handler(passlib_name) + else: + return context.handler(passlib_name) + + #============================================================================= + # resolve passlib hasher -> django hasher + #============================================================================= + + def passlib_to_django_name(self, passlib_name): + """ + Convert passlib hasher / name to Django hasher name. + """ + return self.passlib_to_django(passlib_name).algorithm + + # XXX: add option (in class, or call signature) to always return a wrapper, + # rather than native builtin -- would let HashersTest check that + # our own wrapper + implementations are matching up with their tests. + def passlib_to_django(self, passlib_hasher, cached=True): + """ + Convert passlib hasher / name to Django hasher. + + :param passlib_hasher: + passlib hasher / name + + :returns: + django hasher instance + """ + # resolve names to hasher + if not hasattr(passlib_hasher, "name"): + passlib_hasher = self._get_passlib_hasher(passlib_hasher) + + # check cache + if cached: + cache = self._django_hasher_cache + try: + return cache[passlib_hasher] + except KeyError: + pass + result = cache[passlib_hasher] = \ + self.passlib_to_django(passlib_hasher, cached=False) + return result + + # find native equivalent, and return wrapper if there isn't one + django_name = getattr(passlib_hasher, "django_name", None) + if django_name: + return self._create_django_hasher(django_name) + else: + return _PasslibHasherWrapper(passlib_hasher) + + _builtin_django_hashers = dict( + md5="MD5PasswordHasher", + ) + + if DJANGO_VERSION > (2, 1): + # present but disabled by default as of django 2.1; not sure when added, + # so not listing it by default. + _builtin_django_hashers.update( + bcrypt="BCryptPasswordHasher", + ) + + def _create_django_hasher(self, django_name): + """ + helper to create new django hasher by name. + wraps underlying django methods. 
+ """ + # if we haven't patched django, can use it directly + module = sys.modules.get("passlib.ext.django.models") + if module is None or not module.adapter.patched: + from django.contrib.auth.hashers import get_hasher + try: + return get_hasher(django_name) + except ValueError as err: + if not str(err).startswith("Unknown password hashing algorithm"): + raise + else: + # We've patched django's get_hashers(), so calling django's get_hasher() + # or get_hashers_by_algorithm() would only land us back here. + # As non-ideal workaround, have to use original get_hashers(), + get_hashers = module.adapter._manager.getorig("django.contrib.auth.hashers:get_hashers").__wrapped__ + for hasher in get_hashers(): + if hasher.algorithm == django_name: + return hasher + + # hardcode a few for cases where get_hashers() lookup won't work + # (mainly, hashers that are present in django, but disabled by their default config) + path = self._builtin_django_hashers.get(django_name) + if path: + if "." not in path: + path = "django.contrib.auth.hashers." + path + from django.utils.module_loading import import_string + return import_string(path)() + + raise ValueError("unknown hasher: %r" % django_name) + + #============================================================================= + # reverse django -> passlib + #============================================================================= + + def django_to_passlib_name(self, django_name): + """ + Convert Django hasher / name to Passlib hasher name. + """ + return self.django_to_passlib(django_name).name + + def django_to_passlib(self, django_name, cached=True): + """ + Convert Django hasher / name to Passlib hasher / name. + If present, CryptContext will be checked instead of main registry. + + :param django_name: + Django hasher class or algorithm name. + "default" allowed if context provided. + + :raises ValueError: + if can't resolve hasher. + + :returns: + passlib hasher or name + """ + # check for django hasher + if hasattr(django_name, "algorithm"): + + # check for passlib adapter + if isinstance(django_name, _PasslibHasherWrapper): + return django_name.passlib_handler + + # resolve django hasher -> name + django_name = django_name.algorithm + + # check cache + if cached: + cache = self._passlib_hasher_cache + try: + return cache[django_name] + except KeyError: + pass + result = cache[django_name] = \ + self.django_to_passlib(django_name, cached=False) + return result + + # check if it's an obviously-wrapped name + if django_name.startswith(PASSLIB_WRAPPER_PREFIX): + passlib_name = django_name[len(PASSLIB_WRAPPER_PREFIX):] + return self._get_passlib_hasher(passlib_name) + + # resolve default + if django_name == "default": + context = self.context + if context is None: + raise TypeError("can't determine default scheme w/ context") + return context.handler() + + # special case: Django uses a separate hasher for "sha1$$digest" + # hashes (unsalted_sha1) and "sha1$salt$digest" (sha1); + # but passlib uses "django_salted_sha1" for both of these. + if django_name == "unsalted_sha1": + django_name = "sha1" + + # resolve name + # XXX: bother caching these lists / mapping? + # not needed in long-term due to cache above. 
+ context = self.context + if context is None: + # check registry + # TODO: should make iteration via registry easier + candidates = ( + registry.get_crypt_handler(passlib_name) + for passlib_name in registry.list_crypt_handlers() + if passlib_name.startswith(DJANGO_COMPAT_PREFIX) or + passlib_name in _other_django_hashes + ) + else: + # check context + candidates = context.schemes(resolve=True) + for handler in candidates: + if getattr(handler, "django_name", None) == django_name: + return handler + + # give up + # NOTE: this should only happen for custom django hashers that we don't + # know the equivalents for. _HasherHandler (below) is work in + # progress that would allow us to at least return a wrapper. + raise ValueError("can't translate django name to passlib name: %r" % + (django_name,)) + + #============================================================================= + # django hasher lookup + #============================================================================= + + def resolve_django_hasher(self, django_name, cached=True): + """ + Take in a django algorithm name, return django hasher. + """ + # check for django hasher + if hasattr(django_name, "algorithm"): + return django_name + + # resolve to passlib hasher + passlib_hasher = self.django_to_passlib(django_name, cached=cached) + + # special case: Django uses a separate hasher for "sha1$$digest" + # hashes (unsalted_sha1) and "sha1$salt$digest" (sha1); + # but passlib uses "django_salted_sha1" for both of these. + # XXX: this isn't ideal way to handle this. would like to do something + # like pass "django_variant=django_name" into passlib_to_django(), + # and have it cache separate hasher there. + # but that creates a LOT of complication in it's cache structure, + # for what is just one special case. + if django_name == "unsalted_sha1" and passlib_hasher.name == "django_salted_sha1": + if not cached: + return self._create_django_hasher(django_name) + result = self._django_unsalted_sha1 + if result is None: + result = self._django_unsalted_sha1 = self._create_django_hasher(django_name) + return result + + # lookup corresponding django hasher + return self.passlib_to_django(passlib_hasher, cached=cached) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# adapter +#============================================================================= +class DjangoContextAdapter(DjangoTranslator): + """ + Object which tries to adapt a Passlib CryptContext object, + using a Django-hasher compatible API. + + When installed in django, :mod:`!passlib.ext.django` will create + an instance of this class, and then monkeypatch the appropriate + methods into :mod:`!django.contrib.auth` and other appropriate places. 
+ """ + #============================================================================= + # instance attrs + #============================================================================= + + #: CryptContext instance we're wrapping + context = None + + #: ref to original make_password(), + #: needed to generate usuable passwords that match django + _orig_make_password = None + + #: ref to django helper of this name -- not monkeypatched + is_password_usable = None + + #: PatchManager instance used to track installation + _manager = None + + #: whether config=disabled flag was set + enabled = True + + #: patch status + patched = False + + #============================================================================= + # init + #============================================================================= + def __init__(self, context=None, get_user_category=None, **kwds): + + # init log + self.log = logging.getLogger(__name__ + ".DjangoContextAdapter") + + # init parent, filling in default context object + if context is None: + context = CryptContext() + super(DjangoContextAdapter, self).__init__(context=context, **kwds) + + # setup user category + if get_user_category: + assert callable(get_user_category) + self.get_user_category = get_user_category + + # install lru cache wrappers + try: + from functools import lru_cache # new py32 + except ImportError: + from django.utils.lru_cache import lru_cache # py2 compat, removed in django 3 (or earlier?) + self.get_hashers = lru_cache()(self.get_hashers) + + # get copy of original make_password + from django.contrib.auth.hashers import make_password + if make_password.__module__.startswith("passlib."): + make_password = _PatchManager.peek_unpatched_func(make_password) + self._orig_make_password = make_password + + # get other django helpers + from django.contrib.auth.hashers import is_password_usable + self.is_password_usable = is_password_usable + + # init manager + mlog = logging.getLogger(__name__ + ".DjangoContextAdapter._manager") + self._manager = _PatchManager(log=mlog) + + def reset_hashers(self): + """ + Wrapper to manually reset django's hasher lookup cache + """ + # resets cache for .get_hashers() & .get_hashers_by_algorithm() + from django.contrib.auth.hashers import reset_hashers + reset_hashers(setting="PASSWORD_HASHERS") + + # reset internal caches + super(DjangoContextAdapter, self).reset_hashers() + + #============================================================================= + # django hashers helpers -- hasher lookup + #============================================================================= + + # lru_cache()'ed by init + def get_hashers(self): + """ + Passlib replacement for get_hashers() -- + Return list of available django hasher classes + """ + passlib_to_django = self.passlib_to_django + return [passlib_to_django(hasher) + for hasher in self.context.schemes(resolve=True)] + + def get_hasher(self, algorithm="default"): + """ + Passlib replacement for get_hasher() -- + Return django hasher by name + """ + return self.resolve_django_hasher(algorithm) + + def identify_hasher(self, encoded): + """ + Passlib replacement for identify_hasher() -- + Identify django hasher based on hash. + """ + handler = self.context.identify(encoded, resolve=True, required=True) + if handler.name == "django_salted_sha1" and encoded.startswith("sha1$$"): + # Django uses a separate hasher for "sha1$$digest" hashes, but + # passlib identifies it as belonging to "sha1$salt$digest" handler. + # We want to resolve to correct django hasher. 
+ return self.get_hasher("unsalted_sha1") + return self.passlib_to_django(handler) + + #============================================================================= + # django.contrib.auth.hashers helpers -- password helpers + #============================================================================= + + def make_password(self, password, salt=None, hasher="default"): + """ + Passlib replacement for make_password() + """ + if password is None: + return self._orig_make_password(None) + # NOTE: relying on hasher coming from context, and thus having + # context-specific config baked into it. + passlib_hasher = self.django_to_passlib(hasher) + if "salt" not in passlib_hasher.setting_kwds: + # ignore salt param even if preset + pass + elif hasher.startswith("unsalted_"): + # Django uses a separate 'unsalted_sha1' hasher for "sha1$$digest", + # but passlib just reuses it's "sha1" handler ("sha1$salt$digest"). To make + # this work, have to explicitly tell the sha1 handler to use an empty salt. + passlib_hasher = passlib_hasher.using(salt="") + elif salt: + # Django make_password() autogenerates a salt if salt is bool False (None / ''), + # so we only pass the keyword on if there's actually a fixed salt. + passlib_hasher = passlib_hasher.using(salt=salt) + return passlib_hasher.hash(password) + + def check_password(self, password, encoded, setter=None, preferred="default"): + """ + Passlib replacement for check_password() + """ + # XXX: this currently ignores "preferred" keyword, since its purpose + # was for hash migration, and that's handled by the context. + # XXX: honor "none_causes_check_password_error" quirk for django 2.2+? + # seems safer to return False. + if password is None or not self.is_password_usable(encoded): + return False + + # verify password + context = self.context + try: + correct = context.verify(password, encoded) + except exc.UnknownHashError: + # As of django 1.5, unidentifiable hashes returns False + # (side-effect of django issue 18453) + return False + + if not (correct and setter): + return correct + + # check if we need to rehash + if preferred == "default": + if not context.needs_update(encoded, secret=password): + return correct + else: + # Django's check_password() won't call setter() on a + # 'preferred' alg, even if it's otherwise deprecated. To try and + # replicate this behavior if preferred is set, we look up the + # passlib hasher, and call it's original needs_update() method. + # TODO: Solve redundancy that verify() call + # above is already identifying hash. + hasher = self.django_to_passlib(preferred) + if (hasher.identify(encoded) and + not hasher.needs_update(encoded, secret=password)): + # alg is 'preferred' and hash itself doesn't need updating, + # so nothing to do. + return correct + # else: either hash isn't preferred, or it needs updating. 
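+        # caller-side sketch (mirroring how django's own auth wires it up):
+        # setter is typically a closure over the user, e.g.
+        #     setter = lambda raw: (user.set_password(raw), user.save())
+        # so a correct-but-deprecated hash is transparently re-hashed on login.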
+ + # call setter to rehash + setter(password) + return correct + + #============================================================================= + # django users helpers + #============================================================================= + + def user_check_password(self, user, password): + """ + Passlib replacement for User.check_password() + """ + if password is None: + return False + hash = user.password + if not self.is_password_usable(hash): + return False + cat = self.get_user_category(user) + try: + ok, new_hash = self.context.verify_and_update(password, hash, category=cat) + except exc.UnknownHashError: + # As of django 1.5, unidentifiable hashes returns False + # (side-effect of django issue 18453) + return False + if ok and new_hash is not None: + # migrate to new hash if needed. + user.password = new_hash + user.save() + return ok + + def user_set_password(self, user, password): + """ + Passlib replacement for User.set_password() + """ + if password is None: + user.set_unusable_password() + else: + cat = self.get_user_category(user) + user.password = self.context.hash(password, category=cat) + + def get_user_category(self, user): + """ + Helper for hashing passwords per-user -- + figure out the CryptContext category for specified Django user object. + .. note:: + This may be overridden via PASSLIB_GET_CATEGORY django setting + """ + if user.is_superuser: + return "superuser" + elif user.is_staff: + return "staff" + else: + return None + + #============================================================================= + # patch control + #============================================================================= + + HASHERS_PATH = "django.contrib.auth.hashers" + MODELS_PATH = "django.contrib.auth.models" + USER_CLASS_PATH = MODELS_PATH + ":User" + FORMS_PATH = "django.contrib.auth.forms" + + #: list of locations to patch + patch_locations = [ + # + # User object + # NOTE: could leave defaults alone, but want to have user available + # so that we can support get_user_category() + # + (USER_CLASS_PATH + ".check_password", "user_check_password", dict(method=True)), + (USER_CLASS_PATH + ".set_password", "user_set_password", dict(method=True)), + + # + # Hashers module + # + (HASHERS_PATH + ":", "check_password"), + (HASHERS_PATH + ":", "make_password"), + (HASHERS_PATH + ":", "get_hashers"), + (HASHERS_PATH + ":", "get_hasher"), + (HASHERS_PATH + ":", "identify_hasher"), + + # + # Patch known imports from hashers module + # + (MODELS_PATH + ":", "check_password"), + (MODELS_PATH + ":", "make_password"), + (FORMS_PATH + ":", "get_hasher"), + (FORMS_PATH + ":", "identify_hasher"), + + ] + + def install_patch(self): + """ + Install monkeypatch to replace django hasher framework. + """ + # don't reapply + log = self.log + if self.patched: + log.warning("monkeypatching already applied, refusing to reapply") + return False + + # version check + if DJANGO_VERSION < MIN_DJANGO_VERSION: + raise RuntimeError("passlib.ext.django requires django >= %s" % + (MIN_DJANGO_VERSION,)) + + # log start + log.debug("preparing to monkeypatch django ...") + + # run through patch locations + manager = self._manager + for record in self.patch_locations: + if len(record) == 2: + record += ({},) + target, source, opts = record + if target.endswith((":", ",")): + target += source + value = getattr(self, source) + if opts.get("method"): + # have to wrap our method in a function, + # since we're installing it in a class *as* a method + # XXX: make this a flag for .patch()? 
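+                # e.g. (a sketch) the user_check_password record above yields
+                #     target = "django.contrib.auth.models:User.check_password"
+                #     value  = _wrap_method(self.user_check_password)
+                # so the bound method installs on the class as a plain function.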
+ value = _wrap_method(value) + manager.patch(target, value) + + # reset django's caches (e.g. get_hash_by_algorithm) + self.reset_hashers() + + # done! + self.patched = True + log.debug("... finished monkeypatching django") + return True + + def remove_patch(self): + """ + Remove monkeypatch from django hasher framework. + As precaution in case there are lingering refs to context, + context object will be wiped. + + .. warning:: + This may cause problems if any other Django modules have imported + their own copies of the patched functions, though the patched + code has been designed to throw an error as soon as possible in + this case. + """ + log = self.log + manager = self._manager + + if self.patched: + log.debug("removing django monkeypatching...") + manager.unpatch_all(unpatch_conflicts=True) + self.context.load({}) + self.patched = False + self.reset_hashers() + log.debug("...finished removing django monkeypatching") + return True + + if manager.isactive(): # pragma: no cover -- sanity check + log.warning("reverting partial monkeypatching of django...") + manager.unpatch_all() + self.context.load({}) + self.reset_hashers() + log.debug("...finished removing django monkeypatching") + return True + + log.debug("django not monkeypatched") + return False + + #============================================================================= + # loading config + #============================================================================= + + def load_model(self): + """ + Load configuration from django, and install patch. + """ + self._load_settings() + if self.enabled: + try: + self.install_patch() + except: + # try to undo what we can + self.remove_patch() + raise + else: + if self.patched: # pragma: no cover -- sanity check + log.error("didn't expect monkeypatching would be applied!") + self.remove_patch() + log.debug("passlib.ext.django loaded") + + def _load_settings(self): + """ + Update settings from django + """ + from django.conf import settings + + # TODO: would like to add support for inheriting config from a preset + # (or from existing hasher state) and letting PASSLIB_CONFIG + # be an update, not a replacement. + + # TODO: wrap and import any custom hashers as passlib handlers, + # so they could be used in the passlib config. 
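+        # a minimal sketch of the callable PASSLIB_GET_CATEGORY may name
+        # (hypothetical function; mirrors the default get_user_category() above):
+        #
+        #     def get_category(user):
+        #         if user.is_superuser:
+        #             return "superuser"
+        #         return "staff" if user.is_staff else None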
+ + # load config from settings + _UNSET = object() + config = getattr(settings, "PASSLIB_CONFIG", _UNSET) + if config is _UNSET: + # XXX: should probably deprecate this alias + config = getattr(settings, "PASSLIB_CONTEXT", _UNSET) + if config is _UNSET: + config = "passlib-default" + if config is None: + warn("setting PASSLIB_CONFIG=None is deprecated, " + "and support will be removed in Passlib 1.8, " + "use PASSLIB_CONFIG='disabled' instead.", + DeprecationWarning) + config = "disabled" + elif not isinstance(config, (unicode, bytes, dict)): + raise exc.ExpectedTypeError(config, "str or dict", "PASSLIB_CONFIG") + + # load custom category func (if any) + get_category = getattr(settings, "PASSLIB_GET_CATEGORY", None) + if get_category and not callable(get_category): + raise exc.ExpectedTypeError(get_category, "callable", "PASSLIB_GET_CATEGORY") + + # check if we've been disabled + if config == "disabled": + self.enabled = False + return + else: + self.__dict__.pop("enabled", None) + + # resolve any preset aliases + if isinstance(config, str) and '\n' not in config: + config = get_preset_config(config) + + # setup category func + if get_category: + self.get_user_category = get_category + else: + self.__dict__.pop("get_category", None) + + # setup context + self.context.load(config) + self.reset_hashers() + + #============================================================================= + # eof + #============================================================================= + +#============================================================================= +# wrapping passlib handlers as django hashers +#============================================================================= +_GEN_SALT_SIGNAL = "--!!!generate-new-salt!!!--" + +class ProxyProperty(object): + """helper that proxies another attribute""" + + def __init__(self, attr): + self.attr = attr + + def __get__(self, obj, cls): + if obj is None: + cls = obj + return getattr(obj, self.attr) + + def __set__(self, obj, value): + setattr(obj, self.attr, value) + + def __delete__(self, obj): + delattr(obj, self.attr) + + +class _PasslibHasherWrapper(object): + """ + adapter which which wraps a :cls:`passlib.ifc.PasswordHash` class, + and provides an interface compatible with the Django hasher API. + + :param passlib_handler: + passlib hash handler (e.g. :cls:`passlib.hash.sha256_crypt`. + """ + #===================================================================== + # instance attrs + #===================================================================== + + #: passlib handler that we're adapting. + passlib_handler = None + + # NOTE: 'rounds' attr will store variable rounds, IF handler supports it. + # 'iterations' will act as proxy, for compatibility with django pbkdf2 hashers. + # rounds = None + # iterations = None + + #===================================================================== + # init + #===================================================================== + def __init__(self, passlib_handler): + # init handler + if getattr(passlib_handler, "django_name", None): + raise ValueError("handlers that reflect an official django " + "hasher shouldn't be wrapped: %r" % + (passlib_handler.name,)) + if passlib_handler.is_disabled: + # XXX: could this be implemented? 
+ raise ValueError("can't wrap disabled-hash handlers: %r" % + (passlib_handler.name)) + self.passlib_handler = passlib_handler + + # init rounds support + if self._has_rounds: + self.rounds = passlib_handler.default_rounds + self.iterations = ProxyProperty("rounds") + + #===================================================================== + # internal methods + #===================================================================== + def __repr__(self): + return "" % self.passlib_handler + + #===================================================================== + # internal properties + #===================================================================== + + @memoized_property + def __name__(self): + return "Passlib_%s_PasswordHasher" % self.passlib_handler.name.title() + + @memoized_property + def _has_rounds(self): + return "rounds" in self.passlib_handler.setting_kwds + + @memoized_property + def _translate_kwds(self): + """ + internal helper for safe_summary() -- + used to translate passlib hash options -> django keywords + """ + out = dict(checksum="hash") + if self._has_rounds and "pbkdf2" in self.passlib_handler.name: + out['rounds'] = 'iterations' + return out + + #===================================================================== + # hasher properties + #===================================================================== + + @memoized_property + def algorithm(self): + return PASSLIB_WRAPPER_PREFIX + self.passlib_handler.name + + #===================================================================== + # hasher api + #===================================================================== + def salt(self): + # NOTE: passlib's handler.hash() should generate new salt each time, + # so this just returns a special constant which tells + # encode() (below) not to pass a salt keyword along. + return _GEN_SALT_SIGNAL + + def verify(self, password, encoded): + return self.passlib_handler.verify(password, encoded) + + def encode(self, password, salt=None, rounds=None, iterations=None): + kwds = {} + if salt is not None and salt != _GEN_SALT_SIGNAL: + kwds['salt'] = salt + if self._has_rounds: + if rounds is not None: + kwds['rounds'] = rounds + elif iterations is not None: + kwds['rounds'] = iterations + else: + kwds['rounds'] = self.rounds + elif rounds is not None or iterations is not None: + warn("%s.hash(): 'rounds' and 'iterations' are ignored" % self.__name__) + handler = self.passlib_handler + if kwds: + handler = handler.using(**kwds) + return handler.hash(password) + + def safe_summary(self, encoded): + from django.contrib.auth.hashers import mask_hash + from django.utils.translation import ugettext_noop as _ + handler = self.passlib_handler + items = [ + # since this is user-facing, we're reporting passlib's name, + # without the distracting PASSLIB_HASHER_PREFIX prepended. + (_('algorithm'), handler.name), + ] + if hasattr(handler, "parsehash"): + kwds = handler.parsehash(encoded, sanitize=mask_hash) + for key, value in iteritems(kwds): + key = self._translate_kwds.get(key, key) + items.append((_(key), value)) + return OrderedDict(items) + + def must_update(self, encoded): + # TODO: would like access CryptContext, would need caller to pass it to get_passlib_hasher(). 
+ # for now (as of passlib 1.6.6), replicating django policy that this returns True + # if 'encoded' hash has different rounds value from self.rounds + if self._has_rounds: + # XXX: could cache this subclass somehow (would have to intercept writes to self.rounds) + # TODO: always call subcls/handler.needs_update() in case there's other things to check + subcls = self.passlib_handler.using(min_rounds=self.rounds, max_rounds=self.rounds) + if subcls.needs_update(encoded): + return True + return False + + #===================================================================== + # eoc + #===================================================================== + +#============================================================================= +# adapting django hashers -> passlib handlers +#============================================================================= +# TODO: this code probably halfway works, mainly just needs +# a routine to read HASHERS and PREFERRED_HASHER. + +##from passlib.registry import register_crypt_handler +##from passlib.utils import classproperty, to_native_str, to_unicode +##from passlib.utils.compat import unicode +## +## +##class _HasherHandler(object): +## "helper for wrapping Hasher instances as passlib handlers" +## # FIXME: this generic wrapper doesn't handle custom settings +## # FIXME: genconfig / genhash not supported. +## +## def __init__(self, hasher): +## self.django_hasher = hasher +## if hasattr(hasher, "iterations"): +## # assume encode() accepts an "iterations" parameter. +## # fake min/max rounds +## self.min_rounds = 1 +## self.max_rounds = 0xFFFFffff +## self.default_rounds = self.django_hasher.iterations +## self.setting_kwds += ("rounds",) +## +## # hasher instance - filled in by constructor +## django_hasher = None +## +## setting_kwds = ("salt",) +## context_kwds = () +## +## @property +## def name(self): +## # XXX: need to make sure this wont' collide w/ builtin django hashes. +## # maybe by renaming this to django compatible aliases? +## return DJANGO_PASSLIB_PREFIX + self.django_name +## +## @property +## def django_name(self): +## # expose this so hasher_to_passlib_name() extracts original name +## return self.django_hasher.algorithm +## +## @property +## def ident(self): +## # this should always be correct, as django relies on ident prefix. +## return unicode(self.django_name + "$") +## +## @property +## def identify(self, hash): +## # this should always work, as django relies on ident prefix. +## return to_unicode(hash, "latin-1", "hash").startswith(self.ident) +## +## @property +## def hash(self, secret, salt=None, **kwds): +## # NOTE: from how make_password() is coded, all hashers +## # should have salt param. but only some will have +## # 'iterations' parameter. 
+## opts = {} +## if 'rounds' in self.setting_kwds and 'rounds' in kwds: +## opts['iterations'] = kwds.pop("rounds") +## if kwds: +## raise TypeError("unexpected keyword arguments: %r" % list(kwds)) +## if isinstance(secret, unicode): +## secret = secret.encode("utf-8") +## if salt is None: +## salt = self.django_hasher.salt() +## return to_native_str(self.django_hasher(secret, salt, **opts)) +## +## @property +## def verify(self, secret, hash): +## hash = to_native_str(hash, "utf-8", "hash") +## if isinstance(secret, unicode): +## secret = secret.encode("utf-8") +## return self.django_hasher.verify(secret, hash) +## +##def register_hasher(hasher): +## handler = _HasherHandler(hasher) +## register_crypt_handler(handler) +## return handler + +#============================================================================= +# monkeypatch helpers +#============================================================================= +# private singleton indicating lack-of-value +_UNSET = object() + +class _PatchManager(object): + """helper to manage monkeypatches and run sanity checks""" + + # NOTE: this could easily use a dict interface, + # but keeping it distinct to make clear that it's not a dict, + # since it has important side-effects. + + #=================================================================== + # init and support + #=================================================================== + def __init__(self, log=None): + # map of key -> (original value, patched value) + # original value may be _UNSET + self.log = log or logging.getLogger(__name__ + "._PatchManager") + self._state = {} + + def isactive(self): + return bool(self._state) + + # bool value tests if any patches are currently applied. + # NOTE: this behavior is deprecated in favor of .isactive + __bool__ = __nonzero__ = isactive + + def _import_path(self, path): + """retrieve obj and final attribute name from resource path""" + name, attr = path.split(":") + obj = __import__(name, fromlist=[attr], level=0) + while '.' 
in attr: + head, attr = attr.split(".", 1) + obj = getattr(obj, head) + return obj, attr + + @staticmethod + def _is_same_value(left, right): + """check if two values are the same (stripping method wrappers, etc)""" + return get_method_function(left) == get_method_function(right) + + #=================================================================== + # reading + #=================================================================== + def _get_path(self, key, default=_UNSET): + obj, attr = self._import_path(key) + return getattr(obj, attr, default) + + def get(self, path, default=None): + """return current value for path""" + return self._get_path(path, default) + + def getorig(self, path, default=None): + """return original (unpatched) value for path""" + try: + value, _= self._state[path] + except KeyError: + value = self._get_path(path) + return default if value is _UNSET else value + + def check_all(self, strict=False): + """run sanity check on all keys, issue warning if out of sync""" + same = self._is_same_value + for path, (orig, expected) in iteritems(self._state): + if same(self._get_path(path), expected): + continue + msg = "another library has patched resource: %r" % path + if strict: + raise RuntimeError(msg) + else: + warn(msg, PasslibRuntimeWarning) + + #=================================================================== + # patching + #=================================================================== + def _set_path(self, path, value): + obj, attr = self._import_path(path) + if value is _UNSET: + if hasattr(obj, attr): + delattr(obj, attr) + else: + setattr(obj, attr, value) + + def patch(self, path, value, wrap=False): + """monkeypatch object+attr at to have , stores original""" + assert value != _UNSET + current = self._get_path(path) + try: + orig, expected = self._state[path] + except KeyError: + self.log.debug("patching resource: %r", path) + orig = current + else: + self.log.debug("modifying resource: %r", path) + if not self._is_same_value(current, expected): + warn("overridding resource another library has patched: %r" + % path, PasslibRuntimeWarning) + if wrap: + assert callable(value) + wrapped = orig + wrapped_by = value + def wrapper(*args, **kwds): + return wrapped_by(wrapped, *args, **kwds) + update_wrapper(wrapper, value) + value = wrapper + if callable(value): + # needed by DjangoContextAdapter init + get_method_function(value)._patched_original_value = orig + self._set_path(path, value) + self._state[path] = (orig, value) + + @classmethod + def peek_unpatched_func(cls, value): + return value._patched_original_value + + ##def patch_many(self, **kwds): + ## "override specified resources with new values" + ## for path, value in iteritems(kwds): + ## self.patch(path, value) + + def monkeypatch(self, parent, name=None, enable=True, wrap=False): + """function decorator which patches function of same name in """ + def builder(func): + if enable: + sep = "." 
if ":" in parent else ":" + path = parent + sep + (name or func.__name__) + self.patch(path, func, wrap=wrap) + return func + if callable(name): + # called in non-decorator mode + func = name + name = None + builder(func) + return None + return builder + + #=================================================================== + # unpatching + #=================================================================== + def unpatch(self, path, unpatch_conflicts=True): + try: + orig, expected = self._state[path] + except KeyError: + return + current = self._get_path(path) + self.log.debug("unpatching resource: %r", path) + if not self._is_same_value(current, expected): + if unpatch_conflicts: + warn("reverting resource another library has patched: %r" + % path, PasslibRuntimeWarning) + else: + warn("not reverting resource another library has patched: %r" + % path, PasslibRuntimeWarning) + del self._state[path] + return + self._set_path(path, orig) + del self._state[path] + + def unpatch_all(self, **kwds): + for key in list(self._state): + self.unpatch(key, **kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__init__.py b/venv/lib/python3.12/site-packages/passlib/handlers/__init__.py new file mode 100644 index 0000000..0a0338c --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/__init__.py @@ -0,0 +1 @@ +"""passlib.handlers -- holds implementations of all passlib's builtin hash formats""" diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1c92979 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/argon2.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/argon2.cpython-312.pyc new file mode 100644 index 0000000..080f6ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/argon2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/bcrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/bcrypt.cpython-312.pyc new file mode 100644 index 0000000..790ea64 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/bcrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/cisco.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/cisco.cpython-312.pyc new file mode 100644 index 0000000..fae5294 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/cisco.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/des_crypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/des_crypt.cpython-312.pyc new file mode 100644 index 0000000..c21d9da Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/des_crypt.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/digests.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/digests.cpython-312.pyc new file mode 100644 index 0000000..a880b41 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/digests.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/django.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/django.cpython-312.pyc new file mode 100644 index 0000000..ecc5c22 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/django.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/fshp.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/fshp.cpython-312.pyc new file mode 100644 index 0000000..0ec803f Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/fshp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/ldap_digests.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/ldap_digests.cpython-312.pyc new file mode 100644 index 0000000..50c7f4a Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/ldap_digests.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/md5_crypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/md5_crypt.cpython-312.pyc new file mode 100644 index 0000000..17019ad Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/md5_crypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/misc.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/misc.cpython-312.pyc new file mode 100644 index 0000000..c111c85 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/misc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/mssql.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/mssql.cpython-312.pyc new file mode 100644 index 0000000..d8bcb73 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/mssql.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/mysql.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/mysql.cpython-312.pyc new file mode 100644 index 0000000..8b7a7ed Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/mysql.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/oracle.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/oracle.cpython-312.pyc new file mode 100644 index 0000000..c42ea44 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/oracle.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/pbkdf2.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/pbkdf2.cpython-312.pyc new file mode 100644 index 0000000..1eac291 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/pbkdf2.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/phpass.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/phpass.cpython-312.pyc new file mode 100644 index 0000000..0898933 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/phpass.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/postgres.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/postgres.cpython-312.pyc new file mode 100644 index 0000000..c01032a Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/postgres.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/roundup.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/roundup.cpython-312.pyc new file mode 100644 index 0000000..6a6287c Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/roundup.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/scram.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/scram.cpython-312.pyc new file mode 100644 index 0000000..bcfd82e Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/scram.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/scrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/scrypt.cpython-312.pyc new file mode 100644 index 0000000..6a675dd Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/scrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sha1_crypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sha1_crypt.cpython-312.pyc new file mode 100644 index 0000000..2777c8d Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sha1_crypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sha2_crypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sha2_crypt.cpython-312.pyc new file mode 100644 index 0000000..784f7be Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sha2_crypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sun_md5_crypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sun_md5_crypt.cpython-312.pyc new file mode 100644 index 0000000..18cfddb Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/sun_md5_crypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/windows.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/windows.cpython-312.pyc new file mode 100644 index 0000000..7fa0503 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/handlers/__pycache__/windows.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/argon2.py b/venv/lib/python3.12/site-packages/passlib/handlers/argon2.py new file mode 100644 index 0000000..4a5691b --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/argon2.py @@ -0,0 +1,1009 @@ +"""passlib.handlers.argon2 -- argon2 password hash 
wrapper + +References +========== +* argon2 + - home: https://github.com/P-H-C/phc-winner-argon2 + - whitepaper: https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf +* argon2 cffi wrapper + - pypi: https://pypi.python.org/pypi/argon2_cffi + - home: https://github.com/hynek/argon2_cffi +* argon2 pure python + - pypi: https://pypi.python.org/pypi/argon2pure + - home: https://github.com/bwesterb/argon2pure +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +import logging +log = logging.getLogger(__name__) +import re +import types +from warnings import warn +# site +_argon2_cffi = None # loaded below +_argon2pure = None # dynamically imported by _load_backend_argon2pure() +# pkg +from passlib import exc +from passlib.crypto.digest import MAX_UINT32 +from passlib.utils import classproperty, to_bytes, render_bytes +from passlib.utils.binary import b64s_encode, b64s_decode +from passlib.utils.compat import u, unicode, bascii_to_str, uascii_to_str, PY2 +import passlib.utils.handlers as uh +# local +__all__ = [ + "argon2", +] + +#============================================================================= +# helpers +#============================================================================= + +# NOTE: when adding a new argon2 hash type, need to do the following: +# * add TYPE_XXX constant, and add to ALL_TYPES +# * make sure "_backend_type_map" constructors handle it correctly for all backends +# * make sure _hash_regex & _ident_regex (below) support type string. +# * add reference vectors for testing. + +#: argon2 type constants -- subclasses handle mapping these to backend-specific type constants. +#: (should be lowercase, to match representation in hash string) +TYPE_I = u("i") +TYPE_D = u("d") +TYPE_ID = u("id") # new 2016-10-29; passlib 1.7.2 requires backends new enough for support + +#: list of all known types; first (supported) type will be used as default. +ALL_TYPES = (TYPE_ID, TYPE_I, TYPE_D) +ALL_TYPES_SET = set(ALL_TYPES) + +#============================================================================= +# import argon2 package (https://pypi.python.org/pypi/argon2_cffi) +#============================================================================= + +# import cffi package +# NOTE: we try to do this even if caller is going to use argon2pure, +# so that we can always use the libargon2 default settings when possible. +_argon2_cffi_error = None +try: + import argon2 as _argon2_cffi +except ImportError: + _argon2_cffi = None +else: + if not hasattr(_argon2_cffi, "Type"): + # they have incompatible "argon2" package installed, instead of "argon2_cffi" package. + _argon2_cffi_error = ( + "'argon2' module points to unsupported 'argon2' pypi package; " + "please install 'argon2-cffi' instead." 
+ ) + _argon2_cffi = None + elif not hasattr(_argon2_cffi, "low_level"): + # they have pre-v16 argon2_cffi package + _argon2_cffi_error = "'argon2-cffi' is too old, please update to argon2_cffi >= 18.2.0" + _argon2_cffi = None + +# init default settings for our hasher class -- +# if we have argon2_cffi >= 16.0, use their default hasher settings, otherwise use static default +if hasattr(_argon2_cffi, "PasswordHasher"): + # use cffi's default settings + _default_settings = _argon2_cffi.PasswordHasher() + _default_version = _argon2_cffi.low_level.ARGON2_VERSION +else: + # use fallback settings (for no backend, or argon2pure) + class _DummyCffiHasher: + """ + dummy object to use as source of defaults when argon2_cffi isn't present. + this tries to mimic the attributes of ``argon2.PasswordHasher()`` which the rest of + this module reads. + + .. note:: values last synced w/ argon2 19.2 as of 2019-11-09 + """ + time_cost = 2 + memory_cost = 512 + parallelism = 2 + salt_len = 16 + hash_len = 16 + # NOTE: "type" attribute added in argon2_cffi v18.2; but currently not reading it + # type = _argon2_cffi.Type.ID + + _default_settings = _DummyCffiHasher() + _default_version = 0x13 # v1.9 + +#============================================================================= +# handler +#============================================================================= +class _Argon2Common(uh.SubclassBackendMixin, uh.ParallelismMixin, + uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, + uh.GenericHandler): + """ + Base class which implements brunt of Argon2 code. + This is then subclassed by the various backends, + to override w/ backend-specific methods. + + When a backend is loaded, the bases of the 'argon2' class proper + are modified to prepend the correct backend-specific subclass. + """ + #=================================================================== + # class attrs + #=================================================================== + + #------------------------ + # PasswordHash + #------------------------ + + name = "argon2" + setting_kwds = ("salt", + "salt_size", + "salt_len", # 'salt_size' alias for compat w/ argon2 package + "rounds", + "time_cost", # 'rounds' alias for compat w/ argon2 package + "memory_cost", + "parallelism", + "digest_size", + "hash_len", # 'digest_size' alias for compat w/ argon2 package + "type", # the type of argon2 hash used + ) + + # TODO: could support the optional 'data' parameter, + # but need to research the uses, what a more descriptive name would be, + # and deal w/ fact that argon2_cffi 16.1 doesn't currently support it. 
+ # (argon2_pure does though) + + #------------------------ + # GenericHandler + #------------------------ + + # NOTE: ident -- all argon2 hashes start with "$argon2$" + # XXX: could programmaticaly generate "ident_values" string from ALL_TYPES above + + checksum_size = _default_settings.hash_len + + #: force parsing these kwds + _always_parse_settings = uh.GenericHandler._always_parse_settings + \ + ("type",) + + #: exclude these kwds from parsehash() result (most are aliases for other keys) + _unparsed_settings = uh.GenericHandler._unparsed_settings + \ + ("salt_len", "time_cost", "hash_len", "digest_size") + + #------------------------ + # HasSalt + #------------------------ + default_salt_size = _default_settings.salt_len + min_salt_size = 8 + max_salt_size = MAX_UINT32 + + #------------------------ + # HasRounds + # TODO: once rounds limit logic is factored out, + # make 'rounds' and 'cost' an alias for 'time_cost' + #------------------------ + default_rounds = _default_settings.time_cost + min_rounds = 1 + max_rounds = MAX_UINT32 + rounds_cost = "linear" + + #------------------------ + # ParalleismMixin + #------------------------ + max_parallelism = (1 << 24) - 1 # from argon2.h / ARGON2_MAX_LANES + + #------------------------ + # custom + #------------------------ + + #: max version support + #: NOTE: this is dependant on the backend, and initialized/modified by set_backend() + max_version = _default_version + + #: minimum version before needs_update() marks the hash; if None, defaults to max_version + min_desired_version = None + + #: minimum valid memory_cost + min_memory_cost = 8 # from argon2.h / ARGON2_MIN_MEMORY + + #: maximum number of threads (-1=unlimited); + #: number of threads used by .hash() will be min(parallelism, max_threads) + max_threads = -1 + + #: global flag signalling argon2pure backend to use threads + #: rather than subprocesses. + pure_use_threads = False + + #: internal helper used to store mapping of TYPE_XXX constants -> backend-specific type constants; + #: this is populated by _load_backend_mixin(); and used to detect which types are supported. + #: XXX: could expose keys as class-level .supported_types property? + _backend_type_map = {} + + @classproperty + def type_values(cls): + """ + return tuple of types supported by this backend + + .. versionadded:: 1.7.2 + """ + cls.get_backend() # make sure backend is loaded + return tuple(cls._backend_type_map) + + #=================================================================== + # instance attrs + #=================================================================== + + #: argon2 hash type, one of ALL_TYPES -- class value controls the default + #: .. versionadded:: 1.7.2 + type = TYPE_ID + + #: parallelism setting -- class value controls the default + parallelism = _default_settings.parallelism + + #: hash version (int) + #: NOTE: this is modified by set_backend() + version = _default_version + + #: memory cost -- class value controls the default + memory_cost = _default_settings.memory_cost + + @property + def type_d(self): + """ + flag indicating a Type D hash + + .. 
deprecated:: 1.7.2; will be removed in passlib 2.0 + """ + return self.type == TYPE_D + + #: optional secret data + data = None + + #=================================================================== + # variant constructor + #=================================================================== + + @classmethod + def using(cls, type=None, memory_cost=None, salt_len=None, time_cost=None, digest_size=None, + checksum_size=None, hash_len=None, max_threads=None, **kwds): + # support aliases which match argon2 naming convention + if time_cost is not None: + if "rounds" in kwds: + raise TypeError("'time_cost' and 'rounds' are mutually exclusive") + kwds['rounds'] = time_cost + + if salt_len is not None: + if "salt_size" in kwds: + raise TypeError("'salt_len' and 'salt_size' are mutually exclusive") + kwds['salt_size'] = salt_len + + if hash_len is not None: + if digest_size is not None: + raise TypeError("'hash_len' and 'digest_size' are mutually exclusive") + digest_size = hash_len + + if checksum_size is not None: + if digest_size is not None: + raise TypeError("'checksum_size' and 'digest_size' are mutually exclusive") + digest_size = checksum_size + + # create variant + subcls = super(_Argon2Common, cls).using(**kwds) + + # set type + if type is not None: + subcls.type = subcls._norm_type(type) + + # set checksum size + relaxed = kwds.get("relaxed") + if digest_size is not None: + if isinstance(digest_size, uh.native_string_types): + digest_size = int(digest_size) + # NOTE: this isn't *really* digest size minimum, but want to enforce secure minimum. + subcls.checksum_size = uh.norm_integer(subcls, digest_size, min=16, max=MAX_UINT32, + param="digest_size", relaxed=relaxed) + + # set memory cost + if memory_cost is not None: + if isinstance(memory_cost, uh.native_string_types): + memory_cost = int(memory_cost) + subcls.memory_cost = subcls._norm_memory_cost(memory_cost, relaxed=relaxed) + + # validate constraints + subcls._validate_constraints(subcls.memory_cost, subcls.parallelism) + + # set max threads + if max_threads is not None: + if isinstance(max_threads, uh.native_string_types): + max_threads = int(max_threads) + if max_threads < 1 and max_threads != -1: + raise ValueError("max_threads (%d) must be -1 (unlimited), or at least 1." % + (max_threads,)) + subcls.max_threads = max_threads + + return subcls + + @classmethod + def _validate_constraints(cls, memory_cost, parallelism): + # NOTE: this is used by class & instance, hence passing in via arguments. + # could switch and make this a hybrid method. 
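The alias handling in `using()` above means callers may pass either passlib's names or the argon2 package's names when deriving a customized hasher. A minimal usage sketch (assumes an argon2 backend is installed; the cost values are illustrative only):

```python
from passlib.hash import argon2

# 'time_cost' is translated to passlib's 'rounds' before the variant is built
custom = argon2.using(type="ID", time_cost=3, memory_cost=65536, parallelism=4)
h = custom.hash("hunter2")
assert custom.verify("hunter2", h)
```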
+        min_memory_cost = 8 * parallelism
+        if memory_cost < min_memory_cost:
+            raise ValueError("%s: memory_cost (%d) is too low, must be at least "
+                             "8 * parallelism (8 * %d = %d)" %
+                             (cls.name, memory_cost,
+                              parallelism, min_memory_cost))
+
+    #===================================================================
+    # public api
+    #===================================================================
+
+    #: shorter version of _hash_regex, used to quickly identify hashes
+    _ident_regex = re.compile(r"^\$argon2[a-z]+\$")
+
+    @classmethod
+    def identify(cls, hash):
+        hash = uh.to_unicode_for_identify(hash)
+        return cls._ident_regex.match(hash) is not None
+
+    # hash(), verify(), genhash() -- implemented by backend subclass
+
+    #===================================================================
+    # hash parsing / rendering
+    #===================================================================
+
+    # info taken from source of decode_string() function in
+    # <https://github.com/P-H-C/phc-winner-argon2/blob/master/src/encoding.c>
+    #
+    # hash format:
+    #   $argon2<type>[$v=<version>]$m=<memory_cost>,t=<time_cost>,p=<parallelism>[,keyid=<keyid>][,data=<data>][$<salt>[$<digest>]]
+    #
+    # NOTE: as of 2016-6-17, the official source (above) lists the "keyid" param in the comments,
+    #       but the actual source of decode_string & encode_string don't mention it at all.
+    #       we're supporting parsing it, but throw NotImplementedError if encountered.
+    #
+    # sample hashes:
+    #   v1.0: '$argon2i$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ'
+    #   v1.3: '$argon2i$v=19$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ'
+
+    #: regex to parse argon hash
+    _hash_regex = re.compile(br"""
+        ^
+        \$argon2(?P<type>[a-z]+)\$
+        (?:
+            v=(?P<version>\d+)
+            \$
+        )?
+        m=(?P<memory_cost>\d+)
+        ,
+        t=(?P<time_cost>\d+)
+        ,
+        p=(?P<parallelism>\d+)
+        (?:
+            ,keyid=(?P<keyid>[^,$]+)
+        )?
+        (?:
+            ,data=(?P<data>[^,$]+)
+        )?
+        (?:
+            \$
+            (?P<salt>[^$]+)
+            (?:
+                \$
+                (?P<digest>.+)
+            )?
+        )?
+        $
+        """, re.X)
+
+    @classmethod
+    def from_string(cls, hash):
+        # NOTE: assuming hash will be unicode, or use ascii-compatible encoding.
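For reference, here is how that grammar matches the documented v1.3 sample hash. This standalone sketch repeats the core of `_hash_regex` without the optional keyid/data fields:

```python
import re

sample = "$argon2i$v=19$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ"
m = re.match(
    r"^\$argon2(?P<type>[a-z]+)\$(?:v=(?P<version>\d+)\$)?"
    r"m=(?P<memory_cost>\d+),t=(?P<time_cost>\d+),p=(?P<parallelism>\d+)"
    r"\$(?P<salt>[^$]+)\$(?P<digest>.+)$", sample)
assert m and m.group("type") == "i" and int(m.group("version")) == 19
```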
+ # TODO: switch to working w/ str or unicode + if isinstance(hash, unicode): + hash = hash.encode("utf-8") + if not isinstance(hash, bytes): + raise exc.ExpectedStringError(hash, "hash") + m = cls._hash_regex.match(hash) + if not m: + raise exc.MalformedHashError(cls) + type, version, memory_cost, time_cost, parallelism, keyid, data, salt, digest = \ + m.group("type", "version", "memory_cost", "time_cost", "parallelism", + "keyid", "data", "salt", "digest") + if keyid: + raise NotImplementedError("argon2 'keyid' parameter not supported") + return cls( + type=type.decode("ascii"), + version=int(version) if version else 0x10, + memory_cost=int(memory_cost), + rounds=int(time_cost), + parallelism=int(parallelism), + salt=b64s_decode(salt) if salt else None, + data=b64s_decode(data) if data else None, + checksum=b64s_decode(digest) if digest else None, + ) + + def to_string(self): + version = self.version + if version == 0x10: + vstr = "" + else: + vstr = "v=%d$" % version + + data = self.data + if data: + kdstr = ",data=" + bascii_to_str(b64s_encode(self.data)) + else: + kdstr = "" + + # NOTE: 'keyid' param currently not supported + return "$argon2%s$%sm=%d,t=%d,p=%d%s$%s$%s" % ( + uascii_to_str(self.type), + vstr, + self.memory_cost, + self.rounds, + self.parallelism, + kdstr, + bascii_to_str(b64s_encode(self.salt)), + bascii_to_str(b64s_encode(self.checksum)), + ) + + #=================================================================== + # init + #=================================================================== + def __init__(self, type=None, type_d=False, version=None, memory_cost=None, data=None, **kwds): + + # handle deprecated kwds + if type_d: + warn('argon2 `type_d=True` keyword is deprecated, and will be removed in passlib 2.0; ' + 'please use ``type="d"`` instead') + assert type is None + type = TYPE_D + + # TODO: factor out variable checksum size support into a mixin. + # set checksum size to specific value before _norm_checksum() is called + checksum = kwds.get("checksum") + if checksum is not None: + self.checksum_size = len(checksum) + + # call parent + super(_Argon2Common, self).__init__(**kwds) + + # init type + if type is None: + assert uh.validate_default_value(self, self.type, self._norm_type, param="type") + else: + self.type = self._norm_type(type) + + # init version + if version is None: + assert uh.validate_default_value(self, self.version, self._norm_version, + param="version") + else: + self.version = self._norm_version(version) + + # init memory cost + if memory_cost is None: + assert uh.validate_default_value(self, self.memory_cost, self._norm_memory_cost, + param="memory_cost") + else: + self.memory_cost = self._norm_memory_cost(memory_cost) + + # init data + if data is None: + assert self.data is None + else: + if not isinstance(data, bytes): + raise uh.exc.ExpectedTypeError(data, "bytes", "data") + self.data = data + + #------------------------------------------------------------------- + # parameter guards + #------------------------------------------------------------------- + + @classmethod + def _norm_type(cls, value): + # type check + if not isinstance(value, unicode): + if PY2 and isinstance(value, bytes): + value = value.decode('ascii') + else: + raise uh.exc.ExpectedTypeError(value, "str", "type") + + # check if type is valid + if value in ALL_TYPES_SET: + return value + + # translate from uppercase + temp = value.lower() + if temp in ALL_TYPES_SET: + return temp + + # failure! 
+ raise ValueError("unknown argon2 hash type: %r" % (value,)) + + @classmethod + def _norm_version(cls, version): + if not isinstance(version, uh.int_types): + raise uh.exc.ExpectedTypeError(version, "integer", "version") + + # minimum valid version + if version < 0x13 and version != 0x10: + raise ValueError("invalid argon2 hash version: %d" % (version,)) + + # check this isn't past backend's max version + backend = cls.get_backend() + if version > cls.max_version: + raise ValueError("%s: hash version 0x%X not supported by %r backend " + "(max version is 0x%X); try updating or switching backends" % + (cls.name, version, backend, cls.max_version)) + return version + + @classmethod + def _norm_memory_cost(cls, memory_cost, relaxed=False): + return uh.norm_integer(cls, memory_cost, min=cls.min_memory_cost, + param="memory_cost", relaxed=relaxed) + + #=================================================================== + # digest calculation + #=================================================================== + + # NOTE: _calc_checksum implemented by backend subclass + + @classmethod + def _get_backend_type(cls, value): + """ + helper to resolve backend constant from type + """ + try: + return cls._backend_type_map[value] + except KeyError: + pass + # XXX: pick better error class? + msg = "unsupported argon2 hash (type %r not supported by %s backend)" % \ + (value, cls.get_backend()) + raise ValueError(msg) + + #=================================================================== + # hash migration + #=================================================================== + + def _calc_needs_update(self, **kwds): + cls = type(self) + if self.type != cls.type: + return True + minver = cls.min_desired_version + if minver is None or minver > cls.max_version: + minver = cls.max_version + if self.version < minver: + # version is too old. + return True + if self.memory_cost != cls.memory_cost: + return True + if self.checksum_size != cls.checksum_size: + return True + return super(_Argon2Common, self)._calc_needs_update(**kwds) + + #=================================================================== + # backend loading + #=================================================================== + + _no_backend_suggestion = " -- recommend you install one (e.g. 'pip install argon2_cffi')" + + @classmethod + def _finalize_backend_mixin(mixin_cls, name, dryrun): + """ + helper called by from backend mixin classes' _load_backend_mixin() -- + invoked after backend imports have been loaded, and performs + feature detection & testing common to all backends. + """ + # check argon2 version + max_version = mixin_cls.max_version + assert isinstance(max_version, int) and max_version >= 0x10 + if max_version < 0x13: + warn("%r doesn't support argon2 v1.3, and should be upgraded" % name, + uh.exc.PasslibSecurityWarning) + + # prefer best available type + for type in ALL_TYPES: + if type in mixin_cls._backend_type_map: + mixin_cls.type = type + break + else: + warn("%r lacks support for all known hash types" % name, uh.exc.PasslibRuntimeWarning) + # NOTE: class will just throw "unsupported argon2 hash" error if they try to use it... + mixin_cls.type = TYPE_ID + + return True + + @classmethod + def _adapt_backend_error(cls, err, hash=None, self=None): + """ + internal helper invoked when backend has hash/verification error; + used to adapt to passlib message. + """ + backend = cls.get_backend() + + # parse hash to throw error if format was invalid, parameter out of range, etc. 
+ if self is None and hash is not None: + self = cls.from_string(hash) + + # check constraints on parsed object + # XXX: could move this to __init__, but not needed by needs_update calls + if self is not None: + self._validate_constraints(self.memory_cost, self.parallelism) + + # as of cffi 16.1, lacks support in hash_secret(), so genhash() will get here. + # as of cffi 16.2, support removed from verify_secret() as well. + if backend == "argon2_cffi" and self.data is not None: + raise NotImplementedError("argon2_cffi backend doesn't support the 'data' parameter") + + # fallback to reporting a malformed hash + text = str(err) + if text not in [ + "Decoding failed" # argon2_cffi's default message + ]: + reason = "%s reported: %s: hash=%r" % (backend, text, hash) + else: + reason = repr(hash) + raise exc.MalformedHashError(cls, reason=reason) + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# stub backend +#----------------------------------------------------------------------- +class _NoBackend(_Argon2Common): + """ + mixin used before any backend has been loaded. + contains stubs that force loading of one of the available backends. + """ + #=================================================================== + # primary methods + #=================================================================== + @classmethod + def hash(cls, secret): + cls._stub_requires_backend() + return cls.hash(secret) + + @classmethod + def verify(cls, secret, hash): + cls._stub_requires_backend() + return cls.verify(secret, hash) + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config): + cls._stub_requires_backend() + return cls.genhash(secret, config) + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + # NOTE: since argon2_cffi takes care of rendering hash, + # _calc_checksum() is only used by the argon2pure backend. 
+ self._stub_requires_backend() + # NOTE: have to use super() here so that we don't recursively + # call subclass's wrapped _calc_checksum + return super(argon2, self)._calc_checksum(secret) + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# argon2_cffi backend +#----------------------------------------------------------------------- +class _CffiBackend(_Argon2Common): + """ + argon2_cffi backend + """ + #=================================================================== + # backend loading + #=================================================================== + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # make sure we write info to base class's __dict__, not that of a subclass + assert mixin_cls is _CffiBackend + + # we automatically import this at top, so just grab info + if _argon2_cffi is None: + if _argon2_cffi_error: + raise exc.PasslibSecurityError(_argon2_cffi_error) + return False + max_version = _argon2_cffi.low_level.ARGON2_VERSION + log.debug("detected 'argon2_cffi' backend, version %r, with support for 0x%x argon2 hashes", + _argon2_cffi.__version__, max_version) + + # build type map + TypeEnum = _argon2_cffi.Type + type_map = {} + for type in ALL_TYPES: + try: + type_map[type] = getattr(TypeEnum, type.upper()) + except AttributeError: + # TYPE_ID support not added until v18.2 + assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type + mixin_cls._backend_type_map = type_map + + # set version info, and run common setup + mixin_cls.version = mixin_cls.max_version = max_version + return mixin_cls._finalize_backend_mixin(name, dryrun) + + #=================================================================== + # primary methods + #=================================================================== + @classmethod + def hash(cls, secret): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. + uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + # XXX: doesn't seem to be a way to make this honor max_threads + try: + return bascii_to_str(_argon2_cffi.low_level.hash_secret( + type=cls._get_backend_type(cls.type), + memory_cost=cls.memory_cost, + time_cost=cls.default_rounds, + parallelism=cls.parallelism, + salt=to_bytes(cls._generate_salt()), + hash_len=cls.checksum_size, + secret=secret, + )) + except _argon2_cffi.exceptions.HashingError as err: + raise cls._adapt_backend_error(err) + + #: helper for verify() method below -- maps prefixes to type constants + _byte_ident_map = dict((render_bytes(b"$argon2%s$", type.encode("ascii")), type) + for type in ALL_TYPES) + + @classmethod + def verify(cls, secret, hash): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. 
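The `hash()` classmethod above is a thin wrapper over argon2_cffi's low-level API; calling it directly looks like the following (a sketch, assuming argon2-cffi >= 18.2 for `Type.ID`; the fixed salt is a placeholder, not how real salts should be generated):

```python
from argon2.low_level import Type, hash_secret

encoded = hash_secret(
    secret=b"hunter2",
    salt=b"0123456789abcdef",       # placeholder; use os.urandom(16) in real code
    time_cost=2, memory_cost=512, parallelism=2,
    hash_len=16, type=Type.ID,
)
print(encoded.decode("ascii"))      # "$argon2id$v=19$m=512,t=2,p=2$..."
```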
+ uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + hash = to_bytes(hash, "ascii") + + # read type from start of hash + # NOTE: don't care about malformed strings, lowlevel will throw error for us + type = cls._byte_ident_map.get(hash[:1+hash.find(b"$", 1)], TYPE_I) + type_code = cls._get_backend_type(type) + + # XXX: doesn't seem to be a way to make this honor max_threads + try: + result = _argon2_cffi.low_level.verify_secret(hash, secret, type_code) + assert result is True + return True + except _argon2_cffi.exceptions.VerifyMismatchError: + return False + except _argon2_cffi.exceptions.VerificationError as err: + raise cls._adapt_backend_error(err, hash=hash) + + # NOTE: deprecated, will be removed in 2.0 + @classmethod + def genhash(cls, secret, config): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. + uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + self = cls.from_string(config) + # XXX: doesn't seem to be a way to make this honor max_threads + try: + result = bascii_to_str(_argon2_cffi.low_level.hash_secret( + type=cls._get_backend_type(self.type), + memory_cost=self.memory_cost, + time_cost=self.rounds, + parallelism=self.parallelism, + salt=to_bytes(self.salt), + hash_len=self.checksum_size, + secret=secret, + version=self.version, + )) + except _argon2_cffi.exceptions.HashingError as err: + raise cls._adapt_backend_error(err, hash=config) + if self.version == 0x10: + # workaround: argon2 0x13 always returns "v=" segment, even for 0x10 hashes + result = result.replace("$v=16$", "$") + return result + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + raise AssertionError("shouldn't be called under argon2_cffi backend") + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# argon2pure backend +#----------------------------------------------------------------------- +class _PureBackend(_Argon2Common): + """ + argon2pure backend + """ + #=================================================================== + # backend loading + #=================================================================== + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # make sure we write info to base class's __dict__, not that of a subclass + assert mixin_cls is _PureBackend + + # import argon2pure + global _argon2pure + try: + import argon2pure as _argon2pure + except ImportError: + return False + + # get default / max supported version -- added in v1.2.2 + try: + from argon2pure import ARGON2_DEFAULT_VERSION as max_version + except ImportError: + log.warning("detected 'argon2pure' backend, but package is too old " + "(passlib requires argon2pure >= 1.2.3)") + return False + + log.debug("detected 'argon2pure' backend, with support for 0x%x argon2 hashes", + max_version) + + if not dryrun: + warn("Using argon2pure backend, which is 100x+ slower than is required " + "for adequate security. 
Installing argon2_cffi (via 'pip install argon2_cffi') " + "is strongly recommended", exc.PasslibSecurityWarning) + + # build type map + type_map = {} + for type in ALL_TYPES: + try: + type_map[type] = getattr(_argon2pure, "ARGON2" + type.upper()) + except AttributeError: + # TYPE_ID support not added until v1.3 + assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type + mixin_cls._backend_type_map = type_map + + mixin_cls.version = mixin_cls.max_version = max_version + return mixin_cls._finalize_backend_mixin(name, dryrun) + + #=================================================================== + # primary methods + #=================================================================== + + # NOTE: this backend uses default .hash() & .verify() implementations. + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. + uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + kwds = dict( + password=secret, + salt=self.salt, + time_cost=self.rounds, + memory_cost=self.memory_cost, + parallelism=self.parallelism, + tag_length=self.checksum_size, + type_code=self._get_backend_type(self.type), + version=self.version, + ) + if self.max_threads > 0: + kwds['threads'] = self.max_threads + if self.pure_use_threads: + kwds['use_threads'] = True + if self.data: + kwds['associated_data'] = self.data + # NOTE: should return raw bytes + # NOTE: this may raise _argon2pure.Argon2ParameterError, + # but it if does that, there's a bug in our own parameter checking code. + try: + return _argon2pure.argon2(**kwds) + except _argon2pure.Argon2Error as err: + raise self._adapt_backend_error(err, self=self) + + #=================================================================== + # eoc + #=================================================================== + +class argon2(_NoBackend, _Argon2Common): + """ + This class implements the Argon2 password hash [#argon2-home]_, and follows the :ref:`password-hash-api`. + + Argon2 supports a variable-length salt, and variable time & memory cost, + and a number of other configurable parameters. + + The :meth:`~passlib.ifc.PasswordHash.replace` method accepts the following optional keywords: + + :type type: str + :param type: + Specify the type of argon2 hash to generate. + Can be one of "ID", "I", "D". + + This defaults to "ID" if supported by the backend, otherwise "I". + + :type salt: str + :param salt: + Optional salt string. + If specified, the length must be between 0-1024 bytes. + If not specified, one will be auto-generated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + This corresponds linearly to the amount of time hashing will take. + + :type time_cost: int + :param time_cost: + An alias for **rounds**, for compatibility with underlying argon2 library. + + :param int memory_cost: + Defines the memory usage in kibibytes. + This corresponds linearly to the amount of memory hashing will take. + + :param int parallelism: + Defines the parallelization factor. + *NOTE: this will affect the resulting hash value.* + + :param int digest_size: + Length of the digest in bytes. + + :param int max_threads: + Maximum number of threads that will be used. 
+ -1 means unlimited; otherwise hashing will use ``min(parallelism, max_threads)`` threads. + + .. note:: + + This option is currently only honored by the argon2pure backend. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionchanged:: 1.7.2 + + Added the "type" keyword, and support for type "D" and "ID" hashes. + (Prior versions could verify type "D" hashes, but not generate them). + + .. todo:: + + * Support configurable threading limits. + """ + #============================================================================= + # backend + #============================================================================= + + # NOTE: the brunt of the argon2 class is implemented in _Argon2Common. + # there are then subclass for each backend (e.g. _PureBackend), + # these are dynamically prepended to this class's bases + # in order to load the appropriate backend. + + #: list of potential backends + backends = ("argon2_cffi", "argon2pure") + + #: flag that this class's bases should be modified by SubclassBackendMixin + _backend_mixin_target = True + + #: map of backend -> mixin class, used by _get_backend_loader() + _backend_mixin_map = { + None: _NoBackend, + "argon2_cffi": _CffiBackend, + "argon2pure": _PureBackend, + } + + #============================================================================= + # + #============================================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/bcrypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/bcrypt.py new file mode 100644 index 0000000..b83b110 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/bcrypt.py @@ -0,0 +1,1243 @@ +"""passlib.bcrypt -- implementation of OpenBSD's BCrypt algorithm. + +TODO: + +* support 2x and altered-2a hashes? 
+ http://www.openwall.com/lists/oss-security/2011/06/27/9 + +* deal with lack of PY3-compatibile c-ext implementation +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +from base64 import b64encode +from hashlib import sha256 +import os +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +_bcrypt = None # dynamically imported by _load_backend_bcrypt() +_pybcrypt = None # dynamically imported by _load_backend_pybcrypt() +_bcryptor = None # dynamically imported by _load_backend_bcryptor() +# pkg +_builtin_bcrypt = None # dynamically imported by _load_backend_builtin() +from passlib.crypto.digest import compile_hmac +from passlib.exc import PasslibHashWarning, PasslibSecurityWarning, PasslibSecurityError +from passlib.utils import safe_crypt, repeat_string, to_bytes, parse_version, \ + rng, getrandstr, test_crypt, to_unicode, \ + utf8_truncate, utf8_repeat_string, crypt_accepts_bytes +from passlib.utils.binary import bcrypt64 +from passlib.utils.compat import get_unbound_method_function +from passlib.utils.compat import u, uascii_to_str, unicode, str_to_uascii, PY3, error_from +import passlib.utils.handlers as uh + +# local +__all__ = [ + "bcrypt", +] + +#============================================================================= +# support funcs & constants +#============================================================================= +IDENT_2 = u("$2$") +IDENT_2A = u("$2a$") +IDENT_2X = u("$2x$") +IDENT_2Y = u("$2y$") +IDENT_2B = u("$2b$") +_BNULL = b'\x00' + +# reference hash of "test", used in various self-checks +TEST_HASH_2A = "$2a$04$5BJqKfqMQvV7nS.yUguNcueVirQqDBGaLXSqj.rs.pZPlNR0UX/HK" + +def _detect_pybcrypt(): + """ + internal helper which tries to distinguish pybcrypt vs bcrypt. + + :returns: + True if cext-based py-bcrypt, + False if ffi-based bcrypt, + None if 'bcrypt' module not found. + + .. versionchanged:: 1.6.3 + + Now assuming bcrypt installed, unless py-bcrypt explicitly detected. + Previous releases assumed py-bcrypt by default. + + Making this change since py-bcrypt is (apparently) unmaintained and static, + whereas bcrypt is being actively maintained, and it's internal structure may shift. + """ + # NOTE: this is also used by the unittests. + + # check for module. + try: + import bcrypt + except ImportError: + # XXX: this is ignoring case where py-bcrypt's "bcrypt._bcrypt" C Ext fails to import; + # would need to inspect actual ImportError message to catch that. + return None + + # py-bcrypt has a "._bcrypt.__version__" attribute (confirmed for v0.1 - 0.4), + # which bcrypt lacks (confirmed for v1.0 - 2.0) + # "._bcrypt" alone isn't sufficient, since bcrypt 2.0 now has that attribute. + try: + from bcrypt._bcrypt import __version__ + except ImportError: + return False + return True + +#============================================================================= +# backend mixins +#============================================================================= +class _BcryptCommon(uh.SubclassBackendMixin, uh.TruncateMixin, uh.HasManyIdents, + uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """ + Base class which implements brunt of BCrypt code. + This is then subclassed by the various backends, + to override w/ backend-specific methods. 
+
+    When a backend is loaded, the bases of the 'bcrypt' class proper
+    are modified to prepend the correct backend-specific subclass.
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+
+    #--------------------
+    # PasswordHash
+    #--------------------
+    name = "bcrypt"
+    setting_kwds = ("salt", "rounds", "ident", "truncate_error")
+
+    #--------------------
+    # GenericHandler
+    #--------------------
+    checksum_size = 31
+    checksum_chars = bcrypt64.charmap
+
+    #--------------------
+    # HasManyIdents
+    #--------------------
+    default_ident = IDENT_2B
+    ident_values = (IDENT_2, IDENT_2A, IDENT_2X, IDENT_2Y, IDENT_2B)
+    ident_aliases = {u("2"): IDENT_2, u("2a"): IDENT_2A, u("2y"): IDENT_2Y,
+                     u("2b"): IDENT_2B}
+
+    #--------------------
+    # HasSalt
+    #--------------------
+    min_salt_size = max_salt_size = 22
+    salt_chars = bcrypt64.charmap
+
+    # NOTE: 22nd salt char must be in restricted set of ``final_salt_chars``, not full set above.
+    final_salt_chars = ".Oeu"  # bcrypt64._padinfo2[1]
+
+    #--------------------
+    # HasRounds
+    #--------------------
+    default_rounds = 12  # current passlib default
+    min_rounds = 4  # minimum from bcrypt specification
+    max_rounds = 31  # 32-bit integer limit (since real_rounds=1<<rounds)
+    rounds_cost = "log2"
+
+    #--------------------
+    # TruncateMixin
+    #--------------------
+    truncate_size = 72
+
+    #--------------------
+    # custom
+    #--------------------
+
+    # backend workaround detection flags, set by _finalize_backend_mixin()
+    _workrounds_initialized = False
+    _has_2a_wraparound_bug = False
+    _lacks_20_support = False
+    _lacks_2y_support = False
+    _lacks_2b_support = False
+    _fallback_ident = IDENT_2A
+    _require_valid_utf8_bytes = False
+
+    #===================================================================
+    # backend
+    #===================================================================
+
+    # NOTE: set_backend() will execute the ._load_backend_mixin()
+    #       of the matching mixin class, which will handle backend detection
+
+    # appended to HasManyBackends' "no backends available" error message
+    _no_backend_suggestion = " -- recommend you install one (e.g. 'pip install bcrypt')"
+
+    @classmethod
+    def _finalize_backend_mixin(mixin_cls, backend, dryrun):
+        """
+        helper called by from backend mixin classes' _load_backend_mixin() --
+        invoked after backend imports have been loaded, and performs
+        feature detection & testing common to all backends.
+        """
+        #----------------------------------------------------------------
+        # setup helpers
+        #----------------------------------------------------------------
+        assert mixin_cls is bcrypt._backend_mixin_map[backend], \
+            "_configure_workarounds() invoked from wrong class"
+
+        if mixin_cls._workrounds_initialized:
+            return True
+
+        verify = mixin_cls.verify
+
+        err_types = (ValueError, uh.exc.MissingBackendError)
+        if _bcryptor:
+            err_types += (_bcryptor.engine.SaltError,)
+
+        def safe_verify(secret, hash):
+            """verify() wrapper which traps 'unknown identifier' errors"""
+            try:
+                return verify(secret, hash)
+            except err_types:
+                # backends without support for given ident will throw various
+                # errors about unrecognized version:
+                #     os_crypt -- internal code below throws
+                #       - PasswordValueError if there's encoding issue w/ password.
+                #       - InternalBackendError if crypt fails for unknown reason
+                #         (trapped below so we can debug it)
+                #     pybcrypt, bcrypt -- raises ValueError
+                #     bcryptor -- raises bcryptor.engine.SaltError
+                return NotImplemented
+            except uh.exc.InternalBackendError:
+                # _calc_checksum() code may also throw CryptBackendError
+                # if correct hash isn't returned (e.g. 2y hash converted to 2b,
+                # such as happens with bcrypt 3.0.0)
+                log.debug("trapped unexpected response from %r backend: verify(%r, %r):",
+                          backend, secret, hash, exc_info=True)
+                return NotImplemented
+
+        def assert_lacks_8bit_bug(ident):
+            """
+            helper to check for cryptblowfish 8bit bug (fixed in 2y/2b);
+            even though it's not known to be present in any of passlib's backends.
+ this is treated as FATAL, because it can easily result in seriously malformed hashes, + and we can't correct for it ourselves. + + test cases from + reference hash is the incorrectly generated $2x$ hash taken from above url + """ + # NOTE: passlib 1.7.2 and earlier used the commented-out LATIN-1 test vector to detect + # this bug; but python3's crypt.crypt() only supports unicode inputs (and + # always encodes them as UTF8 before passing to crypt); so passlib 1.7.3 + # switched to the UTF8-compatible test vector below. This one's bug_hash value + # ("$2x$...rcAS") was drawn from the same openwall source (above); and the correct + # hash ("$2a$...X6eu") was generated by passing the raw bytes to python2's + # crypt.crypt() using OpenBSD 6.7 (hash confirmed as same for $2a$ & $2b$). + + # LATIN-1 test vector + # secret = b"\xA3" + # bug_hash = ident.encode("ascii") + b"05$/OK.fbVrR/bpIqNJ5ianF.CE5elHaaO4EbggVDjb8P19RukzXSM3e" + # correct_hash = ident.encode("ascii") + b"05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq" + + # UTF-8 test vector + secret = b"\xd1\x91" # aka "\u0451" + bug_hash = ident.encode("ascii") + b"05$6bNw2HLQYeqHYyBfLMsv/OiwqTymGIGzFsA4hOTWebfehXHNprcAS" + correct_hash = ident.encode("ascii") + b"05$6bNw2HLQYeqHYyBfLMsv/OUcZd0LKP39b87nBw3.S2tVZSqiQX6eu" + + if verify(secret, bug_hash): + # NOTE: this only EVER be observed in (broken) 2a and (backward-compat) 2x hashes + # generated by crypt_blowfish library. 2y/2b hashes should not have the bug + # (but we check w/ them anyways). + raise PasslibSecurityError( + "passlib.hash.bcrypt: Your installation of the %r backend is vulnerable to " + "the crypt_blowfish 8-bit bug (CVE-2011-2483) under %r hashes, " + "and should be upgraded or replaced with another backend" % (backend, ident)) + + # it doesn't have wraparound bug, but make sure it *does* verify against the correct + # hash, or we're in some weird third case! + if not verify(secret, correct_hash): + raise RuntimeError("%s backend failed to verify %s 8bit hash" % (backend, ident)) + + def detect_wrap_bug(ident): + """ + check for bsd wraparound bug (fixed in 2b) + this is treated as a warning, because it's rare in the field, + and pybcrypt (as of 2015-7-21) is unpatched, but some people may be stuck with it. + + test cases from + + NOTE: reference hash is of password "0"*72 + + NOTE: if in future we need to deliberately create hashes which have this bug, + can use something like 'hashpw(repeat_string(secret[:((1+secret) % 256) or 1]), 72)' + """ + # check if it exhibits wraparound bug + secret = (b"0123456789"*26)[:255] + bug_hash = ident.encode("ascii") + b"04$R1lJ2gkNaoPGdafE.H.16.nVyh2niHsGJhayOHLMiXlI45o8/DU.6" + if verify(secret, bug_hash): + return True + + # if it doesn't have wraparound bug, make sure it *does* handle things + # correctly -- or we're in some weird third case. 
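Both of these checks follow the same probe pattern: feed the backend a secret together with a known-buggy reference hash; a match means the bug is present, and failing the known-good hash too means something else entirely is broken. A distilled toy version (the function and its name are hypothetical, not part of passlib):

```python
def probe_backend(verify, secret, bug_hash, correct_hash):
    """return True if the backend reproduces the buggy reference digest"""
    if verify(secret, bug_hash):
        return True                      # exhibits the bug the vectors encode
    if not verify(secret, correct_hash):
        raise RuntimeError("backend failed the known-good vector")
    return False
```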
+ correct_hash = ident.encode("ascii") + b"04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi" + if not verify(secret, correct_hash): + raise RuntimeError("%s backend failed to verify %s wraparound hash" % (backend, ident)) + + return False + + def assert_lacks_wrap_bug(ident): + if not detect_wrap_bug(ident): + return + # should only see in 2a, later idents should NEVER exhibit this bug: + # * 2y implementations should have been free of it + # * 2b was what (supposedly) fixed it + raise RuntimeError("%s backend unexpectedly has wraparound bug for %s" % (backend, ident)) + + #---------------------------------------------------------------- + # check for old 20 support + #---------------------------------------------------------------- + test_hash_20 = b"$2$04$5BJqKfqMQvV7nS.yUguNcuRfMMOXK0xPWavM7pOzjEi5ze5T1k8/S" + result = safe_verify("test", test_hash_20) + if result is NotImplemented: + mixin_cls._lacks_20_support = True + log.debug("%r backend lacks $2$ support, enabling workaround", backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2$ hash" % backend) + + #---------------------------------------------------------------- + # check for 2a support + #---------------------------------------------------------------- + result = safe_verify("test", TEST_HASH_2A) + if result is NotImplemented: + # 2a support is required, and should always be present + raise RuntimeError("%s lacks support for $2a$ hashes" % backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2a$ hash" % backend) + else: + assert_lacks_8bit_bug(IDENT_2A) + if detect_wrap_bug(IDENT_2A): + if backend == "os_crypt": + # don't make this a warning for os crypt (e.g. openbsd); + # they'll have proper 2b implementation which will be used for new hashes. + # so even if we didn't have a workaround, this bug wouldn't be a concern. + log.debug("%r backend has $2a$ bsd wraparound bug, enabling workaround", backend) + else: + # installed library has the bug -- want to let users know, + # so they can upgrade it to something better (e.g. bcrypt cffi library) + warn("passlib.hash.bcrypt: Your installation of the %r backend is vulnerable to " + "the bsd wraparound bug, " + "and should be upgraded or replaced with another backend " + "(enabling workaround for now)." % backend, + uh.exc.PasslibSecurityWarning) + mixin_cls._has_2a_wraparound_bug = True + + #---------------------------------------------------------------- + # check for 2y support + #---------------------------------------------------------------- + test_hash_2y = TEST_HASH_2A.replace("2a", "2y") + result = safe_verify("test", test_hash_2y) + if result is NotImplemented: + mixin_cls._lacks_2y_support = True + log.debug("%r backend lacks $2y$ support, enabling workaround", backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2y$ hash" % backend) + else: + # NOTE: Not using this as fallback candidate, + # lacks wide enough support across implementations. 
+ assert_lacks_8bit_bug(IDENT_2Y) + assert_lacks_wrap_bug(IDENT_2Y) + + #---------------------------------------------------------------- + # TODO: check for 2x support + #---------------------------------------------------------------- + + #---------------------------------------------------------------- + # check for 2b support + #---------------------------------------------------------------- + test_hash_2b = TEST_HASH_2A.replace("2a", "2b") + result = safe_verify("test", test_hash_2b) + if result is NotImplemented: + mixin_cls._lacks_2b_support = True + log.debug("%r backend lacks $2b$ support, enabling workaround", backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2b$ hash" % backend) + else: + mixin_cls._fallback_ident = IDENT_2B + assert_lacks_8bit_bug(IDENT_2B) + assert_lacks_wrap_bug(IDENT_2B) + + # set flag so we don't have to run this again + mixin_cls._workrounds_initialized = True + return True + + #=================================================================== + # digest calculation + #=================================================================== + + # _calc_checksum() defined by backends + + def _prepare_digest_args(self, secret): + """ + common helper for backends to implement _calc_checksum(). + takes in secret, returns (secret, ident) pair, + """ + return self._norm_digest_args(secret, self.ident, new=self.use_defaults) + + @classmethod + def _norm_digest_args(cls, secret, ident, new=False): + # make sure secret is unicode + require_valid_utf8_bytes = cls._require_valid_utf8_bytes + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + elif require_valid_utf8_bytes: + # if backend requires utf8 bytes (os_crypt); + # make sure input actually is utf8, or don't bother enabling utf-8 specific helpers. + try: + secret.decode("utf-8") + except UnicodeDecodeError: + # XXX: could just throw PasswordValueError here, backend will just do that + # when _calc_digest() is actually called. + require_valid_utf8_bytes = False + + # check max secret size + uh.validate_secret(secret) + + # check for truncation (during .hash() calls only) + if new: + cls._check_truncate_policy(secret) + + # NOTE: especially important to forbid NULLs for bcrypt, since many + # backends (bcryptor, bcrypt) happily accept them, and then + # silently truncate the password at first NULL they encounter! + if _BNULL in secret: + raise uh.exc.NullPasswordError(cls) + + # TODO: figure out way to skip these tests when not needed... + + # protect from wraparound bug by truncating secret before handing it to the backend. + # bcrypt only uses first 72 bytes anyways. + # NOTE: not needed for 2y/2b, but might use 2a as fallback for them. + if cls._has_2a_wraparound_bug and len(secret) >= 255: + if require_valid_utf8_bytes: + # backend requires valid utf8 bytes, so truncate secret to nearest valid segment. + # want to do this in constant time to not give away info about secret. + # NOTE: this only works because bcrypt will ignore everything past + # secret[71], so padding to include a full utf8 sequence + # won't break anything about the final output. + secret = utf8_truncate(secret, 72) + else: + secret = secret[:72] + + # special case handling for variants (ordered most common first) + if ident == IDENT_2A: + # nothing needs to be done. + pass + + elif ident == IDENT_2B: + if cls._lacks_2b_support: + # handle $2b$ hash format even if backend is too old. + # have it generate a 2A/2Y digest, then return it as a 2B hash. 
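From the caller's side, all of this variant juggling is invisible; requesting a specific ident is just a `using()` keyword. A usage sketch (assumes some bcrypt backend is installed):

```python
from passlib.hash import bcrypt

h = bcrypt.using(ident="2b", rounds=12).hash("hunter2")
assert h.startswith("$2b$12$")
assert bcrypt.verify("hunter2", h)
```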
+ # 2a-only backend could potentially exhibit wraparound bug -- + # but we work around that issue above. + ident = cls._fallback_ident + + elif ident == IDENT_2Y: + if cls._lacks_2y_support: + # handle $2y$ hash format (not supported by BSDs, being phased out on others) + # have it generate a 2A/2B digest, then return it as a 2Y hash. + ident = cls._fallback_ident + + elif ident == IDENT_2: + if cls._lacks_20_support: + # handle legacy $2$ format (not supported by most backends except BSD os_crypt) + # we can fake $2$ behavior using the 2A/2Y/2B algorithm + # by repeating the password until it's at least 72 chars in length. + if secret: + if require_valid_utf8_bytes: + # NOTE: this only works because bcrypt will ignore everything past + # secret[71], so padding to include a full utf8 sequence + # won't break anything about the final output. + secret = utf8_repeat_string(secret, 72) + else: + secret = repeat_string(secret, 72) + ident = cls._fallback_ident + + elif ident == IDENT_2X: + + # NOTE: shouldn't get here. + # XXX: could check if backend does actually offer 'support' + raise RuntimeError("$2x$ hashes not currently supported by passlib") + + else: + raise AssertionError("unexpected ident value: %r" % ident) + + return secret, ident + +#----------------------------------------------------------------------- +# stub backend +#----------------------------------------------------------------------- +class _NoBackend(_BcryptCommon): + """ + mixin used before any backend has been loaded. + contains stubs that force loading of one of the available backends. + """ + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + self._stub_requires_backend() + # NOTE: have to use super() here so that we don't recursively + # call subclass's wrapped _calc_checksum, e.g. bcrypt_sha256._calc_checksum + return super(bcrypt, self)._calc_checksum(secret) + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# bcrypt backend +#----------------------------------------------------------------------- +class _BcryptBackend(_BcryptCommon): + """ + backend which uses 'bcrypt' package + """ + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # try to import bcrypt + global _bcrypt + if _detect_pybcrypt(): + # pybcrypt was installed instead + return False + try: + import bcrypt as _bcrypt + except ImportError: # pragma: no cover + return False + try: + version = _bcrypt.__about__.__version__ + except: + log.warning("(trapped) error reading bcrypt version", exc_info=True) + version = '' + + log.debug("detected 'bcrypt' backend, version %r", version) + return mixin_cls._finalize_backend_mixin(name, dryrun) + + # # TODO: would like to implementing verify() directly, + # # to skip need for parsing hash strings. + # # below method has a few edge cases where it chokes though. + # @classmethod + # def verify(cls, secret, hash): + # if isinstance(hash, unicode): + # hash = hash.encode("ascii") + # ident = hash[:hash.index(b"$", 1)+1].decode("ascii") + # if ident not in cls.ident_values: + # raise uh.exc.InvalidHashError(cls) + # secret, eff_ident = cls._norm_digest_args(secret, ident) + # if eff_ident != ident: + # # lacks support for original ident, replace w/ new one. 
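For comparison, the calls this backend ultimately delegates to in the `bcrypt` package look like the following (a sketch; `pip install bcrypt` assumed):

```python
import bcrypt

salt = bcrypt.gensalt(rounds=12, prefix=b"2b")   # "$2b$12$" + 22-char salt
digest = bcrypt.hashpw(b"hunter2", salt)
assert bcrypt.checkpw(b"hunter2", digest)
```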
+ # hash = eff_ident.encode("ascii") + hash[len(ident):] + # result = _bcrypt.hashpw(secret, hash) + # assert result.startswith(eff_ident) + # return consteq(result, hash) + + def _calc_checksum(self, secret): + # bcrypt behavior: + # secret must be bytes + # config must be ascii bytes + # returns ascii bytes + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + if isinstance(config, unicode): + config = config.encode("ascii") + hash = _bcrypt.hashpw(secret, config) + assert isinstance(hash, bytes) + if not hash.startswith(config) or len(hash) != len(config)+31: + raise uh.exc.CryptBackendError(self, config, hash, source="`bcrypt` package") + return hash[-31:].decode("ascii") + +#----------------------------------------------------------------------- +# bcryptor backend +#----------------------------------------------------------------------- +class _BcryptorBackend(_BcryptCommon): + """ + backend which uses 'bcryptor' package + """ + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # try to import bcryptor + global _bcryptor + try: + import bcryptor as _bcryptor + except ImportError: # pragma: no cover + return False + + # deprecated as of 1.7.2 + if not dryrun: + warn("Support for `bcryptor` is deprecated, and will be removed in Passlib 1.8; " + "Please use `pip install bcrypt` instead", DeprecationWarning) + + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum(self, secret): + # bcryptor behavior: + # py2: unicode secret/hash encoded as ascii bytes before use, + # bytes taken as-is; returns ascii bytes. + # py3: not supported + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + hash = _bcryptor.engine.Engine(False).hash_key(secret, config) + if not hash.startswith(config) or len(hash) != len(config) + 31: + raise uh.exc.CryptBackendError(self, config, hash, source="bcryptor library") + return str_to_uascii(hash[-31:]) + +#----------------------------------------------------------------------- +# pybcrypt backend +#----------------------------------------------------------------------- +class _PyBcryptBackend(_BcryptCommon): + """ + backend which uses 'pybcrypt' package + """ + + #: classwide thread lock used for pybcrypt < 0.3 + _calc_lock = None + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # try to import pybcrypt + global _pybcrypt + if not _detect_pybcrypt(): + # not installed, or bcrypt installed instead + return False + try: + import bcrypt as _pybcrypt + except ImportError: # pragma: no cover + # XXX: should we raise AssertionError here? (if get here, _detect_pybcrypt() is broken) + return False + + # deprecated as of 1.7.2 + if not dryrun: + warn("Support for `py-bcrypt` is deprecated, and will be removed in Passlib 1.8; " + "Please use `pip install bcrypt` instead", DeprecationWarning) + + # determine pybcrypt version + try: + version = _pybcrypt._bcrypt.__version__ + except: + log.warning("(trapped) error reading pybcrypt version", exc_info=True) + version = "" + log.debug("detected 'pybcrypt' backend, version %r", version) + + # return calc function based on version + vinfo = parse_version(version) or (0, 0) + if vinfo < (0, 3): + warn("py-bcrypt %s has a major security vulnerability, " + "you should upgrade to py-bcrypt 0.3 immediately." 
+ % version, uh.exc.PasslibSecurityWarning) + if mixin_cls._calc_lock is None: + import threading + mixin_cls._calc_lock = threading.Lock() + mixin_cls._calc_checksum = get_unbound_method_function(mixin_cls._calc_checksum_threadsafe) + + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum_threadsafe(self, secret): + # as workaround for pybcrypt < 0.3's concurrency issue, + # we wrap everything in a thread lock. as long as bcrypt is only + # used through passlib, this should be safe. + with self._calc_lock: + return self._calc_checksum_raw(secret) + + def _calc_checksum_raw(self, secret): + # py-bcrypt behavior: + # py2: unicode secret/hash encoded as ascii bytes before use, + # bytes taken as-is; returns ascii bytes. + # py3: unicode secret encoded as utf-8 bytes, + # hash encoded as ascii bytes, returns ascii unicode. + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + hash = _pybcrypt.hashpw(secret, config) + if not hash.startswith(config) or len(hash) != len(config) + 31: + raise uh.exc.CryptBackendError(self, config, hash, source="pybcrypt library") + return str_to_uascii(hash[-31:]) + + _calc_checksum = _calc_checksum_raw + +#----------------------------------------------------------------------- +# os crypt backend +#----------------------------------------------------------------------- +class _OsCryptBackend(_BcryptCommon): + """ + backend which uses :func:`crypt.crypt` + """ + + #: set flag to ensure _prepare_digest_args() doesn't create invalid utf8 string + #: when truncating bytes. + _require_valid_utf8_bytes = not crypt_accepts_bytes + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + if not test_crypt("test", TEST_HASH_2A): + return False + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum(self, secret): + # + # run secret through crypt.crypt(). + # if everything goes right, we'll get back a properly formed bcrypt hash. + # + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + hash = safe_crypt(secret, config) + if hash is not None: + if not hash.startswith(config) or len(hash) != len(config) + 31: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-31:] + + # + # Check if this failed due to non-UTF8 bytes + # In detail: under py3, crypt.crypt() requires unicode inputs, which are then encoded to + # utf8 before passing them to os crypt() call. this is done according to the "s" format + # specifier for PyArg_ParseTuple (https://docs.python.org/3/c-api/arg.html). + # There appears no way to get around that to pass raw bytes; so we just throw error here + # to let user know they need to use another backend if they want raw bytes support. + # + # XXX: maybe just let safe_crypt() throw UnicodeDecodeError under passlib 2.0, + # and then catch it above? maybe have safe_crypt ALWAYS throw error + # instead of returning None? (would save re-detecting what went wrong) + # XXX: isn't secret ALWAYS bytes at this point? + # + if PY3 and isinstance(secret, bytes): + try: + secret.decode("utf-8") + except UnicodeDecodeError: + raise error_from(uh.exc.PasswordValueError( + "python3 crypt.crypt() ony supports bytes passwords using UTF8; " + "passlib recommends running `pip install bcrypt` for general bcrypt support.", + ), None) + + # + # else crypt() call failed for unknown reason. 
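The stdlib call being wrapped here is sketched below; note that `crypt` is Unix-only, deprecated since Python 3.11 and removed in 3.13, and whether bcrypt is available depends on the platform's crypt(3) implementation (e.g. libxcrypt):

```python
import crypt

if crypt.METHOD_BLOWFISH in crypt.methods:            # platform offers $2b$
    config = crypt.mksalt(crypt.METHOD_BLOWFISH, rounds=2 ** 12)
    print(crypt.crypt("hunter2", config))             # full "$2b$12$..." hash
```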
+ # + # NOTE: getting here should be considered a bug in passlib -- + # if os_crypt backend detection said there's support, + # and we've already checked all known reasons above; + # want them to file bug so we can figure out what happened. + # in the meantime, users can avoid this by installing bcrypt-cffi backend; + # which won't have this (or utf8) edgecases. + # + # XXX: throw something more specific, like an "InternalBackendError"? + # NOTE: if do change this error, need to update test_81_crypt_fallback() expectations + # about what will be thrown; as well as safe_verify() above. + # + debug_only_repr = uh.exc.debug_only_repr + raise uh.exc.InternalBackendError( + "crypt.crypt() failed for unknown reason; " + "passlib recommends running `pip install bcrypt` for general bcrypt support." + # for debugging UTs -- + "(config=%s, secret=%s)" % (debug_only_repr(config), debug_only_repr(secret)), + ) + +#----------------------------------------------------------------------- +# builtin backend +#----------------------------------------------------------------------- +class _BuiltinBackend(_BcryptCommon): + """ + backend which uses passlib's pure-python implementation + """ + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + from passlib.utils import as_bool + if not as_bool(os.environ.get("PASSLIB_BUILTIN_BCRYPT")): + log.debug("bcrypt 'builtin' backend not enabled via $PASSLIB_BUILTIN_BCRYPT") + return False + global _builtin_bcrypt + from passlib.crypto._blowfish import raw_bcrypt as _builtin_bcrypt + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum(self, secret): + secret, ident = self._prepare_digest_args(secret) + chk = _builtin_bcrypt(secret, ident[1:-1], + self.salt.encode("ascii"), self.rounds) + return chk.decode("ascii") + +#============================================================================= +# handler +#============================================================================= +class bcrypt(_NoBackend, _BcryptCommon): + """This class implements the BCrypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 12, must be between 4 and 31, inclusive. + This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}` + -- increasing the rounds by +1 will double the amount of time taken. + + :type ident: str + :param ident: + Specifies which version of the BCrypt algorithm will be used when creating a new hash. + Typically this option is not needed, as the default (``"2b"``) is usually the correct choice. + If specified, it must be one of the following: + + * ``"2"`` - the first revision of BCrypt, which suffers from a minor security flaw and is generally not used anymore. + * ``"2a"`` - some implementations suffered from rare security flaws, replaced by 2b. + * ``"2y"`` - format specific to the *crypt_blowfish* BCrypt implementation, + identical to ``"2b"`` in all but name. + * ``"2b"`` - latest revision of the official BCrypt algorithm, current default. 
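The pure-python backend defined above is deliberately opt-in: it is far too slow for production use, so passlib refuses to load it unless an environment flag is set first. A sketch of enabling it anyway (only sensible where no compiled backend can be installed):

```python
import os
os.environ["PASSLIB_BUILTIN_BCRYPT"] = "true"   # must be set before set_backend()

from passlib.hash import bcrypt
bcrypt.set_backend("builtin")
print(bcrypt.get_backend())                     # "builtin"
```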
+
+    :param bool truncate_error:
+        By default, BCrypt will silently truncate passwords larger than 72 bytes.
+        Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash`
+        to raise a :exc:`~passlib.exc.PasswordTruncateError` instead.
+
+        .. versionadded:: 1.7
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include ``rounds``
+        that are too small or too large, and ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+
+    .. versionchanged:: 1.6
+        This class now supports ``"2y"`` hashes, and recognizes
+        (but does not support) the broken ``"2x"`` hashes.
+        (see the :ref:`crypt_blowfish bug <crypt-blowfish-bug>`
+        for details).
+
+    .. versionchanged:: 1.6
+        Added a pure-python backend.
+
+    .. versionchanged:: 1.6.3
+
+        Added support for ``"2b"`` variant.
+
+    .. versionchanged:: 1.7
+
+        Now defaults to ``"2b"`` variant.
+    """
+    #=============================================================================
+    # backend
+    #=============================================================================
+
+    # NOTE: the brunt of the bcrypt class is implemented in _BcryptCommon.
+    #       there are then subclasses for each backend (e.g. _PyBcryptBackend),
+    #       these are dynamically prepended to this class's bases
+    #       in order to load the appropriate backend.
+
+    #: list of potential backends
+    backends = ("bcrypt", "pybcrypt", "bcryptor", "os_crypt", "builtin")
+
+    #: flag that this class's bases should be modified by SubclassBackendMixin
+    _backend_mixin_target = True
+
+    #: map of backend -> mixin class, used by _get_backend_loader()
+    _backend_mixin_map = {
+        None: _NoBackend,
+        "bcrypt": _BcryptBackend,
+        "pybcrypt": _PyBcryptBackend,
+        "bcryptor": _BcryptorBackend,
+        "os_crypt": _OsCryptBackend,
+        "builtin": _BuiltinBackend,
+    }
+
+    #=============================================================================
+    # eoc
+    #=============================================================================
+
+#=============================================================================
+# variants
+#=============================================================================
+_UDOLLAR = u("$")
+
+# XXX: it might be better to have all the bcrypt variants share a common base class,
+#      and have the (django_)bcrypt_sha256 wrappers just proxy bcrypt instead of subclassing it.
+class _wrapped_bcrypt(bcrypt):
+    """
+    abstracts out some bits bcrypt_sha256 & django_bcrypt_sha256 share.
+    - bypass backend-loading wrappers for hash() etc
+    - disable truncation support, sha256 wrappers don't need it.
+    """
+    setting_kwds = tuple(elem for elem in bcrypt.setting_kwds if elem not in ["truncate_error"])
+    truncate_size = None
+
+    # XXX: these will be needed if any bcrypt backends directly implement this...
+    # @classmethod
+    # def hash(cls, secret, **kwds):
+    #     # bypass bcrypt backend overriding this method
+    #     # XXX: would wrapping bcrypt make this easier than subclassing it?
+ # return super(_BcryptCommon, cls).hash(secret, **kwds) + # + # @classmethod + # def verify(cls, secret, hash): + # # bypass bcrypt backend overriding this method + # return super(_BcryptCommon, cls).verify(secret, hash) + # + # @classmethod + # def genhash(cls, secret, hash): + # # bypass bcrypt backend overriding this method + # return super(_BcryptCommon, cls).genhash(secret, hash) + + @classmethod + def _check_truncate_policy(cls, secret): + # disable check performed by bcrypt(), since this doesn't truncate passwords. + pass + +#============================================================================= +# bcrypt sha256 wrapper +#============================================================================= + +class bcrypt_sha256(_wrapped_bcrypt): + """ + This class implements a composition of BCrypt + HMAC_SHA256, + and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept + all the same optional keywords as the base :class:`bcrypt` hash. + + .. versionadded:: 1.6.2 + + .. versionchanged:: 1.7 + + Now defaults to ``"2b"`` bcrypt variant; though supports older hashes + generated using the ``"2a"`` bcrypt variant. + + .. versionchanged:: 1.7.3 + + For increased security, updated to use HMAC-SHA256 instead of plain SHA256. + Now only supports the ``"2b"`` bcrypt variant. Hash format updated to "v=2". + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "bcrypt_sha256" + + #-------------------- + # GenericHandler + #-------------------- + # this is locked at 2b for now (with 2a allowed only for legacy v1 format) + ident_values = (IDENT_2A, IDENT_2B) + + # clone bcrypt's ident aliases so they can be used here as well... + ident_aliases = (lambda ident_values: dict(item for item in bcrypt.ident_aliases.items() + if item[1] in ident_values))(ident_values) + default_ident = IDENT_2B + + #-------------------- + # class specific + #-------------------- + + _supported_versions = set([1, 2]) + + #=================================================================== + # instance attrs + #=================================================================== + + #: wrapper version. 
+    #: v1 -- used prior to passlib 1.7.3; performs ``bcrypt(sha256(secret), salt, cost)``
+    #: v2 -- new in passlib 1.7.3; performs ``bcrypt(sha256_hmac(salt, secret), salt, cost)``
+    version = 2
+
+    #===================================================================
+    # configuration
+    #===================================================================
+
+    @classmethod
+    def using(cls, version=None, **kwds):
+        subcls = super(bcrypt_sha256, cls).using(**kwds)
+        if version is not None:
+            subcls.version = subcls._norm_version(version)
+        ident = subcls.default_ident
+        if subcls.version > 1 and ident != IDENT_2B:
+            raise ValueError("bcrypt %r hashes not allowed for version %r" %
+                             (ident, subcls.version))
+        return subcls
+
+    #===================================================================
+    # formatting
+    #===================================================================
+
+    # sample hash:
+    # $bcrypt-sha256$2a,6$/3OeRpbOf8/l6nPPRdZPp.$nRiyYqPobEZGdNRBWihQhiFDh1ws1tu
+    # $bcrypt-sha256$ -- prefix/identifier
+    # 2a -- bcrypt variant
+    # , -- field separator
+    # 6 -- bcrypt work factor
+    # $ -- section separator
+    # /3OeRpbOf8/l6nPPRdZPp. -- salt
+    # $ -- section separator
+    # nRiyYqPobEZGdNRBWihQhiFDh1ws1tu -- digest
+
+    # XXX: we can't use .ident attr due to bcrypt code using it.
+    # working around that via prefix.
+    prefix = u('$bcrypt-sha256$')
+
+    #: current version 2 hash format
+    _v2_hash_re = re.compile(r"""(?x)
+        ^
+        [$]bcrypt-sha256[$]
+        v=(?P<version>\d+),
+        t=(?P<type>2b),
+        r=(?P<rounds>\d{1,2})
+        [$](?P<salt>[^$]{22})
+        (?:[$](?P<digest>[^$]{31}))?
+        $
+        """)
+
+    #: old version 1 hash format
+    _v1_hash_re = re.compile(r"""(?x)
+        ^
+        [$]bcrypt-sha256[$]
+        (?P<type>2[ab]),
+        (?P<rounds>\d{1,2})
+        [$](?P<salt>[^$]{22})
+        (?:[$](?P<digest>[^$]{31}))?
+        $
+        """)
+
+    @classmethod
+    def identify(cls, hash):
+        hash = uh.to_unicode_for_identify(hash)
+        if not hash:
+            return False
+        return hash.startswith(cls.prefix)
+
+    @classmethod
+    def from_string(cls, hash):
+        hash = to_unicode(hash, "ascii", "hash")
+        if not hash.startswith(cls.prefix):
+            raise uh.exc.InvalidHashError(cls)
+        m = cls._v2_hash_re.match(hash)
+        if m:
+            version = int(m.group("version"))
+            if version < 2:
+                raise uh.exc.MalformedHashError(cls)
+        else:
+            m = cls._v1_hash_re.match(hash)
+            if m:
+                version = 1
+            else:
+                raise uh.exc.MalformedHashError(cls)
+        rounds = m.group("rounds")
+        if rounds.startswith(uh._UZERO) and rounds != uh._UZERO:
+            raise uh.exc.ZeroPaddedRoundsError(cls)
+        return cls(
+            version=version,
+            ident=m.group("type"),
+            rounds=int(rounds),
+            salt=m.group("salt"),
+            checksum=m.group("digest"),
+        )
+
+    _v2_template = u("$bcrypt-sha256$v=2,t=%s,r=%d$%s$%s")
+    _v1_template = u("$bcrypt-sha256$%s,%d$%s$%s")
+
+    def to_string(self):
+        if self.version == 1:
+            template = self._v1_template
+        else:
+            template = self._v2_template
+        hash = template % (self.ident.strip(_UDOLLAR), self.rounds, self.salt, self.checksum)
+        return uascii_to_str(hash)
+
+    #===================================================================
+    # init
+    #===================================================================
+
+    def __init__(self, version=None, **kwds):
+        if version is not None:
+            self.version = self._norm_version(version)
+        super(bcrypt_sha256, self).__init__(**kwds)
+
+    #===================================================================
+    # version
+    #===================================================================
+
+    @classmethod
+    def _norm_version(cls, version):
+        if version not in cls._supported_versions:
+            raise ValueError("%s: unknown or unsupported version: %r" % (cls.name,
version))
+        return version
+
+    #===================================================================
+    # checksum
+    #===================================================================
+
+    def _calc_checksum(self, secret):
+        # NOTE: can't use digest directly, since bcrypt stops at first NULL.
+        # NOTE: bcrypt doesn't fully mix entropy for bytes 55-72 of password
+        # (XXX: citation needed), so we don't want key to be > 55 bytes.
+        # thus, have to use base64 (44 bytes) rather than hex (64 bytes).
+        # XXX: it's later come out that 55-72 may be ok, so a later revision of bcrypt_sha256
+        # may switch to hex encoding, since it's simpler to implement elsewhere.
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+
+        if self.version == 1:
+            # version 1 -- old version just ran secret through sha256(),
+            # though this could be vulnerable to a breach attack
+            # (c.f. issue 114); which is why v2 switched to hmac wrapper.
+            digest = sha256(secret).digest()
+        else:
+            # version 2 -- running secret through HMAC keyed off salt.
+            # this prevents known secret -> sha256 password tables from being
+            # used to test against a bcrypt_sha256 hash.
+            # keying off salt (instead of constant string) should minimize chances of this
+            # colliding with existing table of hmac digest lookups as well.
+            # NOTE: salt in this case is the "bcrypt64"-encoded value, not the raw salt bytes,
+            # to make things easier for parallel implementations of this hash --
+            # saving them the trouble of implementing a "bcrypt64" decoder.
+            salt = self.salt
+            if salt[-1] not in self.final_salt_chars:
+                # forbidding salts with padding bits set, because bcrypt implementations
+                # won't consistently hash them the same. since we control this format,
+                # just prevent these from even getting used.
+                raise ValueError("invalid salt string")
+            digest = compile_hmac("sha256", salt.encode("ascii"))(secret)
+
+        # NOTE: output of b64encode() uses "+/" altchars, "=" padding chars,
+        # and no leading/trailing whitespace.
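+        # [editor's illustration -- not passlib code] the v2 digest computed above
+        # is equivalent to this standalone sketch, assuming `salt` is the 22-char
+        # bcrypt64 salt string and `secret` is bytes:
+        #
+        #     import hmac, hashlib, base64
+        #     digest = hmac.new(salt.encode("ascii"), secret, hashlib.sha256).digest()
+        #     key = base64.b64encode(digest)   # 44 bytes -- stays under bcrypt's 55-byte limit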
+ key = b64encode(digest) + + # hand result off to normal bcrypt algorithm + return super(bcrypt_sha256, self)._calc_checksum(key) + + #=================================================================== + # other + #=================================================================== + + def _calc_needs_update(self, **kwds): + if self.version < type(self).version: + return True + return super(bcrypt_sha256, self)._calc_needs_update(**kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/cisco.py b/venv/lib/python3.12/site-packages/passlib/handlers/cisco.py new file mode 100644 index 0000000..e715e1a --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/cisco.py @@ -0,0 +1,440 @@ +""" +passlib.handlers.cisco -- Cisco password hashes +""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import right_pad_string, to_unicode, repeat_string, to_bytes +from passlib.utils.binary import h64 +from passlib.utils.compat import unicode, u, join_byte_values, \ + join_byte_elems, iter_byte_values, uascii_to_str +import passlib.utils.handlers as uh +# local +__all__ = [ + "cisco_pix", + "cisco_asa", + "cisco_type7", +] + +#============================================================================= +# utils +#============================================================================= + +#: dummy bytes used by spoil_digest var in cisco_pix._calc_checksum() +_DUMMY_BYTES = b'\xFF' * 32 + +#============================================================================= +# cisco pix firewall hash +#============================================================================= +class cisco_pix(uh.HasUserContext, uh.StaticHandler): + """ + This class implements the password hash used by older Cisco PIX firewalls, + and follows the :ref:`password-hash-api`. + It does a single round of hashing, and relies on the username + as the salt. + + This class only allows passwords <= 16 bytes, anything larger + will result in a :exc:`~passlib.exc.PasswordSizeError` if passed to :meth:`~cisco_pix.hash`, + and be silently rejected if passed to :meth:`~cisco_pix.verify`. + + The :meth:`~passlib.ifc.PasswordHash.hash`, + :meth:`~passlib.ifc.PasswordHash.genhash`, and + :meth:`~passlib.ifc.PasswordHash.verify` methods + all support the following extra keyword: + + :param str user: + String containing name of user account this password is associated with. + + This is *required* in order to correctly hash passwords associated + with a user account on the Cisco device, as it is used to salt + the hash. + + Conversely, this *must* be omitted or set to ``""`` in order to correctly + hash passwords which don't have an associated user account + (such as the "enable" password). + + .. versionadded:: 1.6 + + .. versionchanged:: 1.7.1 + + Passwords > 16 bytes are now rejected / throw error instead of being silently truncated, + to match Cisco behavior. 
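+
+        A minimal usage sketch (editor's illustration, not part of the upstream
+        docstring)::
+
+            from passlib.hash import cisco_pix
+
+            # per-user password -- the username is mixed in as the salt
+            h = cisco_pix.hash("hunter2", user="jsmith")
+            assert cisco_pix.verify("hunter2", h, user="jsmith")
+
+            # "enable" password -- no user account, so ``user`` is omitted
+            enable = cisco_pix.hash("hunter2")
+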
A number of :ref:`bugs ` were fixed
+        which caused prior releases to generate unverifiable hashes in certain cases.
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+
+    #--------------------
+    # PasswordHash
+    #--------------------
+    name = "cisco_pix"
+
+    truncate_size = 16
+
+    # NOTE: these are the default policy for PasswordHash,
+    # but want to set them explicitly for now.
+    truncate_error = True
+    truncate_verify_reject = True
+
+    #--------------------
+    # GenericHandler
+    #--------------------
+    checksum_size = 16
+    checksum_chars = uh.HASH64_CHARS
+
+    #--------------------
+    # custom
+    #--------------------
+
+    #: control flag signalling "cisco_asa" mode, set by cisco_asa class
+    _is_asa = False
+
+    #===================================================================
+    # methods
+    #===================================================================
+    def _calc_checksum(self, secret):
+        """
+        This function implements the "encrypted" hash format used by Cisco
+        PIX & ASA. Its behavior has been confirmed for ASA 9.6,
+        but is presumed correct for PIX & other ASA releases,
+        as it fits with known test vectors, and existing literature.
+
+        While nearly the same, the PIX & ASA hashes have slight differences,
+        so this function performs differently based on the _is_asa class flag.
+        Notable changes from PIX to ASA include password size limit
+        increased from 16 -> 32, and other internal changes.
+        """
+        # select PIX vs ASA mode
+        asa = self._is_asa
+
+        #
+        # encode secret
+        #
+        # per ASA 8.4 documentation,
+        # http://www.cisco.com/c/en/us/td/docs/security/asa/asa84/configuration/guide/asa_84_cli_config/ref_cli.html#Supported_Character_Sets,
+        # it supposedly uses UTF-8 -- though some double-encoding issues have
+        # been observed when trying to actually *set* a non-ascii password
+        # via ASDM, and access via SSH seems to strip 8-bit chars.
+        #
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+
+        #
+        # check if password too large
+        #
+        # Per ASA 9.6 changes listed in
+        # http://www.cisco.com/c/en/us/td/docs/security/asa/roadmap/asa_new_features.html,
+        # prior releases had a maximum limit of 32 characters.
+        # Testing with an ASA 9.6 system bears this out --
+        # setting a 32-char password for a user account,
+        # and logins will fail if any chars are appended.
+        # (ASA 9.6 added new PBKDF2-based hash algorithm,
+        # which supports larger passwords).
+        #
+        # Per PIX documentation
+        # http://www.cisco.com/en/US/docs/security/pix/pix50/configuration/guide/commands.html,
+        # it would not allow passwords > 16 chars.
+        #
+        # Thus, we unconditionally throw a password size error here,
+        # as nothing valid can come from a larger password.
+        # NOTE: assuming PIX has same behavior, but at 16 char limit.
+        #
+        spoil_digest = None
+        if len(secret) > self.truncate_size:
+            if self.use_defaults:
+                # called from hash()
+                msg = "Password too long (%s allows at most %d bytes)" % \
+                      (self.name, self.truncate_size)
+                raise uh.exc.PasswordSizeError(self.truncate_size, msg=msg)
+            else:
+                # called from verify() --
+                # We don't want to throw error, or return early,
+                # as that would let attacker know too much. Instead, we set a
+                # flag to add some dummy data into the md5 digest, so that
+                # output won't match truncated version of secret, or anything
+                # else that's fixed and predictable.
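+                # [editor's note] i.e. verify() still computes a full digest,
+                # but over data no real password can produce -- oversized
+                # secrets simply fail to verify, without leaking the size limit.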
+                spoil_digest = secret + _DUMMY_BYTES
+
+        #
+        # append user to secret
+        #
+        # Policy appears to be:
+        #
+        # * Nothing appended for enable password (user = "")
+        #
+        # * ASA: If user present, but secret is >= 28 chars, nothing appended.
+        #
+        # * 1-2 byte users not allowed.
+        #   DEVIATION: we're letting them through, and repeating their
+        #   chars ala 3-char user, to simplify testing.
+        #   Could issue warning in the future though.
+        #
+        # * 3 byte user has first char repeated, to pad to 4.
+        #   (observed under ASA 9.6, assuming true elsewhere)
+        #
+        # * 4 byte users are used directly.
+        #
+        # * 5+ byte users are truncated to 4 bytes.
+        #
+        user = self.user
+        if user:
+            if isinstance(user, unicode):
+                user = user.encode("utf-8")
+            if not asa or len(secret) < 28:
+                secret += repeat_string(user, 4)
+
+        #
+        # pad / truncate result to limit
+        #
+        # While PIX always pads to 16 bytes, ASA increases to 32 bytes IFF
+        # secret+user > 16 bytes. This makes PIX & ASA have different results
+        # where secret size in range(13,16), and user is present --
+        # PIX will truncate to 16, ASA will truncate to 32.
+        #
+        if asa and len(secret) > 16:
+            pad_size = 32
+        else:
+            pad_size = 16
+        secret = right_pad_string(secret, pad_size)
+
+        #
+        # md5 digest
+        #
+        if spoil_digest:
+            # make sure digest won't match truncated version of secret
+            secret += spoil_digest
+        digest = md5(secret).digest()
+
+        #
+        # drop every 4th byte
+        # NOTE: guessing this was done because it makes output exactly
+        # 16 bytes, which may have been a general 'char password[]'
+        # size limit under PIX
+        #
+        digest = join_byte_elems(c for i, c in enumerate(digest) if (i + 1) & 3)
+
+        #
+        # encode using Hash64
+        #
+        return h64.encode_bytes(digest).decode("ascii")
+
+    # NOTE: works, but needs UTs.
+    # @classmethod
+    # def same_as_pix(cls, secret, user=""):
+    #     """
+    #     test whether (secret + user) combination should
+    #     have the same hash under PIX and ASA.
+    #
+    #     mainly present to help unittests.
+    #     """
+    #     # see _calc_checksum() above for details of this logic.
+    #     size = len(to_bytes(secret, "utf-8"))
+    #     if user and size < 28:
+    #         size += 4
+    #     return size < 17
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+
+class cisco_asa(cisco_pix):
+    """
+    This class implements the password hash used by Cisco ASA/PIX 7.0 and newer (2005).
+    Aside from a different internal algorithm, its use and format are identical
+    to the older :class:`cisco_pix` class.
+
+    For passwords less than 13 characters, this should be identical to :class:`!cisco_pix`,
+    but will generate a different hash for most larger inputs
+    (See the `Format & Algorithm`_ section for the details).
+
+    This class only allows passwords <= 32 bytes, anything larger
+    will result in a :exc:`~passlib.exc.PasswordSizeError` if passed to :meth:`~cisco_asa.hash`,
+    and be silently rejected if passed to :meth:`~cisco_asa.verify`.
+
+    .. versionadded:: 1.7
+
+    .. versionchanged:: 1.7.1
+
+        Passwords > 32 bytes are now rejected / throw error instead of being silently truncated,
+        to match Cisco behavior. A number of :ref:`bugs ` were fixed
+        which caused prior releases to generate unverifiable hashes in certain cases.
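+
+        An illustrative comparison (editor's sketch, not upstream docs) -- the
+        two handlers agree on short passwords but diverge once padding differs::
+
+            from passlib.hash import cisco_pix, cisco_asa
+
+            # < 13 chars, no user: PIX and ASA produce the same digest
+            assert cisco_asa.verify("short", cisco_pix.hash("short"))
+
+            # 13-16 char password plus a user: PIX pads to 16 bytes, ASA to 32,
+            # so the hashes differ
+            p = "fourteen-chars"
+            assert cisco_pix.hash(p, user="admin") != cisco_asa.hash(p, user="admin")
+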
+ """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "cisco_asa" + + #-------------------- + # TruncateMixin + #-------------------- + truncate_size = 32 + + #-------------------- + # cisco_pix + #-------------------- + _is_asa = True + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# type 7 +#============================================================================= +class cisco_type7(uh.GenericHandler): + """ + This class implements the "Type 7" password encoding used by Cisco IOS, + and follows the :ref:`password-hash-api`. + It has a simple 4-5 bit salt, but is nonetheless a reversible encoding + instead of a real hash. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: int + :param salt: + This may be an optional salt integer drawn from ``range(0,16)``. + If omitted, one will be chosen at random. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` values that are out of range. + + Note that while this class outputs digests in upper-case hexadecimal, + it will accept lower-case as well. + + This class also provides the following additional method: + + .. automethod:: decode + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "cisco_type7" + setting_kwds = ("salt",) + + #-------------------- + # GenericHandler + #-------------------- + checksum_chars = uh.UPPER_HEX_CHARS + + #-------------------- + # HasSalt + #-------------------- + + # NOTE: encoding could handle max_salt_value=99, but since key is only 52 + # chars in size, not sure what appropriate behavior is for that edge case. + min_salt_value = 0 + max_salt_value = 52 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def using(cls, salt=None, **kwds): + subcls = super(cisco_type7, cls).using(**kwds) + if salt is not None: + salt = subcls._norm_salt(salt, relaxed=kwds.get("relaxed")) + subcls._generate_salt = staticmethod(lambda: salt) + return subcls + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if len(hash) < 2: + raise uh.exc.InvalidHashError(cls) + salt = int(hash[:2]) # may throw ValueError + return cls(salt=salt, checksum=hash[2:].upper()) + + def __init__(self, salt=None, **kwds): + super(cisco_type7, self).__init__(**kwds) + if salt is not None: + salt = self._norm_salt(salt) + elif self.use_defaults: + salt = self._generate_salt() + assert self._norm_salt(salt) == salt, "generated invalid salt: %r" % (salt,) + else: + raise TypeError("no salt specified") + self.salt = salt + + @classmethod + def _norm_salt(cls, salt, relaxed=False): + """ + validate & normalize salt value. + .. 
note::
+            the salt for this algorithm is an integer 0-52, not a string
+        """
+        if not isinstance(salt, int):
+            raise uh.exc.ExpectedTypeError(salt, "integer", "salt")
+        if 0 <= salt <= cls.max_salt_value:
+            return salt
+        msg = "salt/offset must be in 0..52 range"
+        if relaxed:
+            warn(msg, uh.PasslibHashWarning)
+            return 0 if salt < 0 else cls.max_salt_value
+        else:
+            raise ValueError(msg)
+
+    @staticmethod
+    def _generate_salt():
+        return uh.rng.randint(0, 15)
+
+    def to_string(self):
+        return "%02d%s" % (self.salt, uascii_to_str(self.checksum))
+
+    def _calc_checksum(self, secret):
+        # XXX: no idea what unicode policy is, but all examples are
+        # 7-bit ascii compatible, so using UTF-8
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        return hexlify(self._cipher(secret, self.salt)).decode("ascii").upper()
+
+    @classmethod
+    def decode(cls, hash, encoding="utf-8"):
+        """decode hash, returning original password.
+
+        :arg hash: encoded password
+        :param encoding: optional encoding to use (defaults to ``UTF-8``).
+        :returns: password as unicode
+        """
+        self = cls.from_string(hash)
+        tmp = unhexlify(self.checksum.encode("ascii"))
+        raw = self._cipher(tmp, self.salt)
+        return raw.decode(encoding) if encoding else raw
+
+    # type7 uses an xor-based Vigenère variant, using the following secret key:
+    _key = u("dsfd;kfoA,.iyewrkldJKDHSUBsgvca69834ncxv9873254k;fg87")
+
+    @classmethod
+    def _cipher(cls, data, salt):
+        """xor static key against data - encrypts & decrypts"""
+        key = cls._key
+        key_size = len(key)
+        return join_byte_values(
+            value ^ ord(key[(salt + idx) % key_size])
+            for idx, value in enumerate(iter_byte_values(data))
+        )
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/des_crypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/des_crypt.py
new file mode 100644
index 0000000..68a4ca7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/handlers/des_crypt.py
@@ -0,0 +1,607 @@
+"""passlib.handlers.des_crypt - traditional unix (DES) crypt and variants"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import re
+import logging; log = logging.getLogger(__name__)
+from warnings import warn
+# site
+# pkg
+from passlib.utils import safe_crypt, test_crypt, to_unicode
+from passlib.utils.binary import h64, h64big
+from passlib.utils.compat import byte_elem_value, u, uascii_to_str, unicode, suppress_cause
+from passlib.crypto.des import des_encrypt_int_block
+import passlib.utils.handlers as uh
+# local
+__all__ = [
+    "des_crypt",
+    "bsdi_crypt",
+    "bigcrypt",
+    "crypt16",
+]
+
+#=============================================================================
+# pure-python backend for des_crypt family
+#=============================================================================
+_BNULL = b'\x00'
+
+def _crypt_secret_to_key(secret):
+    """convert secret to 64-bit DES key.
+
+    this only uses the first 8 bytes of the secret,
+    and discards the high 8th bit of each byte at that.
+    a null parity bit is inserted after every 7th bit of the output.
+    """
+    # NOTE: this would set the parity bits correctly,
+    # but des_encrypt_int_block() would just ignore them...
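+    # [editor's note -- worked example, not passlib code] e.g. secret b"ab"
+    # packs as (0x61 & 0x7f) << 57 | (0x62 & 0x7f) << 49: each 7-bit char
+    # lands on its own byte of the 64-bit key, leaving that byte's low
+    # (parity) bit zero.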
+    ##return sum(expand_7bit(byte_elem_value(c) & 0x7f) << (56-i*8)
+    ##           for i, c in enumerate(secret[:8]))
+    return sum((byte_elem_value(c) & 0x7f) << (57-i*8)
+               for i, c in enumerate(secret[:8]))
+
+def _raw_des_crypt(secret, salt):
+    """pure-python backend for des_crypt"""
+    assert len(salt) == 2
+
+    # NOTE: some OSes will accept non-HASH64 characters in the salt,
+    # but what value they assign these characters varies wildly,
+    # so just rejecting them outright.
+    # the same goes for single-character salts...
+    # some OSes duplicate the char, some insert a '.' char,
+    # and openbsd does (something) which creates an invalid hash.
+    salt_value = h64.decode_int12(salt)
+
+    # gotta do something - no official policy since this predates unicode
+    if isinstance(secret, unicode):
+        secret = secret.encode("utf-8")
+    assert isinstance(secret, bytes)
+
+    # forbidding NULL char because underlying crypt() rejects them too.
+    if _BNULL in secret:
+        raise uh.exc.NullPasswordError(des_crypt)
+
+    # convert first 8 bytes of secret string into an integer
+    key_value = _crypt_secret_to_key(secret)
+
+    # run data through des using input of 0
+    result = des_encrypt_int_block(key_value, 0, salt_value, 25)
+
+    # run h64 encode on result
+    return h64big.encode_int64(result)
+
+def _bsdi_secret_to_key(secret):
+    """convert secret to DES key used by bsdi_crypt"""
+    key_value = _crypt_secret_to_key(secret)
+    idx = 8
+    end = len(secret)
+    while idx < end:
+        next = idx + 8
+        tmp_value = _crypt_secret_to_key(secret[idx:next])
+        key_value = des_encrypt_int_block(key_value, key_value) ^ tmp_value
+        idx = next
+    return key_value
+
+def _raw_bsdi_crypt(secret, rounds, salt):
+    """pure-python backend for bsdi_crypt"""
+
+    # decode salt
+    salt_value = h64.decode_int24(salt)
+
+    # gotta do something - no official policy since this predates unicode
+    if isinstance(secret, unicode):
+        secret = secret.encode("utf-8")
+    assert isinstance(secret, bytes)
+
+    # forbidding NULL char because underlying crypt() rejects them too.
+    if _BNULL in secret:
+        raise uh.exc.NullPasswordError(bsdi_crypt)
+
+    # convert secret string into an integer
+    key_value = _bsdi_secret_to_key(secret)
+
+    # run data through des using input of 0
+    result = des_encrypt_int_block(key_value, 0, salt_value, rounds)
+
+    # run h64 encode on result
+    return h64big.encode_int64(result)
+
+#=============================================================================
+# handlers
+#=============================================================================
+class des_crypt(uh.TruncateMixin, uh.HasManyBackends, uh.HasSalt, uh.GenericHandler):
+    """This class implements the des-crypt password hash, and follows the :ref:`password-hash-api`.
+
+    It supports a fixed-length salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``.
+
+    :param bool truncate_error:
+        By default, des_crypt will silently truncate passwords larger than 8 bytes.
+        Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash`
+        to raise a :exc:`~passlib.exc.PasswordTruncateError` instead.
+
+        .. versionadded:: 1.7
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`.
If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include
+        ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+
+    #--------------------
+    # PasswordHash
+    #--------------------
+    name = "des_crypt"
+    setting_kwds = ("salt", "truncate_error")
+
+    #--------------------
+    # GenericHandler
+    #--------------------
+    checksum_chars = uh.HASH64_CHARS
+    checksum_size = 11
+
+    #--------------------
+    # HasSalt
+    #--------------------
+    min_salt_size = max_salt_size = 2
+    salt_chars = uh.HASH64_CHARS
+
+    #--------------------
+    # TruncateMixin
+    #--------------------
+    truncate_size = 8
+
+    #===================================================================
+    # formatting
+    #===================================================================
+    # FORMAT: 2 chars of H64-encoded salt + 11 chars of H64-encoded checksum
+
+    _hash_regex = re.compile(u(r"""
+        ^
+        (?P<salt>[./a-z0-9]{2})
+        (?P<chk>[./a-z0-9]{11})?
+        $"""), re.X|re.I)
+
+    @classmethod
+    def from_string(cls, hash):
+        hash = to_unicode(hash, "ascii", "hash")
+        salt, chk = hash[:2], hash[2:]
+        return cls(salt=salt, checksum=chk or None)
+
+    def to_string(self):
+        hash = u("%s%s") % (self.salt, self.checksum)
+        return uascii_to_str(hash)
+
+    #===================================================================
+    # digest calculation
+    #===================================================================
+    def _calc_checksum(self, secret):
+        # check for truncation (during .hash() calls only)
+        if self.use_defaults:
+            self._check_truncate_policy(secret)
+
+        return self._calc_checksum_backend(secret)
+
+    #===================================================================
+    # backend
+    #===================================================================
+    backends = ("os_crypt", "builtin")
+
+    #---------------------------------------------------------------
+    # os_crypt backend
+    #---------------------------------------------------------------
+    @classmethod
+    def _load_backend_os_crypt(cls):
+        if test_crypt("test", 'abgOeLfPimXQo'):
+            cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt)
+            return True
+        else:
+            return False
+
+    def _calc_checksum_os_crypt(self, secret):
+        # NOTE: we let safe_crypt() encode unicode secret -> utf8;
+        # no official policy since des-crypt predates unicode
+        hash = safe_crypt(secret, self.salt)
+        if hash is None:
+            # py3's crypt.crypt() can't handle non-utf8 bytes.
+            # fallback to builtin alg, which is always available.
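+            # [editor's note] safe_crypt() returns None when the OS crypt()
+            # rejects the input (e.g. non-utf8 bytes under py3), so the
+            # pure-python path below keeps behavior consistent across platforms.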
+            return self._calc_checksum_builtin(secret)
+        if not hash.startswith(self.salt) or len(hash) != 13:
+            raise uh.exc.CryptBackendError(self, self.salt, hash)
+        return hash[2:]
+
+    #---------------------------------------------------------------
+    # builtin backend
+    #---------------------------------------------------------------
+    @classmethod
+    def _load_backend_builtin(cls):
+        cls._set_calc_checksum_backend(cls._calc_checksum_builtin)
+        return True
+
+    def _calc_checksum_builtin(self, secret):
+        return _raw_des_crypt(secret, self.salt.encode("ascii")).decode("ascii")
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+class bsdi_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler):
+    """This class implements the BSDi-Crypt password hash, and follows the :ref:`password-hash-api`.
+
+    It supports a fixed-length salt, and a variable number of rounds.
+
+    The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it must be 4 characters, drawn from the regexp range ``[./0-9A-Za-z]``.
+
+    :type rounds: int
+    :param rounds:
+        Optional number of rounds to use.
+        Defaults to 5001, must be between 1 and 16777215, inclusive.
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include ``rounds``
+        that are too small or too large, and ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+
+    .. versionchanged:: 1.6
+        :meth:`hash` will now issue a warning if an even number of rounds is used
+        (see :ref:`bsdi-crypt-security-issues` regarding weak DES keys).
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+    #--GenericHandler--
+    name = "bsdi_crypt"
+    setting_kwds = ("salt", "rounds")
+    checksum_size = 11
+    checksum_chars = uh.HASH64_CHARS
+
+    #--HasSalt--
+    min_salt_size = max_salt_size = 4
+    salt_chars = uh.HASH64_CHARS
+
+    #--HasRounds--
+    default_rounds = 5001
+    min_rounds = 1
+    max_rounds = 16777215 # (1<<24)-1
+    rounds_cost = "linear"
+
+    # NOTE: OpenBSD login.conf reports 7250 as minimum allowed rounds,
+    # but that seems to be an OS policy, not an algorithm limitation.
+
+    #===================================================================
+    # parsing
+    #===================================================================
+    _hash_regex = re.compile(u(r"""
+        ^
+        _
+        (?P<rounds>[./a-z0-9]{4})
+        (?P<salt>[./a-z0-9]{4})
+        (?P<chk>[./a-z0-9]{11})?
+ $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + rounds, salt, chk = m.group("rounds", "salt", "chk") + return cls( + rounds=h64.decode_int24(rounds.encode("ascii")), + salt=salt, + checksum=chk, + ) + + def to_string(self): + hash = u("_%s%s%s") % (h64.encode_int24(self.rounds).decode("ascii"), + self.salt, self.checksum) + return uascii_to_str(hash) + + #=================================================================== + # validation + #=================================================================== + + # NOTE: keeping this flag for admin/choose_rounds.py script. + # want to eventually expose rounds logic to that script in better way. + _avoid_even_rounds = True + + @classmethod + def using(cls, **kwds): + subcls = super(bsdi_crypt, cls).using(**kwds) + if not subcls.default_rounds & 1: + # issue warning if caller set an even 'rounds' value. + warn("bsdi_crypt rounds should be odd, as even rounds may reveal weak DES keys", + uh.exc.PasslibSecurityWarning) + return subcls + + @classmethod + def _generate_rounds(cls): + rounds = super(bsdi_crypt, cls)._generate_rounds() + # ensure autogenerated rounds are always odd + # NOTE: doing this even for default_rounds so needs_update() doesn't get + # caught in a loop. + # FIXME: this technically might generate a rounds value 1 larger + # than the requested upper bound - but better to err on side of safety. + return rounds|1 + + #=================================================================== + # migration + #=================================================================== + + def _calc_needs_update(self, **kwds): + # mark bsdi_crypt hashes as deprecated if they have even rounds. + if not self.rounds & 1: + return True + # hand off to base implementation + return super(bsdi_crypt, self)._calc_needs_update(**kwds) + + #=================================================================== + # backends + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", '_/...lLDAxARksGCHin.'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string() + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. 
+            return self._calc_checksum_builtin(secret)
+        if not hash.startswith(config[:9]) or len(hash) != 20:
+            raise uh.exc.CryptBackendError(self, config, hash)
+        return hash[-11:]
+
+    #---------------------------------------------------------------
+    # builtin backend
+    #---------------------------------------------------------------
+    @classmethod
+    def _load_backend_builtin(cls):
+        cls._set_calc_checksum_backend(cls._calc_checksum_builtin)
+        return True
+
+    def _calc_checksum_builtin(self, secret):
+        return _raw_bsdi_crypt(secret, self.rounds, self.salt.encode("ascii")).decode("ascii")
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+class bigcrypt(uh.HasSalt, uh.GenericHandler):
+    """This class implements the BigCrypt password hash, and follows the :ref:`password-hash-api`.
+
+    It supports a fixed-length salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``.
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include
+        ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+    #--GenericHandler--
+    name = "bigcrypt"
+    setting_kwds = ("salt",)
+    checksum_chars = uh.HASH64_CHARS
+    # NOTE: checksum chars must be multiple of 11
+
+    #--HasSalt--
+    min_salt_size = max_salt_size = 2
+    salt_chars = uh.HASH64_CHARS
+
+    #===================================================================
+    # internal helpers
+    #===================================================================
+    _hash_regex = re.compile(u(r"""
+        ^
+        (?P<salt>[./a-z0-9]{2})
+        (?P<chk>([./a-z0-9]{11})+)?
+        $"""), re.X|re.I)
+
+    @classmethod
+    def from_string(cls, hash):
+        hash = to_unicode(hash, "ascii", "hash")
+        m = cls._hash_regex.match(hash)
+        if not m:
+            raise uh.exc.InvalidHashError(cls)
+        salt, chk = m.group("salt", "chk")
+        return cls(salt=salt, checksum=chk)
+
+    def to_string(self):
+        hash = u("%s%s") % (self.salt, self.checksum)
+        return uascii_to_str(hash)
+
+    def _norm_checksum(self, checksum, relaxed=False):
+        checksum = super(bigcrypt, self)._norm_checksum(checksum, relaxed=relaxed)
+        if len(checksum) % 11:
+            raise uh.exc.InvalidHashError(self)
+        return checksum
+
+    #===================================================================
+    # backend
+    #===================================================================
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        chk = _raw_des_crypt(secret, self.salt.encode("ascii"))
+        idx = 8
+        end = len(secret)
+        while idx < end:
+            next = idx + 8
+            chk += _raw_des_crypt(secret[idx:next], chk[-11:-9])
+            idx = next
+        return chk.decode("ascii")
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+class crypt16(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler):
+    """This class implements the crypt16 password hash, and follows the :ref:`password-hash-api`.
+
+    It supports a fixed-length salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``.
+
+    :param bool truncate_error:
+        By default, crypt16 will silently truncate passwords larger than 16 bytes.
+        Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash`
+        to raise a :exc:`~passlib.exc.PasswordTruncateError` instead.
+
+        .. versionadded:: 1.7
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include
+        ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+
+    #--------------------
+    # PasswordHash
+    #--------------------
+    name = "crypt16"
+    setting_kwds = ("salt", "truncate_error")
+
+    #--------------------
+    # GenericHandler
+    #--------------------
+    checksum_size = 22
+    checksum_chars = uh.HASH64_CHARS
+
+    #--------------------
+    # HasSalt
+    #--------------------
+    min_salt_size = max_salt_size = 2
+    salt_chars = uh.HASH64_CHARS
+
+    #--------------------
+    # TruncateMixin
+    #--------------------
+    truncate_size = 16
+
+    #===================================================================
+    # internal helpers
+    #===================================================================
+    _hash_regex = re.compile(u(r"""
+        ^
+        (?P<salt>[./a-z0-9]{2})
+        (?P<chk>[./a-z0-9]{22})?
+        $"""), re.X|re.I)
+
+    @classmethod
+    def from_string(cls, hash):
+        hash = to_unicode(hash, "ascii", "hash")
+        m = cls._hash_regex.match(hash)
+        if not m:
+            raise uh.exc.InvalidHashError(cls)
+        salt, chk = m.group("salt", "chk")
+        return cls(salt=salt, checksum=chk)
+
+    def to_string(self):
+        hash = u("%s%s") % (self.salt, self.checksum)
+        return uascii_to_str(hash)
+
+    #===================================================================
+    # backend
+    #===================================================================
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+
+        # check for truncation (during .hash() calls only)
+        if self.use_defaults:
+            self._check_truncate_policy(secret)
+
+        # parse salt value
+        try:
+            salt_value = h64.decode_int12(self.salt.encode("ascii"))
+        except ValueError: # pragma: no cover - caught by class
+            raise suppress_cause(ValueError("invalid chars in salt"))
+
+        # convert first 8 bytes of secret string into an integer
+        key1 = _crypt_secret_to_key(secret)
+
+        # run data through des using input of 0
+        result1 = des_encrypt_int_block(key1, 0, salt_value, 20)
+
+        # convert next 8 bytes of secret string into integer (key=0 if secret < 8 chars)
+        key2 = _crypt_secret_to_key(secret[8:16])
+
+        # run data through des using input of 0
+        result2 = des_encrypt_int_block(key2, 0, salt_value, 5)
+
+        # done
+        chk = h64big.encode_int64(result1) + h64big.encode_int64(result2)
+        return chk.decode("ascii")
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/digests.py b/venv/lib/python3.12/site-packages/passlib/handlers/digests.py
new file mode 100644
index 0000000..982155c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/handlers/digests.py
@@ -0,0 +1,168 @@
+"""passlib.handlers.digests - plain hash digests
+"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import hashlib
+import logging; log = logging.getLogger(__name__)
+# site
+# pkg
+from passlib.utils import to_native_str, to_bytes, render_bytes, consteq
+from passlib.utils.compat import unicode, str_to_uascii
+import passlib.utils.handlers as uh
+from passlib.crypto.digest import lookup_hash
+# local
+__all__ = [
+    "create_hex_hash",
+    "hex_md4",
+    "hex_md5",
+    "hex_sha1",
+    "hex_sha256",
+    "hex_sha512",
+]
+
+#=============================================================================
+# helpers for hexadecimal hashes
+#=============================================================================
+class HexDigestHash(uh.StaticHandler):
+    """this provides a template for supporting passwords stored as plain hexadecimal hashes"""
+    #===================================================================
+    # class attrs
+    #===================================================================
+    _hash_func = None # hash function to use - filled in by create_hex_hash()
+    checksum_size = None # filled in by create_hex_hash()
+    checksum_chars = uh.HEX_CHARS
+
+    #: special for detecting if _hash_func is just a stub method.
+    supported = True
+
+    #===================================================================
+    # methods
+    #===================================================================
+    @classmethod
+    def _norm_hash(cls, hash):
+        return hash.lower()
+
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        return str_to_uascii(self._hash_func(secret).hexdigest())
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+def create_hex_hash(digest, module=__name__, django_name=None, required=True):
+    """
+    create hex-encoded unsalted hasher for specified digest algorithm.
+
+    .. versionchanged:: 1.7.3
+        If called with an unknown/unsupported digest, won't throw error immediately,
+        but instead return a dummy hasher that will throw error when called.
+
+        set ``required=True`` to restore old behavior.
+    """
+    info = lookup_hash(digest, required=required)
+    name = "hex_" + info.name
+    if not info.supported:
+        info.digest_size = 0
+    hasher = type(name, (HexDigestHash,), dict(
+        name=name,
+        __module__=module, # so ABCMeta won't clobber it
+        _hash_func=staticmethod(info.const), # sometimes it's a function, sometimes not. so wrap it.
+        checksum_size=info.digest_size*2,
+        __doc__="""This class implements a plain hexadecimal %s hash, and follows the :ref:`password-hash-api`.
+
+It supports no optional or contextual keywords.
+""" % (info.name,)
+    ))
+    if not info.supported:
+        hasher.supported = False
+    if django_name:
+        hasher.django_name = django_name
+    return hasher
+
+#=============================================================================
+# predefined handlers
+#=============================================================================
+
+# NOTE: some digests below are marked as "required=False", because these may not be present on
+# FIPS systems (see issue 116). if missing, will return stub hasher that throws error
+# if an attempt is made to actually use hash/verify with them.
+
+hex_md4 = create_hex_hash("md4", required=False)
+hex_md5 = create_hex_hash("md5", django_name="unsalted_md5", required=False)
+hex_sha1 = create_hex_hash("sha1", required=False)
+hex_sha256 = create_hex_hash("sha256")
+hex_sha512 = create_hex_hash("sha512")
+
+#=============================================================================
+# htdigest
+#=============================================================================
+class htdigest(uh.MinimalHandler):
+    """htdigest hash function.
+
+    .. todo::
+        document this hash
+    """
+    name = "htdigest"
+    setting_kwds = ()
+    context_kwds = ("user", "realm", "encoding")
+    default_encoding = "utf-8"
+
+    @classmethod
+    def hash(cls, secret, user, realm, encoding=None):
+        # NOTE: this was deliberately written so that raw bytes are passed through
+        # unchanged, the encoding kwd is only used to handle unicode values.
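+        # [editor's note -- illustrative, not upstream code] the value returned
+        # below is the "HA1" component of HTTP Digest auth, i.e.
+        #     hashlib.md5(b"user:realm:password").hexdigest()
+        # which matches the final field of an Apache htdigest file line
+        # ("user:realm:<hexdigest>").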
+ if not encoding: + encoding = cls.default_encoding + uh.validate_secret(secret) + if isinstance(secret, unicode): + secret = secret.encode(encoding) + user = to_bytes(user, encoding, "user") + realm = to_bytes(realm, encoding, "realm") + data = render_bytes("%s:%s:%s", user, realm, secret) + return hashlib.md5(data).hexdigest() + + @classmethod + def _norm_hash(cls, hash): + """normalize hash to native string, and validate it""" + hash = to_native_str(hash, param="hash") + if len(hash) != 32: + raise uh.exc.MalformedHashError(cls, "wrong size") + for char in hash: + if char not in uh.LC_HEX_CHARS: + raise uh.exc.MalformedHashError(cls, "invalid chars in hash") + return hash + + @classmethod + def verify(cls, secret, hash, user, realm, encoding="utf-8"): + hash = cls._norm_hash(hash) + other = cls.hash(secret, user, realm, encoding) + return consteq(hash, other) + + @classmethod + def identify(cls, hash): + try: + cls._norm_hash(hash) + except ValueError: + return False + return True + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls): + return cls.hash("", "", "") + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, user, realm, encoding=None): + # NOTE: 'config' is ignored, as this hash has no salting / other configuration. + # just have to make sure it's valid. + cls._norm_hash(config) + return cls.hash(secret, user, realm, encoding) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/django.py b/venv/lib/python3.12/site-packages/passlib/handlers/django.py new file mode 100644 index 0000000..6dd499a --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/django.py @@ -0,0 +1,512 @@ +"""passlib.handlers.django- Django password hash support""" +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode +from binascii import hexlify +from hashlib import md5, sha1, sha256 +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.handlers.bcrypt import _wrapped_bcrypt +from passlib.hash import argon2, bcrypt, pbkdf2_sha1, pbkdf2_sha256 +from passlib.utils import to_unicode, rng, getrandstr +from passlib.utils.binary import BASE64_CHARS +from passlib.utils.compat import str_to_uascii, uascii_to_str, unicode, u +from passlib.crypto.digest import pbkdf2_hmac +import passlib.utils.handlers as uh +# local +__all__ = [ + "django_salted_sha1", + "django_salted_md5", + "django_bcrypt", + "django_pbkdf2_sha1", + "django_pbkdf2_sha256", + "django_argon2", + "django_des_crypt", + "django_disabled", +] + +#============================================================================= +# lazy imports & constants +#============================================================================= + +# imported by django_des_crypt._calc_checksum() +des_crypt = None + +def _import_des_crypt(): + global des_crypt + if des_crypt is None: + from passlib.hash import des_crypt + return des_crypt + +# django 1.4's salt charset +SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' + +#============================================================================= +# salted hashes 
+#=============================================================================
+class DjangoSaltedHash(uh.HasSalt, uh.GenericHandler):
+    """base class providing common code for django hashes"""
+    # name, ident, checksum_size must be set by subclass.
+    # ident must include "$" suffix.
+    setting_kwds = ("salt", "salt_size")
+
+    # NOTE: django 1.0-1.3 would accept empty salt strings.
+    # django 1.4 won't, but this appears to be a regression
+    # (https://code.djangoproject.com/ticket/18144)
+    # so presumably it will be fixed in a later release.
+    default_salt_size = 12
+    max_salt_size = None
+    salt_chars = SALT_CHARS
+
+    checksum_chars = uh.LOWER_HEX_CHARS
+
+    @classmethod
+    def from_string(cls, hash):
+        salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls)
+        return cls(salt=salt, checksum=chk)
+
+    def to_string(self):
+        return uh.render_mc2(self.ident, self.salt, self.checksum)
+
+# NOTE: only used by PBKDF2
+class DjangoVariableHash(uh.HasRounds, DjangoSaltedHash):
+    """base class providing common code for django hashes w/ variable rounds"""
+    setting_kwds = DjangoSaltedHash.setting_kwds + ("rounds",)
+
+    min_rounds = 1
+
+    @classmethod
+    def from_string(cls, hash):
+        rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls)
+        return cls(rounds=rounds, salt=salt, checksum=chk)
+
+    def to_string(self):
+        return uh.render_mc3(self.ident, self.rounds, self.salt, self.checksum)
+
+class django_salted_sha1(DjangoSaltedHash):
+    """This class implements Django's Salted SHA1 hash, and follows the :ref:`password-hash-api`.
+
+    It supports a variable-length salt, and uses a single round of SHA1.
+
+    The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, a 12 character one will be autogenerated (this is recommended).
+        If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``.
+
+    :type salt_size: int
+    :param salt_size:
+        Optional number of characters to use when autogenerating new salts.
+        Defaults to 12, but can be any positive value.
+
+    This should be compatible with Django 1.4's :class:`!SHA1PasswordHasher` class.
+
+    .. versionchanged:: 1.6
+        This class now generates 12-character salts instead of 5,
+        and generated salts use the character range ``[0-9a-zA-Z]`` instead of
+        ``[0-9a-f]``. This is to be compatible with how Django >= 1.4
+        generates these hashes; but hashes generated in this manner will still be
+        correctly interpreted by earlier versions of Django.
+    """
+    name = "django_salted_sha1"
+    django_name = "sha1"
+    ident = u("sha1$")
+    checksum_size = 40
+
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        return str_to_uascii(sha1(self.salt.encode("ascii") + secret).hexdigest())
+
+class django_salted_md5(DjangoSaltedHash):
+    """This class implements Django's Salted MD5 hash, and follows the :ref:`password-hash-api`.
+
+    It supports a variable-length salt, and uses a single round of MD5.
+
+    The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, a 12 character one will be autogenerated (this is recommended).
+        If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``.
+
+    :type salt_size: int
+    :param salt_size:
+        Optional number of characters to use when autogenerating new salts.
+        Defaults to 12, but can be any positive value.
+
+    This should be compatible with the hashes generated by
+    Django 1.4's :class:`!MD5PasswordHasher` class.
+
+    .. versionchanged:: 1.6
+        This class now generates 12-character salts instead of 5,
+        and generated salts use the character range ``[0-9a-zA-Z]`` instead of
+        ``[0-9a-f]``. This is to be compatible with how Django >= 1.4
+        generates these hashes; but hashes generated in this manner will still be
+        correctly interpreted by earlier versions of Django.
+    """
+    name = "django_salted_md5"
+    django_name = "md5"
+    ident = u("md5$")
+    checksum_size = 32
+
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        return str_to_uascii(md5(self.salt.encode("ascii") + secret).hexdigest())
+
+#=============================================================================
+# BCrypt
+#=============================================================================
+
+django_bcrypt = uh.PrefixWrapper("django_bcrypt", bcrypt,
+    prefix=u('bcrypt$'), ident=u("bcrypt$"),
+    # NOTE: this docstring is duplicated in the docs, since sphinx
+    # seems to be having trouble reading it via autodata::
+    doc="""This class implements Django 1.4's BCrypt wrapper, and follows the :ref:`password-hash-api`.
+
+    This is identical to :class:`!bcrypt` itself, but with
+    the Django-specific prefix ``"bcrypt$"`` prepended.
+
+    See :doc:`/lib/passlib.hash.bcrypt` for more details,
+    the usage and behavior is identical.
+
+    This should be compatible with the hashes generated by
+    Django 1.4's :class:`!BCryptPasswordHasher` class.
+
+    .. versionadded:: 1.6
+    """)
+django_bcrypt.django_name = "bcrypt"
+django_bcrypt._using_clone_attrs += ("django_name",)
+
+#=============================================================================
+# BCRYPT + SHA256
+#=============================================================================
+
+class django_bcrypt_sha256(_wrapped_bcrypt):
+    """This class implements Django 1.6's Bcrypt+SHA256 hash, and follows the :ref:`password-hash-api`.
+
+    It supports a variable-length salt, and a variable number of rounds.
+
+    While the algorithm and format is somewhat different,
+    the api and options for this hash are identical to :class:`!bcrypt` itself,
+    see :doc:`bcrypt ` for more details.
+
+    .. versionadded:: 1.6.2
+    """
+    name = "django_bcrypt_sha256"
+    django_name = "bcrypt_sha256"
+    _digest = sha256
+
+    # sample hash:
+    # bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu
+
+    # XXX: we can't use .ident attr due to bcrypt code using it.
+ # working around that via django_prefix + django_prefix = u('bcrypt_sha256$') + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + if not hash: + return False + return hash.startswith(cls.django_prefix) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if not hash.startswith(cls.django_prefix): + raise uh.exc.InvalidHashError(cls) + bhash = hash[len(cls.django_prefix):] + if not bhash.startswith("$2"): + raise uh.exc.MalformedHashError(cls) + return super(django_bcrypt_sha256, cls).from_string(bhash) + + def to_string(self): + bhash = super(django_bcrypt_sha256, self).to_string() + return uascii_to_str(self.django_prefix) + bhash + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + secret = hexlify(self._digest(secret).digest()) + return super(django_bcrypt_sha256, self)._calc_checksum(secret) + +#============================================================================= +# PBKDF2 variants +#============================================================================= + +class django_pbkdf2_sha256(DjangoVariableHash): + """This class implements Django's PBKDF2-HMAC-SHA256 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 29000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!PBKDF2PasswordHasher` class. + + .. versionadded:: 1.6 + """ + name = "django_pbkdf2_sha256" + django_name = "pbkdf2_sha256" + ident = u('pbkdf2_sha256$') + min_salt_size = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + checksum_chars = uh.PADDED_BASE64_CHARS + checksum_size = 44 # 32 bytes -> base64 + default_rounds = pbkdf2_sha256.default_rounds # NOTE: django 1.6 uses 12000 + _digest = "sha256" + + def _calc_checksum(self, secret): + # NOTE: secret & salt will be encoded using UTF-8 by pbkdf2_hmac() + hash = pbkdf2_hmac(self._digest, secret, self.salt, self.rounds) + return b64encode(hash).rstrip().decode("ascii") + +class django_pbkdf2_sha1(django_pbkdf2_sha256): + """This class implements Django's PBKDF2-HMAC-SHA1 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). 
+ If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 131000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!PBKDF2SHA1PasswordHasher` class. + + .. versionadded:: 1.6 + """ + name = "django_pbkdf2_sha1" + django_name = "pbkdf2_sha1" + ident = u('pbkdf2_sha1$') + checksum_size = 28 # 20 bytes -> base64 + default_rounds = pbkdf2_sha1.default_rounds # NOTE: django 1.6 uses 12000 + _digest = "sha1" + +#============================================================================= +# Argon2 +#============================================================================= + +# NOTE: as of 2019-11-11, Django's Argon2PasswordHasher only supports Type I; +# so limiting this to ensure that as well. + +django_argon2 = uh.PrefixWrapper( + name="django_argon2", + wrapped=argon2.using(type="I"), + prefix=u('argon2'), + ident=u('argon2$argon2i$'), + # NOTE: this docstring is duplicated in the docs, since sphinx + # seems to be having trouble reading it via autodata:: + doc="""This class implements Django 1.10's Argon2 wrapper, and follows the :ref:`password-hash-api`. + + This is identical to :class:`!argon2` itself, but with + the Django-specific prefix ``"argon2$"`` prepended. + + See :doc:`argon2 ` for more details, + the usage and behavior is identical. + + This should be compatible with the hashes generated by + Django 1.10's :class:`!Argon2PasswordHasher` class. + + .. versionadded:: 1.7 + """) +django_argon2.django_name = "argon2" +django_argon2._using_clone_attrs += ("django_name",) + +#============================================================================= +# DES +#============================================================================= +class django_des_crypt(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): + """This class implements Django's :class:`des_crypt` wrapper, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :param bool truncate_error: + By default, django_des_crypt will silently truncate passwords larger than 8 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + This should be compatible with the hashes generated by + Django 1.4's :class:`!CryptPasswordHasher` class. + Note that Django only supports this hash on Unix systems + (though :class:`!django_des_crypt` is available cross-platform + under Passlib). + + .. 
versionchanged:: 1.6
+        This class will now accept hashes with empty salt strings,
+        since Django 1.4 generates them this way.
+    """
+    name = "django_des_crypt"
+    django_name = "crypt"
+    setting_kwds = ("salt", "salt_size", "truncate_error")
+    ident = u("crypt$")
+    checksum_chars = salt_chars = uh.HASH64_CHARS
+    checksum_size = 11
+    min_salt_size = default_salt_size = 2
+    truncate_size = 8
+
+    # NOTE: regarding duplicate salt field:
+    #
+    # django 1.0 had a "crypt$<salt1>$<salt2><checksum>" hash format,
+    # used [a-z0-9] to generate a 5 char salt, stored it in salt1,
+    # duplicated the first two chars of salt1 as salt2.
+    # it would throw an error if salt1 was empty.
+    #
+    # django 1.4 started generating 2 char salt using the full alphabet,
+    # left salt1 empty, and only paid attention to salt2.
+    #
+    # in order to be compatible with django 1.0, the hashes generated
+    # by this function will always include salt1, unless the following
+    # class-level field is disabled (mainly used for testing)
+    use_duplicate_salt = True
+
+    @classmethod
+    def from_string(cls, hash):
+        salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls)
+        if chk:
+            # chk should be full des_crypt hash
+            if not salt:
+                # django 1.4 always uses empty salt field,
+                # so extract salt from des_crypt hash
+                salt = chk[:2]
+            elif salt[:2] != chk[:2]:
+                # django 1.0 stored 5 chars in salt field, and duplicated
+                # the first two chars in the checksum. we keep the full salt,
+                # but make sure the first two chars match as sanity check.
+                raise uh.exc.MalformedHashError(cls,
+                    "first two digits of salt and checksum must match")
+            # in all cases, strip salt chars from the checksum
+            chk = chk[2:]
+        return cls(salt=salt, checksum=chk)
+
+    def to_string(self):
+        salt = self.salt
+        chk = salt[:2] + self.checksum
+        if self.use_duplicate_salt:
+            # filling in salt field, so that we're compatible with django 1.0
+            return uh.render_mc2(self.ident, salt, chk)
+        else:
+            # django 1.4+ style hash
+            return uh.render_mc2(self.ident, "", chk)
+
+    def _calc_checksum(self, secret):
+        # NOTE: we lazily import des_crypt,
+        # since most django deploys won't use django_des_crypt
+        global des_crypt
+        if des_crypt is None:
+            _import_des_crypt()
+        # check for truncation (during .hash() calls only)
+        if self.use_defaults:
+            self._check_truncate_policy(secret)
+        return des_crypt(salt=self.salt[:2])._calc_checksum(secret)
+
+class django_disabled(uh.ifc.DisabledHash, uh.StaticHandler):
+    """This class provides disabled password behavior for Django, and follows the :ref:`password-hash-api`.
+
+    This class does not implement a hash, but instead
+    claims the special hash string ``"!"`` which Django uses
+    to indicate an account's password has been disabled.
+
+    * newly encrypted passwords will hash to ``"!"``.
+    * it rejects all passwords.
+
+    .. note::
+
+        Django 1.6 prepends a randomly generated 40-char alphanumeric string
+        to each unusable password. This class recognizes such strings,
+        but for backwards compatibility, still returns ``"!"``.
+
+        See the Django issue tracker for why
+        Django appends an alphanumeric string.
+
+    .. versionchanged:: 1.6.2 added Django 1.6 support
+
+    .. versionchanged:: 1.7
+        hashing now appends a random alphanumeric suffix, matching Django's behavior.
+    """
+    name = "django_disabled"
+    _hash_prefix = u("!")
+    suffix_length = 40
+
+    # XXX: move this to StaticHandler, or wherever _hash_prefix is being used?
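+    # ------------------------------------------------------------------
+    # editor's sketch (illustrative, not passlib source), assuming the
+    # 1.7+ random-suffix behavior described in the docstring above:
+    #
+    #   >>> from passlib.hash import django_disabled
+    #   >>> h = django_disabled.hash("secret")
+    #   >>> h.startswith("!") and len(h) == 41   # "!" + 40-char suffix
+    #   True
+    #   >>> django_disabled.verify("secret", h)  # always rejects
+    #   False
+    # ------------------------------------------------------------------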
+ @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return hash.startswith(cls._hash_prefix) + + def _calc_checksum(self, secret): + # generate random suffix to match django's behavior + return getrandstr(rng, BASE64_CHARS[:-2], self.suffix_length) + + @classmethod + def verify(cls, secret, hash): + uh.validate_secret(secret) + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return False + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/fshp.py b/venv/lib/python3.12/site-packages/passlib/handlers/fshp.py new file mode 100644 index 0000000..db13e74 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/fshp.py @@ -0,0 +1,214 @@ +"""passlib.handlers.fshp +""" + +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode, b64decode +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_unicode +import passlib.utils.handlers as uh +from passlib.utils.compat import bascii_to_str, iteritems, u,\ + unicode +from passlib.crypto.digest import pbkdf1 +# local +__all__ = [ + 'fshp', +] +#============================================================================= +# sha1-crypt +#============================================================================= +class fshp(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the FSHP password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :param salt: + Optional raw salt string. + If not specified, one will be autogenerated (this is recommended). + + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any non-negative value. + + :param rounds: + Optional number of rounds to use. + Defaults to 480000, must be between 1 and 4294967295, inclusive. + + :param variant: + Optionally specifies variant of FSHP to use. + + * ``0`` - uses SHA-1 digest (deprecated). + * ``1`` - uses SHA-2/256 digest (default). + * ``2`` - uses SHA-2/384 digest. + * ``3`` - uses SHA-2/512 digest. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "fshp" + setting_kwds = ("salt", "salt_size", "rounds", "variant") + checksum_chars = uh.PADDED_BASE64_CHARS + ident = u("{FSHP") + # checksum_size is property() that depends on variant + + #--HasRawSalt-- + default_salt_size = 16 # current passlib default, FSHP uses 8 + max_salt_size = None + + #--HasRounds-- + # FIXME: should probably use different default rounds + # based on the variant. setting for default variant (sha256) for now. + default_rounds = 480000 # current passlib default, FSHP uses 4096 + min_rounds = 1 # set by FSHP + max_rounds = 4294967295 # 32-bit integer limit - not set by FSHP + rounds_cost = "linear" + + #--variants-- + default_variant = 1 + _variant_info = { + # variant: (hash name, digest size) + 0: ("sha1", 20), + 1: ("sha256", 32), + 2: ("sha384", 48), + 3: ("sha512", 64), + } + _variant_aliases = dict( + [(unicode(k),k) for k in _variant_info] + + [(v[0],k) for k,v in iteritems(_variant_info)] + ) + + #=================================================================== + # configuration + #=================================================================== + @classmethod + def using(cls, variant=None, **kwds): + subcls = super(fshp, cls).using(**kwds) + if variant is not None: + subcls.default_variant = cls._norm_variant(variant) + return subcls + + #=================================================================== + # instance attrs + #=================================================================== + variant = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, variant=None, **kwds): + # NOTE: variant must be set first, since it controls checksum size, etc. 
+ self.use_defaults = kwds.get("use_defaults") # load this early + if variant is not None: + variant = self._norm_variant(variant) + elif self.use_defaults: + variant = self.default_variant + assert self._norm_variant(variant) == variant, "invalid default variant: %r" % (variant,) + else: + raise TypeError("no variant specified") + self.variant = variant + super(fshp, self).__init__(**kwds) + + @classmethod + def _norm_variant(cls, variant): + if isinstance(variant, bytes): + variant = variant.decode("ascii") + if isinstance(variant, unicode): + try: + variant = cls._variant_aliases[variant] + except KeyError: + raise ValueError("invalid fshp variant") + if not isinstance(variant, int): + raise TypeError("fshp variant must be int or known alias") + if variant not in cls._variant_info: + raise ValueError("invalid fshp variant") + return variant + + @property + def checksum_alg(self): + return self._variant_info[self.variant][0] + + @property + def checksum_size(self): + return self._variant_info[self.variant][1] + + #=================================================================== + # formatting + #=================================================================== + + _hash_regex = re.compile(u(r""" + ^ + \{FSHP + (\d+)\| # variant + (\d+)\| # salt size + (\d+)\} # rounds + ([a-zA-Z0-9+/]+={0,3}) # digest + $"""), re.X) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + variant, salt_size, rounds, data = m.group(1,2,3,4) + variant = int(variant) + salt_size = int(salt_size) + rounds = int(rounds) + try: + data = b64decode(data.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + salt = data[:salt_size] + chk = data[salt_size:] + return cls(salt=salt, checksum=chk, rounds=rounds, variant=variant) + + def to_string(self): + chk = self.checksum + salt = self.salt + data = bascii_to_str(b64encode(salt+chk)) + return "{FSHP%d|%d|%d}%s" % (self.variant, len(salt), self.rounds, data) + + #=================================================================== + # backend + #=================================================================== + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + # NOTE: for some reason, FSHP uses pbkdf1 with password & salt reversed. + # this has only a minimal impact on security, + # but it is worth noting this deviation. 
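+        #
+        # editor's note (illustrative, not passlib source): spelled out,
+        # the call below is
+        #     pbkdf1(digest=self.checksum_alg,
+        #            secret=<salt bytes>, salt=<password bytes>,
+        #            rounds=self.rounds, keylen=self.checksum_size)
+        # i.e. the password is passed through pbkdf1's "salt" slot.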
+ return pbkdf1( + digest=self.checksum_alg, + secret=self.salt, + salt=secret, + rounds=self.rounds, + keylen=self.checksum_size, + ) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/ldap_digests.py b/venv/lib/python3.12/site-packages/passlib/handlers/ldap_digests.py new file mode 100644 index 0000000..30254f0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/ldap_digests.py @@ -0,0 +1,359 @@ +"""passlib.handlers.digests - plain hash digests +""" +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode, b64decode +from hashlib import md5, sha1, sha256, sha512 +import logging; log = logging.getLogger(__name__) +import re +# site +# pkg +from passlib.handlers.misc import plaintext +from passlib.utils import unix_crypt_schemes, to_unicode +from passlib.utils.compat import uascii_to_str, unicode, u +from passlib.utils.decor import classproperty +import passlib.utils.handlers as uh +# local +__all__ = [ + "ldap_plaintext", + "ldap_md5", + "ldap_sha1", + "ldap_salted_md5", + "ldap_salted_sha1", + "ldap_salted_sha256", + "ldap_salted_sha512", + + ##"get_active_ldap_crypt_schemes", + "ldap_des_crypt", + "ldap_bsdi_crypt", + "ldap_md5_crypt", + "ldap_sha1_crypt", + "ldap_bcrypt", + "ldap_sha256_crypt", + "ldap_sha512_crypt", +] + +#============================================================================= +# ldap helpers +#============================================================================= +class _Base64DigestHelper(uh.StaticHandler): + """helper for ldap_md5 / ldap_sha1""" + # XXX: could combine this with hex digests in digests.py + + ident = None # required - prefix identifier + _hash_func = None # required - hash function + _hash_regex = None # required - regexp to recognize hash + checksum_chars = uh.PADDED_BASE64_CHARS + + @classproperty + def _hash_prefix(cls): + """tell StaticHandler to strip ident from checksum""" + return cls.ident + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + chk = self._hash_func(secret).digest() + return b64encode(chk).decode("ascii") + +class _SaltedBase64DigestHelper(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """helper for ldap_salted_md5 / ldap_salted_sha1""" + setting_kwds = ("salt", "salt_size") + checksum_chars = uh.PADDED_BASE64_CHARS + + ident = None # required - prefix identifier + _hash_func = None # required - hash function + _hash_regex = None # required - regexp to recognize hash + min_salt_size = max_salt_size = 4 + + # NOTE: openldap implementation uses 4 byte salt, + # but it's been reported (issue 30) that some servers use larger salts. + # the semi-related rfc3112 recommends support for up to 16 byte salts. 
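+    #       (editor's note: with the default 4-byte salt, e.g. {SSHA} encodes
+    #       sha1_digest + salt = 20 + 4 bytes -> 32 base64 chars, matching
+    #       the regex size comments on the subclasses below.)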
+ min_salt_size = 4 + default_salt_size = 4 + max_salt_size = 16 + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + try: + data = b64decode(m.group("tmp").encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + cs = cls.checksum_size + assert cs + return cls(checksum=data[:cs], salt=data[cs:]) + + def to_string(self): + data = self.checksum + self.salt + hash = self.ident + b64encode(data).decode("ascii") + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return self._hash_func(secret + self.salt).digest() + +#============================================================================= +# implementations +#============================================================================= +class ldap_md5(_Base64DigestHelper): + """This class stores passwords using LDAP's plain MD5 format, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords. + """ + name = "ldap_md5" + ident = u("{MD5}") + _hash_func = md5 + _hash_regex = re.compile(u(r"^\{MD5\}(?P[+/a-zA-Z0-9]{22}==)$")) + +class ldap_sha1(_Base64DigestHelper): + """This class stores passwords using LDAP's plain SHA1 format, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords. + """ + name = "ldap_sha1" + ident = u("{SHA}") + _hash_func = sha1 + _hash_regex = re.compile(u(r"^\{SHA\}(?P[+/a-zA-Z0-9]{27}=)$")) + +class ldap_salted_md5(_SaltedBase64DigestHelper): + """This class stores passwords using LDAP's salted MD5 format, and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 4 bytes for compatibility with the LDAP spec, + but some systems use larger salts, and Passlib supports + any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + This format now supports variable length salts, instead of a fix 4 bytes. + """ + name = "ldap_salted_md5" + ident = u("{SMD5}") + checksum_size = 16 + _hash_func = md5 + _hash_regex = re.compile(u(r"^\{SMD5\}(?P[+/a-zA-Z0-9]{27,}={0,2})$")) + +class ldap_salted_sha1(_SaltedBase64DigestHelper): + """ + This class stores passwords using LDAP's "Salted SHA1" format, + and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). 
+ If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 4 bytes for compatibility with the LDAP spec, + but some systems use larger salts, and Passlib supports + any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + This format now supports variable length salts, instead of a fix 4 bytes. + """ + name = "ldap_salted_sha1" + ident = u("{SSHA}") + checksum_size = 20 + _hash_func = sha1 + # NOTE: 32 = ceil((20 + 4) * 4/3) + _hash_regex = re.compile(u(r"^\{SSHA\}(?P[+/a-zA-Z0-9]{32,}={0,2})$")) + + + +class ldap_salted_sha256(_SaltedBase64DigestHelper): + """ + This class stores passwords using LDAP's "Salted SHA2-256" format, + and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes for compatibility with the LDAP spec, + but Passlib supports any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.7.3 + """ + name = "ldap_salted_sha256" + ident = u("{SSHA256}") + checksum_size = 32 + default_salt_size = 8 + _hash_func = sha256 + # NOTE: 48 = ceil((32 + 4) * 4/3) + _hash_regex = re.compile(u(r"^\{SSHA256\}(?P[+/a-zA-Z0-9]{48,}={0,2})$")) + + +class ldap_salted_sha512(_SaltedBase64DigestHelper): + """ + This class stores passwords using LDAP's "Salted SHA2-512" format, + and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes for compatibility with the LDAP spec, + but Passlib supports any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.7.3 + """ + name = "ldap_salted_sha512" + ident = u("{SSHA512}") + checksum_size = 64 + default_salt_size = 8 + _hash_func = sha512 + # NOTE: 91 = ceil((64 + 4) * 4/3) + _hash_regex = re.compile(u(r"^\{SSHA512\}(?P[+/a-zA-Z0-9]{91,}={0,2})$")) + + +class ldap_plaintext(plaintext): + """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. + + This class acts much like the generic :class:`!passlib.hash.plaintext` handler, + except that it will identify a hash only if it does NOT begin with the ``{XXX}`` identifier prefix + used by RFC2307 passwords. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keyword: + + :type encoding: str + :param encoding: + This controls the character encoding to use (defaults to ``utf-8``). + + This encoding will be used to encode :class:`!unicode` passwords + under Python 2, and decode :class:`!bytes` hashes under Python 3. + + .. versionchanged:: 1.6 + The ``encoding`` keyword was added. + """ + # NOTE: this subclasses plaintext, since all it does differently + # is override identify() + + name = "ldap_plaintext" + _2307_pat = re.compile(u(r"^\{\w+\}.*$")) + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls): + # Overridding plaintext.genconfig() since it returns "", + # but have to return non-empty value due to identify() below + return "!" + + @classmethod + def identify(cls, hash): + # NOTE: identifies all strings EXCEPT those with {XXX} prefix + hash = uh.to_unicode_for_identify(hash) + return bool(hash) and cls._2307_pat.match(hash) is None + +#============================================================================= +# {CRYPT} wrappers +# the following are wrappers around the base crypt algorithms, +# which add the ldap required {CRYPT} prefix +#============================================================================= +ldap_crypt_schemes = [ 'ldap_' + name for name in unix_crypt_schemes ] + +def _init_ldap_crypt_handlers(): + # NOTE: I don't like to implicitly modify globals() like this, + # but don't want to write out all these handlers out either :) + g = globals() + for wname in unix_crypt_schemes: + name = 'ldap_' + wname + g[name] = uh.PrefixWrapper(name, wname, prefix=u("{CRYPT}"), lazy=True) + del g +_init_ldap_crypt_handlers() + +##_lcn_host = None +##def get_host_ldap_crypt_schemes(): +## global _lcn_host +## if _lcn_host is None: +## from passlib.hosts import host_context +## schemes = host_context.schemes() +## _lcn_host = [ +## "ldap_" + name +## for name in unix_crypt_names +## if name in schemes +## ] +## return _lcn_host + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/md5_crypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/md5_crypt.py new file mode 100644 index 0000000..e3a2dfa --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/md5_crypt.py @@ -0,0 +1,346 @@ +"""passlib.handlers.md5_crypt - md5-crypt algorithm""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +# site +# pkg +from 
passlib.utils import safe_crypt, test_crypt, repeat_string +from passlib.utils.binary import h64 +from passlib.utils.compat import unicode, u +import passlib.utils.handlers as uh +# local +__all__ = [ + "md5_crypt", + "apr_md5_crypt", +] + +#============================================================================= +# pure-python backend +#============================================================================= +_BNULL = b"\x00" +_MD5_MAGIC = b"$1$" +_APR_MAGIC = b"$apr1$" + +# pre-calculated offsets used to speed up C digest stage (see notes below). +# sequence generated using the following: + ##perms_order = "p,pp,ps,psp,sp,spp".split(",") + ##def offset(i): + ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + + ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) + ## return perms_order.index(key) + ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] +_c_digest_offsets = ( + (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), + (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), + (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), + ) + +# map used to transpose bytes when encoding final digest +_transpose_map = (12, 6, 0, 13, 7, 1, 14, 8, 2, 15, 9, 3, 5, 10, 4, 11) + +def _raw_md5_crypt(pwd, salt, use_apr=False): + """perform raw md5-crypt calculation + + this function provides a pure-python implementation of the internals + for the MD5-Crypt algorithms; it doesn't handle any of the + parsing/validation of the hash strings themselves. + + :arg pwd: password chars/bytes to hash + :arg salt: salt chars to use + :arg use_apr: use apache variant + + :returns: + encoded checksum chars + """ + # NOTE: regarding 'apr' format: + # really, apache? you had to invent a whole new "$apr1$" format, + # when all you did was change the ident incorporated into the hash? + # would love to find webpage explaining why just using a portable + # implementation of $1$ wasn't sufficient. *nothing else* was changed. + + #=================================================================== + # init & validate inputs + #=================================================================== + + # validate secret + # XXX: not sure what official unicode policy is, using this as default + if isinstance(pwd, unicode): + pwd = pwd.encode("utf-8") + assert isinstance(pwd, bytes), "pwd not unicode or bytes" + if _BNULL in pwd: + raise uh.exc.NullPasswordError(md5_crypt) + pwd_len = len(pwd) + + # validate salt - should have been taken care of by caller + assert isinstance(salt, unicode), "salt not unicode" + salt = salt.encode("ascii") + assert len(salt) < 9, "salt too large" + # NOTE: spec says salts larger than 8 bytes should be truncated, + # instead of causing an error. this function assumes that's been + # taken care of by the handler class. + + # load APR specific constants + if use_apr: + magic = _APR_MAGIC + else: + magic = _MD5_MAGIC + + #=================================================================== + # digest B - used as subinput to digest A + #=================================================================== + db = md5(pwd + salt + pwd).digest() + + #=================================================================== + # digest A - used to initialize first round of digest C + #=================================================================== + # start out with pwd + magic + salt + a_ctx = md5(pwd + magic + salt) + a_ctx_update = a_ctx.update + + # add pwd_len bytes of b, repeating b as many times as needed. 
+    a_ctx_update(repeat_string(db, pwd_len))
+
+    # add null chars & first char of password
+    # NOTE: this may have historically been a bug,
+    #       where they meant to use db[0] instead of _BNULL,
+    #       but the original code memclear'ed db,
+    #       and now all implementations have to use this.
+    i = pwd_len
+    evenchar = pwd[:1]
+    while i:
+        a_ctx_update(_BNULL if i & 1 else evenchar)
+        i >>= 1
+
+    # finish A
+    da = a_ctx.digest()
+
+    #===================================================================
+    # digest C - for 1000 rounds, combine A, S, and P
+    #            digests in various ways, in order to burn CPU time.
+    #===================================================================
+
+    # NOTE: the original MD5-Crypt implementation performs the C digest
+    # calculation using the following loop:
+    #
+    ##dc = da
+    ##i = 0
+    ##while i < rounds:
+    ##    tmp_ctx = md5(pwd if i & 1 else dc)
+    ##    if i % 3:
+    ##        tmp_ctx.update(salt)
+    ##    if i % 7:
+    ##        tmp_ctx.update(pwd)
+    ##    tmp_ctx.update(dc if i & 1 else pwd)
+    ##    dc = tmp_ctx.digest()
+    ##    i += 1
+    #
+    # The code Passlib uses (below) implements an equivalent algorithm,
+    # it's just been heavily optimized to pre-calculate a large number
+    # of things beforehand. It works off of a couple of observations
+    # about the original algorithm:
+    #
+    # 1. each round is a combination of 'dc', 'salt', and 'pwd'; and the exact
+    #    combination is determined by whether 'i' is a multiple of 2, 3, and/or 7.
+    # 2. since lcm(2,3,7)==42, the series of combinations will repeat
+    #    every 42 rounds.
+    # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)';
+    #    while odd rounds 1-41 consist of hash(round-specific-constant + dc)
+    #
+    # Using these observations, the following code...
+    # * calculates the round-specific combination of salt & pwd for each round 0-41
+    # * runs through as many 42-round blocks as possible (23)
+    # * runs through as many pairs of rounds as needed for the remaining rounds (17)
+    # * this yields the 42*23 + 2*17 = 1000 rounds required by md5_crypt.
+    #
+    # this cuts out a lot of the control overhead incurred when running the
+    # original loop 1000 times in python, resulting in a ~20% increase in
+    # speed under CPython (though still 2x slower than glibc crypt)
+
+    # prepare the 6 combinations of pwd & salt which are needed
+    # (order of 'perms' must match how _c_digest_offsets was generated)
+    pwd_pwd = pwd+pwd
+    pwd_salt = pwd+salt
+    perms = [pwd, pwd_pwd, pwd_salt, pwd_salt+pwd, salt+pwd, salt+pwd_pwd]
+
+    # build up list of even-round & odd-round constants,
+    # and store in 21-element list as (even,odd) pairs.
+ data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] + + # perform 23 blocks of 42 rounds each (for a total of 966 rounds) + dc = da + blocks = 23 + while blocks: + for even, odd in data: + dc = md5(odd + md5(dc + even).digest()).digest() + blocks -= 1 + + # perform 17 more pairs of rounds (34 more rounds, for a total of 1000) + for even, odd in data[:17]: + dc = md5(odd + md5(dc + even).digest()).digest() + + #=================================================================== + # encode digest using appropriate transpose map + #=================================================================== + return h64.encode_transposed_bytes(dc, _transpose_map).decode("ascii") + +#============================================================================= +# handler +#============================================================================= +class _MD5_Common(uh.HasSalt, uh.GenericHandler): + """common code for md5_crypt and apr_md5_crypt""" + #=================================================================== + # class attrs + #=================================================================== + # name - set in subclass + setting_kwds = ("salt", "salt_size") + # ident - set in subclass + checksum_size = 22 + checksum_chars = uh.HASH64_CHARS + + max_salt_size = 8 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # methods + #=================================================================== + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + return cls(salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc2(self.ident, self.salt, self.checksum) + + # _calc_checksum() - provided by subclass + + #=================================================================== + # eoc + #=================================================================== + +class md5_crypt(uh.HasManyBackends, _MD5_Common): + """This class implements the MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 8, but can be any value between 0 and 8. + (This is mainly needed when generating Cisco-compatible hashes, + which require ``salt_size=4``). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "md5_crypt" + ident = u("$1$") + + #=================================================================== + # methods + #=================================================================== + # FIXME: can't find definitive policy on how md5-crypt handles non-ascii. 
+ # all backends currently coerce -> utf-8 + + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", '$1$test$pi/xDtU5WFVRqYS6BMU8X/'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.ident + self.salt + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. + return self._calc_checksum_builtin(secret) + if not hash.startswith(config) or len(hash) != len(config) + 23: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-22:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + return _raw_md5_crypt(secret, self.salt) + + #=================================================================== + # eoc + #=================================================================== + +class apr_md5_crypt(_MD5_Common): + """This class implements the Apr-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "apr_md5_crypt" + ident = u("$apr1$") + + #=================================================================== + # methods + #=================================================================== + def _calc_checksum(self, secret): + return _raw_md5_crypt(secret, self.salt, use_apr=True) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/misc.py b/venv/lib/python3.12/site-packages/passlib/handlers/misc.py new file mode 100644 index 0000000..44abc34 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/misc.py @@ -0,0 +1,269 @@ +"""passlib.handlers.misc - misc generic handlers +""" +#============================================================================= +# imports +#============================================================================= +# core +import sys +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str, str_consteq +from passlib.utils.compat import unicode, u, unicode_or_bytes_types +import passlib.utils.handlers as uh +# local +__all__ = [ + "unix_disabled", + "unix_fallback", + "plaintext", +] + +#============================================================================= +# handler +#============================================================================= +class unix_fallback(uh.ifc.DisabledHash, uh.StaticHandler): + """This class provides the fallback behavior for unix shadow files, and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead provides fallback + behavior as found in /etc/shadow on most unix variants. + If used, should be the last scheme in the context. + + * this class will positively identify all hash strings. + * for security, passwords will always hash to ``!``. + * it rejects all passwords if the hash is NOT an empty string (``!`` or ``*`` are frequently used). + * by default it rejects all passwords if the hash is an empty string, + but if ``enable_wildcard=True`` is passed to verify(), + all passwords will be allowed through if the hash is an empty string. + + .. deprecated:: 1.6 + This has been deprecated due to its "wildcard" feature, + and will be removed in Passlib 1.8. Use :class:`unix_disabled` instead. + """ + name = "unix_fallback" + context_kwds = ("enable_wildcard",) + + @classmethod + def identify(cls, hash): + if isinstance(hash, unicode_or_bytes_types): + return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + + def __init__(self, enable_wildcard=False, **kwds): + warn("'unix_fallback' is deprecated, " + "and will be removed in Passlib 1.8; " + "please use 'unix_disabled' instead.", + DeprecationWarning) + super(unix_fallback, self).__init__(**kwds) + self.enable_wildcard = enable_wildcard + + def _calc_checksum(self, secret): + if self.checksum: + # NOTE: hash will generally be "!", but we want to preserve + # it in case it's something else, like "*". 
+ return self.checksum + else: + return u("!") + + @classmethod + def verify(cls, secret, hash, enable_wildcard=False): + uh.validate_secret(secret) + if not isinstance(hash, unicode_or_bytes_types): + raise uh.exc.ExpectedStringError(hash, "hash") + elif hash: + return False + else: + return enable_wildcard + +_MARKER_CHARS = u("*!") +_MARKER_BYTES = b"*!" + +class unix_disabled(uh.ifc.DisabledHash, uh.MinimalHandler): + """This class provides disabled password behavior for unix shadow files, + and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead matches the "disabled account" + strings found in ``/etc/shadow`` on most Unix variants. "encrypting" a password + will simply return the disabled account marker. It will reject all passwords, + no matter the hash string. The :meth:`~passlib.ifc.PasswordHash.hash` + method supports one optional keyword: + + :type marker: str + :param marker: + Optional marker string which overrides the platform default + used to indicate a disabled account. + + If not specified, this will default to ``"*"`` on BSD systems, + and use the Linux default ``"!"`` for all other platforms. + (:attr:`!unix_disabled.default_marker` will contain the default value) + + .. versionadded:: 1.6 + This class was added as a replacement for the now-deprecated + :class:`unix_fallback` class, which had some undesirable features. + """ + name = "unix_disabled" + setting_kwds = ("marker",) + context_kwds = () + + _disable_prefixes = tuple(str(_MARKER_CHARS)) + + # TODO: rename attr to 'marker'... + if 'bsd' in sys.platform: # pragma: no cover -- runtime detection + default_marker = u("*") + else: + # use the linux default for other systems + # (glibc also supports adding old hash after the marker + # so it can be restored later). + default_marker = u("!") + + @classmethod + def using(cls, marker=None, **kwds): + subcls = super(unix_disabled, cls).using(**kwds) + if marker is not None: + if not cls.identify(marker): + raise ValueError("invalid marker: %r" % marker) + subcls.default_marker = marker + return subcls + + @classmethod + def identify(cls, hash): + # NOTE: technically, anything in the /etc/shadow password field + # which isn't valid crypt() output counts as "disabled". + # but that's rather ambiguous, and it's hard to predict what + # valid output is for unknown crypt() implementations. + # so to be on the safe side, we only match things *known* + # to be disabled field indicators, and will add others + # as they are found. things beginning w/ "$" should *never* match. + # + # things currently matched: + # * linux uses "!" + # * bsd uses "*" + # * linux may use "!" + hash to disable but preserve original hash + # * linux counts empty string as "any password"; + # this code recognizes it, but treats it the same as "!" 
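+        #
+        # editor's illustration (assumed behavior of the check below):
+        #     identify("!")        -> True   (linux-style marker)
+        #     identify("*LOCKED*") -> True   (leading "*")
+        #     identify("")         -> True   (empty string)
+        #     identify("$1$...")   -> False  (valid crypt() prefix)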
+ if isinstance(hash, unicode): + start = _MARKER_CHARS + elif isinstance(hash, bytes): + start = _MARKER_BYTES + else: + raise uh.exc.ExpectedStringError(hash, "hash") + return not hash or hash[0] in start + + @classmethod + def verify(cls, secret, hash): + uh.validate_secret(secret) + if not cls.identify(hash): # handles typecheck + raise uh.exc.InvalidHashError(cls) + return False + + @classmethod + def hash(cls, secret, **kwds): + if kwds: + uh.warn_hash_settings_deprecation(cls, kwds) + return cls.using(**kwds).hash(secret) + uh.validate_secret(secret) + marker = cls.default_marker + assert marker and cls.identify(marker) + return to_native_str(marker, param="marker") + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, marker=None): + if not cls.identify(config): + raise uh.exc.InvalidHashError(cls) + elif config: + # preserve the existing str,since it might contain a disabled password hash ("!" + hash) + uh.validate_secret(secret) + return to_native_str(config, param="config") + else: + if marker is not None: + cls = cls.using(marker=marker) + return cls.hash(secret) + + @classmethod + def disable(cls, hash=None): + out = cls.hash("") + if hash is not None: + hash = to_native_str(hash, param="hash") + if cls.identify(hash): + # extract original hash, so that we normalize marker + hash = cls.enable(hash) + if hash: + out += hash + return out + + @classmethod + def enable(cls, hash): + hash = to_native_str(hash, param="hash") + for prefix in cls._disable_prefixes: + if hash.startswith(prefix): + orig = hash[len(prefix):] + if orig: + return orig + else: + raise ValueError("cannot restore original hash") + raise uh.exc.InvalidHashError(cls) + +class plaintext(uh.MinimalHandler): + """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keyword: + + :type encoding: str + :param encoding: + This controls the character encoding to use (defaults to ``utf-8``). + + This encoding will be used to encode :class:`!unicode` passwords + under Python 2, and decode :class:`!bytes` hashes under Python 3. + + .. versionchanged:: 1.6 + The ``encoding`` keyword was added. 
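+
+    Usage sketch (editor's illustration, not part of the original docstring)::
+
+        >>> from passlib.hash import plaintext
+        >>> plaintext.hash("password")
+        'password'
+        >>> plaintext.verify("password", "password")
+        True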
+ """ + # NOTE: this is subclassed by ldap_plaintext + + name = "plaintext" + setting_kwds = () + context_kwds = ("encoding",) + default_encoding = "utf-8" + + @classmethod + def identify(cls, hash): + if isinstance(hash, unicode_or_bytes_types): + return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + + @classmethod + def hash(cls, secret, encoding=None): + uh.validate_secret(secret) + if not encoding: + encoding = cls.default_encoding + return to_native_str(secret, encoding, "secret") + + @classmethod + def verify(cls, secret, hash, encoding=None): + if not encoding: + encoding = cls.default_encoding + hash = to_native_str(hash, encoding, "hash") + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return str_consteq(cls.hash(secret, encoding), hash) + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls): + return cls.hash("") + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, encoding=None): + # NOTE: 'config' is ignored, as this hash has no salting / etc + if not cls.identify(config): + raise uh.exc.InvalidHashError(cls) + return cls.hash(secret, encoding=encoding) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/mssql.py b/venv/lib/python3.12/site-packages/passlib/handlers/mssql.py new file mode 100644 index 0000000..b060b36 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/mssql.py @@ -0,0 +1,244 @@ +"""passlib.handlers.mssql - MS-SQL Password Hash + +Notes +===== +MS-SQL has used a number of hash algs over the years, +most of which were exposed through the undocumented +'pwdencrypt' and 'pwdcompare' sql functions. + +Known formats +------------- +6.5 + snefru hash, ascii encoded password + no examples found + +7.0 + snefru hash, unicode (what encoding?) + saw ref that these blobs were 16 bytes in size + no examples found + +2000 + byte string using displayed as 0x hex, using 0x0100 prefix. + contains hashes of password and upper-case password. + +2007 + same as 2000, but without the upper-case hash. 
+ +refs +---------- +https://blogs.msdn.com/b/lcris/archive/2007/04/30/sql-server-2005-about-login-password-hashes.aspx?Redirected=true +http://us.generation-nt.com/securing-passwords-hash-help-35429432.html +http://forum.md5decrypter.co.uk/topic230-mysql-and-mssql-get-password-hashes.aspx +http://www.theregister.co.uk/2002/07/08/cracking_ms_sql_server_passwords/ +""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import consteq +from passlib.utils.compat import bascii_to_str, unicode, u +import passlib.utils.handlers as uh +# local +__all__ = [ + "mssql2000", + "mssql2005", +] + +#============================================================================= +# mssql 2000 +#============================================================================= +def _raw_mssql(secret, salt): + assert isinstance(secret, unicode) + assert isinstance(salt, bytes) + return sha1(secret.encode("utf-16-le") + salt).digest() + +BIDENT = b"0x0100" +##BIDENT2 = b("\x01\x00") +UIDENT = u("0x0100") + +def _ident_mssql(hash, csize, bsize): + """common identify for mssql 2000/2005""" + if isinstance(hash, unicode): + if len(hash) == csize and hash.startswith(UIDENT): + return True + elif isinstance(hash, bytes): + if len(hash) == csize and hash.startswith(BIDENT): + return True + ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes + ## return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + return False + +def _parse_mssql(hash, csize, bsize, handler): + """common parser for mssql 2000/2005; returns 4 byte salt + checksum""" + if isinstance(hash, unicode): + if len(hash) == csize and hash.startswith(UIDENT): + try: + return unhexlify(hash[6:].encode("utf-8")) + except TypeError: # throw when bad char found + pass + elif isinstance(hash, bytes): + # assumes ascii-compat encoding + assert isinstance(hash, bytes) + if len(hash) == csize and hash.startswith(BIDENT): + try: + return unhexlify(hash[6:]) + except TypeError: # throw when bad char found + pass + ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes + ## return hash[2:] + else: + raise uh.exc.ExpectedStringError(hash, "hash") + raise uh.exc.InvalidHashError(handler) + +class mssql2000(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the password hash used by MS-SQL 2000, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 bytes in length. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. 
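+
+    Usage sketch (editor's illustration; the hex digits vary with the
+    random salt, so only the shape is shown)::
+
+        >>> from passlib.hash import mssql2000
+        >>> h = mssql2000.hash("Test")
+        >>> len(h)    # '0x' + 92 hex chars
+        94
+        >>> # verify() only compares the upper-case digest (see below),
+        >>> # so case differences in the password are ignored:
+        >>> mssql2000.verify("TEST", h)
+        True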
+ """ + #=================================================================== + # algorithm information + #=================================================================== + name = "mssql2000" + setting_kwds = ("salt",) + checksum_size = 40 + min_salt_size = max_salt_size = 4 + + #=================================================================== + # formatting + #=================================================================== + + # 0100 - 2 byte identifier + # 4 byte salt + # 20 byte checksum + # 20 byte checksum + # = 46 bytes + # encoded '0x' + 92 chars = 94 + + @classmethod + def identify(cls, hash): + return _ident_mssql(hash, 94, 46) + + @classmethod + def from_string(cls, hash): + data = _parse_mssql(hash, 94, 46, cls) + return cls(salt=data[:4], checksum=data[4:]) + + def to_string(self): + raw = self.salt + self.checksum + # raw bytes format - BIDENT2 + raw + return "0x0100" + bascii_to_str(hexlify(raw).upper()) + + def _calc_checksum(self, secret): + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + salt = self.salt + return _raw_mssql(secret, salt) + _raw_mssql(secret.upper(), salt) + + @classmethod + def verify(cls, secret, hash): + # NOTE: we only compare against the upper-case hash + # XXX: add 'full' just to verify both checksums? + uh.validate_secret(secret) + self = cls.from_string(hash) + chk = self.checksum + if chk is None: + raise uh.exc.MissingDigestError(cls) + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + result = _raw_mssql(secret.upper(), self.salt) + return consteq(result, chk[20:]) + +#============================================================================= +# handler +#============================================================================= +class mssql2005(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the password hash used by MS-SQL 2005, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 bytes in length. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. 
+ """ + #=================================================================== + # algorithm information + #=================================================================== + name = "mssql2005" + setting_kwds = ("salt",) + + checksum_size = 20 + min_salt_size = max_salt_size = 4 + + #=================================================================== + # formatting + #=================================================================== + + # 0x0100 - 2 byte identifier + # 4 byte salt + # 20 byte checksum + # = 26 bytes + # encoded '0x' + 52 chars = 54 + + @classmethod + def identify(cls, hash): + return _ident_mssql(hash, 54, 26) + + @classmethod + def from_string(cls, hash): + data = _parse_mssql(hash, 54, 26, cls) + return cls(salt=data[:4], checksum=data[4:]) + + def to_string(self): + raw = self.salt + self.checksum + # raw bytes format - BIDENT2 + raw + return "0x0100" + bascii_to_str(hexlify(raw)).upper() + + def _calc_checksum(self, secret): + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + return _raw_mssql(secret, self.salt) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/mysql.py b/venv/lib/python3.12/site-packages/passlib/handlers/mysql.py new file mode 100644 index 0000000..4a71253 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/mysql.py @@ -0,0 +1,128 @@ +"""passlib.handlers.mysql + +MySQL 3.2.3 / OLD_PASSWORD() + + This implements Mysql's OLD_PASSWORD algorithm, introduced in version 3.2.3, deprecated in version 4.1. + + See :mod:`passlib.handlers.mysql_41` for the new algorithm was put in place in version 4.1 + + This algorithm is known to be very insecure, and should only be used to verify existing password hashes. + + http://djangosnippets.org/snippets/1508/ + +MySQL 4.1.1 / NEW PASSWORD + This implements Mysql new PASSWORD algorithm, introduced in version 4.1. + + This function is unsalted, and therefore not very secure against rainbow attacks. + It should only be used when dealing with mysql passwords, + for all other purposes, you should use a salted hash function. + + Description taken from http://dev.mysql.com/doc/refman/6.0/en/password-hashing.html +""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str +from passlib.utils.compat import bascii_to_str, unicode, u, \ + byte_elem_value, str_to_uascii +import passlib.utils.handlers as uh +# local +__all__ = [ + 'mysql323', + 'mysq41', +] + +#============================================================================= +# backend +#============================================================================= +class mysql323(uh.StaticHandler): + """This class implements the MySQL 3.2.3 password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
+ """ + #=================================================================== + # class attrs + #=================================================================== + name = "mysql323" + checksum_size = 16 + checksum_chars = uh.HEX_CHARS + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + # FIXME: no idea if mysql has a policy about handling unicode passwords + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + MASK_32 = 0xffffffff + MASK_31 = 0x7fffffff + WHITE = b' \t' + + nr1 = 0x50305735 + nr2 = 0x12345671 + add = 7 + for c in secret: + if c in WHITE: + continue + tmp = byte_elem_value(c) + nr1 ^= ((((nr1 & 63)+add)*tmp) + (nr1 << 8)) & MASK_32 + nr2 = (nr2+((nr2 << 8) ^ nr1)) & MASK_32 + add = (add+tmp) & MASK_32 + return u("%08x%08x") % (nr1 & MASK_31, nr2 & MASK_31) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# handler +#============================================================================= +class mysql41(uh.StaticHandler): + """This class implements the MySQL 4.1 password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "mysql41" + _hash_prefix = u("*") + checksum_chars = uh.HEX_CHARS + checksum_size = 40 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.upper() + + def _calc_checksum(self, secret): + # FIXME: no idea if mysql has a policy about handling unicode passwords + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(sha1(sha1(secret).digest()).hexdigest()).upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/oracle.py b/venv/lib/python3.12/site-packages/passlib/handlers/oracle.py new file mode 100644 index 0000000..a094f37 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/oracle.py @@ -0,0 +1,172 @@ +"""passlib.handlers.oracle - Oracle DB Password Hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_unicode, xor_bytes +from passlib.utils.compat import irange, u, \ + uascii_to_str, unicode, str_to_uascii +from passlib.crypto.des import des_encrypt_block 
+import passlib.utils.handlers as uh +# local +__all__ = [ + "oracle10g", + "oracle11g" +] + +#============================================================================= +# oracle10 +#============================================================================= +def des_cbc_encrypt(key, value, iv=b'\x00' * 8, pad=b'\x00'): + """performs des-cbc encryption, returns only last block. + + this performs a specific DES-CBC encryption implementation + as needed by the Oracle10 hash. it probably won't be useful for + other purposes as-is. + + input value is null-padded to multiple of 8 bytes. + + :arg key: des key as bytes + :arg value: value to encrypt, as bytes. + :param iv: optional IV + :param pad: optional pad byte + + :returns: last block of DES-CBC encryption of all ``value``'s byte blocks. + """ + value += pad * (-len(value) % 8) # null pad to multiple of 8 + hash = iv # start things off + for offset in irange(0,len(value),8): + chunk = xor_bytes(hash, value[offset:offset+8]) + hash = des_encrypt_block(key, chunk) + return hash + +# magic string used as initial des key by oracle10 +ORACLE10_MAGIC = b"\x01\x23\x45\x67\x89\xAB\xCD\xEF" + +class oracle10(uh.HasUserContext, uh.StaticHandler): + """This class implements the password hash used by Oracle up to version 10g, and follows the :ref:`password-hash-api`. + + It does a single round of hashing, and relies on the username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keywords: + + :type user: str + :param user: name of oracle user account this password is associated with. + """ + #=================================================================== + # algorithm information + #=================================================================== + name = "oracle10" + checksum_chars = uh.HEX_CHARS + checksum_size = 16 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.upper() + + def _calc_checksum(self, secret): + # FIXME: not sure how oracle handles unicode. + # online docs about 10g hash indicate it puts ascii chars + # in a 2-byte encoding w/ the high byte set to null. + # they don't say how it handles other chars, or what encoding. + # + # so for now, encoding secret & user to utf-16-be, + # since that fits, and if secret/user is bytes, + # we assume utf-8, and decode first. + # + # this whole mess really needs someone w/ an oracle system, + # and some answers :) + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + user = to_unicode(self.user, "utf-8", param="user") + input = (user+secret).upper().encode("utf-16-be") + hash = des_cbc_encrypt(ORACLE10_MAGIC, input) + hash = des_cbc_encrypt(hash, input) + return hexlify(hash).decode("ascii").upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# oracle11 +#============================================================================= +class oracle11(uh.HasSalt, uh.GenericHandler): + """This class implements the Oracle11g password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. 
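+
+    (The older :class:`!oracle10` handler above additionally requires a
+    ``user`` keyword for :meth:`!hash` and :meth:`!verify`, since it salts
+    with the username.)
+
+    Example for this handler (illustrative sketch; the secret is a placeholder)::
+
+        >>> from passlib.hash import oracle11
+        >>> h = oracle11.hash("password")
+        >>> h[:2], len(h)   # "S:" + 40 hex checksum chars + 20 hex salt chars
+        ('S:', 62)
+        >>> oracle11.verify("password", h)
+        True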
+ + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 20 hexadecimal characters. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "oracle11" + setting_kwds = ("salt",) + checksum_size = 40 + checksum_chars = uh.UPPER_HEX_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 20 + salt_chars = uh.UPPER_HEX_CHARS + + + #=================================================================== + # methods + #=================================================================== + _hash_regex = re.compile(u("^S:(?P[0-9a-f]{40})(?P[0-9a-f]{20})$"), re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + salt, chk = m.group("salt", "chk") + return cls(salt=salt, checksum=chk.upper()) + + def to_string(self): + chk = self.checksum + hash = u("S:%s%s") % (chk.upper(), self.salt.upper()) + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + chk = sha1(secret + unhexlify(self.salt.encode("ascii"))).hexdigest() + return str_to_uascii(chk).upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/pbkdf2.py b/venv/lib/python3.12/site-packages/passlib/handlers/pbkdf2.py new file mode 100644 index 0000000..274278d --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/pbkdf2.py @@ -0,0 +1,475 @@ +"""passlib.handlers.pbkdf - PBKDF2 based hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from base64 import b64encode, b64decode +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_unicode +from passlib.utils.binary import ab64_decode, ab64_encode +from passlib.utils.compat import str_to_bascii, u, uascii_to_str, unicode +from passlib.crypto.digest import pbkdf2_hmac +import passlib.utils.handlers as uh +# local +__all__ = [ + "pbkdf2_sha1", + "pbkdf2_sha256", + "pbkdf2_sha512", + "cta_pbkdf2_sha1", + "dlitz_pbkdf2_sha1", + "grub_pbkdf2_sha512", +] + +#============================================================================= +# +#============================================================================= +class Pbkdf2DigestHandler(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """base class for various pbkdf2_{digest} 
algorithms""" + #=================================================================== + # class attrs + #=================================================================== + + #--GenericHandler-- + setting_kwds = ("salt", "salt_size", "rounds") + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + default_salt_size = 16 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = None # set by subclass + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #--this class-- + _digest = None # name of subclass-specified hash + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide sanity check. + # the underlying pbkdf2 specifies no bounds for either. + + # NOTE: defaults chosen to be at least as large as pbkdf2 rfc recommends... + # >8 bytes of entropy in salt, >1000 rounds + # increased due to time since rfc established + + #=================================================================== + # methods + #=================================================================== + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + salt = ab64_decode(salt.encode("ascii")) + if chk: + chk = ab64_decode(chk.encode("ascii")) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + salt = ab64_encode(self.salt).decode("ascii") + chk = ab64_encode(self.checksum).decode("ascii") + return uh.render_mc3(self.ident, self.rounds, salt, chk) + + def _calc_checksum(self, secret): + # NOTE: pbkdf2_hmac() will encode secret & salt using UTF8 + return pbkdf2_hmac(self._digest, secret, self.salt, self.rounds, self.checksum_size) + +def create_pbkdf2_hash(hash_name, digest_size, rounds=12000, ident=None, module=__name__): + """create new Pbkdf2DigestHandler subclass for a specific hash""" + name = 'pbkdf2_' + hash_name + if ident is None: + ident = u("$pbkdf2-%s$") % (hash_name,) + base = Pbkdf2DigestHandler + return type(name, (base,), dict( + __module__=module, # so ABCMeta won't clobber it. + name=name, + ident=ident, + _digest = hash_name, + default_rounds=rounds, + checksum_size=digest_size, + encoded_checksum_size=(digest_size*4+2)//3, + __doc__="""This class implements a generic ``PBKDF2-HMAC-%(digest)s``-based password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be between 0-1024 bytes. + If not specified, a %(dsc)d byte salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to %(dsc)d bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to %(dr)d, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ % dict(digest=hash_name.upper(), dsc=base.default_salt_size, dr=rounds) + )) + +#------------------------------------------------------------------------ +# derived handlers +#------------------------------------------------------------------------ +pbkdf2_sha1 = create_pbkdf2_hash("sha1", 20, 131000, ident=u("$pbkdf2$")) +pbkdf2_sha256 = create_pbkdf2_hash("sha256", 32, 29000) +pbkdf2_sha512 = create_pbkdf2_hash("sha512", 64, 25000) + +ldap_pbkdf2_sha1 = uh.PrefixWrapper("ldap_pbkdf2_sha1", pbkdf2_sha1, "{PBKDF2}", "$pbkdf2$", ident=True) +ldap_pbkdf2_sha256 = uh.PrefixWrapper("ldap_pbkdf2_sha256", pbkdf2_sha256, "{PBKDF2-SHA256}", "$pbkdf2-sha256$", ident=True) +ldap_pbkdf2_sha512 = uh.PrefixWrapper("ldap_pbkdf2_sha512", pbkdf2_sha512, "{PBKDF2-SHA512}", "$pbkdf2-sha512$", ident=True) + +#============================================================================= +# cryptacular's pbkdf2 hash +#============================================================================= + +# bytes used by cta hash for base64 values 63 & 64 +CTA_ALTCHARS = b"-_" + +class cta_pbkdf2_sha1(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements Cryptacular's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, it may be any length. + If not specified, a one will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "cta_pbkdf2_sha1" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$p5k2$") + checksum_size = 20 + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. underlying algorithm (and reference implementation) + # allows effectively unbounded values for both of these parameters. 
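+
+    # Illustrative usage of the pbkdf2_{digest} handlers derived above
+    # (sketch only; assumes passlib is importable, secret is a placeholder):
+    #
+    #     from passlib.hash import pbkdf2_sha256
+    #     h = pbkdf2_sha256.using(rounds=29000).hash("password")
+    #     assert h.startswith("$pbkdf2-sha256$29000$")
+    #     assert pbkdf2_sha256.verify("password", h)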
+ + #--HasSalt-- + default_salt_size = 16 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = pbkdf2_sha1.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + # hash $p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0= + # ident $p5k2$ + # rounds 1000 + # salt ZxK4ZBJCfQg= + # chk jJZVscWtO--p1-xIZl6jhO2LKR0= + # NOTE: rounds in hex + + @classmethod + def from_string(cls, hash): + # NOTE: passlib deviation - forbidding zero-padded rounds + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, handler=cls) + salt = b64decode(salt.encode("ascii"), CTA_ALTCHARS) + if chk: + chk = b64decode(chk.encode("ascii"), CTA_ALTCHARS) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + salt = b64encode(self.salt, CTA_ALTCHARS).decode("ascii") + chk = b64encode(self.checksum, CTA_ALTCHARS).decode("ascii") + return uh.render_mc3(self.ident, self.rounds, salt, chk, rounds_base=16) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + return pbkdf2_hmac("sha1", secret, self.salt, self.rounds, 20) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# dlitz's pbkdf2 hash +#============================================================================= +class dlitz_pbkdf2_sha1(uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements Dwayne Litzenberger's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If specified, it may be any length, but must use the characters in the regexp range ``[./0-9A-Za-z]``. + If not specified, a 16 character salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "dlitz_pbkdf2_sha1" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$p5k2$") + _stub_checksum = u("0" * 48 + "=") + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. underlying algorithm (and reference implementation) + # allows effectively unbounded values for both of these parameters. + + #--HasSalt-- + default_salt_size = 16 + max_salt_size = 1024 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + # NOTE: for security, the default here is set to match pbkdf2_sha1, + # even though this hash's extra block makes it twice as slow. + default_rounds = pbkdf2_sha1.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + # hash $p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g + # ident $p5k2$ + # rounds c + # salt u9HvcT4d + # chk Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g + # rounds in lowercase hex, no zero padding + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, + default_rounds=400, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + rounds = self.rounds + if rounds == 400: + rounds = None # omit rounds measurement if == 400 + return uh.render_mc3(self.ident, rounds, self.salt, self.checksum, rounds_base=16) + + def _get_config(self): + rounds = self.rounds + if rounds == 400: + rounds = None # omit rounds measurement if == 400 + return uh.render_mc3(self.ident, rounds, self.salt, None, rounds_base=16) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + salt = self._get_config() + result = pbkdf2_hmac("sha1", secret, salt, self.rounds, 24) + return ab64_encode(result).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# crowd +#============================================================================= +class atlassian_pbkdf2_sha1(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the PBKDF2 hash used by Atlassian. + + It supports a fixed-length salt, and a fixed number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be exactly 16 bytes. + If not specified, a salt will be autogenerated (this is recommended). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #--GenericHandler-- + name = "atlassian_pbkdf2_sha1" + setting_kwds =("salt",) + ident = u("{PKCS5S2}") + checksum_size = 32 + + #--HasRawSalt-- + min_salt_size = max_salt_size = 16 + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + ident = cls.ident + if not hash.startswith(ident): + raise uh.exc.InvalidHashError(cls) + data = b64decode(hash[len(ident):].encode("ascii")) + salt, chk = data[:16], data[16:] + return cls(salt=salt, checksum=chk) + + def to_string(self): + data = self.salt + self.checksum + hash = self.ident + b64encode(data).decode("ascii") + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + # TODO: find out what crowd's policy is re: unicode + # crowd seems to use a fixed number of rounds. + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + return pbkdf2_hmac("sha1", secret, self.salt, 10000, 32) + +#============================================================================= +# grub +#============================================================================= +class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements Grub's pbkdf2-hmac-sha512 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be between 0-1024 bytes. + If not specified, a 64 byte salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 64 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 19000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + name = "grub_pbkdf2_sha512" + setting_kwds = ("salt", "salt_size", "rounds") + + ident = u("grub.pbkdf2.sha512.") + checksum_size = 64 + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. the underlying pbkdf2 specifies no bounds for either, + # and it's not clear what grub specifies. 
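+
+    # Illustrative sketch (assumes passlib is importable; secret is a placeholder).
+    # Fields are dot-separated: ident, rounds, hex-encoded salt, hex-encoded digest.
+    #
+    #     from passlib.hash import grub_pbkdf2_sha512
+    #     h = grub_pbkdf2_sha512.hash("password")
+    #     assert h.startswith("grub.pbkdf2.sha512.")
+    #     assert grub_pbkdf2_sha512.verify("password", h)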
+ + default_salt_size = 64 + max_salt_size = 1024 + + default_rounds = pbkdf2_sha512.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, sep=u("."), + handler=cls) + salt = unhexlify(salt.encode("ascii")) + if chk: + chk = unhexlify(chk.encode("ascii")) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + salt = hexlify(self.salt).decode("ascii").upper() + chk = hexlify(self.checksum).decode("ascii").upper() + return uh.render_mc3(self.ident, self.rounds, salt, chk, sep=u(".")) + + def _calc_checksum(self, secret): + # TODO: find out what grub's policy is re: unicode + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + return pbkdf2_hmac("sha512", secret, self.salt, self.rounds, 64) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/phpass.py b/venv/lib/python3.12/site-packages/passlib/handlers/phpass.py new file mode 100644 index 0000000..6736f0f --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/phpass.py @@ -0,0 +1,135 @@ +"""passlib.handlers.phpass - PHPass Portable Crypt + +phppass located - http://www.openwall.com/phpass/ +algorithm described - http://www.openwall.com/articles/PHP-Users-Passwords + +phpass context - blowfish, bsdi_crypt, phpass +""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.binary import h64 +from passlib.utils.compat import u, uascii_to_str, unicode +import passlib.utils.handlers as uh +# local +__all__ = [ + "phpass", +] + +#============================================================================= +# phpass +#============================================================================= +class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the PHPass Portable Hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 19, must be between 7 and 30, inclusive. + This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}`. + + :type ident: str + :param ident: + phpBB3 uses ``H`` instead of ``P`` for its identifier, + this may be set to ``H`` in order to generate phpBB3 compatible hashes. + it defaults to ``P``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. 
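+
+    Example (illustrative sketch; the secret is a placeholder)::
+
+        >>> from passlib.hash import phpass
+        >>> h = phpass.hash("password")
+        >>> h[:3], len(h)   # "$P$" + 1 rounds char + 8 salt chars + 22 checksum chars
+        ('$P$', 34)
+        >>> phpass.verify("password", h)
+        True
+        >>> phpass.using(ident="H").hash("password")[:3]   # phpBB3-style prefix
+        '$H$'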
+ + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "phpass" + setting_kwds = ("salt", "rounds", "ident") + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 8 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 19 + min_rounds = 7 + max_rounds = 30 + rounds_cost = "log2" + + #--HasManyIdents-- + default_ident = u("$P$") + ident_values = (u("$P$"), u("$H$")) + ident_aliases = {u("P"):u("$P$"), u("H"):u("$H$")} + + #=================================================================== + # formatting + #=================================================================== + + #$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0 + # $P$ + # 9 + # IQRaTwmf + # eRo7ud9Fh4E2PdI0S3r.L0 + + @classmethod + def from_string(cls, hash): + ident, data = cls._parse_ident(hash) + rounds, salt, chk = data[0], data[1:9], data[9:] + return cls( + ident=ident, + rounds=h64.decode_int6(rounds.encode("ascii")), + salt=salt, + checksum=chk or None, + ) + + def to_string(self): + hash = u("%s%s%s%s") % (self.ident, + h64.encode_int6(self.rounds).decode("ascii"), + self.salt, + self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # FIXME: can't find definitive policy on how phpass handles non-ascii. + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + real_rounds = 1<`_ + hash names. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + In addition to the standard :ref:`password-hash-api` methods, + this class also provides the following methods for manipulating Passlib + scram hashes in ways useful for pluging into a SCRAM protocol stack: + + .. automethod:: extract_digest_info + .. automethod:: extract_digest_algs + .. automethod:: derive_digest + """ + #=================================================================== + # class attrs + #=================================================================== + + # NOTE: unlike most GenericHandler classes, the 'checksum' attr of + # ScramHandler is actually a map from digest_name -> digest, so + # many of the standard methods have been overridden. + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide + # a sanity check; the underlying pbkdf2 specifies no bounds for either. + + #--GenericHandler-- + name = "scram" + setting_kwds = ("salt", "salt_size", "rounds", "algs") + ident = u("$scram$") + + #--HasSalt-- + default_salt_size = 12 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = 100000 + min_rounds = 1 + max_rounds = 2**32-1 + rounds_cost = "linear" + + #--custom-- + + # default algorithms when creating new hashes. + default_algs = ["sha-1", "sha-256", "sha-512"] + + # list of algs verify prefers to use, in order. 
+ _verify_algs = ["sha-256", "sha-512", "sha-224", "sha-384", "sha-1"] + + #=================================================================== + # instance attrs + #=================================================================== + + # 'checksum' is different from most GenericHandler subclasses, + # in that it contains a dict mapping from alg -> digest, + # or None if no checksum present. + + # list of algorithms to create/compare digests for. + algs = None + + #=================================================================== + # scram frontend helpers + #=================================================================== + @classmethod + def extract_digest_info(cls, hash, alg): + """return (salt, rounds, digest) for specific hash algorithm. + + :type hash: str + :arg hash: + :class:`!scram` hash stored for desired user + + :type alg: str + :arg alg: + Name of digest algorithm (e.g. ``"sha-1"``) requested by client. + + This value is run through :func:`~passlib.crypto.digest.norm_hash_name`, + so it is case-insensitive, and can be the raw SCRAM + mechanism name (e.g. ``"SCRAM-SHA-1"``), the IANA name, + or the hashlib name. + + :raises KeyError: + If the hash does not contain an entry for the requested digest + algorithm. + + :returns: + A tuple containing ``(salt, rounds, digest)``, + where *digest* matches the raw bytes returned by + SCRAM's :func:`Hi` function for the stored password, + the provided *salt*, and the iteration count (*rounds*). + *salt* and *digest* are both raw (unencoded) bytes. + """ + # XXX: this could be sped up by writing custom parsing routine + # that just picks out relevant digest, and doesn't bother + # with full structure validation each time it's called. + alg = norm_hash_name(alg, 'iana') + self = cls.from_string(hash) + chkmap = self.checksum + if not chkmap: + raise ValueError("scram hash contains no digests") + return self.salt, self.rounds, chkmap[alg] + + @classmethod + def extract_digest_algs(cls, hash, format="iana"): + """Return names of all algorithms stored in a given hash. + + :type hash: str + :arg hash: + The :class:`!scram` hash to parse + + :type format: str + :param format: + This changes the naming convention used by the + returned algorithm names. By default the names + are IANA-compatible; possible values are ``"iana"`` or ``"hashlib"``. + + :returns: + Returns a list of digest algorithms; e.g. ``["sha-1"]`` + """ + # XXX: this could be sped up by writing custom parsing routine + # that just picks out relevant names, and doesn't bother + # with full structure validation each time it's called. + algs = cls.from_string(hash).algs + if format == "iana": + return algs + else: + return [norm_hash_name(alg, format) for alg in algs] + + @classmethod + def derive_digest(cls, password, salt, rounds, alg): + """helper to create SaltedPassword digest for SCRAM. + + This performs the step in the SCRAM protocol described as:: + + SaltedPassword := Hi(Normalize(password), salt, i) + + :type password: unicode or utf-8 bytes + :arg password: password to run through digest + + :type salt: bytes + :arg salt: raw salt data + + :type rounds: int + :arg rounds: number of iterations. + + :type alg: str + :arg alg: name of digest to use (e.g. ``"sha-1"``). + + :returns: + raw bytes of ``SaltedPassword`` + """ + if isinstance(password, bytes): + password = password.decode("utf-8") + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8, + # and handle normalizing alg name. 
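+        # Illustrative sketch mirroring the commented-out RFC 5802 test vector
+        # at the end of this module (the values below are the spec's, not secrets):
+        #
+        #     from base64 import b64decode
+        #     salted = scram.derive_digest(u"pencil",
+        #                                  b64decode("QSXCR+Q6sek8bf92"),
+        #                                  4096, "sha-1")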
+ return pbkdf2_hmac(alg, saslprep(password), salt, rounds) + + #=================================================================== + # serialization + #=================================================================== + + @classmethod + def from_string(cls, hash): + hash = to_native_str(hash, "ascii", "hash") + if not hash.startswith("$scram$"): + raise uh.exc.InvalidHashError(cls) + parts = hash[7:].split("$") + if len(parts) != 3: + raise uh.exc.MalformedHashError(cls) + rounds_str, salt_str, chk_str = parts + + # decode rounds + rounds = int(rounds_str) + if rounds_str != str(rounds): # forbid zero padding, etc. + raise uh.exc.MalformedHashError(cls) + + # decode salt + try: + salt = ab64_decode(salt_str.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + + # decode algs/digest list + if not chk_str: + # scram hashes MUST have something here. + raise uh.exc.MalformedHashError(cls) + elif "=" in chk_str: + # comma-separated list of 'alg=digest' pairs + algs = None + chkmap = {} + for pair in chk_str.split(","): + alg, digest = pair.split("=") + try: + chkmap[alg] = ab64_decode(digest.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + else: + # comma-separated list of alg names, no digests + algs = chk_str + chkmap = None + + # return new object + return cls( + rounds=rounds, + salt=salt, + checksum=chkmap, + algs=algs, + ) + + def to_string(self): + salt = bascii_to_str(ab64_encode(self.salt)) + chkmap = self.checksum + chk_str = ",".join( + "%s=%s" % (alg, bascii_to_str(ab64_encode(chkmap[alg]))) + for alg in self.algs + ) + return '$scram$%d$%s$%s' % (self.rounds, salt, chk_str) + + #=================================================================== + # variant constructor + #=================================================================== + @classmethod + def using(cls, default_algs=None, algs=None, **kwds): + # parse aliases + if algs is not None: + assert default_algs is None + default_algs = algs + + # create subclass + subcls = super(scram, cls).using(**kwds) + + # fill in algs + if default_algs is not None: + subcls.default_algs = cls._norm_algs(default_algs) + return subcls + + #=================================================================== + # init + #=================================================================== + def __init__(self, algs=None, **kwds): + super(scram, self).__init__(**kwds) + + # init algs + digest_map = self.checksum + if algs is not None: + if digest_map is not None: + raise RuntimeError("checksum & algs kwds are mutually exclusive") + algs = self._norm_algs(algs) + elif digest_map is not None: + # derive algs list from digest map (if present). 
+ algs = self._norm_algs(digest_map.keys()) + elif self.use_defaults: + algs = list(self.default_algs) + assert self._norm_algs(algs) == algs, "invalid default algs: %r" % (algs,) + else: + raise TypeError("no algs list specified") + self.algs = algs + + def _norm_checksum(self, checksum, relaxed=False): + if not isinstance(checksum, dict): + raise uh.exc.ExpectedTypeError(checksum, "dict", "checksum") + for alg, digest in iteritems(checksum): + if alg != norm_hash_name(alg, 'iana'): + raise ValueError("malformed algorithm name in scram hash: %r" % + (alg,)) + if len(alg) > 9: + raise ValueError("SCRAM limits algorithm names to " + "9 characters: %r" % (alg,)) + if not isinstance(digest, bytes): + raise uh.exc.ExpectedTypeError(digest, "raw bytes", "digests") + # TODO: verify digest size (if digest is known) + if 'sha-1' not in checksum: + # NOTE: required because of SCRAM spec. + raise ValueError("sha-1 must be in algorithm list of scram hash") + return checksum + + @classmethod + def _norm_algs(cls, algs): + """normalize algs parameter""" + if isinstance(algs, native_string_types): + algs = splitcomma(algs) + algs = sorted(norm_hash_name(alg, 'iana') for alg in algs) + if any(len(alg)>9 for alg in algs): + raise ValueError("SCRAM limits alg names to max of 9 characters") + if 'sha-1' not in algs: + # NOTE: required because of SCRAM spec (rfc 5802) + raise ValueError("sha-1 must be in algorithm list of scram hash") + return algs + + #=================================================================== + # migration + #=================================================================== + def _calc_needs_update(self, **kwds): + # marks hashes as deprecated if they don't include at least all default_algs. + # XXX: should we deprecate if they aren't exactly the same, + # to permit removing legacy hashes? + if not set(self.algs).issuperset(self.default_algs): + return True + + # hand off to base implementation + return super(scram, self)._calc_needs_update(**kwds) + + #=================================================================== + # digest methods + #=================================================================== + def _calc_checksum(self, secret, alg=None): + rounds = self.rounds + salt = self.salt + hash = self.derive_digest + if alg: + # if requested, generate digest for specific alg + return hash(secret, salt, rounds, alg) + else: + # by default, return dict containing digests for all algs + return dict( + (alg, hash(secret, salt, rounds, alg)) + for alg in self.algs + ) + + @classmethod + def verify(cls, secret, hash, full=False): + uh.validate_secret(secret) + self = cls.from_string(hash) + chkmap = self.checksum + if not chkmap: + raise ValueError("expected %s hash, got %s config string instead" % + (cls.name, cls.name)) + + # NOTE: to make the verify method efficient, we just calculate hash + # of shortest digest by default. apps can pass in "full=True" to + # check entire hash for consistency. + if full: + correct = failed = False + for alg, digest in iteritems(chkmap): + other = self._calc_checksum(secret, alg) + # NOTE: could do this length check in norm_algs(), + # but don't need to be that strict, and want to be able + # to parse hashes containing algs not supported by platform. + # it's fine if we fail here though. 
+ if len(digest) != len(other): + raise ValueError("mis-sized %s digest in scram hash: %r != %r" + % (alg, len(digest), len(other))) + if consteq(other, digest): + correct = True + else: + failed = True + if correct and failed: + raise ValueError("scram hash verified inconsistently, " + "may be corrupted") + else: + return correct + else: + # XXX: should this just always use sha1 hash? would be faster. + # otherwise only verify against one hash, pick one w/ best security. + for alg in self._verify_algs: + if alg in chkmap: + other = self._calc_checksum(secret, alg) + return consteq(other, chkmap[alg]) + # there should always be sha-1 at the very least, + # or something went wrong inside _norm_algs() + raise AssertionError("sha-1 digest not found!") + + #=================================================================== + # + #=================================================================== + +#============================================================================= +# code used for testing scram against protocol examples during development. +#============================================================================= +##def _test_reference_scram(): +## "quick hack testing scram reference vectors" +## # NOTE: "n,," is GS2 header - see https://tools.ietf.org/html/rfc5801 +## from passlib.utils.compat import print_ +## +## engine = _scram_engine( +## alg="sha-1", +## salt='QSXCR+Q6sek8bf92'.decode("base64"), +## rounds=4096, +## password=u("pencil"), +## ) +## print_(engine.digest.encode("base64").rstrip()) +## +## msg = engine.format_auth_msg( +## username="user", +## client_nonce = "fyko+d2lbbFgONRv9qkxdawL", +## server_nonce = "3rfcNHYJY1ZVvWVs7j", +## header='c=biws', +## ) +## +## cp = engine.get_encoded_client_proof(msg) +## assert cp == "v0X8v3Bz2T0CJGbJQyF0X+HI4Ts=", cp +## +## ss = engine.get_encoded_server_sig(msg) +## assert ss == "rmF9pqV8S7suAoZWja4dJRkFsKQ=", ss +## +##class _scram_engine(object): +## """helper class for verifying scram hash behavior +## against SCRAM protocol examples. not officially part of Passlib. +## +## takes in alg, salt, rounds, and a digest or password. +## +## can calculate the various keys & messages of the scram protocol. 
+## +## """ +## #========================================================= +## # init +## #========================================================= +## +## @classmethod +## def from_string(cls, hash, alg): +## "create record from scram hash, for given alg" +## return cls(alg, *scram.extract_digest_info(hash, alg)) +## +## def __init__(self, alg, salt, rounds, digest=None, password=None): +## self.alg = norm_hash_name(alg) +## self.salt = salt +## self.rounds = rounds +## self.password = password +## if password: +## data = scram.derive_digest(password, salt, rounds, alg) +## if digest and data != digest: +## raise ValueError("password doesn't match digest") +## else: +## digest = data +## elif not digest: +## raise TypeError("must provide password or digest") +## self.digest = digest +## +## #========================================================= +## # frontend methods +## #========================================================= +## def get_hash(self, data): +## "return hash of raw data" +## return hashlib.new(iana_to_hashlib(self.alg), data).digest() +## +## def get_client_proof(self, msg): +## "return client proof of specified auth msg text" +## return xor_bytes(self.client_key, self.get_client_sig(msg)) +## +## def get_encoded_client_proof(self, msg): +## return self.get_client_proof(msg).encode("base64").rstrip() +## +## def get_client_sig(self, msg): +## "return client signature of specified auth msg text" +## return self.get_hmac(self.stored_key, msg) +## +## def get_server_sig(self, msg): +## "return server signature of specified auth msg text" +## return self.get_hmac(self.server_key, msg) +## +## def get_encoded_server_sig(self, msg): +## return self.get_server_sig(msg).encode("base64").rstrip() +## +## def format_server_response(self, client_nonce, server_nonce): +## return 'r={client_nonce}{server_nonce},s={salt},i={rounds}'.format( +## client_nonce=client_nonce, +## server_nonce=server_nonce, +## rounds=self.rounds, +## salt=self.encoded_salt, +## ) +## +## def format_auth_msg(self, username, client_nonce, server_nonce, +## header='c=biws'): +## return ( +## 'n={username},r={client_nonce}' +## ',' +## 'r={client_nonce}{server_nonce},s={salt},i={rounds}' +## ',' +## '{header},r={client_nonce}{server_nonce}' +## ).format( +## username=username, +## client_nonce=client_nonce, +## server_nonce=server_nonce, +## salt=self.encoded_salt, +## rounds=self.rounds, +## header=header, +## ) +## +## #========================================================= +## # helpers to calculate & cache constant data +## #========================================================= +## def _calc_get_hmac(self): +## return get_prf("hmac-" + iana_to_hashlib(self.alg))[0] +## +## def _calc_client_key(self): +## return self.get_hmac(self.digest, b("Client Key")) +## +## def _calc_stored_key(self): +## return self.get_hash(self.client_key) +## +## def _calc_server_key(self): +## return self.get_hmac(self.digest, b("Server Key")) +## +## def _calc_encoded_salt(self): +## return self.salt.encode("base64").rstrip() +## +## #========================================================= +## # hacks for calculated attributes +## #========================================================= +## +## def __getattr__(self, attr): +## if not attr.startswith("_"): +## f = getattr(self, "_calc_" + attr, None) +## if f: +## value = f() +## setattr(self, attr, value) +## return value +## raise AttributeError("attribute not found") +## +## def __dir__(self): +## cdir = dir(self.__class__) +## attrs = set(cdir) +## 
attrs.update(self.__dict__) +## attrs.update(attr[6:] for attr in cdir +## if attr.startswith("_calc_")) +## return sorted(attrs) +## #========================================================= +## # eoc +## #========================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/scrypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/scrypt.py new file mode 100644 index 0000000..1686fda --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/scrypt.py @@ -0,0 +1,383 @@ +"""passlib.handlers.scrypt -- scrypt password hash""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.crypto import scrypt as _scrypt +from passlib.utils import h64, to_bytes +from passlib.utils.binary import h64, b64s_decode, b64s_encode +from passlib.utils.compat import u, bascii_to_str, suppress_cause +from passlib.utils.decor import classproperty +import passlib.utils.handlers as uh +# local +__all__ = [ + "scrypt", +] + +#============================================================================= +# scrypt format identifiers +#============================================================================= + +IDENT_SCRYPT = u("$scrypt$") # identifier used by passlib +IDENT_7 = u("$7$") # used by official scrypt spec + +_UDOLLAR = u("$") + +#============================================================================= +# handler +#============================================================================= +class scrypt(uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.HasManyIdents, + uh.GenericHandler): + """This class implements an SCrypt-based password [#scrypt-home]_ hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, a variable number of rounds, + as well as some custom tuning parameters unique to scrypt (see below). + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If specified, the length must be between 0-1024 bytes. + If not specified, one will be auto-generated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 16, but must be within ``range(1,32)``. + + .. warning:: + + Unlike many hash algorithms, increasing the rounds value + will increase both the time *and memory* required to hash a password. + + :type block_size: int + :param block_size: + Optional block size to pass to scrypt hash function (the ``r`` parameter). + Useful for tuning scrypt to optimal performance for your CPU architecture. + Defaults to 8. + + :type parallelism: int + :param parallelism: + Optional parallelism to pass to scrypt hash function (the ``p`` parameter). + Defaults to 1. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include ``rounds``
+        that are too small or too large, and ``salt`` strings that are too long.
+
+    .. note::
+
+        The underlying scrypt hash function has a number of limitations
+        on its parameter values, which forbids certain combinations of settings.
+        The requirements are:
+
+        * ``linear_rounds = 2**<rounds>``
+        * ``linear_rounds < 2**(16 * block_size)``
+        * ``block_size * parallelism <= 2**30-1``
+
+    .. todo::
+
+        This class currently does not support configuring default values
+        for ``block_size`` or ``parallelism`` via a :class:`~passlib.context.CryptContext`
+        configuration.
+    """
+
+    #===================================================================
+    # class attrs
+    #===================================================================
+
+    #------------------------
+    # PasswordHash
+    #------------------------
+    name = "scrypt"
+    setting_kwds = ("ident", "salt", "salt_size", "rounds", "block_size", "parallelism")
+
+    #------------------------
+    # GenericHandler
+    #------------------------
+    # NOTE: scrypt supports arbitrary output sizes. since its output runs through
+    #       pbkdf2-hmac-sha256 before returning, and this could be raised eventually...
+    #       but a 256-bit digest is more than sufficient for password hashing.
+    # XXX: make checksum size configurable? could merge w/ argon2 code that does this.
+    checksum_size = 32
+
+    #------------------------
+    # HasManyIdents
+    #------------------------
+    default_ident = IDENT_SCRYPT
+    ident_values = (IDENT_SCRYPT, IDENT_7)
+
+    #------------------------
+    # HasRawSalt
+    #------------------------
+    default_salt_size = 16
+    max_salt_size = 1024
+
+    #------------------------
+    # HasRounds
+    #------------------------
+    # TODO: would like to dynamically pick this based on system
+    default_rounds = 16
+    min_rounds = 1
+    max_rounds = 31  # limited by scrypt alg
+    rounds_cost = "log2"
+
+    # TODO: make default block size configurable via using(), and deprecatable via .needs_update()
+
+    #===================================================================
+    # instance attrs
+    #===================================================================
+
+    #: default parallelism setting (min=1 currently hardcoded in mixin)
+    parallelism = 1
+
+    #: default block size setting
+    block_size = 8
+
+    #===================================================================
+    # variant constructor
+    #===================================================================
+
+    @classmethod
+    def using(cls, block_size=None, **kwds):
+        subcls = super(scrypt, cls).using(**kwds)
+        if block_size is not None:
+            if isinstance(block_size, uh.native_string_types):
+                block_size = int(block_size)
+            subcls.block_size = subcls._norm_block_size(block_size, relaxed=kwds.get("relaxed"))
+
+        # make sure param combination is valid for scrypt()
+        try:
+            _scrypt.validate(1 << cls.default_rounds, cls.block_size, cls.parallelism)
+        except ValueError as err:
+            raise suppress_cause(ValueError("scrypt: invalid settings combination: " + str(err)))
+
+        return subcls
+
+    #===================================================================
+    # parsing
+    #===================================================================
+
+    @classmethod
+    def from_string(cls, hash):
+        return cls(**cls.parse(hash))
+
+    @classmethod
+    def parse(cls, hash):
+        ident, suffix = cls._parse_ident(hash)
+        func = getattr(cls, "_parse_%s_string" % ident.strip(_UDOLLAR), None)
+        if func:
+            return func(suffix)
+        else:
+            raise uh.exc.InvalidHashError(cls)
+
+    #
+    # passlib's format:
+    #   $scrypt$ln=<logN>,r=<r>,p=<p>$<salt>[$<digest>]
+    # where:
+    #   logN, r, p -- decimal-encoded positive integer, no zero-padding
+    #   logN -- log cost setting
+    #   r -- block size setting (usually 8)
+    #   p -- parallelism setting (usually 1)
+    #   salt, digest -- b64-nopad encoded bytes
+    #
+
+    @classmethod
+    def _parse_scrypt_string(cls, suffix):
+        # break params, salt, and digest sections
+        parts = suffix.split("$")
+        if len(parts) == 3:
+            params, salt, digest = parts
+        elif len(parts) == 2:
+            params, salt = parts
+            digest = None
+        else:
+            raise uh.exc.MalformedHashError(cls, "malformed hash")
+
+        # break params apart
+        parts = params.split(",")
+        if len(parts) == 3:
+            nstr, bstr, pstr = parts
+            assert nstr.startswith("ln=")
+            assert bstr.startswith("r=")
+            assert pstr.startswith("p=")
+        else:
+            raise uh.exc.MalformedHashError(cls, "malformed settings field")
+
+        return dict(
+            ident=IDENT_SCRYPT,
+            rounds=int(nstr[3:]),
+            block_size=int(bstr[2:]),
+            parallelism=int(pstr[2:]),
+            salt=b64s_decode(salt.encode("ascii")),
+            checksum=b64s_decode(digest.encode("ascii")) if digest else None,
+        )
+
+    #
+    # official format specification defined at
+    #     https://gitlab.com/jas/scrypt-unix-crypt/blob/master/unix-scrypt.txt
+    # format:
+    #   $7$<N><rrrrr><ppppp><salt...>[$<digest>]
+    #    0  12345  67890  1
+    # where:
+    #   All bytes use h64-little-endian encoding
+    #   N: 6-bit log cost setting
+    #   r: 30-bit block size setting
+    #   p: 30-bit parallelism setting
+    #   salt: variable length salt bytes
+    #   digest: fixed 32-byte digest
+    #
+
+    @classmethod
+    def _parse_7_string(cls, suffix):
+        # XXX: annoyingly, official spec embeds salt *raw*, yet doesn't specify a hash encoding.
+        #      so assuming only h64 chars are valid for salt, and are ASCII encoded.
+
+        # split into params & digest
+        parts = suffix.encode("ascii").split(b"$")
+        if len(parts) == 2:
+            params, digest = parts
+        elif len(parts) == 1:
+            params, = parts
+            digest = None
+        else:
+            raise uh.exc.MalformedHashError()
+
+        # parse params & return
+        if len(params) < 11:
+            raise uh.exc.MalformedHashError(cls, "params field too short")
+        return dict(
+            ident=IDENT_7,
+            rounds=h64.decode_int6(params[:1]),
+            block_size=h64.decode_int30(params[1:6]),
+            parallelism=h64.decode_int30(params[6:11]),
+            salt=params[11:],
+            checksum=h64.decode_bytes(digest) if digest else None,
+        )
+
+    #===================================================================
+    # formatting
+    #===================================================================
+    def to_string(self):
+        ident = self.ident
+        if ident == IDENT_SCRYPT:
+            return "$scrypt$ln=%d,r=%d,p=%d$%s$%s" % (
+                self.rounds,
+                self.block_size,
+                self.parallelism,
+                bascii_to_str(b64s_encode(self.salt)),
+                bascii_to_str(b64s_encode(self.checksum)),
+            )
+        else:
+            assert ident == IDENT_7
+            salt = self.salt
+            try:
+                salt.decode("ascii")
+            except UnicodeDecodeError:
+                raise suppress_cause(NotImplementedError("scrypt $7$ hashes don't support non-ascii salts"))
+            return bascii_to_str(b"".join([
+                b"$7$",
+                h64.encode_int6(self.rounds),
+                h64.encode_int30(self.block_size),
+                h64.encode_int30(self.parallelism),
+                self.salt,
+                b"$",
+                h64.encode_bytes(self.checksum)
+            ]))
+
+    #===================================================================
+    # init
+    #===================================================================
+    def __init__(self, block_size=None, **kwds):
+        super(scrypt, self).__init__(**kwds)
+
+        # init block size
+        if block_size is None:
+            assert uh.validate_default_value(self, self.block_size, self._norm_block_size,
+                                             param="block_size")
+        else:
+            self.block_size = self._norm_block_size(block_size)
+
+        #
NOTE: if hash contains invalid complex constraint, relying on error + # being raised by scrypt call in _calc_checksum() + + @classmethod + def _norm_block_size(cls, block_size, relaxed=False): + return uh.norm_integer(cls, block_size, min=1, param="block_size", relaxed=relaxed) + + def _generate_salt(self): + salt = super(scrypt, self)._generate_salt() + if self.ident == IDENT_7: + # this format doesn't support non-ascii salts. + # as workaround, we take raw bytes, encoded to base64 + salt = b64s_encode(salt) + return salt + + #=================================================================== + # backend configuration + # NOTE: this following HasManyBackends' API, but provides it's own implementation, + # which actually switches the backend that 'passlib.crypto.scrypt.scrypt()' uses. + #=================================================================== + + @classproperty + def backends(cls): + return _scrypt.backend_values + + @classmethod + def get_backend(cls): + return _scrypt.backend + + @classmethod + def has_backend(cls, name="any"): + try: + cls.set_backend(name, dryrun=True) + return True + except uh.exc.MissingBackendError: + return False + + @classmethod + def set_backend(cls, name="any", dryrun=False): + _scrypt._set_backend(name, dryrun=dryrun) + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + secret = to_bytes(secret, param="secret") + return _scrypt.scrypt(secret, self.salt, n=(1 << self.rounds), r=self.block_size, + p=self.parallelism, keylen=self.checksum_size) + + #=================================================================== + # hash migration + #=================================================================== + + def _calc_needs_update(self, **kwds): + """ + mark hash as needing update if rounds is outside desired bounds. 
+ """ + # XXX: for now, marking all hashes which don't have matching block_size setting + if self.block_size != type(self).block_size: + return True + return super(scrypt, self)._calc_needs_update(**kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/sha1_crypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/sha1_crypt.py new file mode 100644 index 0000000..8f9aa71 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/sha1_crypt.py @@ -0,0 +1,158 @@ +"""passlib.handlers.sha1_crypt +""" + +#============================================================================= +# imports +#============================================================================= + +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import safe_crypt, test_crypt +from passlib.utils.binary import h64 +from passlib.utils.compat import u, unicode, irange +from passlib.crypto.digest import compile_hmac +import passlib.utils.handlers as uh +# local +__all__ = [ +] +#============================================================================= +# sha1-crypt +#============================================================================= +_BNULL = b'\x00' + +class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the SHA1-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, an 8 character one will be autogenerated (this is recommended). + If specified, it must be 0-64 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes, but can be any value between 0 and 64. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 480000, must be between 1 and 4294967295, inclusive. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "sha1_crypt" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$sha1$") + checksum_size = 28 + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + default_salt_size = 8 + max_salt_size = 64 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 480000 # current passlib default + min_rounds = 1 # really, this should be higher. 
+ max_rounds = 4294967295 # 32-bit integer limit + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, config=False): + chk = None if config else self.checksum + return uh.render_mc3(self.ident, self.rounds, self.salt, chk) + + #=================================================================== + # backend + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", '$sha1$1$Wq3GL2Vp$C8U25GvfHS8qGHim' + 'ExLaiSFlGkAe'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string(config=True) + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. + return self._calc_checksum_builtin(secret) + if not hash.startswith(config) or len(hash) != len(config) + 29: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-28:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + if _BNULL in secret: + raise uh.exc.NullPasswordError(self) + rounds = self.rounds + # NOTE: this seed value is NOT the same as the config string + result = (u("%s$sha1$%s") % (self.salt, rounds)).encode("ascii") + # NOTE: this algorithm is essentially PBKDF1, modified to use HMAC. 
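+        # for reference, a stdlib-only sketch of the loop below (illustrative
+        # only -- the h64 transposed encoding is applied afterwards, and the
+        # function name here is a placeholder):
+        ##import hashlib, hmac
+        ##def raw_sha1_crypt(secret, salt, rounds):
+        ##    result = ("%s$sha1$%d" % (salt, rounds)).encode("ascii")
+        ##    for _ in range(rounds):
+        ##        result = hmac.new(secret, result, hashlib.sha1).digest()
+        ##    return result  # 20 raw bytes -> 28-char h64 checksum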
+ keyed_hmac = compile_hmac("sha1", secret) + for _ in irange(rounds): + result = keyed_hmac(result) + return h64.encode_transposed_bytes(result, self._chk_offsets).decode("ascii") + + _chk_offsets = [ + 2,1,0, + 5,4,3, + 8,7,6, + 11,10,9, + 14,13,12, + 17,16,15, + 0,19,18, + ] + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/sha2_crypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/sha2_crypt.py new file mode 100644 index 0000000..e6060c5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/sha2_crypt.py @@ -0,0 +1,534 @@ +"""passlib.handlers.sha2_crypt - SHA256-Crypt / SHA512-Crypt""" +#============================================================================= +# imports +#============================================================================= +# core +import hashlib +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import safe_crypt, test_crypt, \ + repeat_string, to_unicode +from passlib.utils.binary import h64 +from passlib.utils.compat import byte_elem_value, u, \ + uascii_to_str, unicode +import passlib.utils.handlers as uh +# local +__all__ = [ + "sha512_crypt", + "sha256_crypt", +] + +#============================================================================= +# pure-python backend, used by both sha256_crypt & sha512_crypt +# when crypt.crypt() backend is not available. +#============================================================================= +_BNULL = b'\x00' + +# pre-calculated offsets used to speed up C digest stage (see notes below). +# sequence generated using the following: + ##perms_order = "p,pp,ps,psp,sp,spp".split(",") + ##def offset(i): + ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + + ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) + ## return perms_order.index(key) + ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] +_c_digest_offsets = ( + (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), + (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), + (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), + ) + +# map used to transpose bytes when encoding final sha256_crypt digest +_256_transpose_map = ( + 20, 10, 0, 11, 1, 21, 2, 22, 12, 23, 13, 3, 14, 4, 24, 5, + 25, 15, 26, 16, 6, 17, 7, 27, 8, 28, 18, 29, 19, 9, 30, 31, +) + +# map used to transpose bytes when encoding final sha512_crypt digest +_512_transpose_map = ( + 42, 21, 0, 1, 43, 22, 23, 2, 44, 45, 24, 3, 4, 46, 25, 26, + 5, 47, 48, 27, 6, 7, 49, 28, 29, 8, 50, 51, 30, 9, 10, 52, + 31, 32, 11, 53, 54, 33, 12, 13, 55, 34, 35, 14, 56, 57, 36, 15, + 16, 58, 37, 38, 17, 59, 60, 39, 18, 19, 61, 40, 41, 20, 62, 63, +) + +def _raw_sha2_crypt(pwd, salt, rounds, use_512=False): + """perform raw sha256-crypt / sha512-crypt + + this function provides a pure-python implementation of the internals + for the SHA256-Crypt and SHA512-Crypt algorithms; it doesn't + handle any of the parsing/validation of the hash strings themselves. 
+ + :arg pwd: password chars/bytes to hash + :arg salt: salt chars to use + :arg rounds: linear rounds cost + :arg use_512: use sha512-crypt instead of sha256-crypt mode + + :returns: + encoded checksum chars + """ + #=================================================================== + # init & validate inputs + #=================================================================== + + # NOTE: the setup portion of this algorithm scales ~linearly in time + # with the size of the password, making it vulnerable to a DOS from + # unreasonably large inputs. the following code has some optimizations + # which would make things even worse, using O(pwd_len**2) memory + # when calculating digest P. + # + # to mitigate these two issues: 1) this code switches to a + # O(pwd_len)-memory algorithm for passwords that are much larger + # than average, and 2) Passlib enforces a library-wide max limit on + # the size of passwords it will allow, to prevent this algorithm and + # others from being DOSed in this way (see passlib.exc.PasswordSizeError + # for details). + + # validate secret + if isinstance(pwd, unicode): + # XXX: not sure what official unicode policy is, using this as default + pwd = pwd.encode("utf-8") + assert isinstance(pwd, bytes) + if _BNULL in pwd: + raise uh.exc.NullPasswordError(sha512_crypt if use_512 else sha256_crypt) + pwd_len = len(pwd) + + # validate rounds + assert 1000 <= rounds <= 999999999, "invalid rounds" + # NOTE: spec says out-of-range rounds should be clipped, instead of + # causing an error. this function assumes that's been taken care of + # by the handler class. + + # validate salt + assert isinstance(salt, unicode), "salt not unicode" + salt = salt.encode("ascii") + salt_len = len(salt) + assert salt_len < 17, "salt too large" + # NOTE: spec says salts larger than 16 bytes should be truncated, + # instead of causing an error. this function assumes that's been + # taken care of by the handler class. + + # load sha256/512 specific constants + if use_512: + hash_const = hashlib.sha512 + transpose_map = _512_transpose_map + else: + hash_const = hashlib.sha256 + transpose_map = _256_transpose_map + + #=================================================================== + # digest B - used as subinput to digest A + #=================================================================== + db = hash_const(pwd + salt + pwd).digest() + + #=================================================================== + # digest A - used to initialize first round of digest C + #=================================================================== + # start out with pwd + salt + a_ctx = hash_const(pwd + salt) + a_ctx_update = a_ctx.update + + # add pwd_len bytes of b, repeating b as many times as needed. + a_ctx_update(repeat_string(db, pwd_len)) + + # for each bit in pwd_len: add b if it's 1, or pwd if it's 0 + i = pwd_len + while i: + a_ctx_update(db if i & 1 else pwd) + i >>= 1 + + # finish A + da = a_ctx.digest() + + #=================================================================== + # digest P from password - used instead of password itself + # when calculating digest C. + #=================================================================== + if pwd_len < 96: + # this method is faster under python, but uses O(pwd_len**2) memory; + # so we don't use it for larger passwords to avoid a potential DOS. + dp = repeat_string(hash_const(pwd * pwd_len).digest(), pwd_len) + else: + # this method is slower under python, but uses a fixed amount of memory. 
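+        # for reference, repeat_string() tiles its input out to the requested
+        # size; roughly (a sketch -- the real helper lives in passlib.utils):
+        ##def repeat_string(source, size):
+        ##    mult = -(-size // len(source))  # ceil(size / len(source))
+        ##    return (source * mult)[:size]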
+ tmp_ctx = hash_const(pwd) + tmp_ctx_update = tmp_ctx.update + i = pwd_len-1 + while i: + tmp_ctx_update(pwd) + i -= 1 + dp = repeat_string(tmp_ctx.digest(), pwd_len) + assert len(dp) == pwd_len + + #=================================================================== + # digest S - used instead of salt itself when calculating digest C + #=================================================================== + ds = hash_const(salt * (16 + byte_elem_value(da[0]))).digest()[:salt_len] + assert len(ds) == salt_len, "salt_len somehow > hash_len!" + + #=================================================================== + # digest C - for a variable number of rounds, combine A, S, and P + # digests in various ways; in order to burn CPU time. + #=================================================================== + + # NOTE: the original SHA256/512-Crypt specification performs the C digest + # calculation using the following loop: + # + ##dc = da + ##i = 0 + ##while i < rounds: + ## tmp_ctx = hash_const(dp if i & 1 else dc) + ## if i % 3: + ## tmp_ctx.update(ds) + ## if i % 7: + ## tmp_ctx.update(dp) + ## tmp_ctx.update(dc if i & 1 else dp) + ## dc = tmp_ctx.digest() + ## i += 1 + # + # The code Passlib uses (below) implements an equivalent algorithm, + # it's just been heavily optimized to pre-calculate a large number + # of things beforehand. It works off of a couple of observations + # about the original algorithm: + # + # 1. each round is a combination of 'dc', 'ds', and 'dp'; determined + # by the whether 'i' a multiple of 2,3, and/or 7. + # 2. since lcm(2,3,7)==42, the series of combinations will repeat + # every 42 rounds. + # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; + # while odd rounds 1-41 consist of hash(round-specific-constant + dc) + # + # Using these observations, the following code... + # * calculates the round-specific combination of ds & dp for each round 0-41 + # * runs through as many 42-round blocks as possible + # * runs through as many pairs of rounds as possible for remaining rounds + # * performs once last round if the total rounds should be odd. + # + # this cuts out a lot of the control overhead incurred when running the + # original loop 40,000+ times in python, resulting in ~20% increase in + # speed under CPython (though still 2x slower than glibc crypt) + + # prepare the 6 combinations of ds & dp which are needed + # (order of 'perms' must match how _c_digest_offsets was generated) + dp_dp = dp+dp + dp_ds = dp+ds + perms = [dp, dp_dp, dp_ds, dp_ds+dp, ds+dp, ds+dp_dp] + + # build up list of even-round & odd-round constants, + # and store in 21-element list as (even,odd) pairs. 
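+    # worked example (sketch): for rounds=1000, divmod(1000, 42) == (23, 34),
+    # i.e. 23 full 42-round blocks, then 34 leftover rounds = 17 pairs + no
+    # trailing odd round.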
+ data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] + + # perform as many full 42-round blocks as possible + dc = da + blocks, tail = divmod(rounds, 42) + while blocks: + for even, odd in data: + dc = hash_const(odd + hash_const(dc + even).digest()).digest() + blocks -= 1 + + # perform any leftover rounds + if tail: + # perform any pairs of rounds + pairs = tail>>1 + for even, odd in data[:pairs]: + dc = hash_const(odd + hash_const(dc + even).digest()).digest() + + # if rounds was odd, do one last round (since we started at 0, + # last round will be an even-numbered round) + if tail & 1: + dc = hash_const(dc + data[pairs][0]).digest() + + #=================================================================== + # encode digest using appropriate transpose map + #=================================================================== + return h64.encode_transposed_bytes(dc, transpose_map).decode("ascii") + +#============================================================================= +# handlers +#============================================================================= +_UROUNDS = u("rounds=") +_UDOLLAR = u("$") +_UZERO = u("0") + +class _SHA2_Common(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, + uh.GenericHandler): + """class containing common code shared by sha256_crypt & sha512_crypt""" + #=================================================================== + # class attrs + #=================================================================== + # name - set by subclass + setting_kwds = ("salt", "rounds", "implicit_rounds", "salt_size") + # ident - set by subclass + checksum_chars = uh.HASH64_CHARS + # checksum_size - set by subclass + + max_salt_size = 16 + salt_chars = uh.HASH64_CHARS + + min_rounds = 1000 # bounds set by spec + max_rounds = 999999999 # bounds set by spec + rounds_cost = "linear" + + _cdb_use_512 = False # flag for _calc_digest_builtin() + _rounds_prefix = None # ident + _UROUNDS + + #=================================================================== + # methods + #=================================================================== + implicit_rounds = False + + def __init__(self, implicit_rounds=None, **kwds): + super(_SHA2_Common, self).__init__(**kwds) + # if user calls hash() w/ 5000 rounds, default to compact form. + if implicit_rounds is None: + implicit_rounds = (self.use_defaults and self.rounds == 5000) + self.implicit_rounds = implicit_rounds + + def _parse_salt(self, salt): + # required per SHA2-crypt spec -- truncate config salts rather than throwing error + return self._norm_salt(salt, relaxed=self.checksum is None) + + def _parse_rounds(self, rounds): + # required per SHA2-crypt spec -- clip config rounds rather than throwing error + return self._norm_rounds(rounds, relaxed=self.checksum is None) + + @classmethod + def from_string(cls, hash): + # basic format this parses - + # $5$[rounds=$][$] + + # TODO: this *could* use uh.parse_mc3(), except that the rounds + # portion has a slightly different grammar. + + # convert to unicode, check for ident prefix, split on dollar signs. 
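+        # e.g. (sketch) u"$5$rounds=80000$wnsT7Yr92oJoP28r$..." yields
+        #   parts == [u"rounds=80000", u"wnsT7Yr92oJoP28r", u"..."],
+        # while u"$5$wnsT7Yr92oJoP28r$..." has no rounds field, and falls
+        # through to the implicit rounds=5000 case below.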
+ hash = to_unicode(hash, "ascii", "hash") + ident = cls.ident + if not hash.startswith(ident): + raise uh.exc.InvalidHashError(cls) + assert len(ident) == 3 + parts = hash[3:].split(_UDOLLAR) + + # extract rounds value + if parts[0].startswith(_UROUNDS): + assert len(_UROUNDS) == 7 + rounds = parts.pop(0)[7:] + if rounds.startswith(_UZERO) and rounds != _UZERO: + raise uh.exc.ZeroPaddedRoundsError(cls) + rounds = int(rounds) + implicit_rounds = False + else: + rounds = 5000 + implicit_rounds = True + + # rest should be salt and checksum + if len(parts) == 2: + salt, chk = parts + elif len(parts) == 1: + salt = parts[0] + chk = None + else: + raise uh.exc.MalformedHashError(cls) + + # return new object + return cls( + rounds=rounds, + salt=salt, + checksum=chk or None, + implicit_rounds=implicit_rounds, + ) + + def to_string(self): + if self.rounds == 5000 and self.implicit_rounds: + hash = u("%s%s$%s") % (self.ident, self.salt, + self.checksum or u('')) + else: + hash = u("%srounds=%d$%s$%s") % (self.ident, self.rounds, + self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backends + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + + #: test hash for OS detection -- provided by subclass + _test_hash = None + + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt(*cls._test_hash): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string() + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. + return self._calc_checksum_builtin(secret) + # NOTE: avoiding full parsing routine via from_string().checksum, + # and just extracting the bit we need. + cs = self.checksum_size + if not hash.startswith(self.ident) or hash[-cs-1] != _UDOLLAR: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-cs:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + return _raw_sha2_crypt(secret, self.salt, self.rounds, + self._cdb_use_512) + + #=================================================================== + # eoc + #=================================================================== + +class sha256_crypt(_SHA2_Common): + """This class implements the SHA256-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 535000, must be between 1000 and 999999999, inclusive. + + .. 
note:: + per the official specification, when the rounds parameter is set to 5000, + it may be omitted from the hash string. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. + commented out, currently only supported by :meth:`hash`, and not via :meth:`using`: + + :type implicit_rounds: bool + :param implicit_rounds: + this is an internal option which generally doesn't need to be touched. + + this flag determines whether the hash should omit the rounds parameter + when encoding it to a string; this is only permitted by the spec for rounds=5000, + and the flag is ignored otherwise. the spec requires the two different + encodings be preserved as they are, instead of normalizing them. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "sha256_crypt" + ident = u("$5$") + checksum_size = 43 + # NOTE: using 25/75 weighting of builtin & os_crypt backends + default_rounds = 535000 + + #=================================================================== + # backends + #=================================================================== + _test_hash = ("test", "$5$rounds=1000$test$QmQADEXMG8POI5W" + "Dsaeho0P36yK3Tcrgboabng6bkb/") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# sha 512 crypt +#============================================================================= +class sha512_crypt(_SHA2_Common): + """This class implements the SHA512-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 656000, must be between 1000 and 999999999, inclusive. + + .. note:: + per the official specification, when the rounds parameter is set to 5000, + it may be omitted from the hash string. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. + commented out, currently only supported by :meth:`hash`, and not via :meth:`using`: + + :type implicit_rounds: bool + :param implicit_rounds: + this is an internal option which generally doesn't need to be touched. 
+ + this flag determines whether the hash should omit the rounds parameter + when encoding it to a string; this is only permitted by the spec for rounds=5000, + and the flag is ignored otherwise. the spec requires the two different + encodings be preserved as they are, instead of normalizing them. + """ + + #=================================================================== + # class attrs + #=================================================================== + name = "sha512_crypt" + ident = u("$6$") + checksum_size = 86 + _cdb_use_512 = True + # NOTE: using 25/75 weighting of builtin & os_crypt backends + default_rounds = 656000 + + #=================================================================== + # backend + #=================================================================== + _test_hash = ("test", "$6$rounds=1000$test$2M/Lx6Mtobqj" + "Ljobw0Wmo4Q5OFx5nVLJvmgseatA6oMn" + "yWeBdRDx4DU.1H3eGmse6pgsOgDisWBG" + "I5c7TZauS0") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/sun_md5_crypt.py b/venv/lib/python3.12/site-packages/passlib/handlers/sun_md5_crypt.py new file mode 100644 index 0000000..0eeb4e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/sun_md5_crypt.py @@ -0,0 +1,363 @@ +"""passlib.handlers.sun_md5_crypt - Sun's Md5 Crypt, used on Solaris + +.. warning:: + + This implementation may not reproduce + the original Solaris behavior in some border cases. + See documentation for details. +""" + +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode +from passlib.utils.binary import h64 +from passlib.utils.compat import byte_elem_value, irange, u, \ + uascii_to_str, unicode, str_to_bascii +import passlib.utils.handlers as uh +# local +__all__ = [ + "sun_md5_crypt", +] + +#============================================================================= +# backend +#============================================================================= +# constant data used by alg - Hamlet act 3 scene 1 + null char +# exact bytes as in http://www.ibiblio.org/pub/docs/books/gutenberg/etext98/2ws2610.txt +# from Project Gutenberg. + +MAGIC_HAMLET = ( + b"To be, or not to be,--that is the question:--\n" + b"Whether 'tis nobler in the mind to suffer\n" + b"The slings and arrows of outrageous fortune\n" + b"Or to take arms against a sea of troubles,\n" + b"And by opposing end them?--To die,--to sleep,--\n" + b"No more; and by a sleep to say we end\n" + b"The heartache, and the thousand natural shocks\n" + b"That flesh is heir to,--'tis a consummation\n" + b"Devoutly to be wish'd. To die,--to sleep;--\n" + b"To sleep! 
perchance to dream:--ay, there's the rub;\n" + b"For in that sleep of death what dreams may come,\n" + b"When we have shuffled off this mortal coil,\n" + b"Must give us pause: there's the respect\n" + b"That makes calamity of so long life;\n" + b"For who would bear the whips and scorns of time,\n" + b"The oppressor's wrong, the proud man's contumely,\n" + b"The pangs of despis'd love, the law's delay,\n" + b"The insolence of office, and the spurns\n" + b"That patient merit of the unworthy takes,\n" + b"When he himself might his quietus make\n" + b"With a bare bodkin? who would these fardels bear,\n" + b"To grunt and sweat under a weary life,\n" + b"But that the dread of something after death,--\n" + b"The undiscover'd country, from whose bourn\n" + b"No traveller returns,--puzzles the will,\n" + b"And makes us rather bear those ills we have\n" + b"Than fly to others that we know not of?\n" + b"Thus conscience does make cowards of us all;\n" + b"And thus the native hue of resolution\n" + b"Is sicklied o'er with the pale cast of thought;\n" + b"And enterprises of great pith and moment,\n" + b"With this regard, their currents turn awry,\n" + b"And lose the name of action.--Soft you now!\n" + b"The fair Ophelia!--Nymph, in thy orisons\n" + b"Be all my sins remember'd.\n\x00" #<- apparently null at end of C string is included (test vector won't pass otherwise) +) + +# NOTE: these sequences are pre-calculated iteration ranges used by X & Y loops w/in rounds function below +xr = irange(7) +_XY_ROUNDS = [ + tuple((i,i,i+3) for i in xr), # xrounds 0 + tuple((i,i+1,i+4) for i in xr), # xrounds 1 + tuple((i,i+8,(i+11)&15) for i in xr), # yrounds 0 + tuple((i,(i+9)&15, (i+12)&15) for i in xr), # yrounds 1 +] +del xr + +def raw_sun_md5_crypt(secret, rounds, salt): + """given secret & salt, return encoded sun-md5-crypt checksum""" + global MAGIC_HAMLET + assert isinstance(secret, bytes) + assert isinstance(salt, bytes) + + # validate rounds + if rounds <= 0: + rounds = 0 + real_rounds = 4096 + rounds + # NOTE: spec seems to imply max 'rounds' is 2**32-1 + + # generate initial digest to start off round 0. + # NOTE: algorithm 'salt' includes full config string w/ trailing "$" + result = md5(secret + salt).digest() + assert len(result) == 16 + + # NOTE: many things in this function have been inlined (to speed up the loop + # as much as possible), to the point that this code barely resembles + # the algorithm as described in the docs. in particular: + # + # * all accesses to a given bit have been inlined using the formula + # rbitval(bit) = (rval((bit>>3) & 15) >> (bit & 7)) & 1 + # + # * the calculation of coinflip value R has been inlined + # + # * the conditional division of coinflip value V has been inlined as + # a shift right of 0 or 1. + # + # * the i, i+3, etc iterations are precalculated in lists. + # + # * the round-based conditional division of x & y is now performed + # by choosing an appropriate precalculated list, so that it only + # calculates the 7 bits which will actually be used. 
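+    # for reference, the bit-lookup being inlined, written out (sketch):
+    ##def rbitval(rval, bit):
+    ##    # value of the given bit of the 16-byte digest, per the formula above
+    ##    return (rval((bit >> 3) & 15) >> (bit & 7)) & 1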
+ # + X_ROUNDS_0, X_ROUNDS_1, Y_ROUNDS_0, Y_ROUNDS_1 = _XY_ROUNDS + + # NOTE: % appears to be *slightly* slower than &, so we prefer & if possible + + round = 0 + while round < real_rounds: + # convert last result byte string to list of byte-ints for easy access + rval = [ byte_elem_value(c) for c in result ].__getitem__ + + # build up X bit by bit + x = 0 + xrounds = X_ROUNDS_1 if (rval((round>>3) & 15)>>(round & 7)) & 1 else X_ROUNDS_0 + for i, ia, ib in xrounds: + a = rval(ia) + b = rval(ib) + v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) + x |= ((rval((v>>3)&15)>>(v&7))&1) << i + + # build up Y bit by bit + y = 0 + yrounds = Y_ROUNDS_1 if (rval(((round+64)>>3) & 15)>>(round & 7)) & 1 else Y_ROUNDS_0 + for i, ia, ib in yrounds: + a = rval(ia) + b = rval(ib) + v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) + y |= ((rval((v>>3)&15)>>(v&7))&1) << i + + # extract x'th and y'th bit, xoring them together to yeild "coin flip" + coin = ((rval(x>>3) >> (x&7)) ^ (rval(y>>3) >> (y&7))) & 1 + + # construct hash for this round + h = md5(result) + if coin: + h.update(MAGIC_HAMLET) + h.update(unicode(round).encode("ascii")) + result = h.digest() + + round += 1 + + # encode output + return h64.encode_transposed_bytes(result, _chk_offsets) + +# NOTE: same offsets as md5_crypt +_chk_offsets = ( + 12,6,0, + 13,7,1, + 14,8,2, + 15,9,3, + 5,10,4, + 11, +) + +#============================================================================= +# handler +#============================================================================= +class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the Sun-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a salt will be autogenerated (this is recommended). + If specified, it must be drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + If no salt is specified, this parameter can be used to specify + the size (in characters) of the autogenerated salt. + It currently defaults to 8. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 34000, must be between 0 and 4294963199, inclusive. + + :type bare_salt: bool + :param bare_salt: + Optional flag used to enable an alternate salt digest behavior + used by some hash strings in this scheme. + This flag can be ignored by most users. + Defaults to ``False``. + (see :ref:`smc-bare-salt` for details). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "sun_md5_crypt" + setting_kwds = ("salt", "rounds", "bare_salt", "salt_size") + checksum_chars = uh.HASH64_CHARS + checksum_size = 22 + + # NOTE: docs say max password length is 255. 
+ # release 9u2 + + # NOTE: not sure if original crypt has a salt size limit, + # all instances that have been seen use 8 chars. + default_salt_size = 8 + max_salt_size = None + salt_chars = uh.HASH64_CHARS + + default_rounds = 34000 # current passlib default + min_rounds = 0 + max_rounds = 4294963199 ##2**32-1-4096 + # XXX: ^ not sure what it does if past this bound... does 32 int roll over? + rounds_cost = "linear" + + ident_values = (u("$md5$"), u("$md5,")) + + #=================================================================== + # instance attrs + #=================================================================== + bare_salt = False # flag to indicate legacy hashes that lack "$$" suffix + + #=================================================================== + # constructor + #=================================================================== + def __init__(self, bare_salt=False, **kwds): + self.bare_salt = bare_salt + super(sun_md5_crypt, self).__init__(**kwds) + + #=================================================================== + # internal helpers + #=================================================================== + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return hash.startswith(cls.ident_values) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + + # + # detect if hash specifies rounds value. + # if so, parse and validate it. + # by end, set 'rounds' to int value, and 'tail' containing salt+chk + # + if hash.startswith(u("$md5$")): + rounds = 0 + salt_idx = 5 + elif hash.startswith(u("$md5,rounds=")): + idx = hash.find(u("$"), 12) + if idx == -1: + raise uh.exc.MalformedHashError(cls, "unexpected end of rounds") + rstr = hash[12:idx] + try: + rounds = int(rstr) + except ValueError: + raise uh.exc.MalformedHashError(cls, "bad rounds") + if rstr != unicode(rounds): + raise uh.exc.ZeroPaddedRoundsError(cls) + if rounds == 0: + # NOTE: not sure if this is forbidden by spec or not; + # but allowing it would complicate things, + # and it should never occur anyways. + raise uh.exc.MalformedHashError(cls, "explicit zero rounds") + salt_idx = idx+1 + else: + raise uh.exc.InvalidHashError(cls) + + # + # salt/checksum separation is kinda weird, + # to deal cleanly with some backward-compatible workarounds + # implemented by original implementation. 
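+        # the four layouts handled below, sketched with placeholder fields:
+        #   "$md5$<salt>"        -> bare_salt=True,  no checksum (''-config)
+        #   "$md5$<salt>$"       -> bare_salt=False, no checksum ($-config)
+        #   "$md5$<salt>$$<chk>" -> bare_salt=False, checksum present
+        #   "$md5$<salt>$<chk>"  -> bare_salt=True,  checksum present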
+ # + chk_idx = hash.rfind(u("$"), salt_idx) + if chk_idx == -1: + # ''-config for $-hash + salt = hash[salt_idx:] + chk = None + bare_salt = True + elif chk_idx == len(hash)-1: + if chk_idx > salt_idx and hash[-2] == u("$"): + raise uh.exc.MalformedHashError(cls, "too many '$' separators") + # $-config for $$-hash + salt = hash[salt_idx:-1] + chk = None + bare_salt = False + elif chk_idx > 0 and hash[chk_idx-1] == u("$"): + # $$-hash + salt = hash[salt_idx:chk_idx-1] + chk = hash[chk_idx+1:] + bare_salt = False + else: + # $-hash + salt = hash[salt_idx:chk_idx] + chk = hash[chk_idx+1:] + bare_salt = True + + return cls( + rounds=rounds, + salt=salt, + checksum=chk, + bare_salt=bare_salt, + ) + + def to_string(self, _withchk=True): + ss = u('') if self.bare_salt else u('$') + rounds = self.rounds + if rounds > 0: + hash = u("$md5,rounds=%d$%s%s") % (rounds, self.salt, ss) + else: + hash = u("$md5$%s%s") % (self.salt, ss) + if _withchk: + chk = self.checksum + hash = u("%s$%s") % (hash, chk) + return uascii_to_str(hash) + + #=================================================================== + # primary interface + #=================================================================== + # TODO: if we're on solaris, check for native crypt() support. + # this will require extra testing, to make sure native crypt + # actually behaves correctly. of particular importance: + # when using ""-config, make sure to append "$x" to string. + + def _calc_checksum(self, secret): + # NOTE: no reference for how sun_md5_crypt handles unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + config = str_to_bascii(self.to_string(_withchk=False)) + return raw_sun_md5_crypt(secret, self.rounds, config).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/handlers/windows.py b/venv/lib/python3.12/site-packages/passlib/handlers/windows.py new file mode 100644 index 0000000..e17beba --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/handlers/windows.py @@ -0,0 +1,334 @@ +"""passlib.handlers.nthash - Microsoft Windows -related hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode, right_pad_string +from passlib.utils.compat import unicode +from passlib.crypto.digest import lookup_hash +md4 = lookup_hash("md4").const +import passlib.utils.handlers as uh +# local +__all__ = [ + "lmhash", + "nthash", + "bsd_nthash", + "msdcc", + "msdcc2", +] + +#============================================================================= +# lanman hash +#============================================================================= +class lmhash(uh.TruncateMixin, uh.HasEncodingContext, uh.StaticHandler): + """This class implements the Lan Manager Password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. 
+ + The :meth:`~passlib.ifc.PasswordHash.using` method accepts a single + optional keyword: + + :param bool truncate_error: + By default, this will silently truncate passwords larger than 14 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.verify` methods accept a single + optional keyword: + + :type encoding: str + :param encoding: + + This specifies what character encoding LMHASH should use when + calculating digest. It defaults to ``cp437``, the most + common encoding encountered. + + Note that while this class outputs digests in lower-case hexadecimal, + it will accept upper-case as well. + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "lmhash" + setting_kwds = ("truncate_error",) + + #-------------------- + # GenericHandler + #-------------------- + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + #-------------------- + # TruncateMixin + #-------------------- + truncate_size = 14 + + #-------------------- + # custom + #-------------------- + default_encoding = "cp437" + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + # check for truncation (during .hash() calls only) + if self.use_defaults: + self._check_truncate_policy(secret) + + return hexlify(self.raw(secret, self.encoding)).decode("ascii") + + # magic constant used by LMHASH + _magic = b"KGS!@#$%" + + @classmethod + def raw(cls, secret, encoding=None): + """encode password using LANMAN hash algorithm. + + :type secret: unicode or utf-8 encoded bytes + :arg secret: secret to hash + :type encoding: str + :arg encoding: + optional encoding to use for unicode inputs. + this defaults to ``cp437``, which is the + common case for most situations. + + :returns: returns string of raw bytes + """ + if not encoding: + encoding = cls.default_encoding + # some nice empircal data re: different encodings is at... + # http://www.openwall.com/lists/john-dev/2011/08/01/2 + # http://www.freerainbowtables.com/phpBB3/viewtopic.php?t=387&p=12163 + from passlib.crypto.des import des_encrypt_block + MAGIC = cls._magic + if isinstance(secret, unicode): + # perform uppercasing while we're still unicode, + # to give a better shot at getting non-ascii chars right. + # (though some codepages do NOT upper-case the same as unicode). + secret = secret.upper().encode(encoding) + elif isinstance(secret, bytes): + # FIXME: just trusting ascii upper will work? + # and if not, how to do codepage specific case conversion? + # we could decode first using , + # but *that* might not always be right. 
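+            # (note: bytes.upper() only folds ASCII a-z; codepage-specific
+            #  letters pass through unchanged, which is what the FIXME above
+            #  is worried about.)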
+ secret = secret.upper() + else: + raise TypeError("secret must be unicode or bytes") + secret = right_pad_string(secret, 14) + return des_encrypt_block(secret[0:7], MAGIC) + \ + des_encrypt_block(secret[7:14], MAGIC) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# ntlm hash +#============================================================================= +class nthash(uh.StaticHandler): + """This class implements the NT Password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + + Note that while this class outputs lower-case hexadecimal digests, + it will accept upper-case digests as well. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "nthash" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret)).decode("ascii") + + @classmethod + def raw(cls, secret): + """encode password using MD4-based NTHASH algorithm + + :arg secret: secret as unicode or utf-8 encoded bytes + + :returns: returns string of raw bytes + """ + secret = to_unicode(secret, "utf-8", param="secret") + # XXX: found refs that say only first 128 chars are used. + return md4(secret.encode("utf-16-le")).digest() + + @classmethod + def raw_nthash(cls, secret, hex=False): + warn("nthash.raw_nthash() is deprecated, and will be removed " + "in Passlib 1.8, please use nthash.raw() instead", + DeprecationWarning) + ret = nthash.raw(secret) + return hexlify(ret).decode("ascii") if hex else ret + + #=================================================================== + # eoc + #=================================================================== + +bsd_nthash = uh.PrefixWrapper("bsd_nthash", nthash, prefix="$3$$", ident="$3$$", + doc="""The class support FreeBSD's representation of NTHASH + (which is compatible with the :ref:`modular-crypt-format`), + and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
+ """) + +##class ntlm_pair(object): +## "combined lmhash & nthash" +## name = "ntlm_pair" +## setting_kwds = () +## _hash_regex = re.compile(u"^(?P[0-9a-f]{32}):(?P[0-9][a-f]{32})$", +## re.I) +## +## @classmethod +## def identify(cls, hash): +## hash = to_unicode(hash, "latin-1", "hash") +## return len(hash) == 65 and cls._hash_regex.match(hash) is not None +## +## @classmethod +## def hash(cls, secret, config=None): +## if config is not None and not cls.identify(config): +## raise uh.exc.InvalidHashError(cls) +## return lmhash.hash(secret) + ":" + nthash.hash(secret) +## +## @classmethod +## def verify(cls, secret, hash): +## hash = to_unicode(hash, "ascii", "hash") +## m = cls._hash_regex.match(hash) +## if not m: +## raise uh.exc.InvalidHashError(cls) +## lm, nt = m.group("lm", "nt") +## # NOTE: verify against both in case encoding issue +## # causes one not to match. +## return lmhash.verify(secret, lm) or nthash.verify(secret, nt) + +#============================================================================= +# msdcc v1 +#============================================================================= +class msdcc(uh.HasUserContext, uh.StaticHandler): + """This class implements Microsoft's Domain Cached Credentials password hash, + and follows the :ref:`password-hash-api`. + + It has a fixed number of rounds, and uses the associated + username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following optional keywords: + + :type user: str + :param user: + String containing name of user account this password is associated with. + This is required to properly calculate the hash. + + This keyword is case-insensitive, and should contain just the username + (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``). + + Note that while this class outputs lower-case hexadecimal digests, + it will accept upper-case digests as well. + """ + name = "msdcc" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret, self.user)).decode("ascii") + + @classmethod + def raw(cls, secret, user): + """encode password using mscash v1 algorithm + + :arg secret: secret as unicode or utf-8 encoded bytes + :arg user: username to use as salt + + :returns: returns string of raw bytes + """ + secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") + user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") + return md4(md4(secret).digest() + user).digest() + +#============================================================================= +# msdcc2 aka mscash2 +#============================================================================= +class msdcc2(uh.HasUserContext, uh.StaticHandler): + """This class implements version 2 of Microsoft's Domain Cached Credentials + password hash, and follows the :ref:`password-hash-api`. + + It has a fixed number of rounds, and uses the associated + username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following extra keyword: + + :type user: str + :param user: + String containing name of user account this password is associated with. + This is required to properly calculate the hash. + + This keyword is case-insensitive, and should contain just the username + (e.g. 
+
+#=============================================================================
+# msdcc2 aka mscash2
+#=============================================================================
+class msdcc2(uh.HasUserContext, uh.StaticHandler):
+    """This class implements version 2 of Microsoft's Domain Cached Credentials
+    password hash, and follows the :ref:`password-hash-api`.
+
+    It has a fixed number of rounds, and uses the associated
+    username as the salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods
+    have the following extra keyword:
+
+    :type user: str
+    :param user:
+        String containing name of user account this password is associated with.
+        This is required to properly calculate the hash.
+
+        This keyword is case-insensitive, and should contain just the username
+        (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``).
+    """
+    name = "msdcc2"
+    checksum_chars = uh.HEX_CHARS
+    checksum_size = 32
+
+    @classmethod
+    def _norm_hash(cls, hash):
+        return hash.lower()
+
+    def _calc_checksum(self, secret):
+        return hexlify(self.raw(secret, self.user)).decode("ascii")
+
+    @classmethod
+    def raw(cls, secret, user):
+        """encode password using msdcc v2 algorithm
+
+        :type secret: unicode or utf-8 bytes
+        :arg secret: secret
+
+        :type user: str
+        :arg user: username to use as salt
+
+        :returns: returns string of raw bytes
+        """
+        from passlib.crypto.digest import pbkdf2_hmac
+        secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le")
+        user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le")
+        tmp = md4(md4(secret).digest() + user).digest()
+        return pbkdf2_hmac("sha1", tmp, user, 10240, 16)
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/hash.py b/venv/lib/python3.12/site-packages/passlib/hash.py
new file mode 100644
index 0000000..2cc0628
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/hash.py
@@ -0,0 +1,68 @@
+"""
+passlib.hash - proxy object mapping hash scheme names -> handlers
+
+==================
+***** NOTICE *****
+==================
+
+This module does not actually contain any hashes. This file
+is a stub that replaces itself with a proxy object.
+
+This proxy object (passlib.registry._PasslibRegistryProxy)
+handles lazy-loading hashes as they are requested.
+
+The actual implementation of the various hashes is stored elsewhere,
+mainly in the submodules of the ``passlib.handlers`` subpackage.
+"""
+
+#=============================================================================
+# import proxy object and replace this module
+#=============================================================================
+
+# XXX: if any platform has problem w/ lazy modules, could support 'non-lazy'
+# version which just imports all schemes known to list_crypt_handlers()
+
+from passlib.registry import _proxy
+import sys
+sys.modules[__name__] = _proxy
+
+#=============================================================================
+# HACK: the following bit of code is unreachable, but its presence seems to
+# help make autocomplete work for certain IDEs such as PyCharm.
+# this list is automatically regenerated using $SOURCE/admin/regen.py +#============================================================================= + +#---------------------------------------------------- +# begin autocomplete hack (autogenerated 2016-11-10) +#---------------------------------------------------- +if False: + from passlib.handlers.argon2 import argon2 + from passlib.handlers.bcrypt import bcrypt, bcrypt_sha256 + from passlib.handlers.cisco import cisco_asa, cisco_pix, cisco_type7 + from passlib.handlers.des_crypt import bigcrypt, bsdi_crypt, crypt16, des_crypt + from passlib.handlers.digests import hex_md4, hex_md5, hex_sha1, hex_sha256, hex_sha512, htdigest + from passlib.handlers.django import django_bcrypt, django_bcrypt_sha256, django_des_crypt, django_disabled, django_pbkdf2_sha1, django_pbkdf2_sha256, django_salted_md5, django_salted_sha1 + from passlib.handlers.fshp import fshp + from passlib.handlers.ldap_digests import ldap_bcrypt, ldap_bsdi_crypt, ldap_des_crypt, ldap_md5, ldap_md5_crypt, ldap_plaintext, ldap_salted_md5, ldap_salted_sha1, ldap_salted_sha256, ldap_salted_sha512, ldap_sha1, ldap_sha1_crypt, ldap_sha256_crypt, ldap_sha512_crypt + from passlib.handlers.md5_crypt import apr_md5_crypt, md5_crypt + from passlib.handlers.misc import plaintext, unix_disabled, unix_fallback + from passlib.handlers.mssql import mssql2000, mssql2005 + from passlib.handlers.mysql import mysql323, mysql41 + from passlib.handlers.oracle import oracle10, oracle11 + from passlib.handlers.pbkdf2 import atlassian_pbkdf2_sha1, cta_pbkdf2_sha1, dlitz_pbkdf2_sha1, grub_pbkdf2_sha512, ldap_pbkdf2_sha1, ldap_pbkdf2_sha256, ldap_pbkdf2_sha512, pbkdf2_sha1, pbkdf2_sha256, pbkdf2_sha512 + from passlib.handlers.phpass import phpass + from passlib.handlers.postgres import postgres_md5 + from passlib.handlers.roundup import ldap_hex_md5, ldap_hex_sha1, roundup_plaintext + from passlib.handlers.scram import scram + from passlib.handlers.scrypt import scrypt + from passlib.handlers.sha1_crypt import sha1_crypt + from passlib.handlers.sha2_crypt import sha256_crypt, sha512_crypt + from passlib.handlers.sun_md5_crypt import sun_md5_crypt + from passlib.handlers.windows import bsd_nthash, lmhash, msdcc, msdcc2, nthash +#---------------------------------------------------- +# end autocomplete hack +#---------------------------------------------------- + +#============================================================================= +# eoc +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/hosts.py b/venv/lib/python3.12/site-packages/passlib/hosts.py new file mode 100644 index 0000000..1f137a2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/hosts.py @@ -0,0 +1,106 @@ +"""passlib.hosts""" +#============================================================================= +# imports +#============================================================================= +# core +from warnings import warn +# pkg +from passlib.context import LazyCryptContext +from passlib.exc import PasslibRuntimeWarning +from passlib import registry +from passlib.utils import has_crypt, unix_crypt_schemes +# local +__all__ = [ + "linux_context", "linux2_context", + "openbsd_context", + "netbsd_context", + "freebsd_context", + "host_context", +] + +#============================================================================= +# linux support +#============================================================================= + +# known 
platform names - linux2
+
+linux_context = linux2_context = LazyCryptContext(
+    schemes = [ "sha512_crypt", "sha256_crypt", "md5_crypt",
+                "des_crypt", "unix_disabled" ],
+    deprecated = [ "des_crypt" ],
+    )
+
+#=============================================================================
+# bsd support
+#=============================================================================
+
+# known platform names -
+#   freebsd2
+#   freebsd3
+#   freebsd4
+#   freebsd5
+#   freebsd6
+#   freebsd7
+#
+#   netbsd1
+
+# referencing source via - http://fxr.googlebit.com
+# freebsd 6,7,8 - des, md5, bcrypt, bsd_nthash
+# netbsd - des, ext, md5, bcrypt, sha1
+# openbsd - des, ext, md5, bcrypt
+
+freebsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsd_nthash",
+                                    "des_crypt", "unix_disabled"])
+
+openbsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsdi_crypt",
+                                    "des_crypt", "unix_disabled"])
+
+netbsd_context = LazyCryptContext(["bcrypt", "sha1_crypt", "md5_crypt",
+                                   "bsdi_crypt", "des_crypt", "unix_disabled"])
+
+# XXX: include darwin in this list? it's got a BSD crypt variant,
+# but that's not what it uses for user passwords.
+
+#=============================================================================
+# current host
+#=============================================================================
+if registry.os_crypt_present:
+    # NOTE: this is basically mimicking the output of os crypt(),
+    # except that it uses passlib's (usually stronger) default settings,
+    # and can be inspected and used much more flexibly.
+
+    def _iter_os_crypt_schemes():
+        """helper which iterates over supported os_crypt schemes"""
+        out = registry.get_supported_os_crypt_schemes()
+        if out:
+            # only offer disabled handler if there's another scheme in front,
+            # as this can't actually hash any passwords
+            out += ("unix_disabled",)
+        return out
+
+    host_context = LazyCryptContext(_iter_os_crypt_schemes())
+
+#=============================================================================
+# other platforms
+#=============================================================================
+
+# known platform strings -
+#   aix3
+#   aix4
+#   atheos
+#   beos5
+#   darwin
+#   generic
+#   hp-ux11
+#   irix5
+#   irix6
+#   mac
+#   next3
+#   os2emx
+#   riscos
+#   sunos5
+#   unixware7
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/ifc.py b/venv/lib/python3.12/site-packages/passlib/ifc.py
new file mode 100644
index 0000000..559d256
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/ifc.py
@@ -0,0 +1,353 @@
+"""passlib.ifc - abstract interfaces used by Passlib"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import logging; log = logging.getLogger(__name__)
+import sys
+# site
+# pkg
+from passlib.utils.decor import deprecated_method
+# local
+__all__ = [
+    "PasswordHash",
+]
+
+#=============================================================================
+# 2/3 compatibility helpers
+#=============================================================================
+def recreate_with_metaclass(meta):
+    """class decorator that re-creates class using metaclass"""
+    def builder(cls):
+        if meta is type(cls):
+            return cls
+        return meta(cls.__name__, cls.__bases__, cls.__dict__.copy())
+    return builder
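+
+# A minimal sketch of what the decorator above does (doctest style;
+# ``Example`` is an arbitrary illustration, not part of the library)::
+#
+#   >>> from abc import ABCMeta
+#   >>> @recreate_with_metaclass(ABCMeta)
+#   ... class Example(object):
+#   ...     pass
+#   >>> type(Example) is ABCMeta
+#   True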
+#============================================================================= +# PasswordHash interface +#============================================================================= +from abc import ABCMeta, abstractmethod, abstractproperty + +# TODO: make this actually use abstractproperty(), +# now that we dropped py25, 'abc' is always available. + +# XXX: rename to PasswordHasher? + +@recreate_with_metaclass(ABCMeta) +class PasswordHash(object): + """This class describes an abstract interface which all password hashes + in Passlib adhere to. Under Python 2.6 and up, this is an actual + Abstract Base Class built using the :mod:`!abc` module. + + See the Passlib docs for full documentation. + """ + #=================================================================== + # class attributes + #=================================================================== + + #--------------------------------------------------------------- + # general information + #--------------------------------------------------------------- + ##name + ##setting_kwds + ##context_kwds + + #: flag which indicates this hasher matches a "disabled" hash + #: (e.g. unix_disabled, or django_disabled); and doesn't actually + #: depend on the provided password. + is_disabled = False + + #: Should be None, or a positive integer indicating hash + #: doesn't support secrets larger than this value. + #: Whether hash throws error or silently truncates secret + #: depends on .truncate_error and .truncate_verify_reject flags below. + #: NOTE: calls may treat as boolean, since value will never be 0. + #: .. versionadded:: 1.7 + #: .. TODO: passlib 1.8: deprecate/rename this attr to "max_secret_size"? + truncate_size = None + + # NOTE: these next two default to the optimistic "ideal", + # most hashes in passlib have to default to False + # for backward compat and/or expected behavior with existing hashes. + + #: If True, .hash() should throw a :exc:`~passlib.exc.PasswordSizeError` for + #: any secrets larger than .truncate_size. Many hashers default to False + #: for historical / compatibility purposes, indicating they will silently + #: truncate instead. All such hashers SHOULD support changing + #: the policy via ``.using(truncate_error=True)``. + #: .. versionadded:: 1.7 + #: .. TODO: passlib 1.8: deprecate/rename this attr to "truncate_hash_error"? + truncate_error = True + + #: If True, .verify() should reject secrets larger than max_password_size. + #: Many hashers default to False for historical / compatibility purposes, + #: indicating they will match on the truncated portion instead. + #: .. 
versionadded:: 1.7.1
+    truncate_verify_reject = True
+
+    #---------------------------------------------------------------
+    # salt information -- if 'salt' in setting_kwds
+    #---------------------------------------------------------------
+    ##min_salt_size
+    ##max_salt_size
+    ##default_salt_size
+    ##salt_chars
+    ##default_salt_chars
+
+    #---------------------------------------------------------------
+    # rounds information -- if 'rounds' in setting_kwds
+    #---------------------------------------------------------------
+    ##min_rounds
+    ##max_rounds
+    ##default_rounds
+    ##rounds_cost
+
+    #---------------------------------------------------------------
+    # encoding info -- if 'encoding' in context_kwds
+    #---------------------------------------------------------------
+    ##default_encoding
+
+    #===================================================================
+    # primary methods
+    #===================================================================
+    @classmethod
+    @abstractmethod
+    def hash(cls, secret,  # *
+             **setting_and_context_kwds):  # pragma: no cover -- abstract method
+        r"""
+        Hash secret, returning result.
+        Should handle generating salt, etc, and should return string
+        containing identifier, salt & other configuration, as well as digest.
+
+        :param \\*\\*settings_kwds:
+
+            Pass in settings to customize configuration of resulting hash.
+
+            .. deprecated:: 1.7
+
+                Starting with Passlib 1.7, callers should no longer pass settings keywords
+                (e.g. ``rounds`` or ``salt``) directly to :meth:`!hash`; they should use
+                the ``.using(**settings).hash(secret)`` construction instead.
+
+                Support will be removed in Passlib 2.0.
+
+        :param \\*\\*context_kwds:
+
+            Specific algorithms may require context-specific information (such as the user login).
+        """
+        # FIXME: need stub for classes that define .encrypt() instead ...
+        # this should call .encrypt(), and check for recursion back to here.
+        raise NotImplementedError("must be implemented by subclass")
+
+    @deprecated_method(deprecated="1.7", removed="2.0", replacement=".hash()")
+    @classmethod
+    def encrypt(cls, *args, **kwds):
+        """
+        Legacy alias for :meth:`hash`.
+
+        .. deprecated:: 1.7
+            This method was renamed to :meth:`!hash` in version 1.7.
+            This alias will be removed in version 2.0, and should only
+            be used for compatibility with Passlib 1.3 - 1.6.
+        """
+        return cls.hash(*args, **kwds)
+
+    # XXX: could provide default implementation which hands value to
+    #      hash(), and then does constant-time comparison on the result
+    #      (after making sure both are the same string type)
+    @classmethod
+    @abstractmethod
+    def verify(cls, secret, hash, **context_kwds):  # pragma: no cover -- abstract method
+        """verify secret against hash, returns True/False"""
+        raise NotImplementedError("must be implemented by subclass")
+
+    #===================================================================
+    # configuration
+    #===================================================================
+    @classmethod
+    @abstractmethod
+    def using(cls, relaxed=False, **kwds):
+        """
+        Return another hasher object (typically a subclass of the current one),
+        which integrates the configuration options specified by ``kwds``.
+        This should *always* return a new object, even if no configuration options are changed.
+
+        .. todo::
+
+            document which options are accepted.
+
+        :returns:
+            typically returns a subclass for most hasher implementations.
+
+        .. todo::
+
+            add this method to main documentation.
+        """
+        raise NotImplementedError("must be implemented by subclass")
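+
+    # A minimal sketch of the hash/verify/using workflow above, shown with a
+    # concrete handler (sha256_crypt) purely for illustration::
+    #
+    #   >>> from passlib.hash import sha256_crypt
+    #   >>> custom = sha256_crypt.using(rounds=100000)   # configure first ...
+    #   >>> h = custom.hash("hunter2")                   # ... then hash ...
+    #   >>> sha256_crypt.verify("hunter2", h)            # ... verify later.
+    #   True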
+ """ + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # migration + #=================================================================== + @classmethod + def needs_update(cls, hash, secret=None): + """ + check if hash's configuration is outside desired bounds, + or contains some other internal option which requires + updating the password hash. + + :param hash: + hash string to examine + + :param secret: + optional secret known to have verified against the provided hash. + (this is used by some hashes to detect legacy algorithm mistakes). + + :return: + whether secret needs re-hashing. + + .. versionadded:: 1.7 + """ + # by default, always report that we don't need update + return False + + #=================================================================== + # additional methods + #=================================================================== + @classmethod + @abstractmethod + def identify(cls, hash): # pragma: no cover -- abstract method + """check if hash belongs to this scheme, returns True/False""" + raise NotImplementedError("must be implemented by subclass") + + @deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls, **setting_kwds): # pragma: no cover -- abstract method + """ + compile settings into a configuration string for genhash() + + .. deprecated:: 1.7 + + As of 1.7, this method is deprecated, and slated for complete removal in Passlib 2.0. + + For all known real-world uses, hashing a constant string + should provide equivalent functionality. + + This deprecation may be reversed if a use-case presents itself in the mean time. + """ + # NOTE: this fallback runs full hash alg, w/ whatever cost param is passed along. + # implementations (esp ones w/ variable cost) will want to subclass this + # with a constant-time implementation that just renders a config string. + if cls.context_kwds: + raise NotImplementedError("must be implemented by subclass") + return cls.using(**setting_kwds).hash("") + + @deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, **context): + """ + generated hash for secret, using settings from config/hash string + + .. deprecated:: 1.7 + + As of 1.7, this method is deprecated, and slated for complete removal in Passlib 2.0. + + This deprecation may be reversed if a use-case presents itself in the mean time. + """ + # XXX: if hashes reliably offered a .parse() method, could make a fallback for this. + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # undocumented methods / attributes + #=================================================================== + # the following entry points are used internally by passlib, + # and aren't documented as part of the exposed interface. + # they are subject to change between releases, + # but are documented here so there's a list of them *somewhere*. + + #--------------------------------------------------------------- + # extra metdata + #--------------------------------------------------------------- + + #: this attribute shouldn't be used by hashers themselves, + #: it's reserved for the CryptContext to track which hashers are deprecated. + #: Note the context will only set this on objects it owns (and generated by .using()), + #: and WONT set it on global objects. 
+
+    #===================================================================
+    # undocumented methods / attributes
+    #===================================================================
+    # the following entry points are used internally by passlib,
+    # and aren't documented as part of the exposed interface.
+    # they are subject to change between releases,
+    # but are documented here so there's a list of them *somewhere*.
+
+    #---------------------------------------------------------------
+    # extra metadata
+    #---------------------------------------------------------------
+
+    #: this attribute shouldn't be used by hashers themselves,
+    #: it's reserved for the CryptContext to track which hashers are deprecated.
+    #: Note the context will only set this on objects it owns (and generated by .using()),
+    #: and WONT set it on global objects.
+    #: [added in 1.7]
+    #: TODO: document this, or at least the use of testing for
+    #:       'CryptContext().handler().deprecated'
+    deprecated = False
+
+    #: optionally present if hasher corresponds to format built into Django.
+    #: this attribute (if not None) should be the Django 'algorithm' name.
+    #: also indicates to passlib.ext.django that (when installed in django),
+    #: django's native hasher should be used in preference to this one.
+    ## django_name
+
+    #---------------------------------------------------------------
+    # checksum information - defined for many hashes
+    #---------------------------------------------------------------
+    ## checksum_chars
+    ## checksum_size
+
+    #---------------------------------------------------------------
+    # experimental methods
+    #---------------------------------------------------------------
+
+    ##@classmethod
+    ##def normhash(cls, hash):
+    ##    """helper to clean up non-canonic instances of hash.
+    ##    currently only provided by bcrypt() to fix an historical passlib issue.
+    ##    """
+
+    # experimental helper to parse hash into components.
+    ##@classmethod
+    ##def parsehash(cls, hash, checksum=True, sanitize=False):
+    ##    """helper to parse hash into components, returns dict"""
+
+    # experimental helper to estimate bitsize of different hashes,
+    # implemented for GenericHandler, but may currently be off for some hashes.
+    # want to expand this into a way to programmatically compare
+    # "strengths" of different hashes and hash algorithms.
+    # still needs to have some factor for estimating relative cost per round,
+    # ala in the style of the scrypt whitepaper.
+    ##@classmethod
+    ##def bitsize(cls, **kwds):
+    ##    """returns dict mapping component -> bits contributed.
+    ##    components currently include checksum, salt, rounds.
+    ##    """
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+class DisabledHash(PasswordHash):
+    """
+    extended disabled-hash methods; only need be present if .is_disabled = True
+    """
+
+    is_disabled = True
+
+    @classmethod
+    def disable(cls, hash=None):
+        """
+        return string representing a 'disabled' hash;
+        optionally including previously enabled hash
+        (this is up to the individual scheme).
+ """ + # default behavior: ignore original hash, return standalone marker + return cls.hash("") + + @classmethod + def enable(cls, hash): + """ + given a disabled-hash string, + extract previously-enabled hash if one is present, + otherwise raises ValueError + """ + # default behavior: no way to restore original hash + raise ValueError("cannot restore original hash") + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/pwd.py b/venv/lib/python3.12/site-packages/passlib/pwd.py new file mode 100644 index 0000000..27ed228 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/pwd.py @@ -0,0 +1,809 @@ +"""passlib.pwd -- password generation helpers""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function, unicode_literals +# core +import codecs +from collections import defaultdict +try: + from collections.abc import MutableMapping +except ImportError: + # py2 compat + from collections import MutableMapping +from math import ceil, log as logf +import logging; log = logging.getLogger(__name__) +import pkg_resources +import os +# site +# pkg +from passlib import exc +from passlib.utils.compat import PY2, irange, itervalues, int_types +from passlib.utils import rng, getrandstr, to_unicode +from passlib.utils.decor import memoized_property +# local +__all__ = [ + "genword", "default_charsets", + "genphrase", "default_wordsets", +] + +#============================================================================= +# constants +#============================================================================= + +# XXX: rename / publically document this map? +entropy_aliases = dict( + # barest protection from throttled online attack + unsafe=12, + + # some protection from unthrottled online attack + weak=24, + + # some protection from offline attacks + fair=36, + + # reasonable protection from offline attacks + strong=48, + + # very good protection from offline attacks + secure=60, +) + +#============================================================================= +# internal helpers +#============================================================================= + +def _superclasses(obj, cls): + """return remaining classes in object's MRO after cls""" + mro = type(obj).__mro__ + return mro[mro.index(cls)+1:] + + +def _self_info_rate(source): + """ + returns 'rate of self-information' -- + i.e. average (per-symbol) entropy of the sequence **source**, + where probability of a given symbol occurring is calculated based on + the number of occurrences within the sequence itself. + + if all elements of the source are unique, this should equal ``log(len(source), 2)``. + + :arg source: + iterable containing 0+ symbols + (e.g. list of strings or ints, string of characters, etc). 
+ + :returns: + float bits of entropy + """ + try: + size = len(source) + except TypeError: + # if len() doesn't work, calculate size by summing counts later + size = None + counts = defaultdict(int) + for char in source: + counts[char] += 1 + if size is None: + values = counts.values() + size = sum(values) + else: + values = itervalues(counts) + if not size: + return 0 + # NOTE: the following performs ``- sum(value / size * logf(value / size, 2) for value in values)``, + # it just does so with as much pulled out of the sum() loop as possible... + return logf(size, 2) - sum(value * logf(value, 2) for value in values) / size + + +# def _total_self_info(source): +# """ +# return total self-entropy of a sequence +# (the average entropy per symbol * size of sequence) +# """ +# return _self_info_rate(source) * len(source) + + +def _open_asset_path(path, encoding=None): + """ + :param asset_path: + string containing absolute path to file, + or package-relative path using format + ``"python.module:relative/file/path"``. + + :returns: + filehandle opened in 'rb' mode + (unless encoding explicitly specified) + """ + if encoding: + return codecs.getreader(encoding)(_open_asset_path(path)) + if os.path.isabs(path): + return open(path, "rb") + package, sep, subpath = path.partition(":") + if not sep: + raise ValueError("asset path must be absolute file path " + "or use 'pkg.name:sub/path' format: %r" % (path,)) + return pkg_resources.resource_stream(package, subpath) + + +#: type aliases +_sequence_types = (list, tuple) +_set_types = (set, frozenset) + +#: set of elements that ensure_unique() has validated already. +_ensure_unique_cache = set() + + +def _ensure_unique(source, param="source"): + """ + helper for generators -- + Throws ValueError if source elements aren't unique. + Error message will display (abbreviated) repr of the duplicates in a string/list + """ + # check cache to speed things up for frozensets / tuples / strings + cache = _ensure_unique_cache + hashable = True + try: + if source in cache: + return True + except TypeError: + hashable = False + + # check if it has dup elements + if isinstance(source, _set_types) or len(set(source)) == len(source): + if hashable: + try: + cache.add(source) + except TypeError: + # XXX: under pypy, "list() in set()" above doesn't throw TypeError, + # but trying to add unhashable it to a set *does*. + pass + return True + + # build list of duplicate values + seen = set() + dups = set() + for elem in source: + (dups if elem in seen else seen).add(elem) + dups = sorted(dups) + trunc = 8 + if len(dups) > trunc: + trunc = 5 + dup_repr = ", ".join(repr(str(word)) for word in dups[:trunc]) + if len(dups) > trunc: + dup_repr += ", ... plus %d others" % (len(dups) - trunc) + + # throw error + raise ValueError("`%s` cannot contain duplicate elements: %s" % + (param, dup_repr)) + +#============================================================================= +# base generator class +#============================================================================= +class SequenceGenerator(object): + """ + Base class used by word & phrase generators. + + These objects take a series of options, corresponding + to those of the :func:`generate` function. + They act as callables which can be used to generate a password + or a list of 1+ passwords. They also expose some read-only + informational attributes. 
+ + Parameters + ---------- + :param entropy: + Optionally specify the amount of entropy the resulting passwords + should contain (as measured with respect to the generator itself). + This will be used to auto-calculate the required password size. + + :param length: + Optionally specify the length of password to generate, + measured as count of whatever symbols the subclass uses (characters or words). + Note if ``entropy`` requires a larger minimum length, + that will be used instead. + + :param rng: + Optionally provide a custom RNG source to use. + Should be an instance of :class:`random.Random`, + defaults to :class:`random.SystemRandom`. + + Attributes + ---------- + .. autoattribute:: length + .. autoattribute:: symbol_count + .. autoattribute:: entropy_per_symbol + .. autoattribute:: entropy + + Subclassing + ----------- + Subclasses must implement the ``.__next__()`` method, + and set ``.symbol_count`` before calling base ``__init__`` method. + """ + #============================================================================= + # instance attrs + #============================================================================= + + #: requested size of final password + length = None + + #: requested entropy of final password + requested_entropy = "strong" + + #: random number source to use + rng = rng + + #: number of potential symbols (must be filled in by subclass) + symbol_count = None + + #============================================================================= + # init + #============================================================================= + def __init__(self, entropy=None, length=None, rng=None, **kwds): + + # make sure subclass set things up correctly + assert self.symbol_count is not None, "subclass must set .symbol_count" + + # init length & requested entropy + if entropy is not None or length is None: + if entropy is None: + entropy = self.requested_entropy + entropy = entropy_aliases.get(entropy, entropy) + if entropy <= 0: + raise ValueError("`entropy` must be positive number") + min_length = int(ceil(entropy / self.entropy_per_symbol)) + if length is None or length < min_length: + length = min_length + + self.requested_entropy = entropy + + if length < 1: + raise ValueError("`length` must be positive integer") + self.length = length + + # init other common options + if rng is not None: + self.rng = rng + + # hand off to parent + if kwds and _superclasses(self, SequenceGenerator) == (object,): + raise TypeError("Unexpected keyword(s): %s" % ", ".join(kwds.keys())) + super(SequenceGenerator, self).__init__(**kwds) + + #============================================================================= + # informational helpers + #============================================================================= + + @memoized_property + def entropy_per_symbol(self): + """ + Average entropy per symbol (assuming all symbols have equal probability) + """ + return logf(self.symbol_count, 2) + + @memoized_property + def entropy(self): + """ + Effective entropy of generated passwords. + + This value will always be a multiple of :attr:`entropy_per_symbol`. + If entropy is specified in constructor, :attr:`length` will be chosen so + so that this value is the smallest multiple >= :attr:`requested_entropy`. 
+ """ + return self.length * self.entropy_per_symbol + + #============================================================================= + # generation + #============================================================================= + def __next__(self): + """main generation function, should create one password/phrase""" + raise NotImplementedError("implement in subclass") + + def __call__(self, returns=None): + """ + frontend used by genword() / genphrase() to create passwords + """ + if returns is None: + return next(self) + elif isinstance(returns, int_types): + return [next(self) for _ in irange(returns)] + elif returns is iter: + return self + else: + raise exc.ExpectedTypeError(returns, ", int, or ", "returns") + + def __iter__(self): + return self + + if PY2: + def next(self): + return self.__next__() + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# default charsets +#============================================================================= + +#: global dict of predefined characters sets +default_charsets = dict( + # ascii letters, digits, and some punctuation + ascii_72='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*?/', + + # ascii letters and digits + ascii_62='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', + + # ascii_50, without visually similar '1IiLl', '0Oo', '5S', '8B' + ascii_50='234679abcdefghjkmnpqrstuvwxyzACDEFGHJKMNPQRTUVWXYZ', + + # lower case hexadecimal + hex='0123456789abcdef', +) + +#============================================================================= +# password generator +#============================================================================= + +class WordGenerator(SequenceGenerator): + """ + Class which generates passwords by randomly choosing from a string of unique characters. + + Parameters + ---------- + :param chars: + custom character string to draw from. + + :param charset: + predefined charset to draw from. + + :param \\*\\*kwds: + all other keywords passed to the :class:`SequenceGenerator` parent class. + + Attributes + ---------- + .. autoattribute:: chars + .. autoattribute:: charset + .. 
+
+#=============================================================================
+# password generator
+#=============================================================================
+
+class WordGenerator(SequenceGenerator):
+    """
+    Class which generates passwords by randomly choosing from a string of unique characters.
+
+    Parameters
+    ----------
+    :param chars:
+        custom character string to draw from.
+
+    :param charset:
+        predefined charset to draw from.
+
+    :param \\*\\*kwds:
+        all other keywords passed to the :class:`SequenceGenerator` parent class.
+
+    Attributes
+    ----------
+    .. autoattribute:: chars
+    .. autoattribute:: charset
+    .. autoattribute:: default_charsets
+    """
+    #=============================================================================
+    # instance attrs
+    #=============================================================================
+
+    #: Predefined character set in use (set to None for instances using custom 'chars')
+    charset = "ascii_62"
+
+    #: string of chars to draw from -- usually filled in from charset
+    chars = None
+
+    #=============================================================================
+    # init
+    #=============================================================================
+    def __init__(self, chars=None, charset=None, **kwds):
+
+        # init chars and charset
+        if chars:
+            if charset:
+                raise TypeError("`chars` and `charset` are mutually exclusive")
+        else:
+            if not charset:
+                charset = self.charset
+                assert charset
+            chars = default_charsets[charset]
+        self.charset = charset
+        chars = to_unicode(chars, param="chars")
+        _ensure_unique(chars, param="chars")
+        self.chars = chars
+
+        # hand off to parent
+        super(WordGenerator, self).__init__(**kwds)
+        # log.debug("WordGenerator(): entropy/char=%r", self.entropy_per_symbol)
+
+    #=============================================================================
+    # informational helpers
+    #=============================================================================
+
+    @memoized_property
+    def symbol_count(self):
+        return len(self.chars)
+
+    #=============================================================================
+    # generation
+    #=============================================================================
+
+    def __next__(self):
+        # XXX: could do things like optionally ensure certain character groups
+        #      (e.g. letters & punctuation) are included
+        return getrandstr(self.rng, self.chars, self.length)
+
+    #=============================================================================
+    # eoc
+    #=============================================================================
+
+
+def genword(entropy=None, length=None, returns=None, **kwds):
+    """Generate one or more random passwords.
+
+    This function uses :mod:`random.SystemRandom` to generate
+    one or more passwords using various character sets.
+    The complexity of the password can be specified
+    by size, or by the desired amount of entropy.
+
+    Usage Example::
+
+        >>> # generate a random alphanumeric string with 48 bits of entropy (the default)
+        >>> from passlib import pwd
+        >>> pwd.genword()
+        'DnBHvDjMK6'
+
+        >>> # generate a random hexadecimal string with 52 bits of entropy
+        >>> pwd.genword(entropy=52, charset="hex")
+        '310f1a7ac793f'
+
+    :param entropy:
+        Strength of resulting password, measured in 'guessing entropy' bits.
+        An appropriate **length** value will be calculated
+        based on the requested entropy amount, and the size of the character set.
+
+        This can be a positive integer, or one of the following preset
+        strings: ``"weak"`` (24), ``"fair"`` (36),
+        ``"strong"`` (48), and ``"secure"`` (60).
+
+        If neither this nor **length** is specified, **entropy** will default
+        to ``"strong"`` (48).
+
+    :param length:
+        Size of resulting password, measured in characters.
+        If omitted, the size is auto-calculated based on the **entropy** parameter.
+
+        If both **entropy** and **length** are specified,
+        the stronger value will be used.
+
+    :param returns:
+        Controls what this function returns:
+
+        * If ``None`` (the default), this function will generate a single password.
+        * If an integer, this function will return a list containing that many passwords.
+        * If the ``iter`` constant, will return an iterator that yields passwords.
+
+    :param chars:
+
+        Optionally specify custom string of characters to use when randomly
+        generating a password. This option cannot be combined with **charset**.
+
+    :param charset:
+
+        The predefined character set to draw from (if not specified by **chars**).
+        There are currently four presets available:
+
+        * ``"ascii_62"`` (the default) -- all digits and ascii upper & lowercase letters.
+          Provides ~5.95 bits of entropy per character.
+
+        * ``"ascii_50"`` -- subset which excludes visually similar characters
+          (``1IiLl0Oo5S8B``). Provides ~5.64 bits of entropy per character.
+
+        * ``"ascii_72"`` -- all digits and ascii upper & lowercase letters,
+          as well as some punctuation. Provides ~6.17 bits of entropy per character.
+
+        * ``"hex"`` -- Lower case hexadecimal. Provides 4 bits of entropy per character.
+
+    :returns:
+        :class:`!unicode` string containing randomly generated password;
+        or list of 1+ passwords if :samp:`returns={int}` is specified.
+    """
+    gen = WordGenerator(length=length, entropy=entropy, **kwds)
+    return gen(returns)
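+
+# A small sketch of the custom-charset path (the ``chars`` string below is an
+# arbitrary example; any string of unique characters works)::
+#
+#   >>> from passlib import pwd
+#   >>> token = pwd.genword(length=12, chars="abcdef123456")
+#   >>> len(token)
+#   12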
+
+#=============================================================================
+# default wordsets
+#=============================================================================
+
+def _load_wordset(asset_path):
+    """
+    load wordset from compressed datafile within package data.
+    file should be utf-8 encoded
+
+    :param asset_path:
+        string containing absolute path to wordset file,
+        or "python.module:relative/file/path".
+
+    :returns:
+        tuple of words, as loaded from specified words file.
+    """
+    # open resource file, convert to tuple of words (strip blank lines & ws)
+    with _open_asset_path(asset_path, "utf-8") as fh:
+        gen = (word.strip() for word in fh)
+        words = tuple(word for word in gen if word)
+
+    # NOTE: works but not used
+    # # detect if file uses "<int> <word>" format, and strip numeric prefix
+    # def extract(row):
+    #     idx, word = row.replace("\t", " ").split(" ", 1)
+    #     if not idx.isdigit():
+    #         raise ValueError("row is not dice index + word")
+    #     return word
+    # try:
+    #     extract(words[-1])
+    # except ValueError:
+    #     pass
+    # else:
+    #     words = tuple(extract(word) for word in words)
+
+    log.debug("loaded %d-element wordset from %r", len(words), asset_path)
+    return words
+
+
+class WordsetDict(MutableMapping):
+    """
+    Special mapping used to store dictionary of wordsets.
+    Different from a regular dict in that some wordsets
+    may be lazy-loaded from an asset path.
+    """
+
+    #: dict of key -> asset path
+    paths = None
+
+    #: dict of key -> value
+    _loaded = None
+
+    def __init__(self, *args, **kwds):
+        self.paths = {}
+        self._loaded = {}
+        super(WordsetDict, self).__init__(*args, **kwds)
+
+    def __getitem__(self, key):
+        try:
+            return self._loaded[key]
+        except KeyError:
+            pass
+        path = self.paths[key]
+        value = self._loaded[key] = _load_wordset(path)
+        return value
+
+    def set_path(self, key, path):
+        """
+        set asset path to lazy-load wordset from.
+        """
+        self.paths[key] = path
+
+    def __setitem__(self, key, value):
+        self._loaded[key] = value
+
+    def __delitem__(self, key):
+        if key in self:
+            del self._loaded[key]
+            self.paths.pop(key, None)
+        else:
+            del self.paths[key]
+
+    @property
+    def _keyset(self):
+        keys = set(self._loaded)
+        keys.update(self.paths)
+        return keys
+
+    def __iter__(self):
+        return iter(self._keyset)
+
+    def __len__(self):
+        return len(self._keyset)
+
+    # NOTE: speeds things up, and prevents contains from lazy-loading
+    def __contains__(self, key):
+        return key in self._loaded or key in self.paths
+
+
+#: dict of predefined word sets.
+#: key is name of wordset, value should be sequence of words.
+default_wordsets = WordsetDict()
+
+# register the wordsets built into passlib
+for name in "eff_long eff_short eff_prefixed bip39".split():
+    default_wordsets.set_path(name, "passlib:_data/wordsets/%s.txt" % name)
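+
+# A small sketch of supplying a custom word list instead of a registered
+# wordset (the words below are made-up examples; ``words=`` bypasses the
+# registry entirely)::
+#
+#   >>> from passlib import pwd
+#   >>> phrase = pwd.genphrase(length=4, words=["red", "green", "blue", "cyan",
+#   ...                                         "plum", "gold", "teal", "gray"])
+#   >>> len(phrase.split(" "))
+#   4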
+
+#=============================================================================
+# passphrase generator
+#=============================================================================
+class PhraseGenerator(SequenceGenerator):
+    """class which generates passphrases by randomly choosing
+    from a list of unique words.
+
+    :param wordset:
+        name of predefined wordset to draw from.
+    :param words:
+        custom word list to use instead of ``wordset``.
+    :param sep:
+        separator to insert between words in output (defaults to a space).
+    :param \\*\\*kwds:
+        all other keywords passed to the :class:`SequenceGenerator` parent class.
+
+    .. autoattribute:: wordset
+    """
+    #=============================================================================
+    # instance attrs
+    #=============================================================================
+
+    #: predefined wordset to use
+    wordset = "eff_long"
+
+    #: list of words to draw from
+    words = None
+
+    #: separator to use when joining words
+    sep = " "
+
+    #=============================================================================
+    # init
+    #=============================================================================
+    def __init__(self, wordset=None, words=None, sep=None, **kwds):
+
+        # load wordset
+        if words is not None:
+            if wordset is not None:
+                raise TypeError("`words` and `wordset` are mutually exclusive")
+        else:
+            if wordset is None:
+                wordset = self.wordset
+                assert wordset
+            words = default_wordsets[wordset]
+        self.wordset = wordset
+
+        # init words
+        if not isinstance(words, _sequence_types):
+            words = tuple(words)
+        _ensure_unique(words, param="words")
+        self.words = words
+
+        # init separator
+        if sep is None:
+            sep = self.sep
+        sep = to_unicode(sep, param="sep")
+        self.sep = sep
+
+        # hand off to parent
+        super(PhraseGenerator, self).__init__(**kwds)
+        ##log.debug("PhraseGenerator(): entropy/word=%r entropy/char=%r min_chars=%r",
+        ##          self.entropy_per_symbol, self.entropy_per_char, self.min_chars)
+
+    #=============================================================================
+    # informational helpers
+    #=============================================================================
+
+    @memoized_property
+    def symbol_count(self):
+        return len(self.words)
+
+    #=============================================================================
+    # generation
+    #=============================================================================
+
+    def __next__(self):
+        words = (self.rng.choice(self.words) for _ in irange(self.length))
+        return self.sep.join(words)
+
+    #=============================================================================
+    # eoc
+    #=============================================================================
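+
+# Sizing math for the default wordset (worked example): "eff_long" holds 7776
+# words, so each word carries log2(7776) ~= 12.9 bits, and the default
+# "strong" target of 48 bits needs ceil(48 / 12.9) = 4 words::
+#
+#   >>> from math import ceil, log
+#   >>> ceil(48 / log(7776, 2))
+#   4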
+
+
+def genphrase(entropy=None, length=None, returns=None, **kwds):
+    """Generate one or more random password / passphrases.
+
+    This function uses :mod:`random.SystemRandom` to generate
+    one or more passwords; it can be configured to generate
+    alphanumeric passwords, or full english phrases.
+    The complexity of the password can be specified
+    by size, or by the desired amount of entropy.
+
+    Usage Example::
+
+        >>> # generate random phrase with 48 bits of entropy
+        >>> from passlib import pwd
+        >>> pwd.genphrase()
+        'gangly robbing salt shove'
+
+        >>> # generate a random phrase with 52 bits of entropy
+        >>> # using a particular wordset
+        >>> pwd.genphrase(entropy=52, wordset="bip39")
+        'wheat dilemma reward rescue diary'
+
+    :param entropy:
+        Strength of resulting password, measured in 'guessing entropy' bits.
+        An appropriate **length** value will be calculated
+        based on the requested entropy amount, and the size of the word set.
+
+        This can be a positive integer, or one of the following preset
+        strings: ``"weak"`` (24), ``"fair"`` (36),
+        ``"strong"`` (48), and ``"secure"`` (60).
+
+        If neither this nor **length** is specified, **entropy** will default
+        to ``"strong"`` (48).
+
+    :param length:
+        Length of resulting password, measured in words.
+        If omitted, the size is auto-calculated based on the **entropy** parameter.
+
+        If both **entropy** and **length** are specified,
+        the stronger value will be used.
+
+    :param returns:
+        Controls what this function returns:
+
+        * If ``None`` (the default), this function will generate a single password.
+        * If an integer, this function will return a list containing that many passwords.
+        * If the ``iter`` builtin, will return an iterator that yields passwords.
+
+    :param words:
+
+        Optionally specifies a list/set of words to use when randomly generating a passphrase.
+        This option cannot be combined with **wordset**.
+
+    :param wordset:
+
+        The predefined word set to draw from (if not specified by **words**).
+        There are currently four presets available:
+
+        ``"eff_long"`` (the default)
+
+            Wordset containing 7776 english words of ~7 letters.
+            Constructed by the EFF, it offers ~12.9 bits of entropy per word.
+
+            This wordset (and the other ``"eff_"`` wordsets)
+            were `created by the EFF <https://www.eff.org/deeplinks/2016/07/new-wordlists-random-passphrases>`_
+            to aid in generating passwords.  See their announcement page
+            for more details about the design & properties of these wordsets.
+
+        ``"eff_short"``
+
+            Wordset containing 1296 english words of ~4.5 letters.
+            Constructed by the EFF, it offers ~10.3 bits of entropy per word.
+
+        ``"eff_prefixed"``
+
+            Wordset containing 1296 english words of ~8 letters,
+            selected so that they each have a unique 3-character prefix.
+            Constructed by the EFF, it offers ~10.3 bits of entropy per word.
+
+        ``"bip39"``
+
+            Wordset of 2048 english words of ~5 letters,
+            selected so that they each have a unique 4-character prefix.
+            Published as part of Bitcoin's `BIP 39 <https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki>`_,
+            this wordset has exactly 11 bits of entropy per word.
+
+            This list offers words that are typically shorter than ``"eff_long"``
+            (at the cost of slightly less entropy); and much shorter than
+            ``"eff_prefixed"`` (at the cost of a longer unique prefix).
+
+    :param sep:
+        Optional separator to use when joining words.
+        Defaults to ``" "`` (a space), but can be an empty string, a hyphen, etc.
+
+    :returns:
+        :class:`!unicode` string containing randomly generated passphrase;
+        or list of 1+ passphrases if :samp:`returns={int}` is specified.
+    """
+    gen = PhraseGenerator(entropy=entropy, length=length, **kwds)
+    return gen(returns)
+
+#=============================================================================
+# strength measurement
+#
+# NOTE:
+# for a little while, had rough draft of password strength measurement alg here.
+# but not sure if there's value in yet another measurement algorithm,
+# that's not just duplicating the effort of libraries like zxcvbn.
+# may revive it later, but for now, leaving some refs to others out there:
+#    * NIST 800-63 has simple alg
+#    * zxcvbn (https://tech.dropbox.com/2012/04/zxcvbn-realistic-password-strength-estimation/)
+#      might also be good, and has approach similar to composite approach i was already thinking about,
+#      but much more well thought out.
+#    * passfault (https://github.com/c-a-m/passfault) looks thorough,
+#      but may have licensing issues, plus porting to python looks like very big job :(
+#    * give a look at running things through zlib - might be able to cheaply
+#      catch extra redundancies.
+#=============================================================================
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/registry.py b/venv/lib/python3.12/site-packages/passlib/registry.py
new file mode 100644
index 0000000..9964b25
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/registry.py
@@ -0,0 +1,547 @@
+"""passlib.registry - registry for password hash handlers"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import re
+import logging; log = logging.getLogger(__name__)
+from warnings import warn
+# pkg
+from passlib import exc
+from passlib.exc import ExpectedTypeError, PasslibWarning
+from passlib.ifc import PasswordHash
+from passlib.utils import (
+    is_crypt_handler, has_crypt as os_crypt_present,
+    unix_crypt_schemes as os_crypt_schemes,
+)
+from passlib.utils.compat import unicode_or_str
+from passlib.utils.decor import memoize_single_value
+# local
+__all__ = [
+    "register_crypt_handler_path",
+    "register_crypt_handler",
+    "get_crypt_handler",
+    "list_crypt_handlers",
+]
+
+#=============================================================================
+# proxy object used in place of 'passlib.hash' module
+#=============================================================================
+class _PasslibRegistryProxy(object):
+    """proxy module passlib.hash
+
+    this module is in fact an object which lazy-loads
+    the requested password hash algorithm from wherever it has been stored.
+    it acts as a thin wrapper around :func:`passlib.registry.get_crypt_handler`.
+    """
+    __name__ = "passlib.hash"
+    __package__ = None
+
+    def __getattr__(self, attr):
+        if attr.startswith("_"):
+            raise AttributeError("missing attribute: %r" % (attr,))
+        handler = get_crypt_handler(attr, None)
+        if handler:
+            return handler
+        else:
+            raise AttributeError("unknown password hash: %r" % (attr,))
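+
+    # A minimal sketch of the proxy in action: attribute access on
+    # ``passlib.hash`` is routed through get_crypt_handler(), so the two
+    # lookups below resolve to the same handler object (illustrative)::
+    #
+    #   >>> from passlib.hash import md5_crypt          # lazy-loads handler
+    #   >>> from passlib.registry import get_crypt_handler
+    #   >>> get_crypt_handler("md5_crypt") is md5_crypt
+    #   True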
+
+    def __setattr__(self, attr, value):
+        if attr.startswith("_"):
+            # writing to private attributes should behave normally.
+            # (required so GAE can write to the __loader__ attribute).
+            object.__setattr__(self, attr, value)
+        else:
+            # writing to public attributes should be treated
+            # as attempting to register a handler.
+            register_crypt_handler(value, _attr=attr)
+
+    def __repr__(self):
+        return "<proxy module 'passlib.hash'>"
+
+    def __dir__(self):
+        # this adds in lazy-loaded handler names,
+        # otherwise this is the standard dir() implementation.
+        attrs = set(dir(self.__class__))
+        attrs.update(self.__dict__)
+        attrs.update(_locations)
+        return sorted(attrs)
+
+# create single instance - available publically as 'passlib.hash'
+_proxy = _PasslibRegistryProxy()
+
+#=============================================================================
+# internal registry state
+#=============================================================================
+
+# singleton used to detect omitted keywords
+_UNSET = object()
+
+# dict mapping name -> loaded handlers (just uses proxy object's internal dict)
+_handlers = _proxy.__dict__
+
+# dict mapping names -> import path for lazy loading.
+#     * import path should be "module.path" or "module.path:attr"
+#     * if attr omitted, "name" used as default.
+_locations = dict(
+    # NOTE: this is a hardcoded list of the handlers built into passlib,
+    #       applications should call register_crypt_handler_path()
+    apr_md5_crypt = "passlib.handlers.md5_crypt",
+    argon2 = "passlib.handlers.argon2",
+    atlassian_pbkdf2_sha1 = "passlib.handlers.pbkdf2",
+    bcrypt = "passlib.handlers.bcrypt",
+    bcrypt_sha256 = "passlib.handlers.bcrypt",
+    bigcrypt = "passlib.handlers.des_crypt",
+    bsd_nthash = "passlib.handlers.windows",
+    bsdi_crypt = "passlib.handlers.des_crypt",
+    cisco_pix = "passlib.handlers.cisco",
+    cisco_asa = "passlib.handlers.cisco",
+    cisco_type7 = "passlib.handlers.cisco",
+    cta_pbkdf2_sha1 = "passlib.handlers.pbkdf2",
+    crypt16 = "passlib.handlers.des_crypt",
+    des_crypt = "passlib.handlers.des_crypt",
+    django_argon2 = "passlib.handlers.django",
+    django_bcrypt = "passlib.handlers.django",
+    django_bcrypt_sha256 = "passlib.handlers.django",
+    django_pbkdf2_sha256 = "passlib.handlers.django",
+    django_pbkdf2_sha1 = "passlib.handlers.django",
+    django_salted_sha1 = "passlib.handlers.django",
+    django_salted_md5 = "passlib.handlers.django",
+    django_des_crypt = "passlib.handlers.django",
+    django_disabled = "passlib.handlers.django",
+    dlitz_pbkdf2_sha1 = "passlib.handlers.pbkdf2",
+    fshp = "passlib.handlers.fshp",
+    grub_pbkdf2_sha512 = "passlib.handlers.pbkdf2",
+    hex_md4 = "passlib.handlers.digests",
+    hex_md5 = "passlib.handlers.digests",
+    hex_sha1 = "passlib.handlers.digests",
+    hex_sha256 = "passlib.handlers.digests",
+    hex_sha512 = "passlib.handlers.digests",
+    htdigest = "passlib.handlers.digests",
+    ldap_plaintext = "passlib.handlers.ldap_digests",
+    ldap_md5 = "passlib.handlers.ldap_digests",
+    ldap_sha1 = "passlib.handlers.ldap_digests",
+    ldap_hex_md5 = "passlib.handlers.roundup",
+    ldap_hex_sha1 = "passlib.handlers.roundup",
+    ldap_salted_md5 = "passlib.handlers.ldap_digests",
+    ldap_salted_sha1 = "passlib.handlers.ldap_digests",
+    ldap_salted_sha256 = "passlib.handlers.ldap_digests",
+    ldap_salted_sha512 = "passlib.handlers.ldap_digests",
+    ldap_des_crypt = "passlib.handlers.ldap_digests",
+    ldap_bsdi_crypt = "passlib.handlers.ldap_digests",
+    ldap_md5_crypt = "passlib.handlers.ldap_digests",
+    ldap_bcrypt = "passlib.handlers.ldap_digests",
+    ldap_sha1_crypt = "passlib.handlers.ldap_digests",
+    ldap_sha256_crypt = "passlib.handlers.ldap_digests",
+    ldap_sha512_crypt = "passlib.handlers.ldap_digests",
+    ldap_pbkdf2_sha1 = "passlib.handlers.pbkdf2",
ldap_pbkdf2_sha256 = "passlib.handlers.pbkdf2", + ldap_pbkdf2_sha512 = "passlib.handlers.pbkdf2", + lmhash = "passlib.handlers.windows", + md5_crypt = "passlib.handlers.md5_crypt", + msdcc = "passlib.handlers.windows", + msdcc2 = "passlib.handlers.windows", + mssql2000 = "passlib.handlers.mssql", + mssql2005 = "passlib.handlers.mssql", + mysql323 = "passlib.handlers.mysql", + mysql41 = "passlib.handlers.mysql", + nthash = "passlib.handlers.windows", + oracle10 = "passlib.handlers.oracle", + oracle11 = "passlib.handlers.oracle", + pbkdf2_sha1 = "passlib.handlers.pbkdf2", + pbkdf2_sha256 = "passlib.handlers.pbkdf2", + pbkdf2_sha512 = "passlib.handlers.pbkdf2", + phpass = "passlib.handlers.phpass", + plaintext = "passlib.handlers.misc", + postgres_md5 = "passlib.handlers.postgres", + roundup_plaintext = "passlib.handlers.roundup", + scram = "passlib.handlers.scram", + scrypt = "passlib.handlers.scrypt", + sha1_crypt = "passlib.handlers.sha1_crypt", + sha256_crypt = "passlib.handlers.sha2_crypt", + sha512_crypt = "passlib.handlers.sha2_crypt", + sun_md5_crypt = "passlib.handlers.sun_md5_crypt", + unix_disabled = "passlib.handlers.misc", + unix_fallback = "passlib.handlers.misc", +) + +# master regexp for detecting valid handler names +_name_re = re.compile("^[a-z][a-z0-9_]+[a-z0-9]$") + +# names which aren't allowed for various reasons +# (mainly keyword conflicts in CryptContext) +_forbidden_names = frozenset(["onload", "policy", "context", "all", + "default", "none", "auto"]) + +#============================================================================= +# registry frontend functions +#============================================================================= +def _validate_handler_name(name): + """helper to validate handler name + + :raises ValueError: + * if empty name + * if name not lower case + * if name contains double underscores + * if name is reserved (e.g. ``context``, ``all``). + """ + if not name: + raise ValueError("handler name cannot be empty: %r" % (name,)) + if name.lower() != name: + raise ValueError("name must be lower-case: %r" % (name,)) + if not _name_re.match(name): + raise ValueError("invalid name (must be 3+ characters, " + " begin with a-z, and contain only underscore, a-z, " + "0-9): %r" % (name,)) + if '__' in name: + raise ValueError("name may not contain double-underscores: %r" % + (name,)) + if name in _forbidden_names: + raise ValueError("that name is not allowed: %r" % (name,)) + return True + +def register_crypt_handler_path(name, path): + """register location to lazy-load handler when requested. + + custom hashes may be registered via :func:`register_crypt_handler`, + or they may be registered by this function, + which will delay actually importing and loading the handler + until a call to :func:`get_crypt_handler` is made for the specified name. + + :arg name: name of handler + :arg path: module import path + + the specified module path should contain a password hash handler + called :samp:`{name}`, or the path may contain a colon, + specifying the module and module attribute to use. 
+ for example, the following would cause ``get_crypt_handler("myhash")`` to look
+ for a class named ``myhash`` within the ``myapp.helpers`` module::
+
+ >>> from passlib.registry import register_crypt_handler_path
+ >>> register_crypt_handler_path("myhash", "myapp.helpers")
+
+ ...while this form would cause ``get_crypt_handler("myhash")`` to look
+ for a class named ``MyHash`` within the ``myapp.helpers`` module::
+
+ >>> from passlib.registry import register_crypt_handler_path
+ >>> register_crypt_handler_path("myhash", "myapp.helpers:MyHash")
+ """
+ # validate name
+ _validate_handler_name(name)
+
+ # validate path
+ if path.startswith("."):
+ raise ValueError("path cannot start with '.'")
+ if ':' in path:
+ if path.count(':') > 1:
+ raise ValueError("path cannot have more than one ':'")
+ if path.find('.', path.index(':')) > -1:
+ raise ValueError("path cannot have '.' to right of ':'")
+
+ # store location
+ _locations[name] = path
+ log.debug("registered path to %r handler: %r", name, path)
+
+def register_crypt_handler(handler, force=False, _attr=None):
+ """register password hash handler.
+
+ this function immediately registers a handler with the internal passlib registry,
+ so that it will be returned by :func:`get_crypt_handler` when requested.
+
+ :arg handler: the password hash handler to register
+ :param force: force override of existing handler (defaults to False)
+ :param _attr:
+ [internal kwd] if specified, ensures ``handler.name``
+ matches this value, or raises :exc:`ValueError`.
+
+ :raises TypeError:
+ if the specified object does not appear to be a valid handler.
+
+ :raises ValueError:
+ if the specified object's name (or other required attributes)
+ contain invalid values.
+
+ :raises KeyError:
+ if a (different) handler was already registered with
+ the same name, and ``force=True`` was not specified.
+ """
+ # validate handler
+ if not is_crypt_handler(handler):
+ raise ExpectedTypeError(handler, "password hash handler", "handler")
+ if not handler:
+ raise AssertionError("``bool(handler)`` must be True")
+
+ # validate name
+ name = handler.name
+ _validate_handler_name(name)
+ if _attr and _attr != name:
+ raise ValueError("handlers must be stored only under their own name (%r != %r)" %
+ (_attr, name))
+
+ # check for existing handler
+ other = _handlers.get(name)
+ if other:
+ if other is handler:
+ log.debug("same %r handler already registered: %r", name, handler)
+ return
+ elif force:
+ log.warning("overriding previously registered %r handler: %r",
+ name, other)
+ else:
+ raise KeyError("another %r handler has already been registered: %r" %
+ (name, other))
+
+ # register handler
+ _handlers[name] = handler
+ log.debug("registered %r handler: %r", name, handler)
+
+def get_crypt_handler(name, default=_UNSET):
+ """return handler for specified password hash scheme.
+
+ this function looks up a handler for the specified scheme.
+ if the handler is not already loaded,
+ it checks if the location is known, and loads it first.
+
+ :arg name: name of handler to return
+ :param default: optional default value to return if no handler with specified name is found.
+
+ :raises KeyError: if no handler matching that name is found, and no default specified, a KeyError will be raised.
+
+ :returns: handler attached to name, or default value (if specified).
+ """
+ # catch invalid names before we check _handlers,
+ # since it's a module dict, and exposes things like __package__, etc.
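[Editor's note: a minimal usage sketch of the lazy-registration flow documented above. Illustrative only; the module path ``myapp.hashes`` and class ``MyHash`` are hypothetical.]

    from passlib.registry import register_crypt_handler_path, get_crypt_handler

    # record the location only -- nothing is imported yet
    register_crypt_handler_path("myhash", "myapp.hashes:MyHash")

    # the first lookup imports myapp.hashes and registers MyHash under "myhash"
    handler = get_crypt_handler("myhash")
    assert handler.name == "myhash"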
+ if name.startswith("_"): + if default is _UNSET: + raise KeyError("invalid handler name: %r" % (name,)) + else: + return default + + # check if handler is already loaded + try: + return _handlers[name] + except KeyError: + pass + + # normalize name (and if changed, check dict again) + assert isinstance(name, unicode_or_str), "name must be string instance" + alt = name.replace("-","_").lower() + if alt != name: + warn("handler names should be lower-case, and use underscores instead " + "of hyphens: %r => %r" % (name, alt), PasslibWarning, + stacklevel=2) + name = alt + + # try to load using new name + try: + return _handlers[name] + except KeyError: + pass + + # check if lazy load mapping has been specified for this driver + path = _locations.get(name) + if path: + if ':' in path: + modname, modattr = path.split(":") + else: + modname, modattr = path, name + ##log.debug("loading %r handler from path: '%s:%s'", name, modname, modattr) + + # try to load the module - any import errors indicate runtime config, usually + # either missing package, or bad path provided to register_crypt_handler_path() + mod = __import__(modname, fromlist=[modattr], level=0) + + # first check if importing module triggered register_crypt_handler(), + # (this is discouraged due to its magical implicitness) + handler = _handlers.get(name) + if handler: + # XXX: issue deprecation warning here? + assert is_crypt_handler(handler), "unexpected object: name=%r object=%r" % (name, handler) + return handler + + # then get real handler & register it + handler = getattr(mod, modattr) + register_crypt_handler(handler, _attr=name) + return handler + + # fail! + if default is _UNSET: + raise KeyError("no crypt handler found for algorithm: %r" % (name,)) + else: + return default + +def list_crypt_handlers(loaded_only=False): + """return sorted list of all known crypt handler names. + + :param loaded_only: if ``True``, only returns names of handlers which have actually been loaded. + + :returns: list of names of all known handlers + """ + names = set(_handlers) + if not loaded_only: + names.update(_locations) + # strip private attrs out of namespace and sort. + # TODO: make _handlers a separate list, so we don't have module namespace mixed in. + return sorted(name for name in names if not name.startswith("_")) + +# NOTE: these two functions mainly exist just for the unittests... + +def _has_crypt_handler(name, loaded_only=False): + """check if handler name is known. + + this is only useful for two cases: + + * quickly checking if handler has already been loaded + * checking if handler exists, without actually loading it + + :arg name: name of handler + :param loaded_only: if ``True``, returns False if handler exists but hasn't been loaded + """ + return (name in _handlers) or (not loaded_only and name in _locations) + +def _unload_handler_name(name, locations=True): + """unloads a handler from the registry. + + .. warning:: + + this is an internal function, + used only by the unittests. + + if loaded handler is found with specified name, it's removed. + if path to lazy load handler is found, it's removed. + + missing names are a noop. 
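[Editor's note: a short sketch of the lookup helpers above, assuming only the stock passlib handlers are installed.]

    from passlib.registry import get_crypt_handler, list_crypt_handlers

    # unknown names raise KeyError unless a default is supplied
    assert get_crypt_handler("no_such_hash", default=None) is None

    # list_crypt_handlers() includes lazy (not-yet-imported) handlers by default
    assert "md5_crypt" in list_crypt_handlers()

    get_crypt_handler("md5_crypt")  # forces the lazy import
    assert "md5_crypt" in list_crypt_handlers(loaded_only=True)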
+ + :arg name: name of handler to unload + :param locations: if False, won't purge registered handler locations (default True) + """ + if name in _handlers: + del _handlers[name] + if locations and name in _locations: + del _locations[name] + +#============================================================================= +# inspection helpers +#============================================================================= + +#------------------------------------------------------------------ +# general +#------------------------------------------------------------------ + +# TODO: needs UTs +def _resolve(hasher, param="value"): + """ + internal helper to resolve argument to hasher object + """ + if is_crypt_handler(hasher): + return hasher + elif isinstance(hasher, unicode_or_str): + return get_crypt_handler(hasher) + else: + raise exc.ExpectedTypeError(hasher, unicode_or_str, param) + + +#: backend aliases +ANY = "any" +BUILTIN = "builtin" +OS_CRYPT = "os_crypt" + +# TODO: needs UTs +def has_backend(hasher, backend=ANY, safe=False): + """ + Test if specified backend is available for hasher. + + :param hasher: + Hasher name or object. + + :param backend: + Name of backend, or ``"any"`` if any backend will do. + For hashers without multiple backends, will pretend + they have a single backend named ``"builtin"``. + + :param safe: + By default, throws error if backend is unknown. + If ``safe=True``, will just return false value. + + :raises ValueError: + * if hasher name is unknown. + * if backend is unknown to hasher, and safe=False. + + :return: + True if backend available, False if not available, + and None if unknown + safe=True. + """ + hasher = _resolve(hasher) + + if backend == ANY: + if not hasattr(hasher, "get_backend"): + # single backend, assume it's loaded + return True + + # multiple backends, check at least one is loadable + try: + hasher.get_backend() + return True + except exc.MissingBackendError: + return False + + # test for specific backend + if hasattr(hasher, "has_backend"): + # multiple backends + if safe and backend not in hasher.backends: + return None + return hasher.has_backend(backend) + + # single builtin backend + if backend == BUILTIN: + return True + elif safe: + return None + else: + raise exc.UnknownBackendError(hasher, backend) + +#------------------------------------------------------------------ +# os crypt +#------------------------------------------------------------------ + +# TODO: move unix_crypt_schemes list to here. +# os_crypt_schemes -- alias for unix_crypt_schemes above + + +# TODO: needs UTs +@memoize_single_value +def get_supported_os_crypt_schemes(): + """ + return tuple of schemes which :func:`crypt.crypt` natively supports. + """ + if not os_crypt_present: + return () + cache = tuple(name for name in os_crypt_schemes + if get_crypt_handler(name).has_backend(OS_CRYPT)) + if not cache: # pragma: no cover -- sanity check + # no idea what OS this could happen on... + import platform + warn("crypt.crypt() function is present, but doesn't support any " + "formats known to passlib! (system=%r release=%r)" % + (platform.system(), platform.release()), + exc.PasslibRuntimeWarning) + return cache + + +# TODO: needs UTs +def has_os_crypt_support(hasher): + """ + check if hash is supported by native :func:`crypt.crypt` function. + if :func:`crypt.crypt` is not present, will always return False. + + :param hasher: + name or hasher object. + + :returns bool: + True if hash format is supported by OS, else False. 
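[Editor's note: a sketch of the backend-inspection helpers above. Illustrative only; the results depend on which optional packages and OS-level crypt() schemes are present.]

    from passlib.registry import get_supported_os_crypt_schemes, has_backend

    # True if at least one bcrypt backend (e.g. the 'bcrypt' package) is loadable
    print(has_backend("bcrypt"))

    # safe=True returns None instead of raising for a backend the hasher doesn't know
    print(has_backend("md5_crypt", "os_crypt", safe=True))

    # empty tuple if crypt.crypt() is unavailable (e.g. on Windows)
    print(get_supported_os_crypt_schemes())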
+ """ + return os_crypt_present and has_backend(hasher, OS_CRYPT, safe=True) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__init__.py b/venv/lib/python3.12/site-packages/passlib/tests/__init__.py new file mode 100644 index 0000000..389da76 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/__init__.py @@ -0,0 +1 @@ +"""passlib tests""" diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__main__.py b/venv/lib/python3.12/site-packages/passlib/tests/__main__.py new file mode 100644 index 0000000..2424576 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/__main__.py @@ -0,0 +1,6 @@ +import os +from nose import run +run( + defaultTest=os.path.dirname(__file__), +) + diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6f4df53 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..0de0bb7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/_test_bad_register.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/_test_bad_register.cpython-312.pyc new file mode 100644 index 0000000..871534a Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/_test_bad_register.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/backports.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/backports.cpython-312.pyc new file mode 100644 index 0000000..f92019a Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/backports.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_apache.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_apache.cpython-312.pyc new file mode 100644 index 0000000..e008541 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_apache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_apps.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_apps.cpython-312.pyc new file mode 100644 index 0000000..7fee81e Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_apps.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_context.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_context.cpython-312.pyc new file mode 100644 index 0000000..6078ac7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_context.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_context_deprecated.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_context_deprecated.cpython-312.pyc new file mode 100644 index 0000000..9556b41 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_context_deprecated.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-312.pyc new file mode 100644 index 0000000..1b7af5e Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_des.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_des.cpython-312.pyc new file mode 100644 index 0000000..f6b55d6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_des.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_digest.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_digest.cpython-312.pyc new file mode 100644 index 0000000..515f386 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_digest.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_scrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_scrypt.cpython-312.pyc new file mode 100644 index 0000000..a9e0caf Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_crypto_scrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_ext_django.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_ext_django.cpython-312.pyc new file mode 100644 index 0000000..e4dc9d8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_ext_django.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_ext_django_source.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_ext_django_source.cpython-312.pyc new file mode 100644 index 0000000..545a5eb Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_ext_django_source.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers.cpython-312.pyc new file mode 100644 index 0000000..6aebdc5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_argon2.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_argon2.cpython-312.pyc new file mode 100644 index 0000000..3e315c9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_argon2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_bcrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_bcrypt.cpython-312.pyc new file mode 100644 
index 0000000..285328e Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_bcrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_cisco.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_cisco.cpython-312.pyc new file mode 100644 index 0000000..d81c9a1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_cisco.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_django.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_django.cpython-312.pyc new file mode 100644 index 0000000..deb1bc6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_django.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-312.pyc new file mode 100644 index 0000000..4ea1b7c Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_scrypt.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_scrypt.cpython-312.pyc new file mode 100644 index 0000000..04bc675 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_handlers_scrypt.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_hosts.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_hosts.cpython-312.pyc new file mode 100644 index 0000000..4664361 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_hosts.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_pwd.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_pwd.cpython-312.pyc new file mode 100644 index 0000000..ce5e30c Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_pwd.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_registry.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_registry.cpython-312.pyc new file mode 100644 index 0000000..6c51e6a Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_registry.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_totp.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_totp.cpython-312.pyc new file mode 100644 index 0000000..895313b Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_totp.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils.cpython-312.pyc new file mode 100644 index 0000000..d602542 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_handlers.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_handlers.cpython-312.pyc new file mode 100644 index 0000000..501a02d Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_handlers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_md4.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_md4.cpython-312.pyc new file mode 100644 index 0000000..5ea1bc1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_md4.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_pbkdf2.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_pbkdf2.cpython-312.pyc new file mode 100644 index 0000000..142ddf8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_utils_pbkdf2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_win32.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_win32.cpython-312.pyc new file mode 100644 index 0000000..cc24c0b Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/test_win32.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/tox_support.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/tox_support.cpython-312.pyc new file mode 100644 index 0000000..0404450 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/tox_support.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..0e83560 Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/_test_bad_register.py b/venv/lib/python3.12/site-packages/passlib/tests/_test_bad_register.py new file mode 100644 index 0000000..f0683fc --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/_test_bad_register.py @@ -0,0 +1,15 @@ +"""helper for method in test_registry.py""" +from passlib.registry import register_crypt_handler +import passlib.utils.handlers as uh + +class dummy_bad(uh.StaticHandler): + name = "dummy_bad" + +class alt_dummy_bad(uh.StaticHandler): + name = "dummy_bad" + +# NOTE: if passlib.tests is being run from symlink (e.g. via gaeunit), +# this module may be imported a second time as test._test_bad_registry. +# we don't want it to do anything in that case. 
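[Editor's note: a sketch of the duplicate-name behavior this helper module exists to test. Illustrative only; the handler name "demo_dup" is hypothetical.]

    from passlib.registry import register_crypt_handler
    import passlib.utils.handlers as uh

    class first_impl(uh.StaticHandler):
        name = "demo_dup"

    class second_impl(uh.StaticHandler):
        name = "demo_dup"

    register_crypt_handler(first_impl)
    try:
        register_crypt_handler(second_impl)       # KeyError: name already taken
    except KeyError:
        pass
    register_crypt_handler(second_impl, force=True)  # explicit override is allowed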
+if __name__.startswith("passlib.tests"): + register_crypt_handler(alt_dummy_bad) diff --git a/venv/lib/python3.12/site-packages/passlib/tests/backports.py b/venv/lib/python3.12/site-packages/passlib/tests/backports.py new file mode 100644 index 0000000..5058cec --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/backports.py @@ -0,0 +1,67 @@ +"""backports of needed unittest2 features""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import re +import sys +##from warnings import warn +# site +# pkg +from passlib.utils.compat import PY26 +# local +__all__ = [ + "TestCase", + "unittest", + # TODO: deprecate these exports in favor of "unittest.XXX" + "skip", "skipIf", "skipUnless", +] + +#============================================================================= +# import latest unittest module available +#============================================================================= +try: + import unittest2 as unittest +except ImportError: + if PY26: + raise ImportError("Passlib's tests require 'unittest2' under Python 2.6 (as of Passlib 1.7)") + # python 2.7 and python 3.2 both have unittest2 features (at least, the ones we use) + import unittest + +#============================================================================= +# unittest aliases +#============================================================================= +skip = unittest.skip +skipIf = unittest.skipIf +skipUnless = unittest.skipUnless +SkipTest = unittest.SkipTest + +#============================================================================= +# custom test harness +#============================================================================= +class TestCase(unittest.TestCase): + """backports a number of unittest2 features in TestCase""" + + #=================================================================== + # backport some unittest2 names + #=================================================================== + + #--------------------------------------------------------------- + # backport assertRegex() alias from 3.2 to 2.7 + # was present in 2.7 under an alternate name + #--------------------------------------------------------------- + if not hasattr(unittest.TestCase, "assertRegex"): + assertRegex = unittest.TestCase.assertRegexpMatches + + if not hasattr(unittest.TestCase, "assertRaisesRegex"): + assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/sample1.cfg b/venv/lib/python3.12/site-packages/passlib/tests/sample1.cfg new file mode 100644 index 0000000..56e3ae8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/sample1.cfg @@ -0,0 +1,9 @@ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + diff --git a/venv/lib/python3.12/site-packages/passlib/tests/sample1b.cfg 
b/venv/lib/python3.12/site-packages/passlib/tests/sample1b.cfg new file mode 100644 index 0000000..56e3ae8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/sample1b.cfg @@ -0,0 +1,9 @@ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + diff --git a/venv/lib/python3.12/site-packages/passlib/tests/sample1c.cfg b/venv/lib/python3.12/site-packages/passlib/tests/sample1c.cfg new file mode 100644 index 0000000..a5033eb Binary files /dev/null and b/venv/lib/python3.12/site-packages/passlib/tests/sample1c.cfg differ diff --git a/venv/lib/python3.12/site-packages/passlib/tests/sample_config_1s.cfg b/venv/lib/python3.12/site-packages/passlib/tests/sample_config_1s.cfg new file mode 100644 index 0000000..495a13e --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/sample_config_1s.cfg @@ -0,0 +1,8 @@ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all.vary_rounds = 10%% +bsdi_crypt.max_rounds = 30000 +bsdi_crypt.default_rounds = 25000 +sha512_crypt.max_rounds = 50000 +sha512_crypt.min_rounds = 40000 diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_apache.py b/venv/lib/python3.12/site-packages/passlib/tests/test_apache.py new file mode 100644 index 0000000..198b425 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_apache.py @@ -0,0 +1,769 @@ +"""tests for passlib.apache -- (c) Assurance Technologies 2008-2011""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from logging import getLogger +import os +import subprocess +# site +# pkg +from passlib import apache, registry +from passlib.exc import MissingBackendError +from passlib.utils.compat import irange +from passlib.tests.backports import unittest +from passlib.tests.utils import TestCase, get_file, set_file, ensure_mtime_changed +from passlib.utils.compat import u +from passlib.utils import to_bytes +from passlib.utils.handlers import to_unicode_for_identify +# module +log = getLogger(__name__) + +#============================================================================= +# helpers +#============================================================================= + +def backdate_file_mtime(path, offset=10): + """backdate file's mtime by specified amount""" + # NOTE: this is used so we can test code which detects mtime changes, + # without having to actually *pause* for that long. 
+ atime = os.path.getatime(path) + mtime = os.path.getmtime(path)-offset + os.utime(path, (atime, mtime)) + +#============================================================================= +# detect external HTPASSWD tool +#============================================================================= + + +htpasswd_path = os.environ.get("PASSLIB_TEST_HTPASSWD_PATH") or "htpasswd" + + +def _call_htpasswd(args, stdin=None): + """ + helper to run htpasswd cmd + """ + if stdin is not None: + stdin = stdin.encode("utf-8") + proc = subprocess.Popen([htpasswd_path] + args, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, stdin=subprocess.PIPE if stdin else None) + out, err = proc.communicate(stdin) + rc = proc.wait() + out = to_unicode_for_identify(out or "") + return out, rc + + +def _call_htpasswd_verify(path, user, password): + """ + wrapper for htpasswd verify + """ + out, rc = _call_htpasswd(["-vi", path, user], password) + return not rc + + +def _detect_htpasswd(): + """ + helper to check if htpasswd is present + """ + try: + out, rc = _call_htpasswd([]) + except OSError: + # TODO: under py3, could trap the more specific FileNotFoundError + # cmd not found + return False, False + # when called w/o args, it should print usage to stderr & return rc=2 + if not rc: + log.warning("htpasswd test returned with rc=0") + have_bcrypt = " -B " in out + return True, have_bcrypt + + +HAVE_HTPASSWD, HAVE_HTPASSWD_BCRYPT = _detect_htpasswd() + +requires_htpasswd_cmd = unittest.skipUnless(HAVE_HTPASSWD, "requires `htpasswd` cmdline tool") + + +#============================================================================= +# htpasswd +#============================================================================= +class HtpasswdFileTest(TestCase): + """test HtpasswdFile class""" + descriptionPrefix = "HtpasswdFile" + + # sample with 4 users + sample_01 = (b'user2:2CHkkwa2AtqGs\n' + b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + b'user4:pass4\n' + b'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n') + + # sample 1 with user 1, 2 deleted; 4 changed + sample_02 = b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\nuser4:pass4\n' + + # sample 1 with user2 updated, user 1 first entry removed, and user 5 added + sample_03 = (b'user2:pass2x\n' + b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + b'user4:pass4\n' + b'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n' + b'user5:pass5\n') + + # standalone sample with 8-bit username + sample_04_utf8 = b'user\xc3\xa6:2CHkkwa2AtqGs\n' + sample_04_latin1 = b'user\xe6:2CHkkwa2AtqGs\n' + + sample_dup = b'user1:pass1\nuser1:pass2\n' + + # sample with bcrypt & sha256_crypt hashes + sample_05 = (b'user2:2CHkkwa2AtqGs\n' + b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + b'user4:pass4\n' + b'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n' + b'user5:$2a$12$yktDxraxijBZ360orOyCOePFGhuis/umyPNJoL5EbsLk.s6SWdrRO\n' + b'user6:$5$rounds=110000$cCRp/xUUGVgwR4aP$' + b'p0.QKFS5qLNRqw1/47lXYiAcgIjJK.WjCO8nrEKuUK.\n') + + def test_00_constructor_autoload(self): + """test constructor autoload""" + # check with existing file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile(path) + self.assertEqual(ht.to_string(), self.sample_01) + self.assertEqual(ht.path, path) + self.assertTrue(ht.mtime) + + # check changing path + ht.path = path + "x" + self.assertEqual(ht.path, path + "x") + self.assertFalse(ht.mtime) + + # check new=True + ht = apache.HtpasswdFile(path, new=True) + self.assertEqual(ht.to_string(), b"") + self.assertEqual(ht.path, path) + self.assertFalse(ht.mtime) + + # 
check autoload=False (deprecated alias for new=True) + with self.assertWarningList("``autoload=False`` is deprecated"): + ht = apache.HtpasswdFile(path, autoload=False) + self.assertEqual(ht.to_string(), b"") + self.assertEqual(ht.path, path) + self.assertFalse(ht.mtime) + + # check missing file + os.remove(path) + self.assertRaises(IOError, apache.HtpasswdFile, path) + + # NOTE: "default_scheme" option checked via set_password() test, among others + + def test_00_from_path(self): + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile.from_path(path) + self.assertEqual(ht.to_string(), self.sample_01) + self.assertEqual(ht.path, None) + self.assertFalse(ht.mtime) + + def test_01_delete(self): + """test delete()""" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertTrue(ht.delete("user1")) # should delete both entries + self.assertTrue(ht.delete("user2")) + self.assertFalse(ht.delete("user5")) # user not present + self.assertEqual(ht.to_string(), self.sample_02) + + # invalid user + self.assertRaises(ValueError, ht.delete, "user:") + + def test_01_delete_autosave(self): + path = self.mktemp() + sample = b'user1:pass1\nuser2:pass2\n' + set_file(path, sample) + + ht = apache.HtpasswdFile(path) + ht.delete("user1") + self.assertEqual(get_file(path), sample) + + ht = apache.HtpasswdFile(path, autosave=True) + ht.delete("user1") + self.assertEqual(get_file(path), b"user2:pass2\n") + + def test_02_set_password(self): + """test set_password()""" + ht = apache.HtpasswdFile.from_string( + self.sample_01, default_scheme="plaintext") + self.assertTrue(ht.set_password("user2", "pass2x")) + self.assertFalse(ht.set_password("user5", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # test legacy default kwd + with self.assertWarningList("``default`` is deprecated"): + ht = apache.HtpasswdFile.from_string(self.sample_01, default="plaintext") + self.assertTrue(ht.set_password("user2", "pass2x")) + self.assertFalse(ht.set_password("user5", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # invalid user + self.assertRaises(ValueError, ht.set_password, "user:", "pass") + + # test that legacy update() still works + with self.assertWarningList("update\(\) is deprecated"): + ht.update("user2", "test") + self.assertTrue(ht.check_password("user2", "test")) + + def test_02_set_password_autosave(self): + path = self.mktemp() + sample = b'user1:pass1\n' + set_file(path, sample) + + ht = apache.HtpasswdFile(path) + ht.set_password("user1", "pass2") + self.assertEqual(get_file(path), sample) + + ht = apache.HtpasswdFile(path, default_scheme="plaintext", autosave=True) + ht.set_password("user1", "pass2") + self.assertEqual(get_file(path), b"user1:pass2\n") + + def test_02_set_password_default_scheme(self): + """test set_password() -- default_scheme""" + + def check(scheme): + ht = apache.HtpasswdFile(default_scheme=scheme) + ht.set_password("user1", "pass1") + return ht.context.identify(ht.get_hash("user1")) + + # explicit scheme + self.assertEqual(check("sha256_crypt"), "sha256_crypt") + self.assertEqual(check("des_crypt"), "des_crypt") + + # unknown scheme + self.assertRaises(KeyError, check, "xxx") + + # alias resolution + self.assertEqual(check("portable"), apache.htpasswd_defaults["portable"]) + self.assertEqual(check("portable_apache_22"), apache.htpasswd_defaults["portable_apache_22"]) + self.assertEqual(check("host_apache_22"), apache.htpasswd_defaults["host_apache_22"]) + + # default + self.assertEqual(check(None), 
apache.htpasswd_defaults["portable_apache_22"]) + + def test_03_users(self): + """test users()""" + ht = apache.HtpasswdFile.from_string(self.sample_01) + ht.set_password("user5", "pass5") + ht.delete("user3") + ht.set_password("user3", "pass3") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user3", "user4", "user5"]) + + def test_04_check_password(self): + """test check_password()""" + ht = apache.HtpasswdFile.from_string(self.sample_05) + self.assertRaises(TypeError, ht.check_password, 1, 'pass9') + self.assertTrue(ht.check_password("user9","pass9") is None) + + # users 1..6 of sample_01 run through all the main hash formats, + # to make sure they're recognized. + for i in irange(1, 7): + i = str(i) + try: + self.assertTrue(ht.check_password("user"+i, "pass"+i)) + self.assertTrue(ht.check_password("user"+i, "pass9") is False) + except MissingBackendError: + if i == "5": + # user5 uses bcrypt, which is apparently not available right now + continue + raise + + self.assertRaises(ValueError, ht.check_password, "user:", "pass") + + # test that legacy verify() still works + with self.assertWarningList(["verify\(\) is deprecated"]*2): + self.assertTrue(ht.verify("user1", "pass1")) + self.assertFalse(ht.verify("user1", "pass2")) + + def test_05_load(self): + """test load()""" + # setup empty file + path = self.mktemp() + set_file(path, "") + backdate_file_mtime(path, 5) + ha = apache.HtpasswdFile(path, default_scheme="plaintext") + self.assertEqual(ha.to_string(), b"") + + # make changes, check load_if_changed() does nothing + ha.set_password("user1", "pass1") + ha.load_if_changed() + self.assertEqual(ha.to_string(), b"user1:pass1\n") + + # change file + set_file(path, self.sample_01) + ha.load_if_changed() + self.assertEqual(ha.to_string(), self.sample_01) + + # make changes, check load() overwrites them + ha.set_password("user5", "pass5") + ha.load() + self.assertEqual(ha.to_string(), self.sample_01) + + # test load w/ no path + hb = apache.HtpasswdFile() + self.assertRaises(RuntimeError, hb.load) + self.assertRaises(RuntimeError, hb.load_if_changed) + + # test load w/ dups and explicit path + set_file(path, self.sample_dup) + hc = apache.HtpasswdFile() + hc.load(path) + self.assertTrue(hc.check_password('user1','pass1')) + + # NOTE: load_string() tested via from_string(), which is used all over this file + + def test_06_save(self): + """test save()""" + # load from file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile(path) + + # make changes, check they saved + ht.delete("user1") + ht.delete("user2") + ht.save() + self.assertEqual(get_file(path), self.sample_02) + + # test save w/ no path + hb = apache.HtpasswdFile(default_scheme="plaintext") + hb.set_password("user1", "pass1") + self.assertRaises(RuntimeError, hb.save) + + # test save w/ explicit path + hb.save(path) + self.assertEqual(get_file(path), b"user1:pass1\n") + + def test_07_encodings(self): + """test 'encoding' kwd""" + # test bad encodings cause failure in constructor + self.assertRaises(ValueError, apache.HtpasswdFile, encoding="utf-16") + + # check sample utf-8 + ht = apache.HtpasswdFile.from_string(self.sample_04_utf8, encoding="utf-8", + return_unicode=True) + self.assertEqual(ht.users(), [ u("user\u00e6") ]) + + # test deprecated encoding=None + with self.assertWarningList("``encoding=None`` is deprecated"): + ht = apache.HtpasswdFile.from_string(self.sample_04_utf8, encoding=None) + self.assertEqual(ht.users(), [ b'user\xc3\xa6' ]) + + # check sample latin-1 + ht = 
apache.HtpasswdFile.from_string(self.sample_04_latin1, + encoding="latin-1", return_unicode=True) + self.assertEqual(ht.users(), [ u("user\u00e6") ]) + + def test_08_get_hash(self): + """test get_hash()""" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertEqual(ht.get_hash("user3"), b"{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=") + self.assertEqual(ht.get_hash("user4"), b"pass4") + self.assertEqual(ht.get_hash("user5"), None) + + with self.assertWarningList("find\(\) is deprecated"): + self.assertEqual(ht.find("user4"), b"pass4") + + def test_09_to_string(self): + """test to_string""" + + # check with known sample + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertEqual(ht.to_string(), self.sample_01) + + # test blank + ht = apache.HtpasswdFile() + self.assertEqual(ht.to_string(), b"") + + def test_10_repr(self): + ht = apache.HtpasswdFile("fakepath", autosave=True, new=True, encoding="latin-1") + repr(ht) + + def test_11_malformed(self): + self.assertRaises(ValueError, apache.HtpasswdFile.from_string, + b'realm:user1:pass1\n') + self.assertRaises(ValueError, apache.HtpasswdFile.from_string, + b'pass1\n') + + def test_12_from_string(self): + # forbid path kwd + self.assertRaises(TypeError, apache.HtpasswdFile.from_string, + b'', path=None) + + def test_13_whitespace(self): + """whitespace & comment handling""" + + # per htpasswd source (https://github.com/apache/httpd/blob/trunk/support/htpasswd.c), + # lines that match "^\s*(#.*)?$" should be ignored + source = to_bytes( + '\n' + 'user2:pass2\n' + 'user4:pass4\n' + 'user7:pass7\r\n' + ' \t \n' + 'user1:pass1\n' + ' # legacy users\n' + '#user6:pass6\n' + 'user5:pass5\n\n' + ) + + # loading should see all users (except user6, who was commented out) + ht = apache.HtpasswdFile.from_string(source) + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user7"]) + + # update existing user + ht.set_hash("user4", "althash4") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user7"]) + + # add a new user + ht.set_hash("user6", "althash6") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user6", "user7"]) + + # delete existing user + ht.delete("user7") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user6"]) + + # re-serialization should preserve whitespace + target = to_bytes( + '\n' + 'user2:pass2\n' + 'user4:althash4\n' + ' \t \n' + 'user1:pass1\n' + ' # legacy users\n' + '#user6:pass6\n' + 'user5:pass5\n' + 'user6:althash6\n' + ) + self.assertEqual(ht.to_string(), target) + + @requires_htpasswd_cmd + def test_htpasswd_cmd_verify(self): + """ + verify "htpasswd" command can read output + """ + path = self.mktemp() + ht = apache.HtpasswdFile(path=path, new=True) + + def hash_scheme(pwd, scheme): + return ht.context.handler(scheme).hash(pwd) + + # base scheme + ht.set_hash("user1", hash_scheme("password","apr_md5_crypt")) + + # 2.2-compat scheme + host_no_bcrypt = apache.htpasswd_defaults["host_apache_22"] + ht.set_hash("user2", hash_scheme("password", host_no_bcrypt)) + + # 2.4-compat scheme + host_best = apache.htpasswd_defaults["host"] + ht.set_hash("user3", hash_scheme("password", host_best)) + + # unsupported scheme -- should always fail to verify + ht.set_hash("user4", "$xxx$foo$bar$baz") + + # make sure htpasswd properly recognizes hashes + ht.save() + + self.assertFalse(_call_htpasswd_verify(path, "user1", "wrong")) + self.assertFalse(_call_htpasswd_verify(path, "user2", "wrong")) + 
self.assertFalse(_call_htpasswd_verify(path, "user3", "wrong")) + self.assertFalse(_call_htpasswd_verify(path, "user4", "wrong")) + + self.assertTrue(_call_htpasswd_verify(path, "user1", "password")) + self.assertTrue(_call_htpasswd_verify(path, "user2", "password")) + self.assertTrue(_call_htpasswd_verify(path, "user3", "password")) + + @requires_htpasswd_cmd + @unittest.skipUnless(registry.has_backend("bcrypt"), "bcrypt support required") + def test_htpasswd_cmd_verify_bcrypt(self): + """ + verify "htpasswd" command can read bcrypt format + + this tests for regression of issue 95, where we output "$2b$" instead of "$2y$"; + fixed in v1.7.2. + """ + path = self.mktemp() + ht = apache.HtpasswdFile(path=path, new=True) + def hash_scheme(pwd, scheme): + return ht.context.handler(scheme).hash(pwd) + ht.set_hash("user1", hash_scheme("password", "bcrypt")) + ht.save() + self.assertFalse(_call_htpasswd_verify(path, "user1", "wrong")) + if HAVE_HTPASSWD_BCRYPT: + self.assertTrue(_call_htpasswd_verify(path, "user1", "password")) + else: + # apache2.2 should fail, acting like it's an unknown hash format + self.assertFalse(_call_htpasswd_verify(path, "user1", "password")) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htdigest +#============================================================================= +class HtdigestFileTest(TestCase): + """test HtdigestFile class""" + descriptionPrefix = "HtdigestFile" + + # sample with 4 users + sample_01 = (b'user2:realm:549d2a5f4659ab39a80dac99e159ab19\n' + b'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + b'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n' + b'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n') + + # sample 1 with user 1, 2 deleted; 4 changed + sample_02 = (b'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + b'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n') + + # sample 1 with user2 updated, user 1 first entry removed, and user 5 added + sample_03 = (b'user2:realm:5ba6d8328943c23c64b50f8b29566059\n' + b'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + b'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n' + b'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n' + b'user5:realm:03c55fdc6bf71552356ad401bdb9af19\n') + + # standalone sample with 8-bit username & realm + sample_04_utf8 = b'user\xc3\xa6:realm\xc3\xa6:549d2a5f4659ab39a80dac99e159ab19\n' + sample_04_latin1 = b'user\xe6:realm\xe6:549d2a5f4659ab39a80dac99e159ab19\n' + + def test_00_constructor_autoload(self): + """test constructor autoload""" + # check with existing file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtdigestFile(path) + self.assertEqual(ht.to_string(), self.sample_01) + + # check without autoload + ht = apache.HtdigestFile(path, new=True) + self.assertEqual(ht.to_string(), b"") + + # check missing file + os.remove(path) + self.assertRaises(IOError, apache.HtdigestFile, path) + + # NOTE: default_realm option checked via other tests. 
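[Editor's note: a minimal sketch of the HtdigestFile API exercised by the tests below. Illustrative only; no path is given, so nothing is written to disk.]

    from passlib import apache

    hd = apache.HtdigestFile(default_realm="realm")
    hd.set_password("user1", "pass1")                    # realm defaults to "realm"
    assert hd.check_password("user1", "realm", "pass1") is True
    assert hd.check_password("user1", "realm", "wrong") is False
    assert hd.check_password("user9", "realm", "x") is None   # unknown user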
+ + def test_01_delete(self): + """test delete()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertTrue(ht.delete("user1", "realm")) + self.assertTrue(ht.delete("user2", "realm")) + self.assertFalse(ht.delete("user5", "realm")) + self.assertFalse(ht.delete("user3", "realm5")) + self.assertEqual(ht.to_string(), self.sample_02) + + # invalid user + self.assertRaises(ValueError, ht.delete, "user:", "realm") + + # invalid realm + self.assertRaises(ValueError, ht.delete, "user", "realm:") + + def test_01_delete_autosave(self): + path = self.mktemp() + set_file(path, self.sample_01) + + ht = apache.HtdigestFile(path) + self.assertTrue(ht.delete("user1", "realm")) + self.assertFalse(ht.delete("user3", "realm5")) + self.assertFalse(ht.delete("user5", "realm")) + self.assertEqual(get_file(path), self.sample_01) + + ht.autosave = True + self.assertTrue(ht.delete("user2", "realm")) + self.assertEqual(get_file(path), self.sample_02) + + def test_02_set_password(self): + """test update()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertTrue(ht.set_password("user2", "realm", "pass2x")) + self.assertFalse(ht.set_password("user5", "realm", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # default realm + self.assertRaises(TypeError, ht.set_password, "user2", "pass3") + ht.default_realm = "realm2" + ht.set_password("user2", "pass3") + ht.check_password("user2", "realm2", "pass3") + + # invalid user + self.assertRaises(ValueError, ht.set_password, "user:", "realm", "pass") + self.assertRaises(ValueError, ht.set_password, "u"*256, "realm", "pass") + + # invalid realm + self.assertRaises(ValueError, ht.set_password, "user", "realm:", "pass") + self.assertRaises(ValueError, ht.set_password, "user", "r"*256, "pass") + + # test that legacy update() still works + with self.assertWarningList("update\(\) is deprecated"): + ht.update("user2", "realm2", "test") + self.assertTrue(ht.check_password("user2", "test")) + + # TODO: test set_password autosave + + def test_03_users(self): + """test users()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + ht.set_password("user5", "realm", "pass5") + ht.delete("user3", "realm") + ht.set_password("user3", "realm", "pass3") + self.assertEqual(sorted(ht.users("realm")), ["user1", "user2", "user3", "user4", "user5"]) + + self.assertRaises(TypeError, ht.users, 1) + + def test_04_check_password(self): + """test check_password()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertRaises(TypeError, ht.check_password, 1, 'realm', 'pass5') + self.assertRaises(TypeError, ht.check_password, 'user', 1, 'pass5') + self.assertIs(ht.check_password("user5", "realm","pass5"), None) + for i in irange(1,5): + i = str(i) + self.assertTrue(ht.check_password("user"+i, "realm", "pass"+i)) + self.assertIs(ht.check_password("user"+i, "realm", "pass5"), False) + + # default realm + self.assertRaises(TypeError, ht.check_password, "user5", "pass5") + ht.default_realm = "realm" + self.assertTrue(ht.check_password("user1", "pass1")) + self.assertIs(ht.check_password("user5", "pass5"), None) + + # test that legacy verify() still works + with self.assertWarningList(["verify\(\) is deprecated"]*2): + self.assertTrue(ht.verify("user1", "realm", "pass1")) + self.assertFalse(ht.verify("user1", "realm", "pass2")) + + # invalid user + self.assertRaises(ValueError, ht.check_password, "user:", "realm", "pass") + + def test_05_load(self): + """test load()""" + # setup empty file + path = self.mktemp() + set_file(path, "") + 
backdate_file_mtime(path, 5) + ha = apache.HtdigestFile(path) + self.assertEqual(ha.to_string(), b"") + + # make changes, check load_if_changed() does nothing + ha.set_password("user1", "realm", "pass1") + ha.load_if_changed() + self.assertEqual(ha.to_string(), b'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n') + + # change file + set_file(path, self.sample_01) + ha.load_if_changed() + self.assertEqual(ha.to_string(), self.sample_01) + + # make changes, check load_if_changed overwrites them + ha.set_password("user5", "realm", "pass5") + ha.load() + self.assertEqual(ha.to_string(), self.sample_01) + + # test load w/ no path + hb = apache.HtdigestFile() + self.assertRaises(RuntimeError, hb.load) + self.assertRaises(RuntimeError, hb.load_if_changed) + + # test load w/ explicit path + hc = apache.HtdigestFile() + hc.load(path) + self.assertEqual(hc.to_string(), self.sample_01) + + # change file, test deprecated force=False kwd + ensure_mtime_changed(path) + set_file(path, "") + with self.assertWarningList(r"load\(force=False\) is deprecated"): + ha.load(force=False) + self.assertEqual(ha.to_string(), b"") + + def test_06_save(self): + """test save()""" + # load from file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtdigestFile(path) + + # make changes, check they saved + ht.delete("user1", "realm") + ht.delete("user2", "realm") + ht.save() + self.assertEqual(get_file(path), self.sample_02) + + # test save w/ no path + hb = apache.HtdigestFile() + hb.set_password("user1", "realm", "pass1") + self.assertRaises(RuntimeError, hb.save) + + # test save w/ explicit path + hb.save(path) + self.assertEqual(get_file(path), hb.to_string()) + + def test_07_realms(self): + """test realms() & delete_realm()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + + self.assertEqual(ht.delete_realm("x"), 0) + self.assertEqual(ht.realms(), ['realm']) + + self.assertEqual(ht.delete_realm("realm"), 4) + self.assertEqual(ht.realms(), []) + self.assertEqual(ht.to_string(), b"") + + def test_08_get_hash(self): + """test get_hash()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertEqual(ht.get_hash("user3", "realm"), "a500bb8c02f6a9170ae46af10c898744") + self.assertEqual(ht.get_hash("user4", "realm"), "ab7b5d5f28ccc7666315f508c7358519") + self.assertEqual(ht.get_hash("user5", "realm"), None) + + with self.assertWarningList("find\(\) is deprecated"): + self.assertEqual(ht.find("user4", "realm"), "ab7b5d5f28ccc7666315f508c7358519") + + def test_09_encodings(self): + """test encoding parameter""" + # test bad encodings cause failure in constructor + self.assertRaises(ValueError, apache.HtdigestFile, encoding="utf-16") + + # check sample utf-8 + ht = apache.HtdigestFile.from_string(self.sample_04_utf8, encoding="utf-8", return_unicode=True) + self.assertEqual(ht.realms(), [ u("realm\u00e6") ]) + self.assertEqual(ht.users(u("realm\u00e6")), [ u("user\u00e6") ]) + + # check sample latin-1 + ht = apache.HtdigestFile.from_string(self.sample_04_latin1, encoding="latin-1", return_unicode=True) + self.assertEqual(ht.realms(), [ u("realm\u00e6") ]) + self.assertEqual(ht.users(u("realm\u00e6")), [ u("user\u00e6") ]) + + def test_10_to_string(self): + """test to_string()""" + + # check sample + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertEqual(ht.to_string(), self.sample_01) + + # check blank + ht = apache.HtdigestFile() + self.assertEqual(ht.to_string(), b"") + + def test_11_malformed(self): + self.assertRaises(ValueError, apache.HtdigestFile.from_string, + 
b'realm:user1:pass1:other\n') + self.assertRaises(ValueError, apache.HtdigestFile.from_string, + b'user1:pass1\n') + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_apps.py b/venv/lib/python3.12/site-packages/passlib/tests/test_apps.py new file mode 100644 index 0000000..167437f --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_apps.py @@ -0,0 +1,139 @@ +"""test passlib.apps""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib import apps, hash as hashmod +from passlib.tests.utils import TestCase +# module + +#============================================================================= +# test predefined app contexts +#============================================================================= +class AppsTest(TestCase): + """perform general tests to make sure contexts work""" + # NOTE: these tests are not really comprehensive, + # since they would do little but duplicate + # the presets in apps.py + # + # they mainly try to ensure no typos + # or dynamic behavior foul-ups. + + def test_master_context(self): + ctx = apps.master_context + self.assertGreater(len(ctx.schemes()), 50) + + def test_custom_app_context(self): + ctx = apps.custom_app_context + self.assertEqual(ctx.schemes(), ("sha512_crypt", "sha256_crypt")) + for hash in [ + ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751'), + ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17'), + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_django16_context(self): + ctx = apps.django16_context + for hash in [ + 'pbkdf2_sha256$29000$ZsgquwnCyBs2$fBxRQpfKd2PIeMxtkKPy0h7SrnrN+EU/cm67aitoZ2s=', + 'sha1$0d082$cdb462ae8b6be8784ef24b20778c4d0c82d5957f', + 'md5$b887a$37767f8a745af10612ad44c80ff52e92', + 'crypt$95a6d$95x74hLDQKXI2', + '098f6bcd4621d373cade4e832627b4f6', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertEqual(ctx.identify("!"), "django_disabled") + self.assertFalse(ctx.verify("test", "!")) + + def test_django_context(self): + ctx = apps.django_context + for hash in [ + 'pbkdf2_sha256$29000$ZsgquwnCyBs2$fBxRQpfKd2PIeMxtkKPy0h7SrnrN+EU/cm67aitoZ2s=', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertEqual(ctx.identify("!"), "django_disabled") + self.assertFalse(ctx.verify("test", "!")) + + def test_ldap_nocrypt_context(self): + ctx = apps.ldap_nocrypt_context + for hash in [ + '{SSHA}cPusOzd6d5n3OjSVK3R329ZGCNyFcC7F', + 'test', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertIs(ctx.identify('{CRYPT}$5$rounds=31817$iZGmlyBQ99JSB5' + 'n6$p4E.pdPBWx19OajgjLRiOW0itGnyxDGgMlDcOsfaI17'), None) + + def test_ldap_context(self): + ctx = apps.ldap_context + for hash in [ + ('{CRYPT}$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0' + 'itGnyxDGgMlDcOsfaI17'), + '{SSHA}cPusOzd6d5n3OjSVK3R329ZGCNyFcC7F', + 'test', + ]: + self.assertTrue(ctx.verify("test", hash)) + + def 
test_ldap_mysql_context(self): + ctx = apps.mysql_context + for hash in [ + '*94BDCEBE19083CE2A1F959FD02F964C7AF4CFC29', + '378b243e220ca493', + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_postgres_context(self): + ctx = apps.postgres_context + hash = 'md55d9c68c6c50ed3d02a2fcf54f63993b6' + self.assertTrue(ctx.verify("test", hash, user='user')) + + def test_phppass_context(self): + ctx = apps.phpass_context + for hash in [ + '$P$8Ja1vJsKa5qyy/b3mCJGXM7GyBnt6..', + '$H$8b95CoYQnQ9Y6fSTsACyphNh5yoM02.', + '_cD..aBxeRhYFJvtUvsI', + ]: + self.assertTrue(ctx.verify("test", hash)) + + h1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + if hashmod.bcrypt.has_backend(): + self.assertTrue(ctx.verify("test", h1)) + self.assertEqual(ctx.default_scheme(), "bcrypt") + self.assertEqual(ctx.handler().name, "bcrypt") + else: + self.assertEqual(ctx.identify(h1), "bcrypt") + self.assertEqual(ctx.default_scheme(), "phpass") + self.assertEqual(ctx.handler().name, "phpass") + + def test_phpbb3_context(self): + ctx = apps.phpbb3_context + for hash in [ + '$P$8Ja1vJsKa5qyy/b3mCJGXM7GyBnt6..', + '$H$8b95CoYQnQ9Y6fSTsACyphNh5yoM02.', + ]: + self.assertTrue(ctx.verify("test", hash)) + self.assertTrue(ctx.hash("test").startswith("$H$")) + + def test_roundup_context(self): + ctx = apps.roundup_context + for hash in [ + '{PBKDF2}9849$JMTYu3eOUSoFYExprVVqbQ$N5.gV.uR1.BTgLSvi0qyPiRlGZ0', + '{SHA}a94a8fe5ccb19ba61c4c0873d391e987982fbbd3', + '{CRYPT}dptOmKDriOGfU', + '{plaintext}test', + ]: + self.assertTrue(ctx.verify("test", hash)) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_context.py b/venv/lib/python3.12/site-packages/passlib/tests/test_context.py new file mode 100644 index 0000000..09b52c0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_context.py @@ -0,0 +1,1786 @@ +"""tests for passlib.context""" +#============================================================================= +# imports +#============================================================================= +# core +from __future__ import with_statement +from passlib.utils.compat import PY3 +if PY3: + from configparser import NoSectionError +else: + from ConfigParser import NoSectionError +import datetime +from functools import partial +import logging; log = logging.getLogger(__name__) +import os +import warnings +# site +# pkg +from passlib import hash +from passlib.context import CryptContext, LazyCryptContext +from passlib.exc import PasslibConfigWarning, PasslibHashWarning +from passlib.utils import tick, to_unicode +from passlib.utils.compat import irange, u, unicode, str_to_uascii, PY2, PY26 +import passlib.utils.handlers as uh +from passlib.tests.utils import (TestCase, set_file, TICK_RESOLUTION, + quicksleep, time_call, handler_derived_from) +from passlib.registry import (register_crypt_handler_path, + _has_crypt_handler as has_crypt_handler, + _unload_handler_name as unload_handler_name, + get_crypt_handler, + ) +# local +#============================================================================= +# support +#============================================================================= +here = os.path.abspath(os.path.dirname(__file__)) + +def merge_dicts(first, *args, **kwds): + target = first.copy() + for arg in args: + target.update(arg) + if kwds: + target.update(kwds) + return target + 
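+# NOTE: quick illustrative sketch (editorial; not upstream passlib code) of the
+# overlay pattern the sample_* fixtures below exercise: a base policy is merged
+# with a patch dict, which is exactly what merge_dicts() above computes for the
+# expected values. Names prefixed _demo_ are illustrative only; CryptContext is
+# already imported at the top of this module.
+_demo_base = CryptContext(schemes=["des_crypt", "md5_crypt"], default="md5_crypt")
+_demo_patched = _demo_base.copy(deprecated=["des_crypt"])  # overlay; base unchanged
+assert _demo_patched.to_dict() == merge_dicts(_demo_base.to_dict(),
+                                              dict(deprecated=["des_crypt"]))
+del _demo_base, _demo_patched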
+#============================================================================= +# +#============================================================================= +class CryptContextTest(TestCase): + descriptionPrefix = "CryptContext" + + # TODO: these unittests could really use a good cleanup + # and reorganizing, to ensure they're getting everything. + + #=================================================================== + # sample configurations used in tests + #=================================================================== + + #--------------------------------------------------------------- + # sample 1 - typical configuration + #--------------------------------------------------------------- + sample_1_schemes = ["des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"] + sample_1_handlers = [get_crypt_handler(name) for name in sample_1_schemes] + + sample_1_dict = dict( + schemes = sample_1_schemes, + default = "md5_crypt", + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30001, + bsdi_crypt__default_rounds = 25001, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + sample_1_resolved_dict = merge_dicts(sample_1_dict, + schemes = sample_1_handlers) + + sample_1_unnormalized = u("""\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +; this is using %... +all__vary_rounds = 10%% +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 +""") + + sample_1_unicode = u("""\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + +""") + + #--------------------------------------------------------------- + # sample 1 external files + #--------------------------------------------------------------- + + # sample 1 string with '\n' linesep + sample_1_path = os.path.join(here, "sample1.cfg") + + # sample 1 with '\r\n' linesep + sample_1b_unicode = sample_1_unicode.replace(u("\n"), u("\r\n")) + sample_1b_path = os.path.join(here, "sample1b.cfg") + + # sample 1 using UTF-16 and alt section + sample_1c_bytes = sample_1_unicode.replace(u("[passlib]"), + u("[mypolicy]")).encode("utf-16") + sample_1c_path = os.path.join(here, "sample1c.cfg") + + # enable to regenerate sample files + if False: + set_file(sample_1_path, sample_1_unicode) + set_file(sample_1b_path, sample_1b_unicode) + set_file(sample_1c_path, sample_1c_bytes) + + #--------------------------------------------------------------- + # sample 2 & 12 - options patch + #--------------------------------------------------------------- + sample_2_dict = dict( + # using this to test full replacement of existing options + bsdi_crypt__min_rounds = 29001, + bsdi_crypt__max_rounds = 35001, + bsdi_crypt__default_rounds = 31001, + # using this to test partial replacement of existing options + sha512_crypt__min_rounds=45000, + ) + + sample_2_unicode = """\ +[passlib] +bsdi_crypt__min_rounds = 29001 +bsdi_crypt__max_rounds = 35001 +bsdi_crypt__default_rounds = 31001 +sha512_crypt__min_rounds = 45000 +""" + + # sample 2 overlayed on top of sample 1 + sample_12_dict = merge_dicts(sample_1_dict, sample_2_dict) + + #--------------------------------------------------------------- + # sample 3 & 123 - just changing default from sample 1 + #--------------------------------------------------------------- + 
sample_3_dict = dict( + default="sha512_crypt", + ) + + # sample 3 overlayed on 2 overlayed on 1 + sample_123_dict = merge_dicts(sample_12_dict, sample_3_dict) + + #--------------------------------------------------------------- + # sample 4 - used by api tests + #--------------------------------------------------------------- + sample_4_dict = dict( + schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt", + "sha256_crypt"], + deprecated = [ "des_crypt", ], + default = "sha256_crypt", + bsdi_crypt__max_rounds = 31, + bsdi_crypt__default_rounds = 25, + bsdi_crypt__vary_rounds = 0, + sha256_crypt__max_rounds = 3000, + sha256_crypt__min_rounds = 2000, + sha256_crypt__default_rounds = 3000, + phpass__ident = "H", + phpass__default_rounds = 7, + ) + + #=================================================================== + # setup + #=================================================================== + def setUp(self): + super(CryptContextTest, self).setUp() + warnings.filterwarnings("ignore", "The 'all' scheme is deprecated.*") + warnings.filterwarnings("ignore", ".*'scheme' keyword is deprecated as of Passlib 1.7.*") + + #=================================================================== + # constructors + #=================================================================== + def test_01_constructor(self): + """test class constructor""" + + # test blank constructor works correctly + ctx = CryptContext() + self.assertEqual(ctx.to_dict(), {}) + + # test sample 1 with scheme=names + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 with scheme=handlers + ctx = CryptContext(**self.sample_1_resolved_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 2: options w/o schemes + ctx = CryptContext(**self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_2_dict) + + # test sample 3: default only + ctx = CryptContext(**self.sample_3_dict) + self.assertEqual(ctx.to_dict(), self.sample_3_dict) + + # test unicode scheme names (issue 54) + ctx = CryptContext(schemes=[u("sha256_crypt")]) + self.assertEqual(ctx.schemes(), ("sha256_crypt",)) + + def test_02_from_string(self): + """test from_string() constructor""" + # test sample 1 unicode + ctx = CryptContext.from_string(self.sample_1_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 with unnormalized inputs + ctx = CryptContext.from_string(self.sample_1_unnormalized) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 utf-8 + ctx = CryptContext.from_string(self.sample_1_unicode.encode("utf-8")) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 w/ '\r\n' linesep + ctx = CryptContext.from_string(self.sample_1b_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 using UTF-16 and alt section + ctx = CryptContext.from_string(self.sample_1c_bytes, section="mypolicy", + encoding="utf-16") + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test wrong type + self.assertRaises(TypeError, CryptContext.from_string, None) + + # test missing section + self.assertRaises(NoSectionError, CryptContext.from_string, + self.sample_1_unicode, section="fakesection") + + def test_03_from_path(self): + """test from_path() constructor""" + # make sure sample files exist + if not os.path.exists(self.sample_1_path): + raise RuntimeError("can't find data file: %r" % self.sample_1_path) + + # test sample 1 + ctx = CryptContext.from_path(self.sample_1_path) + 
self.assertEqual(ctx.to_dict(), self.sample_1_dict)
+
+        # test sample 1 w/ '\r\n' linesep
+        ctx = CryptContext.from_path(self.sample_1b_path)
+        self.assertEqual(ctx.to_dict(), self.sample_1_dict)
+
+        # test sample 1 encoding using UTF-16 and alt section
+        ctx = CryptContext.from_path(self.sample_1c_path, section="mypolicy",
+                                     encoding="utf-16")
+        self.assertEqual(ctx.to_dict(), self.sample_1_dict)
+
+        # test missing file
+        self.assertRaises(EnvironmentError, CryptContext.from_path,
+                          os.path.join(here, "sample1xxx.cfg"))
+
+        # test missing section
+        self.assertRaises(NoSectionError, CryptContext.from_path,
+                          self.sample_1_path, section="fakesection")
+
+    def test_04_copy(self):
+        """test copy() method"""
+        cc1 = CryptContext(**self.sample_1_dict)
+
+        # overlay sample 2 onto copy
+        cc2 = cc1.copy(**self.sample_2_dict)
+        self.assertEqual(cc1.to_dict(), self.sample_1_dict)
+        self.assertEqual(cc2.to_dict(), self.sample_12_dict)
+
+        # check that repeating overlay makes no change
+        cc2b = cc2.copy(**self.sample_2_dict)
+        self.assertEqual(cc1.to_dict(), self.sample_1_dict)
+        self.assertEqual(cc2b.to_dict(), self.sample_12_dict)
+
+        # overlay sample 3 on copy
+        cc3 = cc2.copy(**self.sample_3_dict)
+        self.assertEqual(cc3.to_dict(), self.sample_123_dict)
+
+        # test empty copy creates separate copy
+        cc4 = cc1.copy()
+        self.assertIsNot(cc4, cc1)
+        self.assertEqual(cc1.to_dict(), self.sample_1_dict)
+        self.assertEqual(cc4.to_dict(), self.sample_1_dict)
+
+        # ... and that modifying copy doesn't affect original
+        cc4.update(**self.sample_2_dict)
+        self.assertEqual(cc1.to_dict(), self.sample_1_dict)
+        self.assertEqual(cc4.to_dict(), self.sample_12_dict)
+
+    def test_09_repr(self):
+        """test repr()"""
+        cc1 = CryptContext(**self.sample_1_dict)
+        # NOTE: "0x-1234" format used by Pyston 0.5.1 (support deprecated 2019-11)
+        self.assertRegex(repr(cc1), "^<CryptContext at 0x-?[0-9a-f]+>$")
+
+    #===================================================================
+    # modifiers
+    #===================================================================
+    def test_10_load(self):
+        """test load() / load_path() method"""
+        # NOTE: load() is the workhorse that handles all policy parsing,
+        # compilation, and validation. most of its features are tested
+        # elsewhere, since all the constructors and modifiers are just
+        # wrappers for it.
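+        # The calls below exercise load()'s source detection: a dict is taken
+        # as an already-parsed policy, a unicode or bytes string goes through
+        # the INI parser, and any other type raises TypeError. Each successful
+        # load() replaces the current policy outright unless update=True.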
+ + # source_type 'auto' + ctx = CryptContext() + + # detect dict + ctx.load(self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # detect unicode string + ctx.load(self.sample_1_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # detect bytes string + ctx.load(self.sample_1_unicode.encode("utf-8")) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # anything else - TypeError + self.assertRaises(TypeError, ctx.load, None) + + # NOTE: load_path() tested by from_path() + # NOTE: additional string tests done by from_string() + + # update flag - tested by update() method tests + # encoding keyword - tested by from_string() & from_path() + # section keyword - tested by from_string() & from_path() + + # test load empty + ctx = CryptContext(**self.sample_1_dict) + ctx.load({}, update=True) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # multiple loads should clear the state + ctx = CryptContext() + ctx.load(self.sample_1_dict) + ctx.load(self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_2_dict) + + def test_11_load_rollback(self): + """test load() errors restore old state""" + # create initial context + cc = CryptContext(["des_crypt", "sha256_crypt"], + sha256_crypt__default_rounds=5000, + all__vary_rounds=0.1, + ) + result = cc.to_string() + + # do an update operation that should fail during parsing + # XXX: not sure what the right error type is here. + self.assertRaises(TypeError, cc.update, too__many__key__parts=True) + self.assertEqual(cc.to_string(), result) + + # do an update operation that should fail during extraction + # FIXME: this isn't failing even in broken case, need to figure out + # way to ensure some keys come after this one. + self.assertRaises(KeyError, cc.update, fake_context_option=True) + self.assertEqual(cc.to_string(), result) + + # do an update operation that should fail during compilation + self.assertRaises(ValueError, cc.update, sha256_crypt__min_rounds=10000) + self.assertEqual(cc.to_string(), result) + + def test_12_update(self): + """test update() method""" + + # empty overlay + ctx = CryptContext(**self.sample_1_dict) + ctx.update() + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test basic overlay + ctx = CryptContext(**self.sample_1_dict) + ctx.update(**self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # ... 
and again + ctx.update(**self.sample_3_dict) + self.assertEqual(ctx.to_dict(), self.sample_123_dict) + + # overlay w/ dict arg + ctx = CryptContext(**self.sample_1_dict) + ctx.update(self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # overlay w/ string + ctx = CryptContext(**self.sample_1_dict) + ctx.update(self.sample_2_unicode) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # too many args + self.assertRaises(TypeError, ctx.update, {}, {}) + self.assertRaises(TypeError, ctx.update, {}, schemes=['des_crypt']) + + # wrong arg type + self.assertRaises(TypeError, ctx.update, None) + + #=================================================================== + # option parsing + #=================================================================== + def test_20_options(self): + """test basic option parsing""" + def parse(**kwds): + return CryptContext(**kwds).to_dict() + + # + # common option parsing tests + # + + # test keys with blank fields are rejected + # blank option + self.assertRaises(TypeError, CryptContext, __=0.1) + self.assertRaises(TypeError, CryptContext, default__scheme__='x') + + # blank scheme + self.assertRaises(TypeError, CryptContext, __option='x') + self.assertRaises(TypeError, CryptContext, default____option='x') + + # blank category + self.assertRaises(TypeError, CryptContext, __scheme__option='x') + + # test keys with too many field are rejected + self.assertRaises(TypeError, CryptContext, + category__scheme__option__invalid = 30000) + + # keys with mixed separators should be handled correctly. + # (testing actual data, not to_dict(), since re-render hid original bug) + self.assertRaises(KeyError, parse, + **{"admin.context__schemes":"md5_crypt"}) + ctx = CryptContext(**{"schemes":"md5_crypt,des_crypt", + "admin.context__default":"des_crypt"}) + self.assertEqual(ctx.default_scheme("admin"), "des_crypt") + + # + # context option -specific tests + # + + # test context option key parsing + result = dict(default="md5_crypt") + self.assertEqual(parse(default="md5_crypt"), result) + self.assertEqual(parse(context__default="md5_crypt"), result) + self.assertEqual(parse(default__context__default="md5_crypt"), result) + self.assertEqual(parse(**{"context.default":"md5_crypt"}), result) + self.assertEqual(parse(**{"default.context.default":"md5_crypt"}), result) + + # test context option key parsing w/ category + result = dict(admin__context__default="md5_crypt") + self.assertEqual(parse(admin__context__default="md5_crypt"), result) + self.assertEqual(parse(**{"admin.context.default":"md5_crypt"}), result) + + # + # hash option -specific tests + # + + # test hash option key parsing + result = dict(all__vary_rounds=0.1) + self.assertEqual(parse(all__vary_rounds=0.1), result) + self.assertEqual(parse(default__all__vary_rounds=0.1), result) + self.assertEqual(parse(**{"all.vary_rounds":0.1}), result) + self.assertEqual(parse(**{"default.all.vary_rounds":0.1}), result) + + # test hash option key parsing w/ category + result = dict(admin__all__vary_rounds=0.1) + self.assertEqual(parse(admin__all__vary_rounds=0.1), result) + self.assertEqual(parse(**{"admin.all.vary_rounds":0.1}), result) + + # settings not allowed if not in hash.setting_kwds + ctx = CryptContext(["phpass", "md5_crypt"], phpass__ident="P") + self.assertRaises(KeyError, ctx.copy, md5_crypt__ident="P") + + # hash options 'salt' and 'rounds' not allowed + self.assertRaises(KeyError, CryptContext, schemes=["des_crypt"], + des_crypt__salt="xx") + self.assertRaises(KeyError, CryptContext, 
schemes=["des_crypt"], + all__salt="xx") + + def test_21_schemes(self): + """test 'schemes' context option parsing""" + + # schemes can be empty + cc = CryptContext(schemes=None) + self.assertEqual(cc.schemes(), ()) + + # schemes can be list of names + cc = CryptContext(schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # schemes can be comma-sep string + cc = CryptContext(schemes=" des_crypt, md5_crypt, ") + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # schemes can be list of handlers + cc = CryptContext(schemes=[hash.des_crypt, hash.md5_crypt]) + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # scheme must be name or handler + self.assertRaises(TypeError, CryptContext, schemes=[uh.StaticHandler]) + + # handlers must have a name + class nameless(uh.StaticHandler): + name = None + self.assertRaises(ValueError, CryptContext, schemes=[nameless]) + + # names must be unique + class dummy_1(uh.StaticHandler): + name = 'dummy_1' + self.assertRaises(KeyError, CryptContext, schemes=[dummy_1, dummy_1]) + + # schemes not allowed per-category + self.assertRaises(KeyError, CryptContext, + admin__context__schemes=["md5_crypt"]) + + def test_22_deprecated(self): + """test 'deprecated' context option parsing""" + def getdep(ctx, category=None): + return [name for name in ctx.schemes() + if ctx.handler(name, category).deprecated] + + # no schemes - all deprecated values allowed + cc = CryptContext(deprecated=["md5_crypt"]) + cc.update(schemes=["md5_crypt", "des_crypt"]) + self.assertEqual(getdep(cc),["md5_crypt"]) + + # deprecated values allowed if subset of schemes + cc = CryptContext(deprecated=["md5_crypt"], schemes=["md5_crypt", "des_crypt"]) + self.assertEqual(getdep(cc), ["md5_crypt"]) + + # can be handler + # XXX: allow handlers in deprecated list? not for now. + self.assertRaises(TypeError, CryptContext, deprecated=[hash.md5_crypt], + schemes=["md5_crypt", "des_crypt"]) +## cc = CryptContext(deprecated=[hash.md5_crypt], schemes=["md5_crypt", "des_crypt"]) +## self.assertEqual(getdep(cc), ["md5_crypt"]) + + # comma sep list + cc = CryptContext(deprecated="md5_crypt,des_crypt", schemes=["md5_crypt", "des_crypt", "sha256_crypt"]) + self.assertEqual(getdep(cc), ["md5_crypt", "des_crypt"]) + + # values outside of schemes not allowed + self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'], + deprecated=['md5_crypt']) + + # deprecating ALL schemes should cause ValueError + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt'], + deprecated=['des_crypt']) + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__deprecated=['des_crypt', 'md5_crypt']) + + # deprecating explicit default scheme should cause ValueError + + # ... default listed as deprecated + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + default="md5_crypt", + deprecated="md5_crypt") + + # ... global default deprecated per-category + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + default="md5_crypt", + admin__context__deprecated="md5_crypt") + + # ... category default deprecated globally + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__default="md5_crypt", + deprecated="md5_crypt") + + # ... 
category default deprecated in category + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__default="md5_crypt", + admin__context__deprecated="md5_crypt") + + # category deplist should shadow default deplist + CryptContext( + schemes=['des_crypt', 'md5_crypt'], + deprecated="md5_crypt", + admin__context__default="md5_crypt", + admin__context__deprecated=[]) + + # wrong type + self.assertRaises(TypeError, CryptContext, deprecated=123) + + # deprecated per-category + cc = CryptContext(deprecated=["md5_crypt"], + schemes=["md5_crypt", "des_crypt"], + admin__context__deprecated=["des_crypt"], + ) + self.assertEqual(getdep(cc), ["md5_crypt"]) + self.assertEqual(getdep(cc, "user"), ["md5_crypt"]) + self.assertEqual(getdep(cc, "admin"), ["des_crypt"]) + + # blank per-category deprecated list, shadowing default list + cc = CryptContext(deprecated=["md5_crypt"], + schemes=["md5_crypt", "des_crypt"], + admin__context__deprecated=[], + ) + self.assertEqual(getdep(cc), ["md5_crypt"]) + self.assertEqual(getdep(cc, "user"), ["md5_crypt"]) + self.assertEqual(getdep(cc, "admin"), []) + + def test_23_default(self): + """test 'default' context option parsing""" + + # anything allowed if no schemes + self.assertEqual(CryptContext(default="md5_crypt").to_dict(), + dict(default="md5_crypt")) + + # default allowed if in scheme list + ctx = CryptContext(default="md5_crypt", schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # default can be handler + # XXX: sure we want to allow this ? maybe deprecate in future. + ctx = CryptContext(default=hash.md5_crypt, schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # implicit default should be first non-deprecated scheme + ctx = CryptContext(schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "des_crypt") + ctx.update(deprecated="des_crypt") + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # error if not in scheme list + self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'], + default='md5_crypt') + + # wrong type + self.assertRaises(TypeError, CryptContext, default=1) + + # per-category + ctx = CryptContext(default="des_crypt", + schemes=["des_crypt", "md5_crypt"], + admin__context__default="md5_crypt") + self.assertEqual(ctx.default_scheme(), "des_crypt") + self.assertEqual(ctx.default_scheme("user"), "des_crypt") + self.assertEqual(ctx.default_scheme("admin"), "md5_crypt") + + def test_24_vary_rounds(self): + """test 'vary_rounds' hash option parsing""" + def parse(v): + return CryptContext(all__vary_rounds=v).to_dict()['all__vary_rounds'] + + # floats should be preserved + self.assertEqual(parse(0.1), 0.1) + self.assertEqual(parse('0.1'), 0.1) + + # 'xx%' should be converted to float + self.assertEqual(parse('10%'), 0.1) + + # ints should be preserved + self.assertEqual(parse(1000), 1000) + self.assertEqual(parse('1000'), 1000) + + #=================================================================== + # inspection & serialization + #=================================================================== + + def assertHandlerDerivedFrom(self, handler, base, msg=None): + self.assertTrue(handler_derived_from(handler, base), msg=msg) + + def test_30_schemes(self): + """test schemes() method""" + # NOTE: also checked under test_21 + + # test empty + ctx = CryptContext() + self.assertEqual(ctx.schemes(), ()) + self.assertEqual(ctx.schemes(resolve=True), ()) + + # test sample 1 + ctx = 
CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.schemes(), tuple(self.sample_1_schemes)) + self.assertEqual(ctx.schemes(resolve=True, unconfigured=True), tuple(self.sample_1_handlers)) + for result, correct in zip(ctx.schemes(resolve=True), self.sample_1_handlers): + self.assertTrue(handler_derived_from(result, correct)) + + # test sample 2 + ctx = CryptContext(**self.sample_2_dict) + self.assertEqual(ctx.schemes(), ()) + + def test_31_default_scheme(self): + """test default_scheme() method""" + # NOTE: also checked under test_23 + + # test empty + ctx = CryptContext() + self.assertRaises(KeyError, ctx.default_scheme) + + # test sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + self.assertEqual(ctx.default_scheme(resolve=True, unconfigured=True), hash.md5_crypt) + self.assertHandlerDerivedFrom(ctx.default_scheme(resolve=True), hash.md5_crypt) + + # test sample 2 + ctx = CryptContext(**self.sample_2_dict) + self.assertRaises(KeyError, ctx.default_scheme) + + # test defaults to first in scheme + ctx = CryptContext(schemes=self.sample_1_schemes) + self.assertEqual(ctx.default_scheme(), "des_crypt") + + # categories tested under test_23 + + def test_32_handler(self): + """test handler() method""" + + # default for empty + ctx = CryptContext() + self.assertRaises(KeyError, ctx.handler) + self.assertRaises(KeyError, ctx.handler, "md5_crypt") + + # default for sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.handler(unconfigured=True), hash.md5_crypt) + self.assertHandlerDerivedFrom(ctx.handler(), hash.md5_crypt) + + # by name + self.assertEqual(ctx.handler("des_crypt", unconfigured=True), hash.des_crypt) + self.assertHandlerDerivedFrom(ctx.handler("des_crypt"), hash.des_crypt) + + # name not in schemes + self.assertRaises(KeyError, ctx.handler, "mysql323") + + # check handler() honors category default + ctx = CryptContext("sha256_crypt,md5_crypt", admin__context__default="md5_crypt") + self.assertEqual(ctx.handler(unconfigured=True), hash.sha256_crypt) + self.assertHandlerDerivedFrom(ctx.handler(), hash.sha256_crypt) + + self.assertEqual(ctx.handler(category="staff", unconfigured=True), hash.sha256_crypt) + self.assertHandlerDerivedFrom(ctx.handler(category="staff"), hash.sha256_crypt) + + self.assertEqual(ctx.handler(category="admin", unconfigured=True), hash.md5_crypt) + self.assertHandlerDerivedFrom(ctx.handler(category="staff"), hash.sha256_crypt) + + # test unicode category strings are accepted under py2 + if PY2: + self.assertEqual(ctx.handler(category=u("staff"), unconfigured=True), hash.sha256_crypt) + self.assertEqual(ctx.handler(category=u("admin"), unconfigured=True), hash.md5_crypt) + + def test_33_options(self): + """test internal _get_record_options() method""" + + def options(ctx, scheme, category=None): + return ctx._config._get_record_options_with_flag(scheme, category)[0] + + # this checks that (3 schemes, 3 categories) inherit options correctly. + # the 'user' category is not present in the options. 
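+        # Resolution order exercised below: scheme-specific keys beat all__
+        # keys, and admin__-prefixed keys beat the global value for that
+        # category only; the unconfigured "user" category simply inherits the
+        # global (default-category) settings.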
+        cc4 = CryptContext(
+            truncate_error=True,
+            schemes = [ "sha512_crypt", "des_crypt", "bsdi_crypt"],
+            deprecated = ["sha512_crypt", "des_crypt"],
+            all__vary_rounds = 0.1,
+            bsdi_crypt__vary_rounds=0.2,
+            sha512_crypt__max_rounds = 20000,
+            admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ],
+            admin__all__vary_rounds = 0.05,
+            admin__bsdi_crypt__vary_rounds=0.3,
+            admin__sha512_crypt__max_rounds = 40000,
+        )
+        self.assertEqual(cc4._config.categories, ("admin",))
+
+        #
+        # sha512_crypt
+        # NOTE: 'truncate_error' shouldn't be passed along...
+        #
+        self.assertEqual(options(cc4, "sha512_crypt"), dict(
+            deprecated=True,
+            vary_rounds=0.1, # inherited from all__
+            max_rounds=20000,
+        ))
+
+        self.assertEqual(options(cc4, "sha512_crypt", "user"), dict(
+            deprecated=True, # unconfigured category inherits from default
+            vary_rounds=0.1,
+            max_rounds=20000,
+        ))
+
+        self.assertEqual(options(cc4, "sha512_crypt", "admin"), dict(
+            # NOT deprecated - context option overridden per-category
+            vary_rounds=0.05, # global overridden per-category
+            max_rounds=40000, # overridden per-category
+        ))
+
+        #
+        # des_crypt
+        # NOTE: vary_rounds shouldn't be passed along...
+        #
+        self.assertEqual(options(cc4, "des_crypt"), dict(
+            deprecated=True,
+            truncate_error=True,
+        ))
+
+        self.assertEqual(options(cc4, "des_crypt", "user"), dict(
+            deprecated=True, # unconfigured category inherits from default
+            truncate_error=True,
+        ))
+
+        self.assertEqual(options(cc4, "des_crypt", "admin"), dict(
+            deprecated=True, # unchanged though overridden
+            truncate_error=True,
+        ))
+
+        #
+        # bsdi_crypt
+        #
+        self.assertEqual(options(cc4, "bsdi_crypt"), dict(
+            vary_rounds=0.2, # overridden from all__vary_rounds
+        ))
+
+        self.assertEqual(options(cc4, "bsdi_crypt", "user"), dict(
+            vary_rounds=0.2, # unconfigured category inherits from default
+        ))
+
+        self.assertEqual(options(cc4, "bsdi_crypt", "admin"), dict(
+            vary_rounds=0.3,
+            deprecated=True, # deprecation set per-category
+        ))
+
+    def test_34_to_dict(self):
+        """test to_dict() method"""
+        # NOTE: this is tested all throughout this test case.
+        ctx = CryptContext(**self.sample_1_dict)
+        self.assertEqual(ctx.to_dict(), self.sample_1_dict)
+        self.assertEqual(ctx.to_dict(resolve=True), self.sample_1_resolved_dict)
+
+    def test_35_to_string(self):
+        """test to_string() method"""
+
+        # create ctx and serialize
+        ctx = CryptContext(**self.sample_1_dict)
+        dump = ctx.to_string()
+
+        # check ctx->string returns canonical format.
+        # NOTE: ConfigParser for PY26 doesn't use OrderedDict,
+        # making to_string()'s ordering unpredictable...
+        # so we skip this test under PY26.
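+        # (PY26's ConfigParser stores options in a plain dict, so serialized
+        # key order is unstable there; the round-trip check below still
+        # applies on that version.)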
+ if not PY26: + self.assertEqual(dump, self.sample_1_unicode) + + # check ctx->string->ctx->dict returns original + ctx2 = CryptContext.from_string(dump) + self.assertEqual(ctx2.to_dict(), self.sample_1_dict) + + # test section kwd is honored + other = ctx.to_string(section="password-security") + self.assertEqual(other, dump.replace("[passlib]","[password-security]")) + + # test unmanaged handler warning + from passlib.tests.test_utils_handlers import UnsaltedHash + ctx3 = CryptContext([UnsaltedHash, "md5_crypt"]) + dump = ctx3.to_string() + self.assertRegex(dump, r"# NOTE: the 'unsalted_test_hash' handler\(s\)" + r" are not registered with Passlib") + + #=================================================================== + # password hash api + #=================================================================== + nonstring_vectors = [ + (None, {}), + (None, {"scheme": "des_crypt"}), + (1, {}), + ((), {}), + ] + + def test_40_basic(self): + """test basic hash/identify/verify functionality""" + handlers = [hash.md5_crypt, hash.des_crypt, hash.bsdi_crypt] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + # run through handlers + for crypt in handlers: + h = cc.hash("test", scheme=crypt.name) + self.assertEqual(cc.identify(h), crypt.name) + self.assertEqual(cc.identify(h, resolve=True, unconfigured=True), crypt) + self.assertHandlerDerivedFrom(cc.identify(h, resolve=True), crypt) + self.assertTrue(cc.verify('test', h)) + self.assertFalse(cc.verify('notest', h)) + + # test default + h = cc.hash("test") + self.assertEqual(cc.identify(h), "md5_crypt") + + # test genhash + h = cc.genhash('secret', cc.genconfig()) + self.assertEqual(cc.identify(h), 'md5_crypt') + + h = cc.genhash('secret', cc.genconfig(), scheme='md5_crypt') + self.assertEqual(cc.identify(h), 'md5_crypt') + + self.assertRaises(ValueError, cc.genhash, 'secret', cc.genconfig(), scheme="des_crypt") + + def test_41_genconfig(self): + """test genconfig() method""" + cc = CryptContext(schemes=["md5_crypt", "phpass"], + phpass__ident="H", + phpass__default_rounds=7, + admin__phpass__ident="P", + ) + + # uses default scheme + self.assertTrue(cc.genconfig().startswith("$1$")) + + # override scheme + self.assertTrue(cc.genconfig(scheme="phpass").startswith("$H$5")) + + # category override + self.assertTrue(cc.genconfig(scheme="phpass", category="admin").startswith("$P$5")) + self.assertTrue(cc.genconfig(scheme="phpass", category="staff").startswith("$H$5")) + + # override scheme & custom settings + self.assertEqual( + cc.genconfig(scheme="phpass", salt='.'*8, rounds=8, ident='P'), + '$P$6........22zGEuacuPOqEpYPDeR0R/', # NOTE: config string generated w/ rounds=1 + ) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # test unicode category strings are accepted under py2 + # this tests basic _get_record() used by hash/genhash/verify. + # we have to omit scheme=xxx so codepath is tested fully + if PY2: + c2 = cc.copy(default="phpass") + self.assertTrue(c2.genconfig(category=u("admin")).startswith("$P$5")) + self.assertTrue(c2.genconfig(category=u("staff")).startswith("$H$5")) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().genconfig) + self.assertRaises(KeyError, CryptContext().genconfig, scheme='md5_crypt') + + # bad scheme values + self.assertRaises(KeyError, cc.genconfig, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.genconfig, scheme=1, category='staff') + self.assertRaises(TypeError, cc.genconfig, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.genconfig, category=1) + + + def test_42_genhash(self): + """test genhash() method""" + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + hash = cc.hash('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.genhash, secret, hash, **kwds) + + # rejects non-string config strings + cc = CryptContext(["des_crypt"]) + for config, kwds in self.nonstring_vectors: + if hash is None: + # NOTE: as of 1.7, genhash is just wrapper for hash(), + # and handles genhash(secret, None) fine. + continue + self.assertRaises(TypeError, cc.genhash, 'secret', config, **kwds) + + # rejects config=None, even if default scheme lacks config string + cc = CryptContext(["mysql323"]) + self.assertRaises(TypeError, cc.genhash, "stub", None) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().genhash, 'secret', 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.genhash, 'secret', hash, scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.genhash, 'secret', hash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.genconfig, 'secret', hash, category=1) + + def test_43_hash(self,): + """test hash() method""" + # XXX: what more can we test here that isn't deprecated + # or handled under another test (e.g. context kwds?) + + # respects rounds + cc = CryptContext(**self.sample_4_dict) + hash = cc.hash("password") + self.assertTrue(hash.startswith("$5$rounds=3000$")) + self.assertTrue(cc.verify("password", hash)) + self.assertFalse(cc.verify("passwordx", hash)) + + # make default > max throws error if attempted + # XXX: move this to copy() test? + self.assertRaises(ValueError, cc.copy, + sha256_crypt__default_rounds=4000) + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.hash, secret, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().hash, 'secret') + + # bad category values + self.assertRaises(TypeError, cc.hash, 'secret', category=1) + + def test_43_hash_legacy(self, use_16_legacy=False): + """test hash() method -- legacy 'scheme' and settings keywords""" + cc = CryptContext(**self.sample_4_dict) + + # TODO: should migrate these tests elsewhere, or remove them. + # can be replaced with following equivalent: + # + # def wrapper(secret, scheme=None, category=None, **kwds): + # handler = cc.handler(scheme, category) + # if kwds: + # handler = handler.using(**kwds) + # return handler.hash(secret) + # + # need to make sure bits being tested here are tested + # under the tests for the equivalent methods called above, + # and then discard the rest of these under 2.0. 
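+        # Put another way: passing settings through hash() is deprecated as of
+        # passlib 1.7; the supported spelling is the wrapper sketched above,
+        # i.e. cc.handler(scheme, category).using(**settings).hash(secret).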
+ + # hash specific settings + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", scheme="phpass", salt='.'*8), + '$H$5........De04R5Egz0aq8Tf.1eVhY/', + ) + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", scheme="phpass", salt='.'*8, ident="P"), + '$P$5........De04R5Egz0aq8Tf.1eVhY/', + ) + + # NOTE: more thorough job of rounds limits done below. + + # min rounds + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", rounds=1999, salt="nacl"), + '$5$rounds=1999$nacl$nmfwJIxqj0csloAAvSER0B8LU0ERCAbhmMug4Twl609', + ) + + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", rounds=2001, salt="nacl"), + '$5$rounds=2001$nacl$8PdeoPL4aXQnJ0woHhqgIw/efyfCKC2WHneOpnvF.31' + ) + # NOTE: max rounds, etc tested in genconfig() + + # bad scheme values + self.assertRaises(KeyError, cc.hash, 'secret', scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.hash, 'secret', scheme=1) + + def test_44_identify(self): + """test identify() border cases""" + handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + # check unknown hash + self.assertEqual(cc.identify('$9$232323123$1287319827'), None) + self.assertRaises(ValueError, cc.identify, '$9$232323123$1287319827', required=True) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.identify, hash, **kwds) + + # throws error without schemes + cc = CryptContext() + self.assertIs(cc.identify('hash'), None) + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + + # bad category values + self.assertRaises(TypeError, cc.identify, None, category=1) + + def test_45_verify(self): + """test verify() scheme kwd""" + handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + h = hash.md5_crypt.hash("test") + + # check base verify + self.assertTrue(cc.verify("test", h)) + self.assertTrue(not cc.verify("notest", h)) + + # check verify using right alg + self.assertTrue(cc.verify('test', h, scheme='md5_crypt')) + self.assertTrue(not cc.verify('notest', h, scheme='md5_crypt')) + + # check verify using wrong alg + self.assertRaises(ValueError, cc.verify, 'test', h, scheme='bsdi_crypt') + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # unknown hash should throw error + self.assertRaises(ValueError, cc.verify, 'stub', '$6$232323123$1287319827') + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + h = refhash = cc.hash('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.verify, secret, h, **kwds) + + # always treat hash=None as False + self.assertFalse(cc.verify(secret, None)) + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for h, kwds in self.nonstring_vectors: + if h is None: + continue + self.assertRaises(TypeError, cc.verify, 'secret', h, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().verify, 'secret', 'hash') + + # bad scheme 
values + self.assertRaises(KeyError, cc.verify, 'secret', refhash, scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.verify, 'secret', refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.verify, 'secret', refhash, category=1) + + def test_46_needs_update(self): + """test needs_update() method""" + cc = CryptContext(**self.sample_4_dict) + + # check deprecated scheme + self.assertTrue(cc.needs_update('9XXD4trGYeGJA')) + self.assertFalse(cc.needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0')) + + # check min rounds + self.assertTrue(cc.needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/')) + self.assertFalse(cc.needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8')) + + # check max rounds + self.assertFalse(cc.needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.')) + self.assertTrue(cc.needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA')) + + #-------------------------------------------------------------- + # test hash.needs_update() interface + #-------------------------------------------------------------- + check_state = [] + class dummy(uh.StaticHandler): + name = 'dummy' + _hash_prefix = '@' + + @classmethod + def needs_update(cls, hash, secret=None): + check_state.append((hash, secret)) + return secret == "nu" + + def _calc_checksum(self, secret): + from hashlib import md5 + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(md5(secret).hexdigest()) + + # calling needs_update should query callback + ctx = CryptContext([dummy]) + hash = refhash = dummy.hash("test") + self.assertFalse(ctx.needs_update(hash)) + self.assertEqual(check_state, [(hash,None)]) + del check_state[:] + + # now with a password + self.assertFalse(ctx.needs_update(hash, secret='bob')) + self.assertEqual(check_state, [(hash,'bob')]) + del check_state[:] + + # now when it returns True + self.assertTrue(ctx.needs_update(hash, secret='nu')) + self.assertEqual(check_state, [(hash,'nu')]) + del check_state[:] + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.needs_update, hash, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().needs_update, 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.needs_update, refhash, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.needs_update, refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.needs_update, refhash, category=1) + + def test_47_verify_and_update(self): + """test verify_and_update()""" + cc = CryptContext(**self.sample_4_dict) + + # create some hashes + h1 = cc.handler("des_crypt").hash("password") + h2 = cc.handler("sha256_crypt").hash("password") + + # check bad password, deprecated hash + ok, new_hash = cc.verify_and_update("wrongpass", h1) + self.assertFalse(ok) + self.assertIs(new_hash, None) + + # check bad password, good hash + ok, new_hash = cc.verify_and_update("wrongpass", h2) + self.assertFalse(ok) + self.assertIs(new_hash, None) + + # check right password, deprecated hash + ok, new_hash = cc.verify_and_update("password", h1) + self.assertTrue(ok) + self.assertTrue(cc.identify(new_hash), "sha256_crypt") + + # check right password, good hash + ok, new_hash = cc.verify_and_update("password", h2) + self.assertTrue(ok) + self.assertIs(new_hash, None) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + hash = refhash = cc.hash('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.verify_and_update, secret, hash, **kwds) + + # always treat hash=None as False + self.assertEqual(cc.verify_and_update(secret, None), (False, None)) + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + if hash is None: + continue + self.assertRaises(TypeError, cc.verify_and_update, 'secret', hash, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().verify_and_update, 'secret', 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.verify_and_update, 'secret', refhash, scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, category=1) + + def test_48_context_kwds(self): + """hash(), verify(), and verify_and_update() -- discard unused context keywords""" + + # setup test case + # NOTE: postgres_md5 hash supports 'user' context kwd, which is used for this test. 
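+        # Context kwds are per-call inputs (e.g. postgres_md5's user=...), not
+        # hash settings: CryptContext.context_kwds is the union across all
+        # configured schemes, schemes that don't support a given kwd silently
+        # discard it, and unknown kwds raise TypeError.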
+ from passlib.hash import des_crypt, md5_crypt, postgres_md5 + des_hash = des_crypt.hash("stub") + pg_root_hash = postgres_md5.hash("stub", user="root") + pg_admin_hash = postgres_md5.hash("stub", user="admin") + + #------------------------------------------------------------ + # case 1: contextual kwds not supported by any hash in CryptContext + #------------------------------------------------------------ + cc1 = CryptContext([des_crypt, md5_crypt]) + self.assertEqual(cc1.context_kwds, set()) + + # des_scrypt should work w/o any contextual kwds + self.assertTrue(des_crypt.identify(cc1.hash("stub")), "des_crypt") + self.assertTrue(cc1.verify("stub", des_hash)) + self.assertEqual(cc1.verify_and_update("stub", des_hash), (True, None)) + + # des_crypt should throw error due to unknown context keyword + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertRaises(TypeError, cc1.hash, "stub", user="root") + self.assertRaises(TypeError, cc1.verify, "stub", des_hash, user="root") + self.assertRaises(TypeError, cc1.verify_and_update, "stub", des_hash, user="root") + + #------------------------------------------------------------ + # case 2: at least one contextual kwd supported by non-default hash + #------------------------------------------------------------ + cc2 = CryptContext([des_crypt, postgres_md5]) + self.assertEqual(cc2.context_kwds, set(["user"])) + + # verify des_crypt works w/o "user" kwd + self.assertTrue(des_crypt.identify(cc2.hash("stub")), "des_crypt") + self.assertTrue(cc2.verify("stub", des_hash)) + self.assertEqual(cc2.verify_and_update("stub", des_hash), (True, None)) + + # verify des_crypt ignores "user" kwd + self.assertTrue(des_crypt.identify(cc2.hash("stub", user="root")), "des_crypt") + self.assertTrue(cc2.verify("stub", des_hash, user="root")) + self.assertEqual(cc2.verify_and_update("stub", des_hash, user="root"), (True, None)) + + # verify error with unknown kwd + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertRaises(TypeError, cc2.hash, "stub", badkwd="root") + self.assertRaises(TypeError, cc2.verify, "stub", des_hash, badkwd="root") + self.assertRaises(TypeError, cc2.verify_and_update, "stub", des_hash, badkwd="root") + + #------------------------------------------------------------ + # case 3: at least one contextual kwd supported by default hash + #------------------------------------------------------------ + cc3 = CryptContext([postgres_md5, des_crypt], deprecated="auto") + self.assertEqual(cc3.context_kwds, set(["user"])) + + # postgres_md5 should have error w/o context kwd + self.assertRaises(TypeError, cc3.hash, "stub") + self.assertRaises(TypeError, cc3.verify, "stub", pg_root_hash) + self.assertRaises(TypeError, cc3.verify_and_update, "stub", pg_root_hash) + + # postgres_md5 should work w/ context kwd + self.assertEqual(cc3.hash("stub", user="root"), pg_root_hash) + self.assertTrue(cc3.verify("stub", pg_root_hash, user="root")) + self.assertEqual(cc3.verify_and_update("stub", pg_root_hash, user="root"), (True, None)) + + # verify_and_update() should fail against wrong user + self.assertEqual(cc3.verify_and_update("stub", pg_root_hash, user="admin"), (False, None)) + + # verify_and_update() should pass all context kwds through when rehashing + self.assertEqual(cc3.verify_and_update("stub", des_hash, user="root"), + (True, pg_root_hash)) + + #=================================================================== + # rounds options + 
#=================================================================== + + # TODO: now that rounds generation has moved out of _CryptRecord to HasRounds, + # this should just test that we're passing right options to handler.using(), + # and that resulting handler has right settings. + # Can then just let HasRounds tests (which are a copy of this) deal with things. + + # NOTE: the follow tests check how _CryptRecord handles + # the min/max/default/vary_rounds options, via the output of + # genconfig(). it's assumed hash() takes the same codepath. + + def test_50_rounds_limits(self): + """test rounds limits""" + cc = CryptContext(schemes=["sha256_crypt"], + sha256_crypt__min_rounds=2000, + sha256_crypt__max_rounds=3000, + sha256_crypt__default_rounds=2500, + ) + + # stub digest returned by sha256_crypt's genconfig calls.. + STUB = '...........................................' + + #-------------------------------------------------- + # settings should have been applied to custom handler, + # it should take care of the rest + #-------------------------------------------------- + custom_handler = cc._get_record("sha256_crypt", None) + self.assertEqual(custom_handler.min_desired_rounds, 2000) + self.assertEqual(custom_handler.max_desired_rounds, 3000) + self.assertEqual(custom_handler.default_rounds, 2500) + + #-------------------------------------------------- + # min_rounds + #-------------------------------------------------- + + # set below handler minimum + with self.assertWarningList([PasslibHashWarning]*2): + c2 = cc.copy(sha256_crypt__min_rounds=500, sha256_crypt__max_rounds=None, + sha256_crypt__default_rounds=500) + self.assertEqual(c2.genconfig(salt="nacl"), "$5$rounds=1000$nacl$" + STUB) + + # below policy minimum + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .replace() + with self.assertWarningList([]): + self.assertEqual( + cc.genconfig(rounds=1999, salt="nacl"), '$5$rounds=1999$nacl$' + STUB) + + # equal to policy minimum + self.assertEqual( + cc.genconfig(rounds=2000, salt="nacl"), '$5$rounds=2000$nacl$' + STUB) + + # above policy minimum + self.assertEqual( + cc.genconfig(rounds=2001, salt="nacl"), '$5$rounds=2001$nacl$' + STUB) + + #-------------------------------------------------- + # max rounds + #-------------------------------------------------- + + # set above handler max + with self.assertWarningList([PasslibHashWarning]*2): + c2 = cc.copy(sha256_crypt__max_rounds=int(1e9)+500, sha256_crypt__min_rounds=None, + sha256_crypt__default_rounds=int(1e9)+500) + + self.assertEqual(c2.genconfig(salt="nacl"), "$5$rounds=999999999$nacl$" + STUB) + + # above policy max + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .using() + with self.assertWarningList([]): + self.assertEqual( + cc.genconfig(rounds=3001, salt="nacl"), '$5$rounds=3001$nacl$' + STUB) + + # equal policy max + self.assertEqual( + cc.genconfig(rounds=3000, salt="nacl"), '$5$rounds=3000$nacl$' + STUB) + + # below policy max + self.assertEqual( + cc.genconfig(rounds=2999, salt="nacl"), '$5$rounds=2999$nacl$' + STUB) + + #-------------------------------------------------- + # default_rounds + #-------------------------------------------------- + + # explicit default rounds + self.assertEqual(cc.genconfig(salt="nacl"), '$5$rounds=2500$nacl$' + STUB) + + # fallback default rounds - use handler's + df = hash.sha256_crypt.default_rounds + c2 = cc.copy(sha256_crypt__default_rounds=None, sha256_crypt__max_rounds=df<<1) + self.assertEqual(c2.genconfig(salt="nacl"), 
'$5$rounds=%d$nacl$%s' % (df, STUB)) + + # fallback default rounds - use handler's, but clipped to max rounds + c2 = cc.copy(sha256_crypt__default_rounds=None, sha256_crypt__max_rounds=3000) + self.assertEqual(c2.genconfig(salt="nacl"), '$5$rounds=3000$nacl$' + STUB) + + # TODO: test default falls back to mx / mn if handler has no default. + + # default rounds - out of bounds + self.assertRaises(ValueError, cc.copy, sha256_crypt__default_rounds=1999) + cc.copy(sha256_crypt__default_rounds=2000) + cc.copy(sha256_crypt__default_rounds=3000) + self.assertRaises(ValueError, cc.copy, sha256_crypt__default_rounds=3001) + + #-------------------------------------------------- + # border cases + #-------------------------------------------------- + + # invalid min/max bounds + c2 = CryptContext(schemes=["sha256_crypt"]) + # NOTE: as of v1.7, these are clipped w/ a warning instead... + # self.assertRaises(ValueError, c2.copy, sha256_crypt__min_rounds=-1) + # self.assertRaises(ValueError, c2.copy, sha256_crypt__max_rounds=-1) + self.assertRaises(ValueError, c2.copy, sha256_crypt__min_rounds=2000, + sha256_crypt__max_rounds=1999) + + # test bad values + self.assertRaises(ValueError, CryptContext, sha256_crypt__min_rounds='x') + self.assertRaises(ValueError, CryptContext, sha256_crypt__max_rounds='x') + self.assertRaises(ValueError, CryptContext, all__vary_rounds='x') + self.assertRaises(ValueError, CryptContext, sha256_crypt__default_rounds='x') + + # test bad types rejected + bad = datetime.datetime.now() # picked cause can't be compared to int + self.assertRaises(TypeError, CryptContext, "sha256_crypt", sha256_crypt__min_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", sha256_crypt__max_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__vary_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", sha256_crypt__default_rounds=bad) + + def test_51_linear_vary_rounds(self): + """test linear vary rounds""" + cc = CryptContext(schemes=["sha256_crypt"], + sha256_crypt__min_rounds=1995, + sha256_crypt__max_rounds=2005, + sha256_crypt__default_rounds=2000, + ) + + # test negative + self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1) + self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%") + self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%") + + # test static + c2 = cc.copy(all__vary_rounds=0) + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000) + + c2 = cc.copy(all__vary_rounds="0%") + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000) + + # test absolute + c2 = cc.copy(all__vary_rounds=1) + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 1) + self.assert_rounds_range(c2, "sha256_crypt", 1999, 2001) + c2 = cc.copy(all__vary_rounds=100) + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 100) + self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005) + + # test relative + c2 = cc.copy(all__vary_rounds="0.1%") + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 0.001) + self.assert_rounds_range(c2, "sha256_crypt", 1998, 2002) + c2 = cc.copy(all__vary_rounds="100%") + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 1.0) + self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005) + + def test_52_log2_vary_rounds(self): + """test log2 vary rounds""" + cc = CryptContext(schemes=["bcrypt"], + 
bcrypt__min_rounds=15, + bcrypt__max_rounds=25, + bcrypt__default_rounds=20, + ) + + # test negative + self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1) + self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%") + self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%") + + # test static + c2 = cc.copy(all__vary_rounds=0) + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="0%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + # test absolute + c2 = cc.copy(all__vary_rounds=1) + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 1) + self.assert_rounds_range(c2, "bcrypt", 19, 21) + c2 = cc.copy(all__vary_rounds=100) + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 100) + self.assert_rounds_range(c2, "bcrypt", 15, 25) + + # test relative - should shift over at 50% mark + c2 = cc.copy(all__vary_rounds="1%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0.01) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="49%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0.49) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="50%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0.5) + self.assert_rounds_range(c2, "bcrypt", 19, 20) + + c2 = cc.copy(all__vary_rounds="100%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 1.0) + self.assert_rounds_range(c2, "bcrypt", 15, 21) + + def assert_rounds_range(self, context, scheme, lower, upper): + """helper to check vary_rounds covers specified range""" + # NOTE: this runs enough times the min and max *should* be hit, + # though there's a faint chance it will randomly fail. 
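        # [editor's note -- added reasoning, not part of the committed file:
        #  assuming the vary_rounds draws are roughly uniform, 300 samples make
        #  a miss vanishingly unlikely.  the narrowest range exercised above
        #  (vary_rounds=1 -> 3 candidate values) misses an endpoint with
        #  probability about 2*(2/3)**300 (~1e-53); even the widest ranges
        #  (11 candidate values, e.g. 1995..2005 or bcrypt 15..25) miss one
        #  with probability about 2*(10/11)**300 (~1e-12).]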
+ handler = context.handler(scheme) + salt = handler.default_salt_chars[0:1] * handler.max_salt_size + seen = set() + for i in irange(300): + h = context.genconfig(scheme, salt=salt) + r = handler.from_string(h).rounds + seen.add(r) + self.assertEqual(min(seen), lower, "vary_rounds had wrong lower limit:") + self.assertEqual(max(seen), upper, "vary_rounds had wrong upper limit:") + + #=================================================================== + # harden_verify / min_verify_time + #=================================================================== + def test_harden_verify_parsing(self): + """harden_verify -- parsing""" + warnings.filterwarnings("ignore", ".*harden_verify.*", + category=DeprecationWarning) + + # valid values + ctx = CryptContext(schemes=["sha256_crypt"]) + self.assertEqual(ctx.harden_verify, None) + self.assertEqual(ctx.using(harden_verify="").harden_verify, None) + self.assertEqual(ctx.using(harden_verify="true").harden_verify, None) + self.assertEqual(ctx.using(harden_verify="false").harden_verify, None) + + def test_dummy_verify(self): + """ + dummy_verify() method + """ + # check dummy_verify() takes expected time + expected = 0.05 + accuracy = 0.2 + handler = DelayHash.using() + handler.delay = expected + ctx = CryptContext(schemes=[handler]) + ctx.dummy_verify() # prime the memoized helpers + elapsed, _ = time_call(ctx.dummy_verify) + self.assertAlmostEqual(elapsed, expected, delta=expected * accuracy) + + # TODO: test dummy_verify() invoked by .verify() when hash is None, + # and same for .verify_and_update() + + #=================================================================== + # feature tests + #=================================================================== + def test_61_autodeprecate(self): + """test deprecated='auto' is handled correctly""" + + def getstate(ctx, category=None): + return [ctx.handler(scheme, category).deprecated for scheme in ctx.schemes()] + + # correctly reports default + ctx = CryptContext("sha256_crypt,md5_crypt,des_crypt", deprecated="auto") + self.assertEqual(getstate(ctx, None), [False, True, True]) + self.assertEqual(getstate(ctx, "admin"), [False, True, True]) + + # correctly reports changed default + ctx.update(default="md5_crypt") + self.assertEqual(getstate(ctx, None), [True, False, True]) + self.assertEqual(getstate(ctx, "admin"), [True, False, True]) + + # category default is handled correctly + ctx.update(admin__context__default="des_crypt") + self.assertEqual(getstate(ctx, None), [True, False, True]) + self.assertEqual(getstate(ctx, "admin"), [True, True, False]) + + # handles 1 scheme + ctx = CryptContext(["sha256_crypt"], deprecated="auto") + self.assertEqual(getstate(ctx, None), [False]) + self.assertEqual(getstate(ctx, "admin"), [False]) + + # disallow auto & other deprecated schemes at same time. 
+ self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt", + deprecated="auto,md5_crypt") + self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt", + deprecated="md5_crypt,auto") + + def test_disabled_hashes(self): + """disabled hash support""" + # + # init ref info + # + from passlib.exc import UnknownHashError + from passlib.hash import md5_crypt, unix_disabled + + ctx = CryptContext(["des_crypt"]) + ctx2 = CryptContext(["des_crypt", "unix_disabled"]) + h_ref = ctx.hash("foo") + h_other = md5_crypt.hash('foo') + + # + # ctx.disable() + # + + # test w/o disabled hash support + self.assertRaisesRegex(RuntimeError, "no disabled hasher present", + ctx.disable) + self.assertRaisesRegex(RuntimeError, "no disabled hasher present", + ctx.disable, h_ref) + self.assertRaisesRegex(RuntimeError, "no disabled hasher present", + ctx.disable, h_other) + + # test w/ disabled hash support + h_dis = ctx2.disable() + self.assertEqual(h_dis, unix_disabled.default_marker) + h_dis_ref = ctx2.disable(h_ref) + self.assertEqual(h_dis_ref, unix_disabled.default_marker + h_ref) + + h_dis_other = ctx2.disable(h_other) + self.assertEqual(h_dis_other, unix_disabled.default_marker + h_other) + + # don't double-wrap existing disabled hash + self.assertEqual(ctx2.disable(h_dis_ref), h_dis_ref) + + # + # ctx.is_enabled() + # + + # test w/o disabled hash support + self.assertTrue(ctx.is_enabled(h_ref)) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_other) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_dis) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_dis_ref) + + # test w/ disabled hash support + self.assertTrue(ctx2.is_enabled(h_ref)) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_other) + self.assertFalse(ctx2.is_enabled(h_dis)) + self.assertFalse(ctx2.is_enabled(h_dis_ref)) + + # + # ctx.enable() + # + + # test w/o disabled hash support + self.assertRaises(UnknownHashError, ctx.enable, "") + self.assertRaises(TypeError, ctx.enable, None) + self.assertEqual(ctx.enable(h_ref), h_ref) + self.assertRaises(UnknownHashError, ctx.enable, h_other) + self.assertRaises(UnknownHashError, ctx.enable, h_dis) + self.assertRaises(UnknownHashError, ctx.enable, h_dis_ref) + + # test w/ disabled hash support + self.assertRaises(UnknownHashError, ctx.enable, "") + self.assertRaises(TypeError, ctx2.enable, None) + self.assertEqual(ctx2.enable(h_ref), h_ref) + self.assertRaises(UnknownHashError, ctx2.enable, h_other) + self.assertRaisesRegex(ValueError, "cannot restore original hash", + ctx2.enable, h_dis) + self.assertEqual(ctx2.enable(h_dis_ref), h_ref) + + #=================================================================== + # eoc + #=================================================================== + +import hashlib, time + +class DelayHash(uh.StaticHandler): + """dummy hasher which delays by specified amount""" + name = "delay_hash" + checksum_chars = uh.LOWER_HEX_CHARS + checksum_size = 40 + delay = 0 + _hash_prefix = u("$x$") + + def _calc_checksum(self, secret): + time.sleep(self.delay) + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(hashlib.sha1(b"prefix" + secret).hexdigest()) + +#============================================================================= +# LazyCryptContext +#============================================================================= +class dummy_2(uh.StaticHandler): + name = "dummy_2" + +class LazyCryptContextTest(TestCase): + descriptionPrefix = "LazyCryptContext" + + def setUp(self): + # make sure 
this isn't registered before OR after + unload_handler_name("dummy_2") + self.addCleanup(unload_handler_name, "dummy_2") + + def test_kwd_constructor(self): + """test plain kwds""" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt")) + self.assertTrue(cc.handler("des_crypt").deprecated) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + + def test_callable_constructor(self): + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + def onload(flag=False): + self.assertTrue(flag) + return dict(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + cc = LazyCryptContext(onload=onload, flag=True) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt")) + self.assertTrue(cc.handler("des_crypt").deprecated) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_context_deprecated.py b/venv/lib/python3.12/site-packages/passlib/tests/test_context_deprecated.py new file mode 100644 index 0000000..0f76624 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_context_deprecated.py @@ -0,0 +1,743 @@ +"""tests for passlib.context + +this file is a clone of the 1.5 test_context.py, +containing the tests using the legacy CryptPolicy api. +it's being preserved here to ensure the old api doesn't break +(until Passlib 1.8, when this and the legacy api will be removed). 
+""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from logging import getLogger +import os +import warnings +# site +try: + from pkg_resources import resource_filename +except ImportError: + resource_filename = None +# pkg +from passlib import hash +from passlib.context import CryptContext, CryptPolicy, LazyCryptContext +from passlib.utils import to_bytes, to_unicode +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase, set_file +from passlib.registry import (register_crypt_handler_path, + _has_crypt_handler as has_crypt_handler, + _unload_handler_name as unload_handler_name, + ) +# module +log = getLogger(__name__) + +#============================================================================= +# +#============================================================================= +class CryptPolicyTest(TestCase): + """test CryptPolicy object""" + + # TODO: need to test user categories w/in all this + + descriptionPrefix = "CryptPolicy" + + #=================================================================== + # sample crypt policies used for testing + #=================================================================== + + #--------------------------------------------------------------- + # sample 1 - average config file + #--------------------------------------------------------------- + # NOTE: copy of this is stored in file passlib/tests/sample_config_1s.cfg + sample_config_1s = """\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all.vary_rounds = 10%% +bsdi_crypt.max_rounds = 30000 +bsdi_crypt.default_rounds = 25000 +sha512_crypt.max_rounds = 50000 +sha512_crypt.min_rounds = 40000 +""" + sample_config_1s_path = os.path.abspath(os.path.join( + os.path.dirname(__file__), "sample_config_1s.cfg")) + if not os.path.exists(sample_config_1s_path) and resource_filename: + # in case we're zipped up in an egg. + sample_config_1s_path = resource_filename("passlib.tests", + "sample_config_1s.cfg") + + # make sure sample_config_1s uses \n linesep - tests rely on this + assert sample_config_1s.startswith("[passlib]\nschemes") + + sample_config_1pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + sample_config_1pid = { + "schemes": "des_crypt, md5_crypt, bsdi_crypt, sha512_crypt", + "default": "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + "all.vary_rounds": 0.1, + "bsdi_crypt.max_rounds": 30000, + "bsdi_crypt.default_rounds": 25000, + "sha512_crypt.max_rounds": 50000, + "sha512_crypt.min_rounds": 40000, + } + + sample_config_1prd = dict( + schemes = [ hash.des_crypt, hash.md5_crypt, hash.bsdi_crypt, hash.sha512_crypt], + default = "md5_crypt", # NOTE: passlib <= 1.5 was handler obj. 
+ # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + #--------------------------------------------------------------- + # sample 2 - partial policy & result of overlay on sample 1 + #--------------------------------------------------------------- + sample_config_2s = """\ +[passlib] +bsdi_crypt.min_rounds = 29000 +bsdi_crypt.max_rounds = 35000 +bsdi_crypt.default_rounds = 31000 +sha512_crypt.min_rounds = 45000 +""" + + sample_config_2pd = dict( + # using this to test full replacement of existing options + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + # using this to test partial replacement of existing options + sha512_crypt__min_rounds=45000, + ) + + sample_config_12pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds=45000, + ) + + #--------------------------------------------------------------- + # sample 3 - just changing default + #--------------------------------------------------------------- + sample_config_3pd = dict( + default="sha512_crypt", + ) + + sample_config_123pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "sha512_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds=45000, + ) + + #--------------------------------------------------------------- + # sample 4 - category specific + #--------------------------------------------------------------- + sample_config_4s = """ +[passlib] +schemes = sha512_crypt +all.vary_rounds = 10%% +default.sha512_crypt.max_rounds = 20000 +admin.all.vary_rounds = 5%% +admin.sha512_crypt.max_rounds = 40000 +""" + + sample_config_4pd = dict( + schemes = [ "sha512_crypt" ], + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + sha512_crypt__max_rounds = 20000, + # NOTE: not maintaining backwards compat for rendering to "5%" + admin__all__vary_rounds = 0.05, + admin__sha512_crypt__max_rounds = 40000, + ) + + #--------------------------------------------------------------- + # sample 5 - to_string & deprecation testing + #--------------------------------------------------------------- + sample_config_5s = sample_config_1s + """\ +deprecated = des_crypt +admin__context__deprecated = des_crypt, bsdi_crypt +""" + + sample_config_5pd = sample_config_1pd.copy() + sample_config_5pd.update( + deprecated = [ "des_crypt" ], + admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ], + ) + + sample_config_5pid = sample_config_1pid.copy() + sample_config_5pid.update({ + "deprecated": "des_crypt", + "admin.context.deprecated": "des_crypt, bsdi_crypt", + }) + + sample_config_5prd = sample_config_1prd.copy() + sample_config_5prd.update({ + # XXX: should deprecated return the actual handlers in this case? + # would have to modify how policy stores info, for one. 
+ "deprecated": ["des_crypt"], + "admin__context__deprecated": ["des_crypt", "bsdi_crypt"], + }) + + #=================================================================== + # constructors + #=================================================================== + def setUp(self): + TestCase.setUp(self) + warnings.filterwarnings("ignore", + r"The CryptPolicy class has been deprecated") + warnings.filterwarnings("ignore", + r"the method.*hash_needs_update.*is deprecated") + warnings.filterwarnings("ignore", "The 'all' scheme is deprecated.*") + warnings.filterwarnings("ignore", "bsdi_crypt rounds should be odd") + + def test_00_constructor(self): + """test CryptPolicy() constructor""" + policy = CryptPolicy(**self.sample_config_1pd) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + policy = CryptPolicy(self.sample_config_1pd) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + self.assertRaises(TypeError, CryptPolicy, {}, {}) + self.assertRaises(TypeError, CryptPolicy, {}, dummy=1) + + # check key with too many separators is rejected + self.assertRaises(TypeError, CryptPolicy, + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + bad__key__bsdi_crypt__max_rounds = 30000, + ) + + # check nameless handler rejected + class nameless(uh.StaticHandler): + name = None + self.assertRaises(ValueError, CryptPolicy, schemes=[nameless]) + + # check scheme must be name or crypt handler + self.assertRaises(TypeError, CryptPolicy, schemes=[uh.StaticHandler]) + + # check name conflicts are rejected + class dummy_1(uh.StaticHandler): + name = 'dummy_1' + self.assertRaises(KeyError, CryptPolicy, schemes=[dummy_1, dummy_1]) + + # with unknown deprecated value + self.assertRaises(KeyError, CryptPolicy, + schemes=['des_crypt'], + deprecated=['md5_crypt']) + + # with unknown default value + self.assertRaises(KeyError, CryptPolicy, + schemes=['des_crypt'], + default='md5_crypt') + + def test_01_from_path_simple(self): + """test CryptPolicy.from_path() constructor""" + # NOTE: this is separate so it can also run under GAE + + # test preset stored in existing file + path = self.sample_config_1s_path + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test if path missing + self.assertRaises(EnvironmentError, CryptPolicy.from_path, path + 'xxx') + + def test_01_from_path(self): + """test CryptPolicy.from_path() constructor with encodings""" + path = self.mktemp() + + # test "\n" linesep + set_file(path, self.sample_config_1s) + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test "\r\n" linesep + set_file(path, self.sample_config_1s.replace("\n","\r\n")) + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with custom encoding + uc2 = to_bytes(self.sample_config_1s, "utf-16", source_encoding="utf-8") + set_file(path, uc2) + policy = CryptPolicy.from_path(path, encoding="utf-16") + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + def test_02_from_string(self): + """test CryptPolicy.from_string() constructor""" + # test "\n" linesep + policy = CryptPolicy.from_string(self.sample_config_1s) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test "\r\n" linesep + policy = CryptPolicy.from_string( + self.sample_config_1s.replace("\n","\r\n")) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with unicode + data = to_unicode(self.sample_config_1s) + policy = 
CryptPolicy.from_string(data) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with non-ascii-compatible encoding + uc2 = to_bytes(self.sample_config_1s, "utf-16", source_encoding="utf-8") + policy = CryptPolicy.from_string(uc2, encoding="utf-16") + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test category specific options + policy = CryptPolicy.from_string(self.sample_config_4s) + self.assertEqual(policy.to_dict(), self.sample_config_4pd) + + def test_03_from_source(self): + """test CryptPolicy.from_source() constructor""" + # pass it a path + policy = CryptPolicy.from_source(self.sample_config_1s_path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it a string + policy = CryptPolicy.from_source(self.sample_config_1s) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it a dict (NOTE: make a copy to detect in-place modifications) + policy = CryptPolicy.from_source(self.sample_config_1pd.copy()) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it existing policy + p2 = CryptPolicy.from_source(policy) + self.assertIs(policy, p2) + + # pass it something wrong + self.assertRaises(TypeError, CryptPolicy.from_source, 1) + self.assertRaises(TypeError, CryptPolicy.from_source, []) + + def test_04_from_sources(self): + """test CryptPolicy.from_sources() constructor""" + + # pass it empty list + self.assertRaises(ValueError, CryptPolicy.from_sources, []) + + # pass it one-element list + policy = CryptPolicy.from_sources([self.sample_config_1s]) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass multiple sources + policy = CryptPolicy.from_sources( + [ + self.sample_config_1s_path, + self.sample_config_2s, + self.sample_config_3pd, + ]) + self.assertEqual(policy.to_dict(), self.sample_config_123pd) + + def test_05_replace(self): + """test CryptPolicy.replace() constructor""" + + p1 = CryptPolicy(**self.sample_config_1pd) + + # check overlaying sample 2 + p2 = p1.replace(**self.sample_config_2pd) + self.assertEqual(p2.to_dict(), self.sample_config_12pd) + + # check repeating overlay makes no change + p2b = p2.replace(**self.sample_config_2pd) + self.assertEqual(p2b.to_dict(), self.sample_config_12pd) + + # check overlaying sample 3 + p3 = p2.replace(self.sample_config_3pd) + self.assertEqual(p3.to_dict(), self.sample_config_123pd) + + def test_06_forbidden(self): + """test CryptPolicy() forbidden kwds""" + + # salt not allowed to be set + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + des_crypt__salt="xx", + ) + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + all__salt="xx", + ) + + # schemes not allowed for category + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + user__context__schemes=["md5_crypt"], + ) + + #=================================================================== + # reading + #=================================================================== + def test_10_has_schemes(self): + """test has_schemes() method""" + + p1 = CryptPolicy(**self.sample_config_1pd) + self.assertTrue(p1.has_schemes()) + + p3 = CryptPolicy(**self.sample_config_3pd) + self.assertTrue(not p3.has_schemes()) + + def test_11_iter_handlers(self): + """test iter_handlers() method""" + + p1 = CryptPolicy(**self.sample_config_1pd) + s = self.sample_config_1prd['schemes'] + self.assertEqual(list(p1.iter_handlers()), s) + + p3 = CryptPolicy(**self.sample_config_3pd) + self.assertEqual(list(p3.iter_handlers()), []) + + def 
test_12_get_handler(self): + """test get_handler() method""" + + p1 = CryptPolicy(**self.sample_config_1pd) + + # check by name + self.assertIs(p1.get_handler("bsdi_crypt"), hash.bsdi_crypt) + + # check by missing name + self.assertIs(p1.get_handler("sha256_crypt"), None) + self.assertRaises(KeyError, p1.get_handler, "sha256_crypt", required=True) + + # check default + self.assertIs(p1.get_handler(), hash.md5_crypt) + + def test_13_get_options(self): + """test get_options() method""" + + p12 = CryptPolicy(**self.sample_config_12pd) + + self.assertEqual(p12.get_options("bsdi_crypt"),dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds = 0.1, + min_rounds = 29000, + max_rounds = 35000, + default_rounds = 31000, + )) + + self.assertEqual(p12.get_options("sha512_crypt"),dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds = 0.1, + min_rounds = 45000, + max_rounds = 50000, + )) + + p4 = CryptPolicy.from_string(self.sample_config_4s) + self.assertEqual(p4.get_options("sha512_crypt"), dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(p4.get_options("sha512_crypt", "user"), dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(p4.get_options("sha512_crypt", "admin"), dict( + # NOTE: not maintaining backwards compat for rendering to "5%" + vary_rounds=0.05, + max_rounds=40000, + )) + + def test_14_handler_is_deprecated(self): + """test handler_is_deprecated() method""" + pa = CryptPolicy(**self.sample_config_1pd) + pb = CryptPolicy(**self.sample_config_5pd) + + self.assertFalse(pa.handler_is_deprecated("des_crypt")) + self.assertFalse(pa.handler_is_deprecated(hash.bsdi_crypt)) + self.assertFalse(pa.handler_is_deprecated("sha512_crypt")) + + self.assertTrue(pb.handler_is_deprecated("des_crypt")) + self.assertFalse(pb.handler_is_deprecated(hash.bsdi_crypt)) + self.assertFalse(pb.handler_is_deprecated("sha512_crypt")) + + # check categories as well + self.assertTrue(pb.handler_is_deprecated("des_crypt", "user")) + self.assertFalse(pb.handler_is_deprecated("bsdi_crypt", "user")) + self.assertTrue(pb.handler_is_deprecated("des_crypt", "admin")) + self.assertTrue(pb.handler_is_deprecated("bsdi_crypt", "admin")) + + # check deprecation is overridden per category + pc = CryptPolicy( + schemes=["md5_crypt", "des_crypt"], + deprecated=["md5_crypt"], + user__context__deprecated=["des_crypt"], + ) + self.assertTrue(pc.handler_is_deprecated("md5_crypt")) + self.assertFalse(pc.handler_is_deprecated("des_crypt")) + self.assertFalse(pc.handler_is_deprecated("md5_crypt", "user")) + self.assertTrue(pc.handler_is_deprecated("des_crypt", "user")) + + def test_15_min_verify_time(self): + """test get_min_verify_time() method""" + # silence deprecation warnings for min verify time + warnings.filterwarnings("ignore", category=DeprecationWarning) + + pa = CryptPolicy() + self.assertEqual(pa.get_min_verify_time(), 0) + self.assertEqual(pa.get_min_verify_time('admin'), 0) + + pb = pa.replace(min_verify_time=.1) + self.assertEqual(pb.get_min_verify_time(), 0) + self.assertEqual(pb.get_min_verify_time('admin'), 0) + + #=================================================================== + # serialization + #=================================================================== + def test_20_iter_config(self): + """test iter_config() method""" + p5 = CryptPolicy(**self.sample_config_5pd) + 
self.assertEqual(dict(p5.iter_config()), self.sample_config_5pd) + self.assertEqual(dict(p5.iter_config(resolve=True)), self.sample_config_5prd) + self.assertEqual(dict(p5.iter_config(ini=True)), self.sample_config_5pid) + + def test_21_to_dict(self): + """test to_dict() method""" + p5 = CryptPolicy(**self.sample_config_5pd) + self.assertEqual(p5.to_dict(), self.sample_config_5pd) + self.assertEqual(p5.to_dict(resolve=True), self.sample_config_5prd) + + def test_22_to_string(self): + """test to_string() method""" + pa = CryptPolicy(**self.sample_config_5pd) + s = pa.to_string() # NOTE: can't compare string directly, ordering etc may not match + pb = CryptPolicy.from_string(s) + self.assertEqual(pb.to_dict(), self.sample_config_5pd) + + s = pa.to_string(encoding="latin-1") + self.assertIsInstance(s, bytes) + + #=================================================================== + # + #=================================================================== + +#============================================================================= +# CryptContext +#============================================================================= +class CryptContextTest(TestCase): + """test CryptContext class""" + descriptionPrefix = "CryptContext" + + def setUp(self): + TestCase.setUp(self) + warnings.filterwarnings("ignore", + r"CryptContext\(\)\.replace\(\) has been deprecated.*") + warnings.filterwarnings("ignore", + r"The CryptContext ``policy`` keyword has been deprecated.*") + warnings.filterwarnings("ignore", ".*(CryptPolicy|context\.policy).*(has|have) been deprecated.*") + warnings.filterwarnings("ignore", + r"the method.*hash_needs_update.*is deprecated") + + #=================================================================== + # constructor + #=================================================================== + def test_00_constructor(self): + """test constructor""" + # create crypt context using handlers + cc = CryptContext([hash.md5_crypt, hash.bsdi_crypt, hash.des_crypt]) + c,b,a = cc.policy.iter_handlers() + self.assertIs(a, hash.des_crypt) + self.assertIs(b, hash.bsdi_crypt) + self.assertIs(c, hash.md5_crypt) + + # create context using names + cc = CryptContext(["md5_crypt", "bsdi_crypt", "des_crypt"]) + c,b,a = cc.policy.iter_handlers() + self.assertIs(a, hash.des_crypt) + self.assertIs(b, hash.bsdi_crypt) + self.assertIs(c, hash.md5_crypt) + + # policy kwd + policy = cc.policy + cc = CryptContext(policy=policy) + self.assertEqual(cc.to_dict(), policy.to_dict()) + + cc = CryptContext(policy=policy, default="bsdi_crypt") + self.assertNotEqual(cc.to_dict(), policy.to_dict()) + self.assertEqual(cc.to_dict(), dict(schemes=["md5_crypt","bsdi_crypt","des_crypt"], + default="bsdi_crypt")) + + self.assertRaises(TypeError, setattr, cc, 'policy', None) + self.assertRaises(TypeError, CryptContext, policy='x') + + def test_01_replace(self): + """test replace()""" + + cc = CryptContext(["md5_crypt", "bsdi_crypt", "des_crypt"]) + self.assertIs(cc.policy.get_handler(), hash.md5_crypt) + + cc2 = cc.replace() + self.assertIsNot(cc2, cc) + # NOTE: was not able to maintain backward compatibility with this... + ##self.assertIs(cc2.policy, cc.policy) + + cc3 = cc.replace(default="bsdi_crypt") + self.assertIsNot(cc3, cc) + # NOTE: was not able to maintain backward compatibility with this... + ##self.assertIs(cc3.policy, cc.policy) + self.assertIs(cc3.policy.get_handler(), hash.bsdi_crypt) + + def test_02_no_handlers(self): + """test no handlers""" + + # check constructor... 
+ cc = CryptContext() + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + self.assertRaises(KeyError, cc.hash, 'secret') + self.assertRaises(KeyError, cc.verify, 'secret', 'hash') + + # check updating policy after the fact... + cc = CryptContext(['md5_crypt']) + p = CryptPolicy(schemes=[]) + cc.policy = p + + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + self.assertRaises(KeyError, cc.hash, 'secret') + self.assertRaises(KeyError, cc.verify, 'secret', 'hash') + + #=================================================================== + # policy adaptation + #=================================================================== + sample_policy_1 = dict( + schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt", + "sha256_crypt"], + deprecated = [ "des_crypt", ], + default = "sha256_crypt", + bsdi_crypt__max_rounds = 30, + bsdi_crypt__default_rounds = 25, + bsdi_crypt__vary_rounds = 0, + sha256_crypt__max_rounds = 3000, + sha256_crypt__min_rounds = 2000, + sha256_crypt__default_rounds = 3000, + phpass__ident = "H", + phpass__default_rounds = 7, + ) + + def test_12_hash_needs_update(self): + """test hash_needs_update() method""" + cc = CryptContext(**self.sample_policy_1) + + # check deprecated scheme + self.assertTrue(cc.hash_needs_update('9XXD4trGYeGJA')) + self.assertFalse(cc.hash_needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0')) + + # check min rounds + self.assertTrue(cc.hash_needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/')) + self.assertFalse(cc.hash_needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8')) + + # check max rounds + self.assertFalse(cc.hash_needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.')) + self.assertTrue(cc.hash_needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA')) + + #=================================================================== + # border cases + #=================================================================== + def test_30_nonstring_hash(self): + """test non-string hash values cause error""" + warnings.filterwarnings("ignore", ".*needs_update.*'scheme' keyword is deprecated.*") + + # + # test hash=None or some other non-string causes TypeError + # and that explicit-scheme code path behaves the same. + # + cc = CryptContext(["des_crypt"]) + for hash, kwds in [ + (None, {}), + # NOTE: 'scheme' kwd is deprecated... 
+ (None, {"scheme": "des_crypt"}), + (1, {}), + ((), {}), + ]: + + self.assertRaises(TypeError, cc.hash_needs_update, hash, **kwds) + + cc2 = CryptContext(["mysql323"]) + self.assertRaises(TypeError, cc2.hash_needs_update, None) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# LazyCryptContext +#============================================================================= +class dummy_2(uh.StaticHandler): + name = "dummy_2" + +class LazyCryptContextTest(TestCase): + descriptionPrefix = "LazyCryptContext" + + def setUp(self): + TestCase.setUp(self) + + # make sure this isn't registered before OR after + unload_handler_name("dummy_2") + self.addCleanup(unload_handler_name, "dummy_2") + + # silence some warnings + warnings.filterwarnings("ignore", + r"CryptContext\(\)\.replace\(\) has been deprecated") + warnings.filterwarnings("ignore", ".*(CryptPolicy|context\.policy).*(has|have) been deprecated.*") + + def test_kwd_constructor(self): + """test plain kwds""" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertTrue(cc.policy.handler_is_deprecated("des_crypt")) + self.assertEqual(cc.policy.schemes(), ["dummy_2", "des_crypt"]) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + + def test_callable_constructor(self): + """test create_policy() hook, returning CryptPolicy""" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + def create_policy(flag=False): + self.assertTrue(flag) + return CryptPolicy(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + cc = LazyCryptContext(create_policy=create_policy, flag=True) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertTrue(cc.policy.handler_is_deprecated("des_crypt")) + self.assertEqual(cc.policy.schemes(), ["dummy_2", "des_crypt"]) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_builtin_md4.py b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_builtin_md4.py new file mode 100644 index 0000000..0aca1eb --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_builtin_md4.py @@ -0,0 +1,160 @@ +"""passlib.tests -- unittests for passlib.crypto._md4""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, division +# core +from binascii import hexlify +import hashlib +# site +# pkg +# module +from passlib.utils.compat import bascii_to_str, PY3, u +from passlib.crypto.digest import lookup_hash +from passlib.tests.utils import TestCase, skipUnless +# local +__all__ = [ + "_Common_MD4_Test", + "MD4_Builtin_Test", + "MD4_SSL_Test", +] +#============================================================================= +# test pure-python MD4 implementation 
+#============================================================================= +class _Common_MD4_Test(TestCase): + """common code for testing md4 backends""" + + vectors = [ + # input -> hex digest + # test vectors from http://www.faqs.org/rfcs/rfc1320.html - A.5 + (b"", "31d6cfe0d16ae931b73c59d7e0c089c0"), + (b"a", "bde52cb31de33e46245e05fbdbd6fb24"), + (b"abc", "a448017aaf21d8525fc10ae87aa6729d"), + (b"message digest", "d9130a8164549fe818874806e1c7014b"), + (b"abcdefghijklmnopqrstuvwxyz", "d79e1c308aa5bbcdeea8ed63df412da9"), + (b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", "043f8582f241db351ce627e153e7f0e4"), + (b"12345678901234567890123456789012345678901234567890123456789012345678901234567890", "e33b4ddc9c38f2199c3e7b164fcc0536"), + ] + + def get_md4_const(self): + """ + get md4 constructor -- + overridden by subclasses to use alternate backends. + """ + return lookup_hash("md4").const + + def test_attrs(self): + """informational attributes""" + h = self.get_md4_const()() + self.assertEqual(h.name, "md4") + self.assertEqual(h.digest_size, 16) + self.assertEqual(h.block_size, 64) + + def test_md4_update(self): + """update() method""" + md4 = self.get_md4_const() + h = md4(b'') + self.assertEqual(h.hexdigest(), "31d6cfe0d16ae931b73c59d7e0c089c0") + + h.update(b'a') + self.assertEqual(h.hexdigest(), "bde52cb31de33e46245e05fbdbd6fb24") + + h.update(b'bcdefghijklmnopqrstuvwxyz') + self.assertEqual(h.hexdigest(), "d79e1c308aa5bbcdeea8ed63df412da9") + + if PY3: + # reject unicode, hash should return digest of b'' + h = md4() + self.assertRaises(TypeError, h.update, u('a')) + self.assertEqual(h.hexdigest(), "31d6cfe0d16ae931b73c59d7e0c089c0") + else: + # coerce unicode to ascii, hash should return digest of b'a' + h = md4() + h.update(u('a')) + self.assertEqual(h.hexdigest(), "bde52cb31de33e46245e05fbdbd6fb24") + + def test_md4_hexdigest(self): + """hexdigest() method""" + md4 = self.get_md4_const() + for input, hex in self.vectors: + out = md4(input).hexdigest() + self.assertEqual(out, hex) + + def test_md4_digest(self): + """digest() method""" + md4 = self.get_md4_const() + for input, hex in self.vectors: + out = bascii_to_str(hexlify(md4(input).digest())) + self.assertEqual(out, hex) + + def test_md4_copy(self): + """copy() method""" + md4 = self.get_md4_const() + h = md4(b'abc') + + h2 = h.copy() + h2.update(b'def') + self.assertEqual(h2.hexdigest(), '804e7f1c2586e50b49ac65db5b645131') + + h.update(b'ghi') + self.assertEqual(h.hexdigest(), 'c5225580bfe176f6deeee33dee98732c') + + +#------------------------------------------------------------------------ +# create subclasses to test various backends +#------------------------------------------------------------------------ + +def has_native_md4(): # pragma: no cover -- runtime detection + """ + check if hashlib natively supports md4. + """ + try: + hashlib.new("md4") + return True + except ValueError: + # not supported - ssl probably missing (e.g. ironpython) + return False + + +@skipUnless(has_native_md4(), "hashlib lacks ssl/md4 support") +class MD4_SSL_Test(_Common_MD4_Test): + descriptionPrefix = "hashlib.new('md4')" + + # NOTE: we trust ssl got md4 implementation right, + # this is more to test our test is correct :) + + def setUp(self): + super(MD4_SSL_Test, self).setUp() + + # make sure we're using right constructor. 
+ self.assertEqual(self.get_md4_const().__module__, "hashlib") + + +class MD4_Builtin_Test(_Common_MD4_Test): + descriptionPrefix = "passlib.crypto._md4.md4()" + + def setUp(self): + super(MD4_Builtin_Test, self).setUp() + + if has_native_md4(): + + # Temporarily make lookup_hash() use builtin pure-python implementation, + # by monkeypatching hashlib.new() to ensure we fall back to passlib's md4 class. + orig = hashlib.new + def wrapper(name, *args): + if name == "md4": + raise ValueError("md4 disabled for testing") + return orig(name, *args) + self.patchAttr(hashlib, "new", wrapper) + + # flush cache before & after test, since we're mucking with it. + lookup_hash.clear_cache() + self.addCleanup(lookup_hash.clear_cache) + + # make sure we're using right constructor. + self.assertEqual(self.get_md4_const().__module__, "passlib.crypto._md4") + + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_des.py b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_des.py new file mode 100644 index 0000000..ab31845 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_des.py @@ -0,0 +1,194 @@ +"""passlib.tests -- unittests for passlib.crypto.des""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, division +# core +from functools import partial +# site +# pkg +# module +from passlib.utils import getrandbytes +from passlib.tests.utils import TestCase + +#============================================================================= +# test DES routines +#============================================================================= +class DesTest(TestCase): + descriptionPrefix = "passlib.crypto.des" + + # test vectors taken from http://www.skepticfiles.org/faq/testdes.htm + des_test_vectors = [ + # key, plaintext, ciphertext + (0x0000000000000000, 0x0000000000000000, 0x8CA64DE9C1B123A7), + (0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0x7359B2163E4EDC58), + (0x3000000000000000, 0x1000000000000001, 0x958E6E627A05557B), + (0x1111111111111111, 0x1111111111111111, 0xF40379AB9E0EC533), + (0x0123456789ABCDEF, 0x1111111111111111, 0x17668DFC7292532D), + (0x1111111111111111, 0x0123456789ABCDEF, 0x8A5AE1F81AB8F2DD), + (0x0000000000000000, 0x0000000000000000, 0x8CA64DE9C1B123A7), + (0xFEDCBA9876543210, 0x0123456789ABCDEF, 0xED39D950FA74BCC4), + (0x7CA110454A1A6E57, 0x01A1D6D039776742, 0x690F5B0D9A26939B), + (0x0131D9619DC1376E, 0x5CD54CA83DEF57DA, 0x7A389D10354BD271), + (0x07A1133E4A0B2686, 0x0248D43806F67172, 0x868EBB51CAB4599A), + (0x3849674C2602319E, 0x51454B582DDF440A, 0x7178876E01F19B2A), + (0x04B915BA43FEB5B6, 0x42FD443059577FA2, 0xAF37FB421F8C4095), + (0x0113B970FD34F2CE, 0x059B5E0851CF143A, 0x86A560F10EC6D85B), + (0x0170F175468FB5E6, 0x0756D8E0774761D2, 0x0CD3DA020021DC09), + (0x43297FAD38E373FE, 0x762514B829BF486A, 0xEA676B2CB7DB2B7A), + (0x07A7137045DA2A16, 0x3BDD119049372802, 0xDFD64A815CAF1A0F), + (0x04689104C2FD3B2F, 0x26955F6835AF609A, 0x5C513C9C4886C088), + (0x37D06BB516CB7546, 0x164D5E404F275232, 0x0A2AEEAE3FF4AB77), + (0x1F08260D1AC2465E, 0x6B056E18759F5CCA, 0xEF1BF03E5DFA575A), + (0x584023641ABA6176, 0x004BD6EF09176062, 0x88BF0DB6D70DEE56), + (0x025816164629B007, 0x480D39006EE762F2, 0xA1F9915541020B56), + (0x49793EBC79B3258F, 
0x437540C8698F3CFA, 0x6FBF1CAFCFFD0556), + (0x4FB05E1515AB73A7, 0x072D43A077075292, 0x2F22E49BAB7CA1AC), + (0x49E95D6D4CA229BF, 0x02FE55778117F12A, 0x5A6B612CC26CCE4A), + (0x018310DC409B26D6, 0x1D9D5C5018F728C2, 0x5F4C038ED12B2E41), + (0x1C587F1C13924FEF, 0x305532286D6F295A, 0x63FAC0D034D9F793), + (0x0101010101010101, 0x0123456789ABCDEF, 0x617B3A0CE8F07100), + (0x1F1F1F1F0E0E0E0E, 0x0123456789ABCDEF, 0xDB958605F8C8C606), + (0xE0FEE0FEF1FEF1FE, 0x0123456789ABCDEF, 0xEDBFD1C66C29CCC7), + (0x0000000000000000, 0xFFFFFFFFFFFFFFFF, 0x355550B2150E2451), + (0xFFFFFFFFFFFFFFFF, 0x0000000000000000, 0xCAAAAF4DEAF1DBAE), + (0x0123456789ABCDEF, 0x0000000000000000, 0xD5D44FF720683D0D), + (0xFEDCBA9876543210, 0xFFFFFFFFFFFFFFFF, 0x2A2BB008DF97C2F2), + ] + + def test_01_expand(self): + """expand_des_key()""" + from passlib.crypto.des import expand_des_key, shrink_des_key, \ + _KDATA_MASK, INT_56_MASK + + # make sure test vectors are preserved (sans parity bits) + # uses ints, bytes are tested under # 02 + for key1, _, _ in self.des_test_vectors: + key2 = shrink_des_key(key1) + key3 = expand_des_key(key2) + # NOTE: this assumes expand_des_key() sets parity bits to 0 + self.assertEqual(key3, key1 & _KDATA_MASK) + + # type checks + self.assertRaises(TypeError, expand_des_key, 1.0) + + # too large + self.assertRaises(ValueError, expand_des_key, INT_56_MASK+1) + self.assertRaises(ValueError, expand_des_key, b"\x00"*8) + + # too small + self.assertRaises(ValueError, expand_des_key, -1) + self.assertRaises(ValueError, expand_des_key, b"\x00"*6) + + def test_02_shrink(self): + """shrink_des_key()""" + from passlib.crypto.des import expand_des_key, shrink_des_key, INT_64_MASK + rng = self.getRandom() + + # make sure reverse works for some random keys + # uses bytes, ints are tested under # 01 + for i in range(20): + key1 = getrandbytes(rng, 7) + key2 = expand_des_key(key1) + key3 = shrink_des_key(key2) + self.assertEqual(key3, key1) + + # type checks + self.assertRaises(TypeError, shrink_des_key, 1.0) + + # too large + self.assertRaises(ValueError, shrink_des_key, INT_64_MASK+1) + self.assertRaises(ValueError, shrink_des_key, b"\x00"*9) + + # too small + self.assertRaises(ValueError, shrink_des_key, -1) + self.assertRaises(ValueError, shrink_des_key, b"\x00"*7) + + def _random_parity(self, key): + """randomize parity bits""" + from passlib.crypto.des import _KDATA_MASK, _KPARITY_MASK, INT_64_MASK + rng = self.getRandom() + return (key & _KDATA_MASK) | (rng.randint(0,INT_64_MASK) & _KPARITY_MASK) + + def test_03_encrypt_bytes(self): + """des_encrypt_block()""" + from passlib.crypto.des import (des_encrypt_block, shrink_des_key, + _pack64, _unpack64) + + # run through test vectors + for key, plaintext, correct in self.des_test_vectors: + # convert to bytes + key = _pack64(key) + plaintext = _pack64(plaintext) + correct = _pack64(correct) + + # test 64-bit key + result = des_encrypt_block(key, plaintext) + self.assertEqual(result, correct, "key=%r plaintext=%r:" % + (key, plaintext)) + + # test 56-bit version + key2 = shrink_des_key(key) + result = des_encrypt_block(key2, plaintext) + self.assertEqual(result, correct, "key=%r shrink(key)=%r plaintext=%r:" % + (key, key2, plaintext)) + + # test with random parity bits + for _ in range(20): + key3 = _pack64(self._random_parity(_unpack64(key))) + result = des_encrypt_block(key3, plaintext) + self.assertEqual(result, correct, "key=%r rndparity(key)=%r plaintext=%r:" % + (key, key3, plaintext)) + + # check invalid keys + stub = b'\x00' * 8 + self.assertRaises(TypeError, 
des_encrypt_block, 0, stub) + self.assertRaises(ValueError, des_encrypt_block, b'\x00'*6, stub) + + # check invalid input + self.assertRaises(TypeError, des_encrypt_block, stub, 0) + self.assertRaises(ValueError, des_encrypt_block, stub, b'\x00'*7) + + # check invalid salts + self.assertRaises(ValueError, des_encrypt_block, stub, stub, salt=-1) + self.assertRaises(ValueError, des_encrypt_block, stub, stub, salt=1<<24) + + # check invalid rounds + self.assertRaises(ValueError, des_encrypt_block, stub, stub, 0, rounds=0) + + def test_04_encrypt_ints(self): + """des_encrypt_int_block()""" + from passlib.crypto.des import des_encrypt_int_block + + # run through test vectors + for key, plaintext, correct in self.des_test_vectors: + # test 64-bit key + result = des_encrypt_int_block(key, plaintext) + self.assertEqual(result, correct, "key=%r plaintext=%r:" % + (key, plaintext)) + + # test with random parity bits + for _ in range(20): + key3 = self._random_parity(key) + result = des_encrypt_int_block(key3, plaintext) + self.assertEqual(result, correct, "key=%r rndparity(key)=%r plaintext=%r:" % + (key, key3, plaintext)) + + # check invalid keys + self.assertRaises(TypeError, des_encrypt_int_block, b'\x00', 0) + self.assertRaises(ValueError, des_encrypt_int_block, -1, 0) + + # check invalid input + self.assertRaises(TypeError, des_encrypt_int_block, 0, b'\x00') + self.assertRaises(ValueError, des_encrypt_int_block, 0, -1) + + # check invalid salts + self.assertRaises(ValueError, des_encrypt_int_block, 0, 0, salt=-1) + self.assertRaises(ValueError, des_encrypt_int_block, 0, 0, salt=1<<24) + + # check invalid rounds + self.assertRaises(ValueError, des_encrypt_int_block, 0, 0, 0, rounds=0) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_digest.py b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_digest.py new file mode 100644 index 0000000..461d209 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_digest.py @@ -0,0 +1,544 @@ +"""tests for passlib.utils.(des|pbkdf2|md4)""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, division +# core +from binascii import hexlify +import hashlib +import warnings +# site +# pkg +# module +from passlib.exc import UnknownHashError +from passlib.utils.compat import PY3, u, JYTHON +from passlib.tests.utils import TestCase, TEST_MODE, skipUnless, hb + +#============================================================================= +# test assorted crypto helpers +#============================================================================= +class HashInfoTest(TestCase): + """test various crypto functions""" + descriptionPrefix = "passlib.crypto.digest" + + #: list of formats norm_hash_name() should support + norm_hash_formats = ["hashlib", "iana"] + + #: test cases for norm_hash_name() + #: each row contains (iana name, hashlib name, ... 
0+ unnormalized names) + norm_hash_samples = [ + # real hashes + ("md5", "md5", "SCRAM-MD5-PLUS", "MD-5"), + ("sha1", "sha-1", "SCRAM-SHA-1", "SHA1"), + ("sha256", "sha-256", "SHA_256", "sha2-256"), + ("ripemd160", "ripemd-160", "SCRAM-RIPEMD-160", "RIPEmd160", + # NOTE: there was an older "RIPEMD" & "RIPEMD-128", but python treates "RIPEMD" + # as alias for "RIPEMD-160" + "ripemd", "SCRAM-RIPEMD"), + + # fake hashes (to check if fallback normalization behaves sanely) + ("sha4_256", "sha4-256", "SHA4-256", "SHA-4-256"), + ("test128", "test-128", "TEST128"), + ("test2", "test2", "TEST-2"), + ("test3_128", "test3-128", "TEST-3-128"), + ] + + def test_norm_hash_name(self): + """norm_hash_name()""" + from itertools import chain + from passlib.crypto.digest import norm_hash_name, _known_hash_names + + # snapshot warning state, ignore unknown hash warnings + ctx = warnings.catch_warnings() + ctx.__enter__() + self.addCleanup(ctx.__exit__) + warnings.filterwarnings("ignore", '.*unknown hash') + warnings.filterwarnings("ignore", '.*unsupported hash') + + # test string types + self.assertEqual(norm_hash_name(u("MD4")), "md4") + self.assertEqual(norm_hash_name(b"MD4"), "md4") + self.assertRaises(TypeError, norm_hash_name, None) + + # test selected results + for row in chain(_known_hash_names, self.norm_hash_samples): + for idx, format in enumerate(self.norm_hash_formats): + correct = row[idx] + for value in row: + result = norm_hash_name(value, format) + self.assertEqual(result, correct, + "name=%r, format=%r:" % (value, + format)) + + def test_lookup_hash_ctor(self): + """lookup_hash() -- constructor""" + from passlib.crypto.digest import lookup_hash + + # invalid/unknown names should be rejected + self.assertRaises(ValueError, lookup_hash, "new") + self.assertRaises(ValueError, lookup_hash, "__name__") + self.assertRaises(ValueError, lookup_hash, "sha4") + + # 1. should return hashlib builtin if found + self.assertEqual(lookup_hash("md5"), (hashlib.md5, 16, 64)) + + # 2. should return wrapper around hashlib.new() if found + try: + hashlib.new("sha") + has_sha = True + except ValueError: + has_sha = False + if has_sha: + record = lookup_hash("sha") + const = record[0] + self.assertEqual(record, (const, 20, 64)) + self.assertEqual(hexlify(const(b"abc").digest()), + b"0164b8a914cd2a5e74c4f7ff082c4d97f1edf880") + + else: + self.assertRaises(ValueError, lookup_hash, "sha") + + # 3. 
should fall back to builtin md4 + try: + hashlib.new("md4") + has_md4 = True + except ValueError: + has_md4 = False + record = lookup_hash("md4") + const = record[0] + if not has_md4: + from passlib.crypto._md4 import md4 + self.assertIs(const, md4) + self.assertEqual(record, (const, 16, 64)) + self.assertEqual(hexlify(const(b"abc").digest()), + b"a448017aaf21d8525fc10ae87aa6729d") + + # should memoize records + self.assertIs(lookup_hash("md5"), lookup_hash("md5")) + + def test_lookup_hash_w_unknown_name(self): + """lookup_hash() -- unknown hash name""" + from passlib.crypto.digest import lookup_hash + + # unknown names should be rejected by default + self.assertRaises(UnknownHashError, lookup_hash, "xxx256") + + # required=False should return stub record instead + info = lookup_hash("xxx256", required=False) + self.assertFalse(info.supported) + self.assertRaisesRegex(UnknownHashError, "unknown hash: 'xxx256'", info.const) + self.assertEqual(info.name, "xxx256") + self.assertEqual(info.digest_size, None) + self.assertEqual(info.block_size, None) + + # should cache stub records + info2 = lookup_hash("xxx256", required=False) + self.assertIs(info2, info) + + def test_mock_fips_mode(self): + """ + lookup_hash() -- test set_mock_fips_mode() + """ + from passlib.crypto.digest import lookup_hash, _set_mock_fips_mode + + # check if md5 is available so we can test mock helper + if not lookup_hash("md5", required=False).supported: + raise self.skipTest("md5 not supported") + + # enable monkeypatch to mock up fips mode + _set_mock_fips_mode() + self.addCleanup(_set_mock_fips_mode, False) + + pat = "'md5' hash disabled for fips" + self.assertRaisesRegex(UnknownHashError, pat, lookup_hash, "md5") + + info = lookup_hash("md5", required=False) + self.assertRegex(info.error_text, pat) + self.assertRaisesRegex(UnknownHashError, pat, info.const) + + # should use hardcoded fallback info + self.assertEqual(info.digest_size, 16) + self.assertEqual(info.block_size, 64) + + def test_lookup_hash_metadata(self): + """lookup_hash() -- metadata""" + + from passlib.crypto.digest import lookup_hash + + # quick test of metadata using known reference - sha256 + info = lookup_hash("sha256") + self.assertEqual(info.name, "sha256") + self.assertEqual(info.iana_name, "sha-256") + self.assertEqual(info.block_size, 64) + self.assertEqual(info.digest_size, 32) + self.assertIs(lookup_hash("SHA2-256"), info) + + # quick test of metadata using known reference - md5 + info = lookup_hash("md5") + self.assertEqual(info.name, "md5") + self.assertEqual(info.iana_name, "md5") + self.assertEqual(info.block_size, 64) + self.assertEqual(info.digest_size, 16) + + def test_lookup_hash_alt_types(self): + """lookup_hash() -- alternate types""" + + from passlib.crypto.digest import lookup_hash + + info = lookup_hash("sha256") + self.assertIs(lookup_hash(info), info) + self.assertIs(lookup_hash(info.const), info) + + self.assertRaises(TypeError, lookup_hash, 123) + + # TODO: write full test of compile_hmac() -- currently relying on pbkdf2_hmac() tests + +#============================================================================= +# test PBKDF1 support +#============================================================================= +class Pbkdf1_Test(TestCase): + """test kdf helpers""" + descriptionPrefix = "passlib.crypto.digest.pbkdf1" + + pbkdf1_tests = [ + # (password, salt, rounds, keylen, hash, result) + + # + # from http://www.di-mgt.com.au/cryptoKDFs.html + # + (b'password', hb('78578E5A5D63CB06'), 1000, 16, 'sha1', 
hb('dc19847e05c64d2faf10ebfb4a3d2a20')), + + # + # custom + # + (b'password', b'salt', 1000, 0, 'md5', b''), + (b'password', b'salt', 1000, 1, 'md5', hb('84')), + (b'password', b'salt', 1000, 8, 'md5', hb('8475c6a8531a5d27')), + (b'password', b'salt', 1000, 16, 'md5', hb('8475c6a8531a5d27e386cd496457812c')), + (b'password', b'salt', 1000, None, 'md5', hb('8475c6a8531a5d27e386cd496457812c')), + (b'password', b'salt', 1000, None, 'sha1', hb('4a8fd48e426ed081b535be5769892fa396293efb')), + ] + if not JYTHON: # FIXME: find out why not jython, or reenable this. + pbkdf1_tests.append( + (b'password', b'salt', 1000, None, 'md4', hb('f7f2e91100a8f96190f2dd177cb26453')) + ) + + def test_known(self): + """test reference vectors""" + from passlib.crypto.digest import pbkdf1 + for secret, salt, rounds, keylen, digest, correct in self.pbkdf1_tests: + result = pbkdf1(digest, secret, salt, rounds, keylen) + self.assertEqual(result, correct) + + def test_border(self): + """test border cases""" + from passlib.crypto.digest import pbkdf1 + def helper(secret=b'secret', salt=b'salt', rounds=1, keylen=1, hash='md5'): + return pbkdf1(hash, secret, salt, rounds, keylen) + helper() + + # salt/secret wrong type + self.assertRaises(TypeError, helper, secret=1) + self.assertRaises(TypeError, helper, salt=1) + + # non-existent hashes + self.assertRaises(ValueError, helper, hash='missing') + + # rounds < 1 and wrong type + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='1') + + # keylen < 0, keylen > block_size, and wrong type + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=17, hash='md5') + self.assertRaises(TypeError, helper, keylen='1') + +#============================================================================= +# test PBKDF2-HMAC support +#============================================================================= + +# import the test subject +from passlib.crypto.digest import pbkdf2_hmac, PBKDF2_BACKENDS + +# NOTE: relying on tox to verify this works under all the various backends. 
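# [editor's note -- illustrative sketch, not part of the committed file.
#  passlib's pbkdf2_hmac(digest, secret, salt, rounds, keylen) mirrors the
#  stdlib call of the same name, merely dispatching to the fastest backend
#  listed in PBKDF2_BACKENDS.  the first rfc 6070 vector below can be
#  cross-checked against the stdlib in isolation:
#
#      import binascii, hashlib
#      dk = hashlib.pbkdf2_hmac("sha1", b"password", b"salt", 1, dklen=20)
#      assert binascii.hexlify(dk) == b"0c60c80f961f0e71f3a9b524af6012062fe037a6"
# ]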
+class Pbkdf2Test(TestCase):
+    """test pbkdf2() support"""
+    descriptionPrefix = "passlib.crypto.digest.pbkdf2_hmac() <backends: %s>" % ", ".join(PBKDF2_BACKENDS)
+
+    pbkdf2_test_vectors = [
+        # (result, secret, salt, rounds, keylen, digest="sha1")
+
+        #
+        # from rfc 3962
+        #
+
+        # test case 1 / 128 bit
+        (
+            hb("cdedb5281bb2f801565a1122b2563515"),
+            b"password", b"ATHENA.MIT.EDUraeburn", 1, 16
+        ),
+
+        # test case 2 / 128 bit
+        (
+            hb("01dbee7f4a9e243e988b62c73cda935d"),
+            b"password", b"ATHENA.MIT.EDUraeburn", 2, 16
+        ),
+
+        # test case 2 / 256 bit
+        (
+            hb("01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86"),
+            b"password", b"ATHENA.MIT.EDUraeburn", 2, 32
+        ),
+
+        # test case 3 / 256 bit
+        (
+            hb("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13"),
+            b"password", b"ATHENA.MIT.EDUraeburn", 1200, 32
+        ),
+
+        # test case 4 / 256 bit
+        (
+            hb("d1daa78615f287e6a1c8b120d7062a493f98d203e6be49a6adf4fa574b6e64ee"),
+            b"password", b'\x12\x34\x56\x78\x78\x56\x34\x12', 5, 32
+        ),
+
+        # test case 5 / 256 bit
+        (
+            hb("139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1"),
+            b"X"*64, b"pass phrase equals block size", 1200, 32
+        ),
+
+        # test case 6 / 256 bit
+        (
+            hb("9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a"),
+            b"X"*65, b"pass phrase exceeds block size", 1200, 32
+        ),
+
+        #
+        # from rfc 6070
+        #
+        (
+            hb("0c60c80f961f0e71f3a9b524af6012062fe037a6"),
+            b"password", b"salt", 1, 20,
+        ),
+
+        (
+            hb("ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"),
+            b"password", b"salt", 2, 20,
+        ),
+
+        (
+            hb("4b007901b765489abead49d926f721d065a429c1"),
+            b"password", b"salt", 4096, 20,
+        ),
+
+        # just runs too long - could enable if ALL option is set
+        ##(
+        ##    hb("eefe3d61cd4da4e4e9945b3d6ba2158c2634e984"),
+        ##    "password", "salt", 16777216, 20,
+        ##),
+
+        (
+            hb("3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"),
+            b"passwordPASSWORDpassword",
+            b"saltSALTsaltSALTsaltSALTsaltSALTsalt",
+            4096, 25,
+        ),
+
+        (
+            hb("56fa6aa75548099dcc37d7f03425e0c3"),
+            b"pass\00word", b"sa\00lt", 4096, 16,
+        ),
+
+        #
+        # from example in http://grub.enbug.org/Authentication
+        #
+        (
+            hb("887CFF169EA8335235D8004242AA7D6187A41E3187DF0CE14E256D85ED"
+               "97A97357AAA8FF0A3871AB9EEFF458392F462F495487387F685B7472FC"
+               "6C29E293F0A0"),
+            b"hello",
+            hb("9290F727ED06C38BA4549EF7DE25CF5642659211B7FC076F2D28FEFD71"
+               "784BB8D8F6FB244A8CC5C06240631B97008565A120764C0EE9C2CB0073"
+               "994D79080136"),
+            10000, 64, "sha512"
+        ),
+
+        #
+        # test vectors from fastpbkdf2
+        #
+        (
+            hb('55ac046e56e3089fec1691c22544b605f94185216dde0465e68b9d57c20dacbc'
+               '49ca9cccf179b645991664b39d77ef317c71b845b1e30bd509112041d3a19783'),
+            b'passwd', b'salt', 1, 64, 'sha256',
+        ),
+
+        (
+            hb('4ddcd8f60b98be21830cee5ef22701f9641a4418d04c0414aeff08876b34ab56'
+               'a1d425a1225833549adb841b51c9b3176a272bdebba1d078478f62b397f33c8d'),
+            b'Password', b'NaCl', 80000, 64, 'sha256',
+        ),
+
+        (
+            hb('120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b'),
+            b'password', b'salt', 1, 32, 'sha256',
+        ),
+
+        (
+            hb('ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43'),
+            b'password', b'salt', 2, 32, 'sha256',
+        ),
+
+        (
+            hb('c5e478d59288c841aa530db6845c4c8d962893a001ce4e11a4963873aa98134a'),
+            b'password', b'salt', 4096, 32, 'sha256',
+        ),
+
+        (
+            hb('348c89dbcbd32b2f32d814b8116e84cf2b17347ebc1800181c4e2a1fb8dd53e1c'
+               '635518c7dac47e9'),
+            b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt',
+            4096, 40, 'sha256',
+        ),
+
+        (
+            
hb('9e83f279c040f2a11aa4a02b24c418f2d3cb39560c9627fa4f47e3bcc2897c3d'), + b'', b'salt', 1024, 32, 'sha256', + ), + + ( + hb('ea5808411eb0c7e830deab55096cee582761e22a9bc034e3ece925225b07bf46'), + b'password', b'', 1024, 32, 'sha256', + ), + + ( + hb('89b69d0516f829893c696226650a8687'), + b'pass\x00word', b'sa\x00lt', 4096, 16, 'sha256', + ), + + ( + hb('867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5d513554e1c8cf252'), + b'password', b'salt', 1, 32, 'sha512', + ), + + ( + hb('e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f0040713f18aefdb866d53c'), + b'password', b'salt', 2, 32, 'sha512', + ), + + ( + hb('d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f87f6902e072f457b5'), + b'password', b'salt', 4096, 32, 'sha512', + ), + + ( + hb('6e23f27638084b0f7ea1734e0d9841f55dd29ea60a834466f3396bac801fac1eeb' + '63802f03a0b4acd7603e3699c8b74437be83ff01ad7f55dac1ef60f4d56480c35e' + 'e68fd52c6936'), + b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt', + 1, 72, 'sha512', + ), + + ( + hb('0c60c80f961f0e71f3a9b524af6012062fe037a6'), + b'password', b'salt', 1, 20, 'sha1', + ), + + # + # custom tests + # + ( + hb('e248fb6b13365146f8ac6307cc222812'), + b"secret", b"salt", 10, 16, "sha1", + ), + ( + hb('e248fb6b13365146f8ac6307cc2228127872da6d'), + b"secret", b"salt", 10, None, "sha1", + ), + ( + hb('b1d5485772e6f76d5ebdc11b38d3eff0a5b2bd50dc11f937e86ecacd0cd40d1b' + '9113e0734e3b76a3'), + b"secret", b"salt", 62, 40, "md5", + ), + ( + hb('ea014cc01f78d3883cac364bb5d054e2be238fb0b6081795a9d84512126e3129' + '062104d2183464c4'), + b"secret", b"salt", 62, 40, "md4", + ), + ] + + def test_known(self): + """test reference vectors""" + for row in self.pbkdf2_test_vectors: + correct, secret, salt, rounds, keylen = row[:5] + digest = row[5] if len(row) == 6 else "sha1" + result = pbkdf2_hmac(digest, secret, salt, rounds, keylen) + self.assertEqual(result, correct) + + def test_backends(self): + """verify expected backends are present""" + from passlib.crypto.digest import PBKDF2_BACKENDS + + # check for fastpbkdf2 + try: + import fastpbkdf2 + has_fastpbkdf2 = True + except ImportError: + has_fastpbkdf2 = False + self.assertEqual("fastpbkdf2" in PBKDF2_BACKENDS, has_fastpbkdf2) + + # check for hashlib + try: + from hashlib import pbkdf2_hmac + has_hashlib_ssl = pbkdf2_hmac.__module__ != "hashlib" + except ImportError: + has_hashlib_ssl = False + self.assertEqual("hashlib-ssl" in PBKDF2_BACKENDS, has_hashlib_ssl) + + # check for appropriate builtin + from passlib.utils.compat import PY3 + if PY3: + self.assertIn("builtin-from-bytes", PBKDF2_BACKENDS) + else: + # XXX: only true as long as this is preferred over hexlify + self.assertIn("builtin-unpack", PBKDF2_BACKENDS) + + def test_border(self): + """test border cases""" + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, digest="sha1"): + return pbkdf2_hmac(digest, secret, salt, rounds, keylen) + helper() + + # invalid rounds + self.assertRaises(ValueError, helper, rounds=-1) + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='x') + + # invalid keylen + helper(keylen=1) + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=0) + # NOTE: hashlib actually throws error for keylen>=MAX_SINT32, + # but pbkdf2 forbids anything > MAX_UINT32 * digest_size + self.assertRaises(OverflowError, helper, keylen=20*(2**32-1)+1) + self.assertRaises(TypeError, helper, keylen='x') + + # invalid secret/salt type + self.assertRaises(TypeError, helper, salt=5) + 
self.assertRaises(TypeError, helper, secret=5) + + # invalid hash + self.assertRaises(ValueError, helper, digest='foo') + self.assertRaises(TypeError, helper, digest=5) + + def test_default_keylen(self): + """test keylen==None""" + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, digest="sha1"): + return pbkdf2_hmac(digest, secret, salt, rounds, keylen) + self.assertEqual(len(helper(digest='sha1')), 20) + self.assertEqual(len(helper(digest='sha256')), 32) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_scrypt.py b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_scrypt.py new file mode 100644 index 0000000..73ff1fa --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_crypto_scrypt.py @@ -0,0 +1,634 @@ +"""tests for passlib.utils.scrypt""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import hashlib +import logging; log = logging.getLogger(__name__) +import struct +import warnings +warnings.filterwarnings("ignore", ".*using builtin scrypt backend.*") +# site +# pkg +from passlib import exc +from passlib.utils import getrandbytes +from passlib.utils.compat import PYPY, u, bascii_to_str +from passlib.utils.decor import classproperty +from passlib.tests.utils import TestCase, skipUnless, TEST_MODE, hb +# subject +from passlib.crypto import scrypt as scrypt_mod +# local +__all__ = [ + "ScryptEngineTest", + "BuiltinScryptTest", + "FastScryptTest", +] + +#============================================================================= +# support functions +#============================================================================= +def hexstr(data): + """return bytes as hex str""" + return bascii_to_str(hexlify(data)) + +def unpack_uint32_list(data, check_count=None): + """unpack bytes as list of uint32 values""" + count = len(data) // 4 + assert check_count is None or check_count == count + return struct.unpack("<%dI" % count, data) + +def seed_bytes(seed, count): + """ + generate random reference bytes from specified seed. + used to generate some predictable test vectors. 
+ """ + if hasattr(seed, "encode"): + seed = seed.encode("ascii") + buf = b'' + i = 0 + while len(buf) < count: + buf += hashlib.sha256(seed + struct.pack("" % cls.backend + backend = None + + #============================================================================= + # setup + #============================================================================= + def setUp(self): + assert self.backend + scrypt_mod._set_backend(self.backend) + super(_CommonScryptTest, self).setUp() + + #============================================================================= + # reference vectors + #============================================================================= + + reference_vectors = [ + # entry format: (secret, salt, n, r, p, keylen, result) + + #------------------------------------------------------------------------ + # test vectors from scrypt whitepaper -- + # http://www.tarsnap.com/scrypt/scrypt.pdf, appendix b + # + # also present in (expired) scrypt rfc draft -- + # https://tools.ietf.org/html/draft-josefsson-scrypt-kdf-01, section 11 + #------------------------------------------------------------------------ + ("", "", 16, 1, 1, 64, hb(""" + 77 d6 57 62 38 65 7b 20 3b 19 ca 42 c1 8a 04 97 + f1 6b 48 44 e3 07 4a e8 df df fa 3f ed e2 14 42 + fc d0 06 9d ed 09 48 f8 32 6a 75 3a 0f c8 1f 17 + e8 d3 e0 fb 2e 0d 36 28 cf 35 e2 0c 38 d1 89 06 + """)), + + ("password", "NaCl", 1024, 8, 16, 64, hb(""" + fd ba be 1c 9d 34 72 00 78 56 e7 19 0d 01 e9 fe + 7c 6a d7 cb c8 23 78 30 e7 73 76 63 4b 37 31 62 + 2e af 30 d9 2e 22 a3 88 6f f1 09 27 9d 98 30 da + c7 27 af b9 4a 83 ee 6d 83 60 cb df a2 cc 06 40 + """)), + + # NOTE: the following are skipped for all backends unless TEST_MODE="full" + + ("pleaseletmein", "SodiumChloride", 16384, 8, 1, 64, hb(""" + 70 23 bd cb 3a fd 73 48 46 1c 06 cd 81 fd 38 eb + fd a8 fb ba 90 4f 8e 3e a9 b5 43 f6 54 5d a1 f2 + d5 43 29 55 61 3f 0f cf 62 d4 97 05 24 2a 9a f9 + e6 1e 85 dc 0d 65 1e 40 df cf 01 7b 45 57 58 87 + """)), + + # NOTE: the following are always skipped for the builtin backend, + # (just takes too long to be worth it) + + ("pleaseletmein", "SodiumChloride", 1048576, 8, 1, 64, hb(""" + 21 01 cb 9b 6a 51 1a ae ad db be 09 cf 70 f8 81 + ec 56 8d 57 4a 2f fd 4d ab e5 ee 98 20 ad aa 47 + 8e 56 fd 8f 4b a5 d0 9f fa 1c 6d 92 7c 40 f4 c3 + 37 30 40 49 e8 a9 52 fb cb f4 5c 6f a7 7a 41 a4 + """)), + ] + + def test_reference_vectors(self): + """reference vectors""" + for secret, salt, n, r, p, keylen, result in self.reference_vectors: + if n >= 1024 and TEST_MODE(max="default"): + # skip large values unless we're running full test suite + continue + if n > 16384 and self.backend == "builtin": + # skip largest vector for builtin, takes WAAY too long + # (46s under pypy, ~5m under cpython) + continue + log.debug("scrypt reference vector: %r %r n=%r r=%r p=%r", secret, salt, n, r, p) + self.assertEqual(scrypt_mod.scrypt(secret, salt, n, r, p, keylen), result) + + #============================================================================= + # fuzz testing + #============================================================================= + + _already_tested_others = None + + def test_other_backends(self): + """compare output to other backends""" + # only run once, since test is symetric. + # maybe this means it should go somewhere else? 
+        if self._already_tested_others:
+            raise self.skipTest("already run under %r backend test" % self._already_tested_others)
+        self._already_tested_others = self.backend
+        rng = self.getRandom()
+
+        # get available backends
+        orig = scrypt_mod.backend
+        available = set(name for name in scrypt_mod.backend_values
+                        if scrypt_mod._has_backend(name))
+        scrypt_mod._set_backend(orig)
+        available.discard(self.backend)
+        if not available:
+            raise self.skipTest("no other backends found")
+
+        warnings.filterwarnings("ignore", "(?i)using builtin scrypt backend",
+                                category=exc.PasslibSecurityWarning)
+
+        # generate some random options, and cross-check output
+        for _ in range(10):
+            # NOTE: keeping values low due to builtin test
+            secret = getrandbytes(rng, rng.randint(0, 64))
+            salt = getrandbytes(rng, rng.randint(0, 64))
+            n = 1 << rng.randint(1, 10)
+            r = rng.randint(1, 8)
+            p = rng.randint(1, 3)
+            ks = rng.randint(1, 64)
+            previous = None
+            backends = set()
+            for name in available:
+                scrypt_mod._set_backend(name)
+                self.assertNotIn(scrypt_mod._scrypt, backends)
+                backends.add(scrypt_mod._scrypt)
+                result = hexstr(scrypt_mod.scrypt(secret, salt, n, r, p, ks))
+                self.assertEqual(len(result), 2*ks)
+                if previous is not None:
+                    self.assertEqual(result, previous,
+                                     msg="%r output differs from others %r: %r" %
+                                         (name, available, [secret, salt, n, r, p, ks]))
+                # remember output so the next backend is compared against it
+                previous = result
+
+    #=============================================================================
+    # test input types
+    #=============================================================================
+    def test_backend(self):
+        """backend management"""
+        # clobber backend
+        scrypt_mod.backend = None
+        scrypt_mod._scrypt = None
+        self.assertRaises(TypeError, scrypt_mod.scrypt, 's', 's', 2, 2, 2, 16)
+
+        # reload backend
+        scrypt_mod._set_backend(self.backend)
+        self.assertEqual(scrypt_mod.backend, self.backend)
+        scrypt_mod.scrypt('s', 's', 2, 2, 2, 16)
+
+        # throw error for unknown backend
+        self.assertRaises(ValueError, scrypt_mod._set_backend, 'xxx')
+        self.assertEqual(scrypt_mod.backend, self.backend)
+
+    def test_secret_param(self):
+        """'secret' parameter"""
+
+        def run_scrypt(secret):
+            return hexstr(scrypt_mod.scrypt(secret, "salt", 2, 2, 2, 16))
+
+        # unicode
+        TEXT = u("abc\u00defg")
+        self.assertEqual(run_scrypt(TEXT), '05717106997bfe0da42cf4779a2f8bd8')
+
+        # utf8 bytes
+        TEXT_UTF8 = b'abc\xc3\x9efg'
+        self.assertEqual(run_scrypt(TEXT_UTF8), '05717106997bfe0da42cf4779a2f8bd8')
+
+        # latin1 bytes
+        TEXT_LATIN1 = b'abc\xdefg'
+        self.assertEqual(run_scrypt(TEXT_LATIN1), '770825d10eeaaeaf98e8a3c40f9f441d')
+
+        # accept empty string
+        self.assertEqual(run_scrypt(""), 'ca1399e5fae5d3b9578dcd2b1faff6e2')
+
+        # reject other types
+        self.assertRaises(TypeError, run_scrypt, None)
+        self.assertRaises(TypeError, run_scrypt, 1)
+
+    def test_salt_param(self):
+        """'salt' parameter"""
+
+        def run_scrypt(salt):
+            return hexstr(scrypt_mod.scrypt("secret", salt, 2, 2, 2, 16))
+
+        # unicode
+        TEXT = u("abc\u00defg")
+        self.assertEqual(run_scrypt(TEXT), 'a748ec0f4613929e9e5f03d1ab741d88')
+
+        # utf8 bytes
+        TEXT_UTF8 = b'abc\xc3\x9efg'
+        self.assertEqual(run_scrypt(TEXT_UTF8), 'a748ec0f4613929e9e5f03d1ab741d88')
+
+        # latin1 bytes
+        TEXT_LATIN1 = b'abc\xdefg'
+        self.assertEqual(run_scrypt(TEXT_LATIN1), '91d056fb76fb6e9a7d1cdfffc0a16cd1')
+
+        # reject other types
+        self.assertRaises(TypeError, run_scrypt, None)
+        self.assertRaises(TypeError, run_scrypt, 1)
+
+    def test_n_param(self):
+        """'n' (rounds) parameter"""
+
+        def run_scrypt(n):
+            return hexstr(scrypt_mod.scrypt("secret", "salt", n, 2, 2, 16))
+
+        # must be > 1, and a power of 2
+        self.assertRaises(ValueError, run_scrypt, -1)
+        self.assertRaises(ValueError, run_scrypt, 0)
+        self.assertRaises(ValueError, run_scrypt, 1)
+        self.assertEqual(run_scrypt(2), 'dacf2bca255e2870e6636fa8c8957a66')
+        self.assertRaises(ValueError, run_scrypt, 3)
+        self.assertRaises(ValueError, run_scrypt, 15)
+        self.assertEqual(run_scrypt(16), '0272b8fc72bc54b1159340ed99425233')
+
+    def test_r_param(self):
+        """'r' (block size) parameter"""
+        def run_scrypt(r, n=2, p=2):
+            return hexstr(scrypt_mod.scrypt("secret", "salt", n, r, p, 16))
+
+        # must be > 0 (r=1 is accepted below)
+        self.assertRaises(ValueError, run_scrypt, -1)
+        self.assertRaises(ValueError, run_scrypt, 0)
+        self.assertEqual(run_scrypt(1), '3d630447d9f065363b8a79b0b3670251')
+        self.assertEqual(run_scrypt(2), 'dacf2bca255e2870e6636fa8c8957a66')
+        self.assertEqual(run_scrypt(5), '114f05e985a903c27237b5578e763736')
+
+        # reject r*p >= 2**30
+        self.assertRaises(ValueError, run_scrypt, (1<<30), p=1)
+        self.assertRaises(ValueError, run_scrypt, (1<<30) // 2, p=2)
+
+    def test_p_param(self):
+        """'p' (parallelism) parameter"""
+        def run_scrypt(p, n=2, r=2):
+            return hexstr(scrypt_mod.scrypt("secret", "salt", n, r, p, 16))
+
+        # must be > 0 (p=1 is accepted below)
+        self.assertRaises(ValueError, run_scrypt, -1)
+        self.assertRaises(ValueError, run_scrypt, 0)
+        self.assertEqual(run_scrypt(1), 'f2960ea8b7d48231fcec1b89b784a6fa')
+        self.assertEqual(run_scrypt(2), 'dacf2bca255e2870e6636fa8c8957a66')
+        self.assertEqual(run_scrypt(5), '848a0eeb2b3543e7f543844d6ca79782')
+
+        # reject r*p >= 2**30
+        self.assertRaises(ValueError, run_scrypt, (1<<30), r=1)
+        self.assertRaises(ValueError, run_scrypt, (1<<30) // 2, r=2)
+
+    def test_keylen_param(self):
+        """'keylen' parameter"""
+        rng = self.getRandom()
+
+        def run_scrypt(keylen):
+            return hexstr(scrypt_mod.scrypt("secret", "salt", 2, 2, 2, keylen))
+
+        # must be > 0
+        self.assertRaises(ValueError, run_scrypt, -1)
+        self.assertRaises(ValueError, run_scrypt, 0)
+        self.assertEqual(run_scrypt(1), 'da')
+
+        # pick random value
+        ksize = rng.randint(1, 1 << 10)
+        self.assertEqual(len(run_scrypt(ksize)), 2*ksize)  # 2 hex chars per output byte
+
+        # one more than upper bound
+        self.assertRaises(ValueError, run_scrypt, ((2**32) - 1) * 32 + 1)
+
+    #=============================================================================
+    # eoc
+    #=============================================================================
+
+
+#-----------------------------------------------------------------------
+# check which backends 'should' be available
+#-----------------------------------------------------------------------
+
+def _can_import_cffi_scrypt():
+    try:
+        import scrypt
+    except ImportError as err:
+        if "scrypt" in str(err):
+            return False
+        raise
+    return True
+
+has_cffi_scrypt = _can_import_cffi_scrypt()
+
+
+def _can_import_stdlib_scrypt():
+    try:
+        from hashlib import scrypt
+        return True
+    except ImportError:
+        return False
+
+has_stdlib_scrypt = _can_import_stdlib_scrypt()
+
+#-----------------------------------------------------------------------
+# test individual backends
+#-----------------------------------------------------------------------
+
+# NOTE: builtin version runs VERY slow (except under PyPy, where it's only 11x slower),
+# so skipping under quick test mode.
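Before the per-backend test classes below, a minimal sketch of the frontend they all exercise, using the first scrypt-whitepaper vector from reference_vectors above (a sketch only; it assumes at least one backend -- hashlib.scrypt, the 'scrypt' package, or the builtin -- is importable):

    from binascii import hexlify
    from passlib.crypto import scrypt as scrypt_mod

    # scrypt(secret, salt, n, r, p, keylen); n=16, r=1, p=1 is cheap under any backend
    out = scrypt_mod.scrypt("", "", 16, 1, 1, 64)
    assert hexlify(out).startswith(b"77d6576238657b20")

    # a specific backend can be forced, mirroring _CommonScryptTest.setUp():
    # scrypt_mod._set_backend("stdlib")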
+@skipUnless(PYPY or TEST_MODE(min="default"), "skipped under current test mode") +class BuiltinScryptTest(_CommonScryptTest): + backend = "builtin" + + def setUp(self): + super(BuiltinScryptTest, self).setUp() + warnings.filterwarnings("ignore", "(?i)using builtin scrypt backend", + category=exc.PasslibSecurityWarning) + + def test_missing_backend(self): + """backend management -- missing backend""" + if has_stdlib_scrypt or has_cffi_scrypt: + raise self.skipTest("non-builtin backend is present") + self.assertRaises(exc.MissingBackendError, scrypt_mod._set_backend, 'scrypt') + + +@skipUnless(has_cffi_scrypt, "'scrypt' package not found") +class ScryptPackageTest(_CommonScryptTest): + backend = "scrypt" + + def test_default_backend(self): + """backend management -- default backend""" + if has_stdlib_scrypt: + raise self.skipTest("higher priority backend present") + scrypt_mod._set_backend("default") + self.assertEqual(scrypt_mod.backend, "scrypt") + + +@skipUnless(has_stdlib_scrypt, "'hashlib.scrypt()' not found") +class StdlibScryptTest(_CommonScryptTest): + backend = "stdlib" + + def test_default_backend(self): + """backend management -- default backend""" + scrypt_mod._set_backend("default") + self.assertEqual(scrypt_mod.backend, "stdlib") + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_ext_django.py b/venv/lib/python3.12/site-packages/passlib/tests/test_ext_django.py new file mode 100644 index 0000000..2a0b418 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_ext_django.py @@ -0,0 +1,1080 @@ +"""test passlib.ext.django""" +#============================================================================= +# imports +#============================================================================= +# core +from __future__ import absolute_import, division, print_function +import logging; log = logging.getLogger(__name__) +import sys +import re +# site +# pkg +from passlib import apps as _apps, exc, registry +from passlib.apps import django10_context, django14_context, django16_context +from passlib.context import CryptContext +from passlib.ext.django.utils import ( + DJANGO_VERSION, MIN_DJANGO_VERSION, DjangoTranslator, quirks, +) +from passlib.utils.compat import iteritems, get_method_function, u +from passlib.utils.decor import memoized_property +# tests +from passlib.tests.utils import TestCase, TEST_MODE, handler_derived_from +from passlib.tests.test_handlers import get_handler_case +# local +__all__ = [ + "DjangoBehaviorTest", + "ExtensionBehaviorTest", + "DjangoExtensionTest", + + "_ExtensionSupport", + "_ExtensionTest", +] +#============================================================================= +# configure django settings for testcases +#============================================================================= + +# whether we have supported django version +has_min_django = DJANGO_VERSION >= MIN_DJANGO_VERSION + +# import and configure empty django settings +# NOTE: we don't want to set up entirety of django, so not using django.setup() directly. +# instead, manually configuring the settings, and setting it up w/ no apps installed. +# in future, may need to alter this so we call django.setup() after setting +# DJANGO_SETTINGS_MODULE to a custom settings module w/ a dummy django app. 
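The tests below drive the extension through load_extension()/update_settings(); for context, a deployment would enable it roughly like this (a sketch only, inferred from the test helpers -- "myproject/settings.py" is a hypothetical module, and the PASSLIB_CONFIG string mirrors the schemes ExtensionBehaviorTest uses further down):

    # myproject/settings.py -- hypothetical Django settings module
    PASSLIB_CONFIG = "[passlib]\nschemes = sha256_crypt, md5_crypt, des_crypt\ndeprecated = des_crypt\n"

    # then, in any module that runs once at startup:
    import passlib.ext.django.models  # importing applies the monkeypatch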
+if has_min_django: + # + # initialize django settings manually + # + from django.conf import settings, LazySettings + + if not isinstance(settings, LazySettings): + # this probably means django globals have been configured already, + # which we don't want, since test cases reset and manipulate settings. + raise RuntimeError("expected django.conf.settings to be LazySettings: %r" % (settings,)) + + # else configure a blank settings instance for the unittests + if not settings.configured: + settings.configure() + + # + # init django apps w/ NO installed apps. + # NOTE: required for django >= 1.9 + # + from django.apps import apps + apps.populate(["django.contrib.contenttypes", "django.contrib.auth"]) + +# log a warning if tested w/ newer version. +# NOTE: this is mainly here as place to mark what version it was run against before release. +if DJANGO_VERSION >= (3, 2): + log.info("this release hasn't been tested against Django %r", DJANGO_VERSION) + +#============================================================================= +# support funcs +#============================================================================= + +# flag for update_settings() to remove specified key entirely +UNSET = object() + +def update_settings(**kwds): + """helper to update django settings from kwds""" + for k,v in iteritems(kwds): + if v is UNSET: + if hasattr(settings, k): + delattr(settings, k) + else: + setattr(settings, k, v) + +if has_min_django: + from django.contrib.auth.models import User + + class FakeUser(User): + """mock user object for use in testing""" + # NOTE: this mainly just overrides .save() to test commit behavior. + + # NOTE: .Meta.app_label required for django >= 1.9 + class Meta: + app_label = __name__ + + @memoized_property + def saved_passwords(self): + return [] + + def pop_saved_passwords(self): + try: + return self.saved_passwords[:] + finally: + del self.saved_passwords[:] + + def save(self, update_fields=None): + # NOTE: ignoring update_fields for test purposes + self.saved_passwords.append(self.password) + +def create_mock_setter(): + state = [] + def setter(password): + state.append(password) + def popstate(): + try: + return state[:] + finally: + del state[:] + setter.popstate = popstate + return setter + + +def check_django_hasher_has_backend(name): + """ + check whether django hasher is available; + or if it should be skipped because django lacks third-party library. + """ + assert name + from django.contrib.auth.hashers import make_password + try: + make_password("", hasher=name) + return True + except ValueError as err: + if re.match("Couldn't load '.*?' algorithm .* No module named .*", str(err)): + return False + raise + +#============================================================================= +# work up stock django config +#============================================================================= + +def _modify_django_config(kwds, sha_rounds=None): + """ + helper to build django CryptContext config matching expected setup for stock django deploy. + :param kwds: + :param sha_rounds: + :return: + """ + # make sure we have dict + if hasattr(kwds, "to_dict"): + # type: CryptContext + kwds = kwds.to_dict() + + # update defaults + kwds.update( + # TODO: push this to passlib.apps django contexts + deprecated="auto", + ) + + # fill in default rounds for current django version, so our sample hashes come back + # unchanged, instead of being upgraded in-place by check_password(). 
+ if sha_rounds is None and has_min_django: + from django.contrib.auth.hashers import PBKDF2PasswordHasher + sha_rounds = PBKDF2PasswordHasher.iterations + + # modify rounds + if sha_rounds: + kwds.update( + django_pbkdf2_sha1__default_rounds=sha_rounds, + django_pbkdf2_sha256__default_rounds=sha_rounds, + ) + + return kwds + +#---------------------------------------------------- +# build config dict that matches stock django +#---------------------------------------------------- + +# XXX: replace this with code that interrogates default django config directly? +# could then separate out "validation of djangoXX_context objects" +# and "validation that individual hashers match django". +# or maybe add a "get_django_context(django_version)" helper to passlib.apps? +if DJANGO_VERSION >= (2, 1): + stock_config = _modify_django_config(_apps.django21_context) +elif DJANGO_VERSION >= (1, 10): + stock_config = _modify_django_config(_apps.django110_context) +else: + # assert DJANGO_VERSION >= (1, 8) + stock_config = _modify_django_config(_apps.django16_context) + +#---------------------------------------------------- +# override sample hashes used in test cases +#---------------------------------------------------- +from passlib.hash import django_pbkdf2_sha256 +sample_hashes = dict( + django_pbkdf2_sha256=("not a password", django_pbkdf2_sha256 + .using(rounds=stock_config.get("django_pbkdf2_sha256__default_rounds")) + .hash("not a password")) +) + +#============================================================================= +# test utils +#============================================================================= + +class _ExtensionSupport(object): + """ + test support funcs for loading/unloading extension. + this class is mixed in to various TestCase subclasses. + """ + #=================================================================== + # support funcs + #=================================================================== + + @classmethod + def _iter_patch_candidates(cls): + """helper to scan for monkeypatches. + + returns tuple containing: + * object (module or class) + * attribute of object + * value of attribute + * whether it should or should not be patched + """ + # XXX: this and assert_unpatched() could probably be refactored to use + # the PatchManager class to do the heavy lifting. 
+ from django.contrib.auth import models, hashers + user_attrs = ["check_password", "set_password"] + model_attrs = ["check_password", "make_password"] + hasher_attrs = ["check_password", "make_password", "get_hasher", "identify_hasher", + "get_hashers"] + objs = [(models, model_attrs), + (models.User, user_attrs), + (hashers, hasher_attrs), + ] + for obj, patched in objs: + for attr in dir(obj): + if attr.startswith("_"): + continue + value = obj.__dict__.get(attr, UNSET) # can't use getattr() due to GAE + if value is UNSET and attr not in patched: + continue + value = get_method_function(value) + source = getattr(value, "__module__", None) + if source: + yield obj, attr, source, (attr in patched) + + #=================================================================== + # verify current patch state + #=================================================================== + + def assert_unpatched(self): + """ + test that django is in unpatched state + """ + # make sure we aren't currently patched + mod = sys.modules.get("passlib.ext.django.models") + self.assertFalse(mod and mod.adapter.patched, "patch should not be enabled") + + # make sure no objects have been replaced, by checking __module__ + for obj, attr, source, patched in self._iter_patch_candidates(): + if patched: + self.assertTrue(source.startswith("django.contrib.auth."), + "obj=%r attr=%r was not reverted: %r" % + (obj, attr, source)) + else: + self.assertFalse(source.startswith("passlib."), + "obj=%r attr=%r should not have been patched: %r" % + (obj, attr, source)) + + def assert_patched(self, context=None): + """ + helper to ensure django HAS been patched, and is using specified config + """ + # make sure we're currently patched + mod = sys.modules.get("passlib.ext.django.models") + self.assertTrue(mod and mod.adapter.patched, "patch should have been enabled") + + # make sure only the expected objects have been patched + for obj, attr, source, patched in self._iter_patch_candidates(): + if patched: + self.assertTrue(source == "passlib.ext.django.utils", + "obj=%r attr=%r should have been patched: %r" % + (obj, attr, source)) + else: + self.assertFalse(source.startswith("passlib."), + "obj=%r attr=%r should not have been patched: %r" % + (obj, attr, source)) + + # check context matches + if context is not None: + context = CryptContext._norm_source(context) + self.assertEqual(mod.password_context.to_dict(resolve=True), + context.to_dict(resolve=True)) + + #=================================================================== + # load / unload the extension (and verify it worked) + #=================================================================== + + _config_keys = ["PASSLIB_CONFIG", "PASSLIB_CONTEXT", "PASSLIB_GET_CATEGORY"] + + def load_extension(self, check=True, **kwds): + """ + helper to load extension with specified config & patch django + """ + self.unload_extension() + if check: + config = kwds.get("PASSLIB_CONFIG") or kwds.get("PASSLIB_CONTEXT") + for key in self._config_keys: + kwds.setdefault(key, UNSET) + update_settings(**kwds) + import passlib.ext.django.models + if check: + self.assert_patched(context=config) + + def unload_extension(self): + """ + helper to remove patches and unload extension + """ + # remove patches and unload module + mod = sys.modules.get("passlib.ext.django.models") + if mod: + mod.adapter.remove_patch() + del sys.modules["passlib.ext.django.models"] + # wipe config from django settings + update_settings(**dict((key, UNSET) for key in self._config_keys)) + # check everything's gone + 
self.assert_unpatched() + + #=================================================================== + # eoc + #=================================================================== + + +# XXX: rename to ExtensionFixture? +# NOTE: would roll this into _ExtensionSupport class; +# but we have to mix that one into django's TestCase classes as well; +# and our TestCase class (and this setUp() method) would foul things up. +class _ExtensionTest(TestCase, _ExtensionSupport): + """ + TestCase mixin which makes sure extension is unloaded before test; + and make sure it's unloaded after test as well. + """ + #============================================================================= + # setup + #============================================================================= + + def setUp(self): + super(_ExtensionTest, self).setUp() + + self.require_TEST_MODE("default") + + if not DJANGO_VERSION: + raise self.skipTest("Django not installed") + elif not has_min_django: + raise self.skipTest("Django version too old") + + # reset to baseline, and verify it worked + self.unload_extension() + + # and do the same when the test exits + self.addCleanup(self.unload_extension) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# extension tests +#============================================================================= + +#: static passwords used by DjangoBehaviorTest methods +PASS1 = "toomanysecrets" +WRONG1 = "letmein" + + +class DjangoBehaviorTest(_ExtensionTest): + """ + tests model to verify it matches django's behavior. + + running this class verifies the tests correctly assert what Django itself does. + + running the ExtensionBehaviorTest subclass below verifies "passlib.ext.django" + matches what the tests assert. + """ + #============================================================================= + # class attrs + #============================================================================= + + descriptionPrefix = "verify django behavior" + + #: tracks whether tests should assume "passlib.ext.django" monkeypatch is applied. + #: (set to True by ExtensionBehaviorTest subclass) + patched = False + + #: dict containing CryptContext() config which should match current django deploy. + #: used by tests to verify expected behavior. + config = stock_config + + # NOTE: if this test fails, it means we're not accounting for + # some part of django's hashing logic, or that this is + # running against an untested version of django with a new + # hashing policy. + + #============================================================================= + # test helpers + #============================================================================= + + @memoized_property + def context(self): + """ + per-test CryptContext() created from .config. + """ + return CryptContext._norm_source(self.config) + + def assert_unusable_password(self, user): + """ + check that user object is set to 'unusable password' constant + """ + self.assertTrue(user.password.startswith("!")) + self.assertFalse(user.has_usable_password()) + self.assertEqual(user.pop_saved_passwords(), []) + + def assert_valid_password(self, user, hash=UNSET, saved=None): + """ + check that user object has a usable password hash. 
+ :param hash: optionally check it has this exact hash + :param saved: check that mock commit history for user.password matches this list + """ + if hash is UNSET: + self.assertNotEqual(user.password, "!") + self.assertNotEqual(user.password, None) + else: + self.assertEqual(user.password, hash) + self.assertTrue(user.has_usable_password(), + "hash should be usable: %r" % (user.password,)) + self.assertEqual(user.pop_saved_passwords(), + [] if saved is None else [saved]) + + #============================================================================= + # test hashing interface + #----------------------------------------------------------------------------- + # these functions are run against both the actual django code, + # to verify the assumptions of the unittests are correct; + # and run against the passlib extension, to verify it matches those assumptions. + # + # these tests check the following django methods: + # User.set_password() + # User.check_password() + # make_password() -- 1.4 only + # check_password() + # identify_hasher() + # User.has_usable_password() + # User.set_unusable_password() + # + # XXX: this take a while to run. what could be trimmed? + # + # TODO: add get_hasher() checks where appropriate in tests below. + #============================================================================= + + def test_extension_config(self): + """ + test extension config is loaded correctly + """ + if not self.patched: + raise self.skipTest("extension not loaded") + + ctx = self.context + + # contexts should match + from django.contrib.auth.hashers import check_password + from passlib.ext.django.models import password_context + self.assertEqual(password_context.to_dict(resolve=True), ctx.to_dict(resolve=True)) + + # should have patched both places + from django.contrib.auth.models import check_password as check_password2 + self.assertEqual(check_password2, check_password) + + def test_default_algorithm(self): + """ + test django's default algorithm + """ + ctx = self.context + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import make_password + + # User.set_password() should use default alg + user = FakeUser() + user.set_password(PASS1) + self.assertTrue(ctx.handler().verify(PASS1, user.password)) + self.assert_valid_password(user) + + # User.check_password() - n/a + + # make_password() should use default alg + hash = make_password(PASS1) + self.assertTrue(ctx.handler().verify(PASS1, hash)) + + # check_password() - n/a + + def test_empty_password(self): + """ + test how methods handle empty string as password + """ + ctx = self.context + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() should use default alg + user = FakeUser() + user.set_password('') + hash = user.password + self.assertTrue(ctx.handler().verify('', hash)) + self.assert_valid_password(user, hash) + + # User.check_password() should return True + self.assertTrue(user.check_password("")) + self.assert_valid_password(user, hash) + + # XXX: test make_password() ? 
+ + # TODO: is_password_usable() + + # identify_hasher() -- na + + # check_password() should return True + self.assertTrue(check_password("", hash)) + + def test_unusable_flag(self): + """ + test how methods handle 'unusable flag' in hash + """ + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # sanity check via user.set_unusable_password() + user = FakeUser() + user.set_unusable_password() + self.assert_unusable_password(user) + + # ensure User.set_password() sets unusable flag + user = FakeUser() + user.set_password(None) + self.assert_unusable_password(user) + + # User.check_password() should always fail + self.assertFalse(user.check_password(None)) + self.assertFalse(user.check_password('None')) + self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(PASS1)) + self.assertFalse(user.check_password(WRONG1)) + self.assert_unusable_password(user) + + # make_password() should also set flag + self.assertTrue(make_password(None).startswith("!")) + + # check_password() should return False (didn't handle disabled under 1.3) + self.assertFalse(check_password(PASS1, '!')) + + # identify_hasher() and is_password_usable() should reject it + self.assertFalse(is_password_usable(user.password)) + self.assertRaises(ValueError, identify_hasher, user.password) + + def test_none_hash_value(self): + """ + test how methods handle None as hash value + """ + patched = self.patched + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() - n/a + + # User.check_password() - returns False + user = FakeUser() + user.password = None + if quirks.none_causes_check_password_error and not patched: + # django 2.1+ + self.assertRaises(TypeError, user.check_password, PASS1) + else: + self.assertFalse(user.check_password(PASS1)) + + self.assertEqual(user.has_usable_password(), + quirks.empty_is_usable_password) + + # TODO: is_password_usable() + + # make_password() - n/a + + # check_password() - error + if quirks.none_causes_check_password_error and not patched: + self.assertRaises(TypeError, check_password, PASS1, None) + else: + self.assertFalse(check_password(PASS1, None)) + + # identify_hasher() - error + self.assertRaises(TypeError, identify_hasher, None) + + def test_empty_hash_value(self): + """ + test how methods handle empty string as hash value + """ + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() - n/a + + # User.check_password() + # As of django 1.5, blank hash returns False (django issue 18453) + user = FakeUser() + user.password = "" + self.assertFalse(user.check_password(PASS1)) + + # verify hash wasn't changed/upgraded during check_password() call + self.assertEqual(user.password, "") + self.assertEqual(user.pop_saved_passwords(), []) + + # User.has_usable_password() + self.assertEqual(user.has_usable_password(), quirks.empty_is_usable_password) + + # TODO: is_password_usable() + + # make_password() - n/a + + # check_password() + self.assertFalse(check_password(PASS1, "")) + + # identify_hasher() - throws error + self.assertRaises(ValueError, 
identify_hasher, "") + + def test_invalid_hash_values(self): + """ + test how methods handle invalid hash values. + """ + for hash in [ + "$789$foo", # empty identifier + ]: + with self.subTest(hash=hash): + self._do_test_invalid_hash_value(hash) + + def _do_test_invalid_hash_value(self, hash): + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() - n/a + + # User.check_password() + # As of django 1.5, invalid hash returns False (side effect of django issue 18453) + user = FakeUser() + user.password = hash + self.assertFalse(user.check_password(PASS1)) + + # verify hash wasn't changed/upgraded during check_password() call + self.assertEqual(user.password, hash) + self.assertEqual(user.pop_saved_passwords(), []) + + # User.has_usable_password() + self.assertEqual(user.has_usable_password(), quirks.invalid_is_usable_password) + + # TODO: is_password_usable() + + # make_password() - n/a + + # check_password() + self.assertFalse(check_password(PASS1, hash)) + + # identify_hasher() - throws error + self.assertRaises(ValueError, identify_hasher, hash) + + def test_available_schemes(self): + """ + run a bunch of subtests for each hasher available in the default django setup + (as determined by reading self.context) + """ + for scheme in self.context.schemes(): + with self.subTest(scheme=scheme): + self._do_test_available_scheme(scheme) + + def _do_test_available_scheme(self, scheme): + """ + helper to test how specific hasher behaves. + :param scheme: *passlib* name of hasher (e.g. "django_pbkdf2_sha256") + """ + log = self.getLogger() + ctx = self.context + patched = self.patched + setter = create_mock_setter() + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + #------------------------------------------------------- + # setup constants & imports, pick a sample secret/hash combo + #------------------------------------------------------- + handler = ctx.handler(scheme) + log.debug("testing scheme: %r => %r", scheme, handler) + deprecated = ctx.handler(scheme).deprecated + assert not deprecated or scheme != ctx.default_scheme() + try: + testcase = get_handler_case(scheme) + except exc.MissingBackendError: + raise self.skipTest("backend not available") + assert handler_derived_from(handler, testcase.handler) + if handler.is_disabled: + raise self.skipTest("skip disabled hasher") + + # verify that django has a backend available + # (since our hasher may use different set of backends, + # get_handler_case() above may work, but django will have nothing) + if not patched and not check_django_hasher_has_backend(handler.django_name): + assert scheme in ["django_bcrypt", "django_bcrypt_sha256", "django_argon2"], \ + "%r scheme should always have active backend" % scheme + log.warning("skipping scheme %r due to missing django dependency", scheme) + raise self.skipTest("skip due to missing dependency") + + # find a sample (secret, hash) pair to test with + try: + secret, hash = sample_hashes[scheme] + except KeyError: + get_sample_hash = testcase("setUp").get_sample_hash + while True: + secret, hash = get_sample_hash() + if secret: # don't select blank passwords + break + other = 'dontletmein' + + #------------------------------------------------------- + # 
User.set_password() - not tested here + #------------------------------------------------------- + + #------------------------------------------------------- + # User.check_password()+migration against known hash + #------------------------------------------------------- + user = FakeUser() + user.password = hash + + # check against invalid password + self.assertFalse(user.check_password(None)) + ##self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(other)) + self.assert_valid_password(user, hash) + + # check against valid password + self.assertTrue(user.check_password(secret)) + + # check if it upgraded the hash + # NOTE: needs_update kept separate in case we need to test rounds. + needs_update = deprecated + if needs_update: + self.assertNotEqual(user.password, hash) + self.assertFalse(handler.identify(user.password)) + self.assertTrue(ctx.handler().verify(secret, user.password)) + self.assert_valid_password(user, saved=user.password) + else: + self.assert_valid_password(user, hash) + + # don't need to check rest for most deployments + if TEST_MODE(max="default"): + return + + #------------------------------------------------------- + # make_password() correctly selects algorithm + #------------------------------------------------------- + alg = DjangoTranslator().passlib_to_django_name(scheme) + hash2 = make_password(secret, hasher=alg) + self.assertTrue(handler.verify(secret, hash2)) + + #------------------------------------------------------- + # check_password()+setter against known hash + #------------------------------------------------------- + # should call setter only if it needs_update + self.assertTrue(check_password(secret, hash, setter=setter)) + self.assertEqual(setter.popstate(), [secret] if needs_update else []) + + # should not call setter + self.assertFalse(check_password(other, hash, setter=setter)) + self.assertEqual(setter.popstate(), []) + + ### check preferred kwd is ignored (feature we don't currently support fully) + ##self.assertTrue(check_password(secret, hash, setter=setter, preferred='fooey')) + ##self.assertEqual(setter.popstate(), [secret]) + + # TODO: get_hasher() + + #------------------------------------------------------- + # identify_hasher() recognizes known hash + #------------------------------------------------------- + self.assertTrue(is_password_usable(hash)) + name = DjangoTranslator().django_to_passlib_name(identify_hasher(hash).algorithm) + self.assertEqual(name, scheme) + + #=================================================================== + # eoc + #=================================================================== + +#=================================================================== +# extension fidelity tests +#=================================================================== + +class ExtensionBehaviorTest(DjangoBehaviorTest): + """ + test that "passlib.ext.django" conforms to behavioral assertions in DjangoBehaviorTest + """ + descriptionPrefix = "verify extension behavior" + + config = dict( + schemes="sha256_crypt,md5_crypt,des_crypt", + deprecated="des_crypt", + ) + + def setUp(self): + super(ExtensionBehaviorTest, self).setUp() + + # always load extension before each test + self.load_extension(PASSLIB_CONFIG=self.config) + self.patched = True + +#=================================================================== +# extension internal tests +#=================================================================== + +class DjangoExtensionTest(_ExtensionTest): + """ + test the ``passlib.ext.django`` plugin 
+ """ + #=================================================================== + # class attrs + #=================================================================== + + descriptionPrefix = "passlib.ext.django plugin" + + #=================================================================== + # monkeypatch testing + #=================================================================== + + def test_00_patch_control(self): + """test set_django_password_context patch/unpatch""" + + # check config="disabled" + self.load_extension(PASSLIB_CONFIG="disabled", check=False) + self.assert_unpatched() + + # check legacy config=None + with self.assertWarningList("PASSLIB_CONFIG=None is deprecated"): + self.load_extension(PASSLIB_CONFIG=None, check=False) + self.assert_unpatched() + + # try stock django 1.0 context + self.load_extension(PASSLIB_CONFIG="django-1.0", check=False) + self.assert_patched(context=django10_context) + + # try to remove patch + self.unload_extension() + + # patch to use stock django 1.4 context + self.load_extension(PASSLIB_CONFIG="django-1.4", check=False) + self.assert_patched(context=django14_context) + + # try to remove patch again + self.unload_extension() + + def test_01_overwrite_detection(self): + """test detection of foreign monkeypatching""" + # NOTE: this sets things up, and spot checks two methods, + # this should be enough to verify patch manager is working. + # TODO: test unpatch behavior honors flag. + + # configure plugin to use sample context + config = "[passlib]\nschemes=des_crypt\n" + self.load_extension(PASSLIB_CONFIG=config) + + # setup helpers + import django.contrib.auth.models as models + from passlib.ext.django.models import adapter + def dummy(): + pass + + # mess with User.set_password, make sure it's detected + orig = models.User.set_password + models.User.set_password = dummy + with self.assertWarningList("another library has patched.*User\.set_password"): + adapter._manager.check_all() + models.User.set_password = orig + + # mess with models.check_password, make sure it's detected + orig = models.check_password + models.check_password = dummy + with self.assertWarningList("another library has patched.*models:check_password"): + adapter._manager.check_all() + models.check_password = orig + + def test_02_handler_wrapper(self): + """test Hasher-compatible handler wrappers""" + from django.contrib.auth import hashers + + passlib_to_django = DjangoTranslator().passlib_to_django + + # should return native django hasher if available + if DJANGO_VERSION > (1, 10): + self.assertRaises(ValueError, passlib_to_django, "hex_md5") + else: + hasher = passlib_to_django("hex_md5") + self.assertIsInstance(hasher, hashers.UnsaltedMD5PasswordHasher) + + # should return native django hasher + # NOTE: present but not enabled by default in django as of 2.1 + # (see _builtin_django_hashers) + hasher = passlib_to_django("django_bcrypt") + self.assertIsInstance(hasher, hashers.BCryptPasswordHasher) + + # otherwise should return wrapper + from passlib.hash import sha256_crypt + hasher = passlib_to_django("sha256_crypt") + self.assertEqual(hasher.algorithm, "passlib_sha256_crypt") + + # and wrapper should return correct hash + encoded = hasher.encode("stub") + self.assertTrue(sha256_crypt.verify("stub", encoded)) + self.assertTrue(hasher.verify("stub", encoded)) + self.assertFalse(hasher.verify("xxxx", encoded)) + + # test wrapper accepts options + encoded = hasher.encode("stub", "abcd"*4, rounds=1234) + self.assertEqual(encoded, "$5$rounds=1234$abcdabcdabcdabcd$" + 
"v2RWkZQzctPdejyRqmmTDQpZN6wTh7.RUy9zF2LftT6") + self.assertEqual(hasher.safe_summary(encoded), + {'algorithm': 'sha256_crypt', + 'salt': u('abcdab**********'), + 'rounds': 1234, + 'hash': u('v2RWkZ*************************************'), + }) + + # made up name should throw error + # XXX: should this throw ValueError instead, to match django? + self.assertRaises(KeyError, passlib_to_django, "does_not_exist") + + #=================================================================== + # PASSLIB_CONFIG settings + #=================================================================== + def test_11_config_disabled(self): + """test PASSLIB_CONFIG='disabled'""" + # test config=None (deprecated) + with self.assertWarningList("PASSLIB_CONFIG=None is deprecated"): + self.load_extension(PASSLIB_CONFIG=None, check=False) + self.assert_unpatched() + + # test disabled config + self.load_extension(PASSLIB_CONFIG="disabled", check=False) + self.assert_unpatched() + + def test_12_config_presets(self): + """test PASSLIB_CONFIG=''""" + # test django presets + self.load_extension(PASSLIB_CONTEXT="django-default", check=False) + ctx = django16_context + self.assert_patched(ctx) + + self.load_extension(PASSLIB_CONFIG="django-1.0", check=False) + self.assert_patched(django10_context) + + self.load_extension(PASSLIB_CONFIG="django-1.4", check=False) + self.assert_patched(django14_context) + + def test_13_config_defaults(self): + """test PASSLIB_CONFIG default behavior""" + # check implicit default + from passlib.ext.django.utils import PASSLIB_DEFAULT + default = CryptContext.from_string(PASSLIB_DEFAULT) + self.load_extension() + self.assert_patched(PASSLIB_DEFAULT) + + # check default preset + self.load_extension(PASSLIB_CONTEXT="passlib-default", check=False) + self.assert_patched(PASSLIB_DEFAULT) + + # check explicit string + self.load_extension(PASSLIB_CONTEXT=PASSLIB_DEFAULT, check=False) + self.assert_patched(PASSLIB_DEFAULT) + + def test_14_config_invalid(self): + """test PASSLIB_CONFIG type checks""" + update_settings(PASSLIB_CONTEXT=123, PASSLIB_CONFIG=UNSET) + self.assertRaises(TypeError, __import__, 'passlib.ext.django.models') + + self.unload_extension() + update_settings(PASSLIB_CONFIG="missing-preset", PASSLIB_CONTEXT=UNSET) + self.assertRaises(ValueError, __import__, 'passlib.ext.django.models') + + #=================================================================== + # PASSLIB_GET_CATEGORY setting + #=================================================================== + def test_21_category_setting(self): + """test PASSLIB_GET_CATEGORY parameter""" + # define config where rounds can be used to detect category + config = dict( + schemes = ["sha256_crypt"], + sha256_crypt__default_rounds = 1000, + staff__sha256_crypt__default_rounds = 2000, + superuser__sha256_crypt__default_rounds = 3000, + ) + from passlib.hash import sha256_crypt + + def run(**kwds): + """helper to take in user opts, return rounds used in password""" + user = FakeUser(**kwds) + user.set_password("stub") + return sha256_crypt.from_string(user.password).rounds + + # test default get_category + self.load_extension(PASSLIB_CONFIG=config) + self.assertEqual(run(), 1000) + self.assertEqual(run(is_staff=True), 2000) + self.assertEqual(run(is_superuser=True), 3000) + + # test patch uses explicit get_category function + def get_category(user): + return user.first_name or None + self.load_extension(PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY=get_category) + self.assertEqual(run(), 1000) + self.assertEqual(run(first_name='other'), 1000) + 
self.assertEqual(run(first_name='staff'), 2000) + self.assertEqual(run(first_name='superuser'), 3000) + + # test patch can disable get_category entirely + def get_category(user): + return None + self.load_extension(PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY=get_category) + self.assertEqual(run(), 1000) + self.assertEqual(run(first_name='other'), 1000) + self.assertEqual(run(first_name='staff', is_staff=True), 1000) + self.assertEqual(run(first_name='superuser', is_superuser=True), 1000) + + # test bad value + self.assertRaises(TypeError, self.load_extension, PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY='x') + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_ext_django_source.py b/venv/lib/python3.12/site-packages/passlib/tests/test_ext_django_source.py new file mode 100644 index 0000000..4b42e59 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_ext_django_source.py @@ -0,0 +1,250 @@ +""" +test passlib.ext.django against django source tests +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.compat import suppress_cause +from passlib.ext.django.utils import DJANGO_VERSION, DjangoTranslator, _PasslibHasherWrapper +# tests +from passlib.tests.utils import TestCase, TEST_MODE +from .test_ext_django import ( + has_min_django, stock_config, _ExtensionSupport, +) +if has_min_django: + from .test_ext_django import settings +# local +__all__ = [ + "HashersTest", +] +#============================================================================= +# HashersTest -- +# hack up some of the real django tests to run w/ the extension loaded, +# to ensure we mimic their behavior. +# however, the django tests were moved out of the package, and into a source-only location +# as of django 1.7. so we disable tests from that point on unless the test-runner specifies +# the PASSLIB_TESTS_DJANGO_SOURCE_PATH env var +#============================================================================= + +#: ref to django unittest root module (if found) +test_hashers_mod = None + +#: message about why test module isn't present (if not found) +hashers_skip_msg = None + +#---------------------------------------------------------------------- +# try to load django's tests/auth_tests/test_hashers.py module, +# or note why we failed. 
+#---------------------------------------------------------------------- +if TEST_MODE(max="quick"): + hashers_skip_msg = "requires >= 'default' test mode" + +elif has_min_django: + import os + import sys + source_path = os.environ.get("PASSLIB_TESTS_DJANGO_SOURCE_PATH") + + if source_path: + if not os.path.exists(source_path): + raise EnvironmentError("django source path not found: %r" % source_path) + if not all(os.path.exists(os.path.join(source_path, name)) + for name in ["django", "tests"]): + raise EnvironmentError("invalid django source path: %r" % source_path) + log.info("using django tests from source path: %r", source_path) + tests_path = os.path.join(source_path, "tests") + sys.path.insert(0, tests_path) + try: + from auth_tests import test_hashers as test_hashers_mod + except ImportError as err: + raise suppress_cause( + EnvironmentError("error trying to import django tests " + "from source path (%r): %r" % + (source_path, err))) + finally: + sys.path.remove(tests_path) + + else: + hashers_skip_msg = "requires PASSLIB_TESTS_DJANGO_SOURCE_PATH to be set" + + if TEST_MODE("full"): + # print warning so user knows what's happening + sys.stderr.write("\nWARNING: $PASSLIB_TESTS_DJANGO_SOURCE_PATH is not set; " + "can't run Django's own unittests against passlib.ext.django\n") + +elif DJANGO_VERSION: + hashers_skip_msg = "django version too old" + +else: + hashers_skip_msg = "django not installed" + +#---------------------------------------------------------------------- +# if found module, create wrapper to run django's own tests, +# but with passlib monkeypatched in. +#---------------------------------------------------------------------- +if test_hashers_mod: + from django.core.signals import setting_changed + from django.dispatch import receiver + from django.utils.module_loading import import_string + from passlib.utils.compat import get_unbound_method_function + + class HashersTest(test_hashers_mod.TestUtilsHashPass, _ExtensionSupport): + """ + Run django's hasher unittests against passlib's extension + and workalike implementations + """ + + #================================================================== + # helpers + #================================================================== + + # port patchAttr() helper method from passlib.tests.utils.TestCase + patchAttr = get_unbound_method_function(TestCase.patchAttr) + + #================================================================== + # custom setup + #================================================================== + def setUp(self): + #--------------------------------------------------------- + # install passlib.ext.django adapter, and get context + #--------------------------------------------------------- + self.load_extension(PASSLIB_CONTEXT=stock_config, check=False) + from passlib.ext.django.models import adapter + context = adapter.context + + #--------------------------------------------------------- + # patch tests module to use our versions of patched funcs + # (which should be installed in hashers module) + #--------------------------------------------------------- + from django.contrib.auth import hashers + for attr in ["make_password", + "check_password", + "identify_hasher", + "is_password_usable", + "get_hasher"]: + self.patchAttr(test_hashers_mod, attr, getattr(hashers, attr)) + + #--------------------------------------------------------- + # django tests expect empty django_des_crypt salt field + #--------------------------------------------------------- + from passlib.hash import django_des_crypt + 
self.patchAttr(django_des_crypt, "use_duplicate_salt", False) + + #--------------------------------------------------------- + # install receiver to update scheme list if test changes settings + #--------------------------------------------------------- + django_to_passlib_name = DjangoTranslator().django_to_passlib_name + + @receiver(setting_changed, weak=False) + def update_schemes(**kwds): + if kwds and kwds['setting'] != 'PASSWORD_HASHERS': + return + assert context is adapter.context + schemes = [ + django_to_passlib_name(import_string(hash_path)()) + for hash_path in settings.PASSWORD_HASHERS + ] + # workaround for a few tests that only specify hex_md5, + # but test for django_salted_md5 format. + if "hex_md5" in schemes and "django_salted_md5" not in schemes: + schemes.append("django_salted_md5") + schemes.append("django_disabled") + context.update(schemes=schemes, deprecated="auto") + adapter.reset_hashers() + + self.addCleanup(setting_changed.disconnect, update_schemes) + + update_schemes() + + #--------------------------------------------------------- + # need password_context to keep up to date with django_hasher.iterations, + # which is frequently patched by django tests. + # + # HACK: to fix this, inserting wrapper around a bunch of context + # methods so that any time adapter calls them, + # attrs are resynced first. + #--------------------------------------------------------- + + def update_rounds(): + """ + sync django hasher config -> passlib hashers + """ + for handler in context.schemes(resolve=True): + if 'rounds' not in handler.setting_kwds: + continue + hasher = adapter.passlib_to_django(handler) + if isinstance(hasher, _PasslibHasherWrapper): + continue + rounds = getattr(hasher, "rounds", None) or \ + getattr(hasher, "iterations", None) + if rounds is None: + continue + # XXX: this doesn't modify the context, which would + # cause other weirdness (since it would replace handler factories completely, + # instead of just updating their state) + handler.min_desired_rounds = handler.max_desired_rounds = handler.default_rounds = rounds + + _in_update = [False] + + def update_wrapper(wrapped, *args, **kwds): + """ + wrapper around arbitrary func, that first triggers sync + """ + if not _in_update[0]: + _in_update[0] = True + try: + update_rounds() + finally: + _in_update[0] = False + return wrapped(*args, **kwds) + + # sync before any context call + for attr in ["schemes", "handler", "default_scheme", "hash", + "verify", "needs_update", "verify_and_update"]: + self.patchAttr(context, attr, update_wrapper, wrap=True) + + # sync whenever adapter tries to resolve passlib hasher + self.patchAttr(adapter, "django_to_passlib", update_wrapper, wrap=True) + + def tearDown(self): + # NOTE: could rely on addCleanup() instead, but need py26 compat + self.unload_extension() + super(HashersTest, self).tearDown() + + #================================================================== + # skip a few methods that can't be replicated properly + # *want to minimize these as much as possible* + #================================================================== + + _OMIT = lambda self: self.skipTest("omitted by passlib") + + # XXX: this test registers two classes w/ same algorithm id, + # something we don't support -- how does django sanely handle + # that anyways? get_hashers_by_algorithm() should throw KeyError, right? + test_pbkdf2_upgrade_new_hasher = _OMIT + + # TODO: support wrapping django's harden-runtime feature? + # would help pass their tests. 
+ test_check_password_calls_harden_runtime = _OMIT + test_bcrypt_harden_runtime = _OMIT + test_pbkdf2_harden_runtime = _OMIT + + #================================================================== + # eoc + #================================================================== + +else: + # otherwise leave a stub so test log tells why test was skipped. + + class HashersTest(TestCase): + + def test_external_django_hasher_tests(self): + """external django hasher tests""" + raise self.skipTest(hashers_skip_msg) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers.py new file mode 100644 index 0000000..cad5ef9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers.py @@ -0,0 +1,1819 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import os +import sys +import warnings +# site +# pkg +from passlib import exc, hash +from passlib.utils import repeat_string +from passlib.utils.compat import irange, PY3, u, get_method_function +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, \ + TEST_MODE, UserHandlerMixin, EncodingHandlerMixin +# module + +#============================================================================= +# constants & support +#============================================================================= + +# some common unicode passwords which used as test cases +UPASS_WAV = u('\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2') +UPASS_USD = u("\u20AC\u00A5$") +UPASS_TABLE = u("t\u00e1\u0411\u2113\u0259") + +PASS_TABLE_UTF8 = b't\xc3\xa1\xd0\x91\xe2\x84\x93\xc9\x99' # utf-8 + +# handlers which support multiple backends, but don't have multi-backend tests. +_omitted_backend_tests = ["django_bcrypt", "django_bcrypt_sha256", "django_argon2"] + +#: modules where get_handler_case() should search for test cases. +_handler_test_modules = [ + "test_handlers", + "test_handlers_argon2", + "test_handlers_bcrypt", + "test_handlers_cisco", + "test_handlers_django", + "test_handlers_pbkdf2", + "test_handlers_scrypt", +] + +def get_handler_case(scheme): + """ + return HandlerCase instance for scheme, used by other tests. + + :param scheme: name of hasher to locate test for (e.g. "bcrypt") + + :raises KeyError: + if scheme isn't known hasher. + + :raises MissingBackendError: + if hasher doesn't have any available backends. + + :returns: + HandlerCase subclass (which derives from TestCase) + """ + from passlib.registry import get_crypt_handler + handler = get_crypt_handler(scheme) + if hasattr(handler, "backends") and scheme not in _omitted_backend_tests: + # XXX: if no backends available, could proceed to pick first backend for test lookup; + # should investigate if that would be useful to callers. + try: + backend = handler.get_backend() + except exc.MissingBackendError: + assert scheme in conditionally_available_hashes + raise + name = "%s_%s_test" % (scheme, backend) + else: + name = "%s_test" % scheme + for module in _handler_test_modules: + modname = "passlib.tests." 
+ module + __import__(modname) + mod = sys.modules[modname] + try: + return getattr(mod, name) + except AttributeError: + pass + # every hasher should have test suite, so if we get here, means test is either missing, + # misnamed, or _handler_test_modules list is out of date. + raise RuntimeError("can't find test case named %r for %r" % (name, scheme)) + +#: hashes which there may not be a backend available for, +#: and get_handler_case() may (correctly) throw a MissingBackendError +conditionally_available_hashes = ["argon2", "bcrypt", "bcrypt_sha256"] + +#============================================================================= +# apr md5 crypt +#============================================================================= +class apr_md5_crypt_test(HandlerCase): + handler = hash.apr_md5_crypt + + known_correct_hashes = [ + # + # http://httpd.apache.org/docs/2.2/misc/password_encryptions.html + # + ('myPassword', '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA/'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$apr1$bzYrOHUx$a1FcpXuQDJV3vPY20CS6N1'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash ----\/ + '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA!' + ] + +#============================================================================= +# bigcrypt +#============================================================================= +class bigcrypt_test(HandlerCase): + handler = hash.bigcrypt + + # TODO: find an authoritative source of test vectors + known_correct_hashes = [ + + # + # various docs & messages on the web. + # + ("passphrase", "qiyh4XPJGsOZ2MEAyLkfWqeQ"), + ("This is very long passwd", "f8.SVpL2fvwjkAnxn8/rgTkwvrif6bjYB5c"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'SEChBAyMbMNhgGLyP7kD1HZU'), + ] + + known_unidentified_hashes = [ + # one char short (10 % 11) + "qiyh4XPJGsOZ2MEAyLkfWqe" + + # one char too many (1 % 11) + "f8.SVpL2fvwjkAnxn8/rgTkwvrif6bjYB5cd" + ] + + # omit des_crypt from known_other since it's a valid bigcrypt hash too. + known_other_hashes = [row for row in HandlerCase.known_other_hashes + if row[0] != "des_crypt"] + + def test_90_internal(self): + # check that _norm_checksum() also validates checksum size. 
+ # (current code uses regex in parser) + self.assertRaises(ValueError, hash.bigcrypt, use_defaults=True, + checksum=u('yh4XPJGsOZ')) + +#============================================================================= +# bsdi crypt +#============================================================================= +class _bsdi_crypt_test(HandlerCase): + """test BSDiCrypt algorithm""" + handler = hash.bsdi_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '_J9..CCCCXBrJUJV154M'), + ('U*U***U', '_J9..CCCCXUhOBTXzaiE'), + ('U*U***U*', '_J9..CCCC4gQ.mB/PffM'), + ('*U*U*U*U', '_J9..XXXXvlzQGqpPPdk'), + ('*U*U*U*U*', '_J9..XXXXsqM/YSSP..Y'), + ('*U*U*U*U*U*U*U*U', '_J9..XXXXVL7qJCnku0I'), + ('*U*U*U*U*U*U*U*U*', '_J9..XXXXAj8cFbP5scI'), + ('ab1234567', '_J9..SDizh.vll5VED9g'), + ('cr1234567', '_J9..SDizRjWQ/zePPHc'), + ('zxyDPWgydbQjgq', '_J9..SDizxmRI1GjnQuE'), + ('726 even', '_K9..SaltNrQgIYUAeoY'), + ('', '_J9..SDSD5YGyRCr4W4c'), + + # + # custom + # + (" ", "_K1..crsmZxOLzfJH8iw"), + ("my", '_KR/.crsmykRplHbAvwA'), # <-- to detect old 12-bit rounds bug + ("my socra", "_K1..crsmf/9NzZr1fLM"), + ("my socrates", '_K1..crsmOv1rbde9A9o'), + ("my socrates note", "_K1..crsm/2qeAhdISMA"), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '_7C/.ABw0WIKy0ILVqo2'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correctly formatted hash + # \/ + "_K1.!crsmZxOLzfJH8iw" + ] + + platform_crypt_support = [ + # openbsd 5.8 dropped everything except bcrypt + ("openbsd[6789]", False), + ("openbsd5", None), + ("openbsd", True), + + ("freebsd|netbsd|darwin", True), + ("solaris", False), + ("linux", None), # may be present if libxcrypt is in use + ] + + def test_77_fuzz_input(self, **kwds): + # we want to generate even rounds to verify it's correct, but want to ignore warnings + warnings.filterwarnings("ignore", "bsdi_crypt rounds should be odd.*") + super(_bsdi_crypt_test, self).test_77_fuzz_input(**kwds) + + def test_needs_update_w_even_rounds(self): + """needs_update() should flag even rounds""" + handler = self.handler + even_hash = '_Y/../cG0zkJa6LY6k4c' + odd_hash = '_Z/..TgFg0/ptQtpAgws' + secret = 'test' + + # don't issue warning + self.assertTrue(handler.verify(secret, even_hash)) + self.assertTrue(handler.verify(secret, odd_hash)) + + # *do* signal as needing updates + self.assertTrue(handler.needs_update(even_hash)) + self.assertFalse(handler.needs_update(odd_hash)) + + # new hashes shouldn't have even rounds + new_hash = handler.hash("stub") + self.assertFalse(handler.needs_update(new_hash)) + +# create test cases for specific backends +bsdi_crypt_os_crypt_test = _bsdi_crypt_test.create_backend_case("os_crypt") +bsdi_crypt_builtin_test = _bsdi_crypt_test.create_backend_case("builtin") + +#============================================================================= +# crypt16 +#============================================================================= +class crypt16_test(HandlerCase): + handler = hash.crypt16 + + # TODO: find an authoritative source of test vectors + known_correct_hashes = [ + # + # from messages around the web, including + # http://seclists.org/bugtraq/1999/Mar/76 + # + ("passphrase", "qi8H8R7OM4xMUNMPuRAZxlY."), + ("printf", "aaCjFz4Sh8Eg2QSqAReePlq6"), + ("printf", "AA/xje2RyeiSU0iBY3PDwjYo"), + ("LOLOAQICI82QB4IP", "/.FcK3mad6JwYt8LVmDqz9Lc"), + ("LOLOAQICI", "/.FcK3mad6JwYSaRHJoTPzY2"), + ("LOLOAQIC", "/.FcK3mad6JwYelhbtlysKy6"), + ("L", "/.CIu/PzYCkl6elhbtlysKy6"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, 
'YeDc9tKkkmDvwP7buzpwhoqQ'), + ] + +#============================================================================= +# des crypt +#============================================================================= +class _des_crypt_test(HandlerCase): + """test des-crypt algorithm""" + handler = hash.des_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', 'CCNf8Sbh3HDfQ'), + ('U*U***U', 'CCX.K.MFy4Ois'), + ('U*U***U*', 'CC4rMpbg9AMZ.'), + ('*U*U*U*U', 'XXxzOu6maQKqQ'), + ('', 'SDbsugeBiC58A'), + + # + # custom + # + ('', 'OgAwTx2l6NADI'), + (' ', '/Hk.VPuwQTXbc'), + ('test', 'N1tQbOFcM5fpg'), + ('Compl3X AlphaNu3meric', 'um.Wguz3eVCx2'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', 'sNYqfOyauIyic'), + ('AlOtBsOl', 'cEpWz5IUCShqM'), + + # ensures utf-8 used for unicode + (u('hell\u00D6'), 'saykDgk3BPZ9E'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correctly formatted hash + #\/ + '!gAwTx2l6NADI', + + # wrong size + 'OgAwTx2l6NAD', + 'OgAwTx2l6NADIj', + ] + + platform_crypt_support = [ + # openbsd 5.8 dropped everything except bcrypt + ("openbsd[6789]", False), + ("openbsd5", None), + ("openbsd", True), + + ("freebsd|netbsd|linux|solaris|darwin", True), + ] + +# create test cases for specific backends +des_crypt_os_crypt_test = _des_crypt_test.create_backend_case("os_crypt") +des_crypt_builtin_test = _des_crypt_test.create_backend_case("builtin") + +#============================================================================= +# fshp +#============================================================================= +class fshp_test(HandlerCase): + """test fshp algorithm""" + handler = hash.fshp + + known_correct_hashes = [ + # + # test vectors from FSHP reference implementation + # https://github.com/bdd/fshp-is-not-secure-anymore/blob/master/python/test.py + # + ('test', '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M='), + + ('test', + '{FSHP1|8|4096}MTIzNDU2NzjTdHcmoXwNc0f' + 'f9+ArUHoN0CvlbPZpxFi1C6RDM/MHSA==' + ), + + ('OrpheanBeholderScryDoubt', + '{FSHP1|8|4096}GVSUFDAjdh0vBosn1GUhz' + 'GLHP7BmkbCZVH/3TQqGIjADXpc+6NCg3g==' + ), + ('ExecuteOrder66', + '{FSHP3|16|8192}0aY7rZQ+/PR+Rd5/I9ss' + 'RM7cjguyT8ibypNaSp/U1uziNO3BVlg5qPU' + 'ng+zHUDQC3ao/JbzOnIBUtAeWHEy7a2vZeZ' + '7jAwyJJa2EqOsq4Io=' + ), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '{FSHP1|16|16384}9v6/l3Lu/d9by5nznpOS' + 'cqQo8eKu/b/CKli3RCkgYg4nRTgZu5y659YV8cCZ68UL'), + ] + + known_unidentified_hashes = [ + # incorrect header + '{FSHX0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + 'FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + ] + + known_malformed_hashes = [ + # bad base64 padding + '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M', + + # wrong salt size + '{FSHP0|1|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + + # bad rounds + '{FSHP0|0|A}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + ] + + def test_90_variant(self): + """test variant keyword""" + handler = self.handler + kwds = dict(salt=b'a', rounds=1) + + # accepts ints + handler(variant=1, **kwds) + + # accepts bytes or unicode + handler(variant=u('1'), **kwds) + handler(variant=b'1', **kwds) + + # aliases + handler(variant=u('sha256'), **kwds) + handler(variant=b'sha256', **kwds) + + # rejects None + self.assertRaises(TypeError, handler, variant=None, **kwds) + + # rejects other types + self.assertRaises(TypeError, handler, variant=complex(1,1), **kwds) + + # invalid variant + self.assertRaises(ValueError, handler, variant='9', **kwds) + self.assertRaises(ValueError, handler, variant=9, **kwds) + 
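The FSHP vectors above illustrate the scheme's self-describing format: each hash embeds its variant, salt length, and round count. As a minimal sketch of the hash/verify round-trip that every HandlerCase suite in this file exercises through known_correct_hashes (this snippet is not part of the vendored test file, and assumes only that the bundled passlib >= 1.7 is importable):

from passlib.hash import fshp

# check one of the FSHP reference vectors listed above
assert fshp.verify('test', '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=')

# generate a fresh hash with explicit settings, then verify it;
# the '{FSHP1|8|4096}' prefix records variant 1, an 8-byte salt, and 4096 rounds
h = fshp.using(variant=1, salt_size=8, rounds=4096).hash('test')
assert h.startswith('{FSHP1|8|4096}')
assert fshp.verify('test', h)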
+#============================================================================= +# hex digests +#============================================================================= +class hex_md4_test(HandlerCase): + handler = hash.hex_md4 + known_correct_hashes = [ + ("password", '8a9d093f14f8701df17732b2bb182c74'), + (UPASS_TABLE, '876078368c47817ce5f9115f3a42cf74'), + ] + +class hex_md5_test(HandlerCase): + handler = hash.hex_md5 + known_correct_hashes = [ + ("password", '5f4dcc3b5aa765d61d8327deb882cf99'), + (UPASS_TABLE, '05473f8a19f66815e737b33264a0d0b0'), + ] + + # XXX: should test this for ALL the create_hex_md5() hashers. + def test_mock_fips_mode(self): + """ + if md5 isn't available, a dummy instance should be created. + (helps on FIPS systems). + """ + from passlib.exc import UnknownHashError + from passlib.crypto.digest import lookup_hash, _set_mock_fips_mode + + # check if md5 is available so we can test mock helper + supported = lookup_hash("md5", required=False).supported + self.assertEqual(self.handler.supported, supported) + if supported: + _set_mock_fips_mode() + self.addCleanup(_set_mock_fips_mode, False) + + # HACK: have to recreate hasher, since underlying HashInfo has changed. + # could reload module and re-import, but this should be good enough. + from passlib.handlers.digests import create_hex_hash + hasher = create_hex_hash("md5", required=False) + self.assertFalse(hasher.supported) + + # can identify hashes even if disabled + ref1 = '5f4dcc3b5aa765d61d8327deb882cf99' + ref2 = 'xxx' + self.assertTrue(hasher.identify(ref1)) + self.assertFalse(hasher.identify(ref2)) + + # throw error if try to use it + pat = "'md5' hash disabled for fips" + self.assertRaisesRegex(UnknownHashError, pat, hasher.hash, "password") + self.assertRaisesRegex(UnknownHashError, pat, hasher.verify, "password", ref1) + + +class hex_sha1_test(HandlerCase): + handler = hash.hex_sha1 + known_correct_hashes = [ + ("password", '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'), + (UPASS_TABLE, 'e059b2628e3a3e2de095679de9822c1d1466e0f0'), + ] + +class hex_sha256_test(HandlerCase): + handler = hash.hex_sha256 + known_correct_hashes = [ + ("password", '5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8'), + (UPASS_TABLE, '6ed729e19bf24d3d20f564375820819932029df05547116cfc2cc868a27b4493'), + ] + +class hex_sha512_test(HandlerCase): + handler = hash.hex_sha512 + known_correct_hashes = [ + ("password", 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c' + '706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cac' + 'bc86'), + (UPASS_TABLE, 'd91bb0a23d66dca07a1781fd63ae6a05f6919ee5fc368049f350c9f' + '293b078a18165d66097cf0d89fdfbeed1ad6e7dba2344e57348cd6d51308c843a06f' + '29caf'), + ] + +#============================================================================= +# htdigest hash +#============================================================================= +class htdigest_test(UserHandlerMixin, HandlerCase): + handler = hash.htdigest + + known_correct_hashes = [ + # secret, user, realm + + # from RFC 2617 + (("Circle Of Life", "Mufasa", "testrealm@host.com"), + '939e7578ed9e3c518a452acee763bce9'), + + # custom + ((UPASS_TABLE, UPASS_USD, UPASS_WAV), + '4dabed2727d583178777fab468dd1f17'), + ] + + known_unidentified_hashes = [ + # bad char \/ - currently rejecting upper hex chars, may change + '939e7578edAe3c518a452acee763bce9', + + # bad char \/ + '939e7578edxe3c518a452acee763bce9', + ] + + def test_80_user(self): + raise self.skipTest("test case doesn't support 'realm' keyword") + 
+ def populate_context(self, secret, kwds): + """insert username into kwds""" + if isinstance(secret, tuple): + secret, user, realm = secret + else: + user, realm = "user", "realm" + kwds.setdefault("user", user) + kwds.setdefault("realm", realm) + return secret + +#============================================================================= +# ldap hashes +#============================================================================= +class ldap_md5_test(HandlerCase): + handler = hash.ldap_md5 + known_correct_hashes = [ + ("helloworld", '{MD5}/F4DjTilcDIIVEHn/nAQsA=='), + (UPASS_TABLE, '{MD5}BUc/ihn2aBXnN7MyZKDQsA=='), + ] + +class ldap_sha1_test(HandlerCase): + handler = hash.ldap_sha1 + known_correct_hashes = [ + ("helloworld", '{SHA}at+xg6SiyUovktq1redipHiJpaE='), + (UPASS_TABLE, '{SHA}4FmyYo46Pi3glWed6YIsHRRm4PA='), + ] + +class ldap_salted_md5_test(HandlerCase): + handler = hash.ldap_salted_md5 + known_correct_hashes = [ + ("testing1234", '{SMD5}UjFY34os/pnZQ3oQOzjqGu4yeXE='), + (UPASS_TABLE, '{SMD5}Z0ioJ58LlzUeRxm3K6JPGAvBGIM='), + + # alternate salt sizes (8, 15, 16) + ('test', '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw'), + ('test', '{SMD5}XRlncfRzvGi0FDzgR98tUgBg7B3jXOs9p9S615qTkg=='), + ('test', '{SMD5}FbAkzOMOxRbMp6Nn4hnZuel9j9Gas7a2lvI+x5hT6j0='), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SMD5}IGVhwK+anvspmfDt2t0vgGjt/Q==', + + # incorrect base64 encoding + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4c', + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw' + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw=', + '{SMD5}LnuZPJhiaY95/4lmV=pg548xBsD4P4cw', + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P===', + ] + +class ldap_salted_sha1_test(HandlerCase): + handler = hash.ldap_salted_sha1 + known_correct_hashes = [ + ("testing123", '{SSHA}0c0blFTXXNuAMHECS4uxrj3ZieMoWImr'), + ("secret", "{SSHA}0H+zTv8o4MR4H43n03eCsvw1luG8LdB7"), + (UPASS_TABLE, '{SSHA}3yCSD1nLZXznra4N8XzZgAL+s1sQYsx5'), + + # alternate salt sizes (8, 15, 16) + ('test', '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOckw=='), + ('test', '{SSHA}/ZMF5KymNM+uEOjW+9STKlfCFj51bg3BmBNCiPHeW2ttbU0='), + ('test', '{SSHA}Pfx6Vf48AT9x3FVv8znbo8WQkEVSipHSWovxXmvNWUvp/d/7'), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SSHA}ZQK3Yvtvl6wtIRoISgMGPkcWU7Nfq5U=', + + # incorrect base64 encoding + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOck', + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOckw=', + '{SSHA}P90+qijSp8MJ1tN25j5o1Pf=UvlqjXHOGeOckw==', + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOck===', + ] + + +class ldap_salted_sha256_test(HandlerCase): + handler = hash.ldap_salted_sha256 + known_correct_hashes = [ + # generated locally + # salt size = 8 + ("password", '{SSHA256}x1tymSTVjozxQ2PtT46ysrzhZxbcskK0o2f8hEFx7fAQQmhtDSEkJA=='), + ("test", '{SSHA256}xfqc9aOR6z15YaEk3/Ufd7UL9+JozB/1EPmCDTizL0GkdA7BuNda6w=='), + ("toomanysecrets", '{SSHA256}RrTKrg6HFXcjJ+eDAq4UtbODxOr9RLeG+I69FoJvutcbY0zpfU+p1Q=='), + (u('letm\xe8\xefn'), '{SSHA256}km7UjUTBZN8a+gf1ND2/qn15N7LsO/jmGYJXvyTfJKAbI0RoLWWslQ=='), + + # alternate salt sizes (4, 15, 16) + # generated locally + ('test', '{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDIm'), + ('test', '{SSHA256}J6MFQdkfjdmXz9UyUPb773kekJdm4dgSL4y8WQEQW11VipHSundOKaV0LsV4L6U='), + ('test', '{SSHA256}uBLazLaiBaPb6Cpnvq2XTYDkvXbYIuqRW1anMKk85d1/j1GqFQIgpHSOMUYIIcS4'), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SSHA256}Lpdyr1+lR+rtxgp3SpQnUuNw33ENivTl28nzF2ZI4Gm41/o=', + + # incorrect base64 encoding + '{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDI@', + 
'{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDI', + '{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDIm===', + ] + + + +class ldap_salted_sha512_test(HandlerCase): + handler = hash.ldap_salted_sha512 + known_correct_hashes = [ + # generated by testing ldap server web interface (see issue 124 comments) + # salt size = 8 + ("toomanysecrets", '{SSHA512}wExp4xjiCHS0zidJDC4UJq9EEeIebAQPJ1PWSwfhxWjfutI9XiiKuHm2AE41cEFfK+8HyI8bh+ztbczUGsvVFIgICWWPt7qu'), + (u('letm\xe8\xefn'), '{SSHA512}mpNUSmZc3TNx+RnPwkIAVMf7ocEKLPrIoQNsg4Eu8dHvyCeb2xzHp5A6n4tF7ntknSvfvRZaJII4ImvNJlYsgiwAm0FMqR+3'), + + # generated locally + # salt size = 8 + ("password", '{SSHA512}f/lFQskkl7PdMsTGJxHZq8LDt/l+UqRMm6/pj4pV7/xZkcOaKCgvQqp+KCeXc/Vd4RY6vEHWn4y0DnFcQ6wgyv9fyxk='), + ("test", '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+Uc'), + + # alternate salt sizes (4, 15, 16) + # generated locally + ('test', '{SSHA512}Yg9DQ2wURCFGwobu7R2O6cq7nVbnGMPrFCX0aPQ9kj/y1hd6k9PEzkgWCB5aXdPwPzNrVb0PkiHiBnG1CxFiT+B8L8U='), + ('test', '{SSHA512}5ecDGWs5RY4xLszUO6hAcl90W3wAozGQoI4Gqj8xSZdcfU1lVEM4aY8s+4xVeLitcn7BO8i7xkzMFWLoxas7SeHc23sP4dx77937PyeE0A=='), + ('test', '{SSHA512}6FQv5W47HGg2MFBFZofoiIbO8KRW75Pm51NKoInpthYQQ5ujazHGhVGzrj3JXgA7j0k+UNmkHdbJjdY5xcUHPzynFEII4fwfIySEcG5NKSU='), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SSHA512}zFnn4/8x8GveUaMqgrYWyIWqFQ0Irt6gADPtRk4Uv3nUC6uR5cD8+YdQni/0ZNij9etm6p17kSFuww3M6l+d6AbAeA==', + + # incorrect base64 encoding + '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+U', + '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+U@', + '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+U===', + ] + + +class ldap_plaintext_test(HandlerCase): + # TODO: integrate EncodingHandlerMixin + handler = hash.ldap_plaintext + known_correct_hashes = [ + ("password", 'password'), + (UPASS_TABLE, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + (PASS_TABLE_UTF8, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + ] + known_unidentified_hashes = [ + "{FOO}bar", + + # NOTE: this hash currently rejects the empty string. + "", + ] + + known_other_hashes = [ + ("ldap_md5", "{MD5}/F4DjTilcDIIVEHn/nAQsA==") + ] + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_password(self): + # NOTE: this hash currently rejects the empty string. 
+ while True: + pwd = super(ldap_plaintext_test.FuzzHashGenerator, self).random_password() + if pwd: + return pwd + +class _ldap_md5_crypt_test(HandlerCase): + # NOTE: since the ldap_{crypt} handlers are all wrappers, don't need + # separate test; this is just to test the codebase end-to-end + handler = hash.ldap_md5_crypt + + known_correct_hashes = [ + # + # custom + # + ('', '{CRYPT}$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + (' ', '{CRYPT}$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'), + ('test', '{CRYPT}$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'), + ('Compl3X AlphaNu3meric', '{CRYPT}$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '{CRYPT}$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'), + ('test', '{CRYPT}$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '{CRYPT}$1$d6/Ky1lU$/xpf8m7ftmWLF.TjHCqel0'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '{CRYPT}$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!', + ] + +# create test cases for specific backends +ldap_md5_crypt_os_crypt_test =_ldap_md5_crypt_test.create_backend_case("os_crypt") +ldap_md5_crypt_builtin_test =_ldap_md5_crypt_test.create_backend_case("builtin") + +class _ldap_sha1_crypt_test(HandlerCase): + # NOTE: this isn't for testing the hash (see ldap_md5_crypt note) + # but as a self-test of the os_crypt patching code in HandlerCase. + handler = hash.ldap_sha1_crypt + + known_correct_hashes = [ + ('password', '{CRYPT}$sha1$10$c.mcTzCw$gF8UeYst9yXX7WNZKc5Fjkq0.au7'), + (UPASS_TABLE, '{CRYPT}$sha1$10$rnqXlOsF$aGJf.cdRPewJAXo1Rn1BkbaYh0fP'), + ] + + def populate_settings(self, kwds): + kwds.setdefault("rounds", 10) + super(_ldap_sha1_crypt_test, self).populate_settings(kwds) + + def test_77_fuzz_input(self, **ignored): + raise self.skipTest("unneeded") + +# create test cases for specific backends +ldap_sha1_crypt_os_crypt_test = _ldap_sha1_crypt_test.create_backend_case("os_crypt") + +#============================================================================= +# lanman +#============================================================================= +class lmhash_test(EncodingHandlerMixin, HandlerCase): + handler = hash.lmhash + secret_case_insensitive = True + + known_correct_hashes = [ + # + # http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + # + ("OLDPASSWORD", "c9b81d939d6fd80cd408e6b105741864"), + ("NEWPASSWORD", '09eeab5aa415d6e4d408e6b105741864'), + ("welcome", "c23413a8a1e7665faad3b435b51404ee"), + + # + # custom + # + ('', 'aad3b435b51404eeaad3b435b51404ee'), + ('zzZZZzz', 'a5e6066de61c3e35aad3b435b51404ee'), + ('passphrase', '855c3697d9979e78ac404c4ba2c66533'), + ('Yokohama', '5ecd9236d21095ce7584248b8d2c9f9e'), + + # ensures cp437 used for unicode + (u('ENCYCLOP\xC6DIA'), 'fed6416bffc9750d48462b9d7aaac065'), + (u('encyclop\xE6dia'), 'fed6416bffc9750d48462b9d7aaac065'), + + # test various encoding values + ((u("\xC6"), None), '25d8ab4a0659c97aaad3b435b51404ee'), + ((u("\xC6"), "cp437"), '25d8ab4a0659c97aaad3b435b51404ee'), + ((u("\xC6"), "latin-1"), '184eecbbe9991b44aad3b435b51404ee'), + ((u("\xC6"), "utf-8"), '00dd240fcfab20b8aad3b435b51404ee'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '855c3697d9979e78ac404c4ba2c6653X', + ] + + def test_90_raw(self): + """test lmhash.raw() method""" + from binascii import unhexlify + from passlib.utils.compat import str_to_bascii + lmhash = self.handler + for secret, hash in self.known_correct_hashes: + kwds = {} + secret = self.populate_context(secret, kwds) + data = 
unhexlify(str_to_bascii(hash)) + self.assertEqual(lmhash.raw(secret, **kwds), data) + self.assertRaises(TypeError, lmhash.raw, 1) + +#============================================================================= +# md5 crypt +#============================================================================= +class _md5_crypt_test(HandlerCase): + handler = hash.md5_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$1$dXc3I7Rw$ctlgjDdWJLMT.qwHsWhXR1'), + ('U*U***U', '$1$dXc3I7Rw$94JPyQc/eAgQ3MFMCoMF.0'), + ('U*U***U*', '$1$dXc3I7Rw$is1mVIAEtAhIzSdfn5JOO0'), + ('*U*U*U*U', '$1$eQT9Hwbt$XtuElNJD.eW5MN5UCWyTQ0'), + ('', '$1$Eu.GHtia$CFkL/nE1BYTlEPiVx1VWX0'), + + # + # custom + # + + # NOTE: would need to patch HandlerCase to coerce hashes + # to native str for this first one to work under py3. +## ('', b('$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.')), + ('', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + (' ', '$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'), + ('test', '$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'), + ('Compl3X AlphaNu3meric', '$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'), + ('test', '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + (b'test', '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + (u('s'), '$1$ssssssss$YgmLTApYTv12qgTwBoj8i/'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$1$d6/Ky1lU$/xpf8m7ftmWLF.TjHCqel0'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash \/ + '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!', + + # too many fields + '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.$', + ] + + platform_crypt_support = [ + # openbsd 5.8 dropped everything except bcrypt + ("openbsd[6789]", False), + ("openbsd5", None), + ("openbsd", True), + + ("freebsd|netbsd|linux|solaris", True), + ("darwin", False), + ] + +# create test cases for specific backends +md5_crypt_os_crypt_test = _md5_crypt_test.create_backend_case("os_crypt") +md5_crypt_builtin_test = _md5_crypt_test.create_backend_case("builtin") + +#============================================================================= +# msdcc 1 & 2 +#============================================================================= +class msdcc_test(UserHandlerMixin, HandlerCase): + handler = hash.msdcc + user_case_insensitive = True + + known_correct_hashes = [ + + # + # http://www.jedge.com/wordpress/windows-password-cache/ + # + (("Asdf999", "sevans"), "b1176c2587478785ec1037e5abc916d0"), + + # + # http://infosecisland.com/blogview/12156-Cachedump-for-Meterpreter-in-Action.html + # + (("ASDqwe123", "jdoe"), "592cdfbc3f1ef77ae95c75f851e37166"), + + # + # http://comments.gmane.org/gmane.comp.security.openwall.john.user/1917 + # + (("test1", "test1"), "64cd29e36a8431a2b111378564a10631"), + (("test2", "test2"), "ab60bdb4493822b175486810ac2abe63"), + (("test3", "test3"), "14dd041848e12fc48c0aa7a416a4a00c"), + (("test4", "test4"), "b945d24866af4b01a6d89b9d932a153c"), + + # + # http://ciscoit.wordpress.com/2011/04/13/metasploit-hashdump-vs-cachedump/ + # + (("1234qwer!@#$", "Administrator"), "7b69d06ef494621e3f47b9802fe7776d"), + + # + # http://www.securiteam.com/tools/5JP0I2KFPA.html + # + (("password", "user"), "2d9f0b052932ad18b87f315641921cda"), + + # + # from JTR 1.7.9 + # + (("", "root"), "176a4c2bd45ac73687676c2f09045353"), + (("test1", "TEST1"), "64cd29e36a8431a2b111378564a10631"), + (("okolada", "nineteen_characters"), "290efa10307e36a79b3eebf2a6b29455"), + ((u("\u00FC"), u("\u00FC")), "48f84e6f73d6d5305f6558a33fa2c9bb"), + ((u("\u00FC\u00FC"), u("\u00FC\u00FC")), 
"593246a8335cf0261799bda2a2a9c623"), + ((u("\u20AC\u20AC"), "user"), "9121790702dda0fa5d353014c334c2ce"), + + # + # custom + # + + # ensures utf-8 used for unicode + ((UPASS_TABLE, 'bob'), 'fcb82eb4212865c7ac3503156ca3f349'), + ] + + known_alternate_hashes = [ + # check uppercase accepted. + ("B1176C2587478785EC1037E5ABC916D0", ("Asdf999", "sevans"), + "b1176c2587478785ec1037e5abc916d0"), + ] + +class msdcc2_test(UserHandlerMixin, HandlerCase): + handler = hash.msdcc2 + user_case_insensitive = True + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + (("test1", "test1"), "607bbe89611e37446e736f7856515bf8"), + (("qerwt", "Joe"), "e09b38f84ab0be586b730baf61781e30"), + (("12345", "Joe"), "6432f517a900b3fc34ffe57f0f346e16"), + (("", "bin"), "c0cbe0313a861062e29f92ede58f9b36"), + (("w00t", "nineteen_characters"), "87136ae0a18b2dafe4a41d555425b2ed"), + (("w00t", "eighteencharacters"), "fc5df74eca97afd7cd5abb0032496223"), + (("longpassword", "twentyXXX_characters"), "cfc6a1e33eb36c3d4f84e4c2606623d2"), + (("longpassword", "twentyoneX_characters"), "99ff74cea552799da8769d30b2684bee"), + (("longpassword", "twentytwoXX_characters"), "0a721bdc92f27d7fb23b87a445ec562f"), + (("test2", "TEST2"), "c6758e5be7fc943d00b97972a8a97620"), + (("test3", "test3"), "360e51304a2d383ea33467ab0b639cc4"), + (("test4", "test4"), "6f79ee93518306f071c47185998566ae"), + ((u("\u00FC"), "joe"), "bdb80f2c4656a8b8591bd27d39064a54"), + ((u("\u20AC\u20AC"), "joe"), "1e1e20f482ff748038e47d801d0d1bda"), + ((u("\u00FC\u00FC"), "admin"), "0839e4a07c00f18a8c65cf5b985b9e73"), + + # + # custom + # + + # custom unicode test + ((UPASS_TABLE, 'bob'), 'cad511dc9edefcf69201da72efb6bb55'), + ] + +#============================================================================= +# mssql 2000 & 2005 +#============================================================================= +class mssql2000_test(HandlerCase): + handler = hash.mssql2000 + secret_case_insensitive = "verify-only" + # FIXME: fix UT framework - this hash is sensitive to password case, but verify() is not + + known_correct_hashes = [ + # + # http://hkashfi.blogspot.com/2007/08/breaking-sql-server-2005-hashes.html + # + ('Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED2503412FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + ('TEST', '0x010034767D5C2FD54D6119FFF04129A1D72E7C3194F7284A7F3A2FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + + # + # http://www.sqlmag.com/forums/aft/68438 + # + ('x', '0x010086489146C46DD7318D2514D1AC706457CBF6CD3DF8407F071DB4BBC213939D484BF7A766E974F03C96524794'), + + # + # http://stackoverflow.com/questions/173329/how-to-decrypt-a-password-from-sql-server + # + ('AAAA', '0x0100CF465B7B12625EF019E157120D58DD46569AC7BF4118455D12625EF019E157120D58DD46569AC7BF4118455D'), + + # + # http://msmvps.com/blogs/gladchenko/archive/2005/04/06/41083.aspx + # + ('123', '0x01002D60BA07FE612C8DE537DF3BFCFA49CD9968324481C1A8A8FE612C8DE537DF3BFCFA49CD9968324481C1A8A8'), + + # + # http://www.simple-talk.com/sql/t-sql-programming/temporarily-changing-an-unknown-password-of-the-sa-account-/ + # + ('12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + + # + # XXX: sample is incomplete, password unknown + # https://anthonystechblog.wordpress.com/2011/04/20/password-encryption-in-sql-server-how-to-tell-if-a-user-is-using-a-weak-password/ + # (????, '0x0100813F782D66EF15E40B1A3FDF7AB88B322F51401A87D8D3E3A8483C4351A3D96FC38499E6CDD2B6F?????????'), + # + + # + # from JTR 1.7.9 + # + ('foo', 
'0x0100A607BA7C54A24D17B565C59F1743776A10250F581D482DA8B6D6261460D3F53B279CC6913CE747006A2E3254'), + ('bar', '0x01000508513EADDF6DB7DDD270CCA288BF097F2FF69CC2DB74FBB9644D6901764F999BAB9ECB80DE578D92E3F80D'), + ('canard', '0x01008408C523CF06DCB237835D701C165E68F9460580132E28ED8BC558D22CEDF8801F4503468A80F9C52A12C0A3'), + ('lapin', '0x0100BF088517935FC9183FE39FDEC77539FD5CB52BA5F5761881E5B9638641A79DBF0F1501647EC941F3355440A2'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_USD, '0x0100624C0961B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5'), + (UPASS_TABLE, '0x010083104228FAD559BE52477F2131E538BE9734E5C4B0ADEFD7F6D784B03C98585DC634FE2B8CA3A6DFFEC729B4'), + + ] + + known_alternate_hashes = [ + # lower case hex + ('0x01005b20054332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b3', + '12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + + known_unidentified_hashes = [ + # malformed start + '0X01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + + # wrong magic value + '0x02005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + + # wrong size + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3', + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3AF', + + # mssql2005 + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + ] + + known_malformed_hashes = [ + # non-hex char -----\/ + b'0x01005B200543327G2E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + u('0x01005B200543327G2E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + +class mssql2005_test(HandlerCase): + handler = hash.mssql2005 + + known_correct_hashes = [ + # + # http://hkashfi.blogspot.com/2007/08/breaking-sql-server-2005-hashes.html + # + ('TEST', '0x010034767D5C2FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + + # + # http://www.openwall.com/lists/john-users/2009/07/14/2 + # + ('toto', '0x01004086CEB6BF932BC4151A1AF1F13CD17301D70816A8886908'), + + # + # http://msmvps.com/blogs/gladchenko/archive/2005/04/06/41083.aspx + # + ('123', '0x01004A335DCEDB366D99F564D460B1965B146D6184E4E1025195'), + ('123', '0x0100E11D573F359629B344990DCD3D53DE82CF8AD6BBA7B638B6'), + + # + # XXX: password unknown + # http://www.simple-talk.com/sql/t-sql-programming/temporarily-changing-an-unknown-password-of-the-sa-account-/ + # (???, '0x01004086CEB6301EEC0A994E49E30DA235880057410264030797'), + # + + # + # http://therelentlessfrontend.com/2010/03/26/encrypting-and-decrypting-passwords-in-sql-server/ + # + ('AAAA', '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30'), + + # + # from JTR 1.7.9 + # + ("toto", "0x01004086CEB6BF932BC4151A1AF1F13CD17301D70816A8886908"), + ("titi", "0x01004086CEB60ED526885801C23B366965586A43D3DEAC6DD3FD"), + ("foo", "0x0100A607BA7C54A24D17B565C59F1743776A10250F581D482DA8"), + ("bar", "0x01000508513EADDF6DB7DDD270CCA288BF097F2FF69CC2DB74FB"), + ("canard", "0x01008408C523CF06DCB237835D701C165E68F9460580132E28ED"), + ("lapin", "0x0100BF088517935FC9183FE39FDEC77539FD5CB52BA5F5761881"), + + # + # adapted from mssql2000.known_correct_hashes (above) + # + ('Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED250341'), + ('Test', '0x0100993BF2315F36CC441485B35C4D84687DC02C78B0E680411F'), + ('x', '0x010086489146C46DD7318D2514D1AC706457CBF6CD3DF8407F07'), + 
('AAAA', '0x0100CF465B7B12625EF019E157120D58DD46569AC7BF4118455D'), + ('123', '0x01002D60BA07FE612C8DE537DF3BFCFA49CD9968324481C1A8A8'), + ('12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_USD, '0x0100624C0961B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5'), + (UPASS_TABLE, '0x010083104228FAD559BE52477F2131E538BE9734E5C4B0ADEFD7'), + ] + + known_alternate_hashes = [ + # lower case hex + ('0x01005b20054332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b3', + '12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + + known_unidentified_hashes = [ + # malformed start + '0X010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30', + + # wrong magic value + '0x020036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30', + + # wrong size + '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F', + '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F3012', + + # mssql2000 + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + ] + + known_malformed_hashes = [ + # non-hex char --\/ + '0x010036D726AE86G34E97F20B198ACD219D60B446AC5E48C54F30', + ] + +#============================================================================= +# mysql 323 & 41 +#============================================================================= +class mysql323_test(HandlerCase): + handler = hash.mysql323 + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('drew', '697a7de87c5390b2'), + ('password', "5d2e19393cc5ef67"), + + # + # custom + # + ('mypass', '6f8c114b58f2ce9e'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '4ef327ca5491c8d7'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '6z8c114b58f2ce9e', + ] + + def test_90_whitespace(self): + """check whitespace is ignored per spec""" + h = self.do_encrypt("mypass") + h2 = self.do_encrypt("my pass") + self.assertEqual(h, h2) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def accept_password_pair(self, secret, other): + # override to handle whitespace + return secret.replace(" ","") != other.replace(" ","") + +class mysql41_test(HandlerCase): + handler = hash.mysql41 + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('verysecretpassword', '*2C905879F74F28F8570989947D06A8429FB943E6'), + ('12345678123456781234567812345678', '*F9F1470004E888963FB466A5452C9CBD9DF6239C'), + ("' OR 1 /*'", '*97CF7A3ACBE0CA58D5391AC8377B5D9AC11D46D9'), + + # + # custom + # + ('mypass', '*6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '*E7AFE21A9CFA2FC9D15D942AE8FB5C240FE5837B'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '*6Z8989366EAF75BB670AD8EA7A7FC1176A95CEF4', + ] + +#============================================================================= +# NTHASH +#============================================================================= +class nthash_test(HandlerCase): + handler = hash.nthash + + known_correct_hashes = [ + # + # http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + # + ("OLDPASSWORD", u("6677b2c394311355b54f25eec5bfacf5")), + ("NEWPASSWORD", u("256781a62031289d3c2c98c14f1efc8c")), + + # + # from JTR 1.7.9 + # + + # ascii + ('', '31d6cfe0d16ae931b73c59d7e0c089c0'), + ('tigger', 'b7e0ea9fbffcf6dd83086e905089effd'), + + # utf-8 + (b'\xC3\xBC', '8bd6e4fb88e01009818749c5443ea712'), + (b'\xC3\xBC\xC3\xBC', 'cc1260adb6985ca749f150c7e0b22063'), + (b'\xE2\x82\xAC', '030926b781938db4365d46adc7cfbcb8'), + 
(b'\xE2\x82\xAC\xE2\x82\xAC','682467b963bb4e61943e170a04f7db46'), + + # + # custom + # + ('passphrase', '7f8fe03093cc84b267b109625f6bbf4b'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '7f8fe03093cc84b267b109625f6bbfxb', + ] + +class bsd_nthash_test(HandlerCase): + handler = hash.bsd_nthash + + known_correct_hashes = [ + ('passphrase', '$3$$7f8fe03093cc84b267b109625f6bbf4b'), + (b'\xC3\xBC', '$3$$8bd6e4fb88e01009818749c5443ea712'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash --\/ + '$3$$7f8fe03093cc84b267b109625f6bbfxb', + ] + +#============================================================================= +# oracle 10 & 11 +#============================================================================= +class oracle10_test(UserHandlerMixin, HandlerCase): + handler = hash.oracle10 + secret_case_insensitive = True + user_case_insensitive = True + + # TODO: get more test vectors (especially ones which properly test unicode) + known_correct_hashes = [ + # ((secret,user),hash) + + # + # http://www.petefinnigan.com/default/default_password_list.htm + # + (('tiger', 'scott'), 'F894844C34402B67'), + ((u('ttTiGGeR'), u('ScO')), '7AA1A84E31ED7771'), + (("d_syspw", "SYSTEM"), '1B9F1F9A5CB9EB31'), + (("strat_passwd", "strat_user"), 'AEBEDBB4EFB5225B'), + + # + # http://openwall.info/wiki/john/sample-hashes + # + (('#95LWEIGHTS', 'USER'), '000EA4D72A142E29'), + (('CIAO2010', 'ALFREDO'), 'EB026A76F0650F7B'), + + # + # from JTR 1.7.9 + # + (('GLOUGlou', 'Bob'), 'CDC6B483874B875B'), + (('GLOUGLOUTER', 'bOB'), 'EF1F9139DB2D5279'), + (('LONG_MOT_DE_PASSE_OUI', 'BOB'), 'EC8147ABB3373D53'), + + # + # custom + # + ((UPASS_TABLE, 'System'), 'B915A853F297B281'), + ] + + known_unidentified_hashes = [ + # bad char in hash --\ + 'F894844C34402B6Z', + ] + +class oracle11_test(HandlerCase): + handler = hash.oracle11 + # TODO: find more test vectors (especially ones which properly test unicode) + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ("abc123", "S:5FDAB69F543563582BA57894FE1C1361FB8ED57B903603F2C52ED1B4D642"), + ("SyStEm123!@#", "S:450F957ECBE075D2FA009BA822A9E28709FBC3DA82B44D284DDABEC14C42"), + ("oracle", "S:3437FF72BD69E3FB4D10C750B92B8FB90B155E26227B9AB62D94F54E5951"), + ("11g", "S:61CE616647A4F7980AFD7C7245261AF25E0AFE9C9763FCF0D54DA667D4E6"), + ("11g", "S:B9E7556F53500C8C78A58F50F24439D79962DE68117654B6700CE7CC71CF"), + + # + # source? 
+ # + ("SHAlala", "S:2BFCFDF5895014EE9BB2B9BA067B01E0389BB5711B7B5F82B7235E9E182C"), + + # + # custom + # + (UPASS_TABLE, 'S:51586343E429A6DF024B8F242F2E9F8507B1096FACD422E29142AA4974B0'), + ] + +#============================================================================= +# PHPass Portable Crypt +#============================================================================= +class phpass_test(HandlerCase): + handler = hash.phpass + + known_correct_hashes = [ + # + # from official 0.3 implementation + # http://www.openwall.com/phpass/ + # + ('test12345', '$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0'), # from the source + + # + # from JTR 1.7.9 + # + ('test1', '$H$9aaaaaSXBjgypwqm.JsMssPLiS8YQ00'), + ('123456', '$H$9PE8jEklgZhgLmZl5.HYJAzfGCQtzi1'), + ('123456', '$H$9pdx7dbOW3Nnt32sikrjAxYFjX8XoK1'), + ('thisisalongertestPW', '$P$912345678LIjjb6PhecupozNBmDndU0'), + ('JohnRipper', '$P$612345678si5M0DDyPpmRCmcltU/YW/'), + ('JohnRipper', '$H$712345678WhEyvy1YWzT4647jzeOmo0'), + ('JohnRipper', '$P$B12345678L6Lpt4BxNotVIMILOa9u81'), + + # + # custom + # + ('', '$P$7JaFQsPzJSuenezefD/3jHgt5hVfNH0'), + ('compL3X!', '$P$FiS0N5L672xzQx1rt1vgdJQRYKnQM9/'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$P$7SMy8VxnfsIy2Sxm7fJxDSdil.h7TW.'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # ---\/ + '$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r!L0', + ] + +#============================================================================= +# plaintext +#============================================================================= +class plaintext_test(HandlerCase): + # TODO: integrate EncodingHandlerMixin + handler = hash.plaintext + accepts_all_hashes = True + + known_correct_hashes = [ + ('',''), + ('password', 'password'), + + # ensure unicode uses utf-8 + (UPASS_TABLE, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + (PASS_TABLE_UTF8, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + ] + +#============================================================================= +# postgres_md5 +#============================================================================= +class postgres_md5_test(UserHandlerMixin, HandlerCase): + handler = hash.postgres_md5 + known_correct_hashes = [ + # ((secret,user),hash) + + # + # generated using postgres 8.1 + # + (('mypass', 'postgres'), 'md55fba2ea04fd36069d2574ea71c8efe9d'), + (('mypass', 'root'), 'md540c31989b20437833f697e485811254b'), + (("testpassword",'testuser'), 'md5d4fc5129cc2c25465a5370113ae9835f'), + + # + # custom + # + + # verify unicode->utf8 + ((UPASS_TABLE, 'postgres'), 'md5cb9f11283265811ce076db86d18a22d2'), + ] + known_unidentified_hashes = [ + # bad 'z' char in otherwise correct hash + 'md54zc31989b20437833f697e485811254b', + ] + +#============================================================================= +# (netbsd's) sha1 crypt +#============================================================================= +class _sha1_crypt_test(HandlerCase): + handler = hash.sha1_crypt + + known_correct_hashes = [ + # + # custom + # + ("password", "$sha1$19703$iVdJqfSE$v4qYKl1zqYThwpjJAoKX6UvlHq/a"), + ("password", "$sha1$21773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH"), + (UPASS_TABLE, '$sha1$40000$uJ3Sp7LE$.VEmLO5xntyRFYihC7ggd3297T/D'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '$sha1$21773$u!7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH', + + # zero padded rounds + '$sha1$01773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH', + + # too many fields + '$sha1$21773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH$', + + # empty rounds field + 
'$sha1$$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH$', + ] + + platform_crypt_support = [ + ("netbsd", True), + ("freebsd|openbsd|solaris|darwin", False), + ("linux", None), # may be present if libxcrypt is in use + ] + +# create test cases for specific backends +sha1_crypt_os_crypt_test = _sha1_crypt_test.create_backend_case("os_crypt") +sha1_crypt_builtin_test = _sha1_crypt_test.create_backend_case("builtin") + +#============================================================================= +# roundup +#============================================================================= + +# NOTE: all roundup hashes use PrefixWrapper, +# so there's nothing natively to test. +# so we just have a few quick cases... + +class RoundupTest(TestCase): + + def _test_pair(self, h, secret, hash): + self.assertTrue(h.verify(secret, hash)) + self.assertFalse(h.verify('x'+secret, hash)) + + def test_pairs(self): + self._test_pair( + hash.ldap_hex_sha1, + "sekrit", + '{SHA}8d42e738c7adee551324955458b5e2c0b49ee655') + + self._test_pair( + hash.ldap_hex_md5, + "sekrit", + '{MD5}ccbc53f4464604e714f69dd11138d8b5') + + self._test_pair( + hash.ldap_des_crypt, + "sekrit", + '{CRYPT}nFia0rj2TT59A') + + self._test_pair( + hash.roundup_plaintext, + "sekrit", + '{plaintext}sekrit') + + self._test_pair( + hash.ldap_pbkdf2_sha1, + "sekrit", + '{PBKDF2}5000$7BvbBq.EZzz/O0HuwX3iP.nAG3s$g3oPnFFaga2BJaX5PoPRljl4XIE') + +#============================================================================= +# sha256-crypt +#============================================================================= +class _sha256_crypt_test(HandlerCase): + handler = hash.sha256_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$5$LKO/Ute40T3FNF95$U0prpBQd4PloSGU0pnpM4z9wKn4vZ1.jsrzQfPqxph9'), + ('U*U***U', '$5$LKO/Ute40T3FNF95$fdgfoJEBoMajNxCv3Ru9LyQ0xZgv0OBMQoq80LQ/Qd.'), + ('U*U***U*', '$5$LKO/Ute40T3FNF95$8Ry82xGnnPI/6HtFYnvPBTYgOL23sdMXn8C29aO.x/A'), + ('*U*U*U*U', '$5$9mx1HkCz7G1xho50$O7V7YgleJKLUhcfk9pgzdh3RapEaWqMtEp9UUBAKIPA'), + ('', '$5$kc7lRD1fpYg0g.IP$d7CMTcEqJyTXyeq8hTdu/jB/I6DGkoo62NXbHIR7S43'), + + # + # custom tests + # + ('', '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3'), + (' ', '$5$rounds=10376$I5lNtXtRmf.OoMd8$Ko3AI1VvTANdyKhBPavaRjJzNpSatKU6QVN9uwS9MH.'), + ('test', '$5$rounds=11858$WH1ABM5sKhxbkgCK$aTQsjPkz0rBsH3lQlJxw9HDTDXPKBxC0LlVeV69P.t1'), + ('Compl3X AlphaNu3meric', '$5$rounds=10350$o.pwkySLCzwTdmQX$nCMVsnF3TXWcBPOympBUUSQi6LGGloZoOsVJMGJ09UB'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$5$rounds=11944$9dhlu07dQMRWvTId$LyUI5VWkGFwASlzntk1RLurxX54LUhgAcJZIt0pYGT7'), + (u('with unic\u00D6de'), '$5$rounds=1000$IbG0EuGQXw5EkMdP$LQ5AfPf13KufFsKtmazqnzSGZ4pxtUNw3woQ.ELRDF4'), + ] + + if TEST_MODE("full"): + # builtin alg was changed in 1.6, and had possibility of fencepost + # errors near rounds that are multiples of 42. these hashes test rounds + # 1004..1012 (42*24=1008 +/- 4) to ensure no mistakes were made. + # (also relying on fuzz testing against os_crypt backend). 
+ known_correct_hashes.extend([ + ("secret", '$5$rounds=1004$nacl$oiWPbm.kQ7.jTCZoOtdv7/tO5mWv/vxw5yTqlBagVR7'), + ("secret", '$5$rounds=1005$nacl$6Mo/TmGDrXxg.bMK9isRzyWH3a..6HnSVVsJMEX7ud/'), + ("secret", '$5$rounds=1006$nacl$I46VwuAiUBwmVkfPFakCtjVxYYaOJscsuIeuZLbfKID'), + ("secret", '$5$rounds=1007$nacl$9fY4j1AV3N/dV/YMUn1enRHKH.7nEL4xf1wWB6wfDD4'), + ("secret", '$5$rounds=1008$nacl$CiFWCfn8ODmWs0I1xAdXFo09tM8jr075CyP64bu3by9'), + ("secret", '$5$rounds=1009$nacl$QtpFX.CJHgVQ9oAjVYStxAeiU38OmFILWm684c6FyED'), + ("secret", '$5$rounds=1010$nacl$ktAwXuT5WbjBW/0ZU1eNMpqIWY1Sm4twfRE1zbZyo.B'), + ("secret", '$5$rounds=1011$nacl$QJWLBEhO9qQHyMx4IJojSN9sS41P1Yuz9REddxdO721'), + ("secret", '$5$rounds=1012$nacl$mmf/k2PkbBF4VCtERgky3bEVavmLZKFwAcvxD1p3kV2'), + ]) + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '$5$rounds=10428$uy/:jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMeZGsGx2aBvxTvDFI613c3', + + # zero-padded rounds + '$5$rounds=010428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3', + + # extra "$" + '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3$', + ] + + known_correct_configs = [ + # config, secret, result + + # + # taken from official specification at http://www.akkadia.org/drepper/SHA-crypt.txt + # + ( "$5$saltstring", "Hello world!", + "$5$saltstring$5B8vYYiY.CVt1RlTTf8KbXBH3hsxY/GNooZaBBGWEc5" ), + ( "$5$rounds=10000$saltstringsaltstring", "Hello world!", + "$5$rounds=10000$saltstringsaltst$3xv.VbSHBb41AL9AvLeujZkZRBAwqFMz2." + "opqey6IcA" ), + ( "$5$rounds=5000$toolongsaltstring", "This is just a test", + "$5$rounds=5000$toolongsaltstrin$Un/5jzAHMgOGZ5.mWJpuVolil07guHPvOW8" + "mGRcvxa5" ), + ( "$5$rounds=1400$anotherlongsaltstring", + "a very much longer text to encrypt. This one even stretches over more" + "than one line.", + "$5$rounds=1400$anotherlongsalts$Rx.j8H.h8HjEDGomFU8bDkXm3XIUnzyxf12" + "oP84Bnq1" ), + ( "$5$rounds=77777$short", + "we have a short salt string but not a short password", + "$5$rounds=77777$short$JiO1O3ZpDAxGJeaDIuqCoEFysAe1mZNJRs3pw0KQRd/" ), + ( "$5$rounds=123456$asaltof16chars..", "a short string", + "$5$rounds=123456$asaltof16chars..$gP3VQ/6X7UUEW3HkBn2w1/Ptq2jxPyzV/" + "cZKmF/wJvD" ), + ( "$5$rounds=10$roundstoolow", "the minimum number is still observed", + "$5$rounds=1000$roundstoolow$yfvwcWrQ8l/K0DAWyuPMDNHpIVlTQebY9l/gL97" + "2bIC" ), + ] + + filter_config_warnings = True # rounds too low, salt too small + + platform_crypt_support = [ + ("freebsd(9|1\d)|linux", True), + ("freebsd8", None), # added in freebsd 8.3 + ("freebsd|openbsd|netbsd|darwin", False), + ("solaris", None), # depends on policy + ] + +# create test cases for specific backends +sha256_crypt_os_crypt_test = _sha256_crypt_test.create_backend_case("os_crypt") +sha256_crypt_builtin_test = _sha256_crypt_test.create_backend_case("builtin") + +#============================================================================= +# test sha512-crypt +#============================================================================= +class _sha512_crypt_test(HandlerCase): + handler = hash.sha512_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', "$6$LKO/Ute40T3FNF95$6S/6T2YuOIHY0N3XpLKABJ3soYcXD9mB7uVbtEZDj/LNscVhZoZ9DEH.sBciDrMsHOWOoASbNLTypH/5X26gN0"), + ('U*U***U', "$6$LKO/Ute40T3FNF95$wK80cNqkiAUzFuVGxW6eFe8J.fSVI65MD5yEm8EjYMaJuDrhwe5XXpHDJpwF/kY.afsUs1LlgQAaOapVNbggZ1"), + ('U*U***U*', "$6$LKO/Ute40T3FNF95$YS81pp1uhOHTgKLhSMtQCr2cDiUiN03Ud3gyD4ameviK1Zqz.w3oXsMgO6LrqmIEcG3hiqaUqHi/WEE2zrZqa/"), + ('*U*U*U*U', 
"$6$OmBOuxFYBZCYAadG$WCckkSZok9xhp4U1shIZEV7CCVwQUwMVea7L3A77th6SaE9jOPupEMJB.z0vIWCDiN9WLh2m9Oszrj5G.gt330"), + ('', "$6$ojWH1AiTee9x1peC$QVEnTvRVlPRhcLQCk/HnHaZmlGAAjCfrAN0FtOsOnUk5K5Bn/9eLHHiRzrTzaIKjW9NTLNIBUCtNVOowWS2mN."), + + # + # custom tests + # + ('', '$6$rounds=11021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1'), + (' ', '$6$rounds=11104$ED9SA4qGmd57Fq2m$q/.PqACDM/JpAHKmr86nkPzzuR5.YpYa8ZJJvI8Zd89ZPUYTJExsFEIuTYbM7gAGcQtTkCEhBKmp1S1QZwaXx0'), + ('test', '$6$rounds=11531$G/gkPn17kHYo0gTF$Kq.uZBHlSBXyzsOJXtxJruOOH4yc0Is13uY7yK0PvAvXxbvc1w8DO1RzREMhKsc82K/Jh8OquV8FZUlreYPJk1'), + ('Compl3X AlphaNu3meric', '$6$rounds=10787$wakX8nGKEzgJ4Scy$X78uqaX1wYXcSCtS4BVYw2trWkvpa8p7lkAtS9O/6045fK4UB2/Jia0Uy/KzCpODlfVxVNZzCCoV9s2hoLfDs/'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$6$rounds=11065$5KXQoE1bztkY5IZr$Jf6krQSUKKOlKca4hSW07MSerFFzVIZt/N3rOTsUgKqp7cUdHrwV8MoIVNCk9q9WL3ZRMsdbwNXpVk0gVxKtz1'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$6$rounds=40000$PEZTJDiyzV28M3.m$GTlnzfzGB44DGd1XqlmC4erAJKCP.rhvLvrYxiT38htrNzVGBnplFOHjejUGVrCfusGWxLQCc3pFO0A/1jYYr0'), + ] + + known_malformed_hashes = [ + # zero-padded rounds + '$6$rounds=011021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1', + # bad char in otherwise correct hash + '$6$rounds=11021$KsvQipYPWpr9:wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1', + ] + + known_correct_configs = [ + # config, secret, result + + # + # taken from official specification at http://www.akkadia.org/drepper/SHA-crypt.txt + # + ("$6$saltstring", "Hello world!", + "$6$saltstring$svn8UoSVapNtMuq1ukKS4tPQd8iKwSMHWjl/O817G3uBnIFNjnQJu" + "esI68u4OTLiBFdcbYEdFCoEOfaS35inz1" ), + + ( "$6$rounds=10000$saltstringsaltstring", "Hello world!", + "$6$rounds=10000$saltstringsaltst$OW1/O6BYHV6BcXZu8QVeXbDWra3Oeqh0sb" + "HbbMCVNSnCM/UrjmM0Dp8vOuZeHBy/YTBmSK6H9qs/y3RnOaw5v." ), + + ( "$6$rounds=5000$toolongsaltstring", "This is just a test", + "$6$rounds=5000$toolongsaltstrin$lQ8jolhgVRVhY4b5pZKaysCLi0QBxGoNeKQ" + "zQ3glMhwllF7oGDZxUhx1yxdYcz/e1JSbq3y6JMxxl8audkUEm0" ), + + ( "$6$rounds=1400$anotherlongsaltstring", + "a very much longer text to encrypt. This one even stretches over more" + "than one line.", + "$6$rounds=1400$anotherlongsalts$POfYwTEok97VWcjxIiSOjiykti.o/pQs.wP" + "vMxQ6Fm7I6IoYN3CmLs66x9t0oSwbtEW7o7UmJEiDwGqd8p4ur1" ), + + ( "$6$rounds=77777$short", + "we have a short salt string but not a short password", + "$6$rounds=77777$short$WuQyW2YR.hBNpjjRhpYD/ifIw05xdfeEyQoMxIXbkvr0g" + "ge1a1x3yRULJ5CCaUeOxFmtlcGZelFl5CxtgfiAc0" ), + + ( "$6$rounds=123456$asaltof16chars..", "a short string", + "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc" + "elCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1" ), + + ( "$6$rounds=10$roundstoolow", "the minimum number is still observed", + "$6$rounds=1000$roundstoolow$kUMsbe306n21p9R.FRkW3IGn.S9NPN0x50YhH1x" + "hLsPuWGsUSklZt58jaTfF4ZEQpyUNGc0dqbpBYYBaHHrsX." 
), + ] + + filter_config_warnings = True # rounds too low, salt too small + + platform_crypt_support = _sha256_crypt_test.platform_crypt_support + +# create test cases for specific backends +sha512_crypt_os_crypt_test = _sha512_crypt_test.create_backend_case("os_crypt") +sha512_crypt_builtin_test = _sha512_crypt_test.create_backend_case("builtin") + +#============================================================================= +# sun md5 crypt +#============================================================================= +class sun_md5_crypt_test(HandlerCase): + handler = hash.sun_md5_crypt + + # TODO: this scheme needs some real test vectors, especially due to + # the "bare salt" issue which plagued the official parser. + known_correct_hashes = [ + # + # http://forums.halcyoninc.com/showthread.php?t=258 + # + ("Gpcs3_adm", "$md5$zrdhpMlZ$$wBvMOEqbSjU.hu5T2VEP01"), + + # + # http://www.c0t0d0s0.org/archives/4453-Less-known-Solaris-features-On-passwords-Part-2-Using-stronger-password-hashing.html + # + ("aa12345678", "$md5$vyy8.OVF$$FY4TWzuauRl4.VQNobqMY."), + + # + # http://www.cuddletech.com/blog/pivot/entry.php?id=778 + # + ("this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + + # + # http://compgroups.net/comp.unix.solaris/password-file-in-linux-and-solaris-8-9 + # + ("passwd", "$md5$RPgLF6IJ$WTvAlUJ7MqH5xak2FMEwS/"), + + # + # source: http://solaris-training.com/301_HTML/docs/deepdiv.pdf page 27 + # FIXME: password unknown + # "$md5,rounds=8000$kS9FT1JC$$mnUrRO618lLah5iazwJ9m1" + + # + # source: http://www.visualexams.com/310-303.htm + # XXX: this has 9 salt chars unlike all other hashes. is that valid? + # FIXME: password unknown + # "$md5,rounds=2006$2amXesSj5$$kCF48vfPsHDjlKNXeEw7V." + # + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$md5,rounds=5000$10VYDzAA$$1arAVtMA3trgE1qJ2V0Ez1'), + ] + + known_correct_configs = [ + # (config, secret, hash) + + #--------------------------- + # test salt string handling + # + # these tests attempt to verify that passlib is handling + # the "bare salt" issue (see sun md5 crypt docs) + # in a sane manner + #--------------------------- + + # config with "$" suffix, hash strings with "$$" suffix, + # should all be treated the same, with one "$" added to salt digest. + ("$md5$3UqYqndY$", + "this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + ("$md5$3UqYqndY$$.................DUMMY", + "this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + + # config with no suffix, hash strings with "$" suffix, + # should all be treated the same, and no suffix added to salt digest. + # NOTE: this is just a guess re: config w/ no suffix, + # but otherwise there's no sane way to encode bare_salt=False + # within config string. + ("$md5$3UqYqndY", + "this", "$md5$3UqYqndY$HIZVnfJNGCPbDZ9nIRSgP1"), + ("$md5$3UqYqndY$.................DUMMY", + "this", "$md5$3UqYqndY$HIZVnfJNGCPbDZ9nIRSgP1"), + ] + + known_malformed_hashes = [ + # unexpected end of hash + "$md5,rounds=5000", + + # bad rounds + "$md5,rounds=500A$xxxx", + "$md5,rounds=0500$xxxx", + "$md5,rounds=0$xxxx", + + # bad char in otherwise correct hash + "$md5$RPgL!6IJ$WTvAlUJ7MqH5xak2FMEwS/", + + # digest too short + "$md5$RPgLa6IJ$WTvAlUJ7MqH5xak2FMEwS", + + # digest too long + "$md5$RPgLa6IJ$WTvAlUJ7MqH5xak2FMEwS/.", + + # 2+ "$" at end of salt in config + # NOTE: not sure what correct behavior is, so forbidding format for now. + "$md5$3UqYqndY$$", + + # 3+ "$" at end of salt in hash + # NOTE: not sure what correct behavior is, so forbidding format for now. 
+ "$md5$RPgLa6IJ$$$WTvAlUJ7MqH5xak2FMEwS/", + + ] + + platform_crypt_support = [ + ("solaris", True), + ("freebsd|openbsd|netbsd|linux|darwin", False), + ] + def do_verify(self, secret, hash): + # Override to fake error for "$..." hash string listed in known_correct_configs (above) + # These have to be hash strings, in order to test bare salt issue. + if isinstance(hash, str) and hash.endswith("$.................DUMMY"): + raise ValueError("pretending '$...' stub hash is config string") + return self.handler.verify(secret, hash) + +#============================================================================= +# unix disabled / fallback +#============================================================================= +class unix_disabled_test(HandlerCase): + handler = hash.unix_disabled +# accepts_all_hashes = True # TODO: turn this off. + + known_correct_hashes = [ + # everything should hash to "!" (or "*" on BSD), + # and nothing should verify against either string + ("password", "!"), + (UPASS_TABLE, "*"), + ] + + known_unidentified_hashes = [ + # should never identify anything crypt() could return... + "$1$xxx", + "abc", + "./az", + "{SHA}xxx", + ] + + def test_76_hash_border(self): + # so empty strings pass + self.accepts_all_hashes = True + super(unix_disabled_test, self).test_76_hash_border() + + def test_90_special(self): + """test marker option & special behavior""" + warnings.filterwarnings("ignore", "passing settings to .*.hash\(\) is deprecated") + handler = self.handler + + # preserve hash if provided + self.assertEqual(handler.genhash("stub", "!asd"), "!asd") + + # use marker if no hash + self.assertEqual(handler.genhash("stub", ""), handler.default_marker) + self.assertEqual(handler.hash("stub"), handler.default_marker) + self.assertEqual(handler.using().default_marker, handler.default_marker) + + # custom marker + self.assertEqual(handler.genhash("stub", "", marker="*xxx"), "*xxx") + self.assertEqual(handler.hash("stub", marker="*xxx"), "*xxx") + self.assertEqual(handler.using(marker="*xxx").hash("stub"), "*xxx") + + # reject invalid marker + self.assertRaises(ValueError, handler.genhash, 'stub', "", marker='abc') + self.assertRaises(ValueError, handler.hash, 'stub', marker='abc') + self.assertRaises(ValueError, handler.using, marker='abc') + +class unix_fallback_test(HandlerCase): + handler = hash.unix_fallback + accepts_all_hashes = True + + known_correct_hashes = [ + # *everything* should hash to "!", and nothing should verify + ("password", "!"), + (UPASS_TABLE, "!"), + ] + + # silence annoying deprecation warning + def setUp(self): + super(unix_fallback_test, self).setUp() + warnings.filterwarnings("ignore", "'unix_fallback' is deprecated") + + def test_90_wildcard(self): + """test enable_wildcard flag""" + h = self.handler + self.assertTrue(h.verify('password','', enable_wildcard=True)) + self.assertFalse(h.verify('password','')) + for c in "!*x": + self.assertFalse(h.verify('password',c, enable_wildcard=True)) + self.assertFalse(h.verify('password',c)) + + def test_91_preserves_existing(self): + """test preserves existing disabled hash""" + handler = self.handler + + # use marker if no hash + self.assertEqual(handler.genhash("stub", ""), "!") + self.assertEqual(handler.hash("stub"), "!") + + # use hash if provided and valid + self.assertEqual(handler.genhash("stub", "!asd"), "!asd") + +#============================================================================= +# eof +#============================================================================= diff --git 
a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_argon2.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_argon2.py new file mode 100644 index 0000000..e771769 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_argon2.py @@ -0,0 +1,507 @@ +"""passlib.tests.test_handlers_argon2 - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +# core +import logging +log = logging.getLogger(__name__) +import re +import warnings +# site +# pkg +from passlib import hash +from passlib.utils.compat import unicode +from passlib.tests.utils import HandlerCase, TEST_MODE +from passlib.tests.test_handlers import UPASS_TABLE, PASS_TABLE_UTF8 +# module + +#============================================================================= +# a bunch of tests lifted nearlky verbatim from official argon2 UTs... +# https://github.com/P-H-C/phc-winner-argon2/blob/master/src/test.c +#============================================================================= +def hashtest(version, t, logM, p, secret, salt, hex_digest, hash): + return dict(version=version, rounds=t, logM=logM, memory_cost=1< max uint32 + "$argon2i$v=19$m=65536,t=8589934592,p=4$c29tZXNhbHQAAAAAAAAAAA$QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY", + + # unexpected param + "$argon2i$v=19$m=65536,t=2,p=4,q=5$c29tZXNhbHQAAAAAAAAAAA$QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY", + + # wrong param order + "$argon2i$v=19$t=2,m=65536,p=4,q=5$c29tZXNhbHQAAAAAAAAAAA$QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY", + + # constraint violation: m < 8 * p + "$argon2i$v=19$m=127,t=2,p=16$c29tZXNhbHQ$IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4", + ] + + known_parsehash_results = [ + ('$argon2i$v=19$m=256,t=2,p=3$c29tZXNhbHQ$AJFIsNZTMKTAewB4+ETN1A', + dict(type="i", memory_cost=256, rounds=2, parallelism=3, salt=b'somesalt', + checksum=b'\x00\x91H\xb0\xd6S0\xa4\xc0{\x00x\xf8D\xcd\xd4')), + ] + + def setUpWarnings(self): + super(_base_argon2_test, self).setUpWarnings() + warnings.filterwarnings("ignore", ".*Using argon2pure backend.*") + + def do_stub_encrypt(self, handler=None, **settings): + if self.backend == "argon2_cffi": + # overriding default since no way to get stub config from argon2._calc_hash() + # (otherwise test_21b_max_rounds blocks trying to do max rounds) + handler = (handler or self.handler).using(**settings) + self = handler(use_defaults=True) + self.checksum = self._stub_checksum + assert self.checksum + return self.to_string() + else: + return super(_base_argon2_test, self).do_stub_encrypt(handler, **settings) + + def test_03_legacy_hash_workflow(self): + # override base method + raise self.skipTest("legacy 1.6 workflow not supported") + + def test_keyid_parameter(self): + # NOTE: keyid parameter currently not supported by official argon2 hash parser, + # even though it's mentioned in the format spec. + # we're trying to be consistent w/ this, so hashes w/ keyid should + # always through a NotImplementedError. + self.assertRaises(NotImplementedError, self.handler.verify, 'password', + "$argon2i$v=19$m=65536,t=2,p=4,keyid=ABCD$c29tZXNhbHQ$" + "IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4") + + def test_data_parameter(self): + # NOTE: argon2 c library doesn't support passing in a data parameter to argon2_hash(); + # but argon2_verify() appears to parse that info... but then discards it (!?). 
+ # not sure what proper behavior is, filed issue -- https://github.com/P-H-C/phc-winner-argon2/issues/143 + # For now, replicating behavior we have for the two backends, to detect when things change. + handler = self.handler + + # ref hash of 'password' when 'data' is correctly passed into argon2() + sample1 = '$argon2i$v=19$m=512,t=2,p=2,data=c29tZWRhdGE$c29tZXNhbHQ$KgHyCesFyyjkVkihZ5VNFw' + + # ref hash of 'password' when 'data' is silently discarded (same digest as w/o data) + sample2 = '$argon2i$v=19$m=512,t=2,p=2,data=c29tZWRhdGE$c29tZXNhbHQ$uEeXt1dxN1iFKGhklseW4w' + + # hash of 'password' w/o the data field + sample3 = '$argon2i$v=19$m=512,t=2,p=2$c29tZXNhbHQ$uEeXt1dxN1iFKGhklseW4w' + + # + # test sample 1 + # + + if self.backend == "argon2_cffi": + # argon2_cffi v16.1 would incorrectly return False here. + # but v16.2 patches so it throws error on data parameter. + # our code should detect that, and adapt it into a NotImplementedError + self.assertRaises(NotImplementedError, handler.verify, "password", sample1) + + # incorrectly returns sample3, dropping data parameter + self.assertEqual(handler.genhash("password", sample1), sample3) + + else: + assert self.backend == "argon2pure" + # should parse and verify + self.assertTrue(handler.verify("password", sample1)) + + # should preserve sample1 + self.assertEqual(handler.genhash("password", sample1), sample1) + + # + # test sample 2 + # + + if self.backend == "argon2_cffi": + # argon2_cffi v16.1 would incorrectly return True here. + # but v16.2 patches so it throws error on data parameter. + # our code should detect that, and adapt it into a NotImplementedError + self.assertRaises(NotImplementedError, handler.verify,"password", sample2) + + # incorrectly returns sample3, dropping data parameter + self.assertEqual(handler.genhash("password", sample1), sample3) + + else: + assert self.backend == "argon2pure" + # should parse, but fail to verify + self.assertFalse(self.handler.verify("password", sample2)) + + # should return sample1 (corrected digest) + self.assertEqual(handler.genhash("password", sample2), sample1) + + def test_keyid_and_data_parameters(self): + # test combination of the two, just in case + self.assertRaises(NotImplementedError, self.handler.verify, 'stub', + "$argon2i$v=19$m=65536,t=2,p=4,keyid=ABCD,data=EFGH$c29tZXNhbHQ$" + "IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4") + + def test_type_kwd(self): + cls = self.handler + + # XXX: this mirrors test_30_HasManyIdents(); + # maybe switch argon2 class to use that mixin instead of "type" kwd? + + # check settings + self.assertTrue("type" in cls.setting_kwds) + + # check supported type_values + for value in cls.type_values: + self.assertIsInstance(value, unicode) + self.assertTrue("i" in cls.type_values) + self.assertTrue("d" in cls.type_values) + + # check default + self.assertTrue(cls.type in cls.type_values) + + # check constructor validates ident correctly. + handler = cls + hash = self.get_sample_hash()[1] + kwds = handler.parsehash(hash) + del kwds['type'] + + # ... accepts good type + handler(type=cls.type, **kwds) + + # XXX: this is policy "ident" uses, maybe switch to it? + # # ... requires type w/o defaults + # self.assertRaises(TypeError, handler, **kwds) + handler(**kwds) + + # ... supplies default type + handler(use_defaults=True, **kwds) + + # ... 
rejects bad type + self.assertRaises(ValueError, handler, type='xXx', **kwds) + + def test_type_using(self): + handler = self.handler + + # XXX: this mirrors test_has_many_idents_using(); + # maybe switch argon2 class to use that mixin instead of "type" kwd? + + orig_type = handler.type + for alt_type in handler.type_values: + if alt_type != orig_type: + break + else: + raise AssertionError("expected to find alternate type: default=%r values=%r" % + (orig_type, handler.type_values)) + + def effective_type(cls): + return cls(use_defaults=True).type + + # keep default if nothing else specified + subcls = handler.using() + self.assertEqual(subcls.type, orig_type) + + # accepts alt type + subcls = handler.using(type=alt_type) + self.assertEqual(subcls.type, alt_type) + self.assertEqual(handler.type, orig_type) + + # check subcls actually *generates* default type, + # and that we didn't affect orig handler + self.assertEqual(effective_type(subcls), alt_type) + self.assertEqual(effective_type(handler), orig_type) + + # rejects bad type + self.assertRaises(ValueError, handler.using, type='xXx') + + # honor 'type' alias + subcls = handler.using(type=alt_type) + self.assertEqual(subcls.type, alt_type) + self.assertEqual(handler.type, orig_type) + + # check type aliases are being honored + self.assertEqual(effective_type(handler.using(type="I")), "i") + + def test_needs_update_w_type(self): + handler = self.handler + + hash = handler.hash("stub") + self.assertFalse(handler.needs_update(hash)) + + hash2 = re.sub(r"\$argon2\w+\$", "$argon2d$", hash) + self.assertTrue(handler.needs_update(hash2)) + + def test_needs_update_w_version(self): + handler = self.handler.using(memory_cost=65536, time_cost=2, parallelism=4, + digest_size=32) + hash = ("$argon2i$m=65536,t=2,p=4$c29tZXNhbHQAAAAAAAAAAA$" + "QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY") + if handler.max_version == 0x10: + self.assertFalse(handler.needs_update(hash)) + else: + self.assertTrue(handler.needs_update(hash)) + + def test_argon_byte_encoding(self): + """verify we're using right base64 encoding for argon2""" + handler = self.handler + if handler.version != 0x13: + # TODO: make this fatal, and add refs for other version. 
+ raise self.skipTest("handler uses wrong version for sample hashes") + + # 8 byte salt + salt = b'somesalt' + temp = handler.using(memory_cost=256, time_cost=2, parallelism=2, salt=salt, + checksum_size=32, type="i") + hash = temp.hash("password") + self.assertEqual(hash, "$argon2i$v=19$m=256,t=2,p=2" + "$c29tZXNhbHQ" + "$T/XOJ2mh1/TIpJHfCdQan76Q5esCFVoT5MAeIM1Oq2E") + + # 16 byte salt + salt = b'somesalt\x00\x00\x00\x00\x00\x00\x00\x00' + temp = handler.using(memory_cost=256, time_cost=2, parallelism=2, salt=salt, + checksum_size=32, type="i") + hash = temp.hash("password") + self.assertEqual(hash, "$argon2i$v=19$m=256,t=2,p=2" + "$c29tZXNhbHQAAAAAAAAAAA" + "$rqnbEp1/jFDUEKZZmw+z14amDsFqMDC53dIe57ZHD38") + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + settings_map = HandlerCase.FuzzHashGenerator.settings_map.copy() + settings_map.update(memory_cost="random_memory_cost", type="random_type") + + def random_type(self): + return self.rng.choice(self.handler.type_values) + + def random_memory_cost(self): + if self.test.backend == "argon2pure": + return self.randintgauss(128, 384, 256, 128) + else: + return self.randintgauss(128, 32767, 16384, 4096) + + # TODO: fuzz parallelism, digest_size + +#----------------------------------------- +# test suites for specific backends +#----------------------------------------- + +class argon2_argon2_cffi_test(_base_argon2_test.create_backend_case("argon2_cffi")): + + # add some more test vectors that take too long under argon2pure + known_correct_hashes = _base_argon2_test.known_correct_hashes + [ + # + # sample hashes from argon2 cffi package's unittests, + # which in turn were generated by official argon2 cmdline tool. + # + + # v1.2, type I, w/o a version tag + ('password', "$argon2i$m=65536,t=2,p=4$c29tZXNhbHQAAAAAAAAAAA$" + "QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY"), + + # v1.3, type I + ('password', "$argon2i$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4"), + + # v1.3, type D + ('password', "$argon2d$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "cZn5d+rFh+ZfuRhm2iGUGgcrW5YLeM6q7L3vBsdmFA0"), + + # v1.3, type ID + ('password', "$argon2id$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "GpZ3sK/oH9p7VIiV56G/64Zo/8GaUw434IimaPqxwCo"), + + # + # custom + # + + # ensure trailing null bytes handled correctly + ('password\x00', "$argon2i$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "Vpzuc0v0SrP88LcVvmg+z5RoOYpMDKH/lt6O+CZabIQ"), + + ] + + # add reference hashes from argon2 clib tests + known_correct_hashes.extend( + (info['secret'], info['hash']) for info in reference_data + if info['logM'] <= (18 if TEST_MODE("full") else 16) + ) + +class argon2_argon2pure_test(_base_argon2_test.create_backend_case("argon2pure")): + + # XXX: setting max_threads at 1 to prevent argon2pure from using multiprocessing, + # which causes big problems when testing under pypy. + # would like a "pure_use_threads" option instead, to make it use multiprocessing.dummy instead. + handler = hash.argon2.using(memory_cost=32, parallelism=2) + + # don't use multiprocessing for unittests, makes it a lot harder to ctrl-c + # XXX: make this controlled by env var? 
+ handler.pure_use_threads = True + + # add reference hashes from argon2 clib tests + known_correct_hashes = _base_argon2_test.known_correct_hashes[:] + + known_correct_hashes.extend( + (info['secret'], info['hash']) for info in reference_data + if info['logM'] < 16 + ) + + class FuzzHashGenerator(_base_argon2_test.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(1, 3, 2, 1) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_bcrypt.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_bcrypt.py new file mode 100644 index 0000000..64fc8bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_bcrypt.py @@ -0,0 +1,688 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import os +import warnings +# site +# pkg +from passlib import hash +from passlib.handlers.bcrypt import IDENT_2, IDENT_2X +from passlib.utils import repeat_string, to_bytes, is_safe_crypt_input +from passlib.utils.compat import irange, PY3 +from passlib.tests.utils import HandlerCase, TEST_MODE +from passlib.tests.test_handlers import UPASS_TABLE +# module + +#============================================================================= +# bcrypt +#============================================================================= +class _bcrypt_test(HandlerCase): + """base for BCrypt test cases""" + handler = hash.bcrypt + reduce_default_rounds = True + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$2a$05$c92SVSfjeiCD6F2nAD6y0uBpJDjdRkt0EgeC4/31Rf2LUZbDRDE.O'), + ('U*U***U', '$2a$05$WY62Xk2TXZ7EvVDQ5fmjNu7b0GEzSzUXUh2cllxJwhtOeMtWV3Ujq'), + ('U*U***U*', '$2a$05$Fa0iKV3E2SYVUlMknirWU.CFYGvJ67UwVKI1E2FP6XeLiZGcH3MJi'), + ('*U*U*U*U', '$2a$05$.WRrXibc1zPgIdRXYfv.4uu6TD1KWf0VnHzq/0imhUhuxSxCyeBs2'), + ('', '$2a$05$Otz9agnajgrAe0.kFVF9V.tzaStZ2s1s4ZWi/LY4sw2k/MTVFj/IO'), + + # + # test vectors from http://www.openwall.com/crypt v1.2 + # note that this omits any hashes that depend on crypt_blowfish's + # various CVE-2011-2483 workarounds (hash 2a and \xff\xff in password, + # and any 2x hashes); and only contain hashes which are correct + # under both crypt_blowfish 1.2 AND OpenBSD. 
+ # + ('U*U', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.E5YPO9kmyuRGyh0XouQYb4YMJKvyOeW'), + ('U*U*', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.VGOzA784oUp/Z0DY336zx7pLYAy0lwK'), + ('U*U*U', '$2a$05$XXXXXXXXXXXXXXXXXXXXXOAcXxm9kjPGEMsLznoKqmqw7tc8WCx4a'), + ('', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.7uG0VCzI2bS7j6ymqJi9CdcdxiRTWNy'), + ('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789chars after 72 are ignored', + '$2a$05$abcdefghijklmnopqrstuu5s2v8.iXieOjg/.AySBTTZIIVFJeBui'), + (b'\xa3', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq'), + (b'\xff\xa3345', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.nRht2l/HRhr6zmCp9vYUvvsqynflf9e'), + (b'\xa3ab', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.6IflQkJytoRVc1yuaNtHfiuq.FRlSIS'), + (b'\xaa'*72 + b'chars after 72 are ignored as usual', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.swQOIzjOiJ9GHEPuhEkvqrUyvWhEMx6'), + (b'\xaa\x55'*36, + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.R9xrDjiycxMbQE2bp.vgqlYpW5wx2yy'), + (b'\x55\xaa\xff'*24, + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.9tQZzcJfm3uj2NvJ/n5xkhpqLrMpWCe'), + + # keeping one of their 2y tests, because we are supporting that. + (b'\xa3', + '$2y$05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq'), + + # + # 8bit bug (fixed in 2y/2b) + # + + # NOTE: see assert_lacks_8bit_bug() for origins of this test vector. + (b"\xd1\x91", "$2y$05$6bNw2HLQYeqHYyBfLMsv/OUcZd0LKP39b87nBw3.S2tVZSqiQX6eu"), + + # + # bsd wraparound bug (fixed in 2b) + # + + # NOTE: if backend is vulnerable, password will hash the same as '0'*72 + # ("$2a$04$R1lJ2gkNaoPGdafE.H.16.nVyh2niHsGJhayOHLMiXlI45o8/DU.6"), + # rather than same as ("0123456789"*8)[:72] + # 255 should be sufficient, but checking + (("0123456789"*26)[:254], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + (("0123456789"*26)[:255], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + (("0123456789"*26)[:256], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + (("0123456789"*26)[:257], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + + + # + # from py-bcrypt tests + # + ('', '$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'), + ('a', '$2a$10$k87L/MF28Q673VKh8/cPi.SUl7MU/rWuSiIDDFayrKk/1tBsSQu4u'), + ('abc', '$2a$10$WvvTPHKwdBJ3uk0Z37EMR.hLA2W6N9AEBhEgrAOljy2Ae5MtaSIUi'), + ('abcdefghijklmnopqrstuvwxyz', + '$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'), + ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', + '$2a$10$LgfYWkbzEvQ4JakH7rOvHe0y8pHKF9OaFgwUZ2q7W2FFZmZzJYlfS'), + + # + # custom test vectors + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, + '$2a$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + + # ensure 2b support + (UPASS_TABLE, + '$2b$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + + ] + + if TEST_MODE("full"): + # + # add some extra tests related to 2/2a + # + CONFIG_2 = '$2$05$' + '.'*22 + CONFIG_A = '$2a$05$' + '.'*22 + known_correct_hashes.extend([ + ("", CONFIG_2 + 'J2ihDv8vVf7QZ9BsaRrKyqs2tkn55Yq'), + ("", CONFIG_A + 'J2ihDv8vVf7QZ9BsaRrKyqs2tkn55Yq'), + ("abc", CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc", CONFIG_A + 'ev6gDwpVye3oMCUpLY85aTpfBNHD0Ga'), + ("abc"*23, CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*23, CONFIG_A + '2kIdfSj/4/R/Q6n847VTvc68BXiRYZC'), + ("abc"*24, CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24, CONFIG_A + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24+'x', CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24+'x', CONFIG_A + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ]) + + known_correct_configs = [ + 
('$2a$04$uM6csdM8R9SXTex/gbTaye', UPASS_TABLE, + '$2a$04$uM6csdM8R9SXTex/gbTayezuvzFEufYGd2uB6of7qScLjQ4GwcD4G'), + ] + + known_unidentified_hashes = [ + # invalid minor version + "$2f$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + "$2`$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # \/ + "$2a$12$EXRkfkdmXn!gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + + # unsupported (but recognized) minor version + "$2x$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + + # rounds not zero-padded (py-bcrypt rejects this, therefore so do we) + '$2a$6$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.' + + # NOTE: salts with padding bits set are technically malformed, + # but we can reliably correct & issue a warning for that. + ] + + platform_crypt_support = [ + ("freedbsd|openbsd|netbsd", True), + ("darwin", False), + ("linux", None), # may be present via addon, e.g. debian's libpam-unix2 + ("solaris", None), # depends on system policy + ] + + #=================================================================== + # override some methods + #=================================================================== + def setUp(self): + # ensure builtin is enabled for duration of test. + if TEST_MODE("full") and self.backend == "builtin": + key = "PASSLIB_BUILTIN_BCRYPT" + orig = os.environ.get(key) + if orig: + self.addCleanup(os.environ.__setitem__, key, orig) + else: + self.addCleanup(os.environ.__delitem__, key) + os.environ[key] = "true" + + super(_bcrypt_test, self).setUp() + + # silence this warning, will come up a bunch during testing of old 2a hashes. + warnings.filterwarnings("ignore", ".*backend is vulnerable to the bsd wraparound bug.*") + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 4) + super(_bcrypt_test, self).populate_settings(kwds) + + #=================================================================== + # fuzz testing + #=================================================================== + def crypt_supports_variant(self, hash): + """check if OS crypt is expected to support given ident""" + from passlib.handlers.bcrypt import bcrypt, IDENT_2X, IDENT_2Y + from passlib.utils import safe_crypt + ident = bcrypt.from_string(hash) + return (safe_crypt("test", ident + "04$5BJqKfqMQvV7nS.yUguNcu") or "").startswith(ident) + + fuzz_verifiers = HandlerCase.fuzz_verifiers + ( + "fuzz_verifier_bcrypt", + "fuzz_verifier_pybcrypt", + "fuzz_verifier_bcryptor", + ) + + def fuzz_verifier_bcrypt(self): + # test against bcrypt, if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2B, IDENT_2X, IDENT_2Y, _detect_pybcrypt + from passlib.utils import to_native_str, to_bytes + try: + import bcrypt + except ImportError: + return + if _detect_pybcrypt(): + return + def check_bcrypt(secret, hash): + """bcrypt""" + secret = to_bytes(secret, self.FuzzHashGenerator.password_encoding) + if hash.startswith(IDENT_2B): + # bcrypt <1.1 lacks 2B support + hash = IDENT_2A + hash[4:] + elif hash.startswith(IDENT_2): + # bcrypt doesn't support $2$ hashes; but we can fake it + # using the $2a$ algorithm, by repeating the password until + # it's 72 chars in length. 
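+ # (a quick doctest-style sketch of that padding step -- repeat_string()
+ # tiles the secret out to an exact length:
+ #   >>> from passlib.utils import repeat_string
+ #   >>> repeat_string(b"abc", 8)
+ #   b'abcabcab'
+ # so a short password expands to a deterministic 72-byte input.)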
+ hash = IDENT_2A + hash[3:] + if secret: + secret = repeat_string(secret, 72) + elif hash.startswith(IDENT_2Y) and bcrypt.__version__ == "3.0.0": + hash = IDENT_2B + hash[4:] + hash = to_bytes(hash) + try: + return bcrypt.hashpw(secret, hash) == hash + except ValueError: + raise ValueError("bcrypt rejected hash: %r (secret=%r)" % (hash, secret)) + return check_bcrypt + + def fuzz_verifier_pybcrypt(self): + # test against py-bcrypt, if available + from passlib.handlers.bcrypt import ( + IDENT_2, IDENT_2A, IDENT_2B, IDENT_2X, IDENT_2Y, + _PyBcryptBackend, + ) + from passlib.utils import to_native_str + + loaded = _PyBcryptBackend._load_backend_mixin("pybcrypt", False) + if not loaded: + return + + from passlib.handlers.bcrypt import _pybcrypt as bcrypt_mod + + lock = _PyBcryptBackend._calc_lock # reuse threadlock workaround for pybcrypt 0.2 + + def check_pybcrypt(secret, hash): + """pybcrypt""" + secret = to_native_str(secret, self.FuzzHashGenerator.password_encoding) + if len(secret) > 200: # vulnerable to wraparound bug + secret = secret[:200] + if hash.startswith((IDENT_2B, IDENT_2Y)): + hash = IDENT_2A + hash[4:] + try: + if lock: + with lock: + return bcrypt_mod.hashpw(secret, hash) == hash + else: + return bcrypt_mod.hashpw(secret, hash) == hash + except ValueError: + raise ValueError("py-bcrypt rejected hash: %r" % (hash,)) + return check_pybcrypt + + def fuzz_verifier_bcryptor(self): + # test against bcryptor if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2Y, IDENT_2B + from passlib.utils import to_native_str + try: + from bcryptor.engine import Engine + except ImportError: + return + def check_bcryptor(secret, hash): + """bcryptor""" + secret = to_native_str(secret, self.FuzzHashGenerator.password_encoding) + if hash.startswith((IDENT_2B, IDENT_2Y)): + hash = IDENT_2A + hash[4:] + elif hash.startswith(IDENT_2): + # bcryptor doesn't support $2$ hashes; but we can fake it + # using the $2a$ algorithm, by repeating the password until + # it's 72 chars in length. + hash = IDENT_2A + hash[3:] + if secret: + secret = repeat_string(secret, 72) + return Engine(False).hash_key(secret, hash) == hash + return check_bcryptor + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def generate(self): + opts = super(_bcrypt_test.FuzzHashGenerator, self).generate() + + secret = opts['secret'] + other = opts['other'] + settings = opts['settings'] + ident = settings.get('ident') + + if ident == IDENT_2X: + # 2x is just recognized, not supported. don't test with it. + del settings['ident'] + + elif ident == IDENT_2 and other and repeat_string(to_bytes(other), len(to_bytes(secret))) == to_bytes(secret): + # avoid false failure due to flaw in 0-revision bcrypt: + # repeated strings like 'abc' and 'abcabc' hash identically. + opts['secret'], opts['other'] = self.random_password_pair() + + return opts + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
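+ # (randintgauss(low, high, mu, sigma), as used throughout these fuzz
+ # generators, is roughly a gaussian draw centered on mu and clipped
+ # into [low, high] -- here: cost 5..8, centered on 6.)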
+ return self.randintgauss(5, 8, 6, 1) + + #=================================================================== + # custom tests + #=================================================================== + known_incorrect_padding = [ + # password, bad hash, good hash + + # 2 bits of salt padding set +# ("loppux", # \/ +# "$2a$12$oaQbBqq8JnSM1NHRPQGXORm4GCUMqp7meTnkft4zgSnrbhoKdDV0C", +# "$2a$12$oaQbBqq8JnSM1NHRPQGXOOm4GCUMqp7meTnkft4zgSnrbhoKdDV0C"), + ("test", # \/ + '$2a$04$oaQbBqq8JnSM1NHRPQGXORY4Vw3bdHKLIXTecPDRAcJ98cz1ilveO', + '$2a$04$oaQbBqq8JnSM1NHRPQGXOOY4Vw3bdHKLIXTecPDRAcJ98cz1ilveO'), + + # all 4 bits of salt padding set +# ("Passlib11", # \/ +# "$2a$12$M8mKpW9a2vZ7PYhq/8eJVcUtKxpo6j0zAezu0G/HAMYgMkhPu4fLK", +# "$2a$12$M8mKpW9a2vZ7PYhq/8eJVOUtKxpo6j0zAezu0G/HAMYgMkhPu4fLK"), + ("test", # \/ + "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS", + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"), + + # bad checksum padding + ("test", # \/ + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIV", + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"), + ] + + def test_90_bcrypt_padding(self): + """test passlib correctly handles bcrypt padding bits""" + self.require_TEST_MODE("full") + # + # prevents reccurrence of issue 25 (https://code.google.com/p/passlib/issues/detail?id=25) + # were some unused bits were incorrectly set in bcrypt salt strings. + # (fixed since 1.5.3) + # + bcrypt = self.handler + corr_desc = ".*incorrectly set padding bits" + + # + # test hash() / genconfig() don't generate invalid salts anymore + # + def check_padding(hash): + assert hash.startswith(("$2a$", "$2b$")) and len(hash) >= 28, \ + "unexpectedly malformed hash: %r" % (hash,) + self.assertTrue(hash[28] in '.Oeu', + "unused bits incorrectly set in hash: %r" % (hash,)) + for i in irange(6): + check_padding(bcrypt.genconfig()) + for i in irange(3): + check_padding(bcrypt.using(rounds=bcrypt.min_rounds).hash("bob")) + + # + # test genconfig() corrects invalid salts & issues warning. + # + with self.assertWarningList(["salt too large", corr_desc]): + hash = bcrypt.genconfig(salt="."*21 + "A.", rounds=5, relaxed=True) + self.assertEqual(hash, "$2b$05$" + "." 
* (22 + 31)) + + # + # test public methods against good & bad hashes + # + samples = self.known_incorrect_padding + for pwd, bad, good in samples: + + # make sure genhash() corrects bad configs, leaves good unchanged + with self.assertWarningList([corr_desc]): + self.assertEqual(bcrypt.genhash(pwd, bad), good) + with self.assertWarningList([]): + self.assertEqual(bcrypt.genhash(pwd, good), good) + + # make sure verify() works correctly with good & bad hashes + with self.assertWarningList([corr_desc]): + self.assertTrue(bcrypt.verify(pwd, bad)) + with self.assertWarningList([]): + self.assertTrue(bcrypt.verify(pwd, good)) + + # make sure normhash() corrects bad hashes, leaves good unchanged + with self.assertWarningList([corr_desc]): + self.assertEqual(bcrypt.normhash(bad), good) + with self.assertWarningList([]): + self.assertEqual(bcrypt.normhash(good), good) + + # make sure normhash() leaves non-bcrypt hashes alone + self.assertEqual(bcrypt.normhash("$md5$abc"), "$md5$abc") + + def test_needs_update_w_padding(self): + """needs_update corrects bcrypt padding""" + # NOTE: see padding test above for details about issue this detects + bcrypt = self.handler.using(rounds=4) + + # PASS1 = "test" + # bad contains invalid 'c' char at end of salt: + # \/ + BAD1 = "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + GOOD1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + + self.assertTrue(bcrypt.needs_update(BAD1)) + self.assertFalse(bcrypt.needs_update(GOOD1)) + + #=================================================================== + # eoc + #=================================================================== + +# create test cases for specific backends +bcrypt_bcrypt_test = _bcrypt_test.create_backend_case("bcrypt") +bcrypt_pybcrypt_test = _bcrypt_test.create_backend_case("pybcrypt") +bcrypt_bcryptor_test = _bcrypt_test.create_backend_case("bcryptor") + +class bcrypt_os_crypt_test(_bcrypt_test.create_backend_case("os_crypt")): + + # os crypt doesn't support non-utf8 secret bytes + known_correct_hashes = [row for row in _bcrypt_test.known_correct_hashes + if is_safe_crypt_input(row[0])] + + # os crypt backend doesn't currently implement a per-call fallback if it fails + has_os_crypt_fallback = False + +bcrypt_builtin_test = _bcrypt_test.create_backend_case("builtin") + +#============================================================================= +# bcrypt +#============================================================================= +class _bcrypt_sha256_test(HandlerCase): + "base for BCrypt-SHA256 test cases" + handler = hash.bcrypt_sha256 + reduce_default_rounds = True + forbidden_characters = None + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + #------------------------------------------------------------------- + # custom test vectors for old v1 format + #------------------------------------------------------------------- + + # empty + ("", + '$bcrypt-sha256$2a,5$E/e/2AOhqM5W/KJTFQzLce$F6dYSxOdAEoJZO2eoHUZWZljW/e0TXO'), + + # ascii + ("password", + '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + + # unicode / utf8 + (UPASS_TABLE, + '$bcrypt-sha256$2a,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + (UPASS_TABLE.encode("utf-8"), + '$bcrypt-sha256$2a,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + + # ensure 2b support + ("password", + '$bcrypt-sha256$2b,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + (UPASS_TABLE, + 
'$bcrypt-sha256$2b,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. + # NOTE: test_60_truncate_size() handles this already, this is just for overkill :) + (repeat_string("abc123", 72), + '$bcrypt-sha256$2b,5$X1g1nh3g0v4h6970O68cxe$r/hyEtqJ0teqPEmfTLoZ83ciAI1Q74.'), + (repeat_string("abc123", 72) + "qwr", + '$bcrypt-sha256$2b,5$X1g1nh3g0v4h6970O68cxe$021KLEif6epjot5yoxk0m8I0929ohEa'), + (repeat_string("abc123", 72) + "xyz", + '$bcrypt-sha256$2b,5$X1g1nh3g0v4h6970O68cxe$7.1kgpHduMGEjvM3fX6e/QCvfn6OKja'), + + #------------------------------------------------------------------- + # custom test vectors for v2 format + # TODO: convert to v2 format + #------------------------------------------------------------------- + + # empty + ("", + '$bcrypt-sha256$v=2,t=2b,r=5$E/e/2AOhqM5W/KJTFQzLce$WFPIZKtDDTriqWwlmRFfHiOTeheAZWe'), + + # ascii + ("password", + '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$wOK1VFFtS8IGTrGa7.h5fs0u84qyPbS'), + + # unicode / utf8 + (UPASS_TABLE, + '$bcrypt-sha256$v=2,t=2b,r=5$.US1fQ4TQS.ZTz/uJ5Kyn.$pzzgp40k8reM1CuQb03PvE0IDPQSdV6'), + (UPASS_TABLE.encode("utf-8"), + '$bcrypt-sha256$v=2,t=2b,r=5$.US1fQ4TQS.ZTz/uJ5Kyn.$pzzgp40k8reM1CuQb03PvE0IDPQSdV6'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. + # NOTE: test_60_truncate_size() handles this already, this is just for overkill :) + (repeat_string("abc123", 72), + '$bcrypt-sha256$v=2,t=2b,r=5$X1g1nh3g0v4h6970O68cxe$zu1cloESVFIOsUIo7fCEgkdHaI9SSue'), + (repeat_string("abc123", 72) + "qwr", + '$bcrypt-sha256$v=2,t=2b,r=5$X1g1nh3g0v4h6970O68cxe$CBF9csfEdW68xv3DwE6xSULXMtqEFP.'), + (repeat_string("abc123", 72) + "xyz", + '$bcrypt-sha256$v=2,t=2b,r=5$X1g1nh3g0v4h6970O68cxe$zC/1UDUG2ofEXB6Onr2vvyFzfhEOS3S'), + ] + + known_correct_configs =[ + # v1 + ('$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe', + "password", '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + # v2 + ('$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe', + "password", '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$wOK1VFFtS8IGTrGa7.h5fs0u84qyPbS'), + ] + + known_malformed_hashes = [ + #------------------------------------------------------------------- + # v1 format + #------------------------------------------------------------------- + + # bad char in otherwise correct hash + # \/ + '$bcrypt-sha256$2a,5$5Hg1DKF!PE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized bcrypt variant + '$bcrypt-sha256$2c,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported bcrypt variant + '$bcrypt-sha256$2x,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # rounds zero-padded + '$bcrypt-sha256$2a,05$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # config string w/ $ added + '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$', + + #------------------------------------------------------------------- + # v2 format + #------------------------------------------------------------------- + + # bad char in otherwise correct hash + # \/ + '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKF!PE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported version (for this format) + '$bcrypt-sha256$v=1,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized version + '$bcrypt-sha256$v=3,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized bcrypt variant + 
'$bcrypt-sha256$v=2,t=2c,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported bcrypt variant + '$bcrypt-sha256$v=2,t=2a,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + '$bcrypt-sha256$v=2,t=2x,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # rounds zero-padded + '$bcrypt-sha256$v=2,t=2b,r=05$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # config string w/ $ added + '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$', + ] + + #=================================================================== + # override some methods -- cloned from bcrypt + #=================================================================== + def setUp(self): + # ensure builtin is enabled for duration of test. + if TEST_MODE("full") and self.backend == "builtin": + key = "PASSLIB_BUILTIN_BCRYPT" + orig = os.environ.get(key) + if orig: + self.addCleanup(os.environ.__setitem__, key, orig) + else: + self.addCleanup(os.environ.__delitem__, key) + os.environ[key] = "enabled" + super(_bcrypt_sha256_test, self).setUp() + warnings.filterwarnings("ignore", ".*backend is vulnerable to the bsd wraparound bug.*") + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 4) + super(_bcrypt_sha256_test, self).populate_settings(kwds) + + #=================================================================== + # override ident tests for now + #=================================================================== + + def require_many_idents(self): + raise self.skipTest("multiple idents not supported") + + def test_30_HasOneIdent(self): + # forbidding ident keyword, we only support "2b" for now + handler = self.handler + handler(use_defaults=True) + self.assertRaises(ValueError, handler, ident="$2y$", use_defaults=True) + + #=================================================================== + # fuzz testing -- cloned from bcrypt + #=================================================================== + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
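+ # (same spread as the plain-bcrypt fuzzer above: gaussian centered on
+ # cost 6, clipped into [5, 8].)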
+ return self.randintgauss(5, 8, 6, 1) + + def random_ident(self): + return "2b" + + #=================================================================== + # custom tests + #=================================================================== + + def test_using_version(self): + # default to v2 + handler = self.handler + self.assertEqual(handler.version, 2) + + # allow v1 explicitly + subcls = handler.using(version=1) + self.assertEqual(subcls.version, 1) + + # forbid unknown ver + self.assertRaises(ValueError, handler.using, version=999) + + # allow '2a' only for v1 + subcls = handler.using(version=1, ident="2a") + self.assertRaises(ValueError, handler.using, ident="2a") + + def test_calc_digest_v2(self): + """ + test digest calc v2 matches bcrypt() + """ + from passlib.hash import bcrypt + from passlib.crypto.digest import compile_hmac + from passlib.utils.binary import b64encode + + # manually calc intermediary digest + salt = "nyKYxTAvjmy6lMDYMl11Uu" + secret = "test" + temp_digest = compile_hmac("sha256", salt.encode("ascii"))(secret.encode("ascii")) + temp_digest = b64encode(temp_digest).decode("ascii") + self.assertEqual(temp_digest, "J5TlyIDm+IcSWmKiDJm+MeICndBkFVPn4kKdJW8f+xY=") + + # manually final hash from intermediary + # XXX: genhash() could be useful here + bcrypt_digest = bcrypt(ident="2b", salt=salt, rounds=12)._calc_checksum(temp_digest) + self.assertEqual(bcrypt_digest, "M0wE0Ov/9LXoQFCe.jRHu3MSHPF54Ta") + self.assertTrue(bcrypt.verify(temp_digest, "$2b$12$" + salt + bcrypt_digest)) + + # confirm handler outputs same thing. + # XXX: genhash() could be useful here + result = self.handler(ident="2b", salt=salt, rounds=12)._calc_checksum(secret) + self.assertEqual(result, bcrypt_digest) + + #=================================================================== + # eoc + #=================================================================== + +# create test cases for specific backends +bcrypt_sha256_bcrypt_test = _bcrypt_sha256_test.create_backend_case("bcrypt") +bcrypt_sha256_pybcrypt_test = _bcrypt_sha256_test.create_backend_case("pybcrypt") +bcrypt_sha256_bcryptor_test = _bcrypt_sha256_test.create_backend_case("bcryptor") + +class bcrypt_sha256_os_crypt_test(_bcrypt_sha256_test.create_backend_case("os_crypt")): + + @classmethod + def _get_safe_crypt_handler_backend(cls): + return bcrypt_os_crypt_test._get_safe_crypt_handler_backend() + + has_os_crypt_fallback = False + +bcrypt_sha256_builtin_test = _bcrypt_sha256_test.create_backend_case("builtin") + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_cisco.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_cisco.py new file mode 100644 index 0000000..ea6594b --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_cisco.py @@ -0,0 +1,457 @@ +""" +passlib.tests.test_handlers_cisco - tests for Cisco-specific algorithms +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function +# core +import logging +log = logging.getLogger(__name__) +# site +# pkg +from passlib import hash, exc +from passlib.utils.compat import u +from .utils import UserHandlerMixin, HandlerCase, repeat_string +from .test_handlers import UPASS_TABLE +# 
module +__all__ = [ + "cisco_pix_test", + "cisco_asa_test", + "cisco_type7_test", +] +#============================================================================= +# shared code for cisco PIX & ASA +#============================================================================= + +class _PixAsaSharedTest(UserHandlerMixin, HandlerCase): + """ + class w/ shared info for PIX & ASA tests. + """ + __unittest_skip = True # for TestCase + requires_user = False # for UserHandlerMixin + + #: shared list of hashes which should be identical under pix & asa7 + #: (i.e. combined secret + user < 17 bytes) + pix_asa_shared_hashes = [ + # + # http://www.perlmonks.org/index.pl?node_id=797623 + # + (("cisco", ""), "2KFQnbNIdI.2KYOU"), # confirmed ASA 9.6 + + # + # http://www.hsc.fr/ressources/breves/pix_crack.html.en + # + (("hsc", ""), "YtT8/k6Np8F1yz2c"), # confirmed ASA 9.6 + + # + # www.freerainbowtables.com/phpBB3/viewtopic.php?f=2&t=1441 + # + (("", ""), "8Ry2YjIyt7RRXU24"), # confirmed ASA 9.6 + (("cisco", "john"), "hN7LzeyYjw12FSIU"), + (("cisco", "jack"), "7DrfeZ7cyOj/PslD"), + + # + # http://comments.gmane.org/gmane.comp.security.openwall.john.user/2529 + # + (("ripper", "alex"), "h3mJrcH0901pqX/m"), + (("cisco", "cisco"), "3USUcOPFUiMCO4Jk"), + (("cisco", "cisco1"), "3USUcOPFUiMCO4Jk"), + (("CscFw-ITC!", "admcom"), "lZt7HSIXw3.QP7.R"), + ("cangetin", "TynyB./ftknE77QP"), + (("cangetin", "rramsey"), "jgBZqYtsWfGcUKDi"), + + # + # http://openwall.info/wiki/john/sample-hashes + # + (("phonehome", "rharris"), "zyIIMSYjiPm0L7a6"), + + # + # http://www.openwall.com/lists/john-users/2010/08/08/3 + # + (("cangetin", ""), "TynyB./ftknE77QP"), + (("cangetin", "rramsey"), "jgBZqYtsWfGcUKDi"), + + # + # from JTR 1.7.9 + # + ("test1", "TRPEas6f/aa6JSPL"), + ("test2", "OMT6mXmAvGyzrCtp"), + ("test3", "gTC7RIy1XJzagmLm"), + ("test4", "oWC1WRwqlBlbpf/O"), + ("password", "NuLKvvWGg.x9HEKO"), + ("0123456789abcdef", ".7nfVBEIEu4KbF/1"), + + # + # http://www.cisco.com/en/US/docs/security/pix/pix50/configuration/guide/commands.html#wp5472 + # + (("1234567890123456", ""), "feCkwUGktTCAgIbD"), # canonical source + (("watag00s1am", ""), "jMorNbK0514fadBh"), # canonical source + + # + # custom + # + (("cisco1", "cisco1"), "jmINXNH6p1BxUppp"), + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'CaiIvkLMu2TOHXGT'), + + # + # passlib reference vectors + # + # Some of these have been confirmed on various ASA firewalls, + # and the exact version is noted next to each hash. + # Would like to verify these under more PIX & ASA versions. + # + # Those without a note are generally an extrapolation, + # to ensure the code stays consistent, but for various reasons, + # hasn't been verified. + # + # * One such case is usernames w/ 1 & 2 digits -- + # ASA (9.6 at least) requires 3+ digits in username. + # + # The following hashes (below 13 chars) should be identical for PIX/ASA. + # Ones which differ are listed separately in the known_correct_hashes + # list for the two test classes. 
+ # + + # 4 char password + (('1234', ''), 'RLPMUQ26KL4blgFN'), # confirmed ASA 9.6 + + # 8 char password + (('01234567', ''), '0T52THgnYdV1tlOF'), # confirmed ASA 9.6 + (('01234567', '3'), '.z0dT9Alkdc7EIGS'), + (('01234567', '36'), 'CC3Lam53t/mHhoE7'), + (('01234567', '365'), '8xPrWpNnBdD2DzdZ'), # confirmed ASA 9.6 + (('01234567', '3333'), '.z0dT9Alkdc7EIGS'), # confirmed ASA 9.6 + (('01234567', '3636'), 'CC3Lam53t/mHhoE7'), # confirmed ASA 9.6 + (('01234567', '3653'), '8xPrWpNnBdD2DzdZ'), # confirmed ASA 9.6 + (('01234567', 'adm'), 'dfWs2qiao6KD/P2L'), # confirmed ASA 9.6 + (('01234567', 'adma'), 'dfWs2qiao6KD/P2L'), # confirmed ASA 9.6 + (('01234567', 'admad'), 'dfWs2qiao6KD/P2L'), # confirmed ASA 9.6 + (('01234567', 'user'), 'PNZ4ycbbZ0jp1.j1'), # confirmed ASA 9.6 + (('01234567', 'user1234'), 'PNZ4ycbbZ0jp1.j1'), # confirmed ASA 9.6 + + # 12 char password + (('0123456789ab', ''), 'S31BxZOGlAigndcJ'), # confirmed ASA 9.6 + (('0123456789ab', '36'), 'wFqSX91X5.YaRKsi'), + (('0123456789ab', '365'), 'qjgo3kNgTVxExbno'), # confirmed ASA 9.6 + (('0123456789ab', '3333'), 'mcXPL/vIZcIxLUQs'), # confirmed ASA 9.6 + (('0123456789ab', '3636'), 'wFqSX91X5.YaRKsi'), # confirmed ASA 9.6 + (('0123456789ab', '3653'), 'qjgo3kNgTVxExbno'), # confirmed ASA 9.6 + (('0123456789ab', 'user'), 'f.T4BKdzdNkjxQl7'), # confirmed ASA 9.6 + (('0123456789ab', 'user1234'), 'f.T4BKdzdNkjxQl7'), # confirmed ASA 9.6 + + # NOTE: remaining reference vectors for 13+ char passwords + # are split up between cisco_pix & cisco_asa tests. + + # unicode passwords + # ASA supposedly uses utf-8 encoding, but entering non-ascii + # chars is error-prone, and while UTF-8 appears to be intended, + # observed behaviors include: + # * ssh cli stripping non-ascii chars entirely + # * ASDM web iface double-encoding utf-8 strings + ((u("t\xe1ble").encode("utf-8"), 'user'), 'Og8fB4NyF0m5Ed9c'), + ((u("t\xe1ble").encode("utf-8").decode("latin-1").encode("utf-8"), + 'user'), 'cMvFC2XVBmK/68yB'), # confirmed ASA 9.6 when typed into ASDM + ] + + def test_calc_digest_spoiler(self): + """ + _calc_checksum() -- spoil oversize passwords during verify + + for details, see 'spoil_digest' flag instead that function. + this helps cisco_pix/cisco_asa implement their policy of + ``.truncate_verify_reject=True``. + """ + def calc(secret, for_hash=False): + return self.handler(use_defaults=for_hash)._calc_checksum(secret) + + # short (non-truncated) password + short_secret = repeat_string("1234", self.handler.truncate_size) + short_hash = calc(short_secret) + + # longer password should have totally different hash, + # to prevent verify from matching (i.e. "spoiled"). 
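+ # (one char past truncate_size is enough to exercise this -- if the
+ # implementation merely truncated, long_hash below would equal short_hash
+ # and the assertNotEqual would fail.)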
+ long_secret = short_secret + "X" + long_hash = calc(long_secret) + self.assertNotEqual(long_hash, short_hash) + + # spoiled hash should depend on whole secret, + # so that output isn't predictable + alt_long_secret = short_secret + "Y" + alt_long_hash = calc(alt_long_secret) + self.assertNotEqual(alt_long_hash, short_hash) + self.assertNotEqual(alt_long_hash, long_hash) + + # for hash(), should throw error if password too large + calc(short_secret, for_hash=True) + self.assertRaises(exc.PasswordSizeError, calc, long_secret, for_hash=True) + self.assertRaises(exc.PasswordSizeError, calc, alt_long_secret, for_hash=True) + +#============================================================================= +# cisco pix +#============================================================================= +class cisco_pix_test(_PixAsaSharedTest): + handler = hash.cisco_pix + + #: known correct pix hashes + known_correct_hashes = _PixAsaSharedTest.pix_asa_shared_hashes + [ + # + # passlib reference vectors (PIX-specific) + # + # NOTE: See 'pix_asa_shared_hashes' for general PIX+ASA vectors, + # and general notes about the 'passlib reference vectors' test set. + # + # All of the following are PIX-specific, as ASA starts + # to use a different padding size at 13 characters. + # + # TODO: these need confirming w/ an actual PIX system. + # + + # 13 char password + (('0123456789abc', ''), 'eacOpB7vE7ZDukSF'), + (('0123456789abc', '3'), 'ylJTd/qei66WZe3w'), + (('0123456789abc', '36'), 'hDx8QRlUhwd6bU8N'), + (('0123456789abc', '365'), 'vYOOtnkh1HXcMrM7'), + (('0123456789abc', '3333'), 'ylJTd/qei66WZe3w'), + (('0123456789abc', '3636'), 'hDx8QRlUhwd6bU8N'), + (('0123456789abc', '3653'), 'vYOOtnkh1HXcMrM7'), + (('0123456789abc', 'user'), 'f4/.SALxqDo59mfV'), + (('0123456789abc', 'user1234'), 'f4/.SALxqDo59mfV'), + + # 14 char password + (('0123456789abcd', ''), '6r8888iMxEoPdLp4'), + (('0123456789abcd', '3'), 'f5lvmqWYj9gJqkIH'), + (('0123456789abcd', '36'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', '365'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', '3333'), 'f5lvmqWYj9gJqkIH'), + (('0123456789abcd', '3636'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', '3653'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', 'adm'), 'DbPLCFIkHc2SiyDk'), + (('0123456789abcd', 'adma'), 'DbPLCFIkHc2SiyDk'), + (('0123456789abcd', 'user'), 'WfO2UiTapPkF/FSn'), + (('0123456789abcd', 'user1234'), 'WfO2UiTapPkF/FSn'), + + # 15 char password + (('0123456789abcde', ''), 'al1e0XFIugTYLai3'), + (('0123456789abcde', '3'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '36'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '365'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '3333'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '3636'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '3653'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', 'adm'), 'KgKx1UQvdR/09i9u'), + (('0123456789abcde', 'adma'), 'KgKx1UQvdR/09i9u'), + (('0123456789abcde', 'user'), 'qLopkenJ4WBqxaZN'), + (('0123456789abcde', 'user1234'), 'qLopkenJ4WBqxaZN'), + + # 16 char password + (('0123456789abcdef', ''), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '36'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '365'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '3333'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '3636'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '3653'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', 'user'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', 'user1234'), '.7nfVBEIEu4KbF/1'), + ] + + +#============================================================================= +# cisco 
asa +#============================================================================= +class cisco_asa_test(_PixAsaSharedTest): + handler = hash.cisco_asa + + known_correct_hashes = _PixAsaSharedTest.pix_asa_shared_hashes + [ + # + # passlib reference vectors (ASA-specific) + # + # NOTE: See 'pix_asa_shared_hashes' for general PIX+ASA vectors, + # and general notes about the 'passlib reference vectors' test set. + # + + # 13 char password + # NOTE: past this point, ASA pads to 32 bytes instead of 16 + # for all cases where user is set (secret + 4 bytes > 16), + # but still uses 16 bytes for enable pwds (secret <= 16). + # hashes w/ user WON'T match PIX, but "enable" passwords will. + (('0123456789abc', ''), 'eacOpB7vE7ZDukSF'), # confirmed ASA 9.6 + (('0123456789abc', '36'), 'FRV9JG18UBEgX0.O'), + (('0123456789abc', '365'), 'NIwkusG9hmmMy6ZQ'), # confirmed ASA 9.6 + (('0123456789abc', '3333'), 'NmrkP98nT7RAeKZz'), # confirmed ASA 9.6 + (('0123456789abc', '3636'), 'FRV9JG18UBEgX0.O'), # confirmed ASA 9.6 + (('0123456789abc', '3653'), 'NIwkusG9hmmMy6ZQ'), # confirmed ASA 9.6 + (('0123456789abc', 'user'), '8Q/FZeam5ai1A47p'), # confirmed ASA 9.6 + (('0123456789abc', 'user1234'), '8Q/FZeam5ai1A47p'), # confirmed ASA 9.6 + + # 14 char password + (('0123456789abcd', ''), '6r8888iMxEoPdLp4'), # confirmed ASA 9.6 + (('0123456789abcd', '3'), 'yxGoujXKPduTVaYB'), + (('0123456789abcd', '36'), 'W0jckhnhjnr/DiT/'), + (('0123456789abcd', '365'), 'HuVOxfMQNahaoF8u'), # confirmed ASA 9.6 + (('0123456789abcd', '3333'), 'yxGoujXKPduTVaYB'), # confirmed ASA 9.6 + (('0123456789abcd', '3636'), 'W0jckhnhjnr/DiT/'), # confirmed ASA 9.6 + (('0123456789abcd', '3653'), 'HuVOxfMQNahaoF8u'), # confirmed ASA 9.6 + (('0123456789abcd', 'adm'), 'RtOmSeoCs4AUdZqZ'), # confirmed ASA 9.6 + (('0123456789abcd', 'adma'), 'RtOmSeoCs4AUdZqZ'), # confirmed ASA 9.6 + (('0123456789abcd', 'user'), 'rrucwrcM0h25pr.m'), # confirmed ASA 9.6 + (('0123456789abcd', 'user1234'), 'rrucwrcM0h25pr.m'), # confirmed ASA 9.6 + + # 15 char password + (('0123456789abcde', ''), 'al1e0XFIugTYLai3'), # confirmed ASA 9.6 + (('0123456789abcde', '3'), 'nAZrQoHaL.fgrIqt'), + (('0123456789abcde', '36'), '2GxIQ6ICE795587X'), + (('0123456789abcde', '365'), 'QmDsGwCRBbtGEKqM'), # confirmed ASA 9.6 + (('0123456789abcde', '3333'), 'nAZrQoHaL.fgrIqt'), # confirmed ASA 9.6 + (('0123456789abcde', '3636'), '2GxIQ6ICE795587X'), # confirmed ASA 9.6 + (('0123456789abcde', '3653'), 'QmDsGwCRBbtGEKqM'), # confirmed ASA 9.6 + (('0123456789abcde', 'adm'), 'Aj2aP0d.nk62wl4m'), # confirmed ASA 9.6 + (('0123456789abcde', 'adma'), 'Aj2aP0d.nk62wl4m'), # confirmed ASA 9.6 + (('0123456789abcde', 'user'), 'etxiXfo.bINJcXI7'), # confirmed ASA 9.6 + (('0123456789abcde', 'user1234'), 'etxiXfo.bINJcXI7'), # confirmed ASA 9.6 + + # 16 char password + (('0123456789abcdef', ''), '.7nfVBEIEu4KbF/1'), # confirmed ASA 9.6 + (('0123456789abcdef', '36'), 'GhI8.yFSC5lwoafg'), + (('0123456789abcdef', '365'), 'KFBI6cNQauyY6h/G'), # confirmed ASA 9.6 + (('0123456789abcdef', '3333'), 'Ghdi1IlsswgYzzMH'), # confirmed ASA 9.6 + (('0123456789abcdef', '3636'), 'GhI8.yFSC5lwoafg'), # confirmed ASA 9.6 + (('0123456789abcdef', '3653'), 'KFBI6cNQauyY6h/G'), # confirmed ASA 9.6 + (('0123456789abcdef', 'user'), 'IneB.wc9sfRzLPoh'), # confirmed ASA 9.6 + (('0123456789abcdef', 'user1234'), 'IneB.wc9sfRzLPoh'), # confirmed ASA 9.6 + + # 17 char password + # NOTE: past this point, ASA pads to 32 bytes instead of 16 + # for ALL cases, since secret > 16 bytes even for enable pwds; + # and so none of these rest 
here should match PIX. + (('0123456789abcdefq', ''), 'bKshl.EN.X3CVFRQ'), # confirmed ASA 9.6 + (('0123456789abcdefq', '36'), 'JAeTXHs0n30svlaG'), + (('0123456789abcdefq', '365'), '4fKSSUBHT1ChGqHp'), # confirmed ASA 9.6 + (('0123456789abcdefq', '3333'), 'USEJbxI6.VY4ecBP'), # confirmed ASA 9.6 + (('0123456789abcdefq', '3636'), 'JAeTXHs0n30svlaG'), # confirmed ASA 9.6 + (('0123456789abcdefq', '3653'), '4fKSSUBHT1ChGqHp'), # confirmed ASA 9.6 + (('0123456789abcdefq', 'user'), '/dwqyD7nGdwSrDwk'), # confirmed ASA 9.6 + (('0123456789abcdefq', 'user1234'), '/dwqyD7nGdwSrDwk'), # confirmed ASA 9.6 + + # 27 char password + (('0123456789abcdefqwertyuiopa', ''), '4wp19zS3OCe.2jt5'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '36'), 'PjUoGqWBKPyV9qOe'), + (('0123456789abcdefqwertyuiopa', '365'), 'bfCy6xFAe5O/gzvM'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '3333'), 'rd/ZMuGTJFIb2BNG'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '3636'), 'PjUoGqWBKPyV9qOe'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '3653'), 'bfCy6xFAe5O/gzvM'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', 'user'), 'zynfWw3UtszxLMgL'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', 'user1234'), 'zynfWw3UtszxLMgL'), # confirmed ASA 9.6 + + # 28 char password + # NOTE: past this point, ASA stops appending the username AT ALL, + # even though there's still room for the first few chars. + (('0123456789abcdefqwertyuiopas', ''), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopas', '36'), 'W6nbOddI0SutTK7m'), + (('0123456789abcdefqwertyuiopas', '365'), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopas', 'user'), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopas', 'user1234'), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + + # 32 char password + # NOTE: this is max size that ASA allows, and throws error for larger + (('0123456789abcdefqwertyuiopasdfgh', ''), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopasdfgh', '36'), '5hPT/iC6DnoBxo6a'), + (('0123456789abcdefqwertyuiopasdfgh', '365'), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopasdfgh', 'user'), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopasdfgh', 'user1234'), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + ] + + +#============================================================================= +# cisco type 7 +#============================================================================= +class cisco_type7_test(HandlerCase): + handler = hash.cisco_type7 + salt_bits = 4 + salt_type = int + + known_correct_hashes = [ + # + # http://mccltd.net/blog/?p=1034 + # + ("secure ", "04480E051A33490E"), + + # + # http://insecure.org/sploits/cisco.passwords.html + # + ("Its time to go to lunch!", + "153B1F1F443E22292D73212D5300194315591954465A0D0B59"), + + # + # http://blog.ioshints.info/2007/11/type-7-decryption-in-cisco-ios.html + # + ("t35t:pa55w0rd", "08351F1B1D431516475E1B54382F"), + + # + # http://www.m00nie.com/2011/09/cisco-type-7-password-decryption-and-encryption-with-perl/ + # + ("hiImTesting:)", "020E0D7206320A325847071E5F5E"), + + # + # http://packetlife.net/forums/thread/54/ + # + ("cisco123", "060506324F41584B56"), + ("cisco123", "1511021F07257A767B"), + + # + # source ? 
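+        # --- editorial aside (illustrative sketch, not passlib's code) ---
+        # type 7 is reversible: a fixed, publicly documented XOR key, with
+        # the two leading decimal digits giving the starting offset into it.
+        # A from-scratch decoder, assuming Python 3 (checked against the
+        # "cisco123" vector above):
+        #
+        #   KEY = b"dsfd;kfoA,.iyewrkldJKDHSUBsgvca69834ncxv9873254k;fg87"
+        #
+        #   def type7_decode(hash):
+        #       offset, data = int(hash[:2]), bytes.fromhex(hash[2:])
+        #       return bytes(b ^ KEY[(offset + i) % len(KEY)]
+        #                    for i, b in enumerate(data)).decode("utf-8")
+        #
+        #   type7_decode("060506324F41584B56")  # -> "cisco123"
+        #
+        # the 53-byte key is also why salts > 52 are rejected below: valid
+        # starting offsets are 0..52.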
+ # + ('Supe&8ZUbeRp4SS', "06351A3149085123301517391C501918"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '0958EDC8A9F495F6F8A5FD'), + ] + + known_unidentified_hashes = [ + # salt with hex value + "0A480E051A33490E", + + # salt value > 52. this may in fact be valid, but we reject it for now + # (see docs for more). + '99400E4812', + ] + + def test_90_decode(self): + """test cisco_type7.decode()""" + from passlib.utils import to_unicode, to_bytes + + handler = self.handler + for secret, hash in self.known_correct_hashes: + usecret = to_unicode(secret) + bsecret = to_bytes(secret) + self.assertEqual(handler.decode(hash), usecret) + self.assertEqual(handler.decode(hash, None), bsecret) + + self.assertRaises(UnicodeDecodeError, handler.decode, + '0958EDC8A9F495F6F8A5FD', 'ascii') + + def test_91_salt(self): + """test salt value border cases""" + handler = self.handler + self.assertRaises(TypeError, handler, salt=None) + handler(salt=None, use_defaults=True) + self.assertRaises(TypeError, handler, salt='abc') + self.assertRaises(ValueError, handler, salt=-10) + self.assertRaises(ValueError, handler, salt=100) + + self.assertRaises(TypeError, handler.using, salt='abc') + self.assertRaises(ValueError, handler.using, salt=-10) + self.assertRaises(ValueError, handler.using, salt=100) + with self.assertWarningList("salt/offset must be.*"): + subcls = handler.using(salt=100, relaxed=True) + self.assertEqual(subcls(use_defaults=True).salt, 52) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_django.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_django.py new file mode 100644 index 0000000..f7c9a0d --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_django.py @@ -0,0 +1,413 @@ +"""passlib.tests.test_handlers_django - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import re +import warnings +# site +# pkg +from passlib import hash +from passlib.utils import repeat_string +from passlib.utils.compat import u +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, SkipTest +from passlib.tests.test_handlers import UPASS_USD, UPASS_TABLE +from passlib.tests.test_ext_django import DJANGO_VERSION, MIN_DJANGO_VERSION, \ + check_django_hasher_has_backend +# module + +#============================================================================= +# django +#============================================================================= + +# standard string django uses +UPASS_LETMEIN = u('l\xe8tmein') + +def vstr(version): + return ".".join(str(e) for e in version) + +class _DjangoHelper(TestCase): + """ + mixin for HandlerCase subclasses that are testing a hasher + which is also present in django. 
+ """ + __unittest_skip = True + + #: minimum django version where hash alg is present / that we support testing against + min_django_version = MIN_DJANGO_VERSION + + #: max django version where hash alg is present + #: TODO: for a bunch of the tests below, this is just max version where + #: settings.PASSWORD_HASHERS includes it by default -- could add helper to patch + #: desired django hasher back in for duration of test. + #: XXX: change this to "disabled_in_django_version" instead? + max_django_version = None + + def _require_django_support(self): + # make sure min django version + if DJANGO_VERSION < self.min_django_version: + raise self.skipTest("Django >= %s not installed" % vstr(self.min_django_version)) + if self.max_django_version and DJANGO_VERSION > self.max_django_version: + raise self.skipTest("Django <= %s not installed" % vstr(self.max_django_version)) + + # make sure django has a backend for specified hasher + name = self.handler.django_name + if not check_django_hasher_has_backend(name): + raise self.skipTest('django hasher %r not available' % name) + + return True + + extra_fuzz_verifiers = HandlerCase.fuzz_verifiers + ( + "fuzz_verifier_django", + ) + + def fuzz_verifier_django(self): + try: + self._require_django_support() + except SkipTest: + return None + from django.contrib.auth.hashers import check_password + + def verify_django(secret, hash): + """django/check_password""" + if self.handler.name == "django_bcrypt" and hash.startswith("bcrypt$$2y$"): + hash = hash.replace("$$2y$", "$$2a$") + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + return check_password(secret, hash) + return verify_django + + def test_90_django_reference(self): + """run known correct hashes through Django's check_password()""" + self._require_django_support() + # XXX: esp. when it's no longer supported by django, + # should verify it's *NOT* recognized + from django.contrib.auth.hashers import check_password + assert self.known_correct_hashes + for secret, hash in self.iter_known_hashes(): + self.assertTrue(check_password(secret, hash), + "secret=%r hash=%r failed to verify" % + (secret, hash)) + self.assertFalse(check_password('x' + secret, hash), + "mangled secret=%r hash=%r incorrect verified" % + (secret, hash)) + + def test_91_django_generation(self): + """test against output of Django's make_password()""" + self._require_django_support() + # XXX: esp. when it's no longer supported by django, + # should verify it's *NOT* recognized + from passlib.utils import tick + from django.contrib.auth.hashers import make_password + name = self.handler.django_name # set for all the django_* handlers + end = tick() + self.max_fuzz_time/2 + generator = self.FuzzHashGenerator(self, self.getRandom()) + while tick() < end: + secret, other = generator.random_password_pair() + if not secret: # django rejects empty passwords. 
+                continue
+            if isinstance(secret, bytes):
+                secret = secret.decode("utf-8")
+            hash = make_password(secret, hasher=name)
+            self.assertTrue(self.do_identify(hash))
+            self.assertTrue(self.do_verify(secret, hash))
+            self.assertFalse(self.do_verify(other, hash))
+
+class django_disabled_test(HandlerCase):
+    """test django_disabled"""
+    handler = hash.django_disabled
+    disabled_contains_salt = True
+
+    known_correct_hashes = [
+        # *everything* should hash to "!", and nothing should verify
+        ("password", "!"),
+        ("", "!"),
+        (UPASS_TABLE, "!"),
+    ]
+
+    known_alternate_hashes = [
+        # django 1.6 appends a random alphanumeric string
+        ("!9wa845vn7098ythaehasldkfj", "password", "!"),
+    ]
+
+class django_des_crypt_test(HandlerCase, _DjangoHelper):
+    """test django_des_crypt"""
+    handler = hash.django_des_crypt
+    max_django_version = (1,9)
+
+    known_correct_hashes = [
+        # ensures only first two digits of salt count.
+        ("password", 'crypt$c2$c2M87q...WWcU'),
+        ("password", 'crypt$c2e86$c2M87q...WWcU'),
+        ("passwordignoreme", 'crypt$c2.AZ$c2M87q...WWcU'),
+
+        # ensures utf-8 used for unicode
+        (UPASS_USD, 'crypt$c2e86$c2hN1Bxd6ZiWs'),
+        (UPASS_TABLE, 'crypt$0.aQs$0.wB.TT0Czvlo'),
+        (u("hell\u00D6"), "crypt$sa$saykDgk3BPZ9E"),
+
+        # prevent regression of issue 22
+        ("foo", 'crypt$MNVY.9ajgdvDQ$MNVY.9ajgdvDQ'),
+    ]
+
+    known_alternate_hashes = [
+        # ensure django 1.4 empty salt field is accepted;
+        # but that salt field is re-filled (for django 1.0 compatibility)
+        ('crypt$$c2M87q...WWcU', "password", 'crypt$c2$c2M87q...WWcU'),
+    ]
+
+    known_unidentified_hashes = [
+        'sha1$aa$bb',
+    ]
+
+    known_malformed_hashes = [
+        # checksum too short
+        'crypt$c2$c2M87q',
+
+        # salt must be >2
+        'crypt$f$c2M87q...WWcU',
+
+        # make sure first 2 chars of salt & chk field agree.
+        'crypt$ffe86$c2M87q...WWcU',
+    ]
+
+class django_salted_md5_test(HandlerCase, _DjangoHelper):
+    """test django_salted_md5"""
+    handler = hash.django_salted_md5
+    max_django_version = (1,9)
+
+    known_correct_hashes = [
+        # test extra large salt
+        ("password", 'md5$123abcdef$c8272612932975ee80e8a35995708e80'),
+
+        # test django 1.4 alphanumeric salt
+        ("test", 'md5$3OpqnFAHW5CT$54b29300675271049a1ebae07b395e20'),
+
+        # ensures utf-8 used for unicode
+        (UPASS_USD, 'md5$c2e86$92105508419a81a6babfaecf876a2fa0'),
+        (UPASS_TABLE, 'md5$d9eb8$01495b32852bffb27cf5d4394fe7a54c'),
+    ]
+
+    known_unidentified_hashes = [
+        'sha1$aa$bb',
+    ]
+
+    known_malformed_hashes = [
+        # checksum too short
+        'md5$aa$bb',
+    ]
+
+    class FuzzHashGenerator(HandlerCase.FuzzHashGenerator):
+
+        def random_salt_size(self):
+            # workaround for django14 regression --
+            # 1.4 won't accept hashes with empty salt strings, unlike 1.3 and earlier.
+ # looks to be fixed in a future release -- https://code.djangoproject.com/ticket/18144 + # for now, we avoid salt_size==0 under 1.4 + handler = self.handler + default = handler.default_salt_size + assert handler.min_salt_size == 0 + lower = 1 + upper = handler.max_salt_size or default*4 + return self.randintgauss(lower, upper, default, default*.5) + +class django_salted_sha1_test(HandlerCase, _DjangoHelper): + """test django_salted_sha1""" + handler = hash.django_salted_sha1 + max_django_version = (1,9) + + known_correct_hashes = [ + # test extra large salt + ("password",'sha1$123abcdef$e4a1877b0e35c47329e7ed7e58014276168a37ba'), + + # test django 1.4 alphanumeric salt + ("test", 'sha1$bcwHF9Hy8lxS$6b4cfa0651b43161c6f1471ce9523acf1f751ba3'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'sha1$c2e86$0f75c5d7fbd100d587c127ef0b693cde611b4ada'), + (UPASS_TABLE, 'sha1$6d853$ef13a4d8fb57aed0cb573fe9c82e28dc7fd372d4'), + + # generic password + ("MyPassword", 'sha1$54123$893cf12e134c3c215f3a76bd50d13f92404a54d3'), + ] + + known_unidentified_hashes = [ + 'md5$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'sha1$c2e86$0f75', + ] + + # reuse custom random_salt_size() helper... + FuzzHashGenerator = django_salted_md5_test.FuzzHashGenerator + +class django_pbkdf2_sha256_test(HandlerCase, _DjangoHelper): + """test django_pbkdf2_sha256""" + handler = hash.django_pbkdf2_sha256 + + known_correct_hashes = [ + # + # custom - generated via django 1.4 hasher + # + ('not a password', + 'pbkdf2_sha256$10000$kjVJaVz6qsnJ$5yPHw3rwJGECpUf70daLGhOrQ5+AMxIJdz1c3bqK1Rs='), + (UPASS_TABLE, + 'pbkdf2_sha256$10000$bEwAfNrH1TlQ$OgYUblFNUX1B8GfMqaCYUK/iHyO0pa7STTDdaEJBuY0='), + ] + +class django_pbkdf2_sha1_test(HandlerCase, _DjangoHelper): + """test django_pbkdf2_sha1""" + handler = hash.django_pbkdf2_sha1 + + known_correct_hashes = [ + # + # custom - generated via django 1.4 hashers + # + ('not a password', + 'pbkdf2_sha1$10000$wz5B6WkasRoF$atJmJ1o+XfJxKq1+Nu1f1i57Z5I='), + (UPASS_TABLE, + 'pbkdf2_sha1$10000$KZKWwvqb8BfL$rw5pWsxJEU4JrZAQhHTCO+u0f5Y='), + ] + +@skipUnless(hash.bcrypt.has_backend(), "no bcrypt backends available") +class django_bcrypt_test(HandlerCase, _DjangoHelper): + """test django_bcrypt""" + handler = hash.django_bcrypt + # XXX: not sure when this wasn't in default list anymore. somewhere in [2.0 - 2.2] + max_django_version = (2, 0) + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # just copied and adapted a few test vectors from bcrypt (above), + # since django_bcrypt is just a wrapper for the real bcrypt class. + # + ('', 'bcrypt$$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'), + ('abcdefghijklmnopqrstuvwxyz', + 'bcrypt$$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'), + (UPASS_TABLE, + 'bcrypt$$2a$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + ] + + # NOTE: the following have been cloned from _bcrypt_test() + + def populate_settings(self, kwds): + # speed up test w/ lower rounds + kwds.setdefault("rounds", 4) + super(django_bcrypt_test, self).populate_settings(kwds) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(5, 8, 6, 1) + + def random_ident(self): + # omit multi-ident tests, only $2a$ counts for this class + # XXX: enable this to check 2a / 2b? 
+            return None
+
+@skipUnless(hash.bcrypt.has_backend(), "no bcrypt backends available")
+class django_bcrypt_sha256_test(HandlerCase, _DjangoHelper):
+    """test django_bcrypt_sha256"""
+    handler = hash.django_bcrypt_sha256
+    forbidden_characters = None
+    fuzz_salts_need_bcrypt_repair = True
+
+    known_correct_hashes = [
+        #
+        # custom - generated via django 1.6 hasher
+        #
+        ('',
+         'bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu'),
+        (UPASS_LETMEIN,
+         'bcrypt_sha256$$2a$08$NDjSAIcas.EcoxCRiArvT.MkNiPYVhrsrnJsRkLueZOoV1bsQqlmC'),
+        (UPASS_TABLE,
+         'bcrypt_sha256$$2a$06$kCXUnRFQptGg491siDKNTu8RxjBGSjALHRuvhPYNFsa4Ea5d9M48u'),
+
+        # test >72 chars is hashed correctly -- under bcrypt these hash the same.
+        (repeat_string("abc123",72),
+         'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61OySmyXA8FoY4PjGizjE1QSDfuL5MXNni'),
+        (repeat_string("abc123",72)+"qwr",
+         'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61Ocy0BEz1RK6xslSNi8PlaLX2pe7x/KQG'),
+        (repeat_string("abc123",72)+"xyz",
+         'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61OvY2zoRVUa2Pugv2ExVOUT2YmhvxUFUa'),
+    ]
+
+    known_malformed_hashes = [
+        # data in django salt field
+        'bcrypt_sha256$xyz$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu',
+    ]
+
+    # NOTE: the following have been cloned from _bcrypt_test()
+
+    def populate_settings(self, kwds):
+        # speed up test w/ lower rounds
+        kwds.setdefault("rounds", 4)
+        super(django_bcrypt_sha256_test, self).populate_settings(kwds)
+
+    class FuzzHashGenerator(HandlerCase.FuzzHashGenerator):
+
+        def random_rounds(self):
+            # decrease default rounds for fuzz testing to speed up volume.
+            return self.randintgauss(5, 8, 6, 1)
+
+        def random_ident(self):
+            # omit multi-ident tests, only $2a$ counts for this class
+            # XXX: enable this to check 2a / 2b?
+            return None
+
+from passlib.tests.test_handlers_argon2 import _base_argon2_test
+
+@skipUnless(hash.argon2.has_backend(), "no argon2 backends available")
+class django_argon2_test(HandlerCase, _DjangoHelper):
+    """test django_argon2"""
+    handler = hash.django_argon2
+
+    # NOTE: most of this adapted from _base_argon2_test & argon2pure test
+
+    known_correct_hashes = [
+        # sample test
+        ("password", 'argon2$argon2i$v=19$m=256,t=1,p=1$c29tZXNhbHQ$AJFIsNZTMKTAewB4+ETN1A'),
+
+        # sample w/ all parameters different
+        ("password", 'argon2$argon2i$v=19$m=380,t=2,p=2$c29tZXNhbHQ$SrssP8n7m/12VWPM8dvNrw'),
+
+        # generated from django 1.10.3
+        (UPASS_LETMEIN, 'argon2$argon2i$v=19$m=512,t=2,p=2$V25jN1l4UUJZWkR1$MxpA1BD2Gh7+D79gaAw6sQ'),
+    ]
+
+    def setUpWarnings(self):
+        super(django_argon2_test, self).setUpWarnings()
+        warnings.filterwarnings("ignore", ".*Using argon2pure backend.*")
+
+    def do_stub_encrypt(self, handler=None, **settings):
+        # overriding default since no way to get stub config from argon2._calc_hash()
+        # (otherwise test_21b_max_rounds blocks trying to do max rounds)
+        handler = (handler or self.handler).using(**settings)
+        self = handler.wrapped(use_defaults=True)
+        self.checksum = self._stub_checksum
+        assert self.checksum
+        return handler._wrap_hash(self.to_string())
+
+    def test_03_legacy_hash_workflow(self):
+        # override base method
+        raise self.skipTest("legacy 1.6 workflow not supported")
+
+    class FuzzHashGenerator(_base_argon2_test.FuzzHashGenerator):
+
+        def random_type(self):
+            # override default since django only uses type I (see note in class)
+            return "I"
+
+        def random_rounds(self):
+            # decrease default rounds for fuzz testing to speed up volume.
+            return self.randintgauss(1, 3, 2, 1)
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_pbkdf2.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_pbkdf2.py
new file mode 100644
index 0000000..4d2f048
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_pbkdf2.py
@@ -0,0 +1,480 @@
+"""passlib.tests.test_handlers_pbkdf2 - tests for passlib hash algorithms"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import logging
+log = logging.getLogger(__name__)
+import warnings
+# site
+# pkg
+from passlib import hash
+from passlib.utils.compat import u
+from passlib.tests.utils import TestCase, HandlerCase
+from passlib.tests.test_handlers import UPASS_WAV
+# module
+
+#=============================================================================
+# ldap_pbkdf2_{digest}
+#=============================================================================
+# NOTE: since these are all wrappers for the pbkdf2_{digest} hashes,
+# they don't need extensive separate testing.
+
+class ldap_pbkdf2_test(TestCase):
+
+    def test_wrappers(self):
+        """test ldap pbkdf2 wrappers"""
+
+        self.assertTrue(
+            hash.ldap_pbkdf2_sha1.verify(
+                "password",
+                '{PBKDF2}1212$OB.dtnSEXZK8U5cgxU/GYQ$y5LKPOplRmok7CZp/aqVDVg8zGI',
+            )
+        )
+
+        self.assertTrue(
+            hash.ldap_pbkdf2_sha256.verify(
+                "password",
+                '{PBKDF2-SHA256}1212$4vjV83LKPjQzk31VI4E0Vw$hsYF68OiOUPdDZ1Fg'
+                '.fJPeq1h/gXXY7acBp9/6c.tmQ'
+            )
+        )
+
+        self.assertTrue(
+            hash.ldap_pbkdf2_sha512.verify(
+                "password",
+                '{PBKDF2-SHA512}1212$RHY0Fr3IDMSVO/RSZyb5ow$eNLfBK.eVozomMr.1gYa1'
+                '7k9B7KIK25NOEshvhrSX.esqY3s.FvWZViXz4KoLlQI.BzY/YTNJOiKc5gBYFYGww'
+            )
+        )
+
+#=============================================================================
+# pbkdf2 hashes
+#=============================================================================
+class atlassian_pbkdf2_sha1_test(HandlerCase):
+    handler = hash.atlassian_pbkdf2_sha1
+
+    known_correct_hashes = [
+        #
+        # generated using Jira
+        #
+        ("admin", '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/p'),
+        (UPASS_WAV,
+         "{PKCS5S2}cE9Yq6Am5tQGdHSHhky2XLeOnURwzaLBG2sur7FHKpvy2u0qDn6GcVGRjlmJoIUy"),
+    ]
+
+    known_malformed_hashes = [
+        # bad char ---\/
+        '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy!0IPksHChwoTAVYFrhsgoq8/p',
+
+        # bad size, missing padding
+        '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/',
+
+        # bad size, with correct padding
+        '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/=',
+    ]
+
+class pbkdf2_sha1_test(HandlerCase):
+    handler = hash.pbkdf2_sha1
+    known_correct_hashes = [
+        ("password", '$pbkdf2$1212$OB.dtnSEXZK8U5cgxU/GYQ$y5LKPOplRmok7CZp/aqVDVg8zGI'),
+        (UPASS_WAV,
+         '$pbkdf2$1212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc'),
+    ]
+
+    known_malformed_hashes = [
+        # zero padded rounds field
+        '$pbkdf2$01212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc',
+
+        # empty rounds field
+        '$pbkdf2$$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc',
+
+        # too many fields
+        '$pbkdf2$1212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc$',
+    ]
+
+class pbkdf2_sha256_test(HandlerCase):
+    handler = hash.pbkdf2_sha256
+    known_correct_hashes = [
+        ("password",
'$pbkdf2-sha256$1212$4vjV83LKPjQzk31VI4E0Vw$hsYF68OiOUPdDZ1Fg.fJPeq1h/gXXY7acBp9/6c.tmQ' + ), + (UPASS_WAV, + '$pbkdf2-sha256$1212$3SABFJGDtyhrQMVt1uABPw$WyaUoqCLgvz97s523nF4iuOqZNbp5Nt8do/cuaa7AiI' + ), + ] + +class pbkdf2_sha512_test(HandlerCase): + handler = hash.pbkdf2_sha512 + known_correct_hashes = [ + ("password", + '$pbkdf2-sha512$1212$RHY0Fr3IDMSVO/RSZyb5ow$eNLfBK.eVozomMr.1gYa1' + '7k9B7KIK25NOEshvhrSX.esqY3s.FvWZViXz4KoLlQI.BzY/YTNJOiKc5gBYFYGww' + ), + (UPASS_WAV, + '$pbkdf2-sha512$1212$KkbvoKGsAIcF8IslDR6skQ$8be/PRmd88Ps8fmPowCJt' + 'tH9G3vgxpG.Krjt3KT.NP6cKJ0V4Prarqf.HBwz0dCkJ6xgWnSj2ynXSV7MlvMa8Q' + ), + ] + +class cta_pbkdf2_sha1_test(HandlerCase): + handler = hash.cta_pbkdf2_sha1 + known_correct_hashes = [ + # + # test vectors from original implementation + # + (u("hashy the \N{SNOWMAN}"), '$p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0='), + + # + # custom + # + ("password", "$p5k2$1$$h1TDLGSw9ST8UMAPeIE13i0t12c="), + (UPASS_WAV, + "$p5k2$4321$OTg3NjU0MzIx$jINJrSvZ3LXeIbUdrJkRpN62_WQ="), + ] + +class dlitz_pbkdf2_sha1_test(HandlerCase): + handler = hash.dlitz_pbkdf2_sha1 + known_correct_hashes = [ + # + # test vectors from original implementation + # + ('cloadm', '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'), + ('gnu', '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'), + ('dcl', '$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL'), + ('spam', '$p5k2$3e8$H0NX9mT/$wk/sE8vv6OMKuMaqazCJYDSUhWY9YB2J'), + (UPASS_WAV, + '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ'), + ] + +class grub_pbkdf2_sha512_test(HandlerCase): + handler = hash.grub_pbkdf2_sha512 + known_correct_hashes = [ + # + # test vectors generated from cmd line tool + # + + # salt=32 bytes + (UPASS_WAV, + 'grub.pbkdf2.sha512.10000.BCAC1CEC5E4341C8C511C529' + '7FA877BE91C2817B32A35A3ECF5CA6B8B257F751.6968526A' + '2A5B1AEEE0A29A9E057336B48D388FFB3F600233237223C21' + '04DE1752CEC35B0DD1ED49563398A282C0F471099C2803FBA' + '47C7919CABC43192C68F60'), + + # salt=64 bytes + ('toomanysecrets', + 'grub.pbkdf2.sha512.10000.9B436BB6978682363D5C449B' + 'BEAB322676946C632208BC1294D51F47174A9A3B04A7E4785' + '986CD4EA7470FAB8FE9F6BD522D1FC6C51109A8596FB7AD48' + '7C4493.0FE5EF169AFFCB67D86E2581B1E251D88C777B98BA' + '2D3256ECC9F765D84956FC5CA5C4B6FD711AA285F0A04DCF4' + '634083F9A20F4B6F339A52FBD6BED618E527B'), + + ] + +#============================================================================= +# scram hash +#============================================================================= +class scram_test(HandlerCase): + handler = hash.scram + + # TODO: need a bunch more reference vectors from some real + # SCRAM transactions. + known_correct_hashes = [ + # + # taken from example in SCRAM specification (rfc 5802) + # + ('pencil', '$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30'), + + # + # custom + # + + # same as 5802 example hash, but with sha-256 & sha-512 added. 
+ ('pencil', '$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ'), + + # test unicode passwords & saslprep (all the passwords below + # should normalize to the same value: 'IX \xE0') + (u('IX \xE0'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + (u('\u2168\u3000a\u0300'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + (u('\u00ADIX \xE0'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + ] + + known_malformed_hashes = [ + # zero-padding in rounds + '$scram$04096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # non-digit in rounds + '$scram$409A$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # bad char in salt ---\/ + '$scram$4096$QSXCR.Q6sek8bf9-$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # bad char in digest ---\/ + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX3-', + + # missing sections + '$scram$4096$QSXCR.Q6sek8bf92', + '$scram$4096$QSXCR.Q6sek8bf92$', + + # too many sections + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30$', + + # missing separator + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY', + + # too many chars in alg name + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'shaxxx-190=HZbuOlKbWl.eR8AfIposuKbhX30', + + # missing sha-1 alg + '$scram$4096$QSXCR.Q6sek8bf92$sha-256=HZbuOlKbWl.eR8AfIposuKbhX30', + + # non-iana name + '$scram$4096$QSXCR.Q6sek8bf92$sha1=HZbuOlKbWl.eR8AfIposuKbhX30', + ] + + def setUp(self): + super(scram_test, self).setUp() + + # some platforms lack stringprep (e.g. Jython, IronPython) + self.require_stringprep() + + # silence norm_hash_name() warning + warnings.filterwarnings("ignore", r"norm_hash_name\(\): unknown hash") + + def test_90_algs(self): + """test parsing of 'algs' setting""" + defaults = dict(salt=b'A'*10, rounds=1000) + def parse(algs, **kwds): + for k in defaults: + kwds.setdefault(k, defaults[k]) + return self.handler(algs=algs, **kwds).algs + + # None -> default list + self.assertEqual(parse(None, use_defaults=True), hash.scram.default_algs) + self.assertRaises(TypeError, parse, None) + + # strings should be parsed + self.assertEqual(parse("sha1"), ["sha-1"]) + self.assertEqual(parse("sha1, sha256, md5"), ["md5","sha-1","sha-256"]) + + # lists should be normalized + self.assertEqual(parse(["sha-1","sha256"]), ["sha-1","sha-256"]) + + # sha-1 required + self.assertRaises(ValueError, parse, ["sha-256"]) + self.assertRaises(ValueError, parse, algs=[], use_defaults=True) + + # alg names must be < 10 chars + self.assertRaises(ValueError, parse, ["sha-1","shaxxx-190"]) + + # alg & checksum mutually exclusive. + self.assertRaises(RuntimeError, parse, ['sha-1'], + checksum={"sha-1": b"\x00"*20}) + + def test_90_checksums(self): + """test internal parsing of 'checksum' keyword""" + # check non-bytes checksum values are rejected + self.assertRaises(TypeError, self.handler, use_defaults=True, + checksum={'sha-1': u('X')*20}) + + # check sha-1 is required + self.assertRaises(ValueError, self.handler, use_defaults=True, + checksum={'sha-256': b'X'*32}) + + # XXX: anything else that's not tested by the other code already? 
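+
+    # Editorial aside -- a from-scratch check of the RFC 5802 example above.
+    # The stored "sha-1=..." field is the PBKDF2-HMAC-SHA1 "SaltedPassword",
+    # written in passlib's adapted base64 ("+" replaced by "."). The names
+    # below (the test id and ab64_decode) are ours, not part of passlib's API.
+    def test_93a_rfc5802_reference_sketch(self):
+        import base64, hashlib
+        def ab64_decode(data):
+            data = data.replace(".", "+")
+            return base64.b64decode(data + "=" * (-len(data) % 4))
+        salt = ab64_decode("QSXCR.Q6sek8bf92")
+        salted = hashlib.pbkdf2_hmac("sha1", b"pencil", salt, 4096)
+        self.assertEqual(salted, ab64_decode("HZbuOlKbWl.eR8AfIposuKbhX30"))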
+
+    def test_91_extract_digest_info(self):
+        """test scram.extract_digest_info()"""
+        edi = self.handler.extract_digest_info
+
+        # return appropriate value or throw KeyError
+        h = "$scram$10$AAAAAA$sha-1=AQ,bbb=Ag,ccc=Aw"
+        s = b'\x00'*4
+        self.assertEqual(edi(h,"SHA1"), (s,10, b'\x01'))
+        self.assertEqual(edi(h,"bbb"), (s,10, b'\x02'))
+        self.assertEqual(edi(h,"ccc"), (s,10, b'\x03'))
+        self.assertRaises(KeyError, edi, h, "ddd")
+
+        # config strings should cause value error.
+        c = "$scram$10$....$sha-1,bbb,ccc"
+        self.assertRaises(ValueError, edi, c, "sha-1")
+        self.assertRaises(ValueError, edi, c, "bbb")
+        self.assertRaises(ValueError, edi, c, "ddd")
+
+    def test_92_extract_digest_algs(self):
+        """test scram.extract_digest_algs()"""
+        eda = self.handler.extract_digest_algs
+
+        self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$'
+                             'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30'), ["sha-1"])
+
+        self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$'
+                             'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', format="hashlib"),
+                         ["sha1"])
+
+        self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$'
+                             'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,'
+                             'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,'
+                             'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/'
+                             'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ'),
+                         ["sha-1","sha-256","sha-512"])
+
+    def test_93_derive_digest(self):
+        """test scram.derive_digest()"""
+        # NOTE: this just does a light test, since derive_digest
+        # is used by hash / verify, and is tested pretty well via those.
+        hash = self.handler.derive_digest
+
+        # check various encodings of password work.
+        s1 = b'\x01\x02\x03'
+        d1 = b'\xb2\xfb\xab\x82[tNuPnI\x8aZZ\x19\x87\xcen\xe9\xd3'
+        self.assertEqual(hash(u("\u2168"), s1, 1000, 'sha-1'), d1)
+        self.assertEqual(hash(b"\xe2\x85\xa8", s1, 1000, 'SHA-1'), d1)
+        self.assertEqual(hash(u("IX"), s1, 1000, 'sha1'), d1)
+        self.assertEqual(hash(b"IX", s1, 1000, 'SHA1'), d1)
+
+        # check algs
+        self.assertEqual(hash("IX", s1, 1000, 'md5'),
+                         b'3\x19\x18\xc0\x1c/\xa8\xbf\xe4\xa3\xc2\x8eM\xe8od')
+        self.assertRaises(ValueError, hash, "IX", s1, 1000, 'sha-666')
+
+        # check rounds
+        self.assertRaises(ValueError, hash, "IX", s1, 0, 'sha-1')
+
+        # unicode salts accepted as of passlib 1.7 (previously caused TypeError)
+        self.assertEqual(hash(u("IX"), s1.decode("latin-1"), 1000, 'sha1'), d1)
+
+    def test_94_saslprep(self):
+        """test hash/verify use saslprep"""
+        # NOTE: this just does a light test that saslprep() is being
+        # called in various places, relying on saslprep()'s tests
+        # to verify full normalization behavior.
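+
+        # Editorial aside: the equivalences below reduce to saslprep
+        # (RFC 4013) -- NFKC normalization plus mapping characters such as
+        # the soft hyphen (U+00AD) to nothing. The NFKC half can be seen
+        # with the stdlib alone; the soft-hyphen removal is saslprep's extra
+        # mapping step, which plain NFKC does not perform:
+        import unicodedata
+        assert unicodedata.normalize("NFKC", u("\u2168")) == u("IX")     # ROMAN NUMERAL NINE
+        assert unicodedata.normalize("NFKC", u("o\u0301")) == u("\xf3")  # o + combining acute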
+ + # hash unnormalized + h = self.do_encrypt(u("I\u00ADX")) + self.assertTrue(self.do_verify(u("IX"), h)) + self.assertTrue(self.do_verify(u("\u2168"), h)) + + # hash normalized + h = self.do_encrypt(u("\xF3")) + self.assertTrue(self.do_verify(u("o\u0301"), h)) + self.assertTrue(self.do_verify(u("\u200Do\u0301"), h)) + + # throws error if forbidden char provided + self.assertRaises(ValueError, self.do_encrypt, u("\uFDD0")) + self.assertRaises(ValueError, self.do_verify, u("\uFDD0"), h) + + def test_94_using_w_default_algs(self, param="default_algs"): + """using() -- 'default_algs' parameter""" + # create subclass + handler = self.handler + orig = list(handler.default_algs) # in case it's modified in place + subcls = handler.using(**{param: "sha1,md5"}) + + # shouldn't have changed handler + self.assertEqual(handler.default_algs, orig) + + # should have own set + self.assertEqual(subcls.default_algs, ["md5", "sha-1"]) + + # test hash output + h1 = subcls.hash("dummy") + self.assertEqual(handler.extract_digest_algs(h1), ["md5", "sha-1"]) + + def test_94_using_w_algs(self): + """using() -- 'algs' parameter""" + self.test_94_using_w_default_algs(param="algs") + + def test_94_needs_update_algs(self): + """needs_update() -- algs setting""" + handler1 = self.handler.using(algs="sha1,md5") + + # shouldn't need update, has same algs + h1 = handler1.hash("dummy") + self.assertFalse(handler1.needs_update(h1)) + + # *currently* shouldn't need update, has superset of algs required by handler2 + # (may change this policy) + handler2 = handler1.using(algs="sha1") + self.assertFalse(handler2.needs_update(h1)) + + # should need update, doesn't have all algs required by handler3 + handler3 = handler1.using(algs="sha1,sha256") + self.assertTrue(handler3.needs_update(h1)) + + def test_95_context_algs(self): + """test handling of 'algs' in context object""" + handler = self.handler + from passlib.context import CryptContext + c1 = CryptContext(["scram"], scram__algs="sha1,md5") + + h = c1.hash("dummy") + self.assertEqual(handler.extract_digest_algs(h), ["md5", "sha-1"]) + self.assertFalse(c1.needs_update(h)) + + c2 = c1.copy(scram__algs="sha1") + self.assertFalse(c2.needs_update(h)) + + c2 = c1.copy(scram__algs="sha1,sha256") + self.assertTrue(c2.needs_update(h)) + + def test_96_full_verify(self): + """test verify(full=True) flag""" + def vpart(s, h): + return self.handler.verify(s, h) + def vfull(s, h): + return self.handler.verify(s, h, full=True) + + # reference + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertTrue(vfull('pencil', h)) + self.assertFalse(vfull('tape', h)) + + # catch truncated digests. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhV,' # -1 char + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertRaises(ValueError, vfull, 'pencil', h) + + # catch padded digests. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVYa,' # +1 char + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertRaises(ValueError, vfull, 'pencil', h) + + # catch hash containing digests belonging to diff passwords. 
+        # proper behavior for quick-verify (the default) is undefined,
+        # but full-verify should throw error.
+        h = ('$scram$4096$QSXCR.Q6sek8bf92$'
+             'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' # 'pencil'
+             'sha-256=R7RJDWIbeKRTFwhE9oxh04kab0CllrQ3kCcpZUcligc,' # 'tape'
+             'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' # 'pencil'
+             'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ')
+        self.assertTrue(vpart('tape', h))
+        self.assertFalse(vpart('pencil', h))
+        self.assertRaises(ValueError, vfull, 'pencil', h)
+        self.assertRaises(ValueError, vfull, 'tape', h)
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_scrypt.py b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_scrypt.py
new file mode 100644
index 0000000..5ab6d9f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/tests/test_handlers_scrypt.py
@@ -0,0 +1,111 @@
+"""passlib.tests.test_handlers_scrypt - tests for passlib hash algorithms"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+import logging; log = logging.getLogger(__name__)
+import warnings
+warnings.filterwarnings("ignore", ".*using builtin scrypt backend.*")
+# site
+# pkg
+from passlib import hash
+from passlib.tests.utils import HandlerCase, TEST_MODE
+from passlib.tests.test_handlers import UPASS_TABLE, PASS_TABLE_UTF8
+# module
+
+#=============================================================================
+# scrypt hash
+#=============================================================================
+class _scrypt_test(HandlerCase):
+    handler = hash.scrypt
+
+    known_correct_hashes = [
+        #
+        # excerpted from the test vectors in the scrypt whitepaper
+        # (http://www.tarsnap.com/scrypt/scrypt.pdf, appendix b),
+        # and encoded using passlib's custom format
+        #
+
+        # salt=b""
+        ("", "$scrypt$ln=4,r=1,p=1$$d9ZXYjhleyA7GcpCwYoEl/FrSETjB0ro39/6P+3iFEI"),
+
+        # salt=b"NaCl"
+        ("password", "$scrypt$ln=10,r=8,p=16$TmFDbA$/bq+HJ00cgB4VucZDQHp/nxq18vII3gw53N2Y0s3MWI"),
+
+        #
+        # custom
+        #
+
+        # simple test
+        ("test", '$scrypt$ln=8,r=8,p=1$wlhLyXmP8b53bm1NKYVQqg$mTpvG8lzuuDk+DWz8HZIB6Vum6erDuUm0As5yU+VxWA'),
+
+        # different block value
+        ("password", '$scrypt$ln=8,r=2,p=1$dO6d0xoDoLT2PofQGoNQag$g/Wf2A0vhHhaJM+addK61QPBthSmYB6uVTtQzh8CM3o'),
+
+        # different rounds
+        (UPASS_TABLE, '$scrypt$ln=7,r=8,p=1$jjGmtDamdA4BQAjBeA9BSA$OiWRHhQtpDx7M/793x6UXK14AD512jg/qNm/hkWZG4M'),
+
+        # alt encoding
+        (PASS_TABLE_UTF8, '$scrypt$ln=7,r=8,p=1$jjGmtDamdA4BQAjBeA9BSA$OiWRHhQtpDx7M/793x6UXK14AD512jg/qNm/hkWZG4M'),
+
+        # diff block & parallel counts as well
+        ("nacl", '$scrypt$ln=1,r=4,p=2$yhnD+J+Tci4lZCwFgHCuVQ$fAsEWmxSHuC0cHKMwKVFPzrQukgvK09Sj+NueTSxKds')
+    ]
+
+    if TEST_MODE("full"):
+        # add some hashes with a larger rounds value.
+ known_correct_hashes.extend([ + # + # from scrypt whitepaper + # + + # salt=b"SodiumChloride" + ("pleaseletmein", "$scrypt$ln=14,r=8,p=1$U29kaXVtQ2hsb3JpZGU" + "$cCO9yzr9c0hGHAbNgf046/2o+7qQT44+qbVD9lRdofI"), + + # + # openwall format (https://gitlab.com/jas/scrypt-unix-crypt/blob/master/unix-scrypt.txt) + # + ("pleaseletmein", + "$7$C6..../....SodiumChloride$kBGj9fHznVYFQMEn/qDCfrDevf9YDtcDdKvEqHJLV8D"), + + ]) + + known_malformed_hashes = [ + # missing 'p' value + '$scrypt$ln=10,r=1$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + + # rounds too low + '$scrypt$ln=0,r=1,p=1$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + + # invalid block size + '$scrypt$ln=10,r=A,p=1$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + + # r*p too large + '$scrypt$ln=10,r=134217728,p=8$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + ] + + def setUpWarnings(self): + super(_scrypt_test, self).setUpWarnings() + warnings.filterwarnings("ignore", ".*using builtin scrypt backend.*") + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 6) + super(_scrypt_test, self).populate_settings(kwds) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(4, 10, 6, 1) + +# create test cases for specific backends +scrypt_stdlib_test = _scrypt_test.create_backend_case("stdlib") +scrypt_scrypt_test = _scrypt_test.create_backend_case("scrypt") +scrypt_builtin_test = _scrypt_test.create_backend_case("builtin") + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_hosts.py b/venv/lib/python3.12/site-packages/passlib/tests/test_hosts.py new file mode 100644 index 0000000..cbf93ab --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_hosts.py @@ -0,0 +1,97 @@ +"""test passlib.hosts""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib import hosts, hash as hashmod +from passlib.utils import unix_crypt_schemes +from passlib.tests.utils import TestCase +# module + +#============================================================================= +# test predefined app contexts +#============================================================================= +class HostsTest(TestCase): + """perform general tests to make sure contexts work""" + # NOTE: these tests are not really comprehensive, + # since they would do little but duplicate + # the presets in apps.py + # + # they mainly try to ensure no typos + # or dynamic behavior foul-ups. 
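+
+    # Editorial aside -- a sketch of the public API these context checks
+    # drive; the default hashing scheme depends on the passlib version, so
+    # only version-stable behavior is asserted here.
+    def test_00_linux_context_sketch(self):
+        ctx = hosts.linux_context
+        h = ctx.hash("test")     # hashes with the context's default scheme
+        self.assertTrue(ctx.verify("test", h))
+        self.assertEqual(ctx.identify('$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0'),
+                         "md5_crypt")
+        self.assertEqual(ctx.identify("!"), "unix_disabled")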
+ + def check_unix_disabled(self, ctx): + for hash in [ + "", + "!", + "*", + "!$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0", + ]: + self.assertEqual(ctx.identify(hash), 'unix_disabled') + self.assertFalse(ctx.verify('test', hash)) + + def test_linux_context(self): + ctx = hosts.linux_context + for hash in [ + ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751'), + ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17'), + '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0', + 'kAJJz.Rwp0A/I', + ]: + self.assertTrue(ctx.verify("test", hash)) + self.check_unix_disabled(ctx) + + def test_bsd_contexts(self): + for ctx in [ + hosts.freebsd_context, + hosts.openbsd_context, + hosts.netbsd_context, + ]: + for hash in [ + '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0', + 'kAJJz.Rwp0A/I', + ]: + self.assertTrue(ctx.verify("test", hash)) + h1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + if hashmod.bcrypt.has_backend(): + self.assertTrue(ctx.verify("test", h1)) + else: + self.assertEqual(ctx.identify(h1), "bcrypt") + self.check_unix_disabled(ctx) + + def test_host_context(self): + ctx = getattr(hosts, "host_context", None) + if not ctx: + return self.skipTest("host_context not available on this platform") + + # validate schemes is non-empty, + # and contains unix_disabled + at least one real scheme + schemes = list(ctx.schemes()) + self.assertTrue(schemes, "appears to be unix system, but no known schemes supported by crypt") + self.assertTrue('unix_disabled' in schemes) + schemes.remove("unix_disabled") + self.assertTrue(schemes, "should have schemes beside fallback scheme") + self.assertTrue(set(unix_crypt_schemes).issuperset(schemes)) + + # check for hash support + self.check_unix_disabled(ctx) + for scheme, hash in [ + ("sha512_crypt", ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751')), + ("sha256_crypt", ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17')), + ("md5_crypt", '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0'), + ("des_crypt", 'kAJJz.Rwp0A/I'), + ]: + if scheme in schemes: + self.assertTrue(ctx.verify("test", hash)) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_pwd.py b/venv/lib/python3.12/site-packages/passlib/tests/test_pwd.py new file mode 100644 index 0000000..2c983cd --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_pwd.py @@ -0,0 +1,205 @@ +"""passlib.tests -- tests for passlib.pwd""" +#============================================================================= +# imports +#============================================================================= +# core +import itertools +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.tests.utils import TestCase +# local +__all__ = [ + "UtilsTest", + "GenerateTest", + "StrengthTest", +] + +#============================================================================= +# +#============================================================================= +class UtilsTest(TestCase): + """test internal utilities""" + descriptionPrefix = "passlib.pwd" + + def test_self_info_rate(self): + """_self_info_rate()""" + from passlib.pwd import _self_info_rate + + self.assertEqual(_self_info_rate(""), 0) + + 
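+
+        # Editorial aside: the expectations in this test match the Shannon
+        # entropy rate (bits per character) of the string's own character
+        # frequencies, H = -sum(p * log2(p)). That is our reading of the
+        # vectors, computed from scratch below -- not passlib's own code:
+        from collections import Counter
+        from math import log2
+        def shannon_rate(text):
+            n = len(text)
+            return sum(-(c / n) * log2(c / n) for c in Counter(text).values()) if n else 0
+        self.assertAlmostEqual(shannon_rate("abcdaaaa"), 1.5488, places=4)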
self.assertEqual(_self_info_rate("a" * 8), 0) + + self.assertEqual(_self_info_rate("ab"), 1) + self.assertEqual(_self_info_rate("ab" * 8), 1) + + self.assertEqual(_self_info_rate("abcd"), 2) + self.assertEqual(_self_info_rate("abcd" * 8), 2) + self.assertAlmostEqual(_self_info_rate("abcdaaaa"), 1.5488, places=4) + + # def test_total_self_info(self): + # """_total_self_info()""" + # from passlib.pwd import _total_self_info + # + # self.assertEqual(_total_self_info(""), 0) + # + # self.assertEqual(_total_self_info("a" * 8), 0) + # + # self.assertEqual(_total_self_info("ab"), 2) + # self.assertEqual(_total_self_info("ab" * 8), 16) + # + # self.assertEqual(_total_self_info("abcd"), 8) + # self.assertEqual(_total_self_info("abcd" * 8), 64) + # self.assertAlmostEqual(_total_self_info("abcdaaaa"), 12.3904, places=4) + +#============================================================================= +# word generation +#============================================================================= + +# import subject +from passlib.pwd import genword, default_charsets +ascii_62 = default_charsets['ascii_62'] +hex = default_charsets['hex'] + +class WordGeneratorTest(TestCase): + """test generation routines""" + descriptionPrefix = "passlib.pwd.genword()" + + def setUp(self): + super(WordGeneratorTest, self).setUp() + + # patch some RNG references so they're reproducible. + from passlib.pwd import SequenceGenerator + self.patchAttr(SequenceGenerator, "rng", + self.getRandom("pwd generator")) + + def assertResultContents(self, results, count, chars, unique=True): + """check result list matches expected count & charset""" + self.assertEqual(len(results), count) + if unique: + if unique is True: + unique = count + self.assertEqual(len(set(results)), unique) + self.assertEqual(set("".join(results)), set(chars)) + + def test_general(self): + """general behavior""" + + # basic usage + result = genword() + self.assertEqual(len(result), 9) + + # malformed keyword should have useful error. 
+ self.assertRaisesRegex(TypeError, "(?i)unexpected keyword.*badkwd", genword, badkwd=True) + + def test_returns(self): + """'returns' keyword""" + # returns=int option + results = genword(returns=5000) + self.assertResultContents(results, 5000, ascii_62) + + # returns=iter option + gen = genword(returns=iter) + results = [next(gen) for _ in range(5000)] + self.assertResultContents(results, 5000, ascii_62) + + # invalid returns option + self.assertRaises(TypeError, genword, returns='invalid-type') + + def test_charset(self): + """'charset' & 'chars' options""" + # charset option + results = genword(charset="hex", returns=5000) + self.assertResultContents(results, 5000, hex) + + # chars option + # there are 3**3=27 possible combinations + results = genword(length=3, chars="abc", returns=5000) + self.assertResultContents(results, 5000, "abc", unique=27) + + # chars + charset + self.assertRaises(TypeError, genword, chars='abc', charset='hex') + + # TODO: test rng option + +#============================================================================= +# phrase generation +#============================================================================= + +# import subject +from passlib.pwd import genphrase +simple_words = ["alpha", "beta", "gamma"] + +class PhraseGeneratorTest(TestCase): + """test generation routines""" + descriptionPrefix = "passlib.pwd.genphrase()" + + def assertResultContents(self, results, count, words, unique=True, sep=" "): + """check result list matches expected count & charset""" + self.assertEqual(len(results), count) + if unique: + if unique is True: + unique = count + self.assertEqual(len(set(results)), unique) + out = set(itertools.chain.from_iterable(elem.split(sep) for elem in results)) + self.assertEqual(out, set(words)) + + def test_general(self): + """general behavior""" + + # basic usage + result = genphrase() + self.assertEqual(len(result.split(" ")), 4) # 48 / log(7776, 2) ~= 3.7 -> 4 + + # malformed keyword should have useful error. 
+ self.assertRaisesRegex(TypeError, "(?i)unexpected keyword.*badkwd", genphrase, badkwd=True) + + def test_entropy(self): + """'length' & 'entropy' keywords""" + + # custom entropy + result = genphrase(entropy=70) + self.assertEqual(len(result.split(" ")), 6) # 70 / log(7776, 2) ~= 5.4 -> 6 + + # custom length + result = genphrase(length=3) + self.assertEqual(len(result.split(" ")), 3) + + # custom length < entropy + result = genphrase(length=3, entropy=48) + self.assertEqual(len(result.split(" ")), 4) + + # custom length > entropy + result = genphrase(length=4, entropy=12) + self.assertEqual(len(result.split(" ")), 4) + + def test_returns(self): + """'returns' keyword""" + # returns=int option + results = genphrase(returns=1000, words=simple_words) + self.assertResultContents(results, 1000, simple_words) + + # returns=iter option + gen = genphrase(returns=iter, words=simple_words) + results = [next(gen) for _ in range(1000)] + self.assertResultContents(results, 1000, simple_words) + + # invalid returns option + self.assertRaises(TypeError, genphrase, returns='invalid-type') + + def test_wordset(self): + """'wordset' & 'words' options""" + # wordset option + results = genphrase(words=simple_words, returns=5000) + self.assertResultContents(results, 5000, simple_words) + + # words option + results = genphrase(length=3, words=simple_words, returns=5000) + self.assertResultContents(results, 5000, simple_words, unique=3**3) + + # words + wordset + self.assertRaises(TypeError, genphrase, words=simple_words, wordset='bip39') + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_registry.py b/venv/lib/python3.12/site-packages/passlib/tests/test_registry.py new file mode 100644 index 0000000..8cec48d --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_registry.py @@ -0,0 +1,228 @@ +"""tests for passlib.hash -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from logging import getLogger +import warnings +import sys +# site +# pkg +from passlib import hash, registry, exc +from passlib.registry import register_crypt_handler, register_crypt_handler_path, \ + get_crypt_handler, list_crypt_handlers, _unload_handler_name as unload_handler_name +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase +# module +log = getLogger(__name__) + +#============================================================================= +# dummy handlers +# +# NOTE: these are defined outside of test case +# since they're used by test_register_crypt_handler_path(), +# which needs them to be available as module globals. +#============================================================================= +class dummy_0(uh.StaticHandler): + name = "dummy_0" + +class alt_dummy_0(uh.StaticHandler): + name = "dummy_0" + +dummy_x = 1 + +#============================================================================= +# test registry +#============================================================================= +class RegistryTest(TestCase): + + descriptionPrefix = "passlib.registry" + + def setUp(self): + super(RegistryTest, self).setUp() + + # backup registry state & restore it after test. 
+ locations = dict(registry._locations) + handlers = dict(registry._handlers) + def restore(): + registry._locations.clear() + registry._locations.update(locations) + registry._handlers.clear() + registry._handlers.update(handlers) + self.addCleanup(restore) + + def test_hash_proxy(self): + """test passlib.hash proxy object""" + # check dir works + dir(hash) + + # check repr works + repr(hash) + + # check non-existent attrs raise error + self.assertRaises(AttributeError, getattr, hash, 'fooey') + + # GAE tries to set __loader__, + # make sure that doesn't call register_crypt_handler. + old = getattr(hash, "__loader__", None) + test = object() + hash.__loader__ = test + self.assertIs(hash.__loader__, test) + if old is None: + del hash.__loader__ + self.assertFalse(hasattr(hash, "__loader__")) + else: + hash.__loader__ = old + self.assertIs(hash.__loader__, old) + + # check storing attr calls register_crypt_handler + class dummy_1(uh.StaticHandler): + name = "dummy_1" + hash.dummy_1 = dummy_1 + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + # check storing under wrong name results in error + self.assertRaises(ValueError, setattr, hash, "dummy_1x", dummy_1) + + def test_register_crypt_handler_path(self): + """test register_crypt_handler_path()""" + # NOTE: this messes w/ internals of registry, shouldn't be used publically. + paths = registry._locations + + # check namespace is clear + self.assertTrue('dummy_0' not in paths) + self.assertFalse(hasattr(hash, 'dummy_0')) + + # check invalid names are rejected + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", ".test_registry") + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", __name__ + ":dummy_0:xxx") + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", __name__ + ":dummy_0.xxx") + + # try lazy load + register_crypt_handler_path('dummy_0', __name__) + self.assertTrue('dummy_0' in list_crypt_handlers()) + self.assertTrue('dummy_0' not in list_crypt_handlers(loaded_only=True)) + self.assertIs(hash.dummy_0, dummy_0) + self.assertTrue('dummy_0' in list_crypt_handlers(loaded_only=True)) + unload_handler_name('dummy_0') + + # try lazy load w/ alt + register_crypt_handler_path('dummy_0', __name__ + ':alt_dummy_0') + self.assertIs(hash.dummy_0, alt_dummy_0) + unload_handler_name('dummy_0') + + # check lazy load w/ wrong type fails + register_crypt_handler_path('dummy_x', __name__) + self.assertRaises(TypeError, get_crypt_handler, 'dummy_x') + + # check lazy load w/ wrong name fails + register_crypt_handler_path('alt_dummy_0', __name__) + self.assertRaises(ValueError, get_crypt_handler, "alt_dummy_0") + unload_handler_name("alt_dummy_0") + + # TODO: check lazy load which calls register_crypt_handler (warning should be issued) + sys.modules.pop("passlib.tests._test_bad_register", None) + register_crypt_handler_path("dummy_bad", "passlib.tests._test_bad_register") + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", "xxxxxxxxxx", DeprecationWarning) + h = get_crypt_handler("dummy_bad") + from passlib.tests import _test_bad_register as tbr + self.assertIs(h, tbr.alt_dummy_bad) + + def test_register_crypt_handler(self): + """test register_crypt_handler()""" + + self.assertRaises(TypeError, register_crypt_handler, {}) + + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name=None))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="AB_CD"))) + self.assertRaises(ValueError, 
register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab-cd"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab__cd"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="default"))) + + class dummy_1(uh.StaticHandler): + name = "dummy_1" + + class dummy_1b(uh.StaticHandler): + name = "dummy_1" + + self.assertTrue('dummy_1' not in list_crypt_handlers()) + + register_crypt_handler(dummy_1) + register_crypt_handler(dummy_1) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + self.assertRaises(KeyError, register_crypt_handler, dummy_1b) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + register_crypt_handler(dummy_1b, force=True) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1b) + + self.assertTrue('dummy_1' in list_crypt_handlers()) + + def test_get_crypt_handler(self): + """test get_crypt_handler()""" + + class dummy_1(uh.StaticHandler): + name = "dummy_1" + + # without available handler + self.assertRaises(KeyError, get_crypt_handler, "dummy_1") + self.assertIs(get_crypt_handler("dummy_1", None), None) + + # already loaded handler + register_crypt_handler(dummy_1) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", "handler names should be lower-case, and use underscores instead of hyphens:.*", UserWarning) + + # already loaded handler, using incorrect name + self.assertIs(get_crypt_handler("DUMMY-1"), dummy_1) + + # lazy load of unloaded handler, using incorrect name + register_crypt_handler_path('dummy_0', __name__) + self.assertIs(get_crypt_handler("DUMMY-0"), dummy_0) + + # check system & private names aren't returned + from passlib import hash + hash.__dict__["_fake"] = "dummy" + for name in ["_fake", "__package__"]: + self.assertRaises(KeyError, get_crypt_handler, name) + self.assertIs(get_crypt_handler(name, None), None) + + def test_list_crypt_handlers(self): + """test list_crypt_handlers()""" + from passlib.registry import list_crypt_handlers + + # check system & private names aren't returned + hash.__dict__["_fake"] = "dummy" + for name in list_crypt_handlers(): + self.assertFalse(name.startswith("_"), "%r: " % name) + unload_handler_name("_fake") + + def test_handlers(self): + """verify we have tests for all builtin handlers""" + from passlib.registry import list_crypt_handlers + from passlib.tests.test_handlers import get_handler_case, conditionally_available_hashes + for name in list_crypt_handlers(): + # skip some wrappers that don't need independent testing + if name.startswith("ldap_") and name[5:] in list_crypt_handlers(): + continue + if name in ["roundup_plaintext"]: + continue + # check the remaining ones all have a handler + try: + self.assertTrue(get_handler_case(name)) + except exc.MissingBackendError: + if name in conditionally_available_hashes: # expected to fail on some setups + continue + raise + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_totp.py b/venv/lib/python3.12/site-packages/passlib/tests/test_totp.py new file mode 100644 index 0000000..604d2e9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_totp.py @@ -0,0 +1,1604 @@ +"""passlib.tests -- test passlib.totp""" +#============================================================================= +# 
imports +#============================================================================= +# core +import datetime +from functools import partial +import logging; log = logging.getLogger(__name__) +import sys +import time as _time +# site +# pkg +from passlib import exc +from passlib.utils.compat import unicode, u +from passlib.tests.utils import TestCase, time_call +# subject +from passlib import totp as totp_module +from passlib.totp import TOTP, AppWallet, AES_SUPPORT +# local +__all__ = [ + "EngineTest", +] + +#============================================================================= +# helpers +#============================================================================= + +# XXX: python 3 changed what error base64.b16decode() throws, from TypeError to base64.Error(). +# it wasn't until 3.3 that base32decode() also got changed. +# really should normalize this in the code to a single BinaryDecodeError, +# predicting this cross-version is getting unmanageable. +Base32DecodeError = Base16DecodeError = TypeError +if sys.version_info >= (3,0): + from binascii import Error as Base16DecodeError +if sys.version_info >= (3,3): + from binascii import Error as Base32DecodeError + +PASS1 = "abcdef" +PASS2 = b"\x00\xFF" +KEY1 = '4AOGGDBBQSYHNTUZ' +KEY1_RAW = b'\xe0\x1cc\x0c!\x84\xb0v\xce\x99' +KEY2_RAW = b'\xee]\xcb9\x870\x06 D\xc8y/\xa54&\xe4\x9c\x13\xc2\x18' +KEY3 = 'S3JDVB7QD2R7JPXX' # used in docstrings +KEY4 = 'JBSWY3DPEHPK3PXP' # from google keyuri spec +KEY4_RAW = b'Hello!\xde\xad\xbe\xef' + +# NOTE: for randtime() below, +# * want at least 7 bits on fractional side, to test fractional times to at least 0.01s precision +# * want at least 32 bits on integer side, to test for 32-bit epoch issues. +# most systems *should* have 53 bit mantissa, leaving plenty of room on both ends, +# so using (1<<37) as scale, to allocate 16 bits on fractional side, but generate reasonable # of > 1<<32 times. +# sanity check that we're above 44 ensures minimum requirements (44 - 37 int = 7 frac) +assert sys.float_info.radix == 2, "unexpected float_info.radix" +assert sys.float_info.mant_dig >= 44, "double precision unexpectedly small" + +def _get_max_time_t(): + """ + helper to calc max_time_t constant (see below) + """ + value = 1 << 30 # even 32 bit systems will handle this + year = 0 + while True: + next_value = value << 1 + try: + next_year = datetime.datetime.utcfromtimestamp(next_value-1).year + except (ValueError, OSError, OverflowError): + # utcfromtimestamp() may throw any of the following: + # + # * year out of range for datetime: + # py < 3.6 throws ValueError. + # (py 3.6.0 returns odd value instead, see workaround below) + # + # * int out of range for host's gmtime/localtime: + # py2 throws ValueError, py3 throws OSError. + # + # * int out of range for host's time_t: + # py2 throws ValueError, py3 throws OverflowError. + # + break + + # Workaround for python 3.6.0 issue -- + # Instead of throwing ValueError if year out of range for datetime, + # Python 3.6 will do some weird behavior that masks high bits + # e.g. (1<<40) -> year 36812, but (1<<41) -> year 6118. + # (Appears to be bug http://bugs.python.org/issue29100) + # This check stops at largest non-wrapping bit size. + if next_year < year: + break + + value = next_value + + # 'value-1' is maximum. + value -= 1 + + # check for crazy case where we're beyond what datetime supports + # (caused by bug 29100 again). 
compare to max value that datetime + # module supports -- datetime.datetime(9999, 12, 31, 23, 59, 59, 999999) + max_datetime_timestamp = 253402318800 + return min(value, max_datetime_timestamp) + +#: Rough approximation of max value acceptable by host's time_t. +#: This is frequently ~2**37 on 64 bit, and ~2**31 on 32 bit systems. +max_time_t = _get_max_time_t() + +def to_b32_size(raw_size): + return (raw_size * 8 + 4) // 5 + +#============================================================================= +# wallet +#============================================================================= +class AppWalletTest(TestCase): + descriptionPrefix = "passlib.totp.AppWallet" + + #============================================================================= + # constructor + #============================================================================= + + def test_secrets_types(self): + """constructor -- 'secrets' param -- input types""" + + # no secrets + wallet = AppWallet() + self.assertEqual(wallet._secrets, {}) + self.assertFalse(wallet.has_secrets) + + # dict + ref = {"1": b"aaa", "2": b"bbb"} + wallet = AppWallet(ref) + self.assertEqual(wallet._secrets, ref) + self.assertTrue(wallet.has_secrets) + + # # list + # wallet = AppWallet(list(ref.items())) + # self.assertEqual(wallet._secrets, ref) + + # # iter + # wallet = AppWallet(iter(ref.items())) + # self.assertEqual(wallet._secrets, ref) + + # "tag:value" string + wallet = AppWallet("\n 1: aaa\n# comment\n \n2: bbb ") + self.assertEqual(wallet._secrets, ref) + + # ensure ":" allowed in secret + wallet = AppWallet("1: aaa: bbb \n# comment\n \n2: bbb ") + self.assertEqual(wallet._secrets, {"1": b"aaa: bbb", "2": b"bbb"}) + + # json dict + wallet = AppWallet('{"1":"aaa","2":"bbb"}') + self.assertEqual(wallet._secrets, ref) + + # # json list + # wallet = AppWallet('[["1","aaa"],["2","bbb"]]') + # self.assertEqual(wallet._secrets, ref) + + # invalid type + self.assertRaises(TypeError, AppWallet, 123) + + # invalid json obj + self.assertRaises(TypeError, AppWallet, "[123]") + + # # invalid list items + # self.assertRaises(ValueError, AppWallet, ["1", b"aaa"]) + + # forbid empty secret + self.assertRaises(ValueError, AppWallet, {"1": "aaa", "2": ""}) + + def test_secrets_tags(self): + """constructor -- 'secrets' param -- tag/value normalization""" + + # test reference + ref = {"1": b"aaa", "02": b"bbb", "C": b"ccc"} + wallet = AppWallet(ref) + self.assertEqual(wallet._secrets, ref) + + # accept unicode + wallet = AppWallet({u("1"): b"aaa", u("02"): b"bbb", u("C"): b"ccc"}) + self.assertEqual(wallet._secrets, ref) + + # normalize int tags + wallet = AppWallet({1: b"aaa", "02": b"bbb", "C": b"ccc"}) + self.assertEqual(wallet._secrets, ref) + + # forbid non-str/int tags + self.assertRaises(TypeError, AppWallet, {(1,): "aaa"}) + + # accept valid tags + wallet = AppWallet({"1-2_3.4": b"aaa"}) + + # forbid invalid tags + self.assertRaises(ValueError, AppWallet, {"-abc": "aaa"}) + self.assertRaises(ValueError, AppWallet, {"ab*$": "aaa"}) + + # coerce value to bytes + wallet = AppWallet({"1": u("aaa"), "02": "bbb", "C": b"ccc"}) + self.assertEqual(wallet._secrets, ref) + + # forbid invalid value types + self.assertRaises(TypeError, AppWallet, {"1": 123}) + self.assertRaises(TypeError, AppWallet, {"1": None}) + self.assertRaises(TypeError, AppWallet, {"1": []}) + + # TODO: test secrets_path + + def test_default_tag(self): + """constructor -- 'default_tag' param""" + + # should sort numerically + wallet = AppWallet({"1": "one", "02": "two"}) + 
self.assertEqual(wallet.default_tag, "02") + self.assertEqual(wallet.get_secret(wallet.default_tag), b"two") + + # should sort alphabetically if non-digit present + wallet = AppWallet({"1": "one", "02": "two", "A": "aaa"}) + self.assertEqual(wallet.default_tag, "A") + self.assertEqual(wallet.get_secret(wallet.default_tag), b"aaa") + + # should honor custom tag + wallet = AppWallet({"1": "one", "02": "two", "A": "aaa"}, default_tag="1") + self.assertEqual(wallet.default_tag, "1") + self.assertEqual(wallet.get_secret(wallet.default_tag), b"one") + + # throw error on unknown value + self.assertRaises(KeyError, AppWallet, {"1": "one", "02": "two", "A": "aaa"}, + default_tag="B") + + # should be empty + wallet = AppWallet() + self.assertEqual(wallet.default_tag, None) + self.assertRaises(KeyError, wallet.get_secret, None) + + # TODO: test 'cost' param + + #============================================================================= + # encrypt_key() & decrypt_key() helpers + #============================================================================= + def require_aes_support(self, canary=None): + if AES_SUPPORT: + canary and canary() + else: + canary and self.assertRaises(RuntimeError, canary) + raise self.skipTest("'cryptography' package not installed") + + def test_decrypt_key(self): + """.decrypt_key()""" + + wallet = AppWallet({"1": PASS1, "2": PASS2}) + + # check for support + CIPHER1 = dict(v=1, c=13, s='6D7N7W53O7HHS37NLUFQ', + k='MHCTEGSNPFN5CGBJ', t='1') + self.require_aes_support(canary=partial(wallet.decrypt_key, CIPHER1)) + + # reference key + self.assertEqual(wallet.decrypt_key(CIPHER1)[0], KEY1_RAW) + + # different salt used to encrypt same raw key + CIPHER2 = dict(v=1, c=13, s='SPZJ54Y6IPUD2BYA4C6A', + k='ZGDXXTVQOWYLC2AU', t='1') + self.assertEqual(wallet.decrypt_key(CIPHER2)[0], KEY1_RAW) + + # different sized key, password, and cost + CIPHER3 = dict(v=1, c=8, s='FCCTARTIJWE7CPQHUDKA', + k='D2DRS32YESGHHINWFFCELKN7Z6NAHM4M', t='2') + self.assertEqual(wallet.decrypt_key(CIPHER3)[0], KEY2_RAW) + + # wrong password should silently result in wrong key + temp = CIPHER1.copy() + temp.update(t='2') + self.assertEqual(wallet.decrypt_key(temp)[0], b'\xafD6.F7\xeb\x19\x05Q') + + # missing tag should throw error + temp = CIPHER1.copy() + temp.update(t='3') + self.assertRaises(KeyError, wallet.decrypt_key, temp) + + # unknown version should throw error + temp = CIPHER1.copy() + temp.update(v=999) + self.assertRaises(ValueError, wallet.decrypt_key, temp) + + def test_decrypt_key_needs_recrypt(self): + """.decrypt_key() -- needs_recrypt flag""" + self.require_aes_support() + + wallet = AppWallet({"1": PASS1, "2": PASS2}, encrypt_cost=13) + + # ref should be accepted + ref = dict(v=1, c=13, s='AAAA', k='AAAA', t='2') + self.assertFalse(wallet.decrypt_key(ref)[1]) + + # wrong cost + temp = ref.copy() + temp.update(c=8) + self.assertTrue(wallet.decrypt_key(temp)[1]) + + # wrong tag + temp = ref.copy() + temp.update(t="1") + self.assertTrue(wallet.decrypt_key(temp)[1]) + + # XXX: should this check salt_size? 
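    # Side note: a minimal roundtrip sketch of the two helpers tested above and below
    # (illustrative helper, not run as a test; assumes the 'cryptography' package is
    # installed so AES_SUPPORT is True; the password is a stand-in value):
    def _demo_key_roundtrip(self):
        wallet = AppWallet({"1": "hunter2"})
        enc = wallet.encrypt_key(b"\x00" * 16)          # dict with keys v/t/c/s/k
        raw, needs_recrypt = wallet.decrypt_key(enc)    # (key bytes, recrypt flag)
        assert raw == b"\x00" * 16 and not needs_recrypt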
+ + def assertSaneResult(self, result, wallet, key, tag="1", + needs_recrypt=False): + """check encrypt_key() result has expected format""" + + self.assertEqual(set(result), set(["v", "t", "c", "s", "k"])) + + self.assertEqual(result['v'], 1) + self.assertEqual(result['t'], tag) + self.assertEqual(result['c'], wallet.encrypt_cost) + + self.assertEqual(len(result['s']), to_b32_size(wallet.salt_size)) + self.assertEqual(len(result['k']), to_b32_size(len(key))) + + result_key, result_needs_recrypt = wallet.decrypt_key(result) + self.assertEqual(result_key, key) + self.assertEqual(result_needs_recrypt, needs_recrypt) + + def test_encrypt_key(self): + """.encrypt_key()""" + + # check for support + wallet = AppWallet({"1": PASS1}, encrypt_cost=5) + self.require_aes_support(canary=partial(wallet.encrypt_key, KEY1_RAW)) + + # basic behavior + result = wallet.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet, KEY1_RAW) + + # creates new salt each time + other = wallet.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet, KEY1_RAW) + self.assertNotEqual(other['s'], result['s']) + self.assertNotEqual(other['k'], result['k']) + + # honors custom cost + wallet2 = AppWallet({"1": PASS1}, encrypt_cost=6) + result = wallet2.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet2, KEY1_RAW) + + # honors default tag + wallet2 = AppWallet({"1": PASS1, "2": PASS2}) + result = wallet2.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet2, KEY1_RAW, tag="2") + + # honor salt size + wallet2 = AppWallet({"1": PASS1}) + wallet2.salt_size = 64 + result = wallet2.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet2, KEY1_RAW) + + # larger key + result = wallet.encrypt_key(KEY2_RAW) + self.assertSaneResult(result, wallet, KEY2_RAW) + + # border case: empty key + # XXX: might want to allow this, but documenting behavior for now + self.assertRaises(ValueError, wallet.encrypt_key, b"") + + def test_encrypt_cost_timing(self): + """verify cost parameter via timing""" + self.require_aes_support() + + # time default cost + wallet = AppWallet({"1": "aaa"}) + wallet.encrypt_cost -= 2 + delta, _ = time_call(partial(wallet.encrypt_key, KEY1_RAW), maxtime=0) + + # this should take (2**3=8) times as long + wallet.encrypt_cost += 3 + delta2, _ = time_call(partial(wallet.encrypt_key, KEY1_RAW), maxtime=0) + + # TODO: rework timing test here to inject mock pbkdf2_hmac() function instead; + # and test that it's being invoked w/ proper options. + self.assertAlmostEqual(delta2, delta*8, delta=(delta*8)*0.5) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# common OTP code +#============================================================================= + +#: used as base value for RFC test vector keys +RFC_KEY_BYTES_20 = "12345678901234567890".encode("ascii") +RFC_KEY_BYTES_32 = (RFC_KEY_BYTES_20*2)[:32] +RFC_KEY_BYTES_64 = (RFC_KEY_BYTES_20*4)[:64] + +# TODO: this class is separate from TotpTest due to historical issue, +# when there was a base class, and a separate HOTP class. +# these test case classes should probably be combined. 
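# For orientation, a minimal sketch of the flow the class below tests, reusing the
# key/token/time triple from test_match_w_token_normalization() (illustrative only,
# not invoked by the suite):
def _demo_totp_flow():
    otp = TOTP('otxl2f5cctbprpzx')            # provisioning: persist otp.to_dict() server-side
    token = otp.generate().token              # client side: 6-digit code for the current time
    return otp.match('332136', 1412889861)    # server side: raises InvalidTokenError on failure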
+class TotpTest(TestCase): + """ + common code shared by TotpTest & HotpTest + """ + #============================================================================= + # class attrs + #============================================================================= + + descriptionPrefix = "passlib.totp.TOTP" + + #============================================================================= + # setup + #============================================================================= + def setUp(self): + super(TotpTest, self).setUp() + + # clear norm_hash_name() cache so 'unknown hash' warnings get emitted each time + from passlib.crypto.digest import lookup_hash + lookup_hash.clear_cache() + + # monkeypatch module's rng to be deterministic + self.patchAttr(totp_module, "rng", self.getRandom()) + + #============================================================================= + # general helpers + #============================================================================= + def randtime(self): + """ + helper to generate random epoch time + :returns float: epoch time + """ + return self.getRandom().random() * max_time_t + + def randotp(self, cls=None, **kwds): + """ + helper which generates a random TOTP instance. + """ + rng = self.getRandom() + if "key" not in kwds: + kwds['new'] = True + kwds.setdefault("digits", rng.randint(6, 10)) + kwds.setdefault("alg", rng.choice(["sha1", "sha256", "sha512"])) + kwds.setdefault("period", rng.randint(10, 120)) + return (cls or TOTP)(**kwds) + + def test_randotp(self): + """ + internal test -- randotp() + """ + otp1 = self.randotp() + otp2 = self.randotp() + + self.assertNotEqual(otp1.key, otp2.key, "key not randomized:") + + # NOTE: has (1/5)**10 odds of failure + for _ in range(10): + if otp1.digits != otp2.digits: + break + otp2 = self.randotp() + else: + self.fail("digits not randomized") + + # NOTE: has (1/3)**10 odds of failure + for _ in range(10): + if otp1.alg != otp2.alg: + break + otp2 = self.randotp() + else: + self.fail("alg not randomized") + + #============================================================================= + # reference vector helpers + #============================================================================= + + #: default options used by test vectors (unless otherwise stated) + vector_defaults = dict(format="base32", alg="sha1", period=30, digits=8) + + #: various TOTP test vectors, + #: each element in list has format [options, (time, token <, int(expires)>), ...] + vectors = [ + + #------------------------------------------------------------------------- + # passlib test vectors + #------------------------------------------------------------------------- + + # 10 byte key, 6 digits + [dict(key="ACDEFGHJKL234567", digits=6), + # test fencepost to make sure we're rounding right + (1412873399, '221105'), # == 29 mod 30 + (1412873400, '178491'), # == 0 mod 30 + (1412873401, '178491'), # == 1 mod 30 + (1412873429, '178491'), # == 29 mod 30 + (1412873430, '915114'), # == 0 mod 30 + ], + + # 10 byte key, 8 digits + [dict(key="ACDEFGHJKL234567", digits=8), + # should be same as 6 digits (above), but w/ 2 more digits on left side of token. 
+ (1412873399, '20221105'), # == 29 mod 30 + (1412873400, '86178491'), # == 0 mod 30 + (1412873401, '86178491'), # == 1 mod 30 + (1412873429, '86178491'), # == 29 mod 30 + (1412873430, '03915114'), # == 0 mod 30 + ], + + # sanity check on key used in docstrings + [dict(key="S3JD-VB7Q-D2R7-JPXX", digits=6), + (1419622709, '000492'), + (1419622739, '897212'), + ], + + #------------------------------------------------------------------------- + # reference vectors taken from http://tools.ietf.org/html/rfc6238, appendix B + # NOTE: while appendix B states same key used for all tests, the reference + # code in the appendix repeats the key up to the alg's block size, + # and uses *that* as the secret... so that's what we're doing here. + #------------------------------------------------------------------------- + + # sha1 test vectors + [dict(key=RFC_KEY_BYTES_20, format="raw", alg="sha1"), + (59, '94287082'), + (1111111109, '07081804'), + (1111111111, '14050471'), + (1234567890, '89005924'), + (2000000000, '69279037'), + (20000000000, '65353130'), + ], + + # sha256 test vectors + [dict(key=RFC_KEY_BYTES_32, format="raw", alg="sha256"), + (59, '46119246'), + (1111111109, '68084774'), + (1111111111, '67062674'), + (1234567890, '91819424'), + (2000000000, '90698825'), + (20000000000, '77737706'), + ], + + # sha512 test vectors + [dict(key=RFC_KEY_BYTES_64, format="raw", alg="sha512"), + (59, '90693936'), + (1111111109, '25091201'), + (1111111111, '99943326'), + (1234567890, '93441116'), + (2000000000, '38618901'), + (20000000000, '47863826'), + ], + + #------------------------------------------------------------------------- + # other test vectors + #------------------------------------------------------------------------- + + # generated at http://blog.tinisles.com/2011/10/google-authenticator-one-time-password-algorithm-in-javascript + [dict(key="JBSWY3DPEHPK3PXP", digits=6), (1409192430, '727248'), (1419890990, '122419')], + [dict(key="JBSWY3DPEHPK3PXP", digits=9, period=41), (1419891152, '662331049')], + + # found in https://github.com/eloquent/otis/blob/develop/test/suite/Totp/Value/TotpValueGeneratorTest.php, line 45 + [dict(key=RFC_KEY_BYTES_20, format="raw", period=60), (1111111111, '19360094')], + [dict(key=RFC_KEY_BYTES_32, format="raw", alg="sha256", period=60), (1111111111, '40857319')], + [dict(key=RFC_KEY_BYTES_64, format="raw", alg="sha512", period=60), (1111111111, '37023009')], + + ] + + def iter_test_vectors(self): + """ + helper to iterate over test vectors. + yields ``(totp, time, token, expires, prefix)`` tuples. + """ + from passlib.totp import TOTP + for row in self.vectors: + kwds = self.vector_defaults.copy() + kwds.update(row[0]) + for entry in row[1:]: + if len(entry) == 3: + time, token, expires = entry + else: + time, token = entry + expires = None + # NOTE: not re-using otp between calls so that stateful methods + # (like .match) don't have problems. 
+ log.debug("test vector: %r time=%r token=%r expires=%r", kwds, time, token, expires) + otp = TOTP(**kwds) + prefix = "alg=%r time=%r token=%r: " % (otp.alg, time, token) + yield otp, time, token, expires, prefix + + #============================================================================= + # constructor tests + #============================================================================= + def test_ctor_w_new(self): + """constructor -- 'new' parameter""" + + # exactly one of 'key' or 'new' is required + self.assertRaises(TypeError, TOTP) + self.assertRaises(TypeError, TOTP, key='4aoggdbbqsyhntuz', new=True) + + # generates new key + otp = TOTP(new=True) + otp2 = TOTP(new=True) + self.assertNotEqual(otp.key, otp2.key) + + def test_ctor_w_size(self): + """constructor -- 'size' parameter""" + + # should default to digest size, per RFC + self.assertEqual(len(TOTP(new=True, alg="sha1").key), 20) + self.assertEqual(len(TOTP(new=True, alg="sha256").key), 32) + self.assertEqual(len(TOTP(new=True, alg="sha512").key), 64) + + # explicit key size + self.assertEqual(len(TOTP(new=True, size=10).key), 10) + self.assertEqual(len(TOTP(new=True, size=16).key), 16) + + # for new=True, maximum size enforced (based on alg) + self.assertRaises(ValueError, TOTP, new=True, size=21, alg="sha1") + + # for new=True, minimum size enforced + self.assertRaises(ValueError, TOTP, new=True, size=9) + + # for existing key, minimum size is only warned about + with self.assertWarningList([ + dict(category=exc.PasslibSecurityWarning, message_re=".*for security purposes, secret key must be.*") + ]): + _ = TOTP('0A'*9, 'hex') + + def test_ctor_w_key_and_format(self): + """constructor -- 'key' and 'format' parameters""" + + # handle base32 encoding (the default) + self.assertEqual(TOTP(KEY1).key, KEY1_RAW) + + # .. w/ lower case + self.assertEqual(TOTP(KEY1.lower()).key, KEY1_RAW) + + # .. w/ spaces (e.g. user-entered data) + self.assertEqual(TOTP(' 4aog gdbb qsyh ntuz ').key, KEY1_RAW) + + # .. w/ invalid char + self.assertRaises(Base32DecodeError, TOTP, 'ao!ggdbbqsyhntuz') + + # handle hex encoding + self.assertEqual(TOTP('e01c630c2184b076ce99', 'hex').key, KEY1_RAW) + + # .. 
w/ invalid char + self.assertRaises(Base16DecodeError, TOTP, 'X01c630c2184b076ce99', 'hex') + + # handle raw bytes + self.assertEqual(TOTP(KEY1_RAW, "raw").key, KEY1_RAW) + + def test_ctor_w_alg(self): + """constructor -- 'alg' parameter""" + + # normalize hash names + self.assertEqual(TOTP(KEY1, alg="SHA-256").alg, "sha256") + self.assertEqual(TOTP(KEY1, alg="SHA256").alg, "sha256") + + # invalid alg + self.assertRaises(ValueError, TOTP, KEY1, alg="SHA-333") + + def test_ctor_w_digits(self): + """constructor -- 'digits' parameter""" + self.assertRaises(ValueError, TOTP, KEY1, digits=5) + self.assertEqual(TOTP(KEY1, digits=6).digits, 6) # min value + self.assertEqual(TOTP(KEY1, digits=10).digits, 10) # max value + self.assertRaises(ValueError, TOTP, KEY1, digits=11) + + def test_ctor_w_period(self): + """constructor -- 'period' parameter""" + + # default + self.assertEqual(TOTP(KEY1).period, 30) + + # explicit value + self.assertEqual(TOTP(KEY1, period=63).period, 63) + + # reject wrong type + self.assertRaises(TypeError, TOTP, KEY1, period=1.5) + self.assertRaises(TypeError, TOTP, KEY1, period='abc') + + # reject non-positive values + self.assertRaises(ValueError, TOTP, KEY1, period=0) + self.assertRaises(ValueError, TOTP, KEY1, period=-1) + + def test_ctor_w_label(self): + """constructor -- 'label' parameter""" + self.assertEqual(TOTP(KEY1).label, None) + self.assertEqual(TOTP(KEY1, label="foo@bar").label, "foo@bar") + self.assertRaises(ValueError, TOTP, KEY1, label="foo:bar") + + def test_ctor_w_issuer(self): + """constructor -- 'issuer' parameter""" + self.assertEqual(TOTP(KEY1).issuer, None) + self.assertEqual(TOTP(KEY1, issuer="foo.com").issuer, "foo.com") + self.assertRaises(ValueError, TOTP, KEY1, issuer="foo.com:bar") + + #============================================================================= + # using() tests + #============================================================================= + + # TODO: test using() w/ 'digits', 'alg', 'issue', 'wallet', **wallet_kwds + + def test_using_w_period(self): + """using() -- 'period' parameter""" + + # default + self.assertEqual(TOTP(KEY1).period, 30) + + # explicit value + self.assertEqual(TOTP.using(period=63)(KEY1).period, 63) + + # reject wrong type + self.assertRaises(TypeError, TOTP.using, period=1.5) + self.assertRaises(TypeError, TOTP.using, period='abc') + + # reject non-positive values + self.assertRaises(ValueError, TOTP.using, period=0) + self.assertRaises(ValueError, TOTP.using, period=-1) + + def test_using_w_now(self): + """using -- 'now' parameter""" + + # NOTE: reading time w/ normalize_time() to make sure custom .now actually has effect. 
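        # Side note: freezing the clock this way is how later tests get reproducible
        # results, e.g. (key/token/time taken from test_verify() below):
        #
        #     TotpFactory = TOTP.using(now=lambda: 1412889861)
        #     TotpFactory('otxl2f5cctbprpzx').match('332136')   # 'time' defaults to now()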
+ + # default -- time.time + otp = self.randotp() + self.assertIs(otp.now, _time.time) + self.assertAlmostEqual(otp.normalize_time(None), int(_time.time())) + + # custom function + counter = [123.12] + def now(): + counter[0] += 1 + return counter[0] + otp = self.randotp(cls=TOTP.using(now=now)) + # NOTE: TOTP() constructor invokes this as part of test, using up counter values 124 & 125 + self.assertEqual(otp.normalize_time(None), 126) + self.assertEqual(otp.normalize_time(None), 127) + + # require callable + self.assertRaises(TypeError, TOTP.using, now=123) + + # require returns int/float + msg_re = r"now\(\) function must return non-negative" + self.assertRaisesRegex(AssertionError, msg_re, TOTP.using, now=lambda: 'abc') + + # require returns non-negative value + self.assertRaisesRegex(AssertionError, msg_re, TOTP.using, now=lambda: -1) + + #============================================================================= + # internal method tests + #============================================================================= + + def test_normalize_token_instance(self, otp=None): + """normalize_token() -- instance method""" + if otp is None: + otp = self.randotp(digits=7) + + # unicode & bytes + self.assertEqual(otp.normalize_token(u('1234567')), '1234567') + self.assertEqual(otp.normalize_token(b'1234567'), '1234567') + + # int + self.assertEqual(otp.normalize_token(1234567), '1234567') + + # int which needs 0 padding + self.assertEqual(otp.normalize_token(234567), '0234567') + + # reject wrong types (float, None) + self.assertRaises(TypeError, otp.normalize_token, 1234567.0) + self.assertRaises(TypeError, otp.normalize_token, None) + + # too few digits + self.assertRaises(exc.MalformedTokenError, otp.normalize_token, '123456') + + # too many digits + self.assertRaises(exc.MalformedTokenError, otp.normalize_token, '01234567') + self.assertRaises(exc.MalformedTokenError, otp.normalize_token, 12345678) + + def test_normalize_token_class(self): + """normalize_token() -- class method""" + self.test_normalize_token_instance(otp=TOTP.using(digits=7)) + + def test_normalize_time(self): + """normalize_time()""" + TotpFactory = TOTP.using() + otp = self.randotp(TotpFactory) + + for _ in range(10): + time = self.randtime() + tint = int(time) + + self.assertEqual(otp.normalize_time(time), tint) + self.assertEqual(otp.normalize_time(tint + 0.5), tint) + + self.assertEqual(otp.normalize_time(tint), tint) + + dt = datetime.datetime.utcfromtimestamp(time) + self.assertEqual(otp.normalize_time(dt), tint) + + orig = TotpFactory.now + try: + TotpFactory.now = staticmethod(lambda: time) + self.assertEqual(otp.normalize_time(None), tint) + finally: + TotpFactory.now = orig + + self.assertRaises(TypeError, otp.normalize_time, '1234') + + #============================================================================= + # key attr tests + #============================================================================= + + def test_key_attrs(self): + """pretty_key() and .key attributes""" + rng = self.getRandom() + + # test key attrs + otp = TOTP(KEY1_RAW, "raw") + self.assertEqual(otp.key, KEY1_RAW) + self.assertEqual(otp.hex_key, 'e01c630c2184b076ce99') + self.assertEqual(otp.base32_key, KEY1) + + # test pretty_key() + self.assertEqual(otp.pretty_key(), '4AOG-GDBB-QSYH-NTUZ') + self.assertEqual(otp.pretty_key(sep=" "), '4AOG GDBB QSYH NTUZ') + self.assertEqual(otp.pretty_key(sep=False), KEY1) + self.assertEqual(otp.pretty_key(format="hex"), 'e01c-630c-2184-b076-ce99') + + # quick fuzz test: make sure attr access works 
for random key & random size + otp = TOTP(new=True, size=rng.randint(10, 20)) + _ = otp.hex_key + _ = otp.base32_key + _ = otp.pretty_key() + + #============================================================================= + # generate() tests + #============================================================================= + def test_totp_token(self): + """generate() -- TotpToken() class""" + from passlib.totp import TOTP, TotpToken + + # test known set of values + otp = TOTP('s3jdvb7qd2r7jpxx') + result = otp.generate(1419622739) + self.assertIsInstance(result, TotpToken) + self.assertEqual(result.token, '897212') + self.assertEqual(result.counter, 47320757) + ##self.assertEqual(result.start_time, 1419622710) + self.assertEqual(result.expire_time, 1419622740) + self.assertEqual(result, ('897212', 1419622740)) + self.assertEqual(len(result), 2) + self.assertEqual(result[0], '897212') + self.assertEqual(result[1], 1419622740) + self.assertRaises(IndexError, result.__getitem__, -3) + self.assertRaises(IndexError, result.__getitem__, 2) + self.assertTrue(result) + + # time-dependent bits... + otp.now = lambda : 1419622739.5 + self.assertEqual(result.remaining, 0.5) + self.assertTrue(result.valid) + + otp.now = lambda : 1419622741 + self.assertEqual(result.remaining, 0) + self.assertFalse(result.valid) + + # same time -- shouldn't return same object, but should be equal + result2 = otp.generate(1419622739) + self.assertIsNot(result2, result) + self.assertEqual(result2, result) + + # diff time in period -- shouldn't return same object, but should be equal + result3 = otp.generate(1419622711) + self.assertIsNot(result3, result) + self.assertEqual(result3, result) + + # shouldn't be equal + result4 = otp.generate(1419622999) + self.assertNotEqual(result4, result) + + def test_generate(self): + """generate()""" + from passlib.totp import TOTP + + # generate token + otp = TOTP(new=True) + time = self.randtime() + result = otp.generate(time) + token = result.token + self.assertIsInstance(token, unicode) + start_time = result.counter * 30 + + # should generate same token for next 29s + self.assertEqual(otp.generate(start_time + 29).token, token) + + # and new one at 30s + self.assertNotEqual(otp.generate(start_time + 30).token, token) + + # verify round-trip conversion of datetime + dt = datetime.datetime.utcfromtimestamp(time) + self.assertEqual(int(otp.normalize_time(dt)), int(time)) + + # handle datetime object + self.assertEqual(otp.generate(dt).token, token) + + # omitting value should use current time + otp2 = TOTP.using(now=lambda: time)(key=otp.base32_key) + self.assertEqual(otp2.generate().token, token) + + # reject invalid time + self.assertRaises(ValueError, otp.generate, -1) + + def test_generate_w_reference_vectors(self): + """generate() -- reference vectors""" + for otp, time, token, expires, prefix in self.iter_test_vectors(): + # should output correct token for specified time + result = otp.generate(time) + self.assertEqual(result.token, token, msg=prefix) + self.assertEqual(result.counter, time // otp.period, msg=prefix) + if expires: + self.assertEqual(result.expire_time, expires) + + #============================================================================= + # TotpMatch() tests + #============================================================================= + + def assertTotpMatch(self, match, time, skipped=0, period=30, window=30, msg=''): + from passlib.totp import TotpMatch + + # test type + self.assertIsInstance(match, TotpMatch) + + # totp sanity check + 
self.assertIsInstance(match.totp, TOTP) + self.assertEqual(match.totp.period, period) + + # test attrs + self.assertEqual(match.time, time, msg=msg + " matched time:") + expected = time // period + counter = expected + skipped + self.assertEqual(match.counter, counter, msg=msg + " matched counter:") + self.assertEqual(match.expected_counter, expected, msg=msg + " expected counter:") + self.assertEqual(match.skipped, skipped, msg=msg + " skipped:") + self.assertEqual(match.cache_seconds, period + window) + expire_time = (counter + 1) * period + self.assertEqual(match.expire_time, expire_time) + self.assertEqual(match.cache_time, expire_time + window) + + # test tuple + self.assertEqual(len(match), 2) + self.assertEqual(match, (counter, time)) + self.assertRaises(IndexError, match.__getitem__, -3) + self.assertEqual(match[0], counter) + self.assertEqual(match[1], time) + self.assertRaises(IndexError, match.__getitem__, 2) + + # test bool + self.assertTrue(match) + + def test_totp_match_w_valid_token(self): + """match() -- valid TotpMatch object""" + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + result = otp.match(token, time) + self.assertTotpMatch(result, time=time, skipped=0) + + def test_totp_match_w_older_token(self): + """match() -- valid TotpMatch object with future token""" + from passlib.totp import TotpMatch + + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + result = otp.match(token, time - 30) + self.assertTotpMatch(result, time=time - 30, skipped=1) + + def test_totp_match_w_new_token(self): + """match() -- valid TotpMatch object with past token""" + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + result = otp.match(token, time + 30) + self.assertTotpMatch(result, time=time + 30, skipped=-1) + + def test_totp_match_w_invalid_token(self): + """match() -- invalid TotpMatch object""" + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + self.assertRaises(exc.InvalidTokenError, otp.match, token, time + 60) + + #============================================================================= + # match() tests + #============================================================================= + + def assertVerifyMatches(self, expect_skipped, token, time, # * + otp, gen_time=None, **kwds): + """helper to test otp.match() output is correct""" + # NOTE: TotpMatch return type tested more thoroughly above ^^^ + msg = "key=%r alg=%r period=%r token=%r gen_time=%r time=%r:" % \ + (otp.base32_key, otp.alg, otp.period, token, gen_time, time) + result = otp.match(token, time, **kwds) + self.assertTotpMatch(result, + time=otp.normalize_time(time), + period=otp.period, + window=kwds.get("window", 30), + skipped=expect_skipped, + msg=msg) + + def assertVerifyRaises(self, exc_class, token, time, # * + otp, gen_time=None, + **kwds): + """helper to test otp.match() throws correct error""" + # NOTE: TotpMatch return type tested more thoroughly above ^^^ + msg = "key=%r alg=%r period=%r token=%r gen_time=%r time=%r:" % \ + (otp.base32_key, otp.alg, otp.period, token, gen_time, time) + return self.assertRaises(exc_class, otp.match, token, time, + __msg__=msg, **kwds) + + def test_match_w_window(self): + """match() -- 'time' and 'window' parameters""" + + # init generator & helper + otp = self.randotp() + period = otp.period + time = self.randtime() + token = otp.generate(time).token + common = dict(otp=otp, gen_time=time) + assertMatches 
= partial(self.assertVerifyMatches, **common) + assertRaises = partial(self.assertVerifyRaises, **common) + + #------------------------------- + # basic validation, and 'window' parameter + #------------------------------- + + # validate against previous counter (passes if window >= period) + assertRaises(exc.InvalidTokenError, token, time - period, window=0) + assertMatches(+1, token, time - period, window=period) + assertMatches(+1, token, time - period, window=2 * period) + + # validate against current counter + assertMatches(0, token, time, window=0) + + # validate against next counter (passes if window >= period) + assertRaises(exc.InvalidTokenError, token, time + period, window=0) + assertMatches(-1, token, time + period, window=period) + assertMatches(-1, token, time + period, window=2 * period) + + # validate against two time steps later (should never pass) + assertRaises(exc.InvalidTokenError, token, time + 2 * period, window=0) + assertRaises(exc.InvalidTokenError, token, time + 2 * period, window=period) + assertMatches(-2, token, time + 2 * period, window=2 * period) + + # TODO: test window values that aren't multiples of period + # (esp ensure counter rounding works correctly) + + #------------------------------- + # time normalization + #------------------------------- + + # handle datetimes + dt = datetime.datetime.utcfromtimestamp(time) + assertMatches(0, token, dt, window=0) + + # reject invalid time + assertRaises(ValueError, token, -1) + + def test_match_w_skew(self): + """match() -- 'skew' parameters""" + # init generator & helper + otp = self.randotp() + period = otp.period + time = self.randtime() + common = dict(otp=otp, gen_time=time) + assertMatches = partial(self.assertVerifyMatches, **common) + assertRaises = partial(self.assertVerifyRaises, **common) + + # assume client is running far behind server / has excessive transmission delay + skew = 3 * period + behind_token = otp.generate(time - skew).token + assertRaises(exc.InvalidTokenError, behind_token, time, window=0) + assertMatches(-3, behind_token, time, window=0, skew=-skew) + + # assume client is running far ahead of server + ahead_token = otp.generate(time + skew).token + assertRaises(exc.InvalidTokenError, ahead_token, time, window=0) + assertMatches(+3, ahead_token, time, window=0, skew=skew) + + # TODO: test skew + larger window + + def test_match_w_reuse(self): + """match() -- 'reuse' and 'last_counter' parameters""" + + # init generator & helper + otp = self.randotp() + period = otp.period + time = self.randtime() + tdata = otp.generate(time) + token = tdata.token + counter = tdata.counter + expire_time = tdata.expire_time + common = dict(otp=otp, gen_time=time) + assertMatches = partial(self.assertVerifyMatches, **common) + assertRaises = partial(self.assertVerifyRaises, **common) + + # last counter unset -- + # previous period's token should count as valid + assertMatches(-1, token, time + period, window=period) + + # last counter set 2 periods ago -- + # previous period's token should count as valid + assertMatches(-1, token, time + period, last_counter=counter-1, + window=period) + + # last counter set 2 periods ago -- + # 2 periods ago's token should NOT count as valid + assertRaises(exc.InvalidTokenError, token, time + 2 * period, + last_counter=counter, window=period) + + # last counter set 1 period ago -- + # previous period's token should now be rejected as 'used' + err = assertRaises(exc.UsedTokenError, token, time + period, + last_counter=counter, window=period) + 
self.assertEqual(err.expire_time, expire_time) + + # last counter set to current period -- + # current period's token should be rejected + err = assertRaises(exc.UsedTokenError, token, time, + last_counter=counter, window=0) + self.assertEqual(err.expire_time, expire_time) + + def test_match_w_token_normalization(self): + """match() -- token normalization""" + # setup test helper + otp = TOTP('otxl2f5cctbprpzx') + match = otp.match + time = 1412889861 + + # separators / spaces should be stripped (orig token '332136') + self.assertTrue(match(' 3 32-136 ', time)) + + # ascii bytes + self.assertTrue(match(b'332136', time)) + + # too few digits + self.assertRaises(exc.MalformedTokenError, match, '12345', time) + + # invalid char + self.assertRaises(exc.MalformedTokenError, match, '12345X', time) + + # leading zeros count towards size + self.assertRaises(exc.MalformedTokenError, match, '0123456', time) + + def test_match_w_reference_vectors(self): + """match() -- reference vectors""" + for otp, time, token, expires, msg in self.iter_test_vectors(): + # create wrapper + match = otp.match + + # token should match against time + result = match(token, time) + self.assertTrue(result) + self.assertEqual(result.counter, time // otp.period, msg=msg) + + # should NOT match against another time + self.assertRaises(exc.InvalidTokenError, match, token, time + 100, window=0) + + #============================================================================= + # verify() tests + #============================================================================= + def test_verify(self): + """verify()""" + # NOTE: since this is thin wrapper around .from_source() and .match(), + # just testing basic behavior here. + + from passlib.totp import TOTP + + time = 1412889861 + TotpFactory = TOTP.using(now=lambda: time) + + # successful match + source1 = dict(v=1, type="totp", key='otxl2f5cctbprpzx') + match = TotpFactory.verify('332136', source1) + self.assertTotpMatch(match, time=time) + + # failed match + source1 = dict(v=1, type="totp", key='otxl2f5cctbprpzx') + self.assertRaises(exc.InvalidTokenError, TotpFactory.verify, '332155', source1) + + # bad source + source1 = dict(v=1, type="totp") + self.assertRaises(ValueError, TotpFactory.verify, '332155', source1) + + # successful match -- json source + source1json = '{"v": 1, "type": "totp", "key": "otxl2f5cctbprpzx"}' + match = TotpFactory.verify('332136', source1json) + self.assertTotpMatch(match, time=time) + + # successful match -- URI + source1uri = 'otpauth://totp/Label?secret=otxl2f5cctbprpzx' + match = TotpFactory.verify('332136', source1uri) + self.assertTotpMatch(match, time=time) + + #============================================================================= + # serialization frontend tests + #============================================================================= + def test_from_source(self): + """from_source()""" + from passlib.totp import TOTP + from_source = TOTP.from_source + + # uri (unicode) + otp = from_source(u("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "issuer=Example")) + self.assertEqual(otp.key, KEY4_RAW) + + # uri (bytes) + otp = from_source(b"otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + b"issuer=Example") + self.assertEqual(otp.key, KEY4_RAW) + + # dict + otp = from_source(dict(v=1, type="totp", key=KEY4)) + self.assertEqual(otp.key, KEY4_RAW) + + # json (unicode) + otp = from_source(u('{"v": 1, "type": "totp", "key": "JBSWY3DPEHPK3PXP"}')) + self.assertEqual(otp.key, KEY4_RAW) + + # json 
(bytes) + otp = from_source(b'{"v": 1, "type": "totp", "key": "JBSWY3DPEHPK3PXP"}') + self.assertEqual(otp.key, KEY4_RAW) + + # TOTP object -- return unchanged + self.assertIs(from_source(otp), otp) + + # TOTP object w/ different wallet -- return new one. + wallet1 = AppWallet() + otp1 = TOTP.using(wallet=wallet1).from_source(otp) + self.assertIsNot(otp1, otp) + self.assertEqual(otp1.to_dict(), otp.to_dict()) + + # TOTP object w/ same wallet -- return original + otp2 = TOTP.using(wallet=wallet1).from_source(otp1) + self.assertIs(otp2, otp1) + + # random string + self.assertRaises(ValueError, from_source, u("foo")) + self.assertRaises(ValueError, from_source, b"foo") + + #============================================================================= + # uri serialization tests + #============================================================================= + def test_from_uri(self): + """from_uri()""" + from passlib.totp import TOTP + from_uri = TOTP.from_uri + + # URIs from https://code.google.com/p/google-authenticator/wiki/KeyUriFormat + + #-------------------------------------------------------------------------------- + # canonical uri + #-------------------------------------------------------------------------------- + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "issuer=Example") + self.assertIsInstance(otp, TOTP) + self.assertEqual(otp.key, KEY4_RAW) + self.assertEqual(otp.label, "alice@google.com") + self.assertEqual(otp.issuer, "Example") + self.assertEqual(otp.alg, "sha1") # default + self.assertEqual(otp.period, 30) # default + self.assertEqual(otp.digits, 6) # default + + #-------------------------------------------------------------------------------- + # secret param + #-------------------------------------------------------------------------------- + + # secret case insensitive + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=jbswy3dpehpk3pxp&" + "issuer=Example") + self.assertEqual(otp.key, KEY4_RAW) + + # missing secret + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?digits=6") + + # undecodable secret + self.assertRaises(Base32DecodeError, from_uri, "otpauth://totp/Example:alice@google.com?" + "secret=JBSWY3DPEHP@3PXP") + + #-------------------------------------------------------------------------------- + # label param + #-------------------------------------------------------------------------------- + + # w/ encoded space + otp = from_uri("otpauth://totp/Provider1:Alice%20Smith?secret=JBSWY3DPEHPK3PXP&" + "issuer=Provider1") + self.assertEqual(otp.label, "Alice Smith") + self.assertEqual(otp.issuer, "Provider1") + + # w/ encoded space and colon + # (note url has leading space before 'alice') -- taken from KeyURI spec + otp = from_uri("otpauth://totp/Big%20Corporation%3A%20alice@bigco.com?" 
+ "secret=JBSWY3DPEHPK3PXP") + self.assertEqual(otp.label, "alice@bigco.com") + self.assertEqual(otp.issuer, "Big Corporation") + + #-------------------------------------------------------------------------------- + # issuer param / prefix + #-------------------------------------------------------------------------------- + + # 'new style' issuer only + otp = from_uri("otpauth://totp/alice@bigco.com?secret=JBSWY3DPEHPK3PXP&issuer=Big%20Corporation") + self.assertEqual(otp.label, "alice@bigco.com") + self.assertEqual(otp.issuer, "Big Corporation") + + # new-vs-old issuer mismatch + self.assertRaises(ValueError, TOTP.from_uri, + "otpauth://totp/Provider1:alice?secret=JBSWY3DPEHPK3PXP&issuer=Provider2") + + #-------------------------------------------------------------------------------- + # algorithm param + #-------------------------------------------------------------------------------- + + # custom alg + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&algorithm=SHA256") + self.assertEqual(otp.alg, "sha256") + + # unknown alg + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?" + "secret=JBSWY3DPEHPK3PXP&algorithm=SHA333") + + #-------------------------------------------------------------------------------- + # digit param + #-------------------------------------------------------------------------------- + + # custom digits + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=8") + self.assertEqual(otp.digits, 8) + + # digits out of range / invalid + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=A") + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=%20") + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=15") + + #-------------------------------------------------------------------------------- + # period param + #-------------------------------------------------------------------------------- + + # custom period + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&period=63") + self.assertEqual(otp.period, 63) + + # reject period < 1 + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?" + "secret=JBSWY3DPEHPK3PXP&period=0") + + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?" 
+ "secret=JBSWY3DPEHPK3PXP&period=-1") + + #-------------------------------------------------------------------------------- + # unrecognized param + #-------------------------------------------------------------------------------- + + # should issue warning, but otherwise ignore extra param + with self.assertWarningList([ + dict(category=exc.PasslibRuntimeWarning, message_re="unexpected parameters encountered") + ]): + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "foo=bar&period=63") + self.assertEqual(otp.base32_key, KEY4) + self.assertEqual(otp.period, 63) + + def test_to_uri(self): + """to_uri()""" + + #------------------------------------------------------------------------- + # label & issuer parameters + #------------------------------------------------------------------------- + + # with label & issuer + otp = TOTP(KEY4, alg="sha1", digits=6, period=30) + self.assertEqual(otp.to_uri("alice@google.com", "Example Org"), + "otpauth://totp/Example%20Org:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "issuer=Example%20Org") + + # label is required + self.assertRaises(ValueError, otp.to_uri, None, "Example Org") + + # with label only + self.assertEqual(otp.to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP") + + # with default label from constructor + otp.label = "alice@google.com" + self.assertEqual(otp.to_uri(), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP") + + # with default label & default issuer from constructor + otp.issuer = "Example Org" + self.assertEqual(otp.to_uri(), + "otpauth://totp/Example%20Org:alice@google.com?secret=JBSWY3DPEHPK3PXP" + "&issuer=Example%20Org") + + # reject invalid label + self.assertRaises(ValueError, otp.to_uri, "label:with:semicolons") + + # reject invalid issuer + self.assertRaises(ValueError, otp.to_uri, "alice@google.com", "issuer:with:semicolons") + + #------------------------------------------------------------------------- + # algorithm parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, alg="sha256").to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "algorithm=SHA256") + + #------------------------------------------------------------------------- + # digits parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, digits=8).to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "digits=8") + + #------------------------------------------------------------------------- + # period parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, period=63).to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "period=63") + + #============================================================================= + # dict serialization tests + #============================================================================= + def test_from_dict(self): + """from_dict()""" + from passlib.totp import TOTP + from_dict = TOTP.from_dict + + #-------------------------------------------------------------------------------- + # canonical simple example + #-------------------------------------------------------------------------------- + otp = from_dict(dict(v=1, type="totp", key=KEY4, label="alice@google.com", issuer="Example")) + self.assertIsInstance(otp, TOTP) + self.assertEqual(otp.key, KEY4_RAW) 
+ self.assertEqual(otp.label, "alice@google.com") + self.assertEqual(otp.issuer, "Example") + self.assertEqual(otp.alg, "sha1") # default + self.assertEqual(otp.period, 30) # default + self.assertEqual(otp.digits, 6) # default + + #-------------------------------------------------------------------------------- + # metadata + #-------------------------------------------------------------------------------- + + # missing version + self.assertRaises(ValueError, from_dict, dict(type="totp", key=KEY4)) + + # invalid version + self.assertRaises(ValueError, from_dict, dict(v=0, type="totp", key=KEY4)) + self.assertRaises(ValueError, from_dict, dict(v=999, type="totp", key=KEY4)) + + # missing type + self.assertRaises(ValueError, from_dict, dict(v=1, key=KEY4)) + + #-------------------------------------------------------------------------------- + # secret param + #-------------------------------------------------------------------------------- + + # secret case insensitive + otp = from_dict(dict(v=1, type="totp", key=KEY4.lower(), label="alice@google.com", issuer="Example")) + self.assertEqual(otp.key, KEY4_RAW) + + # missing secret + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp")) + + # undecodable secret + self.assertRaises(Base32DecodeError, from_dict, + dict(v=1, type="totp", key="JBSWY3DPEHP@3PXP")) + + #-------------------------------------------------------------------------------- + # label & issuer params + #-------------------------------------------------------------------------------- + + otp = from_dict(dict(v=1, type="totp", key=KEY4, label="Alice Smith", issuer="Provider1")) + self.assertEqual(otp.label, "Alice Smith") + self.assertEqual(otp.issuer, "Provider1") + + #-------------------------------------------------------------------------------- + # algorithm param + #-------------------------------------------------------------------------------- + + # custom alg + otp = from_dict(dict(v=1, type="totp", key=KEY4, alg="sha256")) + self.assertEqual(otp.alg, "sha256") + + # unknown alg + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, alg="sha333")) + + #-------------------------------------------------------------------------------- + # digit param + #-------------------------------------------------------------------------------- + + # custom digits + otp = from_dict(dict(v=1, type="totp", key=KEY4, digits=8)) + self.assertEqual(otp.digits, 8) + + # digits out of range / invalid + self.assertRaises(TypeError, from_dict, dict(v=1, type="totp", key=KEY4, digits="A")) + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, digits=15)) + + #-------------------------------------------------------------------------------- + # period param + #-------------------------------------------------------------------------------- + + # custom period + otp = from_dict(dict(v=1, type="totp", key=KEY4, period=63)) + self.assertEqual(otp.period, 63) + + # reject period < 1 + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, period=0)) + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, period=-1)) + + #-------------------------------------------------------------------------------- + # unrecognized param + #-------------------------------------------------------------------------------- + self.assertRaises(TypeError, from_dict, dict(v=1, type="totp", key=KEY4, INVALID=123)) + + def test_to_dict(self): + """to_dict()""" + + 
#------------------------------------------------------------------------- + # label & issuer parameters + #------------------------------------------------------------------------- + + # without label or issuer + otp = TOTP(KEY4, alg="sha1", digits=6, period=30) + self.assertEqual(otp.to_dict(), dict(v=1, type="totp", key=KEY4)) + + # with label & issuer from constructor + otp = TOTP(KEY4, alg="sha1", digits=6, period=30, + label="alice@google.com", issuer="Example Org") + self.assertEqual(otp.to_dict(), + dict(v=1, type="totp", key=KEY4, + label="alice@google.com", issuer="Example Org")) + + # with label only + otp = TOTP(KEY4, alg="sha1", digits=6, period=30, + label="alice@google.com") + self.assertEqual(otp.to_dict(), + dict(v=1, type="totp", key=KEY4, + label="alice@google.com")) + + # with issuer only + otp = TOTP(KEY4, alg="sha1", digits=6, period=30, + issuer="Example Org") + self.assertEqual(otp.to_dict(), + dict(v=1, type="totp", key=KEY4, + issuer="Example Org")) + + # don't serialize default issuer + TotpFactory = TOTP.using(issuer="Example Org") + otp = TotpFactory(KEY4) + self.assertEqual(otp.to_dict(), dict(v=1, type="totp", key=KEY4)) + + # don't serialize default issuer *even if explicitly set* + otp = TotpFactory(KEY4, issuer="Example Org") + self.assertEqual(otp.to_dict(), dict(v=1, type="totp", key=KEY4)) + + #------------------------------------------------------------------------- + # algorithm parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, alg="sha256").to_dict(), + dict(v=1, type="totp", key=KEY4, alg="sha256")) + + #------------------------------------------------------------------------- + # digits parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, digits=8).to_dict(), + dict(v=1, type="totp", key=KEY4, digits=8)) + + #------------------------------------------------------------------------- + # period parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, period=63).to_dict(), + dict(v=1, type="totp", key=KEY4, period=63)) + + # TODO: to_dict() + # with encrypt=False + # with encrypt="auto" + wallet + secrets + # with encrypt="auto" + wallet + no secrets + # with encrypt="auto" + no wallet + # with encrypt=True + wallet + secrets + # with encrypt=True + wallet + no secrets + # with encrypt=True + no wallet + # that 'changed' is set for old versions, and old encryption tags. + + #============================================================================= + # json serialization tests + #============================================================================= + + # TODO: from_json() / to_json(). 
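
Read together, from_dict() and to_dict() above form the same kind of round trip for the dict representation. A sketch under the same assumptions (the default sha1/6/30 settings, so no alg/digits/period keys are emitted):

    from passlib.totp import TOTP

    otp = TOTP("JBSWY3DPEHPK3PXP", label="alice@google.com", issuer="Example")
    data = otp.to_dict()
    # {'v': 1, 'type': 'totp', 'key': 'JBSWY3DPEHPK3PXP',
    #  'label': 'alice@google.com', 'issuer': 'Example'}
    assert TOTP.from_dict(data).to_dict() == data
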
+    # (skipped for right now cause just wrapper for from_dict/to_dict)
+
+    #=============================================================================
+    # eoc
+    #=============================================================================
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_utils.py b/venv/lib/python3.12/site-packages/passlib/tests/test_utils.py
new file mode 100644
index 0000000..59ba160
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/passlib/tests/test_utils.py
@@ -0,0 +1,1171 @@
+"""tests for passlib.utils"""
+#=============================================================================
+# imports
+#=============================================================================
+from __future__ import with_statement
+# core
+from functools import partial
+import warnings
+# site
+# pkg
+# module
+from passlib.utils import is_ascii_safe, to_bytes
+from passlib.utils.compat import irange, PY2, PY3, u, unicode, join_bytes, PYPY
+from passlib.tests.utils import TestCase, hb, run_with_fixed_seeds
+
+#=============================================================================
+# byte funcs
+#=============================================================================
+class MiscTest(TestCase):
+    """tests various parts of utils module"""
+
+    # NOTE: could test xor_bytes(), but it's exercised well enough by pbkdf2 test
+
+    def test_compat(self):
+        """test compat's lazymodule"""
+        from passlib.utils import compat
+        # "<module 'passlib.utils.compat' from 'passlib/utils/compat.pyc'>"
+        self.assertRegex(repr(compat),
+                         r"^<module 'passlib.utils.compat' from '.*?'>$")
+
+        # test synthetic dir()
+        dir(compat)
+        self.assertTrue('UnicodeIO' in dir(compat))
+        self.assertTrue('irange' in dir(compat))
+
+    def test_classproperty(self):
+        from passlib.utils.decor import classproperty
+
+        class test(object):
+            xvar = 1
+            @classproperty
+            def xprop(cls):
+                return cls.xvar
+
+        self.assertEqual(test.xprop, 1)
+        prop = test.__dict__['xprop']
+        self.assertIs(prop.im_func, prop.__func__)
+
+    def test_deprecated_function(self):
+        from passlib.utils.decor import deprecated_function
+        # NOTE: not comprehensive, just tests the basic behavior
+
+        @deprecated_function(deprecated="1.6", removed="1.8")
+        def test_func(*args):
+            """test docstring"""
+            return args
+
+        self.assertTrue(".. deprecated::" in test_func.__doc__)
+
+        with self.assertWarningList(dict(category=DeprecationWarning,
+                message="the function passlib.tests.test_utils.test_func() "
+                        "is deprecated as of Passlib 1.6, and will be "
+                        "removed in Passlib 1.8."
+ )): + self.assertEqual(test_func(1,2), (1,2)) + + def test_memoized_property(self): + from passlib.utils.decor import memoized_property + + class dummy(object): + counter = 0 + + @memoized_property + def value(self): + value = self.counter + self.counter = value+1 + return value + + d = dummy() + self.assertEqual(d.value, 0) + self.assertEqual(d.value, 0) + self.assertEqual(d.counter, 1) + + prop = dummy.value + if not PY3: + self.assertIs(prop.im_func, prop.__func__) + + def test_getrandbytes(self): + """getrandbytes()""" + from passlib.utils import getrandbytes + wrapper = partial(getrandbytes, self.getRandom()) + self.assertEqual(len(wrapper(0)), 0) + a = wrapper(10) + b = wrapper(10) + self.assertIsInstance(a, bytes) + self.assertEqual(len(a), 10) + self.assertEqual(len(b), 10) + self.assertNotEqual(a, b) + + @run_with_fixed_seeds(count=1024) + def test_getrandstr(self, seed): + """getrandstr()""" + from passlib.utils import getrandstr + + wrapper = partial(getrandstr, self.getRandom(seed=seed)) + + # count 0 + self.assertEqual(wrapper('abc',0), '') + + # count <0 + self.assertRaises(ValueError, wrapper, 'abc', -1) + + # letters 0 + self.assertRaises(ValueError, wrapper, '', 0) + + # letters 1 + self.assertEqual(wrapper('a', 5), 'aaaaa') + + # NOTE: the following parts are non-deterministic, + # with a small chance of failure (outside chance it may pick + # a string w/o one char, even more remote chance of picking + # same string). to combat this, we run it against multiple + # fixed seeds (using run_with_fixed_seeds decorator), + # and hope that they're sufficient to test the range of behavior. + + # letters + x = wrapper(u('abc'), 32) + y = wrapper(u('abc'), 32) + self.assertIsInstance(x, unicode) + self.assertNotEqual(x,y) + self.assertEqual(sorted(set(x)), [u('a'),u('b'),u('c')]) + + # bytes + x = wrapper(b'abc', 32) + y = wrapper(b'abc', 32) + self.assertIsInstance(x, bytes) + self.assertNotEqual(x,y) + # NOTE: decoding this due to py3 bytes + self.assertEqual(sorted(set(x.decode("ascii"))), [u('a'),u('b'),u('c')]) + + def test_generate_password(self): + """generate_password()""" + from passlib.utils import generate_password + warnings.filterwarnings("ignore", "The function.*generate_password\(\) is deprecated") + self.assertEqual(len(generate_password(15)), 15) + + def test_is_crypt_context(self): + """test is_crypt_context()""" + from passlib.utils import is_crypt_context + from passlib.context import CryptContext + cc = CryptContext(["des_crypt"]) + self.assertTrue(is_crypt_context(cc)) + self.assertFalse(not is_crypt_context(cc)) + + def test_genseed(self): + """test genseed()""" + import random + from passlib.utils import genseed + rng = random.Random(genseed()) + a = rng.randint(0, 10**10) + + rng = random.Random(genseed()) + b = rng.randint(0, 10**10) + + self.assertNotEqual(a,b) + + rng.seed(genseed(rng)) + + def test_crypt(self): + """test crypt.crypt() wrappers""" + from passlib.utils import has_crypt, safe_crypt, test_crypt + from passlib.registry import get_supported_os_crypt_schemes, get_crypt_handler + + # test everything is disabled + supported = get_supported_os_crypt_schemes() + if not has_crypt: + self.assertEqual(supported, ()) + self.assertEqual(safe_crypt("test", "aa"), None) + self.assertFalse(test_crypt("test", "aaqPiZY5xR5l.")) # des_crypt() hash of "test" + raise self.skipTest("crypt.crypt() not available") + + # expect there to be something supported, if crypt() is present + if not supported: + # NOTE: failures here should be investigated. 
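
The assertWarningList() helper used above (defined later in passlib/tests/utils.py) is built on the stdlib's warning-recording machinery. A self-contained sketch of that underlying pattern, with a hypothetical deprecated_helper() standing in for a decorated function:

    import warnings

    def deprecated_helper():
        warnings.warn("deprecated_helper() is deprecated", DeprecationWarning)
        return 42

    with warnings.catch_warnings(record=True) as log:
        warnings.simplefilter("always")   # don't let prior filters swallow it
        assert deprecated_helper() == 42

    assert len(log) == 1
    assert issubclass(log[0].category, DeprecationWarning)
    assert "deprecated" in str(log[0].message)
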
usually means one of: + # 1) at least one of passlib's os_crypt detection routines is giving false negative + # 2) crypt() ONLY supports some hash alg which passlib doesn't know about + # 3) crypt() is present but completely disabled (never encountered this yet) + raise self.fail("crypt() present, but no supported schemes found!") + + # pick cheap alg if possible, with minimum rounds, to speed up this test. + # NOTE: trusting hasher class works properly (should have been verified using it's own UTs) + for scheme in ("md5_crypt", "sha256_crypt"): + if scheme in supported: + break + else: + scheme = supported[-1] + hasher = get_crypt_handler(scheme) + if getattr(hasher, "min_rounds", None): + hasher = hasher.using(rounds=hasher.min_rounds) + + # helpers to generate hashes & config strings to work with + def get_hash(secret): + assert isinstance(secret, unicode) + hash = hasher.hash(secret) + if isinstance(hash, bytes): # py2 + hash = hash.decode("utf-8") + assert isinstance(hash, unicode) + return hash + + # test ascii password & return type + s1 = u("test") + h1 = get_hash(s1) + result = safe_crypt(s1, h1) + self.assertIsInstance(result, unicode) + self.assertEqual(result, h1) + self.assertEqual(safe_crypt(to_bytes(s1), to_bytes(h1)), h1) + + # make sure crypt doesn't just blindly return h1 for whatever we pass in + h1x = h1[:-2] + 'xx' + self.assertEqual(safe_crypt(s1, h1x), h1) + + # test utf-8 / unicode password + s2 = u('test\u1234') + h2 = get_hash(s2) + self.assertEqual(safe_crypt(s2, h2), h2) + self.assertEqual(safe_crypt(to_bytes(s2), to_bytes(h2)), h2) + + # test rejects null chars in password + self.assertRaises(ValueError, safe_crypt, '\x00', h1) + + # check test_crypt() + self.assertTrue(test_crypt("test", h1)) + self.assertFalse(test_crypt("test", h1x)) + + # check crypt returning variant error indicators + # some platforms return None on errors, others empty string, + # The BSDs in some cases return ":" + import passlib.utils as mod + orig = mod._crypt + try: + retval = None + mod._crypt = lambda secret, hash: retval + + for retval in [None, "", ":", ":0", "*0"]: + self.assertEqual(safe_crypt("test", h1), None) + self.assertFalse(test_crypt("test", h1)) + + retval = 'xxx' + self.assertEqual(safe_crypt("test", h1), "xxx") + self.assertFalse(test_crypt("test", h1)) + + finally: + mod._crypt = orig + + def test_consteq(self): + """test consteq()""" + # NOTE: this test is kind of over the top, but that's only because + # this is used for the critical task of comparing hashes for equality. + from passlib.utils import consteq, str_consteq + + # ensure error raises for wrong types + self.assertRaises(TypeError, consteq, u(''), b'') + self.assertRaises(TypeError, consteq, u(''), 1) + self.assertRaises(TypeError, consteq, u(''), None) + + self.assertRaises(TypeError, consteq, b'', u('')) + self.assertRaises(TypeError, consteq, b'', 1) + self.assertRaises(TypeError, consteq, b'', None) + + self.assertRaises(TypeError, consteq, None, u('')) + self.assertRaises(TypeError, consteq, None, b'') + self.assertRaises(TypeError, consteq, 1, u('')) + self.assertRaises(TypeError, consteq, 1, b'') + + def consteq_supports_string(value): + # under PY2, it supports all unicode strings (when present at all), + # under PY3, compare_digest() only supports ascii unicode strings. 
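
consteq()/str_consteq(), exercised below, exist so that hash comparisons don't leak timing information. The stdlib offers the same guarantee via hmac.compare_digest(); a stdlib-only sketch of the idea (an illustration, not passlib's implementation):

    import hmac

    def check_token(candidate: bytes, expected: bytes) -> bool:
        # comparison time is independent of where the inputs first differ
        return hmac.compare_digest(candidate, expected)

    assert check_token(b"abc123", b"abc123")
    assert not check_token(b"abc124", b"abc123")
    assert not check_token(b"abc", b"abc123")
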
+            # confirmed for: cpython 2.7.9, cpython 3.4, pypy, pypy3, pyston
+            return (consteq is str_consteq or PY2 or is_ascii_safe(value))
+
+        # check equal inputs compare correctly
+        for value in [
+                u("a"),
+                u("abc"),
+                u("\xff\xa2\x12\x00")*10,
+                ]:
+            if consteq_supports_string(value):
+                self.assertTrue(consteq(value, value), "value %r:" % (value,))
+            else:
+                self.assertRaises(TypeError, consteq, value, value)
+            self.assertTrue(str_consteq(value, value), "value %r:" % (value,))
+
+            value = value.encode("latin-1")
+            self.assertTrue(consteq(value, value), "value %r:" % (value,))
+
+        # check non-equal inputs compare correctly
+        for l,r in [
+                # check same-size comparisons with differing contents fail.
+                (u("a"), u("c")),
+                (u("abcabc"), u("zbaabc")),
+                (u("abcabc"), u("abzabc")),
+                (u("abcabc"), u("abcabz")),
+                ((u("\xff\xa2\x12\x00")*10)[:-1] + u("\x01"),
+                 u("\xff\xa2\x12\x00")*10),
+
+                # check different-size comparisons fail.
+                (u(""), u("a")),
+                (u("abc"), u("abcdef")),
+                (u("abc"), u("defabc")),
+                (u("qwertyuiopasdfghjklzxcvbnm"), u("abc")),
+                ]:
+            if consteq_supports_string(l) and consteq_supports_string(r):
+                self.assertFalse(consteq(l, r), "values %r %r:" % (l,r))
+                self.assertFalse(consteq(r, l), "values %r %r:" % (r,l))
+            else:
+                self.assertRaises(TypeError, consteq, l, r)
+                self.assertRaises(TypeError, consteq, r, l)
+            self.assertFalse(str_consteq(l, r), "values %r %r:" % (l,r))
+            self.assertFalse(str_consteq(r, l), "values %r %r:" % (r,l))
+
+            l = l.encode("latin-1")
+            r = r.encode("latin-1")
+            self.assertFalse(consteq(l, r), "values %r %r:" % (l,r))
+            self.assertFalse(consteq(r, l), "values %r %r:" % (r,l))
+
+        # TODO: add some tests to ensure we take THETA(strlen) time.
+        # this might be hard to do reproducibly.
+        # NOTE: below code was used to generate stats for analysis
+        ##from math import log as logb
+        ##import timeit
+        ##multipliers = [ 1<<s for s in irange(9)]
+
+            #
+            # test raw -> encode() -> decode() -> raw
+            #
+
+            # generate some random bytes
+            size = rng.randint(1 if saw_zero else 0, 12)
+            if not size:
+                saw_zero = True
+            enc_size = (4*size+2)//3
+            raw = getrandbytes(rng, size)
+
+            # encode them, check invariants
+            encoded = engine.encode_bytes(raw)
+            self.assertEqual(len(encoded), enc_size)
+
+            # make sure decode returns original
+            result = engine.decode_bytes(encoded)
+            self.assertEqual(result, raw)
+
+            #
+            # test encoded -> decode() -> encode() -> encoded
+            #
+
+            # generate some random encoded data
+            if size % 4 == 1:
+                size += rng.choice([-1,1,2])
+            raw_size = 3*size//4
+            encoded = getrandstr(rng, engine.bytemap, size)
+
+            # decode them, check invariants
+            raw = engine.decode_bytes(encoded)
+            self.assertEqual(len(raw), raw_size, "encoded %d:" % size)
+
+            # make sure encode returns original (barring padding bits)
+            result = engine.encode_bytes(raw)
+            if size % 4:
+                self.assertEqual(result[:-1], encoded[:-1])
+            else:
+                self.assertEqual(result, encoded)
+
+    def test_repair_unused(self):
+        """test repair_unused()"""
+        # NOTE: this test relies on encode_bytes() always returning clear
+        # padding bits - which should be ensured by test vectors.
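
The random round-trip loop above pins an exact length invariant for unpadded base64: encoding n bytes yields (4n+2)//3 characters, i.e. ceil(4n/3). The same arithmetic can be checked against the stdlib codec (which pads, so the '=' tail is stripped first); this illustrates the invariant only, not passlib's hash64 alphabet:

    import base64, os

    for size in range(16):
        raw = os.urandom(size)
        encoded = base64.b64encode(raw).rstrip(b"=")
        assert len(encoded) == (4 * size + 2) // 3     # ceil(4*size/3)
        padded = encoded + b"=" * (-len(encoded) % 4)  # re-pad before decoding
        assert base64.b64decode(padded) == raw
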
+ from passlib.utils import getrandstr + rng = self.getRandom() + engine = self.engine + check_repair_unused = self.engine.check_repair_unused + i = 0 + while i < 300: + size = rng.randint(0,23) + cdata = getrandstr(rng, engine.charmap, size).encode("ascii") + if size & 3 == 1: + # should throw error + self.assertRaises(ValueError, check_repair_unused, cdata) + continue + rdata = engine.encode_bytes(engine.decode_bytes(cdata)) + if rng.random() < .5: + cdata = cdata.decode("ascii") + rdata = rdata.decode("ascii") + if cdata == rdata: + # should leave unchanged + ok, result = check_repair_unused(cdata) + self.assertFalse(ok) + self.assertEqual(result, rdata) + else: + # should repair bits + self.assertNotEqual(size % 4, 0) + ok, result = check_repair_unused(cdata) + self.assertTrue(ok) + self.assertEqual(result, rdata) + i += 1 + + #=================================================================== + # test transposed encode/decode - encoding independant + #=================================================================== + # NOTE: these tests assume normal encode/decode has been tested elsewhere. + + transposed = [ + # orig, result, transpose map + (b"\x33\x22\x11", b"\x11\x22\x33",[2,1,0]), + (b"\x22\x33\x11", b"\x11\x22\x33",[1,2,0]), + ] + + transposed_dups = [ + # orig, result, transpose projection + (b"\x11\x11\x22", b"\x11\x22\x33",[0,0,1]), + ] + + def test_encode_transposed_bytes(self): + """test encode_transposed_bytes()""" + engine = self.engine + for result, input, offsets in self.transposed + self.transposed_dups: + tmp = engine.encode_transposed_bytes(input, offsets) + out = engine.decode_bytes(tmp) + self.assertEqual(out, result) + + self.assertRaises(TypeError, engine.encode_transposed_bytes, u("a"), []) + + def test_decode_transposed_bytes(self): + """test decode_transposed_bytes()""" + engine = self.engine + for input, result, offsets in self.transposed: + tmp = engine.encode_bytes(input) + out = engine.decode_transposed_bytes(tmp, offsets) + self.assertEqual(out, result) + + def test_decode_transposed_bytes_bad(self): + """test decode_transposed_bytes() fails if map is a one-way""" + engine = self.engine + for input, _, offsets in self.transposed_dups: + tmp = engine.encode_bytes(input) + self.assertRaises(TypeError, engine.decode_transposed_bytes, tmp, + offsets) + + #=================================================================== + # test 6bit handling + #=================================================================== + def check_int_pair(self, bits, encoded_pairs): + """helper to check encode_intXX & decode_intXX functions""" + rng = self.getRandom() + engine = self.engine + encode = getattr(engine, "encode_int%s" % bits) + decode = getattr(engine, "decode_int%s" % bits) + pad = -bits % 6 + chars = (bits+pad)//6 + upper = 1< block_size, and wrong type + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=17, hash='md5') + self.assertRaises(TypeError, helper, keylen='1') + +#============================================================================= +# test PBKDF2 support +#============================================================================= +class Pbkdf2_Test(TestCase): + """test pbkdf2() support""" + descriptionPrefix = "passlib.utils.pbkdf2.pbkdf2()" + + pbkdf2_test_vectors = [ + # (result, secret, salt, rounds, keylen, prf="sha1") + + # + # from rfc 3962 + # + + # test case 1 / 128 bit + ( + hb("cdedb5281bb2f801565a1122b2563515"), + b"password", b"ATHENA.MIT.EDUraeburn", 1, 16 + ), + + # test case 2 / 
128 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935d"), + b"password", b"ATHENA.MIT.EDUraeburn", 2, 16 + ), + + # test case 2 / 256 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86"), + b"password", b"ATHENA.MIT.EDUraeburn", 2, 32 + ), + + # test case 3 / 256 bit + ( + hb("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13"), + b"password", b"ATHENA.MIT.EDUraeburn", 1200, 32 + ), + + # test case 4 / 256 bit + ( + hb("d1daa78615f287e6a1c8b120d7062a493f98d203e6be49a6adf4fa574b6e64ee"), + b"password", b'\x12\x34\x56\x78\x78\x56\x34\x12', 5, 32 + ), + + # test case 5 / 256 bit + ( + hb("139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1"), + b"X"*64, b"pass phrase equals block size", 1200, 32 + ), + + # test case 6 / 256 bit + ( + hb("9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a"), + b"X"*65, b"pass phrase exceeds block size", 1200, 32 + ), + + # + # from rfc 6070 + # + ( + hb("0c60c80f961f0e71f3a9b524af6012062fe037a6"), + b"password", b"salt", 1, 20, + ), + + ( + hb("ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + b"password", b"salt", 2, 20, + ), + + ( + hb("4b007901b765489abead49d926f721d065a429c1"), + b"password", b"salt", 4096, 20, + ), + + # just runs too long - could enable if ALL option is set + ##( + ## + ## unhexlify("eefe3d61cd4da4e4e9945b3d6ba2158c2634e984"), + ## "password", "salt", 16777216, 20, + ##), + + ( + hb("3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + b"passwordPASSWORDpassword", + b"saltSALTsaltSALTsaltSALTsaltSALTsalt", + 4096, 25, + ), + + ( + hb("56fa6aa75548099dcc37d7f03425e0c3"), + b"pass\00word", b"sa\00lt", 4096, 16, + ), + + # + # from example in http://grub.enbug.org/Authentication + # + ( + hb("887CFF169EA8335235D8004242AA7D6187A41E3187DF0CE14E256D85ED" + "97A97357AAA8FF0A3871AB9EEFF458392F462F495487387F685B7472FC" + "6C29E293F0A0"), + b"hello", + hb("9290F727ED06C38BA4549EF7DE25CF5642659211B7FC076F2D28FEFD71" + "784BB8D8F6FB244A8CC5C06240631B97008565A120764C0EE9C2CB0073" + "994D79080136"), + 10000, 64, "hmac-sha512" + ), + + # + # custom + # + ( + hb('e248fb6b13365146f8ac6307cc222812'), + b"secret", b"salt", 10, 16, "hmac-sha1", + ), + ( + hb('e248fb6b13365146f8ac6307cc2228127872da6d'), + b"secret", b"salt", 10, None, "hmac-sha1", + ), + + ] + + def setUp(self): + super(Pbkdf2_Test, self).setUp() + warnings.filterwarnings("ignore", ".*passlib.utils.pbkdf2.*deprecated", DeprecationWarning) + + def test_known(self): + """test reference vectors""" + from passlib.utils.pbkdf2 import pbkdf2 + for row in self.pbkdf2_test_vectors: + correct, secret, salt, rounds, keylen = row[:5] + prf = row[5] if len(row) == 6 else "hmac-sha1" + result = pbkdf2(secret, salt, rounds, keylen, prf) + self.assertEqual(result, correct) + + def test_border(self): + """test border cases""" + from passlib.utils.pbkdf2 import pbkdf2 + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, prf="hmac-sha1"): + return pbkdf2(secret, salt, rounds, keylen, prf) + helper() + + # invalid rounds + self.assertRaises(ValueError, helper, rounds=-1) + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='x') + + # invalid keylen + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=0) + helper(keylen=1) + self.assertRaises(OverflowError, helper, keylen=20*(2**32-1)+1) + self.assertRaises(TypeError, helper, keylen='x') + + # invalid secret/salt type + self.assertRaises(TypeError, helper, salt=5) + self.assertRaises(TypeError, 
helper, secret=5) + + # invalid hash + self.assertRaises(ValueError, helper, prf='hmac-foo') + self.assertRaises(NotImplementedError, helper, prf='foo') + self.assertRaises(TypeError, helper, prf=5) + + def test_default_keylen(self): + """test keylen==None""" + from passlib.utils.pbkdf2 import pbkdf2 + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, prf="hmac-sha1"): + return pbkdf2(secret, salt, rounds, keylen, prf) + self.assertEqual(len(helper(prf='hmac-sha1')), 20) + self.assertEqual(len(helper(prf='hmac-sha256')), 32) + + def test_custom_prf(self): + """test custom prf function""" + from passlib.utils.pbkdf2 import pbkdf2 + def prf(key, msg): + return hashlib.md5(key+msg+b'fooey').digest() + self.assertRaises(NotImplementedError, pbkdf2, b'secret', b'salt', 1000, 20, prf) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/test_win32.py b/venv/lib/python3.12/site-packages/passlib/tests/test_win32.py new file mode 100644 index 0000000..e818b62 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/test_win32.py @@ -0,0 +1,50 @@ +"""tests for passlib.win32 -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +# core +import warnings +# site +# pkg +from passlib.tests.utils import TestCase +# module +from passlib.utils.compat import u + +#============================================================================= +# +#============================================================================= +class UtilTest(TestCase): + """test util funcs in passlib.win32""" + + ##test hashes from http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + ## among other places + + def setUp(self): + super(UtilTest, self).setUp() + warnings.filterwarnings("ignore", + "the 'passlib.win32' module is deprecated") + + def test_lmhash(self): + from passlib.win32 import raw_lmhash + for secret, hash in [ + ("OLDPASSWORD", u("c9b81d939d6fd80cd408e6b105741864")), + ("NEWPASSWORD", u('09eeab5aa415d6e4d408e6b105741864')), + ("welcome", u("c23413a8a1e7665faad3b435b51404ee")), + ]: + result = raw_lmhash(secret, hex=True) + self.assertEqual(result, hash) + + def test_nthash(self): + warnings.filterwarnings("ignore", + r"nthash\.raw_nthash\(\) is deprecated") + from passlib.win32 import raw_nthash + for secret, hash in [ + ("OLDPASSWORD", u("6677b2c394311355b54f25eec5bfacf5")), + ("NEWPASSWORD", u("256781a62031289d3c2c98c14f1efc8c")), + ]: + result = raw_nthash(secret, hex=True) + self.assertEqual(result, hash) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/tox_support.py b/venv/lib/python3.12/site-packages/passlib/tests/tox_support.py new file mode 100644 index 0000000..43170bc --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/tox_support.py @@ -0,0 +1,83 @@ +"""passlib.tests.tox_support - helper script for tox tests""" +#============================================================================= +# init script env +#============================================================================= +import os, sys +root_dir = 
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) +sys.path.insert(0, root_dir) + +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.compat import print_ +# local +__all__ = [ +] + +#============================================================================= +# main +#============================================================================= +TH_PATH = "passlib.tests.test_handlers" + +def do_hash_tests(*args): + """return list of hash algorithm tests that match regexes""" + if not args: + print(TH_PATH) + return + suffix = '' + args = list(args) + while True: + if args[0] == "--method": + suffix = '.' + args[1] + del args[:2] + else: + break + from passlib.tests import test_handlers + names = [TH_PATH + ":" + name + suffix for name in dir(test_handlers) + if not name.startswith("_") and any(re.match(arg,name) for arg in args)] + print_("\n".join(names)) + return not names + +def do_preset_tests(name): + """return list of preset test names""" + if name == "django" or name == "django-hashes": + do_hash_tests("django_.*_test", "hex_md5_test") + if name == "django": + print_("passlib.tests.test_ext_django") + else: + raise ValueError("unknown name: %r" % name) + +def do_setup_gae(path, runtime): + """write fake GAE ``app.yaml`` to current directory so nosegae will work""" + from passlib.tests.utils import set_file + set_file(os.path.join(path, "app.yaml"), """\ +application: fake-app +version: 2 +runtime: %s +api_version: 1 +threadsafe: no + +handlers: +- url: /.* + script: dummy.py + +libraries: +- name: django + version: "latest" +""" % runtime) + +def main(cmd, *args): + return globals()["do_" + cmd](*args) + +if __name__ == "__main__": + import sys + sys.exit(main(*sys.argv[1:]) or 0) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/tests/utils.py b/venv/lib/python3.12/site-packages/passlib/tests/utils.py new file mode 100644 index 0000000..79a9f9f --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/tests/utils.py @@ -0,0 +1,3621 @@ +"""helpers for passlib unittests""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from binascii import unhexlify +import contextlib +from functools import wraps, partial +import hashlib +import logging; log = logging.getLogger(__name__) +import random +import re +import os +import sys +import tempfile +import threading +import time +from passlib.exc import PasslibHashWarning, PasslibConfigWarning +from passlib.utils.compat import PY3, JYTHON +import warnings +from warnings import warn +# site +# pkg +from passlib import exc +from passlib.exc import MissingBackendError +import passlib.registry as registry +from passlib.tests.backports import TestCase as _TestCase, skip, skipIf, skipUnless, SkipTest +from passlib.utils import has_rounds_info, has_salt_info, rounds_cost_values, \ + rng as sys_rng, getrandstr, is_ascii_safe, to_native_str, \ + repeat_string, tick, batch +from passlib.utils.compat import iteritems, irange, u, unicode, PY2, nullcontext +from passlib.utils.decor 
import classproperty +import passlib.utils.handlers as uh +# local +__all__ = [ + # util funcs + 'TEST_MODE', + 'set_file', 'get_file', + + # unit testing + 'TestCase', + 'HandlerCase', +] + +#============================================================================= +# environment detection +#============================================================================= +# figure out if we're running under GAE; +# some tests (e.g. FS writing) should be skipped. +# XXX: is there better way to do this? +try: + import google.appengine +except ImportError: + GAE = False +else: + GAE = True + +def ensure_mtime_changed(path): + """ensure file's mtime has changed""" + # NOTE: this is hack to deal w/ filesystems whose mtime resolution is >= 1s, + # when a test needs to be sure the mtime changed after writing to the file. + last = os.path.getmtime(path) + while os.path.getmtime(path) == last: + time.sleep(0.1) + os.utime(path, None) + +def _get_timer_resolution(timer): + def sample(): + start = cur = timer() + while start == cur: + cur = timer() + return cur-start + return min(sample() for _ in range(3)) +TICK_RESOLUTION = _get_timer_resolution(tick) + +#============================================================================= +# test mode +#============================================================================= +_TEST_MODES = ["quick", "default", "full"] +_test_mode = _TEST_MODES.index(os.environ.get("PASSLIB_TEST_MODE", + "default").strip().lower()) + +def TEST_MODE(min=None, max=None): + """check if test for specified mode should be enabled. + + ``"quick"`` + run the bare minimum tests to ensure functionality. + variable-cost hashes are tested at their lowest setting. + hash algorithms are only tested against the backend that will + be used on the current host. no fuzz testing is done. + + ``"default"`` + same as ``"quick"``, except: hash algorithms are tested + at default levels, and a brief round of fuzz testing is done + for each hash. + + ``"full"`` + extra regression and internal tests are enabled, hash algorithms are tested + against all available backends, unavailable ones are mocked whre possible, + additional time is devoted to fuzz testing. + """ + if min and _test_mode < _TEST_MODES.index(min): + return False + if max and _test_mode > _TEST_MODES.index(max): + return False + return True + +#============================================================================= +# hash object inspection +#============================================================================= +def has_relaxed_setting(handler): + """check if handler supports 'relaxed' kwd""" + # FIXME: I've been lazy, should probably just add 'relaxed' kwd + # to all handlers that derive from GenericHandler + + # ignore wrapper classes for now.. though could introspec. 
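
TEST_MODE() above turns $PASSLIB_TEST_MODE into an ordered gate, so callers can scale work up or down without re-parsing the env var themselves. A usage sketch (TEST_MODE is the function defined above; the round counts are illustrative, not values the suite actually uses):

    # e.g. run with: PASSLIB_TEST_MODE=full python -m pytest passlib/tests
    if TEST_MODE(min="full"):
        fuzz_rounds = 10000   # exhaustive fuzzing
    elif TEST_MODE(max="quick"):
        fuzz_rounds = 10      # bare minimum
    else:
        fuzz_rounds = 1000    # default mode
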
+ if hasattr(handler, "orig_prefix"): + return False + + return 'relaxed' in handler.setting_kwds or issubclass(handler, + uh.GenericHandler) + +def get_effective_rounds(handler, rounds=None): + """get effective rounds value from handler""" + handler = unwrap_handler(handler) + return handler(rounds=rounds, use_defaults=True).rounds + +def is_default_backend(handler, backend): + """check if backend is the default for source""" + try: + orig = handler.get_backend() + except MissingBackendError: + return False + try: + handler.set_backend("default") + return handler.get_backend() == backend + finally: + handler.set_backend(orig) + +def iter_alt_backends(handler, current=None, fallback=False): + """ + iterate over alternate backends available to handler. + + .. warning:: + not thread-safe due to has_backend() call + """ + if current is None: + current = handler.get_backend() + backends = handler.backends + idx = backends.index(current)+1 if fallback else 0 + for backend in backends[idx:]: + if backend != current and handler.has_backend(backend): + yield backend + +def get_alt_backend(*args, **kwds): + for backend in iter_alt_backends(*args, **kwds): + return backend + return None + +def unwrap_handler(handler): + """return original handler, removing any wrapper objects""" + while hasattr(handler, "wrapped"): + handler = handler.wrapped + return handler + +def handler_derived_from(handler, base): + """ + test if was derived from via . + """ + # XXX: need way to do this more formally via ifc, + # for now just hacking in the cases we encounter in testing. + if handler == base: + return True + elif isinstance(handler, uh.PrefixWrapper): + while handler: + if handler == base: + return True + # helper set by PrefixWrapper().using() just for this case... + handler = handler._derived_from + return False + elif isinstance(handler, type) and issubclass(handler, uh.MinimalHandler): + return issubclass(handler, base) + else: + raise NotImplementedError("don't know how to inspect handler: %r" % (handler,)) + +@contextlib.contextmanager +def patch_calc_min_rounds(handler): + """ + internal helper for do_config_encrypt() -- + context manager which temporarily replaces handler's _calc_checksum() + with one that uses min_rounds; useful when trying to generate config + with high rounds value, but don't care if output is correct. + """ + if isinstance(handler, type) and issubclass(handler, uh.HasRounds): + # XXX: also require GenericHandler for this branch? 
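
The backend helpers above (is_default_backend(), iter_alt_backends(), get_alt_backend()) all drive the same handler-level API. A sketch of that API, using bcrypt purely as an example; note get_backend() raises MissingBackendError when nothing usable is installed:

    from passlib.hash import bcrypt

    current = bcrypt.get_backend()     # backend currently loaded
    usable = [name for name in bcrypt.backends if bcrypt.has_backend(name)]
    try:
        bcrypt.set_backend("default")  # let passlib pick its preferred backend
        assert bcrypt.get_backend() in usable
    finally:
        bcrypt.set_backend(current)    # always restore, as is_default_backend() does
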
+ wrapped = handler._calc_checksum + def wrapper(self, *args, **kwds): + rounds = self.rounds + try: + self.rounds = self.min_rounds + return wrapped(self, *args, **kwds) + finally: + self.rounds = rounds + handler._calc_checksum = wrapper + try: + yield + finally: + handler._calc_checksum = wrapped + elif isinstance(handler, uh.PrefixWrapper): + with patch_calc_min_rounds(handler.wrapped): + yield + else: + yield + return + +#============================================================================= +# misc helpers +#============================================================================= +def set_file(path, content): + """set file to specified bytes""" + if isinstance(content, unicode): + content = content.encode("utf-8") + with open(path, "wb") as fh: + fh.write(content) + +def get_file(path): + """read file as bytes""" + with open(path, "rb") as fh: + return fh.read() + +def tonn(source): + """convert native string to non-native string""" + if not isinstance(source, str): + return source + elif PY3: + return source.encode("utf-8") + else: + try: + return source.decode("utf-8") + except UnicodeDecodeError: + return source.decode("latin-1") + +def hb(source): + """ + helper for represent byte strings in hex. + + usage: ``hb("deadbeef23")`` + """ + return unhexlify(re.sub(r"\s", "", source)) + +def limit(value, lower, upper): + if value < lower: + return lower + elif value > upper: + return upper + return value + +def quicksleep(delay): + """because time.sleep() doesn't even have 10ms accuracy on some OSes""" + start = tick() + while tick()-start < delay: + pass + +def time_call(func, setup=None, maxtime=1, bestof=10): + """ + timeit() wrapper which tries to get as accurate a measurement as possible w/in maxtime seconds. + + :returns: + ``(avg_seconds_per_call, log10_number_of_repetitions)`` + """ + from timeit import Timer + from math import log + timer = Timer(func, setup=setup or '') + number = 1 + end = tick() + maxtime + while True: + delta = min(timer.repeat(bestof, number)) + if tick() >= end: + return delta/number, int(log(number, 10)) + number *= 10 + +def run_with_fixed_seeds(count=128, master_seed=0x243F6A8885A308D3): + """ + decorator run test method w/ multiple fixed seeds. + """ + def builder(func): + @wraps(func) + def wrapper(*args, **kwds): + rng = random.Random(master_seed) + for _ in irange(count): + kwds['seed'] = rng.getrandbits(32) + func(*args, **kwds) + return wrapper + return builder + +#============================================================================= +# custom test harness +#============================================================================= + +class TestCase(_TestCase): + """passlib-specific test case class + + this class adds a number of features to the standard TestCase... 
+ * common prefix for all test descriptions + * resets warnings filter & registry for every test + * tweaks to message formatting + * __msg__ kwd added to assertRaises() + * suite of methods for matching against warnings + """ + #=================================================================== + # add various custom features + #=================================================================== + + #--------------------------------------------------------------- + # make it easy for test cases to add common prefix to shortDescription + #--------------------------------------------------------------- + + # string prepended to all tests in TestCase + descriptionPrefix = None + + def shortDescription(self): + """wrap shortDescription() method to prepend descriptionPrefix""" + desc = super(TestCase, self).shortDescription() + prefix = self.descriptionPrefix + if prefix: + desc = "%s: %s" % (prefix, desc or str(self)) + return desc + + #--------------------------------------------------------------- + # hack things so nose and ut2 both skip subclasses who have + # "__unittest_skip=True" set, or whose names start with "_" + #--------------------------------------------------------------- + @classproperty + def __unittest_skip__(cls): + # NOTE: this attr is technically a unittest2 internal detail. + name = cls.__name__ + return name.startswith("_") or \ + getattr(cls, "_%s__unittest_skip" % name, False) + + @classproperty + def __test__(cls): + # make nose just proxy __unittest_skip__ + return not cls.__unittest_skip__ + + # flag to skip *this* class + __unittest_skip = True + + #--------------------------------------------------------------- + # reset warning filters & registry before each test + #--------------------------------------------------------------- + + # flag to reset all warning filters & ignore state + resetWarningState = True + + def setUp(self): + super(TestCase, self).setUp() + self.setUpWarnings() + # have uh.debug_only_repr() return real values for duration of test + self.patchAttr(exc, "ENABLE_DEBUG_ONLY_REPR", True) + + def setUpWarnings(self): + """helper to init warning filters before subclass setUp()""" + if self.resetWarningState: + ctx = reset_warnings() + ctx.__enter__() + self.addCleanup(ctx.__exit__) + + # ignore security warnings, tests may deliberately cause these + # TODO: may want to filter out a few of this, but not blanket filter... + # warnings.filterwarnings("ignore", category=exc.PasslibSecurityWarning) + + # ignore warnings about PasswordHash features deprecated in 1.7 + # TODO: should be cleaned in 2.0, when support will be dropped. + # should be kept until then, so we test the legacy paths. + warnings.filterwarnings("ignore", r"the method .*\.(encrypt|genconfig|genhash)\(\) is deprecated") + warnings.filterwarnings("ignore", r"the 'vary_rounds' option is deprecated") + warnings.filterwarnings("ignore", r"Support for `(py-bcrypt|bcryptor)` is deprecated") + + #--------------------------------------------------------------- + # tweak message formatting so longMessage mode is only enabled + # if msg ends with ":", and turn on longMessage by default. 
+ #--------------------------------------------------------------- + longMessage = True + + def _formatMessage(self, msg, std): + if self.longMessage and msg and msg.rstrip().endswith(":"): + return '%s %s' % (msg.rstrip(), std) + else: + return msg or std + + #--------------------------------------------------------------- + # override assertRaises() to support '__msg__' keyword, + # and to return the caught exception for further examination + #--------------------------------------------------------------- + def assertRaises(self, _exc_type, _callable=None, *args, **kwds): + msg = kwds.pop("__msg__", None) + if _callable is None: + # FIXME: this ignores 'msg' + return super(TestCase, self).assertRaises(_exc_type, None, + *args, **kwds) + try: + result = _callable(*args, **kwds) + except _exc_type as err: + return err + std = "function returned %r, expected it to raise %r" % (result, + _exc_type) + raise self.failureException(self._formatMessage(msg, std)) + + #--------------------------------------------------------------- + # forbid a bunch of deprecated aliases so I stop using them + #--------------------------------------------------------------- + def assertEquals(self, *a, **k): + raise AssertionError("this alias is deprecated by unittest2") + assertNotEquals = assertRegexMatches = assertEquals + + #=================================================================== + # custom methods for matching warnings + #=================================================================== + def assertWarning(self, warning, + message_re=None, message=None, + category=None, + filename_re=None, filename=None, + lineno=None, + msg=None, + ): + """check if warning matches specified parameters. + 'warning' is the instance of Warning to match against; + can also be instance of WarningMessage (as returned by catch_warnings). + """ + # check input type + if hasattr(warning, "category"): + # resolve WarningMessage -> Warning, but preserve original + wmsg = warning + warning = warning.message + else: + # no original WarningMessage, passed raw Warning + wmsg = None + + # tests that can use a warning instance or WarningMessage object + if message: + self.assertEqual(str(warning), message, msg) + if message_re: + self.assertRegex(str(warning), message_re, msg) + if category: + self.assertIsInstance(warning, category, msg) + + # tests that require a WarningMessage object + if filename or filename_re: + if not wmsg: + raise TypeError("matching on filename requires a " + "WarningMessage instance") + real = wmsg.filename + if real.endswith(".pyc") or real.endswith(".pyo"): + # FIXME: should use a stdlib call to resolve this back + # to module's original filename. 
+ real = real[:-1] + if filename: + self.assertEqual(real, filename, msg) + if filename_re: + self.assertRegex(real, filename_re, msg) + if lineno: + if not wmsg: + raise TypeError("matching on lineno requires a " + "WarningMessage instance") + self.assertEqual(wmsg.lineno, lineno, msg) + + class _AssertWarningList(warnings.catch_warnings): + """context manager for assertWarningList()""" + def __init__(self, case, **kwds): + self.case = case + self.kwds = kwds + self.__super = super(TestCase._AssertWarningList, self) + self.__super.__init__(record=True) + + def __enter__(self): + self.log = self.__super.__enter__() + + def __exit__(self, *exc_info): + self.__super.__exit__(*exc_info) + if exc_info[0] is None: + self.case.assertWarningList(self.log, **self.kwds) + + def assertWarningList(self, wlist=None, desc=None, msg=None): + """check that warning list (e.g. from catch_warnings) matches pattern""" + if desc is None: + assert wlist is not None + return self._AssertWarningList(self, desc=wlist, msg=msg) + # TODO: make this display better diff of *which* warnings did not match + assert desc is not None + if not isinstance(desc, (list,tuple)): + desc = [desc] + for idx, entry in enumerate(desc): + if isinstance(entry, str): + entry = dict(message_re=entry) + elif isinstance(entry, type) and issubclass(entry, Warning): + entry = dict(category=entry) + elif not isinstance(entry, dict): + raise TypeError("entry must be str, warning, or dict") + try: + data = wlist[idx] + except IndexError: + break + self.assertWarning(data, msg=msg, **entry) + else: + if len(wlist) == len(desc): + return + std = "expected %d warnings, found %d: wlist=%s desc=%r" % \ + (len(desc), len(wlist), self._formatWarningList(wlist), desc) + raise self.failureException(self._formatMessage(msg, std)) + + def consumeWarningList(self, wlist, desc=None, *args, **kwds): + """[deprecated] assertWarningList() variant that clears list afterwards""" + if desc is None: + desc = [] + self.assertWarningList(wlist, desc, *args, **kwds) + del wlist[:] + + def _formatWarning(self, entry): + tail = "" + if hasattr(entry, "message"): + # WarningMessage instance. 
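
assertWarningList() above matches a recorded warning list against a sequence of expectations (a regex string, a Warning subclass, or a dict of assertWarning() kwds), and doubles as a context manager via _AssertWarningList. A usage sketch in a hypothetical test:

    import warnings

    class WarnTest(TestCase):
        def test_deprecation_warned(self):
            # expect exactly one warning: right category, message matches regex
            with self.assertWarningList([
                dict(category=DeprecationWarning, message_re="feature X"),
            ]):
                warnings.warn("feature X is deprecated", DeprecationWarning)
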
+ tail = " filename=%r lineno=%r" % (entry.filename, entry.lineno) + if entry.line: + tail += " line=%r" % (entry.line,) + entry = entry.message + cls = type(entry) + return "<%s.%s message=%r%s>" % (cls.__module__, cls.__name__, + str(entry), tail) + + def _formatWarningList(self, wlist): + return "[%s]" % ", ".join(self._formatWarning(entry) for entry in wlist) + + #=================================================================== + # capability tests + #=================================================================== + def require_stringprep(self): + """helper to skip test if stringprep is missing""" + from passlib.utils import stringprep + if not stringprep: + from passlib.utils import _stringprep_missing_reason + raise self.skipTest("not available - stringprep module is " + + _stringprep_missing_reason) + + def require_TEST_MODE(self, level): + """skip test for all PASSLIB_TEST_MODE values below """ + if not TEST_MODE(level): + raise self.skipTest("requires >= %r test mode" % level) + + def require_writeable_filesystem(self): + """skip test if writeable FS not available""" + if GAE: + return self.skipTest("GAE doesn't offer read/write filesystem access") + + #=================================================================== + # reproducible random helpers + #=================================================================== + + #: global thread lock for random state + #: XXX: could split into global & per-instance locks if need be + _random_global_lock = threading.Lock() + + #: cache of global seed value, initialized on first call to getRandom() + _random_global_seed = None + + #: per-instance cache of name -> RNG + _random_cache = None + + def getRandom(self, name="default", seed=None): + """ + Return a :class:`random.Random` object for current test method to use. + Within an instance, multiple calls with the same name will return + the same object. + + When first created, each RNG will be seeded with value derived from + a global seed, the test class module & name, the current test method name, + and the **name** parameter. + + The global seed taken from the $RANDOM_TEST_SEED env var, + the $PYTHONHASHSEED env var, or a randomly generated the + first time this method is called. In all cases, the value + is logged for reproducibility. + + :param name: + name to uniquely identify separate RNGs w/in a test + (e.g. for threaded tests). + + :param seed: + override global seed when initialzing rng. + + :rtype: random.Random + """ + # check cache + cache = self._random_cache + if cache and name in cache: + return cache[name] + + with self._random_global_lock: + + # check cache again, and initialize it + cache = self._random_cache + if cache and name in cache: + return cache[name] + elif not cache: + cache = self._random_cache = {} + + # init global seed + global_seed = seed or TestCase._random_global_seed + if global_seed is None: + # NOTE: checking PYTHONHASHSEED, because if that's set, + # the test runner wants something reproducible. + global_seed = TestCase._random_global_seed = \ + int(os.environ.get("RANDOM_TEST_SEED") or + os.environ.get("PYTHONHASHSEED") or + sys_rng.getrandbits(32)) + # XXX: would it be better to print() this? 
+ log.info("using RANDOM_TEST_SEED=%d", global_seed) + + # create seed + cls = type(self) + source = "\n".join([str(global_seed), cls.__module__, cls.__name__, + self._testMethodName, name]) + digest = hashlib.sha256(source.encode("utf-8")).hexdigest() + seed = int(digest[:16], 16) + + # create rng + value = cache[name] = random.Random(seed) + return value + + #=================================================================== + # subtests + #=================================================================== + + has_real_subtest = hasattr(_TestCase, "subTest") + + @contextlib.contextmanager + def subTest(self, *args, **kwds): + """ + wrapper/backport for .subTest() which also traps SkipTest errors. + (see source for details) + """ + # this function works around two things: + # * TestCase.subTest() wasn't added until Py34; so for older python versions, + # we either need unittest2 installed, or provide stub of our own. + # this method provides a stub if needed (based on .has_real_subtest check) + # + # * as 2020-10-08, .subTest() doesn't play nicely w/ .skipTest(); + # and also makes it hard to debug which subtest had a failure. + # (see https://bugs.python.org/issue25894 and https://bugs.python.org/issue35327) + # this method traps skipTest exceptions, and adds some logging to help debug + # which subtest caused the issue. + + # setup way to log subtest info + # XXX: would like better way to inject messages into test output; + # but this at least gets us something for debugging... + # NOTE: this hack will miss parent params if called from nested .subTest() + def _render_title(_msg=None, **params): + out = ("[%s] " % _msg if _msg else "") + if params: + out += "(%s)" % " ".join("%s=%r" % tuple(item) for item in params.items()) + return out.strip() or "" + + test_log = self.getLogger() + title = _render_title(*args, **kwds) + + # use real subtest manager if available + if self.has_real_subtest: + ctx = super(TestCase, self).subTest(*args, **kwds) + else: + ctx = nullcontext() + + # run the subtest + with ctx: + test_log.info("running subtest: %s", title) + try: + yield + except SkipTest: + # silence "SkipTest" exceptions, want to keep running next subtest. + test_log.info("subtest skipped: %s", title) + pass + except Exception as err: + # log unhandled exception occurred + # (assuming traceback will be reported up higher, so not bothering here) + test_log.warning("subtest failed: %s: %s: %r", title, type(err).__name__, str(err)) + raise + + # XXX: check for "failed" state in ``self._outcome`` before writing this? 
+ test_log.info("subtest passed: %s", title) + + #=================================================================== + # other + #=================================================================== + _mktemp_queue = None + + def mktemp(self, *args, **kwds): + """create temp file that's cleaned up at end of test""" + self.require_writeable_filesystem() + fd, path = tempfile.mkstemp(*args, **kwds) + os.close(fd) + queue = self._mktemp_queue + if queue is None: + queue = self._mktemp_queue = [] + def cleaner(): + for path in queue: + if os.path.exists(path): + os.remove(path) + del queue[:] + self.addCleanup(cleaner) + queue.append(path) + return path + + def patchAttr(self, obj, attr, value, require_existing=True, wrap=False): + """monkeypatch object value, restoring original value on cleanup""" + try: + orig = getattr(obj, attr) + except AttributeError: + if require_existing: + raise + def cleanup(): + try: + delattr(obj, attr) + except AttributeError: + pass + self.addCleanup(cleanup) + else: + self.addCleanup(setattr, obj, attr, orig) + if wrap: + value = partial(value, orig) + wraps(orig)(value) + setattr(obj, attr, value) + + def getLogger(self): + """ + return logger named after current test. + """ + cls = type(self) + # NOTE: conditional on qualname for PY2 compat + path = cls.__module__ + "." + getattr(cls, "__qualname__", cls.__name__) + name = self._testMethodName + if name: + path = path + "." + name + return logging.getLogger(path) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# other unittest helpers +#============================================================================= + +RESERVED_BACKEND_NAMES = ["any", "default"] + + +def doesnt_require_backend(func): + """ + decorator for HandlerCase.create_backend_case() -- + used to decorate methods that should be run even if backend isn't present + (by default, full test suite is skipped when backend is missing) + + NOTE: tests decorated with this should not rely on handler have expected (or any!) backend. + """ + func._doesnt_require_backend = True + return func + + +class HandlerCase(TestCase): + """base class for testing password hash handlers (esp passlib.utils.handlers subclasses) + + In order to use this to test a handler, + create a subclass will all the appropriate attributes + filled as listed in the example below, + and run the subclass via unittest. + + .. todo:: + + Document all of the options HandlerCase offers. + + .. note:: + + This is subclass of :class:`unittest.TestCase` + (or :class:`unittest2.TestCase` if available). 
+ """ + #=================================================================== + # class attrs - should be filled in by subclass + #=================================================================== + + #--------------------------------------------------------------- + # handler setup + #--------------------------------------------------------------- + + # handler class to test [required] + handler = None + + # if set, run tests against specified backend + backend = None + + #--------------------------------------------------------------- + # test vectors + #--------------------------------------------------------------- + + # list of (secret, hash) tuples which are known to be correct + known_correct_hashes = [] + + # list of (config, secret, hash) tuples are known to be correct + known_correct_configs = [] + + # list of (alt_hash, secret, hash) tuples, where alt_hash is a hash + # using an alternate representation that should be recognized and verify + # correctly, but should be corrected to match hash when passed through + # genhash() + known_alternate_hashes = [] + + # hashes so malformed they aren't even identified properly + known_unidentified_hashes = [] + + # hashes which are identifiabled but malformed - they should identify() + # as True, but cause an error when passed to genhash/verify. + known_malformed_hashes = [] + + # list of (handler name, hash) pairs for other algorithm's hashes that + # handler shouldn't identify as belonging to it this list should generally + # be sufficient (if handler name in list, that entry will be skipped) + known_other_hashes = [ + ('des_crypt', '6f8c114b58f2c'), + ('md5_crypt', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + ('sha512_crypt', "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywW" + "vt0RLE8uZ4oPwcelCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1"), + ] + + # passwords used to test basic hash behavior - generally + # don't need to be overidden. + stock_passwords = [ + u("test"), + u("\u20AC\u00A5$"), + b'\xe2\x82\xac\xc2\xa5$' + ] + + #--------------------------------------------------------------- + # option flags + #--------------------------------------------------------------- + + # whether hash is case insensitive + # True, False, or special value "verify-only" (which indicates + # hash contains case-sensitive portion, but verifies is case-insensitive) + secret_case_insensitive = False + + # flag if scheme accepts ALL hash strings (e.g. plaintext) + accepts_all_hashes = False + + # flag if scheme has "is_disabled" set, and contains 'salted' data + disabled_contains_salt = False + + # flag/hack to filter PasslibHashWarning issued by test_72_configs() + filter_config_warnings = False + + # forbid certain characters in passwords + @classproperty + def forbidden_characters(cls): + # anything that supports crypt() interface should forbid null chars, + # since crypt() uses null-terminated strings. 
+        if 'os_crypt' in getattr(cls.handler, "backends", ()):
+            return b"\x00"
+        return None
+
+    #===================================================================
+    # internal class attrs
+    #===================================================================
+    __unittest_skip = True
+
+    @property
+    def descriptionPrefix(self):
+        handler = self.handler
+        name = handler.name
+        if hasattr(handler, "get_backend"):
+            name += " (%s backend)" % (handler.get_backend(),)
+        return name
+
+    #===================================================================
+    # support methods
+    #===================================================================
+
+    #---------------------------------------------------------------
+    # configuration helpers
+    #---------------------------------------------------------------
+    @classmethod
+    def iter_known_hashes(cls):
+        """iterate through known (secret, hash) pairs"""
+        for secret, hash in cls.known_correct_hashes:
+            yield secret, hash
+        for config, secret, hash in cls.known_correct_configs:
+            yield secret, hash
+        for alt, secret, hash in cls.known_alternate_hashes:
+            yield secret, hash
+
+    def get_sample_hash(self):
+        """test random sample secret/hash pair"""
+        known = list(self.iter_known_hashes())
+        return self.getRandom().choice(known)
+
+    #---------------------------------------------------------------
+    # test helpers
+    #---------------------------------------------------------------
+    def check_verify(self, secret, hash, msg=None, negate=False):
+        """helper to check verify() outcome, honoring is_disabled_handler"""
+        result = self.do_verify(secret, hash)
+        self.assertTrue(result is True or result is False,
+                        "verify() returned non-boolean value: %r" % (result,))
+        if self.handler.is_disabled or negate:
+            if not result:
+                return
+            if not msg:
+                msg = ("verify incorrectly returned True: secret=%r, hash=%r" %
+                       (secret, hash))
+            raise self.failureException(msg)
+        else:
+            if result:
+                return
+            if not msg:
+                msg = "verify failed: secret=%r, hash=%r" % (secret, hash)
+            raise self.failureException(msg)
+
+    def check_returned_native_str(self, result, func_name):
+        self.assertIsInstance(result, str,
+                              "%s() failed to return native string: %r" % (func_name, result,))
+
+    #---------------------------------------------------------------
+    # PasswordHash helpers - wraps all calls to PasswordHash api,
+    # so that subclasses can fill in defaults and account for other specialized behavior
+    #---------------------------------------------------------------
+    def populate_settings(self, kwds):
+        """subclassable method to populate default settings"""
+        # use lower rounds settings for certain test modes
+        handler = self.handler
+        if 'rounds' in handler.setting_kwds and 'rounds' not in kwds:
+            mn = handler.min_rounds
+            df = handler.default_rounds
+            if TEST_MODE(max="quick"):
+                # use minimum rounds for quick mode
+                kwds['rounds'] = max(3, mn)
+            else:
+                # use default/8 otherwise
+                factor = 3
+                if getattr(handler, "rounds_cost", None) == "log2":
+                    df -= factor
+                else:
+                    df //= (1<<factor)
+                kwds['rounds'] = max(3, mn, df)
+
+    #===================================================================
+    # salts
+    #===================================================================
+    def test_10_optional_salt_attributes(self):
+        """validate optional salt attributes"""
+        self.require_salt_info()
+        AssertionError = self.failureException
+        cls = self.handler
+
+        # check max_salt_size
+        mx_set = (cls.max_salt_size is not None)
+        if mx_set and cls.max_salt_size < 1:
+            raise AssertionError("max_salt_chars must be >= 1")
+
+        # check min_salt_size
+        if cls.min_salt_size < 0:
+            raise AssertionError("min_salt_chars must be >= 0")
+        if mx_set and cls.min_salt_size > cls.max_salt_size:
+            raise AssertionError("min_salt_chars must be <= max_salt_chars")
+
+        # check default_salt_size
+        if cls.default_salt_size < cls.min_salt_size:
+            raise AssertionError("default_salt_size must be >= min_salt_size")
+        if mx_set and cls.default_salt_size > cls.max_salt_size:
+            raise AssertionError("default_salt_size must be <= max_salt_size")
+
+        # check for 'salt_size' keyword
+        # NOTE: skipping warning if default salt size is already maxed out
+        #       (might change that in future)
+        if 'salt_size' not in cls.setting_kwds and (not mx_set or cls.default_salt_size < cls.max_salt_size):
+            warn('%s: hash handler supports range of salt sizes, '
+                 'but doesn\'t offer \'salt_size\' setting' % (cls.name,))
+
+        # check salt_chars & default_salt_chars
+        if cls.salt_chars:
+            if not cls.default_salt_chars:
+                raise AssertionError("default_salt_chars must not be empty")
+            for c in cls.default_salt_chars:
+                if c not in cls.salt_chars:
+                    raise AssertionError("default_salt_chars must be subset of salt_chars: %r not in salt_chars" % (c,))
+        else:
+            if not cls.default_salt_chars:
+                raise AssertionError("default_salt_chars MUST be specified if salt_chars is empty")
+
+    @property
+    def salt_bits(self):
+        """calculate number of salt bits in hash"""
+        # XXX: replace this with bitsize() method?
+        handler = self.handler
+        assert has_salt_info(handler), "need explicit bit-size for " + handler.name
+        from math import log
+        # FIXME: this may be off for case-insensitive hashes, but that accounts
+        # for ~1 bit difference, which is good enough for test_11()
+        return int(handler.default_salt_size *
+                   log(len(handler.default_salt_chars), 2))
+
+    def test_11_unique_salt(self):
+        """test hash() / genconfig() creates new salt each time"""
+        self.require_salt()
+        # odds of picking 'n' identical salts at random is '(.5**salt_bits)**n'.
+        # we want to pick the smallest N needed s.t. odds are <1/10**d, just
+        # to eliminate false-positives. which works out to n > 3.33*d/salt_bits.
+        # for 1/1e12 odds, n=1 is sufficient for most hashes, but a few border cases (e.g.
+        # cisco_type7) have < 16 bits of salt, requiring more.
+        samples = max(1, 4 + 12 - self.salt_bits)
+
+        def sampler(func):
+            value1 = func()
+            for _ in irange(samples):
+                value2 = func()
+                if value1 != value2:
+                    return
+            raise self.failureException("failed to find different salt after "
+                                        "%d samples" % (samples,))
+        sampler(self.do_genconfig)
+        sampler(lambda: self.do_encrypt("stub"))
+
+    def test_12_min_salt_size(self):
+        """test hash() / genconfig() honors min_salt_size"""
+        self.require_salt_info()
+
+        handler = self.handler
+        salt_char = handler.salt_chars[0:1]
+        min_size = handler.min_salt_size
+
+        #
+        # check min is accepted
+        #
+        s1 = salt_char * min_size
+        self.do_genconfig(salt=s1)
+
+        self.do_encrypt('stub', salt_size=min_size)
+
+        #
+        # check min-1 is rejected
+        #
+        if min_size > 0:
+            self.assertRaises(ValueError, self.do_genconfig,
+                              salt=s1[:-1])
+
+        self.assertRaises(ValueError, self.do_encrypt, 'stub',
+                          salt_size=min_size-1)
+
+    def test_13_max_salt_size(self):
+        """test hash() / genconfig() honors max_salt_size"""
+        self.require_salt_info()
+
+        handler = self.handler
+        max_size = handler.max_salt_size
+        salt_char = handler.salt_chars[0:1]
+
+        # NOTE: skipping this for hashes like argon2 since max_salt_size takes WAY too much memory
+        if max_size is None or max_size > (1 << 20):
+            #
+            # if it's not set, salt should never be truncated; so test it
+            # with an unreasonably large salt.
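+            # (e.g. if a backend silently clipped salts at 256 chars --
+            #  hypothetical figure -- the two stub digests generated below
+            #  would collide, and the assertNotEqual check would catch it)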
+ # + s1 = salt_char * 1024 + c1 = self.do_stub_encrypt(salt=s1) + c2 = self.do_stub_encrypt(salt=s1 + salt_char) + self.assertNotEqual(c1, c2) + + self.do_stub_encrypt(salt_size=1024) + + else: + # + # check max size is accepted + # + s1 = salt_char * max_size + c1 = self.do_stub_encrypt(salt=s1) + + self.do_stub_encrypt(salt_size=max_size) + + # + # check max size + 1 is rejected + # + s2 = s1 + salt_char + self.assertRaises(ValueError, self.do_stub_encrypt, salt=s2) + + self.assertRaises(ValueError, self.do_stub_encrypt, salt_size=max_size + 1) + + # + # should accept too-large salt in relaxed mode + # + if has_relaxed_setting(handler): + with warnings.catch_warnings(record=True): # issues passlibhandlerwarning + c2 = self.do_stub_encrypt(salt=s2, relaxed=True) + self.assertEqual(c2, c1) + + # + # if min_salt supports it, check smaller than mx is NOT truncated + # + if handler.min_salt_size < max_size: + c3 = self.do_stub_encrypt(salt=s1[:-1]) + self.assertNotEqual(c3, c1) + + # whether salt should be passed through bcrypt repair function + fuzz_salts_need_bcrypt_repair = False + + def prepare_salt(self, salt): + """prepare generated salt""" + if self.fuzz_salts_need_bcrypt_repair: + from passlib.utils.binary import bcrypt64 + salt = bcrypt64.repair_unused(salt) + return salt + + def test_14_salt_chars(self): + """test hash() honors salt_chars""" + self.require_salt_info() + + handler = self.handler + mx = handler.max_salt_size + mn = handler.min_salt_size + cs = handler.salt_chars + raw = isinstance(cs, bytes) + + # make sure all listed chars are accepted + for salt in batch(cs, mx or 32): + if len(salt) < mn: + salt = repeat_string(salt, mn) + salt = self.prepare_salt(salt) + self.do_stub_encrypt(salt=salt) + + # check some invalid salt chars, make sure they're rejected + source = u('\x00\xff') + if raw: + source = source.encode("latin-1") + chunk = max(mn, 1) + for c in source: + if c not in cs: + self.assertRaises(ValueError, self.do_stub_encrypt, salt=c*chunk, + __msg__="invalid salt char %r:" % (c,)) + + @property + def salt_type(self): + """hack to determine salt keyword's datatype""" + # NOTE: cisco_type7 uses 'int' + if getattr(self.handler, "_salt_is_bytes", False): + return bytes + else: + return unicode + + def test_15_salt_type(self): + """test non-string salt values""" + self.require_salt() + salt_type = self.salt_type + salt_size = getattr(self.handler, "min_salt_size", 0) or 8 + + # should always throw error for random class. + class fake(object): + pass + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=fake()) + + # unicode should be accepted only if salt_type is unicode. + if salt_type is not unicode: + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=u('x') * salt_size) + + # bytes should be accepted only if salt_type is bytes, + # OR if salt type is unicode and running PY2 - to allow native strings. 
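+        # (e.g. for a salt_type=unicode handler on PY3: u('x') * salt_size is
+        #  accepted, while both b'x' * salt_size and fake() raise TypeError;
+        #  on PY2 the bytes form is tolerated as a native string)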
+ if not (salt_type is bytes or (PY2 and salt_type is unicode)): + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=b'x' * salt_size) + + def test_using_salt_size(self): + """Handler.using() -- default_salt_size""" + self.require_salt_info() + + handler = self.handler + mn = handler.min_salt_size + mx = handler.max_salt_size + df = handler.default_salt_size + + # should prevent setting below handler limit + self.assertRaises(ValueError, handler.using, default_salt_size=-1) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(default_salt_size=-1, relaxed=True) + self.assertEqual(temp.default_salt_size, mn) + + # should prevent setting above handler limit + if mx: + self.assertRaises(ValueError, handler.using, default_salt_size=mx+1) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(default_salt_size=mx+1, relaxed=True) + self.assertEqual(temp.default_salt_size, mx) + + # try setting to explicit value + if mn != mx: + temp = handler.using(default_salt_size=mn+1) + self.assertEqual(temp.default_salt_size, mn+1) + self.assertEqual(handler.default_salt_size, df) + + temp = handler.using(default_salt_size=mn+2) + self.assertEqual(temp.default_salt_size, mn+2) + self.assertEqual(handler.default_salt_size, df) + + # accept strings + if mn == mx: + ref = mn + else: + ref = mn + 1 + temp = handler.using(default_salt_size=str(ref)) + self.assertEqual(temp.default_salt_size, ref) + + # reject invalid strings + self.assertRaises(ValueError, handler.using, default_salt_size=str(ref) + "xxx") + + # honor 'salt_size' alias + temp = handler.using(salt_size=ref) + self.assertEqual(temp.default_salt_size, ref) + + #=================================================================== + # rounds + #=================================================================== + def require_rounds_info(self): + if not has_rounds_info(self.handler): + raise self.skipTest("handler lacks rounds attributes") + + def test_20_optional_rounds_attributes(self): + """validate optional rounds attributes""" + self.require_rounds_info() + + cls = self.handler + AssertionError = self.failureException + + # check max_rounds + if cls.max_rounds is None: + raise AssertionError("max_rounds not specified") + if cls.max_rounds < 1: + raise AssertionError("max_rounds must be >= 1") + + # check min_rounds + if cls.min_rounds < 0: + raise AssertionError("min_rounds must be >= 0") + if cls.min_rounds > cls.max_rounds: + raise AssertionError("min_rounds must be <= max_rounds") + + # check default_rounds + if cls.default_rounds is not None: + if cls.default_rounds < cls.min_rounds: + raise AssertionError("default_rounds must be >= min_rounds") + if cls.default_rounds > cls.max_rounds: + raise AssertionError("default_rounds must be <= max_rounds") + + # check rounds_cost + if cls.rounds_cost not in rounds_cost_values: + raise AssertionError("unknown rounds cost constant: %r" % (cls.rounds_cost,)) + + def test_21_min_rounds(self): + """test hash() / genconfig() honors min_rounds""" + self.require_rounds_info() + handler = self.handler + min_rounds = handler.min_rounds + + # check min is accepted + self.do_genconfig(rounds=min_rounds) + self.do_encrypt('stub', rounds=min_rounds) + + # check min-1 is rejected + self.assertRaises(ValueError, self.do_genconfig, rounds=min_rounds-1) + self.assertRaises(ValueError, self.do_encrypt, 'stub', rounds=min_rounds-1) + + # TODO: check relaxed mode clips min-1 + + def test_21b_max_rounds(self): + """test hash() / genconfig() honors max_rounds""" + 
self.require_rounds_info() + handler = self.handler + max_rounds = handler.max_rounds + + if max_rounds is not None: + # check max+1 is rejected + self.assertRaises(ValueError, self.do_genconfig, rounds=max_rounds+1) + self.assertRaises(ValueError, self.do_encrypt, 'stub', rounds=max_rounds+1) + + # handle max rounds + if max_rounds is None: + self.do_stub_encrypt(rounds=(1 << 31) - 1) + else: + self.do_stub_encrypt(rounds=max_rounds) + + # TODO: check relaxed mode clips max+1 + + #-------------------------------------------------------------------------------------- + # HasRounds.using() / .needs_update() -- desired rounds limits + #-------------------------------------------------------------------------------------- + def _create_using_rounds_helper(self): + """ + setup test helpers for testing handler.using()'s rounds parameters. + """ + self.require_rounds_info() + handler = self.handler + + if handler.name == "bsdi_crypt": + # hack to bypass bsdi-crypt's "odd rounds only" behavior, messes up this test + orig_handler = handler + handler = handler.using() + handler._generate_rounds = classmethod(lambda cls: super(orig_handler, cls)._generate_rounds()) + + # create some fake values to test with + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + orig_default_rounds = handler.default_rounds + medium = ((orig_max_rounds or 9999) + orig_min_rounds) // 2 + if medium == orig_default_rounds: + medium += 1 + small = (orig_min_rounds + medium) // 2 + large = ((orig_max_rounds or 9999) + medium) // 2 + + if handler.name == "bsdi_crypt": + # hack to avoid even numbered rounds + small |= 1 + medium |= 1 + large |= 1 + adj = 2 + else: + adj = 1 + + # create a subclass with small/medium/large as new default desired values + with self.assertWarningList([]): + subcls = handler.using( + min_desired_rounds=small, + max_desired_rounds=large, + default_rounds=medium, + ) + + # return helpers + return handler, subcls, small, medium, large, adj + + def test_has_rounds_using_harness(self): + """ + HasRounds.using() -- sanity check test harness + """ + # setup helpers + self.require_rounds_info() + handler = self.handler + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + orig_default_rounds = handler.default_rounds + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + + # shouldn't affect original handler at all + self.assertEqual(handler.min_rounds, orig_min_rounds) + self.assertEqual(handler.max_rounds, orig_max_rounds) + self.assertEqual(handler.min_desired_rounds, None) + self.assertEqual(handler.max_desired_rounds, None) + self.assertEqual(handler.default_rounds, orig_default_rounds) + + # should affect subcls' desired value, but not hard min/max + self.assertEqual(subcls.min_rounds, orig_min_rounds) + self.assertEqual(subcls.max_rounds, orig_max_rounds) + self.assertEqual(subcls.default_rounds, medium) + self.assertEqual(subcls.min_desired_rounds, small) + self.assertEqual(subcls.max_desired_rounds, large) + + def test_has_rounds_using_w_min_rounds(self): + """ + HasRounds.using() -- min_rounds / min_desired_rounds + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + orig_default_rounds = handler.default_rounds + + # .using() should clip values below valid minimum, w/ warning + if orig_min_rounds > 0: + self.assertRaises(ValueError, handler.using, min_desired_rounds=orig_min_rounds - 
adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(min_desired_rounds=orig_min_rounds - adj, relaxed=True) + self.assertEqual(temp.min_desired_rounds, orig_min_rounds) + + # .using() should clip values above valid maximum, w/ warning + if orig_max_rounds: + self.assertRaises(ValueError, handler.using, min_desired_rounds=orig_max_rounds + adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(min_desired_rounds=orig_max_rounds + adj, relaxed=True) + self.assertEqual(temp.min_desired_rounds, orig_max_rounds) + + # .using() should allow values below previous desired minimum, w/o warning + with self.assertWarningList([]): + temp = subcls.using(min_desired_rounds=small - adj) + self.assertEqual(temp.min_desired_rounds, small - adj) + + # .using() should allow values w/in previous range + temp = subcls.using(min_desired_rounds=small + 2 * adj) + self.assertEqual(temp.min_desired_rounds, small + 2 * adj) + + # .using() should allow values above previous desired maximum, w/o warning + with self.assertWarningList([]): + temp = subcls.using(min_desired_rounds=large + adj) + self.assertEqual(temp.min_desired_rounds, large + adj) + + # hash() etc should allow explicit values below desired minimum + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .using() + self.assertEqual(get_effective_rounds(subcls, small + adj), small + adj) + self.assertEqual(get_effective_rounds(subcls, small), small) + with self.assertWarningList([]): + self.assertEqual(get_effective_rounds(subcls, small - adj), small - adj) + + # 'min_rounds' should be treated as alias for 'min_desired_rounds' + temp = handler.using(min_rounds=small) + self.assertEqual(temp.min_desired_rounds, small) + + # should be able to specify strings + temp = handler.using(min_rounds=str(small)) + self.assertEqual(temp.min_desired_rounds, small) + + # invalid strings should cause error + self.assertRaises(ValueError, handler.using, min_rounds=str(small) + "xxx") + + def test_has_rounds_replace_w_max_rounds(self): + """ + HasRounds.using() -- max_rounds / max_desired_rounds + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + + # .using() should clip values below valid minimum w/ warning + if orig_min_rounds > 0: + self.assertRaises(ValueError, handler.using, max_desired_rounds=orig_min_rounds - adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(max_desired_rounds=orig_min_rounds - adj, relaxed=True) + self.assertEqual(temp.max_desired_rounds, orig_min_rounds) + + # .using() should clip values above valid maximum, w/ warning + if orig_max_rounds: + self.assertRaises(ValueError, handler.using, max_desired_rounds=orig_max_rounds + adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(max_desired_rounds=orig_max_rounds + adj, relaxed=True) + self.assertEqual(temp.max_desired_rounds, orig_max_rounds) + + # .using() should clip values below previous minimum, w/ warning + with self.assertWarningList([PasslibConfigWarning]): + temp = subcls.using(max_desired_rounds=small - adj) + self.assertEqual(temp.max_desired_rounds, small) + + # .using() should reject explicit min > max + self.assertRaises(ValueError, subcls.using, + min_desired_rounds=medium+adj, + max_desired_rounds=medium-adj) + + # .using() should allow values w/in previous range + temp = subcls.using(min_desired_rounds=large - 2 * adj) + 
self.assertEqual(temp.min_desired_rounds, large - 2 * adj)
+
+        # .using() should allow values above previous desired maximum, w/o warning
+        with self.assertWarningList([]):
+            temp = subcls.using(max_desired_rounds=large + adj)
+        self.assertEqual(temp.max_desired_rounds, large + adj)
+
+        # hash() etc should allow explicit values above desired maximum, w/o warning
+        # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .using()
+        self.assertEqual(get_effective_rounds(subcls, large - adj), large - adj)
+        self.assertEqual(get_effective_rounds(subcls, large), large)
+        with self.assertWarningList([]):
+            self.assertEqual(get_effective_rounds(subcls, large + adj), large + adj)
+
+        # 'max_rounds' should be treated as alias for 'max_desired_rounds'
+        temp = handler.using(max_rounds=large)
+        self.assertEqual(temp.max_desired_rounds, large)
+
+        # should be able to specify strings
+        temp = handler.using(max_desired_rounds=str(large))
+        self.assertEqual(temp.max_desired_rounds, large)
+
+        # invalid strings should cause error
+        self.assertRaises(ValueError, handler.using, max_desired_rounds=str(large) + "xxx")
+
+    def test_has_rounds_using_w_default_rounds(self):
+        """
+        HasRounds.using() -- default_rounds
+        """
+        # setup helpers
+        handler, subcls, small, medium, large, adj = self._create_using_rounds_helper()
+        orig_max_rounds = handler.max_rounds
+
+        # XXX: are there any other cases that need testing?
+
+        # implicit default rounds -- increase to min_rounds
+        temp = subcls.using(min_rounds=medium+adj)
+        self.assertEqual(temp.default_rounds, medium+adj)
+
+        # implicit default rounds -- decrease to max_rounds
+        temp = subcls.using(max_rounds=medium-adj)
+        self.assertEqual(temp.default_rounds, medium-adj)
+
+        # explicit default rounds below desired minimum
+        # XXX: make this a warning if min is implicit?
+        self.assertRaises(ValueError, subcls.using, default_rounds=small-adj)
+
+        # explicit default rounds above desired maximum
+        # XXX: make this a warning if max is implicit?
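+        # (illustration w/ hypothetical numbers: given desired min/max of
+        #  5/15, subcls.using(default_rounds=20) raises ValueError below,
+        #  while subcls.using(max_rounds=8) above quietly lowered the
+        #  implicit default to 8)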
+        if orig_max_rounds:
+            self.assertRaises(ValueError, subcls.using, default_rounds=large+adj)
+
+        # hash() etc should use implicit default rounds, but get overridden
+        self.assertEqual(get_effective_rounds(subcls), medium)
+        self.assertEqual(get_effective_rounds(subcls, medium+adj), medium+adj)
+
+        # should be able to specify strings
+        temp = handler.using(default_rounds=str(medium))
+        self.assertEqual(temp.default_rounds, medium)
+
+        # invalid strings should cause error
+        self.assertRaises(ValueError, handler.using, default_rounds=str(medium) + "xxx")
+
+    def test_has_rounds_using_w_rounds(self):
+        """
+        HasRounds.using() -- rounds
+        """
+        # setup helpers
+        handler, subcls, small, medium, large, adj = self._create_using_rounds_helper()
+        orig_max_rounds = handler.max_rounds
+
+        # 'rounds' should be treated as fallback for min, max, and default
+        temp = subcls.using(rounds=medium+adj)
+        self.assertEqual(temp.min_desired_rounds, medium+adj)
+        self.assertEqual(temp.default_rounds, medium+adj)
+        self.assertEqual(temp.max_desired_rounds, medium+adj)
+
+        # 'rounds' should be overridden by explicit min, max, and default
+        temp = subcls.using(rounds=medium+1, min_rounds=small+adj,
+                            default_rounds=medium, max_rounds=large-adj)
+        self.assertEqual(temp.min_desired_rounds, small+adj)
+        self.assertEqual(temp.default_rounds, medium)
+        self.assertEqual(temp.max_desired_rounds, large-adj)
+
+    def test_has_rounds_using_w_vary_rounds_parsing(self):
+        """
+        HasRounds.using() -- vary_rounds parsing
+        """
+        # setup helpers
+        handler, subcls, small, medium, large, adj = self._create_using_rounds_helper()
+
+        def parse(value):
+            return subcls.using(vary_rounds=value).vary_rounds
+
+        # floats should be preserved
+        self.assertEqual(parse(0.1), 0.1)
+        self.assertEqual(parse('0.1'), 0.1)
+
+        # 'xx%' should be converted to float
+        self.assertEqual(parse('10%'), 0.1)
+
+        # ints should be preserved
+        self.assertEqual(parse(1000), 1000)
+        self.assertEqual(parse('1000'), 1000)
+
+        # float bounds should be enforced
+        self.assertRaises(ValueError, parse, -0.1)
+        self.assertRaises(ValueError, parse, 1.1)
+
+    def test_has_rounds_using_w_vary_rounds_generation(self):
+        """
+        HasRounds.using() -- vary_rounds generation
+        """
+        handler, subcls, small, medium, large, adj = self._create_using_rounds_helper()
+
+        def get_effective_range(cls):
+            seen = set(get_effective_rounds(cls) for _ in irange(1000))
+            return min(seen), max(seen)
+
+        def assert_rounds_range(vary_rounds, lower, upper):
+            temp = subcls.using(vary_rounds=vary_rounds)
+            seen_lower, seen_upper = get_effective_range(temp)
+            self.assertEqual(seen_lower, lower, "vary_rounds had wrong lower limit:")
+            self.assertEqual(seen_upper, upper, "vary_rounds had wrong upper limit:")
+
+        # test static
+        assert_rounds_range(0, medium, medium)
+        assert_rounds_range("0%", medium, medium)
+
+        # test absolute
+        assert_rounds_range(adj, medium - adj, medium + adj)
+        assert_rounds_range(50, max(small, medium - 50), min(large, medium + 50))
+
+        # test relative - should shift over at 50% mark
+        if handler.rounds_cost == "log2":
+            # log rounds "50%" variance should only increase/decrease by 1 cost value
+            assert_rounds_range("1%", medium, medium)
+            assert_rounds_range("49%", medium, medium)
+            assert_rounds_range("50%", medium - adj, medium)
+        else:
+            # for linear rounds, range is frequently so huge, won't ever see ends.
+            # so we just check it's within an expected range.
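+            # (worked example: medium=1000 w/ vary_rounds="50%" samples from
+            #  roughly [500, 1500]; after 1000 draws we merely assert the
+            #  observed lower bound falls in [500, 800] and the upper bound
+            #  in [1200, 1500], per the assertions below)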
+ lower, upper = get_effective_range(subcls.using(vary_rounds="50%")) + + self.assertGreaterEqual(lower, max(small, medium * 0.5)) + self.assertLessEqual(lower, max(small, medium * 0.8)) + + self.assertGreaterEqual(upper, min(large, medium * 1.2)) + self.assertLessEqual(upper, min(large, medium * 1.5)) + + def test_has_rounds_using_and_needs_update(self): + """ + HasRounds.using() -- desired_rounds + needs_update() + """ + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + + temp = subcls.using(min_desired_rounds=small+2, max_desired_rounds=large-2) + + # generate some sample hashes + small_hash = self.do_stub_encrypt(subcls, rounds=small) + medium_hash = self.do_stub_encrypt(subcls, rounds=medium) + large_hash = self.do_stub_encrypt(subcls, rounds=large) + + # everything should be w/in bounds for original handler + self.assertFalse(subcls.needs_update(small_hash)) + self.assertFalse(subcls.needs_update(medium_hash)) + self.assertFalse(subcls.needs_update(large_hash)) + + # small & large should require update for temp handler + self.assertTrue(temp.needs_update(small_hash)) + self.assertFalse(temp.needs_update(medium_hash)) + self.assertTrue(temp.needs_update(large_hash)) + + #=================================================================== + # idents + #=================================================================== + def require_many_idents(self): + handler = self.handler + if not isinstance(handler, type) or not issubclass(handler, uh.HasManyIdents): + raise self.skipTest("handler doesn't derive from HasManyIdents") + + def test_30_HasManyIdents(self): + """validate HasManyIdents configuration""" + cls = self.handler + self.require_many_idents() + + # check settings + self.assertTrue('ident' in cls.setting_kwds) + + # check ident_values list + for value in cls.ident_values: + self.assertIsInstance(value, unicode, + "cls.ident_values must be unicode:") + self.assertTrue(len(cls.ident_values)>1, + "cls.ident_values must have 2+ elements:") + + # check default_ident value + self.assertIsInstance(cls.default_ident, unicode, + "cls.default_ident must be unicode:") + self.assertTrue(cls.default_ident in cls.ident_values, + "cls.default_ident must specify member of cls.ident_values") + + # check optional aliases list + if cls.ident_aliases: + for alias, ident in iteritems(cls.ident_aliases): + self.assertIsInstance(alias, unicode, + "cls.ident_aliases keys must be unicode:") # XXX: allow ints? + self.assertIsInstance(ident, unicode, + "cls.ident_aliases values must be unicode:") + self.assertTrue(ident in cls.ident_values, + "cls.ident_aliases must map to cls.ident_values members: %r" % (ident,)) + + # check constructor validates ident correctly. + handler = cls + hash = self.get_sample_hash()[1] + kwds = handler.parsehash(hash) + del kwds['ident'] + + # ... accepts good ident + handler(ident=cls.default_ident, **kwds) + + # ... requires ident w/o defaults + self.assertRaises(TypeError, handler, **kwds) + + # ... supplies default ident + handler(use_defaults=True, **kwds) + + # ... 
rejects bad ident + self.assertRaises(ValueError, handler, ident='xXx', **kwds) + + # TODO: check various supported idents + + def test_has_many_idents_using(self): + """HasManyIdents.using() -- 'default_ident' and 'ident' keywords""" + self.require_many_idents() + + # pick alt ident to test with + handler = self.handler + orig_ident = handler.default_ident + for alt_ident in handler.ident_values: + if alt_ident != orig_ident: + break + else: + raise AssertionError("expected to find alternate ident: default=%r values=%r" % + (orig_ident, handler.ident_values)) + + def effective_ident(cls): + cls = unwrap_handler(cls) + return cls(use_defaults=True).ident + + # keep default if nothing else specified + subcls = handler.using() + self.assertEqual(subcls.default_ident, orig_ident) + + # accepts alt ident + subcls = handler.using(default_ident=alt_ident) + self.assertEqual(subcls.default_ident, alt_ident) + self.assertEqual(handler.default_ident, orig_ident) + + # check subcls actually *generates* default ident, + # and that we didn't affect orig handler + self.assertEqual(effective_ident(subcls), alt_ident) + self.assertEqual(effective_ident(handler), orig_ident) + + # rejects bad ident + self.assertRaises(ValueError, handler.using, default_ident='xXx') + + # honor 'ident' alias + subcls = handler.using(ident=alt_ident) + self.assertEqual(subcls.default_ident, alt_ident) + self.assertEqual(handler.default_ident, orig_ident) + + # forbid both at same time + self.assertRaises(TypeError, handler.using, default_ident=alt_ident, ident=alt_ident) + + # check ident aliases are being honored + if handler.ident_aliases: + for alias, ident in handler.ident_aliases.items(): + subcls = handler.using(ident=alias) + self.assertEqual(subcls.default_ident, ident, msg="alias %r:" % alias) + + #=================================================================== + # password size limits + #=================================================================== + def test_truncate_error_setting(self): + """ + validate 'truncate_error' setting & related attributes + """ + # If it doesn't have truncate_size set, + # it shouldn't support truncate_error + hasher = self.handler + if hasher.truncate_size is None: + self.assertNotIn("truncate_error", hasher.setting_kwds) + return + + # if hasher defaults to silently truncating, + # it MUST NOT use .truncate_verify_reject, + # because resulting hashes wouldn't verify! 
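+        # (e.g. bcrypt truncates at 72 bytes: w/ truncate_error=False,
+        #  hash("x" * 80) must verify against "x" * 100 as well, so
+        #  truncate_verify_reject has to stay False; w/ truncate_error=True,
+        #  hashing the 80-char secret raises PasswordTruncateError instead)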
+        if not hasher.truncate_error:
+            self.assertFalse(hasher.truncate_verify_reject)
+
+        # if hasher doesn't have configurable policy,
+        # it must throw error by default
+        if "truncate_error" not in hasher.setting_kwds:
+            self.assertTrue(hasher.truncate_error)
+            return
+
+        # test value parsing
+        def parse_value(value):
+            return hasher.using(truncate_error=value).truncate_error
+        self.assertEqual(parse_value(None), hasher.truncate_error)
+        self.assertEqual(parse_value(True), True)
+        self.assertEqual(parse_value("true"), True)
+        self.assertEqual(parse_value(False), False)
+        self.assertEqual(parse_value("false"), False)
+        self.assertRaises(ValueError, parse_value, "xxx")
+
+    def test_secret_wo_truncate_size(self):
+        """
+        test no password size limits enforced (if truncate_size=None)
+        """
+        # skip if hasher has a maximum password size
+        hasher = self.handler
+        if hasher.truncate_size is not None:
+            self.assertGreaterEqual(hasher.truncate_size, 1)
+            raise self.skipTest("truncate_size is set")
+
+        # NOTE: this doesn't do an exhaustive search to verify algorithm
+        #       doesn't have some cutoff point, it just tries
+        #       1024-character string, and alters the last char.
+        #       as long as algorithm doesn't clip secret at point <1024,
+        #       the new secret shouldn't verify.
+
+        # hash a 1024-byte secret
+        secret = "too many secrets" * 64
+        alt = "x"
+        hash = self.do_encrypt(secret)
+
+        # check that verify doesn't silently reject secret
+        # (i.e. hasher mistakenly honors .truncate_verify_reject)
+        verify_success = not hasher.is_disabled
+        self.assertEqual(self.do_verify(secret, hash), verify_success,
+                         msg="verify rejected correct secret")
+
+        # alter last byte, should get different hash, which won't verify
+        alt_secret = secret[:-1] + alt
+        self.assertFalse(self.do_verify(alt_secret, hash),
+                         "full password not used in digest")
+
+    def test_secret_w_truncate_size(self):
+        """
+        test password size limits raise truncate_error (if appropriate)
+        """
+        #--------------------------------------------------
+        # check if test is applicable
+        #--------------------------------------------------
+        handler = self.handler
+        truncate_size = handler.truncate_size
+        if not truncate_size:
+            raise self.skipTest("truncate_size not set")
+
+        #--------------------------------------------------
+        # setup vars
+        #--------------------------------------------------
+        # try to get versions w/ and w/o truncate_error set.
+        # set to None if policy isn't configurable
+        size_error_type = exc.PasswordSizeError
+        if "truncate_error" in handler.setting_kwds:
+            without_error = handler.using(truncate_error=False)
+            with_error = handler.using(truncate_error=True)
+            size_error_type = exc.PasswordTruncateError
+        elif handler.truncate_error:
+            without_error = None
+            with_error = handler
+        else:
+            # NOTE: this mode is currently an error in test_truncate_error_setting()
+            without_error = handler
+            with_error = None
+
+        # create some test secrets
+        base = "too many secrets"
+        alt = "x"  # char that's not in base, used to mutate test secrets
+        long_secret = repeat_string(base, truncate_size+1)
+        short_secret = long_secret[:-1]
+        alt_long_secret = long_secret[:-1] + alt
+        alt_short_secret = short_secret[:-1] + alt
+
+        # init flags
+        short_verify_success = not handler.is_disabled
+        long_verify_success = short_verify_success and \
+                              not handler.truncate_verify_reject
+
+        #--------------------------------------------------
+        # do tests on <truncate_size> length secret, and resulting hash.
+        # should pass regardless of truncate_error policy.
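+        # (so for a hypothetical truncate_size of 72: short_secret is 72
+        #  chars, long_secret is 73, and the alt_* variants differ from
+        #  them only in their final 'x' character)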
+        #--------------------------------------------------
+        assert without_error or with_error
+        for cand_hasher in [without_error, with_error]:
+
+            # create & hash string that's exactly <truncate_size> chars.
+            short_hash = self.do_encrypt(short_secret, handler=cand_hasher)
+
+            # check hash verifies, regardless of .truncate_verify_reject
+            self.assertEqual(self.do_verify(short_secret, short_hash,
+                                            handler=cand_hasher),
+                             short_verify_success)
+
+            # changing <truncate_size>'th char should invalidate hash
+            # if this fails, means (reported) truncate_size is too large.
+            self.assertFalse(self.do_verify(alt_short_secret, short_hash,
+                                            handler=with_error),
+                             "truncate_size value is too large")
+
+            # verify should truncate long secret before comparing
+            # (unless truncate_verify_reject is set)
+            self.assertEqual(self.do_verify(long_secret, short_hash,
+                                            handler=cand_hasher),
+                             long_verify_success)
+
+        #--------------------------------------------------
+        # do tests on <truncate_size+1> length secret,
+        # w/ truncate error disabled (should silently truncate)
+        #--------------------------------------------------
+        if without_error:
+
+            # create & hash string that's exactly truncate_size+1 chars
+            long_hash = self.do_encrypt(long_secret, handler=without_error)
+
+            # check verifies against secret (unless truncate_verify_reject=True)
+            self.assertEqual(self.do_verify(long_secret, long_hash,
+                                            handler=without_error),
+                             short_verify_success)
+
+            # check mutating last char doesn't change outcome.
+            # if this fails, means (reported) truncate_size is too small.
+            self.assertEqual(self.do_verify(alt_long_secret, long_hash,
+                                            handler=without_error),
+                             short_verify_success)
+
+            # check short_secret verifies against this hash
+            # if this fails, means (reported) truncate_size is too large.
+            self.assertTrue(self.do_verify(short_secret, long_hash,
+                                           handler=without_error))
+
+        #--------------------------------------------------
+        # do tests on <truncate_size+1> length secret,
+        # w/ truncate error
+        #--------------------------------------------------
+        if with_error:
+
+            # with errors enabled, should forbid truncation.
+            err = self.assertRaises(size_error_type, self.do_encrypt,
+                                    long_secret, handler=with_error)
+            self.assertEqual(err.max_size, truncate_size)
+
+    #===================================================================
+    # password contents
+    #===================================================================
+    def test_61_secret_case_sensitive(self):
+        """test password case sensitivity"""
+        hash_insensitive = self.secret_case_insensitive is True
+        verify_insensitive = self.secret_case_insensitive in [True,
+                                                              "verify-only"]
+
+        # test hashing lower-case verifies against lower & upper
+        lower = 'test'
+        upper = 'TEST'
+        h1 = self.do_encrypt(lower)
+        if verify_insensitive and not self.handler.is_disabled:
+            self.assertTrue(self.do_verify(upper, h1),
+                            "verify() should not be case sensitive")
+        else:
+            self.assertFalse(self.do_verify(upper, h1),
+                             "verify() should be case sensitive")
+
+        # test hashing upper-case verifies against lower & upper
+        h2 = self.do_encrypt(upper)
+        if verify_insensitive and not self.handler.is_disabled:
+            self.assertTrue(self.do_verify(lower, h2),
+                            "verify() should not be case sensitive")
+        else:
+            self.assertFalse(self.do_verify(lower, h2),
+                             "verify() should be case sensitive")
+
+        # test genhash
+        # XXX: 2.0: what about 'verify-only' hashes once genhash() is removed?
+        #      won't have easy way to recreate w/ same config to see if hash differs.
+ # (though only hash this applies to is mssql2000) + h2 = self.do_genhash(upper, h1) + if hash_insensitive or (self.handler.is_disabled and not self.disabled_contains_salt): + self.assertEqual(h2, h1, + "genhash() should not be case sensitive") + else: + self.assertNotEqual(h2, h1, + "genhash() should be case sensitive") + + def test_62_secret_border(self): + """test non-string passwords are rejected""" + hash = self.get_sample_hash()[1] + + # secret=None + self.assertRaises(TypeError, self.do_encrypt, None) + self.assertRaises(TypeError, self.do_genhash, None, hash) + self.assertRaises(TypeError, self.do_verify, None, hash) + + # secret=int (picked as example of entirely wrong class) + self.assertRaises(TypeError, self.do_encrypt, 1) + self.assertRaises(TypeError, self.do_genhash, 1, hash) + self.assertRaises(TypeError, self.do_verify, 1, hash) + + # xxx: move to password size limits section, above? + def test_63_large_secret(self): + """test MAX_PASSWORD_SIZE is enforced""" + from passlib.exc import PasswordSizeError + from passlib.utils import MAX_PASSWORD_SIZE + secret = '.' * (1+MAX_PASSWORD_SIZE) + hash = self.get_sample_hash()[1] + err = self.assertRaises(PasswordSizeError, self.do_genhash, secret, hash) + self.assertEqual(err.max_size, MAX_PASSWORD_SIZE) + self.assertRaises(PasswordSizeError, self.do_encrypt, secret) + self.assertRaises(PasswordSizeError, self.do_verify, secret, hash) + + def test_64_forbidden_chars(self): + """test forbidden characters not allowed in password""" + chars = self.forbidden_characters + if not chars: + raise self.skipTest("none listed") + base = u('stub') + if isinstance(chars, bytes): + from passlib.utils.compat import iter_byte_chars + chars = iter_byte_chars(chars) + base = base.encode("ascii") + for c in chars: + self.assertRaises(ValueError, self.do_encrypt, base + c + base) + + #=================================================================== + # check identify(), verify(), genhash() against test vectors + #=================================================================== + def is_secret_8bit(self, secret): + secret = self.populate_context(secret, {}) + return not is_ascii_safe(secret) + + def expect_os_crypt_failure(self, secret): + """ + check if we're expecting potential verify failure due to crypt.crypt() encoding limitation + """ + if PY3 and self.backend == "os_crypt" and isinstance(secret, bytes): + try: + secret.decode("utf-8") + except UnicodeDecodeError: + return True + return False + + def test_70_hashes(self): + """test known hashes""" + + # sanity check + self.assertTrue(self.known_correct_hashes or self.known_correct_configs, + "test must set at least one of 'known_correct_hashes' " + "or 'known_correct_configs'") + + # run through known secret/hash pairs + saw8bit = False + for secret, hash in self.iter_known_hashes(): + if self.is_secret_8bit(secret): + saw8bit = True + + # hash should be positively identified by handler + self.assertTrue(self.do_identify(hash), + "identify() failed to identify hash: %r" % (hash,)) + + # check if what we're about to do is expected to fail due to crypt.crypt() limitation. 
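+            # (e.g. a bytes vector like b'\xff\x00abc' can't be fed to PY3's
+            #  crypt.crypt(), which only accepts text decodable as UTF-8, so
+            #  the MissingBackendError path below is tolerated for such secrets)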
+ expect_os_crypt_failure = self.expect_os_crypt_failure(secret) + try: + + # secret should verify successfully against hash + self.check_verify(secret, hash, "verify() of known hash failed: " + "secret=%r, hash=%r" % (secret, hash)) + + # genhash() should reproduce same hash + result = self.do_genhash(secret, hash) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + if self.handler.is_disabled and self.disabled_contains_salt: + continue + self.assertEqual(result, hash, "genhash() failed to reproduce " + "known hash: secret=%r, hash=%r: result=%r" % + (secret, hash, result)) + + except MissingBackendError: + if not expect_os_crypt_failure: + raise + + # would really like all handlers to have at least one 8-bit test vector + if not saw8bit: + warn("%s: no 8-bit secrets tested" % self.__class__) + + def test_71_alternates(self): + """test known alternate hashes""" + if not self.known_alternate_hashes: + raise self.skipTest("no alternate hashes provided") + for alt, secret, hash in self.known_alternate_hashes: + + # hash should be positively identified by handler + self.assertTrue(self.do_identify(hash), + "identify() failed to identify alternate hash: %r" % + (hash,)) + + # secret should verify successfully against hash + self.check_verify(secret, alt, "verify() of known alternate hash " + "failed: secret=%r, hash=%r" % (secret, alt)) + + # genhash() should reproduce canonical hash + result = self.do_genhash(secret, alt) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + if self.handler.is_disabled and self.disabled_contains_salt: + continue + self.assertEqual(result, hash, "genhash() failed to normalize " + "known alternate hash: secret=%r, alt=%r, hash=%r: " + "result=%r" % (secret, alt, hash, result)) + + def test_72_configs(self): + """test known config strings""" + # special-case handlers without settings + if not self.handler.setting_kwds: + self.assertFalse(self.known_correct_configs, + "handler should not have config strings") + raise self.skipTest("hash has no settings") + + if not self.known_correct_configs: + # XXX: make this a requirement? + raise self.skipTest("no config strings provided") + + # make sure config strings work (hashes in list tested in test_70) + if self.filter_config_warnings: + warnings.filterwarnings("ignore", category=PasslibHashWarning) + for config, secret, hash in self.known_correct_configs: + + # config should be positively identified by handler + self.assertTrue(self.do_identify(config), + "identify() failed to identify known config string: %r" % + (config,)) + + # verify() should throw error for config strings. + self.assertRaises(ValueError, self.do_verify, secret, config, + __msg__="verify() failed to reject config string: %r" % + (config,)) + + # genhash() should reproduce hash from config. 
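+            # (a config string is a salt-only template without the digest --
+            #  for md5_crypt, something shaped like '$1$<salt>$' -- which
+            #  genhash() is expected to complete for the given secret)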
+ result = self.do_genhash(secret, config) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + self.assertEqual(result, hash, "genhash() failed to reproduce " + "known hash from config: secret=%r, config=%r, hash=%r: " + "result=%r" % (secret, config, hash, result)) + + def test_73_unidentified(self): + """test known unidentifiably-mangled strings""" + if not self.known_unidentified_hashes: + raise self.skipTest("no unidentified hashes provided") + for hash in self.known_unidentified_hashes: + + # identify() should reject these + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified known unidentifiable " + "hash: %r" % (hash,)) + + # verify() should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for unidentifiable " + "hash: %r" % (hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for unidentifiable " + "hash: %r" % (hash,)) + + def test_74_malformed(self): + """test known identifiable-but-malformed strings""" + if not self.known_malformed_hashes: + raise self.skipTest("no malformed hashes provided") + for hash in self.known_malformed_hashes: + + # identify() should accept these + self.assertTrue(self.do_identify(hash), + "identify() failed to identify known malformed " + "hash: %r" % (hash,)) + + # verify() should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for malformed " + "hash: %r" % (hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for malformed " + "hash: %r" % (hash,)) + + def test_75_foreign(self): + """test known foreign hashes""" + if self.accepts_all_hashes: + raise self.skipTest("not applicable") + if not self.known_other_hashes: + raise self.skipTest("no foreign hashes provided") + for name, hash in self.known_other_hashes: + # NOTE: most tests use default list of foreign hashes, + # so they may include ones belonging to that hash... + # hence the 'own' logic. + + if name == self.handler.name: + # identify should accept these + self.assertTrue(self.do_identify(hash), + "identify() failed to identify known hash: %r" % (hash,)) + + # verify & genhash should NOT throw error + self.do_verify('stub', hash) + result = self.do_genhash('stub', hash) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + + else: + # identify should reject these + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified hash belonging to " + "%s: %r" % (name, hash)) + + # verify should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for hash " + "belonging to %s: %r" % (name, hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for hash " + "belonging to %s: %r" % (name, hash)) + + def test_76_hash_border(self): + """test non-string hashes are rejected""" + # + # test hash=None is handled correctly + # + self.assertRaises(TypeError, self.do_identify, None) + self.assertRaises(TypeError, self.do_verify, 'stub', None) + + # NOTE: changed in 1.7 -- previously 'None' would be accepted when config strings not supported. 
+        self.assertRaises(TypeError, self.do_genhash, 'stub', None)
+
+        #
+        # test hash=int is rejected (picked as example of entirely wrong type)
+        #
+        self.assertRaises(TypeError, self.do_identify, 1)
+        self.assertRaises(TypeError, self.do_verify, 'stub', 1)
+        self.assertRaises(TypeError, self.do_genhash, 'stub', 1)
+
+        #
+        # test hash='' is rejected for all but the plaintext hashes
+        #
+        for hash in [u(''), b'']:
+            if self.accepts_all_hashes:
+                # then it accepts empty string as well.
+                self.assertTrue(self.do_identify(hash))
+                self.do_verify('stub', hash)
+                result = self.do_genhash('stub', hash)
+                self.check_returned_native_str(result, "genhash")
+            else:
+                # otherwise it should reject them
+                self.assertFalse(self.do_identify(hash),
+                                 "identify() incorrectly identified empty hash")
+                self.assertRaises(ValueError, self.do_verify, 'stub', hash,
+                                  __msg__="verify() failed to reject empty hash")
+                self.assertRaises(ValueError, self.do_genhash, 'stub', hash,
+                                  __msg__="genhash() failed to reject empty hash")
+
+        #
+        # test identify doesn't throw decoding errors on 8-bit input
+        #
+        self.do_identify('\xe2\x82\xac\xc2\xa5$') # utf-8
+        self.do_identify('abc\x91\x00') # non-utf8
+
+    #===================================================================
+    # test parsehash()
+    #===================================================================
+
+    #: optional list of known parse hash results for hasher
+    known_parsehash_results = []
+
+    def require_parsehash(self):
+        if not hasattr(self.handler, "parsehash"):
+            raise SkipTest("parsehash() not implemented")
+
+    def test_70_parsehash(self):
+        """
+        parsehash()
+        """
+        # TODO: would like to enhance what this test covers
+
+        self.require_parsehash()
+        handler = self.handler
+
+        # calls should succeed, and return dict
+        hash = self.do_encrypt("stub")
+        result = handler.parsehash(hash)
+        self.assertIsInstance(result, dict)
+        # TODO: figure out what invariants we can reliably parse,
+        #       or maybe make subclasses specify that?
+
+        # w/ checksum=False, should omit that key
+        result2 = handler.parsehash(hash, checksum=False)
+        correct2 = result.copy()
+        correct2.pop("checksum", None)
+        self.assertEqual(result2, correct2)
+
+        # w/ sanitize=True
+        # correct output should mask salt / checksum;
+        # but all else should be the same
+        result3 = handler.parsehash(hash, sanitize=True)
+        correct3 = result.copy()
+        if PY2:
+            # silence warning about bytes & unicode not comparing
+            # (sanitize may convert bytes into base64 text)
+            warnings.filterwarnings("ignore", ".*unequal comparison failed to convert.*",
+                                    category=UnicodeWarning)
+        for key in ("salt", "checksum"):
+            if key in result3:
+                self.assertNotEqual(result3[key], correct3[key])
+                self.assert_is_masked(result3[key])
+                correct3[key] = result3[key]
+        self.assertEqual(result3, correct3)
+
+    def assert_is_masked(self, value):
+        """
+        check value properly masked by :func:`passlib.utils.mask_value`
+        """
+        if value is None:
+            return
+        self.assertIsInstance(value, unicode)
+        # assumes mask_value() defaults will never show more than <show> chars (4);
+        # and show nothing if size less than 1/<pct> (8).
+        ref = value if len(value) < 8 else value[4:]
+        if set(ref) == set(["*"]):
+            return True
+        raise self.fail("value not masked: %r" % value)
+
+    def test_71_parsehash_results(self):
+        """
+        parsehash() -- known outputs
+        """
+        self.require_parsehash()
+        samples = self.known_parsehash_results
+        if not samples:
+            raise self.skipTest("no samples present")
+        # XXX: expand to test w/ checksum=False and/or sanitize=True?
+ # or read "_unsafe_settings"? + for hash, correct in self.known_parsehash_results: + result = self.handler.parsehash(hash) + self.assertEqual(result, correct, "hash=%r:" % hash) + + #=================================================================== + # fuzz testing + #=================================================================== + def test_77_fuzz_input(self, threaded=False): + """fuzz testing -- random passwords and options + + This test attempts to perform some basic fuzz testing of the hash, + based on whatever information can be found about it. + It does as much as it can within a fixed amount of time + (defaults to 1 second, but can be overridden via $PASSLIB_TEST_FUZZ_TIME). + It tests the following: + + * randomly generated passwords including extended unicode chars + * randomly selected rounds values (if rounds supported) + * randomly selected salt sizes (if salts supported) + * randomly selected identifiers (if multiple found) + * runs output of selected backend against other available backends + (if any) to detect errors occurring between different backends. + * runs output against other "external" verifiers such as OS crypt() + + :param report_thread_state: + if true, writes state of loop to current_thread().passlib_fuzz_state. + used to help debug multi-threaded fuzz test issues (below) + """ + if self.handler.is_disabled: + raise self.skipTest("not applicable") + + # gather info + from passlib.utils import tick + max_time = self.max_fuzz_time + if max_time <= 0: + raise self.skipTest("disabled by test mode") + verifiers = self.get_fuzz_verifiers(threaded=threaded) + def vname(v): + return (v.__doc__ or v.__name__).splitlines()[0] + + # init rng -- using separate one for each thread + # so things are predictable for given RANDOM_TEST_SEED + # (relies on test_78_fuzz_threading() to give threads unique names) + if threaded: + thread_name = threading.current_thread().name + else: + thread_name = "fuzz test" + rng = self.getRandom(name=thread_name) + generator = self.FuzzHashGenerator(self, rng) + + # do as many tests as possible for max_time seconds + log.debug("%s: %s: started; max_time=%r verifiers=%d (%s)", + self.descriptionPrefix, thread_name, max_time, len(verifiers), + ", ".join(vname(v) for v in verifiers)) + start = tick() + stop = start + max_time + count = 0 + while tick() <= stop: + # generate random password & options + opts = generator.generate() + secret = opts['secret'] + other = opts['other'] + settings = opts['settings'] + ctx = opts['context'] + if ctx: + settings['context'] = ctx + + # create new hash + hash = self.do_encrypt(secret, **settings) + ##log.debug("fuzz test: hash=%r secret=%r other=%r", + ## hash, secret, other) + + # run through all verifiers we found. + for verify in verifiers: + name = vname(verify) + result = verify(secret, hash, **ctx) + if result == "skip": # let verifiers signal lack of support + continue + assert result is True or result is False + if not result: + raise self.failureException("failed to verify against %r verifier: " + "secret=%r config=%r hash=%r" % + (name, secret, settings, hash)) + # occasionally check that some other secrets WON'T verify + # against this hash. 
+ if rng.random() < .1: + result = verify(other, hash, **ctx) + if result and result != "skip": + raise self.failureException("was able to verify wrong " + "password using %s: wrong_secret=%r real_secret=%r " + "config=%r hash=%r" % (name, other, secret, settings, hash)) + count += 1 + + log.debug("%s: %s: done; elapsed=%r count=%r", + self.descriptionPrefix, thread_name, tick() - start, count) + + def test_78_fuzz_threading(self): + """multithreaded fuzz testing -- random password & options using multiple threads + + run test_77 simultaneously in multiple threads + in an attempt to detect any concurrency issues + (e.g. the bug fixed by pybcrypt 0.3) + """ + self.require_TEST_MODE("full") + import threading + + # check if this test should run + if self.handler.is_disabled: + raise self.skipTest("not applicable") + thread_count = self.fuzz_thread_count + if thread_count < 1 or self.max_fuzz_time <= 0: + raise self.skipTest("disabled by test mode") + + # buffer to hold errors thrown by threads + failed_lock = threading.Lock() + failed = [0] + + # launch threads, all of which run + # test_77_fuzz_input(), and see if any errors get thrown. + # if hash has concurrency issues, this should reveal it. + def wrapper(): + try: + self.test_77_fuzz_input(threaded=True) + except SkipTest: + pass + except: + with failed_lock: + failed[0] += 1 + raise + def launch(n): + cls = type(self) + name = "Fuzz-Thread-%d ('%s:%s.%s')" % (n, cls.__module__, cls.__name__, + self._testMethodName) + thread = threading.Thread(target=wrapper, name=name) + thread.setDaemon(True) + thread.start() + return thread + threads = [launch(n) for n in irange(thread_count)] + + # wait until all threads exit + timeout = self.max_fuzz_time * thread_count * 4 + stalled = 0 + for thread in threads: + thread.join(timeout) + if not thread.is_alive(): + continue + # XXX: not sure why this is happening, main one seems 1/4 times for sun_md5_crypt + log.error("%s timed out after %f seconds", thread.name, timeout) + stalled += 1 + + # if any thread threw an error, raise one ourselves. + if failed[0]: + raise self.fail("%d/%d threads failed concurrent fuzz testing " + "(see error log for details)" % (failed[0], thread_count)) + if stalled: + raise self.fail("%d/%d threads stalled during concurrent fuzz testing " + "(see error log for details)" % (stalled, thread_count)) + + #--------------------------------------------------------------- + # fuzz constants & helpers + #--------------------------------------------------------------- + + @property + def max_fuzz_time(self): + """amount of time to spend on fuzz testing""" + value = float(os.environ.get("PASSLIB_TEST_FUZZ_TIME") or 0) + if value: + return value + elif TEST_MODE(max="quick"): + return 0 + elif TEST_MODE(max="default"): + return 1 + else: + return 5 + + @property + def fuzz_thread_count(self): + """number of threads for threaded fuzz testing""" + value = int(os.environ.get("PASSLIB_TEST_FUZZ_THREADS") or 0) + if value: + return value + elif TEST_MODE(max="quick"): + return 0 + else: + return 10 + + #--------------------------------------------------------------- + # fuzz verifiers + #--------------------------------------------------------------- + + #: list of custom fuzz-test verifiers (in addition to hasher itself, + #: and backend-specific wrappers of hasher). each element is + #: name of method that will return None / a verifier callable. 
+ fuzz_verifiers = ("fuzz_verifier_default",) + + def get_fuzz_verifiers(self, threaded=False): + """return list of password verifiers (including external libs) + + used by fuzz testing. + verifiers should be callable with signature + ``func(password: unicode, hash: ascii str) -> ok: bool``. + """ + handler = self.handler + verifiers = [] + + # call all methods starting with prefix in order to create + for method_name in self.fuzz_verifiers: + func = getattr(self, method_name)() + if func is not None: + verifiers.append(func) + + # create verifiers for any other available backends + # NOTE: skipping this under threading test, + # since backend switching isn't threadsafe (yet) + if hasattr(handler, "backends") and TEST_MODE("full") and not threaded: + def maker(backend): + def func(secret, hash): + orig_backend = handler.get_backend() + try: + handler.set_backend(backend) + return handler.verify(secret, hash) + finally: + handler.set_backend(orig_backend) + func.__name__ = "check_" + backend + "_backend" + func.__doc__ = backend + "-backend" + return func + for backend in iter_alt_backends(handler): + verifiers.append(maker(backend)) + + return verifiers + + def fuzz_verifier_default(self): + # test against self + def check_default(secret, hash, **ctx): + return self.do_verify(secret, hash, **ctx) + if self.backend: + check_default.__doc__ = self.backend + "-backend" + else: + check_default.__doc__ = "self" + return check_default + + #--------------------------------------------------------------- + # fuzz settings generation + #--------------------------------------------------------------- + class FuzzHashGenerator(object): + """ + helper which takes care of generating random + passwords & configuration options to test hash with. + separate from test class so we can create one per thread. + """ + #========================================================== + # class attrs + #========================================================== + + # alphabet for randomly generated passwords + password_alphabet = u('qwertyASDF1234<>.@*#! \u00E1\u0259\u0411\u2113') + + # encoding when testing bytes + password_encoding = "utf-8" + + # map of setting kwd -> method name. + # will ignore setting if method returns None. + # subclasses should make copy of dict. + settings_map = dict(rounds="random_rounds", + salt_size="random_salt_size", + ident="random_ident") + + # map of context kwd -> method name. + context_map = {} + + #========================================================== + # init / generation + #========================================================== + + def __init__(self, test, rng): + self.test = test + self.handler = test.handler + self.rng = rng + + def generate(self): + """ + generate random password and options for fuzz testing. 
+            :returns:
+                dict with keys ``secret``, ``other``, ``settings``, ``context``
+                (the latter two being kwd dicts for the encrypt/verify calls)
+            """
+            def gendict(map):
+                out = {}
+                for key, meth in map.items():
+                    value = getattr(self, meth)()
+                    if value is not None:
+                        out[key] = value
+                return out
+            secret, other = self.random_password_pair()
+            return dict(secret=secret,
+                        other=other,
+                        settings=gendict(self.settings_map),
+                        context=gendict(self.context_map),
+                        )
+
+        #==========================================================
+        # helpers
+        #==========================================================
+        def randintgauss(self, lower, upper, mu, sigma):
+            """generate random int w/ gaussian distribution"""
+            value = self.rng.normalvariate(mu, sigma)
+            return int(limit(value, lower, upper))
+
+        #==========================================================
+        # settings generation
+        #==========================================================
+
+        def random_rounds(self):
+            handler = self.handler
+            if not has_rounds_info(handler):
+                return None
+            default = handler.default_rounds or handler.min_rounds
+            lower = handler.min_rounds
+            if handler.rounds_cost == "log2":
+                upper = default
+            else:
+                upper = min(default*2, handler.max_rounds)
+            return self.randintgauss(lower, upper, default, default*.5)
+
+        def random_salt_size(self):
+            handler = self.handler
+            if not (has_salt_info(handler) and 'salt_size' in handler.setting_kwds):
+                return None
+            default = handler.default_salt_size
+            lower = handler.min_salt_size
+            upper = handler.max_salt_size or default*4
+            return self.randintgauss(lower, upper, default, default*.5)
+
+        def random_ident(self):
+            rng = self.rng
+            handler = self.handler
+            if 'ident' not in handler.setting_kwds or not hasattr(handler, "ident_values"):
+                return None
+            if rng.random() < .5:
+                return None
+            # resolve wrappers before reading values
+            handler = getattr(handler, "wrapped", handler)
+            return rng.choice(handler.ident_values)
+
+        #==========================================================
+        # fuzz password generation
+        #==========================================================
+        def random_password_pair(self):
+            """generate random password, and non-matching alternate password"""
+            secret = self.random_password()
+            while True:
+                other = self.random_password()
+                if self.accept_password_pair(secret, other):
+                    break
+            rng = self.rng
+            if rng.randint(0,1):
+                secret = secret.encode(self.password_encoding)
+            if rng.randint(0,1):
+                other = other.encode(self.password_encoding)
+            return secret, other
+
+        def random_password(self):
+            """generate random passwords for fuzz testing"""
+            # occasionally try an empty password
+            rng = self.rng
+            if rng.random() < .0001:
+                return u('')
+
+            # check if truncate size needs to be considered
+            handler = self.handler
+            truncate_size = handler.truncate_error and handler.truncate_size
+            max_size = truncate_size or 999999
+
+            # pick endpoint
+            if max_size < 50 or rng.random() < .5:
+                # chance of small password (~15 chars)
+                size = self.randintgauss(1, min(max_size, 50), 15, 15)
+            else:
+                # otherwise large password (~70 chars)
+                size = self.randintgauss(50, min(max_size, 99), 70, 20)
+
+            # generate random password
+            result = getrandstr(rng, self.password_alphabet, size)
+
+            # trim ones that encode past truncate point.
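+            # (password sizes above are counted in unicode chars, while the
+            # handler's truncate_size is in bytes -- so drop trailing chars
+            # until the utf-8 encoding fits under the limit)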
+            if truncate_size and isinstance(result, unicode):
+                while len(result.encode("utf-8")) > truncate_size:
+                    result = result[:-1]
+
+            return result
+
+        def accept_password_pair(self, secret, other):
+            """verify fuzz pair contains different passwords"""
+            return secret != other
+
+        #==========================================================
+        # eoc FuzzHashGenerator
+        #==========================================================
+
+    #===================================================================
+    # "disabled hasher" api
+    #===================================================================
+
+    def test_disable_and_enable(self):
+        """.disable() / .enable() methods"""
+        #
+        # setup
+        #
+        handler = self.handler
+        if not handler.is_disabled:
+            self.assertFalse(hasattr(handler, "disable"))
+            self.assertFalse(hasattr(handler, "enable"))
+            self.assertFalse(self.disabled_contains_salt)
+            raise self.skipTest("not applicable")
+
+        #
+        # disable()
+        #
+
+        # w/o existing hash
+        disabled_default = handler.disable()
+        self.assertIsInstance(disabled_default, str,
+                              msg="disable() must return native string")
+        self.assertTrue(handler.identify(disabled_default),
+                        msg="identify() didn't recognize disable() result: %r" % (disabled_default))
+
+        # w/ existing hash
+        stub = self.getRandom().choice(self.known_other_hashes)[1]
+        disabled_stub = handler.disable(stub)
+        self.assertIsInstance(disabled_stub, str,
+                              msg="disable() must return native string")
+        self.assertTrue(handler.identify(disabled_stub),
+                        msg="identify() didn't recognize disable() result: %r" % (disabled_stub))
+
+        #
+        # enable()
+        #
+
+        # w/o original hash
+        self.assertRaisesRegex(ValueError, "cannot restore original hash",
+                               handler.enable, disabled_default)
+
+        # w/ original hash
+        try:
+            result = handler.enable(disabled_stub)
+            error = None
+        except ValueError as e:
+            result = None
+            error = e
+
+        if error is None:
+            # if supports recovery, should have returned stub (e.g. unix_disabled);
+            self.assertIsInstance(result, str,
+                                  msg="enable() must return native string")
+            self.assertEqual(result, stub)
+        else:
+            # if doesn't, should have thrown appropriate error
+            self.assertIsInstance(error, ValueError)
+            self.assertRegex(str(error), "cannot restore original hash")
+
+        #
+        # test repeating disable() & salting state
+        #
+
+        # repeating disabled
+        disabled_default2 = handler.disable()
+        if self.disabled_contains_salt:
+            # should return new salt for each call (e.g. django_disabled)
+            self.assertNotEqual(disabled_default2, disabled_default)
+        elif error is None:
+            # should return same result for each hash, but unique across hashes
+            self.assertEqual(disabled_default2, disabled_default)
+
+        # repeating same hash ...
+        disabled_stub2 = handler.disable(stub)
+        if self.disabled_contains_salt:
+            # ... should return different string (if salted)
+            self.assertNotEqual(disabled_stub2, disabled_stub)
+        else:
+            # ... should return same string
+            self.assertEqual(disabled_stub2, disabled_stub)
+
+        # using different hash ...
+        disabled_other = handler.disable(stub + 'xxx')
+        if self.disabled_contains_salt or error is None:
+            # ... should return different string (if salted or hash encoded)
+            self.assertNotEqual(disabled_other, disabled_stub)
+        else:
+            # ... should return same string
+            self.assertEqual(disabled_other, disabled_stub)
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+#=============================================================================
+# HandlerCase mixins providing additional tests for certain hashes
+#=============================================================================
+class OsCryptMixin(HandlerCase):
+    """helper used by create_backend_case() which adds additional features
+    to test the os_crypt backend.
+
+    * if crypt support is missing, inserts fake crypt support to simulate
+      a working safe_crypt, to test passlib's codepath as fully as possible.
+
+    * extra tests to verify non-conformant crypt implementations are handled
+      correctly.
+
+    * check that native crypt support is detected correctly for known platforms.
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+
+    # platforms that are known to support / not support this hash natively.
+    # list of (platform_regex, True|False|None) entries.
+    platform_crypt_support = []
+
+    #===================================================================
+    # instance attrs
+    #===================================================================
+    __unittest_skip = True
+
+    # force this backend
+    backend = "os_crypt"
+
+    # flag read by HandlerCase to detect if fake os crypt is enabled.
+    using_patched_crypt = False
+
+    #===================================================================
+    # setup
+    #===================================================================
+    def setUp(self):
+        assert self.backend == "os_crypt"
+        if not self.handler.has_backend("os_crypt"):
+            # XXX: currently, any tests that use this are skipped entirely! (see issue 120)
+            self._patch_safe_crypt()
+        super(OsCryptMixin, self).setUp()
+
+    @classmethod
+    def _get_safe_crypt_handler_backend(cls):
+        """
+        return (handler, backend) pair to use for faking crypt.crypt() support for hash.
+        backend will be None if none available.
+        """
+        # find handler that generates safe_crypt() compatible hash
+        handler = unwrap_handler(cls.handler)
+
+        # hack to prevent recursion issue when .has_backend() is called
+        handler.get_backend()
+
+        # find backend which isn't os_crypt
+        alt_backend = get_alt_backend(handler, "os_crypt")
+        return handler, alt_backend
+
+    @property
+    def has_os_crypt_fallback(self):
+        """
+        test if there's a fallback handler to test against if os_crypt can't support
+        a specified secret (may be explicitly set to False for some subclasses)
+        """
+        return self._get_safe_crypt_handler_backend()[0] is not None
+
+    def _patch_safe_crypt(self):
+        """if crypt() doesn't support current hash alg, this patches
+        safe_crypt() so that it transparently uses another one of the handler's
+        backends, so that we can go ahead and test as much of the code path
+        as possible.
+ """ + # find handler & backend + handler, alt_backend = self._get_safe_crypt_handler_backend() + if not alt_backend: + raise AssertionError("handler has no available alternate backends!") + + # create subclass of handler, which we swap to an alternate backend + alt_handler = handler.using() + alt_handler.set_backend(alt_backend) + + def crypt_stub(secret, hash): + hash = alt_handler.genhash(secret, hash) + assert isinstance(hash, str) + return hash + + import passlib.utils as mod + self.patchAttr(mod, "_crypt", crypt_stub) + self.using_patched_crypt = True + + @classmethod + def _get_skip_backend_reason(cls, backend): + """ + make sure os_crypt backend is tested + when it's known os_crypt will be faked by _patch_safe_crypt() + """ + assert backend == "os_crypt" + reason = super(OsCryptMixin, cls)._get_skip_backend_reason(backend) + + from passlib.utils import has_crypt + if reason == cls._BACKEND_NOT_AVAILABLE and has_crypt: + if TEST_MODE("full") and cls._get_safe_crypt_handler_backend()[1]: + # in this case, _patch_safe_crypt() will monkeypatch os_crypt + # to use another backend, just so we can test os_crypt fully. + return None + else: + return "hash not supported by os crypt()" + + return reason + + #=================================================================== + # custom tests + #=================================================================== + + # TODO: turn into decorator, and use mock library. + def _use_mock_crypt(self): + """ + patch passlib.utils.safe_crypt() so it returns mock value for duration of test. + returns function whose .return_value controls what's returned. + this defaults to None. + """ + import passlib.utils as mod + + def mock_crypt(secret, config): + # let 'test' string through so _load_os_crypt_backend() will still work + if secret == "test": + return mock_crypt.__wrapped__(secret, config) + else: + return mock_crypt.return_value + + mock_crypt.__wrapped__ = mod._crypt + mock_crypt.return_value = None + + self.patchAttr(mod, "_crypt", mock_crypt) + + return mock_crypt + + def test_80_faulty_crypt(self): + """test with faulty crypt()""" + hash = self.get_sample_hash()[1] + exc_types = (exc.InternalBackendError,) + mock_crypt = self._use_mock_crypt() + + def test(value): + # set safe_crypt() to return specified value, and + # make sure assertion error is raised by handler. + mock_crypt.return_value = value + self.assertRaises(exc_types, self.do_genhash, "stub", hash) + self.assertRaises(exc_types, self.do_encrypt, "stub") + self.assertRaises(exc_types, self.do_verify, "stub", hash) + + test('$x' + hash[2:]) # detect wrong prefix + test(hash[:-1]) # detect too short + test(hash + 'x') # detect too long + + def test_81_crypt_fallback(self): + """test per-call crypt() fallback""" + + # mock up safe_crypt to return None + mock_crypt = self._use_mock_crypt() + mock_crypt.return_value = None + + if self.has_os_crypt_fallback: + # handler should have a fallback to use when os_crypt backend refuses to handle secret. 
+ h1 = self.do_encrypt("stub") + h2 = self.do_genhash("stub", h1) + self.assertEqual(h2, h1) + self.assertTrue(self.do_verify("stub", h1)) + else: + # handler should give up + from passlib.exc import InternalBackendError as err_type + hash = self.get_sample_hash()[1] + self.assertRaises(err_type, self.do_encrypt, 'stub') + self.assertRaises(err_type, self.do_genhash, 'stub', hash) + self.assertRaises(err_type, self.do_verify, 'stub', hash) + + @doesnt_require_backend + def test_82_crypt_support(self): + """ + test platform-specific crypt() support detection + + NOTE: this is mainly just a sanity check to ensure the runtime + detection is functioning correctly on some known platforms, + so that we can feel more confident it'll work right on unknown ones. + """ + + # skip wrapper handlers, won't ever have crypt support + if hasattr(self.handler, "orig_prefix"): + raise self.skipTest("not applicable to wrappers") + + # look for first entry that matches current system + # XXX: append "/" + platform.release() to string? + # XXX: probably should rework to support rows being dicts w/ "minver" / "maxver" keys, + # instead of hack where we add major # as part of platform regex. + using_backend = not self.using_patched_crypt + name = self.handler.name + platform = sys.platform + for pattern, expected in self.platform_crypt_support: + if re.match(pattern, platform): + break + else: + raise self.skipTest("no data for %r platform (current host support = %r)" % + (platform, using_backend)) + + # rules can use "state=None" to signal varied support; + # e.g. platform='freebsd8' ... sha256_crypt not added until 8.3 + if expected is None: + raise self.skipTest("varied support on %r platform (current host support = %r)" % + (platform, using_backend)) + + # compare expectation vs reality + if expected == using_backend: + pass + elif expected: + self.fail("expected %r platform would have native support for %r" % + (platform, name)) + else: + self.fail("did not expect %r platform would have native support for %r" % + (platform, name)) + + #=================================================================== + # fuzzy verified support -- add additional verifier that uses os crypt() + #=================================================================== + + def fuzz_verifier_crypt(self): + """test results against OS crypt()""" + + # don't use this if we're faking safe_crypt (pointless test), + # or if handler is a wrapper (only original handler will be supported by os) + handler = self.handler + if self.using_patched_crypt or hasattr(handler, "wrapped"): + return None + + # create a wrapper for fuzzy verified to use + from crypt import crypt + from passlib.utils import _safe_crypt_lock + encoding = self.FuzzHashGenerator.password_encoding + + def check_crypt(secret, hash): + """stdlib-crypt""" + if not self.crypt_supports_variant(hash): + return "skip" + # XXX: any reason not to use safe_crypt() here? or just want to test against bare metal? + secret = to_native_str(secret, encoding) + with _safe_crypt_lock: + return crypt(secret, hash) == hash + + return check_crypt + + def crypt_supports_variant(self, hash): + """ + fuzzy_verified_crypt() helper -- + used to determine if os crypt() supports a particular hash variant. 
+ """ + return True + + #=================================================================== + # eoc + #=================================================================== + +class UserHandlerMixin(HandlerCase): + """helper for handlers w/ 'user' context kwd; mixin for HandlerCase + + this overrides the HandlerCase test harness methods + so that a username is automatically inserted to hash/verify + calls. as well, passing in a pair of strings as the password + will be interpreted as (secret,user) + """ + #=================================================================== + # option flags + #=================================================================== + default_user = "user" + requires_user = True + user_case_insensitive = False + + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + #=================================================================== + # custom tests + #=================================================================== + def test_80_user(self): + """test user context keyword""" + handler = self.handler + password = 'stub' + hash = handler.hash(password, user=self.default_user) + + if self.requires_user: + self.assertRaises(TypeError, handler.hash, password) + self.assertRaises(TypeError, handler.genhash, password, hash) + self.assertRaises(TypeError, handler.verify, password, hash) + else: + # e.g. cisco_pix works with or without one. + handler.hash(password) + handler.genhash(password, hash) + handler.verify(password, hash) + + def test_81_user_case(self): + """test user case sensitivity""" + lower = self.default_user.lower() + upper = lower.upper() + hash = self.do_encrypt('stub', context=dict(user=lower)) + if self.user_case_insensitive: + self.assertTrue(self.do_verify('stub', hash, user=upper), + "user should not be case sensitive") + else: + self.assertFalse(self.do_verify('stub', hash, user=upper), + "user should be case sensitive") + + def test_82_user_salt(self): + """test user used as salt""" + config = self.do_stub_encrypt() + h1 = self.do_genhash('stub', config, user='admin') + h2 = self.do_genhash('stub', config, user='admin') + self.assertEqual(h2, h1) + h3 = self.do_genhash('stub', config, user='root') + self.assertNotEqual(h3, h1) + + # TODO: user size? kinda dicey, depends on algorithm. 
+
+    #===================================================================
+    # override test helpers
+    #===================================================================
+    def populate_context(self, secret, kwds):
+        """insert username into kwds"""
+        if isinstance(secret, tuple):
+            secret, user = secret
+        elif not self.requires_user:
+            return secret
+        else:
+            user = self.default_user
+        if 'user' not in kwds:
+            kwds['user'] = user
+        return secret
+
+    #===================================================================
+    # modify fuzz testing
+    #===================================================================
+    class FuzzHashGenerator(HandlerCase.FuzzHashGenerator):
+
+        context_map = HandlerCase.FuzzHashGenerator.context_map.copy()
+        context_map.update(user="random_user")
+
+        user_alphabet = u("asdQWE123")
+
+        def random_user(self):
+            rng = self.rng
+            if not self.test.requires_user and rng.random() < .1:
+                return None
+            return getrandstr(rng, self.user_alphabet, rng.randint(2,10))
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+class EncodingHandlerMixin(HandlerCase):
+    """helper for handlers w/ 'encoding' context kwd; mixin for HandlerCase
+
+    this overrides the HandlerCase test harness methods
+    so that an encoding can be inserted into hash/verify
+    calls: passing in a pair of strings as the password
+    will be interpreted as (secret, encoding)
+    """
+    #===================================================================
+    # instance attrs
+    #===================================================================
+    __unittest_skip = True
+
+    # restrict stock passwords & fuzz alphabet to latin-1,
+    # so different encodings can be tested safely.
+    stock_passwords = [
+        u("test"),
+        b"test",
+        u("\u00AC\u00BA"),
+    ]
+
+    class FuzzHashGenerator(HandlerCase.FuzzHashGenerator):
+
+        password_alphabet = u('qwerty1234<>.@*#! \u00AC')
+
+    def populate_context(self, secret, kwds):
+        """insert encoding into kwds"""
+        if isinstance(secret, tuple):
+            secret, encoding = secret
+            kwds.setdefault('encoding', encoding)
+        return secret
+    #===================================================================
+    # eoc
+    #===================================================================
+
+#=============================================================================
+# warnings helpers
+#=============================================================================
+class reset_warnings(warnings.catch_warnings):
+    """catch_warnings() wrapper which clears warning registry & filters"""
+
+    def __init__(self, reset_filter="always", reset_registry=".*", **kwds):
+        super(reset_warnings, self).__init__(**kwds)
+        self._reset_filter = reset_filter
+        self._reset_registry = re.compile(reset_registry) if reset_registry else None
+
+    def __enter__(self):
+        # let parent class archive filter state
+        ret = super(reset_warnings, self).__enter__()
+
+        # reset the filter to list everything
+        if self._reset_filter:
+            warnings.resetwarnings()
+            warnings.simplefilter(self._reset_filter)
+
+        # archive and clear the __warningregistry__ key for all modules
+        # that match the 'reset' pattern.
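+        # NOTE: the warnings module records every warning it has already
+        # issued in the issuing module's __warningregistry__; unless that
+        # cache is cleared, a warning raised once in an earlier test can be
+        # silently suppressed in later ones.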
+ pattern = self._reset_registry + if pattern: + backup = self._orig_registry = {} + for name, mod in list(sys.modules.items()): + if mod is None or not pattern.match(name): + continue + reg = getattr(mod, "__warningregistry__", None) + if reg: + backup[name] = reg.copy() + reg.clear() + return ret + + def __exit__(self, *exc_info): + # restore warning registry for all modules + pattern = self._reset_registry + if pattern: + # restore registry backup, clearing all registry entries that we didn't archive + backup = self._orig_registry + for name, mod in list(sys.modules.items()): + if mod is None or not pattern.match(name): + continue + reg = getattr(mod, "__warningregistry__", None) + if reg: + reg.clear() + orig = backup.get(name) + if orig: + if reg is None: + setattr(mod, "__warningregistry__", orig) + else: + reg.update(orig) + super(reset_warnings, self).__exit__(*exc_info) + +#============================================================================= +# eof +#============================================================================= diff --git a/venv/lib/python3.12/site-packages/passlib/totp.py b/venv/lib/python3.12/site-packages/passlib/totp.py new file mode 100644 index 0000000..9ad5000 --- /dev/null +++ b/venv/lib/python3.12/site-packages/passlib/totp.py @@ -0,0 +1,1908 @@ +"""passlib.totp -- TOTP / RFC6238 / Google Authenticator utilities.""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function +from passlib.utils.compat import PY3 +# core +import base64 +import calendar +import json +import logging; log = logging.getLogger(__name__) +import math +import struct +import sys +import time as _time +import re +if PY3: + from urllib.parse import urlparse, parse_qsl, quote, unquote +else: + from urllib import quote, unquote + from urlparse import urlparse, parse_qsl +from warnings import warn +# site +try: + # TOTP encrypted keys only supported if cryptography (https://cryptography.io) is installed + from cryptography.hazmat.backends import default_backend as _cg_default_backend + import cryptography.hazmat.primitives.ciphers.algorithms + import cryptography.hazmat.primitives.ciphers.modes + from cryptography.hazmat.primitives import ciphers as _cg_ciphers + del cryptography +except ImportError: + log.debug("can't import 'cryptography' package, totp encryption disabled") + _cg_ciphers = _cg_default_backend = None +# pkg +from passlib import exc +from passlib.exc import TokenError, MalformedTokenError, InvalidTokenError, UsedTokenError +from passlib.utils import (to_unicode, to_bytes, consteq, + getrandbytes, rng, SequenceMixin, xor_bytes, getrandstr) +from passlib.utils.binary import BASE64_CHARS, b32encode, b32decode +from passlib.utils.compat import (u, unicode, native_string_types, bascii_to_str, int_types, num_types, + irange, byte_elem_value, UnicodeIO, suppress_cause) +from passlib.utils.decor import hybrid_method, memoized_property +from passlib.crypto.digest import lookup_hash, compile_hmac, pbkdf2_hmac +from passlib.hash import pbkdf2_sha256 +# local +__all__ = [ + # frontend classes + "AppWallet", + "TOTP", + + # errors (defined in passlib.exc, but exposed here for convenience) + "TokenError", + "MalformedTokenError", + "InvalidTokenError", + "UsedTokenError", + + # internal helper classes + "TotpToken", + "TotpMatch", +] + 
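+# NOTE: rough sketch of how the classes below fit together (key/token values
+# here are illustrative only -- see the TOTP.using() docstring for a concrete
+# example):
+#
+#   >>> from passlib.totp import TOTP
+#   >>> TotpFactory = TOTP.using(issuer="example.org")
+#   >>> totp = TotpFactory.new()        # generate a fresh random key
+#   >>> totp.pretty_key()               # grouped form for manual entry
+#   'D6RZ-I4YD-...'
+#   >>> totp.generate().token           # current time-based token
+#   '123456'
+#   >>> totp.match('123456')            # verify; raises TokenError on failure
+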
+#=============================================================================
+# HACK: python < 2.7.4's urlparse() won't parse query strings unless the url scheme
+#       is one of the schemes in the urlparse.uses_query list. 2.7 abandoned
+#       this, and parses query if present, regardless of the scheme.
+#       as a workaround for older versions, we add "otpauth" to the known list.
+#       this was fixed by https://bugs.python.org/issue9374, in 2.7.4 release.
+#=============================================================================
+if sys.version_info < (2,7,4):
+    from urlparse import uses_query
+    if "otpauth" not in uses_query:
+        uses_query.append("otpauth")
+        log.debug("registered 'otpauth' scheme with urlparse.uses_query")
+    del uses_query
+
+#=============================================================================
+# internal helpers
+#=============================================================================
+
+#-----------------------------------------------------------------------------
+# token parsing / rendering helpers
+#-----------------------------------------------------------------------------
+
+#: regex used to clean whitespace from tokens & keys
+_clean_re = re.compile(u(r"\s|[-=]"), re.U)
+
+_chunk_sizes = [4,6,5]
+
+def _get_group_size(klen):
+    """
+    helper for group_string() --
+    calculates optimal size of group for given string size.
+    """
+    # look for exact divisor
+    for size in _chunk_sizes:
+        if not klen % size:
+            return size
+    # fallback to divisor with largest remainder
+    # (so chunks are as close to even as possible)
+    best = _chunk_sizes[0]
+    rem = 0
+    for size in _chunk_sizes:
+        if klen % size > rem:
+            best = size
+            rem = klen % size
+    return best
+
+def group_string(value, sep="-"):
+    """
+    reformat string into (roughly) evenly-sized groups, separated by **sep**.
+    useful for making tokens & keys easier to read by humans.
+    """
+    klen = len(value)
+    size = _get_group_size(klen)
+    return sep.join(value[o:o+size] for o in irange(0, klen, size))
+
+#-----------------------------------------------------------------------------
+# encoding helpers
+#-----------------------------------------------------------------------------
+
+def _decode_bytes(key, format):
+    """
+    internal TOTP() helper --
+    decodes key according to specified format.
+    """
+    if format == "raw":
+        if not isinstance(key, bytes):
+            raise exc.ExpectedTypeError(key, "bytes", "key")
+        return key
+    # for encoded data, key must be either unicode or ascii-encoded bytes,
+    # and must contain a hex or base32 string.
+    key = to_unicode(key, param="key")
+    key = _clean_re.sub("", key).encode("utf-8") # strip whitespace & hyphens
+    if format == "hex" or format == "base16":
+        return base64.b16decode(key.upper())
+    elif format == "base32":
+        return b32decode(key)
+    # XXX: add base64 support?
+    else:
+        raise ValueError("unknown byte-encoding format: %r" % (format,))
+
+#=============================================================================
+# OTP management
+#=============================================================================
+
+#: flag for detecting if encrypted totp support is present
+AES_SUPPORT = bool(_cg_ciphers)
+
+#: regex for validating secret tags
+_tag_re = re.compile("(?i)^[a-z0-9][a-z0-9_.-]*$")
+
+class AppWallet(object):
+    """
+    This class stores application-wide secrets that can be used
+    to encrypt & decrypt TOTP keys for storage.
+    It's mostly an internal detail, applications usually just need
+    to pass ``secrets`` or ``secrets_path`` to :meth:`TOTP.using`.
+
+    .. seealso::
+
+        :ref:`totp-storing-instances` for more details on this workflow.
+
+    Arguments
+    =========
+    :param secrets:
+        Dict of application secrets to use when encrypting/decrypting
+        stored TOTP keys. This should include a secret to use when encrypting
+        new keys, but may contain additional older secrets to decrypt
+        existing stored keys.
+
+        The dict should map tags -> secrets, so that each secret is identified
+        by a unique tag. This tag will be stored along with the encrypted
+        key in order to determine which secret should be used for decryption.
+        Tag should be a string that starts with regex range ``[a-z0-9]``,
+        and the remaining characters must be in ``[a-z0-9_.-]``.
+
+        It is recommended to use something like an incremental counter
+        ("1", "2", ...), an ISO date ("2016-01-01", "2016-05-16", ...),
+        or a timestamp ("19803495", "19813495", ...) when assigning tags.
+
+        This mapping may be provided in three formats:
+
+        * A python dict mapping tag -> secret
+        * A JSON-formatted string containing the dict
+        * A multiline string with the format ``"tag: value\\ntag: value\\n..."``
+
+        (This last format is mainly useful when loading from a text file via **secrets_path**)
+
+        .. seealso:: :func:`generate_secret` to create a secret with sufficient entropy
+
+    :param secrets_path:
+        Alternately, callers can specify a separate file where the
+        application-wide secrets are stored, using either of the string
+        formats described in **secrets**.
+
+    :param default_tag:
+        Specifies which tag in **secrets** should be used as the default
+        for encrypting new keys. If omitted, the tags will be sorted,
+        and the largest tag used as the default.
+
+        if all tags are numeric, they will be sorted numerically;
+        otherwise they will be sorted alphabetically.
+        this permits tags to be assigned numerically,
+        or e.g. using ``YYYY-MM-DD`` dates.
+
+    :param encrypt_cost:
+        Optional time-cost factor for key encryption.
+        This value corresponds to log2() of the number of PBKDF2
+        rounds used.
+
+    .. warning::
+
+        The application secret(s) should be stored in a secure location by
+        your application, and each secret should contain a large amount
+        of entropy (to prevent brute-force attacks if the encrypted keys
+        are leaked).
+
+        :func:`generate_secret` is provided as a convenience helper
+        to generate a new application secret of suitable size.
+
+        Best practice is to load these values from a file via **secrets_path**,
+        and then have your application give up permission to read this file
+        once it's running.
+
+    Public Methods
+    ==============
+    .. autoattribute:: has_secrets
+    .. autoattribute:: default_tag
+
+    Semi-Private Methods
+    ====================
+    The following methods are used internally by the :class:`TOTP`
+    class in order to encrypt & decrypt keys using the provided application
+    secrets. They will generally not be publicly useful, and may have their
+    API changed periodically.
+
+    .. automethod:: get_secret
+    .. automethod:: encrypt_key
+    .. automethod:: decrypt_key
+    """
+    #========================================================================
+    # instance attrs
+    #========================================================================
+
+    #: default salt size for encrypt_key() output
+    salt_size = 12
+
+    #: default cost (log2 of pbkdf2 rounds) for encrypt_key() output
+    #: NOTE: this is relatively low, since the majority of the security
+    #:       relies on a high entropy secret to pass to AES.
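+    #:       (cost 14 == 2**14 == 16384 rounds of PBKDF2-HMAC-SHA256,
+    #:       per _cipher_aes_key() below)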
+ encrypt_cost = 14 + + #: map of secret tag -> secret bytes + _secrets = None + + #: tag for default secret + default_tag = None + + #======================================================================== + # init + #======================================================================== + def __init__(self, secrets=None, default_tag=None, encrypt_cost=None, + secrets_path=None): + + # TODO: allow a lot more things to be customized from here, + # e.g. setting default TOTP constructor options. + + # + # init cost + # + if encrypt_cost is not None: + if isinstance(encrypt_cost, native_string_types): + encrypt_cost = int(encrypt_cost) + assert encrypt_cost >= 0 + self.encrypt_cost = encrypt_cost + + # + # init secrets map + # + + # load secrets from file (if needed) + if secrets_path is not None: + if secrets is not None: + raise TypeError("'secrets' and 'secrets_path' are mutually exclusive") + secrets = open(secrets_path, "rt").read() + + # parse & store secrets + secrets = self._secrets = self._parse_secrets(secrets) + + # + # init default tag/secret + # + if secrets: + if default_tag is not None: + # verify that tag is present in map + self.get_secret(default_tag) + elif all(tag.isdigit() for tag in secrets): + default_tag = max(secrets, key=int) + else: + default_tag = max(secrets) + self.default_tag = default_tag + + def _parse_secrets(self, source): + """ + parse 'secrets' parameter + + :returns: + Dict[tag:str, secret:bytes] + """ + # parse string formats + # to make this easy to pass in configuration from a separate file, + # 'secrets' can be string using two formats -- json & "tag:value\n" + check_type = True + if isinstance(source, native_string_types): + if source.lstrip().startswith(("[", "{")): + # json list / dict + source = json.loads(source) + elif "\n" in source and ":" in source: + # multiline string containing series of "tag: value\n" rows; + # empty and "#\n" rows are ignored + def iter_pairs(source): + for line in source.splitlines(): + line = line.strip() + if line and not line.startswith("#"): + tag, secret = line.split(":", 1) + yield tag.strip(), secret.strip() + source = iter_pairs(source) + check_type = False + else: + raise ValueError("unrecognized secrets string format") + + # ensure we have iterable of (tag, value) pairs + if source is None: + return {} + elif isinstance(source, dict): + source = source.items() + # XXX: could support iterable of (tag,value) pairs, but not yet needed... 
+ # elif check_type and (isinstance(source, str) or not isinstance(source, Iterable)): + elif check_type: + raise TypeError("'secrets' must be mapping, or list of items") + + # parse into final dict, normalizing contents + return dict(self._parse_secret_pair(tag, value) + for tag, value in source) + + def _parse_secret_pair(self, tag, value): + if isinstance(tag, native_string_types): + pass + elif isinstance(tag, int): + tag = str(tag) + else: + raise TypeError("tag must be unicode/string: %r" % (tag,)) + if not _tag_re.match(tag): + raise ValueError("tag contains invalid characters: %r" % (tag,)) + if not isinstance(value, bytes): + value = to_bytes(value, param="secret %r" % (tag,)) + if not value: + raise ValueError("tag contains empty secret: %r" % (tag,)) + return tag, value + + #======================================================================== + # accessing secrets + #======================================================================== + + @property + def has_secrets(self): + """whether at least one application secret is present""" + return self.default_tag is not None + + def get_secret(self, tag): + """ + resolve a secret tag to the secret (as bytes). + throws a KeyError if not found. + """ + secrets = self._secrets + if not secrets: + raise KeyError("no application secrets configured") + try: + return secrets[tag] + except KeyError: + raise suppress_cause(KeyError("unknown secret tag: %r" % (tag,))) + + #======================================================================== + # encrypted key helpers -- used internally by TOTP + #======================================================================== + + @staticmethod + def _cipher_aes_key(value, secret, salt, cost, decrypt=False): + """ + Internal helper for :meth:`encrypt_key` -- + handles lowlevel encryption/decryption. + + Algorithm details: + + This function uses PBKDF2-HMAC-SHA256 to generate a 32-byte AES key + and a 16-byte IV from the application secret & random salt. + It then uses AES-256-CTR to encrypt/decrypt the TOTP key. + + CTR mode was chosen over CBC because the main attack scenario here + is that the attacker has stolen the database, and is trying to decrypt a TOTP key + (the plaintext value here). To make it hard for them, we want every password + to decrypt to a potentially valid key -- thus need to avoid any authentication + or padding oracle attacks. While some random padding construction could be devised + to make this work for CBC mode, a stream cipher mode is just plain simpler. + OFB/CFB modes would also work here, but seeing as they have malleability + and cyclic issues (though remote and barely relevant here), + CTR was picked as the best overall choice. + """ + # make sure backend AES support is available + if _cg_ciphers is None: + raise RuntimeError("TOTP encryption requires 'cryptography' package " + "(https://cryptography.io)") + + # use pbkdf2 to derive both key (32 bytes) & iv (16 bytes) + # NOTE: this requires 2 sha256 blocks to be calculated. + keyiv = pbkdf2_hmac("sha256", secret, salt=salt, rounds=(1 << cost), keylen=48) + + # use AES-256-CTR to encrypt/decrypt input value + cipher = _cg_ciphers.Cipher(_cg_ciphers.algorithms.AES(keyiv[:32]), + _cg_ciphers.modes.CTR(keyiv[32:]), + _cg_default_backend()) + ctx = cipher.decryptor() if decrypt else cipher.encryptor() + return ctx.update(value) + ctx.finalize() + + def encrypt_key(self, key): + """ + Helper used to encrypt TOTP keys for storage. + + :param key: + TOTP key to encrypt, as raw bytes. 
+
+        :returns:
+            dict containing encrypted TOTP key & configuration parameters.
+            this format should be treated as opaque, and potentially subject
+            to change, though it is designed to be easily serialized/deserialized
+            (e.g. via JSON).
+
+        .. note::
+
+            This function requires installation of the external
+            `cryptography <https://cryptography.io>`_ package.
+
+            To give some algorithm details: This function uses AES-256-CTR to encrypt
+            the provided data. It takes the application secret and randomly generated salt,
+            and uses PBKDF2-HMAC-SHA256 to combine them and generate the AES key & IV.
+        """
+        if not key:
+            raise ValueError("no key provided")
+        salt = getrandbytes(rng, self.salt_size)
+        cost = self.encrypt_cost
+        tag = self.default_tag
+        if not tag:
+            raise TypeError("no application secrets configured, can't encrypt OTP key")
+        ckey = self._cipher_aes_key(key, self.get_secret(tag), salt, cost)
+        # XXX: switch to base64?
+        return dict(v=1, c=cost, t=tag, s=b32encode(salt), k=b32encode(ckey))
+
+    def decrypt_key(self, enckey):
+        """
+        Helper used to decrypt TOTP keys from storage format.
+        Consults configured secrets to decrypt key.
+
+        :param enckey:
+            encrypted key dict, as returned by :meth:`encrypt_key`.
+
+        :returns:
+            ``(key, needs_recrypt)`` --
+
+            **key** will be the decrypted key, as bytes.
+
+            **needs_recrypt** will be a boolean flag indicating
+            whether encryption cost or default tag is too old,
+            and hence that key needs re-encrypting before storing.
+
+        .. note::
+
+            This function requires installation of the external
+            `cryptography <https://cryptography.io>`_ package.
+        """
+        if not isinstance(enckey, dict):
+            raise TypeError("'enckey' must be dictionary")
+        version = enckey.get("v", None)
+        needs_recrypt = False
+        if version == 1:
+            _cipher_key = self._cipher_aes_key
+        else:
+            raise ValueError("missing / unrecognized 'enckey' version: %r" % (version,))
+        tag = enckey['t']
+        cost = enckey['c']
+        key = _cipher_key(
+            value=b32decode(enckey['k']),
+            secret=self.get_secret(tag),
+            salt=b32decode(enckey['s']),
+            cost=cost,
+        )
+        if cost != self.encrypt_cost or tag != self.default_tag:
+            needs_recrypt = True
+        return key, needs_recrypt
+
+    #=============================================================================
+    # eoc
+    #=============================================================================
+
+#=============================================================================
+# TOTP class
+#=============================================================================
+
+#: helper to convert HOTP counter to bytes
+_pack_uint64 = struct.Struct(">Q").pack
+
+#: helper to extract value from HOTP digest
+_unpack_uint32 = struct.Struct(">I").unpack
+
+#: dummy bytes used as temp key for .using() method
+_DUMMY_KEY = b"\x00" * 16
+
+class TOTP(object):
+    """
+    Helper for generating and verifying TOTP codes.
+
+    Given a secret key and set of configuration options, this object
+    offers methods for token generation, token validation, and serialization.
+    It can also be used to track important persistent TOTP state,
+    such as the last counter used.
+
+    This class accepts the following options
+    (only **key** and **format** may be specified as positional arguments).
+
+    :arg str key:
+        The secret key to use. By default, should be encoded as
+        a base32 string (see **format** for other encodings).
+
+        Exactly one of **key** or ``new=True`` must be specified.
+
+    :arg str format:
+        The encoding used by the **key** parameter. May be one of:
+        ``"base32"`` (base32-encoded string),
+        ``"hex"`` (hexadecimal string), or ``"raw"`` (raw bytes).
+        Defaults to ``"base32"``.
+
+    :param bool new:
+        If ``True``, a new key will be generated using :class:`random.SystemRandom`.
+
+        Exactly one of **key** or ``new=True`` must be specified.
+
+    :param str label:
+        Label to associate with this token when generating a URI.
+        Displayed to user by most OTP client applications (e.g. Google Authenticator),
+        and typically has format such as ``"John Smith"`` or ``"jsmith@webservice.example.org"``.
+        Defaults to ``None``.
+        See :meth:`to_uri` for details.
+
+    :param str issuer:
+        String identifying the token issuer (e.g. the domain name of your service).
+        Used internally by some OTP client applications (e.g. Google Authenticator) to distinguish entries
+        which otherwise have the same label.
+        Optional but strongly recommended if you're rendering to a URI.
+        Defaults to ``None``.
+        See :meth:`to_uri` for details.
+
+    :param int size:
+        Number of bytes when generating new keys. Defaults to size of hash algorithm (e.g. 20 for SHA1).
+
+    .. warning::
+
+        Overriding the default values for ``digits``, ``period``, or ``alg`` may
+        cause problems with some OTP client programs (such as Google Authenticator),
+        which may have these defaults hardcoded.
+
+    :param int digits:
+        The number of digits in the generated / accepted tokens. Defaults to ``6``.
+        Must be in range [6 .. 10].
+
+        .. rst-class:: inline-title
+        .. caution::
+           Due to a limitation of the HOTP algorithm, the 10th digit can only take on values 0 .. 2,
+           and thus offers very little extra security.
+
+    :param str alg:
+        Name of hash algorithm to use. Defaults to ``"sha1"``.
+        ``"sha256"`` and ``"sha512"`` are also accepted, per :rfc:`6238`.
+
+    :param int period:
+        The time-step period to use, in integer seconds. Defaults to ``30``.
+
+    ..
+        See the passlib documentation for a full list of attributes & methods.
+    """
+    #=============================================================================
+    # class attrs
+    #=============================================================================
+
+    #: minimum number of bytes to allow in key, enforced by passlib.
+    # XXX: see if spec says anything relevant to this.
+    _min_key_size = 10
+
+    #: minimum & current serialization version (may be set independently by subclasses)
+    min_json_version = json_version = 1
+
+    #: AppWallet that this class will use for encrypting/decrypting keys.
+    #: (can be overwritten via the :meth:`TOTP.using()` constructor)
+    wallet = None
+
+    #: function to get system time in seconds, as needed by :meth:`generate` and :meth:`verify`.
+    #: defaults to :func:`time.time`, but can be overridden on a per-instance basis.
+    now = _time.time
+
+    #=============================================================================
+    # instance attrs
+    #=============================================================================
+
+    #---------------------------------------------------------------------------
+    # configuration attrs
+    #---------------------------------------------------------------------------
+
+    #: [private] secret key as raw :class:`!bytes`
+    #: see .key property for public access.
+    _key = None
+
+    #: [private] cached copy of encrypted secret,
+    #: so .to_json() doesn't have to re-encrypt on each call.
+    _encrypted_key = None
+
+    #: [private] cached copy of keyed HMAC function,
+    #: so ._generate() doesn't have to rebuild this each time
+    #: ._find_match() invokes it.
+ _keyed_hmac = None + + #: number of digits in the generated tokens. + digits = 6 + + #: name of hash algorithm in use (e.g. ``"sha1"``) + alg = "sha1" + + #: default label for :meth:`to_uri` + label = None + + #: default issuer for :meth:`to_uri` + issuer = None + + #: number of seconds per counter step. + #: *(TOTP uses an internal time-derived counter which + #: increments by 1 every* :attr:`!period` *seconds)*. + period = 30 + + #--------------------------------------------------------------------------- + # state attrs + #--------------------------------------------------------------------------- + + #: Flag set by deserialization methods to indicate the object needs to be re-serialized. + #: This can be for a number of reasons -- encoded using deprecated format, + #: or encrypted using a deprecated key or too few rounds. + changed = False + + #============================================================================= + # prototype construction + #============================================================================= + @classmethod + def using(cls, digits=None, alg=None, period=None, + issuer=None, wallet=None, now=None, **kwds): + """ + Dynamically create subtype of :class:`!TOTP` class + which has the specified defaults set. + + :parameters: **digits, alg, period, issuer**: + + All these options are the same as in the :class:`TOTP` constructor, + and the resulting class will use any values you specify here + as the default for all TOTP instances it creates. + + :param wallet: + Optional :class:`AppWallet` that will be used for encrypting/decrypting keys. + + :param secrets, secrets_path, encrypt_cost: + + If specified, these options will be passed to the :class:`AppWallet` constructor, + allowing you to directly specify the secret keys that should be used + to encrypt & decrypt stored keys. + + :returns: + subclass of :class:`!TOTP`. + + This method is useful for creating a TOTP class configured + to use your application's secrets for encrypting & decrypting + keys, as well as create new keys using it's desired configuration defaults. + + As an example:: + + >>> # your application can create a custom class when it initializes + >>> from passlib.totp import TOTP, generate_secret + >>> TotpFactory = TOTP.using(secrets={"1": generate_secret()}) + + >>> # subsequent TOTP objects created from this factory + >>> # will use the specified secrets to encrypt their keys... + >>> totp = TotpFactory.new() + >>> totp.to_dict() + {'enckey': {'c': 14, + 'k': 'H77SYXWORDPGVOQTFRR2HFUB3C45XXI7', + 's': 'G5DOQPIHIBUM2OOHHADQ', + 't': '1', + 'v': 1}, + 'type': 'totp', + 'v': 1} + + .. seealso:: :ref:`totp-creation` and :ref:`totp-storing-instances` tutorials for a usage example + """ + # XXX: could add support for setting default match 'window' and 'reuse' policy + + # :param now: + # Optional callable that should return current time for generator to use. + # Default to :func:`time.time`. This optional is generally not needed, + # and is mainly present for examples & unit-testing. + + subcls = type("TOTP", (cls,), {}) + + def norm_param(attr, value): + """ + helper which uses constructor to validate parameter value. + it returns corresponding attribute, so we use normalized value. + """ + # NOTE: this creates *subclass* instance, + # so normalization takes into account any custom params + # already stored. 
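+            # (_DUMMY_KEY merely satisfies the required 'key' argument; only
+            # the normalized attribute is read back off the throwaway object)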
+ kwds = dict(key=_DUMMY_KEY, format="raw") + kwds[attr] = value + obj = subcls(**kwds) + return getattr(obj, attr) + + if digits is not None: + subcls.digits = norm_param("digits", digits) + + if alg is not None: + subcls.alg = norm_param("alg", alg) + + if period is not None: + subcls.period = norm_param("period", period) + + # XXX: add default size as configurable parameter? + + if issuer is not None: + subcls.issuer = norm_param("issuer", issuer) + + if kwds: + subcls.wallet = AppWallet(**kwds) + if wallet: + raise TypeError("'wallet' and 'secrets' keywords are mutually exclusive") + elif wallet is not None: + if not isinstance(wallet, AppWallet): + raise exc.ExpectedTypeError(wallet, AppWallet, "wallet") + subcls.wallet = wallet + + if now is not None: + assert isinstance(now(), num_types) and now() >= 0, \ + "now() function must return non-negative int/float" + subcls.now = staticmethod(now) + + return subcls + + #============================================================================= + # init + #============================================================================= + + @classmethod + def new(cls, **kwds): + """ + convenience alias for creating new TOTP key, same as ``TOTP(new=True)`` + """ + return cls(new=True, **kwds) + + def __init__(self, key=None, format="base32", + # keyword only... + new=False, digits=None, alg=None, size=None, period=None, + label=None, issuer=None, changed=False, + **kwds): + super(TOTP, self).__init__(**kwds) + if changed: + self.changed = changed + + # validate & normalize alg + info = lookup_hash(alg or self.alg) + self.alg = info.name + digest_size = info.digest_size + if digest_size < 4: + raise RuntimeError("%r hash digest too small" % alg) + + # parse or generate new key + if new: + # generate new key + if key: + raise TypeError("'key' and 'new=True' are mutually exclusive") + if size is None: + # default to digest size, per RFC 6238 Section 5.1 + size = digest_size + elif size > digest_size: + # not forbidden by spec, but would just be wasted bytes. + # maybe just warn about this? + raise ValueError("'size' should be less than digest size " + "(%d)" % digest_size) + self.key = getrandbytes(rng, size) + elif not key: + raise TypeError("must specify either an existing 'key', or 'new=True'") + elif format == "encrypted": + # NOTE: this handles decrypting & setting '.key' + self.encrypted_key = key + elif key: + # use existing key, encoded using specified + self.key = _decode_bytes(key, format) + + # enforce min key size + if len(self.key) < self._min_key_size: + # only making this fatal for new=True, + # so that existing (but ridiculously small) keys can still be used. 
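+            # (10 bytes == 80 bits; for comparison, RFC 4226 requires shared
+            # secrets be at least 128 bits, and recommends 160)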
+ msg = "for security purposes, secret key must be >= %d bytes" % self._min_key_size + if new: + raise ValueError(msg) + else: + warn(msg, exc.PasslibSecurityWarning, stacklevel=1) + + # validate digits + if digits is None: + digits = self.digits + if not isinstance(digits, int_types): + raise TypeError("digits must be an integer, not a %r" % type(digits)) + if digits < 6 or digits > 10: + raise ValueError("digits must in range(6,11)") + self.digits = digits + + # validate label + if label: + self._check_label(label) + self.label = label + + # validate issuer + if issuer: + self._check_issuer(issuer) + self.issuer = issuer + + # init period + if period is not None: + self._check_serial(period, "period", minval=1) + self.period = period + + #============================================================================= + # helpers to verify value types & ranges + #============================================================================= + + @staticmethod + def _check_serial(value, param, minval=0): + """ + check that serial value (e.g. 'counter') is non-negative integer + """ + if not isinstance(value, int_types): + raise exc.ExpectedTypeError(value, "int", param) + if value < minval: + raise ValueError("%s must be >= %d" % (param, minval)) + + @staticmethod + def _check_label(label): + """ + check that label doesn't contain chars forbidden by KeyURI spec + """ + if label and ":" in label: + raise ValueError("label may not contain ':'") + + @staticmethod + def _check_issuer(issuer): + """ + check that issuer doesn't contain chars forbidden by KeyURI spec + """ + if issuer and ":" in issuer: + raise ValueError("issuer may not contain ':'") + + #============================================================================= + # key attributes + #============================================================================= + + #------------------------------------------------------------------ + # raw key + #------------------------------------------------------------------ + @property + def key(self): + """ + secret key as raw bytes + """ + return self._key + + @key.setter + def key(self, value): + # set key + if not isinstance(value, bytes): + raise exc.ExpectedTypeError(value, bytes, "key") + self._key = value + + # clear cached properties derived from key + self._encrypted_key = self._keyed_hmac = None + + #------------------------------------------------------------------ + # encrypted key + #------------------------------------------------------------------ + @property + def encrypted_key(self): + """ + secret key, encrypted using application secret. + this match the output of :meth:`AppWallet.encrypt_key`, + and should be treated as an opaque json serializable object. 
+ """ + enckey = self._encrypted_key + if enckey is None: + wallet = self.wallet + if not wallet: + raise TypeError("no application secrets present, can't encrypt TOTP key") + enckey = self._encrypted_key = wallet.encrypt_key(self.key) + return enckey + + @encrypted_key.setter + def encrypted_key(self, value): + wallet = self.wallet + if not wallet: + raise TypeError("no application secrets present, can't decrypt TOTP key") + self.key, needs_recrypt = wallet.decrypt_key(value) + if needs_recrypt: + # mark as changed so it gets re-encrypted & written to db + self.changed = True + else: + # cache encrypted key for re-use + self._encrypted_key = value + + #------------------------------------------------------------------ + # pretty-printed / encoded key helpers + #------------------------------------------------------------------ + + @property + def hex_key(self): + """ + secret key encoded as hexadecimal string + """ + return bascii_to_str(base64.b16encode(self.key)).lower() + + @property + def base32_key(self): + """ + secret key encoded as base32 string + """ + return b32encode(self.key) + + def pretty_key(self, format="base32", sep="-"): + """ + pretty-print the secret key. + + This is mainly useful for situations where the user cannot get the qrcode to work, + and must enter the key manually into their TOTP client. It tries to format + the key in a manner that is easier for humans to read. + + :param format: + format to output secret key. ``"hex"`` and ``"base32"`` are both accepted. + + :param sep: + separator to insert to break up key visually. + can be any of ``"-"`` (the default), ``" "``, or ``False`` (no separator). + + :return: + key as native string. + + Usage example:: + + >>> t = TOTP('s3jdvb7qd2r7jpxx') + >>> t.pretty_key() + 'S3JD-VB7Q-D2R7-JPXX' + """ + if format == "hex" or format == "base16": + key = self.hex_key + elif format == "base32": + key = self.base32_key + else: + raise ValueError("unknown byte-encoding format: %r" % (format,)) + if sep: + key = group_string(key, sep) + return key + + #============================================================================= + # time & token parsing + #============================================================================= + + @classmethod + def normalize_time(cls, time): + """ + Normalize time value to unix epoch seconds. + + :arg time: + Can be ``None``, :class:`!datetime`, + or unix epoch timestamp as :class:`!float` or :class:`!int`. + If ``None``, uses current system time. + Naive datetimes are treated as UTC. + + :returns: + unix epoch timestamp as :class:`int`. + """ + if isinstance(time, int_types): + return time + elif isinstance(time, float): + return int(time) + elif time is None: + return int(cls.now()) + elif hasattr(time, "utctimetuple"): + # coerce datetime to UTC timestamp + # NOTE: utctimetuple() assumes naive datetimes are in UTC + # NOTE: we explicitly *don't* want microseconds. + return calendar.timegm(time.utctimetuple()) + else: + raise exc.ExpectedTypeError(time, "int, float, or datetime", "time") + + def _time_to_counter(self, time): + """ + convert timestamp to HOTP counter using :attr:`period`. + """ + return time // self.period + + def _counter_to_time(self, counter): + """ + convert HOTP counter to timestamp using :attr:`period`. + """ + return counter * self.period + + @hybrid_method + def normalize_token(self_or_cls, token): + """ + Normalize OTP token representation: + strips whitespace, converts integers to a zero-padded string, + validates token content & number of digits. 
+
+        This is a hybrid method -- it can be called at the class level,
+        as ``TOTP.normalize_token()``, or the instance level as ``TOTP().normalize_token()``.
+        It will normalize to the instance-specific number of :attr:`~TOTP.digits`,
+        or use the class default.
+
+        :arg token:
+            token as ascii bytes, unicode, or an integer.
+
+        :raises ValueError:
+            if token has wrong number of digits, or contains non-numeric characters.
+
+        :returns:
+            token as :class:`!unicode` string, containing only digits 0-9.
+        """
+        digits = self_or_cls.digits
+        if isinstance(token, int_types):
+            token = u("%0*d") % (digits, token)
+        else:
+            token = to_unicode(token, param="token")
+            token = _clean_re.sub(u(""), token)
+            if not token.isdigit():
+                raise MalformedTokenError("Token must contain only the digits 0-9")
+        if len(token) != digits:
+            raise MalformedTokenError("Token must have exactly %d digits" % digits)
+        return token
+
+    #=============================================================================
+    # token generation
+    #=============================================================================
+
+#    # debug helper
+#    def generate_range(self, size, time=None):
+#        counter = self._time_to_counter(time) - (size + 1) // 2
+#        end = counter + size
+#        while counter <= end:
+#            token = self._generate(counter)
+#            yield TotpToken(self, token, counter)
+#            counter += 1
+
+    def generate(self, time=None):
+        """
+        Generate token for specified time
+        (uses current time if none specified).
+
+        :arg time:
+            Can be ``None``, a :class:`!datetime`,
+            or :class:`!float` / :class:`!int` unix epoch timestamp.
+            If ``None`` (the default), uses current system time.
+            Naive datetimes are treated as UTC.
+
+        :returns:
+
+            A :class:`TotpToken` instance, which can be treated
+            as a sequence of ``(token, expire_time)`` -- see that class
+            for more details.
+
+        Usage example::
+
+            >>> # generate a new token, wrapped in a TotpToken instance...
+            >>> otp = TOTP('s3jdvb7qd2r7jpxx')
+            >>> otp.generate(1419622739)
+            <TotpToken token='897212' expire_time=1419622740>
+
+            >>> # when you just need the token...
+            >>> otp.generate(1419622739).token
+            '897212'
+        """
+        time = self.normalize_time(time)
+        counter = self._time_to_counter(time)
+        if counter < 0:
+            raise ValueError("timestamp must be >= 0")
+        token = self._generate(counter)
+        return TotpToken(self, token, counter)
+
+    def _generate(self, counter):
+        """
+        base implementation of HOTP token generation algorithm.
+
+        :arg counter: HOTP counter, as non-negative integer
+        :returns: token as unicode string
+        """
+        # generate digest
+        assert isinstance(counter, int_types), "counter must be integer"
+        assert counter >= 0, "counter must be non-negative"
+        keyed_hmac = self._keyed_hmac
+        if keyed_hmac is None:
+            keyed_hmac = self._keyed_hmac = compile_hmac(self.alg, self.key)
+        digest = keyed_hmac(_pack_uint64(counter))
+        digest_size = keyed_hmac.digest_info.digest_size
+        assert len(digest) == digest_size, "digest_size: sanity check failed"
+
+        # derive 31-bit token value
+        assert digest_size >= 20, "digest_size: sanity check 2 failed"  # otherwise 0xF+4 will run off end of hash.
+        offset = byte_elem_value(digest[-1]) & 0xF
+        value = _unpack_uint32(digest[offset:offset+4])[0] & 0x7fffffff
+
+        # render to decimal string, return last <digits> chars
+        # NOTE: the 10'th digit is not as secure, as it can only take on values 0-2, not 0-9,
+        #       due to 31-bit mask on int ">I". But some servers / clients use it :|
+        #       if 31-bit mask removed (which breaks spec), would only get values 0-4.
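+        # Worked example of the truncation above, using the sample digest from
+        # RFC 4226 section 5.4 (values illustrative):
+        #   last byte 0x5a           -> offset = 0x5a & 0xF = 10
+        #   digest[10:14] = 50ef7f19 -> 0x50ef7f19 & 0x7fffffff = 1357872921
+        #   digits=6                 -> token = "872921" (last six decimal digits)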
+        digits = self.digits
+        assert 0 < digits < 11, "digits: sanity check failed"
+        return (u("%0*d") % (digits, value))[-digits:]
+
+    #=============================================================================
+    # token verification
+    #=============================================================================
+
+    @classmethod
+    def verify(cls, token, source, **kwds):
+        r"""
+        Convenience wrapper around :meth:`TOTP.from_source` and :meth:`TOTP.match`.
+
+        This parses a TOTP key & configuration from the specified source,
+        and tries to match the token.
+        It's designed to parallel the :meth:`passlib.ifc.PasswordHash.verify` method.
+
+        :param token:
+            Token string to match.
+
+        :param source:
+            Serialized TOTP key.
+            Can be anything accepted by :meth:`TOTP.from_source`.
+
+        :param \\*\\*kwds:
+            All additional keywords passed to :meth:`TOTP.match`.
+
+        :return:
+            A :class:`TotpMatch` instance, or raises a :exc:`TokenError`.
+        """
+        return cls.from_source(source).match(token, **kwds)
+
+    def match(self, token, time=None, window=30, skew=0, last_counter=None):
+        """
+        Match TOTP token against specified timestamp.
+        Searches within a window before & after the provided time,
+        in order to account for transmission delay and small amounts of skew in the client's clock.
+
+        :arg token:
+            Token to validate.
+            may be integer or string (whitespace and hyphens are ignored).
+
+        :param time:
+            Unix epoch timestamp, can be any of :class:`!float`, :class:`!int`, or :class:`!datetime`.
+            if ``None`` (the default), uses current system time.
+            *this should correspond to the time the token was received from the client*.
+
+        :param int window:
+            How far backward and forward in time to search for a match.
+            Measured in seconds. Defaults to ``30``. Typically only useful if set
+            to multiples of :attr:`period`.
+
+        :param int skew:
+            Adjust timestamp by specified value, to account for excessive
+            client clock skew. Measured in seconds. Defaults to ``0``.
+
+            Negative skew (the common case) indicates transmission delay,
+            and/or that the client clock is running behind the server.
+
+            Positive skew indicates the client clock is running ahead of the server
+            (and by enough that it cancels out any negative skew added by
+            the transmission delay).
+
+            You should ensure the server clock uses a reliable time source such as NTP,
+            so that only the client clock's inaccuracy needs to be accounted for.
+
+            This is an advanced parameter that should usually be left at ``0``;
+            The **window** parameter is usually enough to account
+            for any observed transmission delay.
+
+        :param last_counter:
+            Optional value of the last counter that was successfully used.
+            If specified, verify will never search earlier counters,
+            no matter how large the window is.
+
+            Useful when client has previously authenticated,
+            and thus should never provide a token older than previously
+            verified value.
+
+        :raises ~passlib.exc.TokenError:
+
+            If the token is malformed, fails to match, or has already been used.
+
+        :returns TotpMatch:
+
+            Returns a :class:`TotpMatch` instance on successful match.
+            Can be treated as tuple of ``(counter, time)``.
+            Raises error if token is malformed / can't be verified.
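+
+        For intuition (illustrative arithmetic, assuming the defaults
+        ``period=30`` and ``window=30``): a token received at time 1419622729
+        is checked against counters 47320756 through 47320758, i.e. one
+        counter step before and after the current one.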
+
+        Usage example::
+
+            >>> totp = TOTP('s3jdvb7qd2r7jpxx')
+
+            >>> # valid token for this time period
+            >>> totp.match('897212', 1419622729)
+            <TotpMatch counter=47320757 time=1419622729 cache_seconds=60>
+
+            >>> # token from counter step 30 sec ago (within allowed window)
+            >>> totp.match('000492', 1419622729)
+            <TotpMatch counter=47320756 time=1419622729 cache_seconds=60>
+
+            >>> # invalid token -- token from 60 sec ago (outside of window)
+            >>> totp.match('760389', 1419622729)
+            Traceback:
+                ...
+            InvalidTokenError: Token did not match
+        """
+        time = self.normalize_time(time)
+        self._check_serial(window, "window")
+
+        client_time = time + skew
+        if last_counter is None:
+            last_counter = -1
+        start = max(last_counter, self._time_to_counter(client_time - window))
+        end = self._time_to_counter(client_time + window) + 1
+        # XXX: could pass 'expected = _time_to_counter(client_time + TRANSMISSION_DELAY)'
+        #      to the _find_match() method, would help if window set to very large value.
+
+        counter = self._find_match(token, start, end)
+        assert counter >= last_counter, "sanity check failed: counter went backward"
+
+        if counter == last_counter:
+            raise UsedTokenError(expire_time=(last_counter + 1) * self.period)
+
+        # NOTE: By returning match tied to

+        # ``<dir>`` or ``<path>``
+        if completion_type:
+            paths = auto_complete_paths(current, completion_type)
+            options = [(path, 0) for path in paths]
+        for option in options:
+            opt_label = option[0]
+            # append '=' to options which require args
+            if option[1] and option[0][:2] == "--":
+                opt_label += "="
+            print(opt_label)
+
+        # Complete sub-commands (unless one is already given).
+        if not any(name in cwords for name in subcommand.handler_map()):
+            for handler_name in subcommand.handler_map():
+                if handler_name.startswith(current):
+                    print(handler_name)
+    else:
+        # show main parser options only when necessary
+
+        opts = [i.option_list for i in parser.option_groups]
+        opts.append(parser.option_list)
+        flattened_opts = chain.from_iterable(opts)
+        if current.startswith("-"):
+            for opt in flattened_opts:
+                if opt.help != optparse.SUPPRESS_HELP:
+                    subcommands += opt._long_opts + opt._short_opts
+        else:
+            # get completion type given cwords and all available options
+            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
+            if completion_type:
+                subcommands = list(auto_complete_paths(current, completion_type))
+
+        print(" ".join([x for x in subcommands if x.startswith(current)]))
+    sys.exit(1)
+
+
+def get_path_completion_type(
+    cwords: list[str], cword: int, opts: Iterable[Any]
+) -> str | None:
+    """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+    :param cwords: same as the environmental variable ``COMP_WORDS``
+    :param cword: same as the environmental variable ``COMP_CWORD``
+    :param opts: The available options to check
+    :return: path completion type (``file``, ``dir``, ``path`` or None)
+    """
+    if cword < 2 or not cwords[cword - 2].startswith("-"):
+        return None
+    for opt in opts:
+        if opt.help == optparse.SUPPRESS_HELP:
+            continue
+        for o in str(opt).split("/"):
+            if cwords[cword - 2].split("=")[0] == o:
+                if not opt.metavar or any(
+                    x in ("path", "file", "dir") for x in opt.metavar.split("/")
+                ):
+                    return opt.metavar
+    return None
+
+
+def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
+    """If ``completion_type`` is ``file`` or ``path``, list all regular files
+    and directories starting with ``current``; otherwise only list directories
+    starting with ``current``.
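+
+    For instance (illustrative): in a directory containing ``setup.py`` and
+    ``src/``, completing ``./s`` with type ``file`` could yield ``./setup.py``
+    and ``./src/``, while type ``dir`` would yield only ``./src/``.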
+
+    :param current: The word to be completed
+    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
+    :return: A generator of regular files and/or directories
+    """
+    directory, filename = os.path.split(current)
+    current_path = os.path.abspath(directory)
+    # Don't complete paths if they can't be accessed
+    if not os.access(current_path, os.R_OK):
+        return
+    filename = os.path.normcase(filename)
+    # list all files that start with ``filename``
+    file_list = (
+        x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
+    )
+    for f in file_list:
+        opt = os.path.join(current_path, f)
+        comp_file = os.path.normcase(os.path.join(directory, f))
+        # complete regular files when there is not ``<dir>`` after option
+        # complete directories when there is ``<file>``, ``<path>`` or
+        # ``<dir>`` after option
+        if completion_type != "dir" and os.path.isfile(opt):
+            yield comp_file
+        elif os.path.isdir(opt):
+            yield os.path.join(comp_file, "")
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 0000000..7acc29c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,244 @@
+"""Base Command class, and related routines"""
+
+from __future__ import annotations
+
+import logging
+import logging.config
+import optparse
+import os
+import sys
+import traceback
+from optparse import Values
+from typing import Callable
+
+from pip._vendor.rich import reconfigure
+from pip._vendor.rich import traceback as rich_traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.cli.status_codes import (
+    ERROR,
+    PREVIOUS_BUILD_DIR_ERROR,
+    UNKNOWN_ERROR,
+    VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.exceptions import (
+    BadCommand,
+    CommandError,
+    DiagnosticPipError,
+    InstallationError,
+    NetworkConnectionError,
+    PreviousBuildDirError,
+)
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
+from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = ["Command"]
+
+logger = logging.getLogger(__name__)
+
+
+class Command(CommandContextMixIn):
+    usage: str = ""
+    ignore_require_venv: bool = False
+
+    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
+        super().__init__()
+
+        self.name = name
+        self.summary = summary
+        self.parser = ConfigOptionParser(
+            usage=self.usage,
+            prog=f"{get_prog()} {name}",
+            formatter=UpdatingDefaultsHelpFormatter(),
+            add_help_option=False,
+            name=name,
+            description=self.__doc__,
+            isolated=isolated,
+        )
+
+        self.tempdir_registry: TempDirRegistry | None = None
+
+        # Commands should add options to this option group
+        optgroup_name = f"{self.name.capitalize()} Options"
+        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+        # Add the general options
+        gen_opts = cmdoptions.make_option_group(
+            cmdoptions.general_group,
+            self.parser,
+        )
+        self.parser.add_option_group(gen_opts)
+
+        self.add_options()
+
+    def add_options(self) -> None:
+        pass
+
+    def
handle_pip_version_check(self, options: Values) -> None: + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, "no_index") + + def run(self, options: Values, args: list[str]) -> int: + raise NotImplementedError + + def _run_wrapper(self, level_number: int, options: Values, args: list[str]) -> int: + def _inner_run() -> int: + try: + return self.run(options, args) + finally: + self.handle_pip_version_check(options) + + if options.debug_mode: + rich_traceback.install(show_locals=True) + return _inner_run() + + try: + status = _inner_run() + assert isinstance(status, int) + return status + except DiagnosticPipError as exc: + logger.error("%s", exc, extra={"rich": True}) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except ( + InstallationError, + BadCommand, + NetworkConnectionError, + ) as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical("%s", exc) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to + # stderr because stdout no longer works. + print("ERROR: Pipe to stdout was broken", file=sys.stderr) + if level_number <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + + return ERROR + except KeyboardInterrupt: + logger.critical("Operation cancelled by user") + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BaseException: + logger.critical("Exception:", exc_info=True) + + return UNKNOWN_ERROR + + def parse_args(self, args: list[str]) -> tuple[Values, list[str]]: + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args: list[str]) -> int: + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args: list[str]) -> int: + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. + self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + if options.debug_mode: + self.verbosity = 2 + + if hasattr(options, "progress_bar") and options.progress_bar == "auto": + options.progress_bar = "on" if self.verbosity >= 0 else "off" + + reconfigure(no_color=options.no_color) + level_number = setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + always_enabled_features = set(options.features_enabled) & set( + cmdoptions.ALWAYS_ENABLED_FEATURES + ) + if always_enabled_features: + logger.warning( + "The following features are always enabled: %s. ", + ", ".join(sorted(always_enabled_features)), + ) + + # Make sure that the --python argument isn't specified after the + # subcommand. 
We can tell, because if --python was specified, + # we should only reach this point if we're running in the created + # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment + # variable set. + if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + logger.critical( + "The --python option must be placed before the pip subcommand name" + ) + sys.exit(ERROR) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. + + if options.no_input: + os.environ["PIP_NO_INPUT"] = "1" + + if options.exists_action: + os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical("Could not find an activated virtualenv (required).") + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. If executing pip with sudo, you should " + "use sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + return self._run_wrapper(level_number, options, args) + + def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]: + """ + map of names to handler actions for commands with sub-actions + """ + return {} diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py new file mode 100644 index 0000000..a473775 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/cmdoptions.py @@ -0,0 +1,1110 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +from __future__ import annotations + +import logging +import os +import pathlib +import textwrap +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values +from textwrap import dedent +from typing import Any, Callable + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.parser import ConfigOptionParser +from pip._internal.exceptions import CommandError +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix +from pip._internal.models.format_control import FormatControl +from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.misc import strtobool + +logger = logging.getLogger(__name__) + + +def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None: + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. 
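+
+    Illustrative result: for an Option whose long flag is --python-version and
+    msg "bad value", this exits via parser.error() with text like
+    "--python-version error: bad value".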
+ """ + msg = f"{option} error: {msg}" + msg = textwrap.fill(" ".join(msg.split())) + parser.error(msg) + + +def make_option_group(group: dict[str, Any], parser: ConfigOptionParser) -> OptionGroup: + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group["name"]) + for option in group["options"]: + option_group.add_option(option()) + return option_group + + +def check_dist_restriction(options: Values, check_target: bool = False) -> None: + """Function for determining if custom platform options are allowed. + + :param options: The OptionParser options. + :param check_target: Whether or not to check if --target is being used. + """ + dist_restriction_set = any( + [ + options.python_version, + options.platforms, + options.abis, + options.implementation, + ] + ) + + binary_only = FormatControl(set(), {":all:"}) + sdist_dependencies_allowed = ( + options.format_control != binary_only and not options.ignore_dependencies + ) + + # Installations or downloads using dist restrictions must not combine + # source distributions and dist-specific wheels, as they are not + # guaranteed to be locally compatible. + if dist_restriction_set and sdist_dependencies_allowed: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." + ) + + if check_target: + if not options.dry_run and dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target' or using '--dry-run'" + ) + + +def check_build_constraints(options: Values) -> None: + """Function for validating build constraints options. + + :param options: The OptionParser options. + """ + if hasattr(options, "build_constraints") and options.build_constraints: + if not options.build_isolation: + raise CommandError( + "--build-constraint cannot be used with --no-build-isolation." + ) + + # Import here to avoid circular imports + from pip._internal.network.session import PipSession + from pip._internal.req.req_file import get_file_content + + # Eagerly check build constraints file contents + # is valid so that we don't fail in when trying + # to check constraints in isolated build process + with PipSession() as session: + for constraint_file in options.build_constraints: + get_file_content(constraint_file, session) + + +def _path_option_check(option: Option, opt: str, value: str) -> str: + return os.path.expanduser(value) + + +def _package_name_option_check(option: Option, opt: str, value: str) -> str: + return canonicalize_name(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path", "package_name") + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["package_name"] = _package_name_option_check + TYPE_CHECKER["path"] = _path_option_check + + +########### +# options # +########### + +help_: Callable[..., Option] = partial( + Option, + "-h", + "--help", + dest="help", + action="help", + help="Show help.", +) + +debug_mode: Callable[..., Option] = partial( + Option, + "--debug", + dest="debug_mode", + action="store_true", + default=False, + help=( + "Let unhandled exceptions propagate outside the main subroutine, " + "instead of logging them to stderr." 
+ ), +) + +isolated_mode: Callable[..., Option] = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv: Callable[..., Option] = partial( + Option, + "--require-virtualenv", + "--require-venv", + dest="require_venv", + action="store_true", + default=False, + help=( + "Allow pip to only run in a virtual environment; exit with an error otherwise." + ), +) + +override_externally_managed: Callable[..., Option] = partial( + Option, + "--break-system-packages", + dest="override_externally_managed", + action="store_true", + help="Allow pip to modify an EXTERNALLY-MANAGED Python installation", +) + +python: Callable[..., Option] = partial( + Option, + "--python", + dest="python", + help="Run pip with the specified Python interpreter.", +) + +verbose: Callable[..., Option] = partial( + Option, + "-v", + "--verbose", + dest="verbose", + action="count", + default=0, + help="Give more output. Option is additive, and can be used up to 3 times.", +) + +no_color: Callable[..., Option] = partial( + Option, + "--no-color", + dest="no_color", + action="store_true", + default=False, + help="Suppress colored output.", +) + +version: Callable[..., Option] = partial( + Option, + "-V", + "--version", + dest="version", + action="store_true", + help="Show version and exit.", +) + +quiet: Callable[..., Option] = partial( + Option, + "-q", + "--quiet", + dest="quiet", + action="count", + default=0, + help=( + "Give less output. Option is additive, and can be used up to 3" + " times (corresponding to WARNING, ERROR, and CRITICAL logging" + " levels)." + ), +) + +progress_bar: Callable[..., Option] = partial( + Option, + "--progress-bar", + dest="progress_bar", + type="choice", + choices=["auto", "on", "off", "raw"], + default="auto", + help=( + "Specify whether the progress bar should be used. In 'auto'" + " mode, --quiet will suppress all progress bars." + " [auto, on, off, raw] (default: auto)" + ), +) + +log: Callable[..., Option] = partial( + PipOption, + "--log", + "--log-file", + "--local-log", + dest="log", + metavar="path", + type="path", + help="Path to a verbose appending log.", +) + +no_input: Callable[..., Option] = partial( + Option, + # Don't ask for input + "--no-input", + dest="no_input", + action="store_true", + default=False, + help="Disable prompting for input.", +) + +keyring_provider: Callable[..., Option] = partial( + Option, + "--keyring-provider", + dest="keyring_provider", + choices=["auto", "disabled", "import", "subprocess"], + default="auto", + help=( + "Enable the credential lookup via the keyring library if user input is allowed." + " Specify which mechanism to use [auto, disabled, import, subprocess]." + " (default: %default)" + ), +) + +proxy: Callable[..., Option] = partial( + Option, + "--proxy", + dest="proxy", + type="str", + default="", + help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.", +) + +retries: Callable[..., Option] = partial( + Option, + "--retries", + dest="retries", + type="int", + default=5, + help="Maximum attempts to establish a new HTTP connection. (default: %default)", +) + +resume_retries: Callable[..., Option] = partial( + Option, + "--resume-retries", + dest="resume_retries", + type="int", + default=5, + help="Maximum attempts to resume or restart an incomplete download. 
" + "(default: %default)", +) + +timeout: Callable[..., Option] = partial( + Option, + "--timeout", + "--default-timeout", + metavar="sec", + dest="timeout", + type="float", + default=15, + help="Set the socket timeout (default %default seconds).", +) + + +def exists_action() -> Option: + return Option( + # Option when path already exist + "--exists-action", + dest="exists_action", + type="choice", + choices=["s", "i", "w", "b", "a"], + default=[], + action="append", + metavar="action", + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", + ) + + +cert: Callable[..., Option] = partial( + PipOption, + "--cert", + dest="cert", + type="path", + metavar="path", + help=( + "Path to PEM-encoded CA certificate bundle. " + "If provided, overrides the default. " + "See 'SSL Certificate Verification' in pip documentation " + "for more information." + ), +) + +client_cert: Callable[..., Option] = partial( + PipOption, + "--client-cert", + dest="client_cert", + type="path", + default=None, + metavar="path", + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) + +index_url: Callable[..., Option] = partial( + Option, + "-i", + "--index-url", + "--pypi-url", + dest="index_url", + metavar="URL", + default=PyPI.simple_url, + help="Base URL of the Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) + + +def extra_index_url() -> Option: + return Option( + "--extra-index-url", + dest="extra_index_urls", + metavar="URL", + action="append", + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url.", + ) + + +no_index: Callable[..., Option] = partial( + Option, + "--no-index", + dest="no_index", + action="store_true", + default=False, + help="Ignore package index (only looking at --find-links URLs instead).", +) + + +def find_links() -> Option: + return Option( + "-f", + "--find-links", + dest="find_links", + action="append", + default=[], + metavar="url", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", + ) + + +def trusted_host() -> Option: + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", + ) + + +def constraints() -> Option: + return Option( + "-c", + "--constraint", + dest="constraints", + action="append", + default=[], + metavar="file", + help="Constrain versions using the given constraints file. " + "This option can be used multiple times.", + ) + + +def build_constraints() -> Option: + return Option( + "--build-constraint", + dest="build_constraints", + action="append", + type="str", + default=[], + metavar="file", + help=( + "Constrain build dependencies using the given constraints file. " + "This option can be used multiple times." + ), + ) + + +def requirements() -> Option: + return Option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help="Install from the given requirements file. 
" + "This option can be used multiple times.", + ) + + +def editable() -> Option: + return Option( + "-e", + "--editable", + dest="editables", + action="append", + default=[], + metavar="path/url", + help=( + "Install a project in editable mode (i.e. setuptools " + '"develop mode") from a local project path or a VCS url.' + ), + ) + + +def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +src: Callable[..., Option] = partial( + PipOption, + "--src", + "--source", + "--source-dir", + "--source-directory", + dest="src_dir", + type="path", + metavar="dir", + default=get_src_prefix(), + action="callback", + callback=_handle_src, + help="Directory to check out editable projects into. " + 'The default in a virtualenv is "/src". ' + 'The default for global installs is "/src".', +) + + +def _get_format_control(values: Values, option: Option) -> Any: + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.no_binary, + existing.only_binary, + ) + + +def _handle_only_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.only_binary, + existing.no_binary, + ) + + +def no_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--no-binary", + dest="format_control", + action="callback", + callback=_handle_no_binary, + type="str", + default=format_control, + help="Do not use binary packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + "the colons), or one or more package names with commas between " + "them (no colons). Note that some packages are tricky to compile " + "and may fail to install when this option is used on them.", + ) + + +def only_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--only-binary", + dest="format_control", + action="callback", + callback=_handle_only_binary, + type="str", + default=format_control, + help="Do not use source packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + "or more package names with commas between them. Packages " + "without binary distributions will fail to install when this " + "option is used on them.", + ) + + +platforms: Callable[..., Option] = partial( + Option, + "--platform", + dest="platforms", + metavar="platform", + action="append", + default=None, + help=( + "Only use wheels compatible with . Defaults to the " + "platform of the running system. Use this option multiple times to " + "specify multiple platforms supported by the target interpreter." + ), +) + + +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value: str) -> tuple[tuple[int, ...], str | None]: + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. 
+ """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split(".") + if len(parts) > 3: + return ((), "at most three version parts are allowed") + + if len(parts) == 1: + # Then we are in the case of "3" or "37". + value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), "each version part must be an integer") + + return (version_info, None) + + +def _handle_python_version( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """ + Handle a provided --python-version value. + """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = f"invalid --python-version value: {value!r}: {error_msg}" + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + +python_version: Callable[..., Option] = partial( + Option, + "--python-version", + dest="python_version", + metavar="python_version", + action="callback", + callback=_handle_python_version, + type="str", + default=None, + help=dedent( + """\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). + """ + ), +) + + +implementation: Callable[..., Option] = partial( + Option, + "--implementation", + dest="implementation", + metavar="implementation", + default=None, + help=( + "Only use wheels compatible with Python " + "implementation , e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels." + ), +) + + +abis: Callable[..., Option] = partial( + Option, + "--abi", + dest="abis", + metavar="abi", + action="append", + default=None, + help=( + "Only use wheels compatible with Python abi , e.g. 'pypy_41'. " + "If not specified, then the current interpreter abi tag is used. " + "Use this option multiple times to specify multiple abis supported " + "by the target interpreter. Generally you will need to specify " + "--implementation, --platform, and --python-version when using this " + "option." + ), +) + + +def add_target_python_options(cmd_opts: OptionGroup) -> None: + cmd_opts.add_option(platforms()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abis()) + + +def make_target_python(options: Values) -> TargetPython: + target_python = TargetPython( + platforms=options.platforms, + py_version_info=options.python_version, + abis=options.abis, + implementation=options.implementation, + ) + + return target_python + + +def prefer_binary() -> Option: + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help=( + "Prefer binary packages over source packages, even if the " + "source packages are newer." 
+        ),
+    )
+
+
+cache_dir: Callable[..., Option] = partial(
+    PipOption,
+    "--cache-dir",
+    dest="cache_dir",
+    default=USER_CACHE_DIR,
+    metavar="dir",
+    type="path",
+    help="Store the cache data in <dir>.",
+)
+
+
+def _handle_no_cache_dir(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Process a value provided for the --no-cache-dir option.
+
+    This is an optparse.Option callback for the --no-cache-dir option.
+    """
+    # The value argument will be None if --no-cache-dir is passed via the
+    # command-line, since the option doesn't accept arguments. However,
+    # the value can be non-None if the option is triggered e.g. by an
+    # environment variable, like PIP_NO_CACHE_DIR=true.
+    if value is not None:
+        # Then parse the string value to get argument error-checking.
+        try:
+            strtobool(value)
+        except ValueError as exc:
+            raise_option_error(parser, option=option, msg=str(exc))
+
+    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
+    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
+    # rather than enabled (logic would say the latter). Thus, we disable
+    # the cache directory not just on values that parse to True, but (for
+    # backwards compatibility reasons) also on values that parse to False.
+    # In other words, always set it to False if the option is provided in
+    # some (valid) form.
+    parser.values.cache_dir = False
+
+
+no_cache: Callable[..., Option] = partial(
+    Option,
+    "--no-cache-dir",
+    dest="cache_dir",
+    action="callback",
+    callback=_handle_no_cache_dir,
+    help="Disable the cache.",
+)
+
+no_deps: Callable[..., Option] = partial(
+    Option,
+    "--no-deps",
+    "--no-dependencies",
+    dest="ignore_dependencies",
+    action="store_true",
+    default=False,
+    help="Don't install package dependencies.",
+)
+
+
+def _handle_dependency_group(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Process a value provided for the --group option.
+
+    Splits on the rightmost ":", and validates that the path (if present) ends
+    in `pyproject.toml`. Defaults the path to `pyproject.toml` when one is not given.
+
+    `:` cannot appear in dependency group names, so this is a safe and simple parse.
+
+    This is an optparse.Option callback for the dependency_groups option.
+    """
+    path, sep, groupname = value.rpartition(":")
+    if not sep:
+        path = "pyproject.toml"
+    else:
+        # check for 'pyproject.toml' filenames using pathlib
+        if pathlib.PurePath(path).name != "pyproject.toml":
+            msg = "group paths use 'pyproject.toml' filenames"
+            raise_option_error(parser, option=option, msg=msg)
+
+    parser.values.dependency_groups.append((path, groupname))
+
+
+dependency_groups: Callable[..., Option] = partial(
+    Option,
+    "--group",
+    dest="dependency_groups",
+    default=[],
+    type=str,
+    action="callback",
+    callback=_handle_dependency_group,
+    metavar="[path:]group",
+    help='Install a named dependency-group from a "pyproject.toml" file. '
+    'If a path is given, the name of the file must be "pyproject.toml". '
+    'Defaults to using "pyproject.toml" in the current directory.',
+)
+
+ignore_requires_python: Callable[..., Option] = partial(
+    Option,
+    "--ignore-requires-python",
+    dest="ignore_requires_python",
+    action="store_true",
+    help="Ignore the Requires-Python information.",
+)
+
+no_build_isolation: Callable[..., Option] = partial(
+    Option,
+    "--no-build-isolation",
+    dest="build_isolation",
+    action="store_false",
+    default=True,
+    help="Disable isolation when building a modern source distribution. "
" + "Build dependencies specified by PEP 518 must be already installed " + "if this option is used.", +) + +check_build_deps: Callable[..., Option] = partial( + Option, + "--check-build-dependencies", + dest="check_build_deps", + action="store_true", + default=False, + help="Check the build dependencies.", +) + + +use_pep517: Any = partial( + Option, + "--use-pep517", + dest="use_pep517", + action="store_true", + default=True, + help=SUPPRESS_HELP, +) + + +def _handle_config_settings( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + key, sep, val = value.partition("=") + if sep != "=": + parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") + dest = getattr(parser.values, option.dest) + if dest is None: + dest = {} + setattr(parser.values, option.dest, dest) + if key in dest: + if isinstance(dest[key], list): + dest[key].append(val) + else: + dest[key] = [dest[key], val] + else: + dest[key] = val + + +config_settings: Callable[..., Option] = partial( + Option, + "-C", + "--config-settings", + dest="config_settings", + type=str, + action="callback", + callback=_handle_config_settings, + metavar="settings", + help="Configuration settings to be passed to the build backend. " + "Settings take the form KEY=VALUE. Use multiple --config-settings options " + "to pass multiple keys to the backend.", +) + +no_clean: Callable[..., Option] = partial( + Option, + "--no-clean", + action="store_true", + default=False, + help="Don't clean up build directories.", +) + +pre: Callable[..., Option] = partial( + Option, + "--pre", + action="store_true", + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) + +json: Callable[..., Option] = partial( + Option, + "--json", + action="store_true", + default=False, + help="Output data in a machine-readable JSON format.", +) + +disable_pip_version_check: Callable[..., Option] = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=False, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) + +root_user_action: Callable[..., Option] = partial( + Option, + "--root-user-action", + dest="root_user_action", + default="warn", + choices=["warn", "ignore"], + help="Action if pip is run as a root user [warn, ignore] (default: warn)", +) + + +def _handle_merge_hash( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(":", 1) + except ValueError: + parser.error( + f"Arguments to {opt_str} must be a hash name " + "followed by a value, like --hash=sha256:" + "abcde..." + ) + if algo not in STRONG_HASHES: + parser.error( + "Allowed hash algorithms for {} are {}.".format( + opt_str, ", ".join(STRONG_HASHES) + ) + ) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash: Callable[..., Option] = partial( + Option, + "--hash", + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest="hashes", + action="callback", + callback=_handle_merge_hash, + type="string", + help="Verify that the package's archive matches this " + "hash before installing. 
Example: --hash=sha256:abcdef...", +) + + +require_hashes: Callable[..., Option] = partial( + Option, + "--require-hashes", + dest="require_hashes", + action="store_true", + default=False, + help="Require a hash to check each requirement against, for " + "repeatable installs. This option is implied when any package in a " + "requirements file has a --hash option.", +) + + +list_path: Callable[..., Option] = partial( + PipOption, + "--path", + dest="path", + type="path", + action="append", + help="Restrict to the specified installation path for listing " + "packages (can be used multiple times).", +) + + +def check_list_path_option(options: Values) -> None: + if options.path and (options.user or options.local): + raise CommandError("Cannot combine '--path' with '--user' or '--local'") + + +list_exclude: Callable[..., Option] = partial( + PipOption, + "--exclude", + dest="excludes", + action="append", + metavar="package", + type="package_name", + help="Exclude specified package from the output", +) + + +no_python_version_warning: Callable[..., Option] = partial( + Option, + "--no-python-version-warning", + dest="no_python_version_warning", + action="store_true", + default=False, + help=SUPPRESS_HELP, # No-op, a hold-over from the Python 2->3 transition. +) + + +# Features that are now always on. A warning is printed if they are used. +ALWAYS_ENABLED_FEATURES = [ + "truststore", # always on since 24.2 + "no-binary-enable-wheel-cache", # always on since 23.1 +] + +use_new_feature: Callable[..., Option] = partial( + Option, + "--use-feature", + dest="features_enabled", + metavar="feature", + action="append", + default=[], + choices=[ + "fast-deps", + "build-constraint", + ] + + ALWAYS_ENABLED_FEATURES, + help="Enable new functionality, that may be backward incompatible.", +) + +use_deprecated_feature: Callable[..., Option] = partial( + Option, + "--use-deprecated", + dest="deprecated_features_enabled", + metavar="feature", + action="append", + default=[], + choices=[ + "legacy-resolver", + "legacy-certs", + ], + help=("Enable deprecated functionality, that will be removed in the future."), +) + +########## +# groups # +########## + +general_group: dict[str, Any] = { + "name": "General Options", + "options": [ + help_, + debug_mode, + isolated_mode, + require_virtualenv, + python, + verbose, + version, + quiet, + log, + no_input, + keyring_provider, + proxy, + retries, + timeout, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + no_python_version_warning, + use_new_feature, + use_deprecated_feature, + resume_retries, + ], +} + +index_group: dict[str, Any] = { + "name": "Package Index Options", + "options": [ + index_url, + extra_index_url, + no_index, + find_links, + ], +} diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 0000000..9c167bd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,28 @@ +from collections.abc import Generator +from contextlib import AbstractContextManager, ExitStack, contextmanager +from typing import TypeVar + +_T = TypeVar("_T", covariant=True) + + +class CommandContextMixIn: + def __init__(self) -> None: + super().__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self) -> Generator[None, None, None]: + assert not self._in_main_context + + 
self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider: AbstractContextManager[_T]) -> _T: + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/index_command.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/index_command.py new file mode 100644 index 0000000..f6a82c8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/index_command.py @@ -0,0 +1,175 @@ +""" +Contains command classes which may interact with an index / the network. + +Unlike its sister module, req_command, this module still uses lazy imports +so commands which don't always hit the network (e.g. list w/o --outdated or +--uptodate) don't need waste time importing PipSession and friends. +""" + +from __future__ import annotations + +import logging +import os +import sys +from functools import lru_cache +from optparse import Values +from typing import TYPE_CHECKING + +from pip._vendor import certifi + +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn + +if TYPE_CHECKING: + from ssl import SSLContext + + from pip._internal.network.session import PipSession + +logger = logging.getLogger(__name__) + + +@lru_cache +def _create_truststore_ssl_context() -> SSLContext | None: + if sys.version_info < (3, 10): + logger.debug("Disabling truststore because Python version isn't 3.10+") + return None + + try: + import ssl + except ImportError: + logger.warning("Disabling truststore since ssl support is missing") + return None + + try: + from pip._vendor import truststore + except ImportError: + logger.warning("Disabling truststore because platform isn't supported") + return None + + ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + ctx.load_verify_locations(certifi.where()) + return ctx + + +class SessionCommandMixin(CommandContextMixIn): + """ + A class mixin for command classes needing _build_session(). 
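+
+    The session is created lazily by get_default_session() and entered into
+    the command's main context, so it is closed automatically when the
+    command exits.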
+ """ + + def __init__(self) -> None: + super().__init__() + self._session: PipSession | None = None + + @classmethod + def _get_index_urls(cls, options: Values) -> list[str] | None: + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options: Values) -> PipSession: + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session( + self, + options: Values, + retries: int | None = None, + timeout: int | None = None, + ) -> PipSession: + from pip._internal.network.session import PipSession + + cache_dir = options.cache_dir + assert not cache_dir or os.path.isabs(cache_dir) + + if "legacy-certs" not in options.deprecated_features_enabled: + ssl_context = _create_truststore_ssl_context() + else: + ssl_context = None + + session = PipSession( + cache=os.path.join(cache_dir, "http-v2") if cache_dir else None, + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ssl_context=ssl_context, + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = timeout if timeout is not None else options.timeout + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + session.trust_env = False + session.pip_proxy = options.proxy + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + session.auth.keyring_provider = options.keyring_provider + + return session + + +def _pip_self_version_check(session: PipSession, options: Values) -> None: + from pip._internal.self_outdated_check import pip_self_version_check as check + + check(session, options) + + +class IndexGroupCommand(Command, SessionCommandMixin): + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options: Values) -> None: + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, "no_index") + + if options.disable_pip_version_check or options.no_index: + return + + try: + # Otherwise, check if we're using the latest version of pip available. 
+ session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout), + ) + with session: + _pip_self_version_check(session, options) + except Exception: + logger.warning("There was an error checking the latest version of pip.") + logger.debug("See below for error", exc_info=True) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/main.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/main.py new file mode 100644 index 0000000..9a161fd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,80 @@ +"""Primary application entrypoint.""" + +from __future__ import annotations + +import locale +import logging +import os +import sys +import warnings + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. 
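A minimal, runnable sketch of the runpy-based invocation the comment above recommends (the package and flags placed in sys.argv are illustrative only; any pip command line works the same way):

    import runpy
    import sys

    # Pretend pip was started from the shell as "pip install --quiet requests".
    sys.argv = ["pip", "install", "--quiet", "requests"]

    # Runs pip exactly as if it had been invoked from the command line.
    # As the comment above notes, this exits the current process when pip
    # finishes, so no code after this call should expect to run.
    runpy.run_module("pip", run_name="__main__")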
+ + +def main(args: list[str] | None = None) -> int: + if args is None: + args = sys.argv[1:] + + # Suppress the pkg_resources deprecation warning + # Note - we use a module of .*pkg_resources to cover + # the normal case (pip._vendor.pkg_resources) and the + # devendored case (a bare pkg_resources) + warnings.filterwarnings( + action="ignore", category=DeprecationWarning, module=".*pkg_resources" + ) + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write(f"ERROR: {exc}") + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py new file mode 100644 index 0000000..5ce9f5a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/main_parser.py @@ -0,0 +1,134 @@ +"""A single place for constructing and exposing the main parser""" + +from __future__ import annotations + +import os +import subprocess +import sys + +from pip._internal.build_env import get_runnable_pip +from pip._internal.cli import cmdoptions +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.commands import commands_dict, get_similar_commands +from pip._internal.exceptions import CommandError +from pip._internal.utils.misc import get_pip_version, get_prog + +__all__ = ["create_main_parser", "parse_command"] + + +def create_main_parser() -> ConfigOptionParser: + """Creates and returns the main parser for pip's CLI""" + + parser = ConfigOptionParser( + usage="\n%prog [options]", + add_help_option=False, + formatter=UpdatingDefaultsHelpFormatter(), + name="global", + prog=get_prog(), + ) + parser.disable_interspersed_args() + + parser.version = get_pip_version() + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + # so the help formatter knows + parser.main = True # type: ignore + + # create command listing for description + description = [""] + [ + f"{name:27} {command_info.summary}" + for name, command_info in commands_dict.items() + ] + parser.description = "\n".join(description) + + return parser + + +def identify_python_interpreter(python: str) -> str | None: + # If the named file exists, use it. + # If it's a directory, assume it's a virtual environment and + # look for the environment's Python executable. + if os.path.exists(python): + if os.path.isdir(python): + # bin/python for Unix, Scripts/python.exe for Windows + # Try both in case of odd cases like cygwin. 
+ for exe in ("bin/python", "Scripts/python.exe"): + py = os.path.join(python, exe) + if os.path.exists(py): + return py + else: + return python + + # Could not find the interpreter specified + return None + + +def parse_command(args: list[str]) -> tuple[str, list[str]]: + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --python + if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + # Re-invoke pip using the specified Python interpreter + interpreter = identify_python_interpreter(general_options.python) + if interpreter is None: + raise CommandError( + f"Could not locate Python interpreter {general_options.python}" + ) + + pip_cmd = [ + interpreter, + get_runnable_pip(), + ] + pip_cmd.extend(args) + + # Set a flag so the child doesn't re-invoke itself, causing + # an infinite loop. + os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1" + returncode = 0 + try: + proc = subprocess.run(pip_cmd) + returncode = proc.returncode + except (subprocess.SubprocessError, OSError) as exc: + raise CommandError(f"Failed to run pip under {interpreter}: {exc}") + sys.exit(returncode) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == "help" and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/parser.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/parser.py new file mode 100644 index 0000000..3905a91 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/parser.py @@ -0,0 +1,298 @@ +"""Base option parser setup""" + +from __future__ import annotations + +import logging +import optparse +import shutil +import sys +import textwrap +from collections.abc import Generator +from contextlib import suppress +from typing import Any, NoReturn + +from pip._internal.cli.status_codes import UNKNOWN_ERROR +from pip._internal.configuration import Configuration, ConfigurationError +from pip._internal.utils.misc import redact_auth_from_url, strtobool + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # help position must be aligned with __init__.parseopts.description + kwargs["max_help_position"] = 30 + kwargs["indent_increment"] = 1 + kwargs["width"] = shutil.get_terminal_size()[0] - 2 + super().__init__(*args, **kwargs) + + def format_option_strings(self, option: optparse.Option) -> str: + return self._format_option_strings(option) + + def _format_option_strings( + self, option: optparse.Option, mvarfmt: str = " 
<{}>", optsep: str = ", " + ) -> str: + """ + Return a comma-separated list of option strings and metavars. + + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + assert option.dest is not None + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt.format(metavar.lower())) + + return "".join(opts) + + def format_heading(self, heading: str) -> str: + if heading == "Options": + return "" + return heading + ":\n" + + def format_usage(self, usage: str) -> str: + """ + Ensure there is only one newline between usage and the first heading + if there is no description. + """ + msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " ")) + return msg + + def format_description(self, description: str | None) -> str: + # leave full control over description to us + if description: + if hasattr(self.parser, "main"): + label = "Commands" + else: + label = "Description" + # some doc strings have initial newlines, some don't + description = description.lstrip("\n") + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = f"{label}:\n{description}\n" + return description + else: + return "" + + def format_epilog(self, epilog: str | None) -> str: + # leave full control over epilog to us + if epilog: + return epilog + else: + return "" + + def indent_lines(self, text: str, indent: str) -> str: + new_lines = [indent + line for line in text.split("\n")] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. 
+ + Also redact auth from url type options + """ + + def expand_default(self, option: optparse.Option) -> str: + default_values = None + if self.parser is not None: + assert isinstance(self.parser, ConfigOptionParser) + self.parser._update_defaults(self.parser.defaults) + assert option.dest is not None + default_values = self.parser.defaults.get(option.dest) + help_text = super().expand_default(option) + + if default_values and option.metavar == "URL": + if isinstance(default_values, str): + default_values = [default_values] + + # If its not a list, we should abort and just return the help text + if not isinstance(default_values, list): + default_values = [] + + for val in default_values: + help_text = help_text.replace(val, redact_auth_from_url(val)) + + return help_text + + +class CustomOptionParser(optparse.OptionParser): + def insert_option_group( + self, idx: int, *args: Any, **kwargs: Any + ) -> optparse.OptionGroup: + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self) -> list[optparse.Option]: + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + def __init__( + self, + *args: Any, + name: str, + isolated: bool = False, + **kwargs: Any, + ) -> None: + self.name = name + self.config = Configuration(isolated) + + assert self.name + super().__init__(*args, **kwargs) + + def check_default(self, option: optparse.Option, key: str, val: Any) -> Any: + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print(f"An error occurred during configuration: {exc}") + sys.exit(3) + + def _get_ordered_configuration_items( + self, + ) -> Generator[tuple[str, Any], None, None]: + # Configuration gives keys in an unordered manner. Order them. + override_order = ["global", self.name, ":env:"] + + # Pool the options into different groups + section_items: dict[str, list[tuple[str, Any]]] = { + name: [] for name in override_order + } + + for _, value in self.config.items(): # noqa: PERF102 + for section_key, val in value.items(): + # ignore empty values + if not val: + logger.debug( + "Ignoring configuration key '%s' as its value is empty.", + section_key, + ) + continue + + section, key = section_key.split(".", 1) + if section in override_order: + section_items[section].append((key, val)) + + # Yield each group in their override order + for section in override_order: + yield from section_items[section] + + def _update_defaults(self, defaults: dict[str, Any]) -> dict[str, Any]: + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + + # Accumulate complex default state. + self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in self._get_ordered_configuration_items(): + # '--' because configuration supports only long names + option = self.get_option("--" + key) + + # Ignore options not present in this parser. E.g. non-globals put + # in [global] by users that want them to apply to all applicable + # commands. 
+ if option is None: + continue + + assert option.dest is not None + + if option.action in ("store_true", "store_false"): + try: + val = strtobool(val) + except ValueError: + self.error( + f"{val} is not a valid value for {key} option, " + "please specify a boolean value like yes/no, " + "true/false or 1/0 instead." + ) + elif option.action == "count": + with suppress(ValueError): + val = strtobool(val) + with suppress(ValueError): + val = int(val) + if not isinstance(val, int) or val < 0: + self.error( + f"{val} is not a valid value for {key} option, " + "please instead specify either a non-negative integer " + "or a boolean value like yes/no or false/true " + "which is equivalent to 1/0." + ) + elif option.action == "append": + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == "callback": + assert option.callback is not None + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self) -> optparse.Values: + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(UNKNOWN_ERROR, str(err)) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + assert option.dest is not None + default = defaults.get(option.dest) + if isinstance(default, str): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg: str) -> NoReturn: + self.print_usage(sys.stderr) + self.exit(UNKNOWN_ERROR, f"{msg}\n") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 0000000..af1bb6a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +import functools +import sys +from collections.abc import Generator, Iterable, Iterator +from typing import Callable, Literal, TypeVar + +from pip._vendor.rich.progress import ( + BarColumn, + DownloadColumn, + FileSizeColumn, + MofNCompleteColumn, + Progress, + ProgressColumn, + SpinnerColumn, + TextColumn, + TimeElapsedColumn, + TimeRemainingColumn, + TransferSpeedColumn, +) + +from pip._internal.cli.spinners import RateLimiter +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import get_console, get_indentation + +T = TypeVar("T") +ProgressRenderer = Callable[[Iterable[T]], Iterator[T]] +BarType = Literal["on", "off", "raw"] + + +def _rich_download_progress_bar( + iterable: Iterable[bytes], + *, + bar_type: BarType, + size: int | None, + initial_progress: int | None = None, +) -> Generator[bytes, None, None]: + assert bar_type == "on", "This should only be used in the 
default mode." + + if not size: + total = float("inf") + columns: tuple[ProgressColumn, ...] = ( + TextColumn("[progress.description]{task.description}"), + SpinnerColumn("line", speed=1.5), + FileSizeColumn(), + TransferSpeedColumn(), + TimeElapsedColumn(), + ) + else: + total = size + columns = ( + TextColumn("[progress.description]{task.description}"), + BarColumn(), + DownloadColumn(), + TransferSpeedColumn(), + TextColumn("{task.fields[time_description]}"), + TimeRemainingColumn(elapsed_when_finished=True), + ) + + progress = Progress(*columns, refresh_per_second=5) + task_id = progress.add_task( + " " * (get_indentation() + 2), total=total, time_description="eta" + ) + if initial_progress is not None: + progress.update(task_id, advance=initial_progress) + with progress: + for chunk in iterable: + yield chunk + progress.update(task_id, advance=len(chunk)) + progress.update(task_id, time_description="") + + +def _rich_install_progress_bar( + iterable: Iterable[InstallRequirement], *, total: int +) -> Iterator[InstallRequirement]: + columns = ( + TextColumn("{task.fields[indent]}"), + BarColumn(), + MofNCompleteColumn(), + TextColumn("{task.description}"), + ) + console = get_console() + + bar = Progress(*columns, refresh_per_second=6, console=console, transient=True) + # Hiding the progress bar at initialization forces a refresh cycle to occur + # until the bar appears, avoiding very short flashes. + task = bar.add_task("", total=total, indent=" " * get_indentation(), visible=False) + with bar: + for req in iterable: + bar.update(task, description=rf"\[{req.name}]", visible=True) + yield req + bar.advance(task) + + +def _raw_progress_bar( + iterable: Iterable[bytes], + *, + size: int | None, + initial_progress: int | None = None, +) -> Generator[bytes, None, None]: + def write_progress(current: int, total: int) -> None: + sys.stdout.write(f"Progress {current} of {total}\n") + sys.stdout.flush() + + current = initial_progress or 0 + total = size or 0 + rate_limiter = RateLimiter(0.25) + + write_progress(current, total) + for chunk in iterable: + current += len(chunk) + if rate_limiter.ready() or current == total: + write_progress(current, total) + rate_limiter.reset() + yield chunk + + +def get_download_progress_renderer( + *, bar_type: BarType, size: int | None = None, initial_progress: int | None = None +) -> ProgressRenderer[bytes]: + """Get an object that can be used to render the download progress. + + Returns a callable, that takes an iterable to "wrap". + """ + if bar_type == "on": + return functools.partial( + _rich_download_progress_bar, + bar_type=bar_type, + size=size, + initial_progress=initial_progress, + ) + elif bar_type == "raw": + return functools.partial( + _raw_progress_bar, + size=size, + initial_progress=initial_progress, + ) + else: + return iter # no-op, when passed an iterator + + +def get_install_progress_renderer( + *, bar_type: BarType, total: int +) -> ProgressRenderer[InstallRequirement]: + """Get an object that can be used to render the install progress. + Returns a callable, that takes an iterable to "wrap". 
+ """ + if bar_type == "on": + return functools.partial(_rich_install_progress_bar, total=total) + else: + return iter diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 0000000..f6d7f81 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,371 @@ +"""Contains the RequirementCommand base class. + +This class is in a separate module so the commands that do not always +need PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. +""" + +from __future__ import annotations + +import logging +import os +from functools import partial +from optparse import Values +from typing import Any, Callable, TypeVar + +from pip._internal.build_env import SubprocessBuildEnvironmentInstaller +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.index_command import IndexGroupCommand +from pip._internal.cli.index_command import SessionCommandMixin as SessionCommandMixin +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.operations.build.build_tracker import BuildTracker +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_dependency_group import parse_dependency_groups +from pip._internal.req.req_file import parse_requirements +from pip._internal.req.req_install import InstallRequirement +from pip._internal.resolution.base import BaseResolver +from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + tempdir_kinds, +) + +logger = logging.getLogger(__name__) + + +def should_ignore_regular_constraints(options: Values) -> bool: + """ + Check if regular constraints should be ignored because + we are in a isolated build process and build constraints + feature is enabled but no build constraints were passed. + """ + + return os.environ.get("_PIP_IN_BUILD_IGNORE_CONSTRAINTS") == "1" + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +_CommandT = TypeVar("_CommandT", bound="RequirementCommand") + + +def with_cleanup( + func: Callable[[_CommandT, Values, list[str]], int], +) -> Callable[[_CommandT, Values, list[str]], int]: + """Decorator for common logic related to managing temporary + directories. + """ + + def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None: + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper(self: _CommandT, options: Values, args: list[str]) -> int: + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. 
In that case we do + # not want to accidentally remove it. + configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + def __init__(self, *args: Any, **kw: Any) -> None: + super().__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.dependency_groups()) + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def determine_resolver_variant(options: Values) -> str: + """Determines which resolver should be used, based on the given options.""" + if "legacy-resolver" in options.deprecated_features_enabled: + return "legacy" + + return "resolvelib" + + @classmethod + def make_requirement_preparer( + cls, + temp_build_dir: TempDirectory, + options: Values, + build_tracker: BuildTracker, + session: PipSession, + finder: PackageFinder, + use_user_site: bool, + download_dir: str | None = None, + verbosity: int = 0, + ) -> RequirementPreparer: + """ + Create a RequirementPreparer instance for the given parameters. + """ + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + legacy_resolver = False + + resolver_variant = cls.determine_resolver_variant(options) + if resolver_variant == "resolvelib": + lazy_wheel = "fast-deps" in options.features_enabled + if lazy_wheel: + logger.warning( + "pip is using lazily downloaded wheels using HTTP " + "range requests to obtain dependency information. " + "This experimental feature is enabled through " + "--use-feature=fast-deps and it is not ready for " + "production." + ) + else: + legacy_resolver = True + lazy_wheel = False + if "fast-deps" in options.features_enabled: + logger.warning( + "fast-deps has no effect when used with the legacy resolver." + ) + + # Handle build constraints + build_constraints = getattr(options, "build_constraints", []) + build_constraint_feature_enabled = ( + "build-constraint" in options.features_enabled + ) + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + build_isolation=options.build_isolation, + build_isolation_installer=SubprocessBuildEnvironmentInstaller( + finder, + build_constraints=build_constraints, + build_constraint_feature_enabled=build_constraint_feature_enabled, + ), + check_build_deps=options.check_build_deps, + build_tracker=build_tracker, + session=session, + progress_bar=options.progress_bar, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + lazy_wheel=lazy_wheel, + verbosity=verbosity, + legacy_resolver=legacy_resolver, + resume_retries=options.resume_retries, + ) + + @classmethod + def make_resolver( + cls, + preparer: RequirementPreparer, + finder: PackageFinder, + options: Values, + wheel_cache: WheelCache | None = None, + use_user_site: bool = False, + ignore_installed: bool = True, + ignore_requires_python: bool = False, + force_reinstall: bool = False, + upgrade_strategy: str = "to-satisfy-only", + py_version_info: tuple[int, ...] | None = None, + ) -> BaseResolver: + """ + Create a Resolver instance for the given parameters. + """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + ) + resolver_variant = cls.determine_resolver_variant(options) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. 
+ if resolver_variant == "resolvelib": + import pip._internal.resolution.resolvelib.resolver + + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + import pip._internal.resolution.legacy.resolver + + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args: list[str], + options: Values, + finder: PackageFinder, + session: PipSession, + ) -> list[InstallRequirement]: + """ + Parse command-line arguments into the corresponding requirements. + """ + requirements: list[InstallRequirement] = [] + + if not should_ignore_regular_constraints(options): + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, + finder=finder, + options=options, + session=session, + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, + comes_from=None, + isolated=options.isolated_mode, + user_supplied=True, + config_settings=getattr(options, "config_settings", None), + ) + requirements.append(req_to_add) + + if options.dependency_groups: + for req in parse_dependency_groups(options.dependency_groups): + req_to_add = install_req_from_req_string( + req, + isolated=options.isolated_mode, + user_supplied=True, + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + config_settings=getattr(options, "config_settings", None), + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, finder=finder, options=options, session=session + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=True, + config_settings=( + parsed_req.options.get("config_settings") + if parsed_req.options + else None + ), + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. 
+ if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not ( + args + or options.editables + or options.requirements + or options.dependency_groups + ): + opts = {"name": self.name} + if options.find_links: + raise CommandError( + "You must give at least one requirement to {name} " + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=" ".join(options.find_links)) + ) + ) + else: + raise CommandError( + "You must give at least one requirement to {name} " + '(see "pip help {name}")'.format(**opts) + ) + + return requirements + + @staticmethod + def trace_basic_info(finder: PackageFinder) -> None: + """ + Trace basic information about the provided objects. + """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: TargetPython | None = None, + ignore_requires_python: bool | None = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 0000000..58aad28 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,235 @@ +from __future__ import annotations + +import contextlib +import itertools +import logging +import sys +import time +from collections.abc import Generator +from typing import IO, Final + +from pip._vendor.rich.console import ( + Console, + ConsoleOptions, + RenderableType, + RenderResult, +) +from pip._vendor.rich.live import Live +from pip._vendor.rich.measure import Measurement +from pip._vendor.rich.text import Text + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_console, get_indentation + +logger = logging.getLogger(__name__) + +SPINNER_CHARS: Final = r"-\|/" +SPINS_PER_SECOND: Final = 8 + + +class SpinnerInterface: + def spin(self) -> None: + raise NotImplementedError() + + def finish(self, final_status: str) -> None: + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__( + self, + message: str, + file: IO[str] | None = None, + spin_chars: str = SPINNER_CHARS, + # Empirically, 8 updates/second looks nice + min_update_interval_seconds: float = 1 / SPINS_PER_SECOND, + ): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... 
") + self._width = 0 + + def _write(self, status: str) -> None: + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. +class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None: + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status: str) -> None: + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._update(f"finished with status '{final_status}'") + self._finished = True + + +class RateLimiter: + def __init__(self, min_update_interval_seconds: float) -> None: + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update: float = 0 + + def ready(self) -> bool: + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self) -> None: + self._last_update = time.time() + + +@contextlib.contextmanager +def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]: + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner: SpinnerInterface = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +class _PipRichSpinner: + """ + Custom rich spinner that matches the style of the legacy spinners. + + (*) Updates will be handled in a background thread by a rich live panel + which will call render() automatically at the appropriate time. 
+ """ + + def __init__(self, label: str) -> None: + self.label = label + self._spin_cycle = itertools.cycle(SPINNER_CHARS) + self._spinner_text = "" + self._finished = False + self._indent = get_indentation() * " " + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + yield self.render() + + def __rich_measure__( + self, console: Console, options: ConsoleOptions + ) -> Measurement: + text = self.render() + return Measurement.get(console, options, text) + + def render(self) -> RenderableType: + if not self._finished: + self._spinner_text = next(self._spin_cycle) + + return Text.assemble(self._indent, self.label, " ... ", self._spinner_text) + + def finish(self, status: str) -> None: + """Stop spinning and set a final status message.""" + self._spinner_text = status + self._finished = True + + +@contextlib.contextmanager +def open_rich_spinner(label: str, console: Console | None = None) -> Generator[None]: + if not logger.isEnabledFor(logging.INFO): + # Don't show spinner if --quiet is given. + yield + return + + console = console or get_console() + spinner = _PipRichSpinner(label) + with Live(spinner, refresh_per_second=SPINS_PER_SECOND, console=console): + try: + yield + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +HIDE_CURSOR = "\x1b[?25l" +SHOW_CURSOR = "\x1b[?25h" + + +@contextlib.contextmanager +def hidden_cursor(file: IO[str]) -> Generator[None, None, None]: + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. + # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py b/venv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py new file mode 100644 index 0000000..5e29502 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/cli/status_codes.py @@ -0,0 +1,6 @@ +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py new file mode 100644 index 0000000..bedeca9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/__init__.py @@ -0,0 +1,139 @@ +""" +Package containing all pip commands +""" + +from __future__ import annotations + +import importlib +from collections import namedtuple +from typing import Any + +from pip._internal.cli.base_command import Command + +CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary") + +# This dictionary does a bunch of heavy lifting for help output: +# - Enables avoiding additional (costly) imports for presenting `--help`. +# - The ordering matters for help display. +# +# Even though the module path starts with the same "pip._internal.commands" +# prefix, the full path makes testing easier (specifically when modifying +# `commands_dict` in test setup / teardown). 
+commands_dict: dict[str, CommandInfo] = { + "install": CommandInfo( + "pip._internal.commands.install", + "InstallCommand", + "Install packages.", + ), + "lock": CommandInfo( + "pip._internal.commands.lock", + "LockCommand", + "Generate a lock file.", + ), + "download": CommandInfo( + "pip._internal.commands.download", + "DownloadCommand", + "Download packages.", + ), + "uninstall": CommandInfo( + "pip._internal.commands.uninstall", + "UninstallCommand", + "Uninstall packages.", + ), + "freeze": CommandInfo( + "pip._internal.commands.freeze", + "FreezeCommand", + "Output installed packages in requirements format.", + ), + "inspect": CommandInfo( + "pip._internal.commands.inspect", + "InspectCommand", + "Inspect the python environment.", + ), + "list": CommandInfo( + "pip._internal.commands.list", + "ListCommand", + "List installed packages.", + ), + "show": CommandInfo( + "pip._internal.commands.show", + "ShowCommand", + "Show information about installed packages.", + ), + "check": CommandInfo( + "pip._internal.commands.check", + "CheckCommand", + "Verify installed packages have compatible dependencies.", + ), + "config": CommandInfo( + "pip._internal.commands.configuration", + "ConfigurationCommand", + "Manage local and global configuration.", + ), + "search": CommandInfo( + "pip._internal.commands.search", + "SearchCommand", + "Search PyPI for packages.", + ), + "cache": CommandInfo( + "pip._internal.commands.cache", + "CacheCommand", + "Inspect and manage pip's wheel cache.", + ), + "index": CommandInfo( + "pip._internal.commands.index", + "IndexCommand", + "Inspect information available from package indexes.", + ), + "wheel": CommandInfo( + "pip._internal.commands.wheel", + "WheelCommand", + "Build wheels from your requirements.", + ), + "hash": CommandInfo( + "pip._internal.commands.hash", + "HashCommand", + "Compute hashes of package archives.", + ), + "completion": CommandInfo( + "pip._internal.commands.completion", + "CompletionCommand", + "A helper command used for command completion.", + ), + "debug": CommandInfo( + "pip._internal.commands.debug", + "DebugCommand", + "Show information useful for debugging.", + ), + "help": CommandInfo( + "pip._internal.commands.help", + "HelpCommand", + "Show help for commands.", + ), +} + + +def create_command(name: str, **kwargs: Any) -> Command: + """ + Create an instance of the Command class with the given name. 
+ """ + module_path, class_name, summary = commands_dict[name] + module = importlib.import_module(module_path) + command_class = getattr(module, class_name) + command = command_class(name=name, summary=summary, **kwargs) + + return command + + +def get_similar_commands(name: str) -> str | None: + """Command name auto-correct.""" + from difflib import get_close_matches + + name = name.lower() + + close_commands = get_close_matches(name, commands_dict.keys()) + + if close_commands: + return close_commands[0] + else: + return None diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..45f021f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..400b4ba Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/cache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc new file mode 100644 index 0000000..c75c399 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/check.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc new file mode 100644 index 0000000..7cd80ff Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/completion.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc new file mode 100644 index 0000000..7b3d2b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc new file mode 100644 index 0000000..bf21fab Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/debug.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc new file mode 100644 index 0000000..b0bb097 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/download.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc new file mode 100644 index 0000000..54b884c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc new file mode 100644 index 0000000..0bd311e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/hash.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc new file mode 100644 index 0000000..464145d Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/help.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc new file mode 100644 index 0000000..247bc45 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/index.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc new file mode 100644 index 0000000..c64bbd5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc new file mode 100644 index 0000000..85fefa8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/install.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc new file mode 100644 index 0000000..7e6256c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/list.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc new file mode 100644 index 0000000..43e739e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/lock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc new file mode 100644 index 0000000..bcd4cc1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/search.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc new file mode 100644 index 0000000..e057215 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/show.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc new file mode 100644 index 0000000..f7997af 
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc
new file mode 100644
index 0000000..9fb4667
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/cache.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/cache.py
new file mode 100644
index 0000000..c8e7aed
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/cache.py
@@ -0,0 +1,231 @@
+import os
+import textwrap
+from optparse import Values
+from typing import Callable
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils import filesystem
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import format_size
+
+logger = getLogger(__name__)
+
+
+class CacheCommand(Command):
+    """
+    Inspect and manage pip's wheel cache.
+
+    Subcommands:
+
+    - dir: Show the cache directory.
+    - info: Show information about the cache.
+    - list: List filenames of packages stored in the cache.
+    - remove: Remove one or more packages from the cache.
+    - purge: Remove all items from the cache.
+
+    ``<pattern>`` can be a glob expression or a package name.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog dir
+        %prog info
+        %prog list [<pattern>] [--format=[human, abspath]]
+        %prog remove <pattern>
+        %prog purge
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="human",
+            choices=("human", "abspath"),
+            help="Select the output format among: human (default) or abspath",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]:
+        return {
+            "dir": self.get_cache_dir,
+            "info": self.get_cache_info,
+            "list": self.list_cache_items,
+            "remove": self.remove_cache_items,
+            "purge": self.purge_cache,
+        }
+
+    def run(self, options: Values, args: list[str]) -> int:
+        handler_map = self.handler_map()
+
+        if not options.cache_dir:
+            logger.error("pip cache commands can not function since cache is disabled.")
+            return ERROR
+
+        # Determine action
+        if not args or args[0] not in handler_map:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handler_map)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
+ try: + handler_map[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options: Values, args: list[str]) -> None: + if args: + raise CommandError("Too many arguments") + + logger.info(options.cache_dir) + + def get_cache_info(self, options: Values, args: list[str]) -> None: + if args: + raise CommandError("Too many arguments") + + num_http_files = len(self._find_http_files(options)) + num_packages = len(self._find_wheels(options, "*")) + + http_cache_location = self._cache_dir(options, "http-v2") + old_http_cache_location = self._cache_dir(options, "http") + wheels_cache_location = self._cache_dir(options, "wheels") + http_cache_size = filesystem.format_size( + filesystem.directory_size(http_cache_location) + + filesystem.directory_size(old_http_cache_location) + ) + wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) + + message = ( + textwrap.dedent( + """ + Package index page cache location (pip v23.3+): {http_cache_location} + Package index page cache location (older pips): {old_http_cache_location} + Package index page cache size: {http_cache_size} + Number of HTTP files: {num_http_files} + Locally built wheels location: {wheels_cache_location} + Locally built wheels size: {wheels_cache_size} + Number of locally built wheels: {package_count} + """ # noqa: E501 + ) + .format( + http_cache_location=http_cache_location, + old_http_cache_location=old_http_cache_location, + http_cache_size=http_cache_size, + num_http_files=num_http_files, + wheels_cache_location=wheels_cache_location, + package_count=num_packages, + wheels_cache_size=wheels_cache_size, + ) + .strip() + ) + + logger.info(message) + + def list_cache_items(self, options: Values, args: list[str]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if args: + pattern = args[0] + else: + pattern = "*" + + files = self._find_wheels(options, pattern) + if options.list_format == "human": + self.format_for_human(files) + else: + self.format_for_abspath(files) + + def format_for_human(self, files: list[str]) -> None: + if not files: + logger.info("No locally built wheels cached.") + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(f" - {wheel} ({size})") + logger.info("Cache contents:\n") + logger.info("\n".join(sorted(results))) + + def format_for_abspath(self, files: list[str]) -> None: + if files: + logger.info("\n".join(sorted(files))) + + def remove_cache_items(self, options: Values, args: list[str]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if not args: + raise CommandError("Please provide a pattern") + + files = self._find_wheels(options, args[0]) + + no_matching_msg = "No matching packages" + if args[0] == "*": + # Only fetch http files if no specific pattern given + files += self._find_http_files(options) + else: + # Add the pattern to the log message + no_matching_msg += f' for pattern "{args[0]}"' + + if not files: + logger.warning(no_matching_msg) + + bytes_removed = 0 + for filename in files: + bytes_removed += os.stat(filename).st_size + os.unlink(filename) + logger.verbose("Removed %s", filename) + logger.info("Files removed: %s (%s)", len(files), format_size(bytes_removed)) + + def purge_cache(self, options: Values, args: list[str]) -> None: + if args: + raise CommandError("Too many arguments") + + return self.remove_cache_items(options, ["*"]) + + 
def _cache_dir(self, options: Values, subdir: str) -> str: + return os.path.join(options.cache_dir, subdir) + + def _find_http_files(self, options: Values) -> list[str]: + old_http_dir = self._cache_dir(options, "http") + new_http_dir = self._cache_dir(options, "http-v2") + return filesystem.find_files(old_http_dir, "*") + filesystem.find_files( + new_http_dir, "*" + ) + + def _find_wheels(self, options: Values, pattern: str) -> list[str]: + wheel_dir = self._cache_dir(options, "wheels") + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. + # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/check.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/check.py new file mode 100644 index 0000000..516757e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/check.py @@ -0,0 +1,66 @@ +import logging +from optparse import Values + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.metadata import get_default_environment +from pip._internal.operations.check import ( + check_package_set, + check_unsupported, + create_package_set_from_installed, +) +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def run(self, options: Values, args: list[str]) -> int: + package_set, parsing_probs = create_package_set_from_installed() + missing, conflicting = check_package_set(package_set) + unsupported = list( + check_unsupported( + get_default_environment().iter_installed_distributions(), + get_supported(), + ) + ) + + for project_name in missing: + version = package_set[project_name].version + for dependency in missing[project_name]: + write_output( + "%s %s requires %s, which is not installed.", + project_name, + version, + dependency[0], + ) + + for project_name in conflicting: + version = package_set[project_name].version + for dep_name, dep_version, req in conflicting[project_name]: + write_output( + "%s %s has requirement %s, but you have %s %s.", + project_name, + version, + req, + dep_name, + dep_version, + ) + for package in unsupported: + write_output( + "%s %s is not supported on this platform", + package.raw_name, + package.version, + ) + if missing or conflicting or parsing_probs or unsupported: + return ERROR + else: + write_output("No broken requirements found.") + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/completion.py 
b/venv/lib/python3.12/site-packages/pip/_internal/commands/completion.py new file mode 100644 index 0000000..6d9597b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/completion.py @@ -0,0 +1,135 @@ +import sys +import textwrap +from optparse import Values + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.misc import get_prog + +BASE_COMPLETION = """ +# pip {shell} completion start{script}# pip {shell} completion end +""" + +COMPLETION_SCRIPTS = { + "bash": """ + _pip_completion() + {{ + COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} + """, + "zsh": """ + #compdef -P pip[0-9.]# + __pip() {{ + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) + }} + if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" + else + # eval/source/. command, register function for later + compdef __pip -P 'pip[0-9.]#' + fi + """, + "fish": """ + function __fish_complete_pip + set -lx COMP_WORDS \\ + (commandline --current-process --tokenize --cut-at-cursor) \\ + (commandline --current-token --cut-at-cursor) + set -lx COMP_CWORD (math (count $COMP_WORDS) - 1) + set -lx PIP_AUTO_COMPLETE 1 + set -l completions + if string match -q '2.*' $version + set completions (eval $COMP_WORDS[1]) + else + set completions ($COMP_WORDS[1]) + end + string split \\ -- $completions + end + complete -fa "(__fish_complete_pip)" -c {prog} + """, + "powershell": """ + if ((Test-Path Function:\\TabExpansion) -and -not ` + (Test-Path Function:\\_pip_completeBackup)) {{ + Rename-Item Function:\\TabExpansion _pip_completeBackup + }} + function TabExpansion($line, $lastWord) {{ + $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart() + if ($lastBlock.StartsWith("{prog} ")) {{ + $Env:COMP_WORDS=$lastBlock + $Env:COMP_CWORD=$lastBlock.Split().Length - 1 + $Env:PIP_AUTO_COMPLETE=1 + (& {prog}).Split() + Remove-Item Env:COMP_WORDS + Remove-Item Env:COMP_CWORD + Remove-Item Env:PIP_AUTO_COMPLETE + }} + elseif (Test-Path Function:\\_pip_completeBackup) {{ + # Fall back on existing tab expansion + _pip_completeBackup $line $lastWord + }} + }} + """, +} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--bash", + "-b", + action="store_const", + const="bash", + dest="shell", + help="Emit completion code for bash", + ) + self.cmd_opts.add_option( + "--zsh", + "-z", + action="store_const", + const="zsh", + dest="shell", + help="Emit completion code for zsh", + ) + self.cmd_opts.add_option( + "--fish", + "-f", + action="store_const", + const="fish", + dest="shell", + help="Emit completion code for fish", + ) + self.cmd_opts.add_option( + "--powershell", + "-p", + action="store_const", + const="powershell", + dest="shell", + help="Emit completion code for powershell", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ["--" + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog()) 
+ ) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS + else: + sys.stderr.write( "ERROR: You must pass {}\n".format(" or ".join(shell_options)) + ) + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 0000000..7bcea04 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,288 @@ +from __future__ import annotations + +import logging +import os +import subprocess +from optparse import Values +from typing import Any, Callable + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.configuration import ( + Configuration, + Kind, + get_configuration_files, + kinds, +) +from pip._internal.exceptions import PipError +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_prog, write_output + +logger = logging.getLogger(__name__) + + +class ConfigurationCommand(Command): + """ + Manage local and global configuration. + + Subcommands: + + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with command.option + - set: Set the command.option=value + - unset: Unset the value associated with command.option + - debug: List the configuration files and values defined under them + + Configuration keys should be dot separated command and option name, + with the special prefix "global" affecting any command. For example, + "pip config set global.index-url https://example.org/" would configure + the index url for all commands, but "pip config set download.timeout 10" + would configure a 10 second timeout only for "pip download" commands. + + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen to the user file by + default. + """ + + ignore_require_venv = True + usage = """ + %prog [<file-option>] list + %prog [<file-option>] [--editor <editor-path>] edit + + %prog [<file-option>] get command.option + %prog [<file-option>] set command.option value + %prog [<file-option>] unset command.option + %prog [<file-option>] debug + """ + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--editor", + dest="editor", + action="store", + default=None, + help=( + "Editor to use to edit the file. Uses VISUAL or EDITOR " + "environment variables if not provided."
+ ), + ) + + self.cmd_opts.add_option( + "--global", + dest="global_file", + action="store_true", + default=False, + help="Use the system-wide configuration file only", + ) + + self.cmd_opts.add_option( + "--user", + dest="user_file", + action="store_true", + default=False, + help="Use the user configuration file only", + ) + + self.cmd_opts.add_option( + "--site", + dest="site_file", + action="store_true", + default=False, + help="Use the current environment configuration file only", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]: + return { + "list": self.list_values, + "edit": self.open_in_editor, + "get": self.get_name, + "set": self.set_name_value, + "unset": self.unset_name, + "debug": self.list_config_values, + } + + def run(self, options: Values, args: list[str]) -> int: + handler_map = self.handler_map() + + # Determine action + if not args or args[0] not in handler_map: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handler_map)), + ) + return ERROR + + action = args[0] + + # Determine which configuration files are to be loaded + # Depends on whether the command is modifying. + try: + load_only = self._determine_file( + options, need_value=(action in ["get", "set", "unset", "edit"]) + ) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + # Load a new configuration + self.configuration = Configuration( + isolated=options.isolated_mode, load_only=load_only + ) + self.configuration.load() + + # Error handling happens here, not in the action-handlers. + try: + handler_map[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options: Values, need_value: bool) -> Kind | None: + file_options = [ + key + for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) + if value + ] + + if not file_options: + if not need_value: + return None + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE + else: + return kinds.USER + elif len(file_options) == 1: + return file_options[0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --site, --global) to perform." 
+ ) + + def list_values(self, options: Values, args: list[str]) -> None: + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + for key, value in sorted(value.items()): + write_output("%s=%r", key, value) + + def get_name(self, options: Values, args: list[str]) -> None: + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + write_output("%s", value) + + def set_name_value(self, options: Values, args: list[str]) -> None: + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options: Values, args: list[str]) -> None: + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def list_config_values(self, options: Values, args: list[str]) -> None: + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = os.path.exists(fname) + write_output("%s, exists: %r", fname, file_exists) + if file_exists: + self.print_config_file_values(variant, fname) + + def print_config_file_values(self, variant: Kind, fname: str) -> None: + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.get_values_in_config(variant).items(): + with indent_log(): + if name == fname: + for confname, confvalue in value.items(): + write_output("%s: %s", confname, confvalue) + + def print_env_var_values(self) -> None: + """Get key-values pairs present as environment variables""" + write_output("%s:", "env_var") + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = f"PIP_{key.upper()}" + write_output("%s=%r", env_var, value) + + def open_in_editor(self, options: Values, args: list[str]) -> None: + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + elif '"' in fname: + # This shouldn't happen, unless we see a username like that. + # If that happens, we'd appreciate a pull request fixing this. + raise PipError( + f'Can not open an editor for a file name containing "\n{fname}' + ) + + try: + subprocess.check_call(f'{editor} "{fname}"', shell=True) + except FileNotFoundError as e: + if not e.filename: + e.filename = editor + raise + except subprocess.CalledProcessError as e: + raise PipError(f"Editor Subprocess exited with exit code {e.returncode}") + + def _get_n_args(self, args: list[str], example: str, n: int) -> Any: + """Helper to make sure the command got the right number of arguments""" + if len(args) != n: + msg = ( + f"Got unexpected number of arguments, expected {n}. " + f'(example: "{get_prog()} config {example}")' + ) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self) -> None: + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.exception( + "Unable to save configuration. Please report this as a bug." 
+ ) + raise PipError("Internal Error.") + + def _determine_editor(self, options: Values) -> str: + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/debug.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 0000000..0e187e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +import locale +import logging +import os +import sys +from optparse import Values +from types import ModuleType +from typing import Any + +import pip._vendor +from pip._vendor.certifi import where +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.configuration import Configuration +from pip._internal.metadata import get_environment +from pip._internal.utils.compat import open_text_resource +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version + +logger = logging.getLogger(__name__) + + +def show_value(name: str, value: Any) -> None: + logger.info("%s: %s", name, value) + + +def show_sys_implementation() -> None: + logger.info("sys.implementation:") + implementation_name = sys.implementation.name + with indent_log(): + show_value("name", implementation_name) + + +def create_vendor_txt_map() -> dict[str, str]: + with open_text_resource("pip._vendor", "vendor.txt") as f: + # Purge non version specifying lines. + # Also, remove any space prefix or suffixes (including comments). + lines = [ + line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line + ] + + # Transform into "module" -> version dict. + return dict(line.split("==", 1) for line in lines) + + +def get_module_from_module_name(module_name: str) -> ModuleType | None: + # Module name can be uppercase in vendor.txt for some reason... + module_name = module_name.lower().replace("-", "_") + # PATCH: setuptools is actually only pkg_resources. + if module_name == "setuptools": + module_name = "pkg_resources" + + try: + __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pip._vendor, module_name) + except ImportError: + # We allow 'truststore' to fail to import due + # to being unavailable on Python 3.9 and earlier. + if module_name == "truststore" and sys.version_info < (3, 10): + return None + raise + + +def get_vendor_version_from_module(module_name: str) -> str | None: + module = get_module_from_module_name(module_name) + version = getattr(module, "__version__", None) + + if module and not version: + # Try to find version in debundled module info. + assert module.__file__ is not None + env = get_environment([os.path.dirname(module.__file__)]) + dist = env.get_distribution(module_name) + if dist: + version = str(dist.version) + + return version + + +def show_actual_vendor_versions(vendor_txt_versions: dict[str, str]) -> None: + """Log the actual version and print extra info if there is + a conflict or if the actual version could not be imported. 
+ """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = "" + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ( + " (Unable to locate actual module version, using" + " vendor.txt specified version)" + ) + actual_version = expected_version + elif parse_version(actual_version) != parse_version(expected_version): + extra_message = ( + " (CONFLICT: vendor.txt suggests version should" + f" be {expected_version})" + ) + logger.info("%s==%s%s", module_name, actual_version, extra_message) + + +def show_vendor_versions() -> None: + logger.info("vendored library versions:") + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options: Values) -> None: + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_sorted_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = "" + if formatted_target: + suffix = f" (target: {formatted_target})" + + msg = f"Compatible tags: {len(tags)}{suffix}" + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" + logger.info(msg) + + +def ca_bundle_info(config: Configuration) -> str: + levels = {key.split(".", 1)[0] for key, _ in config.items()} + if not levels: + return "Not specified" + + levels_that_override_global = ["install", "wheel", "download"] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return "global" + + if "global" in levels: + levels.remove("global") + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog """ + ignore_require_venv = True + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options: Values, args: list[str]) -> int: + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value("pip version", get_pip_version()) + show_value("sys.version", sys.version) + show_value("sys.executable", sys.executable) + show_value("sys.getdefaultencoding", sys.getdefaultencoding()) + show_value("sys.getfilesystemencoding", sys.getfilesystemencoding()) + show_value( + "locale.getpreferredencoding", + locale.getpreferredencoding(), + ) + show_value("sys.platform", sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE")) + show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE")) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/download.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/download.py new file mode 100644 index 0000000..903917b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,142 @@ +import logging +import os +from optparse import Values + +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ... + %prog [options] ... 
+ %prog [options] <archive url/path> ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.build_constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + + self.cmd_opts.add_option( + "-d", + "--dest", + "--destination-dir", + "--destination-directory", + dest="download_dir", + metavar="dir", + default=os.curdir, + help="Download packages into <dir>.", + ) + + cmdoptions.add_target_python_options(self.cmd_opts) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: list[str]) -> int: + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + cmdoptions.check_dist_restriction(options) + cmdoptions.check_build_constraints(options) + + options.download_dir = normalize_path(options.download_dir) + ensure_dir(options.download_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + build_tracker = self.enter_context(get_build_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + verbosity=self.verbosity, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + ignore_requires_python=options.ignore_requires_python, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + + downloaded: list[str] = [] + for req in requirement_set.requirements.values(): + if req.satisfied_by is None: + assert req.name is not None + preparer.save_linked_requirement(req) + downloaded.append(req.name) + + if downloaded: + write_output("Successfully downloaded %s", " ".join(downloaded)) + + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py new file mode 100644 index 0000000..7794857 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/freeze.py @@ -0,0 +1,107 @@ +import sys +from 
optparse import Values + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.operations.freeze import freeze +from pip._internal.utils.compat import stdlib_pkgs + + +def _should_suppress_build_backends() -> bool: + return sys.version_info < (3, 12) + + +def _dev_pkgs() -> set[str]: + pkgs = {"pip"} + + if _should_suppress_build_backends(): + pkgs |= {"setuptools", "distribute", "wheel"} + + return pkgs + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + Packages are listed in a case-insensitive sorted order. + """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help=( + "Use the order in the given requirements file and its " + "comments when generating output. This option can be " + "used multiple times." + ), + ) + self.cmd_opts.add_option( + "-l", + "--local", + dest="local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not output " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + "--all", + dest="freeze_all", + action="store_true", + help=( + "Do not skip these packages in the output:" + " {}".format(", ".join(_dev_pkgs())) + ), + ) + self.cmd_opts.add_option( + "--exclude-editable", + dest="exclude_editable", + action="store_true", + help="Exclude editable packages from output.", + ) + self.cmd_opts.add_option(cmdoptions.list_exclude()) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + skip = set(stdlib_pkgs) + if not options.freeze_all: + skip.update(_dev_pkgs()) + + if options.excludes: + skip.update(options.excludes) + + cmdoptions.check_list_path_option(options) + + for line in freeze( + requirement=options.requirements, + local_only=options.local, + user_only=options.user, + paths=options.path, + isolated=options.isolated_mode, + skip=skip, + exclude_editable=options.exclude_editable, + ): + sys.stdout.write(line + "\n") + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/hash.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/hash.py new file mode 100644 index 0000000..271a4c9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/hash.py @@ -0,0 +1,58 @@ +import hashlib +import logging +import sys +from optparse import Values + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES +from pip._internal.utils.misc import read_chunks, write_output + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + """ + + usage = "%prog [options] <file> ..."
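+ # Example of the output format, suitable for pasting into a requirements + # file (editorial; the file name and digest are hypothetical): + # + # $ pip hash ./downloads/example-1.0-py3-none-any.whl + # ./downloads/example-1.0-py3-none-any.whl: + # --hash=sha256:9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08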
+ ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-a", + "--algorithm", + dest="algorithm", + choices=STRONG_HASHES, + action="store", + default=FAVORITE_HASH, + help="The hash algorithm to use: one of {}".format( + ", ".join(STRONG_HASHES) + ), + ) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + write_output( + "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm) + ) + return SUCCESS + + +def _hash_of_file(path: str, algorithm: str) -> str: + """Return the hash digest of a file.""" + with open(path, "rb") as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/help.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/help.py new file mode 100644 index 0000000..2ae658f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,40 @@ +from optparse import Values + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + + usage = """ + %prog <command>""" + ignore_require_venv = True + + def run(self, options: Values, args: list[str]) -> int: + from pip._internal.commands import ( + commands_dict, + create_command, + get_similar_commands, + ) + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + command = create_command(cmd_name) + command.parser.print_help() + + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/index.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/index.py new file mode 100644 index 0000000..ecac998 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/index.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import json +import logging +from collections.abc import Iterable +from optparse import Values +from typing import Any, Callable + +from pip._vendor.packaging.version import Version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.commands.search import ( + get_installed_distribution, + print_dist_installation_info, +) +from pip._internal.exceptions import CommandError, DistributionNotFound, PipError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class IndexCommand(IndexGroupCommand): + """ + Inspect information available from package indexes. 
+ """ + + ignore_require_venv = True + usage = """ + %prog versions + """ + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.json()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]: + return { + "versions": self.get_available_package_versions, + } + + def run(self, options: Values, args: list[str]) -> int: + handler_map = self.handler_map() + + # Determine action + if not args or args[0] not in handler_map: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handler_map)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. + try: + handler_map[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: TargetPython | None = None, + ignore_requires_python: bool | None = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to the index command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) + + def get_available_package_versions(self, options: Values, args: list[Any]) -> None: + if len(args) != 1: + raise CommandError("You need to specify exactly one argument") + + target_python = cmdoptions.make_target_python(options) + query = args[0] + + with self._build_session(options) as session: + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + versions: Iterable[Version] = ( + candidate.version for candidate in finder.find_all_candidates(query) + ) + + if not options.pre: + # Remove prereleases + versions = ( + version for version in versions if not version.is_prerelease + ) + versions = set(versions) + + if not versions: + raise DistributionNotFound( + f"No matching distribution found for {query}" + ) + + formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] + latest = formatted_versions[0] + + dist = get_installed_distribution(query) + + if options.json: + structured_output = { + "name": query, + "versions": formatted_versions, + "latest": latest, + } + + if dist is not None: + structured_output["installed_version"] = str(dist.version) + + write_output(json.dumps(structured_output)) + + else: + write_output(f"{query} ({latest})") + write_output("Available versions: {}".format(", ".join(formatted_versions))) + print_dist_installation_info(latest, dist) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py new file mode 100644 index 0000000..e262012 --- 
/dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/inspect.py @@ -0,0 +1,92 @@ +import logging +from optparse import Values +from typing import Any + +from pip._vendor.packaging.markers import default_environment +from pip._vendor.rich import print_json + +from pip import __version__ +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.urls import path_to_url + +logger = logging.getLogger(__name__) + + +class InspectCommand(Command): + """ + Inspect the content of a Python environment and produce a report in JSON format. + """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not list " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + cmdoptions.check_list_path_option(options) + dists = get_environment(options.path).iter_installed_distributions( + local_only=options.local, + user_only=options.user, + skip=set(stdlib_pkgs), + ) + output = { + "version": "1", + "pip_version": __version__, + "installed": [self._dist_to_dict(dist) for dist in dists], + "environment": default_environment(), + # TODO tags? scheme? + } + print_json(data=output) + return SUCCESS + + def _dist_to_dict(self, dist: BaseDistribution) -> dict[str, Any]: + res: dict[str, Any] = { + "metadata": dist.metadata_dict, + "metadata_location": dist.info_location, + } + # direct_url. Note that we don't have download_info (as in the installation + # report) since it is not recorded in installed metadata. + direct_url = dist.direct_url + if direct_url is not None: + res["direct_url"] = direct_url.to_dict() + else: + # Emulate direct_url for legacy editable installs. + editable_project_location = dist.editable_project_location + if editable_project_location is not None: + res["direct_url"] = { + "url": path_to_url(editable_project_location), + "dir_info": { + "editable": True, + }, + } + # installer + installer = dist.installer + if dist.installer: + res["installer"] = installer + # requested + if dist.installed_with_dist_info: + res["requested"] = dist.requested + return res diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/install.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/install.py new file mode 100644 index 0000000..b16b8e3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,803 @@ +from __future__ import annotations + +import errno +import json +import operator +import os +import shutil +import site +from optparse import SUPPRESS_HELP, Values +from pathlib import Path + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.requests.exceptions import InvalidProxyURL +from pip._vendor.rich import print_json + +# Eagerly import self_outdated_check to avoid crashes. 
Otherwise, + # this module would be imported *after* pip was replaced, resulting + # in crashes if the new self_outdated_check module was incompatible + # with the rest of pip that's already imported, or allowing a + # wheel to execute arbitrary code on install by replacing + # self_outdated_check. +import pip._internal.self_outdated_check # noqa: F401 +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import ( + RequirementCommand, + with_cleanup, +) +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import ( + CommandError, + InstallationError, + InstallWheelBuildError, +) +from pip._internal.locations import get_scheme +from pip._internal.metadata import get_environment +from pip._internal.models.installation_report import InstallationReport +from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.operations.check import ConflictDetails, check_install_conflicts +from pip._internal.req import install_given_reqs +from pip._internal.req.req_install import ( + InstallRequirement, +) +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.filesystem import test_writable_dir +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + check_externally_managed, + ensure_dir, + get_pip_version, + protect_pip_from_modification_on_windows, + warn_if_run_as_root, + write_output, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) +from pip._internal.wheel_builder import build + +logger = getLogger(__name__) + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + + usage = """ + %prog [options] <requirement specifier> [package-index-options] ... + %prog [options] -r <requirements file> [package-index-options] ... + %prog [options] [-e] <vcs project url> ... + %prog [options] [-e] <local project path> ... + %prog [options] <archive url/path> ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.build_constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( + "--dry-run", + action="store_true", + dest="dry_run", + default=False, + help=( + "Don't actually install anything, just print what would be. " + "Can be used in combination with --ignore-installed " + "to 'resolve' the requirements." + ), + ) + self.cmd_opts.add_option( + "-t", + "--target", + dest="target_dir", + metavar="dir", + default=None, + help=( + "Install packages into <dir>. " + "By default this will not replace existing files/folders in " + "<dir>. Use --upgrade to replace existing packages in <dir> " + "with new versions." + ), + ) + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option( + "--user", + dest="use_user_site", + action="store_true", + help=( + "Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. 
(See the Python documentation for site.USER_BASE " + "for full details.)" + ), + ) + self.cmd_opts.add_option( + "--no-user", + dest="use_user_site", + action="store_false", + help=SUPPRESS_HELP, + ) + self.cmd_opts.add_option( + "--root", + dest="root_path", + metavar="dir", + default=None, + help="Install everything relative to this alternate root directory.", + ) + self.cmd_opts.add_option( + "--prefix", + dest="prefix_path", + metavar="dir", + default=None, + help=( + "Installation prefix where lib, bin and other top-level " + "folders are placed. Note that the resulting installation may " + "contain scripts and other resources which reference the " + "Python interpreter of pip, and not that of ``--prefix``. " + "See also the ``--python`` option if the intention is to " + "install packages into another (possibly pip-free) " + "environment." + ), + ) + + self.cmd_opts.add_option(cmdoptions.src()) + + self.cmd_opts.add_option( + "-U", + "--upgrade", + dest="upgrade", + action="store_true", + help=( + "Upgrade all specified packages to the newest available " + "version. The handling of dependencies depends on the " + "upgrade-strategy used." + ), + ) + + self.cmd_opts.add_option( + "--upgrade-strategy", + dest="upgrade_strategy", + default="only-if-needed", + choices=["only-if-needed", "eager"], + help=( + "Determines how dependency upgrading should be handled " + "[default: %default]. " + '"eager" - dependencies are upgraded regardless of ' + "whether the currently installed version satisfies the " + "requirements of the upgraded package(s). " + '"only-if-needed" - dependencies are upgraded only when they do not ' + "satisfy the requirements of the upgraded package(s)." + ), + ) + + self.cmd_opts.add_option( + "--force-reinstall", + dest="force_reinstall", + action="store_true", + help="Reinstall all packages even if they are already up-to-date.", + ) + + self.cmd_opts.add_option( + "-I", + "--ignore-installed", + dest="ignore_installed", + action="store_true", + help=( + "Ignore the installed packages, overwriting them. " + "This can break your system if the existing package " + "is of a different version or was installed " + "with a different package manager!" 
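+ # e.g. (editorial illustration; the package name is hypothetical): + # `pip install -I somepkg` re-downloads somepkg and overwrites its files + # without uninstalling any existing copy first, which is the hazard the + # help text above warns about.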
+ ), + ) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) + + self.cmd_opts.add_option(cmdoptions.config_settings()) + + self.cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + self.cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.root_user_action()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + self.cmd_opts.add_option( + "--report", + dest="json_report_file", + metavar="file", + default=None, + help=( + "Generate a JSON file describing what pip did to install " + "the provided requirements. " + "Can be used in combination with --dry-run and --ignore-installed " + "to 'resolve' the requirements. " + "When - is used as file name it writes to stdout. " + "When writing to stdout, please combine with the --quiet option " + "to avoid mixing pip logging output with JSON output." + ), + ) + + @with_cleanup + def run(self, options: Values, args: list[str]) -> int: + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + + # Check whether the environment we're installing into is externally + # managed, as specified in PEP 668. Specifying --root, --target, or + # --prefix disables the check, since there's no reliable way to locate + # the EXTERNALLY-MANAGED file for those cases. An exception is also + # made specifically for "--dry-run --report" for convenience. 
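+ # Illustrative consequence (editorial note; names are hypothetical): on a + # distro Python whose stdlib directory carries an EXTERNALLY-MANAGED + # marker, a plain `pip install somepkg` stops at the check below, while + # `pip install --dry-run --report out.json somepkg` or any install using + # --root/--target/--prefix bypasses it.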
+ installing_into_current_environment = ( + not (options.dry_run and options.json_report_file) + and options.root_path is None + and options.target_dir is None + and options.prefix_path is None + ) + if ( + installing_into_current_environment + and not options.override_externally_managed + ): + check_externally_managed() + + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + cmdoptions.check_build_constraints(options) + cmdoptions.check_dist_restriction(options, check_target=True) + + logger.verbose("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir: TempDirectory | None = None + target_temp_dir_path: str | None = None + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if ( + # fmt: off + os.path.exists(options.target_dir) and + not os.path.isdir(options.target_dir) + # fmt: on + ): + raise CommandError( + "Target path exists but is not a directory, will not continue." + ) + + # Create a target directory for using with the target option + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_tracker = self.enter_context(get_build_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + + wheel_cache = WheelCache(options.cache_dir) + + # Only when installing is it permitted to use PEP 660. + # In other circumstances (pip wheel, pip download) we generate + # regular (i.e. non editable) metadata and wheels. 
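+ # Editorial note: this flag is what lets `pip install -e ./proj` build a + # project through its PEP 660 build_editable hook; permit_editable_wheels + # defaults to False on InstallRequirement, so `pip wheel` and + # `pip download` never take this path.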
+ for req in reqs: + req.permit_editable_wheels = True + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, + verbosity=self.verbosity, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) + + if options.json_report_file: + report = InstallationReport(requirement_set.requirements_to_install) + if options.json_report_file == "-": + print_json(data=report.to_dict()) + else: + with open(options.json_report_file, "w", encoding="utf-8") as f: + json.dump(report.to_dict(), f, indent=2, ensure_ascii=False) + + if options.dry_run: + would_install_items = sorted( + (r.metadata["name"], r.metadata["version"]) + for r in requirement_set.requirements_to_install + ) + if would_install_items: + write_output( + "Would install %s", + " ".join("-".join(item) for item in would_install_items), + ) + return SUCCESS + + # If there is any more preparation to do for the actual installation, do + # so now. This includes actually downloading the files in the case that + # we have been using PEP-658 metadata so far. + preparer.prepare_linked_requirements_more( + requirement_set.requirements.values() + ) + + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) + + reqs_to_build = [ + r for r in requirement_set.requirements_to_install if not r.is_wheel + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=True, + ) + + if build_failures: + raise InstallWheelBuildError(build_failures) + + to_install = resolver.get_installation_order(requirement_set) + + # Check for conflicts in the package set we're installing. + conflicts: ConflictDetails | None = None + should_warn_about_conflicts = ( + not options.ignore_dependencies and options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target or --prefix has been specified + warn_script_location = options.warn_script_location + if options.target_dir or options.prefix_path: + warn_script_location = False + + installed = install_given_reqs( + to_install, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + progress_bar=options.progress_bar, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + env = get_environment(lib_locations) + + # Display a summary of installed packages, with extra care to + # display a package name as it was requested by the user. 
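+ # e.g. the write_output call below ends up printing something like + # "Successfully installed idna-3.7 requests-2.32.3" (hypothetical + # packages): one "<name>-<version>" entry per installed requirement, + # falling back to the bare name when no version can be looked up.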
+ installed.sort(key=operator.attrgetter("name")) + summary = [] + installed_versions = {} + for distribution in env.iter_all_distributions(): + installed_versions[distribution.canonical_name] = distribution.version + for package in installed: + display_name = package.name + version = installed_versions.get(canonicalize_name(display_name), None) + if version: + text = f"{display_name}-{version}" + else: + text = display_name + summary.append(text) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + resolver_variant=self.determine_resolver_variant(options), + ) + + installed_desc = " ".join(summary) + if installed_desc: + write_output( + "Successfully installed %s", + installed_desc, + ) + except OSError as error: + show_traceback = self.verbosity >= 1 + + message = create_os_error_message( + error, + show_traceback, + options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) + + return ERROR + + if options.target_dir: + assert target_temp_dir + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + if options.root_user_action == "warn": + warn_if_run_as_root() + return SUCCESS + + def _handle_target_dir( + self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool + ) -> None: + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = get_scheme("", home=target_temp_dir.path) + purelib_dir = scheme.purelib + platlib_dir = scheme.platlib + data_dir = scheme.data + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + "Target directory %s already exists. Specify " + "--upgrade to force replacement.", + target_item_dir, + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + "Target directory %s already exists and is " + "a link. pip will not automatically replace " + "links, please remove if replacement is " + "desired.", + target_item_dir, + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move(os.path.join(lib_dir, item), target_item_dir) + + def _determine_conflicts( + self, to_install: list[InstallRequirement] + ) -> ConflictDetails | None: + try: + return check_install_conflicts(to_install) + except Exception: + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts( + self, conflict_details: ConflictDetails, resolver_variant: str + ) -> None: + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: + return + + parts: list[str] = [] + if resolver_variant == "legacy": + parts.append( + "pip's legacy dependency resolver does not consider dependency " + "conflicts when selecting packages. 
+    def _warn_about_conflicts(
+        self, conflict_details: ConflictDetails, resolver_variant: str
+    ) -> None:
+        package_set, (missing, conflicting) = conflict_details
+        if not missing and not conflicting:
+            return
+
+        parts: list[str] = []
+        if resolver_variant == "legacy":
+            parts.append(
+                "pip's legacy dependency resolver does not consider dependency "
+                "conflicts when selecting packages. This behaviour is the "
+                "source of the following dependency conflicts."
+            )
+        else:
+            assert resolver_variant == "resolvelib"
+            parts.append(
+                "pip's dependency resolver does not currently take into account "
+                "all the packages that are installed. This behaviour is the "
+                "source of the following dependency conflicts."
+            )
+
+        # NOTE: There is some duplication here, with commands/check.py
+        for project_name in missing:
+            version = package_set[project_name][0]
+            for dependency in missing[project_name]:
+                message = (
+                    f"{project_name} {version} requires {dependency[1]}, "
+                    "which is not installed."
+                )
+                parts.append(message)
+
+        for project_name in conflicting:
+            version = package_set[project_name][0]
+            for dep_name, dep_version, req in conflicting[project_name]:
+                message = (
+                    "{name} {version} requires {requirement}, but {you} have "
+                    "{dep_name} {dep_version} which is incompatible."
+                ).format(
+                    name=project_name,
+                    version=version,
+                    requirement=req,
+                    dep_name=dep_name,
+                    dep_version=dep_version,
+                    you=("you" if resolver_variant == "resolvelib" else "you'll"),
+                )
+                parts.append(message)
+
+        logger.critical("\n".join(parts))
+
+
+def get_lib_location_guesses(
+    user: bool = False,
+    home: str | None = None,
+    root: str | None = None,
+    isolated: bool = False,
+    prefix: str | None = None,
+) -> list[str]:
+    scheme = get_scheme(
+        "",
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    return [scheme.purelib, scheme.platlib]
+
+
+def site_packages_writable(root: str | None, isolated: bool) -> bool:
+    return all(
+        test_writable_dir(d)
+        for d in set(get_lib_location_guesses(root=root, isolated=isolated))
+    )
+
+
+def decide_user_install(
+    use_user_site: bool | None,
+    prefix_path: str | None = None,
+    target_dir: str | None = None,
+    root_path: str | None = None,
+    isolated_mode: bool = False,
+) -> bool:
+    """Determine whether to do a user install based on the input options.
+
+    If use_user_site is False, no additional checks are done.
+    If use_user_site is True, it is checked for compatibility with other
+    options.
+    If use_user_site is None, the default behaviour depends on the environment,
+    which is provided by the other arguments.
+    """
+    # In some cases (config from tox), use_user_site can be set to an integer
+    # rather than a bool, which 'use_user_site is False' wouldn't catch.
+    if (use_user_site is not None) and (not use_user_site):
+        logger.debug("Non-user install by explicit request")
+        return False
+
+    # If we have been asked for a user install explicitly, check compatibility.
+    if use_user_site:
+        if prefix_path:
+            raise CommandError(
+                "Can not combine '--user' and '--prefix' as they imply "
+                "different installation locations"
+            )
+        if virtualenv_no_global():
+            raise InstallationError(
+                "Can not perform a '--user' install. User site-packages "
+                "are not visible in this virtualenv."
+            )
+        # Catch all remaining cases which honour the site.ENABLE_USER_SITE
+        # value, such as a plain Python installation (e.g. no virtualenv).
+        if not site.ENABLE_USER_SITE:
+            raise InstallationError(
+                "Can not perform a '--user' install. User site-packages "
+                "are disabled for this Python."
+            )
+        logger.debug("User install by explicit request")
+        return True
+    # If we are here, user installs have not been explicitly requested/avoided
+    assert use_user_site is None
+
+    # user install incompatible with --prefix/--target
+    if prefix_path or target_dir:
+        logger.debug("Non-user install due to --prefix or --target option")
+        return False
+
+    # If user installs are not enabled, choose a non-user install
+    if not site.ENABLE_USER_SITE:
+        logger.debug("Non-user install because user site-packages disabled")
+        return False
+
+    # If we have permission for a non-user install, do that,
+    # otherwise do a user install.
+    if site_packages_writable(root=root_path, isolated=isolated_mode):
+        logger.debug("Non-user install because site-packages writeable")
+        return False
+
+    logger.info(
+        "Defaulting to user installation because normal site-packages "
+        "is not writeable"
+    )
+    return True
+
+
+def create_os_error_message(
+    error: OSError, show_traceback: bool, using_user_site: bool
+) -> str:
+    """Format an error message for an OSError
+
+    It may occur anytime during the execution of the install command.
+    """
+    parts = []
+
+    # Mention the error if we are not going to show a traceback
+    parts.append("Could not install packages due to an OSError")
+    if not show_traceback:
+        parts.append(": ")
+        parts.append(str(error))
+    else:
+        parts.append(".")
+
+    # Split the error indication from a helper message (if any)
+    parts[-1] += "\n"
+
+    # Suggest useful actions to the user:
+    # (1) using user site-packages or (2) verifying the permissions
+    if error.errno == errno.EACCES:
+        user_option_part = "Consider using the `--user` option"
+        permissions_part = "Check the permissions"
+
+        if not running_under_virtualenv() and not using_user_site:
+            parts.extend(
+                [
+                    user_option_part,
+                    " or ",
+                    permissions_part.lower(),
+                ]
+            )
+        else:
+            parts.append(permissions_part)
+        parts.append(".\n")
+
+    # Suggest checking "pip config debug" in case of an invalid proxy
+    if type(error) is InvalidProxyURL:
+        parts.append(
+            'Consider checking your local proxy configuration with "pip config debug"'
+        )
+        parts.append(".\n")
+
+    # On Windows, errors like EINVAL or ENOENT may occur
+    # if a file or folder name exceeds 255 characters,
+    # or if the full path exceeds 260 characters and long path support isn't enabled.
+    # This condition checks for such cases and adds a hint to the error output.
+
+    if WINDOWS and error.errno in (errno.EINVAL, errno.ENOENT) and error.filename:
+        if any(len(part) > 255 for part in Path(error.filename).parts):
+            parts.append(
+                "HINT: This error might be caused by a file or folder name exceeding "
+                "255 characters, which is a Windows limitation even if long paths "
+                "are enabled.\n "
+            )
+        if len(error.filename) > 260:
+            parts.append(
+                "HINT: This error might have occurred since "
+                "this system does not have Windows Long Path "
+                "support enabled. 
You can find information on " + "how to enable this at " + "https://pip.pypa.io/warnings/enable-long-paths\n" + ) + return "".join(parts).strip() + "\n" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/list.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/list.py new file mode 100644 index 0000000..ad27e45 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/list.py @@ -0,0 +1,400 @@ +from __future__ import annotations + +import json +import logging +from collections.abc import Generator, Sequence +from email.parser import Parser +from optparse import Values +from typing import TYPE_CHECKING, cast + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import InvalidVersion, Version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.index_command import IndexGroupCommand +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.utils.compat import stdlib_pkgs +from pip._internal.utils.misc import tabulate, write_output + +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + from pip._internal.network.session import PipSession + + class _DistWithLatestInfo(BaseDistribution): + """Give the distribution object a couple of extra fields. + + These will be populated during ``get_outdated()``. This is dirty but + makes the rest of the code much cleaner. + """ + + latest_version: Version + latest_filetype: str + + _ProcessedDists = Sequence[_DistWithLatestInfo] + + +logger = logging.getLogger(__name__) + + +class ListCommand(IndexGroupCommand): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. + """ + + ignore_require_venv = True + usage = """ + %prog [options]""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-o", + "--outdated", + action="store_true", + default=False, + help="List outdated packages", + ) + self.cmd_opts.add_option( + "-u", + "--uptodate", + action="store_true", + default=False, + help="List uptodate packages", + ) + self.cmd_opts.add_option( + "-e", + "--editable", + action="store_true", + default=False, + help="List editable projects.", + ) + self.cmd_opts.add_option( + "-l", + "--local", + action="store_true", + default=False, + help=( + "If in a virtualenv that has global access, do not list " + "globally-installed packages." + ), + ) + self.cmd_opts.add_option( + "--user", + dest="user", + action="store_true", + default=False, + help="Only output packages installed in user-site.", + ) + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + "--pre", + action="store_true", + default=False, + help=( + "Include pre-release and development versions. By default, " + "pip only finds stable versions." + ), + ) + + self.cmd_opts.add_option( + "--format", + action="store", + dest="list_format", + default="columns", + choices=("columns", "freeze", "json"), + help=( + "Select the output format among: columns (default), freeze, or json. " + "The 'freeze' format cannot be used with the --outdated option." 
+ ), + ) + + self.cmd_opts.add_option( + "--not-required", + action="store_true", + dest="not_required", + help="List packages that are not dependencies of installed packages.", + ) + + self.cmd_opts.add_option( + "--exclude-editable", + action="store_false", + dest="include_editable", + help="Exclude editable package from output.", + ) + self.cmd_opts.add_option( + "--include-editable", + action="store_true", + dest="include_editable", + help="Include editable package in output.", + default=True, + ) + self.cmd_opts.add_option(cmdoptions.list_exclude()) + index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def handle_pip_version_check(self, options: Values) -> None: + if options.outdated or options.uptodate: + super().handle_pip_version_check(options) + + def _build_package_finder( + self, options: Values, session: PipSession + ) -> PackageFinder: + """ + Create a package finder appropriate to this list command. + """ + # Lazy import the heavy index modules as most list invocations won't need 'em. + from pip._internal.index.collector import LinkCollector + from pip._internal.index.package_finder import PackageFinder + + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + + def run(self, options: Values, args: list[str]) -> int: + if options.outdated and options.uptodate: + raise CommandError("Options --outdated and --uptodate cannot be combined.") + + if options.outdated and options.list_format == "freeze": + raise CommandError( + "List format 'freeze' cannot be used with the --outdated option." + ) + + cmdoptions.check_list_path_option(options) + + skip = set(stdlib_pkgs) + if options.excludes: + skip.update(canonicalize_name(n) for n in options.excludes) + + packages: _ProcessedDists = [ + cast("_DistWithLatestInfo", d) + for d in get_environment(options.path).iter_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + include_editables=options.include_editable, + skip=skip, + ) + ] + + # get_not_required must be called firstly in order to find and + # filter out all dependencies correctly. Otherwise a package + # can't be identified as requirement because some parent packages + # could be filtered out before. 
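# [Editor's aside: an illustrative sketch, not part of the committed diff.]
# The --not-required filter below keeps only packages whose canonical name is
# never mentioned as a dependency of any installed distribution. The same set
# can be computed from stdlib metadata plus the third-party "packaging"
# library; not_required_names() is a hypothetical name.
import importlib.metadata
from packaging.requirements import Requirement
from packaging.utils import canonicalize_name


def not_required_names() -> set[str]:
    installed = list(importlib.metadata.distributions())
    dep_names: set[str] = set()
    for dist in installed:
        # dist.requires yields raw requirement strings (possibly with
        # environment markers); Requirement() extracts just the name.
        for spec in dist.requires or []:
            dep_names.add(canonicalize_name(Requirement(spec).name))
    all_names = {
        canonicalize_name(d.metadata["Name"]) for d in installed if d.metadata["Name"]
    }
    return all_names - dep_names
# (End of aside; the commit's filtering code continues below.)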
+ if options.not_required: + packages = self.get_not_required(packages, options) + + if options.outdated: + packages = self.get_outdated(packages, options) + elif options.uptodate: + packages = self.get_uptodate(packages, options) + + self.output_package_listing(packages, options) + return SUCCESS + + def get_outdated( + self, packages: _ProcessedDists, options: Values + ) -> _ProcessedDists: + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version > dist.version + ] + + def get_uptodate( + self, packages: _ProcessedDists, options: Values + ) -> _ProcessedDists: + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if dist.latest_version == dist.version + ] + + def get_not_required( + self, packages: _ProcessedDists, options: Values + ) -> _ProcessedDists: + dep_keys = { + canonicalize_name(dep.name) + for dist in packages + for dep in (dist.iter_dependencies() or ()) + } + + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys}) + + def iter_packages_latest_infos( + self, packages: _ProcessedDists, options: Values + ) -> Generator[_DistWithLatestInfo, None, None]: + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + + def latest_info( + dist: _DistWithLatestInfo, + ) -> _DistWithLatestInfo | None: + all_candidates = finder.find_all_candidates(dist.canonical_name) + if not options.pre: + # Remove prereleases + all_candidates = [ + candidate + for candidate in all_candidates + if not candidate.version.is_prerelease + ] + + evaluator = finder.make_candidate_evaluator( + project_name=dist.canonical_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + + remote_version = best_candidate.version + if best_candidate.link.is_wheel: + typ = "wheel" + else: + typ = "sdist" + dist.latest_version = remote_version + dist.latest_filetype = typ + return dist + + for dist in map(latest_info, packages): + if dist is not None: + yield dist + + def output_package_listing( + self, packages: _ProcessedDists, options: Values + ) -> None: + packages = sorted( + packages, + key=lambda dist: dist.canonical_name, + ) + if options.list_format == "columns" and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == "freeze": + for dist in packages: + try: + req_string = f"{dist.raw_name}=={dist.version}" + except InvalidVersion: + req_string = f"{dist.raw_name}==={dist.raw_version}" + if options.verbose >= 1: + write_output("%s (%s)", req_string, dist.location) + else: + write_output(req_string) + elif options.list_format == "json": + write_output(format_for_json(packages, options)) + + def output_package_listing_columns( + self, data: list[list[str]], header: list[str] + ) -> None: + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. 
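# [Editor's aside: an illustrative sketch, not part of the committed diff.]
# tabulate() from pip._internal.utils.misc returns the rendered rows together
# with the computed column widths, which is what makes the "---- ----"
# separator line built below possible. A minimal function honouring that
# contract (tabulate_rows is a hypothetical name; rows must be non-empty, as
# the len(data) > 0 guards around it ensure):
def tabulate_rows(rows: list[list[str]]) -> tuple[list[str], list[int]]:
    # Each column is as wide as its longest cell.
    sizes = [0] * max(len(row) for row in rows)
    for row in rows:
        for i, cell in enumerate(row):
            sizes[i] = max(sizes[i], len(cell))
    rendered = [
        " ".join(cell.ljust(size) for cell, size in zip(row, sizes)).rstrip()
        for row in rows
    ]
    return rendered, sizes


# Example: header plus one package row.
# rendered, sizes = tabulate_rows([["Package", "Version"], ["pip", "25.2"]])
# separator = " ".join("-" * size for size in sizes)
# (End of aside; the commit's column output code continues below.)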
+ if len(data) > 0: + pkg_strings.insert(1, " ".join("-" * x for x in sizes)) + + for val in pkg_strings: + write_output(val) + + +def format_for_columns( + pkgs: _ProcessedDists, options: Values +) -> tuple[list[list[str]], list[str]]: + """ + Convert the package data into something usable + by output_package_listing_columns. + """ + header = ["Package", "Version"] + + running_outdated = options.outdated + if running_outdated: + header.extend(["Latest", "Type"]) + + def wheel_build_tag(dist: BaseDistribution) -> str | None: + try: + wheel_file = dist.read_text("WHEEL") + except FileNotFoundError: + return None + return Parser().parsestr(wheel_file).get("Build") + + build_tags = [wheel_build_tag(p) for p in pkgs] + has_build_tags = any(build_tags) + if has_build_tags: + header.append("Build") + + if options.verbose >= 1: + header.append("Location") + if options.verbose >= 1: + header.append("Installer") + + has_editables = any(x.editable for x in pkgs) + if has_editables: + header.append("Editable project location") + + data = [] + for i, proj in enumerate(pkgs): + # if we're working on the 'outdated' list, separate out the + # latest_version and type + row = [proj.raw_name, proj.raw_version] + + if running_outdated: + row.append(str(proj.latest_version)) + row.append(proj.latest_filetype) + + if has_build_tags: + row.append(build_tags[i] or "") + + if has_editables: + row.append(proj.editable_project_location or "") + + if options.verbose >= 1: + row.append(proj.location or "") + if options.verbose >= 1: + row.append(proj.installer) + + data.append(row) + + return data, header + + +def format_for_json(packages: _ProcessedDists, options: Values) -> str: + data = [] + for dist in packages: + try: + version = str(dist.version) + except InvalidVersion: + version = dist.raw_version + info = { + "name": dist.raw_name, + "version": version, + } + if options.verbose >= 1: + info["location"] = dist.location or "" + info["installer"] = dist.installer + if options.outdated: + info["latest_version"] = str(dist.latest_version) + info["latest_filetype"] = dist.latest_filetype + editable_project_location = dist.editable_project_location + if editable_project_location: + info["editable_project_location"] = editable_project_location + data.append(info) + return json.dumps(data) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/lock.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/lock.py new file mode 100644 index 0000000..b02fb95 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/lock.py @@ -0,0 +1,167 @@ +import sys +from optparse import Values +from pathlib import Path + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import ( + RequirementCommand, + with_cleanup, +) +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.models.pylock import Pylock, is_valid_pylock_file_name +from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + get_pip_version, +) +from pip._internal.utils.temp_dir import TempDirectory + +logger = getLogger(__name__) + + +class LockCommand(RequirementCommand): + """ + EXPERIMENTAL - Lock packages and their dependencies from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. 
+ + pip also supports locking from "requirements files", which provide an easy + way to specify a whole environment to be installed. + + The generated lock file is only guaranteed to be valid for the current + python version and platform. + """ + + usage = """ + %prog [options] [-e] ... + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + cmdoptions.PipOption( + "--output", + "-o", + dest="output_file", + metavar="path", + type="path", + default="pylock.toml", + help="Lock file name (default=pylock.toml). Use - for stdout.", + ) + ) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.build_constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + + self.cmd_opts.add_option(cmdoptions.src()) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.check_build_deps()) + + self.cmd_opts.add_option(cmdoptions.config_settings()) + + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: list[str]) -> int: + logger.verbose("Using %s", get_pip_version()) + + logger.warning( + "pip lock is currently an experimental command. " + "It may be removed/changed in a future release " + "without prior warning." + ) + + cmdoptions.check_build_constraints(options) + + session = self.get_default_session(options) + + finder = self._build_package_finder( + options=options, + session=session, + ignore_requires_python=options.ignore_requires_python, + ) + build_tracker = self.enter_context(get_build_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="install", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + wheel_cache = WheelCache(options.cache_dir) + + # Only when installing is it permitted to use PEP 660. + # In other circumstances (pip wheel, pip download) we generate + # regular (i.e. non editable) metadata and wheels. 
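# [Editor's aside: an illustrative sketch, not part of the committed diff.]
# The file this command writes is a PEP 751 "pylock.toml". Reading one back
# is plain TOML via the standard library; the field names follow PEP 751
# ([[packages]] entries carry at least a name, and usually a version), but
# treat this as a sketch rather than a complete schema.
import tomllib


def read_locked_packages(path: str) -> list[tuple[str, str | None]]:
    with open(path, "rb") as f:
        lock = tomllib.load(f)
    # version may be absent for direct (VCS / local directory) references.
    return [(pkg["name"], pkg.get("version")) for pkg in lock.get("packages", [])]
# (End of aside; the commit's run() implementation continues below.)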
+ for req in reqs: + req.permit_editable_wheels = True + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + use_user_site=False, + verbosity=self.verbosity, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=False, + ignore_installed=True, + ignore_requires_python=options.ignore_requires_python, + upgrade_strategy="to-satisfy-only", + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + if options.output_file == "-": + base_dir = Path.cwd() + else: + output_file_path = Path(options.output_file) + if not is_valid_pylock_file_name(output_file_path): + logger.warning( + "%s is not a valid lock file name.", + output_file_path, + ) + base_dir = output_file_path.parent + pylock_toml = Pylock.from_install_requirements( + requirement_set.requirements.values(), base_dir=base_dir + ).as_toml() + if options.output_file == "-": + sys.stdout.write(pylock_toml) + else: + output_file_path.write_text(pylock_toml, encoding="utf-8") + + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/search.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/search.py new file mode 100644 index 0000000..b8dbc27 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/search.py @@ -0,0 +1,178 @@ +from __future__ import annotations + +import logging +import shutil +import sys +import textwrap +import xmlrpc.client +from collections import OrderedDict +from optparse import Values +from typing import TypedDict + +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin +from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.metadata import get_default_environment +from pip._internal.metadata.base import BaseDistribution +from pip._internal.models.index import PyPI +from pip._internal.network.xmlrpc import PipXmlrpcTransport +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import write_output + + +class TransformedHit(TypedDict): + name: str + summary: str + versions: list[str] + + +logger = logging.getLogger(__name__) + + +class SearchCommand(Command, SessionCommandMixin): + """Search for PyPI packages whose name or summary contains .""" + + usage = """ + %prog [options] """ + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-i", + "--index", + dest="index", + metavar="URL", + default=PyPI.pypi_url, + help="Base URL of Python Package Index (default %default)", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + if not args: + raise CommandError("Missing required argument (search query).") + query = args + pypi_hits = self.search(query, options) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = shutil.get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query: list[str], options: Values) -> list[dict[str, str]]: + index_url = options.index + + session = self.get_default_session(options) + + transport = 
PipXmlrpcTransport(index_url, session) + pypi = xmlrpc.client.ServerProxy(index_url, transport) + try: + hits = pypi.search({"name": query, "summary": query}, "or") + except xmlrpc.client.Fault as fault: + message = ( + f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}" + ) + raise CommandError(message) + assert isinstance(hits, list) + return hits + + +def transform_hits(hits: list[dict[str, str]]) -> list[TransformedHit]: + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. + """ + packages: dict[str, TransformedHit] = OrderedDict() + for hit in hits: + name = hit["name"] + summary = hit["summary"] + version = hit["version"] + + if name not in packages.keys(): + packages[name] = { + "name": name, + "summary": summary, + "versions": [version], + } + else: + packages[name]["versions"].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]["versions"]): + packages[name]["summary"] = summary + + return list(packages.values()) + + +def print_dist_installation_info(latest: str, dist: BaseDistribution | None) -> None: + if dist is not None: + with indent_log(): + if dist.version == latest: + write_output("INSTALLED: %s (latest)", dist.version) + else: + write_output("INSTALLED: %s", dist.version) + if parse_version(latest).pre: + write_output( + "LATEST: %s (pre-release; install" + " with `pip install --pre`)", + latest, + ) + else: + write_output("LATEST: %s", latest) + + +def get_installed_distribution(name: str) -> BaseDistribution | None: + env = get_default_environment() + return env.get_distribution(name) + + +def print_results( + hits: list[TransformedHit], + name_column_width: int | None = None, + terminal_width: int | None = None, +) -> None: + if not hits: + return + if name_column_width is None: + name_column_width = ( + max( + [ + len(hit["name"]) + len(highest_version(hit.get("versions", ["-"]))) + for hit in hits + ] + ) + + 4 + ) + + for hit in hits: + name = hit["name"] + summary = hit["summary"] or "" + latest = highest_version(hit.get("versions", ["-"])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary_lines = textwrap.wrap(summary, target_width) + summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines) + + name_latest = f"{name} ({latest})" + line = f"{name_latest:{name_column_width}} - {summary}" + try: + write_output(line) + dist = get_installed_distribution(name) + print_dist_installation_info(latest, dist) + except UnicodeEncodeError: + pass + + +def highest_version(versions: list[str]) -> str: + return max(versions, key=parse_version) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/show.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/show.py new file mode 100644 index 0000000..f9fcfa6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/show.py @@ -0,0 +1,231 @@ +from __future__ import annotations + +import logging +import string +from collections.abc import Generator, Iterable, Iterator +from optparse import Values +from typing import NamedTuple + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes 
import ERROR, SUCCESS +from pip._internal.metadata import BaseDistribution, get_default_environment +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +def normalize_project_url_label(label: str) -> str: + # This logic is from PEP 753 (Well-known Project URLs in Metadata). + chars_to_remove = string.punctuation + string.whitespace + removal_map = str.maketrans("", "", chars_to_remove) + return label.translate(removal_map).lower() + + +class ShowCommand(Command): + """ + Show information about one or more installed packages. + + The output is in RFC-compliant mail header format. + """ + + usage = """ + %prog [options] ...""" + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-f", + "--files", + dest="files", + action="store_true", + default=False, + help="Show the full list of installed files for each package.", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + if not args: + logger.warning("ERROR: Please provide a package name or names.") + return ERROR + query = args + + results = search_packages_info(query) + if not print_results( + results, list_files=options.files, verbose=options.verbose + ): + return ERROR + return SUCCESS + + +class _PackageInfo(NamedTuple): + name: str + version: str + location: str + editable_project_location: str | None + requires: list[str] + required_by: list[str] + installer: str + metadata_version: str + classifiers: list[str] + summary: str + homepage: str + project_urls: list[str] + author: str + author_email: str + license: str + license_expression: str + entry_points: list[str] + files: list[str] | None + + +def search_packages_info(query: list[str]) -> Generator[_PackageInfo, None, None]: + """ + Gather details from installed distributions. Print distribution name, + version, location, and installed files. Installed files requires a + pip generated 'installed-files.txt' in the distributions '.egg-info' + directory. + """ + env = get_default_environment() + + installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()} + query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning("Package(s) not found: %s", ", ".join(missing)) + + def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: + return ( + dist.metadata["Name"] or "UNKNOWN" + for dist in installed.values() + if current_dist.canonical_name + in {canonicalize_name(d.name) for d in dist.iter_dependencies()} + ) + + for query_name in query_names: + try: + dist = installed[query_name] + except KeyError: + continue + + try: + requires = sorted( + # Avoid duplicates in requirements (e.g. due to environment markers). 
+ {req.name for req in dist.iter_dependencies()}, + key=str.lower, + ) + except InvalidRequirement: + requires = sorted(dist.iter_raw_dependencies(), key=str.lower) + + try: + required_by = sorted(_get_requiring_packages(dist), key=str.lower) + except InvalidRequirement: + required_by = ["#N/A"] + + try: + entry_points_text = dist.read_text("entry_points.txt") + entry_points = entry_points_text.splitlines(keepends=False) + except FileNotFoundError: + entry_points = [] + + files_iter = dist.iter_declared_entries() + if files_iter is None: + files: list[str] | None = None + else: + files = sorted(files_iter) + + metadata = dist.metadata + + project_urls = metadata.get_all("Project-URL", []) + homepage = metadata.get("Home-page", "") + if not homepage: + # It's common that there is a "homepage" Project-URL, but Home-page + # remains unset (especially as PEP 621 doesn't surface the field). + for url in project_urls: + url_label, url = url.split(",", maxsplit=1) + normalized_label = normalize_project_url_label(url_label) + if normalized_label == "homepage": + homepage = url.strip() + break + + yield _PackageInfo( + name=dist.raw_name, + version=dist.raw_version, + location=dist.location or "", + editable_project_location=dist.editable_project_location, + requires=requires, + required_by=required_by, + installer=dist.installer, + metadata_version=dist.metadata_version or "", + classifiers=metadata.get_all("Classifier", []), + summary=metadata.get("Summary", ""), + homepage=homepage, + project_urls=project_urls, + author=metadata.get("Author", ""), + author_email=metadata.get("Author-email", ""), + license=metadata.get("License", ""), + license_expression=metadata.get("License-Expression", ""), + entry_points=entry_points, + files=files, + ) + + +def print_results( + distributions: Iterable[_PackageInfo], + list_files: bool, + verbose: bool, +) -> bool: + """ + Print the information from installed distributions found. 
+ """ + results_printed = False + for i, dist in enumerate(distributions): + results_printed = True + if i > 0: + write_output("---") + + metadata_version_tuple = tuple(map(int, dist.metadata_version.split("."))) + + write_output("Name: %s", dist.name) + write_output("Version: %s", dist.version) + write_output("Summary: %s", dist.summary) + write_output("Home-page: %s", dist.homepage) + write_output("Author: %s", dist.author) + write_output("Author-email: %s", dist.author_email) + if metadata_version_tuple >= (2, 4) and dist.license_expression: + write_output("License-Expression: %s", dist.license_expression) + else: + write_output("License: %s", dist.license) + write_output("Location: %s", dist.location) + if dist.editable_project_location is not None: + write_output( + "Editable project location: %s", dist.editable_project_location + ) + write_output("Requires: %s", ", ".join(dist.requires)) + write_output("Required-by: %s", ", ".join(dist.required_by)) + + if verbose: + write_output("Metadata-Version: %s", dist.metadata_version) + write_output("Installer: %s", dist.installer) + write_output("Classifiers:") + for classifier in dist.classifiers: + write_output(" %s", classifier) + write_output("Entry-points:") + for entry in dist.entry_points: + write_output(" %s", entry.strip()) + write_output("Project-URLs:") + for project_url in dist.project_urls: + write_output(" %s", project_url) + if list_files: + write_output("Files:") + if dist.files is None: + write_output("Cannot locate RECORD or installed-files.txt") + else: + for line in dist.files: + write_output(" %s", line.strip()) + return results_printed diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py new file mode 100644 index 0000000..9c4f031 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/uninstall.py @@ -0,0 +1,113 @@ +import logging +from optparse import Values + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.index_command import SessionCommandMixin +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import InstallationError +from pip._internal.req import parse_requirements +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) +from pip._internal.utils.misc import ( + check_externally_managed, + protect_pip_from_modification_on_windows, + warn_if_run_as_root, +) + +logger = logging.getLogger(__name__) + + +class UninstallCommand(Command, SessionCommandMixin): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + + usage = """ + %prog [options] ... + %prog [options] -r ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help=( + "Uninstall all the packages listed in the given requirements " + "file. This option can be used multiple times." 
+ ), + ) + self.cmd_opts.add_option( + "-y", + "--yes", + dest="yes", + action="store_true", + help="Don't ask for confirmation of uninstall deletions.", + ) + self.cmd_opts.add_option(cmdoptions.root_user_action()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: list[str]) -> int: + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, + isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + else: + logger.warning( + "Invalid requirement: %r ignored -" + " the uninstall command expects named" + " requirements.", + name, + ) + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, options=options, session=session + ): + req = install_req_from_parsed_requirement( + parsed_req, isolated=options.isolated_mode + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + if not reqs_to_uninstall: + raise InstallationError( + f"You must give at least one requirement to {self.name} (see " + f'"pip help {self.name}")' + ) + + if not options.override_externally_managed: + check_externally_managed() + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, + verbose=self.verbosity > 0, + ) + if uninstall_pathset: + uninstall_pathset.commit() + if options.root_user_action == "warn": + warn_if_run_as_root() + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py new file mode 100644 index 0000000..2850394 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/commands/wheel.py @@ -0,0 +1,176 @@ +import logging +import os +import shutil +from optparse import Values + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.req.req_install import ( + InstallRequirement, +) +from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.wheel_builder import build + +logger = logging.getLogger(__name__) + + +class WheelCommand(RequirementCommand): + """ + Build Wheel archives for your requirements and dependencies. + + Wheel is a built-package format, and offers the advantage of not + recompiling your software during every install. For more details, see the + wheel docs: https://wheel.readthedocs.io/en/latest/ + + 'pip wheel' uses the build system interface as described here: + https://pip.pypa.io/en/stable/reference/build-system/ + + """ + + usage = """ + %prog [options] ... + %prog [options] -r ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-w", + "--wheel-dir", + dest="wheel_dir", + metavar="dir", + default=os.curdir, + help=( + "Build wheels into , where the default is the " + "current working directory." 
+ ), + ) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.build_constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( + "--no-verify", + dest="no_verify", + action="store_true", + default=False, + help="Don't verify if built wheel is valid.", + ) + + self.cmd_opts.add_option(cmdoptions.config_settings()) + + self.cmd_opts.add_option( + "--pre", + action="store_true", + default=False, + help=( + "Include pre-release and development versions. By default, " + "pip only finds stable versions." + ), + ) + + self.cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: list[str]) -> int: + cmdoptions.check_build_constraints(options) + + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) + + build_tracker = self.enter_context(get_build_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="wheel", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + wheel_cache = WheelCache(options.cache_dir) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + download_dir=options.wheel_dir, + use_user_site=False, + verbosity=self.verbosity, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + + reqs_to_build: list[InstallRequirement] = [] + for req in requirement_set.requirements.values(): + if req.is_wheel: + preparer.save_linked_requirement(req) + else: + reqs_to_build.append(req) + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=(not options.no_verify), + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + build_failures.append(req) + if len(build_failures) != 0: + raise CommandError("Failed to build one or more wheels") + + return SUCCESS diff --git a/venv/lib/python3.12/site-packages/pip/_internal/configuration.py 
b/venv/lib/python3.12/site-packages/pip/_internal/configuration.py
new file mode 100644
index 0000000..e164653
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,396 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+  As written in config files.
+- value
+  Value associated with a name
+- key
+  Name combined with its section (section.name)
+- variant
+  A single word describing where the configuration key-value pair came from
+"""
+
+from __future__ import annotations
+
+import configparser
+import locale
+import os
+import sys
+from collections.abc import Iterable
+from typing import Any, NewType
+
+from pip._internal.exceptions import (
+    ConfigurationError,
+    ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import ensure_dir, enum
+
+RawConfigParser = configparser.RawConfigParser  # Shorthand
+Kind = NewType("Kind", str)
+
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
+ENV_NAMES_IGNORED = "version", "help"
+
+# The kinds of configurations there are.
+kinds = enum(
+    USER="user",  # User Specific
+    GLOBAL="global",  # System Wide
+    SITE="site",  # [Virtual] Environment Specific
+    ENV="env",  # from PIP_CONFIG_FILE
+    ENV_VAR="env-var",  # from Environment Variables
+)
+OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
+
+logger = getLogger(__name__)
+
+
+# NOTE: Maybe use the optionx attribute to normalize keynames.
+def _normalize_name(name: str) -> str:
+    """Make a name consistent regardless of source (environment or file)"""
+    name = name.lower().replace("_", "-")
+    name = name.removeprefix("--")  # only prefer long opts
+    return name
+
+
+def _disassemble_key(name: str) -> list[str]:
+    if "." not in name:
+        error_message = (
+            "Key does not contain dot separated section and key. "
+            f"Perhaps you wanted to use 'global.{name}' instead?"
+        )
+        raise ConfigurationError(error_message)
+    return name.split(".", 1)
+
+
+def get_configuration_files() -> dict[Kind, list[str]]:
+    global_config_files = [
+        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
+    ]
+
+    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+    legacy_config_file = os.path.join(
+        os.path.expanduser("~"),
+        "pip" if WINDOWS else ".pip",
+        CONFIG_BASENAME,
+    )
+    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
+    return {
+        kinds.GLOBAL: global_config_files,
+        kinds.SITE: [site_config_file],
+        kinds.USER: [legacy_config_file, new_config_file],
+    }
+
+
+class Configuration:
+    """Handles management of configuration.
+
+    Provides an interface for accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style
+    keys and stores the value associated with it as "key-name" under the
+    section "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy to manage form in the configuration files
+    and the data stored is also nice. 
+ """ + + def __init__(self, isolated: bool, load_only: Kind | None = None) -> None: + super().__init__() + + if load_only is not None and load_only not in VALID_LOAD_ONLY: + raise ConfigurationError( + "Got invalid value for load_only - should be one of {}".format( + ", ".join(map(repr, VALID_LOAD_ONLY)) + ) + ) + self.isolated = isolated + self.load_only = load_only + + # Because we keep track of where we got the data from + self._parsers: dict[Kind, list[tuple[str, RawConfigParser]]] = { + variant: [] for variant in OVERRIDE_ORDER + } + self._config: dict[Kind, dict[str, dict[str, Any]]] = { + variant: {} for variant in OVERRIDE_ORDER + } + self._modified_parsers: list[tuple[str, RawConfigParser]] = [] + + def load(self) -> None: + """Loads configuration from configuration files and environment""" + self._load_config_files() + if not self.isolated: + self._load_environment_vars() + + def get_file_to_edit(self) -> str | None: + """Returns the file with highest priority in configuration""" + assert self.load_only is not None, "Need to be specified a file to be editing" + + try: + return self._get_parser_to_modify()[0] + except IndexError: + return None + + def items(self) -> Iterable[tuple[str, Any]]: + """Returns key-value pairs like dict.items() representing the loaded + configuration + """ + return self._dictionary.items() + + def get_value(self, key: str) -> Any: + """Get a value from the configuration.""" + orig_key = key + key = _normalize_name(key) + try: + clean_config: dict[str, Any] = {} + for file_values in self._dictionary.values(): + clean_config.update(file_values) + return clean_config[key] + except KeyError: + # disassembling triggers a more useful error message than simply + # "No such key" in the case that the key isn't in the form command.option + _disassemble_key(key) + raise ConfigurationError(f"No such key - {orig_key}") + + def set_value(self, key: str, value: Any) -> None: + """Modify a value in the configuration.""" + key = _normalize_name(key) + self._ensure_have_load_only() + + assert self.load_only + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Modify the parser and the configuration + if not parser.has_section(section): + parser.add_section(section) + parser.set(section, name, value) + + self._config[self.load_only].setdefault(fname, {}) + self._config[self.load_only][fname][key] = value + self._mark_as_modified(fname, parser) + + def unset_value(self, key: str) -> None: + """Unset a value in the configuration.""" + orig_key = key + key = _normalize_name(key) + self._ensure_have_load_only() + + assert self.load_only + fname, parser = self._get_parser_to_modify() + + if ( + key not in self._config[self.load_only][fname] + and key not in self._config[self.load_only] + ): + raise ConfigurationError(f"No such key - {orig_key}") + + if parser is not None: + section, name = _disassemble_key(key) + if not ( + parser.has_section(section) and parser.remove_option(section, name) + ): + # The option was not removed. + raise ConfigurationError( + "Fatal Internal error [id=1]. Please report as a bug." + ) + + # The section may be empty after the option was removed. 
+ if not parser.items(section): + parser.remove_section(section) + self._mark_as_modified(fname, parser) + try: + del self._config[self.load_only][fname][key] + except KeyError: + del self._config[self.load_only][key] + + def save(self) -> None: + """Save the current in-memory state.""" + self._ensure_have_load_only() + + for fname, parser in self._modified_parsers: + logger.info("Writing to %s", fname) + + # Ensure directory exists. + ensure_dir(os.path.dirname(fname)) + + # Ensure directory's permission(need to be writeable) + try: + with open(fname, "w") as f: + parser.write(f) + except OSError as error: + raise ConfigurationError( + f"An error occurred while writing to the configuration file " + f"{fname}: {error}" + ) + + # + # Private routines + # + + def _ensure_have_load_only(self) -> None: + if self.load_only is None: + raise ConfigurationError("Needed a specific file to be modifying.") + logger.debug("Will be working with %s variant only", self.load_only) + + @property + def _dictionary(self) -> dict[str, dict[str, Any]]: + """A dictionary representing the loaded configuration.""" + # NOTE: Dictionaries are not populated if not loaded. So, conditionals + # are not needed here. + retval = {} + + for variant in OVERRIDE_ORDER: + retval.update(self._config[variant]) + + return retval + + def _load_config_files(self) -> None: + """Loads configuration from configuration files""" + config_files = dict(self.iter_config_files()) + if config_files[kinds.ENV][0:1] == [os.devnull]: + logger.debug( + "Skipping loading configuration files due to " + "environment's PIP_CONFIG_FILE being os.devnull" + ) + return + + for variant, files in config_files.items(): + for fname in files: + # If there's specific variant set in `load_only`, load only + # that variant, not the others. + if self.load_only is not None and variant != self.load_only: + logger.debug("Skipping file '%s' (variant: %s)", fname, variant) + continue + + parser = self._load_file(variant, fname) + + # Keeping track of the parsers used + self._parsers[variant].append((fname, parser)) + + def _load_file(self, variant: Kind, fname: str) -> RawConfigParser: + logger.verbose("For variant '%s', will try loading '%s'", variant, fname) + parser = self._construct_parser(fname) + + for section in parser.sections(): + items = parser.items(section) + self._config[variant].setdefault(fname, {}) + self._config[variant][fname].update(self._normalized_keys(section, items)) + + return parser + + def _construct_parser(self, fname: str) -> RawConfigParser: + parser = configparser.RawConfigParser() + # If there is no such file, don't bother reading it but create the + # parser anyway, to hold the data. + # Doing this is useful when modifying and saving files, where we don't + # need to construct a parser. 
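# [Editor's aside: an illustrative sketch, not part of the committed diff.]
# Precedence in the _dictionary property above comes purely from dict.update()
# order: variants later in OVERRIDE_ORDER overwrite earlier ones. A toy
# demonstration with hypothetical values:
layers = {
    "global": {"global.timeout": "15"},
    "user": {"global.timeout": "30"},
    "env-var": {"global.timeout": "60"},  # e.g. exported as PIP_TIMEOUT=60
}
merged: dict[str, str] = {}
for variant in ("global", "user", "site", "env", "env-var"):  # OVERRIDE_ORDER
    merged.update(layers.get(variant, {}))
assert merged["global.timeout"] == "60"  # environment variables win
# (End of aside; _construct_parser() continues below.)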
+ if os.path.exists(fname): + locale_encoding = locale.getpreferredencoding(False) + try: + parser.read(fname, encoding=locale_encoding) + except UnicodeDecodeError: + # See https://github.com/pypa/pip/issues/4963 + raise ConfigurationFileCouldNotBeLoaded( + reason=f"contains invalid {locale_encoding} characters", + fname=fname, + ) + except configparser.Error as error: + # See https://github.com/pypa/pip/issues/4893 + raise ConfigurationFileCouldNotBeLoaded(error=error) + return parser + + def _load_environment_vars(self) -> None: + """Loads configuration from environment variables""" + self._config[kinds.ENV_VAR].setdefault(":env:", {}) + self._config[kinds.ENV_VAR][":env:"].update( + self._normalized_keys(":env:", self.get_environ_vars()) + ) + + def _normalized_keys( + self, section: str, items: Iterable[tuple[str, Any]] + ) -> dict[str, Any]: + """Normalizes items to construct a dictionary with normalized keys. + + This routine is where the names become keys and are made the same + regardless of source - configuration files or environment. + """ + normalized = {} + for name, val in items: + key = section + "." + _normalize_name(name) + normalized[key] = val + return normalized + + def get_environ_vars(self) -> Iterable[tuple[str, str]]: + """Returns a generator with all environmental vars with prefix PIP_""" + for key, val in os.environ.items(): + if key.startswith("PIP_"): + name = key[4:].lower() + if name not in ENV_NAMES_IGNORED: + yield name, val + + # XXX: This is patched in the tests. + def iter_config_files(self) -> Iterable[tuple[Kind, list[str]]]: + """Yields variant and configuration files associated with it. + + This should be treated like items of a dictionary. The order + here doesn't affect what gets overridden. That is controlled + by OVERRIDE_ORDER. However this does control the order they are + displayed to the user. It's probably most ergonomic to display + things in the same order as OVERRIDE_ORDER + """ + # SMELL: Move the conditions out of this function + + env_config_file = os.environ.get("PIP_CONFIG_FILE", None) + config_files = get_configuration_files() + + yield kinds.GLOBAL, config_files[kinds.GLOBAL] + + # per-user config is not loaded when env_config_file exists + should_load_user_config = not self.isolated and not ( + env_config_file and os.path.exists(env_config_file) + ) + if should_load_user_config: + # The legacy config file is overridden by the new config file + yield kinds.USER, config_files[kinds.USER] + + # virtualenv config + yield kinds.SITE, config_files[kinds.SITE] + + if env_config_file is not None: + yield kinds.ENV, [env_config_file] + else: + yield kinds.ENV, [] + + def get_values_in_config(self, variant: Kind) -> dict[str, Any]: + """Get values present in a config file""" + return self._config[variant] + + def _get_parser_to_modify(self) -> tuple[str, RawConfigParser]: + # Determine which parser to modify + assert self.load_only + parsers = self._parsers[self.load_only] + if not parsers: + # This should not happen if everything works correctly. + raise ConfigurationError( + "Fatal Internal error [id=2]. Please report as a bug." + ) + + # Use the highest priority parser. + return parsers[-1] + + # XXX: This is patched in the tests. 
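# [Editor's aside: an illustrative sketch, not part of the committed diff.]
# (The "XXX: This is patched in the tests" note above refers to
# _mark_as_modified, which follows this aside.) get_environ_vars() plus
# _normalized_keys() turn PIP_* environment variables into ":env:" keys:
# strip the PIP_ prefix, lowercase, then map "_" to "-". env_to_key() is a
# hypothetical helper reproducing the net effect.
def env_to_key(env_name: str) -> str | None:
    if not env_name.startswith("PIP_"):
        return None
    name = env_name[4:].lower()
    if name in ("version", "help"):  # ENV_NAMES_IGNORED
        return None
    return ":env:." + name.replace("_", "-")


assert env_to_key("PIP_INDEX_URL") == ":env:.index-url"
assert env_to_key("PIP_HELP") is None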
+ def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None: + file_parser_tuple = (fname, parser) + if file_parser_tuple not in self._modified_parsers: + self._modified_parsers.append(file_parser_tuple) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._dictionary!r})" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 0000000..9a89a83 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,21 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement( + install_req: InstallRequirement, +) -> AbstractDistribution: + """Returns a Distribution for the given InstallRequirement""" + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. + if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..02956a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..51ed61e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc new file mode 100644 index 0000000..5ebc018 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc new file mode 100644 index 0000000..0878680 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..d525982 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/base.py b/venv/lib/python3.12/site-packages/pip/_internal/distributions/base.py 
new file mode 100644 index 0000000..ea61f35 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import abc +from typing import TYPE_CHECKING + +from pip._internal.metadata.base import BaseDistribution +from pip._internal.req import InstallRequirement + +if TYPE_CHECKING: + from pip._internal.build_env import BuildEnvironmentInstaller + + +class AbstractDistribution(metaclass=abc.ABCMeta): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. + + - if we need to do work in the build tracker, we must be able to generate a unique + string to identify the requirement in the build tracker. + """ + + def __init__(self, req: InstallRequirement) -> None: + super().__init__() + self.req = req + + @abc.abstractproperty + def build_tracker_id(self) -> str | None: + """A string that uniquely identifies this requirement to the build tracker. + + If None, then this dist has no work to do in the build tracker, and + ``.prepare_distribution_metadata()`` will not be called.""" + raise NotImplementedError() + + @abc.abstractmethod + def get_metadata_distribution(self) -> BaseDistribution: + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata( + self, + build_env_installer: BuildEnvironmentInstaller, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + raise NotImplementedError() diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py b/venv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 0000000..b6a67df --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.metadata import BaseDistribution + +if TYPE_CHECKING: + from pip._internal.build_env import BuildEnvironmentInstaller + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. 
+ """ + + @property + def build_tracker_id(self) -> str | None: + return None + + def get_metadata_distribution(self) -> BaseDistribution: + assert self.req.satisfied_by is not None, "not actually installed" + return self.req.satisfied_by + + def prepare_distribution_metadata( + self, + build_env_installer: BuildEnvironmentInstaller, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py b/venv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 0000000..f7bd783 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,164 @@ +from __future__ import annotations + +import logging +from collections.abc import Iterable +from typing import TYPE_CHECKING + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.subprocess import runner_with_spinner_message + +if TYPE_CHECKING: + from pip._internal.build_env import BuildEnvironmentInstaller + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated. + """ + + @property + def build_tracker_id(self) -> str | None: + """Identify this requirement uniquely by its link.""" + assert self.req.link + return self.req.link.url_without_fragment + + def get_metadata_distribution(self) -> BaseDistribution: + return self.req.get_dist() + + def prepare_distribution_metadata( + self, + build_env_installer: BuildEnvironmentInstaller, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + # Load pyproject.toml + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + if build_isolation: + # Setup an isolated environment and install the build backend static + # requirements in it. + self._prepare_build_backend(build_env_installer) + # Check that the build backend supports PEP 660. This cannot be done + # earlier because we need to setup the build backend to verify it + # supports build_editable, nor can it be done later, because we want + # to avoid installing build requirements needlessly. + self.req.editable_sanity_check() + # Install the dynamic build requirements. + self._install_build_reqs(build_env_installer) + else: + # When not using build isolation, we still need to check that + # the build backend supports PEP 660. + self.req.editable_sanity_check() + # Check if the current environment provides build dependencies + if check_build_deps: + pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + conflicting, missing = self.req.build_env.check_requirements( + pyproject_requires + ) + if conflicting: + self._raise_conflicts("the backend dependencies", conflicting) + if missing: + self._raise_missing_reqs(missing) + self.req.prepare_metadata() + + def _prepare_build_backend( + self, build_env_installer: BuildEnvironmentInstaller + ) -> None: + # Isolate in a BuildEnvironment and install the build-time + # requirements. 
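+        # As a rough standalone sketch (illustrative, not pip's actual code
+        # path): the same two-pass idea can be expressed with tomllib and the
+        # vendored pyproject_hooks, installing the static
+        # `build-system.requires` list first and only then querying the
+        # backend. The "." source directory and the setuptools fallback
+        # below are assumptions:
+        #
+        #     import tomllib
+        #     from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+        #
+        #     with open("pyproject.toml", "rb") as f:
+        #         build_system = tomllib.load(f).get("build-system", {})
+        #     static_reqs = build_system.get("requires", [])  # pass 1
+        #     backend = BuildBackendHookCaller(
+        #         source_dir=".",
+        #         build_backend=build_system.get(
+        #             "build-backend", "setuptools.build_meta:__legacy__"
+        #         ),
+        #     )
+        #     dynamic_reqs = backend.get_requires_for_build_wheel()  # pass 2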
+ pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment(build_env_installer) + self.req.build_env.install_requirements( + pyproject_requires, "overlay", kind="build dependencies", for_req=self.req + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + self._raise_conflicts("PEP 517/518 supported requirements", conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))), + ) + + def _get_build_requires_wheel(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message("Getting requirements to build wheel") + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_wheel() + + def _get_build_requires_editable(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build editable" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_editable() + + def _install_build_reqs( + self, build_env_installer: BuildEnvironmentInstaller + ) -> None: + # Install any extra build dependencies that the backend requests. + # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + if ( + self.req.editable + and self.req.permit_editable_wheels + and self.req.supports_pyproject_editable + ): + build_reqs = self._get_build_requires_editable() + else: + build_reqs = self._get_build_requires_wheel() + conflicting, missing = self.req.build_env.check_requirements(build_reqs) + if conflicting: + self._raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + missing, "normal", kind="backend dependencies", for_req=self.req + ) + + def _raise_conflicts( + self, conflicting_with: str, conflicting_reqs: set[tuple[str, str]] + ) -> None: + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." + ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=", ".join( + f"{installed} is incompatible with {wanted}" + for installed, wanted in sorted(conflicting_reqs) + ), + ) + raise InstallationError(error_message) + + def _raise_missing_reqs(self, missing: set[str]) -> None: + format_string = ( + "Some build dependencies for {requirement} are missing: {missing}." 
+ ) + error_message = format_string.format( + requirement=self.req, missing=", ".join(map(repr, sorted(missing))) + ) + raise InstallationError(error_message) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 0000000..ee12bfa --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) + +if TYPE_CHECKING: + from pip._internal.build_env import BuildEnvironmentInstaller + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + @property + def build_tracker_id(self) -> str | None: + return None + + def get_metadata_distribution(self) -> BaseDistribution: + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + assert self.req.local_file_path, "Set as part of preparation during download" + assert self.req.name, "Wheels are never unnamed" + wheel = FilesystemWheel(self.req.local_file_path) + return get_wheel_distribution(wheel, canonicalize_name(self.req.name)) + + def prepare_distribution_metadata( + self, + build_env_installer: BuildEnvironmentInstaller, + build_isolation: bool, + check_build_deps: bool, + ) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/pip/_internal/exceptions.py b/venv/lib/python3.12/site-packages/pip/_internal/exceptions.py new file mode 100644 index 0000000..d6e9095 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,898 @@ +"""Exceptions used throughout package. + +This module MUST NOT try to import from anything within `pip._internal` to +operate. This is expected to be importable from any/all files within the +subpackage and, thus, should not depend on them. 
+""" + +from __future__ import annotations + +import configparser +import contextlib +import locale +import logging +import pathlib +import re +import sys +from collections.abc import Iterator +from itertools import chain, groupby, repeat +from typing import TYPE_CHECKING, Literal + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.version import InvalidVersion +from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult +from pip._vendor.rich.markup import escape +from pip._vendor.rich.text import Text + +if TYPE_CHECKING: + from hashlib import _Hash + + from pip._vendor.requests.models import Request, Response + + from pip._internal.metadata import BaseDistribution + from pip._internal.network.download import _FileDownload + from pip._internal.req.req_install import InstallRequirement + +logger = logging.getLogger(__name__) + + +# +# Scaffolding +# +def _is_kebab_case(s: str) -> bool: + return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None + + +def _prefix_with_indent( + s: Text | str, + console: Console, + *, + prefix: str, + indent: str, +) -> Text: + if isinstance(s, Text): + text = s + else: + text = console.render_str(s) + + return console.render_str(prefix, overflow="ignore") + console.render_str( + f"\n{indent}", overflow="ignore" + ).join(text.split(allow_blank=True)) + + +class PipError(Exception): + """The base pip error.""" + + +class DiagnosticPipError(PipError): + """An error, that presents diagnostic information to the user. + + This contains a bunch of logic, to enable pretty presentation of our error + messages. Each error gets a unique reference. Each error can also include + additional context, a hint and/or a note -- which are presented with the + main error message in a consistent style. + + This is adapted from the error output styling in `sphinx-theme-builder`. + """ + + reference: str + + def __init__( + self, + *, + kind: Literal["error", "warning"] = "error", + reference: str | None = None, + message: str | Text, + context: str | Text | None, + hint_stmt: str | Text | None, + note_stmt: str | Text | None = None, + link: str | None = None, + ) -> None: + # Ensure a proper reference is provided. + if reference is None: + assert hasattr(self, "reference"), "error reference not provided!" + reference = self.reference + assert _is_kebab_case(reference), "error reference must be kebab-case!" + + self.kind = kind + self.reference = reference + + self.message = message + self.context = context + + self.note_stmt = note_stmt + self.hint_stmt = hint_stmt + + self.link = link + + super().__init__(f"<{self.__class__.__name__}: {self.reference}>") + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__}(" + f"reference={self.reference!r}, " + f"message={self.message!r}, " + f"context={self.context!r}, " + f"note_stmt={self.note_stmt!r}, " + f"hint_stmt={self.hint_stmt!r}" + ")>" + ) + + def __rich_console__( + self, + console: Console, + options: ConsoleOptions, + ) -> RenderResult: + colour = "red" if self.kind == "error" else "yellow" + + yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]" + yield "" + + if not options.ascii_only: + # Present the main message, with relevant context indented. 
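+            # For orientation, a DiagnosticPipError printed through the
+            # vendored rich Console renders roughly like this (sketch only;
+            # the reference and wording below are hypothetical):
+            #
+            #     error: example-reference
+            #
+            #     × Something went wrong.
+            #     ╰─> captured context, indented under the message
+            #
+            #     note: An optional note.
+            #     hint: An optional hint.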
+            if self.context is not None:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent=f"[{colour}]│[/] ",
+                )
+                yield _prefix_with_indent(
+                    self.context,
+                    console,
+                    prefix=f"[{colour}]╰─>[/] ",
+                    indent=f"[{colour}]   [/] ",
+                )
+            else:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix="[red]×[/] ",
+                    indent="  ",
+                )
+        else:
+            yield self.message
+            if self.context is not None:
+                yield ""
+                yield self.context
+
+        if self.note_stmt is not None or self.hint_stmt is not None:
+            yield ""
+
+        if self.note_stmt is not None:
+            yield _prefix_with_indent(
+                self.note_stmt,
+                console,
+                prefix="[magenta bold]note[/]: ",
+                indent="      ",
+            )
+        if self.hint_stmt is not None:
+            yield _prefix_with_indent(
+                self.hint_stmt,
+                console,
+                prefix="[cyan bold]hint[/]: ",
+                indent="      ",
+            )
+
+        if self.link is not None:
+            yield ""
+            yield f"Link: {self.link}"
+
+
+#
+# Actual Errors
+#
+class ConfigurationError(PipError):
+    """General exception in configuration"""
+
+
+class InstallationError(PipError):
+    """General exception during installation"""
+
+
+class FailedToPrepareCandidate(InstallationError):
+    """Raised when we fail to prepare a candidate (i.e. fetch and generate metadata).
+
+    This is intentionally not a diagnostic error, since the output will be presented
+    above this error, when this occurs. This should instead present information to the
+    user.
+    """
+
+    def __init__(
+        self, *, package_name: str, requirement_chain: str, failed_step: str
+    ) -> None:
+        super().__init__(f"Failed to build '{package_name}' when {failed_step.lower()}")
+        self.package_name = package_name
+        self.requirement_chain = requirement_chain
+        self.failed_step = failed_step
+
+
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+    reference = "missing-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid pyproject.toml file.\n"
+                "The [build-system] table is missing the mandatory `requires` key."
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class NoneMetadataError(PipError):
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
+    """
+
+    def __init__(
+        self,
+        dist: BaseDistribution,
+        metadata_name: str,
+    ) -> None:
+        """
+        :param dist: A Distribution object.
+        :param metadata_name: The name of the metadata being accessed
+            (can be "METADATA" or "PKG-INFO").
+ """ + self.dist = dist + self.metadata_name = metadata_name + + def __str__(self) -> str: + # Use `dist` in the error message because its stringification + # includes more information, like the version and location. + return f"None {self.metadata_name} metadata found for distribution: {self.dist}" + + +class UserInstallationInvalid(InstallationError): + """A --user install is requested on an environment without user site.""" + + def __str__(self) -> str: + return "User base directory is not specified" + + +class InvalidSchemeCombination(InstallationError): + def __str__(self) -> str: + before = ", ".join(str(a) for a in self.args[:-1]) + return f"Cannot set {before} and {self.args[-1]} together" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class NetworkConnectionError(PipError): + """HTTP connection error""" + + def __init__( + self, + error_msg: str, + response: Response | None = None, + request: Request | None = None, + ) -> None: + """ + Initialize NetworkConnectionError with `request` and `response` + objects. + """ + self.response = response + self.request = request + self.error_msg = error_msg + if ( + self.response is not None + and not self.request + and hasattr(response, "request") + ): + self.request = self.response.request + super().__init__(error_msg, response, request) + + def __str__(self) -> str: + return str(self.error_msg) + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class InvalidWheel(InstallationError): + """Invalid (e.g. corrupt) wheel.""" + + def __init__(self, location: str, name: str): + self.location = location + self.name = name + + def __str__(self) -> str: + return f"Wheel '{self.name}' located at {self.location} is invalid." + + +class MetadataInconsistent(InstallationError): + """Built metadata contains inconsistent information. + + This is raised when the metadata contains values (e.g. name and version) + that do not match the information previously obtained from sdist filename, + user-supplied ``#egg=`` value, or an install requirement name. 
+ """ + + def __init__( + self, ireq: InstallRequirement, field: str, f_val: str, m_val: str + ) -> None: + self.ireq = ireq + self.field = field + self.f_val = f_val + self.m_val = m_val + + def __str__(self) -> str: + return ( + f"Requested {self.ireq} has inconsistent {self.field}: " + f"expected {self.f_val!r}, but metadata has {self.m_val!r}" + ) + + +class MetadataInvalid(InstallationError): + """Metadata is invalid.""" + + def __init__(self, ireq: InstallRequirement, error: str) -> None: + self.ireq = ireq + self.error = error + + def __str__(self) -> str: + return f"Requested {self.ireq} has invalid metadata: {self.error}" + + +class InstallationSubprocessError(DiagnosticPipError, InstallationError): + """A subprocess call failed.""" + + reference = "subprocess-exited-with-error" + + def __init__( + self, + *, + command_description: str, + exit_code: int, + output_lines: list[str] | None, + ) -> None: + if output_lines is None: + output_prompt = Text("No available output.") + else: + output_prompt = ( + Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n") + + Text("".join(output_lines)) + + Text.from_markup(R"[red]\[end of output][/]") + ) + + super().__init__( + message=( + f"[green]{escape(command_description)}[/] did not run successfully.\n" + f"exit code: {exit_code}" + ), + context=output_prompt, + hint_stmt=None, + note_stmt=( + "This error originates from a subprocess, and is likely not a " + "problem with pip." + ), + ) + + self.command_description = command_description + self.exit_code = exit_code + + def __str__(self) -> str: + return f"{self.command_description} exited with {self.exit_code}" + + +class MetadataGenerationFailed(DiagnosticPipError, InstallationError): + reference = "metadata-generation-failed" + + def __init__( + self, + *, + package_details: str, + ) -> None: + super().__init__( + message="Encountered error while generating package metadata.", + context=escape(package_details), + hint_stmt="See above for details.", + note_stmt="This is an issue with the package mentioned above, not pip.", + ) + + def __str__(self) -> str: + return "metadata generation failed" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self) -> None: + self.errors: list[HashError] = [] + + def append(self, error: HashError) -> None: + self.errors.append(error) + + def __str__(self) -> str: + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return "\n".join(lines) + return "" + + def __bool__(self) -> bool: + return bool(self.errors) + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. 
+
+    """
+
+    req: InstallRequirement | None = None
+    head = ""
+    order: int = -1
+
+    def body(self) -> str:
+        """Return a summary of me for display under the heading.
+
+        This default implementation simply prints a description of the
+        triggering requirement.
+
+        :param req: The InstallRequirement that provoked this error, with
+            its link already populated by the resolver's _populate_link().
+
+        """
+        return f"    {self._requirement_name()}"
+
+    def __str__(self) -> str:
+        return f"{self.head}\n{self.body()}"
+
+    def _requirement_name(self) -> str:
+        """Return a description of the requirement that triggered me.
+
+        This default implementation returns the long description of the req,
+        with line numbers.
+
+        """
+        return str(self.req) if self.req else "unknown package"
+
+
+class VcsHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 0
+    head = (
+        "Can't verify hashes for these requirements because we don't "
+        "have a way to hash version control repositories:"
+    )
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 1
+    head = (
+        "Can't verify hashes for these file:// requirements because they "
+        "point to directories:"
+    )
+
+
+class HashMissing(HashError):
+    """A hash was needed for a requirement but is absent."""
+
+    order = 2
+    head = (
+        "Hashes are required in --require-hashes mode, but they are "
+        "missing from some requirements. Here is a list of those "
+        "requirements along with the hashes their downloaded archives "
+        "actually had. Add lines like these to your requirements files to "
+        "prevent tampering. (If you did not enable --require-hashes "
+        "manually, note that it turns on automatically when any package "
+        "has a hash.)"
+    )
+
+    def __init__(self, gotten_hash: str) -> None:
+        """
+        :param gotten_hash: The hash of the (possibly malicious) archive we
+            just downloaded
+        """
+        self.gotten_hash = gotten_hash
+
+    def body(self) -> str:
+        # Dodge circular import.
+        from pip._internal.utils.hashes import FAVORITE_HASH
+
+        package = None
+        if self.req:
+            # In the case of URL-based requirements, display the original URL
+            # seen in the requirements file rather than the package name,
+            # so the output can be directly copied into the requirements file.
+            package = (
+                self.req.original_link
+                if self.req.is_direct
+                # In case someone feeds something downright stupid
+                # to InstallRequirement's constructor.
+                else getattr(self.req, "req", None)
+            )
+        return "    {} --hash={}:{}".format(
+            package or "unknown package", FAVORITE_HASH, self.gotten_hash
+        )
+
+
+class HashUnpinned(HashError):
+    """A requirement had a hash specified but was not pinned to a specific
+    version."""
+
+    order = 3
+    head = (
+        "In --require-hashes mode, all requirements must have their "
+        "versions pinned with ==. These do not:"
+    )
+
+
+class HashMismatch(HashError):
+    """
+    Distribution file hash values don't match.
+
+    :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised to
+        improve its error message.
+
+    """
+
+    order = 4
+    head = (
+        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+        "FILE. If you have updated the package versions, please update "
+        "the hashes. Otherwise, examine the package contents carefully; "
+        "someone may have tampered with them."
+ ) + + def __init__(self, allowed: dict[str, list[str]], gots: dict[str, _Hash]) -> None: + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self) -> str: + return f" {self._requirement_name()}:\n{self._hash_comparison()}" + + def _hash_comparison(self) -> str: + """ + Return a comparison of actual and expected hash values. + + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + + def hash_then_or(hash_name: str) -> chain[str]: + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(" or")) + + lines: list[str] = [] + for hash_name, expecteds in self.allowed.items(): + prefix = hash_then_or(hash_name) + lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds) + lines.append( + f" Got {self.gots[hash_name].hexdigest()}\n" + ) + return "\n".join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" + + +class ConfigurationFileCouldNotBeLoaded(ConfigurationError): + """When there are errors while loading a configuration file""" + + def __init__( + self, + reason: str = "could not be loaded", + fname: str | None = None, + error: configparser.Error | None = None, + ) -> None: + super().__init__(error) + self.reason = reason + self.fname = fname + self.error = error + + def __str__(self) -> str: + if self.fname is not None: + message_part = f" in {self.fname}." + else: + assert self.error is not None + message_part = f".\n{self.error}\n" + return f"Configuration file {self.reason}{message_part}" + + +_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\ +The Python environment under {sys.prefix} is managed externally, and may not be +manipulated by the user. Please use specific tooling from the distributor of +the Python installation to interact with this environment instead. +""" + + +class ExternallyManagedEnvironment(DiagnosticPipError): + """The current environment is externally managed. + + This is raised when the current environment is externally managed, as + defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked + and displayed when the error is bubbled up to the user. + + :param error: The error message read from ``EXTERNALLY-MANAGED``. + """ + + reference = "externally-managed-environment" + + def __init__(self, error: str | None) -> None: + if error is None: + context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR) + else: + context = Text(error) + super().__init__( + message="This environment is externally managed", + context=context, + note_stmt=( + "If you believe this is a mistake, please contact your " + "Python installation or OS distribution provider. " + "You can override this, at the risk of breaking your Python " + "installation or OS, by passing --break-system-packages." + ), + hint_stmt=Text("See PEP 668 for the detailed specification."), + ) + + @staticmethod + def _iter_externally_managed_error_keys() -> Iterator[str]: + # LC_MESSAGES is in POSIX, but not the C standard. 
The most common + # platform that does not implement this category is Windows, where + # using other categories for console message localization is equally + # unreliable, so we fall back to the locale-less vendor message. This + # can always be re-evaluated when a vendor proposes a new alternative. + try: + category = locale.LC_MESSAGES + except AttributeError: + lang: str | None = None + else: + lang, _ = locale.getlocale(category) + if lang is not None: + yield f"Error-{lang}" + for sep in ("-", "_"): + before, found, _ = lang.partition(sep) + if not found: + continue + yield f"Error-{before}" + yield "Error" + + @classmethod + def from_config( + cls, + config: pathlib.Path | str, + ) -> ExternallyManagedEnvironment: + parser = configparser.ConfigParser(interpolation=None) + try: + parser.read(config, encoding="utf-8") + section = parser["externally-managed"] + for key in cls._iter_externally_managed_error_keys(): + with contextlib.suppress(KeyError): + return cls(section[key]) + except KeyError: + pass + except (OSError, UnicodeDecodeError, configparser.ParsingError): + from pip._internal.utils._log import VERBOSE + + exc_info = logger.isEnabledFor(VERBOSE) + logger.warning("Failed to read %s", config, exc_info=exc_info) + return cls(None) + + +class UninstallMissingRecord(DiagnosticPipError): + reference = "uninstall-no-record-file" + + def __init__(self, *, distribution: BaseDistribution) -> None: + installer = distribution.installer + if not installer or installer == "pip": + dep = f"{distribution.raw_name}=={distribution.version}" + hint = Text.assemble( + "You might be able to recover from this via: ", + (f"pip install --force-reinstall --no-deps {dep}", "green"), + ) + else: + hint = Text( + f"The package was installed by {installer}. " + "You should check if it can uninstall the package." + ) + + super().__init__( + message=Text(f"Cannot uninstall {distribution}"), + context=( + "The package's contents are unknown: " + f"no RECORD file was found for {distribution.raw_name}." + ), + hint_stmt=hint, + ) + + +class LegacyDistutilsInstall(DiagnosticPipError): + reference = "uninstall-distutils-installed-package" + + def __init__(self, *, distribution: BaseDistribution) -> None: + super().__init__( + message=Text(f"Cannot uninstall {distribution}"), + context=( + "It is a distutils installed project and thus we cannot accurately " + "determine which files belong to it which would lead to only a partial " + "uninstall." + ), + hint_stmt=None, + ) + + +class InvalidInstalledPackage(DiagnosticPipError): + reference = "invalid-installed-package" + + def __init__( + self, + *, + dist: BaseDistribution, + invalid_exc: InvalidRequirement | InvalidVersion, + ) -> None: + installed_location = dist.installed_location + + if isinstance(invalid_exc, InvalidRequirement): + invalid_type = "requirement" + else: + invalid_type = "version" + + super().__init__( + message=Text( + f"Cannot process installed package {dist} " + + (f"in {installed_location!r} " if installed_location else "") + + f"because it has an invalid {invalid_type}:\n{invalid_exc.args[0]}" + ), + context=( + "Starting with pip 24.1, packages with invalid " + f"{invalid_type}s can not be processed." 
+ ), + hint_stmt="To proceed this package must be uninstalled.", + ) + + +class IncompleteDownloadError(DiagnosticPipError): + """Raised when the downloader receives fewer bytes than advertised + in the Content-Length header.""" + + reference = "incomplete-download" + + def __init__(self, download: _FileDownload) -> None: + # Dodge circular import. + from pip._internal.utils.misc import format_size + + assert download.size is not None + download_status = ( + f"{format_size(download.bytes_received)}/{format_size(download.size)}" + ) + if download.reattempts: + retry_status = f"after {download.reattempts + 1} attempts " + hint = "Use --resume-retries to configure resume attempt limit." + else: + # Download retrying is not enabled. + retry_status = "" + hint = "Consider using --resume-retries to enable download resumption." + message = Text( + f"Download failed {retry_status}because not enough bytes " + f"were received ({download_status})" + ) + + super().__init__( + message=message, + context=f"URL: {download.link.redacted_url}", + hint_stmt=hint, + note_stmt="This is an issue with network connectivity, not pip.", + ) + + +class ResolutionTooDeepError(DiagnosticPipError): + """Raised when the dependency resolver exceeds the maximum recursion depth.""" + + reference = "resolution-too-deep" + + def __init__(self) -> None: + super().__init__( + message="Dependency resolution exceeded maximum depth", + context=( + "Pip cannot resolve the current dependencies as the dependency graph " + "is too complex for pip to solve efficiently." + ), + hint_stmt=( + "Try adding lower bounds to constrain your dependencies, " + "for example: 'package>=2.0.0' instead of just 'package'. " + ), + link="https://pip.pypa.io/en/stable/topics/dependency-resolution/#handling-resolution-too-deep-errors", + ) + + +class InstallWheelBuildError(DiagnosticPipError): + reference = "failed-wheel-build-for-install" + + def __init__(self, failed: list[InstallRequirement]) -> None: + super().__init__( + message=( + "Failed to build installable wheels for some " + "pyproject.toml based projects" + ), + context=", ".join(r.name for r in failed), # type: ignore + hint_stmt=None, + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 0000000..197dd75 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1 @@ +"""Index interaction code""" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..cc5d970 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc new file mode 100644 index 0000000..66235e7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/collector.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc new file mode 100644 index 0000000..b2ddcd5 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc new file mode 100644 index 0000000..166538b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/index/__pycache__/sources.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/collector.py b/venv/lib/python3.12/site-packages/pip/_internal/index/collector.py new file mode 100644 index 0000000..00d66da --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,489 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_sources(). +""" + +from __future__ import annotations + +import collections +import email.message +import functools +import itertools +import json +import logging +import os +import urllib.parse +import urllib.request +from collections.abc import Iterable, MutableMapping, Sequence +from dataclasses import dataclass +from html.parser import HTMLParser +from optparse import Values +from typing import ( + Callable, + NamedTuple, + Protocol, +) + +from pip._vendor import requests +from pip._vendor.requests import Response +from pip._vendor.requests.exceptions import RetryError, SSLError + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import redact_auth_from_url +from pip._internal.vcs import vcs + +from .sources import CandidatesFromPage, LinkSource, build_source + +logger = logging.getLogger(__name__) + +ResponseHeaders = MutableMapping[str, str] + + +def _match_vcs_scheme(url: str) -> str | None: + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in "+:": + return scheme + return None + + +class _NotAPIContent(Exception): + def __init__(self, content_type: str, request_desc: str) -> None: + super().__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_api_header(response: Response) -> None: + """ + Check the Content-Type header to ensure the response contains a Simple + API Response. + + Raises `_NotAPIContent` if the content type is not a valid content-type. + """ + content_type = response.headers.get("Content-Type", "Unknown") + + content_type_l = content_type.lower() + if content_type_l.startswith( + ( + "text/html", + "application/vnd.pypi.simple.v1+html", + "application/vnd.pypi.simple.v1+json", + ) + ): + return + + raise _NotAPIContent(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_api_response(url: str, session: PipSession) -> None: + """ + Send a HEAD request to the URL, and ensure the response contains a simple + API Response. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotAPIContent` if the content type is not a valid content type. 
+    """
+    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
+    if scheme not in {"http", "https"}:
+        raise _NotHTTP()
+
+    resp = session.head(url, allow_redirects=True)
+    raise_for_status(resp)
+
+    _ensure_api_header(resp)
+
+
+def _get_simple_response(url: str, session: PipSession) -> Response:
+    """Access a Simple API response with GET, and return the response.
+
+    This consists of three parts:
+
+    1. If the URL looks suspiciously like an archive, send a HEAD first to
+       check the Content-Type is HTML or Simple API, to avoid downloading a
+       large file. Raise `_NotHTTP` if the content type cannot be determined, or
+       `_NotAPIContent` if it is not HTML or a Simple API.
+    2. Actually perform the request. Raise HTTP exceptions on network failures.
+    3. Check the Content-Type header to make sure we got a Simple API response,
+       and raise `_NotAPIContent` otherwise.
+    """
+    if is_archive_file(Link(url).filename):
+        _ensure_api_response(url, session=session)
+
+    logger.debug("Getting page %s", redact_auth_from_url(url))
+
+    resp = session.get(
+        url,
+        headers={
+            "Accept": ", ".join(
+                [
+                    "application/vnd.pypi.simple.v1+json",
+                    "application/vnd.pypi.simple.v1+html; q=0.1",
+                    "text/html; q=0.01",
+                ]
+            ),
+            # We don't want to blindly return cached data for
+            # /simple/, because authors generally expect that
+            # twine upload && pip install will function, but if
+            # they've done a pip install in the last ~10 minutes
+            # it won't. Thus by setting this to zero we will not
+            # blindly use any cached data, however the benefit of
+            # using max-age=0 instead of no-cache, is that we will
+            # still support conditional requests, so we will still
+            # minimize traffic sent in cases where the page hasn't
+            # changed at all, we will just always incur the round
+            # trip for the conditional GET now instead of only
+            # once per 10 minutes.
+            # For more information, please see pypa/pip#5670.
+            "Cache-Control": "max-age=0",
+        },
+    )
+    raise_for_status(resp)
+
+    # The check for archives above only works if the url ends with
+    # something that looks like an archive. However that is not a
+    # requirement of a url. Unless we issue a HEAD request on every
+    # url we cannot know ahead of time for sure if something is a
+    # Simple API response or not. However we can check after we've
+    # downloaded it.
+    _ensure_api_header(resp)
+
+    logger.debug(
+        "Fetched page %s as %s",
+        redact_auth_from_url(url),
+        resp.headers.get("Content-Type", "Unknown"),
+    )
+
+    return resp
+
+
+def _get_encoding_from_headers(headers: ResponseHeaders) -> str | None:
+    """Determine if we have any encoding information in our headers."""
+    if headers and "Content-Type" in headers:
+        m = email.message.Message()
+        m["content-type"] = headers["Content-Type"]
+        charset = m.get_param("charset")
+        if charset:
+            return str(charset)
+    return None
+
+
+class CacheablePageContent:
+    def __init__(self, page: IndexContent) -> None:
+        assert page.cache_link_parsing
+        self.page = page
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self.page.url == other.page.url
+
+    def __hash__(self) -> int:
+        return hash(self.page.url)
+
+
+class ParseLinks(Protocol):
+    def __call__(self, page: IndexContent) -> Iterable[Link]: ...
+
+
+def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
+    """
+    Given a function that parses an Iterable[Link] from an IndexContent, cache the
+    function's result (keyed by CacheablePageContent), unless the IndexContent
+    `page` has `page.cache_link_parsing == False`.
+ """ + + @functools.cache + def wrapper(cacheable_page: CacheablePageContent) -> list[Link]: + return list(fn(cacheable_page.page)) + + @functools.wraps(fn) + def wrapper_wrapper(page: IndexContent) -> list[Link]: + if page.cache_link_parsing: + return wrapper(CacheablePageContent(page)) + return list(fn(page)) + + return wrapper_wrapper + + +@with_cached_index_content +def parse_links(page: IndexContent) -> Iterable[Link]: + """ + Parse a Simple API's Index Content, and yield its anchor elements as Link objects. + """ + + content_type_l = page.content_type.lower() + if content_type_l.startswith("application/vnd.pypi.simple.v1+json"): + data = json.loads(page.content) + for file in data.get("files", []): + link = Link.from_json(file, page.url) + if link is None: + continue + yield link + return + + parser = HTMLLinkParser(page.url) + encoding = page.encoding or "utf-8" + parser.feed(page.content.decode(encoding)) + + url = page.url + base_url = parser.base_url or url + for anchor in parser.anchors: + link = Link.from_element(anchor, page_url=url, base_url=base_url) + if link is None: + continue + yield link + + +@dataclass(frozen=True) +class IndexContent: + """Represents one response (or page), along with its URL. + + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + + content: bytes + content_type: str + encoding: str | None + url: str + cache_link_parsing: bool = True + + def __str__(self) -> str: + return redact_auth_from_url(self.url) + + +class HTMLLinkParser(HTMLParser): + """ + HTMLParser that keeps the first base HREF and a list of all anchor + elements' attributes. + """ + + def __init__(self, url: str) -> None: + super().__init__(convert_charrefs=True) + + self.url: str = url + self.base_url: str | None = None + self.anchors: list[dict[str, str | None]] = [] + + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: + if tag == "base" and self.base_url is None: + href = self.get_href(attrs) + if href is not None: + self.base_url = href + elif tag == "a": + self.anchors.append(dict(attrs)) + + def get_href(self, attrs: list[tuple[str, str | None]]) -> str | None: + for name, value in attrs: + if name == "href": + return value + return None + + +def _handle_get_simple_fail( + link: Link, + reason: str | Exception, + meth: Callable[..., None] | None = None, +) -> None: + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_index_content( + response: Response, cache_link_parsing: bool = True +) -> IndexContent: + encoding = _get_encoding_from_headers(response.headers) + return IndexContent( + response.content, + response.headers["Content-Type"], + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing, + ) + + +def _get_index_content(link: Link, *, session: PipSession) -> IndexContent | None: + url = link.url.split("#", 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. 
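+    # For example (illustrative): _match_vcs_scheme("git+https://github.com/pypa/pip.git")
+    # returns "git", while _match_vcs_scheme("https://pypi.org/simple/pip/") returns
+    # None, since only a registered VCS scheme followed by "+" or ":" matches.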
+ vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning( + "Cannot look at %s URL %s because it does not support lookup as web pages.", + vcs_scheme, + link, + ) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib.parse.urlparse(url) + if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith("/"): + url += "/" + # TODO: In the future, it would be nice if pip supported PEP 691 + # style responses in the file:// URLs, however there's no + # standard file extension for application/vnd.pypi.simple.v1+json + # so we'll need to come up with something on our own. + url = urllib.parse.urljoin(url, "index.html") + logger.debug(" file: URL is directory, getting %s", url) + + try: + resp = _get_simple_response(url, session=session) + except _NotHTTP: + logger.warning( + "Skipping page %s because it looks like an archive, and cannot " + "be checked by a HTTP HEAD request.", + link, + ) + except _NotAPIContent as exc: + logger.warning( + "Skipping page %s because the %s request got Content-Type: %s. " + "The only supported Content-Types are application/vnd.pypi.simple.v1+json, " + "application/vnd.pypi.simple.v1+html, and text/html", + link, + exc.request_desc, + exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_simple_fail(link, exc) + except RetryError as exc: + _handle_get_simple_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_simple_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_simple_fail(link, f"connection error: {exc}") + except requests.Timeout: + _handle_get_simple_fail(link, "timed out") + else: + return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing) + return None + + +class CollectedSources(NamedTuple): + find_links: Sequence[LinkSource | None] + index_urls: Sequence[LinkSource | None] + + +class LinkCollector: + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_sources() method. + """ + + def __init__( + self, + session: PipSession, + search_scope: SearchScope, + ) -> None: + self.search_scope = search_scope + self.session = session + + @classmethod + def create( + cls, + session: PipSession, + options: Values, + suppress_no_index: bool = False, + ) -> LinkCollector: + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object. + """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + "Ignoring indexes: %s", + ",".join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). 
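+        # A minimal sketch of driving this classmethod directly (illustrative;
+        # the attributes mirror pip's CLI defaults, and the index URL is
+        # PyPI's public simple index):
+        #
+        #     from optparse import Values
+        #
+        #     opts = Values()
+        #     opts.index_url = "https://pypi.org/simple"
+        #     opts.extra_index_urls = []
+        #     opts.no_index = False
+        #     opts.find_links = []
+        #     collector = LinkCollector.create(PipSession(), options=opts)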
+ find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, + index_urls=index_urls, + no_index=options.no_index, + ) + link_collector = LinkCollector( + session=session, + search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self) -> list[str]: + return self.search_scope.find_links + + def fetch_response(self, location: Link) -> IndexContent | None: + """ + Fetch an HTML page containing package links. + """ + return _get_index_content(location, session=self.session) + + def collect_sources( + self, + project_name: str, + candidates_from_page: CandidatesFromPage, + ) -> CollectedSources: + # The OrderedDict calls deduplicate sources by URL. + index_url_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=False, + cache_link_parsing=False, + project_name=project_name, + ) + for loc in self.search_scope.get_index_urls_locations(project_name) + ).values() + find_links_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=True, + cache_link_parsing=True, + project_name=project_name, + ) + for loc in self.find_links + ).values() + + if logger.isEnabledFor(logging.DEBUG): + lines = [ + f"* {s.link}" + for s in itertools.chain(find_links_sources, index_url_sources) + if s is not None and s.link is not None + ] + lines = [ + f"{len(lines)} location(s) to search " + f"for versions of {project_name}:" + ] + lines + logger.debug("\n".join(lines)) + + return CollectedSources( + find_links=list(find_links_sources), + index_urls=list(index_url_sources), + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py b/venv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 0000000..ae6f896 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1059 @@ +"""Routines related to PyPI, indexes""" + +from __future__ import annotations + +import enum +import functools +import itertools +import logging +import re +from collections.abc import Iterable +from dataclasses import dataclass +from typing import ( + TYPE_CHECKING, + Optional, + Union, +) + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import InvalidVersion, _BaseVersion +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import LinkCollector, parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.req import InstallRequirement +from pip._internal.utils._log import getLogger +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import 
indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS + +if TYPE_CHECKING: + from typing_extensions import TypeGuard + +__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] + + +logger = getLogger(__name__) + +BuildTag = Union[tuple[()], tuple[int, str]] +CandidateSortingKey = tuple[int, int, int, _BaseVersion, Optional[int], BuildTag] + + +def _check_link_requires_python( + link: Link, + version_info: tuple[int, int, int], + ignore_requires_python: bool = False, +) -> bool: + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, + link, + ) + else: + if not is_compatible: + version = ".".join(map(str, version_info)) + if not ignore_requires_python: + logger.verbose( + "Link requires a different Python (%s not in: %r): %s", + version, + link.requires_python, + link, + ) + return False + + logger.debug( + "Ignoring failed Requires-Python check (%s not in: %r) for link: %s", + version, + link.requires_python, + link, + ) + + return True + + +class LinkType(enum.Enum): + candidate = enum.auto() + different_project = enum.auto() + yanked = enum.auto() + format_unsupported = enum.auto() + format_invalid = enum.auto() + platform_mismatch = enum.auto() + requires_python_mismatch = enum.auto() + + +class LinkEvaluator: + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$") + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name: str, + canonical_name: NormalizedName, + formats: frozenset[str], + target_python: TargetPython, + allow_yanked: bool, + ignore_requires_python: bool | None = None, + ) -> None: + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. 
+ """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link: Link) -> tuple[LinkType, str]: + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (result, detail), where *result* is an enum + representing whether the evaluation found a candidate, or the reason + why one is not found. If a candidate is found, *detail* will be the + candidate's version string; if one is not found, it contains the + reason the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or "" + return (LinkType.yanked, f"yanked for reason: {reason}") + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (LinkType.format_unsupported, "not a file") + if ext not in SUPPORTED_EXTENSIONS: + return ( + LinkType.format_unsupported, + f"unsupported archive format: {ext}", + ) + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = f"No binaries permitted for {self.project_name}" + return (LinkType.format_unsupported, reason) + if "macosx10" in link.path and ext == ".zip": + return (LinkType.format_unsupported, "macosx10 one") + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return ( + LinkType.format_invalid, + "invalid wheel filename", + ) + if wheel.name != self._canonical_name: + reason = f"wrong project name (not {self.project_name})" + return (LinkType.different_project, reason) + + supported_tags = self._target_python.get_unsorted_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = ", ".join(wheel.get_formatted_file_tags()) + reason = ( + f"none of the wheel's tags ({file_tags}) are compatible " + f"(run pip debug --verbose to show compatible tags)" + ) + return (LinkType.platform_mismatch, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = f"No sources permitted for {self.project_name}" + return (LinkType.format_unsupported, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, + self._canonical_name, + ) + if not version: + reason = f"Missing project version for {self.project_name}" + return (LinkType.format_invalid, reason) + + match = self._py_version_re.search(version) + if match: + version = version[: match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return ( + LinkType.platform_mismatch, + "Python version is incorrect", + ) + + supports_python = _check_link_requires_python( + link, + version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + requires_python = link.requires_python + if requires_python: + + def get_version_sort_key(v: str) -> tuple[int, ...]: + return tuple(int(s) for s in v.split(".") if s.isdigit()) + + requires_python = ",".join( + sorted( + (str(s) for s in specifiers.SpecifierSet(requires_python)), + key=get_version_sort_key, + ) + ) + reason = f"{version} Requires-Python {requires_python}" + return (LinkType.requires_python_mismatch, reason) + + logger.debug("Found link %s, version: %s", link, version) + + return (LinkType.candidate, version) + + +def filter_unallowed_hashes( + candidates: list[InstallationCandidate], + hashes: Hashes | None, + project_name: str, +) -> list[InstallationCandidate]: + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + "Given no hashes to check %s links for project %r: " + "discarding no candidates", + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. 
+ filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = "discarding no candidates" + else: + discard_message = "discarding {} non-matches:\n {}".format( + len(non_matches), + "\n ".join(str(candidate.link) for candidate in non_matches), + ) + + logger.debug( + "Checked %s links for project %r against %s hashes " + "(%s matches, %s no digest): %s", + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message, + ) + + return filtered + + +@dataclass +class CandidatePreferences: + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + prefer_binary: bool = False + allow_all_prereleases: bool = False + + +@dataclass(frozen=True) +class BestCandidateResult: + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + + :param all_candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + + all_candidates: list[InstallationCandidate] + applicable_candidates: list[InstallationCandidate] + best_candidate: InstallationCandidate | None + + def __post_init__(self) -> None: + assert set(self.applicable_candidates) <= set(self.all_candidates) + + if self.best_candidate is None: + assert not self.applicable_candidates + else: + assert self.best_candidate in self.applicable_candidates + + +class CandidateEvaluator: + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name: str, + target_python: TargetPython | None = None, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + specifier: specifiers.BaseSpecifier | None = None, + hashes: Hashes | None = None, + ) -> CandidateEvaluator: + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_sorted_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name: str, + supported_tags: list[Tag], + specifier: specifiers.BaseSpecifier, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + hashes: Hashes | None = None, + ) -> None: + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). 
+ """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + # Since the index of the tag in the _supported_tags list is used + # as a priority, precompute a map from tag to index/priority to be + # used in wheel.find_most_preferred_tag. + self._wheel_tag_preferences = { + tag: idx for idx, tag in enumerate(supported_tags) + } + + def get_applicable_candidates( + self, + candidates: list[InstallationCandidate], + ) -> list[InstallationCandidate]: + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + candidates_and_versions = [(c, str(c.version)) for c in candidates] + versions = set( + specifier.filter( + (v for _, v in candidates_and_versions), + prereleases=allow_prereleases, + ) + ) + + applicable_candidates = [c for c, v in candidates_and_versions if v in versions] + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. + + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag: BuildTag = () + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + try: + pri = -( + wheel.find_most_preferred_tag( + valid_tags, self._wheel_tag_preferences + ) + ) + except ValueError: + raise UnsupportedWheel( + f"{wheel.filename} is not a supported wheel for this platform. It " + "can't be sorted." 
+ ) + if self._prefer_binary: + binary_preference = 1 + build_tag = wheel.build_tag + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. + return ( + has_allowed_hash, + yank_value, + binary_preference, + candidate.version, + pri, + build_tag, + ) + + def sort_best_candidate( + self, + candidates: list[InstallationCandidate], + ) -> InstallationCandidate | None: + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates: list[InstallationCandidate], + ) -> BestCandidateResult: + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder: + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector: LinkCollector, + target_python: TargetPython, + allow_yanked: bool, + format_control: FormatControl | None = None, + candidate_prefs: CandidatePreferences | None = None, + ignore_requires_python: bool | None = None, + ) -> None: + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links: set[tuple[Link, LinkType, str]] = set() + + # Cache of the result of finding candidates + self._all_candidates: dict[str, list[InstallationCandidate]] = {} + self._best_candidates: dict[ + tuple[str, specifiers.BaseSpecifier | None, Hashes | None], + BestCandidateResult, + ] = {} + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector: LinkCollector, + selection_prefs: SelectionPreferences, + target_python: TargetPython | None = None, + ) -> PackageFinder: + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. 
+ """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def target_python(self) -> TargetPython: + return self._target_python + + @property + def search_scope(self) -> SearchScope: + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope: SearchScope) -> None: + self._link_collector.search_scope = search_scope + + @property + def find_links(self) -> list[str]: + return self._link_collector.find_links + + @property + def index_urls(self) -> list[str]: + return self.search_scope.index_urls + + @property + def proxy(self) -> str | None: + return self._link_collector.session.pip_proxy + + @property + def trusted_hosts(self) -> Iterable[str]: + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def custom_cert(self) -> str | None: + # session.verify is either a boolean (use default bundle/no SSL + # verification) or a string path to a custom CA bundle to use. We only + # care about the latter. + verify = self._link_collector.session.verify + return verify if isinstance(verify, str) else None + + @property + def client_cert(self) -> str | None: + cert = self._link_collector.session.cert + assert not isinstance(cert, tuple), "pip only supports PEM client certs" + return cert + + @property + def allow_all_prereleases(self) -> bool: + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self) -> None: + self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self) -> bool: + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self) -> None: + self._candidate_prefs.prefer_binary = True + + def requires_python_skipped_reasons(self) -> list[str]: + reasons = { + detail + for _, result, detail in self._logged_links + if result == LinkType.requires_python_mismatch + } + return sorted(reasons) + + def make_link_evaluator(self, project_name: str) -> LinkEvaluator: + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links: Iterable[Link]) -> list[Link]: + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen: set[Link] = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None: + entry = (link, result, detail) + if entry not in self._logged_links: + # Put the link at the end so the reason is more visible and because + # the link string is usually very long. 
+ logger.debug("Skipping link: %s: %s", detail, link) + self._logged_links.add(entry) + + def get_install_candidate( + self, link_evaluator: LinkEvaluator, link: Link + ) -> InstallationCandidate | None: + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. + """ + result, detail = link_evaluator.evaluate_link(link) + if result != LinkType.candidate: + self._log_skipped_link(link, result, detail) + return None + + try: + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + version=detail, + ) + except InvalidVersion: + return None + + def evaluate_links( + self, link_evaluator: LinkEvaluator, links: Iterable[Link] + ) -> list[InstallationCandidate]: + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url( + self, project_url: Link, link_evaluator: LinkEvaluator + ) -> list[InstallationCandidate]: + logger.debug( + "Fetching project page and analyzing links: %s", + project_url, + ) + index_response = self._link_collector.fetch_response(project_url) + if index_response is None: + return [] + + page_links = list(parse_links(index_response)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + def find_all_candidates(self, project_name: str) -> list[InstallationCandidate]: + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. 
+ """ + if project_name in self._all_candidates: + return self._all_candidates[project_name] + + link_evaluator = self.make_link_evaluator(project_name) + + collected_sources = self._link_collector.collect_sources( + project_name=project_name, + candidates_from_page=functools.partial( + self.process_project_url, + link_evaluator=link_evaluator, + ), + ) + + page_candidates_it = itertools.chain.from_iterable( + source.page_candidates() + for sources in collected_sources + for source in sources + if source is not None + ) + page_candidates = list(page_candidates_it) + + file_links_it = itertools.chain.from_iterable( + source.file_links() + for sources in collected_sources + for source in sources + if source is not None + ) + file_candidates = self.evaluate_links( + link_evaluator, + sorted(file_links_it, reverse=True), + ) + + if logger.isEnabledFor(logging.DEBUG) and file_candidates: + paths = [] + for candidate in file_candidates: + assert candidate.link.url # we need to have a URL + try: + paths.append(candidate.link.file_path) + except Exception: + paths.append(candidate.link.url) # it's not a local file + + logger.debug("Local files found: %s", ", ".join(paths)) + + # This is an intentional priority ordering + self._all_candidates[project_name] = file_candidates + page_candidates + + return self._all_candidates[project_name] + + def make_candidate_evaluator( + self, + project_name: str, + specifier: specifiers.BaseSpecifier | None = None, + hashes: Hashes | None = None, + ) -> CandidateEvaluator: + """Create a CandidateEvaluator object to use.""" + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + def find_best_candidate( + self, + project_name: str, + specifier: specifiers.BaseSpecifier | None = None, + hashes: Hashes | None = None, + ) -> BestCandidateResult: + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + if (project_name, specifier, hashes) in self._best_candidates: + return self._best_candidates[project_name, specifier, hashes] + + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + self._best_candidates[project_name, specifier, hashes] = ( + candidate_evaluator.compute_best_candidate(candidates) + ) + + return self._best_candidates[project_name, specifier, hashes] + + def find_requirement( + self, req: InstallRequirement, upgrade: bool + ) -> InstallationCandidate | None: + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a InstallationCandidate if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + name = req.name + assert name is not None, "find_requirement() called with no name" + + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + name, + specifier=req.specifier, + hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version: _BaseVersion | None = None + if req.satisfied_by is not None: + installed_version = req.satisfied_by.version + + def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). + return ( + ", ".join( + sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + ) + ) + or "none" + ) + + if installed_version is None and best_candidate is None: + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req, + _format_versions(best_candidate_result.all_candidates), + ) + + raise DistributionNotFound(f"No matching distribution found for {req}") + + def _should_install_candidate( + candidate: InstallationCandidate | None, + ) -> TypeGuard[InstallationCandidate]: + if installed_version is None: + return True + if best_candidate is None: + return False + return best_candidate.version > installed_version + + if not upgrade and installed_version is not None: + if _should_install_candidate(best_candidate): + logger.debug( + "Existing installed version (%s) satisfies requirement " + "(most up-to-date version is %s)", + installed_version, + best_candidate.version, + ) + else: + logger.debug( + "Existing installed version (%s) is most up-to-date and " + "satisfies requirement", + installed_version, + ) + return None + + if _should_install_candidate(best_candidate): + logger.debug( + "Using version %s (newest of versions: %s)", + best_candidate.version, + _format_versions(best_candidate_result.applicable_candidates), + ) + return best_candidate + + # We have an existing version, and its the best version + logger.debug( + "Installed version (%s) is most up-to-date (past versions: %s)", + installed_version, + _format_versions(best_candidate_result.applicable_candidates), + ) + raise BestVersionAlreadyInstalled + + +def _find_name_version_sep(fragment: str, canonical_name: str) -> int: + """Find the separator's index based on the package's canonical name. + + :param fragment: A + filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. 
+
+    This function is needed since the canonicalized name does not necessarily
+    have the same length as the egg info's name part. An example::
+
+    >>> fragment = 'foo__bar-1.0'
+    >>> canonical_name = 'foo-bar'
+    >>> _find_name_version_sep(fragment, canonical_name)
+    8
+    """
+    # Project name and version must be separated by one single dash. Find all
+    # occurrences of dashes; if the string in front of it matches the canonical
+    # name, this is the one separating the name and version parts.
+    for i, c in enumerate(fragment):
+        if c != "-":
+            continue
+        if canonicalize_name(fragment[:i]) == canonical_name:
+            return i
+    raise ValueError(f"{fragment} does not match {canonical_name}")
+
+
+def _extract_version_from_fragment(fragment: str, canonical_name: str) -> str | None:
+    """Parse the version string from a <package>+<version> filename
+    "fragment" (stem) or egg fragment.
+
+    :param fragment: The string to parse. E.g. foo-2.1
+    :param canonical_name: The canonicalized name of the package this
+        belongs to.
+    """
+    try:
+        version_start = _find_name_version_sep(fragment, canonical_name) + 1
+    except ValueError:
+        return None
+    version = fragment[version_start:]
+    if not version:
+        return None
+    return version
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/index/sources.py b/venv/lib/python3.12/site-packages/pip/_internal/index/sources.py
new file mode 100644
index 0000000..c67c4d7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_internal/index/sources.py
@@ -0,0 +1,287 @@
+from __future__ import annotations
+
+import logging
+import mimetypes
+import os
+from collections import defaultdict
+from collections.abc import Iterable
+from typing import Callable
+
+from pip._vendor.packaging.utils import (
+    InvalidSdistFilename,
+    InvalidWheelFilename,
+    canonicalize_name,
+    parse_sdist_filename,
+    parse_wheel_filename,
+)
+
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._internal.vcs import is_url
+
+logger = logging.getLogger(__name__)
+
+FoundCandidates = Iterable[InstallationCandidate]
+FoundLinks = Iterable[Link]
+CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
+PageValidator = Callable[[Link], bool]
+
+
+class LinkSource:
+    @property
+    def link(self) -> Link | None:
+        """Returns the underlying link, if there's one."""
+        raise NotImplementedError()
+
+    def page_candidates(self) -> FoundCandidates:
+        """Candidates found by parsing an archive listing HTML file."""
+        raise NotImplementedError()
+
+    def file_links(self) -> FoundLinks:
+        """Links found by specifying archives directly."""
+        raise NotImplementedError()
+
+
+def _is_html_file(file_url: str) -> bool:
+    return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
+
+
+class _FlatDirectoryToUrls:
+    """Scans directory and caches results"""
+
+    def __init__(self, path: str) -> None:
+        self._path = path
+        self._page_candidates: list[str] = []
+        self._project_name_to_urls: dict[str, list[str]] = defaultdict(list)
+        self._scanned_directory = False
+
+    def _scan_directory(self) -> None:
+        """Scans directory once and populates both page_candidates
+        and project_name_to_urls at the same time
+        """
+        for entry in os.scandir(self._path):
+            url = path_to_url(entry.path)
+            if _is_html_file(url):
+                self._page_candidates.append(url)
+                continue
+
+            # File must have a valid wheel or sdist name,
+            # otherwise not worth considering as a package
+            try:
+                project_filename = parse_wheel_filename(entry.name)[0]
+            except InvalidWheelFilename:
+                try:
+                    project_filename = parse_sdist_filename(entry.name)[0]
+                except InvalidSdistFilename:
+                    continue
+
+            self._project_name_to_urls[project_filename].append(url)
+        self._scanned_directory = True
+
+    @property
+    def page_candidates(self) -> list[str]:
+        if not self._scanned_directory:
+            self._scan_directory()
+
+        return self._page_candidates
+
+    @property
+    def project_name_to_urls(self) -> dict[str, list[str]]:
+        if not self._scanned_directory:
+            self._scan_directory()
+
+        return self._project_name_to_urls
+
+
+class _FlatDirectorySource(LinkSource):
+    """Link source specified by ``--find-links=<path-to-dir>``.
+
+    This looks at the content of the directory, and returns:
+
+    * ``page_candidates``: Links listed on each HTML file in the directory.
+    * ``file_candidates``: Archives in the directory.
+    """
+
+    _paths_to_urls: dict[str, _FlatDirectoryToUrls] = {}
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        path: str,
+        project_name: str,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._project_name = canonicalize_name(project_name)
+
+        # Get existing instance of _FlatDirectoryToUrls if it exists
+        if path in self._paths_to_urls:
+            self._path_to_urls = self._paths_to_urls[path]
+        else:
+            self._path_to_urls = _FlatDirectoryToUrls(path=path)
+            self._paths_to_urls[path] = self._path_to_urls
+
+    @property
+    def link(self) -> Link | None:
+        return None
+
+    def page_candidates(self) -> FoundCandidates:
+        for url in self._path_to_urls.page_candidates:
+            yield from self._candidates_from_page(Link(url))
+
+    def file_links(self) -> FoundLinks:
+        for url in self._path_to_urls.project_name_to_urls[self._project_name]:
+            yield Link(url)
+
+
+class _LocalFileSource(LinkSource):
+    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.
+
+    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
+    the option, it is converted to a URL first. This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_candidates``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._link = link
+
+    @property
+    def link(self) -> Link | None:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not _is_html_file(self._link.url):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        if _is_html_file(self._link.url):
+            return
+        yield self._link
+
+
+class _RemoteFileSource(LinkSource):
+    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.
+
+    This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_candidates``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        page_validator: PageValidator,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._page_validator = page_validator
+        self._link = link
+
+    @property
+    def link(self) -> Link | None:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not self._page_validator(self._link):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        yield self._link
+
+
+class _IndexDirectorySource(LinkSource):
+    """``--[extra-]index-url=<path-to-directory>``.
+
+    This is treated like a remote URL; ``candidates_from_page`` contains logic
+    for this by appending ``index.html`` to the link.
+ """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Link | None: + return self._link + + def page_candidates(self) -> FoundCandidates: + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + return () + + +def build_source( + location: str, + *, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + expand_dir: bool, + cache_link_parsing: bool, + project_name: str, +) -> tuple[str | None, LinkSource | None]: + path: str | None = None + url: str | None = None + if os.path.exists(location): # Is a local path. + url = path_to_url(location) + path = location + elif location.startswith("file:"): # A file: URL. + url = location + path = url_to_path(location) + elif is_url(location): + url = location + + if url is None: + msg = ( + "Location '%s' is ignored: " + "it is either a non-existing path or lacks a specific scheme." + ) + logger.warning(msg, location) + return (None, None) + + if path is None: + source: LinkSource = _RemoteFileSource( + candidates_from_page=candidates_from_page, + page_validator=page_validator, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + + if os.path.isdir(path): + if expand_dir: + source = _FlatDirectorySource( + candidates_from_page=candidates_from_page, + path=path, + project_name=project_name, + ) + else: + source = _IndexDirectorySource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + elif os.path.isfile(path): + source = _LocalFileSource( + candidates_from_page=candidates_from_page, + link=Link(url, cache_link_parsing=cache_link_parsing), + ) + return (url, source) + logger.warning( + "Location '%s' is ignored: it is neither a file nor a directory.", + location, + ) + return (url, None) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py new file mode 100644 index 0000000..9f2c4fe --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/locations/__init__.py @@ -0,0 +1,441 @@ +from __future__ import annotations + +import functools +import logging +import os +import pathlib +import sys +import sysconfig +from typing import Any + +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.virtualenv import running_under_virtualenv + +from . import _sysconfig +from .base import ( + USER_CACHE_DIR, + get_major_minor_version, + get_src_prefix, + is_osx_framework, + site_packages, + user_site, +) + +__all__ = [ + "USER_CACHE_DIR", + "get_bin_prefix", + "get_bin_user", + "get_major_minor_version", + "get_platlib", + "get_purelib", + "get_scheme", + "get_src_prefix", + "site_packages", + "user_site", +] + + +logger = logging.getLogger(__name__) + + +_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib") + +_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10) + + +def _should_use_sysconfig() -> bool: + """This function determines the value of _USE_SYSCONFIG. + + By default, pip uses sysconfig on Python 3.10+. 
+    But Python distributors can override this decision by setting:
+        sysconfig._PIP_USE_SYSCONFIG = True / False
+    Rationale in https://github.com/pypa/pip/issues/10647
+
+    This is a function for testability, but should be constant during any one
+    run.
+    """
+    return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+if not _USE_SYSCONFIG:
+    # Import distutils lazily to avoid deprecation warnings,
+    # but import it soon enough that it is in memory and available during
+    # a pip reinstall.
+    from . import _distutils
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+    _MISMATCH_LEVEL = logging.WARNING
+else:
+    _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+    """The resolution to bpo-44860 will change this incorrect platlib.
+
+    See <https://bugs.python.org/issue44860>.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    try:
+        unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
+    except KeyError:
+        return False
+    return unix_user_platlib == "$usersite"
+
+
+def _looks_like_red_hat_patched_platlib_purelib(scheme: dict[str, str]) -> bool:
+    platlib = scheme["platlib"]
+    if "/$platlibdir/" in platlib:
+        platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
+    if "/lib64/" not in platlib:
+        return False
+    unpatched = platlib.replace("/lib64/", "/lib/")
+    return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
+
+
+@functools.cache
+def _looks_like_red_hat_lib() -> bool:
+    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
+
+    This is the only way I can see to tell a Red Hat-patched Python.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return all(
+        k in INSTALL_SCHEMES
+        and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
+        for k in ("unix_prefix", "unix_home")
+    )
+
+
+@functools.cache
+def _looks_like_debian_scheme() -> bool:
+    """Debian adds two additional schemes."""
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
+
+
+@functools.cache
+def _looks_like_red_hat_scheme() -> bool:
+    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
+
+    Red Hat's ``00251-change-user-install-location.patch`` changes the install
+    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
+    (fortunately?) done quite unconditionally, so we create a default command
+    object without any configuration to detect this.
+    """
+    from distutils.command.install import install
+    from distutils.dist import Distribution
+
+    cmd: Any = install(Distribution())
+    cmd.finalize_options()
+    return (
+        cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
+        and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
+    )
+
+
+@functools.cache
+def _looks_like_slackware_scheme() -> bool:
+    """Slackware patches sysconfig but fails to patch distutils and site.
+
+    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
+    path, but does not do the same to the site module.
+    """
+    if user_site is None:  # User-site not available.
+        return False
+    try:
+        paths = sysconfig.get_paths(scheme="posix_user", expand=False)
+    except KeyError:  # User-site not available.
+ return False + return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site + + +@functools.cache +def _looks_like_msys2_mingw_scheme() -> bool: + """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. + + However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is + likely going to be included in their 3.10 release, so we ignore the warning. + See msys2/MINGW-packages#9319. + + MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, + and is missing the final ``"site-packages"``. + """ + paths = sysconfig.get_paths("nt", expand=False) + return all( + "Lib" not in p and "lib" in p and not p.endswith("site-packages") + for p in (paths[key] for key in ("platlib", "purelib")) + ) + + +@functools.cache +def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None: + issue_url = "https://github.com/pypa/pip/issues/10151" + message = ( + "Value for %s does not match. Please report this to <%s>" + "\ndistutils: %s" + "\nsysconfig: %s" + ) + logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new) + + +def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: + if old == new: + return False + _warn_mismatched(old, new, key=key) + return True + + +@functools.cache +def _log_context( + *, + user: bool = False, + home: str | None = None, + root: str | None = None, + prefix: str | None = None, +) -> None: + parts = [ + "Additional context:", + "user = %r", + "home = %r", + "root = %r", + "prefix = %r", + ] + + logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix) + + +def get_scheme( + dist_name: str, + user: bool = False, + home: str | None = None, + root: str | None = None, + isolated: bool = False, + prefix: str | None = None, +) -> Scheme: + new = _sysconfig.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + if _USE_SYSCONFIG: + return new + + old = _distutils.get_scheme( + dist_name, + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + + warning_contexts = [] + for k in SCHEME_KEYS: + old_v = pathlib.Path(getattr(old, k)) + new_v = pathlib.Path(getattr(new, k)) + + if old_v == new_v: + continue + + # distutils incorrectly put PyPy packages under ``site-packages/python`` + # in the ``posix_home`` scheme, but PyPy devs said they expect the + # directory name to be ``pypy`` instead. So we treat this as a bug fix + # and not warn about it. See bpo-43307 and python/cpython#24628. + skip_pypy_special_case = ( + sys.implementation.name == "pypy" + and home is not None + and k in ("platlib", "purelib") + and old_v.parent == new_v.parent + and old_v.name.startswith("python") + and new_v.name.startswith("pypy") + ) + if skip_pypy_special_case: + continue + + # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in + # the ``include`` value, but distutils's ``headers`` does. We'll let + # CPython decide whether this is a bug or feature. See bpo-43948. + skip_osx_framework_user_special_case = ( + user + and is_osx_framework() + and k == "headers" + and old_v.parent.parent == new_v.parent + and old_v.parent.name.startswith("python") + ) + if skip_osx_framework_user_special_case: + continue + + # On Red Hat and derived Linux distributions, distutils is patched to + # use "lib64" instead of "lib" for platlib. 
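+        # [Editor's note: illustrative, not upstream pip. On such a distro one
+        # side of the comparison typically reports a path under .../lib64/...
+        # while the other reports .../lib/..., e.g. (values assumed):
+        #   /usr/lib64/python3.12/site-packages  vs.
+        #   /usr/lib/python3.12/site-packages
+        # a known-benign difference, hence the skip below.]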
+ if k == "platlib" and _looks_like_red_hat_lib(): + continue + + # On Python 3.9+, sysconfig's posix_user scheme sets platlib against + # sys.platlibdir, but distutils's unix_user incorrectly continues + # using the same $usersite for both platlib and purelib. This creates a + # mismatch when sys.platlibdir is not "lib". + skip_bpo_44860 = ( + user + and k == "platlib" + and not WINDOWS + and _PLATLIBDIR != "lib" + and _looks_like_bpo_44860() + ) + if skip_bpo_44860: + continue + + # Slackware incorrectly patches posix_user to use lib64 instead of lib, + # but not usersite to match the location. + skip_slackware_user_scheme = ( + user + and k in ("platlib", "purelib") + and not WINDOWS + and _looks_like_slackware_scheme() + ) + if skip_slackware_user_scheme: + continue + + # Both Debian and Red Hat patch Python to place the system site under + # /usr/local instead of /usr. Debian also places lib in dist-packages + # instead of site-packages, but the /usr/local check should cover it. + skip_linux_system_special_case = ( + not (user or home or prefix or running_under_virtualenv()) + and old_v.parts[1:3] == ("usr", "local") + and len(new_v.parts) > 1 + and new_v.parts[1] == "usr" + and (len(new_v.parts) < 3 or new_v.parts[2] != "local") + and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme()) + ) + if skip_linux_system_special_case: + continue + + # MSYS2 MINGW's sysconfig patch does not include the "site-packages" + # part of the path. This is incorrect and will be fixed in MSYS. + skip_msys2_mingw_bug = ( + WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme() + ) + if skip_msys2_mingw_bug: + continue + + # CPython's POSIX install script invokes pip (via ensurepip) against the + # interpreter located in the source tree, not the install site. This + # triggers special logic in sysconfig that's not present in distutils. + # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194 + skip_cpython_build = ( + sysconfig.is_python_build(check_home=True) + and not WINDOWS + and k in ("headers", "include", "platinclude") + ) + if skip_cpython_build: + continue + + warning_contexts.append((old_v, new_v, f"scheme.{k}")) + + if not warning_contexts: + return old + + # Check if this path mismatch is caused by distutils config files. Those + # files will no longer work once we switch to sysconfig, so this raises a + # deprecation message for them. + default_old = _distutils.distutils_scheme( + dist_name, + user, + home, + root, + isolated, + prefix, + ignore_config_files=True, + ) + if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS): + deprecated( + reason=( + "Configuring installation scheme with distutils config files " + "is deprecated and will no longer work in the near future. If you " + "are using a Homebrew or Linuxbrew Python, please see discussion " + "at https://github.com/Homebrew/homebrew-core/issues/76621" + ), + replacement=None, + gone_in=None, + ) + return old + + # Post warnings about this mismatch so user can report them back. 
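+    # [Editor's note: a hypothetical rendering of one such warning, using the
+    # message template from _warn_mismatched() above; the paths are made up:
+    #   Value for scheme.platlib does not match. Please report this to
+    #   <https://github.com/pypa/pip/issues/10151>
+    #   distutils: /usr/local/lib/python3.12/site-packages
+    #   sysconfig: /usr/lib/python3.12/site-packages]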
+ for old_v, new_v, key in warning_contexts: + _warn_mismatched(old_v, new_v, key=key) + _log_context(user=user, home=home, root=root, prefix=prefix) + + return old + + +def get_bin_prefix() -> str: + new = _sysconfig.get_bin_prefix() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_bin_prefix() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): + _log_context() + return old + + +def get_bin_user() -> str: + return _sysconfig.get_scheme("", user=True).scripts + + +def _looks_like_deb_system_dist_packages(value: str) -> bool: + """Check if the value is Debian's APT-controlled dist-packages. + + Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the + default package path controlled by APT, but does not patch ``sysconfig`` to + do the same. This is similar to the bug worked around in ``get_scheme()``, + but here the default is ``deb_system`` instead of ``unix_local``. Ultimately + we can't do anything about this Debian bug, and this detection allows us to + skip the warning when needed. + """ + if not _looks_like_debian_scheme(): + return False + if value == "/usr/lib/python3/dist-packages": + return True + return False + + +def get_purelib() -> str: + """Return the default pure-Python lib location.""" + new = _sysconfig.get_purelib() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_purelib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): + _log_context() + return old + + +def get_platlib() -> str: + """Return the default platform-shared lib location.""" + new = _sysconfig.get_platlib() + if _USE_SYSCONFIG: + return new + + from . import _distutils + + old = _distutils.get_platlib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): + _log_context() + return old diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..96e853b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc new file mode 100644 index 0000000..2a72312 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc new file mode 100644 index 0000000..fd5b3bb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..8b59f18 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/locations/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py 
b/venv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py new file mode 100644 index 0000000..28c066b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/locations/_distutils.py @@ -0,0 +1,173 @@ +"""Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +# If pip's going to use distutils, it should not be using the copy that setuptools +# might have injected into the environment. This is done by removing the injected +# shim, if it's injected. +# +# See https://github.com/pypa/pip/issues/8761 for the original discussion and +# rationale for why this is done within pip. +from __future__ import annotations + +try: + __import__("_distutils_hack").remove_shim() +except (ImportError, AttributeError): + pass + +import logging +import os +import sys +from distutils.cmd import Command as DistutilsCommand +from distutils.command.install import SCHEME_KEYS +from distutils.command.install import install as distutils_install_command +from distutils.sysconfig import get_python_lib + +from pip._internal.models.scheme import Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version + +logger = logging.getLogger(__name__) + + +def distutils_scheme( + dist_name: str, + user: bool = False, + home: str | None = None, + root: str | None = None, + isolated: bool = False, + prefix: str | None = None, + *, + ignore_config_files: bool = False, +) -> dict[str, str]: + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + dist_args: dict[str, str | list[str]] = {"name": dist_name} + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] + + d = Distribution(dist_args) + if not ignore_config_files: + try: + d.parse_config_files() + except UnicodeDecodeError: + paths = d.find_config_files() + logger.warning( + "Ignore distutils configs in %s due to encoding errors.", + ", ".join(os.path.basename(p) for p in paths), + ) + obj: DistutilsCommand | None = None + obj = d.get_command_obj("install", create=True) + assert obj is not None + i: distutils_install_command = obj + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), f"user={user} prefix={prefix}" + assert not (home and prefix), f"home={home} prefix={prefix}" + i.user = user or i.user + if user or home: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + + scheme: dict[str, str] = {} + for key in SCHEME_KEYS: + scheme[key] = getattr(i, "install_" + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). 
Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if "install_lib" in d.get_option_dict("install"): + scheme.update({"purelib": i.install_lib, "platlib": i.install_lib}) + + if running_under_virtualenv(): + if home: + prefix = home + elif user: + prefix = i.install_userbase + else: + prefix = i.prefix + scheme["headers"] = os.path.join( + prefix, + "include", + "site", + f"python{get_major_minor_version()}", + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join(root, path_no_drive[1:]) + + return scheme + + +def get_scheme( + dist_name: str, + user: bool = False, + home: str | None = None, + root: str | None = None, + isolated: bool = False, + prefix: str | None = None, +) -> Scheme: + """ + Get the "scheme" corresponding to the input parameters. The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) + + +def get_bin_prefix() -> str: + # XXX: In old virtualenv versions, sys.prefix can contain '..' components, + # so we need to call normpath to eliminate them. + prefix = os.path.normpath(sys.prefix) + if WINDOWS: + bin_py = os.path.join(prefix, "Scripts") + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(prefix, "bin") + return bin_py + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return os.path.join(prefix, "bin") + + +def get_purelib() -> str: + return get_python_lib(plat_specific=False) + + +def get_platlib() -> str: + return get_python_lib(plat_specific=True) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py b/venv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py new file mode 100644 index 0000000..d4a448e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/locations/_sysconfig.py @@ -0,0 +1,215 @@ +from __future__ import annotations + +import logging +import os +import sys +import sysconfig + +from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import change_root, get_major_minor_version, is_osx_framework + +logger = logging.getLogger(__name__) + + +# Notes on _infer_* functions. 
+# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
+# way to ask things like "what is the '_prefix' scheme on this platform". These
+# functions try to answer that with some heuristics while accounting for ad-hoc
+# platforms not covered by CPython's default sysconfig implementation. If the
+# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
+# a POSIX scheme.
+
+_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
+
+_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
+
+
+def _should_use_osx_framework_prefix() -> bool:
+    """Check for Apple's ``osx_framework_library`` scheme.
+
+    Python distributed by Apple's Command Line Tools has this special scheme
+    that's used when:
+
+    * This is a framework build.
+    * We are installing into the system prefix.
+
+    This does not account for ``pip install --prefix`` (also means we're not
+    installing to the system prefix), which should use ``posix_prefix``, but
+    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+    wouldn't be able to magically switch between ``osx_framework_library`` and
+    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+    means its behavior is consistent whether we use the stdlib implementation
+    or our own, and we deal with this special case in ``get_scheme()`` instead.
+    """
+    return (
+        "osx_framework_library" in _AVAILABLE_SCHEMES
+        and not running_under_virtualenv()
+        and is_osx_framework()
+    )
+
+
+def _infer_prefix() -> str:
+    """Try to find a prefix scheme for the current platform.
+
+    This tries:
+
+    * A special ``osx_framework_library`` for Python distributed by Apple's
+      Command Line Tools, when not running in a virtual environment.
+    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+    * Implementation without OS, used by PyPy on POSIX (``pypy``).
+    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+    * Just the OS name, used by CPython on Windows (``nt``).
+
+    If none of the above works, fall back to ``posix_prefix``.
+    """
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("prefix")
+    if _should_use_osx_framework_prefix():
+        return "osx_framework_library"
+    implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+    if implementation_suffixed in _AVAILABLE_SCHEMES:
+        return implementation_suffixed
+    if sys.implementation.name in _AVAILABLE_SCHEMES:
+        return sys.implementation.name
+    suffixed = f"{os.name}_prefix"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
+        return os.name
+    return "posix_prefix"
+
+
+def _infer_user() -> str:
+    """Try to find a user scheme for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("user")
+    if is_osx_framework() and not running_under_virtualenv():
+        suffixed = "osx_framework_user"
+    else:
+        suffixed = f"{os.name}_user"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
+ raise UserInstallationInvalid() + return "posix_user" + + +def _infer_home() -> str: + """Try to find a home for the current platform.""" + if _PREFERRED_SCHEME_API: + return _PREFERRED_SCHEME_API("home") + suffixed = f"{os.name}_home" + if suffixed in _AVAILABLE_SCHEMES: + return suffixed + return "posix_home" + + +# Update these keys if the user sets a custom home. +_HOME_KEYS = [ + "installed_base", + "base", + "installed_platbase", + "platbase", + "prefix", + "exec_prefix", +] +if sysconfig.get_config_var("userbase") is not None: + _HOME_KEYS.append("userbase") + + +def get_scheme( + dist_name: str, + user: bool = False, + home: str | None = None, + root: str | None = None, + isolated: bool = False, + prefix: str | None = None, +) -> Scheme: + """ + Get the "scheme" corresponding to the input parameters. + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme + :param root: root under which other directories are re-based + :param isolated: ignored, but kept for distutils compatibility (where + this controls whether the user-site pydistutils.cfg is honored) + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + if user and prefix: + raise InvalidSchemeCombination("--user", "--prefix") + if home and prefix: + raise InvalidSchemeCombination("--home", "--prefix") + + if home is not None: + scheme_name = _infer_home() + elif user: + scheme_name = _infer_user() + else: + scheme_name = _infer_prefix() + + # Special case: When installing into a custom prefix, use posix_prefix + # instead of osx_framework_library. See _should_use_osx_framework_prefix() + # docstring for details. + if prefix is not None and scheme_name == "osx_framework_library": + scheme_name = "posix_prefix" + + if home is not None: + variables = {k: home for k in _HOME_KEYS} + elif prefix is not None: + variables = {k: prefix for k in _HOME_KEYS} + else: + variables = {} + + paths = sysconfig.get_paths(scheme=scheme_name, vars=variables) + + # Logic here is very arbitrary, we're doing it for compatibility, don't ask. + # 1. Pip historically uses a special header path in virtual environments. + # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We + # only do the same when not running in a virtual environment because + # pip's historical header path logic (see point 1) did not do this. + if running_under_virtualenv(): + if user: + base = variables.get("userbase", sys.prefix) + else: + base = variables.get("base", sys.prefix) + python_xy = f"python{get_major_minor_version()}" + paths["include"] = os.path.join(base, "include", "site", python_xy) + elif not dist_name: + dist_name = "UNKNOWN" + + scheme = Scheme( + platlib=paths["platlib"], + purelib=paths["purelib"], + headers=os.path.join(paths["include"], dist_name), + scripts=paths["scripts"], + data=paths["data"], + ) + if root is not None: + converted_keys = {} + for key in SCHEME_KEYS: + converted_keys[key] = change_root(root, getattr(scheme, key)) + scheme = Scheme(**converted_keys) + return scheme + + +def get_bin_prefix() -> str: + # Forcing to use /usr/local/bin for standard macOS framework installs. 
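+    # (A descriptive note, not from upstream: the /System/Library check below
+    # matches only Apple's own framework build of Python, whose default script
+    # directory is not intended for user-installed scripts.)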
+ if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return sysconfig.get_paths()["scripts"] + + +def get_purelib() -> str: + return sysconfig.get_paths()["purelib"] + + +def get_platlib() -> str: + return sysconfig.get_paths()["platlib"] diff --git a/venv/lib/python3.12/site-packages/pip/_internal/locations/base.py b/venv/lib/python3.12/site-packages/pip/_internal/locations/base.py new file mode 100644 index 0000000..17cd0e8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/locations/base.py @@ -0,0 +1,82 @@ +from __future__ import annotations + +import functools +import os +import site +import sys +import sysconfig + +from pip._internal.exceptions import InstallationError +from pip._internal.utils import appdirs +from pip._internal.utils.virtualenv import running_under_virtualenv + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + +# FIXME doesn't account for venv linked to global site-packages +site_packages: str = sysconfig.get_path("purelib") + + +def get_major_minor_version() -> str: + """ + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". + """ + return "{}.{}".format(*sys.version_info) + + +def change_root(new_root: str, pathname: str) -> str: + """Return 'pathname' with 'new_root' prepended. + + If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname). + Otherwise, it requires making 'pathname' relative and then joining the + two, which is tricky on DOS/Windows and Mac OS. + + This is borrowed from Python's standard library's distutils module. + """ + if os.name == "posix": + if not os.path.isabs(pathname): + return os.path.join(new_root, pathname) + else: + return os.path.join(new_root, pathname[1:]) + + elif os.name == "nt": + (drive, path) = os.path.splitdrive(pathname) + if path[0] == "\\": + path = path[1:] + return os.path.join(new_root, path) + + else: + raise InstallationError( + f"Unknown platform: {os.name}\n" + "Can not change root path prefix on unknown platform." + ) + + +def get_src_prefix() -> str: + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, "src") + else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), "src") + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit("The folder you are executing pip from can no longer be found.") + + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) + + +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site: str | None = site.getusersitepackages() +except AttributeError: + user_site = site.USER_SITE + + +@functools.cache +def is_osx_framework() -> bool: + return bool(sysconfig.get_config_var("PYTHONFRAMEWORK")) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/main.py b/venv/lib/python3.12/site-packages/pip/_internal/main.py new file mode 100644 index 0000000..ec52c4e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/main.py @@ -0,0 +1,12 @@ +from __future__ import annotations + + +def main(args: list[str] | None = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py new file mode 100644 index 0000000..1c24efc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__init__.py @@ -0,0 +1,169 @@ +from __future__ import annotations + +import contextlib +import functools +import os +import sys +from typing import TYPE_CHECKING, Literal, Protocol, cast + +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.misc import strtobool + +from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel + +if TYPE_CHECKING: + from pip._vendor.packaging.utils import NormalizedName + +__all__ = [ + "BaseDistribution", + "BaseEnvironment", + "FilesystemWheel", + "MemoryWheel", + "Wheel", + "get_default_environment", + "get_environment", + "get_wheel_distribution", + "select_backend", +] + + +def _should_use_importlib_metadata() -> bool: + """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend. + + By default, pip uses ``importlib.metadata`` on Python 3.11+, and + ``pkg_resources`` otherwise. Up to Python 3.13, This can be + overridden by a couple of ways: + + * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it + dictates whether ``importlib.metadata`` is used, for Python <3.14. + * On Python 3.11, 3.12 and 3.13, Python distributors can patch + ``importlib.metadata`` to add a global constant + ``_PIP_USE_IMPORTLIB_METADATA = False``. This makes pip use + ``pkg_resources`` (unless the user set the aforementioned environment + variable to *True*). + + On Python 3.14+, the ``pkg_resources`` backend cannot be used. + """ + if sys.version_info >= (3, 14): + # On Python >=3.14 we only support importlib.metadata. + return True + with contextlib.suppress(KeyError, ValueError): + # On Python <3.14, if the environment variable is set, we obey what it says. + return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"])) + if sys.version_info < (3, 11): + # On Python <3.11, we always use pkg_resources, unless the environment + # variable was set. + return False + # On Python 3.11, 3.12 and 3.13, we check if the global constant is set. + import importlib.metadata + + return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True)) + + +def _emit_pkg_resources_deprecation_if_needed() -> None: + if sys.version_info < (3, 11): + # All pip versions supporting Python<=3.11 will support pkg_resources, + # and pkg_resources is the default for these, so let's not bother users. + return + + import importlib.metadata + + if hasattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA"): + # The Python distributor has set the global constant, so we don't + # warn, since it is not a user decision. + return + + # The user has decided to use pkg_resources, so we warn. + deprecated( + reason="Using the pkg_resources metadata backend is deprecated.", + replacement=( + "to use the default importlib.metadata backend, " + "by unsetting the _PIP_USE_IMPORTLIB_METADATA environment variable" + ), + gone_in="26.3", + issue=13317, + ) + + +class Backend(Protocol): + NAME: Literal["importlib", "pkg_resources"] + Distribution: type[BaseDistribution] + Environment: type[BaseEnvironment] + + +@functools.cache +def select_backend() -> Backend: + if _should_use_importlib_metadata(): + from . 
import importlib + + return cast(Backend, importlib) + + _emit_pkg_resources_deprecation_if_needed() + + from . import pkg_resources + + return cast(Backend, pkg_resources) + + +def get_default_environment() -> BaseEnvironment: + """Get the default representation for the current environment. + + This returns an Environment instance from the chosen backend. The default + Environment instance should be built from ``sys.path`` and may use caching + to share instance state across calls. + """ + return select_backend().Environment.default() + + +def get_environment(paths: list[str] | None) -> BaseEnvironment: + """Get a representation of the environment specified by ``paths``. + + This returns an Environment instance from the chosen backend based on the + given import paths. The backend must build a fresh instance representing + the state of installed distributions when this function is called. + """ + return select_backend().Environment.from_paths(paths) + + +def get_directory_distribution(directory: str) -> BaseDistribution: + """Get the distribution metadata representation in the specified directory. + + This returns a Distribution instance from the chosen backend based on + the given on-disk ``.dist-info`` directory. + """ + return select_backend().Distribution.from_directory(directory) + + +def get_wheel_distribution( + wheel: Wheel, canonical_name: NormalizedName +) -> BaseDistribution: + """Get the representation of the specified wheel's distribution metadata. + + This returns a Distribution instance from the chosen backend based on + the given wheel's ``.dist-info`` directory. + + :param canonical_name: Normalized project name of the given wheel. + """ + return select_backend().Distribution.from_wheel(wheel, canonical_name) + + +def get_metadata_distribution( + metadata_contents: bytes, + filename: str, + canonical_name: str, +) -> BaseDistribution: + """Get the dist representation of the specified METADATA file contents. + + This returns a Distribution instance from the chosen backend sourced from the data + in `metadata_contents`. + + :param metadata_contents: Contents of a METADATA file within a dist, or one served + via PEP 658. + :param filename: Filename for the dist this metadata represents. + :param canonical_name: Normalized project name of the given dist. 
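+
+    For illustration: an index implementing PEP 658 serves a dist's METADATA
+    as a sidecar file (the dist's file name plus ``.metadata``), and those
+    bytes can be fed here to resolve against the dist before downloading it.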
+ """ + return select_backend().Distribution.from_metadata_file_contents( + metadata_contents, + filename, + canonical_name, + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..bc08144 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc new file mode 100644 index 0000000..26f082e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..369518b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc new file mode 100644 index 0000000..9c1ac34 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py new file mode 100644 index 0000000..b39ac05 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/_json.py @@ -0,0 +1,87 @@ +# Extracted from https://github.com/pfmoore/pkg_metadata +from __future__ import annotations + +from email.header import Header, decode_header, make_header +from email.message import Message +from typing import Any, cast + +METADATA_FIELDS = [ + # Name, Multiple-Use + ("Metadata-Version", False), + ("Name", False), + ("Version", False), + ("Dynamic", True), + ("Platform", True), + ("Supported-Platform", True), + ("Summary", False), + ("Description", False), + ("Description-Content-Type", False), + ("Keywords", False), + ("Home-page", False), + ("Download-URL", False), + ("Author", False), + ("Author-email", False), + ("Maintainer", False), + ("Maintainer-email", False), + ("License", False), + ("License-Expression", False), + ("License-File", True), + ("Classifier", True), + ("Requires-Dist", True), + ("Requires-Python", False), + ("Requires-External", True), + ("Project-URL", True), + ("Provides-Extra", True), + ("Provides-Dist", True), + ("Obsoletes-Dist", True), +] + + +def json_name(field: str) -> str: + return field.lower().replace("-", "_") + + +def msg_to_json(msg: Message) -> dict[str, Any]: + """Convert a Message object into a JSON-compatible dictionary.""" + + def sanitise_header(h: Header | str) -> str: + if isinstance(h, Header): + chunks = [] + for bytes, encoding in decode_header(h): + if encoding == "unknown-8bit": + try: + # See if UTF-8 works + bytes.decode("utf-8") + encoding = "utf-8" + except UnicodeDecodeError: + # If not, latin1 at least won't fail + encoding = "latin1" + chunks.append((bytes, encoding)) + return str(make_header(chunks)) + return str(h) + + result = {} + for 
field, multi in METADATA_FIELDS: + if field not in msg: + continue + key = json_name(field) + if multi: + value: str | list[str] = [ + sanitise_header(v) for v in msg.get_all(field) # type: ignore + ] + else: + value = sanitise_header(msg.get(field)) # type: ignore + if key == "keywords": + # Accept both comma-separated and space-separated + # forms, for better compatibility with old data. + if "," in value: + value = [v.strip() for v in value.split(",")] + else: + value = value.split() + result[key] = value + + payload = cast(str, msg.get_payload()) + if payload: + result["description"] = payload + + return result diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/base.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/base.py new file mode 100644 index 0000000..230e114 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/base.py @@ -0,0 +1,685 @@ +from __future__ import annotations + +import csv +import email.message +import functools +import json +import logging +import pathlib +import re +import zipfile +from collections.abc import Collection, Container, Iterable, Iterator +from typing import ( + IO, + Any, + NamedTuple, + Protocol, + Union, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import NoneMetadataError +from pip._internal.locations import site_packages, user_site +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + DirectUrl, + DirectUrlValidationError, +) +from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. +from pip._internal.utils.egg_link import egg_link_path_from_sys_path +from pip._internal.utils.misc import is_local, normalize_path +from pip._internal.utils.urls import url_to_path + +from ._json import msg_to_json + +InfoPath = Union[str, pathlib.PurePath] + +logger = logging.getLogger(__name__) + + +class BaseEntryPoint(Protocol): + @property + def name(self) -> str: + raise NotImplementedError() + + @property + def value(self) -> str: + raise NotImplementedError() + + @property + def group(self) -> str: + raise NotImplementedError() + + +def _convert_installed_files_path( + entry: tuple[str, ...], + info: tuple[str, ...], +) -> str: + """Convert a legacy installed-files.txt path into modern RECORD path. + + The legacy format stores paths relative to the info directory, while the + modern format stores paths relative to the package root, e.g. the + site-packages directory. + + :param entry: Path parts of the installed-files.txt entry. + :param info: Path parts of the egg-info directory relative to package root. + :returns: The converted entry. + + For best compatibility with symlinks, this does not use ``abspath()`` or + ``Path.resolve()``, but tries to work with path parts: + + 1. While ``entry`` starts with ``..``, remove the equal amounts of parts + from ``info``; if ``info`` is empty, start appending ``..`` instead. + 2. Join the two directly. 
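+
+    For example (an illustrative trace): with ``info=("foo.egg-info",)`` and
+    ``entry=("..", "foo", "__init__.py")``, step 1 cancels the ``..`` against
+    ``foo.egg-info``, and step 2 joins the remainder into the RECORD-style
+    path ``foo/__init__.py``.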
+ """ + while entry and entry[0] == "..": + if not info or info[-1] == "..": + info += ("..",) + else: + info = info[:-1] + entry = entry[1:] + return str(pathlib.Path(*info, *entry)) + + +class RequiresEntry(NamedTuple): + requirement: str + extra: str + marker: str + + +class BaseDistribution(Protocol): + @classmethod + def from_directory(cls, directory: str) -> BaseDistribution: + """Load the distribution from a metadata directory. + + :param directory: Path to a metadata directory, e.g. ``.dist-info``. + """ + raise NotImplementedError() + + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + """Load the distribution from the contents of a METADATA file. + + This is used to implement PEP 658 by generating a "shallow" dist object that can + be used for resolution without downloading or building the actual dist yet. + + :param metadata_contents: The contents of a METADATA file. + :param filename: File name for the dist with this metadata. + :param project_name: Name of the project this dist represents. + """ + raise NotImplementedError() + + @classmethod + def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: + """Load the distribution from a given wheel. + + :param wheel: A concrete wheel definition. + :param name: File name of the wheel. + + :raises InvalidWheel: Whenever loading of the wheel causes a + :py:exc:`zipfile.BadZipFile` exception to be thrown. + :raises UnsupportedWheel: If the wheel is a valid zip, but malformed + internally. + """ + raise NotImplementedError() + + def __repr__(self) -> str: + return f"{self.raw_name} {self.raw_version} ({self.location})" + + def __str__(self) -> str: + return f"{self.raw_name} {self.raw_version}" + + @property + def location(self) -> str | None: + """Where the distribution is loaded from. + + A string value is not necessarily a filesystem path, since distributions + can be loaded from other sources, e.g. arbitrary zip archives. ``None`` + means the distribution is created in-memory. + + Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If + this is a symbolic link, we want to preserve the relative path between + it and files in the distribution. + """ + raise NotImplementedError() + + @property + def editable_project_location(self) -> str | None: + """The project location for editable distributions. + + This is the directory where pyproject.toml or setup.py is located. + None if the distribution is not installed in editable mode. + """ + # TODO: this property is relatively costly to compute, memoize it ? + direct_url = self.direct_url + if direct_url: + if direct_url.is_local_editable(): + return url_to_path(direct_url.url) + else: + # Search for an .egg-link file by walking sys.path, as it was + # done before by dist_is_editable(). + egg_link_path = egg_link_path_from_sys_path(self.raw_name) + if egg_link_path: + # TODO: get project location from second line of egg_link file + # (https://github.com/pypa/pip/issues/10243) + return self.location + return None + + @property + def installed_location(self) -> str | None: + """The distribution's "installed" location. + + This should generally be a ``site-packages`` directory. This is + usually ``dist.location``, except for legacy develop-installed packages, + where ``dist.location`` is the source code location, and this is where + the ``.egg-link`` file is. + + The returned location is normalized (in particular, with symlinks removed). 
+ """ + raise NotImplementedError() + + @property + def info_location(self) -> str | None: + """Location of the .[egg|dist]-info directory or file. + + Similarly to ``location``, a string value is not necessarily a + filesystem path. ``None`` means the distribution is created in-memory. + + For a modern .dist-info installation on disk, this should be something + like ``{location}/{raw_name}-{version}.dist-info``. + + Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If + this is a symbolic link, we want to preserve the relative path between + it and other files in the distribution. + """ + raise NotImplementedError() + + @property + def installed_by_distutils(self) -> bool: + """Whether this distribution is installed with legacy distutils format. + + A distribution installed with "raw" distutils not patched by setuptools + uses one single file at ``info_location`` to store metadata. We need to + treat this specially on uninstallation. + """ + info_location = self.info_location + if not info_location: + return False + return pathlib.Path(info_location).is_file() + + @property + def installed_as_egg(self) -> bool: + """Whether this distribution is installed as an egg. + + This usually indicates the distribution was installed by (older versions + of) easy_install. + """ + location = self.location + if not location: + return False + # XXX if the distribution is a zipped egg, location has a trailing / + # so we resort to pathlib.Path to check the suffix in a reliable way. + return pathlib.Path(location).suffix == ".egg" + + @property + def installed_with_setuptools_egg_info(self) -> bool: + """Whether this distribution is installed with the ``.egg-info`` format. + + This usually indicates the distribution was installed with setuptools + with an old pip version or with ``single-version-externally-managed``. + + Note that this ensure the metadata store is a directory. distutils can + also installs an ``.egg-info``, but as a file, not a directory. This + property is *False* for that case. Also see ``installed_by_distutils``. + """ + info_location = self.info_location + if not info_location: + return False + if not info_location.endswith(".egg-info"): + return False + return pathlib.Path(info_location).is_dir() + + @property + def installed_with_dist_info(self) -> bool: + """Whether this distribution is installed with the "modern format". + + This indicates a "modern" installation, e.g. storing metadata in the + ``.dist-info`` directory. This applies to installations made by + setuptools (but through pip, not directly), or anything using the + standardized build backend interface (PEP 517). + """ + info_location = self.info_location + if not info_location: + return False + if not info_location.endswith(".dist-info"): + return False + return pathlib.Path(info_location).is_dir() + + @property + def canonical_name(self) -> NormalizedName: + raise NotImplementedError() + + @property + def version(self) -> Version: + raise NotImplementedError() + + @property + def raw_version(self) -> str: + raise NotImplementedError() + + @property + def setuptools_filename(self) -> str: + """Convert a project name to its setuptools-compatible filename. + + This is a copy of ``pkg_resources.to_filename()`` for compatibility. + """ + return self.raw_name.replace("-", "_") + + @property + def direct_url(self) -> DirectUrl | None: + """Obtain a DirectUrl from this distribution. + + Returns None if the distribution has no `direct_url.json` metadata, + or if `direct_url.json` is invalid. 
+ """ + try: + content = self.read_text(DIRECT_URL_METADATA_NAME) + except FileNotFoundError: + return None + try: + return DirectUrl.from_json(content) + except ( + UnicodeDecodeError, + json.JSONDecodeError, + DirectUrlValidationError, + ) as e: + logger.warning( + "Error parsing %s for %s: %s", + DIRECT_URL_METADATA_NAME, + self.canonical_name, + e, + ) + return None + + @property + def installer(self) -> str: + try: + installer_text = self.read_text("INSTALLER") + except (OSError, ValueError, NoneMetadataError): + return "" # Fail silently if the installer file cannot be read. + for line in installer_text.splitlines(): + cleaned_line = line.strip() + if cleaned_line: + return cleaned_line + return "" + + @property + def requested(self) -> bool: + return self.is_file("REQUESTED") + + @property + def editable(self) -> bool: + return bool(self.editable_project_location) + + @property + def local(self) -> bool: + """If distribution is installed in the current virtual environment. + + Always True if we're not in a virtualenv. + """ + if self.installed_location is None: + return False + return is_local(self.installed_location) + + @property + def in_usersite(self) -> bool: + if self.installed_location is None or user_site is None: + return False + return self.installed_location.startswith(normalize_path(user_site)) + + @property + def in_site_packages(self) -> bool: + if self.installed_location is None or site_packages is None: + return False + return self.installed_location.startswith(normalize_path(site_packages)) + + def is_file(self, path: InfoPath) -> bool: + """Check whether an entry in the info directory is a file.""" + raise NotImplementedError() + + def iter_distutils_script_names(self) -> Iterator[str]: + """Find distutils 'scripts' entries metadata. + + If 'scripts' is supplied in ``setup.py``, distutils records those in the + installed distribution's ``scripts`` directory, a file for each script. + """ + raise NotImplementedError() + + def read_text(self, path: InfoPath) -> str: + """Read a file in the info directory. + + :raise FileNotFoundError: If ``path`` does not exist in the directory. + :raise NoneMetadataError: If ``path`` exists in the info directory, but + cannot be read. + """ + raise NotImplementedError() + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + raise NotImplementedError() + + def _metadata_impl(self) -> email.message.Message: + raise NotImplementedError() + + @functools.cached_property + def metadata(self) -> email.message.Message: + """Metadata of distribution parsed from e.g. METADATA or PKG-INFO. + + This should return an empty message if the metadata file is unavailable. + + :raises NoneMetadataError: If the metadata file is available, but does + not contain valid metadata. + """ + metadata = self._metadata_impl() + self._add_egg_info_requires(metadata) + return metadata + + @property + def metadata_dict(self) -> dict[str, Any]: + """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO. + + This should return an empty dict if the metadata file is unavailable. + + :raises NoneMetadataError: If the metadata file is available, but does + not contain valid metadata. 
+ """ + return msg_to_json(self.metadata) + + @property + def metadata_version(self) -> str | None: + """Value of "Metadata-Version:" in distribution metadata, if available.""" + return self.metadata.get("Metadata-Version") + + @property + def raw_name(self) -> str: + """Value of "Name:" in distribution metadata.""" + # The metadata should NEVER be missing the Name: key, but if it somehow + # does, fall back to the known canonical name. + return self.metadata.get("Name", self.canonical_name) + + @property + def requires_python(self) -> SpecifierSet: + """Value of "Requires-Python:" in distribution metadata. + + If the key does not exist or contains an invalid value, an empty + SpecifierSet should be returned. + """ + value = self.metadata.get("Requires-Python") + if value is None: + return SpecifierSet() + try: + # Convert to str to satisfy the type checker; this can be a Header object. + spec = SpecifierSet(str(value)) + except InvalidSpecifier as e: + message = "Package %r has an invalid Requires-Python: %s" + logger.warning(message, self.raw_name, e) + return SpecifierSet() + return spec + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + """Dependencies of this distribution. + + For modern .dist-info distributions, this is the collection of + "Requires-Dist:" entries in distribution metadata. + """ + raise NotImplementedError() + + def iter_raw_dependencies(self) -> Iterable[str]: + """Raw Requires-Dist metadata.""" + return self.metadata.get_all("Requires-Dist", []) + + def iter_provided_extras(self) -> Iterable[NormalizedName]: + """Extras provided by this distribution. + + For modern .dist-info distributions, this is the collection of + "Provides-Extra:" entries in distribution metadata. + + The return value of this function is expected to be normalised names, + per PEP 685, with the returned value being handled appropriately by + `iter_dependencies`. + """ + raise NotImplementedError() + + def _iter_declared_entries_from_record(self) -> Iterator[str] | None: + try: + text = self.read_text("RECORD") + except FileNotFoundError: + return None + # This extra Path-str cast normalizes entries. + return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines())) + + def _iter_declared_entries_from_legacy(self) -> Iterator[str] | None: + try: + text = self.read_text("installed-files.txt") + except FileNotFoundError: + return None + paths = (p for p in text.splitlines(keepends=False) if p) + root = self.location + info = self.info_location + if root is None or info is None: + return paths + try: + info_rel = pathlib.Path(info).relative_to(root) + except ValueError: # info is not relative to root. + return paths + if not info_rel.parts: # info *is* root. + return paths + return ( + _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts) + for p in paths + ) + + def iter_declared_entries(self) -> Iterator[str] | None: + """Iterate through file entries declared in this distribution. + + For modern .dist-info distributions, this is the files listed in the + ``RECORD`` metadata file. For legacy setuptools distributions, this + comes from ``installed-files.txt``, with entries normalized to be + compatible with the format used by ``RECORD``. + + :return: An iterator for listed entries, or None if the distribution + contains neither ``RECORD`` nor ``installed-files.txt``. 
+ """ + return ( + self._iter_declared_entries_from_record() + or self._iter_declared_entries_from_legacy() + ) + + def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]: + """Parse a ``requires.txt`` in an egg-info directory. + + This is an INI-ish format where an egg-info stores dependencies. A + section name describes extra other environment markers, while each entry + is an arbitrary string (not a key-value pair) representing a dependency + as a requirement string (no markers). + + There is a construct in ``importlib.metadata`` called ``Sectioned`` that + does mostly the same, but the format is currently considered private. + """ + try: + content = self.read_text("requires.txt") + except FileNotFoundError: + return + extra = marker = "" # Section-less entries don't have markers. + for line in content.splitlines(): + line = line.strip() + if not line or line.startswith("#"): # Comment; ignored. + continue + if line.startswith("[") and line.endswith("]"): # A section header. + extra, _, marker = line.strip("[]").partition(":") + continue + yield RequiresEntry(requirement=line, extra=extra, marker=marker) + + def _iter_egg_info_extras(self) -> Iterable[str]: + """Get extras from the egg-info directory.""" + known_extras = {""} + for entry in self._iter_requires_txt_entries(): + extra = canonicalize_name(entry.extra) + if extra in known_extras: + continue + known_extras.add(extra) + yield extra + + def _iter_egg_info_dependencies(self) -> Iterable[str]: + """Get distribution dependencies from the egg-info directory. + + To ease parsing, this converts a legacy dependency entry into a PEP 508 + requirement string. Like ``_iter_requires_txt_entries()``, there is code + in ``importlib.metadata`` that does mostly the same, but not do exactly + what we need. + + Namely, ``importlib.metadata`` does not normalize the extra name before + putting it into the requirement string, which causes marker comparison + to fail because the dist-info format do normalize. This is consistent in + all currently available PEP 517 backends, although not standardized. + """ + for entry in self._iter_requires_txt_entries(): + extra = canonicalize_name(entry.extra) + if extra and entry.marker: + marker = f'({entry.marker}) and extra == "{extra}"' + elif extra: + marker = f'extra == "{extra}"' + elif entry.marker: + marker = entry.marker + else: + marker = "" + if marker: + yield f"{entry.requirement} ; {marker}" + else: + yield entry.requirement + + def _add_egg_info_requires(self, metadata: email.message.Message) -> None: + """Add egg-info requires.txt information to the metadata.""" + if not metadata.get_all("Requires-Dist"): + for dep in self._iter_egg_info_dependencies(): + metadata["Requires-Dist"] = dep + if not metadata.get_all("Provides-Extra"): + for extra in self._iter_egg_info_extras(): + metadata["Provides-Extra"] = extra + + +class BaseEnvironment: + """An environment containing distributions to introspect.""" + + @classmethod + def default(cls) -> BaseEnvironment: + raise NotImplementedError() + + @classmethod + def from_paths(cls, paths: list[str] | None) -> BaseEnvironment: + raise NotImplementedError() + + def get_distribution(self, name: str) -> BaseDistribution | None: + """Given a requirement name, return the installed distributions. + + The name may not be normalized. The implementation must canonicalize + it for lookup. + """ + raise NotImplementedError() + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + """Iterate through installed distributions. 
+ + This function should be implemented by subclass, but never called + directly. Use the public ``iter_distribution()`` instead, which + implements additional logic to make sure the distributions are valid. + """ + raise NotImplementedError() + + def iter_all_distributions(self) -> Iterator[BaseDistribution]: + """Iterate through all installed distributions without any filtering.""" + for dist in self._iter_distributions(): + # Make sure the distribution actually comes from a valid Python + # packaging distribution. Pip's AdjacentTempDirectory leaves folders + # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The + # valid project name pattern is taken from PEP 508. + project_name_valid = re.match( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", + dist.canonical_name, + flags=re.IGNORECASE, + ) + if not project_name_valid: + logger.warning( + "Ignoring invalid distribution %s (%s)", + dist.canonical_name, + dist.location, + ) + continue + yield dist + + def iter_installed_distributions( + self, + local_only: bool = True, + skip: Container[str] = stdlib_pkgs, + include_editables: bool = True, + editables_only: bool = False, + user_only: bool = False, + ) -> Iterator[BaseDistribution]: + """Return a list of installed distributions. + + This is based on ``iter_all_distributions()`` with additional filtering + options. Note that ``iter_installed_distributions()`` without arguments + is *not* equal to ``iter_all_distributions()``, since some of the + configurations exclude packages by default. + + :param local_only: If True (default), only return installations + local to the current virtualenv, if in a virtualenv. + :param skip: An iterable of canonicalized project names to ignore; + defaults to ``stdlib_pkgs``. + :param include_editables: If False, don't report editables. + :param editables_only: If True, only report editables. + :param user_only: If True, only report installations in the user + site directory. 
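+
+        For example, calling this with ``local_only=False, user_only=True``
+        reports only user-site installations from every search path, while
+        still skipping names in ``stdlib_pkgs``.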
+ """ + it = self.iter_all_distributions() + if local_only: + it = (d for d in it if d.local) + if not include_editables: + it = (d for d in it if not d.editable) + if editables_only: + it = (d for d in it if d.editable) + if user_only: + it = (d for d in it if d.in_usersite) + return (d for d in it if d.canonical_name not in skip) + + +class Wheel(Protocol): + location: str + + def as_zipfile(self) -> zipfile.ZipFile: + raise NotImplementedError() + + +class FilesystemWheel(Wheel): + def __init__(self, location: str) -> None: + self.location = location + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.location, allowZip64=True) + + +class MemoryWheel(Wheel): + def __init__(self, location: str, stream: IO[bytes]) -> None: + self.location = location + self.stream = stream + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.stream, allowZip64=True) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py new file mode 100644 index 0000000..a779138 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__init__.py @@ -0,0 +1,6 @@ +from ._dists import Distribution +from ._envs import Environment + +__all__ = ["NAME", "Distribution", "Environment"] + +NAME = "importlib" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3d3a34a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc new file mode 100644 index 0000000..ec8f016 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc new file mode 100644 index 0000000..ef6425c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc new file mode 100644 index 0000000..9230e87 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py new file mode 100644 index 0000000..7de614d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_compat.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +import importlib.metadata +import os +from typing import Any, Protocol, cast + +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + + +class BadMetadata(ValueError): + def 
__init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None: + self.dist = dist + self.reason = reason + + def __str__(self) -> str: + return f"Bad metadata in {self.dist} ({self.reason})" + + +class BasePath(Protocol): + """A protocol that various path objects conform. + + This exists because importlib.metadata uses both ``pathlib.Path`` and + ``zipfile.Path``, and we need a common base for type hints (Union does not + work well since ``zipfile.Path`` is too new for our linter setup). + + This does not mean to be exhaustive, but only contains things that present + in both classes *that we need*. + """ + + @property + def name(self) -> str: + raise NotImplementedError() + + @property + def parent(self) -> BasePath: + raise NotImplementedError() + + +def get_info_location(d: importlib.metadata.Distribution) -> BasePath | None: + """Find the path to the distribution's metadata directory. + + HACK: This relies on importlib.metadata's private ``_path`` attribute. Not + all distributions exist on disk, so importlib.metadata is correct to not + expose the attribute as public. But pip's code base is old and not as clean, + so we do this to avoid having to rewrite too many things. Hopefully we can + eliminate this some day. + """ + return getattr(d, "_path", None) + + +def parse_name_and_version_from_info_directory( + dist: importlib.metadata.Distribution, +) -> tuple[str | None, str | None]: + """Get a name and version from the metadata directory name. + + This is much faster than reading distribution metadata. + """ + info_location = get_info_location(dist) + if info_location is None: + return None, None + + stem, suffix = os.path.splitext(info_location.name) + if suffix == ".dist-info": + name, sep, version = stem.partition("-") + if sep: + return name, version + + if suffix == ".egg-info": + name = stem.split("-", 1)[0] + return name, None + + return None, None + + +def get_dist_canonical_name(dist: importlib.metadata.Distribution) -> NormalizedName: + """Get the distribution's normalized name. + + The ``name`` attribute is only available in Python 3.10 or later. We are + targeting exactly that, but Mypy does not know this. 
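+
+    For example, a distribution named ``Foo.Bar_baz`` canonicalizes to
+    ``foo-bar-baz``.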
+ """ + if name := parse_name_and_version_from_info_directory(dist)[0]: + return canonicalize_name(name) + + name = cast(Any, dist).name + if not isinstance(name, str): + raise BadMetadata(dist, reason="invalid metadata entry 'name'") + return canonicalize_name(name) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py new file mode 100644 index 0000000..89364b8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_dists.py @@ -0,0 +1,229 @@ +from __future__ import annotations + +import email.message +import importlib.metadata +import pathlib +import zipfile +from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence +from os import PathLike +from typing import ( + cast, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import InvalidWheel, UnsupportedWheel +from pip._internal.metadata.base import ( + BaseDistribution, + BaseEntryPoint, + InfoPath, + Wheel, +) +from pip._internal.utils.misc import normalize_path +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file + +from ._compat import ( + BadMetadata, + BasePath, + get_dist_canonical_name, + parse_name_and_version_from_info_directory, +) + + +class WheelDistribution(importlib.metadata.Distribution): + """An ``importlib.metadata.Distribution`` read from a wheel. + + Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``, + its implementation is too "lazy" for pip's needs (we can't keep the ZipFile + handle open for the entire lifetime of the distribution object). + + This implementation eagerly reads the entire metadata directory into the + memory instead, and operates from that. + """ + + def __init__( + self, + files: Mapping[pathlib.PurePosixPath, bytes], + info_location: pathlib.PurePosixPath, + ) -> None: + self._files = files + self.info_location = info_location + + @classmethod + def from_zipfile( + cls, + zf: zipfile.ZipFile, + name: str, + location: str, + ) -> WheelDistribution: + info_dir, _ = parse_wheel(zf, name) + paths = ( + (name, pathlib.PurePosixPath(name.split("/", 1)[-1])) + for name in zf.namelist() + if name.startswith(f"{info_dir}/") + ) + files = { + relpath: read_wheel_metadata_file(zf, fullpath) + for fullpath, relpath in paths + } + info_location = pathlib.PurePosixPath(location, info_dir) + return cls(files, info_location) + + def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: + # Only allow iterating through the metadata directory. 
+ if pathlib.PurePosixPath(str(path)) in self._files: + return iter(self._files) + raise FileNotFoundError(path) + + def read_text(self, filename: str) -> str | None: + try: + data = self._files[pathlib.PurePosixPath(filename)] + except KeyError: + return None + try: + text = data.decode("utf-8") + except UnicodeDecodeError as e: + wheel = self.info_location.parent + error = f"Error decoding metadata for {wheel}: {e} in {filename} file" + raise UnsupportedWheel(error) + return text + + def locate_file(self, path: str | PathLike[str]) -> pathlib.Path: + # This method doesn't make sense for our in-memory wheel, but the API + # requires us to define it. + raise NotImplementedError + + +class Distribution(BaseDistribution): + def __init__( + self, + dist: importlib.metadata.Distribution, + info_location: BasePath | None, + installed_location: BasePath | None, + ) -> None: + self._dist = dist + self._info_location = info_location + self._installed_location = installed_location + + @classmethod + def from_directory(cls, directory: str) -> BaseDistribution: + info_location = pathlib.Path(directory) + dist = importlib.metadata.Distribution.at(info_location) + return cls(dist, info_location, info_location.parent) + + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + # Generate temp dir to contain the metadata file, and write the file contents. + temp_dir = pathlib.Path( + TempDirectory(kind="metadata", globally_managed=True).path + ) + metadata_path = temp_dir / "METADATA" + metadata_path.write_bytes(metadata_contents) + # Construct dist pointing to the newly created directory. + dist = importlib.metadata.Distribution.at(metadata_path.parent) + return cls(dist, metadata_path.parent, None) + + @classmethod + def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: + try: + with wheel.as_zipfile() as zf: + dist = WheelDistribution.from_zipfile(zf, name, wheel.location) + except zipfile.BadZipFile as e: + raise InvalidWheel(wheel.location, name) from e + return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location)) + + @property + def location(self) -> str | None: + if self._info_location is None: + return None + return str(self._info_location.parent) + + @property + def info_location(self) -> str | None: + if self._info_location is None: + return None + return str(self._info_location) + + @property + def installed_location(self) -> str | None: + if self._installed_location is None: + return None + return normalize_path(str(self._installed_location)) + + @property + def canonical_name(self) -> NormalizedName: + return get_dist_canonical_name(self._dist) + + @property + def version(self) -> Version: + try: + version = ( + parse_name_and_version_from_info_directory(self._dist)[1] + or self._dist.version + ) + return parse_version(version) + except TypeError: + raise BadMetadata(self._dist, reason="invalid metadata entry `version`") + + @property + def raw_version(self) -> str: + return self._dist.version + + def is_file(self, path: InfoPath) -> bool: + return self._dist.read_text(str(path)) is not None + + def iter_distutils_script_names(self) -> Iterator[str]: + # A distutils installation is always "flat" (not in e.g. egg form), so + # if this distribution's info location is NOT a pathlib.Path (but e.g. + # zipfile.Path), it can never contain any distutils scripts. 
+        if not isinstance(self._info_location, pathlib.Path):
+            return
+        for child in self._info_location.joinpath("scripts").iterdir():
+            yield child.name
+
+    def read_text(self, path: InfoPath) -> str:
+        content = self._dist.read_text(str(path))
+        if content is None:
+            raise FileNotFoundError(path)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
+        return self._dist.entry_points
+
+    def _metadata_impl(self) -> email.message.Message:
+        # From Python 3.10+, importlib.metadata declares PackageMetadata as the
+        # return type. This protocol is unfortunately a disaster now and misses
+        # a ton of fields that we need, including get() and get_payload(). We
+        # rely on the implementation that the object is actually a Message now,
+        # until upstream can improve the protocol. (python/cpython#94952)
+        return cast(email.message.Message, self._dist.metadata)
+
+    def iter_provided_extras(self) -> Iterable[NormalizedName]:
+        return [
+            canonicalize_name(extra)
+            for extra in self.metadata.get_all("Provides-Extra", [])
+        ]
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        contexts: Sequence[dict[str, str]] = [{"extra": e} for e in extras]
+        for req_string in self.metadata.get_all("Requires-Dist", []):
+            # strip() because email.message.Message.get_all() may return a leading \n
+            # in case a long header was wrapped.
+            req = get_requirement(req_string.strip())
+            if not req.marker:
+                yield req
+            elif not extras and req.marker.evaluate({"extra": ""}):
+                yield req
+            elif any(req.marker.evaluate(context) for context in contexts):
+                yield req
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py
new file mode 100644
index 0000000..71a73b7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/importlib/_envs.py
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+import importlib.metadata
+import logging
+import os
+import pathlib
+import sys
+import zipfile
+from collections.abc import Iterator, Sequence
+from typing import Optional
+
+from pip._vendor.packaging.utils import (
+    InvalidWheelFilename,
+    NormalizedName,
+    canonicalize_name,
+    parse_wheel_filename,
+)
+
+from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+
+from ._compat import BadMetadata, BasePath, get_dist_canonical_name, get_info_location
+from ._dists import Distribution
+
+logger = logging.getLogger(__name__)
+
+
+def _looks_like_wheel(location: str) -> bool:
+    if not location.endswith(WHEEL_EXTENSION):
+        return False
+    if not os.path.isfile(location):
+        return False
+    try:
+        parse_wheel_filename(os.path.basename(location))
+    except InvalidWheelFilename:
+        return False
+    return zipfile.is_zipfile(location)
+
+
+class _DistributionFinder:
+    """Finder to locate distributions.
+
+    The main purpose of this class is to memoize found distributions' names, so
+    only one distribution is returned for each package name. A lot of pip code
+    assumes this (because it is setuptools's behavior), and not doing the same
+    can potentially cause a distribution in a lower precedence path to override
+    a higher precedence one if the caller is not careful.
+
+    Eventually we probably want to make it possible to see lower precedence
+    installations as well. It's a useful feature, after all.
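+
+    For example, if ``foo`` (an illustrative name) exists on two search path
+    entries, only the copy found first is ever yielded, because the name is
+    memoized in ``_found_names``.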
+ """ + + FoundResult = tuple[importlib.metadata.Distribution, Optional[BasePath]] + + def __init__(self) -> None: + self._found_names: set[NormalizedName] = set() + + def _find_impl(self, location: str) -> Iterator[FoundResult]: + """Find distributions in a location.""" + # Skip looking inside a wheel. Since a package inside a wheel is not + # always valid (due to .data directories etc.), its .dist-info entry + # should not be considered an installed distribution. + if _looks_like_wheel(location): + return + # To know exactly where we find a distribution, we have to feed in the + # paths one by one, instead of dumping the list to importlib.metadata. + for dist in importlib.metadata.distributions(path=[location]): + info_location = get_info_location(dist) + try: + name = get_dist_canonical_name(dist) + except BadMetadata as e: + logger.warning("Skipping %s due to %s", info_location, e.reason) + continue + if name in self._found_names: + continue + self._found_names.add(name) + yield dist, info_location + + def find(self, location: str) -> Iterator[BaseDistribution]: + """Find distributions in a location. + + The path can be either a directory, or a ZIP archive. + """ + for dist, info_location in self._find_impl(location): + if info_location is None: + installed_location: BasePath | None = None + else: + installed_location = info_location.parent + yield Distribution(dist, info_location, installed_location) + + def find_legacy_editables(self, location: str) -> Iterator[BaseDistribution]: + """Read location in egg-link files and return distributions in there. + + The path should be a directory; otherwise this returns nothing. This + follows how setuptools does this for compatibility. The first non-empty + line in the egg-link is read as a path (resolved against the egg-link's + containing directory if relative). Distributions found at that linked + location are returned. 
+ """ + path = pathlib.Path(location) + if not path.is_dir(): + return + for child in path.iterdir(): + if child.suffix != ".egg-link": + continue + with child.open() as f: + lines = (line.strip() for line in f) + target_rel = next((line for line in lines if line), "") + if not target_rel: + continue + target_location = str(path.joinpath(target_rel)) + for dist, info_location in self._find_impl(target_location): + yield Distribution(dist, info_location, path) + + +class Environment(BaseEnvironment): + def __init__(self, paths: Sequence[str]) -> None: + self._paths = paths + + @classmethod + def default(cls) -> BaseEnvironment: + return cls(sys.path) + + @classmethod + def from_paths(cls, paths: list[str] | None) -> BaseEnvironment: + if paths is None: + return cls(sys.path) + return cls(paths) + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + finder = _DistributionFinder() + for location in self._paths: + yield from finder.find(location) + yield from finder.find_legacy_editables(location) + + def get_distribution(self, name: str) -> BaseDistribution | None: + canonical_name = canonicalize_name(name) + matches = ( + distribution + for distribution in self.iter_all_distributions() + if distribution.canonical_name == canonical_name + ) + return next(matches, None) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py b/venv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py new file mode 100644 index 0000000..89fce8b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/metadata/pkg_resources.py @@ -0,0 +1,298 @@ +from __future__ import annotations + +import email.message +import email.parser +import logging +import os +import zipfile +from collections.abc import Collection, Iterable, Iterator, Mapping +from typing import ( + NamedTuple, +) + +from pip._vendor import pkg_resources +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.misc import display_path, normalize_path +from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file + +from .base import ( + BaseDistribution, + BaseEntryPoint, + BaseEnvironment, + InfoPath, + Wheel, +) + +__all__ = ["NAME", "Distribution", "Environment"] + +logger = logging.getLogger(__name__) + +NAME = "pkg_resources" + + +class EntryPoint(NamedTuple): + name: str + value: str + group: str + + +class InMemoryMetadata: + """IMetadataProvider that reads metadata files from a dictionary. + + This also maps metadata decoding exceptions to our internal exception type. + """ + + def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None: + self._metadata = metadata + self._wheel_name = wheel_name + + def has_metadata(self, name: str) -> bool: + return name in self._metadata + + def get_metadata(self, name: str) -> str: + try: + return self._metadata[name].decode() + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. 
+ raise UnsupportedWheel( + f"Error decoding metadata for {self._wheel_name}: {e} in {name} file" + ) + + def get_metadata_lines(self, name: str) -> Iterable[str]: + return pkg_resources.yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name: str) -> bool: + return False + + def metadata_listdir(self, name: str) -> list[str]: + return [] + + def run_script(self, script_name: str, namespace: str) -> None: + pass + + +class Distribution(BaseDistribution): + def __init__(self, dist: pkg_resources.Distribution) -> None: + self._dist = dist + # This is populated lazily, to avoid loading metadata for all possible + # distributions eagerly. + self.__extra_mapping: Mapping[NormalizedName, str] | None = None + + @property + def _extra_mapping(self) -> Mapping[NormalizedName, str]: + if self.__extra_mapping is None: + self.__extra_mapping = { + canonicalize_name(extra): extra for extra in self._dist.extras + } + + return self.__extra_mapping + + @classmethod + def from_directory(cls, directory: str) -> BaseDistribution: + dist_dir = directory.rstrip(os.sep) + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + # Determine the correct Distribution object type. + if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + dist_name = os.path.splitext(dist_dir_name)[0] + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0] + + dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata) + return cls(dist) + + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + metadata_dict = { + "METADATA": metadata_contents, + } + dist = pkg_resources.DistInfoDistribution( + location=filename, + metadata=InMemoryMetadata(metadata_dict, filename), + project_name=project_name, + ) + return cls(dist) + + @classmethod + def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: + try: + with wheel.as_zipfile() as zf: + info_dir, _ = parse_wheel(zf, name) + metadata_dict = { + path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path) + for path in zf.namelist() + if path.startswith(f"{info_dir}/") + } + except zipfile.BadZipFile as e: + raise InvalidWheel(wheel.location, name) from e + except UnsupportedWheel as e: + raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") + dist = pkg_resources.DistInfoDistribution( + location=wheel.location, + metadata=InMemoryMetadata(metadata_dict, wheel.location), + project_name=name, + ) + return cls(dist) + + @property + def location(self) -> str | None: + return self._dist.location + + @property + def installed_location(self) -> str | None: + egg_link = egg_link_path_from_location(self.raw_name) + if egg_link: + location = egg_link + elif self.location: + location = self.location + else: + return None + return normalize_path(location) + + @property + def info_location(self) -> str | None: + return self._dist.egg_info + + @property + def installed_by_distutils(self) -> bool: + # A distutils-installed distribution is provided by FileMetadata. This + # provider has a "path" attribute not present anywhere else. Not the + # best introspection logic, but pip has been doing this for a long time. 
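For context, a hedged sketch of the from_wheel() extraction above: collect every file under the wheel's .dist-info directory into a {name: bytes} mapping. The wheel path is hypothetical, and .dist-info discovery is simplified to a name scan of a well-formed wheel (pip's parse_wheel() is stricter):

import zipfile

def wheel_metadata_files(wheel_path: str) -> dict[str, bytes]:
    with zipfile.ZipFile(wheel_path) as zf:
        # Assume exactly one top-level *.dist-info directory exists.
        info_dir = next(
            name.split("/", 1)[0]
            for name in zf.namelist()
            if name.split("/", 1)[0].endswith(".dist-info")
        )
        return {
            name.split("/", 1)[-1]: zf.read(name)
            for name in zf.namelist()
            if name.startswith(f"{info_dir}/")
        }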
+ try: + return bool(self._dist._provider.path) + except AttributeError: + return False + + @property + def canonical_name(self) -> NormalizedName: + return canonicalize_name(self._dist.project_name) + + @property + def version(self) -> Version: + return parse_version(self._dist.version) + + @property + def raw_version(self) -> str: + return self._dist.version + + def is_file(self, path: InfoPath) -> bool: + return self._dist.has_metadata(str(path)) + + def iter_distutils_script_names(self) -> Iterator[str]: + yield from self._dist.metadata_listdir("scripts") + + def read_text(self, path: InfoPath) -> str: + name = str(path) + if not self._dist.has_metadata(name): + raise FileNotFoundError(name) + content = self._dist.get_metadata(name) + if content is None: + raise NoneMetadataError(self, name) + return content + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + for group, entries in self._dist.get_entry_map().items(): + for name, entry_point in entries.items(): + name, _, value = str(entry_point).partition("=") + yield EntryPoint(name=name.strip(), value=value.strip(), group=group) + + def _metadata_impl(self) -> email.message.Message: + """ + :raises NoneMetadataError: if the distribution reports `has_metadata()` + True but `get_metadata()` returns None. + """ + if isinstance(self._dist, pkg_resources.DistInfoDistribution): + metadata_name = "METADATA" + else: + metadata_name = "PKG-INFO" + try: + metadata = self.read_text(metadata_name) + except FileNotFoundError: + if self.location: + displaying_path = display_path(self.location) + else: + displaying_path = repr(self.location) + logger.warning("No metadata found in %s", displaying_path) + metadata = "" + feed_parser = email.parser.FeedParser() + feed_parser.feed(metadata) + return feed_parser.close() + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + if extras: + relevant_extras = set(self._extra_mapping) & set( + map(canonicalize_name, extras) + ) + extras = [self._extra_mapping[extra] for extra in relevant_extras] + return self._dist.requires(extras) + + def iter_provided_extras(self) -> Iterable[NormalizedName]: + return self._extra_mapping.keys() + + +class Environment(BaseEnvironment): + def __init__(self, ws: pkg_resources.WorkingSet) -> None: + self._ws = ws + + @classmethod + def default(cls) -> BaseEnvironment: + return cls(pkg_resources.working_set) + + @classmethod + def from_paths(cls, paths: list[str] | None) -> BaseEnvironment: + return cls(pkg_resources.WorkingSet(paths)) + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + for dist in self._ws: + yield Distribution(dist) + + def _search_distribution(self, name: str) -> BaseDistribution | None: + """Find a distribution matching the ``name`` in the environment. + + This searches from *all* distributions available in the environment, to + match the behavior of ``pkg_resources.get_distribution()``. + """ + canonical_name = canonicalize_name(name) + for dist in self.iter_all_distributions(): + if dist.canonical_name == canonical_name: + return dist + return None + + def get_distribution(self, name: str) -> BaseDistribution | None: + # Search the distribution by looking through the working set. + dist = self._search_distribution(name) + if dist: + return dist + + # If distribution could not be found, call working_set.require to + # update the working set, and try to find the distribution again. + # This might happen for e.g. 
when you install a package twice, once + using setup.py develop and again using setup.py install. Now when + running pip uninstall twice, the package gets removed from the + working set in the first uninstall, so we have to populate the + working set again so that pip knows about it and the package gets + picked up and is successfully uninstalled the second time too. + try: + # We didn't pass in any version specifiers, so this can never + # raise pkg_resources.VersionConflict. + self._ws.require(name) + except pkg_resources.DistributionNotFound: + return None + return self._search_distribution(name) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/models/__init__.py new file mode 100644 index 0000000..7b1fc29 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/__init__.py @@ -0,0 +1 @@ +"""A package that contains models that represent entities.""" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..486a6e4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc new file mode 100644 index 0000000..eeaa005 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/candidate.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc new file mode 100644 index 0000000..39f0001 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc new file mode 100644 index 0000000..21f4cc4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/format_control.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc new file mode 100644 index 0000000..87f1865 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/index.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc new file mode 100644 index 0000000..0faea85 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc new file mode 100644 index 0000000..b5239d2 Binary files /dev/null and
b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/link.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc new file mode 100644 index 0000000..d1e7934 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/pylock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc new file mode 100644 index 0000000..c53eb03 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/scheme.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc new file mode 100644 index 0000000..99a6192 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc new file mode 100644 index 0000000..936a112 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc new file mode 100644 index 0000000..0deff6e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/target_python.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..67b31ff Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/models/__pycache__/wheel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/candidate.py b/venv/lib/python3.12/site-packages/pip/_internal/models/candidate.py new file mode 100644 index 0000000..f27f283 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/candidate.py @@ -0,0 +1,25 @@ +from dataclasses import dataclass + +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.models.link import Link + + +@dataclass(frozen=True) +class InstallationCandidate: + """Represents a potential "candidate" for installation.""" + + __slots__ = ["name", "version", "link"] + + name: str + version: Version + link: Link + + def __init__(self, name: str, version: str, link: Link) -> None: + object.__setattr__(self, "name", name) + object.__setattr__(self, "version", parse_version(version)) + object.__setattr__(self, "link", link) + + def __str__(self) -> str: + return f"{self.name!r} candidate (version {self.version} at {self.link})" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py b/venv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py new file mode 100644 
index 0000000..aefc670 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/direct_url.py @@ -0,0 +1,227 @@ +"""PEP 610""" + +from __future__ import annotations + +import json +import re +import urllib.parse +from collections.abc import Iterable +from dataclasses import dataclass +from typing import Any, ClassVar, TypeVar, Union + +__all__ = [ + "DirectUrl", + "DirectUrlValidationError", + "DirInfo", + "ArchiveInfo", + "VcsInfo", +] + +T = TypeVar("T") + +DIRECT_URL_METADATA_NAME = "direct_url.json" +ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$") + + +class DirectUrlValidationError(Exception): + pass + + +def _get( + d: dict[str, Any], expected_type: type[T], key: str, default: T | None = None +) -> T | None: + """Get value from dictionary and verify expected type.""" + if key not in d: + return default + value = d[key] + if not isinstance(value, expected_type): + raise DirectUrlValidationError( + f"{value!r} has unexpected type for {key} (expected {expected_type})" + ) + return value + + +def _get_required( + d: dict[str, Any], expected_type: type[T], key: str, default: T | None = None +) -> T: + value = _get(d, expected_type, key, default) + if value is None: + raise DirectUrlValidationError(f"{key} must have a value") + return value + + +def _exactly_one_of(infos: Iterable[InfoType | None]) -> InfoType: + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + return infos[0] + + +def _filter_none(**kwargs: Any) -> dict[str, Any]: + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +@dataclass +class VcsInfo: + name: ClassVar = "vcs_info" + + vcs: str + commit_id: str + requested_revision: str | None = None + + @classmethod + def _from_dict(cls, d: dict[str, Any] | None) -> VcsInfo | None: + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + ) + + def _to_dict(self) -> dict[str, Any]: + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + commit_id=self.commit_id, + ) + + +class ArchiveInfo: + name = "archive_info" + + def __init__( + self, + hash: str | None = None, + hashes: dict[str, str] | None = None, + ) -> None: + # set hashes before hash, since the hash setter will further populate hashes + self.hashes = hashes + self.hash = hash + + @property + def hash(self) -> str | None: + return self._hash + + @hash.setter + def hash(self, value: str | None) -> None: + if value is not None: + # Auto-populate the hashes key to upgrade to the new format automatically. + # We don't back-populate the legacy hash key from hashes. 
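The auto-upgrade this comment describes can be sketched independently of the class: a legacy PEP 610 archive_info "hash" value of the form name=value is folded into the newer "hashes" mapping without clobbering an existing digest. The helper name is illustrative:

def upgrade_legacy_hash(hash_field: str, hashes: dict[str, str] | None) -> dict[str, str]:
    name, sep, value = hash_field.partition("=")
    if not sep:
        raise ValueError(f"invalid archive_info.hash format: {hash_field!r}")
    merged = dict(hashes or {})
    merged.setdefault(name, value)  # never back-populate over an existing digest
    return merged

print(upgrade_legacy_hash("sha256=abc123", None))  # {'sha256': 'abc123'}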
+ try: + hash_name, hash_value = value.split("=", 1) + except ValueError: + raise DirectUrlValidationError( + f"invalid archive_info.hash format: {value!r}" + ) + if self.hashes is None: + self.hashes = {hash_name: hash_value} + elif hash_name not in self.hashes: + self.hashes = self.hashes.copy() + self.hashes[hash_name] = hash_value + self._hash = value + + @classmethod + def _from_dict(cls, d: dict[str, Any] | None) -> ArchiveInfo | None: + if d is None: + return None + return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes")) + + def _to_dict(self) -> dict[str, Any]: + return _filter_none(hash=self.hash, hashes=self.hashes) + + +@dataclass +class DirInfo: + name: ClassVar = "dir_info" + + editable: bool = False + + @classmethod + def _from_dict(cls, d: dict[str, Any] | None) -> DirInfo | None: + if d is None: + return None + return cls(editable=_get_required(d, bool, "editable", default=False)) + + def _to_dict(self) -> dict[str, Any]: + return _filter_none(editable=self.editable or None) + + +InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +@dataclass +class DirectUrl: + url: str + info: InfoType + subdirectory: str | None = None + + def _remove_auth_from_netloc(self, netloc: str) -> str: + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) + and self.info.vcs == "git" + and user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self) -> str: + """url with user:password part removed unless it is formed with + environment variables as specified in PEP 610, or it is ``git`` + in the case of a git URL. + """ + purl = urllib.parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib.parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self) -> None: + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d: dict[str, Any]) -> DirectUrl: + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self) -> dict[str, Any]: + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s: str) -> DirectUrl: + return cls.from_dict(json.loads(s)) + + def to_json(self) -> str: + return json.dumps(self.to_dict(), sort_keys=True) + + def is_local_editable(self) -> bool: + return isinstance(self.info, DirInfo) and self.info.editable diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/format_control.py b/venv/lib/python3.12/site-packages/pip/_internal/models/format_control.py new file mode 100644 index 0000000..9f07e3f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/format_control.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import CommandError + + +class FormatControl: + """Helper for managing formats from which a package can be installed.""" + + __slots__ = ["no_binary", "only_binary"] + + def __init__( + self, + no_binary: set[str] | None = None, + only_binary: set[str] | None = None, 
+ ) -> None: + if no_binary is None: + no_binary = set() + if only_binary is None: + only_binary = set() + + self.no_binary = no_binary + self.only_binary = only_binary + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})" + + @staticmethod + def handle_mutual_excludes(value: str, target: set[str], other: set[str]) -> None: + if value.startswith("-"): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." + ) + new = value.split(",") + while ":all:" in new: + other.clear() + target.clear() + target.add(":all:") + del new[: new.index(":all:") + 1] + # Without a none, we want to discard everything as :all: covers it + if ":none:" not in new: + return + for name in new: + if name == ":none:": + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + def get_allowed_formats(self, canonical_name: str) -> frozenset[str]: + result = {"binary", "source"} + if canonical_name in self.only_binary: + result.discard("source") + elif canonical_name in self.no_binary: + result.discard("binary") + elif ":all:" in self.only_binary: + result.discard("source") + elif ":all:" in self.no_binary: + result.discard("binary") + return frozenset(result) + + def disallow_binaries(self) -> None: + self.handle_mutual_excludes( + ":all:", + self.no_binary, + self.only_binary, + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/index.py b/venv/lib/python3.12/site-packages/pip/_internal/models/index.py new file mode 100644 index 0000000..b94c325 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/index.py @@ -0,0 +1,28 @@ +import urllib.parse + + +class PackageIndex: + """Represents a Package Index and provides easier access to endpoints""" + + __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"] + + def __init__(self, url: str, file_storage_domain: str) -> None: + super().__init__() + self.url = url + self.netloc = urllib.parse.urlsplit(url).netloc + self.simple_url = self._url_for_path("simple") + self.pypi_url = self._url_for_path("pypi") + + # This is part of a temporary hack used to block installs of PyPI + # packages which depend on external urls only necessary until PyPI can + # block such packages themselves + self.file_storage_domain = file_storage_domain + + def _url_for_path(self, path: str) -> str: + return urllib.parse.urljoin(self.url, path) + + +PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org") +TestPyPI = PackageIndex( + "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org" +) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py b/venv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py new file mode 100644 index 0000000..3e8e968 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/installation_report.py @@ -0,0 +1,57 @@ +from collections.abc import Sequence +from typing import Any + +from pip._vendor.packaging.markers import default_environment + +from pip import __version__ +from pip._internal.req.req_install import InstallRequirement + + +class InstallationReport: + def __init__(self, install_requirements: 
Sequence[InstallRequirement]): + self._install_requirements = install_requirements + + @classmethod + def _install_req_to_dict(cls, ireq: InstallRequirement) -> dict[str, Any]: + assert ireq.download_info, f"No download_info for {ireq}" + res = { + # PEP 610 json for the download URL. download_info.archive_info.hashes may + # be absent when the requirement was installed from the wheel cache + # and the cache entry was populated by an older pip version that did not + # record origin.json. + "download_info": ireq.download_info.to_dict(), + # is_direct is true if the requirement was a direct URL reference (which + # includes editable requirements), and false if the requirement was + # downloaded from a PEP 503 index or --find-links. + "is_direct": ireq.is_direct, + # is_yanked is true if the requirement was yanked from the index, but + # was still selected by pip to conform to PEP 592. + "is_yanked": ireq.link.is_yanked if ireq.link else False, + # requested is true if the requirement was specified by the user (aka + # top level requirement), and false if it was installed as a dependency of a + # requirement. https://peps.python.org/pep-0376/#requested + "requested": ireq.user_supplied, + # PEP 566 json encoding for metadata + # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata + "metadata": ireq.get_dist().metadata_dict, + } + if ireq.user_supplied and ireq.extras: + # For top level requirements, the list of requested extras, if any. + res["requested_extras"] = sorted(ireq.extras) + return res + + def to_dict(self) -> dict[str, Any]: + return { + "version": "1", + "pip_version": __version__, + "install": [ + self._install_req_to_dict(ireq) for ireq in self._install_requirements + ], + # https://peps.python.org/pep-0508/#environment-markers + # TODO: currently, the resolver uses the default environment to evaluate + # environment markers, so that is what we report here. In the future, it + # should also take into account options such as --python-version or + # --platform, perhaps under the form of an environment_override field? + # https://github.com/pypa/pip/issues/11198 + "environment": default_environment(), + } diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/link.py b/venv/lib/python3.12/site-packages/pip/_internal/models/link.py new file mode 100644 index 0000000..295035f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/link.py @@ -0,0 +1,613 @@ +from __future__ import annotations + +import functools +import itertools +import logging +import os +import posixpath +import re +import urllib.parse +from collections.abc import Mapping +from dataclasses import dataclass +from typing import ( + TYPE_CHECKING, + Any, + NamedTuple, +) + +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + pairwise, + redact_auth_from_url, + split_auth_from_netloc, + splitext, +) +from pip._internal.utils.urls import path_to_url, url_to_path + +if TYPE_CHECKING: + from pip._internal.index.collector import IndexContent + +logger = logging.getLogger(__name__) + + +# Order matters, earlier hashes have a precedence over later hashes for what +# we will pick to use. +_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5") + + +@dataclass(frozen=True) +class LinkHash: + """Links to content may have embedded hash values. This class parses those. 
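A minimal demonstration of the fragment parsing this class performs, mirroring the regex defined just below; the URL and digest are illustrative:

import re

SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
fragment_re = re.compile(
    r"[#&]({})=([^&]*)".format("|".join(re.escape(h) for h in SUPPORTED_HASHES))
)

url = "https://example.com/demo-1.0.tar.gz#sha256=deadbeef"
match = fragment_re.search(url)
if match:
    print(match.group(1), match.group(2))  # sha256 deadbeef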
+ + `name` must be any member of `_SUPPORTED_HASHES`. + + This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to + be JSON-serializable to conform to PEP 610, this class contains the logic for + parsing a hash name and value for correctness, and then checking whether that hash + conforms to a schema with `.is_hash_allowed()`.""" + + name: str + value: str + + _hash_url_fragment_re = re.compile( + # NB: we do not validate that the second group (.*) is a valid hex + # digest. Instead, we simply keep that string in this class, and then check it + # against Hashes when hash-checking is needed. This is easier to debug than + # proactively discarding an invalid hex digest, as we handle incorrect hashes + # and malformed hashes in the same place. + r"[#&]({choices})=([^&]*)".format( + choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES) + ), + ) + + def __post_init__(self) -> None: + assert self.name in _SUPPORTED_HASHES + + @classmethod + @functools.cache + def find_hash_url_fragment(cls, url: str) -> LinkHash | None: + """Search a string for a checksum algorithm name and encoded output value.""" + match = cls._hash_url_fragment_re.search(url) + if match is None: + return None + name, value = match.groups() + return cls(name=name, value=value) + + def as_dict(self) -> dict[str, str]: + return {self.name: self.value} + + def as_hashes(self) -> Hashes: + """Return a Hashes instance which checks only for the current hash.""" + return Hashes({self.name: [self.value]}) + + def is_hash_allowed(self, hashes: Hashes | None) -> bool: + """ + Return True if the current hash is allowed by `hashes`. + """ + if hashes is None: + return False + return hashes.is_hash_allowed(self.name, hex_digest=self.value) + + +@dataclass(frozen=True) +class MetadataFile: + """Information about a core metadata file associated with a distribution.""" + + hashes: dict[str, str] | None + + def __post_init__(self) -> None: + if self.hashes is not None: + assert all(name in _SUPPORTED_HASHES for name in self.hashes) + + +def supported_hashes(hashes: dict[str, str] | None) -> dict[str, str] | None: + # Remove any unsupported hash types from the mapping. If this leaves no + # supported hashes, return None + if hashes is None: + return None + hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} + if not hashes: + return None + return hashes + + +def _clean_url_path_part(part: str) -> str: + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib.parse.quote(urllib.parse.unquote(part)) + + +def _clean_file_url_path(part: str) -> str: + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + ret = urllib.request.pathname2url(urllib.request.url2pathname(part)) + if ret.startswith("///"): + # Remove any URL authority section, leaving only the URL path. 
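The unquote-then-quote round trip used by these cleaning helpers is what prevents double-encoding; a quick check with illustrative values:

import urllib.parse

for part in ["some path", "some%20path"]:
    print(urllib.parse.quote(urllib.parse.unquote(part)))
# both print "some%20path": already-encoded input is not encoded twice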
+ ret = ret.removeprefix("//") + return ret + + +# percent-encoded: / +_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) + + +def _clean_url_path(path: str, is_local_path: bool) -> str: + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [""])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return "".join(cleaned_parts) + + +def _ensure_quoted_url(url: str) -> str: + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. + """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path?query#fragment`. + result = urllib.parse.urlsplit(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + # Temporarily replace scheme with file to ensure the URL generated by + # urlunsplit() contains an empty netloc (file://) as per RFC 1738. + ret = urllib.parse.urlunsplit(result._replace(scheme="file", path=path)) + ret = result.scheme + ret[4:] # Restore original scheme. + return ret + + +def _absolute_link_url(base_url: str, url: str) -> str: + """ + A faster implementation of urllib.parse.urljoin with a shortcut + for absolute http/https URLs. + """ + if url.startswith(("https://", "http://")): + return url + else: + return urllib.parse.urljoin(base_url, url) + + +@functools.total_ordering +class Link: + """Represents a parsed link from a Package Index's simple URL""" + + __slots__ = [ + "_parsed_url", + "_url", + "_path", + "_hashes", + "comes_from", + "requires_python", + "yanked_reason", + "metadata_file_data", + "cache_link_parsing", + "egg_fragment", + ] + + def __init__( + self, + url: str, + comes_from: str | IndexContent | None = None, + requires_python: str | None = None, + yanked_reason: str | None = None, + metadata_file_data: MetadataFile | None = None, + cache_link_parsing: bool = True, + hashes: Mapping[str, str] | None = None, + ) -> None: + """ + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of IndexContent where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. See + PEP 592 for more information and the specification. + :param metadata_file_data: the metadata attached to the file, or None if + no such metadata is provided. This argument, if not None, indicates + that a separate metadata file exists, and also optionally supplies + hashes for that file. 
+ :param cache_link_parsing: A flag that is used elsewhere to determine + whether resources retrieved from this link should be cached. PyPI + URLs should generally have this set to False, for example. + :param hashes: A mapping of hash names to digests to allow us to + determine the validity of a download. + """ + + # The comes_from, requires_python, and metadata_file_data arguments are + # only used by classmethods of this class, and are not used in client + # code directly. + + # url can be a UNC windows share + if url.startswith("\\\\"): + url = path_to_url(url) + + self._parsed_url = urllib.parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. + self._url = url + # The .path property is hot, so calculate its value ahead of time. + self._path = urllib.parse.unquote(self._parsed_url.path) + + link_hash = LinkHash.find_hash_url_fragment(url) + hashes_from_link = {} if link_hash is None else link_hash.as_dict() + if hashes is None: + self._hashes = hashes_from_link + else: + self._hashes = {**hashes, **hashes_from_link} + + self.comes_from = comes_from + self.requires_python = requires_python if requires_python else None + self.yanked_reason = yanked_reason + self.metadata_file_data = metadata_file_data + + self.cache_link_parsing = cache_link_parsing + self.egg_fragment = self._egg_fragment() + + @classmethod + def from_json( + cls, + file_data: dict[str, Any], + page_url: str, + ) -> Link | None: + """ + Convert a PyPI JSON document from a simple repository page into a Link. + """ + file_url = file_data.get("url") + if file_url is None: + return None + + url = _ensure_quoted_url(_absolute_link_url(page_url, file_url)) + pyrequire = file_data.get("requires-python") + yanked_reason = file_data.get("yanked") + hashes = file_data.get("hashes", {}) + + # PEP 714: Indexes must use the name core-metadata, but + # clients should support the old name as a fallback for compatibility. + metadata_info = file_data.get("core-metadata") + if metadata_info is None: + metadata_info = file_data.get("dist-info-metadata") + + # The metadata info value may be a boolean, or a dict of hashes. + if isinstance(metadata_info, dict): + # The file exists, and hashes have been supplied + metadata_file_data = MetadataFile(supported_hashes(metadata_info)) + elif metadata_info: + # The file exists, but there are no hashes + metadata_file_data = MetadataFile(None) + else: + # False or not present: the file does not exist + metadata_file_data = None + + # The Link.yanked_reason expects an empty string instead of a boolean. + if yanked_reason and not isinstance(yanked_reason, str): + yanked_reason = "" + # The Link.yanked_reason expects None instead of False. + elif not yanked_reason: + yanked_reason = None + + return cls( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + hashes=hashes, + metadata_file_data=metadata_file_data, + ) + + @classmethod + def from_element( + cls, + anchor_attribs: dict[str, str | None], + page_url: str, + base_url: str, + ) -> Link | None: + """ + Convert an anchor element's attributes in a simple repository page to a Link.
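The key fallback and yanked normalization in from_json() above can be seen in isolation with a made-up file entry:

file_data = {"url": "demo-1.0.tar.gz", "yanked": True, "dist-info-metadata": True}

# PEP 714: prefer "core-metadata", fall back to the legacy key.
metadata_info = file_data.get("core-metadata")
if metadata_info is None:
    metadata_info = file_data.get("dist-info-metadata")

# Normalize "yanked": a bare True becomes the empty string (yanked with
# no reason given); a falsy value becomes None (not yanked).
yanked_reason = file_data.get("yanked")
if yanked_reason and not isinstance(yanked_reason, str):
    yanked_reason = ""
elif not yanked_reason:
    yanked_reason = None

print(metadata_info, repr(yanked_reason))  # True ''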
+ """ + href = anchor_attribs.get("href") + if not href: + return None + + url = _ensure_quoted_url(_absolute_link_url(base_url, href)) + pyrequire = anchor_attribs.get("data-requires-python") + yanked_reason = anchor_attribs.get("data-yanked") + + # PEP 714: Indexes must use the name data-core-metadata, but + # clients should support the old name as a fallback for compatibility. + metadata_info = anchor_attribs.get("data-core-metadata") + if metadata_info is None: + metadata_info = anchor_attribs.get("data-dist-info-metadata") + # The metadata info value may be the string "true", or a string of + # the form "hashname=hashval" + if metadata_info == "true": + # The file exists, but there are no hashes + metadata_file_data = MetadataFile(None) + elif metadata_info is None: + # The file does not exist + metadata_file_data = None + else: + # The file exists, and hashes have been supplied + hashname, sep, hashval = metadata_info.partition("=") + if sep == "=": + metadata_file_data = MetadataFile(supported_hashes({hashname: hashval})) + else: + # Error - data is wrong. Treat as no hashes supplied. + logger.debug( + "Index returned invalid data-dist-info-metadata value: %s", + metadata_info, + ) + metadata_file_data = MetadataFile(None) + + return cls( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + metadata_file_data=metadata_file_data, + ) + + def __str__(self) -> str: + if self.requires_python: + rp = f" (requires-python:{self.requires_python})" + else: + rp = "" + if self.comes_from: + return f"{self.redacted_url} (from {self.comes_from}){rp}" + else: + return self.redacted_url + + def __repr__(self) -> str: + return f"" + + def __hash__(self) -> int: + return hash(self.url) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Link): + return NotImplemented + return self.url == other.url + + def __lt__(self, other: Any) -> bool: + if not isinstance(other, Link): + return NotImplemented + return self.url < other.url + + @property + def url(self) -> str: + return self._url + + @property + def redacted_url(self) -> str: + return redact_auth_from_url(self.url) + + @property + def filename(self) -> str: + path = self.path.rstrip("/") + name = posixpath.basename(path) + if not name: + # Make sure we don't leak auth information if the netloc + # includes a username and password. + netloc, user_pass = split_auth_from_netloc(self.netloc) + return netloc + + name = urllib.parse.unquote(name) + assert name, f"URL {self._url!r} produced no filename" + return name + + @property + def file_path(self) -> str: + return url_to_path(self.url) + + @property + def scheme(self) -> str: + return self._parsed_url.scheme + + @property + def netloc(self) -> str: + """ + This can contain auth information. + """ + return self._parsed_url.netloc + + @property + def path(self) -> str: + return self._path + + def splitext(self) -> tuple[str, str]: + return splitext(posixpath.basename(self.path.rstrip("/"))) + + @property + def ext(self) -> str: + return self.splitext()[1] + + @property + def url_without_fragment(self) -> str: + scheme, netloc, path, query, fragment = self._parsed_url + return urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + + _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") + + # Per PEP 508. 
+ + # Per PEP 508. + _project_name_re = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE + ) + + def _egg_fragment(self) -> str | None: + match = self._egg_fragment_re.search(self._url) + if not match: + return None + + # An egg fragment looks like a PEP 508 project name, along with + # an optional extras specifier. Anything else is invalid. + project_name = match.group(1) + if not self._project_name_re.match(project_name): + deprecated( + reason=f"{self} contains an egg fragment with a non-PEP 508 name.", + replacement="to use the req @ url syntax, and remove the egg fragment", + gone_in="26.0", + issue=13157, + ) + + return project_name + + _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") + + @property + def subdirectory_fragment(self) -> str | None: + match = self._subdirectory_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + def metadata_link(self) -> Link | None: + """Return a link to the associated core metadata file (if any).""" + if self.metadata_file_data is None: + return None + metadata_url = f"{self.url_without_fragment}.metadata" + if self.metadata_file_data.hashes is None: + return Link(metadata_url) + return Link(metadata_url, hashes=self.metadata_file_data.hashes) + + def as_hashes(self) -> Hashes: + return Hashes({k: [v] for k, v in self._hashes.items()}) + + @property + def hash(self) -> str | None: + return next(iter(self._hashes.values()), None) + + @property + def hash_name(self) -> str | None: + return next(iter(self._hashes), None) + + @property + def show_url(self) -> str: + return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0]) + + @property + def is_file(self) -> bool: + return self.scheme == "file" + + def is_existing_dir(self) -> bool: + return self.is_file and os.path.isdir(self.file_path) + + @property + def is_wheel(self) -> bool: + return self.ext == WHEEL_EXTENSION + + @property + def is_vcs(self) -> bool: + from pip._internal.vcs import vcs + + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self) -> bool: + return self.yanked_reason is not None + + @property + def has_hash(self) -> bool: + return bool(self._hashes) + + def is_hash_allowed(self, hashes: Hashes | None) -> bool: + """ + Return True if the link has a hash and it is allowed by `hashes`. + """ + if hashes is None: + return False + return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items()) + + +class _CleanResult(NamedTuple): + """Convert link for equivalency check. + + This is used in the resolver to check whether two URL-specified requirements + likely point to the same distribution and can be considered equivalent. This + equivalency logic avoids comparing URLs literally, which can be too strict + (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpected by users. + + Currently this does three things: + + 1. Drop the basic auth part. This is technically wrong since a server can + serve different content based on auth, but if it does that, it is even + impossible to guarantee two URLs without auth are equivalent, since + the user can input different auth information when prompted. So the + practical solution is to assume the auth doesn't affect the response. + 2. Parse the query to avoid the ordering issue. Note that ordering under the + same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are + still considered different. + 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
hash values, since it should have no impact on the downloaded content. Note + that this drops the "egg=" part historically used to denote the requested + project (and extras), which is wrong in the strictest sense, but too many + people are supplying it inconsistently to cause superfluous resolution + conflicts, so we choose to also ignore them. + """ + + parsed: urllib.parse.SplitResult + query: dict[str, list[str]] + subdirectory: str + hashes: dict[str, str] + + +def _clean_link(link: Link) -> _CleanResult: + parsed = link._parsed_url + netloc = parsed.netloc.rsplit("@", 1)[-1] + # According to RFC 8089, an empty host in file: means localhost. + if parsed.scheme == "file" and not netloc: + netloc = "localhost" + fragment = urllib.parse.parse_qs(parsed.fragment) + if "egg" in fragment: + logger.debug("Ignoring egg= fragment in %s", link) + try: + # If there are multiple subdirectory values, use the first one. + # This matches the behavior of Link.subdirectory_fragment. + subdirectory = fragment["subdirectory"][0] + except (IndexError, KeyError): + subdirectory = "" + # If there are multiple hash values under the same algorithm, use the + # first one. This matches the behavior of Link.hash_value. + hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment} + return _CleanResult( + parsed=parsed._replace(netloc=netloc, query="", fragment=""), + query=urllib.parse.parse_qs(parsed.query), + subdirectory=subdirectory, + hashes=hashes, + ) + + +@functools.cache +def links_equivalent(link1: Link, link2: Link) -> bool: + return _clean_link(link1) == _clean_link(link2) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/pylock.py b/venv/lib/python3.12/site-packages/pip/_internal/models/pylock.py new file mode 100644 index 0000000..1b6b8c1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/pylock.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +import dataclasses +import re +from collections.abc import Iterable +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from pip._vendor import tomli_w + +from pip._internal.models.direct_url import ArchiveInfo, DirInfo, VcsInfo +from pip._internal.models.link import Link +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.urls import url_to_path + +if TYPE_CHECKING: + from typing_extensions import Self + +PYLOCK_FILE_NAME_RE = re.compile(r"^pylock\.([^.]+)\.toml$") + + +def is_valid_pylock_file_name(path: Path) -> bool: + return path.name == "pylock.toml" or bool(re.match(PYLOCK_FILE_NAME_RE, path.name)) + + +def _toml_dict_factory(data: list[tuple[str, Any]]) -> dict[str, Any]: + return {key.replace("_", "-"): value for key, value in data if value is not None} + + +@dataclass +class PackageVcs: + type: str + url: str | None + # (not supported) path: Optional[str] + requested_revision: str | None + commit_id: str + subdirectory: str | None + + +@dataclass +class PackageDirectory: + path: str + editable: bool | None + subdirectory: str | None + + +@dataclass +class PackageArchive: + url: str | None + # (not supported) path: Optional[str] + # (not supported) size: Optional[int] + # (not supported) upload_time: Optional[datetime] + hashes: dict[str, str] + subdirectory: str | None + + +@dataclass +class PackageSdist: + name: str + # (not supported) upload_time: Optional[datetime] + url: str | None + # (not supported) path: Optional[str] + #
(not supported) size: Optional[int] + hashes: dict[str, str] + + +@dataclass +class PackageWheel: + name: str + # (not supported) upload_time: Optional[datetime] + url: str | None + # (not supported) path: Optional[str] + # (not supported) size: Optional[int] + hashes: dict[str, str] + + +@dataclass +class Package: + name: str + version: str | None = None + # (not supported) marker: Optional[str] + # (not supported) requires_python: Optional[str] + # (not supported) dependencies + vcs: PackageVcs | None = None + directory: PackageDirectory | None = None + archive: PackageArchive | None = None + # (not supported) index: Optional[str] + sdist: PackageSdist | None = None + wheels: list[PackageWheel] | None = None + # (not supported) attestation_identities: Optional[List[Dict[str, Any]]] + # (not supported) tool: Optional[Dict[str, Any]] + + @classmethod + def from_install_requirement(cls, ireq: InstallRequirement, base_dir: Path) -> Self: + base_dir = base_dir.resolve() + dist = ireq.get_dist() + download_info = ireq.download_info + assert download_info + package = cls(name=dist.canonical_name) + if ireq.is_direct: + if isinstance(download_info.info, VcsInfo): + package.vcs = PackageVcs( + type=download_info.info.vcs, + url=download_info.url, + requested_revision=download_info.info.requested_revision, + commit_id=download_info.info.commit_id, + subdirectory=download_info.subdirectory, + ) + elif isinstance(download_info.info, DirInfo): + package.directory = PackageDirectory( + path=( + Path(url_to_path(download_info.url)) + .resolve() + .relative_to(base_dir) + .as_posix() + ), + editable=( + download_info.info.editable + if download_info.info.editable + else None + ), + subdirectory=download_info.subdirectory, + ) + elif isinstance(download_info.info, ArchiveInfo): + if not download_info.info.hashes: + raise NotImplementedError() + package.archive = PackageArchive( + url=download_info.url, + hashes=download_info.info.hashes, + subdirectory=download_info.subdirectory, + ) + else: + # should never happen + raise NotImplementedError() + else: + package.version = str(dist.version) + if isinstance(download_info.info, ArchiveInfo): + if not download_info.info.hashes: + raise NotImplementedError() + link = Link(download_info.url) + if link.is_wheel: + package.wheels = [ + PackageWheel( + name=link.filename, + url=download_info.url, + hashes=download_info.info.hashes, + ) + ] + else: + package.sdist = PackageSdist( + name=link.filename, + url=download_info.url, + hashes=download_info.info.hashes, + ) + else: + # should never happen + raise NotImplementedError() + return package + + +@dataclass +class Pylock: + lock_version: str = "1.0" + # (not supported) environments: Optional[List[str]] + # (not supported) requires_python: Optional[str] + # (not supported) extras: List[str] = [] + # (not supported) dependency_groups: List[str] = [] + created_by: str = "pip" + packages: list[Package] = dataclasses.field(default_factory=list) + # (not supported) tool: Optional[Dict[str, Any]] + + def as_toml(self) -> str: + return tomli_w.dumps(dataclasses.asdict(self, dict_factory=_toml_dict_factory)) + + @classmethod + def from_install_requirements( + cls, install_requirements: Iterable[InstallRequirement], base_dir: Path + ) -> Self: + return cls( + packages=sorted( + ( + Package.from_install_requirement(ireq, base_dir) + for ireq in install_requirements + ), + key=lambda p: p.name, + ) + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/scheme.py 
b/venv/lib/python3.12/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 0000000..06a9a55 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,25 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + +from dataclasses import dataclass + +SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"] + + +@dataclass(frozen=True) +class Scheme: + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ + + __slots__ = SCHEME_KEYS + + platlib: str + purelib: str + headers: str + scripts: str + data: str diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py b/venv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 0000000..136163c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,126 @@ +import itertools +import logging +import os +import posixpath +import urllib.parse +from dataclasses import dataclass + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url + +logger = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class SearchScope: + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls", "no_index"] + + find_links: list[str] + index_urls: list[str] + no_index: bool + + @classmethod + def create( + cls, + find_links: list[str], + index_urls: list[str], + no_index: bool, + ) -> "SearchScope": + """ + Create a SearchScope object after normalizing the `find_links`. + """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links: list[str] = [] + for link in find_links: + if link.startswith("~"): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib.parse.urlparse(link) + if parsed.scheme == "https": + logger.warning( + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + no_index=no_index, + ) + + def get_formatted_locations(self) -> str: + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib.parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. 
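A quick illustration of the crude scheme/netloc validity test this comment describes; the URLs are illustrative:

import urllib.parse

for url in ["https://pypi.org/simple", "pypi.org/simple"]:
    purl = urllib.parse.urlsplit(url)
    # A bare host without a scheme parses with empty scheme and netloc,
    # which is what triggers the "seems invalid" warning below.
    print(url, "->", "ok" if (purl.scheme or purl.netloc) else "seems invalid")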
Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, please provide a scheme.', + redacted_index_url, + ) + + redacted_index_urls.append(redacted_index_url) + + lines.append( + "Looking in indexes: {}".format(", ".join(redacted_index_urls)) + ) + + if self.find_links: + lines.append( + "Looking in links: {}".format( + ", ".join(redact_auth_from_url(url) for url in self.find_links) + ) + ) + return "\n".join(lines) + + def get_index_urls_locations(self, project_name: str) -> list[str]: + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + uses this url_name to produce all locations + """ + + def mkurl_pypi_url(url: str) -> str: + loc = posixpath.join( + url, urllib.parse.quote(canonicalize_name(project_name)) + ) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith("/"): + loc = loc + "/" + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py b/venv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 0000000..8d5b42d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from pip._internal.models.format_control import FormatControl + + +# TODO: This needs Python 3.10's improved slots support for dataclasses +# to be converted into a dataclass. +class SelectionPreferences: + """ + Encapsulates the candidate selection preferences for downloading + and installing files. + """ + + __slots__ = [ + "allow_yanked", + "allow_all_prereleases", + "format_control", + "prefer_binary", + "ignore_requires_python", + ] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked: bool, + allow_all_prereleases: bool = False, + format_control: FormatControl | None = None, + prefer_binary: bool = False, + ignore_requires_python: bool | None = None, + ) -> None: + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False.
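+        :param allow_all_prereleases: Whether pre-release and development
+            versions are acceptable candidates, rather than final
+            releases only.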
+ """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/target_python.py b/venv/lib/python3.12/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 0000000..8c38392 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,122 @@ +from __future__ import annotations + +import sys + +from pip._vendor.packaging.tags import Tag + +from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot +from pip._internal.utils.misc import normalize_version_info + + +class TargetPython: + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + __slots__ = [ + "_given_py_version_info", + "abis", + "implementation", + "platforms", + "py_version", + "py_version_info", + "_valid_tags", + "_valid_tags_set", + ] + + def __init__( + self, + platforms: list[str] | None = None, + py_version_info: tuple[int, ...] | None = None, + abis: list[str] | None = None, + implementation: str | None = None, + ) -> None: + """ + :param platforms: A list of strings or None. If None, searches for + packages that are supported by the current system. Otherwise, will + find packages that can be built on the platforms passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abis: A list of strings or None. This is passed to + compatibility_tags.py's get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). + self._given_py_version_info = py_version_info + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + py_version = ".".join(map(str, py_version_info[:2])) + + self.abis = abis + self.implementation = implementation + self.platforms = platforms + self.py_version = py_version + self.py_version_info = py_version_info + + # This is used to cache the return value of get_(un)sorted_tags. + self._valid_tags: list[Tag] | None = None + self._valid_tags_set: set[Tag] | None = None + + def format_given(self) -> str: + """ + Format the given, non-None attributes for display. + """ + display_version = None + if self._given_py_version_info is not None: + display_version = ".".join( + str(part) for part in self._given_py_version_info + ) + + key_values = [ + ("platforms", self.platforms), + ("version_info", display_version), + ("abis", self.abis), + ("implementation", self.implementation), + ] + return " ".join( + f"{key}={value!r}" for key, value in key_values if value is not None + ) + + def get_sorted_tags(self) -> list[Tag]: + """ + Return the supported PEP 425 tags to check wheel candidates against. + + The tags are returned in order of preference (most preferred first). 
+ """ + if self._valid_tags is None: + # Pass versions=None if no py_version_info was given since + # versions=None uses special default logic. + py_version_info = self._given_py_version_info + if py_version_info is None: + version = None + else: + version = version_info_to_nodot(py_version_info) + + tags = get_supported( + version=version, + platforms=self.platforms, + abis=self.abis, + impl=self.implementation, + ) + self._valid_tags = tags + + return self._valid_tags + + def get_unsorted_tags(self) -> set[Tag]: + """Exactly the same as get_sorted_tags, but returns a set. + + This is important for performance. + """ + if self._valid_tags_set is None: + self._valid_tags_set = set(self.get_sorted_tags()) + + return self._valid_tags_set diff --git a/venv/lib/python3.12/site-packages/pip/_internal/models/wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/models/wheel.py new file mode 100644 index 0000000..fbd4902 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/models/wheel.py @@ -0,0 +1,80 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. +""" + +from __future__ import annotations + +from collections.abc import Iterable + +from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.utils import ( + InvalidWheelFilename as _PackagingInvalidWheelFilename, +) +from pip._vendor.packaging.utils import parse_wheel_filename + +from pip._internal.exceptions import InvalidWheelFilename + + +class Wheel: + """A wheel file""" + + def __init__(self, filename: str) -> None: + self.filename = filename + + try: + wheel_info = parse_wheel_filename(filename) + except _PackagingInvalidWheelFilename as e: + raise InvalidWheelFilename(e.args[0]) from None + + self.name, _version, self.build_tag, self.file_tags = wheel_info + self.version = str(_version) + + def get_formatted_file_tags(self) -> list[str]: + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags: list[Tag]) -> int: + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + try: + return next(i for i, t in enumerate(tags) if t in self.file_tags) + except StopIteration: + raise ValueError() + + def find_most_preferred_tag( + self, tags: list[Tag], tag_to_priority: dict[Tag, int] + ) -> int: + """Return the priority of the most preferred tag that one of the wheel's file + tag combinations achieves in the given list of supported tags using the given + tag_to_priority mapping, where lower priorities are more-preferred. + + This is used in place of support_index_min in some cases in order to avoid + an expensive linear scan of a large list of tags. + + :param tags: the PEP 425 tags to check the wheel against. + :param tag_to_priority: a mapping from tag to priority of that tag, where + lower is more preferred. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. 
+ """ + return min( + tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority + ) + + def supported(self, tags: Iterable[Tag]) -> bool: + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. + """ + return not self.file_tags.isdisjoint(tags) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 0000000..0ae1f56 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1 @@ +"""Contains purely network-related utilities.""" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..976657a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..681f414 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/auth.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..1e5302c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/cache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc new file mode 100644 index 0000000..860e247 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/download.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc new file mode 100644 index 0000000..5f1a6ef Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc new file mode 100644 index 0000000..aacc90e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/session.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..63d5824 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc new file mode 100644 index 0000000..6bbd7c0 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/auth.py b/venv/lib/python3.12/site-packages/pip/_internal/network/auth.py new file mode 100644 index 0000000..a42f702 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,564 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. +""" + +from __future__ import annotations + +import logging +import os +import shutil +import subprocess +import sysconfig +import typing +import urllib.parse +from abc import ABC, abstractmethod +from functools import cache +from os.path import commonprefix +from pathlib import Path +from typing import Any, NamedTuple + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import Request, Response +from pip._vendor.requests.utils import get_netrc_auth + +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.vcs.versioncontrol import AuthInfo + +logger = getLogger(__name__) + +KEYRING_DISABLED = False + + +class Credentials(NamedTuple): + url: str + username: str + password: str + + +class KeyRingBaseProvider(ABC): + """Keyring base provider interface""" + + has_keyring: bool + + @abstractmethod + def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None: ... + + @abstractmethod + def save_auth_info(self, url: str, username: str, password: str) -> None: ... + + +class KeyRingNullProvider(KeyRingBaseProvider): + """Keyring null provider""" + + has_keyring = False + + def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None: + return None + + def save_auth_info(self, url: str, username: str, password: str) -> None: + return None + + +class KeyRingPythonProvider(KeyRingBaseProvider): + """Keyring interface which uses locally imported `keyring`""" + + has_keyring = True + + def __init__(self) -> None: + import keyring + + self.keyring = keyring + + def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None: + # Support keyring's get_credential interface which supports getting + # credentials without a username. This is only available for + # keyring>=15.2.0. + if hasattr(self.keyring, "get_credential"): + logger.debug("Getting credentials from keyring for %s", url) + cred = self.keyring.get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username is not None: + logger.debug("Getting password from keyring for %s", url) + password = self.keyring.get_password(url, username) + if password: + return username, password + return None + + def save_auth_info(self, url: str, username: str, password: str) -> None: + self.keyring.set_password(url, username, password) + + +class KeyRingCliProvider(KeyRingBaseProvider): + """Provider which uses `keyring` cli + + Instead of calling the keyring package installed alongside pip + we call keyring on the command line which will enable pip to + use which ever installation of keyring is available first in + PATH. 
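+    Credentials are read by invoking ``keyring get <service> <username>``
+    and stored via ``keyring set <service> <username>`` with the password
+    supplied on stdin, mirroring the Python API's behaviour.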
+ """ + + has_keyring = True + + def __init__(self, cmd: str) -> None: + self.keyring = cmd + + def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None: + # This is the default implementation of keyring.get_credential + # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139 + if username is not None: + password = self._get_password(url, username) + if password is not None: + return username, password + return None + + def save_auth_info(self, url: str, username: str, password: str) -> None: + return self._set_password(url, username, password) + + def _get_password(self, service_name: str, username: str) -> str | None: + """Mirror the implementation of keyring.get_password using cli""" + if self.keyring is None: + return None + + cmd = [self.keyring, "get", service_name, username] + env = os.environ.copy() + env["PYTHONIOENCODING"] = "utf-8" + res = subprocess.run( + cmd, + stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, + env=env, + ) + if res.returncode: + return None + return res.stdout.decode("utf-8").strip(os.linesep) + + def _set_password(self, service_name: str, username: str, password: str) -> None: + """Mirror the implementation of keyring.set_password using cli""" + if self.keyring is None: + return None + env = os.environ.copy() + env["PYTHONIOENCODING"] = "utf-8" + subprocess.run( + [self.keyring, "set", service_name, username], + input=f"{password}{os.linesep}".encode(), + env=env, + check=True, + ) + return None + + +@cache +def get_keyring_provider(provider: str) -> KeyRingBaseProvider: + logger.verbose("Keyring provider requested: %s", provider) + + # keyring has previously failed and been disabled + if KEYRING_DISABLED: + provider = "disabled" + if provider in ["import", "auto"]: + try: + impl = KeyRingPythonProvider() + logger.verbose("Keyring provider set: import") + return impl + except ImportError: + pass + except Exception as exc: + # In the event of an unexpected exception + # we should warn the user + msg = "Installed copy of keyring fails with exception %s" + if provider == "auto": + msg = msg + ", trying to find a keyring executable as a fallback" + logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG)) + if provider in ["subprocess", "auto"]: + cli = shutil.which("keyring") + if cli and cli.startswith(sysconfig.get_path("scripts")): + # all code within this function is stolen from shutil.which implementation + @typing.no_type_check + def PATH_as_shutil_which_determines_it() -> str: + path = os.environ.get("PATH", None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + return path + + scripts = Path(sysconfig.get_path("scripts")) + + paths = [] + for path in PATH_as_shutil_which_determines_it().split(os.pathsep): + p = Path(path) + try: + if not p.samefile(scripts): + paths.append(path) + except FileNotFoundError: + pass + + path = os.pathsep.join(paths) + + cli = shutil.which("keyring", path=path) + + if cli: + logger.verbose("Keyring provider set: subprocess with executable %s", cli) + return KeyRingCliProvider(cli) + + logger.verbose("Keyring provider set: disabled") + return KeyRingNullProvider() + + +class MultiDomainBasicAuth(AuthBase): + def __init__( + self, + prompting: bool = True, + index_urls: list[str] | None = None, + keyring_provider: str = "auto", 
+ ) -> None: + self.prompting = prompting + self.index_urls = index_urls + self.keyring_provider = keyring_provider + self.passwords: dict[str, AuthInfo] = {} + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save: Credentials | None = None + + @property + def keyring_provider(self) -> KeyRingBaseProvider: + return get_keyring_provider(self._keyring_provider) + + @keyring_provider.setter + def keyring_provider(self, provider: str) -> None: + # The free function get_keyring_provider has been decorated with + # functools.cache. If an exception occurs in get_keyring_auth that + # cache will be cleared and keyring disabled, take that into account + # if you want to remove this indirection. + self._keyring_provider = provider + + @property + def use_keyring(self) -> bool: + # We won't use keyring when --no-input is passed unless + # a specific provider is requested because it might require + # user interaction + return self.prompting or self._keyring_provider not in ["auto", "disabled"] + + def _get_keyring_auth( + self, + url: str | None, + username: str | None, + ) -> AuthInfo | None: + """Return the tuple auth for a given url from keyring.""" + # Do nothing if no url was provided + if not url: + return None + + try: + return self.keyring_provider.get_auth_info(url, username) + except Exception as exc: + # Log the full exception (with stacktrace) at debug, so it'll only + # show up when running in verbose mode. + logger.debug("Keyring is skipped due to an exception", exc_info=True) + # Always log a shortened version of the exception. + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + global KEYRING_DISABLED + KEYRING_DISABLED = True + get_keyring_provider.cache_clear() + return None + + def _get_index_url(self, url: str) -> str | None: + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. + """ + if not url or not self.index_urls: + return None + + url = remove_auth_from_url(url).rstrip("/") + "/" + parsed_url = urllib.parse.urlsplit(url) + + candidates = [] + + for index in self.index_urls: + index = index.rstrip("/") + "/" + parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index)) + if parsed_url == parsed_index: + return index + + if parsed_url.netloc != parsed_index.netloc: + continue + + candidate = urllib.parse.urlsplit(index) + candidates.append(candidate) + + if not candidates: + return None + + candidates.sort( + reverse=True, + key=lambda candidate: commonprefix( + [ + parsed_url.path, + candidate.path, + ] + ).rfind("/"), + ) + + return urllib.parse.urlunsplit(candidates[0]) + + def _get_new_credentials( + self, + original_url: str, + *, + allow_netrc: bool = True, + allow_keyring: bool = False, + ) -> AuthInfo: + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. 
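+        # Illustrative example: "https://user:pw@pypi.example.org/simple"
+        # splits into ("https://pypi.example.org/simple",
+        # "pypi.example.org", ("user", "pw")).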
+ url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + # fmt: off + kr_auth = ( + self._get_keyring_auth(index_url, username) or + self._get_keyring_auth(netloc, username) + ) + # fmt: on + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials( + self, original_url: str + ) -> tuple[str, str | None, str | None]: + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Try to get credentials from original url + username, password = self._get_new_credentials(original_url) + + # If credentials not found, use any stored credentials for this netloc. + # Do this if either the username or the password is missing. + # This accounts for the situation in which the user has specified + # the username in the index url, but the password comes from keyring. + if (username is None or password is None) and netloc in self.passwords: + un, pw = self.passwords[netloc] + # It is possible that the cached credentials are for a different username, + # in which case the cache should be ignored. + if username is None or username == un: + username, password = un, pw + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
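+            # Illustrative example: an index URL that supplied only a
+            # username is cached as ("that-username", ""), so a later
+            # request to the same netloc can still match on the username.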
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) + # Credentials were not found + or (username is None and password is None) + ), f"Could not load credentials from url: {original_url}" + + return url, username, password + + def __call__(self, req: Request) -> Request: + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password(self, netloc: str) -> tuple[str | None, str | None, bool]: + username = ask_input(f"User for {netloc}: ") if self.prompting else None + if not username: + return None, None, False + if self.use_keyring: + auth = self._get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self) -> bool: + if ( + not self.prompting + or not self.use_keyring + or not self.keyring_provider.has_keyring + ): + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp: Response, **kwargs: Any) -> Response: + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + username, password = None, None + + # Query the keyring for credentials: + if self.use_keyring: + username, password = self._get_new_credentials( + resp.url, + allow_netrc=False, + allow_keyring=True, + ) + + # We are not able to prompt the user so simply return the response + if not self.prompting and not username and not password: + return resp + + parsed = urllib.parse.urlparse(resp.url) + + # Prompt the user for a new username and password + save = False + if not username and not password: + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = Credentials( + url=parsed.netloc, + username=username, + password=password, + ) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + # The result of the assignment isn't used, it's just needed to consume + # the content. + _ = resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
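+        # At this point the original 401 response has been drained and its
+        # connection released; the retried request below carries Basic auth
+        # and reports a second failure through warn_on_401.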
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp: Response, **kwargs: Any) -> None: + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + "401 Error, Credentials not correct for %s", + resp.request.url, + ) + + def save_credentials(self, resp: Response, **kwargs: Any) -> None: + """Response callback to save credentials on success.""" + assert ( + self.keyring_provider.has_keyring + ), "should never reach here without keyring" + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info("Saving credentials to keyring") + self.keyring_provider.save_auth_info( + creds.url, creds.username, creds.password + ) + except Exception: + logger.exception("Failed to save credentials") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/cache.py b/venv/lib/python3.12/site-packages/pip/_internal/network/cache.py new file mode 100644 index 0000000..2a372f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,128 @@ +"""HTTP cache implementation.""" + +from __future__ import annotations + +import os +import shutil +from collections.abc import Generator +from contextlib import contextmanager +from datetime import datetime +from typing import Any, BinaryIO, Callable + +from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache +from pip._vendor.cachecontrol.caches import SeparateBodyFileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import ( + adjacent_tmp_file, + copy_directory_permissions, + replace, +) +from pip._internal.utils.misc import ensure_dir + + +def is_from_cache(response: Response) -> bool: + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors() -> Generator[None, None, None]: + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except OSError: + pass + + +class SafeFileCache(SeparateBodyBaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + + There is a race condition when two processes try to write and/or read the + same entry at the same time, since each entry consists of two separate + files (https://github.com/psf/cachecontrol/issues/324). We therefore have + additional logic that makes sure that both files to be present before + returning an entry; this fixes the read side of the race condition. + + For the write side, we assume that the server will only ever return the + same data for the same URL, which ought to be the case for files pip is + downloading. PyPI does not have a mechanism to swap out a wheel for + another wheel, for example. If this assumption is not true, the + CacheControl issue will need to be fixed. + """ + + def __init__(self, directory: str) -> None: + assert directory is not None, "Cache directory must not be None." + super().__init__() + self.directory = directory + + def _get_cache_path(self, name: str) -> str: + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
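+        # Layout sketch: a key hashing to "a1b2c3..." is stored at
+        # <directory>/a/1/b/2/c/a1b2c3..., one subdirectory per leading
+        # character, which keeps any single directory from growing large.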
+ hashed = SeparateBodyFileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> bytes | None: + # The cache entry is only valid if both metadata and body exist. + metadata_path = self._get_cache_path(key) + body_path = metadata_path + ".body" + if not (os.path.exists(metadata_path) and os.path.exists(body_path)): + return None + with suppressed_cache_errors(): + with open(metadata_path, "rb") as f: + return f.read() + + def _write_to_file(self, path: str, writer_func: Callable[[BinaryIO], Any]) -> None: + """Common file writing logic with proper permissions and atomic replacement.""" + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + writer_func(f) + # Inherit the read/write permissions of the cache directory + # to enable multi-user cache use-cases. + copy_directory_permissions(self.directory, f) + + replace(f.name, path) + + def _write(self, path: str, data: bytes) -> None: + self._write_to_file(path, lambda f: f.write(data)) + + def _write_from_io(self, path: str, source_file: BinaryIO) -> None: + self._write_to_file(path, lambda f: shutil.copyfileobj(source_file, f)) + + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: + path = self._get_cache_path(key) + self._write(path, value) + + def delete(self, key: str) -> None: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) + with suppressed_cache_errors(): + os.remove(path + ".body") + + def get_body(self, key: str) -> BinaryIO | None: + # The cache entry is only valid if both metadata and body exist. + metadata_path = self._get_cache_path(key) + body_path = metadata_path + ".body" + if not (os.path.exists(metadata_path) and os.path.exists(body_path)): + return None + with suppressed_cache_errors(): + return open(body_path, "rb") + + def set_body(self, key: str, body: bytes) -> None: + path = self._get_cache_path(key) + ".body" + self._write(path, body) + + def set_body_from_io(self, key: str, body_file: BinaryIO) -> None: + """Set the body of the cache entry from a file object.""" + path = self._get_cache_path(key) + ".body" + self._write_from_io(path, body_file) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/download.py b/venv/lib/python3.12/site-packages/pip/_internal/network/download.py new file mode 100644 index 0000000..9881cc2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/download.py @@ -0,0 +1,342 @@ +"""Download files with progress indicators.""" + +from __future__ import annotations + +import email.message +import logging +import mimetypes +import os +from collections.abc import Iterable, Mapping +from dataclasses import dataclass +from http import HTTPStatus +from typing import BinaryIO + +from pip._vendor.requests import PreparedRequest +from pip._vendor.requests.models import Response +from pip._vendor.urllib3 import HTTPResponse as URLlib3Response +from pip._vendor.urllib3._collections import HTTPHeaderDict +from pip._vendor.urllib3.exceptions import ReadTimeoutError + +from pip._internal.cli.progress_bars import BarType, get_download_progress_renderer +from pip._internal.exceptions import IncompleteDownloadError, NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.models.link import Link +from pip._internal.network.cache import SafeFileCache, is_from_cache +from pip._internal.network.session import CacheControlAdapter, 
PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks +from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp: Response) -> int | None: + try: + return int(resp.headers["content-length"]) + except (ValueError, KeyError, TypeError): + return None + + +def _get_http_response_etag_or_last_modified(resp: Response) -> str | None: + """ + Return either the ETag or Last-Modified header (or None if neither exists). + The return value can be used in an If-Range header. + """ + return resp.headers.get("etag", resp.headers.get("last-modified")) + + +def _log_download( + resp: Response, + link: Link, + progress_bar: BarType, + total_length: int | None, + range_start: int | None = 0, +) -> Iterable[bytes]: + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + if range_start: + logged_url = ( + f"{logged_url} ({format_size(range_start)}/{format_size(total_length)})" + ) + else: + logged_url = f"{logged_url} ({format_size(total_length)})" + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + elif range_start: + logger.info("Resuming download %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (512 * 1024): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp) + + if not show_progress: + return chunks + + renderer = get_download_progress_renderer( + bar_type=progress_bar, size=total_length, initial_progress=range_start + ) + return renderer(chunks) + + +def sanitize_content_filename(filename: str) -> str: + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition: str, default_filename: str) -> str: + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + m = email.message.Message() + m["content-type"] = content_disposition + filename = m.get_param("filename") + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(str(filename)) + return filename or default_filename + + +def _get_http_response_filename(resp: Response, link: Link) -> str: + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. 
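+    Preference order: the Content-Disposition filename, then the link's
+    own filename, with an extension guessed from the Content-Type or the
+    final response URL when none is present.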
+ """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get("content-disposition") + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext: str | None = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(resp.headers.get("content-type", "")) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +@dataclass +class _FileDownload: + """Stores the state of a single link download.""" + + link: Link + output_file: BinaryIO + size: int | None + bytes_received: int = 0 + reattempts: int = 0 + + def is_incomplete(self) -> bool: + return bool(self.size is not None and self.bytes_received < self.size) + + def write_chunk(self, data: bytes) -> None: + self.bytes_received += len(data) + self.output_file.write(data) + + def reset_file(self) -> None: + """Delete any saved data and reset progress to zero.""" + self.output_file.seek(0) + self.output_file.truncate() + self.bytes_received = 0 + + +class Downloader: + def __init__( + self, + session: PipSession, + progress_bar: BarType, + resume_retries: int, + ) -> None: + assert ( + resume_retries >= 0 + ), "Number of max resume retries must be bigger or equal to zero" + self._session = session + self._progress_bar = progress_bar + self._resume_retries = resume_retries + + def batch( + self, links: Iterable[Link], location: str + ) -> Iterable[tuple[Link, tuple[str, str]]]: + """Convenience method to download multiple links.""" + for link in links: + filepath, content_type = self(link, location) + yield link, (filepath, content_type) + + def __call__(self, link: Link, location: str) -> tuple[str, str]: + """Download a link and save it under location.""" + resp = self._http_get(link) + download_size = _get_http_response_size(resp) + + filepath = os.path.join(location, _get_http_response_filename(resp, link)) + with open(filepath, "wb") as content_file: + download = _FileDownload(link, content_file, download_size) + self._process_response(download, resp) + if download.is_incomplete(): + self._attempt_resumes_or_redownloads(download, resp) + + content_type = resp.headers.get("Content-Type", "") + return filepath, content_type + + def _process_response(self, download: _FileDownload, resp: Response) -> None: + """Download and save chunks from a response.""" + chunks = _log_download( + resp, + download.link, + self._progress_bar, + download.size, + range_start=download.bytes_received, + ) + try: + for chunk in chunks: + download.write_chunk(chunk) + except ReadTimeoutError as e: + # If the download size is not known, then give up downloading the file. 
+ if download.size is None: + raise e + + logger.warning("Connection timed out while downloading.") + + def _attempt_resumes_or_redownloads( + self, download: _FileDownload, first_resp: Response + ) -> None: + """Attempt to resume/restart the download if connection was dropped.""" + + while download.reattempts < self._resume_retries and download.is_incomplete(): + assert download.size is not None + download.reattempts += 1 + logger.warning( + "Attempting to resume incomplete download (%s/%s, attempt %d)", + format_size(download.bytes_received), + format_size(download.size), + download.reattempts, + ) + + try: + resume_resp = self._http_get_resume(download, should_match=first_resp) + # Fallback: if the server responded with 200 (i.e., the file has + # since been modified or range requests are unsupported) or any + # other unexpected status, restart the download from the beginning. + must_restart = resume_resp.status_code != HTTPStatus.PARTIAL_CONTENT + if must_restart: + download.reset_file() + download.size = _get_http_response_size(resume_resp) + first_resp = resume_resp + + self._process_response(download, resume_resp) + except (ConnectionError, ReadTimeoutError, OSError): + continue + + # No more resume attempts. Raise an error if the download is still incomplete. + if download.is_incomplete(): + os.remove(download.output_file.name) + raise IncompleteDownloadError(download) + + # If we successfully completed the download via resume, manually cache it + # as a complete response to enable future caching + if download.reattempts > 0: + self._cache_resumed_download(download, first_resp) + + def _cache_resumed_download( + self, download: _FileDownload, original_response: Response + ) -> None: + """ + Manually cache a file that was successfully downloaded via resume retries. + + cachecontrol doesn't cache 206 (Partial Content) responses, since they + are not complete files. This method manually adds the final file to the + cache as though it was downloaded in a single request, so that future + requests can use the cache. + """ + url = download.link.url_without_fragment + adapter = self._session.get_adapter(url) + + # Check if the adapter is the CacheControlAdapter (i.e. caching is enabled) + if not isinstance(adapter, CacheControlAdapter): + logger.debug( + "Skipping resume download caching: no cache controller for %s", url + ) + return + + # Check SafeFileCache is being used + assert isinstance( + adapter.cache, SafeFileCache + ), "separate body cache not in use!" + + synthetic_request = PreparedRequest() + synthetic_request.prepare(method="GET", url=url, headers={}) + + synthetic_response_headers = HTTPHeaderDict() + for key, value in original_response.headers.items(): + if key.lower() not in ["content-range", "content-length"]: + synthetic_response_headers[key] = value + synthetic_response_headers["content-length"] = str(download.size) + + synthetic_response = URLlib3Response( + body="", + headers=synthetic_response_headers, + status=200, + preload_content=False, + ) + + # Save metadata and then stream the file contents to cache. 
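+        # Two entries are written, matching SafeFileCache's layout: the
+        # serialized headers/metadata under the cache key, and the raw
+        # file bytes under the separate ".body" entry.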
+ cache_url = adapter.controller.cache_url(url) + metadata_blob = adapter.controller.serializer.dumps( + synthetic_request, synthetic_response, b"" + ) + adapter.cache.set(cache_url, metadata_blob) + download.output_file.flush() + with open(download.output_file.name, "rb") as f: + adapter.cache.set_body_from_io(cache_url, f) + + logger.debug( + "Cached resumed download as complete response for future use: %s", url + ) + + def _http_get_resume( + self, download: _FileDownload, should_match: Response + ) -> Response: + """Issue a HTTP range request to resume the download.""" + # To better understand the download resumption logic, see the mdn web docs: + # https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/Range_requests + headers = HEADERS.copy() + headers["Range"] = f"bytes={download.bytes_received}-" + # If possible, use a conditional range request to avoid corrupted + # downloads caused by the remote file changing in-between. + if identifier := _get_http_response_etag_or_last_modified(should_match): + headers["If-Range"] = identifier + return self._http_get(download.link, headers) + + def _http_get(self, link: Link, headers: Mapping[str, str] = HEADERS) -> Response: + target_url = link.url_without_fragment + try: + resp = self._session.get(target_url, headers=headers, stream=True) + raise_for_status(resp) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + return resp diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 0000000..0039833 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,215 @@ +"""Lazy ZIP over HTTP""" + +from __future__ import annotations + +__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"] + +from bisect import bisect_left, bisect_right +from collections.abc import Generator +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import NormalizedName +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks + + +class HTTPRangeRequestUnsupported(Exception): + pass + + +def dist_from_wheel_url( + name: NormalizedName, url: str, session: PipSession +) -> BaseDistribution: + """Return a distribution object from the given wheel URL. + + This uses HTTP range requests to only fetch the portion of the wheel + containing metadata, just enough for the object to be constructed. + If such requests are not supported, HTTPRangeRequestUnsupported + is raised. + """ + with LazyZipOverHTTP(url, session) as zf: + # For read-only ZIP files, ZipFile only needs methods read, + # seek, seekable and tell, not the whole IO protocol. + wheel = MemoryWheel(zf.name, zf) # type: ignore + # After context manager exit, wheel.name + # is an invalid file by intention. + return get_wheel_distribution(wheel, name) + + +class LazyZipOverHTTP: + """File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, + which is supposed to be fed to ZipFile. 
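Fetched byte ranges are recorded and merged, so overlapping reads are downloaded at most once.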
If such requests are not + supported by the server, raise HTTPRangeRequestUnsupported + during initialization. + """ + + def __init__( + self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE + ) -> None: + head = session.head(url, headers=HEADERS) + raise_for_status(head) + assert head.status_code == 200 + self._session, self._url, self._chunk_size = session, url, chunk_size + self._length = int(head.headers["Content-Length"]) + self._file = NamedTemporaryFile() + self.truncate(self._length) + self._left: list[int] = [] + self._right: list[int] = [] + if "bytes" not in head.headers.get("Accept-Ranges", "none"): + raise HTTPRangeRequestUnsupported("range request is not supported") + self._check_zip() + + @property + def mode(self) -> str: + """Opening mode, which is always rb.""" + return "rb" + + @property + def name(self) -> str: + """Path to the underlying file.""" + return self._file.name + + def seekable(self) -> bool: + """Return whether random access is supported, which is True.""" + return True + + def close(self) -> None: + """Close the file.""" + self._file.close() + + @property + def closed(self) -> bool: + """Whether the file is closed.""" + return self._file.closed + + def read(self, size: int = -1) -> bytes: + """Read up to size bytes from the object and return them. + + As a convenience, if size is unspecified or -1, + all bytes until EOF are returned. Fewer than + size bytes may be returned if EOF is reached. + """ + download_size = max(size, self._chunk_size) + start, length = self.tell(), self._length + stop = length if size < 0 else min(start + download_size, length) + start = max(0, stop - download_size) + self._download(start, stop - 1) + return self._file.read(size) + + def readable(self) -> bool: + """Return whether the file is readable, which is True.""" + return True + + def seek(self, offset: int, whence: int = 0) -> int: + """Change stream position and return the new absolute position. + + Seek to offset relative position indicated by whence: + * 0: Start of stream (the default). pos should be >= 0; + * 1: Current position - pos may be negative; + * 2: End of stream - pos usually negative. + """ + return self._file.seek(offset, whence) + + def tell(self) -> int: + """Return the current position.""" + return self._file.tell() + + def truncate(self, size: int | None = None) -> int: + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. + + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self) -> bool: + """Return False.""" + return False + + def __enter__(self) -> LazyZipOverHTTP: + self._file.__enter__() + return self + + def __exit__(self, *exc: Any) -> None: + self._file.__exit__(*exc) + + @contextmanager + def _stay(self) -> Generator[None, None, None]: + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. + """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self) -> None: + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. 
+ ZipFile(self) + except BadZipFile: + pass + else: + break + + def _stream_response( + self, start: int, end: int, base_headers: dict[str, str] = HEADERS + ) -> Response: + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() + headers["Range"] = f"bytes={start}-{end}" + # TODO: Get range requests to be correctly cached + headers["Cache-Control"] = "no-cache" + return self._session.get(self._url, headers=headers, stream=True) + + def _merge( + self, start: int, end: int, left: int, right: int + ) -> Generator[tuple[int, int], None, None]: + """Return a generator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start] + lslice[:1]) + end = max([end] + rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j - 1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start: int, end: int) -> None: + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/session.py b/venv/lib/python3.12/site-packages/pip/_internal/network/session.py new file mode 100644 index 0000000..a1f9444 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/session.py @@ -0,0 +1,528 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. +""" + +from __future__ import annotations + +import email.utils +import functools +import io +import ipaddress +import json +import logging +import mimetypes +import os +import platform +import shutil +import subprocess +import sys +import urllib.parse +import warnings +from collections.abc import Generator, Mapping, Sequence +from typing import ( + TYPE_CHECKING, + Any, + Optional, + Union, +) + +from pip._vendor import requests, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter +from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter +from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter +from pip._vendor.requests.models import PreparedRequest, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3.connectionpool import ConnectionPool +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.metadata import get_default_environment +from pip._internal.models.link import Link +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache + +# Import ssl from compat so the initial import occurs in only one place. 
+from pip._internal.utils.compat import has_tls +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import build_url_from_netloc, parse_netloc +from pip._internal.utils.urls import url_to_path + +if TYPE_CHECKING: + from ssl import SSLContext + + from pip._vendor.urllib3.poolmanager import PoolManager + from pip._vendor.urllib3.proxymanager import ProxyManager + + +logger = logging.getLogger(__name__) + +SecureOrigin = tuple[str, str, Optional[Union[int, str]]] + + +# Ignore warning raised when using --trusted-host. +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS: list[SecureOrigin] = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + "BUILD_BUILDID", + # Jenkins + "BUILD_ID", + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + "CI", + # Explicit environment variable. + "PIP_IS_CI", +) + + +def looks_like_ci() -> bool: + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +@functools.lru_cache(maxsize=1) +def user_agent() -> str: + """ + Return a string representing the user agent. 
+ """ + data: dict[str, Any] = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == "CPython": + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "PyPy": + pypy_version_info = sys.pypy_version_info # type: ignore + if pypy_version_info.releaselevel == "final": + pypy_version_info = pypy_version_info[:3] + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == "Jython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "IronPython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + + linux_distribution = distro.name(), distro.version(), distro.codename() + distro_infos: dict[str, Any] = dict( + filter( + lambda x: x[1], + zip(["name", "version", "id"], linux_distribution), + ) + ) + libc = dict( + filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + ) + ) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_dist = get_default_environment().get_distribution("setuptools") + if setuptools_dist is not None: + data["setuptools_version"] = str(setuptools_dist.version) + + if shutil.which("rustc") is not None: + # If for any reason `rustc --version` fails, silently ignore it + try: + rustc_output = subprocess.check_output( + ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 + ) + except Exception: + pass + else: + if rustc_output.startswith(b"rustc "): + # The format of `rustc --version` is: + # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'` + # We extract just the middle (1.52.1) part + data["rustc_version"] = rustc_output.split(b" ")[1].decode() + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. 
+ data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: float | tuple[float, float] | None = None, + verify: bool | str = True, + cert: str | tuple[str, str] | None = None, + proxies: Mapping[str, str] | None = None, + ) -> Response: + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + # format the exception raised as a io.BytesIO object, + # to return a better error message: + resp.status_code = 404 + resp.reason = type(exc).__name__ + resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode()) + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + } + ) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self) -> None: + pass + + +class _SSLContextAdapterMixin: + """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters. + + The additional argument is forwarded directly to the pool manager. This allows us + to dynamically decide what SSL store to use at runtime, which is used to implement + the optional ``truststore`` backend. + """ + + def __init__( + self, + *, + ssl_context: SSLContext | None = None, + **kwargs: Any, + ) -> None: + self._ssl_context = ssl_context + super().__init__(**kwargs) + + def init_poolmanager( + self, + connections: int, + maxsize: int, + block: bool = DEFAULT_POOLBLOCK, + **pool_kwargs: Any, + ) -> PoolManager: + if self._ssl_context is not None: + pool_kwargs.setdefault("ssl_context", self._ssl_context) + return super().init_poolmanager( # type: ignore[misc] + connections=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + def proxy_manager_for(self, proxy: str, **proxy_kwargs: Any) -> ProxyManager: + # Proxy manager replaces the pool manager, so inject our SSL + # context here too. 
https://github.com/pypa/pip/issues/13288 + if self._ssl_context is not None: + proxy_kwargs.setdefault("ssl_context", self._ssl_context) + return super().proxy_manager_for(proxy, **proxy_kwargs) # type: ignore[misc] + + +class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter): + pass + + +class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter): + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: bool | str, + cert: str | tuple[str, str] | None, + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: bool | str, + cert: str | tuple[str, str] | None, + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class PipSession(requests.Session): + timeout: int | None = None + + def __init__( + self, + *args: Any, + retries: int = 0, + cache: str | None = None, + trusted_hosts: Sequence[str] = (), + index_urls: list[str] | None = None, + ssl_context: SSLContext | None = None, + **kwargs: Any, + ) -> None: + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. + """ + super().__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins: list[tuple[str, int | None]] = [] + self.pip_proxy = None + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 502 may be a transient error from a CDN like CloudFlare or CloudFront + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 502, 503, 520, 527], + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) # type: ignore + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. 
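[Annotation] The adapters chosen in the branch just below are attached with `Session.mount()`, and requests resolves a URL to the adapter with the longest matching prefix. That is also why `add_trusted_host` later mounts both the "http://host/" and "http://host:" forms. A small self-contained sketch (the host name is hypothetical):

import requests

s = requests.Session()
generic = requests.adapters.HTTPAdapter()
trusted = requests.adapters.HTTPAdapter()
s.mount("https://", generic)
s.mount("https://trusted.example/", trusted)

# get_adapter() returns the adapter registered for the longest prefix
# that the URL starts with:
assert s.get_adapter("https://trusted.example/simple/") is trusted
assert s.get_adapter("https://pypi.org/simple/") is generic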
+ if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ssl_context=ssl_context, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def update_index_urls(self, new_index_urls: list[str]) -> None: + """ + :param new_index_urls: New index urls to update the authentication + handler with. + """ + self.auth.index_urls = new_index_urls + + def add_trusted_host( + self, host: str, source: str | None = None, suppress_logging: bool = False + ) -> None: + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = f"adding trusted host: {host!r}" + if source is not None: + msg += f" (from {source})" + logger.info(msg) + + parsed_host, parsed_port = parse_netloc(host) + if parsed_host is None: + raise ValueError(f"Trusted host URL must include a host part: {host!r}") + if (parsed_host, parsed_port) not in self.pip_trusted_origins: + self.pip_trusted_origins.append((parsed_host, parsed_port)) + + self.mount( + build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter + ) + self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) + if not parsed_port: + self.mount( + build_url_from_netloc(host, scheme="http") + ":", + self._trusted_host_adapter, + ) + # Mount wildcard ports for the same host. + self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter) + + def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]: + yield from SECURE_ORIGINS + for host, port in self.pip_trusted_origins: + yield ("*", host, "*" if port is None else port) + + def is_secure_origin(self, location: Link) -> bool: + # Determine if this url used a secure transport mechanism + parsed = urllib.parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, + parsed.hostname, + parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit("+", 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address(origin_host or "") + network = ipaddress.ip_network(secure_host) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host + and origin_host.lower() != secure_host.lower() + and secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. 
+ if addr not in network: + continue + + # Check to see if the port matches. + if ( + origin_port != secure_port + and secure_port != "*" + and secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response: + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + # Allow setting a default proxies on a session + kwargs.setdefault("proxies", self.proxies) + + # Dispatch the actual request + return super().request(method, url, *args, **kwargs) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/utils.py b/venv/lib/python3.12/site-packages/pip/_internal/network/utils.py new file mode 100644 index 0000000..74d3111 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,98 @@ +from collections.abc import Generator + +from pip._vendor.requests.models import Response + +from pip._internal.exceptions import NetworkConnectionError + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. +# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. +HEADERS: dict[str, str] = {"Accept-Encoding": "identity"} + +DOWNLOAD_CHUNK_SIZE = 256 * 1024 + + +def raise_for_status(resp: Response) -> None: + http_error_msg = "" + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. 
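[Annotation] Stepping back to `is_secure_origin` a little earlier: entries such as ("*", "127.0.0.0/8", "*") are matched with the stdlib ipaddress module, and the code falls back to case-insensitive hostname comparison whenever parsing fails. A quick sketch of both paths:

import ipaddress

# Loopback literals fall inside the 127.0.0.0/8 secure origin:
print(ipaddress.ip_address("127.0.0.5") in ipaddress.ip_network("127.0.0.0/8"))  # True

# "localhost" is not an IP literal, so ip_address() raises ValueError,
# which routes the check into the hostname-comparison branch instead:
try:
    ipaddress.ip_address("localhost")
except ValueError:
    print("falls back to hostname comparison")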
+ try: + reason = resp.reason.decode("utf-8") + except UnicodeDecodeError: + reason = resp.reason.decode("iso-8859-1") + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = ( + f"{resp.status_code} Client Error: {reason} for url: {resp.url}" + ) + + elif 500 <= resp.status_code < 600: + http_error_msg = ( + f"{resp.status_code} Server Error: {reason} for url: {resp.url}" + ) + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks( + response: Response, chunk_size: int = DOWNLOAD_CHUNK_SIZE +) -> Generator[bytes, None, None]: + """Given a requests Response, provide the data chunks.""" + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/venv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py b/venv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 0000000..f4bddb4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,61 @@ +"""xmlrpclib.Transport implementation""" + +import logging +import urllib.parse +import xmlrpc.client +from typing import TYPE_CHECKING + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status + +if TYPE_CHECKING: + from xmlrpc.client import _HostType, _Marshallable + + from _typeshed import SizedBuffer + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc.client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + + def __init__( + self, index_url: str, session: PipSession, use_datetime: bool = False + ) -> None: + super().__init__(use_datetime) + index_parts = urllib.parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request( + self, + host: "_HostType", + handler: str, + request_body: "SizedBuffer", + verbose: bool = False, + ) -> tuple["_Marshallable", ...]: + assert isinstance(host, str) + parts = (self._scheme, host, handler, None, None, None) + url = urllib.parse.urlunparse(parts) + try: + headers = {"Content-Type": "text/xml"} + response = self._session.post( + url, + data=request_body, + headers=headers, + stream=True, + ) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, + url, + ) + raise diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2231589 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc new file mode 100644 index 0000000..33697f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/check.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc new file mode 100644 index 0000000..ba1421a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc new file mode 100644 index 0000000..ac042d8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8091d92 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc new file mode 100644 index 0000000..83faff4 Binary 
files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc new file mode 100644 index 0000000..62d744c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc new file mode 100644 index 0000000..1d1e201 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..b6bce1d Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc new file mode 100644 index 0000000..08b84d9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/build_tracker.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/build_tracker.py new file mode 100644 index 0000000..cbb4e4f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/build_tracker.py @@ -0,0 +1,140 @@ +from __future__ import annotations + +import contextlib +import hashlib +import logging +import os +from collections.abc import Generator +from types import TracebackType + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def update_env_context_manager(**changes: str) -> Generator[None, None, None]: + target = os.environ + + # Save values from the target and change them. + non_existent_marker = object() + saved_values: dict[str, object | str] = {} + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. 
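[Annotation] In use, `update_env_context_manager` behaves like a scoped patch of `os.environ`; `get_build_tracker` below relies on it to publish the tracker root to subprocesses. A hedged usage sketch (variable value invented):

import os

with update_env_context_manager(PIP_BUILD_TRACKER="/tmp/tracker"):
    assert os.environ["PIP_BUILD_TRACKER"] == "/tmp/tracker"
# on exit, the variable is deleted again, or its previous value restored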
+ for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_build_tracker() -> Generator[BuildTracker, None, None]: + root = os.environ.get("PIP_BUILD_TRACKER") + with contextlib.ExitStack() as ctx: + if root is None: + root = ctx.enter_context(TempDirectory(kind="build-tracker")).path + ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with BuildTracker(root) as tracker: + yield tracker + + +class TrackerId(str): + """Uniquely identifying string provided to the build tracker.""" + + +class BuildTracker: + """Ensure that an sdist cannot request itself as a setup requirement. + + When an sdist is prepared, it identifies its setup requirements in the + context of ``BuildTracker.track()``. If a requirement shows up recursively, this + raises an exception. + + This stops fork bombs embedded in malicious packages.""" + + def __init__(self, root: str) -> None: + self._root = root + self._entries: dict[TrackerId, InstallRequirement] = {} + logger.debug("Created build tracker: %s", self._root) + + def __enter__(self) -> BuildTracker: + logger.debug("Entered build tracker: %s", self._root) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.cleanup() + + def _entry_path(self, key: TrackerId) -> str: + hashed = hashlib.sha224(key.encode()).hexdigest() + return os.path.join(self._root, hashed) + + def add(self, req: InstallRequirement, key: TrackerId) -> None: + """Add an InstallRequirement to build tracking.""" + + # Get the file to write information about this requirement. + entry_path = self._entry_path(key) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. + try: + with open(entry_path) as fp: + contents = fp.read() + except FileNotFoundError: + pass + else: + message = f"{req.link} is already being built: {contents}" + raise LookupError(message) + + # If we're here, req should really not be building already. + assert key not in self._entries + + # Start tracking this requirement. + with open(entry_path, "w", encoding="utf-8") as fp: + fp.write(str(req)) + self._entries[key] = req + + logger.debug("Added %s to build tracker %r", req, self._root) + + def remove(self, req: InstallRequirement, key: TrackerId) -> None: + """Remove an InstallRequirement from build tracking.""" + + # Delete the created file and the corresponding entry. + os.unlink(self._entry_path(key)) + del self._entries[key] + + logger.debug("Removed %s from build tracker %r", req, self._root) + + def cleanup(self) -> None: + for key, req in list(self._entries.items()): + self.remove(req, key) + + logger.debug("Removed build tracker: %r", self._root) + + @contextlib.contextmanager + def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]: + """Ensure that `key` cannot install itself as a setup requirement. 
+ + :raises LookupError: If `key` was already provided in a parent invocation of + the context introduced by this method.""" + tracker_id = TrackerId(key) + self.add(req, tracker_id) + yield + self.remove(req, tracker_id) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 0000000..a546809 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,38 @@ +"""Metadata generation logic for source distributions.""" + +import os + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import ( + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_metadata( + build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str +) -> str: + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that BuildBackendHookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") + with backend.subprocess_runner(runner): + try: + distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_editable.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_editable.py new file mode 100644 index 0000000..27ecd7d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/metadata_editable.py @@ -0,0 +1,41 @@ +"""Metadata generation logic for source distributions.""" + +import os + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import ( + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_editable_metadata( + build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str +) -> str: + """Generate metadata using mechanisms described in PEP 660. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that BuildBackendHookCaller implements a fallback for + # prepare_metadata_for_build_wheel/editable, so we don't have to + # consider the possibility that this hook doesn't exist. 
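[Annotation] Both metadata generators ultimately invoke the standard PEP 517/660 hooks through pyproject_hooks. A minimal sketch of the non-editable call outside pip's wrappers, assuming a build backend is installed in the running environment (paths and backend name are hypothetical):

from pyproject_hooks import BuildBackendHookCaller

hooks = BuildBackendHookCaller("/path/to/src", "setuptools.build_meta")
distinfo_dir = hooks.prepare_metadata_for_build_wheel("/tmp/metadata")
print(distinfo_dir)  # e.g. "mypkg-1.0.dist-info"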
+ runner = runner_with_spinner_message( + "Preparing editable metadata (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + assert distinfo_dir is not None + return os.path.join(metadata_dir, distinfo_dir) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 0000000..8595e45 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +import logging +import os + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name: str, + backend: BuildBackendHookCaller, + metadata_directory: str, + wheel_directory: str, +) -> str | None: + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", wheel_directory) + + runner = runner_with_spinner_message( + f"Building wheel for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + wheel_directory=wheel_directory, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error("Failed building wheel for %s", name) + return None + return os.path.join(wheel_directory, wheel_name) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_editable.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_editable.py new file mode 100644 index 0000000..f00d9b2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/build/wheel_editable.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import logging +import os + +from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_editable( + name: str, + backend: BuildBackendHookCaller, + metadata_directory: str, + wheel_directory: str, +) -> str | None: + """Build one InstallRequirement using the PEP 660 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", wheel_directory) + + runner = runner_with_spinner_message( + f"Building editable for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + wheel_name = backend.build_editable( + wheel_directory=wheel_directory, + metadata_directory=metadata_directory, + ) + except HookMissing as e: + logger.error( + "Cannot build editable %s because the build " + "backend does not have the %s hook", + name, + e, + ) + return None + except Exception: + logger.error("Failed building editable for %s", name) + return None + return os.path.join(wheel_directory, wheel_name) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/check.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/check.py new file mode 100644 index 0000000..2d71fa5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,175 @@ +"""Validation of dependencies of packages""" + +from __future__ import annotations + +import logging +from collections.abc import Generator, Iterable +from contextlib import suppress +from email.parser import Parser +from functools import reduce +from typing import ( + Callable, + NamedTuple, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.tags import Tag, parse_tag +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.metadata import get_default_environment +from pip._internal.metadata.base import BaseDistribution +from pip._internal.req.req_install import InstallRequirement + +logger = logging.getLogger(__name__) + + +class PackageDetails(NamedTuple): + version: Version + dependencies: list[Requirement] + + +# Shorthands +PackageSet = dict[NormalizedName, PackageDetails] +Missing = tuple[NormalizedName, Requirement] +Conflicting = tuple[NormalizedName, Version, Requirement] + +MissingDict = dict[NormalizedName, list[Missing]] +ConflictingDict = dict[NormalizedName, list[Conflicting]] +CheckResult = tuple[MissingDict, ConflictingDict] +ConflictDetails = tuple[PackageSet, CheckResult] + + +def create_package_set_from_installed() -> tuple[PackageSet, bool]: + """Converts a list of distributions into a PackageSet.""" + package_set = {} + problems = False + env = get_default_environment() + for dist in env.iter_installed_distributions(local_only=False, skip=()): + name = dist.canonical_name + try: + dependencies = list(dist.iter_dependencies()) + package_set[name] = PackageDetails(dist.version, dependencies) + except (OSError, ValueError) as e: + # Don't crash on unreadable or broken metadata. + logger.warning("Error parsing dependencies of %s: %s", name, e) + problems = True + return package_set, problems + + +def check_package_set( + package_set: PackageSet, should_ignore: Callable[[str], bool] | None = None +) -> CheckResult: + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. 
+ """ + + missing = {} + conflicting = {} + + for package_name, package_detail in package_set.items(): + # Info about dependencies of package_name + missing_deps: set[Missing] = set() + conflicting_deps: set[Conflicting] = set() + + if should_ignore and should_ignore(package_name): + continue + + for req in package_detail.dependencies: + name = canonicalize_name(req.name) + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate({"extra": ""}) + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install: list[InstallRequirement]) -> ConflictDetails: + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set, _ = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ), + ) + + +def check_unsupported( + packages: Iterable[BaseDistribution], + supported_tags: Iterable[Tag], +) -> Generator[BaseDistribution, None, None]: + for p in packages: + with suppress(FileNotFoundError): + wheel_file = p.read_text("WHEEL") + wheel_tags: frozenset[Tag] = reduce( + frozenset.union, + map(parse_tag, Parser().parsestr(wheel_file).get_all("Tag", [])), + frozenset(), + ) + if wheel_tags.isdisjoint(supported_tags): + yield p + + +def _simulate_installation_of( + to_install: list[InstallRequirement], package_set: PackageSet +) -> set[NormalizedName]: + """Computes the version of packages after installing to_install.""" + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_metadata_distribution() + name = dist.canonical_name + package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies())) + + installed.add(name) + + return installed + + +def _create_whitelist( + would_be_installed: set[NormalizedName], package_set: PackageSet +) -> set[NormalizedName]: + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].dependencies: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py new file mode 100644 index 0000000..486a833 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/freeze.py @@ -0,0 +1,259 @@ +from __future__ import annotations + +import collections +import logging +import os +from collections.abc import Container, 
Generator, Iterable +from dataclasses import dataclass, field +from typing import NamedTuple + +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import InvalidVersion + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference + +logger = logging.getLogger(__name__) + + +class _EditableInfo(NamedTuple): + requirement: str + comments: list[str] + + +def freeze( + requirement: list[str] | None = None, + local_only: bool = False, + user_only: bool = False, + paths: list[str] | None = None, + isolated: bool = False, + exclude_editable: bool = False, + skip: Container[str] = (), +) -> Generator[str, None, None]: + installations: dict[str, FrozenRequirement] = {} + + dists = get_environment(paths).iter_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + ) + for dist in dists: + req = FrozenRequirement.from_dist(dist) + if exclude_editable and req.editable: + continue + installations[req.canonical_name] = req + + if requirement: + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options: set[str] = set() + # keep track of which files a requirement is in so that we can + # give an accurate warning if a requirement appears multiple times. 
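[Annotation] A note on the editable-line handling a few lines further down: both spellings of the flag are stripped to the bare requirement before re-parsing, including the "--editable=path" form. A quick check of that normalization (paths invented):

for line in ("-e ./pkg", "--editable=./pkg", "--editable ./pkg"):
    if line.startswith("-e"):
        req = line[2:].strip()
    else:
        req = line[len("--editable"):].strip().lstrip("=")
    print(req)  # "./pkg" in all three cases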
+ req_files: dict[str, list[str]] = collections.defaultdict(list) + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if ( + not line.strip() + or line.strip().startswith("#") + or line.startswith( + ( + "-r", + "--requirement", + "-f", + "--find-links", + "-i", + "--index-url", + "--pre", + "--trusted-host", + "--process-dependency-links", + "--extra-index-url", + "--use-feature", + ) + ) + ): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith(("-e", "--editable")): + if line.startswith("-e"): + line = line[2:].strip() + else: + line = line[len("--editable") :].strip().lstrip("=") + line_req = install_req_from_editable( + line, + isolated=isolated, + ) + else: + line_req = install_req_from_line( + COMMENT_RE.sub("", line).strip(), + isolated=isolated, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, + line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + else: + line_req_canonical_name = canonicalize_name(line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub("", line).strip(), + line_req.name, + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different requirements files). + for name, files in req_files.items(): + if len(files) > 1: + logger.warning( + "Requirement %s included multiple times [%s]", + name, + ", ".join(sorted(set(files))), + ) + + yield ("## The following requirements were added by pip freeze:") + for installation in sorted(installations.values(), key=lambda x: x.name.lower()): + if installation.canonical_name not in skip: + yield str(installation).rstrip() + + +def _format_as_name_version(dist: BaseDistribution) -> str: + try: + dist_version = dist.version + except InvalidVersion: + # legacy version + return f"{dist.raw_name}==={dist.raw_version}" + else: + return f"{dist.raw_name}=={dist_version}" + + +def _get_editable_info(dist: BaseDistribution) -> _EditableInfo: + """ + Compute and return values (req, comments) for use in + FrozenRequirement.from_dist(). 
+ """ + editable_project_location = dist.editable_project_location + assert editable_project_location + location = os.path.normcase(os.path.abspath(editable_project_location)) + + from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs + + vcs_backend = vcs.get_backend_for_dir(location) + + if vcs_backend is None: + display = _format_as_name_version(dist) + logger.debug( + 'No VCS found for editable requirement "%s" in: %r', + display, + location, + ) + return _EditableInfo( + requirement=location, + comments=[f"# Editable install with no version control ({display})"], + ) + + vcs_name = type(vcs_backend).__name__ + + try: + req = vcs_backend.get_src_requirement(location, dist.raw_name) + except RemoteNotFoundError: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[f"# Editable {vcs_name} install with no remote ({display})"], + ) + except RemoteNotValidError as ex: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[ + f"# Editable {vcs_name} install ({display}) with either a deleted " + f"local remote or invalid URI:", + f"# '{ex.url}'", + ], + ) + except BadCommand: + logger.warning( + "cannot determine version of editable source in %s " + "(%s command not found in path)", + location, + vcs_backend.name, + ) + return _EditableInfo(requirement=location, comments=[]) + except InstallationError as exc: + logger.warning("Error when trying to get requirement for VCS system %s", exc) + else: + return _EditableInfo(requirement=req, comments=[]) + + logger.warning("Could not determine repository location of %s", location) + + return _EditableInfo( + requirement=location, + comments=["## !! Could not determine repository location"], + ) + + +@dataclass(frozen=True) +class FrozenRequirement: + name: str + req: str + editable: bool + comments: Iterable[str] = field(default_factory=tuple) + + @property + def canonical_name(self) -> NormalizedName: + return canonicalize_name(self.name) + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> FrozenRequirement: + editable = dist.editable + if editable: + req, comments = _get_editable_info(dist) + else: + comments = [] + direct_url = dist.direct_url + if direct_url: + # if PEP 610 metadata is present, use it + req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name) + else: + # name==version requirement + req = _format_as_name_version(dist) + + return cls(dist.raw_name, req, editable, comments=comments) + + def __str__(self) -> str: + req = self.req + if self.editable: + req = f"-e {req}" + return "\n".join(list(self.comments) + [str(req)]) + "\n" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 0000000..2645a4a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1 @@ +"""For modules related to installing packages.""" diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..43a45d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..52bab8d Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 0000000..2724f15 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,746 @@ +"""Support for installing and building the "wheel" binary package format.""" + +from __future__ import annotations + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import textwrap +import warnings +from base64 import urlsafe_b64encode +from collections.abc import Generator, Iterable, Iterator, Sequence +from email.message import Message +from itertools import chain, filterfalse, starmap +from typing import ( + IO, + Any, + BinaryIO, + Callable, + NewType, + Protocol, + Union, + cast, +) +from zipfile import ZipFile, ZipInfo + +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import StreamWrapper, ensure_dir, hash_file, partition +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import parse_wheel + + +class File(Protocol): + src_record_path: RecordPath + dest_path: str + changed: bool + + def save(self) -> None: + pass + + +logger = logging.getLogger(__name__) + +RecordPath = NewType("RecordPath", str) +InstalledCSVRow = tuple[RecordPath, str, Union[int, str]] + + +def rehash(path: str, blocksize: int = 1 << 20) -> tuple[str, str]: + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") + return (digest, str(length)) + + +def csv_io_kwargs(mode: str) -> dict[str, Any]: + """Return keyword arguments to properly open a CSV file + in the given mode. + """ + return {"mode": mode, "newline": "", "encoding": "utf-8"} + + +def fix_script(path: str) -> bool: + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, "rb") as script: + firstline = script.readline() + if not firstline.startswith(b"#!python"): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b"#!" 
+ exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, "wb") as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata: Message) -> bool: + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(dist: BaseDistribution) -> tuple[dict[str, str], dict[str, str]]: + console_scripts = {} + gui_scripts = {} + for entry_point in dist.iter_entry_points(): + if entry_point.group == "console_scripts": + console_scripts[entry_point.name] = entry_point.value + elif entry_point.group == "gui_scripts": + gui_scripts[entry_point.name] = entry_point.value + return console_scripts, gui_scripts + + +def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> str | None: + """Determine if any scripts are not on PATH and format a warning. + Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir: dict[str, set[str]] = collections.defaultdict(set) + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(os.path.normpath(i)).rstrip(os.sep) + for i in os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append( + os.path.normcase(os.path.normpath(os.path.dirname(sys.executable))) + ) + warn_for: dict[str, set[str]] = { + parent_dir: scripts + for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs + } + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts: list[str] = sorted(dir_scripts) + if len(sorted_scripts) == 1: + start_text = f"script {sorted_scripts[0]} is" + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + f"The {start_text} installed in '{parent_dir}' which is not on PATH." + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows( + outrows: Iterable[InstalledCSVRow], +) -> list[tuple[str, str, str]]: + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). 
For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. + # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (record_path, hash_, str(size)) for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str: + return os.path.join(lib_dir, record_path) + + +def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower(): + path = os.path.relpath(path, lib_dir) + + path = path.replace(os.path.sep, "/") + return cast("RecordPath", path) + + +def get_csv_rows_for_installed( + old_csv_rows: list[list[str]], + installed: dict[RecordPath, RecordPath], + changed: set[RecordPath], + generated: list[str], + lib_dir: str, +) -> list[InstalledCSVRow]: + """ + :param installed: A map from archive RECORD path to installation RECORD + path. + """ + installed_rows: list[InstalledCSVRow] = [] + for row in old_csv_rows: + if len(row) > 3: + logger.warning("RECORD line has more than three elements: %s", row) + old_record_path = cast("RecordPath", row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir)) + else: + digest = row[1] if len(row) > 1 else "" + length = row[2] if len(row) > 2 else "" + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + return installed_rows + [ + (installed_record_path, "", "") for installed_record_path in installed.values() + ] + + +def get_console_script_specs(console: dict[str, str]) -> list[str]: + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points. + # Currently, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. 
As a workaround, we + # override the versioned entry points in the wheel and generate the + # correct ones. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop("pip", None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("pip = " + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}") + + scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop("easy_install", None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("easy_install = " + easy_install_script) + + scripts_to_generate.append( + f"easy_install-{get_major_minor_version()} = {easy_install_script}" + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap("{} = {}".format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile: + def __init__( + self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile + ) -> None: + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def _getinfo(self) -> ZipInfo: + return self._zip_file.getinfo(self.src_record_path) + + def save(self) -> None: + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + zipinfo = self._getinfo() + + # optimization: the file is created by open(), + # skip the decompression when there is 0 bytes to decompress. 
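+        # The copy below streams through a buffer of at most 1 MiB
+        # (min(file_size, 1024 * 1024)), so large archive members are
+        # extracted in bounded memory rather than read in a single chunk.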
+ with open(self.dest_path, "wb") as dest: + if zipinfo.file_size > 0: + with self._zip_file.open(zipinfo) as f: + blocksize = min(zipinfo.file_size, 1024 * 1024) + shutil.copyfileobj(f, dest, blocksize) + + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile: + def __init__(self, file: File) -> None: + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self) -> None: + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point: str) -> None: + super().__init__( + f"Invalid script entry point: {entry_point} - A callable " + "suffix is required. See https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information." + ) + + +def _raise_for_invalid_entrypoint(specification: str) -> None: + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + # Override distlib's default script template with one that + # doesn't import `re` module, allowing scripts to load faster. + script_template = textwrap.dedent( + """\ + import sys + from %(module)s import %(import_name)s + if __name__ == '__main__': + if sys.argv[0].endswith('.exe'): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(%(func)s()) +""" + ) + + def make( + self, specification: str, options: dict[str, Any] | None = None + ) -> list[str]: + _raise_for_invalid_entrypoint(specification) + return super().make(specification, options) + + +def _install_wheel( # noqa: C901, PLR0915 function is too long + name: str, + wheel_zip: ZipFile, + wheel_path: str, + scheme: Scheme, + pycompile: bool = True, + warn_script_location: bool = True, + direct_url: DirectUrl | None = None, + requested: bool = False, +) -> None: + """Install a wheel. + + :param name: Name of the project to install + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! 
line typically)
+    # generated = files newly generated during the install (script wrappers)
+    installed: dict[RecordPath, RecordPath] = {}
+    changed: set[RecordPath] = set()
+    generated: list[str] = []
+
+    def record_installed(
+        srcfile: RecordPath, destfile: str, modified: bool = False
+    ) -> None:
+        """Map archive RECORD paths to installation RECORD paths."""
+        newpath = _fs_to_record_path(destfile, lib_dir)
+        installed[srcfile] = newpath
+        if modified:
+            changed.add(newpath)
+
+    def is_dir_path(path: RecordPath) -> bool:
+        return path.endswith("/")
+
+    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
+        if not is_within_directory(dest_dir_path, target_path):
+            message = (
+                "The wheel {!r} has a file {!r} trying to install"
+                " outside the target directory {!r}"
+            )
+            raise InstallationError(
+                message.format(wheel_path, target_path, dest_dir_path)
+            )
+
+    def root_scheme_file_maker(
+        zip_file: ZipFile, dest: str
+    ) -> Callable[[RecordPath], File]:
+        def make_root_scheme_file(record_path: RecordPath) -> File:
+            normed_path = os.path.normpath(record_path)
+            dest_path = os.path.join(dest, normed_path)
+            assert_no_path_traversal(dest, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_root_scheme_file
+
+    def data_scheme_file_maker(
+        zip_file: ZipFile, scheme: Scheme
+    ) -> Callable[[RecordPath], File]:
+        scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
+
+        def make_data_scheme_file(record_path: RecordPath) -> File:
+            normed_path = os.path.normpath(record_path)
+            try:
+                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
+            except ValueError:
+                message = (
+                    f"Unexpected file in {wheel_path}: {record_path!r}. .data directory"
+                    " contents should be named like: '<scheme key>/<path>'."
+                )
+                raise InstallationError(message)
+
+            try:
+                scheme_path = scheme_paths[scheme_key]
+            except KeyError:
+                valid_scheme_keys = ", ".join(sorted(scheme_paths))
+                message = (
+                    f"Unknown scheme key used in {wheel_path}: {scheme_key} "
+                    f"(for file {record_path!r}). 
.data directory contents " + f"should be in subdirectories named with a valid scheme " + f"key ({valid_scheme_keys})" + ) + raise InstallationError(message) + + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path: RecordPath) -> bool: + return path.split("/", 1)[0].endswith(".data") + + paths = cast(list[RecordPath], wheel_zip.namelist()) + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths) + + make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir) + files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path: RecordPath) -> bool: + parts = path.split("/", 2) + return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = get_wheel_distribution( + FilesystemWheel(wheel_path), + canonicalize_name(name), + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file: File) -> bool: + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith(".exe"): + matchname = name[:-4] + elif name.lower().endswith("-script.py"): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return matchname in console or matchname in gui + + script_scheme_files: Iterator[File] = map( + make_data_scheme_file, script_scheme_paths + ) + script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + existing_parents = set() + for file in files: + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(file.dest_path) + if parent_dir not in existing_parents: + ensure_dir(parent_dir) + existing_parents.add(parent_dir) + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths() -> Generator[str, None, None]: + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). + # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. 
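+        # A path can be recorded twice (e.g. a .data entry resolving to the
+        # same destination as a wheel-root entry), hence the set() below.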
+ for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith(".py"): + continue + yield full_installed_path + + def pyc_output_path(path: str) -> str: + """Return the path the pyc file would have been written to.""" + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with contextlib.redirect_stdout( + StreamWrapper.from_stream(sys.stdout) + ) as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + for path in pyc_source_file_paths(): + success = compileall.compile_file(path, force=True, quiet=True) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {""} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, "INSTALLER") + with _generate_file(installer_path) as installer_file: + installer_file.write(b"pip\n") + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, "REQUESTED") + with open(requested_path, "wb"): + pass + generated.append(requested_path) + + record_text = distribution.read_text("RECORD") + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir, + ) + + # Record details of all files installed + record_path = os.path.join(dest_info_dir, "RECORD") + + with _generate_file(record_path, **csv_io_kwargs("w")) as record_file: + # 
Explicitly cast to typing.IO[str] as a workaround for the mypy error: + # "writer" has incompatible type "BinaryIO"; expected "_Writer" + writer = csv.writer(cast("IO[str]", record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description: str) -> Generator[None, None, None]: + try: + yield + except InstallationError as e: + message = f"For req: {req_description}. {e.args[0]}" + raise InstallationError(message) from e + + +def install_wheel( + name: str, + wheel_path: str, + scheme: Scheme, + req_description: str, + pycompile: bool = True, + warn_script_location: bool = True, + direct_url: DirectUrl | None = None, + requested: bool = False, +) -> None: + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py b/venv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 0000000..a72e0e4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,748 @@ +"""Prepares a distribution for installation""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +from __future__ import annotations + +import mimetypes +import os +import shutil +from collections.abc import Iterable +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.build_env import BuildEnvironmentInstaller +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.distributions.installed import InstalledDistribution +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + MetadataInconsistent, + NetworkConnectionError, + VcsHashUnsupported, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_metadata_distribution +from pip._internal.models.direct_url import ArchiveInfo +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.network.download import Downloader +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.network.session import PipSession +from pip._internal.operations.build.build_tracker import BuildTracker +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils._log import getLogger +from pip._internal.utils.direct_url_helpers import ( + direct_url_for_editable, + direct_url_from_link, +) +from pip._internal.utils.hashes import Hashes, MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + display_path, + hash_file, + hide_url, + redact_auth_from_requirement, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.unpacking import unpack_file +from pip._internal.vcs import vcs + +if TYPE_CHECKING: + from pip._internal.cli.progress_bars import BarType + +logger = getLogger(__name__) + + +def _get_prepared_distribution( + req: InstallRequirement, + build_tracker: BuildTracker, 
+ build_env_installer: BuildEnvironmentInstaller, + build_isolation: bool, + check_build_deps: bool, +) -> BaseDistribution: + """Prepare a distribution for installation.""" + abstract_dist = make_distribution_for_install_requirement(req) + tracker_id = abstract_dist.build_tracker_id + if tracker_id is not None: + with build_tracker.track(req, tracker_id): + abstract_dist.prepare_distribution_metadata( + build_env_installer, build_isolation, check_build_deps + ) + return abstract_dist.get_metadata_distribution() + + +def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity) + + +@dataclass +class File: + path: str + content_type: str | None = None + + def __post_init__(self) -> None: + if self.content_type is None: + # Try to guess the file's MIME type. If the system MIME tables + # can't be loaded, give up. + try: + self.content_type = mimetypes.guess_type(self.path)[0] + except OSError: + pass + + +def get_http_url( + link: Link, + download: Downloader, + download_dir: str | None = None, + hashes: Hashes | None = None, +) -> File: + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = None + else: + # let's download to a tmp dir + from_path, content_type = download(link, temp_dir.path) + if hashes: + hashes.check_against_path(from_path) + + return File(from_path, content_type) + + +def get_file_url( + link: Link, download_dir: str | None = None, hashes: Hashes | None = None +) -> File: + """Get file and optionally check its hash.""" + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + return File(from_path, None) + + +def unpack_url( + link: Link, + location: str, + download: Downloader, + verbosity: int, + download_dir: str | None = None, + hashes: Hashes | None = None, +) -> File | None: + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location, verbosity=verbosity) + return None + + assert not link.is_existing_dir() + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + download, + download_dir, + hashes=hashes, + ) + + # unpack the archive to the build dir location. 
even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) + + return file + + +def _check_download_dir( + link: Link, + download_dir: str, + hashes: Hashes | None, + warn_on_hash_mismatch: bool = True, +) -> str | None: + """Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info("File was already downloaded %s", download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + if warn_on_hash_mismatch: + logger.warning( + "Previously-downloaded file %s has bad hash. Re-downloading.", + download_path, + ) + os.unlink(download_path) + return None + return download_path + + +class RequirementPreparer: + """Prepares a Requirement""" + + def __init__( # noqa: PLR0913 (too many parameters) + self, + *, + build_dir: str, + download_dir: str | None, + src_dir: str, + build_isolation: bool, + build_isolation_installer: BuildEnvironmentInstaller, + check_build_deps: bool, + build_tracker: BuildTracker, + session: PipSession, + progress_bar: BarType, + finder: PackageFinder, + require_hashes: bool, + use_user_site: bool, + lazy_wheel: bool, + verbosity: int, + legacy_resolver: bool, + resume_retries: int, + ) -> None: + super().__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.build_tracker = build_tracker + self._session = session + self._download = Downloader(session, progress_bar, resume_retries) + self.finder = finder + + # Where still-packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Is build isolation allowed? + self.build_isolation = build_isolation + self.build_env_installer = build_isolation_installer + + # Should check build dependencies? + self.check_build_deps = check_build_deps + + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + + # Should wheels be downloaded lazily? + self.use_lazy_wheel = lazy_wheel + + # How verbose should underlying tooling be? + self.verbosity = verbosity + + # Are we using the legacy resolver? + self.legacy_resolver = legacy_resolver + + # Memoized downloaded files, as mapping of url: path. 
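+        # (consulted before fetching, so a given URL is downloaded at most
+        # once per pip invocation)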
+ self._downloaded: dict[str, str] = {} + + # Previous "header" printed for a link-based InstallRequirement + self._previous_requirement_header = ("", "") + + def _log_preparing_link(self, req: InstallRequirement) -> None: + """Provide context for the requirement being prepared.""" + if req.link.is_file and not req.is_wheel_from_cache: + message = "Processing %s" + information = str(display_path(req.link.file_path)) + else: + message = "Collecting %s" + information = redact_auth_from_requirement(req.req) if req.req else str(req) + + # If we used req.req, inject requirement source if available (this + # would already be included if we used req directly) + if req.req and req.comes_from: + if isinstance(req.comes_from, str): + comes_from: str | None = req.comes_from + else: + comes_from = req.comes_from.from_path() + if comes_from: + information += f" (from {comes_from})" + + if (message, information) != self._previous_requirement_header: + self._previous_requirement_header = (message, information) + logger.info(message, information) + + if req.is_wheel_from_cache: + with indent_log(): + logger.info("Using cached %s", req.link.filename) + + def _ensure_link_req_src_dir( + self, req: InstallRequirement, parallel_builds: bool + ) -> None: + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. + if req.link.is_wheel: + # We don't need to unpack wheels, so no need for a source + # directory. + return + assert req.source_dir is None + if req.link.is_existing_dir(): + # build local directories in-tree + req.source_dir = req.link.file_path + return + + # We always delete unpacked sdists after pip runs. + req.ensure_has_source_dir( + self.build_dir, + autodelete=True, + parallel_builds=parallel_builds, + ) + req.ensure_pristine_source_checkout() + + def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: + # By the time this is called, the requirement's link should have + # been checked so we can tell what kind of requirements req is + # and raise some more informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) + if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if not req.is_direct and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. 
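+        # (MissingHashes matches nothing; the first check computes the
+        # file's actual sha256 and raises HashMissing carrying that value,
+        # which is the hash the user should pin)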
+ return req.hashes(trust_internet=False) or MissingHashes() + + def _fetch_metadata_only( + self, + req: InstallRequirement, + ) -> BaseDistribution | None: + if self.legacy_resolver: + logger.debug( + "Metadata-only fetching is not used in the legacy resolver", + ) + return None + if self.require_hashes: + logger.debug( + "Metadata-only fetching is not used as hash checking is required", + ) + return None + # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable. + return self._fetch_metadata_using_link_data_attr( + req + ) or self._fetch_metadata_using_lazy_wheel(req.link) + + def _fetch_metadata_using_link_data_attr( + self, + req: InstallRequirement, + ) -> BaseDistribution | None: + """Fetch metadata from the data-dist-info-metadata attribute, if possible.""" + # (1) Get the link to the metadata file, if provided by the backend. + metadata_link = req.link.metadata_link() + if metadata_link is None: + return None + assert req.req is not None + logger.verbose( + "Obtaining dependency information for %s from %s", + req.req, + metadata_link, + ) + # (2) Download the contents of the METADATA file, separate from the dist itself. + metadata_file = get_http_url( + metadata_link, + self._download, + hashes=metadata_link.as_hashes(), + ) + with open(metadata_file.path, "rb") as f: + metadata_contents = f.read() + # (3) Generate a dist just from those file contents. + metadata_dist = get_metadata_distribution( + metadata_contents, + req.link.filename, + req.req.name, + ) + # (4) Ensure the Name: field from the METADATA file matches the name from the + # install requirement. + # + # NB: raw_name will fall back to the name from the install requirement if + # the Name: field is not present, but it's noted in the raw_name docstring + # that that should NEVER happen anyway. + if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name): + raise MetadataInconsistent( + req, "Name", req.req.name, metadata_dist.raw_name + ) + return metadata_dist + + def _fetch_metadata_using_lazy_wheel( + self, + link: Link, + ) -> BaseDistribution | None: + """Fetch metadata using lazy wheel, if possible.""" + # --use-feature=fast-deps must be provided. + if not self.use_lazy_wheel: + return None + if link.is_file or not link.is_wheel: + logger.debug( + "Lazy wheel is not used as %r does not point to a remote wheel", + link, + ) + return None + + wheel = Wheel(link.filename) + name = wheel.name + logger.info( + "Obtaining dependency information from %s %s", + name, + wheel.version, + ) + url = link.url.split("#", 1)[0] + try: + return dist_from_wheel_url(name, url, self._session) + except HTTPRangeRequestUnsupported: + logger.debug("%s does not support range requests", url) + return None + + def _complete_partial_requirements( + self, + partially_downloaded_reqs: Iterable[InstallRequirement], + parallel_builds: bool = False, + ) -> None: + """Download any requirements which were only fetched by metadata.""" + # Download to a temporary directory. These will be copied over as + # needed for downstream 'download', 'wheel', and 'install' commands. + temp_dir = TempDirectory(kind="unpack", globally_managed=True).path + + # Map each link to the requirement that owns it. This allows us to set + # `req.local_file_path` on the appropriate requirement after passing + # all the links at once into BatchDownloader. 
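+        # (a plain dict preserves insertion order, so the batch download
+        # requests links in the order the requirements were encountered)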
+ links_to_fully_download: dict[Link, InstallRequirement] = {} + for req in partially_downloaded_reqs: + assert req.link + links_to_fully_download[req.link] = req + + batch_download = self._download.batch(links_to_fully_download.keys(), temp_dir) + for link, (filepath, _) in batch_download: + logger.debug("Downloading link %s to %s", link, filepath) + req = links_to_fully_download[link] + # Record the downloaded file path so wheel reqs can extract a Distribution + # in .get_dist(). + req.local_file_path = filepath + # Record that the file is downloaded so we don't do it again in + # _prepare_linked_requirement(). + self._downloaded[req.link.url] = filepath + + # If this is an sdist, we need to unpack it after downloading, but the + # .source_dir won't be set up until we are in _prepare_linked_requirement(). + # Add the downloaded archive to the install requirement to unpack after + # preparing the source dir. + if not req.is_wheel: + req.needs_unpacked_archive(Path(filepath)) + + # This step is necessary to ensure all lazy wheels are processed + # successfully by the 'download', 'wheel', and 'install' commands. + for req in partially_downloaded_reqs: + self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool = False + ) -> BaseDistribution: + """Prepare a requirement to be obtained from req.link.""" + assert req.link + self._log_preparing_link(req) + with indent_log(): + # Check if the relevant file is already available + # in the download directory + file_path = None + if self.download_dir is not None and req.link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir( + req.link, + self.download_dir, + hashes, + # When a locally built wheel has been found in cache, we don't warn + # about re-downloading when the already downloaded wheel hash does + # not match. This is because the hash must be checked against the + # original link, not the cached link. It that case the already + # downloaded file will be removed and re-fetched from cache (which + # implies a hash check against the cache entry's origin.json). + warn_on_hash_mismatch=not req.is_wheel_from_cache, + ) + + if file_path is not None: + # The file is already available, so mark it as downloaded + self._downloaded[req.link.url] = file_path + else: + # The file is not available, attempt to fetch only metadata + metadata_dist = self._fetch_metadata_only(req) + if metadata_dist is not None: + req.needs_more_preparation = True + req.set_dist(metadata_dist) + # Ensure download_info is available even in dry-run mode + if req.download_info is None: + req.download_info = direct_url_from_link( + req.link, req.source_dir + ) + return metadata_dist + + # None of the optimizations worked, fully prepare the requirement + return self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirements_more( + self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False + ) -> None: + """Prepare linked requirements more, if needed.""" + reqs = [req for req in reqs if req.needs_more_preparation] + for req in reqs: + # Determine if any of these requirements were already downloaded. 
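+            # (the same download-dir fast path taken in
+            # prepare_linked_requirement(), re-checked here for requirements
+            # whose full preparation was deferred)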
+ if self.download_dir is not None and req.link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + if file_path is not None: + self._downloaded[req.link.url] = file_path + req.needs_more_preparation = False + + # Prepare requirements we found were already downloaded for some + # reason. The other downloads will be completed separately. + partially_downloaded_reqs: list[InstallRequirement] = [] + for req in reqs: + if req.needs_more_preparation: + partially_downloaded_reqs.append(req) + else: + self._prepare_linked_requirement(req, parallel_builds) + + # TODO: separate this part out from RequirementPreparer when the v1 + # resolver can be removed! + self._complete_partial_requirements( + partially_downloaded_reqs, + parallel_builds=parallel_builds, + ) + + def _prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool + ) -> BaseDistribution: + assert req.link + link = req.link + + hashes = self._get_linked_req_hashes(req) + + if hashes and req.is_wheel_from_cache: + assert req.download_info is not None + assert link.is_wheel + assert link.is_file + # We need to verify hashes, and we have found the requirement in the cache + # of locally built wheels. + if ( + isinstance(req.download_info.info, ArchiveInfo) + and req.download_info.info.hashes + and hashes.has_one_of(req.download_info.info.hashes) + ): + # At this point we know the requirement was built from a hashable source + # artifact, and we verified that the cache entry's hash of the original + # artifact matches one of the hashes we expect. We don't verify hashes + # against the cached wheel, because the wheel is not the original. + hashes = None + else: + logger.warning( + "The hashes of the source archive found in cache entry " + "don't match, ignoring cached built wheel " + "and re-downloading source." + ) + req.link = req.cached_wheel_source_link + link = req.link + + self._ensure_link_req_src_dir(req, parallel_builds) + + if link.is_existing_dir(): + local_file = None + elif link.url not in self._downloaded: + try: + local_file = unpack_url( + link, + req.source_dir, + self._download, + self.verbosity, + self.download_dir, + hashes, + ) + except NetworkConnectionError as exc: + raise InstallationError( + f"Could not install requirement {req} because of HTTP " + f"error {exc} for URL {link}" + ) + else: + file_path = self._downloaded[link.url] + if hashes: + hashes.check_against_path(file_path) + local_file = File(file_path, content_type=None) + + # If download_info is set, we got it from the wheel cache. + if req.download_info is None: + # Editables don't go through this function (see + # prepare_editable_requirement). + assert not req.editable + req.download_info = direct_url_from_link(link, req.source_dir) + # Make sure we have a hash in download_info. If we got it as part of the + # URL, it will have been verified and we can rely on it. Otherwise we + # compute it from the downloaded file. + # FIXME: https://github.com/pypa/pip/issues/11943 + if ( + isinstance(req.download_info.info, ArchiveInfo) + and not req.download_info.info.hashes + and local_file + ): + hash = hash_file(local_file.path)[0].hexdigest() + # We populate info.hash for backward compatibility. + # This will automatically populate info.hashes. + req.download_info.info.hash = f"sha256={hash}" + + # For use in later processing, + # preserve the file path on the requirement. 
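+        # (save_linked_requirement() later copies from req.local_file_path
+        # when pip is asked to keep the downloaded archive)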
+ if local_file: + req.local_file_path = local_file.path + + dist = _get_prepared_distribution( + req, + self.build_tracker, + self.build_env_installer, + self.build_isolation, + self.check_build_deps, + ) + return dist + + def save_linked_requirement(self, req: InstallRequirement) -> None: + assert self.download_dir is not None + assert req.link is not None + link = req.link + if link.is_vcs or (link.is_existing_dir() and req.editable): + # Make a .zip of the source_dir we already created. + req.archive(self.download_dir) + return + + if link.is_existing_dir(): + logger.debug( + "Not copying link to destination directory " + "since it is a directory: %s", + link, + ) + return + if req.local_file_path is None: + # No distribution was downloaded for this requirement. + return + + download_location = os.path.join(self.download_dir, link.filename) + if not os.path.exists(download_location): + shutil.copy(req.local_file_path, download_location) + download_path = display_path(download_location) + logger.info("Saved %s", download_path) + + def prepare_editable_requirement( + self, + req: InstallRequirement, + ) -> BaseDistribution: + """Prepare an editable requirement.""" + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info("Obtaining %s", req) + + with indent_log(): + if self.require_hashes: + raise InstallationError( + f"The editable requirement {req} cannot be installed when " + "requiring hashes, because there is no single file to " + "hash." + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable() + assert req.source_dir + req.download_info = direct_url_for_editable(req.unpacked_source_directory) + + dist = _get_prepared_distribution( + req, + self.build_tracker, + self.build_env_installer, + self.build_isolation, + self.check_build_deps, + ) + + req.check_if_exists(self.use_user_site) + + return dist + + def prepare_installed_requirement( + self, + req: InstallRequirement, + skip_reason: str, + ) -> BaseDistribution: + """Prepare an already-installed requirement.""" + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + f"is set to {req.satisfied_by}" + ) + logger.info( + "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if self.require_hashes: + logger.debug( + "Since it is already installed, we are trusting this " + "package without checking its hash. To ensure a " + "completely repeatable environment, install into an " + "empty virtualenv." 
+ ) + return InstalledDistribution(req).get_metadata_distribution() diff --git a/venv/lib/python3.12/site-packages/pip/_internal/pyproject.py b/venv/lib/python3.12/site-packages/pip/_internal/pyproject.py new file mode 100644 index 0000000..8c2f722 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/pyproject.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import os +from collections import namedtuple +from typing import Any + +from pip._vendor.packaging.requirements import InvalidRequirement + +from pip._internal.exceptions import ( + InstallationError, + InvalidPyProjectBuildRequires, + MissingPyProjectBuildRequires, +) +from pip._internal.utils.compat import tomllib +from pip._internal.utils.packaging import get_requirement + + +def _is_list_of_str(obj: Any) -> bool: + return isinstance(obj, list) and all(isinstance(item, str) for item in obj) + + +def make_pyproject_path(unpacked_source_directory: str) -> str: + return os.path.join(unpacked_source_directory, "pyproject.toml") + + +BuildSystemDetails = namedtuple( + "BuildSystemDetails", ["requires", "backend", "check", "backend_path"] +) + + +def load_pyproject_toml( + pyproject_toml: str, setup_py: str, req_name: str +) -> BuildSystemDetails: + """Load the pyproject.toml file. + + Parameters: + pyproject_toml - Location of the project's pyproject.toml file + setup_py - Location of the project's setup.py file + req_name - The name of the requirement we're processing (for + error reporting) + + Returns: + None if we should use the legacy code path, otherwise a tuple + ( + requirements from pyproject.toml, + name of PEP 517 backend, + requirements we should check are installed after setting + up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. + ) + """ + has_pyproject = os.path.isfile(pyproject_toml) + has_setup = os.path.isfile(setup_py) + + if not has_pyproject and not has_setup: + raise InstallationError( + f"{req_name} does not appear to be a Python project: " + f"neither 'setup.py' nor 'pyproject.toml' found." + ) + + if has_pyproject: + with open(pyproject_toml, encoding="utf-8") as f: + pp_toml = tomllib.loads(f.read()) + build_system = pp_toml.get("build-system") + else: + build_system = None + + if build_system is None: + # In the absence of any explicit backend specification, we + # assume the setuptools backend that most closely emulates the + # traditional direct setup.py execution, and require wheel and + # a version of setuptools that supports that backend. + + build_system = { + "requires": ["setuptools>=40.8.0"], + "build-backend": "setuptools.build_meta:__legacy__", + } + + # Ensure that the build-system section in pyproject.toml conforms + # to PEP 518. 
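+    # A conforming table looks like this (illustrative values):
+    #
+    #     [build-system]
+    #     requires = ["setuptools>=40.8.0", "wheel"]
+    #     build-backend = "setuptools.build_meta"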
+ + # Specifying the build-system table but not the requires key is invalid + if "requires" not in build_system: + raise MissingPyProjectBuildRequires(package=req_name) + + # Error out if requires is not a list of strings + requires = build_system["requires"] + if not _is_list_of_str(requires): + raise InvalidPyProjectBuildRequires( + package=req_name, + reason="It is not a list of strings.", + ) + + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + get_requirement(requirement) + except InvalidRequirement as error: + raise InvalidPyProjectBuildRequires( + package=req_name, + reason=f"It contains an invalid requirement: {requirement!r}", + ) from error + + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) + check: list[str] = [] + if backend is None: + # If the user didn't specify a backend, we assume they want to use + # the setuptools backend. But we can't be sure they have included + # a version of setuptools which supplies the backend. So we + # make a note to check that this requirement is present once + # we have set up the environment. + # This is quite a lot of work to check for a very specific case. But + # the problem is, that case is potentially quite common - projects that + # adopted PEP 518 early for the ability to specify requirements to + # execute setup.py, but never considered needing to mention the build + # tools themselves. The original PEP 518 code had a similar check (but + # implemented in a different way). + backend = "setuptools.build_meta:__legacy__" + check = ["setuptools>=40.8.0"] + + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/req/__init__.py new file mode 100644 index 0000000..5fc8752 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/__init__.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +import collections +import logging +from collections.abc import Generator +from dataclasses import dataclass + +from pip._internal.cli.progress_bars import BarType, get_install_progress_renderer +from pip._internal.utils.logging import indent_log + +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import RequirementSet + +__all__ = [ + "RequirementSet", + "InstallRequirement", + "parse_requirements", + "install_given_reqs", +] + +logger = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class InstallationResult: + name: str + + +def _validate_requirements( + requirements: list[InstallRequirement], +) -> Generator[tuple[str, InstallRequirement], None, None]: + for req in requirements: + assert req.name, f"invalid to-be-installed requirement: {req}" + yield req.name, req + + +def install_given_reqs( + requirements: list[InstallRequirement], + root: str | None, + home: str | None, + prefix: str | None, + warn_script_location: bool, + use_user_site: bool, + pycompile: bool, + progress_bar: BarType, +) -> list[InstallationResult]: + """ + Install everything in the given list. 
+ + (to be called after having downloaded and unpacked the packages) + """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) + + if to_install: + logger.info( + "Installing collected packages: %s", + ", ".join(to_install.keys()), + ) + + installed = [] + + show_progress = logger.isEnabledFor(logging.INFO) and len(to_install) > 1 + + items = iter(to_install.values()) + if show_progress: + renderer = get_install_progress_renderer( + bar_type=progress_bar, total=len(to_install) + ) + items = renderer(items) + + with indent_log(): + for requirement in items: + req_name = requirement.name + assert req_name is not None + if requirement.should_reinstall: + logger.info("Attempting uninstall: %s", req_name) + with indent_log(): + uninstalled_pathset = requirement.uninstall(auto_confirm=True) + else: + uninstalled_pathset = None + + try: + requirement.install( + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, + ) + except Exception: + # if install did not succeed, rollback previous uninstall + if uninstalled_pathset and not requirement.install_succeeded: + uninstalled_pathset.rollback() + raise + else: + if uninstalled_pathset and requirement.install_succeeded: + uninstalled_pathset.commit() + + installed.append(InstallationResult(req_name)) + + return installed diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..fa9b20b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc new file mode 100644 index 0000000..211fc78 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/constructors.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc new file mode 100644 index 0000000..a53f5f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_dependency_group.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc new file mode 100644 index 0000000..f407fc0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_file.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc new file mode 100644 index 0000000..764483a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_install.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc new file mode 100644 index 0000000..668f3ed Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_set.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc new file mode 100644 index 0000000..d67a339 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/constructors.py b/venv/lib/python3.12/site-packages/pip/_internal/req/constructors.py new file mode 100644 index 0000000..08b92cf --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/constructors.py @@ -0,0 +1,566 @@ +"""Backing implementation for InstallRequirement's various constructors + +The idea here is that these formed a major chunk of InstallRequirement's size +so, moving them and support code dedicated to them outside of that class +helps creates for better understandability for the rest of the code. + +These are meant to be used elsewhere within pip to create instances of +InstallRequirement. +""" + +from __future__ import annotations + +import copy +import logging +import os +import re +from collections.abc import Collection +from dataclasses import dataclass + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement +from pip._vendor.packaging.specifiers import Specifier + +from pip._internal.exceptions import InstallationError +from pip._internal.models.index import PyPI, TestPyPI +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_file import ParsedRequirement +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import is_installable_dir +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import is_url, vcs + +__all__ = [ + "install_req_from_editable", + "install_req_from_line", + "parse_editable", +] + +logger = logging.getLogger(__name__) +operators = Specifier._operators.keys() + + +def _strip_extras(path: str) -> tuple[str, str | None]: + m = re.match(r"^(.+)(\[[^\]]+\])$", path) + extras = None + if m: + path_no_extras = m.group(1).rstrip() + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +def convert_extras(extras: str | None) -> set[str]: + if not extras: + return set() + return get_requirement("placeholder" + extras.lower()).extras + + +def _set_requirement_extras(req: Requirement, new_extras: set[str]) -> Requirement: + """ + Returns a new requirement based on the given one, with the supplied extras. If the + given requirement already has extras those are replaced (or dropped if no new extras + are given). 
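+    Illustrative example (hypothetical project name "pkg"):
+
+        >>> from pip._vendor.packaging.requirements import Requirement
+        >>> str(_set_requirement_extras(Requirement("pkg[foo]>=1.0"), {"bar"}))
+        'pkg[bar]>=1.0'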
+ """ + match: re.Match[str] | None = re.fullmatch( + # see https://peps.python.org/pep-0508/#complete-grammar + r"([\w\t .-]+)(\[[^\]]*\])?(.*)", + str(req), + flags=re.ASCII, + ) + # ireq.req is a valid requirement so the regex should always match + assert ( + match is not None + ), f"regex match on requirement {req} failed, this should never happen" + pre: str | None = match.group(1) + post: str | None = match.group(3) + assert ( + pre is not None and post is not None + ), f"regex group selection for requirement {req} failed, this should never happen" + extras: str = "[{}]".format(",".join(sorted(new_extras)) if new_extras else "") + return get_requirement(f"{pre}{extras}{post}") + + +def _parse_direct_url_editable(editable_req: str) -> tuple[str | None, str, set[str]]: + try: + req = Requirement(editable_req) + except InvalidRequirement: + pass + else: + if req.url: + # Join the marker back into the name part. This will be parsed out + # later into a Requirement again. + if req.marker: + name = f"{req.name} ; {req.marker}" + else: + name = req.name + return (name, req.url, req.extras) + + raise ValueError + + +def _parse_pip_syntax_editable(editable_req: str) -> tuple[str | None, str, set[str]]: + url = editable_req + + # If a file path is specified with extras, strip off the extras. + url_no_extras, extras = _strip_extras(url) + + if os.path.isdir(url_no_extras): + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith("file:"): + package_name = Link(url_no_extras).egg_fragment + if extras: + return ( + package_name, + url_no_extras, + get_requirement("placeholder" + extras.lower()).extras, + ) + else: + return package_name, url_no_extras, set() + + for version_control in vcs: + if url.lower().startswith(f"{version_control}:"): + url = f"{version_control}+{url}" + break + + return Link(url).egg_fragment, url, set() + + +def parse_editable(editable_req: str) -> tuple[str | None, str, set[str]]: + """Parses an editable requirement into: + - a requirement name with environment markers + - an URL + - extras + Accepted requirements: + - svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir + - local_path[some_extra] + - Foobar[extra] @ svn+http://blahblah@rev#subdirectory=subdir ; markers + """ + try: + package_name, url, extras = _parse_direct_url_editable(editable_req) + except ValueError: + package_name, url, extras = _parse_pip_syntax_editable(editable_req) + + link = Link(url) + + if not link.is_vcs and not link.url.startswith("file:"): + backends = ", ".join(vcs.all_schemes) + raise InstallationError( + f"{editable_req} is not a valid editable requirement. " + f"It should either be a path to a local project or a VCS URL " + f"(beginning with {backends})." + ) + + # The project name can be inferred from local file URIs easily. + if not package_name and not link.url.startswith("file:"): + raise InstallationError( + f"Could not detect requirement name for '{editable_req}', " + "please specify one with your_package_name @ URL" + ) + return package_name, url, extras + + +def check_first_requirement_in_file(filename: str) -> None: + """Check if file is parsable as a requirements file. + + This is heavily based on ``pkg_resources.parse_requirements``, but + simplified to just check the first meaningful line. + + :raises InvalidRequirement: If the first meaningful line cannot be parsed + as an requirement. 
+ """ + with open(filename, encoding="utf-8", errors="ignore") as f: + # Create a steppable iterator, so we can handle \-continuations. + lines = ( + line + for line in (line.strip() for line in f) + if line and not line.startswith("#") # Skip blank lines/comments. + ) + + for line in lines: + # Drop comments -- a hash without a space may be in a URL. + if " #" in line: + line = line[: line.find(" #")] + # If there is a line continuation, drop it, and append the next line. + if line.endswith("\\"): + line = line[:-2].strip() + next(lines, "") + get_requirement(line) + return + + +def deduce_helpful_msg(req: str) -> str: + """Returns helpful msg in case requirements file does not exist, + or cannot be parsed. + + :params req: Requirements file path + """ + if not os.path.exists(req): + return f" File '{req}' does not exist." + msg = " The path does exist. " + # Try to parse and check if it is a requirements file. + try: + check_first_requirement_in_file(req) + except InvalidRequirement: + logger.debug("Cannot parse '%s' as requirements file", req) + else: + msg += ( + f"The argument you provided " + f"({req}) appears to be a" + f" requirements file. If that is the" + f" case, use the '-r' flag to install" + f" the packages specified within it." + ) + return msg + + +@dataclass(frozen=True) +class RequirementParts: + requirement: Requirement | None + link: Link | None + markers: Marker | None + extras: set[str] + + +def parse_req_from_editable(editable_req: str) -> RequirementParts: + name, url, extras_override = parse_editable(editable_req) + + if name is not None: + try: + req: Requirement | None = get_requirement(name) + except InvalidRequirement as exc: + raise InstallationError(f"Invalid requirement: {name!r}: {exc}") + else: + req = None + + link = Link(url) + + return RequirementParts(req, link, None, extras_override) + + +# ---- The actual constructors follow ---- + + +def install_req_from_editable( + editable_req: str, + comes_from: InstallRequirement | str | None = None, + *, + isolated: bool = False, + hash_options: dict[str, list[str]] | None = None, + constraint: bool = False, + user_supplied: bool = False, + permit_editable_wheels: bool = False, + config_settings: dict[str, str | list[str]] | None = None, +) -> InstallRequirement: + parts = parse_req_from_editable(editable_req) + + return InstallRequirement( + parts.requirement, + comes_from=comes_from, + user_supplied=user_supplied, + editable=True, + permit_editable_wheels=permit_editable_wheels, + link=parts.link, + constraint=constraint, + isolated=isolated, + hash_options=hash_options, + config_settings=config_settings, + extras=parts.extras, + ) + + +def _looks_like_path(name: str) -> bool: + """Checks whether the string "looks like" a path on the filesystem. + + This does not check whether the target actually exists, only judge from the + appearance. + + Returns true if any of the following conditions is true: + * a path separator is found (either os.path.sep or os.path.altsep); + * a dot is found (which represents the current directory). + """ + if os.path.sep in name: + return True + if os.path.altsep is not None and os.path.altsep in name: + return True + if name.startswith("."): + return True + return False + + +def _get_url_from_path(path: str, name: str) -> str | None: + """ + First, it checks whether a provided path is an installable directory. If it + is, returns the path. + + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. 
If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + # TODO: The is_installable_dir test here might not be necessary + # now that it is done in load_pyproject_toml too. + raise InstallationError( + f"Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found." + ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split("@", 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. + return None + logger.warning( + "Requirement %r looks like a filename, but the file does not exist", + name, + ) + return path_to_url(path) + + +def parse_req_from_line(name: str, line_source: str | None) -> RequirementParts: + if is_url(name): + marker_sep = "; " + else: + marker_sep = ";" + if marker_sep in name: + name, markers_as_string = name.split(marker_sep, 1) + markers_as_string = markers_as_string.strip() + if not markers_as_string: + markers = None + else: + markers = Marker(markers_as_string) + else: + markers = None + name = name.strip() + req_as_string = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras_as_string = None + + if is_url(name): + link = Link(name) + else: + p, extras_as_string = _strip_extras(path) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == "file" and re.search(r"\.\./", link.url): + link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req_as_string = f"{wheel.name}=={wheel.version}" + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req_as_string = link.egg_fragment + + # a requirement specifier + else: + req_as_string = name + + extras = convert_extras(extras_as_string) + + def with_source(text: str) -> str: + if not line_source: + return text + return f"{text} (from {line_source})" + + def _parse_req_string(req_as_string: str) -> Requirement: + try: + return get_requirement(req_as_string) + except InvalidRequirement as exc: + if os.path.sep in req_as_string: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req_as_string) + elif "=" in req_as_string and not any( + op in req_as_string for op in operators + ): + add_msg = "= is not a valid operator. Did you mean == ?" 
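+            # Editor's note (not upstream pip): e.g. "pkg=1.0" (a single "=")
+            # lands in this branch; "operators" holds packaging's specifier
+            # operators, so "==", ">=", "~=" and friends never trigger the hint.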
+ else: + add_msg = "" + msg = with_source(f"Invalid requirement: {req_as_string!r}: {exc}") + if add_msg: + msg += f"\nHint: {add_msg}" + raise InstallationError(msg) + + if req_as_string is not None: + req: Requirement | None = _parse_req_string(req_as_string) + else: + req = None + + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name: str, + comes_from: str | InstallRequirement | None = None, + *, + isolated: bool = False, + hash_options: dict[str, list[str]] | None = None, + constraint: bool = False, + line_source: str | None = None, + user_supplied: bool = False, + config_settings: dict[str, str | list[str]] | None = None, +) -> InstallRequirement: + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. + """ + parts = parse_req_from_line(name, line_source) + + return InstallRequirement( + parts.requirement, + comes_from, + link=parts.link, + markers=parts.markers, + isolated=isolated, + hash_options=hash_options, + config_settings=config_settings, + constraint=constraint, + extras=parts.extras, + user_supplied=user_supplied, + ) + + +def install_req_from_req_string( + req_string: str, + comes_from: InstallRequirement | None = None, + isolated: bool = False, + user_supplied: bool = False, +) -> InstallRequirement: + try: + req = get_requirement(req_string) + except InvalidRequirement as exc: + raise InstallationError(f"Invalid requirement: {req_string!r}: {exc}") + + domains_not_allowed = [ + PyPI.file_storage_domain, + TestPyPI.file_storage_domain, + ] + if ( + req.url + and comes_from + and comes_from.link + and comes_from.link.netloc in domains_not_allowed + ): + # Explicitly disallow pypi packages that depend on external urls + raise InstallationError( + "Packages installed from PyPI cannot depend on packages " + "which are not also hosted on PyPI.\n" + f"{comes_from.name} depends on {req} " + ) + + return InstallRequirement( + req, + comes_from, + isolated=isolated, + user_supplied=user_supplied, + ) + + +def install_req_from_parsed_requirement( + parsed_req: ParsedRequirement, + isolated: bool = False, + user_supplied: bool = False, + config_settings: dict[str, str | list[str]] | None = None, +) -> InstallRequirement: + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + config_settings=config_settings, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + isolated=isolated, + hash_options=( + parsed_req.options.get("hashes", {}) if parsed_req.options else {} + ), + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + config_settings=config_settings, + ) + return req + + +def install_req_from_link_and_ireq( + link: Link, ireq: InstallRequirement +) -> InstallRequirement: + return InstallRequirement( + req=ireq.req, + comes_from=ireq.comes_from, + editable=ireq.editable, + link=link, + markers=ireq.markers, + isolated=ireq.isolated, + hash_options=ireq.hash_options, + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + ) + + +def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: + """ + Creates a new InstallationRequirement using the 
given template but without + any extras. Sets the original requirement as the new one's parent + (comes_from). + """ + return InstallRequirement( + req=( + _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None + ), + comes_from=ireq, + editable=ireq.editable, + link=ireq.link, + markers=ireq.markers, + isolated=ireq.isolated, + hash_options=ireq.hash_options, + constraint=ireq.constraint, + extras=[], + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + permit_editable_wheels=ireq.permit_editable_wheels, + ) + + +def install_req_extend_extras( + ireq: InstallRequirement, + extras: Collection[str], +) -> InstallRequirement: + """ + Returns a copy of an installation requirement with some additional extras. + Makes a shallow copy of the ireq object. + """ + result = copy.copy(ireq) + result.extras = {*ireq.extras, *extras} + result.req = ( + _set_requirement_extras(ireq.req, result.extras) + if ireq.req is not None + else None + ) + return result diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/req_dependency_group.py b/venv/lib/python3.12/site-packages/pip/_internal/req/req_dependency_group.py new file mode 100644 index 0000000..396ac1b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/req_dependency_group.py @@ -0,0 +1,75 @@ +from collections.abc import Iterable, Iterator +from typing import Any + +from pip._vendor.dependency_groups import DependencyGroupResolver + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.compat import tomllib + + +def parse_dependency_groups(groups: list[tuple[str, str]]) -> list[str]: + """ + Parse dependency groups data as provided via the CLI, in a `[path:]group` syntax. + + Raises InstallationErrors if anything goes wrong. + """ + resolvers = _build_resolvers(path for (path, _) in groups) + return list(_resolve_all_groups(resolvers, groups)) + + +def _resolve_all_groups( + resolvers: dict[str, DependencyGroupResolver], groups: list[tuple[str, str]] +) -> Iterator[str]: + """ + Run all resolution, converting any error from `DependencyGroupResolver` into + an InstallationError. + """ + for path, groupname in groups: + resolver = resolvers[path] + try: + yield from (str(req) for req in resolver.resolve(groupname)) + except (ValueError, TypeError, LookupError) as e: + raise InstallationError( + f"[dependency-groups] resolution failed for '{groupname}' " + f"from '{path}': {e}" + ) from e + + +def _build_resolvers(paths: Iterable[str]) -> dict[str, Any]: + resolvers = {} + for path in paths: + if path in resolvers: + continue + + pyproject = _load_pyproject(path) + if "dependency-groups" not in pyproject: + raise InstallationError( + f"[dependency-groups] table was missing from '{path}'. " + "Cannot resolve '--group' option." + ) + raw_dependency_groups = pyproject["dependency-groups"] + if not isinstance(raw_dependency_groups, dict): + raise InstallationError( + f"[dependency-groups] table was malformed in {path}. " + "Cannot resolve '--group' option." + ) + + resolvers[path] = DependencyGroupResolver(raw_dependency_groups) + return resolvers + + +def _load_pyproject(path: str) -> dict[str, Any]: + """ + This helper loads a pyproject.toml as TOML. + + It raises an InstallationError if the operation fails. + """ + try: + with open(path, "rb") as fp: + return tomllib.load(fp) + except FileNotFoundError: + raise InstallationError(f"{path} not found. 
Cannot resolve '--group' option.") + except tomllib.TOMLDecodeError as e: + raise InstallationError(f"Error parsing {path}: {e}") from e + except OSError as e: + raise InstallationError(f"Error reading {path}: {e}") from e diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/req_file.py b/venv/lib/python3.12/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 0000000..a4f54b4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,619 @@ +""" +Requirements file parsing +""" + +from __future__ import annotations + +import codecs +import locale +import logging +import optparse +import os +import re +import shlex +import sys +import urllib.parse +from collections.abc import Generator, Iterable +from dataclasses import dataclass +from optparse import Values +from typing import ( + TYPE_CHECKING, + Any, + Callable, + NoReturn, +) + +from pip._internal.cli import cmdoptions +from pip._internal.exceptions import InstallationError, RequirementsFileParseError +from pip._internal.models.search_scope import SearchScope + +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + from pip._internal.network.session import PipSession + +__all__ = ["parse_requirements"] + +ReqFileLines = Iterable[tuple[int, str]] + +LineParser = Callable[[str], tuple[str, Values]] + +SCHEME_RE = re.compile(r"^(http|https|file):", re.I) +COMMENT_RE = re.compile(r"(^|\s+)#.*$") + +# Matches environment variable-style values in '${MY_VARIABLE_1}' with the +# variable name consisting of only uppercase letters, digits or the '_' +# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, +# 2013 Edition. +ENV_VAR_RE = re.compile(r"(?P\$\{(?P[A-Z0-9_]+)\})") + +SUPPORTED_OPTIONS: list[Callable[..., optparse.Option]] = [ + cmdoptions.index_url, + cmdoptions.extra_index_url, + cmdoptions.no_index, + cmdoptions.constraints, + cmdoptions.requirements, + cmdoptions.editable, + cmdoptions.find_links, + cmdoptions.no_binary, + cmdoptions.only_binary, + cmdoptions.prefer_binary, + cmdoptions.require_hashes, + cmdoptions.pre, + cmdoptions.trusted_host, + cmdoptions.use_new_feature, +] + +# options to be passed to requirements +SUPPORTED_OPTIONS_REQ: list[Callable[..., optparse.Option]] = [ + cmdoptions.hash, + cmdoptions.config_settings, +] + +SUPPORTED_OPTIONS_EDITABLE_REQ: list[Callable[..., optparse.Option]] = [ + cmdoptions.config_settings, +] + + +# the 'dest' string values +SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [ + str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ +] + +# order of BOMS is important: codecs.BOM_UTF16_LE is a prefix of codecs.BOM_UTF32_LE +# so data.startswith(BOM_UTF16_LE) would be true for UTF32_LE data +BOMS: list[tuple[bytes, str]] = [ + (codecs.BOM_UTF8, "utf-8"), + (codecs.BOM_UTF32, "utf-32"), + (codecs.BOM_UTF32_BE, "utf-32-be"), + (codecs.BOM_UTF32_LE, "utf-32-le"), + (codecs.BOM_UTF16, "utf-16"), + (codecs.BOM_UTF16_BE, "utf-16-be"), + (codecs.BOM_UTF16_LE, "utf-16-le"), +] + +PEP263_ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)") +DEFAULT_ENCODING = "utf-8" + +logger = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class ParsedRequirement: + # TODO: replace this with slots=True when dropping Python 3.9 support. 
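+    # Editor's note (not upstream pip): declaring __slots__ by hand in a
+    # frozen dataclass works here only because no field has a default value;
+    # a default would be stored as a class attribute and collide with the
+    # slot descriptor of the same name.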
+ __slots__ = ( + "requirement", + "is_editable", + "comes_from", + "constraint", + "options", + "line_source", + ) + + requirement: str + is_editable: bool + comes_from: str + constraint: bool + options: dict[str, Any] | None + line_source: str | None + + +@dataclass(frozen=True) +class ParsedLine: + __slots__ = ("filename", "lineno", "args", "opts", "constraint") + + filename: str + lineno: int + args: str + opts: Values + constraint: bool + + @property + def is_editable(self) -> bool: + return bool(self.opts.editables) + + @property + def requirement(self) -> str | None: + if self.args: + return self.args + elif self.is_editable: + # We don't support multiple -e on one line + return self.opts.editables[0] + return None + + +def parse_requirements( + filename: str, + session: PipSession, + finder: PackageFinder | None = None, + options: optparse.Values | None = None, + constraint: bool = False, +) -> Generator[ParsedRequirement, None, None]: + """Parse a requirements file and yield ParsedRequirement instances. + + :param filename: Path or url of requirements file. + :param session: PipSession instance. + :param finder: Instance of pip.index.PackageFinder. + :param options: cli options. + :param constraint: If true, parsing a constraint file rather than + requirements file. + """ + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, options=options, finder=finder, session=session + ) + if parsed_req is not None: + yield parsed_req + + +def preprocess(content: str) -> ReqFileLines: + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + """ + lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1) + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = expand_env_variables(lines_enum) + return lines_enum + + +def handle_requirement_line( + line: ParsedLine, + options: optparse.Values | None = None, +) -> ParsedRequirement: + # preserve for the nested code path + line_comes_from = "{} {} (line {})".format( + "-c" if line.constraint else "-r", + line.filename, + line.lineno, + ) + + assert line.requirement is not None + + # get the options that apply to requirements + if line.is_editable: + supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST + else: + supported_dest = SUPPORTED_OPTIONS_REQ_DEST + req_options = {} + for dest in supported_dest: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f"line {line.lineno} of {line.filename}" + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) + + +def handle_option_line( + opts: Values, + filename: str, + lineno: int, + finder: PackageFinder | None = None, + options: optparse.Values | None = None, + session: PipSession | None = None, +) -> None: + if opts.hashes: + logger.warning( + "%s line %s has --hash but no requirement, and will be ignored.", + filename, + lineno, + ) + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled if f not in options.features_enabled + ) + + # set finder options + if finder: + 
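+        # Editor's note (not upstream pip): the block below rebuilds the
+        # finder's SearchScope from per-file options; e.g. a line such as
+        #   --extra-index-url https://mirror.example/simple
+        # (hypothetical URL) appends to index_urls before the scope is rebuilt.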
        find_links = finder.find_links
+        index_urls = finder.index_urls
+        no_index = finder.search_scope.no_index
+        if opts.no_index is True:
+            no_index = True
+            index_urls = []
+        if opts.index_url and not no_index:
+            index_urls = [opts.index_url]
+        if opts.extra_index_urls and not no_index:
+            index_urls.extend(opts.extra_index_urls)
+        if opts.find_links:
+            # FIXME: it would be nice to keep track of the source
+            # of the find_links: support a find-links local path
+            # relative to a requirements file.
+            value = opts.find_links[0]
+            req_dir = os.path.dirname(os.path.abspath(filename))
+            relative_to_reqs_file = os.path.join(req_dir, value)
+            if os.path.exists(relative_to_reqs_file):
+                value = relative_to_reqs_file
+            find_links.append(value)
+
+        if session:
+            # We need to update the auth urls in session
+            session.update_index_urls(index_urls)
+
+        search_scope = SearchScope(
+            find_links=find_links,
+            index_urls=index_urls,
+            no_index=no_index,
+        )
+        finder.search_scope = search_scope
+
+        if opts.pre:
+            finder.set_allow_all_prereleases()
+
+        if opts.prefer_binary:
+            finder.set_prefer_binary()
+
+    if session:
+        for host in opts.trusted_hosts or []:
+            source = f"line {lineno} of {filename}"
+            session.add_trusted_host(host, source=source)
+
+
+def handle_line(
+    line: ParsedLine,
+    options: optparse.Values | None = None,
+    finder: PackageFinder | None = None,
+    session: PipSession | None = None,
+) -> ParsedRequirement | None:
+    """Handle a single parsed requirements line; this can result in
+    creating/yielding requirements, or updating the finder.
+
+    :param line: The parsed line to be processed.
+    :param options: CLI options.
+    :param finder: The finder - updated by non-requirement lines.
+    :param session: The session - updated by non-requirement lines.
+
+    Returns a ParsedRequirement object if the line is a requirement line,
+    otherwise returns None.
+
+    For lines that contain requirements, the only options that have an effect
+    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+    ignored.
+
+    For lines that do not contain requirements, the only options that have an
+    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+    be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and they are all parsed
+    and affect the finder.
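+
+    Editor's illustration (not upstream text; hypothetical values)::
+
+        --index-url https://mirror.example/simple   -> finder updated, None returned
+        requests>=2.0 --hash=sha256:abc...          -> ParsedRequirement returned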
+ """ + + if line.requirement is not None: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser: + def __init__( + self, + session: PipSession, + line_parser: LineParser, + ) -> None: + self._session = session + self._line_parser = line_parser + + def parse( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: + """Parse a given file, yielding parsed lines.""" + yield from self._parse_and_recurse( + filename, constraint, [{os.path.abspath(filename): None}] + ) + + def _parse_and_recurse( + self, + filename: str, + constraint: bool, + parsed_files_stack: list[dict[str, str | None]], + ) -> Generator[ParsedLine, None, None]: + for line in self._parse_file(filename, constraint): + if line.requirement is None and ( + line.opts.requirements or line.opts.constraints + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib.parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + # and then abspath so that we can identify recursive references + req_path = os.path.abspath( + os.path.join( + os.path.dirname(filename), + req_path, + ) + ) + parsed_files = parsed_files_stack[0] + if req_path in parsed_files: + initial_file = parsed_files[req_path] + tail = ( + f" and again in {initial_file}" + if initial_file is not None + else "" + ) + raise RequirementsFileParseError( + f"{req_path} recursively references itself in {filename}{tail}" + ) + # Keeping a track where was each file first included in + new_parsed_files = parsed_files.copy() + new_parsed_files[req_path] = filename + yield from self._parse_and_recurse( + req_path, nested_constraint, [new_parsed_files, *parsed_files_stack] + ) + else: + yield line + + def _parse_file( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: + _, content = get_file_content(filename, self._session) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = f"Invalid requirement: {line}\n{e.msg}" + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder: PackageFinder | None) -> LineParser: + def parse_line(line: str) -> tuple[str, Values]: + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + + try: + options = shlex.split(options_str) + except ValueError as e: + raise OptionParsingError(f"Could not split options: {options_str}") from e + + opts, _ = parser.parse_args(options, defaults) + + return args_str, opts + + return parse_line + + +def break_args_options(line: str) -> tuple[str, str]: + """Break up the line into an args and options string. 
We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. + """ + tokens = line.split(" ") + args = [] + options = tokens[:] + for token in tokens: + if token.startswith(("-", "--")): + break + else: + args.append(token) + options.pop(0) + return " ".join(args), " ".join(options) + + +class OptionParsingError(Exception): + def __init__(self, msg: str) -> None: + self.msg = msg + + +def build_parser() -> optparse.OptionParser: + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. + def parser_exit(self: Any, msg: str) -> NoReturn: + raise OptionParsingError(msg) + + # NOTE: mypy disallows assigning to a method + # https://github.com/python/mypy/issues/2427 + parser.exit = parser_exit # type: ignore + + return parser + + +def join_lines(lines_enum: ReqFileLines) -> ReqFileLines: + """Joins a line ending in '\' with the previous line (except when following + comments). The joined line takes on the index of the first line. + """ + primary_line_number = None + new_line: list[str] = [] + for line_number, line in lines_enum: + if not line.endswith("\\") or COMMENT_RE.match(line): + if COMMENT_RE.match(line): + # this ensures comments are always matched later + line = " " + line + if new_line: + new_line.append(line) + assert primary_line_number is not None + yield primary_line_number, "".join(new_line) + new_line = [] + else: + yield line_number, line + else: + if not new_line: + primary_line_number = line_number + new_line.append(line.strip("\\")) + + # last line contains \ + if new_line: + assert primary_line_number is not None + yield primary_line_number, "".join(new_line) + + # TODO: handle space after '\'. + + +def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines: + """ + Strips comments and filter empty lines. + """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub("", line) + line = line.strip() + if line: + yield line_number, line + + +def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines: + """Replace all environment variables that can be retrieved via `os.getenv`. + + The only allowed format for environment variables defined in the + requirement file is `${MY_VARIABLE_1}` to ensure two things: + + 1. Strings that contain a `$` aren't accidentally (partially) expanded. + 2. Ensure consistency across platforms for requirement files. + + These points are the result of a discussion on the `github pull + request #3514 `_. + + Valid characters in variable names follow the `POSIX standard + `_ and are limited + to uppercase letter, digits and the `_` (underscore). + """ + for line_number, line in lines_enum: + for env_var, var_name in ENV_VAR_RE.findall(line): + value = os.getenv(var_name) + if not value: + continue + + line = line.replace(env_var, value) + + yield line_number, line + + +def get_file_content(url: str, session: PipSession) -> tuple[str, str]: + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance. 
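+
+    Editor's illustration (not upstream text)::
+
+        location, text = get_file_content("requirements.txt", session)
+        # location == "requirements.txt"; for an http(s) URL it is the final
+        # URL after redirects (resp.url).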
+ """ + scheme = urllib.parse.urlsplit(url).scheme + # Pip has special support for file:// URLs (LocalFSAdapter). + if scheme in ["http", "https", "file"]: + # Delay importing heavy network modules until absolutely necessary. + from pip._internal.network.utils import raise_for_status + + resp = session.get(url) + raise_for_status(resp) + return resp.url, resp.text + + # Assume this is a bare path. + try: + with open(url, "rb") as f: + raw_content = f.read() + except OSError as exc: + raise InstallationError(f"Could not open requirements file: {exc}") + + content = _decode_req_file(raw_content, url) + + return url, content + + +def _decode_req_file(data: bytes, url: str) -> str: + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom) :].decode(encoding) + + for line in data.split(b"\n")[:2]: + if line[0:1] == b"#": + result = PEP263_ENCODING_RE.search(line) + if result is not None: + encoding = result.groups()[0].decode("ascii") + return data.decode(encoding) + + try: + return data.decode(DEFAULT_ENCODING) + except UnicodeDecodeError: + locale_encoding = locale.getpreferredencoding(False) or sys.getdefaultencoding() + logging.warning( + "unable to decode data from %s with default encoding %s, " + "falling back to encoding from locale: %s. " + "If this is intentional you should specify the encoding with a " + "PEP-263 style comment, e.g. '# -*- coding: %s -*-'", + url, + DEFAULT_ENCODING, + locale_encoding, + locale_encoding, + ) + return data.decode(locale_encoding) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/req_install.py b/venv/lib/python3.12/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 0000000..bd4fb07 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,828 @@ +from __future__ import annotations + +import functools +import logging +import os +import shutil +import sys +import uuid +import zipfile +from collections.abc import Collection, Iterable +from optparse import Values +from pathlib import Path +from typing import Any + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment +from pip._internal.exceptions import InstallationError, PreviousBuildDirError +from pip._internal.locations import get_scheme +from pip._internal.metadata import ( + BaseDistribution, + get_default_environment, + get_directory_distribution, + get_wheel_distribution, +) +from pip._internal.metadata.base import FilesystemWheel +from pip._internal.models.direct_url import DirectUrl +from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_editable import generate_editable_metadata +from pip._internal.operations.install.wheel import install_wheel +from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + ConfiguredBuildBackendHookCaller, + 
ask_path_exists, + backup_dir, + display_path, + hide_url, + is_installable_dir, + redact_auth_from_requirement, + redact_auth_from_url, +) +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.unpacking import unpack_file +from pip._internal.utils.virtualenv import running_under_virtualenv +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +class InstallRequirement: + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for + installing the said requirement. + """ + + def __init__( + self, + req: Requirement | None, + comes_from: str | InstallRequirement | None, + editable: bool = False, + link: Link | None = None, + markers: Marker | None = None, + isolated: bool = False, + *, + hash_options: dict[str, list[str]] | None = None, + config_settings: dict[str, str | list[str]] | None = None, + constraint: bool = False, + extras: Collection[str] = (), + user_supplied: bool = False, + permit_editable_wheels: bool = False, + ) -> None: + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + self.editable = editable + self.permit_editable_wheels = permit_editable_wheels + + # source_dir is the local directory where the linked requirement is + # located, or unpacked. In case unpacking is needed, creating and + # populating source_dir is done by the RequirementPreparer. Note this + # is not necessarily the directory where pyproject.toml or setup.py is + # located - that one is obtained via unpacked_source_directory. + self.source_dir: str | None = None + if self.editable: + assert link + if link.is_file: + self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) + + # original_link is the direct URL that was provided by the user for the + # requirement, either directly or via a constraints file. + if link is None and req and req.url: + # PEP 508 URL requirement + link = Link(req.url) + self.link = self.original_link = link + + # When this InstallRequirement is a wheel obtained from the cache of locally + # built wheels, this is the source link corresponding to the cache entry, which + # was used to download and build the cached wheel. + self.cached_wheel_source_link: Link | None = None + + # Information about the location of the artifact that was downloaded . This + # property is guaranteed to be set in resolver results. + self.download_info: DirectUrl | None = None + + # Path to any downloaded or already-existing package. + self.local_file_path: str | None = None + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path + + if extras: + self.extras = extras + elif req: + self.extras = req.extras + else: + self.extras = set() + if markers is None and req: + markers = req.marker + self.markers = markers + + # This holds the Distribution object if this requirement is already installed. + self.satisfied_by: BaseDistribution | None = None + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. 
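+        # (Editor's note, not upstream pip: check_if_exists() below flips this
+        # to True when an installed version fails the specifier, or for
+        # editable requirements, which are always reinstalled.)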
+ self.should_reinstall = False + # Temporary build location + self._temp_build_dir: TempDirectory | None = None + # Set to True after successful installation + self.install_succeeded: bool | None = None + # Supplied options + self.hash_options = hash_options if hash_options else {} + self.config_settings = config_settings + # Set to True after successful preparation of this requirement + self.prepared = False + # User supplied requirement are explicitly requested for installation + # by the user via CLI arguments or requirements files, as opposed to, + # e.g. dependencies, extras or constraints. + self.user_supplied = user_supplied + + self.isolated = isolated + self.build_env: BuildEnvironment = NoOpBuildEnvironment() + + # For PEP 517, the directory where we request the project metadata + # gets stored. We need this to pass to build_wheel, so the backend + # can ensure that the wheel matches the metadata (see the PEP for + # details). + self.metadata_directory: str | None = None + + # The cached metadata distribution that this requirement represents. + # See get_dist / set_dist. + self._distribution: BaseDistribution | None = None + + # The static build requirements (from pyproject.toml) + self.pyproject_requires: list[str] | None = None + + # Build requirements that we will check are available + self.requirements_to_check: list[str] = [] + + # The PEP 517 backend we should use to build the project + self.pep517_backend: BuildBackendHookCaller | None = None + + # This requirement needs more preparation before it can be built + self.needs_more_preparation = False + + # This requirement needs to be unpacked before it can be installed. + self._archive_source: Path | None = None + + def __str__(self) -> str: + if self.req: + s = redact_auth_from_requirement(self.req) + if self.link: + s += f" from {redact_auth_from_url(self.link.url)}" + elif self.link: + s = redact_auth_from_url(self.link.url) + else: + s = "" + if self.satisfied_by is not None: + if self.satisfied_by.location is not None: + location = display_path(self.satisfied_by.location) + else: + location = "" + s += f" in {location}" + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from: str | None = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += f" (from {comes_from})" + return s + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} object: " + f"{str(self)} editable={self.editable!r}>" + ) + + def format_debug(self) -> str: + """An un-tested helper for getting state, for debugging.""" + attributes = vars(self) + names = sorted(attributes) + + state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names)) + return "<{name} object: {{{state}}}>".format( + name=self.__class__.__name__, + state=", ".join(state), + ) + + # Things that are valid for all kinds of requirements? 
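+    # Editor's note (not upstream pip): for a requirement like
+    # 'pkg[extra]>=1.0 ; python_version < "3.13"', the underlying Requirement
+    # yields name "pkg", extras {"extra"} and the marker, which the
+    # properties below expose.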
+ @property + def name(self) -> str | None: + if self.req is None: + return None + return self.req.name + + @functools.cached_property + def supports_pyproject_editable(self) -> bool: + assert self.pep517_backend + with self.build_env: + runner = runner_with_spinner_message( + "Checking if build backend supports build_editable" + ) + with self.pep517_backend.subprocess_runner(runner): + return "build_editable" in self.pep517_backend._supported_features() + + @property + def specifier(self) -> SpecifierSet: + assert self.req is not None + return self.req.specifier + + @property + def is_direct(self) -> bool: + """Whether this requirement was specified as a direct URL.""" + return self.original_link is not None + + @property + def is_pinned(self) -> bool: + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + assert self.req is not None + specifiers = self.req.specifier + return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} + + def match_markers(self, extras_requested: Iterable[str] | None = None) -> bool: + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ("",) + if self.markers is not None: + return any( + self.markers.evaluate({"extra": extra}) for extra in extras_requested + ) + else: + return True + + @property + def has_hash_options(self) -> bool: + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.hash_options) + + def hashes(self, trust_internet: bool = True) -> Hashes: + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.hash_options.copy() + if trust_internet: + link = self.link + elif self.is_direct and self.user_supplied: + link = self.original_link + else: + link = None + if link and link.hash: + assert link.hash_name is not None + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self) -> str | None: + """Format a nice indicator to show where this "comes from" """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + comes_from: str | None + if isinstance(self.comes_from, str): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += "->" + comes_from + return s + + def ensure_build_location( + self, build_dir: str, autodelete: bool, parallel_builds: bool + ) -> str: + assert build_dir is not None + if self._temp_build_dir is not None: + assert self._temp_build_dir.path + return self._temp_build_dir.path + if self.req is None: + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. 
+ self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) + + return self._temp_build_dir.path + + # This is the only remaining place where we manually determine the path + # for the temporary directory. It is only needed for editables where + # it is the value of the --src option. + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name: str = canonicalize_name(self.req.name) + if parallel_builds: + dir_name = f"{dir_name}_{uuid.uuid4().hex}" + + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug("Creating directory %s", build_dir) + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self) -> None: + """Set requirement after generating metadata.""" + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None + + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = get_requirement( + "".join( + [ + self.metadata["Name"], + op, + self.metadata["Version"], + ] + ) + ) + + def warn_on_mismatching_name(self) -> None: + assert self.req is not None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. + return + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + "Generating metadata for package %s " + "produced metadata for project name %s. Fix your " + "#egg=%s fragments.", + self.name, + metadata_name, + self.name, + ) + self.req = get_requirement(metadata_name) + + def check_if_exists(self, use_user_site: bool) -> None: + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.should_reinstall appropriately. + """ + if self.req is None: + return + existing_dist = get_default_environment().get_distribution(self.req.name) + if not existing_dist: + return + + version_compatible = self.req.specifier.contains( + existing_dist.version, + prereleases=True, + ) + if not version_compatible: + self.satisfied_by = None + if use_user_site: + if existing_dist.in_usersite: + self.should_reinstall = True + elif running_under_virtualenv() and existing_dist.in_site_packages: + raise InstallationError( + f"Will not install to the user site because it will " + f"lack sys.path precedence to {existing_dist.raw_name} " + f"in {existing_dist.location}" + ) + else: + self.should_reinstall = True + else: + if self.editable: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + else: + self.satisfied_by = existing_dist + + # Things valid for wheels + @property + def is_wheel(self) -> bool: + if not self.link: + return False + return self.link.is_wheel + + @property + def is_wheel_from_cache(self) -> bool: + # When True, it means that this InstallRequirement is a local wheel file in the + # cache of locally built wheels. 
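+        # (Editor's note, not upstream pip: the wheel-cache lookup records the
+        # original source link when it substitutes a cached wheel, so its
+        # presence distinguishes a cache hit from a user-supplied wheel file.)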
+ return self.cached_wheel_source_link is not None + + # Things valid for sdists + @property + def unpacked_source_directory(self) -> str: + assert self.source_dir, f"No source dir for {self}" + return os.path.join( + self.source_dir, self.link and self.link.subdirectory_fragment or "" + ) + + @property + def setup_py_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_py = os.path.join(self.unpacked_source_directory, "setup.py") + + return setup_py + + @property + def pyproject_toml_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self) -> None: + """Load the pyproject.toml file. + + After calling this routine, all of the attributes related to PEP 517 + processing for this requirement have been set. + """ + pyproject_toml_data = load_pyproject_toml( + self.pyproject_toml_path, self.setup_py_path, str(self) + ) + assert pyproject_toml_data + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = ConfiguredBuildBackendHookCaller( + self, + self.unpacked_source_directory, + backend, + backend_path=backend_path, + ) + + def editable_sanity_check(self) -> None: + """Check that an editable requirement if valid for use with PEP 517/518. + + This verifies that an editable has a build backend that supports PEP 660. + """ + if self.editable and not self.supports_pyproject_editable: + raise InstallationError( + f"Project {self} uses a build backend " + f"that is missing the 'build_editable' hook, so " + f"it cannot be installed in editable mode. " + f"Consider using a build backend that supports PEP 660." + ) + + def prepare_metadata(self) -> None: + """Ensure that project metadata is available. + + Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. + Under legacy processing, call setup.py egg-info. + """ + assert self.source_dir, f"No source dir for {self}" + details = self.name or f"from {self.link}" + + assert self.pep517_backend is not None + if ( + self.editable + and self.permit_editable_wheels + and self.supports_pyproject_editable + ): + self.metadata_directory = generate_editable_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + details=details, + ) + else: + self.metadata_directory = generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + details=details, + ) + + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() + else: + self.warn_on_mismatching_name() + + self.assert_source_matches_version() + + @property + def metadata(self) -> Any: + if not hasattr(self, "_metadata"): + self._metadata = self.get_dist().metadata + + return self._metadata + + def set_dist(self, distribution: BaseDistribution) -> None: + self._distribution = distribution + + def get_dist(self) -> BaseDistribution: + if self._distribution is not None: + return self._distribution + elif self.metadata_directory: + return get_directory_distribution(self.metadata_directory) + elif self.local_file_path and self.is_wheel: + assert self.req is not None + return get_wheel_distribution( + FilesystemWheel(self.local_file_path), + canonicalize_name(self.req.name), + ) + raise AssertionError( + f"InstallRequirement {self} has no metadata directory and no wheel: " + f"can't make a distribution." 
+ ) + + def assert_source_matches_version(self) -> None: + assert self.source_dir, f"No source dir for {self}" + version = self.metadata["version"] + if self.req and self.req.specifier and version not in self.req.specifier: + logger.warning( + "Requested %s, but installing version %s", + self, + version, + ) + else: + logger.debug( + "Source in %s has version %s, which satisfies requirement %s", + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir( + self, + parent_dir: str, + autodelete: bool = False, + parallel_builds: bool = False, + ) -> None: + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. + :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) + + def needs_unpacked_archive(self, archive_source: Path) -> None: + assert self._archive_source is None + self._archive_source = archive_source + + def ensure_pristine_source_checkout(self) -> None: + """Ensure the source directory has not yet been built in.""" + assert self.source_dir is not None + if self._archive_source is not None: + unpack_file(str(self._archive_source), self.source_dir) + elif is_installable_dir(self.source_dir): + # If a checkout exists, it's unwise to keep going. + # version inconsistencies are logged later, but do not fail + # the installation. + raise PreviousBuildDirError( + f"pip can't proceed with requirements '{self}' due to a " + f"pre-existing build directory ({self.source_dir}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. " + "Please delete it and try again." + ) + + # For editable installations + def update_editable(self) -> None: + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == "file": + # Static paths don't get updated + return + vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) + # Editable requirements are validated in Requirement constructors. + # So here, if it's neither a path nor a valid VCS URL, it's a bug. + assert vcs_backend, f"Unsupported VCS URL {self.link.url}" + hidden_url = hide_url(self.link.url) + vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0) + + # Top-level Actions + def uninstall( + self, auto_confirm: bool = False, verbose: bool = False + ) -> UninstallPathSet | None: + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. 
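+
+        Editor's sketch (not upstream text)::
+
+            pathset = req.uninstall(auto_confirm=True)
+            if pathset is not None:
+                pathset.commit()  # or pathset.rollback() to undo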
+ + """ + assert self.req + dist = get_default_environment().get_distribution(self.req.name) + if not dist: + logger.warning("Skipping %s as it is not installed.", self.name) + return None + logger.info("Found existing installation: %s", dist) + + uninstalled_pathset = UninstallPathSet.from_dist(dist) + uninstalled_pathset.remove(auto_confirm, verbose) + return uninstalled_pathset + + def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: + def _clean_zip_name(name: str, prefix: str) -> str: + assert name.startswith( + prefix + os.path.sep + ), f"name {name!r} doesn't start with prefix {prefix!r}" + name = name[len(prefix) + 1 :] + name = name.replace(os.path.sep, "/") + return name + + assert self.req is not None + path = os.path.join(parentdir, path) + name = _clean_zip_name(path, rootdir) + return self.req.name + "/" + name + + def archive(self, build_dir: str | None) -> None: + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ + assert self.source_dir + if build_dir is None: + return + + create_archive = True + archive_name = "{}-{}.zip".format(self.name, self.metadata["version"]) + archive_path = os.path.join(build_dir, archive_name) + + if os.path.exists(archive_path): + response = ask_path_exists( + f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, " + "(b)ackup, (a)bort ", + ("i", "w", "b", "a"), + ) + if response == "i": + create_archive = False + elif response == "w": + logger.warning("Deleting %s", display_path(archive_path)) + os.remove(archive_path) + elif response == "b": + dest_file = backup_dir(archive_path) + logger.warning( + "Backing up %s to %s", + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == "a": + sys.exit(-1) + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, + "w", + zipfile.ZIP_DEFLATED, + allowZip64=True, + ) + with zip_output: + dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) + for dirpath, dirnames, filenames in os.walk(dir): + for dirname in dirnames: + dir_arcname = self._get_archive_name( + dirname, + parentdir=dirpath, + rootdir=dir, + ) + zipdir = zipfile.ZipInfo(dir_arcname + "/") + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip_output.writestr(zipdir, "") + for filename in filenames: + file_arcname = self._get_archive_name( + filename, + parentdir=dirpath, + rootdir=dir, + ) + filename = os.path.join(dirpath, filename) + zip_output.write(filename, file_arcname) + + logger.info("Saved %s", display_path(archive_path)) + + def install( + self, + root: str | None = None, + home: str | None = None, + prefix: str | None = None, + warn_script_location: bool = True, + use_user_site: bool = False, + pycompile: bool = True, + ) -> None: + assert self.req is not None + scheme = get_scheme( + self.req.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + + assert self.is_wheel + assert self.local_file_path + + install_wheel( + self.req.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=self.download_info if self.is_direct else None, + requested=self.user_supplied, + ) + self.install_succeeded = True + + +def check_invalid_constraint_type(req: InstallRequirement) -> str: + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are 
not allowed as constraints" + elif req.editable: + problem = "Editable requirements are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" + + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." + ), + replacement="replacing the constraint with a requirement", + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210, + ) + + return problem + + +def _has_option(options: Values, reqs: list[InstallRequirement], option: str) -> bool: + if getattr(options, option, None): + return True + for req in reqs: + if getattr(req, option, None): + return True + return False diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/req_set.py b/venv/lib/python3.12/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 0000000..3451b24 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,81 @@ +import logging +from collections import OrderedDict + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.req.req_install import InstallRequirement + +logger = logging.getLogger(__name__) + + +class RequirementSet: + def __init__(self, check_supported_wheels: bool = True) -> None: + """Create a RequirementSet.""" + + self.requirements: dict[str, InstallRequirement] = OrderedDict() + self.check_supported_wheels = check_supported_wheels + + self.unnamed_requirements: list[InstallRequirement] = [] + + def __str__(self) -> str: + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name or ""), + ) + return " ".join(str(req.req) for req in requirements) + + def __repr__(self) -> str: + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name or ""), + ) + + format_string = "<{classname} object; {count} requirement(s): {reqs}>" + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=", ".join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req: InstallRequirement) -> None: + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req: InstallRequirement) -> None: + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req + + def has_requirement(self, name: str) -> bool: + project_name = canonicalize_name(name) + + return ( + project_name in self.requirements + and not self.requirements[project_name].constraint + ) + + def get_requirement(self, name: str) -> InstallRequirement: + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError(f"No project with the name {name!r}") + + @property + def all_requirements(self) -> list[InstallRequirement]: + return self.unnamed_requirements + list(self.requirements.values()) + + @property + def requirements_to_install(self) -> list[InstallRequirement]: + """Return the list of requirements that need to be installed. 
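+
+        (Editor's note, not upstream text: a requirement is excluded when it
+        is a constraint or when ``satisfied_by`` is set, i.e. an installed
+        distribution already satisfies it.)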
+ + TODO remove this property together with the legacy resolver, since the new + resolver only returns requirements that need to be installed. + """ + return [ + install_req + for install_req in self.all_requirements + if not install_req.constraint and not install_req.satisfied_by + ] diff --git a/venv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py b/venv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py new file mode 100644 index 0000000..3f3dde2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/req/req_uninstall.py @@ -0,0 +1,639 @@ +from __future__ import annotations + +import functools +import os +import sys +import sysconfig +from collections.abc import Generator, Iterable +from importlib.util import cache_from_source +from typing import Any, Callable + +from pip._internal.exceptions import LegacyDistutilsInstall, UninstallMissingRecord +from pip._internal.locations import get_bin_prefix, get_bin_user +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.logging import getLogger, indent_log +from pip._internal.utils.misc import ask, normalize_path, renames, rmtree +from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory +from pip._internal.utils.virtualenv import running_under_virtualenv + +logger = getLogger(__name__) + + +def _script_names( + bin_dir: str, script_name: str, is_gui: bool +) -> Generator[str, None, None]: + """Create the fully qualified name of the files created by + {console,gui}_scripts for the given ``dist``. + Returns the list of file names + """ + exe_name = os.path.join(bin_dir, script_name) + yield exe_name + if not WINDOWS: + return + yield f"{exe_name}.exe" + yield f"{exe_name}.exe.manifest" + if is_gui: + yield f"{exe_name}-script.pyw" + else: + yield f"{exe_name}-script.py" + + +def _unique( + fn: Callable[..., Generator[Any, None, None]], +) -> Callable[..., Generator[Any, None, None]]: + @functools.wraps(fn) + def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]: + seen: set[Any] = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + + return unique + + +@_unique +def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]: + """ + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc and .pyo in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .py[co]. + + If RECORD is not found, raises an error, + with possible information from the INSTALLER file. + + https://packaging.python.org/specifications/recording-installed-packages/ + """ + location = dist.location + assert location is not None, "not installed" + + entries = dist.iter_declared_entries() + if entries is None: + raise UninstallMissingRecord(distribution=dist) + + for entry in entries: + path = os.path.join(location, entry) + yield path + if path.endswith(".py"): + dn, fn = os.path.split(path) + base = fn[:-3] + path = os.path.join(dn, base + ".pyc") + yield path + path = os.path.join(dn, base + ".pyo") + yield path + + +def compact(paths: Iterable[str]) -> set[str]: + """Compact a path set to contain the minimal number of paths + necessary to contain all paths in the set. 
If /a/path/ and
+    /a/path/to/a/file.txt are both in the set, leave only the
+    shorter path."""
+
+    sep = os.path.sep
+    short_paths: set[str] = set()
+    for path in sorted(paths, key=len):
+        should_skip = any(
+            path.startswith(shortpath.rstrip("*"))
+            and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+            for shortpath in short_paths
+        )
+        if not should_skip:
+            short_paths.add(path)
+    return short_paths
+
+
+def compress_for_rename(paths: Iterable[str]) -> set[str]:
+    """Returns a set containing the paths that need to be renamed.
+
+    This set may include directories when the original sequence of paths
+    included every file on disk.
+    """
+    case_map = {os.path.normcase(p): p for p in paths}
+    remaining = set(case_map)
+    unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
+    wildcards: set[str] = set()
+
+    def norm_join(*a: str) -> str:
+        return os.path.normcase(os.path.join(*a))
+
+    for root in unchecked:
+        if any(os.path.normcase(root).startswith(w) for w in wildcards):
+            # This directory has already been handled.
+            continue
+
+        all_files: set[str] = set()
+        all_subdirs: set[str] = set()
+        for dirname, subdirs, files in os.walk(root):
+            all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
+            all_files.update(norm_join(root, dirname, f) for f in files)
+        # If all the files we found are in our remaining set of files to
+        # remove, then remove them from the latter set and add a wildcard
+        # for the directory.
+        if not (all_files - remaining):
+            remaining.difference_update(all_files)
+            wildcards.add(root + os.sep)
+
+    return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths: Iterable[str]) -> tuple[set[str], set[str]]:
+    """Returns a tuple of 2 sets describing which paths to display to the user
+
+    The first set contains paths that would be deleted. Files of a package
+    are not added and the top-level directory of the package has a '*' added
+    at the end - to signify that all its contents are removed.
+
+    The second set contains files that would have been skipped in the above
+    folders.
+    """
+
+    will_remove = set(paths)
+    will_skip = set()
+
+    # Determine folders and files
+    folders = set()
+    files = set()
+    for path in will_remove:
+        if path.endswith(".pyc"):
+            continue
+        if path.endswith("__init__.py") or ".dist-info" in path:
+            folders.add(os.path.dirname(path))
+        files.add(path)
+
+    _normcased_files = set(map(os.path.normcase, files))
+
+    folders = compact(folders)
+
+    # This walks the tree using os.walk to not miss extra folders
+    # that might get added.
+    for folder in folders:
+        for dirpath, _, dirfiles in os.walk(folder):
+            for fname in dirfiles:
+                if fname.endswith(".pyc"):
+                    continue
+
+                file_ = os.path.join(dirpath, fname)
+                if (
+                    os.path.isfile(file_)
+                    and os.path.normcase(file_) not in _normcased_files
+                ):
+                    # We are skipping this file. Add it to the set.
+                    will_skip.add(file_)
+
+    will_remove = files | {os.path.join(folder, "*") for folder in folders}
+
+    return will_remove, will_skip
+
+
+class StashedUninstallPathSet:
+    """A set of file rename operations to stash files while
+    tentatively uninstalling them."""
+
+    def __init__(self) -> None:
+        # Mapping from source file root to [Adjacent]TempDirectory
+        # for files under that directory.
+        self._save_dirs: dict[str, TempDirectory] = {}
+        # (old path, new path) tuples for each move that may need
+        # to be undone.
+        self._moves: list[tuple[str, str]] = []
+
+    def _get_directory_stash(self, path: str) -> str:
+        """Stashes a directory.
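To illustrate the path compaction above, a small usage sketch with hypothetical paths; descendants of a kept directory are dropped, unrelated paths survive:

    paths = {
        "/sp/pkg",
        "/sp/pkg/mod.py",
        "/sp/pkg/sub/data.txt",
        "/sp/other.py",
    }
    print(sorted(compact(paths)))
    # ['/sp/other.py', '/sp/pkg']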
+ + Directories are stashed adjacent to their original location if + possible, or else moved/copied into the user's temp dir.""" + + try: + save_dir: TempDirectory = AdjacentTempDirectory(path) + except OSError: + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[os.path.normcase(path)] = save_dir + + return save_dir.path + + def _get_file_stash(self, path: str) -> str: + """Stashes a file. + + If no root has been provided, one will be created for the directory + in the user's temp directory.""" + path = os.path.normcase(path) + head, old_head = os.path.dirname(path), None + save_dir = None + + while head != old_head: + try: + save_dir = self._save_dirs[head] + break + except KeyError: + pass + head, old_head = os.path.dirname(head), head + else: + # Did not find any suitable root + head = os.path.dirname(path) + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[head] = save_dir + + relpath = os.path.relpath(path, head) + if relpath and relpath != os.path.curdir: + return os.path.join(save_dir.path, relpath) + return save_dir.path + + def stash(self, path: str) -> str: + """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. + """ + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: + new_path = self._get_directory_stash(path) + else: + new_path = self._get_file_stash(path) + + self._moves.append((path, new_path)) + if path_is_dir and os.path.isdir(new_path): + # If we're moving a directory, we need to + # remove the destination first or else it will be + # moved to inside the existing directory. + # We just created new_path ourselves, so it will + # be removable. + os.rmdir(new_path) + renames(path, new_path) + return new_path + + def commit(self) -> None: + """Commits the uninstall by removing stashed files.""" + for save_dir in self._save_dirs.values(): + save_dir.cleanup() + self._moves = [] + self._save_dirs = {} + + def rollback(self) -> None: + """Undoes the uninstall by moving stashed files back.""" + for p in self._moves: + logger.info("Moving to %s\n from %s", *p) + + for new_path, path in self._moves: + try: + logger.debug("Replacing %s from %s", new_path, path) + if os.path.isfile(new_path) or os.path.islink(new_path): + os.unlink(new_path) + elif os.path.isdir(new_path): + rmtree(new_path) + renames(path, new_path) + except OSError as ex: + logger.error("Failed to restore %s", new_path) + logger.debug("Exception: %s", ex) + + self.commit() + + @property + def can_rollback(self) -> bool: + return bool(self._moves) + + +class UninstallPathSet: + """A set of file paths to be removed in the uninstallation of a + requirement.""" + + def __init__(self, dist: BaseDistribution) -> None: + self._paths: set[str] = set() + self._refuse: set[str] = set() + self._pth: dict[str, UninstallPthEntries] = {} + self._dist = dist + self._moved_paths = StashedUninstallPathSet() + # Create local cache of normalize_path results. Creating an UninstallPathSet + # can result in hundreds/thousands of redundant calls to normalize_path with + # the same args, which hurts performance. + self._normalize_path_cached = functools.lru_cache(normalize_path) + + def _permitted(self, path: str) -> bool: + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. 
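The stash machinery above is effectively a two-phase commit for file removal. A hedged usage sketch (the path is hypothetical; UninstallPathSet drives this class the same way):

    stash_set = StashedUninstallPathSet()
    new_location = stash_set.stash("/site-packages/pkg")  # moved aside, not deleted
    try:
        ...  # carry on with the rest of the uninstall
    except Exception:
        stash_set.rollback()  # move everything back into place
        raise
    else:
        stash_set.commit()  # delete the stashed copies for good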
+ + """ + # aka is_local, but caching normalized sys.prefix + if not running_under_virtualenv(): + return True + return path.startswith(self._normalize_path_cached(sys.prefix)) + + def add(self, path: str) -> None: + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self._paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == ".py": + self.add(cache_from_source(path)) + + def add_pth(self, pth_file: str, entry: str) -> None: + pth_file = self._normalize_path_cached(pth_file) + if self._permitted(pth_file): + if pth_file not in self._pth: + self._pth[pth_file] = UninstallPthEntries(pth_file) + self._pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None: + """Remove paths in ``self._paths`` with confirmation (unless + ``auto_confirm`` is True).""" + + if not self._paths: + logger.info( + "Can't uninstall '%s'. No files were found to uninstall.", + self._dist.raw_name, + ) + return + + dist_name_version = f"{self._dist.raw_name}-{self._dist.raw_version}" + logger.info("Uninstalling %s:", dist_name_version) + + with indent_log(): + if auto_confirm or self._allowed_to_proceed(verbose): + moved = self._moved_paths + + for_rename = compress_for_rename(self._paths) + + for path in sorted(compact(for_rename)): + moved.stash(path) + logger.verbose("Removing file or directory %s", path) + + for pth in self._pth.values(): + pth.remove() + + logger.info("Successfully uninstalled %s", dist_name_version) + + def _allowed_to_proceed(self, verbose: bool) -> bool: + """Display which files would be deleted and prompt for confirmation""" + + def _display(msg: str, paths: Iterable[str]) -> None: + if not paths: + return + + logger.info(msg) + with indent_log(): + for path in sorted(compact(paths)): + logger.info(path) + + if not verbose: + will_remove, will_skip = compress_for_output_listing(self._paths) + else: + # In verbose mode, display all the files that are going to be + # deleted. + will_remove = set(self._paths) + will_skip = set() + + _display("Would remove:", will_remove) + _display("Would not remove (might be manually added):", will_skip) + _display("Would not remove (outside of prefix):", self._refuse) + if verbose: + _display("Will actually move:", compress_for_rename(self._paths)) + + return ask("Proceed (Y/n)? 
", ("y", "n", "")) != "n" + + def rollback(self) -> None: + """Rollback the changes previously made by remove().""" + if not self._moved_paths.can_rollback: + logger.error( + "Can't roll back %s; was not uninstalled", + self._dist.raw_name, + ) + return + logger.info("Rolling back uninstall of %s", self._dist.raw_name) + self._moved_paths.rollback() + for pth in self._pth.values(): + pth.rollback() + + def commit(self) -> None: + """Remove temporary save dir: rollback will no longer be possible.""" + self._moved_paths.commit() + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> UninstallPathSet: + dist_location = dist.location + info_location = dist.info_location + if dist_location is None: + logger.info( + "Not uninstalling %s since it is not installed", + dist.canonical_name, + ) + return cls(dist) + + normalized_dist_location = normalize_path(dist_location) + if not dist.local: + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.canonical_name, + normalized_dist_location, + sys.prefix, + ) + return cls(dist) + + if normalized_dist_location in { + p + for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")} + if p + }: + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.canonical_name, + normalized_dist_location, + ) + return cls(dist) + + paths_to_remove = cls(dist) + develop_egg_link = egg_link_path_from_location(dist.raw_name) + + # Distribution is installed with metadata in a "flat" .egg-info + # directory. This means it is not a modern .dist-info installation, an + # egg, or legacy editable. + setuptools_flat_installation = ( + dist.installed_with_setuptools_egg_info + and info_location is not None + and os.path.exists(info_location) + # If dist is editable and the location points to a ``.egg-info``, + # we are in fact in the legacy editable case. + and not info_location.endswith(f"{dist.setuptools_filename}.egg-info") + ) + + # Uninstall cases order do matter as in the case of 2 installs of the + # same package, pip needs to uninstall the currently detected version + if setuptools_flat_installation: + if info_location is not None: + paths_to_remove.add(info_location) + installed_files = dist.iter_declared_entries() + if installed_files is not None: + for installed_file in installed_files: + paths_to_remove.add(os.path.join(dist_location, installed_file)) + # FIXME: need a test for this elif block + # occurs with --single-version-externally-managed/--record outside + # of pip + elif dist.is_file("top_level.txt"): + try: + namespace_packages = dist.read_text("namespace_packages.txt") + except FileNotFoundError: + namespaces = [] + else: + namespaces = namespace_packages.splitlines(keepends=False) + for top_level_pkg in [ + p + for p in dist.read_text("top_level.txt").splitlines() + if p and p not in namespaces + ]: + path = os.path.join(dist_location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(f"{path}.py") + paths_to_remove.add(f"{path}.pyc") + paths_to_remove.add(f"{path}.pyo") + + elif dist.installed_by_distutils: + raise LegacyDistutilsInstall(distribution=dist) + + elif dist.installed_as_egg: + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg + # XXX We use normalized_dist_location because dist_location my contain + # a trailing / if the distribution is a zipped egg + # (which is not a directory). 
+            paths_to_remove.add(normalized_dist_location)
+            easy_install_egg = os.path.split(normalized_dist_location)[1]
+            easy_install_pth = os.path.join(
+                os.path.dirname(normalized_dist_location),
+                "easy-install.pth",
+            )
+            paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
+
+        elif dist.installed_with_dist_info:
+            for path in uninstallation_paths(dist):
+                paths_to_remove.add(path)
+
+        elif develop_egg_link:
+            # PEP 660 modern editable is handled in the ``.dist-info`` case
+            # above, so this only covers the setuptools-style editable.
+            with open(develop_egg_link) as fh:
+                link_pointer = os.path.normcase(fh.readline().strip())
+                normalized_link_pointer = paths_to_remove._normalize_path_cached(
+                    link_pointer
+                )
+            assert os.path.samefile(
+                normalized_link_pointer, normalized_dist_location
+            ), (
+                f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
+                f"installed location of {dist.raw_name} (at {dist_location})"
+            )
+            paths_to_remove.add(develop_egg_link)
+            easy_install_pth = os.path.join(
+                os.path.dirname(develop_egg_link), "easy-install.pth"
+            )
+            paths_to_remove.add_pth(easy_install_pth, dist_location)
+
+        else:
+            logger.debug(
+                "Not sure how to uninstall: %s - Check: %s",
+                dist,
+                dist_location,
+            )
+
+        if dist.in_usersite:
+            bin_dir = get_bin_user()
+        else:
+            bin_dir = get_bin_prefix()
+
+        # find distutils scripts= scripts
+        try:
+            for script in dist.iter_distutils_script_names():
+                paths_to_remove.add(os.path.join(bin_dir, script))
+                if WINDOWS:
+                    paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
+        except (FileNotFoundError, NotADirectoryError):
+            pass
+
+        # find console_scripts and gui_scripts
+        def iter_scripts_to_remove(
+            dist: BaseDistribution,
+            bin_dir: str,
+        ) -> Generator[str, None, None]:
+            for entry_point in dist.iter_entry_points():
+                if entry_point.group == "console_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, False)
+                elif entry_point.group == "gui_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, True)
+
+        for s in iter_scripts_to_remove(dist, bin_dir):
+            paths_to_remove.add(s)
+
+        return paths_to_remove
+
+
+class UninstallPthEntries:
+    def __init__(self, pth_file: str) -> None:
+        self.file = pth_file
+        self.entries: set[str] = set()
+        self._saved_lines: list[bytes] | None = None
+
+    def add(self, entry: str) -> None:
+        entry = os.path.normcase(entry)
+        # On Windows, os.path.normcase converts the entry to use
+        # backslashes. This is correct for entries that describe absolute
+        # paths outside of site-packages, but all the others use forward
+        # slashes.
+        # os.path.splitdrive is used instead of os.path.isabs because isabs
+        # treats non-absolute paths with drive letter markings like c:foo\bar
+        # as absolute paths. It also does not recognize UNC paths if they don't
+        # have more than "\\server\share". Valid examples: "\\server\share\" or
+        # "\\server\share\folder".
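As a quick illustration of the splitdrive behaviour the comment above relies on (ntpath is what os.path resolves to on Windows):

    import ntpath

    ntpath.splitdrive(r"c:\Tools\pkg")           # ('c:', '\\Tools\\pkg') -> has a drive, kept as-is
    ntpath.splitdrive(r"spam\eggs")              # ('', 'spam\\eggs')     -> no drive, rewritten with '/'
    ntpath.splitdrive(r"\\server\share\folder")  # ('\\\\server\\share', '\\folder') -> UNC drive detected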
+ if WINDOWS and not os.path.splitdrive(entry)[0]: + entry = entry.replace("\\", "/") + self.entries.add(entry) + + def remove(self) -> None: + logger.verbose("Removing pth entries from %s:", self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning("Cannot remove entries from nonexistent file %s", self.file) + return + with open(self.file, "rb") as fh: + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + if any(b"\r\n" in line for line in lines): + endline = "\r\n" + else: + endline = "\n" + # handle missing trailing newline + if lines and not lines[-1].endswith(endline.encode("utf-8")): + lines[-1] = lines[-1] + endline.encode("utf-8") + for entry in self.entries: + try: + logger.verbose("Removing entry: %s", entry) + lines.remove((entry + endline).encode("utf-8")) + except ValueError: + pass + with open(self.file, "wb") as fh: + fh.writelines(lines) + + def rollback(self) -> bool: + if self._saved_lines is None: + logger.error("Cannot roll back changes to %s, none were made", self.file) + return False + logger.debug("Rolling %s back to previous state", self.file) + with open(self.file, "wb") as fh: + fh.writelines(self._saved_lines) + return True diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6724e13 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000..19ed061 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/base.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 0000000..5ec4d96 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,20 @@ +from typing import Callable, Optional + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet + +InstallRequirementProvider = Callable[ + [str, Optional[InstallRequirement]], InstallRequirement +] + + +class BaseResolver: + def resolve( + self, root_reqs: list[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + raise NotImplementedError() + + def get_installation_order( + self, req_set: RequirementSet + ) -> list[InstallRequirement]: + raise NotImplementedError() diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc new file mode 
100644 index 0000000..cf52066 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc new file mode 100644 index 0000000..914dd52 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py new file mode 100644 index 0000000..33a4fdc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -0,0 +1,598 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +from __future__ import annotations + +import logging +import sys +from collections import defaultdict +from collections.abc import Iterable +from itertools import chain +from typing import Optional + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.requirements import Requirement + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, + InstallationError, + NoneMetadataError, + UnsupportedPythonVersion, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.utils import compatibility_tags +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.direct_url_helpers import direct_url_from_link +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import normalize_version_info +from pip._internal.utils.packaging import check_requires_python + +logger = logging.getLogger(__name__) + +DiscoveredDependencies = defaultdict[Optional[str], list[InstallRequirement]] + + +def _check_dist_requires_python( + dist: BaseDistribution, + version_info: tuple[int, int, int], + ignore_requires_python: bool = False, +) -> None: + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. 
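The compatibility test described here ultimately reduces to a SpecifierSet membership check; a minimal sketch with hypothetical version values:

    from pip._vendor.packaging.specifiers import SpecifierSet

    requires_python = SpecifierSet(">=3.9,<3.13")
    version = ".".join(map(str, (3, 12, 1)))  # from a version_info 3-tuple
    print(requires_python.contains(version))   # True
    print(requires_python.contains("3.13.0"))  # False -> UnsupportedPythonVersion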
+ """ + # This idiosyncratically converts the SpecifierSet to str and let + # check_requires_python then parse it again into SpecifierSet. But this + # is the legacy resolver so I'm just not going to bother refactoring. + try: + requires_python = str(dist.requires_python) + except FileNotFoundError as e: + raise NoneMetadataError(dist, str(e)) + try: + is_compatible = check_requires_python( + requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc + ) + return + + if is_compatible: + return + + version = ".".join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + "Ignoring failed Requires-Python check for package %r: %s not in %r", + dist.raw_name, + version, + requires_python, + ) + return + + raise UnsupportedPythonVersion( + f"Package {dist.raw_name!r} requires a different Python: " + f"{version} not in {requires_python!r}" + ) + + +class Resolver(BaseResolver): + """Resolves which packages need to be installed/uninstalled to perform \ + the requested operation without breaking the requirements of any package. + """ + + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: WheelCache | None, + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: tuple[int, ...] | None = None, + ) -> None: + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + + self.preparer = preparer + self.finder = finder + self.wheel_cache = wheel_cache + + self.upgrade_strategy = upgrade_strategy + self.force_reinstall = force_reinstall + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python + self.use_user_site = use_user_site + self._make_install_req = make_install_req + + self._discovered_dependencies: DiscoveredDependencies = defaultdict(list) + + def resolve( + self, root_reqs: list[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + """Resolve what operations need to be done + + As a side-effect of this method, the packages (and their dependencies) + are downloaded, unpacked and prepared for installation. This + preparation is done by ``pip.operations.prepare``. + + Once PyPI has static dependency metadata available, it would be + possible to move the preparation to become a step separated from + dependency resolution. + """ + requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for req in root_reqs: + if req.constraint: + check_invalid_constraint_type(req) + self._add_requirement_to_set(requirement_set, req) + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # _populate_link() needs to be called before we can make decisions + # based on link type. 
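The scan loop that follows relies on a standard worklist idiom: iterating a list while appending to it also visits the newly appended items. A standalone sketch of the same pattern, with hypothetical requirement names:

    from itertools import chain

    seed = ["a", "b"]
    discovered: list[str] = []

    def resolve_one(item: str) -> list[str]:
        # Hypothetical: resolving "a" discovers "c"; nothing else has deps.
        return {"a": ["c"]}.get(item, [])

    for item in chain(seed, discovered):
        discovered.extend(resolve_one(item))

    print(discovered)  # ['c'] -- the chained list iterator saw the appended item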
+ discovered_reqs: list[InstallRequirement] = [] + hash_errors = HashErrors() + for req in chain(requirement_set.all_requirements, discovered_reqs): + try: + discovered_reqs.extend(self._resolve_one(requirement_set, req)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + return requirement_set + + def _add_requirement_to_set( + self, + requirement_set: RequirementSet, + install_req: InstallRequirement, + parent_req_name: str | None = None, + extras_requested: Iterable[str] | None = None, + ) -> tuple[list[InstallRequirement], InstallRequirement | None]: + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, + install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. + # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = compatibility_tags.get_supported() + if requirement_set.check_supported_wheels and not wheel.supported(tags): + raise InstallationError( + f"{wheel.filename} is not a supported wheel on this platform." + ) + + # This next bit is really a sanity check. + assert ( + not install_req.user_supplied or parent_req_name is None + ), "a user supplied req shouldn't have a parent" + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. + if not install_req.name: + requirement_set.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req: InstallRequirement | None = requirement_set.get_requirement( + install_req.name + ) + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None + and existing_req + and not existing_req.constraint + and existing_req.extras == install_req.extras + and existing_req.req + and install_req.req + and existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + f"Double requirement given: {install_req} " + f"(already in {existing_req}, name={install_req.name!r})" + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + requirement_set.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. 
+ if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = install_req.link and not ( + existing_req.link and install_req.link.path == existing_req.link.path + ) + if does_not_satisfy_constraint: + raise InstallationError( + f"Could not satisfy constraints for '{install_req.name}': " + "installation from path or url cannot be " + "constrained to a version" + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. + if install_req.user_supplied: + existing_req.user_supplied = True + existing_req.extras = tuple( + sorted(set(existing_req.extras) | set(install_req.extras)) + ) + logger.debug( + "Setting %s extras to: %s", + existing_req, + existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. + return [existing_req], existing_req + + def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.user_supplied or req.constraint + + def _set_req_to_reinstall(self, req: InstallRequirement) -> None: + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + assert req.satisfied_by is not None + if not self.use_user_site or req.satisfied_by.in_usersite: + req.should_reinstall = True + req.satisfied_by = None + + def _check_skip_installed(self, req_to_install: InstallRequirement) -> str | None: + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return "already satisfied, skipping upgrade" + return "already satisfied" + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return "already up-to-date" + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? 
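The three strategies accepted by _is_upgrade_allowed above amount to a small decision table; a standalone restatement of that logic, not pip API:

    def upgrade_allowed(strategy: str, user_supplied: bool, constraint: bool) -> bool:
        # "eager" upgrades everything, "to-satisfy-only" upgrades nothing,
        # "only-if-needed" upgrades only what the user asked for directly
        # (or pinned through a constraint), mirroring the method above.
        if strategy == "to-satisfy-only":
            return False
        if strategy == "eager":
            return True
        assert strategy == "only-if-needed"
        return user_supplied or constraint

    print(upgrade_allowed("only-if-needed", user_supplied=False, constraint=False))  # False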
+                pass
+
+        self._set_req_to_reinstall(req_to_install)
+        return None
+
+    def _find_requirement_link(self, req: InstallRequirement) -> Link | None:
+        upgrade = self._is_upgrade_allowed(req)
+        best_candidate = self.finder.find_requirement(req, upgrade)
+        if not best_candidate:
+            return None
+
+        # Log a warning per PEP 592 if necessary before returning.
+        link = best_candidate.link
+        if link.is_yanked:
+            reason = link.yanked_reason or ""
+            msg = (
+                # Mark this as a unicode string to prevent
+                # "UnicodeEncodeError: 'ascii' codec can't encode character"
+                # in Python 2 when the reason contains non-ascii characters.
+                "The candidate selected for download or install is a "
+                f"yanked version: {best_candidate}\n"
+                f"Reason for being yanked: {reason}"
+            )
+            logger.warning(msg)
+
+        return link
+
+    def _populate_link(self, req: InstallRequirement) -> None:
+        """Ensure that if a link can be found for this, that it is found.
+
+        Note that req.link may still be None - if the requirement is already
+        installed and not needed to be upgraded based on the return value of
+        _is_upgrade_allowed().
+
+        If preparer.require_hashes is True, don't use the wheel cache, because
+        cached wheels, always built locally, have different hashes than the
+        files downloaded from the index server and thus throw false hash
+        mismatches. Furthermore, cached wheels at present have nondeterministic
+        contents due to file modification times.
+        """
+        if req.link is None:
+            req.link = self._find_requirement_link(req)
+
+        if self.wheel_cache is None or self.preparer.require_hashes:
+            return
+
+        assert req.link is not None, "_find_requirement_link unexpectedly returned None"
+        cache_entry = self.wheel_cache.get_cache_entry(
+            link=req.link,
+            package_name=req.name,
+            supported_tags=get_supported(),
+        )
+        if cache_entry is not None:
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
+            if req.link is req.original_link and cache_entry.persistent:
+                req.cached_wheel_source_link = req.link
+            if cache_entry.origin is not None:
+                req.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hashes field.
+                req.download_info = direct_url_from_link(
+                    req.link, link_is_in_wheel_cache=cache_entry.persistent
+                )
+            req.link = cache_entry.link
+
+    def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
+        """Takes an InstallRequirement and returns a single AbstractDist \
+        representing a prepared variant of the same.
+        """
+        if req.editable:
+            return self.preparer.prepare_editable_requirement(req)
+
+        # satisfied_by is only evaluated by calling _check_skip_installed,
+        # so it must be None here.
+        assert req.satisfied_by is None
+        skip_reason = self._check_skip_installed(req)
+
+        if req.satisfied_by:
+            return self.preparer.prepare_installed_requirement(req, skip_reason)
+
+        # We eagerly populate the link, since that's our "legacy" behavior.
+        self._populate_link(req)
+        dist = self.preparer.prepare_linked_requirement(req)
+
+        # NOTE
+        # The following portion is for determining if a certain package is
+        # going to be re-installed/upgraded or not and reporting to the user.
+        # This should probably get cleaned up in a future refactor.
+ + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" + or self.force_reinstall + or self.ignore_installed + or req.link.scheme == "file" + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + "Requirement already satisfied (use --upgrade to upgrade): %s", + req, + ) + return dist + + def _resolve_one( + self, + requirement_set: RequirementSet, + req_to_install: InstallRequirement, + ) -> list[InstallRequirement]: + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. + """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # Parse and return dependencies + dist = self._get_dist_for(req_to_install) + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. + _check_dist_requires_python( + dist, + version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) + + more_reqs: list[InstallRequirement] = [] + + def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: + # This idiosyncratically converts the Requirement to str and let + # make_install_req then parse it again into Requirement. But this is + # the legacy resolver so I'm just not going to bother refactoring. + sub_install_req = self._make_install_req(str(subreq), req_to_install) + parent_req_name = req_to_install.name + to_scan_again, add_to_parent = self._add_requirement_to_set( + requirement_set, + sub_install_req, + parent_req_name=parent_req_name, + extras_requested=extras_requested, + ) + if parent_req_name and add_to_parent: + self._discovered_dependencies[parent_req_name].append(add_to_parent) + more_reqs.extend(to_scan_again) + + with indent_log(): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + assert req_to_install.name is not None + if not requirement_set.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. + assert req_to_install.user_supplied + self._add_requirement_to_set( + requirement_set, req_to_install, parent_req_name=None + ) + + if not self.ignore_dependencies: + if req_to_install.extras: + logger.debug( + "Installing extra requirements: %r", + ",".join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.iter_provided_extras()) + ) + for missing in missing_requested: + logger.warning( + "%s %s does not provide the extra '%s'", + dist.raw_name, + dist.version, + missing, + ) + + available_requested = sorted( + set(dist.iter_provided_extras()) & set(req_to_install.extras) + ) + for subreq in dist.iter_dependencies(available_requested): + add_req(subreq, extras_requested=available_requested) + + return more_reqs + + def get_installation_order( + self, req_set: RequirementSet + ) -> list[InstallRequirement]: + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. 
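A standalone sketch of the post-order scheduling implemented below, using a hypothetical dependency map in place of _discovered_dependencies:

    deps = {"app": ["lib"], "lib": ["base"], "base": []}
    order: list[str] = []
    seen: set[str] = set()

    def schedule(name: str) -> None:
        if name in seen:
            return  # also what breaks cycles
        seen.add(name)
        for dep in deps.get(name, []):
            schedule(dep)   # dependencies first...
        order.append(name)  # ...then the requiring thing

    schedule("app")
    print(order)  # ['base', 'lib', 'app']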
We break cycles at an arbitrary point,
+        and make no other guarantees.
+        """
+        # The current implementation, which we may change at any point,
+        # installs the user-specified things in the order given, except when
+        # dependencies must come earlier to achieve topological order.
+        order = []
+        ordered_reqs: set[InstallRequirement] = set()
+
+        def schedule(req: InstallRequirement) -> None:
+            if req.satisfied_by or req in ordered_reqs:
+                return
+            if req.constraint:
+                return
+            ordered_reqs.add(req)
+            for dep in self._discovered_dependencies[req.name]:
+                schedule(dep)
+            order.append(req)
+
+        for install_req in req_set.requirements.values():
+            schedule(install_req)
+        return order
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..0990cd9
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc
new file mode 100644
index 0000000..94fd873
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc
new file mode 100644
index 0000000..1aa8be9
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc
new file mode 100644
index 0000000..dadf8eb
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc
new file mode 100644
index 0000000..96d20d0
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc
new file mode 100644
index 0000000..f8543bc
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc
b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc new file mode 100644 index 0000000..8ade392 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc new file mode 100644 index 0000000..9e2eeea Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc new file mode 100644 index 0000000..1e64aca Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py new file mode 100644 index 0000000..03877b6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/base.py @@ -0,0 +1,142 @@ +from __future__ import annotations + +from collections.abc import Iterable +from dataclasses import dataclass +from typing import Optional + +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName +from pip._vendor.packaging.version import Version + +from pip._internal.models.link import Link, links_equivalent +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.hashes import Hashes + +CandidateLookup = tuple[Optional["Candidate"], Optional[InstallRequirement]] + + +def format_name(project: NormalizedName, extras: frozenset[NormalizedName]) -> str: + if not extras: + return project + extras_expr = ",".join(sorted(extras)) + return f"{project}[{extras_expr}]" + + +@dataclass(frozen=True) +class Constraint: + specifier: SpecifierSet + hashes: Hashes + links: frozenset[Link] + + @classmethod + def empty(cls) -> Constraint: + return Constraint(SpecifierSet(), Hashes(), frozenset()) + + @classmethod + def from_ireq(cls, ireq: InstallRequirement) -> Constraint: + links = frozenset([ireq.link]) if ireq.link else frozenset() + return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links) + + def __bool__(self) -> bool: + return bool(self.specifier) or bool(self.hashes) or bool(self.links) + + def __and__(self, other: InstallRequirement) -> Constraint: + if not isinstance(other, InstallRequirement): + return NotImplemented + specifier = self.specifier & other.specifier + hashes = self.hashes & other.hashes(trust_internet=False) + links = self.links + if other.link: + links = links.union([other.link]) + return Constraint(specifier, hashes, links) + + def is_satisfied_by(self, candidate: Candidate) -> bool: + # Reject if there are any mismatched URL constraints on this package. 
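The specifier half of Constraint merging above leans on the intersection operator from packaging; a minimal illustration with hypothetical version ranges:

    from pip._vendor.packaging.specifiers import SpecifierSet

    merged = SpecifierSet(">=1.0") & SpecifierSet("<2.0")
    print(merged.contains("1.5"))  # True
    print(merged.contains("2.1"))  # False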
+ if self.links and not all(_match_link(link, candidate) for link in self.links): + return False + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + return self.specifier.contains(candidate.version, prereleases=True) + + +class Requirement: + @property + def project_name(self) -> NormalizedName: + """The "project name" of a requirement. + + This is different from ``name`` if this requirement contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Subclass should override") + + @property + def name(self) -> str: + """The name identifying this requirement in the resolver. + + This is different from ``project_name`` if this requirement contains + extras, where ``project_name`` would not contain the ``[...]`` part. + """ + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return False + + def get_candidate_lookup(self) -> CandidateLookup: + raise NotImplementedError("Subclass should override") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") + + +def _match_link(link: Link, candidate: Candidate) -> bool: + if candidate.source_link: + return links_equivalent(link, candidate.source_link) + return False + + +class Candidate: + @property + def project_name(self) -> NormalizedName: + """The "project name" of the candidate. + + This is different from ``name`` if this candidate contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Override in subclass") + + @property + def name(self) -> str: + """The name identifying this candidate in the resolver. + + This is different from ``project_name`` if this candidate contains + extras, where ``project_name`` would not contain the ``[...]`` part. 
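The project_name/name split is easiest to see through format_name defined earlier in this module; the package name and extra below are hypothetical:

    print(format_name("requests", frozenset()))           # requests
    print(format_name("requests", frozenset({"socks"})))  # requests[socks]
    # With several extras the [...] part is sorted, so the resolver
    # always sees one stable node name per (project, extras) pair.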
+ """ + raise NotImplementedError("Override in subclass") + + @property + def version(self) -> Version: + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def source_link(self) -> Link | None: + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self, with_requires: bool) -> Iterable[Requirement | None]: + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self) -> InstallRequirement | None: + raise NotImplementedError("Override in subclass") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 0000000..aa126d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,591 @@ +from __future__ import annotations + +import logging +import sys +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Union, cast + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import ( + FailedToPrepareCandidate, + HashError, + InstallationSubprocessError, + InvalidInstalledPackage, + MetadataInconsistent, + MetadataInvalid, +) +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link, links_equivalent +from pip._internal.models.wheel import Wheel +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.direct_url_helpers import direct_url_from_link +from pip._internal.utils.misc import normalize_version_info + +from .base import Candidate, Requirement, format_name + +if TYPE_CHECKING: + from .factory import Factory + +logger = logging.getLogger(__name__) + +BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", +] + +# Avoid conflicting with the PyPI package "Python". 
+REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "") + + +def as_base_candidate(candidate: Candidate) -> BaseCandidate | None: + """The runtime version of BaseCandidate.""" + base_candidate_classes = ( + AlreadyInstalledCandidate, + EditableCandidate, + LinkCandidate, + ) + if isinstance(candidate, base_candidate_classes): + return candidate + return None + + +def make_install_req_from_link( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + isolated=template.isolated, + constraint=template.constraint, + hash_options=template.hash_options, + config_settings=template.config_settings, + ) + ireq.original_link = template.original_link + ireq.link = link + ireq.extras = template.extras + return ireq + + +def make_install_req_from_editable( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert template.editable, "template not editable" + if template.name: + req_string = f"{template.name} @ {link.url}" + else: + req_string = link.url + ireq = install_req_from_editable( + req_string, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + isolated=template.isolated, + constraint=template.constraint, + permit_editable_wheels=template.permit_editable_wheels, + hash_options=template.hash_options, + config_settings=template.config_settings, + ) + ireq.extras = template.extras + return ireq + + +def _make_install_req_from_dist( + dist: BaseDistribution, template: InstallRequirement +) -> InstallRequirement: + if template.req: + line = str(template.req) + elif template.link: + line = f"{dist.canonical_name} @ {template.link.url}" + else: + line = f"{dist.canonical_name}=={dist.version}" + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + isolated=template.isolated, + constraint=template.constraint, + hash_options=template.hash_options, + config_settings=template.config_settings, + ) + ireq.satisfied_by = dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. + ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. from pypi.org). 
+ """ + + dist: BaseDistribution + is_installed = False + + def __init__( + self, + link: Link, + source_link: Link, + ireq: InstallRequirement, + factory: Factory, + name: NormalizedName | None = None, + version: Version | None = None, + ) -> None: + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self.dist = self._prepare() + self._hash: int | None = None + + def __str__(self) -> str: + return f"{self.name} {self.version}" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self._link)!r})" + + def __hash__(self) -> int: + if self._hash is not None: + return self._hash + + self._hash = hash((self.__class__, self._link)) + return self._hash + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return links_equivalent(self._link, other._link) + return False + + @property + def source_link(self) -> Link | None: + return self._source_link + + @property + def project_name(self) -> NormalizedName: + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = self.dist.canonical_name + return self._name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> Version: + if self._version is None: + self._version = self.dist.version + return self._version + + def format_for_error(self) -> str: + return ( + f"{self.name} {self.version} " + f"(from {self._link.file_path if self._link.is_file else self._link})" + ) + + def _prepare_distribution(self) -> BaseDistribution: + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self, dist: BaseDistribution) -> None: + """Check for consistency of project name and version of dist.""" + if self._name is not None and self._name != dist.canonical_name: + raise MetadataInconsistent( + self._ireq, + "name", + self._name, + dist.canonical_name, + ) + if self._version is not None and self._version != dist.version: + raise MetadataInconsistent( + self._ireq, + "version", + str(self._version), + str(dist.version), + ) + # check dependencies are valid + # TODO performance: this means we iterate the dependencies at least twice, + # we may want to cache parsed Requires-Dist + try: + list(dist.iter_dependencies(list(dist.iter_provided_extras()))) + except InvalidRequirement as e: + raise MetadataInvalid(self._ireq, str(e)) + + def _prepare(self) -> BaseDistribution: + try: + dist = self._prepare_distribution() + except HashError as e: + # Provide HashError the underlying ireq that caused it. This + # provides context for the resulting error message to show the + # offending line to the user. + e.req = self._ireq + raise + except InstallationSubprocessError as exc: + if isinstance(self._ireq.comes_from, InstallRequirement): + request_chain = self._ireq.comes_from.from_path() + else: + request_chain = self._ireq.comes_from + + if request_chain is None: + request_chain = "directly requested" + + raise FailedToPrepareCandidate( + package_name=self._ireq.name or str(self._link), + requirement_chain=request_chain, + failed_step=exc.command_description, + ) + + self._check_metadata_consistency(dist) + return dist + + def iter_dependencies(self, with_requires: bool) -> Iterable[Requirement | None]: + # Emit the Requires-Python requirement first to fail fast on + # unsupported candidates and avoid pointless downloads/preparation. 
+ yield self._factory.make_requires_python_requirement(self.dist.requires_python) + requires = self.dist.iter_dependencies() if with_requires else () + for r in requires: + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) + + def get_install_requirement(self) -> InstallRequirement | None: + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: Factory, + name: NormalizedName | None = None, + version: Version | None = None, + ) -> None: + source_link = link + cache_entry = factory.get_wheel_cache_entry(source_link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + assert ireq.link == link + if ireq.link.is_wheel and not ireq.link.is_file: + wheel = Wheel(ireq.link.filename) + wheel_name = wheel.name + assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel" + # Version may not be present for PEP 508 direct URLs + if version is not None: + wheel_version = Version(wheel.version) + assert ( + version == wheel_version + ), f"{version!r} != {wheel_version!r} for wheel {name}" + + if cache_entry is not None: + assert ireq.link.is_wheel + assert ireq.link.is_file + if cache_entry.persistent and template.link is template.original_link: + ireq.cached_wheel_source_link = source_link + if cache_entry.origin is not None: + ireq.download_info = cache_entry.origin + else: + # Legacy cache entry that does not have origin.json. + # download_info may miss the archive_info.hashes field. + ireq.download_info = direct_url_from_link( + source_link, link_is_in_wheel_cache=cache_entry.persistent + ) + + super().__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + preparer = self._factory.preparer + return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: Factory, + name: NormalizedName | None = None, + version: Version | None = None, + ) -> None: + super().__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist: BaseDistribution, + template: InstallRequirement, + factory: Factory, + ) -> None: + self.dist = dist + self._ireq = _make_install_req_from_dist(dist, template) + self._factory = factory + self._version = None + + # This is just logging some messages, so we can do it eagerly. + # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in _make_install_req_from_dist. + # TODO: Supply reason based on force_reinstall and upgrade_strategy. 
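+        # Illustrative: for `pip install requests` with requests already
+        # present and no --upgrade, the resolver can pin this candidate and
+        # the preparer only logs something like
+        #   "Requirement already satisfied: requests ..."
+        # instead of downloading anything.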
+ skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __str__(self) -> str: + return str(self.dist) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.dist!r})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AlreadyInstalledCandidate): + return NotImplemented + return self.name == other.name and self.version == other.version + + def __hash__(self) -> int: + return hash((self.name, self.version)) + + @property + def project_name(self) -> NormalizedName: + return self.dist.canonical_name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> Version: + if self._version is None: + self._version = self.dist.version + return self._version + + @property + def is_editable(self) -> bool: + return self.dist.editable + + def format_for_error(self) -> str: + return f"{self.name} {self.version} (Installed)" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Requirement | None]: + if not with_requires: + return + + try: + for r in self.dist.iter_dependencies(): + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) + except InvalidRequirement as exc: + raise InvalidInstalledPackage(dist=self.dist, invalid_exc=exc) from None + + def get_install_requirement(self) -> InstallRequirement | None: + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. + """ + + def __init__( + self, + base: BaseCandidate, + extras: frozenset[str], + *, + comes_from: InstallRequirement | None = None, + ) -> None: + """ + :param comes_from: the InstallRequirement that led to this candidate if it + differs from the base's InstallRequirement. This will often be the + case in the sense that this candidate's requirement has the extras + while the base's does not. Unlike the InstallRequirement backed + candidates, this requirement is used solely for reporting purposes, + it does not do any leg work. 
+ """ + self.base = base + self.extras = frozenset(canonicalize_name(e) for e in extras) + self._comes_from = comes_from if comes_from is not None else self.base._ireq + + def __str__(self) -> str: + name, rest = str(self.base).split(" ", 1) + return "{}[{}] {}".format(name, ",".join(self.extras), rest) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})" + + def __hash__(self) -> int: + return hash((self.base, self.extras)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + @property + def project_name(self) -> NormalizedName: + return self.base.project_name + + @property + def name(self) -> str: + """The normalised name of the project the candidate refers to""" + return format_name(self.base.project_name, self.extras) + + @property + def version(self) -> Version: + return self.base.version + + def format_for_error(self) -> str: + return "{} [{}]".format( + self.base.format_for_error(), ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self) -> bool: + return self.base.is_installed + + @property + def is_editable(self) -> bool: + return self.base.is_editable + + @property + def source_link(self) -> Link | None: + return self.base.source_link + + def iter_dependencies(self, with_requires: bool) -> Iterable[Requirement | None]: + factory = self.base._factory + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + if not with_requires: + return + + # The user may have specified extras that the candidate doesn't + # support. We ignore any unsupported extras here. + valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) + invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) + for extra in sorted(invalid_extras): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + for r in self.base.dist.iter_dependencies(valid_extras): + yield from factory.make_requirements_from_spec( + str(r), + self._comes_from, + valid_extras, + ) + + def get_install_requirement(self) -> InstallRequirement | None: + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. + return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info: tuple[int, ...] | None) -> None: + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. 
+ + def __str__(self) -> str: + return f"Python {self._version}" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._version!r})" + + @property + def project_name(self) -> NormalizedName: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def name(self) -> str: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def version(self) -> Version: + return self._version + + def format_for_error(self) -> str: + return f"Python {self.version}" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Requirement | None]: + return () + + def get_install_requirement(self) -> InstallRequirement | None: + return None diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 0000000..07be569 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,845 @@ +from __future__ import annotations + +import contextlib +import functools +import logging +from collections.abc import Iterable, Iterator, Mapping, Sequence +from typing import ( + TYPE_CHECKING, + Callable, + NamedTuple, + Protocol, + TypeVar, + cast, +) + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import InvalidVersion, Version +from pip._vendor.resolvelib import ResolutionImpossible + +from pip._internal.cache import CacheEntry, WheelCache +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + InvalidInstalledPackage, + MetadataInconsistent, + MetadataInvalid, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_default_environment +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_drop_extras, + install_req_from_link_and_ireq, +) +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.resolution.base import InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import Candidate, Constraint, Requirement +from .candidates import ( + AlreadyInstalledCandidate, + BaseCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, + as_base_candidate, +) +from .found_candidates import FoundCandidates, IndexCandidateInfo +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, + SpecifierWithoutExtrasRequirement, + UnsatisfiableRequirement, +) + +if TYPE_CHECKING: + + class ConflictCause(Protocol): + requirement: RequiresPythonRequirement + parent: Candidate + + +logger = logging.getLogger(__name__) + +C = TypeVar("C") +Cache = dict[Link, C] + + +class CollectedRootRequirements(NamedTuple): + requirements: list[Requirement] + constraints: dict[str, Constraint] + user_requested: dict[str, int] + + +class Factory: + def __init__( + self, + finder: PackageFinder, 
+ preparer: RequirementPreparer, + make_install_req: InstallRequirementProvider, + wheel_cache: WheelCache | None, + use_user_site: bool, + force_reinstall: bool, + ignore_installed: bool, + ignore_requires_python: bool, + py_version_info: tuple[int, ...] | None = None, + ) -> None: + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + + self._build_failures: Cache[InstallationError] = {} + self._link_candidate_cache: Cache[LinkCandidate] = {} + self._editable_candidate_cache: Cache[EditableCandidate] = {} + self._installed_candidate_cache: dict[str, AlreadyInstalledCandidate] = {} + self._extras_candidate_cache: dict[ + tuple[int, frozenset[NormalizedName]], ExtrasCandidate + ] = {} + self._supported_tags_cache = get_supported() + + if not ignore_installed: + env = get_default_environment() + self._installed_dists = { + dist.canonical_name: dist + for dist in env.iter_installed_distributions(local_only=False) + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self) -> bool: + return self._force_reinstall + + def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: + if not link.is_wheel: + return + wheel = Wheel(link.filename) + if wheel.supported(self._finder.target_python.get_unsorted_tags()): + return + msg = f"{link.filename} is not a supported wheel on this platform." + raise UnsupportedWheel(msg) + + def _make_extras_candidate( + self, + base: BaseCandidate, + extras: frozenset[str], + *, + comes_from: InstallRequirement | None = None, + ) -> ExtrasCandidate: + cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras)) + try: + candidate = self._extras_candidate_cache[cache_key] + except KeyError: + candidate = ExtrasCandidate(base, extras, comes_from=comes_from) + self._extras_candidate_cache[cache_key] = candidate + return candidate + + def _make_candidate_from_dist( + self, + dist: BaseDistribution, + extras: frozenset[str], + template: InstallRequirement, + ) -> Candidate: + try: + base = self._installed_candidate_cache[dist.canonical_name] + except KeyError: + base = AlreadyInstalledCandidate(dist, template, factory=self) + self._installed_candidate_cache[dist.canonical_name] = base + if not extras: + return base + return self._make_extras_candidate(base, extras, comes_from=template) + + def _make_candidate_from_link( + self, + link: Link, + extras: frozenset[str], + template: InstallRequirement, + name: NormalizedName | None, + version: Version | None, + ) -> Candidate | None: + base: BaseCandidate | None = self._make_base_candidate_from_link( + link, template, name, version + ) + if not extras or base is None: + return base + return self._make_extras_candidate(base, extras, comes_from=template) + + def _make_base_candidate_from_link( + self, + link: Link, + template: InstallRequirement, + name: NormalizedName | None, + version: Version | None, + ) -> BaseCandidate | None: + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. + + if link in self._build_failures: + # We already tried this candidate before, and it does not build. + # Don't bother trying again. 
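+            # Illustrative: an sdist at https://example.com/foo-1.0.tar.gz
+            # that failed to build earlier resolves to None here, so the
+            # resolver can move on to other versions instead of rebuilding it.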
+ return None + + if template.editable: + if link not in self._editable_candidate_cache: + try: + self._editable_candidate_cache[link] = EditableCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except (MetadataInconsistent, MetadataInvalid) as e: + logger.info( + "Discarding [blue underline]%s[/]: [yellow]%s[reset]", + link, + e, + extra={"markup": True}, + ) + self._build_failures[link] = e + return None + + return self._editable_candidate_cache[link] + else: + if link not in self._link_candidate_cache: + try: + self._link_candidate_cache[link] = LinkCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except MetadataInconsistent as e: + logger.info( + "Discarding [blue underline]%s[/]: [yellow]%s[reset]", + link, + e, + extra={"markup": True}, + ) + self._build_failures[link] = e + return None + return self._link_candidate_cache[link] + + def _iter_found_candidates( + self, + ireqs: Sequence[InstallRequirement], + specifier: SpecifierSet, + hashes: Hashes, + prefers_installed: bool, + incompatible_ids: set[int], + ) -> Iterable[Candidate]: + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. + template = ireqs[0] + assert template.req, "Candidates found on index must be PEP 508" + name = canonicalize_name(template.req.name) + + extras: frozenset[str] = frozenset() + for ireq in ireqs: + assert ireq.req, "Candidates found on index must be PEP 508" + specifier &= ireq.req.specifier + hashes &= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + def _get_installed_candidate() -> Candidate | None: + """Get the candidate for the currently-installed version.""" + # If --force-reinstall is set, we want the version from the index + # instead, so we "pretend" there is nothing installed. + if self._force_reinstall: + return None + try: + installed_dist = self._installed_dists[name] + except KeyError: + return None + + try: + # Don't use the installed distribution if its version + # does not fit the current dependency graph. + if not specifier.contains(installed_dist.version, prereleases=True): + return None + except InvalidVersion as e: + raise InvalidInstalledPackage(dist=installed_dist, invalid_exc=e) + + candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + # The candidate is a known incompatibility. Don't use it. + if id(candidate) in incompatible_ids: + return None + return candidate + + def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: + result = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + icans = result.applicable_candidates + + # PEP 592: Yanked releases are ignored unless the specifier + # explicitly pins a version (via '==' or '===') that can be + # solely satisfied by a yanked release. + all_yanked = all(ican.link.is_yanked for ican in icans) + + def is_pinned(specifier: SpecifierSet) -> bool: + for sp in specifier: + if sp.operator == "===": + return True + if sp.operator != "==": + continue + if sp.version.endswith(".*"): + continue + return True + return False + + pinned = is_pinned(specifier) + + # PackageFinder returns earlier versions first, so we reverse. 
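+            # Illustrative examples for the is_pinned() check used below:
+            #   "foo===1.2.3" -> pinned
+            #   "foo==1.2.3"  -> pinned (a yanked 1.2.3 may still be used)
+            #   "foo==1.2.*"  -> not pinned (wildcard)
+            #   "foo>=1.2"    -> not pinned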
+ for ican in reversed(icans): + if not (all_yanked and pinned) and ican.link.is_yanked: + continue + func = functools.partial( + self._make_candidate_from_link, + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + yield ican.version, func + + return FoundCandidates( + iter_index_candidate_infos, + _get_installed_candidate(), + prefers_installed, + incompatible_ids, + ) + + def _iter_explicit_candidates_from_base( + self, + base_requirements: Iterable[Requirement], + extras: frozenset[str], + ) -> Iterator[Candidate]: + """Produce explicit candidates from the base given an extra-ed package. + + :param base_requirements: Requirements known to the resolver. The + requirements are guaranteed to not have extras. + :param extras: The extras to inject into the explicit requirements' + candidates. + """ + for req in base_requirements: + lookup_cand, _ = req.get_candidate_lookup() + if lookup_cand is None: # Not explicit. + continue + # We've stripped extras from the identifier, and should always + # get a BaseCandidate here, unless there's a bug elsewhere. + base_cand = as_base_candidate(lookup_cand) + assert base_cand is not None, "no extras here" + yield self._make_extras_candidate(base_cand, extras) + + def _iter_candidates_from_constraints( + self, + identifier: str, + constraint: Constraint, + template: InstallRequirement, + ) -> Iterator[Candidate]: + """Produce explicit candidates from constraints. + + This creates "fake" InstallRequirement objects that are basically clones + of what "should" be the template, but with original_link set to link. + """ + for link in constraint.links: + self._fail_if_link_is_unsupported_wheel(link) + candidate = self._make_base_candidate_from_link( + link, + template=install_req_from_link_and_ireq(link, template), + name=canonicalize_name(identifier), + version=None, + ) + if candidate: + yield candidate + + def find_candidates( + self, + identifier: str, + requirements: Mapping[str, Iterable[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + constraint: Constraint, + prefers_installed: bool, + is_satisfied_by: Callable[[Requirement, Candidate], bool], + ) -> Iterable[Candidate]: + # Collect basic lookup information from the requirements. + explicit_candidates: set[Candidate] = set() + ireqs: list[InstallRequirement] = [] + for req in requirements[identifier]: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If the current identifier contains extras, add requires and explicit + # candidates from entries from extra-less identifier. + with contextlib.suppress(InvalidRequirement): + parsed_requirement = get_requirement(identifier) + if parsed_requirement.name != identifier: + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + for req in requirements.get(parsed_requirement.name, []): + _, ireq = req.get_candidate_lookup() + if ireq is not None: + ireqs.append(ireq) + + # Add explicit candidates from constraints. We only do this if there are + # known ireqs, which represent requirements not already explicit. If + # there are no ireqs, we're constraining already-explicit requirements, + # which is handled later when we return the explicit candidates. 
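+        # Illustrative: for `pip install "foo[bar]" -c constraints.txt` where
+        # constraints.txt contains a named direct link such as
+        # "foo @ https://example.com/foo-1.0-py3-none-any.whl", that link
+        # becomes an explicit candidate here instead of an index lookup.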
+ if ireqs: + try: + explicit_candidates.update( + self._iter_candidates_from_constraints( + identifier, + constraint, + template=ireqs[0], + ), + ) + except UnsupportedWheel: + # If we're constrained to install a wheel incompatible with the + # target architecture, no candidates will ever be valid. + return () + + # Since we cache all the candidates, incompatibility identification + # can be made quicker by comparing only the id() values. + incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())} + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. + if not explicit_candidates: + return self._iter_found_candidates( + ireqs, + constraint.specifier, + constraint.hashes, + prefers_installed, + incompat_ids, + ) + + return ( + c + for c in explicit_candidates + if id(c) not in incompat_ids + and constraint.is_satisfied_by(c) + and all(is_satisfied_by(req, c) for req in requirements[identifier]) + ) + + def _make_requirements_from_install_req( + self, ireq: InstallRequirement, requested_extras: Iterable[str] + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given InstallRequirement. In + most cases this will be a single object but the following special cases exist: + - the InstallRequirement has markers that do not apply -> result is empty + - the InstallRequirement has both a constraint (or link) and extras + -> result is split in two requirement objects: one with the constraint + (or link) and one with the extra. This allows centralized constraint + handling for the base, resulting in fewer candidate rejections. + """ + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, + ireq.markers, + ) + elif not ireq.link: + if ireq.extras and ireq.req is not None and ireq.req.specifier: + yield SpecifierWithoutExtrasRequirement(ireq) + yield SpecifierRequirement(ireq) + else: + self._fail_if_link_is_unsupported_wheel(ireq.link) + # Always make the link candidate for the base requirement to make it + # available to `find_candidates` for explicit candidate lookup for any + # set of extras. + # The extras are required separately via a second requirement. + cand = self._make_base_candidate_from_link( + ireq.link, + template=install_req_drop_extras(ireq) if ireq.extras else ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. 
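+                # Illustrative: `pip install ./broken-dir` (unnamed) re-raises
+                # the stored build failure immediately, while
+                # `pip install "foo @ file:///tmp/broken"` (named) yields an
+                # UnsatisfiableRequirement so the resolver can backtrack or
+                # eventually report ResolutionImpossible.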
+                if not ireq.name:
+                    raise self._build_failures[ireq.link]
+                yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
+            else:
+                # require the base from the link
+                yield self.make_requirement_from_candidate(cand)
+                if ireq.extras:
+                    # require the extras on top of the base candidate
+                    yield self.make_requirement_from_candidate(
+                        self._make_extras_candidate(cand, frozenset(ireq.extras))
+                    )
+
+    def collect_root_requirements(
+        self, root_ireqs: list[InstallRequirement]
+    ) -> CollectedRootRequirements:
+        collected = CollectedRootRequirements([], {}, {})
+        for i, ireq in enumerate(root_ireqs):
+            if ireq.constraint:
+                # Ensure we only accept valid constraints
+                problem = check_invalid_constraint_type(ireq)
+                if problem:
+                    raise InstallationError(problem)
+                if not ireq.match_markers():
+                    continue
+                assert ireq.name, "Constraint must be named"
+                name = canonicalize_name(ireq.name)
+                if name in collected.constraints:
+                    collected.constraints[name] &= ireq
+                else:
+                    collected.constraints[name] = Constraint.from_ireq(ireq)
+            else:
+                reqs = list(
+                    self._make_requirements_from_install_req(
+                        ireq,
+                        requested_extras=(),
+                    )
+                )
+                if not reqs:
+                    continue
+                template = reqs[0]
+                if ireq.user_supplied and template.name not in collected.user_requested:
+                    collected.user_requested[template.name] = i
+                collected.requirements.extend(reqs)
+        # Put requirements with extras at the end of the root requires. This does not
+        # affect resolvelib's picking preference but it does affect its initial criteria
+        # population: by putting extras at the end we enable the candidate finder to
+        # present resolvelib with a smaller set of candidates, already taking into
+        # account any non-transient constraints on the associated base. This means
+        # resolvelib will have fewer candidates to visit and reject.
+        # Python's list sort is stable, meaning relative order is kept for objects
+        # with the same key.
+        collected.requirements.sort(key=lambda r: r.name != r.project_name)
+        return collected
+
+    def make_requirement_from_candidate(
+        self, candidate: Candidate
+    ) -> ExplicitRequirement:
+        return ExplicitRequirement(candidate)
+
+    def make_requirements_from_spec(
+        self,
+        specifier: str,
+        comes_from: InstallRequirement | None,
+        requested_extras: Iterable[str] = (),
+    ) -> Iterator[Requirement]:
+        """
+        Returns requirement objects associated with the given specifier. In most cases
+        this will be a single object but the following special cases exist:
+            - the specifier has markers that do not apply -> result is empty
+            - the specifier has both a constraint and extras -> result is split
+              in two requirement objects: one with the constraint and one with the
+              extra. This allows centralized constraint handling for the base,
+              resulting in fewer candidate rejections.
+        """
+        ireq = self._make_install_req_from_spec(specifier, comes_from)
+        return self._make_requirements_from_install_req(ireq, requested_extras)
+
+    def make_requires_python_requirement(
+        self,
+        specifier: SpecifierSet,
+    ) -> Requirement | None:
+        if self._ignore_requires_python:
+            return None
+        # Don't bother creating a dependency for an empty Requires-Python.
+        if not str(specifier):
+            return None
+        return RequiresPythonRequirement(specifier, self._python_candidate)
+
+    def get_wheel_cache_entry(self, link: Link, name: str | None) -> CacheEntry | None:
+        """Look up the link in the wheel cache.
+
+        If ``preparer.require_hashes`` is True, don't use the wheel cache,
+        because cached wheels, always built locally, have different hashes
+        than the files downloaded from the index server and thus throw false
+        hash mismatches. Furthermore, cached wheels at present have
+        nondeterministic contents due to file modification times.
+        """
+        if self._wheel_cache is None:
+            return None
+        return self._wheel_cache.get_cache_entry(
+            link=link,
+            package_name=name,
+            supported_tags=self._supported_tags_cache,
+        )
+
+    def get_dist_to_uninstall(self, candidate: Candidate) -> BaseDistribution | None:
+        # TODO: Are there more cases this needs to return True? Editable?
+        dist = self._installed_dists.get(candidate.project_name)
+        if dist is None:  # Not installed, no uninstallation required.
+            return None
+
+        # We're installing into global site. The current installation must
+        # be uninstalled, no matter whether it is in the global or user site,
+        # because the user site installation has precedence over global.
+        if not self._use_user_site:
+            return dist
+
+        # We're installing into user site. Remove the user site installation.
+        if dist.in_usersite:
+            return dist
+
+        # We're installing into user site, but the installed incompatible
+        # package is in global site. We can't uninstall that, and would let
+        # the new user installation "shadow" it. But shadowing won't work
+        # in virtual environments, so we error out.
+        if running_under_virtualenv() and dist.in_site_packages:
+            message = (
+                f"Will not install to the user site because it will lack "
+                f"sys.path precedence to {dist.raw_name} in {dist.location}"
+            )
+            raise InstallationError(message)
+        return None
+
+    def _report_requires_python_error(
+        self, causes: Sequence[ConflictCause]
+    ) -> UnsupportedPythonVersion:
+        assert causes, "Requires-Python error reported with no cause"
+
+        version = self._python_candidate.version
+
+        if len(causes) == 1:
+            specifier = str(causes[0].requirement.specifier)
+            message = (
+                f"Package {causes[0].parent.name!r} requires a different "
+                f"Python: {version} not in {specifier!r}"
+            )
+            return UnsupportedPythonVersion(message)
+
+        message = f"Packages require a different Python. {version} not in:"
+        for cause in causes:
+            package = cause.parent.format_for_error()
+            specifier = str(cause.requirement.specifier)
+            message += f"\n{specifier!r} (required by {package})"
+        return UnsupportedPythonVersion(message)
+
+    def _report_single_requirement_conflict(
+        self, req: Requirement, parent: Candidate | None
+    ) -> DistributionNotFound:
+        if parent is None:
+            req_disp = str(req)
+        else:
+            req_disp = f"{req} (from {parent.name})"
+
+        cands = self._finder.find_all_candidates(req.project_name)
+        skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
+
+        versions_set: set[Version] = set()
+        yanked_versions_set: set[Version] = set()
+        for c in cands:
+            is_yanked = c.link.is_yanked if c.link else False
+            if is_yanked:
+                yanked_versions_set.add(c.version)
+            else:
+                versions_set.add(c.version)
+
+        versions = [str(v) for v in sorted(versions_set)]
+        yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
+
+        if yanked_versions:
+            # Saying "version X is yanked" isn't entirely accurate.
+ # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842 + logger.critical( + "Ignored the following yanked versions: %s", + ", ".join(yanked_versions) or "none", + ) + if skipped_by_requires_python: + logger.critical( + "Ignored the following versions that require a different python " + "version: %s", + "; ".join(skipped_by_requires_python) or "none", + ) + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req_disp, + ", ".join(versions) or "none", + ) + if str(req) == "requirements.txt": + logger.info( + "HINT: You are attempting to install a package literally " + 'named "requirements.txt" (which cannot exist). Consider ' + "using the '-r' flag to install the packages listed in " + "requirements.txt" + ) + + return DistributionNotFound(f"No matching distribution found for {req}") + + def _has_any_candidates(self, project_name: str) -> bool: + """ + Check if there are any candidates available for the project name. + """ + return any( + self.find_candidates( + project_name, + requirements={project_name: []}, + incompatibilities={}, + constraint=Constraint.empty(), + prefers_installed=True, + is_satisfied_by=lambda r, c: True, + ) + ) + + def get_installation_error( + self, + e: ResolutionImpossible[Requirement, Candidate], + constraints: dict[str, Constraint], + ) -> InstallationError: + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + requires_python_causes = [ + cause + for cause in e.causes + if isinstance(cause.requirement, RequiresPythonRequirement) + and not cause.requirement.is_satisfied_by(self._python_candidate) + ] + if requires_python_causes: + # The comprehension above makes sure all Requirement instances are + # RequiresPythonRequirement, so let's cast for convenience. + return self._report_requires_python_error( + cast("Sequence[ConflictCause]", requires_python_causes), + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. + + # The simplest case is when we have *one* cause that can't be + # satisfied. We just report that case. + if len(e.causes) == 1: + req, parent = next(iter(e.causes)) + if req.name not in constraints: + return self._report_single_requirement_conflict(req, parent) + + # OK, we now have a list of requirements that can't all be + # satisfied at once. + + # A couple of formatting helpers + def text_join(parts: list[str]) -> str: + if len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def describe_trigger(parent: Candidate) -> str: + ireq = parent.get_install_requirement() + if not ireq or not ireq.comes_from: + return f"{parent.name}=={parent.version}" + if isinstance(ireq.comes_from, InstallRequirement): + return str(ireq.comes_from.name) + return str(ireq.comes_from) + + triggers = set() + for req, parent in e.causes: + if parent is None: + # This is a root requirement, so we can report it directly + trigger = req.format_for_error() + else: + trigger = describe_trigger(parent) + triggers.add(trigger) + + if triggers: + info = text_join(sorted(triggers)) + else: + info = "the requested packages" + + msg = ( + f"Cannot install {info} because these package versions " + "have conflicting dependencies." 
+ ) + logger.critical(msg) + msg = "\nThe conflict is caused by:" + + relevant_constraints = set() + for req, parent in e.causes: + if req.name in constraints: + relevant_constraints.add(req.name) + msg = msg + "\n " + if parent: + msg = msg + f"{parent.name} {parent.version} depends on " + else: + msg = msg + "The user requested " + msg = msg + req.format_for_error() + for key in relevant_constraints: + spec = constraints[key].specifier + msg += f"\n The user requested (constraint) {key}{spec}" + + # Check for causes that had no candidates + causes = set() + for req, _ in e.causes: + causes.add(req.name) + + no_candidates = {c for c in causes if not self._has_any_candidates(c)} + if no_candidates: + msg = ( + msg + + "\n\n" + + "Additionally, some packages in these conflicts have no " + + "matching distributions available for your environment:" + + "\n " + + "\n ".join(sorted(no_candidates)) + ) + + msg = ( + msg + + "\n\n" + + "To fix this you could try to:\n" + + "1. loosen the range of package versions you've specified\n" + + "2. remove package versions to allow pip to attempt to solve " + + "the dependency conflict\n" + ) + + logger.info(msg) + + return DistributionNotFound( + "ResolutionImpossible: for help visit " + "https://pip.pypa.io/en/latest/topics/dependency-resolution/" + "#dealing-with-dependency-conflicts" + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py new file mode 100644 index 0000000..f60653d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py @@ -0,0 +1,166 @@ +"""Utilities to lazily create and visit candidates found. + +Creating and visiting a candidate is a *very* costly operation. It involves +fetching, extracting, potentially building modules from source, and verifying +distribution metadata. It is therefore crucial for performance to keep +everything here lazy all the way down, so we only touch candidates that we +absolutely need, and not "download the world" when we only need one version of +something. +""" + +from __future__ import annotations + +import logging +from collections.abc import Iterator, Sequence +from typing import Any, Callable, Optional + +from pip._vendor.packaging.version import _BaseVersion + +from pip._internal.exceptions import MetadataInvalid + +from .base import Candidate + +logger = logging.getLogger(__name__) + +IndexCandidateInfo = tuple[_BaseVersion, Callable[[], Optional[Candidate]]] + + +def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]: + """Iterator for ``FoundCandidates``. + + This iterator is used when the package is not already installed. Candidates + from index come later in their normal ordering. + """ + versions_found: set[_BaseVersion] = set() + for version, func in infos: + if version in versions_found: + continue + try: + candidate = func() + except MetadataInvalid as e: + logger.warning( + "Ignoring version %s of %s since it has invalid metadata:\n" + "%s\n" + "Please use pip<24.1 if you need to use this version.", + version, + e.ireq.name, + e, + ) + # Mark version as found to avoid trying other candidates with the same + # version, since they most likely have invalid metadata as well. 
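+            # Illustrative: if version 2.0.1 exists as both an sdist and a
+            # wheel and the wheel's metadata is invalid, 2.0.1 is skipped
+            # entirely rather than retried through the sdist.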
+            versions_found.add(version)
+        else:
+            if candidate is None:
+                continue
+            yield candidate
+            versions_found.add(version)
+
+
+def _iter_built_with_prepended(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers the already-installed
+    candidate and NOT to upgrade. The installed candidate is therefore
+    always yielded first, and candidates from the index come later in their
+    normal ordering; any candidate matching the installed version is skipped.
+    """
+    yield installed
+    versions_found: set[_BaseVersion] = {installed.version}
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_inserted(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers to upgrade an
+    already-installed package. Candidates from the index are returned in
+    their normal ordering, except that the installed candidate replaces the
+    index candidate carrying the same version.
+
+    The implementation iterates through and yields other candidates, inserting
+    the installed candidate exactly once before we start yielding older or
+    equivalent candidates, or after all other candidates if they are all newer.
+    """
+    versions_found: set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        # If the installed candidate is better, yield it first.
+        if installed.version >= version:
+            yield installed
+            versions_found.add(installed.version)
+            # The installed candidate replaces an index candidate with the
+            # same version, as promised in the docstring above.
+            if version in versions_found:
+                continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+    # If the installed candidate is older than all other candidates.
+    if installed.version not in versions_found:
+        yield installed
+
+
+class FoundCandidates(Sequence[Candidate]):
+    """A lazy sequence to provide candidates to the resolver.
+
+    The intended usage is to return this from `find_matches()` so the resolver
+    can iterate through the sequence multiple times, but only access the index
+    page when remote packages are actually needed. This improves performance
+    when suitable candidates are already installed on disk.
+    """
+
+    def __init__(
+        self,
+        get_infos: Callable[[], Iterator[IndexCandidateInfo]],
+        installed: Candidate | None,
+        prefers_installed: bool,
+        incompatible_ids: set[int],
+    ):
+        self._get_infos = get_infos
+        self._installed = installed
+        self._prefers_installed = prefers_installed
+        self._incompatible_ids = incompatible_ids
+        self._bool: bool | None = None
+
+    def __getitem__(self, index: Any) -> Any:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
+        raise NotImplementedError("don't do this")
+
+    def __iter__(self) -> Iterator[Candidate]:
+        infos = self._get_infos()
+        if not self._installed:
+            iterator = _iter_built(infos)
+        elif self._prefers_installed:
+            iterator = _iter_built_with_prepended(self._installed, infos)
+        else:
+            iterator = _iter_built_with_inserted(self._installed, infos)
+        return (c for c in iterator if id(c) not in self._incompatible_ids)
+
+    def __len__(self) -> int:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
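+        # The resolver only iterates this object and tests its truthiness;
+        # __bool__ below short-circuits when an installed candidate is
+        # preferred, so even the truth test can avoid touching the index.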
+ raise NotImplementedError("don't do this") + + def __bool__(self) -> bool: + if self._bool is not None: + return self._bool + + if self._prefers_installed and self._installed: + self._bool = True + return True + + self._bool = any(self) + return self._bool diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py new file mode 100644 index 0000000..994748d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -0,0 +1,285 @@ +from __future__ import annotations + +import math +from collections.abc import Iterable, Iterator, Mapping, Sequence +from functools import cache +from typing import ( + TYPE_CHECKING, + TypeVar, +) + +from pip._vendor.resolvelib.providers import AbstractProvider + +from pip._internal.req.req_install import InstallRequirement + +from .base import Candidate, Constraint, Requirement +from .candidates import REQUIRES_PYTHON_IDENTIFIER +from .factory import Factory +from .requirements import ExplicitRequirement + +if TYPE_CHECKING: + from pip._vendor.resolvelib.providers import Preference + from pip._vendor.resolvelib.resolvers import RequirementInformation + + PreferenceInformation = RequirementInformation[Requirement, Candidate] + + _ProviderBase = AbstractProvider[Requirement, Candidate, str] +else: + _ProviderBase = AbstractProvider + +# Notes on the relationship between the provider, the factory, and the +# candidate and requirement classes. +# +# The provider is a direct implementation of the resolvelib class. Its role +# is to deliver the API that resolvelib expects. +# +# Rather than work with completely abstract "requirement" and "candidate" +# concepts as resolvelib does, pip has concrete classes implementing these two +# ideas. The API of Requirement and Candidate objects are defined in the base +# classes, but essentially map fairly directly to the equivalent provider +# methods. In particular, `find_matches` and `is_satisfied_by` are +# requirement methods, and `get_dependencies` is a candidate method. +# +# The factory is the interface to pip's internal mechanisms. It is stateless, +# and is created by the resolver and held as a property of the provider. It is +# responsible for creating Requirement and Candidate objects, and provides +# services to those objects (access to pip's finder and preparer). + + +D = TypeVar("D") +V = TypeVar("V") + + +def _get_with_identifier( + mapping: Mapping[str, V], + identifier: str, + default: D, +) -> D | V: + """Get item from a package name lookup mapping with a resolver identifier. + + This extra logic is needed when the target mapping is keyed by package + name, which cannot be directly looked up with an identifier (which may + contain requested extras). Additional logic is added to also look up a value + by "cleaning up" the extras from the identifier. + """ + if identifier in mapping: + return mapping[identifier] + # HACK: Theoretically we should check whether this identifier is a valid + # "NAME[EXTRAS]" format, and parse out the name part with packaging or + # some regular expression. But since pip's resolver only spits out three + # kinds of identifiers: normalized PEP 503 names, normalized names plus + # extras, and Requires-Python, we can cheat a bit here. 
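+    # Illustrative: "requests[socks]".partition("[") yields
+    # ("requests", "[", "socks]"), so a miss on "requests[socks]" falls
+    # back to the entry keyed by "requests".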
+    name, open_bracket, _ = identifier.partition("[")
+    if open_bracket and name in mapping:
+        return mapping[name]
+    return default
+
+
+class PipProvider(_ProviderBase):
+    """Pip's provider implementation for resolvelib.
+
+    :param constraints: A mapping of constraints specified by the user. Keys
+        are canonicalized project names.
+    :param ignore_dependencies: Whether the user specified ``--no-deps``.
+    :param upgrade_strategy: The user-specified upgrade strategy.
+    :param user_requested: A set of canonicalized package names that the user
+        supplied for pip to install/upgrade.
+    """
+
+    def __init__(
+        self,
+        factory: Factory,
+        constraints: dict[str, Constraint],
+        ignore_dependencies: bool,
+        upgrade_strategy: str,
+        user_requested: dict[str, int],
+    ) -> None:
+        self._factory = factory
+        self._constraints = constraints
+        self._ignore_dependencies = ignore_dependencies
+        self._upgrade_strategy = upgrade_strategy
+        self._user_requested = user_requested
+
+    @property
+    def constraints(self) -> dict[str, Constraint]:
+        """Public view of user-specified constraints.
+
+        Exposes the provider's constraints mapping without encouraging
+        external callers to reach into private attributes.
+        """
+        return self._constraints
+
+    def identify(self, requirement_or_candidate: Requirement | Candidate) -> str:
+        return requirement_or_candidate.name
+
+    def narrow_requirement_selection(
+        self,
+        identifiers: Iterable[str],
+        resolutions: Mapping[str, Candidate],
+        candidates: Mapping[str, Iterator[Candidate]],
+        information: Mapping[str, Iterator[PreferenceInformation]],
+        backtrack_causes: Sequence[PreferenceInformation],
+    ) -> Iterable[str]:
+        """Produce a subset of identifiers that should be considered before others.
+
+        Currently pip narrows the following selection:
+            * Requires-Python, if present, is always returned by itself
+            * Backtrack causes are considered next because they can be identified
+              in linear time here, whereas because get_preference() is called
+              for each identifier, it would be quadratic to check for them there.
+              Further, the current backtrack causes likely need to be resolved
+              before other requirements as a resolution can't be found while
+              there is a conflict.
+        """
+        backtrack_identifiers = set()
+        for info in backtrack_causes:
+            backtrack_identifiers.add(info.requirement.name)
+            if info.parent is not None:
+                backtrack_identifiers.add(info.parent.name)
+
+        current_backtrack_causes = []
+        for identifier in identifiers:
+            # Requires-Python has only one candidate and the check is basically
+            # free, so we always do it first to avoid needless work if it fails.
+            # This skips calling get_preference() for all other identifiers.
+            if identifier == REQUIRES_PYTHON_IDENTIFIER:
+                return [identifier]
+
+            # Check if this identifier is a backtrack cause
+            if identifier in backtrack_identifiers:
+                current_backtrack_causes.append(identifier)
+                continue
+
+        if current_backtrack_causes:
+            return current_backtrack_causes
+
+        return identifiers
+
+    def get_preference(
+        self,
+        identifier: str,
+        resolutions: Mapping[str, Candidate],
+        candidates: Mapping[str, Iterator[Candidate]],
+        information: Mapping[str, Iterable[PreferenceInformation]],
+        backtrack_causes: Sequence[PreferenceInformation],
+    ) -> Preference:
+        """Produce a sort key for the given requirement based on preference.
+
+        The lower the return value is, the more preferred this group of
+        arguments is.
+
+        Currently pip considers the following in order:
+
+        * Any requirement that is "direct", e.g., points to an explicit URL.
+ * Any requirement that is "pinned", i.e., contains the operator ``===`` + or ``==`` without a wildcard. + * Any requirement that imposes an upper version limit, i.e., contains the + operator ``<``, ``<=``, ``~=``, or ``==`` with a wildcard. Because + pip prioritizes the latest version, preferring explicit upper bounds + can rule out infeasible candidates sooner. This does not imply that + upper bounds are good practice; they can make dependency management + and resolution harder. + * Order user-specified requirements as they are specified, placing + other requirements afterward. + * Any "non-free" requirement, i.e., one that contains at least one + operator, such as ``>=`` or ``!=``. + * Alphabetical order for consistency (aids debuggability). + """ + try: + next(iter(information[identifier])) + except StopIteration: + # There is no information for this identifier, so there's no known + # candidates. + has_information = False + else: + has_information = True + + if not has_information: + direct = False + ireqs: tuple[InstallRequirement | None, ...] = () + else: + # Go through the information and for each requirement, + # check if it's explicit (e.g., a direct link) and get the + # InstallRequirement (the second element) from get_candidate_lookup() + directs, ireqs = zip( + *( + (isinstance(r, ExplicitRequirement), r.get_candidate_lookup()[1]) + for r, _ in information[identifier] + ) + ) + direct = any(directs) + + operators: list[tuple[str, str]] = [ + (specifier.operator, specifier.version) + for specifier_set in (ireq.specifier for ireq in ireqs if ireq) + for specifier in specifier_set + ] + + pinned = any(((op[:2] == "==") and ("*" not in ver)) for op, ver in operators) + upper_bounded = any( + ((op in ("<", "<=", "~=")) or (op == "==" and "*" in ver)) + for op, ver in operators + ) + unfree = bool(operators) + requested_order = self._user_requested.get(identifier, math.inf) + + return ( + not direct, + not pinned, + not upper_bounded, + requested_order, + not unfree, + identifier, + ) + + def find_matches( + self, + identifier: str, + requirements: Mapping[str, Iterator[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + ) -> Iterable[Candidate]: + def _eligible_for_upgrade(identifier: str) -> bool: + """Are upgrades allowed for this project? + + This checks the upgrade strategy, and whether the project was one + that the user specified in the command line, in order to decide + whether we should upgrade if there's a newer version available. + + (Note that we don't need access to the `--upgrade` flag, because + an upgrade strategy of "to-satisfy-only" means that `--upgrade` + was not specified). 
+ """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + user_order = _get_with_identifier( + self._user_requested, + identifier, + default=None, + ) + return user_order is not None + return False + + constraint = _get_with_identifier( + self._constraints, + identifier, + default=Constraint.empty(), + ) + return self._factory.find_candidates( + identifier=identifier, + requirements=requirements, + constraint=constraint, + prefers_installed=(not _eligible_for_upgrade(identifier)), + incompatibilities=incompatibilities, + is_satisfied_by=self.is_satisfied_by, + ) + + @staticmethod + @cache + def is_satisfied_by(requirement: Requirement, candidate: Candidate) -> bool: + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate: Candidate) -> Iterable[Requirement]: + with_requires = not self._ignore_dependencies + # iter_dependencies() can perform nontrivial work so delay until needed. + return (r for r in candidate.iter_dependencies(with_requires) if r is not None) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py new file mode 100644 index 0000000..6ba9bbd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/reporter.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Mapping +from logging import getLogger +from typing import Any + +from pip._vendor.resolvelib.reporters import BaseReporter + +from .base import Candidate, Constraint, Requirement + +logger = getLogger(__name__) + + +class PipReporter(BaseReporter[Requirement, Candidate, str]): + def __init__(self, constraints: Mapping[str, Constraint] | None = None) -> None: + self.reject_count_by_package: defaultdict[str, int] = defaultdict(int) + self._constraints = constraints or {} + + self._messages_at_reject_count = { + 1: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 8: ( + "pip is still looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 13: ( + "This is taking longer than usual. You might need to provide " + "the dependency resolver with stricter constraints to reduce " + "runtime. See https://pip.pypa.io/warnings/backtracking for " + "guidance. If you want to abort this run, press Ctrl + C." + ), + } + + def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: + """Report a candidate being rejected. + + Logs both the rejection count message (if applicable) and details about + the requirements and constraints that caused the rejection. 
+ """ + self.reject_count_by_package[candidate.name] += 1 + + count = self.reject_count_by_package[candidate.name] + if count in self._messages_at_reject_count: + message = self._messages_at_reject_count[count] + logger.info("INFO: %s", message.format(package_name=candidate.name)) + + msg = "Will try a different candidate, due to conflict:" + for req_info in criterion.information: + req, parent = req_info.requirement, req_info.parent + msg += "\n " + if parent: + msg += f"{parent.name} {parent.version} depends on " + else: + msg += "The user requested " + msg += req.format_for_error() + + # Add any relevant constraints + if self._constraints: + name = candidate.name + constraint = self._constraints.get(name) + if constraint and constraint.specifier: + constraint_text = f"{name}{constraint.specifier}" + msg += f"\n The user requested (constraint) {constraint_text}" + + logger.debug(msg) + + +class PipDebuggingReporter(BaseReporter[Requirement, Candidate, str]): + """A reporter that does an info log for every event it sees.""" + + def starting(self) -> None: + logger.info("Reporter.starting()") + + def starting_round(self, index: int) -> None: + logger.info("Reporter.starting_round(%r)", index) + + def ending_round(self, index: int, state: Any) -> None: + logger.info("Reporter.ending_round(%r, state)", index) + logger.debug("Reporter.ending_round(%r, %r)", index, state) + + def ending(self, state: Any) -> None: + logger.info("Reporter.ending(%r)", state) + + def adding_requirement( + self, requirement: Requirement, parent: Candidate | None + ) -> None: + logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) + + def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: + logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate) + + def pinning(self, candidate: Candidate) -> None: + logger.info("Reporter.pinning(%r)", candidate) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 0000000..447e36b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,247 @@ +from __future__ import annotations + +from typing import Any + +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.req.constructors import install_req_drop_extras +from pip._internal.req.req_install import InstallRequirement + +from .base import Candidate, CandidateLookup, Requirement, format_name + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate: Candidate) -> None: + self.candidate = candidate + + def __str__(self) -> str: + return str(self.candidate) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.candidate!r})" + + def __hash__(self) -> int: + return hash(self.candidate) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExplicitRequirement): + return False + return self.candidate == other.candidate + + @property + def project_name(self) -> NormalizedName: + # No need to canonicalize - the candidate did this + return self.candidate.project_name + + @property + def name(self) -> str: + # No need to canonicalize - the candidate did this + return self.candidate.name + + def format_for_error(self) -> str: + return self.candidate.format_for_error() + + def get_candidate_lookup(self) -> 
CandidateLookup: + return self.candidate, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + self._equal_cache: str | None = None + self._hash: int | None = None + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) + + @property + def _equal(self) -> str: + if self._equal_cache is not None: + return self._equal_cache + + self._equal_cache = str(self._ireq) + return self._equal_cache + + def __str__(self) -> str: + return str(self._ireq.req) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self._ireq.req)!r})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SpecifierRequirement): + return NotImplemented + return self._equal == other._equal + + def __hash__(self) -> int: + if self._hash is not None: + return self._hash + + self._hash = hash(self._equal) + return self._hash + + @property + def project_name(self) -> NormalizedName: + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + return canonicalize_name(self._ireq.req.name) + + @property + def name(self) -> str: + return format_name(self.project_name, self._extras) + + def format_for_error(self) -> str: + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self) -> CandidateLookup: + return None, self._ireq + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self.name, ( + f"Internal issue: Candidate is not for this requirement " + f"{candidate.name} vs {self.name}" + ) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class SpecifierWithoutExtrasRequirement(SpecifierRequirement): + """ + Requirement backed by an install requirement on a base package. + Trims extras from its install requirement if there are any. 
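+
+    Used so that a requirement like "pkg[extra]" also constrains the plain
+    "pkg" distribution, keeping both resolved to the same version.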
+ """ + + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = install_req_drop_extras(ireq) + self._equal_cache: str | None = None + self._hash: int | None = None + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) + + @property + def _equal(self) -> str: + if self._equal_cache is not None: + return self._equal_cache + + self._equal_cache = str(self._ireq) + return self._equal_cache + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SpecifierWithoutExtrasRequirement): + return NotImplemented + return self._equal == other._equal + + def __hash__(self) -> int: + if self._hash is not None: + return self._hash + + self._hash = hash(self._equal) + return self._hash + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata.""" + + def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: + self.specifier = specifier + self._specifier_string = str(specifier) # for faster __eq__ + self._hash: int | None = None + self._candidate = match + + def __str__(self) -> str: + return f"Python {self.specifier}" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self.specifier)!r})" + + def __hash__(self) -> int: + if self._hash is not None: + return self._hash + + self._hash = hash((self._specifier_string, self._candidate)) + return self._hash + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, RequiresPythonRequirement): + return False + return ( + self._specifier_string == other._specifier_string + and self._candidate == other._candidate + ) + + @property + def project_name(self) -> NormalizedName: + return self._candidate.project_name + + @property + def name(self) -> str: + return self._candidate.name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class UnsatisfiableRequirement(Requirement): + """A requirement that cannot be satisfied.""" + + def __init__(self, name: NormalizedName) -> None: + self._name = name + + def __str__(self) -> str: + return f"{self._name} (unavailable)" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({str(self._name)!r})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnsatisfiableRequirement): + return NotImplemented + return self._name == other._name + + def __hash__(self) -> int: + return hash(self._name) + + @property + def project_name(self) -> NormalizedName: + return self._name + + @property + def name(self) -> str: + return self._name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return False diff --git a/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 0000000..7e44c17 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,332 @@ +from __future__ import annotations + +import contextlib +import functools +import logging +import os +from typing import TYPE_CHECKING, cast + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible, ResolutionTooDeep +from pip._vendor.resolvelib import Resolver as RLResolver +from pip._vendor.resolvelib.structs import DirectedGraph + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ResolutionTooDeepError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_extend_extras +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.resolution.resolvelib.reporter import ( + PipDebuggingReporter, + PipReporter, +) +from pip._internal.utils.packaging import get_requirement + +from .base import Candidate, Requirement +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.resolvers import Result as RLResult + + Result = RLResult[Requirement, Candidate, str] + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: WheelCache | None, + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: tuple[int, ...] 
| None = None, + ): + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + py_version_info=py_version_info, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result: Result | None = None + + def resolve( + self, root_reqs: list[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + collected = self.factory.collect_root_requirements(root_reqs) + provider = PipProvider( + factory=self.factory, + constraints=collected.constraints, + ignore_dependencies=self.ignore_dependencies, + upgrade_strategy=self.upgrade_strategy, + user_requested=collected.user_requested, + ) + if "PIP_RESOLVER_DEBUG" in os.environ: + reporter: BaseReporter[Requirement, Candidate, str] = PipDebuggingReporter() + else: + reporter = PipReporter(constraints=provider.constraints) + + resolver: RLResolver[Requirement, Candidate, str] = RLResolver( + provider, + reporter, + ) + + try: + limit_how_complex_resolution_can_be = 200000 + result = self._result = resolver.resolve( + collected.requirements, max_rounds=limit_how_complex_resolution_can_be + ) + + except ResolutionImpossible as e: + error = self.factory.get_installation_error( + cast("ResolutionImpossible[Requirement, Candidate]", e), + collected.constraints, + ) + raise error from e + except ResolutionTooDeep: + raise ResolutionTooDeepError from None + + req_set = RequirementSet(check_supported_wheels=check_supported_wheels) + # process candidates with extras last to ensure their base equivalent is + # already in the req_set if appropriate. + # Python's sort is stable so using a binary key function keeps relative order + # within both subsets. + for candidate in sorted( + result.mapping.values(), key=lambda c: c.name != c.project_name + ): + ireq = candidate.get_install_requirement() + if ireq is None: + if candidate.name != candidate.project_name: + # extend existing req's extras + with contextlib.suppress(KeyError): + req = req_set.get_requirement(candidate.project_name) + req_set.add_named_requirement( + install_req_extend_extras( + req, get_requirement(candidate.name).extras + ) + ) + continue + + # Check if there is already an installation under the same name, + # and set a flag for later stages to uninstall it, if needed. + installed_dist = self.factory.get_dist_to_uninstall(candidate) + if installed_dist is None: + # There is no existing installation -- nothing to uninstall. + ireq.should_reinstall = False + elif self.factory.force_reinstall: + # The --force-reinstall flag is set -- reinstall. + ireq.should_reinstall = True + elif installed_dist.version != candidate.version: + # The installation is different in version -- reinstall. + ireq.should_reinstall = True + elif candidate.is_editable or installed_dist.editable: + # The incoming distribution is editable, or different in + # editable-ness to installation -- reinstall. + ireq.should_reinstall = True + elif candidate.source_link and candidate.source_link.is_file: + # The incoming distribution is under file:// + if candidate.source_link.is_wheel: + # is a local wheel -- do nothing. + logger.info( + "%s is already installed with the same version as the " + "provided wheel. 
Use --force-reinstall to force an " + "installation of the wheel.", + ireq.name, + ) + continue + + # is a local sdist or path -- reinstall + ireq.should_reinstall = True + else: + continue + + link = candidate.source_link + if link and link.is_yanked: + # The reason can contain non-ASCII characters, Unicode + # is required for Python 2. + msg = ( + "The candidate selected for download or install is a " + "yanked version: {name!r} candidate (version {version} " + "at {link})\nReason for being yanked: {reason}" + ).format( + name=candidate.name, + version=candidate.version, + link=link, + reason=link.yanked_reason or "", + ) + logger.warning(msg) + + req_set.add_named_requirement(ireq) + + return req_set + + def get_installation_order( + self, req_set: RequirementSet + ) -> list[InstallRequirement]: + """Get order for installation of requirements in RequirementSet. + + The returned list contains a requirement before another that depends on + it. This helps ensure that the environment is kept consistent as they + get installed one-by-one. + + The current implementation creates a topological ordering of the + dependency graph, giving more weight to packages with less + or no dependencies, while breaking any cycles in the graph at + arbitrary points. We make no guarantees about where the cycle + would be broken, other than it *would* be broken. + """ + assert self._result is not None, "must call resolve() first" + + if not req_set.requirements: + # Nothing is left to install, so we do not need an order. + return [] + + graph = self._result.graph + weights = get_topological_weights(graph, set(req_set.requirements.keys())) + + sorted_items = sorted( + req_set.requirements.items(), + key=functools.partial(_req_set_item_sorter, weights=weights), + reverse=True, + ) + return [ireq for _, ireq in sorted_items] + + +def get_topological_weights( + graph: DirectedGraph[str | None], requirement_keys: set[str] +) -> dict[str | None, int]: + """Assign weights to each node based on how "deep" they are. + + This implementation may change at any point in the future without prior + notice. + + We first simplify the dependency graph by pruning any leaves and giving them + the highest weight: a package without any dependencies should be installed + first. This is done again and again in the same way, giving ever less weight + to the newly found leaves. The loop stops when no leaves are left: all + remaining packages have at least one dependency left in the graph. + + Then we continue with the remaining graph, by taking the length for the + longest path to any node from root, ignoring any paths that contain a single + node twice (i.e. cycles). This is done through a depth-first search through + the graph, while keeping track of the path to the node. + + Cycles in the graph result would result in node being revisited while also + being on its own path. In this case, take no action. This helps ensure we + don't get stuck in a cycle. + + When assigning weight, the longer path (i.e. larger length) is preferred. + + We are only interested in the weights of packages that are in the + requirement_keys. + """ + path: set[str | None] = set() + weights: dict[str | None, list[int]] = {} + + def visit(node: str | None) -> None: + if node in path: + # We hit a cycle, so we'll break it here. + return + + # The walk is exponential and for pathologically connected graphs (which + # are the ones most likely to contain cycles in the first place) it can + # take until the heat-death of the universe. 
To counter this we limit + # the number of attempts to visit (i.e. traverse through) any given + # node. We choose a value here which gives decent enough coverage for + # fairly well behaved graphs, and still limits the walk complexity to be + # linear in nature. + cur_weights = weights.get(node, []) + if len(cur_weights) >= 5: + return + + # Time to visit the children! + path.add(node) + for child in graph.iter_children(node): + visit(child) + path.remove(node) + + if node not in requirement_keys: + return + + cur_weights.append(len(path)) + weights[node] = cur_weights + + # Simplify the graph, pruning leaves that have no dependencies. This is + # needed for large graphs (say over 200 packages) because the `visit` + # function is slower for large/densely connected graphs, taking minutes. + # See https://github.com/pypa/pip/issues/10557 + # We repeat the pruning step until we have no more leaves to remove. + while True: + leaves = set() + for key in graph: + if key is None: + continue + for _child in graph.iter_children(key): + # This means we have at least one child + break + else: + # No child. + leaves.add(key) + if not leaves: + # We are done simplifying. + break + # Calculate the weight for the leaves. + weight = len(graph) - 1 + for leaf in leaves: + if leaf not in requirement_keys: + continue + weights[leaf] = [weight] + # Remove the leaves from the graph, making it simpler. + for leaf in leaves: + graph.remove(leaf) + + # Visit the remaining graph, this will only have nodes to handle if the + # graph had a cycle in it, which the pruning step above could not handle. + # `None` is guaranteed to be the root node by resolvelib. + visit(None) + + # Sanity check: all requirement keys should be in the weights, + # and no other keys should be in the weights. + difference = set(weights.keys()).difference(requirement_keys) + assert not difference, difference + + # Now give back all the weights, choosing the largest ones from what we + # accumulated. + return {node: max(wgts) for (node, wgts) in weights.items()} + + +def _req_set_item_sorter( + item: tuple[str, InstallRequirement], + weights: dict[str | None, int], +) -> tuple[int, str]: + """Key function used to sort install requirements for installation. + + Based on the "weight" mapping calculated in ``get_installation_order()``. + The canonical package name is returned as the second member as a tie- + breaker to ensure the result is predictable, which is useful in tests. 
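+
+    get_installation_order() sorts these keys in reverse, so the heaviest
+    nodes (typically leaf dependencies, which carry the largest weights)
+    come first and are installed before anything that depends on them.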
+ """ + name = canonicalize_name(item[0]) + return weights[name], name diff --git a/venv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py b/venv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py new file mode 100644 index 0000000..5999ddb --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/self_outdated_check.py @@ -0,0 +1,262 @@ +from __future__ import annotations + +import datetime +import functools +import hashlib +import json +import logging +import optparse +import os.path +import sys +from dataclasses import dataclass +from typing import Any, Callable + +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.rich.console import Group +from pip._vendor.rich.markup import escape +from pip._vendor.rich.text import Text + +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import get_default_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.entrypoints import ( + get_best_invocation_for_this_pip, + get_best_invocation_for_this_python, +) +from pip._internal.utils.filesystem import ( + adjacent_tmp_file, + check_path_owner, + copy_directory_permissions, + replace, +) +from pip._internal.utils.misc import ( + ExternallyManagedEnvironment, + check_externally_managed, + ensure_dir, +) + +_WEEK = datetime.timedelta(days=7) + +logger = logging.getLogger(__name__) + + +def _get_statefile_name(key: str) -> str: + key_bytes = key.encode() + name = hashlib.sha224(key_bytes).hexdigest() + return name + + +def _convert_date(isodate: str) -> datetime.datetime: + """Convert an ISO format string to a date. + + Handles the format 2020-01-22T14:24:01Z (trailing Z) + which is not supported by older versions of fromisoformat. + """ + return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00")) + + +class SelfCheckState: + def __init__(self, cache_dir: str) -> None: + self._state: dict[str, Any] = {} + self._statefile_path = None + + # Try to load the existing state + if cache_dir: + self._statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) + try: + with open(self._statefile_path, encoding="utf-8") as statefile: + self._state = json.load(statefile) + except (OSError, ValueError, KeyError): + # Explicitly suppressing exceptions, since we don't want to + # error out if the cache file is invalid. + pass + + @property + def key(self) -> str: + return sys.prefix + + def get(self, current_time: datetime.datetime) -> str | None: + """Check if we have a not-outdated version loaded already.""" + if not self._state: + return None + + if "last_check" not in self._state: + return None + + if "pypi_version" not in self._state: + return None + + # Determine if we need to refresh the state + last_check = _convert_date(self._state["last_check"]) + time_since_last_check = current_time - last_check + if time_since_last_check > _WEEK: + return None + + return self._state["pypi_version"] + + def set(self, pypi_version: str, current_time: datetime.datetime) -> None: + # If we do not have a path to cache in, don't bother saving. 
+ if not self._statefile_path: + return + + statefile_directory = os.path.dirname(self._statefile_path) + + # Check to make sure that we own the directory + if not check_path_owner(statefile_directory): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + ensure_dir(statefile_directory) + + state = { + # Include the key so it's easy to tell which pip wrote the + # file. + "key": self.key, + "last_check": current_time.isoformat(), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) + + with adjacent_tmp_file(self._statefile_path) as f: + f.write(text.encode()) + copy_directory_permissions(statefile_directory, f) + + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self._statefile_path) + except OSError: + # Best effort. + pass + + +@dataclass +class UpgradePrompt: + old: str + new: str + + def __rich__(self) -> Group: + if WINDOWS: + pip_cmd = f"{get_best_invocation_for_this_python()} -m pip" + else: + pip_cmd = get_best_invocation_for_this_pip() + + notice = "[bold][[reset][blue]notice[reset][bold]][reset]" + return Group( + Text(), + Text.from_markup( + f"{notice} A new release of pip is available: " + f"[red]{self.old}[reset] -> [green]{self.new}[reset]" + ), + Text.from_markup( + f"{notice} To update, run: " + f"[green]{escape(pip_cmd)} install --upgrade pip" + ), + ) + + +def was_installed_by_pip(pkg: str) -> bool: + """Checks whether pkg was installed by pip + + This is used not to display the upgrade message when pip is in fact + installed by system package manager, such as dnf on Fedora. + """ + dist = get_default_environment().get_distribution(pkg) + return dist is not None and "pip" == dist.installer + + +def _get_current_remote_pip_version( + session: PipSession, options: optparse.Values +) -> str | None: + # Lets use PackageFinder to see what the latest pip version is + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=False, # Explicitly set to False + ) + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return None + + return str(best_candidate.version) + + +def _self_version_check_logic( + *, + state: SelfCheckState, + current_time: datetime.datetime, + local_version: Version, + get_remote_version: Callable[[], str | None], +) -> UpgradePrompt | None: + remote_version_str = state.get(current_time) + if remote_version_str is None: + remote_version_str = get_remote_version() + if remote_version_str is None: + logger.debug("No remote pip version found") + return None + state.set(remote_version_str, current_time) + + remote_version = parse_version(remote_version_str) + logger.debug("Remote version of pip: %s", remote_version) + logger.debug("Local version of pip: %s", local_version) + + pip_installed_by_pip = was_installed_by_pip("pip") + logger.debug("Was pip installed by pip? %s", pip_installed_by_pip) + if not pip_installed_by_pip: + return None # Only suggest upgrade if pip is installed by pip. 
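+
+    # Comparing base versions as well keeps a local pre- or dev-build of
+    # the same release (e.g. 25.1.dev0 vs 25.1) from prompting an upgrade.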
+ + local_version_is_older = ( + local_version < remote_version + and local_version.base_version != remote_version.base_version + ) + if local_version_is_older: + return UpgradePrompt(old=str(local_version), new=remote_version_str) + + return None + + +def pip_self_version_check(session: PipSession, options: optparse.Values) -> None: + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. + """ + installed_dist = get_default_environment().get_distribution("pip") + if not installed_dist: + return + try: + check_externally_managed() + except ExternallyManagedEnvironment: + return + + upgrade_prompt = _self_version_check_logic( + state=SelfCheckState(cache_dir=options.cache_dir), + current_time=datetime.datetime.now(datetime.timezone.utc), + local_version=installed_dist.version, + get_remote_version=functools.partial( + _get_current_remote_pip_version, session, options + ), + ) + if upgrade_prompt is not None: + logger.warning("%s", upgrade_prompt, extra={"rich": True}) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8bd969f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc new file mode 100644 index 0000000..7e09997 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc new file mode 100644 index 0000000..e5cd71c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/_log.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc new file mode 100644 index 0000000..40cb9bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..99143a8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc new file mode 100644 index 0000000..9b38438 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc 
differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc new file mode 100644 index 0000000..321f4ed Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc new file mode 100644 index 0000000..3200661 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc new file mode 100644 index 0000000..4c61c88 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc new file mode 100644 index 0000000..367ed88 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc new file mode 100644 index 0000000..733c551 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc new file mode 100644 index 0000000..45a756d Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc new file mode 100644 index 0000000..c582bbc Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc new file mode 100644 index 0000000..8881308 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc new file mode 100644 index 0000000..b13553a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc new file mode 100644 index 0000000..2e07eb1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/logging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc new file mode 100644 index 0000000..49c2665 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/misc.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc new file mode 100644 index 0000000..72d2ca7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc new file mode 100644 index 0000000..2568f6a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/retry.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc new file mode 100644 index 0000000..e3914d2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc new file mode 100644 index 0000000..c7882a8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc new file mode 100644 index 0000000..4dde830 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc new file mode 100644 index 0000000..346bef6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/urls.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc new file mode 100644 index 0000000..8777dcc Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc new file mode 100644 index 0000000..2a86d38 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-312.pyc differ diff 
--git a/venv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py new file mode 100644 index 0000000..6ccf53b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/_jaraco_text.py @@ -0,0 +1,109 @@ +"""Functions brought over from jaraco.text. + +These functions are not supposed to be used within `pip._internal`. These are +helper functions brought over from `jaraco.text` to enable vendoring newer +copies of `pkg_resources` without having to vendor `jaraco.text` and its entire +dependency cone; something that our vendoring setup is not currently capable of +handling. + +License reproduced from original source below: + +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +import functools +import itertools + + +def _nonblank(str): + return str and not str.startswith("#") + + +@functools.singledispatch +def yield_lines(iterable): + r""" + Yield valid lines of a string or iterable. + + >>> list(yield_lines('')) + [] + >>> list(yield_lines(['foo', 'bar'])) + ['foo', 'bar'] + >>> list(yield_lines('foo\nbar')) + ['foo', 'bar'] + >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) + ['foo', 'baz #comment'] + >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) + ['foo', 'bar', 'baz', 'bing'] + """ + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) + + +def drop_comment(line): + """ + Drop comments. + + >>> drop_comment('foo # bar') + 'foo' + + A hash without a space may be in a URL. + + >>> drop_comment('http://example.com/foo#bar') + 'http://example.com/foo#bar' + """ + return line.partition(" #")[0] + + +def join_continuation(lines): + r""" + Join lines continued by a trailing backslash. + + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar \\', 'baz'])) + ['foobarbaz'] + + Not sure why, but... + The character preceding the backslash is also elided. + + >>> list(join_continuation(['goo\\', 'dly'])) + ['godly'] + + A terrible idea, but... + If no line is available to continue, suppress the lines. 
+ + >>> list(join_continuation(['foo', 'bar\\', 'baz\\'])) + ['foo'] + """ + lines = iter(lines) + for item in lines: + while item.endswith("\\"): + try: + item = item[:-2].strip() + next(lines) + except StopIteration: + return + yield item diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/_log.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/_log.py new file mode 100644 index 0000000..92c4c6a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/_log.py @@ -0,0 +1,38 @@ +"""Customize logging + +Defines custom logger class for the `logger.verbose(...)` method. + +init_logging() must be called before any other modules that call logging.getLogger. +""" + +import logging +from typing import Any, cast + +# custom log level for `--verbose` output +# between DEBUG and INFO +VERBOSE = 15 + + +class VerboseLogger(logging.Logger): + """Custom Logger, defining a verbose log-level + + VERBOSE is between INFO and DEBUG. + """ + + def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None: + return self.log(VERBOSE, msg, *args, **kwargs) + + +def getLogger(name: str) -> VerboseLogger: + """logging.getLogger, but ensures our VerboseLogger class is returned""" + return cast(VerboseLogger, logging.getLogger(name)) + + +def init_logging() -> None: + """Register our VerboseLogger and VERBOSE log level. + + Should be called before any calls to getLogger(), + i.e. in pip._internal.__init__ + """ + logging.setLoggerClass(VerboseLogger) + logging.addLevelName(VERBOSE, "VERBOSE") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py new file mode 100644 index 0000000..4152528 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/appdirs.py @@ -0,0 +1,52 @@ +""" +This code wraps the vendored appdirs module to so the return values are +compatible for the current pip code base. + +The intention is to rewrite current usages gradually, keeping the tests pass, +and eventually drop this after all usages are changed. +""" + +import os +import sys + +from pip._vendor import platformdirs as _appdirs + + +def user_cache_dir(appname: str) -> str: + return _appdirs.user_cache_dir(appname, appauthor=False) + + +def _macos_user_config_dir(appname: str, roaming: bool = True) -> str: + # Use ~/Application Support/pip, if the directory exists. + path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming) + if os.path.isdir(path): + return path + + # Use a Linux-like ~/.config/pip, by default. + linux_like_path = "~/.config/" + if appname: + linux_like_path = os.path.join(linux_like_path, appname) + + return os.path.expanduser(linux_like_path) + + +def user_config_dir(appname: str, roaming: bool = True) -> str: + if sys.platform == "darwin": + return _macos_user_config_dir(appname, roaming) + + return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + + +# for the discussion regarding site_config_dir locations +# see +def site_config_dirs(appname: str) -> list[str]: + if sys.platform == "darwin": + dirval = _appdirs.site_data_dir(appname, appauthor=False, multipath=True) + return dirval.split(os.pathsep) + + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if sys.platform == "win32": + return [dirval] + + # Unix-y system. Look in /etc as well. 
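+    # e.g. "/etc/xdg/pip" from platformdirs, plus the traditional "/etc".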
+ return dirval.split(os.pathsep) + ["/etc"] diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/compat.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/compat.py new file mode 100644 index 0000000..324789f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/compat.py @@ -0,0 +1,85 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +import importlib.resources +import logging +import os +import sys +from typing import IO + +__all__ = ["get_path_uid", "stdlib_pkgs", "tomllib", "WINDOWS"] + + +logger = logging.getLogger(__name__) + + +def has_tls() -> bool: + try: + import _ssl # noqa: F401 # ignore unused + + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + + return IS_PYOPENSSL + + +def get_path_uid(path: str) -> int: + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, "O_NOFOLLOW"): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError(f"{path} is a symlink; Will not return uid for symlinks") + return file_uid + + +# The importlib.resources.open_text function was deprecated in 3.11 with suggested +# replacement we use below. +if sys.version_info < (3, 11): + open_text_resource = importlib.resources.open_text +else: + + def open_text_resource( + package: str, resource: str, encoding: str = "utf-8", errors: str = "strict" + ) -> IO[str]: + return (importlib.resources.files(package) / resource).open( + "r", encoding=encoding, errors=errors + ) + + +if sys.version_info >= (3, 11): + import tomllib +else: + from pip._vendor import tomli as tomllib + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 0000000..6d98171 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,201 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" + +from __future__ import annotations + +import re + +from pip._vendor.packaging.tags import ( + PythonVersion, + Tag, + android_platforms, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + ios_platforms, + mac_platforms, +) + +_apple_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") + + +def version_info_to_nodot(version_info: tuple[int, ...]) -> str: + # Only use up to the first two numbers. 
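+    # e.g. (3, 12, 1) -> "312"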
+ return "".join(map(str, version_info[:2])) + + +def _mac_platforms(arch: str) -> list[str]: + match = _apple_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + "{}_{}".format(name, arch[len("macosx_") :]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _ios_platforms(arch: str) -> list[str]: + match = _apple_arch_pat.match(arch) + if match: + name, major, minor, actual_multiarch = match.groups() + ios_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "ios", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "ioscustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + "{}_{}".format(name, arch[len("ios_") :]) + for arch in ios_platforms(ios_version, actual_multiarch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _android_platforms(arch: str) -> list[str]: + match = re.fullmatch(r"android_(\d+)_(.+)", arch) + if match: + api_level, abi = match.groups() + return list(android_platforms(int(api_level), abi)) + else: + # arch pattern didn't match (?!) + return [arch] + + +def _custom_manylinux_platforms(arch: str) -> list[str]: + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch_prefix == "manylinux2014": + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {"i686", "x86_64"}: + arches.append("manylinux2010" + arch_sep + arch_suffix) + arches.append("manylinux1" + arch_sep + arch_suffix) + elif arch_prefix == "manylinux2010": + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. 
PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append("manylinux1" + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch: str) -> list[str]: + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch.startswith("macosx"): + arches = _mac_platforms(arch) + elif arch.startswith("ios"): + arches = _ios_platforms(arch) + elif arch_prefix == "android": + arches = _android_platforms(arch) + elif arch_prefix in ["manylinux2014", "manylinux2010"]: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _expand_allowed_platforms(platforms: list[str] | None) -> list[str] | None: + if not platforms: + return None + + seen = set() + result = [] + + for p in platforms: + if p in seen: + continue + additions = [c for c in _get_custom_platforms(p) if c not in seen] + seen.update(additions) + result.extend(additions) + + return result + + +def _get_python_version(version: str) -> PythonVersion: + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter( + implementation: str | None = None, version: str | None = None +) -> str: + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return f"{implementation}{version}" + + +def get_supported( + version: str | None = None, + platforms: list[str] | None = None, + impl: str | None = None, + abis: list[str] | None = None, +) -> list[Tag]: + """Return a list of supported tags for each version specified in + `versions`. + + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. + :param platform: specify a list of platforms you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abis: specify a list of abis you want valid + tags for, or None. If None, use the local interpreter abi. 
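+
+    The returned list is ordered from the most to the least preferred tag;
+    this ordering is significant, since pip ranks candidate wheels by the
+    position of their tag in this list.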
+ """ + supported: list[Tag] = [] + + python_version: PythonVersion | None = None + if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + platforms = _expand_allowed_platforms(platforms) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) + + return supported diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py new file mode 100644 index 0000000..776e498 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/datetime.py @@ -0,0 +1,10 @@ +"""For when pip wants to check the date or time.""" + +import datetime + + +def today_is_later_than(year: int, month: int, day: int) -> bool: + today = datetime.date.today() + given = datetime.date(year, month, day) + + return today > given diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py new file mode 100644 index 0000000..96e7783 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/deprecation.py @@ -0,0 +1,126 @@ +""" +A module that implements tooling to enable easy warnings about deprecations. +""" + +from __future__ import annotations + +import logging +import warnings +from typing import Any, TextIO + +from pip._vendor.packaging.version import parse + +from pip import __version__ as current_version # NOTE: tests patch this name. + +DEPRECATION_MSG_PREFIX = "DEPRECATION: " + + +class PipDeprecationWarning(Warning): + pass + + +_original_showwarning: Any = None + + +# Warnings <-> Logging Integration +def _showwarning( + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, +) -> None: + if file is not None: + if _original_showwarning is not None: + _original_showwarning(message, category, filename, lineno, file, line) + elif issubclass(category, PipDeprecationWarning): + # We use a specially named logger which will handle all of the + # deprecation messages for pip. + logger = logging.getLogger("pip._internal.deprecations") + logger.warning(message) + else: + _original_showwarning(message, category, filename, lineno, file, line) + + +def install_warning_logger() -> None: + # Enable our Deprecation Warnings + warnings.simplefilter("default", PipDeprecationWarning, append=True) + + global _original_showwarning + + if _original_showwarning is None: + _original_showwarning = warnings.showwarning + warnings.showwarning = _showwarning + + +def deprecated( + *, + reason: str, + replacement: str | None, + gone_in: str | None, + feature_flag: str | None = None, + issue: int | None = None, +) -> None: + """Helper to deprecate existing functionality. + + reason: + Textual reason shown to the user about why this functionality has + been deprecated. Should be a complete sentence. + replacement: + Textual suggestion shown to the user about what alternative + functionality they can use. + gone_in: + The version of pip does this functionality should get removed in. 
+ Raises an error if pip's current version is greater than or equal to + this. + feature_flag: + Command-line flag of the form --use-feature={feature_flag} for testing + upcoming functionality. + issue: + Issue number on the tracker that would serve as a useful place for + users to find related discussion and provide feedback. + """ + + # Determine whether or not the feature is already gone in this version. + is_gone = gone_in is not None and parse(current_version) >= parse(gone_in) + + message_parts = [ + (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"), + ( + gone_in, + ( + "pip {} will enforce this behaviour change." + if not is_gone + else "Since pip {}, this is no longer supported." + ), + ), + ( + replacement, + "A possible replacement is {}.", + ), + ( + feature_flag, + ( + "You can use the flag --use-feature={} to test the upcoming behaviour." + if not is_gone + else None + ), + ), + ( + issue, + "Discussion can be found at https://github.com/pypa/pip/issues/{}", + ), + ] + + message = " ".join( + format_str.format(value) + for value, format_str in message_parts + if format_str is not None and value is not None + ) + + # Raise as an error if this behaviour is deprecated. + if is_gone: + raise PipDeprecationWarning(message) + + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py new file mode 100644 index 0000000..3cbc1e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo +from pip._internal.models.link import Link +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + + +def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str: + """Convert a DirectUrl to a pip requirement string.""" + direct_url.validate() # if invalid, this is a pip bug + requirement = name + " @ " + fragments = [] + if isinstance(direct_url.info, VcsInfo): + requirement += ( + f"{direct_url.info.vcs}+{direct_url.url}@{direct_url.info.commit_id}" + ) + elif isinstance(direct_url.info, ArchiveInfo): + requirement += direct_url.url + if direct_url.info.hash: + fragments.append(direct_url.info.hash) + else: + assert isinstance(direct_url.info, DirInfo) + requirement += direct_url.url + if direct_url.subdirectory: + fragments.append("subdirectory=" + direct_url.subdirectory) + if fragments: + requirement += "#" + "&".join(fragments) + return requirement + + +def direct_url_for_editable(source_dir: str) -> DirectUrl: + return DirectUrl( + url=path_to_url(source_dir), + info=DirInfo(editable=True), + ) + + +def direct_url_from_link( + link: Link, source_dir: str | None = None, link_is_in_wheel_cache: bool = False +) -> DirectUrl: + if link.is_vcs: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend + url, requested_revision, _ = vcs_backend.get_url_rev_and_auth( + link.url_without_fragment + ) + # For VCS links, we need to find out and add commit_id. + if link_is_in_wheel_cache: + # If the requested VCS link corresponds to a cached + # wheel, it means the requested revision was an + # immutable commit hash, otherwise it would not have + # been cached. In that case we don't have a source_dir + # with the VCS checkout. 
+ assert requested_revision + commit_id = requested_revision + else: + # If the wheel was not in cache, it means we have + # had to checkout from VCS to build and we have a source_dir + # which we can inspect to find out the commit id. + assert source_dir + commit_id = vcs_backend.get_revision(source_dir) + return DirectUrl( + url=url, + info=VcsInfo( + vcs=vcs_backend.name, + commit_id=commit_id, + requested_revision=requested_revision, + ), + subdirectory=link.subdirectory_fragment, + ) + elif link.is_existing_dir(): + return DirectUrl( + url=link.url_without_fragment, + info=DirInfo(), + subdirectory=link.subdirectory_fragment, + ) + else: + hash = None + hash_name = link.hash_name + if hash_name: + hash = f"{hash_name}={link.hash}" + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), + subdirectory=link.subdirectory_fragment, + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py new file mode 100644 index 0000000..dc85a58 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/egg_link.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import os +import re +import sys + +from pip._internal.locations import site_packages, user_site +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) + +__all__ = [ + "egg_link_path_from_sys_path", + "egg_link_path_from_location", +] + + +def _egg_link_names(raw_name: str) -> list[str]: + """ + Convert a Name metadata value to a .egg-link name, by applying + the same substitution as pkg_resources's safe_name function. + Note: we cannot use canonicalize_name because it has a different logic. + + We also look for the raw name (without normalization) as setuptools 69 changed + the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167). + """ + return [ + re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link", + f"{raw_name}.egg-link", + ] + + +def egg_link_path_from_sys_path(raw_name: str) -> str | None: + """ + Look for a .egg-link file for project name, by walking sys.path. + """ + egg_link_names = _egg_link_names(raw_name) + for path_item in sys.path: + for egg_link_name in egg_link_names: + egg_link = os.path.join(path_item, egg_link_name) + if os.path.isfile(egg_link): + return egg_link + return None + + +def egg_link_path_from_location(raw_name: str) -> str | None: + """ + Return the path for the .egg-link file if it exists, otherwise, None. + + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE + (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 + locations. + + This method will just return the first one found. 
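
# ---- Editor's aside: a sketch of direct_url_as_pep440_direct_reference()
# above on a hypothetical archive URL (URL and digest are invented).
from pip._internal.models.direct_url import ArchiveInfo, DirectUrl
from pip._internal.utils.direct_url_helpers import (
    direct_url_as_pep440_direct_reference,
)

direct_url = DirectUrl(
    url="https://example.com/pkg-1.0.tar.gz",
    info=ArchiveInfo(hash="sha256=" + "0" * 64),
)
print(direct_url_as_pep440_direct_reference(direct_url, name="pkg"))
# -> "pkg @ https://example.com/pkg-1.0.tar.gz#sha256=000...000"
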
+ """ + sites: list[str] = [] + if running_under_virtualenv(): + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + egg_link_names = _egg_link_names(raw_name) + for site in sites: + for egg_link_name in egg_link_names: + egglink = os.path.join(site, egg_link_name) + if os.path.isfile(egglink): + return egglink + return None diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 0000000..e3a150e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +import itertools +import os +import shutil +import sys + +from pip._internal.cli.main import main +from pip._internal.utils.compat import WINDOWS + +_EXECUTABLE_NAMES = [ + "pip", + f"pip{sys.version_info.major}", + f"pip{sys.version_info.major}.{sys.version_info.minor}", +] +if WINDOWS: + _allowed_extensions = {"", ".exe"} + _EXECUTABLE_NAMES = [ + "".join(parts) + for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions) + ] + + +def _wrapper(args: list[str] | None = None) -> int: + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) + + +def get_best_invocation_for_this_pip() -> str: + """Try to figure out the best way to invoke pip in the current environment.""" + binary_directory = "Scripts" if WINDOWS else "bin" + binary_prefix = os.path.join(sys.prefix, binary_directory) + + # Try to use pip[X[.Y]] names, if those executables for this environment are + # the first on PATH with that name. + path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep) + exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts + if exe_are_in_PATH: + for exe_name in _EXECUTABLE_NAMES: + found_executable = shutil.which(exe_name) + binary_executable = os.path.join(binary_prefix, exe_name) + if ( + found_executable + and os.path.exists(binary_executable) + and os.path.samefile( + found_executable, + binary_executable, + ) + ): + return exe_name + + # Use the `-m` invocation, if there's no "nice" invocation. + return f"{get_best_invocation_for_this_python()} -m pip" + + +def get_best_invocation_for_this_python() -> str: + """Try to figure out the best way to invoke the current Python.""" + exe = sys.executable + exe_name = os.path.basename(exe) + + # Try to use the basename, if it's the first executable. 
+ found_executable = shutil.which(exe_name) + # Virtual environments often symlink to their parent Python binaries, but we don't + # want to treat the Python binaries as equivalent when the environment's Python is + # not on PATH (not activated). Thus, we don't follow symlinks. + if found_executable and os.path.samestat(os.lstat(found_executable), os.lstat(exe)): + return exe_name + + # Use the full executable name, because we couldn't find something simpler. + return exe diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000..e34ffcf --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,164 @@ +from __future__ import annotations + +import fnmatch +import os +import os.path +import random +import sys +from collections.abc import Generator +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, BinaryIO, cast + +from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size +from pip._internal.utils.retry import retry + + +def check_path_owner(path: str) -> bool: + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if sys.platform == "win32" or not hasattr(os, "geteuid"): + return True + + assert os.path.isabs(path) + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) + return False # assume we don't own the path + + +@contextmanager +def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. + + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. + """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix=".tmp", + **kwargs, + ) as f: + result = cast(BinaryIO, f) + try: + yield result + finally: + result.flush() + os.fsync(result.fileno()) + + +replace = retry(stop_after_delay=1, wait=0.25)(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path: str) -> bool: + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. 
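
# ---- Editor's aside: the two "best invocation" helpers above in use; actual
# output depends on PATH and the active environment, so the values shown in
# the comments are only examples.
from pip._internal.utils.entrypoints import (
    get_best_invocation_for_this_pip,
    get_best_invocation_for_this_python,
)

print(get_best_invocation_for_this_pip())     # e.g. "pip3.12" or "<python> -m pip"
print(get_best_invocation_for_this_python())  # e.g. "python3" or a full path
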
+ while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == "posix": + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path: str) -> bool: + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = "accesstest_deleteme_fishfingers_custard_" + alphabet = "abcdefghijklmnopqrstuvwxyz0123456789" + for _ in range(10): + name = basename + "".join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except FileExistsError: + pass + except PermissionError: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise OSError("Unexpected condition testing for writable directory") + + +def find_files(path: str, pattern: str) -> list[str]: + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result: list[str] = [] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path: str) -> int | float: + # If it's a symlink, return 0. + if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path: str) -> str: + return format_size(file_size(path)) + + +def directory_size(path: str) -> int | float: + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path: str) -> str: + return format_size(directory_size(path)) + + +def copy_directory_permissions(directory: str, target_file: BinaryIO) -> None: + mode = ( + os.stat(directory).st_mode & 0o666 # select read/write permissions of directory + | 0o600 # set owner read/write permissions + ) + # Change permissions only if there is no risk of following a symlink. + if os.chmod in os.supports_fd: + os.chmod(target_file.fileno(), mode) + elif os.chmod in os.supports_follow_symlinks: + os.chmod(target_file.name, mode, follow_symlinks=False) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 0000000..2b8baad --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,24 @@ +"""Filetype information.""" + +from pip._internal.utils.misc import splitext + +WHEEL_EXTENSION = ".whl" +BZ2_EXTENSIONS: tuple[str, ...] = (".tar.bz2", ".tbz") +XZ_EXTENSIONS: tuple[str, ...] = ( + ".tar.xz", + ".txz", + ".tlz", + ".tar.lz", + ".tar.lzma", +) +ZIP_EXTENSIONS: tuple[str, ...] = (".zip", WHEEL_EXTENSION) +TAR_EXTENSIONS: tuple[str, ...] 
= (".tar.gz", ".tgz", ".tar") +ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS + + +def is_archive_file(name: str) -> bool: + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 0000000..2cb3013 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +import os +import sys + + +def glibc_version_string() -> str | None: + "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr() -> str | None: + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION") + if gnu_libc_version is None: + return None + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = gnu_libc_version.split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes() -> str | None: + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can't proceed, so we bail on our attempt. + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. 
on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. +def libc_ver() -> tuple[str, str]: + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. + """ + glibc_version = glibc_version_string() + if glibc_version is None: + return ("", "") + else: + return ("glibc", glibc_version) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py new file mode 100644 index 0000000..3d8c125 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/hashes.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +import hashlib +from collections.abc import Iterable +from typing import TYPE_CHECKING, BinaryIO, NoReturn + +from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError +from pip._internal.utils.misc import read_chunks + +if TYPE_CHECKING: + from hashlib import _Hash + + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. +FAVORITE_HASH = "sha256" + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ["sha256", "sha384", "sha512"] + + +class Hashes: + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + + def __init__(self, hashes: dict[str, list[str]] | None = None) -> None: + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + allowed = {} + if hashes is not None: + for alg, keys in hashes.items(): + # Make sure values are always sorted (to ease equality checks) + allowed[alg] = [k.lower() for k in sorted(keys)] + self._allowed = allowed + + def __and__(self, other: Hashes) -> Hashes: + if not isinstance(other, Hashes): + return NotImplemented + + # If either of the Hashes object is entirely empty (i.e. no hash + # specified at all), all hashes from the other object are allowed. + if not other: + return self + if not self: + return other + + # Otherwise only hashes that present in both objects are allowed. + new = {} + for alg, values in other._allowed.items(): + if alg not in self._allowed: + continue + new[alg] = [v for v in values if v in self._allowed[alg]] + return Hashes(new) + + @property + def digest_count(self) -> int: + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool: + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + + def check_against_chunks(self, chunks: Iterable[bytes]) -> None: + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. 
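
# ---- Editor's aside: libc_ver() as defined above; on glibc systems it
# reports the real version, elsewhere (e.g. musl) it returns empty strings.
from pip._internal.utils.glibc import libc_ver

lib, version = libc_ver()
print(lib, version)  # e.g. "glibc 2.36"; empty strings when glibc is absent
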
+ + """ + gots = {} + for hash_name in self._allowed.keys(): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError(f"Unknown hash name: {hash_name}") + + for chunk in chunks: + for hash in gots.values(): + hash.update(chunk) + + for hash_name, got in gots.items(): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots: dict[str, _Hash]) -> NoReturn: + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file: BinaryIO) -> None: + """Check good hashes against a file-like object + + Raise HashMismatch if none match. + + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path: str) -> None: + with open(path, "rb") as file: + return self.check_against_file(file) + + def has_one_of(self, hashes: dict[str, str]) -> bool: + """Return whether any of the given hashes are allowed.""" + for hash_name, hex_digest in hashes.items(): + if self.is_hash_allowed(hash_name, hex_digest): + return True + return False + + def __bool__(self) -> bool: + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Hashes): + return NotImplemented + return self._allowed == other._allowed + + def __hash__(self) -> int: + return hash( + ",".join( + sorted( + ":".join((alg, digest)) + for alg, digest_list in self._allowed.items() + for digest in digest_list + ) + ) + ) + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + + def __init__(self) -> None: + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super().__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots: dict[str, _Hash]) -> NoReturn: + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/logging.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 0000000..5cdbeb7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,364 @@ +from __future__ import annotations + +import contextlib +import errno +import logging +import logging.handlers +import os +import sys +import threading +from collections.abc import Generator +from dataclasses import dataclass +from io import TextIOWrapper +from logging import Filter +from typing import Any, ClassVar + +from pip._vendor.rich.console import ( + Console, + ConsoleOptions, + ConsoleRenderable, + RenderableType, + RenderResult, + RichCast, +) +from pip._vendor.rich.highlighter import NullHighlighter +from pip._vendor.rich.logging import RichHandler +from pip._vendor.rich.segment import Segment +from pip._vendor.rich.style import Style + +from pip._internal.utils._log import VERBOSE, getLogger +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX +from pip._internal.utils.misc import ensure_dir + +_log_state = threading.local() +_stdout_console = None +_stderr_console = None +subprocess_logger = getLogger("pip.subprocessor") + + +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. 
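
# ---- Editor's aside: a self-checking sketch of the Hashes class above (the
# digest is computed inline so the check passes).
import hashlib

from pip._internal.utils.hashes import Hashes

data = b"example payload"
digest = hashlib.sha256(data).hexdigest()
hashes = Hashes({"sha256": [digest]})
hashes.check_against_chunks([data])              # silent on success
print(hashes.is_hash_allowed("sha256", digest))  # True
print(bool(Hashes()))                            # False - no known-good hashes
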
+ """ + + +def _is_broken_pipe_error(exc_class: type[BaseException], exc: BaseException) -> bool: + if exc_class is BrokenPipeError: + return True + + # On Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + if not WINDOWS: + return False + + return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE) + + +@contextlib.contextmanager +def indent_log(num: int = 2) -> Generator[None, None, None]: + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + # For thread-safety + _log_state.indentation = get_indentation() + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation() -> int: + return getattr(_log_state, "indentation", 0) + + +class IndentingFormatter(logging.Formatter): + default_time_format = "%Y-%m-%dT%H:%M:%S" + + def __init__( + self, + *args: Any, + add_timestamp: bool = False, + **kwargs: Any, + ) -> None: + """ + A logging.Formatter that obeys the indent_log() context manager. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. + """ + self.add_timestamp = add_timestamp + super().__init__(*args, **kwargs) + + def get_message_start(self, formatted: str, levelno: int) -> str: + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return "" + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." + return "" + if levelno < logging.ERROR: + return "WARNING: " + + return "ERROR: " + + def format(self, record: logging.LogRecord) -> str: + """ + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. + """ + formatted = super().format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + + prefix = "" + if self.add_timestamp: + prefix = f"{self.formatTime(record)} " + prefix += " " * get_indentation() + formatted = "".join([prefix + line for line in formatted.splitlines(True)]) + return formatted + + +@dataclass +class IndentedRenderable: + renderable: RenderableType + indent: int + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + segments = console.render(self.renderable, options) + lines = Segment.split_lines(segments) + for line in lines: + yield Segment(" " * self.indent) + yield from line + yield Segment("\n") + + +class PipConsole(Console): + def on_broken_pipe(self) -> None: + # Reraise the original exception, rich 13.8.0+ exits by default + # instead, preventing our handler from firing. + raise BrokenPipeError() from None + + +def get_console(*, stderr: bool = False) -> Console: + if stderr: + assert _stderr_console is not None, "stderr rich console is missing!" + return _stderr_console + else: + assert _stdout_console is not None, "stdout rich console is missing!" + return _stdout_console + + +class RichPipStreamHandler(RichHandler): + KEYWORDS: ClassVar[list[str] | None] = [] + + def __init__(self, console: Console) -> None: + super().__init__( + console=console, + show_time=False, + show_level=False, + show_path=False, + highlighter=NullHighlighter(), + ) + + # Our custom override on Rich's logger, to make things work as we need them to. 
+ def emit(self, record: logging.LogRecord) -> None: + style: Style | None = None + + # If we are given a diagnostic error to present, present it with indentation. + if getattr(record, "rich", False): + assert isinstance(record.args, tuple) + (rich_renderable,) = record.args + assert isinstance( + rich_renderable, (ConsoleRenderable, RichCast, str) + ), f"{rich_renderable} is not rich-console-renderable" + + renderable: RenderableType = IndentedRenderable( + rich_renderable, indent=get_indentation() + ) + else: + message = self.format(record) + renderable = self.render_message(record, message) + if record.levelno is not None: + if record.levelno >= logging.ERROR: + style = Style(color="red") + elif record.levelno >= logging.WARNING: + style = Style(color="yellow") + + try: + self.console.print(renderable, overflow="ignore", crop=False, style=style) + except Exception: + self.handleError(record) + + def handleError(self, record: logging.LogRecord) -> None: + """Called when logging is unable to log some output.""" + + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. + if ( + exc_class + and exc + and self.console.file is sys.stdout + and _is_broken_pipe_error(exc_class, exc) + ): + raise BrokenStdoutLoggingError() + + return super().handleError(record) + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + def _open(self) -> TextIOWrapper: + ensure_dir(os.path.dirname(self.baseFilename)) + return super()._open() + + +class MaxLevelFilter(Filter): + def __init__(self, level: int) -> None: + self.level = level + + def filter(self, record: logging.LogRecord) -> bool: + return record.levelno < self.level + + +class ExcludeLoggerFilter(Filter): + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record: logging.LogRecord) -> bool: + # The base Filter class allows only records from a logger (or its + # children). + return not super().filter(record) + + +def setup_logging(verbosity: int, no_color: bool, user_log_file: str | None) -> int: + """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. + """ + + # Determine the level to be logging at. + if verbosity >= 2: + level_number = logging.DEBUG + elif verbosity == 1: + level_number = VERBOSE + elif verbosity == -1: + level_number = logging.WARNING + elif verbosity == -2: + level_number = logging.ERROR + elif verbosity <= -3: + level_number = logging.CRITICAL + else: + level_number = logging.INFO + + level = logging.getLevelName(level_number) + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. 
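
# ---- Editor's aside: the verbosity-to-level mapping implemented above in
# setup_logging(), spelled out; VERBOSE is pip's custom level sitting between
# DEBUG and INFO.
#   -vv and more  -> DEBUG
#   -v            -> VERBOSE
#   (default)     -> INFO
#   -q            -> WARNING
#   -qq           -> ERROR
#   -qqq and more -> CRITICAL
import logging

from pip._internal.utils._log import VERBOSE

assert logging.DEBUG < VERBOSE < logging.INFO
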
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + handler_classes = { + "stream": "pip._internal.utils.logging.RichPipStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) + global _stdout_console, stderr_console + _stdout_console = PipConsole(file=sys.stdout, no_color=no_color, soft_wrap=True) + _stderr_console = PipConsole(file=sys.stderr, no_color=no_color, soft_wrap=True) + + logging.config.dictConfig( + { + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "console": _stdout_console, + "filters": ["exclude_subprocess", "exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "console": _stderr_console, + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. + "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "console": _stderr_console, + "filters": ["restrict_to_subprocess"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "encoding": "utf-8", + "delay": True, + "formatter": "indent_with_timestamp", + }, + }, + "root": { + "level": root_level, + "handlers": handlers, + }, + "loggers": {"pip._vendor": {"level": vendored_log_level}}, + } + ) + + return level_number diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/misc.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 0000000..3a28e84 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,765 @@ +from __future__ import annotations + +import errno +import getpass +import hashlib +import logging +import os +import posixpath +import shutil +import stat +import sys +import sysconfig +import urllib.parse +from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence +from dataclasses import dataclass +from functools import partial +from io import StringIO +from itertools import filterfalse, tee, zip_longest +from pathlib import Path +from types import FunctionType, TracebackType +from typing import ( + Any, + BinaryIO, + Callable, + Optional, + TextIO, + TypeVar, + cast, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.pyproject_hooks import BuildBackendHookCaller + +from pip import __version__ +from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment +from pip._internal.locations import get_major_minor_version +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.retry import retry +from 
pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = [ + "rmtree", + "display_path", + "backup_dir", + "ask", + "splitext", + "format_size", + "is_installable_dir", + "normalize_path", + "renames", + "get_prog", + "ensure_dir", + "remove_auth_from_url", + "check_externally_managed", + "ConfiguredBuildBackendHookCaller", +] + +logger = logging.getLogger(__name__) + +T = TypeVar("T") +ExcInfo = tuple[type[BaseException], BaseException, TracebackType] +VersionInfo = tuple[int, int, int] +NetlocTuple = tuple[str, tuple[Optional[str], Optional[str]]] +OnExc = Callable[[FunctionType, Path, BaseException], Any] +OnErr = Callable[[FunctionType, Path, ExcInfo], Any] + +FILE_CHUNK_SIZE = 1024 * 1024 + + +def get_pip_version() -> str: + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})" + + +def normalize_version_info(py_version_info: tuple[int, ...]) -> tuple[int, int, int]: + """ + Convert a tuple of ints representing a Python version to one of length + three. + + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. + + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). + """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] + + return cast("VersionInfo", py_version_info) + + +def ensure_dir(path: str) -> None: + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + # Windows can raise spurious ENOTEMPTY errors. See #6426. + if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: + raise + + +def get_prog() -> str: + try: + prog = os.path.basename(sys.argv[0]) + if prog in ("__main__.py", "-c"): + return f"{sys.executable} -m pip" + else: + return prog + except (AttributeError, TypeError, IndexError): + pass + return "pip" + + +# Retry every half second for up to 3 seconds +@retry(stop_after_delay=3, wait=0.5) +def rmtree(dir: str, ignore_errors: bool = False, onexc: OnExc | None = None) -> None: + if ignore_errors: + onexc = _onerror_ignore + if onexc is None: + onexc = _onerror_reraise + handler: OnErr = partial(rmtree_errorhandler, onexc=onexc) + if sys.version_info >= (3, 12): + # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil. + shutil.rmtree(dir, onexc=handler) # type: ignore + else: + shutil.rmtree(dir, onerror=handler) # type: ignore + + +def _onerror_ignore(*_args: Any) -> None: + pass + + +def _onerror_reraise(*_args: Any) -> None: + raise # noqa: PLE0704 - Bare exception used to reraise existing exception + + +def rmtree_errorhandler( + func: FunctionType, + path: Path, + exc_info: ExcInfo | BaseException, + *, + onexc: OnExc = _onerror_reraise, +) -> None: + """ + `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`). + + * If a file is readonly then it's write flag is set and operation is + retried. + + * `onerror` is the original callback from `rmtree(... onerror=onerror)` + that is chained at the end if the "rm -f" still fails. 
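
# ---- Editor's aside: rmtree() above is wrapped by pip's retry() decorator; a
# sketch of the same decorator on a deliberately flaky function (hypothetical).
from pip._internal.utils.retry import retry

calls = {"n": 0}

@retry(stop_after_delay=3, wait=0.5)
def flaky() -> int:
    calls["n"] += 1
    if calls["n"] < 3:
        raise OSError("transient failure")  # retried after 0.5s, up to ~3s total
    return calls["n"]

print(flaky())  # 3
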
+ """ + try: + st_mode = os.stat(path).st_mode + except OSError: + # it's equivalent to os.path.exists + return + + if not st_mode & stat.S_IWRITE: + # convert to read/write + try: + os.chmod(path, st_mode | stat.S_IWRITE) + except OSError: + pass + else: + # use the original function to repeat the operation + try: + func(path) + return + except OSError: + pass + + if not isinstance(exc_info, BaseException): + _, exc_info, _ = exc_info + onexc(func, path, exc_info) + + +def display_path(path: str) -> str: + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if path.startswith(os.getcwd() + os.path.sep): + path = "." + path[len(os.getcwd()) :] + return path + + +def backup_dir(dir: str, ext: str = ".bak") -> str: + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message: str, options: Iterable[str]) -> str: + for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): + if action in options: + return action + return ask(message, options) + + +def _check_no_input(message: str) -> None: + """Raise an error if no input is allowed.""" + if os.environ.get("PIP_NO_INPUT"): + raise Exception( + f"No input was expected ($PIP_NO_INPUT set); question: {message}" + ) + + +def ask(message: str, options: Iterable[str]) -> str: + """Ask the message interactively, with the given possible responses""" + while 1: + _check_no_input(message) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + "Your response ({!r}) was not one of the expected responses: " + "{}".format(response, ", ".join(options)) + ) + else: + return response + + +def ask_input(message: str) -> str: + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message: str) -> str: + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + +def strtobool(val: str) -> int: + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError(f"invalid truth value {val!r}") + + +def format_size(bytes: float) -> str: + if bytes > 1000 * 1000: + return f"{bytes / 1000.0 / 1000:.1f} MB" + elif bytes > 10 * 1000: + return f"{int(bytes / 1000)} kB" + elif bytes > 1000: + return f"{bytes / 1000.0:.1f} kB" + else: + return f"{int(bytes)} bytes" + + +def tabulate(rows: Iterable[Iterable[Any]]) -> tuple[list[str], list[int]]: + """Return a list of formatted rows and a list of column sizes. + + For example:: + + >>> tabulate([['foobar', 2000], [0xdeadbeef]]) + (['foobar 2000', '3735928559'], [10, 4]) + """ + rows = [tuple(map(str, row)) for row in rows] + sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")] + table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] + return table, sizes + + +def is_installable_dir(path: str) -> bool: + """Is path is a directory containing pyproject.toml or setup.py? 
+ + If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for + a legacy setuptools layout by identifying setup.py. We don't check for the + setup.cfg because using it without setup.py is only available for PEP 517 + projects, which are already covered by the pyproject.toml check. + """ + if not os.path.isdir(path): + return False + if os.path.isfile(os.path.join(path, "pyproject.toml")): + return True + if os.path.isfile(os.path.join(path, "setup.py")): + return True + return False + + +def read_chunks( + file: BinaryIO, size: int = FILE_CHUNK_SIZE +) -> Generator[bytes, None, None]: + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def normalize_path(path: str, resolve_symlinks: bool = True) -> str: + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = os.path.expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path: str) -> tuple[str, str]: + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith(".tar"): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old: str, new: str) -> None: + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path: str) -> bool: + """ + Return True if path is within sys.prefix, if we're running in a virtualenv. + + If we're not in a virtualenv, all paths are considered "local." + + Caution: this function assumes the head of path has been normalized + with normalize_path. + """ + if not running_under_virtualenv(): + return True + return path.startswith(normalize_path(sys.prefix)) + + +def write_output(msg: Any, *args: Any) -> None: + logger.info(msg, *args) + + +class StreamWrapper(StringIO): + orig_stream: TextIO + + @classmethod + def from_stream(cls, orig_stream: TextIO) -> StreamWrapper: + ret = cls() + ret.orig_stream = orig_stream + return ret + + # compileall.compile_dir() needs stdout.encoding to print to stdout + # type ignore is because TextIOBase.encoding is writeable + @property + def encoding(self) -> str: # type: ignore + return self.orig_stream.encoding + + +# Simulates an enum +def enum(*sequential: Any, **named: Any) -> type[Any]: + enums = dict(zip(sequential, range(len(sequential))), **named) + reverse = {value: key for key, value in enums.items()} + enums["reverse_mapping"] = reverse + return type("Enum", (), enums) + + +def build_netloc(host: str, port: int | None) -> str: + """ + Build a netloc from a host-port pair + """ + if port is None: + return host + if ":" in host: + # Only wrap host with square brackets when it is IPv6 + host = f"[{host}]" + return f"{host}:{port}" + + +def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: + """ + Build a full URL from a netloc. + """ + if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. 
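
# ---- Editor's aside: the small formatting helpers defined above, applied to
# sample values (outputs follow directly from the code).
from pip._internal.utils.misc import format_size, splitext, tabulate

print(format_size(520))        # "520 bytes"
print(format_size(52_000))     # "52 kB"
print(format_size(5_200_000))  # "5.2 MB"

print(splitext("dist/pkg-1.0.tar.gz"))  # ('dist/pkg-1.0', '.tar.gz')

table, sizes = tabulate([["foobar", 2000], [0xDEADBEEF]])
print(table)  # ['foobar     2000', '3735928559']
print(sizes)  # [10, 4]
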
+ netloc = f"[{netloc}]" + return f"{scheme}://{netloc}" + + +def parse_netloc(netloc: str) -> tuple[str | None, int | None]: + """ + Return the host-port pair from a netloc. + """ + url = build_url_from_netloc(netloc) + parsed = urllib.parse.urlparse(url) + return parsed.hostname, parsed.port + + +def split_auth_from_netloc(netloc: str) -> NetlocTuple: + """ + Parse out and remove the auth information from a netloc. + + Returns: (netloc, (username, password)). + """ + if "@" not in netloc: + return netloc, (None, None) + + # Split from the right because that's how urllib.parse.urlsplit() + # behaves if more than one @ is present (which can be checked using + # the password attribute of urlsplit()'s return value). + auth, netloc = netloc.rsplit("@", 1) + pw: str | None = None + if ":" in auth: + # Split from the left because that's how urllib.parse.urlsplit() + # behaves if more than one : is present (which again can be checked + # using the password attribute of the return value) + user, pw = auth.split(":", 1) + else: + user, pw = auth, None + + user = urllib.parse.unquote(user) + if pw is not None: + pw = urllib.parse.unquote(pw) + + return netloc, (user, pw) + + +def redact_netloc(netloc: str) -> str: + """ + Replace the sensitive data in a netloc with "****", if it exists. + + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" + """ + netloc, (user, password) = split_auth_from_netloc(netloc) + if user is None: + return netloc + if password is None: + user = "****" + password = "" + else: + user = urllib.parse.quote(user) + password = ":****" + return f"{user}{password}@{netloc}" + + +def _transform_url( + url: str, transform_netloc: Callable[[str], tuple[Any, ...]] +) -> tuple[str, NetlocTuple]: + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. + """ + purl = urllib.parse.urlsplit(url) + netloc_tuple = transform_netloc(purl.netloc) + # stripped url + url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment) + surl = urllib.parse.urlunsplit(url_pieces) + return surl, cast("NetlocTuple", netloc_tuple) + + +def _get_netloc(netloc: str) -> NetlocTuple: + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc: str) -> tuple[str]: + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url( + url: str, +) -> tuple[str, str, tuple[str | None, str | None]]: + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth + + +def remove_auth_from_url(url: str) -> str: + """Return a copy of url with 'username:password@' removed.""" + # username/pass params are passed to subversion through flags + # and are not recognized in the url. 
+    return _transform_url(url, _get_netloc)[0]
+
+
+def redact_auth_from_url(url: str) -> str:
+    """Replace the password in a given url with ****."""
+    return _transform_url(url, _redact_netloc)[0]
+
+
+def redact_auth_from_requirement(req: Requirement) -> str:
+    """Replace the password in a given requirement url with ****."""
+    if not req.url:
+        return str(req)
+    return str(req).replace(req.url, redact_auth_from_url(req.url))
+
+
+@dataclass(frozen=True)
+class HiddenText:
+    secret: str
+    redacted: str
+
+    def __repr__(self) -> str:
+        return f"<HiddenText {str(self)!r}>"
+
+    def __str__(self) -> str:
+        return self.redacted
+
+    # This is useful for testing.
+    def __eq__(self, other: Any) -> bool:
+        if type(self) is not type(other):
+            return False
+
+        # The string being used for redaction doesn't also have to match,
+        # just the raw, original string.
+        return self.secret == other.secret
+
+
+def hide_value(value: str) -> HiddenText:
+    return HiddenText(value, redacted="****")
+
+
+def hide_url(url: str) -> HiddenText:
+    redacted = redact_auth_from_url(url)
+    return HiddenText(url, redacted=redacted)
+
+
+def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
+    """Protection of pip.exe from modification on Windows
+
+    On Windows, any operation modifying pip should be run as:
+        python -m pip ...
+    """
+    pip_names = [
+        "pip",
+        f"pip{sys.version_info.major}",
+        f"pip{sys.version_info.major}.{sys.version_info.minor}",
+    ]
+
+    # See https://github.com/pypa/pip/issues/1299 for more discussion
+    should_show_use_python_msg = (
+        modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
+    )
+
+    if should_show_use_python_msg:
+        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
+        raise CommandError(
+            "To modify pip, please run the following command:\n{}".format(
+                " ".join(new_command)
+            )
+        )
+
+
+def check_externally_managed() -> None:
+    """Check whether the current environment is externally managed.
+
+    If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
+    is considered externally managed, and an ExternallyManagedEnvironment is
+    raised.
+    """
+    if running_under_virtualenv():
+        return
+    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
+    if not os.path.isfile(marker):
+        return
+    raise ExternallyManagedEnvironment.from_config(marker)
+
+
+def is_console_interactive() -> bool:
+    """Is this console interactive?"""
+    return sys.stdin is not None and sys.stdin.isatty()
+
+
+def hash_file(path: str, blocksize: int = 1 << 20) -> tuple[Any, int]:
+    """Return (hash, length) for path using hashlib.sha256()"""
+
+    h = hashlib.sha256()
+    length = 0
+    with open(path, "rb") as f:
+        for block in read_chunks(f, size=blocksize):
+            length += len(block)
+            h.update(block)
+    return h, length
+
+
+def pairwise(iterable: Iterable[Any]) -> Iterator[tuple[Any, Any]]:
+    """
+    Return paired elements.
+
+    For example:
+        s -> (s0, s1), (s2, s3), (s4, s5), ...
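
# ---- Editor's aside: hide_url()/HiddenText above keep the real value for
# execution while printing only the redacted form (URL is made up).
from pip._internal.utils.misc import hide_url

hidden = hide_url("https://user:s3cret@example.com/simple")
print(hidden)         # https://user:****@example.com/simple
print(hidden.secret)  # the original URL, for actual use
print(repr(hidden))   # <HiddenText 'https://user:****@example.com/simple'>
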
+ """ + iterable = iter(iterable) + return zip_longest(iterable, iterable) + + +def partition( + pred: Callable[[T], bool], iterable: Iterable[T] +) -> tuple[Iterable[T], Iterable[T]]: + """ + Use a predicate to partition entries into false entries and true entries, + like + + partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 + """ + t1, t2 = tee(iterable) + return filterfalse(pred, t1), filter(pred, t2) + + +class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller): + def __init__( + self, + config_holder: Any, + source_dir: str, + build_backend: str, + backend_path: str | None = None, + runner: Callable[..., None] | None = None, + python_executable: str | None = None, + ): + super().__init__( + source_dir, build_backend, backend_path, runner, python_executable + ) + self.config_holder = config_holder + + def build_wheel( + self, + wheel_directory: str, + config_settings: Mapping[str, Any] | None = None, + metadata_directory: str | None = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_wheel( + wheel_directory, config_settings=cs, metadata_directory=metadata_directory + ) + + def build_sdist( + self, + sdist_directory: str, + config_settings: Mapping[str, Any] | None = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_sdist(sdist_directory, config_settings=cs) + + def build_editable( + self, + wheel_directory: str, + config_settings: Mapping[str, Any] | None = None, + metadata_directory: str | None = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_editable( + wheel_directory, config_settings=cs, metadata_directory=metadata_directory + ) + + def get_requires_for_build_wheel( + self, config_settings: Mapping[str, Any] | None = None + ) -> Sequence[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_wheel(config_settings=cs) + + def get_requires_for_build_sdist( + self, config_settings: Mapping[str, Any] | None = None + ) -> Sequence[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_sdist(config_settings=cs) + + def get_requires_for_build_editable( + self, config_settings: Mapping[str, Any] | None = None + ) -> Sequence[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_editable(config_settings=cs) + + def prepare_metadata_for_build_wheel( + self, + metadata_directory: str, + config_settings: Mapping[str, Any] | None = None, + _allow_fallback: bool = True, + ) -> str: + cs = self.config_holder.config_settings + return super().prepare_metadata_for_build_wheel( + metadata_directory=metadata_directory, + config_settings=cs, + _allow_fallback=_allow_fallback, + ) + + def prepare_metadata_for_build_editable( + self, + metadata_directory: str, + config_settings: Mapping[str, Any] | None = None, + _allow_fallback: bool = True, + ) -> str | None: + cs = self.config_holder.config_settings + return super().prepare_metadata_for_build_editable( + metadata_directory=metadata_directory, + config_settings=cs, + _allow_fallback=_allow_fallback, + ) + + +def warn_if_run_as_root() -> None: + """Output a warning for sudo users on Unix. + + In a virtual environment, sudo pip still writes to virtualenv. + On Windows, users may run pip as Administrator without issues. + This warning only applies to Unix root users outside of virtualenv. + """ + if running_under_virtualenv(): + return + if not hasattr(os, "getuid"): + return + # On Windows, there are no "system managed" Python packages. 
Installing as + # Administrator via pip is the correct way of updating system environments. + # + # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform + # checks: https://mypy.readthedocs.io/en/stable/common_issues.html + if sys.platform == "win32" or sys.platform == "cygwin": + return + + if os.getuid() != 0: + return + + logger.warning( + "Running pip as the 'root' user can result in broken permissions and " + "conflicting behaviour with the system package manager, possibly " + "rendering your system unusable. " + "It is recommended to use a virtual environment instead: " + "https://pip.pypa.io/warnings/venv. " + "Use the --root-user-action option if you know what you are doing and " + "want to suppress this warning." + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py new file mode 100644 index 0000000..3cbc049 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/packaging.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +import functools +import logging + +from pip._vendor.packaging import specifiers, version +from pip._vendor.packaging.requirements import Requirement + +logger = logging.getLogger(__name__) + + +@functools.lru_cache(maxsize=32) +def check_requires_python( + requires_python: str | None, version_info: tuple[int, ...] +) -> bool: + """ + Check if the given Python version matches a "Requires-Python" specifier. + + :param version_info: A 3-tuple of ints representing a Python + major-minor-micro version to check (e.g. `sys.version_info[:3]`). + + :return: `True` if the given Python version satisfies the requirement. + Otherwise, return `False`. + + :raises InvalidSpecifier: If `requires_python` has an invalid format. + """ + if requires_python is None: + # The package provides no information + return True + requires_python_specifier = specifiers.SpecifierSet(requires_python) + + python_version = version.parse(".".join(map(str, version_info))) + return python_version in requires_python_specifier + + +@functools.lru_cache(maxsize=10000) +def get_requirement(req_string: str) -> Requirement: + """Construct a packaging.Requirement object with caching""" + # Parsing requirement strings is expensive, and is also expected to happen + # with a low diversity of different arguments (at least relative the number + # constructed). This method adds a cache to requirement object creation to + # minimize repeated parsing of the same string to construct equivalent + # Requirement objects. + return Requirement(req_string) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/retry.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/retry.py new file mode 100644 index 0000000..27d3b6e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/retry.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import functools +from time import perf_counter, sleep +from typing import TYPE_CHECKING, Callable, TypeVar + +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + T = TypeVar("T") + P = ParamSpec("P") + + +def retry( + wait: float, stop_after_delay: float +) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Decorator to automatically retry a function on error. + + If the function raises, the function is recalled with the same arguments + until it returns or the time limit is reached. When the time limit is + surpassed, the last exception raised is reraised. 
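
# ---- Editor's aside: check_requires_python() above against sample version
# tuples (specifier strings follow PEP 440).
from pip._internal.utils.packaging import check_requires_python

print(check_requires_python(">=3.9", (3, 12, 1)))  # True
print(check_requires_python(">=3.9", (3, 8, 0)))   # False
print(check_requires_python(None, (3, 8, 0)))      # True - no restriction
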
+ + :param wait: The time to wait after an error before retrying, in seconds. + :param stop_after_delay: The time limit after which retries will cease, + in seconds. + """ + + def wrapper(func: Callable[P, T]) -> Callable[P, T]: + + @functools.wraps(func) + def retry_wrapped(*args: P.args, **kwargs: P.kwargs) -> T: + # The performance counter is monotonic on all platforms we care + # about and has much better resolution than time.monotonic(). + start_time = perf_counter() + while True: + try: + return func(*args, **kwargs) + except Exception: + if perf_counter() - start_time > stop_after_delay: + raise + sleep(wait) + + return retry_wrapped + + return wrapper diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 0000000..3e7b83f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +import logging +import os +import shlex +import subprocess +from collections.abc import Iterable, Mapping +from typing import Any, Callable, Literal, Union + +from pip._vendor.rich.markup import escape + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationSubprocessError +from pip._internal.utils.logging import VERBOSE, subprocess_logger +from pip._internal.utils.misc import HiddenText + +CommandArgs = list[Union[str, HiddenText]] + + +def make_command(*args: str | HiddenText | CommandArgs) -> CommandArgs: + """ + Create a CommandArgs object. + """ + command_args: CommandArgs = [] + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args: list[str] | CommandArgs) -> str: + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return " ".join( + shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg) + for arg in args + ) + + +def reveal_command_args(args: list[str] | CommandArgs) -> list[str]: + """ + Return the arguments in their raw, unredacted form. + """ + return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args] + + +def call_subprocess( + cmd: list[str] | CommandArgs, + show_stdout: bool = False, + cwd: str | None = None, + on_returncode: Literal["raise", "warn", "ignore"] = "raise", + extra_ok_returncodes: Iterable[int] | None = None, + extra_environ: Mapping[str, Any] | None = None, + unset_environ: Iterable[str] | None = None, + spinner: SpinnerInterface | None = None, + log_failed_cmd: bool | None = True, + stdout_only: bool | None = False, + *, + command_desc: str, +) -> str: + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. 
+ unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + stdout_only: if true, return only stdout, else return both. When true, + logging of both stdout and stderr occurs when the subprocess has + terminated, else logging occurs as subprocess output is produced. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess: Callable[..., None] = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using VERBOSE. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.verbose + used_level = VERBOSE + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, + cwd=cwd, + env=env, + errors="backslashreplace", + ) + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", + exc, + command_desc, + ) + raise + all_output = [] + if not stdout_only: + assert proc.stdout + assert proc.stdin + proc.stdin.close() + # In this mode, stdout and stderr are in the same pipe. + while True: + line: str = proc.stdout.readline() + if not line: + break + line = line.rstrip() + all_output.append(line + "\n") + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + output = "".join(all_output) + else: + # In this mode, stdout and stderr are in different pipes. + # We must use communicate() which is the only safe way to read both. 
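+        # (Sketch of the hazard: looping over readline() on stdout while the
+        # child fills stderr's OS pipe buffer would block the child and
+        # deadlock us; communicate() drains both pipes concurrently.)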
+ out, err = proc.communicate() + # log line by line to preserve pip log indenting + for out_line in out.splitlines(): + log_subprocess(out_line) + all_output.append(out) + for err_line in err.splitlines(): + log_subprocess(err_line) + all_output.append(err) + output = out + + proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == "raise": + error = InstallationSubprocessError( + command_description=command_desc, + exit_code=proc.returncode, + output_lines=all_output if not showing_subprocess else None, + ) + if log_failed_cmd: + subprocess_logger.error("%s", error, extra={"rich": True}) + subprocess_logger.verbose( + "[bold magenta]full command[/]: [blue]%s[/]", + escape(format_command_args(cmd)), + extra={"markup": True}, + ) + subprocess_logger.verbose( + "[bold magenta]cwd[/]: %s", + escape(cwd or "[inherit]"), + extra={"markup": True}, + ) + + raise error + elif on_returncode == "warn": + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == "ignore": + pass + else: + raise ValueError(f"Invalid value: on_returncode={on_returncode!r}") + return output + + +def runner_with_spinner_message(message: str) -> Callable[..., None]: + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for BuildBackendHookCaller. Thus, the runner has + an API that matches what's expected by BuildBackendHookCaller.subprocess_runner. + """ + + def runner( + cmd: list[str], + cwd: str | None = None, + extra_environ: Mapping[str, Any] | None = None, + ) -> None: + with open_spinner(message) as spinner: + call_subprocess( + cmd, + command_desc=message, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000..a9afa76 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,294 @@ +from __future__ import annotations + +import errno +import itertools +import logging +import os.path +import tempfile +import traceback +from collections.abc import Generator +from contextlib import ExitStack, contextmanager +from pathlib import Path +from typing import ( + Any, + Callable, + TypeVar, +) + +from pip._internal.utils.misc import enum, rmtree + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="TempDirectory") + + +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. +tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager: ExitStack | None = None + + +@contextmanager +def global_tempdir_manager() -> Generator[None, None, None]: + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry: + """Manages temp directory behavior""" + + def __init__(self) -> None: + self._should_delete: dict[str, bool] = {} + + def set_delete(self, kind: str, value: bool) -> None: + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. 
+ """ + self._should_delete[kind] = value + + def get_delete(self, kind: str) -> bool: + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry: TempDirectoryTypeRegistry | None = None + + +@contextmanager +def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]: + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default: + pass + + +_default = _Default() + + +class TempDirectory: + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + cleanup() + Deletes the temporary directory + + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. + """ + + def __init__( + self, + path: str | None = None, + delete: bool | None | _Default = _default, + kind: str = "temp", + globally_managed: bool = False, + ignore_cleanup_errors: bool = True, + ): + super().__init__() + + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. + delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. + delete = None + + # The only time we specify path is in for editables where it + # is the value of the --src option. + if path is None: + path = self._create(kind) + + self._path = path + self._deleted = False + self.delete = delete + self.kind = kind + self.ignore_cleanup_errors = ignore_cleanup_errors + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self) -> str: + assert not self._deleted, f"Attempted to access deleted path: {self._path}" + return self._path + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.path!r}>" + + def __enter__(self: _T) -> _T: + return self + + def __exit__(self, exc: Any, value: Any, tb: Any) -> None: + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: + self.cleanup() + + def _create(self, kind: str) -> str: + """Create a temporary directory and store its path in self.path""" + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. 
+ path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + logger.debug("Created temporary directory: %s", path) + return path + + def cleanup(self) -> None: + """Remove the temporary directory created and reset state""" + self._deleted = True + if not os.path.exists(self._path): + return + + errors: list[BaseException] = [] + + def onerror( + func: Callable[..., Any], + path: Path, + exc_val: BaseException, + ) -> None: + """Log a warning for a `rmtree` error and continue""" + formatted_exc = "\n".join( + traceback.format_exception_only(type(exc_val), exc_val) + ) + formatted_exc = formatted_exc.rstrip() # remove trailing new line + if func in (os.unlink, os.remove, os.rmdir): + logger.debug( + "Failed to remove a temporary file '%s' due to %s.\n", + path, + formatted_exc, + ) + else: + logger.debug("%s failed with %s.", func.__qualname__, formatted_exc) + errors.append(exc_val) + + if self.ignore_cleanup_errors: + try: + # first try with @retry; retrying to handle ephemeral errors + rmtree(self._path, ignore_errors=False) + except OSError: + # last pass ignore/log all errors + rmtree(self._path, onexc=onerror) + if errors: + logger.warning( + "Failed to remove contents in a temporary directory '%s'.\n" + "You can safely remove it manually.", + self._path, + ) + else: + rmtree(self._path) + + +class AdjacentTempDirectory(TempDirectory): + """Helper class that creates a temporary directory adjacent to a real one. + + Attributes: + original + The original directory to create a temp directory for. + path + After calling create() or entering, contains the full + path to the temporary directory. + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + """ + + # The characters that may be used to name the temp directory + # We always prepend a ~ and then rotate through these until + # a usable name is found. + # pkg_resources raises a different error for .dist-info folder + # with leading '-' and invalid metadata + LEADING_CHARS = "-~.=%0123456789" + + def __init__(self, original: str, delete: bool | None = None) -> None: + self.original = original.rstrip("/\\") + super().__init__(delete=delete) + + @classmethod + def _generate_names(cls, name: str) -> Generator[str, None, None]: + """Generates a series of temporary names. + + The algorithm replaces the leading characters in the name + with ones that are valid filesystem characters, but are not + valid package names (for both Python and pip definitions of + package). + """ + for i in range(1, len(name)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i - 1 + ): + new_name = "~" + "".join(candidate) + name[i:] + if new_name != name: + yield new_name + + # If we make it this far, we will have to make a longer name + for i in range(len(cls.LEADING_CHARS)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i + ): + new_name = "~" + "".join(candidate) + name + if new_name != name: + yield new_name + + def _create(self, kind: str) -> str: + root, name = os.path.split(self.original) + for candidate in self._generate_names(name): + path = os.path.join(root, candidate) + try: + os.mkdir(path) + except OSError as ex: + # Continue if the name exists already + if ex.errno != errno.EEXIST: + raise + else: + path = os.path.realpath(path) + break + else: + # Final fallback on the default behavior. 
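+            # (Reached when every adjacent candidate name already exists,
+            # e.g. '~ypkg', '~-pkg', '~~pkg', ... for an original 'mypkg';
+            # fall back to a pip-<kind>- directory in the system tempdir.)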
+ path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 0000000..bc950ac --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,362 @@ +"""Utilities related archives.""" + +from __future__ import annotations + +import logging +import os +import shutil +import stat +import sys +import tarfile +import zipfile +from collections.abc import Iterable +from zipfile import ZipInfo + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug("bz2 module is not available") + +try: + # Only for Python 3.3+ + import lzma # noqa + + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug("lzma module is not available") + + +def current_umask() -> int: + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path: str) -> list[str]: + path = path.lstrip("/").lstrip("\\") + if "/" in path and ( + ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path + ): + return path.split("/", 1) + elif "\\" in path: + return path.split("\\", 1) + else: + return [path, ""] + + +def has_leading_dir(paths: Iterable[str]) -> bool: + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory: str, target: str) -> bool: + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def _get_default_mode_plus_executable() -> int: + return 0o777 & ~current_umask() | 0o111 + + +def set_extracted_file_to_default_mode_plus_executable(path: str) -> None: + """ + Make file present at path have execute for user/group/world + (chmod +x) is no-op on windows per python docs + """ + os.chmod(path, _get_default_mode_plus_executable()) + + +def zip_item_is_executable(info: ZipInfo) -> bool: + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename: str, location: str, flatten: bool = True) -> None: + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. 
Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + zipfp = open(filename, "rb") + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + "The zip file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith(("/", "\\")): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename: str, location: str) -> None: + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied on top of the + default. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): + mode = "r:gz" + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = "r:bz2" + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = "r:xz" + elif filename.lower().endswith(".tar"): + mode = "r" + else: + logger.warning( + "Cannot determine compression type for file %s", + filename, + ) + mode = "r:*" + + tar = tarfile.open(filename, mode, encoding="utf-8") # type: ignore + try: + leading = has_leading_dir([member.name for member in tar.getmembers()]) + + # PEP 706 added `tarfile.data_filter`, and made some other changes to + # Python's tarfile module (see below). The features were backported to + # security releases. + try: + data_filter = tarfile.data_filter + except AttributeError: + _untar_without_filter(filename, location, tar, leading) + else: + default_mode_plus_executable = _get_default_mode_plus_executable() + + if leading: + # Strip the leading directory from all files in the archive, + # including hardlink targets (which are relative to the + # unpack location). 
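+                # (For instance, member 'pkg-1.0/src/mod.py' becomes
+                # 'src/mod.py', and a hardlink whose target is
+                # 'pkg-1.0/src/mod.py' is rewritten the same way so it still
+                # resolves inside the unpack location.)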
+                for member in tar.getmembers():
+                    name_lead, name_rest = split_leading_dir(member.name)
+                    member.name = name_rest
+                    if member.islnk():
+                        lnk_lead, lnk_rest = split_leading_dir(member.linkname)
+                        if lnk_lead == name_lead:
+                            member.linkname = lnk_rest
+
+            def pip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
+                orig_mode = member.mode
+                try:
+                    try:
+                        member = data_filter(member, location)
+                    except tarfile.LinkOutsideDestinationError:
+                        if sys.version_info[:3] in {
+                            (3, 9, 17),
+                            (3, 10, 12),
+                            (3, 11, 4),
+                        }:
+                            # The tarfile filter in specific Python versions
+                            # raises LinkOutsideDestinationError on valid input
+                            # (https://github.com/python/cpython/issues/107845)
+                            # Ignore the error there, but do use the
+                            # more lax `tar_filter`
+                            member = tarfile.tar_filter(member, location)
+                        else:
+                            raise
+                except tarfile.TarError as exc:
+                    message = "Invalid member in the tar file {}: {}"
+                    # Filter error messages mention the member name.
+                    # No need to add it here.
+                    raise InstallationError(
+                        message.format(
+                            filename,
+                            exc,
+                        )
+                    )
+                if member.isfile() and orig_mode & 0o111:
+                    member.mode = default_mode_plus_executable
+                else:
+                    # See PEP 706 note above.
+                    # The PEP changed this from `int` to `Optional[int]`,
+                    # where None means "use the default". Mypy doesn't
+                    # know this yet.
+                    member.mode = None  # type: ignore [assignment]
+                return member
+
+            tar.extractall(location, filter=pip_filter)
+
+    finally:
+        tar.close()
+
+
+def is_symlink_target_in_tar(tar: tarfile.TarFile, tarinfo: tarfile.TarInfo) -> bool:
+    """Check if the file pointed to by the symbolic link is in the tar archive"""
+    linkname = os.path.join(os.path.dirname(tarinfo.name), tarinfo.linkname)
+
+    linkname = os.path.normpath(linkname)
+    linkname = linkname.replace("\\", "/")
+
+    try:
+        tar.getmember(linkname)
+        return True
+    except KeyError:
+        return False
+
+
+def _untar_without_filter(
+    filename: str,
+    location: str,
+    tar: tarfile.TarFile,
+    leading: bool,
+) -> None:
+    """Fallback for Python without tarfile.data_filter"""
+    # NOTE: This function can be removed once pip requires CPython >= 3.12.
+    # PEP 706 added tarfile.data_filter, which makes tarfile extraction more
+    # secure; the feature is fully supported from CPython 3.12 onward.
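+    # (The filters were also backported in the 3.11.4, 3.10.12 and 3.9.17
+    # security releases, matching the version set checked in pip_filter
+    # above, so this fallback only runs on older patch levels.)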
+ for member in tar.getmembers(): + fn = member.name + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, path, location)) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + if not is_symlink_target_in_tar(tar, member): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError( + message.format(filename, member.name, member.linkname) + ) + try: + tar._extract_member(member, path) + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + + +def unpack_file( + filename: str, + location: str, + content_type: str | None = None, +) -> None: + filename = os.path.realpath(filename) + if ( + content_type == "application/zip" + or filename.lower().endswith(ZIP_EXTENSIONS) + or zipfile.is_zipfile(filename) + ): + unzip_file(filename, location, flatten=not filename.endswith(".whl")) + elif ( + content_type == "application/x-gzip" + or tarfile.is_tarfile(filename) + or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + "Cannot unpack file %s (downloaded from %s, content-type: %s); " + "cannot detect archive format", + filename, + location, + content_type, + ) + raise InstallationError(f"Cannot determine archive format of {location}") diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/urls.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 0000000..e951a5e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,55 @@ +import os +import string +import urllib.parse +import urllib.request + +from .compat import WINDOWS + + +def path_to_url(path: str) -> str: + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib.parse.urljoin("file://", urllib.request.pathname2url(path)) + return url + + +def url_to_path(url: str) -> str: + """ + Convert a file: URL to a path. + """ + assert url.startswith( + "file:" + ), f"You can only turn file: urls into filenames (not {url!r})" + + _, netloc, path, _, _ = urllib.parse.urlsplit(url) + + if not netloc or netloc == "localhost": + # According to RFC 8089, same as empty authority. + netloc = "" + elif WINDOWS: + # If we have a UNC path, prepend UNC share notation. 
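+        # (Illustrative: 'file://server/share/f.txt' has netloc 'server',
+        # which becomes '\\server' so that url2pathname() yields
+        # '\\server\share\f.txt' on Windows.)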
+ netloc = "\\\\" + netloc + else: + raise ValueError( + f"non-local file URIs are not supported on this platform: {url!r}" + ) + + path = urllib.request.url2pathname(netloc + path) + + # On Windows, urlsplit parses the path as something like "/C:/Users/foo". + # This creates issues for path-related functions like io.open(), so we try + # to detect and strip the leading slash. + if ( + WINDOWS + and not netloc # Not UNC. + and len(path) >= 3 + and path[0] == "/" # Leading slash to strip. + and path[1] in string.ascii_letters # Drive letter. + and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path. + ): + path = path[1:] + + return path diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 0000000..b1742a3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +import logging +import os +import re +import site +import sys + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?Ptrue|false)" +) + + +def _running_under_venv() -> bool: + """Checks if sys.base_prefix and sys.prefix match. + + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_legacy_virtualenv() -> bool: + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. + """ + # pypa/virtualenv case + return hasattr(sys, "real_prefix") + + +def running_under_virtualenv() -> bool: + """True if we're running inside a virtual environment, False otherwise.""" + return _running_under_venv() or _running_under_legacy_virtualenv() + + +def _get_pyvenv_cfg_lines() -> list[str] | None: + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. + """ + pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg") + try: + # Although PEP 405 does not specify, the built-in venv module always + # writes with UTF-8. (pypa/pip#8717) + with open(pyvenv_cfg_file, encoding="utf-8") as f: + return f.read().splitlines() # avoids trailing newlines + except OSError: + return None + + +def _no_global_under_venv() -> bool: + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." 
+ ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group("value") == "false": + return True + return False + + +def _no_global_under_legacy_virtualenv() -> bool: + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. + """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, + "no-global-site-packages.txt", + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global() -> bool: + """Returns a boolean, whether running in venv with no system site-packages.""" + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_legacy_virtualenv(): + return _no_global_under_legacy_virtualenv() + + return False diff --git a/venv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py b/venv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 0000000..789e736 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,132 @@ +"""Support functions for working with wheel files.""" + +import logging +from email.message import Message +from email.parser import Parser +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import UnsupportedWheel + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +def parse_wheel(wheel_zip: ZipFile, name: str) -> tuple[str, Message]: + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source: ZipFile, name: str) -> str: + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. 
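+
+    For example, a wheel for 'requests' is expected to contain exactly one
+    top-level 'requests-X.Y.Z.dist-info/' directory (version illustrative);
+    both names are canonicalized before comparison, so a
+    'Requests-X.Y.Z.dist-info' directory would also be accepted.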
+ """ + # Zip file path separators must be / + subdirs = {p.split("/", 1)[0] for p in source.namelist()} + + info_dirs = [s for s in subdirs if s.endswith(".dist-info")] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format(", ".join(info_dirs)) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + f".dist-info directory {info_dir!r} does not start with {canonical_name!r}" + ) + + return info_dir + + +def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes: + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel(f"could not read {path!r} file: {e!r}") + + +def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message: + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = f"{dist_info_dir}/WHEEL" + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = wheel_contents.decode() + except UnicodeDecodeError as e: + raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data: Message) -> tuple[int, ...]: + """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split("."))) + except ValueError: + raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") + + +def check_compatibility(version: tuple[int, ...], name: str) -> None: + """Raises errors or warns if called with an incompatible Wheel-Version. + + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, ".".join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + "Installing from a newer Wheel-Version (%s)", + ".".join(map(str, version)), + ) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py new file mode 100644 index 0000000..b6beddb --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__init__.py @@ -0,0 +1,15 @@ +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. 
+# (The test directory may still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + RemoteNotValidError, + is_url, + make_vcs_requirement_url, + vcs, +) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..65bfa90 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc new file mode 100644 index 0000000..ef0695e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc new file mode 100644 index 0000000..625e398 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/git.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc new file mode 100644 index 0000000..b50865e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc new file mode 100644 index 0000000..b29cb1e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc new file mode 100644 index 0000000..0bf3a56 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py b/venv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 0000000..3a8a21e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,130 @@ +from __future__ import annotations + +import logging + +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = "bzr" + dirname = ".bzr" + repo_name = "branch" + schemes = ( + "bzr+http", + "bzr+https", + "bzr+ssh", + "bzr+sftp", + "bzr+ftp", + "bzr+lp", + "bzr+file", + ) + + @staticmethod + def get_base_rev_args(rev: 
str) -> list[str]: + return ["-r", rev] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flags = ["--quiet"] + elif verbosity == 1: + flags = [] + else: + flags = [f"-{'v'*verbosity}"] + cmd_args = make_command( + "checkout", "--lightweight", *flags, rev_options.to_args(), url, dest + ) + self.run_command(cmd_args) + + def switch( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + self.run_command(make_command("switch", url), cwd=dest) + + def update( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + flags = [] + + if verbosity <= 0: + flags.append("-q") + + output = self.run_command( + make_command("info"), show_stdout=False, stdout_only=True, cwd=dest + ) + if output.startswith("Standalone "): + # Older versions of pip used to create standalone branches. + # Convert the standalone branch to a checkout by calling "bzr bind". + cmd_args = make_command("bind", *flags, url) + self.run_command(cmd_args, cwd=dest) + + cmd_args = make_command("update", *flags, rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> tuple[str, str | None, AuthInfo]: + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// re-add it + url, rev, user_pass = super().get_url_rev_and_auth(url) + if url.startswith("ssh://"): + url = "bzr+" + url + return url, rev, user_pass + + @classmethod + def get_remote_url(cls, location: str) -> str: + urls = cls.run_command( + ["info"], show_stdout=False, stdout_only=True, cwd=location + ) + for line in urls.splitlines(): + line = line.strip() + for x in ("checkout of branch: ", "parent branch: "): + if line.startswith(x): + repo = line.split(x)[1] + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo + raise RemoteNotFoundError + + @classmethod + def get_revision(cls, location: str) -> str: + revision = cls.run_command( + ["revno"], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return revision.splitlines()[-1] + + @classmethod + def is_commit_id_equal(cls, dest: str, name: str | None) -> bool: + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/git.py b/venv/lib/python3.12/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 0000000..1769da7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,571 @@ +from __future__ import annotations + +import logging +import os.path +import pathlib +import re +import urllib.parse +import urllib.request +from dataclasses import replace +from typing import Any + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path, hide_url +from pip._internal.utils.subprocess import make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RemoteNotValidError, + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +urlsplit = urllib.parse.urlsplit +urlunsplit = urllib.parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +GIT_VERSION_REGEX = re.compile( + r"^git version " # Prefix. + r"(\d+)" # Major. 
+ r"\.(\d+)" # Dot, minor. + r"(?:\.(\d+))?" # Optional dot, patch. + r".*$" # Suffix, including any pre- and post-release segments we don't care about. +) + +HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$") + +# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git' +SCP_REGEX = re.compile( + r"""^ + # Optional user, e.g. 'git@' + (\w+@)? + # Server, e.g. 'github.com'. + ([^/:]+): + # The server-side path. e.g. 'user/project.git'. Must start with an + # alphanumeric character so as not to be confusable with a Windows paths + # like 'C:/foo/bar' or 'C:\foo\bar'. + (\w[^:]*) + $""", + re.VERBOSE, +) + + +def looks_like_hash(sha: str) -> bool: + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = "git" + dirname = ".git" + repo_name = "clone" + schemes = ( + "git+http", + "git+https", + "git+ssh", + "git+git", + "git+file", + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ("GIT_DIR", "GIT_WORK_TREE") + default_arg_rev = "HEAD" + + @staticmethod + def get_base_rev_args(rev: str) -> list[str]: + return [rev] + + @classmethod + def run_command(cls, *args: Any, **kwargs: Any) -> str: + if os.environ.get("PIP_NO_INPUT"): + extra_environ = kwargs.get("extra_environ", {}) + extra_environ["GIT_TERMINAL_PROMPT"] = "0" + extra_environ["GIT_SSH_COMMAND"] = "ssh -oBatchMode=yes" + kwargs["extra_environ"] = extra_environ + return super().run_command(*args, **kwargs) + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0]) + return not is_tag_or_branch + + def get_git_version(self) -> tuple[int, ...]: + version = self.run_command( + ["version"], + command_desc="git version", + show_stdout=False, + stdout_only=True, + ) + match = GIT_VERSION_REGEX.match(version) + if not match: + logger.warning("Can't parse git version: %s", version) + return () + return (int(match.group(1)), int(match.group(2))) + + @classmethod + def get_current_branch(cls, location: str) -> str | None: + """ + Return the current branch, or None if HEAD isn't at a branch + (e.g. detached HEAD). + """ + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. + args = ["symbolic-ref", "-q", "HEAD"] + output = cls.run_command( + args, + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + ref = output.strip() + + if ref.startswith("refs/heads/"): + return ref[len("refs/heads/") :] + + return None + + @classmethod + def get_revision_sha(cls, dest: str, rev: str) -> tuple[str | None, bool]: + """ + Return (sha_or_none, is_branch), where sha_or_none is a commit hash + if the revision names a remote branch or tag, otherwise None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. 
+ output = cls.run_command( + ["show-ref", rev], + cwd=dest, + show_stdout=False, + stdout_only=True, + on_returncode="ignore", + ) + refs = {} + # NOTE: We do not use splitlines here since that would split on other + # unicode separators, which can be maliciously used to install a + # different revision. + for line in output.strip().split("\n"): + line = line.rstrip("\r") + if not line: + continue + try: + ref_sha, ref_name = line.split(" ", maxsplit=2) + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. + raise ValueError(f"unexpected show-ref line: {line!r}") + + refs[ref_name] = ref_sha + + branch_ref = f"refs/remotes/origin/{rev}" + tag_ref = f"refs/tags/{rev}" + + sha = refs.get(branch_ref) + if sha is not None: + return (sha, True) + + sha = refs.get(tag_ref) + + return (sha, False) + + @classmethod + def _should_fetch(cls, dest: str, rev: str) -> bool: + """ + Return true if rev is a ref or is a commit that we don't have locally. + + Branches and tags are not considered in this method because they are + assumed to be always available locally (which is a normal outcome of + ``git clone`` and ``git fetch --tags``). + """ + if rev.startswith("refs/"): + # Always fetch remote refs. + return True + + if not looks_like_hash(rev): + # Git fetch would fail with abbreviated commits. + return False + + if cls.has_commit(dest, rev): + # Don't fetch if we have the commit locally. + return False + + return True + + @classmethod + def resolve_revision( + cls, dest: str, url: HiddenText, rev_options: RevOptions + ) -> RevOptions: + """ + Resolve a revision to a new RevOptions object with the SHA1 of the + branch, tag, or ref if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) + + if sha is not None: + rev_options = rev_options.make_new(sha) + rev_options = replace(rev_options, branch_name=(rev if is_branch else None)) + + return rev_options + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.info( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + + if not cls._should_fetch(dest, rev): + return rev_options + + # fetch the requested revision + cls.run_command( + make_command("fetch", "-q", url, rev_options.to_args()), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + sha = cls.get_revision(dest, rev="FETCH_HEAD") + rev_options = rev_options.make_new(sha) + + return rev_options + + @classmethod + def is_commit_id_equal(cls, dest: str, name: str | None) -> bool: + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. + return False + + return cls.get_revision(dest) == name + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest)) + if verbosity <= 0: + flags: tuple[str, ...] 
= ("--quiet",) + elif verbosity == 1: + flags = () + else: + flags = ("--verbose", "--progress") + if self.get_git_version() >= (2, 17): + # Git added support for partial clone in 2.17 + # https://git-scm.com/docs/partial-clone + # Speeds up cloning by functioning without a complete copy of repository + self.run_command( + make_command( + "clone", + "--filter=blob:none", + *flags, + url, + dest, + ) + ) + else: + self.run_command(make_command("clone", *flags, url, dest)) + + if rev_options.rev: + # Then a specific revision was requested. + rev_options = self.resolve_revision(dest, url, rev_options) + branch_name = getattr(rev_options, "branch_name", None) + logger.debug("Rev options %s, branch_name %s", rev_options, branch_name) + if branch_name is None: + # Only do a checkout if the current commit id doesn't match + # the requested revision. + if not self.is_commit_id_equal(dest, rev_options.rev): + cmd_args = make_command( + "checkout", + "-q", + rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. + track_branch = f"origin/{branch_name}" + cmd_args = [ + "checkout", + "-b", + branch_name, + "--track", + track_branch, + ] + self.run_command(cmd_args, cwd=dest) + else: + sha = self.get_revision(dest) + rev_options = rev_options.make_new(sha) + + logger.info("Resolved %s to commit %s", url, rev_options.rev) + + #: repo may contain submodules + self.update_submodules(dest, verbosity=verbosity) + + def switch( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + self.run_command( + make_command("config", "remote.origin.url", url), + cwd=dest, + ) + + extra_flags = [] + + if verbosity <= 0: + extra_flags.append("-q") + + cmd_args = make_command("checkout", *extra_flags, rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest, verbosity=verbosity) + + def update( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + extra_flags = [] + + if verbosity <= 0: + extra_flags.append("-q") + + # First fetch changes from the default remote + if self.get_git_version() >= (1, 9): + # fetch tags in addition to everything else + self.run_command(["fetch", "--tags", *extra_flags], cwd=dest) + else: + self.run_command(["fetch", *extra_flags], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.resolve_revision(dest, url, rev_options) + cmd_args = make_command( + "reset", + "--hard", + *extra_flags, + rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest, verbosity=verbosity) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. 
+ stdout = cls.run_command( + ["config", "--get-regexp", r"remote\..*\.url"], + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + + for remote in remotes: + if remote.startswith("remote.origin.url "): + found_remote = remote + break + url = found_remote.split(" ")[1] + return cls._git_remote_to_pip_url(url.strip()) + + @staticmethod + def _git_remote_to_pip_url(url: str) -> str: + """ + Convert a remote url from what git uses to what pip accepts. + + There are 3 legal forms **url** may take: + + 1. A fully qualified url: ssh://git@example.com/foo/bar.git + 2. A local project.git folder: /path/to/bare/repository.git + 3. SCP shorthand for form 1: git@example.com:foo/bar.git + + Form 1 is output as-is. Form 2 must be converted to URI and form 3 must + be converted to form 1. + + See the corresponding test test_git_remote_url_to_pip() for examples of + sample inputs/outputs. + """ + if re.match(r"\w+://", url): + # This is already valid. Pass it though as-is. + return url + if os.path.exists(url): + # A local bare remote (git clone --mirror). + # Needs a file:// prefix. + return pathlib.PurePath(url).as_uri() + scp_match = SCP_REGEX.match(url) + if scp_match: + # Add an ssh:// prefix and replace the ':' with a '/'. + return scp_match.expand(r"ssh://\1\2/\3") + # Otherwise, bail out. + raise RemoteNotValidError(url) + + @classmethod + def has_commit(cls, location: str, rev: str) -> bool: + """ + Check if rev is a commit that is available in the local repository. + """ + try: + cls.run_command( + ["rev-parse", "-q", "--verify", "sha^" + rev], + cwd=location, + log_failed_cmd=False, + ) + except InstallationError: + return False + else: + return True + + @classmethod + def get_revision(cls, location: str, rev: str | None = None) -> str: + if rev is None: + rev = "HEAD" + current_rev = cls.run_command( + ["rev-parse", rev], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return current_rev.strip() + + @classmethod + def get_subdirectory(cls, location: str) -> str | None: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + # find the repo root + git_dir = cls.run_command( + ["rev-parse", "--git-dir"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + repo_root = os.path.abspath(os.path.join(git_dir, "..")) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> tuple[str, str | None, AuthInfo]: + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. 
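+
+        For example, a stub like 'git+git@github.com:pypa/pip.git'
+        (illustrative) is temporarily rewritten to
+        'git+ssh://git@github.com:pypa/pip.git' for parsing, then returned
+        with the 'ssh://' marker stripped back out.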
+ """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith("file"): + initial_slashes = path[: -len(path.lstrip("/"))] + newpath = initial_slashes + urllib.request.url2pathname(path).replace( + "\\", "/" + ).lstrip("/") + after_plus = scheme.find("+") + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + if "://" not in url: + assert "file:" not in url + url = url.replace("git+", "git+ssh://") + url, rev, user_pass = super().get_url_rev_and_auth(url) + url = url.replace("ssh://", "") + else: + url, rev, user_pass = super().get_url_rev_and_auth(url) + + return url, rev, user_pass + + @classmethod + def update_submodules(cls, location: str, verbosity: int = 0) -> None: + argv = ["submodule", "update", "--init", "--recursive"] + + if verbosity <= 0: + argv.append("-q") + + if not os.path.exists(os.path.join(location, ".gitmodules")): + return + cls.run_command( + argv, + cwd=location, + ) + + @classmethod + def get_repository_root(cls, location: str) -> str | None: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["rev-parse", "--show-toplevel"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under git control " + "because git is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + @staticmethod + def should_add_vcs_url_prefix(repo_url: str) -> bool: + """In either https or ssh form, requirements must be prefixed with git+.""" + return True + + +vcs.register(Git) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py b/venv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 0000000..c875803 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,186 @@ +from __future__ import annotations + +import configparser +import logging +import os + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = "hg" + dirname = ".hg" + repo_name = "clone" + schemes = ( + "hg+file", + "hg+http", + "hg+https", + "hg+ssh", + "hg+static-http", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> list[str]: + return [f"--rev={rev}"] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Cloning hg %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flags: tuple[str, ...] 
= ("--quiet",) + elif verbosity == 1: + flags = () + elif verbosity == 2: + flags = ("--verbose",) + else: + flags = ("--verbose", "--debug") + self.run_command(make_command("clone", "--noupdate", *flags, url, dest)) + self.run_command( + make_command("update", *flags, rev_options.to_args()), + cwd=dest, + ) + + def switch( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + extra_flags = [] + repo_config = os.path.join(dest, self.dirname, "hgrc") + config = configparser.RawConfigParser() + + if verbosity <= 0: + extra_flags.append("-q") + + try: + config.read(repo_config) + config.set("paths", "default", url.secret) + with open(repo_config, "w") as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning("Could not switch Mercurial repository to %s: %s", url, exc) + else: + cmd_args = make_command("update", *extra_flags, rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + def update( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + extra_flags = [] + + if verbosity <= 0: + extra_flags.append("-q") + + self.run_command(["pull", *extra_flags], cwd=dest) + cmd_args = make_command("update", *extra_flags, rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + url = cls.run_command( + ["showconfig", "paths.default"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if cls._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the repository-local changeset revision number, as an integer. + """ + current_revision = cls.run_command( + ["parents", "--template={rev}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_revision + + @classmethod + def get_requirement_revision(cls, location: str) -> str: + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ + current_rev_hash = cls.run_command( + ["parents", "--template={node}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_rev_hash + + @classmethod + def is_commit_id_equal(cls, dest: str, name: str | None) -> bool: + """Always assume the versions don't match""" + return False + + @classmethod + def get_subdirectory(cls, location: str) -> str | None: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. 
+ """ + # find the repo root + repo_root = cls.run_command( + ["root"], show_stdout=False, stdout_only=True, cwd=location + ).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location: str) -> str | None: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["root"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under hg control " + "because hg is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + +vcs.register(Mercurial) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py b/venv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py new file mode 100644 index 0000000..579f428 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/vcs/subversion.py @@ -0,0 +1,335 @@ +from __future__ import annotations + +import logging +import os +import re + +from pip._internal.utils.misc import ( + HiddenText, + display_path, + is_console_interactive, + is_installable_dir, + split_auth_from_netloc, +) +from pip._internal.utils.subprocess import CommandArgs, make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile(r'committed-rev="(\d+)"') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r"(.*)") + + +class Subversion(VersionControl): + name = "svn" + dirname = ".svn" + repo_name = "checkout" + schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file") + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + return True + + @staticmethod + def get_base_rev_args(rev: str) -> list[str]: + return ["-r", rev] + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, _ in os.walk(location): + if cls.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(cls.dirname) + entries_fn = os.path.join(base, cls.dirname, "entries") + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = cls._get_svn_url_rev(base) + + if base == location: + assert dirurl is not None + base = dirurl + "/" # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return str(revision) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> tuple[str, tuple[str | None, str | None]]: + """ + This override allows the auth information to be passed to svn via the + --username and --password options instead of via the URL. + """ + if scheme == "ssh": + # The --username and --password options can't be used for + # svn+ssh URLs, so keep the auth information in the URL. 
+            return super().get_netloc_and_auth(netloc, scheme)
+
+        return split_auth_from_netloc(netloc)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> tuple[str, str | None, AuthInfo]:
+        # hotfix the URL scheme after removing svn+ from svn+ssh:// re-add it
+        url, rev, user_pass = super().get_url_rev_and_auth(url)
+        if url.startswith("ssh://"):
+            url = "svn+" + url
+        return url, rev, user_pass
+
+    @staticmethod
+    def make_rev_args(username: str | None, password: HiddenText | None) -> CommandArgs:
+        extra_args: CommandArgs = []
+        if username:
+            extra_args += ["--username", username]
+        if password:
+            extra_args += ["--password", password]
+
+        return extra_args
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        # In cases where the source is in a subdirectory, we have to look up in
+        # the location until we find a valid project root.
+        orig_location = location
+        while not is_installable_dir(location):
+            last_location = location
+            location = os.path.dirname(location)
+            if location == last_location:
+                # We've traversed up to the root of the filesystem without
+                # finding a Python project.
+                logger.warning(
+                    "Could not find Python project for directory %s (tried all "
+                    "parent directories)",
+                    orig_location,
+                )
+                raise RemoteNotFoundError
+
+        url, _rev = cls._get_svn_url_rev(location)
+        if url is None:
+            raise RemoteNotFoundError
+
+        return url
+
+    @classmethod
+    def _get_svn_url_rev(cls, location: str) -> tuple[str | None, int]:
+        from pip._internal.exceptions import InstallationError
+
+        entries_path = os.path.join(location, cls.dirname, "entries")
+        if os.path.exists(entries_path):
+            with open(entries_path) as f:
+                data = f.read()
+        else:  # subversion >= 1.7 does not have the 'entries' file
+            data = ""
+
+        url = None
+        if data.startswith(("8", "9", "10")):
+            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
+            del entries[0][0]  # get rid of the '8'
+            url = entries[0][3]
+            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
+        elif data.startswith("<?xml"):
+            match = _svn_xml_url_re.search(data)
+            if not match:
+                raise ValueError(f"Badly formatted data: {data!r}")
+            url = match.group(1)  # get repository URL
+            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+        else:
+            try:
+                # subversion >= 1.7
+                # Note that using get_remote_call_options is not necessary here
+                # because `svn info` is being run against a local directory.
+                # We don't need to worry about making sure interactive mode
+                # is being used to prompt for passwords, because passwords
+                # are only potentially needed for remote server requests.
+                xml = cls.run_command(
+                    ["info", "--xml", location],
+                    show_stdout=False,
+                    stdout_only=True,
+                )
+                match = _svn_info_xml_url_re.search(xml)
+                assert match is not None
+                url = match.group(1)
+                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
+            except InstallationError:
+                url, revs = None, []
+
+        if revs:
+            rev = max(revs)
+        else:
+            rev = 0
+
+        return url, rev
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: str | None) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+    def __init__(self, use_interactive: bool | None = None) -> None:
+        if use_interactive is None:
+            use_interactive = is_console_interactive()
+        self.use_interactive = use_interactive
+
+        # This member is used to cache the fetched version of the current
+        # ``svn`` client.
+        # Special value definitions:
+        #   None: Not evaluated yet.
+        #   Empty tuple: Could not parse version.
+        self._vcs_version: tuple[int, ...] | None = None
+
+        super().__init__()
+
+    def call_vcs_version(self) -> tuple[int, ...]:
+        """Query the version of the currently installed Subversion client.
+ + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0) + # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2 + version_prefix = "svn, version " + version = self.run_command(["--version"], show_stdout=False, stdout_only=True) + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix) :].split()[0] + version_list = version.partition("-")[0].split(".") + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self) -> tuple[int, ...]: + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self) -> CommandArgs: + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ["--non-interactive"] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). 
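+        # Sketch of the resulting behaviour, assuming the version tuples
+        # produced by get_vcs_version():
+        #     () or (1, 7, ...)  ->  no extra options (unknown or too old)
+        #     (1, 8) and newer   ->  ["--force-interactive"]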
+ if svn_version >= (1, 8): + return ["--force-interactive"] + + return [] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flags = ["--quiet"] + else: + flags = [] + cmd_args = make_command( + "checkout", + *flags, + self.get_remote_call_options(), + rev_options.to_args(), + url, + dest, + ) + self.run_command(cmd_args) + + def switch( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + cmd_args = make_command( + "switch", + self.get_remote_call_options(), + rev_options.to_args(), + url, + dest, + ) + self.run_command(cmd_args) + + def update( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + cmd_args = make_command( + "update", + self.get_remote_call_options(), + rev_options.to_args(), + dest, + ) + self.run_command(cmd_args) + + +vcs.register(Subversion) diff --git a/venv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py b/venv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 0000000..4e91ccd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,693 @@ +"""Handles all VCS (version control) support""" + +from __future__ import annotations + +import logging +import os +import shutil +import sys +import urllib.parse +from collections.abc import Iterable, Iterator, Mapping +from dataclasses import dataclass, field +from typing import ( + Any, + Literal, + Optional, +) + +from pip._internal.cli.spinners import SpinnerInterface +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import ( + HiddenText, + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + is_installable_dir, + rmtree, +) +from pip._internal.utils.subprocess import ( + CommandArgs, + call_subprocess, + format_command_args, + make_command, +) + +__all__ = ["vcs"] + + +logger = logging.getLogger(__name__) + +AuthInfo = tuple[Optional[str], Optional[str]] + + +def is_url(name: str) -> bool: + """ + Return true if the name looks like a URL. + """ + scheme = urllib.parse.urlsplit(name).scheme + if not scheme: + return False + return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes + + +def make_vcs_requirement_url( + repo_url: str, rev: str, project_name: str, subdir: str | None = None +) -> str: + """ + Return the URL for a VCS requirement. + + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = project_name.replace("-", "_") + req = f"{repo_url}@{rev}#egg={egg_project_name}" + if subdir: + req += f"&subdirectory={subdir}" + + return req + + +def find_path_to_project_root_from_repo_root( + location: str, repo_root: str +) -> str | None: + """ + Find the the Python project's root by searching up the filesystem from + `location`. Return the path to project root relative to `repo_root`. + Return None if the project root is `repo_root`, or cannot be found. + """ + # find project root. + orig_location = location + while not is_installable_dir(location): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding a Python project. 
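+            # Illustration with hypothetical paths: for location
+            # "/repo/src/pkg" and repo_root "/repo", the loop would instead
+            # stop at the first installable parent, and "src/pkg" would be
+            # returned below, relative to the repo root.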
+            logger.warning(
+                "Could not find a Python project for directory %s (tried all "
+                "parent directories)",
+                orig_location,
+            )
+            return None
+
+    if os.path.samefile(repo_root, location):
+        return None
+
+    return os.path.relpath(location, repo_root)
+
+
+class RemoteNotFoundError(Exception):
+    pass
+
+
+class RemoteNotValidError(Exception):
+    def __init__(self, url: str):
+        super().__init__(url)
+        self.url = url
+
+
+@dataclass(frozen=True)
+class RevOptions:
+    """
+    Encapsulates a VCS-specific revision to install, along with any VCS
+    install options.
+
+    Args:
+      vc_class: a VersionControl subclass.
+      rev: the name of the revision to install.
+      extra_args: a list of extra options.
+    """
+
+    vc_class: type[VersionControl]
+    rev: str | None = None
+    extra_args: CommandArgs = field(default_factory=list)
+    branch_name: str | None = None
+
+    def __repr__(self) -> str:
+        return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"
+
+    @property
+    def arg_rev(self) -> str | None:
+        if self.rev is None:
+            return self.vc_class.default_arg_rev
+
+        return self.rev
+
+    def to_args(self) -> CommandArgs:
+        """
+        Return the VCS-specific command arguments.
+        """
+        args: CommandArgs = []
+        rev = self.arg_rev
+        if rev is not None:
+            args += self.vc_class.get_base_rev_args(rev)
+        args += self.extra_args
+
+        return args
+
+    def to_display(self) -> str:
+        if not self.rev:
+            return ""
+
+        return f" (to revision {self.rev})"
+
+    def make_new(self, rev: str) -> RevOptions:
+        """
+        Make a copy of the current instance, but with a new rev.
+
+        Args:
+          rev: the name of the revision for the new object.
+        """
+        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
+class VcsSupport:
+    _registry: dict[str, VersionControl] = {}
+    schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]
+
+    def __init__(self) -> None:
+        # Register more schemes with urlparse for various version control
+        # systems
+        urllib.parse.uses_netloc.extend(self.schemes)
+        super().__init__()
+
+    def __iter__(self) -> Iterator[str]:
+        return self._registry.__iter__()
+
+    @property
+    def backends(self) -> list[VersionControl]:
+        return list(self._registry.values())
+
+    @property
+    def dirnames(self) -> list[str]:
+        return [backend.dirname for backend in self.backends]
+
+    @property
+    def all_schemes(self) -> list[str]:
+        schemes: list[str] = []
+        for backend in self.backends:
+            schemes.extend(backend.schemes)
+        return schemes
+
+    def register(self, cls: type[VersionControl]) -> None:
+        if not hasattr(cls, "name"):
+            logger.warning("Cannot register VCS %s", cls.__name__)
+            return
+        if cls.name not in self._registry:
+            self._registry[cls.name] = cls()
+            logger.debug("Registered VCS backend: %s", cls.name)
+
+    def unregister(self, name: str) -> None:
+        if name in self._registry:
+            del self._registry[name]
+
+    def get_backend_for_dir(self, location: str) -> VersionControl | None:
+        """
+        Return a VersionControl object if a repository of that type is found
+        at the given directory.
+        """
+        vcs_backends = {}
+        for vcs_backend in self._registry.values():
+            repo_path = vcs_backend.get_repository_root(location)
+            if not repo_path:
+                continue
+            logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name)
+            vcs_backends[repo_path] = vcs_backend
+
+        if not vcs_backends:
+            return None
+
+        # Choose the VCS in the inner-most directory. Since all repository
+        # roots found here would be either `location` or one of its
+        # parents, the longest path should have the most path components,
+        # i.e. the backend representing the inner-most repository.
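+        # Illustration with made-up paths: were backends found at "/home/u"
+        # (hg) and "/home/u/project" (git), the longer key "/home/u/project"
+        # wins and the git backend is returned.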
+ inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme: str) -> VersionControl | None: + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name: str) -> VersionControl | None: + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl: + name = "" + dirname = "" + repo_name = "" + # List of supported schemes for this Version Control + schemes: tuple[str, ...] = () + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ: tuple[str, ...] = () + default_arg_rev: str | None = None + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith(f"{cls.name}:") + + @classmethod + def get_subdirectory(cls, location: str) -> str | None: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir: str) -> str: + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir: str, project_name: str) -> str: + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = f"{cls.name}+{repo_url}" + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev: str) -> list[str]: + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. + """ + return False + + @classmethod + def make_rev_options( + cls, rev: str | None = None, extra_args: CommandArgs | None = None + ) -> RevOptions: + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. 
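+
+        Example (illustration only; names hypothetical): for the Git backend,
+        make_rev_options("v1.0") returns a RevOptions whose to_args() yields
+        ["v1.0"] via Git.get_base_rev_args(); with rev=None, to_args() falls
+        back to the class's default_arg_rev, if any.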
+ """ + return RevOptions(cls, rev, extra_args=extra_args or []) + + @classmethod + def _is_local_repository(cls, repo: str) -> bool: + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> tuple[str, tuple[str | None, str | None]]: + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> tuple[str, str | None, AuthInfo]: + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + if "+" not in scheme: + raise ValueError( + f"Sorry, {url!r} is a malformed VCS url. " + "The format is +://, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp" + ) + # Remove the vcs prefix. + scheme = scheme.split("+", 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if "@" in path: + path, rev = path.rsplit("@", 1) + if not rev: + raise InstallationError( + f"The URL {url!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL." + ) + url = urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + return url, rev, user_pass + + @staticmethod + def make_rev_args(username: str | None, password: HiddenText | None) -> CommandArgs: + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url: HiddenText) -> tuple[HiddenText, RevOptions]: + """ + Return the URL and RevOptions object to use in obtain(), + as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password: HiddenText | None = None + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url: str) -> str: + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib.parse.unquote(url).rstrip("/") + + @classmethod + def compare_urls(cls, url1: str, url2: str) -> bool: + """ + Compare two repo URLs for identity, ignoring incidental differences. + """ + return cls.normalize_url(url1) == cls.normalize_url(url2) + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. + rev_options: a RevOptions object. + verbosity: verbosity level. 
+ """ + raise NotImplementedError + + def switch( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update( + self, + dest: str, + url: HiddenText, + rev_options: RevOptions, + verbosity: int = 0, + ) -> None: + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest: str, name: str | None) -> bool: + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None: + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. + :param verbosity: verbosity level. + """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + "%s in %s exists, and has correct URL (%s)", + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + "Updating %s %s%s", + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options, verbosity=verbosity) + else: + logger.info("Skipping because already up-to-date.") + return + + logger.warning( + "%s %s in %s exists with URL %s", + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b")) + else: + logger.warning( + "Directory %s already exists, and is not a %s %s.", + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore + + logger.warning( + "The plan is to install the %s repository %s", + self.name, + url, + ) + response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1]) + + if response == "a": + sys.exit(-1) + + if response == "w": + logger.warning("Deleting %s", display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + if response == "b": + dest_dir = backup_dir(dest) + logger.warning("Backing up %s to %s", display_path(dest), dest_dir) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + # Do nothing if the response is "i". + if response == "s": + logger.info( + "Switching %s %s to %s%s", + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options, verbosity=verbosity) + + def unpack(self, location: str, url: HiddenText, verbosity: int) -> None: + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + :param verbosity: verbosity level. 
+ """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url, verbosity=verbosity) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd: list[str] | CommandArgs, + show_stdout: bool = True, + cwd: str | None = None, + on_returncode: Literal["raise", "warn", "ignore"] = "raise", + extra_ok_returncodes: Iterable[int] | None = None, + command_desc: str | None = None, + extra_environ: Mapping[str, Any] | None = None, + spinner: SpinnerInterface | None = None, + log_failed_cmd: bool = True, + stdout_only: bool = False, + ) -> str: + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + if command_desc is None: + command_desc = format_command_args(cmd) + try: + return call_subprocess( + cmd, + show_stdout, + cwd, + on_returncode=on_returncode, + extra_ok_returncodes=extra_ok_returncodes, + command_desc=command_desc, + extra_environ=extra_environ, + unset_environ=cls.unset_environ, + spinner=spinner, + log_failed_cmd=log_failed_cmd, + stdout_only=stdout_only, + ) + except NotADirectoryError: + raise BadCommand(f"Cannot find command {cls.name!r} - invalid PATH") + except FileNotFoundError: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + raise BadCommand( + f"Cannot find command {cls.name!r} - do you have " + f"{cls.name!r} installed and in your PATH?" + ) + except PermissionError: + # errno.EACCES = Permission denied + # This error occurs, for instance, when the command is installed + # only for another user. So, the current user don't have + # permission to call the other user command. + raise BadCommand( + f"No permission to execute {cls.name!r} - install it " + f"locally, globally (ask admin), or check your PATH. " + f"See possible solutions at " + f"https://pip.pypa.io/en/latest/reference/pip_freeze/" + f"#fixing-permission-denied." + ) + + @classmethod + def is_repository_directory(cls, path: str) -> bool: + """ + Return whether a directory path is a repository directory. + """ + logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location: str) -> str | None: + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. 
+ """ + if cls.is_repository_directory(location): + return location + return None diff --git a/venv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py b/venv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 0000000..4dbf767 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,261 @@ +"""Orchestrator for building wheels from InstallRequirements.""" + +from __future__ import annotations + +import logging +import os.path +import re +from collections.abc import Iterable +from tempfile import TemporaryDirectory + +from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version +from pip._vendor.packaging.version import InvalidVersion, Version + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel +from pip._internal.metadata import FilesystemWheel, get_wheel_distribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_editable import build_wheel_editable +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) + +BuildResult = tuple[list[InstallRequirement], list[InstallRequirement]] + + +def _contains_egg_info(s: str) -> bool: + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_cache( + req: InstallRequirement, +) -> bool | None: + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available. + """ + if req.editable or not req.source_dir: + # never cache editable requirements + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + assert req.link + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. + return False + + +def _get_cache_dir( + req: InstallRequirement, + wheel_cache: WheelCache, +) -> str: + """Return the persistent or temporary cache directory where the built + wheel need to be stored. 
+ """ + cache_available = bool(wheel_cache.cache_dir) + assert req.link + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _verify_one(req: InstallRequirement, wheel_path: str) -> None: + canonical_name = canonicalize_name(req.name or "") + w = Wheel(os.path.basename(wheel_path)) + if w.name != canonical_name: + raise InvalidWheelFilename( + f"Wheel has unexpected file name: expected {canonical_name!r}, " + f"got {w.name!r}", + ) + dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name) + dist_verstr = str(dist.version) + if canonicalize_version(dist_verstr) != canonicalize_version(w.version): + raise InvalidWheelFilename( + f"Wheel has unexpected file name: expected {dist_verstr!r}, " + f"got {w.version!r}", + ) + metadata_version_value = dist.metadata_version + if metadata_version_value is None: + raise UnsupportedWheel("Missing Metadata-Version") + try: + metadata_version = Version(metadata_version_value) + except InvalidVersion: + msg = f"Invalid Metadata-Version: {metadata_version_value}" + raise UnsupportedWheel(msg) + if metadata_version >= Version("1.2") and not isinstance(dist.version, Version): + raise UnsupportedWheel( + f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not" + ) + + +def _build_one( + req: InstallRequirement, + output_dir: str, + verify: bool, + editable: bool, +) -> str | None: + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + artifact = "editable" if editable else "wheel" + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building %s for %s failed: %s", + artifact, + req.name, + e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + wheel_path = _build_one_inside_env(req, output_dir, editable) + if wheel_path and verify: + try: + _verify_one(req, wheel_path) + except (InvalidWheelFilename, UnsupportedWheel) as e: + logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e) + return None + return wheel_path + + +def _build_one_inside_env( + req: InstallRequirement, + output_dir: str, + editable: bool, +) -> str | None: + with TemporaryDirectory(dir=output_dir) as wheel_directory: + assert req.name + assert req.metadata_directory + assert req.pep517_backend + if editable: + wheel_path = build_wheel_editable( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + wheel_directory=wheel_directory, + ) + else: + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + wheel_directory=wheel_directory, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + # We can do a replace here because wheel_path is guaranteed to + # be in the same filesystem as output_dir. This will perform an + # atomic rename, which is necessary to avoid concurrency issues + # when populating the cache. 
+ os.replace(wheel_path, dest_path) + logger.info( + "Created wheel for %s: filename=%s size=%d sha256=%s", + req.name, + wheel_name, + length, + wheel_hash.hexdigest(), + ) + logger.info("Stored in directory: %s", output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + return None + + +def build( + requirements: Iterable[InstallRequirement], + wheel_cache: WheelCache, + verify: bool, +) -> BuildResult: + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + "Building wheels for collected packages: %s", + ", ".join(req.name for req in requirements), # type: ignore + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + assert req.name + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, + cache_dir, + verify, + req.editable and req.permit_editable_wheels, + ) + if wheel_file: + # Record the download origin in the cache + if req.download_info is not None: + # download_info is guaranteed to be set because when we build an + # InstallRequirement it has been through the preparer before, but + # let's be cautious. + wheel_cache.record_download_origin(cache_dir, req.download_info) + # Update the link for this. + req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + "Successfully built %s", + " ".join([req.name for req in build_successes]), # type: ignore + ) + if build_failures: + logger.info( + "Failed to build %s", + " ".join([req.name for req in build_failures]), # type: ignore + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/README.rst b/venv/lib/python3.12/site-packages/pip/_vendor/README.rst new file mode 100644 index 0000000..a925e8c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/README.rst @@ -0,0 +1,180 @@ +================ +Vendoring Policy +================ + +* Vendored libraries **MUST** not be modified except as required to + successfully vendor them. +* Vendored libraries **MUST** be released copies of libraries available on + PyPI. +* Vendored libraries **MUST** be available under a license that allows + them to be integrated into ``pip``, which is released under the MIT license. +* Vendored libraries **MUST** be accompanied with LICENSE files. +* The versions of libraries vendored in pip **MUST** be reflected in + ``pip/_vendor/vendor.txt``. +* Vendored libraries **MUST** function without any build steps such as ``2to3`` + or compilation of C code, practically this limits to single source 2.x/3.x and + pure Python. +* Any modifications made to libraries **MUST** be noted in + ``pip/_vendor/README.rst`` and their corresponding patches **MUST** be + included ``tools/vendoring/patches``. +* Vendored libraries should have corresponding ``vendored()`` entries in + ``pip/_vendor/__init__.py``. + +Rationale +========= + +Historically pip has not had any dependencies except for ``setuptools`` itself, +choosing instead to implement any functionality it needed to prevent needing +a dependency. 
However, starting with pip 1.5, we began to replace code that was +implemented inside of pip with reusable libraries from PyPI. This brought the +typical benefits of reusing libraries instead of reinventing the wheel like +higher quality and more battle tested code, centralization of bug fixes +(particularly security sensitive ones), and better/more features for less work. + +However, there are several issues with having dependencies in the traditional +way (via ``install_requires``) for pip. These issues are: + +**Fragility** + When pip depends on another library to function then if for whatever reason + that library either isn't installed or an incompatible version is installed + then pip ceases to function. This is of course true for all Python + applications, however for every application *except* for pip the way you fix + it is by re-running pip. Obviously, when pip can't run, you can't use pip to + fix pip, so you're left having to manually resolve dependencies and + installing them by hand. + +**Making other libraries uninstallable** + One of pip's current dependencies is the ``requests`` library, for which pip + requires a fairly recent version to run. If pip depended on ``requests`` in + the traditional manner, then we'd either have to maintain compatibility with + every ``requests`` version that has ever existed (and ever will), OR allow + pip to render certain versions of ``requests`` uninstallable. (The second + issue, although technically true for any Python application, is magnified by + pip's ubiquity; pip is installed by default in Python, in ``pyvenv``, and in + ``virtualenv``.) + +**Security** + This might seem puzzling at first glance, since vendoring has a tendency to + complicate updating dependencies for security updates, and that holds true + for pip. However, given the *other* reasons for avoiding dependencies, the + alternative is for pip to reinvent the wheel itself. This is what pip did + historically. It forced pip to re-implement its own HTTPS verification + routines as a workaround for the Python standard library's lack of SSL + validation, which resulted in similar bugs in the validation routine in + ``requests`` and ``urllib3``, except that they had to be discovered and + fixed independently. Even though we're vendoring, reusing libraries keeps + pip more secure by relying on the great work of our dependencies, *and* + allowing for faster, easier security fixes by simply pulling in newer + versions of dependencies. + +**Bootstrapping** + Currently most popular methods of installing pip rely on pip's + self-contained nature to install pip itself. These tools work by bundling a + copy of pip, adding it to ``sys.path``, and then executing that copy of pip. + This is done instead of implementing a "mini installer" (to reduce + duplication); pip already knows how to install a Python package, and is far + more battle-tested than any "mini installer" could ever possibly be. + +Many downstream redistributors have policies against this kind of bundling, and +instead opt to patch the software they distribute to debundle it and make it +rely on the global versions of the software that they already have packaged +(which may have its own patches applied to it). We (the pip team) would prefer +it if pip was *not* debundled in this manner due to the above reasons and +instead we would prefer it if pip would be left intact as it is now. 
+
+In the longer term, if someone has a *portable* solution to the above problems,
+other than the bundling method we currently use, that doesn't add additional
+problems that are unreasonable, then we would be happy to consider it, and
+possibly switch to said method. This solution must function correctly across
+all of the situations in which we expect pip to be used and not mandate some
+external mechanism such as OS packages.
+
+
+Modifications
+=============
+
+* ``setuptools`` is completely stripped to only keep ``pkg_resources``.
+* ``pkg_resources`` has been modified to import its dependencies from
+  ``pip._vendor``, and to use the vendored copy of ``platformdirs``
+  rather than ``appdirs``.
+* ``packaging`` has been modified to import its dependencies from
+  ``pip._vendor``.
+* ``CacheControl`` has been modified to import its dependencies from
+  ``pip._vendor``.
+* ``requests`` has been modified to import its other dependencies from
+  ``pip._vendor`` and to *not* load ``simplejson`` (all platforms) and
+  ``pyopenssl`` (Windows).
+* ``platformdirs`` has been modified to import its submodules from
+  ``pip._vendor.platformdirs``.
+
+Automatic Vendoring
+===================
+
+Vendoring is automated via the ``vendoring`` tool from the content of
+``pip/_vendor/vendor.txt`` and the different patches in
+``tools/vendoring/patches``.
+Launch it via ``vendoring sync . -v`` (requires ``vendoring>=0.2.2``).
+Tool configuration is done via ``pyproject.toml``.
+
+To update the vendored library versions, we have a session defined in ``nox``.
+The command to upgrade everything is::
+
+    nox -s vendoring -- --upgrade-all --skip urllib3 --skip setuptools
+
+At the time of writing (April 2025) we do not upgrade ``urllib3`` because the
+next version is a major upgrade and will be handled as an independent PR. We also
+do not upgrade ``setuptools``, because we only rely on ``pkg_resources``, and
+tracking every ``setuptools`` change is unnecessary for our needs.
+
+
+Managing Local Patches
+======================
+
+The ``vendoring`` tool automatically applies our local patches, but when
+updating, the patches sometimes no longer apply cleanly. In that case, the
+update will fail. To resolve this, take the following steps:
+
+1. Revert any incomplete changes in the revendoring branch, to ensure you have
+   a clean starting point.
+2. Run the revendoring of the library with a problem again: ``nox -s vendoring
+   -- --upgrade <library>``.
+3. This will fail again, but you will have the original source in your working
+   directory. Review the existing patch against the source, and modify the patch
+   to reflect the new version of the source. If you ``git add`` the changes the
+   vendoring made, you can modify the source to reflect the patch file and then
+   generate a new patch with ``git diff``.
+4. Now, revert everything *except* the patch file changes. Leave the modified
+   patch file unstaged but saved in the working tree.
+5. Re-run the vendoring. This time, it should pick up the changed patch file
+   and apply it cleanly. The patch file changes will be committed along with the
+   revendoring, so the new commit should be ready to test and publish as a PR.
+
+
+Debundling
+==========
+
+As mentioned in the rationale, we, the pip team, would prefer it if pip was not
+debundled (other than optionally ``pip/_vendor/requests/cacert.pem``) and that
+pip was left intact.
+However, if you insist on doing so, we have a
+semi-supported method (that we don't test in our CI) that requires a bit of
+extra work on your end in order to solve the problems described above.
+
+1. Delete everything in ``pip/_vendor/`` **except** for
+   ``pip/_vendor/__init__.py`` and ``pip/_vendor/vendor.txt``.
+2. Generate wheels for each of pip's dependencies (and any of their
+   dependencies) using your patched copies of these libraries. These must be
+   placed somewhere on the filesystem that pip can access (``pip/_vendor`` is
+   the default assumption).
+3. Modify ``pip/_vendor/__init__.py`` so that the ``DEBUNDLED`` variable is
+   ``True``.
+4. Upon installation, the ``INSTALLER`` file in pip's own ``dist-info``
+   directory should be set to something other than ``pip``, so that pip
+   can detect that it wasn't installed using itself.
+5. *(optional)* If you've placed the wheels in a location other than
+   ``pip/_vendor/``, then modify ``pip/_vendor/__init__.py`` so that the
+   ``WHEEL_DIR`` variable points to the location you've placed them.
+6. *(optional)* Update the ``pip_self_version_check`` logic to use the
+   appropriate logic for determining the latest available version of pip and
+   prompt the user with the correct upgrade message.
+
+Note that partial debundling is **NOT** supported. You need to prepare wheels
+for all dependencies for successful debundling.
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/__init__.py
new file mode 100644
index 0000000..34ccb99
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/__init__.py
@@ -0,0 +1,117 @@
+"""
+pip._vendor is for vendoring dependencies of pip to prevent needing pip to
+depend on something external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
+# Downstream redistributors which have debundled our dependencies should also
+# patch this value to be true. This will trigger the additional patching
+# to cause things like "six" to be available as "pip._vendor.six".
+DEBUNDLED = False
+
+# By default, look in this directory for a bunch of .whl files which we will
+# add to the beginning of sys.path before attempting to import anything. This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. The idea for this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+    vendored_name = "{0}.{1}".format(__name__, modulename)
+
+    try:
+        __import__(modulename, globals(), locals(), level=0)
+    except ImportError:
+        # We can just silently allow import failures to pass here. If we
+        # got to this point it means that ``import pip._vendor.whatever``
+        # failed and so did ``import whatever``. Since we're importing this
+        # upfront in an attempt to alias imports, not erroring here will
+        # just mean we get a regular import error whenever pip *actually*
+        # tries to import one of these modules to use it, which actually
+        # gives us a better error message than we would have otherwise
+        # gotten.
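+        # Concretely (hypothetical scenario): if both ``pip._vendor.urllib3``
+        # and a top-level ``urllib3`` were missing, nothing is aliased here
+        # and the ImportError simply resurfaces at pip's first real use of
+        # urllib3.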
+        pass
+    else:
+        sys.modules[vendored_name] = sys.modules[modulename]
+        base, head = vendored_name.rsplit(".", 1)
+        setattr(sys.modules[base], head, sys.modules[modulename])
+
+
+# If we're operating in a debundled setup, then we want to go ahead and trigger
+# the aliasing of our vendored libraries as well as looking for wheels to add
+# to our sys.path. This will cause all of this code to be a no-op typically
+# however downstream redistributors can enable it in a consistent way across
+# all platforms.
+if DEBUNDLED:
+    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
+    # front of our sys.path.
+    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
+
+    # Actually alias all of our vendored dependencies.
+    vendored("cachecontrol")
+    vendored("certifi")
+    vendored("dependency_groups")
+    vendored("distlib")
+    vendored("distro")
+    vendored("packaging")
+    vendored("packaging.version")
+    vendored("packaging.specifiers")
+    vendored("pkg_resources")
+    vendored("platformdirs")
+    vendored("progress")
+    vendored("pyproject_hooks")
+    vendored("requests")
+    vendored("requests.exceptions")
+    vendored("requests.packages")
+    vendored("requests.packages.urllib3")
+    vendored("requests.packages.urllib3._collections")
+    vendored("requests.packages.urllib3.connection")
+    vendored("requests.packages.urllib3.connectionpool")
+    vendored("requests.packages.urllib3.contrib")
+    vendored("requests.packages.urllib3.contrib.ntlmpool")
+    vendored("requests.packages.urllib3.contrib.pyopenssl")
+    vendored("requests.packages.urllib3.exceptions")
+    vendored("requests.packages.urllib3.fields")
+    vendored("requests.packages.urllib3.filepost")
+    vendored("requests.packages.urllib3.packages")
+    vendored("requests.packages.urllib3.packages.ordered_dict")
+    vendored("requests.packages.urllib3.packages.six")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("rich") + vendored("rich.console") + vendored("rich.highlighter") + vendored("rich.logging") + vendored("rich.markup") + vendored("rich.progress") + vendored("rich.segment") + vendored("rich.style") + vendored("rich.text") + vendored("rich.traceback") + if sys.version_info < (3, 11): + vendored("tomli") + vendored("truststore") + vendored("urllib3") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..50dae3e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/LICENSE.txt b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/LICENSE.txt new file mode 100644 index 0000000..d8b3b56 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/LICENSE.txt @@ -0,0 +1,13 @@ +Copyright 2012-2021 Eric Larson + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000..67888db --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,29 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
+""" + +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.14.3" + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.controller import CacheController +from pip._vendor.cachecontrol.wrapper import CacheControl + +__all__ = [ + "__author__", + "__email__", + "__version__", + "CacheControlAdapter", + "CacheController", + "CacheControl", +] + +import logging + +logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3a1769c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc new file mode 100644 index 0000000..1a81445 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc new file mode 100644 index 0000000..54489af Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc new file mode 100644 index 0000000..3c9d3e2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc new file mode 100644 index 0000000..6d93eb3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc new file mode 100644 index 0000000..01a259e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc new file mode 100644 index 0000000..eb654bb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc new file mode 100644 index 0000000..91d6667 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc 
differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc new file mode 100644 index 0000000..8c2e85c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..2c84208 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,70 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import logging +from argparse import ArgumentParser +from typing import TYPE_CHECKING + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +if TYPE_CHECKING: + from argparse import Namespace + + from pip._vendor.cachecontrol.controller import CacheController + + +def setup_logging() -> None: + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session() -> requests.Session: + adapter = CacheControlAdapter( + DictCache(), cache_etags=True, serializer=None, heuristic=None + ) + sess = requests.Session() + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + sess.cache_controller = adapter.controller # type: ignore[attr-defined] + return sess + + +def get_args() -> Namespace: + parser = ArgumentParser() + parser.add_argument("url", help="The URL to try and cache") + return parser.parse_args() + + +def main() -> None: + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + cache_controller: CacheController = ( + sess.cache_controller # type: ignore[attr-defined] + ) + cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if cache_controller.cached_request(resp.request): + print("Cached!") + else: + print("Not cached :(") + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..18084d1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,168 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import functools +import types +import weakref +import zlib +from typing import TYPE_CHECKING, Any, Collection, Mapping + +from pip._vendor.requests.adapters import HTTPAdapter + +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController +from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper + +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest, Response + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer + + +class 
CacheControlAdapter(HTTPAdapter): + invalidating_methods = {"PUT", "PATCH", "DELETE"} + + def __init__( + self, + cache: BaseCache | None = None, + cache_etags: bool = True, + controller_class: type[CacheController] | None = None, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + cacheable_methods: Collection[str] | None = None, + *args: Any, + **kw: Any, + ) -> None: + super().__init__(*args, **kw) + self.cache = DictCache() if cache is None else cache + self.heuristic = heuristic + self.cacheable_methods = cacheable_methods or ("GET",) + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, cache_etags=cache_etags, serializer=serializer + ) + + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: None | float | tuple[float, float] | tuple[float, None] = None, + verify: bool | str = True, + cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None, + proxies: Mapping[str, str] | None = None, + cacheable_methods: Collection[str] | None = None, + ) -> Response: + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + cacheable = cacheable_methods or self.cacheable_methods + if request.method in cacheable: + try: + cached_response = self.controller.cached_request(request) + except zlib.error: + cached_response = None + if cached_response: + return self.build_response(request, cached_response, from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update(self.controller.conditional_headers(request)) + + resp = super().send(request, stream, timeout, verify, cert, proxies) + + return resp + + def build_response( # type: ignore[override] + self, + request: PreparedRequest, + response: HTTPResponse, + from_cache: bool = False, + cacheable_methods: Collection[str] | None = None, + ) -> Response: + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + cacheable = cacheable_methods or self.cacheable_methods + if not from_cache and request.method in cacheable: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif int(response.status) in PERMANENT_REDIRECT_STATUSES: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. 
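+                # (Illustrative note, not part of the upstream file:) the
+                # wrapper tees every read() into a temporary buffer; once the
+                # stream is exhausted, the partial below fires and hands the
+                # complete body to cache_response(). The weakref lets an
+                # unconsumed, unreferenced response be garbage collected
+                # instead of cached.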
+                response._fp = CallbackFileWrapper(  # type: ignore[assignment]
+                    response._fp,  # type: ignore[arg-type]
+                    functools.partial(
+                        self.controller.cache_response, request, weakref.ref(response)
+                    ),
+                )
+                if response.chunked:
+                    super_update_chunk_length = response.__class__._update_chunk_length
+
+                    def _update_chunk_length(
+                        weak_self: weakref.ReferenceType[HTTPResponse],
+                    ) -> None:
+                        self = weak_self()
+                        if self is None:
+                            return
+
+                        super_update_chunk_length(self)
+                        if self.chunk_left == 0:
+                            self._fp._close()  # type: ignore[union-attr]
+
+                    response._update_chunk_length = functools.partial(  # type: ignore[method-assign]
+                        _update_chunk_length, weakref.ref(response)
+                    )
+
+        resp: Response = super().build_response(request, response)
+
+        # See if we should invalidate the cache.
+        if request.method in self.invalidating_methods and resp.ok:
+            assert request.url is not None
+            cache_url = self.controller.cache_url(request.url)
+            self.cache.delete(cache_url)
+
+        # Give the request a from_cache attr to let people use it
+        resp.from_cache = from_cache  # type: ignore[attr-defined]
+
+        return resp
+
+    def close(self) -> None:
+        self.cache.close()
+        super().close()  # type: ignore[no-untyped-call]
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py
new file mode 100644
index 0000000..91598e9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/cache.py
@@ -0,0 +1,75 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+The cache object API for implementing caches. The default is a
+thread-safe in-memory dictionary.
+"""
+
+from __future__ import annotations
+
+from threading import Lock
+from typing import IO, TYPE_CHECKING, MutableMapping
+
+if TYPE_CHECKING:
+    from datetime import datetime
+
+
+class BaseCache:
+    def get(self, key: str) -> bytes | None:
+        raise NotImplementedError()
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        raise NotImplementedError()
+
+    def delete(self, key: str) -> None:
+        raise NotImplementedError()
+
+    def close(self) -> None:
+        pass
+
+
+class DictCache(BaseCache):
+    def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:
+        self.lock = Lock()
+        self.data = init_dict or {}
+
+    def get(self, key: str) -> bytes | None:
+        return self.data.get(key, None)
+
+    def set(
+        self, key: str, value: bytes, expires: int | datetime | None = None
+    ) -> None:
+        with self.lock:
+            self.data.update({key: value})
+
+    def delete(self, key: str) -> None:
+        with self.lock:
+            if key in self.data:
+                self.data.pop(key)
+
+
+class SeparateBodyBaseCache(BaseCache):
+    """
+    In this variant, the body is not stored mixed in with the metadata, but is
+    passed in (as a bytes-like object) in a separate call to ``set_body()``.
+
+    That is, the expected interaction pattern is::
+
+        cache.set(key, serialized_metadata)
+        cache.set_body(key, body)
+
+    Similarly, the body should be loaded separately via ``get_body()``.
+    """
+
+    def set_body(self, key: str, body: bytes) -> None:
+        raise NotImplementedError()
+
+    def get_body(self, key: str) -> IO[bytes] | None:
+        """
+        Return the body as a file-like object.
+ """ + raise NotImplementedError() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000..24ff469 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,8 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache +from pip._vendor.cachecontrol.caches.redis_cache import RedisCache + +__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..109ed65 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc new file mode 100644 index 0000000..923e0d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc new file mode 100644 index 0000000..61996f9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..45c632c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,145 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import hashlib +import os +import tempfile +from textwrap import dedent +from typing import IO, TYPE_CHECKING +from pathlib import Path + +from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.controller import CacheController + +if TYPE_CHECKING: + from datetime import datetime + + from filelock import BaseFileLock + + +class _FileCacheMixin: + """Shared implementation for both FileCache variants.""" + + def __init__( + self, + directory: str | Path, + forever: bool = False, + filemode: int = 0o0600, + dirmode: int = 0o0700, + lock_class: type[BaseFileLock] | None = None, + ) -> None: + try: + if lock_class is None: + from filelock import FileLock + + lock_class = FileLock + except ImportError: + notice = dedent( + """ + NOTE: In order to use the FileCache you must have + filelock installed. 
You can install it via pip: + pip install cachecontrol[filecache] + """ + ) + raise ImportError(notice) + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + @staticmethod + def encode(x: str) -> str: + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name: str) -> str: + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> bytes | None: + name = self._fn(key) + try: + with open(name, "rb") as fh: + return fh.read() + + except FileNotFoundError: + return None + + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: + name = self._fn(key) + self._write(name, value) + + def _write(self, path: str, data: bytes) -> None: + """ + Safely write the data to the given path. + """ + # Make sure the directory exists + dirname = os.path.dirname(path) + os.makedirs(dirname, self.dirmode, exist_ok=True) + + with self.lock_class(path + ".lock"): + # Write our actual file + (fd, name) = tempfile.mkstemp(dir=dirname) + try: + os.write(fd, data) + finally: + os.close(fd) + os.chmod(name, self.filemode) + os.replace(name, path) + + def _delete(self, key: str, suffix: str) -> None: + name = self._fn(key) + suffix + if not self.forever: + try: + os.remove(name) + except FileNotFoundError: + pass + + +class FileCache(_FileCacheMixin, BaseCache): + """ + Traditional FileCache: body is stored in memory, so not suitable for large + downloads. + """ + + def delete(self, key: str) -> None: + self._delete(key, "") + + +class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): + """ + Memory-efficient FileCache: body is stored in a separate file, reducing + peak memory usage. + """ + + def get_body(self, key: str) -> IO[bytes] | None: + name = self._fn(key) + ".body" + try: + return open(name, "rb") + except FileNotFoundError: + return None + + def set_body(self, key: str, body: bytes) -> None: + name = self._fn(key) + ".body" + self._write(name, body) + + def delete(self, key: str) -> None: + self._delete(key, "") + self._delete(key, ".body") + + +def url_to_file_path(url: str, filecache: FileCache) -> str: + """Return the file cache path based on the URL. + + This does not ensure the file exists! 
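+
+    Illustrative usage (the exact layout below is an implementation detail
+    of ``_fn()``, shown only as an example)::
+
+        fc = FileCache(".webcache")
+        url_to_file_path("http://example.com/", fc)
+        # -> ".webcache/<c1>/<c2>/<c3>/<c4>/<c5>/<sha224 hex digest>"
+        # where c1..c5 are the first five characters of the digest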
+ """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..f4f68c4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,48 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + + +from datetime import datetime, timezone +from typing import TYPE_CHECKING + +from pip._vendor.cachecontrol.cache import BaseCache + +if TYPE_CHECKING: + from redis import Redis + + +class RedisCache(BaseCache): + def __init__(self, conn: Redis[bytes]) -> None: + self.conn = conn + + def get(self, key: str) -> bytes | None: + return self.conn.get(key) + + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: + if not expires: + self.conn.set(key, value) + elif isinstance(expires, datetime): + now_utc = datetime.now(timezone.utc) + if expires.tzinfo is None: + now_utc = now_utc.replace(tzinfo=None) + delta = expires - now_utc + self.conn.setex(key, int(delta.total_seconds()), value) + else: + self.conn.setex(key, expires, value) + + def delete(self, key: str) -> None: + self.conn.delete(key) + + def clear(self) -> None: + """Helper for clearing all the keys in a database. Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self) -> None: + """Redis uses connection pooling, no need to close the connection.""" + pass diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..d92d991 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,511 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +""" +The httplib2 algorithms ported for use with requests. +""" + +from __future__ import annotations + +import calendar +import logging +import re +import time +import weakref +from email.utils import parsedate_tz +from typing import TYPE_CHECKING, Collection, Mapping + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.serialize import Serializer + +if TYPE_CHECKING: + from typing import Literal + + from pip._vendor.requests import PreparedRequest + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +PERMANENT_REDIRECT_STATUSES = (301, 308) + + +def parse_uri(uri: str) -> tuple[str, str, str, str, str]: + """Parses a URI using the regex given in Appendix B of RFC 3986. 
+
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    match = URI.match(uri)
+    assert match is not None
+    groups = match.groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController:
+    """An interface to see if a request should be cached or not."""
+
+    def __init__(
+        self,
+        cache: BaseCache | None = None,
+        cache_etags: bool = True,
+        serializer: Serializer | None = None,
+        status_codes: Collection[int] | None = None,
+    ):
+        self.cache = DictCache() if cache is None else cache
+        self.cache_etags = cache_etags
+        self.serializer = serializer or Serializer()
+        self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
+
+    @classmethod
+    def _urlnorm(cls, uri: str) -> str:
+        """Normalize the URL to create a safe key for the cache"""
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        if not scheme or not authority:
+            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+        scheme = scheme.lower()
+        authority = authority.lower()
+
+        if not path:
+            path = "/"
+
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        return defrag_uri
+
+    @classmethod
+    def cache_url(cls, uri: str) -> str:
+        return cls._urlnorm(uri)
+
+    def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
+        known_directives = {
+            # https://tools.ietf.org/html/rfc7234#section-5.2
+            "max-age": (int, True),
+            "max-stale": (int, False),
+            "min-fresh": (int, True),
+            "no-cache": (None, False),
+            "no-store": (None, False),
+            "no-transform": (None, False),
+            "only-if-cached": (None, False),
+            "must-revalidate": (None, False),
+            "public": (None, False),
+            "private": (None, False),
+            "proxy-revalidate": (None, False),
+            "s-maxage": (int, True),
+        }
+
+        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+        retval: dict[str, int | None] = {}
+
+        for cc_directive in cc_headers.split(","):
+            if not cc_directive.strip():
+                continue
+
+            parts = cc_directive.split("=", 1)
+            directive = parts[0].strip()
+
+            try:
+                typ, required = known_directives[directive]
+            except KeyError:
+                logger.debug("Ignoring unknown cache-control directive: %s", directive)
+                continue
+
+            if not typ or not required:
+                retval[directive] = None
+            if typ:
+                try:
+                    retval[directive] = typ(parts[1].strip())
+                except IndexError:
+                    if required:
+                        logger.debug(
+                            "Missing value for cache-control directive: %s",
+                            directive,
+                        )
+                except ValueError:
+                    logger.debug(
+                        "Invalid value for cache-control directive %s, must be %s",
+                        directive,
+                        typ.__name__,
+                    )
+
+        return retval
+
+    def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
+        """
+        Load a cached response, or return None if it's not available.
+        """
+        # We do not support caching of partial content: so if the request contains a
+        # Range header then we don't want to load anything from the cache.
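+        # For example (illustrative), a client resuming a download with
+        #     request.headers["Range"] == "bytes=1024-"
+        # always goes to the network; partial (206) responses are likewise
+        # absent from the default cacheable status codes in cache_response().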
+ if "Range" in request.headers: + return None + + cache_url = request.url + assert cache_url is not None + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return None + + if isinstance(self.cache, SeparateBodyBaseCache): + body_file = self.cache.get_body(cache_url) + else: + body_file = None + + result = self.serializer.loads(request, cache_data, body_file) + if result is None: + logger.warning("Cache entry deserialization failed, entry ignored") + return result + + def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]: + """ + Return a cached response if it exists in the cache, otherwise + return False. + """ + assert request.url is not None + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if "no-cache" in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if "max-age" in cc and cc["max-age"] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Check whether we can load the response from the cache: + resp = self._load_from_cache(request) + if not resp: + return False + + # If we have a cached permanent redirect, return it immediately. We + # don't need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. + # + # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if int(resp.status) in PERMANENT_REDIRECT_STATUSES: + msg = ( + "Returning cached permanent redirect response " + "(ignoring date and etag information)" + ) + logger.debug(msg) + return resp + + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) + if not headers or "date" not in headers: + if "etag" not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. + logger.debug("Purging cached response: no date or etag") + self.cache.delete(cache_url) + logger.debug("Ignoring cached response: no date") + return False + + now = time.time() + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) + current_age = max(0, now - date) + logger.debug("Current age based on date: %i", current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + max_age = resp_cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age + logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif "expires" in headers: + expires = parsedate_tz(headers["expires"]) + if expires is not None: + expire_time = calendar.timegm(expires[:6]) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. 
+ max_age = cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age + logger.debug( + "Freshness lifetime from request max-age: %i", freshness_lifetime + ) + + min_fresh = cc.get("min-fresh") + if min_fresh is not None: + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug("Adjusted current age from min-fresh: %i", current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug("%i > %i", freshness_lifetime, current_age) + return resp + + # we're not fresh. If we don't have an Etag, clear it out + if "etag" not in headers: + logger.debug('The cached response is "stale" with no etag, purging') + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request: PreparedRequest) -> dict[str, str]: + resp = self._load_from_cache(request) + new_headers = {} + + if resp: + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) + + if "etag" in headers: + new_headers["If-None-Match"] = headers["ETag"] + + if "last-modified" in headers: + new_headers["If-Modified-Since"] = headers["Last-Modified"] + + return new_headers + + def _cache_set( + self, + cache_url: str, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + expires_time: int | None = None, + ) -> None: + """ + Store the data in the cache. + """ + if isinstance(self.cache, SeparateBodyBaseCache): + # We pass in the body separately; just put a placeholder empty + # string in the metadata. + self.cache.set( + cache_url, + self.serializer.dumps(request, response, b""), + expires=expires_time, + ) + # body is None can happen when, for example, we're only updating + # headers, as is the case in update_cached_response(). + if body is not None: + self.cache.set_body(cache_url, body) + else: + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body), + expires=expires_time, + ) + + def cache_response( + self, + request: PreparedRequest, + response_or_ref: HTTPResponse | weakref.ReferenceType[HTTPResponse], + body: bytes | None = None, + status_codes: Collection[int] | None = None, + ) -> None: + """ + Algorithm for caching requests. + + This assumes a requests Response object. + """ + if isinstance(response_or_ref, weakref.ReferenceType): + response = response_or_ref() + if response is None: + # The weakref can be None only in case the user used streamed request + # and did not consume or close it, and holds no reference to requests.Response. + # In such case, we don't want to cache the response. + return + else: + response = response_or_ref + + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = status_codes or self.cacheable_status_codes + if response.status not in cacheable_status_codes: + logger.debug( + "Status code %s not in %s", response.status, cacheable_status_codes + ) + return + + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) + + if "date" in response_headers: + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) + else: + date = 0 + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. 
+        if (
+            body is not None
+            and "content-length" in response_headers
+            and response_headers["content-length"].isdigit()
+            and int(response_headers["content-length"]) != len(body)
+        ):
+            return
+
+        cc_req = self.parse_cache_control(request.headers)
+        cc = self.parse_cache_control(response_headers)
+
+        assert request.url is not None
+        cache_url = self.cache_url(request.url)
+        logger.debug('Updating cache with response from "%s"', cache_url)
+
+        # Delete it from the cache if we happen to have it stored there
+        no_store = False
+        if "no-store" in cc:
+            no_store = True
+            logger.debug('Response header has "no-store"')
+        if "no-store" in cc_req:
+            no_store = True
+            logger.debug('Request header has "no-store"')
+        if no_store and self.cache.get(cache_url):
+            logger.debug('Purging existing cache entry to honor "no-store"')
+            self.cache.delete(cache_url)
+        if no_store:
+            return
+
+        # https://tools.ietf.org/html/rfc7234#section-4.1:
+        # A Vary header field-value of "*" always fails to match.
+        # Storing such a response leads to a deserialization warning
+        # during cache lookup and is not allowed to ever be served,
+        # so storing it can be avoided.
+        if "*" in response_headers.get("vary", ""):
+            logger.debug('Response header has "Vary: *"')
+            return
+
+        # If we've been given an etag, then keep the response
+        if self.cache_etags and "etag" in response_headers:
+            expires_time = 0
+            if response_headers.get("expires"):
+                expires = parsedate_tz(response_headers["expires"])
+                if expires is not None:
+                    expires_time = calendar.timegm(expires[:6]) - date
+
+            expires_time = max(expires_time, 14 * 86400)
+
+            logger.debug(f"etag object cached for {expires_time} seconds")
+            logger.debug("Caching due to etag")
+            self._cache_set(cache_url, request, response, body, expires_time)
+
+        # Add to the cache any permanent redirects. We do this before looking
+        # at the Date headers.
+        elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+            logger.debug("Caching permanent redirect")
+            self._cache_set(cache_url, request, response, b"")
+
+        # Add to the cache if the response headers demand it. If there
+        # is no date header then we can't do anything about expiring
+        # the cache.
+        elif "date" in response_headers:
+            time_tuple = parsedate_tz(response_headers["date"])
+            assert time_tuple is not None
+            date = calendar.timegm(time_tuple[:6])
+            # cache when there is a max-age > 0
+            max_age = cc.get("max-age")
+            if max_age is not None and max_age > 0:
+                logger.debug("Caching b/c date exists and max-age > 0")
+                expires_time = max_age
+                self._cache_set(
+                    cache_url,
+                    request,
+                    response,
+                    body,
+                    expires_time,
+                )
+
+            # If the request can expire, it means we should cache it
+            # in the meantime.
+            elif "expires" in response_headers:
+                if response_headers["expires"]:
+                    expires = parsedate_tz(response_headers["expires"])
+                    if expires is not None:
+                        expires_time = calendar.timegm(expires[:6]) - date
+                    else:
+                        expires_time = None
+
+                    logger.debug(
+                        "Caching b/c of expires header. expires in {} seconds".format(
+                            expires_time
+                        )
+                    )
+                    self._cache_set(
+                        cache_url,
+                        request,
+                        response,
+                        body,
+                        expires_time,
+                    )
+
+    def update_cached_response(
+        self, request: PreparedRequest, response: HTTPResponse
+    ) -> HTTPResponse:
+        """On a 304 we will get a new set of headers that we want to
+        update our cached value with, assuming we have one.
+
+        This should only ever be called when we've sent an ETag and
+        gotten a 304 as the response.
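+
+        Illustrative flow: a request that carried ``If-None-Match: "abc123"``
+        comes back 304; the cached entry's headers are refreshed from the
+        304, its status is rewritten to 200, and the entry is stored again.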
+ """ + assert request.url is not None + cache_url = self.cache_url(request.url) + cached_response = self._load_from_cache(request) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. + excluded_headers = ["content-length"] + + cached_response.headers.update( + { + k: v + for k, v in response.headers.items() + if k.lower() not in excluded_headers + } + ) + + # we want a 200 b/c we have content via the cache + cached_response.status = 200 + + # update our cache + self._cache_set(cache_url, request, cached_response) + + return cached_response diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 0000000..37d2fa5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/filewrapper.py @@ -0,0 +1,119 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import mmap +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Callable + +if TYPE_CHECKING: + from http.client import HTTPResponse + + +class CallbackFileWrapper: + """ + Small wrapper around a fp object which will tee everything read into a + buffer, and when that file is closed it will execute a callback with the + contents of that buffer. + + All attributes are proxied to the underlying file object. + + This class uses members with a double underscore (__) leading prefix so as + not to accidentally shadow an attribute. + + The data is stored in a temporary file until it is all available. As long + as the temporary files directory is disk-based (sometimes it's a + memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory + pressure is high. For small files the disk usually won't be used at all, + it'll all be in the filesystem memory cache, so there should be no + performance impact. + """ + + def __init__( + self, fp: HTTPResponse, callback: Callable[[bytes], None] | None + ) -> None: + self.__buf = NamedTemporaryFile("rb+", delete=True) + self.__fp = fp + self.__callback = callback + + def __getattr__(self, name: str) -> Any: + # The vagaries of garbage collection means that self.__fp is + # not always set. By using __getattribute__ and the private + # name[0] allows looking up the attribute value and raising an + # AttributeError when it doesn't exist. This stop things from + # infinitely recursing calls to getattr in the case where + # self.__fp hasn't been set. + # + # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers + fp = self.__getattribute__("_CallbackFileWrapper__fp") + return getattr(fp, name) + + def __is_fp_closed(self) -> bool: + try: + return self.__fp.fp is None + + except AttributeError: + pass + + try: + closed: bool = self.__fp.closed + return closed + + except AttributeError: + pass + + # We just don't cache it then. + # TODO: Add some logging here... 
+        return False
+
+    def _close(self) -> None:
+        if self.__callback:
+            if self.__buf.tell() == 0:
+                # Empty file:
+                result = b""
+            else:
+                # Return the data without actually loading it into memory,
+                # relying on Python's buffer API and mmap(). mmap() just gives
+                # a view directly into the filesystem's memory cache, so it
+                # doesn't result in duplicate memory use.
+                self.__buf.seek(0, 0)
+                result = memoryview(
+                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
+                )
+            self.__callback(result)
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter dead locks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+        self.__callback = None
+
+        # Closing the temporary file releases memory and frees disk space.
+        # Important when caching big files.
+        self.__buf.close()
+
+    def read(self, amt: int | None = None) -> bytes:
+        data: bytes = self.__fp.read(amt)
+        if data:
+            # We may be dealing with b'', a sign that things are over:
+            # it's passed e.g. after we've already closed self.__buf.
+            self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
+
+    def _safe_read(self, amt: int) -> bytes:
+        data: bytes = self.__fp._safe_read(amt)  # type: ignore[attr-defined]
+        if amt == 2 and data == b"\r\n":
+            # urllib executes this read to toss the CRLF at the end
+            # of the chunk.
+            return data
+
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 0000000..b778c4f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,157 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import calendar
+import time
+from datetime import datetime, timedelta, timezone
+from email.utils import formatdate, parsedate, parsedate_tz
+from typing import TYPE_CHECKING, Any, Mapping
+
+if TYPE_CHECKING:
+    from pip._vendor.urllib3 import HTTPResponse
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta: timedelta, date: datetime | None = None) -> datetime:
+    date = date or datetime.now(timezone.utc)
+    return date + delta
+
+
+def datetime_to_header(dt: datetime) -> str:
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic:
+    def warning(self, response: HTTPResponse) -> str | None:
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+        signify that the response was cached by the client, not
+        by way of the provided headers.
+ """ + return {} + + def apply(self, response: HTTPResponse) -> HTTPResponse: + updated_headers = self.update_headers(response) + + if updated_headers: + response.headers.update(updated_headers) + warning_header_value = self.warning(response) + if warning_header_value is not None: + response.headers.update({"Warning": warning_header_value}) + + return response + + +class OneDayCache(BaseHeuristic): + """ + Cache the response by providing an expires 1 day in the + future. + """ + + def update_headers(self, response: HTTPResponse) -> dict[str, str]: + headers = {} + + if "expires" not in response.headers: + date = parsedate(response.headers["date"]) + expires = expire_after( + timedelta(days=1), + date=datetime(*date[:6], tzinfo=timezone.utc), # type: ignore[index,misc] + ) + headers["expires"] = datetime_to_header(expires) + headers["cache-control"] = "public" + return headers + + +class ExpiresAfter(BaseHeuristic): + """ + Cache **all** requests for a defined time period. + """ + + def __init__(self, **kw: Any) -> None: + self.delta = timedelta(**kw) + + def update_headers(self, response: HTTPResponse) -> dict[str, str]: + expires = expire_after(self.delta) + return {"expires": datetime_to_header(expires), "cache-control": "public"} + + def warning(self, response: HTTPResponse) -> str | None: + tmpl = "110 - Automatically cached for %s. Response might be stale" + return tmpl % self.delta + + +class LastModified(BaseHeuristic): + """ + If there is no Expires header already, fall back on Last-Modified + using the heuristic from + http://tools.ietf.org/html/rfc7234#section-4.2.2 + to calculate a reasonable value. + + Firefox also does something like this per + https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ + http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 + Unlike mozilla we limit this to 24-hr. 
+ """ + + cacheable_by_default_statuses = { + 200, + 203, + 204, + 206, + 300, + 301, + 404, + 405, + 410, + 414, + 501, + } + + def update_headers(self, resp: HTTPResponse) -> dict[str, str]: + headers: Mapping[str, str] = resp.headers + + if "expires" in headers: + return {} + + if "cache-control" in headers and headers["cache-control"] != "public": + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if "date" not in headers or "last-modified" not in headers: + return {} + + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) + last_modified = parsedate(headers["last-modified"]) + if last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp: HTTPResponse) -> str | None: + return None diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..a49487a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,146 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import io +from typing import IO, TYPE_CHECKING, Any, Mapping, cast + +from pip._vendor import msgpack +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3 import HTTPResponse + +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest + + +class Serializer: + serde_version = "4" + + def dumps( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + ) -> bytes: + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) + + if body is None: + # When a body isn't passed in, we'll read the response. We + # also update the response with a new file handler to be + # sure it acts as though it was never read. 
+ body = response.read(decode_content=False) + response._fp = io.BytesIO(body) # type: ignore[assignment] + response.length_remaining = len(body) + + data = { + "response": { + "body": body, # Empty bytestring if body is stored separately + "headers": {str(k): str(v) for k, v in response.headers.items()}, + "status": response.status, + "version": response.version, + "reason": str(response.reason), + "decode_content": response.decode_content, + } + } + + # Construct our vary headers + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers["vary"].split(",") + for header in varied_headers: + header = str(header).strip() + header_value = request.headers.get(header, None) + if header_value is not None: + header_value = str(header_value) + data["vary"][header] = header_value + + return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)]) + + def serialize(self, data: dict[str, Any]) -> bytes: + return cast(bytes, msgpack.dumps(data, use_bin_type=True)) + + def loads( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # Short circuit if we've been given an empty set of data + if not data: + return None + + # Previous versions of this library supported other serialization + # formats, but these have all been removed. + if not data.startswith(f"cc={self.serde_version},".encode()): + return None + + data = data[5:] + return self._loads_v4(request, data, body_file) + + def prepare_response( + self, + request: PreparedRequest, + cached: Mapping[str, Any], + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. + if "*" in cached.get("vary", {}): + return None + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return None + + body_raw = cached["response"].pop("body") + + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + data=cached["response"]["headers"] + ) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") + + cached["response"]["headers"] = headers + + try: + body: IO[bytes] + if body_file is None: + body = io.BytesIO(body_raw) + else: + body = body_file + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode("utf8")) + + # Discard any `strict` parameter serialized by older version of cachecontrol. 
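+        # (``strict`` was an HTTPResponse constructor argument in urllib3 1.x;
+        # urllib3 2.x removed it, so passing it through to
+        # HTTPResponse(**cached["response"]) below would raise TypeError.)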
+ cached["response"].pop("strict", None) + + return HTTPResponse(body=body, preload_content=False, **cached["response"]) + + def _loads_v4( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + try: + cached = msgpack.loads(data, raw=False) + except ValueError: + return None + + return self.prepare_response(request, cached, body_file) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..f618bc3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,43 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache + +if TYPE_CHECKING: + from pip._vendor import requests + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.controller import CacheController + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer + + +def CacheControl( + sess: requests.Session, + cache: BaseCache | None = None, + cache_etags: bool = True, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + controller_class: type[CacheController] | None = None, + adapter_class: type[CacheControlAdapter] | None = None, + cacheable_methods: Collection[str] | None = None, +) -> requests.Session: + cache = DictCache() if cache is None else cache + adapter_class = adapter_class or CacheControlAdapter + adapter = adapter_class( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + controller_class=controller_class, + cacheable_methods=cacheable_methods, + ) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + return sess diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. 
+ +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py new file mode 100644 index 0000000..c4b6c0b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2025.10.05" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py new file mode 100644 index 0000000..0037634 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from pip._vendor.certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3911d88 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..7ec7e6a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..8220f8b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem new file mode 100644 index 0000000..0b68ebf --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/cacert.pem @@ -0,0 +1,4800 @@ + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud 
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s 
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR 
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd 
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 
3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END 
CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli
+awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u
++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88
+X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN
+SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI
++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz
+znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2
+YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy
+r/6zcCwupvI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA2"
+# Serial: 58605626836079930195615843123109055211
+# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c
+# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6
+# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82
+-----BEGIN CERTIFICATE-----
+MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw
+CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ
+VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy
+MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS
+b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B
+IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+
++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK
+sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
+94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
+43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root E46"
+# Serial: 88989738453351742415770396670917916916
+# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
+# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
+# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
+CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
+ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
+MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
+A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
+ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
+WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
+6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
+Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
+qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
+4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root R46"
+# Serial: 156256931880233212765902055439220583700
+# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
+# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
+# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
+-----BEGIN CERTIFICATE-----
+MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
+Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
+HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
+MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
+YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
+ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
+SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
+iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
+ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
+IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
+VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
+SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
+8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
+zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
+AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
+mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
+gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
+JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
+DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
+TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
+dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
+LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
+0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
+QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS RSA Root CA 2022"
+# Serial: 148535279242832292258835760425842727825
+# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
+# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
+# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
+-----BEGIN CERTIFICATE-----
+MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
+MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
+DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
+DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
+b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
+AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
+L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
+t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE
+NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0
+kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C
+rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz
+hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2
+LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs
+n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku
+FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5
+kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3
+wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v
+wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs
+5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ
+KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB
+KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3
++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme
+APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq
+pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT
+6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF
+sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt
+PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d
+lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670
+v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O
+rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS ECC Root 2020"
+# Serial: 72082518505882327255703894282316633856
+# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
+# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
+# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
+-----BEGIN CERTIFICATE-----
+MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
+CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
+bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
+MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
+JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
+AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
+tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
+f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
+MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
+z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
+27iQ7t0l
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS RSA Root 2023"
+# Serial: 44676229530606711399881795178081572759
+# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
+# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
+# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
+MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
+eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
+MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
+REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
+cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
+cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
+U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
+Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
+BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
+8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
+co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
+8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
+rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
+mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
+gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
+p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
+pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
+9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
+M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
+GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
+CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
+xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
+w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
+L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
+X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
+ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
+dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+-----BEGIN CERTIFICATE-----
+MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
+e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
+cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
+BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
+PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Label: "TrustAsia TLS ECC Root CA" +# Serial: 310892014698942880364840003424242768478804666567 +# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c +# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 +# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 +-----BEGIN CERTIFICATE----- +MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw +WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw +NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE +ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB +c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ +AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp +guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw +DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 +L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR +OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia TLS RSA Root CA" +# Serial: 160405846464868906657516898462547310235378010780 +# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 +# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa +# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 +-----BEGIN CERTIFICATE----- +MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM +BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN +MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG +A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 +c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC +AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ +NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ +Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 +HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 +ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb +xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX +i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ +UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j +TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT +bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 +S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 +Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 +iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt +7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp +2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ +g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj +pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M +pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP +XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe +SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 +ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy +323imttUQ/hHWKNddBWcwauwxzQ= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
+RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Label: "SwissSign RSA TLS Root CA 2022 - 1" +# Serial: 388078645722908516278762308316089881486363258315 +# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 +# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce +# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 +-----BEGIN CERTIFICATE----- +MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE +AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx +MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT +d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg +MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX +vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 +LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX +5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE +EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt +/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x +0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 +KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM +0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd +OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta +clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK +wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 +DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL +BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 +10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz +Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ +iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc +gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM +ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
+LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp +zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td +Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 +rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO +gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root ECC G1 O=OISTE Foundation +# Label: "OISTE Server Root ECC G1" +# Serial: 47819833811561661340092227008453318557 +# MD5 Fingerprint: 42:a7:d2:35:ae:02:92:db:19:76:08:de:2f:05:b4:d4 +# SHA1 Fingerprint: 3b:f6:8b:09:ae:2a:92:7b:ba:e3:8d:3f:11:95:d9:e6:44:0c:45:e2 +# SHA256 Fingerprint: ee:c9:97:c0:c3:0f:21:6f:7e:3b:8b:30:7d:2b:ae:42:41:2d:75:3f:c8:21:9d:af:d1:52:0b:25:72:85:0f:49 +-----BEGIN CERTIFICATE----- +MIICNTCCAbqgAwIBAgIQI/nD1jWvjyhLH/BU6n6XnTAKBggqhkjOPQQDAzBLMQsw +CQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UEAwwY +T0lTVEUgU2VydmVyIFJvb3QgRUNDIEcxMB4XDTIzMDUzMTE0NDIyOFoXDTQ4MDUy +NDE0NDIyN1owSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5kYXRp +b24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IEVDQyBHMTB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABBcv+hK8rBjzCvRE1nZCnrPoH7d5qVi2+GXROiFPqOujvqQy +cvO2Ackr/XeFblPdreqqLiWStukhEaivtUwL85Zgmjvn6hp4LrQ95SjeHIC6XG4N +2xml4z+cKrhAS93mT6NjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBQ3 +TYhlz/w9itWj8UnATgwQb0K0nDAdBgNVHQ4EFgQUN02IZc/8PYrVo/FJwE4MEG9C +tJwwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCpKjAd0MKfkFFR +QD6VVCHNFmb3U2wIFjnQEnx/Yxvf4zgAOdktUyBFCxxgZzFDJe0CMQCSia7pXGKD +YmH5LVerVrkR3SW+ak5KGoJr3M/TvEqzPNcum9v4KGm8ay3sMaE641c= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Subject: CN=OISTE Server Root RSA G1 O=OISTE Foundation +# Label: " OISTE Server Root RSA G1" +# Serial: 113845518112613905024960613408179309848 +# MD5 Fingerprint: 23:a7:9e:d4:70:b8:b9:14:57:41:8a:7e:44:59:e2:68 +# SHA1 Fingerprint: f7:00:34:25:94:88:68:31:e4:34:87:3f:70:fe:86:b3:86:9f:f0:6e +# SHA256 Fingerprint: 9a:e3:62:32:a5:18:9f:fd:db:35:3d:fd:26:52:0c:01:53:95:d2:27:77:da:c5:9d:b5:7b:98:c0:89:a6:51:e6 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIQVaXZZ5Qoxu0M+ifdWwFNGDANBgkqhkiG9w0BAQwFADBL +MQswCQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UE +AwwYT0lTVEUgU2VydmVyIFJvb3QgUlNBIEcxMB4XDTIzMDUzMTE0MzcxNloXDTQ4 +MDUyNDE0MzcxNVowSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5k +YXRpb24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IFJTQSBHMTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAKqu9KuCz/vlNwvn1ZatkOhLKdxVYOPM +vLO8LZK55KN68YG0nnJyQ98/qwsmtO57Gmn7KNByXEptaZnwYx4M0rH/1ow00O7b +rEi56rAUjtgHqSSY3ekJvqgiG1k50SeH3BzN+Puz6+mTeO0Pzjd8JnduodgsIUzk +ik/HEzxux9UTl7Ko2yRpg1bTacuCErudG/L4NPKYKyqOBGf244ehHa1uzjZ0Dl4z +O8vbUZeUapU8zhhabkvG/AePLhq5SvdkNCncpo1Q4Y2LS+VIG24ugBA/5J8bZT8R +tOpXaZ+0AOuFJJkk9SGdl6r7NH8CaxWQrbueWhl/pIzY+m0o/DjH40ytas7ZTpOS +jswMZ78LS5bOZmdTaMsXEY5Z96ycG7mOaES3GK/m5Q9l3JUJsJMStR8+lKXHiHUh +sd4JJCpM4rzsTGdHwimIuQq6+cF0zowYJmXa92/GjHtoXAvuY8BeS/FOzJ8vD+Ho +mnqT8eDI278n5mUpezbgMxVz8p1rhAhoKzYHKyfMeNhqhw5HdPSqoBNdZH702xSu ++zrkL8Fl47l6QGzwBrd7KJvX4V84c5Ss2XCTLdyEr0YconosP4EmQufU2MVshGYR +i3drVByjtdgQ8K4p92cIiBdcuJd5z+orKu5YM+Vt6SmqZQENghPsJQtdLEByFSnT +kCz3GkPVavBpAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU +8snBDw1jALvsRQ5KH7WxszbNDo0wHQYDVR0OBBYEFPLJwQ8NYwC77EUOSh+1sbM2 +zQ6NMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQwFAAOCAgEANGd5sjrG5T33 +I3K5Ce+SrScfoE4KsvXaFwyihdJ+klH9FWXXXGtkFu6KRcoMQzZENdl//nk6HOjG +5D1rd9QhEOP28yBOqb6J8xycqd+8MDoX0TJD0KqKchxRKEzdNsjkLWd9kYccnbz8 
+qyiWXmFcuCIzGEgWUOrKL+mlSdx/PKQZvDatkuK59EvV6wit53j+F8Bdh3foZ3dP +AGav9LEDOr4SfEE15fSmG0eLy3n31r8Xbk5l8PjaV8GUgeV6Vg27Rn9vkf195hfk +gSe7BYhW3SCl95gtkRlpMV+bMPKZrXJAlszYd2abtNUOshD+FKrDgHGdPY3ofRRs +YWSGRqbXVMW215AWRqWFyp464+YTFrYVI8ypKVL9AMb2kI5Wj4kI3Zaq5tNqqYY1 +9tVFeEJKRvwDyF7YZvZFZSS0vod7VSCd9521Kvy5YhnLbDuv0204bKt7ph6N/Ome +/msVuduCmsuY33OhkKCgxeDoAaijFJzIwZqsFVAzje18KotzlUBDJvyBpCpfOZC3 +J8tRd/iWkx7P8nd9H0aTolkelUTFLXVksNb54Dxp6gS1HAviRkRNQzuXSXERvSS2 +wq1yVAb+axj5d9spLFKebXd7Yv0PTY6YMjAwcRLWJTXjn/hvnLXrahut6hDTlhZy +BiElxky8j3C7DOReIoMt0r7+hVu05L0= +-----END CERTIFICATE----- diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py new file mode 100644 index 0000000..2f2f7e0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/core.py @@ -0,0 +1,83 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +else: + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. 
+ # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/certifi/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/LICENSE.txt b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/LICENSE.txt new file mode 100644 index 0000000..b9723b8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/LICENSE.txt @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2024-present Stephen Rosen + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__init__.py new file mode 100644 index 0000000..9fec202 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__init__.py @@ -0,0 +1,13 @@ +from ._implementation import ( + CyclicDependencyError, + DependencyGroupInclude, + DependencyGroupResolver, + resolve, +) + +__all__ = ( + "CyclicDependencyError", + "DependencyGroupInclude", + "DependencyGroupResolver", + "resolve", +) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__main__.py b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__main__.py new file mode 100644 index 0000000..48ebb0d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__main__.py @@ -0,0 +1,65 @@ +import argparse +import sys + +from ._implementation import resolve +from ._toml_compat import tomllib + + +def main() -> None: + if tomllib is None: + print( + "Usage error: dependency-groups CLI requires tomli or Python 3.11+", + file=sys.stderr, + ) + raise SystemExit(2) + + parser = argparse.ArgumentParser( + description=( + "A dependency-groups CLI. Prints out a resolved group, newline-delimited." + ) + ) + parser.add_argument( + "GROUP_NAME", nargs="*", help="The dependency group(s) to resolve." 
+ ) + parser.add_argument( + "-f", + "--pyproject-file", + default="pyproject.toml", + help="The pyproject.toml file. Defaults to trying in the current directory.", + ) + parser.add_argument( + "-o", + "--output", + help="An output file. Defaults to stdout.", + ) + parser.add_argument( + "-l", + "--list", + action="store_true", + help="List the available dependency groups", + ) + args = parser.parse_args() + + with open(args.pyproject_file, "rb") as fp: + pyproject = tomllib.load(fp) + + dependency_groups_raw = pyproject.get("dependency-groups", {}) + + if args.list: + print(*dependency_groups_raw.keys()) + return + if not args.GROUP_NAME: + print("A GROUP_NAME is required", file=sys.stderr) + raise SystemExit(3) + + content = "\n".join(resolve(dependency_groups_raw, *args.GROUP_NAME)) + + if args.output is None or args.output == "-": + print(content) + else: + with open(args.output, "w", encoding="utf-8") as fp: + print(content, file=fp) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6e2ff13 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..37e6310 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc new file mode 100644 index 0000000..2ece701 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_implementation.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc new file mode 100644 index 0000000..5ace96b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_lint_dependency_groups.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc new file mode 100644 index 0000000..dba39c1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_pip_wrapper.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc new file mode 100644 index 0000000..47ed5c2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/__pycache__/_toml_compat.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_implementation.py b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_implementation.py new file mode 100644 index 0000000..64e314a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_implementation.py @@ -0,0 +1,209 @@ +from __future__ import annotations + +import dataclasses +import re +from collections.abc import Mapping + +from pip._vendor.packaging.requirements import Requirement + + +def _normalize_name(name: str) -> str: + return re.sub(r"[-_.]+", "-", name).lower() + + +def _normalize_group_names( + dependency_groups: Mapping[str, str | Mapping[str, str]], +) -> Mapping[str, str | Mapping[str, str]]: + original_names: dict[str, list[str]] = {} + normalized_groups = {} + + for group_name, value in dependency_groups.items(): + normed_group_name = _normalize_name(group_name) + original_names.setdefault(normed_group_name, []).append(group_name) + normalized_groups[normed_group_name] = value + + errors = [] + for normed_name, names in original_names.items(): + if len(names) > 1: + errors.append(f"{normed_name} ({', '.join(names)})") + if errors: + raise ValueError(f"Duplicate dependency group names: {', '.join(errors)}") + + return normalized_groups + + +@dataclasses.dataclass +class DependencyGroupInclude: + include_group: str + + +class CyclicDependencyError(ValueError): + """ + An error representing the detection of a cycle. + """ + + def __init__(self, requested_group: str, group: str, include_group: str) -> None: + self.requested_group = requested_group + self.group = group + self.include_group = include_group + + if include_group == group: + reason = f"{group} includes itself" + else: + reason = f"{include_group} -> {group}, {group} -> {include_group}" + super().__init__( + "Cyclic dependency group include while resolving " + f"{requested_group}: {reason}" + ) + + +class DependencyGroupResolver: + """ + A resolver for Dependency Group data. + + This class handles caching, name normalization, cycle detection, and other + parsing requirements. There are only two public methods for exploring the data: + ``lookup()`` and ``resolve()``. + + :param dependency_groups: A mapping, as provided via pyproject + ``[dependency-groups]``. + """ + + def __init__( + self, + dependency_groups: Mapping[str, str | Mapping[str, str]], + ) -> None: + if not isinstance(dependency_groups, Mapping): + raise TypeError("Dependency Groups table is not a mapping") + self.dependency_groups = _normalize_group_names(dependency_groups) + # a map of group names to parsed data + self._parsed_groups: dict[ + str, tuple[Requirement | DependencyGroupInclude, ...] + ] = {} + # a map of group names to their ancestors, used for cycle detection + self._include_graph_ancestors: dict[str, tuple[str, ...]] = {} + # a cache of completed resolutions to Requirement lists + self._resolve_cache: dict[str, tuple[Requirement, ...]] = {} + + def lookup(self, group: str) -> tuple[Requirement | DependencyGroupInclude, ...]: + """ + Lookup a group name, returning the parsed dependency data for that group. + This will not resolve includes. 
+ + :param group: the name of the group to lookup + + :raises ValueError: if the data does not appear to be valid dependency group + data + :raises TypeError: if the data is not a string + :raises LookupError: if group name is absent + :raises packaging.requirements.InvalidRequirement: if a specifier is not valid + """ + if not isinstance(group, str): + raise TypeError("Dependency group name is not a str") + group = _normalize_name(group) + return self._parse_group(group) + + def resolve(self, group: str) -> tuple[Requirement, ...]: + """ + Resolve a dependency group to a list of requirements. + + :param group: the name of the group to resolve + + :raises TypeError: if the inputs appear to be the wrong types + :raises ValueError: if the data does not appear to be valid dependency group + data + :raises LookupError: if group name is absent + :raises packaging.requirements.InvalidRequirement: if a specifier is not valid + """ + if not isinstance(group, str): + raise TypeError("Dependency group name is not a str") + group = _normalize_name(group) + return self._resolve(group, group) + + def _parse_group( + self, group: str + ) -> tuple[Requirement | DependencyGroupInclude, ...]: + # short circuit -- never do the work twice + if group in self._parsed_groups: + return self._parsed_groups[group] + + if group not in self.dependency_groups: + raise LookupError(f"Dependency group '{group}' not found") + + raw_group = self.dependency_groups[group] + if not isinstance(raw_group, list): + raise TypeError(f"Dependency group '{group}' is not a list") + + elements: list[Requirement | DependencyGroupInclude] = [] + for item in raw_group: + if isinstance(item, str): + # packaging.requirements.Requirement parsing ensures that this is a + # valid PEP 508 Dependency Specifier + # raises InvalidRequirement on failure + elements.append(Requirement(item)) + elif isinstance(item, dict): + if tuple(item.keys()) != ("include-group",): + raise ValueError(f"Invalid dependency group item: {item}") + + include_group = next(iter(item.values())) + elements.append(DependencyGroupInclude(include_group=include_group)) + else: + raise ValueError(f"Invalid dependency group item: {item}") + + self._parsed_groups[group] = tuple(elements) + return self._parsed_groups[group] + + def _resolve(self, group: str, requested_group: str) -> tuple[Requirement, ...]: + """ + This is a helper for cached resolution to strings. + + :param group: The name of the group to resolve. + :param requested_group: The group which was used in the original, user-facing + request. 
+ """ + if group in self._resolve_cache: + return self._resolve_cache[group] + + parsed = self._parse_group(group) + + resolved_group = [] + for item in parsed: + if isinstance(item, Requirement): + resolved_group.append(item) + elif isinstance(item, DependencyGroupInclude): + include_group = _normalize_name(item.include_group) + if include_group in self._include_graph_ancestors.get(group, ()): + raise CyclicDependencyError( + requested_group, group, item.include_group + ) + self._include_graph_ancestors[include_group] = ( + *self._include_graph_ancestors.get(group, ()), + group, + ) + resolved_group.extend(self._resolve(include_group, requested_group)) + else: # unreachable + raise NotImplementedError( + f"Invalid dependency group item after parse: {item}" + ) + + self._resolve_cache[group] = tuple(resolved_group) + return self._resolve_cache[group] + + +def resolve( + dependency_groups: Mapping[str, str | Mapping[str, str]], /, *groups: str +) -> tuple[str, ...]: + """ + Resolve a dependency group to a tuple of requirements, as strings. + + :param dependency_groups: the parsed contents of the ``[dependency-groups]`` table + from ``pyproject.toml`` + :param groups: the name of the group(s) to resolve + + :raises TypeError: if the inputs appear to be the wrong types + :raises ValueError: if the data does not appear to be valid dependency group data + :raises LookupError: if group name is absent + :raises packaging.requirements.InvalidRequirement: if a specifier is not valid + """ + resolver = DependencyGroupResolver(dependency_groups) + return tuple(str(r) for group in groups for r in resolver.resolve(group)) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_lint_dependency_groups.py b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_lint_dependency_groups.py new file mode 100644 index 0000000..09454bd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_lint_dependency_groups.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import argparse +import sys + +from ._implementation import DependencyGroupResolver +from ._toml_compat import tomllib + + +def main(*, argv: list[str] | None = None) -> None: + if tomllib is None: + print( + "Usage error: dependency-groups CLI requires tomli or Python 3.11+", + file=sys.stderr, + ) + raise SystemExit(2) + + parser = argparse.ArgumentParser( + description=( + "Lint Dependency Groups for validity. " + "This will eagerly load and check all of your Dependency Groups." + ) + ) + parser.add_argument( + "-f", + "--pyproject-file", + default="pyproject.toml", + help="The pyproject.toml file. 
Defaults to trying in the current directory.", + ) + args = parser.parse_args(argv if argv is not None else sys.argv[1:]) + + with open(args.pyproject_file, "rb") as fp: + pyproject = tomllib.load(fp) + dependency_groups_raw = pyproject.get("dependency-groups", {}) + + errors: list[str] = [] + try: + resolver = DependencyGroupResolver(dependency_groups_raw) + except (ValueError, TypeError) as e: + errors.append(f"{type(e).__name__}: {e}") + else: + for groupname in resolver.dependency_groups: + try: + resolver.resolve(groupname) + except (LookupError, ValueError, TypeError) as e: + errors.append(f"{type(e).__name__}: {e}") + + if errors: + print("errors encountered while examining dependency groups:") + for msg in errors: + print(f" {msg}") + sys.exit(1) + else: + print("ok") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_pip_wrapper.py b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_pip_wrapper.py new file mode 100644 index 0000000..f86d896 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_pip_wrapper.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +import argparse +import subprocess +import sys + +from ._implementation import DependencyGroupResolver +from ._toml_compat import tomllib + + +def _invoke_pip(deps: list[str]) -> None: + subprocess.check_call([sys.executable, "-m", "pip", "install", *deps]) + + +def main(*, argv: list[str] | None = None) -> None: + if tomllib is None: + print( + "Usage error: dependency-groups CLI requires tomli or Python 3.11+", + file=sys.stderr, + ) + raise SystemExit(2) + + parser = argparse.ArgumentParser(description="Install Dependency Groups.") + parser.add_argument( + "DEPENDENCY_GROUP", nargs="+", help="The dependency groups to install." + ) + parser.add_argument( + "-f", + "--pyproject-file", + default="pyproject.toml", + help="The pyproject.toml file. 
Defaults to trying in the current directory.", + ) + args = parser.parse_args(argv if argv is not None else sys.argv[1:]) + + with open(args.pyproject_file, "rb") as fp: + pyproject = tomllib.load(fp) + dependency_groups_raw = pyproject.get("dependency-groups", {}) + + errors: list[str] = [] + resolved: list[str] = [] + try: + resolver = DependencyGroupResolver(dependency_groups_raw) + except (ValueError, TypeError) as e: + errors.append(f"{type(e).__name__}: {e}") + else: + for groupname in args.DEPENDENCY_GROUP: + try: + resolved.extend(str(r) for r in resolver.resolve(groupname)) + except (LookupError, ValueError, TypeError) as e: + errors.append(f"{type(e).__name__}: {e}") + + if errors: + print("errors encountered while examining dependency groups:") + for msg in errors: + print(f" {msg}") + sys.exit(1) + + _invoke_pip(resolved) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_toml_compat.py b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_toml_compat.py new file mode 100644 index 0000000..8d6f921 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/_toml_compat.py @@ -0,0 +1,9 @@ +try: + import tomllib +except ImportError: + try: + from pip._vendor import tomli as tomllib # type: ignore[no-redef, unused-ignore] + except ModuleNotFoundError: # pragma: no cover + tomllib = None # type: ignore[assignment, unused-ignore] + +__all__ = ("tomllib",) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/dependency_groups/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/LICENSE.txt b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/LICENSE.txt new file mode 100644 index 0000000..c31ac56 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/LICENSE.txt @@ -0,0 +1,284 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? 
(1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.2 2.1.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2.1 2.2 2002 PSF yes + 2.2.2 2.2.1 2002 PSF yes + 2.2.3 2.2.2 2003 PSF yes + 2.3 2.2.2 2002-2003 PSF yes + 2.3.1 2.3 2002-2003 PSF yes + 2.3.2 2.3.1 2002-2003 PSF yes + 2.3.3 2.3.2 2002-2003 PSF yes + 2.3.4 2.3.3 2004 PSF yes + 2.3.5 2.3.4 2005 PSF yes + 2.4 2.3 2004 PSF yes + 2.4.1 2.4 2005 PSF yes + 2.4.2 2.4.1 2005 PSF yes + 2.4.3 2.4.2 2006 PSF yes + 2.4.4 2.4.3 2006 PSF yes + 2.5 2.4 2006 PSF yes + 2.5.1 2.5 2007 PSF yes + 2.5.2 2.5.1 2008 PSF yes + 2.5.3 2.5.2 2008 PSF yes + 2.6 2.5 2008 PSF yes + 2.6.1 2.6 2008 PSF yes + 2.6.2 2.6.1 2009 PSF yes + 2.6.3 2.6.2 2009 PSF yes + 2.6.4 2.6.3 2009 PSF yes + 2.6.5 2.6.4 2010 PSF yes + 3.0 2.6 2008 PSF yes + 3.0.1 3.0 2009 PSF yes + 3.1 3.0.1 2009 PSF yes + 3.1.1 3.1 2009 PSF yes + 3.1.2 3.1 2010 PSF yes + 3.2 3.1 2010 PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 +Python Software Foundation; All Rights Reserved" are retained in Python alone or +in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py new file mode 100644 index 0000000..4e82943 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2024 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
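# (Editor's illustration, not part of the committed file: the
# NullHandler attached at the bottom of this module follows the
# standard convention for library logging -- importing distlib stays
# silent, and on Python 2 it suppresses the "No handlers could be
# found for logger ..." warning unless the application configures
# logging itself, e.g. via logging.basicConfig(level=logging.DEBUG).)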
+# +import logging + +__version__ = '0.4.0' + + +class DistlibException(Exception): + pass + + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + + class NullHandler(logging.Handler): + + def handle(self, record): + pass + + def emit(self, record): + pass + + def createLock(self): + self.lock = None + + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..273182a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..a101c20 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc new file mode 100644 index 0000000..d760e3f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc new file mode 100644 index 0000000..a0c97c8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc new file mode 100644 index 0000000..667dcda Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..ca561dd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1137 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
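# (Editor's illustration, not part of the committed file: this module
# gives the rest of distlib a single import surface over the Python
# 2/3 split, so callers can write, for example,
#     from pip._vendor.distlib.compat import urlparse, ZipFile
# and receive urlparse.urlparse on Python 2 or urllib.parse.urlparse
# on Python 3, plus a ZipFile that is always a context manager.)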
+# +from __future__ import absolute_import + +import os +import re +import shutil +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + # Leaving this around for now, in case it needs resurrecting in some way + # _userprog = None + # def splituser(host): + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') + + # match = _userprog.match(host) + # if match: return match.group(1, 2) + # return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote, + urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, HTTPBasicAuthHandler, + HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + + class CertificateError(ValueError): + pass + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. 
+ if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" % + (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" % + (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if os.curdir not in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if normdir not in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + + +import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
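# (Editor's illustration, not part of the committed file: with
# _fsencoding == 'utf-8' and _fserrors == 'surrogateescape',
# fsencode('caf\xe9') returns b'caf\xc3\xa9', and a lone surrogate
# such as '\udcff' -- produced when decoding an undecodable byte --
# round-trips back to b'\xff'.)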
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
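# (Editor's illustration, not part of the committed file: for a source
# line of b'# -*- coding: latin-1 -*-', cookie_re captures 'latin-1'
# below and _get_normal_name() normalizes it to 'iso-8859-1', the same
# spellings that tokenizer.c accepts.)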
+ line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format( + filename, encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format( + filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + + +# For converting & <-> & etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, + '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' + A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[ + key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__( + key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union( + *self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +try: + from importlib.util import cache_from_source # Python >= 3.4 +except ImportError: # pragma: no cover + + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover + # {{{ http://code.activestate.com/recipes/576693/ (r9) + # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. + # Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. 
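# (Editor's illustration, not part of the committed file, of the
# sentinel scheme described in the next comment block: an empty dict
# starts with root[:] == [root, root, None]; after od['a'] = 1 and
# od['b'] = 2 the NEXT pointers form the ring root -> 'a' -> 'b' ->
# root, so __iter__ yields 'a' then 'b' and __reversed__ the reverse.)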
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % + len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args), )) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: + _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__, ) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items, ), inst_dict) + return self.__class__, (items, ) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return len(self) == len( + other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + # If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext': 'ext_convert', + 'cfg': 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int( + idx + ) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + # rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance( + value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance( + value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..fef52aa --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/resources.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. 
+ """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. + """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if 
resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. + """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + # See issue #146 + _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. 
+ """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. + :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..195dc3f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/scripts.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2023 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys +import time +from zipfile import ZipInfo + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, get_executable, get_platform, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +if __name__ == '__main__': + from %(module)s import %(import_name)s + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + +# Pre-fetch the contents of all executable wrapper stubs. +# This is to address https://github.com/pypa/pip/issues/12666. +# When updating pip, we rename the old pip in place before installing the +# new version. If we try to fetch a wrapper *after* that rename, the finder +# machinery will be confused as the package is no longer available at the +# location where it was imported from. So we load everything into memory in +# advance. 
+ +if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + DISTLIB_PACKAGE = __name__.rsplit('.', 1)[0] + + WRAPPERS = { + r.name: r.bytes + for r in finder(DISTLIB_PACKAGE).iterator("") + if r.name.endswith(".exe") + } + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or (os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. 
+ + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + elif getattr(sys, "cross_compiling", False): + # In a cross-compiling environment, the shebang will likely be a + # script; this *must* be invoked with the "safe" version of the + # shebang, or else using os.exec() to run the entry script will + # fail, raising "OSError 8 [Errno 8] Exec format error". + simple_shebang = False + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' + executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''\n" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + if os.name == 'nt': + # for Python builds from source on Windows, no Python executables with + # a version suffix are created, so we use python.exe + executable = os.path.join(sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + else: + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp and + '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. 
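+        # For illustration, with executable=b'/usr/bin/python3' and an empty
+        # post_interp, _build_shebang() above returns one of two shapes:
+        #
+        #   simple:    #!/usr/bin/python3
+        #   contrived: #!/bin/sh
+        #              '''exec' /usr/bin/python3 "$0" "$@"
+        #              ' '''
+        #
+        # The contrived form is simultaneously valid sh (exec replaces the
+        # shell with the real interpreter) and valid Python (the exec line
+        # sits inside a triple-quoted string when Python parses the file).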
+ try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError('The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError('The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict( + module=entry.prefix, import_name=entry.suffix.split('.')[0], func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') + if source_date_epoch: + date_time = time.gmtime(int(source_date_epoch))[:6] + zinfo = ZipInfo(filename='__main__.py', date_time=date_time) + zf.writestr(zinfo, script_bytes) + else: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + variant_separator = '-' + + def get_script_filenames(self, name): + result = set() + if '' in self.variants: + result.add(name) + if 'X' in self.variants: + result.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + result.add('%s%s%s.%s' % (name, self.variant_separator, self.version_info[0], self.version_info[1])) + return result + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + scriptnames = self.get_script_filenames(entry.name) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s is an empty file (skipping)', script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. + # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' + name = '%s%s%s.exe' % (kind, bits, platform_suffix) + if name not in WRAPPERS: + msg = ('Unable to find resource %s in package %s' % + (name, DISTLIB_PACKAGE)) + raise ValueError(msg) + return WRAPPERS[name] + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. 
+ + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t32.exe b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t32.exe new file mode 100644 index 0000000..52154f0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t32.exe differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t64-arm.exe b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t64-arm.exe new file mode 100644 index 0000000..e1ab8f8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t64-arm.exe differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t64.exe b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t64.exe new file mode 100644 index 0000000..e8bebdb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/t64.exe differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..0d5bd7a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/util.py @@ -0,0 +1,1984 @@ +# +# Copyright (C) 2012-2023 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import socket +try: + import ssl +except ImportError: # pragma: no cover + ssl = None +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib, + xmlrpclib, HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile, + fsdecode, unquote, urlparse) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code as per PEP 508 +# + +IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') +VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') +COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') +MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') +OR = re.compile(r'^or\b\s*') +AND = re.compile(r'^and\b\s*') +NON_SPACE = re.compile(r'(\S+)\s*') +STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') + + +def parse_marker(marker_string): + """ + Parse a marker string and return a dictionary containing a marker expression. 
+ + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). + """ + + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. 
+ """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + # Some packages have a trailing comma which would break things + # See issue #148 + if not ver_remaining: + break + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs) + + +def 
get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): + # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as + # changes to the stub launcher mean that sys.executable always points + # to the stub on OS X + # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' + # in os.environ): + # result = os.environ['__PYVENV_LAUNCHER__'] + # else: + # result = sys.executable + # return result + # Avoid normcasing: see issue #143 + # result = os.path.normcase(sys.executable) + result = sys.executable + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + # entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + 
cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + + def __init__(self, func): + self.func = func + # for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + # obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". + """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. 
+ """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. + for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + if not isinstance(hashed_invalidation, py_compile.PycInvalidationMode): + hashed_invalidation = py_compile.PycInvalidationMode.CHECKED_HASH + compile_kwargs['invalidation_mode'] = hashed_invalidation + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = 
os.path.dirname(path)
+            if parent == path:
+                break
+            path = parent
+        return result
+
+    def commit(self):
+        """
+        Commit recorded changes, turn off recording, return
+        changes.
+        """
+        assert self.record
+        result = self.files_written, self.dirs_created
+        self._init_record()
+        return result
+
+    def rollback(self):
+        if not self.dry_run:
+            for f in list(self.files_written):
+                if os.path.exists(f):
+                    os.remove(f)
+            # dirs should all be empty now, except perhaps for
+            # __pycache__ subdirs
+            # reverse so that subdirs appear before their parents
+            dirs = sorted(self.dirs_created, reverse=True)
+            for d in dirs:
+                flist = os.listdir(d)
+                if flist:
+                    assert flist == ['__pycache__']
+                    sd = os.path.join(d, flist[0])
+                    os.rmdir(sd)
+                os.rmdir(d)  # should fail if non-empty
+        self._init_record()
+
+
+def resolve(module_name, dotted_path):
+    if module_name in sys.modules:
+        mod = sys.modules[module_name]
+    else:
+        mod = __import__(module_name)
+    if dotted_path is None:
+        result = mod
+    else:
+        parts = dotted_path.split('.')
+        result = getattr(mod, parts.pop(0))
+        for p in parts:
+            result = getattr(result, p)
+    return result
+
+
+class ExportEntry(object):
+
+    def __init__(self, name, prefix, suffix, flags):
+        self.name = name
+        self.prefix = prefix
+        self.suffix = suffix
+        self.flags = flags
+
+    @cached_property
+    def value(self):
+        return resolve(self.prefix, self.suffix)
+
+    def __repr__(self):  # pragma: no cover
+        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, self.suffix, self.flags)
+
+    def __eq__(self, other):
+        if not isinstance(other, ExportEntry):
+            result = False
+        else:
+            result = (self.name == other.name and self.prefix == other.prefix and self.suffix == other.suffix and
+                      self.flags == other.flags)
+        return result
+
+    __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(
+    r'''(?P<name>([^\[]\S*))
+    \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+    \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+    ''', re.VERBOSE)
+
+
+def get_export_entry(specification):
+    m = ENTRY_RE.search(specification)
+    if not m:
+        result = None
+        if '[' in specification or ']' in specification:
+            raise DistlibException("Invalid specification "
+                                   "'%s'" % specification)
+    else:
+        d = m.groupdict()
+        name = d['name']
+        path = d['callable']
+        colons = path.count(':')
+        if colons == 0:
+            prefix, suffix = path, None
+        else:
+            if colons != 1:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            prefix, suffix = path.split(':')
+        flags = d['flags']
+        if flags is None:
+            if '[' in specification or ']' in specification:
+                raise DistlibException("Invalid specification "
+                                       "'%s'" % specification)
+            flags = []
+        else:
+            flags = [f.strip() for f in flags.split(',')]
+        result = ExportEntry(name, prefix, suffix, flags)
+    return result
+
+
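+# Example of the specification syntax handled by get_export_entry() above,
+# as used for entry points such as console scripts:
+#
+#     entry = get_export_entry('foo = foo.main:main [extra1, extra2]')
+#     # entry.name   -> 'foo'
+#     # entry.prefix -> 'foo.main'
+#     # entry.suffix -> 'main'
+#     # entry.flags  -> ['extra1', 'extra2']
+#
+# and entry.value (a cached_property) imports foo.main and returns its
+# main attribute on first access.
+
+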
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path, use_abspath=True): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path) if use_abspath else path) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.rsplit('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result + + +# +# Extended metadata functionality +# + + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + # data = reader.read().decode('utf-8') + # result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix, use_abspath=True): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix, use_abspath=use_abspath) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). 
+ :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. + """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, result) + return result + + +# +# Simple sequencing +# +class Sequencer(object): + + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node], lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node], index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: + break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl') + + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + + # Limit extraction of dangerous items, if this Python + # allows it easily. If not, just trust the input. + # See: https://docs.python.org/3/library/tarfile.html#extraction-filters + def extraction_filter(member, path): + """Run tarfile.tar_filter, but raise the expected ValueError""" + # This is only called if the current Python has tarfile filters + try: + return tarfile.tar_filter(member, path) + except tarfile.FilterError as exc: + raise ValueError(str(exc)) + + archive.extraction_filter = extraction_filter + + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G', 'T', 'P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ # elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + # import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + # import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError) + + # + # HTTPSConnection which verifies certificates/matches domains + # + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if getattr(self, 'cert_file', None): + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + + +# +# XML-RPC with timeouts +# +class Transport(xmlrpclib.Transport): + + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + return self._connection[1] + + +if ssl: + + class SafeTransport(xmlrpclib.SafeTransport): + + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, **kwargs) + return self._connection[1] + + +class ServerProxy(xmlrpclib.ServerProxy): + + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + # scheme = splittype(uri) # deprecated as of Python 3.8 + scheme = urlparse(uri)[0] + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + + +class CSVWriter(CSVBase): + + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + + +# +# Configurator functionality +# + + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() + + +# def _get_pypirc_command(): +# """ +# Get the distutils command for interacting with PyPI configurations. +# :return: the command. +# """ +# from distutils.core import Distribution +# from distutils.config import PyPIRCCommand +# d = Distribution() +# return PyPIRCCommand(d) + + +class PyPIRCFile(object): + + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + + def __init__(self, fn=None, url=None): + if fn is None: + fn = os.path.join(os.path.expanduser('~'), '.pypirc') + self.filename = fn + self.url = url + + def read(self): + result = {} + + if os.path.exists(self.filename): + repository = self.url or self.DEFAULT_REPOSITORY + + config = configparser.RawConfigParser() + config.read(self.filename) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [server.strip() for server in index_servers.split('\n') if server.strip() != ''] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + for server in _servers: + result = {'server': server} + result['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + result[key] = config.get(server, key) + else: + result[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi')): + result['repository'] = self.DEFAULT_REPOSITORY + elif (result['server'] != repository and result['repository'] != repository): + result = {} + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + result = { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM + } + return result + + def update(self, username, password): + # import pdb; pdb.set_trace() + config = configparser.RawConfigParser() + fn = self.filename + config.read(fn) + if not config.has_section('pypi'): + config.add_section('pypi') + config.set('pypi', 'username', username) + config.set('pypi', 'password', password) + with open(fn, 'w') as f: + config.write(f) 
+ + +def _load_pypirc(index): + """ + Read the PyPI access configuration as supported by distutils. + """ + return PyPIRCFile(url=index.url).read() + + +def _store_pypirc(index): + PyPIRCFile().update(index.username, index.password) + + +# +# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor +# tweaks +# + + +def get_host_platform(): + """Return a string that identifies the current platform. This is used mainly to + distinguish platform-specific build directories and platform-specific built + distributions. Typically includes the OS name and version and the + architecture (as supplied by 'os.uname()'), although the exact information + included depends on the OS; eg. on Linux, the kernel version isn't + particularly important. + + Examples of returned values: + linux-i586 + linux-alpha (?) + solaris-2.6-sun4u + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + + """ + if os.name == 'nt': + if 'amd64' in sys.version.lower(): + return 'win-amd64' + if '(arm)' in sys.version.lower(): + return 'win-arm32' + if '(arm64)' in sys.version.lower(): + return 'win-arm64' + return sys.platform + + # Set for cross builds explicitly + if "_PYTHON_HOST_PLATFORM" in os.environ: + return os.environ["_PYTHON_HOST_PLATFORM"] + + if os.name != 'posix' or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + + (osname, host, release, version, machine) = os.uname() + + # Convert the OS name to lowercase, remove '/' characters, and translate + # spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_').replace('/', '-') + + if osname[:5] == 'linux': + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + + elif osname[:5] == 'sunos': + if release[0] >= '5': # SunOS 5 == Solaris 2 + osname = 'solaris' + release = '%d.%s' % (int(release[0]) - 3, release[2:]) + # We can't use 'platform.architecture()[0]' because a + # bootstrap problem. We use a dict to get an error + # if some suspicious happens. 
+ bitness = {2147483647: '32bit', 9223372036854775807: '64bit'} + machine += '.%s' % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == 'aix': + from _aix_support import aix_platform + return aix_platform() + elif osname[:6] == 'cygwin': + osname = 'cygwin' + rel_re = re.compile(r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == 'darwin': + import _osx_support + try: + from distutils import sysconfig + except ImportError: + import sysconfig + osname, release, machine = _osx_support.get_platform_osx(sysconfig.get_config_vars(), osname, release, machine) + + return '%s-%s-%s' % (osname, release, machine) + + +_TARGET_TO_PLAT = { + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', +} + + +def get_platform(): + if os.name != 'nt': + return get_host_platform() + cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') + if cross_compilation_target not in _TARGET_TO_PLAT: + return get_host_platform() + return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w32.exe b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w32.exe new file mode 100644 index 0000000..4ee2d3a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w32.exe differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w64-arm.exe b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w64-arm.exe new file mode 100644 index 0000000..951d581 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w64-arm.exe differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w64.exe b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w64.exe new file mode 100644 index 0000000..5763076 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distlib/w64.exe differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/distro/LICENSE new file mode 100644 index 0000000..e06d208 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distro/LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
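The hunks below vendor the ``distro`` package under the Apache-2.0 license above. As a rough usage sketch of the public API documented in the docstrings that follow (assumes a Linux host; the vendored copy is importable as ``pip._vendor.distro``, the standalone package simply as ``distro``):

    from pip._vendor import distro

    # Resolved via the lookup hierarchies described in the docstrings below.
    print(distro.id())                # machine-readable ID, e.g. 'debian'
    print(distro.name(pretty=True))   # human-readable, with version/codename
    print(distro.version(best=True))  # most precise version across sources
    print(distro.info()['version_parts'])  # {'major': ..., 'minor': ..., 'build_number': ...}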
+ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py new file mode 100644 index 0000000..7686fe8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__init__.py @@ -0,0 +1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + "codename", + "distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py new file mode 100644 index 0000000..0c01d5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..623e91e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..94786cb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc new file mode 100644 index 0000000..414e5bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py b/venv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py new file mode 100644 index 0000000..78ccdfa --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/distro/distro.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# Copyright 2015-2021 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is the recommended replacement for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.8 removed it altogether. Its +predecessor function :py:func:`platform.dist` was already deprecated since +Python 2.6 and removed in Python 3.8. Still, there are many cases in which +access to OS distribution information is needed. See `Python issue 1322 +`_ for more information. +""" + +import argparse +import json +import logging +import os +import re +import shlex +import subprocess +import sys +import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) + +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict + +__version__ = "1.9.0" + + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str + + +_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") +_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") +_OS_RELEASE_BASENAME = "os-release" + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = { + "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap +} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 + "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 + "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation + "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server + "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + "redhat": "rhel", # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be looked up for if _UNIXCONFDIR is not readable. 
+_DISTRO_RELEASE_BASENAMES = [ + "SuSE-release", + "altlinux-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", +] + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", + "board-release", + "ec2_version", +) + + +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id() -> str: + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. 
+ + This package maintains the following reliable distro ID values: + + ============== ========================================= + Distro ID Distribution + ============== ========================================= + "ubuntu" Ubuntu + "debian" Debian + "rhel" RedHat Enterprise Linux + "centos" CentOS + "fedora" Fedora + "sles" SUSE Linux Enterprise Server + "opensuse" openSUSE + "amzn" Amazon Linux + "arch" Arch Linux + "buildroot" Buildroot + "cloudlinux" CloudLinux OS + "exherbo" Exherbo Linux + "gentoo" GenToo Linux + "ibm_powerkvm" IBM PowerKVM + "kvmibm" KVM for IBM z Systems + "linuxmint" Linux Mint + "mageia" Mageia + "mandriva" Mandriva Linux + "parallels" Parallels + "pidora" Pidora + "raspbian" Raspbian + "oracle" Oracle Linux (and Oracle Enterprise Linux) + "scientific" Scientific Linux + "slackware" Slackware + "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD + "midnightbsd" MidnightBSD + "rocky" Rocky Linux + "aix" AIX + "guix" Guix System + "altlinux" ALT Linux + ============== ========================================= + + If you have a need to get distros for reliable IDs added into this set, + or if you find that the :func:`distro.id` function returns a different + distro ID for one of the listed distros, please create an issue in the + `distro issue tracker`_. + + **Lookup hierarchy and transformations:** + + First, the ID is obtained from the following sources, in the specified + order. The first available and non-empty value is used: + + * the value of the "ID" attribute of the os-release file, + + * the value of the "Distributor ID" attribute returned by the lsb_release + command, + + * the first part of the file name of the distro release file, + + The so determined ID value then passes the following transformations, + before it is returned by this method: + + * it is translated to lower case, + + * blanks (which should not be there anyway) are translated to underscores, + + * a normalization of the ID is performed, based upon + `normalization tables`_. The purpose of this normalization is to ensure + that the ID is as reliable as possible, even across incompatible changes + in the OS distributions. A common reason for an incompatible change is + the addition of an os-release file, or the addition of the lsb_release + command, with ID values that differ from what was previously determined + from the distro release file name. + """ + return _distro.id() + + +def name(pretty: bool = False) -> str: + """ + Return the name of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the name is returned without version or codename. + (e.g. "CentOS Linux") + + If *pretty* is true, the version and codename are appended. + (e.g. "CentOS Linux 7.1.1503 (Core)") + + **Lookup hierarchy:** + + The name is obtained from the following sources, in the specified order. + The first available and non-empty value is used: + + * If *pretty* is false: + + - the value of the "NAME" attribute of the os-release file, + + - the value of the "Distributor ID" attribute returned by the lsb_release + command, + + - the value of the "<name>" field of the distro release file. + + * If *pretty* is true: + + - the value of the "PRETTY_NAME" attribute of the os-release file, + + - the value of the "Description" attribute returned by the lsb_release + command, + + - the value of the "<name>" field of the distro release file, appended + with the value of the pretty version ("<version_id>" and "<codename>" + fields) of the distro release file, if available.
+ """ + return _distro.name(pretty) + + +def version(pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the version is returned without codename (e.g. + "7.0"). + + If *pretty* is true, the codename in parenthesis is appended, if the + codename is non-empty (e.g. "7.0 (Maipo)"). + + Some distributions provide version numbers with different precisions in + the different sources of distribution information. Examining the different + sources in a fixed priority order does not always yield the most precise + version (e.g. for Debian 8.2, or CentOS 7.1). + + Some other distributions may not provide this kind of information. In these + cases, an empty string would be returned. This behavior can be observed + with rolling releases distributions (e.g. Arch Linux). + + The *best* parameter can be used to control the approach for the returned + version: + + If *best* is false, the first non-empty version number in priority order of + the examined sources is returned. + + If *best* is true, the most precise version number out of all examined + sources is returned. + + **Lookup hierarchy:** + + In all cases, the version number is obtained from the following sources. + If *best* is false, this order represents the priority order: + + * the value of the "VERSION_ID" attribute of the os-release file, + * the value of the "Release" attribute returned by the lsb_release + command, + * the version number parsed from the "" field of the first line + of the distro release file, + * the version number parsed from the "PRETTY_NAME" attribute of the + os-release file, if it follows the format of the distro release files. + * the version number parsed from the "Description" attribute returned by + the lsb_release command, if it follows the format of the distro release + files. + """ + return _distro.version(pretty, best) + + +def version_parts(best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the current OS distribution as a tuple + ``(major, minor, build_number)`` with items as follows: + + * ``major``: The result of :func:`distro.major_version`. + + * ``minor``: The result of :func:`distro.minor_version`. + + * ``build_number``: The result of :func:`distro.build_number`. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.version_parts(best) + + +def major_version(best: bool = False) -> str: + """ + Return the major version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The major version is the first + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.major_version(best) + + +def minor_version(best: bool = False) -> str: + """ + Return the minor version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The minor version is the second + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.minor_version(best) + + +def build_number(best: bool = False) -> str: + """ + Return the build number of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The build number is the third part + of the dot-separated version string. 
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.build_number(best)
+
+
+def like() -> str:
+    """
+    Return a space-separated list of distro IDs of distributions that are
+    closely related to the current OS distribution with regard to packaging
+    and programming interfaces, for example distributions the current
+    distribution is a derivative from.
+
+    **Lookup hierarchy:**
+
+    This information item is only provided by the os-release file.
+    For details, see the description of the "ID_LIKE" attribute in the
+    `os-release man page
+    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+    """
+    return _distro.like()
+
+
+def codename() -> str:
+    """
+    Return the codename for the release of the current OS distribution,
+    as a string.
+
+    If the distribution does not have a codename, an empty string is returned.
+
+    Note that the returned codename is not always really a codename. For
+    example, openSUSE returns "x86_64". This function does not handle such
+    cases in any special way and just returns the string it finds, if any.
+
+    **Lookup hierarchy:**
+
+    * the codename within the "VERSION" attribute of the os-release file, if
+      provided,
+
+    * the value of the "Codename" attribute returned by the lsb_release
+      command,
+
+    * the value of the "<codename>" field of the distro release file.
+    """
+    return _distro.codename()
+
+
+def info(pretty: bool = False, best: bool = False) -> InfoDict:
+    """
+    Return certain machine-readable information items about the current OS
+    distribution in a dictionary, as shown in the following example:
+
+    .. sourcecode:: python
+
+        {
+            'id': 'rhel',
+            'version': '7.0',
+            'version_parts': {
+                'major': '7',
+                'minor': '0',
+                'build_number': ''
+            },
+            'like': 'fedora',
+            'codename': 'Maipo'
+        }
+
+    The dictionary structure and keys are always the same, regardless of which
+    information items are available in the underlying data sources. The values
+    for the various keys are as follows:
+
+    * ``id``: The result of :func:`distro.id`.
+
+    * ``version``: The result of :func:`distro.version`.
+
+    * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+    * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+
+    * ``version_parts -> build_number``: The result of
+      :func:`distro.build_number`.
+
+    * ``like``: The result of :func:`distro.like`.
+
+    * ``codename``: The result of :func:`distro.codename`.
+
+    For a description of the *pretty* and *best* parameters, see the
+    :func:`distro.version` method.
+    """
+    return _distro.info(pretty, best)
+
+
+def os_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the os-release file data source of the current OS distribution.
+
+    See `os-release file`_ for details about these information items.
+    """
+    return _distro.os_release_info()
+
+
+def lsb_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the lsb_release command data source of the current OS distribution.
+
+    See `lsb_release command output`_ for details about these information
+    items.
+    """
+    return _distro.lsb_release_info()
+
+
+def distro_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the distro release file data source of the current OS distribution.
+
+    See `distro release file`_ for details about these information items.
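+
+    A possible return value on a CentOS 7 host (illustrative only; the exact
+    keys depend on which distro release file is found):
+
+    .. sourcecode:: python
+
+        >>> import distro
+        >>> distro.distro_release_info()
+        {'name': 'CentOS Linux', 'version_id': '7.1.1503',
+         'codename': 'Core', 'id': 'centos'}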
+ """ + return _distro.distro_release_info() + + +def uname_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute: str) -> str: + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute: str) -> str: + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + +try: + from functools import cached_property +except ImportError: + # Python < 3.8 + class cached_property: # type: ignore + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. + """ + + def __init__(self, f: Callable[[Any], Any]) -> None: + self._fname = f.__name__ + self._f = f + + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution: + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. 
+ However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__( + self, + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. + + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter. This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. 
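+
+        A minimal usage sketch (the mount point is hypothetical): reading the
+        release files of a mounted guest image without running lsb_release,
+        uname or oslevel on the host:
+
+        .. sourcecode:: python
+
+            dist = LinuxDistribution(root_dir="/mnt/guest-rootfs")
+            print(dist.id(), dist.version())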
+ + Raises: + + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. + + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. + """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. + if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r}, " + "_oslevel_info={self._oslevel_info!r})".format(self=self) + ) + + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self._os_release_info.get("release_codename") or self.codename(), + ) + + def id(self) -> str: + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. 
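+
+        Sketch of the normalization step performed by the inner ``normalize``
+        helper defined below (the table contents shown are an assumption for
+        illustration only):
+
+        .. sourcecode:: python
+
+            >>> normalize("RedHat", {"redhat": "rhel"})
+            'rhel'
+            >>> normalize("Arch Linux", {})  # lower-cased, blanks -> underscores
+            'arch_linux'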
+ """ + + def normalize(distro_id: str, table: Dict[str, str]) -> str: + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty: bool = False) -> str: + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = f"{name} {version}" + return name or "" + + def version(self, pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. + """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) + elif self.id() == "debian" or "debian" in self.like().split(): + # On Debian-like, add debian_version file content to candidates list. + versions.append(self._debian_version) + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = f"{version} ({self.codename()})" + return version + + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best: bool = False) -> str: + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best: bool = False) -> str: + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. 
+ """ + return self.version_parts(best)[1] + + def build_number(self, best: bool = False) -> str: + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self) -> str: + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self) -> str: + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return InfoDict( + id=self.id(), + version=self.version(pretty, best), + version_parts=VersionDict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. + """ + return self._os_release_info + + def lsb_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + return self._uname_info + + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. + """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, "") + + def lsb_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, "") + + def distro_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. 
+ """ + return self._distro_release_info.get(attribute, "") + + def uname_attr(self, attribute: str) -> str: + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_attr`. + """ + return self._uname_info.get(attribute, "") + + @cached_property + def _os_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file, encoding="utf-8") as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments + if "=" in token: + k, v = token.split("=", 1) + props[k.lower()] = v + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename + + if "version_codename" in props: + # os-release added a version_codename field. Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props["codename"] = props["version_codename"] + elif "ubuntu_codename" in props: + # Same as above but a non-standard field name used on older Ubuntus + props["codename"] = props["ubuntu_codename"] + + return props + + @cached_property + def _lsb_release_info(self) -> Dict[str, str]: + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. + """ + if not self.include_lsb: + return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} + content = self._to_str(stdout).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip("\n").split(":", 1) + if len(kv) != 2: + # Ignore lines without colon. 
+ continue + k, v = kv + props.update({k.replace(" ", "_").lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} + content = self._to_str(stdout).splitlines() + return self._parse_uname_content(content) + + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + + @cached_property + def _debian_version(self) -> str: + try: + with open( + os.path.join(self.etc_dir, "debian_version"), encoding="ascii" + ) as fp: + return fp.readline().rstrip() + except FileNotFoundError: + return "" + + @staticmethod + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} + props = {} + match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. + if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(bytestring: bytes) -> str: + encoding = sys.getfilesystemencoding() + return bytestring.decode(encoding) + + @cached_property + def _distro_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + else: + try: + basenames = [ + basename + for basename in os.listdir(self.etc_dir) + if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES + and os.path.isfile(os.path.join(self.etc_dir, basename)) + ] + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. + basenames = _DISTRO_RELEASE_BASENAMES + for basename in basenames: + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match is None: + continue + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + # The name is always present if the pattern matches. + if "name" not in distro_info: + continue + self.distro_release_file = filepath + break + else: # the loop didn't "break": no candidate. 
+ return {} + + if match is not None: + distro_info["id"] = match.group(1) + + # CloudLinux < 7: manually enrich info with proper id. + if "cloudlinux" in distro_info.get("name", "").lower(): + distro_info["id"] = "cloudlinux" + + return distro_info + + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath, encoding="utf-8") as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except OSError: + # Ignore not being able to read a specific, seemingly version + # related file. + # See https://github.com/python-distro/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line: str) -> Dict[str, str]: + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. + """ + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info["name"] = matches.group(3)[::-1] + if matches.group(2): + distro_info["version_id"] = matches.group(2)[::-1] + if matches.group(1): + distro_info["codename"] = matches.group(1)[::-1] + elif line: + distro_info["name"] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main() -> None: + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + "--json", "-j", help="Output in machine readable format", action="store_true" + ) + + parser.add_argument( + "--root-dir", + "-r", + type=str, + dest="root_dir", + help="Path to the root filesystem directory (defaults to /)", + ) + + args = parser.parse_args() + + if args.root_dir: + dist = LinuxDistribution( + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, + ) + else: + dist = _distro + + if args.json: + logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) + else: + logger.info("Name: %s", dist.name(pretty=True)) + distribution_version = dist.version(pretty=True) + logger.info("Version: %s", distribution_version) + distribution_codename = dist.codename() + logger.info("Codename: %s", distribution_codename) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/distro/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/distro/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/LICENSE.md b/venv/lib/python3.12/site-packages/pip/_vendor/idna/LICENSE.md new file mode 100644 index 0000000..19b6b45 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. 
Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py new file mode 100644 index 0000000..cfdc030 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__init__.py @@ -0,0 +1,45 @@ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain +from .package_data import __version__ + +__all__ = [ + "__version__", + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ae573ae Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc new file mode 100644 index 0000000..0b73beb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..91c9c16 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..b222a34 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc new file mode 100644 index 0000000..37d5f2f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc new file mode 100644 index 0000000..92230f8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc new file mode 100644 index 0000000..436facd Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc new file mode 100644 index 0000000..3713cce Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py new file mode 100644 index 0000000..913abfd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/codec.py @@ -0,0 +1,122 @@ +import codecs +import re +from typing import Any, Optional, Tuple + +from .core import IDNAError, alabel, decode, encode, ulabel + +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return "", 0 + + return decode(data), len(data) + + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b"" + if labels: + if not labels[-1]: + trailing_dot = b"." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b"." 
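+
+        # Each complete label is A-label-encoded below; `size` reports how
+        # many input characters were consumed, counting the dots between
+        # labels and any trailing dot.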
+ + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b".".join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return ("", 0) + + if not isinstance(data, str): + data = str(data, "ascii") + + labels = _unicode_dots_re.split(data) + trailing_dot = "" + if labels: + if not labels[-1]: + trailing_dot = "." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = "." + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != "idna2008": + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + + +codecs.register(search_function) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py new file mode 100644 index 0000000..1df9f2a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/compat.py @@ -0,0 +1,15 @@ +from typing import Any, Union + +from .core import decode, encode + + +def ToASCII(label: str) -> bytes: + return encode(label) + + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + + +def nameprep(s: Any) -> None: + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/core.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/core.py new file mode 100644 index 0000000..9115f12 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/core.py @@ -0,0 +1,437 @@ +import bisect +import re +import unicodedata +from typing import Optional, Union + +from . 
import idnadata +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b"xn--" +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class IDNAError(UnicodeError): + """Base exception for all IDNA-encoding related problems""" + + pass + + +class IDNABidiError(IDNAError): + """Exception when bidirectional requirements are not satisfied""" + + pass + + +class InvalidCodepoint(IDNAError): + """Exception when a disallowed or unallocated codepoint is used""" + + pass + + +class InvalidCodepointContext(IDNAError): + """Exception when the codepoint is not valid in the context it is used""" + + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + + +def _punycode(s: str) -> bytes: + return s.encode("punycode") + + +def _unot(s: int) -> str: + return "U+{:04X}".format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == "": + # String likely comes from a newer version of Unicode + raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) + if direction in ["R", "AL", "AN"]: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ["R", "AL"]: + rtl = True + elif direction == "L": + rtl = False + else: + raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) + + valid_ending = False + number_type: Optional[str] = None + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if direction not in [ + "R", + "AL", + "AN", + "EN", + "ES", + "CS", + "ET", + "ON", + "BN", + "NSM", + ]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) + # Bidi rule 3 + if direction in ["R", "AL", "EN", "AN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + # Bidi rule 4 + if direction in ["AN", "EN"]: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError("Can not mix numeral types in a right-to-left label") + else: + # Bidi rule 5 + if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) + # Bidi rule 6 + if direction in ["L", "EN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + + if not valid_ending: + raise IDNABidiError("Label ends with illegal codepoint directionality") + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == "M": + raise IDNAError("Label begins with an illegal combining character") + return True + + +def check_hyphen_ok(label: str) -> bool: 
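+    # Enforce the IDNA hyphen restrictions (RFC 5891, section 4.2.3.1):
+    # no "--" in the third and fourth character positions (reserved for the
+    # "xn--" A-label prefix) and no leading or trailing hyphen.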
+ if label[2:4] == "--": + raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") + if label[0] == "-" or label[-1] == "-": + raise IDNAError("Label must not start or end with a hyphen") + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize("NFC", label) != label: + raise IDNAError("Label must be in Normalization Form C") + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200C: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos - 1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("L"), ord("D")]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos + 1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("R"), ord("D")]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200D: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00B7: + if 0 < pos < len(label) - 1: + if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label) - 1 and len(label) > 1: + return _is_script(label[pos + 1], "Greek") + return False + + elif cp_value == 0x05F3 or cp_value == 0x05F4: + if pos > 0: + return _is_script(label[pos - 1], "Hebrew") + return False + + elif cp_value == 0x30FB: + for cp in label: + if cp == "\u30fb": + continue + if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6F0 <= ord(cp) <= 0x06F9: + return False + return True + + elif 0x6F0 <= cp_value <= 0x6F9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode("utf-8") + if len(label) == 0: + raise IDNAError("Empty Label") + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for pos, cp in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext( + "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + except ValueError: + raise IDNAError( + "Unknown codepoint adjacent to joiner {} at position {} in {}".format( + _unot(cp_value), pos + 1, repr(label) + ) + ) + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): + if not valid_contexto(label, pos): + raise InvalidCodepointContext( + "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + else: + raise InvalidCodepoint( + "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) + ) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode("ascii") + 
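+        # The label is already ASCII; round-trip it through ulabel() to
+        # validate it (including any Punycode content) before accepting it.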
ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode("ascii") + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix) :] + if not label_bytes: + raise IDNAError("Malformed A-label, no Punycode eligible content found") + if label_bytes.decode("ascii")[-1] == "-": + raise IDNAError("A-label must not end with a hyphen") + else: + check_label(label_bytes) + return label_bytes.decode("ascii") + + try: + label = label_bytes.decode("punycode") + except UnicodeError: + raise IDNAError("Invalid A-label") + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + + output = "" + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement: Optional[str] = None + if len(uts46row) == 3: + replacement = uts46row[2] + if ( + status == "V" + or (status == "D" and not transitional) + or (status == "3" and not std3_rules and replacement is None) + ): + output += char + elif replacement is not None and ( + status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) + ): + output += replacement + elif status != "I": + raise IndexError() + except IndexError: + raise InvalidCodepoint( + "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) + ) + + return unicodedata.normalize("NFC", output) + + +def encode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, + transitional: bool = False, +) -> bytes: + if not isinstance(s, str): + try: + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("should pass a unicode string to the function rather than a byte string.") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split(".") + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if labels[-1] == "": + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append(b"") + s = b".".join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError("Domain too long") + return s + + +def decode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, +) -> str: + try: + if not isinstance(s, str): + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("Invalid ASCII in A-label") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + 
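+        # In strict mode only U+002E separates labels; the non-strict branch
+        # above also splits on the ideographic/fullwidth dot variants.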
labels = s.split(".") + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append("") + return ".".join(result) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py new file mode 100644 index 0000000..4be6004 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/idnadata.py @@ -0,0 +1,4243 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "15.1.0" +scripts = { + "Greek": ( + 0x37000000374, + 0x37500000378, + 0x37A0000037E, + 0x37F00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038B, + 0x38C0000038D, + 0x38E000003A2, + 0x3A3000003E2, + 0x3F000000400, + 0x1D2600001D2B, + 0x1D5D00001D62, + 0x1D6600001D6B, + 0x1DBF00001DC0, + 0x1F0000001F16, + 0x1F1800001F1E, + 0x1F2000001F46, + 0x1F4800001F4E, + 0x1F5000001F58, + 0x1F5900001F5A, + 0x1F5B00001F5C, + 0x1F5D00001F5E, + 0x1F5F00001F7E, + 0x1F8000001FB5, + 0x1FB600001FC5, + 0x1FC600001FD4, + 0x1FD600001FDC, + 0x1FDD00001FF0, + 0x1FF200001FF5, + 0x1FF600001FFF, + 0x212600002127, + 0xAB650000AB66, + 0x101400001018F, + 0x101A0000101A1, + 0x1D2000001D246, + ), + "Han": ( + 0x2E8000002E9A, + 0x2E9B00002EF4, + 0x2F0000002FD6, + 0x300500003006, + 0x300700003008, + 0x30210000302A, + 0x30380000303C, + 0x340000004DC0, + 0x4E000000A000, + 0xF9000000FA6E, + 0xFA700000FADA, + 0x16FE200016FE4, + 0x16FF000016FF2, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x2F8000002FA1E, + 0x300000003134B, + 0x31350000323B0, + ), + "Hebrew": ( + 0x591000005C8, + 0x5D0000005EB, + 0x5EF000005F5, + 0xFB1D0000FB37, + 0xFB380000FB3D, + 0xFB3E0000FB3F, + 0xFB400000FB42, + 0xFB430000FB45, + 0xFB460000FB50, + ), + "Hiragana": ( + 0x304100003097, + 0x309D000030A0, + 0x1B0010001B120, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1F2000001F201, + ), + "Katakana": ( + 0x30A1000030FB, + 0x30FD00003100, + 0x31F000003200, + 0x32D0000032FF, + 0x330000003358, + 0xFF660000FF70, + 0xFF710000FF9E, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B001, + 0x1B1200001B123, + 0x1B1550001B156, + 0x1B1640001B168, + ), +} +joining_types = { + 0xAD: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30A: 84, + 0x30B: 84, + 0x30C: 84, + 0x30D: 84, + 0x30E: 84, + 0x30F: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31A: 84, + 0x31B: 84, + 0x31C: 84, + 0x31D: 84, + 0x31E: 84, + 0x31F: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32A: 84, + 0x32B: 84, + 0x32C: 84, + 0x32D: 84, + 0x32E: 84, + 0x32F: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33A: 84, + 0x33B: 84, + 0x33C: 84, + 0x33D: 84, + 0x33E: 84, + 0x33F: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34A: 84, + 0x34B: 84, + 0x34C: 84, + 0x34D: 84, + 0x34E: 84, + 0x34F: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 
0x357: 84, + 0x358: 84, + 0x359: 84, + 0x35A: 84, + 0x35B: 84, + 0x35C: 84, + 0x35D: 84, + 0x35E: 84, + 0x35F: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36A: 84, + 0x36B: 84, + 0x36C: 84, + 0x36D: 84, + 0x36E: 84, + 0x36F: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59A: 84, + 0x59B: 84, + 0x59C: 84, + 0x59D: 84, + 0x59E: 84, + 0x59F: 84, + 0x5A0: 84, + 0x5A1: 84, + 0x5A2: 84, + 0x5A3: 84, + 0x5A4: 84, + 0x5A5: 84, + 0x5A6: 84, + 0x5A7: 84, + 0x5A8: 84, + 0x5A9: 84, + 0x5AA: 84, + 0x5AB: 84, + 0x5AC: 84, + 0x5AD: 84, + 0x5AE: 84, + 0x5AF: 84, + 0x5B0: 84, + 0x5B1: 84, + 0x5B2: 84, + 0x5B3: 84, + 0x5B4: 84, + 0x5B5: 84, + 0x5B6: 84, + 0x5B7: 84, + 0x5B8: 84, + 0x5B9: 84, + 0x5BA: 84, + 0x5BB: 84, + 0x5BC: 84, + 0x5BD: 84, + 0x5BF: 84, + 0x5C1: 84, + 0x5C2: 84, + 0x5C4: 84, + 0x5C5: 84, + 0x5C7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61A: 84, + 0x61C: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62A: 68, + 0x62B: 68, + 0x62C: 68, + 0x62D: 68, + 0x62E: 68, + 0x62F: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63A: 68, + 0x63B: 68, + 0x63C: 68, + 0x63D: 68, + 0x63E: 68, + 0x63F: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64A: 68, + 0x64B: 84, + 0x64C: 84, + 0x64D: 84, + 0x64E: 84, + 0x64F: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65A: 84, + 0x65B: 84, + 0x65C: 84, + 0x65D: 84, + 0x65E: 84, + 0x65F: 84, + 0x66E: 68, + 0x66F: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67A: 68, + 0x67B: 68, + 0x67C: 68, + 0x67D: 68, + 0x67E: 68, + 0x67F: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68A: 82, + 0x68B: 82, + 0x68C: 82, + 0x68D: 82, + 0x68E: 82, + 0x68F: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69A: 68, + 0x69B: 68, + 0x69C: 68, + 0x69D: 68, + 0x69E: 68, + 0x69F: 68, + 0x6A0: 68, + 0x6A1: 68, + 0x6A2: 68, + 0x6A3: 68, + 0x6A4: 68, + 0x6A5: 68, + 0x6A6: 68, + 0x6A7: 68, + 0x6A8: 68, + 0x6A9: 68, + 0x6AA: 68, + 0x6AB: 68, + 0x6AC: 68, + 0x6AD: 68, + 0x6AE: 68, + 0x6AF: 68, + 0x6B0: 68, + 0x6B1: 68, + 0x6B2: 68, + 0x6B3: 68, + 0x6B4: 68, + 0x6B5: 68, + 0x6B6: 68, + 0x6B7: 68, + 0x6B8: 68, + 0x6B9: 68, + 0x6BA: 68, + 0x6BB: 68, + 0x6BC: 68, + 0x6BD: 68, + 0x6BE: 68, + 0x6BF: 68, + 0x6C0: 82, + 0x6C1: 68, + 0x6C2: 68, + 0x6C3: 82, + 0x6C4: 82, + 0x6C5: 82, + 0x6C6: 82, + 0x6C7: 82, + 0x6C8: 82, + 0x6C9: 82, + 0x6CA: 82, + 0x6CB: 82, + 0x6CC: 68, + 0x6CD: 82, + 0x6CE: 68, + 0x6CF: 82, + 0x6D0: 68, + 0x6D1: 68, + 0x6D2: 82, + 0x6D3: 82, + 0x6D5: 82, + 0x6D6: 84, + 0x6D7: 84, + 0x6D8: 84, + 0x6D9: 84, + 0x6DA: 84, + 0x6DB: 84, + 0x6DC: 84, + 0x6DF: 84, + 0x6E0: 84, + 0x6E1: 84, + 0x6E2: 84, + 0x6E3: 84, + 0x6E4: 84, + 
0x6E7: 84, + 0x6E8: 84, + 0x6EA: 84, + 0x6EB: 84, + 0x6EC: 84, + 0x6ED: 84, + 0x6EE: 82, + 0x6EF: 82, + 0x6FA: 68, + 0x6FB: 68, + 0x6FC: 68, + 0x6FF: 68, + 0x70F: 84, + 0x710: 82, + 0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71A: 68, + 0x71B: 68, + 0x71C: 68, + 0x71D: 68, + 0x71E: 82, + 0x71F: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72A: 82, + 0x72B: 68, + 0x72C: 82, + 0x72D: 68, + 0x72E: 68, + 0x72F: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73A: 84, + 0x73B: 84, + 0x73C: 84, + 0x73D: 84, + 0x73E: 84, + 0x73F: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74A: 84, + 0x74D: 82, + 0x74E: 68, + 0x74F: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75A: 82, + 0x75B: 82, + 0x75C: 68, + 0x75D: 68, + 0x75E: 68, + 0x75F: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76A: 68, + 0x76B: 82, + 0x76C: 82, + 0x76D: 68, + 0x76E: 68, + 0x76F: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77A: 68, + 0x77B: 68, + 0x77C: 68, + 0x77D: 68, + 0x77E: 68, + 0x77F: 68, + 0x7A6: 84, + 0x7A7: 84, + 0x7A8: 84, + 0x7A9: 84, + 0x7AA: 84, + 0x7AB: 84, + 0x7AC: 84, + 0x7AD: 84, + 0x7AE: 84, + 0x7AF: 84, + 0x7B0: 84, + 0x7CA: 68, + 0x7CB: 68, + 0x7CC: 68, + 0x7CD: 68, + 0x7CE: 68, + 0x7CF: 68, + 0x7D0: 68, + 0x7D1: 68, + 0x7D2: 68, + 0x7D3: 68, + 0x7D4: 68, + 0x7D5: 68, + 0x7D6: 68, + 0x7D7: 68, + 0x7D8: 68, + 0x7D9: 68, + 0x7DA: 68, + 0x7DB: 68, + 0x7DC: 68, + 0x7DD: 68, + 0x7DE: 68, + 0x7DF: 68, + 0x7E0: 68, + 0x7E1: 68, + 0x7E2: 68, + 0x7E3: 68, + 0x7E4: 68, + 0x7E5: 68, + 0x7E6: 68, + 0x7E7: 68, + 0x7E8: 68, + 0x7E9: 68, + 0x7EA: 68, + 0x7EB: 84, + 0x7EC: 84, + 0x7ED: 84, + 0x7EE: 84, + 0x7EF: 84, + 0x7F0: 84, + 0x7F1: 84, + 0x7F2: 84, + 0x7F3: 84, + 0x7FA: 67, + 0x7FD: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81B: 84, + 0x81C: 84, + 0x81D: 84, + 0x81E: 84, + 0x81F: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82A: 84, + 0x82B: 84, + 0x82C: 84, + 0x82D: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84A: 68, + 0x84B: 68, + 0x84C: 68, + 0x84D: 68, + 0x84E: 68, + 0x84F: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85A: 84, + 0x85B: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86A: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87A: 82, + 0x87B: 82, + 0x87C: 82, + 0x87D: 82, + 0x87E: 82, + 0x87F: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88A: 68, + 0x88B: 68, + 0x88C: 68, + 0x88D: 68, + 0x88E: 82, + 0x898: 84, + 0x899: 84, + 0x89A: 84, + 0x89B: 84, + 0x89C: 84, + 0x89D: 84, + 0x89E: 84, + 0x89F: 84, + 
0x8A0: 68, + 0x8A1: 68, + 0x8A2: 68, + 0x8A3: 68, + 0x8A4: 68, + 0x8A5: 68, + 0x8A6: 68, + 0x8A7: 68, + 0x8A8: 68, + 0x8A9: 68, + 0x8AA: 82, + 0x8AB: 82, + 0x8AC: 82, + 0x8AE: 82, + 0x8AF: 68, + 0x8B0: 68, + 0x8B1: 82, + 0x8B2: 82, + 0x8B3: 68, + 0x8B4: 68, + 0x8B5: 68, + 0x8B6: 68, + 0x8B7: 68, + 0x8B8: 68, + 0x8B9: 82, + 0x8BA: 68, + 0x8BB: 68, + 0x8BC: 68, + 0x8BD: 68, + 0x8BE: 68, + 0x8BF: 68, + 0x8C0: 68, + 0x8C1: 68, + 0x8C2: 68, + 0x8C3: 68, + 0x8C4: 68, + 0x8C5: 68, + 0x8C6: 68, + 0x8C7: 68, + 0x8C8: 68, + 0x8CA: 84, + 0x8CB: 84, + 0x8CC: 84, + 0x8CD: 84, + 0x8CE: 84, + 0x8CF: 84, + 0x8D0: 84, + 0x8D1: 84, + 0x8D2: 84, + 0x8D3: 84, + 0x8D4: 84, + 0x8D5: 84, + 0x8D6: 84, + 0x8D7: 84, + 0x8D8: 84, + 0x8D9: 84, + 0x8DA: 84, + 0x8DB: 84, + 0x8DC: 84, + 0x8DD: 84, + 0x8DE: 84, + 0x8DF: 84, + 0x8E0: 84, + 0x8E1: 84, + 0x8E3: 84, + 0x8E4: 84, + 0x8E5: 84, + 0x8E6: 84, + 0x8E7: 84, + 0x8E8: 84, + 0x8E9: 84, + 0x8EA: 84, + 0x8EB: 84, + 0x8EC: 84, + 0x8ED: 84, + 0x8EE: 84, + 0x8EF: 84, + 0x8F0: 84, + 0x8F1: 84, + 0x8F2: 84, + 0x8F3: 84, + 0x8F4: 84, + 0x8F5: 84, + 0x8F6: 84, + 0x8F7: 84, + 0x8F8: 84, + 0x8F9: 84, + 0x8FA: 84, + 0x8FB: 84, + 0x8FC: 84, + 0x8FD: 84, + 0x8FE: 84, + 0x8FF: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93A: 84, + 0x93C: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94D: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9BC: 84, + 0x9C1: 84, + 0x9C2: 84, + 0x9C3: 84, + 0x9C4: 84, + 0x9CD: 84, + 0x9E2: 84, + 0x9E3: 84, + 0x9FE: 84, + 0xA01: 84, + 0xA02: 84, + 0xA3C: 84, + 0xA41: 84, + 0xA42: 84, + 0xA47: 84, + 0xA48: 84, + 0xA4B: 84, + 0xA4C: 84, + 0xA4D: 84, + 0xA51: 84, + 0xA70: 84, + 0xA71: 84, + 0xA75: 84, + 0xA81: 84, + 0xA82: 84, + 0xABC: 84, + 0xAC1: 84, + 0xAC2: 84, + 0xAC3: 84, + 0xAC4: 84, + 0xAC5: 84, + 0xAC7: 84, + 0xAC8: 84, + 0xACD: 84, + 0xAE2: 84, + 0xAE3: 84, + 0xAFA: 84, + 0xAFB: 84, + 0xAFC: 84, + 0xAFD: 84, + 0xAFE: 84, + 0xAFF: 84, + 0xB01: 84, + 0xB3C: 84, + 0xB3F: 84, + 0xB41: 84, + 0xB42: 84, + 0xB43: 84, + 0xB44: 84, + 0xB4D: 84, + 0xB55: 84, + 0xB56: 84, + 0xB62: 84, + 0xB63: 84, + 0xB82: 84, + 0xBC0: 84, + 0xBCD: 84, + 0xC00: 84, + 0xC04: 84, + 0xC3C: 84, + 0xC3E: 84, + 0xC3F: 84, + 0xC40: 84, + 0xC46: 84, + 0xC47: 84, + 0xC48: 84, + 0xC4A: 84, + 0xC4B: 84, + 0xC4C: 84, + 0xC4D: 84, + 0xC55: 84, + 0xC56: 84, + 0xC62: 84, + 0xC63: 84, + 0xC81: 84, + 0xCBC: 84, + 0xCBF: 84, + 0xCC6: 84, + 0xCCC: 84, + 0xCCD: 84, + 0xCE2: 84, + 0xCE3: 84, + 0xD00: 84, + 0xD01: 84, + 0xD3B: 84, + 0xD3C: 84, + 0xD41: 84, + 0xD42: 84, + 0xD43: 84, + 0xD44: 84, + 0xD4D: 84, + 0xD62: 84, + 0xD63: 84, + 0xD81: 84, + 0xDCA: 84, + 0xDD2: 84, + 0xDD3: 84, + 0xDD4: 84, + 0xDD6: 84, + 0xE31: 84, + 0xE34: 84, + 0xE35: 84, + 0xE36: 84, + 0xE37: 84, + 0xE38: 84, + 0xE39: 84, + 0xE3A: 84, + 0xE47: 84, + 0xE48: 84, + 0xE49: 84, + 0xE4A: 84, + 0xE4B: 84, + 0xE4C: 84, + 0xE4D: 84, + 0xE4E: 84, + 0xEB1: 84, + 0xEB4: 84, + 0xEB5: 84, + 0xEB6: 84, + 0xEB7: 84, + 0xEB8: 84, + 0xEB9: 84, + 0xEBA: 84, + 0xEBB: 84, + 0xEBC: 84, + 0xEC8: 84, + 0xEC9: 84, + 0xECA: 84, + 0xECB: 84, + 0xECC: 84, + 0xECD: 84, + 0xECE: 84, + 0xF18: 84, + 0xF19: 84, + 0xF35: 84, + 0xF37: 84, + 0xF39: 84, + 0xF71: 84, + 0xF72: 84, + 0xF73: 84, + 0xF74: 84, + 0xF75: 84, + 0xF76: 84, + 0xF77: 84, + 0xF78: 84, + 0xF79: 84, + 0xF7A: 84, + 0xF7B: 84, + 0xF7C: 84, + 0xF7D: 84, + 0xF7E: 84, + 0xF80: 84, + 0xF81: 84, + 0xF82: 84, + 0xF83: 84, + 0xF84: 84, + 
0xF86: 84, + 0xF87: 84, + 0xF8D: 84, + 0xF8E: 84, + 0xF8F: 84, + 0xF90: 84, + 0xF91: 84, + 0xF92: 84, + 0xF93: 84, + 0xF94: 84, + 0xF95: 84, + 0xF96: 84, + 0xF97: 84, + 0xF99: 84, + 0xF9A: 84, + 0xF9B: 84, + 0xF9C: 84, + 0xF9D: 84, + 0xF9E: 84, + 0xF9F: 84, + 0xFA0: 84, + 0xFA1: 84, + 0xFA2: 84, + 0xFA3: 84, + 0xFA4: 84, + 0xFA5: 84, + 0xFA6: 84, + 0xFA7: 84, + 0xFA8: 84, + 0xFA9: 84, + 0xFAA: 84, + 0xFAB: 84, + 0xFAC: 84, + 0xFAD: 84, + 0xFAE: 84, + 0xFAF: 84, + 0xFB0: 84, + 0xFB1: 84, + 0xFB2: 84, + 0xFB3: 84, + 0xFB4: 84, + 0xFB5: 84, + 0xFB6: 84, + 0xFB7: 84, + 0xFB8: 84, + 0xFB9: 84, + 0xFBA: 84, + 0xFBB: 84, + 0xFBC: 84, + 0xFC6: 84, + 0x102D: 84, + 0x102E: 84, + 0x102F: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103A: 84, + 0x103D: 84, + 0x103E: 84, + 0x1058: 84, + 0x1059: 84, + 0x105E: 84, + 0x105F: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108D: 84, + 0x109D: 84, + 0x135D: 84, + 0x135E: 84, + 0x135F: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17B4: 84, + 0x17B5: 84, + 0x17B7: 84, + 0x17B8: 84, + 0x17B9: 84, + 0x17BA: 84, + 0x17BB: 84, + 0x17BC: 84, + 0x17BD: 84, + 0x17C6: 84, + 0x17C9: 84, + 0x17CA: 84, + 0x17CB: 84, + 0x17CC: 84, + 0x17CD: 84, + 0x17CE: 84, + 0x17CF: 84, + 0x17D0: 84, + 0x17D1: 84, + 0x17D2: 84, + 0x17D3: 84, + 0x17DD: 84, + 0x1807: 68, + 0x180A: 67, + 0x180B: 84, + 0x180C: 84, + 0x180D: 84, + 0x180F: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182A: 68, + 0x182B: 68, + 0x182C: 68, + 0x182D: 68, + 0x182E: 68, + 0x182F: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183A: 68, + 0x183B: 68, + 0x183C: 68, + 0x183D: 68, + 0x183E: 68, + 0x183F: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184A: 68, + 0x184B: 68, + 0x184C: 68, + 0x184D: 68, + 0x184E: 68, + 0x184F: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185A: 68, + 0x185B: 68, + 0x185C: 68, + 0x185D: 68, + 0x185E: 68, + 0x185F: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186A: 68, + 0x186B: 68, + 0x186C: 68, + 0x186D: 68, + 0x186E: 68, + 0x186F: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188A: 68, + 0x188B: 68, + 0x188C: 68, + 0x188D: 68, + 0x188E: 68, + 0x188F: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189A: 68, + 0x189B: 68, + 0x189C: 68, + 0x189D: 68, + 0x189E: 68, + 0x189F: 68, + 0x18A0: 68, + 0x18A1: 68, + 0x18A2: 68, + 0x18A3: 68, + 0x18A4: 68, + 0x18A5: 68, + 0x18A6: 68, + 0x18A7: 68, + 0x18A8: 68, + 0x18A9: 84, + 0x18AA: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193A: 84, + 0x193B: 84, + 0x1A17: 84, + 0x1A18: 84, + 0x1A1B: 84, + 
0x1A56: 84, + 0x1A58: 84, + 0x1A59: 84, + 0x1A5A: 84, + 0x1A5B: 84, + 0x1A5C: 84, + 0x1A5D: 84, + 0x1A5E: 84, + 0x1A60: 84, + 0x1A62: 84, + 0x1A65: 84, + 0x1A66: 84, + 0x1A67: 84, + 0x1A68: 84, + 0x1A69: 84, + 0x1A6A: 84, + 0x1A6B: 84, + 0x1A6C: 84, + 0x1A73: 84, + 0x1A74: 84, + 0x1A75: 84, + 0x1A76: 84, + 0x1A77: 84, + 0x1A78: 84, + 0x1A79: 84, + 0x1A7A: 84, + 0x1A7B: 84, + 0x1A7C: 84, + 0x1A7F: 84, + 0x1AB0: 84, + 0x1AB1: 84, + 0x1AB2: 84, + 0x1AB3: 84, + 0x1AB4: 84, + 0x1AB5: 84, + 0x1AB6: 84, + 0x1AB7: 84, + 0x1AB8: 84, + 0x1AB9: 84, + 0x1ABA: 84, + 0x1ABB: 84, + 0x1ABC: 84, + 0x1ABD: 84, + 0x1ABE: 84, + 0x1ABF: 84, + 0x1AC0: 84, + 0x1AC1: 84, + 0x1AC2: 84, + 0x1AC3: 84, + 0x1AC4: 84, + 0x1AC5: 84, + 0x1AC6: 84, + 0x1AC7: 84, + 0x1AC8: 84, + 0x1AC9: 84, + 0x1ACA: 84, + 0x1ACB: 84, + 0x1ACC: 84, + 0x1ACD: 84, + 0x1ACE: 84, + 0x1B00: 84, + 0x1B01: 84, + 0x1B02: 84, + 0x1B03: 84, + 0x1B34: 84, + 0x1B36: 84, + 0x1B37: 84, + 0x1B38: 84, + 0x1B39: 84, + 0x1B3A: 84, + 0x1B3C: 84, + 0x1B42: 84, + 0x1B6B: 84, + 0x1B6C: 84, + 0x1B6D: 84, + 0x1B6E: 84, + 0x1B6F: 84, + 0x1B70: 84, + 0x1B71: 84, + 0x1B72: 84, + 0x1B73: 84, + 0x1B80: 84, + 0x1B81: 84, + 0x1BA2: 84, + 0x1BA3: 84, + 0x1BA4: 84, + 0x1BA5: 84, + 0x1BA8: 84, + 0x1BA9: 84, + 0x1BAB: 84, + 0x1BAC: 84, + 0x1BAD: 84, + 0x1BE6: 84, + 0x1BE8: 84, + 0x1BE9: 84, + 0x1BED: 84, + 0x1BEF: 84, + 0x1BF0: 84, + 0x1BF1: 84, + 0x1C2C: 84, + 0x1C2D: 84, + 0x1C2E: 84, + 0x1C2F: 84, + 0x1C30: 84, + 0x1C31: 84, + 0x1C32: 84, + 0x1C33: 84, + 0x1C36: 84, + 0x1C37: 84, + 0x1CD0: 84, + 0x1CD1: 84, + 0x1CD2: 84, + 0x1CD4: 84, + 0x1CD5: 84, + 0x1CD6: 84, + 0x1CD7: 84, + 0x1CD8: 84, + 0x1CD9: 84, + 0x1CDA: 84, + 0x1CDB: 84, + 0x1CDC: 84, + 0x1CDD: 84, + 0x1CDE: 84, + 0x1CDF: 84, + 0x1CE0: 84, + 0x1CE2: 84, + 0x1CE3: 84, + 0x1CE4: 84, + 0x1CE5: 84, + 0x1CE6: 84, + 0x1CE7: 84, + 0x1CE8: 84, + 0x1CED: 84, + 0x1CF4: 84, + 0x1CF8: 84, + 0x1CF9: 84, + 0x1DC0: 84, + 0x1DC1: 84, + 0x1DC2: 84, + 0x1DC3: 84, + 0x1DC4: 84, + 0x1DC5: 84, + 0x1DC6: 84, + 0x1DC7: 84, + 0x1DC8: 84, + 0x1DC9: 84, + 0x1DCA: 84, + 0x1DCB: 84, + 0x1DCC: 84, + 0x1DCD: 84, + 0x1DCE: 84, + 0x1DCF: 84, + 0x1DD0: 84, + 0x1DD1: 84, + 0x1DD2: 84, + 0x1DD3: 84, + 0x1DD4: 84, + 0x1DD5: 84, + 0x1DD6: 84, + 0x1DD7: 84, + 0x1DD8: 84, + 0x1DD9: 84, + 0x1DDA: 84, + 0x1DDB: 84, + 0x1DDC: 84, + 0x1DDD: 84, + 0x1DDE: 84, + 0x1DDF: 84, + 0x1DE0: 84, + 0x1DE1: 84, + 0x1DE2: 84, + 0x1DE3: 84, + 0x1DE4: 84, + 0x1DE5: 84, + 0x1DE6: 84, + 0x1DE7: 84, + 0x1DE8: 84, + 0x1DE9: 84, + 0x1DEA: 84, + 0x1DEB: 84, + 0x1DEC: 84, + 0x1DED: 84, + 0x1DEE: 84, + 0x1DEF: 84, + 0x1DF0: 84, + 0x1DF1: 84, + 0x1DF2: 84, + 0x1DF3: 84, + 0x1DF4: 84, + 0x1DF5: 84, + 0x1DF6: 84, + 0x1DF7: 84, + 0x1DF8: 84, + 0x1DF9: 84, + 0x1DFA: 84, + 0x1DFB: 84, + 0x1DFC: 84, + 0x1DFD: 84, + 0x1DFE: 84, + 0x1DFF: 84, + 0x200B: 84, + 0x200D: 67, + 0x200E: 84, + 0x200F: 84, + 0x202A: 84, + 0x202B: 84, + 0x202C: 84, + 0x202D: 84, + 0x202E: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206A: 84, + 0x206B: 84, + 0x206C: 84, + 0x206D: 84, + 0x206E: 84, + 0x206F: 84, + 0x20D0: 84, + 0x20D1: 84, + 0x20D2: 84, + 0x20D3: 84, + 0x20D4: 84, + 0x20D5: 84, + 0x20D6: 84, + 0x20D7: 84, + 0x20D8: 84, + 0x20D9: 84, + 0x20DA: 84, + 0x20DB: 84, + 0x20DC: 84, + 0x20DD: 84, + 0x20DE: 84, + 0x20DF: 84, + 0x20E0: 84, + 0x20E1: 84, + 0x20E2: 84, + 0x20E3: 84, + 0x20E4: 84, + 0x20E5: 84, + 0x20E6: 84, + 0x20E7: 84, + 0x20E8: 84, + 0x20E9: 84, + 0x20EA: 84, + 0x20EB: 84, + 0x20EC: 84, + 0x20ED: 84, + 0x20EE: 84, + 0x20EF: 84, + 0x20F0: 84, + 0x2CEF: 84, 
+ 0x2CF0: 84, + 0x2CF1: 84, + 0x2D7F: 84, + 0x2DE0: 84, + 0x2DE1: 84, + 0x2DE2: 84, + 0x2DE3: 84, + 0x2DE4: 84, + 0x2DE5: 84, + 0x2DE6: 84, + 0x2DE7: 84, + 0x2DE8: 84, + 0x2DE9: 84, + 0x2DEA: 84, + 0x2DEB: 84, + 0x2DEC: 84, + 0x2DED: 84, + 0x2DEE: 84, + 0x2DEF: 84, + 0x2DF0: 84, + 0x2DF1: 84, + 0x2DF2: 84, + 0x2DF3: 84, + 0x2DF4: 84, + 0x2DF5: 84, + 0x2DF6: 84, + 0x2DF7: 84, + 0x2DF8: 84, + 0x2DF9: 84, + 0x2DFA: 84, + 0x2DFB: 84, + 0x2DFC: 84, + 0x2DFD: 84, + 0x2DFE: 84, + 0x2DFF: 84, + 0x302A: 84, + 0x302B: 84, + 0x302C: 84, + 0x302D: 84, + 0x3099: 84, + 0x309A: 84, + 0xA66F: 84, + 0xA670: 84, + 0xA671: 84, + 0xA672: 84, + 0xA674: 84, + 0xA675: 84, + 0xA676: 84, + 0xA677: 84, + 0xA678: 84, + 0xA679: 84, + 0xA67A: 84, + 0xA67B: 84, + 0xA67C: 84, + 0xA67D: 84, + 0xA69E: 84, + 0xA69F: 84, + 0xA6F0: 84, + 0xA6F1: 84, + 0xA802: 84, + 0xA806: 84, + 0xA80B: 84, + 0xA825: 84, + 0xA826: 84, + 0xA82C: 84, + 0xA840: 68, + 0xA841: 68, + 0xA842: 68, + 0xA843: 68, + 0xA844: 68, + 0xA845: 68, + 0xA846: 68, + 0xA847: 68, + 0xA848: 68, + 0xA849: 68, + 0xA84A: 68, + 0xA84B: 68, + 0xA84C: 68, + 0xA84D: 68, + 0xA84E: 68, + 0xA84F: 68, + 0xA850: 68, + 0xA851: 68, + 0xA852: 68, + 0xA853: 68, + 0xA854: 68, + 0xA855: 68, + 0xA856: 68, + 0xA857: 68, + 0xA858: 68, + 0xA859: 68, + 0xA85A: 68, + 0xA85B: 68, + 0xA85C: 68, + 0xA85D: 68, + 0xA85E: 68, + 0xA85F: 68, + 0xA860: 68, + 0xA861: 68, + 0xA862: 68, + 0xA863: 68, + 0xA864: 68, + 0xA865: 68, + 0xA866: 68, + 0xA867: 68, + 0xA868: 68, + 0xA869: 68, + 0xA86A: 68, + 0xA86B: 68, + 0xA86C: 68, + 0xA86D: 68, + 0xA86E: 68, + 0xA86F: 68, + 0xA870: 68, + 0xA871: 68, + 0xA872: 76, + 0xA8C4: 84, + 0xA8C5: 84, + 0xA8E0: 84, + 0xA8E1: 84, + 0xA8E2: 84, + 0xA8E3: 84, + 0xA8E4: 84, + 0xA8E5: 84, + 0xA8E6: 84, + 0xA8E7: 84, + 0xA8E8: 84, + 0xA8E9: 84, + 0xA8EA: 84, + 0xA8EB: 84, + 0xA8EC: 84, + 0xA8ED: 84, + 0xA8EE: 84, + 0xA8EF: 84, + 0xA8F0: 84, + 0xA8F1: 84, + 0xA8FF: 84, + 0xA926: 84, + 0xA927: 84, + 0xA928: 84, + 0xA929: 84, + 0xA92A: 84, + 0xA92B: 84, + 0xA92C: 84, + 0xA92D: 84, + 0xA947: 84, + 0xA948: 84, + 0xA949: 84, + 0xA94A: 84, + 0xA94B: 84, + 0xA94C: 84, + 0xA94D: 84, + 0xA94E: 84, + 0xA94F: 84, + 0xA950: 84, + 0xA951: 84, + 0xA980: 84, + 0xA981: 84, + 0xA982: 84, + 0xA9B3: 84, + 0xA9B6: 84, + 0xA9B7: 84, + 0xA9B8: 84, + 0xA9B9: 84, + 0xA9BC: 84, + 0xA9BD: 84, + 0xA9E5: 84, + 0xAA29: 84, + 0xAA2A: 84, + 0xAA2B: 84, + 0xAA2C: 84, + 0xAA2D: 84, + 0xAA2E: 84, + 0xAA31: 84, + 0xAA32: 84, + 0xAA35: 84, + 0xAA36: 84, + 0xAA43: 84, + 0xAA4C: 84, + 0xAA7C: 84, + 0xAAB0: 84, + 0xAAB2: 84, + 0xAAB3: 84, + 0xAAB4: 84, + 0xAAB7: 84, + 0xAAB8: 84, + 0xAABE: 84, + 0xAABF: 84, + 0xAAC1: 84, + 0xAAEC: 84, + 0xAAED: 84, + 0xAAF6: 84, + 0xABE5: 84, + 0xABE8: 84, + 0xABED: 84, + 0xFB1E: 84, + 0xFE00: 84, + 0xFE01: 84, + 0xFE02: 84, + 0xFE03: 84, + 0xFE04: 84, + 0xFE05: 84, + 0xFE06: 84, + 0xFE07: 84, + 0xFE08: 84, + 0xFE09: 84, + 0xFE0A: 84, + 0xFE0B: 84, + 0xFE0C: 84, + 0xFE0D: 84, + 0xFE0E: 84, + 0xFE0F: 84, + 0xFE20: 84, + 0xFE21: 84, + 0xFE22: 84, + 0xFE23: 84, + 0xFE24: 84, + 0xFE25: 84, + 0xFE26: 84, + 0xFE27: 84, + 0xFE28: 84, + 0xFE29: 84, + 0xFE2A: 84, + 0xFE2B: 84, + 0xFE2C: 84, + 0xFE2D: 84, + 0xFE2E: 84, + 0xFE2F: 84, + 0xFEFF: 84, + 0xFFF9: 84, + 0xFFFA: 84, + 0xFFFB: 84, + 0x101FD: 84, + 0x102E0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037A: 84, + 0x10A01: 84, + 0x10A02: 84, + 0x10A03: 84, + 0x10A05: 84, + 0x10A06: 84, + 0x10A0C: 84, + 0x10A0D: 84, + 0x10A0E: 84, + 0x10A0F: 84, + 0x10A38: 84, + 0x10A39: 84, + 0x10A3A: 84, + 0x10A3F: 84, + 
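# Reader's note, not part of the committed file: from 0x10AC0 onward the
# table enters the supplementary planes (Manichaean at U+10AC0, Psalter
# Pahlavi at U+10B80, Hanifi Rohingya at U+10D00, Sogdian at U+10F30, and
# the Brahmic-derived scripts from U+11000).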
0x10AC0: 68, + 0x10AC1: 68, + 0x10AC2: 68, + 0x10AC3: 68, + 0x10AC4: 68, + 0x10AC5: 82, + 0x10AC7: 82, + 0x10AC9: 82, + 0x10ACA: 82, + 0x10ACD: 76, + 0x10ACE: 82, + 0x10ACF: 82, + 0x10AD0: 82, + 0x10AD1: 82, + 0x10AD2: 82, + 0x10AD3: 68, + 0x10AD4: 68, + 0x10AD5: 68, + 0x10AD6: 68, + 0x10AD7: 76, + 0x10AD8: 68, + 0x10AD9: 68, + 0x10ADA: 68, + 0x10ADB: 68, + 0x10ADC: 68, + 0x10ADD: 82, + 0x10ADE: 68, + 0x10ADF: 68, + 0x10AE0: 68, + 0x10AE1: 82, + 0x10AE4: 82, + 0x10AE5: 84, + 0x10AE6: 84, + 0x10AEB: 68, + 0x10AEC: 68, + 0x10AED: 68, + 0x10AEE: 68, + 0x10AEF: 82, + 0x10B80: 68, + 0x10B81: 82, + 0x10B82: 68, + 0x10B83: 82, + 0x10B84: 82, + 0x10B85: 82, + 0x10B86: 68, + 0x10B87: 68, + 0x10B88: 68, + 0x10B89: 82, + 0x10B8A: 68, + 0x10B8B: 68, + 0x10B8C: 82, + 0x10B8D: 68, + 0x10B8E: 82, + 0x10B8F: 82, + 0x10B90: 68, + 0x10B91: 82, + 0x10BA9: 82, + 0x10BAA: 82, + 0x10BAB: 82, + 0x10BAC: 82, + 0x10BAD: 68, + 0x10BAE: 68, + 0x10D00: 76, + 0x10D01: 68, + 0x10D02: 68, + 0x10D03: 68, + 0x10D04: 68, + 0x10D05: 68, + 0x10D06: 68, + 0x10D07: 68, + 0x10D08: 68, + 0x10D09: 68, + 0x10D0A: 68, + 0x10D0B: 68, + 0x10D0C: 68, + 0x10D0D: 68, + 0x10D0E: 68, + 0x10D0F: 68, + 0x10D10: 68, + 0x10D11: 68, + 0x10D12: 68, + 0x10D13: 68, + 0x10D14: 68, + 0x10D15: 68, + 0x10D16: 68, + 0x10D17: 68, + 0x10D18: 68, + 0x10D19: 68, + 0x10D1A: 68, + 0x10D1B: 68, + 0x10D1C: 68, + 0x10D1D: 68, + 0x10D1E: 68, + 0x10D1F: 68, + 0x10D20: 68, + 0x10D21: 68, + 0x10D22: 82, + 0x10D23: 68, + 0x10D24: 84, + 0x10D25: 84, + 0x10D26: 84, + 0x10D27: 84, + 0x10EAB: 84, + 0x10EAC: 84, + 0x10EFD: 84, + 0x10EFE: 84, + 0x10EFF: 84, + 0x10F30: 68, + 0x10F31: 68, + 0x10F32: 68, + 0x10F33: 82, + 0x10F34: 68, + 0x10F35: 68, + 0x10F36: 68, + 0x10F37: 68, + 0x10F38: 68, + 0x10F39: 68, + 0x10F3A: 68, + 0x10F3B: 68, + 0x10F3C: 68, + 0x10F3D: 68, + 0x10F3E: 68, + 0x10F3F: 68, + 0x10F40: 68, + 0x10F41: 68, + 0x10F42: 68, + 0x10F43: 68, + 0x10F44: 68, + 0x10F46: 84, + 0x10F47: 84, + 0x10F48: 84, + 0x10F49: 84, + 0x10F4A: 84, + 0x10F4B: 84, + 0x10F4C: 84, + 0x10F4D: 84, + 0x10F4E: 84, + 0x10F4F: 84, + 0x10F50: 84, + 0x10F51: 68, + 0x10F52: 68, + 0x10F53: 68, + 0x10F54: 82, + 0x10F70: 68, + 0x10F71: 68, + 0x10F72: 68, + 0x10F73: 68, + 0x10F74: 82, + 0x10F75: 82, + 0x10F76: 68, + 0x10F77: 68, + 0x10F78: 68, + 0x10F79: 68, + 0x10F7A: 68, + 0x10F7B: 68, + 0x10F7C: 68, + 0x10F7D: 68, + 0x10F7E: 68, + 0x10F7F: 68, + 0x10F80: 68, + 0x10F81: 68, + 0x10F82: 84, + 0x10F83: 84, + 0x10F84: 84, + 0x10F85: 84, + 0x10FB0: 68, + 0x10FB2: 68, + 0x10FB3: 68, + 0x10FB4: 82, + 0x10FB5: 82, + 0x10FB6: 82, + 0x10FB8: 68, + 0x10FB9: 82, + 0x10FBA: 82, + 0x10FBB: 68, + 0x10FBC: 68, + 0x10FBD: 82, + 0x10FBE: 68, + 0x10FBF: 68, + 0x10FC1: 68, + 0x10FC2: 82, + 0x10FC3: 82, + 0x10FC4: 68, + 0x10FC9: 82, + 0x10FCA: 68, + 0x10FCB: 76, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103A: 84, + 0x1103B: 84, + 0x1103C: 84, + 0x1103D: 84, + 0x1103E: 84, + 0x1103F: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107F: 84, + 0x11080: 84, + 0x11081: 84, + 0x110B3: 84, + 0x110B4: 84, + 0x110B5: 84, + 0x110B6: 84, + 0x110B9: 84, + 0x110BA: 84, + 0x110C2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 84, + 0x1112A: 84, + 0x1112B: 84, + 0x1112D: 84, + 0x1112E: 84, + 0x1112F: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111B6: 84, + 0x111B7: 84, + 0x111B8: 84, + 
0x111B9: 84, + 0x111BA: 84, + 0x111BB: 84, + 0x111BC: 84, + 0x111BD: 84, + 0x111BE: 84, + 0x111C9: 84, + 0x111CA: 84, + 0x111CB: 84, + 0x111CC: 84, + 0x111CF: 84, + 0x1122F: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123E: 84, + 0x11241: 84, + 0x112DF: 84, + 0x112E3: 84, + 0x112E4: 84, + 0x112E5: 84, + 0x112E6: 84, + 0x112E7: 84, + 0x112E8: 84, + 0x112E9: 84, + 0x112EA: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133B: 84, + 0x1133C: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136A: 84, + 0x1136B: 84, + 0x1136C: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143A: 84, + 0x1143B: 84, + 0x1143C: 84, + 0x1143D: 84, + 0x1143E: 84, + 0x1143F: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145E: 84, + 0x114B3: 84, + 0x114B4: 84, + 0x114B5: 84, + 0x114B6: 84, + 0x114B7: 84, + 0x114B8: 84, + 0x114BA: 84, + 0x114BF: 84, + 0x114C0: 84, + 0x114C2: 84, + 0x114C3: 84, + 0x115B2: 84, + 0x115B3: 84, + 0x115B4: 84, + 0x115B5: 84, + 0x115BC: 84, + 0x115BD: 84, + 0x115BF: 84, + 0x115C0: 84, + 0x115DC: 84, + 0x115DD: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163A: 84, + 0x1163D: 84, + 0x1163F: 84, + 0x11640: 84, + 0x116AB: 84, + 0x116AD: 84, + 0x116B0: 84, + 0x116B1: 84, + 0x116B2: 84, + 0x116B3: 84, + 0x116B4: 84, + 0x116B5: 84, + 0x116B7: 84, + 0x1171D: 84, + 0x1171E: 84, + 0x1171F: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172A: 84, + 0x1172B: 84, + 0x1182F: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 0x11837: 84, + 0x11839: 84, + 0x1183A: 84, + 0x1193B: 84, + 0x1193C: 84, + 0x1193E: 84, + 0x11943: 84, + 0x119D4: 84, + 0x119D5: 84, + 0x119D6: 84, + 0x119D7: 84, + 0x119DA: 84, + 0x119DB: 84, + 0x119E0: 84, + 0x11A01: 84, + 0x11A02: 84, + 0x11A03: 84, + 0x11A04: 84, + 0x11A05: 84, + 0x11A06: 84, + 0x11A07: 84, + 0x11A08: 84, + 0x11A09: 84, + 0x11A0A: 84, + 0x11A33: 84, + 0x11A34: 84, + 0x11A35: 84, + 0x11A36: 84, + 0x11A37: 84, + 0x11A38: 84, + 0x11A3B: 84, + 0x11A3C: 84, + 0x11A3D: 84, + 0x11A3E: 84, + 0x11A47: 84, + 0x11A51: 84, + 0x11A52: 84, + 0x11A53: 84, + 0x11A54: 84, + 0x11A55: 84, + 0x11A56: 84, + 0x11A59: 84, + 0x11A5A: 84, + 0x11A5B: 84, + 0x11A8A: 84, + 0x11A8B: 84, + 0x11A8C: 84, + 0x11A8D: 84, + 0x11A8E: 84, + 0x11A8F: 84, + 0x11A90: 84, + 0x11A91: 84, + 0x11A92: 84, + 0x11A93: 84, + 0x11A94: 84, + 0x11A95: 84, + 0x11A96: 84, + 0x11A98: 84, + 0x11A99: 84, + 0x11C30: 84, + 0x11C31: 84, + 0x11C32: 84, + 0x11C33: 84, + 0x11C34: 84, + 0x11C35: 84, + 0x11C36: 84, + 0x11C38: 84, + 0x11C39: 84, + 0x11C3A: 84, + 0x11C3B: 84, + 0x11C3C: 84, + 0x11C3D: 84, + 0x11C3F: 84, + 0x11C92: 84, + 0x11C93: 84, + 0x11C94: 84, + 0x11C95: 84, + 0x11C96: 84, + 0x11C97: 84, + 0x11C98: 84, + 0x11C99: 84, + 0x11C9A: 84, + 0x11C9B: 84, + 0x11C9C: 84, + 0x11C9D: 84, + 0x11C9E: 84, + 0x11C9F: 84, + 0x11CA0: 84, + 0x11CA1: 84, + 0x11CA2: 84, + 0x11CA3: 84, + 0x11CA4: 84, + 0x11CA5: 84, + 0x11CA6: 84, + 0x11CA7: 84, + 0x11CAA: 84, + 0x11CAB: 84, + 0x11CAC: 84, + 0x11CAD: 84, + 0x11CAE: 84, + 0x11CAF: 84, + 0x11CB0: 84, + 0x11CB2: 84, + 0x11CB3: 84, + 0x11CB5: 84, + 0x11CB6: 84, + 0x11D31: 84, + 0x11D32: 84, + 0x11D33: 84, + 0x11D34: 84, + 0x11D35: 84, + 0x11D36: 84, + 0x11D3A: 84, + 0x11D3C: 84, + 0x11D3D: 84, + 0x11D3F: 84, + 0x11D40: 84, + 0x11D41: 84, + 
0x11D42: 84, + 0x11D43: 84, + 0x11D44: 84, + 0x11D45: 84, + 0x11D47: 84, + 0x11D90: 84, + 0x11D91: 84, + 0x11D95: 84, + 0x11D97: 84, + 0x11EF3: 84, + 0x11EF4: 84, + 0x11F00: 84, + 0x11F01: 84, + 0x11F36: 84, + 0x11F37: 84, + 0x11F38: 84, + 0x11F39: 84, + 0x11F3A: 84, + 0x11F40: 84, + 0x11F42: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343A: 84, + 0x1343B: 84, + 0x1343C: 84, + 0x1343D: 84, + 0x1343E: 84, + 0x1343F: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344A: 84, + 0x1344B: 84, + 0x1344C: 84, + 0x1344D: 84, + 0x1344E: 84, + 0x1344F: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x16AF0: 84, + 0x16AF1: 84, + 0x16AF2: 84, + 0x16AF3: 84, + 0x16AF4: 84, + 0x16B30: 84, + 0x16B31: 84, + 0x16B32: 84, + 0x16B33: 84, + 0x16B34: 84, + 0x16B35: 84, + 0x16B36: 84, + 0x16F4F: 84, + 0x16F8F: 84, + 0x16F90: 84, + 0x16F91: 84, + 0x16F92: 84, + 0x16FE4: 84, + 0x1BC9D: 84, + 0x1BC9E: 84, + 0x1BCA0: 84, + 0x1BCA1: 84, + 0x1BCA2: 84, + 0x1BCA3: 84, + 0x1CF00: 84, + 0x1CF01: 84, + 0x1CF02: 84, + 0x1CF03: 84, + 0x1CF04: 84, + 0x1CF05: 84, + 0x1CF06: 84, + 0x1CF07: 84, + 0x1CF08: 84, + 0x1CF09: 84, + 0x1CF0A: 84, + 0x1CF0B: 84, + 0x1CF0C: 84, + 0x1CF0D: 84, + 0x1CF0E: 84, + 0x1CF0F: 84, + 0x1CF10: 84, + 0x1CF11: 84, + 0x1CF12: 84, + 0x1CF13: 84, + 0x1CF14: 84, + 0x1CF15: 84, + 0x1CF16: 84, + 0x1CF17: 84, + 0x1CF18: 84, + 0x1CF19: 84, + 0x1CF1A: 84, + 0x1CF1B: 84, + 0x1CF1C: 84, + 0x1CF1D: 84, + 0x1CF1E: 84, + 0x1CF1F: 84, + 0x1CF20: 84, + 0x1CF21: 84, + 0x1CF22: 84, + 0x1CF23: 84, + 0x1CF24: 84, + 0x1CF25: 84, + 0x1CF26: 84, + 0x1CF27: 84, + 0x1CF28: 84, + 0x1CF29: 84, + 0x1CF2A: 84, + 0x1CF2B: 84, + 0x1CF2C: 84, + 0x1CF2D: 84, + 0x1CF30: 84, + 0x1CF31: 84, + 0x1CF32: 84, + 0x1CF33: 84, + 0x1CF34: 84, + 0x1CF35: 84, + 0x1CF36: 84, + 0x1CF37: 84, + 0x1CF38: 84, + 0x1CF39: 84, + 0x1CF3A: 84, + 0x1CF3B: 84, + 0x1CF3C: 84, + 0x1CF3D: 84, + 0x1CF3E: 84, + 0x1CF3F: 84, + 0x1CF40: 84, + 0x1CF41: 84, + 0x1CF42: 84, + 0x1CF43: 84, + 0x1CF44: 84, + 0x1CF45: 84, + 0x1CF46: 84, + 0x1D167: 84, + 0x1D168: 84, + 0x1D169: 84, + 0x1D173: 84, + 0x1D174: 84, + 0x1D175: 84, + 0x1D176: 84, + 0x1D177: 84, + 0x1D178: 84, + 0x1D179: 84, + 0x1D17A: 84, + 0x1D17B: 84, + 0x1D17C: 84, + 0x1D17D: 84, + 0x1D17E: 84, + 0x1D17F: 84, + 0x1D180: 84, + 0x1D181: 84, + 0x1D182: 84, + 0x1D185: 84, + 0x1D186: 84, + 0x1D187: 84, + 0x1D188: 84, + 0x1D189: 84, + 0x1D18A: 84, + 0x1D18B: 84, + 0x1D1AA: 84, + 0x1D1AB: 84, + 0x1D1AC: 84, + 0x1D1AD: 84, + 0x1D242: 84, + 0x1D243: 84, + 0x1D244: 84, + 0x1DA00: 84, + 0x1DA01: 84, + 0x1DA02: 84, + 0x1DA03: 84, + 0x1DA04: 84, + 0x1DA05: 84, + 0x1DA06: 84, + 0x1DA07: 84, + 0x1DA08: 84, + 0x1DA09: 84, + 0x1DA0A: 84, + 0x1DA0B: 84, + 0x1DA0C: 84, + 0x1DA0D: 84, + 0x1DA0E: 84, + 0x1DA0F: 84, + 0x1DA10: 84, + 0x1DA11: 84, + 0x1DA12: 84, + 0x1DA13: 84, + 0x1DA14: 84, + 0x1DA15: 84, + 0x1DA16: 84, + 0x1DA17: 84, + 0x1DA18: 84, + 0x1DA19: 84, + 0x1DA1A: 84, + 0x1DA1B: 84, + 0x1DA1C: 84, + 0x1DA1D: 84, + 0x1DA1E: 84, + 0x1DA1F: 84, + 0x1DA20: 84, + 0x1DA21: 84, + 0x1DA22: 84, + 0x1DA23: 84, + 0x1DA24: 84, + 0x1DA25: 84, + 0x1DA26: 84, + 0x1DA27: 84, + 0x1DA28: 84, + 0x1DA29: 84, + 0x1DA2A: 84, + 0x1DA2B: 84, + 0x1DA2C: 84, + 0x1DA2D: 84, + 0x1DA2E: 84, + 0x1DA2F: 84, + 0x1DA30: 84, + 0x1DA31: 84, + 0x1DA32: 84, + 0x1DA33: 84, + 0x1DA34: 84, + 0x1DA35: 84, + 0x1DA36: 84, + 0x1DA3B: 84, + 0x1DA3C: 84, + 0x1DA3D: 84, + 0x1DA3E: 84, + 
0x1DA3F: 84, + 0x1DA40: 84, + 0x1DA41: 84, + 0x1DA42: 84, + 0x1DA43: 84, + 0x1DA44: 84, + 0x1DA45: 84, + 0x1DA46: 84, + 0x1DA47: 84, + 0x1DA48: 84, + 0x1DA49: 84, + 0x1DA4A: 84, + 0x1DA4B: 84, + 0x1DA4C: 84, + 0x1DA4D: 84, + 0x1DA4E: 84, + 0x1DA4F: 84, + 0x1DA50: 84, + 0x1DA51: 84, + 0x1DA52: 84, + 0x1DA53: 84, + 0x1DA54: 84, + 0x1DA55: 84, + 0x1DA56: 84, + 0x1DA57: 84, + 0x1DA58: 84, + 0x1DA59: 84, + 0x1DA5A: 84, + 0x1DA5B: 84, + 0x1DA5C: 84, + 0x1DA5D: 84, + 0x1DA5E: 84, + 0x1DA5F: 84, + 0x1DA60: 84, + 0x1DA61: 84, + 0x1DA62: 84, + 0x1DA63: 84, + 0x1DA64: 84, + 0x1DA65: 84, + 0x1DA66: 84, + 0x1DA67: 84, + 0x1DA68: 84, + 0x1DA69: 84, + 0x1DA6A: 84, + 0x1DA6B: 84, + 0x1DA6C: 84, + 0x1DA75: 84, + 0x1DA84: 84, + 0x1DA9B: 84, + 0x1DA9C: 84, + 0x1DA9D: 84, + 0x1DA9E: 84, + 0x1DA9F: 84, + 0x1DAA1: 84, + 0x1DAA2: 84, + 0x1DAA3: 84, + 0x1DAA4: 84, + 0x1DAA5: 84, + 0x1DAA6: 84, + 0x1DAA7: 84, + 0x1DAA8: 84, + 0x1DAA9: 84, + 0x1DAAA: 84, + 0x1DAAB: 84, + 0x1DAAC: 84, + 0x1DAAD: 84, + 0x1DAAE: 84, + 0x1DAAF: 84, + 0x1E000: 84, + 0x1E001: 84, + 0x1E002: 84, + 0x1E003: 84, + 0x1E004: 84, + 0x1E005: 84, + 0x1E006: 84, + 0x1E008: 84, + 0x1E009: 84, + 0x1E00A: 84, + 0x1E00B: 84, + 0x1E00C: 84, + 0x1E00D: 84, + 0x1E00E: 84, + 0x1E00F: 84, + 0x1E010: 84, + 0x1E011: 84, + 0x1E012: 84, + 0x1E013: 84, + 0x1E014: 84, + 0x1E015: 84, + 0x1E016: 84, + 0x1E017: 84, + 0x1E018: 84, + 0x1E01B: 84, + 0x1E01C: 84, + 0x1E01D: 84, + 0x1E01E: 84, + 0x1E01F: 84, + 0x1E020: 84, + 0x1E021: 84, + 0x1E023: 84, + 0x1E024: 84, + 0x1E026: 84, + 0x1E027: 84, + 0x1E028: 84, + 0x1E029: 84, + 0x1E02A: 84, + 0x1E08F: 84, + 0x1E130: 84, + 0x1E131: 84, + 0x1E132: 84, + 0x1E133: 84, + 0x1E134: 84, + 0x1E135: 84, + 0x1E136: 84, + 0x1E2AE: 84, + 0x1E2EC: 84, + 0x1E2ED: 84, + 0x1E2EE: 84, + 0x1E2EF: 84, + 0x1E4EC: 84, + 0x1E4ED: 84, + 0x1E4EE: 84, + 0x1E4EF: 84, + 0x1E8D0: 84, + 0x1E8D1: 84, + 0x1E8D2: 84, + 0x1E8D3: 84, + 0x1E8D4: 84, + 0x1E8D5: 84, + 0x1E8D6: 84, + 0x1E900: 68, + 0x1E901: 68, + 0x1E902: 68, + 0x1E903: 68, + 0x1E904: 68, + 0x1E905: 68, + 0x1E906: 68, + 0x1E907: 68, + 0x1E908: 68, + 0x1E909: 68, + 0x1E90A: 68, + 0x1E90B: 68, + 0x1E90C: 68, + 0x1E90D: 68, + 0x1E90E: 68, + 0x1E90F: 68, + 0x1E910: 68, + 0x1E911: 68, + 0x1E912: 68, + 0x1E913: 68, + 0x1E914: 68, + 0x1E915: 68, + 0x1E916: 68, + 0x1E917: 68, + 0x1E918: 68, + 0x1E919: 68, + 0x1E91A: 68, + 0x1E91B: 68, + 0x1E91C: 68, + 0x1E91D: 68, + 0x1E91E: 68, + 0x1E91F: 68, + 0x1E920: 68, + 0x1E921: 68, + 0x1E922: 68, + 0x1E923: 68, + 0x1E924: 68, + 0x1E925: 68, + 0x1E926: 68, + 0x1E927: 68, + 0x1E928: 68, + 0x1E929: 68, + 0x1E92A: 68, + 0x1E92B: 68, + 0x1E92C: 68, + 0x1E92D: 68, + 0x1E92E: 68, + 0x1E92F: 68, + 0x1E930: 68, + 0x1E931: 68, + 0x1E932: 68, + 0x1E933: 68, + 0x1E934: 68, + 0x1E935: 68, + 0x1E936: 68, + 0x1E937: 68, + 0x1E938: 68, + 0x1E939: 68, + 0x1E93A: 68, + 0x1E93B: 68, + 0x1E93C: 68, + 0x1E93D: 68, + 0x1E93E: 68, + 0x1E93F: 68, + 0x1E940: 68, + 0x1E941: 68, + 0x1E942: 68, + 0x1E943: 68, + 0x1E944: 84, + 0x1E945: 84, + 0x1E946: 84, + 0x1E947: 84, + 0x1E948: 84, + 0x1E949: 84, + 0x1E94A: 84, + 0x1E94B: 84, + 0xE0001: 84, + 0xE0020: 84, + 0xE0021: 84, + 0xE0022: 84, + 0xE0023: 84, + 0xE0024: 84, + 0xE0025: 84, + 0xE0026: 84, + 0xE0027: 84, + 0xE0028: 84, + 0xE0029: 84, + 0xE002A: 84, + 0xE002B: 84, + 0xE002C: 84, + 0xE002D: 84, + 0xE002E: 84, + 0xE002F: 84, + 0xE0030: 84, + 0xE0031: 84, + 0xE0032: 84, + 0xE0033: 84, + 0xE0034: 84, + 0xE0035: 84, + 0xE0036: 84, + 0xE0037: 84, + 0xE0038: 84, + 0xE0039: 84, + 0xE003A: 84, + 0xE003B: 84, + 0xE003C: 84, + 0xE003D: 84, + 
0xE003E: 84, + 0xE003F: 84, + 0xE0040: 84, + 0xE0041: 84, + 0xE0042: 84, + 0xE0043: 84, + 0xE0044: 84, + 0xE0045: 84, + 0xE0046: 84, + 0xE0047: 84, + 0xE0048: 84, + 0xE0049: 84, + 0xE004A: 84, + 0xE004B: 84, + 0xE004C: 84, + 0xE004D: 84, + 0xE004E: 84, + 0xE004F: 84, + 0xE0050: 84, + 0xE0051: 84, + 0xE0052: 84, + 0xE0053: 84, + 0xE0054: 84, + 0xE0055: 84, + 0xE0056: 84, + 0xE0057: 84, + 0xE0058: 84, + 0xE0059: 84, + 0xE005A: 84, + 0xE005B: 84, + 0xE005C: 84, + 0xE005D: 84, + 0xE005E: 84, + 0xE005F: 84, + 0xE0060: 84, + 0xE0061: 84, + 0xE0062: 84, + 0xE0063: 84, + 0xE0064: 84, + 0xE0065: 84, + 0xE0066: 84, + 0xE0067: 84, + 0xE0068: 84, + 0xE0069: 84, + 0xE006A: 84, + 0xE006B: 84, + 0xE006C: 84, + 0xE006D: 84, + 0xE006E: 84, + 0xE006F: 84, + 0xE0070: 84, + 0xE0071: 84, + 0xE0072: 84, + 0xE0073: 84, + 0xE0074: 84, + 0xE0075: 84, + 0xE0076: 84, + 0xE0077: 84, + 0xE0078: 84, + 0xE0079: 84, + 0xE007A: 84, + 0xE007B: 84, + 0xE007C: 84, + 0xE007D: 84, + 0xE007E: 84, + 0xE007F: 84, + 0xE0100: 84, + 0xE0101: 84, + 0xE0102: 84, + 0xE0103: 84, + 0xE0104: 84, + 0xE0105: 84, + 0xE0106: 84, + 0xE0107: 84, + 0xE0108: 84, + 0xE0109: 84, + 0xE010A: 84, + 0xE010B: 84, + 0xE010C: 84, + 0xE010D: 84, + 0xE010E: 84, + 0xE010F: 84, + 0xE0110: 84, + 0xE0111: 84, + 0xE0112: 84, + 0xE0113: 84, + 0xE0114: 84, + 0xE0115: 84, + 0xE0116: 84, + 0xE0117: 84, + 0xE0118: 84, + 0xE0119: 84, + 0xE011A: 84, + 0xE011B: 84, + 0xE011C: 84, + 0xE011D: 84, + 0xE011E: 84, + 0xE011F: 84, + 0xE0120: 84, + 0xE0121: 84, + 0xE0122: 84, + 0xE0123: 84, + 0xE0124: 84, + 0xE0125: 84, + 0xE0126: 84, + 0xE0127: 84, + 0xE0128: 84, + 0xE0129: 84, + 0xE012A: 84, + 0xE012B: 84, + 0xE012C: 84, + 0xE012D: 84, + 0xE012E: 84, + 0xE012F: 84, + 0xE0130: 84, + 0xE0131: 84, + 0xE0132: 84, + 0xE0133: 84, + 0xE0134: 84, + 0xE0135: 84, + 0xE0136: 84, + 0xE0137: 84, + 0xE0138: 84, + 0xE0139: 84, + 0xE013A: 84, + 0xE013B: 84, + 0xE013C: 84, + 0xE013D: 84, + 0xE013E: 84, + 0xE013F: 84, + 0xE0140: 84, + 0xE0141: 84, + 0xE0142: 84, + 0xE0143: 84, + 0xE0144: 84, + 0xE0145: 84, + 0xE0146: 84, + 0xE0147: 84, + 0xE0148: 84, + 0xE0149: 84, + 0xE014A: 84, + 0xE014B: 84, + 0xE014C: 84, + 0xE014D: 84, + 0xE014E: 84, + 0xE014F: 84, + 0xE0150: 84, + 0xE0151: 84, + 0xE0152: 84, + 0xE0153: 84, + 0xE0154: 84, + 0xE0155: 84, + 0xE0156: 84, + 0xE0157: 84, + 0xE0158: 84, + 0xE0159: 84, + 0xE015A: 84, + 0xE015B: 84, + 0xE015C: 84, + 0xE015D: 84, + 0xE015E: 84, + 0xE015F: 84, + 0xE0160: 84, + 0xE0161: 84, + 0xE0162: 84, + 0xE0163: 84, + 0xE0164: 84, + 0xE0165: 84, + 0xE0166: 84, + 0xE0167: 84, + 0xE0168: 84, + 0xE0169: 84, + 0xE016A: 84, + 0xE016B: 84, + 0xE016C: 84, + 0xE016D: 84, + 0xE016E: 84, + 0xE016F: 84, + 0xE0170: 84, + 0xE0171: 84, + 0xE0172: 84, + 0xE0173: 84, + 0xE0174: 84, + 0xE0175: 84, + 0xE0176: 84, + 0xE0177: 84, + 0xE0178: 84, + 0xE0179: 84, + 0xE017A: 84, + 0xE017B: 84, + 0xE017C: 84, + 0xE017D: 84, + 0xE017E: 84, + 0xE017F: 84, + 0xE0180: 84, + 0xE0181: 84, + 0xE0182: 84, + 0xE0183: 84, + 0xE0184: 84, + 0xE0185: 84, + 0xE0186: 84, + 0xE0187: 84, + 0xE0188: 84, + 0xE0189: 84, + 0xE018A: 84, + 0xE018B: 84, + 0xE018C: 84, + 0xE018D: 84, + 0xE018E: 84, + 0xE018F: 84, + 0xE0190: 84, + 0xE0191: 84, + 0xE0192: 84, + 0xE0193: 84, + 0xE0194: 84, + 0xE0195: 84, + 0xE0196: 84, + 0xE0197: 84, + 0xE0198: 84, + 0xE0199: 84, + 0xE019A: 84, + 0xE019B: 84, + 0xE019C: 84, + 0xE019D: 84, + 0xE019E: 84, + 0xE019F: 84, + 0xE01A0: 84, + 0xE01A1: 84, + 0xE01A2: 84, + 0xE01A3: 84, + 0xE01A4: 84, + 0xE01A5: 84, + 0xE01A6: 84, + 0xE01A7: 84, + 0xE01A8: 84, + 0xE01A9: 84, + 0xE01AA: 84, + 
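# Reader's note, not part of the committed file: 0xE0001 and the
# 0xE0020-0xE007F run are the deprecated tag characters, and
# 0xE0100-0xE01EF are the Variation Selectors Supplement; all are
# transparent (84) for joining purposes.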
0xE01AB: 84, + 0xE01AC: 84, + 0xE01AD: 84, + 0xE01AE: 84, + 0xE01AF: 84, + 0xE01B0: 84, + 0xE01B1: 84, + 0xE01B2: 84, + 0xE01B3: 84, + 0xE01B4: 84, + 0xE01B5: 84, + 0xE01B6: 84, + 0xE01B7: 84, + 0xE01B8: 84, + 0xE01B9: 84, + 0xE01BA: 84, + 0xE01BB: 84, + 0xE01BC: 84, + 0xE01BD: 84, + 0xE01BE: 84, + 0xE01BF: 84, + 0xE01C0: 84, + 0xE01C1: 84, + 0xE01C2: 84, + 0xE01C3: 84, + 0xE01C4: 84, + 0xE01C5: 84, + 0xE01C6: 84, + 0xE01C7: 84, + 0xE01C8: 84, + 0xE01C9: 84, + 0xE01CA: 84, + 0xE01CB: 84, + 0xE01CC: 84, + 0xE01CD: 84, + 0xE01CE: 84, + 0xE01CF: 84, + 0xE01D0: 84, + 0xE01D1: 84, + 0xE01D2: 84, + 0xE01D3: 84, + 0xE01D4: 84, + 0xE01D5: 84, + 0xE01D6: 84, + 0xE01D7: 84, + 0xE01D8: 84, + 0xE01D9: 84, + 0xE01DA: 84, + 0xE01DB: 84, + 0xE01DC: 84, + 0xE01DD: 84, + 0xE01DE: 84, + 0xE01DF: 84, + 0xE01E0: 84, + 0xE01E1: 84, + 0xE01E2: 84, + 0xE01E3: 84, + 0xE01E4: 84, + 0xE01E5: 84, + 0xE01E6: 84, + 0xE01E7: 84, + 0xE01E8: 84, + 0xE01E9: 84, + 0xE01EA: 84, + 0xE01EB: 84, + 0xE01EC: 84, + 0xE01ED: 84, + 0xE01EE: 84, + 0xE01EF: 84, +} +codepoint_classes = { + "PVALID": ( + 0x2D0000002E, + 0x300000003A, + 0x610000007B, + 0xDF000000F7, + 0xF800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010A, + 0x10B0000010C, + 0x10D0000010E, + 0x10F00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011A, + 0x11B0000011C, + 0x11D0000011E, + 0x11F00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012A, + 0x12B0000012C, + 0x12D0000012E, + 0x12F00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13A0000013B, + 0x13C0000013D, + 0x13E0000013F, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14B0000014C, + 0x14D0000014E, + 0x14F00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015A, + 0x15B0000015C, + 0x15D0000015E, + 0x15F00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016A, + 0x16B0000016C, + 0x16D0000016E, + 0x16F00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17A0000017B, + 0x17C0000017D, + 0x17E0000017F, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18C0000018E, + 0x19200000193, + 0x19500000196, + 0x1990000019C, + 0x19E0000019F, + 0x1A1000001A2, + 0x1A3000001A4, + 0x1A5000001A6, + 0x1A8000001A9, + 0x1AA000001AC, + 0x1AD000001AE, + 0x1B0000001B1, + 0x1B4000001B5, + 0x1B6000001B7, + 0x1B9000001BC, + 0x1BD000001C4, + 0x1CE000001CF, + 0x1D0000001D1, + 0x1D2000001D3, + 0x1D4000001D5, + 0x1D6000001D7, + 0x1D8000001D9, + 0x1DA000001DB, + 0x1DC000001DE, + 0x1DF000001E0, + 0x1E1000001E2, + 0x1E3000001E4, + 0x1E5000001E6, + 0x1E7000001E8, + 0x1E9000001EA, + 0x1EB000001EC, + 0x1ED000001EE, + 0x1EF000001F1, + 0x1F5000001F6, + 0x1F9000001FA, + 0x1FB000001FC, + 0x1FD000001FE, + 0x1FF00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020A, + 0x20B0000020C, + 0x20D0000020E, + 0x20F00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021A, + 0x21B0000021C, + 0x21D0000021E, + 0x21F00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022A, + 0x22B0000022C, + 0x22D0000022E, + 0x22F00000230, + 0x23100000232, + 0x2330000023A, + 0x23C0000023D, + 0x23F00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024A, + 0x24B0000024C, + 0x24D0000024E, + 0x24F000002B0, + 0x2B9000002C2, + 0x2C6000002D2, + 0x2EC000002ED, + 0x2EE000002EF, + 0x30000000340, + 
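# Reader's note, not part of the committed file: each PVALID entry packs a
# half-open range as (start << 32) | end. For example, 0x2D0000002E at the
# top of this tuple covers only U+002D ("-") and 0x300000003A covers the
# digits U+0030..U+0039. In this vendored release, core.py's check_label
# tests membership via intranges_contain(cp_value,
# codepoint_classes["PVALID"]).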
0x34200000343, + 0x3460000034F, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37B0000037E, + 0x39000000391, + 0x3AC000003CF, + 0x3D7000003D8, + 0x3D9000003DA, + 0x3DB000003DC, + 0x3DD000003DE, + 0x3DF000003E0, + 0x3E1000003E2, + 0x3E3000003E4, + 0x3E5000003E6, + 0x3E7000003E8, + 0x3E9000003EA, + 0x3EB000003EC, + 0x3ED000003EE, + 0x3EF000003F0, + 0x3F3000003F4, + 0x3F8000003F9, + 0x3FB000003FD, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046A, + 0x46B0000046C, + 0x46D0000046E, + 0x46F00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047A, + 0x47B0000047C, + 0x47D0000047E, + 0x47F00000480, + 0x48100000482, + 0x48300000488, + 0x48B0000048C, + 0x48D0000048E, + 0x48F00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049A, + 0x49B0000049C, + 0x49D0000049E, + 0x49F000004A0, + 0x4A1000004A2, + 0x4A3000004A4, + 0x4A5000004A6, + 0x4A7000004A8, + 0x4A9000004AA, + 0x4AB000004AC, + 0x4AD000004AE, + 0x4AF000004B0, + 0x4B1000004B2, + 0x4B3000004B4, + 0x4B5000004B6, + 0x4B7000004B8, + 0x4B9000004BA, + 0x4BB000004BC, + 0x4BD000004BE, + 0x4BF000004C0, + 0x4C2000004C3, + 0x4C4000004C5, + 0x4C6000004C7, + 0x4C8000004C9, + 0x4CA000004CB, + 0x4CC000004CD, + 0x4CE000004D0, + 0x4D1000004D2, + 0x4D3000004D4, + 0x4D5000004D6, + 0x4D7000004D8, + 0x4D9000004DA, + 0x4DB000004DC, + 0x4DD000004DE, + 0x4DF000004E0, + 0x4E1000004E2, + 0x4E3000004E4, + 0x4E5000004E6, + 0x4E7000004E8, + 0x4E9000004EA, + 0x4EB000004EC, + 0x4ED000004EE, + 0x4EF000004F0, + 0x4F1000004F2, + 0x4F3000004F4, + 0x4F5000004F6, + 0x4F7000004F8, + 0x4F9000004FA, + 0x4FB000004FC, + 0x4FD000004FE, + 0x4FF00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050A, + 0x50B0000050C, + 0x50D0000050E, + 0x50F00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051A, + 0x51B0000051C, + 0x51D0000051E, + 0x51F00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052A, + 0x52B0000052C, + 0x52D0000052E, + 0x52F00000530, + 0x5590000055A, + 0x56000000587, + 0x58800000589, + 0x591000005BE, + 0x5BF000005C0, + 0x5C1000005C3, + 0x5C4000005C6, + 0x5C7000005C8, + 0x5D0000005EB, + 0x5EF000005F3, + 0x6100000061B, + 0x62000000640, + 0x64100000660, + 0x66E00000675, + 0x679000006D4, + 0x6D5000006DD, + 0x6DF000006E9, + 0x6EA000006F0, + 0x6FA00000700, + 0x7100000074B, + 0x74D000007B2, + 0x7C0000007F6, + 0x7FD000007FE, + 0x8000000082E, + 0x8400000085C, + 0x8600000086B, + 0x87000000888, + 0x8890000088F, + 0x898000008E2, + 0x8E300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098D, + 0x98F00000991, + 0x993000009A9, + 0x9AA000009B1, + 0x9B2000009B3, + 0x9B6000009BA, + 0x9BC000009C5, + 0x9C7000009C9, + 0x9CB000009CF, + 0x9D7000009D8, + 0x9E0000009E4, + 0x9E6000009F2, + 0x9FC000009FD, + 0x9FE000009FF, + 0xA0100000A04, + 0xA0500000A0B, + 0xA0F00000A11, + 0xA1300000A29, + 0xA2A00000A31, + 0xA3200000A33, + 0xA3500000A36, + 0xA3800000A3A, + 0xA3C00000A3D, + 0xA3E00000A43, + 0xA4700000A49, + 0xA4B00000A4E, + 0xA5100000A52, + 0xA5C00000A5D, + 0xA6600000A76, + 0xA8100000A84, + 0xA8500000A8E, + 0xA8F00000A92, + 0xA9300000AA9, + 0xAAA00000AB1, + 0xAB200000AB4, + 0xAB500000ABA, + 0xABC00000AC6, + 0xAC700000ACA, + 0xACB00000ACE, + 0xAD000000AD1, + 0xAE000000AE4, + 0xAE600000AF0, + 0xAF900000B00, + 0xB0100000B04, + 0xB0500000B0D, + 0xB0F00000B11, + 0xB1300000B29, + 0xB2A00000B31, + 0xB3200000B34, + 0xB3500000B3A, + 
0xB3C00000B45, + 0xB4700000B49, + 0xB4B00000B4E, + 0xB5500000B58, + 0xB5F00000B64, + 0xB6600000B70, + 0xB7100000B72, + 0xB8200000B84, + 0xB8500000B8B, + 0xB8E00000B91, + 0xB9200000B96, + 0xB9900000B9B, + 0xB9C00000B9D, + 0xB9E00000BA0, + 0xBA300000BA5, + 0xBA800000BAB, + 0xBAE00000BBA, + 0xBBE00000BC3, + 0xBC600000BC9, + 0xBCA00000BCE, + 0xBD000000BD1, + 0xBD700000BD8, + 0xBE600000BF0, + 0xC0000000C0D, + 0xC0E00000C11, + 0xC1200000C29, + 0xC2A00000C3A, + 0xC3C00000C45, + 0xC4600000C49, + 0xC4A00000C4E, + 0xC5500000C57, + 0xC5800000C5B, + 0xC5D00000C5E, + 0xC6000000C64, + 0xC6600000C70, + 0xC8000000C84, + 0xC8500000C8D, + 0xC8E00000C91, + 0xC9200000CA9, + 0xCAA00000CB4, + 0xCB500000CBA, + 0xCBC00000CC5, + 0xCC600000CC9, + 0xCCA00000CCE, + 0xCD500000CD7, + 0xCDD00000CDF, + 0xCE000000CE4, + 0xCE600000CF0, + 0xCF100000CF4, + 0xD0000000D0D, + 0xD0E00000D11, + 0xD1200000D45, + 0xD4600000D49, + 0xD4A00000D4F, + 0xD5400000D58, + 0xD5F00000D64, + 0xD6600000D70, + 0xD7A00000D80, + 0xD8100000D84, + 0xD8500000D97, + 0xD9A00000DB2, + 0xDB300000DBC, + 0xDBD00000DBE, + 0xDC000000DC7, + 0xDCA00000DCB, + 0xDCF00000DD5, + 0xDD600000DD7, + 0xDD800000DE0, + 0xDE600000DF0, + 0xDF200000DF4, + 0xE0100000E33, + 0xE3400000E3B, + 0xE4000000E4F, + 0xE5000000E5A, + 0xE8100000E83, + 0xE8400000E85, + 0xE8600000E8B, + 0xE8C00000EA4, + 0xEA500000EA6, + 0xEA700000EB3, + 0xEB400000EBE, + 0xEC000000EC5, + 0xEC600000EC7, + 0xEC800000ECF, + 0xED000000EDA, + 0xEDE00000EE0, + 0xF0000000F01, + 0xF0B00000F0C, + 0xF1800000F1A, + 0xF2000000F2A, + 0xF3500000F36, + 0xF3700000F38, + 0xF3900000F3A, + 0xF3E00000F43, + 0xF4400000F48, + 0xF4900000F4D, + 0xF4E00000F52, + 0xF5300000F57, + 0xF5800000F5C, + 0xF5D00000F69, + 0xF6A00000F6D, + 0xF7100000F73, + 0xF7400000F75, + 0xF7A00000F81, + 0xF8200000F85, + 0xF8600000F93, + 0xF9400000F98, + 0xF9900000F9D, + 0xF9E00000FA2, + 0xFA300000FA7, + 0xFA800000FAC, + 0xFAD00000FB9, + 0xFBA00000FBD, + 0xFC600000FC7, + 0x10000000104A, + 0x10500000109E, + 0x10D0000010FB, + 0x10FD00001100, + 0x120000001249, + 0x124A0000124E, + 0x125000001257, + 0x125800001259, + 0x125A0000125E, + 0x126000001289, + 0x128A0000128E, + 0x1290000012B1, + 0x12B2000012B6, + 0x12B8000012BF, + 0x12C0000012C1, + 0x12C2000012C6, + 0x12C8000012D7, + 0x12D800001311, + 0x131200001316, + 0x13180000135B, + 0x135D00001360, + 0x138000001390, + 0x13A0000013F6, + 0x14010000166D, + 0x166F00001680, + 0x16810000169B, + 0x16A0000016EB, + 0x16F1000016F9, + 0x170000001716, + 0x171F00001735, + 0x174000001754, + 0x17600000176D, + 0x176E00001771, + 0x177200001774, + 0x1780000017B4, + 0x17B6000017D4, + 0x17D7000017D8, + 0x17DC000017DE, + 0x17E0000017EA, + 0x18100000181A, + 0x182000001879, + 0x1880000018AB, + 0x18B0000018F6, + 0x19000000191F, + 0x19200000192C, + 0x19300000193C, + 0x19460000196E, + 0x197000001975, + 0x1980000019AC, + 0x19B0000019CA, + 0x19D0000019DA, + 0x1A0000001A1C, + 0x1A2000001A5F, + 0x1A6000001A7D, + 0x1A7F00001A8A, + 0x1A9000001A9A, + 0x1AA700001AA8, + 0x1AB000001ABE, + 0x1ABF00001ACF, + 0x1B0000001B4D, + 0x1B5000001B5A, + 0x1B6B00001B74, + 0x1B8000001BF4, + 0x1C0000001C38, + 0x1C4000001C4A, + 0x1C4D00001C7E, + 0x1CD000001CD3, + 0x1CD400001CFB, + 0x1D0000001D2C, + 0x1D2F00001D30, + 0x1D3B00001D3C, + 0x1D4E00001D4F, + 0x1D6B00001D78, + 0x1D7900001D9B, + 0x1DC000001E00, + 0x1E0100001E02, + 0x1E0300001E04, + 0x1E0500001E06, + 0x1E0700001E08, + 0x1E0900001E0A, + 0x1E0B00001E0C, + 0x1E0D00001E0E, + 0x1E0F00001E10, + 0x1E1100001E12, + 0x1E1300001E14, + 0x1E1500001E16, + 0x1E1700001E18, + 0x1E1900001E1A, + 0x1E1B00001E1C, + 
0x1E1D00001E1E, + 0x1E1F00001E20, + 0x1E2100001E22, + 0x1E2300001E24, + 0x1E2500001E26, + 0x1E2700001E28, + 0x1E2900001E2A, + 0x1E2B00001E2C, + 0x1E2D00001E2E, + 0x1E2F00001E30, + 0x1E3100001E32, + 0x1E3300001E34, + 0x1E3500001E36, + 0x1E3700001E38, + 0x1E3900001E3A, + 0x1E3B00001E3C, + 0x1E3D00001E3E, + 0x1E3F00001E40, + 0x1E4100001E42, + 0x1E4300001E44, + 0x1E4500001E46, + 0x1E4700001E48, + 0x1E4900001E4A, + 0x1E4B00001E4C, + 0x1E4D00001E4E, + 0x1E4F00001E50, + 0x1E5100001E52, + 0x1E5300001E54, + 0x1E5500001E56, + 0x1E5700001E58, + 0x1E5900001E5A, + 0x1E5B00001E5C, + 0x1E5D00001E5E, + 0x1E5F00001E60, + 0x1E6100001E62, + 0x1E6300001E64, + 0x1E6500001E66, + 0x1E6700001E68, + 0x1E6900001E6A, + 0x1E6B00001E6C, + 0x1E6D00001E6E, + 0x1E6F00001E70, + 0x1E7100001E72, + 0x1E7300001E74, + 0x1E7500001E76, + 0x1E7700001E78, + 0x1E7900001E7A, + 0x1E7B00001E7C, + 0x1E7D00001E7E, + 0x1E7F00001E80, + 0x1E8100001E82, + 0x1E8300001E84, + 0x1E8500001E86, + 0x1E8700001E88, + 0x1E8900001E8A, + 0x1E8B00001E8C, + 0x1E8D00001E8E, + 0x1E8F00001E90, + 0x1E9100001E92, + 0x1E9300001E94, + 0x1E9500001E9A, + 0x1E9C00001E9E, + 0x1E9F00001EA0, + 0x1EA100001EA2, + 0x1EA300001EA4, + 0x1EA500001EA6, + 0x1EA700001EA8, + 0x1EA900001EAA, + 0x1EAB00001EAC, + 0x1EAD00001EAE, + 0x1EAF00001EB0, + 0x1EB100001EB2, + 0x1EB300001EB4, + 0x1EB500001EB6, + 0x1EB700001EB8, + 0x1EB900001EBA, + 0x1EBB00001EBC, + 0x1EBD00001EBE, + 0x1EBF00001EC0, + 0x1EC100001EC2, + 0x1EC300001EC4, + 0x1EC500001EC6, + 0x1EC700001EC8, + 0x1EC900001ECA, + 0x1ECB00001ECC, + 0x1ECD00001ECE, + 0x1ECF00001ED0, + 0x1ED100001ED2, + 0x1ED300001ED4, + 0x1ED500001ED6, + 0x1ED700001ED8, + 0x1ED900001EDA, + 0x1EDB00001EDC, + 0x1EDD00001EDE, + 0x1EDF00001EE0, + 0x1EE100001EE2, + 0x1EE300001EE4, + 0x1EE500001EE6, + 0x1EE700001EE8, + 0x1EE900001EEA, + 0x1EEB00001EEC, + 0x1EED00001EEE, + 0x1EEF00001EF0, + 0x1EF100001EF2, + 0x1EF300001EF4, + 0x1EF500001EF6, + 0x1EF700001EF8, + 0x1EF900001EFA, + 0x1EFB00001EFC, + 0x1EFD00001EFE, + 0x1EFF00001F08, + 0x1F1000001F16, + 0x1F2000001F28, + 0x1F3000001F38, + 0x1F4000001F46, + 0x1F5000001F58, + 0x1F6000001F68, + 0x1F7000001F71, + 0x1F7200001F73, + 0x1F7400001F75, + 0x1F7600001F77, + 0x1F7800001F79, + 0x1F7A00001F7B, + 0x1F7C00001F7D, + 0x1FB000001FB2, + 0x1FB600001FB7, + 0x1FC600001FC7, + 0x1FD000001FD3, + 0x1FD600001FD8, + 0x1FE000001FE3, + 0x1FE400001FE8, + 0x1FF600001FF7, + 0x214E0000214F, + 0x218400002185, + 0x2C3000002C60, + 0x2C6100002C62, + 0x2C6500002C67, + 0x2C6800002C69, + 0x2C6A00002C6B, + 0x2C6C00002C6D, + 0x2C7100002C72, + 0x2C7300002C75, + 0x2C7600002C7C, + 0x2C8100002C82, + 0x2C8300002C84, + 0x2C8500002C86, + 0x2C8700002C88, + 0x2C8900002C8A, + 0x2C8B00002C8C, + 0x2C8D00002C8E, + 0x2C8F00002C90, + 0x2C9100002C92, + 0x2C9300002C94, + 0x2C9500002C96, + 0x2C9700002C98, + 0x2C9900002C9A, + 0x2C9B00002C9C, + 0x2C9D00002C9E, + 0x2C9F00002CA0, + 0x2CA100002CA2, + 0x2CA300002CA4, + 0x2CA500002CA6, + 0x2CA700002CA8, + 0x2CA900002CAA, + 0x2CAB00002CAC, + 0x2CAD00002CAE, + 0x2CAF00002CB0, + 0x2CB100002CB2, + 0x2CB300002CB4, + 0x2CB500002CB6, + 0x2CB700002CB8, + 0x2CB900002CBA, + 0x2CBB00002CBC, + 0x2CBD00002CBE, + 0x2CBF00002CC0, + 0x2CC100002CC2, + 0x2CC300002CC4, + 0x2CC500002CC6, + 0x2CC700002CC8, + 0x2CC900002CCA, + 0x2CCB00002CCC, + 0x2CCD00002CCE, + 0x2CCF00002CD0, + 0x2CD100002CD2, + 0x2CD300002CD4, + 0x2CD500002CD6, + 0x2CD700002CD8, + 0x2CD900002CDA, + 0x2CDB00002CDC, + 0x2CDD00002CDE, + 0x2CDF00002CE0, + 0x2CE100002CE2, + 0x2CE300002CE5, + 0x2CEC00002CED, + 0x2CEE00002CF2, + 0x2CF300002CF4, + 0x2D0000002D26, + 
0x2D2700002D28, + 0x2D2D00002D2E, + 0x2D3000002D68, + 0x2D7F00002D97, + 0x2DA000002DA7, + 0x2DA800002DAF, + 0x2DB000002DB7, + 0x2DB800002DBF, + 0x2DC000002DC7, + 0x2DC800002DCF, + 0x2DD000002DD7, + 0x2DD800002DDF, + 0x2DE000002E00, + 0x2E2F00002E30, + 0x300500003008, + 0x302A0000302E, + 0x303C0000303D, + 0x304100003097, + 0x30990000309B, + 0x309D0000309F, + 0x30A1000030FB, + 0x30FC000030FF, + 0x310500003130, + 0x31A0000031C0, + 0x31F000003200, + 0x340000004DC0, + 0x4E000000A48D, + 0xA4D00000A4FE, + 0xA5000000A60D, + 0xA6100000A62C, + 0xA6410000A642, + 0xA6430000A644, + 0xA6450000A646, + 0xA6470000A648, + 0xA6490000A64A, + 0xA64B0000A64C, + 0xA64D0000A64E, + 0xA64F0000A650, + 0xA6510000A652, + 0xA6530000A654, + 0xA6550000A656, + 0xA6570000A658, + 0xA6590000A65A, + 0xA65B0000A65C, + 0xA65D0000A65E, + 0xA65F0000A660, + 0xA6610000A662, + 0xA6630000A664, + 0xA6650000A666, + 0xA6670000A668, + 0xA6690000A66A, + 0xA66B0000A66C, + 0xA66D0000A670, + 0xA6740000A67E, + 0xA67F0000A680, + 0xA6810000A682, + 0xA6830000A684, + 0xA6850000A686, + 0xA6870000A688, + 0xA6890000A68A, + 0xA68B0000A68C, + 0xA68D0000A68E, + 0xA68F0000A690, + 0xA6910000A692, + 0xA6930000A694, + 0xA6950000A696, + 0xA6970000A698, + 0xA6990000A69A, + 0xA69B0000A69C, + 0xA69E0000A6E6, + 0xA6F00000A6F2, + 0xA7170000A720, + 0xA7230000A724, + 0xA7250000A726, + 0xA7270000A728, + 0xA7290000A72A, + 0xA72B0000A72C, + 0xA72D0000A72E, + 0xA72F0000A732, + 0xA7330000A734, + 0xA7350000A736, + 0xA7370000A738, + 0xA7390000A73A, + 0xA73B0000A73C, + 0xA73D0000A73E, + 0xA73F0000A740, + 0xA7410000A742, + 0xA7430000A744, + 0xA7450000A746, + 0xA7470000A748, + 0xA7490000A74A, + 0xA74B0000A74C, + 0xA74D0000A74E, + 0xA74F0000A750, + 0xA7510000A752, + 0xA7530000A754, + 0xA7550000A756, + 0xA7570000A758, + 0xA7590000A75A, + 0xA75B0000A75C, + 0xA75D0000A75E, + 0xA75F0000A760, + 0xA7610000A762, + 0xA7630000A764, + 0xA7650000A766, + 0xA7670000A768, + 0xA7690000A76A, + 0xA76B0000A76C, + 0xA76D0000A76E, + 0xA76F0000A770, + 0xA7710000A779, + 0xA77A0000A77B, + 0xA77C0000A77D, + 0xA77F0000A780, + 0xA7810000A782, + 0xA7830000A784, + 0xA7850000A786, + 0xA7870000A789, + 0xA78C0000A78D, + 0xA78E0000A790, + 0xA7910000A792, + 0xA7930000A796, + 0xA7970000A798, + 0xA7990000A79A, + 0xA79B0000A79C, + 0xA79D0000A79E, + 0xA79F0000A7A0, + 0xA7A10000A7A2, + 0xA7A30000A7A4, + 0xA7A50000A7A6, + 0xA7A70000A7A8, + 0xA7A90000A7AA, + 0xA7AF0000A7B0, + 0xA7B50000A7B6, + 0xA7B70000A7B8, + 0xA7B90000A7BA, + 0xA7BB0000A7BC, + 0xA7BD0000A7BE, + 0xA7BF0000A7C0, + 0xA7C10000A7C2, + 0xA7C30000A7C4, + 0xA7C80000A7C9, + 0xA7CA0000A7CB, + 0xA7D10000A7D2, + 0xA7D30000A7D4, + 0xA7D50000A7D6, + 0xA7D70000A7D8, + 0xA7D90000A7DA, + 0xA7F60000A7F8, + 0xA7FA0000A828, + 0xA82C0000A82D, + 0xA8400000A874, + 0xA8800000A8C6, + 0xA8D00000A8DA, + 0xA8E00000A8F8, + 0xA8FB0000A8FC, + 0xA8FD0000A92E, + 0xA9300000A954, + 0xA9800000A9C1, + 0xA9CF0000A9DA, + 0xA9E00000A9FF, + 0xAA000000AA37, + 0xAA400000AA4E, + 0xAA500000AA5A, + 0xAA600000AA77, + 0xAA7A0000AAC3, + 0xAADB0000AADE, + 0xAAE00000AAF0, + 0xAAF20000AAF7, + 0xAB010000AB07, + 0xAB090000AB0F, + 0xAB110000AB17, + 0xAB200000AB27, + 0xAB280000AB2F, + 0xAB300000AB5B, + 0xAB600000AB69, + 0xABC00000ABEB, + 0xABEC0000ABEE, + 0xABF00000ABFA, + 0xAC000000D7A4, + 0xFA0E0000FA10, + 0xFA110000FA12, + 0xFA130000FA15, + 0xFA1F0000FA20, + 0xFA210000FA22, + 0xFA230000FA25, + 0xFA270000FA2A, + 0xFB1E0000FB1F, + 0xFE200000FE30, + 0xFE730000FE74, + 0x100000001000C, + 0x1000D00010027, + 0x100280001003B, + 0x1003C0001003E, + 0x1003F0001004E, + 0x100500001005E, + 0x10080000100FB, + 
0x101FD000101FE, + 0x102800001029D, + 0x102A0000102D1, + 0x102E0000102E1, + 0x1030000010320, + 0x1032D00010341, + 0x103420001034A, + 0x103500001037B, + 0x103800001039E, + 0x103A0000103C4, + 0x103C8000103D0, + 0x104280001049E, + 0x104A0000104AA, + 0x104D8000104FC, + 0x1050000010528, + 0x1053000010564, + 0x10597000105A2, + 0x105A3000105B2, + 0x105B3000105BA, + 0x105BB000105BD, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010781, + 0x1080000010806, + 0x1080800010809, + 0x1080A00010836, + 0x1083700010839, + 0x1083C0001083D, + 0x1083F00010856, + 0x1086000010877, + 0x108800001089F, + 0x108E0000108F3, + 0x108F4000108F6, + 0x1090000010916, + 0x109200001093A, + 0x10980000109B8, + 0x109BE000109C0, + 0x10A0000010A04, + 0x10A0500010A07, + 0x10A0C00010A14, + 0x10A1500010A18, + 0x10A1900010A36, + 0x10A3800010A3B, + 0x10A3F00010A40, + 0x10A6000010A7D, + 0x10A8000010A9D, + 0x10AC000010AC8, + 0x10AC900010AE7, + 0x10B0000010B36, + 0x10B4000010B56, + 0x10B6000010B73, + 0x10B8000010B92, + 0x10C0000010C49, + 0x10CC000010CF3, + 0x10D0000010D28, + 0x10D3000010D3A, + 0x10E8000010EAA, + 0x10EAB00010EAD, + 0x10EB000010EB2, + 0x10EFD00010F1D, + 0x10F2700010F28, + 0x10F3000010F51, + 0x10F7000010F86, + 0x10FB000010FC5, + 0x10FE000010FF7, + 0x1100000011047, + 0x1106600011076, + 0x1107F000110BB, + 0x110C2000110C3, + 0x110D0000110E9, + 0x110F0000110FA, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111C5, + 0x111C9000111CD, + 0x111CE000111DB, + 0x111DC000111DD, + 0x1120000011212, + 0x1121300011238, + 0x1123E00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128A0001128E, + 0x1128F0001129E, + 0x1129F000112A9, + 0x112B0000112EB, + 0x112F0000112FA, + 0x1130000011304, + 0x113050001130D, + 0x1130F00011311, + 0x1131300011329, + 0x1132A00011331, + 0x1133200011334, + 0x113350001133A, + 0x1133B00011345, + 0x1134700011349, + 0x1134B0001134E, + 0x1135000011351, + 0x1135700011358, + 0x1135D00011364, + 0x113660001136D, + 0x1137000011375, + 0x114000001144B, + 0x114500001145A, + 0x1145E00011462, + 0x11480000114C6, + 0x114C7000114C8, + 0x114D0000114DA, + 0x11580000115B6, + 0x115B8000115C1, + 0x115D8000115DE, + 0x1160000011641, + 0x1164400011645, + 0x116500001165A, + 0x11680000116B9, + 0x116C0000116CA, + 0x117000001171B, + 0x1171D0001172C, + 0x117300001173A, + 0x1174000011747, + 0x118000001183B, + 0x118C0000118EA, + 0x118FF00011907, + 0x119090001190A, + 0x1190C00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193B00011944, + 0x119500001195A, + 0x119A0000119A8, + 0x119AA000119D8, + 0x119DA000119E2, + 0x119E3000119E5, + 0x11A0000011A3F, + 0x11A4700011A48, + 0x11A5000011A9A, + 0x11A9D00011A9E, + 0x11AB000011AF9, + 0x11C0000011C09, + 0x11C0A00011C37, + 0x11C3800011C41, + 0x11C5000011C5A, + 0x11C7200011C90, + 0x11C9200011CA8, + 0x11CA900011CB7, + 0x11D0000011D07, + 0x11D0800011D0A, + 0x11D0B00011D37, + 0x11D3A00011D3B, + 0x11D3C00011D3E, + 0x11D3F00011D48, + 0x11D5000011D5A, + 0x11D6000011D66, + 0x11D6700011D69, + 0x11D6A00011D8F, + 0x11D9000011D92, + 0x11D9300011D99, + 0x11DA000011DAA, + 0x11EE000011EF7, + 0x11F0000011F11, + 0x11F1200011F3B, + 0x11F3E00011F43, + 0x11F5000011F5A, + 0x11FB000011FB1, + 0x120000001239A, + 0x1248000012544, + 0x12F9000012FF1, + 0x1300000013430, + 0x1344000013456, + 0x1440000014647, + 0x1680000016A39, + 0x16A4000016A5F, + 0x16A6000016A6A, + 0x16A7000016ABF, + 0x16AC000016ACA, + 0x16AD000016AEE, + 0x16AF000016AF5, + 0x16B0000016B37, + 0x16B4000016B44, + 0x16B5000016B5A, + 0x16B6300016B78, + 0x16B7D00016B90, + 
0x16E6000016E80, + 0x16F0000016F4B, + 0x16F4F00016F88, + 0x16F8F00016FA0, + 0x16FE000016FE2, + 0x16FE300016FE5, + 0x16FF000016FF2, + 0x17000000187F8, + 0x1880000018CD6, + 0x18D0000018D09, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B123, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1B1550001B156, + 0x1B1640001B168, + 0x1B1700001B2FC, + 0x1BC000001BC6B, + 0x1BC700001BC7D, + 0x1BC800001BC89, + 0x1BC900001BC9A, + 0x1BC9D0001BC9F, + 0x1CF000001CF2E, + 0x1CF300001CF47, + 0x1DA000001DA37, + 0x1DA3B0001DA6D, + 0x1DA750001DA76, + 0x1DA840001DA85, + 0x1DA9B0001DAA0, + 0x1DAA10001DAB0, + 0x1DF000001DF1F, + 0x1DF250001DF2B, + 0x1E0000001E007, + 0x1E0080001E019, + 0x1E01B0001E022, + 0x1E0230001E025, + 0x1E0260001E02B, + 0x1E08F0001E090, + 0x1E1000001E12D, + 0x1E1300001E13E, + 0x1E1400001E14A, + 0x1E14E0001E14F, + 0x1E2900001E2AF, + 0x1E2C00001E2FA, + 0x1E4D00001E4FA, + 0x1E7E00001E7E7, + 0x1E7E80001E7EC, + 0x1E7ED0001E7EF, + 0x1E7F00001E7FF, + 0x1E8000001E8C5, + 0x1E8D00001E8D7, + 0x1E9220001E94C, + 0x1E9500001E95A, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x300000003134B, + 0x31350000323B0, + ), + "CONTEXTJ": (0x200C0000200E,), + "CONTEXTO": ( + 0xB7000000B8, + 0x37500000376, + 0x5F3000005F5, + 0x6600000066A, + 0x6F0000006FA, + 0x30FB000030FC, + ), +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py new file mode 100644 index 0000000..7bfaa8d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/intranges.py @@ -0,0 +1,57 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i + 1 < len(sorted_list): + if sorted_list[i] == sorted_list[i + 1] - 1: + continue + current_range = sorted_list[last_write + 1 : i + 1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos - 1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py new file mode 100644 index 0000000..514ff7e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/package_data.py @@ -0,0 +1 @@ +__version__ = "3.10" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py b/venv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py new file mode 100644 index 0000000..eb89432 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/idna/uts46data.py @@ -0,0 +1,8681 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "15.1.0" + + +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, "3"), + (0x1, "3"), + (0x2, "3"), + (0x3, "3"), + (0x4, "3"), + (0x5, "3"), + (0x6, "3"), + (0x7, "3"), + (0x8, "3"), + (0x9, "3"), + (0xA, "3"), + (0xB, "3"), + (0xC, "3"), + (0xD, "3"), + (0xE, "3"), + (0xF, "3"), + (0x10, "3"), + (0x11, "3"), + (0x12, "3"), + (0x13, "3"), + (0x14, "3"), + (0x15, "3"), + (0x16, "3"), + (0x17, "3"), + (0x18, "3"), + (0x19, "3"), + (0x1A, "3"), + (0x1B, "3"), + (0x1C, "3"), + (0x1D, "3"), + (0x1E, "3"), + (0x1F, "3"), + (0x20, "3"), + (0x21, "3"), + (0x22, "3"), + (0x23, "3"), + (0x24, "3"), + (0x25, "3"), + (0x26, "3"), + (0x27, "3"), + (0x28, "3"), + (0x29, "3"), + (0x2A, "3"), + (0x2B, "3"), + (0x2C, "3"), + (0x2D, "V"), + (0x2E, "V"), + (0x2F, "3"), + (0x30, "V"), + (0x31, "V"), + (0x32, "V"), + (0x33, "V"), + (0x34, "V"), + (0x35, "V"), + (0x36, "V"), + (0x37, "V"), + (0x38, "V"), + (0x39, "V"), + (0x3A, "3"), + (0x3B, "3"), + (0x3C, "3"), + (0x3D, "3"), + (0x3E, "3"), + (0x3F, "3"), + (0x40, "3"), + (0x41, "M", "a"), + (0x42, "M", "b"), + (0x43, "M", "c"), + (0x44, "M", "d"), + (0x45, "M", "e"), + (0x46, "M", "f"), + (0x47, "M", "g"), + (0x48, "M", "h"), + (0x49, "M", "i"), + (0x4A, "M", "j"), + (0x4B, "M", "k"), + (0x4C, "M", "l"), + (0x4D, "M", "m"), + (0x4E, "M", "n"), + (0x4F, "M", "o"), + (0x50, "M", "p"), + (0x51, "M", "q"), + (0x52, "M", "r"), + (0x53, "M", "s"), + (0x54, "M", "t"), + (0x55, 
"M", "u"), + (0x56, "M", "v"), + (0x57, "M", "w"), + (0x58, "M", "x"), + (0x59, "M", "y"), + (0x5A, "M", "z"), + (0x5B, "3"), + (0x5C, "3"), + (0x5D, "3"), + (0x5E, "3"), + (0x5F, "3"), + (0x60, "3"), + (0x61, "V"), + (0x62, "V"), + (0x63, "V"), + ] + + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, "V"), + (0x65, "V"), + (0x66, "V"), + (0x67, "V"), + (0x68, "V"), + (0x69, "V"), + (0x6A, "V"), + (0x6B, "V"), + (0x6C, "V"), + (0x6D, "V"), + (0x6E, "V"), + (0x6F, "V"), + (0x70, "V"), + (0x71, "V"), + (0x72, "V"), + (0x73, "V"), + (0x74, "V"), + (0x75, "V"), + (0x76, "V"), + (0x77, "V"), + (0x78, "V"), + (0x79, "V"), + (0x7A, "V"), + (0x7B, "3"), + (0x7C, "3"), + (0x7D, "3"), + (0x7E, "3"), + (0x7F, "3"), + (0x80, "X"), + (0x81, "X"), + (0x82, "X"), + (0x83, "X"), + (0x84, "X"), + (0x85, "X"), + (0x86, "X"), + (0x87, "X"), + (0x88, "X"), + (0x89, "X"), + (0x8A, "X"), + (0x8B, "X"), + (0x8C, "X"), + (0x8D, "X"), + (0x8E, "X"), + (0x8F, "X"), + (0x90, "X"), + (0x91, "X"), + (0x92, "X"), + (0x93, "X"), + (0x94, "X"), + (0x95, "X"), + (0x96, "X"), + (0x97, "X"), + (0x98, "X"), + (0x99, "X"), + (0x9A, "X"), + (0x9B, "X"), + (0x9C, "X"), + (0x9D, "X"), + (0x9E, "X"), + (0x9F, "X"), + (0xA0, "3", " "), + (0xA1, "V"), + (0xA2, "V"), + (0xA3, "V"), + (0xA4, "V"), + (0xA5, "V"), + (0xA6, "V"), + (0xA7, "V"), + (0xA8, "3", " ̈"), + (0xA9, "V"), + (0xAA, "M", "a"), + (0xAB, "V"), + (0xAC, "V"), + (0xAD, "I"), + (0xAE, "V"), + (0xAF, "3", " ̄"), + (0xB0, "V"), + (0xB1, "V"), + (0xB2, "M", "2"), + (0xB3, "M", "3"), + (0xB4, "3", " ́"), + (0xB5, "M", "μ"), + (0xB6, "V"), + (0xB7, "V"), + (0xB8, "3", " ̧"), + (0xB9, "M", "1"), + (0xBA, "M", "o"), + (0xBB, "V"), + (0xBC, "M", "1⁄4"), + (0xBD, "M", "1⁄2"), + (0xBE, "M", "3⁄4"), + (0xBF, "V"), + (0xC0, "M", "à"), + (0xC1, "M", "á"), + (0xC2, "M", "â"), + (0xC3, "M", "ã"), + (0xC4, "M", "ä"), + (0xC5, "M", "å"), + (0xC6, "M", "æ"), + (0xC7, "M", "ç"), + ] + + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, "M", "è"), + (0xC9, "M", "é"), + (0xCA, "M", "ê"), + (0xCB, "M", "ë"), + (0xCC, "M", "ì"), + (0xCD, "M", "í"), + (0xCE, "M", "î"), + (0xCF, "M", "ï"), + (0xD0, "M", "ð"), + (0xD1, "M", "ñ"), + (0xD2, "M", "ò"), + (0xD3, "M", "ó"), + (0xD4, "M", "ô"), + (0xD5, "M", "õ"), + (0xD6, "M", "ö"), + (0xD7, "V"), + (0xD8, "M", "ø"), + (0xD9, "M", "ù"), + (0xDA, "M", "ú"), + (0xDB, "M", "û"), + (0xDC, "M", "ü"), + (0xDD, "M", "ý"), + (0xDE, "M", "þ"), + (0xDF, "D", "ss"), + (0xE0, "V"), + (0xE1, "V"), + (0xE2, "V"), + (0xE3, "V"), + (0xE4, "V"), + (0xE5, "V"), + (0xE6, "V"), + (0xE7, "V"), + (0xE8, "V"), + (0xE9, "V"), + (0xEA, "V"), + (0xEB, "V"), + (0xEC, "V"), + (0xED, "V"), + (0xEE, "V"), + (0xEF, "V"), + (0xF0, "V"), + (0xF1, "V"), + (0xF2, "V"), + (0xF3, "V"), + (0xF4, "V"), + (0xF5, "V"), + (0xF6, "V"), + (0xF7, "V"), + (0xF8, "V"), + (0xF9, "V"), + (0xFA, "V"), + (0xFB, "V"), + (0xFC, "V"), + (0xFD, "V"), + (0xFE, "V"), + (0xFF, "V"), + (0x100, "M", "ā"), + (0x101, "V"), + (0x102, "M", "ă"), + (0x103, "V"), + (0x104, "M", "ą"), + (0x105, "V"), + (0x106, "M", "ć"), + (0x107, "V"), + (0x108, "M", "ĉ"), + (0x109, "V"), + (0x10A, "M", "ċ"), + (0x10B, "V"), + (0x10C, "M", "č"), + (0x10D, "V"), + (0x10E, "M", "ď"), + (0x10F, "V"), + (0x110, "M", "đ"), + (0x111, "V"), + (0x112, "M", "ē"), + (0x113, "V"), + (0x114, "M", "ĕ"), + (0x115, "V"), + (0x116, "M", "ė"), + (0x117, "V"), + (0x118, "M", "ę"), + (0x119, "V"), + (0x11A, "M", "ě"), + (0x11B, "V"), + (0x11C, "M", "ĝ"), + (0x11D, "V"), + 
(0x11E, "M", "ğ"), + (0x11F, "V"), + (0x120, "M", "ġ"), + (0x121, "V"), + (0x122, "M", "ģ"), + (0x123, "V"), + (0x124, "M", "ĥ"), + (0x125, "V"), + (0x126, "M", "ħ"), + (0x127, "V"), + (0x128, "M", "ĩ"), + (0x129, "V"), + (0x12A, "M", "ī"), + (0x12B, "V"), + ] + + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, "M", "ĭ"), + (0x12D, "V"), + (0x12E, "M", "į"), + (0x12F, "V"), + (0x130, "M", "i̇"), + (0x131, "V"), + (0x132, "M", "ij"), + (0x134, "M", "ĵ"), + (0x135, "V"), + (0x136, "M", "ķ"), + (0x137, "V"), + (0x139, "M", "ĺ"), + (0x13A, "V"), + (0x13B, "M", "ļ"), + (0x13C, "V"), + (0x13D, "M", "ľ"), + (0x13E, "V"), + (0x13F, "M", "l·"), + (0x141, "M", "ł"), + (0x142, "V"), + (0x143, "M", "ń"), + (0x144, "V"), + (0x145, "M", "ņ"), + (0x146, "V"), + (0x147, "M", "ň"), + (0x148, "V"), + (0x149, "M", "ʼn"), + (0x14A, "M", "ŋ"), + (0x14B, "V"), + (0x14C, "M", "ō"), + (0x14D, "V"), + (0x14E, "M", "ŏ"), + (0x14F, "V"), + (0x150, "M", "ő"), + (0x151, "V"), + (0x152, "M", "œ"), + (0x153, "V"), + (0x154, "M", "ŕ"), + (0x155, "V"), + (0x156, "M", "ŗ"), + (0x157, "V"), + (0x158, "M", "ř"), + (0x159, "V"), + (0x15A, "M", "ś"), + (0x15B, "V"), + (0x15C, "M", "ŝ"), + (0x15D, "V"), + (0x15E, "M", "ş"), + (0x15F, "V"), + (0x160, "M", "š"), + (0x161, "V"), + (0x162, "M", "ţ"), + (0x163, "V"), + (0x164, "M", "ť"), + (0x165, "V"), + (0x166, "M", "ŧ"), + (0x167, "V"), + (0x168, "M", "ũ"), + (0x169, "V"), + (0x16A, "M", "ū"), + (0x16B, "V"), + (0x16C, "M", "ŭ"), + (0x16D, "V"), + (0x16E, "M", "ů"), + (0x16F, "V"), + (0x170, "M", "ű"), + (0x171, "V"), + (0x172, "M", "ų"), + (0x173, "V"), + (0x174, "M", "ŵ"), + (0x175, "V"), + (0x176, "M", "ŷ"), + (0x177, "V"), + (0x178, "M", "ÿ"), + (0x179, "M", "ź"), + (0x17A, "V"), + (0x17B, "M", "ż"), + (0x17C, "V"), + (0x17D, "M", "ž"), + (0x17E, "V"), + (0x17F, "M", "s"), + (0x180, "V"), + (0x181, "M", "ɓ"), + (0x182, "M", "ƃ"), + (0x183, "V"), + (0x184, "M", "ƅ"), + (0x185, "V"), + (0x186, "M", "ɔ"), + (0x187, "M", "ƈ"), + (0x188, "V"), + (0x189, "M", "ɖ"), + (0x18A, "M", "ɗ"), + (0x18B, "M", "ƌ"), + (0x18C, "V"), + (0x18E, "M", "ǝ"), + (0x18F, "M", "ə"), + (0x190, "M", "ɛ"), + (0x191, "M", "ƒ"), + (0x192, "V"), + (0x193, "M", "ɠ"), + ] + + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, "M", "ɣ"), + (0x195, "V"), + (0x196, "M", "ɩ"), + (0x197, "M", "ɨ"), + (0x198, "M", "ƙ"), + (0x199, "V"), + (0x19C, "M", "ɯ"), + (0x19D, "M", "ɲ"), + (0x19E, "V"), + (0x19F, "M", "ɵ"), + (0x1A0, "M", "ơ"), + (0x1A1, "V"), + (0x1A2, "M", "ƣ"), + (0x1A3, "V"), + (0x1A4, "M", "ƥ"), + (0x1A5, "V"), + (0x1A6, "M", "ʀ"), + (0x1A7, "M", "ƨ"), + (0x1A8, "V"), + (0x1A9, "M", "ʃ"), + (0x1AA, "V"), + (0x1AC, "M", "ƭ"), + (0x1AD, "V"), + (0x1AE, "M", "ʈ"), + (0x1AF, "M", "ư"), + (0x1B0, "V"), + (0x1B1, "M", "ʊ"), + (0x1B2, "M", "ʋ"), + (0x1B3, "M", "ƴ"), + (0x1B4, "V"), + (0x1B5, "M", "ƶ"), + (0x1B6, "V"), + (0x1B7, "M", "ʒ"), + (0x1B8, "M", "ƹ"), + (0x1B9, "V"), + (0x1BC, "M", "ƽ"), + (0x1BD, "V"), + (0x1C4, "M", "dž"), + (0x1C7, "M", "lj"), + (0x1CA, "M", "nj"), + (0x1CD, "M", "ǎ"), + (0x1CE, "V"), + (0x1CF, "M", "ǐ"), + (0x1D0, "V"), + (0x1D1, "M", "ǒ"), + (0x1D2, "V"), + (0x1D3, "M", "ǔ"), + (0x1D4, "V"), + (0x1D5, "M", "ǖ"), + (0x1D6, "V"), + (0x1D7, "M", "ǘ"), + (0x1D8, "V"), + (0x1D9, "M", "ǚ"), + (0x1DA, "V"), + (0x1DB, "M", "ǜ"), + (0x1DC, "V"), + (0x1DE, "M", "ǟ"), + (0x1DF, "V"), + (0x1E0, "M", "ǡ"), + (0x1E1, "V"), + (0x1E2, "M", "ǣ"), + (0x1E3, "V"), + (0x1E4, "M", "ǥ"), + (0x1E5, "V"), + (0x1E6, "M", "ǧ"), 
+ (0x1E7, "V"), + (0x1E8, "M", "ǩ"), + (0x1E9, "V"), + (0x1EA, "M", "ǫ"), + (0x1EB, "V"), + (0x1EC, "M", "ǭ"), + (0x1ED, "V"), + (0x1EE, "M", "ǯ"), + (0x1EF, "V"), + (0x1F1, "M", "dz"), + (0x1F4, "M", "ǵ"), + (0x1F5, "V"), + (0x1F6, "M", "ƕ"), + (0x1F7, "M", "ƿ"), + (0x1F8, "M", "ǹ"), + (0x1F9, "V"), + (0x1FA, "M", "ǻ"), + (0x1FB, "V"), + (0x1FC, "M", "ǽ"), + (0x1FD, "V"), + (0x1FE, "M", "ǿ"), + (0x1FF, "V"), + (0x200, "M", "ȁ"), + (0x201, "V"), + (0x202, "M", "ȃ"), + (0x203, "V"), + (0x204, "M", "ȅ"), + (0x205, "V"), + (0x206, "M", "ȇ"), + (0x207, "V"), + (0x208, "M", "ȉ"), + (0x209, "V"), + (0x20A, "M", "ȋ"), + (0x20B, "V"), + (0x20C, "M", "ȍ"), + ] + + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, "V"), + (0x20E, "M", "ȏ"), + (0x20F, "V"), + (0x210, "M", "ȑ"), + (0x211, "V"), + (0x212, "M", "ȓ"), + (0x213, "V"), + (0x214, "M", "ȕ"), + (0x215, "V"), + (0x216, "M", "ȗ"), + (0x217, "V"), + (0x218, "M", "ș"), + (0x219, "V"), + (0x21A, "M", "ț"), + (0x21B, "V"), + (0x21C, "M", "ȝ"), + (0x21D, "V"), + (0x21E, "M", "ȟ"), + (0x21F, "V"), + (0x220, "M", "ƞ"), + (0x221, "V"), + (0x222, "M", "ȣ"), + (0x223, "V"), + (0x224, "M", "ȥ"), + (0x225, "V"), + (0x226, "M", "ȧ"), + (0x227, "V"), + (0x228, "M", "ȩ"), + (0x229, "V"), + (0x22A, "M", "ȫ"), + (0x22B, "V"), + (0x22C, "M", "ȭ"), + (0x22D, "V"), + (0x22E, "M", "ȯ"), + (0x22F, "V"), + (0x230, "M", "ȱ"), + (0x231, "V"), + (0x232, "M", "ȳ"), + (0x233, "V"), + (0x23A, "M", "ⱥ"), + (0x23B, "M", "ȼ"), + (0x23C, "V"), + (0x23D, "M", "ƚ"), + (0x23E, "M", "ⱦ"), + (0x23F, "V"), + (0x241, "M", "ɂ"), + (0x242, "V"), + (0x243, "M", "ƀ"), + (0x244, "M", "ʉ"), + (0x245, "M", "ʌ"), + (0x246, "M", "ɇ"), + (0x247, "V"), + (0x248, "M", "ɉ"), + (0x249, "V"), + (0x24A, "M", "ɋ"), + (0x24B, "V"), + (0x24C, "M", "ɍ"), + (0x24D, "V"), + (0x24E, "M", "ɏ"), + (0x24F, "V"), + (0x2B0, "M", "h"), + (0x2B1, "M", "ɦ"), + (0x2B2, "M", "j"), + (0x2B3, "M", "r"), + (0x2B4, "M", "ɹ"), + (0x2B5, "M", "ɻ"), + (0x2B6, "M", "ʁ"), + (0x2B7, "M", "w"), + (0x2B8, "M", "y"), + (0x2B9, "V"), + (0x2D8, "3", " ̆"), + (0x2D9, "3", " ̇"), + (0x2DA, "3", " ̊"), + (0x2DB, "3", " ̨"), + (0x2DC, "3", " ̃"), + (0x2DD, "3", " ̋"), + (0x2DE, "V"), + (0x2E0, "M", "ɣ"), + (0x2E1, "M", "l"), + (0x2E2, "M", "s"), + (0x2E3, "M", "x"), + (0x2E4, "M", "ʕ"), + (0x2E5, "V"), + (0x340, "M", "̀"), + (0x341, "M", "́"), + (0x342, "V"), + (0x343, "M", "̓"), + (0x344, "M", "̈́"), + (0x345, "M", "ι"), + (0x346, "V"), + (0x34F, "I"), + (0x350, "V"), + (0x370, "M", "ͱ"), + (0x371, "V"), + (0x372, "M", "ͳ"), + (0x373, "V"), + (0x374, "M", "ʹ"), + (0x375, "V"), + (0x376, "M", "ͷ"), + (0x377, "V"), + ] + + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, "X"), + (0x37A, "3", " ι"), + (0x37B, "V"), + (0x37E, "3", ";"), + (0x37F, "M", "ϳ"), + (0x380, "X"), + (0x384, "3", " ́"), + (0x385, "3", " ̈́"), + (0x386, "M", "ά"), + (0x387, "M", "·"), + (0x388, "M", "έ"), + (0x389, "M", "ή"), + (0x38A, "M", "ί"), + (0x38B, "X"), + (0x38C, "M", "ό"), + (0x38D, "X"), + (0x38E, "M", "ύ"), + (0x38F, "M", "ώ"), + (0x390, "V"), + (0x391, "M", "α"), + (0x392, "M", "β"), + (0x393, "M", "γ"), + (0x394, "M", "δ"), + (0x395, "M", "ε"), + (0x396, "M", "ζ"), + (0x397, "M", "η"), + (0x398, "M", "θ"), + (0x399, "M", "ι"), + (0x39A, "M", "κ"), + (0x39B, "M", "λ"), + (0x39C, "M", "μ"), + (0x39D, "M", "ν"), + (0x39E, "M", "ξ"), + (0x39F, "M", "ο"), + (0x3A0, "M", "π"), + (0x3A1, "M", "ρ"), + (0x3A2, "X"), + (0x3A3, "M", "σ"), + (0x3A4, "M", "τ"), + (0x3A5, "M", "υ"), + 
(0x3A6, "M", "φ"), + (0x3A7, "M", "χ"), + (0x3A8, "M", "ψ"), + (0x3A9, "M", "ω"), + (0x3AA, "M", "ϊ"), + (0x3AB, "M", "ϋ"), + (0x3AC, "V"), + (0x3C2, "D", "σ"), + (0x3C3, "V"), + (0x3CF, "M", "ϗ"), + (0x3D0, "M", "β"), + (0x3D1, "M", "θ"), + (0x3D2, "M", "υ"), + (0x3D3, "M", "ύ"), + (0x3D4, "M", "ϋ"), + (0x3D5, "M", "φ"), + (0x3D6, "M", "π"), + (0x3D7, "V"), + (0x3D8, "M", "ϙ"), + (0x3D9, "V"), + (0x3DA, "M", "ϛ"), + (0x3DB, "V"), + (0x3DC, "M", "ϝ"), + (0x3DD, "V"), + (0x3DE, "M", "ϟ"), + (0x3DF, "V"), + (0x3E0, "M", "ϡ"), + (0x3E1, "V"), + (0x3E2, "M", "ϣ"), + (0x3E3, "V"), + (0x3E4, "M", "ϥ"), + (0x3E5, "V"), + (0x3E6, "M", "ϧ"), + (0x3E7, "V"), + (0x3E8, "M", "ϩ"), + (0x3E9, "V"), + (0x3EA, "M", "ϫ"), + (0x3EB, "V"), + (0x3EC, "M", "ϭ"), + (0x3ED, "V"), + (0x3EE, "M", "ϯ"), + (0x3EF, "V"), + (0x3F0, "M", "κ"), + (0x3F1, "M", "ρ"), + (0x3F2, "M", "σ"), + (0x3F3, "V"), + (0x3F4, "M", "θ"), + (0x3F5, "M", "ε"), + (0x3F6, "V"), + (0x3F7, "M", "ϸ"), + (0x3F8, "V"), + (0x3F9, "M", "σ"), + (0x3FA, "M", "ϻ"), + (0x3FB, "V"), + (0x3FD, "M", "ͻ"), + (0x3FE, "M", "ͼ"), + (0x3FF, "M", "ͽ"), + (0x400, "M", "ѐ"), + (0x401, "M", "ё"), + (0x402, "M", "ђ"), + ] + + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, "M", "ѓ"), + (0x404, "M", "є"), + (0x405, "M", "ѕ"), + (0x406, "M", "і"), + (0x407, "M", "ї"), + (0x408, "M", "ј"), + (0x409, "M", "љ"), + (0x40A, "M", "њ"), + (0x40B, "M", "ћ"), + (0x40C, "M", "ќ"), + (0x40D, "M", "ѝ"), + (0x40E, "M", "ў"), + (0x40F, "M", "џ"), + (0x410, "M", "а"), + (0x411, "M", "б"), + (0x412, "M", "в"), + (0x413, "M", "г"), + (0x414, "M", "д"), + (0x415, "M", "е"), + (0x416, "M", "ж"), + (0x417, "M", "з"), + (0x418, "M", "и"), + (0x419, "M", "й"), + (0x41A, "M", "к"), + (0x41B, "M", "л"), + (0x41C, "M", "м"), + (0x41D, "M", "н"), + (0x41E, "M", "о"), + (0x41F, "M", "п"), + (0x420, "M", "р"), + (0x421, "M", "с"), + (0x422, "M", "т"), + (0x423, "M", "у"), + (0x424, "M", "ф"), + (0x425, "M", "х"), + (0x426, "M", "ц"), + (0x427, "M", "ч"), + (0x428, "M", "ш"), + (0x429, "M", "щ"), + (0x42A, "M", "ъ"), + (0x42B, "M", "ы"), + (0x42C, "M", "ь"), + (0x42D, "M", "э"), + (0x42E, "M", "ю"), + (0x42F, "M", "я"), + (0x430, "V"), + (0x460, "M", "ѡ"), + (0x461, "V"), + (0x462, "M", "ѣ"), + (0x463, "V"), + (0x464, "M", "ѥ"), + (0x465, "V"), + (0x466, "M", "ѧ"), + (0x467, "V"), + (0x468, "M", "ѩ"), + (0x469, "V"), + (0x46A, "M", "ѫ"), + (0x46B, "V"), + (0x46C, "M", "ѭ"), + (0x46D, "V"), + (0x46E, "M", "ѯ"), + (0x46F, "V"), + (0x470, "M", "ѱ"), + (0x471, "V"), + (0x472, "M", "ѳ"), + (0x473, "V"), + (0x474, "M", "ѵ"), + (0x475, "V"), + (0x476, "M", "ѷ"), + (0x477, "V"), + (0x478, "M", "ѹ"), + (0x479, "V"), + (0x47A, "M", "ѻ"), + (0x47B, "V"), + (0x47C, "M", "ѽ"), + (0x47D, "V"), + (0x47E, "M", "ѿ"), + (0x47F, "V"), + (0x480, "M", "ҁ"), + (0x481, "V"), + (0x48A, "M", "ҋ"), + (0x48B, "V"), + (0x48C, "M", "ҍ"), + (0x48D, "V"), + (0x48E, "M", "ҏ"), + (0x48F, "V"), + (0x490, "M", "ґ"), + (0x491, "V"), + (0x492, "M", "ғ"), + (0x493, "V"), + (0x494, "M", "ҕ"), + (0x495, "V"), + (0x496, "M", "җ"), + (0x497, "V"), + (0x498, "M", "ҙ"), + (0x499, "V"), + (0x49A, "M", "қ"), + (0x49B, "V"), + (0x49C, "M", "ҝ"), + (0x49D, "V"), + ] + + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, "M", "ҟ"), + (0x49F, "V"), + (0x4A0, "M", "ҡ"), + (0x4A1, "V"), + (0x4A2, "M", "ң"), + (0x4A3, "V"), + (0x4A4, "M", "ҥ"), + (0x4A5, "V"), + (0x4A6, "M", "ҧ"), + (0x4A7, "V"), + (0x4A8, "M", "ҩ"), + (0x4A9, "V"), + (0x4AA, "M", "ҫ"), + (0x4AB, 
"V"), + (0x4AC, "M", "ҭ"), + (0x4AD, "V"), + (0x4AE, "M", "ү"), + (0x4AF, "V"), + (0x4B0, "M", "ұ"), + (0x4B1, "V"), + (0x4B2, "M", "ҳ"), + (0x4B3, "V"), + (0x4B4, "M", "ҵ"), + (0x4B5, "V"), + (0x4B6, "M", "ҷ"), + (0x4B7, "V"), + (0x4B8, "M", "ҹ"), + (0x4B9, "V"), + (0x4BA, "M", "һ"), + (0x4BB, "V"), + (0x4BC, "M", "ҽ"), + (0x4BD, "V"), + (0x4BE, "M", "ҿ"), + (0x4BF, "V"), + (0x4C0, "X"), + (0x4C1, "M", "ӂ"), + (0x4C2, "V"), + (0x4C3, "M", "ӄ"), + (0x4C4, "V"), + (0x4C5, "M", "ӆ"), + (0x4C6, "V"), + (0x4C7, "M", "ӈ"), + (0x4C8, "V"), + (0x4C9, "M", "ӊ"), + (0x4CA, "V"), + (0x4CB, "M", "ӌ"), + (0x4CC, "V"), + (0x4CD, "M", "ӎ"), + (0x4CE, "V"), + (0x4D0, "M", "ӑ"), + (0x4D1, "V"), + (0x4D2, "M", "ӓ"), + (0x4D3, "V"), + (0x4D4, "M", "ӕ"), + (0x4D5, "V"), + (0x4D6, "M", "ӗ"), + (0x4D7, "V"), + (0x4D8, "M", "ә"), + (0x4D9, "V"), + (0x4DA, "M", "ӛ"), + (0x4DB, "V"), + (0x4DC, "M", "ӝ"), + (0x4DD, "V"), + (0x4DE, "M", "ӟ"), + (0x4DF, "V"), + (0x4E0, "M", "ӡ"), + (0x4E1, "V"), + (0x4E2, "M", "ӣ"), + (0x4E3, "V"), + (0x4E4, "M", "ӥ"), + (0x4E5, "V"), + (0x4E6, "M", "ӧ"), + (0x4E7, "V"), + (0x4E8, "M", "ө"), + (0x4E9, "V"), + (0x4EA, "M", "ӫ"), + (0x4EB, "V"), + (0x4EC, "M", "ӭ"), + (0x4ED, "V"), + (0x4EE, "M", "ӯ"), + (0x4EF, "V"), + (0x4F0, "M", "ӱ"), + (0x4F1, "V"), + (0x4F2, "M", "ӳ"), + (0x4F3, "V"), + (0x4F4, "M", "ӵ"), + (0x4F5, "V"), + (0x4F6, "M", "ӷ"), + (0x4F7, "V"), + (0x4F8, "M", "ӹ"), + (0x4F9, "V"), + (0x4FA, "M", "ӻ"), + (0x4FB, "V"), + (0x4FC, "M", "ӽ"), + (0x4FD, "V"), + (0x4FE, "M", "ӿ"), + (0x4FF, "V"), + (0x500, "M", "ԁ"), + (0x501, "V"), + (0x502, "M", "ԃ"), + ] + + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, "V"), + (0x504, "M", "ԅ"), + (0x505, "V"), + (0x506, "M", "ԇ"), + (0x507, "V"), + (0x508, "M", "ԉ"), + (0x509, "V"), + (0x50A, "M", "ԋ"), + (0x50B, "V"), + (0x50C, "M", "ԍ"), + (0x50D, "V"), + (0x50E, "M", "ԏ"), + (0x50F, "V"), + (0x510, "M", "ԑ"), + (0x511, "V"), + (0x512, "M", "ԓ"), + (0x513, "V"), + (0x514, "M", "ԕ"), + (0x515, "V"), + (0x516, "M", "ԗ"), + (0x517, "V"), + (0x518, "M", "ԙ"), + (0x519, "V"), + (0x51A, "M", "ԛ"), + (0x51B, "V"), + (0x51C, "M", "ԝ"), + (0x51D, "V"), + (0x51E, "M", "ԟ"), + (0x51F, "V"), + (0x520, "M", "ԡ"), + (0x521, "V"), + (0x522, "M", "ԣ"), + (0x523, "V"), + (0x524, "M", "ԥ"), + (0x525, "V"), + (0x526, "M", "ԧ"), + (0x527, "V"), + (0x528, "M", "ԩ"), + (0x529, "V"), + (0x52A, "M", "ԫ"), + (0x52B, "V"), + (0x52C, "M", "ԭ"), + (0x52D, "V"), + (0x52E, "M", "ԯ"), + (0x52F, "V"), + (0x530, "X"), + (0x531, "M", "ա"), + (0x532, "M", "բ"), + (0x533, "M", "գ"), + (0x534, "M", "դ"), + (0x535, "M", "ե"), + (0x536, "M", "զ"), + (0x537, "M", "է"), + (0x538, "M", "ը"), + (0x539, "M", "թ"), + (0x53A, "M", "ժ"), + (0x53B, "M", "ի"), + (0x53C, "M", "լ"), + (0x53D, "M", "խ"), + (0x53E, "M", "ծ"), + (0x53F, "M", "կ"), + (0x540, "M", "հ"), + (0x541, "M", "ձ"), + (0x542, "M", "ղ"), + (0x543, "M", "ճ"), + (0x544, "M", "մ"), + (0x545, "M", "յ"), + (0x546, "M", "ն"), + (0x547, "M", "շ"), + (0x548, "M", "ո"), + (0x549, "M", "չ"), + (0x54A, "M", "պ"), + (0x54B, "M", "ջ"), + (0x54C, "M", "ռ"), + (0x54D, "M", "ս"), + (0x54E, "M", "վ"), + (0x54F, "M", "տ"), + (0x550, "M", "ր"), + (0x551, "M", "ց"), + (0x552, "M", "ւ"), + (0x553, "M", "փ"), + (0x554, "M", "ք"), + (0x555, "M", "օ"), + (0x556, "M", "ֆ"), + (0x557, "X"), + (0x559, "V"), + (0x587, "M", "եւ"), + (0x588, "V"), + (0x58B, "X"), + (0x58D, "V"), + (0x590, "X"), + (0x591, "V"), + (0x5C8, "X"), + (0x5D0, "V"), + (0x5EB, "X"), + (0x5EF, "V"), + (0x5F5, "X"), + (0x606, 
"V"), + (0x61C, "X"), + (0x61D, "V"), + ] + + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, "M", "اٴ"), + (0x676, "M", "وٴ"), + (0x677, "M", "ۇٴ"), + (0x678, "M", "يٴ"), + (0x679, "V"), + (0x6DD, "X"), + (0x6DE, "V"), + (0x70E, "X"), + (0x710, "V"), + (0x74B, "X"), + (0x74D, "V"), + (0x7B2, "X"), + (0x7C0, "V"), + (0x7FB, "X"), + (0x7FD, "V"), + (0x82E, "X"), + (0x830, "V"), + (0x83F, "X"), + (0x840, "V"), + (0x85C, "X"), + (0x85E, "V"), + (0x85F, "X"), + (0x860, "V"), + (0x86B, "X"), + (0x870, "V"), + (0x88F, "X"), + (0x898, "V"), + (0x8E2, "X"), + (0x8E3, "V"), + (0x958, "M", "क़"), + (0x959, "M", "ख़"), + (0x95A, "M", "ग़"), + (0x95B, "M", "ज़"), + (0x95C, "M", "ड़"), + (0x95D, "M", "ढ़"), + (0x95E, "M", "फ़"), + (0x95F, "M", "य़"), + (0x960, "V"), + (0x984, "X"), + (0x985, "V"), + (0x98D, "X"), + (0x98F, "V"), + (0x991, "X"), + (0x993, "V"), + (0x9A9, "X"), + (0x9AA, "V"), + (0x9B1, "X"), + (0x9B2, "V"), + (0x9B3, "X"), + (0x9B6, "V"), + (0x9BA, "X"), + (0x9BC, "V"), + (0x9C5, "X"), + (0x9C7, "V"), + (0x9C9, "X"), + (0x9CB, "V"), + (0x9CF, "X"), + (0x9D7, "V"), + (0x9D8, "X"), + (0x9DC, "M", "ড়"), + (0x9DD, "M", "ঢ়"), + (0x9DE, "X"), + (0x9DF, "M", "য়"), + (0x9E0, "V"), + (0x9E4, "X"), + (0x9E6, "V"), + (0x9FF, "X"), + (0xA01, "V"), + (0xA04, "X"), + (0xA05, "V"), + (0xA0B, "X"), + (0xA0F, "V"), + (0xA11, "X"), + (0xA13, "V"), + (0xA29, "X"), + (0xA2A, "V"), + (0xA31, "X"), + (0xA32, "V"), + (0xA33, "M", "ਲ਼"), + (0xA34, "X"), + (0xA35, "V"), + (0xA36, "M", "ਸ਼"), + (0xA37, "X"), + (0xA38, "V"), + (0xA3A, "X"), + (0xA3C, "V"), + (0xA3D, "X"), + (0xA3E, "V"), + (0xA43, "X"), + (0xA47, "V"), + (0xA49, "X"), + (0xA4B, "V"), + (0xA4E, "X"), + (0xA51, "V"), + (0xA52, "X"), + (0xA59, "M", "ਖ਼"), + (0xA5A, "M", "ਗ਼"), + (0xA5B, "M", "ਜ਼"), + (0xA5C, "V"), + (0xA5D, "X"), + ] + + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, "M", "ਫ਼"), + (0xA5F, "X"), + (0xA66, "V"), + (0xA77, "X"), + (0xA81, "V"), + (0xA84, "X"), + (0xA85, "V"), + (0xA8E, "X"), + (0xA8F, "V"), + (0xA92, "X"), + (0xA93, "V"), + (0xAA9, "X"), + (0xAAA, "V"), + (0xAB1, "X"), + (0xAB2, "V"), + (0xAB4, "X"), + (0xAB5, "V"), + (0xABA, "X"), + (0xABC, "V"), + (0xAC6, "X"), + (0xAC7, "V"), + (0xACA, "X"), + (0xACB, "V"), + (0xACE, "X"), + (0xAD0, "V"), + (0xAD1, "X"), + (0xAE0, "V"), + (0xAE4, "X"), + (0xAE6, "V"), + (0xAF2, "X"), + (0xAF9, "V"), + (0xB00, "X"), + (0xB01, "V"), + (0xB04, "X"), + (0xB05, "V"), + (0xB0D, "X"), + (0xB0F, "V"), + (0xB11, "X"), + (0xB13, "V"), + (0xB29, "X"), + (0xB2A, "V"), + (0xB31, "X"), + (0xB32, "V"), + (0xB34, "X"), + (0xB35, "V"), + (0xB3A, "X"), + (0xB3C, "V"), + (0xB45, "X"), + (0xB47, "V"), + (0xB49, "X"), + (0xB4B, "V"), + (0xB4E, "X"), + (0xB55, "V"), + (0xB58, "X"), + (0xB5C, "M", "ଡ଼"), + (0xB5D, "M", "ଢ଼"), + (0xB5E, "X"), + (0xB5F, "V"), + (0xB64, "X"), + (0xB66, "V"), + (0xB78, "X"), + (0xB82, "V"), + (0xB84, "X"), + (0xB85, "V"), + (0xB8B, "X"), + (0xB8E, "V"), + (0xB91, "X"), + (0xB92, "V"), + (0xB96, "X"), + (0xB99, "V"), + (0xB9B, "X"), + (0xB9C, "V"), + (0xB9D, "X"), + (0xB9E, "V"), + (0xBA0, "X"), + (0xBA3, "V"), + (0xBA5, "X"), + (0xBA8, "V"), + (0xBAB, "X"), + (0xBAE, "V"), + (0xBBA, "X"), + (0xBBE, "V"), + (0xBC3, "X"), + (0xBC6, "V"), + (0xBC9, "X"), + (0xBCA, "V"), + (0xBCE, "X"), + (0xBD0, "V"), + (0xBD1, "X"), + (0xBD7, "V"), + (0xBD8, "X"), + (0xBE6, "V"), + (0xBFB, "X"), + (0xC00, "V"), + (0xC0D, "X"), + (0xC0E, "V"), + (0xC11, "X"), + (0xC12, "V"), + (0xC29, "X"), + (0xC2A, "V"), 
+ ] + + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, "X"), + (0xC3C, "V"), + (0xC45, "X"), + (0xC46, "V"), + (0xC49, "X"), + (0xC4A, "V"), + (0xC4E, "X"), + (0xC55, "V"), + (0xC57, "X"), + (0xC58, "V"), + (0xC5B, "X"), + (0xC5D, "V"), + (0xC5E, "X"), + (0xC60, "V"), + (0xC64, "X"), + (0xC66, "V"), + (0xC70, "X"), + (0xC77, "V"), + (0xC8D, "X"), + (0xC8E, "V"), + (0xC91, "X"), + (0xC92, "V"), + (0xCA9, "X"), + (0xCAA, "V"), + (0xCB4, "X"), + (0xCB5, "V"), + (0xCBA, "X"), + (0xCBC, "V"), + (0xCC5, "X"), + (0xCC6, "V"), + (0xCC9, "X"), + (0xCCA, "V"), + (0xCCE, "X"), + (0xCD5, "V"), + (0xCD7, "X"), + (0xCDD, "V"), + (0xCDF, "X"), + (0xCE0, "V"), + (0xCE4, "X"), + (0xCE6, "V"), + (0xCF0, "X"), + (0xCF1, "V"), + (0xCF4, "X"), + (0xD00, "V"), + (0xD0D, "X"), + (0xD0E, "V"), + (0xD11, "X"), + (0xD12, "V"), + (0xD45, "X"), + (0xD46, "V"), + (0xD49, "X"), + (0xD4A, "V"), + (0xD50, "X"), + (0xD54, "V"), + (0xD64, "X"), + (0xD66, "V"), + (0xD80, "X"), + (0xD81, "V"), + (0xD84, "X"), + (0xD85, "V"), + (0xD97, "X"), + (0xD9A, "V"), + (0xDB2, "X"), + (0xDB3, "V"), + (0xDBC, "X"), + (0xDBD, "V"), + (0xDBE, "X"), + (0xDC0, "V"), + (0xDC7, "X"), + (0xDCA, "V"), + (0xDCB, "X"), + (0xDCF, "V"), + (0xDD5, "X"), + (0xDD6, "V"), + (0xDD7, "X"), + (0xDD8, "V"), + (0xDE0, "X"), + (0xDE6, "V"), + (0xDF0, "X"), + (0xDF2, "V"), + (0xDF5, "X"), + (0xE01, "V"), + (0xE33, "M", "ํา"), + (0xE34, "V"), + (0xE3B, "X"), + (0xE3F, "V"), + (0xE5C, "X"), + (0xE81, "V"), + (0xE83, "X"), + (0xE84, "V"), + (0xE85, "X"), + (0xE86, "V"), + (0xE8B, "X"), + (0xE8C, "V"), + (0xEA4, "X"), + (0xEA5, "V"), + (0xEA6, "X"), + (0xEA7, "V"), + (0xEB3, "M", "ໍາ"), + (0xEB4, "V"), + ] + + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, "X"), + (0xEC0, "V"), + (0xEC5, "X"), + (0xEC6, "V"), + (0xEC7, "X"), + (0xEC8, "V"), + (0xECF, "X"), + (0xED0, "V"), + (0xEDA, "X"), + (0xEDC, "M", "ຫນ"), + (0xEDD, "M", "ຫມ"), + (0xEDE, "V"), + (0xEE0, "X"), + (0xF00, "V"), + (0xF0C, "M", "་"), + (0xF0D, "V"), + (0xF43, "M", "གྷ"), + (0xF44, "V"), + (0xF48, "X"), + (0xF49, "V"), + (0xF4D, "M", "ཌྷ"), + (0xF4E, "V"), + (0xF52, "M", "དྷ"), + (0xF53, "V"), + (0xF57, "M", "བྷ"), + (0xF58, "V"), + (0xF5C, "M", "ཛྷ"), + (0xF5D, "V"), + (0xF69, "M", "ཀྵ"), + (0xF6A, "V"), + (0xF6D, "X"), + (0xF71, "V"), + (0xF73, "M", "ཱི"), + (0xF74, "V"), + (0xF75, "M", "ཱུ"), + (0xF76, "M", "ྲྀ"), + (0xF77, "M", "ྲཱྀ"), + (0xF78, "M", "ླྀ"), + (0xF79, "M", "ླཱྀ"), + (0xF7A, "V"), + (0xF81, "M", "ཱྀ"), + (0xF82, "V"), + (0xF93, "M", "ྒྷ"), + (0xF94, "V"), + (0xF98, "X"), + (0xF99, "V"), + (0xF9D, "M", "ྜྷ"), + (0xF9E, "V"), + (0xFA2, "M", "ྡྷ"), + (0xFA3, "V"), + (0xFA7, "M", "ྦྷ"), + (0xFA8, "V"), + (0xFAC, "M", "ྫྷ"), + (0xFAD, "V"), + (0xFB9, "M", "ྐྵ"), + (0xFBA, "V"), + (0xFBD, "X"), + (0xFBE, "V"), + (0xFCD, "X"), + (0xFCE, "V"), + (0xFDB, "X"), + (0x1000, "V"), + (0x10A0, "X"), + (0x10C7, "M", "ⴧ"), + (0x10C8, "X"), + (0x10CD, "M", "ⴭ"), + (0x10CE, "X"), + (0x10D0, "V"), + (0x10FC, "M", "ნ"), + (0x10FD, "V"), + (0x115F, "X"), + (0x1161, "V"), + (0x1249, "X"), + (0x124A, "V"), + (0x124E, "X"), + (0x1250, "V"), + (0x1257, "X"), + (0x1258, "V"), + (0x1259, "X"), + (0x125A, "V"), + (0x125E, "X"), + (0x1260, "V"), + (0x1289, "X"), + (0x128A, "V"), + (0x128E, "X"), + (0x1290, "V"), + (0x12B1, "X"), + (0x12B2, "V"), + (0x12B6, "X"), + (0x12B8, "V"), + (0x12BF, "X"), + (0x12C0, "V"), + (0x12C1, "X"), + (0x12C2, "V"), + (0x12C6, "X"), + (0x12C8, "V"), + (0x12D7, "X"), + (0x12D8, "V"), + (0x1311, 
"X"), + (0x1312, "V"), + ] + + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, "X"), + (0x1318, "V"), + (0x135B, "X"), + (0x135D, "V"), + (0x137D, "X"), + (0x1380, "V"), + (0x139A, "X"), + (0x13A0, "V"), + (0x13F6, "X"), + (0x13F8, "M", "Ᏸ"), + (0x13F9, "M", "Ᏹ"), + (0x13FA, "M", "Ᏺ"), + (0x13FB, "M", "Ᏻ"), + (0x13FC, "M", "Ᏼ"), + (0x13FD, "M", "Ᏽ"), + (0x13FE, "X"), + (0x1400, "V"), + (0x1680, "X"), + (0x1681, "V"), + (0x169D, "X"), + (0x16A0, "V"), + (0x16F9, "X"), + (0x1700, "V"), + (0x1716, "X"), + (0x171F, "V"), + (0x1737, "X"), + (0x1740, "V"), + (0x1754, "X"), + (0x1760, "V"), + (0x176D, "X"), + (0x176E, "V"), + (0x1771, "X"), + (0x1772, "V"), + (0x1774, "X"), + (0x1780, "V"), + (0x17B4, "X"), + (0x17B6, "V"), + (0x17DE, "X"), + (0x17E0, "V"), + (0x17EA, "X"), + (0x17F0, "V"), + (0x17FA, "X"), + (0x1800, "V"), + (0x1806, "X"), + (0x1807, "V"), + (0x180B, "I"), + (0x180E, "X"), + (0x180F, "I"), + (0x1810, "V"), + (0x181A, "X"), + (0x1820, "V"), + (0x1879, "X"), + (0x1880, "V"), + (0x18AB, "X"), + (0x18B0, "V"), + (0x18F6, "X"), + (0x1900, "V"), + (0x191F, "X"), + (0x1920, "V"), + (0x192C, "X"), + (0x1930, "V"), + (0x193C, "X"), + (0x1940, "V"), + (0x1941, "X"), + (0x1944, "V"), + (0x196E, "X"), + (0x1970, "V"), + (0x1975, "X"), + (0x1980, "V"), + (0x19AC, "X"), + (0x19B0, "V"), + (0x19CA, "X"), + (0x19D0, "V"), + (0x19DB, "X"), + (0x19DE, "V"), + (0x1A1C, "X"), + (0x1A1E, "V"), + (0x1A5F, "X"), + (0x1A60, "V"), + (0x1A7D, "X"), + (0x1A7F, "V"), + (0x1A8A, "X"), + (0x1A90, "V"), + (0x1A9A, "X"), + (0x1AA0, "V"), + (0x1AAE, "X"), + (0x1AB0, "V"), + (0x1ACF, "X"), + (0x1B00, "V"), + (0x1B4D, "X"), + (0x1B50, "V"), + (0x1B7F, "X"), + (0x1B80, "V"), + (0x1BF4, "X"), + (0x1BFC, "V"), + (0x1C38, "X"), + (0x1C3B, "V"), + (0x1C4A, "X"), + (0x1C4D, "V"), + (0x1C80, "M", "в"), + ] + + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, "M", "д"), + (0x1C82, "M", "о"), + (0x1C83, "M", "с"), + (0x1C84, "M", "т"), + (0x1C86, "M", "ъ"), + (0x1C87, "M", "ѣ"), + (0x1C88, "M", "ꙋ"), + (0x1C89, "X"), + (0x1C90, "M", "ა"), + (0x1C91, "M", "ბ"), + (0x1C92, "M", "გ"), + (0x1C93, "M", "დ"), + (0x1C94, "M", "ე"), + (0x1C95, "M", "ვ"), + (0x1C96, "M", "ზ"), + (0x1C97, "M", "თ"), + (0x1C98, "M", "ი"), + (0x1C99, "M", "კ"), + (0x1C9A, "M", "ლ"), + (0x1C9B, "M", "მ"), + (0x1C9C, "M", "ნ"), + (0x1C9D, "M", "ო"), + (0x1C9E, "M", "პ"), + (0x1C9F, "M", "ჟ"), + (0x1CA0, "M", "რ"), + (0x1CA1, "M", "ს"), + (0x1CA2, "M", "ტ"), + (0x1CA3, "M", "უ"), + (0x1CA4, "M", "ფ"), + (0x1CA5, "M", "ქ"), + (0x1CA6, "M", "ღ"), + (0x1CA7, "M", "ყ"), + (0x1CA8, "M", "შ"), + (0x1CA9, "M", "ჩ"), + (0x1CAA, "M", "ც"), + (0x1CAB, "M", "ძ"), + (0x1CAC, "M", "წ"), + (0x1CAD, "M", "ჭ"), + (0x1CAE, "M", "ხ"), + (0x1CAF, "M", "ჯ"), + (0x1CB0, "M", "ჰ"), + (0x1CB1, "M", "ჱ"), + (0x1CB2, "M", "ჲ"), + (0x1CB3, "M", "ჳ"), + (0x1CB4, "M", "ჴ"), + (0x1CB5, "M", "ჵ"), + (0x1CB6, "M", "ჶ"), + (0x1CB7, "M", "ჷ"), + (0x1CB8, "M", "ჸ"), + (0x1CB9, "M", "ჹ"), + (0x1CBA, "M", "ჺ"), + (0x1CBB, "X"), + (0x1CBD, "M", "ჽ"), + (0x1CBE, "M", "ჾ"), + (0x1CBF, "M", "ჿ"), + (0x1CC0, "V"), + (0x1CC8, "X"), + (0x1CD0, "V"), + (0x1CFB, "X"), + (0x1D00, "V"), + (0x1D2C, "M", "a"), + (0x1D2D, "M", "æ"), + (0x1D2E, "M", "b"), + (0x1D2F, "V"), + (0x1D30, "M", "d"), + (0x1D31, "M", "e"), + (0x1D32, "M", "ǝ"), + (0x1D33, "M", "g"), + (0x1D34, "M", "h"), + (0x1D35, "M", "i"), + (0x1D36, "M", "j"), + (0x1D37, "M", "k"), + (0x1D38, "M", "l"), + (0x1D39, "M", "m"), + (0x1D3A, "M", "n"), + 
(0x1D3B, "V"), + (0x1D3C, "M", "o"), + (0x1D3D, "M", "ȣ"), + (0x1D3E, "M", "p"), + (0x1D3F, "M", "r"), + (0x1D40, "M", "t"), + (0x1D41, "M", "u"), + (0x1D42, "M", "w"), + (0x1D43, "M", "a"), + (0x1D44, "M", "ɐ"), + (0x1D45, "M", "ɑ"), + (0x1D46, "M", "ᴂ"), + (0x1D47, "M", "b"), + (0x1D48, "M", "d"), + (0x1D49, "M", "e"), + (0x1D4A, "M", "ə"), + (0x1D4B, "M", "ɛ"), + (0x1D4C, "M", "ɜ"), + (0x1D4D, "M", "g"), + (0x1D4E, "V"), + (0x1D4F, "M", "k"), + (0x1D50, "M", "m"), + (0x1D51, "M", "ŋ"), + (0x1D52, "M", "o"), + (0x1D53, "M", "ɔ"), + ] + + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, "M", "ᴖ"), + (0x1D55, "M", "ᴗ"), + (0x1D56, "M", "p"), + (0x1D57, "M", "t"), + (0x1D58, "M", "u"), + (0x1D59, "M", "ᴝ"), + (0x1D5A, "M", "ɯ"), + (0x1D5B, "M", "v"), + (0x1D5C, "M", "ᴥ"), + (0x1D5D, "M", "β"), + (0x1D5E, "M", "γ"), + (0x1D5F, "M", "δ"), + (0x1D60, "M", "φ"), + (0x1D61, "M", "χ"), + (0x1D62, "M", "i"), + (0x1D63, "M", "r"), + (0x1D64, "M", "u"), + (0x1D65, "M", "v"), + (0x1D66, "M", "β"), + (0x1D67, "M", "γ"), + (0x1D68, "M", "ρ"), + (0x1D69, "M", "φ"), + (0x1D6A, "M", "χ"), + (0x1D6B, "V"), + (0x1D78, "M", "н"), + (0x1D79, "V"), + (0x1D9B, "M", "ɒ"), + (0x1D9C, "M", "c"), + (0x1D9D, "M", "ɕ"), + (0x1D9E, "M", "ð"), + (0x1D9F, "M", "ɜ"), + (0x1DA0, "M", "f"), + (0x1DA1, "M", "ɟ"), + (0x1DA2, "M", "ɡ"), + (0x1DA3, "M", "ɥ"), + (0x1DA4, "M", "ɨ"), + (0x1DA5, "M", "ɩ"), + (0x1DA6, "M", "ɪ"), + (0x1DA7, "M", "ᵻ"), + (0x1DA8, "M", "ʝ"), + (0x1DA9, "M", "ɭ"), + (0x1DAA, "M", "ᶅ"), + (0x1DAB, "M", "ʟ"), + (0x1DAC, "M", "ɱ"), + (0x1DAD, "M", "ɰ"), + (0x1DAE, "M", "ɲ"), + (0x1DAF, "M", "ɳ"), + (0x1DB0, "M", "ɴ"), + (0x1DB1, "M", "ɵ"), + (0x1DB2, "M", "ɸ"), + (0x1DB3, "M", "ʂ"), + (0x1DB4, "M", "ʃ"), + (0x1DB5, "M", "ƫ"), + (0x1DB6, "M", "ʉ"), + (0x1DB7, "M", "ʊ"), + (0x1DB8, "M", "ᴜ"), + (0x1DB9, "M", "ʋ"), + (0x1DBA, "M", "ʌ"), + (0x1DBB, "M", "z"), + (0x1DBC, "M", "ʐ"), + (0x1DBD, "M", "ʑ"), + (0x1DBE, "M", "ʒ"), + (0x1DBF, "M", "θ"), + (0x1DC0, "V"), + (0x1E00, "M", "ḁ"), + (0x1E01, "V"), + (0x1E02, "M", "ḃ"), + (0x1E03, "V"), + (0x1E04, "M", "ḅ"), + (0x1E05, "V"), + (0x1E06, "M", "ḇ"), + (0x1E07, "V"), + (0x1E08, "M", "ḉ"), + (0x1E09, "V"), + (0x1E0A, "M", "ḋ"), + (0x1E0B, "V"), + (0x1E0C, "M", "ḍ"), + (0x1E0D, "V"), + (0x1E0E, "M", "ḏ"), + (0x1E0F, "V"), + (0x1E10, "M", "ḑ"), + (0x1E11, "V"), + (0x1E12, "M", "ḓ"), + (0x1E13, "V"), + (0x1E14, "M", "ḕ"), + (0x1E15, "V"), + (0x1E16, "M", "ḗ"), + (0x1E17, "V"), + (0x1E18, "M", "ḙ"), + (0x1E19, "V"), + (0x1E1A, "M", "ḛ"), + (0x1E1B, "V"), + (0x1E1C, "M", "ḝ"), + (0x1E1D, "V"), + (0x1E1E, "M", "ḟ"), + (0x1E1F, "V"), + (0x1E20, "M", "ḡ"), + (0x1E21, "V"), + (0x1E22, "M", "ḣ"), + (0x1E23, "V"), + ] + + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, "M", "ḥ"), + (0x1E25, "V"), + (0x1E26, "M", "ḧ"), + (0x1E27, "V"), + (0x1E28, "M", "ḩ"), + (0x1E29, "V"), + (0x1E2A, "M", "ḫ"), + (0x1E2B, "V"), + (0x1E2C, "M", "ḭ"), + (0x1E2D, "V"), + (0x1E2E, "M", "ḯ"), + (0x1E2F, "V"), + (0x1E30, "M", "ḱ"), + (0x1E31, "V"), + (0x1E32, "M", "ḳ"), + (0x1E33, "V"), + (0x1E34, "M", "ḵ"), + (0x1E35, "V"), + (0x1E36, "M", "ḷ"), + (0x1E37, "V"), + (0x1E38, "M", "ḹ"), + (0x1E39, "V"), + (0x1E3A, "M", "ḻ"), + (0x1E3B, "V"), + (0x1E3C, "M", "ḽ"), + (0x1E3D, "V"), + (0x1E3E, "M", "ḿ"), + (0x1E3F, "V"), + (0x1E40, "M", "ṁ"), + (0x1E41, "V"), + (0x1E42, "M", "ṃ"), + (0x1E43, "V"), + (0x1E44, "M", "ṅ"), + (0x1E45, "V"), + (0x1E46, "M", "ṇ"), + (0x1E47, "V"), + (0x1E48, "M", "ṉ"), + (0x1E49, "V"), + 
(0x1E4A, "M", "ṋ"), + (0x1E4B, "V"), + (0x1E4C, "M", "ṍ"), + (0x1E4D, "V"), + (0x1E4E, "M", "ṏ"), + (0x1E4F, "V"), + (0x1E50, "M", "ṑ"), + (0x1E51, "V"), + (0x1E52, "M", "ṓ"), + (0x1E53, "V"), + (0x1E54, "M", "ṕ"), + (0x1E55, "V"), + (0x1E56, "M", "ṗ"), + (0x1E57, "V"), + (0x1E58, "M", "ṙ"), + (0x1E59, "V"), + (0x1E5A, "M", "ṛ"), + (0x1E5B, "V"), + (0x1E5C, "M", "ṝ"), + (0x1E5D, "V"), + (0x1E5E, "M", "ṟ"), + (0x1E5F, "V"), + (0x1E60, "M", "ṡ"), + (0x1E61, "V"), + (0x1E62, "M", "ṣ"), + (0x1E63, "V"), + (0x1E64, "M", "ṥ"), + (0x1E65, "V"), + (0x1E66, "M", "ṧ"), + (0x1E67, "V"), + (0x1E68, "M", "ṩ"), + (0x1E69, "V"), + (0x1E6A, "M", "ṫ"), + (0x1E6B, "V"), + (0x1E6C, "M", "ṭ"), + (0x1E6D, "V"), + (0x1E6E, "M", "ṯ"), + (0x1E6F, "V"), + (0x1E70, "M", "ṱ"), + (0x1E71, "V"), + (0x1E72, "M", "ṳ"), + (0x1E73, "V"), + (0x1E74, "M", "ṵ"), + (0x1E75, "V"), + (0x1E76, "M", "ṷ"), + (0x1E77, "V"), + (0x1E78, "M", "ṹ"), + (0x1E79, "V"), + (0x1E7A, "M", "ṻ"), + (0x1E7B, "V"), + (0x1E7C, "M", "ṽ"), + (0x1E7D, "V"), + (0x1E7E, "M", "ṿ"), + (0x1E7F, "V"), + (0x1E80, "M", "ẁ"), + (0x1E81, "V"), + (0x1E82, "M", "ẃ"), + (0x1E83, "V"), + (0x1E84, "M", "ẅ"), + (0x1E85, "V"), + (0x1E86, "M", "ẇ"), + (0x1E87, "V"), + ] + + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, "M", "ẉ"), + (0x1E89, "V"), + (0x1E8A, "M", "ẋ"), + (0x1E8B, "V"), + (0x1E8C, "M", "ẍ"), + (0x1E8D, "V"), + (0x1E8E, "M", "ẏ"), + (0x1E8F, "V"), + (0x1E90, "M", "ẑ"), + (0x1E91, "V"), + (0x1E92, "M", "ẓ"), + (0x1E93, "V"), + (0x1E94, "M", "ẕ"), + (0x1E95, "V"), + (0x1E9A, "M", "aʾ"), + (0x1E9B, "M", "ṡ"), + (0x1E9C, "V"), + (0x1E9E, "M", "ß"), + (0x1E9F, "V"), + (0x1EA0, "M", "ạ"), + (0x1EA1, "V"), + (0x1EA2, "M", "ả"), + (0x1EA3, "V"), + (0x1EA4, "M", "ấ"), + (0x1EA5, "V"), + (0x1EA6, "M", "ầ"), + (0x1EA7, "V"), + (0x1EA8, "M", "ẩ"), + (0x1EA9, "V"), + (0x1EAA, "M", "ẫ"), + (0x1EAB, "V"), + (0x1EAC, "M", "ậ"), + (0x1EAD, "V"), + (0x1EAE, "M", "ắ"), + (0x1EAF, "V"), + (0x1EB0, "M", "ằ"), + (0x1EB1, "V"), + (0x1EB2, "M", "ẳ"), + (0x1EB3, "V"), + (0x1EB4, "M", "ẵ"), + (0x1EB5, "V"), + (0x1EB6, "M", "ặ"), + (0x1EB7, "V"), + (0x1EB8, "M", "ẹ"), + (0x1EB9, "V"), + (0x1EBA, "M", "ẻ"), + (0x1EBB, "V"), + (0x1EBC, "M", "ẽ"), + (0x1EBD, "V"), + (0x1EBE, "M", "ế"), + (0x1EBF, "V"), + (0x1EC0, "M", "ề"), + (0x1EC1, "V"), + (0x1EC2, "M", "ể"), + (0x1EC3, "V"), + (0x1EC4, "M", "ễ"), + (0x1EC5, "V"), + (0x1EC6, "M", "ệ"), + (0x1EC7, "V"), + (0x1EC8, "M", "ỉ"), + (0x1EC9, "V"), + (0x1ECA, "M", "ị"), + (0x1ECB, "V"), + (0x1ECC, "M", "ọ"), + (0x1ECD, "V"), + (0x1ECE, "M", "ỏ"), + (0x1ECF, "V"), + (0x1ED0, "M", "ố"), + (0x1ED1, "V"), + (0x1ED2, "M", "ồ"), + (0x1ED3, "V"), + (0x1ED4, "M", "ổ"), + (0x1ED5, "V"), + (0x1ED6, "M", "ỗ"), + (0x1ED7, "V"), + (0x1ED8, "M", "ộ"), + (0x1ED9, "V"), + (0x1EDA, "M", "ớ"), + (0x1EDB, "V"), + (0x1EDC, "M", "ờ"), + (0x1EDD, "V"), + (0x1EDE, "M", "ở"), + (0x1EDF, "V"), + (0x1EE0, "M", "ỡ"), + (0x1EE1, "V"), + (0x1EE2, "M", "ợ"), + (0x1EE3, "V"), + (0x1EE4, "M", "ụ"), + (0x1EE5, "V"), + (0x1EE6, "M", "ủ"), + (0x1EE7, "V"), + (0x1EE8, "M", "ứ"), + (0x1EE9, "V"), + (0x1EEA, "M", "ừ"), + (0x1EEB, "V"), + (0x1EEC, "M", "ử"), + (0x1EED, "V"), + (0x1EEE, "M", "ữ"), + (0x1EEF, "V"), + (0x1EF0, "M", "ự"), + ] + + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, "V"), + (0x1EF2, "M", "ỳ"), + (0x1EF3, "V"), + (0x1EF4, "M", "ỵ"), + (0x1EF5, "V"), + (0x1EF6, "M", "ỷ"), + (0x1EF7, "V"), + (0x1EF8, "M", "ỹ"), + (0x1EF9, "V"), + (0x1EFA, "M", "ỻ"), + (0x1EFB, "V"), + 
(0x1EFC, "M", "ỽ"), + (0x1EFD, "V"), + (0x1EFE, "M", "ỿ"), + (0x1EFF, "V"), + (0x1F08, "M", "ἀ"), + (0x1F09, "M", "ἁ"), + (0x1F0A, "M", "ἂ"), + (0x1F0B, "M", "ἃ"), + (0x1F0C, "M", "ἄ"), + (0x1F0D, "M", "ἅ"), + (0x1F0E, "M", "ἆ"), + (0x1F0F, "M", "ἇ"), + (0x1F10, "V"), + (0x1F16, "X"), + (0x1F18, "M", "ἐ"), + (0x1F19, "M", "ἑ"), + (0x1F1A, "M", "ἒ"), + (0x1F1B, "M", "ἓ"), + (0x1F1C, "M", "ἔ"), + (0x1F1D, "M", "ἕ"), + (0x1F1E, "X"), + (0x1F20, "V"), + (0x1F28, "M", "ἠ"), + (0x1F29, "M", "ἡ"), + (0x1F2A, "M", "ἢ"), + (0x1F2B, "M", "ἣ"), + (0x1F2C, "M", "ἤ"), + (0x1F2D, "M", "ἥ"), + (0x1F2E, "M", "ἦ"), + (0x1F2F, "M", "ἧ"), + (0x1F30, "V"), + (0x1F38, "M", "ἰ"), + (0x1F39, "M", "ἱ"), + (0x1F3A, "M", "ἲ"), + (0x1F3B, "M", "ἳ"), + (0x1F3C, "M", "ἴ"), + (0x1F3D, "M", "ἵ"), + (0x1F3E, "M", "ἶ"), + (0x1F3F, "M", "ἷ"), + (0x1F40, "V"), + (0x1F46, "X"), + (0x1F48, "M", "ὀ"), + (0x1F49, "M", "ὁ"), + (0x1F4A, "M", "ὂ"), + (0x1F4B, "M", "ὃ"), + (0x1F4C, "M", "ὄ"), + (0x1F4D, "M", "ὅ"), + (0x1F4E, "X"), + (0x1F50, "V"), + (0x1F58, "X"), + (0x1F59, "M", "ὑ"), + (0x1F5A, "X"), + (0x1F5B, "M", "ὓ"), + (0x1F5C, "X"), + (0x1F5D, "M", "ὕ"), + (0x1F5E, "X"), + (0x1F5F, "M", "ὗ"), + (0x1F60, "V"), + (0x1F68, "M", "ὠ"), + (0x1F69, "M", "ὡ"), + (0x1F6A, "M", "ὢ"), + (0x1F6B, "M", "ὣ"), + (0x1F6C, "M", "ὤ"), + (0x1F6D, "M", "ὥ"), + (0x1F6E, "M", "ὦ"), + (0x1F6F, "M", "ὧ"), + (0x1F70, "V"), + (0x1F71, "M", "ά"), + (0x1F72, "V"), + (0x1F73, "M", "έ"), + (0x1F74, "V"), + (0x1F75, "M", "ή"), + (0x1F76, "V"), + (0x1F77, "M", "ί"), + (0x1F78, "V"), + (0x1F79, "M", "ό"), + (0x1F7A, "V"), + (0x1F7B, "M", "ύ"), + (0x1F7C, "V"), + (0x1F7D, "M", "ώ"), + (0x1F7E, "X"), + (0x1F80, "M", "ἀι"), + (0x1F81, "M", "ἁι"), + (0x1F82, "M", "ἂι"), + (0x1F83, "M", "ἃι"), + (0x1F84, "M", "ἄι"), + (0x1F85, "M", "ἅι"), + (0x1F86, "M", "ἆι"), + (0x1F87, "M", "ἇι"), + ] + + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, "M", "ἀι"), + (0x1F89, "M", "ἁι"), + (0x1F8A, "M", "ἂι"), + (0x1F8B, "M", "ἃι"), + (0x1F8C, "M", "ἄι"), + (0x1F8D, "M", "ἅι"), + (0x1F8E, "M", "ἆι"), + (0x1F8F, "M", "ἇι"), + (0x1F90, "M", "ἠι"), + (0x1F91, "M", "ἡι"), + (0x1F92, "M", "ἢι"), + (0x1F93, "M", "ἣι"), + (0x1F94, "M", "ἤι"), + (0x1F95, "M", "ἥι"), + (0x1F96, "M", "ἦι"), + (0x1F97, "M", "ἧι"), + (0x1F98, "M", "ἠι"), + (0x1F99, "M", "ἡι"), + (0x1F9A, "M", "ἢι"), + (0x1F9B, "M", "ἣι"), + (0x1F9C, "M", "ἤι"), + (0x1F9D, "M", "ἥι"), + (0x1F9E, "M", "ἦι"), + (0x1F9F, "M", "ἧι"), + (0x1FA0, "M", "ὠι"), + (0x1FA1, "M", "ὡι"), + (0x1FA2, "M", "ὢι"), + (0x1FA3, "M", "ὣι"), + (0x1FA4, "M", "ὤι"), + (0x1FA5, "M", "ὥι"), + (0x1FA6, "M", "ὦι"), + (0x1FA7, "M", "ὧι"), + (0x1FA8, "M", "ὠι"), + (0x1FA9, "M", "ὡι"), + (0x1FAA, "M", "ὢι"), + (0x1FAB, "M", "ὣι"), + (0x1FAC, "M", "ὤι"), + (0x1FAD, "M", "ὥι"), + (0x1FAE, "M", "ὦι"), + (0x1FAF, "M", "ὧι"), + (0x1FB0, "V"), + (0x1FB2, "M", "ὰι"), + (0x1FB3, "M", "αι"), + (0x1FB4, "M", "άι"), + (0x1FB5, "X"), + (0x1FB6, "V"), + (0x1FB7, "M", "ᾶι"), + (0x1FB8, "M", "ᾰ"), + (0x1FB9, "M", "ᾱ"), + (0x1FBA, "M", "ὰ"), + (0x1FBB, "M", "ά"), + (0x1FBC, "M", "αι"), + (0x1FBD, "3", " ̓"), + (0x1FBE, "M", "ι"), + (0x1FBF, "3", " ̓"), + (0x1FC0, "3", " ͂"), + (0x1FC1, "3", " ̈͂"), + (0x1FC2, "M", "ὴι"), + (0x1FC3, "M", "ηι"), + (0x1FC4, "M", "ήι"), + (0x1FC5, "X"), + (0x1FC6, "V"), + (0x1FC7, "M", "ῆι"), + (0x1FC8, "M", "ὲ"), + (0x1FC9, "M", "έ"), + (0x1FCA, "M", "ὴ"), + (0x1FCB, "M", "ή"), + (0x1FCC, "M", "ηι"), + (0x1FCD, "3", " ̓̀"), + (0x1FCE, "3", " ̓́"), + (0x1FCF, "3", " ̓͂"), + (0x1FD0, "V"), + 
(0x1FD3, "M", "ΐ"), + (0x1FD4, "X"), + (0x1FD6, "V"), + (0x1FD8, "M", "ῐ"), + (0x1FD9, "M", "ῑ"), + (0x1FDA, "M", "ὶ"), + (0x1FDB, "M", "ί"), + (0x1FDC, "X"), + (0x1FDD, "3", " ̔̀"), + (0x1FDE, "3", " ̔́"), + (0x1FDF, "3", " ̔͂"), + (0x1FE0, "V"), + (0x1FE3, "M", "ΰ"), + (0x1FE4, "V"), + (0x1FE8, "M", "ῠ"), + (0x1FE9, "M", "ῡ"), + (0x1FEA, "M", "ὺ"), + (0x1FEB, "M", "ύ"), + (0x1FEC, "M", "ῥ"), + (0x1FED, "3", " ̈̀"), + (0x1FEE, "3", " ̈́"), + (0x1FEF, "3", "`"), + (0x1FF0, "X"), + (0x1FF2, "M", "ὼι"), + (0x1FF3, "M", "ωι"), + (0x1FF4, "M", "ώι"), + (0x1FF5, "X"), + (0x1FF6, "V"), + ] + + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, "M", "ῶι"), + (0x1FF8, "M", "ὸ"), + (0x1FF9, "M", "ό"), + (0x1FFA, "M", "ὼ"), + (0x1FFB, "M", "ώ"), + (0x1FFC, "M", "ωι"), + (0x1FFD, "3", " ́"), + (0x1FFE, "3", " ̔"), + (0x1FFF, "X"), + (0x2000, "3", " "), + (0x200B, "I"), + (0x200C, "D", ""), + (0x200E, "X"), + (0x2010, "V"), + (0x2011, "M", "‐"), + (0x2012, "V"), + (0x2017, "3", " ̳"), + (0x2018, "V"), + (0x2024, "X"), + (0x2027, "V"), + (0x2028, "X"), + (0x202F, "3", " "), + (0x2030, "V"), + (0x2033, "M", "′′"), + (0x2034, "M", "′′′"), + (0x2035, "V"), + (0x2036, "M", "‵‵"), + (0x2037, "M", "‵‵‵"), + (0x2038, "V"), + (0x203C, "3", "!!"), + (0x203D, "V"), + (0x203E, "3", " ̅"), + (0x203F, "V"), + (0x2047, "3", "??"), + (0x2048, "3", "?!"), + (0x2049, "3", "!?"), + (0x204A, "V"), + (0x2057, "M", "′′′′"), + (0x2058, "V"), + (0x205F, "3", " "), + (0x2060, "I"), + (0x2061, "X"), + (0x2064, "I"), + (0x2065, "X"), + (0x2070, "M", "0"), + (0x2071, "M", "i"), + (0x2072, "X"), + (0x2074, "M", "4"), + (0x2075, "M", "5"), + (0x2076, "M", "6"), + (0x2077, "M", "7"), + (0x2078, "M", "8"), + (0x2079, "M", "9"), + (0x207A, "3", "+"), + (0x207B, "M", "−"), + (0x207C, "3", "="), + (0x207D, "3", "("), + (0x207E, "3", ")"), + (0x207F, "M", "n"), + (0x2080, "M", "0"), + (0x2081, "M", "1"), + (0x2082, "M", "2"), + (0x2083, "M", "3"), + (0x2084, "M", "4"), + (0x2085, "M", "5"), + (0x2086, "M", "6"), + (0x2087, "M", "7"), + (0x2088, "M", "8"), + (0x2089, "M", "9"), + (0x208A, "3", "+"), + (0x208B, "M", "−"), + (0x208C, "3", "="), + (0x208D, "3", "("), + (0x208E, "3", ")"), + (0x208F, "X"), + (0x2090, "M", "a"), + (0x2091, "M", "e"), + (0x2092, "M", "o"), + (0x2093, "M", "x"), + (0x2094, "M", "ə"), + (0x2095, "M", "h"), + (0x2096, "M", "k"), + (0x2097, "M", "l"), + (0x2098, "M", "m"), + (0x2099, "M", "n"), + (0x209A, "M", "p"), + (0x209B, "M", "s"), + (0x209C, "M", "t"), + (0x209D, "X"), + (0x20A0, "V"), + (0x20A8, "M", "rs"), + (0x20A9, "V"), + (0x20C1, "X"), + (0x20D0, "V"), + (0x20F1, "X"), + (0x2100, "3", "a/c"), + (0x2101, "3", "a/s"), + (0x2102, "M", "c"), + (0x2103, "M", "°c"), + (0x2104, "V"), + ] + + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, "3", "c/o"), + (0x2106, "3", "c/u"), + (0x2107, "M", "ɛ"), + (0x2108, "V"), + (0x2109, "M", "°f"), + (0x210A, "M", "g"), + (0x210B, "M", "h"), + (0x210F, "M", "ħ"), + (0x2110, "M", "i"), + (0x2112, "M", "l"), + (0x2114, "V"), + (0x2115, "M", "n"), + (0x2116, "M", "no"), + (0x2117, "V"), + (0x2119, "M", "p"), + (0x211A, "M", "q"), + (0x211B, "M", "r"), + (0x211E, "V"), + (0x2120, "M", "sm"), + (0x2121, "M", "tel"), + (0x2122, "M", "tm"), + (0x2123, "V"), + (0x2124, "M", "z"), + (0x2125, "V"), + (0x2126, "M", "ω"), + (0x2127, "V"), + (0x2128, "M", "z"), + (0x2129, "V"), + (0x212A, "M", "k"), + (0x212B, "M", "å"), + (0x212C, "M", "b"), + (0x212D, "M", "c"), + (0x212E, "V"), + (0x212F, "M", 
"e"), + (0x2131, "M", "f"), + (0x2132, "X"), + (0x2133, "M", "m"), + (0x2134, "M", "o"), + (0x2135, "M", "א"), + (0x2136, "M", "ב"), + (0x2137, "M", "ג"), + (0x2138, "M", "ד"), + (0x2139, "M", "i"), + (0x213A, "V"), + (0x213B, "M", "fax"), + (0x213C, "M", "π"), + (0x213D, "M", "γ"), + (0x213F, "M", "π"), + (0x2140, "M", "∑"), + (0x2141, "V"), + (0x2145, "M", "d"), + (0x2147, "M", "e"), + (0x2148, "M", "i"), + (0x2149, "M", "j"), + (0x214A, "V"), + (0x2150, "M", "1⁄7"), + (0x2151, "M", "1⁄9"), + (0x2152, "M", "1⁄10"), + (0x2153, "M", "1⁄3"), + (0x2154, "M", "2⁄3"), + (0x2155, "M", "1⁄5"), + (0x2156, "M", "2⁄5"), + (0x2157, "M", "3⁄5"), + (0x2158, "M", "4⁄5"), + (0x2159, "M", "1⁄6"), + (0x215A, "M", "5⁄6"), + (0x215B, "M", "1⁄8"), + (0x215C, "M", "3⁄8"), + (0x215D, "M", "5⁄8"), + (0x215E, "M", "7⁄8"), + (0x215F, "M", "1⁄"), + (0x2160, "M", "i"), + (0x2161, "M", "ii"), + (0x2162, "M", "iii"), + (0x2163, "M", "iv"), + (0x2164, "M", "v"), + (0x2165, "M", "vi"), + (0x2166, "M", "vii"), + (0x2167, "M", "viii"), + (0x2168, "M", "ix"), + (0x2169, "M", "x"), + (0x216A, "M", "xi"), + (0x216B, "M", "xii"), + (0x216C, "M", "l"), + (0x216D, "M", "c"), + (0x216E, "M", "d"), + (0x216F, "M", "m"), + (0x2170, "M", "i"), + (0x2171, "M", "ii"), + (0x2172, "M", "iii"), + (0x2173, "M", "iv"), + (0x2174, "M", "v"), + (0x2175, "M", "vi"), + (0x2176, "M", "vii"), + (0x2177, "M", "viii"), + (0x2178, "M", "ix"), + (0x2179, "M", "x"), + (0x217A, "M", "xi"), + (0x217B, "M", "xii"), + (0x217C, "M", "l"), + ] + + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, "M", "c"), + (0x217E, "M", "d"), + (0x217F, "M", "m"), + (0x2180, "V"), + (0x2183, "X"), + (0x2184, "V"), + (0x2189, "M", "0⁄3"), + (0x218A, "V"), + (0x218C, "X"), + (0x2190, "V"), + (0x222C, "M", "∫∫"), + (0x222D, "M", "∫∫∫"), + (0x222E, "V"), + (0x222F, "M", "∮∮"), + (0x2230, "M", "∮∮∮"), + (0x2231, "V"), + (0x2329, "M", "〈"), + (0x232A, "M", "〉"), + (0x232B, "V"), + (0x2427, "X"), + (0x2440, "V"), + (0x244B, "X"), + (0x2460, "M", "1"), + (0x2461, "M", "2"), + (0x2462, "M", "3"), + (0x2463, "M", "4"), + (0x2464, "M", "5"), + (0x2465, "M", "6"), + (0x2466, "M", "7"), + (0x2467, "M", "8"), + (0x2468, "M", "9"), + (0x2469, "M", "10"), + (0x246A, "M", "11"), + (0x246B, "M", "12"), + (0x246C, "M", "13"), + (0x246D, "M", "14"), + (0x246E, "M", "15"), + (0x246F, "M", "16"), + (0x2470, "M", "17"), + (0x2471, "M", "18"), + (0x2472, "M", "19"), + (0x2473, "M", "20"), + (0x2474, "3", "(1)"), + (0x2475, "3", "(2)"), + (0x2476, "3", "(3)"), + (0x2477, "3", "(4)"), + (0x2478, "3", "(5)"), + (0x2479, "3", "(6)"), + (0x247A, "3", "(7)"), + (0x247B, "3", "(8)"), + (0x247C, "3", "(9)"), + (0x247D, "3", "(10)"), + (0x247E, "3", "(11)"), + (0x247F, "3", "(12)"), + (0x2480, "3", "(13)"), + (0x2481, "3", "(14)"), + (0x2482, "3", "(15)"), + (0x2483, "3", "(16)"), + (0x2484, "3", "(17)"), + (0x2485, "3", "(18)"), + (0x2486, "3", "(19)"), + (0x2487, "3", "(20)"), + (0x2488, "X"), + (0x249C, "3", "(a)"), + (0x249D, "3", "(b)"), + (0x249E, "3", "(c)"), + (0x249F, "3", "(d)"), + (0x24A0, "3", "(e)"), + (0x24A1, "3", "(f)"), + (0x24A2, "3", "(g)"), + (0x24A3, "3", "(h)"), + (0x24A4, "3", "(i)"), + (0x24A5, "3", "(j)"), + (0x24A6, "3", "(k)"), + (0x24A7, "3", "(l)"), + (0x24A8, "3", "(m)"), + (0x24A9, "3", "(n)"), + (0x24AA, "3", "(o)"), + (0x24AB, "3", "(p)"), + (0x24AC, "3", "(q)"), + (0x24AD, "3", "(r)"), + (0x24AE, "3", "(s)"), + (0x24AF, "3", "(t)"), + (0x24B0, "3", "(u)"), + (0x24B1, "3", "(v)"), + (0x24B2, "3", "(w)"), + (0x24B3, "3", 
"(x)"), + (0x24B4, "3", "(y)"), + (0x24B5, "3", "(z)"), + (0x24B6, "M", "a"), + (0x24B7, "M", "b"), + (0x24B8, "M", "c"), + (0x24B9, "M", "d"), + (0x24BA, "M", "e"), + (0x24BB, "M", "f"), + (0x24BC, "M", "g"), + (0x24BD, "M", "h"), + (0x24BE, "M", "i"), + (0x24BF, "M", "j"), + (0x24C0, "M", "k"), + ] + + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24C1, "M", "l"), + (0x24C2, "M", "m"), + (0x24C3, "M", "n"), + (0x24C4, "M", "o"), + (0x24C5, "M", "p"), + (0x24C6, "M", "q"), + (0x24C7, "M", "r"), + (0x24C8, "M", "s"), + (0x24C9, "M", "t"), + (0x24CA, "M", "u"), + (0x24CB, "M", "v"), + (0x24CC, "M", "w"), + (0x24CD, "M", "x"), + (0x24CE, "M", "y"), + (0x24CF, "M", "z"), + (0x24D0, "M", "a"), + (0x24D1, "M", "b"), + (0x24D2, "M", "c"), + (0x24D3, "M", "d"), + (0x24D4, "M", "e"), + (0x24D5, "M", "f"), + (0x24D6, "M", "g"), + (0x24D7, "M", "h"), + (0x24D8, "M", "i"), + (0x24D9, "M", "j"), + (0x24DA, "M", "k"), + (0x24DB, "M", "l"), + (0x24DC, "M", "m"), + (0x24DD, "M", "n"), + (0x24DE, "M", "o"), + (0x24DF, "M", "p"), + (0x24E0, "M", "q"), + (0x24E1, "M", "r"), + (0x24E2, "M", "s"), + (0x24E3, "M", "t"), + (0x24E4, "M", "u"), + (0x24E5, "M", "v"), + (0x24E6, "M", "w"), + (0x24E7, "M", "x"), + (0x24E8, "M", "y"), + (0x24E9, "M", "z"), + (0x24EA, "M", "0"), + (0x24EB, "V"), + (0x2A0C, "M", "∫∫∫∫"), + (0x2A0D, "V"), + (0x2A74, "3", "::="), + (0x2A75, "3", "=="), + (0x2A76, "3", "==="), + (0x2A77, "V"), + (0x2ADC, "M", "⫝̸"), + (0x2ADD, "V"), + (0x2B74, "X"), + (0x2B76, "V"), + (0x2B96, "X"), + (0x2B97, "V"), + (0x2C00, "M", "ⰰ"), + (0x2C01, "M", "ⰱ"), + (0x2C02, "M", "ⰲ"), + (0x2C03, "M", "ⰳ"), + (0x2C04, "M", "ⰴ"), + (0x2C05, "M", "ⰵ"), + (0x2C06, "M", "ⰶ"), + (0x2C07, "M", "ⰷ"), + (0x2C08, "M", "ⰸ"), + (0x2C09, "M", "ⰹ"), + (0x2C0A, "M", "ⰺ"), + (0x2C0B, "M", "ⰻ"), + (0x2C0C, "M", "ⰼ"), + (0x2C0D, "M", "ⰽ"), + (0x2C0E, "M", "ⰾ"), + (0x2C0F, "M", "ⰿ"), + (0x2C10, "M", "ⱀ"), + (0x2C11, "M", "ⱁ"), + (0x2C12, "M", "ⱂ"), + (0x2C13, "M", "ⱃ"), + (0x2C14, "M", "ⱄ"), + (0x2C15, "M", "ⱅ"), + (0x2C16, "M", "ⱆ"), + (0x2C17, "M", "ⱇ"), + (0x2C18, "M", "ⱈ"), + (0x2C19, "M", "ⱉ"), + (0x2C1A, "M", "ⱊ"), + (0x2C1B, "M", "ⱋ"), + (0x2C1C, "M", "ⱌ"), + (0x2C1D, "M", "ⱍ"), + (0x2C1E, "M", "ⱎ"), + (0x2C1F, "M", "ⱏ"), + (0x2C20, "M", "ⱐ"), + (0x2C21, "M", "ⱑ"), + (0x2C22, "M", "ⱒ"), + (0x2C23, "M", "ⱓ"), + (0x2C24, "M", "ⱔ"), + (0x2C25, "M", "ⱕ"), + (0x2C26, "M", "ⱖ"), + (0x2C27, "M", "ⱗ"), + (0x2C28, "M", "ⱘ"), + (0x2C29, "M", "ⱙ"), + (0x2C2A, "M", "ⱚ"), + (0x2C2B, "M", "ⱛ"), + (0x2C2C, "M", "ⱜ"), + ] + + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C2D, "M", "ⱝ"), + (0x2C2E, "M", "ⱞ"), + (0x2C2F, "M", "ⱟ"), + (0x2C30, "V"), + (0x2C60, "M", "ⱡ"), + (0x2C61, "V"), + (0x2C62, "M", "ɫ"), + (0x2C63, "M", "ᵽ"), + (0x2C64, "M", "ɽ"), + (0x2C65, "V"), + (0x2C67, "M", "ⱨ"), + (0x2C68, "V"), + (0x2C69, "M", "ⱪ"), + (0x2C6A, "V"), + (0x2C6B, "M", "ⱬ"), + (0x2C6C, "V"), + (0x2C6D, "M", "ɑ"), + (0x2C6E, "M", "ɱ"), + (0x2C6F, "M", "ɐ"), + (0x2C70, "M", "ɒ"), + (0x2C71, "V"), + (0x2C72, "M", "ⱳ"), + (0x2C73, "V"), + (0x2C75, "M", "ⱶ"), + (0x2C76, "V"), + (0x2C7C, "M", "j"), + (0x2C7D, "M", "v"), + (0x2C7E, "M", "ȿ"), + (0x2C7F, "M", "ɀ"), + (0x2C80, "M", "ⲁ"), + (0x2C81, "V"), + (0x2C82, "M", "ⲃ"), + (0x2C83, "V"), + (0x2C84, "M", "ⲅ"), + (0x2C85, "V"), + (0x2C86, "M", "ⲇ"), + (0x2C87, "V"), + (0x2C88, "M", "ⲉ"), + (0x2C89, "V"), + (0x2C8A, "M", "ⲋ"), + (0x2C8B, "V"), + (0x2C8C, "M", "ⲍ"), + (0x2C8D, "V"), + (0x2C8E, "M", "ⲏ"), + (0x2C8F, "V"), + 
(0x2C90, "M", "ⲑ"), + (0x2C91, "V"), + (0x2C92, "M", "ⲓ"), + (0x2C93, "V"), + (0x2C94, "M", "ⲕ"), + (0x2C95, "V"), + (0x2C96, "M", "ⲗ"), + (0x2C97, "V"), + (0x2C98, "M", "ⲙ"), + (0x2C99, "V"), + (0x2C9A, "M", "ⲛ"), + (0x2C9B, "V"), + (0x2C9C, "M", "ⲝ"), + (0x2C9D, "V"), + (0x2C9E, "M", "ⲟ"), + (0x2C9F, "V"), + (0x2CA0, "M", "ⲡ"), + (0x2CA1, "V"), + (0x2CA2, "M", "ⲣ"), + (0x2CA3, "V"), + (0x2CA4, "M", "ⲥ"), + (0x2CA5, "V"), + (0x2CA6, "M", "ⲧ"), + (0x2CA7, "V"), + (0x2CA8, "M", "ⲩ"), + (0x2CA9, "V"), + (0x2CAA, "M", "ⲫ"), + (0x2CAB, "V"), + (0x2CAC, "M", "ⲭ"), + (0x2CAD, "V"), + (0x2CAE, "M", "ⲯ"), + (0x2CAF, "V"), + (0x2CB0, "M", "ⲱ"), + (0x2CB1, "V"), + (0x2CB2, "M", "ⲳ"), + (0x2CB3, "V"), + (0x2CB4, "M", "ⲵ"), + (0x2CB5, "V"), + (0x2CB6, "M", "ⲷ"), + (0x2CB7, "V"), + (0x2CB8, "M", "ⲹ"), + (0x2CB9, "V"), + (0x2CBA, "M", "ⲻ"), + (0x2CBB, "V"), + (0x2CBC, "M", "ⲽ"), + (0x2CBD, "V"), + (0x2CBE, "M", "ⲿ"), + (0x2CBF, "V"), + (0x2CC0, "M", "ⳁ"), + (0x2CC1, "V"), + (0x2CC2, "M", "ⳃ"), + (0x2CC3, "V"), + (0x2CC4, "M", "ⳅ"), + (0x2CC5, "V"), + (0x2CC6, "M", "ⳇ"), + ] + + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC7, "V"), + (0x2CC8, "M", "ⳉ"), + (0x2CC9, "V"), + (0x2CCA, "M", "ⳋ"), + (0x2CCB, "V"), + (0x2CCC, "M", "ⳍ"), + (0x2CCD, "V"), + (0x2CCE, "M", "ⳏ"), + (0x2CCF, "V"), + (0x2CD0, "M", "ⳑ"), + (0x2CD1, "V"), + (0x2CD2, "M", "ⳓ"), + (0x2CD3, "V"), + (0x2CD4, "M", "ⳕ"), + (0x2CD5, "V"), + (0x2CD6, "M", "ⳗ"), + (0x2CD7, "V"), + (0x2CD8, "M", "ⳙ"), + (0x2CD9, "V"), + (0x2CDA, "M", "ⳛ"), + (0x2CDB, "V"), + (0x2CDC, "M", "ⳝ"), + (0x2CDD, "V"), + (0x2CDE, "M", "ⳟ"), + (0x2CDF, "V"), + (0x2CE0, "M", "ⳡ"), + (0x2CE1, "V"), + (0x2CE2, "M", "ⳣ"), + (0x2CE3, "V"), + (0x2CEB, "M", "ⳬ"), + (0x2CEC, "V"), + (0x2CED, "M", "ⳮ"), + (0x2CEE, "V"), + (0x2CF2, "M", "ⳳ"), + (0x2CF3, "V"), + (0x2CF4, "X"), + (0x2CF9, "V"), + (0x2D26, "X"), + (0x2D27, "V"), + (0x2D28, "X"), + (0x2D2D, "V"), + (0x2D2E, "X"), + (0x2D30, "V"), + (0x2D68, "X"), + (0x2D6F, "M", "ⵡ"), + (0x2D70, "V"), + (0x2D71, "X"), + (0x2D7F, "V"), + (0x2D97, "X"), + (0x2DA0, "V"), + (0x2DA7, "X"), + (0x2DA8, "V"), + (0x2DAF, "X"), + (0x2DB0, "V"), + (0x2DB7, "X"), + (0x2DB8, "V"), + (0x2DBF, "X"), + (0x2DC0, "V"), + (0x2DC7, "X"), + (0x2DC8, "V"), + (0x2DCF, "X"), + (0x2DD0, "V"), + (0x2DD7, "X"), + (0x2DD8, "V"), + (0x2DDF, "X"), + (0x2DE0, "V"), + (0x2E5E, "X"), + (0x2E80, "V"), + (0x2E9A, "X"), + (0x2E9B, "V"), + (0x2E9F, "M", "母"), + (0x2EA0, "V"), + (0x2EF3, "M", "龟"), + (0x2EF4, "X"), + (0x2F00, "M", "一"), + (0x2F01, "M", "丨"), + (0x2F02, "M", "丶"), + (0x2F03, "M", "丿"), + (0x2F04, "M", "乙"), + (0x2F05, "M", "亅"), + (0x2F06, "M", "二"), + (0x2F07, "M", "亠"), + (0x2F08, "M", "人"), + (0x2F09, "M", "儿"), + (0x2F0A, "M", "入"), + (0x2F0B, "M", "八"), + (0x2F0C, "M", "冂"), + (0x2F0D, "M", "冖"), + (0x2F0E, "M", "冫"), + (0x2F0F, "M", "几"), + (0x2F10, "M", "凵"), + (0x2F11, "M", "刀"), + (0x2F12, "M", "力"), + (0x2F13, "M", "勹"), + (0x2F14, "M", "匕"), + (0x2F15, "M", "匚"), + (0x2F16, "M", "匸"), + (0x2F17, "M", "十"), + (0x2F18, "M", "卜"), + (0x2F19, "M", "卩"), + ] + + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F1A, "M", "厂"), + (0x2F1B, "M", "厶"), + (0x2F1C, "M", "又"), + (0x2F1D, "M", "口"), + (0x2F1E, "M", "囗"), + (0x2F1F, "M", "土"), + (0x2F20, "M", "士"), + (0x2F21, "M", "夂"), + (0x2F22, "M", "夊"), + (0x2F23, "M", "夕"), + (0x2F24, "M", "大"), + (0x2F25, "M", "女"), + (0x2F26, "M", "子"), + (0x2F27, "M", "宀"), + (0x2F28, "M", "寸"), + (0x2F29, "M", "小"), + (0x2F2A, "M", "尢"), 
+ (0x2F2B, "M", "尸"), + (0x2F2C, "M", "屮"), + (0x2F2D, "M", "山"), + (0x2F2E, "M", "巛"), + (0x2F2F, "M", "工"), + (0x2F30, "M", "己"), + (0x2F31, "M", "巾"), + (0x2F32, "M", "干"), + (0x2F33, "M", "幺"), + (0x2F34, "M", "广"), + (0x2F35, "M", "廴"), + (0x2F36, "M", "廾"), + (0x2F37, "M", "弋"), + (0x2F38, "M", "弓"), + (0x2F39, "M", "彐"), + (0x2F3A, "M", "彡"), + (0x2F3B, "M", "彳"), + (0x2F3C, "M", "心"), + (0x2F3D, "M", "戈"), + (0x2F3E, "M", "戶"), + (0x2F3F, "M", "手"), + (0x2F40, "M", "支"), + (0x2F41, "M", "攴"), + (0x2F42, "M", "文"), + (0x2F43, "M", "斗"), + (0x2F44, "M", "斤"), + (0x2F45, "M", "方"), + (0x2F46, "M", "无"), + (0x2F47, "M", "日"), + (0x2F48, "M", "曰"), + (0x2F49, "M", "月"), + (0x2F4A, "M", "木"), + (0x2F4B, "M", "欠"), + (0x2F4C, "M", "止"), + (0x2F4D, "M", "歹"), + (0x2F4E, "M", "殳"), + (0x2F4F, "M", "毋"), + (0x2F50, "M", "比"), + (0x2F51, "M", "毛"), + (0x2F52, "M", "氏"), + (0x2F53, "M", "气"), + (0x2F54, "M", "水"), + (0x2F55, "M", "火"), + (0x2F56, "M", "爪"), + (0x2F57, "M", "父"), + (0x2F58, "M", "爻"), + (0x2F59, "M", "爿"), + (0x2F5A, "M", "片"), + (0x2F5B, "M", "牙"), + (0x2F5C, "M", "牛"), + (0x2F5D, "M", "犬"), + (0x2F5E, "M", "玄"), + (0x2F5F, "M", "玉"), + (0x2F60, "M", "瓜"), + (0x2F61, "M", "瓦"), + (0x2F62, "M", "甘"), + (0x2F63, "M", "生"), + (0x2F64, "M", "用"), + (0x2F65, "M", "田"), + (0x2F66, "M", "疋"), + (0x2F67, "M", "疒"), + (0x2F68, "M", "癶"), + (0x2F69, "M", "白"), + (0x2F6A, "M", "皮"), + (0x2F6B, "M", "皿"), + (0x2F6C, "M", "目"), + (0x2F6D, "M", "矛"), + (0x2F6E, "M", "矢"), + (0x2F6F, "M", "石"), + (0x2F70, "M", "示"), + (0x2F71, "M", "禸"), + (0x2F72, "M", "禾"), + (0x2F73, "M", "穴"), + (0x2F74, "M", "立"), + (0x2F75, "M", "竹"), + (0x2F76, "M", "米"), + (0x2F77, "M", "糸"), + (0x2F78, "M", "缶"), + (0x2F79, "M", "网"), + (0x2F7A, "M", "羊"), + (0x2F7B, "M", "羽"), + (0x2F7C, "M", "老"), + (0x2F7D, "M", "而"), + ] + + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7E, "M", "耒"), + (0x2F7F, "M", "耳"), + (0x2F80, "M", "聿"), + (0x2F81, "M", "肉"), + (0x2F82, "M", "臣"), + (0x2F83, "M", "自"), + (0x2F84, "M", "至"), + (0x2F85, "M", "臼"), + (0x2F86, "M", "舌"), + (0x2F87, "M", "舛"), + (0x2F88, "M", "舟"), + (0x2F89, "M", "艮"), + (0x2F8A, "M", "色"), + (0x2F8B, "M", "艸"), + (0x2F8C, "M", "虍"), + (0x2F8D, "M", "虫"), + (0x2F8E, "M", "血"), + (0x2F8F, "M", "行"), + (0x2F90, "M", "衣"), + (0x2F91, "M", "襾"), + (0x2F92, "M", "見"), + (0x2F93, "M", "角"), + (0x2F94, "M", "言"), + (0x2F95, "M", "谷"), + (0x2F96, "M", "豆"), + (0x2F97, "M", "豕"), + (0x2F98, "M", "豸"), + (0x2F99, "M", "貝"), + (0x2F9A, "M", "赤"), + (0x2F9B, "M", "走"), + (0x2F9C, "M", "足"), + (0x2F9D, "M", "身"), + (0x2F9E, "M", "車"), + (0x2F9F, "M", "辛"), + (0x2FA0, "M", "辰"), + (0x2FA1, "M", "辵"), + (0x2FA2, "M", "邑"), + (0x2FA3, "M", "酉"), + (0x2FA4, "M", "釆"), + (0x2FA5, "M", "里"), + (0x2FA6, "M", "金"), + (0x2FA7, "M", "長"), + (0x2FA8, "M", "門"), + (0x2FA9, "M", "阜"), + (0x2FAA, "M", "隶"), + (0x2FAB, "M", "隹"), + (0x2FAC, "M", "雨"), + (0x2FAD, "M", "靑"), + (0x2FAE, "M", "非"), + (0x2FAF, "M", "面"), + (0x2FB0, "M", "革"), + (0x2FB1, "M", "韋"), + (0x2FB2, "M", "韭"), + (0x2FB3, "M", "音"), + (0x2FB4, "M", "頁"), + (0x2FB5, "M", "風"), + (0x2FB6, "M", "飛"), + (0x2FB7, "M", "食"), + (0x2FB8, "M", "首"), + (0x2FB9, "M", "香"), + (0x2FBA, "M", "馬"), + (0x2FBB, "M", "骨"), + (0x2FBC, "M", "高"), + (0x2FBD, "M", "髟"), + (0x2FBE, "M", "鬥"), + (0x2FBF, "M", "鬯"), + (0x2FC0, "M", "鬲"), + (0x2FC1, "M", "鬼"), + (0x2FC2, "M", "魚"), + (0x2FC3, "M", "鳥"), + (0x2FC4, "M", "鹵"), + (0x2FC5, "M", "鹿"), + (0x2FC6, "M", "麥"), + (0x2FC7, "M", "麻"), + (0x2FC8, 
"M", "黃"), + (0x2FC9, "M", "黍"), + (0x2FCA, "M", "黑"), + (0x2FCB, "M", "黹"), + (0x2FCC, "M", "黽"), + (0x2FCD, "M", "鼎"), + (0x2FCE, "M", "鼓"), + (0x2FCF, "M", "鼠"), + (0x2FD0, "M", "鼻"), + (0x2FD1, "M", "齊"), + (0x2FD2, "M", "齒"), + (0x2FD3, "M", "龍"), + (0x2FD4, "M", "龜"), + (0x2FD5, "M", "龠"), + (0x2FD6, "X"), + (0x3000, "3", " "), + (0x3001, "V"), + (0x3002, "M", "."), + (0x3003, "V"), + (0x3036, "M", "〒"), + (0x3037, "V"), + (0x3038, "M", "十"), + (0x3039, "M", "卄"), + (0x303A, "M", "卅"), + (0x303B, "V"), + (0x3040, "X"), + ] + + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3041, "V"), + (0x3097, "X"), + (0x3099, "V"), + (0x309B, "3", " ゙"), + (0x309C, "3", " ゚"), + (0x309D, "V"), + (0x309F, "M", "より"), + (0x30A0, "V"), + (0x30FF, "M", "コト"), + (0x3100, "X"), + (0x3105, "V"), + (0x3130, "X"), + (0x3131, "M", "ᄀ"), + (0x3132, "M", "ᄁ"), + (0x3133, "M", "ᆪ"), + (0x3134, "M", "ᄂ"), + (0x3135, "M", "ᆬ"), + (0x3136, "M", "ᆭ"), + (0x3137, "M", "ᄃ"), + (0x3138, "M", "ᄄ"), + (0x3139, "M", "ᄅ"), + (0x313A, "M", "ᆰ"), + (0x313B, "M", "ᆱ"), + (0x313C, "M", "ᆲ"), + (0x313D, "M", "ᆳ"), + (0x313E, "M", "ᆴ"), + (0x313F, "M", "ᆵ"), + (0x3140, "M", "ᄚ"), + (0x3141, "M", "ᄆ"), + (0x3142, "M", "ᄇ"), + (0x3143, "M", "ᄈ"), + (0x3144, "M", "ᄡ"), + (0x3145, "M", "ᄉ"), + (0x3146, "M", "ᄊ"), + (0x3147, "M", "ᄋ"), + (0x3148, "M", "ᄌ"), + (0x3149, "M", "ᄍ"), + (0x314A, "M", "ᄎ"), + (0x314B, "M", "ᄏ"), + (0x314C, "M", "ᄐ"), + (0x314D, "M", "ᄑ"), + (0x314E, "M", "ᄒ"), + (0x314F, "M", "ᅡ"), + (0x3150, "M", "ᅢ"), + (0x3151, "M", "ᅣ"), + (0x3152, "M", "ᅤ"), + (0x3153, "M", "ᅥ"), + (0x3154, "M", "ᅦ"), + (0x3155, "M", "ᅧ"), + (0x3156, "M", "ᅨ"), + (0x3157, "M", "ᅩ"), + (0x3158, "M", "ᅪ"), + (0x3159, "M", "ᅫ"), + (0x315A, "M", "ᅬ"), + (0x315B, "M", "ᅭ"), + (0x315C, "M", "ᅮ"), + (0x315D, "M", "ᅯ"), + (0x315E, "M", "ᅰ"), + (0x315F, "M", "ᅱ"), + (0x3160, "M", "ᅲ"), + (0x3161, "M", "ᅳ"), + (0x3162, "M", "ᅴ"), + (0x3163, "M", "ᅵ"), + (0x3164, "X"), + (0x3165, "M", "ᄔ"), + (0x3166, "M", "ᄕ"), + (0x3167, "M", "ᇇ"), + (0x3168, "M", "ᇈ"), + (0x3169, "M", "ᇌ"), + (0x316A, "M", "ᇎ"), + (0x316B, "M", "ᇓ"), + (0x316C, "M", "ᇗ"), + (0x316D, "M", "ᇙ"), + (0x316E, "M", "ᄜ"), + (0x316F, "M", "ᇝ"), + (0x3170, "M", "ᇟ"), + (0x3171, "M", "ᄝ"), + (0x3172, "M", "ᄞ"), + (0x3173, "M", "ᄠ"), + (0x3174, "M", "ᄢ"), + (0x3175, "M", "ᄣ"), + (0x3176, "M", "ᄧ"), + (0x3177, "M", "ᄩ"), + (0x3178, "M", "ᄫ"), + (0x3179, "M", "ᄬ"), + (0x317A, "M", "ᄭ"), + (0x317B, "M", "ᄮ"), + (0x317C, "M", "ᄯ"), + (0x317D, "M", "ᄲ"), + (0x317E, "M", "ᄶ"), + (0x317F, "M", "ᅀ"), + (0x3180, "M", "ᅇ"), + (0x3181, "M", "ᅌ"), + (0x3182, "M", "ᇱ"), + (0x3183, "M", "ᇲ"), + (0x3184, "M", "ᅗ"), + (0x3185, "M", "ᅘ"), + (0x3186, "M", "ᅙ"), + (0x3187, "M", "ᆄ"), + (0x3188, "M", "ᆅ"), + ] + + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3189, "M", "ᆈ"), + (0x318A, "M", "ᆑ"), + (0x318B, "M", "ᆒ"), + (0x318C, "M", "ᆔ"), + (0x318D, "M", "ᆞ"), + (0x318E, "M", "ᆡ"), + (0x318F, "X"), + (0x3190, "V"), + (0x3192, "M", "一"), + (0x3193, "M", "二"), + (0x3194, "M", "三"), + (0x3195, "M", "四"), + (0x3196, "M", "上"), + (0x3197, "M", "中"), + (0x3198, "M", "下"), + (0x3199, "M", "甲"), + (0x319A, "M", "乙"), + (0x319B, "M", "丙"), + (0x319C, "M", "丁"), + (0x319D, "M", "天"), + (0x319E, "M", "地"), + (0x319F, "M", "人"), + (0x31A0, "V"), + (0x31E4, "X"), + (0x31F0, "V"), + (0x3200, "3", "(ᄀ)"), + (0x3201, "3", "(ᄂ)"), + (0x3202, "3", "(ᄃ)"), + (0x3203, "3", "(ᄅ)"), + (0x3204, "3", "(ᄆ)"), + (0x3205, "3", "(ᄇ)"), + (0x3206, "3", 
"(ᄉ)"), + (0x3207, "3", "(ᄋ)"), + (0x3208, "3", "(ᄌ)"), + (0x3209, "3", "(ᄎ)"), + (0x320A, "3", "(ᄏ)"), + (0x320B, "3", "(ᄐ)"), + (0x320C, "3", "(ᄑ)"), + (0x320D, "3", "(ᄒ)"), + (0x320E, "3", "(가)"), + (0x320F, "3", "(나)"), + (0x3210, "3", "(다)"), + (0x3211, "3", "(라)"), + (0x3212, "3", "(마)"), + (0x3213, "3", "(바)"), + (0x3214, "3", "(사)"), + (0x3215, "3", "(아)"), + (0x3216, "3", "(자)"), + (0x3217, "3", "(차)"), + (0x3218, "3", "(카)"), + (0x3219, "3", "(타)"), + (0x321A, "3", "(파)"), + (0x321B, "3", "(하)"), + (0x321C, "3", "(주)"), + (0x321D, "3", "(오전)"), + (0x321E, "3", "(오후)"), + (0x321F, "X"), + (0x3220, "3", "(一)"), + (0x3221, "3", "(二)"), + (0x3222, "3", "(三)"), + (0x3223, "3", "(四)"), + (0x3224, "3", "(五)"), + (0x3225, "3", "(六)"), + (0x3226, "3", "(七)"), + (0x3227, "3", "(八)"), + (0x3228, "3", "(九)"), + (0x3229, "3", "(十)"), + (0x322A, "3", "(月)"), + (0x322B, "3", "(火)"), + (0x322C, "3", "(水)"), + (0x322D, "3", "(木)"), + (0x322E, "3", "(金)"), + (0x322F, "3", "(土)"), + (0x3230, "3", "(日)"), + (0x3231, "3", "(株)"), + (0x3232, "3", "(有)"), + (0x3233, "3", "(社)"), + (0x3234, "3", "(名)"), + (0x3235, "3", "(特)"), + (0x3236, "3", "(財)"), + (0x3237, "3", "(祝)"), + (0x3238, "3", "(労)"), + (0x3239, "3", "(代)"), + (0x323A, "3", "(呼)"), + (0x323B, "3", "(学)"), + (0x323C, "3", "(監)"), + (0x323D, "3", "(企)"), + (0x323E, "3", "(資)"), + (0x323F, "3", "(協)"), + (0x3240, "3", "(祭)"), + (0x3241, "3", "(休)"), + (0x3242, "3", "(自)"), + (0x3243, "3", "(至)"), + (0x3244, "M", "問"), + (0x3245, "M", "幼"), + (0x3246, "M", "文"), + (0x3247, "M", "箏"), + (0x3248, "V"), + (0x3250, "M", "pte"), + (0x3251, "M", "21"), + ] + + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3252, "M", "22"), + (0x3253, "M", "23"), + (0x3254, "M", "24"), + (0x3255, "M", "25"), + (0x3256, "M", "26"), + (0x3257, "M", "27"), + (0x3258, "M", "28"), + (0x3259, "M", "29"), + (0x325A, "M", "30"), + (0x325B, "M", "31"), + (0x325C, "M", "32"), + (0x325D, "M", "33"), + (0x325E, "M", "34"), + (0x325F, "M", "35"), + (0x3260, "M", "ᄀ"), + (0x3261, "M", "ᄂ"), + (0x3262, "M", "ᄃ"), + (0x3263, "M", "ᄅ"), + (0x3264, "M", "ᄆ"), + (0x3265, "M", "ᄇ"), + (0x3266, "M", "ᄉ"), + (0x3267, "M", "ᄋ"), + (0x3268, "M", "ᄌ"), + (0x3269, "M", "ᄎ"), + (0x326A, "M", "ᄏ"), + (0x326B, "M", "ᄐ"), + (0x326C, "M", "ᄑ"), + (0x326D, "M", "ᄒ"), + (0x326E, "M", "가"), + (0x326F, "M", "나"), + (0x3270, "M", "다"), + (0x3271, "M", "라"), + (0x3272, "M", "마"), + (0x3273, "M", "바"), + (0x3274, "M", "사"), + (0x3275, "M", "아"), + (0x3276, "M", "자"), + (0x3277, "M", "차"), + (0x3278, "M", "카"), + (0x3279, "M", "타"), + (0x327A, "M", "파"), + (0x327B, "M", "하"), + (0x327C, "M", "참고"), + (0x327D, "M", "주의"), + (0x327E, "M", "우"), + (0x327F, "V"), + (0x3280, "M", "一"), + (0x3281, "M", "二"), + (0x3282, "M", "三"), + (0x3283, "M", "四"), + (0x3284, "M", "五"), + (0x3285, "M", "六"), + (0x3286, "M", "七"), + (0x3287, "M", "八"), + (0x3288, "M", "九"), + (0x3289, "M", "十"), + (0x328A, "M", "月"), + (0x328B, "M", "火"), + (0x328C, "M", "水"), + (0x328D, "M", "木"), + (0x328E, "M", "金"), + (0x328F, "M", "土"), + (0x3290, "M", "日"), + (0x3291, "M", "株"), + (0x3292, "M", "有"), + (0x3293, "M", "社"), + (0x3294, "M", "名"), + (0x3295, "M", "特"), + (0x3296, "M", "財"), + (0x3297, "M", "祝"), + (0x3298, "M", "労"), + (0x3299, "M", "秘"), + (0x329A, "M", "男"), + (0x329B, "M", "女"), + (0x329C, "M", "適"), + (0x329D, "M", "優"), + (0x329E, "M", "印"), + (0x329F, "M", "注"), + (0x32A0, "M", "項"), + (0x32A1, "M", "休"), + (0x32A2, "M", "写"), + (0x32A3, "M", "正"), + (0x32A4, "M", "上"), + 
(0x32A5, "M", "中"), + (0x32A6, "M", "下"), + (0x32A7, "M", "左"), + (0x32A8, "M", "右"), + (0x32A9, "M", "医"), + (0x32AA, "M", "宗"), + (0x32AB, "M", "学"), + (0x32AC, "M", "監"), + (0x32AD, "M", "企"), + (0x32AE, "M", "資"), + (0x32AF, "M", "協"), + (0x32B0, "M", "夜"), + (0x32B1, "M", "36"), + (0x32B2, "M", "37"), + (0x32B3, "M", "38"), + (0x32B4, "M", "39"), + (0x32B5, "M", "40"), + ] + + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B6, "M", "41"), + (0x32B7, "M", "42"), + (0x32B8, "M", "43"), + (0x32B9, "M", "44"), + (0x32BA, "M", "45"), + (0x32BB, "M", "46"), + (0x32BC, "M", "47"), + (0x32BD, "M", "48"), + (0x32BE, "M", "49"), + (0x32BF, "M", "50"), + (0x32C0, "M", "1月"), + (0x32C1, "M", "2月"), + (0x32C2, "M", "3月"), + (0x32C3, "M", "4月"), + (0x32C4, "M", "5月"), + (0x32C5, "M", "6月"), + (0x32C6, "M", "7月"), + (0x32C7, "M", "8月"), + (0x32C8, "M", "9月"), + (0x32C9, "M", "10月"), + (0x32CA, "M", "11月"), + (0x32CB, "M", "12月"), + (0x32CC, "M", "hg"), + (0x32CD, "M", "erg"), + (0x32CE, "M", "ev"), + (0x32CF, "M", "ltd"), + (0x32D0, "M", "ア"), + (0x32D1, "M", "イ"), + (0x32D2, "M", "ウ"), + (0x32D3, "M", "エ"), + (0x32D4, "M", "オ"), + (0x32D5, "M", "カ"), + (0x32D6, "M", "キ"), + (0x32D7, "M", "ク"), + (0x32D8, "M", "ケ"), + (0x32D9, "M", "コ"), + (0x32DA, "M", "サ"), + (0x32DB, "M", "シ"), + (0x32DC, "M", "ス"), + (0x32DD, "M", "セ"), + (0x32DE, "M", "ソ"), + (0x32DF, "M", "タ"), + (0x32E0, "M", "チ"), + (0x32E1, "M", "ツ"), + (0x32E2, "M", "テ"), + (0x32E3, "M", "ト"), + (0x32E4, "M", "ナ"), + (0x32E5, "M", "ニ"), + (0x32E6, "M", "ヌ"), + (0x32E7, "M", "ネ"), + (0x32E8, "M", "ノ"), + (0x32E9, "M", "ハ"), + (0x32EA, "M", "ヒ"), + (0x32EB, "M", "フ"), + (0x32EC, "M", "ヘ"), + (0x32ED, "M", "ホ"), + (0x32EE, "M", "マ"), + (0x32EF, "M", "ミ"), + (0x32F0, "M", "ム"), + (0x32F1, "M", "メ"), + (0x32F2, "M", "モ"), + (0x32F3, "M", "ヤ"), + (0x32F4, "M", "ユ"), + (0x32F5, "M", "ヨ"), + (0x32F6, "M", "ラ"), + (0x32F7, "M", "リ"), + (0x32F8, "M", "ル"), + (0x32F9, "M", "レ"), + (0x32FA, "M", "ロ"), + (0x32FB, "M", "ワ"), + (0x32FC, "M", "ヰ"), + (0x32FD, "M", "ヱ"), + (0x32FE, "M", "ヲ"), + (0x32FF, "M", "令和"), + (0x3300, "M", "アパート"), + (0x3301, "M", "アルファ"), + (0x3302, "M", "アンペア"), + (0x3303, "M", "アール"), + (0x3304, "M", "イニング"), + (0x3305, "M", "インチ"), + (0x3306, "M", "ウォン"), + (0x3307, "M", "エスクード"), + (0x3308, "M", "エーカー"), + (0x3309, "M", "オンス"), + (0x330A, "M", "オーム"), + (0x330B, "M", "カイリ"), + (0x330C, "M", "カラット"), + (0x330D, "M", "カロリー"), + (0x330E, "M", "ガロン"), + (0x330F, "M", "ガンマ"), + (0x3310, "M", "ギガ"), + (0x3311, "M", "ギニー"), + (0x3312, "M", "キュリー"), + (0x3313, "M", "ギルダー"), + (0x3314, "M", "キロ"), + (0x3315, "M", "キログラム"), + (0x3316, "M", "キロメートル"), + (0x3317, "M", "キロワット"), + (0x3318, "M", "グラム"), + (0x3319, "M", "グラムトン"), + ] + + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x331A, "M", "クルゼイロ"), + (0x331B, "M", "クローネ"), + (0x331C, "M", "ケース"), + (0x331D, "M", "コルナ"), + (0x331E, "M", "コーポ"), + (0x331F, "M", "サイクル"), + (0x3320, "M", "サンチーム"), + (0x3321, "M", "シリング"), + (0x3322, "M", "センチ"), + (0x3323, "M", "セント"), + (0x3324, "M", "ダース"), + (0x3325, "M", "デシ"), + (0x3326, "M", "ドル"), + (0x3327, "M", "トン"), + (0x3328, "M", "ナノ"), + (0x3329, "M", "ノット"), + (0x332A, "M", "ハイツ"), + (0x332B, "M", "パーセント"), + (0x332C, "M", "パーツ"), + (0x332D, "M", "バーレル"), + (0x332E, "M", "ピアストル"), + (0x332F, "M", "ピクル"), + (0x3330, "M", "ピコ"), + (0x3331, "M", "ビル"), + (0x3332, "M", "ファラッド"), + (0x3333, "M", "フィート"), + (0x3334, "M", "ブッシェル"), + (0x3335, "M", "フラン"), + (0x3336, 
"M", "ヘクタール"), + (0x3337, "M", "ペソ"), + (0x3338, "M", "ペニヒ"), + (0x3339, "M", "ヘルツ"), + (0x333A, "M", "ペンス"), + (0x333B, "M", "ページ"), + (0x333C, "M", "ベータ"), + (0x333D, "M", "ポイント"), + (0x333E, "M", "ボルト"), + (0x333F, "M", "ホン"), + (0x3340, "M", "ポンド"), + (0x3341, "M", "ホール"), + (0x3342, "M", "ホーン"), + (0x3343, "M", "マイクロ"), + (0x3344, "M", "マイル"), + (0x3345, "M", "マッハ"), + (0x3346, "M", "マルク"), + (0x3347, "M", "マンション"), + (0x3348, "M", "ミクロン"), + (0x3349, "M", "ミリ"), + (0x334A, "M", "ミリバール"), + (0x334B, "M", "メガ"), + (0x334C, "M", "メガトン"), + (0x334D, "M", "メートル"), + (0x334E, "M", "ヤード"), + (0x334F, "M", "ヤール"), + (0x3350, "M", "ユアン"), + (0x3351, "M", "リットル"), + (0x3352, "M", "リラ"), + (0x3353, "M", "ルピー"), + (0x3354, "M", "ルーブル"), + (0x3355, "M", "レム"), + (0x3356, "M", "レントゲン"), + (0x3357, "M", "ワット"), + (0x3358, "M", "0点"), + (0x3359, "M", "1点"), + (0x335A, "M", "2点"), + (0x335B, "M", "3点"), + (0x335C, "M", "4点"), + (0x335D, "M", "5点"), + (0x335E, "M", "6点"), + (0x335F, "M", "7点"), + (0x3360, "M", "8点"), + (0x3361, "M", "9点"), + (0x3362, "M", "10点"), + (0x3363, "M", "11点"), + (0x3364, "M", "12点"), + (0x3365, "M", "13点"), + (0x3366, "M", "14点"), + (0x3367, "M", "15点"), + (0x3368, "M", "16点"), + (0x3369, "M", "17点"), + (0x336A, "M", "18点"), + (0x336B, "M", "19点"), + (0x336C, "M", "20点"), + (0x336D, "M", "21点"), + (0x336E, "M", "22点"), + (0x336F, "M", "23点"), + (0x3370, "M", "24点"), + (0x3371, "M", "hpa"), + (0x3372, "M", "da"), + (0x3373, "M", "au"), + (0x3374, "M", "bar"), + (0x3375, "M", "ov"), + (0x3376, "M", "pc"), + (0x3377, "M", "dm"), + (0x3378, "M", "dm2"), + (0x3379, "M", "dm3"), + (0x337A, "M", "iu"), + (0x337B, "M", "平成"), + (0x337C, "M", "昭和"), + (0x337D, "M", "大正"), + ] + + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337E, "M", "明治"), + (0x337F, "M", "株式会社"), + (0x3380, "M", "pa"), + (0x3381, "M", "na"), + (0x3382, "M", "μa"), + (0x3383, "M", "ma"), + (0x3384, "M", "ka"), + (0x3385, "M", "kb"), + (0x3386, "M", "mb"), + (0x3387, "M", "gb"), + (0x3388, "M", "cal"), + (0x3389, "M", "kcal"), + (0x338A, "M", "pf"), + (0x338B, "M", "nf"), + (0x338C, "M", "μf"), + (0x338D, "M", "μg"), + (0x338E, "M", "mg"), + (0x338F, "M", "kg"), + (0x3390, "M", "hz"), + (0x3391, "M", "khz"), + (0x3392, "M", "mhz"), + (0x3393, "M", "ghz"), + (0x3394, "M", "thz"), + (0x3395, "M", "μl"), + (0x3396, "M", "ml"), + (0x3397, "M", "dl"), + (0x3398, "M", "kl"), + (0x3399, "M", "fm"), + (0x339A, "M", "nm"), + (0x339B, "M", "μm"), + (0x339C, "M", "mm"), + (0x339D, "M", "cm"), + (0x339E, "M", "km"), + (0x339F, "M", "mm2"), + (0x33A0, "M", "cm2"), + (0x33A1, "M", "m2"), + (0x33A2, "M", "km2"), + (0x33A3, "M", "mm3"), + (0x33A4, "M", "cm3"), + (0x33A5, "M", "m3"), + (0x33A6, "M", "km3"), + (0x33A7, "M", "m∕s"), + (0x33A8, "M", "m∕s2"), + (0x33A9, "M", "pa"), + (0x33AA, "M", "kpa"), + (0x33AB, "M", "mpa"), + (0x33AC, "M", "gpa"), + (0x33AD, "M", "rad"), + (0x33AE, "M", "rad∕s"), + (0x33AF, "M", "rad∕s2"), + (0x33B0, "M", "ps"), + (0x33B1, "M", "ns"), + (0x33B2, "M", "μs"), + (0x33B3, "M", "ms"), + (0x33B4, "M", "pv"), + (0x33B5, "M", "nv"), + (0x33B6, "M", "μv"), + (0x33B7, "M", "mv"), + (0x33B8, "M", "kv"), + (0x33B9, "M", "mv"), + (0x33BA, "M", "pw"), + (0x33BB, "M", "nw"), + (0x33BC, "M", "μw"), + (0x33BD, "M", "mw"), + (0x33BE, "M", "kw"), + (0x33BF, "M", "mw"), + (0x33C0, "M", "kω"), + (0x33C1, "M", "mω"), + (0x33C2, "X"), + (0x33C3, "M", "bq"), + (0x33C4, "M", "cc"), + (0x33C5, "M", "cd"), + (0x33C6, "M", "c∕kg"), + (0x33C7, "X"), + (0x33C8, "M", "db"), + (0x33C9, 
"M", "gy"), + (0x33CA, "M", "ha"), + (0x33CB, "M", "hp"), + (0x33CC, "M", "in"), + (0x33CD, "M", "kk"), + (0x33CE, "M", "km"), + (0x33CF, "M", "kt"), + (0x33D0, "M", "lm"), + (0x33D1, "M", "ln"), + (0x33D2, "M", "log"), + (0x33D3, "M", "lx"), + (0x33D4, "M", "mb"), + (0x33D5, "M", "mil"), + (0x33D6, "M", "mol"), + (0x33D7, "M", "ph"), + (0x33D8, "X"), + (0x33D9, "M", "ppm"), + (0x33DA, "M", "pr"), + (0x33DB, "M", "sr"), + (0x33DC, "M", "sv"), + (0x33DD, "M", "wb"), + (0x33DE, "M", "v∕m"), + (0x33DF, "M", "a∕m"), + (0x33E0, "M", "1日"), + (0x33E1, "M", "2日"), + ] + + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33E2, "M", "3日"), + (0x33E3, "M", "4日"), + (0x33E4, "M", "5日"), + (0x33E5, "M", "6日"), + (0x33E6, "M", "7日"), + (0x33E7, "M", "8日"), + (0x33E8, "M", "9日"), + (0x33E9, "M", "10日"), + (0x33EA, "M", "11日"), + (0x33EB, "M", "12日"), + (0x33EC, "M", "13日"), + (0x33ED, "M", "14日"), + (0x33EE, "M", "15日"), + (0x33EF, "M", "16日"), + (0x33F0, "M", "17日"), + (0x33F1, "M", "18日"), + (0x33F2, "M", "19日"), + (0x33F3, "M", "20日"), + (0x33F4, "M", "21日"), + (0x33F5, "M", "22日"), + (0x33F6, "M", "23日"), + (0x33F7, "M", "24日"), + (0x33F8, "M", "25日"), + (0x33F9, "M", "26日"), + (0x33FA, "M", "27日"), + (0x33FB, "M", "28日"), + (0x33FC, "M", "29日"), + (0x33FD, "M", "30日"), + (0x33FE, "M", "31日"), + (0x33FF, "M", "gal"), + (0x3400, "V"), + (0xA48D, "X"), + (0xA490, "V"), + (0xA4C7, "X"), + (0xA4D0, "V"), + (0xA62C, "X"), + (0xA640, "M", "ꙁ"), + (0xA641, "V"), + (0xA642, "M", "ꙃ"), + (0xA643, "V"), + (0xA644, "M", "ꙅ"), + (0xA645, "V"), + (0xA646, "M", "ꙇ"), + (0xA647, "V"), + (0xA648, "M", "ꙉ"), + (0xA649, "V"), + (0xA64A, "M", "ꙋ"), + (0xA64B, "V"), + (0xA64C, "M", "ꙍ"), + (0xA64D, "V"), + (0xA64E, "M", "ꙏ"), + (0xA64F, "V"), + (0xA650, "M", "ꙑ"), + (0xA651, "V"), + (0xA652, "M", "ꙓ"), + (0xA653, "V"), + (0xA654, "M", "ꙕ"), + (0xA655, "V"), + (0xA656, "M", "ꙗ"), + (0xA657, "V"), + (0xA658, "M", "ꙙ"), + (0xA659, "V"), + (0xA65A, "M", "ꙛ"), + (0xA65B, "V"), + (0xA65C, "M", "ꙝ"), + (0xA65D, "V"), + (0xA65E, "M", "ꙟ"), + (0xA65F, "V"), + (0xA660, "M", "ꙡ"), + (0xA661, "V"), + (0xA662, "M", "ꙣ"), + (0xA663, "V"), + (0xA664, "M", "ꙥ"), + (0xA665, "V"), + (0xA666, "M", "ꙧ"), + (0xA667, "V"), + (0xA668, "M", "ꙩ"), + (0xA669, "V"), + (0xA66A, "M", "ꙫ"), + (0xA66B, "V"), + (0xA66C, "M", "ꙭ"), + (0xA66D, "V"), + (0xA680, "M", "ꚁ"), + (0xA681, "V"), + (0xA682, "M", "ꚃ"), + (0xA683, "V"), + (0xA684, "M", "ꚅ"), + (0xA685, "V"), + (0xA686, "M", "ꚇ"), + (0xA687, "V"), + (0xA688, "M", "ꚉ"), + (0xA689, "V"), + (0xA68A, "M", "ꚋ"), + (0xA68B, "V"), + (0xA68C, "M", "ꚍ"), + (0xA68D, "V"), + (0xA68E, "M", "ꚏ"), + (0xA68F, "V"), + (0xA690, "M", "ꚑ"), + (0xA691, "V"), + ] + + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA692, "M", "ꚓ"), + (0xA693, "V"), + (0xA694, "M", "ꚕ"), + (0xA695, "V"), + (0xA696, "M", "ꚗ"), + (0xA697, "V"), + (0xA698, "M", "ꚙ"), + (0xA699, "V"), + (0xA69A, "M", "ꚛ"), + (0xA69B, "V"), + (0xA69C, "M", "ъ"), + (0xA69D, "M", "ь"), + (0xA69E, "V"), + (0xA6F8, "X"), + (0xA700, "V"), + (0xA722, "M", "ꜣ"), + (0xA723, "V"), + (0xA724, "M", "ꜥ"), + (0xA725, "V"), + (0xA726, "M", "ꜧ"), + (0xA727, "V"), + (0xA728, "M", "ꜩ"), + (0xA729, "V"), + (0xA72A, "M", "ꜫ"), + (0xA72B, "V"), + (0xA72C, "M", "ꜭ"), + (0xA72D, "V"), + (0xA72E, "M", "ꜯ"), + (0xA72F, "V"), + (0xA732, "M", "ꜳ"), + (0xA733, "V"), + (0xA734, "M", "ꜵ"), + (0xA735, "V"), + (0xA736, "M", "ꜷ"), + (0xA737, "V"), + (0xA738, "M", "ꜹ"), + (0xA739, "V"), + (0xA73A, "M", "ꜻ"), + 
(0xA73B, "V"), + (0xA73C, "M", "ꜽ"), + (0xA73D, "V"), + (0xA73E, "M", "ꜿ"), + (0xA73F, "V"), + (0xA740, "M", "ꝁ"), + (0xA741, "V"), + (0xA742, "M", "ꝃ"), + (0xA743, "V"), + (0xA744, "M", "ꝅ"), + (0xA745, "V"), + (0xA746, "M", "ꝇ"), + (0xA747, "V"), + (0xA748, "M", "ꝉ"), + (0xA749, "V"), + (0xA74A, "M", "ꝋ"), + (0xA74B, "V"), + (0xA74C, "M", "ꝍ"), + (0xA74D, "V"), + (0xA74E, "M", "ꝏ"), + (0xA74F, "V"), + (0xA750, "M", "ꝑ"), + (0xA751, "V"), + (0xA752, "M", "ꝓ"), + (0xA753, "V"), + (0xA754, "M", "ꝕ"), + (0xA755, "V"), + (0xA756, "M", "ꝗ"), + (0xA757, "V"), + (0xA758, "M", "ꝙ"), + (0xA759, "V"), + (0xA75A, "M", "ꝛ"), + (0xA75B, "V"), + (0xA75C, "M", "ꝝ"), + (0xA75D, "V"), + (0xA75E, "M", "ꝟ"), + (0xA75F, "V"), + (0xA760, "M", "ꝡ"), + (0xA761, "V"), + (0xA762, "M", "ꝣ"), + (0xA763, "V"), + (0xA764, "M", "ꝥ"), + (0xA765, "V"), + (0xA766, "M", "ꝧ"), + (0xA767, "V"), + (0xA768, "M", "ꝩ"), + (0xA769, "V"), + (0xA76A, "M", "ꝫ"), + (0xA76B, "V"), + (0xA76C, "M", "ꝭ"), + (0xA76D, "V"), + (0xA76E, "M", "ꝯ"), + (0xA76F, "V"), + (0xA770, "M", "ꝯ"), + (0xA771, "V"), + (0xA779, "M", "ꝺ"), + (0xA77A, "V"), + (0xA77B, "M", "ꝼ"), + (0xA77C, "V"), + (0xA77D, "M", "ᵹ"), + (0xA77E, "M", "ꝿ"), + (0xA77F, "V"), + ] + + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA780, "M", "ꞁ"), + (0xA781, "V"), + (0xA782, "M", "ꞃ"), + (0xA783, "V"), + (0xA784, "M", "ꞅ"), + (0xA785, "V"), + (0xA786, "M", "ꞇ"), + (0xA787, "V"), + (0xA78B, "M", "ꞌ"), + (0xA78C, "V"), + (0xA78D, "M", "ɥ"), + (0xA78E, "V"), + (0xA790, "M", "ꞑ"), + (0xA791, "V"), + (0xA792, "M", "ꞓ"), + (0xA793, "V"), + (0xA796, "M", "ꞗ"), + (0xA797, "V"), + (0xA798, "M", "ꞙ"), + (0xA799, "V"), + (0xA79A, "M", "ꞛ"), + (0xA79B, "V"), + (0xA79C, "M", "ꞝ"), + (0xA79D, "V"), + (0xA79E, "M", "ꞟ"), + (0xA79F, "V"), + (0xA7A0, "M", "ꞡ"), + (0xA7A1, "V"), + (0xA7A2, "M", "ꞣ"), + (0xA7A3, "V"), + (0xA7A4, "M", "ꞥ"), + (0xA7A5, "V"), + (0xA7A6, "M", "ꞧ"), + (0xA7A7, "V"), + (0xA7A8, "M", "ꞩ"), + (0xA7A9, "V"), + (0xA7AA, "M", "ɦ"), + (0xA7AB, "M", "ɜ"), + (0xA7AC, "M", "ɡ"), + (0xA7AD, "M", "ɬ"), + (0xA7AE, "M", "ɪ"), + (0xA7AF, "V"), + (0xA7B0, "M", "ʞ"), + (0xA7B1, "M", "ʇ"), + (0xA7B2, "M", "ʝ"), + (0xA7B3, "M", "ꭓ"), + (0xA7B4, "M", "ꞵ"), + (0xA7B5, "V"), + (0xA7B6, "M", "ꞷ"), + (0xA7B7, "V"), + (0xA7B8, "M", "ꞹ"), + (0xA7B9, "V"), + (0xA7BA, "M", "ꞻ"), + (0xA7BB, "V"), + (0xA7BC, "M", "ꞽ"), + (0xA7BD, "V"), + (0xA7BE, "M", "ꞿ"), + (0xA7BF, "V"), + (0xA7C0, "M", "ꟁ"), + (0xA7C1, "V"), + (0xA7C2, "M", "ꟃ"), + (0xA7C3, "V"), + (0xA7C4, "M", "ꞔ"), + (0xA7C5, "M", "ʂ"), + (0xA7C6, "M", "ᶎ"), + (0xA7C7, "M", "ꟈ"), + (0xA7C8, "V"), + (0xA7C9, "M", "ꟊ"), + (0xA7CA, "V"), + (0xA7CB, "X"), + (0xA7D0, "M", "ꟑ"), + (0xA7D1, "V"), + (0xA7D2, "X"), + (0xA7D3, "V"), + (0xA7D4, "X"), + (0xA7D5, "V"), + (0xA7D6, "M", "ꟗ"), + (0xA7D7, "V"), + (0xA7D8, "M", "ꟙ"), + (0xA7D9, "V"), + (0xA7DA, "X"), + (0xA7F2, "M", "c"), + (0xA7F3, "M", "f"), + (0xA7F4, "M", "q"), + (0xA7F5, "M", "ꟶ"), + (0xA7F6, "V"), + (0xA7F8, "M", "ħ"), + (0xA7F9, "M", "œ"), + (0xA7FA, "V"), + (0xA82D, "X"), + (0xA830, "V"), + (0xA83A, "X"), + (0xA840, "V"), + (0xA878, "X"), + (0xA880, "V"), + (0xA8C6, "X"), + (0xA8CE, "V"), + (0xA8DA, "X"), + (0xA8E0, "V"), + (0xA954, "X"), + ] + + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA95F, "V"), + (0xA97D, "X"), + (0xA980, "V"), + (0xA9CE, "X"), + (0xA9CF, "V"), + (0xA9DA, "X"), + (0xA9DE, "V"), + (0xA9FF, "X"), + (0xAA00, "V"), + (0xAA37, "X"), + (0xAA40, "V"), + (0xAA4E, "X"), + (0xAA50, "V"), + 
(0xAA5A, "X"), + (0xAA5C, "V"), + (0xAAC3, "X"), + (0xAADB, "V"), + (0xAAF7, "X"), + (0xAB01, "V"), + (0xAB07, "X"), + (0xAB09, "V"), + (0xAB0F, "X"), + (0xAB11, "V"), + (0xAB17, "X"), + (0xAB20, "V"), + (0xAB27, "X"), + (0xAB28, "V"), + (0xAB2F, "X"), + (0xAB30, "V"), + (0xAB5C, "M", "ꜧ"), + (0xAB5D, "M", "ꬷ"), + (0xAB5E, "M", "ɫ"), + (0xAB5F, "M", "ꭒ"), + (0xAB60, "V"), + (0xAB69, "M", "ʍ"), + (0xAB6A, "V"), + (0xAB6C, "X"), + (0xAB70, "M", "Ꭰ"), + (0xAB71, "M", "Ꭱ"), + (0xAB72, "M", "Ꭲ"), + (0xAB73, "M", "Ꭳ"), + (0xAB74, "M", "Ꭴ"), + (0xAB75, "M", "Ꭵ"), + (0xAB76, "M", "Ꭶ"), + (0xAB77, "M", "Ꭷ"), + (0xAB78, "M", "Ꭸ"), + (0xAB79, "M", "Ꭹ"), + (0xAB7A, "M", "Ꭺ"), + (0xAB7B, "M", "Ꭻ"), + (0xAB7C, "M", "Ꭼ"), + (0xAB7D, "M", "Ꭽ"), + (0xAB7E, "M", "Ꭾ"), + (0xAB7F, "M", "Ꭿ"), + (0xAB80, "M", "Ꮀ"), + (0xAB81, "M", "Ꮁ"), + (0xAB82, "M", "Ꮂ"), + (0xAB83, "M", "Ꮃ"), + (0xAB84, "M", "Ꮄ"), + (0xAB85, "M", "Ꮅ"), + (0xAB86, "M", "Ꮆ"), + (0xAB87, "M", "Ꮇ"), + (0xAB88, "M", "Ꮈ"), + (0xAB89, "M", "Ꮉ"), + (0xAB8A, "M", "Ꮊ"), + (0xAB8B, "M", "Ꮋ"), + (0xAB8C, "M", "Ꮌ"), + (0xAB8D, "M", "Ꮍ"), + (0xAB8E, "M", "Ꮎ"), + (0xAB8F, "M", "Ꮏ"), + (0xAB90, "M", "Ꮐ"), + (0xAB91, "M", "Ꮑ"), + (0xAB92, "M", "Ꮒ"), + (0xAB93, "M", "Ꮓ"), + (0xAB94, "M", "Ꮔ"), + (0xAB95, "M", "Ꮕ"), + (0xAB96, "M", "Ꮖ"), + (0xAB97, "M", "Ꮗ"), + (0xAB98, "M", "Ꮘ"), + (0xAB99, "M", "Ꮙ"), + (0xAB9A, "M", "Ꮚ"), + (0xAB9B, "M", "Ꮛ"), + (0xAB9C, "M", "Ꮜ"), + (0xAB9D, "M", "Ꮝ"), + (0xAB9E, "M", "Ꮞ"), + (0xAB9F, "M", "Ꮟ"), + (0xABA0, "M", "Ꮠ"), + (0xABA1, "M", "Ꮡ"), + (0xABA2, "M", "Ꮢ"), + (0xABA3, "M", "Ꮣ"), + (0xABA4, "M", "Ꮤ"), + (0xABA5, "M", "Ꮥ"), + (0xABA6, "M", "Ꮦ"), + (0xABA7, "M", "Ꮧ"), + (0xABA8, "M", "Ꮨ"), + (0xABA9, "M", "Ꮩ"), + (0xABAA, "M", "Ꮪ"), + (0xABAB, "M", "Ꮫ"), + (0xABAC, "M", "Ꮬ"), + (0xABAD, "M", "Ꮭ"), + (0xABAE, "M", "Ꮮ"), + ] + + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAF, "M", "Ꮯ"), + (0xABB0, "M", "Ꮰ"), + (0xABB1, "M", "Ꮱ"), + (0xABB2, "M", "Ꮲ"), + (0xABB3, "M", "Ꮳ"), + (0xABB4, "M", "Ꮴ"), + (0xABB5, "M", "Ꮵ"), + (0xABB6, "M", "Ꮶ"), + (0xABB7, "M", "Ꮷ"), + (0xABB8, "M", "Ꮸ"), + (0xABB9, "M", "Ꮹ"), + (0xABBA, "M", "Ꮺ"), + (0xABBB, "M", "Ꮻ"), + (0xABBC, "M", "Ꮼ"), + (0xABBD, "M", "Ꮽ"), + (0xABBE, "M", "Ꮾ"), + (0xABBF, "M", "Ꮿ"), + (0xABC0, "V"), + (0xABEE, "X"), + (0xABF0, "V"), + (0xABFA, "X"), + (0xAC00, "V"), + (0xD7A4, "X"), + (0xD7B0, "V"), + (0xD7C7, "X"), + (0xD7CB, "V"), + (0xD7FC, "X"), + (0xF900, "M", "豈"), + (0xF901, "M", "更"), + (0xF902, "M", "車"), + (0xF903, "M", "賈"), + (0xF904, "M", "滑"), + (0xF905, "M", "串"), + (0xF906, "M", "句"), + (0xF907, "M", "龜"), + (0xF909, "M", "契"), + (0xF90A, "M", "金"), + (0xF90B, "M", "喇"), + (0xF90C, "M", "奈"), + (0xF90D, "M", "懶"), + (0xF90E, "M", "癩"), + (0xF90F, "M", "羅"), + (0xF910, "M", "蘿"), + (0xF911, "M", "螺"), + (0xF912, "M", "裸"), + (0xF913, "M", "邏"), + (0xF914, "M", "樂"), + (0xF915, "M", "洛"), + (0xF916, "M", "烙"), + (0xF917, "M", "珞"), + (0xF918, "M", "落"), + (0xF919, "M", "酪"), + (0xF91A, "M", "駱"), + (0xF91B, "M", "亂"), + (0xF91C, "M", "卵"), + (0xF91D, "M", "欄"), + (0xF91E, "M", "爛"), + (0xF91F, "M", "蘭"), + (0xF920, "M", "鸞"), + (0xF921, "M", "嵐"), + (0xF922, "M", "濫"), + (0xF923, "M", "藍"), + (0xF924, "M", "襤"), + (0xF925, "M", "拉"), + (0xF926, "M", "臘"), + (0xF927, "M", "蠟"), + (0xF928, "M", "廊"), + (0xF929, "M", "朗"), + (0xF92A, "M", "浪"), + (0xF92B, "M", "狼"), + (0xF92C, "M", "郎"), + (0xF92D, "M", "來"), + (0xF92E, "M", "冷"), + (0xF92F, "M", "勞"), + (0xF930, "M", "擄"), + (0xF931, "M", "櫓"), + (0xF932, "M", "爐"), + 
(0xF933, "M", "盧"), + (0xF934, "M", "老"), + (0xF935, "M", "蘆"), + (0xF936, "M", "虜"), + (0xF937, "M", "路"), + (0xF938, "M", "露"), + (0xF939, "M", "魯"), + (0xF93A, "M", "鷺"), + (0xF93B, "M", "碌"), + (0xF93C, "M", "祿"), + (0xF93D, "M", "綠"), + (0xF93E, "M", "菉"), + (0xF93F, "M", "錄"), + (0xF940, "M", "鹿"), + (0xF941, "M", "論"), + (0xF942, "M", "壟"), + (0xF943, "M", "弄"), + (0xF944, "M", "籠"), + (0xF945, "M", "聾"), + (0xF946, "M", "牢"), + (0xF947, "M", "磊"), + (0xF948, "M", "賂"), + (0xF949, "M", "雷"), + ] + + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF94A, "M", "壘"), + (0xF94B, "M", "屢"), + (0xF94C, "M", "樓"), + (0xF94D, "M", "淚"), + (0xF94E, "M", "漏"), + (0xF94F, "M", "累"), + (0xF950, "M", "縷"), + (0xF951, "M", "陋"), + (0xF952, "M", "勒"), + (0xF953, "M", "肋"), + (0xF954, "M", "凜"), + (0xF955, "M", "凌"), + (0xF956, "M", "稜"), + (0xF957, "M", "綾"), + (0xF958, "M", "菱"), + (0xF959, "M", "陵"), + (0xF95A, "M", "讀"), + (0xF95B, "M", "拏"), + (0xF95C, "M", "樂"), + (0xF95D, "M", "諾"), + (0xF95E, "M", "丹"), + (0xF95F, "M", "寧"), + (0xF960, "M", "怒"), + (0xF961, "M", "率"), + (0xF962, "M", "異"), + (0xF963, "M", "北"), + (0xF964, "M", "磻"), + (0xF965, "M", "便"), + (0xF966, "M", "復"), + (0xF967, "M", "不"), + (0xF968, "M", "泌"), + (0xF969, "M", "數"), + (0xF96A, "M", "索"), + (0xF96B, "M", "參"), + (0xF96C, "M", "塞"), + (0xF96D, "M", "省"), + (0xF96E, "M", "葉"), + (0xF96F, "M", "說"), + (0xF970, "M", "殺"), + (0xF971, "M", "辰"), + (0xF972, "M", "沈"), + (0xF973, "M", "拾"), + (0xF974, "M", "若"), + (0xF975, "M", "掠"), + (0xF976, "M", "略"), + (0xF977, "M", "亮"), + (0xF978, "M", "兩"), + (0xF979, "M", "凉"), + (0xF97A, "M", "梁"), + (0xF97B, "M", "糧"), + (0xF97C, "M", "良"), + (0xF97D, "M", "諒"), + (0xF97E, "M", "量"), + (0xF97F, "M", "勵"), + (0xF980, "M", "呂"), + (0xF981, "M", "女"), + (0xF982, "M", "廬"), + (0xF983, "M", "旅"), + (0xF984, "M", "濾"), + (0xF985, "M", "礪"), + (0xF986, "M", "閭"), + (0xF987, "M", "驪"), + (0xF988, "M", "麗"), + (0xF989, "M", "黎"), + (0xF98A, "M", "力"), + (0xF98B, "M", "曆"), + (0xF98C, "M", "歷"), + (0xF98D, "M", "轢"), + (0xF98E, "M", "年"), + (0xF98F, "M", "憐"), + (0xF990, "M", "戀"), + (0xF991, "M", "撚"), + (0xF992, "M", "漣"), + (0xF993, "M", "煉"), + (0xF994, "M", "璉"), + (0xF995, "M", "秊"), + (0xF996, "M", "練"), + (0xF997, "M", "聯"), + (0xF998, "M", "輦"), + (0xF999, "M", "蓮"), + (0xF99A, "M", "連"), + (0xF99B, "M", "鍊"), + (0xF99C, "M", "列"), + (0xF99D, "M", "劣"), + (0xF99E, "M", "咽"), + (0xF99F, "M", "烈"), + (0xF9A0, "M", "裂"), + (0xF9A1, "M", "說"), + (0xF9A2, "M", "廉"), + (0xF9A3, "M", "念"), + (0xF9A4, "M", "捻"), + (0xF9A5, "M", "殮"), + (0xF9A6, "M", "簾"), + (0xF9A7, "M", "獵"), + (0xF9A8, "M", "令"), + (0xF9A9, "M", "囹"), + (0xF9AA, "M", "寧"), + (0xF9AB, "M", "嶺"), + (0xF9AC, "M", "怜"), + (0xF9AD, "M", "玲"), + ] + + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AE, "M", "瑩"), + (0xF9AF, "M", "羚"), + (0xF9B0, "M", "聆"), + (0xF9B1, "M", "鈴"), + (0xF9B2, "M", "零"), + (0xF9B3, "M", "靈"), + (0xF9B4, "M", "領"), + (0xF9B5, "M", "例"), + (0xF9B6, "M", "禮"), + (0xF9B7, "M", "醴"), + (0xF9B8, "M", "隸"), + (0xF9B9, "M", "惡"), + (0xF9BA, "M", "了"), + (0xF9BB, "M", "僚"), + (0xF9BC, "M", "寮"), + (0xF9BD, "M", "尿"), + (0xF9BE, "M", "料"), + (0xF9BF, "M", "樂"), + (0xF9C0, "M", "燎"), + (0xF9C1, "M", "療"), + (0xF9C2, "M", "蓼"), + (0xF9C3, "M", "遼"), + (0xF9C4, "M", "龍"), + (0xF9C5, "M", "暈"), + (0xF9C6, "M", "阮"), + (0xF9C7, "M", "劉"), + (0xF9C8, "M", "杻"), + (0xF9C9, "M", "柳"), + (0xF9CA, "M", "流"), + (0xF9CB, "M", "溜"), + (0xF9CC, 
"M", "琉"), + (0xF9CD, "M", "留"), + (0xF9CE, "M", "硫"), + (0xF9CF, "M", "紐"), + (0xF9D0, "M", "類"), + (0xF9D1, "M", "六"), + (0xF9D2, "M", "戮"), + (0xF9D3, "M", "陸"), + (0xF9D4, "M", "倫"), + (0xF9D5, "M", "崙"), + (0xF9D6, "M", "淪"), + (0xF9D7, "M", "輪"), + (0xF9D8, "M", "律"), + (0xF9D9, "M", "慄"), + (0xF9DA, "M", "栗"), + (0xF9DB, "M", "率"), + (0xF9DC, "M", "隆"), + (0xF9DD, "M", "利"), + (0xF9DE, "M", "吏"), + (0xF9DF, "M", "履"), + (0xF9E0, "M", "易"), + (0xF9E1, "M", "李"), + (0xF9E2, "M", "梨"), + (0xF9E3, "M", "泥"), + (0xF9E4, "M", "理"), + (0xF9E5, "M", "痢"), + (0xF9E6, "M", "罹"), + (0xF9E7, "M", "裏"), + (0xF9E8, "M", "裡"), + (0xF9E9, "M", "里"), + (0xF9EA, "M", "離"), + (0xF9EB, "M", "匿"), + (0xF9EC, "M", "溺"), + (0xF9ED, "M", "吝"), + (0xF9EE, "M", "燐"), + (0xF9EF, "M", "璘"), + (0xF9F0, "M", "藺"), + (0xF9F1, "M", "隣"), + (0xF9F2, "M", "鱗"), + (0xF9F3, "M", "麟"), + (0xF9F4, "M", "林"), + (0xF9F5, "M", "淋"), + (0xF9F6, "M", "臨"), + (0xF9F7, "M", "立"), + (0xF9F8, "M", "笠"), + (0xF9F9, "M", "粒"), + (0xF9FA, "M", "狀"), + (0xF9FB, "M", "炙"), + (0xF9FC, "M", "識"), + (0xF9FD, "M", "什"), + (0xF9FE, "M", "茶"), + (0xF9FF, "M", "刺"), + (0xFA00, "M", "切"), + (0xFA01, "M", "度"), + (0xFA02, "M", "拓"), + (0xFA03, "M", "糖"), + (0xFA04, "M", "宅"), + (0xFA05, "M", "洞"), + (0xFA06, "M", "暴"), + (0xFA07, "M", "輻"), + (0xFA08, "M", "行"), + (0xFA09, "M", "降"), + (0xFA0A, "M", "見"), + (0xFA0B, "M", "廓"), + (0xFA0C, "M", "兀"), + (0xFA0D, "M", "嗀"), + (0xFA0E, "V"), + (0xFA10, "M", "塚"), + (0xFA11, "V"), + (0xFA12, "M", "晴"), + ] + + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA13, "V"), + (0xFA15, "M", "凞"), + (0xFA16, "M", "猪"), + (0xFA17, "M", "益"), + (0xFA18, "M", "礼"), + (0xFA19, "M", "神"), + (0xFA1A, "M", "祥"), + (0xFA1B, "M", "福"), + (0xFA1C, "M", "靖"), + (0xFA1D, "M", "精"), + (0xFA1E, "M", "羽"), + (0xFA1F, "V"), + (0xFA20, "M", "蘒"), + (0xFA21, "V"), + (0xFA22, "M", "諸"), + (0xFA23, "V"), + (0xFA25, "M", "逸"), + (0xFA26, "M", "都"), + (0xFA27, "V"), + (0xFA2A, "M", "飯"), + (0xFA2B, "M", "飼"), + (0xFA2C, "M", "館"), + (0xFA2D, "M", "鶴"), + (0xFA2E, "M", "郞"), + (0xFA2F, "M", "隷"), + (0xFA30, "M", "侮"), + (0xFA31, "M", "僧"), + (0xFA32, "M", "免"), + (0xFA33, "M", "勉"), + (0xFA34, "M", "勤"), + (0xFA35, "M", "卑"), + (0xFA36, "M", "喝"), + (0xFA37, "M", "嘆"), + (0xFA38, "M", "器"), + (0xFA39, "M", "塀"), + (0xFA3A, "M", "墨"), + (0xFA3B, "M", "層"), + (0xFA3C, "M", "屮"), + (0xFA3D, "M", "悔"), + (0xFA3E, "M", "慨"), + (0xFA3F, "M", "憎"), + (0xFA40, "M", "懲"), + (0xFA41, "M", "敏"), + (0xFA42, "M", "既"), + (0xFA43, "M", "暑"), + (0xFA44, "M", "梅"), + (0xFA45, "M", "海"), + (0xFA46, "M", "渚"), + (0xFA47, "M", "漢"), + (0xFA48, "M", "煮"), + (0xFA49, "M", "爫"), + (0xFA4A, "M", "琢"), + (0xFA4B, "M", "碑"), + (0xFA4C, "M", "社"), + (0xFA4D, "M", "祉"), + (0xFA4E, "M", "祈"), + (0xFA4F, "M", "祐"), + (0xFA50, "M", "祖"), + (0xFA51, "M", "祝"), + (0xFA52, "M", "禍"), + (0xFA53, "M", "禎"), + (0xFA54, "M", "穀"), + (0xFA55, "M", "突"), + (0xFA56, "M", "節"), + (0xFA57, "M", "練"), + (0xFA58, "M", "縉"), + (0xFA59, "M", "繁"), + (0xFA5A, "M", "署"), + (0xFA5B, "M", "者"), + (0xFA5C, "M", "臭"), + (0xFA5D, "M", "艹"), + (0xFA5F, "M", "著"), + (0xFA60, "M", "褐"), + (0xFA61, "M", "視"), + (0xFA62, "M", "謁"), + (0xFA63, "M", "謹"), + (0xFA64, "M", "賓"), + (0xFA65, "M", "贈"), + (0xFA66, "M", "辶"), + (0xFA67, "M", "逸"), + (0xFA68, "M", "難"), + (0xFA69, "M", "響"), + (0xFA6A, "M", "頻"), + (0xFA6B, "M", "恵"), + (0xFA6C, "M", "𤋮"), + (0xFA6D, "M", "舘"), + (0xFA6E, "X"), + (0xFA70, "M", "並"), + (0xFA71, "M", "况"), + (0xFA72, "M", 
"全"), + (0xFA73, "M", "侀"), + (0xFA74, "M", "充"), + (0xFA75, "M", "冀"), + (0xFA76, "M", "勇"), + (0xFA77, "M", "勺"), + (0xFA78, "M", "喝"), + (0xFA79, "M", "啕"), + (0xFA7A, "M", "喙"), + (0xFA7B, "M", "嗢"), + (0xFA7C, "M", "塚"), + ] + + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA7D, "M", "墳"), + (0xFA7E, "M", "奄"), + (0xFA7F, "M", "奔"), + (0xFA80, "M", "婢"), + (0xFA81, "M", "嬨"), + (0xFA82, "M", "廒"), + (0xFA83, "M", "廙"), + (0xFA84, "M", "彩"), + (0xFA85, "M", "徭"), + (0xFA86, "M", "惘"), + (0xFA87, "M", "慎"), + (0xFA88, "M", "愈"), + (0xFA89, "M", "憎"), + (0xFA8A, "M", "慠"), + (0xFA8B, "M", "懲"), + (0xFA8C, "M", "戴"), + (0xFA8D, "M", "揄"), + (0xFA8E, "M", "搜"), + (0xFA8F, "M", "摒"), + (0xFA90, "M", "敖"), + (0xFA91, "M", "晴"), + (0xFA92, "M", "朗"), + (0xFA93, "M", "望"), + (0xFA94, "M", "杖"), + (0xFA95, "M", "歹"), + (0xFA96, "M", "殺"), + (0xFA97, "M", "流"), + (0xFA98, "M", "滛"), + (0xFA99, "M", "滋"), + (0xFA9A, "M", "漢"), + (0xFA9B, "M", "瀞"), + (0xFA9C, "M", "煮"), + (0xFA9D, "M", "瞧"), + (0xFA9E, "M", "爵"), + (0xFA9F, "M", "犯"), + (0xFAA0, "M", "猪"), + (0xFAA1, "M", "瑱"), + (0xFAA2, "M", "甆"), + (0xFAA3, "M", "画"), + (0xFAA4, "M", "瘝"), + (0xFAA5, "M", "瘟"), + (0xFAA6, "M", "益"), + (0xFAA7, "M", "盛"), + (0xFAA8, "M", "直"), + (0xFAA9, "M", "睊"), + (0xFAAA, "M", "着"), + (0xFAAB, "M", "磌"), + (0xFAAC, "M", "窱"), + (0xFAAD, "M", "節"), + (0xFAAE, "M", "类"), + (0xFAAF, "M", "絛"), + (0xFAB0, "M", "練"), + (0xFAB1, "M", "缾"), + (0xFAB2, "M", "者"), + (0xFAB3, "M", "荒"), + (0xFAB4, "M", "華"), + (0xFAB5, "M", "蝹"), + (0xFAB6, "M", "襁"), + (0xFAB7, "M", "覆"), + (0xFAB8, "M", "視"), + (0xFAB9, "M", "調"), + (0xFABA, "M", "諸"), + (0xFABB, "M", "請"), + (0xFABC, "M", "謁"), + (0xFABD, "M", "諾"), + (0xFABE, "M", "諭"), + (0xFABF, "M", "謹"), + (0xFAC0, "M", "變"), + (0xFAC1, "M", "贈"), + (0xFAC2, "M", "輸"), + (0xFAC3, "M", "遲"), + (0xFAC4, "M", "醙"), + (0xFAC5, "M", "鉶"), + (0xFAC6, "M", "陼"), + (0xFAC7, "M", "難"), + (0xFAC8, "M", "靖"), + (0xFAC9, "M", "韛"), + (0xFACA, "M", "響"), + (0xFACB, "M", "頋"), + (0xFACC, "M", "頻"), + (0xFACD, "M", "鬒"), + (0xFACE, "M", "龜"), + (0xFACF, "M", "𢡊"), + (0xFAD0, "M", "𢡄"), + (0xFAD1, "M", "𣏕"), + (0xFAD2, "M", "㮝"), + (0xFAD3, "M", "䀘"), + (0xFAD4, "M", "䀹"), + (0xFAD5, "M", "𥉉"), + (0xFAD6, "M", "𥳐"), + (0xFAD7, "M", "𧻓"), + (0xFAD8, "M", "齃"), + (0xFAD9, "M", "龎"), + (0xFADA, "X"), + (0xFB00, "M", "ff"), + (0xFB01, "M", "fi"), + (0xFB02, "M", "fl"), + (0xFB03, "M", "ffi"), + (0xFB04, "M", "ffl"), + (0xFB05, "M", "st"), + ] + + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB07, "X"), + (0xFB13, "M", "մն"), + (0xFB14, "M", "մե"), + (0xFB15, "M", "մի"), + (0xFB16, "M", "վն"), + (0xFB17, "M", "մխ"), + (0xFB18, "X"), + (0xFB1D, "M", "יִ"), + (0xFB1E, "V"), + (0xFB1F, "M", "ײַ"), + (0xFB20, "M", "ע"), + (0xFB21, "M", "א"), + (0xFB22, "M", "ד"), + (0xFB23, "M", "ה"), + (0xFB24, "M", "כ"), + (0xFB25, "M", "ל"), + (0xFB26, "M", "ם"), + (0xFB27, "M", "ר"), + (0xFB28, "M", "ת"), + (0xFB29, "3", "+"), + (0xFB2A, "M", "שׁ"), + (0xFB2B, "M", "שׂ"), + (0xFB2C, "M", "שּׁ"), + (0xFB2D, "M", "שּׂ"), + (0xFB2E, "M", "אַ"), + (0xFB2F, "M", "אָ"), + (0xFB30, "M", "אּ"), + (0xFB31, "M", "בּ"), + (0xFB32, "M", "גּ"), + (0xFB33, "M", "דּ"), + (0xFB34, "M", "הּ"), + (0xFB35, "M", "וּ"), + (0xFB36, "M", "זּ"), + (0xFB37, "X"), + (0xFB38, "M", "טּ"), + (0xFB39, "M", "יּ"), + (0xFB3A, "M", "ךּ"), + (0xFB3B, "M", "כּ"), + (0xFB3C, "M", "לּ"), + (0xFB3D, "X"), + (0xFB3E, "M", "מּ"), + (0xFB3F, "X"), + (0xFB40, "M", "נּ"), + 
(0xFB41, "M", "סּ"), + (0xFB42, "X"), + (0xFB43, "M", "ףּ"), + (0xFB44, "M", "פּ"), + (0xFB45, "X"), + (0xFB46, "M", "צּ"), + (0xFB47, "M", "קּ"), + (0xFB48, "M", "רּ"), + (0xFB49, "M", "שּ"), + (0xFB4A, "M", "תּ"), + (0xFB4B, "M", "וֹ"), + (0xFB4C, "M", "בֿ"), + (0xFB4D, "M", "כֿ"), + (0xFB4E, "M", "פֿ"), + (0xFB4F, "M", "אל"), + (0xFB50, "M", "ٱ"), + (0xFB52, "M", "ٻ"), + (0xFB56, "M", "پ"), + (0xFB5A, "M", "ڀ"), + (0xFB5E, "M", "ٺ"), + (0xFB62, "M", "ٿ"), + (0xFB66, "M", "ٹ"), + (0xFB6A, "M", "ڤ"), + (0xFB6E, "M", "ڦ"), + (0xFB72, "M", "ڄ"), + (0xFB76, "M", "ڃ"), + (0xFB7A, "M", "چ"), + (0xFB7E, "M", "ڇ"), + (0xFB82, "M", "ڍ"), + (0xFB84, "M", "ڌ"), + (0xFB86, "M", "ڎ"), + (0xFB88, "M", "ڈ"), + (0xFB8A, "M", "ژ"), + (0xFB8C, "M", "ڑ"), + (0xFB8E, "M", "ک"), + (0xFB92, "M", "گ"), + (0xFB96, "M", "ڳ"), + (0xFB9A, "M", "ڱ"), + (0xFB9E, "M", "ں"), + (0xFBA0, "M", "ڻ"), + (0xFBA4, "M", "ۀ"), + (0xFBA6, "M", "ہ"), + (0xFBAA, "M", "ھ"), + (0xFBAE, "M", "ے"), + (0xFBB0, "M", "ۓ"), + (0xFBB2, "V"), + (0xFBC3, "X"), + (0xFBD3, "M", "ڭ"), + (0xFBD7, "M", "ۇ"), + (0xFBD9, "M", "ۆ"), + (0xFBDB, "M", "ۈ"), + (0xFBDD, "M", "ۇٴ"), + (0xFBDE, "M", "ۋ"), + (0xFBE0, "M", "ۅ"), + (0xFBE2, "M", "ۉ"), + (0xFBE4, "M", "ې"), + (0xFBE8, "M", "ى"), + ] + + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBEA, "M", "ئا"), + (0xFBEC, "M", "ئە"), + (0xFBEE, "M", "ئو"), + (0xFBF0, "M", "ئۇ"), + (0xFBF2, "M", "ئۆ"), + (0xFBF4, "M", "ئۈ"), + (0xFBF6, "M", "ئې"), + (0xFBF9, "M", "ئى"), + (0xFBFC, "M", "ی"), + (0xFC00, "M", "ئج"), + (0xFC01, "M", "ئح"), + (0xFC02, "M", "ئم"), + (0xFC03, "M", "ئى"), + (0xFC04, "M", "ئي"), + (0xFC05, "M", "بج"), + (0xFC06, "M", "بح"), + (0xFC07, "M", "بخ"), + (0xFC08, "M", "بم"), + (0xFC09, "M", "بى"), + (0xFC0A, "M", "بي"), + (0xFC0B, "M", "تج"), + (0xFC0C, "M", "تح"), + (0xFC0D, "M", "تخ"), + (0xFC0E, "M", "تم"), + (0xFC0F, "M", "تى"), + (0xFC10, "M", "تي"), + (0xFC11, "M", "ثج"), + (0xFC12, "M", "ثم"), + (0xFC13, "M", "ثى"), + (0xFC14, "M", "ثي"), + (0xFC15, "M", "جح"), + (0xFC16, "M", "جم"), + (0xFC17, "M", "حج"), + (0xFC18, "M", "حم"), + (0xFC19, "M", "خج"), + (0xFC1A, "M", "خح"), + (0xFC1B, "M", "خم"), + (0xFC1C, "M", "سج"), + (0xFC1D, "M", "سح"), + (0xFC1E, "M", "سخ"), + (0xFC1F, "M", "سم"), + (0xFC20, "M", "صح"), + (0xFC21, "M", "صم"), + (0xFC22, "M", "ضج"), + (0xFC23, "M", "ضح"), + (0xFC24, "M", "ضخ"), + (0xFC25, "M", "ضم"), + (0xFC26, "M", "طح"), + (0xFC27, "M", "طم"), + (0xFC28, "M", "ظم"), + (0xFC29, "M", "عج"), + (0xFC2A, "M", "عم"), + (0xFC2B, "M", "غج"), + (0xFC2C, "M", "غم"), + (0xFC2D, "M", "فج"), + (0xFC2E, "M", "فح"), + (0xFC2F, "M", "فخ"), + (0xFC30, "M", "فم"), + (0xFC31, "M", "فى"), + (0xFC32, "M", "في"), + (0xFC33, "M", "قح"), + (0xFC34, "M", "قم"), + (0xFC35, "M", "قى"), + (0xFC36, "M", "قي"), + (0xFC37, "M", "كا"), + (0xFC38, "M", "كج"), + (0xFC39, "M", "كح"), + (0xFC3A, "M", "كخ"), + (0xFC3B, "M", "كل"), + (0xFC3C, "M", "كم"), + (0xFC3D, "M", "كى"), + (0xFC3E, "M", "كي"), + (0xFC3F, "M", "لج"), + (0xFC40, "M", "لح"), + (0xFC41, "M", "لخ"), + (0xFC42, "M", "لم"), + (0xFC43, "M", "لى"), + (0xFC44, "M", "لي"), + (0xFC45, "M", "مج"), + (0xFC46, "M", "مح"), + (0xFC47, "M", "مخ"), + (0xFC48, "M", "مم"), + (0xFC49, "M", "مى"), + (0xFC4A, "M", "مي"), + (0xFC4B, "M", "نج"), + (0xFC4C, "M", "نح"), + (0xFC4D, "M", "نخ"), + (0xFC4E, "M", "نم"), + (0xFC4F, "M", "نى"), + (0xFC50, "M", "ني"), + (0xFC51, "M", "هج"), + (0xFC52, "M", "هم"), + (0xFC53, "M", "هى"), + (0xFC54, "M", "هي"), + (0xFC55, "M", "يج"), + (0xFC56, "M", "يح"), + (0xFC57, 
"M", "يخ"), + (0xFC58, "M", "يم"), + (0xFC59, "M", "يى"), + (0xFC5A, "M", "يي"), + ] + + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC5B, "M", "ذٰ"), + (0xFC5C, "M", "رٰ"), + (0xFC5D, "M", "ىٰ"), + (0xFC5E, "3", " ٌّ"), + (0xFC5F, "3", " ٍّ"), + (0xFC60, "3", " َّ"), + (0xFC61, "3", " ُّ"), + (0xFC62, "3", " ِّ"), + (0xFC63, "3", " ّٰ"), + (0xFC64, "M", "ئر"), + (0xFC65, "M", "ئز"), + (0xFC66, "M", "ئم"), + (0xFC67, "M", "ئن"), + (0xFC68, "M", "ئى"), + (0xFC69, "M", "ئي"), + (0xFC6A, "M", "بر"), + (0xFC6B, "M", "بز"), + (0xFC6C, "M", "بم"), + (0xFC6D, "M", "بن"), + (0xFC6E, "M", "بى"), + (0xFC6F, "M", "بي"), + (0xFC70, "M", "تر"), + (0xFC71, "M", "تز"), + (0xFC72, "M", "تم"), + (0xFC73, "M", "تن"), + (0xFC74, "M", "تى"), + (0xFC75, "M", "تي"), + (0xFC76, "M", "ثر"), + (0xFC77, "M", "ثز"), + (0xFC78, "M", "ثم"), + (0xFC79, "M", "ثن"), + (0xFC7A, "M", "ثى"), + (0xFC7B, "M", "ثي"), + (0xFC7C, "M", "فى"), + (0xFC7D, "M", "في"), + (0xFC7E, "M", "قى"), + (0xFC7F, "M", "قي"), + (0xFC80, "M", "كا"), + (0xFC81, "M", "كل"), + (0xFC82, "M", "كم"), + (0xFC83, "M", "كى"), + (0xFC84, "M", "كي"), + (0xFC85, "M", "لم"), + (0xFC86, "M", "لى"), + (0xFC87, "M", "لي"), + (0xFC88, "M", "ما"), + (0xFC89, "M", "مم"), + (0xFC8A, "M", "نر"), + (0xFC8B, "M", "نز"), + (0xFC8C, "M", "نم"), + (0xFC8D, "M", "نن"), + (0xFC8E, "M", "نى"), + (0xFC8F, "M", "ني"), + (0xFC90, "M", "ىٰ"), + (0xFC91, "M", "ير"), + (0xFC92, "M", "يز"), + (0xFC93, "M", "يم"), + (0xFC94, "M", "ين"), + (0xFC95, "M", "يى"), + (0xFC96, "M", "يي"), + (0xFC97, "M", "ئج"), + (0xFC98, "M", "ئح"), + (0xFC99, "M", "ئخ"), + (0xFC9A, "M", "ئم"), + (0xFC9B, "M", "ئه"), + (0xFC9C, "M", "بج"), + (0xFC9D, "M", "بح"), + (0xFC9E, "M", "بخ"), + (0xFC9F, "M", "بم"), + (0xFCA0, "M", "به"), + (0xFCA1, "M", "تج"), + (0xFCA2, "M", "تح"), + (0xFCA3, "M", "تخ"), + (0xFCA4, "M", "تم"), + (0xFCA5, "M", "ته"), + (0xFCA6, "M", "ثم"), + (0xFCA7, "M", "جح"), + (0xFCA8, "M", "جم"), + (0xFCA9, "M", "حج"), + (0xFCAA, "M", "حم"), + (0xFCAB, "M", "خج"), + (0xFCAC, "M", "خم"), + (0xFCAD, "M", "سج"), + (0xFCAE, "M", "سح"), + (0xFCAF, "M", "سخ"), + (0xFCB0, "M", "سم"), + (0xFCB1, "M", "صح"), + (0xFCB2, "M", "صخ"), + (0xFCB3, "M", "صم"), + (0xFCB4, "M", "ضج"), + (0xFCB5, "M", "ضح"), + (0xFCB6, "M", "ضخ"), + (0xFCB7, "M", "ضم"), + (0xFCB8, "M", "طح"), + (0xFCB9, "M", "ظم"), + (0xFCBA, "M", "عج"), + (0xFCBB, "M", "عم"), + (0xFCBC, "M", "غج"), + (0xFCBD, "M", "غم"), + (0xFCBE, "M", "فج"), + ] + + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBF, "M", "فح"), + (0xFCC0, "M", "فخ"), + (0xFCC1, "M", "فم"), + (0xFCC2, "M", "قح"), + (0xFCC3, "M", "قم"), + (0xFCC4, "M", "كج"), + (0xFCC5, "M", "كح"), + (0xFCC6, "M", "كخ"), + (0xFCC7, "M", "كل"), + (0xFCC8, "M", "كم"), + (0xFCC9, "M", "لج"), + (0xFCCA, "M", "لح"), + (0xFCCB, "M", "لخ"), + (0xFCCC, "M", "لم"), + (0xFCCD, "M", "له"), + (0xFCCE, "M", "مج"), + (0xFCCF, "M", "مح"), + (0xFCD0, "M", "مخ"), + (0xFCD1, "M", "مم"), + (0xFCD2, "M", "نج"), + (0xFCD3, "M", "نح"), + (0xFCD4, "M", "نخ"), + (0xFCD5, "M", "نم"), + (0xFCD6, "M", "نه"), + (0xFCD7, "M", "هج"), + (0xFCD8, "M", "هم"), + (0xFCD9, "M", "هٰ"), + (0xFCDA, "M", "يج"), + (0xFCDB, "M", "يح"), + (0xFCDC, "M", "يخ"), + (0xFCDD, "M", "يم"), + (0xFCDE, "M", "يه"), + (0xFCDF, "M", "ئم"), + (0xFCE0, "M", "ئه"), + (0xFCE1, "M", "بم"), + (0xFCE2, "M", "به"), + (0xFCE3, "M", "تم"), + (0xFCE4, "M", "ته"), + (0xFCE5, "M", "ثم"), + (0xFCE6, "M", "ثه"), + (0xFCE7, "M", "سم"), + (0xFCE8, "M", "سه"), + (0xFCE9, "M", "شم"), 
+ (0xFCEA, "M", "شه"), + (0xFCEB, "M", "كل"), + (0xFCEC, "M", "كم"), + (0xFCED, "M", "لم"), + (0xFCEE, "M", "نم"), + (0xFCEF, "M", "نه"), + (0xFCF0, "M", "يم"), + (0xFCF1, "M", "يه"), + (0xFCF2, "M", "ـَّ"), + (0xFCF3, "M", "ـُّ"), + (0xFCF4, "M", "ـِّ"), + (0xFCF5, "M", "طى"), + (0xFCF6, "M", "طي"), + (0xFCF7, "M", "عى"), + (0xFCF8, "M", "عي"), + (0xFCF9, "M", "غى"), + (0xFCFA, "M", "غي"), + (0xFCFB, "M", "سى"), + (0xFCFC, "M", "سي"), + (0xFCFD, "M", "شى"), + (0xFCFE, "M", "شي"), + (0xFCFF, "M", "حى"), + (0xFD00, "M", "حي"), + (0xFD01, "M", "جى"), + (0xFD02, "M", "جي"), + (0xFD03, "M", "خى"), + (0xFD04, "M", "خي"), + (0xFD05, "M", "صى"), + (0xFD06, "M", "صي"), + (0xFD07, "M", "ضى"), + (0xFD08, "M", "ضي"), + (0xFD09, "M", "شج"), + (0xFD0A, "M", "شح"), + (0xFD0B, "M", "شخ"), + (0xFD0C, "M", "شم"), + (0xFD0D, "M", "شر"), + (0xFD0E, "M", "سر"), + (0xFD0F, "M", "صر"), + (0xFD10, "M", "ضر"), + (0xFD11, "M", "طى"), + (0xFD12, "M", "طي"), + (0xFD13, "M", "عى"), + (0xFD14, "M", "عي"), + (0xFD15, "M", "غى"), + (0xFD16, "M", "غي"), + (0xFD17, "M", "سى"), + (0xFD18, "M", "سي"), + (0xFD19, "M", "شى"), + (0xFD1A, "M", "شي"), + (0xFD1B, "M", "حى"), + (0xFD1C, "M", "حي"), + (0xFD1D, "M", "جى"), + (0xFD1E, "M", "جي"), + (0xFD1F, "M", "خى"), + (0xFD20, "M", "خي"), + (0xFD21, "M", "صى"), + (0xFD22, "M", "صي"), + ] + + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD23, "M", "ضى"), + (0xFD24, "M", "ضي"), + (0xFD25, "M", "شج"), + (0xFD26, "M", "شح"), + (0xFD27, "M", "شخ"), + (0xFD28, "M", "شم"), + (0xFD29, "M", "شر"), + (0xFD2A, "M", "سر"), + (0xFD2B, "M", "صر"), + (0xFD2C, "M", "ضر"), + (0xFD2D, "M", "شج"), + (0xFD2E, "M", "شح"), + (0xFD2F, "M", "شخ"), + (0xFD30, "M", "شم"), + (0xFD31, "M", "سه"), + (0xFD32, "M", "شه"), + (0xFD33, "M", "طم"), + (0xFD34, "M", "سج"), + (0xFD35, "M", "سح"), + (0xFD36, "M", "سخ"), + (0xFD37, "M", "شج"), + (0xFD38, "M", "شح"), + (0xFD39, "M", "شخ"), + (0xFD3A, "M", "طم"), + (0xFD3B, "M", "ظم"), + (0xFD3C, "M", "اً"), + (0xFD3E, "V"), + (0xFD50, "M", "تجم"), + (0xFD51, "M", "تحج"), + (0xFD53, "M", "تحم"), + (0xFD54, "M", "تخم"), + (0xFD55, "M", "تمج"), + (0xFD56, "M", "تمح"), + (0xFD57, "M", "تمخ"), + (0xFD58, "M", "جمح"), + (0xFD5A, "M", "حمي"), + (0xFD5B, "M", "حمى"), + (0xFD5C, "M", "سحج"), + (0xFD5D, "M", "سجح"), + (0xFD5E, "M", "سجى"), + (0xFD5F, "M", "سمح"), + (0xFD61, "M", "سمج"), + (0xFD62, "M", "سمم"), + (0xFD64, "M", "صحح"), + (0xFD66, "M", "صمم"), + (0xFD67, "M", "شحم"), + (0xFD69, "M", "شجي"), + (0xFD6A, "M", "شمخ"), + (0xFD6C, "M", "شمم"), + (0xFD6E, "M", "ضحى"), + (0xFD6F, "M", "ضخم"), + (0xFD71, "M", "طمح"), + (0xFD73, "M", "طمم"), + (0xFD74, "M", "طمي"), + (0xFD75, "M", "عجم"), + (0xFD76, "M", "عمم"), + (0xFD78, "M", "عمى"), + (0xFD79, "M", "غمم"), + (0xFD7A, "M", "غمي"), + (0xFD7B, "M", "غمى"), + (0xFD7C, "M", "فخم"), + (0xFD7E, "M", "قمح"), + (0xFD7F, "M", "قمم"), + (0xFD80, "M", "لحم"), + (0xFD81, "M", "لحي"), + (0xFD82, "M", "لحى"), + (0xFD83, "M", "لجج"), + (0xFD85, "M", "لخم"), + (0xFD87, "M", "لمح"), + (0xFD89, "M", "محج"), + (0xFD8A, "M", "محم"), + (0xFD8B, "M", "محي"), + (0xFD8C, "M", "مجح"), + (0xFD8D, "M", "مجم"), + (0xFD8E, "M", "مخج"), + (0xFD8F, "M", "مخم"), + (0xFD90, "X"), + (0xFD92, "M", "مجخ"), + (0xFD93, "M", "همج"), + (0xFD94, "M", "همم"), + (0xFD95, "M", "نحم"), + (0xFD96, "M", "نحى"), + (0xFD97, "M", "نجم"), + (0xFD99, "M", "نجى"), + (0xFD9A, "M", "نمي"), + (0xFD9B, "M", "نمى"), + (0xFD9C, "M", "يمم"), + (0xFD9E, "M", "بخي"), + (0xFD9F, "M", "تجي"), + (0xFDA0, "M", "تجى"), + (0xFDA1, "M", "تخي"), + 
(0xFDA2, "M", "تخى"), + (0xFDA3, "M", "تمي"), + (0xFDA4, "M", "تمى"), + (0xFDA5, "M", "جمي"), + (0xFDA6, "M", "جحى"), + (0xFDA7, "M", "جمى"), + (0xFDA8, "M", "سخى"), + (0xFDA9, "M", "صحي"), + (0xFDAA, "M", "شحي"), + ] + + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDAB, "M", "ضحي"), + (0xFDAC, "M", "لجي"), + (0xFDAD, "M", "لمي"), + (0xFDAE, "M", "يحي"), + (0xFDAF, "M", "يجي"), + (0xFDB0, "M", "يمي"), + (0xFDB1, "M", "ممي"), + (0xFDB2, "M", "قمي"), + (0xFDB3, "M", "نحي"), + (0xFDB4, "M", "قمح"), + (0xFDB5, "M", "لحم"), + (0xFDB6, "M", "عمي"), + (0xFDB7, "M", "كمي"), + (0xFDB8, "M", "نجح"), + (0xFDB9, "M", "مخي"), + (0xFDBA, "M", "لجم"), + (0xFDBB, "M", "كمم"), + (0xFDBC, "M", "لجم"), + (0xFDBD, "M", "نجح"), + (0xFDBE, "M", "جحي"), + (0xFDBF, "M", "حجي"), + (0xFDC0, "M", "مجي"), + (0xFDC1, "M", "فمي"), + (0xFDC2, "M", "بحي"), + (0xFDC3, "M", "كمم"), + (0xFDC4, "M", "عجم"), + (0xFDC5, "M", "صمم"), + (0xFDC6, "M", "سخي"), + (0xFDC7, "M", "نجي"), + (0xFDC8, "X"), + (0xFDCF, "V"), + (0xFDD0, "X"), + (0xFDF0, "M", "صلے"), + (0xFDF1, "M", "قلے"), + (0xFDF2, "M", "الله"), + (0xFDF3, "M", "اكبر"), + (0xFDF4, "M", "محمد"), + (0xFDF5, "M", "صلعم"), + (0xFDF6, "M", "رسول"), + (0xFDF7, "M", "عليه"), + (0xFDF8, "M", "وسلم"), + (0xFDF9, "M", "صلى"), + (0xFDFA, "3", "صلى الله عليه وسلم"), + (0xFDFB, "3", "جل جلاله"), + (0xFDFC, "M", "ریال"), + (0xFDFD, "V"), + (0xFE00, "I"), + (0xFE10, "3", ","), + (0xFE11, "M", "、"), + (0xFE12, "X"), + (0xFE13, "3", ":"), + (0xFE14, "3", ";"), + (0xFE15, "3", "!"), + (0xFE16, "3", "?"), + (0xFE17, "M", "〖"), + (0xFE18, "M", "〗"), + (0xFE19, "X"), + (0xFE20, "V"), + (0xFE30, "X"), + (0xFE31, "M", "—"), + (0xFE32, "M", "–"), + (0xFE33, "3", "_"), + (0xFE35, "3", "("), + (0xFE36, "3", ")"), + (0xFE37, "3", "{"), + (0xFE38, "3", "}"), + (0xFE39, "M", "〔"), + (0xFE3A, "M", "〕"), + (0xFE3B, "M", "【"), + (0xFE3C, "M", "】"), + (0xFE3D, "M", "《"), + (0xFE3E, "M", "》"), + (0xFE3F, "M", "〈"), + (0xFE40, "M", "〉"), + (0xFE41, "M", "「"), + (0xFE42, "M", "」"), + (0xFE43, "M", "『"), + (0xFE44, "M", "』"), + (0xFE45, "V"), + (0xFE47, "3", "["), + (0xFE48, "3", "]"), + (0xFE49, "3", " ̅"), + (0xFE4D, "3", "_"), + (0xFE50, "3", ","), + (0xFE51, "M", "、"), + (0xFE52, "X"), + (0xFE54, "3", ";"), + (0xFE55, "3", ":"), + (0xFE56, "3", "?"), + (0xFE57, "3", "!"), + (0xFE58, "M", "—"), + (0xFE59, "3", "("), + (0xFE5A, "3", ")"), + (0xFE5B, "3", "{"), + (0xFE5C, "3", "}"), + (0xFE5D, "M", "〔"), + (0xFE5E, "M", "〕"), + (0xFE5F, "3", "#"), + (0xFE60, "3", "&"), + (0xFE61, "3", "*"), + ] + + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE62, "3", "+"), + (0xFE63, "M", "-"), + (0xFE64, "3", "<"), + (0xFE65, "3", ">"), + (0xFE66, "3", "="), + (0xFE67, "X"), + (0xFE68, "3", "\\"), + (0xFE69, "3", "$"), + (0xFE6A, "3", "%"), + (0xFE6B, "3", "@"), + (0xFE6C, "X"), + (0xFE70, "3", " ً"), + (0xFE71, "M", "ـً"), + (0xFE72, "3", " ٌ"), + (0xFE73, "V"), + (0xFE74, "3", " ٍ"), + (0xFE75, "X"), + (0xFE76, "3", " َ"), + (0xFE77, "M", "ـَ"), + (0xFE78, "3", " ُ"), + (0xFE79, "M", "ـُ"), + (0xFE7A, "3", " ِ"), + (0xFE7B, "M", "ـِ"), + (0xFE7C, "3", " ّ"), + (0xFE7D, "M", "ـّ"), + (0xFE7E, "3", " ْ"), + (0xFE7F, "M", "ـْ"), + (0xFE80, "M", "ء"), + (0xFE81, "M", "آ"), + (0xFE83, "M", "أ"), + (0xFE85, "M", "ؤ"), + (0xFE87, "M", "إ"), + (0xFE89, "M", "ئ"), + (0xFE8D, "M", "ا"), + (0xFE8F, "M", "ب"), + (0xFE93, "M", "ة"), + (0xFE95, "M", "ت"), + (0xFE99, "M", "ث"), + (0xFE9D, "M", "ج"), + (0xFEA1, "M", "ح"), + (0xFEA5, "M", "خ"), + 
(0xFEA9, "M", "د"), + (0xFEAB, "M", "ذ"), + (0xFEAD, "M", "ر"), + (0xFEAF, "M", "ز"), + (0xFEB1, "M", "س"), + (0xFEB5, "M", "ش"), + (0xFEB9, "M", "ص"), + (0xFEBD, "M", "ض"), + (0xFEC1, "M", "ط"), + (0xFEC5, "M", "ظ"), + (0xFEC9, "M", "ع"), + (0xFECD, "M", "غ"), + (0xFED1, "M", "ف"), + (0xFED5, "M", "ق"), + (0xFED9, "M", "ك"), + (0xFEDD, "M", "ل"), + (0xFEE1, "M", "م"), + (0xFEE5, "M", "ن"), + (0xFEE9, "M", "ه"), + (0xFEED, "M", "و"), + (0xFEEF, "M", "ى"), + (0xFEF1, "M", "ي"), + (0xFEF5, "M", "لآ"), + (0xFEF7, "M", "لأ"), + (0xFEF9, "M", "لإ"), + (0xFEFB, "M", "لا"), + (0xFEFD, "X"), + (0xFEFF, "I"), + (0xFF00, "X"), + (0xFF01, "3", "!"), + (0xFF02, "3", '"'), + (0xFF03, "3", "#"), + (0xFF04, "3", "$"), + (0xFF05, "3", "%"), + (0xFF06, "3", "&"), + (0xFF07, "3", "'"), + (0xFF08, "3", "("), + (0xFF09, "3", ")"), + (0xFF0A, "3", "*"), + (0xFF0B, "3", "+"), + (0xFF0C, "3", ","), + (0xFF0D, "M", "-"), + (0xFF0E, "M", "."), + (0xFF0F, "3", "/"), + (0xFF10, "M", "0"), + (0xFF11, "M", "1"), + (0xFF12, "M", "2"), + (0xFF13, "M", "3"), + (0xFF14, "M", "4"), + (0xFF15, "M", "5"), + (0xFF16, "M", "6"), + (0xFF17, "M", "7"), + (0xFF18, "M", "8"), + (0xFF19, "M", "9"), + (0xFF1A, "3", ":"), + (0xFF1B, "3", ";"), + (0xFF1C, "3", "<"), + (0xFF1D, "3", "="), + (0xFF1E, "3", ">"), + ] + + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1F, "3", "?"), + (0xFF20, "3", "@"), + (0xFF21, "M", "a"), + (0xFF22, "M", "b"), + (0xFF23, "M", "c"), + (0xFF24, "M", "d"), + (0xFF25, "M", "e"), + (0xFF26, "M", "f"), + (0xFF27, "M", "g"), + (0xFF28, "M", "h"), + (0xFF29, "M", "i"), + (0xFF2A, "M", "j"), + (0xFF2B, "M", "k"), + (0xFF2C, "M", "l"), + (0xFF2D, "M", "m"), + (0xFF2E, "M", "n"), + (0xFF2F, "M", "o"), + (0xFF30, "M", "p"), + (0xFF31, "M", "q"), + (0xFF32, "M", "r"), + (0xFF33, "M", "s"), + (0xFF34, "M", "t"), + (0xFF35, "M", "u"), + (0xFF36, "M", "v"), + (0xFF37, "M", "w"), + (0xFF38, "M", "x"), + (0xFF39, "M", "y"), + (0xFF3A, "M", "z"), + (0xFF3B, "3", "["), + (0xFF3C, "3", "\\"), + (0xFF3D, "3", "]"), + (0xFF3E, "3", "^"), + (0xFF3F, "3", "_"), + (0xFF40, "3", "`"), + (0xFF41, "M", "a"), + (0xFF42, "M", "b"), + (0xFF43, "M", "c"), + (0xFF44, "M", "d"), + (0xFF45, "M", "e"), + (0xFF46, "M", "f"), + (0xFF47, "M", "g"), + (0xFF48, "M", "h"), + (0xFF49, "M", "i"), + (0xFF4A, "M", "j"), + (0xFF4B, "M", "k"), + (0xFF4C, "M", "l"), + (0xFF4D, "M", "m"), + (0xFF4E, "M", "n"), + (0xFF4F, "M", "o"), + (0xFF50, "M", "p"), + (0xFF51, "M", "q"), + (0xFF52, "M", "r"), + (0xFF53, "M", "s"), + (0xFF54, "M", "t"), + (0xFF55, "M", "u"), + (0xFF56, "M", "v"), + (0xFF57, "M", "w"), + (0xFF58, "M", "x"), + (0xFF59, "M", "y"), + (0xFF5A, "M", "z"), + (0xFF5B, "3", "{"), + (0xFF5C, "3", "|"), + (0xFF5D, "3", "}"), + (0xFF5E, "3", "~"), + (0xFF5F, "M", "⦅"), + (0xFF60, "M", "⦆"), + (0xFF61, "M", "."), + (0xFF62, "M", "「"), + (0xFF63, "M", "」"), + (0xFF64, "M", "、"), + (0xFF65, "M", "・"), + (0xFF66, "M", "ヲ"), + (0xFF67, "M", "ァ"), + (0xFF68, "M", "ィ"), + (0xFF69, "M", "ゥ"), + (0xFF6A, "M", "ェ"), + (0xFF6B, "M", "ォ"), + (0xFF6C, "M", "ャ"), + (0xFF6D, "M", "ュ"), + (0xFF6E, "M", "ョ"), + (0xFF6F, "M", "ッ"), + (0xFF70, "M", "ー"), + (0xFF71, "M", "ア"), + (0xFF72, "M", "イ"), + (0xFF73, "M", "ウ"), + (0xFF74, "M", "エ"), + (0xFF75, "M", "オ"), + (0xFF76, "M", "カ"), + (0xFF77, "M", "キ"), + (0xFF78, "M", "ク"), + (0xFF79, "M", "ケ"), + (0xFF7A, "M", "コ"), + (0xFF7B, "M", "サ"), + (0xFF7C, "M", "シ"), + (0xFF7D, "M", "ス"), + (0xFF7E, "M", "セ"), + (0xFF7F, "M", "ソ"), + (0xFF80, "M", "タ"), + (0xFF81, "M", "チ"), + 
(0xFF82, "M", "ツ"), + ] + + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF83, "M", "テ"), + (0xFF84, "M", "ト"), + (0xFF85, "M", "ナ"), + (0xFF86, "M", "ニ"), + (0xFF87, "M", "ヌ"), + (0xFF88, "M", "ネ"), + (0xFF89, "M", "ノ"), + (0xFF8A, "M", "ハ"), + (0xFF8B, "M", "ヒ"), + (0xFF8C, "M", "フ"), + (0xFF8D, "M", "ヘ"), + (0xFF8E, "M", "ホ"), + (0xFF8F, "M", "マ"), + (0xFF90, "M", "ミ"), + (0xFF91, "M", "ム"), + (0xFF92, "M", "メ"), + (0xFF93, "M", "モ"), + (0xFF94, "M", "ヤ"), + (0xFF95, "M", "ユ"), + (0xFF96, "M", "ヨ"), + (0xFF97, "M", "ラ"), + (0xFF98, "M", "リ"), + (0xFF99, "M", "ル"), + (0xFF9A, "M", "レ"), + (0xFF9B, "M", "ロ"), + (0xFF9C, "M", "ワ"), + (0xFF9D, "M", "ン"), + (0xFF9E, "M", "゙"), + (0xFF9F, "M", "゚"), + (0xFFA0, "X"), + (0xFFA1, "M", "ᄀ"), + (0xFFA2, "M", "ᄁ"), + (0xFFA3, "M", "ᆪ"), + (0xFFA4, "M", "ᄂ"), + (0xFFA5, "M", "ᆬ"), + (0xFFA6, "M", "ᆭ"), + (0xFFA7, "M", "ᄃ"), + (0xFFA8, "M", "ᄄ"), + (0xFFA9, "M", "ᄅ"), + (0xFFAA, "M", "ᆰ"), + (0xFFAB, "M", "ᆱ"), + (0xFFAC, "M", "ᆲ"), + (0xFFAD, "M", "ᆳ"), + (0xFFAE, "M", "ᆴ"), + (0xFFAF, "M", "ᆵ"), + (0xFFB0, "M", "ᄚ"), + (0xFFB1, "M", "ᄆ"), + (0xFFB2, "M", "ᄇ"), + (0xFFB3, "M", "ᄈ"), + (0xFFB4, "M", "ᄡ"), + (0xFFB5, "M", "ᄉ"), + (0xFFB6, "M", "ᄊ"), + (0xFFB7, "M", "ᄋ"), + (0xFFB8, "M", "ᄌ"), + (0xFFB9, "M", "ᄍ"), + (0xFFBA, "M", "ᄎ"), + (0xFFBB, "M", "ᄏ"), + (0xFFBC, "M", "ᄐ"), + (0xFFBD, "M", "ᄑ"), + (0xFFBE, "M", "ᄒ"), + (0xFFBF, "X"), + (0xFFC2, "M", "ᅡ"), + (0xFFC3, "M", "ᅢ"), + (0xFFC4, "M", "ᅣ"), + (0xFFC5, "M", "ᅤ"), + (0xFFC6, "M", "ᅥ"), + (0xFFC7, "M", "ᅦ"), + (0xFFC8, "X"), + (0xFFCA, "M", "ᅧ"), + (0xFFCB, "M", "ᅨ"), + (0xFFCC, "M", "ᅩ"), + (0xFFCD, "M", "ᅪ"), + (0xFFCE, "M", "ᅫ"), + (0xFFCF, "M", "ᅬ"), + (0xFFD0, "X"), + (0xFFD2, "M", "ᅭ"), + (0xFFD3, "M", "ᅮ"), + (0xFFD4, "M", "ᅯ"), + (0xFFD5, "M", "ᅰ"), + (0xFFD6, "M", "ᅱ"), + (0xFFD7, "M", "ᅲ"), + (0xFFD8, "X"), + (0xFFDA, "M", "ᅳ"), + (0xFFDB, "M", "ᅴ"), + (0xFFDC, "M", "ᅵ"), + (0xFFDD, "X"), + (0xFFE0, "M", "¢"), + (0xFFE1, "M", "£"), + (0xFFE2, "M", "¬"), + (0xFFE3, "3", " ̄"), + (0xFFE4, "M", "¦"), + (0xFFE5, "M", "¥"), + (0xFFE6, "M", "₩"), + (0xFFE7, "X"), + (0xFFE8, "M", "│"), + (0xFFE9, "M", "←"), + (0xFFEA, "M", "↑"), + (0xFFEB, "M", "→"), + (0xFFEC, "M", "↓"), + (0xFFED, "M", "■"), + ] + + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEE, "M", "○"), + (0xFFEF, "X"), + (0x10000, "V"), + (0x1000C, "X"), + (0x1000D, "V"), + (0x10027, "X"), + (0x10028, "V"), + (0x1003B, "X"), + (0x1003C, "V"), + (0x1003E, "X"), + (0x1003F, "V"), + (0x1004E, "X"), + (0x10050, "V"), + (0x1005E, "X"), + (0x10080, "V"), + (0x100FB, "X"), + (0x10100, "V"), + (0x10103, "X"), + (0x10107, "V"), + (0x10134, "X"), + (0x10137, "V"), + (0x1018F, "X"), + (0x10190, "V"), + (0x1019D, "X"), + (0x101A0, "V"), + (0x101A1, "X"), + (0x101D0, "V"), + (0x101FE, "X"), + (0x10280, "V"), + (0x1029D, "X"), + (0x102A0, "V"), + (0x102D1, "X"), + (0x102E0, "V"), + (0x102FC, "X"), + (0x10300, "V"), + (0x10324, "X"), + (0x1032D, "V"), + (0x1034B, "X"), + (0x10350, "V"), + (0x1037B, "X"), + (0x10380, "V"), + (0x1039E, "X"), + (0x1039F, "V"), + (0x103C4, "X"), + (0x103C8, "V"), + (0x103D6, "X"), + (0x10400, "M", "𐐨"), + (0x10401, "M", "𐐩"), + (0x10402, "M", "𐐪"), + (0x10403, "M", "𐐫"), + (0x10404, "M", "𐐬"), + (0x10405, "M", "𐐭"), + (0x10406, "M", "𐐮"), + (0x10407, "M", "𐐯"), + (0x10408, "M", "𐐰"), + (0x10409, "M", "𐐱"), + (0x1040A, "M", "𐐲"), + (0x1040B, "M", "𐐳"), + (0x1040C, "M", "𐐴"), + (0x1040D, "M", "𐐵"), + (0x1040E, "M", "𐐶"), + (0x1040F, 
"M", "𐐷"), + (0x10410, "M", "𐐸"), + (0x10411, "M", "𐐹"), + (0x10412, "M", "𐐺"), + (0x10413, "M", "𐐻"), + (0x10414, "M", "𐐼"), + (0x10415, "M", "𐐽"), + (0x10416, "M", "𐐾"), + (0x10417, "M", "𐐿"), + (0x10418, "M", "𐑀"), + (0x10419, "M", "𐑁"), + (0x1041A, "M", "𐑂"), + (0x1041B, "M", "𐑃"), + (0x1041C, "M", "𐑄"), + (0x1041D, "M", "𐑅"), + (0x1041E, "M", "𐑆"), + (0x1041F, "M", "𐑇"), + (0x10420, "M", "𐑈"), + (0x10421, "M", "𐑉"), + (0x10422, "M", "𐑊"), + (0x10423, "M", "𐑋"), + (0x10424, "M", "𐑌"), + (0x10425, "M", "𐑍"), + (0x10426, "M", "𐑎"), + (0x10427, "M", "𐑏"), + (0x10428, "V"), + (0x1049E, "X"), + (0x104A0, "V"), + (0x104AA, "X"), + (0x104B0, "M", "𐓘"), + (0x104B1, "M", "𐓙"), + (0x104B2, "M", "𐓚"), + (0x104B3, "M", "𐓛"), + (0x104B4, "M", "𐓜"), + (0x104B5, "M", "𐓝"), + (0x104B6, "M", "𐓞"), + (0x104B7, "M", "𐓟"), + (0x104B8, "M", "𐓠"), + (0x104B9, "M", "𐓡"), + ] + + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104BA, "M", "𐓢"), + (0x104BB, "M", "𐓣"), + (0x104BC, "M", "𐓤"), + (0x104BD, "M", "𐓥"), + (0x104BE, "M", "𐓦"), + (0x104BF, "M", "𐓧"), + (0x104C0, "M", "𐓨"), + (0x104C1, "M", "𐓩"), + (0x104C2, "M", "𐓪"), + (0x104C3, "M", "𐓫"), + (0x104C4, "M", "𐓬"), + (0x104C5, "M", "𐓭"), + (0x104C6, "M", "𐓮"), + (0x104C7, "M", "𐓯"), + (0x104C8, "M", "𐓰"), + (0x104C9, "M", "𐓱"), + (0x104CA, "M", "𐓲"), + (0x104CB, "M", "𐓳"), + (0x104CC, "M", "𐓴"), + (0x104CD, "M", "𐓵"), + (0x104CE, "M", "𐓶"), + (0x104CF, "M", "𐓷"), + (0x104D0, "M", "𐓸"), + (0x104D1, "M", "𐓹"), + (0x104D2, "M", "𐓺"), + (0x104D3, "M", "𐓻"), + (0x104D4, "X"), + (0x104D8, "V"), + (0x104FC, "X"), + (0x10500, "V"), + (0x10528, "X"), + (0x10530, "V"), + (0x10564, "X"), + (0x1056F, "V"), + (0x10570, "M", "𐖗"), + (0x10571, "M", "𐖘"), + (0x10572, "M", "𐖙"), + (0x10573, "M", "𐖚"), + (0x10574, "M", "𐖛"), + (0x10575, "M", "𐖜"), + (0x10576, "M", "𐖝"), + (0x10577, "M", "𐖞"), + (0x10578, "M", "𐖟"), + (0x10579, "M", "𐖠"), + (0x1057A, "M", "𐖡"), + (0x1057B, "X"), + (0x1057C, "M", "𐖣"), + (0x1057D, "M", "𐖤"), + (0x1057E, "M", "𐖥"), + (0x1057F, "M", "𐖦"), + (0x10580, "M", "𐖧"), + (0x10581, "M", "𐖨"), + (0x10582, "M", "𐖩"), + (0x10583, "M", "𐖪"), + (0x10584, "M", "𐖫"), + (0x10585, "M", "𐖬"), + (0x10586, "M", "𐖭"), + (0x10587, "M", "𐖮"), + (0x10588, "M", "𐖯"), + (0x10589, "M", "𐖰"), + (0x1058A, "M", "𐖱"), + (0x1058B, "X"), + (0x1058C, "M", "𐖳"), + (0x1058D, "M", "𐖴"), + (0x1058E, "M", "𐖵"), + (0x1058F, "M", "𐖶"), + (0x10590, "M", "𐖷"), + (0x10591, "M", "𐖸"), + (0x10592, "M", "𐖹"), + (0x10593, "X"), + (0x10594, "M", "𐖻"), + (0x10595, "M", "𐖼"), + (0x10596, "X"), + (0x10597, "V"), + (0x105A2, "X"), + (0x105A3, "V"), + (0x105B2, "X"), + (0x105B3, "V"), + (0x105BA, "X"), + (0x105BB, "V"), + (0x105BD, "X"), + (0x10600, "V"), + (0x10737, "X"), + (0x10740, "V"), + (0x10756, "X"), + (0x10760, "V"), + (0x10768, "X"), + (0x10780, "V"), + (0x10781, "M", "ː"), + (0x10782, "M", "ˑ"), + (0x10783, "M", "æ"), + (0x10784, "M", "ʙ"), + (0x10785, "M", "ɓ"), + (0x10786, "X"), + (0x10787, "M", "ʣ"), + (0x10788, "M", "ꭦ"), + (0x10789, "M", "ʥ"), + (0x1078A, "M", "ʤ"), + (0x1078B, "M", "ɖ"), + (0x1078C, "M", "ɗ"), + ] + + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1078D, "M", "ᶑ"), + (0x1078E, "M", "ɘ"), + (0x1078F, "M", "ɞ"), + (0x10790, "M", "ʩ"), + (0x10791, "M", "ɤ"), + (0x10792, "M", "ɢ"), + (0x10793, "M", "ɠ"), + (0x10794, "M", "ʛ"), + (0x10795, "M", "ħ"), + (0x10796, "M", "ʜ"), + (0x10797, "M", "ɧ"), + (0x10798, "M", "ʄ"), + (0x10799, "M", "ʪ"), + (0x1079A, "M", "ʫ"), + (0x1079B, "M", "ɬ"), + 
(0x1079C, "M", "𝼄"), + (0x1079D, "M", "ꞎ"), + (0x1079E, "M", "ɮ"), + (0x1079F, "M", "𝼅"), + (0x107A0, "M", "ʎ"), + (0x107A1, "M", "𝼆"), + (0x107A2, "M", "ø"), + (0x107A3, "M", "ɶ"), + (0x107A4, "M", "ɷ"), + (0x107A5, "M", "q"), + (0x107A6, "M", "ɺ"), + (0x107A7, "M", "𝼈"), + (0x107A8, "M", "ɽ"), + (0x107A9, "M", "ɾ"), + (0x107AA, "M", "ʀ"), + (0x107AB, "M", "ʨ"), + (0x107AC, "M", "ʦ"), + (0x107AD, "M", "ꭧ"), + (0x107AE, "M", "ʧ"), + (0x107AF, "M", "ʈ"), + (0x107B0, "M", "ⱱ"), + (0x107B1, "X"), + (0x107B2, "M", "ʏ"), + (0x107B3, "M", "ʡ"), + (0x107B4, "M", "ʢ"), + (0x107B5, "M", "ʘ"), + (0x107B6, "M", "ǀ"), + (0x107B7, "M", "ǁ"), + (0x107B8, "M", "ǂ"), + (0x107B9, "M", "𝼊"), + (0x107BA, "M", "𝼞"), + (0x107BB, "X"), + (0x10800, "V"), + (0x10806, "X"), + (0x10808, "V"), + (0x10809, "X"), + (0x1080A, "V"), + (0x10836, "X"), + (0x10837, "V"), + (0x10839, "X"), + (0x1083C, "V"), + (0x1083D, "X"), + (0x1083F, "V"), + (0x10856, "X"), + (0x10857, "V"), + (0x1089F, "X"), + (0x108A7, "V"), + (0x108B0, "X"), + (0x108E0, "V"), + (0x108F3, "X"), + (0x108F4, "V"), + (0x108F6, "X"), + (0x108FB, "V"), + (0x1091C, "X"), + (0x1091F, "V"), + (0x1093A, "X"), + (0x1093F, "V"), + (0x10940, "X"), + (0x10980, "V"), + (0x109B8, "X"), + (0x109BC, "V"), + (0x109D0, "X"), + (0x109D2, "V"), + (0x10A04, "X"), + (0x10A05, "V"), + (0x10A07, "X"), + (0x10A0C, "V"), + (0x10A14, "X"), + (0x10A15, "V"), + (0x10A18, "X"), + (0x10A19, "V"), + (0x10A36, "X"), + (0x10A38, "V"), + (0x10A3B, "X"), + (0x10A3F, "V"), + (0x10A49, "X"), + (0x10A50, "V"), + (0x10A59, "X"), + (0x10A60, "V"), + (0x10AA0, "X"), + (0x10AC0, "V"), + (0x10AE7, "X"), + (0x10AEB, "V"), + (0x10AF7, "X"), + (0x10B00, "V"), + ] + + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10B36, "X"), + (0x10B39, "V"), + (0x10B56, "X"), + (0x10B58, "V"), + (0x10B73, "X"), + (0x10B78, "V"), + (0x10B92, "X"), + (0x10B99, "V"), + (0x10B9D, "X"), + (0x10BA9, "V"), + (0x10BB0, "X"), + (0x10C00, "V"), + (0x10C49, "X"), + (0x10C80, "M", "𐳀"), + (0x10C81, "M", "𐳁"), + (0x10C82, "M", "𐳂"), + (0x10C83, "M", "𐳃"), + (0x10C84, "M", "𐳄"), + (0x10C85, "M", "𐳅"), + (0x10C86, "M", "𐳆"), + (0x10C87, "M", "𐳇"), + (0x10C88, "M", "𐳈"), + (0x10C89, "M", "𐳉"), + (0x10C8A, "M", "𐳊"), + (0x10C8B, "M", "𐳋"), + (0x10C8C, "M", "𐳌"), + (0x10C8D, "M", "𐳍"), + (0x10C8E, "M", "𐳎"), + (0x10C8F, "M", "𐳏"), + (0x10C90, "M", "𐳐"), + (0x10C91, "M", "𐳑"), + (0x10C92, "M", "𐳒"), + (0x10C93, "M", "𐳓"), + (0x10C94, "M", "𐳔"), + (0x10C95, "M", "𐳕"), + (0x10C96, "M", "𐳖"), + (0x10C97, "M", "𐳗"), + (0x10C98, "M", "𐳘"), + (0x10C99, "M", "𐳙"), + (0x10C9A, "M", "𐳚"), + (0x10C9B, "M", "𐳛"), + (0x10C9C, "M", "𐳜"), + (0x10C9D, "M", "𐳝"), + (0x10C9E, "M", "𐳞"), + (0x10C9F, "M", "𐳟"), + (0x10CA0, "M", "𐳠"), + (0x10CA1, "M", "𐳡"), + (0x10CA2, "M", "𐳢"), + (0x10CA3, "M", "𐳣"), + (0x10CA4, "M", "𐳤"), + (0x10CA5, "M", "𐳥"), + (0x10CA6, "M", "𐳦"), + (0x10CA7, "M", "𐳧"), + (0x10CA8, "M", "𐳨"), + (0x10CA9, "M", "𐳩"), + (0x10CAA, "M", "𐳪"), + (0x10CAB, "M", "𐳫"), + (0x10CAC, "M", "𐳬"), + (0x10CAD, "M", "𐳭"), + (0x10CAE, "M", "𐳮"), + (0x10CAF, "M", "𐳯"), + (0x10CB0, "M", "𐳰"), + (0x10CB1, "M", "𐳱"), + (0x10CB2, "M", "𐳲"), + (0x10CB3, "X"), + (0x10CC0, "V"), + (0x10CF3, "X"), + (0x10CFA, "V"), + (0x10D28, "X"), + (0x10D30, "V"), + (0x10D3A, "X"), + (0x10E60, "V"), + (0x10E7F, "X"), + (0x10E80, "V"), + (0x10EAA, "X"), + (0x10EAB, "V"), + (0x10EAE, "X"), + (0x10EB0, "V"), + (0x10EB2, "X"), + (0x10EFD, "V"), + (0x10F28, "X"), + (0x10F30, "V"), + (0x10F5A, "X"), + (0x10F70, "V"), + (0x10F8A, "X"), + 
(0x10FB0, "V"), + (0x10FCC, "X"), + (0x10FE0, "V"), + (0x10FF7, "X"), + (0x11000, "V"), + (0x1104E, "X"), + (0x11052, "V"), + (0x11076, "X"), + (0x1107F, "V"), + (0x110BD, "X"), + (0x110BE, "V"), + (0x110C3, "X"), + (0x110D0, "V"), + (0x110E9, "X"), + (0x110F0, "V"), + ] + + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110FA, "X"), + (0x11100, "V"), + (0x11135, "X"), + (0x11136, "V"), + (0x11148, "X"), + (0x11150, "V"), + (0x11177, "X"), + (0x11180, "V"), + (0x111E0, "X"), + (0x111E1, "V"), + (0x111F5, "X"), + (0x11200, "V"), + (0x11212, "X"), + (0x11213, "V"), + (0x11242, "X"), + (0x11280, "V"), + (0x11287, "X"), + (0x11288, "V"), + (0x11289, "X"), + (0x1128A, "V"), + (0x1128E, "X"), + (0x1128F, "V"), + (0x1129E, "X"), + (0x1129F, "V"), + (0x112AA, "X"), + (0x112B0, "V"), + (0x112EB, "X"), + (0x112F0, "V"), + (0x112FA, "X"), + (0x11300, "V"), + (0x11304, "X"), + (0x11305, "V"), + (0x1130D, "X"), + (0x1130F, "V"), + (0x11311, "X"), + (0x11313, "V"), + (0x11329, "X"), + (0x1132A, "V"), + (0x11331, "X"), + (0x11332, "V"), + (0x11334, "X"), + (0x11335, "V"), + (0x1133A, "X"), + (0x1133B, "V"), + (0x11345, "X"), + (0x11347, "V"), + (0x11349, "X"), + (0x1134B, "V"), + (0x1134E, "X"), + (0x11350, "V"), + (0x11351, "X"), + (0x11357, "V"), + (0x11358, "X"), + (0x1135D, "V"), + (0x11364, "X"), + (0x11366, "V"), + (0x1136D, "X"), + (0x11370, "V"), + (0x11375, "X"), + (0x11400, "V"), + (0x1145C, "X"), + (0x1145D, "V"), + (0x11462, "X"), + (0x11480, "V"), + (0x114C8, "X"), + (0x114D0, "V"), + (0x114DA, "X"), + (0x11580, "V"), + (0x115B6, "X"), + (0x115B8, "V"), + (0x115DE, "X"), + (0x11600, "V"), + (0x11645, "X"), + (0x11650, "V"), + (0x1165A, "X"), + (0x11660, "V"), + (0x1166D, "X"), + (0x11680, "V"), + (0x116BA, "X"), + (0x116C0, "V"), + (0x116CA, "X"), + (0x11700, "V"), + (0x1171B, "X"), + (0x1171D, "V"), + (0x1172C, "X"), + (0x11730, "V"), + (0x11747, "X"), + (0x11800, "V"), + (0x1183C, "X"), + (0x118A0, "M", "𑣀"), + (0x118A1, "M", "𑣁"), + (0x118A2, "M", "𑣂"), + (0x118A3, "M", "𑣃"), + (0x118A4, "M", "𑣄"), + (0x118A5, "M", "𑣅"), + (0x118A6, "M", "𑣆"), + (0x118A7, "M", "𑣇"), + (0x118A8, "M", "𑣈"), + (0x118A9, "M", "𑣉"), + (0x118AA, "M", "𑣊"), + ] + + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118AB, "M", "𑣋"), + (0x118AC, "M", "𑣌"), + (0x118AD, "M", "𑣍"), + (0x118AE, "M", "𑣎"), + (0x118AF, "M", "𑣏"), + (0x118B0, "M", "𑣐"), + (0x118B1, "M", "𑣑"), + (0x118B2, "M", "𑣒"), + (0x118B3, "M", "𑣓"), + (0x118B4, "M", "𑣔"), + (0x118B5, "M", "𑣕"), + (0x118B6, "M", "𑣖"), + (0x118B7, "M", "𑣗"), + (0x118B8, "M", "𑣘"), + (0x118B9, "M", "𑣙"), + (0x118BA, "M", "𑣚"), + (0x118BB, "M", "𑣛"), + (0x118BC, "M", "𑣜"), + (0x118BD, "M", "𑣝"), + (0x118BE, "M", "𑣞"), + (0x118BF, "M", "𑣟"), + (0x118C0, "V"), + (0x118F3, "X"), + (0x118FF, "V"), + (0x11907, "X"), + (0x11909, "V"), + (0x1190A, "X"), + (0x1190C, "V"), + (0x11914, "X"), + (0x11915, "V"), + (0x11917, "X"), + (0x11918, "V"), + (0x11936, "X"), + (0x11937, "V"), + (0x11939, "X"), + (0x1193B, "V"), + (0x11947, "X"), + (0x11950, "V"), + (0x1195A, "X"), + (0x119A0, "V"), + (0x119A8, "X"), + (0x119AA, "V"), + (0x119D8, "X"), + (0x119DA, "V"), + (0x119E5, "X"), + (0x11A00, "V"), + (0x11A48, "X"), + (0x11A50, "V"), + (0x11AA3, "X"), + (0x11AB0, "V"), + (0x11AF9, "X"), + (0x11B00, "V"), + (0x11B0A, "X"), + (0x11C00, "V"), + (0x11C09, "X"), + (0x11C0A, "V"), + (0x11C37, "X"), + (0x11C38, "V"), + (0x11C46, "X"), + (0x11C50, "V"), + (0x11C6D, "X"), + (0x11C70, "V"), + (0x11C90, "X"), + (0x11C92, 
"V"), + (0x11CA8, "X"), + (0x11CA9, "V"), + (0x11CB7, "X"), + (0x11D00, "V"), + (0x11D07, "X"), + (0x11D08, "V"), + (0x11D0A, "X"), + (0x11D0B, "V"), + (0x11D37, "X"), + (0x11D3A, "V"), + (0x11D3B, "X"), + (0x11D3C, "V"), + (0x11D3E, "X"), + (0x11D3F, "V"), + (0x11D48, "X"), + (0x11D50, "V"), + (0x11D5A, "X"), + (0x11D60, "V"), + (0x11D66, "X"), + (0x11D67, "V"), + (0x11D69, "X"), + (0x11D6A, "V"), + (0x11D8F, "X"), + (0x11D90, "V"), + (0x11D92, "X"), + (0x11D93, "V"), + (0x11D99, "X"), + (0x11DA0, "V"), + (0x11DAA, "X"), + (0x11EE0, "V"), + (0x11EF9, "X"), + (0x11F00, "V"), + (0x11F11, "X"), + (0x11F12, "V"), + (0x11F3B, "X"), + (0x11F3E, "V"), + ] + + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F5A, "X"), + (0x11FB0, "V"), + (0x11FB1, "X"), + (0x11FC0, "V"), + (0x11FF2, "X"), + (0x11FFF, "V"), + (0x1239A, "X"), + (0x12400, "V"), + (0x1246F, "X"), + (0x12470, "V"), + (0x12475, "X"), + (0x12480, "V"), + (0x12544, "X"), + (0x12F90, "V"), + (0x12FF3, "X"), + (0x13000, "V"), + (0x13430, "X"), + (0x13440, "V"), + (0x13456, "X"), + (0x14400, "V"), + (0x14647, "X"), + (0x16800, "V"), + (0x16A39, "X"), + (0x16A40, "V"), + (0x16A5F, "X"), + (0x16A60, "V"), + (0x16A6A, "X"), + (0x16A6E, "V"), + (0x16ABF, "X"), + (0x16AC0, "V"), + (0x16ACA, "X"), + (0x16AD0, "V"), + (0x16AEE, "X"), + (0x16AF0, "V"), + (0x16AF6, "X"), + (0x16B00, "V"), + (0x16B46, "X"), + (0x16B50, "V"), + (0x16B5A, "X"), + (0x16B5B, "V"), + (0x16B62, "X"), + (0x16B63, "V"), + (0x16B78, "X"), + (0x16B7D, "V"), + (0x16B90, "X"), + (0x16E40, "M", "𖹠"), + (0x16E41, "M", "𖹡"), + (0x16E42, "M", "𖹢"), + (0x16E43, "M", "𖹣"), + (0x16E44, "M", "𖹤"), + (0x16E45, "M", "𖹥"), + (0x16E46, "M", "𖹦"), + (0x16E47, "M", "𖹧"), + (0x16E48, "M", "𖹨"), + (0x16E49, "M", "𖹩"), + (0x16E4A, "M", "𖹪"), + (0x16E4B, "M", "𖹫"), + (0x16E4C, "M", "𖹬"), + (0x16E4D, "M", "𖹭"), + (0x16E4E, "M", "𖹮"), + (0x16E4F, "M", "𖹯"), + (0x16E50, "M", "𖹰"), + (0x16E51, "M", "𖹱"), + (0x16E52, "M", "𖹲"), + (0x16E53, "M", "𖹳"), + (0x16E54, "M", "𖹴"), + (0x16E55, "M", "𖹵"), + (0x16E56, "M", "𖹶"), + (0x16E57, "M", "𖹷"), + (0x16E58, "M", "𖹸"), + (0x16E59, "M", "𖹹"), + (0x16E5A, "M", "𖹺"), + (0x16E5B, "M", "𖹻"), + (0x16E5C, "M", "𖹼"), + (0x16E5D, "M", "𖹽"), + (0x16E5E, "M", "𖹾"), + (0x16E5F, "M", "𖹿"), + (0x16E60, "V"), + (0x16E9B, "X"), + (0x16F00, "V"), + (0x16F4B, "X"), + (0x16F4F, "V"), + (0x16F88, "X"), + (0x16F8F, "V"), + (0x16FA0, "X"), + (0x16FE0, "V"), + (0x16FE5, "X"), + (0x16FF0, "V"), + (0x16FF2, "X"), + (0x17000, "V"), + (0x187F8, "X"), + (0x18800, "V"), + (0x18CD6, "X"), + (0x18D00, "V"), + (0x18D09, "X"), + (0x1AFF0, "V"), + (0x1AFF4, "X"), + (0x1AFF5, "V"), + (0x1AFFC, "X"), + (0x1AFFD, "V"), + ] + + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFFF, "X"), + (0x1B000, "V"), + (0x1B123, "X"), + (0x1B132, "V"), + (0x1B133, "X"), + (0x1B150, "V"), + (0x1B153, "X"), + (0x1B155, "V"), + (0x1B156, "X"), + (0x1B164, "V"), + (0x1B168, "X"), + (0x1B170, "V"), + (0x1B2FC, "X"), + (0x1BC00, "V"), + (0x1BC6B, "X"), + (0x1BC70, "V"), + (0x1BC7D, "X"), + (0x1BC80, "V"), + (0x1BC89, "X"), + (0x1BC90, "V"), + (0x1BC9A, "X"), + (0x1BC9C, "V"), + (0x1BCA0, "I"), + (0x1BCA4, "X"), + (0x1CF00, "V"), + (0x1CF2E, "X"), + (0x1CF30, "V"), + (0x1CF47, "X"), + (0x1CF50, "V"), + (0x1CFC4, "X"), + (0x1D000, "V"), + (0x1D0F6, "X"), + (0x1D100, "V"), + (0x1D127, "X"), + (0x1D129, "V"), + (0x1D15E, "M", "𝅗𝅥"), + (0x1D15F, "M", "𝅘𝅥"), + (0x1D160, "M", "𝅘𝅥𝅮"), + (0x1D161, "M", "𝅘𝅥𝅯"), + (0x1D162, "M", "𝅘𝅥𝅰"), + 
(0x1D163, "M", "𝅘𝅥𝅱"), + (0x1D164, "M", "𝅘𝅥𝅲"), + (0x1D165, "V"), + (0x1D173, "X"), + (0x1D17B, "V"), + (0x1D1BB, "M", "𝆹𝅥"), + (0x1D1BC, "M", "𝆺𝅥"), + (0x1D1BD, "M", "𝆹𝅥𝅮"), + (0x1D1BE, "M", "𝆺𝅥𝅮"), + (0x1D1BF, "M", "𝆹𝅥𝅯"), + (0x1D1C0, "M", "𝆺𝅥𝅯"), + (0x1D1C1, "V"), + (0x1D1EB, "X"), + (0x1D200, "V"), + (0x1D246, "X"), + (0x1D2C0, "V"), + (0x1D2D4, "X"), + (0x1D2E0, "V"), + (0x1D2F4, "X"), + (0x1D300, "V"), + (0x1D357, "X"), + (0x1D360, "V"), + (0x1D379, "X"), + (0x1D400, "M", "a"), + (0x1D401, "M", "b"), + (0x1D402, "M", "c"), + (0x1D403, "M", "d"), + (0x1D404, "M", "e"), + (0x1D405, "M", "f"), + (0x1D406, "M", "g"), + (0x1D407, "M", "h"), + (0x1D408, "M", "i"), + (0x1D409, "M", "j"), + (0x1D40A, "M", "k"), + (0x1D40B, "M", "l"), + (0x1D40C, "M", "m"), + (0x1D40D, "M", "n"), + (0x1D40E, "M", "o"), + (0x1D40F, "M", "p"), + (0x1D410, "M", "q"), + (0x1D411, "M", "r"), + (0x1D412, "M", "s"), + (0x1D413, "M", "t"), + (0x1D414, "M", "u"), + (0x1D415, "M", "v"), + (0x1D416, "M", "w"), + (0x1D417, "M", "x"), + (0x1D418, "M", "y"), + (0x1D419, "M", "z"), + (0x1D41A, "M", "a"), + (0x1D41B, "M", "b"), + (0x1D41C, "M", "c"), + (0x1D41D, "M", "d"), + (0x1D41E, "M", "e"), + (0x1D41F, "M", "f"), + (0x1D420, "M", "g"), + (0x1D421, "M", "h"), + (0x1D422, "M", "i"), + (0x1D423, "M", "j"), + (0x1D424, "M", "k"), + ] + + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D425, "M", "l"), + (0x1D426, "M", "m"), + (0x1D427, "M", "n"), + (0x1D428, "M", "o"), + (0x1D429, "M", "p"), + (0x1D42A, "M", "q"), + (0x1D42B, "M", "r"), + (0x1D42C, "M", "s"), + (0x1D42D, "M", "t"), + (0x1D42E, "M", "u"), + (0x1D42F, "M", "v"), + (0x1D430, "M", "w"), + (0x1D431, "M", "x"), + (0x1D432, "M", "y"), + (0x1D433, "M", "z"), + (0x1D434, "M", "a"), + (0x1D435, "M", "b"), + (0x1D436, "M", "c"), + (0x1D437, "M", "d"), + (0x1D438, "M", "e"), + (0x1D439, "M", "f"), + (0x1D43A, "M", "g"), + (0x1D43B, "M", "h"), + (0x1D43C, "M", "i"), + (0x1D43D, "M", "j"), + (0x1D43E, "M", "k"), + (0x1D43F, "M", "l"), + (0x1D440, "M", "m"), + (0x1D441, "M", "n"), + (0x1D442, "M", "o"), + (0x1D443, "M", "p"), + (0x1D444, "M", "q"), + (0x1D445, "M", "r"), + (0x1D446, "M", "s"), + (0x1D447, "M", "t"), + (0x1D448, "M", "u"), + (0x1D449, "M", "v"), + (0x1D44A, "M", "w"), + (0x1D44B, "M", "x"), + (0x1D44C, "M", "y"), + (0x1D44D, "M", "z"), + (0x1D44E, "M", "a"), + (0x1D44F, "M", "b"), + (0x1D450, "M", "c"), + (0x1D451, "M", "d"), + (0x1D452, "M", "e"), + (0x1D453, "M", "f"), + (0x1D454, "M", "g"), + (0x1D455, "X"), + (0x1D456, "M", "i"), + (0x1D457, "M", "j"), + (0x1D458, "M", "k"), + (0x1D459, "M", "l"), + (0x1D45A, "M", "m"), + (0x1D45B, "M", "n"), + (0x1D45C, "M", "o"), + (0x1D45D, "M", "p"), + (0x1D45E, "M", "q"), + (0x1D45F, "M", "r"), + (0x1D460, "M", "s"), + (0x1D461, "M", "t"), + (0x1D462, "M", "u"), + (0x1D463, "M", "v"), + (0x1D464, "M", "w"), + (0x1D465, "M", "x"), + (0x1D466, "M", "y"), + (0x1D467, "M", "z"), + (0x1D468, "M", "a"), + (0x1D469, "M", "b"), + (0x1D46A, "M", "c"), + (0x1D46B, "M", "d"), + (0x1D46C, "M", "e"), + (0x1D46D, "M", "f"), + (0x1D46E, "M", "g"), + (0x1D46F, "M", "h"), + (0x1D470, "M", "i"), + (0x1D471, "M", "j"), + (0x1D472, "M", "k"), + (0x1D473, "M", "l"), + (0x1D474, "M", "m"), + (0x1D475, "M", "n"), + (0x1D476, "M", "o"), + (0x1D477, "M", "p"), + (0x1D478, "M", "q"), + (0x1D479, "M", "r"), + (0x1D47A, "M", "s"), + (0x1D47B, "M", "t"), + (0x1D47C, "M", "u"), + (0x1D47D, "M", "v"), + (0x1D47E, "M", "w"), + (0x1D47F, "M", "x"), + (0x1D480, "M", "y"), + (0x1D481, "M", "z"), + (0x1D482, 
"M", "a"), + (0x1D483, "M", "b"), + (0x1D484, "M", "c"), + (0x1D485, "M", "d"), + (0x1D486, "M", "e"), + (0x1D487, "M", "f"), + (0x1D488, "M", "g"), + ] + + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D489, "M", "h"), + (0x1D48A, "M", "i"), + (0x1D48B, "M", "j"), + (0x1D48C, "M", "k"), + (0x1D48D, "M", "l"), + (0x1D48E, "M", "m"), + (0x1D48F, "M", "n"), + (0x1D490, "M", "o"), + (0x1D491, "M", "p"), + (0x1D492, "M", "q"), + (0x1D493, "M", "r"), + (0x1D494, "M", "s"), + (0x1D495, "M", "t"), + (0x1D496, "M", "u"), + (0x1D497, "M", "v"), + (0x1D498, "M", "w"), + (0x1D499, "M", "x"), + (0x1D49A, "M", "y"), + (0x1D49B, "M", "z"), + (0x1D49C, "M", "a"), + (0x1D49D, "X"), + (0x1D49E, "M", "c"), + (0x1D49F, "M", "d"), + (0x1D4A0, "X"), + (0x1D4A2, "M", "g"), + (0x1D4A3, "X"), + (0x1D4A5, "M", "j"), + (0x1D4A6, "M", "k"), + (0x1D4A7, "X"), + (0x1D4A9, "M", "n"), + (0x1D4AA, "M", "o"), + (0x1D4AB, "M", "p"), + (0x1D4AC, "M", "q"), + (0x1D4AD, "X"), + (0x1D4AE, "M", "s"), + (0x1D4AF, "M", "t"), + (0x1D4B0, "M", "u"), + (0x1D4B1, "M", "v"), + (0x1D4B2, "M", "w"), + (0x1D4B3, "M", "x"), + (0x1D4B4, "M", "y"), + (0x1D4B5, "M", "z"), + (0x1D4B6, "M", "a"), + (0x1D4B7, "M", "b"), + (0x1D4B8, "M", "c"), + (0x1D4B9, "M", "d"), + (0x1D4BA, "X"), + (0x1D4BB, "M", "f"), + (0x1D4BC, "X"), + (0x1D4BD, "M", "h"), + (0x1D4BE, "M", "i"), + (0x1D4BF, "M", "j"), + (0x1D4C0, "M", "k"), + (0x1D4C1, "M", "l"), + (0x1D4C2, "M", "m"), + (0x1D4C3, "M", "n"), + (0x1D4C4, "X"), + (0x1D4C5, "M", "p"), + (0x1D4C6, "M", "q"), + (0x1D4C7, "M", "r"), + (0x1D4C8, "M", "s"), + (0x1D4C9, "M", "t"), + (0x1D4CA, "M", "u"), + (0x1D4CB, "M", "v"), + (0x1D4CC, "M", "w"), + (0x1D4CD, "M", "x"), + (0x1D4CE, "M", "y"), + (0x1D4CF, "M", "z"), + (0x1D4D0, "M", "a"), + (0x1D4D1, "M", "b"), + (0x1D4D2, "M", "c"), + (0x1D4D3, "M", "d"), + (0x1D4D4, "M", "e"), + (0x1D4D5, "M", "f"), + (0x1D4D6, "M", "g"), + (0x1D4D7, "M", "h"), + (0x1D4D8, "M", "i"), + (0x1D4D9, "M", "j"), + (0x1D4DA, "M", "k"), + (0x1D4DB, "M", "l"), + (0x1D4DC, "M", "m"), + (0x1D4DD, "M", "n"), + (0x1D4DE, "M", "o"), + (0x1D4DF, "M", "p"), + (0x1D4E0, "M", "q"), + (0x1D4E1, "M", "r"), + (0x1D4E2, "M", "s"), + (0x1D4E3, "M", "t"), + (0x1D4E4, "M", "u"), + (0x1D4E5, "M", "v"), + (0x1D4E6, "M", "w"), + (0x1D4E7, "M", "x"), + (0x1D4E8, "M", "y"), + (0x1D4E9, "M", "z"), + (0x1D4EA, "M", "a"), + (0x1D4EB, "M", "b"), + (0x1D4EC, "M", "c"), + (0x1D4ED, "M", "d"), + (0x1D4EE, "M", "e"), + (0x1D4EF, "M", "f"), + ] + + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4F0, "M", "g"), + (0x1D4F1, "M", "h"), + (0x1D4F2, "M", "i"), + (0x1D4F3, "M", "j"), + (0x1D4F4, "M", "k"), + (0x1D4F5, "M", "l"), + (0x1D4F6, "M", "m"), + (0x1D4F7, "M", "n"), + (0x1D4F8, "M", "o"), + (0x1D4F9, "M", "p"), + (0x1D4FA, "M", "q"), + (0x1D4FB, "M", "r"), + (0x1D4FC, "M", "s"), + (0x1D4FD, "M", "t"), + (0x1D4FE, "M", "u"), + (0x1D4FF, "M", "v"), + (0x1D500, "M", "w"), + (0x1D501, "M", "x"), + (0x1D502, "M", "y"), + (0x1D503, "M", "z"), + (0x1D504, "M", "a"), + (0x1D505, "M", "b"), + (0x1D506, "X"), + (0x1D507, "M", "d"), + (0x1D508, "M", "e"), + (0x1D509, "M", "f"), + (0x1D50A, "M", "g"), + (0x1D50B, "X"), + (0x1D50D, "M", "j"), + (0x1D50E, "M", "k"), + (0x1D50F, "M", "l"), + (0x1D510, "M", "m"), + (0x1D511, "M", "n"), + (0x1D512, "M", "o"), + (0x1D513, "M", "p"), + (0x1D514, "M", "q"), + (0x1D515, "X"), + (0x1D516, "M", "s"), + (0x1D517, "M", "t"), + (0x1D518, "M", "u"), + (0x1D519, "M", "v"), + (0x1D51A, "M", "w"), + (0x1D51B, "M", 
"x"), + (0x1D51C, "M", "y"), + (0x1D51D, "X"), + (0x1D51E, "M", "a"), + (0x1D51F, "M", "b"), + (0x1D520, "M", "c"), + (0x1D521, "M", "d"), + (0x1D522, "M", "e"), + (0x1D523, "M", "f"), + (0x1D524, "M", "g"), + (0x1D525, "M", "h"), + (0x1D526, "M", "i"), + (0x1D527, "M", "j"), + (0x1D528, "M", "k"), + (0x1D529, "M", "l"), + (0x1D52A, "M", "m"), + (0x1D52B, "M", "n"), + (0x1D52C, "M", "o"), + (0x1D52D, "M", "p"), + (0x1D52E, "M", "q"), + (0x1D52F, "M", "r"), + (0x1D530, "M", "s"), + (0x1D531, "M", "t"), + (0x1D532, "M", "u"), + (0x1D533, "M", "v"), + (0x1D534, "M", "w"), + (0x1D535, "M", "x"), + (0x1D536, "M", "y"), + (0x1D537, "M", "z"), + (0x1D538, "M", "a"), + (0x1D539, "M", "b"), + (0x1D53A, "X"), + (0x1D53B, "M", "d"), + (0x1D53C, "M", "e"), + (0x1D53D, "M", "f"), + (0x1D53E, "M", "g"), + (0x1D53F, "X"), + (0x1D540, "M", "i"), + (0x1D541, "M", "j"), + (0x1D542, "M", "k"), + (0x1D543, "M", "l"), + (0x1D544, "M", "m"), + (0x1D545, "X"), + (0x1D546, "M", "o"), + (0x1D547, "X"), + (0x1D54A, "M", "s"), + (0x1D54B, "M", "t"), + (0x1D54C, "M", "u"), + (0x1D54D, "M", "v"), + (0x1D54E, "M", "w"), + (0x1D54F, "M", "x"), + (0x1D550, "M", "y"), + (0x1D551, "X"), + (0x1D552, "M", "a"), + (0x1D553, "M", "b"), + (0x1D554, "M", "c"), + (0x1D555, "M", "d"), + (0x1D556, "M", "e"), + ] + + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D557, "M", "f"), + (0x1D558, "M", "g"), + (0x1D559, "M", "h"), + (0x1D55A, "M", "i"), + (0x1D55B, "M", "j"), + (0x1D55C, "M", "k"), + (0x1D55D, "M", "l"), + (0x1D55E, "M", "m"), + (0x1D55F, "M", "n"), + (0x1D560, "M", "o"), + (0x1D561, "M", "p"), + (0x1D562, "M", "q"), + (0x1D563, "M", "r"), + (0x1D564, "M", "s"), + (0x1D565, "M", "t"), + (0x1D566, "M", "u"), + (0x1D567, "M", "v"), + (0x1D568, "M", "w"), + (0x1D569, "M", "x"), + (0x1D56A, "M", "y"), + (0x1D56B, "M", "z"), + (0x1D56C, "M", "a"), + (0x1D56D, "M", "b"), + (0x1D56E, "M", "c"), + (0x1D56F, "M", "d"), + (0x1D570, "M", "e"), + (0x1D571, "M", "f"), + (0x1D572, "M", "g"), + (0x1D573, "M", "h"), + (0x1D574, "M", "i"), + (0x1D575, "M", "j"), + (0x1D576, "M", "k"), + (0x1D577, "M", "l"), + (0x1D578, "M", "m"), + (0x1D579, "M", "n"), + (0x1D57A, "M", "o"), + (0x1D57B, "M", "p"), + (0x1D57C, "M", "q"), + (0x1D57D, "M", "r"), + (0x1D57E, "M", "s"), + (0x1D57F, "M", "t"), + (0x1D580, "M", "u"), + (0x1D581, "M", "v"), + (0x1D582, "M", "w"), + (0x1D583, "M", "x"), + (0x1D584, "M", "y"), + (0x1D585, "M", "z"), + (0x1D586, "M", "a"), + (0x1D587, "M", "b"), + (0x1D588, "M", "c"), + (0x1D589, "M", "d"), + (0x1D58A, "M", "e"), + (0x1D58B, "M", "f"), + (0x1D58C, "M", "g"), + (0x1D58D, "M", "h"), + (0x1D58E, "M", "i"), + (0x1D58F, "M", "j"), + (0x1D590, "M", "k"), + (0x1D591, "M", "l"), + (0x1D592, "M", "m"), + (0x1D593, "M", "n"), + (0x1D594, "M", "o"), + (0x1D595, "M", "p"), + (0x1D596, "M", "q"), + (0x1D597, "M", "r"), + (0x1D598, "M", "s"), + (0x1D599, "M", "t"), + (0x1D59A, "M", "u"), + (0x1D59B, "M", "v"), + (0x1D59C, "M", "w"), + (0x1D59D, "M", "x"), + (0x1D59E, "M", "y"), + (0x1D59F, "M", "z"), + (0x1D5A0, "M", "a"), + (0x1D5A1, "M", "b"), + (0x1D5A2, "M", "c"), + (0x1D5A3, "M", "d"), + (0x1D5A4, "M", "e"), + (0x1D5A5, "M", "f"), + (0x1D5A6, "M", "g"), + (0x1D5A7, "M", "h"), + (0x1D5A8, "M", "i"), + (0x1D5A9, "M", "j"), + (0x1D5AA, "M", "k"), + (0x1D5AB, "M", "l"), + (0x1D5AC, "M", "m"), + (0x1D5AD, "M", "n"), + (0x1D5AE, "M", "o"), + (0x1D5AF, "M", "p"), + (0x1D5B0, "M", "q"), + (0x1D5B1, "M", "r"), + (0x1D5B2, "M", "s"), + (0x1D5B3, "M", "t"), + (0x1D5B4, "M", "u"), + (0x1D5B5, "M", 
"v"), + (0x1D5B6, "M", "w"), + (0x1D5B7, "M", "x"), + (0x1D5B8, "M", "y"), + (0x1D5B9, "M", "z"), + (0x1D5BA, "M", "a"), + ] + + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5BB, "M", "b"), + (0x1D5BC, "M", "c"), + (0x1D5BD, "M", "d"), + (0x1D5BE, "M", "e"), + (0x1D5BF, "M", "f"), + (0x1D5C0, "M", "g"), + (0x1D5C1, "M", "h"), + (0x1D5C2, "M", "i"), + (0x1D5C3, "M", "j"), + (0x1D5C4, "M", "k"), + (0x1D5C5, "M", "l"), + (0x1D5C6, "M", "m"), + (0x1D5C7, "M", "n"), + (0x1D5C8, "M", "o"), + (0x1D5C9, "M", "p"), + (0x1D5CA, "M", "q"), + (0x1D5CB, "M", "r"), + (0x1D5CC, "M", "s"), + (0x1D5CD, "M", "t"), + (0x1D5CE, "M", "u"), + (0x1D5CF, "M", "v"), + (0x1D5D0, "M", "w"), + (0x1D5D1, "M", "x"), + (0x1D5D2, "M", "y"), + (0x1D5D3, "M", "z"), + (0x1D5D4, "M", "a"), + (0x1D5D5, "M", "b"), + (0x1D5D6, "M", "c"), + (0x1D5D7, "M", "d"), + (0x1D5D8, "M", "e"), + (0x1D5D9, "M", "f"), + (0x1D5DA, "M", "g"), + (0x1D5DB, "M", "h"), + (0x1D5DC, "M", "i"), + (0x1D5DD, "M", "j"), + (0x1D5DE, "M", "k"), + (0x1D5DF, "M", "l"), + (0x1D5E0, "M", "m"), + (0x1D5E1, "M", "n"), + (0x1D5E2, "M", "o"), + (0x1D5E3, "M", "p"), + (0x1D5E4, "M", "q"), + (0x1D5E5, "M", "r"), + (0x1D5E6, "M", "s"), + (0x1D5E7, "M", "t"), + (0x1D5E8, "M", "u"), + (0x1D5E9, "M", "v"), + (0x1D5EA, "M", "w"), + (0x1D5EB, "M", "x"), + (0x1D5EC, "M", "y"), + (0x1D5ED, "M", "z"), + (0x1D5EE, "M", "a"), + (0x1D5EF, "M", "b"), + (0x1D5F0, "M", "c"), + (0x1D5F1, "M", "d"), + (0x1D5F2, "M", "e"), + (0x1D5F3, "M", "f"), + (0x1D5F4, "M", "g"), + (0x1D5F5, "M", "h"), + (0x1D5F6, "M", "i"), + (0x1D5F7, "M", "j"), + (0x1D5F8, "M", "k"), + (0x1D5F9, "M", "l"), + (0x1D5FA, "M", "m"), + (0x1D5FB, "M", "n"), + (0x1D5FC, "M", "o"), + (0x1D5FD, "M", "p"), + (0x1D5FE, "M", "q"), + (0x1D5FF, "M", "r"), + (0x1D600, "M", "s"), + (0x1D601, "M", "t"), + (0x1D602, "M", "u"), + (0x1D603, "M", "v"), + (0x1D604, "M", "w"), + (0x1D605, "M", "x"), + (0x1D606, "M", "y"), + (0x1D607, "M", "z"), + (0x1D608, "M", "a"), + (0x1D609, "M", "b"), + (0x1D60A, "M", "c"), + (0x1D60B, "M", "d"), + (0x1D60C, "M", "e"), + (0x1D60D, "M", "f"), + (0x1D60E, "M", "g"), + (0x1D60F, "M", "h"), + (0x1D610, "M", "i"), + (0x1D611, "M", "j"), + (0x1D612, "M", "k"), + (0x1D613, "M", "l"), + (0x1D614, "M", "m"), + (0x1D615, "M", "n"), + (0x1D616, "M", "o"), + (0x1D617, "M", "p"), + (0x1D618, "M", "q"), + (0x1D619, "M", "r"), + (0x1D61A, "M", "s"), + (0x1D61B, "M", "t"), + (0x1D61C, "M", "u"), + (0x1D61D, "M", "v"), + (0x1D61E, "M", "w"), + ] + + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D61F, "M", "x"), + (0x1D620, "M", "y"), + (0x1D621, "M", "z"), + (0x1D622, "M", "a"), + (0x1D623, "M", "b"), + (0x1D624, "M", "c"), + (0x1D625, "M", "d"), + (0x1D626, "M", "e"), + (0x1D627, "M", "f"), + (0x1D628, "M", "g"), + (0x1D629, "M", "h"), + (0x1D62A, "M", "i"), + (0x1D62B, "M", "j"), + (0x1D62C, "M", "k"), + (0x1D62D, "M", "l"), + (0x1D62E, "M", "m"), + (0x1D62F, "M", "n"), + (0x1D630, "M", "o"), + (0x1D631, "M", "p"), + (0x1D632, "M", "q"), + (0x1D633, "M", "r"), + (0x1D634, "M", "s"), + (0x1D635, "M", "t"), + (0x1D636, "M", "u"), + (0x1D637, "M", "v"), + (0x1D638, "M", "w"), + (0x1D639, "M", "x"), + (0x1D63A, "M", "y"), + (0x1D63B, "M", "z"), + (0x1D63C, "M", "a"), + (0x1D63D, "M", "b"), + (0x1D63E, "M", "c"), + (0x1D63F, "M", "d"), + (0x1D640, "M", "e"), + (0x1D641, "M", "f"), + (0x1D642, "M", "g"), + (0x1D643, "M", "h"), + (0x1D644, "M", "i"), + (0x1D645, "M", "j"), + (0x1D646, "M", "k"), + (0x1D647, "M", "l"), + (0x1D648, 
"M", "m"), + (0x1D649, "M", "n"), + (0x1D64A, "M", "o"), + (0x1D64B, "M", "p"), + (0x1D64C, "M", "q"), + (0x1D64D, "M", "r"), + (0x1D64E, "M", "s"), + (0x1D64F, "M", "t"), + (0x1D650, "M", "u"), + (0x1D651, "M", "v"), + (0x1D652, "M", "w"), + (0x1D653, "M", "x"), + (0x1D654, "M", "y"), + (0x1D655, "M", "z"), + (0x1D656, "M", "a"), + (0x1D657, "M", "b"), + (0x1D658, "M", "c"), + (0x1D659, "M", "d"), + (0x1D65A, "M", "e"), + (0x1D65B, "M", "f"), + (0x1D65C, "M", "g"), + (0x1D65D, "M", "h"), + (0x1D65E, "M", "i"), + (0x1D65F, "M", "j"), + (0x1D660, "M", "k"), + (0x1D661, "M", "l"), + (0x1D662, "M", "m"), + (0x1D663, "M", "n"), + (0x1D664, "M", "o"), + (0x1D665, "M", "p"), + (0x1D666, "M", "q"), + (0x1D667, "M", "r"), + (0x1D668, "M", "s"), + (0x1D669, "M", "t"), + (0x1D66A, "M", "u"), + (0x1D66B, "M", "v"), + (0x1D66C, "M", "w"), + (0x1D66D, "M", "x"), + (0x1D66E, "M", "y"), + (0x1D66F, "M", "z"), + (0x1D670, "M", "a"), + (0x1D671, "M", "b"), + (0x1D672, "M", "c"), + (0x1D673, "M", "d"), + (0x1D674, "M", "e"), + (0x1D675, "M", "f"), + (0x1D676, "M", "g"), + (0x1D677, "M", "h"), + (0x1D678, "M", "i"), + (0x1D679, "M", "j"), + (0x1D67A, "M", "k"), + (0x1D67B, "M", "l"), + (0x1D67C, "M", "m"), + (0x1D67D, "M", "n"), + (0x1D67E, "M", "o"), + (0x1D67F, "M", "p"), + (0x1D680, "M", "q"), + (0x1D681, "M", "r"), + (0x1D682, "M", "s"), + ] + + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D683, "M", "t"), + (0x1D684, "M", "u"), + (0x1D685, "M", "v"), + (0x1D686, "M", "w"), + (0x1D687, "M", "x"), + (0x1D688, "M", "y"), + (0x1D689, "M", "z"), + (0x1D68A, "M", "a"), + (0x1D68B, "M", "b"), + (0x1D68C, "M", "c"), + (0x1D68D, "M", "d"), + (0x1D68E, "M", "e"), + (0x1D68F, "M", "f"), + (0x1D690, "M", "g"), + (0x1D691, "M", "h"), + (0x1D692, "M", "i"), + (0x1D693, "M", "j"), + (0x1D694, "M", "k"), + (0x1D695, "M", "l"), + (0x1D696, "M", "m"), + (0x1D697, "M", "n"), + (0x1D698, "M", "o"), + (0x1D699, "M", "p"), + (0x1D69A, "M", "q"), + (0x1D69B, "M", "r"), + (0x1D69C, "M", "s"), + (0x1D69D, "M", "t"), + (0x1D69E, "M", "u"), + (0x1D69F, "M", "v"), + (0x1D6A0, "M", "w"), + (0x1D6A1, "M", "x"), + (0x1D6A2, "M", "y"), + (0x1D6A3, "M", "z"), + (0x1D6A4, "M", "ı"), + (0x1D6A5, "M", "ȷ"), + (0x1D6A6, "X"), + (0x1D6A8, "M", "α"), + (0x1D6A9, "M", "β"), + (0x1D6AA, "M", "γ"), + (0x1D6AB, "M", "δ"), + (0x1D6AC, "M", "ε"), + (0x1D6AD, "M", "ζ"), + (0x1D6AE, "M", "η"), + (0x1D6AF, "M", "θ"), + (0x1D6B0, "M", "ι"), + (0x1D6B1, "M", "κ"), + (0x1D6B2, "M", "λ"), + (0x1D6B3, "M", "μ"), + (0x1D6B4, "M", "ν"), + (0x1D6B5, "M", "ξ"), + (0x1D6B6, "M", "ο"), + (0x1D6B7, "M", "π"), + (0x1D6B8, "M", "ρ"), + (0x1D6B9, "M", "θ"), + (0x1D6BA, "M", "σ"), + (0x1D6BB, "M", "τ"), + (0x1D6BC, "M", "υ"), + (0x1D6BD, "M", "φ"), + (0x1D6BE, "M", "χ"), + (0x1D6BF, "M", "ψ"), + (0x1D6C0, "M", "ω"), + (0x1D6C1, "M", "∇"), + (0x1D6C2, "M", "α"), + (0x1D6C3, "M", "β"), + (0x1D6C4, "M", "γ"), + (0x1D6C5, "M", "δ"), + (0x1D6C6, "M", "ε"), + (0x1D6C7, "M", "ζ"), + (0x1D6C8, "M", "η"), + (0x1D6C9, "M", "θ"), + (0x1D6CA, "M", "ι"), + (0x1D6CB, "M", "κ"), + (0x1D6CC, "M", "λ"), + (0x1D6CD, "M", "μ"), + (0x1D6CE, "M", "ν"), + (0x1D6CF, "M", "ξ"), + (0x1D6D0, "M", "ο"), + (0x1D6D1, "M", "π"), + (0x1D6D2, "M", "ρ"), + (0x1D6D3, "M", "σ"), + (0x1D6D5, "M", "τ"), + (0x1D6D6, "M", "υ"), + (0x1D6D7, "M", "φ"), + (0x1D6D8, "M", "χ"), + (0x1D6D9, "M", "ψ"), + (0x1D6DA, "M", "ω"), + (0x1D6DB, "M", "∂"), + (0x1D6DC, "M", "ε"), + (0x1D6DD, "M", "θ"), + (0x1D6DE, "M", "κ"), + (0x1D6DF, "M", "φ"), + (0x1D6E0, "M", "ρ"), + 
(0x1D6E1, "M", "π"), + (0x1D6E2, "M", "α"), + (0x1D6E3, "M", "β"), + (0x1D6E4, "M", "γ"), + (0x1D6E5, "M", "δ"), + (0x1D6E6, "M", "ε"), + (0x1D6E7, "M", "ζ"), + (0x1D6E8, "M", "η"), + ] + + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6E9, "M", "θ"), + (0x1D6EA, "M", "ι"), + (0x1D6EB, "M", "κ"), + (0x1D6EC, "M", "λ"), + (0x1D6ED, "M", "μ"), + (0x1D6EE, "M", "ν"), + (0x1D6EF, "M", "ξ"), + (0x1D6F0, "M", "ο"), + (0x1D6F1, "M", "π"), + (0x1D6F2, "M", "ρ"), + (0x1D6F3, "M", "θ"), + (0x1D6F4, "M", "σ"), + (0x1D6F5, "M", "τ"), + (0x1D6F6, "M", "υ"), + (0x1D6F7, "M", "φ"), + (0x1D6F8, "M", "χ"), + (0x1D6F9, "M", "ψ"), + (0x1D6FA, "M", "ω"), + (0x1D6FB, "M", "∇"), + (0x1D6FC, "M", "α"), + (0x1D6FD, "M", "β"), + (0x1D6FE, "M", "γ"), + (0x1D6FF, "M", "δ"), + (0x1D700, "M", "ε"), + (0x1D701, "M", "ζ"), + (0x1D702, "M", "η"), + (0x1D703, "M", "θ"), + (0x1D704, "M", "ι"), + (0x1D705, "M", "κ"), + (0x1D706, "M", "λ"), + (0x1D707, "M", "μ"), + (0x1D708, "M", "ν"), + (0x1D709, "M", "ξ"), + (0x1D70A, "M", "ο"), + (0x1D70B, "M", "π"), + (0x1D70C, "M", "ρ"), + (0x1D70D, "M", "σ"), + (0x1D70F, "M", "τ"), + (0x1D710, "M", "υ"), + (0x1D711, "M", "φ"), + (0x1D712, "M", "χ"), + (0x1D713, "M", "ψ"), + (0x1D714, "M", "ω"), + (0x1D715, "M", "∂"), + (0x1D716, "M", "ε"), + (0x1D717, "M", "θ"), + (0x1D718, "M", "κ"), + (0x1D719, "M", "φ"), + (0x1D71A, "M", "ρ"), + (0x1D71B, "M", "π"), + (0x1D71C, "M", "α"), + (0x1D71D, "M", "β"), + (0x1D71E, "M", "γ"), + (0x1D71F, "M", "δ"), + (0x1D720, "M", "ε"), + (0x1D721, "M", "ζ"), + (0x1D722, "M", "η"), + (0x1D723, "M", "θ"), + (0x1D724, "M", "ι"), + (0x1D725, "M", "κ"), + (0x1D726, "M", "λ"), + (0x1D727, "M", "μ"), + (0x1D728, "M", "ν"), + (0x1D729, "M", "ξ"), + (0x1D72A, "M", "ο"), + (0x1D72B, "M", "π"), + (0x1D72C, "M", "ρ"), + (0x1D72D, "M", "θ"), + (0x1D72E, "M", "σ"), + (0x1D72F, "M", "τ"), + (0x1D730, "M", "υ"), + (0x1D731, "M", "φ"), + (0x1D732, "M", "χ"), + (0x1D733, "M", "ψ"), + (0x1D734, "M", "ω"), + (0x1D735, "M", "∇"), + (0x1D736, "M", "α"), + (0x1D737, "M", "β"), + (0x1D738, "M", "γ"), + (0x1D739, "M", "δ"), + (0x1D73A, "M", "ε"), + (0x1D73B, "M", "ζ"), + (0x1D73C, "M", "η"), + (0x1D73D, "M", "θ"), + (0x1D73E, "M", "ι"), + (0x1D73F, "M", "κ"), + (0x1D740, "M", "λ"), + (0x1D741, "M", "μ"), + (0x1D742, "M", "ν"), + (0x1D743, "M", "ξ"), + (0x1D744, "M", "ο"), + (0x1D745, "M", "π"), + (0x1D746, "M", "ρ"), + (0x1D747, "M", "σ"), + (0x1D749, "M", "τ"), + (0x1D74A, "M", "υ"), + (0x1D74B, "M", "φ"), + (0x1D74C, "M", "χ"), + (0x1D74D, "M", "ψ"), + (0x1D74E, "M", "ω"), + ] + + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D74F, "M", "∂"), + (0x1D750, "M", "ε"), + (0x1D751, "M", "θ"), + (0x1D752, "M", "κ"), + (0x1D753, "M", "φ"), + (0x1D754, "M", "ρ"), + (0x1D755, "M", "π"), + (0x1D756, "M", "α"), + (0x1D757, "M", "β"), + (0x1D758, "M", "γ"), + (0x1D759, "M", "δ"), + (0x1D75A, "M", "ε"), + (0x1D75B, "M", "ζ"), + (0x1D75C, "M", "η"), + (0x1D75D, "M", "θ"), + (0x1D75E, "M", "ι"), + (0x1D75F, "M", "κ"), + (0x1D760, "M", "λ"), + (0x1D761, "M", "μ"), + (0x1D762, "M", "ν"), + (0x1D763, "M", "ξ"), + (0x1D764, "M", "ο"), + (0x1D765, "M", "π"), + (0x1D766, "M", "ρ"), + (0x1D767, "M", "θ"), + (0x1D768, "M", "σ"), + (0x1D769, "M", "τ"), + (0x1D76A, "M", "υ"), + (0x1D76B, "M", "φ"), + (0x1D76C, "M", "χ"), + (0x1D76D, "M", "ψ"), + (0x1D76E, "M", "ω"), + (0x1D76F, "M", "∇"), + (0x1D770, "M", "α"), + (0x1D771, "M", "β"), + (0x1D772, "M", "γ"), + (0x1D773, "M", "δ"), + (0x1D774, "M", "ε"), + (0x1D775, "M", 
"ζ"), + (0x1D776, "M", "η"), + (0x1D777, "M", "θ"), + (0x1D778, "M", "ι"), + (0x1D779, "M", "κ"), + (0x1D77A, "M", "λ"), + (0x1D77B, "M", "μ"), + (0x1D77C, "M", "ν"), + (0x1D77D, "M", "ξ"), + (0x1D77E, "M", "ο"), + (0x1D77F, "M", "π"), + (0x1D780, "M", "ρ"), + (0x1D781, "M", "σ"), + (0x1D783, "M", "τ"), + (0x1D784, "M", "υ"), + (0x1D785, "M", "φ"), + (0x1D786, "M", "χ"), + (0x1D787, "M", "ψ"), + (0x1D788, "M", "ω"), + (0x1D789, "M", "∂"), + (0x1D78A, "M", "ε"), + (0x1D78B, "M", "θ"), + (0x1D78C, "M", "κ"), + (0x1D78D, "M", "φ"), + (0x1D78E, "M", "ρ"), + (0x1D78F, "M", "π"), + (0x1D790, "M", "α"), + (0x1D791, "M", "β"), + (0x1D792, "M", "γ"), + (0x1D793, "M", "δ"), + (0x1D794, "M", "ε"), + (0x1D795, "M", "ζ"), + (0x1D796, "M", "η"), + (0x1D797, "M", "θ"), + (0x1D798, "M", "ι"), + (0x1D799, "M", "κ"), + (0x1D79A, "M", "λ"), + (0x1D79B, "M", "μ"), + (0x1D79C, "M", "ν"), + (0x1D79D, "M", "ξ"), + (0x1D79E, "M", "ο"), + (0x1D79F, "M", "π"), + (0x1D7A0, "M", "ρ"), + (0x1D7A1, "M", "θ"), + (0x1D7A2, "M", "σ"), + (0x1D7A3, "M", "τ"), + (0x1D7A4, "M", "υ"), + (0x1D7A5, "M", "φ"), + (0x1D7A6, "M", "χ"), + (0x1D7A7, "M", "ψ"), + (0x1D7A8, "M", "ω"), + (0x1D7A9, "M", "∇"), + (0x1D7AA, "M", "α"), + (0x1D7AB, "M", "β"), + (0x1D7AC, "M", "γ"), + (0x1D7AD, "M", "δ"), + (0x1D7AE, "M", "ε"), + (0x1D7AF, "M", "ζ"), + (0x1D7B0, "M", "η"), + (0x1D7B1, "M", "θ"), + (0x1D7B2, "M", "ι"), + (0x1D7B3, "M", "κ"), + ] + + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7B4, "M", "λ"), + (0x1D7B5, "M", "μ"), + (0x1D7B6, "M", "ν"), + (0x1D7B7, "M", "ξ"), + (0x1D7B8, "M", "ο"), + (0x1D7B9, "M", "π"), + (0x1D7BA, "M", "ρ"), + (0x1D7BB, "M", "σ"), + (0x1D7BD, "M", "τ"), + (0x1D7BE, "M", "υ"), + (0x1D7BF, "M", "φ"), + (0x1D7C0, "M", "χ"), + (0x1D7C1, "M", "ψ"), + (0x1D7C2, "M", "ω"), + (0x1D7C3, "M", "∂"), + (0x1D7C4, "M", "ε"), + (0x1D7C5, "M", "θ"), + (0x1D7C6, "M", "κ"), + (0x1D7C7, "M", "φ"), + (0x1D7C8, "M", "ρ"), + (0x1D7C9, "M", "π"), + (0x1D7CA, "M", "ϝ"), + (0x1D7CC, "X"), + (0x1D7CE, "M", "0"), + (0x1D7CF, "M", "1"), + (0x1D7D0, "M", "2"), + (0x1D7D1, "M", "3"), + (0x1D7D2, "M", "4"), + (0x1D7D3, "M", "5"), + (0x1D7D4, "M", "6"), + (0x1D7D5, "M", "7"), + (0x1D7D6, "M", "8"), + (0x1D7D7, "M", "9"), + (0x1D7D8, "M", "0"), + (0x1D7D9, "M", "1"), + (0x1D7DA, "M", "2"), + (0x1D7DB, "M", "3"), + (0x1D7DC, "M", "4"), + (0x1D7DD, "M", "5"), + (0x1D7DE, "M", "6"), + (0x1D7DF, "M", "7"), + (0x1D7E0, "M", "8"), + (0x1D7E1, "M", "9"), + (0x1D7E2, "M", "0"), + (0x1D7E3, "M", "1"), + (0x1D7E4, "M", "2"), + (0x1D7E5, "M", "3"), + (0x1D7E6, "M", "4"), + (0x1D7E7, "M", "5"), + (0x1D7E8, "M", "6"), + (0x1D7E9, "M", "7"), + (0x1D7EA, "M", "8"), + (0x1D7EB, "M", "9"), + (0x1D7EC, "M", "0"), + (0x1D7ED, "M", "1"), + (0x1D7EE, "M", "2"), + (0x1D7EF, "M", "3"), + (0x1D7F0, "M", "4"), + (0x1D7F1, "M", "5"), + (0x1D7F2, "M", "6"), + (0x1D7F3, "M", "7"), + (0x1D7F4, "M", "8"), + (0x1D7F5, "M", "9"), + (0x1D7F6, "M", "0"), + (0x1D7F7, "M", "1"), + (0x1D7F8, "M", "2"), + (0x1D7F9, "M", "3"), + (0x1D7FA, "M", "4"), + (0x1D7FB, "M", "5"), + (0x1D7FC, "M", "6"), + (0x1D7FD, "M", "7"), + (0x1D7FE, "M", "8"), + (0x1D7FF, "M", "9"), + (0x1D800, "V"), + (0x1DA8C, "X"), + (0x1DA9B, "V"), + (0x1DAA0, "X"), + (0x1DAA1, "V"), + (0x1DAB0, "X"), + (0x1DF00, "V"), + (0x1DF1F, "X"), + (0x1DF25, "V"), + (0x1DF2B, "X"), + (0x1E000, "V"), + (0x1E007, "X"), + (0x1E008, "V"), + (0x1E019, "X"), + (0x1E01B, "V"), + (0x1E022, "X"), + (0x1E023, "V"), + (0x1E025, "X"), + (0x1E026, "V"), + (0x1E02B, "X"), + (0x1E030, "M", "а"), 
+ (0x1E031, "M", "б"), + (0x1E032, "M", "в"), + (0x1E033, "M", "г"), + (0x1E034, "M", "д"), + (0x1E035, "M", "е"), + (0x1E036, "M", "ж"), + ] + + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E037, "M", "з"), + (0x1E038, "M", "и"), + (0x1E039, "M", "к"), + (0x1E03A, "M", "л"), + (0x1E03B, "M", "м"), + (0x1E03C, "M", "о"), + (0x1E03D, "M", "п"), + (0x1E03E, "M", "р"), + (0x1E03F, "M", "с"), + (0x1E040, "M", "т"), + (0x1E041, "M", "у"), + (0x1E042, "M", "ф"), + (0x1E043, "M", "х"), + (0x1E044, "M", "ц"), + (0x1E045, "M", "ч"), + (0x1E046, "M", "ш"), + (0x1E047, "M", "ы"), + (0x1E048, "M", "э"), + (0x1E049, "M", "ю"), + (0x1E04A, "M", "ꚉ"), + (0x1E04B, "M", "ә"), + (0x1E04C, "M", "і"), + (0x1E04D, "M", "ј"), + (0x1E04E, "M", "ө"), + (0x1E04F, "M", "ү"), + (0x1E050, "M", "ӏ"), + (0x1E051, "M", "а"), + (0x1E052, "M", "б"), + (0x1E053, "M", "в"), + (0x1E054, "M", "г"), + (0x1E055, "M", "д"), + (0x1E056, "M", "е"), + (0x1E057, "M", "ж"), + (0x1E058, "M", "з"), + (0x1E059, "M", "и"), + (0x1E05A, "M", "к"), + (0x1E05B, "M", "л"), + (0x1E05C, "M", "о"), + (0x1E05D, "M", "п"), + (0x1E05E, "M", "с"), + (0x1E05F, "M", "у"), + (0x1E060, "M", "ф"), + (0x1E061, "M", "х"), + (0x1E062, "M", "ц"), + (0x1E063, "M", "ч"), + (0x1E064, "M", "ш"), + (0x1E065, "M", "ъ"), + (0x1E066, "M", "ы"), + (0x1E067, "M", "ґ"), + (0x1E068, "M", "і"), + (0x1E069, "M", "ѕ"), + (0x1E06A, "M", "џ"), + (0x1E06B, "M", "ҫ"), + (0x1E06C, "M", "ꙑ"), + (0x1E06D, "M", "ұ"), + (0x1E06E, "X"), + (0x1E08F, "V"), + (0x1E090, "X"), + (0x1E100, "V"), + (0x1E12D, "X"), + (0x1E130, "V"), + (0x1E13E, "X"), + (0x1E140, "V"), + (0x1E14A, "X"), + (0x1E14E, "V"), + (0x1E150, "X"), + (0x1E290, "V"), + (0x1E2AF, "X"), + (0x1E2C0, "V"), + (0x1E2FA, "X"), + (0x1E2FF, "V"), + (0x1E300, "X"), + (0x1E4D0, "V"), + (0x1E4FA, "X"), + (0x1E7E0, "V"), + (0x1E7E7, "X"), + (0x1E7E8, "V"), + (0x1E7EC, "X"), + (0x1E7ED, "V"), + (0x1E7EF, "X"), + (0x1E7F0, "V"), + (0x1E7FF, "X"), + (0x1E800, "V"), + (0x1E8C5, "X"), + (0x1E8C7, "V"), + (0x1E8D7, "X"), + (0x1E900, "M", "𞤢"), + (0x1E901, "M", "𞤣"), + (0x1E902, "M", "𞤤"), + (0x1E903, "M", "𞤥"), + (0x1E904, "M", "𞤦"), + (0x1E905, "M", "𞤧"), + (0x1E906, "M", "𞤨"), + (0x1E907, "M", "𞤩"), + (0x1E908, "M", "𞤪"), + (0x1E909, "M", "𞤫"), + (0x1E90A, "M", "𞤬"), + (0x1E90B, "M", "𞤭"), + (0x1E90C, "M", "𞤮"), + (0x1E90D, "M", "𞤯"), + ] + + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90E, "M", "𞤰"), + (0x1E90F, "M", "𞤱"), + (0x1E910, "M", "𞤲"), + (0x1E911, "M", "𞤳"), + (0x1E912, "M", "𞤴"), + (0x1E913, "M", "𞤵"), + (0x1E914, "M", "𞤶"), + (0x1E915, "M", "𞤷"), + (0x1E916, "M", "𞤸"), + (0x1E917, "M", "𞤹"), + (0x1E918, "M", "𞤺"), + (0x1E919, "M", "𞤻"), + (0x1E91A, "M", "𞤼"), + (0x1E91B, "M", "𞤽"), + (0x1E91C, "M", "𞤾"), + (0x1E91D, "M", "𞤿"), + (0x1E91E, "M", "𞥀"), + (0x1E91F, "M", "𞥁"), + (0x1E920, "M", "𞥂"), + (0x1E921, "M", "𞥃"), + (0x1E922, "V"), + (0x1E94C, "X"), + (0x1E950, "V"), + (0x1E95A, "X"), + (0x1E95E, "V"), + (0x1E960, "X"), + (0x1EC71, "V"), + (0x1ECB5, "X"), + (0x1ED01, "V"), + (0x1ED3E, "X"), + (0x1EE00, "M", "ا"), + (0x1EE01, "M", "ب"), + (0x1EE02, "M", "ج"), + (0x1EE03, "M", "د"), + (0x1EE04, "X"), + (0x1EE05, "M", "و"), + (0x1EE06, "M", "ز"), + (0x1EE07, "M", "ح"), + (0x1EE08, "M", "ط"), + (0x1EE09, "M", "ي"), + (0x1EE0A, "M", "ك"), + (0x1EE0B, "M", "ل"), + (0x1EE0C, "M", "م"), + (0x1EE0D, "M", "ن"), + (0x1EE0E, "M", "س"), + (0x1EE0F, "M", "ع"), + (0x1EE10, "M", "ف"), + (0x1EE11, "M", "ص"), + (0x1EE12, "M", "ق"), + (0x1EE13, "M", 
"ر"), + (0x1EE14, "M", "ش"), + (0x1EE15, "M", "ت"), + (0x1EE16, "M", "ث"), + (0x1EE17, "M", "خ"), + (0x1EE18, "M", "ذ"), + (0x1EE19, "M", "ض"), + (0x1EE1A, "M", "ظ"), + (0x1EE1B, "M", "غ"), + (0x1EE1C, "M", "ٮ"), + (0x1EE1D, "M", "ں"), + (0x1EE1E, "M", "ڡ"), + (0x1EE1F, "M", "ٯ"), + (0x1EE20, "X"), + (0x1EE21, "M", "ب"), + (0x1EE22, "M", "ج"), + (0x1EE23, "X"), + (0x1EE24, "M", "ه"), + (0x1EE25, "X"), + (0x1EE27, "M", "ح"), + (0x1EE28, "X"), + (0x1EE29, "M", "ي"), + (0x1EE2A, "M", "ك"), + (0x1EE2B, "M", "ل"), + (0x1EE2C, "M", "م"), + (0x1EE2D, "M", "ن"), + (0x1EE2E, "M", "س"), + (0x1EE2F, "M", "ع"), + (0x1EE30, "M", "ف"), + (0x1EE31, "M", "ص"), + (0x1EE32, "M", "ق"), + (0x1EE33, "X"), + (0x1EE34, "M", "ش"), + (0x1EE35, "M", "ت"), + (0x1EE36, "M", "ث"), + (0x1EE37, "M", "خ"), + (0x1EE38, "X"), + (0x1EE39, "M", "ض"), + (0x1EE3A, "X"), + (0x1EE3B, "M", "غ"), + (0x1EE3C, "X"), + (0x1EE42, "M", "ج"), + (0x1EE43, "X"), + (0x1EE47, "M", "ح"), + (0x1EE48, "X"), + (0x1EE49, "M", "ي"), + (0x1EE4A, "X"), + (0x1EE4B, "M", "ل"), + (0x1EE4C, "X"), + (0x1EE4D, "M", "ن"), + (0x1EE4E, "M", "س"), + ] + + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4F, "M", "ع"), + (0x1EE50, "X"), + (0x1EE51, "M", "ص"), + (0x1EE52, "M", "ق"), + (0x1EE53, "X"), + (0x1EE54, "M", "ش"), + (0x1EE55, "X"), + (0x1EE57, "M", "خ"), + (0x1EE58, "X"), + (0x1EE59, "M", "ض"), + (0x1EE5A, "X"), + (0x1EE5B, "M", "غ"), + (0x1EE5C, "X"), + (0x1EE5D, "M", "ں"), + (0x1EE5E, "X"), + (0x1EE5F, "M", "ٯ"), + (0x1EE60, "X"), + (0x1EE61, "M", "ب"), + (0x1EE62, "M", "ج"), + (0x1EE63, "X"), + (0x1EE64, "M", "ه"), + (0x1EE65, "X"), + (0x1EE67, "M", "ح"), + (0x1EE68, "M", "ط"), + (0x1EE69, "M", "ي"), + (0x1EE6A, "M", "ك"), + (0x1EE6B, "X"), + (0x1EE6C, "M", "م"), + (0x1EE6D, "M", "ن"), + (0x1EE6E, "M", "س"), + (0x1EE6F, "M", "ع"), + (0x1EE70, "M", "ف"), + (0x1EE71, "M", "ص"), + (0x1EE72, "M", "ق"), + (0x1EE73, "X"), + (0x1EE74, "M", "ش"), + (0x1EE75, "M", "ت"), + (0x1EE76, "M", "ث"), + (0x1EE77, "M", "خ"), + (0x1EE78, "X"), + (0x1EE79, "M", "ض"), + (0x1EE7A, "M", "ظ"), + (0x1EE7B, "M", "غ"), + (0x1EE7C, "M", "ٮ"), + (0x1EE7D, "X"), + (0x1EE7E, "M", "ڡ"), + (0x1EE7F, "X"), + (0x1EE80, "M", "ا"), + (0x1EE81, "M", "ب"), + (0x1EE82, "M", "ج"), + (0x1EE83, "M", "د"), + (0x1EE84, "M", "ه"), + (0x1EE85, "M", "و"), + (0x1EE86, "M", "ز"), + (0x1EE87, "M", "ح"), + (0x1EE88, "M", "ط"), + (0x1EE89, "M", "ي"), + (0x1EE8A, "X"), + (0x1EE8B, "M", "ل"), + (0x1EE8C, "M", "م"), + (0x1EE8D, "M", "ن"), + (0x1EE8E, "M", "س"), + (0x1EE8F, "M", "ع"), + (0x1EE90, "M", "ف"), + (0x1EE91, "M", "ص"), + (0x1EE92, "M", "ق"), + (0x1EE93, "M", "ر"), + (0x1EE94, "M", "ش"), + (0x1EE95, "M", "ت"), + (0x1EE96, "M", "ث"), + (0x1EE97, "M", "خ"), + (0x1EE98, "M", "ذ"), + (0x1EE99, "M", "ض"), + (0x1EE9A, "M", "ظ"), + (0x1EE9B, "M", "غ"), + (0x1EE9C, "X"), + (0x1EEA1, "M", "ب"), + (0x1EEA2, "M", "ج"), + (0x1EEA3, "M", "د"), + (0x1EEA4, "X"), + (0x1EEA5, "M", "و"), + (0x1EEA6, "M", "ز"), + (0x1EEA7, "M", "ح"), + (0x1EEA8, "M", "ط"), + (0x1EEA9, "M", "ي"), + (0x1EEAA, "X"), + (0x1EEAB, "M", "ل"), + (0x1EEAC, "M", "م"), + (0x1EEAD, "M", "ن"), + (0x1EEAE, "M", "س"), + (0x1EEAF, "M", "ع"), + (0x1EEB0, "M", "ف"), + (0x1EEB1, "M", "ص"), + (0x1EEB2, "M", "ق"), + (0x1EEB3, "M", "ر"), + (0x1EEB4, "M", "ش"), + (0x1EEB5, "M", "ت"), + (0x1EEB6, "M", "ث"), + (0x1EEB7, "M", "خ"), + (0x1EEB8, "M", "ذ"), + ] + + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB9, "M", "ض"), + (0x1EEBA, "M", "ظ"), + (0x1EEBB, "M", "غ"), + 
(0x1EEBC, "X"), + (0x1EEF0, "V"), + (0x1EEF2, "X"), + (0x1F000, "V"), + (0x1F02C, "X"), + (0x1F030, "V"), + (0x1F094, "X"), + (0x1F0A0, "V"), + (0x1F0AF, "X"), + (0x1F0B1, "V"), + (0x1F0C0, "X"), + (0x1F0C1, "V"), + (0x1F0D0, "X"), + (0x1F0D1, "V"), + (0x1F0F6, "X"), + (0x1F101, "3", "0,"), + (0x1F102, "3", "1,"), + (0x1F103, "3", "2,"), + (0x1F104, "3", "3,"), + (0x1F105, "3", "4,"), + (0x1F106, "3", "5,"), + (0x1F107, "3", "6,"), + (0x1F108, "3", "7,"), + (0x1F109, "3", "8,"), + (0x1F10A, "3", "9,"), + (0x1F10B, "V"), + (0x1F110, "3", "(a)"), + (0x1F111, "3", "(b)"), + (0x1F112, "3", "(c)"), + (0x1F113, "3", "(d)"), + (0x1F114, "3", "(e)"), + (0x1F115, "3", "(f)"), + (0x1F116, "3", "(g)"), + (0x1F117, "3", "(h)"), + (0x1F118, "3", "(i)"), + (0x1F119, "3", "(j)"), + (0x1F11A, "3", "(k)"), + (0x1F11B, "3", "(l)"), + (0x1F11C, "3", "(m)"), + (0x1F11D, "3", "(n)"), + (0x1F11E, "3", "(o)"), + (0x1F11F, "3", "(p)"), + (0x1F120, "3", "(q)"), + (0x1F121, "3", "(r)"), + (0x1F122, "3", "(s)"), + (0x1F123, "3", "(t)"), + (0x1F124, "3", "(u)"), + (0x1F125, "3", "(v)"), + (0x1F126, "3", "(w)"), + (0x1F127, "3", "(x)"), + (0x1F128, "3", "(y)"), + (0x1F129, "3", "(z)"), + (0x1F12A, "M", "〔s〕"), + (0x1F12B, "M", "c"), + (0x1F12C, "M", "r"), + (0x1F12D, "M", "cd"), + (0x1F12E, "M", "wz"), + (0x1F12F, "V"), + (0x1F130, "M", "a"), + (0x1F131, "M", "b"), + (0x1F132, "M", "c"), + (0x1F133, "M", "d"), + (0x1F134, "M", "e"), + (0x1F135, "M", "f"), + (0x1F136, "M", "g"), + (0x1F137, "M", "h"), + (0x1F138, "M", "i"), + (0x1F139, "M", "j"), + (0x1F13A, "M", "k"), + (0x1F13B, "M", "l"), + (0x1F13C, "M", "m"), + (0x1F13D, "M", "n"), + (0x1F13E, "M", "o"), + (0x1F13F, "M", "p"), + (0x1F140, "M", "q"), + (0x1F141, "M", "r"), + (0x1F142, "M", "s"), + (0x1F143, "M", "t"), + (0x1F144, "M", "u"), + (0x1F145, "M", "v"), + (0x1F146, "M", "w"), + (0x1F147, "M", "x"), + (0x1F148, "M", "y"), + (0x1F149, "M", "z"), + (0x1F14A, "M", "hv"), + (0x1F14B, "M", "mv"), + (0x1F14C, "M", "sd"), + (0x1F14D, "M", "ss"), + (0x1F14E, "M", "ppv"), + (0x1F14F, "M", "wc"), + (0x1F150, "V"), + (0x1F16A, "M", "mc"), + (0x1F16B, "M", "md"), + (0x1F16C, "M", "mr"), + (0x1F16D, "V"), + (0x1F190, "M", "dj"), + (0x1F191, "V"), + ] + + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F1AE, "X"), + (0x1F1E6, "V"), + (0x1F200, "M", "ほか"), + (0x1F201, "M", "ココ"), + (0x1F202, "M", "サ"), + (0x1F203, "X"), + (0x1F210, "M", "手"), + (0x1F211, "M", "字"), + (0x1F212, "M", "双"), + (0x1F213, "M", "デ"), + (0x1F214, "M", "二"), + (0x1F215, "M", "多"), + (0x1F216, "M", "解"), + (0x1F217, "M", "天"), + (0x1F218, "M", "交"), + (0x1F219, "M", "映"), + (0x1F21A, "M", "無"), + (0x1F21B, "M", "料"), + (0x1F21C, "M", "前"), + (0x1F21D, "M", "後"), + (0x1F21E, "M", "再"), + (0x1F21F, "M", "新"), + (0x1F220, "M", "初"), + (0x1F221, "M", "終"), + (0x1F222, "M", "生"), + (0x1F223, "M", "販"), + (0x1F224, "M", "声"), + (0x1F225, "M", "吹"), + (0x1F226, "M", "演"), + (0x1F227, "M", "投"), + (0x1F228, "M", "捕"), + (0x1F229, "M", "一"), + (0x1F22A, "M", "三"), + (0x1F22B, "M", "遊"), + (0x1F22C, "M", "左"), + (0x1F22D, "M", "中"), + (0x1F22E, "M", "右"), + (0x1F22F, "M", "指"), + (0x1F230, "M", "走"), + (0x1F231, "M", "打"), + (0x1F232, "M", "禁"), + (0x1F233, "M", "空"), + (0x1F234, "M", "合"), + (0x1F235, "M", "満"), + (0x1F236, "M", "有"), + (0x1F237, "M", "月"), + (0x1F238, "M", "申"), + (0x1F239, "M", "割"), + (0x1F23A, "M", "営"), + (0x1F23B, "M", "配"), + (0x1F23C, "X"), + (0x1F240, "M", "〔本〕"), + (0x1F241, "M", "〔三〕"), + (0x1F242, "M", "〔二〕"), + (0x1F243, "M", "〔安〕"), + 
(0x1F244, "M", "〔点〕"), + (0x1F245, "M", "〔打〕"), + (0x1F246, "M", "〔盗〕"), + (0x1F247, "M", "〔勝〕"), + (0x1F248, "M", "〔敗〕"), + (0x1F249, "X"), + (0x1F250, "M", "得"), + (0x1F251, "M", "可"), + (0x1F252, "X"), + (0x1F260, "V"), + (0x1F266, "X"), + (0x1F300, "V"), + (0x1F6D8, "X"), + (0x1F6DC, "V"), + (0x1F6ED, "X"), + (0x1F6F0, "V"), + (0x1F6FD, "X"), + (0x1F700, "V"), + (0x1F777, "X"), + (0x1F77B, "V"), + (0x1F7DA, "X"), + (0x1F7E0, "V"), + (0x1F7EC, "X"), + (0x1F7F0, "V"), + (0x1F7F1, "X"), + (0x1F800, "V"), + (0x1F80C, "X"), + (0x1F810, "V"), + (0x1F848, "X"), + (0x1F850, "V"), + (0x1F85A, "X"), + (0x1F860, "V"), + (0x1F888, "X"), + (0x1F890, "V"), + (0x1F8AE, "X"), + (0x1F8B0, "V"), + (0x1F8B2, "X"), + (0x1F900, "V"), + (0x1FA54, "X"), + (0x1FA60, "V"), + (0x1FA6E, "X"), + (0x1FA70, "V"), + (0x1FA7D, "X"), + (0x1FA80, "V"), + (0x1FA89, "X"), + ] + + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA90, "V"), + (0x1FABE, "X"), + (0x1FABF, "V"), + (0x1FAC6, "X"), + (0x1FACE, "V"), + (0x1FADC, "X"), + (0x1FAE0, "V"), + (0x1FAE9, "X"), + (0x1FAF0, "V"), + (0x1FAF9, "X"), + (0x1FB00, "V"), + (0x1FB93, "X"), + (0x1FB94, "V"), + (0x1FBCB, "X"), + (0x1FBF0, "M", "0"), + (0x1FBF1, "M", "1"), + (0x1FBF2, "M", "2"), + (0x1FBF3, "M", "3"), + (0x1FBF4, "M", "4"), + (0x1FBF5, "M", "5"), + (0x1FBF6, "M", "6"), + (0x1FBF7, "M", "7"), + (0x1FBF8, "M", "8"), + (0x1FBF9, "M", "9"), + (0x1FBFA, "X"), + (0x20000, "V"), + (0x2A6E0, "X"), + (0x2A700, "V"), + (0x2B73A, "X"), + (0x2B740, "V"), + (0x2B81E, "X"), + (0x2B820, "V"), + (0x2CEA2, "X"), + (0x2CEB0, "V"), + (0x2EBE1, "X"), + (0x2EBF0, "V"), + (0x2EE5E, "X"), + (0x2F800, "M", "丽"), + (0x2F801, "M", "丸"), + (0x2F802, "M", "乁"), + (0x2F803, "M", "𠄢"), + (0x2F804, "M", "你"), + (0x2F805, "M", "侮"), + (0x2F806, "M", "侻"), + (0x2F807, "M", "倂"), + (0x2F808, "M", "偺"), + (0x2F809, "M", "備"), + (0x2F80A, "M", "僧"), + (0x2F80B, "M", "像"), + (0x2F80C, "M", "㒞"), + (0x2F80D, "M", "𠘺"), + (0x2F80E, "M", "免"), + (0x2F80F, "M", "兔"), + (0x2F810, "M", "兤"), + (0x2F811, "M", "具"), + (0x2F812, "M", "𠔜"), + (0x2F813, "M", "㒹"), + (0x2F814, "M", "內"), + (0x2F815, "M", "再"), + (0x2F816, "M", "𠕋"), + (0x2F817, "M", "冗"), + (0x2F818, "M", "冤"), + (0x2F819, "M", "仌"), + (0x2F81A, "M", "冬"), + (0x2F81B, "M", "况"), + (0x2F81C, "M", "𩇟"), + (0x2F81D, "M", "凵"), + (0x2F81E, "M", "刃"), + (0x2F81F, "M", "㓟"), + (0x2F820, "M", "刻"), + (0x2F821, "M", "剆"), + (0x2F822, "M", "割"), + (0x2F823, "M", "剷"), + (0x2F824, "M", "㔕"), + (0x2F825, "M", "勇"), + (0x2F826, "M", "勉"), + (0x2F827, "M", "勤"), + (0x2F828, "M", "勺"), + (0x2F829, "M", "包"), + (0x2F82A, "M", "匆"), + (0x2F82B, "M", "北"), + (0x2F82C, "M", "卉"), + (0x2F82D, "M", "卑"), + (0x2F82E, "M", "博"), + (0x2F82F, "M", "即"), + (0x2F830, "M", "卽"), + (0x2F831, "M", "卿"), + (0x2F834, "M", "𠨬"), + (0x2F835, "M", "灰"), + (0x2F836, "M", "及"), + (0x2F837, "M", "叟"), + (0x2F838, "M", "𠭣"), + (0x2F839, "M", "叫"), + (0x2F83A, "M", "叱"), + (0x2F83B, "M", "吆"), + (0x2F83C, "M", "咞"), + (0x2F83D, "M", "吸"), + (0x2F83E, "M", "呈"), + (0x2F83F, "M", "周"), + (0x2F840, "M", "咢"), + ] + + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F841, "M", "哶"), + (0x2F842, "M", "唐"), + (0x2F843, "M", "啓"), + (0x2F844, "M", "啣"), + (0x2F845, "M", "善"), + (0x2F847, "M", "喙"), + (0x2F848, "M", "喫"), + (0x2F849, "M", "喳"), + (0x2F84A, "M", "嗂"), + (0x2F84B, "M", "圖"), + (0x2F84C, "M", "嘆"), + (0x2F84D, "M", "圗"), + (0x2F84E, "M", "噑"), + (0x2F84F, "M", "噴"), + (0x2F850, "M", "切"), + (0x2F851, 
"M", "壮"), + (0x2F852, "M", "城"), + (0x2F853, "M", "埴"), + (0x2F854, "M", "堍"), + (0x2F855, "M", "型"), + (0x2F856, "M", "堲"), + (0x2F857, "M", "報"), + (0x2F858, "M", "墬"), + (0x2F859, "M", "𡓤"), + (0x2F85A, "M", "売"), + (0x2F85B, "M", "壷"), + (0x2F85C, "M", "夆"), + (0x2F85D, "M", "多"), + (0x2F85E, "M", "夢"), + (0x2F85F, "M", "奢"), + (0x2F860, "M", "𡚨"), + (0x2F861, "M", "𡛪"), + (0x2F862, "M", "姬"), + (0x2F863, "M", "娛"), + (0x2F864, "M", "娧"), + (0x2F865, "M", "姘"), + (0x2F866, "M", "婦"), + (0x2F867, "M", "㛮"), + (0x2F868, "X"), + (0x2F869, "M", "嬈"), + (0x2F86A, "M", "嬾"), + (0x2F86C, "M", "𡧈"), + (0x2F86D, "M", "寃"), + (0x2F86E, "M", "寘"), + (0x2F86F, "M", "寧"), + (0x2F870, "M", "寳"), + (0x2F871, "M", "𡬘"), + (0x2F872, "M", "寿"), + (0x2F873, "M", "将"), + (0x2F874, "X"), + (0x2F875, "M", "尢"), + (0x2F876, "M", "㞁"), + (0x2F877, "M", "屠"), + (0x2F878, "M", "屮"), + (0x2F879, "M", "峀"), + (0x2F87A, "M", "岍"), + (0x2F87B, "M", "𡷤"), + (0x2F87C, "M", "嵃"), + (0x2F87D, "M", "𡷦"), + (0x2F87E, "M", "嵮"), + (0x2F87F, "M", "嵫"), + (0x2F880, "M", "嵼"), + (0x2F881, "M", "巡"), + (0x2F882, "M", "巢"), + (0x2F883, "M", "㠯"), + (0x2F884, "M", "巽"), + (0x2F885, "M", "帨"), + (0x2F886, "M", "帽"), + (0x2F887, "M", "幩"), + (0x2F888, "M", "㡢"), + (0x2F889, "M", "𢆃"), + (0x2F88A, "M", "㡼"), + (0x2F88B, "M", "庰"), + (0x2F88C, "M", "庳"), + (0x2F88D, "M", "庶"), + (0x2F88E, "M", "廊"), + (0x2F88F, "M", "𪎒"), + (0x2F890, "M", "廾"), + (0x2F891, "M", "𢌱"), + (0x2F893, "M", "舁"), + (0x2F894, "M", "弢"), + (0x2F896, "M", "㣇"), + (0x2F897, "M", "𣊸"), + (0x2F898, "M", "𦇚"), + (0x2F899, "M", "形"), + (0x2F89A, "M", "彫"), + (0x2F89B, "M", "㣣"), + (0x2F89C, "M", "徚"), + (0x2F89D, "M", "忍"), + (0x2F89E, "M", "志"), + (0x2F89F, "M", "忹"), + (0x2F8A0, "M", "悁"), + (0x2F8A1, "M", "㤺"), + (0x2F8A2, "M", "㤜"), + (0x2F8A3, "M", "悔"), + (0x2F8A4, "M", "𢛔"), + (0x2F8A5, "M", "惇"), + (0x2F8A6, "M", "慈"), + (0x2F8A7, "M", "慌"), + (0x2F8A8, "M", "慎"), + ] + + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8A9, "M", "慌"), + (0x2F8AA, "M", "慺"), + (0x2F8AB, "M", "憎"), + (0x2F8AC, "M", "憲"), + (0x2F8AD, "M", "憤"), + (0x2F8AE, "M", "憯"), + (0x2F8AF, "M", "懞"), + (0x2F8B0, "M", "懲"), + (0x2F8B1, "M", "懶"), + (0x2F8B2, "M", "成"), + (0x2F8B3, "M", "戛"), + (0x2F8B4, "M", "扝"), + (0x2F8B5, "M", "抱"), + (0x2F8B6, "M", "拔"), + (0x2F8B7, "M", "捐"), + (0x2F8B8, "M", "𢬌"), + (0x2F8B9, "M", "挽"), + (0x2F8BA, "M", "拼"), + (0x2F8BB, "M", "捨"), + (0x2F8BC, "M", "掃"), + (0x2F8BD, "M", "揤"), + (0x2F8BE, "M", "𢯱"), + (0x2F8BF, "M", "搢"), + (0x2F8C0, "M", "揅"), + (0x2F8C1, "M", "掩"), + (0x2F8C2, "M", "㨮"), + (0x2F8C3, "M", "摩"), + (0x2F8C4, "M", "摾"), + (0x2F8C5, "M", "撝"), + (0x2F8C6, "M", "摷"), + (0x2F8C7, "M", "㩬"), + (0x2F8C8, "M", "敏"), + (0x2F8C9, "M", "敬"), + (0x2F8CA, "M", "𣀊"), + (0x2F8CB, "M", "旣"), + (0x2F8CC, "M", "書"), + (0x2F8CD, "M", "晉"), + (0x2F8CE, "M", "㬙"), + (0x2F8CF, "M", "暑"), + (0x2F8D0, "M", "㬈"), + (0x2F8D1, "M", "㫤"), + (0x2F8D2, "M", "冒"), + (0x2F8D3, "M", "冕"), + (0x2F8D4, "M", "最"), + (0x2F8D5, "M", "暜"), + (0x2F8D6, "M", "肭"), + (0x2F8D7, "M", "䏙"), + (0x2F8D8, "M", "朗"), + (0x2F8D9, "M", "望"), + (0x2F8DA, "M", "朡"), + (0x2F8DB, "M", "杞"), + (0x2F8DC, "M", "杓"), + (0x2F8DD, "M", "𣏃"), + (0x2F8DE, "M", "㭉"), + (0x2F8DF, "M", "柺"), + (0x2F8E0, "M", "枅"), + (0x2F8E1, "M", "桒"), + (0x2F8E2, "M", "梅"), + (0x2F8E3, "M", "𣑭"), + (0x2F8E4, "M", "梎"), + (0x2F8E5, "M", "栟"), + (0x2F8E6, "M", "椔"), + (0x2F8E7, "M", "㮝"), + (0x2F8E8, "M", "楂"), + (0x2F8E9, "M", "榣"), + (0x2F8EA, "M", "槪"), + (0x2F8EB, 
"M", "檨"), + (0x2F8EC, "M", "𣚣"), + (0x2F8ED, "M", "櫛"), + (0x2F8EE, "M", "㰘"), + (0x2F8EF, "M", "次"), + (0x2F8F0, "M", "𣢧"), + (0x2F8F1, "M", "歔"), + (0x2F8F2, "M", "㱎"), + (0x2F8F3, "M", "歲"), + (0x2F8F4, "M", "殟"), + (0x2F8F5, "M", "殺"), + (0x2F8F6, "M", "殻"), + (0x2F8F7, "M", "𣪍"), + (0x2F8F8, "M", "𡴋"), + (0x2F8F9, "M", "𣫺"), + (0x2F8FA, "M", "汎"), + (0x2F8FB, "M", "𣲼"), + (0x2F8FC, "M", "沿"), + (0x2F8FD, "M", "泍"), + (0x2F8FE, "M", "汧"), + (0x2F8FF, "M", "洖"), + (0x2F900, "M", "派"), + (0x2F901, "M", "海"), + (0x2F902, "M", "流"), + (0x2F903, "M", "浩"), + (0x2F904, "M", "浸"), + (0x2F905, "M", "涅"), + (0x2F906, "M", "𣴞"), + (0x2F907, "M", "洴"), + (0x2F908, "M", "港"), + (0x2F909, "M", "湮"), + (0x2F90A, "M", "㴳"), + (0x2F90B, "M", "滋"), + (0x2F90C, "M", "滇"), + ] + + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F90D, "M", "𣻑"), + (0x2F90E, "M", "淹"), + (0x2F90F, "M", "潮"), + (0x2F910, "M", "𣽞"), + (0x2F911, "M", "𣾎"), + (0x2F912, "M", "濆"), + (0x2F913, "M", "瀹"), + (0x2F914, "M", "瀞"), + (0x2F915, "M", "瀛"), + (0x2F916, "M", "㶖"), + (0x2F917, "M", "灊"), + (0x2F918, "M", "災"), + (0x2F919, "M", "灷"), + (0x2F91A, "M", "炭"), + (0x2F91B, "M", "𠔥"), + (0x2F91C, "M", "煅"), + (0x2F91D, "M", "𤉣"), + (0x2F91E, "M", "熜"), + (0x2F91F, "X"), + (0x2F920, "M", "爨"), + (0x2F921, "M", "爵"), + (0x2F922, "M", "牐"), + (0x2F923, "M", "𤘈"), + (0x2F924, "M", "犀"), + (0x2F925, "M", "犕"), + (0x2F926, "M", "𤜵"), + (0x2F927, "M", "𤠔"), + (0x2F928, "M", "獺"), + (0x2F929, "M", "王"), + (0x2F92A, "M", "㺬"), + (0x2F92B, "M", "玥"), + (0x2F92C, "M", "㺸"), + (0x2F92E, "M", "瑇"), + (0x2F92F, "M", "瑜"), + (0x2F930, "M", "瑱"), + (0x2F931, "M", "璅"), + (0x2F932, "M", "瓊"), + (0x2F933, "M", "㼛"), + (0x2F934, "M", "甤"), + (0x2F935, "M", "𤰶"), + (0x2F936, "M", "甾"), + (0x2F937, "M", "𤲒"), + (0x2F938, "M", "異"), + (0x2F939, "M", "𢆟"), + (0x2F93A, "M", "瘐"), + (0x2F93B, "M", "𤾡"), + (0x2F93C, "M", "𤾸"), + (0x2F93D, "M", "𥁄"), + (0x2F93E, "M", "㿼"), + (0x2F93F, "M", "䀈"), + (0x2F940, "M", "直"), + (0x2F941, "M", "𥃳"), + (0x2F942, "M", "𥃲"), + (0x2F943, "M", "𥄙"), + (0x2F944, "M", "𥄳"), + (0x2F945, "M", "眞"), + (0x2F946, "M", "真"), + (0x2F948, "M", "睊"), + (0x2F949, "M", "䀹"), + (0x2F94A, "M", "瞋"), + (0x2F94B, "M", "䁆"), + (0x2F94C, "M", "䂖"), + (0x2F94D, "M", "𥐝"), + (0x2F94E, "M", "硎"), + (0x2F94F, "M", "碌"), + (0x2F950, "M", "磌"), + (0x2F951, "M", "䃣"), + (0x2F952, "M", "𥘦"), + (0x2F953, "M", "祖"), + (0x2F954, "M", "𥚚"), + (0x2F955, "M", "𥛅"), + (0x2F956, "M", "福"), + (0x2F957, "M", "秫"), + (0x2F958, "M", "䄯"), + (0x2F959, "M", "穀"), + (0x2F95A, "M", "穊"), + (0x2F95B, "M", "穏"), + (0x2F95C, "M", "𥥼"), + (0x2F95D, "M", "𥪧"), + (0x2F95F, "X"), + (0x2F960, "M", "䈂"), + (0x2F961, "M", "𥮫"), + (0x2F962, "M", "篆"), + (0x2F963, "M", "築"), + (0x2F964, "M", "䈧"), + (0x2F965, "M", "𥲀"), + (0x2F966, "M", "糒"), + (0x2F967, "M", "䊠"), + (0x2F968, "M", "糨"), + (0x2F969, "M", "糣"), + (0x2F96A, "M", "紀"), + (0x2F96B, "M", "𥾆"), + (0x2F96C, "M", "絣"), + (0x2F96D, "M", "䌁"), + (0x2F96E, "M", "緇"), + (0x2F96F, "M", "縂"), + (0x2F970, "M", "繅"), + (0x2F971, "M", "䌴"), + (0x2F972, "M", "𦈨"), + (0x2F973, "M", "𦉇"), + ] + + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F974, "M", "䍙"), + (0x2F975, "M", "𦋙"), + (0x2F976, "M", "罺"), + (0x2F977, "M", "𦌾"), + (0x2F978, "M", "羕"), + (0x2F979, "M", "翺"), + (0x2F97A, "M", "者"), + (0x2F97B, "M", "𦓚"), + (0x2F97C, "M", "𦔣"), + (0x2F97D, "M", "聠"), + (0x2F97E, "M", "𦖨"), + (0x2F97F, "M", "聰"), + (0x2F980, "M", "𣍟"), + (0x2F981, "M", 
"䏕"), + (0x2F982, "M", "育"), + (0x2F983, "M", "脃"), + (0x2F984, "M", "䐋"), + (0x2F985, "M", "脾"), + (0x2F986, "M", "媵"), + (0x2F987, "M", "𦞧"), + (0x2F988, "M", "𦞵"), + (0x2F989, "M", "𣎓"), + (0x2F98A, "M", "𣎜"), + (0x2F98B, "M", "舁"), + (0x2F98C, "M", "舄"), + (0x2F98D, "M", "辞"), + (0x2F98E, "M", "䑫"), + (0x2F98F, "M", "芑"), + (0x2F990, "M", "芋"), + (0x2F991, "M", "芝"), + (0x2F992, "M", "劳"), + (0x2F993, "M", "花"), + (0x2F994, "M", "芳"), + (0x2F995, "M", "芽"), + (0x2F996, "M", "苦"), + (0x2F997, "M", "𦬼"), + (0x2F998, "M", "若"), + (0x2F999, "M", "茝"), + (0x2F99A, "M", "荣"), + (0x2F99B, "M", "莭"), + (0x2F99C, "M", "茣"), + (0x2F99D, "M", "莽"), + (0x2F99E, "M", "菧"), + (0x2F99F, "M", "著"), + (0x2F9A0, "M", "荓"), + (0x2F9A1, "M", "菊"), + (0x2F9A2, "M", "菌"), + (0x2F9A3, "M", "菜"), + (0x2F9A4, "M", "𦰶"), + (0x2F9A5, "M", "𦵫"), + (0x2F9A6, "M", "𦳕"), + (0x2F9A7, "M", "䔫"), + (0x2F9A8, "M", "蓱"), + (0x2F9A9, "M", "蓳"), + (0x2F9AA, "M", "蔖"), + (0x2F9AB, "M", "𧏊"), + (0x2F9AC, "M", "蕤"), + (0x2F9AD, "M", "𦼬"), + (0x2F9AE, "M", "䕝"), + (0x2F9AF, "M", "䕡"), + (0x2F9B0, "M", "𦾱"), + (0x2F9B1, "M", "𧃒"), + (0x2F9B2, "M", "䕫"), + (0x2F9B3, "M", "虐"), + (0x2F9B4, "M", "虜"), + (0x2F9B5, "M", "虧"), + (0x2F9B6, "M", "虩"), + (0x2F9B7, "M", "蚩"), + (0x2F9B8, "M", "蚈"), + (0x2F9B9, "M", "蜎"), + (0x2F9BA, "M", "蛢"), + (0x2F9BB, "M", "蝹"), + (0x2F9BC, "M", "蜨"), + (0x2F9BD, "M", "蝫"), + (0x2F9BE, "M", "螆"), + (0x2F9BF, "X"), + (0x2F9C0, "M", "蟡"), + (0x2F9C1, "M", "蠁"), + (0x2F9C2, "M", "䗹"), + (0x2F9C3, "M", "衠"), + (0x2F9C4, "M", "衣"), + (0x2F9C5, "M", "𧙧"), + (0x2F9C6, "M", "裗"), + (0x2F9C7, "M", "裞"), + (0x2F9C8, "M", "䘵"), + (0x2F9C9, "M", "裺"), + (0x2F9CA, "M", "㒻"), + (0x2F9CB, "M", "𧢮"), + (0x2F9CC, "M", "𧥦"), + (0x2F9CD, "M", "䚾"), + (0x2F9CE, "M", "䛇"), + (0x2F9CF, "M", "誠"), + (0x2F9D0, "M", "諭"), + (0x2F9D1, "M", "變"), + (0x2F9D2, "M", "豕"), + (0x2F9D3, "M", "𧲨"), + (0x2F9D4, "M", "貫"), + (0x2F9D5, "M", "賁"), + (0x2F9D6, "M", "贛"), + (0x2F9D7, "M", "起"), + ] + + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9D8, "M", "𧼯"), + (0x2F9D9, "M", "𠠄"), + (0x2F9DA, "M", "跋"), + (0x2F9DB, "M", "趼"), + (0x2F9DC, "M", "跰"), + (0x2F9DD, "M", "𠣞"), + (0x2F9DE, "M", "軔"), + (0x2F9DF, "M", "輸"), + (0x2F9E0, "M", "𨗒"), + (0x2F9E1, "M", "𨗭"), + (0x2F9E2, "M", "邔"), + (0x2F9E3, "M", "郱"), + (0x2F9E4, "M", "鄑"), + (0x2F9E5, "M", "𨜮"), + (0x2F9E6, "M", "鄛"), + (0x2F9E7, "M", "鈸"), + (0x2F9E8, "M", "鋗"), + (0x2F9E9, "M", "鋘"), + (0x2F9EA, "M", "鉼"), + (0x2F9EB, "M", "鏹"), + (0x2F9EC, "M", "鐕"), + (0x2F9ED, "M", "𨯺"), + (0x2F9EE, "M", "開"), + (0x2F9EF, "M", "䦕"), + (0x2F9F0, "M", "閷"), + (0x2F9F1, "M", "𨵷"), + (0x2F9F2, "M", "䧦"), + (0x2F9F3, "M", "雃"), + (0x2F9F4, "M", "嶲"), + (0x2F9F5, "M", "霣"), + (0x2F9F6, "M", "𩅅"), + (0x2F9F7, "M", "𩈚"), + (0x2F9F8, "M", "䩮"), + (0x2F9F9, "M", "䩶"), + (0x2F9FA, "M", "韠"), + (0x2F9FB, "M", "𩐊"), + (0x2F9FC, "M", "䪲"), + (0x2F9FD, "M", "𩒖"), + (0x2F9FE, "M", "頋"), + (0x2FA00, "M", "頩"), + (0x2FA01, "M", "𩖶"), + (0x2FA02, "M", "飢"), + (0x2FA03, "M", "䬳"), + (0x2FA04, "M", "餩"), + (0x2FA05, "M", "馧"), + (0x2FA06, "M", "駂"), + (0x2FA07, "M", "駾"), + (0x2FA08, "M", "䯎"), + (0x2FA09, "M", "𩬰"), + (0x2FA0A, "M", "鬒"), + (0x2FA0B, "M", "鱀"), + (0x2FA0C, "M", "鳽"), + (0x2FA0D, "M", "䳎"), + (0x2FA0E, "M", "䳭"), + (0x2FA0F, "M", "鵧"), + (0x2FA10, "M", "𪃎"), + (0x2FA11, "M", "䳸"), + (0x2FA12, "M", "𪄅"), + (0x2FA13, "M", "𪈎"), + (0x2FA14, "M", "𪊑"), + (0x2FA15, "M", "麻"), + (0x2FA16, "M", "䵖"), + (0x2FA17, "M", "黹"), + (0x2FA18, "M", "黾"), + (0x2FA19, 
"M", "鼅"), + (0x2FA1A, "M", "鼏"), + (0x2FA1B, "M", "鼖"), + (0x2FA1C, "M", "鼻"), + (0x2FA1D, "M", "𪘀"), + (0x2FA1E, "X"), + (0x30000, "V"), + (0x3134B, "X"), + (0x31350, "V"), + (0x323B0, "X"), + (0xE0100, "I"), + (0xE01F0, "X"), + ] + + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/COPYING b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/COPYING new file mode 100644 index 0000000..f067af3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/COPYING @@ -0,0 +1,14 @@ +Copyright (C) 2008-2011 INADA Naoki + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py new file mode 100644 index 0000000..f3266b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__init__.py @@ -0,0 +1,55 @@ +# ruff: noqa: F401 +import os + +from .exceptions import * # noqa: F403 +from .ext import ExtType, Timestamp + +version = (1, 1, 2) +__version__ = "1.1.2" + + +if os.environ.get("MSGPACK_PUREPYTHON"): + from .fallback import Packer, Unpacker, unpackb +else: + try: + from ._cmsgpack import Packer, Unpacker, unpackb + except ImportError: + from .fallback import Packer, Unpacker, unpackb + + +def pack(o, stream, **kwargs): + """ + Pack object `o` and write it to `stream` + + See :class:`Packer` for options. + """ + packer = Packer(**kwargs) + stream.write(packer.pack(o)) + + +def packb(o, **kwargs): + """ + Pack object `o` and return packed bytes + + See :class:`Packer` for options. + """ + return Packer(**kwargs).pack(o) + + +def unpack(stream, **kwargs): + """ + Unpack an object from `stream`. + + Raises `ExtraData` when `stream` contains extra bytes. + See :class:`Unpacker` for options. 
+ """ + data = stream.read() + return unpackb(data, **kwargs) + + +# alias for compatibility to simplejson/marshal/pickle. +load = unpack +loads = unpackb + +dump = pack +dumps = packb diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8693274 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..9abe7ff Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc new file mode 100644 index 0000000..6058ccc Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc new file mode 100644 index 0000000..9a38e0a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py new file mode 100644 index 0000000..d6d2615 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/exceptions.py @@ -0,0 +1,48 @@ +class UnpackException(Exception): + """Base class for some exceptions raised while unpacking. + + NOTE: unpack may raise exception other than subclass of + UnpackException. If you want to catch all error, catch + Exception instead. + """ + + +class BufferFull(UnpackException): + pass + + +class OutOfData(UnpackException): + pass + + +class FormatError(ValueError, UnpackException): + """Invalid msgpack format""" + + +class StackError(ValueError, UnpackException): + """Too nested""" + + +# Deprecated. Use ValueError instead +UnpackValueError = ValueError + + +class ExtraData(UnpackValueError): + """ExtraData is raised when there is trailing data. + + This exception is raised while only one-shot (not streaming) + unpack. + """ + + def __init__(self, unpacked, extra): + self.unpacked = unpacked + self.extra = extra + + def __str__(self): + return "unpack(b) received extra data." + + +# Deprecated. Use Exception instead to catch all exception during packing. 
+PackException = Exception +PackValueError = ValueError +PackOverflowError = OverflowError diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py new file mode 100644 index 0000000..9694819 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/ext.py @@ -0,0 +1,170 @@ +import datetime +import struct +from collections import namedtuple + + +class ExtType(namedtuple("ExtType", "code data")): + """ExtType represents ext type in msgpack.""" + + def __new__(cls, code, data): + if not isinstance(code, int): + raise TypeError("code must be int") + if not isinstance(data, bytes): + raise TypeError("data must be bytes") + if not 0 <= code <= 127: + raise ValueError("code must be 0~127") + return super().__new__(cls, code, data) + + +class Timestamp: + """Timestamp represents the Timestamp extension type in msgpack. + + When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. + When using pure-Python msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and + unpack `Timestamp`. + + This class is immutable: Do not override seconds and nanoseconds. + """ + + __slots__ = ["seconds", "nanoseconds"] + + def __init__(self, seconds, nanoseconds=0): + """Initialize a Timestamp object. + + :param int seconds: + Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds). + May be negative. + + :param int nanoseconds: + Number of nanoseconds to add to `seconds` to get fractional time. + Maximum is 999_999_999. Default is 0. + + Note: Negative times (before the UNIX epoch) are represented as neg. seconds + pos. ns. + """ + if not isinstance(seconds, int): + raise TypeError("seconds must be an integer") + if not isinstance(nanoseconds, int): + raise TypeError("nanoseconds must be an integer") + if not (0 <= nanoseconds < 10**9): + raise ValueError("nanoseconds must be a non-negative integer less than 999999999.") + self.seconds = seconds + self.nanoseconds = nanoseconds + + def __repr__(self): + """String representation of Timestamp.""" + return f"Timestamp(seconds={self.seconds}, nanoseconds={self.nanoseconds})" + + def __eq__(self, other): + """Check for equality with another Timestamp object""" + if type(other) is self.__class__: + return self.seconds == other.seconds and self.nanoseconds == other.nanoseconds + return False + + def __ne__(self, other): + """not-equals method (see :func:`__eq__()`)""" + return not self.__eq__(other) + + def __hash__(self): + return hash((self.seconds, self.nanoseconds)) + + @staticmethod + def from_bytes(b): + """Unpack bytes into a `Timestamp` object. + + Used for pure-Python msgpack unpacking. + + :param b: Payload from msgpack ext message with code -1 + :type b: bytes + + :returns: Timestamp object unpacked from msgpack ext payload + :rtype: Timestamp + """ + if len(b) == 4: + seconds = struct.unpack("!L", b)[0] + nanoseconds = 0 + elif len(b) == 8: + data64 = struct.unpack("!Q", b)[0] + seconds = data64 & 0x00000003FFFFFFFF + nanoseconds = data64 >> 34 + elif len(b) == 12: + nanoseconds, seconds = struct.unpack("!Iq", b) + else: + raise ValueError( + "Timestamp type can only be created from 32, 64, or 96-bit byte objects" + ) + return Timestamp(seconds, nanoseconds) + + def to_bytes(self): + """Pack this Timestamp object into bytes. + + Used for pure-Python msgpack packing. 
+ + :returns data: Payload for EXT message with code -1 (timestamp type) + :rtype: bytes + """ + if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits + data64 = self.nanoseconds << 34 | self.seconds + if data64 & 0xFFFFFFFF00000000 == 0: + # nanoseconds is zero and seconds < 2**32, so timestamp 32 + data = struct.pack("!L", data64) + else: + # timestamp 64 + data = struct.pack("!Q", data64) + else: + # timestamp 96 + data = struct.pack("!Iq", self.nanoseconds, self.seconds) + return data + + @staticmethod + def from_unix(unix_sec): + """Create a Timestamp from posix timestamp in seconds. + + :param unix_float: Posix timestamp in seconds. + :type unix_float: int or float + """ + seconds = int(unix_sec // 1) + nanoseconds = int((unix_sec % 1) * 10**9) + return Timestamp(seconds, nanoseconds) + + def to_unix(self): + """Get the timestamp as a floating-point value. + + :returns: posix timestamp + :rtype: float + """ + return self.seconds + self.nanoseconds / 1e9 + + @staticmethod + def from_unix_nano(unix_ns): + """Create a Timestamp from posix timestamp in nanoseconds. + + :param int unix_ns: Posix timestamp in nanoseconds. + :rtype: Timestamp + """ + return Timestamp(*divmod(unix_ns, 10**9)) + + def to_unix_nano(self): + """Get the timestamp as a unixtime in nanoseconds. + + :returns: posix timestamp in nanoseconds + :rtype: int + """ + return self.seconds * 10**9 + self.nanoseconds + + def to_datetime(self): + """Get the timestamp as a UTC datetime. + + :rtype: `datetime.datetime` + """ + utc = datetime.timezone.utc + return datetime.datetime.fromtimestamp(0, utc) + datetime.timedelta( + seconds=self.seconds, microseconds=self.nanoseconds // 1000 + ) + + @staticmethod + def from_datetime(dt): + """Create a Timestamp from datetime with tzinfo. 
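+
+        Example (an illustrative sketch, not part of the vendored docstring)::
+
+            import datetime
+            dt = datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc)
+            Timestamp.from_datetime(dt)  # Timestamp(seconds=1704067200, nanoseconds=0)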
+ + :rtype: Timestamp + """ + return Timestamp(seconds=int(dt.timestamp()), nanoseconds=dt.microsecond * 1000) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py new file mode 100644 index 0000000..b02e47c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/msgpack/fallback.py @@ -0,0 +1,929 @@ +"""Fallback pure Python implementation of msgpack""" + +import struct +import sys +from datetime import datetime as _DateTime + +if hasattr(sys, "pypy_version_info"): + from __pypy__ import newlist_hint + from __pypy__.builders import BytesBuilder + + _USING_STRINGBUILDER = True + + class BytesIO: + def __init__(self, s=b""): + if s: + self.builder = BytesBuilder(len(s)) + self.builder.append(s) + else: + self.builder = BytesBuilder() + + def write(self, s): + if isinstance(s, memoryview): + s = s.tobytes() + elif isinstance(s, bytearray): + s = bytes(s) + self.builder.append(s) + + def getvalue(self): + return self.builder.build() + +else: + from io import BytesIO + + _USING_STRINGBUILDER = False + + def newlist_hint(size): + return [] + + +from .exceptions import BufferFull, ExtraData, FormatError, OutOfData, StackError +from .ext import ExtType, Timestamp + +EX_SKIP = 0 +EX_CONSTRUCT = 1 +EX_READ_ARRAY_HEADER = 2 +EX_READ_MAP_HEADER = 3 + +TYPE_IMMEDIATE = 0 +TYPE_ARRAY = 1 +TYPE_MAP = 2 +TYPE_RAW = 3 +TYPE_BIN = 4 +TYPE_EXT = 5 + +DEFAULT_RECURSE_LIMIT = 511 + + +def _check_type_strict(obj, t, type=type, tuple=tuple): + if type(t) is tuple: + return type(obj) in t + else: + return type(obj) is t + + +def _get_data_from_buffer(obj): + view = memoryview(obj) + if view.itemsize != 1: + raise ValueError("cannot unpack from multi-byte object") + return view + + +def unpackb(packed, **kwargs): + """ + Unpack an object from `packed`. + + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``ValueError`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + + See :class:`Unpacker` for options. + """ + unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs) + unpacker.feed(packed) + try: + ret = unpacker._unpack() + except OutOfData: + raise ValueError("Unpack failed: incomplete input") + except RecursionError: + raise StackError + if unpacker._got_extradata(): + raise ExtraData(ret, unpacker._get_extradata()) + return ret + + +_NO_FORMAT_USED = "" +_MSGPACK_HEADERS = { + 0xC4: (1, _NO_FORMAT_USED, TYPE_BIN), + 0xC5: (2, ">H", TYPE_BIN), + 0xC6: (4, ">I", TYPE_BIN), + 0xC7: (2, "Bb", TYPE_EXT), + 0xC8: (3, ">Hb", TYPE_EXT), + 0xC9: (5, ">Ib", TYPE_EXT), + 0xCA: (4, ">f"), + 0xCB: (8, ">d"), + 0xCC: (1, _NO_FORMAT_USED), + 0xCD: (2, ">H"), + 0xCE: (4, ">I"), + 0xCF: (8, ">Q"), + 0xD0: (1, "b"), + 0xD1: (2, ">h"), + 0xD2: (4, ">i"), + 0xD3: (8, ">q"), + 0xD4: (1, "b1s", TYPE_EXT), + 0xD5: (2, "b2s", TYPE_EXT), + 0xD6: (4, "b4s", TYPE_EXT), + 0xD7: (8, "b8s", TYPE_EXT), + 0xD8: (16, "b16s", TYPE_EXT), + 0xD9: (1, _NO_FORMAT_USED, TYPE_RAW), + 0xDA: (2, ">H", TYPE_RAW), + 0xDB: (4, ">I", TYPE_RAW), + 0xDC: (2, ">H", TYPE_ARRAY), + 0xDD: (4, ">I", TYPE_ARRAY), + 0xDE: (2, ">H", TYPE_MAP), + 0xDF: (4, ">I", TYPE_MAP), +} + + +class Unpacker: + """Streaming unpacker. + + Arguments: + + :param file_like: + File-like object having `.read(n)` method. + If specified, unpacker reads serialized data from it and `.feed()` is not usable. 
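+
+        Example of reading from an in-memory file-like object (an illustrative
+        sketch; ``packed_bytes`` is hypothetical)::
+
+            import io
+
+            unpacker = Unpacker(io.BytesIO(packed_bytes), raw=False)
+            for o in unpacker:
+                process(o)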
+ + :param int read_size: + Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) + + :param bool use_list: + If true, unpack msgpack array to Python list. + Otherwise, unpack to Python tuple. (default: True) + + :param bool raw: + If true, unpack msgpack raw to Python bytes. + Otherwise, unpack to Python str by decoding with UTF-8 encoding (default). + + :param int timestamp: + Control how timestamp type is unpacked: + + 0 - Timestamp + 1 - float (Seconds from the EPOCH) + 2 - int (Nanoseconds from the EPOCH) + 3 - datetime.datetime (UTC). + + :param bool strict_map_key: + If true (default), only str or bytes are accepted for map (dict) keys. + + :param object_hook: + When specified, it should be callable. + Unpacker calls it with a dict argument after unpacking msgpack map. + (See also simplejson) + + :param object_pairs_hook: + When specified, it should be callable. + Unpacker calls it with a list of key-value pairs after unpacking msgpack map. + (See also simplejson) + + :param str unicode_errors: + The error handler for decoding unicode. (default: 'strict') + This option should be used only when you have msgpack data which + contains invalid UTF-8 string. + + :param int max_buffer_size: + Limits size of data waiting unpacked. 0 means 2**32-1. + The default value is 100*1024*1024 (100MiB). + Raises `BufferFull` exception when it is insufficient. + You should set this parameter when unpacking data from untrusted source. + + :param int max_str_len: + Deprecated, use *max_buffer_size* instead. + Limits max length of str. (default: max_buffer_size) + + :param int max_bin_len: + Deprecated, use *max_buffer_size* instead. + Limits max length of bin. (default: max_buffer_size) + + :param int max_array_len: + Limits max length of array. + (default: max_buffer_size) + + :param int max_map_len: + Limits max length of map. + (default: max_buffer_size//2) + + :param int max_ext_len: + Deprecated, use *max_buffer_size* instead. + Limits max size of ext type. (default: max_buffer_size) + + Example of streaming deserialize from file-like object:: + + unpacker = Unpacker(file_like) + for o in unpacker: + process(o) + + Example of streaming deserialize from socket:: + + unpacker = Unpacker() + while True: + buf = sock.recv(1024**2) + if not buf: + break + unpacker.feed(buf) + for o in unpacker: + process(o) + + Raises ``ExtraData`` when *packed* contains extra bytes. + Raises ``OutOfData`` when *packed* is incomplete. + Raises ``FormatError`` when *packed* is not valid msgpack. + Raises ``StackError`` when *packed* contains too nested. + Other exceptions can be raised during unpacking. + """ + + def __init__( + self, + file_like=None, + *, + read_size=0, + use_list=True, + raw=False, + timestamp=0, + strict_map_key=True, + object_hook=None, + object_pairs_hook=None, + list_hook=None, + unicode_errors=None, + max_buffer_size=100 * 1024 * 1024, + ext_hook=ExtType, + max_str_len=-1, + max_bin_len=-1, + max_array_len=-1, + max_map_len=-1, + max_ext_len=-1, + ): + if unicode_errors is None: + unicode_errors = "strict" + + if file_like is None: + self._feeding = True + else: + if not callable(file_like.read): + raise TypeError("`file_like.read` must be callable") + self.file_like = file_like + self._feeding = False + + #: array of bytes fed. + self._buffer = bytearray() + #: Which position we currently reads + self._buff_i = 0 + + # When Unpacker is used as an iterable, between the calls to next(), + # the buffer is not "consumed" completely, for efficiency sake. 
+ # Instead, it is done sloppily. To make sure we raise BufferFull at + # the correct moments, we have to keep track of how sloppy we were. + # Furthermore, when the buffer is incomplete (that is: in the case + # we raise an OutOfData) we need to rollback the buffer to the correct + # state, which _buf_checkpoint records. + self._buf_checkpoint = 0 + + if not max_buffer_size: + max_buffer_size = 2**31 - 1 + if max_str_len == -1: + max_str_len = max_buffer_size + if max_bin_len == -1: + max_bin_len = max_buffer_size + if max_array_len == -1: + max_array_len = max_buffer_size + if max_map_len == -1: + max_map_len = max_buffer_size // 2 + if max_ext_len == -1: + max_ext_len = max_buffer_size + + self._max_buffer_size = max_buffer_size + if read_size > self._max_buffer_size: + raise ValueError("read_size must be smaller than max_buffer_size") + self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) + self._raw = bool(raw) + self._strict_map_key = bool(strict_map_key) + self._unicode_errors = unicode_errors + self._use_list = use_list + if not (0 <= timestamp <= 3): + raise ValueError("timestamp must be 0..3") + self._timestamp = timestamp + self._list_hook = list_hook + self._object_hook = object_hook + self._object_pairs_hook = object_pairs_hook + self._ext_hook = ext_hook + self._max_str_len = max_str_len + self._max_bin_len = max_bin_len + self._max_array_len = max_array_len + self._max_map_len = max_map_len + self._max_ext_len = max_ext_len + self._stream_offset = 0 + + if list_hook is not None and not callable(list_hook): + raise TypeError("`list_hook` is not callable") + if object_hook is not None and not callable(object_hook): + raise TypeError("`object_hook` is not callable") + if object_pairs_hook is not None and not callable(object_pairs_hook): + raise TypeError("`object_pairs_hook` is not callable") + if object_hook is not None and object_pairs_hook is not None: + raise TypeError("object_pairs_hook and object_hook are mutually exclusive") + if not callable(ext_hook): + raise TypeError("`ext_hook` is not callable") + + def feed(self, next_bytes): + assert self._feeding + view = _get_data_from_buffer(next_bytes) + if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: + raise BufferFull + + # Strip buffer before checkpoint before reading file. + if self._buf_checkpoint > 0: + del self._buffer[: self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython + self._buffer.extend(view) + view.release() + + def _consume(self): + """Gets rid of the used parts of the buffer.""" + self._stream_offset += self._buff_i - self._buf_checkpoint + self._buf_checkpoint = self._buff_i + + def _got_extradata(self): + return self._buff_i < len(self._buffer) + + def _get_extradata(self): + return self._buffer[self._buff_i :] + + def read_bytes(self, n): + ret = self._read(n, raise_outofdata=False) + self._consume() + return ret + + def _read(self, n, raise_outofdata=True): + # (int) -> bytearray + self._reserve(n, raise_outofdata=raise_outofdata) + i = self._buff_i + ret = self._buffer[i : i + n] + self._buff_i = i + len(ret) + return ret + + def _reserve(self, n, raise_outofdata=True): + remain_bytes = len(self._buffer) - self._buff_i - n + + # Fast path: buffer has n bytes already + if remain_bytes >= 0: + return + + if self._feeding: + self._buff_i = self._buf_checkpoint + raise OutOfData + + # Strip buffer before checkpoint before reading file. 
+ if self._buf_checkpoint > 0: + del self._buffer[: self._buf_checkpoint] + self._buff_i -= self._buf_checkpoint + self._buf_checkpoint = 0 + + # Read from file + remain_bytes = -remain_bytes + if remain_bytes + len(self._buffer) > self._max_buffer_size: + raise BufferFull + while remain_bytes > 0: + to_read_bytes = max(self._read_size, remain_bytes) + read_data = self.file_like.read(to_read_bytes) + if not read_data: + break + assert isinstance(read_data, bytes) + self._buffer += read_data + remain_bytes -= len(read_data) + + if len(self._buffer) < n + self._buff_i and raise_outofdata: + self._buff_i = 0 # rollback + raise OutOfData + + def _read_header(self): + typ = TYPE_IMMEDIATE + n = 0 + obj = None + self._reserve(1) + b = self._buffer[self._buff_i] + self._buff_i += 1 + if b & 0b10000000 == 0: + obj = b + elif b & 0b11100000 == 0b11100000: + obj = -1 - (b ^ 0xFF) + elif b & 0b11100000 == 0b10100000: + n = b & 0b00011111 + typ = TYPE_RAW + if n > self._max_str_len: + raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})") + obj = self._read(n) + elif b & 0b11110000 == 0b10010000: + n = b & 0b00001111 + typ = TYPE_ARRAY + if n > self._max_array_len: + raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})") + elif b & 0b11110000 == 0b10000000: + n = b & 0b00001111 + typ = TYPE_MAP + if n > self._max_map_len: + raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})") + elif b == 0xC0: + obj = None + elif b == 0xC2: + obj = False + elif b == 0xC3: + obj = True + elif 0xC4 <= b <= 0xC6: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + if len(fmt) > 0: + n = struct.unpack_from(fmt, self._buffer, self._buff_i)[0] + else: + n = self._buffer[self._buff_i] + self._buff_i += size + if n > self._max_bin_len: + raise ValueError(f"{n} exceeds max_bin_len({self._max_bin_len})") + obj = self._read(n) + elif 0xC7 <= b <= 0xC9: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + L, n = struct.unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + if L > self._max_ext_len: + raise ValueError(f"{L} exceeds max_ext_len({self._max_ext_len})") + obj = self._read(L) + elif 0xCA <= b <= 0xD3: + size, fmt = _MSGPACK_HEADERS[b] + self._reserve(size) + if len(fmt) > 0: + obj = struct.unpack_from(fmt, self._buffer, self._buff_i)[0] + else: + obj = self._buffer[self._buff_i] + self._buff_i += size + elif 0xD4 <= b <= 0xD8: + size, fmt, typ = _MSGPACK_HEADERS[b] + if self._max_ext_len < size: + raise ValueError(f"{size} exceeds max_ext_len({self._max_ext_len})") + self._reserve(size + 1) + n, obj = struct.unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + 1 + elif 0xD9 <= b <= 0xDB: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + if len(fmt) > 0: + (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i) + else: + n = self._buffer[self._buff_i] + self._buff_i += size + if n > self._max_str_len: + raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})") + obj = self._read(n) + elif 0xDC <= b <= 0xDD: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + if n > self._max_array_len: + raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})") + elif 0xDE <= b <= 0xDF: + size, fmt, typ = _MSGPACK_HEADERS[b] + self._reserve(size) + (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i) + self._buff_i += size + if n > self._max_map_len: + raise ValueError(f"{n} exceeds 
max_map_len({self._max_map_len})") + else: + raise FormatError("Unknown header: 0x%x" % b) + return typ, n, obj + + def _unpack(self, execute=EX_CONSTRUCT): + typ, n, obj = self._read_header() + + if execute == EX_READ_ARRAY_HEADER: + if typ != TYPE_ARRAY: + raise ValueError("Expected array") + return n + if execute == EX_READ_MAP_HEADER: + if typ != TYPE_MAP: + raise ValueError("Expected map") + return n + # TODO should we eliminate the recursion? + if typ == TYPE_ARRAY: + if execute == EX_SKIP: + for i in range(n): + # TODO check whether we need to call `list_hook` + self._unpack(EX_SKIP) + return + ret = newlist_hint(n) + for i in range(n): + ret.append(self._unpack(EX_CONSTRUCT)) + if self._list_hook is not None: + ret = self._list_hook(ret) + # TODO is the interaction between `list_hook` and `use_list` ok? + return ret if self._use_list else tuple(ret) + if typ == TYPE_MAP: + if execute == EX_SKIP: + for i in range(n): + # TODO check whether we need to call hooks + self._unpack(EX_SKIP) + self._unpack(EX_SKIP) + return + if self._object_pairs_hook is not None: + ret = self._object_pairs_hook( + (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) for _ in range(n) + ) + else: + ret = {} + for _ in range(n): + key = self._unpack(EX_CONSTRUCT) + if self._strict_map_key and type(key) not in (str, bytes): + raise ValueError("%s is not allowed for map key" % str(type(key))) + if isinstance(key, str): + key = sys.intern(key) + ret[key] = self._unpack(EX_CONSTRUCT) + if self._object_hook is not None: + ret = self._object_hook(ret) + return ret + if execute == EX_SKIP: + return + if typ == TYPE_RAW: + if self._raw: + obj = bytes(obj) + else: + obj = obj.decode("utf_8", self._unicode_errors) + return obj + if typ == TYPE_BIN: + return bytes(obj) + if typ == TYPE_EXT: + if n == -1: # timestamp + ts = Timestamp.from_bytes(bytes(obj)) + if self._timestamp == 1: + return ts.to_unix() + elif self._timestamp == 2: + return ts.to_unix_nano() + elif self._timestamp == 3: + return ts.to_datetime() + else: + return ts + else: + return self._ext_hook(n, bytes(obj)) + assert typ == TYPE_IMMEDIATE + return obj + + def __iter__(self): + return self + + def __next__(self): + try: + ret = self._unpack(EX_CONSTRUCT) + self._consume() + return ret + except OutOfData: + self._consume() + raise StopIteration + except RecursionError: + raise StackError + + next = __next__ + + def skip(self): + self._unpack(EX_SKIP) + self._consume() + + def unpack(self): + try: + ret = self._unpack(EX_CONSTRUCT) + except RecursionError: + raise StackError + self._consume() + return ret + + def read_array_header(self): + ret = self._unpack(EX_READ_ARRAY_HEADER) + self._consume() + return ret + + def read_map_header(self): + ret = self._unpack(EX_READ_MAP_HEADER) + self._consume() + return ret + + def tell(self): + return self._stream_offset + + +class Packer: + """ + MessagePack Packer + + Usage:: + + packer = Packer() + astream.write(packer.pack(a)) + astream.write(packer.pack(b)) + + Packer's constructor has some keyword arguments: + + :param default: + When specified, it should be callable. + Convert user type to builtin type that Packer supports. + See also simplejson's document. + + :param bool use_single_float: + Use single precision float type for float. (default: False) + + :param bool autoreset: + Reset buffer after each pack and return its content as `bytes`. (default: True). + If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. 
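+
+        Example of autoreset=False (an illustrative sketch)::
+
+            packer = Packer(autoreset=False)
+            packer.pack({"seq": 1})
+            packer.pack({"seq": 2})
+            blob = packer.bytes()   # both objects, back to back
+            packer.reset()          # clear the internal buffer for reuse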
+ + :param bool use_bin_type: + Use bin type introduced in msgpack spec 2.0 for bytes. + It also enables str8 type for unicode. (default: True) + + :param bool strict_types: + If set to true, types will be checked to be exact. Derived classes + from serializable types will not be serialized and will be + treated as unsupported type and forwarded to default. + Additionally tuples will not be serialized as lists. + This is useful when trying to implement accurate serialization + for python types. + + :param bool datetime: + If set to true, datetime with tzinfo is packed into Timestamp type. + Note that the tzinfo is stripped in the timestamp. + You can get UTC datetime with `timestamp=3` option of the Unpacker. + + :param str unicode_errors: + The error handler for encoding unicode. (default: 'strict') + DO NOT USE THIS!! This option is kept for very specific usage. + + :param int buf_size: + Internal buffer size. This option is used only for C implementation. + """ + + def __init__( + self, + *, + default=None, + use_single_float=False, + autoreset=True, + use_bin_type=True, + strict_types=False, + datetime=False, + unicode_errors=None, + buf_size=None, + ): + self._strict_types = strict_types + self._use_float = use_single_float + self._autoreset = autoreset + self._use_bin_type = use_bin_type + self._buffer = BytesIO() + self._datetime = bool(datetime) + self._unicode_errors = unicode_errors or "strict" + if default is not None and not callable(default): + raise TypeError("default must be callable") + self._default = default + + def _pack( + self, + obj, + nest_limit=DEFAULT_RECURSE_LIMIT, + check=isinstance, + check_type_strict=_check_type_strict, + ): + default_used = False + if self._strict_types: + check = check_type_strict + list_types = list + else: + list_types = (list, tuple) + while True: + if nest_limit < 0: + raise ValueError("recursion limit exceeded") + if obj is None: + return self._buffer.write(b"\xc0") + if check(obj, bool): + if obj: + return self._buffer.write(b"\xc3") + return self._buffer.write(b"\xc2") + if check(obj, int): + if 0 <= obj < 0x80: + return self._buffer.write(struct.pack("B", obj)) + if -0x20 <= obj < 0: + return self._buffer.write(struct.pack("b", obj)) + if 0x80 <= obj <= 0xFF: + return self._buffer.write(struct.pack("BB", 0xCC, obj)) + if -0x80 <= obj < 0: + return self._buffer.write(struct.pack(">Bb", 0xD0, obj)) + if 0xFF < obj <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xCD, obj)) + if -0x8000 <= obj < -0x80: + return self._buffer.write(struct.pack(">Bh", 0xD1, obj)) + if 0xFFFF < obj <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xCE, obj)) + if -0x80000000 <= obj < -0x8000: + return self._buffer.write(struct.pack(">Bi", 0xD2, obj)) + if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF: + return self._buffer.write(struct.pack(">BQ", 0xCF, obj)) + if -0x8000000000000000 <= obj < -0x80000000: + return self._buffer.write(struct.pack(">Bq", 0xD3, obj)) + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = True + continue + raise OverflowError("Integer value out of range") + if check(obj, (bytes, bytearray)): + n = len(obj) + if n >= 2**32: + raise ValueError("%s is too large" % type(obj).__name__) + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, str): + obj = obj.encode("utf-8", self._unicode_errors) + n = len(obj) + if n >= 2**32: + raise ValueError("String is too large") + self._pack_raw_header(n) + return self._buffer.write(obj) + if check(obj, memoryview): + 
n = obj.nbytes + if n >= 2**32: + raise ValueError("Memoryview is too large") + self._pack_bin_header(n) + return self._buffer.write(obj) + if check(obj, float): + if self._use_float: + return self._buffer.write(struct.pack(">Bf", 0xCA, obj)) + return self._buffer.write(struct.pack(">Bd", 0xCB, obj)) + if check(obj, (ExtType, Timestamp)): + if check(obj, Timestamp): + code = -1 + data = obj.to_bytes() + else: + code = obj.code + data = obj.data + assert isinstance(code, int) + assert isinstance(data, bytes) + L = len(data) + if L == 1: + self._buffer.write(b"\xd4") + elif L == 2: + self._buffer.write(b"\xd5") + elif L == 4: + self._buffer.write(b"\xd6") + elif L == 8: + self._buffer.write(b"\xd7") + elif L == 16: + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xC7, L)) + elif L <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xC8, L)) + else: + self._buffer.write(struct.pack(">BI", 0xC9, L)) + self._buffer.write(struct.pack("b", code)) + self._buffer.write(data) + return + if check(obj, list_types): + n = len(obj) + self._pack_array_header(n) + for i in range(n): + self._pack(obj[i], nest_limit - 1) + return + if check(obj, dict): + return self._pack_map_pairs(len(obj), obj.items(), nest_limit - 1) + + if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None: + obj = Timestamp.from_datetime(obj) + default_used = 1 + continue + + if not default_used and self._default is not None: + obj = self._default(obj) + default_used = 1 + continue + + if self._datetime and check(obj, _DateTime): + raise ValueError(f"Cannot serialize {obj!r} where tzinfo=None") + + raise TypeError(f"Cannot serialize {obj!r}") + + def pack(self, obj): + try: + self._pack(obj) + except: + self._buffer = BytesIO() # force reset + raise + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = BytesIO() + return ret + + def pack_map_pairs(self, pairs): + self._pack_map_pairs(len(pairs), pairs) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = BytesIO() + return ret + + def pack_array_header(self, n): + if n >= 2**32: + raise ValueError + self._pack_array_header(n) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = BytesIO() + return ret + + def pack_map_header(self, n): + if n >= 2**32: + raise ValueError + self._pack_map_header(n) + if self._autoreset: + ret = self._buffer.getvalue() + self._buffer = BytesIO() + return ret + + def pack_ext_type(self, typecode, data): + if not isinstance(typecode, int): + raise TypeError("typecode must have int type.") + if not 0 <= typecode <= 127: + raise ValueError("typecode should be 0-127") + if not isinstance(data, bytes): + raise TypeError("data must have bytes type") + L = len(data) + if L > 0xFFFFFFFF: + raise ValueError("Too large data") + if L == 1: + self._buffer.write(b"\xd4") + elif L == 2: + self._buffer.write(b"\xd5") + elif L == 4: + self._buffer.write(b"\xd6") + elif L == 8: + self._buffer.write(b"\xd7") + elif L == 16: + self._buffer.write(b"\xd8") + elif L <= 0xFF: + self._buffer.write(b"\xc7" + struct.pack("B", L)) + elif L <= 0xFFFF: + self._buffer.write(b"\xc8" + struct.pack(">H", L)) + else: + self._buffer.write(b"\xc9" + struct.pack(">I", L)) + self._buffer.write(struct.pack("B", typecode)) + self._buffer.write(data) + + def _pack_array_header(self, n): + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x90 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDC, n)) + if n <= 0xFFFFFFFF: + return 
self._buffer.write(struct.pack(">BI", 0xDD, n)) + raise ValueError("Array is too large") + + def _pack_map_header(self, n): + if n <= 0x0F: + return self._buffer.write(struct.pack("B", 0x80 + n)) + if n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xDE, n)) + if n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xDF, n)) + raise ValueError("Dict is too large") + + def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): + self._pack_map_header(n) + for k, v in pairs: + self._pack(k, nest_limit - 1) + self._pack(v, nest_limit - 1) + + def _pack_raw_header(self, n): + if n <= 0x1F: + self._buffer.write(struct.pack("B", 0xA0 + n)) + elif self._use_bin_type and n <= 0xFF: + self._buffer.write(struct.pack(">BB", 0xD9, n)) + elif n <= 0xFFFF: + self._buffer.write(struct.pack(">BH", 0xDA, n)) + elif n <= 0xFFFFFFFF: + self._buffer.write(struct.pack(">BI", 0xDB, n)) + else: + raise ValueError("Raw is too large") + + def _pack_bin_header(self, n): + if not self._use_bin_type: + return self._pack_raw_header(n) + elif n <= 0xFF: + return self._buffer.write(struct.pack(">BB", 0xC4, n)) + elif n <= 0xFFFF: + return self._buffer.write(struct.pack(">BH", 0xC5, n)) + elif n <= 0xFFFFFFFF: + return self._buffer.write(struct.pack(">BI", 0xC6, n)) + else: + raise ValueError("Bin is too large") + + def bytes(self): + """Return internal buffer contents as bytes object""" + return self._buffer.getvalue() + + def reset(self): + """Reset internal buffer. + + This method is useful only when autoreset=False. + """ + self._buffer = BytesIO() + + def getbuffer(self): + """Return view of internal buffer.""" + if _USING_STRINGBUILDER: + return memoryview(self.bytes()) + else: + return self._buffer.getbuffer() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE new file mode 100644 index 0000000..6f62d44 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE.APACHE b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE.APACHE new file mode 100644 index 0000000..f433b1a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE.BSD b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE.BSD new file mode 100644 index 0000000..42ce7b7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py new file mode 100644 index 0000000..d45c22c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__init__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "25.0" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = f"2014 {__author__}" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..665cdb1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc new file mode 100644 index 0000000..ece78ee Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc new file mode 100644 index 0000000..67ec398 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc new file mode 100644 index 0000000..c21f092 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc new file mode 100644 index 0000000..0b3259a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc new file mode 100644 index 0000000..0c7cea8 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc new file mode 100644 index 0000000..f93f800 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc new file mode 100644 index 0000000..cb12ae1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc new file mode 100644 index 0000000..21c1cb9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc new file mode 100644 index 0000000..696fd74 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc new file mode 100644 index 0000000..d972931 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc new file mode 100644 index 0000000..0fd572a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..d127dd1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000..2a5e2f8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_elffile.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_elffile.py new file mode 100644 index 0000000..7a5afc3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_elffile.py @@ -0,0 +1,109 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. 
Only the read interface is implemented. + +Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca +ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +""" + +from __future__ import annotations + +import enum +import os +import struct +from typing import IO + + +class ELFInvalid(ValueError): + pass + + +class EIClass(enum.IntEnum): + C32 = 1 + C64 = 2 + + +class EIData(enum.IntEnum): + Lsb = 1 + Msb = 2 + + +class EMachine(enum.IntEnum): + I386 = 3 + S390 = 22 + Arm = 40 + X8664 = 62 + AArc64 = 183 + + +class ELFFile: + """ + Representation of an ELF executable. + """ + + def __init__(self, f: IO[bytes]) -> None: + self._f = f + + try: + ident = self._read("16B") + except struct.error as e: + raise ELFInvalid("unable to parse identification") from e + magic = bytes(ident[:4]) + if magic != b"\x7fELF": + raise ELFInvalid(f"invalid magic: {magic!r}") + + self.capacity = ident[4] # Format for program header (bitness). + self.encoding = ident[5] # Data structure encoding (endianness). + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. + e_fmt, self._p_fmt, self._p_idx = { + (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB. + (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. + (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB. + (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. + }[(self.capacity, self.encoding)] + except KeyError as e: + raise ELFInvalid( + f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})" + ) from e + + try: + ( + _, + self.machine, # Architecture type. + _, + _, + self._e_phoff, # Offset of program header. + _, + self.flags, # Processor-specific flags. + _, + self._e_phentsize, # Size of section. + self._e_phnum, # Number of sections. + ) = self._read(e_fmt) + except struct.error as e: + raise ELFInvalid("unable to parse machine and section information") from e + + def _read(self, fmt: str) -> tuple[int, ...]: + return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) + + @property + def interpreter(self) -> str | None: + """ + The path recorded in the ``PT_INTERP`` section header. + """ + for index in range(self._e_phnum): + self._f.seek(self._e_phoff + self._e_phentsize * index) + try: + data = self._read(self._p_fmt) + except struct.error: + continue + if data[self._p_idx[0]] != 3: # Not PT_INTERP. + continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py new file mode 100644 index 0000000..95f5576 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_manylinux.py @@ -0,0 +1,262 @@ +from __future__ import annotations + +import collections +import contextlib +import functools +import os +import re +import sys +import warnings +from typing import Generator, Iterator, NamedTuple, Sequence + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + +# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` +# as the type for `path` until then.
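+# A minimal usage sketch (illustrative only, not part of the vendored
+# sources): `_demo_interpreter` is a hypothetical helper showing how the
+# ELFFile class above exposes the PT_INTERP loader path; it assumes
+# /usr/bin/python3 is a dynamically linked ELF executable.
+def _demo_interpreter(path: str = "/usr/bin/python3") -> None:
+    try:
+        with open(path, "rb") as f:
+            # Prints e.g. "/lib64/ld-linux-x86-64.so.2" on a glibc x86-64 system.
+            print(ELFFile(f).interpreter)
+    except (OSError, ValueError):  # ELFInvalid is a ValueError subclass.
+        print("not a readable ELF executable")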
+@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + allowed_archs = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", + } + return any(arch in allowed_archs for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> str | None: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # Should be a string like "glibc 2.17". + version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.rsplit() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> str | None: + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can't proceed, so we bail on our attempt.
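+    # (Illustrative note: on a glibc system, the gnu_get_libc_version call
+    # below typically returns bytes such as b"2.36", which the compatibility
+    # shim decodes to "2.36" before the string is returned.)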
+ try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _glibc_version_string() -> str | None: + """Returns glibc version string, or None if not using glibc.""" + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> tuple[int, int]: + """Parse glibc version. + + We use a regexp instead of str.split because we want to discard any + random junk that might come after the minor version -- this might happen + in patched/forked versions of glibc (e.g. Linaro's version of glibc + uses version strings like "2.20-2014.11"). See gh-3588. + """ + m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) + if not m: + warnings.warn( + f"Expected glibc version with 2 components major.minor, got: {version_str}", + RuntimeWarning, + stacklevel=2, + ) + return -1, -1 + return int(m.group("major")), int(m.group("minor")) + + +@functools.lru_cache +def _get_glibc_version() -> tuple[int, int]: + version_str = _glibc_version_string() + if version_str is None: + return (-1, -1) + return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(arch: str, version: _GLibCVersion) -> bool: + sys_glibc = _get_glibc_version() + if sys_glibc < version: + return False + # Check for presence of _manylinux module. + try: + import _manylinux + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate manylinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be manylinux-compatible. + + :returns: An iterator of compatible manylinux tags. + """ + if not _have_compatible_abi(sys.executable, archs): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if set(archs) & {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5).
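+    # (The stored values (2, 16) and (2, 4) sit one minor below the real
+    # floors (2, 17) and (2, 5) because the range() call further down
+    # excludes min_minor itself.)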
+ too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(arch, glibc_version): + yield f"{tag}_{arch}" + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(arch, glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py new file mode 100644 index 0000000..d2bf30b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_musllinux.py @@ -0,0 +1,85 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. +""" + +from __future__ import annotations + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> _MuslVersion | None: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache +def _get_musl_version(executable: str) -> _MuslVersion | None: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. 
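+
+        For example, with musl 1.2 and ``archs == ["x86_64"]`` this yields
+        ``musllinux_1_2_x86_64``, ``musllinux_1_1_x86_64``, and
+        ``musllinux_1_0_x86_64``.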
+ + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. + return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_parser.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_parser.py new file mode 100644 index 0000000..0007c0a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_parser.py @@ -0,0 +1,353 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains EBNF-inspired grammar representing +the implementation. +""" + +from __future__ import annotations + +import ast +from typing import NamedTuple, Sequence, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: list[str] + specifier: str + marker: MarkerList | None + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> tuple[str, str, MarkerList | None]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? 
+ """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? + """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> list[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: list[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? 
+ """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? 
+ """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py new file mode 100644 index 0000000..90a6465 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_tokenizer.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_tokenizer.py new file mode 100644 index 0000000..d28a9b6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/_tokenizer.py @@ -0,0 +1,195 @@ +from __future__ import annotations + +import contextlib +import re +from dataclasses import dataclass +from typing import Iterator, NoReturn + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: dict[str, str | re.Pattern[str]] = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extras? + |dependency_groups + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "VERSION_PREFIX_TRAIL": r"\.\*", + "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. 
+ """ + + def __init__( + self, + source: str, + *, + rules: dict[str, str | re.Pattern[str]], + ) -> None: + self.source = source + self.rules: dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Token | None = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert self.next_token is None, ( + f"Cannot check for {name!r}, already have {self.next_token!r}" + ) + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: int | None = None, + span_end: int | None = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Iterator[None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__init__.py new file mode 100644 index 0000000..031f277 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__init__.py @@ -0,0 +1,145 @@ +####################################################################################### +# +# Adapted from: +# https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py +# +# MIT License +# +# Copyright (c) 2017-present Ofek Lev +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of this +# software and associated documentation files (the "Software"), to deal in the Software +# without restriction, including without limitation the rights to use, copy, modify, +# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this 
permission notice shall be included in all copies +# or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE +# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# +# With additional allowance of arbitrary `LicenseRef-` identifiers, not just +# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`. +# +####################################################################################### +from __future__ import annotations + +import re +from typing import NewType, cast + +from pip._vendor.packaging.licenses._spdx import EXCEPTIONS, LICENSES + +__all__ = [ + "InvalidLicenseExpression", + "NormalizedLicenseExpression", + "canonicalize_license_expression", +] + +license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$") + +NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str) + + +class InvalidLicenseExpression(ValueError): + """Raised when a license-expression string is invalid + + >>> canonicalize_license_expression("invalid") + Traceback (most recent call last): + ... + packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid' + """ + + +def canonicalize_license_expression( + raw_license_expression: str, +) -> NormalizedLicenseExpression: + if not raw_license_expression: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) + + # Pad any parentheses so tokenization can be achieved by merely splitting on + # whitespace. + license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ") + licenseref_prefix = "LicenseRef-" + license_refs = { + ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :] + for ref in license_expression.split() + if ref.lower().startswith(licenseref_prefix.lower()) + } + + # Normalize to lower case so we can look up licenses/exceptions + # and so boolean operators are Python-compatible. + license_expression = license_expression.lower() + + tokens = license_expression.split() + + # Rather than implementing boolean logic, we create an expression that Python can + # parse. Everything that is not involved with the grammar itself is treated as + # `False` and the expression should evaluate as such. + python_tokens = [] + for token in tokens: + if token not in {"or", "and", "with", "(", ")"}: + python_tokens.append("False") + elif token == "with": + python_tokens.append("or") + elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) + else: + python_tokens.append(token) + + python_expression = " ".join(python_tokens) + try: + invalid = eval(python_expression, globals(), locals()) + except Exception: + invalid = True + + if invalid is not False: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) from None + + # Take a final pass to check for unknown licenses/exceptions. 
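+    # (For example, "mit AND (apache-2.0 OR bsd-2-clause)" canonicalizes to
+    # "MIT AND (Apache-2.0 OR BSD-2-Clause)".)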
+ normalized_tokens = [] + for token in tokens: + if token in {"or", "and", "with", "(", ")"}: + normalized_tokens.append(token.upper()) + continue + + if normalized_tokens and normalized_tokens[-1] == "WITH": + if token not in EXCEPTIONS: + message = f"Unknown license exception: {token!r}" + raise InvalidLicenseExpression(message) + + normalized_tokens.append(EXCEPTIONS[token]["id"]) + else: + if token.endswith("+"): + final_token = token[:-1] + suffix = "+" + else: + final_token = token + suffix = "" + + if final_token.startswith("licenseref-"): + if not license_ref_allowed.match(final_token): + message = f"Invalid licenseref: {final_token!r}" + raise InvalidLicenseExpression(message) + normalized_tokens.append(license_refs[final_token] + suffix) + else: + if final_token not in LICENSES: + message = f"Unknown license: {final_token!r}" + raise InvalidLicenseExpression(message) + normalized_tokens.append(LICENSES[final_token]["id"] + suffix) + + normalized_expression = " ".join(normalized_tokens) + + return cast( + NormalizedLicenseExpression, + normalized_expression.replace("( ", "(").replace(" )", ")"), + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..8f71353 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc new file mode 100644 index 0000000..51685e4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/__pycache__/_spdx.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/_spdx.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/_spdx.py new file mode 100644 index 0000000..eac2227 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/licenses/_spdx.py @@ -0,0 +1,759 @@ + +from __future__ import annotations + +from typing import TypedDict + +class SPDXLicense(TypedDict): + id: str + deprecated: bool + +class SPDXException(TypedDict): + id: str + deprecated: bool + + +VERSION = '3.25.0' + +LICENSES: dict[str, SPDXLicense] = { + '0bsd': {'id': '0BSD', 'deprecated': False}, + '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False}, + 'aal': {'id': 'AAL', 'deprecated': False}, + 'abstyles': {'id': 'Abstyles', 'deprecated': False}, + 'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False}, + 'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False}, + 'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False}, + 'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False}, + 'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False}, + 'adsl': {'id': 'ADSL', 'deprecated': False}, + 'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False}, + 'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False}, + 'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False}, + 'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False}, + 'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False}, + 'afmparse': {'id': 'Afmparse', 'deprecated': False}, + 'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True}, + 'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False}, + 'agpl-1.0-or-later': 
{'id': 'AGPL-1.0-or-later', 'deprecated': False}, + 'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True}, + 'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False}, + 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False}, + 'aladdin': {'id': 'Aladdin', 'deprecated': False}, + 'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False}, + 'amdplpa': {'id': 'AMDPLPA', 'deprecated': False}, + 'aml': {'id': 'AML', 'deprecated': False}, + 'aml-glslang': {'id': 'AML-glslang', 'deprecated': False}, + 'ampas': {'id': 'AMPAS', 'deprecated': False}, + 'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False}, + 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False}, + 'any-osi': {'id': 'any-OSI', 'deprecated': False}, + 'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False}, + 'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False}, + 'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False}, + 'apafml': {'id': 'APAFML', 'deprecated': False}, + 'apl-1.0': {'id': 'APL-1.0', 'deprecated': False}, + 'app-s2p': {'id': 'App-s2p', 'deprecated': False}, + 'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False}, + 'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False}, + 'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False}, + 'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False}, + 'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False}, + 'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False}, + 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False}, + 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False}, + 'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False}, + 'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False}, + 'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False}, + 'baekmuk': {'id': 'Baekmuk', 'deprecated': False}, + 'bahyph': {'id': 'Bahyph', 'deprecated': False}, + 'barr': {'id': 'Barr', 'deprecated': False}, + 'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False}, + 'beerware': {'id': 'Beerware', 'deprecated': False}, + 'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False}, + 'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False}, + 'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False}, + 'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False}, + 'blessing': {'id': 'blessing', 'deprecated': False}, + 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False}, + 'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False}, + 'borceux': {'id': 'Borceux', 'deprecated': False}, + 'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False}, + 'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False}, + 'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False}, + 'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False}, + 'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False}, + 'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False}, + 'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True}, + 'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True}, + 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False}, + 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False}, + 'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False}, + 'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False}, + 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 
'deprecated': False}, + 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False}, + 'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False}, + 'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False}, + 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False}, + 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False}, + 'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False}, + 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False}, + 'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False}, + 'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False}, + 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False}, + 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False}, + 'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False}, + 'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False}, + 'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False}, + 'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False}, + 'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False}, + 'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False}, + 'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False}, + 'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False}, + 'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False}, + 'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False}, + 'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False}, + 'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False}, + 'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True}, + 'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False}, + 'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False}, + 'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False}, + 'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False}, + 'caldera': {'id': 'Caldera', 'deprecated': False}, + 'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False}, + 'catharon': {'id': 'Catharon', 'deprecated': False}, + 'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False}, + 'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False}, + 'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False}, + 'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False}, + 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False}, + 'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False}, + 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False}, + 'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False}, + 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False}, + 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False}, + 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False}, + 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False}, + 'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False}, + 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False}, + 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False}, + 'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': 
False}, + 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False}, + 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False}, + 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False}, + 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False}, + 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False}, + 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False}, + 'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False}, + 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False}, + 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False}, + 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False}, + 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False}, + 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False}, + 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False}, + 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False}, + 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False}, + 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False}, + 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False}, + 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False}, + 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False}, + 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False}, + 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False}, + 'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False}, + 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False}, + 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False}, + 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False}, + 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False}, + 'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False}, + 'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False}, + 'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False}, + 'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False}, + 'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False}, + 'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False}, + 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False}, + 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False}, + 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False}, + 'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False}, + 'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False}, + 'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False}, + 'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False}, + 'cecill-b': {'id': 'CECILL-B', 'deprecated': False}, + 'cecill-c': {'id': 'CECILL-C', 'deprecated': False}, + 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False}, + 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False}, + 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False}, + 'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 
'deprecated': False}, + 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False}, + 'cfitsio': {'id': 'CFITSIO', 'deprecated': False}, + 'check-cvs': {'id': 'check-cvs', 'deprecated': False}, + 'checkmk': {'id': 'checkmk', 'deprecated': False}, + 'clartistic': {'id': 'ClArtistic', 'deprecated': False}, + 'clips': {'id': 'Clips', 'deprecated': False}, + 'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False}, + 'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False}, + 'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False}, + 'cnri-python': {'id': 'CNRI-Python', 'deprecated': False}, + 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False}, + 'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False}, + 'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False}, + 'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False}, + 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False}, + 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False}, + 'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False}, + 'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False}, + 'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False}, + 'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False}, + 'cronyx': {'id': 'Cronyx', 'deprecated': False}, + 'crossword': {'id': 'Crossword', 'deprecated': False}, + 'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False}, + 'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False}, + 'cube': {'id': 'Cube', 'deprecated': False}, + 'curl': {'id': 'curl', 'deprecated': False}, + 'cve-tou': {'id': 'cve-tou', 'deprecated': False}, + 'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False}, + 'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False}, + 'diffmark': {'id': 'diffmark', 'deprecated': False}, + 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False}, + 'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False}, + 'doc': {'id': 'DOC', 'deprecated': False}, + 'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False}, + 'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False}, + 'dotseqn': {'id': 'Dotseqn', 'deprecated': False}, + 'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False}, + 'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False}, + 'dsdp': {'id': 'DSDP', 'deprecated': False}, + 'dtoa': {'id': 'dtoa', 'deprecated': False}, + 'dvipdfm': {'id': 'dvipdfm', 'deprecated': False}, + 'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False}, + 'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False}, + 'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True}, + 'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False}, + 'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False}, + 'egenix': {'id': 'eGenix', 'deprecated': False}, + 'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False}, + 'entessa': {'id': 'Entessa', 'deprecated': False}, + 'epics': {'id': 'EPICS', 'deprecated': False}, + 'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False}, + 'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False}, + 'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False}, + 'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False}, + 'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False}, + 'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False}, + 'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False}, + 'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False}, + 'eurosym': {'id': 'Eurosym', 'deprecated': False}, + 'fair': {'id': 'Fair', 'deprecated': False}, + 'fbm': {'id': 'FBM', 'deprecated': False}, + 'fdk-aac': {'id': 'FDK-AAC', 
'deprecated': False}, + 'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False}, + 'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False}, + 'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False}, + 'freeimage': {'id': 'FreeImage', 'deprecated': False}, + 'fsfap': {'id': 'FSFAP', 'deprecated': False}, + 'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False}, + 'fsful': {'id': 'FSFUL', 'deprecated': False}, + 'fsfullr': {'id': 'FSFULLR', 'deprecated': False}, + 'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False}, + 'ftl': {'id': 'FTL', 'deprecated': False}, + 'furuseth': {'id': 'Furuseth', 'deprecated': False}, + 'fwlw': {'id': 'fwlw', 'deprecated': False}, + 'gcr-docs': {'id': 'GCR-docs', 'deprecated': False}, + 'gd': {'id': 'GD', 'deprecated': False}, + 'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True}, + 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False}, + 'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False}, + 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False}, + 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False}, + 'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True}, + 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False}, + 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False}, + 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False}, + 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False}, + 'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True}, + 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False}, + 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False}, + 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False}, + 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False}, + 'giftware': {'id': 'Giftware', 'deprecated': False}, + 'gl2ps': {'id': 'GL2PS', 'deprecated': False}, + 'glide': {'id': 'Glide', 'deprecated': False}, + 'glulxe': {'id': 'Glulxe', 'deprecated': False}, + 'glwtpl': {'id': 'GLWTPL', 'deprecated': False}, + 'gnuplot': {'id': 'gnuplot', 'deprecated': False}, + 'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True}, + 'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True}, + 'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False}, + 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False}, + 'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True}, + 'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True}, + 'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False}, + 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False}, + 'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True}, + 'gpl-2.0-with-classpath-exception': {'id': 
'GPL-2.0-with-classpath-exception', 'deprecated': True}, + 'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True}, + 'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True}, + 'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True}, + 'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True}, + 'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False}, + 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False}, + 'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True}, + 'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False}, + 'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False}, + 'gtkbook': {'id': 'gtkbook', 'deprecated': False}, + 'gutmann': {'id': 'Gutmann', 'deprecated': False}, + 'haskellreport': {'id': 'HaskellReport', 'deprecated': False}, + 'hdparm': {'id': 'hdparm', 'deprecated': False}, + 'hidapi': {'id': 'HIDAPI', 'deprecated': False}, + 'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False}, + 'hp-1986': {'id': 'HP-1986', 'deprecated': False}, + 'hp-1989': {'id': 'HP-1989', 'deprecated': False}, + 'hpnd': {'id': 'HPND', 'deprecated': False}, + 'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False}, + 'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False}, + 'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False}, + 'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False}, + 'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False}, + 'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False}, + 'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False}, + 'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False}, + 'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False}, + 'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False}, + 'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False}, + 'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False}, + 'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False}, + 'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False}, + 'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False}, + 'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False}, + 'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False}, + 'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False}, + 'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False}, + 'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False}, + 'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False}, + 'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False}, + 'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False}, + 'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False}, + 'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False}, + 'icu': {'id': 'ICU', 'deprecated': False}, + 'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False}, + 'ijg': {'id': 'IJG', 'deprecated': False}, + 'ijg-short': {'id': 'IJG-short', 'deprecated': False}, + 'imagemagick': {'id': 'ImageMagick', 'deprecated': False}, + 'imatix': {'id': 'iMatix', 'deprecated': False}, + 'imlib2': {'id': 'Imlib2', 'deprecated': False}, + 'info-zip': {'id': 
'Info-ZIP', 'deprecated': False}, + 'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False}, + 'intel': {'id': 'Intel', 'deprecated': False}, + 'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False}, + 'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False}, + 'ipa': {'id': 'IPA', 'deprecated': False}, + 'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False}, + 'isc': {'id': 'ISC', 'deprecated': False}, + 'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False}, + 'jam': {'id': 'Jam', 'deprecated': False}, + 'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False}, + 'jpl-image': {'id': 'JPL-image', 'deprecated': False}, + 'jpnic': {'id': 'JPNIC', 'deprecated': False}, + 'json': {'id': 'JSON', 'deprecated': False}, + 'kastrup': {'id': 'Kastrup', 'deprecated': False}, + 'kazlib': {'id': 'Kazlib', 'deprecated': False}, + 'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False}, + 'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False}, + 'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False}, + 'latex2e': {'id': 'Latex2e', 'deprecated': False}, + 'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False}, + 'leptonica': {'id': 'Leptonica', 'deprecated': False}, + 'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True}, + 'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True}, + 'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False}, + 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False}, + 'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True}, + 'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True}, + 'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False}, + 'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False}, + 'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True}, + 'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True}, + 'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False}, + 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False}, + 'lgpllr': {'id': 'LGPLLR', 'deprecated': False}, + 'libpng': {'id': 'Libpng', 'deprecated': False}, + 'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False}, + 'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False}, + 'libtiff': {'id': 'libtiff', 'deprecated': False}, + 'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False}, + 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False}, + 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False}, + 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False}, + 'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False}, + 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False}, + 'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False}, + 'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False}, + 'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False}, + 'loop': {'id': 'LOOP', 'deprecated': False}, + 'lpd-document': {'id': 'LPD-document', 'deprecated': False}, + 'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False}, + 'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False}, + 'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False}, + 'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False}, + 'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False}, + 'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False}, + 'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False}, + 'lsof': {'id': 'lsof', 'deprecated': False}, + 'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False}, + 
'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False}, + 'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False}, + 'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False}, + 'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False}, + 'magaz': {'id': 'magaz', 'deprecated': False}, + 'mailprio': {'id': 'mailprio', 'deprecated': False}, + 'makeindex': {'id': 'MakeIndex', 'deprecated': False}, + 'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False}, + 'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False}, + 'metamail': {'id': 'metamail', 'deprecated': False}, + 'minpack': {'id': 'Minpack', 'deprecated': False}, + 'miros': {'id': 'MirOS', 'deprecated': False}, + 'mit': {'id': 'MIT', 'deprecated': False}, + 'mit-0': {'id': 'MIT-0', 'deprecated': False}, + 'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False}, + 'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False}, + 'mit-enna': {'id': 'MIT-enna', 'deprecated': False}, + 'mit-feh': {'id': 'MIT-feh', 'deprecated': False}, + 'mit-festival': {'id': 'MIT-Festival', 'deprecated': False}, + 'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False}, + 'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False}, + 'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False}, + 'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False}, + 'mit-wu': {'id': 'MIT-Wu', 'deprecated': False}, + 'mitnfa': {'id': 'MITNFA', 'deprecated': False}, + 'mmixware': {'id': 'MMIXware', 'deprecated': False}, + 'motosoto': {'id': 'Motosoto', 'deprecated': False}, + 'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False}, + 'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False}, + 'mpich2': {'id': 'mpich2', 'deprecated': False}, + 'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False}, + 'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False}, + 'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False}, + 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False}, + 'mplus': {'id': 'mplus', 'deprecated': False}, + 'ms-lpl': {'id': 'MS-LPL', 'deprecated': False}, + 'ms-pl': {'id': 'MS-PL', 'deprecated': False}, + 'ms-rl': {'id': 'MS-RL', 'deprecated': False}, + 'mtll': {'id': 'MTLL', 'deprecated': False}, + 'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False}, + 'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False}, + 'multics': {'id': 'Multics', 'deprecated': False}, + 'mup': {'id': 'Mup', 'deprecated': False}, + 'naist-2003': {'id': 'NAIST-2003', 'deprecated': False}, + 'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False}, + 'naumen': {'id': 'Naumen', 'deprecated': False}, + 'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False}, + 'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False}, + 'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False}, + 'ncl': {'id': 'NCL', 'deprecated': False}, + 'ncsa': {'id': 'NCSA', 'deprecated': False}, + 'net-snmp': {'id': 'Net-SNMP', 'deprecated': True}, + 'netcdf': {'id': 'NetCDF', 'deprecated': False}, + 'newsletr': {'id': 'Newsletr', 'deprecated': False}, + 'ngpl': {'id': 'NGPL', 'deprecated': False}, + 'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False}, + 'nist-pd': {'id': 'NIST-PD', 'deprecated': False}, + 'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False}, + 'nist-software': {'id': 'NIST-Software', 'deprecated': False}, + 'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False}, + 'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False}, + 
'nlpl': {'id': 'NLPL', 'deprecated': False}, + 'nokia': {'id': 'Nokia', 'deprecated': False}, + 'nosl': {'id': 'NOSL', 'deprecated': False}, + 'noweb': {'id': 'Noweb', 'deprecated': False}, + 'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False}, + 'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False}, + 'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False}, + 'nrl': {'id': 'NRL', 'deprecated': False}, + 'ntp': {'id': 'NTP', 'deprecated': False}, + 'ntp-0': {'id': 'NTP-0', 'deprecated': False}, + 'nunit': {'id': 'Nunit', 'deprecated': True}, + 'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False}, + 'oar': {'id': 'OAR', 'deprecated': False}, + 'occt-pl': {'id': 'OCCT-PL', 'deprecated': False}, + 'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False}, + 'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False}, + 'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False}, + 'offis': {'id': 'OFFIS', 'deprecated': False}, + 'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False}, + 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False}, + 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False}, + 'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False}, + 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False}, + 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False}, + 'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False}, + 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False}, + 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False}, + 'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False}, + 'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False}, + 'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False}, + 'ogtsl': {'id': 'OGTSL', 'deprecated': False}, + 'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False}, + 'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False}, + 'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False}, + 'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False}, + 'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False}, + 'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False}, + 'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False}, + 'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False}, + 'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False}, + 'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False}, + 'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False}, + 'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False}, + 'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False}, + 'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False}, + 'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False}, + 'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False}, + 'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False}, + 'oml': {'id': 'OML', 'deprecated': False}, + 'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False}, + 'openssl': {'id': 'OpenSSL', 'deprecated': False}, + 'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False}, + 'openvision': {'id': 'OpenVision', 'deprecated': False}, + 'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False}, + 'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False}, + 'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False}, + 'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False}, + 'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False}, + 'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False}, + 'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False}, + 'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False}, + 'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False}, + 'padl': {'id': 'PADL', 'deprecated': False}, + 'parity-6.0.0': {'id': 'Parity-6.0.0', 
'deprecated': False}, + 'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False}, + 'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False}, + 'php-3.0': {'id': 'PHP-3.0', 'deprecated': False}, + 'php-3.01': {'id': 'PHP-3.01', 'deprecated': False}, + 'pixar': {'id': 'Pixar', 'deprecated': False}, + 'pkgconf': {'id': 'pkgconf', 'deprecated': False}, + 'plexus': {'id': 'Plexus', 'deprecated': False}, + 'pnmstitch': {'id': 'pnmstitch', 'deprecated': False}, + 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False}, + 'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False}, + 'postgresql': {'id': 'PostgreSQL', 'deprecated': False}, + 'ppl': {'id': 'PPL', 'deprecated': False}, + 'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False}, + 'psfrag': {'id': 'psfrag', 'deprecated': False}, + 'psutils': {'id': 'psutils', 'deprecated': False}, + 'python-2.0': {'id': 'Python-2.0', 'deprecated': False}, + 'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False}, + 'python-ldap': {'id': 'python-ldap', 'deprecated': False}, + 'qhull': {'id': 'Qhull', 'deprecated': False}, + 'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False}, + 'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False}, + 'radvd': {'id': 'radvd', 'deprecated': False}, + 'rdisc': {'id': 'Rdisc', 'deprecated': False}, + 'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False}, + 'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False}, + 'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False}, + 'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False}, + 'rsa-md': {'id': 'RSA-MD', 'deprecated': False}, + 'rscpl': {'id': 'RSCPL', 'deprecated': False}, + 'ruby': {'id': 'Ruby', 'deprecated': False}, + 'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False}, + 'sax-pd': {'id': 'SAX-PD', 'deprecated': False}, + 'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False}, + 'saxpath': {'id': 'Saxpath', 'deprecated': False}, + 'scea': {'id': 'SCEA', 'deprecated': False}, + 'schemereport': {'id': 'SchemeReport', 'deprecated': False}, + 'sendmail': {'id': 'Sendmail', 'deprecated': False}, + 'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False}, + 'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False}, + 'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False}, + 'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False}, + 'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False}, + 'sgp4': {'id': 'SGP4', 'deprecated': False}, + 'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False}, + 'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False}, + 'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False}, + 'sissl': {'id': 'SISSL', 'deprecated': False}, + 'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False}, + 'sl': {'id': 'SL', 'deprecated': False}, + 'sleepycat': {'id': 'Sleepycat', 'deprecated': False}, + 'smlnj': {'id': 'SMLNJ', 'deprecated': False}, + 'smppl': {'id': 'SMPPL', 'deprecated': False}, + 'snia': {'id': 'SNIA', 'deprecated': False}, + 'snprintf': {'id': 'snprintf', 'deprecated': False}, + 'softsurfer': {'id': 'softSurfer', 'deprecated': False}, + 'soundex': {'id': 'Soundex', 'deprecated': False}, + 'spencer-86': {'id': 'Spencer-86', 'deprecated': False}, + 'spencer-94': {'id': 'Spencer-94', 'deprecated': False}, + 'spencer-99': {'id': 'Spencer-99', 'deprecated': False}, + 'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False}, + 'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False}, + 'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False}, + 'ssh-short': {'id': 'SSH-short', 'deprecated': False}, + 
'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False}, + 'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False}, + 'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True}, + 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False}, + 'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False}, + 'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False}, + 'sunpro': {'id': 'SunPro', 'deprecated': False}, + 'swl': {'id': 'SWL', 'deprecated': False}, + 'swrule': {'id': 'swrule', 'deprecated': False}, + 'symlinks': {'id': 'Symlinks', 'deprecated': False}, + 'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False}, + 'tcl': {'id': 'TCL', 'deprecated': False}, + 'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False}, + 'termreadkey': {'id': 'TermReadKey', 'deprecated': False}, + 'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False}, + 'threeparttable': {'id': 'threeparttable', 'deprecated': False}, + 'tmate': {'id': 'TMate', 'deprecated': False}, + 'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False}, + 'tosl': {'id': 'TOSL', 'deprecated': False}, + 'tpdl': {'id': 'TPDL', 'deprecated': False}, + 'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False}, + 'ttwl': {'id': 'TTWL', 'deprecated': False}, + 'ttyp0': {'id': 'TTYP0', 'deprecated': False}, + 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False}, + 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False}, + 'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False}, + 'ucar': {'id': 'UCAR', 'deprecated': False}, + 'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False}, + 'ulem': {'id': 'ulem', 'deprecated': False}, + 'umich-merit': {'id': 'UMich-Merit', 'deprecated': False}, + 'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False}, + 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False}, + 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False}, + 'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False}, + 'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False}, + 'unlicense': {'id': 'Unlicense', 'deprecated': False}, + 'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False}, + 'urt-rle': {'id': 'URT-RLE', 'deprecated': False}, + 'vim': {'id': 'Vim', 'deprecated': False}, + 'vostrom': {'id': 'VOSTROM', 'deprecated': False}, + 'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False}, + 'w3c': {'id': 'W3C', 'deprecated': False}, + 'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False}, + 'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False}, + 'w3m': {'id': 'w3m', 'deprecated': False}, + 'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False}, + 'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False}, + 'wsuipa': {'id': 'Wsuipa', 'deprecated': False}, + 'wtfpl': {'id': 'WTFPL', 'deprecated': False}, + 'wxwindows': {'id': 'wxWindows', 'deprecated': True}, + 'x11': {'id': 'X11', 'deprecated': False}, + 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False}, + 'x11-swapped': {'id': 'X11-swapped', 'deprecated': False}, + 'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False}, + 'xerox': {'id': 'Xerox', 'deprecated': False}, + 'xfig': {'id': 'Xfig', 'deprecated': False}, + 'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False}, + 'xinetd': {'id': 'xinetd', 'deprecated': False}, + 'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False}, + 'xlock': {'id': 'xlock', 'deprecated': False}, + 'xnet': {'id': 'Xnet', 'deprecated': False}, + 'xpp': {'id': 'xpp', 'deprecated': False}, + 'xskat': 
{'id': 'XSkat', 'deprecated': False}, + 'xzoom': {'id': 'xzoom', 'deprecated': False}, + 'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False}, + 'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False}, + 'zed': {'id': 'Zed', 'deprecated': False}, + 'zeeff': {'id': 'Zeeff', 'deprecated': False}, + 'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False}, + 'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False}, + 'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False}, + 'zlib': {'id': 'Zlib', 'deprecated': False}, + 'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False}, + 'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False}, + 'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False}, + 'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False}, +} + +EXCEPTIONS: dict[str, SPDXException] = { + '389-exception': {'id': '389-exception', 'deprecated': False}, + 'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False}, + 'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False}, + 'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False}, + 'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False}, + 'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False}, + 'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False}, + 'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False}, + 'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False}, + 'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False}, + 'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False}, + 'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False}, + 'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False}, + 'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False}, + 'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False}, + 'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False}, + 'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False}, + 'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False}, + 'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False}, + 'fmt-exception': {'id': 'fmt-exception', 'deprecated': False}, + 'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False}, + 'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False}, + 'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False}, + 'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False}, + 'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False}, + 'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False}, + 'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False}, + 'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False}, + 'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False}, + 'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False}, + 'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False}, + 'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 
'deprecated': False}, + 'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False}, + 'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False}, + 'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False}, + 'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False}, + 'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False}, + 'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False}, + 'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False}, + 'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False}, + 'llgpl': {'id': 'LLGPL', 'deprecated': False}, + 'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False}, + 'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False}, + 'mif-exception': {'id': 'mif-exception', 'deprecated': False}, + 'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True}, + 'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False}, + 'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False}, + 'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False}, + 'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False}, + 'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False}, + 'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False}, + 'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False}, + 'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False}, + 'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False}, + 'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False}, + 'romic-exception': {'id': 'romic-exception', 'deprecated': False}, + 'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False}, + 'sane-exception': {'id': 'SANE-exception', 'deprecated': False}, + 'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False}, + 'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False}, + 'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False}, + 'swi-exception': {'id': 'SWI-exception', 'deprecated': False}, + 'swift-exception': {'id': 'Swift-exception', 'deprecated': False}, + 'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False}, + 'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False}, + 'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False}, + 'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False}, + 'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False}, + 'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False}, + 'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False}, +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py new file mode 100644 index 0000000..e7cea57 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/markers.py @@ -0,0 +1,362 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
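The LICENSES and EXCEPTIONS tables above key each entry by the lowercased SPDX identifier and record the canonical casing plus a deprecation flag, so a case-insensitive lookup is a single dict access. A minimal sketch of such a lookup (the helper name `lookup_license` is hypothetical, not part of the vendored module):

def lookup_license(candidate: str) -> str | None:
    # Keys are the lowercased SPDX ids, so normalize the query the same way.
    entry = LICENSES.get(candidate.lower())
    if entry is None:
        return None
    # 'id' carries the canonical casing; 'deprecated' marks superseded ids,
    # e.g. 'GPL-3.0' has been replaced by 'GPL-3.0-only'/'GPL-3.0-or-later'.
    return entry['id']

# lookup_license('GPL-3.0-Only') -> 'GPL-3.0-only'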
+ +from __future__ import annotations + +import operator +import os +import platform +import sys +from typing import AbstractSet, Any, Callable, Literal, TypedDict, Union, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker +from ._tokenizer import ParserSyntaxError +from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name + +__all__ = [ + "EvaluateContext", + "InvalidMarker", + "Marker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "default_environment", +] + +Operator = Callable[[str, Union[str, AbstractSet[str]]], bool] +EvaluateContext = Literal["metadata", "lock_file", "requirement"] +MARKERS_ALLOWING_SET = {"extras", "dependency_groups"} + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. + """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. + """ + + platform_version: str + """ + The system's release version, e.g. ``'#3 on degas'``. + + An empty string if the value cannot be determined. + """ + + python_full_version: str + """ + The Python version as string ``'major.minor.patchlevel'``. + + Note that unlike the Python :py:data:`sys.version`, this value will always include + the patchlevel (it defaults to 0). + """ + + platform_python_implementation: str + """ + A string identifying the Python implementation, e.g. ``'CPython'``. + """ + + python_version: str + """The Python version as string ``'major.minor'``.""" + + sys_platform: str + """ + This string contains a platform identifier that can be used to append + platform-specific components to :py:data:`sys.path`, for instance. + + For Unix systems, except on Linux and AIX, this is the lowercased OS name as + returned by ``uname -s`` with the first part of the version as returned by + ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python + was built. + """ + + +def _normalize_extra_values(results: Any) -> Any: + """ + Normalize extra values. 
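+ + When one side of a comparison is the ``extra`` variable, rewrite the other + side with canonicalize_name() so extras compare by their normalized form.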
+ """ + if isinstance(results[0], tuple): + lhs, op, rhs = results[0] + if isinstance(lhs, Variable) and lhs.value == "extra": + normalized_extra = canonicalize_name(rhs.value) + rhs = Value(normalized_extra) + elif isinstance(rhs, Variable) and rhs.value == "extra": + normalized_extra = canonicalize_name(lhs.value) + lhs = Value(normalized_extra) + results[0] = lhs, op, rhs + return results + + +def _format_marker( + marker: list[str] | MarkerAtom | str, first: bool | None = True +) -> str: + assert isinstance(marker, (list, tuple, str)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. + if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators: dict[str, Operator] = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str]) -> bool: + if isinstance(rhs, str): + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs, prereleases=True) + + oper: Operator | None = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + + return oper(lhs, rhs) + + +def _normalize( + lhs: str, rhs: str | AbstractSet[str], key: str +) -> tuple[str, str | AbstractSet[str]]: + # PEP 685 – Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + assert isinstance(rhs, str), "extra value must be a string" + return (canonicalize_name(lhs), canonicalize_name(rhs)) + if key in MARKERS_ALLOWING_SET: + if isinstance(rhs, str): # pragma: no cover + return (canonicalize_name(lhs), canonicalize_name(rhs)) + else: + return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs}) + + # other environment markers don't have such standards + return lhs, rhs + + +def _evaluate_markers( + markers: MarkerList, environment: dict[str, str | AbstractSet[str]] +) -> bool: + groups: list[list[bool]] = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, str)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + environment_key = lhs.value + lhs_value = environment[environment_key] + rhs_value = rhs.value + else: + lhs_value = lhs.value + environment_key = rhs.value + rhs_value = environment[environment_key] + assert isinstance(lhs_value, str), "lhs must be a string" + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + 
groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info: sys._version_info) -> str: + version = f"{info.major}.{info.minor}.{info.micro}" + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment() -> Environment: + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker: + def __init__(self, marker: str) -> None: + # Note: We create a Marker object without calling this constructor in + # packaging.requirements.Requirement. If any additional logic is + # added here, make sure to mirror/adapt Requirement. + try: + self._markers = _normalize_extra_values(_parse_marker(marker)) + # The attribute `_markers` can be described in terms of a recursive type: + # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]] + # + # For example, the following expression: + # python_version > "3.6" or (python_version == "3.6" and os_name == "unix") + # + # is parsed into: + # [ + # (<Variable('python_version')>, <Op('>')>, <Value('3.6')>), + # 'or', + # [ + # (<Variable('python_version')>, <Op('==')>, <Value('3.6')>), + # 'and', + # (<Variable('os_name')>, <Op('==')>, <Value('unix')>) + # ] + # ] + except ParserSyntaxError as e: + raise InvalidMarker(str(e)) from e + + def __str__(self) -> str: + return _format_marker(self._markers) + + def __repr__(self) -> str: + return f"<Marker('{self}')>" + + def __hash__(self) -> int: + return hash((self.__class__.__name__, str(self))) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Marker): + return NotImplemented + + return str(self) == str(other) + + def evaluate( + self, + environment: dict[str, str] | None = None, + context: EvaluateContext = "metadata", + ) -> bool: + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. The *context* parameter specifies what + context the markers are being evaluated for, which influences what markers + are considered valid. Acceptable values are "metadata" (for core metadata; + default), "lock_file", and "requirement" (i.e. all other situations). + + The environment is determined from the current Python process. + """ + current_environment = cast( + "dict[str, str | AbstractSet[str]]", default_environment() + ) + if context == "lock_file": + current_environment.update( + extras=frozenset(), dependency_groups=frozenset() + ) + elif context == "metadata": + current_environment["extra"] = "" + if environment is not None: + current_environment.update(environment) + # The API used to allow setting extra to None. We need to handle this + # case for backwards compatibility. 
+ if "extra" in current_environment and current_environment["extra"] is None: + current_environment["extra"] = "" + + return _evaluate_markers( + self._markers, _repair_python_full_version(current_environment) + ) + + +def _repair_python_full_version( + env: dict[str, str | AbstractSet[str]], +) -> dict[str, str | AbstractSet[str]]: + """ + Work around platform.python_version() returning something that is not PEP 440 + compliant for non-tagged Python builds. + """ + python_full_version = cast(str, env["python_full_version"]) + if python_full_version.endswith("+"): + env["python_full_version"] = f"{python_full_version}local" + return env diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/metadata.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/metadata.py new file mode 100644 index 0000000..3bd8602 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/metadata.py @@ -0,0 +1,862 @@ +from __future__ import annotations + +import email.feedparser +import email.header +import email.message +import email.parser +import email.policy +import pathlib +import sys +import typing +from typing import ( + Any, + Callable, + Generic, + Literal, + TypedDict, + cast, +) + +from . import licenses, requirements, specifiers, utils +from . import version as version_module +from .licenses import NormalizedLicenseExpression + +T = typing.TypeVar("T") + + +if sys.version_info >= (3, 11): # pragma: no cover + ExceptionGroup = ExceptionGroup +else: # pragma: no cover + + class ExceptionGroup(Exception): + """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. + + If :external:exc:`ExceptionGroup` is already defined by Python itself, + that version is used instead. + """ + + message: str + exceptions: list[Exception] + + def __init__(self, message: str, exceptions: list[Exception]) -> None: + self.message = message + self.exceptions = exceptions + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" + + +class InvalidMetadata(ValueError): + """A metadata field contains invalid data.""" + + field: str + """The name of the field that contains invalid data.""" + + def __init__(self, field: str, message: str) -> None: + self.field = field + super().__init__(message) + + +# The RawMetadata class attempts to make as few assumptions about the underlying +# serialization formats as possible. The idea is that as long as a serialization +# formats offer some very basic primitives in *some* way then we can support +# serializing to and from that format. +class RawMetadata(TypedDict, total=False): + """A dictionary of raw core metadata. + + Each field in core metadata maps to a key of this dictionary (when data is + provided). The key is lower-case and underscores are used instead of dashes + compared to the equivalent core metadata field. Any core metadata field that + can be specified multiple times or can hold multiple values in a single + field have a key with a plural name. See :class:`Metadata` whose attributes + match the keys of this dictionary. + + Core metadata fields that can be specified multiple times are stored as a + list or dict depending on which is appropriate for the field. Any fields + which hold multiple values in a single field are stored as a list. 
+ + """ + + # Metadata 1.0 - PEP 241 + metadata_version: str + name: str + version: str + platforms: list[str] + summary: str + description: str + keywords: list[str] + home_page: str + author: str + author_email: str + license: str + + # Metadata 1.1 - PEP 314 + supported_platforms: list[str] + download_url: str + classifiers: list[str] + requires: list[str] + provides: list[str] + obsoletes: list[str] + + # Metadata 1.2 - PEP 345 + maintainer: str + maintainer_email: str + requires_dist: list[str] + provides_dist: list[str] + obsoletes_dist: list[str] + requires_python: str + requires_external: list[str] + project_urls: dict[str, str] + + # Metadata 2.0 + # PEP 426 attempted to completely revamp the metadata format + # but got stuck without ever being able to build consensus on + # it and ultimately ended up withdrawn. + # + # However, a number of tools had started emitting METADATA with + # `2.0` Metadata-Version, so for historical reasons, this version + # was skipped. + + # Metadata 2.1 - PEP 566 + description_content_type: str + provides_extra: list[str] + + # Metadata 2.2 - PEP 643 + dynamic: list[str] + + # Metadata 2.3 - PEP 685 + # No new fields were added in PEP 685, just some edge case were + # tightened up to provide better interoptability. + + # Metadata 2.4 - PEP 639 + license_expression: str + license_files: list[str] + + +_STRING_FIELDS = { + "author", + "author_email", + "description", + "description_content_type", + "download_url", + "home_page", + "license", + "license_expression", + "maintainer", + "maintainer_email", + "metadata_version", + "name", + "requires_python", + "summary", + "version", +} + +_LIST_FIELDS = { + "classifiers", + "dynamic", + "license_files", + "obsoletes", + "obsoletes_dist", + "platforms", + "provides", + "provides_dist", + "provides_extra", + "requires", + "requires_dist", + "requires_external", + "supported_platforms", +} + +_DICT_FIELDS = { + "project_urls", +} + + +def _parse_keywords(data: str) -> list[str]: + """Split a string of comma-separated keywords into a list of keywords.""" + return [k.strip() for k in data.split(",")] + + +def _parse_project_urls(data: list[str]) -> dict[str, str]: + """Parse a list of label/URL string pairings separated by a comma.""" + urls = {} + for pair in data: + # Our logic is slightly tricky here as we want to try and do + # *something* reasonable with malformed data. + # + # The main thing that we have to worry about, is data that does + # not have a ',' at all to split the label from the Value. There + # isn't a singular right answer here, and we will fail validation + # later on (if the caller is validating) so it doesn't *really* + # matter, but since the missing value has to be an empty str + # and our return value is dict[str, str], if we let the key + # be the missing value, then they'd have multiple '' values that + # overwrite each other in a accumulating dict. + # + # The other potentional issue is that it's possible to have the + # same label multiple times in the metadata, with no solid "right" + # answer with what to do in that case. As such, we'll do the only + # thing we can, which is treat the field as unparseable and add it + # to our list of unparsed fields. + parts = [p.strip() for p in pair.split(",", 1)] + parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items + + # TODO: The spec doesn't say anything about if the keys should be + # considered case sensitive or not... 
logically they should + # be case-preserving and case-insensitive, but doing that + # would open up more cases where we might have duplicate + # entries. + label, url = parts + if label in urls: + # The label already exists in our set of urls, so this field + # is unparseable, and we can just add the whole thing to our + # unparseable data and stop processing it. + raise KeyError("duplicate labels in project urls") + urls[label] = url + + return urls + + +def _get_payload(msg: email.message.Message, source: bytes | str) -> str: + """Get the body of the message.""" + # If our source is a str, then our caller has managed encodings for us, + # and we don't need to deal with it. + if isinstance(source, str): + payload = msg.get_payload() + assert isinstance(payload, str) + return payload + # If our source is a bytes, then we're managing the encoding and we need + # to deal with it. + else: + bpayload = msg.get_payload(decode=True) + assert isinstance(bpayload, bytes) + try: + return bpayload.decode("utf8", "strict") + except UnicodeDecodeError as exc: + raise ValueError("payload in an invalid encoding") from exc + + +# The various parse_FORMAT functions here are intended to be as lenient as +# possible in their parsing, while still returning a correctly typed +# RawMetadata. +# +# To aid in this, we also generally want to do as little touching of the +# data as possible, except where there are possibly some historic holdovers +# that make valid data awkward to work with. +# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. +_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "keywords": "keywords", + "license": "license", + "license-expression": "license_expression", + "license-file": "license_files", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. 
Finally, + any fields that are expected to appear only once but are repeated are + included in this dict. + + """ + raw: dict[str, str | list[str] | dict[str, str]] = {} + unparsed: dict[str, list[str]] = {} + + if isinstance(data, str): + parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) + else: + parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) + + # We have to wrap parsed.keys() in a set, because in the case of multiple + # values for a key (a list), the key will appear multiple times in the + # list of keys, but we're avoiding that by using get_all(). + for name in frozenset(parsed.keys()): + # Header names in RFC are case insensitive, so we'll normalize to all + # lower case to make comparisons easier. + name = name.lower() + + # We use get_all() here, even for fields that aren't multiple use, + # because otherwise someone could have e.g. two Name fields, and we + # would just silently ignore it rather than doing something about it. + headers = parsed.get_all(name) or [] + + # The way the email module works when parsing bytes is that it + # unconditionally decodes the bytes as ascii using the surrogateescape + # handler. When you pull that data back out (such as with get_all() ), + # it looks to see if the str has any surrogate escapes, and if it does + # it wraps it in a Header object instead of returning the string. + # + # As such, we'll look for those Header objects, and fix up the encoding. + value = [] + # Flag if we have run into any issues processing the headers, thus + # signalling that the data belongs in 'unparsed'. + valid_encoding = True + for h in headers: + # It's unclear if this can return more types than just a Header or + # a str, so we'll just assert here to make sure. + assert isinstance(h, (email.header.Header, str)) + + # If it's a header object, we need to do our little dance to get + # the real data out of it. In cases where there is invalid data + # we're going to end up with mojibake, but there's no obvious, good + # way around that without reimplementing parts of the Header object + # ourselves. + # + # That should be fine since, if mojibake happens, this key is + # going into the unparsed dict anyways. + if isinstance(h, email.header.Header): + # The Header object stores its data as chunks, and each chunk + # can be independently encoded, so we'll need to check each + # of them. + chunks: list[tuple[bytes, str | None]] = [] + for bin, encoding in email.header.decode_header(h): + try: + bin.decode("utf8", "strict") + except UnicodeDecodeError: + # Enable mojibake. + encoding = "latin1" + valid_encoding = False + else: + encoding = "utf8" + chunks.append((bin, encoding)) + + # Turn our chunks back into a Header object, then let that + # Header object do the right thing to turn them into a + # string for us. + value.append(str(email.header.make_header(chunks))) + # This is already a string, so just add it. + else: + value.append(h) + + # We've processed all of our values to get them into a list of str, + # but we may have mojibake data, in which case this is an unparsed + # field. + if not valid_encoding: + unparsed[name] = value + continue + + raw_name = _EMAIL_TO_RAW_MAPPING.get(name) + if raw_name is None: + # This is a bit of a weird situation, we've encountered a key that + # we don't know what it means, so we don't know whether it's meant + # to be a list or not. 
+ # + # Since we can't really tell one way or another, we'll just leave it + # as a list, even though it may be a single item list, because that's + # what makes the most sense for email headers. + unparsed[name] = value + continue + + # If this is one of our string fields, then we'll check to see if our + # value is a list of a single item. If it is then we'll assume that + # it was emitted as a single string, and unwrap the str from inside + # the list. + # + # If it's any other kind of data, then we haven't the faintest clue + # what we should parse it as, and we have to just add it to our list + # of unparsed stuff. + if raw_name in _STRING_FIELDS and len(value) == 1: + raw[raw_name] = value[0] + # If this is one of our list of string fields, then we can just assign + # the value, since email *only* has strings, and our get_all() call + # above ensures that this is a list. + elif raw_name in _LIST_FIELDS: + raw[raw_name] = value + # Special Case: Keywords + # The keywords field is implemented in the metadata spec as a str, + # but it conceptually is a list of strings, and is serialized using + # ", ".join(keywords), so we'll do some light data massaging to turn + # this into what it logically is. + elif raw_name == "keywords" and len(value) == 1: + raw[raw_name] = _parse_keywords(value[0]) + # Special Case: Project-URL + # The project urls is implemented in the metadata spec as a list of + # specially-formatted strings that represent a key and a value, which + # is fundamentally a mapping, however the email format doesn't support + # mappings in a sane way, so it was crammed into a list of strings + # instead. + # + # We will do a little light data massaging to turn this into a map as + # it logically should be. + elif raw_name == "project_urls": + try: + raw[raw_name] = _parse_project_urls(value) + except KeyError: + unparsed[name] = value + # Nothing that we've done has managed to parse this, so we'll just + # throw it in our unparseable data and move on. + else: + unparsed[name] = value + + # We need to support getting the Description from the message payload in + # addition to getting it from the headers. This does mean, though, there + # is the possibility of it being set both ways, in which case we put both + # in 'unparsed' since we don't know which is right. + try: + payload = _get_payload(parsed, data) + except ValueError: + unparsed.setdefault("description", []).append( + parsed.get_payload(decode=isinstance(data, bytes)) # type: ignore[call-overload] + ) + else: + if payload: + # Check to see if we've already got a description, if so then both + # it, and this body move to unparseable. + if "description" in raw: + description_header = cast(str, raw.pop("description")) + unparsed.setdefault("description", []).extend( + [description_header, payload] + ) + elif "description" in unparsed: + unparsed["description"].append(payload) + else: + raw["description"] = payload + + # We need to cast our `raw` to a metadata, because a TypedDict only supports + # literal key names, but we're computing our key names on purpose; the + # way this function is implemented, our `TypedDict` can only have valid key + # names. + return cast(RawMetadata, raw), unparsed + + +_NOT_FOUND = object() + + +# Keep the two values in sync. 
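+# (The two values being _VALID_METADATA_VERSIONS and the _MetadataVersion +# Literal just below.)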
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"] +_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"] + +_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) + + +class _Validator(Generic[T]): + """Validate a metadata field. + + All _process_*() methods correspond to a core metadata field. The method is + called with the field's raw value. If the raw value is valid it is returned + in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). + If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause + as appropriate). + """ + + name: str + raw_name: str + added: _MetadataVersion + + def __init__( + self, + *, + added: _MetadataVersion = "1.0", + ) -> None: + self.added = added + + def __set_name__(self, _owner: Metadata, name: str) -> None: + self.name = name + self.raw_name = _RAW_TO_EMAIL_MAPPING[name] + + def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T: + # With Python 3.8, the caching can be replaced with functools.cached_property(). + # No need to check the cache as attribute lookup will resolve into the + # instance's __dict__ before __get__ is called. + cache = instance.__dict__ + value = instance._raw.get(self.name) + + # To make the _process_* methods easier, we'll check if the value is None + # and if this field is NOT a required attribute, and if both of those + # things are true, we'll skip the converter. This will mean that the + # converters never have to deal with the None union. + if self.name in _REQUIRED_ATTRS or value is not None: + try: + converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") + except AttributeError: + pass + else: + value = converter(value) + + cache[self.name] = value + try: + del instance._raw[self.name] # type: ignore[misc] + except KeyError: + pass + + return cast(T, value) + + def _invalid_metadata( + self, msg: str, cause: Exception | None = None + ) -> InvalidMetadata: + exc = InvalidMetadata( + self.raw_name, msg.format_map({"field": repr(self.raw_name)}) + ) + exc.__cause__ = cause + return exc + + def _process_metadata_version(self, value: str) -> _MetadataVersion: + # Implicitly makes Metadata-Version required. + if value not in _VALID_METADATA_VERSIONS: + raise self._invalid_metadata(f"{value!r} is not a valid metadata version") + return cast(_MetadataVersion, value) + + def _process_name(self, value: str) -> str: + if not value: + raise self._invalid_metadata("{field} is a required field") + # Validate the name as a side-effect. 
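+ # (canonicalize_name(validate=True) raises utils.InvalidName for a malformed + # project name, which is re-raised below as InvalidMetadata.)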
+ try: + utils.canonicalize_name(value, validate=True) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return value + + def _process_version(self, value: str) -> version_module.Version: + if not value: + raise self._invalid_metadata("{field} is a required field") + try: + return version_module.parse(value) + except version_module.InvalidVersion as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_summary(self, value: str) -> str: + """Check the field contains no newlines.""" + if "\n" in value: + raise self._invalid_metadata("{field} must be a single line") + return value + + def _process_description_content_type(self, value: str) -> str: + content_types = {"text/plain", "text/x-rst", "text/markdown"} + message = email.message.EmailMessage() + message["content-type"] = value + + content_type, parameters = ( + # Defaults to `text/plain` if parsing failed. + message.get_content_type().lower(), + message["content-type"].params, + ) + # Check if content-type is valid or defaulted to `text/plain` and thus was + # not parseable. + if content_type not in content_types or content_type not in value.lower(): + raise self._invalid_metadata( + f"{{field}} must be one of {list(content_types)}, not {value!r}" + ) + + charset = parameters.get("charset", "UTF-8") + if charset != "UTF-8": + raise self._invalid_metadata( + f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" + ) + + markdown_variants = {"GFM", "CommonMark"} + variant = parameters.get("variant", "GFM") # Use an acceptable default. + if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: list[str]) -> list[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{dynamic_field!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata( + f"{dynamic_field!r} is not a valid dynamic field" + ) + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: list[str], + ) -> list[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_requires_dist( + self, + value: list[str], + ) -> list[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata( + f"{req!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return reqs + + def _process_license_expression( + self, value: str + ) -> NormalizedLicenseExpression | None: + try: + return licenses.canonicalize_license_expression(value) + except ValueError as exc: + raise 
self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_license_files(self, value: list[str]) -> list[str]: + paths = [] + for path in value: + if ".." in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, " + "parent directory indicators are not allowed" + ) + if "*" in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be resolved" + ) + if ( + pathlib.PurePosixPath(path).is_absolute() + or pathlib.PureWindowsPath(path).is_absolute() + ): + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be relative" + ) + if pathlib.PureWindowsPath(path).as_posix() != path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must use '/' delimiter" + ) + paths.append(path) + return paths + + +class Metadata: + """Representation of distribution metadata. + + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: + """Create an instance from :class:`RawMetadata`. + + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. + + if validate: + exceptions: list[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. + try: + field_metadata_version = cls.__dict__[key].added + except KeyError: + exc = InvalidMetadata(key, f"unrecognized field: {key!r}") + exceptions.append(exc) + continue + field_age = _VALID_METADATA_VERSIONS.index( + field_metadata_version + ) + if field_age > metadata_age: + field = _RAW_TO_EMAIL_MAPPING[key] + exc = InvalidMetadata( + field, + f"{field} introduced in metadata version " + f"{field_metadata_version}, not {metadata_version}", + ) + exceptions.append(exc) + continue + getattr(ins, key) + except InvalidMetadata as exc: + exceptions.append(exc) + + if exceptions: + raise ExceptionGroup("invalid metadata", exceptions) + + return ins + + @classmethod + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: + """Parse metadata from email headers. + + If *validate* is true, the metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. 
+ """ + raw, unparsed = parse_email(data) + + if validate: + exceptions: list[Exception] = [] + for unparsed_key in unparsed: + if unparsed_key in _EMAIL_TO_RAW_MAPPING: + message = f"{unparsed_key!r} has invalid data" + else: + message = f"unrecognized field: {unparsed_key!r}" + exceptions.append(InvalidMetadata(unparsed_key, message)) + + if exceptions: + raise ExceptionGroup("unparsed", exceptions) + + try: + return cls.from_raw(raw, validate=validate) + except ExceptionGroup as exc_group: + raise ExceptionGroup( + "invalid or unparsed metadata", exc_group.exceptions + ) from None + + metadata_version: _Validator[_MetadataVersion] = _Validator() + """:external:ref:`core-metadata-metadata-version` + (required; validated to be a valid metadata version)""" + # `name` is not normalized/typed to NormalizedName so as to provide access to + # the original/raw name. + name: _Validator[str] = _Validator() + """:external:ref:`core-metadata-name` + (required; validated using :func:`~packaging.utils.canonicalize_name` and its + *validate* parameter)""" + version: _Validator[version_module.Version] = _Validator() + """:external:ref:`core-metadata-version` (required)""" + dynamic: _Validator[list[str] | None] = _Validator( + added="2.2", + ) + """:external:ref:`core-metadata-dynamic` + (validated against core metadata field names and lowercased)""" + platforms: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-platform`""" + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-supported-platform`""" + summary: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body + """:external:ref:`core-metadata-description`""" + description_content_type: _Validator[str | None] = _Validator(added="2.1") + """:external:ref:`core-metadata-description-content-type` (validated)""" + keywords: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-keywords`""" + home_page: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-home-page`""" + download_url: _Validator[str | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-download-url`""" + author: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author`""" + author_email: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author-email`""" + maintainer: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer`""" + maintainer_email: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-license`""" + license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator( + added="2.4" + ) + """:external:ref:`core-metadata-license-expression`""" + license_files: _Validator[list[str] | None] = _Validator(added="2.4") + """:external:ref:`core-metadata-license-file`""" + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( + added="1.2" + ) + 
""":external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. + requires_external: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. + provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + requires: _Validator[list[str] | None] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[list[str] | None] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py new file mode 100644 index 0000000..4e068c9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/requirements.py @@ -0,0 +1,91 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import annotations + +from typing import Any, Iterator + +from ._parser import parse_requirement as _parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet +from .utils import canonicalize_name + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ + def __init__(self, requirement_string: str) -> None: + try: + parsed = _parse_requirement(requirement_string) + except ParserSyntaxError as e: + raise InvalidRequirement(str(e)) from e + + self.name: str = parsed.name + self.url: str | None = parsed.url or None + self.extras: set[str] = set(parsed.extras or []) + self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) + self.marker: Marker | None = None + if parsed.marker is not None: + self.marker = Marker.__new__(Marker) + self.marker._markers = _normalize_extra_values(parsed.marker) + + def _iter_parts(self, name: str) -> Iterator[str]: + yield name + + if self.extras: + formatted_extras = ",".join(sorted(self.extras)) + yield f"[{formatted_extras}]" + + if self.specifier: + yield str(self.specifier) + + if self.url: + yield f"@ {self.url}" + if self.marker: + yield " " + + if self.marker: + yield f"; {self.marker}" + + def __str__(self) -> str: + return "".join(self._iter_parts(self.name)) + + def __repr__(self) -> str: + return f"<Requirement('{self}')>" + + def __hash__(self) -> int: + return hash( + ( + self.__class__.__name__, + *self._iter_parts(canonicalize_name(self.name)), + ) + ) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Requirement): + return NotImplemented + + return ( + canonicalize_name(self.name) == canonicalize_name(other.name) + and self.extras == other.extras + and self.specifier == other.specifier + and self.url == other.url + and self.marker == other.marker + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..47c3929 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/specifiers.py @@ -0,0 +1,1019 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from pip._vendor.packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from pip._vendor.packaging.version import Version +""" + +from __future__ import annotations + +import abc +import itertools +import re +from typing import Callable, Iterable, Iterator, TypeVar, Union + +from .utils import canonicalize_version +from .version import Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] + + +def _coerce_version(version: UnparsedVersion) -> Version: + if not isinstance(version, Version): + version = Version(version) + return version + + +class InvalidSpecifier(ValueError): + """ + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... + packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' + """ + 
+ """ + + @property + @abc.abstractmethod + def prereleases(self) -> bool | None: + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. + """ + + @prereleases.setter + def prereleases(self, value: bool) -> None: + """Setter for :attr:`prereleases`. + + :param value: The value to set. + """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: bool | None = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. + + .. tip:: + + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). + """ + + _operator_regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. + (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. 
+ (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: + """Initialize a Specifier instance. + + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). + """ + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: {spec!r}") + + self._spec: tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515 + @property # type: ignore[override] + def prereleases(self) -> bool: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if Version(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. + + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> tuple[str, str]: + canonical_version = canonicalize_version( + self._spec[1], + strip_trailing_zero=(self._spec[0] != "~="), + ) + return self._spec[0], canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... 
Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + prospective.public, strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+ return Version(prospective.public) <= Version(spec) + + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: + return str(prospective).lower() == str(spec).lower() + + def __contains__(self, item: str | Version) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) + + def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: + """Return whether or not the item is contained in this specifier. 
+ + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + False + >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") + True + >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) + True + """ + + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version; this allows us to have a shortcut for + # "2.0" in Specifier(">=2") + normalized_item = _coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not; if we do not support prereleases then we can short-circuit + # the logic if this version is a prerelease. + if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + operator_callable: CallableOperator = self._get_operator(self.operator) + return operator_callable(normalized_item, self.version) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable that match the specifier. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(Specifier().contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) + ['1.2.3', '1.3', <Version('1.4')>] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + """ + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = _coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. 
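# (Illustrative sketch of the PEP 440 fallback rule this loop implements,
# not part of the vendored source.)
#
#     >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))   # only prereleases match
#     ['1.5a1']
#     >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"]))   # a final release wins
#     ['1.3']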
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version: str) -> list[str]: + """Split version into components. + + The split components are intended for version comparison. The logic does + not attempt to retain the original version string, so joining the + components back with :func:`_version_join` may not produce the original + version string. + """ + result: list[str] = [] + + epoch, _, rest = version.rpartition("!") + result.append(epoch or "0") + + for item in rest.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _version_join(components: list[str]) -> str: + """Join split version components into a version string. + + This function assumes the input came from :func:`_version_split`, where the + first component must be the epoch (either empty or numeric), and all other + components numeric. + """ + epoch, *rest = components + return f"{epoch}!{'.'.join(rest)}" + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return ( + list(itertools.chain.from_iterable(left_split)), + list(itertools.chain.from_iterable(right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. + """ + + def __init__( + self, + specifiers: str | Iterable[Specifier] = "", + prereleases: bool | None = None, + ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + May also be an iterable of ``Specifier`` instances, which will be used + as is. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + + :raises InvalidSpecifier: + If the given ``specifiers`` are not parseable, then this exception will be + raised. + """ + + if isinstance(specifiers, str): + # Split on `,` to break each individual specifier into its own item, and + # strip each item to remove leading/trailing whitespace. 
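# (Illustrative sketch, not part of the vendored source: whitespace around
# each clause is tolerated, and the clauses are AND-ed together.)
#
#     >>> ss = SpecifierSet(" >=1.0 , !=1.5 , <2.0 ")
#     >>> "1.4" in ss, "1.5" in ss
#     (True, False)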
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Make each individual specifier a Specifier and save in a frozen set + # for later. + self._specs = frozenset(map(Specifier, split_specifiers)) + else: + # Save the supplied specifiers in a frozen set. + self._specs = frozenset(specifiers) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + @property + def prereleases(self) -> bool | None: + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + def __repr__(self) -> str: + """A representation of the specifier set that shows all internal state. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> SpecifierSet('>=1.0.0,!=2.0.0') + <SpecifierSet('!=2.0.0,>=1.0.0')> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) + <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) + <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<SpecifierSet({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the specifier set that can be round-tripped. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) + '!=1.0.1,>=1.0.0' + >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) + '!=1.0.1,>=1.0.0' + """ + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self) -> int: + return hash(self._specs) + + def __and__(self, other: SpecifierSet | str) -> SpecifierSet: + """Return a SpecifierSet which is a combination of the two sets. + + :param other: The other object to combine with. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' + <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')> + >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') + <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')> + """ + if isinstance(other, str): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease overrides." + ) + + return specifier + + def __eq__(self, other: object) -> bool: + """Whether or not the two SpecifierSet-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. 
+ + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == + ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") + False + """ + if isinstance(other, (str, Specifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __len__(self) -> int: + """Returns the number of specifiers in this specifier set.""" + return len(self._specs) + + def __iter__(self) -> Iterator[Specifier]: + """ + Returns an iterator over all the underlying :class:`Specifier` instances + in this specifier set. + + >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) + [, =1.0.0')>] + """ + return iter(self._specs) + + def __contains__(self, item: UnparsedVersion) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) + True + """ + return self.contains(item) + + def contains( + self, + item: UnparsedVersion, + prereleases: bool | None = None, + installed: bool | None = None, + ) -> bool: + """Return whether or not the item is contained in this SpecifierSet. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this SpecifierSet. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) + True + """ + # Ensure that our item is a Version instance. + if not isinstance(item, Version): + item = Version(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. 
+ # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + if installed and item.is_prerelease: + item = Version(item.base_version) + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable that match the specifiers in this set. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) + ['1.3', <Version('1.4')>] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) + [] + >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + + An "empty" SpecifierSet will filter items based on the presence of prerelease + versions in the set. + + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iter(iterable) + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases. 
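# (Illustrative sketch, not part of the vendored source: chaining the
# per-specifier filters above computes their intersection.)
#
#     >>> list(SpecifierSet(">=1.0,<2.0").filter(["0.9", "1.5", "2.1"]))
#     ['1.5']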
+ else: + filtered: list[UnparsedVersionVar] = [] + found_prereleases: list[UnparsedVersionVar] = [] + + for item in iterable: + parsed_version = _coerce_version(item) + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return iter(found_prereleases) + + return iter(filtered) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py new file mode 100644 index 0000000..8522f59 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/tags.py @@ -0,0 +1,656 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Iterable, + Iterator, + Sequence, + Tuple, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +AppleVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. + """ + + __slots__ = ["_abi", "_hash", "_interpreter", "_platform"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.isdisjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> frozenset[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. 
+ """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> int | str | None: + value: int | str | None = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_").replace(" ", "_") + + +def _is_threaded_cpython(abis: list[str]) -> bool: + """ + Determine if the ABI corresponds to a threaded (`--disable-gil`) build. + + The threaded builds are indicated by a "t" in the abiflags. + """ + if len(abis) == 0: + return False + # expect e.g., cp313 + m = re.match(r"cp\d+(.*)", abis[0]) + if not m: + return False + abiflags = m.group(1) + return "t" in abiflags + + +def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) + builds do not support abi3. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + threading = debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): + threading = "t" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append(f"cp{version}{threading}") + abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") + return abis + + +def cpython_tags( + python_version: PythonVersion | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. 
+ """ + if not python_version: + python_version = sys.version_info[:2] + + interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + threading = _is_threaded_cpython(abis) + use_abi3 = _abi3_applies(python_version, threading) + if use_abi3: + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if use_abi3: + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + version = _version_nodot((python_version[0], minor_version)) + interpreter = f"cp{version}" + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> list[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. + # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. + return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? + abi = soabi + else: + return [] + return [_normalize_string(abi)] + + +def generic_tags( + interpreter: str | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. + """ + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + else: + abis = list(abis) + platforms = list(platforms or platform_tags()) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. 
+ """ + if len(py_version) > 1: + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( + python_version: PythonVersion | None = None, + interpreter: str | None = None, + platforms: Iterable[str] | None = None, +) -> Iterator[Tag]: + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]: + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms( + version: AppleVersion | None = None, arch: str | None = None +) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + text=True, + ).stdout + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + + if (10, 0) <= version and version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. 
+        major_version = 10
+        for minor_version in range(version[1], -1, -1):
+            compat_version = major_version, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number. The minor versions are now the midyear updates.
+        minor_version = 0
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+    if version >= (11, 0):
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        #
+        # However, the "universal2" binary format can have a
+        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+        # that version of macOS.
+        major_version = 10
+        if arch == "x86_64":
+            for minor_version in range(16, 3, -1):
+                compat_version = major_version, minor_version
+                binary_formats = _mac_binary_formats(compat_version, arch)
+                for binary_format in binary_formats:
+                    yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+        else:
+            for minor_version in range(16, 3, -1):
+                compat_version = major_version, minor_version
+                binary_format = "universal2"
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+
+def ios_platforms(
+    version: AppleVersion | None = None, multiarch: str | None = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for an iOS system.
+
+    :param version: A two-item tuple specifying the iOS version to generate
+        platform tags for. Defaults to the current iOS version.
+    :param multiarch: The CPU architecture+ABI to generate platform tags for -
+        (the value used by `sys.implementation._multiarch` e.g.,
+        `arm64_iphoneos` or `x86_64_iphonesimulator`). Defaults to the current
+        multiarch value.
+    """
+    if version is None:
+        # if iOS is the current platform, ios_ver *must* be defined. However,
+        # it won't exist for CPython versions before 3.13, which causes a mypy
+        # error.
+        _, release, _, _ = platform.ios_ver()  # type: ignore[attr-defined, unused-ignore]
+        version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))
+
+    if multiarch is None:
+        multiarch = sys.implementation._multiarch
+    multiarch = multiarch.replace("-", "_")
+
+    ios_platform_template = "ios_{major}_{minor}_{multiarch}"
+
+    # Consider any iOS major.minor version from the version requested, down to
+    # 12.0. 12.0 is the first iOS version that is known to have enough features
+    # to support CPython. Consider every possible minor release up to X.9. The
+    # highest the minor has ever gone is 8 (14.8 and 15.8), but having some
+    # extra candidates that won't ever match doesn't really hurt, and it saves
+    # us from having to keep an explicit list of known iOS versions in the
+    # code. Return the results in descending order of version number.
+
+    # If the requested major version is less than 12, there won't be any matches.
+    if version[0] < 12:
+        return
+
+    # Consider the actual X.Y version that was requested.
+    yield ios_platform_template.format(
+        major=version[0], minor=version[1], multiarch=multiarch
+    )
+
+    # Consider every minor version from X.0 to the minor version prior to the
+    # version requested by the platform.
+    for minor in range(version[1] - 1, -1, -1):
+        yield ios_platform_template.format(
+            major=version[0], minor=minor, multiarch=multiarch
+        )
+
+    for major in range(version[0] - 1, 11, -1):
+        for minor in range(9, -1, -1):
+            yield ios_platform_template.format(
+                major=major, minor=minor, multiarch=multiarch
+            )
+
+
+def android_platforms(
+    api_level: int | None = None, abi: str | None = None
+) -> Iterator[str]:
+    """
+    Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on
+    non-Android platforms, the ``api_level`` and ``abi`` arguments are required.
+
+    :param int api_level: The maximum Android API level to return. Defaults
+        to the current system's version, as returned by ``platform.android_ver``.
+    :param str abi: The Android ABI, e.g. ``arm64_v8a``. Defaults to the
+        current system's ABI, as returned by ``sysconfig.get_platform``.
+        Hyphens and periods will be replaced with underscores.
+    """
+    if platform.system() != "Android" and (api_level is None or abi is None):
+        raise TypeError(
+            "on non-Android platforms, the api_level and abi arguments are required"
+        )
+
+    if api_level is None:
+        # Python 3.13 was the first version to return platform.system() == "Android",
+        # and also the first version to define platform.android_ver().
+        api_level = platform.android_ver().api_level  # type: ignore[attr-defined]
+
+    if abi is None:
+        abi = sysconfig.get_platform().split("-")[-1]
+    abi = _normalize_string(abi)
+
+    # 16 is the minimum API level known to have enough features to support CPython
+    # without major patching. Yield every API level from the maximum down to the
+    # minimum, inclusive.
+    min_api_level = 16
+    for ver in range(api_level, min_api_level - 1, -1):
+        yield f"android_{ver}_{abi}"
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+    linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv8l"
+    _, arch = linux.split("_", 1)
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
+
+
+def _generic_platforms() -> Iterator[str]:
+    yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "iOS":
+        return ios_platforms()
+    elif platform.system() == "Android":
+        return android_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name() -> str:
+    """
+    Returns the name of the running interpreter.
+
+    Some implementations have a reserved, two-letter abbreviation which will
+    be returned when appropriate.
+    """
+    name = sys.implementation.name
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+    """
+    Returns the version of the running interpreter.
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        interp = "pp3"
+    elif interp_name == "cp":
+        interp = "cp" + interpreter_version(warn=warn)
+    else:
+        interp = None
+    yield from compatible_tags(interpreter=interp)
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py
new file mode 100644
index 0000000..2345095
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/utils.py
@@ -0,0 +1,163 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import functools
+import re
+from typing import NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version, _TrimmedRelease
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found, users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found, users should refer to the packaging user guide.
+    """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
+_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
+@functools.singledispatch
+def canonicalize_version(
+    version: Version | str, *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    Return a canonical form of a version as a string.
+
+    >>> canonicalize_version('1.0.1')
+    '1.0.1'
+
+    Per PEP 625, versions may have multiple canonical forms, differing
+    only by trailing zeros.
+
+    >>> canonicalize_version('1.0.0')
+    '1'
+    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
+    '1.0.0'
+
+    Invalid versions are returned unaltered.
+
+    >>> canonicalize_version('foo bar baz')
+    'foo bar baz'
+    """
+    return str(_TrimmedRelease(str(version)) if strip_trailing_zero else version)
+
+
+@canonicalize_version.register
+def _(version: str, *, strip_trailing_zero: bool = True) -> str:
+    try:
+        parsed = Version(version)
+    except InvalidVersion:
+        # Legacy versions cannot be normalized
+        return version
+    return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename!r}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name.
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename!r}"
+        ) from e
+
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in {filename!r}"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename!r}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")
+
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename!r}"
+        ) from e
+
+    return (name, version)
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py
new file mode 100644
index 0000000..21f44ca
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/packaging/version.py
@@ -0,0 +1,582 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from pip._vendor.packaging.version import parse, Version
+"""
+
+from __future__ import annotations
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
+
+
+def parse(version: str) -> Version:
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
    +
    +VERSION_PATTERN = _VERSION_PATTERN
    +"""
    +A string containing the regular expression used to match a valid version.
    +
    +The pattern is not anchored at either end, and is intended for embedding in larger
    +expressions (for example, matching a version number as part of a file name). The
    +regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
    +flags set.
    +
    +:meta hide-value:
    +"""
    +
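+# Editor's note: an illustrative sketch, not part of the vendored module. It
+# shows the intended embedding of VERSION_PATTERN in a larger expression; the
+# `_embedded` name and the file-name format are hypothetical.
+#
+#     _embedded = re.compile(
+#         r"example-(?P<ver>" + VERSION_PATTERN + r")\.tar\.gz",
+#         re.VERBOSE | re.IGNORECASE,
+#     )
+#     m = _embedded.search("example-1.0.post2.tar.gz")
+#     assert m is not None and m.group("ver") == "1.0.post2"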
    +
    +class Version(_BaseVersion):
    +    """This class abstracts handling of a project's versions.
    +
    +    A :class:`Version` instance is comparison aware and can be compared and
    +    sorted using the standard Python interfaces.
    +
    +    >>> v1 = Version("1.0a5")
    +    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
    +    >>> v1 < v2
    +    True
    +    >>> v1 == v2
    +    False
    +    >>> v1 > v2
    +    False
    +    >>> v1 >= v2
    +    False
    +    >>> v1 <= v2
    +    True
    +    """
    +
    +    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    +    _key: CmpKey
    +
    +    def __init__(self, version: str) -> None:
    +        """Initialize a Version object.
    +
    +        :param version:
    +            The string representation of a version which will be parsed and normalized
    +            before use.
    +        :raises InvalidVersion:
    +            If the ``version`` does not conform to PEP 440 in any way then this
    +            exception will be raised.
    +        """
    +
    +        # Validate the version and parse it into pieces
    +        match = self._regex.search(version)
    +        if not match:
    +            raise InvalidVersion(f"Invalid version: {version!r}")
    +
    +        # Store the parsed out pieces of the version
    +        self._version = _Version(
    +            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
    +            release=tuple(int(i) for i in match.group("release").split(".")),
    +            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
    +            post=_parse_letter_version(
    +                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
    +            ),
    +            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
    +            local=_parse_local_version(match.group("local")),
    +        )
    +
    +        # Generate a key which will be used for sorting
    +        self._key = _cmpkey(
    +            self._version.epoch,
    +            self._version.release,
    +            self._version.pre,
    +            self._version.post,
    +            self._version.dev,
    +            self._version.local,
    +        )
    +
    +    def __repr__(self) -> str:
    +        """A representation of the Version that shows all internal state.
    +
    +        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f"<Version('{self}')>"
    +
    +    def __str__(self) -> str:
    +        """A string representation of the version that can be round-tripped.
    +
    +        >>> str(Version("1.0a5"))
    +        '1.0a5'
    +        """
    +        parts = []
    +
    +        # Epoch
    +        if self.epoch != 0:
    +            parts.append(f"{self.epoch}!")
    +
    +        # Release segment
    +        parts.append(".".join(str(x) for x in self.release))
    +
    +        # Pre-release
    +        if self.pre is not None:
    +            parts.append("".join(str(x) for x in self.pre))
    +
    +        # Post-release
    +        if self.post is not None:
    +            parts.append(f".post{self.post}")
    +
    +        # Development release
    +        if self.dev is not None:
    +            parts.append(f".dev{self.dev}")
    +
    +        # Local version segment
    +        if self.local is not None:
    +            parts.append(f"+{self.local}")
    +
    +        return "".join(parts)
    +
    +    @property
    +    def epoch(self) -> int:
    +        """The epoch of the version.
    +
    +        >>> Version("2.0.0").epoch
    +        0
    +        >>> Version("1!2.0.0").epoch
    +        1
    +        """
    +        return self._version.epoch
    +
    +    @property
    +    def release(self) -> tuple[int, ...]:
    +        """The components of the "release" segment of the version.
    +
    +        >>> Version("1.2.3").release
    +        (1, 2, 3)
    +        >>> Version("2.0.0").release
    +        (2, 0, 0)
    +        >>> Version("1!2.0.0.post0").release
    +        (2, 0, 0)
    +
    +        Includes trailing zeroes but not the epoch or any pre-release / development /
    +        post-release suffixes.
    +        """
    +        return self._version.release
    +
    +    @property
    +    def pre(self) -> tuple[str, int] | None:
    +        """The pre-release segment of the version.
    +
    +        >>> print(Version("1.2.3").pre)
    +        None
    +        >>> Version("1.2.3a1").pre
    +        ('a', 1)
    +        >>> Version("1.2.3b1").pre
    +        ('b', 1)
    +        >>> Version("1.2.3rc1").pre
    +        ('rc', 1)
    +        """
    +        return self._version.pre
    +
    +    @property
    +    def post(self) -> int | None:
    +        """The post-release number of the version.
    +
    +        >>> print(Version("1.2.3").post)
    +        None
    +        >>> Version("1.2.3.post1").post
    +        1
    +        """
    +        return self._version.post[1] if self._version.post else None
    +
    +    @property
    +    def dev(self) -> int | None:
    +        """The development number of the version.
    +
    +        >>> print(Version("1.2.3").dev)
    +        None
    +        >>> Version("1.2.3.dev1").dev
    +        1
    +        """
    +        return self._version.dev[1] if self._version.dev else None
    +
    +    @property
    +    def local(self) -> str | None:
    +        """The local version segment of the version.
    +
    +        >>> print(Version("1.2.3").local)
    +        None
    +        >>> Version("1.2.3+abc").local
    +        'abc'
    +        """
    +        if self._version.local:
    +            return ".".join(str(x) for x in self._version.local)
    +        else:
    +            return None
    +
    +    @property
    +    def public(self) -> str:
    +        """The public portion of the version.
    +
    +        >>> Version("1.2.3").public
    +        '1.2.3'
    +        >>> Version("1.2.3+abc").public
    +        '1.2.3'
    +        >>> Version("1!1.2.3dev1+abc").public
    +        '1!1.2.3.dev1'
    +        """
    +        return str(self).split("+", 1)[0]
    +
    +    @property
    +    def base_version(self) -> str:
    +        """The "base version" of the version.
    +
    +        >>> Version("1.2.3").base_version
    +        '1.2.3'
    +        >>> Version("1.2.3+abc").base_version
    +        '1.2.3'
    +        >>> Version("1!1.2.3dev1+abc").base_version
    +        '1!1.2.3'
    +
    +        The "base version" is the public version of the project without any pre or post
    +        release markers.
    +        """
    +        parts = []
    +
    +        # Epoch
    +        if self.epoch != 0:
    +            parts.append(f"{self.epoch}!")
    +
    +        # Release segment
    +        parts.append(".".join(str(x) for x in self.release))
    +
    +        return "".join(parts)
    +
    +    @property
    +    def is_prerelease(self) -> bool:
    +        """Whether this version is a pre-release.
    +
    +        >>> Version("1.2.3").is_prerelease
    +        False
    +        >>> Version("1.2.3a1").is_prerelease
    +        True
    +        >>> Version("1.2.3b1").is_prerelease
    +        True
    +        >>> Version("1.2.3rc1").is_prerelease
    +        True
    +        >>> Version("1.2.3dev1").is_prerelease
    +        True
    +        """
    +        return self.dev is not None or self.pre is not None
    +
    +    @property
    +    def is_postrelease(self) -> bool:
    +        """Whether this version is a post-release.
    +
    +        >>> Version("1.2.3").is_postrelease
    +        False
    +        >>> Version("1.2.3.post1").is_postrelease
    +        True
    +        """
    +        return self.post is not None
    +
    +    @property
    +    def is_devrelease(self) -> bool:
    +        """Whether this version is a development release.
    +
    +        >>> Version("1.2.3").is_devrelease
    +        False
    +        >>> Version("1.2.3.dev1").is_devrelease
    +        True
    +        """
    +        return self.dev is not None
    +
    +    @property
    +    def major(self) -> int:
    +        """The first item of :attr:`release` or ``0`` if unavailable.
    +
    +        >>> Version("1.2.3").major
    +        1
    +        """
    +        return self.release[0] if len(self.release) >= 1 else 0
    +
    +    @property
    +    def minor(self) -> int:
    +        """The second item of :attr:`release` or ``0`` if unavailable.
    +
    +        >>> Version("1.2.3").minor
    +        2
    +        >>> Version("1").minor
    +        0
    +        """
    +        return self.release[1] if len(self.release) >= 2 else 0
    +
    +    @property
    +    def micro(self) -> int:
    +        """The third item of :attr:`release` or ``0`` if unavailable.
    +
    +        >>> Version("1.2.3").micro
    +        3
    +        >>> Version("1").micro
    +        0
    +        """
    +        return self.release[2] if len(self.release) >= 3 else 0
    +
    +
    +class _TrimmedRelease(Version):
    +    @property
    +    def release(self) -> tuple[int, ...]:
    +        """
    +        Release segment without any trailing zeros.
    +
    +        >>> _TrimmedRelease('1.0.0').release
    +        (1,)
    +        >>> _TrimmedRelease('0.0').release
    +        (0,)
    +        """
    +        rel = super().release
    +        nonzeros = (index for index, val in enumerate(rel) if val)
    +        last_nonzero = max(nonzeros, default=0)
    +        return rel[: last_nonzero + 1]
    +
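+# Editor's note (illustrative, not upstream code): because __str__ renders
+# self.release, the override above is what lets canonicalize_version() drop
+# trailing zeros:
+#
+#     assert str(_TrimmedRelease("1.0.0")) == "1"
+#     assert str(_TrimmedRelease("1.0.post1")) == "1.post1"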
    +
    +def _parse_letter_version(
    +    letter: str | None, number: str | bytes | SupportsInt | None
    +) -> tuple[str, int] | None:
    +    if letter:
    +        # We consider there to be an implicit 0 in a pre-release if there is
    +        # not a numeral associated with it.
    +        if number is None:
    +            number = 0
    +
    +        # We normalize any letters to their lower case form
    +        letter = letter.lower()
    +
    +        # We consider some words to be alternate spellings of other words and
    +        # in those cases we want to normalize the spellings to our preferred
    +        # spelling.
    +        if letter == "alpha":
    +            letter = "a"
    +        elif letter == "beta":
    +            letter = "b"
    +        elif letter in ["c", "pre", "preview"]:
    +            letter = "rc"
    +        elif letter in ["rev", "r"]:
    +            letter = "post"
    +
    +        return letter, int(number)
    +
    +    assert not letter
    +    if number:
    +        # We assume if we are given a number, but we are not given a letter
    +        # then this is using the implicit post release syntax (e.g. 1.0-1)
    +        letter = "post"
    +
    +        return letter, int(number)
    +
    +    return None
    +
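+# Editor's note (illustrative, not upstream code): how the normalization above
+# plays out for a few spellings:
+#
+#     assert _parse_letter_version("alpha", None) == ("a", 0)  # implicit 0
+#     assert _parse_letter_version("rev", "4") == ("post", 4)  # alternate spelling
+#     assert _parse_letter_version(None, "1") == ("post", 1)   # implicit post (1.0-1)
+#     assert _parse_letter_version(None, None) is None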
    +
    +_local_version_separators = re.compile(r"[\._-]")
    +
    +
    +def _parse_local_version(local: str | None) -> LocalType | None:
    +    """
    +    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    +    """
    +    if local is not None:
    +        return tuple(
    +            part.lower() if not part.isdigit() else int(part)
    +            for part in _local_version_separators.split(local)
    +        )
    +    return None
    +
    +
    +def _cmpkey(
    +    epoch: int,
    +    release: tuple[int, ...],
    +    pre: tuple[str, int] | None,
    +    post: tuple[str, int] | None,
    +    dev: tuple[str, int] | None,
    +    local: LocalType | None,
    +) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse it
+    # back into the correct order, and make it a tuple to use as our sorting
+    # key.
    +    _release = tuple(
    +        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    +    )
    +
    +    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    +    # We'll do this by abusing the pre segment, but we _only_ want to do this
    +    # if there is not a pre or a post segment. If we have one of those then
    +    # the normal sorting rules will handle this case correctly.
    +    if pre is None and post is None and dev is not None:
    +        _pre: CmpPrePostDevType = NegativeInfinity
    +    # Versions without a pre-release (except as noted above) should sort after
    +    # those with one.
    +    elif pre is None:
    +        _pre = Infinity
    +    else:
    +        _pre = pre
    +
    +    # Versions without a post segment should sort before those with one.
    +    if post is None:
    +        _post: CmpPrePostDevType = NegativeInfinity
    +
    +    else:
    +        _post = post
    +
    +    # Versions without a development segment should sort after those with one.
    +    if dev is None:
    +        _dev: CmpPrePostDevType = Infinity
    +
    +    else:
    +        _dev = dev
    +
    +    if local is None:
    +        # Versions without a local segment should sort before those with one.
    +        _local: CmpLocalType = NegativeInfinity
    +    else:
    +        # Versions with a local segment need that segment parsed to implement
    +        # the sorting rules in PEP440.
    +        # - Alpha numeric segments sort before numeric segments
    +        # - Alpha numeric segments sort lexicographically
    +        # - Numeric segments sort numerically
    +        # - Shorter versions sort before longer versions when the prefixes
    +        #   match exactly
    +        _local = tuple(
    +            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
    +        )
    +
    +    return epoch, _release, _pre, _post, _dev, _local
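+
+# Editor's note (illustrative, not upstream code): the net effect of the
+# _cmpkey() tricks above is the PEP 440 ordering dev < pre < final < post:
+#
+#     vs = ["1.0.post1", "1.0", "1.0rc1", "1.0.dev0"]
+#     assert [str(v) for v in sorted(Version(v) for v in vs)] == [
+#         "1.0.dev0", "1.0rc1", "1.0", "1.0.post1"
+#     ]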
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/LICENSE
    new file mode 100644
    index 0000000..1bb5a44
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/LICENSE
    @@ -0,0 +1,17 @@
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to
    +deal in the Software without restriction, including without limitation the
    +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
    +sell copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
    +IN THE SOFTWARE.
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
    new file mode 100644
    index 0000000..72f2b03
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
    @@ -0,0 +1,3676 @@
    +# TODO: Add Generic type annotations to initialized collections.
    +# For now we'd simply use implicit Any/Unknown which would add redundant annotations
    +# mypy: disable-error-code="var-annotated"
    +"""
    +Package resource API
    +--------------------
    +
    +A resource is a logical file contained within a package, or a logical
    +subdirectory thereof.  The package resource API expects resource names
    +to have their path parts separated with ``/``, *not* whatever the local
    +path separator is.  Do not use os.path operations to manipulate resource
    +names being passed into the API.
    +
    +The package resource API is designed to work with normal filesystem packages,
    +.egg files, and unpacked .egg files.  It can also work in a limited way with
    +.zip files and with custom PEP 302 loaders that support the ``get_data()``
    +method.
    +
    +This module is deprecated. Users are directed to :mod:`importlib.resources`,
    +:mod:`importlib.metadata` and :pypi:`packaging` instead.
    +"""
    +
    +from __future__ import annotations
    +
    +import sys
    +
    +if sys.version_info < (3, 8):  # noqa: UP036 # Check for unsupported versions
    +    raise RuntimeError("Python 3.8 or later is required")
    +
    +import os
    +import io
    +import time
    +import re
    +import types
    +from typing import (
    +    Any,
    +    Literal,
    +    Dict,
    +    Iterator,
    +    Mapping,
    +    MutableSequence,
    +    NamedTuple,
    +    NoReturn,
    +    Tuple,
    +    Union,
    +    TYPE_CHECKING,
    +    Protocol,
    +    Callable,
    +    Iterable,
    +    TypeVar,
    +    overload,
    +)
    +import zipfile
    +import zipimport
    +import warnings
    +import stat
    +import functools
    +import pkgutil
    +import operator
    +import platform
    +import collections
    +import plistlib
    +import email.parser
    +import errno
    +import tempfile
    +import textwrap
    +import inspect
    +import ntpath
    +import posixpath
    +import importlib
    +import importlib.abc
    +import importlib.machinery
    +from pkgutil import get_importer
    +
    +import _imp
    +
    +# capture these to bypass sandboxing
    +from os import utime
    +from os import open as os_open
    +from os.path import isdir, split
    +
    +try:
    +    from os import mkdir, rename, unlink
    +
    +    WRITE_SUPPORT = True
    +except ImportError:
    +    # no write support, probably under GAE
    +    WRITE_SUPPORT = False
    +
    +from pip._internal.utils._jaraco_text import (
    +    yield_lines,
    +    drop_comment,
    +    join_continuation,
    +)
    +from pip._vendor.packaging import markers as _packaging_markers
    +from pip._vendor.packaging import requirements as _packaging_requirements
    +from pip._vendor.packaging import utils as _packaging_utils
    +from pip._vendor.packaging import version as _packaging_version
    +from pip._vendor.platformdirs import user_cache_dir as _user_cache_dir
    +
    +if TYPE_CHECKING:
    +    from _typeshed import BytesPath, StrPath, StrOrBytesPath
    +    from typing_extensions import Self
    +
    +
    +# Patch: Remove deprecation warning from vendored pkg_resources.
    +# Setting PYTHONWARNINGS=error to verify builds produce no warnings
    +# causes immediate exceptions.
    +# See https://github.com/pypa/pip/issues/12243
    +
    +
    +_T = TypeVar("_T")
    +_DistributionT = TypeVar("_DistributionT", bound="Distribution")
    +# Type aliases
    +_NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
    +_InstallerTypeT = Callable[["Requirement"], "_DistributionT"]
    +_InstallerType = Callable[["Requirement"], Union["Distribution", None]]
    +_PkgReqType = Union[str, "Requirement"]
    +_EPDistType = Union["Distribution", _PkgReqType]
    +_MetadataType = Union["IResourceProvider", None]
    +_ResolvedEntryPoint = Any  # Can be any attribute in the module
    +_ResourceStream = Any  # TODO / Incomplete: A readable file-like object
    +# Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
    +_ModuleLike = Union[object, types.ModuleType]
    +# Any: Should be _ModuleLike but we end up with issues where _ModuleLike doesn't have _ZipLoaderModule's __loader__
    +_ProviderFactoryType = Callable[[Any], "IResourceProvider"]
    +_DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
    +_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
    +_AdapterT = TypeVar(
    +    "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
    +)
    +
    +
    +# Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
    +class _LoaderProtocol(Protocol):
    +    def load_module(self, fullname: str, /) -> types.ModuleType: ...
    +
    +
    +class _ZipLoaderModule(Protocol):
    +    __loader__: zipimport.zipimporter
    +
    +
+_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
    +
    +
    +class PEP440Warning(RuntimeWarning):
    +    """
    +    Used when there is an issue with a version or specifier not complying with
    +    PEP 440.
    +    """
    +
    +
    +parse_version = _packaging_version.Version
    +
    +
    +_state_vars: dict[str, str] = {}
    +
    +
    +def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
    +    _state_vars[varname] = vartype
    +    return initial_value
    +
    +
    +def __getstate__() -> dict[str, Any]:
    +    state = {}
    +    g = globals()
    +    for k, v in _state_vars.items():
    +        state[k] = g['_sget_' + v](g[k])
    +    return state
    +
    +
    +def __setstate__(state: dict[str, Any]) -> dict[str, Any]:
    +    g = globals()
    +    for k, v in state.items():
    +        g['_sset_' + _state_vars[k]](k, g[k], v)
    +    return state
    +
    +
    +def _sget_dict(val):
    +    return val.copy()
    +
    +
    +def _sset_dict(key, ob, state):
    +    ob.clear()
    +    ob.update(state)
    +
    +
    +def _sget_object(val):
    +    return val.__getstate__()
    +
    +
    +def _sset_object(key, ob, state):
    +    ob.__setstate__(state)
    +
    +
    +_sget_none = _sset_none = lambda *args: None
    +
    +
    +def get_supported_platform():
    +    """Return this platform's maximum compatible version.
    +
    +    distutils.util.get_platform() normally reports the minimum version
    +    of macOS that would be required to *use* extensions produced by
    +    distutils.  But what we want when checking compatibility is to know the
    +    version of macOS that we are *running*.  To allow usage of packages that
    +    explicitly require a newer version of macOS, we must also know the
    +    current version of the OS.
    +
    +    If this condition occurs for any other platform with a version in its
    +    platform strings, this function should be extended accordingly.
    +    """
    +    plat = get_build_platform()
    +    m = macosVersionString.match(plat)
    +    if m is not None and sys.platform == "darwin":
    +        try:
    +            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
    +        except ValueError:
    +            # not macOS
    +            pass
    +    return plat
    +
    +
    +__all__ = [
    +    # Basic resource access and distribution/entry point discovery
    +    'require',
    +    'run_script',
    +    'get_provider',
    +    'get_distribution',
    +    'load_entry_point',
    +    'get_entry_map',
    +    'get_entry_info',
    +    'iter_entry_points',
    +    'resource_string',
    +    'resource_stream',
    +    'resource_filename',
    +    'resource_listdir',
    +    'resource_exists',
    +    'resource_isdir',
    +    # Environmental control
    +    'declare_namespace',
    +    'working_set',
    +    'add_activation_listener',
    +    'find_distributions',
    +    'set_extraction_path',
    +    'cleanup_resources',
    +    'get_default_cache',
    +    # Primary implementation classes
    +    'Environment',
    +    'WorkingSet',
    +    'ResourceManager',
    +    'Distribution',
    +    'Requirement',
    +    'EntryPoint',
    +    # Exceptions
    +    'ResolutionError',
    +    'VersionConflict',
    +    'DistributionNotFound',
    +    'UnknownExtra',
    +    'ExtractionError',
    +    # Warnings
    +    'PEP440Warning',
    +    # Parsing functions and string utilities
    +    'parse_requirements',
    +    'parse_version',
    +    'safe_name',
    +    'safe_version',
    +    'get_platform',
    +    'compatible_platforms',
    +    'yield_lines',
    +    'split_sections',
    +    'safe_extra',
    +    'to_filename',
    +    'invalid_marker',
    +    'evaluate_marker',
    +    # filesystem utilities
    +    'ensure_directory',
    +    'normalize_path',
    +    # Distribution "precedence" constants
    +    'EGG_DIST',
    +    'BINARY_DIST',
    +    'SOURCE_DIST',
    +    'CHECKOUT_DIST',
    +    'DEVELOP_DIST',
    +    # "Provider" interfaces, implementations, and registration/lookup APIs
    +    'IMetadataProvider',
    +    'IResourceProvider',
    +    'FileMetadata',
    +    'PathMetadata',
    +    'EggMetadata',
    +    'EmptyProvider',
    +    'empty_provider',
    +    'NullProvider',
    +    'EggProvider',
    +    'DefaultProvider',
    +    'ZipProvider',
    +    'register_finder',
    +    'register_namespace_handler',
    +    'register_loader_type',
    +    'fixup_namespace_packages',
    +    'get_importer',
    +    # Warnings
    +    'PkgResourcesDeprecationWarning',
    +    # Deprecated/backward compatibility only
    +    'run_main',
    +    'AvailableDistributions',
    +]
    +
    +
    +class ResolutionError(Exception):
    +    """Abstract base for dependency resolution errors"""
    +
    +    def __repr__(self):
    +        return self.__class__.__name__ + repr(self.args)
    +
    +
    +class VersionConflict(ResolutionError):
    +    """
    +    An already-installed version conflicts with the requested version.
    +
    +    Should be initialized with the installed Distribution and the requested
    +    Requirement.
    +    """
    +
    +    _template = "{self.dist} is installed but {self.req} is required"
    +
    +    @property
    +    def dist(self) -> Distribution:
    +        return self.args[0]
    +
    +    @property
    +    def req(self) -> Requirement:
    +        return self.args[1]
    +
    +    def report(self):
    +        return self._template.format(**locals())
    +
    +    def with_context(self, required_by: set[Distribution | str]):
    +        """
    +        If required_by is non-empty, return a version of self that is a
    +        ContextualVersionConflict.
    +        """
    +        if not required_by:
    +            return self
    +        args = self.args + (required_by,)
    +        return ContextualVersionConflict(*args)
    +
    +
    +class ContextualVersionConflict(VersionConflict):
    +    """
    +    A VersionConflict that accepts a third parameter, the set of the
    +    requirements that required the installed Distribution.
    +    """
    +
    +    _template = VersionConflict._template + ' by {self.required_by}'
    +
    +    @property
    +    def required_by(self) -> set[str]:
    +        return self.args[2]
    +
    +
    +class DistributionNotFound(ResolutionError):
    +    """A requested distribution was not found"""
    +
    +    _template = (
    +        "The '{self.req}' distribution was not found "
    +        "and is required by {self.requirers_str}"
    +    )
    +
    +    @property
    +    def req(self) -> Requirement:
    +        return self.args[0]
    +
    +    @property
    +    def requirers(self) -> set[str] | None:
    +        return self.args[1]
    +
    +    @property
    +    def requirers_str(self):
    +        if not self.requirers:
    +            return 'the application'
    +        return ', '.join(self.requirers)
    +
    +    def report(self):
    +        return self._template.format(**locals())
    +
    +    def __str__(self):
    +        return self.report()
    +
    +
    +class UnknownExtra(ResolutionError):
    +    """Distribution doesn't have an "extra feature" of the given name"""
    +
    +
    +_provider_factories: dict[type[_ModuleLike], _ProviderFactoryType] = {}
    +
    +PY_MAJOR = '{}.{}'.format(*sys.version_info)
    +EGG_DIST = 3
    +BINARY_DIST = 2
    +SOURCE_DIST = 1
    +CHECKOUT_DIST = 0
    +DEVELOP_DIST = -1
    +
    +
    +def register_loader_type(
    +    loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
    +):
    +    """Register `provider_factory` to make providers for `loader_type`
    +
    +    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    +    and `provider_factory` is a function that, passed a *module* object,
    +    returns an ``IResourceProvider`` for that module.
    +    """
    +    _provider_factories[loader_type] = provider_factory
    +
    +
    +@overload
    +def get_provider(moduleOrReq: str) -> IResourceProvider: ...
    +@overload
    +def get_provider(moduleOrReq: Requirement) -> Distribution: ...
    +def get_provider(moduleOrReq: str | Requirement) -> IResourceProvider | Distribution:
    +    """Return an IResourceProvider for the named module or requirement"""
    +    if isinstance(moduleOrReq, Requirement):
    +        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    +    try:
    +        module = sys.modules[moduleOrReq]
    +    except KeyError:
    +        __import__(moduleOrReq)
    +        module = sys.modules[moduleOrReq]
    +    loader = getattr(module, '__loader__', None)
    +    return _find_adapter(_provider_factories, loader)(module)
    +
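+# Editor's sketch (illustrative, not upstream code): get_provider() is the
+# usual first step before the module-level resource_* helpers:
+#
+#     provider = get_provider("pip")  # IResourceProvider for the pip package
+#     assert provider.has_resource("__init__.py")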
    +
    +@functools.lru_cache(maxsize=None)
    +def _macos_vers():
    +    version = platform.mac_ver()[0]
    +    # fallback for MacPorts
    +    if version == '':
    +        plist = '/System/Library/CoreServices/SystemVersion.plist'
    +        if os.path.exists(plist):
    +            with open(plist, 'rb') as fh:
    +                plist_content = plistlib.load(fh)
    +            if 'ProductVersion' in plist_content:
    +                version = plist_content['ProductVersion']
    +    return version.split('.')
    +
    +
    +def _macos_arch(machine):
    +    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
    +
    +
    +def get_build_platform():
    +    """Return this platform's string for platform-specific distributions
    +
    +    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    +    needs some hacks for Linux and macOS.
    +    """
    +    from sysconfig import get_platform
    +
    +    plat = get_platform()
    +    if sys.platform == "darwin" and not plat.startswith('macosx-'):
    +        try:
    +            version = _macos_vers()
    +            machine = os.uname()[4].replace(" ", "_")
    +            return "macosx-%d.%d-%s" % (
    +                int(version[0]),
    +                int(version[1]),
    +                _macos_arch(machine),
    +            )
    +        except ValueError:
    +            # if someone is running a non-Mac darwin system, this will fall
    +            # through to the default implementation
    +            pass
    +    return plat
    +
    +
    +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
    +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
    +# XXX backward compat
    +get_platform = get_build_platform
    +
    +
    +def compatible_platforms(provided: str | None, required: str | None):
    +    """Can code for the `provided` platform run on the `required` platform?
    +
    +    Returns true if either platform is ``None``, or the platforms are equal.
    +
    +    XXX Needs compatibility checks for Linux and other unixy OSes.
    +    """
    +    if provided is None or required is None or provided == required:
    +        # easy case
    +        return True
    +
    +    # macOS special cases
    +    reqMac = macosVersionString.match(required)
    +    if reqMac:
    +        provMac = macosVersionString.match(provided)
    +
    +        # is this a Mac package?
    +        if not provMac:
    +            # this is backwards compatibility for packages built before
    +            # setuptools 0.6. All packages built after this point will
    +            # use the new macOS designation.
    +            provDarwin = darwinVersionString.match(provided)
    +            if provDarwin:
    +                dversion = int(provDarwin.group(1))
    +                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
    +                if (
    +                    dversion == 7
    +                    and macosversion >= "10.3"
    +                    or dversion == 8
    +                    and macosversion >= "10.4"
    +                ):
    +                    return True
    +            # egg isn't macOS or legacy darwin
    +            return False
    +
    +        # are they the same major version and machine type?
    +        if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
    +            return False
    +
    +        # is the required OS major update >= the provided one?
    +        if int(provMac.group(2)) > int(reqMac.group(2)):
    +            return False
    +
    +        return True
    +
    +    # XXX Linux and other platforms' special cases should go here
    +    return False
    +
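+# Editor's note (illustrative, not upstream code): a few concrete cases of the
+# rules above, including the macOS major/minor check:
+#
+#     assert compatible_platforms("win32", "win32")
+#     assert compatible_platforms("macosx-10.12-x86_64", "macosx-10.15-x86_64")
+#     assert not compatible_platforms("macosx-11.0-arm64", "macosx-10.15-x86_64")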
    +
    +@overload
    +def get_distribution(dist: _DistributionT) -> _DistributionT: ...
    +@overload
    +def get_distribution(dist: _PkgReqType) -> Distribution: ...
    +def get_distribution(dist: Distribution | _PkgReqType) -> Distribution:
    +    """Return a current distribution object for a Requirement or string"""
    +    if isinstance(dist, str):
    +        dist = Requirement.parse(dist)
    +    if isinstance(dist, Requirement):
    +        # Bad type narrowing, dist has to be a Requirement here, so get_provider has to return Distribution
    +        dist = get_provider(dist)  # type: ignore[assignment]
    +    if not isinstance(dist, Distribution):
    +        raise TypeError("Expected str, Requirement, or Distribution", dist)
    +    return dist
    +
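+# Editor's sketch (illustrative, not upstream code): get_distribution() accepts
+# a project name, a Requirement, or an existing Distribution:
+#
+#     dist = get_distribution("pip")
+#     print(dist.project_name, dist.version)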
    +
    +def load_entry_point(dist: _EPDistType, group: str, name: str) -> _ResolvedEntryPoint:
    +    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    +    return get_distribution(dist).load_entry_point(group, name)
    +
    +
    +@overload
    +def get_entry_map(
    +    dist: _EPDistType, group: None = None
    +) -> dict[str, dict[str, EntryPoint]]: ...
    +@overload
    +def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ...
    +def get_entry_map(dist: _EPDistType, group: str | None = None):
    +    """Return the entry point map for `group`, or the full entry map"""
    +    return get_distribution(dist).get_entry_map(group)
    +
    +
    +def get_entry_info(dist: _EPDistType, group: str, name: str):
    +    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    +    return get_distribution(dist).get_entry_info(group, name)
    +
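+# Editor's sketch (illustrative, not upstream code): the three helpers above
+# are thin wrappers around Distribution's entry-point API, e.g. for a console
+# script:
+#
+#     ep = get_entry_info("pip", "console_scripts", "pip")      # EntryPoint or None
+#     main = load_entry_point("pip", "console_scripts", "pip")  # resolved callable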
    +
    +class IMetadataProvider(Protocol):
    +    def has_metadata(self, name: str) -> bool:
    +        """Does the package's distribution contain the named metadata?"""
    +
    +    def get_metadata(self, name: str) -> str:
    +        """The named metadata resource as a string"""
    +
    +    def get_metadata_lines(self, name: str) -> Iterator[str]:
    +        """Yield named metadata resource as list of non-blank non-comment lines
    +
    +        Leading and trailing whitespace is stripped from each line, and lines
    +        with ``#`` as the first non-blank character are omitted."""
    +
    +    def metadata_isdir(self, name: str) -> bool:
    +        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
    +
    +    def metadata_listdir(self, name: str) -> list[str]:
    +        """List of metadata names in the directory (like ``os.listdir()``)"""
    +
    +    def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
    +        """Execute the named script in the supplied namespace dictionary"""
    +
    +
    +class IResourceProvider(IMetadataProvider, Protocol):
    +    """An object that provides access to package resources"""
    +
    +    def get_resource_filename(
    +        self, manager: ResourceManager, resource_name: str
    +    ) -> str:
    +        """Return a true filesystem path for `resource_name`
    +
    +        `manager` must be a ``ResourceManager``"""
    +
    +    def get_resource_stream(
    +        self, manager: ResourceManager, resource_name: str
    +    ) -> _ResourceStream:
    +        """Return a readable file-like object for `resource_name`
    +
    +        `manager` must be a ``ResourceManager``"""
    +
    +    def get_resource_string(
    +        self, manager: ResourceManager, resource_name: str
    +    ) -> bytes:
    +        """Return the contents of `resource_name` as :obj:`bytes`
    +
    +        `manager` must be a ``ResourceManager``"""
    +
    +    def has_resource(self, resource_name: str) -> bool:
    +        """Does the package contain the named resource?"""
    +
    +    def resource_isdir(self, resource_name: str) -> bool:
    +        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
    +
    +    def resource_listdir(self, resource_name: str) -> list[str]:
    +        """List of resource names in the directory (like ``os.listdir()``)"""
    +
    +
    +class WorkingSet:
    +    """A collection of active distributions on sys.path (or a similar list)"""
    +
    +    def __init__(self, entries: Iterable[str] | None = None):
    +        """Create working set from list of path entries (default=sys.path)"""
    +        self.entries: list[str] = []
    +        self.entry_keys = {}
    +        self.by_key = {}
    +        self.normalized_to_canonical_keys = {}
    +        self.callbacks = []
    +
    +        if entries is None:
    +            entries = sys.path
    +
    +        for entry in entries:
    +            self.add_entry(entry)
    +
    +    @classmethod
    +    def _build_master(cls):
    +        """
    +        Prepare the master working set.
    +        """
    +        ws = cls()
    +        try:
    +            from __main__ import __requires__
    +        except ImportError:
    +            # The main program does not list any requirements
    +            return ws
    +
    +        # ensure the requirements are met
    +        try:
    +            ws.require(__requires__)
    +        except VersionConflict:
    +            return cls._build_from_requirements(__requires__)
    +
    +        return ws
    +
    +    @classmethod
    +    def _build_from_requirements(cls, req_spec):
    +        """
    +        Build a working set from a requirement spec. Rewrites sys.path.
    +        """
    +        # try it without defaults already on sys.path
    +        # by starting with an empty path
    +        ws = cls([])
    +        reqs = parse_requirements(req_spec)
    +        dists = ws.resolve(reqs, Environment())
    +        for dist in dists:
    +            ws.add(dist)
    +
    +        # add any missing entries from sys.path
    +        for entry in sys.path:
    +            if entry not in ws.entries:
    +                ws.add_entry(entry)
    +
    +        # then copy back to sys.path
    +        sys.path[:] = ws.entries
    +        return ws
    +
    +    def add_entry(self, entry: str):
    +        """Add a path item to ``.entries``, finding any distributions on it
    +
    +        ``find_distributions(entry, True)`` is used to find distributions
    +        corresponding to the path entry, and they are added.  `entry` is
    +        always appended to ``.entries``, even if it is already present.
    +        (This is because ``sys.path`` can contain the same value more than
    +        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
    +        equal ``sys.path``.)
    +        """
    +        self.entry_keys.setdefault(entry, [])
    +        self.entries.append(entry)
    +        for dist in find_distributions(entry, True):
    +            self.add(dist, entry, False)
    +
    +    def __contains__(self, dist: Distribution) -> bool:
    +        """True if `dist` is the active distribution for its project"""
    +        return self.by_key.get(dist.key) == dist
    +
    +    def find(self, req: Requirement) -> Distribution | None:
    +        """Find a distribution matching requirement `req`
    +
    +        If there is an active distribution for the requested project, this
    +        returns it as long as it meets the version requirement specified by
    +        `req`.  But, if there is an active distribution for the project and it
    +        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
    +        If there is no active distribution for the requested project, ``None``
    +        is returned.
    +        """
    +        dist = self.by_key.get(req.key)
    +
    +        if dist is None:
    +            canonical_key = self.normalized_to_canonical_keys.get(req.key)
    +
    +            if canonical_key is not None:
    +                req.key = canonical_key
    +                dist = self.by_key.get(canonical_key)
    +
    +        if dist is not None and dist not in req:
    +            # XXX add more info
    +            raise VersionConflict(dist, req)
    +        return dist
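+
+    # Usage sketch (illustrative; "example-pkg" is a hypothetical project):
+    #
+    #     req = Requirement.parse('example-pkg>=1.0')
+    #     dist = working_set.find(req)   # Distribution or None;
+    #                                    # raises VersionConflict on mismatch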
    +
    +    def iter_entry_points(self, group: str, name: str | None = None):
    +        """Yield entry point objects from `group` matching `name`
    +
    +        If `name` is None, yields all entry points in `group` from all
    +        distributions in the working set, otherwise only ones matching
    +        both `group` and `name` are yielded (in distribution order).
    +        """
    +        return (
    +            entry
    +            for dist in self
    +            for entry in dist.get_entry_map(group).values()
    +            if name is None or name == entry.name
    +        )
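+
+    # Usage sketch (illustrative; 'console_scripts' is just a common group
+    # name, not one this module defines):
+    #
+    #     for ep in working_set.iter_entry_points('console_scripts'):
+    #         print(ep.name)             # yielded in distribution order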
    +
    +    def run_script(self, requires: str, script_name: str):
    +        """Locate distribution for `requires` and run `script_name` script"""
    +        ns = sys._getframe(1).f_globals
    +        name = ns['__name__']
    +        ns.clear()
    +        ns['__name__'] = name
    +        self.require(requires)[0].run_script(script_name, ns)
    +
    +    def __iter__(self) -> Iterator[Distribution]:
    +        """Yield distributions for non-duplicate projects in the working set
    +
    +        The yield order is the order in which the items' path entries were
    +        added to the working set.
    +        """
    +        seen = set()
    +        for item in self.entries:
    +            if item not in self.entry_keys:
    +                # workaround a cache issue
    +                continue
    +
    +            for key in self.entry_keys[item]:
    +                if key not in seen:
    +                    seen.add(key)
    +                    yield self.by_key[key]
    +
    +    def add(
    +        self,
    +        dist: Distribution,
    +        entry: str | None = None,
    +        insert: bool = True,
    +        replace: bool = False,
    +    ):
    +        """Add `dist` to working set, associated with `entry`
    +
    +        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
    +        On exit from this routine, `entry` is added to the end of the working
    +        set's ``.entries`` (if it wasn't already present).
    +
    +        `dist` is only added to the working set if it's for a project that
    +        doesn't already have a distribution in the set, unless `replace=True`.
    +        If it's added, any callbacks registered with the ``subscribe()`` method
    +        will be called.
    +        """
    +        if insert:
    +            dist.insert_on(self.entries, entry, replace=replace)
    +
    +        if entry is None:
    +            entry = dist.location
    +        keys = self.entry_keys.setdefault(entry, [])
    +        keys2 = self.entry_keys.setdefault(dist.location, [])
    +        if not replace and dist.key in self.by_key:
    +            # ignore hidden distros
    +            return
    +
    +        self.by_key[dist.key] = dist
    +        normalized_name = _packaging_utils.canonicalize_name(dist.key)
    +        self.normalized_to_canonical_keys[normalized_name] = dist.key
    +        if dist.key not in keys:
    +            keys.append(dist.key)
    +        if dist.key not in keys2:
    +            keys2.append(dist.key)
    +        self._added_new(dist)
    +
    +    @overload
    +    def resolve(
    +        self,
    +        requirements: Iterable[Requirement],
    +        env: Environment | None,
    +        installer: _InstallerTypeT[_DistributionT],
    +        replace_conflicting: bool = False,
    +        extras: tuple[str, ...] | None = None,
    +    ) -> list[_DistributionT]: ...
    +    @overload
    +    def resolve(
    +        self,
    +        requirements: Iterable[Requirement],
    +        env: Environment | None = None,
    +        *,
    +        installer: _InstallerTypeT[_DistributionT],
    +        replace_conflicting: bool = False,
    +        extras: tuple[str, ...] | None = None,
    +    ) -> list[_DistributionT]: ...
    +    @overload
    +    def resolve(
    +        self,
    +        requirements: Iterable[Requirement],
    +        env: Environment | None = None,
    +        installer: _InstallerType | None = None,
    +        replace_conflicting: bool = False,
    +        extras: tuple[str, ...] | None = None,
    +    ) -> list[Distribution]: ...
    +    def resolve(
    +        self,
    +        requirements: Iterable[Requirement],
    +        env: Environment | None = None,
    +        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
    +        replace_conflicting: bool = False,
    +        extras: tuple[str, ...] | None = None,
    +    ) -> list[Distribution] | list[_DistributionT]:
    +        """List all distributions needed to (recursively) meet `requirements`
    +
    +        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
    +        if supplied, should be an ``Environment`` instance.  If
    +        not supplied, it defaults to all distributions available within any
    +        entry or distribution in the working set.  `installer`, if supplied,
    +        will be invoked with each requirement that cannot be met by an
    +        already-installed distribution; it should return a ``Distribution`` or
    +        ``None``.
    +
+        Unless `replace_conflicting=True`, raises a VersionConflict exception
+        if any requirements are found on the path that have the correct name
+        but the wrong version.  Otherwise, if an `installer` is supplied it
+        will be invoked to obtain the correct version of the requirement and
+        activate it.
    +
    +        `extras` is a list of the extras to be used with these requirements.
    +        This is important because extra requirements may look like `my_req;
    +        extra = "my_extra"`, which would otherwise be interpreted as a purely
    +        optional requirement.  Instead, we want to be able to assert that these
    +        requirements are truly required.
    +        """
    +
    +        # set up the stack
    +        requirements = list(requirements)[::-1]
    +        # set of processed requirements
    +        processed = set()
    +        # key -> dist
    +        best = {}
    +        to_activate = []
    +
    +        req_extras = _ReqExtras()
    +
    +        # Mapping of requirement to set of distributions that required it;
    +        # useful for reporting info about conflicts.
    +        required_by = collections.defaultdict(set)
    +
    +        while requirements:
    +            # process dependencies breadth-first
    +            req = requirements.pop(0)
    +            if req in processed:
    +                # Ignore cyclic or redundant dependencies
    +                continue
    +
    +            if not req_extras.markers_pass(req, extras):
    +                continue
    +
    +            dist = self._resolve_dist(
    +                req, best, replace_conflicting, env, installer, required_by, to_activate
    +            )
    +
    +            # push the new requirements onto the stack
    +            new_requirements = dist.requires(req.extras)[::-1]
    +            requirements.extend(new_requirements)
    +
    +            # Register the new requirements needed by req
    +            for new_requirement in new_requirements:
    +                required_by[new_requirement].add(req.project_name)
    +                req_extras[new_requirement] = req.extras
    +
    +            processed.add(req)
    +
    +        # return list of distros to activate
    +        return to_activate
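+
+    # Usage sketch (illustrative; the requirement string is hypothetical):
+    #
+    #     reqs = parse_requirements('example-pkg>=1.0')
+    #     for dist in working_set.resolve(reqs):
+    #         working_set.add(dist)      # activate each resolved distribution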
    +
    +    def _resolve_dist(
    +        self, req, best, replace_conflicting, env, installer, required_by, to_activate
    +    ) -> Distribution:
    +        dist = best.get(req.key)
    +        if dist is None:
    +            # Find the best distribution and add it to the map
    +            dist = self.by_key.get(req.key)
    +            if dist is None or (dist not in req and replace_conflicting):
    +                ws = self
    +                if env is None:
    +                    if dist is None:
    +                        env = Environment(self.entries)
    +                    else:
    +                        # Use an empty environment and workingset to avoid
    +                        # any further conflicts with the conflicting
    +                        # distribution
    +                        env = Environment([])
    +                        ws = WorkingSet([])
    +                dist = best[req.key] = env.best_match(
    +                    req, ws, installer, replace_conflicting=replace_conflicting
    +                )
    +                if dist is None:
    +                    requirers = required_by.get(req, None)
    +                    raise DistributionNotFound(req, requirers)
    +            to_activate.append(dist)
    +        if dist not in req:
    +            # Oops, the "best" so far conflicts with a dependency
    +            dependent_req = required_by[req]
    +            raise VersionConflict(dist, req).with_context(dependent_req)
    +        return dist
    +
    +    @overload
    +    def find_plugins(
    +        self,
    +        plugin_env: Environment,
    +        full_env: Environment | None,
    +        installer: _InstallerTypeT[_DistributionT],
    +        fallback: bool = True,
    +    ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
    +    @overload
    +    def find_plugins(
    +        self,
    +        plugin_env: Environment,
    +        full_env: Environment | None = None,
    +        *,
    +        installer: _InstallerTypeT[_DistributionT],
    +        fallback: bool = True,
    +    ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
    +    @overload
    +    def find_plugins(
    +        self,
    +        plugin_env: Environment,
    +        full_env: Environment | None = None,
    +        installer: _InstallerType | None = None,
    +        fallback: bool = True,
    +    ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ...
    +    def find_plugins(
    +        self,
    +        plugin_env: Environment,
    +        full_env: Environment | None = None,
    +        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
    +        fallback: bool = True,
    +    ) -> tuple[
    +        list[Distribution] | list[_DistributionT],
    +        dict[Distribution, Exception],
    +    ]:
    +        """Find all activatable distributions in `plugin_env`
    +
    +        Example usage::
    +
    +            distributions, errors = working_set.find_plugins(
    +                Environment(plugin_dirlist)
    +            )
    +            # add plugins+libs to sys.path
    +            map(working_set.add, distributions)
    +            # display errors
    +            print('Could not load', errors)
    +
    +        The `plugin_env` should be an ``Environment`` instance that contains
    +        only distributions that are in the project's "plugin directory" or
    +        directories. The `full_env`, if supplied, should be an ``Environment``
    +        contains all currently-available distributions.  If `full_env` is not
    +        supplied, one is created automatically from the ``WorkingSet`` this
    +        method is called on, which will typically mean that every directory on
    +        ``sys.path`` will be scanned for distributions.
    +
    +        `installer` is a standard installer callback as used by the
    +        ``resolve()`` method. The `fallback` flag indicates whether we should
    +        attempt to resolve older versions of a plugin if the newest version
    +        cannot be resolved.
    +
    +        This method returns a 2-tuple: (`distributions`, `error_info`), where
    +        `distributions` is a list of the distributions found in `plugin_env`
    +        that were loadable, along with any other distributions that are needed
    +        to resolve their dependencies.  `error_info` is a dictionary mapping
    +        unloadable plugin distributions to an exception instance describing the
    +        error that occurred. Usually this will be a ``DistributionNotFound`` or
    +        ``VersionConflict`` instance.
    +        """
    +
    +        plugin_projects = list(plugin_env)
    +        # scan project names in alphabetic order
    +        plugin_projects.sort()
    +
    +        error_info: dict[Distribution, Exception] = {}
    +        distributions: dict[Distribution, Exception | None] = {}
    +
    +        if full_env is None:
    +            env = Environment(self.entries)
    +            env += plugin_env
    +        else:
    +            env = full_env + plugin_env
    +
    +        shadow_set = self.__class__([])
    +        # put all our entries in shadow_set
    +        list(map(shadow_set.add, self))
    +
    +        for project_name in plugin_projects:
    +            for dist in plugin_env[project_name]:
    +                req = [dist.as_requirement()]
    +
    +                try:
    +                    resolvees = shadow_set.resolve(req, env, installer)
    +
    +                except ResolutionError as v:
    +                    # save error info
    +                    error_info[dist] = v
    +                    if fallback:
    +                        # try the next older version of project
    +                        continue
    +                    else:
    +                        # give up on this project, keep going
    +                        break
    +
    +                else:
    +                    list(map(shadow_set.add, resolvees))
    +                    distributions.update(dict.fromkeys(resolvees))
    +
    +                    # success, no need to try any more versions of this project
    +                    break
    +
    +        sorted_distributions = list(distributions)
    +        sorted_distributions.sort()
    +
    +        return sorted_distributions, error_info
    +
    +    def require(self, *requirements: _NestedStr):
    +        """Ensure that distributions matching `requirements` are activated
    +
    +        `requirements` must be a string or a (possibly-nested) sequence
    +        thereof, specifying the distributions and versions required.  The
    +        return value is a sequence of the distributions that needed to be
    +        activated to fulfill the requirements; all relevant distributions are
    +        included, even if they were already activated in this working set.
    +        """
    +        needed = self.resolve(parse_requirements(requirements))
    +
    +        for dist in needed:
    +            self.add(dist)
    +
    +        return needed
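+
+    # Usage sketch (illustrative; the requirement string is hypothetical):
+    # require() is resolve() plus activation of the results.
+    #
+    #     for dist in working_set.require('example-pkg>=1.0'):
+    #         print('activated', dist)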
    +
    +    def subscribe(
    +        self, callback: Callable[[Distribution], object], existing: bool = True
    +    ):
    +        """Invoke `callback` for all distributions
    +
+        If `existing=True` (default), `callback` is also invoked
+        immediately for every distribution already in the working set.
    +        """
    +        if callback in self.callbacks:
    +            return
    +        self.callbacks.append(callback)
    +        if not existing:
    +            return
    +        for dist in self:
    +            callback(dist)
    +
    +    def _added_new(self, dist):
    +        for callback in self.callbacks:
    +            callback(dist)
    +
    +    def __getstate__(self):
    +        return (
    +            self.entries[:],
    +            self.entry_keys.copy(),
    +            self.by_key.copy(),
    +            self.normalized_to_canonical_keys.copy(),
    +            self.callbacks[:],
    +        )
    +
    +    def __setstate__(self, e_k_b_n_c):
    +        entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
    +        self.entries = entries[:]
    +        self.entry_keys = keys.copy()
    +        self.by_key = by_key.copy()
    +        self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
    +        self.callbacks = callbacks[:]
    +
    +
    +class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
    +    """
    +    Map each requirement to the extras that demanded it.
    +    """
    +
    +    def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
    +        """
    +        Evaluate markers for req against each extra that
    +        demanded it.
    +
    +        Return False if the req has a marker and fails
    +        evaluation. Otherwise, return True.
    +        """
    +        extra_evals = (
    +            req.marker.evaluate({'extra': extra})
    +            for extra in self.get(req, ()) + (extras or (None,))
    +        )
    +        return not req.marker or any(extra_evals)
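+
+    # Illustrative check (hypothetical requirement): a marker such as
+    # 'extra == "tests"' passes only when "tests" is among the extras that
+    # demanded the requirement.
+    #
+    #     req = next(iter(parse_requirements('pytest; extra == "tests"')))
+    #     _ReqExtras().markers_pass(req, extras=('tests',))   # -> True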
    +
    +
    +class Environment:
    +    """Searchable snapshot of distributions on a search path"""
    +
    +    def __init__(
    +        self,
    +        search_path: Iterable[str] | None = None,
    +        platform: str | None = get_supported_platform(),
    +        python: str | None = PY_MAJOR,
    +    ):
    +        """Snapshot distributions available on a search path
    +
    +        Any distributions found on `search_path` are added to the environment.
    +        `search_path` should be a sequence of ``sys.path`` items.  If not
    +        supplied, ``sys.path`` is used.
    +
    +        `platform` is an optional string specifying the name of the platform
    +        that platform-specific distributions must be compatible with.  If
    +        unspecified, it defaults to the current platform.  `python` is an
    +        optional string naming the desired version of Python (e.g. ``'3.6'``);
    +        it defaults to the current version.
    +
    +        You may explicitly set `platform` (and/or `python`) to ``None`` if you
    +        wish to map *all* distributions, not just those compatible with the
    +        running platform or Python version.
    +        """
    +        self._distmap = {}
    +        self.platform = platform
    +        self.python = python
    +        self.scan(search_path)
    +
    +    def can_add(self, dist: Distribution):
    +        """Is distribution `dist` acceptable for this environment?
    +
    +        The distribution must match the platform and python version
    +        requirements specified when this environment was created, or False
    +        is returned.
    +        """
    +        py_compat = (
    +            self.python is None
    +            or dist.py_version is None
    +            or dist.py_version == self.python
    +        )
    +        return py_compat and compatible_platforms(dist.platform, self.platform)
    +
    +    def remove(self, dist: Distribution):
    +        """Remove `dist` from the environment"""
    +        self._distmap[dist.key].remove(dist)
    +
    +    def scan(self, search_path: Iterable[str] | None = None):
    +        """Scan `search_path` for distributions usable in this environment
    +
    +        Any distributions found are added to the environment.
    +        `search_path` should be a sequence of ``sys.path`` items.  If not
    +        supplied, ``sys.path`` is used.  Only distributions conforming to
    +        the platform/python version defined at initialization are added.
    +        """
    +        if search_path is None:
    +            search_path = sys.path
    +
    +        for item in search_path:
    +            for dist in find_distributions(item):
    +                self.add(dist)
    +
    +    def __getitem__(self, project_name: str) -> list[Distribution]:
    +        """Return a newest-to-oldest list of distributions for `project_name`
    +
    +        Uses case-insensitive `project_name` comparison, assuming all the
    +        project's distributions use their project's name converted to all
    +        lowercase as their key.
    +
    +        """
    +        distribution_key = project_name.lower()
    +        return self._distmap.get(distribution_key, [])
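+
+    # Usage sketch (illustrative; the project name is hypothetical).  The
+    # list is kept newest-to-oldest by ``add`` below.
+    #
+    #     env = Environment()
+    #     dists = env['example-pkg']     # [] when the project is unknown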
    +
    +    def add(self, dist: Distribution):
    +        """Add `dist` if we ``can_add()`` it and it has not already been added"""
    +        if self.can_add(dist) and dist.has_version():
    +            dists = self._distmap.setdefault(dist.key, [])
    +            if dist not in dists:
    +                dists.append(dist)
    +                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
    +
    +    @overload
    +    def best_match(
    +        self,
    +        req: Requirement,
    +        working_set: WorkingSet,
    +        installer: _InstallerTypeT[_DistributionT],
    +        replace_conflicting: bool = False,
    +    ) -> _DistributionT: ...
    +    @overload
    +    def best_match(
    +        self,
    +        req: Requirement,
    +        working_set: WorkingSet,
    +        installer: _InstallerType | None = None,
    +        replace_conflicting: bool = False,
    +    ) -> Distribution | None: ...
    +    def best_match(
    +        self,
    +        req: Requirement,
    +        working_set: WorkingSet,
    +        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
    +        replace_conflicting: bool = False,
    +    ) -> Distribution | None:
    +        """Find distribution best matching `req` and usable on `working_set`
    +
    +        This calls the ``find(req)`` method of the `working_set` to see if a
    +        suitable distribution is already active.  (This may raise
    +        ``VersionConflict`` if an unsuitable version of the project is already
    +        active in the specified `working_set`.)  If a suitable distribution
    +        isn't active, this method returns the newest distribution in the
    +        environment that meets the ``Requirement`` in `req`.  If no suitable
    +        distribution is found, and `installer` is supplied, then the result of
    +        calling the environment's ``obtain(req, installer)`` method will be
    +        returned.
    +        """
    +        try:
    +            dist = working_set.find(req)
    +        except VersionConflict:
    +            if not replace_conflicting:
    +                raise
    +            dist = None
    +        if dist is not None:
    +            return dist
    +        for dist in self[req.key]:
    +            if dist in req:
    +                return dist
    +        # try to download/install
    +        return self.obtain(req, installer)
    +
    +    @overload
    +    def obtain(
    +        self,
    +        requirement: Requirement,
    +        installer: _InstallerTypeT[_DistributionT],
    +    ) -> _DistributionT: ...
    +    @overload
    +    def obtain(
    +        self,
    +        requirement: Requirement,
    +        installer: Callable[[Requirement], None] | None = None,
    +    ) -> None: ...
    +    @overload
    +    def obtain(
    +        self,
    +        requirement: Requirement,
    +        installer: _InstallerType | None = None,
    +    ) -> Distribution | None: ...
    +    def obtain(
    +        self,
    +        requirement: Requirement,
    +        installer: Callable[[Requirement], None]
    +        | _InstallerType
    +        | None
    +        | _InstallerTypeT[_DistributionT] = None,
    +    ) -> Distribution | None:
    +        """Obtain a distribution matching `requirement` (e.g. via download)
    +
+        In the base ``Environment`` class, this routine just returns
    +        ``installer(requirement)``, unless `installer` is None, in which case
    +        None is returned instead.  This method is a hook that allows subclasses
    +        to attempt other ways of obtaining a distribution before falling back
    +        to the `installer` argument."""
    +        return installer(requirement) if installer else None
    +
    +    def __iter__(self) -> Iterator[str]:
    +        """Yield the unique project names of the available distributions"""
    +        for key in self._distmap.keys():
    +            if self[key]:
    +                yield key
    +
    +    def __iadd__(self, other: Distribution | Environment):
    +        """In-place addition of a distribution or environment"""
    +        if isinstance(other, Distribution):
    +            self.add(other)
    +        elif isinstance(other, Environment):
    +            for project in other:
    +                for dist in other[project]:
    +                    self.add(dist)
    +        else:
    +            raise TypeError("Can't add %r to environment" % (other,))
    +        return self
    +
    +    def __add__(self, other: Distribution | Environment):
    +        """Add an environment or distribution to an environment"""
    +        new = self.__class__([], platform=None, python=None)
    +        for env in self, other:
    +            new += env
    +        return new
    +
    +
    +# XXX backward compatibility
    +AvailableDistributions = Environment
    +
    +
    +class ExtractionError(RuntimeError):
    +    """An error occurred extracting a resource
    +
    +    The following attributes are available from instances of this exception:
    +
    +    manager
    +        The resource manager that raised this exception
    +
    +    cache_path
    +        The base directory for resource extraction
    +
    +    original_error
    +        The exception instance that caused extraction to fail
    +    """
    +
    +    manager: ResourceManager
    +    cache_path: str
    +    original_error: BaseException | None
    +
    +
    +class ResourceManager:
    +    """Manage resource extraction and packages"""
    +
    +    extraction_path: str | None = None
    +
    +    def __init__(self):
    +        self.cached_files = {}
    +
    +    def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
    +        """Does the named resource exist?"""
    +        return get_provider(package_or_requirement).has_resource(resource_name)
    +
    +    def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str):
    +        """Is the named resource an existing directory?"""
    +        return get_provider(package_or_requirement).resource_isdir(resource_name)
    +
    +    def resource_filename(
    +        self, package_or_requirement: _PkgReqType, resource_name: str
    +    ):
    +        """Return a true filesystem path for specified resource"""
    +        return get_provider(package_or_requirement).get_resource_filename(
    +            self, resource_name
    +        )
    +
    +    def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str):
    +        """Return a readable file-like object for specified resource"""
    +        return get_provider(package_or_requirement).get_resource_stream(
    +            self, resource_name
    +        )
    +
    +    def resource_string(
    +        self, package_or_requirement: _PkgReqType, resource_name: str
    +    ) -> bytes:
    +        """Return specified resource as :obj:`bytes`"""
    +        return get_provider(package_or_requirement).get_resource_string(
    +            self, resource_name
    +        )
    +
    +    def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str):
    +        """List the contents of the named resource directory"""
    +        return get_provider(package_or_requirement).resource_listdir(resource_name)
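+
+    # Usage sketch (illustrative; package and resource names are
+    # hypothetical):
+    #
+    #     mgr = ResourceManager()
+    #     if mgr.resource_exists('example_pkg', 'data/config.json'):
+    #         raw = mgr.resource_string('example_pkg', 'data/config.json')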
    +
    +    def extraction_error(self) -> NoReturn:
    +        """Give an error message for problems extracting file(s)"""
    +
    +        old_exc = sys.exc_info()[1]
    +        cache_path = self.extraction_path or get_default_cache()
    +
    +        tmpl = textwrap.dedent(
    +            """
    +            Can't extract file(s) to egg cache
    +
    +            The following error occurred while trying to extract file(s)
    +            to the Python egg cache:
    +
    +              {old_exc}
    +
    +            The Python egg cache directory is currently set to:
    +
    +              {cache_path}
    +
    +            Perhaps your account does not have write access to this directory?
    +            You can change the cache directory by setting the PYTHON_EGG_CACHE
    +            environment variable to point to an accessible directory.
    +            """
    +        ).lstrip()
    +        err = ExtractionError(tmpl.format(**locals()))
    +        err.manager = self
    +        err.cache_path = cache_path
    +        err.original_error = old_exc
    +        raise err
    +
    +    def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()):
    +        """Return absolute location in cache for `archive_name` and `names`
    +
    +        The parent directory of the resulting path will be created if it does
    +        not already exist.  `archive_name` should be the base filename of the
    +        enclosing egg (which may not be the name of the enclosing zipfile!),
    +        including its ".egg" extension.  `names`, if provided, should be a
    +        sequence of path name parts "under" the egg's extraction location.
    +
    +        This method should only be called by resource providers that need to
    +        obtain an extraction location, and only for names they intend to
    +        extract, as it tracks the generated names for possible cleanup later.
    +        """
    +        extract_path = self.extraction_path or get_default_cache()
    +        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
    +        try:
    +            _bypass_ensure_directory(target_path)
    +        except Exception:
    +            self.extraction_error()
    +
    +        self._warn_unsafe_extraction_path(extract_path)
    +
    +        self.cached_files[target_path] = True
    +        return target_path
    +
    +    @staticmethod
    +    def _warn_unsafe_extraction_path(path):
    +        """
    +        If the default extraction path is overridden and set to an insecure
    +        location, such as /tmp, it opens up an opportunity for an attacker to
    +        replace an extracted file with an unauthorized payload. Warn the user
    +        if a known insecure location is used.
    +
    +        See Distribute #375 for more details.
    +        """
    +        if os.name == 'nt' and not path.startswith(os.environ['windir']):
    +            # On Windows, permissions are generally restrictive by default
    +            #  and temp directories are not writable by other users, so
    +            #  bypass the warning.
    +            return
    +        mode = os.stat(path).st_mode
    +        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
    +            msg = (
    +                "Extraction path is writable by group/others "
    +                "and vulnerable to attack when "
    +                "used with get_resource_filename ({path}). "
    +                "Consider a more secure "
    +                "location (set with .set_extraction_path or the "
    +                "PYTHON_EGG_CACHE environment variable)."
    +            ).format(**locals())
    +            warnings.warn(msg, UserWarning)
    +
    +    def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
    +        """Perform any platform-specific postprocessing of `tempname`
    +
    +        This is where Mac header rewrites should be done; other platforms don't
    +        have anything special they should do.
    +
    +        Resource providers should call this method ONLY after successfully
    +        extracting a compressed resource.  They must NOT call it on resources
    +        that are already in the filesystem.
    +
    +        `tempname` is the current (temporary) name of the file, and `filename`
    +        is the name it will be renamed to by the caller after this routine
    +        returns.
    +        """
    +
    +        if os.name == 'posix':
    +            # Make the resource executable
    +            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
    +            os.chmod(tempname, mode)
    +
    +    def set_extraction_path(self, path: str):
    +        """Set the base path where resources will be extracted to, if needed.
    +
    +        If you do not call this routine before any extractions take place, the
    +        path defaults to the return value of ``get_default_cache()``.  (Which
    +        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
    +        platform-specific fallbacks.  See that routine's documentation for more
    +        details.)
    +
    +        Resources are extracted to subdirectories of this path based upon
    +        information given by the ``IResourceProvider``.  You may set this to a
    +        temporary directory, but then you must call ``cleanup_resources()`` to
    +        delete the extracted files when done.  There is no guarantee that
    +        ``cleanup_resources()`` will be able to remove all extracted files.
    +
    +        (Note: you may not change the extraction path for a given resource
    +        manager once resources have been extracted, unless you first call
    +        ``cleanup_resources()``.)
    +        """
    +        if self.cached_files:
    +            raise ValueError("Can't change extraction path, files already extracted")
    +
    +        self.extraction_path = path
    +
    +    def cleanup_resources(self, force: bool = False) -> list[str]:
    +        """
    +        Delete all extracted resource files and directories, returning a list
    +        of the file and directory names that could not be successfully removed.
    +        This function does not have any concurrency protection, so it should
    +        generally only be called when the extraction path is a temporary
    +        directory exclusive to a single process.  This method is not
    +        automatically called; you must call it explicitly or register it as an
    +        ``atexit`` function if you wish to ensure cleanup of a temporary
    +        directory used for extractions.
    +        """
    +        # XXX
    +        return []
    +
    +
    +def get_default_cache() -> str:
    +    """
    +    Return the ``PYTHON_EGG_CACHE`` environment variable
    +    or a platform-relevant user cache dir for an app
    +    named "Python-Eggs".
    +    """
    +    return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
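+
+# Illustrative override (hypothetical path); the environment variable, when
+# set, takes precedence over the per-user cache directory:
+#
+#     os.environ['PYTHON_EGG_CACHE'] = '/var/cache/my-eggs'
+#     assert get_default_cache() == '/var/cache/my-eggs'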
    +
    +
    +def safe_name(name: str):
    +    """Convert an arbitrary string to a standard distribution name
    +
    +    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    +    """
    +    return re.sub('[^A-Za-z0-9.]+', '-', name)
    +
    +
    +def safe_version(version: str):
    +    """
    +    Convert an arbitrary string to a standard version string
    +    """
    +    try:
    +        # normalize the version
    +        return str(_packaging_version.Version(version))
    +    except _packaging_version.InvalidVersion:
    +        version = version.replace(' ', '.')
    +        return re.sub('[^A-Za-z0-9.]+', '-', version)
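+
+# Deterministic examples of the two helpers above (illustrative):
+#
+#     safe_name('my project')      # -> 'my-project'
+#     safe_version('2.1-rc1')      # -> '2.1rc1'     (PEP 440 normalization)
+#     safe_version('1.0 beta 2')   # -> '1.0.beta.2' (fallback path)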
    +
    +
    +def _forgiving_version(version):
    +    """Fallback when ``safe_version`` is not safe enough
+    >>> parse_version(_forgiving_version('0.23ubuntu1'))
+    <Version('0.23.dev0+sanitized.ubuntu1')>
+    >>> parse_version(_forgiving_version('0.23-'))
+    <Version('0.23.dev0+sanitized')>
+    >>> parse_version(_forgiving_version('0.-_'))
+    <Version('0.dev0+sanitized')>
+    >>> parse_version(_forgiving_version('42.+?1'))
+    <Version('42.dev0+sanitized.1')>
+    >>> parse_version(_forgiving_version('hello world'))
+    <Version('0.dev0+sanitized.hello.world')>
+    """
    +    version = version.replace(' ', '.')
    +    match = _PEP440_FALLBACK.search(version)
    +    if match:
    +        safe = match["safe"]
    +        rest = version[len(safe) :]
    +    else:
    +        safe = "0"
    +        rest = version
    +    local = f"sanitized.{_safe_segment(rest)}".strip(".")
    +    return f"{safe}.dev0+{local}"
    +
    +
    +def _safe_segment(segment):
    +    """Convert an arbitrary string into a safe segment"""
    +    segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
    +    segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
    +    return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
    +
    +
    +def safe_extra(extra: str):
    +    """Convert an arbitrary string to a standard 'extra' name
    +
    +    Any runs of non-alphanumeric characters are replaced with a single '_',
    +    and the result is always lowercased.
    +    """
    +    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
    +
    +
    +def to_filename(name: str):
    +    """Convert a project or version name to its filename-escaped form
    +
    +    Any '-' characters are currently replaced with '_'.
    +    """
    +    return name.replace('-', '_')
    +
    +
    +def invalid_marker(text: str):
    +    """
    +    Validate text as a PEP 508 environment marker; return an exception
    +    if invalid or False otherwise.
    +    """
    +    try:
    +        evaluate_marker(text)
    +    except SyntaxError as e:
    +        e.filename = None
    +        e.lineno = None
    +        return e
    +    return False
    +
    +
    +def evaluate_marker(text: str, extra: str | None = None) -> bool:
    +    """
    +    Evaluate a PEP 508 environment marker.
    +    Return a boolean indicating the marker result in this environment.
    +    Raise SyntaxError if marker is invalid.
    +
    +    This implementation uses the 'pyparsing' module.
    +    """
    +    try:
    +        marker = _packaging_markers.Marker(text)
    +        return marker.evaluate()
    +    except _packaging_markers.InvalidMarker as e:
    +        raise SyntaxError(e) from e
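+
+# Illustrative calls; the first result depends on the running interpreter:
+#
+#     evaluate_marker('python_version >= "3.8"')   # True on Python 3.8+
+#     invalid_marker('python_version >=')          # returns a SyntaxError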
    +
    +
    +class NullProvider:
    +    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
    +
    +    egg_name: str | None = None
    +    egg_info: str | None = None
    +    loader: _LoaderProtocol | None = None
    +
    +    def __init__(self, module: _ModuleLike):
    +        self.loader = getattr(module, '__loader__', None)
    +        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
    +
    +    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
    +        return self._fn(self.module_path, resource_name)
    +
    +    def get_resource_stream(self, manager: ResourceManager, resource_name: str):
    +        return io.BytesIO(self.get_resource_string(manager, resource_name))
    +
    +    def get_resource_string(
    +        self, manager: ResourceManager, resource_name: str
    +    ) -> bytes:
    +        return self._get(self._fn(self.module_path, resource_name))
    +
    +    def has_resource(self, resource_name: str):
    +        return self._has(self._fn(self.module_path, resource_name))
    +
    +    def _get_metadata_path(self, name):
    +        return self._fn(self.egg_info, name)
    +
    +    def has_metadata(self, name: str) -> bool:
    +        if not self.egg_info:
    +            return False
    +
    +        path = self._get_metadata_path(name)
    +        return self._has(path)
    +
    +    def get_metadata(self, name: str):
    +        if not self.egg_info:
    +            return ""
    +        path = self._get_metadata_path(name)
    +        value = self._get(path)
    +        try:
    +            return value.decode('utf-8')
    +        except UnicodeDecodeError as exc:
    +            # Include the path in the error message to simplify
    +            # troubleshooting, and without changing the exception type.
    +            exc.reason += ' in {} file at path: {}'.format(name, path)
    +            raise
    +
    +    def get_metadata_lines(self, name: str) -> Iterator[str]:
    +        return yield_lines(self.get_metadata(name))
    +
    +    def resource_isdir(self, resource_name: str):
    +        return self._isdir(self._fn(self.module_path, resource_name))
    +
    +    def metadata_isdir(self, name: str) -> bool:
    +        return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name)))
    +
    +    def resource_listdir(self, resource_name: str):
    +        return self._listdir(self._fn(self.module_path, resource_name))
    +
    +    def metadata_listdir(self, name: str) -> list[str]:
    +        if self.egg_info:
    +            return self._listdir(self._fn(self.egg_info, name))
    +        return []
    +
    +    def run_script(self, script_name: str, namespace: dict[str, Any]):
    +        script = 'scripts/' + script_name
    +        if not self.has_metadata(script):
    +            raise ResolutionError(
    +                "Script {script!r} not found in metadata at {self.egg_info!r}".format(
    +                    **locals()
    +                ),
    +            )
    +
    +        script_text = self.get_metadata(script).replace('\r\n', '\n')
    +        script_text = script_text.replace('\r', '\n')
    +        script_filename = self._fn(self.egg_info, script)
    +        namespace['__file__'] = script_filename
    +        if os.path.exists(script_filename):
    +            source = _read_utf8_with_fallback(script_filename)
    +            code = compile(source, script_filename, 'exec')
    +            exec(code, namespace, namespace)
    +        else:
    +            from linecache import cache
    +
    +            cache[script_filename] = (
    +                len(script_text),
    +                0,
    +                script_text.split('\n'),
    +                script_filename,
    +            )
    +            script_code = compile(script_text, script_filename, 'exec')
    +            exec(script_code, namespace, namespace)
    +
    +    def _has(self, path) -> bool:
    +        raise NotImplementedError(
    +            "Can't perform this operation for unregistered loader type"
    +        )
    +
    +    def _isdir(self, path) -> bool:
    +        raise NotImplementedError(
    +            "Can't perform this operation for unregistered loader type"
    +        )
    +
    +    def _listdir(self, path) -> list[str]:
    +        raise NotImplementedError(
    +            "Can't perform this operation for unregistered loader type"
    +        )
    +
    +    def _fn(self, base: str | None, resource_name: str):
    +        if base is None:
    +            raise TypeError(
    +                "`base` parameter in `_fn` is `None`. Either override this method or check the parameter first."
    +            )
    +        self._validate_resource_path(resource_name)
    +        if resource_name:
    +            return os.path.join(base, *resource_name.split('/'))
    +        return base
    +
    +    @staticmethod
    +    def _validate_resource_path(path):
    +        """
    +        Validate the resource paths according to the docs.
    +        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
    +
    +        >>> warned = getfixture('recwarn')
    +        >>> warnings.simplefilter('always')
    +        >>> vrp = NullProvider._validate_resource_path
    +        >>> vrp('foo/bar.txt')
    +        >>> bool(warned)
    +        False
    +        >>> vrp('../foo/bar.txt')
    +        >>> bool(warned)
    +        True
    +        >>> warned.clear()
    +        >>> vrp('/foo/bar.txt')
    +        >>> bool(warned)
    +        True
    +        >>> vrp('foo/../../bar.txt')
    +        >>> bool(warned)
    +        True
    +        >>> warned.clear()
    +        >>> vrp('foo/f../bar.txt')
    +        >>> bool(warned)
    +        False
    +
    +        Windows path separators are straight-up disallowed.
    +        >>> vrp(r'\\foo/bar.txt')
    +        Traceback (most recent call last):
    +        ...
    +        ValueError: Use of .. or absolute path in a resource path \
    +is not allowed.
    +
    +        >>> vrp(r'C:\\foo/bar.txt')
    +        Traceback (most recent call last):
    +        ...
    +        ValueError: Use of .. or absolute path in a resource path \
    +is not allowed.
    +
    +        Blank values are allowed
    +
    +        >>> vrp('')
    +        >>> bool(warned)
    +        False
    +
    +        Non-string values are not.
    +
    +        >>> vrp(None)
    +        Traceback (most recent call last):
    +        ...
    +        AttributeError: ...
    +        """
    +        invalid = (
    +            os.path.pardir in path.split(posixpath.sep)
    +            or posixpath.isabs(path)
    +            or ntpath.isabs(path)
    +            or path.startswith("\\")
    +        )
    +        if not invalid:
    +            return
    +
    +        msg = "Use of .. or absolute path in a resource path is not allowed."
    +
    +        # Aggressively disallow Windows absolute paths
    +        if (path.startswith("\\") or ntpath.isabs(path)) and not posixpath.isabs(path):
    +            raise ValueError(msg)
    +
    +        # for compatibility, warn; in future
    +        # raise ValueError(msg)
    +        issue_warning(
    +            msg[:-1] + " and will raise exceptions in a future release.",
    +            DeprecationWarning,
    +        )
    +
    +    def _get(self, path) -> bytes:
    +        if hasattr(self.loader, 'get_data') and self.loader:
    +            # Already checked get_data exists
    +            return self.loader.get_data(path)  # type: ignore[attr-defined]
    +        raise NotImplementedError(
    +            "Can't perform this operation for loaders without 'get_data()'"
    +        )
    +
    +
    +register_loader_type(object, NullProvider)
    +
    +
    +def _parents(path):
    +    """
    +    yield all parents of path including path
    +    """
    +    last = None
    +    while path != last:
    +        yield path
    +        last = path
    +        path, _ = os.path.split(path)
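+
+# Concrete behavior of the helper above on a POSIX path (illustrative):
+#
+#     list(_parents('/a/b/c'))   # -> ['/a/b/c', '/a/b', '/a', '/']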
    +
    +
    +class EggProvider(NullProvider):
    +    """Provider based on a virtual filesystem"""
    +
    +    def __init__(self, module: _ModuleLike):
    +        super().__init__(module)
    +        self._setup_prefix()
    +
    +    def _setup_prefix(self):
    +        # Assume that metadata may be nested inside a "basket"
    +        # of multiple eggs and use module_path instead of .archive.
    +        eggs = filter(_is_egg_path, _parents(self.module_path))
    +        egg = next(eggs, None)
+        if egg is not None:
+            self._set_egg(egg)
    +
    +    def _set_egg(self, path: str):
    +        self.egg_name = os.path.basename(path)
    +        self.egg_info = os.path.join(path, 'EGG-INFO')
    +        self.egg_root = path
    +
    +
    +class DefaultProvider(EggProvider):
    +    """Provides access to package resources in the filesystem"""
    +
    +    def _has(self, path) -> bool:
    +        return os.path.exists(path)
    +
    +    def _isdir(self, path) -> bool:
    +        return os.path.isdir(path)
    +
    +    def _listdir(self, path):
    +        return os.listdir(path)
    +
    +    def get_resource_stream(self, manager: object, resource_name: str):
    +        return open(self._fn(self.module_path, resource_name), 'rb')
    +
    +    def _get(self, path) -> bytes:
    +        with open(path, 'rb') as stream:
    +            return stream.read()
    +
    +    @classmethod
    +    def _register(cls):
    +        loader_names = (
    +            'SourceFileLoader',
    +            'SourcelessFileLoader',
    +        )
    +        for name in loader_names:
    +            loader_cls = getattr(importlib.machinery, name, type(None))
    +            register_loader_type(loader_cls, cls)
    +
    +
    +DefaultProvider._register()
    +
    +
    +class EmptyProvider(NullProvider):
    +    """Provider that returns nothing for all requests"""
    +
    +    # A special case, we don't want all Providers inheriting from NullProvider to have a potentially None module_path
    +    module_path: str | None = None  # type: ignore[assignment]
    +
+    def _has(self, path) -> bool:
+        return False
+
+    def _isdir(self, path) -> bool:
+        return False
    +
    +    def _get(self, path) -> bytes:
    +        return b''
    +
    +    def _listdir(self, path):
    +        return []
    +
    +    def __init__(self):
    +        pass
    +
    +
    +empty_provider = EmptyProvider()
    +
    +
    +class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
    +    """
    +    zip manifest builder
    +    """
    +
    +    # `path` could be `StrPath | IO[bytes]` but that violates the LSP for `MemoizedZipManifests.load`
    +    @classmethod
    +    def build(cls, path: str):
    +        """
    +        Build a dictionary similar to the zipimport directory
    +        caches, except instead of tuples, store ZipInfo objects.
    +
    +        Use a platform-specific path separator (os.sep) for the path keys
    +        for compatibility with pypy on Windows.
    +        """
    +        with zipfile.ZipFile(path) as zfile:
    +            items = (
    +                (
    +                    name.replace('/', os.sep),
    +                    zfile.getinfo(name),
    +                )
    +                for name in zfile.namelist()
    +            )
    +            return dict(items)
    +
    +    load = build
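+
+    # Usage sketch (illustrative; the archive path is hypothetical).  Keys
+    # use os.sep; values are zipfile.ZipInfo objects.
+    #
+    #     manifest = ZipManifests.build('plugins/example-1.0.egg')
+    #     info = manifest.get('EGG-INFO' + os.sep + 'PKG-INFO')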
    +
    +
    +class MemoizedZipManifests(ZipManifests):
    +    """
    +    Memoized zipfile manifests.
    +    """
    +
    +    class manifest_mod(NamedTuple):
    +        manifest: dict[str, zipfile.ZipInfo]
    +        mtime: float
    +
    +    def load(self, path: str) -> dict[str, zipfile.ZipInfo]:  # type: ignore[override] # ZipManifests.load is a classmethod
    +        """
    +        Load a manifest at path or return a suitable manifest already loaded.
    +        """
    +        path = os.path.normpath(path)
    +        mtime = os.stat(path).st_mtime
    +
    +        if path not in self or self[path].mtime != mtime:
    +            manifest = self.build(path)
    +            self[path] = self.manifest_mod(manifest, mtime)
    +
    +        return self[path].manifest
    +
    +
    +class ZipProvider(EggProvider):
    +    """Resource support for zips and eggs"""
    +
    +    eagers: list[str] | None = None
    +    _zip_manifests = MemoizedZipManifests()
    +    # ZipProvider's loader should always be a zipimporter or equivalent
    +    loader: zipimport.zipimporter
    +
    +    def __init__(self, module: _ZipLoaderModule):
    +        super().__init__(module)
    +        self.zip_pre = self.loader.archive + os.sep
    +
    +    def _zipinfo_name(self, fspath):
    +        # Convert a virtual filename (full path to file) into a zipfile subpath
    +        # usable with the zipimport directory cache for our target archive
    +        fspath = fspath.rstrip(os.sep)
    +        if fspath == self.loader.archive:
    +            return ''
    +        if fspath.startswith(self.zip_pre):
    +            return fspath[len(self.zip_pre) :]
    +        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))
    +
    +    def _parts(self, zip_path):
    +        # Convert a zipfile subpath into an egg-relative path part list.
    +        # pseudo-fs path
    +        fspath = self.zip_pre + zip_path
    +        if fspath.startswith(self.egg_root + os.sep):
    +            return fspath[len(self.egg_root) + 1 :].split(os.sep)
    +        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))
    +
    +    @property
    +    def zipinfo(self):
    +        return self._zip_manifests.load(self.loader.archive)
    +
    +    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
    +        if not self.egg_name:
    +            raise NotImplementedError(
    +                "resource_filename() only supported for .egg, not .zip"
    +            )
    +        # no need to lock for extraction, since we use temp names
    +        zip_path = self._resource_to_zip(resource_name)
    +        eagers = self._get_eager_resources()
    +        if '/'.join(self._parts(zip_path)) in eagers:
    +            for name in eagers:
    +                self._extract_resource(manager, self._eager_to_zip(name))
    +        return self._extract_resource(manager, zip_path)
    +
    +    @staticmethod
    +    def _get_date_and_size(zip_stat):
    +        size = zip_stat.file_size
    +        # ymdhms+wday, yday, dst
    +        date_time = zip_stat.date_time + (0, 0, -1)
    +        # 1980 offset already done
    +        timestamp = time.mktime(date_time)
    +        return timestamp, size
    +
    +    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
    +    def _extract_resource(self, manager: ResourceManager, zip_path) -> str:  # noqa: C901
    +        if zip_path in self._index():
    +            for name in self._index()[zip_path]:
    +                last = self._extract_resource(manager, os.path.join(zip_path, name))
    +            # return the extracted directory name
    +            return os.path.dirname(last)
    +
    +        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
    +
    +        if not WRITE_SUPPORT:
    +            raise OSError(
    +                '"os.rename" and "os.unlink" are not supported on this platform'
    +            )
    +        try:
    +            if not self.egg_name:
    +                raise OSError(
    +                    '"egg_name" is empty. This likely means no egg could be found from the "module_path".'
    +                )
    +            real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
    +
    +            if self._is_current(real_path, zip_path):
    +                return real_path
    +
    +            outf, tmpnam = _mkstemp(
    +                ".$extract",
    +                dir=os.path.dirname(real_path),
    +            )
    +            os.write(outf, self.loader.get_data(zip_path))
    +            os.close(outf)
    +            utime(tmpnam, (timestamp, timestamp))
    +            manager.postprocess(tmpnam, real_path)
    +
    +            try:
    +                rename(tmpnam, real_path)
    +
    +            except OSError:
    +                if os.path.isfile(real_path):
    +                    if self._is_current(real_path, zip_path):
    +                        # the file became current since it was checked above,
    +                        #  so proceed.
    +                        return real_path
    +                    # Windows, del old file and retry
    +                    elif os.name == 'nt':
    +                        unlink(real_path)
    +                        rename(tmpnam, real_path)
    +                        return real_path
    +                raise
    +
    +        except OSError:
    +            # report a user-friendly error
    +            manager.extraction_error()
    +
    +        return real_path
    +
    +    def _is_current(self, file_path, zip_path):
    +        """
    +        Return True if the file_path is current for this zip_path
    +        """
    +        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
    +        if not os.path.isfile(file_path):
    +            return False
    +        stat = os.stat(file_path)
    +        if stat.st_size != size or stat.st_mtime != timestamp:
    +            return False
    +        # check that the contents match
    +        zip_contents = self.loader.get_data(zip_path)
    +        with open(file_path, 'rb') as f:
    +            file_contents = f.read()
    +        return zip_contents == file_contents
    +
    +    def _get_eager_resources(self):
    +        if self.eagers is None:
    +            eagers = []
    +            for name in ('native_libs.txt', 'eager_resources.txt'):
    +                if self.has_metadata(name):
    +                    eagers.extend(self.get_metadata_lines(name))
    +            self.eagers = eagers
    +        return self.eagers
    +
    +    def _index(self):
    +        try:
    +            return self._dirindex
    +        except AttributeError:
    +            ind = {}
    +            for path in self.zipinfo:
    +                parts = path.split(os.sep)
    +                while parts:
    +                    parent = os.sep.join(parts[:-1])
    +                    if parent in ind:
    +                        ind[parent].append(parts[-1])
    +                        break
    +                    else:
    +                        ind[parent] = [parts.pop()]
    +            self._dirindex = ind
    +            return ind
    +
    +    def _has(self, fspath) -> bool:
    +        zip_path = self._zipinfo_name(fspath)
    +        return zip_path in self.zipinfo or zip_path in self._index()
    +
    +    def _isdir(self, fspath) -> bool:
    +        return self._zipinfo_name(fspath) in self._index()
    +
    +    def _listdir(self, fspath):
    +        return list(self._index().get(self._zipinfo_name(fspath), ()))
    +
    +    def _eager_to_zip(self, resource_name: str):
    +        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
    +
    +    def _resource_to_zip(self, resource_name: str):
    +        return self._zipinfo_name(self._fn(self.module_path, resource_name))
    +
    +
    +register_loader_type(zipimport.zipimporter, ZipProvider)
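    +
    +# Editor's note -- a minimal sketch of how this registration is used,
    +# assuming a hypothetical zipped egg "Sample-1.0-py3.11.egg" on sys.path.
    +# A module imported from the egg gets a zipimport.zipimporter as its
    +# __loader__, so get_provider() resolves to ZipProvider via the registry:
    +#
    +#     import sample                      # imported from the .egg
    +#     provider = get_provider('sample')  # -> ZipProvider instance
    +#     data = provider.get_resource_string(_manager, 'data.txt')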
    +
    +
    +class FileMetadata(EmptyProvider):
    +    """Metadata handler for standalone PKG-INFO files
    +
    +    Usage::
    +
    +        metadata = FileMetadata("/path/to/PKG-INFO")
    +
    +    This provider rejects all data and metadata requests except for
    +    PKG-INFO, which is treated as existing and whose content is read
    +    from the file at the provided location.
    +    """
    +
    +    def __init__(self, path: StrPath):
    +        self.path = path
    +
    +    def _get_metadata_path(self, name):
    +        return self.path
    +
    +    def has_metadata(self, name: str) -> bool:
    +        return name == 'PKG-INFO' and os.path.isfile(self.path)
    +
    +    def get_metadata(self, name: str):
    +        if name != 'PKG-INFO':
    +            raise KeyError("No metadata except PKG-INFO is available")
    +
    +        with open(self.path, encoding='utf-8', errors="replace") as f:
    +            metadata = f.read()
    +        self._warn_on_replacement(metadata)
    +        return metadata
    +
    +    def _warn_on_replacement(self, metadata):
    +        replacement_char = '�'
    +        if replacement_char in metadata:
    +            tmpl = "{self.path} could not be properly decoded in UTF-8"
    +            msg = tmpl.format(**locals())
    +            warnings.warn(msg)
    +
    +    def get_metadata_lines(self, name: str) -> Iterator[str]:
    +        return yield_lines(self.get_metadata(name))
    +
    +
    +class PathMetadata(DefaultProvider):
    +    """Metadata provider for egg directories
    +
    +    Usage::
    +
    +        # Development eggs:
    +
    +        egg_info = "/path/to/PackageName.egg-info"
    +        base_dir = os.path.dirname(egg_info)
    +        metadata = PathMetadata(base_dir, egg_info)
    +        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
    +        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
    +
    +        # Unpacked egg directories:
    +
    +        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
    +        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
    +        dist = Distribution.from_filename(egg_path, metadata=metadata)
    +    """
    +
    +    def __init__(self, path: str, egg_info: str):
    +        self.module_path = path
    +        self.egg_info = egg_info
    +
    +
    +class EggMetadata(ZipProvider):
    +    """Metadata provider for .egg files"""
    +
    +    def __init__(self, importer: zipimport.zipimporter):
    +        """Create a metadata provider from a zipimporter"""
    +
    +        self.zip_pre = importer.archive + os.sep
    +        self.loader = importer
    +        if importer.prefix:
    +            self.module_path = os.path.join(importer.archive, importer.prefix)
    +        else:
    +            self.module_path = importer.archive
    +        self._setup_prefix()
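    +
    +    # Editor's note -- sketch of typical construction, assuming a
    +    # hypothetical archive "Sample-1.0-py3.11.egg":
    +    #
    +    #     importer = zipimport.zipimporter('Sample-1.0-py3.11.egg')
    +    #     metadata = EggMetadata(importer)
    +    #     metadata.has_metadata('PKG-INFO')  # True for a built egg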
    +
    +
    +_distribution_finders: dict[type, _DistFinderType[Any]] = _declare_state(
    +    'dict', '_distribution_finders', {}
    +)
    +
    +
    +def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]):
    +    """Register `distribution_finder` to find distributions in sys.path items
    +
    +    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    +    handler), and `distribution_finder` is a callable that, passed a path
    +    item and the importer instance, yields ``Distribution`` instances found on
    +    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    +    _distribution_finders[importer_type] = distribution_finder
    +
    +
    +def find_distributions(path_item: str, only: bool = False):
    +    """Yield distributions accessible via `path_item`"""
    +    importer = get_importer(path_item)
    +    finder = _find_adapter(_distribution_finders, importer)
    +    return finder(importer, path_item, only)
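    +
    +# Editor's note -- example usage of the finder machinery, assuming the
    +# (hypothetical) directory below exists and contains installed packages:
    +#
    +#     for dist in find_distributions('/usr/lib/python3/dist-packages'):
    +#         print(dist.project_name, dist.version)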
    +
    +
    +def find_eggs_in_zip(
    +    importer: zipimport.zipimporter, path_item: str, only: bool = False
    +) -> Iterator[Distribution]:
    +    """
    +    Find eggs in zip files; possibly multiple nested eggs.
    +    """
    +    if importer.archive.endswith('.whl'):
    +        # wheels are not supported with this finder
    +        # they don't have PKG-INFO metadata, and won't ever contain eggs
    +        return
    +    metadata = EggMetadata(importer)
    +    if metadata.has_metadata('PKG-INFO'):
    +        yield Distribution.from_filename(path_item, metadata=metadata)
    +    if only:
    +        # don't yield nested distros
    +        return
    +    for subitem in metadata.resource_listdir(''):
    +        if _is_egg_path(subitem):
    +            subpath = os.path.join(path_item, subitem)
    +            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
    +            yield from dists
    +        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
    +            subpath = os.path.join(path_item, subitem)
    +            submeta = EggMetadata(zipimport.zipimporter(subpath))
    +            submeta.egg_info = subpath
    +            yield Distribution.from_location(path_item, subitem, submeta)
    +
    +
    +register_finder(zipimport.zipimporter, find_eggs_in_zip)
    +
    +
    +def find_nothing(
    +    importer: object | None, path_item: str | None, only: bool | None = False
    +):
    +    return ()
    +
    +
    +register_finder(object, find_nothing)
    +
    +
    +def find_on_path(importer: object | None, path_item, only=False):
    +    """Yield distributions accessible on a sys.path directory"""
    +    path_item = _normalize_cached(path_item)
    +
    +    if _is_unpacked_egg(path_item):
    +        yield Distribution.from_filename(
    +            path_item,
    +            metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
    +        )
    +        return
    +
    +    entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))
    +
    +    # scan for .egg and .egg-info in directory
    +    for entry in sorted(entries):
    +        fullpath = os.path.join(path_item, entry)
    +        factory = dist_factory(path_item, entry, only)
    +        yield from factory(fullpath)
    +
    +
    +def dist_factory(path_item, entry, only):
    +    """Return a dist_factory for the given entry."""
    +    lower = entry.lower()
    +    is_egg_info = lower.endswith('.egg-info')
    +    is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
    +        os.path.join(path_item, entry)
    +    )
    +    is_meta = is_egg_info or is_dist_info
    +    return (
    +        distributions_from_metadata
    +        if is_meta
    +        else find_distributions
    +        if not only and _is_egg_path(entry)
    +        else resolve_egg_link
    +        if not only and lower.endswith('.egg-link')
    +        else NoDists()
    +    )
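    +
    +# Editor's note -- the chained conditional above dispatches on the
    +# entry's suffix, roughly:
    +#
    +#     *.egg-info, *.dist-info (dir)   -> distributions_from_metadata
    +#     *.egg       (unless only=True)  -> find_distributions
    +#     *.egg-link  (unless only=True)  -> resolve_egg_link
    +#     anything else                   -> NoDists()  (yields nothing)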
    +
    +
    +class NoDists:
    +    """
    +    >>> bool(NoDists())
    +    False
    +
    +    >>> list(NoDists()('anything'))
    +    []
    +    """
    +
    +    def __bool__(self):
    +        return False
    +
    +    def __call__(self, fullpath):
    +        return iter(())
    +
    +
    +def safe_listdir(path: StrOrBytesPath):
    +    """
    +    Attempt to list contents of path, but suppress some exceptions.
    +    """
    +    try:
    +        return os.listdir(path)
    +    except (PermissionError, NotADirectoryError):
    +        pass
    +    except OSError as e:
    +        # Ignore the directory if it does not exist, is not a directory,
    +        # or if permission is denied
    +        if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
    +            raise
    +    return ()
    +
    +
    +def distributions_from_metadata(path: str):
    +    root = os.path.dirname(path)
    +    if os.path.isdir(path):
    +        if len(os.listdir(path)) == 0:
    +            # empty metadata dir; skip
    +            return
    +        metadata: _MetadataType = PathMetadata(root, path)
    +    else:
    +        metadata = FileMetadata(path)
    +    entry = os.path.basename(path)
    +    yield Distribution.from_location(
    +        root,
    +        entry,
    +        metadata,
    +        precedence=DEVELOP_DIST,
    +    )
    +
    +
    +def non_empty_lines(path):
    +    """
    +    Yield non-empty lines from file at path
    +    """
    +    for line in _read_utf8_with_fallback(path).splitlines():
    +        line = line.strip()
    +        if line:
    +            yield line
    +
    +
    +def resolve_egg_link(path):
    +    """
    +    Given a path to an .egg-link, resolve distributions
    +    present in the referenced path.
    +    """
    +    referenced_paths = non_empty_lines(path)
    +    resolved_paths = (
    +        os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
    +    )
    +    dist_groups = map(find_distributions, resolved_paths)
    +    return next(dist_groups, ())
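    +
    +# Editor's note -- an .egg-link file (written by "pip install -e" /
    +# "setup.py develop") contains one or two non-empty lines: the referenced
    +# project directory and, optionally, a relative path inside it, e.g.:
    +#
    +#     /home/user/src/sample
    +#     .
    +#
    +# resolve_egg_link() joins each line against the .egg-link's own directory
    +# and yields the distributions found at the first referenced location.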
    +
    +
    +if hasattr(pkgutil, 'ImpImporter'):
    +    register_finder(pkgutil.ImpImporter, find_on_path)
    +
    +register_finder(importlib.machinery.FileFinder, find_on_path)
    +
    +_namespace_handlers: dict[type, _NSHandlerType[Any]] = _declare_state(
    +    'dict', '_namespace_handlers', {}
    +)
    +_namespace_packages: dict[str | None, list[str]] = _declare_state(
    +    'dict', '_namespace_packages', {}
    +)
    +
    +
    +def register_namespace_handler(
    +    importer_type: type[_T], namespace_handler: _NSHandlerType[_T]
    +):
    +    """Register `namespace_handler` to declare namespace packages
    +
    +    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    +    handler), and `namespace_handler` is a callable like this::
    +
    +        def namespace_handler(importer, path_entry, moduleName, module):
    +            # return a path_entry to use for child packages
    +
    +    Namespace handlers are only called if the importer object has already
    +    agreed that it can handle the relevant path item, and they should only
    +    return a subpath if the module __path__ does not already contain an
    +    equivalent subpath.  For an example namespace handler, see
    +    ``pkg_resources.file_ns_handler``.
    +    """
    +    _namespace_handlers[importer_type] = namespace_handler
    +
    +
    +def _handle_ns(packageName, path_item):
    +    """Ensure that named package includes a subpath of path_item (if needed)"""
    +
    +    importer = get_importer(path_item)
    +    if importer is None:
    +        return None
    +
    +    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    +    try:
    +        spec = importer.find_spec(packageName)
    +    except AttributeError:
    +        # capture warnings due to #1111
    +        with warnings.catch_warnings():
    +            warnings.simplefilter("ignore")
    +            loader = importer.find_module(packageName)
    +    else:
    +        loader = spec.loader if spec else None
    +
    +    if loader is None:
    +        return None
    +    module = sys.modules.get(packageName)
    +    if module is None:
    +        module = sys.modules[packageName] = types.ModuleType(packageName)
    +        module.__path__ = []
    +        _set_parent_ns(packageName)
    +    elif not hasattr(module, '__path__'):
    +        raise TypeError("Not a package:", packageName)
    +    handler = _find_adapter(_namespace_handlers, importer)
    +    subpath = handler(importer, path_item, packageName, module)
    +    if subpath is not None:
    +        path = module.__path__
    +        path.append(subpath)
    +        importlib.import_module(packageName)
    +        _rebuild_mod_path(path, packageName, module)
    +    return subpath
    +
    +
    +def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType):
    +    """
    +    Rebuild module.__path__ ensuring that all entries are ordered
    +    corresponding to their sys.path order
    +    """
    +    sys_path = [_normalize_cached(p) for p in sys.path]
    +
    +    def safe_sys_path_index(entry):
    +        """
    +        Workaround for #520 and #513.
    +        """
    +        try:
    +            return sys_path.index(entry)
    +        except ValueError:
    +            return float('inf')
    +
    +    def position_in_sys_path(path):
    +        """
    +        Return the ordinal of the path based on its position in sys.path
    +        """
    +        path_parts = path.split(os.sep)
    +        module_parts = package_name.count('.') + 1
    +        parts = path_parts[:-module_parts]
    +        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
    +
    +    new_path = sorted(orig_path, key=position_in_sys_path)
    +    new_path = [_normalize_cached(p) for p in new_path]
    +
    +    if isinstance(module.__path__, list):
    +        module.__path__[:] = new_path
    +    else:
    +        module.__path__ = new_path
    +
    +
    +def declare_namespace(packageName: str):
    +    """Declare that package 'packageName' is a namespace package"""
    +
    +    msg = (
    +        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
    +        "Implementing implicit namespace packages (as specified in PEP 420) "
    +        "is preferred to `pkg_resources.declare_namespace`. "
    +        "See https://setuptools.pypa.io/en/latest/references/"
    +        "keywords.html#keyword-namespace-packages"
    +    )
    +    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    +
    +    _imp.acquire_lock()
    +    try:
    +        if packageName in _namespace_packages:
    +            return
    +
    +        path: MutableSequence[str] = sys.path
    +        parent, _, _ = packageName.rpartition('.')
    +
    +        if parent:
    +            declare_namespace(parent)
    +            if parent not in _namespace_packages:
    +                __import__(parent)
    +            try:
    +                path = sys.modules[parent].__path__
    +            except AttributeError as e:
    +                raise TypeError("Not a package:", parent) from e
    +
    +        # Track what packages are namespaces, so when new path items are added,
    +        # they can be updated
    +        _namespace_packages.setdefault(parent or None, []).append(packageName)
    +        _namespace_packages.setdefault(packageName, [])
    +
    +        for path_item in path:
    +            # Ensure all the parent's path items are reflected in the child,
    +            # if they apply
    +            _handle_ns(packageName, path_item)
    +
    +    finally:
    +        _imp.release_lock()
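    +
    +# Editor's note -- the legacy pattern this function supports is a
    +# namespace package __init__.py containing only:
    +#
    +#     __import__('pkg_resources').declare_namespace(__name__)
    +#
    +# PEP 420 implicit namespace packages (no __init__.py at all) are the
    +# preferred replacement, as the deprecation warning above states.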
    +
    +
    +def fixup_namespace_packages(path_item: str, parent: str | None = None):
    +    """Ensure that previously-declared namespace packages include path_item"""
    +    _imp.acquire_lock()
    +    try:
    +        for package in _namespace_packages.get(parent, ()):
    +            subpath = _handle_ns(package, path_item)
    +            if subpath:
    +                fixup_namespace_packages(subpath, package)
    +    finally:
    +        _imp.release_lock()
    +
    +
    +def file_ns_handler(
    +    importer: object,
    +    path_item: StrPath,
    +    packageName: str,
    +    module: types.ModuleType,
    +):
    +    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    +
    +    subpath = os.path.join(path_item, packageName.split('.')[-1])
    +    normalized = _normalize_cached(subpath)
    +    for item in module.__path__:
    +        if _normalize_cached(item) == normalized:
    +            break
    +    else:
    +        # Only return the path if it's not already there
    +        return subpath
    +
    +
    +if hasattr(pkgutil, 'ImpImporter'):
    +    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
    +
    +register_namespace_handler(zipimport.zipimporter, file_ns_handler)
    +register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler)
    +
    +
    +def null_ns_handler(
    +    importer: object,
    +    path_item: str | None,
    +    packageName: str | None,
    +    module: _ModuleLike | None,
    +):
    +    return None
    +
    +
    +register_namespace_handler(object, null_ns_handler)
    +
    +
    +@overload
    +def normalize_path(filename: StrPath) -> str: ...
    +@overload
    +def normalize_path(filename: BytesPath) -> bytes: ...
    +def normalize_path(filename: StrOrBytesPath):
    +    """Normalize a file/dir name for comparison purposes"""
    +    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
    +
    +
    +def _cygwin_patch(filename: StrOrBytesPath):  # pragma: nocover
    +    """
    +    Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
    +    symlink components. Using
    +    os.path.abspath() works around this limitation. A fix in os.getcwd()
    +    would probably be better, in Cygwin even more so, except
    +    that this seems to be by design...
    +    """
    +    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
    +
    +
    +if TYPE_CHECKING:
    +    # https://github.com/python/mypy/issues/16261
    +    # https://github.com/python/typeshed/issues/6347
    +    @overload
    +    def _normalize_cached(filename: StrPath) -> str: ...
    +    @overload
    +    def _normalize_cached(filename: BytesPath) -> bytes: ...
    +    def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
    +else:
    +
    +    @functools.lru_cache(maxsize=None)
    +    def _normalize_cached(filename):
    +        return normalize_path(filename)
    +
    +
    +def _is_egg_path(path):
    +    """
    +    Determine if given path appears to be an egg.
    +    """
    +    return _is_zip_egg(path) or _is_unpacked_egg(path)
    +
    +
    +def _is_zip_egg(path):
    +    return (
    +        path.lower().endswith('.egg')
    +        and os.path.isfile(path)
    +        and zipfile.is_zipfile(path)
    +    )
    +
    +
    +def _is_unpacked_egg(path):
    +    """
    +    Determine if given path appears to be an unpacked egg.
    +    """
    +    return path.lower().endswith('.egg') and os.path.isfile(
    +        os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    +    )
    +
    +
    +def _set_parent_ns(packageName):
    +    parts = packageName.split('.')
    +    name = parts.pop()
    +    if parts:
    +        parent = '.'.join(parts)
    +        setattr(sys.modules[parent], name, sys.modules[packageName])
    +
    +
    +MODULE = re.compile(r"\w+(\.\w+)*$").match
    +EGG_NAME = re.compile(
    +    r"""
    +    (?P<name>[^-]+) (
    +        -(?P<ver>[^-]+) (
    +            -py(?P<pyver>[^-]+) (
    +                -(?P<plat>.+)
    +            )?
    +        )?
    +    )?
    +    """,
    +    re.VERBOSE | re.IGNORECASE,
    +).match
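    +
    +# Editor's note -- EGG_NAME parses egg basenames (extension stripped):
    +# "Sample-1.0-py3.11-win32" yields name='Sample', ver='1.0',
    +# pyver='3.11', plat='win32'. The trailing groups are optional, so a
    +# bare project name such as "Sample" also matches.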
    +
    +
    +class EntryPoint:
    +    """Object representing an advertised importable object"""
    +
    +    def __init__(
    +        self,
    +        name: str,
    +        module_name: str,
    +        attrs: Iterable[str] = (),
    +        extras: Iterable[str] = (),
    +        dist: Distribution | None = None,
    +    ):
    +        if not MODULE(module_name):
    +            raise ValueError("Invalid module name", module_name)
    +        self.name = name
    +        self.module_name = module_name
    +        self.attrs = tuple(attrs)
    +        self.extras = tuple(extras)
    +        self.dist = dist
    +
    +    def __str__(self):
    +        s = "%s = %s" % (self.name, self.module_name)
    +        if self.attrs:
    +            s += ':' + '.'.join(self.attrs)
    +        if self.extras:
    +            s += ' [%s]' % ','.join(self.extras)
    +        return s
    +
    +    def __repr__(self):
    +        return "EntryPoint.parse(%r)" % str(self)
    +
    +    @overload
    +    def load(
    +        self,
    +        require: Literal[True] = True,
    +        env: Environment | None = None,
    +        installer: _InstallerType | None = None,
    +    ) -> _ResolvedEntryPoint: ...
    +    @overload
    +    def load(
    +        self,
    +        require: Literal[False],
    +        *args: Any,
    +        **kwargs: Any,
    +    ) -> _ResolvedEntryPoint: ...
    +    def load(
    +        self,
    +        require: bool = True,
    +        *args: Environment | _InstallerType | None,
    +        **kwargs: Environment | _InstallerType | None,
    +    ) -> _ResolvedEntryPoint:
    +        """
    +        Require packages for this EntryPoint, then resolve it.
    +        """
    +        if not require or args or kwargs:
    +            warnings.warn(
    +                "Parameters to load are deprecated.  Call .resolve and "
    +                ".require separately.",
    +                PkgResourcesDeprecationWarning,
    +                stacklevel=2,
    +            )
    +        if require:
    +            # We could pass `env` and `installer` directly,
    +            # but keeping `*args` and `**kwargs` for backwards compatibility
    +            self.require(*args, **kwargs)  # type: ignore
    +        return self.resolve()
    +
    +    def resolve(self) -> _ResolvedEntryPoint:
    +        """
    +        Resolve the entry point from its module and attrs.
    +        """
    +        module = __import__(self.module_name, fromlist=['__name__'], level=0)
    +        try:
    +            return functools.reduce(getattr, self.attrs, module)
    +        except AttributeError as exc:
    +            raise ImportError(str(exc)) from exc
    +
    +    def require(
    +        self,
    +        env: Environment | None = None,
    +        installer: _InstallerType | None = None,
    +    ):
    +        if not self.dist:
    +            error_cls = UnknownExtra if self.extras else AttributeError
    +            raise error_cls("Can't require() without a distribution", self)
    +
    +        # Get the requirements for this entry point with all its extras and
    +        # then resolve them. We have to pass `extras` along when resolving so
    +        # that the working set knows what extras we want. Otherwise, for
    +        # dist-info distributions, the working set will assume that the
    +        # requirements for that extra are purely optional and skip over them.
    +        reqs = self.dist.requires(self.extras)
    +        items = working_set.resolve(reqs, env, installer, extras=self.extras)
    +        list(map(working_set.add, items))
    +
    +    pattern = re.compile(
    +        r'\s*'
    +        r'(?P<name>.+?)\s*'
    +        r'=\s*'
    +        r'(?P<module>[\w.]+)\s*'
    +        r'(:\s*(?P<attr>[\w.]+))?\s*'
    +        r'(?P<extras>\[.*\])?\s*$'
    +    )
    +
    +    @classmethod
    +    def parse(cls, src: str, dist: Distribution | None = None):
    +        """Parse a single entry point from string `src`
    +
    +        Entry point syntax follows the form::
    +
    +            name = some.module:some.attr [extra1, extra2]
    +
    +        The entry name and module name are required, but the ``:attrs`` and
    +        ``[extras]`` parts are optional
    +        """
    +        m = cls.pattern.match(src)
    +        if not m:
    +            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
    +            raise ValueError(msg, src)
    +        res = m.groupdict()
    +        extras = cls._parse_extras(res['extras'])
    +        attrs = res['attr'].split('.') if res['attr'] else ()
    +        return cls(res['name'], res['module'], attrs, extras, dist)
    +
    +    @classmethod
    +    def _parse_extras(cls, extras_spec):
    +        if not extras_spec:
    +            return ()
    +        req = Requirement.parse('x' + extras_spec)
    +        if req.specs:
    +            raise ValueError
    +        return req.extras
    +
    +    @classmethod
    +    def parse_group(
    +        cls,
    +        group: str,
    +        lines: _NestedStr,
    +        dist: Distribution | None = None,
    +    ):
    +        """Parse an entry point group"""
    +        if not MODULE(group):
    +            raise ValueError("Invalid group name", group)
    +        this: dict[str, Self] = {}
    +        for line in yield_lines(lines):
    +            ep = cls.parse(line, dist)
    +            if ep.name in this:
    +                raise ValueError("Duplicate entry point", group, ep.name)
    +            this[ep.name] = ep
    +        return this
    +
    +    @classmethod
    +    def parse_map(
    +        cls,
    +        data: str | Iterable[str] | dict[str, str | Iterable[str]],
    +        dist: Distribution | None = None,
    +    ):
    +        """Parse a map of entry point groups"""
    +        _data: Iterable[tuple[str | None, str | Iterable[str]]]
    +        if isinstance(data, dict):
    +            _data = data.items()
    +        else:
    +            _data = split_sections(data)
    +        maps: dict[str, dict[str, Self]] = {}
    +        for group, lines in _data:
    +            if group is None:
    +                if not lines:
    +                    continue
    +                raise ValueError("Entry points must be listed in groups")
    +            group = group.strip()
    +            if group in maps:
    +                raise ValueError("Duplicate group name", group)
    +            maps[group] = cls.parse_group(group, lines, dist)
    +        return maps
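    +
    +    # Editor's note -- a short, illustrative parse example:
    +    #
    +    #     ep = EntryPoint.parse('mycmd = mypkg.cli:main [extra1,extra2]')
    +    #     (ep.name, ep.module_name, ep.attrs, ep.extras)
    +    #     -> ('mycmd', 'mypkg.cli', ('main',), ('extra1', 'extra2'))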
    +
    +
    +def _version_from_file(lines):
    +    """
    +    Given an iterable of lines from a Metadata file, return
    +    the value of the Version field, if present, or None otherwise.
    +    """
    +
    +    def is_version_line(line):
    +        return line.lower().startswith('version:')
    +
    +    version_lines = filter(is_version_line, lines)
    +    line = next(iter(version_lines), '')
    +    _, _, value = line.partition(':')
    +    return safe_version(value.strip()) or None
    +
    +
    +class Distribution:
    +    """Wrap an actual or potential sys.path entry w/metadata"""
    +
    +    PKG_INFO = 'PKG-INFO'
    +
    +    def __init__(
    +        self,
    +        location: str | None = None,
    +        metadata: _MetadataType = None,
    +        project_name: str | None = None,
    +        version: str | None = None,
    +        py_version: str | None = PY_MAJOR,
    +        platform: str | None = None,
    +        precedence: int = EGG_DIST,
    +    ):
    +        self.project_name = safe_name(project_name or 'Unknown')
    +        if version is not None:
    +            self._version = safe_version(version)
    +        self.py_version = py_version
    +        self.platform = platform
    +        self.location = location
    +        self.precedence = precedence
    +        self._provider = metadata or empty_provider
    +
    +    @classmethod
    +    def from_location(
    +        cls,
    +        location: str,
    +        basename: StrPath,
    +        metadata: _MetadataType = None,
    +        **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
    +    ) -> Distribution:
    +        project_name, version, py_version, platform = [None] * 4
    +        basename, ext = os.path.splitext(basename)
    +        if ext.lower() in _distributionImpl:
    +            cls = _distributionImpl[ext.lower()]
    +
    +            match = EGG_NAME(basename)
    +            if match:
    +                project_name, version, py_version, platform = match.group(
    +                    'name', 'ver', 'pyver', 'plat'
    +                )
    +        return cls(
    +            location,
    +            metadata,
    +            project_name=project_name,
    +            version=version,
    +            py_version=py_version,
    +            platform=platform,
    +            **kw,
    +        )._reload_version()
    +
    +    def _reload_version(self):
    +        return self
    +
    +    @property
    +    def hashcmp(self):
    +        return (
    +            self._forgiving_parsed_version,
    +            self.precedence,
    +            self.key,
    +            self.location,
    +            self.py_version or '',
    +            self.platform or '',
    +        )
    +
    +    def __hash__(self):
    +        return hash(self.hashcmp)
    +
    +    def __lt__(self, other: Distribution):
    +        return self.hashcmp < other.hashcmp
    +
    +    def __le__(self, other: Distribution):
    +        return self.hashcmp <= other.hashcmp
    +
    +    def __gt__(self, other: Distribution):
    +        return self.hashcmp > other.hashcmp
    +
    +    def __ge__(self, other: Distribution):
    +        return self.hashcmp >= other.hashcmp
    +
    +    def __eq__(self, other: object):
    +        if not isinstance(other, self.__class__):
    +            # It's not a Distribution, so they are not equal
    +            return False
    +        return self.hashcmp == other.hashcmp
    +
    +    def __ne__(self, other: object):
    +        return not self == other
    +
    +    # These properties have to be lazy so that we don't have to load any
    +    # metadata until/unless it's actually needed.  (i.e., some distributions
    +    # may not know their name or version without loading PKG-INFO)
    +
    +    @property
    +    def key(self):
    +        try:
    +            return self._key
    +        except AttributeError:
    +            self._key = key = self.project_name.lower()
    +            return key
    +
    +    @property
    +    def parsed_version(self):
    +        if not hasattr(self, "_parsed_version"):
    +            try:
    +                self._parsed_version = parse_version(self.version)
    +            except _packaging_version.InvalidVersion as ex:
    +                info = f"(package: {self.project_name})"
    +                if hasattr(ex, "add_note"):
    +                    ex.add_note(info)  # PEP 678
    +                    raise
    +                raise _packaging_version.InvalidVersion(f"{str(ex)} {info}") from None
    +
    +        return self._parsed_version
    +
    +    @property
    +    def _forgiving_parsed_version(self):
    +        try:
    +            return self.parsed_version
    +        except _packaging_version.InvalidVersion as ex:
    +            self._parsed_version = parse_version(_forgiving_version(self.version))
    +
    +            notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
    +            msg = f"""!!\n\n
    +            *************************************************************************
    +            {str(ex)}\n{notes}
    +
    +            This is a long overdue deprecation.
    +            For the time being, `pkg_resources` will use `{self._parsed_version}`
    +            as a replacement to avoid breaking existing environments,
    +            but no future compatibility is guaranteed.
    +
    +            If you maintain package {self.project_name} you should implement
    +            the changes needed to make the project comply with PEP 440 immediately.
    +            *************************************************************************
    +            \n\n!!
    +            """
    +            warnings.warn(msg, DeprecationWarning)
    +
    +            return self._parsed_version
    +
    +    @property
    +    def version(self):
    +        try:
    +            return self._version
    +        except AttributeError as e:
    +            version = self._get_version()
    +            if version is None:
    +                path = self._get_metadata_path_for_display(self.PKG_INFO)
    +                msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
    +                    self.PKG_INFO, path
    +                )
    +                raise ValueError(msg, self) from e
    +
    +            return version
    +
    +    @property
    +    def _dep_map(self):
    +        """
    +        A map of extra to its list of (direct) requirements
    +        for this distribution, including the null extra.
    +        """
    +        try:
    +            return self.__dep_map
    +        except AttributeError:
    +            self.__dep_map = self._filter_extras(self._build_dep_map())
    +        return self.__dep_map
    +
    +    @staticmethod
    +    def _filter_extras(dm: dict[str | None, list[Requirement]]):
    +        """
    +        Given a mapping of extras to dependencies, strip off
    +        environment markers and filter out any dependencies
    +        not matching the markers.
    +        """
    +        for extra in list(filter(None, dm)):
    +            new_extra: str | None = extra
    +            reqs = dm.pop(extra)
    +            new_extra, _, marker = extra.partition(':')
    +            fails_marker = marker and (
    +                invalid_marker(marker) or not evaluate_marker(marker)
    +            )
    +            if fails_marker:
    +                reqs = []
    +            new_extra = safe_extra(new_extra) or None
    +
    +            dm.setdefault(new_extra, []).extend(reqs)
    +        return dm
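    +
    +    # Editor's note -- egg-info requires.txt may qualify a section with
    +    # an environment marker after a colon, e.g. [ssl:sys_platform=='win32'];
    +    # _filter_extras() strips the marker and drops the section's
    +    # requirements when the marker does not evaluate to true for the
    +    # running interpreter.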
    +
    +    def _build_dep_map(self):
    +        dm = {}
    +        for name in 'requires.txt', 'depends.txt':
    +            for extra, reqs in split_sections(self._get_metadata(name)):
    +                dm.setdefault(extra, []).extend(parse_requirements(reqs))
    +        return dm
    +
    +    def requires(self, extras: Iterable[str] = ()):
    +        """List of Requirements needed for this distro if `extras` are used"""
    +        dm = self._dep_map
    +        deps: list[Requirement] = []
    +        deps.extend(dm.get(None, ()))
    +        for ext in extras:
    +            try:
    +                deps.extend(dm[safe_extra(ext)])
    +            except KeyError as e:
    +                raise UnknownExtra(
    +                    "%s has no such extra feature %r" % (self, ext)
    +                ) from e
    +        return deps
    +
    +    def _get_metadata_path_for_display(self, name):
    +        """
    +        Return the path to the given metadata file, if available.
    +        """
    +        try:
    +            # We need to access _get_metadata_path() on the provider object
    +            # directly rather than through this class's __getattr__()
    +            # since _get_metadata_path() is marked private.
    +            path = self._provider._get_metadata_path(name)
    +
    +        # Handle exceptions e.g. in case the distribution's metadata
    +        # provider doesn't support _get_metadata_path().
    +        except Exception:
    +            return '[could not detect]'
    +
    +        return path
    +
    +    def _get_metadata(self, name):
    +        if self.has_metadata(name):
    +            yield from self.get_metadata_lines(name)
    +
    +    def _get_version(self):
    +        lines = self._get_metadata(self.PKG_INFO)
    +        return _version_from_file(lines)
    +
    +    def activate(self, path: list[str] | None = None, replace: bool = False):
    +        """Ensure distribution is importable on `path` (default=sys.path)"""
    +        if path is None:
    +            path = sys.path
    +        self.insert_on(path, replace=replace)
    +        if path is sys.path and self.location is not None:
    +            fixup_namespace_packages(self.location)
    +            for pkg in self._get_metadata('namespace_packages.txt'):
    +                if pkg in sys.modules:
    +                    declare_namespace(pkg)
    +
    +    def egg_name(self):
    +        """Return what this distribution's standard .egg filename should be"""
    +        filename = "%s-%s-py%s" % (
    +            to_filename(self.project_name),
    +            to_filename(self.version),
    +            self.py_version or PY_MAJOR,
    +        )
    +
    +        if self.platform:
    +            filename += '-' + self.platform
    +        return filename
    +
    +    def __repr__(self):
    +        if self.location:
    +            return "%s (%s)" % (self, self.location)
    +        else:
    +            return str(self)
    +
    +    def __str__(self):
    +        try:
    +            version = getattr(self, 'version', None)
    +        except ValueError:
    +            version = None
    +        version = version or "[unknown version]"
    +        return "%s %s" % (self.project_name, version)
    +
    +    def __getattr__(self, attr):
    +        """Delegate all unrecognized public attributes to .metadata provider"""
    +        if attr.startswith('_'):
    +            raise AttributeError(attr)
    +        return getattr(self._provider, attr)
    +
    +    def __dir__(self):
    +        return list(
    +            set(super().__dir__())
    +            | set(attr for attr in self._provider.__dir__() if not attr.startswith('_'))
    +        )
    +
    +    @classmethod
    +    def from_filename(
    +        cls,
    +        filename: StrPath,
    +        metadata: _MetadataType = None,
    +        **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
    +    ):
    +        return cls.from_location(
    +            _normalize_cached(filename), os.path.basename(filename), metadata, **kw
    +        )
    +
    +    def as_requirement(self):
    +        """Return a ``Requirement`` that matches this distribution exactly"""
    +        if isinstance(self.parsed_version, _packaging_version.Version):
    +            spec = "%s==%s" % (self.project_name, self.parsed_version)
    +        else:
    +            spec = "%s===%s" % (self.project_name, self.parsed_version)
    +
    +        return Requirement.parse(spec)
    +
    +    def load_entry_point(self, group: str, name: str) -> _ResolvedEntryPoint:
    +        """Return the `name` entry point of `group` or raise ImportError"""
    +        ep = self.get_entry_info(group, name)
    +        if ep is None:
    +            raise ImportError("Entry point %r not found" % ((group, name),))
    +        return ep.load()
    +
    +    @overload
    +    def get_entry_map(self, group: None = None) -> dict[str, dict[str, EntryPoint]]: ...
    +    @overload
    +    def get_entry_map(self, group: str) -> dict[str, EntryPoint]: ...
    +    def get_entry_map(self, group: str | None = None):
    +        """Return the entry point map for `group`, or the full entry map"""
    +        if not hasattr(self, "_ep_map"):
    +            self._ep_map = EntryPoint.parse_map(
    +                self._get_metadata('entry_points.txt'), self
    +            )
    +        if group is not None:
    +            return self._ep_map.get(group, {})
    +        return self._ep_map
    +
    +    def get_entry_info(self, group: str, name: str):
    +        """Return the EntryPoint object for `group`+`name`, or ``None``"""
    +        return self.get_entry_map(group).get(name)
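    +
    +    # Editor's note -- typical console-script resolution (group and
    +    # script name are illustrative):
    +    #
    +    #     dist = get_distribution('sample')
    +    #     main = dist.load_entry_point('console_scripts', 'sample-cli')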
    +
    +    # FIXME: 'Distribution.insert_on' is too complex (13)
    +    def insert_on(  # noqa: C901
    +        self,
    +        path: list[str],
    +        loc=None,
    +        replace: bool = False,
    +    ):
    +        """Ensure self.location is on path
    +
    +        If replace=False (default):
    +            - If location is already in path anywhere, do nothing.
    +            - Else:
    +              - If it's an egg and its parent directory is on path,
    +                insert just ahead of the parent.
    +              - Else: add to the end of path.
    +        If replace=True:
    +            - If location is already on path anywhere (non-eggs),
    +              or at higher priority than its parent (eggs),
    +              do nothing.
    +            - Else:
    +              - If it's an egg and its parent directory is on path,
    +                insert just ahead of the parent,
    +                removing any lower-priority entries.
    +              - Else: add it to the front of path.
    +        """
    +
    +        loc = loc or self.location
    +        if not loc:
    +            return
    +
    +        nloc = _normalize_cached(loc)
    +        bdir = os.path.dirname(nloc)
    +        npath = [(p and _normalize_cached(p) or p) for p in path]
    +
    +        for p, item in enumerate(npath):
    +            if item == nloc:
    +                if replace:
    +                    break
    +                else:
    +                    # don't modify path (even removing duplicates) if
    +                    # found and not replace
    +                    return
    +            elif item == bdir and self.precedence == EGG_DIST:
    +                # if it's an .egg, give it precedence over its directory
    +                # UNLESS it's already been added to sys.path and replace=False
    +                if (not replace) and nloc in npath[p:]:
    +                    return
    +                if path is sys.path:
    +                    self.check_version_conflict()
    +                path.insert(p, loc)
    +                npath.insert(p, nloc)
    +                break
    +        else:
    +            if path is sys.path:
    +                self.check_version_conflict()
    +            if replace:
    +                path.insert(0, loc)
    +            else:
    +                path.append(loc)
    +            return
    +
    +        # p is the spot where we found or inserted loc; now remove duplicates
    +        while True:
    +            try:
    +                np = npath.index(nloc, p + 1)
    +            except ValueError:
    +                break
    +            else:
    +                del npath[np], path[np]
    +                # ha!
    +                p = np
    +
    +        return
    +
    +    def check_version_conflict(self):
    +        if self.key == 'setuptools':
    +            # ignore the inevitable setuptools self-conflicts  :(
    +            return
    +
    +        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
    +        loc = normalize_path(self.location)
    +        for modname in self._get_metadata('top_level.txt'):
    +            if (
    +                modname not in sys.modules
    +                or modname in nsp
    +                or modname in _namespace_packages
    +            ):
    +                continue
    +            if modname in ('pkg_resources', 'setuptools', 'site'):
    +                continue
    +            fn = getattr(sys.modules[modname], '__file__', None)
    +            if fn and (
    +                normalize_path(fn).startswith(loc) or fn.startswith(self.location)
    +            ):
    +                continue
    +            issue_warning(
    +                "Module %s was already imported from %s, but %s is being added"
    +                " to sys.path" % (modname, fn, self.location),
    +            )
    +
    +    def has_version(self):
    +        try:
    +            self.version
    +        except ValueError:
    +            issue_warning("Unbuilt egg for " + repr(self))
    +            return False
    +        except SystemError:
    +            # TODO: remove this except clause when python/cpython#103632 is fixed.
    +            return False
    +        return True
    +
    +    def clone(self, **kw: str | int | IResourceProvider | None):
    +        """Copy this distribution, substituting in any changed keyword args"""
    +        names = 'project_name version py_version platform location precedence'
    +        for attr in names.split():
    +            kw.setdefault(attr, getattr(self, attr, None))
    +        kw.setdefault('metadata', self._provider)
    +        # Unsafely unpacking. But keeping **kw for backwards and subclassing compatibility
    +        return self.__class__(**kw)  # type:ignore[arg-type]
    +
    +    @property
    +    def extras(self):
    +        return [dep for dep in self._dep_map if dep]
    +
    +
    +class EggInfoDistribution(Distribution):
    +    def _reload_version(self):
    +        """
    +        Packages installed by distutils (e.g. numpy or scipy),
    +        which uses an old safe_version, and so
    +        their version numbers can get mangled when
    +        converted to filenames (e.g., 1.11.0.dev0+2329eae to
    +        1.11.0.dev0_2329eae). These distributions will not be
    +        parsed properly
    +        downstream by Distribution and safe_version, so
    +        take an extra step and try to get the version number from
    +        the metadata file itself instead of the filename.
    +        """
    +        md_version = self._get_version()
    +        if md_version:
    +            self._version = md_version
    +        return self
    +
    +
    +class DistInfoDistribution(Distribution):
    +    """
    +    Wrap an actual or potential sys.path entry
    +    w/metadata, .dist-info style.
    +    """
    +
    +    PKG_INFO = 'METADATA'
    +    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
    +
    +    @property
    +    def _parsed_pkg_info(self):
    +        """Parse and cache metadata"""
    +        try:
    +            return self._pkg_info
    +        except AttributeError:
    +            metadata = self.get_metadata(self.PKG_INFO)
    +            self._pkg_info = email.parser.Parser().parsestr(metadata)
    +            return self._pkg_info
    +
    +    @property
    +    def _dep_map(self):
    +        try:
    +            return self.__dep_map
    +        except AttributeError:
    +            self.__dep_map = self._compute_dependencies()
    +            return self.__dep_map
    +
    +    def _compute_dependencies(self) -> dict[str | None, list[Requirement]]:
    +        """Recompute this distribution's dependencies."""
    +        self.__dep_map: dict[str | None, list[Requirement]] = {None: []}
    +
    +        reqs: list[Requirement] = []
    +        # Including any condition expressions
    +        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
    +            reqs.extend(parse_requirements(req))
    +
    +        def reqs_for_extra(extra):
    +            for req in reqs:
    +                if not req.marker or req.marker.evaluate({'extra': extra}):
    +                    yield req
    +
    +        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
    +        self.__dep_map[None].extend(common)
    +
    +        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
    +            s_extra = safe_extra(extra.strip())
    +            self.__dep_map[s_extra] = [
    +                r for r in reqs_for_extra(extra) if r not in common
    +            ]
    +
    +        return self.__dep_map
    +
    +
    +_distributionImpl = {
    +    '.egg': Distribution,
    +    '.egg-info': EggInfoDistribution,
    +    '.dist-info': DistInfoDistribution,
    +}
    +
    +
    +def issue_warning(*args, **kw):
    +    level = 1
    +    g = globals()
    +    try:
    +        # find the first stack frame that is *not* code in
    +        # the pkg_resources module, to use for the warning
    +        while sys._getframe(level).f_globals is g:
    +            level += 1
    +    except ValueError:
    +        pass
    +    warnings.warn(stacklevel=level + 1, *args, **kw)
    +
    +
    +def parse_requirements(strs: _NestedStr):
    +    """
    +    Yield ``Requirement`` objects for each specification in `strs`.
    +
    +    `strs` must be a string, or a (possibly-nested) iterable thereof.
    +    """
    +    return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
    +
    +
    +class RequirementParseError(_packaging_requirements.InvalidRequirement):
    +    "Compatibility wrapper for InvalidRequirement"
    +
    +
    +class Requirement(_packaging_requirements.Requirement):
    +    def __init__(self, requirement_string: str):
    +        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
    +        super().__init__(requirement_string)
    +        self.unsafe_name = self.name
    +        project_name = safe_name(self.name)
    +        self.project_name, self.key = project_name, project_name.lower()
    +        self.specs = [(spec.operator, spec.version) for spec in self.specifier]
    +        # packaging.requirements.Requirement uses a set for its extras. We use a variable-length tuple
    +        self.extras: tuple[str, ...] = tuple(map(safe_extra, self.extras))
    +        self.hashCmp = (
    +            self.key,
    +            self.url,
    +            self.specifier,
    +            frozenset(self.extras),
    +            str(self.marker) if self.marker else None,
    +        )
    +        self.__hash = hash(self.hashCmp)
    +
    +    def __eq__(self, other: object):
    +        return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
    +
    +    def __ne__(self, other):
    +        return not self == other
    +
    +    def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool:
    +        if isinstance(item, Distribution):
    +            if item.key != self.key:
    +                return False
    +
    +            item = item.version
    +
    +        # Allow prereleases always in order to match the previous behavior of
    +        # this method. In the future this should be smarter and follow PEP 440
    +        # more accurately.
    +        return self.specifier.contains(item, prereleases=True)
    +
    +    def __hash__(self):
    +        return self.__hash
    +
    +    def __repr__(self):
    +        return "Requirement.parse(%r)" % str(self)
    +
    +    @staticmethod
    +    def parse(s: str | Iterable[str]):
    +        (req,) = parse_requirements(s)
    +        return req
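    +
    +    # Editor's note -- illustrative usage:
    +    #
    +    #     req = Requirement.parse('requests[security]>=2.0,<3')
    +    #     req.key      -> 'requests'
    +    #     req.extras   -> ('security',)
    +    #     '2.5' in req -> True  (specifier containment; prereleases allowed)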
    +
    +
    +def _always_object(classes):
    +    """
    +    Ensure object appears in the mro even
    +    for old-style classes.
    +    """
    +    if object not in classes:
    +        return classes + (object,)
    +    return classes
    +
    +
    +def _find_adapter(registry: Mapping[type, _AdapterT], ob: object) -> _AdapterT:
    +    """Return an adapter factory for `ob` from `registry`"""
    +    types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    +    for t in types:
    +        if t in registry:
    +            return registry[t]
    +    # _find_adapter would previously return None, which the caller would
    +    # then immediately try to call. We raise a TypeError instead, to keep
    +    # backward compatibility in case anyone depended on that behaviour.
    +    raise TypeError(f"Could not find adapter for {registry} and {ob}")
    +
    +
    +def ensure_directory(path: StrOrBytesPath):
    +    """Ensure that the parent directory of `path` exists"""
    +    dirname = os.path.dirname(path)
    +    os.makedirs(dirname, exist_ok=True)
    +
    +
    +def _bypass_ensure_directory(path):
    +    """Sandbox-bypassing version of ensure_directory()"""
    +    if not WRITE_SUPPORT:
    +        raise OSError('"os.mkdir" not supported on this platform.')
    +    dirname, filename = split(path)
    +    if dirname and filename and not isdir(dirname):
    +        _bypass_ensure_directory(dirname)
    +        try:
    +            mkdir(dirname, 0o755)
    +        except FileExistsError:
    +            pass
    +
    +
    +def split_sections(s: _NestedStr) -> Iterator[tuple[str | None, list[str]]]:
    +    """Split a string or iterable thereof into (section, content) pairs
    +
    +    Each ``section`` is a stripped version of the section header ("[section]")
    +    and each ``content`` is a list of stripped lines excluding blank lines and
    +    comment-only lines.  If there are any such lines before the first section
    +    header, they're returned in a first ``section`` of ``None``.
    +    """
    +    section = None
    +    content = []
    +    for line in yield_lines(s):
    +        if line.startswith("["):
    +            if line.endswith("]"):
    +                if section or content:
    +                    yield section, content
    +                section = line[1:-1].strip()
    +                content = []
    +            else:
    +                raise ValueError("Invalid section heading", line)
    +        else:
    +            content.append(line)
    +
    +    # wrap up last segment
    +    yield section, content
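    +
    +# Editor's note -- example: split_sections(['a', '[x]', 'b', 'c'])
    +# yields (None, ['a']) and then ('x', ['b', 'c']).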
    +
    +
    +def _mkstemp(*args, **kw):
    +    old_open = os.open
    +    try:
    +        # temporarily bypass sandboxing
    +        os.open = os_open
    +        return tempfile.mkstemp(*args, **kw)
    +    finally:
    +        # and then put it back
    +        os.open = old_open
    +
    +
    +# Silence the PEP440Warning by default, so that end users don't get hit by it
    +# randomly just because they use pkg_resources. We want to append the rule
    +# because we want earlier uses of filterwarnings to take precedence over this
    +# one.
    +warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
    +
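# append=True keeps earlier filterwarnings() rules ahead of this one, so a
# user who already escalated the warning still wins; a standalone check:
import warnings

warnings.filterwarnings("error", category=UserWarning)
warnings.filterwarnings("ignore", category=UserWarning, append=True)
try:
    warnings.warn("demo", UserWarning)
except UserWarning:
    print("the earlier 'error' rule took precedence")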
    +
    +class PkgResourcesDeprecationWarning(Warning):
    +    """
    +    Base class for warning about deprecations in ``pkg_resources``
    +
    +    This class is not derived from ``DeprecationWarning``, and as such is
    +    visible by default.
    +    """
    +
    +
    +# Ported from ``setuptools`` to avoid introducing an import inter-dependency:
    +_LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
    +
    +
    +def _read_utf8_with_fallback(file: str, fallback_encoding=_LOCALE_ENCODING) -> str:
    +    """See setuptools.unicode_utils._read_utf8_with_fallback"""
    +    try:
    +        with open(file, "r", encoding="utf-8") as f:
    +            return f.read()
    +    except UnicodeDecodeError:  # pragma: no cover
    +        msg = f"""\
    +        ********************************************************************************
    +        `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
    +
    +        This fallback behaviour is considered **deprecated** and future versions of
    +        `setuptools/pkg_resources` may not implement it.
    +
    +        Please encode {file!r} with "utf-8" to ensure future builds will succeed.
    +
    +        If this file was produced by `setuptools` itself, cleaning up the cached files
    +        and re-building/re-installing the package with a newer version of `setuptools`
    +        (e.g. by updating `build-system.requires` in its `pyproject.toml`)
    +        might solve the problem.
    +        ********************************************************************************
    +        """
    +        # TODO: Add a deadline?
    +        #       See comment in setuptools.unicode_utils._Utf8EncodingNeeded
    +        warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
    +        with open(file, "r", encoding=fallback_encoding) as f:
    +            return f.read()
    +
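# Minimal sketch of the fallback above: a file that is not valid UTF-8
# fails the first read and is retried with another encoding (latin-1 here
# stands in for the locale encoding).
import tempfile
from pathlib import Path

tmp = Path(tempfile.mkdtemp()) / "demo.txt"
tmp.write_bytes("café".encode("latin-1"))
try:
    text = tmp.read_text(encoding="utf-8")
except UnicodeDecodeError:
    text = tmp.read_text(encoding="latin-1")
print(text)  # café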
    +
    +# from jaraco.functools 1.3
    +def _call_aside(f, *args, **kwargs):
    +    f(*args, **kwargs)
    +    return f
    +
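# The helper above invokes the function once at decoration time and still
# binds the name; a tiny standalone equivalent:
def call_aside(f, *args, **kwargs):
    f(*args, **kwargs)
    return f

@call_aside
def announce():
    print("runs immediately, at decoration time")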
    +
    +@_call_aside
    +def _initialize(g=globals()):
    +    "Set up global resource manager (deliberately not state-saved)"
    +    manager = ResourceManager()
    +    g['_manager'] = manager
    +    g.update(
    +        (name, getattr(manager, name))
    +        for name in dir(manager)
    +        if not name.startswith('_')
    +    )
    +
    +
    +@_call_aside
    +def _initialize_master_working_set():
    +    """
    +    Prepare the master working set and make the ``require()``
    +    API available.
    +
    +    This function has explicit effects on the global state
    +    of pkg_resources. It is intended to be invoked once at
    +    the initialization of this module.
    +
    +    Invocation by other packages is unsupported and done
    +    at their own risk.
    +    """
    +    working_set = _declare_state('object', 'working_set', WorkingSet._build_master())
    +
    +    require = working_set.require
    +    iter_entry_points = working_set.iter_entry_points
    +    add_activation_listener = working_set.subscribe
    +    run_script = working_set.run_script
    +    # backward compatibility
    +    run_main = run_script
    +    # Activate all distributions already on sys.path with replace=False and
    +    # ensure that all distributions added to the working set in the future
    +    # (e.g. by calling ``require()``) will get activated as well,
    +    # with higher priority (replace=True).
    +    tuple(dist.activate(replace=False) for dist in working_set)
    +    add_activation_listener(
    +        lambda dist: dist.activate(replace=True),
    +        existing=False,
    +    )
    +    working_set.entries = []
    +    # match order
    +    list(map(working_set.add_entry, sys.path))
    +    globals().update(locals())
    +
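# Once the two initializers above have run, the master working set backs
# the module-level API; a quick look at what it discovered on sys.path
# (output varies per environment):
from pip._vendor import pkg_resources

print(sorted(pkg_resources.working_set.by_key)[:5])  # first few project keys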
    +
    +if TYPE_CHECKING:
    +    # All of these are set by the @_call_aside methods above
    +    __resource_manager = ResourceManager()  # Won't exist at runtime
    +    resource_exists = __resource_manager.resource_exists
    +    resource_isdir = __resource_manager.resource_isdir
    +    resource_filename = __resource_manager.resource_filename
    +    resource_stream = __resource_manager.resource_stream
    +    resource_string = __resource_manager.resource_string
    +    resource_listdir = __resource_manager.resource_listdir
    +    set_extraction_path = __resource_manager.set_extraction_path
    +    cleanup_resources = __resource_manager.cleanup_resources
    +
    +    working_set = WorkingSet()
    +    require = working_set.require
    +    iter_entry_points = working_set.iter_entry_points
    +    add_activation_listener = working_set.subscribe
    +    run_script = working_set.run_script
    +    run_main = run_script
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc
    new file mode 100644
    index 0000000..6cb9630
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/LICENSE
    new file mode 100644
    index 0000000..f35fed9
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/LICENSE
    @@ -0,0 +1,21 @@
    +MIT License
    +
    +Copyright (c) 2010-202x The platformdirs developers
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py
    new file mode 100644
    index 0000000..2325ec2
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__init__.py
    @@ -0,0 +1,631 @@
    +"""
    +Utilities for determining application-specific dirs.
    +
+See <https://github.com/platformdirs/platformdirs> for details and usage.
    +
    +"""
    +
    +from __future__ import annotations
    +
    +import os
    +import sys
    +from typing import TYPE_CHECKING
    +
    +from .api import PlatformDirsABC
    +from .version import __version__
    +from .version import __version_tuple__ as __version_info__
    +
    +if TYPE_CHECKING:
    +    from pathlib import Path
    +    from typing import Literal
    +
    +if sys.platform == "win32":
    +    from pip._vendor.platformdirs.windows import Windows as _Result
    +elif sys.platform == "darwin":
    +    from pip._vendor.platformdirs.macos import MacOS as _Result
    +else:
    +    from pip._vendor.platformdirs.unix import Unix as _Result
    +
    +
    +def _set_platform_dir_class() -> type[PlatformDirsABC]:
    +    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
    +        if os.getenv("SHELL") or os.getenv("PREFIX"):
    +            return _Result
    +
    +        from pip._vendor.platformdirs.android import _android_folder  # noqa: PLC0415
    +
    +        if _android_folder() is not None:
    +            from pip._vendor.platformdirs.android import Android  # noqa: PLC0415
    +
    +            return Android  # return to avoid redefinition of a result
    +
    +    return _Result
    +
    +
    +if TYPE_CHECKING:
    +    # Work around mypy issue: https://github.com/python/mypy/issues/10962
    +    PlatformDirs = _Result
    +else:
    +    PlatformDirs = _set_platform_dir_class()  #: Currently active platform
    +AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
    +
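# On a regular desktop the alias above resolves to the OS-specific backend
# selected at import time; a quick check (output varies per machine):
from pip._vendor.platformdirs import PlatformDirs

dirs = PlatformDirs("MyApp", "MyCompany")  # sample app metadata
print(type(dirs).__name__)  # e.g. 'Unix', 'MacOS' or 'Windows'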
    +
    +def user_data_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    roaming: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param roaming: See `roaming `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: data directory tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        roaming=roaming,
    +        ensure_exists=ensure_exists,
    +    ).user_data_dir
    +
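# Example call of the wrapper above with assumed app metadata; on Linux
# this typically yields ~/.local/share/MyApp/1.0:
from pip._vendor.platformdirs import user_data_dir

print(user_data_dir("MyApp", "MyCompany", version="1.0"))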
    +
    +def site_data_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    multipath: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param multipath: See `multipath `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: data directory shared by users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        multipath=multipath,
    +        ensure_exists=ensure_exists,
    +    ).site_data_dir
    +
    +
    +def user_config_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    roaming: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param roaming: See `roaming `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: config directory tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        roaming=roaming,
    +        ensure_exists=ensure_exists,
    +    ).user_config_dir
    +
    +
    +def site_config_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    multipath: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param multipath: See `multipath `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: config directory shared by the users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        multipath=multipath,
    +        ensure_exists=ensure_exists,
    +    ).site_config_dir
    +
    +
    +def user_cache_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: cache directory tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).user_cache_dir
    +
    +
    +def site_cache_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
+    :returns: cache directory shared by users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).site_cache_dir
    +
    +
    +def user_state_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    roaming: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param roaming: See `roaming `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: state directory tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        roaming=roaming,
    +        ensure_exists=ensure_exists,
    +    ).user_state_dir
    +
    +
    +def user_log_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: log directory tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).user_log_dir
    +
    +
    +def user_documents_dir() -> str:
    +    """:returns: documents directory tied to the user"""
    +    return PlatformDirs().user_documents_dir
    +
    +
    +def user_downloads_dir() -> str:
    +    """:returns: downloads directory tied to the user"""
    +    return PlatformDirs().user_downloads_dir
    +
    +
    +def user_pictures_dir() -> str:
    +    """:returns: pictures directory tied to the user"""
    +    return PlatformDirs().user_pictures_dir
    +
    +
    +def user_videos_dir() -> str:
    +    """:returns: videos directory tied to the user"""
    +    return PlatformDirs().user_videos_dir
    +
    +
    +def user_music_dir() -> str:
    +    """:returns: music directory tied to the user"""
    +    return PlatformDirs().user_music_dir
    +
    +
    +def user_desktop_dir() -> str:
    +    """:returns: desktop directory tied to the user"""
    +    return PlatformDirs().user_desktop_dir
    +
    +
    +def user_runtime_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: runtime directory tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).user_runtime_dir
    +
    +
    +def site_runtime_dir(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> str:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: runtime directory shared by users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).site_runtime_dir
    +
    +
    +def user_data_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    roaming: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param roaming: See `roaming `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: data path tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        roaming=roaming,
    +        ensure_exists=ensure_exists,
    +    ).user_data_path
    +
    +
    +def site_data_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    multipath: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param multipath: See `multipath `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: data path shared by users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        multipath=multipath,
    +        ensure_exists=ensure_exists,
    +    ).site_data_path
    +
    +
    +def user_config_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    roaming: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param roaming: See `roaming `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: config path tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        roaming=roaming,
    +        ensure_exists=ensure_exists,
    +    ).user_config_path
    +
    +
    +def site_config_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    multipath: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param multipath: See `multipath `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: config path shared by the users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        multipath=multipath,
    +        ensure_exists=ensure_exists,
    +    ).site_config_path
    +
    +
    +def site_cache_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
+    :returns: cache path shared by users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).site_cache_path
    +
    +
    +def user_cache_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: cache path tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).user_cache_path
    +
    +
    +def user_state_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    roaming: bool = False,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param roaming: See `roaming `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: state path tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        roaming=roaming,
    +        ensure_exists=ensure_exists,
    +    ).user_state_path
    +
    +
    +def user_log_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
+    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: log path tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).user_log_path
    +
    +
    +def user_documents_path() -> Path:
    +    """:returns: documents a path tied to the user"""
    +    return PlatformDirs().user_documents_path
    +
    +
    +def user_downloads_path() -> Path:
    +    """:returns: downloads path tied to the user"""
    +    return PlatformDirs().user_downloads_path
    +
    +
    +def user_pictures_path() -> Path:
    +    """:returns: pictures path tied to the user"""
    +    return PlatformDirs().user_pictures_path
    +
    +
    +def user_videos_path() -> Path:
    +    """:returns: videos path tied to the user"""
    +    return PlatformDirs().user_videos_path
    +
    +
    +def user_music_path() -> Path:
    +    """:returns: music path tied to the user"""
    +    return PlatformDirs().user_music_path
    +
    +
    +def user_desktop_path() -> Path:
    +    """:returns: desktop path tied to the user"""
    +    return PlatformDirs().user_desktop_path
    +
    +
    +def user_runtime_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: runtime path tied to the user
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).user_runtime_path
    +
    +
    +def site_runtime_path(
    +    appname: str | None = None,
    +    appauthor: str | Literal[False] | None = None,
    +    version: str | None = None,
    +    opinion: bool = True,  # noqa: FBT001, FBT002
    +    ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +) -> Path:
    +    """
    +    :param appname: See `appname `.
    +    :param appauthor: See `appauthor `.
    +    :param version: See `version `.
    +    :param opinion: See `opinion `.
    +    :param ensure_exists: See `ensure_exists `.
    +    :returns: runtime path shared by users
    +    """
    +    return PlatformDirs(
    +        appname=appname,
    +        appauthor=appauthor,
    +        version=version,
    +        opinion=opinion,
    +        ensure_exists=ensure_exists,
    +    ).site_runtime_path
    +
    +
    +__all__ = [
    +    "AppDirs",
    +    "PlatformDirs",
    +    "PlatformDirsABC",
    +    "__version__",
    +    "__version_info__",
    +    "site_cache_dir",
    +    "site_cache_path",
    +    "site_config_dir",
    +    "site_config_path",
    +    "site_data_dir",
    +    "site_data_path",
    +    "site_runtime_dir",
    +    "site_runtime_path",
    +    "user_cache_dir",
    +    "user_cache_path",
    +    "user_config_dir",
    +    "user_config_path",
    +    "user_data_dir",
    +    "user_data_path",
    +    "user_desktop_dir",
    +    "user_desktop_path",
    +    "user_documents_dir",
    +    "user_documents_path",
    +    "user_downloads_dir",
    +    "user_downloads_path",
    +    "user_log_dir",
    +    "user_log_path",
    +    "user_music_dir",
    +    "user_music_path",
    +    "user_pictures_dir",
    +    "user_pictures_path",
    +    "user_runtime_dir",
    +    "user_runtime_path",
    +    "user_state_dir",
    +    "user_state_path",
    +    "user_videos_dir",
    +    "user_videos_path",
    +]
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py
    new file mode 100644
    index 0000000..fa8a677
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__main__.py
    @@ -0,0 +1,55 @@
    +"""Main entry point."""
    +
    +from __future__ import annotations
    +
    +from pip._vendor.platformdirs import PlatformDirs, __version__
    +
    +PROPS = (
    +    "user_data_dir",
    +    "user_config_dir",
    +    "user_cache_dir",
    +    "user_state_dir",
    +    "user_log_dir",
    +    "user_documents_dir",
    +    "user_downloads_dir",
    +    "user_pictures_dir",
    +    "user_videos_dir",
    +    "user_music_dir",
    +    "user_runtime_dir",
    +    "site_data_dir",
    +    "site_config_dir",
    +    "site_cache_dir",
    +    "site_runtime_dir",
    +)
    +
    +
    +def main() -> None:
    +    """Run the main entry point."""
    +    app_name = "MyApp"
    +    app_author = "MyCompany"
    +
    +    print(f"-- platformdirs {__version__} --")  # noqa: T201
    +
    +    print("-- app dirs (with optional 'version')")  # noqa: T201
    +    dirs = PlatformDirs(app_name, app_author, version="1.0")
    +    for prop in PROPS:
    +        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
    +
    +    print("\n-- app dirs (without optional 'version')")  # noqa: T201
    +    dirs = PlatformDirs(app_name, app_author)
    +    for prop in PROPS:
    +        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
    +
    +    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
    +    dirs = PlatformDirs(app_name)
    +    for prop in PROPS:
    +        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
    +
    +    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
    +    dirs = PlatformDirs(app_name, appauthor=False)
    +    for prop in PROPS:
    +        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
    +
    +
    +if __name__ == "__main__":
    +    main()
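# The entry point above can be exercised programmatically against pip's
# vendored copy (the module path is assumed from this tree); it prints
# every PROPS entry for the sample app metadata:
import runpy

runpy.run_module("pip._vendor.platformdirs", run_name="__main__")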
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc
    new file mode 100644
    index 0000000..31834f2
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc
    new file mode 100644
    index 0000000..1715c84
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc
    new file mode 100644
    index 0000000..b45f39e
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc
    new file mode 100644
    index 0000000..fa380f0
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc
    new file mode 100644
    index 0000000..19b7461
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc
    new file mode 100644
    index 0000000..d72d369
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc
    new file mode 100644
    index 0000000..0a95f41
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc
    new file mode 100644
    index 0000000..6d4ef12
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py
    new file mode 100644
    index 0000000..92efc85
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/android.py
    @@ -0,0 +1,249 @@
    +"""Android."""
    +
    +from __future__ import annotations
    +
    +import os
    +import re
    +import sys
    +from functools import lru_cache
    +from typing import TYPE_CHECKING, cast
    +
    +from .api import PlatformDirsABC
    +
    +
    +class Android(PlatformDirsABC):
    +    """
+    Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_.
+
+    Makes use of the `appname `, `version `, `ensure_exists `.
    +
    +    """
    +
    +    @property
    +    def user_data_dir(self) -> str:
    +        """:return: data directory tied to the user, e.g. ``/data/user///files/``"""
    +        return self._append_app_name_and_version(cast("str", _android_folder()), "files")
    +
    +    @property
    +    def site_data_dir(self) -> str:
    +        """:return: data directory shared by users, same as `user_data_dir`"""
    +        return self.user_data_dir
    +
    +    @property
    +    def user_config_dir(self) -> str:
    +        """
    +        :return: config directory tied to the user, e.g. \
+        ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
    +        """
    +        return self._append_app_name_and_version(cast("str", _android_folder()), "shared_prefs")
    +
    +    @property
    +    def site_config_dir(self) -> str:
    +        """:return: config directory shared by the users, same as `user_config_dir`"""
    +        return self.user_config_dir
    +
    +    @property
    +    def user_cache_dir(self) -> str:
    +        """:return: cache directory tied to the user, e.g.,``/data/user///cache/``"""
    +        return self._append_app_name_and_version(cast("str", _android_folder()), "cache")
    +
    +    @property
    +    def site_cache_dir(self) -> str:
    +        """:return: cache directory shared by users, same as `user_cache_dir`"""
    +        return self.user_cache_dir
    +
    +    @property
    +    def user_state_dir(self) -> str:
    +        """:return: state directory tied to the user, same as `user_data_dir`"""
    +        return self.user_data_dir
    +
    +    @property
    +    def user_log_dir(self) -> str:
    +        """
    +        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
+          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
    +        """
    +        path = self.user_cache_dir
    +        if self.opinion:
    +            path = os.path.join(path, "log")  # noqa: PTH118
    +        return path
    +
    +    @property
    +    def user_documents_dir(self) -> str:
    +        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
    +        return _android_documents_folder()
    +
    +    @property
    +    def user_downloads_dir(self) -> str:
    +        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
    +        return _android_downloads_folder()
    +
    +    @property
    +    def user_pictures_dir(self) -> str:
    +        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
    +        return _android_pictures_folder()
    +
    +    @property
    +    def user_videos_dir(self) -> str:
    +        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
    +        return _android_videos_folder()
    +
    +    @property
    +    def user_music_dir(self) -> str:
    +        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
    +        return _android_music_folder()
    +
    +    @property
    +    def user_desktop_dir(self) -> str:
    +        """:return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``"""
    +        return "/storage/emulated/0/Desktop"
    +
    +    @property
    +    def user_runtime_dir(self) -> str:
    +        """
    +        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
+          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
    +        """
    +        path = self.user_cache_dir
    +        if self.opinion:
    +            path = os.path.join(path, "tmp")  # noqa: PTH118
    +        return path
    +
    +    @property
    +    def site_runtime_dir(self) -> str:
    +        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
    +        return self.user_runtime_dir
    +
    +
    +@lru_cache(maxsize=1)
    +def _android_folder() -> str | None:  # noqa: C901
    +    """:return: base folder for the Android OS or None if it cannot be found"""
    +    result: str | None = None
+    # The type checker isn't happy with our "import android"; just don't do this when type checking.
+    # See https://stackoverflow.com/a/61394121
    +    if not TYPE_CHECKING:
    +        try:
    +            # First try to get a path to android app using python4android (if available)...
    +            from android import mActivity  # noqa: PLC0415
    +
    +            context = cast("android.content.Context", mActivity.getApplicationContext())  # noqa: F821
    +            result = context.getFilesDir().getParentFile().getAbsolutePath()
    +        except Exception:  # noqa: BLE001
    +            result = None
    +    if result is None:
    +        try:
    +            # ...and fall back to using plain pyjnius, if python4android isn't available or doesn't deliver any useful
    +            # result...
    +            from jnius import autoclass  # noqa: PLC0415
    +
    +            context = autoclass("android.content.Context")
    +            result = context.getFilesDir().getParentFile().getAbsolutePath()
    +        except Exception:  # noqa: BLE001
    +            result = None
    +    if result is None:
    +        # and if that fails, too, find an android folder looking at path on the sys.path
    +        # warning: only works for apps installed under /data, not adopted storage etc.
    +        pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
    +        for path in sys.path:
    +            if pattern.match(path):
    +                result = path.split("/files")[0]
    +                break
    +        else:
    +            result = None
    +    if result is None:
    +        # one last try: find an android folder looking at path on the sys.path taking adopted storage paths into
    +        # account
    +        pattern = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")
    +        for path in sys.path:
    +            if pattern.match(path):
    +                result = path.split("/files")[0]
    +                break
    +        else:
    +            result = None
    +    return result
    +
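# Standalone check of the first sys.path pattern above: app-private install
# locations under /data match and are trimmed back to the app folder.
import re

pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
path = "/data/user/0/com.example.app/files/lib"  # hypothetical entry
print(path.split("/files")[0] if pattern.match(path) else None)
# /data/user/0/com.example.app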
    +
    +@lru_cache(maxsize=1)
    +def _android_documents_folder() -> str:
    +    """:return: documents folder for the Android OS"""
    +    # Get directories with pyjnius
    +    try:
    +        from jnius import autoclass  # noqa: PLC0415
    +
    +        context = autoclass("android.content.Context")
    +        environment = autoclass("android.os.Environment")
    +        documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
    +    except Exception:  # noqa: BLE001
    +        documents_dir = "/storage/emulated/0/Documents"
    +
    +    return documents_dir
    +
    +
    +@lru_cache(maxsize=1)
    +def _android_downloads_folder() -> str:
    +    """:return: downloads folder for the Android OS"""
    +    # Get directories with pyjnius
    +    try:
    +        from jnius import autoclass  # noqa: PLC0415
    +
    +        context = autoclass("android.content.Context")
    +        environment = autoclass("android.os.Environment")
    +        downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
    +    except Exception:  # noqa: BLE001
    +        downloads_dir = "/storage/emulated/0/Downloads"
    +
    +    return downloads_dir
    +
    +
    +@lru_cache(maxsize=1)
    +def _android_pictures_folder() -> str:
    +    """:return: pictures folder for the Android OS"""
    +    # Get directories with pyjnius
    +    try:
    +        from jnius import autoclass  # noqa: PLC0415
    +
    +        context = autoclass("android.content.Context")
    +        environment = autoclass("android.os.Environment")
    +        pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
    +    except Exception:  # noqa: BLE001
    +        pictures_dir = "/storage/emulated/0/Pictures"
    +
    +    return pictures_dir
    +
    +
    +@lru_cache(maxsize=1)
    +def _android_videos_folder() -> str:
    +    """:return: videos folder for the Android OS"""
    +    # Get directories with pyjnius
    +    try:
    +        from jnius import autoclass  # noqa: PLC0415
    +
    +        context = autoclass("android.content.Context")
    +        environment = autoclass("android.os.Environment")
    +        videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
    +    except Exception:  # noqa: BLE001
    +        videos_dir = "/storage/emulated/0/DCIM/Camera"
    +
    +    return videos_dir
    +
    +
    +@lru_cache(maxsize=1)
    +def _android_music_folder() -> str:
    +    """:return: music folder for the Android OS"""
    +    # Get directories with pyjnius
    +    try:
    +        from jnius import autoclass  # noqa: PLC0415
    +
    +        context = autoclass("android.content.Context")
    +        environment = autoclass("android.os.Environment")
    +        music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
    +    except Exception:  # noqa: BLE001
    +        music_dir = "/storage/emulated/0/Music"
    +
    +    return music_dir
    +
    +
    +__all__ = [
    +    "Android",
    +]
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py
    new file mode 100644
    index 0000000..251600e
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/api.py
    @@ -0,0 +1,299 @@
    +"""Base API."""
    +
    +from __future__ import annotations
    +
    +import os
    +from abc import ABC, abstractmethod
    +from pathlib import Path
    +from typing import TYPE_CHECKING
    +
    +if TYPE_CHECKING:
    +    from collections.abc import Iterator
    +    from typing import Literal
    +
    +
    +class PlatformDirsABC(ABC):  # noqa: PLR0904
    +    """Abstract base class for platform directories."""
    +
    +    def __init__(  # noqa: PLR0913, PLR0917
    +        self,
    +        appname: str | None = None,
    +        appauthor: str | Literal[False] | None = None,
    +        version: str | None = None,
    +        roaming: bool = False,  # noqa: FBT001, FBT002
    +        multipath: bool = False,  # noqa: FBT001, FBT002
    +        opinion: bool = True,  # noqa: FBT001, FBT002
    +        ensure_exists: bool = False,  # noqa: FBT001, FBT002
    +    ) -> None:
    +        """
    +        Create a new platform directory.
    +
    +        :param appname: See `appname`.
    +        :param appauthor: See `appauthor`.
    +        :param version: See `version`.
    +        :param roaming: See `roaming`.
    +        :param multipath: See `multipath`.
    +        :param opinion: See `opinion`.
    +        :param ensure_exists: See `ensure_exists`.
    +
    +        """
+        self.appname = appname  #: The name of the application.
    +        self.appauthor = appauthor
    +        """
    +        The name of the app author or distributing body for this application.
    +
    +        Typically, it is the owning company name. Defaults to `appname`. You may pass ``False`` to disable it.
    +
    +        """
    +        self.version = version
    +        """
    +        An optional version path element to append to the path.
    +
    +        You might want to use this if you want multiple versions of your app to be able to run independently. If used,
+        this would typically be ``<major>.<minor>``.
    +
    +        """
    +        self.roaming = roaming
    +        """
    +        Whether to use the roaming appdata directory on Windows.
    +
    +        That means that for users on a Windows network setup for roaming profiles, this user data will be synced on
    +        login (see
    +        `here `_).
    +
    +        """
    +        self.multipath = multipath
    +        """
+        An optional parameter indicating that the entire list of data directories should be returned.
+
+        By default, only the first item is returned.
    +
    +        """
+        self.opinion = opinion  #: A flag indicating whether to use opinionated values.
    +        self.ensure_exists = ensure_exists
    +        """
    +        Optionally create the directory (and any missing parents) upon access if it does not exist.
    +
    +        By default, no directories are created.
    +
    +        """
    +
    +    def _append_app_name_and_version(self, *base: str) -> str:
    +        params = list(base[1:])
    +        if self.appname:
    +            params.append(self.appname)
    +            if self.version:
    +                params.append(self.version)
    +        path = os.path.join(base[0], *params)  # noqa: PTH118
    +        self._optionally_create_directory(path)
    +        return path
    +
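# Sketch of the path assembly above with illustrative values: any extra
# *base segments come first, then appname, then version.
import os

base, appname, version = "/home/u/.local/share", "MyApp", "1.0"
print(os.path.join(base, appname, version))  # /home/u/.local/share/MyApp/1.0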
    +    def _optionally_create_directory(self, path: str) -> None:
    +        if self.ensure_exists:
    +            Path(path).mkdir(parents=True, exist_ok=True)
    +
    +    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
    +        if self.multipath:
    +            # If multipath is True, the first path is returned.
    +            directory = directory.partition(os.pathsep)[0]
    +        return Path(directory)
    +
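# Multipath strings are os.pathsep-joined; the helper above keeps only the
# first entry when multipath is enabled:
import os

multi = os.pathsep.join(["/Library/A", "/Library/B"])
print(multi.partition(os.pathsep)[0])  # /Library/A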
    +    @property
    +    @abstractmethod
    +    def user_data_dir(self) -> str:
    +        """:return: data directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def site_data_dir(self) -> str:
    +        """:return: data directory shared by users"""
    +
    +    @property
    +    @abstractmethod
    +    def user_config_dir(self) -> str:
    +        """:return: config directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def site_config_dir(self) -> str:
    +        """:return: config directory shared by the users"""
    +
    +    @property
    +    @abstractmethod
    +    def user_cache_dir(self) -> str:
    +        """:return: cache directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def site_cache_dir(self) -> str:
    +        """:return: cache directory shared by users"""
    +
    +    @property
    +    @abstractmethod
    +    def user_state_dir(self) -> str:
    +        """:return: state directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_log_dir(self) -> str:
    +        """:return: log directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_documents_dir(self) -> str:
    +        """:return: documents directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_downloads_dir(self) -> str:
    +        """:return: downloads directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_pictures_dir(self) -> str:
    +        """:return: pictures directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_videos_dir(self) -> str:
    +        """:return: videos directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_music_dir(self) -> str:
    +        """:return: music directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_desktop_dir(self) -> str:
    +        """:return: desktop directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def user_runtime_dir(self) -> str:
    +        """:return: runtime directory tied to the user"""
    +
    +    @property
    +    @abstractmethod
    +    def site_runtime_dir(self) -> str:
    +        """:return: runtime directory shared by users"""
    +
    +    @property
    +    def user_data_path(self) -> Path:
    +        """:return: data path tied to the user"""
    +        return Path(self.user_data_dir)
    +
    +    @property
    +    def site_data_path(self) -> Path:
    +        """:return: data path shared by users"""
    +        return Path(self.site_data_dir)
    +
    +    @property
    +    def user_config_path(self) -> Path:
    +        """:return: config path tied to the user"""
    +        return Path(self.user_config_dir)
    +
    +    @property
    +    def site_config_path(self) -> Path:
    +        """:return: config path shared by the users"""
    +        return Path(self.site_config_dir)
    +
    +    @property
    +    def user_cache_path(self) -> Path:
    +        """:return: cache path tied to the user"""
    +        return Path(self.user_cache_dir)
    +
    +    @property
    +    def site_cache_path(self) -> Path:
    +        """:return: cache path shared by users"""
    +        return Path(self.site_cache_dir)
    +
    +    @property
    +    def user_state_path(self) -> Path:
    +        """:return: state path tied to the user"""
    +        return Path(self.user_state_dir)
    +
    +    @property
    +    def user_log_path(self) -> Path:
    +        """:return: log path tied to the user"""
    +        return Path(self.user_log_dir)
    +
    +    @property
    +    def user_documents_path(self) -> Path:
    +        """:return: documents a path tied to the user"""
    +        return Path(self.user_documents_dir)
    +
    +    @property
    +    def user_downloads_path(self) -> Path:
    +        """:return: downloads path tied to the user"""
    +        return Path(self.user_downloads_dir)
    +
    +    @property
    +    def user_pictures_path(self) -> Path:
    +        """:return: pictures path tied to the user"""
    +        return Path(self.user_pictures_dir)
    +
    +    @property
    +    def user_videos_path(self) -> Path:
    +        """:return: videos path tied to the user"""
    +        return Path(self.user_videos_dir)
    +
    +    @property
    +    def user_music_path(self) -> Path:
    +        """:return: music path tied to the user"""
    +        return Path(self.user_music_dir)
    +
    +    @property
    +    def user_desktop_path(self) -> Path:
    +        """:return: desktop path tied to the user"""
    +        return Path(self.user_desktop_dir)
    +
    +    @property
    +    def user_runtime_path(self) -> Path:
    +        """:return: runtime path tied to the user"""
    +        return Path(self.user_runtime_dir)
    +
    +    @property
    +    def site_runtime_path(self) -> Path:
    +        """:return: runtime path shared by users"""
    +        return Path(self.site_runtime_dir)
    +
    +    def iter_config_dirs(self) -> Iterator[str]:
    +        """:yield: all user and site configuration directories."""
    +        yield self.user_config_dir
    +        yield self.site_config_dir
    +
    +    def iter_data_dirs(self) -> Iterator[str]:
    +        """:yield: all user and site data directories."""
    +        yield self.user_data_dir
    +        yield self.site_data_dir
    +
    +    def iter_cache_dirs(self) -> Iterator[str]:
    +        """:yield: all user and site cache directories."""
    +        yield self.user_cache_dir
    +        yield self.site_cache_dir
    +
    +    def iter_runtime_dirs(self) -> Iterator[str]:
    +        """:yield: all user and site runtime directories."""
    +        yield self.user_runtime_dir
    +        yield self.site_runtime_dir
    +
    +    def iter_config_paths(self) -> Iterator[Path]:
    +        """:yield: all user and site configuration paths."""
    +        for path in self.iter_config_dirs():
    +            yield Path(path)
    +
    +    def iter_data_paths(self) -> Iterator[Path]:
    +        """:yield: all user and site data paths."""
    +        for path in self.iter_data_dirs():
    +            yield Path(path)
    +
    +    def iter_cache_paths(self) -> Iterator[Path]:
    +        """:yield: all user and site cache paths."""
    +        for path in self.iter_cache_dirs():
    +            yield Path(path)
    +
    +    def iter_runtime_paths(self) -> Iterator[Path]:
    +        """:yield: all user and site runtime paths."""
    +        for path in self.iter_runtime_dirs():
    +            yield Path(path)
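# Usage sketch for the iteration helpers above (output depends on the
# active backend): the user location comes first, then the site one.
from pip._vendor.platformdirs import PlatformDirs

for p in PlatformDirs("MyApp").iter_config_paths():
    print(p)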
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py
    new file mode 100644
    index 0000000..30ab368
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/macos.py
    @@ -0,0 +1,146 @@
    +"""macOS."""
    +
    +from __future__ import annotations
    +
    +import os.path
    +import sys
    +from typing import TYPE_CHECKING
    +
    +from .api import PlatformDirsABC
    +
    +if TYPE_CHECKING:
    +    from pathlib import Path
    +
    +
    +class MacOS(PlatformDirsABC):
    +    """
    +    Platform directories for the macOS operating system.
    +
    +    Follows the guidance from
+    `Apple documentation <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
    +    Makes use of the `appname `,
    +    `version `,
    +    `ensure_exists `.
    +
    +    """
    +
    +    @property
    +    def user_data_dir(self) -> str:
    +        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
    +        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))  # noqa: PTH111
    +
    +    @property
    +    def site_data_dir(self) -> str:
    +        """
    +        :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``.
+          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
    +          will be under the Homebrew prefix, e.g. ``$homebrew_prefix/share/$appname/$version``.
    +          If `multipath ` is enabled, and we're in Homebrew,
    +          the response is a multi-path string separated by ":", e.g.
    +          ``$homebrew_prefix/share/$appname/$version:/Library/Application Support/$appname/$version``
    +        """
    +        is_homebrew = "/opt/python" in sys.prefix
    +        homebrew_prefix = sys.prefix.split("/opt/python")[0] if is_homebrew else ""
    +        path_list = [self._append_app_name_and_version(f"{homebrew_prefix}/share")] if is_homebrew else []
    +        path_list.append(self._append_app_name_and_version("/Library/Application Support"))
    +        if self.multipath:
    +            return os.pathsep.join(path_list)
    +        return path_list[0]
    +
    +    @property
    +    def site_data_path(self) -> Path:
    +        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
    +        return self._first_item_as_path_if_multipath(self.site_data_dir)
    +
    +    @property
    +    def user_config_dir(self) -> str:
    +        """:return: config directory tied to the user, same as `user_data_dir`"""
    +        return self.user_data_dir
    +
    +    @property
    +    def site_config_dir(self) -> str:
    +        """:return: config directory shared by the users, same as `site_data_dir`"""
    +        return self.site_data_dir
    +
    +    @property
    +    def user_cache_dir(self) -> str:
    +        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
    +        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))  # noqa: PTH111
    +
    +    @property
    +    def site_cache_dir(self) -> str:
    +        """
    +        :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``.
+          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
    +          will be under the Homebrew prefix, e.g. ``$homebrew_prefix/var/cache/$appname/$version``.
+          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
    +          the response is a multi-path string separated by ":", e.g.
    +          ``$homebrew_prefix/var/cache/$appname/$version:/Library/Caches/$appname/$version``
    +        """
    +        is_homebrew = "/opt/python" in sys.prefix
    +        homebrew_prefix = sys.prefix.split("/opt/python")[0] if is_homebrew else ""
    +        path_list = [self._append_app_name_and_version(f"{homebrew_prefix}/var/cache")] if is_homebrew else []
    +        path_list.append(self._append_app_name_and_version("/Library/Caches"))
    +        if self.multipath:
    +            return os.pathsep.join(path_list)
    +        return path_list[0]
    +
    +    @property
    +    def site_cache_path(self) -> Path:
    +        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
    +        return self._first_item_as_path_if_multipath(self.site_cache_dir)
    +
    +    @property
    +    def user_state_dir(self) -> str:
    +        """:return: state directory tied to the user, same as `user_data_dir`"""
    +        return self.user_data_dir
    +
    +    @property
    +    def user_log_dir(self) -> str:
    +        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
    +        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))  # noqa: PTH111
    +
    +    @property
    +    def user_documents_dir(self) -> str:
    +        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
    +        return os.path.expanduser("~/Documents")  # noqa: PTH111
    +
    +    @property
    +    def user_downloads_dir(self) -> str:
    +        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
    +        return os.path.expanduser("~/Downloads")  # noqa: PTH111
    +
    +    @property
    +    def user_pictures_dir(self) -> str:
    +        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
    +        return os.path.expanduser("~/Pictures")  # noqa: PTH111
    +
    +    @property
    +    def user_videos_dir(self) -> str:
    +        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
    +        return os.path.expanduser("~/Movies")  # noqa: PTH111
    +
    +    @property
    +    def user_music_dir(self) -> str:
    +        """:return: music directory tied to the user, e.g. ``~/Music``"""
    +        return os.path.expanduser("~/Music")  # noqa: PTH111
    +
    +    @property
    +    def user_desktop_dir(self) -> str:
    +        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
    +        return os.path.expanduser("~/Desktop")  # noqa: PTH111
    +
    +    @property
    +    def user_runtime_dir(self) -> str:
    +        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
    +        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))  # noqa: PTH111
    +
    +    @property
    +    def site_runtime_dir(self) -> str:
    +        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
    +        return self.user_runtime_dir
    +
    +
    +__all__ = [
    +    "MacOS",
    +]
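Aside (not part of the diff): the MacOS provider only composes path strings from appname/version onto fixed Library locations, so this sketch runs on any OS; the appname/version values are placeholders and the commented paths are shown unexpanded:

    from pip._vendor.platformdirs.macos import MacOS

    dirs = MacOS(appname="VConnect", version="1.0")
    print(dirs.user_data_dir)   # ~/Library/Application Support/VConnect/1.0
    print(dirs.user_cache_dir)  # ~/Library/Caches/VConnect/1.0
    print(dirs.user_log_dir)    # ~/Library/Logs/VConnect/1.0
    print(dirs.site_data_dir)   # /Library/Application Support/VConnect/1.0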
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/py.typed
    new file mode 100644
    index 0000000..e69de29
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py
    new file mode 100644
    index 0000000..fc75d8d
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/unix.py
    @@ -0,0 +1,272 @@
    +"""Unix."""
    +
    +from __future__ import annotations
    +
    +import os
    +import sys
    +from configparser import ConfigParser
    +from pathlib import Path
    +from typing import TYPE_CHECKING, NoReturn
    +
    +from .api import PlatformDirsABC
    +
    +if TYPE_CHECKING:
    +    from collections.abc import Iterator
    +
    +if sys.platform == "win32":
    +
    +    def getuid() -> NoReturn:
    +        msg = "should only be used on Unix"
    +        raise RuntimeError(msg)
    +
    +else:
    +    from os import getuid
    +
    +
    +class Unix(PlatformDirsABC):  # noqa: PLR0904
    +    """
+    On Unix/Linux, we follow the `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
    +
    +    The spec allows overriding directories with environment variables. The examples shown are the default values,
    +    alongside the name of the environment variable that overrides them. Makes use of the `appname
+    <platformdirs.api.PlatformDirsABC.appname>`, `version <platformdirs.api.PlatformDirsABC.version>`, `multipath
+    <platformdirs.api.PlatformDirsABC.multipath>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
+    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    +
    +    """
    +
    +    @property
    +    def user_data_dir(self) -> str:
    +        """
    +        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
    +         ``$XDG_DATA_HOME/$appname/$version``
    +        """
    +        path = os.environ.get("XDG_DATA_HOME", "")
    +        if not path.strip():
    +            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
    +        return self._append_app_name_and_version(path)
    +
    +    @property
    +    def _site_data_dirs(self) -> list[str]:
    +        path = os.environ.get("XDG_DATA_DIRS", "")
    +        if not path.strip():
    +            path = f"/usr/local/share{os.pathsep}/usr/share"
    +        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
    +
    +    @property
    +    def site_data_dir(self) -> str:
    +        """
+        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
    +         enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the
    +         OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
    +        """
    +        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
    +        dirs = self._site_data_dirs
    +        if not self.multipath:
    +            return dirs[0]
    +        return os.pathsep.join(dirs)
    +
    +    @property
    +    def user_config_dir(self) -> str:
    +        """
    +        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
    +         ``$XDG_CONFIG_HOME/$appname/$version``
    +        """
    +        path = os.environ.get("XDG_CONFIG_HOME", "")
    +        if not path.strip():
    +            path = os.path.expanduser("~/.config")  # noqa: PTH111
    +        return self._append_app_name_and_version(path)
    +
    +    @property
    +    def _site_config_dirs(self) -> list[str]:
    +        path = os.environ.get("XDG_CONFIG_DIRS", "")
    +        if not path.strip():
    +            path = "/etc/xdg"
    +        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
    +
    +    @property
    +    def site_config_dir(self) -> str:
    +        """
+        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
    +         is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by
    +         the OS path separator), e.g. ``/etc/xdg/$appname/$version``
    +        """
    +        # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
    +        dirs = self._site_config_dirs
    +        if not self.multipath:
    +            return dirs[0]
    +        return os.pathsep.join(dirs)
    +
    +    @property
    +    def user_cache_dir(self) -> str:
    +        """
    +        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
+         ``$XDG_CACHE_HOME/$appname/$version``
    +        """
    +        path = os.environ.get("XDG_CACHE_HOME", "")
    +        if not path.strip():
    +            path = os.path.expanduser("~/.cache")  # noqa: PTH111
    +        return self._append_app_name_and_version(path)
    +
    +    @property
    +    def site_cache_dir(self) -> str:
    +        """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
    +        return self._append_app_name_and_version("/var/cache")
    +
    +    @property
    +    def user_state_dir(self) -> str:
    +        """
    +        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
    +         ``$XDG_STATE_HOME/$appname/$version``
    +        """
    +        path = os.environ.get("XDG_STATE_HOME", "")
    +        if not path.strip():
    +            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
    +        return self._append_app_name_and_version(path)
    +
    +    @property
    +    def user_log_dir(self) -> str:
    +        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
    +        path = self.user_state_dir
    +        if self.opinion:
    +            path = os.path.join(path, "log")  # noqa: PTH118
    +            self._optionally_create_directory(path)
    +        return path
    +
    +    @property
    +    def user_documents_dir(self) -> str:
    +        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
    +        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
    +
    +    @property
    +    def user_downloads_dir(self) -> str:
    +        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
    +        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
    +
    +    @property
    +    def user_pictures_dir(self) -> str:
    +        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
    +        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
    +
    +    @property
    +    def user_videos_dir(self) -> str:
    +        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
    +        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
    +
    +    @property
    +    def user_music_dir(self) -> str:
    +        """:return: music directory tied to the user, e.g. ``~/Music``"""
    +        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
    +
    +    @property
    +    def user_desktop_dir(self) -> str:
    +        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
    +        return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop")
    +
    +    @property
    +    def user_runtime_dir(self) -> str:
    +        """
    +        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
    +         ``$XDG_RUNTIME_DIR/$appname/$version``.
    +
+         For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if it
+         exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if ``$XDG_RUNTIME_DIR``
+         is not set.
    +        """
    +        path = os.environ.get("XDG_RUNTIME_DIR", "")
    +        if not path.strip():
    +            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
    +                path = f"/var/run/user/{getuid()}"
    +                if not Path(path).exists():
    +                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
    +            else:
    +                path = f"/run/user/{getuid()}"
    +        return self._append_app_name_and_version(path)
    +
    +    @property
    +    def site_runtime_dir(self) -> str:
    +        """
    +        :return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \
    +        ``$XDG_RUNTIME_DIR/$appname/$version``.
    +
    +        Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will
    +        fall back to paths associated to the root user instead of a regular logged-in user if it's not set.
    +
    +        If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir`
    +        instead.
    +
    +        For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set.
    +        """
    +        path = os.environ.get("XDG_RUNTIME_DIR", "")
    +        if not path.strip():
    +            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
    +                path = "/var/run"
    +            else:
    +                path = "/run"
    +        return self._append_app_name_and_version(path)
    +
    +    @property
    +    def site_data_path(self) -> Path:
    +        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
    +        return self._first_item_as_path_if_multipath(self.site_data_dir)
    +
    +    @property
    +    def site_config_path(self) -> Path:
    +        """:return: config path shared by the users, returns the first item, even if ``multipath`` is set to ``True``"""
    +        return self._first_item_as_path_if_multipath(self.site_config_dir)
    +
    +    @property
    +    def site_cache_path(self) -> Path:
    +        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
    +        return self._first_item_as_path_if_multipath(self.site_cache_dir)
    +
    +    def iter_config_dirs(self) -> Iterator[str]:
    +        """:yield: all user and site configuration directories."""
    +        yield self.user_config_dir
    +        yield from self._site_config_dirs
    +
    +    def iter_data_dirs(self) -> Iterator[str]:
    +        """:yield: all user and site data directories."""
    +        yield self.user_data_dir
    +        yield from self._site_data_dirs
    +
    +
    +def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
    +    media_dir = _get_user_dirs_folder(env_var)
    +    if media_dir is None:
    +        media_dir = os.environ.get(env_var, "").strip()
    +        if not media_dir:
    +            media_dir = os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
    +
    +    return media_dir
    +
    +
    +def _get_user_dirs_folder(key: str) -> str | None:
    +    """
    +    Return directory from user-dirs.dirs config file.
    +
    +    See https://freedesktop.org/wiki/Software/xdg-user-dirs/.
    +
    +    """
    +    user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
    +    if user_dirs_config_path.exists():
    +        parser = ConfigParser()
    +
    +        with user_dirs_config_path.open() as stream:
    +            # Add fake section header, so ConfigParser doesn't complain
    +            parser.read_string(f"[top]\n{stream.read()}")
    +
    +        if key not in parser["top"]:
    +            return None
    +
    +        path = parser["top"][key].strip('"')
    +        # Handle relative home paths
    +        return path.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111
    +
    +    return None
    +
    +
    +__all__ = [
    +    "Unix",
    +]
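Aside (not part of the diff): the Unix provider re-reads the XDG environment variables on every property access, so overrides take effect immediately. A small sketch (the appname and paths are illustrative):

    import os

    from pip._vendor.platformdirs.unix import Unix

    dirs = Unix(appname="VConnect", version="1.0")
    print(dirs.user_config_dir)   # ~/.config/VConnect/1.0 when XDG_CONFIG_HOME is unset

    os.environ["XDG_CONFIG_HOME"] = "/tmp/xdg-demo"
    print(dirs.user_config_dir)   # /tmp/xdg-demo/VConnect/1.0 -- the env var wins

    # With multipath=True, site dirs honour every entry in XDG_CONFIG_DIRS.
    multi = Unix(appname="VConnect", multipath=True)
    os.environ["XDG_CONFIG_DIRS"] = "/etc/xdg:/opt/etc/xdg"
    print(multi.site_config_dir)  # /etc/xdg/VConnect:/opt/etc/xdg/VConnect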
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py
    new file mode 100644
    index 0000000..3575282
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/version.py
    @@ -0,0 +1,34 @@
    +# file generated by setuptools-scm
    +# don't change, don't track in version control
    +
    +__all__ = [
    +    "__version__",
    +    "__version_tuple__",
    +    "version",
    +    "version_tuple",
    +    "__commit_id__",
    +    "commit_id",
    +]
    +
    +TYPE_CHECKING = False
    +if TYPE_CHECKING:
    +    from typing import Tuple
    +    from typing import Union
    +
    +    VERSION_TUPLE = Tuple[Union[int, str], ...]
    +    COMMIT_ID = Union[str, None]
    +else:
    +    VERSION_TUPLE = object
    +    COMMIT_ID = object
    +
    +version: str
    +__version__: str
    +__version_tuple__: VERSION_TUPLE
    +version_tuple: VERSION_TUPLE
    +commit_id: COMMIT_ID
    +__commit_id__: COMMIT_ID
    +
    +__version__ = version = '4.5.0'
    +__version_tuple__ = version_tuple = (4, 5, 0)
    +
    +__commit_id__ = commit_id = None
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py
    new file mode 100644
    index 0000000..d7bc960
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/platformdirs/windows.py
    @@ -0,0 +1,272 @@
    +"""Windows."""
    +
    +from __future__ import annotations
    +
    +import os
    +import sys
    +from functools import lru_cache
    +from typing import TYPE_CHECKING
    +
    +from .api import PlatformDirsABC
    +
    +if TYPE_CHECKING:
    +    from collections.abc import Callable
    +
    +
    +class Windows(PlatformDirsABC):
    +    """
+    `MSDN on where to store app data files <https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid>`_.
    +
+    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `appauthor
+    <platformdirs.api.PlatformDirsABC.appauthor>`, `version <platformdirs.api.PlatformDirsABC.version>`, `roaming
+    <platformdirs.api.PlatformDirsABC.roaming>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
+    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
    +
    +    """
    +
    +    @property
    +    def user_data_dir(self) -> str:
    +        """
    +        :return: data directory tied to the user, e.g.
    +         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
    +         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
    +        """
    +        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
    +        path = os.path.normpath(get_win_folder(const))
    +        return self._append_parts(path)
    +
    +    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
    +        params = []
    +        if self.appname:
    +            if self.appauthor is not False:
    +                author = self.appauthor or self.appname
    +                params.append(author)
    +            params.append(self.appname)
    +            if opinion_value is not None and self.opinion:
    +                params.append(opinion_value)
    +            if self.version:
    +                params.append(self.version)
    +        path = os.path.join(path, *params)  # noqa: PTH118
    +        self._optionally_create_directory(path)
    +        return path
    +
    +    @property
    +    def site_data_dir(self) -> str:
    +        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
    +        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
    +        return self._append_parts(path)
    +
    +    @property
    +    def user_config_dir(self) -> str:
    +        """:return: config directory tied to the user, same as `user_data_dir`"""
    +        return self.user_data_dir
    +
    +    @property
    +    def site_config_dir(self) -> str:
    +        """:return: config directory shared by the users, same as `site_data_dir`"""
    +        return self.site_data_dir
    +
    +    @property
    +    def user_cache_dir(self) -> str:
    +        """
    +        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
    +         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
    +        """
    +        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
    +        return self._append_parts(path, opinion_value="Cache")
    +
    +    @property
    +    def site_cache_dir(self) -> str:
    +        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
    +        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
    +        return self._append_parts(path, opinion_value="Cache")
    +
    +    @property
    +    def user_state_dir(self) -> str:
    +        """:return: state directory tied to the user, same as `user_data_dir`"""
    +        return self.user_data_dir
    +
    +    @property
    +    def user_log_dir(self) -> str:
    +        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
    +        path = self.user_data_dir
    +        if self.opinion:
    +            path = os.path.join(path, "Logs")  # noqa: PTH118
    +            self._optionally_create_directory(path)
    +        return path
    +
    +    @property
    +    def user_documents_dir(self) -> str:
    +        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
    +        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
    +
    +    @property
    +    def user_downloads_dir(self) -> str:
    +        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
    +        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
    +
    +    @property
    +    def user_pictures_dir(self) -> str:
    +        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
    +        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
    +
    +    @property
    +    def user_videos_dir(self) -> str:
    +        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
    +        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
    +
    +    @property
    +    def user_music_dir(self) -> str:
    +        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
    +        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
    +
    +    @property
    +    def user_desktop_dir(self) -> str:
    +        """:return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``"""
    +        return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY"))
    +
    +    @property
    +    def user_runtime_dir(self) -> str:
    +        """
    +        :return: runtime directory tied to the user, e.g.
    +         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
    +        """
    +        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
    +        return self._append_parts(path)
    +
    +    @property
    +    def site_runtime_dir(self) -> str:
    +        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
    +        return self.user_runtime_dir
    +
    +
    +def get_win_folder_from_env_vars(csidl_name: str) -> str:
    +    """Get folder from environment variables."""
    +    result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
    +    if result is not None:
    +        return result
    +
    +    env_var_name = {
    +        "CSIDL_APPDATA": "APPDATA",
    +        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
    +        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
    +    }.get(csidl_name)
    +    if env_var_name is None:
    +        msg = f"Unknown CSIDL name: {csidl_name}"
    +        raise ValueError(msg)
    +    result = os.environ.get(env_var_name)
    +    if result is None:
    +        msg = f"Unset environment variable: {env_var_name}"
    +        raise ValueError(msg)
    +    return result
    +
    +
    +def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
    +    """Get a folder for a CSIDL name that does not exist as an environment variable."""
    +    if csidl_name == "CSIDL_PERSONAL":
    +        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")  # noqa: PTH118
    +
    +    if csidl_name == "CSIDL_DOWNLOADS":
    +        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads")  # noqa: PTH118
    +
    +    if csidl_name == "CSIDL_MYPICTURES":
    +        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures")  # noqa: PTH118
    +
    +    if csidl_name == "CSIDL_MYVIDEO":
    +        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos")  # noqa: PTH118
    +
    +    if csidl_name == "CSIDL_MYMUSIC":
    +        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music")  # noqa: PTH118
    +    return None
    +
    +
    +def get_win_folder_from_registry(csidl_name: str) -> str:
    +    """
    +    Get folder from the registry.
    +
    +    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
    +    for all CSIDL_* names.
    +
    +    """
    +    shell_folder_name = {
    +        "CSIDL_APPDATA": "AppData",
    +        "CSIDL_COMMON_APPDATA": "Common AppData",
    +        "CSIDL_LOCAL_APPDATA": "Local AppData",
    +        "CSIDL_PERSONAL": "Personal",
    +        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
    +        "CSIDL_MYPICTURES": "My Pictures",
    +        "CSIDL_MYVIDEO": "My Video",
    +        "CSIDL_MYMUSIC": "My Music",
    +    }.get(csidl_name)
    +    if shell_folder_name is None:
    +        msg = f"Unknown CSIDL name: {csidl_name}"
    +        raise ValueError(msg)
    +    if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
    +        raise NotImplementedError
    +    import winreg  # noqa: PLC0415
    +
    +    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
    +    directory, _ = winreg.QueryValueEx(key, shell_folder_name)
    +    return str(directory)
    +
    +
    +def get_win_folder_via_ctypes(csidl_name: str) -> str:
    +    """Get folder with ctypes."""
    +    # There is no 'CSIDL_DOWNLOADS'.
    +    # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
    +    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
    +
    +    import ctypes  # noqa: PLC0415
    +
    +    csidl_const = {
    +        "CSIDL_APPDATA": 26,
    +        "CSIDL_COMMON_APPDATA": 35,
    +        "CSIDL_LOCAL_APPDATA": 28,
    +        "CSIDL_PERSONAL": 5,
    +        "CSIDL_MYPICTURES": 39,
    +        "CSIDL_MYVIDEO": 14,
    +        "CSIDL_MYMUSIC": 13,
    +        "CSIDL_DOWNLOADS": 40,
    +        "CSIDL_DESKTOPDIRECTORY": 16,
    +    }.get(csidl_name)
    +    if csidl_const is None:
    +        msg = f"Unknown CSIDL name: {csidl_name}"
    +        raise ValueError(msg)
    +
    +    buf = ctypes.create_unicode_buffer(1024)
    +    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
    +    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
    +
    +    # Downgrade to short path name if it has high-bit chars.
    +    if any(ord(c) > 255 for c in buf):  # noqa: PLR2004
    +        buf2 = ctypes.create_unicode_buffer(1024)
    +        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
    +            buf = buf2
    +
    +    if csidl_name == "CSIDL_DOWNLOADS":
    +        return os.path.join(buf.value, "Downloads")  # noqa: PTH118
    +
    +    return buf.value
    +
    +
    +def _pick_get_win_folder() -> Callable[[str], str]:
    +    try:
    +        import ctypes  # noqa: PLC0415
    +    except ImportError:
    +        pass
    +    else:
    +        if hasattr(ctypes, "windll"):
    +            return get_win_folder_via_ctypes
    +    try:
    +        import winreg  # noqa: PLC0415, F401
    +    except ImportError:
    +        return get_win_folder_from_env_vars
    +    else:
    +        return get_win_folder_from_registry
    +
    +
    +get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
    +
    +__all__ = [
    +    "Windows",
    +]
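Aside (not part of the diff): get_win_folder is picked once at import time, preferring the ctypes shell API, then the registry, then plain environment variables. A sketch of the env-var tier, which also works off-Windows for testing (the LOCALAPPDATA value below is fabricated):

    import os

    from pip._vendor.platformdirs.windows import get_win_folder_from_env_vars

    # The env-var tier maps CSIDL names to well-known variables.
    os.environ.setdefault("LOCALAPPDATA", r"C:\Users\demo\AppData\Local")
    print(get_win_folder_from_env_vars("CSIDL_LOCAL_APPDATA"))
    # -> C:\Users\demo\AppData\Local

    # Unknown CSIDL names raise ValueError rather than guessing.
    try:
        get_win_folder_from_env_vars("CSIDL_BOGUS")
    except ValueError as exc:
        print(exc)  # Unknown CSIDL name: CSIDL_BOGUS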
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/LICENSE
    new file mode 100644
    index 0000000..446a1a8
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/LICENSE
    @@ -0,0 +1,25 @@
    +Copyright (c) 2006-2022 by the respective authors (see AUTHORS file).
    +All rights reserved.
    +
    +Redistribution and use in source and binary forms, with or without
    +modification, are permitted provided that the following conditions are
    +met:
    +
    +* Redistributions of source code must retain the above copyright
    +  notice, this list of conditions and the following disclaimer.
    +
    +* Redistributions in binary form must reproduce the above copyright
    +  notice, this list of conditions and the following disclaimer in the
    +  documentation and/or other materials provided with the distribution.
    +
    +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
    +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
    +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
    +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
    +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
    +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
    +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
    +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
    +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
    +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py
    new file mode 100644
    index 0000000..cb229c9
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__init__.py
    @@ -0,0 +1,82 @@
    +"""
    +    Pygments
    +    ~~~~~~~~
    +
    +    Pygments is a syntax highlighting package written in Python.
    +
    +    It is a generic syntax highlighter for general use in all kinds of software
    +    such as forum systems, wikis or other applications that need to prettify
    +    source code. Highlights are:
    +
    +    * a wide range of common languages and markup formats is supported
    +    * special attention is paid to details, increasing quality by a fair amount
    +    * support for new languages and formats are added easily
    +    * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
    +      formats that PIL supports, and ANSI sequences
    +    * it is usable as a command-line tool and as a library
    +    * ... and it highlights even Brainfuck!
    +
    +    The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
    +
    +    .. _Pygments master branch:
    +       https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
    +
    +    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +from io import StringIO, BytesIO
    +
    +__version__ = '2.19.2'
    +__docformat__ = 'restructuredtext'
    +
    +__all__ = ['lex', 'format', 'highlight']
    +
    +
    +def lex(code, lexer):
    +    """
    +    Lex `code` with the `lexer` (must be a `Lexer` instance)
    +    and return an iterable of tokens. Currently, this only calls
    +    `lexer.get_tokens()`.
    +    """
    +    try:
    +        return lexer.get_tokens(code)
    +    except TypeError:
    +        # Heuristic to catch a common mistake.
    +        from pip._vendor.pygments.lexer import RegexLexer
    +        if isinstance(lexer, type) and issubclass(lexer, RegexLexer):
    +            raise TypeError('lex() argument must be a lexer instance, '
    +                            'not a class')
    +        raise
    +
    +
    +def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
    +    """
    +    Format ``tokens`` (an iterable of tokens) with the formatter ``formatter``
    +    (a `Formatter` instance).
    +
    +    If ``outfile`` is given and a valid file object (an object with a
    +    ``write`` method), the result will be written to it, otherwise it
    +    is returned as a string.
    +    """
    +    try:
    +        if not outfile:
    +            realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
    +            formatter.format(tokens, realoutfile)
    +            return realoutfile.getvalue()
    +        else:
    +            formatter.format(tokens, outfile)
    +    except TypeError:
    +        # Heuristic to catch a common mistake.
    +        from pip._vendor.pygments.formatter import Formatter
    +        if isinstance(formatter, type) and issubclass(formatter, Formatter):
    +            raise TypeError('format() argument must be a formatter instance, '
    +                            'not a class')
    +        raise
    +
    +
    +def highlight(code, lexer, formatter, outfile=None):
    +    """
    +    This is the most high-level highlighting function. It combines `lex` and
    +    `format` in one function.
    +    """
    +    return format(lex(code, lexer), formatter, outfile)
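Aside (not part of the diff): the three entry points compose, and highlight() is literally format(lex(code, lexer), formatter, outfile). A minimal sketch using the vendored package (the standalone pygments distribution exposes the identical API):

    from pip._vendor.pygments import highlight
    from pip._vendor.pygments.formatters import TerminalFormatter
    from pip._vendor.pygments.lexers import PythonLexer

    source = 'print("hello, world")\n'

    # Instances, not classes: passing PythonLexer itself triggers the
    # TypeError guarded against in lex()/format() above.
    print(highlight(source, PythonLexer(), TerminalFormatter()))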
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py
    new file mode 100644
    index 0000000..a2e612f
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__main__.py
    @@ -0,0 +1,17 @@
    +"""
    +    pygments.__main__
    +    ~~~~~~~~~~~~~~~~~
    +
    +    Main entry point for ``python -m pygments``.
    +
    +    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import sys
    +from pip._vendor.pygments.cmdline import main
    +
    +try:
    +    sys.exit(main(sys.argv))
    +except KeyboardInterrupt:
    +    sys.exit(1)
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc
    new file mode 100644
    index 0000000..b069dd7
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc
    new file mode 100644
    index 0000000..cbdd981
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc
    new file mode 100644
    index 0000000..62d9b36
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc
    new file mode 100644
    index 0000000..f6334c2
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc
    new file mode 100644
    index 0000000..ff4932e
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc
    new file mode 100644
    index 0000000..e486f2e
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc
    new file mode 100644
    index 0000000..844648b
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc
    new file mode 100644
    index 0000000..a029831
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc
    new file mode 100644
    index 0000000..99892ac
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc
    new file mode 100644
    index 0000000..c1348bf
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc
    new file mode 100644
    index 0000000..ef09930
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc
    new file mode 100644
    index 0000000..64270e1
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc
    new file mode 100644
    index 0000000..858453e
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc
    new file mode 100644
    index 0000000..59595dd
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc
    new file mode 100644
    index 0000000..d35a626
    Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-312.pyc differ
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py
    new file mode 100644
    index 0000000..ee1ac27
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/console.py
    @@ -0,0 +1,70 @@
    +"""
    +    pygments.console
    +    ~~~~~~~~~~~~~~~~
    +
    +    Format colored console output.
    +
    +    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +esc = "\x1b["
    +
    +codes = {}
    +codes[""] = ""
    +codes["reset"] = esc + "39;49;00m"
    +
    +codes["bold"] = esc + "01m"
    +codes["faint"] = esc + "02m"
    +codes["standout"] = esc + "03m"
    +codes["underline"] = esc + "04m"
    +codes["blink"] = esc + "05m"
    +codes["overline"] = esc + "06m"
    +
    +dark_colors = ["black", "red", "green", "yellow", "blue",
    +               "magenta", "cyan", "gray"]
    +light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
    +                "brightmagenta", "brightcyan", "white"]
    +
    +x = 30
    +for dark, light in zip(dark_colors, light_colors):
    +    codes[dark] = esc + "%im" % x
    +    codes[light] = esc + "%im" % (60 + x)
    +    x += 1
    +
    +del dark, light, x
    +
    +codes["white"] = codes["bold"]
    +
    +
    +def reset_color():
    +    return codes["reset"]
    +
    +
    +def colorize(color_key, text):
    +    return codes[color_key] + text + codes["reset"]
    +
    +
    +def ansiformat(attr, text):
    +    """
    +    Format ``text`` with a color and/or some attributes::
    +
    +        color       normal color
    +        *color*     bold color
    +        _color_     underlined color
    +        +color+     blinking color
    +    """
    +    result = []
    +    if attr[:1] == attr[-1:] == '+':
    +        result.append(codes['blink'])
    +        attr = attr[1:-1]
    +    if attr[:1] == attr[-1:] == '*':
    +        result.append(codes['bold'])
    +        attr = attr[1:-1]
    +    if attr[:1] == attr[-1:] == '_':
    +        result.append(codes['underline'])
    +        attr = attr[1:-1]
    +    result.append(codes[attr])
    +    result.append(text)
    +    result.append(codes['reset'])
    +    return ''.join(result)
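Aside (not part of the diff): a short sketch of the two console helpers; the output contains raw ANSI escapes, so it only renders correctly in a terminal:

    from pip._vendor.pygments.console import ansiformat, colorize

    print(colorize("red", "error:") + " build failed")
    print(ansiformat("*green*", "bold green"))    # *...* -> bold
    print(ansiformat("_yellow_", "underlined"))   # _..._ -> underline
    print(ansiformat("+cyan+", "blinking cyan"))  # +...+ -> blink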
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py
    new file mode 100644
    index 0000000..5efff43
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filter.py
    @@ -0,0 +1,70 @@
    +"""
    +    pygments.filter
    +    ~~~~~~~~~~~~~~~
    +
    +    Module that implements the default filter.
    +
    +    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +
    +def apply_filters(stream, filters, lexer=None):
    +    """
    +    Use this method to apply an iterable of filters to
    +    a stream. If lexer is given it's forwarded to the
    +    filter, otherwise the filter receives `None`.
    +    """
    +    def _apply(filter_, stream):
    +        yield from filter_.filter(lexer, stream)
    +    for filter_ in filters:
    +        stream = _apply(filter_, stream)
    +    return stream
    +
    +
    +def simplefilter(f):
    +    """
    +    Decorator that converts a function into a filter::
    +
    +        @simplefilter
    +        def lowercase(self, lexer, stream, options):
    +            for ttype, value in stream:
    +                yield ttype, value.lower()
    +    """
    +    return type(f.__name__, (FunctionFilter,), {
    +        '__module__': getattr(f, '__module__'),
    +        '__doc__': f.__doc__,
    +        'function': f,
    +    })
    +
    +
    +class Filter:
    +    """
    +    Default filter. Subclass this class or use the `simplefilter`
    +    decorator to create own filters.
    +    """
    +
    +    def __init__(self, **options):
    +        self.options = options
    +
    +    def filter(self, lexer, stream):
    +        raise NotImplementedError()
    +
    +
    +class FunctionFilter(Filter):
    +    """
    +    Abstract class used by `simplefilter` to create simple
    +    function filters on the fly. The `simplefilter` decorator
    +    automatically creates subclasses of this class for
    +    functions passed to it.
    +    """
    +    function = None
    +
    +    def __init__(self, **options):
    +        if not hasattr(self, 'function'):
    +            raise TypeError(f'{self.__class__.__name__!r} used without bound function')
    +        Filter.__init__(self, **options)
    +
    +    def filter(self, lexer, stream):
    +        # pylint: disable=not-callable
    +        yield from self.function(lexer, stream, self.options)
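Aside (not part of the diff): as the simplefilter docstring above shows, the decorator turns a four-argument function into a Filter subclass whose instances can be attached to any lexer. A small sketch (the uppercase filter is invented for illustration):

    from pip._vendor.pygments.filter import simplefilter
    from pip._vendor.pygments.lexers import PythonLexer


    @simplefilter
    def uppercase(self, lexer, stream, options):
        """Upper-case every token value; the token type passes through."""
        for ttype, value in stream:
            yield ttype, value.upper()


    lexer = PythonLexer()
    lexer.add_filter(uppercase())
    print("".join(value for _, value in lexer.get_tokens('print("hi")')))
    # -> PRINT("HI")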
    diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py
    new file mode 100644
    index 0000000..97380c9
    --- /dev/null
    +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__init__.py
    @@ -0,0 +1,940 @@
    +"""
    +    pygments.filters
    +    ~~~~~~~~~~~~~~~~
    +
    +    Module containing filter lookup functions and default
    +    filters.
    +
    +    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
    +    :license: BSD, see LICENSE for details.
    +"""
    +
    +import re
    +
    +from pip._vendor.pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
    +    string_to_tokentype
    +from pip._vendor.pygments.filter import Filter
    +from pip._vendor.pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
    +    get_choice_opt, ClassNotFound, OptionError
    +from pip._vendor.pygments.plugin import find_plugin_filters
    +
    +
    +def find_filter_class(filtername):
    +    """Lookup a filter by name. Return None if not found."""
    +    if filtername in FILTERS:
    +        return FILTERS[filtername]
    +    for name, cls in find_plugin_filters():
    +        if name == filtername:
    +            return cls
    +    return None
    +
    +
    +def get_filter_by_name(filtername, **options):
    +    """Return an instantiated filter.
    +
    +    Options are passed to the filter initializer if wanted.
    +    Raise a ClassNotFound if not found.
    +    """
    +    cls = find_filter_class(filtername)
    +    if cls:
    +        return cls(**options)
    +    else:
    +        raise ClassNotFound(f'filter {filtername!r} not found')
    +
    +
    +def get_all_filters():
    +    """Return a generator of all filter names."""
    +    yield from FILTERS
    +    for name, _ in find_plugin_filters():
    +        yield name
    +
    +
    +def _replace_special(ttype, value, regex, specialttype,
    +                     replacefunc=lambda x: x):
    +    last = 0
    +    for match in regex.finditer(value):
    +        start, end = match.start(), match.end()
    +        if start != last:
    +            yield ttype, value[last:start]
    +        yield specialttype, replacefunc(value[start:end])
    +        last = end
    +    if last != len(value):
    +        yield ttype, value[last:]
    +
    +
    +class CodeTagFilter(Filter):
    +    """Highlight special code tags in comments and docstrings.
    +
    +    Options accepted:
    +
    +    `codetags` : list of strings
    +       A list of strings that are flagged as code tags.  The default is to
    +       highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``.
    +
    +    .. versionchanged:: 2.13
    +       Now recognizes ``FIXME`` by default.
    +    """
    +
    +    def __init__(self, **options):
    +        Filter.__init__(self, **options)
    +        tags = get_list_opt(options, 'codetags',
    +                            ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
    +        self.tag_re = re.compile(r'\b({})\b'.format('|'.join([
    +            re.escape(tag) for tag in tags if tag
    +        ])))
    +
    +    def filter(self, lexer, stream):
    +        regex = self.tag_re
    +        for ttype, value in stream:
    +            if ttype in String.Doc or \
    +               ttype in Comment and \
    +               ttype not in Comment.Preproc:
    +                yield from _replace_special(ttype, value, regex, Comment.Special)
    +            else:
    +                yield ttype, value
    +
    +
    +class SymbolFilter(Filter):
    +    """Convert mathematical symbols such as \\ in Isabelle
    +    or \\longrightarrow in LaTeX into Unicode characters.
    +
    +    This is mostly useful for HTML or console output when you want to
    +    approximate the source rendering you'd see in an IDE.
    +
    +    Options accepted:
    +
    +    `lang` : string
    +       The symbol language. Must be one of ``'isabelle'`` or
    +       ``'latex'``.  The default is ``'isabelle'``.
    +    """
    +
    +    latex_symbols = {
    +        '\\alpha'                : '\U000003b1',
    +        '\\beta'                 : '\U000003b2',
    +        '\\gamma'                : '\U000003b3',
    +        '\\delta'                : '\U000003b4',
    +        '\\varepsilon'           : '\U000003b5',
    +        '\\zeta'                 : '\U000003b6',
    +        '\\eta'                  : '\U000003b7',
    +        '\\vartheta'             : '\U000003b8',
    +        '\\iota'                 : '\U000003b9',
    +        '\\kappa'                : '\U000003ba',
    +        '\\lambda'               : '\U000003bb',
    +        '\\mu'                   : '\U000003bc',
    +        '\\nu'                   : '\U000003bd',
    +        '\\xi'                   : '\U000003be',
    +        '\\pi'                   : '\U000003c0',
    +        '\\varrho'               : '\U000003c1',
    +        '\\sigma'                : '\U000003c3',
    +        '\\tau'                  : '\U000003c4',
    +        '\\upsilon'              : '\U000003c5',
    +        '\\varphi'               : '\U000003c6',
    +        '\\chi'                  : '\U000003c7',
    +        '\\psi'                  : '\U000003c8',
    +        '\\omega'                : '\U000003c9',
    +        '\\Gamma'                : '\U00000393',
    +        '\\Delta'                : '\U00000394',
    +        '\\Theta'                : '\U00000398',
    +        '\\Lambda'               : '\U0000039b',
    +        '\\Xi'                   : '\U0000039e',
    +        '\\Pi'                   : '\U000003a0',
    +        '\\Sigma'                : '\U000003a3',
    +        '\\Upsilon'              : '\U000003a5',
    +        '\\Phi'                  : '\U000003a6',
    +        '\\Psi'                  : '\U000003a8',
    +        '\\Omega'                : '\U000003a9',
    +        '\\leftarrow'            : '\U00002190',
    +        '\\longleftarrow'        : '\U000027f5',
    +        '\\rightarrow'           : '\U00002192',
    +        '\\longrightarrow'       : '\U000027f6',
    +        '\\Leftarrow'            : '\U000021d0',
    +        '\\Longleftarrow'        : '\U000027f8',
    +        '\\Rightarrow'           : '\U000021d2',
    +        '\\Longrightarrow'       : '\U000027f9',
    +        '\\leftrightarrow'       : '\U00002194',
    +        '\\longleftrightarrow'   : '\U000027f7',
    +        '\\Leftrightarrow'       : '\U000021d4',
    +        '\\Longleftrightarrow'   : '\U000027fa',
    +        '\\mapsto'               : '\U000021a6',
    +        '\\longmapsto'           : '\U000027fc',
    +        '\\relbar'               : '\U00002500',
    +        '\\Relbar'               : '\U00002550',
    +        '\\hookleftarrow'        : '\U000021a9',
    +        '\\hookrightarrow'       : '\U000021aa',
    +        '\\leftharpoondown'      : '\U000021bd',
    +        '\\rightharpoondown'     : '\U000021c1',
    +        '\\leftharpoonup'        : '\U000021bc',
    +        '\\rightharpoonup'       : '\U000021c0',
    +        '\\rightleftharpoons'    : '\U000021cc',
    +        '\\leadsto'              : '\U0000219d',
    +        '\\downharpoonleft'      : '\U000021c3',
    +        '\\downharpoonright'     : '\U000021c2',
    +        '\\upharpoonleft'        : '\U000021bf',
    +        '\\upharpoonright'       : '\U000021be',
    +        '\\restriction'          : '\U000021be',
    +        '\\uparrow'              : '\U00002191',
    +        '\\Uparrow'              : '\U000021d1',
    +        '\\downarrow'            : '\U00002193',
    +        '\\Downarrow'            : '\U000021d3',
    +        '\\updownarrow'          : '\U00002195',
    +        '\\Updownarrow'          : '\U000021d5',
    +        '\\langle'               : '\U000027e8',
    +        '\\rangle'               : '\U000027e9',
    +        '\\lceil'                : '\U00002308',
    +        '\\rceil'                : '\U00002309',
    +        '\\lfloor'               : '\U0000230a',
    +        '\\rfloor'               : '\U0000230b',
    +        '\\flqq'                 : '\U000000ab',
    +        '\\frqq'                 : '\U000000bb',
    +        '\\bot'                  : '\U000022a5',
    +        '\\top'                  : '\U000022a4',
    +        '\\wedge'                : '\U00002227',
    +        '\\bigwedge'             : '\U000022c0',
    +        '\\vee'                  : '\U00002228',
    +        '\\bigvee'               : '\U000022c1',
    +        '\\forall'               : '\U00002200',
    +        '\\exists'               : '\U00002203',
    +        '\\nexists'              : '\U00002204',
    +        '\\neg'                  : '\U000000ac',
    +        '\\Box'                  : '\U000025a1',
    +        '\\Diamond'              : '\U000025c7',
    +        '\\vdash'                : '\U000022a2',
    +        '\\models'               : '\U000022a8',
    +        '\\dashv'                : '\U000022a3',
    +        '\\surd'                 : '\U0000221a',
    +        '\\le'                   : '\U00002264',
    +        '\\ge'                   : '\U00002265',
    +        '\\ll'                   : '\U0000226a',
    +        '\\gg'                   : '\U0000226b',
    +        '\\lesssim'              : '\U00002272',
    +        '\\gtrsim'               : '\U00002273',
    +        '\\lessapprox'           : '\U00002a85',
    +        '\\gtrapprox'            : '\U00002a86',
    +        '\\in'                   : '\U00002208',
    +        '\\notin'                : '\U00002209',
    +        '\\subset'               : '\U00002282',
    +        '\\supset'               : '\U00002283',
    +        '\\subseteq'             : '\U00002286',
    +        '\\supseteq'             : '\U00002287',
    +        '\\sqsubset'             : '\U0000228f',
    +        '\\sqsupset'             : '\U00002290',
    +        '\\sqsubseteq'           : '\U00002291',
    +        '\\sqsupseteq'           : '\U00002292',
    +        '\\cap'                  : '\U00002229',
    +        '\\bigcap'               : '\U000022c2',
    +        '\\cup'                  : '\U0000222a',
    +        '\\bigcup'               : '\U000022c3',
    +        '\\sqcup'                : '\U00002294',
    +        '\\bigsqcup'             : '\U00002a06',
    +        '\\sqcap'                : '\U00002293',
    +        '\\Bigsqcap'             : '\U00002a05',
    +        '\\setminus'             : '\U00002216',
    +        '\\propto'               : '\U0000221d',
    +        '\\uplus'                : '\U0000228e',
    +        '\\bigplus'              : '\U00002a04',
    +        '\\sim'                  : '\U0000223c',
    +        '\\doteq'                : '\U00002250',
    +        '\\simeq'                : '\U00002243',
    +        '\\approx'               : '\U00002248',
    +        '\\asymp'                : '\U0000224d',
    +        '\\cong'                 : '\U00002245',
    +        '\\equiv'                : '\U00002261',
    +        '\\Join'                 : '\U000022c8',
    +        '\\bowtie'               : '\U00002a1d',
    +        '\\prec'                 : '\U0000227a',
    +        '\\succ'                 : '\U0000227b',
    +        '\\preceq'               : '\U0000227c',
    +        '\\succeq'               : '\U0000227d',
    +        '\\parallel'             : '\U00002225',
    +        '\\mid'                  : '\U000000a6',
    +        '\\pm'                   : '\U000000b1',
    +        '\\mp'                   : '\U00002213',
    +        '\\times'                : '\U000000d7',
    +        '\\div'                  : '\U000000f7',
    +        '\\cdot'                 : '\U000022c5',
    +        '\\star'                 : '\U000022c6',
    +        '\\circ'                 : '\U00002218',
    +        '\\dagger'               : '\U00002020',
    +        '\\ddagger'              : '\U00002021',
    +        '\\lhd'                  : '\U000022b2',
    +        '\\rhd'                  : '\U000022b3',
    +        '\\unlhd'                : '\U000022b4',
    +        '\\unrhd'                : '\U000022b5',
    +        '\\triangleleft'         : '\U000025c3',
    +        '\\triangleright'        : '\U000025b9',
    +        '\\triangle'             : '\U000025b3',
    +        '\\triangleq'            : '\U0000225c',
    +        '\\oplus'                : '\U00002295',
    +        '\\bigoplus'             : '\U00002a01',
    +        '\\otimes'               : '\U00002297',
    +        '\\bigotimes'            : '\U00002a02',
    +        '\\odot'                 : '\U00002299',
    +        '\\bigodot'              : '\U00002a00',
    +        '\\ominus'               : '\U00002296',
    +        '\\oslash'               : '\U00002298',
    +        '\\dots'                 : '\U00002026',
    +        '\\cdots'                : '\U000022ef',
    +        '\\sum'                  : '\U00002211',
    +        '\\prod'                 : '\U0000220f',
    +        '\\coprod'               : '\U00002210',
    +        '\\infty'                : '\U0000221e',
    +        '\\int'                  : '\U0000222b',
    +        '\\oint'                 : '\U0000222e',
    +        '\\clubsuit'             : '\U00002663',
    +        '\\diamondsuit'          : '\U00002662',
    +        '\\heartsuit'            : '\U00002661',
    +        '\\spadesuit'            : '\U00002660',
    +        '\\aleph'                : '\U00002135',
    +        '\\emptyset'             : '\U00002205',
    +        '\\nabla'                : '\U00002207',
    +        '\\partial'              : '\U00002202',
    +        '\\flat'                 : '\U0000266d',
    +        '\\natural'              : '\U0000266e',
    +        '\\sharp'                : '\U0000266f',
    +        '\\angle'                : '\U00002220',
    +        '\\copyright'            : '\U000000a9',
    +        '\\textregistered'       : '\U000000ae',
    +        '\\textonequarter'       : '\U000000bc',
    +        '\\textonehalf'          : '\U000000bd',
    +        '\\textthreequarters'    : '\U000000be',
    +        '\\textordfeminine'      : '\U000000aa',
    +        '\\textordmasculine'     : '\U000000ba',
    +        '\\euro'                 : '\U000020ac',
    +        '\\pounds'               : '\U000000a3',
    +        '\\yen'                  : '\U000000a5',
    +        '\\textcent'             : '\U000000a2',
    +        '\\textcurrency'         : '\U000000a4',
    +        '\\textdegree'           : '\U000000b0',
    +    }
    +
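The table above is what this class's filter() method (further down in the diff) consumes: whenever a token's entire text matches a key, it is swapped for the mapped Unicode character. A minimal sketch of exercising the 'latex' table through the stand-alone pygments API (not pip's vendored copy; the TeX snippet and lexer choice are illustrative assumptions):

    from pygments import highlight
    from pygments.filters import SymbolFilter
    from pygments.formatters import NullFormatter
    from pygments.lexers import get_lexer_by_name

    # Attach the filter in 'latex' mode; tokens whose whole text is e.g.
    # '\forall' or '\in' should then be rewritten via the latex_symbols table.
    lexer = get_lexer_by_name('tex')
    lexer.add_filter(SymbolFilter(lang='latex'))

    # NullFormatter writes the (now substituted) token text back out unchanged.
    print(highlight(r'\forall x \in A', lexer, NullFormatter()))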
    +    isabelle_symbols = {
+        '\\<zero>'                 : '\U0001d7ec',
+        '\\<one>'                  : '\U0001d7ed',
+        '\\<two>'                  : '\U0001d7ee',
+        '\\<three>'                : '\U0001d7ef',
+        '\\<four>'                 : '\U0001d7f0',
+        '\\<five>'                 : '\U0001d7f1',
+        '\\<six>'                  : '\U0001d7f2',
+        '\\<seven>'                : '\U0001d7f3',
+        '\\<eight>'                : '\U0001d7f4',
+        '\\<nine>'                 : '\U0001d7f5',
+        '\\<A>'                    : '\U0001d49c',
+        '\\<B>'                    : '\U0000212c',
+        '\\<C>'                    : '\U0001d49e',
+        '\\<D>'                    : '\U0001d49f',
+        '\\<E>'                    : '\U00002130',
+        '\\<F>'                    : '\U00002131',
+        '\\<G>'                    : '\U0001d4a2',
+        '\\<H>'                    : '\U0000210b',
+        '\\<I>'                    : '\U00002110',
+        '\\<J>'                    : '\U0001d4a5',
+        '\\<K>'                    : '\U0001d4a6',
+        '\\<L>'                    : '\U00002112',
+        '\\<M>'                    : '\U00002133',
+        '\\<N>'                    : '\U0001d4a9',
+        '\\<O>'                    : '\U0001d4aa',
[ ... the entries from '\<P>' down to the '\<^sub>' block (script capitals P-Z, sans-serif a-z, fraktur and Greek letters, blackboard-bold sets, arrows, brackets, operators, currency and accent symbols, roughly 270 lines) are present in the commit, but their '\<name>' keys were destroyed during text extraction; only scattered Unicode values survive in the original dump ... ]
+        '\\<^sub>'                 : '\U000021e9',
+        '\\<^sup>'                 : '\U000021e7',
+        '\\<^bold>'                : '\U00002759',
+        '\\<^bsub>'                : '\U000021d8',
+        '\\<^esub>'                : '\U000021d9',
+        '\\<^bsup>'                : '\U000021d7',
+        '\\<^esup>'                : '\U000021d6',
+    }
+
+    lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols}
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        lang = get_choice_opt(options, 'lang',
+                              ['isabelle', 'latex'], 'isabelle')
+        self.symbols = self.lang_map[lang]
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if value in self.symbols:
+                yield ttype, self.symbols[value]
+            else:
+                yield ttype, value
+
+
+class KeywordCaseFilter(Filter):
+    """Convert keywords to lowercase or uppercase or capitalize them, which
+    means first letter uppercase, rest lowercase.
+
+    This can be useful e.g. if you highlight Pascal code and want to adapt the
+    code to your styleguide.
+
+    Options accepted:
+
+    `case` : string
+       The casing to convert keywords to. Must be one of ``'lower'``,
+       ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        case = get_choice_opt(options, 'case',
+                              ['lower', 'upper', 'capitalize'], 'lower')
+        self.convert = getattr(str, case)
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype in Keyword:
+                yield ttype, self.convert(value)
+            else:
+                yield ttype, value
+
+
+class NameHighlightFilter(Filter):
+    """Highlight a normal Name (and Name.*) token with a different token type.
+
+    Example::
+
+        filter = NameHighlightFilter(
+            names=['foo', 'bar', 'baz'],
+            tokentype=Name.Function,
+        )
+
+    This would highlight the names "foo", "bar" and "baz"
+    as functions. `Name.Function` is the default token type.
+
+    Options accepted:
+
+    `names` : list of strings
+      A list of names that should be given the different token type.
+      There is no default.
+    `tokentype` : TokenType or string
+      A token type or a string containing a token type name that is
+      used for highlighting the strings in `names`. The default is
+      `Name.Function`.
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        self.names = set(get_list_opt(options, 'names', []))
+        tokentype = options.get('tokentype')
+        if tokentype:
+            self.tokentype = string_to_tokentype(tokentype)
+        else:
+            self.tokentype = Name.Function
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype in Name and value in self.names:
+                yield self.tokentype, value
+            else:
+                yield ttype, value
+
+
+class ErrorToken(Exception):
+    pass
+
+
+class RaiseOnErrorTokenFilter(Filter):
+    """Raise an exception when the lexer generates an error token.
+
+    Options accepted:
+
+    `excclass` : Exception class
+       The exception class to raise.
+       The default is `pygments.filters.ErrorToken`.
+
+    .. versionadded:: 0.8
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        self.exception = options.get('excclass', ErrorToken)
+        try:
+            # issubclass() will raise TypeError if first argument is not a class
+            if not issubclass(self.exception, Exception):
+                raise TypeError
+        except TypeError:
+            raise OptionError('excclass option is not an exception class')
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype is Error:
+                raise self.exception(value)
+            yield ttype, value
+
+
+class VisibleWhitespaceFilter(Filter):
+    """Convert tabs, newlines and/or spaces to visible characters.
+
+    Options accepted:
+
+    `spaces` : string or bool
+      If this is a one-character string, spaces will be replaces by this string.
+      If it is another true value, spaces will be replaced by ``·`` (unicode
+      MIDDLE DOT). If it is a false value, spaces will not be replaced. The
+      default is ``False``.
+    `tabs` : string or bool
+      The same as for `spaces`, but the default replacement character is ``»``
+      (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
+      is ``False``. Note: this will not work if the `tabsize` option for the
+      lexer is nonzero, as tabs will already have been expanded then.
+    `tabsize` : int
+      If tabs are to be replaced by this filter (see the `tabs` option), this
+      is the total number of characters that a tab should be expanded to.
+      The default is ``8``.
+    `newlines` : string or bool
+      The same as for `spaces`, but the default replacement character is ``¶``
+      (unicode PILCROW SIGN). The default value is ``False``.
+    `wstokentype` : bool
+      If true, give whitespace the special `Whitespace` token type. This allows
+      styling the visible whitespace differently (e.g. greyed out), but it can
+      disrupt background colors. The default is ``True``.
+
+    .. versionadded:: 0.8
+    """
+
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        for name, default in [('spaces',   '·'),
+                              ('tabs',     '»'),
+                              ('newlines', '¶')]:
+            opt = options.get(name, False)
+            if isinstance(opt, str) and len(opt) == 1:
+                setattr(self, name, opt)
+            else:
+                setattr(self, name, (opt and default or ''))
+        tabsize = get_int_opt(options, 'tabsize', 8)
+        if self.tabs:
+            self.tabs += ' ' * (tabsize - 1)
+        if self.newlines:
+            self.newlines += '\n'
+        self.wstt = get_bool_opt(options, 'wstokentype', True)
+
+    def filter(self, lexer, stream):
+        if self.wstt:
+            spaces = self.spaces or ' '
+            tabs = self.tabs or '\t'
+            newlines = self.newlines or '\n'
+            regex = re.compile(r'\s')
+
+            def replacefunc(wschar):
+                if wschar == ' ':
+                    return spaces
+                elif wschar == '\t':
+                    return tabs
+                elif wschar == '\n':
+                    return newlines
+                return wschar
+
+            for ttype, value in stream:
+                yield from _replace_special(ttype, value, regex, Whitespace,
+                                            replacefunc)
+        else:
+            spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
+            # simpler processing
+            for ttype, value in stream:
+                if spaces:
+                    value = value.replace(' ', spaces)
+                if tabs:
+                    value = value.replace('\t', tabs)
+                if newlines:
+                    value = value.replace('\n', newlines)
+                yield ttype, value
+
+
+class GobbleFilter(Filter):
+    """Gobbles source code lines (eats initial characters).
+
+    This filter drops the first ``n`` characters off every line of code. This
+    may be useful when the source code fed to the lexer is indented by a fixed
+    amount of space that isn't desired in the output.
+
+    Options accepted:
+
+    `n` : int
+       The number of characters to gobble.
+
+    .. versionadded:: 1.2
+    """
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+        self.n = get_int_opt(options, 'n', 0)
+
+    def gobble(self, value, left):
+        if left < len(value):
+            return value[left:], 0
+        else:
+            return '', left - len(value)
+
+    def filter(self, lexer, stream):
+        n = self.n
+        left = n  # How many characters left to gobble.
+        for ttype, value in stream:
+            # Remove ``left`` tokens from first line, ``n`` from all others.
+            parts = value.split('\n')
+            (parts[0], left) = self.gobble(parts[0], left)
+            for i in range(1, len(parts)):
+                (parts[i], left) = self.gobble(parts[i], n)
+            value = '\n'.join(parts)
+
+            if value != '':
+                yield ttype, value
+
+
+class TokenMergeFilter(Filter):
+    """Merges consecutive tokens with the same token type in the output
+    stream of a lexer.
+
+    .. versionadded:: 1.2
+    """
+    def __init__(self, **options):
+        Filter.__init__(self, **options)
+
+    def filter(self, lexer, stream):
+        current_type = None
+        current_value = None
+        for ttype, value in stream:
+            if ttype is current_type:
+                current_value += value
+            else:
+                if current_type is not None:
+                    yield current_type, current_value
+                current_type = ttype
+                current_value = value
+        if current_type is not None:
+            yield current_type, current_value
+
+
+FILTERS = {
+    'codetagify':   CodeTagFilter,
+    'keywordcase':  KeywordCaseFilter,
+    'highlight':    NameHighlightFilter,
+    'raiseonerror': RaiseOnErrorTokenFilter,
+    'whitespace':   VisibleWhitespaceFilter,
+    'gobble':       GobbleFilter,
+    'tokenmerge':   TokenMergeFilter,
+    'symbols':      SymbolFilter,
+}
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..78b75cf
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py
new file mode 100644
index 0000000..0041e41
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatter.py
@@ -0,0 +1,129 @@
+"""
+    pygments.formatter
+    ~~~~~~~~~~~~~~~~~~
+
+    Base formatter class.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import codecs
+
+from pip._vendor.pygments.util import get_bool_opt
+from pip._vendor.pygments.styles import get_style_by_name
+
+__all__ = ['Formatter']
+
+
+def _lookup_style(style):
+    if isinstance(style, str):
+        return get_style_by_name(style)
+    return style
+
+
+class Formatter:
+    """
+    Converts a token stream to text.
+
+    Formatters should have attributes to help selecting them. These
+    are similar to the corresponding :class:`~pygments.lexer.Lexer`
+    attributes.
+
+    .. autoattribute:: name
+       :no-value:
+
+    .. autoattribute:: aliases
+       :no-value:
+
+    .. autoattribute:: filenames
+       :no-value:
+
+    You can pass options as keyword arguments to the constructor.
+    All formatters accept these basic options:
+
+    ``style``
+        The style to use, can be a string or a Style subclass
+        (default: "default"). Not used by e.g. the
+        TerminalFormatter.
+    ``full``
+        Tells the formatter to output a "full" document, i.e.
+        a complete self-contained document. This doesn't have
+        any effect for some formatters (default: false).
+    ``title``
+        If ``full`` is true, the title that should be used to
+        caption the document (default: '').
+    ``encoding``
+        If given, must be an encoding name. This will be used to
+        convert the Unicode token strings to byte strings in the
+        output. If it is "" or None, Unicode strings will be written
+        to the output file, which most file-like objects do not
+        support (default: None).
+    ``outencoding``
+        Overrides ``encoding`` if given.
+
+    """
+
+    #: Full name for the formatter, in human-readable form.
+    name = None
+
+    #: A list of short, unique identifiers that can be used to lookup
+    #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
+    aliases = []
+
+    #: A list of fnmatch patterns that match filenames for which this
+    #: formatter can produce output. The patterns in this list should be unique
+    #: among all formatters.
+    filenames = []
+
+    #: If True, this formatter outputs Unicode strings when no encoding
+    #: option is given.
+    unicodeoutput = True
+
+    def __init__(self, **options):
+        """
+        As with lexers, this constructor takes arbitrary optional arguments,
+        and if you override it, you should first process your own options, then
+        call the base class implementation.
+        """
+        self.style = _lookup_style(options.get('style', 'default'))
+        self.full = get_bool_opt(options, 'full', False)
+        self.title = options.get('title', '')
+        self.encoding = options.get('encoding', None) or None
+        if self.encoding in ('guess', 'chardet'):
+            # can happen for e.g. pygmentize -O encoding=guess
+            self.encoding = 'utf-8'
+        self.encoding = options.get('outencoding') or self.encoding
+        self.options = options
+
+    def get_style_defs(self, arg=''):
+        """
+        This method must return statements or declarations suitable to define
+        the current style for subsequent highlighted text (e.g. CSS classes
+        in the `HTMLFormatter`).
+
+        The optional argument `arg` can be used to modify the generation and
+        is formatter dependent (it is standardized because it can be given on
+        the command line).
+
+        This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
+        the `arg` is then given by the ``-a`` option.
+        """
+        return ''
+
+    def format(self, tokensource, outfile):
+        """
+        This method must format the tokens from the `tokensource` iterable and
+        write the formatted version to the file object `outfile`.
+
+        Formatter options can control how exactly the tokens are converted.
+        """
+        if self.encoding:
+            # wrap the outfile in a StreamWriter
+            outfile = codecs.lookup(self.encoding)[3](outfile)
+        return self.format_unencoded(tokensource, outfile)
+
+    # Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to
+    # Formatter. This helps when using third-party type stubs from typeshed.
+    def __class_getitem__(cls, name):
+        return cls
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py
new file mode 100644
index 0000000..014f2ee
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__init__.py
@@ -0,0 +1,157 @@
+"""
+    pygments.formatters
+    ~~~~~~~~~~~~~~~~~~~
+
+    Pygments formatters.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+""" + +import re +import sys +import types +import fnmatch +from os.path import basename + +from pip._vendor.pygments.formatters._mapping import FORMATTERS +from pip._vendor.pygments.plugin import find_plugin_formatters +from pip._vendor.pygments.util import ClassNotFound + +__all__ = ['get_formatter_by_name', 'get_formatter_for_filename', + 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) + +_formatter_cache = {} # classes by name +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + + +def _load_formatters(module_name): + """Load a formatter (and all others in the module too).""" + mod = __import__(module_name, None, None, ['__all__']) + for formatter_name in mod.__all__: + cls = getattr(mod, formatter_name) + _formatter_cache[cls.name] = cls + + +def get_all_formatters(): + """Return a generator for all formatter classes.""" + # NB: this returns formatter classes, not info like get_all_lexers(). + for info in FORMATTERS.values(): + if info[1] not in _formatter_cache: + _load_formatters(info[0]) + yield _formatter_cache[info[1]] + for _, formatter in find_plugin_formatters(): + yield formatter + + +def find_formatter_class(alias): + """Lookup a formatter by alias. + + Returns None if not found. + """ + for module_name, name, aliases, _, _ in FORMATTERS.values(): + if alias in aliases: + if name not in _formatter_cache: + _load_formatters(module_name) + return _formatter_cache[name] + for _, cls in find_plugin_formatters(): + if alias in cls.aliases: + return cls + + +def get_formatter_by_name(_alias, **options): + """ + Return an instance of a :class:`.Formatter` subclass that has `alias` in its + aliases list. The formatter is given the `options` at its instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that + alias is found. + """ + cls = find_formatter_class(_alias) + if cls is None: + raise ClassNotFound(f"no formatter found for name {_alias!r}") + return cls(**options) + + +def load_formatter_from_file(filename, formattername="CustomFormatter", **options): + """ + Return a `Formatter` subclass instance loaded from the provided file, relative + to the current directory. + + The file is expected to contain a Formatter class named ``formattername`` + (by default, CustomFormatter). Users should be very careful with the input, because + this method is equivalent to running ``eval()`` on the input file. The formatter is + given the `options` at its instantiation. + + :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading + the formatter. + + .. 
+    """
+    try:
+        # This empty dict will contain the namespace for the exec'd file
+        custom_namespace = {}
+        with open(filename, 'rb') as f:
+            exec(f.read(), custom_namespace)
+        # Retrieve the class `formattername` from that namespace
+        if formattername not in custom_namespace:
+            raise ClassNotFound(f'no valid {formattername} class found in {filename}')
+        formatter_class = custom_namespace[formattername]
+        # And finally instantiate it with the options
+        return formatter_class(**options)
+    except OSError as err:
+        raise ClassNotFound(f'cannot read {filename}: {err}')
+    except ClassNotFound:
+        raise
+    except Exception as err:
+        raise ClassNotFound(f'error when loading custom formatter: {err}')
+
+
+def get_formatter_for_filename(fn, **options):
+    """
+    Return a :class:`.Formatter` subclass instance that has a filename pattern
+    matching `fn`. The formatter is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename
+    is found.
+    """
+    fn = basename(fn)
+    for modname, name, _, filenames, _ in FORMATTERS.values():
+        for filename in filenames:
+            if _fn_matches(fn, filename):
+                if name not in _formatter_cache:
+                    _load_formatters(modname)
+                return _formatter_cache[name](**options)
+    for _name, cls in find_plugin_formatters():
+        for filename in cls.filenames:
+            if _fn_matches(fn, filename):
+                return cls(**options)
+    raise ClassNotFound(f"no formatter found for file name {fn!r}")
+
+
+class _automodule(types.ModuleType):
+    """Automatically import formatters."""
+
+    def __getattr__(self, name):
+        info = FORMATTERS.get(name)
+        if info:
+            _load_formatters(info[0])
+            cls = _formatter_cache[info[1]]
+            setattr(self, name, cls)
+            return cls
+        raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..684269c
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc
new file mode 100644
index 0000000..ced83fe
Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py
new file mode 100644
index 0000000..72ca840
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/formatters/_mapping.py
@@ -0,0 +1,23 @@
+# Automatically generated by scripts/gen_mapfiles.py.
+# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead.
+
+FORMATTERS = {
+    'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
+    'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'),
+    'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags. By default, the content is enclosed in a ``<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option). The ``<div>``'s CSS class can be set by the `cssclass` option."),
+    'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
+    'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+    'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
+    'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
+    'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
+    'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
+    'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
+    'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
+    'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+    'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
+    'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+    'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'),
+}
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexer.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexer.py
new file mode 100644
index 0000000..c05aa81
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexer.py
@@ -0,0 +1,963 @@
+"""
+    pygments.lexer
+    ~~~~~~~~~~~~~~
+
+    Base lexer classes.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+""" + +import re +import sys +import time + +from pip._vendor.pygments.filter import apply_filters, Filter +from pip._vendor.pygments.filters import get_filter_by_name +from pip._vendor.pygments.token import Error, Text, Other, Whitespace, _TokenType +from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ + make_analysator, Future, guess_decode +from pip._vendor.pygments.regexopt import regex_opt + +__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer', + 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this', + 'default', 'words', 'line_re'] + +line_re = re.compile('.*?\n') + +_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'), + (b'\xff\xfe\0\0', 'utf-32'), + (b'\0\0\xfe\xff', 'utf-32be'), + (b'\xff\xfe', 'utf-16'), + (b'\xfe\xff', 'utf-16be')] + +_default_analyse = staticmethod(lambda x: 0.0) + + +class LexerMeta(type): + """ + This metaclass automagically converts ``analyse_text`` methods into + static methods which always return float values. + """ + + def __new__(mcs, name, bases, d): + if 'analyse_text' in d: + d['analyse_text'] = make_analysator(d['analyse_text']) + return type.__new__(mcs, name, bases, d) + + +class Lexer(metaclass=LexerMeta): + """ + Lexer for a specific language. + + See also :doc:`lexerdevelopment`, a high-level guide to writing + lexers. + + Lexer classes have attributes used for choosing the most appropriate + lexer based on various criteria. + + .. autoattribute:: name + :no-value: + .. autoattribute:: aliases + :no-value: + .. autoattribute:: filenames + :no-value: + .. autoattribute:: alias_filenames + .. autoattribute:: mimetypes + :no-value: + .. autoattribute:: priority + + Lexers included in Pygments should have two additional attributes: + + .. autoattribute:: url + :no-value: + .. autoattribute:: version_added + :no-value: + + Lexers included in Pygments may have additional attributes: + + .. autoattribute:: _example + :no-value: + + You can pass options to the constructor. The basic options recognized + by all lexers and processed by the base `Lexer` class are: + + ``stripnl`` + Strip leading and trailing newlines from the input (default: True). + ``stripall`` + Strip all leading and trailing whitespace from the input + (default: False). + ``ensurenl`` + Make sure that the input ends with a newline (default: True). This + is required for some lexers that consume input linewise. + + .. versionadded:: 1.3 + + ``tabsize`` + If given and greater than 0, expand tabs in the input (default: 0). + ``encoding`` + If given, must be an encoding name. This encoding will be used to + convert the input string to Unicode, if it is not already a Unicode + string (default: ``'guess'``, which uses a simple UTF-8 / Locale / + Latin1 detection. Can also be ``'chardet'`` to use the chardet + library, if it is installed. + ``inencoding`` + Overrides the ``encoding`` if given. + """ + + #: Full name of the lexer, in human-readable form + name = None + + #: A list of short, unique identifiers that can be used to look + #: up the lexer from a list, e.g., using `get_lexer_by_name()`. + aliases = [] + + #: A list of `fnmatch` patterns that match filenames which contain + #: content for this lexer. The patterns in this list should be unique among + #: all lexers. + filenames = [] + + #: A list of `fnmatch` patterns that match filenames which may or may not + #: contain content for this lexer. 
This list is used by the + #: :func:`.guess_lexer_for_filename()` function, to determine which lexers + #: are then included in guessing the correct one. That means that + #: e.g. every lexer for HTML and a template language should include + #: ``\*.html`` in this list. + alias_filenames = [] + + #: A list of MIME types for content that can be lexed with this lexer. + mimetypes = [] + + #: Priority, should multiple lexers match and no content is provided + priority = 0 + + #: URL of the language specification/definition. Used in the Pygments + #: documentation. Set to an empty string to disable. + url = None + + #: Version of Pygments in which the lexer was added. + version_added = None + + #: Example file name. Relative to the ``tests/examplefiles`` directory. + #: This is used by the documentation generator to show an example. + _example = None + + def __init__(self, **options): + """ + This constructor takes arbitrary options as keyword arguments. + Every subclass must first process its own options and then call + the `Lexer` constructor, since it processes the basic + options like `stripnl`. + + An example looks like this: + + .. sourcecode:: python + + def __init__(self, **options): + self.compress = options.get('compress', '') + Lexer.__init__(self, **options) + + As these options must all be specifiable as strings (due to the + command line usage), there are various utility functions + available to help with that, see `Utilities`_. + """ + self.options = options + self.stripnl = get_bool_opt(options, 'stripnl', True) + self.stripall = get_bool_opt(options, 'stripall', False) + self.ensurenl = get_bool_opt(options, 'ensurenl', True) + self.tabsize = get_int_opt(options, 'tabsize', 0) + self.encoding = options.get('encoding', 'guess') + self.encoding = options.get('inencoding') or self.encoding + self.filters = [] + for filter_ in get_list_opt(options, 'filters', ()): + self.add_filter(filter_) + + def __repr__(self): + if self.options: + return f'' + else: + return f'' + + def add_filter(self, filter_, **options): + """ + Add a new stream filter to this lexer. + """ + if not isinstance(filter_, Filter): + filter_ = get_filter_by_name(filter_, **options) + self.filters.append(filter_) + + def analyse_text(text): + """ + A static method which is called for lexer guessing. + + It should analyse the text and return a float in the range + from ``0.0`` to ``1.0``. If it returns ``0.0``, the lexer + will not be selected as the most probable one, if it returns + ``1.0``, it will be selected immediately. This is used by + `guess_lexer`. + + The `LexerMeta` metaclass automatically wraps this function so + that it works like a static method (no ``self`` or ``cls`` + parameter) and the return value is automatically converted to + `float`. If the return value is an object that is boolean `False` + it's the same as if the return values was ``0.0``. + """ + + def _preprocess_lexer_input(self, text): + """Apply preprocessing such as decoding the input, removing BOM and normalizing newlines.""" + + if not isinstance(text, str): + if self.encoding == 'guess': + text, _ = guess_decode(text) + elif self.encoding == 'chardet': + try: + # pip vendoring note: this code is not reachable by pip, + # removed import of chardet to make it clear. 
+ raise ImportError('chardet is not vendored by pip') + except ImportError as e: + raise ImportError('To enable chardet encoding guessing, ' + 'please install the chardet library ' + 'from http://chardet.feedparser.org/') from e + # check for BOM first + decoded = None + for bom, encoding in _encoding_map: + if text.startswith(bom): + decoded = text[len(bom):].decode(encoding, 'replace') + break + # no BOM found, so use chardet + if decoded is None: + enc = chardet.detect(text[:1024]) # Guess using first 1KB + decoded = text.decode(enc.get('encoding') or 'utf-8', + 'replace') + text = decoded + else: + text = text.decode(self.encoding) + if text.startswith('\ufeff'): + text = text[len('\ufeff'):] + else: + if text.startswith('\ufeff'): + text = text[len('\ufeff'):] + + # text now *is* a unicode string + text = text.replace('\r\n', '\n') + text = text.replace('\r', '\n') + if self.stripall: + text = text.strip() + elif self.stripnl: + text = text.strip('\n') + if self.tabsize > 0: + text = text.expandtabs(self.tabsize) + if self.ensurenl and not text.endswith('\n'): + text += '\n' + + return text + + def get_tokens(self, text, unfiltered=False): + """ + This method is the basic interface of a lexer. It is called by + the `highlight()` function. It must process the text and return an + iterable of ``(tokentype, value)`` pairs from `text`. + + Normally, you don't need to override this method. The default + implementation processes the options recognized by all lexers + (`stripnl`, `stripall` and so on), and then yields all tokens + from `get_tokens_unprocessed()`, with the ``index`` dropped. + + If `unfiltered` is set to `True`, the filtering mechanism is + bypassed even if filters are defined. + """ + text = self._preprocess_lexer_input(text) + + def streamer(): + for _, t, v in self.get_tokens_unprocessed(text): + yield t, v + stream = streamer() + if not unfiltered: + stream = apply_filters(stream, self.filters, self) + return stream + + def get_tokens_unprocessed(self, text): + """ + This method should process the text and return an iterable of + ``(index, tokentype, value)`` tuples where ``index`` is the starting + position of the token within the input text. + + It must be overridden by subclasses. It is recommended to + implement it as a generator to maximize effectiveness. + """ + raise NotImplementedError + + +class DelegatingLexer(Lexer): + """ + This lexer takes two lexer as arguments. A root lexer and + a language lexer. First everything is scanned using the language + lexer, afterwards all ``Other`` tokens are lexed using the root + lexer. + + The lexers from the ``template`` lexer package use this base lexer. 
+ """ + + def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options): + self.root_lexer = _root_lexer(**options) + self.language_lexer = _language_lexer(**options) + self.needle = _needle + Lexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + buffered = '' + insertions = [] + lng_buffer = [] + for i, t, v in self.language_lexer.get_tokens_unprocessed(text): + if t is self.needle: + if lng_buffer: + insertions.append((len(buffered), lng_buffer)) + lng_buffer = [] + buffered += v + else: + lng_buffer.append((i, t, v)) + if lng_buffer: + insertions.append((len(buffered), lng_buffer)) + return do_insertions(insertions, + self.root_lexer.get_tokens_unprocessed(buffered)) + + +# ------------------------------------------------------------------------------ +# RegexLexer and ExtendedRegexLexer +# + + +class include(str): # pylint: disable=invalid-name + """ + Indicates that a state should include rules from another state. + """ + pass + + +class _inherit: + """ + Indicates the a state should inherit from its superclass. + """ + def __repr__(self): + return 'inherit' + +inherit = _inherit() # pylint: disable=invalid-name + + +class combined(tuple): # pylint: disable=invalid-name + """ + Indicates a state combined from multiple states. + """ + + def __new__(cls, *args): + return tuple.__new__(cls, args) + + def __init__(self, *args): + # tuple.__init__ doesn't do anything + pass + + +class _PseudoMatch: + """ + A pseudo match object constructed from a string. + """ + + def __init__(self, start, text): + self._text = text + self._start = start + + def start(self, arg=None): + return self._start + + def end(self, arg=None): + return self._start + len(self._text) + + def group(self, arg=None): + if arg: + raise IndexError('No such group') + return self._text + + def groups(self): + return (self._text,) + + def groupdict(self): + return {} + + +def bygroups(*args): + """ + Callback that yields multiple actions for each group in the match. + """ + def callback(lexer, match, ctx=None): + for i, action in enumerate(args): + if action is None: + continue + elif type(action) is _TokenType: + data = match.group(i + 1) + if data: + yield match.start(i + 1), action, data + else: + data = match.group(i + 1) + if data is not None: + if ctx: + ctx.pos = match.start(i + 1) + for item in action(lexer, + _PseudoMatch(match.start(i + 1), data), ctx): + if item: + yield item + if ctx: + ctx.pos = match.end() + return callback + + +class _This: + """ + Special singleton used for indicating the caller class. + Used by ``using``. + """ + +this = _This() + + +def using(_other, **kwargs): + """ + Callback that processes the match with a different lexer. + + The keyword arguments are forwarded to the lexer, except `state` which + is handled separately. + + `state` specifies the state that the new lexer will start in, and can + be an enumerable such as ('root', 'inline', 'string') or a simple + string which is assumed to be on top of the root state. + + Note: For that to work, `_other` must not be an `ExtendedRegexLexer`. 
+ """ + gt_kwargs = {} + if 'state' in kwargs: + s = kwargs.pop('state') + if isinstance(s, (list, tuple)): + gt_kwargs['stack'] = s + else: + gt_kwargs['stack'] = ('root', s) + + if _other is this: + def callback(lexer, match, ctx=None): + # if keyword arguments are given the callback + # function has to create a new lexer instance + if kwargs: + # XXX: cache that somehow + kwargs.update(lexer.options) + lx = lexer.__class__(**kwargs) + else: + lx = lexer + s = match.start() + for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs): + yield i + s, t, v + if ctx: + ctx.pos = match.end() + else: + def callback(lexer, match, ctx=None): + # XXX: cache that somehow + kwargs.update(lexer.options) + lx = _other(**kwargs) + + s = match.start() + for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs): + yield i + s, t, v + if ctx: + ctx.pos = match.end() + return callback + + +class default: + """ + Indicates a state or state action (e.g. #pop) to apply. + For example default('#pop') is equivalent to ('', Token, '#pop') + Note that state tuples may be used as well. + + .. versionadded:: 2.0 + """ + def __init__(self, state): + self.state = state + + +class words(Future): + """ + Indicates a list of literal words that is transformed into an optimized + regex that matches any of the words. + + .. versionadded:: 2.0 + """ + def __init__(self, words, prefix='', suffix=''): + self.words = words + self.prefix = prefix + self.suffix = suffix + + def get(self): + return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix) + + +class RegexLexerMeta(LexerMeta): + """ + Metaclass for RegexLexer, creates the self._tokens attribute from + self.tokens on the first instantiation. + """ + + def _process_regex(cls, regex, rflags, state): + """Preprocess the regular expression component of a token definition.""" + if isinstance(regex, Future): + regex = regex.get() + return re.compile(regex, rflags).match + + def _process_token(cls, token): + """Preprocess the token component of a token definition.""" + assert type(token) is _TokenType or callable(token), \ + f'token type must be simple type or callable, not {token!r}' + return token + + def _process_new_state(cls, new_state, unprocessed, processed): + """Preprocess the state transition action of a token definition.""" + if isinstance(new_state, str): + # an existing state + if new_state == '#pop': + return -1 + elif new_state in unprocessed: + return (new_state,) + elif new_state == '#push': + return new_state + elif new_state[:5] == '#pop:': + return -int(new_state[5:]) + else: + assert False, f'unknown new state {new_state!r}' + elif isinstance(new_state, combined): + # combine a new state from existing ones + tmp_state = '_tmp_%d' % cls._tmpname + cls._tmpname += 1 + itokens = [] + for istate in new_state: + assert istate != new_state, f'circular state ref {istate!r}' + itokens.extend(cls._process_state(unprocessed, + processed, istate)) + processed[tmp_state] = itokens + return (tmp_state,) + elif isinstance(new_state, tuple): + # push more than one state + for istate in new_state: + assert (istate in unprocessed or + istate in ('#pop', '#push')), \ + 'unknown new state ' + istate + return new_state + else: + assert False, f'unknown new state def {new_state!r}' + + def _process_state(cls, unprocessed, processed, state): + """Preprocess a single state definition.""" + assert isinstance(state, str), f"wrong state name {state!r}" + assert state[0] != '#', f"invalid state name {state!r}" + if state in processed: + return 
processed[state] + tokens = processed[state] = [] + rflags = cls.flags + for tdef in unprocessed[state]: + if isinstance(tdef, include): + # it's a state reference + assert tdef != state, f"circular state reference {state!r}" + tokens.extend(cls._process_state(unprocessed, processed, + str(tdef))) + continue + if isinstance(tdef, _inherit): + # should be processed already, but may not in the case of: + # 1. the state has no counterpart in any parent + # 2. the state includes more than one 'inherit' + continue + if isinstance(tdef, default): + new_state = cls._process_new_state(tdef.state, unprocessed, processed) + tokens.append((re.compile('').match, None, new_state)) + continue + + assert type(tdef) is tuple, f"wrong rule def {tdef!r}" + + try: + rex = cls._process_regex(tdef[0], rflags, state) + except Exception as err: + raise ValueError(f"uncompilable regex {tdef[0]!r} in state {state!r} of {cls!r}: {err}") from err + + token = cls._process_token(tdef[1]) + + if len(tdef) == 2: + new_state = None + else: + new_state = cls._process_new_state(tdef[2], + unprocessed, processed) + + tokens.append((rex, token, new_state)) + return tokens + + def process_tokendef(cls, name, tokendefs=None): + """Preprocess a dictionary of token definitions.""" + processed = cls._all_tokens[name] = {} + tokendefs = tokendefs or cls.tokens[name] + for state in list(tokendefs): + cls._process_state(tokendefs, processed, state) + return processed + + def get_tokendefs(cls): + """ + Merge tokens from superclasses in MRO order, returning a single tokendef + dictionary. + + Any state that is not defined by a subclass will be inherited + automatically. States that *are* defined by subclasses will, by + default, override that state in the superclass. If a subclass wishes to + inherit definitions from a superclass, it can use the special value + "inherit", which will cause the superclass' state definition to be + included at that point in the state. + """ + tokens = {} + inheritable = {} + for c in cls.__mro__: + toks = c.__dict__.get('tokens', {}) + + for state, items in toks.items(): + curitems = tokens.get(state) + if curitems is None: + # N.b. because this is assigned by reference, sufficiently + # deep hierarchies are processed incrementally (e.g. for + # A(B), B(C), C(RegexLexer), B will be premodified so X(B) + # will not see any inherits in B). + tokens[state] = items + try: + inherit_ndx = items.index(inherit) + except ValueError: + continue + inheritable[state] = inherit_ndx + continue + + inherit_ndx = inheritable.pop(state, None) + if inherit_ndx is None: + continue + + # Replace the "inherit" value with the items + curitems[inherit_ndx:inherit_ndx+1] = items + try: + # N.b. this is the index in items (that is, the superclass + # copy), so offset required when storing below. + new_inh_ndx = items.index(inherit) + except ValueError: + pass + else: + inheritable[state] = inherit_ndx + new_inh_ndx + + return tokens + + def __call__(cls, *args, **kwds): + """Instantiate cls after preprocessing its token definitions.""" + if '_tokens' not in cls.__dict__: + cls._all_tokens = {} + cls._tmpname = 0 + if hasattr(cls, 'token_variants') and cls.token_variants: + # don't process yet + pass + else: + cls._tokens = cls.process_tokendef('', cls.get_tokendefs()) + + return type.__call__(cls, *args, **kwds) + + +class RegexLexer(Lexer, metaclass=RegexLexerMeta): + """ + Base for simple stateful regular expression-based lexers. 
+ Simplifies the lexing process so that you need only + provide a list of states and regular expressions. + """ + + #: Flags for compiling the regular expressions. + #: Defaults to MULTILINE. + flags = re.MULTILINE + + #: At all time there is a stack of states. Initially, the stack contains + #: a single state 'root'. The top of the stack is called "the current state". + #: + #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}`` + #: + #: ``new_state`` can be omitted to signify no state transition. + #: If ``new_state`` is a string, it is pushed on the stack. This ensure + #: the new current state is ``new_state``. + #: If ``new_state`` is a tuple of strings, all of those strings are pushed + #: on the stack and the current state will be the last element of the list. + #: ``new_state`` can also be ``combined('state1', 'state2', ...)`` + #: to signify a new, anonymous state combined from the rules of two + #: or more existing ones. + #: Furthermore, it can be '#pop' to signify going back one step in + #: the state stack, or '#push' to push the current state on the stack + #: again. Note that if you push while in a combined state, the combined + #: state itself is pushed, and not only the state in which the rule is + #: defined. + #: + #: The tuple can also be replaced with ``include('state')``, in which + #: case the rules from the state named by the string are included in the + #: current one. + tokens = {} + + def get_tokens_unprocessed(self, text, stack=('root',)): + """ + Split ``text`` into (tokentype, text) pairs. + + ``stack`` is the initial stack (default: ``['root']``) + """ + pos = 0 + tokendefs = self._tokens + statestack = list(stack) + statetokens = tokendefs[statestack[-1]] + while 1: + for rexmatch, action, new_state in statetokens: + m = rexmatch(text, pos) + if m: + if action is not None: + if type(action) is _TokenType: + yield pos, action, m.group() + else: + yield from action(self, m) + pos = m.end() + if new_state is not None: + # state transition + if isinstance(new_state, tuple): + for state in new_state: + if state == '#pop': + if len(statestack) > 1: + statestack.pop() + elif state == '#push': + statestack.append(statestack[-1]) + else: + statestack.append(state) + elif isinstance(new_state, int): + # pop, but keep at least one state on the stack + # (random code leading to unexpected pops should + # not allow exceptions) + if abs(new_state) >= len(statestack): + del statestack[1:] + else: + del statestack[new_state:] + elif new_state == '#push': + statestack.append(statestack[-1]) + else: + assert False, f"wrong state def: {new_state!r}" + statetokens = tokendefs[statestack[-1]] + break + else: + # We are here only if all state tokens have been considered + # and there was not a match on any of them. + try: + if text[pos] == '\n': + # at EOL, reset state to "root" + statestack = ['root'] + statetokens = tokendefs['root'] + yield pos, Whitespace, '\n' + pos += 1 + continue + yield pos, Error, text[pos] + pos += 1 + except IndexError: + break + + +class LexerContext: + """ + A helper object that holds lexer position data. + """ + + def __init__(self, text, pos, stack=None, end=None): + self.text = text + self.pos = pos + self.end = end or len(text) # end=0 not supported ;-) + self.stack = stack or ['root'] + + def __repr__(self): + return f'LexerContext({self.text!r}, {self.pos!r}, {self.stack!r})' + + +class ExtendedRegexLexer(RegexLexer): + """ + A RegexLexer that uses a context object to store its state. 
+ """ + + def get_tokens_unprocessed(self, text=None, context=None): + """ + Split ``text`` into (tokentype, text) pairs. + If ``context`` is given, use this lexer context instead. + """ + tokendefs = self._tokens + if not context: + ctx = LexerContext(text, 0) + statetokens = tokendefs['root'] + else: + ctx = context + statetokens = tokendefs[ctx.stack[-1]] + text = ctx.text + while 1: + for rexmatch, action, new_state in statetokens: + m = rexmatch(text, ctx.pos, ctx.end) + if m: + if action is not None: + if type(action) is _TokenType: + yield ctx.pos, action, m.group() + ctx.pos = m.end() + else: + yield from action(self, m, ctx) + if not new_state: + # altered the state stack? + statetokens = tokendefs[ctx.stack[-1]] + # CAUTION: callback must set ctx.pos! + if new_state is not None: + # state transition + if isinstance(new_state, tuple): + for state in new_state: + if state == '#pop': + if len(ctx.stack) > 1: + ctx.stack.pop() + elif state == '#push': + ctx.stack.append(ctx.stack[-1]) + else: + ctx.stack.append(state) + elif isinstance(new_state, int): + # see RegexLexer for why this check is made + if abs(new_state) >= len(ctx.stack): + del ctx.stack[1:] + else: + del ctx.stack[new_state:] + elif new_state == '#push': + ctx.stack.append(ctx.stack[-1]) + else: + assert False, f"wrong state def: {new_state!r}" + statetokens = tokendefs[ctx.stack[-1]] + break + else: + try: + if ctx.pos >= ctx.end: + break + if text[ctx.pos] == '\n': + # at EOL, reset state to "root" + ctx.stack = ['root'] + statetokens = tokendefs['root'] + yield ctx.pos, Text, '\n' + ctx.pos += 1 + continue + yield ctx.pos, Error, text[ctx.pos] + ctx.pos += 1 + except IndexError: + break + + +def do_insertions(insertions, tokens): + """ + Helper for lexers which must combine the results of several + sublexers. + + ``insertions`` is a list of ``(index, itokens)`` pairs. + Each ``itokens`` iterable should be inserted at position + ``index`` into the token stream given by the ``tokens`` + argument. + + The result is a combined token stream. + + TODO: clean up the code here. + """ + insertions = iter(insertions) + try: + index, itokens = next(insertions) + except StopIteration: + # no insertions + yield from tokens + return + + realpos = None + insleft = True + + # iterate over the token stream where we want to insert + # the tokens from the insertion list. + for i, t, v in tokens: + # first iteration. 
store the position of first item + if realpos is None: + realpos = i + oldi = 0 + while insleft and i + len(v) >= index: + tmpval = v[oldi:index - i] + if tmpval: + yield realpos, t, tmpval + realpos += len(tmpval) + for it_index, it_token, it_value in itokens: + yield realpos, it_token, it_value + realpos += len(it_value) + oldi = index - i + try: + index, itokens = next(insertions) + except StopIteration: + insleft = False + break # not strictly necessary + if oldi < len(v): + yield realpos, t, v[oldi:] + realpos += len(v) - oldi + + # leftover tokens + while insleft: + # no normal tokens, set realpos to zero + realpos = realpos or 0 + for p, t, v in itokens: + yield realpos, t, v + realpos += len(v) + try: + index, itokens = next(insertions) + except StopIteration: + insleft = False + break # not strictly necessary + + +class ProfilingRegexLexerMeta(RegexLexerMeta): + """Metaclass for ProfilingRegexLexer, collects regex timing info.""" + + def _process_regex(cls, regex, rflags, state): + if isinstance(regex, words): + rex = regex_opt(regex.words, prefix=regex.prefix, + suffix=regex.suffix) + else: + rex = regex + compiled = re.compile(rex, rflags) + + def match_func(text, pos, endpos=sys.maxsize): + info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0]) + t0 = time.time() + res = compiled.match(text, pos, endpos) + t1 = time.time() + info[0] += 1 + info[1] += t1 - t0 + return res + return match_func + + +class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta): + """Drop-in replacement for RegexLexer that does profiling of its regexes.""" + + _prof_data = [] + _prof_sort_index = 4 # defaults to time per call + + def get_tokens_unprocessed(self, text, stack=('root',)): + # this needs to be a stack, since using(this) will produce nested calls + self.__class__._prof_data.append({}) + yield from RegexLexer.get_tokens_unprocessed(self, text, stack) + rawdata = self.__class__._prof_data.pop() + data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65], + n, 1000 * t, 1000 * t / n) + for ((s, r), (n, t)) in rawdata.items()), + key=lambda x: x[self._prof_sort_index], + reverse=True) + sum_total = sum(x[3] for x in data) + + print() + print('Profiling result for %s lexing %d chars in %.3f ms' % + (self.__class__.__name__, len(text), sum_total)) + print('=' * 110) + print('%-20s %-64s ncalls tottime percall' % ('state', 'regex')) + print('-' * 110) + for d in data: + print('%-20s %-65s %5d %8.4f %8.4f' % d) + print('=' * 110) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__init__.py new file mode 100644 index 0000000..49184ec --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__init__.py @@ -0,0 +1,362 @@ +""" + pygments.lexers + ~~~~~~~~~~~~~~~ + + Pygments lexers. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
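+
+    The helpers below resolve lexers through the generated ``LEXERS``
+    mapping in ``_mapping.py`` and import lexer modules lazily, caching
+    classes in ``_lexer_cache`` on first use.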
+""" + +import re +import sys +import types +import fnmatch +from os.path import basename + +from pip._vendor.pygments.lexers._mapping import LEXERS +from pip._vendor.pygments.modeline import get_filetype_from_buffer +from pip._vendor.pygments.plugin import find_plugin_lexers +from pip._vendor.pygments.util import ClassNotFound, guess_decode + +COMPAT = { + 'Python3Lexer': 'PythonLexer', + 'Python3TracebackLexer': 'PythonTracebackLexer', + 'LeanLexer': 'Lean3Lexer', +} + +__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', + 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) + +_lexer_cache = {} +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + + +def _load_lexers(module_name): + """Load a lexer (and all others in the module too).""" + mod = __import__(module_name, None, None, ['__all__']) + for lexer_name in mod.__all__: + cls = getattr(mod, lexer_name) + _lexer_cache[cls.name] = cls + + +def get_all_lexers(plugins=True): + """Return a generator of tuples in the form ``(name, aliases, + filenames, mimetypes)`` of all know lexers. + + If *plugins* is true (the default), plugin lexers supplied by entrypoints + are also returned. Otherwise, only builtin ones are considered. + """ + for item in LEXERS.values(): + yield item[1:] + if plugins: + for lexer in find_plugin_lexers(): + yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes + + +def find_lexer_class(name): + """ + Return the `Lexer` subclass that with the *name* attribute as given by + the *name* argument. + """ + if name in _lexer_cache: + return _lexer_cache[name] + # lookup builtin lexers + for module_name, lname, aliases, _, _ in LEXERS.values(): + if name == lname: + _load_lexers(module_name) + return _lexer_cache[name] + # continue with lexers from setuptools entrypoints + for cls in find_plugin_lexers(): + if cls.name == name: + return cls + + +def find_lexer_class_by_name(_alias): + """ + Return the `Lexer` subclass that has `alias` in its aliases list, without + instantiating it. + + Like `get_lexer_by_name`, but does not instantiate the class. + + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. + + .. versionadded:: 2.2 + """ + if not _alias: + raise ClassNotFound(f'no lexer for alias {_alias!r} found') + # lookup builtin lexers + for module_name, name, aliases, _, _ in LEXERS.values(): + if _alias.lower() in aliases: + if name not in _lexer_cache: + _load_lexers(module_name) + return _lexer_cache[name] + # continue with lexers from setuptools entrypoints + for cls in find_plugin_lexers(): + if _alias.lower() in cls.aliases: + return cls + raise ClassNotFound(f'no lexer for alias {_alias!r} found') + + +def get_lexer_by_name(_alias, **options): + """ + Return an instance of a `Lexer` subclass that has `alias` in its + aliases list. The lexer is given the `options` at its + instantiation. + + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. 
+ """ + if not _alias: + raise ClassNotFound(f'no lexer for alias {_alias!r} found') + + # lookup builtin lexers + for module_name, name, aliases, _, _ in LEXERS.values(): + if _alias.lower() in aliases: + if name not in _lexer_cache: + _load_lexers(module_name) + return _lexer_cache[name](**options) + # continue with lexers from setuptools entrypoints + for cls in find_plugin_lexers(): + if _alias.lower() in cls.aliases: + return cls(**options) + raise ClassNotFound(f'no lexer for alias {_alias!r} found') + + +def load_lexer_from_file(filename, lexername="CustomLexer", **options): + """Load a lexer from a file. + + This method expects a file located relative to the current working + directory, which contains a Lexer class. By default, it expects the + Lexer to be name CustomLexer; you can specify your own class name + as the second argument to this function. + + Users should be very careful with the input, because this method + is equivalent to running eval on the input file. + + Raises ClassNotFound if there are any problems importing the Lexer. + + .. versionadded:: 2.2 + """ + try: + # This empty dict will contain the namespace for the exec'd file + custom_namespace = {} + with open(filename, 'rb') as f: + exec(f.read(), custom_namespace) + # Retrieve the class `lexername` from that namespace + if lexername not in custom_namespace: + raise ClassNotFound(f'no valid {lexername} class found in {filename}') + lexer_class = custom_namespace[lexername] + # And finally instantiate it with the options + return lexer_class(**options) + except OSError as err: + raise ClassNotFound(f'cannot read {filename}: {err}') + except ClassNotFound: + raise + except Exception as err: + raise ClassNotFound(f'error when loading custom lexer: {err}') + + +def find_lexer_class_for_filename(_fn, code=None): + """Get a lexer for a filename. + + If multiple lexers match the filename pattern, use ``analyse_text()`` to + figure out which one is more appropriate. + + Returns None if not found. + """ + matches = [] + fn = basename(_fn) + for modname, name, _, filenames, _ in LEXERS.values(): + for filename in filenames: + if _fn_matches(fn, filename): + if name not in _lexer_cache: + _load_lexers(modname) + matches.append((_lexer_cache[name], filename)) + for cls in find_plugin_lexers(): + for filename in cls.filenames: + if _fn_matches(fn, filename): + matches.append((cls, filename)) + + if isinstance(code, bytes): + # decode it, since all analyse_text functions expect unicode + code = guess_decode(code) + + def get_rating(info): + cls, filename = info + # explicit patterns get a bonus + bonus = '*' not in filename and 0.5 or 0 + # The class _always_ defines analyse_text because it's included in + # the Lexer class. The default implementation returns None which + # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py + # to find lexers which need it overridden. + if code: + return cls.analyse_text(code) + bonus, cls.__name__ + return cls.priority + bonus, cls.__name__ + + if matches: + matches.sort(key=get_rating) + # print "Possible lexers, after sort:", matches + return matches[-1][0] + + +def get_lexer_for_filename(_fn, code=None, **options): + """Get a lexer for a filename. + + Return a `Lexer` subclass instance that has a filename pattern + matching `fn`. The lexer is given the `options` at its + instantiation. + + Raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename + is found. 
+
+    If multiple lexers match the filename pattern, use their ``analyse_text()``
+    methods to figure out which one is more appropriate.
+    """
+    res = find_lexer_class_for_filename(_fn, code)
+    if not res:
+        raise ClassNotFound(f'no lexer for filename {_fn!r} found')
+    return res(**options)
+
+
+def get_lexer_for_mimetype(_mime, **options):
+    """
+    Return a `Lexer` subclass instance that has `mime` in its mimetype
+    list. The lexer is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that mimetype
+    is found.
+    """
+    for modname, name, _, _, mimetypes in LEXERS.values():
+        if _mime in mimetypes:
+            if name not in _lexer_cache:
+                _load_lexers(modname)
+            return _lexer_cache[name](**options)
+    for cls in find_plugin_lexers():
+        if _mime in cls.mimetypes:
+            return cls(**options)
+    raise ClassNotFound(f'no lexer for mimetype {_mime!r} found')
+
+
+def _iter_lexerclasses(plugins=True):
+    """Return an iterator over all lexer classes."""
+    for key in sorted(LEXERS):
+        module_name, name = LEXERS[key][:2]
+        if name not in _lexer_cache:
+            _load_lexers(module_name)
+        yield _lexer_cache[name]
+    if plugins:
+        yield from find_plugin_lexers()
+
+
+def guess_lexer_for_filename(_fn, _text, **options):
+    """
+    As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames`
+    or `alias_filenames` that matches `filename` are taken into consideration.
+
+    :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
+    handle the content.
+    """
+    fn = basename(_fn)
+    primary = {}
+    matching_lexers = set()
+    for lexer in _iter_lexerclasses():
+        for filename in lexer.filenames:
+            if _fn_matches(fn, filename):
+                matching_lexers.add(lexer)
+                primary[lexer] = True
+        for filename in lexer.alias_filenames:
+            if _fn_matches(fn, filename):
+                matching_lexers.add(lexer)
+                primary[lexer] = False
+    if not matching_lexers:
+        raise ClassNotFound(f'no lexer for filename {fn!r} found')
+    if len(matching_lexers) == 1:
+        return matching_lexers.pop()(**options)
+    result = []
+    for lexer in matching_lexers:
+        rv = lexer.analyse_text(_text)
+        if rv == 1.0:
+            return lexer(**options)
+        result.append((rv, lexer))
+
+    def type_sort(t):
+        # sort by:
+        # - analyse score
+        # - is primary filename pattern?
+        # - priority
+        # - last resort: class name
+        return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
+    result.sort(key=type_sort)
+
+    return result[-1][1](**options)
+
+
+def guess_lexer(_text, **options):
+    """
+    Return a `Lexer` subclass instance that's guessed from the text in
+    `text`. For that, the :meth:`.analyse_text()` method of every known lexer
+    class is called with the text as argument, and the lexer which returned the
+    highest value will be instantiated and returned.
+
+    :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
+    handle the content.
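+
+    For illustration only (results depend on the installed lexers)::
+
+        lexer = guess_lexer('#!/usr/bin/env python\nprint("hi")\n')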
+ """ + + if not isinstance(_text, str): + inencoding = options.get('inencoding', options.get('encoding')) + if inencoding: + _text = _text.decode(inencoding or 'utf8') + else: + _text, _ = guess_decode(_text) + + # try to get a vim modeline first + ft = get_filetype_from_buffer(_text) + + if ft is not None: + try: + return get_lexer_by_name(ft, **options) + except ClassNotFound: + pass + + best_lexer = [0.0, None] + for lexer in _iter_lexerclasses(): + rv = lexer.analyse_text(_text) + if rv == 1.0: + return lexer(**options) + if rv > best_lexer[0]: + best_lexer[:] = (rv, lexer) + if not best_lexer[0] or best_lexer[1] is None: + raise ClassNotFound('no lexer matching the text found') + return best_lexer[1](**options) + + +class _automodule(types.ModuleType): + """Automatically import lexers.""" + + def __getattr__(self, name): + info = LEXERS.get(name) + if info: + _load_lexers(info[0]) + cls = _lexer_cache[info[1]] + setattr(self, name, cls) + return cls + if name in COMPAT: + return getattr(self, COMPAT[name]) + raise AttributeError(name) + + +oldmod = sys.modules[__name__] +newmod = _automodule(__name__) +newmod.__dict__.update(oldmod.__dict__) +sys.modules[__name__] = newmod +del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..209f205 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc new file mode 100644 index 0000000..10ca3e3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc new file mode 100644 index 0000000..7e2a549 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/_mapping.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/_mapping.py new file mode 100644 index 0000000..c0d6a8a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/_mapping.py @@ -0,0 +1,602 @@ +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. 
+ +LEXERS = { + 'ABAPLexer': ('pip._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), + 'AMDGPULexer': ('pip._vendor.pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()), + 'APLLexer': ('pip._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()), + 'AbnfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), + 'ActionScript3Lexer': ('pip._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), + 'ActionScriptLexer': ('pip._vendor.pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), + 'AdaLexer': ('pip._vendor.pygments.lexers.ada', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), + 'AdlLexer': ('pip._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()), + 'AgdaLexer': ('pip._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), + 'AheuiLexer': ('pip._vendor.pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()), + 'AlloyLexer': ('pip._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), + 'AmbientTalkLexer': ('pip._vendor.pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)), + 'AmplLexer': ('pip._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()), + 'Angular2HtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), + 'Angular2Lexer': ('pip._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), + 'AntlrActionScriptLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()), + 'AntlrCSharpLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), + 'AntlrCppLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), + 'AntlrJavaLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()), + 'AntlrLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()), + 'AntlrObjectiveCLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()), + 'AntlrPerlLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), + 'AntlrPythonLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), + 'AntlrRubyLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), + 'ApacheConfLexer': ('pip._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), + 'AppleScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), + 'ArduinoLexer': ('pip._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), + 'ArrowLexer': ('pip._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), 
()), + 'ArturoLexer': ('pip._vendor.pygments.lexers.arturo', 'Arturo', ('arturo', 'art'), ('*.art',), ()), + 'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature', 'application/pem-certificate-chain')), + 'Asn1Lexer': ('pip._vendor.pygments.lexers.asn1', 'ASN.1', ('asn1',), ('*.asn1',), ()), + 'AspectJLexer': ('pip._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), + 'AsymptoteLexer': ('pip._vendor.pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)), + 'AugeasLexer': ('pip._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), + 'AutoItLexer': ('pip._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), + 'AutohotkeyLexer': ('pip._vendor.pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), + 'AwkLexer': ('pip._vendor.pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), + 'BBCBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), + 'BBCodeLexer': ('pip._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), + 'BCLexer': ('pip._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), + 'BQNLexer': ('pip._vendor.pygments.lexers.bqn', 'BQN', ('bqn',), ('*.bqn',), ()), + 'BSTLexer': ('pip._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), + 'BareLexer': ('pip._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), + 'BaseMakefileLexer': ('pip._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), + 'BashLexer': ('pip._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')), + 'BashSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), + 'BatchLexer': ('pip._vendor.pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), + 'BddLexer': ('pip._vendor.pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)), + 'BefungeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), + 'BerryLexer': ('pip._vendor.pygments.lexers.berry', 'Berry', ('berry', 'be'), ('*.be',), ('text/x-berry', 'application/x-berry')), + 'BibTeXLexer': ('pip._vendor.pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)), + 'BlitzBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), + 'BlitzMaxLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + 'BlueprintLexer': ('pip._vendor.pygments.lexers.blueprint', 'Blueprint', ('blueprint',), ('*.blp',), ('text/x-blueprint',)), + 'BnfLexer': 
('pip._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), + 'BoaLexer': ('pip._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()), + 'BooLexer': ('pip._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), + 'BoogieLexer': ('pip._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()), + 'BrainfuckLexer': ('pip._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), + 'BugsLexer': ('pip._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), + 'CAmkESLexer': ('pip._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()), + 'CLexer': ('pip._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc', '*.x[bp]m'), ('text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap')), + 'CMakeLexer': ('pip._vendor.pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), + 'CObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), + 'CPSALexer': ('pip._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()), + 'CSSUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'CSS+UL4', ('css+ul4',), ('*.cssul4',), ()), + 'CSharpAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), + 'CSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'C#', ('csharp', 'c#', 'cs'), ('*.cs',), ('text/x-csharp',)), + 'Ca65Lexer': ('pip._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()), + 'CadlLexer': ('pip._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), + 'CapDLLexer': ('pip._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), + 'CapnProtoLexer': ('pip._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), + 'CarbonLexer': ('pip._vendor.pygments.lexers.carbon', 'Carbon', ('carbon',), ('*.carbon',), ('text/x-carbon',)), + 'CbmBasicV2Lexer': ('pip._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), + 'CddlLexer': ('pip._vendor.pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)), + 'CeylonLexer': ('pip._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), + 'Cfengine3Lexer': ('pip._vendor.pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), + 'ChaiscriptLexer': ('pip._vendor.pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), + 'ChapelLexer': ('pip._vendor.pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), + 'CharmciLexer': ('pip._vendor.pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()), + 'CheetahHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), + 'CheetahJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), + 'CheetahLexer': ('pip._vendor.pygments.lexers.templates', 'Cheetah', 
('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), + 'CheetahXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), + 'CirruLexer': ('pip._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)), + 'ClayLexer': ('pip._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), + 'CleanLexer': ('pip._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), + 'ClojureLexer': ('pip._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj', '*.cljc'), ('text/x-clojure', 'application/x-clojure')), + 'ClojureScriptLexer': ('pip._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), + 'CobolFreeformatLexer': ('pip._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), + 'CobolLexer': ('pip._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), + 'CodeQLLexer': ('pip._vendor.pygments.lexers.codeql', 'CodeQL', ('codeql', 'ql'), ('*.ql', '*.qll'), ()), + 'CoffeeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)), + 'ColdfusionCFCLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), + 'ColdfusionHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), + 'ColdfusionLexer': ('pip._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), + 'Comal80Lexer': ('pip._vendor.pygments.lexers.comal', 'COMAL-80', ('comal', 'comal80'), ('*.cml', '*.comal'), ()), + 'CommonLispLexer': ('pip._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)), + 'ComponentPascalLexer': ('pip._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), + 'CoqLexer': ('pip._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), + 'CplintLexer': ('pip._vendor.pygments.lexers.cplint', 'cplint', ('cplint',), ('*.ecl', '*.prolog', '*.pro', '*.pl', '*.P', '*.lpad', '*.cpl'), ('text/x-cplint',)), + 'CppLexer': ('pip._vendor.pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP', '*.tpp'), ('text/x-c++hdr', 'text/x-c++src')), + 'CppObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), + 'CrmshLexer': ('pip._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), + 'CrocLexer': ('pip._vendor.pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), + 'CryptolLexer': ('pip._vendor.pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), + 'CrystalLexer': ('pip._vendor.pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)), + 'CsoundDocumentLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), + 'CsoundOrchestraLexer': 
('pip._vendor.pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()), + 'CsoundScoreLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), + 'CssDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), ('*.css.j2', '*.css.jinja2'), ('text/css+django', 'text/css+jinja')), + 'CssErbLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)), + 'CssGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), + 'CssLexer': ('pip._vendor.pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)), + 'CssPhpLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), + 'CssSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), + 'CudaLexer': ('pip._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), + 'CypherLexer': ('pip._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()), + 'CythonLexer': ('pip._vendor.pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), + 'DLexer': ('pip._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), + 'DObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), + 'DarcsPatchLexer': ('pip._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), + 'DartLexer': ('pip._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), + 'Dasm16Lexer': ('pip._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), + 'DaxLexer': ('pip._vendor.pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()), + 'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()), + 'DebianSourcesLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sources file', ('debian.sources',), ('*.sources',), ()), + 'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), + 'DesktopLexer': ('pip._vendor.pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ('application/x-desktop',)), + 'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), + 'DgLexer': ('pip._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), + 'DiffLexer': ('pip._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), + 'DjangoLexer': ('pip._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), + 'DnsZoneLexer': ('pip._vendor.pygments.lexers.dns', 'Zone', ('zone',), ('*.zone',), ('text/dns',)), + 'DockerLexer': ('pip._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), + 'DtdLexer': ('pip._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), + 'DuelLexer': ('pip._vendor.pygments.lexers.webmisc', 
'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), + 'DylanConsoleLexer': ('pip._vendor.pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), + 'DylanLexer': ('pip._vendor.pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), + 'DylanLidLexer': ('pip._vendor.pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), + 'ECLLexer': ('pip._vendor.pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), + 'ECLexer': ('pip._vendor.pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), + 'EarlGreyLexer': ('pip._vendor.pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)), + 'EasytrieveLexer': ('pip._vendor.pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), + 'EbnfLexer': ('pip._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), + 'EiffelLexer': ('pip._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), + 'ElixirConsoleLexer': ('pip._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), + 'ElixirLexer': ('pip._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)), + 'ElmLexer': ('pip._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), + 'ElpiLexer': ('pip._vendor.pygments.lexers.elpi', 'Elpi', ('elpi',), ('*.elpi',), ('text/x-elpi',)), + 'EmacsLispLexer': ('pip._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), + 'EmailLexer': ('pip._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)), + 'ErbLexer': ('pip._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), + 'ErlangLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), + 'ErlangShellLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), + 'EvoqueHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), (), ('text/html+evoque',)), + 'EvoqueLexer': ('pip._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), + 'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), (), ('application/xml+evoque',)), + 'ExeclineLexer': ('pip._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), + 'EzhilLexer': ('pip._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), + 'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi', '*.fsx'), ('text/x-fsharp',)), + 'FStarLexer': ('pip._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)), + 'FactorLexer': ('pip._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), + 'FancyLexer': ('pip._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), + 'FantomLexer': ('pip._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), 
('application/x-fantom',)), + 'FelixLexer': ('pip._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + 'FennelLexer': ('pip._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), + 'FiftLexer': ('pip._vendor.pygments.lexers.fift', 'Fift', ('fift', 'fif'), ('*.fif',), ()), + 'FishShellLexer': ('pip._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), + 'FlatlineLexer': ('pip._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), + 'FloScriptLexer': ('pip._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), + 'ForthLexer': ('pip._vendor.pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)), + 'FortranFixedLexer': ('pip._vendor.pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()), + 'FortranLexer': ('pip._vendor.pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), + 'FoxProLexer': ('pip._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), + 'FreeFemLexer': ('pip._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), + 'FuncLexer': ('pip._vendor.pygments.lexers.func', 'FunC', ('func', 'fc'), ('*.fc', '*.func'), ()), + 'FutharkLexer': ('pip._vendor.pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)), + 'GAPConsoleLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP session', ('gap-console', 'gap-repl'), ('*.tst',), ()), + 'GAPLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), + 'GDScriptLexer': ('pip._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), + 'GLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), + 'GSQLLexer': ('pip._vendor.pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()), + 'GasLexer': ('pip._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), + 'GcodeLexer': ('pip._vendor.pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()), + 'GenshiLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), + 'GenshiTextLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), + 'GettextLexer': ('pip._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), + 'GherkinLexer': ('pip._vendor.pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)), + 'GleamLexer': ('pip._vendor.pygments.lexers.gleam', 'Gleam', ('gleam',), ('*.gleam',), ('text/x-gleam',)), + 'GnuplotLexer': ('pip._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), + 'GoLexer': ('pip._vendor.pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)), + 'GoloLexer': ('pip._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), + 'GoodDataCLLexer': ('pip._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), 
('text/x-gooddata-cl',)), + 'GoogleSqlLexer': ('pip._vendor.pygments.lexers.sql', 'GoogleSQL', ('googlesql', 'zetasql'), ('*.googlesql', '*.googlesql.sql'), ('text/x-google-sql', 'text/x-google-sql-aux')), + 'GosuLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), + 'GosuTemplateLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), + 'GraphQLLexer': ('pip._vendor.pygments.lexers.graphql', 'GraphQL', ('graphql',), ('*.graphql',), ()), + 'GraphvizLexer': ('pip._vendor.pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')), + 'GroffLexer': ('pip._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')), + 'GroovyLexer': ('pip._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), + 'HLSLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), + 'HTMLUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'HTML+UL4', ('html+ul4',), ('*.htmlul4',), ()), + 'HamlLexer': ('pip._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), + 'HandlebarsHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), + 'HandlebarsLexer': ('pip._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), + 'HareLexer': ('pip._vendor.pygments.lexers.hare', 'Hare', ('hare',), ('*.ha',), ('text/x-hare',)), + 'HaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), + 'HaxeLexer': ('pip._vendor.pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), + 'HexdumpLexer': ('pip._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), + 'HsailLexer': ('pip._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), + 'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), ('*Spec.hs',), ()), + 'HtmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), ('*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2'), ('text/html+django', 'text/html+jinja')), + 'HtmlGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), + 'HtmlLexer': ('pip._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), + 'HtmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), + 'HtmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), + 'HttpLexer': ('pip._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), + 'HxmlLexer': ('pip._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), + 'HyLexer': ('pip._vendor.pygments.lexers.lisp', 'Hy', ('hylang', 'hy'), ('*.hy',), ('text/x-hy', 'application/x-hy')), + 'HybrisLexer': 
('pip._vendor.pygments.lexers.scripting', 'Hybris', ('hybris',), ('*.hyb',), ('text/x-hybris', 'application/x-hybris')), + 'IDLLexer': ('pip._vendor.pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), + 'IconLexer': ('pip._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()), + 'IdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), + 'IgorLexer': ('pip._vendor.pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), + 'Inform6Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), + 'Inform6TemplateLexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), + 'Inform7Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), + 'IniLexer': ('pip._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig'), ('text/x-ini', 'text/inf')), + 'IoLexer': ('pip._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), + 'IokeLexer': ('pip._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), + 'IrcLogsLexer': ('pip._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), + 'IsabelleLexer': ('pip._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), + 'JLexer': ('pip._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), + 'JMESPathLexer': ('pip._vendor.pygments.lexers.jmespath', 'JMESPath', ('jmespath', 'jp'), ('*.jp',), ()), + 'JSLTLexer': ('pip._vendor.pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)), + 'JagsLexer': ('pip._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), + 'JanetLexer': ('pip._vendor.pygments.lexers.lisp', 'Janet', ('janet',), ('*.janet', '*.jdn'), ('text/x-janet', 'application/x-janet')), + 'JasminLexer': ('pip._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), + 'JavaLexer': ('pip._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), + 'JavascriptDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), ('*.js.j2', '*.js.jinja2'), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), + 'JavascriptErbLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), + 'JavascriptGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), + 'JavascriptLexer': ('pip._vendor.pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), + 'JavascriptPhpLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), + 
'JavascriptSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), + 'JavascriptUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Javascript+UL4', ('js+ul4',), ('*.jsul4',), ()), + 'JclLexer': ('pip._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), + 'JsgfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')), + 'Json5Lexer': ('pip._vendor.pygments.lexers.json5', 'JSON5', ('json5',), ('*.json5',), ()), + 'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), + 'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), + 'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', '*.jsonl', '*.ndjson', 'Pipfile.lock'), ('application/json', 'application/json-object', 'application/x-ndjson', 'application/jsonl', 'application/json-seq')), + 'JsonnetLexer': ('pip._vendor.pygments.lexers.jsonnet', 'Jsonnet', ('jsonnet',), ('*.jsonnet', '*.libsonnet'), ()), + 'JspLexer': ('pip._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), + 'JsxLexer': ('pip._vendor.pygments.lexers.jsx', 'JSX', ('jsx', 'react'), ('*.jsx', '*.react'), ('text/jsx', 'text/typescript-jsx')), + 'JuliaConsoleLexer': ('pip._vendor.pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()), + 'JuliaLexer': ('pip._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), + 'JuttleLexer': ('pip._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), + 'KLexer': ('pip._vendor.pygments.lexers.q', 'K', ('k',), ('*.k',), ()), + 'KalLexer': ('pip._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), + 'KconfigLexer': ('pip._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), + 'KernelLogLexer': ('pip._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()), + 'KokaLexer': ('pip._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), + 'KotlinLexer': ('pip._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)), + 'KuinLexer': ('pip._vendor.pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()), + 'KustoLexer': ('pip._vendor.pygments.lexers.kusto', 'Kusto', ('kql', 'kusto'), ('*.kql', '*.kusto', '.csl'), ()), + 'LSLLexer': ('pip._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), + 'LassoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), + 'LassoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), + 'LassoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), + 
'LassoLexer': ('pip._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), + 'LassoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), + 'LdaprcLexer': ('pip._vendor.pygments.lexers.ldap', 'LDAP configuration file', ('ldapconf', 'ldaprc'), ('.ldaprc', 'ldaprc', 'ldap.conf'), ('text/x-ldapconf',)), + 'LdifLexer': ('pip._vendor.pygments.lexers.ldap', 'LDIF', ('ldif',), ('*.ldif',), ('text/x-ldif',)), + 'Lean3Lexer': ('pip._vendor.pygments.lexers.lean', 'Lean', ('lean', 'lean3'), ('*.lean',), ('text/x-lean', 'text/x-lean3')), + 'Lean4Lexer': ('pip._vendor.pygments.lexers.lean', 'Lean4', ('lean4',), ('*.lean',), ('text/x-lean4',)), + 'LessCssLexer': ('pip._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), + 'LighttpdConfLexer': ('pip._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)), + 'LilyPondLexer': ('pip._vendor.pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()), + 'LimboLexer': ('pip._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), + 'LiquidLexer': ('pip._vendor.pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), + 'LiterateAgdaLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)), + 'LiterateCryptolLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)), + 'LiterateHaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)), + 'LiterateIdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)), + 'LiveScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)), + 'LlvmLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), + 'LlvmMirBodyLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()), + 'LlvmMirLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()), + 'LogosLexer': ('pip._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), + 'LogtalkLexer': ('pip._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), + 'LuaLexer': ('pip._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), + 'LuauLexer': ('pip._vendor.pygments.lexers.scripting', 'Luau', ('luau',), ('*.luau',), ()), + 'MCFunctionLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)), + 'MCSchemaLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)), + 'MIMELexer': ('pip._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), + 'MIPSLexer': ('pip._vendor.pygments.lexers.mips', 'MIPS', ('mips',), ('*.mips', '*.MIPS'), ()), + 'MOOCodeLexer': ('pip._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), 
('text/x-moocode',)), + 'MSDOSSessionLexer': ('pip._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), + 'Macaulay2Lexer': ('pip._vendor.pygments.lexers.macaulay2', 'Macaulay2', ('macaulay2',), ('*.m2',), ()), + 'MakefileLexer': ('pip._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), + 'MakoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), + 'MakoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), + 'MakoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), + 'MakoLexer': ('pip._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), + 'MakoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), + 'MapleLexer': ('pip._vendor.pygments.lexers.maple', 'Maple', ('maple',), ('*.mpl', '*.mi', '*.mm'), ('text/x-maple',)), + 'MaqlLexer': ('pip._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), + 'MarkdownLexer': ('pip._vendor.pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)), + 'MaskLexer': ('pip._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), + 'MasonLexer': ('pip._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), + 'MathematicaLexer': ('pip._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), + 'MatlabLexer': ('pip._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), + 'MatlabSessionLexer': ('pip._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), + 'MaximaLexer': ('pip._vendor.pygments.lexers.maxima', 'Maxima', ('maxima', 'macsyma'), ('*.mac', '*.max'), ()), + 'MesonLexer': ('pip._vendor.pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)), + 'MiniDLexer': ('pip._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), + 'MiniScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')), + 'ModelicaLexer': ('pip._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), + 'Modula2Lexer': ('pip._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), + 'MoinWikiLexer': ('pip._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), + 'MojoLexer': ('pip._vendor.pygments.lexers.mojo', 'Mojo', ('mojo', '🔥'), ('*.mojo', '*.🔥'), ('text/x-mojo', 'application/x-mojo')), + 'MonkeyLexer': ('pip._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), + 'MonteLexer': ('pip._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()), + 
'MoonScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), + 'MoselLexer': ('pip._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()), + 'MozPreprocCssLexer': ('pip._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()), + 'MozPreprocHashLexer': ('pip._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()), + 'MozPreprocJavascriptLexer': ('pip._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()), + 'MozPreprocPercentLexer': ('pip._vendor.pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()), + 'MozPreprocXulLexer': ('pip._vendor.pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()), + 'MqlLexer': ('pip._vendor.pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), + 'MscgenLexer': ('pip._vendor.pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), + 'MuPADLexer': ('pip._vendor.pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()), + 'MxmlLexer': ('pip._vendor.pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()), + 'MySqlLexer': ('pip._vendor.pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), + 'MyghtyCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)), + 'MyghtyHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)), + 'MyghtyJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')), + 'MyghtyLexer': ('pip._vendor.pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)), + 'MyghtyXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), + 'NCLLexer': ('pip._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), + 'NSISLexer': ('pip._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), + 'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM', '*.nasm'), ('text/x-nasm',)), + 'NasmObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), + 'NemerleLexer': ('pip._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), + 'NesCLexer': ('pip._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), + 'NestedTextLexer': ('pip._vendor.pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()), + 'NewLispLexer': ('pip._vendor.pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')), + 'NewspeakLexer': ('pip._vendor.pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), + 'NginxConfLexer': ('pip._vendor.pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)), + 'NimrodLexer': ('pip._vendor.pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)), + 'NitLexer': ('pip._vendor.pygments.lexers.nit', 
'Nit', ('nit',), ('*.nit',), ()), + 'NixLexer': ('pip._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), + 'NodeConsoleLexer': ('pip._vendor.pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)), + 'NotmuchLexer': ('pip._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()), + 'NuSMVLexer': ('pip._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), + 'NumPyLexer': ('pip._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()), + 'NumbaIRLexer': ('pip._vendor.pygments.lexers.numbair', 'Numba_IR', ('numba_ir', 'numbair'), ('*.numba_ir',), ('text/x-numba_ir', 'text/x-numbair')), + 'ObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), + 'ObjectiveCLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), + 'ObjectiveCppLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), + 'ObjectiveJLexer': ('pip._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), + 'OcamlLexer': ('pip._vendor.pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), + 'OctaveLexer': ('pip._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)), + 'OdinLexer': ('pip._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)), + 'OmgIdlLexer': ('pip._vendor.pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()), + 'OocLexer': ('pip._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), + 'OpaLexer': ('pip._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), + 'OpenEdgeLexer': ('pip._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), + 'OpenScadLexer': ('pip._vendor.pygments.lexers.openscad', 'OpenSCAD', ('openscad',), ('*.scad',), ('application/x-openscad',)), + 'OrgLexer': ('pip._vendor.pygments.lexers.markup', 'Org Mode', ('org', 'orgmode', 'org-mode'), ('*.org',), ('text/org',)), + 'OutputLexer': ('pip._vendor.pygments.lexers.special', 'Text output', ('output',), (), ()), + 'PacmanConfLexer': ('pip._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), + 'PanLexer': ('pip._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), + 'ParaSailLexer': ('pip._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), + 'PawnLexer': ('pip._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), + 'PddlLexer': ('pip._vendor.pygments.lexers.pddl', 'PDDL', ('pddl',), ('*.pddl',), ()), + 'PegLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), + 'Perl6Lexer': ('pip._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), + 'PerlLexer': ('pip._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', 
'*.perl'), ('text/x-perl', 'application/x-perl')), + 'PhixLexer': ('pip._vendor.pygments.lexers.phix', 'Phix', ('phix',), ('*.exw',), ('text/x-phix',)), + 'PhpLexer': ('pip._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), + 'PigLexer': ('pip._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), + 'PikeLexer': ('pip._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), + 'PkgConfigLexer': ('pip._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), + 'PlPgsqlLexer': ('pip._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), + 'PointlessLexer': ('pip._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()), + 'PonyLexer': ('pip._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), + 'PortugolLexer': ('pip._vendor.pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()), + 'PostScriptLexer': ('pip._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), + 'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), + 'PostgresExplainLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL EXPLAIN dialect', ('postgres-explain',), ('*.explain',), ('text/x-postgresql-explain',)), + 'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), + 'PovrayLexer': ('pip._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), + 'PowerShellLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), + 'PowerShellSessionLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()), + 'PraatLexer': ('pip._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), + 'ProcfileLexer': ('pip._vendor.pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()), + 'PrologLexer': ('pip._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), + 'PromQLLexer': ('pip._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()), + 'PromelaLexer': ('pip._vendor.pygments.lexers.c_like', 'Promela', ('promela',), ('*.pml', '*.prom', '*.prm', '*.promela', '*.pr', '*.pm'), ('text/x-promela',)), + 'PropertiesLexer': ('pip._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), + 'ProtoBufLexer': ('pip._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), + 'PrqlLexer': ('pip._vendor.pygments.lexers.prql', 'PRQL', ('prql',), ('*.prql',), ('application/prql', 'application/x-prql')), + 'PsyshConsoleLexer': ('pip._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()), + 'PtxLexer': ('pip._vendor.pygments.lexers.ptx', 'PTX', ('ptx',), ('*.ptx',), ('text/x-ptx',)), + 'PugLexer': ('pip._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), + 'PuppetLexer': ('pip._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), + 'PyPyLogLexer': 
('pip._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), + 'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), + 'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), + 'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon', 'python-console'), (), ('text/x-python-doctest',)), + 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark', 'pyi'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), + 'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), + 'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()), + 'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), + 'QLexer': ('pip._vendor.pygments.lexers.q', 'Q', ('q',), ('*.q',), ()), + 'QVToLexer': ('pip._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), + 'QlikLexer': ('pip._vendor.pygments.lexers.qlik', 'Qlik', ('qlik', 'qlikview', 'qliksense', 'qlikscript'), ('*.qvs', '*.qvw'), ()), + 'QmlLexer': ('pip._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), + 'RConsoleLexer': ('pip._vendor.pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), + 'RNCCompactLexer': ('pip._vendor.pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()), + 'RPMSpecLexer': ('pip._vendor.pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), + 'RacketLexer': ('pip._vendor.pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), + 'RagelCLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), + 'RagelCppLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), + 'RagelDLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), + 'RagelEmbeddedLexer': ('pip._vendor.pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()), + 'RagelJavaLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()), + 'RagelLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()), + 'RagelObjectiveCLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()), + 'RagelRubyLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), + 'RawTokenLexer': ('pip._vendor.pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)), + 'RdLexer': ('pip._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), + 'ReasonLexer': ('pip._vendor.pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)), + 'RebolLexer': 
('pip._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), + 'RedLexer': ('pip._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), + 'RedcodeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), + 'RegeditLexer': ('pip._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), + 'RegoLexer': ('pip._vendor.pygments.lexers.rego', 'Rego', ('rego',), ('*.rego',), ('text/x-rego',)), + 'ResourceLexer': ('pip._vendor.pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()), + 'RexxLexer': ('pip._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), + 'RhtmlLexer': ('pip._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), + 'RideLexer': ('pip._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)), + 'RitaLexer': ('pip._vendor.pygments.lexers.rita', 'Rita', ('rita',), ('*.rita',), ('text/rita',)), + 'RoboconfGraphLexer': ('pip._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), + 'RoboconfInstancesLexer': ('pip._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), + 'RobotFrameworkLexer': ('pip._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot', '*.resource'), ('text/x-robotframework',)), + 'RqlLexer': ('pip._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), + 'RslLexer': ('pip._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), + 'RstLexer': ('pip._vendor.pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), + 'RtsLexer': ('pip._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()), + 'RubyConsoleLexer': ('pip._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), + 'RubyLexer': ('pip._vendor.pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile'), ('text/x-ruby', 'application/x-ruby')), + 'RustLexer': ('pip._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')), + 'SASLexer': ('pip._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), + 'SLexer': ('pip._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), + 'SMLLexer': ('pip._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), + 'SNBTLexer': ('pip._vendor.pygments.lexers.minecraft', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)), + 'SarlLexer': ('pip._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), + 'SassLexer': ('pip._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), + 'SaviLexer': ('pip._vendor.pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()), + 'ScalaLexer': 
('pip._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), + 'ScamlLexer': ('pip._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), + 'ScdocLexer': ('pip._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()), + 'SchemeLexer': ('pip._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), + 'ScilabLexer': ('pip._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), + 'ScssLexer': ('pip._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), + 'SedLexer': ('pip._vendor.pygments.lexers.textedit', 'Sed', ('sed', 'gsed', 'ssed'), ('*.sed', '*.[gs]sed'), ('text/x-sed',)), + 'ShExCLexer': ('pip._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)), + 'ShenLexer': ('pip._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), + 'SieveLexer': ('pip._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()), + 'SilverLexer': ('pip._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), + 'SingularityLexer': ('pip._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()), + 'SlashLexer': ('pip._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()), + 'SlimLexer': ('pip._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), + 'SlurmBashLexer': ('pip._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), + 'SmaliLexer': ('pip._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), + 'SmalltalkLexer': ('pip._vendor.pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), + 'SmartGameFormatLexer': ('pip._vendor.pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()), + 'SmartyLexer': ('pip._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), + 'SmithyLexer': ('pip._vendor.pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()), + 'SnobolLexer': ('pip._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), + 'SnowballLexer': ('pip._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), + 'SolidityLexer': ('pip._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()), + 'SoongLexer': ('pip._vendor.pygments.lexers.soong', 'Soong', ('androidbp', 'bp', 'soong'), ('Android.bp',), ()), + 'SophiaLexer': ('pip._vendor.pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()), + 'SourcePawnLexer': ('pip._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), + 'SourcesListLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()), + 'SparqlLexer': ('pip._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), + 'SpiceLexer': ('pip._vendor.pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)), + 'SqlJinjaLexer': ('pip._vendor.pygments.lexers.templates', 'SQL+Jinja', ('sql+jinja',), ('*.sql', '*.sql.j2', '*.sql.jinja2'), ()), + 'SqlLexer': ('pip._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), + 
'SqliteConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), + 'SquidConfLexer': ('pip._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), + 'SrcinfoLexer': ('pip._vendor.pygments.lexers.srcinfo', 'Srcinfo', ('srcinfo',), ('.SRCINFO',), ()), + 'SspLexer': ('pip._vendor.pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), + 'StanLexer': ('pip._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), + 'StataLexer': ('pip._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')), + 'SuperColliderLexer': ('pip._vendor.pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')), + 'SwiftLexer': ('pip._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), + 'SwigLexer': ('pip._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), + 'SystemVerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), + 'SystemdLexer': ('pip._vendor.pygments.lexers.configs', 'Systemd', ('systemd',), ('*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ()), + 'TAPLexer': ('pip._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), + 'TNTLexer': ('pip._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()), + 'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ('application/toml',)), + 'TableGenLexer': ('pip._vendor.pygments.lexers.tablegen', 'TableGen', ('tablegen', 'td'), ('*.td',), ()), + 'TactLexer': ('pip._vendor.pygments.lexers.tact', 'Tact', ('tact',), ('*.tact',), ()), + 'Tads3Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), + 'TalLexer': ('pip._vendor.pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)), + 'TasmLexer': ('pip._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), + 'TclLexer': ('pip._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), + 'TcshLexer': ('pip._vendor.pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), + 'TcshSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), + 'TeaTemplateLexer': ('pip._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), + 'TealLexer': ('pip._vendor.pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()), + 'TeraTermLexer': ('pip._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)), + 'TermcapLexer': ('pip._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), + 'TerminfoLexer': ('pip._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), + 'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf', 'hcl'), ('*.tf', '*.hcl'), ('application/x-tf', 'application/x-terraform')), + 'TexLexer': 
('pip._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), + 'TextLexer': ('pip._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), + 'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()), + 'ThriftLexer': ('pip._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), + 'TiddlyWiki5Lexer': ('pip._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), + 'TlbLexer': ('pip._vendor.pygments.lexers.tlb', 'Tl-b', ('tlb',), ('*.tlb',), ()), + 'TlsLexer': ('pip._vendor.pygments.lexers.tls', 'TLS Presentation Language', ('tls',), (), ()), + 'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), + 'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), + 'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), + 'TsxLexer': ('pip._vendor.pygments.lexers.jsx', 'TSX', ('tsx',), ('*.tsx',), ('text/typescript-tsx',)), + 'TurtleLexer': ('pip._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), + 'TwigHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), + 'TwigLexer': ('pip._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), + 'TypeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')), + 'TypoScriptCssDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), + 'TypoScriptHtmlDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), + 'TypoScriptLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), + 'TypstLexer': ('pip._vendor.pygments.lexers.typst', 'Typst', ('typst',), ('*.typ',), ('text/x-typst',)), + 'UL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'UL4', ('ul4',), ('*.ul4',), ()), + 'UcodeLexer': ('pip._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), + 'UniconLexer': ('pip._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), + 'UnixConfigLexer': ('pip._vendor.pygments.lexers.configs', 'Unix/Linux config files', ('unixconfig', 'linuxconfig'), (), ()), + 'UrbiscriptLexer': ('pip._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), + 'UrlEncodedLexer': ('pip._vendor.pygments.lexers.html', 'urlencoded', ('urlencoded',), (), ('application/x-www-form-urlencoded',)), + 'UsdLexer': ('pip._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), + 'VBScriptLexer': ('pip._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), + 'VCLLexer': ('pip._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), + 'VCLSnippetLexer': ('pip._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), + 'VCTreeStatusLexer': ('pip._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), + 'VGLLexer': 
('pip._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), + 'ValaLexer': ('pip._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), + 'VbNetAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), + 'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas', 'visual-basic', 'visualbasic'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), + 'VelocityHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), + 'VelocityLexer': ('pip._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), + 'VelocityXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), + 'VerifpalLexer': ('pip._vendor.pygments.lexers.verifpal', 'Verifpal', ('verifpal',), ('*.vp',), ('text/x-verifpal',)), + 'VerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), + 'VhdlLexer': ('pip._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), + 'VimLexer': ('pip._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), + 'VisualPrologGrammarLexer': ('pip._vendor.pygments.lexers.vip', 'Visual Prolog Grammar', ('visualprologgrammar',), ('*.vipgrm',), ()), + 'VisualPrologLexer': ('pip._vendor.pygments.lexers.vip', 'Visual Prolog', ('visualprolog',), ('*.pro', '*.cl', '*.i', '*.pack', '*.ph'), ()), + 'VueLexer': ('pip._vendor.pygments.lexers.html', 'Vue', ('vue',), ('*.vue',), ()), + 'VyperLexer': ('pip._vendor.pygments.lexers.vyper', 'Vyper', ('vyper',), ('*.vy',), ()), + 'WDiffLexer': ('pip._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), + 'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()), + 'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), + 'WgslLexer': ('pip._vendor.pygments.lexers.wgsl', 'WebGPU Shading Language', ('wgsl',), ('*.wgsl',), ('text/wgsl',)), + 'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), + 'WikitextLexer': ('pip._vendor.pygments.lexers.markup', 'Wikitext', ('wikitext', 'mediawiki'), (), ('text/x-wiki',)), + 'WoWTocLexer': ('pip._vendor.pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()), + 'WrenLexer': ('pip._vendor.pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()), + 'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), + 'XMLUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'XML+UL4', ('xml+ul4',), ('*.xmlul4',), ()), + 'XQueryLexer': ('pip._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), + 'XmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), ('*.xml.j2', '*.xml.jinja2'), ('application/xml+django', 'application/xml+jinja')), + 'XmlErbLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)), + 'XmlLexer': ('pip._vendor.pygments.lexers.html', 'XML', ('xml',), 
('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')), + 'XmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), + 'XmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), + 'XorgLexer': ('pip._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), + 'XppLexer': ('pip._vendor.pygments.lexers.dotnet', 'X++', ('xpp', 'x++'), ('*.xpp',), ()), + 'XsltLexer': ('pip._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), + 'XtendLexer': ('pip._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), + 'XtlangLexer': ('pip._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), + 'YamlJinjaLexer': ('pip._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2'), ('text/x-yaml+jinja', 'text/x-sls')), + 'YamlLexer': ('pip._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), + 'YangLexer': ('pip._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)), + 'YaraLexer': ('pip._vendor.pygments.lexers.yara', 'YARA', ('yara', 'yar'), ('*.yar',), ('text/x-yara',)), + 'ZeekLexer': ('pip._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()), + 'ZephirLexer': ('pip._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), + 'ZigLexer': ('pip._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), + 'apdlexer': ('pip._vendor.pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()), +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/python.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/python.py new file mode 100644 index 0000000..1b78829 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/lexers/python.py @@ -0,0 +1,1201 @@ +""" + pygments.lexers.python + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Python and related languages. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import keyword + +from pip._vendor.pygments.lexer import DelegatingLexer, RegexLexer, include, \ + bygroups, using, default, words, combined, this +from pip._vendor.pygments.util import get_bool_opt, shebang_matches +from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Generic, Other, Error, Whitespace +from pip._vendor.pygments import unistring as uni + +__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer', + 'Python2Lexer', 'Python2TracebackLexer', + 'CythonLexer', 'DgLexer', 'NumPyLexer'] + + +class PythonLexer(RegexLexer): + """ + For Python source code (version 3.x). + + .. versionchanged:: 2.5 + This is now the default ``PythonLexer``. It is still available as the + alias ``Python3Lexer``. 
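+
+    A quick way to obtain this lexer by one of its aliases (an
+    illustrative sketch, using the vendored module path)::
+
+        from pip._vendor.pygments.lexers import get_lexer_by_name
+        lexer = get_lexer_by_name('python3')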
+ """ + + name = 'Python' + url = 'https://www.python.org' + aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark', 'pyi'] + filenames = [ + '*.py', + '*.pyw', + # Type stubs + '*.pyi', + # Jython + '*.jy', + # Sage + '*.sage', + # SCons + '*.sc', + 'SConstruct', + 'SConscript', + # Skylark/Starlark (used by Bazel, Buck, and Pants) + '*.bzl', + 'BUCK', + 'BUILD', + 'BUILD.bazel', + 'WORKSPACE', + # Twisted Application infrastructure + '*.tac', + ] + mimetypes = ['text/x-python', 'application/x-python', + 'text/x-python3', 'application/x-python3'] + version_added = '0.10' + + uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*" + + def innerstring_rules(ttype): + return [ + # the old style '%s' % (...) string formatting (still valid in Py3) + (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' + '[hlL]?[E-GXc-giorsaux%]', String.Interpol), + # the new style '{}'.format(...) string formatting + (r'\{' + r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name + r'(\![sra])?' # conversion + r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?' + r'\}', String.Interpol), + + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"%{\n]+', ttype), + (r'[\'"\\]', ttype), + # unhandled string formatting sign + (r'%|(\{{1,2})', ttype) + # newlines are an error (use "nl" state) + ] + + def fstring_rules(ttype): + return [ + # Assuming that a '}' is the closing brace after format specifier. + # Sadly, this means that we won't detect syntax error. But it's + # more important to parse correct syntax correctly, than to + # highlight invalid syntax. + (r'\}', String.Interpol), + (r'\{', String.Interpol, 'expr-inside-fstring'), + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"{}\n]+', ttype), + (r'[\'"\\]', ttype), + # newlines are an error (use "nl" state) + ] + + tokens = { + 'root': [ + (r'\n', Whitespace), + (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")', + bygroups(Whitespace, String.Affix, String.Doc)), + (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')", + bygroups(Whitespace, String.Affix, String.Doc)), + (r'\A#!.+$', Comment.Hashbang), + (r'#.*$', Comment.Single), + (r'\\\n', Text), + (r'\\', Text), + include('keywords'), + include('soft-keywords'), + (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'), + (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'), + (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace), + 'fromimport'), + (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace), + 'import'), + include('expr'), + ], + 'expr': [ + # raw f-strings + ('(?i)(rf|fr)(""")', + bygroups(String.Affix, String.Double), + combined('rfstringescape', 'tdqf')), + ("(?i)(rf|fr)(''')", + bygroups(String.Affix, String.Single), + combined('rfstringescape', 'tsqf')), + ('(?i)(rf|fr)(")', + bygroups(String.Affix, String.Double), + combined('rfstringescape', 'dqf')), + ("(?i)(rf|fr)(')", + bygroups(String.Affix, String.Single), + combined('rfstringescape', 'sqf')), + # non-raw f-strings + ('([fF])(""")', bygroups(String.Affix, String.Double), + combined('fstringescape', 'tdqf')), + ("([fF])(''')", bygroups(String.Affix, String.Single), + combined('fstringescape', 'tsqf')), + ('([fF])(")', bygroups(String.Affix, String.Double), + combined('fstringescape', 'dqf')), + ("([fF])(')", bygroups(String.Affix, String.Single), + combined('fstringescape', 'sqf')), + # raw bytes and strings + ('(?i)(rb|br|r)(""")', + bygroups(String.Affix, String.Double), 'tdqs'), + ("(?i)(rb|br|r)(''')", + 
bygroups(String.Affix, String.Single), 'tsqs'), + ('(?i)(rb|br|r)(")', + bygroups(String.Affix, String.Double), 'dqs'), + ("(?i)(rb|br|r)(')", + bygroups(String.Affix, String.Single), 'sqs'), + # non-raw strings + ('([uU]?)(""")', bygroups(String.Affix, String.Double), + combined('stringescape', 'tdqs')), + ("([uU]?)(''')", bygroups(String.Affix, String.Single), + combined('stringescape', 'tsqs')), + ('([uU]?)(")', bygroups(String.Affix, String.Double), + combined('stringescape', 'dqs')), + ("([uU]?)(')", bygroups(String.Affix, String.Single), + combined('stringescape', 'sqs')), + # non-raw bytes + ('([bB])(""")', bygroups(String.Affix, String.Double), + combined('bytesescape', 'tdqs')), + ("([bB])(''')", bygroups(String.Affix, String.Single), + combined('bytesescape', 'tsqs')), + ('([bB])(")', bygroups(String.Affix, String.Double), + combined('bytesescape', 'dqs')), + ("([bB])(')", bygroups(String.Affix, String.Single), + combined('bytesescape', 'sqs')), + + (r'[^\S\n]+', Text), + include('numbers'), + (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator), + (r'[]{}:(),;[]', Punctuation), + (r'(in|is|and|or|not)\b', Operator.Word), + include('expr-keywords'), + include('builtins'), + include('magicfuncs'), + include('magicvars'), + include('name'), + ], + 'expr-inside-fstring': [ + (r'[{([]', Punctuation, 'expr-inside-fstring-inner'), + # without format specifier + (r'(=\s*)?' # debug (https://bugs.python.org/issue36817) + r'(\![sraf])?' # conversion + r'\}', String.Interpol, '#pop'), + # with format specifier + # we'll catch the remaining '}' in the outer scope + (r'(=\s*)?' # debug (https://bugs.python.org/issue36817) + r'(\![sraf])?' # conversion + r':', String.Interpol, '#pop'), + (r'\s+', Whitespace), # allow new lines + include('expr'), + ], + 'expr-inside-fstring-inner': [ + (r'[{([]', Punctuation, 'expr-inside-fstring-inner'), + (r'[])}]', Punctuation, '#pop'), + (r'\s+', Whitespace), # allow new lines + include('expr'), + ], + 'expr-keywords': [ + # Based on https://docs.python.org/3/reference/expressions.html + (words(( + 'async for', 'await', 'else', 'for', 'if', 'lambda', + 'yield', 'yield from'), suffix=r'\b'), + Keyword), + (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant), + ], + 'keywords': [ + (words(( + 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif', + 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda', + 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield', + 'yield from', 'as', 'with'), suffix=r'\b'), + Keyword), + (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant), + ], + 'soft-keywords': [ + # `match`, `case` and `_` soft keywords + (r'(^[ \t]*)' # at beginning of line + possible indentation + r'(match|case)\b' # a possible keyword + r'(?![ \t]*(?:' # not followed by... 
+ r'[:,;=^&|@~)\]}]|(?:' + # characters and keywords that mean this isn't + # pattern matching (but None/True/False is ok) + r'|'.join(k for k in keyword.kwlist if k[0].islower()) + r')\b))', + bygroups(Text, Keyword), 'soft-keywords-inner'), + ], + 'soft-keywords-inner': [ + # optional `_` keyword + (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)), + default('#pop') + ], + 'builtins': [ + (words(( + '__import__', 'abs', 'aiter', 'all', 'any', 'bin', 'bool', 'bytearray', + 'breakpoint', 'bytes', 'callable', 'chr', 'classmethod', 'compile', + 'complex', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', + 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals', + 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'isinstance', + 'issubclass', 'iter', 'len', 'list', 'locals', 'map', 'max', + 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', + 'print', 'property', 'range', 'repr', 'reversed', 'round', 'set', + 'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', + 'tuple', 'type', 'vars', 'zip'), prefix=r'(?>|[-~+/*%=<>&^|.]', Operator), + include('keywords'), + (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'), + (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'), + (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace), + 'fromimport'), + (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace), + 'import'), + include('builtins'), + include('magicfuncs'), + include('magicvars'), + include('backtick'), + ('([rR]|[uUbB][rR]|[rR][uUbB])(""")', + bygroups(String.Affix, String.Double), 'tdqs'), + ("([rR]|[uUbB][rR]|[rR][uUbB])(''')", + bygroups(String.Affix, String.Single), 'tsqs'), + ('([rR]|[uUbB][rR]|[rR][uUbB])(")', + bygroups(String.Affix, String.Double), 'dqs'), + ("([rR]|[uUbB][rR]|[rR][uUbB])(')", + bygroups(String.Affix, String.Single), 'sqs'), + ('([uUbB]?)(""")', bygroups(String.Affix, String.Double), + combined('stringescape', 'tdqs')), + ("([uUbB]?)(''')", bygroups(String.Affix, String.Single), + combined('stringescape', 'tsqs')), + ('([uUbB]?)(")', bygroups(String.Affix, String.Double), + combined('stringescape', 'dqs')), + ("([uUbB]?)(')", bygroups(String.Affix, String.Single), + combined('stringescape', 'sqs')), + include('name'), + include('numbers'), + ], + 'keywords': [ + (words(( + 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except', + 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass', + 'print', 'raise', 'return', 'try', 'while', 'yield', + 'yield from', 'as', 'with'), suffix=r'\b'), + Keyword), + ], + 'builtins': [ + (words(( + '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', + 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod', + 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod', + 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float', + 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id', + 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len', + 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object', + 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce', + 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice', + 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type', + 'unichr', 'unicode', 'vars', 'xrange', 'zip'), + prefix=r'(?>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'), + # This happens, e.g., when tracebacks are embedded in documentation; + # trailing 
whitespace is often stripped in such contexts. + (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)), + (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'), + # SyntaxError starts with this + (r' File "[^"]+", line \d+', Other.Traceback, 'traceback'), + (r'.*\n', Generic.Output), + ], + 'continuations': [ + (r'(\.\.\. )(.*\n)', bygroups(Generic.Prompt, Other.Code)), + # See above. + (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)), + default('#pop'), + ], + 'traceback': [ + # As soon as we see a traceback, consume everything until the next + # >>> prompt. + (r'(?=>>>( |$))', Text, '#pop'), + (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)), + (r'.*\n', Other.Traceback), + ], + } + + +class PythonConsoleLexer(DelegatingLexer): + """ + For Python console output or doctests, such as: + + .. sourcecode:: pycon + + >>> a = 'foo' + >>> print(a) + foo + >>> 1 / 0 + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + ZeroDivisionError: integer division or modulo by zero + + Additional options: + + `python3` + Use Python 3 lexer for code. Default is ``True``. + + .. versionadded:: 1.0 + .. versionchanged:: 2.5 + Now defaults to ``True``. + """ + + name = 'Python console session' + aliases = ['pycon', 'python-console'] + mimetypes = ['text/x-python-doctest'] + url = 'https://python.org' + version_added = '' + + def __init__(self, **options): + python3 = get_bool_opt(options, 'python3', True) + if python3: + pylexer = PythonLexer + tblexer = PythonTracebackLexer + else: + pylexer = Python2Lexer + tblexer = Python2TracebackLexer + # We have two auxiliary lexers. Use DelegatingLexer twice with + # different tokens. TODO: DelegatingLexer should support this + # directly, by accepting a tuple of auxiliary lexers and a tuple of + # distinguishing tokens. Then we wouldn't need this intermediary + # class. + class _ReplaceInnerCode(DelegatingLexer): + def __init__(self, **options): + super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options) + super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options) + + +class PythonTracebackLexer(RegexLexer): + """ + For Python 3.x tracebacks, with support for chained exceptions. + + .. versionchanged:: 2.5 + This is now the default ``PythonTracebackLexer``. It is still available + as the alias ``Python3TracebackLexer``. + """ + + name = 'Python Traceback' + aliases = ['pytb', 'py3tb'] + filenames = ['*.pytb', '*.py3tb'] + mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback'] + url = 'https://python.org' + version_added = '1.0' + + tokens = { + 'root': [ + (r'\n', Whitespace), + (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^During handling of the above exception, another ' + r'exception occurred:\n\n', Generic.Traceback), + (r'^The above exception was the direct cause of the ' + r'following exception:\n\n', Generic.Traceback), + (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), + (r'^.*\n', Other), + ], + 'intb': [ + (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)), + (r'^( File )("[^"]+")(, line )(\d+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Whitespace)), + (r'^( )(.+)(\n)', + bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'), + (r'^([ \t]*)(\.\.\.)(\n)', + bygroups(Whitespace, Comment, Whitespace)), # for doctests...
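+ # A final "ExceptionName: message" line (or a bare exception name) is matched below and pops back to 'root'.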
+ (r'^([^:]+)(: )(.+)(\n)', + bygroups(Generic.Error, Text, Name, Whitespace), '#pop'), + (r'^([a-zA-Z_][\w.]*)(:?\n)', + bygroups(Generic.Error, Whitespace), '#pop'), + default('#pop'), + ], + 'markers': [ + # Either `PEP 657 ` + # error locations in Python 3.11+, or single-caret markers + # for syntax errors before that. + (r'^( {4,})([~^]+)(\n)', + bygroups(Whitespace, Punctuation.Marker, Whitespace), + '#pop'), + default('#pop'), + ], + } + + +Python3TracebackLexer = PythonTracebackLexer + + +class Python2TracebackLexer(RegexLexer): + """ + For Python tracebacks. + + .. versionchanged:: 2.5 + This class has been renamed from ``PythonTracebackLexer``. + ``PythonTracebackLexer`` now refers to the Python 3 variant. + """ + + name = 'Python 2.x Traceback' + aliases = ['py2tb'] + filenames = ['*.py2tb'] + mimetypes = ['text/x-python2-traceback'] + url = 'https://python.org' + version_added = '0.7' + + tokens = { + 'root': [ + # Cover both (most recent call last) and (innermost last) + # The optional ^C allows us to catch keyboard interrupt signals. + (r'^(\^C)?(Traceback.*\n)', + bygroups(Text, Generic.Traceback), 'intb'), + # SyntaxError starts with this. + (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), + (r'^.*\n', Other), + ], + 'intb': [ + (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)), + (r'^( File )("[^"]+")(, line )(\d+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Whitespace)), + (r'^( )(.+)(\n)', + bygroups(Text, using(Python2Lexer), Whitespace), 'marker'), + (r'^([ \t]*)(\.\.\.)(\n)', + bygroups(Text, Comment, Whitespace)), # for doctests... + (r'^([^:]+)(: )(.+)(\n)', + bygroups(Generic.Error, Text, Name, Whitespace), '#pop'), + (r'^([a-zA-Z_]\w*)(:?\n)', + bygroups(Generic.Error, Whitespace), '#pop') + ], + 'marker': [ + # For syntax errors. + (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'), + default('#pop'), + ], + } + + +class CythonLexer(RegexLexer): + """ + For Pyrex and Cython source code. 
+ """ + + name = 'Cython' + url = 'https://cython.org' + aliases = ['cython', 'pyx', 'pyrex'] + filenames = ['*.pyx', '*.pxd', '*.pxi'] + mimetypes = ['text/x-cython', 'application/x-cython'] + version_added = '1.1' + + tokens = { + 'root': [ + (r'\n', Whitespace), + (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)), + (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)), + (r'[^\S\n]+', Text), + (r'#.*$', Comment), + (r'[]{}:(),;[]', Punctuation), + (r'\\\n', Whitespace), + (r'\\', Text), + (r'(in|is|and|or|not)\b', Operator.Word), + (r'(<)([a-zA-Z0-9.?]+)(>)', + bygroups(Punctuation, Keyword.Type, Punctuation)), + (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator), + (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)', + bygroups(Keyword, Number.Integer, Operator, Whitespace, Operator, + Name, Punctuation)), + include('keywords'), + (r'(def|property)(\s+)', bygroups(Keyword, Whitespace), 'funcname'), + (r'(cp?def)(\s+)', bygroups(Keyword, Whitespace), 'cdef'), + # (should actually start a block with only cdefs) + (r'(cdef)(:)', bygroups(Keyword, Punctuation)), + (r'(class|struct)(\s+)', bygroups(Keyword, Whitespace), 'classname'), + (r'(from)(\s+)', bygroups(Keyword, Whitespace), 'fromimport'), + (r'(c?import)(\s+)', bygroups(Keyword, Whitespace), 'import'), + include('builtins'), + include('backtick'), + ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), + ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), + ('[uU]?"""', String, combined('stringescape', 'tdqs')), + ("[uU]?'''", String, combined('stringescape', 'tsqs')), + ('[uU]?"', String, combined('stringescape', 'dqs')), + ("[uU]?'", String, combined('stringescape', 'sqs')), + include('name'), + include('numbers'), + ], + 'keywords': [ + (words(( + 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif', + 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil', + 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print', + 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'), + Keyword), + (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc), + ], + 'builtins': [ + (words(( + '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint', + 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', + 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr', + 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit', + 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals', + 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance', + 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max', + 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t', + 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed', + 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', + 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned', + 'vars', 'xrange', 'zip'), prefix=r'(?]? \d* )? : + .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ ) +''', re.VERBOSE) + + +def get_filetype_from_line(l): # noqa: E741 + m = modeline_re.search(l) + if m: + return m.group(1) + + +def get_filetype_from_buffer(buf, max_lines=5): + """ + Scan the buffer for modelines and return filetype if one is found. 
+ """ + lines = buf.splitlines() + for line in lines[-1:-max_lines-1:-1]: + ret = get_filetype_from_line(line) + if ret: + return ret + for i in range(max_lines, -1, -1): + if i < len(lines): + ret = get_filetype_from_line(lines[i]) + if ret: + return ret + + return None diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/plugin.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/plugin.py new file mode 100644 index 0000000..498db42 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/plugin.py @@ -0,0 +1,72 @@ +""" + pygments.plugin + ~~~~~~~~~~~~~~~ + + Pygments plugin interface. + + lexer plugins:: + + [pygments.lexers] + yourlexer = yourmodule:YourLexer + + formatter plugins:: + + [pygments.formatters] + yourformatter = yourformatter:YourFormatter + /.ext = yourformatter:YourFormatter + + As you can see, you can define extensions for the formatter + with a leading slash. + + syntax plugins:: + + [pygments.styles] + yourstyle = yourstyle:YourStyle + + filter plugin:: + + [pygments.filter] + yourfilter = yourfilter:YourFilter + + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +from importlib.metadata import entry_points + +LEXER_ENTRY_POINT = 'pygments.lexers' +FORMATTER_ENTRY_POINT = 'pygments.formatters' +STYLE_ENTRY_POINT = 'pygments.styles' +FILTER_ENTRY_POINT = 'pygments.filters' + + +def iter_entry_points(group_name): + groups = entry_points() + if hasattr(groups, 'select'): + # New interface in Python 3.10 and newer versions of the + # importlib_metadata backport. + return groups.select(group=group_name) + else: + # Older interface, deprecated in Python 3.10 and recent + # importlib_metadata, but we need it in Python 3.8 and 3.9. + return groups.get(group_name, []) + + +def find_plugin_lexers(): + for entrypoint in iter_entry_points(LEXER_ENTRY_POINT): + yield entrypoint.load() + + +def find_plugin_formatters(): + for entrypoint in iter_entry_points(FORMATTER_ENTRY_POINT): + yield entrypoint.name, entrypoint.load() + + +def find_plugin_styles(): + for entrypoint in iter_entry_points(STYLE_ENTRY_POINT): + yield entrypoint.name, entrypoint.load() + + +def find_plugin_filters(): + for entrypoint in iter_entry_points(FILTER_ENTRY_POINT): + yield entrypoint.name, entrypoint.load() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/regexopt.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/regexopt.py new file mode 100644 index 0000000..cc8d2c3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/regexopt.py @@ -0,0 +1,91 @@ +""" + pygments.regexopt + ~~~~~~~~~~~~~~~~~ + + An algorithm that generates optimized regexes for matching long lists of + literal strings. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re +from re import escape +from os.path import commonprefix +from itertools import groupby +from operator import itemgetter + +CS_ESCAPE = re.compile(r'[\[\^\\\-\]]') +FIRST_ELEMENT = itemgetter(0) + + +def make_charset(letters): + return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']' + + +def regex_opt_inner(strings, open_paren): + """Return a regex that matches any string in the sorted list of strings.""" + close_paren = open_paren and ')' or '' + # print strings, repr(open_paren) + if not strings: + # print '-> nothing left' + return '' + first = strings[0] + if len(strings) == 1: + # print '-> only 1 string' + return open_paren + escape(first) + close_paren + if not first: + # print '-> first string empty' + return open_paren + regex_opt_inner(strings[1:], '(?:') \ + + '?' + close_paren + if len(first) == 1: + # multiple one-char strings? make a charset + oneletter = [] + rest = [] + for s in strings: + if len(s) == 1: + oneletter.append(s) + else: + rest.append(s) + if len(oneletter) > 1: # do we have more than one oneletter string? + if rest: + # print '-> 1-character + rest' + return open_paren + regex_opt_inner(rest, '') + '|' \ + + make_charset(oneletter) + close_paren + # print '-> only 1-character' + return open_paren + make_charset(oneletter) + close_paren + prefix = commonprefix(strings) + if prefix: + plen = len(prefix) + # we have a prefix for all strings + # print '-> prefix:', prefix + return open_paren + escape(prefix) \ + + regex_opt_inner([s[plen:] for s in strings], '(?:') \ + + close_paren + # is there a suffix? + strings_rev = [s[::-1] for s in strings] + suffix = commonprefix(strings_rev) + if suffix: + slen = len(suffix) + # print '-> suffix:', suffix[::-1] + return open_paren \ + + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \ + + escape(suffix[::-1]) + close_paren + # recurse on common 1-string prefixes + # print '-> last resort' + return open_paren + \ + '|'.join(regex_opt_inner(list(group[1]), '') + for group in groupby(strings, lambda s: s[0] == first[0])) \ + + close_paren + + +def regex_opt(strings, prefix='', suffix=''): + """Return a compiled regex that matches any string in the given list. + + The strings to match must be literal strings, not regexes. They will be + regex-escaped. + + *prefix* and *suffix* are pre- and appended to the final regex. + """ + strings = sorted(strings) + return prefix + regex_opt_inner(strings, '(') + suffix diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/scanner.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/scanner.py new file mode 100644 index 0000000..3c8c848 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/scanner.py @@ -0,0 +1,104 @@ +""" + pygments.scanner + ~~~~~~~~~~~~~~~~ + + This library implements a regex based scanner. Some languages + like Pascal are easy to parse but have some keywords that + depend on the context. Because of this it's impossible to lex + that just by using a regular expression lexer like the + `RegexLexer`. + + Have a look at the `DelphiLexer` to get an idea of how to use + this scanner. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +import re + + +class EndOfText(RuntimeError): + """ + Raise if end of text is reached and the user + tried to call a match function. + """ + + +class Scanner: + """ + Simple scanner + + All method patterns are regular expression strings (not + compiled expressions!) 
+    """
+
+    def __init__(self, text, flags=0):
+        """
+        :param text: The text which should be scanned
+        :param flags: default regular expression flags
+        """
+        self.data = text
+        self.data_length = len(text)
+        self.start_pos = 0
+        self.pos = 0
+        self.flags = flags
+        self.last = None
+        self.match = None
+        self._re_cache = {}
+
+    def eos(self):
+        """`True` if the scanner reached the end of text."""
+        return self.pos >= self.data_length
+    eos = property(eos, eos.__doc__)
+
+    def check(self, pattern):
+        """
+        Apply `pattern` on the current position and return
+        the match object. (Doesn't touch pos). Use this for
+        lookahead.
+        """
+        if self.eos:
+            raise EndOfText()
+        if pattern not in self._re_cache:
+            self._re_cache[pattern] = re.compile(pattern, self.flags)
+        return self._re_cache[pattern].match(self.data, self.pos)
+
+    def test(self, pattern):
+        """Apply a pattern on the current position and check
+        if it matches. Doesn't touch pos.
+        """
+        return self.check(pattern) is not None
+
+    def scan(self, pattern):
+        """
+        Scan the text for the given pattern and update pos/match
+        and related fields. The return value is a boolean that
+        indicates if the pattern matched. The matched value is
+        stored on the instance as ``match``, the last value is
+        stored as ``last``. ``start_pos`` is the position of the
+        pointer before the pattern was matched, ``pos`` is the
+        end position.
+        """
+        if self.eos:
+            raise EndOfText()
+        if pattern not in self._re_cache:
+            self._re_cache[pattern] = re.compile(pattern, self.flags)
+        self.last = self.match
+        m = self._re_cache[pattern].match(self.data, self.pos)
+        if m is None:
+            return False
+        self.start_pos = m.start()
+        self.pos = m.end()
+        self.match = m.group()
+        return True
+
+    def get_char(self):
+        """Scan exactly one char."""
+        self.scan('.')
+
+    def __repr__(self):
+        return '<%s %d/%d>' % (
+            self.__class__.__name__,
+            self.pos,
+            self.data_length
+        )
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/sphinxext.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/sphinxext.py
new file mode 100644
index 0000000..955d958
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/sphinxext.py
@@ -0,0 +1,247 @@
+"""
+    pygments.sphinxext
+    ~~~~~~~~~~~~~~~~~~
+
+    Sphinx extension to generate automatic documentation of lexers,
+    formatters and filters.
+
+    :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import sys
+
+from docutils import nodes
+from docutils.statemachine import ViewList
+from docutils.parsers.rst import Directive
+from sphinx.util.nodes import nested_parse_with_titles
+
+
+MODULEDOC = '''
+.. module:: %s
+
+%s
+%s
+'''
+
+LEXERDOC = '''
+.. class:: %s
+
+    :Short names: %s
+    :Filenames: %s
+    :MIME types: %s
+
+    %s
+
+    %s
+
+'''
+
+FMTERDOC = '''
+.. class:: %s
+
+    :Short names: %s
+    :Filenames: %s
+
+    %s
+
+'''
+
+FILTERDOC = '''
+.. class:: %s
+
+    :Name: %s
+
+    %s
+
+'''
+
+
+class PygmentsDoc(Directive):
+    """
+    A directive to collect all lexers/formatters/filters and generate
+    autoclass directives for them.
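A minimal sketch of driving the Scanner class above over a small input, in the spirit of the DelphiLexer its module docstring points to; the token labels are illustrative only, and the import path assumes the vendored copy is reachable as pygments.scanner:

    from pygments.scanner import Scanner

    s = Scanner('count := 42;')
    tokens = []
    while not s.eos:
        if s.scan(r'\s+'):
            continue                          # skip whitespace
        elif s.scan(r'[A-Za-z_]\w*'):
            tokens.append(('name', s.match))  # scan() stored the text in .match
        elif s.scan(r'\d+'):
            tokens.append(('number', s.match))
        else:
            s.get_char()                      # fall back to a single character
            tokens.append(('punct', s.match))

    print(tokens)
    # [('name', 'count'), ('punct', ':'), ('punct', '='), ('number', '42'), ('punct', ';')]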
+ """ + has_content = False + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = {} + + def run(self): + self.filenames = set() + if self.arguments[0] == 'lexers': + out = self.document_lexers() + elif self.arguments[0] == 'formatters': + out = self.document_formatters() + elif self.arguments[0] == 'filters': + out = self.document_filters() + elif self.arguments[0] == 'lexers_overview': + out = self.document_lexers_overview() + else: + raise Exception('invalid argument for "pygmentsdoc" directive') + node = nodes.compound() + vl = ViewList(out.split('\n'), source='') + nested_parse_with_titles(self.state, vl, node) + for fn in self.filenames: + self.state.document.settings.record_dependencies.add(fn) + return node.children + + def document_lexers_overview(self): + """Generate a tabular overview of all lexers. + + The columns are the lexer name, the extensions handled by this lexer + (or "None"), the aliases and a link to the lexer class.""" + from pip._vendor.pygments.lexers._mapping import LEXERS + from pip._vendor.pygments.lexers import find_lexer_class + out = [] + + table = [] + + def format_link(name, url): + if url: + return f'`{name} <{url}>`_' + return name + + for classname, data in sorted(LEXERS.items(), key=lambda x: x[1][1].lower()): + lexer_cls = find_lexer_class(data[1]) + extensions = lexer_cls.filenames + lexer_cls.alias_filenames + + table.append({ + 'name': format_link(data[1], lexer_cls.url), + 'extensions': ', '.join(extensions).replace('*', '\\*').replace('_', '\\') or 'None', + 'aliases': ', '.join(data[2]), + 'class': f'{data[0]}.{classname}' + }) + + column_names = ['name', 'extensions', 'aliases', 'class'] + column_lengths = [max([len(row[column]) for row in table if row[column]]) + for column in column_names] + + def write_row(*columns): + """Format a table row""" + out = [] + for length, col in zip(column_lengths, columns): + if col: + out.append(col.ljust(length)) + else: + out.append(' '*length) + + return ' '.join(out) + + def write_seperator(): + """Write a table separator row""" + sep = ['='*c for c in column_lengths] + return write_row(*sep) + + out.append(write_seperator()) + out.append(write_row('Name', 'Extension(s)', 'Short name(s)', 'Lexer class')) + out.append(write_seperator()) + for row in table: + out.append(write_row( + row['name'], + row['extensions'], + row['aliases'], + f':class:`~{row["class"]}`')) + out.append(write_seperator()) + + return '\n'.join(out) + + def document_lexers(self): + from pip._vendor.pygments.lexers._mapping import LEXERS + from pip._vendor import pygments + import inspect + import pathlib + + out = [] + modules = {} + moduledocstrings = {} + for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]): + module = data[0] + mod = __import__(module, None, None, [classname]) + self.filenames.add(mod.__file__) + cls = getattr(mod, classname) + if not cls.__doc__: + print(f"Warning: {classname} does not have a docstring.") + docstring = cls.__doc__ + if isinstance(docstring, bytes): + docstring = docstring.decode('utf8') + + example_file = getattr(cls, '_example', None) + if example_file: + p = pathlib.Path(inspect.getabsfile(pygments)).parent.parent /\ + 'tests' / 'examplefiles' / example_file + content = p.read_text(encoding='utf-8') + if not content: + raise Exception( + f"Empty example file '{example_file}' for lexer " + f"{classname}") + + if data[2]: + lexer_name = data[2][0] + docstring += '\n\n .. admonition:: Example\n' + docstring += f'\n .. 
code-block:: {lexer_name}\n\n' + for line in content.splitlines(): + docstring += f' {line}\n' + + if cls.version_added: + version_line = f'.. versionadded:: {cls.version_added}' + else: + version_line = '' + + modules.setdefault(module, []).append(( + classname, + ', '.join(data[2]) or 'None', + ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None', + ', '.join(data[4]) or 'None', + docstring, + version_line)) + if module not in moduledocstrings: + moddoc = mod.__doc__ + if isinstance(moddoc, bytes): + moddoc = moddoc.decode('utf8') + moduledocstrings[module] = moddoc + + for module, lexers in sorted(modules.items(), key=lambda x: x[0]): + if moduledocstrings[module] is None: + raise Exception(f"Missing docstring for {module}") + heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.') + out.append(MODULEDOC % (module, heading, '-'*len(heading))) + for data in lexers: + out.append(LEXERDOC % data) + + return ''.join(out) + + def document_formatters(self): + from pip._vendor.pygments.formatters import FORMATTERS + + out = [] + for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]): + module = data[0] + mod = __import__(module, None, None, [classname]) + self.filenames.add(mod.__file__) + cls = getattr(mod, classname) + docstring = cls.__doc__ + if isinstance(docstring, bytes): + docstring = docstring.decode('utf8') + heading = cls.__name__ + out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None', + ', '.join(data[3]).replace('*', '\\*') or 'None', + docstring)) + return ''.join(out) + + def document_filters(self): + from pip._vendor.pygments.filters import FILTERS + + out = [] + for name, cls in FILTERS.items(): + self.filenames.add(sys.modules[cls.__module__].__file__) + docstring = cls.__doc__ + if isinstance(docstring, bytes): + docstring = docstring.decode('utf8') + out.append(FILTERDOC % (cls.__name__, name, docstring)) + return ''.join(out) + + +def setup(app): + app.add_directive('pygmentsdoc', PygmentsDoc) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/style.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/style.py new file mode 100644 index 0000000..be5f832 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/style.py @@ -0,0 +1,203 @@ +""" + pygments.style + ~~~~~~~~~~~~~~ + + Basic style object. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pip._vendor.pygments.token import Token, STANDARD_TYPES + +# Default mapping of ansixxx to RGB colors. 
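Wiring the sphinxext extension above into a docs build is a one-liner, since setup() registers the "pygmentsdoc" directive; the conf.py below is a hypothetical sketch, and the four directive arguments are the ones run() accepts:

    # docs/conf.py (hypothetical)
    extensions = ['pygments.sphinxext']

    # A reST page then pulls in the generated sections with one of:
    #
    #   .. pygmentsdoc:: lexers_overview
    #   .. pygmentsdoc:: lexers
    #   .. pygmentsdoc:: formatters
    #   .. pygmentsdoc:: filters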
+_ansimap = { + # dark + 'ansiblack': '000000', + 'ansired': '7f0000', + 'ansigreen': '007f00', + 'ansiyellow': '7f7fe0', + 'ansiblue': '00007f', + 'ansimagenta': '7f007f', + 'ansicyan': '007f7f', + 'ansigray': 'e5e5e5', + # normal + 'ansibrightblack': '555555', + 'ansibrightred': 'ff0000', + 'ansibrightgreen': '00ff00', + 'ansibrightyellow': 'ffff00', + 'ansibrightblue': '0000ff', + 'ansibrightmagenta': 'ff00ff', + 'ansibrightcyan': '00ffff', + 'ansiwhite': 'ffffff', +} +# mapping of deprecated #ansixxx colors to new color names +_deprecated_ansicolors = { + # dark + '#ansiblack': 'ansiblack', + '#ansidarkred': 'ansired', + '#ansidarkgreen': 'ansigreen', + '#ansibrown': 'ansiyellow', + '#ansidarkblue': 'ansiblue', + '#ansipurple': 'ansimagenta', + '#ansiteal': 'ansicyan', + '#ansilightgray': 'ansigray', + # normal + '#ansidarkgray': 'ansibrightblack', + '#ansired': 'ansibrightred', + '#ansigreen': 'ansibrightgreen', + '#ansiyellow': 'ansibrightyellow', + '#ansiblue': 'ansibrightblue', + '#ansifuchsia': 'ansibrightmagenta', + '#ansiturquoise': 'ansibrightcyan', + '#ansiwhite': 'ansiwhite', +} +ansicolors = set(_ansimap) + + +class StyleMeta(type): + + def __new__(mcs, name, bases, dct): + obj = type.__new__(mcs, name, bases, dct) + for token in STANDARD_TYPES: + if token not in obj.styles: + obj.styles[token] = '' + + def colorformat(text): + if text in ansicolors: + return text + if text[0:1] == '#': + col = text[1:] + if len(col) == 6: + return col + elif len(col) == 3: + return col[0] * 2 + col[1] * 2 + col[2] * 2 + elif text == '': + return '' + elif text.startswith('var') or text.startswith('calc'): + return text + assert False, f"wrong color format {text!r}" + + _styles = obj._styles = {} + + for ttype in obj.styles: + for token in ttype.split(): + if token in _styles: + continue + ndef = _styles.get(token.parent, None) + styledefs = obj.styles.get(token, '').split() + if not ndef or token is None: + ndef = ['', 0, 0, 0, '', '', 0, 0, 0] + elif 'noinherit' in styledefs and token is not Token: + ndef = _styles[Token][:] + else: + ndef = ndef[:] + _styles[token] = ndef + for styledef in obj.styles.get(token, '').split(): + if styledef == 'noinherit': + pass + elif styledef == 'bold': + ndef[1] = 1 + elif styledef == 'nobold': + ndef[1] = 0 + elif styledef == 'italic': + ndef[2] = 1 + elif styledef == 'noitalic': + ndef[2] = 0 + elif styledef == 'underline': + ndef[3] = 1 + elif styledef == 'nounderline': + ndef[3] = 0 + elif styledef[:3] == 'bg:': + ndef[4] = colorformat(styledef[3:]) + elif styledef[:7] == 'border:': + ndef[5] = colorformat(styledef[7:]) + elif styledef == 'roman': + ndef[6] = 1 + elif styledef == 'sans': + ndef[7] = 1 + elif styledef == 'mono': + ndef[8] = 1 + else: + ndef[0] = colorformat(styledef) + + return obj + + def style_for_token(cls, token): + t = cls._styles[token] + ansicolor = bgansicolor = None + color = t[0] + if color in _deprecated_ansicolors: + color = _deprecated_ansicolors[color] + if color in ansicolors: + ansicolor = color + color = _ansimap[color] + bgcolor = t[4] + if bgcolor in _deprecated_ansicolors: + bgcolor = _deprecated_ansicolors[bgcolor] + if bgcolor in ansicolors: + bgansicolor = bgcolor + bgcolor = _ansimap[bgcolor] + + return { + 'color': color or None, + 'bold': bool(t[1]), + 'italic': bool(t[2]), + 'underline': bool(t[3]), + 'bgcolor': bgcolor or None, + 'border': t[5] or None, + 'roman': bool(t[6]) or None, + 'sans': bool(t[7]) or None, + 'mono': bool(t[8]) or None, + 'ansicolor': ansicolor, + 'bgansicolor': bgansicolor, + } + + 
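A compact sketch of what StyleMeta above does at class-creation time, assuming the vendored modules are importable as pygments.style and pygments.token; MyStyle is a hypothetical style:

    from pygments.style import Style
    from pygments.token import Comment, Keyword, String

    class MyStyle(Style):               # hypothetical style
        name = 'mystyle'
        styles = {
            Keyword: 'bold #005fd7',
            String:  'italic #af5f00',
            Comment: 'noinherit #888888',
        }

    # The metaclass resolved the shorthand strings into flat definitions;
    # subtypes inherit from their parent unless 'noinherit' is given:
    print(MyStyle.style_for_token(Keyword)['color'])         # '005fd7'
    print(MyStyle.style_for_token(Keyword)['bold'])          # True
    print(MyStyle.style_for_token(String.Double)['italic'])  # True, inherited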
def list_styles(cls): + return list(cls) + + def styles_token(cls, ttype): + return ttype in cls._styles + + def __iter__(cls): + for token in cls._styles: + yield token, cls.style_for_token(token) + + def __len__(cls): + return len(cls._styles) + + +class Style(metaclass=StyleMeta): + + #: overall background color (``None`` means transparent) + background_color = '#ffffff' + + #: highlight background color + highlight_color = '#ffffcc' + + #: line number font color + line_number_color = 'inherit' + + #: line number background color + line_number_background_color = 'transparent' + + #: special line number font color + line_number_special_color = '#000000' + + #: special line number background color + line_number_special_background_color = '#ffffc0' + + #: Style definitions for individual token types. + styles = {} + + #: user-friendly style name (used when selecting the style, so this + # should be all-lowercase, no spaces, hyphens) + name = 'unnamed' + + aliases = [] + + # Attribute for lexers defined within Pygments. If set + # to True, the style is not shown in the style gallery + # on the website. This is intended for language-specific + # styles. + web_style_gallery_exclude = False diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__init__.py new file mode 100644 index 0000000..96d53dc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__init__.py @@ -0,0 +1,61 @@ +""" + pygments.styles + ~~~~~~~~~~~~~~~ + + Contains built-in styles. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pip._vendor.pygments.plugin import find_plugin_styles +from pip._vendor.pygments.util import ClassNotFound +from pip._vendor.pygments.styles._mapping import STYLES + +#: A dictionary of built-in styles, mapping style names to +#: ``'submodule::classname'`` strings. +#: This list is deprecated. Use `pygments.styles.STYLES` instead +STYLE_MAP = {v[1]: v[0].split('.')[-1] + '::' + k for k, v in STYLES.items()} + +#: Internal reverse mapping to make `get_style_by_name` more efficient +_STYLE_NAME_TO_MODULE_MAP = {v[1]: (v[0], k) for k, v in STYLES.items()} + + +def get_style_by_name(name): + """ + Return a style class by its short name. The names of the builtin styles + are listed in :data:`pygments.styles.STYLE_MAP`. + + Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is + found. + """ + if name in _STYLE_NAME_TO_MODULE_MAP: + mod, cls = _STYLE_NAME_TO_MODULE_MAP[name] + builtin = "yes" + else: + for found_name, style in find_plugin_styles(): + if name == found_name: + return style + # perhaps it got dropped into our styles package + builtin = "" + mod = 'pygments.styles.' 
+ name + cls = name.title() + "Style" + + try: + mod = __import__(mod, None, None, [cls]) + except ImportError: + raise ClassNotFound(f"Could not find style module {mod!r}" + + (builtin and ", though it should be builtin") + + ".") + try: + return getattr(mod, cls) + except AttributeError: + raise ClassNotFound(f"Could not find style class {cls!r} in style module.") + + +def get_all_styles(): + """Return a generator for all styles by name, both builtin and plugin.""" + for v in STYLES.values(): + yield v[1] + for name, _ in find_plugin_styles(): + yield name diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f941133 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc new file mode 100644 index 0000000..758e945 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/_mapping.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/_mapping.py new file mode 100644 index 0000000..49a7fae --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/styles/_mapping.py @@ -0,0 +1,54 @@ +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. + +STYLES = { + 'AbapStyle': ('pygments.styles.abap', 'abap', ()), + 'AlgolStyle': ('pygments.styles.algol', 'algol', ()), + 'Algol_NuStyle': ('pygments.styles.algol_nu', 'algol_nu', ()), + 'ArduinoStyle': ('pygments.styles.arduino', 'arduino', ()), + 'AutumnStyle': ('pygments.styles.autumn', 'autumn', ()), + 'BlackWhiteStyle': ('pygments.styles.bw', 'bw', ()), + 'BorlandStyle': ('pygments.styles.borland', 'borland', ()), + 'CoffeeStyle': ('pygments.styles.coffee', 'coffee', ()), + 'ColorfulStyle': ('pygments.styles.colorful', 'colorful', ()), + 'DefaultStyle': ('pygments.styles.default', 'default', ()), + 'DraculaStyle': ('pygments.styles.dracula', 'dracula', ()), + 'EmacsStyle': ('pygments.styles.emacs', 'emacs', ()), + 'FriendlyGrayscaleStyle': ('pygments.styles.friendly_grayscale', 'friendly_grayscale', ()), + 'FriendlyStyle': ('pygments.styles.friendly', 'friendly', ()), + 'FruityStyle': ('pygments.styles.fruity', 'fruity', ()), + 'GhDarkStyle': ('pygments.styles.gh_dark', 'github-dark', ()), + 'GruvboxDarkStyle': ('pygments.styles.gruvbox', 'gruvbox-dark', ()), + 'GruvboxLightStyle': ('pygments.styles.gruvbox', 'gruvbox-light', ()), + 'IgorStyle': ('pygments.styles.igor', 'igor', ()), + 'InkPotStyle': ('pygments.styles.inkpot', 'inkpot', ()), + 'LightbulbStyle': ('pygments.styles.lightbulb', 'lightbulb', ()), + 'LilyPondStyle': ('pygments.styles.lilypond', 'lilypond', ()), + 'LovelaceStyle': ('pygments.styles.lovelace', 'lovelace', ()), + 'ManniStyle': ('pygments.styles.manni', 'manni', ()), + 'MaterialStyle': ('pygments.styles.material', 'material', ()), + 'MonokaiStyle': ('pygments.styles.monokai', 'monokai', ()), + 'MurphyStyle': ('pygments.styles.murphy', 'murphy', ()), + 'NativeStyle': ('pygments.styles.native', 'native', ()), + 
'NordDarkerStyle': ('pygments.styles.nord', 'nord-darker', ()), + 'NordStyle': ('pygments.styles.nord', 'nord', ()), + 'OneDarkStyle': ('pygments.styles.onedark', 'one-dark', ()), + 'ParaisoDarkStyle': ('pygments.styles.paraiso_dark', 'paraiso-dark', ()), + 'ParaisoLightStyle': ('pygments.styles.paraiso_light', 'paraiso-light', ()), + 'PastieStyle': ('pygments.styles.pastie', 'pastie', ()), + 'PerldocStyle': ('pygments.styles.perldoc', 'perldoc', ()), + 'RainbowDashStyle': ('pygments.styles.rainbow_dash', 'rainbow_dash', ()), + 'RrtStyle': ('pygments.styles.rrt', 'rrt', ()), + 'SasStyle': ('pygments.styles.sas', 'sas', ()), + 'SolarizedDarkStyle': ('pygments.styles.solarized', 'solarized-dark', ()), + 'SolarizedLightStyle': ('pygments.styles.solarized', 'solarized-light', ()), + 'StarofficeStyle': ('pygments.styles.staroffice', 'staroffice', ()), + 'StataDarkStyle': ('pygments.styles.stata_dark', 'stata-dark', ()), + 'StataLightStyle': ('pygments.styles.stata_light', 'stata-light', ()), + 'TangoStyle': ('pygments.styles.tango', 'tango', ()), + 'TracStyle': ('pygments.styles.trac', 'trac', ()), + 'VimStyle': ('pygments.styles.vim', 'vim', ()), + 'VisualStudioStyle': ('pygments.styles.vs', 'vs', ()), + 'XcodeStyle': ('pygments.styles.xcode', 'xcode', ()), + 'ZenburnStyle': ('pygments.styles.zenburn', 'zenburn', ()), +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/token.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/token.py new file mode 100644 index 0000000..2f3b97e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/token.py @@ -0,0 +1,214 @@ +""" + pygments.token + ~~~~~~~~~~~~~~ + + Basic token types and the standard tokens. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + + +class _TokenType(tuple): + parent = None + + def split(self): + buf = [] + node = self + while node is not None: + buf.append(node) + node = node.parent + buf.reverse() + return buf + + def __init__(self, *args): + # no need to call super.__init__ + self.subtypes = set() + + def __contains__(self, val): + return self is val or ( + type(val) is self.__class__ and + val[:len(self)] == self + ) + + def __getattr__(self, val): + if not val or not val[0].isupper(): + return tuple.__getattribute__(self, val) + new = _TokenType(self + (val,)) + setattr(self, val, new) + self.subtypes.add(new) + new.parent = self + return new + + def __repr__(self): + return 'Token' + (self and '.' or '') + '.'.join(self) + + def __copy__(self): + # These instances are supposed to be singletons + return self + + def __deepcopy__(self, memo): + # These instances are supposed to be singletons + return self + + +Token = _TokenType() + +# Special token types +Text = Token.Text +Whitespace = Text.Whitespace +Escape = Token.Escape +Error = Token.Error +# Text that doesn't belong to this lexer (e.g. HTML in PHP) +Other = Token.Other + +# Common token types for source code +Keyword = Token.Keyword +Name = Token.Name +Literal = Token.Literal +String = Literal.String +Number = Literal.Number +Punctuation = Token.Punctuation +Operator = Token.Operator +Comment = Token.Comment + +# Generic types for non-source code +Generic = Token.Generic + +# String and some others are not direct children of Token. +# alias them: +Token.Token = Token +Token.String = String +Token.Number = Number + + +def is_token_subtype(ttype, other): + """ + Return True if ``ttype`` is a subtype of ``other``. + + exists for backwards compatibility. 
use ``ttype in other`` now. + """ + return ttype in other + + +def string_to_tokentype(s): + """ + Convert a string into a token type:: + + >>> string_to_token('String.Double') + Token.Literal.String.Double + >>> string_to_token('Token.Literal.Number') + Token.Literal.Number + >>> string_to_token('') + Token + + Tokens that are already tokens are returned unchanged: + + >>> string_to_token(String) + Token.Literal.String + """ + if isinstance(s, _TokenType): + return s + if not s: + return Token + node = Token + for item in s.split('.'): + node = getattr(node, item) + return node + + +# Map standard token types to short names, used in CSS class naming. +# If you add a new item, please be sure to run this file to perform +# a consistency check for duplicate values. +STANDARD_TYPES = { + Token: '', + + Text: '', + Whitespace: 'w', + Escape: 'esc', + Error: 'err', + Other: 'x', + + Keyword: 'k', + Keyword.Constant: 'kc', + Keyword.Declaration: 'kd', + Keyword.Namespace: 'kn', + Keyword.Pseudo: 'kp', + Keyword.Reserved: 'kr', + Keyword.Type: 'kt', + + Name: 'n', + Name.Attribute: 'na', + Name.Builtin: 'nb', + Name.Builtin.Pseudo: 'bp', + Name.Class: 'nc', + Name.Constant: 'no', + Name.Decorator: 'nd', + Name.Entity: 'ni', + Name.Exception: 'ne', + Name.Function: 'nf', + Name.Function.Magic: 'fm', + Name.Property: 'py', + Name.Label: 'nl', + Name.Namespace: 'nn', + Name.Other: 'nx', + Name.Tag: 'nt', + Name.Variable: 'nv', + Name.Variable.Class: 'vc', + Name.Variable.Global: 'vg', + Name.Variable.Instance: 'vi', + Name.Variable.Magic: 'vm', + + Literal: 'l', + Literal.Date: 'ld', + + String: 's', + String.Affix: 'sa', + String.Backtick: 'sb', + String.Char: 'sc', + String.Delimiter: 'dl', + String.Doc: 'sd', + String.Double: 's2', + String.Escape: 'se', + String.Heredoc: 'sh', + String.Interpol: 'si', + String.Other: 'sx', + String.Regex: 'sr', + String.Single: 's1', + String.Symbol: 'ss', + + Number: 'm', + Number.Bin: 'mb', + Number.Float: 'mf', + Number.Hex: 'mh', + Number.Integer: 'mi', + Number.Integer.Long: 'il', + Number.Oct: 'mo', + + Operator: 'o', + Operator.Word: 'ow', + + Punctuation: 'p', + Punctuation.Marker: 'pm', + + Comment: 'c', + Comment.Hashbang: 'ch', + Comment.Multiline: 'cm', + Comment.Preproc: 'cp', + Comment.PreprocFile: 'cpf', + Comment.Single: 'c1', + Comment.Special: 'cs', + + Generic: 'g', + Generic.Deleted: 'gd', + Generic.Emph: 'ge', + Generic.Error: 'gr', + Generic.Heading: 'gh', + Generic.Inserted: 'gi', + Generic.Output: 'go', + Generic.Prompt: 'gp', + Generic.Strong: 'gs', + Generic.Subheading: 'gu', + Generic.EmphStrong: 'ges', + Generic.Traceback: 'gt', +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/unistring.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/unistring.py new file mode 100644 index 0000000..e3bd2e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/unistring.py @@ -0,0 +1,153 @@ +""" + pygments.unistring + ~~~~~~~~~~~~~~~~~~ + + Strings of all Unicode characters of a certain category. + Used for matching in Unicode-aware languages. Run to regenerate. + + Inspired by chartypes_create.py from the MoinMoin project. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
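A small tour of the token machinery above, assuming the vendored module is importable as pygments.token:

    from pygments.token import STANDARD_TYPES, String, Token, string_to_tokentype

    # Attribute access mints singleton subtypes on first use:
    assert String.Double is Token.Literal.String.Double

    # Containment tests ancestry (the modern spelling of is_token_subtype()):
    assert String.Double in String
    assert String.Double not in Token.Comment

    # Strings round-trip to token types:
    assert string_to_tokentype('String.Double') is String.Double

    # STANDARD_TYPES maps token types to their short CSS class names:
    print(STANDARD_TYPES[String.Double])    # 's2'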
+""" + +Cc = '\x00-\x1f\x7f-\x9f' + +Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f' + +Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-
\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187
ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U0001d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff' + +Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd' + +Cs = '\ud800-\udbff\\\udc00\udc01-\udfff' + +Ll = 
'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\
ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943' + +Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1' + +Lo = 
'\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080
-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d' + +Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc' + +Lu = 
'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\ua724\u
a726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921' + +Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172' + +Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672' + +Mn = 
'\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U000
11d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef' + +Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959' + +Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e' + +No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c' + +Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f' + +Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d' + +Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63' + +Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21' + +Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20' + +Po = 
"!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f" + +Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62' + +Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0' + +Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff' + +Sm = 
'+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1' + +So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d' + +Zl = '\u2028' + +Zp = '\u2029' + +Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000' + +xid_continue = 
'0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0
-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U00011344\U00011347-\U0
0011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef' + +xid_start = 
'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801\ua803-\ua805
\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b1
1e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d' + +cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs'] + +# Generated from unidata 11.0.0 + +def combine(*args): + return ''.join(globals()[cat] for cat in args) + + +def allexcept(*args): + newcats = cats[:] + for arg in args: + newcats.remove(arg) + return ''.join(globals()[cat] for cat in newcats) + + +def _handle_runs(char_list): # pragma: no cover + buf = [] + for c in char_list: + if len(c) == 1: + if buf and buf[-1][1] == chr(ord(c)-1): + buf[-1] = (buf[-1][0], c) + else: + buf.append((c, c)) + else: + buf.append((c, c)) + for a, b in buf: + if a == b: + yield a + else: + yield f'{a}-{b}' + + +if __name__ == '__main__': # pragma: no cover + import unicodedata + + categories = {'xid_start': [], 'xid_continue': []} + + with open(__file__, encoding='utf-8') as fp: + content = fp.read() + + header = content[:content.find('Cc =')] + footer = content[content.find("def combine("):] + + for code in range(0x110000): + c = chr(code) + cat = unicodedata.category(c) + if ord(c) == 0xdc00: + # Hack to avoid combining this combining with the preceding high + # surrogate, 0xdbff, when doing a repr. + c = '\\' + c + elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e): + # Escape regex metachars. + c = '\\' + c + categories.setdefault(cat, []).append(c) + # XID_START and XID_CONTINUE are special categories used for matching + # identifiers in Python 3. 
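# --- Editor's sketch (illustrative; not part of the vendored module) --------
# The category strings above are built for interpolation into regex character
# classes, which is how combine() and allexcept() are meant to be used. A
# minimal usage sketch, assuming the module imports under the vendored path:
import re

from pip._vendor.pygments import unistring as uni

# All letter characters: concatenation of the Lu, Ll, Lt, Lm, Lo strings.
letter_class = '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo') + ']'

# Python-style identifiers, including non-ASCII ones.
ident_re = re.compile('[' + uni.xid_start + '][' + uni.xid_continue + ']*')
assert ident_re.fullmatch('café_2')
# ----------------------------------------------------------------------------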
+ if c.isidentifier(): + categories['xid_start'].append(c) + if ('a' + c).isidentifier(): + categories['xid_continue'].append(c) + + with open(__file__, 'w', encoding='utf-8') as fp: + fp.write(header) + + for cat in sorted(categories): + val = ''.join(_handle_runs(categories[cat])) + fp.write(f'{cat} = {val!a}\n\n') + + cats = sorted(categories) + cats.remove('xid_start') + cats.remove('xid_continue') + fp.write(f'cats = {cats!r}\n\n') + + fp.write(f'# Generated from unidata {unicodedata.unidata_version}\n\n') + + fp.write(footer) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pygments/util.py b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/util.py new file mode 100644 index 0000000..71c5710 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pygments/util.py @@ -0,0 +1,324 @@ +""" + pygments.util + ~~~~~~~~~~~~~ + + Utility functions. + + :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re +from io import TextIOWrapper + + +split_path_re = re.compile(r'[/\\ ]') +doctype_lookup_re = re.compile(r''' + <!DOCTYPE\s+( + [a-zA-Z_][a-zA-Z0-9]* + (?: \s+ # optional in HTML5 + [a-zA-Z_][a-zA-Z0-9]*\s+ + "[^"]*")? + ) + [^>]*> +''', re.DOTALL | re.MULTILINE | re.VERBOSE) +tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>', + re.IGNORECASE | re.DOTALL | re.MULTILINE) +xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I) + + +class ClassNotFound(ValueError): + """Raised if one of the lookup functions didn't find a matching class.""" + + +class OptionError(Exception): + """ + This exception will be raised by all option processing functions if + the type or value of the argument is not correct. + """ + +def get_choice_opt(options, optname, allowed, default=None, normcase=False): + """ + If the key `optname` from the dictionary is not in the sequence + `allowed`, raise an error, otherwise return it. + """ + string = options.get(optname, default) + if normcase: + string = string.lower() + if string not in allowed: + raise OptionError('Value for option {} must be one of {}'.format(optname, ', '.join(map(str, allowed)))) + return string + + +def get_bool_opt(options, optname, default=None): + """ + Intuitively, this is `options.get(optname, default)`, but restricted to + Boolean values. Booleans can also be represented as strings, so that + Boolean values can be accepted from command-line arguments. If the key + `optname` is present in the dictionary `options` and is not associated + with a Boolean, raise an `OptionError`. If it is absent, `default` is + returned instead. + + The valid string values for ``True`` are ``1``, ``yes``, ``true`` and + ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off`` + (matched case-insensitively).
+ """ + string = options.get(optname, default) + if isinstance(string, bool): + return string + elif isinstance(string, int): + return bool(string) + elif not isinstance(string, str): + raise OptionError(f'Invalid type {string!r} for option {optname}; use ' + '1/0, yes/no, true/false, on/off') + elif string.lower() in ('1', 'yes', 'true', 'on'): + return True + elif string.lower() in ('0', 'no', 'false', 'off'): + return False + else: + raise OptionError(f'Invalid value {string!r} for option {optname}; use ' + '1/0, yes/no, true/false, on/off') + + +def get_int_opt(options, optname, default=None): + """As :func:`get_bool_opt`, but interpret the value as an integer.""" + string = options.get(optname, default) + try: + return int(string) + except TypeError: + raise OptionError(f'Invalid type {string!r} for option {optname}; you ' + 'must give an integer value') + except ValueError: + raise OptionError(f'Invalid value {string!r} for option {optname}; you ' + 'must give an integer value') + +def get_list_opt(options, optname, default=None): + """ + If the key `optname` from the dictionary `options` is a string, + split it at whitespace and return it. If it is already a list + or a tuple, it is returned as a list. + """ + val = options.get(optname, default) + if isinstance(val, str): + return val.split() + elif isinstance(val, (list, tuple)): + return list(val) + else: + raise OptionError(f'Invalid type {val!r} for option {optname}; you ' + 'must give a list value') + + +def docstring_headline(obj): + if not obj.__doc__: + return '' + res = [] + for line in obj.__doc__.strip().splitlines(): + if line.strip(): + res.append(" " + line.strip()) + else: + break + return ''.join(res).lstrip() + + +def make_analysator(f): + """Return a static text analyser function that returns float values.""" + def text_analyse(text): + try: + rv = f(text) + except Exception: + return 0.0 + if not rv: + return 0.0 + try: + return min(1.0, max(0.0, float(rv))) + except (ValueError, TypeError): + return 0.0 + text_analyse.__doc__ = f.__doc__ + return staticmethod(text_analyse) + + +def shebang_matches(text, regex): + r"""Check if the given regular expression matches the last part of the + shebang if one exists. + + >>> from pygments.util import shebang_matches + >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?') + True + >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?') + True + >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?') + False + >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?') + False + >>> shebang_matches('#!/usr/bin/startsomethingwith python', + ... 
r'python(2\.\d)?') + True + + It also checks for common Windows executable file extensions:: + + >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?') + True + + Parameters to the interpreter (``'-f'`` or ``'--foo'``) are ignored, so + ``'perl'`` matches the same as ``'perl -e'``. + + Note that this method automatically searches the whole string (e.g. + the regular expression is wrapped in ``'^$'``). + """ + index = text.find('\n') + if index >= 0: + first_line = text[:index].lower() + else: + first_line = text.lower() + if first_line.startswith('#!'): + try: + found = [x for x in split_path_re.split(first_line[2:].strip()) + if x and not x.startswith('-')][-1] + except IndexError: + return False + regex = re.compile(rf'^{regex}(\.(exe|cmd|bat|bin))?$', re.IGNORECASE) + if regex.search(found) is not None: + return True + return False + + +def doctype_matches(text, regex): + """Check if the doctype matches a regular expression (if present). + + Note that this method only checks the first part of a DOCTYPE, + e.g. 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'. + """ + m = doctype_lookup_re.search(text) + if m is None: + return False + doctype = m.group(1) + return re.compile(regex, re.I).match(doctype.strip()) is not None + + +def html_doctype_matches(text): + """Check if the file looks like it has an HTML doctype.""" + return doctype_matches(text, r'html') + + +_looks_like_xml_cache = {} + + +def looks_like_xml(text): + """Check if a doctype exists or if we have some tags.""" + if xml_decl_re.match(text): + return True + key = hash(text) + try: + return _looks_like_xml_cache[key] + except KeyError: + m = doctype_lookup_re.search(text) + if m is not None: + return True + rv = tag_re.search(text[:1000]) is not None + _looks_like_xml_cache[key] = rv + return rv + + +def surrogatepair(c): + """Given a Unicode code point that does not fit in 16 bits, return the + corresponding 16-bit surrogate pair. + """ + # From example D28 of: + # http://www.unicode.org/book/ch03.pdf + return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff))) + + +def format_lines(var_name, seq, raw=False, indent_level=0): + """Formats a sequence of strings for output.""" + lines = [] + base_indent = ' ' * indent_level * 4 + inner_indent = ' ' * (indent_level + 1) * 4 + lines.append(base_indent + var_name + ' = (') + if raw: + # These should be preformatted reprs of, say, tuples. + for i in seq: + lines.append(inner_indent + i + ',') + else: + for i in seq: + # Force use of single quotes + r = repr(i + '"') + lines.append(inner_indent + r[:-2] + r[-1] + ',') + lines.append(base_indent + ')') + return '\n'.join(lines) + + +def duplicates_removed(it, already_seen=()): + """ + Returns a list with duplicates removed from the iterable `it`. + + Order is preserved. + """ + lst = [] + seen = set() + for i in it: + if i in seen or i in already_seen: + continue + lst.append(i) + seen.add(i) + return lst + + +class Future: + """Generic class to defer some work. + + Handled specially in RegexLexerMeta, to support regex string construction at + first use. + """ + def get(self): + raise NotImplementedError + + +def guess_decode(text): + """Decode *text* with guessed encoding. + + First try UTF-8; this should fail for non-UTF-8 encodings. + Then try the preferred locale encoding. + Fall back to latin-1, which always works.
+ """ + try: + text = text.decode('utf-8') + return text, 'utf-8' + except UnicodeDecodeError: + try: + import locale + prefencoding = locale.getpreferredencoding() + text = text.decode() + return text, prefencoding + except (UnicodeDecodeError, LookupError): + text = text.decode('latin1') + return text, 'latin1' + + +def guess_decode_from_terminal(text, term): + """Decode *text* coming from terminal *term*. + + First try the terminal encoding, if given. + Then try UTF-8. Then try the preferred locale encoding. + Fall back to latin-1, which always works. + """ + if getattr(term, 'encoding', None): + try: + text = text.decode(term.encoding) + except UnicodeDecodeError: + pass + else: + return text, term.encoding + return guess_decode(text) + + +def terminal_encoding(term): + """Return our best guess of encoding for the given *term*.""" + if getattr(term, 'encoding', None): + return term.encoding + import locale + return locale.getpreferredencoding() + + +class UnclosingTextIOWrapper(TextIOWrapper): + # Don't close underlying buffer on destruction. + def close(self): + self.flush() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/LICENSE new file mode 100644 index 0000000..b0ae9db --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Thomas Kluyver + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py new file mode 100644 index 0000000..746b89f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__init__.py @@ -0,0 +1,31 @@ +"""Wrappers to call pyproject.toml-based build backend hooks. 
+""" + +from typing import TYPE_CHECKING + +from ._impl import ( + BackendUnavailable, + BuildBackendHookCaller, + HookMissing, + UnsupportedOperation, + default_subprocess_runner, + quiet_subprocess_runner, +) + +__version__ = "1.2.0" +__all__ = [ + "BackendUnavailable", + "BackendInvalid", + "HookMissing", + "UnsupportedOperation", + "default_subprocess_runner", + "quiet_subprocess_runner", + "BuildBackendHookCaller", +] + +BackendInvalid = BackendUnavailable # Deprecated alias, previously a separate exception + +if TYPE_CHECKING: + from ._impl import SubprocessRunner + + __all__ += ["SubprocessRunner"] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..0bcd1d5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc new file mode 100644 index 0000000..1752fe6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py new file mode 100644 index 0000000..d1e9d7b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_impl.py @@ -0,0 +1,410 @@ +import json +import os +import sys +import tempfile +from contextlib import contextmanager +from os.path import abspath +from os.path import join as pjoin +from subprocess import STDOUT, check_call, check_output +from typing import TYPE_CHECKING, Any, Iterator, Mapping, Optional, Sequence + +from ._in_process import _in_proc_script_path + +if TYPE_CHECKING: + from typing import Protocol + + class SubprocessRunner(Protocol): + """A protocol for the subprocess runner.""" + + def __call__( + self, + cmd: Sequence[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, str]] = None, + ) -> None: + ... + + +def write_json(obj: Mapping[str, Any], path: str, **kwargs) -> None: + with open(path, "w", encoding="utf-8") as f: + json.dump(obj, f, **kwargs) + + +def read_json(path: str) -> Mapping[str, Any]: + with open(path, encoding="utf-8") as f: + return json.load(f) + + +class BackendUnavailable(Exception): + """Will be raised if the backend cannot be imported in the hook process.""" + + def __init__( + self, + traceback: str, + message: Optional[str] = None, + backend_name: Optional[str] = None, + backend_path: Optional[Sequence[str]] = None, + ) -> None: + # Preserving arg order for the sake of API backward compatibility. 
+ self.backend_name = backend_name + self.backend_path = backend_path + self.traceback = traceback + super().__init__(message or "Error while importing backend") + + +class HookMissing(Exception): + """Will be raised on missing hooks (if a fallback can't be used).""" + + def __init__(self, hook_name: str) -> None: + super().__init__(hook_name) + self.hook_name = hook_name + + +class UnsupportedOperation(Exception): + """May be raised by build_sdist if the backend indicates that it can't.""" + + def __init__(self, traceback: str) -> None: + self.traceback = traceback + + +def default_subprocess_runner( + cmd: Sequence[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, str]] = None, +) -> None: + """The default method of calling the wrapper subprocess. + + This uses :func:`subprocess.check_call` under the hood. + """ + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_call(cmd, cwd=cwd, env=env) + + +def quiet_subprocess_runner( + cmd: Sequence[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, str]] = None, +) -> None: + """Call the subprocess while suppressing output. + + This uses :func:`subprocess.check_output` under the hood. + """ + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + + check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) + + +def norm_and_check(source_tree: str, requested: str) -> str: + """Normalise and check a backend path. + + Ensure that the requested backend path is specified as a relative path, + and resolves to a location under the given source tree. + + Return an absolute version of the requested path. + """ + if os.path.isabs(requested): + raise ValueError("paths must be relative") + + abs_source = os.path.abspath(source_tree) + abs_requested = os.path.normpath(os.path.join(abs_source, requested)) + # We have to use commonprefix for Python 2.7 compatibility. So we + # normalise case to avoid problems because commonprefix is a character + # based comparison :-( + norm_source = os.path.normcase(abs_source) + norm_requested = os.path.normcase(abs_requested) + if os.path.commonprefix([norm_source, norm_requested]) != norm_source: + raise ValueError("paths must be inside source tree") + + return abs_requested + + +class BuildBackendHookCaller: + """A wrapper to call the build backend hooks for a source directory.""" + + def __init__( + self, + source_dir: str, + build_backend: str, + backend_path: Optional[Sequence[str]] = None, + runner: Optional["SubprocessRunner"] = None, + python_executable: Optional[str] = None, + ) -> None: + """ + :param source_dir: The source directory to invoke the build backend for + :param build_backend: The build backend spec + :param backend_path: Additional path entries for the build backend spec + :param runner: The :ref:`subprocess runner <Subprocess Runners>` to use + :param python_executable: + The Python executable used to invoke the build backend + """ + if runner is None: + runner = default_subprocess_runner + + self.source_dir = abspath(source_dir) + self.build_backend = build_backend + if backend_path: + backend_path = [norm_and_check(self.source_dir, p) for p in backend_path] + self.backend_path = backend_path + self._subprocess_runner = runner + if not python_executable: + python_executable = sys.executable + self.python_executable = python_executable + + @contextmanager + def subprocess_runner(self, runner: "SubprocessRunner") -> Iterator[None]: + """A context manager for temporarily overriding the default + :ref:`subprocess runner <Subprocess Runners>`.
+ + :param runner: The new subprocess runner to use within the context. + + .. code-block:: python + + hook_caller = BuildBackendHookCaller(...) + with hook_caller.subprocess_runner(quiet_subprocess_runner): + ... + """ + prev = self._subprocess_runner + self._subprocess_runner = runner + try: + yield + finally: + self._subprocess_runner = prev + + def _supported_features(self) -> Sequence[str]: + """Return the list of optional features supported by the backend.""" + return self._call_hook("_supported_features", {}) + + def get_requires_for_build_wheel( + self, + config_settings: Optional[Mapping[str, Any]] = None, + ) -> Sequence[str]: + """Get additional dependencies required for building a wheel. + + :param config_settings: The configuration settings for the build backend + :returns: A list of :pep:`dependency specifiers <508>`. + + .. admonition:: Fallback + + If the build backend does not define a hook with this name, an + empty list will be returned. + """ + return self._call_hook( + "get_requires_for_build_wheel", {"config_settings": config_settings} + ) + + def prepare_metadata_for_build_wheel( + self, + metadata_directory: str, + config_settings: Optional[Mapping[str, Any]] = None, + _allow_fallback: bool = True, + ) -> str: + """Prepare a ``*.dist-info`` folder with metadata for this project. + + :param metadata_directory: The directory to write the metadata to + :param config_settings: The configuration settings for the build backend + :param _allow_fallback: + Whether to allow the fallback to building a wheel and extracting + the metadata from it. Should be passed as a keyword argument only. + + :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + + .. admonition:: Fallback + + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_wheel`` hook and the dist-info extracted from + that will be returned. + """ + return self._call_hook( + "prepare_metadata_for_build_wheel", + { + "metadata_directory": abspath(metadata_directory), + "config_settings": config_settings, + "_allow_fallback": _allow_fallback, + }, + ) + + def build_wheel( + self, + wheel_directory: str, + config_settings: Optional[Mapping[str, Any]] = None, + metadata_directory: Optional[str] = None, + ) -> str: + """Build a wheel from this project. + + :param wheel_directory: The directory to write the wheel to + :param config_settings: The configuration settings for the build backend + :param metadata_directory: The directory to reuse existing metadata from + :returns: + The name of the newly created wheel within ``wheel_directory``. + + .. admonition:: Interaction with fallback + + If the ``build_wheel`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_wheel`, the build backend would + not be invoked. Instead, the previously built wheel will be copied + to ``wheel_directory`` and the name of that file will be returned. + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) + return self._call_hook( + "build_wheel", + { + "wheel_directory": abspath(wheel_directory), + "config_settings": config_settings, + "metadata_directory": metadata_directory, + }, + ) + + def get_requires_for_build_editable( + self, + config_settings: Optional[Mapping[str, Any]] = None, + ) -> Sequence[str]: + """Get additional dependencies required for building an editable wheel.
+ + :param config_settings: The configuration settings for the build backend + :returns: A list of :pep:`dependency specifiers <508>`. + + .. admonition:: Fallback + + If the build backend does not define a hook with this name, an + empty list will be returned. + """ + return self._call_hook( + "get_requires_for_build_editable", {"config_settings": config_settings} + ) + + def prepare_metadata_for_build_editable( + self, + metadata_directory: str, + config_settings: Optional[Mapping[str, Any]] = None, + _allow_fallback: bool = True, + ) -> Optional[str]: + """Prepare a ``*.dist-info`` folder with metadata for this project. + + :param metadata_directory: The directory to write the metadata to + :param config_settings: The configuration settings for the build backend + :param _allow_fallback: + Whether to allow the fallback to building a wheel and extracting + the metadata from it. Should be passed as a keyword argument only. + :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + + .. admonition:: Fallback + + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_editable`` hook and the dist-info + extracted from that will be returned. + """ + return self._call_hook( + "prepare_metadata_for_build_editable", + { + "metadata_directory": abspath(metadata_directory), + "config_settings": config_settings, + "_allow_fallback": _allow_fallback, + }, + ) + + def build_editable( + self, + wheel_directory: str, + config_settings: Optional[Mapping[str, Any]] = None, + metadata_directory: Optional[str] = None, + ) -> str: + """Build an editable wheel from this project. + + :param wheel_directory: The directory to write the wheel to + :param config_settings: The configuration settings for the build backend + :param metadata_directory: The directory to reuse existing metadata from + :returns: + The name of the newly created wheel within ``wheel_directory``. + + .. admonition:: Interaction with fallback + + If the ``build_editable`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_editable`, the build backend + would not be invoked. Instead, the previously built wheel will be + copied to ``wheel_directory`` and the name of that file will be + returned. + """ + if metadata_directory is not None: + metadata_directory = abspath(metadata_directory) + return self._call_hook( + "build_editable", + { + "wheel_directory": abspath(wheel_directory), + "config_settings": config_settings, + "metadata_directory": metadata_directory, + }, + ) + + def get_requires_for_build_sdist( + self, + config_settings: Optional[Mapping[str, Any]] = None, + ) -> Sequence[str]: + """Get additional dependencies required for building an sdist. + + :returns: A list of :pep:`dependency specifiers <508>`. + """ + return self._call_hook( + "get_requires_for_build_sdist", {"config_settings": config_settings} + ) + + def build_sdist( + self, + sdist_directory: str, + config_settings: Optional[Mapping[str, Any]] = None, + ) -> str: + """Build an sdist from this project. + + :returns: + The name of the newly created sdist within ``sdist_directory``.
+ """ + return self._call_hook( + "build_sdist", + { + "sdist_directory": abspath(sdist_directory), + "config_settings": config_settings, + }, + ) + + def _call_hook(self, hook_name: str, kwargs: Mapping[str, Any]) -> Any: + extra_environ = {"_PYPROJECT_HOOKS_BUILD_BACKEND": self.build_backend} + + if self.backend_path: + backend_path = os.pathsep.join(self.backend_path) + extra_environ["_PYPROJECT_HOOKS_BACKEND_PATH"] = backend_path + + with tempfile.TemporaryDirectory() as td: + hook_input = {"kwargs": kwargs} + write_json(hook_input, pjoin(td, "input.json"), indent=2) + + # Run the hook in a subprocess + with _in_proc_script_path() as script: + python = self.python_executable + self._subprocess_runner( + [python, abspath(str(script)), hook_name, td], + cwd=self.source_dir, + extra_environ=extra_environ, + ) + + data = read_json(pjoin(td, "output.json")) + if data.get("unsupported"): + raise UnsupportedOperation(data.get("traceback", "")) + if data.get("no_backend"): + raise BackendUnavailable( + data.get("traceback", ""), + message=data.get("backend_error", ""), + backend_name=self.build_backend, + backend_path=self.backend_path, + ) + if data.get("hook_missing"): + raise HookMissing(data.get("missing_hook_name") or hook_name) + return data["return_val"] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py new file mode 100644 index 0000000..906d0ba --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py @@ -0,0 +1,21 @@ +"""This is a subpackage because the directory is on sys.path for _in_process.py + +The subpackage should stay as empty as possible to avoid shadowing modules that +the backend might import. +""" + +import importlib.resources as resources + +try: + resources.files +except AttributeError: + # Python 3.8 compatibility + def _in_proc_script_path(): + return resources.path(__package__, "_in_process.py") + +else: + + def _in_proc_script_path(): + return resources.as_file( + resources.files(__package__).joinpath("_in_process.py") + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..bc395ab Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc new file mode 100644 index 0000000..872cd86 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py new file mode 100644 index 0000000..d689bab --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py @@ -0,0 +1,389 @@ +"""This is invoked in a subprocess to call the build backend hooks. 
+ +It expects: +- Command line args: hook_name, control_dir +- Environment variables: + _PYPROJECT_HOOKS_BUILD_BACKEND=entry.point:spec + _PYPROJECT_HOOKS_BACKEND_PATH=paths (separated with os.pathsep) +- control_dir/input.json: + - {"kwargs": {...}} + +Results: +- control_dir/output.json + - {"return_val": ...} +""" +import json +import os +import os.path +import re +import shutil +import sys +import traceback +from glob import glob +from importlib import import_module +from importlib.machinery import PathFinder +from os.path import join as pjoin + +# This file is run as a script, and `import wrappers` is not zip-safe, so we +# include write_json() and read_json() from wrappers.py. + + +def write_json(obj, path, **kwargs): + with open(path, "w", encoding="utf-8") as f: + json.dump(obj, f, **kwargs) + + +def read_json(path): + with open(path, encoding="utf-8") as f: + return json.load(f) + + +class BackendUnavailable(Exception): + """Raised if we cannot import the backend""" + + def __init__(self, message, traceback=None): + super().__init__(message) + self.message = message + self.traceback = traceback + + +class HookMissing(Exception): + """Raised if a hook is missing and we are not executing the fallback""" + + def __init__(self, hook_name=None): + super().__init__(hook_name) + self.hook_name = hook_name + + +def _build_backend(): + """Find and load the build backend""" + backend_path = os.environ.get("_PYPROJECT_HOOKS_BACKEND_PATH") + ep = os.environ["_PYPROJECT_HOOKS_BUILD_BACKEND"] + mod_path, _, obj_path = ep.partition(":") + + if backend_path: + # Ensure in-tree backend directories have the highest priority when importing. + extra_pathitems = backend_path.split(os.pathsep) + sys.meta_path.insert(0, _BackendPathFinder(extra_pathitems, mod_path)) + + try: + obj = import_module(mod_path) + except ImportError: + msg = f"Cannot import {mod_path!r}" + raise BackendUnavailable(msg, traceback.format_exc()) + + if obj_path: + for path_part in obj_path.split("."): + obj = getattr(obj, path_part) + return obj + + +class _BackendPathFinder: + """Implements the MetaPathFinder interface to locate modules in ``backend-path``. + + Since the environment provided by the frontend can contain all sorts of + MetaPathFinders, the only way to ensure the backend is loaded from the + right place is to prepend our own. + """ + + def __init__(self, backend_path, backend_module): + self.backend_path = backend_path + self.backend_module = backend_module + self.backend_parent, _, _ = backend_module.partition(".") + + def find_spec(self, fullname, _path, _target=None): + if "." in fullname: + # Rely on importlib to find nested modules based on parent's path + return None + + # Ignore other items in _path or sys.path and use backend_path instead: + spec = PathFinder.find_spec(fullname, path=self.backend_path) + if spec is None and fullname == self.backend_parent: + # According to the spec, the backend MUST be loaded from backend-path. + # Therefore, we can halt the import machinery and raise a clean error. 
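+            # (Returning None here instead would let later finders on
+            # sys.meta_path resolve the name, masking the misconfiguration.)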
+            msg = f"Cannot find module {self.backend_module!r} in {self.backend_path!r}"
+            raise BackendUnavailable(msg)
+
+        return spec
+
+    if sys.version_info >= (3, 8):
+
+        def find_distributions(self, context=None):
+            # Delayed import: Python 3.7 does not contain importlib.metadata
+            from importlib.metadata import DistributionFinder, MetadataPathFinder
+
+            context = DistributionFinder.Context(path=self.backend_path)
+            return MetadataPathFinder.find_distributions(context=context)
+
+
+def _supported_features():
+    """Return the list of optional features supported by the backend.
+
+    Returns a list of strings.
+    The only possible value is 'build_editable'.
+    """
+    backend = _build_backend()
+    features = []
+    if hasattr(backend, "build_editable"):
+        features.append("build_editable")
+    return features
+
+
+def get_requires_for_build_wheel(config_settings):
+    """Invoke the optional get_requires_for_build_wheel hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_wheel
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+def get_requires_for_build_editable(config_settings):
+    """Invoke the optional get_requires_for_build_editable hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_editable
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+def prepare_metadata_for_build_wheel(
+    metadata_directory, config_settings, _allow_fallback
+):
+    """Invoke optional prepare_metadata_for_build_wheel
+
+    Implements a fallback by building a wheel if the hook isn't defined,
+    unless _allow_fallback is False in which case HookMissing is raised.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.prepare_metadata_for_build_wheel
+    except AttributeError:
+        if not _allow_fallback:
+            raise HookMissing()
+    else:
+        return hook(metadata_directory, config_settings)
+    # fallback to build_wheel outside the try block to avoid exception chaining,
+    # which can be confusing to users and is not relevant here
+    whl_basename = backend.build_wheel(metadata_directory, config_settings)
+    return _get_wheel_metadata_from_wheel(
+        whl_basename, metadata_directory, config_settings
+    )
+
+
+def prepare_metadata_for_build_editable(
+    metadata_directory, config_settings, _allow_fallback
+):
+    """Invoke optional prepare_metadata_for_build_editable
+
+    Implements a fallback by building an editable wheel if the hook isn't
+    defined, unless _allow_fallback is False in which case HookMissing is
+    raised.
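+
+    (If the fallback is taken but the backend also lacks a build_editable
+    hook, HookMissing is raised for 'build_editable' instead.)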
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.prepare_metadata_for_build_editable
+    except AttributeError:
+        if not _allow_fallback:
+            raise HookMissing()
+        try:
+            build_hook = backend.build_editable
+        except AttributeError:
+            raise HookMissing(hook_name="build_editable")
+        else:
+            whl_basename = build_hook(metadata_directory, config_settings)
+            return _get_wheel_metadata_from_wheel(
+                whl_basename, metadata_directory, config_settings
+            )
+    else:
+        return hook(metadata_directory, config_settings)
+
+
+WHEEL_BUILT_MARKER = "PYPROJECT_HOOKS_ALREADY_BUILT_WHEEL"
+
+
+def _dist_info_files(whl_zip):
+    """Identify the .dist-info folder inside a wheel ZipFile."""
+    res = []
+    for path in whl_zip.namelist():
+        m = re.match(r"[^/\\]+-[^/\\]+\.dist-info/", path)
+        if m:
+            res.append(path)
+    if res:
+        return res
+    raise Exception("No .dist-info folder found in wheel")
+
+
+def _get_wheel_metadata_from_wheel(whl_basename, metadata_directory, config_settings):
+    """Extract the metadata from a wheel.
+
+    Fallback for when the build backend does not
+    define the 'get_wheel_metadata' hook.
+    """
+    from zipfile import ZipFile
+
+    with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), "wb"):
+        pass  # Touch marker file
+
+    whl_file = os.path.join(metadata_directory, whl_basename)
+    with ZipFile(whl_file) as zipf:
+        dist_info = _dist_info_files(zipf)
+        zipf.extractall(path=metadata_directory, members=dist_info)
+    return dist_info[0].split("/")[0]
+
+
+def _find_already_built_wheel(metadata_directory):
+    """Check for a wheel already built during the get_wheel_metadata hook."""
+    if not metadata_directory:
+        return None
+    metadata_parent = os.path.dirname(metadata_directory)
+    if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
+        return None
+
+    whl_files = glob(os.path.join(metadata_parent, "*.whl"))
+    if not whl_files:
+        print("Found wheel built marker, but no .whl files")
+        return None
+    if len(whl_files) > 1:
+        print(
+            "Found multiple .whl files; unspecified behaviour. "
+            "Will call build_wheel."
+        )
+        return None
+
+    # Exactly one .whl file
+    return whl_files[0]
+
+
+def build_wheel(wheel_directory, config_settings, metadata_directory=None):
+    """Invoke the mandatory build_wheel hook.
+
+    If a wheel was already built in the
+    prepare_metadata_for_build_wheel fallback, this
+    will copy it rather than rebuilding the wheel.
+    """
+    prebuilt_whl = _find_already_built_wheel(metadata_directory)
+    if prebuilt_whl:
+        shutil.copy2(prebuilt_whl, wheel_directory)
+        return os.path.basename(prebuilt_whl)
+
+    return _build_backend().build_wheel(
+        wheel_directory, config_settings, metadata_directory
+    )
+
+
+def build_editable(wheel_directory, config_settings, metadata_directory=None):
+    """Invoke the optional build_editable hook.
+
+    If a wheel was already built in the
+    prepare_metadata_for_build_editable fallback, this
+    will copy it rather than rebuilding the wheel.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.build_editable
+    except AttributeError:
+        raise HookMissing()
+    else:
+        prebuilt_whl = _find_already_built_wheel(metadata_directory)
+        if prebuilt_whl:
+            shutil.copy2(prebuilt_whl, wheel_directory)
+            return os.path.basename(prebuilt_whl)
+
+        return hook(wheel_directory, config_settings, metadata_directory)
+
+
+def get_requires_for_build_sdist(config_settings):
+    """Invoke the optional get_requires_for_build_sdist hook
+
+    Returns [] if the hook is not defined.
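+
+    Example (illustrative): a backend that needs extra tools for its sdist
+    step might return something like ["setuptools>=61"].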
+ """ + backend = _build_backend() + try: + hook = backend.get_requires_for_build_sdist + except AttributeError: + return [] + else: + return hook(config_settings) + + +class _DummyException(Exception): + """Nothing should ever raise this exception""" + + +class GotUnsupportedOperation(Exception): + """For internal use when backend raises UnsupportedOperation""" + + def __init__(self, traceback): + self.traceback = traceback + + +def build_sdist(sdist_directory, config_settings): + """Invoke the mandatory build_sdist hook.""" + backend = _build_backend() + try: + return backend.build_sdist(sdist_directory, config_settings) + except getattr(backend, "UnsupportedOperation", _DummyException): + raise GotUnsupportedOperation(traceback.format_exc()) + + +HOOK_NAMES = { + "get_requires_for_build_wheel", + "prepare_metadata_for_build_wheel", + "build_wheel", + "get_requires_for_build_editable", + "prepare_metadata_for_build_editable", + "build_editable", + "get_requires_for_build_sdist", + "build_sdist", + "_supported_features", +} + + +def main(): + if len(sys.argv) < 3: + sys.exit("Needs args: hook_name, control_dir") + hook_name = sys.argv[1] + control_dir = sys.argv[2] + if hook_name not in HOOK_NAMES: + sys.exit("Unknown hook: %s" % hook_name) + + # Remove the parent directory from sys.path to avoid polluting the backend + # import namespace with this directory. + here = os.path.dirname(__file__) + if here in sys.path: + sys.path.remove(here) + + hook = globals()[hook_name] + + hook_input = read_json(pjoin(control_dir, "input.json")) + + json_out = {"unsupported": False, "return_val": None} + try: + json_out["return_val"] = hook(**hook_input["kwargs"]) + except BackendUnavailable as e: + json_out["no_backend"] = True + json_out["traceback"] = e.traceback + json_out["backend_error"] = e.message + except GotUnsupportedOperation as e: + json_out["unsupported"] = True + json_out["traceback"] = e.traceback + except HookMissing as e: + json_out["hook_missing"] = True + json_out["missing_hook_name"] = e.hook_name or hook_name + + write_json(json_out, pjoin(control_dir, "output.json"), indent=2) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/pyproject_hooks/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/requests/LICENSE new file mode 100644 index 0000000..67db858 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py new file mode 100644 index 0000000..04230fc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__init__.py @@ -0,0 +1,179 @@ +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at . + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. 
+""" + +import warnings + +from pip._vendor import urllib3 + +from .exceptions import RequestsDependencyWarning + +charset_normalizer_version = None +chardet_version = None + + +def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): + urllib3_version = urllib3_version.split(".") + assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. + if len(urllib3_version) == 2: + urllib3_version.append("0") + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1 + assert major >= 1 + if major == 1: + assert minor >= 21 + + # Check charset_normalizer for compatibility. + if chardet_version: + major, minor, patch = chardet_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet_version >= 3.0.2, < 6.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) + elif charset_normalizer_version: + major, minor, patch = charset_normalizer_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # charset_normalizer >= 2.0.0 < 4.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) + else: + # pip does not need or use character detection + pass + + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split("."))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) + warnings.warn(warning, RequestsDependencyWarning) + + +# Check imported dependencies for compatibility. +try: + check_compatibility( + urllib3.__version__, chardet_version, charset_normalizer_version + ) +except (AssertionError, ValueError): + warnings.warn( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format( + urllib3.__version__, chardet_version, charset_normalizer_version + ), + RequestsDependencyWarning, + ) + +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. +try: + # Note: This logic prevents upgrading cryptography on Windows, if imported + # as part of pip. + from pip._internal.utils.compat import WINDOWS + if not WINDOWS: + raise ImportError("pip internals: don't import cryptography on Windows") + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from pip._vendor.urllib3.contrib import pyopenssl + + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from pip._vendor.urllib3.exceptions import DependencyWarning + +warnings.simplefilter("ignore", DependencyWarning) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +from logging import NullHandler + +from . 
import packages, utils +from .__version__ import ( + __author__, + __author_email__, + __build__, + __cake__, + __copyright__, + __description__, + __license__, + __title__, + __url__, + __version__, +) +from .api import delete, get, head, options, patch, post, put, request +from .exceptions import ( + ConnectionError, + ConnectTimeout, + FileModeWarning, + HTTPError, + JSONDecodeError, + ReadTimeout, + RequestException, + Timeout, + TooManyRedirects, + URLRequired, +) +from .models import PreparedRequest, Request, Response +from .sessions import Session, session +from .status_codes import codes + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. +warnings.simplefilter("default", FileModeWarning, append=True) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..36b62b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc new file mode 100644 index 0000000..b8a6e1c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc new file mode 100644 index 0000000..a214516 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc new file mode 100644 index 0000000..06ff0ec Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc new file mode 100644 index 0000000..3f48ec6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc new file mode 100644 index 0000000..4c510a2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc new file mode 100644 index 0000000..9c2d8c8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000..65e6cfe Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc new file mode 100644 index 0000000..3c57c65 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..2254d77 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc new file mode 100644 index 0000000..d773e41 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/help.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc new file mode 100644 index 0000000..8b7a401 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..016b48c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc new file mode 100644 index 0000000..43a5a5d Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc new file mode 100644 index 0000000..e94adb9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc new file mode 100644 index 0000000..914474b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc new file mode 100644 index 0000000..a3361f1 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..84d2c7e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py new file mode 100644 index 0000000..effdd98 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = "requests" +__description__ = "Python HTTP for Humans." +__url__ = "https://requests.readthedocs.io" +__version__ = "2.32.5" +__build__ = 0x023205 +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache-2.0" +__copyright__ = "Copyright Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py new file mode 100644 index 0000000..f2cf635 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/_internal_utils.py @@ -0,0 +1,50 @@ +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" +import re + +from .compat import builtin_str + +_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") +_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") +_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") +_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") + +_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) +_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) +HEADER_VALIDATORS = { + bytes: _HEADER_VALIDATORS_BYTE, + str: _HEADER_VALIDATORS_STR, +} + + +def to_native_string(string, encoding="ascii"): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode("ascii") + return True + except UnicodeEncodeError: + return False diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py new file mode 100644 index 0000000..67ccebc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/adapters.py @@ -0,0 +1,696 @@ +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. 
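+
+A short usage sketch (mirroring the ``HTTPAdapter`` docstring below; the
+retry count here is an arbitrary example)::
+
+    import requests
+    s = requests.Session()
+    s.mount("https://", requests.adapters.HTTPAdapter(max_retries=3))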
+""" + +import os.path +import socket # noqa: F401 +import typing +import warnings + +from pip._vendor.urllib3.exceptions import ClosedPoolError, ConnectTimeoutError +from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError +from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader +from pip._vendor.urllib3.exceptions import ( + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, +) +from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError +from pip._vendor.urllib3.exceptions import ReadTimeoutError, ResponseError +from pip._vendor.urllib3.exceptions import SSLError as _SSLError +from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url +from pip._vendor.urllib3.util import Timeout as TimeoutSauce +from pip._vendor.urllib3.util import parse_url +from pip._vendor.urllib3.util.retry import Retry + +from .auth import _basic_auth_str +from .compat import basestring, urlparse +from .cookies import extract_cookies_to_jar +from .exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidHeader, + InvalidProxyURL, + InvalidSchema, + InvalidURL, + ProxyError, + ReadTimeout, + RetryError, + SSLError, +) +from .models import Response +from .structures import CaseInsensitiveDict +from .utils import ( + DEFAULT_CA_BUNDLE_PATH, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) + +try: + from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + + +if typing.TYPE_CHECKING: + from .models import PreparedRequest + + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +def _urllib3_request_context( + request: "PreparedRequest", + verify: "bool | str | None", + client_cert: "typing.Tuple[str, str] | str | None", + poolmanager: "PoolManager", +) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])": + host_params = {} + pool_kwargs = {} + parsed_request_url = urlparse(request.url) + scheme = parsed_request_url.scheme.lower() + port = parsed_request_url.port + + cert_reqs = "CERT_REQUIRED" + if verify is False: + cert_reqs = "CERT_NONE" + elif isinstance(verify, str): + if not os.path.isdir(verify): + pool_kwargs["ca_certs"] = verify + else: + pool_kwargs["ca_cert_dir"] = verify + pool_kwargs["cert_reqs"] = cert_reqs + if client_cert is not None: + if isinstance(client_cert, tuple) and len(client_cert) == 2: + pool_kwargs["cert_file"] = client_cert[0] + pool_kwargs["key_file"] = client_cert[1] + else: + # According to our docs, we allow users to specify just the client + # cert path + pool_kwargs["cert_file"] = client_cert + host_params = { + "scheme": scheme, + "host": parsed_request_url.hostname, + "port": port, + } + return host_params, pool_kwargs + + +class BaseAdapter: + """The Base Transport Adapter""" + + def __init__(self): + super().__init__() + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. 
+ :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session ` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + + __attrs__ = [ + "max_retries", + "config", + "_pool_connections", + "_pool_maxsize", + "_pool_block", + ] + + def __init__( + self, + pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, + max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK, + ): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super().__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager( + self._pool_connections, self._pool_maxsize, block=self._pool_block + ) + + def init_poolmanager( + self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs + ): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. 
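+
+        Example: a subclass may forward a custom TLS configuration (a sketch;
+        ``ctx`` is assumed to be an :py:class:`ssl.SSLContext` built by the
+        caller)::
+
+            class SSLContextAdapter(HTTPAdapter):
+                def init_poolmanager(self, *args, **kwargs):
+                    kwargs["ssl_context"] = ctx  # passed through to urllib3
+                    return super().init_poolmanager(*args, **kwargs)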
+ """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager( + num_pools=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith("socks"): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith("https") and verify: + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" + ) + + conn.cert_reqs = "CERT_REQUIRED" + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = "CERT_NONE" + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise OSError( + f"Could not find the TLS certificate file, " + f"invalid path: {conn.cert_file}" + ) + if conn.key_file and not os.path.exists(conn.key_file): + raise OSError( + f"Could not find the TLS key file, invalid path: {conn.key_file}" + ) + + def build_response(self, req, resp): + """Builds a :class:`Response ` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter ` + + :param req: The :class:`PreparedRequest ` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. 
+        response.status_code = getattr(resp, "status", None)
+
+        # Make headers case-insensitive.
+        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
+
+        # Set encoding.
+        response.encoding = get_encoding_from_headers(response.headers)
+        response.raw = resp
+        response.reason = response.raw.reason
+
+        if isinstance(req.url, bytes):
+            response.url = req.url.decode("utf-8")
+        else:
+            response.url = req.url
+
+        # Add new cookies from the server.
+        extract_cookies_to_jar(response.cookies, req, resp)
+
+        # Give the Response some context.
+        response.request = req
+        response.connection = self
+
+        return response
+
+    def build_connection_pool_key_attributes(self, request, verify, cert=None):
+        """Build the PoolKey attributes used by urllib3 to return a connection.
+
+        This looks at the PreparedRequest, the user-specified verify value,
+        and the value of the cert parameter to determine what PoolKey values
+        to use to select a connection from a given urllib3 Connection Pool.
+
+        The SSL related pool key arguments are not consistently set. As of
+        this writing, use the following to determine what keys may be in that
+        dictionary:
+
+        * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
+          default Requests SSL Context
+        * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
+          ``"cert_reqs"`` will be set
+        * If ``verify`` is a string (i.e., it is a user-specified trust bundle)
+          ``"ca_certs"`` will be set if the string is not a directory recognized
+          by :py:func:`os.path.isdir`, otherwise ``"ca_cert_dir"`` will be
+          set.
+        * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
+          ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
+          be present
+
+        To override these settings, one may subclass this class, call this
+        method, and use the above logic to change parameters as desired. For
+        example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
+        must both set ``"ssl_context"`` and, based on what else they require,
+        alter the other keys to ensure the desired behaviour.
+
+        :param request:
+            The PreparedRequest being sent over the connection.
+        :type request:
+            :class:`~requests.models.PreparedRequest`
+        :param verify:
+            Either a boolean, in which case it controls whether
+            we verify the server's TLS certificate, or a string, in which case it
+            must be a path to a CA bundle to use.
+        :param cert:
+            (optional) Any user-provided SSL certificate for client
+            authentication (a.k.a., mTLS). This may be a string (i.e., just
+            the path to a file which holds both certificate and key) or a
+            tuple of length 2 with the certificate file path and key file
+            path.
+        :returns:
+            A tuple of two dictionaries. The first is the "host parameters"
+            portion of the Pool Key including scheme, hostname, and port. The
+            second is a dictionary of SSLContext related parameters.
+        """
+        return _urllib3_request_context(request, verify, cert, self.poolmanager)
+
+    def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
+        """Returns a urllib3 connection for the given request and TLS settings.
+        This should not be called from user code, and is only exposed for use
+        when subclassing the
+        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+        :param request:
+            The :class:`PreparedRequest <PreparedRequest>` object to be sent
+            over the connection.
+        :param verify:
+            Either a boolean, in which case it controls whether we verify the
+            server's TLS certificate, or a string, in which case it must be a
+            path to a CA bundle to use.
+ :param proxies: + (optional) The proxies dictionary to apply to the request. + :param cert: + (optional) Any user-provided SSL certificate to be used for client + authentication (a.k.a., mTLS). + :rtype: + urllib3.ConnectionPool + """ + proxy = select_proxy(request.url, proxies) + try: + host_params, pool_kwargs = self.build_connection_pool_key_attributes( + request, + verify, + cert, + ) + except ValueError as e: + raise InvalidURL(e, request=request) + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." + ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_host( + **host_params, pool_kwargs=pool_kwargs + ) + else: + # Only scheme should be lower case + conn = self.poolmanager.connection_from_host( + **host_params, pool_kwargs=pool_kwargs + ) + + return conn + + def get_connection(self, url, proxies=None): + """DEPRECATED: Users should move to `get_connection_with_tls_context` + for all subclasses of HTTPAdapter using Requests>=2.32.2. + + Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + warnings.warn( + ( + "`get_connection` has been deprecated in favor of " + "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses " + "will need to migrate for Requests>=2.32.2. Please see " + "https://github.com/psf/requests/pull/6710 for more details." + ), + DeprecationWarning, + ) + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." + ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. 
+ :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = proxy and scheme != "https" + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith("socks") + + url = request.path_url + if url.startswith("//"): # Don't confuse urllib3 + url = f"/{url.lstrip('/')}" + + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter `. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return headers + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + :rtype: requests.Response + """ + + try: + conn = self.get_connection_with_tls_context( + request, verify, proxies=proxies, cert=cert + ) + except LocationValueError as e: + raise InvalidURL(e, request=request) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers( + request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + ) + + chunked = not (request.body is None or "Content-Length" in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError: + raise ValueError( + f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " + f"or a single float to set both timeouts to the same value." 
+ ) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) + + except (ProtocolError, OSError) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + elif isinstance(e, _InvalidHeader): + raise InvalidHeader(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/api.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/api.py new file mode 100644 index 0000000..5960744 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/api.py @@ -0,0 +1,157 @@ +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. + + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. 
+ :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("get", url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("options", url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("post", url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("put", url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("patch", url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("delete", url, **kwargs) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py new file mode 100644 index 0000000..4a7ce6d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/auth.py @@ -0,0 +1,314 @@ +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import hashlib +import os +import re +import threading +import time +import warnings +from base64 import b64encode + +from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode("latin1") + + if isinstance(password, str): + password = password.encode("latin1") + + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() + ) + + return authstr + + +class AuthBase: + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError("Auth hooks must be callable.") + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, "init"): + self._thread_local.init = True + self._thread_local.last_nonce = "" + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") + hash_utf8 = None + + if algorithm is None: + _algorithm = "MD5" + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + + def md5_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.md5(x).hexdigest() + + hash_utf8 = md5_utf8 + elif _algorithm == "SHA": + + def sha_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha1(x).hexdigest() + + hash_utf8 = sha_utf8 + elif _algorithm == "SHA-256": + + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha256(x).hexdigest() + + hash_utf8 = sha256_utf8 + elif _algorithm == "SHA-512": + + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha512(x).hexdigest() + + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += 
f"?{p_parsed.query}" + + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") + s += os.urandom(8) + + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") + + if not qop: + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) + if opaque: + base += f', opaque="{opaque}"' + if algorithm: + base += f', algorithm="{algorithm}"' + if entdig: + base += f', digest="{entdig}"' + if qop: + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' + + return f"Digest {base}" + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get("www-authenticate", "") + + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: + self._thread_local.num_401_calls += 1 + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
+ self._thread_local.pos = None + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py new file mode 100644 index 0000000..2743144 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/certs.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from pip._vendor.certifi import where + +if __name__ == "__main__": + print(where()) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py new file mode 100644 index 0000000..b95a921 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/compat.py @@ -0,0 +1,90 @@ +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module previously handled import compatibility issues +between Python 2 and Python 3. It remains for backwards +compatibility until the next major version. +""" + +import sys + +# ------- +# urllib3 +# ------- +from pip._vendor.urllib3 import __version__ as urllib3_version + +# Detect which major version of urllib3 is being used. +try: + is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1 +except (TypeError, AttributeError): + # If we can't discern a version, prefer old functionality. + is_urllib3_1 = True + +# ------------------- +# Character Detection +# ------------------- + + +def _resolve_char_detection(): + """Find supported character detection libraries.""" + chardet = None + return chardet + + +chardet = _resolve_char_detection() + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = _ver[0] == 2 + +#: Python 3.x? +is_py3 = _ver[0] == 3 + +# Note: We've patched out simplejson support in pip because it prevents +# upgrading simplejson on Windows. +import json +from json import JSONDecodeError + +# Keep OrderedDict for backwards compatibility. 
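+# Illustrative: downstream code that still does
+#   from requests.compat import urlparse, OrderedDict
+# keeps working against this module, even though both names now come
+# straight from the standard library.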
+from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping +from http import cookiejar as cookielib +from http.cookies import Morsel +from io import StringIO + +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) + +builtin_str = str +str = str +bytes = bytes +basestring = (str, bytes) +numeric_types = (int, float) +integer_types = (int,) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py new file mode 100644 index 0000000..f69d0cd --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/cookies.py @@ -0,0 +1,561 @@ +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `http.cookiejar.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import calendar +import copy +import time + +from ._internal_utils import to_native_string +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest: + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `http.cookiejar.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
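+
+    A rough, illustrative flow using the helpers defined below (the
+    `prepared_request` name here is hypothetical):
+
+        jar = cookiejar_from_dict({"k": "v"})
+        get_cookie_header(jar, prepared_request)  # wraps it in a MockRequest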
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get("Host"): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers["Host"], encoding="utf-8") + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookiejar has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse: + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `http.cookiejar` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookiejar` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, "_original_response") and response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get("Cookie") + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super().__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super().update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value + + if toReturn: + return toReturn + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop("_cookies_lock") + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if "_cookies_lock" not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, "copy"): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
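+
+    Illustrative call (values are made up):
+
+        >>> c = create_cookie("token", "abc123", domain=".example.com", secure=True)
+        >>> c.domain, c.secure
+        ('.example.com', True)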
+ """ + result = { + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + + badargs = set(kwargs) - set(result) + if badargs: + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) + + result.update(kwargs) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel["max-age"]: + try: + expires = int(time.time() + int(morsel["max-age"])) + except ValueError: + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + return create_cookie( + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), + discard=False, + domain=morsel["domain"], + expires=expires, + name=morsel.key, + path=morsel["path"], + port=None, + rest={"HttpOnly": morsel["httponly"]}, + rfc2109=False, + secure=bool(morsel["secure"]), + value=morsel.value, + version=morsel["version"] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError("You can only merge into CookieJar") + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py new file mode 100644 index 0000000..7f3660f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/exceptions.py @@ -0,0 +1,151 @@ +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
+""" +from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError + +from .compat import JSONDecodeError as CompatJSONDecodeError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): + """Couldn't decode the text into json""" + + def __init__(self, *args, **kwargs): + """ + Construct the JSONDecodeError instance first with all + args. Then use it's args to construct the IOError so that + the json specific args aren't used as IOError specific args + and the error message from JSONDecodeError is preserved. + """ + CompatJSONDecodeError.__init__(self, *args) + InvalidJSONError.__init__(self, *self.args, **kwargs) + + def __reduce__(self): + """ + The __reduce__ method called when pickling the object must + be the one from the JSONDecodeError (be it json/simplejson) + as it expects all the arguments for instantiation, not just + one like the IOError, and the MRO would by default call the + __reduce__ method from the IOError due to the inheritance order. + """ + return CompatJSONDecodeError.__reduce__(self) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL scheme (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """The URL scheme provided is either invalid or unsupported.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/help.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/help.py new file mode 100644 index 0000000..ddbb615 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/help.py @@ -0,0 +1,127 @@ +"""Module containing bug report helper(s).""" + +import json +import platform +import ssl +import sys + +from pip._vendor import idna +from pip._vendor import urllib3 + +from . import __version__ as requests_version + +charset_normalizer = None +chardet = None + +try: + from pip._vendor.urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import cryptography + import OpenSSL + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 3.10.3 it will return + {'name': 'CPython', 'version': '3.10.3'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
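+
+    Illustrative return value on a PyPy build (version numbers made up):
+    {'name': 'PyPy', 'version': '7.3.12'}, with the release level appended
+    for non-final builds (e.g. '7.3.12beta').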
+ """ + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": + implementation_version = platform.python_version() # Complete Guess + elif implementation == "IronPython": + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = "Unknown" + + return {"name": implementation, "version": implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + "system": platform.system(), + "release": platform.release(), + } + except OSError: + platform_info = { + "system": "Unknown", + "release": "Unknown", + } + + implementation_info = _implementation() + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} + if charset_normalizer: + charset_normalizer_info = {"version": charset_normalizer.__version__} + if chardet: + chardet_info = {"version": chardet.__version__} + + pyopenssl_info = { + "version": None, + "openssl_version": "", + } + if OpenSSL: + pyopenssl_info = { + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", + } + cryptography_info = { + "version": getattr(cryptography, "__version__", ""), + } + idna_info = { + "version": getattr(idna, "__version__", ""), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + + return { + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py new file mode 100644 index 0000000..d181ba2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/hooks.py @@ -0,0 +1,33 @@ +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ["response"] + + +def default_hooks(): + return {event: [] for event in HOOKS} + + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, "__call__"): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/models.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/models.py new file mode 100644 index 0000000..22de95c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/models.py @@ -0,0 +1,1039 @@ +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna # noqa: F401 +from io import UnsupportedOperation + +from pip._vendor.urllib3.exceptions import ( + DecodeError, + LocationParseError, + ProtocolError, + ReadTimeoutError, + SSLError, +) +from pip._vendor.urllib3.fields import RequestField +from pip._vendor.urllib3.filepost import encode_multipart_formdata +from pip._vendor.urllib3.util import parse_url + +from ._internal_utils import to_native_string, unicode_is_ascii +from .auth import HTTPBasicAuth +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header +from .exceptions import ( + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) +from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema +from .exceptions import SSLError as RequestsSSLError +from .exceptions import StreamConsumedError +from .hooks import default_hooks +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin: + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = "/" + + url.append(path) + + query = p.query + if query: + url.append("?") + url.append(query) + + return "".join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, "read"): + return data + elif hasattr(data, "__iter__"): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): + vs = [vs] + for v in vs: + if v is not None: + result.append( + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if not files: + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, "__iter__"): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + + for k, v in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, "read"): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin: + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError(f'Unsupported event specified, with event name "{event}"') + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, "__iter__"): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. 
If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for k, v in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return f"" + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return f"" + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + from pip._vendor import idna + + try: + host = idna.encode(host, uts46=True).decode("utf-8") + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode("utf8") + else: + url = str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ":" in url and not url.lower().startswith("http"): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + raise MissingSchema( + f"Invalid URL {url!r}: No scheme supplied. " + f"Perhaps you meant https://{url}?" + ) + + if not host: + raise InvalidURL(f"Invalid URL {url!r}: No host supplied") + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL("URL has an invalid label.") + elif host.startswith(("*", ".")): + raise InvalidURL("URL has an invalid label.") + + # Carefully reconstruct the network location + netloc = auth or "" + if netloc: + netloc += "@" + netloc += host + if port: + netloc += f":{port}" + + # Bare domains aren't valid URLs. + if not path: + path = "/" + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = f"{query}&{enc_params}" + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. 
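+        # Rough dispatch map (illustrative summary of the branches below):
+        #   json=...            -> JSON-encoded bytes, Content-Type: application/json
+        #   data=<generator/fh> -> streamed body, Content-Length or chunked
+        #   files=...           -> multipart/form-data via _encode_files()
+        #   data=dict or list   -> application/x-www-form-urlencoded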
+ body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = "application/json" + + try: + body = complexjson.dumps(json, allow_nan=False) + except ValueError as ve: + raise InvalidJSONError(ve, request=self) + + if not isinstance(body, bytes): + body = body.encode("utf-8") + + is_stream = all( + [ + hasattr(data, "__iter__"), + not isinstance(data, (basestring, list, tuple, Mapping)), + ] + ) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, "tell", None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError( + "Streamed bodies and files are mutually exclusive." + ) + + if length: + self.headers["Content-Length"] = builtin_str(length) + else: + self.headers["Transfer-Encoding"] = "chunked" + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, "read"): + content_type = None + else: + content_type = "application/x-www-form-urlencoded" + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ("content-type" not in self.headers): + self.headers["Content-Type"] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers["Content-Length"] = builtin_str(length) + elif ( + self.method not in ("GET", "HEAD") + and self.headers.get("Content-Length") is None + ): + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) + self.headers["Content-Length"] = "0" + + def prepare_auth(self, auth, url=""): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. 
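+
+        Illustrative consequence (the `session` name is hypothetical): to
+        re-send a request with fresh cookies, drop the stale header first:
+
+            prep = response.request.copy()
+            del prep.headers["Cookie"]
+            prep.prepare_cookies(session.cookies)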
+        """
+        if isinstance(cookies, cookielib.CookieJar):
+            self._cookies = cookies
+        else:
+            self._cookies = cookiejar_from_dict(cookies)
+
+        cookie_header = get_cookie_header(self._cookies, self)
+        if cookie_header is not None:
+            self.headers["Cookie"] = cookie_header
+
+    def prepare_hooks(self, hooks):
+        """Prepares the given hooks."""
+        # hooks can be passed as None to the prepare method and to this
+        # method. To prevent iterating over None, simply use an empty list
+        # if hooks is False-y
+        hooks = hooks or []
+        for event in hooks:
+            self.register_hook(event, hooks[event])
+
+
+class Response:
+    """The :class:`Response <Response>` object, which contains a
+    server's response to an HTTP request.
+    """
+
+    __attrs__ = [
+        "_content",
+        "status_code",
+        "headers",
+        "url",
+        "history",
+        "encoding",
+        "reason",
+        "cookies",
+        "elapsed",
+        "request",
+    ]
+
+    def __init__(self):
+        self._content = False
+        self._content_consumed = False
+        self._next = None
+
+        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
+        self.status_code = None
+
+        #: Case-insensitive Dictionary of Response Headers.
+        #: For example, ``headers['content-encoding']`` will return the
+        #: value of a ``'Content-Encoding'`` response header.
+        self.headers = CaseInsensitiveDict()
+
+        #: File-like object representation of response (for advanced usage).
+        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
+        #: This requirement does not apply for use internally to Requests.
+        self.raw = None
+
+        #: Final URL location of Response.
+        self.url = None
+
+        #: Encoding to decode with when accessing r.text.
+        self.encoding = None
+
+        #: A list of :class:`Response <Response>` objects from
+        #: the history of the Request. Any redirect responses will end
+        #: up here. The list is sorted from the oldest to the most recent request.
+        self.history = []
+
+        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
+        self.reason = None
+
+        #: A CookieJar of Cookies the server sent back.
+        self.cookies = cookiejar_from_dict({})
+
+        #: The amount of time elapsed between sending the request
+        #: and the arrival of the response (as a timedelta).
+        #: This property specifically measures the time taken between sending
+        #: the first byte of the request and finishing parsing the headers. It
+        #: is therefore unaffected by consuming the response content or the
+        #: value of the ``stream`` keyword argument.
+        self.elapsed = datetime.timedelta(0)
+
+        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
+        #: is a response.
+        self.request = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        self.close()
+
+    def __getstate__(self):
+        # Consume everything; accessing the content attribute makes
+        # sure the content has been fully read.
+        if not self._content_consumed:
+            self.content
+
+        return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+    def __setstate__(self, state):
+        for name, value in state.items():
+            setattr(self, name, value)
+
+        # pickled objects do not have .raw
+        setattr(self, "_content_consumed", True)
+        setattr(self, "raw", None)
+
+    def __repr__(self):
+        return f"<Response [{self.status_code}]>"
+
+    def __bool__(self):
+        """Returns True if :attr:`status_code` is less than 400.
+
+        This attribute checks if the status code of the response is between
+        400 and 600 to see if there was a client error or a server error. If
+        the status code is between 200 and 400, this will return True. This
+        is **not** a check to see if the response code is ``200 OK``.
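+
+        A sketch of typical use (``resp`` is a placeholder ``Response``)::
+
+            if resp:   # equivalent to bool(resp.ok)
+                ...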
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return "location" in self.headers and self.status_code in REDIRECT_STATI + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return "location" in self.headers and self.status_code in ( + codes.moved_permanently, + codes.permanent_redirect, + ) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + if chardet is not None: + return chardet.detect(self.content)["encoding"] + else: + # If no character detection library is available, we'll fall back + # to a standard Python utf-8 str. + return "utf-8" + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, "stream"): + try: + yield from self.raw.stream(chunk_size, decode_content=True) + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + except SSLError as e: + raise RequestsSSLError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError( + f"chunk_size must be an int, it is instead a {type(chunk_size)}." 
+ ) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines( + self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None + ): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content( + chunk_size=chunk_size, decode_unicode=decode_unicode + ): + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + yield from lines + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError("The content for this response was already consumed") + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``charset_normalizer`` or ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return "" + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors="replace") + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors="replace") + + return content + + def json(self, **kwargs): + r"""Decodes the JSON response body (if any) as a Python object. + + This may return a dictionary, list, etc. depending on what is in the response. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises requests.exceptions.JSONDecodeError: If the response body does not + contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using charset_normalizer to make + # a best guess). 
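+            # Illustrative sketch (the payload is hypothetical): a BOM-prefixed
+            # UTF-16 body such as '{"k": 1}'.encode("utf-16") is decoded via
+            # the detected codec here, without any declared charset:
+            #
+            #     resp.json()   # -> {"k": 1}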
+ encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads(self.content.decode(encoding), **kwargs) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + except JSONDecodeError as e: + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + try: + return complexjson.loads(self.text, **kwargs) + except JSONDecodeError as e: + # Catch JSON-related errors and raise as requests.JSONDecodeError + # This aliases json.JSONDecodeError and simplejson.JSONDecodeError + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get("link") + + resolved_links = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get("rel") or link.get("url") + resolved_links[key] = link + + return resolved_links + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = "" + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode("utf-8") + except UnicodeDecodeError: + reason = self.reason.decode("iso-8859-1") + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = ( + f"{self.status_code} Client Error: {reason} for url: {self.url}" + ) + + elif 500 <= self.status_code < 600: + http_error_msg = ( + f"{self.status_code} Server Error: {reason} for url: {self.url}" + ) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, "release_conn", None) + if release_conn is not None: + release_conn() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py new file mode 100644 index 0000000..200c382 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/packages.py @@ -0,0 +1,25 @@ +import sys + +from .compat import chardet + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. :) + +for package in ("urllib3", "idna"): + vendored_package = "pip._vendor." + package + locals()[package] = __import__(vendored_package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == vendored_package or mod.startswith(vendored_package + '.'): + unprefixed_mod = mod[len("pip._vendor."):] + sys.modules['pip._vendor.requests.packages.' 
+ unprefixed_mod] = sys.modules[mod] + +if chardet is not None: + target = chardet.__name__ + for mod in list(sys.modules): + if mod == target or mod.startswith(f"{target}."): + imported_mod = sys.modules[mod] + sys.modules[f"requests.packages.{mod}"] = imported_mod + mod = mod.replace(target, "chardet") + sys.modules[f"requests.packages.{mod}"] = imported_mod diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py new file mode 100644 index 0000000..731550d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/sessions.py @@ -0,0 +1,831 @@ +""" +requests.sessions +~~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from collections import OrderedDict +from datetime import timedelta + +from ._internal_utils import to_native_string +from .adapters import HTTPAdapter +from .auth import _basic_auth_str +from .compat import Mapping, cookielib, urljoin, urlparse +from .cookies import ( + RequestsCookieJar, + cookiejar_from_dict, + extract_cookies_to_jar, + merge_cookies, +) +from .exceptions import ( + ChunkedEncodingError, + ContentDecodingError, + InvalidSchema, + TooManyRedirects, +) +from .hooks import default_hooks, dispatch_hook + +# formerly defined here, reexposed here for backward compatibility +from .models import ( # noqa: F401 + DEFAULT_REDIRECT_LIMIT, + REDIRECT_STATI, + PreparedRequest, + Request, +) +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( # noqa: F401 + DEFAULT_PORTS, + default_headers, + get_auth_from_url, + get_environ_proxies, + get_netrc_auth, + requote_uri, + resolve_proxies, + rewind_body, + should_bypass_proxies, + to_key_val_list, +) + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == "win32": + preferred_clock = time.perf_counter +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. + + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. 
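+
+    A sketch of the guard (``log_hook`` is a placeholder callable)::
+
+        merge_hooks({"response": []}, {"response": [log_hook]})
+        # -> {"response": [log_hook]}: an empty request-side list must not win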
+ """ + if session_hooks is None or session_hooks.get("response") == []: + return request_hooks + + if request_hooks is None or request_hooks.get("response") == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin: + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers["location"] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + location = location.encode("latin1") + return to_native_string(location, "utf8") + return None + + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if ( + old_parsed.scheme == "http" + and old_parsed.port in (80, None) + and new_parsed.scheme == "https" + and new_parsed.port in (443, None) + ): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if ( + not changed_scheme + and old_parsed.port in default_port + and new_parsed.port in default_port + ): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + + def resolve_redirects( + self, + resp, + req, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, + yield_requests=False, + **adapter_kwargs, + ): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects( + f"Exceeded {self.max_redirects} redirects.", response=resp + ) + + # Release the connection back into the pool. 
+ resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith("//"): + parsed_rurl = urlparse(resp.url) + url = ":".join([to_native_string(parsed_rurl.scheme), url]) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == "" and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. + if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/psf/requests/issues/1084 + if resp.status_code not in ( + codes.temporary_redirect, + codes.permanent_redirect, + ): + # https://github.com/psf/requests/issues/3490 + purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + headers.pop("Cookie", None) + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = prepared_request._body_position is not None and ( + "Content-Length" in headers or "Transfer-Encoding" in headers + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs, + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if "Authorization" in headers and self.should_strip_auth( + response.request.url, url + ): + # If we get redirected to a new host, we should strip out any + # authentication headers. + del headers["Authorization"] + + # .netrc might have more auth for us on our new host. 
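+        # (e.g. a "machine <host> login <user> password <secret>" entry in
+        # ~/.netrc for the redirect target would be applied here when
+        # trust_env is enabled)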
+        new_auth = get_netrc_auth(url) if self.trust_env else None
+        if new_auth is not None:
+            prepared_request.prepare_auth(new_auth)
+
+    def rebuild_proxies(self, prepared_request, proxies):
+        """This method re-evaluates the proxy configuration by considering the
+        environment variables. If we are redirected to a URL covered by
+        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+        proxy keys for this URL (in case they were stripped by a previous
+        redirect).
+
+        This method also replaces the Proxy-Authorization header where
+        necessary.
+
+        :rtype: dict
+        """
+        headers = prepared_request.headers
+        scheme = urlparse(prepared_request.url).scheme
+        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
+
+        if "Proxy-Authorization" in headers:
+            del headers["Proxy-Authorization"]
+
+        try:
+            username, password = get_auth_from_url(new_proxies[scheme])
+        except KeyError:
+            username, password = None, None
+
+        # urllib3 handles proxy authorization for us in the standard adapter.
+        # Avoid appending this to TLS tunneled requests where it may be leaked.
+        if not scheme.startswith("https") and username and password:
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+
+        return new_proxies
+
+    def rebuild_method(self, prepared_request, response):
+        """When being redirected we may want to change the method of the request
+        based on certain specs or browser behavior.
+        """
+        method = prepared_request.method
+
+        # https://tools.ietf.org/html/rfc7231#section-6.4.4
+        if response.status_code == codes.see_other and method != "HEAD":
+            method = "GET"
+
+        # Do what the browsers do, despite standards...
+        # First, turn 302s into GETs.
+        if response.status_code == codes.found and method != "HEAD":
+            method = "GET"
+
+        # Second, if a POST is responded to with a 301, turn it into a GET.
+        # This bizarre behaviour is explained in Issue 1704.
+        if response.status_code == codes.moved and method == "POST":
+            method = "GET"
+
+        prepared_request.method = method
+
+
+class Session(SessionRedirectMixin):
+    """A Requests session.
+
+    Provides cookie persistence, connection-pooling, and configuration.
+
+    Basic Usage::
+
+      >>> import requests
+      >>> s = requests.Session()
+      >>> s.get('https://httpbin.org/get')
+      <Response [200]>
+
+    Or as a context manager::
+
+      >>> with requests.Session() as s:
+      ...     s.get('https://httpbin.org/get')
+      <Response [200]>
+    """
+
+    __attrs__ = [
+        "headers",
+        "cookies",
+        "auth",
+        "proxies",
+        "hooks",
+        "params",
+        "verify",
+        "cert",
+        "adapters",
+        "stream",
+        "trust_env",
+        "max_redirects",
+    ]
+
+    def __init__(self):
+        #: A case-insensitive dictionary of headers to be sent on each
+        #: :class:`Request <Request>` sent from this
+        #: :class:`Session <Session>`.
+        self.headers = default_headers()
+
+        #: Default Authentication tuple or object to attach to
+        #: :class:`Request <Request>`.
+        self.auth = None
+
+        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+        #: be used on each :class:`Request <Request>`.
+        self.proxies = {}
+
+        #: Event-handling hooks.
+        self.hooks = default_hooks()
+
+        #: Dictionary of querystring data to attach to each
+        #: :class:`Request <Request>`. The dictionary values may be lists for
+        #: representing multivalued query parameters.
+        self.params = {}
+
+        #: Stream response content default.
+        self.stream = False
+
+        #: SSL Verification default.
+        #: Defaults to `True`, requiring requests to verify the TLS certificate at the
+        #: remote end.
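+        #: As an illustration (``s`` and the path are hypothetical),
+        #: verification can also be pinned to a specific CA bundle::
+        #:
+        #:     s.verify = "/etc/ssl/certs/custom-ca.pem"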
+ #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar `, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount("https://", HTTPAdapter()) + self.mount("http://", HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest ` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request ` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies + ) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting( + request.headers, self.headers, dict_class=CaseInsensitiveDict + ), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + """Constructs a :class:`Request `, prepares it and sends it. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. 
+ :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. + :param hooks: (optional) Dictionary mapping hook name to one event or + list of events, event must be callable. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + "timeout": timeout, + "allow_redirects": allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("GET", url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("OPTIONS", url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return self.request("HEAD", url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. 
+ :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("POST", url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PUT", url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PATCH", url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("DELETE", url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault("stream", self.stream) + kwargs.setdefault("verify", self.verify) + kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: + kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError("You can only send PreparedRequests.") + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop("allow_redirects", True) + stream = kwargs.get("stream") + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook("response", hooks, r, **kwargs) + + # Persist cookies + if r.history: + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Resolve redirects if allowed. + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). 
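+        # Illustrative sketch (URL and ``s`` are placeholders): with
+        # allow_redirects=False the queued follow-up request is exposed via
+        # ``Response.next`` instead of being sent:
+        #
+        #     r = s.get("https://example.com/old", allow_redirects=False)
+        #     r.next   # PreparedRequest for the Location target, or None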
+ if not allow_redirects: + try: + r._next = next( + self.resolve_redirects(r, request, yield_requests=True, **kwargs) + ) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. + no_proxy = proxies.get("no_proxy") if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for k, v in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration + # and be compatible with cURL. + if verify is True or verify is None: + verify = ( + os.environ.get("REQUESTS_CA_BUNDLE") + or os.environ.get("CURL_CA_BUNDLE") + or verify + ) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ + for prefix, adapter in self.adapters.items(): + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema(f"No connection adapters were found for {url!r}") + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + + :rtype: Session + """ + return Session() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/status_codes.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/status_codes.py new file mode 100644 index 0000000..c7945a2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/status_codes.py @@ -0,0 +1,128 @@ +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + # Informational. 
+ 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing", "early-hints"), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), + # Redirection. + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 + # Client Error. + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large", "content_too_large"), + 414: ("request_uri_too_large", "uri_too_long"), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable", "unprocessable_content"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered", "too_early"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), + # Server Error. 
+ 500: ("internal_server_error", "server_error", "/o\\", "✗"), + 501: ("not_implemented",), + 502: ("bad_gateway",), + 503: ("service_unavailable", "unavailable"), + 504: ("gateway_timeout",), + 505: ("http_version_not_supported", "http_version"), + 506: ("variant_also_negotiates",), + 507: ("insufficient_storage",), + 509: ("bandwidth_limit_exceeded", "bandwidth"), + 510: ("not_extended",), + 511: ("network_authentication_required", "network_auth", "network_authentication"), +} + +codes = LookupDict(name="status_codes") + + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(("\\", "/")): + setattr(codes, title.upper(), code) + + def doc(code): + names = ", ".join(f"``{n}``" for n in _codes[code]) + return "* %d: %s" % (code, names) + + global __doc__ + __doc__ = ( + __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) + if __doc__ is not None + else None + ) + + +_init() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py new file mode 100644 index 0000000..188e13e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/structures.py @@ -0,0 +1,99 @@ +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
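+        # e.g. cid["Accept"] = "application/json" is stored internally as
+        # _store["accept"] = ("Accept", "application/json")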
+        self._store[key.lower()] = (key, value)
+
+    def __getitem__(self, key):
+        return self._store[key.lower()][1]
+
+    def __delitem__(self, key):
+        del self._store[key.lower()]
+
+    def __iter__(self):
+        return (casedkey for casedkey, mappedvalue in self._store.values())
+
+    def __len__(self):
+        return len(self._store)
+
+    def lower_items(self):
+        """Like iteritems(), but with all lowercase keys."""
+        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
+
+    def __eq__(self, other):
+        if isinstance(other, Mapping):
+            other = CaseInsensitiveDict(other)
+        else:
+            return NotImplemented
+        # Compare insensitively
+        return dict(self.lower_items()) == dict(other.lower_items())
+
+    # Copy is required
+    def copy(self):
+        return CaseInsensitiveDict(self._store.values())
+
+    def __repr__(self):
+        return str(dict(self.items()))
+
+
+class LookupDict(dict):
+    """Dictionary lookup object."""
+
+    def __init__(self, name=None):
+        self.name = name
+        super().__init__()
+
+    def __repr__(self):
+        return f"<lookup '{self.name}'>"
+
+    def __getitem__(self, key):
+        # We allow fall-through here, so values default to None
+
+        return self.__dict__.get(key, None)
+
+    def get(self, key, default=None):
+        return self.__dict__.get(key, default)
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py b/venv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py
new file mode 100644
index 0000000..e8ea5ad
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/requests/utils.py
@@ -0,0 +1,1086 @@
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+from collections import OrderedDict
+
+from pip._vendor.urllib3.util import make_headers, parse_url
+
+from . import certs
+from .__version__ import __version__
+
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import (  # noqa: F401
+    _HEADER_VALIDATORS_BYTE,
+    _HEADER_VALIDATORS_STR,
+    HEADER_VALIDATORS,
+    to_native_string,
+)
+from .compat import (
+    Mapping,
+    basestring,
+    bytes,
+    getproxies,
+    getproxies_environment,
+    integer_types,
+    is_urllib3_1,
+)
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+    proxy_bypass,
+    proxy_bypass_environment,
+    quote,
+    str,
+    unquote,
+    urlparse,
+    urlunparse,
+)
+from .cookies import cookiejar_from_dict
+from .exceptions import (
+    FileModeWarning,
+    InvalidHeader,
+    InvalidURL,
+    UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
+
+NETRC_FILES = (".netrc", "_netrc")
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+DEFAULT_PORTS = {"http": 80, "https": 443}
+
+# Ensure that ', ' is used to preserve previous delimiter behavior.
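+# With a urllib3 build that has no optional codecs installed this typically
+# evaluates to "gzip, deflate"; extra entries appear when brotli or zstd
+# support is available (the exact set depends on the environment).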
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
+
+if sys.platform == "win32":
+    # provide a proxy_bypass version on Windows without DNS lookups
+
+    def proxy_bypass_registry(host):
+        try:
+            import winreg
+        except ImportError:
+            return False
+
+        try:
+            internetSettings = winreg.OpenKey(
+                winreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
+            )
+            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
+            # ProxyOverride is almost always a string
+            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
+        except (OSError, ValueError):
+            return False
+        if not proxyEnable or not proxyOverride:
+            return False
+
+        # make a check value list from the registry entry: replace the
+        # '<local>' string by the localhost entry and the corresponding
+        # canonical entry.
+        proxyOverride = proxyOverride.split(";")
+        # filter out empty strings to avoid re.match return true in the following code.
+        proxyOverride = filter(None, proxyOverride)
+        # now check if we match one of the registry values.
+        for test in proxyOverride:
+            if test == "<local>":
+                if "." not in host:
+                    return True
+            test = test.replace(".", r"\.")  # mask dots
+            test = test.replace("*", r".*")  # change glob sequence
+            test = test.replace("?", r".")  # change glob char
+            if re.match(test, host, re.I):
+                return True
+        return False
+
+    def proxy_bypass(host):  # noqa
+        """Return True, if the host should be bypassed.
+
+        Checks proxy settings gathered from the environment, if specified,
+        or the registry.
+        """
+        if getproxies_environment():
+            return proxy_bypass_environment(host)
+        else:
+            return proxy_bypass_registry(host)
+
+
+def dict_to_sequence(d):
+    """Returns an internal sequence dictionary update."""
+
+    if hasattr(d, "items"):
+        d = d.items()
+
+    return d
+
+
+def super_len(o):
+    total_length = None
+    current_position = 0
+
+    if not is_urllib3_1 and isinstance(o, str):
+        # urllib3 2.x+ treats all strings as utf-8 instead
+        # of latin-1 (iso-8859-1) like http.client.
+        o = o.encode("utf-8")
+
+    if hasattr(o, "__len__"):
+        total_length = len(o)
+
+    elif hasattr(o, "len"):
+        total_length = o.len
+
+    elif hasattr(o, "fileno"):
+        try:
+            fileno = o.fileno()
+        except (io.UnsupportedOperation, AttributeError):
+            # AttributeError is a surprising exception, seeing as how we've just checked
+            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+            # `Tarfile.extractfile()`, per issue 5229.
+            pass
+        else:
+            total_length = os.fstat(fileno).st_size
+
+            # Having used fstat to determine the file length, we need to
+            # confirm that this file was opened up in binary mode.
+            if "b" not in o.mode:
+                warnings.warn(
+                    (
+                        "Requests has determined the content-length for this "
+                        "request using the binary size of the file: however, the "
+                        "file has been opened in text mode (i.e. without the 'b' "
+                        "flag in the mode). This may lead to an incorrect "
+                        "content-length. In Requests 3.0, support will be removed "
+                        "for files in text mode."
+                    ),
+                    FileModeWarning,
+                )
+
+    if hasattr(o, "tell"):
+        try:
+            current_position = o.tell()
+        except OSError:
+            # This can happen in some weird situations, such as when the file
+            # is actually a special file descriptor like stdin. In this
+            # instance, we don't know what the length is, so set it to zero and
+            # let requests chunk it instead.
+ if total_length is not None: + current_position = total_length + else: + if hasattr(o, "seek") and total_length is None: + # StringIO and BytesIO have seek but no usable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except OSError: + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get("NETRC") + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = (f"~/{f}" for f in NETRC_FILES) + + try: + from netrc import NetrcParseError, netrc + + netrc_path = None + + for f in netrc_locations: + loc = os.path.expanduser(f) + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + host = ri.hostname + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = 0 if _netrc[0] else 1 + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, OSError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, "name", None) + if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
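+
+    A sketch of the intended behaviour (paths are hypothetical; the result
+    lands in the system temp directory)::
+
+        >>> extract_zipped_paths("/opt/bundle.zip/certs/cacert.pem")  # doctest: +SKIP
+        '/tmp/cacert.pem'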
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + if not prefix: + # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), + # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + break + member = "/".join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split("/")[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: + yield tmp_handler + os.replace(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {cookie.name: cookie.value for cookie in cj} + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn( + ( + "In requests 3.0, get_encodings_from_content will be removed. For " + "more information, please see the discussion on issue #2266. 
(This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return ( + charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content) + ) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(";") + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1 :].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get("content-type") + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if "charset" in params: + return params["charset"].strip("'\"") + + if "text" in content_type: + return "ISO-8859-1" + + if "application/json" in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return "utf-8" + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes an iterator.""" + + if r.encoding is None: + yield from iterator + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace") + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b"", final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos : pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors="replace") + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split("%") + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = f"%{parts[i]}" + else: + parts[i] = f"%{parts[i]}" + return "".join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except OSError: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count("/") == 1: + try: + mask = int(string_network.split("/")[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split("/")[0]) + except OSError: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy("no_proxy") + parsed = urlparse(url) + + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += f":{parsed.port}" + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + with set_environ("no_proxy", no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get("all")) + + proxy_keys = [ + urlparts.scheme + "://" + urlparts.hostname, + urlparts.scheme, + "all://" + urlparts.hostname, + "all", + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def resolve_proxies(request, proxies, trust_env=True): + """This method takes proxy information from a request and configuration + input to resolve a mapping of target proxies. This will consider settings + such as NO_PROXY to strip proxy configurations. + + :param request: Request or PreparedRequest + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + :param trust_env: Boolean declaring whether to trust environment configs + + :rtype: dict + """ + proxies = proxies if proxies is not None else {} + url = request.url + scheme = urlparse(url).scheme + no_proxy = proxies.get("no_proxy") + new_proxies = proxies.copy() + + if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy): + environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) + + proxy = environ_proxies.get(scheme, environ_proxies.get("all")) + + if proxy: + new_proxies.setdefault(scheme, proxy) + return new_proxies + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. 
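+
+    For example (illustrative version string only): with ``__version__``
+    equal to ``"2.31.0"``, ``default_user_agent()`` returns
+    ``"python-requests/2.31.0"`` and ``default_user_agent("my-app")``
+    returns ``"my-app/2.31.0"``.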
+
+    :rtype: str
+    """
+    return f"{name}/{__version__}"
+
+
+def default_headers():
+    """
+    :rtype: requests.structures.CaseInsensitiveDict
+    """
+    return CaseInsensitiveDict(
+        {
+            "User-Agent": default_user_agent(),
+            "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
+            "Accept": "*/*",
+            "Connection": "keep-alive",
+        }
+    )
+
+
+def parse_header_links(value):
+    """Return a list of parsed link headers.
+
+    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+    :rtype: list
+    """
+
+    links = []
+
+    replace_chars = " '\""
+
+    value = value.strip(replace_chars)
+    if not value:
+        return links
+
+    for val in re.split(", *<", value):
+        try:
+            url, params = val.split(";", 1)
+        except ValueError:
+            url, params = val, ""
+
+        link = {"url": url.strip("<> '\"")}
+
+        for param in params.split(";"):
+            try:
+                key, value = param.split("=")
+            except ValueError:
+                break
+
+            link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+        links.append(link)
+
+    return links
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+    """
+    :rtype: str
+    """
+    # JSON always starts with two ASCII characters, so detection is as
+    # easy as counting the nulls and from their location and count
+    # determine the encoding. Also detect a BOM, if present.
+    sample = data[:4]
+    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+        return "utf-32"  # BOM included
+    if sample[:3] == codecs.BOM_UTF8:
+        return "utf-8-sig"  # BOM included, MS style (discouraged)
+    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+        return "utf-16"  # BOM included
+    nullcount = sample.count(_null)
+    if nullcount == 0:
+        return "utf-8"
+    if nullcount == 2:
+        if sample[::2] == _null2:  # 1st and 3rd are null
+            return "utf-16-be"
+        if sample[1::2] == _null2:  # 2nd and 4th are null
+            return "utf-16-le"
+        # Did not detect 2 valid UTF-16 ascii-range characters
+    if nullcount == 3:
+        if sample[:3] == _null3:
+            return "utf-32-be"
+        if sample[1:] == _null3:
+            return "utf-32-le"
+        # Did not detect a valid UTF-32 ascii-range character
+    return None
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+    """Given a URL that may or may not have a scheme, prepend the given scheme.
+    Does not replace a present scheme with the one provided as an argument.
+
+    :rtype: str
+    """
+    parsed = parse_url(url)
+    scheme, auth, host, port, path, query, fragment = parsed
+
+    # A defect in urlparse determines that there isn't a netloc present in some
+    # urls. We previously assumed parsing was overly cautious, and swapped the
+    # netloc and path. Due to a lack of tests on the original defect, this is
+    # maintained with parse_url for backwards compatibility.
+    netloc = parsed.netloc
+    if not netloc:
+        netloc, path = path, netloc
+
+    if auth:
+        # parse_url doesn't provide the netloc with auth
+        # so we'll add it ourselves.
+        netloc = "@".join([auth, netloc])
+    if scheme is None:
+        scheme = new_scheme
+    if path is None:
+        path = ""
+
+    return urlunparse((scheme, netloc, path, "", query, fragment))
+
+
+def get_auth_from_url(url):
+    """Given a url with authentication components, extract them into a tuple of
+    username,password.
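+
+    Illustrative example (hypothetical URL):
+
+    >>> get_auth_from_url("https://user:s3cret@example.com/path")
+    ('user', 's3cret')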
+ + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ("", "") + + return auth + + +def check_header_validity(header): + """Verifies that header parts don't contain leading whitespace + reserved characters, or return characters. + + :param header: tuple, in the format (name, value). + """ + name, value = header + _validate_header_part(header, name, 0) + _validate_header_part(header, value, 1) + + +def _validate_header_part(header, header_part, header_validator_index): + if isinstance(header_part, str): + validator = _HEADER_VALIDATORS_STR[header_validator_index] + elif isinstance(header_part, bytes): + validator = _HEADER_VALIDATORS_BYTE[header_validator_index] + else: + raise InvalidHeader( + f"Header part ({header_part!r}) from {header} " + f"must be of type str or bytes, not {type(header_part)}" + ) + + if not validator.match(header_part): + header_kind = "name" if header_validator_index == 0 else "value" + raise InvalidHeader( + f"Invalid leading whitespace, reserved character(s), or return " + f"character(s) in header {header_kind}: {header_part!r}" + ) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit("@", 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, "")) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. + """ + body_seek = getattr(prepared_request.body, "seek", None) + if body_seek is not None and isinstance( + prepared_request._body_position, integer_types + ): + try: + body_seek(prepared_request._body_position) + except OSError: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect." + ) + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/LICENSE new file mode 100644 index 0000000..b907776 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
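A minimal usage sketch for the URL helpers defined above (illustrative only, not part of this diff; it assumes the bundled requests package is importable):

    from requests.utils import (
        get_auth_from_url,
        guess_json_utf,
        requote_uri,
        urldefragauth,
    )

    url = "https://user:s3cret@example.com/a path/?q=1#frag"

    # requote_uri() passes the URI through an unquote/quote cycle so it
    # ends up consistently percent-encoded (the space becomes %20).
    print(requote_uri(url))
    # https://user:s3cret@example.com/a%20path/?q=1#frag

    # get_auth_from_url() extracts the userinfo component as a tuple.
    print(get_auth_from_url(url))
    # ('user', 's3cret')

    # urldefragauth() strips both the fragment and the auth part.
    print(urldefragauth(url))
    # https://example.com/a path/?q=1

    # guess_json_utf() sniffs a JSON payload's encoding from BOMs and
    # null-byte positions in the first four bytes.
    print(guess_json_utf('{"a": 1}'.encode("utf-16-le")))
    # utf-16-le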
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 0000000..4c7f815 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,27 @@ +__all__ = [ + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", + "Resolver", + "__version__", +] + +__version__ = "1.2.1" + + +from .providers import AbstractProvider +from .reporters import BaseReporter +from .resolvers import ( + AbstractResolver, + InconsistentCandidate, + RequirementsConflicted, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, + Resolver, +) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c034e50 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc new file mode 100644 index 0000000..ad281a9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc new file mode 100644 index 0000000..dc4b216 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc new file mode 100644 index 0000000..2a124fe Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 index 0000000..524e3d8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,196 @@ +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Generic, + Iterable, + Iterator, + Mapping, + Sequence, +) + +from .structs import CT, KT, RT, Matches, RequirementInformation + +if TYPE_CHECKING: + from typing import Any, Protocol + + class Preference(Protocol): + def __lt__(self, __other: Any) -> bool: ... + + +class AbstractProvider(Generic[RT, CT, KT]): + """Delegate class to provide the required interface for the resolver.""" + + def identify(self, requirement_or_candidate: RT | CT) -> KT: + """Given a requirement or candidate, return an identifier for it. + + This is used to identify, e.g. whether two requirements + should have their specifier parts merged or a candidate matches a + requirement via ``find_matches()``. 
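+
+        For example (illustrative, pip-style provider): both the requirement
+        ``"Django>=2.0"`` and any Django candidate could map to the canonical
+        key ``"django"``, so they are treated as the same dependency.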
+ """ + raise NotImplementedError + + def get_preference( + self, + identifier: KT, + resolutions: Mapping[KT, CT], + candidates: Mapping[KT, Iterator[CT]], + information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]], + backtrack_causes: Sequence[RequirementInformation[RT, CT]], + ) -> Preference: + """Produce a sort key for given requirement based on preference. + + As this is a sort key it will be called O(n) times per backtrack + step, where n is the number of `identifier`s, if you have a check + which is expensive in some sense. E.g. It needs to make O(n) checks + per call or takes significant wall clock time, consider using + `narrow_requirement_selection` to filter the `identifier`s, which + is applied before this sort key is called. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the requirement being considered. + :param resolutions: Mapping of candidates currently pinned by the + resolver. Each key is an identifier, and the value is a candidate. + The candidate may conflict with requirements from ``information``. + :param candidates: Mapping of each dependency's possible candidates. + Each value is an iterator of candidates. + :param information: Mapping of requirement information of each package. + Each value is an iterator of *requirement information*. + :param backtrack_causes: Sequence of *requirement information* that are + the requirements that caused the resolver to most recently + backtrack. + + A *requirement information* instance is a named tuple with two members: + + * ``requirement`` specifies a requirement contributing to the current + list of candidates. + * ``parent`` specifies the candidate that provides (depended on) the + requirement, or ``None`` to indicate a root requirement. + + The preference could depend on various issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. + + A sortable value should be returned (this will be used as the ``key`` + parameter of the built-in sorting function). The smaller the value is, + the more preferred this requirement is (i.e. the sorting function + is called with ``reverse=False``). + """ + raise NotImplementedError + + def find_matches( + self, + identifier: KT, + requirements: Mapping[KT, Iterator[RT]], + incompatibilities: Mapping[KT, Iterator[CT]], + ) -> Matches[CT]: + """Find all possible candidates that satisfy the given constraints. + + :param identifier: An identifier as returned by ``identify()``. All + candidates returned by this method should produce the same + identifier. + :param requirements: A mapping of requirements that all returned + candidates must satisfy. Each key is an identifier, and the value + an iterator of requirements for that dependency. + :param incompatibilities: A mapping of known incompatibile candidates of + each dependency. Each key is an identifier, and the value an + iterator of incompatibilities known to the resolver. 
+            incompatibilities *must* be excluded from the return value.
+
+        This should try to get candidates based on the requirements' types.
+        For VCS, local, and archive requirements, the one-and-only match is
+        returned, and for a "named" requirement, the index(es) should be
+        consulted to find concrete candidates for this requirement.
+
+        The return value should produce candidates ordered by preference; the
+        most preferred candidate should come first. The return type may be one
+        of the following:
+
+        * A callable that returns an iterator that yields candidates.
+        * A collection of candidates.
+        * An iterable of candidates. This will be consumed immediately into a
+          list of candidates.
+        """
+        raise NotImplementedError
+
+    def is_satisfied_by(self, requirement: RT, candidate: CT) -> bool:
+        """Whether the given requirement can be satisfied by a candidate.
+
+        The candidate is guaranteed to have been generated from the
+        requirement.
+
+        A boolean should be returned to indicate whether ``candidate`` is a
+        viable solution to the requirement.
+        """
+        raise NotImplementedError
+
+    def get_dependencies(self, candidate: CT) -> Iterable[RT]:
+        """Get dependencies of a candidate.
+
+        This should return a collection of requirements that `candidate`
+        specifies as its dependencies.
+        """
+        raise NotImplementedError
+
+    def narrow_requirement_selection(
+        self,
+        identifiers: Iterable[KT],
+        resolutions: Mapping[KT, CT],
+        candidates: Mapping[KT, Iterator[CT]],
+        information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]],
+        backtrack_causes: Sequence[RequirementInformation[RT, CT]],
+    ) -> Iterable[KT]:
+        """
+        An optional method to narrow the selection of requirements being
+        considered during resolution. This method is called O(1) times per
+        backtrack step.
+
+        :param identifiers: An iterable of `identifiers` as returned by
+            ``identify()``. These identify all requirements currently being
+            considered.
+        :param resolutions: A mapping of candidates currently pinned by the
+            resolver. Each key is an identifier, and the value is a candidate
+            that may conflict with requirements from ``information``.
+        :param candidates: A mapping of each dependency's possible candidates.
+            Each value is an iterator of candidates.
+        :param information: A mapping of requirement information for each package.
+            Each value is an iterator of *requirement information*.
+        :param backtrack_causes: A sequence of *requirement information* that are
+            the requirements causing the resolver to most recently
+            backtrack.
+
+        A *requirement information* instance is a named tuple with two members:
+
+        * ``requirement`` specifies a requirement contributing to the current
+          list of candidates.
+        * ``parent`` specifies the candidate that provides (is depended on for)
+          the requirement, or ``None`` to indicate a root requirement.
+
+        Must return a non-empty subset of `identifiers`, with the default
+        implementation being to return `identifiers` unchanged. Those `identifiers`
+        will then be passed to the sort key `get_preference` to pick the most
+        preferred requirement to attempt to pin, unless `narrow_requirement_selection`
+        returns only 1 requirement, in which case that will be used without
+        calling the sort key `get_preference`.
+
+        This method is designed to be used by the provider to optimize the
+        dependency resolution, e.g. if a check cost is O(m) and it can be done
+        against all identifiers at once, then filtering the requirement selection
+        here will cost O(m), but making it part of the sort key in `get_preference`
+        will cost O(m*n), where n is the number of `identifiers`.
+
+        Returns:
+            Iterable[KT]: A non-empty subset of `identifiers`.
+        """
+        return identifiers
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/py.typed b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py
new file mode 100644
index 0000000..6c14220
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/reporters.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Collection, Generic
+
+from .structs import CT, KT, RT, RequirementInformation, State
+
+if TYPE_CHECKING:
+    from .resolvers import Criterion
+
+
+class BaseReporter(Generic[RT, CT, KT]):
+    """Delegate class to provide progress reporting for the resolver."""
+
+    def starting(self) -> None:
+        """Called before the resolution actually starts."""
+
+    def starting_round(self, index: int) -> None:
+        """Called before each round of resolution starts.
+
+        The index is zero-based.
+        """
+
+    def ending_round(self, index: int, state: State[RT, CT, KT]) -> None:
+        """Called before each round of resolution ends.
+
+        This is NOT called if the resolution ends at this round. Use `ending`
+        if you want to report finalization. The index is zero-based.
+        """
+
+    def ending(self, state: State[RT, CT, KT]) -> None:
+        """Called before the resolution ends successfully."""
+
+    def adding_requirement(self, requirement: RT, parent: CT | None) -> None:
+        """Called when adding a new requirement into the resolve criteria.
+
+        :param requirement: The additional requirement to be applied to filter
+            the available candidates.
+        :param parent: The candidate that requires ``requirement`` as a
+            dependency, or None if ``requirement`` is one of the root
+            requirements passed in from ``Resolver.resolve()``.
+        """
+
+    def resolving_conflicts(
+        self, causes: Collection[RequirementInformation[RT, CT]]
+    ) -> None:
+        """Called when starting to attempt requirement conflict resolution.
+
+        :param causes: The information on the collision that caused the backtracking.
+ """ + + def rejecting_candidate(self, criterion: Criterion[RT, CT], candidate: CT) -> None: + """Called when rejecting a candidate during backtracking.""" + + def pinning(self, candidate: CT) -> None: + """Called when adding a candidate to the potential solution.""" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__init__.py new file mode 100644 index 0000000..b249221 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__init__.py @@ -0,0 +1,27 @@ +from ..structs import RequirementInformation +from .abstract import AbstractResolver, Result +from .criterion import Criterion +from .exceptions import ( + InconsistentCandidate, + RequirementsConflicted, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, + ResolverException, +) +from .resolution import Resolution, Resolver + +__all__ = [ + "AbstractResolver", + "Criterion", + "InconsistentCandidate", + "RequirementInformation", + "RequirementsConflicted", + "Resolution", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", + "Resolver", + "ResolverException", + "Result", +] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f3f58b2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc new file mode 100644 index 0000000..42fe104 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/abstract.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc new file mode 100644 index 0000000..04be98b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/criterion.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc new file mode 100644 index 0000000..daf93f3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc new file mode 100644 index 0000000..5e07c42 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/__pycache__/resolution.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/abstract.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/abstract.py new file mode 100644 index 0000000..db32b3b --- /dev/null +++ 
b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/abstract.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import collections
+from typing import TYPE_CHECKING, Any, Generic, Iterable, NamedTuple
+
+from ..structs import CT, KT, RT, DirectedGraph
+
+if TYPE_CHECKING:
+    from ..providers import AbstractProvider
+    from ..reporters import BaseReporter
+    from .criterion import Criterion
+
+    class Result(NamedTuple, Generic[RT, CT, KT]):
+        mapping: dict[KT, CT]
+        graph: DirectedGraph[KT | None]
+        criteria: dict[KT, Criterion[RT, CT]]
+
+else:
+    Result = collections.namedtuple("Result", ["mapping", "graph", "criteria"])
+
+
+class AbstractResolver(Generic[RT, CT, KT]):
+    """The thing that performs the actual resolution work."""
+
+    base_exception = Exception
+
+    def __init__(
+        self,
+        provider: AbstractProvider[RT, CT, KT],
+        reporter: BaseReporter[RT, CT, KT],
+    ) -> None:
+        self.provider = provider
+        self.reporter = reporter
+
+    def resolve(self, requirements: Iterable[RT], **kwargs: Any) -> Result[RT, CT, KT]:
+        """Take a collection of constraints, spit out the resolution result.
+
+        This returns a representation of the final resolution state, with one
+        guaranteed attribute ``mapping`` that contains resolved candidates as
+        values. The keys are their respective identifiers.
+
+        :param requirements: A collection of constraints.
+        :param kwargs: Additional keyword arguments that subclasses may accept.
+
+        :raises: ``self.base_exception`` or its subclass.
+        """
+        raise NotImplementedError
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/criterion.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/criterion.py
new file mode 100644
index 0000000..ee5019c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/criterion.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import Collection, Generic, Iterable, Iterator
+
+from ..structs import CT, RT, RequirementInformation
+
+
+class Criterion(Generic[RT, CT]):
+    """Representation of possible resolution results of a package.
+
+    This holds three attributes:
+
+    * `information` is a collection of `RequirementInformation` pairs.
+      Each pair is a requirement contributing to this criterion, and the
+      candidate that provides the requirement.
+    * `incompatibilities` is a collection of all known not-to-work candidates
+      to exclude from consideration.
+    * `candidates` is a collection containing all possible candidates deduced
+      from the union of contributing requirements and known incompatibilities.
+      It should never be empty, except when the criterion is an attribute of a
+      raised `RequirementsConflicted` (in which case it is always empty).
+
+    .. note::
+        This class is intended to be externally immutable. **Do not** mutate
+        any of its attribute containers.
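+
+    For illustration (hypothetical names): a criterion backed by two
+    requirement-parent pairs would ``repr()`` roughly as
+    ``Criterion((ReqA, via=None), (ReqB, via=CandidateX))``.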
+ """ + + def __init__( + self, + candidates: Iterable[CT], + information: Collection[RequirementInformation[RT, CT]], + incompatibilities: Collection[CT], + ) -> None: + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self) -> str: + requirements = ", ".join( + f"({req!r}, via={parent!r})" for req, parent in self.information + ) + return f"Criterion({requirements})" + + def iter_requirement(self) -> Iterator[RT]: + return (i.requirement for i in self.information) + + def iter_parent(self) -> Iterator[CT | None]: + return (i.parent for i in self.information) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/exceptions.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/exceptions.py new file mode 100644 index 0000000..35e2755 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/exceptions.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection, Generic + +from ..structs import CT, RT, RequirementInformation + +if TYPE_CHECKING: + from .criterion import Criterion + + +class ResolverException(Exception): + """A base class for all exceptions raised by this module. + + Exceptions derived by this class should all be handled in this module. Any + bubbling pass the resolver should be treated as a bug. + """ + + +class RequirementsConflicted(ResolverException, Generic[RT, CT]): + def __init__(self, criterion: Criterion[RT, CT]) -> None: + super().__init__(criterion) + self.criterion = criterion + + def __str__(self) -> str: + return "Requirements conflict: {}".format( + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class InconsistentCandidate(ResolverException, Generic[RT, CT]): + def __init__(self, candidate: CT, criterion: Criterion[RT, CT]): + super().__init__(candidate, criterion) + self.candidate = candidate + self.criterion = criterion + + def __str__(self) -> str: + return "Provided candidate {!r} does not satisfy {}".format( + self.candidate, + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError, Generic[RT, CT]): + def __init__(self, causes: Collection[RequirementInformation[RT, CT]]): + super().__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count: int) -> None: + super().__init__(round_count) + self.round_count = round_count diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/resolution.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/resolution.py new file mode 100644 index 0000000..8c13d3b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/resolvers/resolution.py @@ -0,0 +1,627 @@ +from __future__ import annotations + +import collections +import itertools +import operator +from typing import TYPE_CHECKING, Generic + +from ..structs import ( + CT, + KT, + RT, + DirectedGraph, + IterableView, + IteratorMapping, + RequirementInformation, + State, + build_iter_view, +) +from .abstract import AbstractResolver, Result +from .criterion import Criterion +from .exceptions import ( + InconsistentCandidate, + RequirementsConflicted, + ResolutionImpossible, + ResolutionTooDeep, + ResolverException, +) + +if TYPE_CHECKING: + from collections.abc import 
Collection, Iterable, Mapping + + from ..providers import AbstractProvider, Preference + from ..reporters import BaseReporter + +_OPTIMISTIC_BACKJUMPING_RATIO: float = 0.1 + + +def _build_result(state: State[RT, CT, KT]) -> Result[RT, CT, KT]: + mapping = state.mapping + all_keys: dict[int, KT | None] = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph: DirectedGraph[KT | None] = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected: set[KT | None] = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolution(Generic[RT, CT, KT]): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__( + self, + provider: AbstractProvider[RT, CT, KT], + reporter: BaseReporter[RT, CT, KT], + ) -> None: + self._p = provider + self._r = reporter + self._states: list[State[RT, CT, KT]] = [] + + # Optimistic backjumping variables + self._optimistic_backjumping_ratio = _OPTIMISTIC_BACKJUMPING_RATIO + self._save_states: list[State[RT, CT, KT]] | None = None + self._optimistic_start_round: int | None = None + + @property + def state(self) -> State[RT, CT, KT]: + try: + return self._states[-1] + except IndexError as e: + raise AttributeError("state") from e + + def _push_new_state(self) -> None: + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. + """ + base = self._states[-1] + state = State( + mapping=base.mapping.copy(), + criteria=base.criteria.copy(), + backtrack_causes=base.backtrack_causes[:], + ) + self._states.append(state) + + def _add_to_criteria( + self, + criteria: dict[KT, Criterion[RT, CT]], + requirement: RT, + parent: CT | None, + ) -> None: + self._r.adding_requirement(requirement=requirement, parent=parent) + + identifier = self._p.identify(requirement_or_candidate=requirement) + criterion = criteria.get(identifier) + if criterion: + incompatibilities = list(criterion.incompatibilities) + else: + incompatibilities = [] + + matches = self._p.find_matches( + identifier=identifier, + requirements=IteratorMapping( + criteria, + operator.methodcaller("iter_requirement"), + {identifier: [requirement]}, + ), + incompatibilities=IteratorMapping( + criteria, + operator.attrgetter("incompatibilities"), + {identifier: incompatibilities}, + ), + ) + + if criterion: + information = list(criterion.information) + information.append(RequirementInformation(requirement, parent)) + else: + information = [RequirementInformation(requirement, parent)] + + criterion = Criterion( + candidates=build_iter_view(matches), + information=information, + incompatibilities=incompatibilities, + ) + if not criterion.candidates: + raise RequirementsConflicted(criterion) + criteria[identifier] = criterion + + def _remove_information_from_criteria( + self, criteria: dict[KT, Criterion[RT, CT]], parents: Collection[KT] + ) -> None: + """Remove information from parents of criteria. 
+ + Concretely, removes all values from each criterion's ``information`` + field that have one of ``parents`` as provider of the requirement. + + :param criteria: The criteria to update. + :param parents: Identifiers for which to remove information from all criteria. + """ + if not parents: + return + for key, criterion in criteria.items(): + criteria[key] = Criterion( + criterion.candidates, + [ + information + for information in criterion.information + if ( + information.parent is None + or self._p.identify(information.parent) not in parents + ) + ], + criterion.incompatibilities, + ) + + def _get_preference(self, name: KT) -> Preference: + return self._p.get_preference( + identifier=name, + resolutions=self.state.mapping, + candidates=IteratorMapping( + self.state.criteria, + operator.attrgetter("candidates"), + ), + information=IteratorMapping( + self.state.criteria, + operator.attrgetter("information"), + ), + backtrack_causes=self.state.backtrack_causes, + ) + + def _is_current_pin_satisfying( + self, name: KT, criterion: Criterion[RT, CT] + ) -> bool: + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(requirement=r, candidate=current_pin) + for r in criterion.iter_requirement() + ) + + def _get_updated_criteria(self, candidate: CT) -> dict[KT, Criterion[RT, CT]]: + criteria = self.state.criteria.copy() + for requirement in self._p.get_dependencies(candidate=candidate): + self._add_to_criteria(criteria, requirement, parent=candidate) + return criteria + + def _attempt_to_pin_criterion(self, name: KT) -> list[Criterion[RT, CT]]: + criterion = self.state.criteria[name] + + causes: list[Criterion[RT, CT]] = [] + for candidate in criterion.candidates: + try: + criteria = self._get_updated_criteria(candidate) + except RequirementsConflicted as e: + self._r.rejecting_candidate(e.criterion, candidate) + causes.append(e.criterion) + continue + + # Check the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). + satisfied = all( + self._p.is_satisfied_by(requirement=r, candidate=candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + self._r.pinning(candidate=candidate) + self.state.criteria.update(criteria) + + # Put newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end, signal for backtracking. + return causes + + def _patch_criteria( + self, incompatibilities_from_broken: list[tuple[KT, list[CT]]] + ) -> bool: + # Create a new state from the last known-to-work one, and apply + # the previously gathered incompatibility information. 
+ for k, incompatibilities in incompatibilities_from_broken: + if not incompatibilities: + continue + try: + criterion = self.state.criteria[k] + except KeyError: + continue + matches = self._p.find_matches( + identifier=k, + requirements=IteratorMapping( + self.state.criteria, + operator.methodcaller("iter_requirement"), + ), + incompatibilities=IteratorMapping( + self.state.criteria, + operator.attrgetter("incompatibilities"), + {k: incompatibilities}, + ), + ) + candidates: IterableView[CT] = build_iter_view(matches) + if not candidates: + return False + incompatibilities.extend(criterion.incompatibilities) + self.state.criteria[k] = Criterion( + candidates=candidates, + information=list(criterion.information), + incompatibilities=incompatibilities, + ) + return True + + def _save_state(self) -> None: + """Save states for potential rollback if optimistic backjumping fails.""" + if self._save_states is None: + self._save_states = [ + State( + mapping=s.mapping.copy(), + criteria=s.criteria.copy(), + backtrack_causes=s.backtrack_causes[:], + ) + for s in self._states + ] + + def _rollback_states(self) -> None: + """Rollback states and disable optimistic backjumping.""" + self._optimistic_backjumping_ratio = 0.0 + if self._save_states: + self._states = self._save_states + self._save_states = None + + def _backjump(self, causes: list[RequirementInformation[RT, CT]]) -> bool: + """Perform backjumping. + + When we enter here, the stack is like this:: + + [ state Z ] + [ state Y ] + [ state X ] + .... earlier states are irrelevant. + + 1. No pins worked for Z, so it does not have a pin. + 2. We want to reset state Y to unpinned, and pin another candidate. + 3. State X holds what state Y was before the pin, but does not + have the incompatibility information gathered in state Y. + + Each iteration of the loop will: + + 1. Identify Z. The incompatibility is not always caused by the latest + state. For example, given three requirements A, B and C, with + dependencies A1, B1 and C1, where A1 and B1 are incompatible: the + last state might be related to C, so we want to discard the + previous state. + 2. Discard Z. + 3. Discard Y but remember its incompatibility information gathered + previously, and the failure we're dealing with right now. + 4. Push a new state Y' based on X, and apply the incompatibility + information from Y to Y'. + 5a. If this causes Y' to conflict, we need to backtrack again. Make Y' + the new Z and go back to step 2. + 5b. If the incompatibilities apply cleanly, end backtracking. + """ + incompatible_reqs: Iterable[CT | RT] = itertools.chain( + (c.parent for c in causes if c.parent is not None), + (c.requirement for c in causes), + ) + incompatible_deps = {self._p.identify(r) for r in incompatible_reqs} + while len(self._states) >= 3: + # Remove the state that triggered backtracking. + del self._states[-1] + + # Optimistically backtrack to a state that caused the incompatibility + broken_state = self.state + while True: + # Retrieve the last candidate pin and known incompatibilities. 
+ try: + broken_state = self._states.pop() + name, candidate = broken_state.mapping.popitem() + except (IndexError, KeyError): + raise ResolutionImpossible(causes) from None + + if ( + not self._optimistic_backjumping_ratio + and name not in incompatible_deps + ): + # For safe backjumping only backjump if the current dependency + # is not the same as the incompatible dependency + break + + # On the first time a non-safe backjump is done the state + # is saved so we can restore it later if the resolution fails + if ( + self._optimistic_backjumping_ratio + and self._save_states is None + and name not in incompatible_deps + ): + self._save_state() + + # If the current dependencies and the incompatible dependencies + # are overlapping then we have likely found a cause of the + # incompatibility + current_dependencies = { + self._p.identify(d) for d in self._p.get_dependencies(candidate) + } + if not current_dependencies.isdisjoint(incompatible_deps): + break + + # Fallback: We should not backtrack to the point where + # broken_state.mapping is empty, so stop backtracking for + # a chance for the resolution to recover + if not broken_state.mapping: + break + + # Guard: We need at least two state to remain to both + # backtrack and push a new state + if len(self._states) <= 1: + raise ResolutionImpossible(causes) + + incompatibilities_from_broken = [ + (k, list(v.incompatibilities)) for k, v in broken_state.criteria.items() + ] + + # Also mark the newly known incompatibility. + incompatibilities_from_broken.append((name, [candidate])) + + self._push_new_state() + success = self._patch_criteria(incompatibilities_from_broken) + + # It works! Let's work on this new state. + if success: + return True + + # State does not work after applying known incompatibilities. + # Try the still previous state. + + # No way to backtrack anymore. + return False + + def _extract_causes( + self, criteron: list[Criterion[RT, CT]] + ) -> list[RequirementInformation[RT, CT]]: + """Extract causes from list of criterion and deduplicate""" + return list({id(i): i for c in criteron for i in c.information}.values()) + + def resolve(self, requirements: Iterable[RT], max_rounds: int) -> State[RT, CT, KT]: + if self._states: + raise RuntimeError("already resolved") + + self._r.starting() + + # Initialize the root state. + self._states = [ + State( + mapping=collections.OrderedDict(), + criteria={}, + backtrack_causes=[], + ) + ] + for r in requirements: + try: + self._add_to_criteria(self.state.criteria, r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) from e + + # The root state is saved as a sentinel so the first ever pin can have + # something to backtrack to if it fails. The root state is basically + # pinning the virtual "root" package in the graph. 
+ self._push_new_state() + + # Variables for optimistic backjumping + optimistic_rounds_cutoff: int | None = None + optimistic_backjumping_start_round: int | None = None + + for round_index in range(max_rounds): + self._r.starting_round(index=round_index) + + # Handle if optimistic backjumping has been running for too long + if self._optimistic_backjumping_ratio and self._save_states is not None: + if optimistic_backjumping_start_round is None: + optimistic_backjumping_start_round = round_index + optimistic_rounds_cutoff = int( + (max_rounds - round_index) * self._optimistic_backjumping_ratio + ) + + if optimistic_rounds_cutoff <= 0: + self._rollback_states() + continue + elif optimistic_rounds_cutoff is not None: + if ( + round_index - optimistic_backjumping_start_round + >= optimistic_rounds_cutoff + ): + self._rollback_states() + continue + + unsatisfied_names = [ + key + for key, criterion in self.state.criteria.items() + if not self._is_current_pin_satisfying(key, criterion) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_names: + self._r.ending(state=self.state) + return self.state + + # keep track of satisfied names to calculate diff after pinning + satisfied_names = set(self.state.criteria.keys()) - set(unsatisfied_names) + + if len(unsatisfied_names) > 1: + narrowed_unstatisfied_names = list( + self._p.narrow_requirement_selection( + identifiers=unsatisfied_names, + resolutions=self.state.mapping, + candidates=IteratorMapping( + self.state.criteria, + operator.attrgetter("candidates"), + ), + information=IteratorMapping( + self.state.criteria, + operator.attrgetter("information"), + ), + backtrack_causes=self.state.backtrack_causes, + ) + ) + else: + narrowed_unstatisfied_names = unsatisfied_names + + # If there are no unsatisfied names use unsatisfied names + if not narrowed_unstatisfied_names: + raise RuntimeError("narrow_requirement_selection returned 0 names") + + # If there is only 1 unsatisfied name skip calling self._get_preference + if len(narrowed_unstatisfied_names) > 1: + # Choose the most preferred unpinned criterion to try. + name = min(narrowed_unstatisfied_names, key=self._get_preference) + else: + name = narrowed_unstatisfied_names[0] + + failure_criterion = self._attempt_to_pin_criterion(name) + + if failure_criterion: + causes = self._extract_causes(failure_criterion) + # Backjump if pinning fails. The backjump process puts us in + # an unpinned state, so we can work on it in the next round. + self._r.resolving_conflicts(causes=causes) + + try: + success = self._backjump(causes) + except ResolutionImpossible: + if self._optimistic_backjumping_ratio and self._save_states: + failed_optimistic_backjumping = True + else: + raise + else: + failed_optimistic_backjumping = bool( + not success + and self._optimistic_backjumping_ratio + and self._save_states + ) + + if failed_optimistic_backjumping and self._save_states: + self._rollback_states() + else: + self.state.backtrack_causes[:] = causes + + # Dead ends everywhere. Give up. + if not success: + raise ResolutionImpossible(self.state.backtrack_causes) + else: + # discard as information sources any invalidated names + # (unsatisfied names that were previously satisfied) + newly_unsatisfied_names = { + key + for key, criterion in self.state.criteria.items() + if key in satisfied_names + and not self._is_current_pin_satisfying(key, criterion) + } + self._remove_information_from_criteria( + self.state.criteria, newly_unsatisfied_names + ) + # Pinning was successful. 
Push a new state to do another pin. + self._push_new_state() + + self._r.ending_round(index=round_index, state=self.state) + + raise ResolutionTooDeep(max_rounds) + + +class Resolver(AbstractResolver[RT, CT, KT]): + """The thing that performs the actual resolution work.""" + + base_exception = ResolverException + + def resolve( # type: ignore[override] + self, + requirements: Iterable[RT], + max_rounds: int = 100, + ) -> Result[RT, CT, KT]: + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation to the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. + * `criteria`: A dict of "criteria" that hold detailed information on + how edges in the graph are derived. Each key is an identifier of a + requirement, and the value is a `Criterion` instance. + + The following exceptions may be raised if a resolution cannot be found: + + * `ResolutionImpossible`: A resolution cannot be found for the given + combination of requirements. The `causes` attribute of the + exception is a list of (requirement, parent), giving the + requirements that could not be satisfied. + * `ResolutionTooDeep`: The dependency tree is too deeply nested and + the resolver gave up. This is usually caused by a circular + dependency, but you can try to resolve this by increasing the + `max_rounds` argument. + """ + resolution = Resolution(self.provider, self.reporter) + state = resolution.resolve(requirements, max_rounds=max_rounds) + return _build_result(state) + + +def _has_route_to_root( + criteria: Mapping[KT, Criterion[RT, CT]], + key: KT | None, + all_keys: dict[int, KT | None], + connected: set[KT | None], +) -> bool: + if key in connected: + return True + if key not in criteria: + return False + assert key is not None + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py new file mode 100644 index 0000000..18c74d4 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/resolvelib/structs.py @@ -0,0 +1,209 @@ +from __future__ import annotations + +import itertools +from collections import namedtuple +from typing import ( + TYPE_CHECKING, + Callable, + Generic, + Iterable, + Iterator, + Mapping, + NamedTuple, + Sequence, + TypeVar, + Union, +) + +KT = TypeVar("KT") # Identifier. +RT = TypeVar("RT") # Requirement. +CT = TypeVar("CT") # Candidate. 
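# [Editor's note: a hedged usage sketch of the Resolver.resolve() entry
# point documented above. MyProvider and some_requirement are hypothetical
# stand-ins; Resolver, BaseReporter, and the Result fields are taken from
# the source:
#
#     from pip._vendor.resolvelib import BaseReporter, Resolver
#
#     resolver = Resolver(MyProvider(), BaseReporter())
#     result = resolver.resolve([some_requirement], max_rounds=200)
#     for identifier, candidate in result.mapping.items():
#         print(identifier, "->", candidate)    # pinned candidates
#     for parent, child in result.graph.iter_edges():
#         print(parent, "requires", child)      # parent None marks a
#                                               # user-supplied requirement
# ]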
+ +Matches = Union[Iterable[CT], Callable[[], Iterable[CT]]] + +if TYPE_CHECKING: + from .resolvers.criterion import Criterion + + class RequirementInformation(NamedTuple, Generic[RT, CT]): + requirement: RT + parent: CT | None + + class State(NamedTuple, Generic[RT, CT, KT]): + """Resolution state in a round.""" + + mapping: dict[KT, CT] + criteria: dict[KT, Criterion[RT, CT]] + backtrack_causes: list[RequirementInformation[RT, CT]] + +else: + RequirementInformation = namedtuple( + "RequirementInformation", ["requirement", "parent"] + ) + State = namedtuple("State", ["mapping", "criteria", "backtrack_causes"]) + + +class DirectedGraph(Generic[KT]): + """A graph structure with directed edges.""" + + def __init__(self) -> None: + self._vertices: set[KT] = set() + self._forwards: dict[KT, set[KT]] = {} # <key> -> Set[<key>] + self._backwards: dict[KT, set[KT]] = {} # <key> -> Set[<key>] + + def __iter__(self) -> Iterator[KT]: + return iter(self._vertices) + + def __len__(self) -> int: + return len(self._vertices) + + def __contains__(self, key: KT) -> bool: + return key in self._vertices + + def copy(self) -> DirectedGraph[KT]: + """Return a shallow copy of this graph.""" + other = type(self)() + other._vertices = set(self._vertices) + other._forwards = {k: set(v) for k, v in self._forwards.items()} + other._backwards = {k: set(v) for k, v in self._backwards.items()} + return other + + def add(self, key: KT) -> None: + """Add a new vertex to the graph.""" + if key in self._vertices: + raise ValueError("vertex exists") + self._vertices.add(key) + self._forwards[key] = set() + self._backwards[key] = set() + + def remove(self, key: KT) -> None: + """Remove a vertex from the graph, disconnecting all edges from/to it.""" + self._vertices.remove(key) + for f in self._forwards.pop(key): + self._backwards[f].remove(key) + for t in self._backwards.pop(key): + self._forwards[t].remove(key) + + def connected(self, f: KT, t: KT) -> bool: + return f in self._backwards[t] and t in self._forwards[f] + + def connect(self, f: KT, t: KT) -> None: + """Connect two existing vertices. + + Nothing happens if the vertices are already connected.
+ """ + if t not in self._vertices: + raise KeyError(t) + self._forwards[f].add(t) + self._backwards[t].add(f) + + def iter_edges(self) -> Iterator[tuple[KT, KT]]: + for f, children in self._forwards.items(): + for t in children: + yield f, t + + def iter_children(self, key: KT) -> Iterator[KT]: + return iter(self._forwards[key]) + + def iter_parents(self, key: KT) -> Iterator[KT]: + return iter(self._backwards[key]) + + +class IteratorMapping(Mapping[KT, Iterator[CT]], Generic[RT, CT, KT]): + def __init__( + self, + mapping: Mapping[KT, RT], + accessor: Callable[[RT], Iterable[CT]], + appends: Mapping[KT, Iterable[CT]] | None = None, + ) -> None: + self._mapping = mapping + self._accessor = accessor + self._appends: Mapping[KT, Iterable[CT]] = appends or {} + + def __repr__(self) -> str: + return "IteratorMapping({!r}, {!r}, {!r})".format( + self._mapping, + self._accessor, + self._appends, + ) + + def __bool__(self) -> bool: + return bool(self._mapping or self._appends) + + def __contains__(self, key: object) -> bool: + return key in self._mapping or key in self._appends + + def __getitem__(self, k: KT) -> Iterator[CT]: + try: + v = self._mapping[k] + except KeyError: + return iter(self._appends[k]) + return itertools.chain(self._accessor(v), self._appends.get(k, ())) + + def __iter__(self) -> Iterator[KT]: + more = (k for k in self._appends if k not in self._mapping) + return itertools.chain(self._mapping, more) + + def __len__(self) -> int: + more = sum(1 for k in self._appends if k not in self._mapping) + return len(self._mapping) + more + + +class _FactoryIterableView(Iterable[RT]): + """Wrap an iterator factory returned by `find_matches()`. + + Calling `iter()` on this class would invoke the underlying iterator + factory, making it a "collection with ordering" that can be iterated + through multiple times, but lacks random access methods presented in + built-in Python sequence types. + """ + + def __init__(self, factory: Callable[[], Iterable[RT]]) -> None: + self._factory = factory + self._iterable: Iterable[RT] | None = None + + def __repr__(self) -> str: + return f"{type(self).__name__}({list(self)})" + + def __bool__(self) -> bool: + try: + next(iter(self)) + except StopIteration: + return False + return True + + def __iter__(self) -> Iterator[RT]: + iterable = self._factory() if self._iterable is None else self._iterable + self._iterable, current = itertools.tee(iterable) + return current + + +class _SequenceIterableView(Iterable[RT]): + """Wrap an iterable returned by find_matches(). + + This is essentially just a proxy to the underlying sequence that provides + the same interface as `_FactoryIterableView`. 
+ """ + + def __init__(self, sequence: Sequence[RT]): + self._sequence = sequence + + def __repr__(self) -> str: + return f"{type(self).__name__}({self._sequence})" + + def __bool__(self) -> bool: + return bool(self._sequence) + + def __iter__(self) -> Iterator[RT]: + return iter(self._sequence) + + +def build_iter_view(matches: Matches[CT]) -> Iterable[CT]: + """Build an iterable view from the value returned by `find_matches()`.""" + if callable(matches): + return _FactoryIterableView(matches) + if not isinstance(matches, Sequence): + matches = list(matches) + return _SequenceIterableView(matches) + + +IterableView = Iterable diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/LICENSE b/venv/lib/python3.12/site-packages/pip/_vendor/rich/LICENSE new file mode 100644 index 0000000..4415505 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2020 Will McGugan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py new file mode 100644 index 0000000..73f58d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__init__.py @@ -0,0 +1,177 @@ +"""Rich text and beautiful formatting in the terminal.""" + +import os +from typing import IO, TYPE_CHECKING, Any, Callable, Optional, Union + +from ._extension import load_ipython_extension # noqa: F401 + +__all__ = ["get_console", "reconfigure", "print", "inspect", "print_json"] + +if TYPE_CHECKING: + from .console import Console + +# Global console used by alternative print +_console: Optional["Console"] = None + +try: + _IMPORT_CWD = os.path.abspath(os.getcwd()) +except FileNotFoundError: + # Can happen if the cwd has been deleted + _IMPORT_CWD = "" + + +def get_console() -> "Console": + """Get a global :class:`~rich.console.Console` instance. This function is used when Rich requires a Console, + and hasn't been explicitly given one. + + Returns: + Console: A console instance. + """ + global _console + if _console is None: + from .console import Console + + _console = Console() + + return _console + + +def reconfigure(*args: Any, **kwargs: Any) -> None: + """Reconfigures the global console by replacing it with another. + + Args: + *args (Any): Positional arguments for the replacement :class:`~rich.console.Console`. + **kwargs (Any): Keyword arguments for the replacement :class:`~rich.console.Console`. 
+ """ + from pip._vendor.rich.console import Console + + new_console = Console(*args, **kwargs) + _console = get_console() + _console.__dict__ = new_console.__dict__ + + +def print( + *objects: Any, + sep: str = " ", + end: str = "\n", + file: Optional[IO[str]] = None, + flush: bool = False, +) -> None: + r"""Print object(s) supplied via positional arguments. + This function has an identical signature to the built-in print. + For more advanced features, see the :class:`~rich.console.Console` class. + + Args: + sep (str, optional): Separator between printed objects. Defaults to " ". + end (str, optional): Character to write at end of output. Defaults to "\\n". + file (IO[str], optional): File to write to, or None for stdout. Defaults to None. + flush (bool, optional): Has no effect as Rich always flushes output. Defaults to False. + + """ + from .console import Console + + write_console = get_console() if file is None else Console(file=file) + return write_console.print(*objects, sep=sep, end=end) + + +def print_json( + json: Optional[str] = None, + *, + data: Any = None, + indent: Union[None, int, str] = 2, + highlight: bool = True, + skip_keys: bool = False, + ensure_ascii: bool = False, + check_circular: bool = True, + allow_nan: bool = True, + default: Optional[Callable[[Any], Any]] = None, + sort_keys: bool = False, +) -> None: + """Pretty prints JSON. Output will be valid JSON. + + Args: + json (str): A string containing JSON. + data (Any): If json is not supplied, then encode this data. + indent (int, optional): Number of spaces to indent. Defaults to 2. + highlight (bool, optional): Enable highlighting of output: Defaults to True. + skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False. + ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False. + check_circular (bool, optional): Check for circular references. Defaults to True. + allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True. + default (Callable, optional): A callable that converts values that can not be encoded + in to something that can be JSON encoded. Defaults to None. + sort_keys (bool, optional): Sort dictionary keys. Defaults to False. + """ + + get_console().print_json( + json, + data=data, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + + +def inspect( + obj: Any, + *, + console: Optional["Console"] = None, + title: Optional[str] = None, + help: bool = False, + methods: bool = False, + docs: bool = True, + private: bool = False, + dunder: bool = False, + sort: bool = True, + all: bool = False, + value: bool = True, +) -> None: + """Inspect any Python object. + + * inspect(<OBJECT>) to see summarized info. + * inspect(<OBJECT>, methods=True) to see methods. + * inspect(<OBJECT>, help=True) to see full (non-abbreviated) help. + * inspect(<OBJECT>, private=True) to see private attributes (single underscore). + * inspect(<OBJECT>, dunder=True) to see attributes beginning with double underscore. + * inspect(<OBJECT>, all=True) to see all attributes. + + Args: + obj (Any): An object to inspect. + title (str, optional): Title to display over inspect result, or None use type. Defaults to None. + help (bool, optional): Show full help text rather than just first paragraph. Defaults to False. + methods (bool, optional): Enable inspection of callables. Defaults to False. + docs (bool, optional): Also render doc strings. Defaults to True.
+ private (bool, optional): Show private attributes (beginning with underscore). Defaults to False. + dunder (bool, optional): Show attributes starting with double underscore. Defaults to False. + sort (bool, optional): Sort attributes alphabetically. Defaults to True. + all (bool, optional): Show all attributes. Defaults to False. + value (bool, optional): Pretty print value. Defaults to True. + """ + _console = console or get_console() + from pip._vendor.rich._inspect import Inspect + + # Special case for inspect(inspect) + is_inspect = obj is inspect + + _inspect = Inspect( + obj, + title=title, + help=is_inspect or help, + methods=is_inspect or methods, + docs=is_inspect or docs, + private=private, + dunder=dunder, + sort=sort, + all=all, + value=value, + ) + _console.print(_inspect) + + +if __name__ == "__main__": # pragma: no cover + print("Hello, **World**") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py new file mode 100644 index 0000000..a583f1e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__main__.py @@ -0,0 +1,245 @@ +import colorsys +import io +from time import process_time + +from pip._vendor.rich import box +from pip._vendor.rich.color import Color +from pip._vendor.rich.console import Console, ConsoleOptions, Group, RenderableType, RenderResult +from pip._vendor.rich.markdown import Markdown +from pip._vendor.rich.measure import Measurement +from pip._vendor.rich.pretty import Pretty +from pip._vendor.rich.segment import Segment +from pip._vendor.rich.style import Style +from pip._vendor.rich.syntax import Syntax +from pip._vendor.rich.table import Table +from pip._vendor.rich.text import Text + + +class ColorBox: + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + for y in range(0, 5): + for x in range(options.max_width): + h = x / options.max_width + l = 0.1 + ((y / 5) * 0.7) + r1, g1, b1 = colorsys.hls_to_rgb(h, l, 1.0) + r2, g2, b2 = colorsys.hls_to_rgb(h, l + 0.7 / 10, 1.0) + bgcolor = Color.from_rgb(r1 * 255, g1 * 255, b1 * 255) + color = Color.from_rgb(r2 * 255, g2 * 255, b2 * 255) + yield Segment("▄", Style(color=color, bgcolor=bgcolor)) + yield Segment.line() + + def __rich_measure__( + self, console: "Console", options: ConsoleOptions + ) -> Measurement: + return Measurement(1, options.max_width) + + +def make_test_card() -> Table: + """Get a renderable that demonstrates a number of features.""" + table = Table.grid(padding=1, pad_edge=True) + table.title = "Rich features" + table.add_column("Feature", no_wrap=True, justify="center", style="bold red") + table.add_column("Demonstration") + + color_table = Table( + box=None, + expand=False, + show_header=False, + show_edge=False, + pad_edge=False, + ) + color_table.add_row( + ( + "✓ [bold green]4-bit color[/]\n" + "✓ [bold blue]8-bit color[/]\n" + "✓ [bold magenta]Truecolor (16.7 million)[/]\n" + "✓ [bold yellow]Dumb terminals[/]\n" + "✓ [bold cyan]Automatic color conversion" + ), + ColorBox(), + ) + + table.add_row("Colors", color_table) + + table.add_row( + "Styles", + "All ansi styles: [bold]bold[/], [dim]dim[/], [italic]italic[/italic], [underline]underline[/], [strike]strikethrough[/], [reverse]reverse[/], and even [blink]blink[/].", + ) + + lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque in metus sed sapien ultricies pretium a at justo. Maecenas luctus velit et auctor maximus." 
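# [Editor's note: a hedged, minimal sketch of the __rich_console__
# protocol that ColorBox above implements; any object with this method is
# printable by a Console. The RedBar class is illustrative only, not part
# of rich:
#
#     from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
#     from pip._vendor.rich.segment import Segment
#     from pip._vendor.rich.style import Style
#
#     class RedBar:
#         def __rich_console__(
#             self, console: Console, options: ConsoleOptions
#         ) -> RenderResult:
#             # One full-width line of red half-blocks, then a newline.
#             yield Segment("▄" * options.max_width, Style(color="red"))
#             yield Segment.line()
#
#     Console().print(RedBar())
# ]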
+ lorem_table = Table.grid(padding=1, collapse_padding=True) + lorem_table.pad_edge = False + lorem_table.add_row( + Text(lorem, justify="left", style="green"), + Text(lorem, justify="center", style="yellow"), + Text(lorem, justify="right", style="blue"), + Text(lorem, justify="full", style="red"), + ) + table.add_row( + "Text", + Group( + Text.from_markup( + """Word wrap text. Justify [green]left[/], [yellow]center[/], [blue]right[/] or [red]full[/].\n""" + ), + lorem_table, + ), + ) + + def comparison(renderable1: RenderableType, renderable2: RenderableType) -> Table: + table = Table(show_header=False, pad_edge=False, box=None, expand=True) + table.add_column("1", ratio=1) + table.add_column("2", ratio=1) + table.add_row(renderable1, renderable2) + return table + + table.add_row( + "Asian\nlanguage\nsupport", + ":flag_for_china: 该库支持中文,日文和韩文文本!\n:flag_for_japan: ライブラリは中国語、日本語、韓国語のテキストをサポートしています\n:flag_for_south_korea: 이 라이브러리는 중국어, 일본어 및 한국어 텍스트를 지원합니다", + ) + + markup_example = ( + "[bold magenta]Rich[/] supports a simple [i]bbcode[/i]-like [b]markup[/b] for [yellow]color[/], [underline]style[/], and emoji! " + ":+1: :apple: :ant: :bear: :baguette_bread: :bus: " + ) + table.add_row("Markup", markup_example) + + example_table = Table( + show_edge=False, + show_header=True, + expand=False, + row_styles=["none", "dim"], + box=box.SIMPLE, + ) + example_table.add_column("[green]Date", style="green", no_wrap=True) + example_table.add_column("[blue]Title", style="blue") + example_table.add_column( + "[cyan]Production Budget", + style="cyan", + justify="right", + no_wrap=True, + ) + example_table.add_column( + "[magenta]Box Office", + style="magenta", + justify="right", + no_wrap=True, + ) + example_table.add_row( + "Dec 20, 2019", + "Star Wars: The Rise of Skywalker", + "$275,000,000", + "$375,126,118", + ) + example_table.add_row( + "May 25, 2018", + "[b]Solo[/]: A Star Wars Story", + "$275,000,000", + "$393,151,347", + ) + example_table.add_row( + "Dec 15, 2017", + "Star Wars Ep. VIII: The Last Jedi", + "$262,000,000", + "[bold]$1,332,539,889[/bold]", + ) + example_table.add_row( + "May 19, 1999", + "Star Wars Ep. [b]I[/b]: [i]The phantom Menace", + "$115,000,000", + "$1,027,044,677", + ) + + table.add_row("Tables", example_table) + + code = '''\ +def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and generate a tuple with a flag for last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + for value in iter_values: + yield False, previous_value + previous_value = value + yield True, previous_value''' + + pretty_data = { + "foo": [ + 3.1427, + ( + "Paul Atreides", + "Vladimir Harkonnen", + "Thufir Hawat", + ), + ], + "atomic": (False, True, None), + } + table.add_row( + "Syntax\nhighlighting\n&\npretty\nprinting", + comparison( + Syntax(code, "python3", line_numbers=True, indent_guides=True), + Pretty(pretty_data, indent_guides=True), + ), + ) + + markdown_example = """\ +# Markdown + +Supports much of the *markdown* __syntax__! + +- Headers +- Basic formatting: **bold**, *italic*, `code` +- Block quotes +- Lists, and more... 
+ """ + table.add_row( + "Markdown", comparison("[cyan]" + markdown_example, Markdown(markdown_example)) + ) + + table.add_row( + "+more!", + """Progress bars, columns, styled logging handler, tracebacks, etc...""", + ) + return table + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.panel import Panel + + console = Console( + file=io.StringIO(), + force_terminal=True, + ) + test_card = make_test_card() + + # Print once to warm cache + start = process_time() + console.print(test_card) + pre_cache_taken = round((process_time() - start) * 1000.0, 1) + + console.file = io.StringIO() + + start = process_time() + console.print(test_card) + taken = round((process_time() - start) * 1000.0, 1) + + c = Console(record=True) + c.print(test_card) + + console = Console() + console.print(f"[dim]rendered in [not dim]{pre_cache_taken}ms[/] (cold cache)") + console.print(f"[dim]rendered in [not dim]{taken}ms[/] (warm cache)") + console.print() + console.print( + Panel.fit( + "[b magenta]Hope you enjoy using Rich![/]\n\n" + "Please consider sponsoring me if you get value from my work.\n\n" + "Even the price of a ☕ can brighten my day!\n\n" + "https://github.com/sponsors/willmcgugan", + border_style="red", + title="Help ensure Rich is maintained", + ) + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a4a9aa2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000..670cfbf Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc new file mode 100644 index 0000000..d1ad0d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc new file mode 100644 index 0000000..ea07be6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc new file mode 100644 index 0000000..0a0dd5a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc new file mode 100644 index 0000000..7c4a6a9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc new file mode 100644 index
0000000..8cfd0e3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc new file mode 100644 index 0000000..ab6a827 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py new file mode 100644 index 0000000..608ae3a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_cell_widths.py @@ -0,0 +1,454 @@ +# Auto generated by make_terminal_widths.py + +CELL_WIDTHS = [ + (0, 0, 0), + (1, 31, -1), + (127, 159, -1), + (173, 173, 0), + (768, 879, 0), + (1155, 1161, 0), + (1425, 1469, 0), + (1471, 1471, 0), + (1473, 1474, 0), + (1476, 1477, 0), + (1479, 1479, 0), + (1536, 1541, 0), + (1552, 1562, 0), + (1564, 1564, 0), + (1611, 1631, 0), + (1648, 1648, 0), + (1750, 1757, 0), + (1759, 1764, 0), + (1767, 1768, 0), + (1770, 1773, 0), + (1807, 1807, 0), + (1809, 1809, 0), + (1840, 1866, 0), + (1958, 1968, 0), + (2027, 2035, 0), + (2045, 2045, 0), + (2070, 2073, 0), + (2075, 2083, 0), + (2085, 2087, 0), + (2089, 2093, 0), + (2137, 2139, 0), + (2192, 2193, 0), + (2200, 2207, 0), + (2250, 2307, 0), + (2362, 2364, 0), + (2366, 2383, 0), + (2385, 2391, 0), + (2402, 2403, 0), + (2433, 2435, 0), + (2492, 2492, 0), + (2494, 2500, 0), + (2503, 2504, 0), + (2507, 2509, 0), + (2519, 2519, 0), + (2530, 2531, 0), + (2558, 2558, 0), + (2561, 2563, 0), + (2620, 2620, 0), + (2622, 2626, 0), + (2631, 2632, 0), + (2635, 2637, 0), + (2641, 2641, 0), + (2672, 2673, 0), + (2677, 2677, 0), + (2689, 2691, 0), + (2748, 2748, 0), + (2750, 2757, 0), + (2759, 2761, 0), + (2763, 2765, 0), + (2786, 2787, 0), + (2810, 2815, 0), + (2817, 2819, 0), + (2876, 2876, 0), + (2878, 2884, 0), + (2887, 2888, 0), + (2891, 2893, 0), + (2901, 2903, 0), + (2914, 2915, 0), + (2946, 2946, 0), + (3006, 3010, 0), + (3014, 3016, 0), + (3018, 3021, 0), + (3031, 3031, 0), + (3072, 3076, 0), + (3132, 3132, 0), + (3134, 3140, 0), + (3142, 3144, 0), + (3146, 3149, 0), + (3157, 3158, 0), + (3170, 3171, 0), + (3201, 3203, 0), + (3260, 3260, 0), + (3262, 3268, 0), + (3270, 3272, 0), + (3274, 3277, 0), + (3285, 3286, 0), + (3298, 3299, 0), + (3315, 3315, 0), + (3328, 3331, 0), + (3387, 3388, 0), + (3390, 3396, 0), + (3398, 3400, 0), + (3402, 3405, 0), + (3415, 3415, 0), + (3426, 3427, 0), + (3457, 3459, 0), + (3530, 3530, 0), + (3535, 3540, 0), + (3542, 3542, 0), + (3544, 3551, 0), + (3570, 3571, 0), + (3633, 3633, 0), + (3636, 3642, 0), + (3655, 3662, 0), + (3761, 3761, 0), + (3764, 3772, 0), + (3784, 3790, 0), + (3864, 3865, 0), + (3893, 3893, 0), + (3895, 3895, 0), + (3897, 3897, 0), + (3902, 3903, 0), + (3953, 3972, 0), + (3974, 3975, 0), + (3981, 3991, 0), + (3993, 4028, 0), + (4038, 4038, 0), + (4139, 4158, 0), + (4182, 4185, 0), + (4190, 4192, 0), + (4194, 4196, 0), + (4199, 4205, 0), + (4209, 4212, 0), + (4226, 4237, 0), + (4239, 4239, 0), + (4250, 4253, 0), + (4352, 4447, 2), + (4448, 4607, 0), + (4957, 4959, 0), + (5906, 5909, 0), + (5938, 5940, 0), + (5970, 5971, 0), + (6002, 6003, 0), + (6068, 6099, 0), + (6109, 6109, 0), + (6155, 6159, 0), + (6277, 6278, 0), + (6313, 6313, 0), + (6432, 6443, 0), + (6448, 6459, 0), + (6679, 6683, 0), + (6741, 6750, 0), + (6752, 6780, 0), + 
(6783, 6783, 0), + (6832, 6862, 0), + (6912, 6916, 0), + (6964, 6980, 0), + (7019, 7027, 0), + (7040, 7042, 0), + (7073, 7085, 0), + (7142, 7155, 0), + (7204, 7223, 0), + (7376, 7378, 0), + (7380, 7400, 0), + (7405, 7405, 0), + (7412, 7412, 0), + (7415, 7417, 0), + (7616, 7679, 0), + (8203, 8207, 0), + (8232, 8238, 0), + (8288, 8292, 0), + (8294, 8303, 0), + (8400, 8432, 0), + (8986, 8987, 2), + (9001, 9002, 2), + (9193, 9196, 2), + (9200, 9200, 2), + (9203, 9203, 2), + (9725, 9726, 2), + (9748, 9749, 2), + (9800, 9811, 2), + (9855, 9855, 2), + (9875, 9875, 2), + (9889, 9889, 2), + (9898, 9899, 2), + (9917, 9918, 2), + (9924, 9925, 2), + (9934, 9934, 2), + (9940, 9940, 2), + (9962, 9962, 2), + (9970, 9971, 2), + (9973, 9973, 2), + (9978, 9978, 2), + (9981, 9981, 2), + (9989, 9989, 2), + (9994, 9995, 2), + (10024, 10024, 2), + (10060, 10060, 2), + (10062, 10062, 2), + (10067, 10069, 2), + (10071, 10071, 2), + (10133, 10135, 2), + (10160, 10160, 2), + (10175, 10175, 2), + (11035, 11036, 2), + (11088, 11088, 2), + (11093, 11093, 2), + (11503, 11505, 0), + (11647, 11647, 0), + (11744, 11775, 0), + (11904, 11929, 2), + (11931, 12019, 2), + (12032, 12245, 2), + (12272, 12329, 2), + (12330, 12335, 0), + (12336, 12350, 2), + (12353, 12438, 2), + (12441, 12442, 0), + (12443, 12543, 2), + (12549, 12591, 2), + (12593, 12686, 2), + (12688, 12771, 2), + (12783, 12830, 2), + (12832, 12871, 2), + (12880, 19903, 2), + (19968, 42124, 2), + (42128, 42182, 2), + (42607, 42610, 0), + (42612, 42621, 0), + (42654, 42655, 0), + (42736, 42737, 0), + (43010, 43010, 0), + (43014, 43014, 0), + (43019, 43019, 0), + (43043, 43047, 0), + (43052, 43052, 0), + (43136, 43137, 0), + (43188, 43205, 0), + (43232, 43249, 0), + (43263, 43263, 0), + (43302, 43309, 0), + (43335, 43347, 0), + (43360, 43388, 2), + (43392, 43395, 0), + (43443, 43456, 0), + (43493, 43493, 0), + (43561, 43574, 0), + (43587, 43587, 0), + (43596, 43597, 0), + (43643, 43645, 0), + (43696, 43696, 0), + (43698, 43700, 0), + (43703, 43704, 0), + (43710, 43711, 0), + (43713, 43713, 0), + (43755, 43759, 0), + (43765, 43766, 0), + (44003, 44010, 0), + (44012, 44013, 0), + (44032, 55203, 2), + (55216, 55295, 0), + (63744, 64255, 2), + (64286, 64286, 0), + (65024, 65039, 0), + (65040, 65049, 2), + (65056, 65071, 0), + (65072, 65106, 2), + (65108, 65126, 2), + (65128, 65131, 2), + (65279, 65279, 0), + (65281, 65376, 2), + (65504, 65510, 2), + (65529, 65531, 0), + (66045, 66045, 0), + (66272, 66272, 0), + (66422, 66426, 0), + (68097, 68099, 0), + (68101, 68102, 0), + (68108, 68111, 0), + (68152, 68154, 0), + (68159, 68159, 0), + (68325, 68326, 0), + (68900, 68903, 0), + (69291, 69292, 0), + (69373, 69375, 0), + (69446, 69456, 0), + (69506, 69509, 0), + (69632, 69634, 0), + (69688, 69702, 0), + (69744, 69744, 0), + (69747, 69748, 0), + (69759, 69762, 0), + (69808, 69818, 0), + (69821, 69821, 0), + (69826, 69826, 0), + (69837, 69837, 0), + (69888, 69890, 0), + (69927, 69940, 0), + (69957, 69958, 0), + (70003, 70003, 0), + (70016, 70018, 0), + (70067, 70080, 0), + (70089, 70092, 0), + (70094, 70095, 0), + (70188, 70199, 0), + (70206, 70206, 0), + (70209, 70209, 0), + (70367, 70378, 0), + (70400, 70403, 0), + (70459, 70460, 0), + (70462, 70468, 0), + (70471, 70472, 0), + (70475, 70477, 0), + (70487, 70487, 0), + (70498, 70499, 0), + (70502, 70508, 0), + (70512, 70516, 0), + (70709, 70726, 0), + (70750, 70750, 0), + (70832, 70851, 0), + (71087, 71093, 0), + (71096, 71104, 0), + (71132, 71133, 0), + (71216, 71232, 0), + (71339, 71351, 0), + (71453, 71467, 0), + (71724, 
71738, 0), + (71984, 71989, 0), + (71991, 71992, 0), + (71995, 71998, 0), + (72000, 72000, 0), + (72002, 72003, 0), + (72145, 72151, 0), + (72154, 72160, 0), + (72164, 72164, 0), + (72193, 72202, 0), + (72243, 72249, 0), + (72251, 72254, 0), + (72263, 72263, 0), + (72273, 72283, 0), + (72330, 72345, 0), + (72751, 72758, 0), + (72760, 72767, 0), + (72850, 72871, 0), + (72873, 72886, 0), + (73009, 73014, 0), + (73018, 73018, 0), + (73020, 73021, 0), + (73023, 73029, 0), + (73031, 73031, 0), + (73098, 73102, 0), + (73104, 73105, 0), + (73107, 73111, 0), + (73459, 73462, 0), + (73472, 73473, 0), + (73475, 73475, 0), + (73524, 73530, 0), + (73534, 73538, 0), + (78896, 78912, 0), + (78919, 78933, 0), + (92912, 92916, 0), + (92976, 92982, 0), + (94031, 94031, 0), + (94033, 94087, 0), + (94095, 94098, 0), + (94176, 94179, 2), + (94180, 94180, 0), + (94192, 94193, 0), + (94208, 100343, 2), + (100352, 101589, 2), + (101632, 101640, 2), + (110576, 110579, 2), + (110581, 110587, 2), + (110589, 110590, 2), + (110592, 110882, 2), + (110898, 110898, 2), + (110928, 110930, 2), + (110933, 110933, 2), + (110948, 110951, 2), + (110960, 111355, 2), + (113821, 113822, 0), + (113824, 113827, 0), + (118528, 118573, 0), + (118576, 118598, 0), + (119141, 119145, 0), + (119149, 119170, 0), + (119173, 119179, 0), + (119210, 119213, 0), + (119362, 119364, 0), + (121344, 121398, 0), + (121403, 121452, 0), + (121461, 121461, 0), + (121476, 121476, 0), + (121499, 121503, 0), + (121505, 121519, 0), + (122880, 122886, 0), + (122888, 122904, 0), + (122907, 122913, 0), + (122915, 122916, 0), + (122918, 122922, 0), + (123023, 123023, 0), + (123184, 123190, 0), + (123566, 123566, 0), + (123628, 123631, 0), + (124140, 124143, 0), + (125136, 125142, 0), + (125252, 125258, 0), + (126980, 126980, 2), + (127183, 127183, 2), + (127374, 127374, 2), + (127377, 127386, 2), + (127488, 127490, 2), + (127504, 127547, 2), + (127552, 127560, 2), + (127568, 127569, 2), + (127584, 127589, 2), + (127744, 127776, 2), + (127789, 127797, 2), + (127799, 127868, 2), + (127870, 127891, 2), + (127904, 127946, 2), + (127951, 127955, 2), + (127968, 127984, 2), + (127988, 127988, 2), + (127992, 127994, 2), + (127995, 127999, 0), + (128000, 128062, 2), + (128064, 128064, 2), + (128066, 128252, 2), + (128255, 128317, 2), + (128331, 128334, 2), + (128336, 128359, 2), + (128378, 128378, 2), + (128405, 128406, 2), + (128420, 128420, 2), + (128507, 128591, 2), + (128640, 128709, 2), + (128716, 128716, 2), + (128720, 128722, 2), + (128725, 128727, 2), + (128732, 128735, 2), + (128747, 128748, 2), + (128756, 128764, 2), + (128992, 129003, 2), + (129008, 129008, 2), + (129292, 129338, 2), + (129340, 129349, 2), + (129351, 129535, 2), + (129648, 129660, 2), + (129664, 129672, 2), + (129680, 129725, 2), + (129727, 129733, 2), + (129742, 129755, 2), + (129760, 129768, 2), + (129776, 129784, 2), + (131072, 196605, 2), + (196608, 262141, 2), + (917505, 917505, 0), + (917536, 917631, 0), + (917760, 917999, 0), +] diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py new file mode 100644 index 0000000..1f2877b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_codes.py @@ -0,0 +1,3610 @@ +EMOJI = { + "1st_place_medal": "🥇", + "2nd_place_medal": "🥈", + "3rd_place_medal": "🥉", + "ab_button_(blood_type)": "🆎", + "atm_sign": "🏧", + "a_button_(blood_type)": "🅰", + "afghanistan": "🇦🇫", + "albania": "🇦🇱", + "algeria": "🇩🇿", + "american_samoa": "🇦🇸", + 
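The (start, end, width) triples that close the table above map Unicode codepoint ranges to a terminal cell width: 0 for combining and zero-width characters, 2 for wide East Asian and emoji ranges, with everything uncovered defaulting to 1. A table in this shape is meant to be queried by binary search over the range starts. The sketch below is a minimal illustration over a truncated excerpt of the table; the helper name cell_width is hypothetical, not rich's actual API (rich's own lookup lives in rich/cells.py).

    # Illustrative sketch, not part of the commit. Assumes a sorted,
    # non-overlapping (start, end, width) table like the one above.
    CELL_WIDTHS = [
        (126980, 126980, 2),
        (131072, 196605, 2),
        (917760, 917999, 0),
    ]  # truncated excerpt for demonstration

    def cell_width(ch: str, table=CELL_WIDTHS) -> int:
        """Return the terminal cell width (0, 1, or 2) of a character."""
        cp = ord(ch)
        lo, hi = 0, len(table) - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            start, end, width = table[mid]
            if cp < start:
                hi = mid - 1
            elif cp > end:
                lo = mid + 1
            else:
                return width
        return 1  # codepoints not covered by the table render one cell wide

    assert cell_width("\U0001F004") == 2   # 131072..196605 range: wide
    assert cell_width("A") == 1            # not in the excerpt: default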
"andorra": "🇦🇩", + "angola": "🇦🇴", + "anguilla": "🇦🇮", + "antarctica": "🇦🇶", + "antigua_&_barbuda": "🇦🇬", + "aquarius": "♒", + "argentina": "🇦🇷", + "aries": "♈", + "armenia": "🇦🇲", + "aruba": "🇦🇼", + "ascension_island": "🇦🇨", + "australia": "🇦🇺", + "austria": "🇦🇹", + "azerbaijan": "🇦🇿", + "back_arrow": "🔙", + "b_button_(blood_type)": "🅱", + "bahamas": "🇧🇸", + "bahrain": "🇧🇭", + "bangladesh": "🇧🇩", + "barbados": "🇧🇧", + "belarus": "🇧🇾", + "belgium": "🇧🇪", + "belize": "🇧🇿", + "benin": "🇧🇯", + "bermuda": "🇧🇲", + "bhutan": "🇧🇹", + "bolivia": "🇧🇴", + "bosnia_&_herzegovina": "🇧🇦", + "botswana": "🇧🇼", + "bouvet_island": "🇧🇻", + "brazil": "🇧🇷", + "british_indian_ocean_territory": "🇮🇴", + "british_virgin_islands": "🇻🇬", + "brunei": "🇧🇳", + "bulgaria": "🇧🇬", + "burkina_faso": "🇧🇫", + "burundi": "🇧🇮", + "cl_button": "🆑", + "cool_button": "🆒", + "cambodia": "🇰🇭", + "cameroon": "🇨🇲", + "canada": "🇨🇦", + "canary_islands": "🇮🇨", + "cancer": "♋", + "cape_verde": "🇨🇻", + "capricorn": "♑", + "caribbean_netherlands": "🇧🇶", + "cayman_islands": "🇰🇾", + "central_african_republic": "🇨🇫", + "ceuta_&_melilla": "🇪🇦", + "chad": "🇹🇩", + "chile": "🇨🇱", + "china": "🇨🇳", + "christmas_island": "🇨🇽", + "christmas_tree": "🎄", + "clipperton_island": "🇨🇵", + "cocos_(keeling)_islands": "🇨🇨", + "colombia": "🇨🇴", + "comoros": "🇰🇲", + "congo_-_brazzaville": "🇨🇬", + "congo_-_kinshasa": "🇨🇩", + "cook_islands": "🇨🇰", + "costa_rica": "🇨🇷", + "croatia": "🇭🇷", + "cuba": "🇨🇺", + "curaçao": "🇨🇼", + "cyprus": "🇨🇾", + "czechia": "🇨🇿", + "côte_d’ivoire": "🇨🇮", + "denmark": "🇩🇰", + "diego_garcia": "🇩🇬", + "djibouti": "🇩🇯", + "dominica": "🇩🇲", + "dominican_republic": "🇩🇴", + "end_arrow": "🔚", + "ecuador": "🇪🇨", + "egypt": "🇪🇬", + "el_salvador": "🇸🇻", + "england": "🏴\U000e0067\U000e0062\U000e0065\U000e006e\U000e0067\U000e007f", + "equatorial_guinea": "🇬🇶", + "eritrea": "🇪🇷", + "estonia": "🇪🇪", + "ethiopia": "🇪🇹", + "european_union": "🇪🇺", + "free_button": "🆓", + "falkland_islands": "🇫🇰", + "faroe_islands": "🇫🇴", + "fiji": "🇫🇯", + "finland": "🇫🇮", + "france": "🇫🇷", + "french_guiana": "🇬🇫", + "french_polynesia": "🇵🇫", + "french_southern_territories": "🇹🇫", + "gabon": "🇬🇦", + "gambia": "🇬🇲", + "gemini": "♊", + "georgia": "🇬🇪", + "germany": "🇩🇪", + "ghana": "🇬🇭", + "gibraltar": "🇬🇮", + "greece": "🇬🇷", + "greenland": "🇬🇱", + "grenada": "🇬🇩", + "guadeloupe": "🇬🇵", + "guam": "🇬🇺", + "guatemala": "🇬🇹", + "guernsey": "🇬🇬", + "guinea": "🇬🇳", + "guinea-bissau": "🇬🇼", + "guyana": "🇬🇾", + "haiti": "🇭🇹", + "heard_&_mcdonald_islands": "🇭🇲", + "honduras": "🇭🇳", + "hong_kong_sar_china": "🇭🇰", + "hungary": "🇭🇺", + "id_button": "🆔", + "iceland": "🇮🇸", + "india": "🇮🇳", + "indonesia": "🇮🇩", + "iran": "🇮🇷", + "iraq": "🇮🇶", + "ireland": "🇮🇪", + "isle_of_man": "🇮🇲", + "israel": "🇮🇱", + "italy": "🇮🇹", + "jamaica": "🇯🇲", + "japan": "🗾", + "japanese_acceptable_button": "🉑", + "japanese_application_button": "🈸", + "japanese_bargain_button": "🉐", + "japanese_castle": "🏯", + "japanese_congratulations_button": "㊗", + "japanese_discount_button": "🈹", + "japanese_dolls": "🎎", + "japanese_free_of_charge_button": "🈚", + "japanese_here_button": "🈁", + "japanese_monthly_amount_button": "🈷", + "japanese_no_vacancy_button": "🈵", + "japanese_not_free_of_charge_button": "🈶", + "japanese_open_for_business_button": "🈺", + "japanese_passing_grade_button": "🈴", + "japanese_post_office": "🏣", + "japanese_prohibited_button": "🈲", + "japanese_reserved_button": "🈯", + "japanese_secret_button": "㊙", + "japanese_service_charge_button": "🈂", + "japanese_symbol_for_beginner": "🔰", + 
"japanese_vacancy_button": "🈳", + "jersey": "🇯🇪", + "jordan": "🇯🇴", + "kazakhstan": "🇰🇿", + "kenya": "🇰🇪", + "kiribati": "🇰🇮", + "kosovo": "🇽🇰", + "kuwait": "🇰🇼", + "kyrgyzstan": "🇰🇬", + "laos": "🇱🇦", + "latvia": "🇱🇻", + "lebanon": "🇱🇧", + "leo": "♌", + "lesotho": "🇱🇸", + "liberia": "🇱🇷", + "libra": "♎", + "libya": "🇱🇾", + "liechtenstein": "🇱🇮", + "lithuania": "🇱🇹", + "luxembourg": "🇱🇺", + "macau_sar_china": "🇲🇴", + "macedonia": "🇲🇰", + "madagascar": "🇲🇬", + "malawi": "🇲🇼", + "malaysia": "🇲🇾", + "maldives": "🇲🇻", + "mali": "🇲🇱", + "malta": "🇲🇹", + "marshall_islands": "🇲🇭", + "martinique": "🇲🇶", + "mauritania": "🇲🇷", + "mauritius": "🇲🇺", + "mayotte": "🇾🇹", + "mexico": "🇲🇽", + "micronesia": "🇫🇲", + "moldova": "🇲🇩", + "monaco": "🇲🇨", + "mongolia": "🇲🇳", + "montenegro": "🇲🇪", + "montserrat": "🇲🇸", + "morocco": "🇲🇦", + "mozambique": "🇲🇿", + "mrs._claus": "🤶", + "mrs._claus_dark_skin_tone": "🤶🏿", + "mrs._claus_light_skin_tone": "🤶🏻", + "mrs._claus_medium-dark_skin_tone": "🤶🏾", + "mrs._claus_medium-light_skin_tone": "🤶🏼", + "mrs._claus_medium_skin_tone": "🤶🏽", + "myanmar_(burma)": "🇲🇲", + "new_button": "🆕", + "ng_button": "🆖", + "namibia": "🇳🇦", + "nauru": "🇳🇷", + "nepal": "🇳🇵", + "netherlands": "🇳🇱", + "new_caledonia": "🇳🇨", + "new_zealand": "🇳🇿", + "nicaragua": "🇳🇮", + "niger": "🇳🇪", + "nigeria": "🇳🇬", + "niue": "🇳🇺", + "norfolk_island": "🇳🇫", + "north_korea": "🇰🇵", + "northern_mariana_islands": "🇲🇵", + "norway": "🇳🇴", + "ok_button": "🆗", + "ok_hand": "👌", + "ok_hand_dark_skin_tone": "👌🏿", + "ok_hand_light_skin_tone": "👌🏻", + "ok_hand_medium-dark_skin_tone": "👌🏾", + "ok_hand_medium-light_skin_tone": "👌🏼", + "ok_hand_medium_skin_tone": "👌🏽", + "on!_arrow": "🔛", + "o_button_(blood_type)": "🅾", + "oman": "🇴🇲", + "ophiuchus": "⛎", + "p_button": "🅿", + "pakistan": "🇵🇰", + "palau": "🇵🇼", + "palestinian_territories": "🇵🇸", + "panama": "🇵🇦", + "papua_new_guinea": "🇵🇬", + "paraguay": "🇵🇾", + "peru": "🇵🇪", + "philippines": "🇵🇭", + "pisces": "♓", + "pitcairn_islands": "🇵🇳", + "poland": "🇵🇱", + "portugal": "🇵🇹", + "puerto_rico": "🇵🇷", + "qatar": "🇶🇦", + "romania": "🇷🇴", + "russia": "🇷🇺", + "rwanda": "🇷🇼", + "réunion": "🇷🇪", + "soon_arrow": "🔜", + "sos_button": "🆘", + "sagittarius": "♐", + "samoa": "🇼🇸", + "san_marino": "🇸🇲", + "santa_claus": "🎅", + "santa_claus_dark_skin_tone": "🎅🏿", + "santa_claus_light_skin_tone": "🎅🏻", + "santa_claus_medium-dark_skin_tone": "🎅🏾", + "santa_claus_medium-light_skin_tone": "🎅🏼", + "santa_claus_medium_skin_tone": "🎅🏽", + "saudi_arabia": "🇸🇦", + "scorpio": "♏", + "scotland": "🏴\U000e0067\U000e0062\U000e0073\U000e0063\U000e0074\U000e007f", + "senegal": "🇸🇳", + "serbia": "🇷🇸", + "seychelles": "🇸🇨", + "sierra_leone": "🇸🇱", + "singapore": "🇸🇬", + "sint_maarten": "🇸🇽", + "slovakia": "🇸🇰", + "slovenia": "🇸🇮", + "solomon_islands": "🇸🇧", + "somalia": "🇸🇴", + "south_africa": "🇿🇦", + "south_georgia_&_south_sandwich_islands": "🇬🇸", + "south_korea": "🇰🇷", + "south_sudan": "🇸🇸", + "spain": "🇪🇸", + "sri_lanka": "🇱🇰", + "st._barthélemy": "🇧🇱", + "st._helena": "🇸🇭", + "st._kitts_&_nevis": "🇰🇳", + "st._lucia": "🇱🇨", + "st._martin": "🇲🇫", + "st._pierre_&_miquelon": "🇵🇲", + "st._vincent_&_grenadines": "🇻🇨", + "statue_of_liberty": "🗽", + "sudan": "🇸🇩", + "suriname": "🇸🇷", + "svalbard_&_jan_mayen": "🇸🇯", + "swaziland": "🇸🇿", + "sweden": "🇸🇪", + "switzerland": "🇨🇭", + "syria": "🇸🇾", + "são_tomé_&_príncipe": "🇸🇹", + "t-rex": "🦖", + "top_arrow": "🔝", + "taiwan": "🇹🇼", + "tajikistan": "🇹🇯", + "tanzania": "🇹🇿", + "taurus": "♉", + "thailand": "🇹🇭", + "timor-leste": "🇹🇱", + "togo": "🇹🇬", + "tokelau": 
"🇹🇰", + "tokyo_tower": "🗼", + "tonga": "🇹🇴", + "trinidad_&_tobago": "🇹🇹", + "tristan_da_cunha": "🇹🇦", + "tunisia": "🇹🇳", + "turkey": "🦃", + "turkmenistan": "🇹🇲", + "turks_&_caicos_islands": "🇹🇨", + "tuvalu": "🇹🇻", + "u.s._outlying_islands": "🇺🇲", + "u.s._virgin_islands": "🇻🇮", + "up!_button": "🆙", + "uganda": "🇺🇬", + "ukraine": "🇺🇦", + "united_arab_emirates": "🇦🇪", + "united_kingdom": "🇬🇧", + "united_nations": "🇺🇳", + "united_states": "🇺🇸", + "uruguay": "🇺🇾", + "uzbekistan": "🇺🇿", + "vs_button": "🆚", + "vanuatu": "🇻🇺", + "vatican_city": "🇻🇦", + "venezuela": "🇻🇪", + "vietnam": "🇻🇳", + "virgo": "♍", + "wales": "🏴\U000e0067\U000e0062\U000e0077\U000e006c\U000e0073\U000e007f", + "wallis_&_futuna": "🇼🇫", + "western_sahara": "🇪🇭", + "yemen": "🇾🇪", + "zambia": "🇿🇲", + "zimbabwe": "🇿🇼", + "abacus": "🧮", + "adhesive_bandage": "🩹", + "admission_tickets": "🎟", + "adult": "🧑", + "adult_dark_skin_tone": "🧑🏿", + "adult_light_skin_tone": "🧑🏻", + "adult_medium-dark_skin_tone": "🧑🏾", + "adult_medium-light_skin_tone": "🧑🏼", + "adult_medium_skin_tone": "🧑🏽", + "aerial_tramway": "🚡", + "airplane": "✈", + "airplane_arrival": "🛬", + "airplane_departure": "🛫", + "alarm_clock": "⏰", + "alembic": "⚗", + "alien": "👽", + "alien_monster": "👾", + "ambulance": "🚑", + "american_football": "🏈", + "amphora": "🏺", + "anchor": "⚓", + "anger_symbol": "💢", + "angry_face": "😠", + "angry_face_with_horns": "👿", + "anguished_face": "😧", + "ant": "🐜", + "antenna_bars": "📶", + "anxious_face_with_sweat": "😰", + "articulated_lorry": "🚛", + "artist_palette": "🎨", + "astonished_face": "😲", + "atom_symbol": "⚛", + "auto_rickshaw": "🛺", + "automobile": "🚗", + "avocado": "🥑", + "axe": "🪓", + "baby": "👶", + "baby_angel": "👼", + "baby_angel_dark_skin_tone": "👼🏿", + "baby_angel_light_skin_tone": "👼🏻", + "baby_angel_medium-dark_skin_tone": "👼🏾", + "baby_angel_medium-light_skin_tone": "👼🏼", + "baby_angel_medium_skin_tone": "👼🏽", + "baby_bottle": "🍼", + "baby_chick": "🐤", + "baby_dark_skin_tone": "👶🏿", + "baby_light_skin_tone": "👶🏻", + "baby_medium-dark_skin_tone": "👶🏾", + "baby_medium-light_skin_tone": "👶🏼", + "baby_medium_skin_tone": "👶🏽", + "baby_symbol": "🚼", + "backhand_index_pointing_down": "👇", + "backhand_index_pointing_down_dark_skin_tone": "👇🏿", + "backhand_index_pointing_down_light_skin_tone": "👇🏻", + "backhand_index_pointing_down_medium-dark_skin_tone": "👇🏾", + "backhand_index_pointing_down_medium-light_skin_tone": "👇🏼", + "backhand_index_pointing_down_medium_skin_tone": "👇🏽", + "backhand_index_pointing_left": "👈", + "backhand_index_pointing_left_dark_skin_tone": "👈🏿", + "backhand_index_pointing_left_light_skin_tone": "👈🏻", + "backhand_index_pointing_left_medium-dark_skin_tone": "👈🏾", + "backhand_index_pointing_left_medium-light_skin_tone": "👈🏼", + "backhand_index_pointing_left_medium_skin_tone": "👈🏽", + "backhand_index_pointing_right": "👉", + "backhand_index_pointing_right_dark_skin_tone": "👉🏿", + "backhand_index_pointing_right_light_skin_tone": "👉🏻", + "backhand_index_pointing_right_medium-dark_skin_tone": "👉🏾", + "backhand_index_pointing_right_medium-light_skin_tone": "👉🏼", + "backhand_index_pointing_right_medium_skin_tone": "👉🏽", + "backhand_index_pointing_up": "👆", + "backhand_index_pointing_up_dark_skin_tone": "👆🏿", + "backhand_index_pointing_up_light_skin_tone": "👆🏻", + "backhand_index_pointing_up_medium-dark_skin_tone": "👆🏾", + "backhand_index_pointing_up_medium-light_skin_tone": "👆🏼", + "backhand_index_pointing_up_medium_skin_tone": "👆🏽", + "bacon": "🥓", + "badger": "🦡", + "badminton": "🏸", + "bagel": "🥯", + "baggage_claim": 
"🛄", + "baguette_bread": "🥖", + "balance_scale": "⚖", + "bald": "🦲", + "bald_man": "👨\u200d🦲", + "bald_woman": "👩\u200d🦲", + "ballet_shoes": "🩰", + "balloon": "🎈", + "ballot_box_with_ballot": "🗳", + "ballot_box_with_check": "☑", + "banana": "🍌", + "banjo": "🪕", + "bank": "🏦", + "bar_chart": "📊", + "barber_pole": "💈", + "baseball": "⚾", + "basket": "🧺", + "basketball": "🏀", + "bat": "🦇", + "bathtub": "🛁", + "battery": "🔋", + "beach_with_umbrella": "🏖", + "beaming_face_with_smiling_eyes": "😁", + "bear_face": "🐻", + "bearded_person": "🧔", + "bearded_person_dark_skin_tone": "🧔🏿", + "bearded_person_light_skin_tone": "🧔🏻", + "bearded_person_medium-dark_skin_tone": "🧔🏾", + "bearded_person_medium-light_skin_tone": "🧔🏼", + "bearded_person_medium_skin_tone": "🧔🏽", + "beating_heart": "💓", + "bed": "🛏", + "beer_mug": "🍺", + "bell": "🔔", + "bell_with_slash": "🔕", + "bellhop_bell": "🛎", + "bento_box": "🍱", + "beverage_box": "🧃", + "bicycle": "🚲", + "bikini": "👙", + "billed_cap": "🧢", + "biohazard": "☣", + "bird": "🐦", + "birthday_cake": "🎂", + "black_circle": "⚫", + "black_flag": "🏴", + "black_heart": "🖤", + "black_large_square": "⬛", + "black_medium-small_square": "◾", + "black_medium_square": "◼", + "black_nib": "✒", + "black_small_square": "▪", + "black_square_button": "🔲", + "blond-haired_man": "👱\u200d♂️", + "blond-haired_man_dark_skin_tone": "👱🏿\u200d♂️", + "blond-haired_man_light_skin_tone": "👱🏻\u200d♂️", + "blond-haired_man_medium-dark_skin_tone": "👱🏾\u200d♂️", + "blond-haired_man_medium-light_skin_tone": "👱🏼\u200d♂️", + "blond-haired_man_medium_skin_tone": "👱🏽\u200d♂️", + "blond-haired_person": "👱", + "blond-haired_person_dark_skin_tone": "👱🏿", + "blond-haired_person_light_skin_tone": "👱🏻", + "blond-haired_person_medium-dark_skin_tone": "👱🏾", + "blond-haired_person_medium-light_skin_tone": "👱🏼", + "blond-haired_person_medium_skin_tone": "👱🏽", + "blond-haired_woman": "👱\u200d♀️", + "blond-haired_woman_dark_skin_tone": "👱🏿\u200d♀️", + "blond-haired_woman_light_skin_tone": "👱🏻\u200d♀️", + "blond-haired_woman_medium-dark_skin_tone": "👱🏾\u200d♀️", + "blond-haired_woman_medium-light_skin_tone": "👱🏼\u200d♀️", + "blond-haired_woman_medium_skin_tone": "👱🏽\u200d♀️", + "blossom": "🌼", + "blowfish": "🐡", + "blue_book": "📘", + "blue_circle": "🔵", + "blue_heart": "💙", + "blue_square": "🟦", + "boar": "🐗", + "bomb": "💣", + "bone": "🦴", + "bookmark": "🔖", + "bookmark_tabs": "📑", + "books": "📚", + "bottle_with_popping_cork": "🍾", + "bouquet": "💐", + "bow_and_arrow": "🏹", + "bowl_with_spoon": "🥣", + "bowling": "🎳", + "boxing_glove": "🥊", + "boy": "👦", + "boy_dark_skin_tone": "👦🏿", + "boy_light_skin_tone": "👦🏻", + "boy_medium-dark_skin_tone": "👦🏾", + "boy_medium-light_skin_tone": "👦🏼", + "boy_medium_skin_tone": "👦🏽", + "brain": "🧠", + "bread": "🍞", + "breast-feeding": "🤱", + "breast-feeding_dark_skin_tone": "🤱🏿", + "breast-feeding_light_skin_tone": "🤱🏻", + "breast-feeding_medium-dark_skin_tone": "🤱🏾", + "breast-feeding_medium-light_skin_tone": "🤱🏼", + "breast-feeding_medium_skin_tone": "🤱🏽", + "brick": "🧱", + "bride_with_veil": "👰", + "bride_with_veil_dark_skin_tone": "👰🏿", + "bride_with_veil_light_skin_tone": "👰🏻", + "bride_with_veil_medium-dark_skin_tone": "👰🏾", + "bride_with_veil_medium-light_skin_tone": "👰🏼", + "bride_with_veil_medium_skin_tone": "👰🏽", + "bridge_at_night": "🌉", + "briefcase": "💼", + "briefs": "🩲", + "bright_button": "🔆", + "broccoli": "🥦", + "broken_heart": "💔", + "broom": "🧹", + "brown_circle": "🟤", + "brown_heart": "🤎", + "brown_square": "🟫", + "bug": "🐛", + "building_construction": "🏗", + 
"bullet_train": "🚅", + "burrito": "🌯", + "bus": "🚌", + "bus_stop": "🚏", + "bust_in_silhouette": "👤", + "busts_in_silhouette": "👥", + "butter": "🧈", + "butterfly": "🦋", + "cactus": "🌵", + "calendar": "📆", + "call_me_hand": "🤙", + "call_me_hand_dark_skin_tone": "🤙🏿", + "call_me_hand_light_skin_tone": "🤙🏻", + "call_me_hand_medium-dark_skin_tone": "🤙🏾", + "call_me_hand_medium-light_skin_tone": "🤙🏼", + "call_me_hand_medium_skin_tone": "🤙🏽", + "camel": "🐫", + "camera": "📷", + "camera_with_flash": "📸", + "camping": "🏕", + "candle": "🕯", + "candy": "🍬", + "canned_food": "🥫", + "canoe": "🛶", + "card_file_box": "🗃", + "card_index": "📇", + "card_index_dividers": "🗂", + "carousel_horse": "🎠", + "carp_streamer": "🎏", + "carrot": "🥕", + "castle": "🏰", + "cat": "🐱", + "cat_face": "🐱", + "cat_face_with_tears_of_joy": "😹", + "cat_face_with_wry_smile": "😼", + "chains": "⛓", + "chair": "🪑", + "chart_decreasing": "📉", + "chart_increasing": "📈", + "chart_increasing_with_yen": "💹", + "cheese_wedge": "🧀", + "chequered_flag": "🏁", + "cherries": "🍒", + "cherry_blossom": "🌸", + "chess_pawn": "♟", + "chestnut": "🌰", + "chicken": "🐔", + "child": "🧒", + "child_dark_skin_tone": "🧒🏿", + "child_light_skin_tone": "🧒🏻", + "child_medium-dark_skin_tone": "🧒🏾", + "child_medium-light_skin_tone": "🧒🏼", + "child_medium_skin_tone": "🧒🏽", + "children_crossing": "🚸", + "chipmunk": "🐿", + "chocolate_bar": "🍫", + "chopsticks": "🥢", + "church": "⛪", + "cigarette": "🚬", + "cinema": "🎦", + "circled_m": "Ⓜ", + "circus_tent": "🎪", + "cityscape": "🏙", + "cityscape_at_dusk": "🌆", + "clamp": "🗜", + "clapper_board": "🎬", + "clapping_hands": "👏", + "clapping_hands_dark_skin_tone": "👏🏿", + "clapping_hands_light_skin_tone": "👏🏻", + "clapping_hands_medium-dark_skin_tone": "👏🏾", + "clapping_hands_medium-light_skin_tone": "👏🏼", + "clapping_hands_medium_skin_tone": "👏🏽", + "classical_building": "🏛", + "clinking_beer_mugs": "🍻", + "clinking_glasses": "🥂", + "clipboard": "📋", + "clockwise_vertical_arrows": "🔃", + "closed_book": "📕", + "closed_mailbox_with_lowered_flag": "📪", + "closed_mailbox_with_raised_flag": "📫", + "closed_umbrella": "🌂", + "cloud": "☁", + "cloud_with_lightning": "🌩", + "cloud_with_lightning_and_rain": "⛈", + "cloud_with_rain": "🌧", + "cloud_with_snow": "🌨", + "clown_face": "🤡", + "club_suit": "♣", + "clutch_bag": "👝", + "coat": "🧥", + "cocktail_glass": "🍸", + "coconut": "🥥", + "coffin": "⚰", + "cold_face": "🥶", + "collision": "💥", + "comet": "☄", + "compass": "🧭", + "computer_disk": "💽", + "computer_mouse": "🖱", + "confetti_ball": "🎊", + "confounded_face": "😖", + "confused_face": "😕", + "construction": "🚧", + "construction_worker": "👷", + "construction_worker_dark_skin_tone": "👷🏿", + "construction_worker_light_skin_tone": "👷🏻", + "construction_worker_medium-dark_skin_tone": "👷🏾", + "construction_worker_medium-light_skin_tone": "👷🏼", + "construction_worker_medium_skin_tone": "👷🏽", + "control_knobs": "🎛", + "convenience_store": "🏪", + "cooked_rice": "🍚", + "cookie": "🍪", + "cooking": "🍳", + "copyright": "©", + "couch_and_lamp": "🛋", + "counterclockwise_arrows_button": "🔄", + "couple_with_heart": "💑", + "couple_with_heart_man_man": "👨\u200d❤️\u200d👨", + "couple_with_heart_woman_man": "👩\u200d❤️\u200d👨", + "couple_with_heart_woman_woman": "👩\u200d❤️\u200d👩", + "cow": "🐮", + "cow_face": "🐮", + "cowboy_hat_face": "🤠", + "crab": "🦀", + "crayon": "🖍", + "credit_card": "💳", + "crescent_moon": "🌙", + "cricket": "🦗", + "cricket_game": "🏏", + "crocodile": "🐊", + "croissant": "🥐", + "cross_mark": "❌", + "cross_mark_button": "❎", + 
"crossed_fingers": "🤞", + "crossed_fingers_dark_skin_tone": "🤞🏿", + "crossed_fingers_light_skin_tone": "🤞🏻", + "crossed_fingers_medium-dark_skin_tone": "🤞🏾", + "crossed_fingers_medium-light_skin_tone": "🤞🏼", + "crossed_fingers_medium_skin_tone": "🤞🏽", + "crossed_flags": "🎌", + "crossed_swords": "⚔", + "crown": "👑", + "crying_cat_face": "😿", + "crying_face": "😢", + "crystal_ball": "🔮", + "cucumber": "🥒", + "cupcake": "🧁", + "cup_with_straw": "🥤", + "curling_stone": "🥌", + "curly_hair": "🦱", + "curly-haired_man": "👨\u200d🦱", + "curly-haired_woman": "👩\u200d🦱", + "curly_loop": "➰", + "currency_exchange": "💱", + "curry_rice": "🍛", + "custard": "🍮", + "customs": "🛃", + "cut_of_meat": "🥩", + "cyclone": "🌀", + "dagger": "🗡", + "dango": "🍡", + "dashing_away": "💨", + "deaf_person": "🧏", + "deciduous_tree": "🌳", + "deer": "🦌", + "delivery_truck": "🚚", + "department_store": "🏬", + "derelict_house": "🏚", + "desert": "🏜", + "desert_island": "🏝", + "desktop_computer": "🖥", + "detective": "🕵", + "detective_dark_skin_tone": "🕵🏿", + "detective_light_skin_tone": "🕵🏻", + "detective_medium-dark_skin_tone": "🕵🏾", + "detective_medium-light_skin_tone": "🕵🏼", + "detective_medium_skin_tone": "🕵🏽", + "diamond_suit": "♦", + "diamond_with_a_dot": "💠", + "dim_button": "🔅", + "direct_hit": "🎯", + "disappointed_face": "😞", + "diving_mask": "🤿", + "diya_lamp": "🪔", + "dizzy": "💫", + "dizzy_face": "😵", + "dna": "🧬", + "dog": "🐶", + "dog_face": "🐶", + "dollar_banknote": "💵", + "dolphin": "🐬", + "door": "🚪", + "dotted_six-pointed_star": "🔯", + "double_curly_loop": "➿", + "double_exclamation_mark": "‼", + "doughnut": "🍩", + "dove": "🕊", + "down-left_arrow": "↙", + "down-right_arrow": "↘", + "down_arrow": "⬇", + "downcast_face_with_sweat": "😓", + "downwards_button": "🔽", + "dragon": "🐉", + "dragon_face": "🐲", + "dress": "👗", + "drooling_face": "🤤", + "drop_of_blood": "🩸", + "droplet": "💧", + "drum": "🥁", + "duck": "🦆", + "dumpling": "🥟", + "dvd": "📀", + "e-mail": "📧", + "eagle": "🦅", + "ear": "👂", + "ear_dark_skin_tone": "👂🏿", + "ear_light_skin_tone": "👂🏻", + "ear_medium-dark_skin_tone": "👂🏾", + "ear_medium-light_skin_tone": "👂🏼", + "ear_medium_skin_tone": "👂🏽", + "ear_of_corn": "🌽", + "ear_with_hearing_aid": "🦻", + "egg": "🍳", + "eggplant": "🍆", + "eight-pointed_star": "✴", + "eight-spoked_asterisk": "✳", + "eight-thirty": "🕣", + "eight_o’clock": "🕗", + "eject_button": "⏏", + "electric_plug": "🔌", + "elephant": "🐘", + "eleven-thirty": "🕦", + "eleven_o’clock": "🕚", + "elf": "🧝", + "elf_dark_skin_tone": "🧝🏿", + "elf_light_skin_tone": "🧝🏻", + "elf_medium-dark_skin_tone": "🧝🏾", + "elf_medium-light_skin_tone": "🧝🏼", + "elf_medium_skin_tone": "🧝🏽", + "envelope": "✉", + "envelope_with_arrow": "📩", + "euro_banknote": "💶", + "evergreen_tree": "🌲", + "ewe": "🐑", + "exclamation_mark": "❗", + "exclamation_question_mark": "⁉", + "exploding_head": "🤯", + "expressionless_face": "😑", + "eye": "👁", + "eye_in_speech_bubble": "👁️\u200d🗨️", + "eyes": "👀", + "face_blowing_a_kiss": "😘", + "face_savoring_food": "😋", + "face_screaming_in_fear": "😱", + "face_vomiting": "🤮", + "face_with_hand_over_mouth": "🤭", + "face_with_head-bandage": "🤕", + "face_with_medical_mask": "😷", + "face_with_monocle": "🧐", + "face_with_open_mouth": "😮", + "face_with_raised_eyebrow": "🤨", + "face_with_rolling_eyes": "🙄", + "face_with_steam_from_nose": "😤", + "face_with_symbols_on_mouth": "🤬", + "face_with_tears_of_joy": "😂", + "face_with_thermometer": "🤒", + "face_with_tongue": "😛", + "face_without_mouth": "😶", + "factory": "🏭", + "fairy": "🧚", + "fairy_dark_skin_tone": 
"🧚🏿", + "fairy_light_skin_tone": "🧚🏻", + "fairy_medium-dark_skin_tone": "🧚🏾", + "fairy_medium-light_skin_tone": "🧚🏼", + "fairy_medium_skin_tone": "🧚🏽", + "falafel": "🧆", + "fallen_leaf": "🍂", + "family": "👪", + "family_man_boy": "👨\u200d👦", + "family_man_boy_boy": "👨\u200d👦\u200d👦", + "family_man_girl": "👨\u200d👧", + "family_man_girl_boy": "👨\u200d👧\u200d👦", + "family_man_girl_girl": "👨\u200d👧\u200d👧", + "family_man_man_boy": "👨\u200d👨\u200d👦", + "family_man_man_boy_boy": "👨\u200d👨\u200d👦\u200d👦", + "family_man_man_girl": "👨\u200d👨\u200d👧", + "family_man_man_girl_boy": "👨\u200d👨\u200d👧\u200d👦", + "family_man_man_girl_girl": "👨\u200d👨\u200d👧\u200d👧", + "family_man_woman_boy": "👨\u200d👩\u200d👦", + "family_man_woman_boy_boy": "👨\u200d👩\u200d👦\u200d👦", + "family_man_woman_girl": "👨\u200d👩\u200d👧", + "family_man_woman_girl_boy": "👨\u200d👩\u200d👧\u200d👦", + "family_man_woman_girl_girl": "👨\u200d👩\u200d👧\u200d👧", + "family_woman_boy": "👩\u200d👦", + "family_woman_boy_boy": "👩\u200d👦\u200d👦", + "family_woman_girl": "👩\u200d👧", + "family_woman_girl_boy": "👩\u200d👧\u200d👦", + "family_woman_girl_girl": "👩\u200d👧\u200d👧", + "family_woman_woman_boy": "👩\u200d👩\u200d👦", + "family_woman_woman_boy_boy": "👩\u200d👩\u200d👦\u200d👦", + "family_woman_woman_girl": "👩\u200d👩\u200d👧", + "family_woman_woman_girl_boy": "👩\u200d👩\u200d👧\u200d👦", + "family_woman_woman_girl_girl": "👩\u200d👩\u200d👧\u200d👧", + "fast-forward_button": "⏩", + "fast_down_button": "⏬", + "fast_reverse_button": "⏪", + "fast_up_button": "⏫", + "fax_machine": "📠", + "fearful_face": "😨", + "female_sign": "♀", + "ferris_wheel": "🎡", + "ferry": "⛴", + "field_hockey": "🏑", + "file_cabinet": "🗄", + "file_folder": "📁", + "film_frames": "🎞", + "film_projector": "📽", + "fire": "🔥", + "fire_extinguisher": "🧯", + "firecracker": "🧨", + "fire_engine": "🚒", + "fireworks": "🎆", + "first_quarter_moon": "🌓", + "first_quarter_moon_face": "🌛", + "fish": "🐟", + "fish_cake_with_swirl": "🍥", + "fishing_pole": "🎣", + "five-thirty": "🕠", + "five_o’clock": "🕔", + "flag_in_hole": "⛳", + "flamingo": "🦩", + "flashlight": "🔦", + "flat_shoe": "🥿", + "fleur-de-lis": "⚜", + "flexed_biceps": "💪", + "flexed_biceps_dark_skin_tone": "💪🏿", + "flexed_biceps_light_skin_tone": "💪🏻", + "flexed_biceps_medium-dark_skin_tone": "💪🏾", + "flexed_biceps_medium-light_skin_tone": "💪🏼", + "flexed_biceps_medium_skin_tone": "💪🏽", + "floppy_disk": "💾", + "flower_playing_cards": "🎴", + "flushed_face": "😳", + "flying_disc": "🥏", + "flying_saucer": "🛸", + "fog": "🌫", + "foggy": "🌁", + "folded_hands": "🙏", + "folded_hands_dark_skin_tone": "🙏🏿", + "folded_hands_light_skin_tone": "🙏🏻", + "folded_hands_medium-dark_skin_tone": "🙏🏾", + "folded_hands_medium-light_skin_tone": "🙏🏼", + "folded_hands_medium_skin_tone": "🙏🏽", + "foot": "🦶", + "footprints": "👣", + "fork_and_knife": "🍴", + "fork_and_knife_with_plate": "🍽", + "fortune_cookie": "🥠", + "fountain": "⛲", + "fountain_pen": "🖋", + "four-thirty": "🕟", + "four_leaf_clover": "🍀", + "four_o’clock": "🕓", + "fox_face": "🦊", + "framed_picture": "🖼", + "french_fries": "🍟", + "fried_shrimp": "🍤", + "frog_face": "🐸", + "front-facing_baby_chick": "🐥", + "frowning_face": "☹", + "frowning_face_with_open_mouth": "😦", + "fuel_pump": "⛽", + "full_moon": "🌕", + "full_moon_face": "🌝", + "funeral_urn": "⚱", + "game_die": "🎲", + "garlic": "🧄", + "gear": "⚙", + "gem_stone": "💎", + "genie": "🧞", + "ghost": "👻", + "giraffe": "🦒", + "girl": "👧", + "girl_dark_skin_tone": "👧🏿", + "girl_light_skin_tone": "👧🏻", + "girl_medium-dark_skin_tone": "👧🏾", + "girl_medium-light_skin_tone": 
"👧🏼", + "girl_medium_skin_tone": "👧🏽", + "glass_of_milk": "🥛", + "glasses": "👓", + "globe_showing_americas": "🌎", + "globe_showing_asia-australia": "🌏", + "globe_showing_europe-africa": "🌍", + "globe_with_meridians": "🌐", + "gloves": "🧤", + "glowing_star": "🌟", + "goal_net": "🥅", + "goat": "🐐", + "goblin": "👺", + "goggles": "🥽", + "gorilla": "🦍", + "graduation_cap": "🎓", + "grapes": "🍇", + "green_apple": "🍏", + "green_book": "📗", + "green_circle": "🟢", + "green_heart": "💚", + "green_salad": "🥗", + "green_square": "🟩", + "grimacing_face": "😬", + "grinning_cat_face": "😺", + "grinning_cat_face_with_smiling_eyes": "😸", + "grinning_face": "😀", + "grinning_face_with_big_eyes": "😃", + "grinning_face_with_smiling_eyes": "😄", + "grinning_face_with_sweat": "😅", + "grinning_squinting_face": "😆", + "growing_heart": "💗", + "guard": "💂", + "guard_dark_skin_tone": "💂🏿", + "guard_light_skin_tone": "💂🏻", + "guard_medium-dark_skin_tone": "💂🏾", + "guard_medium-light_skin_tone": "💂🏼", + "guard_medium_skin_tone": "💂🏽", + "guide_dog": "🦮", + "guitar": "🎸", + "hamburger": "🍔", + "hammer": "🔨", + "hammer_and_pick": "⚒", + "hammer_and_wrench": "🛠", + "hamster_face": "🐹", + "hand_with_fingers_splayed": "🖐", + "hand_with_fingers_splayed_dark_skin_tone": "🖐🏿", + "hand_with_fingers_splayed_light_skin_tone": "🖐🏻", + "hand_with_fingers_splayed_medium-dark_skin_tone": "🖐🏾", + "hand_with_fingers_splayed_medium-light_skin_tone": "🖐🏼", + "hand_with_fingers_splayed_medium_skin_tone": "🖐🏽", + "handbag": "👜", + "handshake": "🤝", + "hatching_chick": "🐣", + "headphone": "🎧", + "hear-no-evil_monkey": "🙉", + "heart_decoration": "💟", + "heart_suit": "♥", + "heart_with_arrow": "💘", + "heart_with_ribbon": "💝", + "heavy_check_mark": "✔", + "heavy_division_sign": "➗", + "heavy_dollar_sign": "💲", + "heavy_heart_exclamation": "❣", + "heavy_large_circle": "⭕", + "heavy_minus_sign": "➖", + "heavy_multiplication_x": "✖", + "heavy_plus_sign": "➕", + "hedgehog": "🦔", + "helicopter": "🚁", + "herb": "🌿", + "hibiscus": "🌺", + "high-heeled_shoe": "👠", + "high-speed_train": "🚄", + "high_voltage": "⚡", + "hiking_boot": "🥾", + "hindu_temple": "🛕", + "hippopotamus": "🦛", + "hole": "🕳", + "honey_pot": "🍯", + "honeybee": "🐝", + "horizontal_traffic_light": "🚥", + "horse": "🐴", + "horse_face": "🐴", + "horse_racing": "🏇", + "horse_racing_dark_skin_tone": "🏇🏿", + "horse_racing_light_skin_tone": "🏇🏻", + "horse_racing_medium-dark_skin_tone": "🏇🏾", + "horse_racing_medium-light_skin_tone": "🏇🏼", + "horse_racing_medium_skin_tone": "🏇🏽", + "hospital": "🏥", + "hot_beverage": "☕", + "hot_dog": "🌭", + "hot_face": "🥵", + "hot_pepper": "🌶", + "hot_springs": "♨", + "hotel": "🏨", + "hourglass_done": "⌛", + "hourglass_not_done": "⏳", + "house": "🏠", + "house_with_garden": "🏡", + "houses": "🏘", + "hugging_face": "🤗", + "hundred_points": "💯", + "hushed_face": "😯", + "ice": "🧊", + "ice_cream": "🍨", + "ice_hockey": "🏒", + "ice_skate": "⛸", + "inbox_tray": "📥", + "incoming_envelope": "📨", + "index_pointing_up": "☝", + "index_pointing_up_dark_skin_tone": "☝🏿", + "index_pointing_up_light_skin_tone": "☝🏻", + "index_pointing_up_medium-dark_skin_tone": "☝🏾", + "index_pointing_up_medium-light_skin_tone": "☝🏼", + "index_pointing_up_medium_skin_tone": "☝🏽", + "infinity": "♾", + "information": "ℹ", + "input_latin_letters": "🔤", + "input_latin_lowercase": "🔡", + "input_latin_uppercase": "🔠", + "input_numbers": "🔢", + "input_symbols": "🔣", + "jack-o-lantern": "🎃", + "jeans": "👖", + "jigsaw": "🧩", + "joker": "🃏", + "joystick": "🕹", + "kaaba": "🕋", + "kangaroo": "🦘", + "key": "🔑", + 
"keyboard": "⌨", + "keycap_#": "#️⃣", + "keycap_*": "*️⃣", + "keycap_0": "0️⃣", + "keycap_1": "1️⃣", + "keycap_10": "🔟", + "keycap_2": "2️⃣", + "keycap_3": "3️⃣", + "keycap_4": "4️⃣", + "keycap_5": "5️⃣", + "keycap_6": "6️⃣", + "keycap_7": "7️⃣", + "keycap_8": "8️⃣", + "keycap_9": "9️⃣", + "kick_scooter": "🛴", + "kimono": "👘", + "kiss": "💋", + "kiss_man_man": "👨\u200d❤️\u200d💋\u200d👨", + "kiss_mark": "💋", + "kiss_woman_man": "👩\u200d❤️\u200d💋\u200d👨", + "kiss_woman_woman": "👩\u200d❤️\u200d💋\u200d👩", + "kissing_cat_face": "😽", + "kissing_face": "😗", + "kissing_face_with_closed_eyes": "😚", + "kissing_face_with_smiling_eyes": "😙", + "kitchen_knife": "🔪", + "kite": "🪁", + "kiwi_fruit": "🥝", + "koala": "🐨", + "lab_coat": "🥼", + "label": "🏷", + "lacrosse": "🥍", + "lady_beetle": "🐞", + "laptop_computer": "💻", + "large_blue_diamond": "🔷", + "large_orange_diamond": "🔶", + "last_quarter_moon": "🌗", + "last_quarter_moon_face": "🌜", + "last_track_button": "⏮", + "latin_cross": "✝", + "leaf_fluttering_in_wind": "🍃", + "leafy_green": "🥬", + "ledger": "📒", + "left-facing_fist": "🤛", + "left-facing_fist_dark_skin_tone": "🤛🏿", + "left-facing_fist_light_skin_tone": "🤛🏻", + "left-facing_fist_medium-dark_skin_tone": "🤛🏾", + "left-facing_fist_medium-light_skin_tone": "🤛🏼", + "left-facing_fist_medium_skin_tone": "🤛🏽", + "left-right_arrow": "↔", + "left_arrow": "⬅", + "left_arrow_curving_right": "↪", + "left_luggage": "🛅", + "left_speech_bubble": "🗨", + "leg": "🦵", + "lemon": "🍋", + "leopard": "🐆", + "level_slider": "🎚", + "light_bulb": "💡", + "light_rail": "🚈", + "link": "🔗", + "linked_paperclips": "🖇", + "lion_face": "🦁", + "lipstick": "💄", + "litter_in_bin_sign": "🚮", + "lizard": "🦎", + "llama": "🦙", + "lobster": "🦞", + "locked": "🔒", + "locked_with_key": "🔐", + "locked_with_pen": "🔏", + "locomotive": "🚂", + "lollipop": "🍭", + "lotion_bottle": "🧴", + "loudly_crying_face": "😭", + "loudspeaker": "📢", + "love-you_gesture": "🤟", + "love-you_gesture_dark_skin_tone": "🤟🏿", + "love-you_gesture_light_skin_tone": "🤟🏻", + "love-you_gesture_medium-dark_skin_tone": "🤟🏾", + "love-you_gesture_medium-light_skin_tone": "🤟🏼", + "love-you_gesture_medium_skin_tone": "🤟🏽", + "love_hotel": "🏩", + "love_letter": "💌", + "luggage": "🧳", + "lying_face": "🤥", + "mage": "🧙", + "mage_dark_skin_tone": "🧙🏿", + "mage_light_skin_tone": "🧙🏻", + "mage_medium-dark_skin_tone": "🧙🏾", + "mage_medium-light_skin_tone": "🧙🏼", + "mage_medium_skin_tone": "🧙🏽", + "magnet": "🧲", + "magnifying_glass_tilted_left": "🔍", + "magnifying_glass_tilted_right": "🔎", + "mahjong_red_dragon": "🀄", + "male_sign": "♂", + "man": "👨", + "man_and_woman_holding_hands": "👫", + "man_artist": "👨\u200d🎨", + "man_artist_dark_skin_tone": "👨🏿\u200d🎨", + "man_artist_light_skin_tone": "👨🏻\u200d🎨", + "man_artist_medium-dark_skin_tone": "👨🏾\u200d🎨", + "man_artist_medium-light_skin_tone": "👨🏼\u200d🎨", + "man_artist_medium_skin_tone": "👨🏽\u200d🎨", + "man_astronaut": "👨\u200d🚀", + "man_astronaut_dark_skin_tone": "👨🏿\u200d🚀", + "man_astronaut_light_skin_tone": "👨🏻\u200d🚀", + "man_astronaut_medium-dark_skin_tone": "👨🏾\u200d🚀", + "man_astronaut_medium-light_skin_tone": "👨🏼\u200d🚀", + "man_astronaut_medium_skin_tone": "👨🏽\u200d🚀", + "man_biking": "🚴\u200d♂️", + "man_biking_dark_skin_tone": "🚴🏿\u200d♂️", + "man_biking_light_skin_tone": "🚴🏻\u200d♂️", + "man_biking_medium-dark_skin_tone": "🚴🏾\u200d♂️", + "man_biking_medium-light_skin_tone": "🚴🏼\u200d♂️", + "man_biking_medium_skin_tone": "🚴🏽\u200d♂️", + "man_bouncing_ball": "⛹️\u200d♂️", + "man_bouncing_ball_dark_skin_tone": "⛹🏿\u200d♂️", + 
"man_bouncing_ball_light_skin_tone": "⛹🏻\u200d♂️", + "man_bouncing_ball_medium-dark_skin_tone": "⛹🏾\u200d♂️", + "man_bouncing_ball_medium-light_skin_tone": "⛹🏼\u200d♂️", + "man_bouncing_ball_medium_skin_tone": "⛹🏽\u200d♂️", + "man_bowing": "🙇\u200d♂️", + "man_bowing_dark_skin_tone": "🙇🏿\u200d♂️", + "man_bowing_light_skin_tone": "🙇🏻\u200d♂️", + "man_bowing_medium-dark_skin_tone": "🙇🏾\u200d♂️", + "man_bowing_medium-light_skin_tone": "🙇🏼\u200d♂️", + "man_bowing_medium_skin_tone": "🙇🏽\u200d♂️", + "man_cartwheeling": "🤸\u200d♂️", + "man_cartwheeling_dark_skin_tone": "🤸🏿\u200d♂️", + "man_cartwheeling_light_skin_tone": "🤸🏻\u200d♂️", + "man_cartwheeling_medium-dark_skin_tone": "🤸🏾\u200d♂️", + "man_cartwheeling_medium-light_skin_tone": "🤸🏼\u200d♂️", + "man_cartwheeling_medium_skin_tone": "🤸🏽\u200d♂️", + "man_climbing": "🧗\u200d♂️", + "man_climbing_dark_skin_tone": "🧗🏿\u200d♂️", + "man_climbing_light_skin_tone": "🧗🏻\u200d♂️", + "man_climbing_medium-dark_skin_tone": "🧗🏾\u200d♂️", + "man_climbing_medium-light_skin_tone": "🧗🏼\u200d♂️", + "man_climbing_medium_skin_tone": "🧗🏽\u200d♂️", + "man_construction_worker": "👷\u200d♂️", + "man_construction_worker_dark_skin_tone": "👷🏿\u200d♂️", + "man_construction_worker_light_skin_tone": "👷🏻\u200d♂️", + "man_construction_worker_medium-dark_skin_tone": "👷🏾\u200d♂️", + "man_construction_worker_medium-light_skin_tone": "👷🏼\u200d♂️", + "man_construction_worker_medium_skin_tone": "👷🏽\u200d♂️", + "man_cook": "👨\u200d🍳", + "man_cook_dark_skin_tone": "👨🏿\u200d🍳", + "man_cook_light_skin_tone": "👨🏻\u200d🍳", + "man_cook_medium-dark_skin_tone": "👨🏾\u200d🍳", + "man_cook_medium-light_skin_tone": "👨🏼\u200d🍳", + "man_cook_medium_skin_tone": "👨🏽\u200d🍳", + "man_dancing": "🕺", + "man_dancing_dark_skin_tone": "🕺🏿", + "man_dancing_light_skin_tone": "🕺🏻", + "man_dancing_medium-dark_skin_tone": "🕺🏾", + "man_dancing_medium-light_skin_tone": "🕺🏼", + "man_dancing_medium_skin_tone": "🕺🏽", + "man_dark_skin_tone": "👨🏿", + "man_detective": "🕵️\u200d♂️", + "man_detective_dark_skin_tone": "🕵🏿\u200d♂️", + "man_detective_light_skin_tone": "🕵🏻\u200d♂️", + "man_detective_medium-dark_skin_tone": "🕵🏾\u200d♂️", + "man_detective_medium-light_skin_tone": "🕵🏼\u200d♂️", + "man_detective_medium_skin_tone": "🕵🏽\u200d♂️", + "man_elf": "🧝\u200d♂️", + "man_elf_dark_skin_tone": "🧝🏿\u200d♂️", + "man_elf_light_skin_tone": "🧝🏻\u200d♂️", + "man_elf_medium-dark_skin_tone": "🧝🏾\u200d♂️", + "man_elf_medium-light_skin_tone": "🧝🏼\u200d♂️", + "man_elf_medium_skin_tone": "🧝🏽\u200d♂️", + "man_facepalming": "🤦\u200d♂️", + "man_facepalming_dark_skin_tone": "🤦🏿\u200d♂️", + "man_facepalming_light_skin_tone": "🤦🏻\u200d♂️", + "man_facepalming_medium-dark_skin_tone": "🤦🏾\u200d♂️", + "man_facepalming_medium-light_skin_tone": "🤦🏼\u200d♂️", + "man_facepalming_medium_skin_tone": "🤦🏽\u200d♂️", + "man_factory_worker": "👨\u200d🏭", + "man_factory_worker_dark_skin_tone": "👨🏿\u200d🏭", + "man_factory_worker_light_skin_tone": "👨🏻\u200d🏭", + "man_factory_worker_medium-dark_skin_tone": "👨🏾\u200d🏭", + "man_factory_worker_medium-light_skin_tone": "👨🏼\u200d🏭", + "man_factory_worker_medium_skin_tone": "👨🏽\u200d🏭", + "man_fairy": "🧚\u200d♂️", + "man_fairy_dark_skin_tone": "🧚🏿\u200d♂️", + "man_fairy_light_skin_tone": "🧚🏻\u200d♂️", + "man_fairy_medium-dark_skin_tone": "🧚🏾\u200d♂️", + "man_fairy_medium-light_skin_tone": "🧚🏼\u200d♂️", + "man_fairy_medium_skin_tone": "🧚🏽\u200d♂️", + "man_farmer": "👨\u200d🌾", + "man_farmer_dark_skin_tone": "👨🏿\u200d🌾", + "man_farmer_light_skin_tone": "👨🏻\u200d🌾", + "man_farmer_medium-dark_skin_tone": "👨🏾\u200d🌾", + 
"man_farmer_medium-light_skin_tone": "👨🏼\u200d🌾", + "man_farmer_medium_skin_tone": "👨🏽\u200d🌾", + "man_firefighter": "👨\u200d🚒", + "man_firefighter_dark_skin_tone": "👨🏿\u200d🚒", + "man_firefighter_light_skin_tone": "👨🏻\u200d🚒", + "man_firefighter_medium-dark_skin_tone": "👨🏾\u200d🚒", + "man_firefighter_medium-light_skin_tone": "👨🏼\u200d🚒", + "man_firefighter_medium_skin_tone": "👨🏽\u200d🚒", + "man_frowning": "🙍\u200d♂️", + "man_frowning_dark_skin_tone": "🙍🏿\u200d♂️", + "man_frowning_light_skin_tone": "🙍🏻\u200d♂️", + "man_frowning_medium-dark_skin_tone": "🙍🏾\u200d♂️", + "man_frowning_medium-light_skin_tone": "🙍🏼\u200d♂️", + "man_frowning_medium_skin_tone": "🙍🏽\u200d♂️", + "man_genie": "🧞\u200d♂️", + "man_gesturing_no": "🙅\u200d♂️", + "man_gesturing_no_dark_skin_tone": "🙅🏿\u200d♂️", + "man_gesturing_no_light_skin_tone": "🙅🏻\u200d♂️", + "man_gesturing_no_medium-dark_skin_tone": "🙅🏾\u200d♂️", + "man_gesturing_no_medium-light_skin_tone": "🙅🏼\u200d♂️", + "man_gesturing_no_medium_skin_tone": "🙅🏽\u200d♂️", + "man_gesturing_ok": "🙆\u200d♂️", + "man_gesturing_ok_dark_skin_tone": "🙆🏿\u200d♂️", + "man_gesturing_ok_light_skin_tone": "🙆🏻\u200d♂️", + "man_gesturing_ok_medium-dark_skin_tone": "🙆🏾\u200d♂️", + "man_gesturing_ok_medium-light_skin_tone": "🙆🏼\u200d♂️", + "man_gesturing_ok_medium_skin_tone": "🙆🏽\u200d♂️", + "man_getting_haircut": "💇\u200d♂️", + "man_getting_haircut_dark_skin_tone": "💇🏿\u200d♂️", + "man_getting_haircut_light_skin_tone": "💇🏻\u200d♂️", + "man_getting_haircut_medium-dark_skin_tone": "💇🏾\u200d♂️", + "man_getting_haircut_medium-light_skin_tone": "💇🏼\u200d♂️", + "man_getting_haircut_medium_skin_tone": "💇🏽\u200d♂️", + "man_getting_massage": "💆\u200d♂️", + "man_getting_massage_dark_skin_tone": "💆🏿\u200d♂️", + "man_getting_massage_light_skin_tone": "💆🏻\u200d♂️", + "man_getting_massage_medium-dark_skin_tone": "💆🏾\u200d♂️", + "man_getting_massage_medium-light_skin_tone": "💆🏼\u200d♂️", + "man_getting_massage_medium_skin_tone": "💆🏽\u200d♂️", + "man_golfing": "🏌️\u200d♂️", + "man_golfing_dark_skin_tone": "🏌🏿\u200d♂️", + "man_golfing_light_skin_tone": "🏌🏻\u200d♂️", + "man_golfing_medium-dark_skin_tone": "🏌🏾\u200d♂️", + "man_golfing_medium-light_skin_tone": "🏌🏼\u200d♂️", + "man_golfing_medium_skin_tone": "🏌🏽\u200d♂️", + "man_guard": "💂\u200d♂️", + "man_guard_dark_skin_tone": "💂🏿\u200d♂️", + "man_guard_light_skin_tone": "💂🏻\u200d♂️", + "man_guard_medium-dark_skin_tone": "💂🏾\u200d♂️", + "man_guard_medium-light_skin_tone": "💂🏼\u200d♂️", + "man_guard_medium_skin_tone": "💂🏽\u200d♂️", + "man_health_worker": "👨\u200d⚕️", + "man_health_worker_dark_skin_tone": "👨🏿\u200d⚕️", + "man_health_worker_light_skin_tone": "👨🏻\u200d⚕️", + "man_health_worker_medium-dark_skin_tone": "👨🏾\u200d⚕️", + "man_health_worker_medium-light_skin_tone": "👨🏼\u200d⚕️", + "man_health_worker_medium_skin_tone": "👨🏽\u200d⚕️", + "man_in_lotus_position": "🧘\u200d♂️", + "man_in_lotus_position_dark_skin_tone": "🧘🏿\u200d♂️", + "man_in_lotus_position_light_skin_tone": "🧘🏻\u200d♂️", + "man_in_lotus_position_medium-dark_skin_tone": "🧘🏾\u200d♂️", + "man_in_lotus_position_medium-light_skin_tone": "🧘🏼\u200d♂️", + "man_in_lotus_position_medium_skin_tone": "🧘🏽\u200d♂️", + "man_in_manual_wheelchair": "👨\u200d🦽", + "man_in_motorized_wheelchair": "👨\u200d🦼", + "man_in_steamy_room": "🧖\u200d♂️", + "man_in_steamy_room_dark_skin_tone": "🧖🏿\u200d♂️", + "man_in_steamy_room_light_skin_tone": "🧖🏻\u200d♂️", + "man_in_steamy_room_medium-dark_skin_tone": "🧖🏾\u200d♂️", + "man_in_steamy_room_medium-light_skin_tone": "🧖🏼\u200d♂️", + 
"man_in_steamy_room_medium_skin_tone": "🧖🏽\u200d♂️", + "man_in_suit_levitating": "🕴", + "man_in_suit_levitating_dark_skin_tone": "🕴🏿", + "man_in_suit_levitating_light_skin_tone": "🕴🏻", + "man_in_suit_levitating_medium-dark_skin_tone": "🕴🏾", + "man_in_suit_levitating_medium-light_skin_tone": "🕴🏼", + "man_in_suit_levitating_medium_skin_tone": "🕴🏽", + "man_in_tuxedo": "🤵", + "man_in_tuxedo_dark_skin_tone": "🤵🏿", + "man_in_tuxedo_light_skin_tone": "🤵🏻", + "man_in_tuxedo_medium-dark_skin_tone": "🤵🏾", + "man_in_tuxedo_medium-light_skin_tone": "🤵🏼", + "man_in_tuxedo_medium_skin_tone": "🤵🏽", + "man_judge": "👨\u200d⚖️", + "man_judge_dark_skin_tone": "👨🏿\u200d⚖️", + "man_judge_light_skin_tone": "👨🏻\u200d⚖️", + "man_judge_medium-dark_skin_tone": "👨🏾\u200d⚖️", + "man_judge_medium-light_skin_tone": "👨🏼\u200d⚖️", + "man_judge_medium_skin_tone": "👨🏽\u200d⚖️", + "man_juggling": "🤹\u200d♂️", + "man_juggling_dark_skin_tone": "🤹🏿\u200d♂️", + "man_juggling_light_skin_tone": "🤹🏻\u200d♂️", + "man_juggling_medium-dark_skin_tone": "🤹🏾\u200d♂️", + "man_juggling_medium-light_skin_tone": "🤹🏼\u200d♂️", + "man_juggling_medium_skin_tone": "🤹🏽\u200d♂️", + "man_lifting_weights": "🏋️\u200d♂️", + "man_lifting_weights_dark_skin_tone": "🏋🏿\u200d♂️", + "man_lifting_weights_light_skin_tone": "🏋🏻\u200d♂️", + "man_lifting_weights_medium-dark_skin_tone": "🏋🏾\u200d♂️", + "man_lifting_weights_medium-light_skin_tone": "🏋🏼\u200d♂️", + "man_lifting_weights_medium_skin_tone": "🏋🏽\u200d♂️", + "man_light_skin_tone": "👨🏻", + "man_mage": "🧙\u200d♂️", + "man_mage_dark_skin_tone": "🧙🏿\u200d♂️", + "man_mage_light_skin_tone": "🧙🏻\u200d♂️", + "man_mage_medium-dark_skin_tone": "🧙🏾\u200d♂️", + "man_mage_medium-light_skin_tone": "🧙🏼\u200d♂️", + "man_mage_medium_skin_tone": "🧙🏽\u200d♂️", + "man_mechanic": "👨\u200d🔧", + "man_mechanic_dark_skin_tone": "👨🏿\u200d🔧", + "man_mechanic_light_skin_tone": "👨🏻\u200d🔧", + "man_mechanic_medium-dark_skin_tone": "👨🏾\u200d🔧", + "man_mechanic_medium-light_skin_tone": "👨🏼\u200d🔧", + "man_mechanic_medium_skin_tone": "👨🏽\u200d🔧", + "man_medium-dark_skin_tone": "👨🏾", + "man_medium-light_skin_tone": "👨🏼", + "man_medium_skin_tone": "👨🏽", + "man_mountain_biking": "🚵\u200d♂️", + "man_mountain_biking_dark_skin_tone": "🚵🏿\u200d♂️", + "man_mountain_biking_light_skin_tone": "🚵🏻\u200d♂️", + "man_mountain_biking_medium-dark_skin_tone": "🚵🏾\u200d♂️", + "man_mountain_biking_medium-light_skin_tone": "🚵🏼\u200d♂️", + "man_mountain_biking_medium_skin_tone": "🚵🏽\u200d♂️", + "man_office_worker": "👨\u200d💼", + "man_office_worker_dark_skin_tone": "👨🏿\u200d💼", + "man_office_worker_light_skin_tone": "👨🏻\u200d💼", + "man_office_worker_medium-dark_skin_tone": "👨🏾\u200d💼", + "man_office_worker_medium-light_skin_tone": "👨🏼\u200d💼", + "man_office_worker_medium_skin_tone": "👨🏽\u200d💼", + "man_pilot": "👨\u200d✈️", + "man_pilot_dark_skin_tone": "👨🏿\u200d✈️", + "man_pilot_light_skin_tone": "👨🏻\u200d✈️", + "man_pilot_medium-dark_skin_tone": "👨🏾\u200d✈️", + "man_pilot_medium-light_skin_tone": "👨🏼\u200d✈️", + "man_pilot_medium_skin_tone": "👨🏽\u200d✈️", + "man_playing_handball": "🤾\u200d♂️", + "man_playing_handball_dark_skin_tone": "🤾🏿\u200d♂️", + "man_playing_handball_light_skin_tone": "🤾🏻\u200d♂️", + "man_playing_handball_medium-dark_skin_tone": "🤾🏾\u200d♂️", + "man_playing_handball_medium-light_skin_tone": "🤾🏼\u200d♂️", + "man_playing_handball_medium_skin_tone": "🤾🏽\u200d♂️", + "man_playing_water_polo": "🤽\u200d♂️", + "man_playing_water_polo_dark_skin_tone": "🤽🏿\u200d♂️", + "man_playing_water_polo_light_skin_tone": "🤽🏻\u200d♂️", + 
"man_playing_water_polo_medium-dark_skin_tone": "🤽🏾\u200d♂️", + "man_playing_water_polo_medium-light_skin_tone": "🤽🏼\u200d♂️", + "man_playing_water_polo_medium_skin_tone": "🤽🏽\u200d♂️", + "man_police_officer": "👮\u200d♂️", + "man_police_officer_dark_skin_tone": "👮🏿\u200d♂️", + "man_police_officer_light_skin_tone": "👮🏻\u200d♂️", + "man_police_officer_medium-dark_skin_tone": "👮🏾\u200d♂️", + "man_police_officer_medium-light_skin_tone": "👮🏼\u200d♂️", + "man_police_officer_medium_skin_tone": "👮🏽\u200d♂️", + "man_pouting": "🙎\u200d♂️", + "man_pouting_dark_skin_tone": "🙎🏿\u200d♂️", + "man_pouting_light_skin_tone": "🙎🏻\u200d♂️", + "man_pouting_medium-dark_skin_tone": "🙎🏾\u200d♂️", + "man_pouting_medium-light_skin_tone": "🙎🏼\u200d♂️", + "man_pouting_medium_skin_tone": "🙎🏽\u200d♂️", + "man_raising_hand": "🙋\u200d♂️", + "man_raising_hand_dark_skin_tone": "🙋🏿\u200d♂️", + "man_raising_hand_light_skin_tone": "🙋🏻\u200d♂️", + "man_raising_hand_medium-dark_skin_tone": "🙋🏾\u200d♂️", + "man_raising_hand_medium-light_skin_tone": "🙋🏼\u200d♂️", + "man_raising_hand_medium_skin_tone": "🙋🏽\u200d♂️", + "man_rowing_boat": "🚣\u200d♂️", + "man_rowing_boat_dark_skin_tone": "🚣🏿\u200d♂️", + "man_rowing_boat_light_skin_tone": "🚣🏻\u200d♂️", + "man_rowing_boat_medium-dark_skin_tone": "🚣🏾\u200d♂️", + "man_rowing_boat_medium-light_skin_tone": "🚣🏼\u200d♂️", + "man_rowing_boat_medium_skin_tone": "🚣🏽\u200d♂️", + "man_running": "🏃\u200d♂️", + "man_running_dark_skin_tone": "🏃🏿\u200d♂️", + "man_running_light_skin_tone": "🏃🏻\u200d♂️", + "man_running_medium-dark_skin_tone": "🏃🏾\u200d♂️", + "man_running_medium-light_skin_tone": "🏃🏼\u200d♂️", + "man_running_medium_skin_tone": "🏃🏽\u200d♂️", + "man_scientist": "👨\u200d🔬", + "man_scientist_dark_skin_tone": "👨🏿\u200d🔬", + "man_scientist_light_skin_tone": "👨🏻\u200d🔬", + "man_scientist_medium-dark_skin_tone": "👨🏾\u200d🔬", + "man_scientist_medium-light_skin_tone": "👨🏼\u200d🔬", + "man_scientist_medium_skin_tone": "👨🏽\u200d🔬", + "man_shrugging": "🤷\u200d♂️", + "man_shrugging_dark_skin_tone": "🤷🏿\u200d♂️", + "man_shrugging_light_skin_tone": "🤷🏻\u200d♂️", + "man_shrugging_medium-dark_skin_tone": "🤷🏾\u200d♂️", + "man_shrugging_medium-light_skin_tone": "🤷🏼\u200d♂️", + "man_shrugging_medium_skin_tone": "🤷🏽\u200d♂️", + "man_singer": "👨\u200d🎤", + "man_singer_dark_skin_tone": "👨🏿\u200d🎤", + "man_singer_light_skin_tone": "👨🏻\u200d🎤", + "man_singer_medium-dark_skin_tone": "👨🏾\u200d🎤", + "man_singer_medium-light_skin_tone": "👨🏼\u200d🎤", + "man_singer_medium_skin_tone": "👨🏽\u200d🎤", + "man_student": "👨\u200d🎓", + "man_student_dark_skin_tone": "👨🏿\u200d🎓", + "man_student_light_skin_tone": "👨🏻\u200d🎓", + "man_student_medium-dark_skin_tone": "👨🏾\u200d🎓", + "man_student_medium-light_skin_tone": "👨🏼\u200d🎓", + "man_student_medium_skin_tone": "👨🏽\u200d🎓", + "man_surfing": "🏄\u200d♂️", + "man_surfing_dark_skin_tone": "🏄🏿\u200d♂️", + "man_surfing_light_skin_tone": "🏄🏻\u200d♂️", + "man_surfing_medium-dark_skin_tone": "🏄🏾\u200d♂️", + "man_surfing_medium-light_skin_tone": "🏄🏼\u200d♂️", + "man_surfing_medium_skin_tone": "🏄🏽\u200d♂️", + "man_swimming": "🏊\u200d♂️", + "man_swimming_dark_skin_tone": "🏊🏿\u200d♂️", + "man_swimming_light_skin_tone": "🏊🏻\u200d♂️", + "man_swimming_medium-dark_skin_tone": "🏊🏾\u200d♂️", + "man_swimming_medium-light_skin_tone": "🏊🏼\u200d♂️", + "man_swimming_medium_skin_tone": "🏊🏽\u200d♂️", + "man_teacher": "👨\u200d🏫", + "man_teacher_dark_skin_tone": "👨🏿\u200d🏫", + "man_teacher_light_skin_tone": "👨🏻\u200d🏫", + "man_teacher_medium-dark_skin_tone": "👨🏾\u200d🏫", + 
"man_teacher_medium-light_skin_tone": "👨🏼\u200d🏫", + "man_teacher_medium_skin_tone": "👨🏽\u200d🏫", + "man_technologist": "👨\u200d💻", + "man_technologist_dark_skin_tone": "👨🏿\u200d💻", + "man_technologist_light_skin_tone": "👨🏻\u200d💻", + "man_technologist_medium-dark_skin_tone": "👨🏾\u200d💻", + "man_technologist_medium-light_skin_tone": "👨🏼\u200d💻", + "man_technologist_medium_skin_tone": "👨🏽\u200d💻", + "man_tipping_hand": "💁\u200d♂️", + "man_tipping_hand_dark_skin_tone": "💁🏿\u200d♂️", + "man_tipping_hand_light_skin_tone": "💁🏻\u200d♂️", + "man_tipping_hand_medium-dark_skin_tone": "💁🏾\u200d♂️", + "man_tipping_hand_medium-light_skin_tone": "💁🏼\u200d♂️", + "man_tipping_hand_medium_skin_tone": "💁🏽\u200d♂️", + "man_vampire": "🧛\u200d♂️", + "man_vampire_dark_skin_tone": "🧛🏿\u200d♂️", + "man_vampire_light_skin_tone": "🧛🏻\u200d♂️", + "man_vampire_medium-dark_skin_tone": "🧛🏾\u200d♂️", + "man_vampire_medium-light_skin_tone": "🧛🏼\u200d♂️", + "man_vampire_medium_skin_tone": "🧛🏽\u200d♂️", + "man_walking": "🚶\u200d♂️", + "man_walking_dark_skin_tone": "🚶🏿\u200d♂️", + "man_walking_light_skin_tone": "🚶🏻\u200d♂️", + "man_walking_medium-dark_skin_tone": "🚶🏾\u200d♂️", + "man_walking_medium-light_skin_tone": "🚶🏼\u200d♂️", + "man_walking_medium_skin_tone": "🚶🏽\u200d♂️", + "man_wearing_turban": "👳\u200d♂️", + "man_wearing_turban_dark_skin_tone": "👳🏿\u200d♂️", + "man_wearing_turban_light_skin_tone": "👳🏻\u200d♂️", + "man_wearing_turban_medium-dark_skin_tone": "👳🏾\u200d♂️", + "man_wearing_turban_medium-light_skin_tone": "👳🏼\u200d♂️", + "man_wearing_turban_medium_skin_tone": "👳🏽\u200d♂️", + "man_with_probing_cane": "👨\u200d🦯", + "man_with_chinese_cap": "👲", + "man_with_chinese_cap_dark_skin_tone": "👲🏿", + "man_with_chinese_cap_light_skin_tone": "👲🏻", + "man_with_chinese_cap_medium-dark_skin_tone": "👲🏾", + "man_with_chinese_cap_medium-light_skin_tone": "👲🏼", + "man_with_chinese_cap_medium_skin_tone": "👲🏽", + "man_zombie": "🧟\u200d♂️", + "mango": "🥭", + "mantelpiece_clock": "🕰", + "manual_wheelchair": "🦽", + "man’s_shoe": "👞", + "map_of_japan": "🗾", + "maple_leaf": "🍁", + "martial_arts_uniform": "🥋", + "mate": "🧉", + "meat_on_bone": "🍖", + "mechanical_arm": "🦾", + "mechanical_leg": "🦿", + "medical_symbol": "⚕", + "megaphone": "📣", + "melon": "🍈", + "memo": "📝", + "men_with_bunny_ears": "👯\u200d♂️", + "men_wrestling": "🤼\u200d♂️", + "menorah": "🕎", + "men’s_room": "🚹", + "mermaid": "🧜\u200d♀️", + "mermaid_dark_skin_tone": "🧜🏿\u200d♀️", + "mermaid_light_skin_tone": "🧜🏻\u200d♀️", + "mermaid_medium-dark_skin_tone": "🧜🏾\u200d♀️", + "mermaid_medium-light_skin_tone": "🧜🏼\u200d♀️", + "mermaid_medium_skin_tone": "🧜🏽\u200d♀️", + "merman": "🧜\u200d♂️", + "merman_dark_skin_tone": "🧜🏿\u200d♂️", + "merman_light_skin_tone": "🧜🏻\u200d♂️", + "merman_medium-dark_skin_tone": "🧜🏾\u200d♂️", + "merman_medium-light_skin_tone": "🧜🏼\u200d♂️", + "merman_medium_skin_tone": "🧜🏽\u200d♂️", + "merperson": "🧜", + "merperson_dark_skin_tone": "🧜🏿", + "merperson_light_skin_tone": "🧜🏻", + "merperson_medium-dark_skin_tone": "🧜🏾", + "merperson_medium-light_skin_tone": "🧜🏼", + "merperson_medium_skin_tone": "🧜🏽", + "metro": "🚇", + "microbe": "🦠", + "microphone": "🎤", + "microscope": "🔬", + "middle_finger": "🖕", + "middle_finger_dark_skin_tone": "🖕🏿", + "middle_finger_light_skin_tone": "🖕🏻", + "middle_finger_medium-dark_skin_tone": "🖕🏾", + "middle_finger_medium-light_skin_tone": "🖕🏼", + "middle_finger_medium_skin_tone": "🖕🏽", + "military_medal": "🎖", + "milky_way": "🌌", + "minibus": "🚐", + "moai": "🗿", + "mobile_phone": "📱", + "mobile_phone_off": "📴", + 
"mobile_phone_with_arrow": "📲", + "money-mouth_face": "🤑", + "money_bag": "💰", + "money_with_wings": "💸", + "monkey": "🐒", + "monkey_face": "🐵", + "monorail": "🚝", + "moon_cake": "🥮", + "moon_viewing_ceremony": "🎑", + "mosque": "🕌", + "mosquito": "🦟", + "motor_boat": "🛥", + "motor_scooter": "🛵", + "motorcycle": "🏍", + "motorized_wheelchair": "🦼", + "motorway": "🛣", + "mount_fuji": "🗻", + "mountain": "⛰", + "mountain_cableway": "🚠", + "mountain_railway": "🚞", + "mouse": "🐭", + "mouse_face": "🐭", + "mouth": "👄", + "movie_camera": "🎥", + "mushroom": "🍄", + "musical_keyboard": "🎹", + "musical_note": "🎵", + "musical_notes": "🎶", + "musical_score": "🎼", + "muted_speaker": "🔇", + "nail_polish": "💅", + "nail_polish_dark_skin_tone": "💅🏿", + "nail_polish_light_skin_tone": "💅🏻", + "nail_polish_medium-dark_skin_tone": "💅🏾", + "nail_polish_medium-light_skin_tone": "💅🏼", + "nail_polish_medium_skin_tone": "💅🏽", + "name_badge": "📛", + "national_park": "🏞", + "nauseated_face": "🤢", + "nazar_amulet": "🧿", + "necktie": "👔", + "nerd_face": "🤓", + "neutral_face": "😐", + "new_moon": "🌑", + "new_moon_face": "🌚", + "newspaper": "📰", + "next_track_button": "⏭", + "night_with_stars": "🌃", + "nine-thirty": "🕤", + "nine_o’clock": "🕘", + "no_bicycles": "🚳", + "no_entry": "⛔", + "no_littering": "🚯", + "no_mobile_phones": "📵", + "no_one_under_eighteen": "🔞", + "no_pedestrians": "🚷", + "no_smoking": "🚭", + "non-potable_water": "🚱", + "nose": "👃", + "nose_dark_skin_tone": "👃🏿", + "nose_light_skin_tone": "👃🏻", + "nose_medium-dark_skin_tone": "👃🏾", + "nose_medium-light_skin_tone": "👃🏼", + "nose_medium_skin_tone": "👃🏽", + "notebook": "📓", + "notebook_with_decorative_cover": "📔", + "nut_and_bolt": "🔩", + "octopus": "🐙", + "oden": "🍢", + "office_building": "🏢", + "ogre": "👹", + "oil_drum": "🛢", + "old_key": "🗝", + "old_man": "👴", + "old_man_dark_skin_tone": "👴🏿", + "old_man_light_skin_tone": "👴🏻", + "old_man_medium-dark_skin_tone": "👴🏾", + "old_man_medium-light_skin_tone": "👴🏼", + "old_man_medium_skin_tone": "👴🏽", + "old_woman": "👵", + "old_woman_dark_skin_tone": "👵🏿", + "old_woman_light_skin_tone": "👵🏻", + "old_woman_medium-dark_skin_tone": "👵🏾", + "old_woman_medium-light_skin_tone": "👵🏼", + "old_woman_medium_skin_tone": "👵🏽", + "older_adult": "🧓", + "older_adult_dark_skin_tone": "🧓🏿", + "older_adult_light_skin_tone": "🧓🏻", + "older_adult_medium-dark_skin_tone": "🧓🏾", + "older_adult_medium-light_skin_tone": "🧓🏼", + "older_adult_medium_skin_tone": "🧓🏽", + "om": "🕉", + "oncoming_automobile": "🚘", + "oncoming_bus": "🚍", + "oncoming_fist": "👊", + "oncoming_fist_dark_skin_tone": "👊🏿", + "oncoming_fist_light_skin_tone": "👊🏻", + "oncoming_fist_medium-dark_skin_tone": "👊🏾", + "oncoming_fist_medium-light_skin_tone": "👊🏼", + "oncoming_fist_medium_skin_tone": "👊🏽", + "oncoming_police_car": "🚔", + "oncoming_taxi": "🚖", + "one-piece_swimsuit": "🩱", + "one-thirty": "🕜", + "one_o’clock": "🕐", + "onion": "🧅", + "open_book": "📖", + "open_file_folder": "📂", + "open_hands": "👐", + "open_hands_dark_skin_tone": "👐🏿", + "open_hands_light_skin_tone": "👐🏻", + "open_hands_medium-dark_skin_tone": "👐🏾", + "open_hands_medium-light_skin_tone": "👐🏼", + "open_hands_medium_skin_tone": "👐🏽", + "open_mailbox_with_lowered_flag": "📭", + "open_mailbox_with_raised_flag": "📬", + "optical_disk": "💿", + "orange_book": "📙", + "orange_circle": "🟠", + "orange_heart": "🧡", + "orange_square": "🟧", + "orangutan": "🦧", + "orthodox_cross": "☦", + "otter": "🦦", + "outbox_tray": "📤", + "owl": "🦉", + "ox": "🐂", + "oyster": "🦪", + "package": "📦", + "page_facing_up": "📄", + 
"page_with_curl": "📃", + "pager": "📟", + "paintbrush": "🖌", + "palm_tree": "🌴", + "palms_up_together": "🤲", + "palms_up_together_dark_skin_tone": "🤲🏿", + "palms_up_together_light_skin_tone": "🤲🏻", + "palms_up_together_medium-dark_skin_tone": "🤲🏾", + "palms_up_together_medium-light_skin_tone": "🤲🏼", + "palms_up_together_medium_skin_tone": "🤲🏽", + "pancakes": "🥞", + "panda_face": "🐼", + "paperclip": "📎", + "parrot": "🦜", + "part_alternation_mark": "〽", + "party_popper": "🎉", + "partying_face": "🥳", + "passenger_ship": "🛳", + "passport_control": "🛂", + "pause_button": "⏸", + "paw_prints": "🐾", + "peace_symbol": "☮", + "peach": "🍑", + "peacock": "🦚", + "peanuts": "🥜", + "pear": "🍐", + "pen": "🖊", + "pencil": "📝", + "penguin": "🐧", + "pensive_face": "😔", + "people_holding_hands": "🧑\u200d🤝\u200d🧑", + "people_with_bunny_ears": "👯", + "people_wrestling": "🤼", + "performing_arts": "🎭", + "persevering_face": "😣", + "person_biking": "🚴", + "person_biking_dark_skin_tone": "🚴🏿", + "person_biking_light_skin_tone": "🚴🏻", + "person_biking_medium-dark_skin_tone": "🚴🏾", + "person_biking_medium-light_skin_tone": "🚴🏼", + "person_biking_medium_skin_tone": "🚴🏽", + "person_bouncing_ball": "⛹", + "person_bouncing_ball_dark_skin_tone": "⛹🏿", + "person_bouncing_ball_light_skin_tone": "⛹🏻", + "person_bouncing_ball_medium-dark_skin_tone": "⛹🏾", + "person_bouncing_ball_medium-light_skin_tone": "⛹🏼", + "person_bouncing_ball_medium_skin_tone": "⛹🏽", + "person_bowing": "🙇", + "person_bowing_dark_skin_tone": "🙇🏿", + "person_bowing_light_skin_tone": "🙇🏻", + "person_bowing_medium-dark_skin_tone": "🙇🏾", + "person_bowing_medium-light_skin_tone": "🙇🏼", + "person_bowing_medium_skin_tone": "🙇🏽", + "person_cartwheeling": "🤸", + "person_cartwheeling_dark_skin_tone": "🤸🏿", + "person_cartwheeling_light_skin_tone": "🤸🏻", + "person_cartwheeling_medium-dark_skin_tone": "🤸🏾", + "person_cartwheeling_medium-light_skin_tone": "🤸🏼", + "person_cartwheeling_medium_skin_tone": "🤸🏽", + "person_climbing": "🧗", + "person_climbing_dark_skin_tone": "🧗🏿", + "person_climbing_light_skin_tone": "🧗🏻", + "person_climbing_medium-dark_skin_tone": "🧗🏾", + "person_climbing_medium-light_skin_tone": "🧗🏼", + "person_climbing_medium_skin_tone": "🧗🏽", + "person_facepalming": "🤦", + "person_facepalming_dark_skin_tone": "🤦🏿", + "person_facepalming_light_skin_tone": "🤦🏻", + "person_facepalming_medium-dark_skin_tone": "🤦🏾", + "person_facepalming_medium-light_skin_tone": "🤦🏼", + "person_facepalming_medium_skin_tone": "🤦🏽", + "person_fencing": "🤺", + "person_frowning": "🙍", + "person_frowning_dark_skin_tone": "🙍🏿", + "person_frowning_light_skin_tone": "🙍🏻", + "person_frowning_medium-dark_skin_tone": "🙍🏾", + "person_frowning_medium-light_skin_tone": "🙍🏼", + "person_frowning_medium_skin_tone": "🙍🏽", + "person_gesturing_no": "🙅", + "person_gesturing_no_dark_skin_tone": "🙅🏿", + "person_gesturing_no_light_skin_tone": "🙅🏻", + "person_gesturing_no_medium-dark_skin_tone": "🙅🏾", + "person_gesturing_no_medium-light_skin_tone": "🙅🏼", + "person_gesturing_no_medium_skin_tone": "🙅🏽", + "person_gesturing_ok": "🙆", + "person_gesturing_ok_dark_skin_tone": "🙆🏿", + "person_gesturing_ok_light_skin_tone": "🙆🏻", + "person_gesturing_ok_medium-dark_skin_tone": "🙆🏾", + "person_gesturing_ok_medium-light_skin_tone": "🙆🏼", + "person_gesturing_ok_medium_skin_tone": "🙆🏽", + "person_getting_haircut": "💇", + "person_getting_haircut_dark_skin_tone": "💇🏿", + "person_getting_haircut_light_skin_tone": "💇🏻", + "person_getting_haircut_medium-dark_skin_tone": "💇🏾", + 
"person_getting_haircut_medium-light_skin_tone": "💇🏼", + "person_getting_haircut_medium_skin_tone": "💇🏽", + "person_getting_massage": "💆", + "person_getting_massage_dark_skin_tone": "💆🏿", + "person_getting_massage_light_skin_tone": "💆🏻", + "person_getting_massage_medium-dark_skin_tone": "💆🏾", + "person_getting_massage_medium-light_skin_tone": "💆🏼", + "person_getting_massage_medium_skin_tone": "💆🏽", + "person_golfing": "🏌", + "person_golfing_dark_skin_tone": "🏌🏿", + "person_golfing_light_skin_tone": "🏌🏻", + "person_golfing_medium-dark_skin_tone": "🏌🏾", + "person_golfing_medium-light_skin_tone": "🏌🏼", + "person_golfing_medium_skin_tone": "🏌🏽", + "person_in_bed": "🛌", + "person_in_bed_dark_skin_tone": "🛌🏿", + "person_in_bed_light_skin_tone": "🛌🏻", + "person_in_bed_medium-dark_skin_tone": "🛌🏾", + "person_in_bed_medium-light_skin_tone": "🛌🏼", + "person_in_bed_medium_skin_tone": "🛌🏽", + "person_in_lotus_position": "🧘", + "person_in_lotus_position_dark_skin_tone": "🧘🏿", + "person_in_lotus_position_light_skin_tone": "🧘🏻", + "person_in_lotus_position_medium-dark_skin_tone": "🧘🏾", + "person_in_lotus_position_medium-light_skin_tone": "🧘🏼", + "person_in_lotus_position_medium_skin_tone": "🧘🏽", + "person_in_steamy_room": "🧖", + "person_in_steamy_room_dark_skin_tone": "🧖🏿", + "person_in_steamy_room_light_skin_tone": "🧖🏻", + "person_in_steamy_room_medium-dark_skin_tone": "🧖🏾", + "person_in_steamy_room_medium-light_skin_tone": "🧖🏼", + "person_in_steamy_room_medium_skin_tone": "🧖🏽", + "person_juggling": "🤹", + "person_juggling_dark_skin_tone": "🤹🏿", + "person_juggling_light_skin_tone": "🤹🏻", + "person_juggling_medium-dark_skin_tone": "🤹🏾", + "person_juggling_medium-light_skin_tone": "🤹🏼", + "person_juggling_medium_skin_tone": "🤹🏽", + "person_kneeling": "🧎", + "person_lifting_weights": "🏋", + "person_lifting_weights_dark_skin_tone": "🏋🏿", + "person_lifting_weights_light_skin_tone": "🏋🏻", + "person_lifting_weights_medium-dark_skin_tone": "🏋🏾", + "person_lifting_weights_medium-light_skin_tone": "🏋🏼", + "person_lifting_weights_medium_skin_tone": "🏋🏽", + "person_mountain_biking": "🚵", + "person_mountain_biking_dark_skin_tone": "🚵🏿", + "person_mountain_biking_light_skin_tone": "🚵🏻", + "person_mountain_biking_medium-dark_skin_tone": "🚵🏾", + "person_mountain_biking_medium-light_skin_tone": "🚵🏼", + "person_mountain_biking_medium_skin_tone": "🚵🏽", + "person_playing_handball": "🤾", + "person_playing_handball_dark_skin_tone": "🤾🏿", + "person_playing_handball_light_skin_tone": "🤾🏻", + "person_playing_handball_medium-dark_skin_tone": "🤾🏾", + "person_playing_handball_medium-light_skin_tone": "🤾🏼", + "person_playing_handball_medium_skin_tone": "🤾🏽", + "person_playing_water_polo": "🤽", + "person_playing_water_polo_dark_skin_tone": "🤽🏿", + "person_playing_water_polo_light_skin_tone": "🤽🏻", + "person_playing_water_polo_medium-dark_skin_tone": "🤽🏾", + "person_playing_water_polo_medium-light_skin_tone": "🤽🏼", + "person_playing_water_polo_medium_skin_tone": "🤽🏽", + "person_pouting": "🙎", + "person_pouting_dark_skin_tone": "🙎🏿", + "person_pouting_light_skin_tone": "🙎🏻", + "person_pouting_medium-dark_skin_tone": "🙎🏾", + "person_pouting_medium-light_skin_tone": "🙎🏼", + "person_pouting_medium_skin_tone": "🙎🏽", + "person_raising_hand": "🙋", + "person_raising_hand_dark_skin_tone": "🙋🏿", + "person_raising_hand_light_skin_tone": "🙋🏻", + "person_raising_hand_medium-dark_skin_tone": "🙋🏾", + "person_raising_hand_medium-light_skin_tone": "🙋🏼", + "person_raising_hand_medium_skin_tone": "🙋🏽", + "person_rowing_boat": "🚣", + 
"person_rowing_boat_dark_skin_tone": "🚣🏿", + "person_rowing_boat_light_skin_tone": "🚣🏻", + "person_rowing_boat_medium-dark_skin_tone": "🚣🏾", + "person_rowing_boat_medium-light_skin_tone": "🚣🏼", + "person_rowing_boat_medium_skin_tone": "🚣🏽", + "person_running": "🏃", + "person_running_dark_skin_tone": "🏃🏿", + "person_running_light_skin_tone": "🏃🏻", + "person_running_medium-dark_skin_tone": "🏃🏾", + "person_running_medium-light_skin_tone": "🏃🏼", + "person_running_medium_skin_tone": "🏃🏽", + "person_shrugging": "🤷", + "person_shrugging_dark_skin_tone": "🤷🏿", + "person_shrugging_light_skin_tone": "🤷🏻", + "person_shrugging_medium-dark_skin_tone": "🤷🏾", + "person_shrugging_medium-light_skin_tone": "🤷🏼", + "person_shrugging_medium_skin_tone": "🤷🏽", + "person_standing": "🧍", + "person_surfing": "🏄", + "person_surfing_dark_skin_tone": "🏄🏿", + "person_surfing_light_skin_tone": "🏄🏻", + "person_surfing_medium-dark_skin_tone": "🏄🏾", + "person_surfing_medium-light_skin_tone": "🏄🏼", + "person_surfing_medium_skin_tone": "🏄🏽", + "person_swimming": "🏊", + "person_swimming_dark_skin_tone": "🏊🏿", + "person_swimming_light_skin_tone": "🏊🏻", + "person_swimming_medium-dark_skin_tone": "🏊🏾", + "person_swimming_medium-light_skin_tone": "🏊🏼", + "person_swimming_medium_skin_tone": "🏊🏽", + "person_taking_bath": "🛀", + "person_taking_bath_dark_skin_tone": "🛀🏿", + "person_taking_bath_light_skin_tone": "🛀🏻", + "person_taking_bath_medium-dark_skin_tone": "🛀🏾", + "person_taking_bath_medium-light_skin_tone": "🛀🏼", + "person_taking_bath_medium_skin_tone": "🛀🏽", + "person_tipping_hand": "💁", + "person_tipping_hand_dark_skin_tone": "💁🏿", + "person_tipping_hand_light_skin_tone": "💁🏻", + "person_tipping_hand_medium-dark_skin_tone": "💁🏾", + "person_tipping_hand_medium-light_skin_tone": "💁🏼", + "person_tipping_hand_medium_skin_tone": "💁🏽", + "person_walking": "🚶", + "person_walking_dark_skin_tone": "🚶🏿", + "person_walking_light_skin_tone": "🚶🏻", + "person_walking_medium-dark_skin_tone": "🚶🏾", + "person_walking_medium-light_skin_tone": "🚶🏼", + "person_walking_medium_skin_tone": "🚶🏽", + "person_wearing_turban": "👳", + "person_wearing_turban_dark_skin_tone": "👳🏿", + "person_wearing_turban_light_skin_tone": "👳🏻", + "person_wearing_turban_medium-dark_skin_tone": "👳🏾", + "person_wearing_turban_medium-light_skin_tone": "👳🏼", + "person_wearing_turban_medium_skin_tone": "👳🏽", + "petri_dish": "🧫", + "pick": "⛏", + "pie": "🥧", + "pig": "🐷", + "pig_face": "🐷", + "pig_nose": "🐽", + "pile_of_poo": "💩", + "pill": "💊", + "pinching_hand": "🤏", + "pine_decoration": "🎍", + "pineapple": "🍍", + "ping_pong": "🏓", + "pirate_flag": "🏴\u200d☠️", + "pistol": "🔫", + "pizza": "🍕", + "place_of_worship": "🛐", + "play_button": "▶", + "play_or_pause_button": "⏯", + "pleading_face": "🥺", + "police_car": "🚓", + "police_car_light": "🚨", + "police_officer": "👮", + "police_officer_dark_skin_tone": "👮🏿", + "police_officer_light_skin_tone": "👮🏻", + "police_officer_medium-dark_skin_tone": "👮🏾", + "police_officer_medium-light_skin_tone": "👮🏼", + "police_officer_medium_skin_tone": "👮🏽", + "poodle": "🐩", + "pool_8_ball": "🎱", + "popcorn": "🍿", + "post_office": "🏣", + "postal_horn": "📯", + "postbox": "📮", + "pot_of_food": "🍲", + "potable_water": "🚰", + "potato": "🥔", + "poultry_leg": "🍗", + "pound_banknote": "💷", + "pouting_cat_face": "😾", + "pouting_face": "😡", + "prayer_beads": "📿", + "pregnant_woman": "🤰", + "pregnant_woman_dark_skin_tone": "🤰🏿", + "pregnant_woman_light_skin_tone": "🤰🏻", + "pregnant_woman_medium-dark_skin_tone": "🤰🏾", + 
"pregnant_woman_medium-light_skin_tone": "🤰🏼", + "pregnant_woman_medium_skin_tone": "🤰🏽", + "pretzel": "🥨", + "probing_cane": "🦯", + "prince": "🤴", + "prince_dark_skin_tone": "🤴🏿", + "prince_light_skin_tone": "🤴🏻", + "prince_medium-dark_skin_tone": "🤴🏾", + "prince_medium-light_skin_tone": "🤴🏼", + "prince_medium_skin_tone": "🤴🏽", + "princess": "👸", + "princess_dark_skin_tone": "👸🏿", + "princess_light_skin_tone": "👸🏻", + "princess_medium-dark_skin_tone": "👸🏾", + "princess_medium-light_skin_tone": "👸🏼", + "princess_medium_skin_tone": "👸🏽", + "printer": "🖨", + "prohibited": "🚫", + "purple_circle": "🟣", + "purple_heart": "💜", + "purple_square": "🟪", + "purse": "👛", + "pushpin": "📌", + "question_mark": "❓", + "rabbit": "🐰", + "rabbit_face": "🐰", + "raccoon": "🦝", + "racing_car": "🏎", + "radio": "📻", + "radio_button": "🔘", + "radioactive": "☢", + "railway_car": "🚃", + "railway_track": "🛤", + "rainbow": "🌈", + "rainbow_flag": "🏳️\u200d🌈", + "raised_back_of_hand": "🤚", + "raised_back_of_hand_dark_skin_tone": "🤚🏿", + "raised_back_of_hand_light_skin_tone": "🤚🏻", + "raised_back_of_hand_medium-dark_skin_tone": "🤚🏾", + "raised_back_of_hand_medium-light_skin_tone": "🤚🏼", + "raised_back_of_hand_medium_skin_tone": "🤚🏽", + "raised_fist": "✊", + "raised_fist_dark_skin_tone": "✊🏿", + "raised_fist_light_skin_tone": "✊🏻", + "raised_fist_medium-dark_skin_tone": "✊🏾", + "raised_fist_medium-light_skin_tone": "✊🏼", + "raised_fist_medium_skin_tone": "✊🏽", + "raised_hand": "✋", + "raised_hand_dark_skin_tone": "✋🏿", + "raised_hand_light_skin_tone": "✋🏻", + "raised_hand_medium-dark_skin_tone": "✋🏾", + "raised_hand_medium-light_skin_tone": "✋🏼", + "raised_hand_medium_skin_tone": "✋🏽", + "raising_hands": "🙌", + "raising_hands_dark_skin_tone": "🙌🏿", + "raising_hands_light_skin_tone": "🙌🏻", + "raising_hands_medium-dark_skin_tone": "🙌🏾", + "raising_hands_medium-light_skin_tone": "🙌🏼", + "raising_hands_medium_skin_tone": "🙌🏽", + "ram": "🐏", + "rat": "🐀", + "razor": "🪒", + "ringed_planet": "🪐", + "receipt": "🧾", + "record_button": "⏺", + "recycling_symbol": "♻", + "red_apple": "🍎", + "red_circle": "🔴", + "red_envelope": "🧧", + "red_hair": "🦰", + "red-haired_man": "👨\u200d🦰", + "red-haired_woman": "👩\u200d🦰", + "red_heart": "❤", + "red_paper_lantern": "🏮", + "red_square": "🟥", + "red_triangle_pointed_down": "🔻", + "red_triangle_pointed_up": "🔺", + "registered": "®", + "relieved_face": "😌", + "reminder_ribbon": "🎗", + "repeat_button": "🔁", + "repeat_single_button": "🔂", + "rescue_worker’s_helmet": "⛑", + "restroom": "🚻", + "reverse_button": "◀", + "revolving_hearts": "💞", + "rhinoceros": "🦏", + "ribbon": "🎀", + "rice_ball": "🍙", + "rice_cracker": "🍘", + "right-facing_fist": "🤜", + "right-facing_fist_dark_skin_tone": "🤜🏿", + "right-facing_fist_light_skin_tone": "🤜🏻", + "right-facing_fist_medium-dark_skin_tone": "🤜🏾", + "right-facing_fist_medium-light_skin_tone": "🤜🏼", + "right-facing_fist_medium_skin_tone": "🤜🏽", + "right_anger_bubble": "🗯", + "right_arrow": "➡", + "right_arrow_curving_down": "⤵", + "right_arrow_curving_left": "↩", + "right_arrow_curving_up": "⤴", + "ring": "💍", + "roasted_sweet_potato": "🍠", + "robot_face": "🤖", + "rocket": "🚀", + "roll_of_paper": "🧻", + "rolled-up_newspaper": "🗞", + "roller_coaster": "🎢", + "rolling_on_the_floor_laughing": "🤣", + "rooster": "🐓", + "rose": "🌹", + "rosette": "🏵", + "round_pushpin": "📍", + "rugby_football": "🏉", + "running_shirt": "🎽", + "running_shoe": "👟", + "sad_but_relieved_face": "😥", + "safety_pin": "🧷", + "safety_vest": "🦺", + "salt": "🧂", + "sailboat": "⛵", + "sake": "🍶", 
+ "sandwich": "🥪", + "sari": "🥻", + "satellite": "📡", + "satellite_antenna": "📡", + "sauropod": "🦕", + "saxophone": "🎷", + "scarf": "🧣", + "school": "🏫", + "school_backpack": "🎒", + "scissors": "✂", + "scorpion": "🦂", + "scroll": "📜", + "seat": "💺", + "see-no-evil_monkey": "🙈", + "seedling": "🌱", + "selfie": "🤳", + "selfie_dark_skin_tone": "🤳🏿", + "selfie_light_skin_tone": "🤳🏻", + "selfie_medium-dark_skin_tone": "🤳🏾", + "selfie_medium-light_skin_tone": "🤳🏼", + "selfie_medium_skin_tone": "🤳🏽", + "service_dog": "🐕\u200d🦺", + "seven-thirty": "🕢", + "seven_o’clock": "🕖", + "shallow_pan_of_food": "🥘", + "shamrock": "☘", + "shark": "🦈", + "shaved_ice": "🍧", + "sheaf_of_rice": "🌾", + "shield": "🛡", + "shinto_shrine": "⛩", + "ship": "🚢", + "shooting_star": "🌠", + "shopping_bags": "🛍", + "shopping_cart": "🛒", + "shortcake": "🍰", + "shorts": "🩳", + "shower": "🚿", + "shrimp": "🦐", + "shuffle_tracks_button": "🔀", + "shushing_face": "🤫", + "sign_of_the_horns": "🤘", + "sign_of_the_horns_dark_skin_tone": "🤘🏿", + "sign_of_the_horns_light_skin_tone": "🤘🏻", + "sign_of_the_horns_medium-dark_skin_tone": "🤘🏾", + "sign_of_the_horns_medium-light_skin_tone": "🤘🏼", + "sign_of_the_horns_medium_skin_tone": "🤘🏽", + "six-thirty": "🕡", + "six_o’clock": "🕕", + "skateboard": "🛹", + "skier": "⛷", + "skis": "🎿", + "skull": "💀", + "skull_and_crossbones": "☠", + "skunk": "🦨", + "sled": "🛷", + "sleeping_face": "😴", + "sleepy_face": "😪", + "slightly_frowning_face": "🙁", + "slightly_smiling_face": "🙂", + "slot_machine": "🎰", + "sloth": "🦥", + "small_airplane": "🛩", + "small_blue_diamond": "🔹", + "small_orange_diamond": "🔸", + "smiling_cat_face_with_heart-eyes": "😻", + "smiling_face": "☺", + "smiling_face_with_halo": "😇", + "smiling_face_with_3_hearts": "🥰", + "smiling_face_with_heart-eyes": "😍", + "smiling_face_with_horns": "😈", + "smiling_face_with_smiling_eyes": "😊", + "smiling_face_with_sunglasses": "😎", + "smirking_face": "😏", + "snail": "🐌", + "snake": "🐍", + "sneezing_face": "🤧", + "snow-capped_mountain": "🏔", + "snowboarder": "🏂", + "snowboarder_dark_skin_tone": "🏂🏿", + "snowboarder_light_skin_tone": "🏂🏻", + "snowboarder_medium-dark_skin_tone": "🏂🏾", + "snowboarder_medium-light_skin_tone": "🏂🏼", + "snowboarder_medium_skin_tone": "🏂🏽", + "snowflake": "❄", + "snowman": "☃", + "snowman_without_snow": "⛄", + "soap": "🧼", + "soccer_ball": "⚽", + "socks": "🧦", + "softball": "🥎", + "soft_ice_cream": "🍦", + "spade_suit": "♠", + "spaghetti": "🍝", + "sparkle": "❇", + "sparkler": "🎇", + "sparkles": "✨", + "sparkling_heart": "💖", + "speak-no-evil_monkey": "🙊", + "speaker_high_volume": "🔊", + "speaker_low_volume": "🔈", + "speaker_medium_volume": "🔉", + "speaking_head": "🗣", + "speech_balloon": "💬", + "speedboat": "🚤", + "spider": "🕷", + "spider_web": "🕸", + "spiral_calendar": "🗓", + "spiral_notepad": "🗒", + "spiral_shell": "🐚", + "spoon": "🥄", + "sponge": "🧽", + "sport_utility_vehicle": "🚙", + "sports_medal": "🏅", + "spouting_whale": "🐳", + "squid": "🦑", + "squinting_face_with_tongue": "😝", + "stadium": "🏟", + "star-struck": "🤩", + "star_and_crescent": "☪", + "star_of_david": "✡", + "station": "🚉", + "steaming_bowl": "🍜", + "stethoscope": "🩺", + "stop_button": "⏹", + "stop_sign": "🛑", + "stopwatch": "⏱", + "straight_ruler": "📏", + "strawberry": "🍓", + "studio_microphone": "🎙", + "stuffed_flatbread": "🥙", + "sun": "☀", + "sun_behind_cloud": "⛅", + "sun_behind_large_cloud": "🌥", + "sun_behind_rain_cloud": "🌦", + "sun_behind_small_cloud": "🌤", + "sun_with_face": "🌞", + "sunflower": "🌻", + "sunglasses": "😎", + "sunrise": "🌅", + 
"sunrise_over_mountains": "🌄", + "sunset": "🌇", + "superhero": "🦸", + "supervillain": "🦹", + "sushi": "🍣", + "suspension_railway": "🚟", + "swan": "🦢", + "sweat_droplets": "💦", + "synagogue": "🕍", + "syringe": "💉", + "t-shirt": "👕", + "taco": "🌮", + "takeout_box": "🥡", + "tanabata_tree": "🎋", + "tangerine": "🍊", + "taxi": "🚕", + "teacup_without_handle": "🍵", + "tear-off_calendar": "📆", + "teddy_bear": "🧸", + "telephone": "☎", + "telephone_receiver": "📞", + "telescope": "🔭", + "television": "📺", + "ten-thirty": "🕥", + "ten_o’clock": "🕙", + "tennis": "🎾", + "tent": "⛺", + "test_tube": "🧪", + "thermometer": "🌡", + "thinking_face": "🤔", + "thought_balloon": "💭", + "thread": "🧵", + "three-thirty": "🕞", + "three_o’clock": "🕒", + "thumbs_down": "👎", + "thumbs_down_dark_skin_tone": "👎🏿", + "thumbs_down_light_skin_tone": "👎🏻", + "thumbs_down_medium-dark_skin_tone": "👎🏾", + "thumbs_down_medium-light_skin_tone": "👎🏼", + "thumbs_down_medium_skin_tone": "👎🏽", + "thumbs_up": "👍", + "thumbs_up_dark_skin_tone": "👍🏿", + "thumbs_up_light_skin_tone": "👍🏻", + "thumbs_up_medium-dark_skin_tone": "👍🏾", + "thumbs_up_medium-light_skin_tone": "👍🏼", + "thumbs_up_medium_skin_tone": "👍🏽", + "ticket": "🎫", + "tiger": "🐯", + "tiger_face": "🐯", + "timer_clock": "⏲", + "tired_face": "😫", + "toolbox": "🧰", + "toilet": "🚽", + "tomato": "🍅", + "tongue": "👅", + "tooth": "🦷", + "top_hat": "🎩", + "tornado": "🌪", + "trackball": "🖲", + "tractor": "🚜", + "trade_mark": "™", + "train": "🚋", + "tram": "🚊", + "tram_car": "🚋", + "triangular_flag": "🚩", + "triangular_ruler": "📐", + "trident_emblem": "🔱", + "trolleybus": "🚎", + "trophy": "🏆", + "tropical_drink": "🍹", + "tropical_fish": "🐠", + "trumpet": "🎺", + "tulip": "🌷", + "tumbler_glass": "🥃", + "turtle": "🐢", + "twelve-thirty": "🕧", + "twelve_o’clock": "🕛", + "two-hump_camel": "🐫", + "two-thirty": "🕝", + "two_hearts": "💕", + "two_men_holding_hands": "👬", + "two_o’clock": "🕑", + "two_women_holding_hands": "👭", + "umbrella": "☂", + "umbrella_on_ground": "⛱", + "umbrella_with_rain_drops": "☔", + "unamused_face": "😒", + "unicorn_face": "🦄", + "unlocked": "🔓", + "up-down_arrow": "↕", + "up-left_arrow": "↖", + "up-right_arrow": "↗", + "up_arrow": "⬆", + "upside-down_face": "🙃", + "upwards_button": "🔼", + "vampire": "🧛", + "vampire_dark_skin_tone": "🧛🏿", + "vampire_light_skin_tone": "🧛🏻", + "vampire_medium-dark_skin_tone": "🧛🏾", + "vampire_medium-light_skin_tone": "🧛🏼", + "vampire_medium_skin_tone": "🧛🏽", + "vertical_traffic_light": "🚦", + "vibration_mode": "📳", + "victory_hand": "✌", + "victory_hand_dark_skin_tone": "✌🏿", + "victory_hand_light_skin_tone": "✌🏻", + "victory_hand_medium-dark_skin_tone": "✌🏾", + "victory_hand_medium-light_skin_tone": "✌🏼", + "victory_hand_medium_skin_tone": "✌🏽", + "video_camera": "📹", + "video_game": "🎮", + "videocassette": "📼", + "violin": "🎻", + "volcano": "🌋", + "volleyball": "🏐", + "vulcan_salute": "🖖", + "vulcan_salute_dark_skin_tone": "🖖🏿", + "vulcan_salute_light_skin_tone": "🖖🏻", + "vulcan_salute_medium-dark_skin_tone": "🖖🏾", + "vulcan_salute_medium-light_skin_tone": "🖖🏼", + "vulcan_salute_medium_skin_tone": "🖖🏽", + "waffle": "🧇", + "waning_crescent_moon": "🌘", + "waning_gibbous_moon": "🌖", + "warning": "⚠", + "wastebasket": "🗑", + "watch": "⌚", + "water_buffalo": "🐃", + "water_closet": "🚾", + "water_wave": "🌊", + "watermelon": "🍉", + "waving_hand": "👋", + "waving_hand_dark_skin_tone": "👋🏿", + "waving_hand_light_skin_tone": "👋🏻", + "waving_hand_medium-dark_skin_tone": "👋🏾", + "waving_hand_medium-light_skin_tone": "👋🏼", + "waving_hand_medium_skin_tone": 
"👋🏽", + "wavy_dash": "〰", + "waxing_crescent_moon": "🌒", + "waxing_gibbous_moon": "🌔", + "weary_cat_face": "🙀", + "weary_face": "😩", + "wedding": "💒", + "whale": "🐳", + "wheel_of_dharma": "☸", + "wheelchair_symbol": "♿", + "white_circle": "⚪", + "white_exclamation_mark": "❕", + "white_flag": "🏳", + "white_flower": "💮", + "white_hair": "🦳", + "white-haired_man": "👨\u200d🦳", + "white-haired_woman": "👩\u200d🦳", + "white_heart": "🤍", + "white_heavy_check_mark": "✅", + "white_large_square": "⬜", + "white_medium-small_square": "◽", + "white_medium_square": "◻", + "white_medium_star": "⭐", + "white_question_mark": "❔", + "white_small_square": "▫", + "white_square_button": "🔳", + "wilted_flower": "🥀", + "wind_chime": "🎐", + "wind_face": "🌬", + "wine_glass": "🍷", + "winking_face": "😉", + "winking_face_with_tongue": "😜", + "wolf_face": "🐺", + "woman": "👩", + "woman_artist": "👩\u200d🎨", + "woman_artist_dark_skin_tone": "👩🏿\u200d🎨", + "woman_artist_light_skin_tone": "👩🏻\u200d🎨", + "woman_artist_medium-dark_skin_tone": "👩🏾\u200d🎨", + "woman_artist_medium-light_skin_tone": "👩🏼\u200d🎨", + "woman_artist_medium_skin_tone": "👩🏽\u200d🎨", + "woman_astronaut": "👩\u200d🚀", + "woman_astronaut_dark_skin_tone": "👩🏿\u200d🚀", + "woman_astronaut_light_skin_tone": "👩🏻\u200d🚀", + "woman_astronaut_medium-dark_skin_tone": "👩🏾\u200d🚀", + "woman_astronaut_medium-light_skin_tone": "👩🏼\u200d🚀", + "woman_astronaut_medium_skin_tone": "👩🏽\u200d🚀", + "woman_biking": "🚴\u200d♀️", + "woman_biking_dark_skin_tone": "🚴🏿\u200d♀️", + "woman_biking_light_skin_tone": "🚴🏻\u200d♀️", + "woman_biking_medium-dark_skin_tone": "🚴🏾\u200d♀️", + "woman_biking_medium-light_skin_tone": "🚴🏼\u200d♀️", + "woman_biking_medium_skin_tone": "🚴🏽\u200d♀️", + "woman_bouncing_ball": "⛹️\u200d♀️", + "woman_bouncing_ball_dark_skin_tone": "⛹🏿\u200d♀️", + "woman_bouncing_ball_light_skin_tone": "⛹🏻\u200d♀️", + "woman_bouncing_ball_medium-dark_skin_tone": "⛹🏾\u200d♀️", + "woman_bouncing_ball_medium-light_skin_tone": "⛹🏼\u200d♀️", + "woman_bouncing_ball_medium_skin_tone": "⛹🏽\u200d♀️", + "woman_bowing": "🙇\u200d♀️", + "woman_bowing_dark_skin_tone": "🙇🏿\u200d♀️", + "woman_bowing_light_skin_tone": "🙇🏻\u200d♀️", + "woman_bowing_medium-dark_skin_tone": "🙇🏾\u200d♀️", + "woman_bowing_medium-light_skin_tone": "🙇🏼\u200d♀️", + "woman_bowing_medium_skin_tone": "🙇🏽\u200d♀️", + "woman_cartwheeling": "🤸\u200d♀️", + "woman_cartwheeling_dark_skin_tone": "🤸🏿\u200d♀️", + "woman_cartwheeling_light_skin_tone": "🤸🏻\u200d♀️", + "woman_cartwheeling_medium-dark_skin_tone": "🤸🏾\u200d♀️", + "woman_cartwheeling_medium-light_skin_tone": "🤸🏼\u200d♀️", + "woman_cartwheeling_medium_skin_tone": "🤸🏽\u200d♀️", + "woman_climbing": "🧗\u200d♀️", + "woman_climbing_dark_skin_tone": "🧗🏿\u200d♀️", + "woman_climbing_light_skin_tone": "🧗🏻\u200d♀️", + "woman_climbing_medium-dark_skin_tone": "🧗🏾\u200d♀️", + "woman_climbing_medium-light_skin_tone": "🧗🏼\u200d♀️", + "woman_climbing_medium_skin_tone": "🧗🏽\u200d♀️", + "woman_construction_worker": "👷\u200d♀️", + "woman_construction_worker_dark_skin_tone": "👷🏿\u200d♀️", + "woman_construction_worker_light_skin_tone": "👷🏻\u200d♀️", + "woman_construction_worker_medium-dark_skin_tone": "👷🏾\u200d♀️", + "woman_construction_worker_medium-light_skin_tone": "👷🏼\u200d♀️", + "woman_construction_worker_medium_skin_tone": "👷🏽\u200d♀️", + "woman_cook": "👩\u200d🍳", + "woman_cook_dark_skin_tone": "👩🏿\u200d🍳", + "woman_cook_light_skin_tone": "👩🏻\u200d🍳", + "woman_cook_medium-dark_skin_tone": "👩🏾\u200d🍳", + "woman_cook_medium-light_skin_tone": "👩🏼\u200d🍳", + 
"woman_cook_medium_skin_tone": "👩🏽\u200d🍳", + "woman_dancing": "💃", + "woman_dancing_dark_skin_tone": "💃🏿", + "woman_dancing_light_skin_tone": "💃🏻", + "woman_dancing_medium-dark_skin_tone": "💃🏾", + "woman_dancing_medium-light_skin_tone": "💃🏼", + "woman_dancing_medium_skin_tone": "💃🏽", + "woman_dark_skin_tone": "👩🏿", + "woman_detective": "🕵️\u200d♀️", + "woman_detective_dark_skin_tone": "🕵🏿\u200d♀️", + "woman_detective_light_skin_tone": "🕵🏻\u200d♀️", + "woman_detective_medium-dark_skin_tone": "🕵🏾\u200d♀️", + "woman_detective_medium-light_skin_tone": "🕵🏼\u200d♀️", + "woman_detective_medium_skin_tone": "🕵🏽\u200d♀️", + "woman_elf": "🧝\u200d♀️", + "woman_elf_dark_skin_tone": "🧝🏿\u200d♀️", + "woman_elf_light_skin_tone": "🧝🏻\u200d♀️", + "woman_elf_medium-dark_skin_tone": "🧝🏾\u200d♀️", + "woman_elf_medium-light_skin_tone": "🧝🏼\u200d♀️", + "woman_elf_medium_skin_tone": "🧝🏽\u200d♀️", + "woman_facepalming": "🤦\u200d♀️", + "woman_facepalming_dark_skin_tone": "🤦🏿\u200d♀️", + "woman_facepalming_light_skin_tone": "🤦🏻\u200d♀️", + "woman_facepalming_medium-dark_skin_tone": "🤦🏾\u200d♀️", + "woman_facepalming_medium-light_skin_tone": "🤦🏼\u200d♀️", + "woman_facepalming_medium_skin_tone": "🤦🏽\u200d♀️", + "woman_factory_worker": "👩\u200d🏭", + "woman_factory_worker_dark_skin_tone": "👩🏿\u200d🏭", + "woman_factory_worker_light_skin_tone": "👩🏻\u200d🏭", + "woman_factory_worker_medium-dark_skin_tone": "👩🏾\u200d🏭", + "woman_factory_worker_medium-light_skin_tone": "👩🏼\u200d🏭", + "woman_factory_worker_medium_skin_tone": "👩🏽\u200d🏭", + "woman_fairy": "🧚\u200d♀️", + "woman_fairy_dark_skin_tone": "🧚🏿\u200d♀️", + "woman_fairy_light_skin_tone": "🧚🏻\u200d♀️", + "woman_fairy_medium-dark_skin_tone": "🧚🏾\u200d♀️", + "woman_fairy_medium-light_skin_tone": "🧚🏼\u200d♀️", + "woman_fairy_medium_skin_tone": "🧚🏽\u200d♀️", + "woman_farmer": "👩\u200d🌾", + "woman_farmer_dark_skin_tone": "👩🏿\u200d🌾", + "woman_farmer_light_skin_tone": "👩🏻\u200d🌾", + "woman_farmer_medium-dark_skin_tone": "👩🏾\u200d🌾", + "woman_farmer_medium-light_skin_tone": "👩🏼\u200d🌾", + "woman_farmer_medium_skin_tone": "👩🏽\u200d🌾", + "woman_firefighter": "👩\u200d🚒", + "woman_firefighter_dark_skin_tone": "👩🏿\u200d🚒", + "woman_firefighter_light_skin_tone": "👩🏻\u200d🚒", + "woman_firefighter_medium-dark_skin_tone": "👩🏾\u200d🚒", + "woman_firefighter_medium-light_skin_tone": "👩🏼\u200d🚒", + "woman_firefighter_medium_skin_tone": "👩🏽\u200d🚒", + "woman_frowning": "🙍\u200d♀️", + "woman_frowning_dark_skin_tone": "🙍🏿\u200d♀️", + "woman_frowning_light_skin_tone": "🙍🏻\u200d♀️", + "woman_frowning_medium-dark_skin_tone": "🙍🏾\u200d♀️", + "woman_frowning_medium-light_skin_tone": "🙍🏼\u200d♀️", + "woman_frowning_medium_skin_tone": "🙍🏽\u200d♀️", + "woman_genie": "🧞\u200d♀️", + "woman_gesturing_no": "🙅\u200d♀️", + "woman_gesturing_no_dark_skin_tone": "🙅🏿\u200d♀️", + "woman_gesturing_no_light_skin_tone": "🙅🏻\u200d♀️", + "woman_gesturing_no_medium-dark_skin_tone": "🙅🏾\u200d♀️", + "woman_gesturing_no_medium-light_skin_tone": "🙅🏼\u200d♀️", + "woman_gesturing_no_medium_skin_tone": "🙅🏽\u200d♀️", + "woman_gesturing_ok": "🙆\u200d♀️", + "woman_gesturing_ok_dark_skin_tone": "🙆🏿\u200d♀️", + "woman_gesturing_ok_light_skin_tone": "🙆🏻\u200d♀️", + "woman_gesturing_ok_medium-dark_skin_tone": "🙆🏾\u200d♀️", + "woman_gesturing_ok_medium-light_skin_tone": "🙆🏼\u200d♀️", + "woman_gesturing_ok_medium_skin_tone": "🙆🏽\u200d♀️", + "woman_getting_haircut": "💇\u200d♀️", + "woman_getting_haircut_dark_skin_tone": "💇🏿\u200d♀️", + "woman_getting_haircut_light_skin_tone": "💇🏻\u200d♀️", + 
"woman_getting_haircut_medium-dark_skin_tone": "💇🏾\u200d♀️", + "woman_getting_haircut_medium-light_skin_tone": "💇🏼\u200d♀️", + "woman_getting_haircut_medium_skin_tone": "💇🏽\u200d♀️", + "woman_getting_massage": "💆\u200d♀️", + "woman_getting_massage_dark_skin_tone": "💆🏿\u200d♀️", + "woman_getting_massage_light_skin_tone": "💆🏻\u200d♀️", + "woman_getting_massage_medium-dark_skin_tone": "💆🏾\u200d♀️", + "woman_getting_massage_medium-light_skin_tone": "💆🏼\u200d♀️", + "woman_getting_massage_medium_skin_tone": "💆🏽\u200d♀️", + "woman_golfing": "🏌️\u200d♀️", + "woman_golfing_dark_skin_tone": "🏌🏿\u200d♀️", + "woman_golfing_light_skin_tone": "🏌🏻\u200d♀️", + "woman_golfing_medium-dark_skin_tone": "🏌🏾\u200d♀️", + "woman_golfing_medium-light_skin_tone": "🏌🏼\u200d♀️", + "woman_golfing_medium_skin_tone": "🏌🏽\u200d♀️", + "woman_guard": "💂\u200d♀️", + "woman_guard_dark_skin_tone": "💂🏿\u200d♀️", + "woman_guard_light_skin_tone": "💂🏻\u200d♀️", + "woman_guard_medium-dark_skin_tone": "💂🏾\u200d♀️", + "woman_guard_medium-light_skin_tone": "💂🏼\u200d♀️", + "woman_guard_medium_skin_tone": "💂🏽\u200d♀️", + "woman_health_worker": "👩\u200d⚕️", + "woman_health_worker_dark_skin_tone": "👩🏿\u200d⚕️", + "woman_health_worker_light_skin_tone": "👩🏻\u200d⚕️", + "woman_health_worker_medium-dark_skin_tone": "👩🏾\u200d⚕️", + "woman_health_worker_medium-light_skin_tone": "👩🏼\u200d⚕️", + "woman_health_worker_medium_skin_tone": "👩🏽\u200d⚕️", + "woman_in_lotus_position": "🧘\u200d♀️", + "woman_in_lotus_position_dark_skin_tone": "🧘🏿\u200d♀️", + "woman_in_lotus_position_light_skin_tone": "🧘🏻\u200d♀️", + "woman_in_lotus_position_medium-dark_skin_tone": "🧘🏾\u200d♀️", + "woman_in_lotus_position_medium-light_skin_tone": "🧘🏼\u200d♀️", + "woman_in_lotus_position_medium_skin_tone": "🧘🏽\u200d♀️", + "woman_in_manual_wheelchair": "👩\u200d🦽", + "woman_in_motorized_wheelchair": "👩\u200d🦼", + "woman_in_steamy_room": "🧖\u200d♀️", + "woman_in_steamy_room_dark_skin_tone": "🧖🏿\u200d♀️", + "woman_in_steamy_room_light_skin_tone": "🧖🏻\u200d♀️", + "woman_in_steamy_room_medium-dark_skin_tone": "🧖🏾\u200d♀️", + "woman_in_steamy_room_medium-light_skin_tone": "🧖🏼\u200d♀️", + "woman_in_steamy_room_medium_skin_tone": "🧖🏽\u200d♀️", + "woman_judge": "👩\u200d⚖️", + "woman_judge_dark_skin_tone": "👩🏿\u200d⚖️", + "woman_judge_light_skin_tone": "👩🏻\u200d⚖️", + "woman_judge_medium-dark_skin_tone": "👩🏾\u200d⚖️", + "woman_judge_medium-light_skin_tone": "👩🏼\u200d⚖️", + "woman_judge_medium_skin_tone": "👩🏽\u200d⚖️", + "woman_juggling": "🤹\u200d♀️", + "woman_juggling_dark_skin_tone": "🤹🏿\u200d♀️", + "woman_juggling_light_skin_tone": "🤹🏻\u200d♀️", + "woman_juggling_medium-dark_skin_tone": "🤹🏾\u200d♀️", + "woman_juggling_medium-light_skin_tone": "🤹🏼\u200d♀️", + "woman_juggling_medium_skin_tone": "🤹🏽\u200d♀️", + "woman_lifting_weights": "🏋️\u200d♀️", + "woman_lifting_weights_dark_skin_tone": "🏋🏿\u200d♀️", + "woman_lifting_weights_light_skin_tone": "🏋🏻\u200d♀️", + "woman_lifting_weights_medium-dark_skin_tone": "🏋🏾\u200d♀️", + "woman_lifting_weights_medium-light_skin_tone": "🏋🏼\u200d♀️", + "woman_lifting_weights_medium_skin_tone": "🏋🏽\u200d♀️", + "woman_light_skin_tone": "👩🏻", + "woman_mage": "🧙\u200d♀️", + "woman_mage_dark_skin_tone": "🧙🏿\u200d♀️", + "woman_mage_light_skin_tone": "🧙🏻\u200d♀️", + "woman_mage_medium-dark_skin_tone": "🧙🏾\u200d♀️", + "woman_mage_medium-light_skin_tone": "🧙🏼\u200d♀️", + "woman_mage_medium_skin_tone": "🧙🏽\u200d♀️", + "woman_mechanic": "👩\u200d🔧", + "woman_mechanic_dark_skin_tone": "👩🏿\u200d🔧", + "woman_mechanic_light_skin_tone": "👩🏻\u200d🔧", + 
"woman_mechanic_medium-dark_skin_tone": "👩🏾\u200d🔧", + "woman_mechanic_medium-light_skin_tone": "👩🏼\u200d🔧", + "woman_mechanic_medium_skin_tone": "👩🏽\u200d🔧", + "woman_medium-dark_skin_tone": "👩🏾", + "woman_medium-light_skin_tone": "👩🏼", + "woman_medium_skin_tone": "👩🏽", + "woman_mountain_biking": "🚵\u200d♀️", + "woman_mountain_biking_dark_skin_tone": "🚵🏿\u200d♀️", + "woman_mountain_biking_light_skin_tone": "🚵🏻\u200d♀️", + "woman_mountain_biking_medium-dark_skin_tone": "🚵🏾\u200d♀️", + "woman_mountain_biking_medium-light_skin_tone": "🚵🏼\u200d♀️", + "woman_mountain_biking_medium_skin_tone": "🚵🏽\u200d♀️", + "woman_office_worker": "👩\u200d💼", + "woman_office_worker_dark_skin_tone": "👩🏿\u200d💼", + "woman_office_worker_light_skin_tone": "👩🏻\u200d💼", + "woman_office_worker_medium-dark_skin_tone": "👩🏾\u200d💼", + "woman_office_worker_medium-light_skin_tone": "👩🏼\u200d💼", + "woman_office_worker_medium_skin_tone": "👩🏽\u200d💼", + "woman_pilot": "👩\u200d✈️", + "woman_pilot_dark_skin_tone": "👩🏿\u200d✈️", + "woman_pilot_light_skin_tone": "👩🏻\u200d✈️", + "woman_pilot_medium-dark_skin_tone": "👩🏾\u200d✈️", + "woman_pilot_medium-light_skin_tone": "👩🏼\u200d✈️", + "woman_pilot_medium_skin_tone": "👩🏽\u200d✈️", + "woman_playing_handball": "🤾\u200d♀️", + "woman_playing_handball_dark_skin_tone": "🤾🏿\u200d♀️", + "woman_playing_handball_light_skin_tone": "🤾🏻\u200d♀️", + "woman_playing_handball_medium-dark_skin_tone": "🤾🏾\u200d♀️", + "woman_playing_handball_medium-light_skin_tone": "🤾🏼\u200d♀️", + "woman_playing_handball_medium_skin_tone": "🤾🏽\u200d♀️", + "woman_playing_water_polo": "🤽\u200d♀️", + "woman_playing_water_polo_dark_skin_tone": "🤽🏿\u200d♀️", + "woman_playing_water_polo_light_skin_tone": "🤽🏻\u200d♀️", + "woman_playing_water_polo_medium-dark_skin_tone": "🤽🏾\u200d♀️", + "woman_playing_water_polo_medium-light_skin_tone": "🤽🏼\u200d♀️", + "woman_playing_water_polo_medium_skin_tone": "🤽🏽\u200d♀️", + "woman_police_officer": "👮\u200d♀️", + "woman_police_officer_dark_skin_tone": "👮🏿\u200d♀️", + "woman_police_officer_light_skin_tone": "👮🏻\u200d♀️", + "woman_police_officer_medium-dark_skin_tone": "👮🏾\u200d♀️", + "woman_police_officer_medium-light_skin_tone": "👮🏼\u200d♀️", + "woman_police_officer_medium_skin_tone": "👮🏽\u200d♀️", + "woman_pouting": "🙎\u200d♀️", + "woman_pouting_dark_skin_tone": "🙎🏿\u200d♀️", + "woman_pouting_light_skin_tone": "🙎🏻\u200d♀️", + "woman_pouting_medium-dark_skin_tone": "🙎🏾\u200d♀️", + "woman_pouting_medium-light_skin_tone": "🙎🏼\u200d♀️", + "woman_pouting_medium_skin_tone": "🙎🏽\u200d♀️", + "woman_raising_hand": "🙋\u200d♀️", + "woman_raising_hand_dark_skin_tone": "🙋🏿\u200d♀️", + "woman_raising_hand_light_skin_tone": "🙋🏻\u200d♀️", + "woman_raising_hand_medium-dark_skin_tone": "🙋🏾\u200d♀️", + "woman_raising_hand_medium-light_skin_tone": "🙋🏼\u200d♀️", + "woman_raising_hand_medium_skin_tone": "🙋🏽\u200d♀️", + "woman_rowing_boat": "🚣\u200d♀️", + "woman_rowing_boat_dark_skin_tone": "🚣🏿\u200d♀️", + "woman_rowing_boat_light_skin_tone": "🚣🏻\u200d♀️", + "woman_rowing_boat_medium-dark_skin_tone": "🚣🏾\u200d♀️", + "woman_rowing_boat_medium-light_skin_tone": "🚣🏼\u200d♀️", + "woman_rowing_boat_medium_skin_tone": "🚣🏽\u200d♀️", + "woman_running": "🏃\u200d♀️", + "woman_running_dark_skin_tone": "🏃🏿\u200d♀️", + "woman_running_light_skin_tone": "🏃🏻\u200d♀️", + "woman_running_medium-dark_skin_tone": "🏃🏾\u200d♀️", + "woman_running_medium-light_skin_tone": "🏃🏼\u200d♀️", + "woman_running_medium_skin_tone": "🏃🏽\u200d♀️", + "woman_scientist": "👩\u200d🔬", + "woman_scientist_dark_skin_tone": "👩🏿\u200d🔬", + 
"woman_scientist_light_skin_tone": "👩🏻\u200d🔬", + "woman_scientist_medium-dark_skin_tone": "👩🏾\u200d🔬", + "woman_scientist_medium-light_skin_tone": "👩🏼\u200d🔬", + "woman_scientist_medium_skin_tone": "👩🏽\u200d🔬", + "woman_shrugging": "🤷\u200d♀️", + "woman_shrugging_dark_skin_tone": "🤷🏿\u200d♀️", + "woman_shrugging_light_skin_tone": "🤷🏻\u200d♀️", + "woman_shrugging_medium-dark_skin_tone": "🤷🏾\u200d♀️", + "woman_shrugging_medium-light_skin_tone": "🤷🏼\u200d♀️", + "woman_shrugging_medium_skin_tone": "🤷🏽\u200d♀️", + "woman_singer": "👩\u200d🎤", + "woman_singer_dark_skin_tone": "👩🏿\u200d🎤", + "woman_singer_light_skin_tone": "👩🏻\u200d🎤", + "woman_singer_medium-dark_skin_tone": "👩🏾\u200d🎤", + "woman_singer_medium-light_skin_tone": "👩🏼\u200d🎤", + "woman_singer_medium_skin_tone": "👩🏽\u200d🎤", + "woman_student": "👩\u200d🎓", + "woman_student_dark_skin_tone": "👩🏿\u200d🎓", + "woman_student_light_skin_tone": "👩🏻\u200d🎓", + "woman_student_medium-dark_skin_tone": "👩🏾\u200d🎓", + "woman_student_medium-light_skin_tone": "👩🏼\u200d🎓", + "woman_student_medium_skin_tone": "👩🏽\u200d🎓", + "woman_surfing": "🏄\u200d♀️", + "woman_surfing_dark_skin_tone": "🏄🏿\u200d♀️", + "woman_surfing_light_skin_tone": "🏄🏻\u200d♀️", + "woman_surfing_medium-dark_skin_tone": "🏄🏾\u200d♀️", + "woman_surfing_medium-light_skin_tone": "🏄🏼\u200d♀️", + "woman_surfing_medium_skin_tone": "🏄🏽\u200d♀️", + "woman_swimming": "🏊\u200d♀️", + "woman_swimming_dark_skin_tone": "🏊🏿\u200d♀️", + "woman_swimming_light_skin_tone": "🏊🏻\u200d♀️", + "woman_swimming_medium-dark_skin_tone": "🏊🏾\u200d♀️", + "woman_swimming_medium-light_skin_tone": "🏊🏼\u200d♀️", + "woman_swimming_medium_skin_tone": "🏊🏽\u200d♀️", + "woman_teacher": "👩\u200d🏫", + "woman_teacher_dark_skin_tone": "👩🏿\u200d🏫", + "woman_teacher_light_skin_tone": "👩🏻\u200d🏫", + "woman_teacher_medium-dark_skin_tone": "👩🏾\u200d🏫", + "woman_teacher_medium-light_skin_tone": "👩🏼\u200d🏫", + "woman_teacher_medium_skin_tone": "👩🏽\u200d🏫", + "woman_technologist": "👩\u200d💻", + "woman_technologist_dark_skin_tone": "👩🏿\u200d💻", + "woman_technologist_light_skin_tone": "👩🏻\u200d💻", + "woman_technologist_medium-dark_skin_tone": "👩🏾\u200d💻", + "woman_technologist_medium-light_skin_tone": "👩🏼\u200d💻", + "woman_technologist_medium_skin_tone": "👩🏽\u200d💻", + "woman_tipping_hand": "💁\u200d♀️", + "woman_tipping_hand_dark_skin_tone": "💁🏿\u200d♀️", + "woman_tipping_hand_light_skin_tone": "💁🏻\u200d♀️", + "woman_tipping_hand_medium-dark_skin_tone": "💁🏾\u200d♀️", + "woman_tipping_hand_medium-light_skin_tone": "💁🏼\u200d♀️", + "woman_tipping_hand_medium_skin_tone": "💁🏽\u200d♀️", + "woman_vampire": "🧛\u200d♀️", + "woman_vampire_dark_skin_tone": "🧛🏿\u200d♀️", + "woman_vampire_light_skin_tone": "🧛🏻\u200d♀️", + "woman_vampire_medium-dark_skin_tone": "🧛🏾\u200d♀️", + "woman_vampire_medium-light_skin_tone": "🧛🏼\u200d♀️", + "woman_vampire_medium_skin_tone": "🧛🏽\u200d♀️", + "woman_walking": "🚶\u200d♀️", + "woman_walking_dark_skin_tone": "🚶🏿\u200d♀️", + "woman_walking_light_skin_tone": "🚶🏻\u200d♀️", + "woman_walking_medium-dark_skin_tone": "🚶🏾\u200d♀️", + "woman_walking_medium-light_skin_tone": "🚶🏼\u200d♀️", + "woman_walking_medium_skin_tone": "🚶🏽\u200d♀️", + "woman_wearing_turban": "👳\u200d♀️", + "woman_wearing_turban_dark_skin_tone": "👳🏿\u200d♀️", + "woman_wearing_turban_light_skin_tone": "👳🏻\u200d♀️", + "woman_wearing_turban_medium-dark_skin_tone": "👳🏾\u200d♀️", + "woman_wearing_turban_medium-light_skin_tone": "👳🏼\u200d♀️", + "woman_wearing_turban_medium_skin_tone": "👳🏽\u200d♀️", + "woman_with_headscarf": "🧕", + 
"woman_with_headscarf_dark_skin_tone": "🧕🏿", + "woman_with_headscarf_light_skin_tone": "🧕🏻", + "woman_with_headscarf_medium-dark_skin_tone": "🧕🏾", + "woman_with_headscarf_medium-light_skin_tone": "🧕🏼", + "woman_with_headscarf_medium_skin_tone": "🧕🏽", + "woman_with_probing_cane": "👩\u200d🦯", + "woman_zombie": "🧟\u200d♀️", + "woman’s_boot": "👢", + "woman’s_clothes": "👚", + "woman’s_hat": "👒", + "woman’s_sandal": "👡", + "women_with_bunny_ears": "👯\u200d♀️", + "women_wrestling": "🤼\u200d♀️", + "women’s_room": "🚺", + "woozy_face": "🥴", + "world_map": "🗺", + "worried_face": "😟", + "wrapped_gift": "🎁", + "wrench": "🔧", + "writing_hand": "✍", + "writing_hand_dark_skin_tone": "✍🏿", + "writing_hand_light_skin_tone": "✍🏻", + "writing_hand_medium-dark_skin_tone": "✍🏾", + "writing_hand_medium-light_skin_tone": "✍🏼", + "writing_hand_medium_skin_tone": "✍🏽", + "yarn": "🧶", + "yawning_face": "🥱", + "yellow_circle": "🟡", + "yellow_heart": "💛", + "yellow_square": "🟨", + "yen_banknote": "💴", + "yo-yo": "🪀", + "yin_yang": "☯", + "zany_face": "🤪", + "zebra": "🦓", + "zipper-mouth_face": "🤐", + "zombie": "🧟", + "zzz": "💤", + "åland_islands": "🇦🇽", + "keycap_asterisk": "*⃣", + "keycap_digit_eight": "8⃣", + "keycap_digit_five": "5⃣", + "keycap_digit_four": "4⃣", + "keycap_digit_nine": "9⃣", + "keycap_digit_one": "1⃣", + "keycap_digit_seven": "7⃣", + "keycap_digit_six": "6⃣", + "keycap_digit_three": "3⃣", + "keycap_digit_two": "2⃣", + "keycap_digit_zero": "0⃣", + "keycap_number_sign": "#⃣", + "light_skin_tone": "🏻", + "medium_light_skin_tone": "🏼", + "medium_skin_tone": "🏽", + "medium_dark_skin_tone": "🏾", + "dark_skin_tone": "🏿", + "regional_indicator_symbol_letter_a": "🇦", + "regional_indicator_symbol_letter_b": "🇧", + "regional_indicator_symbol_letter_c": "🇨", + "regional_indicator_symbol_letter_d": "🇩", + "regional_indicator_symbol_letter_e": "🇪", + "regional_indicator_symbol_letter_f": "🇫", + "regional_indicator_symbol_letter_g": "🇬", + "regional_indicator_symbol_letter_h": "🇭", + "regional_indicator_symbol_letter_i": "🇮", + "regional_indicator_symbol_letter_j": "🇯", + "regional_indicator_symbol_letter_k": "🇰", + "regional_indicator_symbol_letter_l": "🇱", + "regional_indicator_symbol_letter_m": "🇲", + "regional_indicator_symbol_letter_n": "🇳", + "regional_indicator_symbol_letter_o": "🇴", + "regional_indicator_symbol_letter_p": "🇵", + "regional_indicator_symbol_letter_q": "🇶", + "regional_indicator_symbol_letter_r": "🇷", + "regional_indicator_symbol_letter_s": "🇸", + "regional_indicator_symbol_letter_t": "🇹", + "regional_indicator_symbol_letter_u": "🇺", + "regional_indicator_symbol_letter_v": "🇻", + "regional_indicator_symbol_letter_w": "🇼", + "regional_indicator_symbol_letter_x": "🇽", + "regional_indicator_symbol_letter_y": "🇾", + "regional_indicator_symbol_letter_z": "🇿", + "airplane_arriving": "🛬", + "space_invader": "👾", + "football": "🏈", + "anger": "💢", + "angry": "😠", + "anguished": "😧", + "signal_strength": "📶", + "arrows_counterclockwise": "🔄", + "arrow_heading_down": "⤵", + "arrow_heading_up": "⤴", + "art": "🎨", + "astonished": "😲", + "athletic_shoe": "👟", + "atm": "🏧", + "car": "🚗", + "red_car": "🚗", + "angel": "👼", + "back": "🔙", + "badminton_racquet_and_shuttlecock": "🏸", + "dollar": "💵", + "euro": "💶", + "pound": "💷", + "yen": "💴", + "barber": "💈", + "bath": "🛀", + "bear": "🐻", + "heartbeat": "💓", + "beer": "🍺", + "no_bell": "🔕", + "bento": "🍱", + "bike": "🚲", + "bicyclist": "🚴", + "8ball": "🎱", + "biohazard_sign": "☣", + "birthday": "🎂", + "black_circle_for_record": "⏺", + "clubs": "♣", + 
"diamonds": "♦", + "arrow_double_down": "⏬", + "hearts": "♥", + "rewind": "⏪", + "black_left__pointing_double_triangle_with_vertical_bar": "⏮", + "arrow_backward": "◀", + "black_medium_small_square": "◾", + "question": "❓", + "fast_forward": "⏩", + "black_right__pointing_double_triangle_with_vertical_bar": "⏭", + "arrow_forward": "▶", + "black_right__pointing_triangle_with_double_vertical_bar": "⏯", + "arrow_right": "➡", + "spades": "♠", + "black_square_for_stop": "⏹", + "sunny": "☀", + "phone": "☎", + "recycle": "♻", + "arrow_double_up": "⏫", + "busstop": "🚏", + "date": "📅", + "flags": "🎏", + "cat2": "🐈", + "joy_cat": "😹", + "smirk_cat": "😼", + "chart_with_downwards_trend": "📉", + "chart_with_upwards_trend": "📈", + "chart": "💹", + "mega": "📣", + "checkered_flag": "🏁", + "accept": "🉑", + "ideograph_advantage": "🉐", + "congratulations": "㊗", + "secret": "㊙", + "m": "Ⓜ", + "city_sunset": "🌆", + "clapper": "🎬", + "clap": "👏", + "beers": "🍻", + "clock830": "🕣", + "clock8": "🕗", + "clock1130": "🕦", + "clock11": "🕚", + "clock530": "🕠", + "clock5": "🕔", + "clock430": "🕟", + "clock4": "🕓", + "clock930": "🕤", + "clock9": "🕘", + "clock130": "🕜", + "clock1": "🕐", + "clock730": "🕢", + "clock7": "🕖", + "clock630": "🕡", + "clock6": "🕕", + "clock1030": "🕥", + "clock10": "🕙", + "clock330": "🕞", + "clock3": "🕒", + "clock1230": "🕧", + "clock12": "🕛", + "clock230": "🕝", + "clock2": "🕑", + "arrows_clockwise": "🔃", + "repeat": "🔁", + "repeat_one": "🔂", + "closed_lock_with_key": "🔐", + "mailbox_closed": "📪", + "mailbox": "📫", + "cloud_with_tornado": "🌪", + "cocktail": "🍸", + "boom": "💥", + "compression": "🗜", + "confounded": "😖", + "confused": "😕", + "rice": "🍚", + "cow2": "🐄", + "cricket_bat_and_ball": "🏏", + "x": "❌", + "cry": "😢", + "curry": "🍛", + "dagger_knife": "🗡", + "dancer": "💃", + "dark_sunglasses": "🕶", + "dash": "💨", + "truck": "🚚", + "derelict_house_building": "🏚", + "diamond_shape_with_a_dot_inside": "💠", + "dart": "🎯", + "disappointed_relieved": "😥", + "disappointed": "😞", + "do_not_litter": "🚯", + "dog2": "🐕", + "flipper": "🐬", + "loop": "➿", + "bangbang": "‼", + "double_vertical_bar": "⏸", + "dove_of_peace": "🕊", + "small_red_triangle_down": "🔻", + "arrow_down_small": "🔽", + "arrow_down": "⬇", + "dromedary_camel": "🐪", + "e__mail": "📧", + "corn": "🌽", + "ear_of_rice": "🌾", + "earth_americas": "🌎", + "earth_asia": "🌏", + "earth_africa": "🌍", + "eight_pointed_black_star": "✴", + "eight_spoked_asterisk": "✳", + "eject_symbol": "⏏", + "bulb": "💡", + "emoji_modifier_fitzpatrick_type__1__2": "🏻", + "emoji_modifier_fitzpatrick_type__3": "🏼", + "emoji_modifier_fitzpatrick_type__4": "🏽", + "emoji_modifier_fitzpatrick_type__5": "🏾", + "emoji_modifier_fitzpatrick_type__6": "🏿", + "end": "🔚", + "email": "✉", + "european_castle": "🏰", + "european_post_office": "🏤", + "interrobang": "⁉", + "expressionless": "😑", + "eyeglasses": "👓", + "massage": "💆", + "yum": "😋", + "scream": "😱", + "kissing_heart": "😘", + "sweat": "😓", + "face_with_head__bandage": "🤕", + "triumph": "😤", + "mask": "😷", + "no_good": "🙅", + "ok_woman": "🙆", + "open_mouth": "😮", + "cold_sweat": "😰", + "stuck_out_tongue": "😛", + "stuck_out_tongue_closed_eyes": "😝", + "stuck_out_tongue_winking_eye": "😜", + "joy": "😂", + "no_mouth": "😶", + "santa": "🎅", + "fax": "📠", + "fearful": "😨", + "field_hockey_stick_and_ball": "🏑", + "first_quarter_moon_with_face": "🌛", + "fish_cake": "🍥", + "fishing_pole_and_fish": "🎣", + "facepunch": "👊", + "punch": "👊", + "flag_for_afghanistan": "🇦🇫", + "flag_for_albania": "🇦🇱", + "flag_for_algeria": "🇩🇿", + 
"flag_for_american_samoa": "🇦🇸", + "flag_for_andorra": "🇦🇩", + "flag_for_angola": "🇦🇴", + "flag_for_anguilla": "🇦🇮", + "flag_for_antarctica": "🇦🇶", + "flag_for_antigua_&_barbuda": "🇦🇬", + "flag_for_argentina": "🇦🇷", + "flag_for_armenia": "🇦🇲", + "flag_for_aruba": "🇦🇼", + "flag_for_ascension_island": "🇦🇨", + "flag_for_australia": "🇦🇺", + "flag_for_austria": "🇦🇹", + "flag_for_azerbaijan": "🇦🇿", + "flag_for_bahamas": "🇧🇸", + "flag_for_bahrain": "🇧🇭", + "flag_for_bangladesh": "🇧🇩", + "flag_for_barbados": "🇧🇧", + "flag_for_belarus": "🇧🇾", + "flag_for_belgium": "🇧🇪", + "flag_for_belize": "🇧🇿", + "flag_for_benin": "🇧🇯", + "flag_for_bermuda": "🇧🇲", + "flag_for_bhutan": "🇧🇹", + "flag_for_bolivia": "🇧🇴", + "flag_for_bosnia_&_herzegovina": "🇧🇦", + "flag_for_botswana": "🇧🇼", + "flag_for_bouvet_island": "🇧🇻", + "flag_for_brazil": "🇧🇷", + "flag_for_british_indian_ocean_territory": "🇮🇴", + "flag_for_british_virgin_islands": "🇻🇬", + "flag_for_brunei": "🇧🇳", + "flag_for_bulgaria": "🇧🇬", + "flag_for_burkina_faso": "🇧🇫", + "flag_for_burundi": "🇧🇮", + "flag_for_cambodia": "🇰🇭", + "flag_for_cameroon": "🇨🇲", + "flag_for_canada": "🇨🇦", + "flag_for_canary_islands": "🇮🇨", + "flag_for_cape_verde": "🇨🇻", + "flag_for_caribbean_netherlands": "🇧🇶", + "flag_for_cayman_islands": "🇰🇾", + "flag_for_central_african_republic": "🇨🇫", + "flag_for_ceuta_&_melilla": "🇪🇦", + "flag_for_chad": "🇹🇩", + "flag_for_chile": "🇨🇱", + "flag_for_china": "🇨🇳", + "flag_for_christmas_island": "🇨🇽", + "flag_for_clipperton_island": "🇨🇵", + "flag_for_cocos__islands": "🇨🇨", + "flag_for_colombia": "🇨🇴", + "flag_for_comoros": "🇰🇲", + "flag_for_congo____brazzaville": "🇨🇬", + "flag_for_congo____kinshasa": "🇨🇩", + "flag_for_cook_islands": "🇨🇰", + "flag_for_costa_rica": "🇨🇷", + "flag_for_croatia": "🇭🇷", + "flag_for_cuba": "🇨🇺", + "flag_for_curaçao": "🇨🇼", + "flag_for_cyprus": "🇨🇾", + "flag_for_czech_republic": "🇨🇿", + "flag_for_côte_d’ivoire": "🇨🇮", + "flag_for_denmark": "🇩🇰", + "flag_for_diego_garcia": "🇩🇬", + "flag_for_djibouti": "🇩🇯", + "flag_for_dominica": "🇩🇲", + "flag_for_dominican_republic": "🇩🇴", + "flag_for_ecuador": "🇪🇨", + "flag_for_egypt": "🇪🇬", + "flag_for_el_salvador": "🇸🇻", + "flag_for_equatorial_guinea": "🇬🇶", + "flag_for_eritrea": "🇪🇷", + "flag_for_estonia": "🇪🇪", + "flag_for_ethiopia": "🇪🇹", + "flag_for_european_union": "🇪🇺", + "flag_for_falkland_islands": "🇫🇰", + "flag_for_faroe_islands": "🇫🇴", + "flag_for_fiji": "🇫🇯", + "flag_for_finland": "🇫🇮", + "flag_for_france": "🇫🇷", + "flag_for_french_guiana": "🇬🇫", + "flag_for_french_polynesia": "🇵🇫", + "flag_for_french_southern_territories": "🇹🇫", + "flag_for_gabon": "🇬🇦", + "flag_for_gambia": "🇬🇲", + "flag_for_georgia": "🇬🇪", + "flag_for_germany": "🇩🇪", + "flag_for_ghana": "🇬🇭", + "flag_for_gibraltar": "🇬🇮", + "flag_for_greece": "🇬🇷", + "flag_for_greenland": "🇬🇱", + "flag_for_grenada": "🇬🇩", + "flag_for_guadeloupe": "🇬🇵", + "flag_for_guam": "🇬🇺", + "flag_for_guatemala": "🇬🇹", + "flag_for_guernsey": "🇬🇬", + "flag_for_guinea": "🇬🇳", + "flag_for_guinea__bissau": "🇬🇼", + "flag_for_guyana": "🇬🇾", + "flag_for_haiti": "🇭🇹", + "flag_for_heard_&_mcdonald_islands": "🇭🇲", + "flag_for_honduras": "🇭🇳", + "flag_for_hong_kong": "🇭🇰", + "flag_for_hungary": "🇭🇺", + "flag_for_iceland": "🇮🇸", + "flag_for_india": "🇮🇳", + "flag_for_indonesia": "🇮🇩", + "flag_for_iran": "🇮🇷", + "flag_for_iraq": "🇮🇶", + "flag_for_ireland": "🇮🇪", + "flag_for_isle_of_man": "🇮🇲", + "flag_for_israel": "🇮🇱", + "flag_for_italy": "🇮🇹", + "flag_for_jamaica": "🇯🇲", + "flag_for_japan": "🇯🇵", + "flag_for_jersey": "🇯🇪", + "flag_for_jordan": 
"🇯🇴", + "flag_for_kazakhstan": "🇰🇿", + "flag_for_kenya": "🇰🇪", + "flag_for_kiribati": "🇰🇮", + "flag_for_kosovo": "🇽🇰", + "flag_for_kuwait": "🇰🇼", + "flag_for_kyrgyzstan": "🇰🇬", + "flag_for_laos": "🇱🇦", + "flag_for_latvia": "🇱🇻", + "flag_for_lebanon": "🇱🇧", + "flag_for_lesotho": "🇱🇸", + "flag_for_liberia": "🇱🇷", + "flag_for_libya": "🇱🇾", + "flag_for_liechtenstein": "🇱🇮", + "flag_for_lithuania": "🇱🇹", + "flag_for_luxembourg": "🇱🇺", + "flag_for_macau": "🇲🇴", + "flag_for_macedonia": "🇲🇰", + "flag_for_madagascar": "🇲🇬", + "flag_for_malawi": "🇲🇼", + "flag_for_malaysia": "🇲🇾", + "flag_for_maldives": "🇲🇻", + "flag_for_mali": "🇲🇱", + "flag_for_malta": "🇲🇹", + "flag_for_marshall_islands": "🇲🇭", + "flag_for_martinique": "🇲🇶", + "flag_for_mauritania": "🇲🇷", + "flag_for_mauritius": "🇲🇺", + "flag_for_mayotte": "🇾🇹", + "flag_for_mexico": "🇲🇽", + "flag_for_micronesia": "🇫🇲", + "flag_for_moldova": "🇲🇩", + "flag_for_monaco": "🇲🇨", + "flag_for_mongolia": "🇲🇳", + "flag_for_montenegro": "🇲🇪", + "flag_for_montserrat": "🇲🇸", + "flag_for_morocco": "🇲🇦", + "flag_for_mozambique": "🇲🇿", + "flag_for_myanmar": "🇲🇲", + "flag_for_namibia": "🇳🇦", + "flag_for_nauru": "🇳🇷", + "flag_for_nepal": "🇳🇵", + "flag_for_netherlands": "🇳🇱", + "flag_for_new_caledonia": "🇳🇨", + "flag_for_new_zealand": "🇳🇿", + "flag_for_nicaragua": "🇳🇮", + "flag_for_niger": "🇳🇪", + "flag_for_nigeria": "🇳🇬", + "flag_for_niue": "🇳🇺", + "flag_for_norfolk_island": "🇳🇫", + "flag_for_north_korea": "🇰🇵", + "flag_for_northern_mariana_islands": "🇲🇵", + "flag_for_norway": "🇳🇴", + "flag_for_oman": "🇴🇲", + "flag_for_pakistan": "🇵🇰", + "flag_for_palau": "🇵🇼", + "flag_for_palestinian_territories": "🇵🇸", + "flag_for_panama": "🇵🇦", + "flag_for_papua_new_guinea": "🇵🇬", + "flag_for_paraguay": "🇵🇾", + "flag_for_peru": "🇵🇪", + "flag_for_philippines": "🇵🇭", + "flag_for_pitcairn_islands": "🇵🇳", + "flag_for_poland": "🇵🇱", + "flag_for_portugal": "🇵🇹", + "flag_for_puerto_rico": "🇵🇷", + "flag_for_qatar": "🇶🇦", + "flag_for_romania": "🇷🇴", + "flag_for_russia": "🇷🇺", + "flag_for_rwanda": "🇷🇼", + "flag_for_réunion": "🇷🇪", + "flag_for_samoa": "🇼🇸", + "flag_for_san_marino": "🇸🇲", + "flag_for_saudi_arabia": "🇸🇦", + "flag_for_senegal": "🇸🇳", + "flag_for_serbia": "🇷🇸", + "flag_for_seychelles": "🇸🇨", + "flag_for_sierra_leone": "🇸🇱", + "flag_for_singapore": "🇸🇬", + "flag_for_sint_maarten": "🇸🇽", + "flag_for_slovakia": "🇸🇰", + "flag_for_slovenia": "🇸🇮", + "flag_for_solomon_islands": "🇸🇧", + "flag_for_somalia": "🇸🇴", + "flag_for_south_africa": "🇿🇦", + "flag_for_south_georgia_&_south_sandwich_islands": "🇬🇸", + "flag_for_south_korea": "🇰🇷", + "flag_for_south_sudan": "🇸🇸", + "flag_for_spain": "🇪🇸", + "flag_for_sri_lanka": "🇱🇰", + "flag_for_st._barthélemy": "🇧🇱", + "flag_for_st._helena": "🇸🇭", + "flag_for_st._kitts_&_nevis": "🇰🇳", + "flag_for_st._lucia": "🇱🇨", + "flag_for_st._martin": "🇲🇫", + "flag_for_st._pierre_&_miquelon": "🇵🇲", + "flag_for_st._vincent_&_grenadines": "🇻🇨", + "flag_for_sudan": "🇸🇩", + "flag_for_suriname": "🇸🇷", + "flag_for_svalbard_&_jan_mayen": "🇸🇯", + "flag_for_swaziland": "🇸🇿", + "flag_for_sweden": "🇸🇪", + "flag_for_switzerland": "🇨🇭", + "flag_for_syria": "🇸🇾", + "flag_for_são_tomé_&_príncipe": "🇸🇹", + "flag_for_taiwan": "🇹🇼", + "flag_for_tajikistan": "🇹🇯", + "flag_for_tanzania": "🇹🇿", + "flag_for_thailand": "🇹🇭", + "flag_for_timor__leste": "🇹🇱", + "flag_for_togo": "🇹🇬", + "flag_for_tokelau": "🇹🇰", + "flag_for_tonga": "🇹🇴", + "flag_for_trinidad_&_tobago": "🇹🇹", + "flag_for_tristan_da_cunha": "🇹🇦", + "flag_for_tunisia": "🇹🇳", + "flag_for_turkey": "🇹🇷", + 
"flag_for_turkmenistan": "🇹🇲", + "flag_for_turks_&_caicos_islands": "🇹🇨", + "flag_for_tuvalu": "🇹🇻", + "flag_for_u.s._outlying_islands": "🇺🇲", + "flag_for_u.s._virgin_islands": "🇻🇮", + "flag_for_uganda": "🇺🇬", + "flag_for_ukraine": "🇺🇦", + "flag_for_united_arab_emirates": "🇦🇪", + "flag_for_united_kingdom": "🇬🇧", + "flag_for_united_states": "🇺🇸", + "flag_for_uruguay": "🇺🇾", + "flag_for_uzbekistan": "🇺🇿", + "flag_for_vanuatu": "🇻🇺", + "flag_for_vatican_city": "🇻🇦", + "flag_for_venezuela": "🇻🇪", + "flag_for_vietnam": "🇻🇳", + "flag_for_wallis_&_futuna": "🇼🇫", + "flag_for_western_sahara": "🇪🇭", + "flag_for_yemen": "🇾🇪", + "flag_for_zambia": "🇿🇲", + "flag_for_zimbabwe": "🇿🇼", + "flag_for_åland_islands": "🇦🇽", + "golf": "⛳", + "fleur__de__lis": "⚜", + "muscle": "💪", + "flushed": "😳", + "frame_with_picture": "🖼", + "fries": "🍟", + "frog": "🐸", + "hatched_chick": "🐥", + "frowning": "😦", + "fuelpump": "⛽", + "full_moon_with_face": "🌝", + "gem": "💎", + "star2": "🌟", + "golfer": "🏌", + "mortar_board": "🎓", + "grimacing": "😬", + "smile_cat": "😸", + "grinning": "😀", + "grin": "😁", + "heartpulse": "💗", + "guardsman": "💂", + "haircut": "💇", + "hamster": "🐹", + "raising_hand": "🙋", + "headphones": "🎧", + "hear_no_evil": "🙉", + "cupid": "💘", + "gift_heart": "💝", + "heart": "❤", + "exclamation": "❗", + "heavy_exclamation_mark": "❗", + "heavy_heart_exclamation_mark_ornament": "❣", + "o": "⭕", + "helm_symbol": "⎈", + "helmet_with_white_cross": "⛑", + "high_heel": "👠", + "bullettrain_side": "🚄", + "bullettrain_front": "🚅", + "high_brightness": "🔆", + "zap": "⚡", + "hocho": "🔪", + "knife": "🔪", + "bee": "🐝", + "traffic_light": "🚥", + "racehorse": "🐎", + "coffee": "☕", + "hotsprings": "♨", + "hourglass": "⌛", + "hourglass_flowing_sand": "⏳", + "house_buildings": "🏘", + "100": "💯", + "hushed": "😯", + "ice_hockey_stick_and_puck": "🏒", + "imp": "👿", + "information_desk_person": "💁", + "information_source": "ℹ", + "capital_abcd": "🔠", + "abc": "🔤", + "abcd": "🔡", + "1234": "🔢", + "symbols": "🔣", + "izakaya_lantern": "🏮", + "lantern": "🏮", + "jack_o_lantern": "🎃", + "dolls": "🎎", + "japanese_goblin": "👺", + "japanese_ogre": "👹", + "beginner": "🔰", + "zero": "0️⃣", + "one": "1️⃣", + "ten": "🔟", + "two": "2️⃣", + "three": "3️⃣", + "four": "4️⃣", + "five": "5️⃣", + "six": "6️⃣", + "seven": "7️⃣", + "eight": "8️⃣", + "nine": "9️⃣", + "couplekiss": "💏", + "kissing_cat": "😽", + "kissing": "😗", + "kissing_closed_eyes": "😚", + "kissing_smiling_eyes": "😙", + "beetle": "🐞", + "large_blue_circle": "🔵", + "last_quarter_moon_with_face": "🌜", + "leaves": "🍃", + "mag": "🔍", + "left_right_arrow": "↔", + "leftwards_arrow_with_hook": "↩", + "arrow_left": "⬅", + "lock": "🔒", + "lock_with_ink_pen": "🔏", + "sob": "😭", + "low_brightness": "🔅", + "lower_left_ballpoint_pen": "🖊", + "lower_left_crayon": "🖍", + "lower_left_fountain_pen": "🖋", + "lower_left_paintbrush": "🖌", + "mahjong": "🀄", + "couple": "👫", + "man_in_business_suit_levitating": "🕴", + "man_with_gua_pi_mao": "👲", + "man_with_turban": "👳", + "mans_shoe": "👞", + "shoe": "👞", + "menorah_with_nine_branches": "🕎", + "mens": "🚹", + "minidisc": "💽", + "iphone": "📱", + "calling": "📲", + "money__mouth_face": "🤑", + "moneybag": "💰", + "rice_scene": "🎑", + "mountain_bicyclist": "🚵", + "mouse2": "🐁", + "lips": "👄", + "moyai": "🗿", + "notes": "🎶", + "nail_care": "💅", + "ab": "🆎", + "negative_squared_cross_mark": "❎", + "a": "🅰", + "b": "🅱", + "o2": "🅾", + "parking": "🅿", + "new_moon_with_face": "🌚", + "no_entry_sign": "🚫", + "underage": "🔞", + "non__potable_water": "🚱", + 
"arrow_upper_right": "↗", + "arrow_upper_left": "↖", + "office": "🏢", + "older_man": "👴", + "older_woman": "👵", + "om_symbol": "🕉", + "on": "🔛", + "book": "📖", + "unlock": "🔓", + "mailbox_with_no_mail": "📭", + "mailbox_with_mail": "📬", + "cd": "💿", + "tada": "🎉", + "feet": "🐾", + "walking": "🚶", + "pencil2": "✏", + "pensive": "😔", + "persevere": "😣", + "bow": "🙇", + "raised_hands": "🙌", + "person_with_ball": "⛹", + "person_with_blond_hair": "👱", + "pray": "🙏", + "person_with_pouting_face": "🙎", + "computer": "💻", + "pig2": "🐖", + "hankey": "💩", + "poop": "💩", + "shit": "💩", + "bamboo": "🎍", + "gun": "🔫", + "black_joker": "🃏", + "rotating_light": "🚨", + "cop": "👮", + "stew": "🍲", + "pouch": "👝", + "pouting_cat": "😾", + "rage": "😡", + "put_litter_in_its_place": "🚮", + "rabbit2": "🐇", + "racing_motorcycle": "🏍", + "radioactive_sign": "☢", + "fist": "✊", + "hand": "✋", + "raised_hand_with_fingers_splayed": "🖐", + "raised_hand_with_part_between_middle_and_ring_fingers": "🖖", + "blue_car": "🚙", + "apple": "🍎", + "relieved": "😌", + "reversed_hand_with_middle_finger_extended": "🖕", + "mag_right": "🔎", + "arrow_right_hook": "↪", + "sweet_potato": "🍠", + "robot": "🤖", + "rolled__up_newspaper": "🗞", + "rowboat": "🚣", + "runner": "🏃", + "running": "🏃", + "running_shirt_with_sash": "🎽", + "boat": "⛵", + "scales": "⚖", + "school_satchel": "🎒", + "scorpius": "♏", + "see_no_evil": "🙈", + "sheep": "🐑", + "stars": "🌠", + "cake": "🍰", + "six_pointed_star": "🔯", + "ski": "🎿", + "sleeping_accommodation": "🛌", + "sleeping": "😴", + "sleepy": "😪", + "sleuth_or_spy": "🕵", + "heart_eyes_cat": "😻", + "smiley_cat": "😺", + "innocent": "😇", + "heart_eyes": "😍", + "smiling_imp": "😈", + "smiley": "😃", + "sweat_smile": "😅", + "smile": "😄", + "laughing": "😆", + "satisfied": "😆", + "blush": "😊", + "smirk": "😏", + "smoking": "🚬", + "snow_capped_mountain": "🏔", + "soccer": "⚽", + "icecream": "🍦", + "soon": "🔜", + "arrow_lower_right": "↘", + "arrow_lower_left": "↙", + "speak_no_evil": "🙊", + "speaker": "🔈", + "mute": "🔇", + "sound": "🔉", + "loud_sound": "🔊", + "speaking_head_in_silhouette": "🗣", + "spiral_calendar_pad": "🗓", + "spiral_note_pad": "🗒", + "shell": "🐚", + "sweat_drops": "💦", + "u5272": "🈹", + "u5408": "🈴", + "u55b6": "🈺", + "u6307": "🈯", + "u6708": "🈷", + "u6709": "🈶", + "u6e80": "🈵", + "u7121": "🈚", + "u7533": "🈸", + "u7981": "🈲", + "u7a7a": "🈳", + "cl": "🆑", + "cool": "🆒", + "free": "🆓", + "id": "🆔", + "koko": "🈁", + "sa": "🈂", + "new": "🆕", + "ng": "🆖", + "ok": "🆗", + "sos": "🆘", + "up": "🆙", + "vs": "🆚", + "steam_locomotive": "🚂", + "ramen": "🍜", + "partly_sunny": "⛅", + "city_sunrise": "🌇", + "surfer": "🏄", + "swimmer": "🏊", + "shirt": "👕", + "tshirt": "👕", + "table_tennis_paddle_and_ball": "🏓", + "tea": "🍵", + "tv": "📺", + "three_button_mouse": "🖱", + "+1": "👍", + "thumbsup": "👍", + "__1": "👎", + "-1": "👎", + "thumbsdown": "👎", + "thunder_cloud_and_rain": "⛈", + "tiger2": "🐅", + "tophat": "🎩", + "top": "🔝", + "tm": "™", + "train2": "🚆", + "triangular_flag_on_post": "🚩", + "trident": "🔱", + "twisted_rightwards_arrows": "🔀", + "unamused": "😒", + "small_red_triangle": "🔺", + "arrow_up_small": "🔼", + "arrow_up_down": "↕", + "upside__down_face": "🙃", + "arrow_up": "⬆", + "v": "✌", + "vhs": "📼", + "wc": "🚾", + "ocean": "🌊", + "waving_black_flag": "🏴", + "wave": "👋", + "waving_white_flag": "🏳", + "moon": "🌔", + "scream_cat": "🙀", + "weary": "😩", + "weight_lifter": "🏋", + "whale2": "🐋", + "wheelchair": "♿", + "point_down": "👇", + "grey_exclamation": "❕", + "white_frowning_face": "☹", + "white_check_mark": "✅", + 
"point_left": "👈", + "white_medium_small_square": "◽", + "star": "⭐", + "grey_question": "❔", + "point_right": "👉", + "relaxed": "☺", + "white_sun_behind_cloud": "🌥", + "white_sun_behind_cloud_with_rain": "🌦", + "white_sun_with_small_cloud": "🌤", + "point_up_2": "👆", + "point_up": "☝", + "wind_blowing_face": "🌬", + "wink": "😉", + "wolf": "🐺", + "dancers": "👯", + "boot": "👢", + "womans_clothes": "👚", + "womans_hat": "👒", + "sandal": "👡", + "womens": "🚺", + "worried": "😟", + "gift": "🎁", + "zipper__mouth_face": "🤐", + "regional_indicator_a": "🇦", + "regional_indicator_b": "🇧", + "regional_indicator_c": "🇨", + "regional_indicator_d": "🇩", + "regional_indicator_e": "🇪", + "regional_indicator_f": "🇫", + "regional_indicator_g": "🇬", + "regional_indicator_h": "🇭", + "regional_indicator_i": "🇮", + "regional_indicator_j": "🇯", + "regional_indicator_k": "🇰", + "regional_indicator_l": "🇱", + "regional_indicator_m": "🇲", + "regional_indicator_n": "🇳", + "regional_indicator_o": "🇴", + "regional_indicator_p": "🇵", + "regional_indicator_q": "🇶", + "regional_indicator_r": "🇷", + "regional_indicator_s": "🇸", + "regional_indicator_t": "🇹", + "regional_indicator_u": "🇺", + "regional_indicator_v": "🇻", + "regional_indicator_w": "🇼", + "regional_indicator_x": "🇽", + "regional_indicator_y": "🇾", + "regional_indicator_z": "🇿", +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py new file mode 100644 index 0000000..bb2cafa --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_emoji_replace.py @@ -0,0 +1,32 @@ +from typing import Callable, Match, Optional +import re + +from ._emoji_codes import EMOJI + + +_ReStringMatch = Match[str] # regex match object +_ReSubCallable = Callable[[_ReStringMatch], str] # Callable invoked by re.sub +_EmojiSubMethod = Callable[[_ReSubCallable, str], str] # Sub method of a compiled re + + +def _emoji_replace( + text: str, + default_variant: Optional[str] = None, + _emoji_sub: _EmojiSubMethod = re.compile(r"(:(\S*?)(?:(?:\-)(emoji|text))?:)").sub, +) -> str: + """Replace emoji code in text.""" + get_emoji = EMOJI.__getitem__ + variants = {"text": "\uFE0E", "emoji": "\uFE0F"} + get_variant = variants.get + default_variant_code = variants.get(default_variant, "") if default_variant else "" + + def do_replace(match: Match[str]) -> str: + emoji_code, emoji_name, variant = match.groups() + try: + return get_emoji(emoji_name.lower()) + get_variant( + variant, default_variant_code + ) + except KeyError: + return emoji_code + + return _emoji_sub(do_replace, text) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py new file mode 100644 index 0000000..e7527e5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_export_format.py @@ -0,0 +1,76 @@ +CONSOLE_HTML_FORMAT = """\ + + + + + + + +
+    <pre style="font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace"><code style="font-family:inherit">{code}</code></pre>
    + + +""" + +CONSOLE_SVG_FORMAT = """\ + + + + + + + + + {lines} + + + {chrome} + + {backgrounds} + + {matrix} + + + +""" + +_SVG_FONT_FAMILY = "Rich Fira Code" +_SVG_CLASSES_PREFIX = "rich-svg" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py new file mode 100644 index 0000000..cbd6da9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_extension.py @@ -0,0 +1,10 @@ +from typing import Any + + +def load_ipython_extension(ip: Any) -> None: # pragma: no cover + # prevent circular import + from pip._vendor.rich.pretty import install + from pip._vendor.rich.traceback import install as tr_install + + install() + tr_install() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py new file mode 100644 index 0000000..b17ee65 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_fileno.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import IO, Callable + + +def get_fileno(file_like: IO[str]) -> int | None: + """Get fileno() from a file, accounting for poorly implemented file-like objects. + + Args: + file_like (IO): A file-like object. + + Returns: + int | None: The result of fileno if available, or None if operation failed. + """ + fileno: Callable[[], int] | None = getattr(file_like, "fileno", None) + if fileno is not None: + try: + return fileno() + except Exception: + # `fileno` is documented as potentially raising a OSError + # Alas, from the issues, there are so many poorly implemented file-like objects, + # that `fileno()` can raise just about anything. + return None + return None diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py new file mode 100644 index 0000000..27d65ce --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_inspect.py @@ -0,0 +1,268 @@ +import inspect +from inspect import cleandoc, getdoc, getfile, isclass, ismodule, signature +from typing import Any, Collection, Iterable, Optional, Tuple, Type, Union + +from .console import Group, RenderableType +from .control import escape_control_codes +from .highlighter import ReprHighlighter +from .jupyter import JupyterMixin +from .panel import Panel +from .pretty import Pretty +from .table import Table +from .text import Text, TextType + + +def _first_paragraph(doc: str) -> str: + """Get the first paragraph from a docstring.""" + paragraph, _, _ = doc.partition("\n\n") + return paragraph + + +class Inspect(JupyterMixin): + """A renderable to inspect any Python Object. + + Args: + obj (Any): An object to inspect. + title (str, optional): Title to display over inspect result, or None use type. Defaults to None. + help (bool, optional): Show full help text rather than just first paragraph. Defaults to False. + methods (bool, optional): Enable inspection of callables. Defaults to False. + docs (bool, optional): Also render doc strings. Defaults to True. + private (bool, optional): Show private attributes (beginning with underscore). Defaults to False. + dunder (bool, optional): Show attributes starting with double underscore. Defaults to False. + sort (bool, optional): Sort attributes alphabetically. Defaults to True. + all (bool, optional): Show all attributes. Defaults to False. + value (bool, optional): Pretty print value of object. Defaults to True. 
+ """ + + def __init__( + self, + obj: Any, + *, + title: Optional[TextType] = None, + help: bool = False, + methods: bool = False, + docs: bool = True, + private: bool = False, + dunder: bool = False, + sort: bool = True, + all: bool = True, + value: bool = True, + ) -> None: + self.highlighter = ReprHighlighter() + self.obj = obj + self.title = title or self._make_title(obj) + if all: + methods = private = dunder = True + self.help = help + self.methods = methods + self.docs = docs or help + self.private = private or dunder + self.dunder = dunder + self.sort = sort + self.value = value + + def _make_title(self, obj: Any) -> Text: + """Make a default title.""" + title_str = ( + str(obj) + if (isclass(obj) or callable(obj) or ismodule(obj)) + else str(type(obj)) + ) + title_text = self.highlighter(title_str) + return title_text + + def __rich__(self) -> Panel: + return Panel.fit( + Group(*self._render()), + title=self.title, + border_style="scope.border", + padding=(0, 1), + ) + + def _get_signature(self, name: str, obj: Any) -> Optional[Text]: + """Get a signature for a callable.""" + try: + _signature = str(signature(obj)) + ":" + except ValueError: + _signature = "(...)" + except TypeError: + return None + + source_filename: Optional[str] = None + try: + source_filename = getfile(obj) + except (OSError, TypeError): + # OSError is raised if obj has no source file, e.g. when defined in REPL. + pass + + callable_name = Text(name, style="inspect.callable") + if source_filename: + callable_name.stylize(f"link file://{source_filename}") + signature_text = self.highlighter(_signature) + + qualname = name or getattr(obj, "__qualname__", name) + + # If obj is a module, there may be classes (which are callable) to display + if inspect.isclass(obj): + prefix = "class" + elif inspect.iscoroutinefunction(obj): + prefix = "async def" + else: + prefix = "def" + + qual_signature = Text.assemble( + (f"{prefix} ", f"inspect.{prefix.replace(' ', '_')}"), + (qualname, "inspect.callable"), + signature_text, + ) + + return qual_signature + + def _render(self) -> Iterable[RenderableType]: + """Render object.""" + + def sort_items(item: Tuple[str, Any]) -> Tuple[bool, str]: + key, (_error, value) = item + return (callable(value), key.strip("_").lower()) + + def safe_getattr(attr_name: str) -> Tuple[Any, Any]: + """Get attribute or any exception.""" + try: + return (None, getattr(obj, attr_name)) + except Exception as error: + return (error, None) + + obj = self.obj + keys = dir(obj) + total_items = len(keys) + if not self.dunder: + keys = [key for key in keys if not key.startswith("__")] + if not self.private: + keys = [key for key in keys if not key.startswith("_")] + not_shown_count = total_items - len(keys) + items = [(key, safe_getattr(key)) for key in keys] + if self.sort: + items.sort(key=sort_items) + + items_table = Table.grid(padding=(0, 1), expand=False) + items_table.add_column(justify="right") + add_row = items_table.add_row + highlighter = self.highlighter + + if callable(obj): + signature = self._get_signature("", obj) + if signature is not None: + yield signature + yield "" + + if self.docs: + _doc = self._get_formatted_doc(obj) + if _doc is not None: + doc_text = Text(_doc, style="inspect.help") + doc_text = highlighter(doc_text) + yield doc_text + yield "" + + if self.value and not (isclass(obj) or callable(obj) or ismodule(obj)): + yield Panel( + Pretty(obj, indent_guides=True, max_length=10, max_string=60), + border_style="inspect.value.border", + ) + yield "" + + for key, (error, value) in 
items: + key_text = Text.assemble( + ( + key, + "inspect.attr.dunder" if key.startswith("__") else "inspect.attr", + ), + (" =", "inspect.equals"), + ) + if error is not None: + warning = key_text.copy() + warning.stylize("inspect.error") + add_row(warning, highlighter(repr(error))) + continue + + if callable(value): + if not self.methods: + continue + + _signature_text = self._get_signature(key, value) + if _signature_text is None: + add_row(key_text, Pretty(value, highlighter=highlighter)) + else: + if self.docs: + docs = self._get_formatted_doc(value) + if docs is not None: + _signature_text.append("\n" if "\n" in docs else " ") + doc = highlighter(docs) + doc.stylize("inspect.doc") + _signature_text.append(doc) + + add_row(key_text, _signature_text) + else: + add_row(key_text, Pretty(value, highlighter=highlighter)) + if items_table.row_count: + yield items_table + elif not_shown_count: + yield Text.from_markup( + f"[b cyan]{not_shown_count}[/][i] attribute(s) not shown.[/i] " + f"Run [b][magenta]inspect[/]([not b]inspect[/])[/b] for options." + ) + + def _get_formatted_doc(self, object_: Any) -> Optional[str]: + """ + Extract the docstring of an object, process it and returns it. + The processing consists in cleaning up the docstring's indentation, + taking only its 1st paragraph if `self.help` is not True, + and escape its control codes. + + Args: + object_ (Any): the object to get the docstring from. + + Returns: + Optional[str]: the processed docstring, or None if no docstring was found. + """ + docs = getdoc(object_) + if docs is None: + return None + docs = cleandoc(docs).strip() + if not self.help: + docs = _first_paragraph(docs) + return escape_control_codes(docs) + + +def get_object_types_mro(obj: Union[object, Type[Any]]) -> Tuple[type, ...]: + """Returns the MRO of an object's class, or of the object itself if it's a class.""" + if not hasattr(obj, "__mro__"): + # N.B. we cannot use `if type(obj) is type` here because it doesn't work with + # some types of classes, such as the ones that use abc.ABCMeta. + obj = type(obj) + return getattr(obj, "__mro__", ()) + + +def get_object_types_mro_as_strings(obj: object) -> Collection[str]: + """ + Returns the MRO of an object's class as full qualified names, or of the object itself if it's a class. + + Examples: + `object_types_mro_as_strings(JSONDecoder)` will return `['json.decoder.JSONDecoder', 'builtins.object']` + """ + return [ + f'{getattr(type_, "__module__", "")}.{getattr(type_, "__qualname__", "")}' + for type_ in get_object_types_mro(obj) + ] + + +def is_object_one_of_types( + obj: object, fully_qualified_types_names: Collection[str] +) -> bool: + """ + Returns `True` if the given object's class (or the object itself, if it's a class) has one of the + fully qualified names in its MRO. 
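The MRO helpers above can be exercised directly; this sketch just replays the `JSONDecoder` example from the docstring (vendored import path assumed):

    from json.decoder import JSONDecoder

    from pip._vendor.rich._inspect import (
        get_object_types_mro_as_strings,
        is_object_one_of_types,
    )

    print(get_object_types_mro_as_strings(JSONDecoder))
    # ['json.decoder.JSONDecoder', 'builtins.object']
    print(is_object_one_of_types(JSONDecoder, ["builtins.object"]))  # True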
+ """ + for type_name in get_object_types_mro_as_strings(obj): + if type_name in fully_qualified_types_names: + return True + return False diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py new file mode 100644 index 0000000..fc16c84 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_log_render.py @@ -0,0 +1,94 @@ +from datetime import datetime +from typing import Iterable, List, Optional, TYPE_CHECKING, Union, Callable + + +from .text import Text, TextType + +if TYPE_CHECKING: + from .console import Console, ConsoleRenderable, RenderableType + from .table import Table + +FormatTimeCallable = Callable[[datetime], Text] + + +class LogRender: + def __init__( + self, + show_time: bool = True, + show_level: bool = False, + show_path: bool = True, + time_format: Union[str, FormatTimeCallable] = "[%x %X]", + omit_repeated_times: bool = True, + level_width: Optional[int] = 8, + ) -> None: + self.show_time = show_time + self.show_level = show_level + self.show_path = show_path + self.time_format = time_format + self.omit_repeated_times = omit_repeated_times + self.level_width = level_width + self._last_time: Optional[Text] = None + + def __call__( + self, + console: "Console", + renderables: Iterable["ConsoleRenderable"], + log_time: Optional[datetime] = None, + time_format: Optional[Union[str, FormatTimeCallable]] = None, + level: TextType = "", + path: Optional[str] = None, + line_no: Optional[int] = None, + link_path: Optional[str] = None, + ) -> "Table": + from .containers import Renderables + from .table import Table + + output = Table.grid(padding=(0, 1)) + output.expand = True + if self.show_time: + output.add_column(style="log.time") + if self.show_level: + output.add_column(style="log.level", width=self.level_width) + output.add_column(ratio=1, style="log.message", overflow="fold") + if self.show_path and path: + output.add_column(style="log.path") + row: List["RenderableType"] = [] + if self.show_time: + log_time = log_time or console.get_datetime() + time_format = time_format or self.time_format + if callable(time_format): + log_time_display = time_format(log_time) + else: + log_time_display = Text(log_time.strftime(time_format)) + if log_time_display == self._last_time and self.omit_repeated_times: + row.append(Text(" " * len(log_time_display))) + else: + row.append(log_time_display) + self._last_time = log_time_display + if self.show_level: + row.append(level) + + row.append(Renderables(renderables)) + if self.show_path and path: + path_text = Text() + path_text.append( + path, style=f"link file://{link_path}" if link_path else "" + ) + if line_no: + path_text.append(":") + path_text.append( + f"{line_no}", + style=f"link file://{link_path}#{line_no}" if link_path else "", + ) + row.append(path_text) + + output.add_row(*row) + return output + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console + + c = Console() + c.print("[on blue]Hello", justify="right") + c.log("[on blue]hello", justify="right") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py new file mode 100644 index 0000000..01c6caf --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_loop.py @@ -0,0 +1,43 @@ +from typing import Iterable, Tuple, TypeVar + +T = TypeVar("T") + + +def loop_first(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and generate 
a tuple with a flag for first value.""" + iter_values = iter(values) + try: + value = next(iter_values) + except StopIteration: + return + yield True, value + for value in iter_values: + yield False, value + + +def loop_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: + """Iterate and generate a tuple with a flag for last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + for value in iter_values: + yield False, previous_value + previous_value = value + yield True, previous_value + + +def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]: + """Iterate and generate a tuple with a flag for first and last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + first = True + for value in iter_values: + yield first, False, previous_value + first = False + previous_value = value + yield first, True, previous_value diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py new file mode 100644 index 0000000..6ae05d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_null_file.py @@ -0,0 +1,69 @@ +from types import TracebackType +from typing import IO, Iterable, Iterator, List, Optional, Type + + +class NullFile(IO[str]): + def close(self) -> None: + pass + + def isatty(self) -> bool: + return False + + def read(self, __n: int = 1) -> str: + return "" + + def readable(self) -> bool: + return False + + def readline(self, __limit: int = 1) -> str: + return "" + + def readlines(self, __hint: int = 1) -> List[str]: + return [] + + def seek(self, __offset: int, __whence: int = 1) -> int: + return 0 + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + return 0 + + def truncate(self, __size: Optional[int] = 1) -> int: + return 0 + + def writable(self) -> bool: + return False + + def writelines(self, __lines: Iterable[str]) -> None: + pass + + def __next__(self) -> str: + return "" + + def __iter__(self) -> Iterator[str]: + return iter([""]) + + def __enter__(self) -> IO[str]: + return self + + def __exit__( + self, + __t: Optional[Type[BaseException]], + __value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> None: + pass + + def write(self, text: str) -> int: + return 0 + + def flush(self) -> None: + pass + + def fileno(self) -> int: + return -1 + + +NULL_FILE = NullFile() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py new file mode 100644 index 0000000..3c748d3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_palettes.py @@ -0,0 +1,309 @@ +from .palette import Palette + + +# Taken from https://en.wikipedia.org/wiki/ANSI_escape_code (Windows 10 column) +WINDOWS_PALETTE = Palette( + [ + (12, 12, 12), + (197, 15, 31), + (19, 161, 14), + (193, 156, 0), + (0, 55, 218), + (136, 23, 152), + (58, 150, 221), + (204, 204, 204), + (118, 118, 118), + (231, 72, 86), + (22, 198, 12), + (249, 241, 165), + (59, 120, 255), + (180, 0, 158), + (97, 214, 214), + (242, 242, 242), + ] +) + +# # The standard ansi colors (including bright variants) +STANDARD_PALETTE = Palette( + [ + (0, 0, 0), + (170, 0, 0), + (0, 170, 0), + (170, 85, 0), + (0, 0, 170), + (170, 0, 170), + (0, 170, 170), + (170, 170, 170), + (85, 85, 85), + (255, 85, 85), + (85, 255, 85), + (255, 255, 85), + (85, 85, 255), + (255, 85, 
255), + (85, 255, 255), + (255, 255, 255), + ] +) + + +# The 256 color palette +EIGHT_BIT_PALETTE = Palette( + [ + (0, 0, 0), + (128, 0, 0), + (0, 128, 0), + (128, 128, 0), + (0, 0, 128), + (128, 0, 128), + (0, 128, 128), + (192, 192, 192), + (128, 128, 128), + (255, 0, 0), + (0, 255, 0), + (255, 255, 0), + (0, 0, 255), + (255, 0, 255), + (0, 255, 255), + (255, 255, 255), + (0, 0, 0), + (0, 0, 95), + (0, 0, 135), + (0, 0, 175), + (0, 0, 215), + (0, 0, 255), + (0, 95, 0), + (0, 95, 95), + (0, 95, 135), + (0, 95, 175), + (0, 95, 215), + (0, 95, 255), + (0, 135, 0), + (0, 135, 95), + (0, 135, 135), + (0, 135, 175), + (0, 135, 215), + (0, 135, 255), + (0, 175, 0), + (0, 175, 95), + (0, 175, 135), + (0, 175, 175), + (0, 175, 215), + (0, 175, 255), + (0, 215, 0), + (0, 215, 95), + (0, 215, 135), + (0, 215, 175), + (0, 215, 215), + (0, 215, 255), + (0, 255, 0), + (0, 255, 95), + (0, 255, 135), + (0, 255, 175), + (0, 255, 215), + (0, 255, 255), + (95, 0, 0), + (95, 0, 95), + (95, 0, 135), + (95, 0, 175), + (95, 0, 215), + (95, 0, 255), + (95, 95, 0), + (95, 95, 95), + (95, 95, 135), + (95, 95, 175), + (95, 95, 215), + (95, 95, 255), + (95, 135, 0), + (95, 135, 95), + (95, 135, 135), + (95, 135, 175), + (95, 135, 215), + (95, 135, 255), + (95, 175, 0), + (95, 175, 95), + (95, 175, 135), + (95, 175, 175), + (95, 175, 215), + (95, 175, 255), + (95, 215, 0), + (95, 215, 95), + (95, 215, 135), + (95, 215, 175), + (95, 215, 215), + (95, 215, 255), + (95, 255, 0), + (95, 255, 95), + (95, 255, 135), + (95, 255, 175), + (95, 255, 215), + (95, 255, 255), + (135, 0, 0), + (135, 0, 95), + (135, 0, 135), + (135, 0, 175), + (135, 0, 215), + (135, 0, 255), + (135, 95, 0), + (135, 95, 95), + (135, 95, 135), + (135, 95, 175), + (135, 95, 215), + (135, 95, 255), + (135, 135, 0), + (135, 135, 95), + (135, 135, 135), + (135, 135, 175), + (135, 135, 215), + (135, 135, 255), + (135, 175, 0), + (135, 175, 95), + (135, 175, 135), + (135, 175, 175), + (135, 175, 215), + (135, 175, 255), + (135, 215, 0), + (135, 215, 95), + (135, 215, 135), + (135, 215, 175), + (135, 215, 215), + (135, 215, 255), + (135, 255, 0), + (135, 255, 95), + (135, 255, 135), + (135, 255, 175), + (135, 255, 215), + (135, 255, 255), + (175, 0, 0), + (175, 0, 95), + (175, 0, 135), + (175, 0, 175), + (175, 0, 215), + (175, 0, 255), + (175, 95, 0), + (175, 95, 95), + (175, 95, 135), + (175, 95, 175), + (175, 95, 215), + (175, 95, 255), + (175, 135, 0), + (175, 135, 95), + (175, 135, 135), + (175, 135, 175), + (175, 135, 215), + (175, 135, 255), + (175, 175, 0), + (175, 175, 95), + (175, 175, 135), + (175, 175, 175), + (175, 175, 215), + (175, 175, 255), + (175, 215, 0), + (175, 215, 95), + (175, 215, 135), + (175, 215, 175), + (175, 215, 215), + (175, 215, 255), + (175, 255, 0), + (175, 255, 95), + (175, 255, 135), + (175, 255, 175), + (175, 255, 215), + (175, 255, 255), + (215, 0, 0), + (215, 0, 95), + (215, 0, 135), + (215, 0, 175), + (215, 0, 215), + (215, 0, 255), + (215, 95, 0), + (215, 95, 95), + (215, 95, 135), + (215, 95, 175), + (215, 95, 215), + (215, 95, 255), + (215, 135, 0), + (215, 135, 95), + (215, 135, 135), + (215, 135, 175), + (215, 135, 215), + (215, 135, 255), + (215, 175, 0), + (215, 175, 95), + (215, 175, 135), + (215, 175, 175), + (215, 175, 215), + (215, 175, 255), + (215, 215, 0), + (215, 215, 95), + (215, 215, 135), + (215, 215, 175), + (215, 215, 215), + (215, 215, 255), + (215, 255, 0), + (215, 255, 95), + (215, 255, 135), + (215, 255, 175), + (215, 255, 215), + (215, 255, 255), + (255, 0, 0), + (255, 0, 95), + (255, 0, 135), 
+ (255, 0, 175), + (255, 0, 215), + (255, 0, 255), + (255, 95, 0), + (255, 95, 95), + (255, 95, 135), + (255, 95, 175), + (255, 95, 215), + (255, 95, 255), + (255, 135, 0), + (255, 135, 95), + (255, 135, 135), + (255, 135, 175), + (255, 135, 215), + (255, 135, 255), + (255, 175, 0), + (255, 175, 95), + (255, 175, 135), + (255, 175, 175), + (255, 175, 215), + (255, 175, 255), + (255, 215, 0), + (255, 215, 95), + (255, 215, 135), + (255, 215, 175), + (255, 215, 215), + (255, 215, 255), + (255, 255, 0), + (255, 255, 95), + (255, 255, 135), + (255, 255, 175), + (255, 255, 215), + (255, 255, 255), + (8, 8, 8), + (18, 18, 18), + (28, 28, 28), + (38, 38, 38), + (48, 48, 48), + (58, 58, 58), + (68, 68, 68), + (78, 78, 78), + (88, 88, 88), + (98, 98, 98), + (108, 108, 108), + (118, 118, 118), + (128, 128, 128), + (138, 138, 138), + (148, 148, 148), + (158, 158, 158), + (168, 168, 168), + (178, 178, 178), + (188, 188, 188), + (198, 198, 198), + (208, 208, 208), + (218, 218, 218), + (228, 228, 228), + (238, 238, 238), + ] +) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py new file mode 100644 index 0000000..4f6d8b2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_pick.py @@ -0,0 +1,17 @@ +from typing import Optional + + +def pick_bool(*values: Optional[bool]) -> bool: + """Pick the first non-none bool or return the last value. + + Args: + *values (bool): Any number of boolean or None values. + + Returns: + bool: First non-none boolean. + """ + assert values, "1 or more values required" + for value in values: + if value is not None: + return value + return bool(value) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py new file mode 100644 index 0000000..5fd5a38 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_ratio.py @@ -0,0 +1,153 @@ +from fractions import Fraction +from math import ceil +from typing import cast, List, Optional, Sequence, Protocol + + +class Edge(Protocol): + """Any object that defines an edge (such as Layout).""" + + size: Optional[int] = None + ratio: int = 1 + minimum_size: int = 1 + + +def ratio_resolve(total: int, edges: Sequence[Edge]) -> List[int]: + """Divide total space to satisfy size, ratio, and minimum_size, constraints. + + The returned list of integers should add up to total in most cases, unless it is + impossible to satisfy all the constraints. For instance, if there are two edges + with a minimum size of 20 each and `total` is 30 then the returned list will be + greater than total. In practice, this would mean that a Layout object would + clip the rows that would overflow the screen height. + + Args: + total (int): Total number of characters. + edges (List[Edge]): Edges within total space. + + Returns: + List[int]: Number of characters for each edge. 
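`pick_bool` from `_pick.py` above simply returns the first non-None flag, else `bool()` of the last argument; a tiny sketch:

    from pip._vendor.rich._pick import pick_bool

    print(pick_bool(None, None, True))    # True, the first explicit value wins
    print(pick_bool(None, False, True))   # False
    print(pick_bool(None, None, None))    # False, falls back to bool(last value)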
+ """ + # Size of edge or None for yet to be determined + sizes = [(edge.size or None) for edge in edges] + + _Fraction = Fraction + + # While any edges haven't been calculated + while None in sizes: + # Get flexible edges and index to map these back on to sizes list + flexible_edges = [ + (index, edge) + for index, (size, edge) in enumerate(zip(sizes, edges)) + if size is None + ] + # Remaining space in total + remaining = total - sum(size or 0 for size in sizes) + if remaining <= 0: + # No room for flexible edges + return [ + ((edge.minimum_size or 1) if size is None else size) + for size, edge in zip(sizes, edges) + ] + # Calculate number of characters in a ratio portion + portion = _Fraction( + remaining, sum((edge.ratio or 1) for _, edge in flexible_edges) + ) + + # If any edges will be less than their minimum, replace size with the minimum + for index, edge in flexible_edges: + if portion * edge.ratio <= edge.minimum_size: + sizes[index] = edge.minimum_size + # New fixed size will invalidate calculations, so we need to repeat the process + break + else: + # Distribute flexible space and compensate for rounding error + # Since edge sizes can only be integers we need to add the remainder + # to the following line + remainder = _Fraction(0) + for index, edge in flexible_edges: + size, remainder = divmod(portion * edge.ratio + remainder, 1) + sizes[index] = size + break + # Sizes now contains integers only + return cast(List[int], sizes) + + +def ratio_reduce( + total: int, ratios: List[int], maximums: List[int], values: List[int] +) -> List[int]: + """Divide an integer total in to parts based on ratios. + + Args: + total (int): The total to divide. + ratios (List[int]): A list of integer ratios. + maximums (List[int]): List of maximums values for each slot. + values (List[int]): List of values + + Returns: + List[int]: A list of integers guaranteed to sum to total. + """ + ratios = [ratio if _max else 0 for ratio, _max in zip(ratios, maximums)] + total_ratio = sum(ratios) + if not total_ratio: + return values[:] + total_remaining = total + result: List[int] = [] + append = result.append + for ratio, maximum, value in zip(ratios, maximums, values): + if ratio and total_ratio > 0: + distributed = min(maximum, round(ratio * total_remaining / total_ratio)) + append(value - distributed) + total_remaining -= distributed + total_ratio -= ratio + else: + append(value) + return result + + +def ratio_distribute( + total: int, ratios: List[int], minimums: Optional[List[int]] = None +) -> List[int]: + """Distribute an integer total in to parts based on ratios. + + Args: + total (int): The total to divide. + ratios (List[int]): A list of integer ratios. + minimums (List[int]): List of minimum values for each slot. + + Returns: + List[int]: A list of integers guaranteed to sum to total. 
+ """ + if minimums: + ratios = [ratio if _min else 0 for ratio, _min in zip(ratios, minimums)] + total_ratio = sum(ratios) + assert total_ratio > 0, "Sum of ratios must be > 0" + + total_remaining = total + distributed_total: List[int] = [] + append = distributed_total.append + if minimums is None: + _minimums = [0] * len(ratios) + else: + _minimums = minimums + for ratio, minimum in zip(ratios, _minimums): + if total_ratio > 0: + distributed = max(minimum, ceil(ratio * total_remaining / total_ratio)) + else: + distributed = total_remaining + append(distributed) + total_ratio -= ratio + total_remaining -= distributed + return distributed_total + + +if __name__ == "__main__": + from dataclasses import dataclass + + @dataclass + class E: + size: Optional[int] = None + ratio: int = 1 + minimum_size: int = 1 + + resolved = ratio_resolve(110, [E(None, 1, 1), E(None, 1, 1), E(None, 1, 1)]) + print(sum(resolved)) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py new file mode 100644 index 0000000..d0bb1fe --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_spinners.py @@ -0,0 +1,482 @@ +""" +Spinners are from: +* cli-spinners: + MIT License + Copyright (c) Sindre Sorhus (sindresorhus.com) + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE + FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. 
+""" + +SPINNERS = { + "dots": { + "interval": 80, + "frames": "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏", + }, + "dots2": {"interval": 80, "frames": "⣾⣽⣻⢿⡿⣟⣯⣷"}, + "dots3": { + "interval": 80, + "frames": "⠋⠙⠚⠞⠖⠦⠴⠲⠳⠓", + }, + "dots4": { + "interval": 80, + "frames": "⠄⠆⠇⠋⠙⠸⠰⠠⠰⠸⠙⠋⠇⠆", + }, + "dots5": { + "interval": 80, + "frames": "⠋⠙⠚⠒⠂⠂⠒⠲⠴⠦⠖⠒⠐⠐⠒⠓⠋", + }, + "dots6": { + "interval": 80, + "frames": "⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠴⠲⠒⠂⠂⠒⠚⠙⠉⠁", + }, + "dots7": { + "interval": 80, + "frames": "⠈⠉⠋⠓⠒⠐⠐⠒⠖⠦⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈", + }, + "dots8": { + "interval": 80, + "frames": "⠁⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈⠈", + }, + "dots9": {"interval": 80, "frames": "⢹⢺⢼⣸⣇⡧⡗⡏"}, + "dots10": {"interval": 80, "frames": "⢄⢂⢁⡁⡈⡐⡠"}, + "dots11": {"interval": 100, "frames": "⠁⠂⠄⡀⢀⠠⠐⠈"}, + "dots12": { + "interval": 80, + "frames": [ + "⢀⠀", + "⡀⠀", + "⠄⠀", + "⢂⠀", + "⡂⠀", + "⠅⠀", + "⢃⠀", + "⡃⠀", + "⠍⠀", + "⢋⠀", + "⡋⠀", + "⠍⠁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⢈⠩", + "⡀⢙", + "⠄⡙", + "⢂⠩", + "⡂⢘", + "⠅⡘", + "⢃⠨", + "⡃⢐", + "⠍⡐", + "⢋⠠", + "⡋⢀", + "⠍⡁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⠈⠩", + "⠀⢙", + "⠀⡙", + "⠀⠩", + "⠀⢘", + "⠀⡘", + "⠀⠨", + "⠀⢐", + "⠀⡐", + "⠀⠠", + "⠀⢀", + "⠀⡀", + ], + }, + "dots8Bit": { + "interval": 80, + "frames": "⠀⠁⠂⠃⠄⠅⠆⠇⡀⡁⡂⡃⡄⡅⡆⡇⠈⠉⠊⠋⠌⠍⠎⠏⡈⡉⡊⡋⡌⡍⡎⡏⠐⠑⠒⠓⠔⠕⠖⠗⡐⡑⡒⡓⡔⡕⡖⡗⠘⠙⠚⠛⠜⠝⠞⠟⡘⡙" + "⡚⡛⡜⡝⡞⡟⠠⠡⠢⠣⠤⠥⠦⠧⡠⡡⡢⡣⡤⡥⡦⡧⠨⠩⠪⠫⠬⠭⠮⠯⡨⡩⡪⡫⡬⡭⡮⡯⠰⠱⠲⠳⠴⠵⠶⠷⡰⡱⡲⡳⡴⡵⡶⡷⠸⠹⠺⠻" + "⠼⠽⠾⠿⡸⡹⡺⡻⡼⡽⡾⡿⢀⢁⢂⢃⢄⢅⢆⢇⣀⣁⣂⣃⣄⣅⣆⣇⢈⢉⢊⢋⢌⢍⢎⢏⣈⣉⣊⣋⣌⣍⣎⣏⢐⢑⢒⢓⢔⢕⢖⢗⣐⣑⣒⣓⣔⣕" + "⣖⣗⢘⢙⢚⢛⢜⢝⢞⢟⣘⣙⣚⣛⣜⣝⣞⣟⢠⢡⢢⢣⢤⢥⢦⢧⣠⣡⣢⣣⣤⣥⣦⣧⢨⢩⢪⢫⢬⢭⢮⢯⣨⣩⣪⣫⣬⣭⣮⣯⢰⢱⢲⢳⢴⢵⢶⢷" + "⣰⣱⣲⣳⣴⣵⣶⣷⢸⢹⢺⢻⢼⢽⢾⢿⣸⣹⣺⣻⣼⣽⣾⣿", + }, + "line": {"interval": 130, "frames": ["-", "\\", "|", "/"]}, + "line2": {"interval": 100, "frames": "⠂-–—–-"}, + "pipe": {"interval": 100, "frames": "┤┘┴└├┌┬┐"}, + "simpleDots": {"interval": 400, "frames": [". ", ".. ", "...", " "]}, + "simpleDotsScrolling": { + "interval": 200, + "frames": [". ", ".. 
", "...", " ..", " .", " "], + }, + "star": {"interval": 70, "frames": "✶✸✹✺✹✷"}, + "star2": {"interval": 80, "frames": "+x*"}, + "flip": { + "interval": 70, + "frames": "___-``'´-___", + }, + "hamburger": {"interval": 100, "frames": "☱☲☴"}, + "growVertical": { + "interval": 120, + "frames": "▁▃▄▅▆▇▆▅▄▃", + }, + "growHorizontal": { + "interval": 120, + "frames": "▏▎▍▌▋▊▉▊▋▌▍▎", + }, + "balloon": {"interval": 140, "frames": " .oO@* "}, + "balloon2": {"interval": 120, "frames": ".oO°Oo."}, + "noise": {"interval": 100, "frames": "▓▒░"}, + "bounce": {"interval": 120, "frames": "⠁⠂⠄⠂"}, + "boxBounce": {"interval": 120, "frames": "▖▘▝▗"}, + "boxBounce2": {"interval": 100, "frames": "▌▀▐▄"}, + "triangle": {"interval": 50, "frames": "◢◣◤◥"}, + "arc": {"interval": 100, "frames": "◜◠◝◞◡◟"}, + "circle": {"interval": 120, "frames": "◡⊙◠"}, + "squareCorners": {"interval": 180, "frames": "◰◳◲◱"}, + "circleQuarters": {"interval": 120, "frames": "◴◷◶◵"}, + "circleHalves": {"interval": 50, "frames": "◐◓◑◒"}, + "squish": {"interval": 100, "frames": "╫╪"}, + "toggle": {"interval": 250, "frames": "⊶⊷"}, + "toggle2": {"interval": 80, "frames": "▫▪"}, + "toggle3": {"interval": 120, "frames": "□■"}, + "toggle4": {"interval": 100, "frames": "■□▪▫"}, + "toggle5": {"interval": 100, "frames": "▮▯"}, + "toggle6": {"interval": 300, "frames": "ဝ၀"}, + "toggle7": {"interval": 80, "frames": "⦾⦿"}, + "toggle8": {"interval": 100, "frames": "◍◌"}, + "toggle9": {"interval": 100, "frames": "◉◎"}, + "toggle10": {"interval": 100, "frames": "㊂㊀㊁"}, + "toggle11": {"interval": 50, "frames": "⧇⧆"}, + "toggle12": {"interval": 120, "frames": "☗☖"}, + "toggle13": {"interval": 80, "frames": "=*-"}, + "arrow": {"interval": 100, "frames": "←↖↑↗→↘↓↙"}, + "arrow2": { + "interval": 80, + "frames": ["⬆️ ", "↗️ ", "➡️ ", "↘️ ", "⬇️ ", "↙️ ", "⬅️ ", "↖️ "], + }, + "arrow3": { + "interval": 120, + "frames": ["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸"], + }, + "bouncingBar": { + "interval": 80, + "frames": [ + "[ ]", + "[= ]", + "[== ]", + "[=== ]", + "[ ===]", + "[ ==]", + "[ =]", + "[ ]", + "[ =]", + "[ ==]", + "[ ===]", + "[====]", + "[=== ]", + "[== ]", + "[= ]", + ], + }, + "bouncingBall": { + "interval": 80, + "frames": [ + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "( ●)", + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "(● )", + ], + }, + "smiley": {"interval": 200, "frames": ["😄 ", "😝 "]}, + "monkey": {"interval": 300, "frames": ["🙈 ", "🙈 ", "🙉 ", "🙊 "]}, + "hearts": {"interval": 100, "frames": ["💛 ", "💙 ", "💜 ", "💚 ", "❤️ "]}, + "clock": { + "interval": 100, + "frames": [ + "🕛 ", + "🕐 ", + "🕑 ", + "🕒 ", + "🕓 ", + "🕔 ", + "🕕 ", + "🕖 ", + "🕗 ", + "🕘 ", + "🕙 ", + "🕚 ", + ], + }, + "earth": {"interval": 180, "frames": ["🌍 ", "🌎 ", "🌏 "]}, + "material": { + "interval": 17, + "frames": [ + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "███████▁▁▁▁▁▁▁▁▁▁▁▁▁", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "██████████▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "█████████████▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁██████████████▁▁▁▁", + "▁▁▁██████████████▁▁▁", + "▁▁▁▁█████████████▁▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁██████████████▁▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁██████████████▁", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁██████████████", + "▁▁▁▁▁▁▁█████████████", + 
"▁▁▁▁▁▁▁█████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁████████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁███████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁██████████", + "▁▁▁▁▁▁▁▁▁▁▁▁████████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁██████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "█▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "██▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "███▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "████▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "█████▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "██████▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "████████▁▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "█████████▁▁▁▁▁▁▁▁▁▁▁", + "███████████▁▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "████████████▁▁▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "██████████████▁▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁██████████████▁▁▁▁▁", + "▁▁▁█████████████▁▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁████████████▁▁▁", + "▁▁▁▁▁▁███████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁█████████▁▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁█████████▁▁", + "▁▁▁▁▁▁▁▁▁▁█████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁████████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁███████▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁███████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁████", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁███", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁██", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁█", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + ], + }, + "moon": { + "interval": 80, + "frames": ["🌑 ", "🌒 ", "🌓 ", "🌔 ", "🌕 ", "🌖 ", "🌗 ", "🌘 "], + }, + "runner": {"interval": 140, "frames": ["🚶 ", "🏃 "]}, + "pong": { + "interval": 80, + "frames": [ + "▐⠂ ▌", + "▐⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂▌", + "▐ ⠠▌", + "▐ ⡀▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐⠠ ▌", + ], + }, + "shark": { + "interval": 120, + "frames": [ + "▐|\\____________▌", + "▐_|\\___________▌", + "▐__|\\__________▌", + "▐___|\\_________▌", + "▐____|\\________▌", + "▐_____|\\_______▌", + "▐______|\\______▌", + "▐_______|\\_____▌", + "▐________|\\____▌", + "▐_________|\\___▌", + "▐__________|\\__▌", + "▐___________|\\_▌", + "▐____________|\\▌", + "▐____________/|▌", + "▐___________/|_▌", + "▐__________/|__▌", + "▐_________/|___▌", + "▐________/|____▌", + "▐_______/|_____▌", + "▐______/|______▌", + "▐_____/|_______▌", + "▐____/|________▌", + "▐___/|_________▌", + "▐__/|__________▌", + "▐_/|___________▌", + "▐/|____________▌", + ], + }, + "dqpb": {"interval": 100, "frames": "dqpb"}, + "weather": { + "interval": 100, + "frames": [ + "☀️ ", + "☀️ ", + "☀️ ", + "🌤 ", + "⛅️ ", + "🌥 ", + "☁️ ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "⛈ ", + "🌨 ", + "🌧 ", + "🌨 ", + "☁️ ", + "🌥 ", + "⛅️ ", + "🌤 ", + "☀️ ", + "☀️ ", + ], + }, + "christmas": {"interval": 400, "frames": "🌲🎄"}, + "grenade": { + "interval": 80, + "frames": [ + "، ", + "′ ", + " ´ ", + " ‾ ", + " ⸌", + " ⸊", + " |", + " ⁎", + " ⁕", + " ෴ ", + " ⁓", + " ", + " ", + " ", + ], + }, + "point": {"interval": 125, "frames": ["∙∙∙", "●∙∙", "∙●∙", "∙∙●", "∙∙∙"]}, + "layer": {"interval": 150, "frames": "-=≡"}, + "betaWave": { + "interval": 80, + "frames": [ + "ρββββββ", + "βρβββββ", + "ββρββββ", + 
"βββρβββ", + "ββββρββ", + "βββββρβ", + "ββββββρ", + ], + }, + "aesthetic": { + "interval": 80, + "frames": [ + "▰▱▱▱▱▱▱", + "▰▰▱▱▱▱▱", + "▰▰▰▱▱▱▱", + "▰▰▰▰▱▱▱", + "▰▰▰▰▰▱▱", + "▰▰▰▰▰▰▱", + "▰▰▰▰▰▰▰", + "▰▱▱▱▱▱▱", + ], + }, +} diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py new file mode 100644 index 0000000..194564e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_stack.py @@ -0,0 +1,16 @@ +from typing import List, TypeVar + +T = TypeVar("T") + + +class Stack(List[T]): + """A small shim over builtin list.""" + + @property + def top(self) -> T: + """Get top of stack.""" + return self[-1] + + def push(self, item: T) -> None: + """Push an item on to the stack (append in stack nomenclature).""" + self.append(item) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py new file mode 100644 index 0000000..a2ca6be --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_timer.py @@ -0,0 +1,19 @@ +""" +Timer context manager, only used in debug. + +""" + +from time import time + +import contextlib +from typing import Generator + + +@contextlib.contextmanager +def timer(subject: str = "time") -> Generator[None, None, None]: + """print the elapsed time. (only used in debugging)""" + start = time() + yield + elapsed = time() - start + elapsed_ms = elapsed * 1000 + print(f"{subject} elapsed {elapsed_ms:.1f}ms") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py new file mode 100644 index 0000000..2eba1b9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_win32_console.py @@ -0,0 +1,661 @@ +"""Light wrapper around the Win32 Console API - this module should only be imported on Windows + +The API that this module wraps is documented at https://docs.microsoft.com/en-us/windows/console/console-functions +""" + +import ctypes +import sys +from typing import Any + +windll: Any = None +if sys.platform == "win32": + windll = ctypes.LibraryLoader(ctypes.WinDLL) +else: + raise ImportError(f"{__name__} can only be imported on Windows") + +import time +from ctypes import Structure, byref, wintypes +from typing import IO, NamedTuple, Type, cast + +from pip._vendor.rich.color import ColorSystem +from pip._vendor.rich.style import Style + +STDOUT = -11 +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 + +COORD = wintypes._COORD + + +class LegacyWindowsError(Exception): + pass + + +class WindowsCoordinates(NamedTuple): + """Coordinates in the Windows Console API are (y, x), not (x, y). + This class is intended to prevent that confusion. + Rows and columns are indexed from 0. + This class can be used in place of wintypes._COORD in arguments and argtypes. + """ + + row: int + col: int + + @classmethod + def from_param(cls, value: "WindowsCoordinates") -> COORD: + """Converts a WindowsCoordinates into a wintypes _COORD structure. + This classmethod is internally called by ctypes to perform the conversion. + + Args: + value (WindowsCoordinates): The input coordinates to convert. + + Returns: + wintypes._COORD: The converted coordinates struct. 
+ """ + return COORD(value.col, value.row) + + +class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + + +class CONSOLE_CURSOR_INFO(ctypes.Structure): + _fields_ = [("dwSize", wintypes.DWORD), ("bVisible", wintypes.BOOL)] + + +_GetStdHandle = windll.kernel32.GetStdHandle +_GetStdHandle.argtypes = [ + wintypes.DWORD, +] +_GetStdHandle.restype = wintypes.HANDLE + + +def GetStdHandle(handle: int = STDOUT) -> wintypes.HANDLE: + """Retrieves a handle to the specified standard device (standard input, standard output, or standard error). + + Args: + handle (int): Integer identifier for the handle. Defaults to -11 (stdout). + + Returns: + wintypes.HANDLE: The handle + """ + return cast(wintypes.HANDLE, _GetStdHandle(handle)) + + +_GetConsoleMode = windll.kernel32.GetConsoleMode +_GetConsoleMode.argtypes = [wintypes.HANDLE, wintypes.LPDWORD] +_GetConsoleMode.restype = wintypes.BOOL + + +def GetConsoleMode(std_handle: wintypes.HANDLE) -> int: + """Retrieves the current input mode of a console's input buffer + or the current output mode of a console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + + Raises: + LegacyWindowsError: If any error occurs while calling the Windows console API. + + Returns: + int: Value representing the current console mode as documented at + https://docs.microsoft.com/en-us/windows/console/getconsolemode#parameters + """ + + console_mode = wintypes.DWORD() + success = bool(_GetConsoleMode(std_handle, console_mode)) + if not success: + raise LegacyWindowsError("Unable to get legacy Windows Console Mode") + return console_mode.value + + +_FillConsoleOutputCharacterW = windll.kernel32.FillConsoleOutputCharacterW +_FillConsoleOutputCharacterW.argtypes = [ + wintypes.HANDLE, + ctypes.c_char, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputCharacterW.restype = wintypes.BOOL + + +def FillConsoleOutputCharacter( + std_handle: wintypes.HANDLE, + char: str, + length: int, + start: WindowsCoordinates, +) -> int: + """Writes a character to the console screen buffer a specified number of times, beginning at the specified coordinates. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + char (str): The character to write. Must be a string of length 1. + length (int): The number of times to write the character. + start (WindowsCoordinates): The coordinates to start writing at. + + Returns: + int: The number of characters written. 
+ """ + character = ctypes.c_char(char.encode()) + num_characters = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + _FillConsoleOutputCharacterW( + std_handle, + character, + num_characters, + start, + byref(num_written), + ) + return num_written.value + + +_FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute +_FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputAttribute.restype = wintypes.BOOL + + +def FillConsoleOutputAttribute( + std_handle: wintypes.HANDLE, + attributes: int, + length: int, + start: WindowsCoordinates, +) -> int: + """Sets the character attributes for a specified number of character cells, + beginning at the specified coordinates in a screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours of the cells. + length (int): The number of cells to set the output attribute of. + start (WindowsCoordinates): The coordinates of the first cell whose attributes are to be set. + + Returns: + int: The number of cells whose attributes were actually set. + """ + num_cells = wintypes.DWORD(length) + style_attrs = wintypes.WORD(attributes) + num_written = wintypes.DWORD(0) + _FillConsoleOutputAttribute( + std_handle, style_attrs, num_cells, start, byref(num_written) + ) + return num_written.value + + +_SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute +_SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, +] +_SetConsoleTextAttribute.restype = wintypes.BOOL + + +def SetConsoleTextAttribute( + std_handle: wintypes.HANDLE, attributes: wintypes.WORD +) -> bool: + """Set the colour attributes for all text written after this function is called. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours. + + + Returns: + bool: True if the attribute was set successfully, otherwise False. + """ + return bool(_SetConsoleTextAttribute(std_handle, attributes)) + + +_GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo +_GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO), +] +_GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + +def GetConsoleScreenBufferInfo( + std_handle: wintypes.HANDLE, +) -> CONSOLE_SCREEN_BUFFER_INFO: + """Retrieves information about the specified console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. 
+ + Returns: + CONSOLE_SCREEN_BUFFER_INFO: A CONSOLE_SCREEN_BUFFER_INFO ctype struct contain information about + screen size, cursor position, colour attributes, and more.""" + console_screen_buffer_info = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(std_handle, byref(console_screen_buffer_info)) + return console_screen_buffer_info + + +_SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition +_SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + cast(Type[COORD], WindowsCoordinates), +] +_SetConsoleCursorPosition.restype = wintypes.BOOL + + +def SetConsoleCursorPosition( + std_handle: wintypes.HANDLE, coords: WindowsCoordinates +) -> bool: + """Set the position of the cursor in the console screen + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + coords (WindowsCoordinates): The coordinates to move the cursor to. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorPosition(std_handle, coords)) + + +_GetConsoleCursorInfo = windll.kernel32.GetConsoleCursorInfo +_GetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_GetConsoleCursorInfo.restype = wintypes.BOOL + + +def GetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Get the cursor info - used to get cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct that receives information + about the console's cursor. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_GetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleCursorInfo = windll.kernel32.SetConsoleCursorInfo +_SetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_SetConsoleCursorInfo.restype = wintypes.BOOL + + +def SetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Set the cursor info - used for adjusting cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct containing the new cursor info. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleTitle = windll.kernel32.SetConsoleTitleW +_SetConsoleTitle.argtypes = [wintypes.LPCWSTR] +_SetConsoleTitle.restype = wintypes.BOOL + + +def SetConsoleTitle(title: str) -> bool: + """Sets the title of the current console window + + Args: + title (str): The new title of the console window. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleTitle(title)) + + +class LegacyWindowsTerm: + """This class allows interaction with the legacy Windows Console API. It should only be used in the context + of environments where virtual terminal processing is not available. However, if it is used in a Windows environment, + the entire API should work. + + Args: + file (IO[str]): The file which the Windows Console API HANDLE is retrieved from, defaults to sys.stdout. 
+ """ + + BRIGHT_BIT = 8 + + # Indices are ANSI color numbers, values are the corresponding Windows Console API color numbers + ANSI_TO_WINDOWS = [ + 0, # black The Windows colours are defined in wincon.h as follows: + 4, # red define FOREGROUND_BLUE 0x0001 -- 0000 0001 + 2, # green define FOREGROUND_GREEN 0x0002 -- 0000 0010 + 6, # yellow define FOREGROUND_RED 0x0004 -- 0000 0100 + 1, # blue define FOREGROUND_INTENSITY 0x0008 -- 0000 1000 + 5, # magenta define BACKGROUND_BLUE 0x0010 -- 0001 0000 + 3, # cyan define BACKGROUND_GREEN 0x0020 -- 0010 0000 + 7, # white define BACKGROUND_RED 0x0040 -- 0100 0000 + 8, # bright black (grey) define BACKGROUND_INTENSITY 0x0080 -- 1000 0000 + 12, # bright red + 10, # bright green + 14, # bright yellow + 9, # bright blue + 13, # bright magenta + 11, # bright cyan + 15, # bright white + ] + + def __init__(self, file: "IO[str]") -> None: + handle = GetStdHandle(STDOUT) + self._handle = handle + default_text = GetConsoleScreenBufferInfo(handle).wAttributes + self._default_text = default_text + + self._default_fore = default_text & 7 + self._default_back = (default_text >> 4) & 7 + self._default_attrs = self._default_fore | (self._default_back << 4) + + self._file = file + self.write = file.write + self.flush = file.flush + + @property + def cursor_position(self) -> WindowsCoordinates: + """Returns the current position of the cursor (0-based) + + Returns: + WindowsCoordinates: The current cursor position. + """ + coord: COORD = GetConsoleScreenBufferInfo(self._handle).dwCursorPosition + return WindowsCoordinates(row=coord.Y, col=coord.X) + + @property + def screen_size(self) -> WindowsCoordinates: + """Returns the current size of the console screen buffer, in character columns and rows + + Returns: + WindowsCoordinates: The width and height of the screen as WindowsCoordinates. + """ + screen_size: COORD = GetConsoleScreenBufferInfo(self._handle).dwSize + return WindowsCoordinates(row=screen_size.Y, col=screen_size.X) + + def write_text(self, text: str) -> None: + """Write text directly to the terminal without any modification of styles + + Args: + text (str): The text to write to the console + """ + self.write(text) + self.flush() + + def write_styled(self, text: str, style: Style) -> None: + """Write styled text to the terminal. + + Args: + text (str): The text to write + style (Style): The style of the text + """ + color = style.color + bgcolor = style.bgcolor + if style.reverse: + color, bgcolor = bgcolor, color + + if color: + fore = color.downgrade(ColorSystem.WINDOWS).number + fore = fore if fore is not None else 7 # Default to ANSI 7: White + if style.bold: + fore = fore | self.BRIGHT_BIT + if style.dim: + fore = fore & ~self.BRIGHT_BIT + fore = self.ANSI_TO_WINDOWS[fore] + else: + fore = self._default_fore + + if bgcolor: + back = bgcolor.downgrade(ColorSystem.WINDOWS).number + back = back if back is not None else 0 # Default to ANSI 0: Black + back = self.ANSI_TO_WINDOWS[back] + else: + back = self._default_back + + assert fore is not None + assert back is not None + + SetConsoleTextAttribute( + self._handle, attributes=ctypes.c_ushort(fore | (back << 4)) + ) + self.write_text(text) + SetConsoleTextAttribute(self._handle, attributes=self._default_text) + + def move_cursor_to(self, new_position: WindowsCoordinates) -> None: + """Set the position of the cursor + + Args: + new_position (WindowsCoordinates): The WindowsCoordinates representing the new position of the cursor. 
+ """ + if new_position.col < 0 or new_position.row < 0: + return + SetConsoleCursorPosition(self._handle, coords=new_position) + + def erase_line(self) -> None: + """Erase all content on the line the cursor is currently located at""" + screen_size = self.screen_size + cursor_position = self.cursor_position + cells_to_erase = screen_size.col + start_coordinates = WindowsCoordinates(row=cursor_position.row, col=0) + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=start_coordinates + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=start_coordinates, + ) + + def erase_end_of_line(self) -> None: + """Erase all content from the cursor position to the end of that line""" + cursor_position = self.cursor_position + cells_to_erase = self.screen_size.col - cursor_position.col + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=cursor_position + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=cursor_position, + ) + + def erase_start_of_line(self) -> None: + """Erase all content from the cursor position to the start of that line""" + row, col = self.cursor_position + start = WindowsCoordinates(row, 0) + FillConsoleOutputCharacter(self._handle, " ", length=col, start=start) + FillConsoleOutputAttribute( + self._handle, self._default_attrs, length=col, start=start + ) + + def move_cursor_up(self) -> None: + """Move the cursor up a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row - 1, col=cursor_position.col + ), + ) + + def move_cursor_down(self) -> None: + """Move the cursor down a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row + 1, + col=cursor_position.col, + ), + ) + + def move_cursor_forward(self) -> None: + """Move the cursor forward a single cell. Wrap to the next line if required.""" + row, col = self.cursor_position + if col == self.screen_size.col - 1: + row += 1 + col = 0 + else: + col += 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def move_cursor_to_column(self, column: int) -> None: + """Move cursor to the column specified by the zero-based column index, staying on the same row + + Args: + column (int): The zero-based column index to move the cursor to. + """ + row, _ = self.cursor_position + SetConsoleCursorPosition(self._handle, coords=WindowsCoordinates(row, column)) + + def move_cursor_backward(self) -> None: + """Move the cursor backward a single cell. 
Wrap to the previous line if required.""" + row, col = self.cursor_position + if col == 0: + row -= 1 + col = self.screen_size.col - 1 + else: + col -= 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def hide_cursor(self) -> None: + """Hide the cursor""" + current_cursor_size = self._get_cursor_size() + invisible_cursor = CONSOLE_CURSOR_INFO(dwSize=current_cursor_size, bVisible=0) + SetConsoleCursorInfo(self._handle, cursor_info=invisible_cursor) + + def show_cursor(self) -> None: + """Show the cursor""" + current_cursor_size = self._get_cursor_size() + visible_cursor = CONSOLE_CURSOR_INFO(dwSize=current_cursor_size, bVisible=1) + SetConsoleCursorInfo(self._handle, cursor_info=visible_cursor) + + def set_title(self, title: str) -> None: + """Set the title of the terminal window + + Args: + title (str): The new title of the console window + """ + assert len(title) < 255, "Console title must be less than 255 characters" + SetConsoleTitle(title) + + def _get_cursor_size(self) -> int: + """Get the percentage of the character cell that is filled by the cursor""" + cursor_info = CONSOLE_CURSOR_INFO() + GetConsoleCursorInfo(self._handle, cursor_info=cursor_info) + return int(cursor_info.dwSize) + + +if __name__ == "__main__": + handle = GetStdHandle() + + from pip._vendor.rich.console import Console + + console = Console() + + term = LegacyWindowsTerm(sys.stdout) + term.set_title("Win32 Console Examples") + + style = Style(color="black", bgcolor="red") + + heading = Style.parse("black on green") + + # Check colour output + console.rule("Checking colour output") + console.print("[on red]on red!") + console.print("[blue]blue!") + console.print("[yellow]yellow!") + console.print("[bold yellow]bold yellow!") + console.print("[bright_yellow]bright_yellow!") + console.print("[dim bright_yellow]dim bright_yellow!") + console.print("[italic cyan]italic cyan!") + console.print("[bold white on blue]bold white on blue!") + console.print("[reverse bold white on blue]reverse bold white on blue!") + console.print("[bold black on cyan]bold black on cyan!") + console.print("[black on green]black on green!") + console.print("[blue on green]blue on green!") + console.print("[white on black]white on black!") + console.print("[black on white]black on white!") + console.print("[#1BB152 on #DA812D]#1BB152 on #DA812D!") + + # Check cursor movement + console.rule("Checking cursor movement") + console.print() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("went back and wrapped to prev line") + time.sleep(1) + term.move_cursor_up() + term.write_text("we go up") + time.sleep(1) + term.move_cursor_down() + term.write_text("and down") + time.sleep(1) + term.move_cursor_up() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went up and back 2") + time.sleep(1) + term.move_cursor_down() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went down and back 2") + time.sleep(1) + + # Check erasing of lines + term.hide_cursor() + console.print() + console.rule("Checking line erasing") + console.print("\n...Deleting to the start of the line...") + term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + term.move_cursor_to_column(16) + term.write_styled("<", Style.parse("black on red")) + term.move_cursor_backward() + time.sleep(1) + term.erase_start_of_line() + time.sleep(1) + + console.print("\n\n...And to the end of the line...") + 
term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + + term.move_cursor_to_column(16) + term.write_styled(">", Style.parse("black on red")) + time.sleep(1) + term.erase_end_of_line() + time.sleep(1) + + console.print("\n\n...Now the whole line will be erased...") + term.write_styled("I'm going to disappear!", style=Style.parse("black on cyan")) + time.sleep(1) + term.erase_line() + + term.show_cursor() + print("\n") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py new file mode 100644 index 0000000..7520a9f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows.py @@ -0,0 +1,71 @@ +import sys +from dataclasses import dataclass + + +@dataclass +class WindowsConsoleFeatures: + """Windows features available.""" + + vt: bool = False + """The console supports VT codes.""" + truecolor: bool = False + """The console supports truecolor.""" + + +try: + import ctypes + from ctypes import LibraryLoader + + if sys.platform == "win32": + windll = LibraryLoader(ctypes.WinDLL) + else: + windll = None + raise ImportError("Not windows") + + from pip._vendor.rich._win32_console import ( + ENABLE_VIRTUAL_TERMINAL_PROCESSING, + GetConsoleMode, + GetStdHandle, + LegacyWindowsError, + ) + +except (AttributeError, ImportError, ValueError): + # Fallback if we can't load the Windows DLL + def get_windows_console_features() -> WindowsConsoleFeatures: + features = WindowsConsoleFeatures() + return features + +else: + + def get_windows_console_features() -> WindowsConsoleFeatures: + """Get windows console features. + + Returns: + WindowsConsoleFeatures: An instance of WindowsConsoleFeatures. + """ + handle = GetStdHandle() + try: + console_mode = GetConsoleMode(handle) + success = True + except LegacyWindowsError: + console_mode = 0 + success = False + vt = bool(success and console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) + truecolor = False + if vt: + win_version = sys.getwindowsversion() + truecolor = win_version.major > 10 or ( + win_version.major == 10 and win_version.build >= 15063 + ) + features = WindowsConsoleFeatures(vt=vt, truecolor=truecolor) + return features + + +if __name__ == "__main__": + import platform + + features = get_windows_console_features() + from pip._vendor.rich import print + + print(f'platform="{platform.system()}"') + print(repr(features)) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py new file mode 100644 index 0000000..5ece056 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_windows_renderer.py @@ -0,0 +1,56 @@ +from typing import Iterable, Sequence, Tuple, cast + +from pip._vendor.rich._win32_console import LegacyWindowsTerm, WindowsCoordinates +from pip._vendor.rich.segment import ControlCode, ControlType, Segment + + +def legacy_windows_render(buffer: Iterable[Segment], term: LegacyWindowsTerm) -> None: + """Makes appropriate Windows Console API calls based on the segments in the buffer. + + Args: + buffer (Iterable[Segment]): Iterable of Segments to convert to Win32 API calls. + term (LegacyWindowsTerm): Used to call the Windows Console API. 
+ """ + for text, style, control in buffer: + if not control: + if style: + term.write_styled(text, style) + else: + term.write_text(text) + else: + control_codes: Sequence[ControlCode] = control + for control_code in control_codes: + control_type = control_code[0] + if control_type == ControlType.CURSOR_MOVE_TO: + _, x, y = cast(Tuple[ControlType, int, int], control_code) + term.move_cursor_to(WindowsCoordinates(row=y - 1, col=x - 1)) + elif control_type == ControlType.CARRIAGE_RETURN: + term.write_text("\r") + elif control_type == ControlType.HOME: + term.move_cursor_to(WindowsCoordinates(0, 0)) + elif control_type == ControlType.CURSOR_UP: + term.move_cursor_up() + elif control_type == ControlType.CURSOR_DOWN: + term.move_cursor_down() + elif control_type == ControlType.CURSOR_FORWARD: + term.move_cursor_forward() + elif control_type == ControlType.CURSOR_BACKWARD: + term.move_cursor_backward() + elif control_type == ControlType.CURSOR_MOVE_TO_COLUMN: + _, column = cast(Tuple[ControlType, int], control_code) + term.move_cursor_to_column(column - 1) + elif control_type == ControlType.HIDE_CURSOR: + term.hide_cursor() + elif control_type == ControlType.SHOW_CURSOR: + term.show_cursor() + elif control_type == ControlType.ERASE_IN_LINE: + _, mode = cast(Tuple[ControlType, int], control_code) + if mode == 0: + term.erase_end_of_line() + elif mode == 1: + term.erase_start_of_line() + elif mode == 2: + term.erase_line() + elif control_type == ControlType.SET_WINDOW_TITLE: + _, title = cast(Tuple[ControlType, str], control_code) + term.set_title(title) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py new file mode 100644 index 0000000..2e94ff6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/_wrap.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +import re +from typing import Iterable + +from ._loop import loop_last +from .cells import cell_len, chop_cells + +re_word = re.compile(r"\s*\S+\s*") + + +def words(text: str) -> Iterable[tuple[int, int, str]]: + """Yields each word from the text as a tuple + containing (start_index, end_index, word). A "word" in this context may + include the actual word and any whitespace to the right. + """ + position = 0 + word_match = re_word.match(text, position) + while word_match is not None: + start, end = word_match.span() + word = word_match.group(0) + yield start, end, word + word_match = re_word.match(text, end) + + +def divide_line(text: str, width: int, fold: bool = True) -> list[int]: + """Given a string of text, and a width (measured in cells), return a list + of cell offsets which the string should be split at in order for it to fit + within the given width. + + Args: + text: The text to examine. + width: The available cell width. + fold: If True, words longer than `width` will be folded onto a new line. + + Returns: + A list of indices to break the line at. + """ + break_positions: list[int] = [] # offsets to insert the breaks at + append = break_positions.append + cell_offset = 0 + _cell_len = cell_len + + for start, _end, word in words(text): + word_length = _cell_len(word.rstrip()) + remaining_space = width - cell_offset + word_fits_remaining_space = remaining_space >= word_length + + if word_fits_remaining_space: + # Simplest case - the word fits within the remaining width for this line. + cell_offset += _cell_len(word) + else: + # Not enough space remaining for this word on the current line. 
+ if word_length > width: + # The word doesn't fit on any line, so we can't simply + # place it on the next line... + if fold: + # Fold the word across multiple lines. + folded_word = chop_cells(word, width=width) + for last, line in loop_last(folded_word): + if start: + append(start) + if last: + cell_offset = _cell_len(line) + else: + start += len(line) + else: + # Folding isn't allowed, so crop the word. + if start: + append(start) + cell_offset = _cell_len(word) + elif cell_offset and start: + # The word doesn't fit within the remaining space on the current + # line, but it *can* fit on to the next (empty) line. + append(start) + cell_offset = _cell_len(word) + + return break_positions + + +if __name__ == "__main__": # pragma: no cover + from .console import Console + + console = Console(width=10) + console.print("12345 abcdefghijklmnopqrstuvwyxzABCDEFGHIJKLMNOPQRSTUVWXYZ 12345") + print(chop_cells("abcdefghijklmnopqrstuvwxyz", 10)) + + console = Console(width=20) + console.rule() + console.print("TextualはPythonの高速アプリケーション開発フレームワークです") + + console.rule() + console.print("アプリケーションは1670万色を使用でき") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py new file mode 100644 index 0000000..e6e498e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/abc.py @@ -0,0 +1,33 @@ +from abc import ABC + + +class RichRenderable(ABC): + """An abstract base class for Rich renderables. + + Note that there is no need to extend this class, the intended use is to check if an + object supports the Rich renderable protocol. For example:: + + if isinstance(my_object, RichRenderable): + console.print(my_object) + + """ + + @classmethod + def __subclasshook__(cls, other: type) -> bool: + """Check if this class supports the rich render protocol.""" + return hasattr(other, "__rich_console__") or hasattr(other, "__rich__") + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.text import Text + + t = Text() + print(isinstance(Text, RichRenderable)) + print(isinstance(t, RichRenderable)) + + class Foo: + pass + + f = Foo() + print(isinstance(f, RichRenderable)) + print(isinstance("", RichRenderable)) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/align.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/align.py new file mode 100644 index 0000000..e65dc5b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/align.py @@ -0,0 +1,306 @@ +from itertools import chain +from typing import TYPE_CHECKING, Iterable, Optional, Literal + +from .constrain import Constrain +from .jupyter import JupyterMixin +from .measure import Measurement +from .segment import Segment +from .style import StyleType + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderableType, RenderResult + +AlignMethod = Literal["left", "center", "right"] +VerticalAlignMethod = Literal["top", "middle", "bottom"] + + +class Align(JupyterMixin): + """Align a renderable by adding spaces if necessary. + + Args: + renderable (RenderableType): A console renderable. + align (AlignMethod): One of "left", "center", or "right"" + style (StyleType, optional): An optional style to apply to the background. + vertical (Optional[VerticalAlignMethod], optional): Optional vertical align, one of "top", "middle", or "bottom". Defaults to None. + pad (bool, optional): Pad the right with spaces. Defaults to True. + width (int, optional): Restrict contents to given width, or None to use default width. 
Defaults to None. + height (int, optional): Set height of align renderable, or None to fit to contents. Defaults to None. + + Raises: + ValueError: if ``align`` is not one of the expected values. + """ + + def __init__( + self, + renderable: "RenderableType", + align: AlignMethod = "left", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> None: + if align not in ("left", "center", "right"): + raise ValueError( + f'invalid value for align, expected "left", "center", or "right" (not {align!r})' + ) + if vertical is not None and vertical not in ("top", "middle", "bottom"): + raise ValueError( + f'invalid value for vertical, expected "top", "middle", or "bottom" (not {vertical!r})' + ) + self.renderable = renderable + self.align = align + self.style = style + self.vertical = vertical + self.pad = pad + self.width = width + self.height = height + + def __repr__(self) -> str: + return f"Align({self.renderable!r}, {self.align!r})" + + @classmethod + def left( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the left.""" + return cls( + renderable, + "left", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + @classmethod + def center( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the center.""" + return cls( + renderable, + "center", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + @classmethod + def right( + cls, + renderable: "RenderableType", + style: Optional[StyleType] = None, + *, + vertical: Optional[VerticalAlignMethod] = None, + pad: bool = True, + width: Optional[int] = None, + height: Optional[int] = None, + ) -> "Align": + """Align a renderable to the right.""" + return cls( + renderable, + "right", + style=style, + vertical=vertical, + pad=pad, + width=width, + height=height, + ) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + align = self.align + width = console.measure(self.renderable, options=options).maximum + rendered = console.render( + Constrain( + self.renderable, width if self.width is None else min(width, self.width) + ), + options.update(height=None), + ) + lines = list(Segment.split_lines(rendered)) + width, height = Segment.get_shape(lines) + lines = Segment.set_shape(lines, width, height) + new_line = Segment.line() + excess_space = options.max_width - width + style = console.get_style(self.style) if self.style is not None else None + + def generate_segments() -> Iterable[Segment]: + if excess_space <= 0: + # Exact fit + for line in lines: + yield from line + yield new_line + + elif align == "left": + # Pad on the right + pad = Segment(" " * excess_space, style) if self.pad else None + for line in lines: + yield from line + if pad: + yield pad + yield new_line + + elif align == "center": + # Pad left and right + left = excess_space // 2 + pad = Segment(" " * left, style) + pad_right = ( + Segment(" " * (excess_space - left), style) if self.pad else None + ) + for line in lines: + if left: + yield pad + 
yield from line + if pad_right: + yield pad_right + yield new_line + + elif align == "right": + # Padding on left + pad = Segment(" " * excess_space, style) + for line in lines: + yield pad + yield from line + yield new_line + + blank_line = ( + Segment(f"{' ' * (self.width or options.max_width)}\n", style) + if self.pad + else Segment("\n") + ) + + def blank_lines(count: int) -> Iterable[Segment]: + if count > 0: + for _ in range(count): + yield blank_line + + vertical_height = self.height or options.height + iter_segments: Iterable[Segment] + if self.vertical and vertical_height is not None: + if self.vertical == "top": + bottom_space = vertical_height - height + iter_segments = chain(generate_segments(), blank_lines(bottom_space)) + elif self.vertical == "middle": + top_space = (vertical_height - height) // 2 + bottom_space = vertical_height - top_space - height + iter_segments = chain( + blank_lines(top_space), + generate_segments(), + blank_lines(bottom_space), + ) + else: # self.vertical == "bottom": + top_space = vertical_height - height + iter_segments = chain(blank_lines(top_space), generate_segments()) + else: + iter_segments = generate_segments() + if self.style: + style = console.get_style(self.style) + iter_segments = Segment.apply_style(iter_segments, style) + yield from iter_segments + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> Measurement: + measurement = Measurement.get(console, options, self.renderable) + return measurement + + +class VerticalCenter(JupyterMixin): + """Vertically aligns a renderable. + + Warn: + This class is deprecated and may be removed in a future version. Use Align class with + `vertical="middle"`. + + Args: + renderable (RenderableType): A renderable object. + style (StyleType, optional): An optional style to apply to the background. Defaults to None. 
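+
+    Example of the suggested replacement (illustrative)::
+
+        Align(renderable, vertical="middle")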
+ """ + + def __init__( + self, + renderable: "RenderableType", + style: Optional[StyleType] = None, + ) -> None: + self.renderable = renderable + self.style = style + + def __repr__(self) -> str: + return f"VerticalCenter({self.renderable!r})" + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + style = console.get_style(self.style) if self.style is not None else None + lines = console.render_lines( + self.renderable, options.update(height=None), pad=False + ) + width, _height = Segment.get_shape(lines) + new_line = Segment.line() + height = options.height or options.size.height + top_space = (height - len(lines)) // 2 + bottom_space = height - top_space - len(lines) + blank_line = Segment(f"{' ' * width}", style) + + def blank_lines(count: int) -> Iterable[Segment]: + for _ in range(count): + yield blank_line + yield new_line + + if top_space > 0: + yield from blank_lines(top_space) + for line in lines: + yield from line + yield new_line + if bottom_space > 0: + yield from blank_lines(bottom_space) + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> Measurement: + measurement = Measurement.get(console, options, self.renderable) + return measurement + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console, Group + from pip._vendor.rich.highlighter import ReprHighlighter + from pip._vendor.rich.panel import Panel + + highlighter = ReprHighlighter() + console = Console() + + panel = Panel( + Group( + Align.left(highlighter("align='left'")), + Align.center(highlighter("align='center'")), + Align.right(highlighter("align='right'")), + ), + width=60, + style="on dark_blue", + title="Align", + ) + + console.print( + Align.center(panel, vertical="middle", style="on red", height=console.height) + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py new file mode 100644 index 0000000..7de86ce --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/ansi.py @@ -0,0 +1,241 @@ +import re +import sys +from contextlib import suppress +from typing import Iterable, NamedTuple, Optional + +from .color import Color +from .style import Style +from .text import Text + +re_ansi = re.compile( + r""" +(?:\x1b[0-?])| +(?:\x1b\](.*?)\x1b\\)| +(?:\x1b([(@-Z\\-_]|\[[0-?]*[ -/]*[@-~])) +""", + re.VERBOSE, +) + + +class _AnsiToken(NamedTuple): + """Result of ansi tokenized string.""" + + plain: str = "" + sgr: Optional[str] = "" + osc: Optional[str] = "" + + +def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]: + """Tokenize a string in to plain text and ANSI codes. + + Args: + ansi_text (str): A String containing ANSI codes. 
+ + Yields: + AnsiToken: A named tuple of (plain, sgr, osc) + """ + + position = 0 + sgr: Optional[str] + osc: Optional[str] + for match in re_ansi.finditer(ansi_text): + start, end = match.span(0) + osc, sgr = match.groups() + if start > position: + yield _AnsiToken(ansi_text[position:start]) + if sgr: + if sgr == "(": + position = end + 1 + continue + if sgr.endswith("m"): + yield _AnsiToken("", sgr[1:-1], osc) + else: + yield _AnsiToken("", sgr, osc) + position = end + if position < len(ansi_text): + yield _AnsiToken(ansi_text[position:]) + + +SGR_STYLE_MAP = { + 1: "bold", + 2: "dim", + 3: "italic", + 4: "underline", + 5: "blink", + 6: "blink2", + 7: "reverse", + 8: "conceal", + 9: "strike", + 21: "underline2", + 22: "not dim not bold", + 23: "not italic", + 24: "not underline", + 25: "not blink", + 26: "not blink2", + 27: "not reverse", + 28: "not conceal", + 29: "not strike", + 30: "color(0)", + 31: "color(1)", + 32: "color(2)", + 33: "color(3)", + 34: "color(4)", + 35: "color(5)", + 36: "color(6)", + 37: "color(7)", + 39: "default", + 40: "on color(0)", + 41: "on color(1)", + 42: "on color(2)", + 43: "on color(3)", + 44: "on color(4)", + 45: "on color(5)", + 46: "on color(6)", + 47: "on color(7)", + 49: "on default", + 51: "frame", + 52: "encircle", + 53: "overline", + 54: "not frame not encircle", + 55: "not overline", + 90: "color(8)", + 91: "color(9)", + 92: "color(10)", + 93: "color(11)", + 94: "color(12)", + 95: "color(13)", + 96: "color(14)", + 97: "color(15)", + 100: "on color(8)", + 101: "on color(9)", + 102: "on color(10)", + 103: "on color(11)", + 104: "on color(12)", + 105: "on color(13)", + 106: "on color(14)", + 107: "on color(15)", +} + + +class AnsiDecoder: + """Translate ANSI code in to styled Text.""" + + def __init__(self) -> None: + self.style = Style.null() + + def decode(self, terminal_text: str) -> Iterable[Text]: + """Decode ANSI codes in an iterable of lines. + + Args: + lines (Iterable[str]): An iterable of lines of terminal output. + + Yields: + Text: Marked up Text. + """ + for line in terminal_text.splitlines(): + yield self.decode_line(line) + + def decode_line(self, line: str) -> Text: + """Decode a line containing ansi codes. + + Args: + line (str): A line of terminal output. + + Returns: + Text: A Text instance marked up according to ansi codes. 
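+
+        Example (illustrative)::
+
+            text = AnsiDecoder().decode_line("\x1b[1mBold\x1b[0m plain")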
+ """ + from_ansi = Color.from_ansi + from_rgb = Color.from_rgb + _Style = Style + text = Text() + append = text.append + line = line.rsplit("\r", 1)[-1] + for plain_text, sgr, osc in _ansi_tokenize(line): + if plain_text: + append(plain_text, self.style or None) + elif osc is not None: + if osc.startswith("8;"): + _params, semicolon, link = osc[2:].partition(";") + if semicolon: + self.style = self.style.update_link(link or None) + elif sgr is not None: + # Translate in to semi-colon separated codes + # Ignore invalid codes, because we want to be lenient + codes = [ + min(255, int(_code) if _code else 0) + for _code in sgr.split(";") + if _code.isdigit() or _code == "" + ] + iter_codes = iter(codes) + for code in iter_codes: + if code == 0: + # reset + self.style = _Style.null() + elif code in SGR_STYLE_MAP: + # styles + self.style += _Style.parse(SGR_STYLE_MAP[code]) + elif code == 38: + #  Foreground + with suppress(StopIteration): + color_type = next(iter_codes) + if color_type == 5: + self.style += _Style.from_color( + from_ansi(next(iter_codes)) + ) + elif color_type == 2: + self.style += _Style.from_color( + from_rgb( + next(iter_codes), + next(iter_codes), + next(iter_codes), + ) + ) + elif code == 48: + # Background + with suppress(StopIteration): + color_type = next(iter_codes) + if color_type == 5: + self.style += _Style.from_color( + None, from_ansi(next(iter_codes)) + ) + elif color_type == 2: + self.style += _Style.from_color( + None, + from_rgb( + next(iter_codes), + next(iter_codes), + next(iter_codes), + ), + ) + + return text + + +if sys.platform != "win32" and __name__ == "__main__": # pragma: no cover + import io + import os + import pty + import sys + + decoder = AnsiDecoder() + + stdout = io.BytesIO() + + def read(fd: int) -> bytes: + data = os.read(fd, 1024) + stdout.write(data) + return data + + pty.spawn(sys.argv[1:], read) + + from .console import Console + + console = Console(record=True) + + stdout_result = stdout.getvalue().decode("utf-8") + print(stdout_result) + + for line in decoder.decode(stdout_result): + console.print(line) + + console.save_html("stdout.html") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py new file mode 100644 index 0000000..022284b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/bar.py @@ -0,0 +1,93 @@ +from typing import Optional, Union + +from .color import Color +from .console import Console, ConsoleOptions, RenderResult +from .jupyter import JupyterMixin +from .measure import Measurement +from .segment import Segment +from .style import Style + +# There are left-aligned characters for 1/8 to 7/8, but +# the right-aligned characters exist only for 1/8 and 4/8. +BEGIN_BLOCK_ELEMENTS = ["█", "█", "█", "▐", "▐", "▐", "▕", "▕"] +END_BLOCK_ELEMENTS = [" ", "▏", "▎", "▍", "▌", "▋", "▊", "▉"] +FULL_BLOCK = "█" + + +class Bar(JupyterMixin): + """Renders a solid block bar. + + Args: + size (float): Value for the end of the bar. + begin (float): Begin point (between 0 and size, inclusive). + end (float): End point (between 0 and size, inclusive). + width (int, optional): Width of the bar, or ``None`` for maximum width. Defaults to None. + color (Union[Color, str], optional): Color of the bar. Defaults to "default". + bgcolor (Union[Color, str], optional): Color of bar background. Defaults to "default". 
+ """ + + def __init__( + self, + size: float, + begin: float, + end: float, + *, + width: Optional[int] = None, + color: Union[Color, str] = "default", + bgcolor: Union[Color, str] = "default", + ): + self.size = size + self.begin = max(begin, 0) + self.end = min(end, size) + self.width = width + self.style = Style(color=color, bgcolor=bgcolor) + + def __repr__(self) -> str: + return f"Bar({self.size}, {self.begin}, {self.end})" + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + width = min( + self.width if self.width is not None else options.max_width, + options.max_width, + ) + + if self.begin >= self.end: + yield Segment(" " * width, self.style) + yield Segment.line() + return + + prefix_complete_eights = int(width * 8 * self.begin / self.size) + prefix_bar_count = prefix_complete_eights // 8 + prefix_eights_count = prefix_complete_eights % 8 + + body_complete_eights = int(width * 8 * self.end / self.size) + body_bar_count = body_complete_eights // 8 + body_eights_count = body_complete_eights % 8 + + # When start and end fall into the same cell, we ideally should render + # a symbol that's "center-aligned", but there is no good symbol in Unicode. + # In this case, we fall back to right-aligned block symbol for simplicity. + + prefix = " " * prefix_bar_count + if prefix_eights_count: + prefix += BEGIN_BLOCK_ELEMENTS[prefix_eights_count] + + body = FULL_BLOCK * body_bar_count + if body_eights_count: + body += END_BLOCK_ELEMENTS[body_eights_count] + + suffix = " " * (width - len(body)) + + yield Segment(prefix + body[len(prefix) :] + suffix, self.style) + yield Segment.line() + + def __rich_measure__( + self, console: Console, options: ConsoleOptions + ) -> Measurement: + return ( + Measurement(self.width, self.width) + if self.width is not None + else Measurement(4, options.max_width) + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/box.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/box.py new file mode 100644 index 0000000..3f330cc --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/box.py @@ -0,0 +1,474 @@ +from typing import TYPE_CHECKING, Iterable, List, Literal + + +from ._loop import loop_last + +if TYPE_CHECKING: + from pip._vendor.rich.console import ConsoleOptions + + +class Box: + """Defines characters to render boxes. + + ┌─┬┐ top + │ ││ head + ├─┼┤ head_row + │ ││ mid + ├─┼┤ row + ├─┼┤ foot_row + │ ││ foot + └─┴┘ bottom + + Args: + box (str): Characters making up box. + ascii (bool, optional): True if this box uses ascii characters only. Default is False. 
+ """ + + def __init__(self, box: str, *, ascii: bool = False) -> None: + self._box = box + self.ascii = ascii + line1, line2, line3, line4, line5, line6, line7, line8 = box.splitlines() + # top + self.top_left, self.top, self.top_divider, self.top_right = iter(line1) + # head + self.head_left, _, self.head_vertical, self.head_right = iter(line2) + # head_row + ( + self.head_row_left, + self.head_row_horizontal, + self.head_row_cross, + self.head_row_right, + ) = iter(line3) + + # mid + self.mid_left, _, self.mid_vertical, self.mid_right = iter(line4) + # row + self.row_left, self.row_horizontal, self.row_cross, self.row_right = iter(line5) + # foot_row + ( + self.foot_row_left, + self.foot_row_horizontal, + self.foot_row_cross, + self.foot_row_right, + ) = iter(line6) + # foot + self.foot_left, _, self.foot_vertical, self.foot_right = iter(line7) + # bottom + self.bottom_left, self.bottom, self.bottom_divider, self.bottom_right = iter( + line8 + ) + + def __repr__(self) -> str: + return "Box(...)" + + def __str__(self) -> str: + return self._box + + def substitute(self, options: "ConsoleOptions", safe: bool = True) -> "Box": + """Substitute this box for another if it won't render due to platform issues. + + Args: + options (ConsoleOptions): Console options used in rendering. + safe (bool, optional): Substitute this for another Box if there are known problems + displaying on the platform (currently only relevant on Windows). Default is True. + + Returns: + Box: A different Box or the same Box. + """ + box = self + if options.legacy_windows and safe: + box = LEGACY_WINDOWS_SUBSTITUTIONS.get(box, box) + if options.ascii_only and not box.ascii: + box = ASCII + return box + + def get_plain_headed_box(self) -> "Box": + """If this box uses special characters for the borders of the header, then + return the equivalent box that does not. + + Returns: + Box: The most similar Box that doesn't use header-specific box characters. + If the current Box already satisfies this criterion, then it's returned. + """ + return PLAIN_HEADED_SUBSTITUTIONS.get(self, self) + + def get_top(self, widths: Iterable[int]) -> str: + """Get the top of a simple box. + + Args: + widths (List[int]): Widths of columns. + + Returns: + str: A string of box characters. + """ + + parts: List[str] = [] + append = parts.append + append(self.top_left) + for last, width in loop_last(widths): + append(self.top * width) + if not last: + append(self.top_divider) + append(self.top_right) + return "".join(parts) + + def get_row( + self, + widths: Iterable[int], + level: Literal["head", "row", "foot", "mid"] = "row", + edge: bool = True, + ) -> str: + """Get the top of a simple box. + + Args: + width (List[int]): Widths of columns. + + Returns: + str: A string of box characters. 
+ """ + if level == "head": + left = self.head_row_left + horizontal = self.head_row_horizontal + cross = self.head_row_cross + right = self.head_row_right + elif level == "row": + left = self.row_left + horizontal = self.row_horizontal + cross = self.row_cross + right = self.row_right + elif level == "mid": + left = self.mid_left + horizontal = " " + cross = self.mid_vertical + right = self.mid_right + elif level == "foot": + left = self.foot_row_left + horizontal = self.foot_row_horizontal + cross = self.foot_row_cross + right = self.foot_row_right + else: + raise ValueError("level must be 'head', 'row' or 'foot'") + + parts: List[str] = [] + append = parts.append + if edge: + append(left) + for last, width in loop_last(widths): + append(horizontal * width) + if not last: + append(cross) + if edge: + append(right) + return "".join(parts) + + def get_bottom(self, widths: Iterable[int]) -> str: + """Get the bottom of a simple box. + + Args: + widths (List[int]): Widths of columns. + + Returns: + str: A string of box characters. + """ + + parts: List[str] = [] + append = parts.append + append(self.bottom_left) + for last, width in loop_last(widths): + append(self.bottom * width) + if not last: + append(self.bottom_divider) + append(self.bottom_right) + return "".join(parts) + + +# fmt: off +ASCII: Box = Box( + "+--+\n" + "| ||\n" + "|-+|\n" + "| ||\n" + "|-+|\n" + "|-+|\n" + "| ||\n" + "+--+\n", + ascii=True, +) + +ASCII2: Box = Box( + "+-++\n" + "| ||\n" + "+-++\n" + "| ||\n" + "+-++\n" + "+-++\n" + "| ||\n" + "+-++\n", + ascii=True, +) + +ASCII_DOUBLE_HEAD: Box = Box( + "+-++\n" + "| ||\n" + "+=++\n" + "| ||\n" + "+-++\n" + "+-++\n" + "| ||\n" + "+-++\n", + ascii=True, +) + +SQUARE: Box = Box( + "┌─┬┐\n" + "│ ││\n" + "├─┼┤\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "└─┴┘\n" +) + +SQUARE_DOUBLE_HEAD: Box = Box( + "┌─┬┐\n" + "│ ││\n" + "╞═╪╡\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "└─┴┘\n" +) + +MINIMAL: Box = Box( + " ╷ \n" + " │ \n" + "╶─┼╴\n" + " │ \n" + "╶─┼╴\n" + "╶─┼╴\n" + " │ \n" + " ╵ \n" +) + + +MINIMAL_HEAVY_HEAD: Box = Box( + " ╷ \n" + " │ \n" + "╺━┿╸\n" + " │ \n" + "╶─┼╴\n" + "╶─┼╴\n" + " │ \n" + " ╵ \n" +) + +MINIMAL_DOUBLE_HEAD: Box = Box( + " ╷ \n" + " │ \n" + " ═╪ \n" + " │ \n" + " ─┼ \n" + " ─┼ \n" + " │ \n" + " ╵ \n" +) + + +SIMPLE: Box = Box( + " \n" + " \n" + " ── \n" + " \n" + " \n" + " ── \n" + " \n" + " \n" +) + +SIMPLE_HEAD: Box = Box( + " \n" + " \n" + " ── \n" + " \n" + " \n" + " \n" + " \n" + " \n" +) + + +SIMPLE_HEAVY: Box = Box( + " \n" + " \n" + " ━━ \n" + " \n" + " \n" + " ━━ \n" + " \n" + " \n" +) + + +HORIZONTALS: Box = Box( + " ── \n" + " \n" + " ── \n" + " \n" + " ── \n" + " ── \n" + " \n" + " ── \n" +) + +ROUNDED: Box = Box( + "╭─┬╮\n" + "│ ││\n" + "├─┼┤\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "╰─┴╯\n" +) + +HEAVY: Box = Box( + "┏━┳┓\n" + "┃ ┃┃\n" + "┣━╋┫\n" + "┃ ┃┃\n" + "┣━╋┫\n" + "┣━╋┫\n" + "┃ ┃┃\n" + "┗━┻┛\n" +) + +HEAVY_EDGE: Box = Box( + "┏━┯┓\n" + "┃ │┃\n" + "┠─┼┨\n" + "┃ │┃\n" + "┠─┼┨\n" + "┠─┼┨\n" + "┃ │┃\n" + "┗━┷┛\n" +) + +HEAVY_HEAD: Box = Box( + "┏━┳┓\n" + "┃ ┃┃\n" + "┡━╇┩\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "└─┴┘\n" +) + +DOUBLE: Box = Box( + "╔═╦╗\n" + "║ ║║\n" + "╠═╬╣\n" + "║ ║║\n" + "╠═╬╣\n" + "╠═╬╣\n" + "║ ║║\n" + "╚═╩╝\n" +) + +DOUBLE_EDGE: Box = Box( + "╔═╤╗\n" + "║ │║\n" + "╟─┼╢\n" + "║ │║\n" + "╟─┼╢\n" + "╟─┼╢\n" + "║ │║\n" + "╚═╧╝\n" +) + +MARKDOWN: Box = Box( + " \n" + "| ||\n" + "|-||\n" + "| ||\n" + "|-||\n" + "|-||\n" + "| ||\n" + " \n", + ascii=True, +) +# fmt: on + +# Map Boxes that don't 
render with raster fonts on to equivalent that do +LEGACY_WINDOWS_SUBSTITUTIONS = { + ROUNDED: SQUARE, + MINIMAL_HEAVY_HEAD: MINIMAL, + SIMPLE_HEAVY: SIMPLE, + HEAVY: SQUARE, + HEAVY_EDGE: SQUARE, + HEAVY_HEAD: SQUARE, +} + +# Map headed boxes to their headerless equivalents +PLAIN_HEADED_SUBSTITUTIONS = { + HEAVY_HEAD: SQUARE, + SQUARE_DOUBLE_HEAD: SQUARE, + MINIMAL_DOUBLE_HEAD: MINIMAL, + MINIMAL_HEAVY_HEAD: MINIMAL, + ASCII_DOUBLE_HEAD: ASCII2, +} + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.columns import Columns + from pip._vendor.rich.panel import Panel + + from . import box as box + from .console import Console + from .table import Table + from .text import Text + + console = Console(record=True) + + BOXES = [ + "ASCII", + "ASCII2", + "ASCII_DOUBLE_HEAD", + "SQUARE", + "SQUARE_DOUBLE_HEAD", + "MINIMAL", + "MINIMAL_HEAVY_HEAD", + "MINIMAL_DOUBLE_HEAD", + "SIMPLE", + "SIMPLE_HEAD", + "SIMPLE_HEAVY", + "HORIZONTALS", + "ROUNDED", + "HEAVY", + "HEAVY_EDGE", + "HEAVY_HEAD", + "DOUBLE", + "DOUBLE_EDGE", + "MARKDOWN", + ] + + console.print(Panel("[bold green]Box Constants", style="green"), justify="center") + console.print() + + columns = Columns(expand=True, padding=2) + for box_name in sorted(BOXES): + table = Table( + show_footer=True, style="dim", border_style="not dim", expand=True + ) + table.add_column("Header 1", "Footer 1") + table.add_column("Header 2", "Footer 2") + table.add_row("Cell", "Cell") + table.add_row("Cell", "Cell") + table.box = getattr(box, box_name) + table.title = Text(f"box.{box_name}", style="magenta") + columns.add_renderable(table) + console.print(columns) + + # console.save_svg("box.svg") diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py new file mode 100644 index 0000000..a854622 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/cells.py @@ -0,0 +1,174 @@ +from __future__ import annotations + +from functools import lru_cache +from typing import Callable + +from ._cell_widths import CELL_WIDTHS + +# Ranges of unicode ordinals that produce a 1-cell wide character +# This is non-exhaustive, but covers most common Western characters +_SINGLE_CELL_UNICODE_RANGES: list[tuple[int, int]] = [ + (0x20, 0x7E), # Latin (excluding non-printable) + (0xA0, 0xAC), + (0xAE, 0x002FF), + (0x00370, 0x00482), # Greek / Cyrillic + (0x02500, 0x025FC), # Box drawing, box elements, geometric shapes + (0x02800, 0x028FF), # Braille +] + +# A set of characters that are a single cell wide +_SINGLE_CELLS = frozenset( + [ + character + for _start, _end in _SINGLE_CELL_UNICODE_RANGES + for character in map(chr, range(_start, _end + 1)) + ] +) + +# When called with a string this will return True if all +# characters are single-cell, otherwise False +_is_single_cell_widths: Callable[[str], bool] = _SINGLE_CELLS.issuperset + + +@lru_cache(4096) +def cached_cell_len(text: str) -> int: + """Get the number of cells required to display text. + + This method always caches, which may use up a lot of memory. It is recommended to use + `cell_len` over this method. + + Args: + text (str): Text to display. + + Returns: + int: Get the number of cells required to display text. + """ + if _is_single_cell_widths(text): + return len(text) + return sum(map(get_character_cell_size, text)) + + +def cell_len(text: str, _cell_len: Callable[[str], int] = cached_cell_len) -> int: + """Get the number of cells required to display text. + + Args: + text (str): Text to display. 
+ + Returns: + int: Get the number of cells required to display text. + """ + if len(text) < 512: + return _cell_len(text) + if _is_single_cell_widths(text): + return len(text) + return sum(map(get_character_cell_size, text)) + + +@lru_cache(maxsize=4096) +def get_character_cell_size(character: str) -> int: + """Get the cell size of a character. + + Args: + character (str): A single character. + + Returns: + int: Number of cells (0, 1 or 2) occupied by that character. + """ + codepoint = ord(character) + _table = CELL_WIDTHS + lower_bound = 0 + upper_bound = len(_table) - 1 + index = (lower_bound + upper_bound) // 2 + while True: + start, end, width = _table[index] + if codepoint < start: + upper_bound = index - 1 + elif codepoint > end: + lower_bound = index + 1 + else: + return 0 if width == -1 else width + if upper_bound < lower_bound: + break + index = (lower_bound + upper_bound) // 2 + return 1 + + +def set_cell_size(text: str, total: int) -> str: + """Set the length of a string to fit within given number of cells.""" + + if _is_single_cell_widths(text): + size = len(text) + if size < total: + return text + " " * (total - size) + return text[:total] + + if total <= 0: + return "" + cell_size = cell_len(text) + if cell_size == total: + return text + if cell_size < total: + return text + " " * (total - cell_size) + + start = 0 + end = len(text) + + # Binary search until we find the right size + while True: + pos = (start + end) // 2 + before = text[: pos + 1] + before_len = cell_len(before) + if before_len == total + 1 and cell_len(before[-1]) == 2: + return before[:-1] + " " + if before_len == total: + return before + if before_len > total: + end = pos + else: + start = pos + + +def chop_cells( + text: str, + width: int, +) -> list[str]: + """Split text into lines such that each line fits within the available (cell) width. + + Args: + text: The text to fold such that it fits in the given width. + width: The width available (number of cells). + + Returns: + A list of strings such that each string in the list has cell width + less than or equal to the available width. 
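+
+    Example (illustrative; every character below is one cell wide)::
+
+        chop_cells("abcdefghij", 4)  # -> ["abcd", "efgh", "ij"]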
+ """ + _get_character_cell_size = get_character_cell_size + lines: list[list[str]] = [[]] + + append_new_line = lines.append + append_to_last_line = lines[-1].append + + total_width = 0 + + for character in text: + cell_width = _get_character_cell_size(character) + char_doesnt_fit = total_width + cell_width > width + + if char_doesnt_fit: + append_new_line([character]) + append_to_last_line = lines[-1].append + total_width = cell_width + else: + append_to_last_line(character) + total_width += cell_width + + return ["".join(line) for line in lines] + + +if __name__ == "__main__": # pragma: no cover + print(get_character_cell_size("😽")) + for line in chop_cells("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", 8): + print(line) + for n in range(80, 1, -1): + print(set_cell_size("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", n) + "|") + print("x" * n) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/color.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/color.py new file mode 100644 index 0000000..e2c23a6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/color.py @@ -0,0 +1,621 @@ +import re +import sys +from colorsys import rgb_to_hls +from enum import IntEnum +from functools import lru_cache +from typing import TYPE_CHECKING, NamedTuple, Optional, Tuple + +from ._palettes import EIGHT_BIT_PALETTE, STANDARD_PALETTE, WINDOWS_PALETTE +from .color_triplet import ColorTriplet +from .repr import Result, rich_repr +from .terminal_theme import DEFAULT_TERMINAL_THEME + +if TYPE_CHECKING: # pragma: no cover + from .terminal_theme import TerminalTheme + from .text import Text + + +WINDOWS = sys.platform == "win32" + + +class ColorSystem(IntEnum): + """One of the 3 color system supported by terminals.""" + + STANDARD = 1 + EIGHT_BIT = 2 + TRUECOLOR = 3 + WINDOWS = 4 + + def __repr__(self) -> str: + return f"ColorSystem.{self.name}" + + def __str__(self) -> str: + return repr(self) + + +class ColorType(IntEnum): + """Type of color stored in Color class.""" + + DEFAULT = 0 + STANDARD = 1 + EIGHT_BIT = 2 + TRUECOLOR = 3 + WINDOWS = 4 + + def __repr__(self) -> str: + return f"ColorType.{self.name}" + + +ANSI_COLOR_NAMES = { + "black": 0, + "red": 1, + "green": 2, + "yellow": 3, + "blue": 4, + "magenta": 5, + "cyan": 6, + "white": 7, + "bright_black": 8, + "bright_red": 9, + "bright_green": 10, + "bright_yellow": 11, + "bright_blue": 12, + "bright_magenta": 13, + "bright_cyan": 14, + "bright_white": 15, + "grey0": 16, + "gray0": 16, + "navy_blue": 17, + "dark_blue": 18, + "blue3": 20, + "blue1": 21, + "dark_green": 22, + "deep_sky_blue4": 25, + "dodger_blue3": 26, + "dodger_blue2": 27, + "green4": 28, + "spring_green4": 29, + "turquoise4": 30, + "deep_sky_blue3": 32, + "dodger_blue1": 33, + "green3": 40, + "spring_green3": 41, + "dark_cyan": 36, + "light_sea_green": 37, + "deep_sky_blue2": 38, + "deep_sky_blue1": 39, + "spring_green2": 47, + "cyan3": 43, + "dark_turquoise": 44, + "turquoise2": 45, + "green1": 46, + "spring_green1": 48, + "medium_spring_green": 49, + "cyan2": 50, + "cyan1": 51, + "dark_red": 88, + "deep_pink4": 125, + "purple4": 55, + "purple3": 56, + "blue_violet": 57, + "orange4": 94, + "grey37": 59, + "gray37": 59, + "medium_purple4": 60, + "slate_blue3": 62, + "royal_blue1": 63, + "chartreuse4": 64, + "dark_sea_green4": 71, + "pale_turquoise4": 66, + "steel_blue": 67, + "steel_blue3": 68, + "cornflower_blue": 69, + "chartreuse3": 76, + "cadet_blue": 73, + "sky_blue3": 74, + "steel_blue1": 81, + "pale_green3": 114, + "sea_green3": 78, + "aquamarine3": 79, + 
"medium_turquoise": 80, + "chartreuse2": 112, + "sea_green2": 83, + "sea_green1": 85, + "aquamarine1": 122, + "dark_slate_gray2": 87, + "dark_magenta": 91, + "dark_violet": 128, + "purple": 129, + "light_pink4": 95, + "plum4": 96, + "medium_purple3": 98, + "slate_blue1": 99, + "yellow4": 106, + "wheat4": 101, + "grey53": 102, + "gray53": 102, + "light_slate_grey": 103, + "light_slate_gray": 103, + "medium_purple": 104, + "light_slate_blue": 105, + "dark_olive_green3": 149, + "dark_sea_green": 108, + "light_sky_blue3": 110, + "sky_blue2": 111, + "dark_sea_green3": 150, + "dark_slate_gray3": 116, + "sky_blue1": 117, + "chartreuse1": 118, + "light_green": 120, + "pale_green1": 156, + "dark_slate_gray1": 123, + "red3": 160, + "medium_violet_red": 126, + "magenta3": 164, + "dark_orange3": 166, + "indian_red": 167, + "hot_pink3": 168, + "medium_orchid3": 133, + "medium_orchid": 134, + "medium_purple2": 140, + "dark_goldenrod": 136, + "light_salmon3": 173, + "rosy_brown": 138, + "grey63": 139, + "gray63": 139, + "medium_purple1": 141, + "gold3": 178, + "dark_khaki": 143, + "navajo_white3": 144, + "grey69": 145, + "gray69": 145, + "light_steel_blue3": 146, + "light_steel_blue": 147, + "yellow3": 184, + "dark_sea_green2": 157, + "light_cyan3": 152, + "light_sky_blue1": 153, + "green_yellow": 154, + "dark_olive_green2": 155, + "dark_sea_green1": 193, + "pale_turquoise1": 159, + "deep_pink3": 162, + "magenta2": 200, + "hot_pink2": 169, + "orchid": 170, + "medium_orchid1": 207, + "orange3": 172, + "light_pink3": 174, + "pink3": 175, + "plum3": 176, + "violet": 177, + "light_goldenrod3": 179, + "tan": 180, + "misty_rose3": 181, + "thistle3": 182, + "plum2": 183, + "khaki3": 185, + "light_goldenrod2": 222, + "light_yellow3": 187, + "grey84": 188, + "gray84": 188, + "light_steel_blue1": 189, + "yellow2": 190, + "dark_olive_green1": 192, + "honeydew2": 194, + "light_cyan1": 195, + "red1": 196, + "deep_pink2": 197, + "deep_pink1": 199, + "magenta1": 201, + "orange_red1": 202, + "indian_red1": 204, + "hot_pink": 206, + "dark_orange": 208, + "salmon1": 209, + "light_coral": 210, + "pale_violet_red1": 211, + "orchid2": 212, + "orchid1": 213, + "orange1": 214, + "sandy_brown": 215, + "light_salmon1": 216, + "light_pink1": 217, + "pink1": 218, + "plum1": 219, + "gold1": 220, + "navajo_white1": 223, + "misty_rose1": 224, + "thistle1": 225, + "yellow1": 226, + "light_goldenrod1": 227, + "khaki1": 228, + "wheat1": 229, + "cornsilk1": 230, + "grey100": 231, + "gray100": 231, + "grey3": 232, + "gray3": 232, + "grey7": 233, + "gray7": 233, + "grey11": 234, + "gray11": 234, + "grey15": 235, + "gray15": 235, + "grey19": 236, + "gray19": 236, + "grey23": 237, + "gray23": 237, + "grey27": 238, + "gray27": 238, + "grey30": 239, + "gray30": 239, + "grey35": 240, + "gray35": 240, + "grey39": 241, + "gray39": 241, + "grey42": 242, + "gray42": 242, + "grey46": 243, + "gray46": 243, + "grey50": 244, + "gray50": 244, + "grey54": 245, + "gray54": 245, + "grey58": 246, + "gray58": 246, + "grey62": 247, + "gray62": 247, + "grey66": 248, + "gray66": 248, + "grey70": 249, + "gray70": 249, + "grey74": 250, + "gray74": 250, + "grey78": 251, + "gray78": 251, + "grey82": 252, + "gray82": 252, + "grey85": 253, + "gray85": 253, + "grey89": 254, + "gray89": 254, + "grey93": 255, + "gray93": 255, +} + + +class ColorParseError(Exception): + """The color could not be parsed.""" + + +RE_COLOR = re.compile( + r"""^ +\#([0-9a-f]{6})$| +color\(([0-9]{1,3})\)$| +rgb\(([\d\s,]+)\)$ +""", + re.VERBOSE, +) + + +@rich_repr +class Color(NamedTuple): + 
"""Terminal color definition.""" + + name: str + """The name of the color (typically the input to Color.parse).""" + type: ColorType + """The type of the color.""" + number: Optional[int] = None + """The color number, if a standard color, or None.""" + triplet: Optional[ColorTriplet] = None + """A triplet of color components, if an RGB color.""" + + def __rich__(self) -> "Text": + """Displays the actual color if Rich printed.""" + from .style import Style + from .text import Text + + return Text.assemble( + f"", + ) + + def __rich_repr__(self) -> Result: + yield self.name + yield self.type + yield "number", self.number, None + yield "triplet", self.triplet, None + + @property + def system(self) -> ColorSystem: + """Get the native color system for this color.""" + if self.type == ColorType.DEFAULT: + return ColorSystem.STANDARD + return ColorSystem(int(self.type)) + + @property + def is_system_defined(self) -> bool: + """Check if the color is ultimately defined by the system.""" + return self.system not in (ColorSystem.EIGHT_BIT, ColorSystem.TRUECOLOR) + + @property + def is_default(self) -> bool: + """Check if the color is a default color.""" + return self.type == ColorType.DEFAULT + + def get_truecolor( + self, theme: Optional["TerminalTheme"] = None, foreground: bool = True + ) -> ColorTriplet: + """Get an equivalent color triplet for this color. + + Args: + theme (TerminalTheme, optional): Optional terminal theme, or None to use default. Defaults to None. + foreground (bool, optional): True for a foreground color, or False for background. Defaults to True. + + Returns: + ColorTriplet: A color triplet containing RGB components. + """ + + if theme is None: + theme = DEFAULT_TERMINAL_THEME + if self.type == ColorType.TRUECOLOR: + assert self.triplet is not None + return self.triplet + elif self.type == ColorType.EIGHT_BIT: + assert self.number is not None + return EIGHT_BIT_PALETTE[self.number] + elif self.type == ColorType.STANDARD: + assert self.number is not None + return theme.ansi_colors[self.number] + elif self.type == ColorType.WINDOWS: + assert self.number is not None + return WINDOWS_PALETTE[self.number] + else: # self.type == ColorType.DEFAULT: + assert self.number is None + return theme.foreground_color if foreground else theme.background_color + + @classmethod + def from_ansi(cls, number: int) -> "Color": + """Create a Color number from it's 8-bit ansi number. + + Args: + number (int): A number between 0-255 inclusive. + + Returns: + Color: A new Color instance. + """ + return cls( + name=f"color({number})", + type=(ColorType.STANDARD if number < 16 else ColorType.EIGHT_BIT), + number=number, + ) + + @classmethod + def from_triplet(cls, triplet: "ColorTriplet") -> "Color": + """Create a truecolor RGB color from a triplet of values. + + Args: + triplet (ColorTriplet): A color triplet containing red, green and blue components. + + Returns: + Color: A new color object. + """ + return cls(name=triplet.hex, type=ColorType.TRUECOLOR, triplet=triplet) + + @classmethod + def from_rgb(cls, red: float, green: float, blue: float) -> "Color": + """Create a truecolor from three color components in the range(0->255). + + Args: + red (float): Red component in range 0-255. + green (float): Green component in range 0-255. + blue (float): Blue component in range 0-255. + + Returns: + Color: A new color object. + """ + return cls.from_triplet(ColorTriplet(int(red), int(green), int(blue))) + + @classmethod + def default(cls) -> "Color": + """Get a Color instance representing the default color. 
+ + Returns: + Color: Default color. + """ + return cls(name="default", type=ColorType.DEFAULT) + + @classmethod + @lru_cache(maxsize=1024) + def parse(cls, color: str) -> "Color": + """Parse a color definition.""" + original_color = color + color = color.lower().strip() + + if color == "default": + return cls(color, type=ColorType.DEFAULT) + + color_number = ANSI_COLOR_NAMES.get(color) + if color_number is not None: + return cls( + color, + type=(ColorType.STANDARD if color_number < 16 else ColorType.EIGHT_BIT), + number=color_number, + ) + + color_match = RE_COLOR.match(color) + if color_match is None: + raise ColorParseError(f"{original_color!r} is not a valid color") + + color_24, color_8, color_rgb = color_match.groups() + if color_24: + triplet = ColorTriplet( + int(color_24[0:2], 16), int(color_24[2:4], 16), int(color_24[4:6], 16) + ) + return cls(color, ColorType.TRUECOLOR, triplet=triplet) + + elif color_8: + number = int(color_8) + if number > 255: + raise ColorParseError(f"color number must be <= 255 in {color!r}") + return cls( + color, + type=(ColorType.STANDARD if number < 16 else ColorType.EIGHT_BIT), + number=number, + ) + + else: # color_rgb: + components = color_rgb.split(",") + if len(components) != 3: + raise ColorParseError( + f"expected three components in {original_color!r}" + ) + red, green, blue = components + triplet = ColorTriplet(int(red), int(green), int(blue)) + if not all(component <= 255 for component in triplet): + raise ColorParseError( + f"color components must be <= 255 in {original_color!r}" + ) + return cls(color, ColorType.TRUECOLOR, triplet=triplet) + + @lru_cache(maxsize=1024) + def get_ansi_codes(self, foreground: bool = True) -> Tuple[str, ...]: + """Get the ANSI escape codes for this color.""" + _type = self.type + if _type == ColorType.DEFAULT: + return ("39" if foreground else "49",) + + elif _type == ColorType.WINDOWS: + number = self.number + assert number is not None + fore, back = (30, 40) if number < 8 else (82, 92) + return (str(fore + number if foreground else back + number),) + + elif _type == ColorType.STANDARD: + number = self.number + assert number is not None + fore, back = (30, 40) if number < 8 else (82, 92) + return (str(fore + number if foreground else back + number),) + + elif _type == ColorType.EIGHT_BIT: + assert self.number is not None + return ("38" if foreground else "48", "5", str(self.number)) + + else: # self.standard == ColorStandard.TRUECOLOR: + assert self.triplet is not None + red, green, blue = self.triplet + return ("38" if foreground else "48", "2", str(red), str(green), str(blue)) + + @lru_cache(maxsize=1024) + def downgrade(self, system: ColorSystem) -> "Color": + """Downgrade a color system to a system with fewer colors.""" + + if self.type in (ColorType.DEFAULT, system): + return self + # Convert to 8-bit color from truecolor color + if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + _h, l, s = rgb_to_hls(*self.triplet.normalized) + # If saturation is under 15% assume it is grayscale + if s < 0.15: + gray = round(l * 25.0) + if gray == 0: + color_number = 16 + elif gray == 25: + color_number = 231 + else: + color_number = 231 + gray + return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + + red, green, blue = self.triplet + six_red = red / 95 if red < 95 else 1 + (red - 95) / 40 + six_green = green / 95 if green < 95 else 1 + (green - 95) / 40 + six_blue = blue / 95 if blue < 95 else 1 + (blue - 95) / 40 + + color_number = ( + 16 + 36 
* round(six_red) + 6 * round(six_green) + round(six_blue) + ) + return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + + # Convert to standard from truecolor or 8-bit + elif system == ColorSystem.STANDARD: + if self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + triplet = self.triplet + else: # self.system == ColorSystem.EIGHT_BIT + assert self.number is not None + triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number]) + + color_number = STANDARD_PALETTE.match(triplet) + return Color(self.name, ColorType.STANDARD, number=color_number) + + elif system == ColorSystem.WINDOWS: + if self.system == ColorSystem.TRUECOLOR: + assert self.triplet is not None + triplet = self.triplet + else: # self.system == ColorSystem.EIGHT_BIT + assert self.number is not None + if self.number < 16: + return Color(self.name, ColorType.WINDOWS, number=self.number) + triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number]) + + color_number = WINDOWS_PALETTE.match(triplet) + return Color(self.name, ColorType.WINDOWS, number=color_number) + + return self + + +def parse_rgb_hex(hex_color: str) -> ColorTriplet: + """Parse six hex characters in to RGB triplet.""" + assert len(hex_color) == 6, "must be 6 characters" + color = ColorTriplet( + int(hex_color[0:2], 16), int(hex_color[2:4], 16), int(hex_color[4:6], 16) + ) + return color + + +def blend_rgb( + color1: ColorTriplet, color2: ColorTriplet, cross_fade: float = 0.5 +) -> ColorTriplet: + """Blend one RGB color in to another.""" + r1, g1, b1 = color1 + r2, g2, b2 = color2 + new_color = ColorTriplet( + int(r1 + (r2 - r1) * cross_fade), + int(g1 + (g2 - g1) * cross_fade), + int(b1 + (b2 - b1) * cross_fade), + ) + return new_color + + +if __name__ == "__main__": # pragma: no cover + from .console import Console + from .table import Table + from .text import Text + + console = Console() + + table = Table(show_footer=False, show_edge=True) + table.add_column("Color", width=10, overflow="ellipsis") + table.add_column("Number", justify="right", style="yellow") + table.add_column("Name", style="green") + table.add_column("Hex", style="blue") + table.add_column("RGB", style="magenta") + + colors = sorted((v, k) for k, v in ANSI_COLOR_NAMES.items()) + for color_number, name in colors: + if "grey" in name: + continue + color_cell = Text(" " * 10, style=f"on {name}") + if color_number < 16: + table.add_row(color_cell, f"{color_number}", Text(f'"{name}"')) + else: + color = EIGHT_BIT_PALETTE[color_number] # type: ignore[has-type] + table.add_row( + color_cell, str(color_number), Text(f'"{name}"'), color.hex, color.rgb + ) + + console.print(table) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py new file mode 100644 index 0000000..02cab32 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/color_triplet.py @@ -0,0 +1,38 @@ +from typing import NamedTuple, Tuple + + +class ColorTriplet(NamedTuple): + """The red, green, and blue components of a color.""" + + red: int + """Red component in 0 to 255 range.""" + green: int + """Green component in 0 to 255 range.""" + blue: int + """Blue component in 0 to 255 range.""" + + @property + def hex(self) -> str: + """get the color triplet in CSS style.""" + red, green, blue = self + return f"#{red:02x}{green:02x}{blue:02x}" + + @property + def rgb(self) -> str: + """The color in RGB format. + + Returns: + str: An rgb color, e.g. ``"rgb(100,23,255)"``. 
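+
+        Example (illustrative)::
+
+            ColorTriplet(100, 23, 255).rgb  # -> "rgb(100,23,255)"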
+ """ + red, green, blue = self + return f"rgb({red},{green},{blue})" + + @property + def normalized(self) -> Tuple[float, float, float]: + """Convert components into floats between 0 and 1. + + Returns: + Tuple[float, float, float]: A tuple of three normalized colour components. + """ + red, green, blue = self + return red / 255.0, green / 255.0, blue / 255.0 diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py new file mode 100644 index 0000000..669a3a7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/columns.py @@ -0,0 +1,187 @@ +from collections import defaultdict +from itertools import chain +from operator import itemgetter +from typing import Dict, Iterable, List, Optional, Tuple + +from .align import Align, AlignMethod +from .console import Console, ConsoleOptions, RenderableType, RenderResult +from .constrain import Constrain +from .measure import Measurement +from .padding import Padding, PaddingDimensions +from .table import Table +from .text import TextType +from .jupyter import JupyterMixin + + +class Columns(JupyterMixin): + """Display renderables in neat columns. + + Args: + renderables (Iterable[RenderableType]): Any number of Rich renderables (including str). + width (int, optional): The desired width of the columns, or None to auto detect. Defaults to None. + padding (PaddingDimensions, optional): Optional padding around cells. Defaults to (0, 1). + expand (bool, optional): Expand columns to full width. Defaults to False. + equal (bool, optional): Arrange in to equal sized columns. Defaults to False. + column_first (bool, optional): Align items from top to bottom (rather than left to right). Defaults to False. + right_to_left (bool, optional): Start column from right hand side. Defaults to False. + align (str, optional): Align value ("left", "right", or "center") or None for default. Defaults to None. + title (TextType, optional): Optional title for Columns. + """ + + def __init__( + self, + renderables: Optional[Iterable[RenderableType]] = None, + padding: PaddingDimensions = (0, 1), + *, + width: Optional[int] = None, + expand: bool = False, + equal: bool = False, + column_first: bool = False, + right_to_left: bool = False, + align: Optional[AlignMethod] = None, + title: Optional[TextType] = None, + ) -> None: + self.renderables = list(renderables or []) + self.width = width + self.padding = padding + self.expand = expand + self.equal = equal + self.column_first = column_first + self.right_to_left = right_to_left + self.align: Optional[AlignMethod] = align + self.title = title + + def add_renderable(self, renderable: RenderableType) -> None: + """Add a renderable to the columns. + + Args: + renderable (RenderableType): Any renderable object. 
+ """ + self.renderables.append(renderable) + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + render_str = console.render_str + renderables = [ + render_str(renderable) if isinstance(renderable, str) else renderable + for renderable in self.renderables + ] + if not renderables: + return + _top, right, _bottom, left = Padding.unpack(self.padding) + width_padding = max(left, right) + max_width = options.max_width + widths: Dict[int, int] = defaultdict(int) + column_count = len(renderables) + + get_measurement = Measurement.get + renderable_widths = [ + get_measurement(console, options, renderable).maximum + for renderable in renderables + ] + if self.equal: + renderable_widths = [max(renderable_widths)] * len(renderable_widths) + + def iter_renderables( + column_count: int, + ) -> Iterable[Tuple[int, Optional[RenderableType]]]: + item_count = len(renderables) + if self.column_first: + width_renderables = list(zip(renderable_widths, renderables)) + + column_lengths: List[int] = [item_count // column_count] * column_count + for col_no in range(item_count % column_count): + column_lengths[col_no] += 1 + + row_count = (item_count + column_count - 1) // column_count + cells = [[-1] * column_count for _ in range(row_count)] + row = col = 0 + for index in range(item_count): + cells[row][col] = index + column_lengths[col] -= 1 + if column_lengths[col]: + row += 1 + else: + col += 1 + row = 0 + for index in chain.from_iterable(cells): + if index == -1: + break + yield width_renderables[index] + else: + yield from zip(renderable_widths, renderables) + # Pad odd elements with spaces + if item_count % column_count: + for _ in range(column_count - (item_count % column_count)): + yield 0, None + + table = Table.grid(padding=self.padding, collapse_padding=True, pad_edge=False) + table.expand = self.expand + table.title = self.title + + if self.width is not None: + column_count = (max_width) // (self.width + width_padding) + for _ in range(column_count): + table.add_column(width=self.width) + else: + while column_count > 1: + widths.clear() + column_no = 0 + for renderable_width, _ in iter_renderables(column_count): + widths[column_no] = max(widths[column_no], renderable_width) + total_width = sum(widths.values()) + width_padding * ( + len(widths) - 1 + ) + if total_width > max_width: + column_count = len(widths) - 1 + break + else: + column_no = (column_no + 1) % column_count + else: + break + + get_renderable = itemgetter(1) + _renderables = [ + get_renderable(_renderable) + for _renderable in iter_renderables(column_count) + ] + if self.equal: + _renderables = [ + None + if renderable is None + else Constrain(renderable, renderable_widths[0]) + for renderable in _renderables + ] + if self.align: + align = self.align + _Align = Align + _renderables = [ + None if renderable is None else _Align(renderable, align) + for renderable in _renderables + ] + + right_to_left = self.right_to_left + add_row = table.add_row + for start in range(0, len(_renderables), column_count): + row = _renderables[start : start + column_count] + if right_to_left: + row = row[::-1] + add_row(*row) + yield table + + +if __name__ == "__main__": # pragma: no cover + import os + + console = Console() + + files = [f"{i} {s}" for i, s in enumerate(sorted(os.listdir()))] + columns = Columns(files, padding=(0, 1), expand=False, equal=False) + console.print(columns) + console.rule() + columns.column_first = True + console.print(columns) + columns.right_to_left = True + console.rule() + 
console.print(columns) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/console.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/console.py new file mode 100644 index 0000000..db2ba55 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/console.py @@ -0,0 +1,2680 @@ +import inspect +import os +import sys +import threading +import zlib +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from datetime import datetime +from functools import wraps +from getpass import getpass +from html import escape +from inspect import isclass +from itertools import islice +from math import ceil +from time import monotonic +from types import FrameType, ModuleType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Literal, + Mapping, + NamedTuple, + Optional, + Protocol, + TextIO, + Tuple, + Type, + Union, + cast, + runtime_checkable, +) + +from pip._vendor.rich._null_file import NULL_FILE + +from . import errors, themes +from ._emoji_replace import _emoji_replace +from ._export_format import CONSOLE_HTML_FORMAT, CONSOLE_SVG_FORMAT +from ._fileno import get_fileno +from ._log_render import FormatTimeCallable, LogRender +from .align import Align, AlignMethod +from .color import ColorSystem, blend_rgb +from .control import Control +from .emoji import EmojiVariant +from .highlighter import NullHighlighter, ReprHighlighter +from .markup import render as render_markup +from .measure import Measurement, measure_renderables +from .pager import Pager, SystemPager +from .pretty import Pretty, is_expandable +from .protocol import rich_cast +from .region import Region +from .scope import render_scope +from .screen import Screen +from .segment import Segment +from .style import Style, StyleType +from .styled import Styled +from .terminal_theme import DEFAULT_TERMINAL_THEME, SVG_EXPORT_THEME, TerminalTheme +from .text import Text, TextType +from .theme import Theme, ThemeStack + +if TYPE_CHECKING: + from ._windows import WindowsConsoleFeatures + from .live import Live + from .status import Status + +JUPYTER_DEFAULT_COLUMNS = 115 +JUPYTER_DEFAULT_LINES = 100 +WINDOWS = sys.platform == "win32" + +HighlighterType = Callable[[Union[str, "Text"]], "Text"] +JustifyMethod = Literal["default", "left", "center", "right", "full"] +OverflowMethod = Literal["fold", "crop", "ellipsis", "ignore"] + + +class NoChange: + pass + + +NO_CHANGE = NoChange() + +try: + _STDIN_FILENO = sys.__stdin__.fileno() # type: ignore[union-attr] +except Exception: + _STDIN_FILENO = 0 +try: + _STDOUT_FILENO = sys.__stdout__.fileno() # type: ignore[union-attr] +except Exception: + _STDOUT_FILENO = 1 +try: + _STDERR_FILENO = sys.__stderr__.fileno() # type: ignore[union-attr] +except Exception: + _STDERR_FILENO = 2 + +_STD_STREAMS = (_STDIN_FILENO, _STDOUT_FILENO, _STDERR_FILENO) +_STD_STREAMS_OUTPUT = (_STDOUT_FILENO, _STDERR_FILENO) + + +_TERM_COLORS = { + "kitty": ColorSystem.EIGHT_BIT, + "256color": ColorSystem.EIGHT_BIT, + "16color": ColorSystem.STANDARD, +} + + +class ConsoleDimensions(NamedTuple): + """Size of the terminal.""" + + width: int + """The width of the console in 'cells'.""" + height: int + """The height of the console in lines.""" + + +@dataclass +class ConsoleOptions: + """Options for __rich_console__ method.""" + + size: ConsoleDimensions + """Size of console.""" + legacy_windows: bool + """legacy_windows: flag for legacy windows.""" + min_width: int + """Minimum width of renderable.""" + max_width: int + 
"""Maximum width of renderable.""" + is_terminal: bool + """True if the target is a terminal, otherwise False.""" + encoding: str + """Encoding of terminal.""" + max_height: int + """Height of container (starts as terminal)""" + justify: Optional[JustifyMethod] = None + """Justify value override for renderable.""" + overflow: Optional[OverflowMethod] = None + """Overflow value override for renderable.""" + no_wrap: Optional[bool] = False + """Disable wrapping for text.""" + highlight: Optional[bool] = None + """Highlight override for render_str.""" + markup: Optional[bool] = None + """Enable markup when rendering strings.""" + height: Optional[int] = None + + @property + def ascii_only(self) -> bool: + """Check if renderables should use ascii only.""" + return not self.encoding.startswith("utf") + + def copy(self) -> "ConsoleOptions": + """Return a copy of the options. + + Returns: + ConsoleOptions: a copy of self. + """ + options: ConsoleOptions = ConsoleOptions.__new__(ConsoleOptions) + options.__dict__ = self.__dict__.copy() + return options + + def update( + self, + *, + width: Union[int, NoChange] = NO_CHANGE, + min_width: Union[int, NoChange] = NO_CHANGE, + max_width: Union[int, NoChange] = NO_CHANGE, + justify: Union[Optional[JustifyMethod], NoChange] = NO_CHANGE, + overflow: Union[Optional[OverflowMethod], NoChange] = NO_CHANGE, + no_wrap: Union[Optional[bool], NoChange] = NO_CHANGE, + highlight: Union[Optional[bool], NoChange] = NO_CHANGE, + markup: Union[Optional[bool], NoChange] = NO_CHANGE, + height: Union[Optional[int], NoChange] = NO_CHANGE, + ) -> "ConsoleOptions": + """Update values, return a copy.""" + options = self.copy() + if not isinstance(width, NoChange): + options.min_width = options.max_width = max(0, width) + if not isinstance(min_width, NoChange): + options.min_width = min_width + if not isinstance(max_width, NoChange): + options.max_width = max_width + if not isinstance(justify, NoChange): + options.justify = justify + if not isinstance(overflow, NoChange): + options.overflow = overflow + if not isinstance(no_wrap, NoChange): + options.no_wrap = no_wrap + if not isinstance(highlight, NoChange): + options.highlight = highlight + if not isinstance(markup, NoChange): + options.markup = markup + if not isinstance(height, NoChange): + if height is not None: + options.max_height = height + options.height = None if height is None else max(0, height) + return options + + def update_width(self, width: int) -> "ConsoleOptions": + """Update just the width, return a copy. + + Args: + width (int): New width (sets both min_width and max_width) + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.min_width = options.max_width = max(0, width) + return options + + def update_height(self, height: int) -> "ConsoleOptions": + """Update the height, and return a copy. + + Args: + height (int): New height + + Returns: + ~ConsoleOptions: New Console options instance. + """ + options = self.copy() + options.max_height = options.height = height + return options + + def reset_height(self) -> "ConsoleOptions": + """Return a copy of the options with height set to ``None``. + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.height = None + return options + + def update_dimensions(self, width: int, height: int) -> "ConsoleOptions": + """Update the width and height, and return a copy. + + Args: + width (int): New width (sets both min_width and max_width). + height (int): New height. 
+ + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.min_width = options.max_width = max(0, width) + options.height = options.max_height = height + return options + + +@runtime_checkable +class RichCast(Protocol): + """An object that may be 'cast' to a console renderable.""" + + def __rich__( + self, + ) -> Union["ConsoleRenderable", "RichCast", str]: # pragma: no cover + ... + + +@runtime_checkable +class ConsoleRenderable(Protocol): + """An object that supports the console protocol.""" + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": # pragma: no cover + ... + + +# A type that may be rendered by Console. +RenderableType = Union[ConsoleRenderable, RichCast, str] +"""A string or any object that may be rendered by Rich.""" + +# The result of calling a __rich_console__ method. +RenderResult = Iterable[Union[RenderableType, Segment]] + +_null_highlighter = NullHighlighter() + + +class CaptureError(Exception): + """An error in the Capture context manager.""" + + +class NewLine: + """A renderable to generate new line(s)""" + + def __init__(self, count: int = 1) -> None: + self.count = count + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> Iterable[Segment]: + yield Segment("\n" * self.count) + + +class ScreenUpdate: + """Render a list of lines at a given offset.""" + + def __init__(self, lines: List[List[Segment]], x: int, y: int) -> None: + self._lines = lines + self.x = x + self.y = y + + def __rich_console__( + self, console: "Console", options: ConsoleOptions + ) -> RenderResult: + x = self.x + move_to = Control.move_to + for offset, line in enumerate(self._lines, self.y): + yield move_to(x, offset) + yield from line + + +class Capture: + """Context manager to capture the result of printing to the console. + See :meth:`~rich.console.Console.capture` for how to use. + + Args: + console (Console): A console instance to capture output. + """ + + def __init__(self, console: "Console") -> None: + self._console = console + self._result: Optional[str] = None + + def __enter__(self) -> "Capture": + self._console.begin_capture() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self._result = self._console.end_capture() + + def get(self) -> str: + """Get the result of the capture.""" + if self._result is None: + raise CaptureError( + "Capture result is not available until context manager exits." + ) + return self._result + + +class ThemeContext: + """A context manager to use a temporary theme. See :meth:`~rich.console.Console.use_theme` for usage.""" + + def __init__(self, console: "Console", theme: Theme, inherit: bool = True) -> None: + self.console = console + self.theme = theme + self.inherit = inherit + + def __enter__(self) -> "ThemeContext": + self.console.push_theme(self.theme) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.console.pop_theme() + + +class PagerContext: + """A context manager that 'pages' content. 
See :meth:`~rich.console.Console.pager` for usage.""" + + def __init__( + self, + console: "Console", + pager: Optional[Pager] = None, + styles: bool = False, + links: bool = False, + ) -> None: + self._console = console + self.pager = SystemPager() if pager is None else pager + self.styles = styles + self.links = links + + def __enter__(self) -> "PagerContext": + self._console._enter_buffer() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if exc_type is None: + with self._console._lock: + buffer: List[Segment] = self._console._buffer[:] + del self._console._buffer[:] + segments: Iterable[Segment] = buffer + if not self.styles: + segments = Segment.strip_styles(segments) + elif not self.links: + segments = Segment.strip_links(segments) + content = self._console._render_buffer(segments) + self.pager.show(content) + self._console._exit_buffer() + + +class ScreenContext: + """A context manager that enables an alternative screen. See :meth:`~rich.console.Console.screen` for usage.""" + + def __init__( + self, console: "Console", hide_cursor: bool, style: StyleType = "" + ) -> None: + self.console = console + self.hide_cursor = hide_cursor + self.screen = Screen(style=style) + self._changed = False + + def update( + self, *renderables: RenderableType, style: Optional[StyleType] = None + ) -> None: + """Update the screen. + + Args: + renderable (RenderableType, optional): Optional renderable to replace current renderable, + or None for no change. Defaults to None. + style: (Style, optional): Replacement style, or None for no change. Defaults to None. + """ + if renderables: + self.screen.renderable = ( + Group(*renderables) if len(renderables) > 1 else renderables[0] + ) + if style is not None: + self.screen.style = style + self.console.print(self.screen, end="") + + def __enter__(self) -> "ScreenContext": + self._changed = self.console.set_alt_screen(True) + if self._changed and self.hide_cursor: + self.console.show_cursor(False) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if self._changed: + self.console.set_alt_screen(False) + if self.hide_cursor: + self.console.show_cursor(True) + + +class Group: + """Takes a group of renderables and returns a renderable object that renders the group. + + Args: + renderables (Iterable[RenderableType]): An iterable of renderable objects. + fit (bool, optional): Fit dimension of group to contents, or fill available space. Defaults to True. + """ + + def __init__(self, *renderables: "RenderableType", fit: bool = True) -> None: + self._renderables = renderables + self.fit = fit + self._render: Optional[List[RenderableType]] = None + + @property + def renderables(self) -> List["RenderableType"]: + if self._render is None: + self._render = list(self._renderables) + return self._render + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + if self.fit: + return measure_renderables(console, options, self.renderables) + else: + return Measurement(options.max_width, options.max_width) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> RenderResult: + yield from self.renderables + + +def group(fit: bool = True) -> Callable[..., Callable[..., Group]]: + """A decorator that turns an iterable of renderables in to a group. 
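+
+    For example (a minimal sketch; the function name is arbitrary):
+
+        >>> @group()
+        ... def get_content():
+        ...     yield "Hello"
+        ...     yield "World"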
+ + Args: + fit (bool, optional): Fit dimension of group to contents, or fill available space. Defaults to True. + """ + + def decorator( + method: Callable[..., Iterable[RenderableType]], + ) -> Callable[..., Group]: + """Convert a method that returns an iterable of renderables in to a Group.""" + + @wraps(method) + def _replace(*args: Any, **kwargs: Any) -> Group: + renderables = method(*args, **kwargs) + return Group(*renderables, fit=fit) + + return _replace + + return decorator + + +def _is_jupyter() -> bool: # pragma: no cover + """Check if we're running in a Jupyter notebook.""" + try: + get_ipython # type: ignore[name-defined] + except NameError: + return False + ipython = get_ipython() # type: ignore[name-defined] + shell = ipython.__class__.__name__ + if ( + "google.colab" in str(ipython.__class__) + or os.getenv("DATABRICKS_RUNTIME_VERSION") + or shell == "ZMQInteractiveShell" + ): + return True # Jupyter notebook or qtconsole + elif shell == "TerminalInteractiveShell": + return False # Terminal running IPython + else: + return False # Other type (?) + + +COLOR_SYSTEMS = { + "standard": ColorSystem.STANDARD, + "256": ColorSystem.EIGHT_BIT, + "truecolor": ColorSystem.TRUECOLOR, + "windows": ColorSystem.WINDOWS, +} + +_COLOR_SYSTEMS_NAMES = {system: name for name, system in COLOR_SYSTEMS.items()} + + +@dataclass +class ConsoleThreadLocals(threading.local): + """Thread local values for Console context.""" + + theme_stack: ThemeStack + buffer: List[Segment] = field(default_factory=list) + buffer_index: int = 0 + + +class RenderHook(ABC): + """Provides hooks in to the render process.""" + + @abstractmethod + def process_renderables( + self, renderables: List[ConsoleRenderable] + ) -> List[ConsoleRenderable]: + """Called with a list of objects to render. + + This method can return a new list of renderables, or modify and return the same list. + + Args: + renderables (List[ConsoleRenderable]): A number of renderable objects. + + Returns: + List[ConsoleRenderable]: A replacement list of renderables. + """ + + +_windows_console_features: Optional["WindowsConsoleFeatures"] = None + + +def get_windows_console_features() -> "WindowsConsoleFeatures": # pragma: no cover + global _windows_console_features + if _windows_console_features is not None: + return _windows_console_features + from ._windows import get_windows_console_features + + _windows_console_features = get_windows_console_features() + return _windows_console_features + + +def detect_legacy_windows() -> bool: + """Detect legacy Windows.""" + return WINDOWS and not get_windows_console_features().vt + + +class Console: + """A high level console interface. + + Args: + color_system (str, optional): The color system supported by your terminal, + either ``"standard"``, ``"256"`` or ``"truecolor"``. Leave as ``"auto"`` to autodetect. + force_terminal (Optional[bool], optional): Enable/disable terminal control codes, or None to auto-detect terminal. Defaults to None. + force_jupyter (Optional[bool], optional): Enable/disable Jupyter rendering, or None to auto-detect Jupyter. Defaults to None. + force_interactive (Optional[bool], optional): Enable/disable interactive mode, or None to auto detect. Defaults to None. + soft_wrap (Optional[bool], optional): Set soft wrap default on print method. Defaults to False. + theme (Theme, optional): An optional style theme object, or ``None`` for default theme. + stderr (bool, optional): Use stderr rather than stdout if ``file`` is not specified. Defaults to False. 
+ file (IO, optional): A file object where the console should write to. Defaults to stdout. + quiet (bool, Optional): Boolean to suppress all output. Defaults to False. + width (int, optional): The width of the terminal. Leave as default to auto-detect width. + height (int, optional): The height of the terminal. Leave as default to auto-detect height. + style (StyleType, optional): Style to apply to all output, or None for no style. Defaults to None. + no_color (Optional[bool], optional): Enabled no color mode, or None to auto detect. Defaults to None. + tab_size (int, optional): Number of spaces used to replace a tab character. Defaults to 8. + record (bool, optional): Boolean to enable recording of terminal output, + required to call :meth:`export_html`, :meth:`export_svg`, and :meth:`export_text`. Defaults to False. + markup (bool, optional): Boolean to enable :ref:`console_markup`. Defaults to True. + emoji (bool, optional): Enable emoji code. Defaults to True. + emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None. + highlight (bool, optional): Enable automatic highlighting. Defaults to True. + log_time (bool, optional): Boolean to enable logging of time by :meth:`log` methods. Defaults to True. + log_path (bool, optional): Boolean to enable the logging of the caller by :meth:`log`. Defaults to True. + log_time_format (Union[str, TimeFormatterCallable], optional): If ``log_time`` is enabled, either string for strftime or callable that formats the time. Defaults to "[%X] ". + highlighter (HighlighterType, optional): Default highlighter. + legacy_windows (bool, optional): Enable legacy Windows mode, or ``None`` to auto detect. Defaults to ``None``. + safe_box (bool, optional): Restrict box options that don't render on legacy Windows. + get_datetime (Callable[[], datetime], optional): Callable that gets the current time as a datetime.datetime object (used by Console.log), + or None for datetime.now. + get_time (Callable[[], time], optional): Callable that gets the current time in seconds, default uses time.monotonic. 
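+
+    Example (a minimal sketch; most arguments can usually be left to auto-detect):
+        >>> console = Console(width=80, record=True)
+        >>> console.print("Hello, [bold magenta]World[/]!")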
+ """ + + _environ: Mapping[str, str] = os.environ + + def __init__( + self, + *, + color_system: Optional[ + Literal["auto", "standard", "256", "truecolor", "windows"] + ] = "auto", + force_terminal: Optional[bool] = None, + force_jupyter: Optional[bool] = None, + force_interactive: Optional[bool] = None, + soft_wrap: bool = False, + theme: Optional[Theme] = None, + stderr: bool = False, + file: Optional[IO[str]] = None, + quiet: bool = False, + width: Optional[int] = None, + height: Optional[int] = None, + style: Optional[StyleType] = None, + no_color: Optional[bool] = None, + tab_size: int = 8, + record: bool = False, + markup: bool = True, + emoji: bool = True, + emoji_variant: Optional[EmojiVariant] = None, + highlight: bool = True, + log_time: bool = True, + log_path: bool = True, + log_time_format: Union[str, FormatTimeCallable] = "[%X]", + highlighter: Optional["HighlighterType"] = ReprHighlighter(), + legacy_windows: Optional[bool] = None, + safe_box: bool = True, + get_datetime: Optional[Callable[[], datetime]] = None, + get_time: Optional[Callable[[], float]] = None, + _environ: Optional[Mapping[str, str]] = None, + ): + # Copy of os.environ allows us to replace it for testing + if _environ is not None: + self._environ = _environ + + self.is_jupyter = _is_jupyter() if force_jupyter is None else force_jupyter + if self.is_jupyter: + if width is None: + jupyter_columns = self._environ.get("JUPYTER_COLUMNS") + if jupyter_columns is not None and jupyter_columns.isdigit(): + width = int(jupyter_columns) + else: + width = JUPYTER_DEFAULT_COLUMNS + if height is None: + jupyter_lines = self._environ.get("JUPYTER_LINES") + if jupyter_lines is not None and jupyter_lines.isdigit(): + height = int(jupyter_lines) + else: + height = JUPYTER_DEFAULT_LINES + + self.tab_size = tab_size + self.record = record + self._markup = markup + self._emoji = emoji + self._emoji_variant: Optional[EmojiVariant] = emoji_variant + self._highlight = highlight + self.legacy_windows: bool = ( + (detect_legacy_windows() and not self.is_jupyter) + if legacy_windows is None + else legacy_windows + ) + + if width is None: + columns = self._environ.get("COLUMNS") + if columns is not None and columns.isdigit(): + width = int(columns) - self.legacy_windows + if height is None: + lines = self._environ.get("LINES") + if lines is not None and lines.isdigit(): + height = int(lines) + + self.soft_wrap = soft_wrap + self._width = width + self._height = height + + self._color_system: Optional[ColorSystem] + + self._force_terminal = None + if force_terminal is not None: + self._force_terminal = force_terminal + + self._file = file + self.quiet = quiet + self.stderr = stderr + + if color_system is None: + self._color_system = None + elif color_system == "auto": + self._color_system = self._detect_color_system() + else: + self._color_system = COLOR_SYSTEMS[color_system] + + self._lock = threading.RLock() + self._log_render = LogRender( + show_time=log_time, + show_path=log_path, + time_format=log_time_format, + ) + self.highlighter: HighlighterType = highlighter or _null_highlighter + self.safe_box = safe_box + self.get_datetime = get_datetime or datetime.now + self.get_time = get_time or monotonic + self.style = style + self.no_color = ( + no_color + if no_color is not None + else self._environ.get("NO_COLOR", "") != "" + ) + if force_interactive is None: + tty_interactive = self._environ.get("TTY_INTERACTIVE", None) + if tty_interactive is not None: + if tty_interactive == "0": + force_interactive = False + elif tty_interactive 
== "1": + force_interactive = True + + self.is_interactive = ( + (self.is_terminal and not self.is_dumb_terminal) + if force_interactive is None + else force_interactive + ) + + self._record_buffer_lock = threading.RLock() + self._thread_locals = ConsoleThreadLocals( + theme_stack=ThemeStack(themes.DEFAULT if theme is None else theme) + ) + self._record_buffer: List[Segment] = [] + self._render_hooks: List[RenderHook] = [] + self._live_stack: List[Live] = [] + self._is_alt_screen = False + + def __repr__(self) -> str: + return f"" + + @property + def file(self) -> IO[str]: + """Get the file object to write to.""" + file = self._file or (sys.stderr if self.stderr else sys.stdout) + file = getattr(file, "rich_proxied_file", file) + if file is None: + file = NULL_FILE + return file + + @file.setter + def file(self, new_file: IO[str]) -> None: + """Set a new file object.""" + self._file = new_file + + @property + def _buffer(self) -> List[Segment]: + """Get a thread local buffer.""" + return self._thread_locals.buffer + + @property + def _buffer_index(self) -> int: + """Get a thread local buffer.""" + return self._thread_locals.buffer_index + + @_buffer_index.setter + def _buffer_index(self, value: int) -> None: + self._thread_locals.buffer_index = value + + @property + def _theme_stack(self) -> ThemeStack: + """Get the thread local theme stack.""" + return self._thread_locals.theme_stack + + def _detect_color_system(self) -> Optional[ColorSystem]: + """Detect color system from env vars.""" + if self.is_jupyter: + return ColorSystem.TRUECOLOR + if not self.is_terminal or self.is_dumb_terminal: + return None + if WINDOWS: # pragma: no cover + if self.legacy_windows: # pragma: no cover + return ColorSystem.WINDOWS + windows_console_features = get_windows_console_features() + return ( + ColorSystem.TRUECOLOR + if windows_console_features.truecolor + else ColorSystem.EIGHT_BIT + ) + else: + color_term = self._environ.get("COLORTERM", "").strip().lower() + if color_term in ("truecolor", "24bit"): + return ColorSystem.TRUECOLOR + term = self._environ.get("TERM", "").strip().lower() + _term_name, _hyphen, colors = term.rpartition("-") + color_system = _TERM_COLORS.get(colors, ColorSystem.STANDARD) + return color_system + + def _enter_buffer(self) -> None: + """Enter in to a buffer context, and buffer all output.""" + self._buffer_index += 1 + + def _exit_buffer(self) -> None: + """Leave buffer context, and render content if required.""" + self._buffer_index -= 1 + self._check_buffer() + + def set_live(self, live: "Live") -> bool: + """Set Live instance. Used by Live context manager (no need to call directly). + + Args: + live (Live): Live instance using this Console. + + Returns: + Boolean that indicates if the live is the topmost of the stack. + + Raises: + errors.LiveError: If this Console has a Live context currently active. + """ + with self._lock: + self._live_stack.append(live) + return len(self._live_stack) == 1 + + def clear_live(self) -> None: + """Clear the Live instance. Used by the Live context manager (no need to call directly).""" + with self._lock: + self._live_stack.pop() + + def push_render_hook(self, hook: RenderHook) -> None: + """Add a new render hook to the stack. + + Args: + hook (RenderHook): Render hook instance. 
+ """ + with self._lock: + self._render_hooks.append(hook) + + def pop_render_hook(self) -> None: + """Pop the last renderhook from the stack.""" + with self._lock: + self._render_hooks.pop() + + def __enter__(self) -> "Console": + """Own context manager to enter buffer context.""" + self._enter_buffer() + return self + + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: + """Exit buffer context.""" + self._exit_buffer() + + def begin_capture(self) -> None: + """Begin capturing console output. Call :meth:`end_capture` to exit capture mode and return output.""" + self._enter_buffer() + + def end_capture(self) -> str: + """End capture mode and return captured string. + + Returns: + str: Console output. + """ + render_result = self._render_buffer(self._buffer) + del self._buffer[:] + self._exit_buffer() + return render_result + + def push_theme(self, theme: Theme, *, inherit: bool = True) -> None: + """Push a new theme on to the top of the stack, replacing the styles from the previous theme. + Generally speaking, you should call :meth:`~rich.console.Console.use_theme` to get a context manager, rather + than calling this method directly. + + Args: + theme (Theme): A theme instance. + inherit (bool, optional): Inherit existing styles. Defaults to True. + """ + self._theme_stack.push_theme(theme, inherit=inherit) + + def pop_theme(self) -> None: + """Remove theme from top of stack, restoring previous theme.""" + self._theme_stack.pop_theme() + + def use_theme(self, theme: Theme, *, inherit: bool = True) -> ThemeContext: + """Use a different theme for the duration of the context manager. + + Args: + theme (Theme): Theme instance to user. + inherit (bool, optional): Inherit existing console styles. Defaults to True. + + Returns: + ThemeContext: [description] + """ + return ThemeContext(self, theme, inherit) + + @property + def color_system(self) -> Optional[str]: + """Get color system string. + + Returns: + Optional[str]: "standard", "256" or "truecolor". + """ + + if self._color_system is not None: + return _COLOR_SYSTEMS_NAMES[self._color_system] + else: + return None + + @property + def encoding(self) -> str: + """Get the encoding of the console file, e.g. ``"utf-8"``. + + Returns: + str: A standard encoding string. + """ + return (getattr(self.file, "encoding", "utf-8") or "utf-8").lower() + + @property + def is_terminal(self) -> bool: + """Check if the console is writing to a terminal. + + Returns: + bool: True if the console writing to a device capable of + understanding escape sequences, otherwise False. 
+ """ + # If dev has explicitly set this value, return it + if self._force_terminal is not None: + return self._force_terminal + + # Fudge for Idle + if hasattr(sys.stdin, "__module__") and sys.stdin.__module__.startswith( + "idlelib" + ): + # Return False for Idle which claims to be a tty but can't handle ansi codes + return False + + if self.is_jupyter: + # return False for Jupyter, which may have FORCE_COLOR set + return False + + environ = self._environ + + tty_compatible = environ.get("TTY_COMPATIBLE", "") + # 0 indicates device is not tty compatible + if tty_compatible == "0": + return False + # 1 indicates device is tty compatible + if tty_compatible == "1": + return True + + # https://force-color.org/ + force_color = environ.get("FORCE_COLOR") + if force_color is not None: + return force_color != "" + + # Any other value defaults to auto detect + isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None) + try: + return False if isatty is None else isatty() + except ValueError: + # in some situation (at the end of a pytest run for example) isatty() can raise + # ValueError: I/O operation on closed file + # return False because we aren't in a terminal anymore + return False + + @property + def is_dumb_terminal(self) -> bool: + """Detect dumb terminal. + + Returns: + bool: True if writing to a dumb terminal, otherwise False. + + """ + _term = self._environ.get("TERM", "") + is_dumb = _term.lower() in ("dumb", "unknown") + return self.is_terminal and is_dumb + + @property + def options(self) -> ConsoleOptions: + """Get default console options.""" + size = self.size + return ConsoleOptions( + max_height=size.height, + size=size, + legacy_windows=self.legacy_windows, + min_width=1, + max_width=size.width, + encoding=self.encoding, + is_terminal=self.is_terminal, + ) + + @property + def size(self) -> ConsoleDimensions: + """Get the size of the console. + + Returns: + ConsoleDimensions: A named tuple containing the dimensions. + """ + + if self._width is not None and self._height is not None: + return ConsoleDimensions(self._width - self.legacy_windows, self._height) + + if self.is_dumb_terminal: + return ConsoleDimensions(80, 25) + + width: Optional[int] = None + height: Optional[int] = None + + streams = _STD_STREAMS_OUTPUT if WINDOWS else _STD_STREAMS + for file_descriptor in streams: + try: + width, height = os.get_terminal_size(file_descriptor) + except (AttributeError, ValueError, OSError): # Probably not a terminal + pass + else: + break + + columns = self._environ.get("COLUMNS") + if columns is not None and columns.isdigit(): + width = int(columns) + lines = self._environ.get("LINES") + if lines is not None and lines.isdigit(): + height = int(lines) + + # get_terminal_size can report 0, 0 if run from pseudo-terminal + width = width or 80 + height = height or 25 + return ConsoleDimensions( + width - self.legacy_windows if self._width is None else self._width, + height if self._height is None else self._height, + ) + + @size.setter + def size(self, new_size: Tuple[int, int]) -> None: + """Set a new size for the terminal. + + Args: + new_size (Tuple[int, int]): New width and height. + """ + width, height = new_size + self._width = width + self._height = height + + @property + def width(self) -> int: + """Get the width of the console. + + Returns: + int: The width (in characters) of the console. + """ + return self.size.width + + @width.setter + def width(self, width: int) -> None: + """Set width. + + Args: + width (int): New width. 
+ """ + self._width = width + + @property + def height(self) -> int: + """Get the height of the console. + + Returns: + int: The height (in lines) of the console. + """ + return self.size.height + + @height.setter + def height(self, height: int) -> None: + """Set height. + + Args: + height (int): new height. + """ + self._height = height + + def bell(self) -> None: + """Play a 'bell' sound (if supported by the terminal).""" + self.control(Control.bell()) + + def capture(self) -> Capture: + """A context manager to *capture* the result of print() or log() in a string, + rather than writing it to the console. + + Example: + >>> from rich.console import Console + >>> console = Console() + >>> with console.capture() as capture: + ... console.print("[bold magenta]Hello World[/]") + >>> print(capture.get()) + + Returns: + Capture: Context manager with disables writing to the terminal. + """ + capture = Capture(self) + return capture + + def pager( + self, pager: Optional[Pager] = None, styles: bool = False, links: bool = False + ) -> PagerContext: + """A context manager to display anything printed within a "pager". The pager application + is defined by the system and will typically support at least pressing a key to scroll. + + Args: + pager (Pager, optional): A pager object, or None to use :class:`~rich.pager.SystemPager`. Defaults to None. + styles (bool, optional): Show styles in pager. Defaults to False. + links (bool, optional): Show links in pager. Defaults to False. + + Example: + >>> from rich.console import Console + >>> from rich.__main__ import make_test_card + >>> console = Console() + >>> with console.pager(): + console.print(make_test_card()) + + Returns: + PagerContext: A context manager. + """ + return PagerContext(self, pager=pager, styles=styles, links=links) + + def line(self, count: int = 1) -> None: + """Write new line(s). + + Args: + count (int, optional): Number of new lines. Defaults to 1. + """ + + assert count >= 0, "count must be >= 0" + self.print(NewLine(count)) + + def clear(self, home: bool = True) -> None: + """Clear the screen. + + Args: + home (bool, optional): Also move the cursor to 'home' position. Defaults to True. + """ + if home: + self.control(Control.clear(), Control.home()) + else: + self.control(Control.clear()) + + def status( + self, + status: RenderableType, + *, + spinner: str = "dots", + spinner_style: StyleType = "status.spinner", + speed: float = 1.0, + refresh_per_second: float = 12.5, + ) -> "Status": + """Display a status and spinner. + + Args: + status (RenderableType): A status renderable (str or Text typically). + spinner (str, optional): Name of spinner animation (see python -m rich.spinner). Defaults to "dots". + spinner_style (StyleType, optional): Style of spinner. Defaults to "status.spinner". + speed (float, optional): Speed factor for spinner animation. Defaults to 1.0. + refresh_per_second (float, optional): Number of refreshes per second. Defaults to 12.5. + + Returns: + Status: A Status object that may be used as a context manager. + """ + from .status import Status + + status_renderable = Status( + status, + console=self, + spinner=spinner, + spinner_style=spinner_style, + speed=speed, + refresh_per_second=refresh_per_second, + ) + return status_renderable + + def show_cursor(self, show: bool = True) -> bool: + """Show or hide the cursor. + + Args: + show (bool, optional): Set visibility of the cursor. 
+ """ + if self.is_terminal: + self.control(Control.show_cursor(show)) + return True + return False + + def set_alt_screen(self, enable: bool = True) -> bool: + """Enables alternative screen mode. + + Note, if you enable this mode, you should ensure that is disabled before + the application exits. See :meth:`~rich.Console.screen` for a context manager + that handles this for you. + + Args: + enable (bool, optional): Enable (True) or disable (False) alternate screen. Defaults to True. + + Returns: + bool: True if the control codes were written. + + """ + changed = False + if self.is_terminal and not self.legacy_windows: + self.control(Control.alt_screen(enable)) + changed = True + self._is_alt_screen = enable + return changed + + @property + def is_alt_screen(self) -> bool: + """Check if the alt screen was enabled. + + Returns: + bool: True if the alt screen was enabled, otherwise False. + """ + return self._is_alt_screen + + def set_window_title(self, title: str) -> bool: + """Set the title of the console terminal window. + + Warning: There is no means within Rich of "resetting" the window title to its + previous value, meaning the title you set will persist even after your application + exits. + + ``fish`` shell resets the window title before and after each command by default, + negating this issue. Windows Terminal and command prompt will also reset the title for you. + Most other shells and terminals, however, do not do this. + + Some terminals may require configuration changes before you can set the title. + Some terminals may not support setting the title at all. + + Other software (including the terminal itself, the shell, custom prompts, plugins, etc.) + may also set the terminal window title. This could result in whatever value you write + using this method being overwritten. + + Args: + title (str): The new title of the terminal window. + + Returns: + bool: True if the control code to change the terminal title was + written, otherwise False. Note that a return value of True + does not guarantee that the window title has actually changed, + since the feature may be unsupported/disabled in some terminals. + """ + if self.is_terminal: + self.control(Control.title(title)) + return True + return False + + def screen( + self, hide_cursor: bool = True, style: Optional[StyleType] = None + ) -> "ScreenContext": + """Context manager to enable and disable 'alternative screen' mode. + + Args: + hide_cursor (bool, optional): Also hide the cursor. Defaults to False. + style (Style, optional): Optional style for screen. Defaults to None. + + Returns: + ~ScreenContext: Context which enables alternate screen on enter, and disables it on exit. + """ + return ScreenContext(self, hide_cursor=hide_cursor, style=style or "") + + def measure( + self, renderable: RenderableType, *, options: Optional[ConsoleOptions] = None + ) -> Measurement: + """Measure a renderable. Returns a :class:`~rich.measure.Measurement` object which contains + information regarding the number of characters required to print the renderable. + + Args: + renderable (RenderableType): Any renderable or string. + options (Optional[ConsoleOptions], optional): Options to use when measuring, or None + to use default options. Defaults to None. + + Returns: + Measurement: A measurement of the renderable. 
+ """ + measurement = Measurement.get(self, options or self.options, renderable) + return measurement + + def render( + self, renderable: RenderableType, options: Optional[ConsoleOptions] = None + ) -> Iterable[Segment]: + """Render an object in to an iterable of `Segment` instances. + + This method contains the logic for rendering objects with the console protocol. + You are unlikely to need to use it directly, unless you are extending the library. + + Args: + renderable (RenderableType): An object supporting the console protocol, or + an object that may be converted to a string. + options (ConsoleOptions, optional): An options object, or None to use self.options. Defaults to None. + + Returns: + Iterable[Segment]: An iterable of segments that may be rendered. + """ + + _options = options or self.options + if _options.max_width < 1: + # No space to render anything. This prevents potential recursion errors. + return + render_iterable: RenderResult + + renderable = rich_cast(renderable) + if hasattr(renderable, "__rich_console__") and not isclass(renderable): + render_iterable = renderable.__rich_console__(self, _options) + elif isinstance(renderable, str): + text_renderable = self.render_str( + renderable, highlight=_options.highlight, markup=_options.markup + ) + render_iterable = text_renderable.__rich_console__(self, _options) + else: + raise errors.NotRenderableError( + f"Unable to render {renderable!r}; " + "A str, Segment or object with __rich_console__ method is required" + ) + + try: + iter_render = iter(render_iterable) + except TypeError: + raise errors.NotRenderableError( + f"object {render_iterable!r} is not renderable" + ) + _Segment = Segment + _options = _options.reset_height() + for render_output in iter_render: + if isinstance(render_output, _Segment): + yield render_output + else: + yield from self.render(render_output, _options) + + def render_lines( + self, + renderable: RenderableType, + options: Optional[ConsoleOptions] = None, + *, + style: Optional[Style] = None, + pad: bool = True, + new_lines: bool = False, + ) -> List[List[Segment]]: + """Render objects in to a list of lines. + + The output of render_lines is useful when further formatting of rendered console text + is required, such as the Panel class which draws a border around any renderable object. + + Args: + renderable (RenderableType): Any object renderable in the console. + options (Optional[ConsoleOptions], optional): Console options, or None to use self.options. Default to ``None``. + style (Style, optional): Optional style to apply to renderables. Defaults to ``None``. + pad (bool, optional): Pad lines shorter than render width. Defaults to ``True``. + new_lines (bool, optional): Include "\n" characters at end of lines. + + Returns: + List[List[Segment]]: A list of lines, where a line is a list of Segment objects. 
+ """ + with self._lock: + render_options = options or self.options + _rendered = self.render(renderable, render_options) + if style: + _rendered = Segment.apply_style(_rendered, style) + + render_height = render_options.height + if render_height is not None: + render_height = max(0, render_height) + + lines = list( + islice( + Segment.split_and_crop_lines( + _rendered, + render_options.max_width, + include_new_lines=new_lines, + pad=pad, + style=style, + ), + None, + render_height, + ) + ) + if render_options.height is not None: + extra_lines = render_options.height - len(lines) + if extra_lines > 0: + pad_line = [ + ( + [ + Segment(" " * render_options.max_width, style), + Segment("\n"), + ] + if new_lines + else [Segment(" " * render_options.max_width, style)] + ) + ] + lines.extend(pad_line * extra_lines) + + return lines + + def render_str( + self, + text: str, + *, + style: Union[str, Style] = "", + justify: Optional[JustifyMethod] = None, + overflow: Optional[OverflowMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + highlighter: Optional[HighlighterType] = None, + ) -> "Text": + """Convert a string to a Text instance. This is called automatically if + you print or log a string. + + Args: + text (str): Text to render. + style (Union[str, Style], optional): Style to apply to rendered text. + justify (str, optional): Justify method: "default", "left", "center", "full", or "right". Defaults to ``None``. + overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to ``None``. + emoji (Optional[bool], optional): Enable emoji, or ``None`` to use Console default. + markup (Optional[bool], optional): Enable markup, or ``None`` to use Console default. + highlight (Optional[bool], optional): Enable highlighting, or ``None`` to use Console default. + highlighter (HighlighterType, optional): Optional highlighter to apply. + Returns: + ConsoleRenderable: Renderable object. + + """ + emoji_enabled = emoji or (emoji is None and self._emoji) + markup_enabled = markup or (markup is None and self._markup) + highlight_enabled = highlight or (highlight is None and self._highlight) + + if markup_enabled: + rich_text = render_markup( + text, + style=style, + emoji=emoji_enabled, + emoji_variant=self._emoji_variant, + ) + rich_text.justify = justify + rich_text.overflow = overflow + else: + rich_text = Text( + ( + _emoji_replace(text, default_variant=self._emoji_variant) + if emoji_enabled + else text + ), + justify=justify, + overflow=overflow, + style=style, + ) + + _highlighter = (highlighter or self.highlighter) if highlight_enabled else None + if _highlighter is not None: + highlight_text = _highlighter(str(rich_text)) + highlight_text.copy_styles(rich_text) + return highlight_text + + return rich_text + + def get_style( + self, name: Union[str, Style], *, default: Optional[Union[Style, str]] = None + ) -> Style: + """Get a Style instance by its theme name or parse a definition. + + Args: + name (str): The name of a style or a style definition. + + Returns: + Style: A Style object. + + Raises: + MissingStyle: If no style could be parsed from name. 
+ + """ + if isinstance(name, Style): + return name + + try: + style = self._theme_stack.get(name) + if style is None: + style = Style.parse(name) + return style.copy() if style.link else style + except errors.StyleSyntaxError as error: + if default is not None: + return self.get_style(default) + raise errors.MissingStyle( + f"Failed to get style {name!r}; {error}" + ) from None + + def _collect_renderables( + self, + objects: Iterable[Any], + sep: str, + end: str, + *, + justify: Optional[JustifyMethod] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + ) -> List[ConsoleRenderable]: + """Combine a number of renderables and text into one renderable. + + Args: + objects (Iterable[Any]): Anything that Rich can render. + sep (str): String to write between print data. + end (str): String to write at end of print data. + justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. + + Returns: + List[ConsoleRenderable]: A list of things to render. + """ + renderables: List[ConsoleRenderable] = [] + _append = renderables.append + text: List[Text] = [] + append_text = text.append + + append = _append + if justify in ("left", "center", "right"): + + def align_append(renderable: RenderableType) -> None: + _append(Align(renderable, cast(AlignMethod, justify))) + + append = align_append + + _highlighter: HighlighterType = _null_highlighter + if highlight or (highlight is None and self._highlight): + _highlighter = self.highlighter + + def check_text() -> None: + if text: + sep_text = Text(sep, justify=justify, end=end) + append(sep_text.join(text)) + text.clear() + + for renderable in objects: + renderable = rich_cast(renderable) + if isinstance(renderable, str): + append_text( + self.render_str( + renderable, + emoji=emoji, + markup=markup, + highlight=highlight, + highlighter=_highlighter, + ) + ) + elif isinstance(renderable, Text): + append_text(renderable) + elif isinstance(renderable, ConsoleRenderable): + check_text() + append(renderable) + elif is_expandable(renderable): + check_text() + append(Pretty(renderable, highlighter=_highlighter)) + else: + append_text(_highlighter(str(renderable))) + + check_text() + + if self.style is not None: + style = self.get_style(self.style) + renderables = [Styled(renderable, style) for renderable in renderables] + + return renderables + + def rule( + self, + title: TextType = "", + *, + characters: str = "─", + style: Union[str, Style] = "rule.line", + align: AlignMethod = "center", + ) -> None: + """Draw a line with optional centered title. + + Args: + title (str, optional): Text to render over the rule. Defaults to "". + characters (str, optional): Character(s) to form the line. Defaults to "─". + style (str, optional): Style of line. Defaults to "rule.line". + align (str, optional): How to align the title, one of "left", "center", or "right". Defaults to "center". + """ + from .rule import Rule + + rule = Rule(title=title, characters=characters, style=style, align=align) + self.print(rule) + + def control(self, *control: Control) -> None: + """Insert non-printing control codes. + + Args: + control_codes (str): Control codes, such as those that may move the cursor. 
+ """ + if not self.is_dumb_terminal: + with self: + self._buffer.extend(_control.segment for _control in control) + + def out( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + highlight: Optional[bool] = None, + ) -> None: + """Output to the terminal. This is a low-level way of writing to the terminal which unlike + :meth:`~rich.console.Console.print` won't pretty print, wrap text, or apply markup, but will + optionally apply highlighting and a basic style. + + Args: + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use + console default. Defaults to ``None``. + """ + raw_output: str = sep.join(str(_object) for _object in objects) + self.print( + raw_output, + style=style, + highlight=highlight, + emoji=False, + markup=False, + no_wrap=True, + overflow="ignore", + crop=False, + end=end, + ) + + def print( + self, + *objects: Any, + sep: str = " ", + end: str = "\n", + style: Optional[Union[str, Style]] = None, + justify: Optional[JustifyMethod] = None, + overflow: Optional[OverflowMethod] = None, + no_wrap: Optional[bool] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + width: Optional[int] = None, + height: Optional[int] = None, + crop: bool = True, + soft_wrap: Optional[bool] = None, + new_line_start: bool = False, + ) -> None: + """Print to the console. + + Args: + objects (positional args): Objects to log to the terminal. + sep (str, optional): String to write between print data. Defaults to " ". + end (str, optional): String to write at end of print data. Defaults to "\\\\n". + style (Union[str, Style], optional): A style to apply to output. Defaults to None. + justify (str, optional): Justify method: "default", "left", "right", "center", or "full". Defaults to ``None``. + overflow (str, optional): Overflow method: "ignore", "crop", "fold", or "ellipsis". Defaults to None. + no_wrap (Optional[bool], optional): Disable word wrapping. Defaults to None. + emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to ``None``. + markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to ``None``. + highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to ``None``. + width (Optional[int], optional): Width of output, or ``None`` to auto-detect. Defaults to ``None``. + crop (Optional[bool], optional): Crop output to width of terminal. Defaults to True. + soft_wrap (bool, optional): Enable soft wrap mode which disables word wrapping and cropping of text or ``None`` for + Console default. Defaults to ``None``. + new_line_start (bool, False): Insert a new line at the start if the output contains more than one line. Defaults to ``False``. 
+ """ + if not objects: + objects = (NewLine(),) + + if soft_wrap is None: + soft_wrap = self.soft_wrap + if soft_wrap: + if no_wrap is None: + no_wrap = True + if overflow is None: + overflow = "ignore" + crop = False + render_hooks = self._render_hooks[:] + with self: + renderables = self._collect_renderables( + objects, + sep, + end, + justify=justify, + emoji=emoji, + markup=markup, + highlight=highlight, + ) + for hook in render_hooks: + renderables = hook.process_renderables(renderables) + render_options = self.options.update( + justify=justify, + overflow=overflow, + width=min(width, self.width) if width is not None else NO_CHANGE, + height=height, + no_wrap=no_wrap, + markup=markup, + highlight=highlight, + ) + + new_segments: List[Segment] = [] + extend = new_segments.extend + render = self.render + if style is None: + for renderable in renderables: + extend(render(renderable, render_options)) + else: + for renderable in renderables: + extend( + Segment.apply_style( + render(renderable, render_options), self.get_style(style) + ) + ) + if new_line_start: + if ( + len("".join(segment.text for segment in new_segments).splitlines()) + > 1 + ): + new_segments.insert(0, Segment.line()) + if crop: + buffer_extend = self._buffer.extend + for line in Segment.split_and_crop_lines( + new_segments, self.width, pad=False + ): + buffer_extend(line) + else: + self._buffer.extend(new_segments) + + def print_json( + self, + json: Optional[str] = None, + *, + data: Any = None, + indent: Union[None, int, str] = 2, + highlight: bool = True, + skip_keys: bool = False, + ensure_ascii: bool = False, + check_circular: bool = True, + allow_nan: bool = True, + default: Optional[Callable[[Any], Any]] = None, + sort_keys: bool = False, + ) -> None: + """Pretty prints JSON. Output will be valid JSON. + + Args: + json (Optional[str]): A string containing JSON. + data (Any): If json is not supplied, then encode this data. + indent (Union[None, int, str], optional): Number of spaces to indent. Defaults to 2. + highlight (bool, optional): Enable highlighting of output: Defaults to True. + skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False. + ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False. + check_circular (bool, optional): Check for circular references. Defaults to True. + allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True. + default (Callable, optional): A callable that converts values that can not be encoded + in to something that can be JSON encoded. Defaults to None. + sort_keys (bool, optional): Sort dictionary keys. Defaults to False. + """ + from pip._vendor.rich.json import JSON + + if json is None: + json_renderable = JSON.from_data( + data, + indent=indent, + highlight=highlight, + skip_keys=skip_keys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + default=default, + sort_keys=sort_keys, + ) + else: + if not isinstance(json, str): + raise TypeError( + f"json must be str. Did you mean print_json(data={json!r}) ?" 
+                )
+            json_renderable = JSON(
+                json,
+                indent=indent,
+                highlight=highlight,
+                skip_keys=skip_keys,
+                ensure_ascii=ensure_ascii,
+                check_circular=check_circular,
+                allow_nan=allow_nan,
+                default=default,
+                sort_keys=sort_keys,
+            )
+        self.print(json_renderable, soft_wrap=True)
+
+    def update_screen(
+        self,
+        renderable: RenderableType,
+        *,
+        region: Optional[Region] = None,
+        options: Optional[ConsoleOptions] = None,
+    ) -> None:
+        """Update the screen at a given offset.
+
+        Args:
+            renderable (RenderableType): A Rich renderable.
+            region (Region, optional): Region of screen to update, or None for entire screen. Defaults to None.
+            options (ConsoleOptions, optional): Console options used to render, or ``None`` for the
+                console defaults. Defaults to None.
+
+        Raises:
+            errors.NoAltScreen: If the Console isn't in alt screen mode.
+
+        """
+        if not self.is_alt_screen:
+            raise errors.NoAltScreen("Alt screen must be enabled to call update_screen")
+        render_options = options or self.options
+        if region is None:
+            x = y = 0
+            render_options = render_options.update_dimensions(
+                render_options.max_width, render_options.height or self.height
+            )
+        else:
+            x, y, width, height = region
+            render_options = render_options.update_dimensions(width, height)
+
+        lines = self.render_lines(renderable, options=render_options)
+        self.update_screen_lines(lines, x, y)
+
+    def update_screen_lines(
+        self, lines: List[List[Segment]], x: int = 0, y: int = 0
+    ) -> None:
+        """Update lines of the screen at a given offset.
+
+        Args:
+            lines (List[List[Segment]]): Rendered lines (as produced by :meth:`~rich.Console.render_lines`).
+            x (int, optional): x offset (column no). Defaults to 0.
+            y (int, optional): y offset (row no). Defaults to 0.
+
+        Raises:
+            errors.NoAltScreen: If the Console isn't in alt screen mode.
+        """
+        if not self.is_alt_screen:
+            raise errors.NoAltScreen("Alt screen must be enabled to call update_screen")
+        screen_update = ScreenUpdate(lines, x, y)
+        segments = self.render(screen_update)
+        self._buffer.extend(segments)
+        self._check_buffer()
+
+    def print_exception(
+        self,
+        *,
+        width: Optional[int] = 100,
+        extra_lines: int = 3,
+        theme: Optional[str] = None,
+        word_wrap: bool = False,
+        show_locals: bool = False,
+        suppress: Iterable[Union[str, ModuleType]] = (),
+        max_frames: int = 100,
+    ) -> None:
+        """Prints a rich render of the last exception and traceback.
+
+        Args:
+            width (Optional[int], optional): Number of characters used to render code. Defaults to 100.
+            extra_lines (int, optional): Additional lines of code to render. Defaults to 3.
+            theme (str, optional): Override pygments theme used in traceback
+            word_wrap (bool, optional): Enable word wrapping of long lines. Defaults to False.
+            show_locals (bool, optional): Enable display of local variables. Defaults to False.
+            suppress (Iterable[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback.
+            max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100.
+        """
+        from .traceback import Traceback
+
+        traceback = Traceback(
+            width=width,
+            extra_lines=extra_lines,
+            theme=theme,
+            word_wrap=word_wrap,
+            show_locals=show_locals,
+            suppress=suppress,
+            max_frames=max_frames,
+        )
+        self.print(traceback)
+
+    @staticmethod
+    def _caller_frame_info(
+        offset: int,
+        currentframe: Callable[[], Optional[FrameType]] = inspect.currentframe,
+    ) -> Tuple[str, int, Dict[str, Any]]:
+        """Get caller frame information.
+
+        Args:
+            offset (int): the caller offset within the current frame stack.
+            currentframe (Callable[[], Optional[FrameType]], optional): the callable to use to
+                retrieve the current frame. Defaults to ``inspect.currentframe``.
+
+        Returns:
+            Tuple[str, int, Dict[str, Any]]: A tuple containing the filename, the line number and
+                the dictionary of local variables associated with the caller frame.
+
+        Raises:
+            RuntimeError: If the stack offset is invalid.
+        """
+        # Ignore the frame of this local helper
+        offset += 1
+
+        frame = currentframe()
+        if frame is not None:
+            # Use the faster currentframe where implemented
+            while offset and frame is not None:
+                frame = frame.f_back
+                offset -= 1
+            assert frame is not None
+            return frame.f_code.co_filename, frame.f_lineno, frame.f_locals
+        else:
+            # Fallback to the slower stack
+            frame_info = inspect.stack()[offset]
+            return frame_info.filename, frame_info.lineno, frame_info.frame.f_locals
+
+    def log(
+        self,
+        *objects: Any,
+        sep: str = " ",
+        end: str = "\n",
+        style: Optional[Union[str, Style]] = None,
+        justify: Optional[JustifyMethod] = None,
+        emoji: Optional[bool] = None,
+        markup: Optional[bool] = None,
+        highlight: Optional[bool] = None,
+        log_locals: bool = False,
+        _stack_offset: int = 1,
+    ) -> None:
+        """Log rich content to the terminal.
+
+        Args:
+            objects (positional args): Objects to log to the terminal.
+            sep (str, optional): String to write between print data. Defaults to " ".
+            end (str, optional): String to write at end of print data. Defaults to "\\n".
+            style (Union[str, Style], optional): A style to apply to output. Defaults to None.
+            justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``.
+            emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to None.
+            markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to None.
+            highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to None.
+            log_locals (bool, optional): Boolean to enable logging of locals where ``log()``
+                was called. Defaults to False.
+            _stack_offset (int, optional): Offset of caller from end of call stack. Defaults to 1.
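+
+        Example:
+            Illustrative usage sketch (assumes a default ``Console``)::
+
+                console = Console()
+                console.log("Server starting...", log_locals=True)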
+ """ + if not objects: + objects = (NewLine(),) + + render_hooks = self._render_hooks[:] + + with self: + renderables = self._collect_renderables( + objects, + sep, + end, + justify=justify, + emoji=emoji, + markup=markup, + highlight=highlight, + ) + if style is not None: + renderables = [Styled(renderable, style) for renderable in renderables] + + filename, line_no, locals = self._caller_frame_info(_stack_offset) + link_path = None if filename.startswith("<") else os.path.abspath(filename) + path = filename.rpartition(os.sep)[-1] + if log_locals: + locals_map = { + key: value + for key, value in locals.items() + if not key.startswith("__") + } + renderables.append(render_scope(locals_map, title="[i]locals")) + + renderables = [ + self._log_render( + self, + renderables, + log_time=self.get_datetime(), + path=path, + line_no=line_no, + link_path=link_path, + ) + ] + for hook in render_hooks: + renderables = hook.process_renderables(renderables) + new_segments: List[Segment] = [] + extend = new_segments.extend + render = self.render + render_options = self.options + for renderable in renderables: + extend(render(renderable, render_options)) + buffer_extend = self._buffer.extend + for line in Segment.split_and_crop_lines( + new_segments, self.width, pad=False + ): + buffer_extend(line) + + def on_broken_pipe(self) -> None: + """This function is called when a `BrokenPipeError` is raised. + + This can occur when piping Textual output in Linux and macOS. + The default implementation is to exit the app, but you could implement + this method in a subclass to change the behavior. + + See https://docs.python.org/3/library/signal.html#note-on-sigpipe for details. + """ + self.quiet = True + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + raise SystemExit(1) + + def _check_buffer(self) -> None: + """Check if the buffer may be rendered. Render it if it can (e.g. Console.quiet is False) + Rendering is supported on Windows, Unix and Jupyter environments. For + legacy Windows consoles, the win32 API is called directly. + This method will also record what it renders if recording is enabled via Console.record. + """ + if self.quiet: + del self._buffer[:] + return + + try: + self._write_buffer() + except BrokenPipeError: + self.on_broken_pipe() + + def _write_buffer(self) -> None: + """Write the buffer to the output file.""" + + with self._lock: + if self.record and not self._buffer_index: + with self._record_buffer_lock: + self._record_buffer.extend(self._buffer[:]) + + if self._buffer_index == 0: + if self.is_jupyter: # pragma: no cover + from .jupyter import display + + display(self._buffer, self._render_buffer(self._buffer[:])) + del self._buffer[:] + else: + if WINDOWS: + use_legacy_windows_render = False + if self.legacy_windows: + fileno = get_fileno(self.file) + if fileno is not None: + use_legacy_windows_render = ( + fileno in _STD_STREAMS_OUTPUT + ) + + if use_legacy_windows_render: + from pip._vendor.rich._win32_console import LegacyWindowsTerm + from pip._vendor.rich._windows_renderer import legacy_windows_render + + buffer = self._buffer[:] + if self.no_color and self._color_system: + buffer = list(Segment.remove_color(buffer)) + + legacy_windows_render(buffer, LegacyWindowsTerm(self.file)) + else: + # Either a non-std stream on legacy Windows, or modern Windows. 
+ text = self._render_buffer(self._buffer[:]) + # https://bugs.python.org/issue37871 + # https://github.com/python/cpython/issues/82052 + # We need to avoid writing more than 32Kb in a single write, due to the above bug + write = self.file.write + # Worse case scenario, every character is 4 bytes of utf-8 + MAX_WRITE = 32 * 1024 // 4 + try: + if len(text) <= MAX_WRITE: + write(text) + else: + batch: List[str] = [] + batch_append = batch.append + size = 0 + for line in text.splitlines(True): + if size + len(line) > MAX_WRITE and batch: + write("".join(batch)) + batch.clear() + size = 0 + batch_append(line) + size += len(line) + if batch: + write("".join(batch)) + batch.clear() + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + else: + text = self._render_buffer(self._buffer[:]) + try: + self.file.write(text) + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + + self.file.flush() + del self._buffer[:] + + def _render_buffer(self, buffer: Iterable[Segment]) -> str: + """Render buffered output, and clear buffer.""" + output: List[str] = [] + append = output.append + color_system = self._color_system + legacy_windows = self.legacy_windows + not_terminal = not self.is_terminal + if self.no_color and color_system: + buffer = Segment.remove_color(buffer) + for text, style, control in buffer: + if style: + append( + style.render( + text, + color_system=color_system, + legacy_windows=legacy_windows, + ) + ) + elif not (not_terminal and control): + append(text) + + rendered = "".join(output) + return rendered + + def input( + self, + prompt: TextType = "", + *, + markup: bool = True, + emoji: bool = True, + password: bool = False, + stream: Optional[TextIO] = None, + ) -> str: + """Displays a prompt and waits for input from the user. The prompt may contain color / style. + + It works in the same way as Python's builtin :func:`input` function and provides elaborate line editing and history features if Python's builtin :mod:`readline` module is previously loaded. + + Args: + prompt (Union[str, Text]): Text to render in the prompt. + markup (bool, optional): Enable console markup (requires a str prompt). Defaults to True. + emoji (bool, optional): Enable emoji (requires a str prompt). Defaults to True. + password: (bool, optional): Hide typed text. Defaults to False. + stream: (TextIO, optional): Optional file to read input from (rather than stdin). Defaults to None. + + Returns: + str: Text read from stdin. + """ + if prompt: + self.print(prompt, markup=markup, emoji=emoji, end="") + if password: + result = getpass("", stream=stream) + else: + if stream: + result = stream.readline() + else: + result = input() + return result + + def export_text(self, *, clear: bool = True, styles: bool = False) -> str: + """Generate text from console contents (requires record=True argument in constructor). + + Args: + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + styles (bool, optional): If ``True``, ansi escape codes will be included. ``False`` for plain text. + Defaults to ``False``. + + Returns: + str: String containing console contents. 
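+
+        Example:
+            Illustrative usage sketch (assumes ``record=True`` was passed to the constructor)::
+
+                console = Console(record=True)
+                console.print("[bold]Hello[/bold]")
+                plain_text = console.export_text()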
+ + """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + + with self._record_buffer_lock: + if styles: + text = "".join( + (style.render(text) if style else text) + for text, style, _ in self._record_buffer + ) + else: + text = "".join( + segment.text + for segment in self._record_buffer + if not segment.control + ) + if clear: + del self._record_buffer[:] + return text + + def save_text(self, path: str, *, clear: bool = True, styles: bool = False) -> None: + """Generate text from console and save to a given location (requires record=True argument in constructor). + + Args: + path (str): Path to write text files. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + styles (bool, optional): If ``True``, ansi style codes will be included. ``False`` for plain text. + Defaults to ``False``. + + """ + text = self.export_text(clear=clear, styles=styles) + with open(path, "w", encoding="utf-8") as write_file: + write_file.write(text) + + def export_html( + self, + *, + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: Optional[str] = None, + inline_styles: bool = False, + ) -> str: + """Generate HTML from console contents (requires record=True argument in constructor). + + Args: + theme (TerminalTheme, optional): TerminalTheme object containing console colors. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. + inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files + larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. + Defaults to False. + + Returns: + str: String containing console contents as HTML. 
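+
+        Example:
+            Illustrative usage sketch (assumes ``record=True`` was passed to the constructor)::
+
+                console = Console(record=True)
+                console.print("[bold red]Alert![/bold red]")
+                html = console.export_html(inline_styles=True)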
+ """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + fragments: List[str] = [] + append = fragments.append + _theme = theme or DEFAULT_TERMINAL_THEME + stylesheet = "" + + render_code_format = CONSOLE_HTML_FORMAT if code_format is None else code_format + + with self._record_buffer_lock: + if inline_styles: + for text, style, _ in Segment.filter_control( + Segment.simplify(self._record_buffer) + ): + text = escape(text) + if style: + rule = style.get_html_style(_theme) + if style.link: + text = f'{text}' + text = f'{text}' if rule else text + append(text) + else: + styles: Dict[str, int] = {} + for text, style, _ in Segment.filter_control( + Segment.simplify(self._record_buffer) + ): + text = escape(text) + if style: + rule = style.get_html_style(_theme) + style_number = styles.setdefault(rule, len(styles) + 1) + if style.link: + text = f'{text}' + else: + text = f'{text}' + append(text) + stylesheet_rules: List[str] = [] + stylesheet_append = stylesheet_rules.append + for style_rule, style_number in styles.items(): + if style_rule: + stylesheet_append(f".r{style_number} {{{style_rule}}}") + stylesheet = "\n".join(stylesheet_rules) + + rendered_code = render_code_format.format( + code="".join(fragments), + stylesheet=stylesheet, + foreground=_theme.foreground_color.hex, + background=_theme.background_color.hex, + ) + if clear: + del self._record_buffer[:] + return rendered_code + + def save_html( + self, + path: str, + *, + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_HTML_FORMAT, + inline_styles: bool = False, + ) -> None: + """Generate HTML from console contents and write to a file (requires record=True argument in constructor). + + Args: + path (str): Path to write html file. + theme (TerminalTheme, optional): TerminalTheme object containing console colors. + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. + inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files + larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. + Defaults to False. + + """ + html = self.export_html( + theme=theme, + clear=clear, + code_format=code_format, + inline_styles=inline_styles, + ) + with open(path, "w", encoding="utf-8") as write_file: + write_file.write(html) + + def export_svg( + self, + *, + title: str = "Rich", + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_SVG_FORMAT, + font_aspect_ratio: float = 0.61, + unique_id: Optional[str] = None, + ) -> str: + """ + Generate an SVG from the console contents (requires record=True in Console constructor). + + Args: + title (str, optional): The title of the tab in the output image + theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` + code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables + into the string in order to form the final SVG output. The default template used and the variables + injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. 
+            font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format``
+                string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font).
+                If you aren't specifying a different font inside ``code_format``, you probably don't need this.
+            unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node
+                ids). If not set, this defaults to a computed value based on the recorded content.
+        """
+
+        from pip._vendor.rich.cells import cell_len
+
+        style_cache: Dict[Style, str] = {}
+
+        def get_svg_style(style: Style) -> str:
+            """Convert a Style to CSS rules for SVG."""
+            if style in style_cache:
+                return style_cache[style]
+            css_rules = []
+            color = (
+                _theme.foreground_color
+                if (style.color is None or style.color.is_default)
+                else style.color.get_truecolor(_theme)
+            )
+            bgcolor = (
+                _theme.background_color
+                if (style.bgcolor is None or style.bgcolor.is_default)
+                else style.bgcolor.get_truecolor(_theme)
+            )
+            if style.reverse:
+                color, bgcolor = bgcolor, color
+            if style.dim:
+                color = blend_rgb(color, bgcolor, 0.4)
+            css_rules.append(f"fill: {color.hex}")
+            if style.bold:
+                css_rules.append("font-weight: bold")
+            if style.italic:
+                css_rules.append("font-style: italic;")
+            if style.underline:
+                css_rules.append("text-decoration: underline;")
+            if style.strike:
+                css_rules.append("text-decoration: line-through;")
+
+            css = ";".join(css_rules)
+            style_cache[style] = css
+            return css
+
+        _theme = theme or SVG_EXPORT_THEME
+
+        width = self.width
+        char_height = 20
+        char_width = char_height * font_aspect_ratio
+        line_height = char_height * 1.22
+
+        margin_top = 1
+        margin_right = 1
+        margin_bottom = 1
+        margin_left = 1
+
+        padding_top = 40
+        padding_right = 8
+        padding_bottom = 8
+        padding_left = 8
+
+        padding_width = padding_left + padding_right
+        padding_height = padding_top + padding_bottom
+        margin_width = margin_left + margin_right
+        margin_height = margin_top + margin_bottom
+
+        text_backgrounds: List[str] = []
+        text_group: List[str] = []
+        classes: Dict[str, int] = {}
+        style_no = 1
+
+        def escape_text(text: str) -> str:
+            """HTML escape text and replace spaces with nbsp."""
+            return escape(text).replace(" ", "&#160;")
+
+        def make_tag(
+            name: str, content: Optional[str] = None, **attribs: object
+        ) -> str:
+            """Make a tag from name, content, and attributes."""
+
+            def stringify(value: object) -> str:
+                if isinstance(value, (float)):
+                    return format(value, "g")
+                return str(value)
+
+            tag_attribs = " ".join(
+                f'{k.lstrip("_").replace("_", "-")}="{stringify(v)}"'
+                for k, v in attribs.items()
+            )
+            return (
+                f"<{name} {tag_attribs}>{content}</{name}>"
+                if content
+                else f"<{name} {tag_attribs}/>"
+            )
+
+        with self._record_buffer_lock:
+            segments = list(Segment.filter_control(self._record_buffer))
+            if clear:
+                self._record_buffer.clear()
+
+        if unique_id is None:
+            unique_id = "terminal-" + str(
+                zlib.adler32(
+                    ("".join(repr(segment) for segment in segments)).encode(
+                        "utf-8",
+                        "ignore",
+                    )
+                    + title.encode("utf-8", "ignore")
+                )
+            )
+        y = 0
+        for y, line in enumerate(Segment.split_and_crop_lines(segments, length=width)):
+            x = 0
+            for text, style, _control in line:
+                style = style or Style()
+                rules = get_svg_style(style)
+                if rules not in classes:
+                    classes[rules] = style_no
+                    style_no += 1
+                class_name = f"r{classes[rules]}"
+
+                if style.reverse:
+                    has_background = True
+                    background = (
+                        _theme.foreground_color.hex
+                        if style.color is None
+                        else style.color.get_truecolor(_theme).hex
+                    )
+                else:
+                    bgcolor = style.bgcolor
+                    has_background = bgcolor is not None and not bgcolor.is_default
+                    background = (
+                        _theme.background_color.hex
+                        if style.bgcolor is None
+                        else style.bgcolor.get_truecolor(_theme).hex
+                    )
+
+                text_length = cell_len(text)
+                if has_background:
+                    text_backgrounds.append(
+                        make_tag(
+                            "rect",
+                            fill=background,
+                            x=x * char_width,
+                            y=y * line_height + 1.5,
+                            width=char_width * text_length,
+                            height=line_height + 0.25,
+                            shape_rendering="crispEdges",
+                        )
+                    )
+
+                if text != " " * len(text):
+                    text_group.append(
+                        make_tag(
+                            "text",
+                            escape_text(text),
+                            _class=f"{unique_id}-{class_name}",
+                            x=x * char_width,
+                            y=y * line_height + char_height,
+                            textLength=char_width * len(text),
+                            clip_path=f"url(#{unique_id}-line-{y})",
+                        )
+                    )
+                x += cell_len(text)
+
+        line_offsets = [line_no * line_height + 1.5 for line_no in range(y)]
+        lines = "\n".join(
+            f"""<clipPath id="{unique_id}-line-{line_no}">
+    {make_tag("rect", x=0, y=offset, width=char_width * width, height=line_height + 0.25)}
+            </clipPath>"""
+            for line_no, offset in enumerate(line_offsets)
+        )
+
+        styles = "\n".join(
+            f".{unique_id}-r{rule_no} {{ {css} }}" for css, rule_no in classes.items()
+        )
+        backgrounds = "".join(text_backgrounds)
+        matrix = "".join(text_group)
+
+        terminal_width = ceil(width * char_width + padding_width)
+        terminal_height = (y + 1) * line_height + padding_height
+        chrome = make_tag(
+            "rect",
+            fill=_theme.background_color.hex,
+            stroke="rgba(255,255,255,0.35)",
+            stroke_width="1",
+            x=margin_left,
+            y=margin_top,
+            width=terminal_width,
+            height=terminal_height,
+            rx=8,
+        )
+
+        title_color = _theme.foreground_color.hex
+        if title:
+            chrome += make_tag(
+                "text",
+                escape_text(title),
+                _class=f"{unique_id}-title",
+                fill=title_color,
+                text_anchor="middle",
+                x=terminal_width // 2,
+                y=margin_top + char_height + 6,
+            )
+        chrome += f"""
+            <g transform="translate(26,22)">
+            <circle cx="0" cy="0" r="7" fill="#ff5f57"/>
+            <circle cx="22" cy="0" r="7" fill="#febc2e"/>
+            <circle cx="44" cy="0" r="7" fill="#28c840"/>
+            </g>
+        """
+
+        svg = code_format.format(
+            unique_id=unique_id,
+            char_width=char_width,
+            char_height=char_height,
+            line_height=line_height,
+            terminal_width=char_width * width - 1,
+            terminal_height=(y + 1) * line_height - 1,
+            width=terminal_width + margin_width,
+            height=terminal_height + margin_height,
+            terminal_x=margin_left + padding_left,
+            terminal_y=margin_top + padding_top,
+            styles=styles,
+            chrome=chrome,
+            backgrounds=backgrounds,
+            matrix=matrix,
+            lines=lines,
+        )
+        return svg
+
+    def save_svg(
+        self,
+        path: str,
+        *,
+        title: str = "Rich",
+        theme: Optional[TerminalTheme] = None,
+        clear: bool = True,
+        code_format: str = CONSOLE_SVG_FORMAT,
+        font_aspect_ratio: float = 0.61,
+        unique_id: Optional[str] = None,
+    ) -> None:
+        """Generate an SVG file from the console contents (requires record=True in Console constructor).
+
+        Args:
+            path (str): The path to write the SVG to.
+            title (str, optional): The title of the tab in the output image
+            theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal
+            clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``
+            code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables
+                into the string in order to form the final SVG output. The default template used and the variables
+                injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable.
+            font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format``
+                string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font).
+ If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. + """ + svg = self.export_svg( + title=title, + theme=theme, + clear=clear, + code_format=code_format, + font_aspect_ratio=font_aspect_ratio, + unique_id=unique_id, + ) + with open(path, "w", encoding="utf-8") as write_file: + write_file.write(svg) + + +def _svg_hash(svg_main_code: str) -> str: + """Returns a unique hash for the given SVG main code. + + Args: + svg_main_code (str): The content we're going to inject in the SVG envelope. + + Returns: + str: a hash of the given content + """ + return str(zlib.adler32(svg_main_code.encode())) + + +if __name__ == "__main__": # pragma: no cover + console = Console(record=True) + + console.log( + "JSONRPC [i]request[/i]", + 5, + 1.3, + True, + False, + None, + { + "jsonrpc": "2.0", + "method": "subtract", + "params": {"minuend": 42, "subtrahend": 23}, + "id": 3, + }, + ) + + console.log("Hello, World!", "{'a': 1}", repr(console)) + + console.print( + { + "name": None, + "empty": [], + "quiz": { + "sport": { + "answered": True, + "q1": { + "question": "Which one is correct team name in NBA?", + "options": [ + "New York Bulls", + "Los Angeles Kings", + "Golden State Warriors", + "Huston Rocket", + ], + "answer": "Huston Rocket", + }, + }, + "maths": { + "answered": False, + "q1": { + "question": "5 + 7 = ?", + "options": [10, 11, 12, 13], + "answer": 12, + }, + "q2": { + "question": "12 - 8 = ?", + "options": [1, 2, 3, 4], + "answer": 4, + }, + }, + }, + } + ) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py new file mode 100644 index 0000000..65fdf56 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/constrain.py @@ -0,0 +1,37 @@ +from typing import Optional, TYPE_CHECKING + +from .jupyter import JupyterMixin +from .measure import Measurement + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderableType, RenderResult + + +class Constrain(JupyterMixin): + """Constrain the width of a renderable to a given number of characters. + + Args: + renderable (RenderableType): A renderable object. + width (int, optional): The maximum width (in characters) to render. Defaults to 80. 
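+
+    Example:
+        Illustrative usage sketch (assumes a default ``Console``)::
+
+            from pip._vendor.rich.console import Console
+
+            console = Console()
+            console.print(Constrain("Hello, World! " * 10, width=40))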
+ """ + + def __init__(self, renderable: "RenderableType", width: Optional[int] = 80) -> None: + self.renderable = renderable + self.width = width + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + if self.width is None: + yield self.renderable + else: + child_options = options.update_width(min(self.width, options.max_width)) + yield from console.render(self.renderable, child_options) + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + if self.width is not None: + options = options.update_width(self.width) + measurement = Measurement.get(console, options, self.renderable) + return measurement diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py new file mode 100644 index 0000000..901ff8b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/containers.py @@ -0,0 +1,167 @@ +from itertools import zip_longest +from typing import ( + TYPE_CHECKING, + Iterable, + Iterator, + List, + Optional, + TypeVar, + Union, + overload, +) + +if TYPE_CHECKING: + from .console import ( + Console, + ConsoleOptions, + JustifyMethod, + OverflowMethod, + RenderResult, + RenderableType, + ) + from .text import Text + +from .cells import cell_len +from .measure import Measurement + +T = TypeVar("T") + + +class Renderables: + """A list subclass which renders its contents to the console.""" + + def __init__( + self, renderables: Optional[Iterable["RenderableType"]] = None + ) -> None: + self._renderables: List["RenderableType"] = ( + list(renderables) if renderables is not None else [] + ) + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + """Console render method to insert line-breaks.""" + yield from self._renderables + + def __rich_measure__( + self, console: "Console", options: "ConsoleOptions" + ) -> "Measurement": + dimensions = [ + Measurement.get(console, options, renderable) + for renderable in self._renderables + ] + if not dimensions: + return Measurement(1, 1) + _min = max(dimension.minimum for dimension in dimensions) + _max = max(dimension.maximum for dimension in dimensions) + return Measurement(_min, _max) + + def append(self, renderable: "RenderableType") -> None: + self._renderables.append(renderable) + + def __iter__(self) -> Iterable["RenderableType"]: + return iter(self._renderables) + + +class Lines: + """A list subclass which can render to the console.""" + + def __init__(self, lines: Iterable["Text"] = ()) -> None: + self._lines: List["Text"] = list(lines) + + def __repr__(self) -> str: + return f"Lines({self._lines!r})" + + def __iter__(self) -> Iterator["Text"]: + return iter(self._lines) + + @overload + def __getitem__(self, index: int) -> "Text": + ... + + @overload + def __getitem__(self, index: slice) -> List["Text"]: + ... 
+ + def __getitem__(self, index: Union[slice, int]) -> Union["Text", List["Text"]]: + return self._lines[index] + + def __setitem__(self, index: int, value: "Text") -> "Lines": + self._lines[index] = value + return self + + def __len__(self) -> int: + return self._lines.__len__() + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + """Console render method to insert line-breaks.""" + yield from self._lines + + def append(self, line: "Text") -> None: + self._lines.append(line) + + def extend(self, lines: Iterable["Text"]) -> None: + self._lines.extend(lines) + + def pop(self, index: int = -1) -> "Text": + return self._lines.pop(index) + + def justify( + self, + console: "Console", + width: int, + justify: "JustifyMethod" = "left", + overflow: "OverflowMethod" = "fold", + ) -> None: + """Justify and overflow text to a given width. + + Args: + console (Console): Console instance. + width (int): Number of cells available per line. + justify (str, optional): Default justify method for text: "left", "center", "full" or "right". Defaults to "left". + overflow (str, optional): Default overflow for text: "crop", "fold", or "ellipsis". Defaults to "fold". + + """ + from .text import Text + + if justify == "left": + for line in self._lines: + line.truncate(width, overflow=overflow, pad=True) + elif justify == "center": + for line in self._lines: + line.rstrip() + line.truncate(width, overflow=overflow) + line.pad_left((width - cell_len(line.plain)) // 2) + line.pad_right(width - cell_len(line.plain)) + elif justify == "right": + for line in self._lines: + line.rstrip() + line.truncate(width, overflow=overflow) + line.pad_left(width - cell_len(line.plain)) + elif justify == "full": + for line_index, line in enumerate(self._lines): + if line_index == len(self._lines) - 1: + break + words = line.split(" ") + words_size = sum(cell_len(word.plain) for word in words) + num_spaces = len(words) - 1 + spaces = [1 for _ in range(num_spaces)] + index = 0 + if spaces: + while words_size + num_spaces < width: + spaces[len(spaces) - index - 1] += 1 + num_spaces += 1 + index = (index + 1) % len(spaces) + tokens: List[Text] = [] + for index, (word, next_word) in enumerate( + zip_longest(words, words[1:]) + ): + tokens.append(word) + if index < len(spaces): + style = word.get_style_at_offset(console, -1) + next_style = next_word.get_style_at_offset(console, 0) + space_style = style if style == next_style else line.style + tokens.append(Text(" " * spaces[index], style=space_style)) + self[line_index] = Text("").join(tokens) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/control.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/control.py new file mode 100644 index 0000000..84963e9 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/control.py @@ -0,0 +1,219 @@ +import time +from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Union, Final + +from .segment import ControlCode, ControlType, Segment + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderResult + +STRIP_CONTROL_CODES: Final = [ + 7, # Bell + 8, # Backspace + 11, # Vertical tab + 12, # Form feed + 13, # Carriage return +] +_CONTROL_STRIP_TRANSLATE: Final = { + _codepoint: None for _codepoint in STRIP_CONTROL_CODES +} + +CONTROL_ESCAPE: Final = { + 7: "\\a", + 8: "\\b", + 11: "\\v", + 12: "\\f", + 13: "\\r", +} + +CONTROL_CODES_FORMAT: Dict[int, Callable[..., str]] = { + ControlType.BELL: lambda: "\x07", + 
ControlType.CARRIAGE_RETURN: lambda: "\r",
+    ControlType.HOME: lambda: "\x1b[H",
+    ControlType.CLEAR: lambda: "\x1b[2J",
+    ControlType.ENABLE_ALT_SCREEN: lambda: "\x1b[?1049h",
+    ControlType.DISABLE_ALT_SCREEN: lambda: "\x1b[?1049l",
+    ControlType.SHOW_CURSOR: lambda: "\x1b[?25h",
+    ControlType.HIDE_CURSOR: lambda: "\x1b[?25l",
+    ControlType.CURSOR_UP: lambda param: f"\x1b[{param}A",
+    ControlType.CURSOR_DOWN: lambda param: f"\x1b[{param}B",
+    ControlType.CURSOR_FORWARD: lambda param: f"\x1b[{param}C",
+    ControlType.CURSOR_BACKWARD: lambda param: f"\x1b[{param}D",
+    ControlType.CURSOR_MOVE_TO_COLUMN: lambda param: f"\x1b[{param+1}G",
+    ControlType.ERASE_IN_LINE: lambda param: f"\x1b[{param}K",
+    ControlType.CURSOR_MOVE_TO: lambda x, y: f"\x1b[{y+1};{x+1}H",
+    ControlType.SET_WINDOW_TITLE: lambda title: f"\x1b]0;{title}\x07",
+}
+
+
+class Control:
+    """A renderable that inserts a control code (non printable but may move cursor).
+
+    Args:
+        *codes (str): Positional arguments are either a :class:`~rich.segment.ControlType` enum or a
+            tuple of ControlType and an integer parameter
+    """
+
+    __slots__ = ["segment"]
+
+    def __init__(self, *codes: Union[ControlType, ControlCode]) -> None:
+        control_codes: List[ControlCode] = [
+            (code,) if isinstance(code, ControlType) else code for code in codes
+        ]
+        _format_map = CONTROL_CODES_FORMAT
+        rendered_codes = "".join(
+            _format_map[code](*parameters) for code, *parameters in control_codes
+        )
+        self.segment = Segment(rendered_codes, None, control_codes)
+
+    @classmethod
+    def bell(cls) -> "Control":
+        """Ring the 'bell'."""
+        return cls(ControlType.BELL)
+
+    @classmethod
+    def home(cls) -> "Control":
+        """Move cursor to 'home' position."""
+        return cls(ControlType.HOME)
+
+    @classmethod
+    def move(cls, x: int = 0, y: int = 0) -> "Control":
+        """Move cursor relative to current position.
+
+        Args:
+            x (int): X offset.
+            y (int): Y offset.
+
+        Returns:
+            ~Control: Control object.
+
+        """
+
+        def get_codes() -> Iterable[ControlCode]:
+            control = ControlType
+            if x:
+                yield (
+                    control.CURSOR_FORWARD if x > 0 else control.CURSOR_BACKWARD,
+                    abs(x),
+                )
+            if y:
+                yield (
+                    control.CURSOR_DOWN if y > 0 else control.CURSOR_UP,
+                    abs(y),
+                )
+
+        control = cls(*get_codes())
+        return control
+
+    @classmethod
+    def move_to_column(cls, x: int, y: int = 0) -> "Control":
+        """Move to the given column, optionally add offset to row.
+
+        Args:
+            x (int): absolute x (column)
+            y (int): optional y offset (row)
+
+        Returns:
+            ~Control: Control object.
+        """
+
+        return (
+            cls(
+                (ControlType.CURSOR_MOVE_TO_COLUMN, x),
+                (
+                    ControlType.CURSOR_DOWN if y > 0 else ControlType.CURSOR_UP,
+                    abs(y),
+                ),
+            )
+            if y
+            else cls((ControlType.CURSOR_MOVE_TO_COLUMN, x))
+        )
+
+    @classmethod
+    def move_to(cls, x: int, y: int) -> "Control":
+        """Move cursor to absolute position.
+
+        Args:
+            x (int): x offset (column)
+            y (int): y offset (row)
+
+        Returns:
+            ~Control: Control object.
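+
+        Example:
+            Illustrative usage sketch (assumes an existing ``Console`` instance named ``console``)::
+
+                from pip._vendor.rich.control import Control
+
+                console.control(Control.move_to(0, 0))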
+ """ + return cls((ControlType.CURSOR_MOVE_TO, x, y)) + + @classmethod + def clear(cls) -> "Control": + """Clear the screen.""" + return cls(ControlType.CLEAR) + + @classmethod + def show_cursor(cls, show: bool) -> "Control": + """Show or hide the cursor.""" + return cls(ControlType.SHOW_CURSOR if show else ControlType.HIDE_CURSOR) + + @classmethod + def alt_screen(cls, enable: bool) -> "Control": + """Enable or disable alt screen.""" + if enable: + return cls(ControlType.ENABLE_ALT_SCREEN, ControlType.HOME) + else: + return cls(ControlType.DISABLE_ALT_SCREEN) + + @classmethod + def title(cls, title: str) -> "Control": + """Set the terminal window title + + Args: + title (str): The new terminal window title + """ + return cls((ControlType.SET_WINDOW_TITLE, title)) + + def __str__(self) -> str: + return self.segment.text + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + if self.segment.text: + yield self.segment + + +def strip_control_codes( + text: str, _translate_table: Dict[int, None] = _CONTROL_STRIP_TRANSLATE +) -> str: + """Remove control codes from text. + + Args: + text (str): A string possibly contain control codes. + + Returns: + str: String with control codes removed. + """ + return text.translate(_translate_table) + + +def escape_control_codes( + text: str, + _translate_table: Dict[int, str] = CONTROL_ESCAPE, +) -> str: + """Replace control codes with their "escaped" equivalent in the given text. + (e.g. "\b" becomes "\\b") + + Args: + text (str): A string possibly containing control codes. + + Returns: + str: String with control codes replaced with their escaped version. + """ + return text.translate(_translate_table) + + +if __name__ == "__main__": # pragma: no cover + from pip._vendor.rich.console import Console + + console = Console() + console.print("Look at the title of your terminal window ^") + # console.print(Control((ControlType.SET_WINDOW_TITLE, "Hello, world!"))) + for i in range(10): + console.set_window_title("🚀 Loading" + "." 
* i) + time.sleep(0.5) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py new file mode 100644 index 0000000..61797bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/default_styles.py @@ -0,0 +1,193 @@ +from typing import Dict + +from .style import Style + +DEFAULT_STYLES: Dict[str, Style] = { + "none": Style.null(), + "reset": Style( + color="default", + bgcolor="default", + dim=False, + bold=False, + italic=False, + underline=False, + blink=False, + blink2=False, + reverse=False, + conceal=False, + strike=False, + ), + "dim": Style(dim=True), + "bright": Style(dim=False), + "bold": Style(bold=True), + "strong": Style(bold=True), + "code": Style(reverse=True, bold=True), + "italic": Style(italic=True), + "emphasize": Style(italic=True), + "underline": Style(underline=True), + "blink": Style(blink=True), + "blink2": Style(blink2=True), + "reverse": Style(reverse=True), + "strike": Style(strike=True), + "black": Style(color="black"), + "red": Style(color="red"), + "green": Style(color="green"), + "yellow": Style(color="yellow"), + "magenta": Style(color="magenta"), + "cyan": Style(color="cyan"), + "white": Style(color="white"), + "inspect.attr": Style(color="yellow", italic=True), + "inspect.attr.dunder": Style(color="yellow", italic=True, dim=True), + "inspect.callable": Style(bold=True, color="red"), + "inspect.async_def": Style(italic=True, color="bright_cyan"), + "inspect.def": Style(italic=True, color="bright_cyan"), + "inspect.class": Style(italic=True, color="bright_cyan"), + "inspect.error": Style(bold=True, color="red"), + "inspect.equals": Style(), + "inspect.help": Style(color="cyan"), + "inspect.doc": Style(dim=True), + "inspect.value.border": Style(color="green"), + "live.ellipsis": Style(bold=True, color="red"), + "layout.tree.row": Style(dim=False, color="red"), + "layout.tree.column": Style(dim=False, color="blue"), + "logging.keyword": Style(bold=True, color="yellow"), + "logging.level.notset": Style(dim=True), + "logging.level.debug": Style(color="green"), + "logging.level.info": Style(color="blue"), + "logging.level.warning": Style(color="yellow"), + "logging.level.error": Style(color="red", bold=True), + "logging.level.critical": Style(color="red", bold=True, reverse=True), + "log.level": Style.null(), + "log.time": Style(color="cyan", dim=True), + "log.message": Style.null(), + "log.path": Style(dim=True), + "repr.ellipsis": Style(color="yellow"), + "repr.indent": Style(color="green", dim=True), + "repr.error": Style(color="red", bold=True), + "repr.str": Style(color="green", italic=False, bold=False), + "repr.brace": Style(bold=True), + "repr.comma": Style(bold=True), + "repr.ipv4": Style(bold=True, color="bright_green"), + "repr.ipv6": Style(bold=True, color="bright_green"), + "repr.eui48": Style(bold=True, color="bright_green"), + "repr.eui64": Style(bold=True, color="bright_green"), + "repr.tag_start": Style(bold=True), + "repr.tag_name": Style(color="bright_magenta", bold=True), + "repr.tag_contents": Style(color="default"), + "repr.tag_end": Style(bold=True), + "repr.attrib_name": Style(color="yellow", italic=False), + "repr.attrib_equal": Style(bold=True), + "repr.attrib_value": Style(color="magenta", italic=False), + "repr.number": Style(color="cyan", bold=True, italic=False), + "repr.number_complex": Style(color="cyan", bold=True, italic=False), # same + "repr.bool_true": Style(color="bright_green", italic=True), + "repr.bool_false": 
Style(color="bright_red", italic=True), + "repr.none": Style(color="magenta", italic=True), + "repr.url": Style(underline=True, color="bright_blue", italic=False, bold=False), + "repr.uuid": Style(color="bright_yellow", bold=False), + "repr.call": Style(color="magenta", bold=True), + "repr.path": Style(color="magenta"), + "repr.filename": Style(color="bright_magenta"), + "rule.line": Style(color="bright_green"), + "rule.text": Style.null(), + "json.brace": Style(bold=True), + "json.bool_true": Style(color="bright_green", italic=True), + "json.bool_false": Style(color="bright_red", italic=True), + "json.null": Style(color="magenta", italic=True), + "json.number": Style(color="cyan", bold=True, italic=False), + "json.str": Style(color="green", italic=False, bold=False), + "json.key": Style(color="blue", bold=True), + "prompt": Style.null(), + "prompt.choices": Style(color="magenta", bold=True), + "prompt.default": Style(color="cyan", bold=True), + "prompt.invalid": Style(color="red"), + "prompt.invalid.choice": Style(color="red"), + "pretty": Style.null(), + "scope.border": Style(color="blue"), + "scope.key": Style(color="yellow", italic=True), + "scope.key.special": Style(color="yellow", italic=True, dim=True), + "scope.equals": Style(color="red"), + "table.header": Style(bold=True), + "table.footer": Style(bold=True), + "table.cell": Style.null(), + "table.title": Style(italic=True), + "table.caption": Style(italic=True, dim=True), + "traceback.error": Style(color="red", italic=True), + "traceback.border.syntax_error": Style(color="bright_red"), + "traceback.border": Style(color="red"), + "traceback.text": Style.null(), + "traceback.title": Style(color="red", bold=True), + "traceback.exc_type": Style(color="bright_red", bold=True), + "traceback.exc_value": Style.null(), + "traceback.offset": Style(color="bright_red", bold=True), + "traceback.error_range": Style(underline=True, bold=True), + "traceback.note": Style(color="green", bold=True), + "traceback.group.border": Style(color="magenta"), + "bar.back": Style(color="grey23"), + "bar.complete": Style(color="rgb(249,38,114)"), + "bar.finished": Style(color="rgb(114,156,31)"), + "bar.pulse": Style(color="rgb(249,38,114)"), + "progress.description": Style.null(), + "progress.filesize": Style(color="green"), + "progress.filesize.total": Style(color="green"), + "progress.download": Style(color="green"), + "progress.elapsed": Style(color="yellow"), + "progress.percentage": Style(color="magenta"), + "progress.remaining": Style(color="cyan"), + "progress.data.speed": Style(color="red"), + "progress.spinner": Style(color="green"), + "status.spinner": Style(color="green"), + "tree": Style(), + "tree.line": Style(), + "markdown.paragraph": Style(), + "markdown.text": Style(), + "markdown.em": Style(italic=True), + "markdown.emph": Style(italic=True), # For commonmark backwards compatibility + "markdown.strong": Style(bold=True), + "markdown.code": Style(bold=True, color="cyan", bgcolor="black"), + "markdown.code_block": Style(color="cyan", bgcolor="black"), + "markdown.block_quote": Style(color="magenta"), + "markdown.list": Style(color="cyan"), + "markdown.item": Style(), + "markdown.item.bullet": Style(color="yellow", bold=True), + "markdown.item.number": Style(color="yellow", bold=True), + "markdown.hr": Style(color="yellow"), + "markdown.h1.border": Style(), + "markdown.h1": Style(bold=True), + "markdown.h2": Style(bold=True, underline=True), + "markdown.h3": Style(bold=True), + "markdown.h4": Style(bold=True, dim=True), + "markdown.h5": 
Style(underline=True), + "markdown.h6": Style(italic=True), + "markdown.h7": Style(italic=True, dim=True), + "markdown.link": Style(color="bright_blue"), + "markdown.link_url": Style(color="blue", underline=True), + "markdown.s": Style(strike=True), + "iso8601.date": Style(color="blue"), + "iso8601.time": Style(color="magenta"), + "iso8601.timezone": Style(color="yellow"), +} + + +if __name__ == "__main__": # pragma: no cover + import argparse + import io + + from pip._vendor.rich.console import Console + from pip._vendor.rich.table import Table + from pip._vendor.rich.text import Text + + parser = argparse.ArgumentParser() + parser.add_argument("--html", action="store_true", help="Export as HTML table") + args = parser.parse_args() + html: bool = args.html + console = Console(record=True, width=70, file=io.StringIO()) if html else Console() + + table = Table("Name", "Styling") + + for style_name, style in DEFAULT_STYLES.items(): + table.add_row(Text(style_name, style=style), str(style)) + + console.print(table) + if html: + print(console.export_html(inline_styles=True)) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py new file mode 100644 index 0000000..92893b3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/diagnose.py @@ -0,0 +1,39 @@ +import os +import platform + +from pip._vendor.rich import inspect +from pip._vendor.rich.console import Console, get_windows_console_features +from pip._vendor.rich.panel import Panel +from pip._vendor.rich.pretty import Pretty + + +def report() -> None: # pragma: no cover + """Print a report to the terminal with debugging information""" + console = Console() + inspect(console) + features = get_windows_console_features() + inspect(features) + + env_names = ( + "CLICOLOR", + "COLORTERM", + "COLUMNS", + "JPY_PARENT_PID", + "JUPYTER_COLUMNS", + "JUPYTER_LINES", + "LINES", + "NO_COLOR", + "TERM_PROGRAM", + "TERM", + "TTY_COMPATIBLE", + "TTY_INTERACTIVE", + "VSCODE_VERBOSE_LOGGING", + ) + env = {name: os.getenv(name) for name in env_names} + console.print(Panel.fit((Pretty(env)), title="[b]Environment Variables")) + + console.print(f'platform="{platform.system()}"') + + +if __name__ == "__main__": # pragma: no cover + report() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py new file mode 100644 index 0000000..4a667ed --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/emoji.py @@ -0,0 +1,91 @@ +import sys +from typing import TYPE_CHECKING, Optional, Union, Literal + +from .jupyter import JupyterMixin +from .segment import Segment +from .style import Style +from ._emoji_codes import EMOJI +from ._emoji_replace import _emoji_replace + + +if TYPE_CHECKING: + from .console import Console, ConsoleOptions, RenderResult + + +EmojiVariant = Literal["emoji", "text"] + + +class NoEmoji(Exception): + """No emoji by that name.""" + + +class Emoji(JupyterMixin): + __slots__ = ["name", "style", "_char", "variant"] + + VARIANTS = {"text": "\uFE0E", "emoji": "\uFE0F"} + + def __init__( + self, + name: str, + style: Union[str, Style] = "none", + variant: Optional[EmojiVariant] = None, + ) -> None: + """A single emoji character. + + Args: + name (str): Name of emoji. + style (Union[str, Style], optional): Optional style. Defaults to None. + + Raises: + NoEmoji: If the emoji doesn't exist. 
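+
+        Example:
+            Illustrative usage sketch::
+
+                from pip._vendor.rich.console import Console
+
+                Console().print(Emoji("smiley"))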
+ """ + self.name = name + self.style = style + self.variant = variant + try: + self._char = EMOJI[name] + except KeyError: + raise NoEmoji(f"No emoji called {name!r}") + if variant is not None: + self._char += self.VARIANTS.get(variant, "") + + @classmethod + def replace(cls, text: str) -> str: + """Replace emoji markup with corresponding unicode characters. + + Args: + text (str): A string with emojis codes, e.g. "Hello :smiley:!" + + Returns: + str: A string with emoji codes replaces with actual emoji. + """ + return _emoji_replace(text) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return self._char + + def __rich_console__( + self, console: "Console", options: "ConsoleOptions" + ) -> "RenderResult": + yield Segment(self._char, console.get_style(self.style)) + + +if __name__ == "__main__": # pragma: no cover + import sys + + from pip._vendor.rich.columns import Columns + from pip._vendor.rich.console import Console + + console = Console(record=True) + + columns = Columns( + (f":{name}: {name}" for name in sorted(EMOJI.keys()) if "\u200D" not in name), + column_first=True, + ) + + console.print(columns) + if len(sys.argv) > 1: + console.save_html(sys.argv[1]) diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py new file mode 100644 index 0000000..0bcbe53 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/errors.py @@ -0,0 +1,34 @@ +class ConsoleError(Exception): + """An error in console operation.""" + + +class StyleError(Exception): + """An error in styles.""" + + +class StyleSyntaxError(ConsoleError): + """Style was badly formatted.""" + + +class MissingStyle(StyleError): + """No such style.""" + + +class StyleStackError(ConsoleError): + """Style stack is invalid.""" + + +class NotRenderableError(ConsoleError): + """Object is not renderable.""" + + +class MarkupError(ConsoleError): + """Markup was badly formatted.""" + + +class LiveError(ConsoleError): + """Error related to Live display.""" + + +class NoAltScreen(ConsoleError): + """Alt screen mode was required.""" diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py new file mode 100644 index 0000000..4b0b0da --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/file_proxy.py @@ -0,0 +1,57 @@ +import io +from typing import IO, TYPE_CHECKING, Any, List + +from .ansi import AnsiDecoder +from .text import Text + +if TYPE_CHECKING: + from .console import Console + + +class FileProxy(io.TextIOBase): + """Wraps a file (e.g. 
sys.stdout) and redirects writes to a console.""" + + def __init__(self, console: "Console", file: IO[str]) -> None: + self.__console = console + self.__file = file + self.__buffer: List[str] = [] + self.__ansi_decoder = AnsiDecoder() + + @property + def rich_proxied_file(self) -> IO[str]: + """Get proxied file.""" + return self.__file + + def __getattr__(self, name: str) -> Any: + return getattr(self.__file, name) + + def write(self, text: str) -> int: + if not isinstance(text, str): + raise TypeError(f"write() argument must be str, not {type(text).__name__}") + buffer = self.__buffer + lines: List[str] = [] + while text: + line, new_line, text = text.partition("\n") + if new_line: + lines.append("".join(buffer) + line) + buffer.clear() + else: + buffer.append(line) + break + if lines: + console = self.__console + with console: + output = Text("\n").join( + self.__ansi_decoder.decode_line(line) for line in lines + ) + console.print(output) + return len(text) + + def flush(self) -> None: + output = "".join(self.__buffer) + if output: + self.__console.print(output) + del self.__buffer[:] + + def fileno(self) -> int: + return self.__file.fileno() diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py new file mode 100644 index 0000000..83bc911 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/filesize.py @@ -0,0 +1,88 @@ +"""Functions for reporting filesizes. Borrowed from https://github.com/PyFilesystem/pyfilesystem2 + +The functions declared in this module should cover the different +use cases needed to generate a string representation of a file size +using several different units. Since there are many standards regarding +file size units, three different functions have been implemented. + +See Also: + * `Wikipedia: Binary prefix `_ + +""" + +__all__ = ["decimal"] + +from typing import Iterable, List, Optional, Tuple + + +def _to_str( + size: int, + suffixes: Iterable[str], + base: int, + *, + precision: Optional[int] = 1, + separator: Optional[str] = " ", +) -> str: + if size == 1: + return "1 byte" + elif size < base: + return f"{size:,} bytes" + + for i, suffix in enumerate(suffixes, 2): # noqa: B007 + unit = base**i + if size < unit: + break + return "{:,.{precision}f}{separator}{}".format( + (base * size / unit), + suffix, + precision=precision, + separator=separator, + ) + + +def pick_unit_and_suffix(size: int, suffixes: List[str], base: int) -> Tuple[int, str]: + """Pick a suffix and base for the given size.""" + for i, suffix in enumerate(suffixes): + unit = base**i + if size < unit * base: + break + return unit, suffix + + +def decimal( + size: int, + *, + precision: Optional[int] = 1, + separator: Optional[str] = " ", +) -> str: + """Convert a filesize in to a string (powers of 1000, SI prefixes). + + In this convention, ``1000 B = 1 kB``. + + This is typically the format used to advertise the storage + capacity of USB flash drives and the like (*256 MB* meaning + actually a storage capacity of more than *256 000 000 B*), + or used by **Mac OS X** since v10.6 to report file sizes. + + Arguments: + int (size): A file size. + int (precision): The number of decimal places to include (default = 1). + str (separator): The string to separate the value from the units (default = " "). + + Returns: + `str`: A string containing a abbreviated file size and units. 
+
+    Example:
+        >>> filesize.decimal(30000)
+        '30.0 kB'
+        >>> filesize.decimal(30000, precision=2, separator="")
+        '30.00kB'
+
+    """
+    return _to_str(
+        size,
+        ("kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"),
+        1000,
+        precision=precision,
+        separator=separator,
+    )
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py b/venv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py
new file mode 100644
index 0000000..e4c462e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/rich/highlighter.py
@@ -0,0 +1,232 @@
+import re
+from abc import ABC, abstractmethod
+from typing import List, Union
+
+from .text import Span, Text
+
+
+def _combine_regex(*regexes: str) -> str:
+    """Combine a number of regexes in to a single regex.
+
+    Returns:
+        str: New regex with all regexes ORed together.
+    """
+    return "|".join(regexes)
+
+
+class Highlighter(ABC):
+    """Abstract base class for highlighters."""
+
+    def __call__(self, text: Union[str, Text]) -> Text:
+        """Highlight a str or Text instance.
+
+        Args:
+            text (Union[str, ~Text]): Text to highlight.
+
+        Raises:
+            TypeError: If not called with text or str.
+
+        Returns:
+            Text: A test instance with highlighting applied.
+        """
+        if isinstance(text, str):
+            highlight_text = Text(text)
+        elif isinstance(text, Text):
+            highlight_text = text.copy()
+        else:
+            raise TypeError(f"str or Text instance required, not {text!r}")
+        self.highlight(highlight_text)
+        return highlight_text
+
+    @abstractmethod
+    def highlight(self, text: Text) -> None:
+        """Apply highlighting in place to text.
+
+        Args:
+            text (~Text): A text object highlight.
+        """
+
+
+class NullHighlighter(Highlighter):
+    """A highlighter object that doesn't highlight.
+
+    May be used to disable highlighting entirely.
+
+    """
+
+    def highlight(self, text: Text) -> None:
+        """Nothing to do"""
+
+
+class RegexHighlighter(Highlighter):
+    """Applies highlighting from a list of regular expressions."""
+
+    highlights: List[str] = []
+    base_style: str = ""
+
+    def highlight(self, text: Text) -> None:
+        """Highlight :class:`rich.text.Text` using regular expressions.
+
+        Args:
+            text (~Text): Text to highlighted.
+
+        """
+
+        highlight_regex = text.highlight_regex
+        for re_highlight in self.highlights:
+            highlight_regex(re_highlight, style_prefix=self.base_style)
+
+
+class ReprHighlighter(RegexHighlighter):
+    """Highlights the text typically produced from ``__repr__`` methods."""
+
+    base_style = "repr."
+    highlights = [
+        r"(?P<tag_start><)(?P<tag_name>[-\w.:|]*)(?P<tag_contents>[\w\W]*)(?P<tag_end>>)",
+        r'(?P<attrib_name>[\w_]{1,50})=(?P<attrib_value>"?[\w_]+"?)?',
+        r"(?P<brace>[][{}()])",
+        _combine_regex(
+            r"(?P<ipv4>[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})",
+            r"(?P<ipv6>([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4})",
+            r"(?P<eui64>(?:[0-9A-Fa-f]{1,2}-){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){3}[0-9A-Fa-f]{4})",
+            r"(?P<eui48>(?:[0-9A-Fa-f]{1,2}-){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){2}[0-9A-Fa-f]{4})",
+            r"(?P<uuid>[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})",
+            r"(?P<call>[\w.]*?)\(",
+            r"\b(?P<bool_true>True)\b|\b(?P<bool_false>False)\b|\b(?P<none>None)\b",
+            r"(?P<ellipsis>\.\.\.)",
+            r"(?P<number_complex>(?<!\w)(?:\-?[0-9]+\.?[0-9]*(?:e[-+]?\d+?)?)(?:[-+](?:[0-9]+\.?[0-9]*(?:e[-+]?\d+)?))?j)",
+            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[-+]?\d+?)?\b|0x[0-9a-fA-F]*)",
+            r"(?P<path>\B(/[-\w._+]+)*\/)(?P<filename>[-\w._+]*)?",
+            r"(?<![\\\w])(?P<str>b?'''.*?(?<!\\)'''|b?'.*?(?<!\\)'|b?\"\"\".*?(?<!\\)\"\"\"|b?\".*?(?<!\\)\")",
+            r"(?P<url>(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#~@]*)",
+        ),
+    ]
+
+
+class JSONHighlighter(RegexHighlighter):
+    """Highlights JSON"""
+
+    # Captures the start and end of JSON strings, handling escaped quotes
+    JSON_STR = r"(?<![\\\w])(?P<str>b?\".*?(?<!\\)\")"
+    JSON_WHITESPACE = {" ", "\n", "\r", "\t"}
+
+    base_style = "json."
+    highlights = [
+        _combine_regex(
+            r"(?P<brace>[\{\[\(\)\]\}])",
+            r"\b(?P<bool_true>true)\b|\b(?P<bool_false>false)\b|\b(?P<null>null)\b",
+            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[\-\+]?\d+?)?\b|0x[0-9a-fA-F]*)",
+            JSON_STR,
+        ),
+    ]
+
+    def highlight(self, text: Text) -> None:
+        super().highlight(text)
+
+        # Additional work to handle highlighting JSON keys
+        plain = text.plain
+        append = text.spans.append
+        whitespace = self.JSON_WHITESPACE
+        for match in re.finditer(self.JSON_STR, plain):
+            start, end = match.span()
+            cursor = end
+            while cursor < len(plain):
+                char = plain[cursor]
+                cursor += 1
+                if char == ":":
+                    append(Span(start, end, "json.key"))
+                elif char in whitespace:
+                    continue
+                break
+
+
+class ISO8601Highlighter(RegexHighlighter):
+    """Highlights the ISO8601 date time strings.
+    Regex reference: https://www.oreilly.com/library/view/regular-expressions-cookbook/9781449327453/ch04s07.html
+    """
+
+    base_style = "iso8601."
+    highlights = [
+        #
+        # Dates
+        #
+        # Calendar month (e.g. 2008-08). The hyphen is required
+        r"^(?P<year>[0-9]{4})-(?P<month>1[0-2]|0[1-9])$",
+        # Calendar date w/o hyphens (e.g. 20080830)
+        r"^(?P<date>(?P<year>[0-9]{4})(?P<month>1[0-2]|0[1-9])(?P<day>3[01]|0[1-9]|[12][0-9]))$",
+        # Ordinal date (e.g. 2008-243). The hyphen is optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?(?P<day>36[0-6]|3[0-5][0-9]|[12][0-9]{2}|0[1-9][0-9]|00[1-9]))$",
+        #
+        # Weeks
+        #
+        # Week of the year (e.g., 2008-W35). The hyphen is optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9]))$",
+        # Week date (e.g., 2008-W35-6). The hyphens are optional
+        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9])-?(?P<day>[1-7]))$",
+        #
+        # Times
+        #
+        # Hours and minutes (e.g., 17:21). The colon is optional
+        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9]):?(?P<minute>[0-5][0-9]))$",

    ' : '\U0001d4ab', + '\\' : '\U0001d4ac', + '\\' : '\U0000211b', + '\\' : '\U0001d4ae', + '\\' : '\U0001d4af', + '\\' : '\U0001d4b0', + '\\' : '\U0001d4b1', + '\\' : '\U0001d4b2', + '\\' : '\U0001d4b3', + '\\' : '\U0001d4b4', + '\\' : '\U0001d4b5', + '\\' : '\U0001d5ba', + '\\' : '\U0001d5bb', + '\\' : '\U0001d5bc', + '\\' : '\U0001d5bd', + '\\' : '\U0001d5be', + '\\' : '\U0001d5bf', + '\\' : '\U0001d5c0', + '\\' : '\U0001d5c1', + '\\' : '\U0001d5c2', + '\\' : '\U0001d5c3', + '\\' : '\U0001d5c4', + '\\' : '\U0001d5c5', + '\\' : '\U0001d5c6', + '\\' : '\U0001d5c7', + '\\' : '\U0001d5c8', + '\\